defmodule Raygun.Util do
@moduledoc """
Utility functions for formatting stacktrace data into strings and for
reading Raygun configuration.
"""
@default_environment_name Mix.env()
@doc """
Determines whether we will actually send an event to Raygun
"""
def environment? do
environment_name() in included_environments()
end
@doc """
Determines whether a message may be sent, i.e. it does not match any of
the configured `:excluded_messages` patterns.
"""
def msg_valid?(msg) do
not Enum.any?(get_env(:raygun, :excluded_messages, []), fn regex ->
String.match?(msg, regex)
end)
end
@doc """
Headers are a list of tuples. Convert them to a map.
"""
def format_headers(headers) do
Map.new(headers)
end
@doc """
Return the module name as a string (binary).
"""
def mod_for(module) when is_atom(module), do: Atom.to_string(module)
def mod_for(module) when is_binary(module), do: module
@doc """
Given stacktrace information, get the line number.
"""
def line_from([]), do: "unknown"
def line_from(file: _file, line: line), do: line
@doc """
Given stacktrace information, get the file name.
"""
def file_from([]), do: "unknown"
def file_from(file: file, line: _line), do: List.to_string(file)
@doc """
Like `Application.get_env/3`, but for `:application.get_key/2`.
In a release this seems to return `{:key, value}` instead of `{:ok, value}`
for some reason, so we accept that form as well.
"""
def get_key(app, key, default \\ nil) do
case :application.get_key(app, key) do
{:ok, val} -> val
{^key, val} -> val
_ -> default
end
end
@doc """
Like `Application.get_env/3`, but resolves `{:system, var_name}` tuples by
reading the named OS environment variable at runtime.
"""
def get_env(app, key, default \\ nil) do
app
|> Application.get_env(key, default)
|> read_from_system(default)
end
defp read_from_system({:system, env}, default), do: System.get_env(env) || default
defp read_from_system(value, _default), do: value
defp environment_name do
get_env(:raygun, :environment_name, @default_environment_name)
end
defp included_environments do
get_env(:raygun, :included_environments, [:prod])
end
end
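# Configuration sketch for the options this module reads. The key names come
# from the functions above; the values are illustrative. A `{:system, "VAR"}`
# tuple value would be resolved by `read_from_system/2` via `System.get_env/1`.
import Config
config :raygun,
environment_name: :staging,
included_environments: [:prod, :staging],
excluded_messages: [~r/health check/]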
# source: lib/raygun/util.ex
defmodule Polyglot.Plural.Parser do
# Parse a string into {tree, deps}
def parse("") do
{true, HashSet.new}
end
def parse(str) do
{tokens, deps} = tokenise(str, [], HashSet.new)
{parse_tree(tokens, [], []), deps}
end
# Tokenise string, using simple recursive peeking
defp tokenise(str, tokens, deps) do
case str do
"" -> {Enum.reverse(tokens), deps}
<<"and", str::binary>> -> tokenise(str, [{:op,:and}|tokens], deps)
<<"or", str::binary>> -> tokenise(str, [{:op,:or}|tokens], deps)
<<"..", str::binary>> -> tokenise(str, [{:op,:range}|tokens], deps)
<<"!=", str::binary>> -> tokenise(str, [{:op,:neq}|tokens], deps)
<<"%", str::binary>> -> tokenise(str, [{:op,:mod}|tokens], deps)
<<"=", str::binary>> -> tokenise(str, [{:op,:eq}|tokens], deps)
<<",", str::binary>> -> tokenise(str, [{:op,:comma}|tokens], deps)
<<" ", str::binary>> -> tokenise(str, tokens, deps)
<<c::binary-size(1), str::binary>> when c == "n" or c == "i" or c == "f"
or c == "t" or c == "v" or c == "w" ->
v = Macro.var(String.to_atom(c), :plural)
if c == "n" do
tokenise(str, [{:var,v}|tokens], deps)
else
tokenise(str, [{:var,v}|tokens], Set.put(deps, v))
end
str ->
case Regex.run(~r/^[0-9]+/, str) do
[n] ->
len = String.length(n)
str = String.slice(str, len, String.length(str) - len)
{i, ""} = Integer.parse(n)
tokenise(str, [{:number, i}|tokens], deps)
nil -> {:error, "Couldn't parse rule.", str}
end
end
end
# Parse tokens into a tree, using a shunting-yard parser
@precedences %{or: 1,
and: 2,
neq: 3, eq: 3,
mod: 4,
comma: 5,
range: 6}
defp parse_tree(tokens, opstack, output) do
case {tokens, opstack, output} do
{[], [], [result]} ->
result
{[], [op|opstack], output} ->
push_op(op, [], opstack, output)
{[{:op, o1}|rest], [], output} ->
parse_tree(rest, [o1], output)
{[{:op, o1}|rest]=tokens, [o2|opstack], output} ->
if @precedences[o1] <= @precedences[o2] do
push_op(o2, tokens, opstack, output)
else
parse_tree(rest, [o1,o2|opstack], output)
end
{[node|rest], opstack, output} ->
parse_tree(rest, opstack, [node|output])
end
end
defp push_op(op, tokens, opstack, [r,l|output]) do
case {op, l} do
{:comma, {:list, vs}} ->
parse_tree(tokens, opstack, [{:list, [r|vs]}|output])
{:comma, _} ->
parse_tree(tokens, opstack, [{:list, [r,l]}|output])
_ ->
parse_tree(tokens, opstack, [{:binary, op, l, r}|output])
end
end
end
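# Worked example: "n = 1" tokenises to [{:var, n}, {:op, :eq}, {:number, 1}]
# and the shunting-yard pass folds that into a single binary node. Only the
# i/f/t/v/w operands are tracked as dependencies, so deps stays empty here:
{tree, _deps} = Polyglot.Plural.Parser.parse("n = 1")
# tree => {:binary, :eq, {:var, {:n, [], :plural}}, {:number, 1}}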
# source: lib/polyglot/plural/parser.ex
defmodule Geocoder.Providers.OpenCageData do
use HTTPoison.Base
use Towel
@endpoint "http://api.opencagedata.com/"
@path "geocode/v1/json"
def geocode(opts) do
request(@path, opts |> extract_opts())
|> fmap(&parse_geocode/1)
end
def geocode_list(opts) do
request_all(@path, opts |> extract_opts())
|> fmap(fn r -> Enum.map(r, &parse_geocode/1) end)
end
def reverse_geocode(opts) do
request(@path, opts |> extract_opts())
|> fmap(&parse_reverse_geocode/1)
end
def reverse_geocode_list(opts) do
request_all(@path, opts |> extract_opts())
|> fmap(fn r -> Enum.map(r, &parse_reverse_geocode/1) end)
end
defp extract_opts(opts) do
opts
|> Keyword.put(
:q,
case opts |> Keyword.take([:address, :latlng]) |> Keyword.values() do
[{lat, lon}] -> "#{lat},#{lon}"
[query] -> query
_ -> nil
end
)
|> Keyword.take([
:q,
:key,
:bounds,
:language,
:add_request,
:countrycode,
:jsonp,
:limit,
:min_confidence,
:no_annotations,
:no_dedupe,
:pretty
])
end
defp parse_geocode(response) do
coords = geocode_coords(response)
bounds = geocode_bounds(response)
location = geocode_location(response)
%{coords | bounds: bounds, location: location}
end
defp parse_reverse_geocode(response) do
coords = geocode_coords(response)
location = geocode_location(response)
%{coords | location: location}
end
defp geocode_coords(%{"geometry" => coords}) do
%{"lat" => lat, "lng" => lon} = coords
%Geocoder.Coords{lat: lat, lon: lon}
end
defp geocode_bounds(%{"bounds" => bounds}) do
%{
"northeast" => %{"lat" => north, "lng" => east},
"southwest" => %{"lat" => south, "lng" => west}
} = bounds
%Geocoder.Bounds{top: north, right: east, bottom: south, left: west}
end
defp geocode_bounds(_), do: %Geocoder.Bounds{}
@map %{
"house_number" => :street_number,
"road" => :street,
"city" => :city,
"state" => :state,
"county" => :county,
"postcode" => :postal_code,
"country" => :country,
"country_code" => :country_code
}
defp geocode_location(%{"components" => components, "formatted" => formatted_address}) do
reduce = fn {type, name}, location ->
struct(location, [{@map[type], name}])
end
location = %Geocoder.Location{formatted_address: formatted_address}
components
|> Enum.reduce(location, reduce)
|> Map.drop([nil])
end
defp request_all(path, params) do
httpoison_options = Application.get_env(:geocoder, Geocoder.Worker)[:httpoison_options] || []
case get(path, [], Keyword.merge(httpoison_options, params: Enum.into(params, %{}))) do
{:ok, %{status_code: 200, body: %{"results" => results}}} ->
{:ok, List.wrap(results)}
{_, response} ->
{:error, response}
end
end
def request(path, params) do
request_all(path, params)
|> fmap(&List.first/1)
end
def process_url(url) do
@endpoint <> url
end
def process_response_body(body) do
body |> Poison.decode!()
end
end
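# Illustrative call; the API key is a placeholder. Either `:address` or a
# `:latlng` tuple is folded into the `q` parameter by `extract_opts/1`:
{:ok, _coords} = Geocoder.Providers.OpenCageData.geocode(key: "YOUR_API_KEY", address: "Berlin, Germany")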
# source: lib/geocoder/providers/open_cage_data.ex
defmodule SignedOverpunch do
@error_message "invalid signed overpunch value: "
@moduledoc """
Module for converting a string in signed overpunch format into the
corresponding integer.
## Conversion Table:
| Code | Digit | Sign |
| ---- | ----- | ---- |
| } | 0 | − |
| J | 1 | − |
| K | 2 | − |
| L | 3 | − |
| M | 4 | − |
| N | 5 | − |
| O | 6 | − |
| P | 7 | − |
| Q | 8 | − |
| R | 9 | − |
| { | 0 | + |
| A | 1 | + |
| B | 2 | + |
| C | 3 | + |
| D | 4 | + |
| E | 5 | + |
| F | 6 | + |
| G | 7 | + |
| H | 8 | + |
| I | 9 | + |
"""
@doc """
Converts a string in signed overpunch format to an integer.
If successful, returns a tuple in the form of `{:ok, integer}`. Otherwise, it
returns `:error`.
## Examples
iex> SignedOverpunch.convert("100{")
{:ok, 1000}
iex> SignedOverpunch.convert("100}")
{:ok, -1000}
iex> SignedOverpunch.convert("00I")
{:ok, 9}
iex> SignedOverpunch.convert("000")
:error
iex> SignedOverpunch.convert("GOTCHA")
:error
"""
def convert(string) when is_bitstring(string) do
string
|> Integer.parse
|> get_profile(string)
|> perform_conversion
|> apply_sign
|> format_return
end
@doc """
Converts a string in signed overpunch format to an integer.
Similar to `SignedOverpunch.convert/1`, but raises an `ArgumentError` if the
input provided is not valid signed overpunch.
## Examples
iex> SignedOverpunch.convert!("100{")
1000
iex> SignedOverpunch.convert!("100}")
-1000
iex> SignedOverpunch.convert!("00I")
9
iex> SignedOverpunch.convert!("000")
** (ArgumentError) invalid signed overpunch value: 000
iex> SignedOverpunch.convert!("GOTCHA")
** (ArgumentError) invalid signed overpunch value: GOTCHA
"""
def convert!(string) do
case convert(string) do
{:ok, int} -> int
:error -> raise ArgumentError, @error_message <> string
end
end
@doc """
Converts an integer to signed overpunch format.
If successful, returns a tuple in the form of `{:ok, string}`. Otherwise, it
returns `:error`.
## Examples
iex> SignedOverpunch.to_s(1000)
{:ok, "100{"}
iex> SignedOverpunch.to_s(-1000)
{:ok, "100}"}
iex> SignedOverpunch.to_s(9)
{:ok, "I"}
iex> SignedOverpunch.to_s("GOTCHA")
:error
"""
def to_s(int) when is_integer(int) do
last_digit = int
|> Kernel.to_string
|> String.last
|> String.to_integer
suffix = profile({sign(int), last_digit})
s = int
|> convert_to_positive
|> Kernel.to_string
|> String.slice(0..-2)
|> Kernel.<>(suffix)
{:ok, s}
end
def to_s(_), do: :error
@doc """
Converts an integer to signed overpunch format.
Similar to `SignedOverpunch.to_s/1`, but raises an `ArgumentError` if the
input provided is not an integer.
## Examples
iex> SignedOverpunch.to_s!(1000)
"100{"
iex> SignedOverpunch.to_s!(-1000)
"100}"
iex> SignedOverpunch.to_s!(9)
"I"
iex> SignedOverpunch.to_s!("000")
** (ArgumentError) invalid integer: 000
iex> SignedOverpunch.to_s!("GOTCHA")
** (ArgumentError) invalid integer: GOTCHA
iex> SignedOverpunch.to_s!(10.0)
** (ArgumentError) invalid integer: 10.0
"""
def to_s!(int) do
case to_s(int) do
{:ok, string} -> string
:error -> raise ArgumentError, "invalid integer: #{int}"
end
end
defp sign(int) when int >= 0, do: :pos
defp sign(int) when int < 0, do: :neg
defp convert_to_positive(int) when int < 0, do: int * -1
defp convert_to_positive(int) when int >= 0, do: int
defp perform_conversion({int, {neg_or_pos, add}}) do
{neg_or_pos, int * 10 + add}
end
defp perform_conversion(_), do: :error
defp apply_sign({:pos, int}) when is_integer(int), do: int
defp apply_sign({:neg, int}) when is_integer(int), do: 0 - int
defp apply_sign(_), do: :error
defp get_profile({int, overpunch_char}, _) do
{int, profile(overpunch_char)}
end
# This handles the "specialish" case where only the overpunch char is present
# and Integer.parse returns :error
defp get_profile(:error, string) when byte_size(string) == 1 do
{0, profile(string)}
end
defp get_profile(_, _), do: :error
defp format_return(int) when is_integer(int), do: {:ok, int}
defp format_return(:error), do: :error
@profiles %{
"}" => {:neg, 0},
"J" => {:neg, 1},
"K" => {:neg, 2},
"L" => {:neg, 3},
"M" => {:neg, 4},
"N" => {:neg, 5},
"O" => {:neg, 6},
"P" => {:neg, 7},
"Q" => {:neg, 8},
"R" => {:neg, 9},
"{" => {:pos, 0},
"A" => {:pos, 1},
"B" => {:pos, 2},
"C" => {:pos, 3},
"D" => {:pos, 4},
"E" => {:pos, 5},
"F" => {:pos, 6},
"G" => {:pos, 7},
"H" => {:pos, 8},
"I" => {:pos, 9},
}
for {string, profile} <- @profiles do
defp profile(unquote(string)) do
unquote(profile)
end
defp profile(unquote(profile)) do
unquote(string)
end
end
defp profile(_), do: :error
end
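# Round-trip sketch built from the doctests above: "{" encodes a positive
# sign and a final digit of 0, so 1000 and "100{" convert back and forth:
{:ok, 1000} = SignedOverpunch.convert("100{")
{:ok, "100{"} = SignedOverpunch.to_s(1000)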
# source: lib/signed_overpunch.ex
defmodule PasswordPhilosophy do
@moduledoc """
For example, suppose you have the following list:
1-3 a: abcde
1-3 b: cdefg
2-9 c: ccccccccc
Each line gives the password policy and then the password. The password policy
indicates the lowest and highest number of times a given letter must appear
for the password to be valid. For example, 1-3 a means that the password must
contain a at least 1 time and at most 3 times.
In the above example, 2 passwords are valid. The middle password, cdefg, is
not; it contains no instances of b, but needs at least 1. The first and third
passwords are valid: they contain one a or nine c, both within the limits of
their respective policies.
"""
@doc """
Count valid passwords in list according to old rules.
## Examples
iex> PasswordPhilosophy.count_valid_old(["1-3 a: abcde", "1-3 b: cdefg", "2-9 c: ccccccccc"])
2
"""
def count_valid_old(list) do
list
|> Enum.count(&valid_password_entry_old?/1)
end
@doc """
Count valid passwords in list according to new rules.
## Examples
iex> PasswordPhilosophy.count_valid_new(["1-3 a: abcde", "1-3 b: cdefg", "2-9 c: ccccccccc"])
1
"""
def count_valid_new(list) do
list
|> Enum.count(&valid_password_entry_new?/1)
end
@doc """
Check if password entry (line) is valid according to old rules.
## Examples
iex> PasswordPhilosophy.valid_password_entry_old?("1-3 a: abcde")
true
iex> PasswordPhilosophy.valid_password_entry_old?("2-3 a: abcda")
true
"""
def valid_password_entry_old?(string) do
[_, min, max, letter, password] = Regex.run(~r/(\d+)-(\d+) (\w): (\w+)/, string)
count =
password
|> String.codepoints()
|> Enum.count(&Kernel.==(&1, letter))
Range.new(String.to_integer(min), String.to_integer(max))
|> Enum.member?(count)
end
@doc """
Check if password entry (line) is valid according to new rules.
## Examples
iex> PasswordPhilosophy.valid_password_entry_new?("1-3 a: abcde")
true
iex> PasswordPhilosophy.valid_password_entry_new?("2-3 a: abcda")
false
"""
def valid_password_entry_new?(string) do
[_, pos_1, pos_2, letter, password] = Regex.run(~r/(\d+)-(\d+) (\w): (\w+)/, string)
[pos_1, pos_2]
|> Enum.map(fn pos -> String.at(password, String.to_integer(pos) - 1) end)
|> case do
[^letter, ^letter] -> false
[^letter, _] -> true
[_, ^letter] -> true
_ -> false
end
end
end
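# Typical puzzle-style usage; the input file path is hypothetical:
"input.txt"
|> File.read!()
|> String.split("\n", trim: true)
|> PasswordPhilosophy.count_valid_old()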
# source: 2-PasswordPhilosophy/lib/password_philosophy.ex
defmodule ABNF do
@moduledoc """
Main module. ABNF parser as described in [RFC4234](https://tools.ietf.org/html/rfc4234)
and [RFC5234](https://tools.ietf.org/html/rfc5234)
Copyright 2015 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
alias ABNF.Grammar, as: Grammar
alias ABNF.Interpreter, as: Interpreter
alias ABNF.CaptureResult, as: CaptureResult
require Logger
@doc """
Loads a set of abnf rules from a file.
"""
@spec load_file(String.t) :: Grammar.t | no_return
def load_file(file) do
data = File.read!(file)
load(to_charlist(data))
end
@doc """
Returns the abnf rules found in the given char list.
"""
@spec load([byte]) :: Grammar.t | no_return
def load(input) do
case Grammar.rulelist input do
{rules, ''} -> rules
{_rlist, rest} -> throw {:incomplete_parsing, rest}
_ -> throw {:invalid_grammar, input}
end
end
@doc """
Parses an input given a grammar, looking for the given rule.
"""
@spec apply(Grammar.t, String.t, [byte], term) :: CaptureResult.t
def apply(grammar, rule, input, state \\ nil) do
Interpreter.apply grammar, rule, input, state
end
@doc """
Given a grammar, match an input against a rule
"""
@spec match_input(Grammar.t, String.t, [byte]) :: atom
def match_input(grammar, rule, input) do
output = ABNF.apply(grammar, rule, input)
if(output != nil) do
%CaptureResult{input: i, rest: r} = output
partial = fn ->
unless r == nil or r == [] do
String.contains?(to_string(i), to_string(r))
else
false
end
end
cond do
r == nil or r == [] ->
:match
i == r or (r != nil and r != [] and partial.()) ->
:partial
true ->
Logger.debug("I: #{i}, R: #{r}")
:no_match
end
else
:no_match
end
end
end
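# Usage sketch; the grammar file and rule name are illustrative, not part of
# this module. Input is a char list, matching `load/1` and `apply/4`:
grammar = ABNF.load_file("samples/ipv4.abnf")
ABNF.match_input(grammar, "ipv4address", '192.168.0.1')
# => :match | :partial | :no_match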
# source: lib/ex_abnf.ex
defmodule Indicado.EMA do
@moduledoc """
This is the EMA module used for calculating Exponential Moving Average
"""
@doc """
Calculates EMA for the list. It needs a non-empty list of numbers and a
positive period argument.
Returns `{:ok, ema_list}` or `{:error, reason}`
## Examples
iex> Indicado.EMA.eval([1, 2, 3, 4], 2)
{:ok, [1.0, 1.6666666666666665, 2.5555555555555554, 3.518518518518518]}
iex> Indicado.EMA.eval([2, 4, 5, 10, 100, 1000], 3)
{:ok, [2.0, 3.0, 4.0, 7.0, 53.5, 526.75]}
iex> Indicado.EMA.eval([2, 4, 5, 10, 100, 1000], 5)
{:ok, [2.0, 2.666666666666667, 3.4444444444444446, 5.62962962962963, 37.08641975308642, 358.0576131687243]}
iex> Indicado.EMA.eval([], 2)
{:error, :not_enough_data}
iex> Indicado.EMA.eval([1, 2, 3, 4], 0)
{:error, :bad_period}
"""
@spec eval(nonempty_list(number), pos_integer) :: {:ok, nonempty_list(float)} | {:error, atom}
def eval(list, period), do: calc(list, period)
@doc """
Calculates EMA for the list. It needs a non-empty list of numbers and a
positive period argument.
Raises `NotEnoughDataError` if the given list is not long enough for calculating EMA.
Raises `BadPeriodError` if period is an unacceptable number.
## Examples
iex> Indicado.EMA.eval!([1, 2, 3, 4], 2)
[1.0, 1.6666666666666665, 2.5555555555555554, 3.518518518518518]
iex> Indicado.EMA.eval!([], 2)
** (NotEnoughDataError) not enough data
iex> Indicado.EMA.eval!([1, 3, 4], 0)
** (BadPeriodError) bad period
"""
@spec eval!(nonempty_list(number), pos_integer) :: nonempty_list(float) | no_return
def eval!(list, period) do
case calc(list, period) do
{:ok, result} -> result
{:error, :not_enough_data} -> raise NotEnoughDataError
{:error, :bad_period} -> raise BadPeriodError
end
end
defp calc(list, period, results \\ [])
defp calc([], _period, []), do: {:error, :not_enough_data}
defp calc(_list, period, _results) when period < 1, do: {:error, :bad_period}
defp calc([], _period, results), do: {:ok, Enum.reverse(results)}
defp calc([head | tail], period, []) do
calc(tail, period, [calc_ema(head, period, head)])
end
defp calc([head | tail], period, results) do
[result_head | _result_tail] = results
calc(tail, period, [calc_ema(head, period, result_head) | results])
end
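# Standard EMA recurrence: ema_t = value * k + ema_(t-1) * (1 - k), with
# smoothing factor k = 2 / (period + 1). The first observation seeds the
# series: calc/3 above passes `head` as its own previous EMA.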
defp calc_ema(last, period, prev_ema) do
multiplier = 2 / (period + 1)
last * multiplier + prev_ema * (1 - multiplier)
end
end
# source: lib/indicado/ema.ex
defmodule NebulexRedisAdapter do
@moduledoc """
Nebulex adapter for Redis.
This adapter is implemented by means of `Redix`, a Redis driver for
Elixir.
This adapter supports multiple connection pools against different Redis
nodes in a cluster. This enables resiliency: the cache keeps working even
if one or more nodes become unreachable.
## Adapter Options
In addition to `Nebulex.Cache` shared options, this adapter supports the
following options:
* `:pools` - The list of connection pools for Redis. Each element (pool)
holds the same options as `Redix` (including connection options), and
the `:pool_size` (number of connections to keep in the pool).
## Redix Options (for each pool)
Since this adapter is implemented by means of `Redix`, it inherits the same
options (including connection options). These are some of the main ones:
* `:host` - (string) the host where the Redis server is running. Defaults to
`"localhost"`.
* `:port` - (positive integer) the port on which the Redis server is
running. Defaults to `6379`.
* `:password` - (string) the password used to connect to Redis. Defaults to
`nil`, meaning no password is used. When this option is provided, all
Redix does is issue an `AUTH` command to Redis in order to authenticate.
* `:database` - (non-negative integer or string) the database to connect to.
Defaults to `nil`, meaning Redix doesn't connect to a specific database
(the default in this case is database `0`). When this option is provided,
all Redix does is issue a `SELECT` command to Redis in order to select the
given database.
For more information about the options (Redis and connection options), please
checkout `Redix` docs.
In addition to `Redix` options, it supports:
* `:pool_size` - The number of connections to keep in the pool
(default: `System.schedulers_online()`).
## Example
We can define our cache to use Redis adapter as follows:
defmodule MyApp.RedisCache do
use Nebulex.Cache,
otp_app: :nebulex,
adapter: NebulexRedisAdapter
end
The configuration for the cache must be in your application environment,
usually defined in your `config/config.exs`:
config :my_app, MyApp.RedisCache,
pools: [
primary: [
host: "127.0.0.1",
port: 6379
],
secondary: [
host: "10.10.10.10",
port: 6379,
pool_size: 2
]
]
## Queryable API
The queryable API is implemented by means of `KEYS` command, but it has some
limitations we have to be aware of:
* Only strings (`String.t()`) are allowed as query parameter.
* Only keys can be queried. Therefore, the `:return` option has no effect,
since keys are always returned. If you want the values for a given key
pattern (query), you can call `get_many` with the returned keys.
## Examples
iex> MyApp.RedisCache.set_many(%{
...> "firstname" => "Albert",
...> "lastname" => "Einstein",
...> "age" => 76
...> })
:ok
iex> MyApp.RedisCache.all("**name**")
["firstname", "lastname"]
iex> MyApp.RedisCache.all("a??")
["age"]
iex> MyApp.RedisCache.all()
["age", "firstname", "lastname"]
iex> stream = MyApp.RedisCache.stream("**name**")
iex> stream |> Enum.to_list()
["firstname", "lastname"]
# get the values for the returned queried keys
iex> "**name**" |> MyApp.RedisCache.all() |> MyApp.RedisCache.get_many()
%{"firstname" => "Albert", "lastname" => "Einstein"}
For more information about the usage, check out `Nebulex.Cache` as well.
"""
# Inherit default transaction implementation
use Nebulex.Adapter.Transaction
# Provide Cache Implementation
@behaviour Nebulex.Adapter
@behaviour Nebulex.Adapter.Queryable
alias Nebulex.Object
alias NebulexRedisAdapter.Command
@default_pool_size System.schedulers_online()
## Adapter
@impl true
defmacro __before_compile__(env) do
otp_app = Module.get_attribute(env.module, :otp_app)
config = Module.get_attribute(env.module, :config)
pool_size =
if pools = Keyword.get(config, :pools) do
Enum.reduce(pools, 0, fn {_, pool}, acc ->
acc + Keyword.get(pool, :pool_size, @default_pool_size)
end)
else
raise ArgumentError,
"missing :pools configuration in " <>
"config #{inspect(otp_app)}, #{inspect(env.module)}"
end
quote do
def __pool_size__, do: unquote(pool_size)
end
end
@impl true
def init(opts) do
cache = Keyword.fetch!(opts, :cache)
children =
opts
|> Keyword.fetch!(:pools)
|> Enum.reduce([], fn {_, pool}, acc ->
acc ++ children(pool, cache, acc)
end)
{:ok, children}
end
defp children(pool, cache, acc) do
offset = length(acc)
pool_size = Keyword.get(pool, :pool_size, @default_pool_size)
for i <- offset..(offset + pool_size - 1) do
opts =
pool
|> Keyword.delete(:pool_size)
|> Keyword.put(:name, :"#{cache}_redix_#{i}")
case opts[:url] do
nil ->
Supervisor.child_spec({Redix, opts}, id: {Redix, i})
url ->
opts = opts |> Keyword.delete(:url)
Supervisor.child_spec({Redix, {url, opts}}, id: {Redix, i})
end
end
end
@impl true
def get(cache, key, opts) do
opts
|> Keyword.get(:return)
|> with_ttl(cache, key, [["GET", encode(key)]])
end
@impl true
def get_many(cache, keys, _opts) do
cache
|> Command.exec!(["MGET" | for(k <- keys, do: encode(k))])
|> Enum.reduce({keys, %{}}, fn
nil, {[_key | keys], acc} ->
{keys, acc}
entry, {[key | keys], acc} ->
{keys, Map.put(acc, key, decode(entry))}
end)
|> elem(1)
end
@impl true
def set(cache, object, opts) do
cmd_opts = cmd_opts(opts, action: :set, ttl: nil)
case Command.exec!(cache, ["SET", encode(object.key), encode(object) | cmd_opts]) do
"OK" -> true
nil -> false
end
end
@impl true
def set_many(cache, objects, opts) do
default_exp =
opts
|> Keyword.get(:ttl)
|> Object.expire_at()
{mset, expire} =
Enum.reduce(objects, {["MSET"], []}, fn object, {acc1, acc2} ->
redis_k = encode(object.key)
acc2 =
if expire_at = object.expire_at || default_exp,
do: [["EXPIRE", redis_k, Object.remaining_ttl(expire_at)] | acc2],
else: acc2
{[encode(object), redis_k | acc1], acc2}
end)
["OK" | _] = Command.pipeline!(cache, [Enum.reverse(mset) | expire])
:ok
end
@impl true
def delete(cache, key, _opts) do
_ = Command.exec!(cache, ["DEL", encode(key)])
:ok
end
@impl true
def take(cache, key, opts) do
redis_k = encode(key)
opts
|> Keyword.get(:return)
|> with_ttl(cache, key, [["GET", redis_k], ["DEL", redis_k]])
end
@impl true
def has_key?(cache, key) do
case Command.exec!(cache, ["EXISTS", encode(key)]) do
1 -> true
0 -> false
end
end
@impl true
def object_info(cache, key, :ttl) do
case Command.exec!(cache, ["TTL", encode(key)]) do
-1 -> :infinity
-2 -> nil
ttl -> ttl
end
end
def object_info(cache, key, :version) do
case get(cache, key, []) do
nil -> nil
obj -> obj.version
end
end
@impl true
def expire(cache, key, :infinity) do
key = encode(key)
case Command.pipeline!(cache, [["TTL", key], ["PERSIST", key]]) do
[-2, 0] -> nil
[_, _] -> :infinity
end
end
def expire(cache, key, ttl) do
case Command.exec!(cache, ["EXPIRE", encode(key), ttl]) do
1 -> Object.expire_at(ttl) || :infinity
0 -> nil
end
end
@impl true
def update_counter(cache, key, incr, _opts) when is_integer(incr) do
Command.exec!(cache, ["INCRBY", encode(key), incr])
end
@impl true
def size(cache) do
Command.exec!(cache, ["DBSIZE"])
end
@impl true
def flush(cache) do
_ = Command.exec!(cache, ["FLUSHALL"])
:ok
end
## Queryable
@impl true
def all(cache, query, _opts) do
query
|> validate_query()
|> perform_query(cache)
end
@impl true
def stream(cache, query, _opts) do
query
|> validate_query()
|> do_stream(cache)
end
def do_stream(pattern, cache) do
Stream.resource(
fn ->
perform_query(pattern, cache)
end,
fn
[] -> {:halt, []}
elems -> {elems, []}
end,
& &1
)
end
## Private Functions
defp with_ttl(:object, cache, key, pipeline) do
case Command.pipeline!(cache, [["TTL", encode(key)] | pipeline]) do
[-2 | _] ->
nil
[ttl, get | _] ->
get
|> decode()
|> object(key, ttl)
end
end
defp with_ttl(_, cache, key, pipeline) do
cache
|> Command.pipeline!(pipeline)
|> hd()
|> decode()
|> object(key, -1)
end
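# Keys and values that implement `String.Chars` are stored as plain strings;
# anything else falls back to `:erlang.term_to_binary/1`. `decode/1` below
# reverses this by checking `String.printable?/1`.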
defp encode(data) do
to_string(data)
rescue
_e -> :erlang.term_to_binary(data)
end
defp decode(nil), do: nil
defp decode(data) do
if String.printable?(data) do
data
else
:erlang.binary_to_term(data)
end
end
defp object(nil, _key, _ttl), do: nil
defp object(%Object{} = obj, _key, -1), do: obj
defp object(%Object{} = obj, _key, ttl) do
%{obj | expire_at: Object.expire_at(ttl)}
end
defp object(value, key, -1) do
%Object{key: key, value: value}
end
defp cmd_opts(opts, keys) do
Enum.reduce(keys, [], fn {key, default}, acc ->
opts
|> Keyword.get(key, default)
|> cmd_opts(key, acc)
end)
end
defp cmd_opts(nil, _opt, acc), do: acc
defp cmd_opts(:set, :action, acc), do: acc
defp cmd_opts(:add, :action, acc), do: ["NX" | acc]
defp cmd_opts(:replace, :action, acc), do: ["XX" | acc]
defp cmd_opts(ttl, :ttl, acc), do: ["EX", ttl | acc]
defp validate_query(nil), do: "*"
defp validate_query(pattern) when is_binary(pattern), do: pattern
defp validate_query(pattern) do
raise Nebulex.QueryError, message: "invalid pattern", query: pattern
end
defp perform_query(pattern, cache) do
Command.exec!(cache, ["KEYS", pattern])
end
end
# source: lib/nebulex_redis_adapter.ex
defmodule Cldr.Unit.Conversion do
@moduledoc """
Unit conversion functions for the units defined
in `Cldr`.
"""
@enforce_keys [:factor, :offset, :base_unit]
defstruct factor: 1,
offset: 0,
base_unit: nil
@type t :: %{
factor: integer | float | Ratio.t(),
base_unit: [atom(), ...],
offset: integer | float
}
alias Cldr.Unit
import Unit, only: [incompatible_units_error: 2]
defmodule Options do
defstruct usage: nil, locale: nil, backend: nil, territory: nil
end
@doc """
Convert one unit into another unit of the same
unit type (length, volume, mass, ...)
## Arguments
* `unit` is any unit returned by `Cldr.Unit.new/2`
* `to_unit` is any unit name returned by `Cldr.Unit.units/0`
## Returns
* a `Unit.t` of the unit type `to_unit` or
* `{:error, {exception, message}}`
## Examples
iex> Cldr.Unit.convert Cldr.Unit.new!(:mile, 1), :foot
{:ok, Cldr.Unit.new!(:foot, 5280)}
iex> Cldr.Unit.convert Cldr.Unit.new!(:mile, 1), :gallon
{:error, {Cldr.Unit.IncompatibleUnitsError,
"Operations can only be performed between units of the same category. Received :mile and :gallon"}}
"""
@spec convert(Unit.t(), Unit.unit()) :: {:ok, Unit.t()} | {:error, {module(), String.t()}}
def convert(%Unit{} = unit, to_unit) do
%{unit: from_unit, value: value, base_conversion: from_conversion} = unit
with {:ok, to_unit, to_conversion} <- Unit.validate_unit(to_unit),
{:ok, converted} <- convert(value, from_conversion, to_conversion) do
Unit.new(to_unit, converted, usage: unit.usage, format_options: unit.format_options)
else
{:error, {Cldr.Unit.IncompatibleUnitsError, _}} ->
{:error, incompatible_units_error(from_unit, to_unit)}
end
end
defp convert(value, from, to) when is_number(value) or is_map(value) do
use Ratio
with {:ok, from, to} <- compatible(from, to) do
value
|> Ratio.new()
|> convert_to_base(from)
|> convert_from_base(to)
|> wrap_ok
end
end
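# Every conversion routes through the base unit: scale into it with
# `value * factor + offset`, then invert that affine map for the target unit.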
def convert_to_base(value, %__MODULE__{} = from) do
use Ratio
%{factor: from_factor, offset: from_offset} = from
value * from_factor + from_offset
end
def convert_to_base(value, [{_, [{_, from}]}]) do
convert_to_base(value, from)
end
# A known translation with a "per" conversion
def convert_to_base(value, [{_, {_, _} = from}]) do
convert_to_base(value, from)
end
def convert_to_base(value, {_, %__MODULE__{} = from}) do
convert_to_base(value, from)
end
def convert_to_base(value, {numerator, denominator}) do
use Ratio
convert_to_base(1.0, numerator) / convert_to_base(1.0, denominator) * value
end
def convert_to_base(value, []) do
value
end
def convert_to_base(value, [numerator | rest]) do
convert_to_base(value, numerator) |> convert_to_base(rest)
end
def convert_to_base(_value, conversion) do
raise ArgumentError, "Conversion not recognised: #{inspect conversion}"
end
def convert_from_base(value, %__MODULE__{} = to) do
use Ratio
%{factor: to_factor, offset: to_offset} = to
(value - to_offset) / to_factor
end
def convert_from_base(value, [{_, [{_, to}]}]) do
convert_from_base(value, to)
end
# A known translation with a "per" conversion
def convert_from_base(value, [{_, {_, _} = to}]) do
convert_from_base(value, to)
end
def convert_from_base(value, {_, %__MODULE__{} = to}) do
convert_from_base(value, to)
end
def convert_from_base(value, {numerator, denominator}) do
use Ratio
convert_from_base(1.0, numerator) / convert_from_base(1.0, denominator) * value
end
def convert_from_base(value, []) do
value
end
def convert_from_base(value, [numerator | rest]) do
convert_from_base(value, numerator) |> convert_from_base(rest)
end
defp compatible(from, to) do
with {:ok, base_unit_from} <- Unit.base_unit(from),
{:ok, base_unit_to} <- Unit.base_unit(to),
true <- to_string(base_unit_from) == to_string(base_unit_to) do
{:ok, from, to}
else
_ -> {:error, incompatible_units_error(from, to)}
end
end
defp wrap_ok(unit) do
{:ok, unit}
end
@doc """
Convert one unit into another unit of the same
unit type (length, volume, mass, ...) and raises
on a unit type mismatch
## Arguments
* `unit` is any unit returned by `Cldr.Unit.new/2`
* `to_unit` is any unit name returned by `Cldr.Unit.units/0`
## Returns
* a `Unit.t` of the unit type `to_unit` or
* raises an exception
## Examples
iex> Cldr.Unit.Conversion.convert!(Cldr.Unit.new!(:celsius, 0), :fahrenheit)
...> |> Cldr.Unit.round
#Cldr.Unit<:fahrenheit, 32.0>
iex> Cldr.Unit.Conversion.convert!(Cldr.Unit.new!(:fahrenheit, 32), :celsius)
...> |> Cldr.Unit.round
#Cldr.Unit<:celsius, 0.0>
Cldr.Unit.Conversion.convert! Cldr.Unit.new!(:mile, 1), :gallon
** (Cldr.Unit.IncompatibleUnitsError) Operations can only be performed between units of the same category. Received :mile and :gallon
"""
@spec convert!(Unit.t(), Unit.unit()) :: Unit.t() | no_return()
def convert!(%Unit{} = unit, to_unit) do
case convert(unit, to_unit) do
{:error, {exception, reason}} -> raise exception, reason
{:ok, unit} -> unit
end
end
@doc """
Convert a unit into its base unit.
For example, the base unit for `length`
is `meter`. The base unit is an
intermediary unit used in all
conversions.
## Arguments
* `unit` is any unit returned by `Cldr.Unit.new/2`
## Returns
* `unit` converted to its base unit as a `t:Unit.t()` or
* `{:error, {exception, reason}}` as an error
## Example
iex> unit = Cldr.Unit.new!(:kilometer, 10)
iex> Cldr.Unit.Conversion.convert_to_base_unit unit
{:ok, Cldr.Unit.new!(:meter, 10000)}
"""
def convert_to_base_unit(%Unit{} = unit) do
with {:ok, base_unit} <- Unit.base_unit(unit) do
convert(unit, base_unit)
end
end
def convert_to_base_unit(unit) when is_atom(unit) do
unit
|> Unit.new!(1.0)
|> convert_to_base_unit()
end
def convert_to_base_unit([unit | _rest]) when is_atom(unit) do
convert_to_base_unit(unit)
end
@doc """
Convert a unit into its base unit and
raises on error
For example, the base unit for `length`
is `meter`. The base unit is an
intermediary unit used in all
conversions.
## Arguments
* `unit` is any unit returned by `Cldr.Unit.new/2`
## Returns
* `unit` converted to its base unit as a `t:Unit.t()` or
* raises an exception
## Example
iex> unit = Cldr.Unit.new!(:kilometer, 10)
iex> Cldr.Unit.Conversion.convert_to_base_unit! unit
#Cldr.Unit<:meter, 10000>
"""
def convert_to_base_unit!(%Unit{} = unit) do
case convert_to_base_unit(unit) do
{:error, {exception, reason}} -> raise exception, reason
{:ok, unit} -> unit
end
end
end
# source: lib/cldr/unit/conversion.ex
defmodule Ferryman.Server do
@moduledoc """
This module provides the Server API to start a JSONRPC 2.0 Server
instance.
## Overview
First, let's define a JSONRPC2 handler, and define the functions we want to be
handled by RPC calls.
defmodule ExampleHandler do
use JSONRPC2.Server.Handler
def handle_request("add", [x, y]) do
x + y
end
end
Now we can start our Ferryman Server.
iex> {:ok, pid} = Ferryman.Server.start_link(redis_config: [], channels: ["mychannel"], handler: ExampleHandler)
The default `redis_config` will look for a redis instance on `"localhost:6379"`.
For more configuration options, please check the [Redix Docs](https://hexdocs.pm/redix/Redix.html#module-ssl).
You can define a list of `channels`, and pass the `handler` module.
"""
use GenServer
defmodule State do
@moduledoc false
defstruct [:client, :handler, :channels]
end
@doc """
Starts a new Ferryman.Server, which takes the following keyword list as arguments:
## Example
iex> Ferryman.Server.start_link(redis_config: [], channels: ["mychannel"], handler: ExampleHandler)
{:ok, pid}
"""
@spec start_link(redis_config: keyword(), channels: list(String.t()), handler: module()) ::
:ignore | {:error, any} | {:ok, pid}
def start_link(opts) when is_list(opts) do
GenServer.start_link(__MODULE__, opts)
end
def put_reply(pid, id, message) do
GenServer.call(pid, {:put_reply, id, message})
end
@impl true
def init(redis_config: redis_config, channels: channels, handler: handler) do
{:ok, pubsub} = Redix.PubSub.start_link(redis_config)
{:ok, _ref} = Redix.PubSub.subscribe(pubsub, channels, self())
{:ok, client} = Redix.start_link(redis_config)
{:ok, %State{client: client, channels: channels, handler: handler}}
end
@impl true
def handle_call({:put_reply, id, msg}, _from, state) do
Redix.pipeline(state.client, [
["MULTI"],
["RPUSH", id, msg],
["EXPIRE", id, 24 * 3600],
["EXEC"]
])
{:reply, :ok, state}
end
@impl true
def handle_call(:stop, _from, state) do
{:stop, :normal, state}
end
@impl true
def handle_call(_msg, _from, state) do
{:stop, :error, state}
end
@impl true
def handle_info(
{:redix_pubsub, _pubsub, _ref, :message, %{channel: _channel, payload: message}},
state
) do
self = self()
spawn_link(fn -> handle_request(self, message, state.handler) end)
{:noreply, state}
end
@impl true
def handle_info({:redix_pubsub, pubsub, _ref, :disconnect, _message}, state) do
Redix.PubSub.subscribe(pubsub, state.channels)
{:noreply, state}
end
@impl true
def handle_info({:redix_pubsub, _pubsub, _ref, _type, _message}, state) do
{:noreply, state}
end
@impl true
def terminate(_reason, _state), do: :ok
defp handle_request(parent, message, handler) do
case handler.handle(message) do
:noreply ->
:noop
{:reply, message} ->
{:ok, %{"id" => id}} = Jason.decode(message)
put_reply(parent, id, message)
end
end
end
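# Wire-level sketch (assumptions: channel name from the example above, a
# string request id, and an arbitrary 5-second BLPOP timeout). The server
# RPUSHes the JSON-RPC reply onto a Redis list keyed by the request id:
{:ok, conn} = Redix.start_link()
Redix.command!(conn, ["PUBLISH", "mychannel", ~s({"jsonrpc":"2.0","method":"add","params":[1,2],"id":"req-1"})])
Redix.command!(conn, ["BLPOP", "req-1", "5"])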
# source: lib/server.ex
defmodule Calcinator.Authorization do
@moduledoc """
Behaviour for `Calcinator.Resources.t` `authorization_module`
"""
# Types
@typedoc """
The actions that must be handled by `can?/3`, `filter_associations_can/3`, and `filter_can/3`.
"""
@type action :: :create | :delete | :index | :update | :show
@typedoc """
A list of `Ecto.Schema.t` with the head being the near association and each successive element being the next
`Ecto.Schema.t` following the associations back to the root `Ecto.Schema.t` for the action.
Ascents are used, so that associations don't have to preload their parent to do `can?` checks.
"""
@type association_ascent :: [struct, ...]
@typedoc """
The subject that is trying to do the action and needs to be authorized by `authorization_module`
"""
@type subject :: term
@typedoc """
The target of the `subject`'s action
"""
@type target :: term
# Callbacks
@doc """
Checks whether `subject` (from `Calcinator.Resources.t` `subject`) can perform `action` on `target`.
## :create
* `can?(subject, :create, ecto_schema_module) :: boolean` - called by `Calcinator.create/2` to check if `subject`
can create `ecto_schema_module` structs in general.
* `can?(subject, :create, Ecto.Changeset.t) :: boolean` - called by `Calcinator.create/2` to check if `subject`
can create a specific changeset.
## :delete
* `can?(subject, :delete, struct) :: boolean` - called by `Calcinator.delete/2` to check if `subject` can delete a
specific `struct`.
## :index
* `can?(subject, :index, ecto_schema_module) :: boolean` - called by `Calcinator.index/3` to check if `subject` can
index `ecto_schema_module` structs in general.
## :show
* `can?(subject, :show, struct) :: boolean` - called by `Calcinator.show/2` and `Calcinator.show_relationship/3` to
check if `subject` can show a specific struct.
* `can?(subject, :show, association_ascent) :: boolean` - called by `Calcinator.create/2`,
`Calcinator.get_related_resource/3`, `Calcinator.index/3`, `Calcinator.show/2`, `Calcinator.show_relationship/3`,
`Calcinator.update/2` to check if `subject` can show the head associated struct of the `association_ascent` list.
## :update
* `can?(subject, :update, Ecto.Changeset.t) :: boolean` - called by `Calcinator.update/2` to check if `subject` can
update a specific changeset.
"""
@callback can?(subject, :create | :index, module) :: boolean
@callback can?(subject, :create | :update, Ecto.Changeset.t()) :: boolean
@callback can?(subject, :delete | :show, struct) :: boolean
@callback can?(subject, action, target :: struct | association_ascent) :: boolean
@doc """
Reduces associations on `target` to only those where `can?(subject, action, associated_ascent)` is `true`.
## :show
* `filter_associations_can(struct, subject, :show) :: struct` - called by `Calcinator.create/2`,
`Calcinator.show/2`, and `Calcinator.update/2` filter the associations on the allowed target.
* `filter_associations_can([struct], subject, :show) :: [struct]` - called by `Calcinator.index/2` after
`filter_can([struct], subject, :show)` to filter the associations of the allowed targets.
"""
@callback filter_associations_can(target :: struct | [struct], subject, action) :: target
@doc """
Reduces `targets` to only those elements where `can?(subject, action, targets_element)` is `true`.
## :show
* `filter_can([struct], subject, :show) :: [struct]` - called by `Calcinator.index/2` to filter the list of structs
to only those where `can?(subject, :show, struct)` is `true`.
"""
@callback filter_can(targets :: [struct], subject, action) :: [target]
end
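# A minimal sketch of a module implementing this behaviour. The module name
# and the role-based rules are assumptions, not part of Calcinator:
defmodule MyApp.Authorization do
@behaviour Calcinator.Authorization
@impl true
def can?(%{role: :admin}, _action, _target), do: true
def can?(_subject, :show, _target), do: true
def can?(_subject, _action, _target), do: false
@impl true
def filter_can(targets, subject, action), do: Enum.filter(targets, &can?(subject, action, &1))
@impl true
def filter_associations_can(target, _subject, _action), do: target
end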
# source: lib/calcinator/authorization.ex
defmodule Grizzly.ZWave.SmartStart.MetaExtension do
@moduledoc """
Meta Extensions for SmartStart devices for QR codes and node provisioning
list
"""
alias Grizzly.ZWave
alias Grizzly.ZWave.{DeviceClasses, IconType, Security}
alias Grizzly.ZWave.SmartStart.MetaExtension.UUID16
import Bitwise
@advanced_joining 0x35
@bootstrapping_mode 0x36
@location_information 0x33
@max_inclusion_request_interval 0x02
@name_information 0x32
@network_status 0x37
@product_id 0x01
@product_type 0x00
@smart_start_inclusion_setting 0x34
@uuid16 0x03
@typedoc """
Unsigned 16 bit integer
"""
@type unit_16() :: char()
@typedoc """
The mode to use when including the node advertised in the provisioning list
- `:security_2` - the node must be manually set to learn mode and follow the
S2 bootstrapping instructions
- `:smart_start` - the node will use S2 bootstrapping automatically using the
SmartStart functionality
- `:long_range` - includes the device using the Z-Wave Long Range protocol.
If no keys are granted in the `:advanced_joining` extension, this inclusion
will fail.
"""
@type bootstrapping_mode() :: :security_2 | :smart_start | :long_range
@typedoc """
The different network statuses are:
- `:not_in_network` - the node in the provisioning list is not included in
the network
- `:included` - the node in the provisioning list is included in the network
and is functional
- `:failing` - the node in the provisioning list is included in the network
but is now marked as failing
"""
@type network_status() :: :not_in_network | :included | :failing
@typedoc """
Id of the manufacturer for the product id extension
"""
@type manufacturer_id() :: unit_16()
@typedoc """
Id of the product produced by the manufacturer for the product id extension
"""
@type product_id() :: unit_16()
@typedoc """
Type of product produced by the manufacturer for the product id extension
"""
@type product_type() :: unit_16()
@typedoc """
Version of the application in a string format of "Major.Minor"
"""
@type application_version() :: binary()
@type product_id_values() ::
{manufacturer_id(), product_id(), product_type(), application_version()}
@typedoc """
The interval (in seconds) must be in the range of 640..12672 inclusive, and
has to be in steps of 128 seconds.
So after 640 the next valid interval is `640 + 128` which is `768` seconds.
See `SDS13944 Node Provisioning Information Type Registry.pdf` section
`3.1.2.3` for more information.
"""
@type inclusion_interval() :: 640..12672
@typedoc """
The location string cannot contain underscores and cannot end with a dash.
The location string can contain a period (`.`), but a sublocation cannot
end with a dash. For example:
```
123.123-.123
```
The above location is invalid. To make it valid, remove the `-` before the `.`.
A node's location cannot be more than 62 bytes.
"""
@type information_location() :: binary()
@typedoc """
The name string cannot contain underscores and cannot end with a dash.
A node's name cannot be more than 62 bytes.
"""
@type information_name() :: binary()
@typedoc """
Generic Device Class for the product type extension
"""
@type generic_device_class() :: atom()
@typedoc """
Specific Device Class for the product type extension
"""
@type specific_device_class() :: atom()
@typedoc """
Installer icon for the product type extension
"""
@type installer_icon_type() :: IconType.name()
@type product_type_values() ::
{generic_device_class(), specific_device_class(), installer_icon_type()}
@typedoc """
Settings for the smart start inclusion setting extension
* `:pending` - the node will be added to the network when it issues SmartStart
inclusion requests.
* `:passive` - this node is unlikely to issue a SmartStart inclusion request
and SmartStart inclusion requests will be ignored from this node by the
Z/IP Gateway. All nodes in the list with this setting must be updated to
`:pending` when Provisioning List Iteration Get command is issued.
* `:ignored` - all SmartStart inclusion requests are ignored from this node
until updated via Z/IP Client (Grizzly) or a controlling node.
"""
@type inclusion_setting() :: :pending | :passive | :ignored
@typedoc """
Meta extension for SmartStart devices
* `:advanced_joining` - used to specify which S2 security keys to grant
during S2 inclusion
* `:bootstrapping_mode` - used to specify the bootstrapping mode the including
node must join with
* `:location_information` - used to advertise the location assigned to the node
* `:max_inclusion_request_interval` - used to advertise if a power constrained
smart start node will issue an inclusion request at a higher interval than
the default 512 seconds
* `:name_information` - used to advertise the name of the node
* `:network_status` - used to advertise if the node is in the network and its
node id
* `:product_id` - used to advertise product identifying data
* `:product_type` - used to advertise the product type data
* `:smart_start_inclusion_setting` - used to advertise the smart start
inclusion setting
* `:uuid16` - used to advertise the 16 byte manufacturer-defined information
that is unique to the that device
* `:unknown` - sometimes new extensions are released without first class
support, so this extension is used for those extensions that still need to
be supported in this library
"""
@type extension() ::
{:advanced_joining, [Security.key()]}
| {:bootstrapping_mode, bootstrapping_mode()}
| {:location_information, information_location()}
| {:max_inclusion_request_interval, inclusion_interval()}
| {:name_information, information_name()}
| {:network_status, {ZWave.node_id(), atom()}}
| {:product_id, product_id_values()}
| {:product_type, product_type_values()}
| {:smart_start_inclusion_setting, inclusion_setting()}
| {:uuid16, UUID16.t()}
| {:unknown, binary()}
@doc """
Encode an extension into a binary
"""
@spec encode(extension()) :: binary()
def encode(extension) do
IO.iodata_to_binary(encode_extension(extension))
end
defp encode_extension({:advanced_joining, keys}) do
keys_byte =
Enum.reduce(keys, 0, fn
:s2_unauthenticated, byte -> byte ||| 0x01
:s2_authenticated, byte -> byte ||| 0x02
:s2_access_control, byte -> byte ||| 0x04
:s0, byte -> byte ||| 0x40
_, byte -> byte
end)
[set_critical_bit(@advanced_joining, 1), 0x01, keys_byte]
end
defp encode_extension({:bootstrapping_mode, mode}) do
mode =
case mode do
:security_2 -> 0x00
:smart_start -> 0x01
:long_range -> 0x02
end
[set_critical_bit(@bootstrapping_mode, 1), 0x01, mode]
end
defp encode_extension({:location_information, location}) do
location =
location
|> String.codepoints()
|> :erlang.list_to_binary()
[set_critical_bit(@location_information, 0), byte_size(location), location]
end
defp encode_extension({:max_inclusion_request_interval, interval}) do
# The field stores the interval in 128-second steps (5..99), so 640s
# encodes as 5; do_parse/2 below reverses this mapping.
interval = Integer.floor_div(interval, 128)
[set_critical_bit(@max_inclusion_request_interval, 0), 0x01, interval]
end
defp encode_extension({:name_information, name}) do
name =
name
|> String.codepoints()
|> Enum.reduce([], fn
".", nl ->
nl ++ ["\\", "."]
c, nl ->
nl ++ [c]
end)
[set_critical_bit(@name_information, 0), length(name), name]
end
defp encode_extension({:network_status, {node_id, status}}) do
status =
case status do
:not_in_network -> 0x00
:included -> 0x01
:failing -> 0x02
end
[set_critical_bit(@network_status, 0), 0x02, node_id, status]
end
defp encode_extension({:product_id, {manu_id, prod_id, prod_type, version}}) do
{:ok, version} = Version.parse(version <> ".0")
[
set_critical_bit(@product_id, 0),
0x08,
<<manu_id::size(16)>>,
<<prod_id::size(16)>>,
<<prod_type::size(16)>>,
version.major,
version.minor
]
end
defp encode_extension({:product_type, {gen_class, spec_class, icon_name}}) do
gen_byte = DeviceClasses.generic_device_class_to_byte(gen_class)
spec_byte =
DeviceClasses.specific_device_class_to_byte(
gen_class,
spec_class
)
{:ok, icon_integer} = IconType.to_value(icon_name)
[set_critical_bit(@product_type, 0), 0x04, gen_byte, spec_byte, <<icon_integer::size(16)>>]
end
defp encode_extension({:smart_start_inclusion_setting, setting}) do
setting =
case setting do
:pending -> 0x00
:passive -> 0x02
:ignored -> 0x03
end
[set_critical_bit(@smart_start_inclusion_setting, 1), 0x01, setting]
end
defp encode_extension({:uuid16, uuid16}) do
[UUID16.encode(uuid16)]
end
defp encode_extension({:unknown, binary}) do
[binary]
end
@doc """
Parse the binary into the list of extensions
"""
@spec parse(binary()) :: [extension()]
def parse(binary) do
do_parse(binary, [])
end
defp do_parse(<<>>, extensions) do
Enum.reverse(extensions)
end
defp do_parse(<<@advanced_joining::size(7), 1::size(1), 0x01, keys, rest::binary>>, extensions) do
ext = {:advanced_joining, unmask_keys(keys)}
do_parse(rest, [ext | extensions])
end
defp do_parse(
<<@bootstrapping_mode::size(7), 1::size(1), 0x01, mode, rest::binary>>,
extensions
) do
mode =
case mode do
0x00 -> :security_2
0x01 -> :smart_start
0x02 -> :long_range
end
ext = {:bootstrapping_mode, mode}
do_parse(rest, [ext | extensions])
end
defp do_parse(
<<@location_information::size(7), 0::size(1), len, location::size(len)-unit(8)-binary,
rest::binary>>,
extensions
) do
ext = {:location_information, to_string(location)}
do_parse(rest, [ext | extensions])
end
defp do_parse(
<<@max_inclusion_request_interval::size(7), 0::size(1), 0x01, interval, rest::binary>>,
extensions
) do
steps = interval - 5
interval = 640 + steps * 128
ext = {:max_inclusion_request_interval, interval}
do_parse(rest, [ext | extensions])
end
defp do_parse(
<<@name_information::size(7), 0::size(1), len, name::size(len)-unit(8)-binary,
rest::binary>>,
extensions
) do
name =
name
|> to_string()
|> String.replace("\\", "")
ext = {:name_information, name}
do_parse(rest, [ext | extensions])
end
defp do_parse(
<<@network_status::size(7), 0::size(1), 0x02, node_id, status, rest::binary>>,
extensions
) do
status =
case status do
0x00 -> :not_in_network
0x01 -> :included
0x02 -> :failing
end
ext = {:network_status, {node_id, status}}
do_parse(rest, [ext | extensions])
end
defp do_parse(
<<@product_id::size(7), 0::size(1), 0x08, manu_id::size(16), prod_id::size(16),
prod_type::size(16), version_major, version_minor, rest::binary>>,
extensions
) do
ext = {:product_id, {manu_id, prod_id, prod_type, "#{version_major}.#{version_minor}"}}
do_parse(rest, [ext | extensions])
end
defp do_parse(
<<@product_type::size(7), 0::size(1), 0x04, gen_class, spec_class, icon::size(16),
rest::binary>>,
extensions
) do
{:ok, icon} = IconType.to_name(icon)
{:ok, gen_class} = DeviceClasses.generic_device_class_from_byte(gen_class)
{:ok, spec_class} = DeviceClasses.specific_device_class_from_byte(gen_class, spec_class)
ext = {:product_type, {gen_class, spec_class, icon}}
do_parse(rest, [ext | extensions])
end
defp do_parse(
<<@smart_start_inclusion_setting::size(7), 1::size(1), 0x01, setting, rest::binary>>,
extensions
) do
setting =
case setting do
0x00 -> :pending
0x02 -> :passive
0x03 -> :ignored
end
ext = {:smart_start_inclusion_setting, setting}
do_parse(rest, [ext | extensions])
end
defp do_parse(
<<@uuid16::size(7), 0::size(1), len, values::size(len)-unit(8)-binary, rest::binary>>,
extensions
) do
{:ok, uuid} = UUID16.parse(<<@uuid16::size(7), 0::size(1), len, values::binary>>)
ext = {:uuid16, uuid}
do_parse(rest, [ext | extensions])
end
defp do_parse(<<type, len, values::size(len)-unit(8)-binary, rest::binary>>, extensions) do
ext = {:unknown, <<type, len, values::binary>>}
do_parse(rest, [ext | extensions])
end
defp unmask_keys(byte) do
Enum.reduce(Security.keys(), [], fn key, keys ->
if byte_has_key?(<<byte>>, key) do
[key | keys]
else
keys
end
end)
end
defp byte_has_key?(<<_::size(7), 1::size(1)>>, :s2_unauthenticated), do: true
defp byte_has_key?(<<_::size(6), 1::size(1), _::size(1)>>, :s2_authenticated), do: true
defp byte_has_key?(<<_::size(5), 1::size(1), _::size(2)>>, :s2_access_control), do: true
defp byte_has_key?(<<_::size(1), 1::size(1), _::size(6)>>, :s0), do: true
defp byte_has_key?(_byte, _key), do: false
defp set_critical_bit(byte, cbit) do
<<byte::size(7), cbit::size(1)>>
end
end
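# Round-trip sketch for a single extension:
bin = Grizzly.ZWave.SmartStart.MetaExtension.encode({:bootstrapping_mode, :smart_start})
Grizzly.ZWave.SmartStart.MetaExtension.parse(bin)
# => [bootstrapping_mode: :smart_start]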
# source: lib/grizzly/zwave/smart_start/meta_extension.ex
defmodule Ecto.Model.Queryable do
@moduledoc """
Defines a model as queryable.
In order to create queries in Ecto, you need to pass a queryable
data structure as argument. By using `Ecto.Model.Queryable` in
your model, it imports the `queryable/2` macro.
Assuming you have an entity named `Weather.Entity`, you can associate
it with a model via:
defmodule Weather do
use Ecto.Model
queryable "weather", Weather.Entity
end
Since this is a common pattern, Ecto allows developers to define an entity
inlined in a model:
defmodule Weather do
use Ecto.Model
queryable "weather" do
field :city, :string
field :temp_lo, :integer
field :temp_hi, :integer
field :prcp, :float, default: 0.0
end
end
By making it queryable, three functions are added to the model:
* `new/0` - simply delegates to `entity.new/0`
* `new/1` - simply delegates to `entity.new/1`
* `__model__/1` - a reflection function about the model
This module also automatically imports `from/1` and `from/2`
from `Ecto.Query` as a convenience.
"""
@doc false
defmacro __using__(_) do
quote do
use Ecto.Query
import unquote(__MODULE__)
end
end
@doc """
Defines a queryable name and its entity.
The source and entity can be accessed during the model compilation
via `@ecto_source` and `@ecto_entity`.
## Example
defmodule Post do
use Ecto.Model
queryable "posts", Post.Entity
end
"""
defmacro queryable(source, entity)
@doc """
Defines a queryable name and the entity definition inline. `opts` will be
given to the `use Ecto.Entity` call, see `Ecto.Entity`.
## Examples
# The two following Model definitions are equivalent
defmodule Post do
use Ecto.Model
queryable "posts" do
field :text, :string
end
end
defmodule Post do
use Ecto.Model
defmodule Entity do
use Ecto.Entity, model: Post
field :text, :string
end
queryable "posts", Entity
end
"""
defmacro queryable(source, opts \\ [], do: block)
defmacro queryable(source, opts, [do: block]) do
quote do
opts = unquote(opts)
defmodule Entity do
use Ecto.Entity, Keyword.put(opts, :model, unquote(__CALLER__.module))
unquote(block)
end
queryable(unquote(source), Entity)
end
end
defmacro queryable(source, [], entity) do
quote do
@ecto_source unquote(source)
@ecto_entity unquote(entity)
def new(), do: @ecto_entity.new()
def new(params), do: @ecto_entity.new(params)
def __model__(:source), do: @ecto_source
def __model__(:entity), do: @ecto_entity
end
end
end
# source: lib/ecto/model/queryable.ex
defmodule Owl.ProgressBar do
@moduledoc ~S"""
A live progress bar widget.
## Single bar
Owl.ProgressBar.start(id: :users, label: "Creating users", total: 1000)
Enum.each(1..100, fn _ ->
Process.sleep(10)
Owl.ProgressBar.inc(id: :users)
end)
Owl.LiveScreen.await_render()
## Multiple bars
1..10
|> Enum.map(fn index ->
Task.async(fn ->
range = 1..Enum.random(100..500)
label = "Demo Progress ##{index}"
Owl.ProgressBar.start(
id: {:demo, index},
label: label,
total: range.last,
timer: true,
bar_width_ratio: 0.3,
filled_symbol: "#",
partial_symbols: []
)
Enum.each(range, fn _ ->
Process.sleep(Enum.random(10..50))
Owl.ProgressBar.inc(id: {:demo, index})
end)
end)
end)
|> Task.await_many(:infinity)
Owl.LiveScreen.await_render()
"""
use GenServer, restart: :transient
@type id :: any()
@type label :: String.t()
@tick_interval_ms 100
@doc false
def start_link(opts) do
id = Keyword.fetch!(opts, :id)
GenServer.start_link(__MODULE__, opts, name: {:via, Registry, {Owl.WidgetsRegistry, id}})
end
# we define child_spec just to disable doc
@doc false
def child_spec(init_arg) do
super(init_arg)
end
@doc """
Starts a progress bar on `Owl.LiveScreen`.
## Options
* `:id` - an id of the progress bar. Required.
* `:label` - a label of the progress bar. Required.
* `:total` - a total value. Required.
* `:current` - a current value. Defaults to `0`.
* `:bar_width_ratio` - a bar width ratio. Defaults to 0.7.
* `:timer` - set to `true` to launch a timer. Defaults to `false`.
* `:start_symbol` - a symbol that is rendered at the beginning of the progress bar. Defaults to `"["`.
* `:end_symbol` - a symbol that is rendered at the end of the progress bar. Defaults to `"]"`.
* `:filled_symbol` - a symbol that is used when the `current` value is big enough to fill the cell. Defaults to `"≡"`.
* `:partial_symbols` - a list of symbols that are used when `current` value is too small to render
`filled_symbol`. Defaults to `["-", "="]`.
* `:empty_symbol` - an empty symbol. Defaults to `" "`.
* `:screen_width` - a width of output data. Defaults to width of the terminal or 80 symbols, if a terminal is not available.
"""
@spec start(
label: String.t(),
id: id(),
total: pos_integer(),
timer: boolean(),
current: non_neg_integer(),
bar_width_ratio: nil | float(),
start_symbol: Owl.Data.t(),
end_symbol: Owl.Data.t(),
filled_symbol: Owl.Data.t(),
partial_symbols: [Owl.Data.t()],
empty_symbol: Owl.Data.t(),
screen_width: pos_integer()
) :: DynamicSupervisor.on_start_child()
def start(opts) do
DynamicSupervisor.start_child(Owl.WidgetsSupervisor, {__MODULE__, opts})
end
@doc """
Increases the `current` value by `step`.
When the `current` value reaches `total`, the progress bar terminates.
## Options
* `:id` - a required identifier of the progress bar.
* `:step` - a value by which the `current` value should be increased. Defaults to `1`.
## Examples
Owl.ProgressBar.inc(id: "Creating users")
Owl.ProgressBar.inc(id: "Creating users", step: 10)
"""
@spec inc(id: id(), step: integer()) :: :ok
def inc(opts \\ []) do
step = opts[:step] || 1
id = Keyword.fetch!(opts, :id)
GenServer.cast({:via, Registry, {Owl.WidgetsRegistry, id}}, {:inc, step})
end
@impl true
def init(opts) do
total = Keyword.fetch!(opts, :total)
label = Keyword.fetch!(opts, :label)
timer = Keyword.get(opts, :timer, false)
filled_symbol = opts[:filled_symbol] || "≡"
partial_symbols = opts[:partial_symbols] || ["-", "="]
empty_symbol = opts[:empty_symbol] || " "
start_symbol = opts[:start_symbol] || "["
end_symbol = opts[:end_symbol] || "]"
screen_width = opts[:screen_width]
current = opts[:current] || 0
bar_width_ratio = opts[:bar_width_ratio] || 0.7
live_screen_server = opts[:live_screen_server] || Owl.LiveScreen
start_time =
if timer do
Process.send_after(self(), :tick, @tick_interval_ms)
System.monotonic_time(:millisecond)
end
live_screen_ref = make_ref()
state = %{
live_screen_ref: live_screen_ref,
live_screen_server: live_screen_server,
bar_width_ratio: bar_width_ratio,
total: total,
label: label,
start_time: start_time,
current: current,
screen_width: screen_width,
start_symbol: start_symbol,
end_symbol: end_symbol,
empty_symbol: empty_symbol,
filled_symbol: filled_symbol,
partial_symbols: partial_symbols
}
Owl.LiveScreen.add_block(live_screen_server, live_screen_ref,
state: state,
render: &render/1
)
{:ok, state}
end
@impl true
def handle_cast({:inc, step}, state) do
state = %{state | current: state.current + step}
Owl.LiveScreen.update(state.live_screen_server, state.live_screen_ref, state)
if state.current >= state.total do
{:stop, :normal, state}
else
{:noreply, state}
end
end
@impl true
def handle_info(:tick, state) do
if state.current < state.total do
Process.send_after(self(), :tick, @tick_interval_ms)
Owl.LiveScreen.update(state.live_screen_server, state.live_screen_ref, state)
end
{:noreply, state}
end
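# Formats elapsed milliseconds as "MM:SS.s", e.g. format_time(89_200) #=> "01:29.2"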
defp format_time(milliseconds) do
ss =
(rem(milliseconds, 60_000) / 1000)
|> Float.round(1)
|> to_string()
|> String.pad_leading(4, "0")
mm =
milliseconds
|> div(60_000)
|> to_string()
|> String.pad_leading(2, "0")
"#{mm}:#{ss}"
end
@doc """
Renders a progress bar that can be consumed by `Owl.IO.puts/2`.
Used as a callback for blocks in `Owl.LiveScreen`.
## Examples
iex> Owl.ProgressBar.render(%{
...> label: "Demo",
...> total: 200,
...> current: 60,
...> bar_width_ratio: 0.7,
...> start_symbol: "[",
...> end_symbol: "]",
...> filled_symbol: "#",
...> partial_symbols: [],
...> empty_symbol: ".",
...> screen_width: 40
...> }) |> to_string()
"Demo [########....................] 30%"
iex> Owl.ProgressBar.render(%{
...> label: "Demo",
...> total: 200,
...> current: 8,
...> bar_width_ratio: 0.4,
...> start_symbol: "|",
...> end_symbol: "|",
...> filled_symbol: "█",
...> partial_symbols: ["▏", "▎", "▍", "▌", "▋", "▊", "▉"],
...> empty_symbol: " ",
...> screen_width: 40,
...> start_time: -576460748012758993,
...> current_time: -576460748012729828
...> }) |> to_string()
"Demo 00:29.2 |▋ | 4%"
iex> Owl.ProgressBar.render(%{
...> label: "Demo",
...> total: 200,
...> current: 8,
...> bar_width_ratio: 0.7,
...> start_symbol: "[",
...> end_symbol: "]",
...> filled_symbol: Owl.Data.tag("≡", :cyan),
...> partial_symbols: [Owl.Data.tag("-", :green), Owl.Data.tag("=", :blue)],
...> empty_symbol: " ",
...> screen_width: 40
...> })|> Owl.Data.to_ansidata() |> to_string
"Demo [\e[36m≡\e[39m\e[32m-\e[39m ] 4%\e[0m"
"""
@spec render(%{
optional(:current_time) => nil | integer(),
optional(:start_time) => nil | integer(),
optional(:screen_width) => nil | pos_integer(),
bar_width_ratio: float(),
label: String.t(),
total: pos_integer(),
current: non_neg_integer(),
start_symbol: Owl.Data.t(),
end_symbol: Owl.Data.t(),
filled_symbol: Owl.Data.t(),
partial_symbols: [Owl.Data.t()],
empty_symbol: Owl.Data.t()
}) :: Owl.Data.t()
def render(
%{
label: label,
total: total,
current: current,
bar_width_ratio: bar_width_ratio,
start_symbol: start_symbol,
end_symbol: end_symbol,
filled_symbol: filled_symbol,
partial_symbols: partial_symbols,
empty_symbol: empty_symbol
} = params
) do
screen_width = params[:screen_width] || Owl.IO.columns() || 80
percentage_width = 5
start_end_symbols_width = 2
percentage = String.pad_leading("#{trunc(current / total * 100)}%", percentage_width)
elapsed_time =
case params[:start_time] do
nil ->
nil
start_time ->
current_time = params[:current_time] || System.monotonic_time(:millisecond)
current_time - start_time
end
# format_time width + 1 space = 8
elapsed_time_width = if elapsed_time, do: 8, else: 0
bar_width = trunc(screen_width * bar_width_ratio)
label_width =
screen_width - bar_width - percentage_width - start_end_symbols_width - elapsed_time_width
# Float.ceil(x, 2) is needed to handle numbers like 56.99999999999999
progress = min(Float.ceil(current / (total / bar_width), 2), bar_width * 1.0)
filled_blocks_integer = floor(progress)
next_block =
case partial_symbols do
[] ->
nil
partial_symbols ->
next_block_filling = Float.floor(progress - filled_blocks_integer, 2)
if next_block_filling != 0 do
idx = ceil(next_block_filling * length(partial_symbols)) - 1
Enum.at(partial_symbols, idx)
end
end
[
String.pad_trailing(label, label_width),
case elapsed_time do
nil -> []
elapsed_time -> [format_time(elapsed_time), " "]
end,
start_symbol,
List.duplicate(filled_symbol, filled_blocks_integer),
case next_block do
nil ->
List.duplicate(empty_symbol, bar_width - filled_blocks_integer)
next_block ->
[next_block, List.duplicate(empty_symbol, bar_width - filled_blocks_integer - 1)]
end,
end_symbol,
percentage
]
end
end
|
lib/owl/progress_bar.ex
| 0.825238
| 0.437042
|
progress_bar.ex
|
starcoder
|
defmodule Expression do
@moduledoc """
Documentation for `Expression`, a library to parse and evaluate
[Floip](https://floip.gitbook.io/flow-specification/expressions) compatible expressions
Expression is an expression language which consists of the functions provided
by Excel with a few additions.
Function and variable names are not case-sensitive so UPPER is equivalent to upper:
```
contact.name -> <NAME>
FIRST_WORD(contact.name) -> Marshawn
first_word(CONTACT.NAME) -> Marshawn
```
For templating, RapidPro uses the @ character to denote either a single variable substitution
or the beginning of an Expression block. `@` was chosen because a broad range of users know
how to type it, regardless of keyboard. It does have the disadvantage of being used in
email addresses and Twitter handles, but these are rarely ambiguous and escaping can be
done easily via doubling of the character (`@@`).
Functions are called by using the block syntax:
```
10 plus 4 is @(SUM(10, 4))
```
Within a block, `@` is not required to refer to variable in the context:
```
Hello @(contact.name)
```
A template can contain more than one substitution or block:
```
Hello @contact.name, you were born in @(YEAR(contact.birthday))
```
"""
alias Expression.{Ast, Eval}
def parse_literal(binary) do
case Ast.literal(binary) do
{:ok, [{:literal, literal}], "", _, _, _} ->
{:literal, literal}
{:ok, _ast, _remainder, _, _, _} ->
{:error, binary}
{:error, _reason, _remainder, _, _, _} ->
{:error, binary}
end
end
def parse_expression(expression) do
case Ast.aexpr(expression) do
{:ok, ast, "", _, _, _} ->
{:ok, ast}
{:ok, _ast, remainder, _, _, _} ->
{:error, "Unable to parse: #{inspect(remainder)}"}
{:error, reason, _remainder, _, _, _} ->
{:error, reason}
end
end
def evaluate_block(expression, context \\ %{}, mod \\ Expression.Callbacks)
def evaluate_block(expression, context, mod) do
with {:ok, ast} <- parse_expression(expression),
{:ok, result} <- Eval.evaluate([substitution: ast], context, mod) do
{:ok, result}
end
end
def evaluate_block!(expression, context \\ %{}, mod \\ Expression.Callbacks)
def evaluate_block!(expression, context, mod) do
with {:ok, ast} <- parse_expression(expression),
result <- Eval.evaluate!([substitution: ast], context, mod) do
result
else
{:error, ast_error} ->
raise ast_error
end
end
def parse(text) do
case Ast.parse(text) do
{:ok, ast, "", _, _, _} ->
{:ok, ast}
{:ok, _ast, remainder, _, _, _} ->
{:error, "Unable to parse: #{inspect(remainder)}"}
{:error, reason, _remainder, _, _, _} ->
{:error, reason}
end
end
def evaluate(text, context \\ %{}, mod \\ Expression.Callbacks)
def evaluate(text, context, mod) do
with {:ok, ast} <- parse(text),
{:ok, result} <- Eval.evaluate(ast, context, mod) do
{:ok, result}
end
end
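# Usage sketch (context keys as strings; the exact rendered output depends on
# Expression.Eval and the callbacks module in use):
#
#     Expression.evaluate("Hello @(contact.name)", %{"contact" => %{"name" => "Mary"}})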
end
|
lib/expression.ex
| 0.903575
| 0.973894
|
expression.ex
|
starcoder
|
defmodule Aoc2019.Day7 do
@behaviour DaySolution
def solve_part1(), do: get_program() |> get_max_signal()
def solve_part2(), do: get_program() |> get_max_signal_loop()
defp get_program(), do: Utils.load_delim_ints("inputs/input_day7", ",")
def get_max_signal(program),
# Number of permutations = 5! = 120
do:
[0, 1, 2, 3, 4]
|> Utils.permutations()
|> Enum.map(fn phase_seq -> program |> compute_amplifiers(phase_seq) end)
|> Enum.max()
defp get_max_signal_loop(program),
do:
[5, 6, 7, 8, 9]
|> Utils.permutations()
|> Enum.map(fn phase_seq ->
program
|> List.duplicate(5)
|> compute_amplifiers_loop(phase_seq)
end)
|> Enum.max()
def compute_amplifiers(program, [phase_A, phase_B, phase_C, phase_D, phase_E]) do
[output_A] = program |> Intcode.eval(%IntcodeParams{inputs: [phase_A, 0]})
[output_B] = program |> Intcode.eval(%IntcodeParams{inputs: [phase_B, output_A]})
[output_C] = program |> Intcode.eval(%IntcodeParams{inputs: [phase_C, output_B]})
[output_D] = program |> Intcode.eval(%IntcodeParams{inputs: [phase_D, output_C]})
[output_E] = program |> Intcode.eval(%IntcodeParams{inputs: [phase_E, output_D]})
output_E
end
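# Part 2: the amplifiers run in a feedback loop. Each amplifier keeps its own
# program state and instruction pointer between passes; the phase setting is only
# supplied as an input on the first pass. The loop ends when amplifier E halts,
# at which point its previous output is the answer.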
def compute_amplifiers_loop(
[program_A, program_B, program_C, program_D, program_E],
[phase_A, phase_B, phase_C, phase_D, phase_E],
[idx_A, idx_B, idx_C, idx_D, idx_E] \\ [0, 0, 0, 0, 0],
[prev_out_A, prev_out_B, prev_out_C, prev_out_D, prev_out_E] \\ [0, 0, 0, 0, 0],
first \\ true
) do
{program_A, output_A, idx_A} =
program_A |> compute_amplifiers_loop_helper(idx_A, first, phase_A, prev_out_E, prev_out_A)
{program_B, output_B, idx_B} =
program_B |> compute_amplifiers_loop_helper(idx_B, first, phase_B, output_A, prev_out_B)
{program_C, output_C, idx_C} =
program_C |> compute_amplifiers_loop_helper(idx_C, first, phase_C, output_B, prev_out_C)
{program_D, output_D, idx_D} =
program_D |> compute_amplifiers_loop_helper(idx_D, first, phase_D, output_C, prev_out_D)
case program_E
|> compute_amplifiers_loop_helper(idx_E, first, phase_E, output_D, prev_out_E) do
{nil, prev_out, nil} ->
prev_out
{program_E, output_E, idx_E} ->
[program_A, program_B, program_C, program_D, program_E]
|> compute_amplifiers_loop(
[phase_A, phase_B, phase_C, phase_D, phase_E],
[idx_A, idx_B, idx_C, idx_D, idx_E],
[output_A, output_B, output_C, output_D, output_E],
false
)
end
end
defp compute_amplifiers_loop_helper(program, idx, first, phase, input, prev_out) do
case program
|> Intcode.eval(%IntcodeParams{
idx: idx,
inputs: if(first, do: [phase, input], else: [input]),
loop_mode: true
}) do
{program, output, idx} -> {program, output, idx}
:end -> {nil, prev_out, nil}
end
end
end
|
lib/aoc2019/day7.ex
| 0.581184
| 0.487673
|
day7.ex
|
starcoder
|
defmodule Alods.Delivered do
@moduledoc """
This module takes care of starting a DETS store which will hold delivered messages.
"""
import Ex2ms
require Logger
use Alods.DETS, "delivered"
@spec init(String.t) :: {:ok, nil}
def init(name) do
{:ok, _} = super(name)
_pid = clean_store()
twenty_four_hours_in_ms = 1000 * 60 * 60 * 24
{:ok, _ref} = :timer.apply_interval(twenty_four_hours_in_ms, __MODULE__, :clean_store, [])
{:ok, nil}
end
@doc """
This function will initiate a DETS table clean, removing all entries that are older than the configured store time (7 days by default).
"""
@spec clean_store :: pid() | {pid(), reference()}
def clean_store, do: Process.spawn(fn -> GenServer.cast(__MODULE__, {:clean_store}) end, [])
@doc """
Stores the given record, updating the `delivered_at` field, resetting the reason, and setting the status to delivered.
After successful storing, it will be deleted from the Queue.
"""
@spec success(%Alods.Record{}) :: :ok
def success(%Alods.Record{} = record) do
record
|> Alods.Record.update!(delivered_at: DateTime.utc_now, status: :delivered, reason: nil)
|> insert_and_maybe_run_callback
end
@doc """
Stores the given record and sets the status to permanent failure.
After successful storing, it will be deleted from the Queue.
"""
@spec permanent_failure(%Alods.Record{}, map) :: :ok
def permanent_failure(%Alods.Record{} = record, reason) do
record
|> Alods.Record.update!(delivered_at: nil, status: :permanent_failure, reason: reason)
|> insert_and_maybe_run_callback
end
def handle_cast({:clean_store}, state) do
query = select_processing_longer_than_days(Application.get_env(:alods, :store_delivered_entries_for_days, 7))
__MODULE__
|> :dets.select(query)
|> Enum.each(fn {_id, record} -> delete(record) end)
{:noreply, state}
end
@spec select_all :: list
defp select_all do
fun do {id, record} when id != nil -> record end
end
@spec select_processing_longer_than_days(non_neg_integer) :: list
defp select_processing_longer_than_days(days) do
time = :os.system_time(:seconds) - (days * 86400)
Ex2ms.fun do
{_id, %{timestamp: timestamp, status: status}} = record
when timestamp <= ^time and status == :delivered -> record
end
end
defp maybe_run_callback(%Alods.Record{callback: callback} = record) when not is_nil(callback) do
{function, _} = Code.eval_string(record.callback)
function.(record)
rescue
error -> Logger.warn("Callback function #{record.callback} failed with #{inspect error}")
end
defp maybe_run_callback(_), do: nil
defp insert_and_maybe_run_callback(record) do
true = :dets.insert_new(__MODULE__, {record.id, record})
:ok = Alods.Queue.delete(record.id)
maybe_run_callback(record)
:ok
end
end
|
lib/alods/delivered.ex
| 0.765374
| 0.402304
|
delivered.ex
|
starcoder
|
defmodule ExToErl do
@moduledoc """
Utilities to convert Elixir expressions into the corresponding Erlang.
This package is meant to be used as a learning tool or as part of development workflow.
It was written to answer questions like: "What does this Elixir expression compile to?".
It's very useful to explore the output of the Elixir compiler in a user-friendly way.
One should be careful when using this in production with user supplied input
because most functions in this module run the Elixir compiler and generate atoms
dynamically at runtime (as the Elixir compiler does).
The code might also be a victim of race conditions (I haven't tested running it in parallel, though).
It has no tests yet, but I hope it will have some in the future.
The API will probably change a lot.
I might switch from raising errors to returning `{:ok, value}` and `:error`.
"""
@sandbox_module ExToEarl.Sandboxes.ElixirExpressionCompilerSandbox
@doc """
Extracts the Erlang abstract code from a BEAM module.
The argument to this function can be either:
- The module name (an atom)
- A `{:module, module, binary, _}` tuple, returned by `Module.create/3`
- The `binary` part from the tuple above
## Examples
TODO
"""
def beam_to_erlang_abstract_code(module) do
beam =
case module do
module when is_atom(module) ->
:code.which(module)
{:module, _, binary, _} when is_binary(binary) ->
binary
end
{:ok, {_, [{:abstract_code, {_, abstract_code}}]}} = :beam_lib.chunks(beam, [:abstract_code])
abstract_code
end
@doc """
Extracts the Erlang abstract code from a BEAM module and converts it
into Erlang source code.
The argument to this function can be either:
- The module name (an atom)
- A `{:module, module, binary, _}` tuple, returned by `Module.create/3`
- The `binary` part from the tuple above
## Examples
iex> module = Module.create(MyModule, quote(do: def f(x) do x end), __ENV__)
{:module, MyModule,
<<70, 79, 82, 49, 0, 0, 3, 220, 66, 69, 65, 77, 65, 116, 85, 56, 0, 0, 0, 124,
0, 0, 0, 13, 15, 69, 108, 105, 120, 105, 114, 46, 77, 121, 77, 111, 100, 117,
108, 101, 8, 95, 95, 105, 110, 102, 111, ...>>, {:f, 1}}
iex> ExToErl.beam_to_erlang_abstract_code(module)
[
{:attribute, 6, :file, {'iex', 6}},
{:attribute, 6, :module, MyModule},
{:attribute, 6, :compile, [:no_auto_import]},
{:attribute, 6, :export, [__info__: 1, f: 1]},
{:attribute, 6, :spec,
{{:__info__, 1},
[
{:type, 6, :fun,
[
{:type, 6, :product,
[
{:type, 6, :union,
[
{:atom, 6, :attributes},
{:atom, 6, :compile},
{:atom, 6, :functions},
{:atom, 6, :macros},
{:atom, 6, :md5},
{:atom, 6, :module},
{:atom, 6, :deprecated}
]}
]},
{:type, 6, :any, []}
]}
]}},
{:function, 0, :__info__, 1,
[
{:clause, 0, [{:atom, 0, :module}], [], [{:atom, 0, MyModule}]},
{:clause, 0, [{:atom, 0, :functions}], [],
[{:cons, 0, {:tuple, 0, [{:atom, 0, :f}, {:integer, 0, 1}]}, {nil, 0}}]},
{:clause, 0, [{:atom, 0, :macros}], [], [nil: 0]},
{:clause, 0, [{:match, 0, {:var, 0, :Key}, {:atom, 0, :attributes}}], [],
[
{:call, 0,
{:remote, 0, {:atom, 0, :erlang}, {:atom, 0, :get_module_info}},
[{:atom, 0, MyModule}, {:var, 0, :Key}]}
]},
{:clause, 0, [{:match, 0, {:var, 0, :Key}, {:atom, 0, :compile}}], [],
[
{:call, 0,
{:remote, 0, {:atom, 0, :erlang}, {:atom, 0, :get_module_info}},
[{:atom, 0, MyModule}, {:var, 0, :Key}]}
]},
{:clause, 0, [{:match, 0, {:var, 0, :Key}, {:atom, 0, :md5}}], [],
[
{:call, 0,
{:remote, 0, {:atom, 0, :erlang}, {:atom, 0, :get_module_info}},
[{:atom, 0, MyModule}, {:var, 0, :Key}]}
]},
{:clause, 0, [{:atom, 0, :deprecated}], [], [nil: 0]}
]},
{:function, 6, :f, 1,
[{:clause, 6, [{:var, 6, :__@1}], [], [{:var, 6, :__@1}]}]}
]
"""
def beam_to_erlang_source(module) do
abstract_code = beam_to_erlang_abstract_code(module)
erlang_abstract_code_to_string(:erl_syntax.form_list(abstract_code))
end
@doc """
Extracts the Erlang abstract code from a BEAM module, converts it
into Erlang source code and writes it into a file.
The first argument to this function can be either:
- The module name (an atom)
- A `{:module, module, binary, _}` tuple, returned by `Module.create/3`
- The `binary` part from the tuple above
## Examples
iex> module = Module.create(MyModule, quote(do: def f(x) do x end), __ENV__)
{:module, MyModule,
<<70, 79, 82, 49, 0, 0, 3, 220, 66, 69, 65, 77, 65, 116, 85, 56, 0, 0, 0, 124,
0, 0, 0, 13, 15, 69, 108, 105, 120, 105, 114, 46, 77, 121, 77, 111, 100, 117,
108, 101, 8, 95, 95, 105, 110, 102, 111, ...>>, {:f, 1}}
iex> ExToErl.beam_to_erlang_source(module) |> IO.puts()
-file("iex", 3).
-module('Elixir.MyModule').
-compile([no_auto_import]).
-export(['__info__'/1, f/1]).
-spec '__info__'(attributes | compile | functions |
macros | md5 | module | deprecated) -> any().
'__info__'(module) -> 'Elixir.MyModule';
'__info__'(functions) -> [{f, 1}];
'__info__'(macros) -> [];
'__info__'(Key = attributes) ->
erlang:get_module_info('Elixir.MyModule', Key);
'__info__'(Key = compile) ->
erlang:get_module_info('Elixir.MyModule', Key);
'__info__'(Key = md5) ->
erlang:get_module_info('Elixir.MyModule', Key);
'__info__'(deprecated) -> [].
f(__@1) -> __@1.
:ok
"""
def beam_to_erlang_source(module, filename) do
contents = beam_to_erlang_source(module)
File.write(filename, contents)
end
@doc """
Converts a string containing Elixir code into an Erlang expression.
This function expects an Elixir expression.
If you supply a block (which is a valid Elixir expression), only the last one
will be converted into an Erlang expression.
This limitation is a result of the fact that in Erlang a sequence of instructions
if not a an Erlang expression (on the other hand, a sequence of Elixir
expressions is an Elixir expression).
Don't use this function to convert entire Elixir modules to Erlang.
Use `ExToErl.beam_to_erlang_source/1` instead.
The function raises if the string is not valid Elixir.
As with most functions in this module, this function *creates atoms at runtime*
because valid Erlang AST contains atoms.
## Examples
Single expressions:
iex> ExToErl.elixir_source_to_erlang_abstract_code("a + b")
{:op, 1, :+, {:var, 1, :_a@1}, {:var, 1, :_b@1}}
iex> ExToErl.elixir_source_to_erlang_abstract_code("a <= b")
{:op, 1, :"=<", {:var, 1, :_a@1}, {:var, 1, :_b@1}}
Elixir blocks (only the last expression is returned):
iex> ExToErl.elixir_source_to_erlang_abstract_code("_ = a + b; c + d")
{:op, 1, :+, {:var, 1, :_c@1}, {:var, 1, :_d@1}}
You can import functions and macros inside your Elixir expression:
iex> ExToErl.elixir_source_to_erlang_abstract_code("import Bitwise; a >>> b")
{:op, 1, :bsr, {:var, 1, :_a@1}, {:var, 1, :_b@1}}
iex> ExToErl.elixir_source_to_erlang_abstract_code("import Bitwise; a &&& b")
{:op, 1, :band, {:var, 1, :_a@1}, {:var, 1, :_b@1}}
Some expressions may raise warnings, although they should be the same warnings
as if the Elixir expression were to be compiled inside a normal Elixir module:
iex> ExToErl.elixir_source_to_erlang_abstract_code("a = b")
warning: variable "a" is unused
warning: variable "a" is unused
{:match, 1, {:var, 1, :_a@2}, {:var, 1, :_b@1}}
Some Elixir operators are actually macros or special forms which can be expanded
into quite complex Erlang code:
iex> ExToErl.elixir_source_to_erlang_abstract_code("a or b")
{:case, 1, {:var, 1, :_a@1},
[
{:clause, [generated: true, location: 1], [{:atom, 0, false}], [],
[{:var, 1, :_b@1}]},
{:clause, [generated: true, location: 1], [{:atom, 0, true}], [],
[{:atom, 0, true}]},
{:clause, [generated: true, location: 1], [{:var, 1, :__@1}], [],
[
{:call, 1, {:remote, 1, {:atom, 0, :erlang}, {:atom, 1, :error}},
[{:tuple, 1, [{:atom, 0, :badbool}, {:atom, 0, :or}, {:var, 1, :__@1}]}]}
]}
]}
"""
def elixir_source_to_erlang_abstract_code(elixir) do
ast = Code.string_to_quoted!(elixir)
elixir_ast_to_erlang_abstract_code(ast)
end
@doc ~S"""
Converts a string containing Elixir code into Erlang source code.
This function expects an Elixir expression.
If you supply a block (which is a valid Elixir expression), only the last one
will be converted into an Erlang expression.
This limitation is a result of the fact that in Erlang a sequence of instructions
is not an Erlang expression (on the other hand, a sequence of Elixir expressions
is an Elixir expression).
Don't use this function to convert entire Elixir modules to Erlang source code.
Use `ExToErl.beam_to_erlang_source/1` instead.
The function raises if the string is not valid Elixir.
As with most functions in this module, this function *creates atoms at runtime*
because valid Erlang AST contains atoms.
## Examples
iex> ExToErl.elixir_source_to_erlang_source("a")
"_a@1\n"
iex> ExToErl.elixir_source_to_erlang_source("a + b")
"_a@1 + _b@1\n"
iex> ExToErl.elixir_source_to_erlang_source("a + b < f.(x)")
"_a@1 + _b@1 < _f@1(_x@1)\n"
iex> ExToErl.elixir_source_to_erlang_source("a or b") |> IO.puts()
case _a@1 of
false -> _b@1;
true -> true;
__@1 -> erlang:error({badbool, 'or', __@1})
end
:ok
iex> ExToErl.elixir_source_to_erlang_source("a.b") |> IO.puts()
case _a@1 of
#{b := __@1} -> __@1;
__@1 when erlang:is_map(__@1) ->
erlang:error({badkey, b, __@1});
__@1 -> __@1:b()
end
:ok
"""
def elixir_source_to_erlang_source(elixir) do
abstract_code = elixir_source_to_erlang_abstract_code(elixir)
erlang_abstract_code_to_string(abstract_code)
end
@doc """
Converts Elixir AST into Erlang abstract code.
This function expects an Elixir expression.
If you supply a block (which is a valid Elixir expression), only the last one
will be converted into an Erlang expression.
This limitation is a result of the fact that in Erlang a sequence of instructions
is not an Erlang expression (on the other hand, a sequence of Elixir expressions
is an Elixir expression).
As with most functions in this module, this function *creates atoms at runtime*
because valid Erlang AST contains atoms.
## Examples
iex> ExToErl.elixir_ast_to_erlang_abstract_code({:+, [line: 1], [{:a, [line: 1], nil}, {:b, [line: 1], nil}]})
{:op, 1, :+, {:var, 1, :_a@1}, {:var, 1, :_b@1}}
iex> Code.string_to_quoted!("a - b") |> ExToErl.elixir_ast_to_erlang_abstract_code()
{:op, 1, :-, {:var, 1, :_a@1}, {:var, 1, :_b@1}}
"""
def elixir_ast_to_erlang_abstract_code(ast) do
variables = extract_variables_from_elixir_ast(ast)
module_body =
quote do
@moduledoc "Just a temporary place to store some Erlang abstract code"
def main(unquote_splicing(variables)) do
unquote(ast)
end
end
{:module, module_name, _, _} = module = Module.create(@sandbox_module, module_body, __ENV__)
full_module_abstract_code = beam_to_erlang_abstract_code(module)
function = find_function_by_name(full_module_abstract_code, :main)
body = extract_body_from_function_clause(function)
# Purge and delete the module to avoid the annoying warning about redefining modules.
# Functions in this module are never called at runtime, so purging the old code is safe.
:code.purge(module_name)
true = :code.delete(module_name)
body
end
@doc """
Parses an Erlang expression into erlang abstract code.
## Examples
iex> ExToErl.erlang_source_to_abstract_code("A + B.")
{:op, 1, :+, {:var, 1, :A}, {:var, 1, :B}}
iex> ExToErl.erlang_source_to_abstract_code("A < B.")
{:op, 1, :<, {:var, 1, :A}, {:var, 1, :B}}
iex> ExToErl.erlang_source_to_abstract_code("A + B * C < F + G.")
{:op, 1, :<,
{:op, 1, :+, {:var, 1, :A}, {:op, 1, :*, {:var, 1, :B}, {:var, 1, :C}}},
{:op, 1, :+, {:var, 1, :F}, {:var, 1, :G}}}
iex> ExToErl.erlang_source_to_abstract_code("A + B * C < f(x) + g(y).")
{:op, 1, :<,
{:op, 1, :+, {:var, 1, :A}, {:op, 1, :*, {:var, 1, :B}, {:var, 1, :C}}},
{:op, 1, :+, {:call, 1, {:atom, 1, :f}, [{:atom, 1, :x}]},
{:call, 1, {:atom, 1, :g}, [{:atom, 1, :y}]}}}
iex> ExToErl.erlang_source_to_abstract_code("A + B * C < f(X) + g(Y).")
{:op, 1, :<,
{:op, 1, :+, {:var, 1, :A}, {:op, 1, :*, {:var, 1, :B}, {:var, 1, :C}}},
{:op, 1, :+, {:call, 1, {:atom, 1, :f}, [{:var, 1, :X}]},
{:call, 1, {:atom, 1, :g}, [{:var, 1, :Y}]}}}
"""
def erlang_source_to_abstract_code(bin) do
charlist = String.to_charlist(bin)
{:ok, tokens, _} = :erl_scan.string(charlist)
{:ok, [expression]} = :erl_parse.parse_exprs(tokens)
expression
end
@doc ~S"""
Pretty prints Erlang abstract code as Erlang source code.
## Examples
iex> "A + B."
...> |> ExToErl.erlang_source_to_abstract_code()
...> |> ExToErl.erlang_abstract_code_to_string()
"A + B\n"
"""
def erlang_abstract_code_to_string(abstract_code, opts \\ []) do
indent = Keyword.get(opts, :indent, 8)
[:erl_prettypr.format(abstract_code), "\n"]
|> to_string()
|> String.replace("\t", String.duplicate(" ", indent))
end
# ----------------------------------
# Private functions
# ----------------------------------
defp find_function_by_name(forms, name) do
Enum.find(forms, fn form ->
case form do
{:function, _line, ^name, _arity, clauses} when is_list(clauses) ->
true
_ ->
false
end
end)
end
# Extracts the list of variables from an Elixir AST fragment
defp extract_variables_from_elixir_ast(ast) do
{_ast, variables} =
Macro.postwalk(ast, [], fn ast_node, variables ->
case ast_node do
{name, _meta, module} = variable when is_atom(name) and is_atom(module) ->
{ast_node, [variable | variables]}
_other ->
{ast_node, variables}
end
end)
variables
end
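# Note: variables are collected in reverse order of appearance, since the
# postwalk prepends each variable to the accumulator.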
defp extract_body_from_function_clause({:function, _line, _name, _arity, [clause]}) do
{:clause, _line, _args, _guards, body} = clause
List.last(body)
end
end
|
lib/ex_to_erl.ex
| 0.741112
| 0.563678
|
ex_to_erl.ex
|
starcoder
|
defmodule OkThen.Result.Private do
@moduledoc """
These functions are not part of the public API, and may change without notice.
"""
alias OkThen.Result
@type func_or_value(out) :: (any() -> out) | (() -> out) | out
@type func_or_value(tag, out) :: (tag, any() -> out) | func_or_value(out)
defguard is_tag(value) when is_atom(value) and not is_nil(value)
defguard is_tagged_with_atom(value, tag)
when value == tag or (is_tuple(value) and elem(value, 0) == tag)
@spec normalize_result_input(any(), atom()) :: Result.tagged()
def normalize_result_input(tag, default_tag \\ :untagged)
def normalize_result_input(tag, _default_tag) when is_tag(tag), do: {tag, {}}
def normalize_result_input({tag, _} = term, _default_tag) when is_tag(tag), do: term
def normalize_result_input(value, _default_tag)
when is_tuple(value) and is_tag(elem(value, 0)) do
{elem(value, 0), Tuple.delete_at(value, 0)}
end
def normalize_result_input(value, default_tag) when is_tag(default_tag) do
Result.from_as(value, default_tag)
|> normalize_result_input()
end
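# e.g. normalize_result_input(:ok) #=> {:ok, {}}
#      normalize_result_input({:error, :not_found, %{}}) #=> {:error, {:not_found, %{}}}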
@spec normalize_result_output(t) :: t when t: Result.tagged()
def normalize_result_output({tag, {}}) when is_tag(tag), do: tag
def normalize_result_output({tag, _} = result) when is_tag(tag), do: result
@spec normalize_value(any()) :: any()
def normalize_value({value}), do: value
def normalize_value(value), do: value
@spec map_normalized_result(Result.tagged(), func_or_value(any(), any())) ::
Result.result_input()
def map_normalized_result({tag, value}, func_or_value) when is_function(func_or_value) do
Function.info(func_or_value, :arity)
|> case do
{:arity, 0} -> func_or_value.()
{:arity, 1} -> func_or_value.(value)
{:arity, 2} -> func_or_value.(tag, value)
_ -> raise(ArgumentError, "Value-mapping function must have arity between 0 and 2.")
end
end
def map_normalized_result(_normalized_result, func_or_value), do: func_or_value
@spec map_value(any(), func_or_value(any())) :: any()
def map_value(value, func_or_value) when is_function(func_or_value) do
Function.info(func_or_value, :arity)
|> case do
{:arity, 0} -> func_or_value.()
{:arity, 1} -> func_or_value.(value)
_ -> raise(ArgumentError, "Value-mapping function must have arity between 0 and 1.")
end
end
def map_value(_value, func_or_value), do: func_or_value
end
|
lib/ok_then/result/private.ex
| 0.7641
| 0.452294
|
private.ex
|
starcoder
|
defmodule ExAlgebra.Vector3 do
alias ExAlgebra.Vector, as: Vector
alias ExAlgebra.Matrix, as: Matrix
@moduledoc """
The ExAlgebra Vector3 module is a collection of functions that perform
computations on 3-vectors. 3-vectors are represented by lists with exactly
three elements.
"""
@doc """
Computes the cross product.
##### Examples
iex> ExAlgebra.Vector3.cross_product([2, 1, -1], [-3, 4, 1])
[5, 1, 11]
"""
@spec cross_product([number], [number]) :: [number]
def cross_product([x, y, z], [u, v, w]), do: [y * w - z * v, z * u - x * w, x * v - y * u]
@doc """
Returns true if two vectors are parallel and false otherwise.
##### Examples
iex> ExAlgebra.Vector3.is_parallel?([2, -4, 1], [-6, 12, -3])
true
"""
@spec is_parallel?([number], [number]) :: boolean
def is_parallel?(u, v), do: cross_product(u, v) == [0, 0, 0]
@doc """
Computes the equation of the plane. This outputs a 4-vector with its 4<sup>th</sup> element
containing the scalar part. For example, [11, -10, 4, -19] should be interpreted as 11x - 10y + 4z = -19.
##### Examples
iex> ExAlgebra.Vector3.equation_of_plain([1, 3, 0], [3, 4, -3], [3, 6, 2])
[11, -10, 4, -19]
"""
@spec equation_of_plain([number], [number], [number]) :: [number]
def equation_of_plain([x, y, z] = u, v, w) do
[a, b, c] = (v |> Vector.subtract(u)) |> cross_product(w |> Vector.subtract(u))
[a, b, c, (x * a + b * y + c * z)]
end
@doc """
Computes the area of a parallelogram.
##### Examples
iex> ExAlgebra.Vector3.area_of_parallelogram([2, 1, -3], [1, 3, 2])
:math.sqrt(195)
"""
@spec area_of_parallelogram([number], [number]) :: number
def area_of_parallelogram(u, v) do
Vector.magnitude(u |> cross_product(v))
end
@doc """
Computes the scalar triple product.
##### Examples
iex> ExAlgebra.Vector3.scalar_triple_product([3, 2, 1], [-1, 3, 0], [2, 2, 5])
47.0
"""
@spec scalar_triple_product([number], [number], [number]) :: number
def scalar_triple_product(u, v, w) do
Matrix.det([u, v, w])
end
@doc """
Computes the volume of a parallelepiped.
##### Examples
iex> ExAlgebra.Vector3.volume_of_parallelepiped([-3, 2, 1], [-1, -3, 0], [2, 2, -5])
51.0
"""
@spec volume_of_parallelepiped([number], [number], [number]) :: number
def volume_of_parallelepiped(u, v, w), do: u |> scalar_triple_product(v, w) |> abs
end
|
lib/Vector/vector3.ex
| 0.917654
| 0.870487
|
vector3.ex
|
starcoder
|
defmodule Excelion do
@moduledoc """
Excel (xlsx) file reader for Elixir.
The library is [excellent](https://hex.pm/packages/excellent) like interface wrapper of [xlsx\_parser](https://hex.pm/packages/xlsx_parser).
"""
@doc """
The function returns the worksheet name list of the specified .xlsx file. If it fails to get the content, it returns an `{:error, reason}` tuple.
## Parameters
- `path` : .xlsx file path
- `zip` : An optional parameter of the zip processing module is allowed (for testing purposes).
## Example
```elixir
> Excelion.get_worksheet_names("test.xlsx") |> elem(1)
> ["sheet1", "sheet2", "sheet3"]
```
"""
@spec get_worksheet_names(String.t, module) :: {:ok, [String.t]} | {:error, String.t}
def get_worksheet_names(path, zip \\ :zip) do
XlsxParser.get_worksheet_names(path, zip)
end
@doc """
The function parses the .xlsx file. If it fails to get the content, it returns an `{:error, reason}` tuple.
## Parameters
- `path` : .xlsx file path
- `sheet_number` : 0-based sheet number
- `start_row` : first use row number (1-based)
- `zip` : An optional parameter of the zip processing module is allowed (for testing purposes).
## Example
```elixir
> sheet_number = 0
> start_row = 5
> Excelion.parse("test.xlsx", sheet_number, start_row) |> elem(1)
> [
> ["ID", "name", "description", "value"],
> ["1", "aaa", "bbb", "4"],
> ["2", "ccc", "", "5"], # empty cell to be empty string
> ["3", "eee", "fff", "6"]
> ]
```
"""
@spec parse(String.t, integer, integer, module) :: {:ok, [[String.t]]} | {:error, String.t}
def parse(path, sheet_number, start_row, zip \\ :zip) do
case XlsxParser.get_sheet_content(path, sheet_number + 1, zip) do
{:error, reason} -> {:error, reason}
{:ok, ret} ->
filtered = Enum.filter(ret, fn {_colname, row, _value} -> row >= start_row end)
# Compare columns by index rather than lexicographically ("AA" sorts before "Z" as a string).
max_col = Enum.max_by(filtered, fn {colname, _row, _value} -> alpha_to_index(colname) end) |> elem(0)
max_row = Enum.max_by(filtered, fn {_colname, row, _value} -> row end) |> elem(1)
map = Enum.into(filtered, %{}, fn x -> {{elem(x, 0), elem(x, 1)}, elem(x, 2)} end)
index_list = for row <- start_row..max_row, col <- 0..alpha_to_index(max_col), do: {col, row}
padded_list = Enum.map(index_list, fn {col_index, row} ->
x = map[{index_to_alpha(col_index), row}]
if x == nil do
""
else
x
end
end)
{:ok, Enum.chunk_every(padded_list, alpha_to_index(max_col) + 1)}
end
end
@doc """
The function parses the .xlsx file. If it fails to get the content, it raises a `File.Error` exception.
## Parameters
- `path` : .xlsx file path
- `sheet_number` : 0-based sheet number
- `start_row` : first use row number (1-based)
- `zip` : An optional parameter of the zip processing module is allowed (for testing purposes).
## Example
```elixir
> sheet_number = 0
> start_row = 5
> Excelion.parse!("test.xlsx", sheet_number, start_row)
> [
> ["ID", "name", "description", "value"],
> ["1", "aaa", "bbb", "4"],
> ["2", "ccc", "", "5"], # empty cell to be empty string
> ["3", "eee", "fff", "6"]
> ]
```
"""
@spec parse!(String.t, integer, integer, module) :: [[String.t]] | no_return
def parse!(path, sheet_number, start_row, zip \\ :zip) do
case parse(path, sheet_number, start_row, zip) do
{:ok, ret} -> ret
{:error, reason} -> raise File.Error, reason: reason, action: "open", path: path
end
end
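# Note: column letters are hard-coded below up to "AZ", so sheets wider than
# 52 columns are not supported by these helpers.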
@spec alphas() :: [String.t]
defp alphas do
["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z",
"AA","AB","AC","AD","AE","AF","AG","AH","AI","AJ","AK","AL","AM","AN","AO","AP","AQ","AR","AS",
"AT","AU","AV","AW","AX","AY","AZ"]
end
@spec alpha_to_index(String.t) :: integer
defp alpha_to_index(alpha) do
Enum.find_index(alphas, fn x -> x == alpha end)
end
@spec index_to_alpha(integer) :: String.t
defp index_to_alpha(index) do
Enum.at(alphas, index)
end
end
|
lib/excelion.ex
| 0.889807
| 0.876845
|
excelion.ex
|
starcoder
|
defmodule Surgex.Parser do
@moduledoc """
Parses, casts and catches errors in the web request input, such as params or JSON API body.
## Usage
In order to use it, you should import the `Surgex.Parser` module, possibly in the `controller`
macro in the `web.ex` file belonging to your Phoenix project, which will make functions like
`parse` available in all controllers.
Then, you should start implementing functions for parsing params or documents for specific
controller actions. Those functions will serve as documentation crucial for understanding specific
action's input, so it's best to keep them close to the relevant action. For example:
def index(conn, params) do
with {:ok, opts} <- parse_index_params(params) do
render(conn, locations: Marketplace.search_locations(opts))
else
{:error, :invalid_parameters, params} -> {:error, :invalid_parameters, params}
end
end
defp parse_index_params(params) do
parse params,
query: [:string, :required],
center: :geolocation,
box: :box,
category_id: :id,
subcategory_ids: :id_list,
sort: {:sort, ~w{price_min published_at distance}a},
page: :page
end
The second argument to `parse/2` and `flat_parse/2` is a param spec in which keys are resulting
option names and values are parser functions, atoms, tuples or lists used to process a specific
parameter. Here's how each works:
- **parser functions** are functions that take the input value as first argument and can take
arbitrary amount of additional arguments as parser options; in order to pass such parser it's
best to use the `&` operator in format `&parser/1` or in case of parser options
`&parser(&1, opts...)`
- **parser atoms** point to built-in parsers by looking up a
`Surgex.Parser.<camelized-name>Parser` module and invoking the `call` function within it,
where the `call` function is just a parser function described above; for example `:integer` is
an equivalent to `&Surgex.Parser.IntegerParser.call/1`
- **parser tuples** allow to pass additional options to built-in parsers; the tuple starts with
the parser atom described above, followed by parser arguments matching the number of additional
arguments consumed by the parser; for example `{:sort, ~w{price_min published_at}a}`
- **parser lists** allow to pass a list of parser functions, atoms or tuples, all of which will be
parsed in a sequence in which the output from previous parser is piped to the next one and in
which the first failure stops the whole pipe; for example `[:integer, :required]`
"""
@doc """
Parses controller action input (parameters, documents) with a given set of parsers.
Returns a keyword list with parsed options.
"""
@spec parse(nil, any) :: {:error, :empty_input}
@spec parse(map, list) ::
{:ok, any} | {:error, :invalid_parameters, list} | {:error, :invalid_pointers, list}
def parse(input, parsers)
def parse(resource = %{__struct__: Jabbax.Document.Resource}, parsers) do
parse_resource(resource, parsers)
end
def parse(doc = %{__struct__: Jabbax.Document}, parsers) do
parse_doc(doc, parsers)
end
def parse(params = %{}, parsers) do
parse_params(params, parsers)
end
def parse(nil, _parsers), do: {:error, :empty_input}
@doc """
Parses controller action input into a flat structure.
This function takes the same input as `parse/2` but it returns a `{:ok, value1, value2, ...}`
tuple instead of a `[key1: value1, key2: value2, ...]` keyword list.
"""
@spec flat_parse(nil, any) :: {:error, :empty_input}
# any number of params could be parsed, so the best spec for {:ok, v1, v2, vn} is tuple()
@spec flat_parse(map, list) ::
tuple() | {:error, :invalid_parameters, list} | {:error, :invalid_pointers, list}
def flat_parse(input, parsers)
def flat_parse(doc = %{__struct__: Jabbax.Document}, parsers) do
with {:ok, list} <- parse_doc(doc, parsers, include_missing: true) do
output =
list
|> Keyword.values()
|> Enum.reverse()
List.to_tuple([:ok | output])
end
end
def flat_parse(params = %{}, parsers) do
with {:ok, list} <- parse_params(params, parsers, include_missing: true) do
output =
list
|> Keyword.values()
|> Enum.reverse()
List.to_tuple([:ok | output])
end
end
def flat_parse(nil, _parsers), do: {:error, :empty_input}
@doc """
Makes sure there are no unknown params passed to controller action.
"""
@spec assert_blank_params(map) ::
:ok | {:error, :invalid_parameters, list} | {:error, :invalid_pointers, list}
def assert_blank_params(params) do
with {:ok, []} <- parse(params, []) do
:ok
end
end
@doc """
Renames keys in the parser output.
"""
@spec map_parsed_options({:error, any}, any) :: {:error, any}
@spec map_parsed_options({:ok, any}, any) :: {:ok, any}
def map_parsed_options(parser_result, mapping) do
with {:ok, opts} <- parser_result do
updated_opts =
Enum.reduce(mapping, opts, fn {source, target}, current_opts ->
case Keyword.fetch(current_opts, source) do
{:ok, value} ->
current_opts
|> Keyword.delete(source)
|> Keyword.put(target, value)
:error ->
current_opts
end
end)
{:ok, updated_opts}
end
end
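# Usage sketch (hypothetical option names):
#
#     map_parsed_options({:ok, [page: 1]}, page: :page_number)
#     #=> {:ok, [page_number: 1]}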
defp parse_params(params, parsers, opts \\ []) do
{params, [], []}
|> pop_and_parse_keys(parsers, opts)
|> pop_unknown()
|> close_params()
end
defp parse_doc(doc, parsers, opts \\ [])
defp parse_doc(%{data: resource = %{}}, parsers, opts) do
resource
|> parse_resource(parsers, opts)
|> prefix_error_pointers("/data/")
end
defp parse_doc(_doc, _parsers, _opts) do
{:error, :invalid_pointers, [required: "/data"]}
end
defp parse_resource(resource, parsers, opts \\ []) do
{root_output, root_errors} = parse_resource_root(resource, parsers, opts)
{attribute_output, attribute_errors} =
parse_resource_nested(resource, parsers, :attributes, opts)
{relationship_output, relationship_errors} =
parse_resource_nested(resource, parsers, :relationships, opts)
output = relationship_output ++ attribute_output ++ root_output
errors = root_errors ++ attribute_errors ++ relationship_errors
close_resource({output, errors})
end
defp parse_resource_root(resource, all_parsers, opts) do
parsers = Keyword.drop(all_parsers, [:attributes, :relationships])
input = Map.from_struct(resource)
{_, output, errors} = pop_and_parse_keys({input, [], []}, parsers, [stringify: false] ++ opts)
{output, errors}
end
defp parse_resource_nested(resource, all_parsers, key, opts) do
parsers = Keyword.get(all_parsers, key, [])
attributes = Map.get(resource, key, %{})
{output, errors} =
{attributes, [], []}
|> pop_and_parse_keys(parsers, opts)
|> pop_unknown()
prefixed_errors = prefix_error_pointers(errors, "#{key}/")
{output, prefixed_errors}
end
defp prefix_error_pointers(payload, prefix) when is_tuple(payload) do
with {:error, reason, pointers} when is_list(pointers) <- payload do
{:error, reason, prefix_error_pointers(pointers, prefix)}
end
end
defp prefix_error_pointers(errors, prefix) when is_list(errors) do
Enum.map(errors, &prefix_error_pointer(&1, prefix))
end
defp prefix_error_pointer({reason, key}, prefix), do: {reason, "#{prefix}#{key}"}
defp pop_and_parse_keys(payload, key_parsers, opts) do
Enum.reduce(key_parsers, payload, &pop_and_parse_keys_each(&1, &2, opts))
end
defp pop_and_parse_keys_each({key, parser}, current_payload, opts) do
pop_and_parse_key(current_payload, {key, opts}, parser, key)
end
# credo:disable-for-next-line Credo.Check.Refactor.ABCSize
defp pop_and_parse_key({map, output, errors}, {input_key, opts}, parser, output_key) do
stringify = Keyword.get(opts, :stringify, true)
include_missing = Keyword.get(opts, :include_missing, false)
{{input_value, remaining_map}, used_key} = pop(map, input_key, stringify)
had_key = Map.has_key?(map, used_key)
drop_nil = not include_missing and not had_key
case call_parser(parser, input_value) do
{:ok, nil} ->
if drop_nil do
{remaining_map, output, errors}
else
final_output = Keyword.put_new(output, output_key, nil)
{remaining_map, final_output, errors}
end
{:ok, parser_output} ->
final_output = Keyword.put_new(output, output_key, parser_output)
{remaining_map, final_output, errors}
{:error, new_errors} when is_list(new_errors) ->
prefixed_new_errors =
Enum.map(new_errors, fn {reason, pointer} ->
{reason, "#{used_key}/#{pointer}"}
end)
final_errors = prefixed_new_errors ++ errors
{remaining_map, output, final_errors}
{:error, reason} ->
final_errors = [{reason, used_key} | errors]
{remaining_map, output, final_errors}
end
end
defp parse_in_sequence(input, [first_parser | other_parsers]) do
Enum.reduce(other_parsers, call_parser(first_parser, input), &parse_in_sequence_each/2)
end
defp parse_in_sequence_each(_next_parser, {:error, reason}), do: {:error, reason}
defp parse_in_sequence_each(next_parser, {:ok, prev_output}) do
call_parser(next_parser, prev_output)
end
defp call_parser(parsers, input) when is_list(parsers), do: parse_in_sequence(input, parsers)
defp call_parser(parser, input) when is_function(parser), do: parser.(input)
defp call_parser(parser, input) when is_atom(parser), do: call_parser({parser}, input)
defp call_parser(parser_tuple, input) when is_tuple(parser_tuple) do
[parser_name | parser_args] = Tuple.to_list(parser_tuple)
parser_camelized =
parser_name
|> Atom.to_string()
|> Macro.camelize()
parser_module = String.to_existing_atom("Elixir.Surgex.Parser.#{parser_camelized}Parser")
apply(parser_module, :call, [input | parser_args])
end
defp pop(map, key, stringify)
defp pop(map, key, false) do
{Map.pop(map, key), key}
end
defp pop(map, key, true) do
key_string = Atom.to_string(key)
if Map.has_key?(map, key_string) do
{Map.pop(map, key_string), key_string}
else
dasherized_key = String.replace(key_string, "_", "-")
{Map.pop(map, dasherized_key), dasherized_key}
end
end
defp pop_unknown({map, output, errors}) do
new_errors =
map
|> Enum.filter(fn {key, _value} -> key != "data" end)
|> Enum.map(fn {key, _value} -> {:unknown, key} end)
{output, errors ++ new_errors}
end
defp close_params({output, []}), do: {:ok, output}
defp close_params({_output, errors}), do: {:error, :invalid_parameters, errors}
defp close_resource({output, []}), do: {:ok, output}
defp close_resource({_output, errors}), do: {:error, :invalid_pointers, errors}
end
|
lib/surgex/parser/parser.ex
| 0.850841
| 0.72645
|
parser.ex
|
starcoder
|
defmodule Vapor.Provider.Map do
@moduledoc """
The Map config module provides support for reading configuration values
from a plain map. This can be useful when integrating with external
secret stores that provide their secret payload as JSON.
Bindings must be specified as a keyword list.
## Example
%Map{bindings: [foo: "FOO", bar: "VAR_BAR"]}
"""
defstruct map: %{}, bindings: [], required: true
defimpl Vapor.Provider do
def load(%{map: map, bindings: bindings, required: required}) do
bound_keys =
bindings
|> Enum.map(&normalize_binding/1)
|> Enum.map(&create_binding(&1, map))
|> Enum.into(%{})
missing =
bound_keys
|> Enum.filter(fn {_, data} -> data.val == :missing end)
|> Enum.map(fn {_k, data} -> data.key end)
if required && Enum.any?(missing) do
{:error, "Vars not set in map: #{Enum.join(missing, ", ")}"}
else
keys =
bound_keys
|> Enum.reject(fn {_, data} -> data.val == :missing end)
|> Enum.map(fn {name, data} -> {name, data.val} end)
|> Enum.into(%{})
{:ok, keys}
end
end
defp normalize_binding({name, variable}) do
{name, %{val: nil, key: variable, opts: default_opts()}}
end
defp normalize_binding({name, variable, opts}) do
{name, %{val: nil, key: variable, opts: Keyword.merge(default_opts(), opts)}}
end
defp create_binding({name, data}, keys) do
case keys[data.key] do
nil ->
val =
if data.opts[:default] != nil do
data.opts[:default]
else
if data.opts[:required], do: :missing, else: nil
end
{name, %{data | val: val}}
val ->
# Call the map function which defaults to identity
{name, %{data | val: data.opts[:map].(val)}}
end
end
defp default_opts do
[
map: fn x -> x end,
default: nil,
required: true
]
end
end
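# Usage sketch (hypothetical secret map):
#
#     provider = %Vapor.Provider.Map{map: %{"FOO" => "1"}, bindings: [foo: "FOO"]}
#     Vapor.Provider.load(provider)
#     #=> {:ok, %{foo: "1"}}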
end
|
lib/vapor/providers/map.ex
| 0.787727
| 0.403508
|
map.ex
|
starcoder
|
defmodule TimeZoneInfo.IanaDateTime do
@moduledoc false
# Some functions to handle datetimes in `TimeZoneInfo`.
alias Calendar.ISO
alias TimeZoneInfo.IanaParser
@type time :: {Calendar.hour(), Calendar.minute(), Calendar.second()}
@type t ::
{Calendar.year()}
| {Calendar.year(), Calendar.month()}
| {Calendar.year(), Calendar.month(), IanaParser.day()}
| {Calendar.year(), Calendar.month(), IanaParser.day(), time()}
@seconds_per_minute 60
@seconds_per_hour 60 * @seconds_per_minute
@seconds_per_day 24 * @seconds_per_hour
@doc """
Computes the number of gregorian seconds starting with year 0 and ending at
the specified `iana_datetime`.
"""
@spec to_gregorian_seconds(t()) :: TimeZoneInfo.gregorian_seconds()
def to_gregorian_seconds(iana_datetime) do
case iana_datetime do
{year} ->
to_gregorian_seconds(year, 1, 1, {0, 0, 0})
{year, month} ->
to_gregorian_seconds(year, month, 1, {0, 0, 0})
{year, month, day} ->
to_gregorian_seconds(year, month, day, {0, 0, 0})
{year, month, day, hour} ->
to_gregorian_seconds(year, month, day, {hour, 0, 0})
{year, month, day, hour, minute} ->
to_gregorian_seconds(year, month, day, {hour, minute, 0})
{year, month, day, hour, minute, second} ->
to_gregorian_seconds(year, month, day, {hour, minute, second})
end
end
@spec to_gregorian_seconds(Calendar.year(), Calendar.month(), IanaParser.day(), time()) ::
TimeZoneInfo.gregorian_seconds()
def to_gregorian_seconds(year, month, day, time) do
day = to_day(year, month, day)
do_to_gregorian_seconds({{year, month, day}, time})
end
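# e.g. to_gregorian_seconds(1970, 1, 1, {0, 0, 0}) #=> 62_167_219_200 (the Unix epoch)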
defp do_to_gregorian_seconds({date, time}) do
date = update(date)
:calendar.datetime_to_gregorian_seconds({date, time})
end
defp update({year, month, day}) do
case day > 0 && day <= ISO.days_in_month(year, month) do
true ->
{year, month, day}
false ->
{:ok, date} = NaiveDateTime.new(year, month, 1, 0, 0, 0)
date = NaiveDateTime.add(date, (day - 1) * @seconds_per_day)
{date.year, date.month, date.day}
end
end
defp to_day(_year, _month, day) when is_integer(day), do: day
defp to_day(year, month, last_day_of_week: last_day_of_week),
do: to_last_day_of_week(year, month, last_day_of_week)
defp to_day(year, month, day: day, op: op, day_of_week: day_of_week),
do: to_day_of_week(year, month, day, day_of_week, op)
defp to_last_day_of_week(year, month, day_of_week) do
days_in_month = ISO.days_in_month(year, month)
last = ISO.day_of_week(year, month + 1, 1) - 1
days_in_month - rem(7 - (day_of_week - last), 7)
end
defp to_day_of_week(year, month, day, day_of_week, op) do
current = ISO.day_of_week(year, month, day)
case op do
:ge -> day + rem(7 + (day_of_week - current), 7)
:le -> day - rem(7 + (current - day_of_week), 7)
end
end
end
|
lib/time_zone_info/iana_datetime.ex
| 0.874794
| 0.630059
|
iana_datetime.ex
|
starcoder
|
defmodule ExCypher.Statements.Generic.Expression do
@moduledoc """
A module to abstract the AST format into something more human-readable
"""
alias ExCypher.Statements.Generic.Association
alias ExCypher.Statements.Generic.Variable
defstruct [:type, :env, :args]
def build(%{type: type, args: args, env: env}) do
%__MODULE__{type: type, args: args, env: env}
end
def new(ast, env) do
checkers = [
&as_bound_variable/1,
&as_fragment/1,
&as_property/1,
&as_node/1,
&as_relationship/1,
&as_association/1,
&another_term/1
]
Enum.find_value(checkers, fn checker -> checker.({ast, env}) end)
end
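# Note: checker order matters, since `Enum.find_value/2` returns the first
# non-nil result; specific AST shapes must be tested before the
# `another_term/1` catch-all.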
defp as_fragment({ast, env}) do
case ast do
{:fragment, _ctx, args} ->
%__MODULE__{type: :fragment, args: args, env: env}
_ ->
nil
end
end
defp as_property({ast, env}) do
case ast do
{{:., _, [first, last | []]}, _, _} ->
%__MODULE__{type: :property, args: [first, last], env: env}
_ ->
nil
end
end
defp as_node({ast, env}) do
case ast do
{:node, _ctx, args} ->
%__MODULE__{type: :node, args: parse_args(args), env: env}
_ ->
nil
end
end
defp as_relationship({ast, env}) do
case ast do
{:rel, _ctx, args} ->
%__MODULE__{type: :relationship, args: parse_args(args), env: env}
_ ->
nil
end
end
defp as_association({ast, env}) do
case ast do
{association, _ctx, [from, to]} ->
from_type = Association.type(:from, from)
to_type = Association.type(:to, to)
%__MODULE__{
type: :association,
args: [association, {from_type, from}, {to_type, to}],
env: env
}
_ ->
nil
end
end
defp another_term({ast, env}) do
cond do
is_nil(ast) ->
%__MODULE__{type: :null, args: nil, env: env}
is_atom(ast) ->
%__MODULE__{type: :alias, args: ast, env: env}
is_list(ast) ->
%__MODULE__{type: :list, args: ast, env: env}
true ->
%__MODULE__{type: :other, args: ast, env: env}
end
end
defp as_bound_variable({ast, env}) do
if Variable.bound_variable?({ast, env}) do
%__MODULE__{type: :var, args: ast, env: env}
end
end
defp parse_args(args) do
Enum.map(args, fn
{:%{}, _ctx, args} -> Enum.into(args, %{})
term -> term
end)
end
end
|
lib/ex_cypher/statements/generic/expression.ex
| 0.719778
| 0.451871
|
expression.ex
|
starcoder
|
defmodule Credo.Check.Refactor.PerceivedComplexity do
@moduledoc """
Cyclomatic complexity is a software complexity metric closely correlated with
coding errors.
If a function feels like it's gotten too complex, it more often than not also
has a high CC value. So, if anything, this is useful to convince team members
and bosses of a need to refactor parts of the code based on "objective"
metrics.
"""
@explanation [
check: @moduledoc,
params: [
max_complexity: "The maximum cyclomatic complexity a function should have."
]
]
@default_params [max_complexity: 9]
@def_ops [:def, :defp, :defmacro]
# these have two outcomes: the condition succeeds or it does not
@double_condition_ops [:if, :unless, :for, :try, :and, :or, :&&, :||]
# these can have multiple outcomes as they are defined in their do blocks
@multiple_condition_ops [:case, :cond]
@op_complexity_map [
def: 1,
defp: 1,
defmacro: 1,
if: 1,
unless: 1,
for: 1,
try: 1,
and: 1,
or: 1,
&&: 1,
||: 1,
case: 0.3,
cond: 1
]
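# `case` and `cond` are weighted per clause via `do_block_complexity/2`,
# so a `case` costs 0.3 per clause while a `cond` costs 1 per clause.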
alias Credo.Check.CodeHelper
alias Credo.SourceFile
use Credo.Check
@doc false
def run(source_file, params \\ []) do
issue_meta = IssueMeta.for(source_file, params)
max_complexity = Params.get(params, :max_complexity, @default_params)
Credo.Code.prewalk(
source_file,
&traverse(&1, &2, issue_meta, max_complexity)
)
end
# exception for `__using__` macros
defp traverse({:defmacro, _, [{:__using__, _, _}, _]} = ast, issues, _, _) do
{ast, issues}
end
for op <- @def_ops do
defp traverse(
{unquote(op), meta, arguments} = ast,
issues,
issue_meta,
max_complexity
)
when is_list(arguments) do
complexity =
ast
|> complexity_for
|> round
if complexity > max_complexity do
fun_name = CodeHelper.def_name(ast)
{
ast,
issues ++
[
issue_for(
issue_meta,
meta[:line],
fun_name,
max_complexity,
complexity
)
]
}
else
{ast, issues}
end
end
end
defp traverse(ast, issues, _source_file, _max_complexity) do
{ast, issues}
end
@doc """
Returns the Cyclomatic Complexity score for the block inside the given AST,
which is expected to represent a function or macro definition.
iex> {:def, [line: 1],
...> [
...> {:first_fun, [line: 1], nil},
...> [do: {:=, [line: 2], [{:x, [line: 2], nil}, 1]}]
...> ]
...> } |> Credo.Check.Refactor.CyclomaticComplexity.complexity_for
1.0
"""
def complexity_for({_def_op, _meta, _arguments} = ast) do
Credo.Code.prewalk(ast, &traverse_complexity/2, 0)
end
for op <- @def_ops do
defp traverse_complexity(
{unquote(op) = op, _meta, arguments} = ast,
complexity
)
when is_list(arguments) do
{ast, complexity + @op_complexity_map[op]}
end
end
for op <- @double_condition_ops do
defp traverse_complexity(
{unquote(op) = op, _meta, arguments} = ast,
complexity
)
when is_list(arguments) do
{ast, complexity + @op_complexity_map[op]}
end
end
for op <- @multiple_condition_ops do
defp traverse_complexity({unquote(op), _meta, nil} = ast, complexity) do
{ast, complexity}
end
defp traverse_complexity(
{unquote(op) = op, _meta, arguments} = ast,
complexity
)
when is_list(arguments) do
block_cc =
arguments
|> CodeHelper.do_block_for!()
|> do_block_complexity(op)
{ast, complexity + block_cc}
end
end
defp traverse_complexity(ast, complexity) do
{ast, complexity}
end
defp do_block_complexity(nil, _), do: 0
defp do_block_complexity(block, op) do
count =
block
|> List.wrap()
|> Enum.count()
count * @op_complexity_map[op]
end
def issue_for(issue_meta, line_no, trigger, max_value, actual_value) do
format_issue(
issue_meta,
message: "Function is too complex (CC is #{actual_value}, max is #{max_value}).",
trigger: trigger,
line_no: line_no,
severity: Severity.compute(actual_value, max_value)
)
end
end
|
lib/credo/check/refactor/perceived_complexity.ex
| 0.831656
| 0.560914
|
perceived_complexity.ex
|
starcoder
|
import TypeClass
defclass Witchcraft.Chain do
@moduledoc """
Chain function applications on contained data that may have some additional effect
As a diagram:
%Container<data> --- (data -> %Container<updated_data>) ---> %Container<updated_data>
## Examples
iex> chain([1, 2, 3], fn x -> [x, x] end)
[1, 1, 2, 2, 3, 3]
alias Algae.Maybe.{Nothing, Just}
%Just{just: 42} >>> fn x -> %Just{just: x + 1} end
#=> %Just{just: 43}
%Just{just: 42}
>>> fn x -> if x > 50, do: %Just{just: x + 1}, else: %Nothing{} end
>>> fn y -> y * 100 end
#=> %Nothing{}
## Type Class
An instance of `Witchcraft.Chain` must also implement `Witchcraft.Apply`,
and define `Witchcraft.Chain.chain/2`.
Functor [map/2]
↓
Apply [convey/2]
↓
Chain [chain/2]
"""
alias __MODULE__
extend Witchcraft.Apply
use Witchcraft.Internal, deps: [Witchcraft.Apply]
use Witchcraft.Apply
@type t :: any()
@type link :: (any() -> Chain.t())
where do
@doc """
Sequentially compose actions, piping values through successive function chains.
The applied linking function must be unary and return data in the same
type of container as the input. The chain function essentially "unwraps"
a contained value, applies a linking function that returns
the initial (wrapped) type, and collects them into a flat(ter) structure.
`chain/2` is sometimes called "flat map", since it can also
be expressed as `data |> map(link_fun) |> flatten()`.
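For instance, with lists (an illustrative equivalence, not from the original
docs):
iex> Witchcraft.Functor.map([1, 2], fn x -> [x, x] end) |> Witchcraft.Chain.flatten()
[1, 1, 2, 2]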
As a diagram:
%Container<data> --- (data -> %Container<updated_data>) ---> %Container<updated_data>
## Examples
iex> chain([1, 2, 3], fn x -> [x, x] end)
[1, 1, 2, 2, 3, 3]
iex> [1, 2, 3]
...> |> chain(fn x -> [x, x] end)
...> |> chain(fn y -> [y, 2 * y, 3 * y] end)
[1, 2, 3, 1, 2, 3, 2, 4, 6, 2, 4, 6, 3, 6, 9, 3, 6, 9]
iex> chain([1, 2, 3], fn x ->
...> chain([x + 1], fn y ->
...> chain([y + 2, y + 10], fn z ->
...> [x, y, z]
...> end)
...> end)
...> end)
[1, 2, 4, 1, 2, 12, 2, 3, 5, 2, 3, 13, 3, 4, 6, 3, 4, 14]
"""
@spec chain(Chain.t(), Chain.link()) :: Chain.t()
def chain(chainable, link_fun)
end
@doc """
`chain/2` but with the arguments flipped.
## Examples
iex> draw(fn x -> [x, x] end, [1, 2, 3])
[1, 1, 2, 2, 3, 3]
iex> (fn y -> [y * 5, y * 10] end)
...> |> draw((fn x -> [x, x] end)
...> |> draw([1, 2, 3])) # note the "extra" closing paren
[5, 10, 5, 10, 10, 20, 10, 20, 15, 30, 15, 30]
"""
@spec draw(Chain.link(), Chain.t()) :: Chain.t()
def draw(chain_fun, chainable), do: chain(chainable, chain_fun)
@doc """
An alias for `chain/2`.
Provided as a convenience for those coming from other languages.
"""
@spec bind(Chain.t(), Chain.link()) :: Chain.t()
defalias bind(chainable, binder), as: :chain
@doc """
Operator alias for `chain/2`.
Extends the `~>` / `~>>` hierarchy with one more level of power / abstraction
## Examples
iex> to_monad = fn x -> (fn _ -> x end) end
...> bound = to_monad.(&(&1 * 10)) >>> to_monad.(&(&1 + 10))
...> bound.(10)
20
In Haskell, this is the famous `>>=` operator, but Elixir doesn't allow that
infix operator.
"""
@spec Chain.t() >>> Chain.link() :: Chain.t()
defalias chainable >>> chain_fun, as: :chain
@doc """
Operator alias for `draw/2`
Extends the `<~` / `<<~` hierarchy with one more level of power / abstraction
## Examples
iex> to_monad = fn x -> (fn _ -> x end) end
...> bound = to_monad.(&(&1 + 10)) <<< to_monad.(&(&1 * 10))
...> bound.(10)
20
In Haskell, this is the famous `=<<` operator, but Elixir doesn't allow that
infix operator.
"""
@spec Chain.link() <<< Chain.t() :: Chain.t()
defalias chain_fun <<< chainable, as: :draw
@doc """
Join together one nested level of a data structure that contains itself
## Examples
iex> join([[1, 2, 3]])
[1, 2, 3]
iex> join([[1, 2, 3], [4, 5, 6]])
[1, 2, 3, 4, 5, 6]
iex> join([[[1, 2, 3], [4, 5, 6]]])
[[1, 2, 3], [4, 5, 6]]
alias Algae.Maybe.{Nothing, Just}
%Just{
just: %Just{
just: 42
}
} |> join()
#=> %Just{just: 42}
join %Just{just: %Nothing{}}
#=> %Nothing{}
join %Just{just: %Just{just: %Nothing{}}}
#=> %Just{just: %Nothing{}}
%Nothing{} |> join() |> join() |> join() # ...and so on, forever
#=> %Nothing{}
Joining tuples is a bit counterintuitive, as it requires a very specific format:
iex> join { # Outer 2-tuple
...> {1, 2}, # Inner 2-tuple
...> {
...> {3, 4}, # Doubly inner 2-tuple
...> {5, 6, 7}
...> }
...> }
{{4, 6}, {5, 6, 7}}
iex> join {
...> {"a", "b"},
...> {
...> {"!", "?"},
...> {:ok, 123}
...> }
...> }
{{"a!", "b?"}, {:ok, 123}}
"""
@spec join(Chain.t()) :: Chain.t()
def join(nested), do: nested >>> (&Quark.id/1)
@spec flatten(Chain.t()) :: Chain.t()
defalias flatten(nested), as: :join
@doc """
Compose link functions to create a new link function.
Note that this runs the same direction as `<|>` ("the math way").
This is `pipe_compose_link/2` with arguments flipped.
## Examples
iex> links =
...> fn x -> [x, x] end
...> |> compose_link(fn y -> [y * 10] end)
...> |> compose_link(fn z -> [z + 42] end)
...>
...> [1, 2, 3] >>> links
[430, 430, 440, 440, 450, 450]
"""
@spec compose_link(Chain.link(), Chain.link()) :: Chain.link()
def compose_link(action_g, action_f), do: pipe_compose_link(action_f, action_g)
@doc """
Compose link functions to create a new link function.
This is `compose_link/2` with arguments flipped.
## Examples
iex> links =
...> fn x -> [x, x] end
...> |> pipe_compose_link(fn y -> [y * 10] end)
...> |> pipe_compose_link(fn z -> [z + 42] end)
...>
...> [1, 2, 3] >>> links
[52, 52, 62, 62, 72, 72]
"""
@spec pipe_compose_link(Chain.link(), Chain.link()) :: Chain.link()
def pipe_compose_link(action_f, action_g) do
fn data -> action_f.(data) >>> action_g end
end
@doc """
`do` notation sugar
Sequences chainable actions. Note that each line must be of the same type.
For a version with `return`, please see `Witchcraft.Monad.do/2`
## Examples
iex> chain do
...> [1]
...> end
[1]
iex> chain do
...> [1, 2, 3]
...> [4, 5, 6]
...> [7, 8, 9]
...> end
[
7, 8, 9,
7, 8, 9,
7, 8, 9,
7, 8, 9,
7, 8, 9,
7, 8, 9,
7, 8, 9,
7, 8, 9,
7, 8, 9
]
iex> chain do
...> a <- [1, 2, 3]
...> b <- [4, 5, 6]
...> [a * b]
...> end
[
4, 5, 6,
8, 10, 12,
12, 15, 18
]
Normal functions are fine within the `do` as well, as long as each line
ends up being the same chainable type
iex> import Witchcraft.{Functor, Applicative}
...> chain do
...> map([1, 2, 3], fn x -> x + 1 end)
...> of([], 42)
...> [7, 8, 9] ~> fn x -> x * 10 end
...> end
[
70, 80, 90,
70, 80, 90,
70, 80, 90
]
Or with a custom type
alias Algae.Maybe.{Nothing, Just}
chain do
%Just{just: 4}
%Just{just: 5}
%Just{just: 6}
end
#=> %Just{just: 6}
chain do
%Just{just: 4}
%Nothing{}
%Just{just: 6}
end
#=> %Nothing{}
## `let` bindings
`let`s allow you to hold static or intermediate values inside a
do-block, much like normal assignment
iex> chain do
...> let a = 4
...> [a]
...> end
[4]
iex> chain do
...> a <- [1, 2]
...> b <- [3, 4]
...> let [h | _] = [a * b]
...> [h, h, h]
...> end
[3, 3, 3, 4, 4, 4, 6, 6, 6, 8, 8, 8]
## Desugaring
### Sequencing
The most basic form
chain do
[1, 2, 3]
[4, 5, 6]
[7, 8, 9]
end
is equivalent to
[1, 2, 3]
|> then([4, 5, 6])
|> then([7, 8, 9])
### `<-` ("drawn from")
Drawing values from within a chainable structure feels similar
to assignment, but it pulls each value separately inside a chain link function.
For instance
iex> chain do
...> a <- [1, 2, 3]
...> b <- [4, 5, 6]
...> [a * b]
...> end
[4, 5, 6, 8, 10, 12, 12, 15, 18]
desugars to this
iex> [1, 2, 3] >>> fn a ->
...> [4, 5, 6] >>> fn b ->
...> [a * b]
...> end
...> end
[4, 5, 6, 8, 10, 12, 12, 15, 18]
but is often much cleaner to read in do-notation, as it cleans up all of the
nested functions (especially when the chain is very long).
You can also use values recursively:
# iex> chain do
# ...> a <- [1, 2, 3]
# ...> b <- [a, a * 10, a * 100]
# ...> [a + 1, b + 1]
# ...> end
# [
# 2, 2, 2, 11, 2, 101,
# 3, 3, 3, 21, 3, 201,
# 4, 4, 4, 31, 4, 301
# ]
"""
defmacro chain(do: input) do
Witchcraft.Chain.do_notation(input, &Witchcraft.Chain.chain/2)
end
@doc false
# credo:disable-for-lines:31 Credo.Check.Refactor.Nesting
def do_notation(input, _chainer) do
input
|> normalize()
|> Enum.reverse()
|> Witchcraft.Foldable.left_fold(fn
continue, {:let, _, [{:=, _, [assign, value]}]} ->
quote do: unquote(value) |> (fn unquote(assign) -> unquote(continue) end).()
continue, {:<-, _, [assign, value]} ->
quote do
import Witchcraft.Chain, only: [>>>: 2]
unquote(value) >>> fn unquote(assign) -> unquote(continue) end
end
continue, value ->
quote do
import Witchcraft.Chain, only: [>>>: 2]
unquote(value) >>> fn _ -> unquote(continue) end
end
end)
end
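# Illustrative expansion (a sketch inferred from the fold above, not from the
# original source):
#
#     chain do
#       a <- [1, 2]
#       [a * 10]
#     end
#
# unfolds to roughly `[1, 2] >>> fn a -> [a * 10] end`.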
@doc false
def normalize({:__block__, _, inner}), do: inner
def normalize(single) when is_list(single), do: [single]
def normalize(plain), do: List.wrap(plain)
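# Illustrative behaviour of the clauses above (assumed examples):
#
#     normalize({:__block__, [], [a, b]}) #=> [a, b]
#     normalize([:only_line])             #=> [[:only_line]]
#     normalize(:plain)                   #=> [:plain]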
properties do
def associativity(data) do
a = generate(data)
f = fn x -> Witchcraft.Applicative.of(a, inspect(x)) end
g = fn y -> Witchcraft.Applicative.of(a, y <> y) end
left = a |> Chain.chain(f) |> Chain.chain(g)
right = a |> Chain.chain(fn x -> x |> f.() |> Chain.chain(g) end)
equal?(left, right)
end
end
end
definst Witchcraft.Chain, for: Function do
alias Witchcraft.Chain
use Quark
@spec chain(Chain.t(), (any() -> any())) :: Chain.t()
def chain(fun, chain_fun), do: fn r -> curry(chain_fun).(fun.(r)).(r) end
end
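# Illustrative usage of the Function instance above (an assumption, not from
# the original source); the environment `r` is threaded through both
# functions, reader-style:
#
#     f = Witchcraft.Chain.chain(fn r -> r * 2 end, fn doubled -> fn r -> doubled + r end end)
#     f.(10)
#     #=> 30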
definst Witchcraft.Chain, for: List do
use Quark
def chain(list, chain_fun), do: Enum.flat_map(list, curry(chain_fun))
end
definst Witchcraft.Chain, for: Tuple do
use Witchcraft.Semigroup
custom_generator(_) do
import TypeClass.Property.Generator, only: [generate: 1]
seed = fn -> Enum.random([0, 1.1, "", []]) end
{generate(seed.()), generate(seed.())}
end
def chain({a, b}, chain_fun) do
{c, d} = chain_fun.(b)
{a <> c, d}
end
end
|
lib/witchcraft/chain.ex
| 0.796411
| 0.652318
|
chain.ex
|
starcoder
|
defmodule Surface.Components.Form.TimeSelect do
@moduledoc """
Generates select tags for time.
Provides a wrapper for Phoenix.HTML.Form's `time_select/3` function.
All options passed via `opts` will be sent to `time_select/3`.
`value`, `default`, `hour`, `minute`, `second` and `builder`
can be set directly and will override anything in `opts`.
## Examples
```
<TimeSelect form="alarm" field="time" />
<Form for={:alarm}>
<TimeSelect field={:time} />
</Form>
```
"""
use Surface.Component
import Phoenix.HTML.Form, only: [time_select: 3]
import Surface.Components.Form.Utils
alias Surface.Components.Form.Input.InputContext
@doc "The form identifier"
prop form, :form
@doc "The field name"
prop field, :string
@doc "The id prefix for underlying select fields"
prop id, :string
@doc "The name prefix for underlying select fields"
prop name, :string
@doc "Value to pre-populate the select"
prop value, :any
@doc "Default value to use when none was given in 'value' and none is available in the form data"
prop default, :any
@doc "Options passed to the underlying 'day' select"
prop hour, :keyword
@doc "Options passed to the underlying 'day' select"
prop minute, :keyword
@doc "Options passed to the underlying 'day' select"
prop second, :keyword
@doc """
Specify how the select can be built. It must be a function that receives a builder
that should be invoked with the select name and a set of options.
"""
prop builder, :fun
@doc "Options list"
prop opts, :keyword, default: []
def render(assigns) do
helper_opts =
assigns
|> props_to_opts([:value, :default, :hour, :minute, :second, :builder])
|> parse_css_class_for(:hour)
|> parse_css_class_for(:minute)
|> parse_css_class_for(:second)
opts =
assigns.opts
|> Keyword.merge(helper_opts)
assigns = assign(assigns, opts: opts)
~F"""
<InputContext assigns={assigns} :let={form: form, field: field}>
{time_select(form, field, @opts)}
</InputContext>
"""
end
end
|
lib/surface/components/form/time_select.ex
| 0.898365
| 0.792986
|
time_select.ex
|
starcoder
|
defmodule AWS.CodeCommit do
@moduledoc """
AWS CodeCommit
This is the *AWS CodeCommit API Reference*. This reference provides
descriptions of the operations and data types for AWS CodeCommit API along
with usage examples.
You can use the AWS CodeCommit API to work with the following objects:
Repositories, by calling the following:
<ul> <li> `BatchGetRepositories`, which returns information about one or
more repositories associated with your AWS account.
</li> <li> `CreateRepository`, which creates an AWS CodeCommit repository.
</li> <li> `DeleteRepository`, which deletes an AWS CodeCommit repository.
</li> <li> `GetRepository`, which returns information about a specified
repository.
</li> <li> `ListRepositories`, which lists all AWS CodeCommit repositories
associated with your AWS account.
</li> <li> `UpdateRepositoryDescription`, which sets or updates the
description of the repository.
</li> <li> `UpdateRepositoryName`, which changes the name of the
repository. If you change the name of a repository, no other users of that
repository can access it until you send them the new HTTPS or SSH URL to
use.
</li> </ul> Branches, by calling the following:
<ul> <li> `CreateBranch`, which creates a branch in a specified repository.
</li> <li> `DeleteBranch`, which deletes the specified branch in a
repository unless it is the default branch.
</li> <li> `GetBranch`, which returns information about a specified branch.
</li> <li> `ListBranches`, which lists all branches for a specified
repository.
</li> <li> `UpdateDefaultBranch`, which changes the default branch for a
repository.
</li> </ul> Files, by calling the following:
<ul> <li> `DeleteFile`, which deletes the content of a specified file from
a specified branch.
</li> <li> `GetBlob`, which returns the base-64 encoded content of an
individual Git blob object in a repository.
</li> <li> `GetFile`, which returns the base-64 encoded content of a
specified file.
</li> <li> `GetFolder`, which returns the contents of a specified folder or
directory.
</li> <li> `PutFile`, which adds or modifies a single file in a specified
repository and branch.
</li> </ul> Commits, by calling the following:
<ul> <li> `BatchGetCommits`, which returns information about one or more
commits in a repository.
</li> <li> `CreateCommit`, which creates a commit for changes to a
repository.
</li> <li> `GetCommit`, which returns information about a commit, including
commit messages and author and committer information.
</li> <li> `GetDifferences`, which returns information about the
differences in a valid commit specifier (such as a branch, tag, HEAD,
commit ID, or other fully qualified reference).
</li> </ul> Merges, by calling the following:
<ul> <li> `BatchDescribeMergeConflicts`, which returns information about
conflicts in a merge between commits in a repository.
</li> <li> `CreateUnreferencedMergeCommit`, which creates an unreferenced
commit between two branches or commits for the purpose of comparing them
and identifying any potential conflicts.
</li> <li> `DescribeMergeConflicts`, which returns information about merge
conflicts between the base, source, and destination versions of a file in a
potential merge.
</li> <li> `GetMergeCommit`, which returns information about the merge
between a source and destination commit.
</li> <li> `GetMergeConflicts`, which returns information about merge
conflicts between the source and destination branch in a pull request.
</li> <li> `GetMergeOptions`, which returns information about the available
merge options between two branches or commit specifiers.
</li> <li> `MergeBranchesByFastForward`, which merges two branches using
the fast-forward merge option.
</li> <li> `MergeBranchesBySquash`, which merges two branches using the
squash merge option.
</li> <li> `MergeBranchesByThreeWay`, which merges two branches using the
three-way merge option.
</li> </ul> Pull requests, by calling the following:
<ul> <li> `CreatePullRequest`, which creates a pull request in a specified
repository.
</li> <li> `CreatePullRequestApprovalRule`, which creates an approval rule
for a specified pull request.
</li> <li> `DeletePullRequestApprovalRule`, which deletes an approval rule
for a specified pull request.
</li> <li> `DescribePullRequestEvents`, which returns information about one
or more pull request events.
</li> <li> `EvaluatePullRequestApprovalRules`, which evaluates whether a
pull request has met all the conditions specified in its associated
approval rules.
</li> <li> `GetCommentsForPullRequest`, which returns information about
comments on a specified pull request.
</li> <li> `GetPullRequest`, which returns information about a specified
pull request.
</li> <li> `GetPullRequestApprovalStates`, which returns information about
the approval states for a specified pull request.
</li> <li> `GetPullRequestOverrideState`, which returns information about
whether approval rules have been set aside (overridden) for a pull request,
and if so, the Amazon Resource Name (ARN) of the user or identity that
overrode the rules and their requirements for the pull request.
</li> <li> `ListPullRequests`, which lists all pull requests for a
repository.
</li> <li> `MergePullRequestByFastForward`, which merges the source
branch of a pull request into the specified destination branch for that
pull request using the fast-forward merge option.
</li> <li> `MergePullRequestBySquash`, which merges the source branch of
a pull request into the specified destination branch for that pull
request using the squash merge option.
</li> <li> `MergePullRequestByThreeWay`, which merges the source branch
of a pull request into the specified destination branch for that pull
request using the three-way merge option.
</li> <li> `OverridePullRequestApprovalRules`, which sets aside all
approval rule requirements for a pull request.
</li> <li> `PostCommentForPullRequest`, which posts a comment to a pull
request at the specified line, file, or request.
</li> <li> `UpdatePullRequestApprovalRuleContent`, which updates the
structure of an approval rule for a pull request.
</li> <li> `UpdatePullRequestApprovalState`, which updates the state of an
approval on a pull request.
</li> <li> `UpdatePullRequestDescription`, which updates the description of
a pull request.
</li> <li> `UpdatePullRequestStatus`, which updates the status of a pull
request.
</li> <li> `UpdatePullRequestTitle`, which updates the title of a pull
request.
</li> </ul> Approval rule templates, by calling the following:
<ul> <li> `AssociateApprovalRuleTemplateWithRepository`, which associates a
template with a specified repository. After the template is associated with
a repository, AWS CodeCommit creates approval rules that match the template
conditions on every pull request created in the specified repository.
</li> <li> `BatchAssociateApprovalRuleTemplateWithRepositories`, which
associates a template with one or more specified repositories. After the
template is associated with a repository, AWS CodeCommit creates approval
rules that match the template conditions on every pull request created in
the specified repositories.
</li> <li> `BatchDisassociateApprovalRuleTemplateFromRepositories`, which
removes the association between a template and specified repositories so
that approval rules based on the template are not automatically created
when pull requests are created in those repositories.
</li> <li> `CreateApprovalRuleTemplate`, which creates a template for
approval rules that can then be associated with one or more repositories in
your AWS account.
</li> <li> `DeleteApprovalRuleTemplate`, which deletes the specified
template. It does not remove approval rules on pull requests already
created with the template.
</li> <li> `DisassociateApprovalRuleTemplateFromRepository`, which removes
the association between a template and a repository so that approval rules
based on the template are not automatically created when pull requests are
created in the specified repository.
</li> <li> `GetApprovalRuleTemplate`, which returns information about an
approval rule template.
</li> <li> `ListApprovalRuleTemplates`, which lists all approval rule
templates in the AWS Region in your AWS account.
</li> <li> `ListAssociatedApprovalRuleTemplatesForRepository`, which lists
all approval rule templates that are associated with a specified
repository.
</li> <li> `ListRepositoriesForApprovalRuleTemplate`, which lists all
repositories associated with the specified approval rule template.
</li> <li> `UpdateApprovalRuleTemplateDescription`, which updates the
description of an approval rule template.
</li> <li> `UpdateApprovalRuleTemplateName`, which updates the name of an
approval rule template.
</li> <li> `UpdateApprovalRuleTemplateContent`, which updates the content
of an approval rule template.
</li> </ul> Comments in a repository, by calling the following:
<ul> <li> `DeleteCommentContent`, which deletes the content of a comment on
a commit in a repository.
</li> <li> `GetComment`, which returns information about a comment on a
commit.
</li> <li> `GetCommentReactions`, which returns information about emoji
reactions to comments.
</li> <li> `GetCommentsForComparedCommit`, which returns information about
comments on the comparison between two commit specifiers in a repository.
</li> <li> `PostCommentForComparedCommit`, which creates a comment on the
comparison between two commit specifiers in a repository.
</li> <li> `PostCommentReply`, which creates a reply to a comment.
</li> <li> `PutCommentReaction`, which creates or updates an emoji reaction
to a comment.
</li> <li> `UpdateComment`, which updates the content of a comment on a
commit in a repository.
</li> </ul> Tags used to tag resources in AWS CodeCommit (not Git tags), by
calling the following:
<ul> <li> `ListTagsForResource`, which gets information about AWS tags for
a specified Amazon Resource Name (ARN) in AWS CodeCommit.
</li> <li> `TagResource`, which adds or updates tags for a resource in AWS
CodeCommit.
</li> <li> `UntagResource`, which removes tags for a resource in AWS
CodeCommit.
</li> </ul> Triggers, by calling the following:
<ul> <li> `GetRepositoryTriggers`, which returns information about triggers
configured for a repository.
</li> <li> `PutRepositoryTriggers`, which replaces all triggers for a
repository and can be used to create or delete triggers.
</li> <li> `TestRepositoryTriggers`, which tests the functionality of a
repository trigger by sending data to the trigger target.
</li> </ul> For information about how to use AWS CodeCommit, see the [AWS
CodeCommit User
Guide](https://docs.aws.amazon.com/codecommit/latest/userguide/welcome.html).
"""
@doc """
Creates an association between an approval rule template and a specified
repository. Then, the next time a pull request is created in the repository
where the destination reference (if specified) matches the destination
reference (branch) for the pull request, an approval rule that matches the
template conditions is automatically created for that pull request. If no
destination references are specified in the template, an approval rule that
matches the template contents is created for all pull requests in that
repository.
"""
def associate_approval_rule_template_with_repository(client, input, options \\ []) do
request(client, "AssociateApprovalRuleTemplateWithRepository", input, options)
end
@doc """
Creates an association between an approval rule template and one or more
specified repositories.
"""
def batch_associate_approval_rule_template_with_repositories(client, input, options \\ []) do
request(client, "BatchAssociateApprovalRuleTemplateWithRepositories", input, options)
end
@doc """
Returns information about one or more merge conflicts in the attempted
merge of two commit specifiers using the squash or three-way merge
strategy.
"""
def batch_describe_merge_conflicts(client, input, options \\ []) do
request(client, "BatchDescribeMergeConflicts", input, options)
end
@doc """
Removes the association between an approval rule template and one or more
specified repositories.
"""
def batch_disassociate_approval_rule_template_from_repositories(client, input, options \\ []) do
request(client, "BatchDisassociateApprovalRuleTemplateFromRepositories", input, options)
end
@doc """
Returns information about the contents of one or more commits in a
repository.
"""
def batch_get_commits(client, input, options \\ []) do
request(client, "BatchGetCommits", input, options)
end
@doc """
Returns information about one or more repositories.
<note> The description field for a repository accepts all HTML characters
and all valid Unicode characters. Applications that do not HTML-encode the
description and display it in a webpage can expose users to potentially
malicious code. Make sure that you HTML-encode the description field in any
application that uses this API to display the repository description on a
webpage.
</note>
"""
def batch_get_repositories(client, input, options \\ []) do
request(client, "BatchGetRepositories", input, options)
end
@doc """
Creates a template for approval rules that can then be associated with one
or more repositories in your AWS account. When you associate a template
with a repository, AWS CodeCommit creates an approval rule that matches the
conditions of the template for all pull requests that meet the conditions
of the template. For more information, see
`AssociateApprovalRuleTemplateWithRepository`.
"""
def create_approval_rule_template(client, input, options \\ []) do
request(client, "CreateApprovalRuleTemplate", input, options)
end
@doc """
Creates a branch in a repository and points the branch to a commit.
<note> Calling the create branch operation does not set a repository's
default branch. To do this, call the update default branch operation.
</note>
"""
def create_branch(client, input, options \\ []) do
request(client, "CreateBranch", input, options)
end
@doc """
Creates a commit for a repository on the tip of a specified branch.
"""
def create_commit(client, input, options \\ []) do
request(client, "CreateCommit", input, options)
end
@doc """
Creates a pull request in the specified repository.
"""
def create_pull_request(client, input, options \\ []) do
request(client, "CreatePullRequest", input, options)
end
@doc """
Creates an approval rule for a pull request.
"""
def create_pull_request_approval_rule(client, input, options \\ []) do
request(client, "CreatePullRequestApprovalRule", input, options)
end
@doc """
Creates a new, empty repository.
"""
def create_repository(client, input, options \\ []) do
request(client, "CreateRepository", input, options)
end
@doc """
Creates an unreferenced commit that represents the result of merging two
branches using a specified merge strategy. This can help you determine the
outcome of a potential merge. This API cannot be used with the fast-forward
merge strategy because that strategy does not create a merge commit.
<note> This unreferenced merge commit can only be accessed using the
GetCommit API or through git commands such as git fetch. To retrieve this
commit, you must specify its commit ID or otherwise reference it.
</note>
"""
def create_unreferenced_merge_commit(client, input, options \\ []) do
request(client, "CreateUnreferencedMergeCommit", input, options)
end
@doc """
Deletes a specified approval rule template. Deleting a template does not
remove approval rules on pull requests already created with the template.
"""
def delete_approval_rule_template(client, input, options \\ []) do
request(client, "DeleteApprovalRuleTemplate", input, options)
end
@doc """
Deletes a branch from a repository, unless that branch is the default
branch for the repository.
"""
def delete_branch(client, input, options \\ []) do
request(client, "DeleteBranch", input, options)
end
@doc """
Deletes the content of a comment made on a change, file, or commit in a
repository.
"""
def delete_comment_content(client, input, options \\ []) do
request(client, "DeleteCommentContent", input, options)
end
@doc """
Deletes a specified file from a specified branch. A commit is created on
the branch that contains the revision. The file still exists in the commits
earlier to the commit that contains the deletion.
"""
def delete_file(client, input, options \\ []) do
request(client, "DeleteFile", input, options)
end
@doc """
Deletes an approval rule from a specified pull request. Approval rules can
be deleted from a pull request only if the pull request is open, and if the
approval rule was created specifically for a pull request and not generated
from an approval rule template associated with the repository where the
pull request was created. You cannot delete an approval rule from a merged
or closed pull request.
"""
def delete_pull_request_approval_rule(client, input, options \\ []) do
request(client, "DeletePullRequestApprovalRule", input, options)
end
@doc """
Deletes a repository. If a specified repository was already deleted, a null
repository ID is returned.
<important> Deleting a repository also deletes all associated objects and
metadata. After a repository is deleted, all future push calls to the
deleted repository fail.
</important>
"""
def delete_repository(client, input, options \\ []) do
request(client, "DeleteRepository", input, options)
end
@doc """
Returns information about one or more merge conflicts in the attempted
merge of two commit specifiers using the squash or three-way merge
strategy. If the merge option for the attempted merge is specified as
FAST_FORWARD_MERGE, an exception is thrown.
"""
def describe_merge_conflicts(client, input, options \\ []) do
request(client, "DescribeMergeConflicts", input, options)
end
@doc """
Returns information about one or more pull request events.
"""
def describe_pull_request_events(client, input, options \\ []) do
request(client, "DescribePullRequestEvents", input, options)
end
@doc """
Removes the association between a template and a repository so that
approval rules based on the template are not automatically created when
pull requests are created in the specified repository. This does not delete
any approval rules previously created for pull requests through the
template association.
"""
def disassociate_approval_rule_template_from_repository(client, input, options \\ []) do
request(client, "DisassociateApprovalRuleTemplateFromRepository", input, options)
end
@doc """
Evaluates whether a pull request has met all the conditions specified in
its associated approval rules.
"""
def evaluate_pull_request_approval_rules(client, input, options \\ []) do
request(client, "EvaluatePullRequestApprovalRules", input, options)
end
@doc """
Returns information about a specified approval rule template.
"""
def get_approval_rule_template(client, input, options \\ []) do
request(client, "GetApprovalRuleTemplate", input, options)
end
@doc """
Returns the base-64 encoded content of an individual blob in a repository.
"""
def get_blob(client, input, options \\ []) do
request(client, "GetBlob", input, options)
end
@doc """
Returns information about a repository branch, including its name and the
last commit ID.
"""
def get_branch(client, input, options \\ []) do
request(client, "GetBranch", input, options)
end
@doc """
Returns the content of a comment made on a change, file, or commit in a
repository.
<note> Reaction counts might include numbers from user identities who were
deleted after the reaction was made. For a count of reactions from active
identities, use GetCommentReactions.
</note>
"""
def get_comment(client, input, options \\ []) do
request(client, "GetComment", input, options)
end
@doc """
Returns information about reactions to a specified comment ID. Reactions
from users who have been deleted will not be included in the count.
"""
def get_comment_reactions(client, input, options \\ []) do
request(client, "GetCommentReactions", input, options)
end
@doc """
Returns information about comments made on the comparison between two
commits.
<note> Reaction counts might include numbers from user identities who were
deleted after the reaction was made. For a count of reactions from active
identities, use GetCommentReactions.
</note>
"""
def get_comments_for_compared_commit(client, input, options \\ []) do
request(client, "GetCommentsForComparedCommit", input, options)
end
@doc """
Returns comments made on a pull request.
<note> Reaction counts might include numbers from user identities who were
deleted after the reaction was made. For a count of reactions from active
identities, use GetCommentReactions.
</note>
"""
def get_comments_for_pull_request(client, input, options \\ []) do
request(client, "GetCommentsForPullRequest", input, options)
end
@doc """
Returns information about a commit, including commit message and committer
information.
"""
def get_commit(client, input, options \\ []) do
request(client, "GetCommit", input, options)
end
@doc """
Returns information about the differences in a valid commit specifier (such
as a branch, tag, HEAD, commit ID, or other fully qualified reference).
Results can be limited to a specified path.
"""
def get_differences(client, input, options \\ []) do
request(client, "GetDifferences", input, options)
end
@doc """
Returns the base-64 encoded contents of a specified file and its metadata.
"""
def get_file(client, input, options \\ []) do
request(client, "GetFile", input, options)
end
@doc """
Returns the contents of a specified folder in a repository.
"""
def get_folder(client, input, options \\ []) do
request(client, "GetFolder", input, options)
end
@doc """
Returns information about a specified merge commit.
"""
def get_merge_commit(client, input, options \\ []) do
request(client, "GetMergeCommit", input, options)
end
@doc """
Returns information about merge conflicts between the before and after
commit IDs for a pull request in a repository.
"""
def get_merge_conflicts(client, input, options \\ []) do
request(client, "GetMergeConflicts", input, options)
end
@doc """
Returns information about the merge options available for merging two
specified branches. For details about why a merge option is not available,
use GetMergeConflicts or DescribeMergeConflicts.
"""
def get_merge_options(client, input, options \\ []) do
request(client, "GetMergeOptions", input, options)
end
@doc """
Gets information about a pull request in a specified repository.
"""
def get_pull_request(client, input, options \\ []) do
request(client, "GetPullRequest", input, options)
end
@doc """
Gets information about the approval states for a specified pull request.
Approval states only apply to pull requests that have one or more approval
rules applied to them.
"""
def get_pull_request_approval_states(client, input, options \\ []) do
request(client, "GetPullRequestApprovalStates", input, options)
end
@doc """
Returns information about whether approval rules have been set aside
(overridden) for a pull request, and if so, the Amazon Resource Name (ARN)
of the user or identity that overrode the rules and their requirements for
the pull request.
"""
def get_pull_request_override_state(client, input, options \\ []) do
request(client, "GetPullRequestOverrideState", input, options)
end
@doc """
Returns information about a repository.
<note> The description field for a repository accepts all HTML characters
and all valid Unicode characters. Applications that do not HTML-encode the
description and display it in a webpage can expose users to potentially
malicious code. Make sure that you HTML-encode the description field in any
application that uses this API to display the repository description on a
webpage.
</note>
"""
def get_repository(client, input, options \\ []) do
request(client, "GetRepository", input, options)
end
@doc """
Gets information about triggers configured for a repository.
"""
def get_repository_triggers(client, input, options \\ []) do
request(client, "GetRepositoryTriggers", input, options)
end
@doc """
Lists all approval rule templates in the specified AWS Region in your AWS
account. If an AWS Region is not specified, the AWS Region where you are
signed in is used.
"""
def list_approval_rule_templates(client, input, options \\ []) do
request(client, "ListApprovalRuleTemplates", input, options)
end
@doc """
Lists all approval rule templates that are associated with a specified
repository.
"""
def list_associated_approval_rule_templates_for_repository(client, input, options \\ []) do
request(client, "ListAssociatedApprovalRuleTemplatesForRepository", input, options)
end
@doc """
Gets information about one or more branches in a repository.
"""
def list_branches(client, input, options \\ []) do
request(client, "ListBranches", input, options)
end
@doc """
Returns a list of pull requests for a specified repository. The return list
can be refined by pull request status or pull request author ARN.
"""
def list_pull_requests(client, input, options \\ []) do
request(client, "ListPullRequests", input, options)
end
@doc """
Gets information about one or more repositories.
"""
def list_repositories(client, input, options \\ []) do
request(client, "ListRepositories", input, options)
end
@doc """
Lists all repositories associated with the specified approval rule
template.
"""
def list_repositories_for_approval_rule_template(client, input, options \\ []) do
request(client, "ListRepositoriesForApprovalRuleTemplate", input, options)
end
@doc """
Gets information about AWS tags for a specified Amazon Resource Name (ARN)
in AWS CodeCommit. For a list of valid resources in AWS CodeCommit, see
[CodeCommit Resources and
Operations](https://docs.aws.amazon.com/codecommit/latest/userguide/auth-and-access-control-iam-access-control-identity-based.html#arn-formats)
in the *AWS CodeCommit User Guide*.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Merges two branches using the fast-forward merge strategy.
"""
def merge_branches_by_fast_forward(client, input, options \\ []) do
request(client, "MergeBranchesByFastForward", input, options)
end
@doc """
Merges two branches using the squash merge strategy.
"""
def merge_branches_by_squash(client, input, options \\ []) do
request(client, "MergeBranchesBySquash", input, options)
end
@doc """
Merges two specified branches using the three-way merge strategy.
"""
def merge_branches_by_three_way(client, input, options \\ []) do
request(client, "MergeBranchesByThreeWay", input, options)
end
@doc """
Attempts to merge the source commit of a pull request into the specified
destination branch for that pull request at the specified commit using the
fast-forward merge strategy. If the merge is successful, it closes the pull
request.
"""
def merge_pull_request_by_fast_forward(client, input, options \\ []) do
request(client, "MergePullRequestByFastForward", input, options)
end
@doc """
Attempts to merge the source commit of a pull request into the specified
destination branch for that pull request at the specified commit using the
squash merge strategy. If the merge is successful, it closes the pull
request.
"""
def merge_pull_request_by_squash(client, input, options \\ []) do
request(client, "MergePullRequestBySquash", input, options)
end
@doc """
Attempts to merge the source commit of a pull request into the specified
destination branch for that pull request at the specified commit using the
three-way merge strategy. If the merge is successful, it closes the pull
request.
"""
def merge_pull_request_by_three_way(client, input, options \\ []) do
request(client, "MergePullRequestByThreeWay", input, options)
end
@doc """
Sets aside (overrides) all approval rule requirements for a specified pull
request.
"""
def override_pull_request_approval_rules(client, input, options \\ []) do
request(client, "OverridePullRequestApprovalRules", input, options)
end
@doc """
Posts a comment on the comparison between two commits.
"""
def post_comment_for_compared_commit(client, input, options \\ []) do
request(client, "PostCommentForComparedCommit", input, options)
end
@doc """
Posts a comment on a pull request.
"""
def post_comment_for_pull_request(client, input, options \\ []) do
request(client, "PostCommentForPullRequest", input, options)
end
@doc """
Posts a comment in reply to an existing comment on a comparison between
commits or a pull request.
"""
def post_comment_reply(client, input, options \\ []) do
request(client, "PostCommentReply", input, options)
end
@doc """
Adds or updates a reaction to a specified comment for the user whose
identity is used to make the request. You can only add or update a reaction
for yourself. You cannot add, modify, or delete a reaction for another
user.
"""
def put_comment_reaction(client, input, options \\ []) do
request(client, "PutCommentReaction", input, options)
end
@doc """
Adds or updates a file in a branch in an AWS CodeCommit repository, and
generates a commit for the addition in the specified branch.
"""
def put_file(client, input, options \\ []) do
request(client, "PutFile", input, options)
end
@doc """
Replaces all triggers for a repository. Used to create or delete triggers.
"""
def put_repository_triggers(client, input, options \\ []) do
request(client, "PutRepositoryTriggers", input, options)
end
@doc """
Adds or updates tags for a resource in AWS CodeCommit. For a list of valid
resources in AWS CodeCommit, see [CodeCommit Resources and
Operations](https://docs.aws.amazon.com/codecommit/latest/userguide/auth-and-access-control-iam-access-control-identity-based.html#arn-formats)
in the *AWS CodeCommit User Guide*.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Tests the functionality of repository triggers by sending information to
the trigger target. If real data is available in the repository, the test
sends data from the last commit. If no data is available, sample data is
generated.
"""
def test_repository_triggers(client, input, options \\ []) do
request(client, "TestRepositoryTriggers", input, options)
end
@doc """
Removes tags for a resource in AWS CodeCommit. For a list of valid
resources in AWS CodeCommit, see [CodeCommit Resources and
Operations](https://docs.aws.amazon.com/codecommit/latest/userguide/auth-and-access-control-iam-access-control-identity-based.html#arn-formats)
in the *AWS CodeCommit User Guide*.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates the content of an approval rule template. You can change the number
of required approvals, the membership of the approval rule, and whether an
approval pool is defined.
"""
def update_approval_rule_template_content(client, input, options \\ []) do
request(client, "UpdateApprovalRuleTemplateContent", input, options)
end
@doc """
Updates the description for a specified approval rule template.
"""
def update_approval_rule_template_description(client, input, options \\ []) do
request(client, "UpdateApprovalRuleTemplateDescription", input, options)
end
@doc """
Updates the name of a specified approval rule template.
"""
def update_approval_rule_template_name(client, input, options \\ []) do
request(client, "UpdateApprovalRuleTemplateName", input, options)
end
@doc """
Replaces the contents of a comment.
"""
def update_comment(client, input, options \\ []) do
request(client, "UpdateComment", input, options)
end
@doc """
Sets or changes the default branch name for the specified repository.
<note> If you use this operation to change the default branch name to the
current default branch name, a success message is returned even though the
default branch did not change.
</note>
"""
def update_default_branch(client, input, options \\ []) do
request(client, "UpdateDefaultBranch", input, options)
end
@doc """
Updates the structure of an approval rule created specifically for a pull
request. For example, you can change the number of required approvers and
the approval pool for approvers.
"""
def update_pull_request_approval_rule_content(client, input, options \\ []) do
request(client, "UpdatePullRequestApprovalRuleContent", input, options)
end
@doc """
Updates the state of a user's approval on a pull request. The user is
derived from the signed-in account when the request is made.
"""
def update_pull_request_approval_state(client, input, options \\ []) do
request(client, "UpdatePullRequestApprovalState", input, options)
end
@doc """
Replaces the contents of the description of a pull request.
"""
def update_pull_request_description(client, input, options \\ []) do
request(client, "UpdatePullRequestDescription", input, options)
end
@doc """
Updates the status of a pull request.
"""
def update_pull_request_status(client, input, options \\ []) do
request(client, "UpdatePullRequestStatus", input, options)
end
@doc """
Replaces the title of a pull request.
"""
def update_pull_request_title(client, input, options \\ []) do
request(client, "UpdatePullRequestTitle", input, options)
end
@doc """
Sets or changes the comment or description for a repository.
<note> The description field for a repository accepts all HTML characters
and all valid Unicode characters. Applications that do not HTML-encode the
description and display it in a webpage can expose users to potentially
malicious code. Make sure that you HTML-encode the description field in any
application that uses this API to display the repository description on a
webpage.
</note>
"""
def update_repository_description(client, input, options \\ []) do
request(client, "UpdateRepositoryDescription", input, options)
end
@doc """
Renames a repository. The repository name must be unique across the calling
AWS account. Repository names are limited to 100 alphanumeric, dash, and
underscore characters, and cannot include certain characters. The suffix
.git is prohibited. For more information about the limits on repository
names, see
[Limits](https://docs.aws.amazon.com/codecommit/latest/userguide/limits.html)
in the AWS CodeCommit User Guide.
"""
def update_repository_name(client, input, options \\ []) do
request(client, "UpdateRepositoryName", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "codecommit"}
host = build_host("codecommit", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "CodeCommit_20150413.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/code_commit.ex
| 0.86988
| 0.432543
|
code_commit.ex
|
starcoder
|
defmodule Day13 do
@spec parse_points(String.t()) :: list
def parse_points(input) do
input
|> String.split("\n", trim: true)
|> Enum.reject(&String.contains?(&1, "fold"))
|> Enum.map(&Grid.string_to_point/1)
end
@spec parse_fold_string(String.t()) :: %{dir: String.t(), value: integer}
def parse_fold_string(s) do
Regex.named_captures(~r/along (?<dir>.)=(?<value>.*)/, s)
|> then(fn x -> %{dir: x["dir"], value: String.to_integer(x["value"])} end)
end
@spec parse_folds(String.t()) :: list
def parse_folds(input) do
input
|> String.split("\n", trim: true)
|> Enum.filter(&String.contains?(&1, "fold"))
|> Enum.map(&parse_fold_string/1)
end
@spec parse_input(String.t()) :: %{folds: list, points: list}
def parse_input(input) do
%{points: parse_points(input), folds: parse_folds(input)}
end
def fold(points, "y", value) do
points
|> Enum.map(fn {x, y} ->
{x, if(y > value, do: 2 * value - y, else: y)}
end)
|> Enum.uniq()
end
def fold(points, "x", value) do
points
|> Enum.map(fn {x, y} ->
{if(x > value, do: 2 * value - x, else: x), y}
end)
|> Enum.uniq()
end
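# Illustrative (added note): folding along y=7 maps {2, 10} to
# {2, 2 * 7 - 10} = {2, 4}; points with y <= 7 keep their coordinates, and
# Enum.uniq/1 collapses points that land on top of each other.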
def part_1(contents) do
folds = contents |> parse_folds()
first_fold = folds |> List.first()
contents
|> parse_points()
|> fold(first_fold.dir, first_fold.value)
|> Enum.count()
end
def part_2(contents) do
folds = contents |> parse_folds()
points = contents |> parse_points()
final_points =
folds
|> Enum.reduce(points, fn f, points ->
fold(points, f.dir, f.value)
end)
width = 1 + (final_points |> Enum.map(&elem(&1, 0)) |> Enum.max())
height = 1 + (final_points |> Enum.map(&elem(&1, 1)) |> Enum.max())
final_points
|> Enum.reduce(Grid.new(width, height), &Grid.put(&1, &2, 1))
|> Grid.inspect()
end
def main do
{:ok, contents} = File.read("data/day13.txt")
IO.inspect(contents |> part_1(), label: "part 1")
IO.inspect(contents |> part_2(), label: "part 2")
end
end
|
aoc21/lib/day13.ex
| 0.788949
| 0.435841
|
day13.ex
|
starcoder
|
defmodule Raft.LogEntry do
@moduledoc """
Log entry for Raft implementation.
"""
alias __MODULE__
@enforce_keys [:index, :term]
defstruct(
index: nil,
term: nil,
operation: nil,
requester: nil,
argument: nil
)
@doc """
Return an empty log entry. This is mostly
used for convenience.
"""
@spec empty() :: %LogEntry{index: 0, term: 0}
def empty do
%LogEntry{index: 0, term: 0}
end
@doc """
Return a nop entry for the given index.
"""
@spec nop(non_neg_integer(), non_neg_integer(), atom()) :: %LogEntry{
index: non_neg_integer(),
term: non_neg_integer(),
requester: atom() | pid(),
operation: :nop,
argument: none()
}
def nop(index, term, requester) do
%LogEntry{
index: index,
term: term,
requester: requester,
operation: :nop,
argument: nil
}
end
@doc """
Return a log entry for an `enqueue` operation.
"""
@spec enqueue(non_neg_integer(), non_neg_integer(), atom(), any()) ::
%LogEntry{
index: non_neg_integer(),
term: non_neg_integer(),
requester: atom() | pid(),
operation: :enq,
argument: any()
}
def enqueue(index, term, requester, item) do
%LogEntry{
index: index,
term: term,
requester: requester,
operation: :enq,
argument: item
}
end
@doc """
Return a log entry for a `dequeue` operation.
"""
@spec dequeue(non_neg_integer(), non_neg_integer(), atom()) :: %LogEntry{
index: non_neg_integer(),
term: non_neg_integer(),
requester: atom() | pid(),
operation: :deq,
argument: none()
}
def dequeue(index, term, requester) do
%LogEntry{
index: index,
term: term,
requester: requester,
operation: :deq,
argument: nil
}
end
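# Illustrative usage (an assumed example, not from the original source):
#
#     Raft.LogEntry.enqueue(1, 1, :client_a, :job)
#     #=> %Raft.LogEntry{index: 1, term: 1, requester: :client_a,
#     #     operation: :enq, argument: :job}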
end
defmodule Raft.AppendEntryRequest do
@moduledoc """
AppendEntries RPC request.
"""
alias __MODULE__
# Require that any AppendEntryRequest contains
# a :term, :leader_id, :prev_log_index, :prev_log_term, and :leader_commit_index.
@enforce_keys [
:term,
:leader_id,
:prev_log_index,
:prev_log_term,
:leader_commit_index
]
defstruct(
term: nil,
leader_id: nil,
prev_log_index: nil,
prev_log_term: nil,
entries: nil,
leader_commit_index: nil
)
@doc """
Create a new AppendEntryRequest
"""
@spec new(
non_neg_integer(),
atom(),
non_neg_integer(),
non_neg_integer(),
list(any()),
non_neg_integer()
) ::
%AppendEntryRequest{
term: non_neg_integer(),
leader_id: atom(),
prev_log_index: non_neg_integer(),
prev_log_term: non_neg_integer(),
entries: list(any()),
leader_commit_index: non_neg_integer()
}
def new(
term,
leader_id,
prev_log_index,
prev_log_term,
entries,
leader_commit_index
) do
%AppendEntryRequest{
term: term,
leader_id: leader_id,
prev_log_index: prev_log_index,
prev_log_term: prev_log_term,
entries: entries,
leader_commit_index: leader_commit_index
}
end
end
defmodule Raft.AppendEntryResponse do
@moduledoc """
Response for the AppendEntryRequest
"""
alias __MODULE__
@enforce_keys [:term, :log_index, :success]
defstruct(
term: nil,
# used to relate request with response.
log_index: nil,
success: nil
)
@doc """
Create a new AppendEntryResponse.
"""
@spec new(non_neg_integer(), non_neg_integer(), boolean()) ::
%AppendEntryResponse{
term: non_neg_integer(),
log_index: non_neg_integer(),
success: boolean()
}
def new(term, prev_index, success) do
%AppendEntryResponse{
term: term,
log_index: prev_index,
success: success
}
end
end
defmodule Raft.RequestVote do
@moduledoc """
Arguments used when requesting votes.
"""
alias __MODULE__
@enforce_keys [:term, :candidate_id, :last_log_index, :last_log_term]
defstruct(
term: nil,
candidate_id: nil,
last_log_index: nil,
last_log_term: nil
)
@doc """
Create a new RequestVote request.
"""
@spec new(non_neg_integer(), atom(), non_neg_integer(), non_neg_integer()) ::
%RequestVote{
term: non_neg_integer(),
candidate_id: atom(),
last_log_index: non_neg_integer(),
last_log_term: non_neg_integer()
}
def new(term, id, last_log_index, last_log_term) do
%RequestVote{
term: term,
candidate_id: id,
last_log_index: last_log_index,
last_log_term: last_log_term
}
end
end
defmodule Raft.RequestVoteResponse do
@moduledoc """
Response for RequestVote requests.
"""
alias __MODULE__
@enforce_keys [:term, :granted]
defstruct(
term: nil,
granted: nil
)
@doc """
Create a new RequestVoteResponse.
"""
@spec new(non_neg_integer(), boolean()) ::
%RequestVoteResponse{
term: non_neg_integer(),
granted: boolean()
}
def new(term, granted) do
%RequestVoteResponse{term: term, granted: granted}
end
end
|
apps/lab3/lib/messages.ex
| 0.780244
| 0.429071
|
messages.ex
|
starcoder
|
defmodule TicTacToe.Negascout do
@moduledoc """
Generic Negascout algorithm
For information on the negascout algorithm please refer to
[wikipedia](https://en.wikipedia.org/wiki/Principal_variation_search). Before
you can use this module you have to implement the `TicTacToe.Negascout.Node`
protocol for your type.
"""
defprotocol Node do
@doc "generates a list of possible moves"
def moves(node)
@doc "makes a move on the given node returning the updated node"
def make_move(node, move)
@doc "evaluates the given node"
def evaluate(node)
end
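# A minimal sketch of implementing the protocol for a hypothetical game state
# (illustrative; `MyGame` and its functions are assumptions):
#
#     defimpl TicTacToe.Negascout.Node, for: MyGame.State do
#       def moves(state), do: MyGame.legal_moves(state)
#       def make_move(state, move), do: MyGame.apply_move(state, move)
#       def evaluate(state), do: MyGame.score(state)
#     end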
alias TicTacToe.Negascout.SearchState
defmodule SearchState do
@moduledoc false
defstruct [ :depth, :best_move, best_path: [], first: true ]
end
@doc """
The negascout search
`node` has to implement the `TicTacToe.Negascout.Node` protocol. `player`
should be set to 1 if the player with the positive evaluation is next to
move, and to -1 otherwise. `alpha` is the starting alpha value and should be
effectively -infinity. `beta` is the starting beta value and should be
effectively +infinity.
"""
def negascout(node, depth, player, alpha, beta) do
negascout_intern(node, player, alpha, beta, %SearchState{depth: depth})
end
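# Example invocation (illustrative): search 9 plies from `board` (an assumed
# Node implementation) with the maximizing player to move, using a wide
# alpha-beta window:
#
#     TicTacToe.Negascout.negascout(board, 9, 1, -1_000_000, 1_000_000)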
defp negascout_intern(node, player, alpha, beta, state) do
move_list = node |> Node.moves
if Enum.empty?(move_list) or state.depth == 0 do
# exit point of the recursion: the game is finished or the depth budget is
# spent, so just evaluate the node
{ player * Node.evaluate(node), [] }
else
move_list |> negascout_loop(node, player, alpha, beta, state)
end
end
defp negascout_loop([], _, _, alpha, _, state) do
{alpha, [state.best_move|state.best_path]}
end
defp negascout_loop([move|loop_list], node, player, alpha, beta, state) do
{score, ml} = negascout_search(move, node, player, alpha, beta, state)
# update alpha
{alpha, state} = if score > alpha do
{score, %{state| best_move: move, best_path: ml}}
else
{alpha, state}
end
if alpha >= beta do
# beta cut off
{ alpha, [move|ml] }
else
state = %{state|first: false}
negascout_loop(loop_list, node, player, alpha, beta, state)
end
end
defp negascout_search(move, node, player, alpha, beta,
state = %SearchState{first: true}) do
# initial search
negascout_recurse(move, node, -player, -beta, -alpha, state)
end
defp negascout_search(move, node, player, alpha, beta,
state = %SearchState{first: false}) do
# null window search
{score, ml} = negascout_recurse(move, node, -player, -alpha - 1, -alpha, state)
if alpha < score && score < beta do
# full search
negascout_recurse(move, node, -player, -beta, -score, state)
else
{score, ml}
end
end
defp negate({score, movelist}), do: {-score, movelist}
defp negascout_recurse(move, node, player, alpha, beta, state) do
# one ply deeper means one less level of remaining search depth
state = state |> Map.update!(:depth, &(&1 - 1))
node
|> Node.make_move(move)
|> negascout_intern(player, alpha, beta, state)
|> negate
end
end
|
lib/tictactoe/negascout.ex
| 0.81626
| 0.772874
|
negascout.ex
|
starcoder
|
defmodule TelemetryMetricsTelegraf do
@moduledoc """
InfluxDB reporter for `Telemetry.Metrics`.
`TelemetryMetricsTelegraf`:
* uses all but the last of the dot-separated segments of a metric name as the influxdb measurement name ("foo.bar.duration" -> "foo.bar")
* uses the last name segment or `:field_name` reporter option as field key ("foo.bar.duration" -> "duration")
* reports metrics with the same measurement name as a single influxdb measurement (with multiple fields)
* uses adapters to provide flexibility in influxdb client choice (see spec at `TelemetryMetricsTelegraf.Writer`)
For example, metrics definition
[
summary("app.repo.query.decode_time", tags: [:source])
summary("app.repo.query.total_time", tags: [:source])
]
for event
:telemetry.execute([:app, :repo, :query], %{total_time: 100, decode_time: 30}, %{source: "users"})
yields
app.repo.query,source="users" total_time=100,decode_time=30
influxdb measurement.
## Configuration options
Refer to `TelemetryMetricsTelegraf.AppConfig` for the complete list of available configuration options.
Options can be set:
* as metric reporter options
```
summary("foo.value", reporter_options: [period: "1m"])
```
* as reporter process options
```
TelemetryMetricsTelegraf.start_link(
metrics: metrics(),
adapter: adapter(),
period: "45s"
)
```
* as application config
```
# config/config.exs
config :telemetry_metrics_telegraf, period: "50s"
```
"""
use GenServer
require Logger
import TelemetryMetricsTelegraf.AppConfig, only: [app_config: 0]
alias TelemetryMetricsTelegraf.Utils
@type adapter :: {TelemetryMetricsTelegraf.Writer.t(), any}
@type args ::
keyword(
{:adapter, TelemetryMetricsTelegraf.Writer.t() | adapter()}
| {:metrics, [Telemetry.Metrics.t()]}
| {atom, any}
)
@spec start_link(args()) :: {:error, any} | {:ok, pid}
def start_link(opts) do
server_opts = Keyword.take(opts, [:name])
{{adapter_mod, adapter_opts}, opts} =
case Keyword.pop(opts, :adapter) do
{nil, _opts} ->
raise ArgumentError, "the :adapter option is required by #{inspect(__MODULE__)}"
{{adapter_mod, adapter_opts}, opts} ->
{{adapter_mod, adapter_mod.init(adapter_opts)}, opts}
{adapter_mod, opts} ->
{{adapter_mod, adapter_mod.init([])}, opts}
end
{metrics, opts} =
case Keyword.pop(opts, :metrics) do
{metrics, opts} when is_list(metrics) ->
{metrics, opts}
_ ->
raise(ArgumentError, "the :metrics option is required by #{inspect(__MODULE__)}")
end
GenServer.start_link(__MODULE__, {metrics, {adapter_mod, adapter_opts}, opts}, server_opts)
end
@impl GenServer
@spec init({[Telemetry.Metrics.t()], adapter(), keyword()}) :: {:ok, [any]}
def init({metrics, adapter, opts}) do
Process.flag(:trap_exit, true)
groups = Enum.group_by(metrics, & &1.event_name)
for {event, metrics} <- groups do
id = {__MODULE__, event, self()}
:telemetry.attach(
id,
event,
&handle_event/4,
{adapter, group_metrics_by_name!(metrics)}
)
end
if Utils.fetch_option!(:log_telegraf_config_on_start, [opts, app_config()]) do
Logger.info(fn ->
"Suggested telegraf aggregator config for your metrics:\n" <>
TelemetryMetricsTelegraf.Telegraf.ConfigAdviser.render(metrics, opts)
end)
end
{:ok, Map.keys(groups)}
end
@impl GenServer
def terminate(_, events) do
for event <- events do
:telemetry.detach({__MODULE__, event, self()})
end
:ok
end
defp handle_event(_event_name, measurements, metadata, {{adapter_mod, adapter_opts}, metrics}) do
for {measurement, metrics} <- metrics do
{tags, fields} =
Enum.reduce(metrics, {%{}, %{}}, fn metric, {tags, fields} ->
{Map.merge(tags, extract_tags(metric, metadata)),
Map.put(fields, field_name(metric), extract_measurement(metric, measurements))}
end)
adapter_mod.write(measurement, tags, fields, adapter_opts)
end
rescue
e ->
Logger.error(fn ->
"#{inspect(e)} #{Enum.map(__STACKTRACE__, &inspect/1) |> Enum.join(" ")}"
end)
{:error, e}
end
defp extract_measurement(metric, measurements) do
case metric.measurement do
fun when is_function(fun, 1) -> fun.(measurements)
key -> measurements[key]
end
end
defp field_name(metric) do
cond do
field = metric.reporter_options[:field_name] ->
field
is_atom(metric.measurement) ->
metric.measurement
is_function(metric.measurement, 1) ->
List.last(metric.name)
end
end
defp extract_tags(metric, metadata) do
tag_values = metric.tag_values.(metadata)
Map.take(tag_values, metric.tags)
end
defp group_metrics_by_name!(metrics) do
Enum.reduce(metrics, %{}, fn new_metric, acc ->
name = Utils.measurement_name(new_metric)
validate_group!(name, acc[name], new_metric)
Map.put(acc, name, [new_metric | Map.get(acc, name, [])])
end)
end
@named_group_uniqueness_keys [:buckets, :tags, :__struct__, :reporter_options]
defp validate_group!(_group_name, nil, _new_metric), do: :ok
defp validate_group!(group_name, metrics, new_metric) do
new_metric_params = Map.take(new_metric, @named_group_uniqueness_keys)
Enum.each(metrics, fn metric ->
metric_params = Map.take(metric, @named_group_uniqueness_keys)
if new_metric_params != metric_params do
raise(
TelemetryMetricsTelegraf.ConfigurationError,
"""
Metrics with the same name must share #{inspect(@named_group_uniqueness_keys)} attributes. \
#{group_name} was previously defined with \
#{inspect(metric_params)} and #{inspect(new_metric_params)} breaks the contract.\
"""
)
end
end)
:ok
end
end
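# A hedged sketch of a custom adapter. The `init/1` and `write/4` callbacks are
# inferred from how this module drives the adapter above; consult the
# `TelemetryMetricsTelegraf.Writer` behaviour for the authoritative contract.
# `metrics/0` below is a hypothetical helper returning metric definitions.
#
#     defmodule MyApp.IOWriter do
#       @behaviour TelemetryMetricsTelegraf.Writer
#
#       @impl true
#       def init(opts), do: opts
#
#       @impl true
#       def write(measurement, tags, fields, _opts) do
#         IO.inspect({measurement, tags, fields}, label: "telegraf")
#       end
#     end
#
#     TelemetryMetricsTelegraf.start_link(metrics: metrics(), adapter: MyApp.IOWriter)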
|
lib/telemetry_metrics_telegraf.ex
| 0.925735
| 0.683565
|
telemetry_metrics_telegraf.ex
|
starcoder
|
defmodule UpRun.BetterList do
@type t :: UpRun.BetterList.Empty.t() | UpRun.BetterList.Cons.t()
defmodule Empty do
@type t :: %Empty{}
defstruct []
def new(), do: %Empty{}
end
defmodule Cons do
@behaviour Access
@type t :: %Cons{
value: any(),
link: UpRun.BetterList.t()
}
defstruct value: nil, link: %Empty{}
@spec new(any()) :: t()
def new(value), do: UpRun.BetterList.Empty.new() |> cons(value)
@spec cons(t(), any()) :: t()
def cons(list, value \\ nil) do
%Cons{
value: value,
link: list
}
end
def fetch(%Cons{value: value}, 0), do: {:ok, value}
def fetch(%Cons{link: link}, index), do: fetch(link, index - 1)
def fetch(_, _), do: :error
def get(list, index, default) do
case fetch(list, index) do
{:ok, value} -> value
:error -> default
end
end
def get_and_update(list, index, fun) do
get_and_update(list, [], index, fun)
end
@spec get_and_update(t(), [any()], non_neg_integer(), fun()) :: {any(), t()}
def get_and_update(%Cons{value: value, link: link}, acc_list, 0, fun) do
case fun.(value) do
:pop ->
# drop this node: rebuild the accumulated prefix directly onto the tail
new_list = Enum.reduce(acc_list, link, &cons(&2, &1))
{value, new_list}
{_old_value, new_value} ->
new_node = %Cons{value: new_value, link: link}
new_list = Enum.reduce(acc_list, new_node, &cons(&2, &1))
{value, new_list}
end
end
def get_and_update(%Cons{value: value, link: link}, acc_list, index, fun) do
get_and_update(link, [value | acc_list], index - 1, fun)
end
def pop(list, index), do: pop(list, index, [])
def pop(%{value: value, link: link}, 0, acc_list) do
new_list = Enum.reduce(acc_list, link, &cons(&2, &1))
{value, new_list}
end
def pop(%{value: value, link: link}, index, acc_list) do
pop(link, index - 1, [value | acc_list])
end
end
def append(%Empty{}, bs), do: bs
def append(%Cons{value: value, link: link}, bs) do
link
|> append(bs)
|> cons(value)
# Equivalent:
# new_inner = UpRun.BetterList.append(link, bs)
# UpRun.BetterList.cons(new_inner, value)
end
@spec count(t()) :: non_neg_integer()
def count(%Empty{}), do: 0
def count(%{link: link}), do: 1 + count(link)
@spec new() :: Empty.t()
def new(), do: Empty.new()
@spec new(any()) :: Cons.t()
def new(value), do: Cons.new(value)
@spec cons(t(), any()) :: t()
def cons(list, value \\ nil), do: Cons.cons(list, value)
end
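# A short iex-style sketch of this list and its Access callbacks:
#
#     iex> alias UpRun.BetterList
#     iex> list = BetterList.new(3) |> BetterList.cons(2) |> BetterList.cons(1)
#     iex> BetterList.count(list)
#     3
#     iex> BetterList.Cons.get(list, 1, :missing)
#     2
#     iex> {popped, rest} = BetterList.Cons.pop(list, 0)
#     iex> {popped, BetterList.count(rest)}
#     {1, 2}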
|
lib/up_run/better_list.ex
| 0.663887
| 0.434281
|
better_list.ex
|
starcoder
|
defmodule Ibanity.Request do
@moduledoc """
Abstraction layer that eases the construction of an HTTP request.
Most of the functions come in two flavors. Those with an `Ibanity.Request` as first argument return a modified version of it,
and those without one first create a base `Ibanity.Request` and modify it afterwards.
The only purpose of this mechanism is to ease the construction of a request.
Note that all functions of this module return an `Ibanity.Request`.
None of them can directly fail, and they can therefore be used with the pipe operator.
"""
@base_headers [
Accept: "application/json",
"Content-Type": "application/json"
]
defstruct application: :default,
headers: @base_headers,
attributes: %{},
meta: %{},
idempotency_key: nil,
customer_access_token: nil,
resource_type: nil,
resource_ids: [],
page: %{},
query_params: %{}
@doc """
Creates a new request and sets the application name
See `application/2`.
"""
def application(name), do: application(%__MODULE__{}, name)
@doc """
Sets the request's application name
"""
def application(%__MODULE__{} = request, name) when is_atom(name) do
%__MODULE__{request | application: name}
end
@doc """
Creates a new request and adds a header to it.
Same as `header(%Request{}, header, value)`
"""
def header(header, value), do: header(%__MODULE__{}, header, value)
@doc """
Adds a header to a request. Overrides an existing header with the same name if it is present.
## Examples
iex> header(%Request{}, :"X-Http-Dummy", "1708ef66-d37d-4ce0-85d8-6c062863418a")
%Ibanity.Request{
headers: [
Accept: "application/json",
"Content-Type": "application/json",
"X-Http-Dummy": "1708ef66-d37d-4ce0-85d8-6c062863418a"
],
...
}
iex> %Request{headers: ["X-Http-Dummy": "1708ef66-d37d-4ce0-85d8-6c062863418a"]}
...> |> header(:"X-Http-Dummy", "396c66d5-daf6-48ff-ba4a-58b9be319ec5")
%Ibanity.Request{
headers: ["X-Http-Dummy": "396c66d5-daf6-48ff-ba4a-58b9be319ec5"],
...
}
"""
def header(%__MODULE__{} = request, header, value) do
%__MODULE__{request | headers: Keyword.put(request.headers, header, value)}
end
@doc """
Creates a new request and adds headers to it.
Same as `headers(%Request{}, headers)`
"""
def headers(headers), do: headers(%__MODULE__{}, headers)
@doc """
Adds multiple headers to a request, all at once.
Overrides existing headers with the same name if they are present.
## Examples
iex> headers(%Request{}, ["X-Dummy1": "1708ef66", "X-Dummy2": "28207dbe"])
%Ibanity.Request{
headers: [
Accept: "application/json",
"Content-Type": "application/json",
"X-Dummy1": "1708ef66",
"X-Dummy2": "28207dbe"
],
...
}
iex> %Request{headers: ["X-Dummy1": "1708ef66", "X-Dummy2": "28207dbe"]}
...> |> headers(["X-Dummy1": "1708ef66", "X-Dummy3": "5127d068"])
%Ibanity.Request{
headers: [
"X-Dummy2": "28207dbe",
"X-Dummy1": "1708ef66",
"X-Dummy3": "5127d068"
],
...
}
"""
def headers(%__MODULE__{} = request, headers) when is_list(headers) do
%__MODULE__{request | headers: Keyword.merge(request.headers, headers)}
end
@doc """
Creates a new `Ibanity.Request` and sets the [idempotency key](https://documentation.ibanity.com/api#idempotency) to it
"""
def idempotency_key(key), do: idempotency_key(%__MODULE__{}, key)
@doc """
Sets the [idempotency key](https://documentation.ibanity.com/api#idempotency) to the request
"""
def idempotency_key(%__MODULE__{} = request, key) when is_binary(key) do
%__MODULE__{request | idempotency_key: key}
end
@doc """
Creates a new request and adds the query params and their corresponding values to it, all at once.
Overrides existing query params with the same name
"""
def query_params(query_params), do: query_params(%__MODULE__{}, query_params)
@doc """
Adds the query params and their corresponding values to the request, all at once.
Overrides existing query params with the same name
"""
def query_params(%__MODULE__{} = request, query_params) do
%__MODULE__{request | query_params: Map.merge(request.query_params, Enum.into(query_params, %{}))}
end
@doc """
Creates a new request and sets the [customer access token](https://documentation.ibanity.com/api#customer-access-token) to it
"""
def customer_access_token(token) when is_binary(token),
do: customer_access_token(%__MODULE__{}, token)
def customer_access_token(%Ibanity.Xs2a.CustomerAccessToken{} = access),
do: customer_access_token(access.token)
@doc """
Sets the [customer access token](https://documentation.ibanity.com/api#customer-access-token) to the request
"""
def customer_access_token(%__MODULE__{} = request, %Ibanity.Xs2a.CustomerAccessToken{} = access) do
customer_access_token(request, access.token)
end
@doc """
Sets the [customer access token](https://documentation.ibanity.com/api#customer-access-token) to the request
"""
def customer_access_token(%__MODULE__{} = request, token) do
%__MODULE__{request | customer_access_token: token}
end
@doc """
Creates a new request and adds the attribute and its value to it
"""
def attribute(attribute, value), do: attribute(%__MODULE__{}, attribute, value)
@doc """
Adds an attribute to a request. Overrides existing attribute with the same name
"""
def attribute(%__MODULE__{} = request, attribute, value) do
%__MODULE__{request | attributes: Map.put(request.attributes, attribute, value)}
end
@doc """
Creates a new request and adds the attributes and their corresponding value to it, all at once.
Overrides existing attributes with the same name
"""
def attributes(attributes), do: attributes(%__MODULE__{}, attributes)
@doc """
Adds the attributes and their corresponding value to the request, all at once.
Overrides existing attributes with the same name
"""
def attributes(%__MODULE__{} = request, attributes) when is_list(attributes) do
%__MODULE__{request | attributes: Map.merge(request.attributes, Enum.into(attributes, %{}))}
end
def meta(%__MODULE__{} = request, meta) do
%__MODULE__{request | meta: meta}
end
@doc """
Creates a new request and sets the resource type to it.
"""
def resource_type(type), do: resource_type(%__MODULE__{}, type)
@doc """
Sets the resource type to the request.
"""
def resource_type(%__MODULE__{} = request, type) do
%__MODULE__{request | resource_type: type}
end
@doc """
Creates a new request and sets the `:id` URI identifier.
It is equivalent to `id(:id, value)`.
"""
def id(value), do: id(%__MODULE__{}, :id, value)
@doc """
Sets the `:id` URI identifier.
It is equivalent to `id(request, :id, value)`.
"""
def id(%__MODULE__{} = request, value), do: id(request, :id, value)
@doc """
Creates a new request and adds a URI identifier to it.
"""
def id(name, value), do: id(%__MODULE__{}, name, value)
@doc """
Sets the URI identifier to its corresponding value. Overrides the existing value if the identifier is already present
"""
def id(%__MODULE__{} = request, name, value) do
%__MODULE__{request | resource_ids: Keyword.put(request.resource_ids, name, value)}
end
@doc """
Creates a new request and adds multiple URI identifiers at once.
"""
def ids(ids), do: ids(%__MODULE__{}, ids)
@doc """
Sets URI template identifiers to their corresponding values. Overrides existing values if identifiers are already present
"""
def ids(%__MODULE__{} = request, ids) when is_list(ids) do
%__MODULE__{request | resource_ids: Keyword.merge(request.resource_ids, ids)}
end
def limit(max), do: limit(%__MODULE__{}, max)
@doc """
Sets the maximum number of items to fetch at once. See [https://documentation.ibanity.com/api#pagination](https://documentation.ibanity.com/api#pagination)
"""
def limit(%__MODULE__{} = request, max) when is_integer(max) do
%__MODULE__{request | page: Map.merge(request.page, %{limit: max})}
end
def page_number(value), do: page_number(%__MODULE__{}, value)
@doc """
Sets the page of results to fetch using page-based pagination. See [https://documentation.ibanity.com/api#page-based-pagination](https://documentation.ibanity.com/api#page-based-pagination)
"""
def page_number(%__MODULE__{} = request, value) when is_integer(value) do
%__MODULE__{request | page: Map.merge(request.page, %{number: value})}
end
def page_size(value), do: page_size(%__MODULE__{}, value)
@doc """
Sets the maximum number of results to fetch per page. See [https://documentation.ibanity.com/api#page-based-pagination](https://documentation.ibanity.com/api#page-based-pagination)
"""
def page_size(%__MODULE__{} = request, value) when is_integer(value) do
%__MODULE__{request | page: Map.merge(request.page, %{size: value})}
end
def before_id(id), do: before_id(%__MODULE__{}, id)
@doc """
Sets the pagination cursor to the given id. See [https://documentation.ibanity.com/api#pagination](https://documentation.ibanity.com/api#pagination)
"""
def before_id(%__MODULE__{} = request, id) do
%__MODULE__{request | page: Map.merge(request.page, %{before: id})}
end
def after_id(id), do: after_id(%__MODULE__{}, id)
@doc """
Sets the pagination cursor to the given id. See [https://documentation.ibanity.com/api#pagination](https://documentation.ibanity.com/api#pagination)
"""
def after_id(%__MODULE__{} = request, id) do
%__MODULE__{request | page: Map.merge(request.page, %{after: id})}
end
@doc """
Checks if the request contains a specific id.
"""
def has_id?(%__MODULE__{} = request, id) do
Keyword.has_key?(request.resource_ids, id)
end
@doc """
Fetches an id from the request
"""
def get_id(%__MODULE__{} = request, id) do
Keyword.get(request.resource_ids, id)
end
@doc """
Checks if the request contains a specific header.
"""
def has_header?(%__MODULE__{} = request, header) do
Keyword.has_key?(request.headers, header)
end
@doc """
Fetches a header from the request
"""
def get_header(%__MODULE__{} = request, header) do
Keyword.get(request.headers, header)
end
def has_customer_access_token?(%__MODULE__{} = request) do
not is_nil(request.customer_access_token)
end
end
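# A short pipeline sketch (token and id values are hypothetical):
#
#     Ibanity.Request.customer_access_token("access-token")
#     |> Ibanity.Request.id(:account_id, "b2c76f6b-ab34-4843-8ef3-84300ef98a09")
#     |> Ibanity.Request.limit(10)
#     |> Ibanity.Request.header(:"X-Http-Dummy", "some-value")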
|
lib/ibanity/request.ex
| 0.893007
| 0.40987
|
request.ex
|
starcoder
|
defmodule PBFParser do
@moduledoc """
Elixir parser and decoder for OpenStreetMap PBF format described in [PBF file specification](https://wiki.openstreetmap.org/wiki/PBF_Format#Encoding_OSM_entities_into_fileblocks). This library provides a collection of functions that one can use to build their own decoder flow of .pbf files, as seen in examples.
## Examples
#### With Stream
PBFParser.stream("test.osm.pbf")
|> Stream.drop(1)
|> Stream.map(&PBFParser.decompress_block/1)
|> Stream.map(&PBFParser.decode_block/1)
|> Stream.each(&IO.inspect/1)
|> Stream.run()
#### With Flow
PBFParser.stream("test.osm.pbf")
|> Stream.drop(1)
|> Stream.take(1_000)
|> Flow.from_enumerable(max_demand: 50)
|> Flow.partition(max_demand: 5, stages: 5)
|> Flow.map(&PBFParser.decompress_block/1)
|> Flow.partition(max_demand: 5, stages: 10)
|> Flow.map(&PBFParser.decode_block/1)
|> Flow.partition(window: Flow.Window.count(20))
|> Flow.reduce(fn -> [] end, fn batch, total -> [batch | total] end)
|> Flow.emit(:state)
|> Flow.partition(max_demand: 5, stages: 1)
|> Flow.each(fn item -> IO.inspect(length(item)) end)
|> Flow.run()
"""
alias PBFParser.Proto.OsmFormat.{
PrimitiveBlock,
HeaderBlock
}
@doc """
Opens the .pbf file specified by the given path and returns a `Stream` yielding zlib-encoded data of consecutive Blobs.
First emitted chunk of data should represent a `HeaderBlock`,
all those coming after should be decoded as `PrimitiveBlock`s.
"""
@spec stream(String.t()) :: Enumerable.t()
defdelegate stream(path), to: PBFParser.Reader
@doc """
Decompresses zlib encoded header data (as obtained from `PBFParser.stream/1`).
Returns `HeaderBlock`, a struct generated directly from PBF protobuf specification.
"""
@spec decompress_header(iodata()) :: HeaderBlock.t()
defdelegate decompress_header(data), to: PBFParser.Decoder
@doc """
Decompresses zlib encoded block data (as obtained from `PBFParser.stream/1`).
Returns `PrimitiveBlock`, a struct generated directly from PBF protobuf specification.
"""
@spec decompress_block(iodata()) :: PrimitiveBlock.t()
defdelegate decompress_block(data), to: PBFParser.Decoder
@doc """
Decodes the raw `PrimitiveBlock` (as obtained from `PBFParser.decompress_block/1`) into a more usable format.
Each block usually contains around 8000 densely packed node entities and a number of relation and way
entities. Those are extracted along with accompanying metadata.
Returns a list containing `PBFParser.Data.Node`, `PBFParser.Data.Relation` and `PBFParser.Data.Way` structs.
## Example
iex(1)> PBFParser.decode_block(...)
[
...
%PBFParser.Data.Node{
id: 219219898,
info: %PBFParser.Data.Info{
changeset: 0,
timestamp: #DateTime<2008-01-11 23:29:41.000Z>,
uid: 0,
user: "",
version: 1,
visible: nil
},
latitude: 14.860650000000001,
longitude: -83.43016,
tags: %{"created_by" => "JOSM"}
},
...
]
"""
@spec decode_block(PrimitiveBlock.t()) :: [PBFParser.Data.Node.t() | PBFParser.Data.Relation.t() | PBFParser.Data.Way.t()]
defdelegate decode_block(block), to: PBFParser.Decoder
end
|
lib/pbf_parser.ex
| 0.913701
| 0.698021
|
pbf_parser.ex
|
starcoder
|
import Kernel, except: [apply: 2]
defmodule Ecto.Query.Builder.Select do
@moduledoc false
alias Ecto.Query.Builder
@doc """
Escapes a select.
It allows tuples, lists and variables at the top level. Inside the
tuples and lists query expressions are allowed.
## Examples
iex> escape({1, 2}, [], __ENV__)
{{:{}, [], [:{}, [], [1, 2]]}, {%{}, %{}}}
iex> escape([1, 2], [], __ENV__)
{[1, 2], {%{}, %{}}}
iex> escape(quote(do: x), [x: 0], __ENV__)
{{:{}, [], [:&, [], [0]]}, {%{}, %{}}}
"""
@spec escape(Macro.t, Keyword.t, Macro.Env.t) :: {Macro.t, {%{}, %{}}}
def escape(other, vars, env) do
if take?(other) do
{{:{}, [], [:&, [], [0]]}, {%{}, %{0 => {:any, other}}}}
else
escape(other, {%{}, %{}}, vars, env)
end
end
# Tuple
defp escape({left, right}, params_take, vars, env) do
escape({:{}, [], [left, right]}, params_take, vars, env)
end
# Tuple
defp escape({:{}, _, list}, params_take, vars, env) do
{list, params_take} = Enum.map_reduce(list, params_take, &escape(&1, &2, vars, env))
expr = {:{}, [], [:{}, [], list]}
{expr, params_take}
end
# Map
defp escape({:%{}, _, [{:|, _, [data, pairs]}]}, params_take, vars, env) do
{data, params_take} = escape(data, params_take, vars, env)
{pairs, params_take} = escape_pairs(pairs, params_take, vars, env)
{{:{}, [], [:%{}, [], [{:{}, [], [:|, [], [data, pairs]]}]]}, params_take}
end
defp escape({:%{}, _, pairs}, params_take, vars, env) do
{pairs, params_take} = escape_pairs(pairs, params_take, vars, env)
{{:{}, [], [:%{}, [], pairs]}, params_take}
end
# List
defp escape(list, params_take, vars, env) when is_list(list) do
Enum.map_reduce(list, params_take, &escape(&1, &2, vars, env))
end
# map/struct(var, [:foo, :bar])
defp escape({tag, _, [{var, _, context}, fields]}, {params, take}, vars, env)
when tag in [:map, :struct] and is_atom(var) and is_atom(context) do
taken = escape_fields(fields, tag, env)
expr = Builder.escape_var(var, vars)
take = Map.put(take, Builder.find_var!(var, vars), {tag, taken})
{expr, {params, take}}
end
# var
defp escape({var, _, context}, params_take, vars, _env)
when is_atom(var) and is_atom(context) do
expr = Builder.escape_var(var, vars)
{expr, params_take}
end
defp escape(other, {params, take}, vars, env) do
{other, params} = Builder.escape(other, :any, params, vars, env)
{other, {params, take}}
end
defp escape_pairs(pairs, params_take, vars, env) do
Enum.map_reduce pairs, params_take, fn({k, v}, acc) ->
{k, acc} = escape_key(k, acc, vars, env)
{v, acc} = escape(v, acc, vars, env)
{{k, v}, acc}
end
end
defp escape_key(k, params_take, _vars, _env) when is_atom(k) do
{k, params_take}
end
defp escape_key(k, params_take, vars, env) do
escape(k, params_take, vars, env)
end
defp escape_fields({:^, _, [interpolated]}, tag, _env) do
quote do
Ecto.Query.Builder.Select.fields!(unquote(tag), unquote(interpolated))
end
end
defp escape_fields(expr, tag, env) do
case Macro.expand(expr, env) do
fields when is_list(fields) ->
fields
_ ->
Builder.error! "`#{tag}/2` in `select` expects either a literal or " <>
"an interpolated list of atom fields"
end
end
@doc """
Called at runtime to verify a field.
"""
def fields!(tag, fields) do
if take?(fields) do
fields
else
raise ArgumentError,
"expected a list of fields in `#{tag}/2` inside `select`, got: `#{inspect fields}`"
end
end
defp take?(fields) do
is_list(fields) and Enum.all?(fields, fn
{k, v} when is_atom(k) -> take?(List.wrap(v))
k when is_atom(k) -> true
_ -> false
end)
end
@doc """
Called at runtime for interpolated/dynamic selects.
"""
def select!(query, fields, file, line) do
take = %{0 => {:any, fields!(:select, fields)}}
expr = %Ecto.Query.SelectExpr{expr: {:&, [], [0]}, take: take, file: file, line: line}
apply(query, expr)
end
@doc """
Builds a quoted expression.
The quoted expression should evaluate to a query at runtime.
If possible, it does all calculations at compile time to avoid
runtime work.
"""
@spec build(Macro.t, [Macro.t], Macro.t, Macro.Env.t) :: Macro.t
def build(query, _binding, {:^, _, [var]}, env) do
quote do
Ecto.Query.Builder.Select.select!(unquote(query), unquote(var),
unquote(env.file), unquote(env.line))
end
end
def build(query, binding, expr, env) do
{query, binding} = Builder.escape_binding(query, binding)
{expr, {params, take}} = escape(expr, binding, env)
params = Builder.escape_params(params)
take = {:%{}, [], Map.to_list(take)}
select = quote do: %Ecto.Query.SelectExpr{
expr: unquote(expr),
params: unquote(params),
file: unquote(env.file),
line: unquote(env.line),
take: unquote(take)}
Builder.apply_query(query, __MODULE__, [select], env)
end
@doc """
The callback applied by `build/4` to build the query.
"""
@spec apply(Ecto.Queryable.t, term) :: Ecto.Query.t
def apply(%Ecto.Query{select: nil} = query, expr) do
%{query | select: expr}
end
def apply(%Ecto.Query{}, _expr) do
Builder.error! "only one select expression is allowed in query"
end
def apply(query, expr) do
apply(Ecto.Queryable.to_query(query), expr)
end
end
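# Illustration only: a few `select` shapes that the escaper above handles,
# written with the public Ecto.Query API.
#
#     import Ecto.Query
#
#     from p in "posts", select: {p.id, p.title}           # tuple
#     from p in "posts", select: %{id: p.id}               # map
#     from p in "posts", select: [:id, :title]             # take list
#     from p in "posts", select: struct(p, [:id, :title])  # struct/2
#
#     fields = [:id, :title]
#     from p in "posts", select: ^fields                   # dynamic, handled by select!/4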
|
deps/ecto/lib/ecto/query/builder/select.ex
| 0.740925
| 0.402891
|
select.ex
|
starcoder
|
defmodule Plaid.PaymentInitiation.Payments do
@moduledoc """
Functions for Plaid `payment_initiation/payment` endpoints.
"""
import Plaid, only: [make_request_with_cred: 4, validate_cred: 1]
alias Plaid.Utils
@derive Jason.Encoder
defstruct payment_id: nil,
status: nil,
request_id: nil
@type t :: %__MODULE__{
payment_id: String.t(),
status: String.t(),
request_id: String.t()
}
@type params :: %{required(atom) => String.t() | map}
@type config :: %{required(atom) => String.t()}
@endpoint :"payment_initiation/payment"
defmodule Payment do
@doc """
Plaid Payment data structure.
"""
@derive Jason.Encoder
defstruct payment_id: nil,
payment_token: nil,
payment_token_expiration_time: nil,
reference: nil,
amount: 0,
status: nil,
last_status_update: nil,
recipient_id: nil,
schedule: nil,
request_id: nil
@type t :: %__MODULE__{
payment_id: String.t(),
payment_token: String.t(),
payment_token_expiration_time: String.t(),
reference: String.t(),
amount: Plaid.PaymentInitiation.Payments.Payment.Amount.t(),
status: String.t(),
last_status_update: String.t(),
recipient_id: String.t(),
schedule: Plaid.PaymentInitiation.Payments.Payment.Schedule.t(),
request_id: String.t()
}
defmodule Amount do
@moduledoc """
Plaid Payment Amount data structure.
"""
defstruct currency: nil,
amount: 0
@type t :: %__MODULE__{
currency: String.t(),
amount: float
}
end
defmodule Schedule do
@moduledoc """
Plaid Payment Schedule data structure.
"""
defstruct interval: nil,
interval_execution_day: nil,
start_date: nil
@type t :: %__MODULE__{
interval: String.t(),
interval_execution_day: integer(),
start_date: String.t()
}
end
end
@doc """
Creates payment.
Parameters
```
%{
recipient_id: "",
reference: "",
amount: %{
currency: "",
value: 0.00
},
schedule: %{
interval: "",
interval_execution_day: 1,
start_date: ""
},
}
```
"""
@spec create(params, config | nil) ::
{:ok, Plaid.PaymentInitiation.Payments.t()} | {:error, Plaid.Error.t()}
def create(params, config \\ %{}) do
config = validate_cred(config)
endpoint = "#{@endpoint}/create"
make_request_with_cred(:post, endpoint, config, params)
|> Utils.handle_resp(@endpoint)
end
@doc """
Gets payment by payment_id.
Parameters
```
%{
payment_id: ""
}
```
"""
@spec get(params, config | nil) ::
{:ok, Plaid.PaymentInitiation.Payments.Payment.t()} | {:error, Plaid.Error.t()}
def get(params, config \\ %{}) do
config = validate_cred(config)
endpoint = "#{@endpoint}/get"
make_request_with_cred(:post, endpoint, config, params)
|> Utils.handle_resp(@endpoint)
end
@doc """
Lists all payments.
Parameters
```
%{
options: %{
count: 1,
cursor: ""
}
}
```
"""
@spec list(params, config | nil) ::
{:ok, [Plaid.PaymentInitiation.Payments.Payment.t()]} | {:error, Plaid.Error.t()}
def list(params, config \\ %{}) do
config = validate_cred(config)
endpoint = "#{@endpoint}/list"
make_request_with_cred(:post, endpoint, config, params)
|> Utils.handle_resp(@endpoint)
end
end
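# A hedged call sketch (credentials and ids are placeholders; config keys
# depend on how `validate_cred/1` is configured in this library):
#
#     params = %{
#       recipient_id: "recipient-id-sandbox-123",
#       reference: "Purchase Order 123",
#       amount: %{currency: "GBP", value: 12.34}
#     }
#     {:ok, %Plaid.PaymentInitiation.Payments{payment_id: payment_id}} =
#       Plaid.PaymentInitiation.Payments.create(params, %{client_id: "id", secret: "secret"})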
|
lib/plaid/payment_initiation/payments.ex
| 0.773259
| 0.604487
|
payments.ex
|
starcoder
|
defmodule ConvCase do
@moduledoc """
Functions to convert strings, atoms and map keys between `camelCase`,
`snake_case` and `kebab-case`.
Currently these functions do not support UTF-8.
If this package does not fit your requirements, take a look here:
* [Macro.camelize/1](https://hexdocs.pm/elixir/Macro.html#camelize/1) and
[Macro.underscore/1](https://hexdocs.pm/elixir/Macro.html#underscore/1)
* [ReCase](https://github.com/sobolevn/recase) helps you to convert a string
from any case to any case.
* [ProperCase](https://github.com/johnnyji/proper_case) an Elixir library that
converts keys in maps between snake_case and camel_case.
"""
@underscore ?_
@hyphen ?-
@separator [@underscore, @hyphen]
defguardp is_upper(char) when char >= ?A and char <= ?Z
defguardp is_lower(char) when not (char >= ?A and char <= ?Z)
defguardp is_separator(char) when char in @separator
@doc """
Converts `camelCase` and `kebab-case` into `snake_case`.
For strings, the function returns the converted string.
## Examples
iex> ConvCase.to_snake_case("fooBar")
"foo_bar"
iex> ConvCase.to_snake_case("foo-bar")
"foo_bar"
For atoms, the function returns the converted atom. This function uses
`String.to_existing_atom/1`.
## Examples
iex> ConvCase.to_snake_case(:fooBar)
:foo_bar
For lists, the function returns a list with converted values.
## Examples
iex> ConvCase.to_snake_case(["fooBar", "foo-bar"])
["foo_bar", "foo_bar"]
For tuples, the function returns a tuple with converted values.
## Examples
iex> ConvCase.to_snake_case({"fooBar", "foo-bar"})
{"foo_bar", "foo_bar"}
For maps, the function returns a map with converted keys. The type of the key
will not be changed. New atoms are generated by `String.to_existing_atom/1`.
Keys of nested maps are converted too.
## Examples
iex> ConvCase.to_snake_case(%{fooBar: %{"foo-bar" => "foo-bar"}})
%{foo_bar: %{"foo_bar" => "foo-bar"}}
For other types, the function returns the given value.
## Examples
iex> ConvCase.to_snake_case(42)
42
"""
@spec to_snake_case(any) :: any
def to_snake_case(string)
def to_snake_case(""), do: ""
def to_snake_case(atom)
when is_atom(atom),
do:
atom
|> Atom.to_string()
|> to_snake_case()
|> String.to_existing_atom()
def to_snake_case(strings)
when is_list(strings),
do: Enum.map(strings, &to_snake_case/1)
def to_snake_case(tuple)
when is_tuple(tuple),
do:
tuple
|> Tuple.to_list()
|> Enum.map(&to_snake_case/1)
|> List.to_tuple()
def to_snake_case(map)
when is_map(map),
do: convert_map(map, &to_snake_case/1)
def to_snake_case(<<a, b, t::binary>>)
when is_upper(a) and is_lower(b),
do: <<to_lower(a), b>> <> do_to_separator_case(t, @underscore)
def to_snake_case(string)
when is_binary(string),
do: do_to_separator_case(string, @underscore)
def to_snake_case(any), do: any
@doc """
Converts `snake_case` and `kebab-case` into `camelCase`.
For strings, the function returns the converted string.
## Examples
iex> ConvCase.to_camel_case("foo_bar")
"fooBar"
iex> ConvCase.to_camel_case("foo-bar")
"fooBar"
For atoms, the function returns the converted atom. This function uses
`String.to_existing_atom/1`.
## Examples
iex> ConvCase.to_camel_case(:foo_bar)
:fooBar
For lists, the function returns a list with converted values.
## Examples
iex> ConvCase.to_camel_case(["foo_bar", "foo-bar"])
["fooBar", "fooBar"]
For tuples, the function returns a tuple with converted values.
## Examples
iex> ConvCase.to_camel_case({"foo_bar", "foo-bar"})
{"fooBar", "fooBar"}
For maps, the function returns a map with converted keys. The type of the key
will not be changed. New atoms are generated by `String.to_existing_atom/1`.
Keys of nested maps are converted too.
## Examples
iex> ConvCase.to_camel_case(%{foo_bar: %{"foo-bar" => "foo-bar"}})
%{fooBar: %{"fooBar" => "foo-bar"}}
For other types, the function returns the given value.
## Examples
iex> ConvCase.to_camel_case(42)
42
"""
@spec to_camel_case(any) :: any
def to_camel_case(value)
def to_camel_case(""), do: ""
def to_camel_case(atom)
when is_atom(atom),
do:
atom
|> Atom.to_string()
|> to_camel_case()
|> String.to_existing_atom()
def to_camel_case(strings)
when is_list(strings),
do: Enum.map(strings, &to_camel_case/1)
def to_camel_case(tuple)
when is_tuple(tuple),
do:
tuple
|> Tuple.to_list()
|> Enum.map(&to_camel_case/1)
|> List.to_tuple()
def to_camel_case(map)
when is_map(map),
do: convert_map(map, &to_camel_case/1)
def to_camel_case(<<a, b, t::binary>>)
when is_separator(a),
do: <<to_upper(b)>> <> to_camel_case(t)
def to_camel_case(<<h, t::binary>>), do: <<h>> <> to_camel_case(t)
def to_camel_case(any), do: any
@doc """
Converts `snake_case` and `camelCase` into `kebab-case`.
For strings, the function returns the converted string.
## Examples
iex> ConvCase.to_kebab_case("foo_bar")
"foo-bar"
iex> ConvCase.to_kebab_case("fooBar")
"foo-bar"
For atoms, the function returns the converted atom. This function uses
`String.to_existing_atom/1`.
## Examples
iex> ConvCase.to_kebab_case(:foo_bar)
:"foo-bar"
For lists, the function returns a list with converted values.
## Examples
iex> ConvCase.to_kebab_case(["foo_bar", "fooBar"])
["foo-bar", "foo-bar"]
For tuples, the function returns a tuple with converted values.
## Examples
iex> ConvCase.to_kebab_case({"foo_bar", "fooBar"})
{"foo-bar", "foo-bar"}
For maps, the function returns a map with converted keys. The type of the key
will not be changed. New atoms are generated by `String.to_existing_atom/1`.
Keys of nested maps are converted too.
## Examples
iex> ConvCase.to_kebab_case(%{foo_bar: %{"fooBar" => "fooBar"}})
%{"foo-bar": %{"foo-bar" => "fooBar"}}
For other types, the function returns the given value.
## Examples
iex> ConvCase.to_kebab_case(42)
42
"""
@spec to_kebab_case(any) :: any
def to_kebab_case(value)
def to_kebab_case(""), do: ""
def to_kebab_case(atom)
when is_atom(atom),
do:
atom
|> Atom.to_string()
|> to_kebab_case()
|> String.to_existing_atom()
def to_kebab_case(strings)
when is_list(strings),
do: Enum.map(strings, &to_kebab_case/1)
def to_kebab_case(tuple)
when is_tuple(tuple),
do:
tuple
|> Tuple.to_list()
|> Enum.map(&to_kebab_case/1)
|> List.to_tuple()
def to_kebab_case(map)
when is_map(map),
do: convert_map(map, &to_kebab_case/1)
def to_kebab_case(<<a, b, t::binary>>)
when is_upper(a) and is_lower(b),
do: <<to_lower(a), b>> <> do_to_separator_case(t, @hyphen)
def to_kebab_case(string)
when is_binary(string),
do: do_to_separator_case(string, @hyphen)
def to_kebab_case(any), do: any
# Convert string with given separator.
defp do_to_separator_case("", _separator), do: ""
defp do_to_separator_case(<<h, t::binary>>, separator)
when is_separator(h),
do: <<separator>> <> do_to_separator_case(t, separator)
defp do_to_separator_case(<<a, b, t::binary>>, separator)
when is_lower(a) and is_upper(b),
do: <<a, separator, to_lower(b)>> <> do_to_separator_case(t, separator)
defp do_to_separator_case(<<h, t::binary>>, separator),
do: <<h>> <> do_to_separator_case(t, separator)
# Convert map keys with the given converter.
defp convert_map(%{__struct__: _} = struct, _), do: struct
defp convert_map(map, converter) when is_map(map) do
for {key, value} <- map,
into: %{},
do: {convert_key(key, converter), convert_map(value, converter)}
end
defp convert_map(list, converter)
when is_list(list),
do: Enum.map(list, &convert_map(&1, converter))
defp convert_map(map, _converter), do: map
# Convert key with the given converter.
defp convert_key(key, converter)
when is_atom(key),
do: key |> converter.()
defp convert_key(key, converter), do: converter.(key)
# Convert a lowercase character into an uppercase character.
defp to_upper(char), do: char - 32
# Convert an uppercase character into a lowercase character.
defp to_lower(char), do: char + 32
end
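# The ±32 arithmetic above relies on the ASCII layout, where corresponding
# uppercase and lowercase letters are exactly 32 code points apart:
#
#     iex> ?a - 32 == ?A
#     true
#     iex> ?Z + 32 == ?z
#     true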
|
lib/conv_case.ex
| 0.921689
| 0.623979
|
conv_case.ex
|
starcoder
|
defmodule Ash.Resource.Validation.Builtins do
@moduledoc """
Built in validations that are available to all resources
The functions in this module are imported by default in the validations section.
"""
alias Ash.Resource.Validation
@doc """
Validates that an attribute's value is in a given list
"""
def one_of(attribute, values) do
{Validation.OneOf, attribute: attribute, values: values}
end
@doc "Validates that an attribute is being changed"
def changing(field) do
{Validation.Changing, field: field}
end
@doc "Validates that a field or argument matches another field or argument"
def confirm(field, confirmation) do
{Validation.Confirm, [field: field, confirmation: confirmation]}
end
@doc "Validates that an attribute on the original record does not equal a specific value"
def attribute_does_not_equal(attribute, value) do
{Validation.AttributeDoesNotEqual, attribute: attribute, value: value}
end
@doc "Validates that an attribute on the original record equals a specific value"
def attribute_equals(attribute, value) do
{Validation.AttributeEquals, attribute: attribute, value: value}
end
@doc "Validates that an attribute on the original record meets the given length criteria"
def string_length(attribute, opts \\ []) do
{Validation.StringLength, Keyword.merge(opts, attribute: attribute)}
end
@doc "Validates that attribute meets the given criteria"
def compare(attribute, opts \\ []) do
{Validation.Compare, Keyword.merge(opts, attribute: attribute)}
end
@doc """
Validates that an attribute's value matches a given regex or string, using the provided error message if not.
`String.match?/2` is used to determine if it matches.
"""
def match(attribute, match, message \\ nil) do
message = message || "must match #{match}"
{Validation.Match, attribute: attribute, match: match, message: message}
end
@doc """
Validates the presence of a list of attributes
If no options are provided, validates that they are all present.
#{Ash.OptionsHelpers.docs(Keyword.delete(Validation.Present.schema(), :attributes))}
"""
def present(attributes, opts \\ []) do
if opts == [] do
attributes = List.wrap(attributes)
{Validation.Present, attributes: attributes, exactly: Enum.count(attributes)}
else
opts = Keyword.put(opts, :attributes, List.wrap(attributes))
{Validation.Present, opts}
end
end
@doc """
Validates the absence of a list of attributes
If no options are provided, validates that they are all absent.
The docs behave the same as `present/2`, except they validate absence.
"""
def absent(attributes, opts \\ []) do
if opts == [] do
{Validation.Present, attributes: List.wrap(attributes), exactly: 0}
else
attributes = List.wrap(attributes)
count = Enum.count(attributes)
new_opts =
case Keyword.fetch(opts, :at_least) do
{:ok, value} ->
Keyword.put(opts, :at_most, count - value)
:error ->
Keyword.put(opts, :at_most, 0)
end
new_opts =
case Keyword.fetch(opts, :at_most) do
{:ok, value} ->
Keyword.put(new_opts, :at_least, count - value)
:error ->
Keyword.put(new_opts, :at_least, 0)
end
present(attributes, new_opts)
end
end
end
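# A hedged sketch of how these builtins are typically used inside a resource's
# `validations` section (resource and attribute names are hypothetical):
#
#     validations do
#       validate present([:email, :username], at_least: 1)
#       validate match(:email, ~r/@/)
#       validate one_of(:status, [:active, :archived])
#     end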
|
lib/ash/resource/validation/builtins.ex
| 0.833816
| 0.515437
|
builtins.ex
|
starcoder
|
defmodule Plaid.PaymentInitiation do
@moduledoc """
[Plaid Payment Initiation API](https://plaid.com/docs/api/products/#payment-initiation-uk-and-europe) calls and schema.
"""
alias Plaid.Castable
alias Plaid.PaymentInitiation.{
Address,
Amount,
BACS,
Payment,
Recipient,
Schedule
}
defmodule CreateRecipientResponse do
@moduledoc """
[Plaid API /payment_initiation/recipient/create response schema.](https://plaid.com/docs/api/products/#payment_initiationrecipientcreate)
"""
@behaviour Castable
@type t :: %__MODULE__{
recipient_id: String.t(),
request_id: String.t()
}
defstruct [
:recipient_id,
:request_id
]
@impl true
def cast(generic_map) do
%__MODULE__{
recipient_id: generic_map["recipient_id"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Create a recipient for payment initiation.
Does a `POST /payment_initiation/recipient/create` call which creates a payment
recipient for payment initiation.
The endpoint is idempotent: if a request has already been made with the
same payment details, Plaid will return the same `recipient_id`.
## Params
* `name` - The name of the recipient.
## Options
* `:iban` - The International Bank Account Number (IBAN) for the recipient.
* `:bacs` - The sort code of the account.
* `:address` - The address of the payment recipient.
If `:bacs` data is not provided, `:iban` becomes required.
## Examples
PaymentInitiation.create_recipient("Wonder Wallet", client_id: "123", secret: "abc")
{:ok, %PaymentInitiation.CreateRecipientResponse{}}
"""
@spec create_recipient(name :: String.t(), options, Plaid.config()) ::
{:ok, CreateRecipientResponse.t()} | {:error, Plaid.Error.t()}
when options: %{
optional(:iban) => String.t(),
optional(:bacs) => BACS.t(),
optional(:address) => Address.t()
}
def create_recipient(name, options \\ %{}, config) do
options_payload = Map.take(options, [:iban, :bacs, :address])
payload =
%{}
|> Map.put(:name, name)
|> Map.merge(options_payload)
Plaid.Client.call(
"/payment_initiation/recipient/create",
payload,
CreateRecipientResponse,
config
)
end
defmodule GetRecipientResponse do
@moduledoc """
[Plaid API /payment_initiation/recipient/get response schema.](https://plaid.com/docs/api/products/#payment_initiationrecipientget)
"""
@behaviour Castable
@type t :: %__MODULE__{
recipient_id: String.t(),
name: String.t(),
address: Address.t() | nil,
iban: String.t() | nil,
bacs: BACS.t() | nil,
request_id: String.t()
}
defstruct [
:recipient_id,
:name,
:address,
:iban,
:bacs,
:request_id
]
@impl true
def cast(generic_map) do
%__MODULE__{
recipient_id: generic_map["recipient_id"],
name: generic_map["name"],
address: Castable.cast(Address, generic_map["address"]),
iban: generic_map["iban"],
bacs: Castable.cast(BACS, generic_map["bacs"]),
request_id: generic_map["request_id"]
}
end
end
@doc """
Get a recipient for payment initiation.
Does a `POST /payment_initiation/recipient/get` call to
get details about a payment recipient.
## Params
* `recipient_id` - The ID of the recipient.
## Examples
PaymentInitiation.get_recipient("recipient-id-sandbox-123xxx", client_id: "123", secret: "abc")
{:ok, %PaymentInitiation.GetRecipientResponse{}}
"""
@spec get_recipient(recipient_id :: String.t(), Plaid.config()) ::
{:ok, GetRecipientResponse.t()} | {:error, Plaid.Error.t()}
def get_recipient(recipient_id, config) do
Plaid.Client.call(
"/payment_initiation/recipient/get",
%{recipient_id: recipient_id},
GetRecipientResponse,
config
)
end
defmodule ListRecipientsResponse do
@moduledoc """
[Plaid API /payment_initiation/recipient/list response schema.](https://plaid.com/docs/api/products/#payment_initiationrecipientlist)
"""
@behaviour Castable
@type t :: %__MODULE__{
recipients: [Recipient.t()],
request_id: String.t()
}
defstruct [
:recipients,
:request_id
]
@impl true
def cast(generic_map) do
%__MODULE__{
recipients: Castable.cast_list(Recipient, generic_map["recipients"]),
request_id: generic_map["request_id"]
}
end
end
@doc """
List the payment recipients that you have previously created.
Does a `POST /payment_initiation/recipient/list` call to
list all recipients you have previously created.
## Examples
PaymentInitiation.list_recipients(client_id: "123", secret: "abc")
{:ok, %PaymentInitiation.ListRecipientsResponse{}}
"""
@spec list_recipients(Plaid.config()) ::
{:ok, ListRecipientsResponse.t()} | {:error, Plaid.Error.t()}
def list_recipients(config) do
Plaid.Client.call(
"/payment_initiation/recipient/list",
ListRecipientsResponse,
config
)
end
defmodule CreatePaymentResponse do
@moduledoc """
[Plaid API /payment_initiation/payment/create response schema.](https://plaid.com/docs/api/products/#payment_initiationpaymentcreate)
"""
@behaviour Castable
@type t :: %__MODULE__{
payment_id: String.t(),
status: String.t(),
request_id: String.t()
}
defstruct [
:payment_id,
:status,
:request_id
]
@impl true
def cast(generic_map) do
%__MODULE__{
payment_id: generic_map["payment_id"],
status: generic_map["status"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Create a payment for a recipient.
Does a `POST /payment_initiation/payment/create` call which creates
a one-time or standing (recurring) payment for a recipient.
## Params
* `recipient_id` - The ID of the recipient the payment is for.
* `reference` - A reference for the payment.
* `amount` - A payment amount.
## Options
* `:schedule` - The schedule that the payment will be executed on.
## Examples
PaymentInitiation.create_payment(
"recipient-id-prod-123xxx",
"Purchase Order 123",
%PaymentInitiation.Amount{currency: "GBP", value: 200},
%{
schedule: %PaymentInitiation.Schedule{
interval: "WEEKLY",
interval_execution_day: 2,
start_date: "2021-01-01",
end_date: "2021-01-31"
}
},
client_id: "123",
secret: "abc"
)
{:ok, %PaymentInitiation.CreatePaymentResponse{}}
"""
@spec create_payment(
recipient_id :: String.t(),
reference :: String.t(),
amount :: Amount.t(),
options,
Plaid.config()
) :: {:ok, CreatePaymentResponse.t()} | {:error, Plaid.Error.t()}
when options: %{optional(:schedule) => Schedule.t()}
def create_payment(recipient_id, reference, amount, options \\ %{}, config) do
payload =
%{}
|> Map.put(:recipient_id, recipient_id)
|> Map.put(:reference, reference)
|> Map.put(:amount, amount)
|> Plaid.Util.maybe_put(:schedule, options)
Plaid.Client.call(
"/payment_initiation/payment/create",
payload,
CreatePaymentResponse,
config
)
end
defmodule GetPaymentResponse do
@moduledoc """
[Plaid API /payment_initiation/payment/get response schema.](https://plaid.com/docs/api/products/#payment_initiationpaymentget)
"""
@behaviour Castable
@type t :: %__MODULE__{
payment_id: String.t(),
payment_token: String.t(),
amount: Amount.t(),
status: String.t(),
recipient_id: String.t(),
reference: String.t(),
last_status_update: String.t(),
schedule: Schedule.t() | nil,
adjusted_reference: String.t() | nil,
payment_expiration_time: String.t() | nil,
request_id: String.t()
}
defstruct [
:payment_id,
:payment_token,
:amount,
:status,
:recipient_id,
:reference,
:last_status_update,
:schedule,
:adjusted_reference,
:payment_expiration_time,
:request_id
]
@impl true
def cast(generic_map) do
%__MODULE__{
payment_id: generic_map["payment_id"],
payment_token: generic_map["payment_token"],
amount: Castable.cast(Amount, generic_map["amount"]),
status: generic_map["status"],
recipient_id: generic_map["recipient_id"],
reference: generic_map["reference"],
last_status_update: generic_map["last_status_update"],
schedule: Castable.cast(Schedule, generic_map["schedule"]),
adjusted_reference: generic_map["adjust_reference"],
payment_expiration_time: generic_map["payment_expiration_time"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Get payment details.
Does a `POST /payment_initiation/payment/get` call to get
details about a payment.
## Params
* `payment_id` - The payment_id returned from /payment_initiation/payment/create.
## Examples
PaymentInitiation.get_payment(
"payment-id-prod-123xxx",
client_id: "123",
secret: "abc"
)
{:ok, %PaymentInitiation.GetPaymentResponse{}}
"""
@spec get_payment(payment_id :: String.t(), Plaid.config()) ::
{:ok, GetPaymentResponse.t()} | {:error, Plaid.Error.t()}
def get_payment(payment_id, config) do
Plaid.Client.call(
"/payment_initiation/payment/get",
%{payment_id: payment_id},
GetPaymentResponse,
config
)
end
defmodule ListPaymentsResponse do
@moduledoc """
[Plaid API /payment_initiation/payment/list response schema.](https://plaid.com/docs/api/products/#payment_initiationpaymentlist)
"""
@behaviour Castable
@type t :: %__MODULE__{
payments: [Payment.t()],
next_cursor: String.t(),
request_id: String.t()
}
defstruct [
:payments,
:next_cursor,
:request_id
]
@impl true
def cast(generic_map) do
%__MODULE__{
payments: Castable.cast_list(Payment, generic_map["payments"]),
next_cursor: generic_map["next_cursor"],
request_id: generic_map["request_id"]
}
end
end
@doc """
List payments.
Does a `POST /payment_initiation/payment/list` call to get
all created payments.
## Options
* `:count` - The maximum number of payments to return.
* `:cursor` - A date string in RFC 3339 format. Only payments created before the cursor will be returned.
## Examples
PaymentInitiation.list_payments(
client_id: "123",
secret: "abc"
)
{:ok, %PaymentInitiation.ListPaymentsResponse{}}
"""
@spec list_payments(options, Plaid.config()) ::
{:ok, ListPaymentsResponse.t()} | {:error, Plaid.Error.t()}
when options: %{
optional(:count) => integer(),
optional(:cursor) => String.t()
}
def list_payments(options \\ %{}, config) do
payload = Map.take(options, [:count, :cursor])
Plaid.Client.call(
"/payment_initiation/payment/list",
payload,
ListPaymentsResponse,
config
)
end
end
|
lib/plaid/payment_initiation.ex
| 0.879432
| 0.55254
|
payment_initiation.ex
|
starcoder
|
defmodule AWS.Glue do
@moduledoc """
AWS Glue
Defines the public endpoint for the AWS Glue service.
"""
@doc """
Creates one or more partitions in a batch operation.
"""
def batch_create_partition(client, input, options \\ []) do
request(client, "BatchCreatePartition", input, options)
end
@doc """
Deletes a list of connection definitions from the Data Catalog.
"""
def batch_delete_connection(client, input, options \\ []) do
request(client, "BatchDeleteConnection", input, options)
end
@doc """
Deletes one or more partitions in a batch operation.
"""
def batch_delete_partition(client, input, options \\ []) do
request(client, "BatchDeletePartition", input, options)
end
@doc """
Deletes multiple tables at once.
<note> After completing this operation, you no longer have access to the
table versions and partitions that belong to the deleted table. AWS Glue
deletes these "orphaned" resources asynchronously in a timely manner, at
the discretion of the service.
To ensure the immediate deletion of all related resources, before calling
`BatchDeleteTable`, use `DeleteTableVersion` or `BatchDeleteTableVersion`,
and `DeletePartition` or `BatchDeletePartition`, to delete any resources
that belong to the table.
</note>
"""
def batch_delete_table(client, input, options \\ []) do
request(client, "BatchDeleteTable", input, options)
end
@doc """
Deletes a specified batch of versions of a table.
"""
def batch_delete_table_version(client, input, options \\ []) do
request(client, "BatchDeleteTableVersion", input, options)
end
@doc """
Returns a list of resource metadata for a given list of crawler names.
After calling the `ListCrawlers` operation, you can call this operation to
access the data to which you have been granted permissions. This operation
supports all IAM permissions, including permission conditions that use
tags.
"""
def batch_get_crawlers(client, input, options \\ []) do
request(client, "BatchGetCrawlers", input, options)
end
@doc """
Returns a list of resource metadata for a given list of development
endpoint names. After calling the `ListDevEndpoints` operation, you can
call this operation to access the data to which you have been granted
permissions. This operation supports all IAM permissions, including
permission conditions that use tags.
"""
def batch_get_dev_endpoints(client, input, options \\ []) do
request(client, "BatchGetDevEndpoints", input, options)
end
@doc """
Returns a list of resource metadata for a given list of job names. After
calling the `ListJobs` operation, you can call this operation to access the
data to which you have been granted permissions. This operation supports
all IAM permissions, including permission conditions that use tags.
"""
def batch_get_jobs(client, input, options \\ []) do
request(client, "BatchGetJobs", input, options)
end
@doc """
Retrieves partitions in a batch request.
"""
def batch_get_partition(client, input, options \\ []) do
request(client, "BatchGetPartition", input, options)
end
@doc """
Returns a list of resource metadata for a given list of trigger names.
After calling the `ListTriggers` operation, you can call this operation to
access the data to which you have been granted permissions. This operation
supports all IAM permissions, including permission conditions that use
tags.
"""
def batch_get_triggers(client, input, options \\ []) do
request(client, "BatchGetTriggers", input, options)
end
@doc """
Returns a list of resource metadata for a given list of workflow names.
After calling the `ListWorkflows` operation, you can call this operation to
access the data to which you have been granted permissions. This operation
supports all IAM permissions, including permission conditions that use
tags.
"""
def batch_get_workflows(client, input, options \\ []) do
request(client, "BatchGetWorkflows", input, options)
end
@doc """
Stops one or more job runs for a specified job definition.
"""
def batch_stop_job_run(client, input, options \\ []) do
request(client, "BatchStopJobRun", input, options)
end
@doc """
Cancels (stops) a task run. Machine learning task runs are asynchronous
tasks that AWS Glue runs on your behalf as part of various machine learning
workflows. You can cancel a machine learning task run at any time by
calling `CancelMLTaskRun` with a task run's parent transform's
`TransformID` and the task run's `TaskRunId`.
"""
def cancel_m_l_task_run(client, input, options \\ []) do
request(client, "CancelMLTaskRun", input, options)
end
@doc """
Creates a classifier in the user's account. This can be a `GrokClassifier`,
an `XMLClassifier`, a `JsonClassifier`, or a `CsvClassifier`, depending on
which field of the request is present.
"""
def create_classifier(client, input, options \\ []) do
request(client, "CreateClassifier", input, options)
end
@doc """
Creates a connection definition in the Data Catalog.
"""
def create_connection(client, input, options \\ []) do
request(client, "CreateConnection", input, options)
end
@doc """
Creates a new crawler with specified targets, role, configuration, and
optional schedule. At least one crawl target must be specified, in the
`s3Targets` field, the `jdbcTargets` field, or the `DynamoDBTargets` field.
"""
def create_crawler(client, input, options \\ []) do
request(client, "CreateCrawler", input, options)
end
@doc """
Creates a new database in a Data Catalog.
"""
def create_database(client, input, options \\ []) do
request(client, "CreateDatabase", input, options)
end
@doc """
Creates a new development endpoint.
"""
def create_dev_endpoint(client, input, options \\ []) do
request(client, "CreateDevEndpoint", input, options)
end
@doc """
Creates a new job definition.
"""
def create_job(client, input, options \\ []) do
request(client, "CreateJob", input, options)
end
@doc """
Creates an AWS Glue machine learning transform. This operation creates the
transform and all the necessary parameters to train it.
Call this operation as the first step in the process of using a machine
learning transform (such as the `FindMatches` transform) for deduplicating
data. You can provide an optional `Description`, in addition to the
parameters that you want to use for your algorithm.
You must also specify certain parameters for the tasks that AWS Glue runs
on your behalf as part of learning from your data and creating a
high-quality machine learning transform. These parameters include `Role`,
and optionally, `AllocatedCapacity`, `Timeout`, and `MaxRetries`. For more
information, see
[Jobs](https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-jobs-job.html).
"""
def create_m_l_transform(client, input, options \\ []) do
request(client, "CreateMLTransform", input, options)
end
@doc """
Creates a new partition.
"""
def create_partition(client, input, options \\ []) do
request(client, "CreatePartition", input, options)
end
@doc """
Transforms a directed acyclic graph (DAG) into code.
"""
def create_script(client, input, options \\ []) do
request(client, "CreateScript", input, options)
end
@doc """
Creates a new security configuration. A security configuration is a set of
security properties that can be used by AWS Glue. You can use a security
configuration to encrypt data at rest. For information about using security
configurations in AWS Glue, see [Encrypting Data Written by Crawlers, Jobs,
and Development
Endpoints](https://docs.aws.amazon.com/glue/latest/dg/encryption-security-configuration.html).
"""
def create_security_configuration(client, input, options \\ []) do
request(client, "CreateSecurityConfiguration", input, options)
end
@doc """
Creates a new table definition in the Data Catalog.
"""
def create_table(client, input, options \\ []) do
request(client, "CreateTable", input, options)
end
@doc """
Creates a new trigger.
"""
def create_trigger(client, input, options \\ []) do
request(client, "CreateTrigger", input, options)
end
@doc """
Creates a new function definition in the Data Catalog.
"""
def create_user_defined_function(client, input, options \\ []) do
request(client, "CreateUserDefinedFunction", input, options)
end
@doc """
Creates a new workflow.
"""
def create_workflow(client, input, options \\ []) do
request(client, "CreateWorkflow", input, options)
end
@doc """
Removes a classifier from the Data Catalog.
"""
def delete_classifier(client, input, options \\ []) do
request(client, "DeleteClassifier", input, options)
end
@doc """
Deletes the partition statistics of a column.
"""
def delete_column_statistics_for_partition(client, input, options \\ []) do
request(client, "DeleteColumnStatisticsForPartition", input, options)
end
@doc """
Deletes the table statistics of columns.
"""
def delete_column_statistics_for_table(client, input, options \\ []) do
request(client, "DeleteColumnStatisticsForTable", input, options)
end
@doc """
Deletes a connection from the Data Catalog.
"""
def delete_connection(client, input, options \\ []) do
request(client, "DeleteConnection", input, options)
end
@doc """
Removes a specified crawler from the AWS Glue Data Catalog, unless the
crawler state is `RUNNING`.
"""
def delete_crawler(client, input, options \\ []) do
request(client, "DeleteCrawler", input, options)
end
@doc """
Removes a specified database from a Data Catalog.
<note> After completing this operation, you no longer have access to the
tables (and all table versions and partitions that might belong to the
tables) and the user-defined functions in the deleted database. AWS Glue
deletes these "orphaned" resources asynchronously in a timely manner, at
the discretion of the service.
To ensure the immediate deletion of all related resources, before calling
`DeleteDatabase`, use `DeleteTableVersion` or `BatchDeleteTableVersion`,
`DeletePartition` or `BatchDeletePartition`, `DeleteUserDefinedFunction`,
and `DeleteTable` or `BatchDeleteTable`, to delete any resources that
belong to the database.
</note>
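A hedged cleanup sketch (the `AWS.Glue` module name is assumed from this
file's path; the database name is hypothetical, and the `"TableList"`/`"Name"`
response keys follow the `GetTables` API; partitions and user-defined
functions would be handled analogously):
```
{:ok, %{"TableList" => tables}, _response} =
  AWS.Glue.get_tables(client, %{"DatabaseName" => "sales_db"})
for %{"Name" => table} <- tables do
  AWS.Glue.delete_table(client, %{"DatabaseName" => "sales_db", "Name" => table})
end
AWS.Glue.delete_database(client, %{"Name" => "sales_db"})
```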
"""
def delete_database(client, input, options \\ []) do
request(client, "DeleteDatabase", input, options)
end
@doc """
Deletes a specified development endpoint.
"""
def delete_dev_endpoint(client, input, options \\ []) do
request(client, "DeleteDevEndpoint", input, options)
end
@doc """
Deletes a specified job definition. If the job definition is not found, no
exception is thrown.
"""
def delete_job(client, input, options \\ []) do
request(client, "DeleteJob", input, options)
end
@doc """
Deletes an AWS Glue machine learning transform. Machine learning transforms
are a special type of transform that use machine learning to learn the
details of the transformation to be performed by learning from examples
provided by humans. These transformations are then saved by AWS Glue. If
you no longer need a transform, you can delete it by calling
`DeleteMLTransform`. However, any AWS Glue jobs that still reference the
deleted transform will no longer succeed.
"""
def delete_m_l_transform(client, input, options \\ []) do
request(client, "DeleteMLTransform", input, options)
end
@doc """
Deletes a specified partition.
"""
def delete_partition(client, input, options \\ []) do
request(client, "DeletePartition", input, options)
end
@doc """
Deletes a specified policy.
"""
def delete_resource_policy(client, input, options \\ []) do
request(client, "DeleteResourcePolicy", input, options)
end
@doc """
Deletes a specified security configuration.
"""
def delete_security_configuration(client, input, options \\ []) do
request(client, "DeleteSecurityConfiguration", input, options)
end
@doc """
Removes a table definition from the Data Catalog.
<note> After completing this operation, you no longer have access to the
table versions and partitions that belong to the deleted table. AWS Glue
deletes these "orphaned" resources asynchronously in a timely manner, at
the discretion of the service.
To ensure the immediate deletion of all related resources, before calling
`DeleteTable`, use `DeleteTableVersion` or `BatchDeleteTableVersion`, and
`DeletePartition` or `BatchDeletePartition`, to delete any resources that
belong to the table.
</note>
"""
def delete_table(client, input, options \\ []) do
request(client, "DeleteTable", input, options)
end
@doc """
Deletes a specified version of a table.
"""
def delete_table_version(client, input, options \\ []) do
request(client, "DeleteTableVersion", input, options)
end
@doc """
Deletes a specified trigger. If the trigger is not found, no exception is
thrown.
"""
def delete_trigger(client, input, options \\ []) do
request(client, "DeleteTrigger", input, options)
end
@doc """
Deletes an existing function definition from the Data Catalog.
"""
def delete_user_defined_function(client, input, options \\ []) do
request(client, "DeleteUserDefinedFunction", input, options)
end
@doc """
Deletes a workflow.
"""
def delete_workflow(client, input, options \\ []) do
request(client, "DeleteWorkflow", input, options)
end
@doc """
Retrieves the status of a migration operation.
"""
def get_catalog_import_status(client, input, options \\ []) do
request(client, "GetCatalogImportStatus", input, options)
end
@doc """
Retrieve a classifier by name.
"""
def get_classifier(client, input, options \\ []) do
request(client, "GetClassifier", input, options)
end
@doc """
Lists all classifier objects in the Data Catalog.
"""
def get_classifiers(client, input, options \\ []) do
request(client, "GetClassifiers", input, options)
end
@doc """
Retrieves partition statistics of columns.
"""
def get_column_statistics_for_partition(client, input, options \\ []) do
request(client, "GetColumnStatisticsForPartition", input, options)
end
@doc """
Retrieves table statistics of columns.
"""
def get_column_statistics_for_table(client, input, options \\ []) do
request(client, "GetColumnStatisticsForTable", input, options)
end
@doc """
Retrieves a connection definition from the Data Catalog.
"""
def get_connection(client, input, options \\ []) do
request(client, "GetConnection", input, options)
end
@doc """
Retrieves a list of connection definitions from the Data Catalog.
"""
def get_connections(client, input, options \\ []) do
request(client, "GetConnections", input, options)
end
@doc """
Retrieves metadata for a specified crawler.
"""
def get_crawler(client, input, options \\ []) do
request(client, "GetCrawler", input, options)
end
@doc """
Retrieves metrics about specified crawlers.
"""
def get_crawler_metrics(client, input, options \\ []) do
request(client, "GetCrawlerMetrics", input, options)
end
@doc """
Retrieves metadata for all crawlers defined in the customer account.
"""
def get_crawlers(client, input, options \\ []) do
request(client, "GetCrawlers", input, options)
end
@doc """
Retrieves the security configuration for a specified catalog.
"""
def get_data_catalog_encryption_settings(client, input, options \\ []) do
request(client, "GetDataCatalogEncryptionSettings", input, options)
end
@doc """
Retrieves the definition of a specified database.
"""
def get_database(client, input, options \\ []) do
request(client, "GetDatabase", input, options)
end
@doc """
Retrieves all databases defined in a given Data Catalog.
"""
def get_databases(client, input, options \\ []) do
request(client, "GetDatabases", input, options)
end
@doc """
Transforms a Python script into a directed acyclic graph (DAG).
"""
def get_dataflow_graph(client, input, options \\ []) do
request(client, "GetDataflowGraph", input, options)
end
@doc """
Retrieves information about a specified development endpoint.
<note> When you create a development endpoint in a virtual private cloud
(VPC), AWS Glue returns only a private IP address, and the public IP
address field is not populated. When you create a non-VPC development
endpoint, AWS Glue returns only a public IP address.
</note>
"""
def get_dev_endpoint(client, input, options \\ []) do
request(client, "GetDevEndpoint", input, options)
end
@doc """
Retrieves all the development endpoints in this AWS account.
<note> When you create a development endpoint in a virtual private cloud
(VPC), AWS Glue returns only a private IP address and the public IP address
field is not populated. When you create a non-VPC development endpoint, AWS
Glue returns only a public IP address.
</note>
"""
def get_dev_endpoints(client, input, options \\ []) do
request(client, "GetDevEndpoints", input, options)
end
@doc """
Retrieves an existing job definition.
"""
def get_job(client, input, options \\ []) do
request(client, "GetJob", input, options)
end
@doc """
Returns information on a job bookmark entry.
"""
def get_job_bookmark(client, input, options \\ []) do
request(client, "GetJobBookmark", input, options)
end
@doc """
Retrieves the metadata for a given job run.
"""
def get_job_run(client, input, options \\ []) do
request(client, "GetJobRun", input, options)
end
@doc """
Retrieves metadata for all runs of a given job definition.
"""
def get_job_runs(client, input, options \\ []) do
request(client, "GetJobRuns", input, options)
end
@doc """
Retrieves all current job definitions.
"""
def get_jobs(client, input, options \\ []) do
request(client, "GetJobs", input, options)
end
@doc """
Gets details for a specific task run on a machine learning transform.
Machine learning task runs are asynchronous tasks that AWS Glue runs on
your behalf as part of various machine learning workflows. You can check
the status of any task run by calling `GetMLTaskRun` with the `TaskRunID`
and its parent transform's `TransformID`.
"""
def get_m_l_task_run(client, input, options \\ []) do
request(client, "GetMLTaskRun", input, options)
end
@doc """
Gets a list of runs for a machine learning transform. Machine learning task
runs are asynchronous tasks that AWS Glue runs on your behalf as part of
various machine learning workflows. You can get a sortable, filterable list
of machine learning task runs by calling `GetMLTaskRuns` with their parent
transform's `TransformID` and other optional parameters as documented in
this section.
This operation returns a list of historic runs and must be paginated.
"""
def get_m_l_task_runs(client, input, options \\ []) do
request(client, "GetMLTaskRuns", input, options)
end
@doc """
Gets an AWS Glue machine learning transform artifact and all its
corresponding metadata. Machine learning transforms are a special type of
transform that use machine learning to learn the details of the
transformation to be performed by learning from examples provided by
humans. These transformations are then saved by AWS Glue. You can retrieve
their metadata by calling `GetMLTransform`.
"""
def get_m_l_transform(client, input, options \\ []) do
request(client, "GetMLTransform", input, options)
end
@doc """
Gets a sortable, filterable list of existing AWS Glue machine learning
transforms. Machine learning transforms are a special type of transform
that use machine learning to learn the details of the transformation to be
performed by learning from examples provided by humans. These
transformations are then saved by AWS Glue, and you can retrieve their
metadata by calling `GetMLTransforms`.
"""
def get_m_l_transforms(client, input, options \\ []) do
request(client, "GetMLTransforms", input, options)
end
@doc """
Creates mappings.
"""
def get_mapping(client, input, options \\ []) do
request(client, "GetMapping", input, options)
end
@doc """
Retrieves information about a specified partition.
"""
def get_partition(client, input, options \\ []) do
request(client, "GetPartition", input, options)
end
@doc """
Retrieves information about the partitions in a table.
"""
def get_partitions(client, input, options \\ []) do
request(client, "GetPartitions", input, options)
end
@doc """
Gets code to perform a specified mapping.
"""
def get_plan(client, input, options \\ []) do
request(client, "GetPlan", input, options)
end
@doc """
Retrieves the security configurations for the resource policies set on
individual resources, and also the account-level policy.
This operation also returns the Data Catalog resource policy. However, if
you enabled metadata encryption in Data Catalog settings, and you do not
have permission on the AWS KMS key, the operation can't return the Data
Catalog resource policy.
"""
def get_resource_policies(client, input, options \\ []) do
request(client, "GetResourcePolicies", input, options)
end
@doc """
Retrieves a specified resource policy.
"""
def get_resource_policy(client, input, options \\ []) do
request(client, "GetResourcePolicy", input, options)
end
@doc """
Retrieves a specified security configuration.
"""
def get_security_configuration(client, input, options \\ []) do
request(client, "GetSecurityConfiguration", input, options)
end
@doc """
Retrieves a list of all security configurations.
"""
def get_security_configurations(client, input, options \\ []) do
request(client, "GetSecurityConfigurations", input, options)
end
@doc """
Retrieves the `Table` definition in a Data Catalog for a specified table.
"""
def get_table(client, input, options \\ []) do
request(client, "GetTable", input, options)
end
@doc """
Retrieves a specified version of a table.
"""
def get_table_version(client, input, options \\ []) do
request(client, "GetTableVersion", input, options)
end
@doc """
Retrieves a list of strings that identify available versions of a specified
table.
"""
def get_table_versions(client, input, options \\ []) do
request(client, "GetTableVersions", input, options)
end
@doc """
Retrieves the definitions of some or all of the tables in a given
`Database`.
"""
def get_tables(client, input, options \\ []) do
request(client, "GetTables", input, options)
end
@doc """
Retrieves a list of tags associated with a resource.
"""
def get_tags(client, input, options \\ []) do
request(client, "GetTags", input, options)
end
@doc """
Retrieves the definition of a trigger.
"""
def get_trigger(client, input, options \\ []) do
request(client, "GetTrigger", input, options)
end
@doc """
Gets all the triggers associated with a job.
"""
def get_triggers(client, input, options \\ []) do
request(client, "GetTriggers", input, options)
end
@doc """
Retrieves a specified function definition from the Data Catalog.
"""
def get_user_defined_function(client, input, options \\ []) do
request(client, "GetUserDefinedFunction", input, options)
end
@doc """
Retrieves multiple function definitions from the Data Catalog.
"""
def get_user_defined_functions(client, input, options \\ []) do
request(client, "GetUserDefinedFunctions", input, options)
end
@doc """
Retrieves resource metadata for a workflow.
"""
def get_workflow(client, input, options \\ []) do
request(client, "GetWorkflow", input, options)
end
@doc """
Retrieves the metadata for a given workflow run.
"""
def get_workflow_run(client, input, options \\ []) do
request(client, "GetWorkflowRun", input, options)
end
@doc """
Retrieves the workflow run properties which were set during the run.
"""
def get_workflow_run_properties(client, input, options \\ []) do
request(client, "GetWorkflowRunProperties", input, options)
end
@doc """
Retrieves metadata for all runs of a given workflow.
"""
def get_workflow_runs(client, input, options \\ []) do
request(client, "GetWorkflowRuns", input, options)
end
@doc """
Imports an existing Amazon Athena Data Catalog to AWS Glue.
"""
def import_catalog_to_glue(client, input, options \\ []) do
request(client, "ImportCatalogToGlue", input, options)
end
@doc """
Retrieves the names of all crawler resources in this AWS account, or the
resources with the specified tag. This operation allows you to see which
resources are available in your account, and their names.
This operation takes the optional `Tags` field, which you can use as a
filter on the response so that tagged resources can be retrieved as a
group. If you choose to use tags filtering, only resources with the tag are
retrieved.
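For example, to list only crawlers carrying a hypothetical `team` tag
(module name assumed from this file's path):
```
{:ok, %{"CrawlerNames" => names}, _response} =
  AWS.Glue.list_crawlers(client, %{"Tags" => %{"team" => "analytics"}})
```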
"""
def list_crawlers(client, input, options \\ []) do
request(client, "ListCrawlers", input, options)
end
@doc """
Retrieves the names of all `DevEndpoint` resources in this AWS account, or
the resources with the specified tag. This operation allows you to see
which resources are available in your account, and their names.
This operation takes the optional `Tags` field, which you can use as a
filter on the response so that tagged resources can be retrieved as a
group. If you choose to use tags filtering, only resources with the tag are
retrieved.
"""
def list_dev_endpoints(client, input, options \\ []) do
request(client, "ListDevEndpoints", input, options)
end
@doc """
Retrieves the names of all job resources in this AWS account, or the
resources with the specified tag. This operation allows you to see which
resources are available in your account, and their names.
This operation takes the optional `Tags` field, which you can use as a
filter on the response so that tagged resources can be retrieved as a
group. If you choose to use tags filtering, only resources with the tag are
retrieved.
"""
def list_jobs(client, input, options \\ []) do
request(client, "ListJobs", input, options)
end
@doc """
Retrieves a sortable, filterable list of existing AWS Glue machine learning
transforms in this AWS account, or the resources with the specified tag.
This operation takes the optional `Tags` field, which you can use as a
filter of the responses so that tagged resources can be retrieved as a
group. If you choose to use tag filtering, only resources with the tags are
retrieved.
"""
def list_m_l_transforms(client, input, options \\ []) do
request(client, "ListMLTransforms", input, options)
end
@doc """
Retrieves the names of all trigger resources in this AWS account, or the
resources with the specified tag. This operation allows you to see which
resources are available in your account, and their names.
This operation takes the optional `Tags` field, which you can use as a
filter on the response so that tagged resources can be retrieved as a
group. If you choose to use tags filtering, only resources with the tag are
retrieved.
"""
def list_triggers(client, input, options \\ []) do
request(client, "ListTriggers", input, options)
end
@doc """
Lists names of workflows created in the account.
"""
def list_workflows(client, input, options \\ []) do
request(client, "ListWorkflows", input, options)
end
@doc """
Sets the security configuration for a specified catalog. After the
configuration has been set, the specified encryption is applied to every
catalog write thereafter.
"""
def put_data_catalog_encryption_settings(client, input, options \\ []) do
request(client, "PutDataCatalogEncryptionSettings", input, options)
end
@doc """
Sets the Data Catalog resource policy for access control.
"""
def put_resource_policy(client, input, options \\ []) do
request(client, "PutResourcePolicy", input, options)
end
@doc """
Puts the specified workflow run properties for the given workflow run. If a
property already exists for the specified run, then it overrides the value;
otherwise, it adds the property to the existing properties.
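A minimal sketch (the workflow name, run ID, and property are hypothetical;
module name assumed from this file's path):
```
AWS.Glue.put_workflow_run_properties(client, %{
  "Name" => "nightly-etl",
  "RunId" => "wr_0123456789",
  "RunProperties" => %{"stage" => "loaded"}
})
```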
"""
def put_workflow_run_properties(client, input, options \\ []) do
request(client, "PutWorkflowRunProperties", input, options)
end
@doc """
Resets a bookmark entry.
"""
def reset_job_bookmark(client, input, options \\ []) do
request(client, "ResetJobBookmark", input, options)
end
@doc """
Restarts any completed nodes in a workflow run and resumes the run
execution.
"""
def resume_workflow_run(client, input, options \\ []) do
request(client, "ResumeWorkflowRun", input, options)
end
@doc """
Searches a set of tables based on properties in the table metadata as well
as on the parent database. You can search against text or filter
conditions.
You can only get tables that you have access to based on the security
policies defined in Lake Formation. You need at least a read-only access to
the table for it to be returned. If you do not have access to all the
columns in the table, these columns will not be searched against when
returning the list of tables back to you. If you have access to the columns
but not the data in the columns, those columns and the associated metadata
for those columns will be included in the search.
"""
def search_tables(client, input, options \\ []) do
request(client, "SearchTables", input, options)
end
@doc """
Starts a crawl using the specified crawler, regardless of what is
scheduled. If the crawler is already running, returns a
[CrawlerRunningException](https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-exceptions.html#aws-glue-api-exceptions-CrawlerRunningException).
"""
def start_crawler(client, input, options \\ []) do
request(client, "StartCrawler", input, options)
end
@doc """
Changes the schedule state of the specified crawler to `SCHEDULED`, unless
the crawler is already running or the schedule state is already
`SCHEDULED`.
"""
def start_crawler_schedule(client, input, options \\ []) do
request(client, "StartCrawlerSchedule", input, options)
end
@doc """
Begins an asynchronous task to export all labeled data for a particular
transform. This task is the only label-related API call that is not part of
the typical active learning workflow. You typically use
`StartExportLabelsTaskRun` when you want to work with all of your existing
labels at the same time, such as when you want to remove or change labels
that were previously submitted as truth. This API operation accepts the
`TransformId` whose labels you want to export and an Amazon Simple Storage
Service (Amazon S3) path to export the labels to. The operation returns a
`TaskRunId`. You can check on the status of your task run by calling the
`GetMLTaskRun` API.
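A hedged sketch of that flow (all identifiers and the S3 path are
hypothetical; module name assumed from this file's path):
```
transform_id = "tfm-0123456789" # hypothetical transform id
{:ok, %{"TaskRunId" => task_run_id}, _response} =
  AWS.Glue.start_export_labels_task_run(client, %{
    "TransformId" => transform_id,
    "OutputS3Path" => "s3://my-bucket/labels/"
  })
{:ok, task_run, _response} =
  AWS.Glue.get_m_l_task_run(client, %{
    "TransformId" => transform_id,
    "TaskRunId" => task_run_id
  })
# task_run["Status"] is eg "RUNNING" or "SUCCEEDED"
```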
"""
def start_export_labels_task_run(client, input, options \\ []) do
request(client, "StartExportLabelsTaskRun", input, options)
end
@doc """
Enables you to provide additional labels (examples of truth) to be used to
teach the machine learning transform and improve its quality. This API
operation is generally used as part of the active learning workflow that
starts with the `StartMLLabelingSetGenerationTaskRun` call and that
ultimately results in improving the quality of your machine learning
transform.
After the `StartMLLabelingSetGenerationTaskRun` finishes, AWS Glue machine
learning will have generated a series of questions for humans to answer.
(Answering these questions is often called 'labeling' in the machine
learning workflows). In the case of the `FindMatches` transform, these
questions are of the form, “What is the correct way to group these rows
together into groups composed entirely of matching records?” After the
labeling process is finished, users upload their answers/labels with a call
to `StartImportLabelsTaskRun`. After `StartImportLabelsTaskRun` finishes,
all future runs of the machine learning transform use the new and improved
labels and perform a higher-quality transformation.
By default, `StartImportLabelsTaskRun` continually learns from
and combines all labels that you upload unless you set `Replace` to true.
If you set `Replace` to true, `StartImportLabelsTaskRun` deletes and
forgets all previously uploaded labels and learns only from the exact set
that you upload. Replacing labels can be helpful if you realize that you
previously uploaded incorrect labels, and you believe that they are having
a negative effect on your transform quality.
You can check on the status of your task run by calling the `GetMLTaskRun`
operation.
"""
def start_import_labels_task_run(client, input, options \\ []) do
request(client, "StartImportLabelsTaskRun", input, options)
end
@doc """
Starts a job run using a job definition.
"""
def start_job_run(client, input, options \\ []) do
request(client, "StartJobRun", input, options)
end
@doc """
Starts a task to estimate the quality of the transform.
When you provide label sets as examples of truth, AWS Glue machine learning
uses some of those examples to learn from them. The rest of the labels are
used as a test to estimate quality.
Returns a unique identifier for the run. You can call `GetMLTaskRun` to get
more information about the status of the `EvaluationTaskRun`.
"""
def start_m_l_evaluation_task_run(client, input, options \\ []) do
request(client, "StartMLEvaluationTaskRun", input, options)
end
@doc """
Starts the active learning workflow for your machine learning transform to
improve the transform's quality by generating label sets and adding labels.
When the `StartMLLabelingSetGenerationTaskRun` finishes, AWS Glue will have
generated a "labeling set" or a set of questions for humans to answer.
In the case of the `FindMatches` transform, these questions are of the
form, “What is the correct way to group these rows together into groups
composed entirely of matching records?”
After the labeling process is finished, you can upload your labels with a
call to `StartImportLabelsTaskRun`. After `StartImportLabelsTaskRun`
finishes, all future runs of the machine learning transform will use the
new and improved labels and perform a higher-quality transformation.
"""
def start_m_l_labeling_set_generation_task_run(client, input, options \\ []) do
request(client, "StartMLLabelingSetGenerationTaskRun", input, options)
end
@doc """
Starts an existing trigger. See [Triggering
Jobs](https://docs.aws.amazon.com/glue/latest/dg/trigger-job.html) for
information about how different types of trigger are started.
"""
def start_trigger(client, input, options \\ []) do
request(client, "StartTrigger", input, options)
end
@doc """
Starts a new run of the specified workflow.
"""
def start_workflow_run(client, input, options \\ []) do
request(client, "StartWorkflowRun", input, options)
end
@doc """
If the specified crawler is running, stops the crawl.
"""
def stop_crawler(client, input, options \\ []) do
request(client, "StopCrawler", input, options)
end
@doc """
Sets the schedule state of the specified crawler to `NOT_SCHEDULED`, but
does not stop the crawler if it is already running.
"""
def stop_crawler_schedule(client, input, options \\ []) do
request(client, "StopCrawlerSchedule", input, options)
end
@doc """
Stops a specified trigger.
"""
def stop_trigger(client, input, options \\ []) do
request(client, "StopTrigger", input, options)
end
@doc """
Stops the execution of the specified workflow run.
"""
def stop_workflow_run(client, input, options \\ []) do
request(client, "StopWorkflowRun", input, options)
end
@doc """
Adds tags to a resource. A tag is a label you can assign to an AWS
resource. In AWS Glue, you can tag only certain resources. For information
about what resources you can tag, see [AWS Tags in AWS
Glue](https://docs.aws.amazon.com/glue/latest/dg/monitor-tags.html).
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes tags from a resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Modifies an existing classifier (a `GrokClassifier`, an `XMLClassifier`, a
`JsonClassifier`, or a `CsvClassifier`, depending on which field is
present).
"""
def update_classifier(client, input, options \\ []) do
request(client, "UpdateClassifier", input, options)
end
@doc """
Creates or updates partition statistics of columns.
"""
def update_column_statistics_for_partition(client, input, options \\ []) do
request(client, "UpdateColumnStatisticsForPartition", input, options)
end
@doc """
Creates or updates table statistics of columns.
"""
def update_column_statistics_for_table(client, input, options \\ []) do
request(client, "UpdateColumnStatisticsForTable", input, options)
end
@doc """
Updates a connection definition in the Data Catalog.
"""
def update_connection(client, input, options \\ []) do
request(client, "UpdateConnection", input, options)
end
@doc """
Updates a crawler. If a crawler is running, you must stop it using
`StopCrawler` before updating it.
"""
def update_crawler(client, input, options \\ []) do
request(client, "UpdateCrawler", input, options)
end
@doc """
Updates the schedule of a crawler using a `cron` expression.
"""
def update_crawler_schedule(client, input, options \\ []) do
request(client, "UpdateCrawlerSchedule", input, options)
end
@doc """
Updates an existing database definition in a Data Catalog.
"""
def update_database(client, input, options \\ []) do
request(client, "UpdateDatabase", input, options)
end
@doc """
Updates a specified development endpoint.
"""
def update_dev_endpoint(client, input, options \\ []) do
request(client, "UpdateDevEndpoint", input, options)
end
@doc """
Updates an existing job definition.
"""
def update_job(client, input, options \\ []) do
request(client, "UpdateJob", input, options)
end
@doc """
Updates an existing machine learning transform. Call this operation to tune
the algorithm parameters to achieve better results.
After calling this operation, you can call the `StartMLEvaluationTaskRun`
operation to assess how well your new parameters achieved your goals (such
as improving the quality of your machine learning transform, or making it
more cost-effective).
"""
def update_m_l_transform(client, input, options \\ []) do
request(client, "UpdateMLTransform", input, options)
end
@doc """
Updates a partition.
"""
def update_partition(client, input, options \\ []) do
request(client, "UpdatePartition", input, options)
end
@doc """
Updates a metadata table in the Data Catalog.
"""
def update_table(client, input, options \\ []) do
request(client, "UpdateTable", input, options)
end
@doc """
Updates a trigger definition.
"""
def update_trigger(client, input, options \\ []) do
request(client, "UpdateTrigger", input, options)
end
@doc """
Updates an existing function definition in the Data Catalog.
"""
def update_user_defined_function(client, input, options \\ []) do
request(client, "UpdateUserDefinedFunction", input, options)
end
@doc """
Updates an existing workflow.
"""
def update_workflow(client, input, options \\ []) do
request(client, "UpdateWorkflow", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "glue"}
host = build_host("glue", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AWSGlue.#{action}"}
]
payload = Poison.Encoder.encode(input, %{})
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
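# Examples (hypothetical client fields):
#   build_host("glue", %{region: "local"}) #=> "localhost"
#   build_host("glue", %{region: "us-east-1", endpoint: "amazonaws.com"})
#   #=> "glue.us-east-1.amazonaws.com"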
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/glue.ex
| 0.894352
| 0.490236
|
glue.ex
|
starcoder
|
defmodule RfxCli.Main.ExtractCommand do
@moduledoc """
Converts Optimus parse data into a command_args list.
The command_args list contains the following elements:
```elixir
[
launch_cmd: <cmd_name: atom>
launch_args: struct
op_module: <module_name: string>
op_scope: <code | file | project | subapp | tmpfile>
op_target: <scope target: code | filepath | dirpath>
op_args: <keyword list with operation arguments ([])>
op_convert: <string - comma-separated list of conversions>
op_apply: boolean (false)
op_quiet: boolean (false)
op_oneline: boolean (false)
]
"""
alias RfxCli.State
def run({:error, msg}) do
{:error, msg}
end
def run(state) do
case extract(state.parse_result) do
{:error, msg} -> {:error, msg}
result -> State.assign(state, :command_args, result)
end
end
# This pattern matches when a subcommand is used.
def extract({[subcmd], parse = %Optimus.ParseResult{}}) do
Keyword.merge(default_args(), subcmd_args(subcmd, parse))
end
# This pattern matches when no subcommand is used (eg "rfx --help")
def extract(parse = %Optimus.ParseResult{}) do
Keyword.merge(default_args(), cmd_args(parse))
end
defp cmd_args(_parse) do
# Top-level invocations without a subcommand are not supported yet, so
# fail loudly instead of returning empty launch args. Possible future flags:
# launch_repl: Map.fetch!(parse, :flags)[:repl],
# launch_server: Map.fetch!(parse, :flags)[:server]
raise "cmd_args/1: top-level flags without a subcommand are not supported"
end
defp subcmd_args(cmd, parse) do
commands = [ :repl, :pipe, :server ]
case Enum.member?(commands, cmd) do
true -> [launch_cmd: cmd, launch_args: parse]
false -> subop_args(cmd, parse)
end
end
defp subop_args(subcmd, parse) do
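# Derives the op module from the subcommand atom by swapping "_" for "."
# and prefixing "Elixir.Rfx.Ops.", then pulls the changelist function,
# target, and options out of the Optimus parse result.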
sub = subcmd |> Atom.to_string() |> String.replace("_", ".")
mod = ("Elixir.Rfx.Ops." <> sub) |> String.to_atom()
fun = set_scope(parse) |> String.to_atom()
tgt = Map.fetch!(parse, :args)[:target] || ""
arg = args_for(parse)
con = converts_for(parse)
[
op_module: mod,
op_scope: fun,
op_target: tgt,
op_args: arg,
op_convert: con,
op_apply: Map.fetch!(parse, :flags)[:apply],
op_quiet: Map.fetch!(parse, :flags)[:quiet],
op_oneline: Map.fetch!(parse, :flags)[:oneline]
]
end
defp set_scope(parse_data) do
scope = Map.fetch!(parse_data, :options)[:scope]
target = Map.fetch!(parse_data, :args)[:target]
case scope do
nil -> RfxCli.Util.InferScope.for(target)
"code" -> "cl_code"
"file" -> "cl_file"
"project" -> "cl_project"
"subapp" -> "cl_subapp"
"tmpfile" -> "cl_tmpfile"
_ -> raise("Error: unknown scope (#{scope})")
end
end
defp converts_for(parse_data) do
parse_data
|> Map.fetch!(:options)
|> Map.get(:convert, "")
|> split()
|> Enum.map(&String.to_atom/1)
end
defp split(nil) do
[]
end
defp split(string) do
String.split(string, ",")
end
defp args_for(parse_data) do
parse_data
|> Map.fetch!(:options)
|> Map.delete(:changelist)
|> Map.delete(:convert)
|> Map.delete(:scope)
|> Keyword.new()
end
defp launch_args do
[
launch_cmd: nil,
launch_args: nil
]
end
defp op_args do
[
op_module: nil,
op_scope: nil,
op_target: nil,
op_args: nil,
op_convert: nil,
op_apply: false,
op_quiet: false,
op_oneline: false
]
end
defp default_args do
launch_args() ++ op_args()
end
end
|
lib/rfx_cli/main/extract_command.ex
| 0.692538
| 0.729038
|
extract_command.ex
|
starcoder
|
defmodule Snitch.Data.Model.Order do
@moduledoc """
Order API
"""
use Snitch.Data.Model
import Snitch.Tools.Helper.QueryFragment
alias Snitch.Data.Schema.Order
alias Snitch.Data.Model.LineItem, as: LineItemModel
@order_states ["confirmed", "complete"]
@doc """
Creates an order with supplied `params` and `line_items`.
`params` is a map that is passed to the
`Snitch.Data.Schema.Order.changeset/3`.
> * `line_items` is not a list of `LineItem` schema structs, but just a list
> of maps with the keys `:variant_id` and `:quantity`.
> * These `LineItem`s will be created (casted, to be precise) along with the
> `Order` in a DB transaction.
## Example
```
line_items = [%{variant_id: 1, quantity: 42}, %{variant_id: 2, quantity: 42}]
params = %{user_id: 1, line_items: line_items}
{:ok, order} = Snitch.Data.Model.Order.create(params)
```
## See also
`Ecto.Changeset.cast_assoc/3`
"""
@spec create(map) :: {:ok, Order.t()} | {:error, Ecto.Changeset.t()}
def create(params) do
QH.create(Order, update_in(params, [:line_items], &update_line_item_costs/1), Repo)
end
def create_guest_order() do
%Order{}
|> Order.create_guest_changeset(%{})
|> Repo.insert()
end
@doc """
Returns an `{:ok, order}` tuple for the supplied `user_id`.
An existing order associated with the user in the `cart`, `address`,
`delivery`, or `payment` state is returned if found. If no such order
exists, a new order is created and returned.
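## Example
```
# `1` is a hypothetical user id.
{:ok, order} = Snitch.Data.Model.Order.user_order(1)
```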
"""
@spec user_order(non_neg_integer) :: {:ok, Order.t()} | {:error, Ecto.Changeset.t()}
def user_order(user_id) do
query =
from(
order in Order,
where:
order.user_id == ^user_id and order.state in ["cart", "address", "delivery", "payment"]
)
query
|> Repo.all()
|> case do
[] ->
%Order{}
|> Order.create_guest_changeset(%{})
|> Ecto.Changeset.put_change(:user_id, user_id)
|> Repo.insert()
[order | _] ->
{:ok, order}
end
end
@doc """
Creates an order with supplied `params` and `line_items`.
Suitable for creating orders for guest users. The `order.user_id` cannot be
set using this function.
> * `line_items` is not a list of `LineItem` schema structs, but just a list
> of maps with the keys `:variant_id` and `:quantity`.
> * These `LineItem`s will be created (casted, to be precise) along with the
> `Order` in a DB transaction.
## Example
```
line_items = [%{variant_id: 1, quantity: 42}, %{variant_id: 2, quantity: 42}]
params = %{line_items: line_items}
{:ok, order} = Snitch.Data.Model.Order.create_for_guest(params)
```
## See also
`Ecto.Changeset.cast_assoc/3`
"""
@spec create_for_guest(map) :: {:ok, Order.t()} | {:error, Ecto.Changeset.t()}
def create_for_guest(params) do
%Order{}
|> Order.create_for_guest_changeset(params)
|> Repo.insert()
end
@doc """
Updates the order with supplied `params`. `params` can include "new"
`line_items`.
## Caution!
The `line_items` are "casted" with the order and if `params` does not include
a `line_items`, then **all previous line-items will be deleted!**
### Retain previous `LineItem`s
If you wish to retain the line-items, you must pass a list of maps with the
line-item `:id`s, like so:
```
order # this is the order you wish to update, and `:line_items` are preloaded
line_items = Enum.reduce(order.line_items, [], fn x, acc ->
[%{id: x.id} | acc]
end)
params = %{} # All changes except line-items
all_params = Map.put(params, :line_items, line_items)
Snitch.Data.Model.Order.update(all_params, order)
```
### Updating some of the `LineItem`s
Just like `create/2`, `line_items` is a list of maps, passing `LineItem`
schema structs instead would fail. Along with the line-item params
(`:variant_id` and `:quantity`) just pass the line-item `:id`.
Let's say we have an `order` with the following `LineItem`s:
```
order.line_items
#=> [
..> %LineItem{id: 1, quantity: 1, variant_id: 1, ...},
..> %LineItem{id: 2, quantity: 1, variant_id: 3, ...},
..> %LineItem{id: 3, quantity: 1, variant_id: 2, ...}
..> ]
```
And we wish to:
1. update the first,
2. retain the second,
3. remove the third and,
4. add a "new" LineItem
```
line_items = [
%{id: 1, quantity: 42}, # updates quantity of first
%{id: 2} # retains second
%{variant_id: 4, quantity: 42} # adds a new line-item (no `:id`)
] # since there is no mention of `id: 3`,
# it gets removed!
params = %{line_items: line_items}
{:ok, updated_order} = Snitch.Data.Model.Order.update(params, order)
```
Let's see what we got,
```
updated_order.line_items
#=> [
..> %LineItem{id: 1, quantity: 42, variant_id: 1, ...},
..> %LineItem{id: 2, quantity: 1, variant_id: 3, ...},
..> %LineItem{id: 4, quantity: 42, variant_id: 4, ...}
..> ]
```
## See also
`Ecto.Changeset.cast_assoc/3`
"""
@spec update(map, Order.t()) :: {:ok, Order.t()} | {:error, Ecto.Changeset.t()}
def update(params, order \\ nil) do
QH.update(Order, update_in(params, [:line_items], &update_line_item_costs/1), order, Repo)
end
@doc """
Updates the order with supplied `params`. Does not update line_items.
"""
@spec partial_update(Order.t(), map) :: {:ok, Order.t()} | {:error, Ecto.Changeset.t()}
def partial_update(order, params) do
order
|> Order.partial_update_changeset(params)
|> Repo.update()
end
@spec delete(non_neg_integer | Order.t()) ::
{:ok, Order.t()} | {:error, Ecto.Changeset.t()} | {:error, :not_found}
def delete(id_or_instance) do
QH.delete(Order, id_or_instance, Repo)
end
@spec get(map | non_neg_integer) :: Order.t() | nil
def get(query_fields_or_primary_key) do
QH.get(Order, query_fields_or_primary_key, Repo)
end
@spec get_all() :: [Order.t()]
def get_all, do: Repo.all(Order)
@doc """
Returns all Orders with the given list of entities preloaded
"""
def get_all_with_preloads(preloads) do
Repo.all(Order) |> Repo.preload(preloads)
end
@doc """
Returns all orders that belong to the given user.
"""
@spec user_orders(non_neg_integer) :: [Order.t()]
def user_orders(user_id) do
query =
from(
u in Order,
where: u.user_id == ^user_id
)
Repo.all(query)
end
defp update_line_item_costs(line_items) when is_list(line_items) do
LineItemModel.update_unit_price(line_items)
end
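# Counts orders in the states listed in @order_states, grouped by state,
# for the given date range, eg: [%{state: "complete", count: 3}, ...]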
def get_order_count_by_state(start_date, end_date) do
Order
|> where(
[o],
o.inserted_at >= ^start_date and o.inserted_at <= ^end_date and o.state in ^@order_states
)
|> group_by([o], o.state)
|> order_by([o], asc: o.state)
|> select([o], %{state: o.state, count: count(o.id)})
|> Repo.all()
end
def get_order_count_by_date(start_date, end_date) do
Order
|> where([o], o.inserted_at >= ^start_date and o.inserted_at <= ^end_date)
|> group_by([o], to_char(o.inserted_at, "YYYY-MM-DD"))
|> select([o], %{date: to_char(o.inserted_at, "YYYY-MM-DD"), count: count(o.id)})
|> Repo.all()
|> Enum.sort_by(&{Map.get(&1, :date)})
end
end
|
apps/snitch_core/lib/core/data/model/order.ex
| 0.906299
| 0.86799
|
order.ex
|
starcoder
|
defmodule Solid.Tag do
@moduledoc """
This module define behaviour for tags.
To implement new tag you need to create new module that implement the `Tag` behaviour:
defmodule MyCustomTag do
import NimbleParsec
@behaviour Solid.Tag
@impl true
def spec(_parser) do
space = Solid.Parser.Literal.whitespace(min: 0)
ignore(string("{%"))
|> ignore(space)
|> ignore(string("my_tag"))
|> ignore(space)
|> ignore(string("%}"))
end
@impl true
def render(_tag, _context, _options) do
[text: "my first tag"]
end
end
- `spec` defines how to parse your tag
- `render` defines how to render your tag
Then add the tag to your parser
defmodule MyParser do
use Solid.Parser.Base, custom_tags: [MyCustomTag]
end
Then pass the custom parser as an option
"{% my_tag %}"
|> Solid.parse!(parser: MyParser)
|> Solid.render()
Control flow tags can change the information Liquid shows using programming logic.
More info: https://shopify.github.io/liquid/tags/control-flow/
"""
alias Solid.Context
@doc """
Build and return `NimbleParsec` expression to parse your tag. There are some helper expressions that can be used:
- `Solid.Parser.Literal`
- `Solid.Parser.Variable`
- `Solid.Parser.Argument`
"""
@callback spec(module) :: NimbleParsec.t()
@doc """
Define how to render your tag.
Third argument are the options passed to `Solid.render/3`
"""
@callback render(list(), Solid.Context.t(), keyword()) ::
{list(Solid.Template.rendered_data()), Solid.Context.t()} | String.t()
@doc """
Basic custom tag spec that accepts optional arguments
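For example, a hypothetical `{% my_tag %}` tag could reuse it:
@impl true
def spec(_parser), do: Solid.Tag.basic("my_tag")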
"""
@spec basic(String.t()) :: NimbleParsec.t()
def basic(name) do
import NimbleParsec
space = Solid.Parser.Literal.whitespace(min: 0)
ignore(Solid.Parser.BaseTag.opening_tag())
|> ignore(string(name))
|> ignore(space)
|> tag(optional(Solid.Parser.Argument.arguments()), :arguments)
|> ignore(Solid.Parser.BaseTag.closing_tag())
end
@doc """
Evaluate a tag and return the condition that succeeded or nil
"""
@spec eval(any, Context.t(), keyword()) :: {iolist | nil, Context.t()}
def eval(tag, context, options) do
case do_eval(tag, context, options) do
{text, context} -> {text, context}
text when is_binary(text) -> {[text: text], context}
text -> {text, context}
end
end
defp do_eval([], _context, _options), do: nil
defp do_eval([{tag_module, tag_data}], context, options) do
tag_module.render(tag_data, context, options)
end
end
|
lib/solid/tag.ex
| 0.889769
| 0.480296
|
tag.ex
|
starcoder
|
defmodule AWS.ACMPCA do
@moduledoc """
This is the *ACM Private CA API Reference*.
It provides descriptions, syntax, and usage examples for each of the actions and
data types involved in creating and managing private certificate authorities
(CA) for your organization.
The documentation for each action shows the Query API request parameters and the
XML response. Alternatively, you can use one of the AWS SDKs to access an API
that's tailored to the programming language or platform that you're using. For
more information, see [AWS SDKs](https://aws.amazon.com/tools/#SDKs). Each ACM Private CA API action has a quota that determines the number of times
the action can be called per second. For more information, see [API Rate Quotas
in ACM Private
CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaLimits.html#PcaLimits-api)
in the ACM Private CA user guide.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "ACM-PCA",
api_version: "2017-08-22",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "acm-pca",
global?: false,
protocol: "json",
service_id: "ACM PCA",
signature_version: "v4",
signing_name: "acm-pca",
target_prefix: "ACMPrivateCA"
}
end
@doc """
Creates a root or subordinate private certificate authority (CA).
You must specify the CA configuration, the certificate revocation list (CRL)
configuration, the CA type, and an optional idempotency token to avoid
accidental creation of multiple CAs. The CA configuration specifies the name of
the algorithm and key size to be used to create the CA private key, the type of
signing algorithm that the CA uses, and X.500 subject information. The CRL
configuration specifies the CRL expiration period in days (the validity period
of the CRL), the Amazon S3 bucket that will contain the CRL, and a CNAME alias
for the S3 bucket that is included in certificates issued by the CA. If
successful, this action returns the Amazon Resource Name (ARN) of the CA.
ACM Private CA assets that are stored in Amazon S3 can be protected with
encryption. For more information, see [Encrypting Your CRLs](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaCreateCa.html#crl-encryption).
Both PCA and the IAM principal must have permission to write to the S3 bucket
that you specify. If the IAM principal making the call does not have permission
to write to the bucket, then an exception is thrown. For more information, see
[Configure Access to ACM Private CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaAuthAccess.html).
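A hedged minimal input sketch (key names follow the API; all values are
hypothetical, `client` is a configured `AWS.Client`, and the response shape is
assumed to match the other JSON-protocol modules in this repository):
```
input = %{
  "CertificateAuthorityConfiguration" => %{
    "KeyAlgorithm" => "RSA_2048",
    "SigningAlgorithm" => "SHA256WITHRSA",
    "Subject" => %{"CommonName" => "example.com"}
  },
  "CertificateAuthorityType" => "ROOT",
  "IdempotencyToken" => "token-1234"
}
{:ok, %{"CertificateAuthorityArn" => arn}, _response} =
  AWS.ACMPCA.create_certificate_authority(client, input)
```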
"""
def create_certificate_authority(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateCertificateAuthority", input, options)
end
@doc """
Creates an audit report that lists every time that your CA private key is used.
The report is saved in the Amazon S3 bucket that you specify on input. The
[IssueCertificate](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_IssueCertificate.html) and
[RevokeCertificate](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_RevokeCertificate.html)
actions use the private key.
Both PCA and the IAM principal must have permission to write to the S3 bucket
that you specify. If the IAM principal making the call does not have permission
to write to the bucket, then an exception is thrown. For more information, see
[Configure Access to ACM Private CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaAuthAccess.html).
ACM Private CA assets that are stored in Amazon S3 can be protected with
encryption. For more information, see [Encrypting Your Audit Reports](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaAuditReport.html#audit-report-encryption).
"""
def create_certificate_authority_audit_report(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"CreateCertificateAuthorityAuditReport",
input,
options
)
end
@doc """
Grants one or more permissions on a private CA to the AWS Certificate Manager
(ACM) service principal (`acm.amazonaws.com`).
These permissions allow ACM to issue and renew ACM certificates that reside in
the same AWS account as the CA.
You can list current permissions with the
[ListPermissions](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_ListPermissions.html) action and revoke them with the
[DeletePermission](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_DeletePermission.html)
action.
## About Permissions
* If the private CA and the certificates it issues reside in the
same account, you can use `CreatePermission` to grant permissions for ACM to
carry out automatic certificate renewals.
* For automatic certificate renewal to succeed, the ACM service
principal needs permissions to create, retrieve, and list certificates.
* If the private CA and the ACM certificates reside in different
accounts, then permissions cannot be used to enable automatic renewals. Instead,
the ACM certificate owner must set up a resource-based policy to enable
cross-account issuance and renewals. For more information, see [Using a Resource Based Policy with ACM Private
CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/pca-rbp.html).
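A minimal call might look like this (the ARN is hypothetical):
```
AWS.ACMPCA.create_permission(client, %{
  "CertificateAuthorityArn" =>
    "arn:aws:acm-pca:us-east-1:111122223333:certificate-authority/11111111-2222-3333-4444-555555555555",
  "Principal" => "acm.amazonaws.com",
  "Actions" => ["IssueCertificate", "GetCertificate", "ListPermissions"]
})
```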
"""
def create_permission(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePermission", input, options)
end
@doc """
Deletes a private certificate authority (CA).
You must provide the Amazon Resource Name (ARN) of the private CA that you want
to delete. You can find the ARN by calling the
[ListCertificateAuthorities](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_ListCertificateAuthorities.html) action.
Deleting a CA will invalidate other CAs and certificates below it in your CA
hierarchy.
Before you can delete a CA that you have created and activated, you must disable
it. To do this, call the
[UpdateCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_UpdateCertificateAuthority.html)
action and set the **CertificateAuthorityStatus** parameter to `DISABLED`.
Additionally, you can delete a CA if you are waiting for it to be created (that
is, the status of the CA is `CREATING`). You can also delete it if the CA has
been created but you haven't yet imported the signed certificate into ACM
Private CA (that is, the status of the CA is `PENDING_CERTIFICATE`).
When you successfully call
[DeleteCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_DeleteCertificateAuthority.html), the CA's status changes to `DELETED`. However, the CA won't be permanently
deleted until the restoration period has passed. By default, if you do not set
the `PermanentDeletionTimeInDays` parameter, the CA remains restorable for 30
days. You can set the parameter from 7 to 30 days. The
[DescribeCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_DescribeCertificateAuthority.html)
action returns the time remaining in the restoration window of a private CA in
the `DELETED` state. To restore an eligible CA, call the
[RestoreCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_RestoreCertificateAuthority.html)
action.
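A hedged disable-then-delete sketch (the ARN is hypothetical, and
`update_certificate_authority/3` is assumed to be defined later in this
module):
```
ca_arn =
  "arn:aws:acm-pca:us-east-1:111122223333:certificate-authority/11111111-2222-3333-4444-555555555555"
AWS.ACMPCA.update_certificate_authority(client, %{
  "CertificateAuthorityArn" => ca_arn,
  "Status" => "DISABLED"
})
AWS.ACMPCA.delete_certificate_authority(client, %{
  "CertificateAuthorityArn" => ca_arn,
  "PermanentDeletionTimeInDays" => 7
})
```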
"""
def delete_certificate_authority(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteCertificateAuthority", input, options)
end
@doc """
Revokes permissions on a private CA granted to the AWS Certificate Manager (ACM)
service principal (acm.amazonaws.com).
These permissions allow ACM to issue and renew ACM certificates that reside in
the same AWS account as the CA. If you revoke these permissions, ACM will no
longer renew the affected certificates automatically.
Permissions can be granted with the
[CreatePermission](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_CreatePermission.html) action and listed with the
[ListPermissions](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_ListPermissions.html)
action.
## About Permissions
* If the private CA and the certificates it issues reside in the
same account, you can use `CreatePermission` to grant permissions for ACM to
carry out automatic certificate renewals.
* For automatic certificate renewal to succeed, the ACM service
principal needs permissions to create, retrieve, and list certificates.
* If the private CA and the ACM certificates reside in different
accounts, then permissions cannot be used to enable automatic renewals. Instead,
the ACM certificate owner must set up a resource-based policy to enable
cross-account issuance and renewals. For more information, see [Using a Resource Based Policy with ACM Private
CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/pca-rbp.html).
"""
def delete_permission(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePermission", input, options)
end
@doc """
Deletes the resource-based policy attached to a private CA.
Deletion will remove any access that the policy has granted. If there is no
policy attached to the private CA, this action will return successful.
If you delete a policy that was applied through AWS Resource Access Manager
(RAM), the CA will be removed from all shares in which it was included.
The AWS Certificate Manager Service Linked Role that the policy supports is not
affected when you delete the policy.
The current policy can be shown with
[GetPolicy](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_GetPolicy.html) and updated with
[PutPolicy](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_PutPolicy.html).
## About Policies
* A policy grants access on a private CA to an AWS customer account,
to AWS Organizations, or to an AWS Organizations unit. Policies are under the
control of a CA administrator. For more information, see [Using a Resource Based Policy with ACM Private
CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/pca-rbp.html).
* A policy permits a user of AWS Certificate Manager (ACM) to issue
ACM certificates signed by a CA in another account.
* For ACM to manage automatic renewal of these certificates, the ACM
user must configure a Service Linked Role (SLR). The SLR allows the ACM service
to assume the identity of the user, subject to confirmation against the ACM
Private CA policy. For more information, see [Using a Service Linked Role with ACM](https://docs.aws.amazon.com/acm/latest/userguide/acm-slr.html).
* Updates made in AWS Resource Manager (RAM) are reflected in
policies. For more information, see [Attach a Policy for Cross-Account Access](https://docs.aws.amazon.com/acm-pca/latest/userguide/pca-ram.html).
"""
def delete_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePolicy", input, options)
end
@doc """
Lists information about your private certificate authority (CA) or one that has
been shared with you.
You specify the private CA on input by its ARN (Amazon Resource Name). The
output contains the status of your CA. This can be any of the following:
* `CREATING` - ACM Private CA is creating your private certificate
authority.
* `PENDING_CERTIFICATE` - The certificate is pending. You must use
your ACM Private CA-hosted or on-premises root or subordinate CA to sign your
private CA CSR and then import it into PCA.
* `ACTIVE` - Your private CA is active.
* `DISABLED` - Your private CA has been disabled.
* `EXPIRED` - Your private CA certificate has expired.
* `FAILED` - Your private CA has failed. Your CA can fail because of
problems such a network outage or back-end AWS failure or other errors. A failed
CA can never return to the pending state. You must create a new CA.
* `DELETED` - Your private CA is within the restoration period,
after which it is permanently deleted. The length of time remaining in the CA's
restoration period is also included in this action's output.
"""
def describe_certificate_authority(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeCertificateAuthority", input, options)
end
@doc """
Lists information about a specific audit report created by calling the
[CreateCertificateAuthorityAuditReport](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_CreateCertificateAuthorityAuditReport.html) action.
Audit information is created every time the certificate authority (CA) private
key is used. The private key is used when you call the
[IssueCertificate](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_IssueCertificate.html)
action or the
[RevokeCertificate](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_RevokeCertificate.html)
action.
"""
def describe_certificate_authority_audit_report(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeCertificateAuthorityAuditReport",
input,
options
)
end
@doc """
Retrieves a certificate from your private CA or one that has been shared with
you.
The ARN of the certificate is returned when you call the
[IssueCertificate](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_IssueCertificate.html) action. You must specify both the ARN of your private CA and the ARN of the
issued certificate when calling the **GetCertificate** action. You can retrieve
the certificate if it is in the **ISSUED** state. You can call the
[CreateCertificateAuthorityAuditReport](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_CreateCertificateAuthorityAuditReport.html)
action to create a report that contains information about all of the
certificates issued and revoked by your private CA.
"""
def get_certificate(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetCertificate", input, options)
end
@doc """
Retrieves the certificate and certificate chain for your private certificate
authority (CA) or one that has been shared with you.
Both the certificate and the chain are base64 PEM-encoded. The chain does not
include the CA certificate. Each certificate in the chain signs the one before
it.
"""
def get_certificate_authority_certificate(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetCertificateAuthorityCertificate", input, options)
end
@doc """
Retrieves the certificate signing request (CSR) for your private certificate
authority (CA).
The CSR is created when you call the
[CreateCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_CreateCertificateAuthority.html) action. Sign the CSR with your ACM Private CA-hosted or on-premises root or
subordinate CA. Then import the signed certificate back into ACM Private CA by
calling the
[ImportCertificateAuthorityCertificate](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_ImportCertificateAuthorityCertificate.html)
action. The CSR is returned as a base64 PEM-encoded string.
"""
def get_certificate_authority_csr(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetCertificateAuthorityCsr", input, options)
end
@doc """
Retrieves the resource-based policy attached to a private CA.
If either the private CA resource or the policy cannot be found, this action
returns a `ResourceNotFoundException`.
The policy can be attached or updated with
[PutPolicy](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_PutPolicy.html) and removed with
[DeletePolicy](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_DeletePolicy.html).
## About Policies
* A policy grants access on a private CA to an AWS customer account,
to AWS Organizations, or to an AWS Organizations unit. Policies are under the
control of a CA administrator. For more information, see [Using a Resource Based Policy with ACM Private
CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/pca-rbp.html).
* A policy permits a user of AWS Certificate Manager (ACM) to issue
ACM certificates signed by a CA in another account.
* For ACM to manage automatic renewal of these certificates, the ACM
user must configure a Service Linked Role (SLR). The SLR allows the ACM service
to assume the identity of the user, subject to confirmation against the ACM
Private CA policy. For more information, see [Using a Service Linked Role with ACM](https://docs.aws.amazon.com/acm/latest/userguide/acm-slr.html).
* Updates made in AWS Resource Access Manager (RAM) are reflected in
policies. For more information, see [Attach a Policy for Cross-Account Access](https://docs.aws.amazon.com/acm-pca/latest/userguide/pca-ram.html).
"""
def get_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetPolicy", input, options)
end
@doc """
Imports a signed private CA certificate into ACM Private CA.
This action is used when you are using a chain of trust whose root is located
outside ACM Private CA. Before you can call this action, the following
preparations must be in place:
1. In ACM Private CA, call the
[CreateCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_CreateCertificateAuthority.html) action to create the private CA that you plan to back with the imported
certificate.
2. Call the
[GetCertificateAuthorityCsr](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_GetCertificateAuthorityCsr.html)
action to generate a certificate signing request (CSR).
3. Sign the CSR using a root or intermediate CA hosted by either an
on-premises PKI hierarchy or by a commercial CA.
4. Create a certificate chain and copy the signed certificate and
the certificate chain to your working directory.
ACM Private CA supports three scenarios for installing a CA certificate:
* Installing a certificate for a root CA hosted by ACM Private CA.
* Installing a subordinate CA certificate whose parent authority is
hosted by ACM Private CA.
* Installing a subordinate CA certificate whose parent authority is
externally hosted.
The following additional requirements apply when you import a CA certificate.
* Only a self-signed certificate can be imported as a root CA.
* A self-signed certificate cannot be imported as a subordinate CA.
* Your certificate chain must not include the private CA certificate
that you are importing.
* Your root CA must be the last certificate in your chain. The
subordinate certificate, if any, that your root CA signed must be next to last.
The subordinate certificate signed by the preceding subordinate CA must come
next, and so on until your chain is built.
* The chain must be PEM-encoded.
* The maximum allowed size of a certificate is 32 KB.
* The maximum allowed size of a certificate chain is 2 MB.
*Enforcement of Critical Constraints*
ACM Private CA allows the following extensions to be marked critical in the
imported CA certificate or chain.
* Basic constraints (*must* be marked critical)
* Subject alternative names
* Key usage
* Extended key usage
* Authority key identifier
* Subject key identifier
* Issuer alternative name
* Subject directory attributes
* Subject information access
* Certificate policies
* Policy mappings
* Inhibit anyPolicy
ACM Private CA rejects the following extensions when they are marked critical in
an imported CA certificate or chain.
* Name constraints
* Policy constraints
* CRL distribution points
* Authority information access
* Freshest CRL
* Any other extension
"""
def import_certificate_authority_certificate(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"ImportCertificateAuthorityCertificate",
input,
options
)
end
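# A sketch of the four-step flow described above, under the same assumptions
# as the earlier sketch (module name `AWS.ACMPCA`, aws-elixir response shape).
# `MyPKI.sign_csr/1` is a hypothetical placeholder for the external signing
# step (step 3), which happens outside this API.
#
#     {:ok, %{"CertificateAuthorityArn" => ca_arn}, _} =
#       AWS.ACMPCA.create_certificate_authority(client, create_input)
#
#     {:ok, %{"Csr" => csr}, _} =
#       AWS.ACMPCA.get_certificate_authority_csr(client, %{"CertificateAuthorityArn" => ca_arn})
#
#     {cert_pem, chain_pem} = MyPKI.sign_csr(csr)
#
#     AWS.ACMPCA.import_certificate_authority_certificate(client, %{
#       "CertificateAuthorityArn" => ca_arn,
#       "Certificate" => Base.encode64(cert_pem),
#       "CertificateChain" => Base.encode64(chain_pem)
#     })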
@doc """
Uses your private certificate authority (CA), or one that has been shared with
you, to issue a client certificate.
This action returns the Amazon Resource Name (ARN) of the certificate. You can
retrieve the certificate by calling the
[GetCertificate](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_GetCertificate.html)
action and specifying the ARN.
You cannot use the ACM **ListCertificateAuthorities** action to retrieve the
ARNs of the certificates that you issue by using ACM Private CA.
"""
def issue_certificate(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "IssueCertificate", input, options)
end
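# A sketch tying `issue_certificate/3` to `get_certificate/3` as described
# above: issuing returns only the ARN, and the certificate body is fetched
# separately once it reaches the ISSUED state. Key names follow the ACM PCA
# API reference; the module name and the `csr_pem` value are assumptions.
#
#     {:ok, %{"CertificateArn" => cert_arn}, _} =
#       AWS.ACMPCA.issue_certificate(client, %{
#         "CertificateAuthorityArn" => ca_arn,
#         "Csr" => Base.encode64(csr_pem),
#         "SigningAlgorithm" => "SHA256WITHRSA",
#         "Validity" => %{"Type" => "DAYS", "Value" => 365}
#       })
#
#     {:ok, %{"Certificate" => cert_pem}, _} =
#       AWS.ACMPCA.get_certificate(client, %{
#         "CertificateAuthorityArn" => ca_arn,
#         "CertificateArn" => cert_arn
#       })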
@doc """
Lists the private certificate authorities that you created by using the
[CreateCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_CreateCertificateAuthority.html)
action.
"""
def list_certificate_authorities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListCertificateAuthorities", input, options)
end
@doc """
Lists all permissions on a private CA, if any, granted to the AWS Certificate
Manager (ACM) service principal (acm.amazonaws.com).
These permissions allow ACM to issue and renew ACM certificates that reside in
the same AWS account as the CA.
Permissions can be granted with the
[CreatePermission](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_CreatePermission.html) action and revoked with the
[DeletePermission](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_DeletePermission.html)
action.
## About Permissions
* If the private CA and the certificates it issues reside in the
same account, you can use `CreatePermission` to grant permissions for ACM to
carry out automatic certificate renewals.
* For automatic certificate renewal to succeed, the ACM service
principal needs permissions to create, retrieve, and list certificates.
* If the private CA and the ACM certificates reside in different
accounts, then permissions cannot be used to enable automatic renewals. Instead,
the ACM certificate owner must set up a resource-based policy to enable
cross-account issuance and renewals. For more information, see [Using a Resource Based Policy with ACM Private
CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/pca-rbp.html).
"""
def list_permissions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPermissions", input, options)
end
@doc """
Lists the tags, if any, that are associated with your private CA or one that has
been shared with you.
Tags are labels that you can use to identify and organize your CAs. Each tag
consists of a key and an optional value. Call the
[TagCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_TagCertificateAuthority.html) action to add one or more tags to your CA. Call the
[UntagCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_UntagCertificateAuthority.html)
action to remove tags.
"""
def list_tags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTags", input, options)
end
@doc """
Attaches a resource-based policy to a private CA.
A policy can also be applied by sharing a private CA through AWS Resource Access
Manager (RAM). For more information, see [Attach a Policy for Cross-Account Access](https://docs.aws.amazon.com/acm-pca/latest/userguide/pca-ram.html).
The policy can be displayed with
[GetPolicy](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_GetPolicy.html) and removed with
[DeletePolicy](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_DeletePolicy.html).
## About Policies
* A policy grants access on a private CA to an AWS customer account,
to AWS Organizations, or to an AWS Organizations unit. Policies are under the
control of a CA administrator. For more information, see [Using a Resource Based Policy with ACM Private
CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/pca-rbp.html).
* A policy permits a user of AWS Certificate Manager (ACM) to issue
ACM certificates signed by a CA in another account.
* For ACM to manage automatic renewal of these certificates, the ACM
user must configure a Service Linked Role (SLR). The SLR allows the ACM service
to assume the identity of the user, subject to confirmation against the ACM
Private CA policy. For more information, see [Using a Service Linked Role with ACM](https://docs.aws.amazon.com/acm/latest/userguide/acm-slr.html).
* Updates made in AWS Resource Access Manager (RAM) are reflected in
policies. For more information, see [Attach a Policy for Cross-Account Access](https://docs.aws.amazon.com/acm-pca/latest/userguide/pca-ram.html).
"""
def put_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutPolicy", input, options)
end
@doc """
Restores a certificate authority (CA) that is in the `DELETED` state.
You can restore a CA during the period that you defined in the
**PermanentDeletionTimeInDays** parameter of the
[DeleteCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_DeleteCertificateAuthority.html) action. Currently, you can specify 7 to 30 days. If you did not specify a
**PermanentDeletionTimeInDays** value, by default you can restore the CA at any
time in a 30 day period. You can check the time remaining in the restoration
period of a private CA in the `DELETED` state by calling the
[DescribeCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_DescribeCertificateAuthority.html)
or
[ListCertificateAuthorities](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_ListCertificateAuthorities.html) actions. The status of a restored CA is set to its pre-deletion status when the
**RestoreCertificateAuthority** action returns. To change its status to
`ACTIVE`, call the
[UpdateCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_UpdateCertificateAuthority.html)
action. If the private CA was in the `PENDING_CERTIFICATE` state at deletion,
you must use the
[ImportCertificateAuthorityCertificate](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_ImportCertificateAuthorityCertificate.html)
action to import a CA certificate into the private CA before it can be
activated. You cannot restore a CA after the restoration period has ended.
"""
def restore_certificate_authority(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RestoreCertificateAuthority", input, options)
end
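# A sketch of the restore flow described above (module name `AWS.ACMPCA` is an
# assumption): restore the CA, import a certificate first if the CA was in the
# PENDING_CERTIFICATE state at deletion, then set the status back to ACTIVE.
#
#     arn_input = %{"CertificateAuthorityArn" => ca_arn}
#     {:ok, _, _} = AWS.ACMPCA.restore_certificate_authority(client, arn_input)
#
#     # Only needed when the CA was PENDING_CERTIFICATE at deletion:
#     # AWS.ACMPCA.import_certificate_authority_certificate(client, import_input)
#
#     {:ok, _, _} =
#       AWS.ACMPCA.update_certificate_authority(client, %{
#         "CertificateAuthorityArn" => ca_arn,
#         "Status" => "ACTIVE"
#       })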
@doc """
Revokes a certificate that was issued inside ACM Private CA.
If you enable a certificate revocation list (CRL) when you create or update your
private CA, information about the revoked certificates will be included in the
CRL. ACM Private CA writes the CRL to an S3 bucket that you specify. A CRL is
typically updated approximately 30 minutes after a certificate is revoked. If
for any reason the CRL update fails, ACM Private CA makes further
attempts every 15 minutes. With Amazon CloudWatch, you can create alarms for the
metrics `CRLGenerated` and `MisconfiguredCRLBucket`. For more information, see
[Supported CloudWatch Metrics](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaCloudWatch.html).
Both PCA and the IAM principal must have permission to write to the S3 bucket
that you specify. If the IAM principal making the call does not have permission
to write to the bucket, then an exception is thrown. For more information, see
[Configure Access to ACM Private CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaAuthAccess.html).
ACM Private CA also writes revocation information to the audit report. For more
information, see
[CreateCertificateAuthorityAuditReport](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_CreateCertificateAuthorityAuditReport.html).
You cannot revoke a root CA self-signed certificate.
"""
def revoke_certificate(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RevokeCertificate", input, options)
end
@doc """
Adds one or more tags to your private CA.
Tags are labels that you can use to identify and organize your AWS resources.
Each tag consists of a key and an optional value. You specify the private CA on
input by its Amazon Resource Name (ARN). You specify the tag by using a
key-value pair. You can apply a tag to just one private CA if you want to
identify a specific characteristic of that CA, or you can apply the same tag to
multiple private CAs if you want to filter for a common relationship among those
CAs. To remove one or more tags, use the
[UntagCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_UntagCertificateAuthority.html) action. Call the
[ListTags](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_ListTags.html)
action to see what tags are associated with your CA.
"""
def tag_certificate_authority(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagCertificateAuthority", input, options)
end
@doc """
Removes one or more tags from your private CA.
A tag consists of a key-value pair. If you do not specify the value portion of
the tag when calling this action, the tag will be removed regardless of value.
If you specify a value, the tag is removed only if it is associated with the
specified value. To add tags to a private CA, use the
[TagCertificateAuthority](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_TagCertificateAuthority.html). Call the
[ListTags](https://docs.aws.amazon.com/acm-pca/latest/APIReference/API_ListTags.html)
action to see what tags are associated with your CA.
"""
def untag_certificate_authority(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagCertificateAuthority", input, options)
end
@doc """
Updates the status or configuration of a private certificate authority (CA).
Your private CA must be in the `ACTIVE` or `DISABLED` state before you can
update it. You can disable a private CA that is in the `ACTIVE` state or make a
CA that is in the `DISABLED` state active again.
Both PCA and the IAM principal must have permission to write to the S3 bucket
that you specify. If the IAM principal making the call does not have permission
to write to the bucket, then an exception is thrown. For more information, see
[Configure Access to ACM Private CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaAuthAccess.html).
"""
def update_certificate_authority(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateCertificateAuthority", input, options)
end
end
|
lib/aws/generated/acmpca.ex
| 0.816187
| 0.528473
|
acmpca.ex
|
starcoder
|
defmodule Scenic.Scrollable.Direction do
@moduledoc """
Utility module for restricting certain operations to a single direction. A value can be associated with either the horizontal or the vertical direction.
"""
@typedoc """
The directions a value can be associated with.
"""
@type direction :: :horizontal | :vertical
@typedoc """
The Direction type. A value can be either associated with the horizontal or the vertical direction, by pairing the :horizontal or :vertical atoms with the value in a tuple.
"""
@type t :: {:horizontal, term} | {:vertical, term}
@typedoc """
Data structure representing a vector 2, in the form of an {x, y} tuple.
"""
@type v2 :: Scenic.Scrollable.v2()
@doc """
Associate a value with a direction.
## Examples
iex> Scenic.Scrollable.Direction.return(5, :horizontal)
{:horizontal, 5}
iex> Scenic.Scrollable.Direction.return(6, :vertical)
{:vertical, 6}
"""
@spec return(term, direction) :: t
def return(x, :horizontal), do: {:horizontal, x}
def return(x, :vertical), do: {:vertical, x}
@doc """
Associate a value with the horizontal direction.
## Examples
iex> Scenic.Scrollable.Direction.as_horizontal(5)
{:horizontal, 5}
"""
@spec as_horizontal(term) :: t
def as_horizontal(x), do: return(x, :horizontal)
@doc """
Associate a value with the vertical direction.
## Examples
iex> Scenic.Scrollable.Direction.as_vertical(6)
{:vertical, 6}
"""
@spec as_vertical(term) :: t
def as_vertical(x), do: return(x, :vertical)
@doc """
Convert a `t:Scenic.Scrollable.Direction.t/0` to a `t:Scenic.Math.vector_2/0`.
If the value is non-numeric, the vector `{0, 0}` will be returned.
## Examples
iex> Scenic.Scrollable.Direction.as_horizontal(5)
...> |> Scenic.Scrollable.Direction.to_vector_2
{5, 0}
iex> Scenic.Scrollable.Direction.as_vertical(5)
...> |> Scenic.Scrollable.Direction.to_vector_2
{0, 5}
iex> Scenic.Scrollable.Direction.as_horizontal(:non_numeric_value)
...> |> Scenic.Scrollable.Direction.to_vector_2
{0, 0}
"""
@spec to_vector_2(t) :: v2
def to_vector_2({:horizontal, x}) when is_number(x), do: {x, 0}
def to_vector_2({:vertical, y}) when is_number(y), do: {0, y}
def to_vector_2(_), do: {0, 0}
@doc """
Create a `t:Scenic.Scrollable.Direction.t/0` from a `t:Scenic.Math.vector_2/0`.
## Examples
iex> Scenic.Scrollable.Direction.from_vector_2({3, 5}, :horizontal)
{:horizontal, 3}
iex> Scenic.Scrollable.Direction.from_vector_2({3, 5}, :vertical)
{:vertical, 5}
"""
@spec from_vector_2(v2, direction) :: t
def from_vector_2({x, _}, :horizontal), do: {:horizontal, x}
def from_vector_2({_, y}, :vertical), do: {:vertical, y}
@doc """
Obtain the inner value from a `t:Scenic.Scrollable.Direction.t/0`.
## Examples
iex> Scenic.Scrollable.Direction.as_horizontal(5)
...> |> Scenic.Scrollable.Direction.unwrap
5
"""
@spec unwrap(t) :: term
def unwrap({_, x}), do: x
@doc """
Convert a horizontal `t:Scenic.Scrollable.Direction.t/0` to a vertical one, and vice versa.
## Examples
iex> Scenic.Scrollable.Direction.as_horizontal(5)
...> |> Scenic.Scrollable.Direction.invert
{:vertical, 5}
"""
@spec invert(t) :: t
def invert({:horizontal, x}), do: {:vertical, x}
def invert({:vertical, x}), do: {:horizontal, x}
@doc """
Add two `t:Scenic.Scrollable.Direction.t/0` values. The addition is performed only when both values are numeric and associated with the same direction; otherwise the first operand is returned unchanged.
## Examples
iex> five = Scenic.Scrollable.Direction.as_horizontal(5)
...> six = Scenic.Scrollable.Direction.as_horizontal(6)
...> Scenic.Scrollable.Direction.add(five, six)
{:horizontal, 11}
iex> three = Scenic.Scrollable.Direction.as_vertical(3)
...> seven = Scenic.Scrollable.Direction.as_vertical(7)
...> Scenic.Scrollable.Direction.add(three, seven)
{:vertical, 10}
iex> five = Scenic.Scrollable.Direction.as_horizontal(5)
...> six = Scenic.Scrollable.Direction.as_vertical(6)
...> Scenic.Scrollable.Direction.add(five, six)
{:horizontal, 5}
iex> non_numeric_value = Scenic.Scrollable.Direction.as_horizontal(:non_numeric_value)
...> six = Scenic.Scrollable.Direction.as_vertical(6)
...> Scenic.Scrollable.Direction.add(non_numeric_value, six)
{:horizontal, :non_numeric_value}
"""
@spec add(t, t) :: t
def add({:horizontal, x}, {:horizontal, y}) when is_number(x) and is_number(y),
do: {:horizontal, x + y}
def add({:vertical, x}, {:vertical, y}) when is_number(x) and is_number(y),
do: {:vertical, x + y}
def add({:horizontal, x}, _), do: {:horizontal, x}
def add({:vertical, x}, _), do: {:vertical, x}
@doc """
Subtract two `t:Scenic.Scrollable.Direction.t/0` values. The subtraction is performed only when both values are numeric and associated with the same direction; otherwise the first operand is returned unchanged.
## Examples
iex> five = Scenic.Scrollable.Direction.as_horizontal(5)
...> six = Scenic.Scrollable.Direction.as_horizontal(6)
...> Scenic.Scrollable.Direction.subtract(five, six)
{:horizontal, -1}
iex> three = Scenic.Scrollable.Direction.as_vertical(3)
...> seven = Scenic.Scrollable.Direction.as_vertical(7)
...> Scenic.Scrollable.Direction.subtract(three, seven)
{:vertical, -4}
iex> five = Scenic.Scrollable.Direction.as_horizontal(5)
...> six = Scenic.Scrollable.Direction.as_vertical(6)
...> Scenic.Scrollable.Direction.subtract(five, six)
{:horizontal, 5}
iex> non_numeric_value = Scenic.Scrollable.Direction.as_horizontal(:non_numeric_value)
...> six = Scenic.Scrollable.Direction.as_vertical(6)
...> Scenic.Scrollable.Direction.subtract(non_numeric_value, six)
{:horizontal, :non_numeric_value}
"""
@spec subtract(t, t) :: t
def subtract({:horizontal, x}, {:horizontal, y}) when is_number(x) and is_number(y),
do: {:horizontal, x - y}
def subtract({:vertical, x}, {:vertical, y}) when is_number(x) and is_number(y),
do: {:vertical, x - y}
def subtract({:horizontal, x}, _), do: {:horizontal, x}
def subtract({:vertical, x}, _), do: {:vertical, x}
@doc """
Multiply two `t:Scenic.Scrollable.Direction.t/0` values. The multiplication is performed only when both values are numeric and associated with the same direction; otherwise the first operand is returned unchanged.
## Examples
iex> five = Scenic.Scrollable.Direction.as_horizontal(5)
...> six = Scenic.Scrollable.Direction.as_horizontal(6)
...> Scenic.Scrollable.Direction.multiply(five, six)
{:horizontal, 30}
iex> three = Scenic.Scrollable.Direction.as_vertical(3)
...> seven = Scenic.Scrollable.Direction.as_vertical(7)
...> Scenic.Scrollable.Direction.multiply(three, seven)
{:vertical, 21}
iex> five = Scenic.Scrollable.Direction.as_horizontal(5)
...> six = Scenic.Scrollable.Direction.as_vertical(6)
...> Scenic.Scrollable.Direction.multiply(five, six)
{:horizontal, 5}
iex> non_numeric_value = Scenic.Scrollable.Direction.as_horizontal(:non_numeric_value)
...> six = Scenic.Scrollable.Direction.as_vertical(6)
...> Scenic.Scrollable.Direction.multiply(non_numeric_value, six)
{:horizontal, :non_numeric_value}
"""
@spec multiply(t, t) :: t
def multiply({:horizontal, x}, {:horizontal, y}) when is_number(x) and is_number(y),
do: {:horizontal, x * y}
def multiply({:vertical, x}, {:vertical, y}) when is_number(x) and is_number(y),
do: {:vertical, x * y}
def multiply({:horizontal, x}, _), do: {:horizontal, x}
def multiply({:vertical, x}, _), do: {:vertical, x}
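@doc """
Multiply three `t:Scenic.Scrollable.Direction.t/0` values by applying `multiply/2` twice, so the pairwise rules of `multiply/2` apply at each step.
## Examples
iex> two = Scenic.Scrollable.Direction.as_horizontal(2)
...> three = Scenic.Scrollable.Direction.as_horizontal(3)
...> four = Scenic.Scrollable.Direction.as_horizontal(4)
...> Scenic.Scrollable.Direction.multiply(two, three, four)
{:horizontal, 24}
"""
@spec multiply(t, t, t) :: t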
def multiply(x, y, z) do
multiply(x, y)
|> multiply(z)
end
@doc """
Divide two `t:Scenic.Scrollable.Direction.t/0` values. The division is performed only when both values are numeric and associated with the same direction; otherwise the first operand is returned unchanged.
## Examples
iex> fifty = Scenic.Scrollable.Direction.as_horizontal(50)
...> ten = Scenic.Scrollable.Direction.as_horizontal(10)
...> Scenic.Scrollable.Direction.divide(fifty, ten)
{:horizontal, 5.0}
iex> nine = Scenic.Scrollable.Direction.as_vertical(9)
...> three = Scenic.Scrollable.Direction.as_vertical(3)
...> Scenic.Scrollable.Direction.divide(nine, three)
{:vertical, 3.0}
iex> six = Scenic.Scrollable.Direction.as_horizontal(6)
...> two = Scenic.Scrollable.Direction.as_vertical(2)
...> Scenic.Scrollable.Direction.divide(six, two)
{:horizontal, 6}
iex> non_numeric_value = Scenic.Scrollable.Direction.as_horizontal(:non_numeric_value)
...> six = Scenic.Scrollable.Direction.as_vertical(6)
...> Scenic.Scrollable.Direction.divide(non_numeric_value, six)
{:horizontal, :non_numeric_value}
"""
@spec divide(t, t) :: t
def divide({:horizontal, x}, {:horizontal, y}) when is_number(x) and is_number(y),
do: {:horizontal, x / y}
def divide({:vertical, x}, {:vertical, y}) when is_number(x) and is_number(y),
do: {:vertical, x / y}
def divide({:horizontal, x}, _), do: {:horizontal, x}
def divide({:vertical, x}, _), do: {:vertical, x}
@doc """
Apply a function only if the `t:Scenic.Scrollable.Direction.t/0` is associated with the horizontal direction.
Returns a new `t:Scenic.Scrollable.Direction.t/0`.
## Examples
iex> Scenic.Scrollable.Direction.map_horizontal({:horizontal, 5}, & &1 * 2)
{:horizontal, 10}
iex> Scenic.Scrollable.Direction.map_horizontal({:vertical, 5}, & &1 * 2)
{:vertical, 5}
"""
@spec map_horizontal(t, (term -> term)) :: t
def map_horizontal({:horizontal, x}, fun), do: {:horizontal, fun.(x)}
def map_horizontal(x, _), do: x
@doc """
Apply a function only if the `t:Scenic.Scrollable.Direction.t/0` is associated with the vertical direction.
Returns a new `t:Scenic.Scrollable.Direction.t/0`.
## Examples
iex> Scenic.Scrollable.Direction.map_vertical({:vertical, 5}, & &1 * 2)
{:vertical, 10}
iex> Scenic.Scrollable.Direction.map_vertical({:horizontal, 5}, & &1 * 2)
{:horizontal, 5}
"""
@spec map_vertical(t, (term -> term)) :: t
def map_vertical({:vertical, x}, fun), do: {:vertical, fun.(x)}
def map_vertical(x, _), do: x
@doc """
Apply a function to the `t:Scenic.Scrollable.Direction.t/0` inner value.
Returns a new `t:Scenic.Scrollable.Direction.t/0`.
## Examples
iex> Scenic.Scrollable.Direction.map({:horizontal, 5}, & &1 * 2)
{:horizontal, 10}
iex> Scenic.Scrollable.Direction.map({:vertical, 5}, & &1 * 2)
{:vertical, 10}
"""
@spec map(t, (term -> term)) :: t
def map({direction, value}, fun), do: {direction, fun.(value)}
end
|
lib/utility/direction.ex
| 0.947769
| 0.655322
|
direction.ex
|
starcoder
|
defmodule AWS.IoTAnalytics do
@moduledoc """
AWS IoT Analytics allows you to collect large amounts of device data, process
messages, and store them.
You can then query the data and run sophisticated analytics on it. AWS IoT
Analytics enables advanced data exploration through integration with Jupyter
Notebooks and data visualization through integration with Amazon QuickSight.
Traditional analytics and business intelligence tools are designed to process
structured data. IoT data often comes from devices that record noisy processes
(such as temperature, motion, or sound). As a result the data from these devices
can have significant gaps, corrupted messages, and false readings that must be
cleaned up before analysis can occur. Also, IoT data is often only meaningful in
the context of other data from external sources.
AWS IoT Analytics automates the steps required to analyze data from IoT devices.
AWS IoT Analytics filters, transforms, and enriches IoT data before storing it
in a time-series data store for analysis. You can set up the service to collect
only the data you need from your devices, apply mathematical transforms to
process the data, and enrich the data with device-specific metadata such as
device type and location before storing it. Then, you can analyze your data by
running queries using the built-in SQL query engine, or perform more complex
analytics and machine learning inference. AWS IoT Analytics includes pre-built
models for common IoT use cases so you can answer questions like which devices
are about to fail or which customers are at risk of abandoning their wearable
devices.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-11-27",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "iotanalytics",
global?: false,
protocol: "rest-json",
service_id: "IoTAnalytics",
signature_version: "v4",
signing_name: "iotanalytics",
target_prefix: nil
}
end
@doc """
Sends messages to a channel.
"""
def batch_put_message(%Client{} = client, input, options \\ []) do
url_path = "/messages/batch"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
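# A minimal usage sketch. Assumptions: the client comes from
# `AWS.Client.create/3` as in other aws-elixir modules, "mychannel" is a
# placeholder, and each payload is Base64-encoded as the BatchPutMessage API
# requires.
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#
#     input = %{
#       "channelName" => "mychannel",
#       "messages" => [
#         %{"messageId" => "msg-1", "payload" => Base.encode64(~s({"temp": 21.5}))}
#       ]
#     }
#
#     {:ok, result, _http} = AWS.IoTAnalytics.batch_put_message(client, input)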
@doc """
Cancels the reprocessing of data through the pipeline.
"""
def cancel_pipeline_reprocessing(
%Client{} = client,
pipeline_name,
reprocessing_id,
input,
options \\ []
) do
url_path =
"/pipelines/#{AWS.Util.encode_uri(pipeline_name)}/reprocessing/#{AWS.Util.encode_uri(reprocessing_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a channel.
A channel collects data from an MQTT topic and archives the raw, unprocessed
messages before publishing the data to a pipeline.
"""
def create_channel(%Client{} = client, input, options \\ []) do
url_path = "/channels"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Creates a dataset.
A dataset stores data retrieved from a data store by applying a `queryAction` (a
SQL query) or a `containerAction` (executing a containerized application). This
operation creates the skeleton of a dataset. The dataset can be populated
manually by calling `CreateDatasetContent` or automatically according to a
trigger you specify.
"""
def create_dataset(%Client{} = client, input, options \\ []) do
url_path = "/datasets"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Creates the content of a data set by applying a `queryAction` (a SQL query) or a
`containerAction` (executing a containerized application).
"""
def create_dataset_content(%Client{} = client, dataset_name, input, options \\ []) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_name)}/content"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a data store, which is a repository for messages.
Only data stores that are used to save pipeline data can be configured with
`ParquetConfiguration`.
"""
def create_datastore(%Client{} = client, input, options \\ []) do
url_path = "/datastores"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Creates a pipeline.
A pipeline consumes messages from a channel and allows you to process the
messages before storing them in a data store. You must specify both a `channel`
and a `datastore` activity and, optionally, as many as 23 additional activities
in the `pipelineActivities` array.
"""
def create_pipeline(%Client{} = client, input, options \\ []) do
url_path = "/pipelines"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
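# A sketch of the minimal `pipelineActivities` shape described above: a
# `channel` activity feeding a `datastore` activity. All names are
# placeholders; the key names follow the AWS IoT Analytics API reference.
#
#     input = %{
#       "pipelineName" => "mypipeline",
#       "pipelineActivities" => [
#         %{"channel" => %{
#             "name" => "myChannelActivity",
#             "channelName" => "mychannel",
#             "next" => "myDatastoreActivity"
#           }},
#         %{"datastore" => %{
#             "name" => "myDatastoreActivity",
#             "datastoreName" => "mydatastore"
#           }}
#       ]
#     }
#
#     {:ok, _result, _http} = AWS.IoTAnalytics.create_pipeline(client, input)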
@doc """
Deletes the specified channel.
"""
def delete_channel(%Client{} = client, channel_name, input, options \\ []) do
url_path = "/channels/#{AWS.Util.encode_uri(channel_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes the specified dataset.
You do not have to delete the content of the dataset before you perform this
operation.
"""
def delete_dataset(%Client{} = client, dataset_name, input, options \\ []) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes the content of the specified dataset.
"""
def delete_dataset_content(%Client{} = client, dataset_name, input, options \\ []) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_name)}/content"
headers = []
{query_params, input} =
[
{"versionId", "versionId"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes the specified data store.
"""
def delete_datastore(%Client{} = client, datastore_name, input, options \\ []) do
url_path = "/datastores/#{AWS.Util.encode_uri(datastore_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes the specified pipeline.
"""
def delete_pipeline(%Client{} = client, pipeline_name, input, options \\ []) do
url_path = "/pipelines/#{AWS.Util.encode_uri(pipeline_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Retrieves information about a channel.
"""
def describe_channel(%Client{} = client, channel_name, include_statistics \\ nil, options \\ []) do
url_path = "/channels/#{AWS.Util.encode_uri(channel_name)}"
headers = []
query_params = []
query_params =
if !is_nil(include_statistics) do
[{"includeStatistics", include_statistics} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Retrieves information about a dataset.
"""
def describe_dataset(%Client{} = client, dataset_name, options \\ []) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Retrieves information about a data store.
"""
def describe_datastore(
%Client{} = client,
datastore_name,
include_statistics \\ nil,
options \\ []
) do
url_path = "/datastores/#{AWS.Util.encode_uri(datastore_name)}"
headers = []
query_params = []
query_params =
if !is_nil(include_statistics) do
[{"includeStatistics", include_statistics} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Retrieves the current settings of the AWS IoT Analytics logging options.
"""
def describe_logging_options(%Client{} = client, options \\ []) do
url_path = "/logging"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Retrieves information about a pipeline.
"""
def describe_pipeline(%Client{} = client, pipeline_name, options \\ []) do
url_path = "/pipelines/#{AWS.Util.encode_uri(pipeline_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Retrieves the contents of a data set as presigned URIs.
"""
def get_dataset_content(%Client{} = client, dataset_name, version_id \\ nil, options \\ []) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_name)}/content"
headers = []
query_params = []
query_params =
if !is_nil(version_id) do
[{"versionId", version_id} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Retrieves a list of channels.
"""
def list_channels(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/channels"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists information about data set contents that have been created.
"""
def list_dataset_contents(
%Client{} = client,
dataset_name,
max_results \\ nil,
next_token \\ nil,
scheduled_before \\ nil,
scheduled_on_or_after \\ nil,
options \\ []
) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_name)}/contents"
headers = []
query_params = []
query_params =
if !is_nil(scheduled_on_or_after) do
[{"scheduledOnOrAfter", scheduled_on_or_after} | query_params]
else
query_params
end
query_params =
if !is_nil(scheduled_before) do
[{"scheduledBefore", scheduled_before} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Retrieves information about data sets.
"""
def list_datasets(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/datasets"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Retrieves a list of data stores.
"""
def list_datastores(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/datastores"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Retrieves a list of pipelines.
"""
def list_pipelines(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/pipelines"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the tags (metadata) that you have assigned to the resource.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
url_path = "/tags"
headers = []
query_params = []
query_params =
if !is_nil(resource_arn) do
[{"resourceArn", resource_arn} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Sets or updates the AWS IoT Analytics logging options.
If you update the value of any `loggingOptions` field, it takes up to one minute
for the change to take effect. Also, if you change the policy attached to the
role you specified in the `roleArn` field (for example, to correct an invalid
policy), it takes up to five minutes for that change to take effect.
"""
def put_logging_options(%Client{} = client, input, options \\ []) do
url_path = "/logging"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Simulates the results of running a pipeline activity on a message payload.
"""
def run_pipeline_activity(%Client{} = client, input, options \\ []) do
url_path = "/pipelineactivities/run"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Retrieves a sample of messages from the specified channel ingested during the
specified timeframe.
Up to 10 messages can be retrieved.
"""
def sample_channel_data(
%Client{} = client,
channel_name,
end_time \\ nil,
max_messages \\ nil,
start_time \\ nil,
options \\ []
) do
url_path = "/channels/#{AWS.Util.encode_uri(channel_name)}/sample"
headers = []
query_params = []
query_params =
if !is_nil(start_time) do
[{"startTime", start_time} | query_params]
else
query_params
end
query_params =
if !is_nil(max_messages) do
[{"maxMessages", max_messages} | query_params]
else
query_params
end
query_params =
if !is_nil(end_time) do
[{"endTime", end_time} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Starts the reprocessing of raw message data through the pipeline.
"""
def start_pipeline_reprocessing(%Client{} = client, pipeline_name, input, options \\ []) do
url_path = "/pipelines/#{AWS.Util.encode_uri(pipeline_name)}/reprocessing"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Adds to or modifies the tags of the given resource.
Tags are metadata that can be used to manage a resource.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
url_path = "/tags"
headers = []
{query_params, input} =
[
{"resourceArn", "resourceArn"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Removes the given tags (metadata) from the resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
url_path = "/tags"
headers = []
{query_params, input} =
[
{"resourceArn", "resourceArn"},
{"tagKeys", "tagKeys"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Updates the settings of a channel.
"""
def update_channel(%Client{} = client, channel_name, input, options \\ []) do
url_path = "/channels/#{AWS.Util.encode_uri(channel_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates the settings of a data set.
"""
def update_dataset(%Client{} = client, dataset_name, input, options \\ []) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates the settings of a data store.
"""
def update_datastore(%Client{} = client, datastore_name, input, options \\ []) do
url_path = "/datastores/#{AWS.Util.encode_uri(datastore_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates the settings of a pipeline.
You must specify both a `channel` and a `datastore` activity and, optionally, as
many as 23 additional activities in the `pipelineActivities` array.
"""
def update_pipeline(%Client{} = client, pipeline_name, input, options \\ []) do
url_path = "/pipelines/#{AWS.Util.encode_uri(pipeline_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
end
|
lib/aws/generated/iot_analytics.ex
| 0.857291
| 0.701036
|
iot_analytics.ex
|
starcoder
|
defmodule HL7.Codec do
@moduledoc """
Functions that decode and encode HL7 fields, repetitions, components and subcomponents.
Each type of item has an intermediate representation that will vary depending on whether the `trim`
option was used when decoding or encoding. If we set `trim` to `true`, some trailing optional
items and separators will be omitted from the decoded or encoded result, as we can see in the
following example:
iex> text = "504599^223344&&IIN&^~"
...> decode_field!(text, separators(), trim: true)
{"504599", {"223344", "", "IIN"}}
...> decode_field!(text, separators(), trim: false)
[{"504599", {"223344", "", "IIN", ""}, ""}, ""]
Both representations are correct, given that HL7 allows trailing items that are empty to be
omitted. This causes an ambiguity because the same item can be interpreted in several ways when
it is the first and only item present.
For example, in the following HL7 segment the item in the third field (`504599`) might be the
same in both cases (i.e. the first component of the second field):
1. `AUT||504599^^||||0000190447|^||`
2. `AUT||504599||||0000190447|^||`
But for this module it has two different representations:
1. First component of the second field
2. Second field
To resolve the ambiguity in the HL7 syntax, the code decoding and encoding HL7 segments using the
functions in this module must be aware of this issue and deal with it accordingly when performing
lookups or comparisons.
"""
alias HL7.Type
@separators {?|, ?^, ?&, ?~}
@null_value "\"\""
@doc """
Return the default separators used to encode HL7 messages in their compiled
format. These are:
* `|`: field separator
* `^`: component separator
* `&`: subcomponent separator
* `~`: repetition separator
To use custom separators in a message, use `HL7.Codec.set_separators/1` and pass the returned
value as an argument to the encoding functions.
"""
@spec separators() :: Type.separators()
def separators(), do: @separators
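@doc """
Build a separators tuple, overriding one or more of the default separator
characters. Separators that are not given keep their default values.
## Examples
iex> set_separators(component: ?@) === {?|, ?@, ?&, ?~}
true
"""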
@spec set_separators(Keyword.t()) :: Type.separators()
def set_separators(args) do
field = Keyword.get(args, :field, ?|)
component = Keyword.get(args, :component, ?^)
subcomponent = Keyword.get(args, :subcomponent, ?&)
repetition = Keyword.get(args, :repetition, ?~)
{field, component, subcomponent, repetition}
end
@compile {:inline, separator: 2}
@doc "Return the separator corresponding to an item type."
@spec separator(Type.item_type(), Type.separators()) :: byte
def separator(item_type, separators \\ @separators)
def separator(:field, {char, _, _, _}), do: char
def separator(:component, {_, char, _, _}), do: char
def separator(:subcomponent, {_, _, char, _}), do: char
def separator(:repetition, {_, _, _, char}), do: char
@compile {:inline, match_separator: 2}
@spec match_separator(byte, Type.separators()) :: {:match, Type.item_type()} | :nomatch
def match_separator(char, separators \\ @separators)
def match_separator(char, {char, _, _, _}), do: {:match, :field}
def match_separator(char, {_, char, _, _}), do: {:match, :component}
def match_separator(char, {_, _, char, _}), do: {:match, :subcomponent}
def match_separator(char, {_, _, _, char}), do: {:match, :repetition}
def match_separator(_char, _separators), do: :nomatch
@doc """
Checks if a value is empty. A value is considered empty when it is `nil` or an empty string.
"""
defmacro empty?(value) do
quote do
unquote(value) === "" or unquote(value) === nil
end
end
@doc """
Decode a binary holding an HL7 field into its intermediate representation (IR).
## Examples
iex> decode_field!("PREPAGA^112233^IIN")
{"PREPAGA", "112233", "IIN"}
...> decode_field!("112233~IIN")
["112233", "IIN"]
...> decode_field!("\"\"")
nil
...> decode_field!("")
""
"""
@spec decode_field!(binary, Type.separators(), trim :: boolean) :: Type.field() | no_return
def decode_field!(field, separators \\ @separators, trim \\ true)
def decode_field!("", _separators, _trim), do: ""
def decode_field!(@null_value, _separators, _trim), do: nil
def decode_field!(value, separators, trim) when is_binary(value) do
rep_sep = separator(:repetition, separators)
case :binary.split(value, <<rep_sep>>, split_options(trim)) do
[field] ->
decode_components!(field, separators, trim)
repetitions ->
for repetition <- repetitions do
decode_components!(repetition, separators, trim)
end
end
end
@doc """
Decode a binary holding one or more HL7 components into its intermediate representation.
"""
@spec decode_components!(binary, Type.separators(), trim :: boolean) ::
Type.component() | no_return
def decode_components!(components, separators \\ @separators, trim \\ true)
def decode_components!("", _separators, _trim), do: ""
def decode_components!(@null_value, _separators, _trim), do: nil
def decode_components!(field, separators, trim) do
comp_sep = separator(:component, separators)
case :binary.split(field, <<comp_sep>>, split_options(trim)) do
[component] ->
case decode_subcomponents!(component, separators, trim) do
components when is_tuple(components) ->
{components}
components ->
components
end
components ->
for component <- components do
decode_subcomponents!(component, separators, trim)
end
|> case do
[] -> ""
components -> List.to_tuple(components)
end
end
end
@doc """
Decode a binary holding one or more HL7 subcomponents into its intermediate representation.
"""
@spec decode_subcomponents!(binary, Type.separators(), trim :: boolean) ::
Type.subcomponent() | no_return
def decode_subcomponents!(component, separators \\ @separators, trim \\ true)
def decode_subcomponents!("", _separators, _trim), do: ""
def decode_subcomponents!(@null_value, _separators, _trim), do: nil
def decode_subcomponents!(component, separators, trim) do
subcomp_sep = separator(:subcomponent, separators)
case :binary.split(component, <<subcomp_sep>>, split_options(trim)) do
[subcomponent] ->
subcomponent
subcomponents ->
subcomponents
|> Enum.map(&decode_value!(&1))
|> case do
[] -> ""
subcomponents -> List.to_tuple(subcomponents)
end
end
end
@spec decode_value!(Type.field(), Type.value_type()) :: Type.value() | nil | no_return
def decode_value!(value, type \\ :string)
def decode_value!(@null_value, _type), do: nil
def decode_value!(value, type)
when type === :string or
(value === "" and
(type === :integer or type === :float or type === :date or type === :datetime)) do
# Empty fields have to be passed to the composite field module to insert the corresponding
# struct in the corresponding field.
value
end
def decode_value!(value, :integer), do: :erlang.binary_to_integer(value)
def decode_value!(value, :float), do: binary_to_float!(value)
def decode_value!(value, :date), do: binary_to_date!(value)
def decode_value!(value, :datetime), do: binary_to_datetime!(value)
def decode_value!(value, type) do
raise ArgumentError, "cannot decode value #{inspect(value)} with type #{inspect(type)}"
end
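# For illustration (values are arbitrary), typed decoding converts the raw HL7
# string into an Elixir term:
#
#     decode_value!("125", :integer)   #=> 125
#     decode_value!("1.5", :float)     #=> 1.5
#     decode_value!("20210301", :date) #=> ~D[2021-03-01]
#     decode_value!("\"\"", :integer)  #=> nil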
defp binary_to_float!(value) do
value |> Float.parse() |> elem(0)
end
defp binary_to_date!(
<<y::binary-size(4), m::binary-size(2), d::binary-size(2), _rest::binary>> = value
) do
year = :erlang.binary_to_integer(y)
month = :erlang.binary_to_integer(m)
day = :erlang.binary_to_integer(d)
case Date.new(year, month, day) do
{:ok, date} -> date
{:error, _reason} -> raise ArgumentError, "invalid date: #{value}"
end
end
defp binary_to_date!(value) do
raise ArgumentError, "invalid date: #{value}"
end
defp binary_to_datetime!(value) do
~r/^(?<year>(?:19|20)[0-9]{2})(?:(?<month>1[0-2]|0[1-9])(?:(?<day>3[0-1]|[1-2][0-9]|0[1-9])(?:(?<hour>[0-1][0-9]|2[0-3])(?:(?<minute>[0-5][0-9])(?:(?<second>[0-5][0-9](?:\.[0-9]{1,4})?)?)?)?)?)?)?(?<offset>[+-](?:[0-1][0-9]|2[0-3])[0-5][0-9])?/
|> Regex.named_captures(value)
|> Enum.filter(fn({_k, v}) -> v != "" end)
|> Enum.into(%{})
|> map_to_datetime()
end
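# For illustration (values are arbitrary): "20210301123055+0000" yields a
# DateTime because the offset is present, while "202103011230" yields a
# NaiveDateTime.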
# If offset is present then create DateTime otherwise create NaiveDateTime
defp map_to_datetime(%{"offset" => offset} = map) do
year = Map.get(map, "year", "")
month = Map.get(map, "month", "00")
day = Map.get(map, "day", "00")
hour = Map.get(map, "hour", "00")
min = Map.get(map, "minute", "00")
sec = Map.get(map, "second", "00")
time_st = Enum.join([hour, min, sec], ":")
# format as iso8601 string and then convert to DateTime
"#{year}-#{month}-#{day}T#{time_st}#{offset}"
|> DateTime.from_iso8601()
|> case do
{:ok, datetime, _} -> datetime
{:error, _reason} -> raise ArgumentError, "invalid datetime: #{inspect map}"
end
end
defp map_to_datetime(map) do
map =
map
|> Enum.map(fn({k, v}) -> {k, String.to_integer(v)} end)
|> Enum.into(%{})
year = Map.get(map, "year")
month = Map.get(map, "month", 0)
day = Map.get(map, "day", 0)
hour = Map.get(map, "hour", 0)
min = Map.get(map, "minute", 0)
sec = Map.get(map, "second", 0)
case NaiveDateTime.new(year, month, day, hour, min, sec) do
{:ok, datetime} -> datetime
{:error, _reason} -> raise ArgumentError, "invalid datetime: #{inspect map}"
end
end
@spec encode_field!(Type.field(), Type.separators(), trim :: boolean) :: iodata | no_return
def encode_field!(field, separators \\ @separators, trim \\ true)
def encode_field!(field, _separators, _trim) when is_binary(field), do: field
def encode_field!(nil, _separators, _trim), do: @null_value
def encode_field!(repetitions, separators, trim) when is_list(repetitions),
do: encode_repetitions!(repetitions, separators, trim, [])
def encode_field!(components, separators, trim) when is_tuple(components),
do: encode_components!(components, separators, trim)
defp encode_repetitions!([repetition | tail], separators, trim, acc)
when not is_list(repetition) do
value = encode_field!(repetition, separators, trim)
acc =
case acc do
[] -> [value]
[_ | _] -> [value, separator(:repetition, separators) | acc]
end
encode_repetitions!(tail, separators, trim, acc)
end
defp encode_repetitions!([], separators, trim, acc) do
acc
|> maybe_trim_item(separator(:repetition, separators), trim)
|> Enum.reverse()
end
@spec encode_components!(Type.component(), Type.separators(), trim :: boolean) ::
iodata | no_return
def encode_components!(components, separators \\ @separators, trim \\ true) do
subencoder = &encode_subcomponents!(&1, separators, trim)
encode_subitems(components, subencoder, separator(:component, separators), trim)
end
@spec encode_subcomponents!(Type.subcomponent(), Type.separators(), trim :: boolean) ::
iodata | no_return
def encode_subcomponents!(subcomponents, separators \\ @separators, trim \\ true) do
encode_subitems(subcomponents, &encode_value!/1, separator(:subcomponent, separators), trim)
end
defp encode_subitems(item, _subencoder, _separator, _trim) when is_binary(item), do: item
defp encode_subitems(nil, _subencoder, _separator, _trim), do: @null_value
defp encode_subitems(items, subencoder, separator, trim) when is_tuple(items),
do:
_encode_subitems(
items,
subencoder,
separator,
trim,
non_empty_tuple_size(items, trim),
0,
[]
)
defp _encode_subitems(items, subencoder, separator, trim, size, index, acc) when index < size do
value = subencoder.(elem(items, index))
acc =
case acc do
[] -> [value]
[_ | _] -> [value, separator | acc]
end
_encode_subitems(items, subencoder, separator, trim, size, index + 1, acc)
end
defp _encode_subitems(_items, _subencoder, separator, trim, _size, _index, acc) do
acc
|> maybe_trim_item(separator, trim)
|> Enum.reverse()
end
@spec encode_value!(Type.value() | nil, Type.value_type() | nil) :: binary | no_return
def encode_value!(value, type \\ :string)
def encode_value!(nil, _type), do: @null_value
def encode_value!(value, type) when type === :string or value === "", do: value
def encode_value!(value, :integer) when is_integer(value), do: :erlang.integer_to_binary(value)
def encode_value!(value, :float) when is_float(value), do: Float.to_string(value)
def encode_value!(value, :date) when is_map(value), do: format_date!(value)
def encode_value!(value, :datetime) when is_map(value), do: format_datetime(value)
def encode_value!(value, type) do
raise ArgumentError, "cannot encode value #{inspect(value)} with type #{inspect(type)}"
end
def format_date!(%Date{year: year, month: month, day: day}) do
format_date(year, month, day)
end
def format_date!(%NaiveDateTime{year: year, month: month, day: day}) do
format_date(year, month, day)
end
def format_date!(%DateTime{year: year, month: month, day: day}) do
format_date(year, month, day)
end
def format_date!(date) do
raise ArgumentError, "invalid date: #{inspect(date)}"
end
defp format_date(year, month, day) do
yyyy = zpad(year, 4)
mm = zpad(month, 2)
dd = zpad(day, 2)
<<yyyy::binary, mm::binary, dd::binary>>
end
def format_datetime(%NaiveDateTime{
year: year,
month: month,
day: day,
hour: hour,
minute: min,
second: sec
}) do
format_datetime(year, month, day, hour, min, sec)
end
def format_datetime(%DateTime{} = datetime) do
datetime
|> DateTime.to_iso8601()
|> String.replace("-", "")
|> String.replace(":", "")
|> String.replace("T", "")
|> String.replace("Z", "+0000")
end
def format_datetime(%Date{year: year, month: month, day: day}) do
format_datetime(year, month, day, 0, 0, 0)
end
def format_datetime(datetime) do
raise ArgumentError, "invalid datetime #{inspect(datetime)}"
end
defp format_datetime(year, month, day, hour, min, sec) do
yyyy = zpad(year, 4)
m = zpad(month, 2)
dd = zpad(day, 2)
hh = zpad(hour, 2)
mm = zpad(min, 2)
if sec === 0 do
<<yyyy::binary, m::binary, dd::binary, hh::binary, mm::binary>>
else
ss = zpad(sec, 2)
<<yyyy::binary, m::binary, dd::binary, hh::binary, mm::binary, ss::binary>>
end
end
defp zpad(value, length) do
value
|> Integer.to_string()
|> String.pad_leading(length, "0")
end
@doc """
Escape a string that may contain separators using the HL7 escaping rules.
## Arguments
* `value`: a string to escape; it may or may not contain separator
characters.
* `separators`: a tuple containing the item separators to be used when
generating the message, as returned by `HL7.Codec.set_separators/1`.
Defaults to `HL7.Codec.separators`.
* `escape_char`: character to be used as escape delimiter. Defaults to `?\\\\ `.
## Examples
iex> escape("ABCDEF")
"ABCDEF"
iex> escape("ABC|DEF^GHI", separators(), ?\\\\)
"ABC\\\\F\\\\DEF\\\\S\\\\GHI"
"""
@spec escape(binary, Type.separators(), escape_char :: byte) :: binary
def escape(value, separators \\ @separators, escape_char \\ ?\\)
when is_binary(value) and is_tuple(separators) and is_integer(escape_char) do
escape_no_copy(value, separators, escape_char, byte_size(value), 0)
end
defp escape_no_copy(value, separators, escape_char, size, index) when index < size do
# As strings that need to be escaped are fairly rare, we try to avoid generating unnecessary
# garbage by not copying the characters in the string unless the string has to be escaped.
<<head::binary-size(index), char, rest::binary>> = value
case match_separator(char, separators) do
{:match, item_type} ->
acc = escape_acc(item_type, escape_char, head)
escape_copy(rest, separators, escape_char, acc)
:nomatch when char === escape_char ->
acc = escape_acc(:escape, escape_char, head)
escape_copy(rest, separators, escape_char, acc)
:nomatch ->
escape_no_copy(value, separators, escape_char, size, index + 1)
end
end
defp escape_no_copy(value, _separators, _escape_char, _size, _index) do
value
end
defp escape_copy(<<char, rest::binary>>, separators, escape_char, acc) do
acc =
case match_separator(char, separators) do
{:match, item_type} -> escape_acc(item_type, escape_char, acc)
:nomatch when char === escape_char -> escape_acc(:escape, escape_char, acc)
:nomatch -> <<acc::binary, char>>
end
escape_copy(rest, separators, escape_char, acc)
end
defp escape_copy(<<>>, _separators, _escape_char, acc) do
acc
end
defp escape_acc(item_type, escape_char, acc) do
char = escape_delimiter_type(item_type)
<<acc::binary, escape_char, char, escape_char>>
end
@compile {:inline, escape_delimiter_type: 1}
defp escape_delimiter_type(:field), do: ?F
defp escape_delimiter_type(:component), do: ?S
defp escape_delimiter_type(:subcomponent), do: ?T
defp escape_delimiter_type(:repetition), do: ?R
defp escape_delimiter_type(:escape), do: ?E
@doc """
Convert an escaped string into its original value.
## Arguments
* `value`: a string to unescape; it may or may not contain escaped characters.
* `escape_char`: character that was used as escape delimiter. Defaults to `?\\\\ `.
## Examples
iex> "ABCDEF" = HL7.unescape("ABCDEF")
iex> "ABC|DEF|GHI" = HL7.Codec.unescape("ABC\\\\F\\\\DEF\\\\F\\\\GHI", ?\\\\)
"""
@spec unescape(binary, Type.separators(), escape_char :: byte) :: binary
def unescape(value, separators \\ @separators, escape_char \\ ?\\)
when is_binary(value) and is_tuple(separators) and is_integer(escape_char) do
unescape_no_copy(value, separators, escape_char, byte_size(value), 0)
end
defp unescape_no_copy(value, separators, escape_char, size, index) when index < size do
# As strings that need to be unescaped are fairly rare, we try to avoid generating unnecessary
# garbage by not copying the characters in the string unless the string has to be unescaped.
case value do
<<head::binary-size(index), ^escape_char, char, ^escape_char, rest::binary>> ->
char = unescape_delimiter(char, separators, escape_char)
unescape_copy(rest, separators, escape_char, <<head::binary, char>>)
_ ->
unescape_no_copy(value, separators, escape_char, size, index + 1)
end
end
defp unescape_no_copy(value, _separators, _escape_char, _size, _index) do
value
end
defp unescape_copy(value, separators, escape_char, acc) do
case value do
<<^escape_char, char, ^escape_char, rest::binary>> ->
char = unescape_delimiter(char, separators, escape_char)
unescape_copy(rest, separators, escape_char, <<acc::binary, char>>)
<<char, rest::binary>> ->
unescape_copy(rest, separators, escape_char, <<acc::binary, char>>)
<<>> ->
acc
end
end
defp unescape_delimiter(?F, separators, _escape_char), do: separator(:field, separators)
defp unescape_delimiter(?S, separators, _escape_char), do: separator(:component, separators)
defp unescape_delimiter(?T, separators, _escape_char), do: separator(:subcomponent, separators)
defp unescape_delimiter(?R, separators, _escape_char), do: separator(:repetition, separators)
defp unescape_delimiter(?E, _separators, escape_char), do: escape_char
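# Round-trip sketch, assuming the standard HL7 separators (field `|`,
# component `^`, subcomponent `&`, repetition `~`) and the default escape
# character `\`:
#
#     iex> escaped = escape("PID|ABC^DEF")
#     "PID\F\ABC\S\DEF"
#     iex> unescape(escaped)
#     "PID|ABC^DEF"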
defp split_options(true), do: [:global, :trim]
defp split_options(false), do: [:global]
defp non_empty_tuple_size(tuple, false), do: tuple_size(tuple)
defp non_empty_tuple_size(tuple, _trim), do: _non_empty_tuple_size(tuple, tuple_size(tuple))
defp _non_empty_tuple_size(tuple, size) when size > 1 do
case :erlang.element(size, tuple) do
"" -> _non_empty_tuple_size(tuple, size - 1)
_ -> size
end
end
defp _non_empty_tuple_size(_tuple, size) do
size
end
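# Illustrative: with trimming enabled, trailing empty subitems are not
# counted (and are therefore never encoded):
#
#     iex> non_empty_tuple_size({"A", "", "B", "", ""}, true)
#     3
#     iex> non_empty_tuple_size({"A", "", "B", "", ""}, false)
#     5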
defp maybe_trim_item(data, char, true), do: trim_item(data, char)
defp maybe_trim_item(data, _char, false), do: data
defp trim_item([value | tail], separator)
when value === separator or value === "" or value === [] do
trim_item(tail, separator)
end
defp trim_item(data, _separator), do: data
end
| lib/ex_hl7/codec.ex | 0.950629 | 0.629148 | codec.ex | starcoder |
defmodule Oban.Job do
@moduledoc """
A Job is an Ecto schema used for asynchronous execution.
Job changesets are created by your application code and inserted into the database for
asynchronous execution. Jobs can be inserted along with other application data as part of a
transaction, which guarantees that jobs will only be triggered from a successful transaction.
"""
use Ecto.Schema
import Ecto.Changeset
@type args :: map()
@type errors :: [%{at: DateTime.t(), attempt: pos_integer(), error: binary()}]
@type option ::
{:queue, atom() | binary()}
| {:worker, atom() | binary()}
| {:args, args()}
| {:max_attempts, pos_integer()}
| {:scheduled_at, DateTime.t()}
| {:scheduled_in, pos_integer()}
@type t :: %__MODULE__{
id: pos_integer(),
state: binary(),
queue: binary(),
worker: binary(),
args: args(),
errors: errors(),
attempt: non_neg_integer(),
max_attempts: pos_integer(),
inserted_at: DateTime.t(),
scheduled_at: DateTime.t(),
attempted_at: DateTime.t(),
completed_at: DateTime.t()
}
schema "oban_jobs" do
field :state, :string, default: "available"
field :queue, :string, default: "default"
field :worker, :string
field :args, :map
field :errors, {:array, :map}, default: []
field :attempt, :integer, default: 0
field :max_attempts, :integer, default: 20
field :attempted_at, :utc_datetime_usec
field :completed_at, :utc_datetime_usec
field :inserted_at, :utc_datetime_usec
field :scheduled_at, :utc_datetime_usec
end
@permitted ~w(
args
attempt
attempted_at
completed_at
errors
inserted_at
max_attempts
queue
scheduled_at
state
worker
)a
@required ~w(worker args)a
@doc """
Construct a new job changeset ready for insertion into the database.
## Options
* `:max_attempts` — the maximum number of times a job can be retried if there are errors during execution
* `:queue` — a named queue to push the job into. Jobs may be pushed into any queue, regardless
of whether jobs are currently being processed for the queue.
* `:scheduled_in` - the number of seconds until the job should be executed
* `:scheduled_at` - a time in the future after which the job should be executed
* `:worker` — a module to execute the job in. The module must implement the `Oban.Worker`
behaviour.
## Examples
Insert a job with the `:default` queue:
%{id: 1, user_id: 2}
|> Oban.Job.new(queue: :default, worker: MyApp.Worker)
|> MyApp.Repo.insert()
Generate a pre-configured job for `MyApp.Worker` and push it:
%{id: 1, user_id: 2} |> MyApp.Worker.new() |> MyApp.Repo.insert()
Schedule a job to run in 5 seconds:
%{id: 1} |> MyApp.Worker.new(scheduled_in: 5) |> MyApp.Repo.insert()
"""
@spec new(args(), [option]) :: Ecto.Changeset.t()
def new(args, opts \\ []) when is_map(args) and is_list(opts) do
params =
opts
|> Keyword.put(:args, args)
|> Map.new()
|> coerce_field(:queue)
|> coerce_field(:worker)
|> coerce_scheduling()
%__MODULE__{}
|> cast(params, @permitted)
|> validate_required(@required)
|> validate_number(:max_attempts, greater_than: 0, less_than: 50)
end
defp coerce_field(params, field) do
case Map.get(params, field) do
value when is_atom(value) and not is_nil(value) ->
update_in(params, [field], &to_clean_string/1)
_ ->
params
end
end
defp coerce_scheduling(%{scheduled_in: in_seconds} = params) when is_integer(in_seconds) do
scheduled_at = NaiveDateTime.add(NaiveDateTime.utc_now(), in_seconds)
params
|> Map.delete(:scheduled_in)
|> Map.put(:scheduled_at, scheduled_at)
|> Map.put(:state, "scheduled")
end
defp coerce_scheduling(params), do: params
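# Illustrative sketch (MyApp.Worker is a placeholder module name):
# new(%{id: 1}, worker: MyApp.Worker, scheduled_in: 60) yields a changeset
# whose :scheduled_at lies roughly 60 seconds in the future and whose
# :state is "scheduled".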
defp to_clean_string(atom) do
atom
|> Atom.to_string()
|> String.trim_leading("Elixir.")
end
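# Illustrative (MyApp.Worker being any module name):
#
#     iex> to_clean_string(MyApp.Worker)
#     "MyApp.Worker"
#     iex> to_clean_string(:default)
#     "default"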
end
| lib/oban/job.ex | 0.835047 | 0.433981 | job.ex | starcoder |
defmodule Jwt.Plugs.FilterClaims do
import Plug.Conn
require Logger
def init(opts), do: opts
def call(conn, []), do: send_401(conn)
def call(conn, opts) do
filter_claims(conn.assigns[:jwtclaims], conn, opts)
end
defp filter_claims([], conn, _), do: send_401(conn)
defp filter_claims(nil, conn, _), do: send_401(conn)
defp filter_claims(claims, conn, filters) do
  claims
  |> evaluate_all_filters(filters)
  |> all_filters_pass?()
  |> deliver_filters_result(conn)
end
defp all_filters_pass?(evaluated_filters), do: Enum.all?(evaluated_filters, & &1)
defp deliver_filters_result(true, conn), do: Plug.Conn.assign(conn, :jwtfilterclaims, {:ok})
defp deliver_filters_result(false, conn), do: send_401(conn)
defp evaluate_all_filters(claims, filters) do
Enum.map(filters, fn(filter) -> evaluate_single_filter(claims, filter) end)
end
defp evaluate_single_filter(claims, filter) do
  {_key_in_claims, claims_value_for_filter} = find_first_instance_of_filter_in_claims(claims, filter)
  {_key_in_filter, filter_regex} = Enum.at(Map.to_list(filter), 0)
  Logger.debug("Filter value: #{inspect claims_value_for_filter} with regex: #{inspect filter_regex}")
  case claims_value_for_filter do
    :notfound ->
      false
    value ->
      filter_regex
      |> as_list()
      |> Enum.any?(fn regex -> Regex.match?(Regex.compile!(regex), value) end)
  end
end
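# Illustrative: a filter maps a claim key to a regex (or a list of regexes):
#
#     iex> evaluate_single_filter(%{"role" => "admin"}, %{"role" => "adm.*"})
#     true
#     iex> evaluate_single_filter(%{"role" => "admin"}, %{"scope" => ".*"})
#     false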
defp find_first_instance_of_filter_in_claims(claims, filter) do
  case Map.to_list(Map.take(claims, Map.keys(filter))) do
    [] -> {:error, :notfound}
    [first_match | _] -> first_match
  end
end
defp as_list(filter) when is_list(filter), do: filter
defp as_list(filter), do: [filter]
defp send_401(conn) do
conn
|> send_resp(401, "")
|> halt
end
end
| lib/plugfilterclaims.ex | 0.507324 | 0.409634 | plugfilterclaims.ex | starcoder |
defmodule Openmaize.OnetimePass do
@moduledoc """
Module to handle one-time passwords for use in two factor authentication.
`Openmaize.OnetimePass` checks the one-time password, and returns an
`openmaize_user` message (the user model) if the one-time password is
correct or an `openmaize_error` message if there is an error.
After this function has been called, you need to add the user to the
session, by running `put_session(conn, :user_id, id)`, or send an API
token to the user.
## Options
There are two options related to the database - in most cases you
will not need to change the `repo` and `user_model` options:
* repo - the name of the repo
* the default is MyApp.Repo - using the name of the project
* user_model - the name of the user model
* the default is MyApp.User - using the name of the project
There are also the following options for the one-time passwords:
* HMAC-based one-time passwords
* token_length - the length of the one-time password
* the default is 6
* last - the count when the one-time password was last used
* this count needs to be stored server-side
* window - the number of future attempts allowed
* the default is 3
* Time-based one-time passwords
* token_length - the length of the one-time password
* the default is 6
* interval_length - the length of each timed interval
* the default is 30 (seconds)
* window - the number of attempts, before and after the current one, allowed
* the default is 1 (1 interval before and 1 interval after)
See the documentation for the Comeonin.Otp module for more details
about generating and verifying one-time passwords.
## Examples
Add the following line to your controller to call OnetimePass with the
default values:
plug Openmaize.OnetimePass when action in [:login_twofa]
And to set the token length to 8 characters:
plug Openmaize.OnetimePass, [token_length: 8] when action in [:login_twofa]
"""
@behaviour Plug
import Plug.Conn
alias Comeonin.Otp
alias Openmaize.Database, as: DB
alias Openmaize.{Config, Log}
@doc false
def init(opts) do
{Keyword.get(opts, :repo, Openmaize.Utils.default_repo),
Keyword.get(opts, :user_model, Openmaize.Utils.default_user_model),
opts}
end
@doc false
def call(%Plug.Conn{params: %{"user" => %{"id" => id, "hotp" => hotp}}} = conn,
{repo, user_model, opts}) do
{:ok, result} = repo.transaction(fn ->
DB.get_user_with_lock(repo, user_model, id)
|> check_hotp(hotp, opts)
|> DB.update_otp(repo)
end)
handle_auth(result, conn, id)
end
def call(%Plug.Conn{params: %{"user" => %{"id" => id, "totp" => totp}}} = conn,
{repo, user_model, opts}) do
repo.get(user_model, id)
|> check_totp(totp, opts)
|> DB.update_otp(repo)
|> handle_auth(conn, id)
end
defp check_hotp(user, hotp, opts) do
{user, Otp.check_hotp(hotp, user.otp_secret, [last: user.otp_last] ++ opts)}
end
defp check_totp(user, totp, opts) do
{user, Otp.check_totp(totp, user.otp_secret, opts)}
end
defp handle_auth({:error, message}, conn, user_id) do
Log.log(:warn, Config.log_level, conn.request_path,
%Log{user: user_id, message: message})
put_private(conn, :openmaize_error, "Invalid credentials")
end
defp handle_auth(user, conn, user_id) do
Log.log(:info, Config.log_level, conn.request_path,
%Log{user: user_id,
message: "successful one-time password login"})
put_private(conn, :openmaize_user, Map.drop(user, Config.drop_user_keys))
end
end
| lib/openmaize/onetime_pass.ex | 0.772531 | 0.601096 | onetime_pass.ex | starcoder |
defmodule Norm.Core.Selection do
@moduledoc false
# Provides the definition for selections
defstruct required: [], schema: nil
alias Norm.Core.Schema
alias Norm.SpecError
def new(schema, selectors) do
# We're going to front load some work so that we can ensure that people are
# requiring keys that actually exist in the schema and so that we can make
# it easier to conform in the future.
# select(schema, path, %{})
case selectors do
:all ->
selectors = build_all_selectors(schema)
select(selectors, schema)
_ ->
validate_selectors!(selectors)
select(selectors, schema)
end
end
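# Illustrative sketch, assuming Norm's public `schema/1`, `spec/1` and
# `selection/2` helpers:
#
#     import Norm
#     s = schema(%{name: spec(is_binary()), age: spec(is_integer())})
#     selection(s, [:name])  # only :name must be present
#     selection(s, :all)     # requires every key in the schema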
def select(selectors, schema, required \\ [])
def select([], schema, required), do: %__MODULE__{schema: schema, required: required}
def select([selector | rest], schema, required) do
case selector do
{key, inner_keys} ->
inner_schema = assert_spec!(schema, key)
selection = select(inner_keys, inner_schema)
select(rest, schema, [{key, selection} | required])
key ->
_ = assert_spec!(schema, key)
select(rest, schema, [key | required])
end
end
defp build_all_selectors(schema) do
schema.specs
|> Enum.map(fn
{name, %Schema{}=inner_schema} -> {name, build_all_selectors(inner_schema)}
{name, _} -> name
end)
end
defp validate_selectors!([]), do: true
defp validate_selectors!([{_key, inner} | rest]), do: validate_selectors!(inner) and validate_selectors!(rest)
defp validate_selectors!([_key | rest]), do: validate_selectors!(rest)
defp validate_selectors!(other), do: raise ArgumentError, "select expects a list of keys but received: #{inspect other}"
defp assert_spec!(%Schema{}=schema, key) do
case Schema.spec(schema, key) do
nil -> raise SpecError, {:selection, key, schema}
spec -> spec
end
end
defp assert_spec!(%__MODULE__{}, _key) do
# In the future we might support this and allow users to overwrite internal
# selections. But for now it's safer to forbid this.
raise ArgumentError, """
Attempting to specify a selection on top of another selection is
not allowed.
"""
end
defp assert_spec!(other, _key) do
raise ArgumentError, "Expected a schema and got: #{inspect other}"
end
defimpl Norm.Conformer.Conformable do
alias Norm.Conformer
alias Norm.Conformer.Conformable
def conform(_, input, path) when not is_map(input) do
{:error, [Conformer.error(path, input, "not a map")]}
end
def conform(%{required: required, schema: schema}, input, path) do
case Conformable.conform(schema, input, path) do
{:ok, conformed} ->
errors = ensure_keys(required, conformed, path, [])
if Enum.any?(errors) do
{:error, errors}
else
{:ok, conformed}
end
{:error, conforming_errors} ->
errors = ensure_keys(required, input, path, [])
{:error, conforming_errors ++ errors}
end
end
defp ensure_keys([], _conformed, _path, errors), do: errors
defp ensure_keys([{key, inner} | rest], conformed, path, errors) do
case ensure_key(key, conformed, path) do
:ok ->
inner_value = Map.get(conformed, key)
inner_errors = ensure_keys(inner.required, inner_value, path ++ [key], [])
ensure_keys(rest, conformed, path, errors ++ inner_errors)
error ->
ensure_keys(rest, conformed, path, [error | errors])
end
end
defp ensure_keys([key | rest], conformed, path, errors) do
case ensure_key(key, conformed, path) do
:ok ->
ensure_keys(rest, conformed, path, errors)
error ->
ensure_keys(rest, conformed, path, [error | errors])
end
end
defp ensure_key(_key, conformed, _path) when not is_map(conformed), do: :ok
defp ensure_key(key, conformed, path) do
if Map.has_key?(conformed, key) do
:ok
else
Conformer.error(path ++ [key], conformed, ":required")
end
end
end
if Code.ensure_loaded?(StreamData) do
defimpl Norm.Generatable do
alias Norm.Generatable
# In order to build a semantically meaningful selection we need to generate
# all of the specified fields as well as the fields from the underlying
# schema. We can then merge both of those maps together with the required
# fields taking precedence.
def gen(%{required: required, schema: schema}) do
case Enum.reduce(required, %{}, & to_gen(&1, schema, &2)) do
{:error, error} ->
{:error, error}
gen ->
{:ok, StreamData.fixed_map(gen)}
end
end
defp to_gen(_, _schema, {:error, error}), do: {:error, error}
# If we're here then we're processing a key with an inner selection.
defp to_gen({key, selection}, _schema, generator) do
case Generatable.gen(selection) do
{:ok, g} ->
Map.put(generator, key, g)
{:error, error} ->
{:error, error}
end
end
defp to_gen(key, schema, generator) do
# It's safe to just get the spec because at this point we *know* that the
# keys that have been selected are in the schema.
with {:ok, g} <- Generatable.gen(Norm.Core.Schema.spec(schema, key)) do
Map.put(generator, key, g)
end
end
end
end
defimpl Inspect do
import Inspect.Algebra
def inspect(selection, opts) do
map = %{
schema: selection.schema,
required: selection.required
}
concat(["#Norm.Selection<", to_doc(map, opts), ">"])
end
end
end
| lib/norm/core/selection.ex | 0.767385 | 0.471588 | selection.ex | starcoder |
defmodule Xandra.Cluster do
@moduledoc """
Connection to a Cassandra cluster.
This module is a "proxy" connection with support for connecting to multiple
nodes in a Cassandra cluster and executing queries on such nodes based on a
given *strategy*.
## Usage
This module manages connections to different nodes in a Cassandra cluster.
Each connection to a node is a `Xandra` connection (so it can also be
a pool of connections). When a `Xandra.Cluster` connection is started,
one `Xandra` pool of connections will be started for each node specified
in the `:nodes` option plus for autodiscovered nodes if the `:autodiscovery`
option is `true`.
The API provided by this module mirrors the API provided by the `Xandra`
module. Queries executed through this module will be "routed" to nodes
in the provided list of nodes based on a strategy. See the
"Load balancing strategies" section below
Note that regardless of the underlying pool, `Xandra.Cluster` will establish
one extra connection to each node in the specified list of `:nodes` (used for
internal purposes).
Here is an example of how one could use `Xandra.Cluster` to connect to
multiple nodes:
Xandra.Cluster.start_link(
nodes: ["cassandra1.example.net", "cassandra2.example.net"],
pool_size: 10
)
The code above will establish a pool of ten connections to each of the nodes
specified in `:nodes`, for a total of twenty connections going out of the
current machine, plus two extra connections (one per node) used for internal
purposes.
## Autodiscovery
When the `:autodiscovery` option is `true` (which is the default),
`Xandra.Cluster` discovers nodes in the same cluster as the nodes
specified in the `:nodes` option. The nodes in `:nodes` act as "seed"
nodes. When nodes in the cluster are discovered, a `Xandra` pool of
connections is started for each node that is in the **same datacenter**
as one of the nodes in `:nodes`. For now, there is no limit on how many
nodes in the same datacenter `Xandra.Cluster` discovers and connects to.
As mentioned before, a "control connection" for internal purposes is established
to each node in `:nodes`. These control connections are *not* established for
autodiscovered nodes. This means that if you only have one seed node in `:nodes`,
there will only be one control connection: if that control connection goes down
for some reason, you won't receive cluster change events anymore. This will cause
disconnections but will not technically break anything.
## Load balancing strategies
For now, there are two load balancing "strategies" implemented:
* `:random` - it will choose one of the connected nodes at random and
execute the query on that node.
* `:priority` - it will choose a node to execute the query according
to the order nodes appear in `:nodes`. Not supported when `:autodiscovery`
is `true`.
## Disconnections and reconnections
`Xandra.Cluster` also supports nodes disconnecting and reconnecting: if Xandra
detects one of the nodes in `:nodes` going down, it will not execute queries
against it anymore, but will start executing queries on it as soon as it
detects such node is back up.
If all specified nodes happen to be down when a query is executed, a
`Xandra.ConnectionError` with reason `{:cluster, :not_connected}` will be
returned.
"""
use GenServer
alias Xandra.Cluster.{ControlConnection, StatusChange, TopologyChange}
alias Xandra.{Batch, ConnectionError, Prepared, Protocol, RetryStrategy}
require Logger
@type cluster :: GenServer.server()
@default_load_balancing :random
@default_port 9042
@default_start_options [
nodes: ["127.0.0.1"],
idle_interval: 30_000,
protocol_version: :v3,
autodiscovery: true,
autodiscovered_nodes_port: @default_port
]
defstruct [
:options,
:node_refs,
:load_balancing,
:autodiscovery,
:autodiscovered_nodes_port,
:pool_supervisor,
pools: %{}
]
@doc """
Starts a cluster connection.
Note that a cluster connection starts an additional connection for each
node specified in `:nodes`. Such "control connection" is used for monitoring
cluster updates.
## Options
This function accepts all options accepted by `Xandra.start_link/1` and
forwards them to each connection or pool of connections. The following
options are specific to this function:
* `:load_balancing` - (atom) load balancing "strategy". Either `:random`
or `:priority`. See the "Load balancing strategies" section in the module
documentation. If `:autodiscovery` is `true`, the only supported strategy
is `:random`. Defaults to `:random`.
* `:nodes` - (list of strings) a list of nodes to use as seed nodes
when setting up the cluster. The behaviour of this option depends on
the `:autodiscovery` option. See the "Autodiscovery" section below.
If the `:autodiscovery` option is `false`, the cluster only connects
to the nodes in `:nodes` and sets up one additional control connection
for each one of these nodes. Defaults to `["127.0.0.1"]`.
* `:autodiscovery` - (boolean) whether to autodiscover nodes in the
cluster. See the "Autodiscovery" section in the module documentation.
Defaults to `true`.
* `:autodiscovered_nodes_port` - (integer) the port to use when connecting
to autodiscovered nodes. Cassandra does not advertise the port of nodes
when discovering them, so you'll need to specify one explicitly. This might
get fixed in future Cassandra versions. Defaults to `9042`.
## Examples
Starting a cluster connection to two specific nodes in the cluster:
{:ok, cluster} =
Xandra.Cluster.start_link(
nodes: ["cassandra1.example.net", "cassandra2.example.net"],
autodiscovery: false
)
Starting a pool of five connections to nodes in the same cluster as the given
"seed" node:
{:ok, cluster} =
Xandra.Cluster.start_link(
autodiscovery: true,
nodes: ["cassandra-seed.example.net"]
pool_size: 5
)
Passing options down to each connection:
{:ok, cluster} =
Xandra.Cluster.start_link(
nodes: ["cassandra.example.net"],
after_connect: &Xandra.execute!(&1, "USE my_keyspace")
)
"""
@spec start_link([Xandra.start_option() | {:load_balancing, atom}]) :: GenServer.on_start()
def start_link(options) do
options = Keyword.merge(@default_start_options, options)
# We don't pop the :protocol_version option because we want to
# also forward it to the Xandra connections.
options =
Keyword.put(
options,
:protocol_module,
protocol_version_to_module(options[:protocol_version])
)
{load_balancing, options} = Keyword.pop(options, :load_balancing, @default_load_balancing)
{nodes, options} = Keyword.pop(options, :nodes)
{autodiscovery?, options} = Keyword.pop(options, :autodiscovery)
{autodiscovered_nodes_port, options} = Keyword.pop(options, :autodiscovered_nodes_port)
{name, options} = Keyword.pop(options, :name)
if autodiscovery? and load_balancing == :priority do
raise ArgumentError,
"the :priority load balancing strategy is only supported when :autodiscovery is false"
end
state = %__MODULE__{
options: Keyword.delete(options, :pool),
load_balancing: load_balancing,
autodiscovery: autodiscovery?,
autodiscovered_nodes_port: autodiscovered_nodes_port
}
nodes = Enum.map(nodes, &parse_node/1)
GenServer.start_link(__MODULE__, {state, nodes}, name: name)
end
# Used internally by Xandra.Cluster.ControlConnection.
@doc false
def activate(cluster, node_ref, address, port) do
GenServer.cast(cluster, {:activate, node_ref, address, port})
end
# Used internally by Xandra.Cluster.ControlConnection.
@doc false
def update(cluster, status_change) do
GenServer.cast(cluster, {:update, status_change})
end
# Used internally by Xandra.Cluster.ControlConnection.
@doc false
def discovered_peers(cluster, peers) do
GenServer.cast(cluster, {:discovered_peers, peers})
end
@doc """
Returns a stream of pages.
When streaming pages through a cluster, the streaming is done
from a single node, that is, this function just calls out to
`Xandra.stream_pages!/4` after choosing a node appropriately.
All options are forwarded to `Xandra.stream_pages!/4`, including
retrying options.
"""
@spec stream_pages!(
cluster,
Xandra.statement() | Xandra.Prepared.t(),
Xandra.values(),
keyword
) ::
Enumerable.t()
def stream_pages!(cluster, query, params, options \\ []) do
with_conn(cluster, &Xandra.stream_pages!(&1, query, params, options))
end
@doc """
Same as `Xandra.prepare/3`.
Preparing a query through `Xandra.Cluster` will prepare it only on one node,
according to the load balancing strategy chosen in `start_link/1`. To prepare
and execute a query on the same node, you could use `run/3`:
Xandra.Cluster.run(cluster, fn conn ->
# "conn" is the pool of connections for a specific node.
prepared = Xandra.prepare!(conn, "SELECT * FROM system.local")
Xandra.execute!(conn, prepared, _params = [])
end)
Thanks to the prepared query cache, we can always reprepare the query and execute
it because after the first time (on each node) the prepared query will be fetched
from the cache. However, if a prepared query is unknown on a node, Xandra will
prepare it on that node on the fly, so we can simply do this as well:
prepared = Xandra.Cluster.prepare!(cluster, "SELECT * FROM system.local")
Xandra.Cluster.execute!(cluster, prepared, _params = [])
Note that this goes through the cluster twice, so there's a high chance that
the query will be prepared on one node and then executed on another node.
This is however useful if you want to use the `:retry_strategy` option in
`execute!/4`: in the `run/3` example above, if you use `:retry_strategy` with
`Xandra.execute!/3`, the query will be retried on the same pool of connections
to the same node. `execute!/4` will retry queries going through the cluster
again instead.
"""
@spec prepare(cluster, Xandra.statement(), keyword) ::
{:ok, Xandra.Prepared.t()} | {:error, Xandra.error()}
def prepare(cluster, statement, options \\ []) when is_binary(statement) do
with_conn(cluster, &Xandra.prepare(&1, statement, options))
end
@doc """
Same as `prepare/3` but raises in case of errors.
If the function is successful, the prepared query is returned directly
instead of in an `{:ok, prepared}` tuple like in `prepare/3`.
"""
@spec prepare!(cluster, Xandra.statement(), keyword) :: Xandra.Prepared.t() | no_return
def prepare!(cluster, statement, options \\ []) do
case prepare(cluster, statement, options) do
{:ok, result} -> result
{:error, exception} -> raise exception
end
end
@doc """
Same as `execute/4` but with the last argument omitted: accepts params for
statements and prepared queries, or options for batches.
"""
@spec execute(cluster, Xandra.statement() | Xandra.Prepared.t(), Xandra.values()) ::
{:ok, Xandra.result()} | {:error, Xandra.error()}
@spec execute(cluster, Xandra.Batch.t(), keyword) ::
{:ok, Xandra.Void.t()} | {:error, Xandra.error()}
def execute(cluster, query, params_or_options \\ [])
def execute(cluster, statement, params) when is_binary(statement) do
execute(cluster, statement, params, _options = [])
end
def execute(cluster, %Prepared{} = prepared, params) do
execute(cluster, prepared, params, _options = [])
end
def execute(cluster, %Batch{} = batch, options) when is_list(options) do
with_conn_and_retrying(cluster, options, &Xandra.execute(&1, batch, options))
end
@doc """
Executes a query on a node in the cluster.
This function executes a query on a node in the cluster. The node is chosen based
on the load balancing strategy given in `start_link/1`.
Supports the same options as `Xandra.execute/4`. In particular, the `:retry_strategy`
option is cluster-aware, meaning that queries are retried on possibly different nodes
in the cluster.
"""
@spec execute(cluster, Xandra.statement() | Xandra.Prepared.t(), Xandra.values(), keyword) ::
{:ok, Xandra.result()} | {:error, Xandra.error()}
def execute(cluster, query, params, options) do
with_conn_and_retrying(cluster, options, &Xandra.execute(&1, query, params, options))
end
@doc """
Same as `execute/3` but returns the result directly or raises in case of errors.
"""
@spec execute!(cluster, Xandra.statement() | Xandra.Prepared.t(), Xandra.values()) ::
Xandra.result() | no_return
@spec execute!(cluster, Xandra.Batch.t(), keyword) ::
Xandra.Void.t() | no_return
def execute!(cluster, query, params_or_options \\ []) do
case execute(cluster, query, params_or_options) do
{:ok, result} -> result
{:error, exception} -> raise exception
end
end
@doc """
Same as `execute/4` but returns the result directly or raises in case of errors.
"""
@spec execute!(cluster, Xandra.statement() | Xandra.Prepared.t(), Xandra.values(), keyword) ::
        Xandra.result() | no_return
def execute!(cluster, query, params, options) do
case execute(cluster, query, params, options) do
{:ok, result} -> result
{:error, exception} -> raise exception
end
end
@doc """
Runs a function with a given connection.
The connection that is passed to `fun` is a Xandra connection, not a
cluster. This means that you should call `Xandra` functions on it.
Since the connection is a single connection, it means that it's a connection
to a specific node, so you can do things like prepare a query and then execute
it because you can be sure it's prepared on the same node where you're
executing it.
## Examples
query = "SELECT * FROM system_schema.keyspaces"
Xandra.Cluster.run(cluster, fn conn ->
prepared = Xandra.prepare!(conn, query)
Xandra.execute!(conn, prepared, _params = [])
end)
"""
@spec run(cluster, keyword, (Xandra.conn() -> result)) :: result when result: var
def run(cluster, options \\ [], fun) do
with_conn(cluster, &Xandra.run(&1, options, fun))
end
defp with_conn_and_retrying(cluster, options, fun) do
RetryStrategy.run_with_retrying(options, fn -> with_conn(cluster, fun) end)
end
defp with_conn(cluster, fun) do
case GenServer.call(cluster, :checkout) do
{:ok, pool} ->
fun.(pool)
{:error, :empty} ->
action = "checkout from cluster #{inspect(cluster)}"
{:error, ConnectionError.new(action, {:cluster, :not_connected})}
end
end
## Callbacks
@impl true
def init({%__MODULE__{options: options} = state, nodes}) do
{:ok, pool_supervisor} = Supervisor.start_link([], strategy: :one_for_one, max_restarts: 0)
node_refs = start_control_connections(nodes, options, state.autodiscovery)
{:ok, %{state | node_refs: node_refs, pool_supervisor: pool_supervisor}}
end
@impl true
def handle_call(:checkout, _from, %__MODULE__{} = state) do
%{
node_refs: node_refs,
load_balancing: load_balancing,
pools: pools
} = state
if Enum.empty?(pools) do
{:reply, {:error, :empty}, state}
else
pool = select_pool(load_balancing, pools, node_refs)
{:reply, {:ok, pool}, state}
end
end
@impl true
def handle_cast(message, state)
def handle_cast({:activate, node_ref, address, port}, %__MODULE__{} = state) do
_ = Logger.debug("Control connection for #{:inet.ntoa(address)}:#{port} is up")
# Update the node_refs with the actual address of the control connection node.
state = update_in(state.node_refs, &List.keystore(&1, node_ref, 0, {node_ref, address}))
state = start_pool(state, address, port)
{:noreply, state}
end
def handle_cast({:discovered_peers, peers}, %__MODULE__{} = state) do
_ = Logger.debug("Discovered peers: #{inspect(peers)}")
port = state.autodiscovered_nodes_port
state = Enum.reduce(peers, state, &start_pool(_state = &2, _peer = &1, port))
{:noreply, state}
end
def handle_cast({:update, {:control_connection_established, address}}, %__MODULE__{} = state) do
state = restart_pool(state, address)
{:noreply, state}
end
def handle_cast({:update, %StatusChange{} = status_change}, %__MODULE__{} = state) do
state = handle_status_change(state, status_change)
{:noreply, state}
end
def handle_cast({:update, %TopologyChange{} = topology_change}, %__MODULE__{} = state) do
state = handle_topology_change(state, topology_change)
{:noreply, state}
end
## Helpers
defp start_control_connections(nodes, options, autodiscovery?) do
cluster = self()
Enum.map(nodes, fn {address, port} ->
node_ref = make_ref()
ControlConnection.start_link(cluster, node_ref, address, port, options, autodiscovery?)
{node_ref, nil}
end)
end
defp start_pool(state, address, port) do
%{
options: options,
pool_supervisor: pool_supervisor,
pools: pools
} = state
options = Keyword.merge(options, address: address, port: port)
child_spec = Supervisor.child_spec({Xandra, options}, id: address)
case Supervisor.start_child(pool_supervisor, child_spec) do
{:ok, pool} ->
_ = Logger.debug("Started connection to #{inspect(address)}")
%{state | pools: Map.put(pools, address, pool)}
{:error, {:already_started, _pool}} ->
# TODO: to have a reliable cluster name, we need to bring the name given on
# start_link/1 into the state because it could be an atom, {:global, term}
# and so on.
Logger.warn(fn ->
"Xandra cluster #{inspect(self())} " <>
"received request to start another connection pool " <>
"to the same address: #{inspect(address)}"
end)
state
end
end
defp restart_pool(state, address) do
%{pool_supervisor: pool_supervisor, pools: pools} = state
case Supervisor.restart_child(pool_supervisor, address) do
{:error, reason} when reason in [:not_found, :running, :restarting] ->
state
{:ok, pool} ->
%{state | pools: Map.put(pools, address, pool)}
end
end
defp handle_status_change(state, %{effect: "UP", address: address}) do
restart_pool(state, address)
end
defp handle_status_change(state, %{effect: "DOWN", address: address}) do
%{pool_supervisor: pool_supervisor, pools: pools} = state
_ = Supervisor.terminate_child(pool_supervisor, address)
%{state | pools: Map.delete(pools, address)}
end
# We don't care about changes in the topology if we're not autodiscovering
# nodes.
defp handle_topology_change(%{autodiscovery: false} = state, _change) do
state
end
defp handle_topology_change(state, %{effect: "NEW_NODE", address: address}) do
start_pool(state, address, state.autodiscovered_nodes_port)
end
defp handle_topology_change(state, %{effect: "REMOVED_NODE", address: address}) do
%{pool_supervisor: pool_supervisor, pools: pools} = state
_ = Supervisor.terminate_child(pool_supervisor, address)
_ = Supervisor.delete_child(pool_supervisor, address)
%{state | pools: Map.delete(pools, address)}
end
defp handle_topology_change(state, %{effect: "MOVED_NODE"} = event) do
_ = Logger.warn("Ignored TOPOLOGY_CHANGE event: #{inspect(event)}")
state
end
defp select_pool(:random, pools, _node_refs) do
{_address, pool} = Enum.random(pools)
pool
end
defp select_pool(:priority, pools, node_refs) do
Enum.find_value(node_refs, fn {_node_ref, address} ->
Map.get(pools, address)
end)
end
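# Illustrative: given pools %{addr1 => pool1, addr2 => pool2}, :random picks
# either pool with equal probability, while :priority walks node_refs (which
# preserves the order of the :nodes option) and returns the first pool found.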
defp parse_node(string) do
case String.split(string, ":", parts: 2) do
[address, port] ->
case Integer.parse(port) do
{port, ""} ->
{String.to_charlist(address), port}
_ ->
raise ArgumentError, "invalid item #{inspect(string)} in the :nodes option"
end
[address] ->
{String.to_charlist(address), @default_port}
end
end
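# Illustrative:
#
#     iex> parse_node("cassandra.example.net:9043")
#     {'cassandra.example.net', 9043}
#     iex> parse_node("cassandra.example.net")
#     {'cassandra.example.net', 9042}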
defp protocol_version_to_module(:v3), do: Protocol.V3
defp protocol_version_to_module(:v4), do: Protocol.V4
defp protocol_version_to_module(other),
do: raise(ArgumentError, "unknown protocol version: #{inspect(other)}")
end
| lib/xandra/cluster.ex | 0.923674 | 0.834946 | cluster.ex | starcoder |
defmodule Sanbase.Metric do
@moduledoc """
Dispatch module used for fetching metrics.
This module dispatches the fetching to modules implementing the
`Sanbase.Metric.Behaviour` behaviour. Such modules are added to the
@metric_modules list and everything else happens automatically.
This project is a data-centric application and the metrics are one of the
main data types provided.
The module works by either dispatching the functions to the proper module or
by aggregating data fetched by multiple modules.
"""
import Sanbase.Metric.MetricReplace,
only: [maybe_replace_metric: 2, maybe_replace_metrics: 2]
# Use only the types from the behaviour module
alias Sanbase.Metric.Behaviour, as: Type
alias Sanbase.Metric.Helper
@compile inline: [
execute_if_aggregation_valid: 3,
maybe_change_module: 4,
combine_metrics_in_modules: 2
]
@type datetime :: DateTime.t()
@type metric :: Type.metric()
@type selector :: Type.selector()
@type interval :: Type.interval()
@type operation :: Type.operation()
@type threshold :: Type.threshold()
@type direction :: Type.direction()
@type opts :: Type.opts()
@typedoc """
This type is used when the available metrics from many modules are gathered.
It might be the case that one of these modules cannot fetch some data (the
service it uses is down). In this case, instead of breaking everything and
returning an error, return a :nocache result. This will make the API not cache
the result and the subsequent call will try to compute the result again.
"""
@type available_metrics_with_nocache_result ::
{:ok, list(metric)} | {:nocache, {:ok, list(metric)}}
@access_map Helper.access_map()
@aggregations Helper.aggregations()
@aggregations_per_metric Helper.aggregations_per_metric()
@free_metrics Helper.free_metrics()
@histogram_metric_to_module_map Helper.histogram_metric_to_module_map()
@histogram_metrics Helper.histogram_metrics()
@histogram_metrics_mapset Helper.histogram_metrics_mapset()
@metric_modules Helper.metric_modules()
@metric_to_module_map Helper.metric_to_module_map()
@metrics Helper.metrics()
@metrics_mapset Helper.metrics_mapset()
@min_plan_map Helper.min_plan_map()
@restricted_metrics Helper.restricted_metrics()
@timeseries_metric_to_module_map Helper.timeseries_metric_to_module_map()
@timeseries_metrics Helper.timeseries_metrics()
@timeseries_metrics_mapset Helper.timeseries_metrics_mapset()
@table_metrics Helper.table_metrics()
@table_metrics_mapset Helper.table_metrics_mapset()
@table_metric_to_module_map Helper.table_metric_to_module_map()
@required_selectors_map Helper.required_selectors_map()
@deprecated_metrics_map Helper.deprecated_metrics_map()
@doc ~s"""
Check if `metric` is a valid metric name.
"""
@spec has_metric?(any()) :: true | {:error, String.t()}
def has_metric?(metric) do
case metric in @metrics_mapset do
true -> true
false -> metric_not_available_error(metric)
end
end
def required_selectors(metric) do
case metric in @metrics_mapset do
true -> {:ok, Map.get(@required_selectors_map, metric, [])}
false -> metric_not_available_error(metric)
end
end
def is_not_deprecated?(metric) do
case Map.get(@deprecated_metrics_map, metric) do
nil ->
true
%DateTime{} = deprecated_since ->
{:error, "The metric #{metric} is deprecated since #{deprecated_since}"}
end
end
@doc ~s"""
Check if a metric has incomplete data.
Incomplete data applies to daily metrics, whose value for the current day
is updated many times throughout the day. For example, the value for Daily
Active Addresses at 18:00 covers only 3/4 of the day. The value for a given
day becomes constant only when the next day starts.
"""
@spec has_incomplete_data?(Sanbase.Metric.Behaviour.metric()) :: boolean()
def has_incomplete_data?(metric) do
module = Map.get(@metric_to_module_map, metric)
module.has_incomplete_data?(metric)
end
def broken_data(metric, selector, from, to) do
metric = maybe_replace_metric(metric, selector)
case Map.get(@metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric)
module when is_atom(module) ->
module = maybe_change_module(module, metric, selector, [])
module.broken_data(metric, selector, from, to)
end
end
@doc ~s"""
Returns timeseries data (pairs of datetime and float value) for a given set
of arguments.
Get a given metric for an interval and time range. The metric's aggregation
function can be changed by the last optional parameter. The available
aggregations are #{inspect(@aggregations)}. If no aggregation is provided,
a default one (based on the metric) will be used.
"""
@spec timeseries_data(metric, selector, datetime, datetime, interval, opts) ::
Type.timeseries_data_result()
def timeseries_data(metric, selector, from, to, interval, opts \\ [])
def timeseries_data(metric, selector, from, to, interval, opts) do
metric = maybe_replace_metric(metric, selector)
case Map.get(@timeseries_metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :timeseries)
module when is_atom(module) ->
module = maybe_change_module(module, metric, selector, opts)
aggregation = Keyword.get(opts, :aggregation, nil)
fun = fn ->
module.timeseries_data(
metric,
selector,
from,
to,
interval,
opts
)
end
execute_if_aggregation_valid(fun, metric, aggregation)
end
end
@doc ~s"""
Returns timeseries data (pairs of datetime and float value) for every slug
separately.
Get a given metric for a selector and time range. The metric's aggregation
function can be changed by the last optional parameter. The available
aggregations are #{inspect(@aggregations)}. If no aggregation is provided,
a default one (based on the metric) will be used.
"""
@spec timeseries_data_per_slug(metric, selector, datetime, datetime, interval, opts) ::
Type.timeseries_data_per_slug_result()
def timeseries_data_per_slug(metric, selector, from, to, interval, opts \\ [])
def timeseries_data_per_slug(metric, selector, from, to, interval, opts) do
metric = maybe_replace_metric(metric, selector)
case Map.get(@timeseries_metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :timeseries)
module when is_atom(module) ->
module = maybe_change_module(module, metric, selector, opts)
aggregation = Keyword.get(opts, :aggregation, nil)
fun = fn ->
module.timeseries_data_per_slug(
metric,
selector,
from,
to,
interval,
opts
)
end
execute_if_aggregation_valid(fun, metric, aggregation)
|> Sanbase.Utils.Transform.maybe_apply_function(fn list ->
Enum.sort_by(list, & &1.datetime, {:asc, DateTime})
end)
|> Sanbase.Utils.Transform.maybe_apply_function(fn list ->
Enum.map(list, fn %{data: data} = elem ->
data_sorted_by_slug = Enum.sort_by(data, & &1.slug, :asc)
%{elem | data: data_sorted_by_slug}
end)
end)
end
end
@doc ~s"""
Get the aggregated value for a metric, a selector and time range.
The metric's aggregation function can be changed by the last optional parameter.
The available aggregations are #{inspect(@aggregations)}. If no aggregation is
provided, a default one (based on the metric) will be used.
"""
@spec aggregated_timeseries_data(metric, selector, datetime, datetime, opts) ::
Type.aggregated_timeseries_data_result()
def aggregated_timeseries_data(metric, selector, from, to, opts \\ [])
def aggregated_timeseries_data(metric, selector, from, to, opts) do
metric = maybe_replace_metric(metric, selector)
case Map.get(@timeseries_metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :timeseries)
module when is_atom(module) ->
module = maybe_change_module(module, metric, selector, opts)
aggregation = Keyword.get(opts, :aggregation, nil)
fun = fn ->
module.aggregated_timeseries_data(
metric,
selector,
from,
to,
opts
)
end
execute_if_aggregation_valid(fun, metric, aggregation)
end
end
@doc ~s"""
Get a list of all slugs that satisfy a given filter
The filtering is determined by the value of `metric`, aggregated over the
`from`-`to` interval with `aggregation`. Of all slugs, only those whose
aggregated value satisfies the `operator` and `threshold` checks are taken.
If no aggregation is provided, a default one (based on the metric) will be used.
"""
@spec slugs_by_filter(metric, datetime, datetime, operation, threshold, opts) ::
Type.slugs_by_filter_result()
def slugs_by_filter(metric, from, to, operation, threshold, opts \\ [])
def slugs_by_filter(metric, from, to, operation, threshold, opts) do
case Map.get(@timeseries_metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :timeseries)
module when is_atom(module) ->
aggregation = Keyword.get(opts, :aggregation, nil)
fun = fn ->
module.slugs_by_filter(
metric,
from,
to,
operation,
threshold,
opts
)
end
execute_if_aggregation_valid(fun, metric, aggregation)
end
end
@doc ~s"""
Get a list of all slugs in a specific order.
The order is determined by the value of `metric`, aggregated over the
`from`-`to` interval with `aggregation`. The order is either ascending or
descending, defined by the `direction` argument with two values: :asc and :desc.
If no aggregation is provided, a default one (based on the metric) will be used.
"""
@spec slugs_order(metric, datetime, datetime, direction, opts) ::
Type.slugs_order_result()
def slugs_order(metric, from, to, direction, opts \\ [])
def slugs_order(metric, from, to, direction, opts) do
case Map.get(@metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :timeseries)
module when is_atom(module) ->
aggregation = Keyword.get(opts, :aggregation, nil)
fun = fn ->
module.slugs_order(
metric,
from,
to,
direction,
opts
)
end
execute_if_aggregation_valid(fun, metric, aggregation)
end
end
@doc ~s"""
Get a histogram for a given metric
"""
@spec histogram_data(metric, selector, datetime, datetime, interval, non_neg_integer()) ::
Type.histogram_data_result()
def histogram_data(metric, selector, from, to, interval, limit \\ 100)
def histogram_data(metric, selector, from, to, interval, limit) do
metric = maybe_replace_metric(metric, selector)
case Map.get(@histogram_metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :histogram)
module when is_atom(module) ->
module = maybe_change_module(module, metric, selector, [])
module.histogram_data(
metric,
selector,
from,
to,
interval,
limit
)
end
end
@doc ~s"""
Get a table for a given metric.
Take a look at the `TableMetric` modules.
"""
@spec table_data(metric, selector, datetime, datetime, opts()) ::
Type.table_data_result()
def table_data(metric, selector, from, to, opts \\ [])
def table_data(metric, selector, from, to, opts) do
metric = maybe_replace_metric(metric, selector)
case Map.get(@table_metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :table)
module when is_atom(module) ->
module = maybe_change_module(module, metric, selector, opts)
aggregation = Keyword.get(opts, :aggregation, nil)
fun = fn ->
module.table_data(
metric,
selector,
from,
to,
opts
)
end
execute_if_aggregation_valid(fun, metric, aggregation)
end
end
@doc ~s"""
Get the human readable name representation of a given metric
"""
@spec human_readable_name(metric) :: Type.human_readable_name_result()
def human_readable_name(metric) do
case Map.get(@metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric)
module when is_atom(module) ->
module.human_readable_name(metric)
end
end
@doc ~s"""
Get the complexity weight of a metric. This is a multiplier applied to the
computed complexity. Clickhouse is faster compared to Elasticsearch for fetching
timeseries data, so it has a smaller weight
"""
@spec complexity_weight(metric) :: Type.complexity_weight()
def complexity_weight(metric) do
case Map.get(@metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :timeseries)
module when is_atom(module) ->
module.complexity_weight(metric)
end
end
@doc ~s"""
Get metadata for a given metric. This includes:
- The minimal interval for which the metric is available
(every 5 minutes, once a day, etc.)
- The default aggregation applied if none is provided
- The available aggregations for the metric
- The available slugs for the metric
"""
@spec metadata(metric) :: Type.metadata_result()
def metadata(metric) do
case Map.get(@metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :timeseries)
module when is_atom(module) ->
module.metadata(metric)
end
end
@doc ~s"""
Get the first datetime for which a given metric is available for a given slug
"""
@spec first_datetime(metric, selector, opts) :: Type.first_datetime_result()
def first_datetime(metric, selector, opts) do
metric = maybe_replace_metric(metric, selector)
case Map.get(@metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :timeseries)
module when is_atom(module) ->
module = maybe_change_module(module, metric, selector, opts)
module.first_datetime(metric, selector)
end
end
@doc ~s"""
Get the datetime at which the latest data point for the given metric/slug
pair was computed.
"""
@spec last_datetime_computed_at(metric, selector, opts) ::
Type.last_datetime_computed_at_result()
def last_datetime_computed_at(metric, selector, opts) do
metric = maybe_replace_metric(metric, selector)
case Map.get(@metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :timeseries)
module when is_atom(module) ->
module = maybe_change_module(module, metric, selector, opts)
module.last_datetime_computed_at(metric, selector)
end
end
@doc ~s"""
Get all available slugs for a given metric
"""
@spec available_slugs(metric, opts) :: Type.available_slugs_result()
def available_slugs(metric, opts \\ [])
def available_slugs(metric, opts) do
case Map.get(@metric_to_module_map, metric) do
nil ->
metric_not_available_error(metric, type: :timeseries)
module when is_atom(module) ->
module = maybe_change_module(module, metric, %{}, opts)
module.available_slugs(metric)
end
end
@doc ~s"""
Get all available aggregations
"""
@spec available_aggregations :: list(Type.aggregation())
def available_aggregations(), do: @aggregations
@doc ~s"""
Get all available metrics.
Available options:
- min_interval_less_or_equal - return all metrics with min interval that is
less than or equal to a given amount (expressed as a string - 5m, 1h, etc.)
"""
@spec available_metrics(opts) :: list(metric)
def available_metrics(opts \\ [])
def available_metrics([]), do: @metrics
def available_metrics(opts) do
case Keyword.get(opts, :filter) do
nil ->
@metrics
:min_interval_less_or_equal ->
filter_interval = Keyword.fetch!(opts, :filter_interval)
filter_metrics_by_min_interval(@metrics, filter_interval, &<=/2)
:min_interval_greater_or_equal ->
filter_interval = Keyword.fetch!(opts, :filter_interval)
filter_metrics_by_min_interval(@metrics, filter_interval, &>=/2)
end
end
@doc ~s"""
Get the available metrics for a given slug.
The available metrics list is the combination of the available metrics lists
of every metric module.
"""
@spec available_metrics_for_slug(any) :: available_metrics_with_nocache_result
def available_metrics_for_slug(%{slug: slug} = selector) do
parallel_opts = [ordered: false, max_concurrency: 8, timeout: 60_000]
parallel_fun = fn module ->
cache_key =
{__MODULE__, :available_metrics_for_slug_in_module, module, selector}
|> Sanbase.Cache.hash()
Sanbase.Cache.get_or_store(cache_key, fn -> module.available_metrics(selector) end)
end
metrics_in_modules = Sanbase.Parallel.map(@metric_modules, parallel_fun, parallel_opts)
combine_metrics_in_modules(metrics_in_modules, slug)
end
@doc ~s"""
Get the available timeseries metrics for a given slug.
The result is a subset of available_metrics_for_slug/1
"""
@spec available_timeseries_metrics_for_slug(any) :: available_metrics_with_nocache_result
def available_timeseries_metrics_for_slug(selector) do
available_metrics =
Sanbase.Cache.get_or_store(
{__MODULE__, :available_metrics_for_slug, selector} |> Sanbase.Cache.hash(),
fn -> available_metrics_for_slug(selector) end
)
case available_metrics do
{:nocache, {:ok, metrics}} ->
{:nocache, {:ok, metrics -- (@histogram_metrics ++ @table_metrics)}}
{:ok, metrics} ->
{:ok, metrics -- (@histogram_metrics ++ @table_metrics)}
end
end
@doc ~s"""
Get the available histogram metrics for a given slug.
The result is a subset of available_metrics_for_slug/1
"""
@spec available_histogram_metrics_for_slug(any) :: available_metrics_with_nocache_result
def available_histogram_metrics_for_slug(selector) do
available_metrics =
Sanbase.Cache.get_or_store(
{__MODULE__, :available_metrics_for_slug, selector} |> Sanbase.Cache.hash(),
fn -> available_metrics_for_slug(selector) end
)
case available_metrics do
{:nocache, {:ok, metrics}} ->
{:nocache, {:ok, metrics -- (@timeseries_metrics ++ @table_metrics)}}
{:ok, metrics} ->
{:ok, metrics -- (@timeseries_metrics ++ @table_metrics)}
end
end
@doc ~s"""
Get the available table metrics for a given slug.
The result is a subset of available_metrics_for_slug/1
"""
@spec available_table_metrics_for_slug(any) :: available_metrics_with_nocache_result
def available_table_metrics_for_slug(selector) do
available_metrics =
Sanbase.Cache.get_or_store(
{__MODULE__, :available_metrics_for_slug, selector} |> Sanbase.Cache.hash(),
fn -> available_metrics_for_slug(selector) end
)
case available_metrics do
{:nocache, {:ok, metrics}} ->
{:nocache, {:ok, metrics -- (@timeseries_metrics ++ @histogram_metrics)}}
{:ok, metrics} ->
{:ok, metrics -- (@timeseries_metrics ++ @histogram_metrics)}
end
end
@doc ~s"""
Get all available timeseries metrics
"""
@spec available_timeseries_metrics() :: list(metric)
def available_timeseries_metrics(), do: @timeseries_metrics
@doc ~s"""
Get all available histogram metrics
"""
@spec available_histogram_metrics() :: list(metric)
def available_histogram_metrics(), do: @histogram_metrics
@doc ~s"""
Get all available table metrics
"""
@spec available_table_metrics() :: list(metric)
def available_table_metrics(), do: @table_metrics
@doc ~s"""
Get all slugs for which at least one of the metrics is available
"""
@spec available_slugs() :: Type.available_slugs_result()
def available_slugs() do
# Providing a 2 element tuple `{any, integer}` will use that second element
# as TTL for the cache key
cache_key = {__MODULE__, :available_slugs_all_metrics} |> Sanbase.Cache.hash()
Sanbase.Cache.get_or_store({cache_key, 1800}, &get_available_slugs/0)
end
@doc ~s"""
Get all free metrics
"""
@spec free_metrics() :: list(metric)
def free_metrics(), do: @free_metrics
@doc ~s"""
Get all restricted metrics
"""
@spec restricted_metrics() :: list(metric)
def restricted_metrics(), do: @restricted_metrics
@doc ~s"""
Get a map where the key is a metric and the value is the access level
"""
@spec access_map() :: map()
def access_map(), do: @access_map
@doc ~s"""
Checks if historical data is allowed for a given `metric`
"""
@spec is_historical_data_allowed?(metric) :: boolean
def is_historical_data_allowed?(metric) do
get_in(@access_map, [metric, "historical"]) == :free
end
@doc ~s"""
Checks if realtime data is allowed for a given `metric`
"""
@spec is_realtime_data_allowed?(metric) :: boolean
def is_realtime_data_allowed?(metric) do
get_in(@access_map, [metric, "realtime"]) == :free
end
@doc ~s"""
Get a map where the key is a metric and the value is the min plan it is
accessible in.
"""
@spec min_plan_map() :: map()
def min_plan_map(), do: @min_plan_map
# Private functions
defp metric_not_available_error(metric, opts \\ [])
defp metric_not_available_error(metric, opts) do
type = Keyword.get(opts, :type, :all)
%{close: close, error_msg: error_msg} = metric_not_available_error_details(metric, type)
case close do
nil -> {:error, error_msg}
{"", close} -> {:error, error_msg <> " Did you mean the metric '#{close}'?"}
{type, close} -> {:error, error_msg <> " Did you mean the #{type} metric '#{close}'?"}
end
end
defp metric_not_available_error_details(metric, type) do
%{
close: maybe_get_close_metric(metric, type),
error_msg: "The metric '#{metric}' is not supported or is mistyped."
}
end
# Find the metric from the mapset which is closest to the original metric.
# The found metric must have a jaro distance greater than 0.8
defp find_closest(mapset, metric) do
Enum.reduce(mapset, {nil, -1}, fn m, {max_m, max_dist} ->
dist = String.jaro_distance(metric, m)
case dist > max_dist do
true -> {m, dist}
false -> {max_m, max_dist}
end
end)
|> case do
{metric, dist} when dist > 0.8 -> metric
_ -> nil
end
end
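# Illustrative example (hypothetical metric names): calling find_closest/2 with
# MapSet.new(["price_usd", "price_btc"]) and the mistyped "price_usdt" returns
# "price_usd", since String.jaro_distance("price_usdt", "price_usd") > 0.8.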
# Returns {closest_metric_type, closest_metric}.
# Metrics of the same type as the requested one take priority: if a mistyped
# timeseries metric is requested and a timeseries metric with a jaro distance
# > 0.8 exists, that metric is returned.
defp maybe_get_close_metric(metric, type) do
timeseries = find_closest(@timeseries_metrics_mapset, metric)
histogram = find_closest(@histogram_metrics_mapset, metric)
table = find_closest(@table_metrics_mapset, metric)
case timeseries || histogram || table do
nil ->
nil
_ ->
case type do
:all ->
{"", timeseries || histogram || table}
:timeseries ->
(timeseries && {:timeseries, timeseries}) || (histogram && {:histogram, histogram}) ||
(table && {:table, table})
:histogram ->
(histogram && {:histogram, histogram}) || (timeseries && {:timeseries, timeseries}) ||
(table && {:table, table})
:table ->
(table && {:table, table}) || (timeseries && {:timeseries, timeseries}) ||
(histogram && {:histogram, histogram})
end
end
end
defp execute_if_aggregation_valid(fun, metric, aggregation) do
aggregation_valid? = aggregation in Map.get(@aggregations_per_metric, metric, [])
case aggregation_valid? do
true ->
fun.()
false ->
{:error, "The aggregation #{aggregation} is not supported for the metric #{metric}"}
end
end
@social_metrics Sanbase.SocialData.MetricAdapter.available_metrics()
# When using a slug, the social metrics are fetched from ClickHouse.
# But when the text selector is used, the metric is fetched from Elasticsearch,
# as it cannot be precomputed due to the vast number of possible text arguments.
defp maybe_change_module(module, metric, %{text: _text}, _opts) do
case metric in @social_metrics do
true -> Sanbase.SocialData.MetricAdapter
false -> module
end
end
defp maybe_change_module(module, metric, selector, opts)
when metric in ["price_usd", "price_btc"] do
case Keyword.get(opts, :source) || Map.get(selector, :source) do
"cryptocompare" -> Sanbase.PricePair.MetricAdapter
_ -> module
end
end
defp maybe_change_module(module, _metric, _selector, _opts), do: module
defp filter_metrics_by_min_interval(metrics, interval, compare_fun) do
interval_to_sec = Sanbase.DateTimeUtils.str_to_sec(interval)
metrics
|> Enum.filter(fn metric ->
{:ok, %{min_interval: min_interval}} = metadata(metric)
min_interval_sec = Sanbase.DateTimeUtils.str_to_sec(min_interval)
compare_fun.(min_interval_sec, interval_to_sec)
end)
end
defp get_available_slugs() do
{slugs, errors} =
Enum.reduce(@metric_modules, {[], []}, fn module, {slugs_acc, errors} ->
case module.available_slugs() do
{:ok, slugs} -> {slugs ++ slugs_acc, errors}
{:error, error} -> {slugs_acc, [error | errors]}
end
end)
case errors do
[] -> {:ok, slugs |> Enum.uniq()}
_ -> {:error, "Cannot fetch all available slugs. Errors: #{inspect(errors)}"}
end
end
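# Illustrative behaviour of combine_metrics_in_modules/2 (ignoring
# maybe_replace_metrics/2): [{:ok, ["a"]}, {:ok, ["b"]}] yields {:ok, ["a", "b"]},
# while [{:ok, ["a"]}, {:error, :timeout}] yields {:nocache, {:ok, ["a"]}} so the
# metrics are fetched again on the next attempt.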
defp combine_metrics_in_modules(metrics_in_modules, slug) do
# Combine the results of the different metric modules. In case any of the
# metric modules returned an :error tuple, wrap the result in a :nocache
# tuple so the next attempt to fetch the data will try to fetch the metrics
# again.
available_metrics =
Enum.flat_map(metrics_in_modules, fn
{:ok, metrics} -> metrics
_ -> []
end)
|> maybe_replace_metrics(slug)
|> Enum.uniq()
|> Enum.sort()
has_errors? =
metrics_in_modules
|> Enum.any?(&(not match?({:ok, _}, &1)))
case has_errors? do
true -> {:nocache, {:ok, available_metrics}}
false -> {:ok, available_metrics}
end
end
end
|
lib/sanbase/metric/metric.ex
| 0.919912
| 0.603348
|
metric.ex
|
starcoder
|
defmodule TtrCore.Games do
@moduledoc """
A `DynamicSupervisor` process that manages each game's process tree.
"""
use DynamicSupervisor
alias TtrCore.{
Cards,
Mechanics,
Players
}
alias TtrCore.Mechanics.State
alias TtrCore.Games.{
Game,
Index
}
require Logger
@type user_id :: binary()
@type game_id :: binary()
@type reason :: String.t
@type game_options :: [
owner_id: user_id()
]
# API
@doc """
Starts the `TtrCore.Games` supervisor.
"""
@spec start_link :: Supervisor.on_start()
def start_link do
DynamicSupervisor.start_link(__MODULE__, [], [name: __MODULE__])
end
@doc """
Stops the `TtrCore.Games` supervisor.
"""
@spec stop :: :ok
def stop do
DynamicSupervisor.stop(__MODULE__)
end
@doc """
Specifies `TtrCore.Games` to run as a supervisor.
"""
@spec child_spec(term) :: Supervisor.child_spec()
def child_spec(_) do
%{id: __MODULE__,
start: {__MODULE__, :start_link, []},
type: :supervisor}
end
@doc """
Creates a new game process under the `TtrCore.Games` supervision
tree and assigns the player id as the owner of the game and includes
the owner as "joined" to the game.
Returns `{:ok, game_id, pid}` if game successfully created.
Returns `{:error, :invalid_user_id}` if user id is not registered.
"""
@spec create(user_id()) :: {:ok, game_id(), pid()} | {:error, :invalid_user_id}
def create(user_id) do
if Players.registered?(user_id) do
game_id = 32
|> :crypto.strong_rand_bytes()
|> Base.encode64()
train_deck = Cards.shuffle_trains()
ticket_deck = Cards.shuffle_tickets()
state = %State{
id: game_id,
owner_id: user_id,
players: %{},
train_deck: train_deck,
ticket_deck: ticket_deck,
displayed_trains: [],
discard_deck: [],
stage: :unstarted
}
spec = {Game, state}
{:ok, pid} = DynamicSupervisor.start_child(__MODULE__, spec)
{:ok, game_id, pid}
else
{:error, :invalid_user_id}
end
end
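# Illustrative lifecycle (user ids are hypothetical):
#
#   {:ok, game_id, _pid} = TtrCore.Games.create("owner-id")
#   :ok = TtrCore.Games.join(game_id, "player-2")
#   :ok = TtrCore.Games.setup(game_id, "owner-id")
#   :ok = TtrCore.Games.begin(game_id, "owner-id")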
@doc """
Setup a game. Deals train and ticket cards to players and displays train cards.
Returns an error if the game is not found.
Returns an error if the user id used is not the owner of the game.
Returns an error if the game is not already in the unstarted stage
(required in order to move to the setup stage).
"""
@spec setup(game_id(), user_id()) :: :ok | {:error, :not_found | :not_owner | :not_in_unstarted}
def setup(game_id, user_id) do
case Registry.lookup(Index, game_id) do
[{pid, _}] ->
Logger.info("setup game:#{game_id}")
Game.setup(pid, user_id)
_ ->
{:error, :not_found}
end
end
@doc """
Starts a game. Chooses a random player to start and begins the game.
Only the user id that matches the owner of the game can start the
game.
If the game id does not exist, an `{:error, :not_found}` tuple is
returned.
If a player is used to begin a game that does not match the owner of
the game, an `{:error, :not_owner}` tuple is returned.
If the game has already been started, an `{:error,
:already_started}` tuple is returned.
"""
@spec begin(game_id(), user_id()) :: :ok | {:error, :not_found | :not_owner | :not_in_setup}
def begin(game_id, user_id) do
case Registry.lookup(Index, game_id) do
[{pid, _}] ->
Logger.info("starting game:#{game_id}")
Game.begin(pid, user_id)
_ ->
{:error, :not_found}
end
end
@doc """
Join a game. Returns `:ok` if successfully joined.
If the game id does not exist, an `{:error, :not_found}` tuple is
returned.
If the game state indicates that the game is full, an `{:error,
:game_full}` tuple is returned.
If the game state shows that the player being added already exists
in the game, an `{:error, :already_joined}` tuple is returned.
"""
@spec join(game_id(), user_id()) :: :ok |
{:error, :not_found | :invalid_user_id | :game_full | :already_joined}
def join(game_id, user_id) do
if Players.registered?(user_id) do
case Registry.lookup(Index, game_id) do
[{pid, _}] ->
Logger.info("user:#{user_id} joined game:#{game_id}")
Game.join(pid, user_id)
_ ->
{:error, :not_found}
end
else
{:error, :invalid_user_id}
end
end
@doc """
Leave a game. Returns `:ok` if successfully left.
If the game id does not exist, an `{:error, :not_found}` tuple is
returned.
If the game state indicates that the user has not previously joined
the game, then an `{:error, :not_joined}` tuple is returned.
"""
@spec leave(game_id(), user_id()) :: :ok | {:error, :not_found | :not_joined}
def leave(game_id, user_id) do
case Registry.lookup(Index, game_id) do
[{pid, _}] ->
Logger.info("user:#{user_id} left game:#{game_id}")
Game.leave(pid, user_id)
_ ->
{:error, :not_found}
end
end
@doc """
Get list of all Game IDs. Not ordered.
"""
@spec list() :: {:ok, [game_id()]}
def list do
ids = __MODULE__
|> DynamicSupervisor.which_children()
|> Enum.flat_map(fn {_, pid, _, _} -> Registry.keys(Index, pid) end)
{:ok, ids}
end
@doc """
Select tickets that were drawn into buffer for a player.
Delegates to `TtrCore.Mechanics.select_tickets/3` with the game
state.
"""
@spec select_tickets(game_id(), user_id(), [TicketCard.t]) :: :ok | {:error, :not_found | reason()}
def select_tickets(game_id, user_id, tickets) do
case Registry.lookup(Index, game_id) do
[{pid, _}] -> Game.select_tickets(pid, user_id, tickets)
_ -> {:error, :not_found}
end
end
@doc """
Draw tickets from deck to a player for selections. Always draws 3
and places them in the players selection buffer.
Delegates to `TtrCore.Mechanics.draw_tickets/2` with the game
state.
"""
@spec draw_tickets(game_id(), user_id()) :: :ok | {:error, :not_found | reason()}
def draw_tickets(game_id, user_id) do
case Registry.lookup(Index, game_id) do
[{pid, _}] -> Game.draw_tickets(pid, user_id)
_ -> {:error, :not_found}
end
end
@doc """
Select trains from the display deck and replenish train display.
Delegates to `TtrCore.Mechanics.select_trains/3` with the game
state.
"""
@spec select_trains(game_id(), user_id(), [TrainCard.t]) :: :ok | {:error, :not_found | reason()}
def select_trains(game_id, user_id, trains) do
case Registry.lookup(Index, game_id) do
[{pid, _}] -> Game.select_trains(pid, user_id, trains)
_ -> {:error, :not_found}
end
end
@doc """
Draws trains to a player from the train deck. Can draw 1 or 2 cards.
Delegates to `TtrCore.Mechanics.draw_trains/3` with the game
state.
"""
@spec draw_trains(game_id(), user_id(), integer) :: :ok | {:error, :not_found | reason()}
def draw_trains(game_id, user_id, count) do
case Registry.lookup(Index, game_id) do
[{pid, _}] -> Game.draw_trains(pid, user_id, count)
_ -> {:error, :not_found}
end
end
@doc """
Claims a route for a player and pays out the cost in trains.
Delegates to `TtrCore.Mechanics.claim_route/5` with the game
state.
"""
@spec claim_route(game_id(), user_id(), Route.t, [TrainCard.t]) :: :ok | {:error, :not_found | reason()}
def claim_route(game_id, user_id, route, train_cards) do
case Registry.lookup(Index, game_id) do
[{pid, _}] -> Game.claim_route(pid, user_id, route, train_cards)
_ -> {:error, :not_found}
end
end
@doc """
End a player's turn.
Delegates to `TtrCore.Mechanics.end_turn/2` with the game
state.
"""
@spec end_turn(game_id(), user_id()) :: :ok | {:error, :not_found | reason()}
def end_turn(game_id, user_id) do
case Registry.lookup(Index, game_id) do
[{pid, _}] -> Game.end_turn(pid, user_id)
_ -> {:error, :not_found}
end
end
@doc """
End a game. Returns `:ok` if successfully ended.
If the game id does not exist, an `{:error, :not_found}` tuple is
returned.
"""
@spec destroy(game_id()) :: :ok | {:error, :not_found}
def destroy(game_id) do
case Registry.lookup(Index, game_id) do
[{pid, _}] ->
DynamicSupervisor.terminate_child(__MODULE__, pid)
_ ->
Logger.warn("Tried to destroy game:#{game_id}, but it doesn't exist.")
{:error, :not_found}
end
end
@doc """
Returns contextual state based on player id in order to not reveal
secrets to others for a particular game.
"""
@spec get_context(game_id(), user_id()) :: {:ok, Context.t} | {:error, :not_found | :not_joined | :user_not_found}
def get_context(game_id, user_id) do
case Registry.lookup(Index, game_id) do
[{pid, _}] -> Game.get_context(pid, user_id)
_ -> {:error, :not_found}
end
end
@doc """
Returns complete game state.
"""
@spec get_state(game_id()) :: {:ok, Mechanics.t} | {:error, :not_found}
def get_state(game_id) do
case Registry.lookup(Index, game_id) do
[{pid, _}] -> {:ok, Game.get_state(pid)}
_ -> {:error, :not_found}
end
end
# Callbacks
@impl true
def init(_args) do
DynamicSupervisor.init(strategy: :one_for_one)
end
end
|
lib/ttr_core/games.ex
| 0.867183
| 0.423339
|
games.ex
|
starcoder
|
defmodule Plaid.Sandbox do
@moduledoc """
[Plaid Sandbox API](https://plaid.com/docs/api/sandbox/) calls and schema.
> Only used for sandbox testing purposes. None of these calls will work in `development` or `production`.
🏗 I haven't yet tested the `bank_transfer` endpoints against the actual Plaid API because I can't
get the `bank_transfers` product from Plaid yet. If you test it, let me know and I can remove
the in-progress status!
"""
alias Plaid.Castable
defmodule TransactionsOptions do
@moduledoc """
[Plaid API /sandbox/public_token/create transactions options schema.](https://plaid.com/docs/api/sandbox/#sandbox-public_token-create-request-transactions)
"""
@type t :: %__MODULE__{
start_date: String.t(),
end_date: String.t()
}
@derive Jason.Encoder
defstruct [:start_date, :end_date]
end
defmodule CreatePublicTokenResponse do
@moduledoc """
[Plaid API /sandbox/public_token/create response schema.](https://plaid.com/docs/api/sandbox/#sandboxpublic_tokencreate)
"""
@behaviour Castable
@type t :: %__MODULE__{
public_token: String.t(),
request_id: String.t()
}
defstruct [:public_token, :request_id]
@impl true
def cast(generic_map) do
%__MODULE__{
public_token: generic_map["public_token"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Create a valid `public_token` with arbitrary details.
Does a `POST /sandbox/public_token/create` call to create a new
sandbox public token.
Params:
* `institution_id` - The ID of the institution the Item will be associated with.
* `initial_products` - The products to initially pull for the Item.
Options:
* `:webhook` - Specify a webhook to associate with the new Item.
* `:override_username` - Test username to use for the creation of the Sandbox Item.
* `:override_password` - Test password to use for the creation of the Sandbox Item.
* `:transactions` - Options for transactions on the new Item.
## Examples
Sandbox.create_public_token("ins_1", ["auth"], client_id: "123", secret: "abc")
{:ok, %Sandbox.CreatePublicTokenResponse{}}
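A hypothetical call passing options (all values illustrative):
Sandbox.create_public_token(
"ins_1",
["transactions"],
%{transactions: %Sandbox.TransactionsOptions{start_date: "2021-01-01", end_date: "2021-02-01"}},
client_id: "123",
secret: "abc"
)
{:ok, %Sandbox.CreatePublicTokenResponse{}}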
"""
@spec create_public_token(String.t(), [String.t()], options, Plaid.config()) ::
{:ok, CreatePublicTokenResponse.t()} | {:error, Plaid.Error.t()}
when options: %{
optional(:webhook) => String.t(),
optional(:override_username) => String.t(),
optional(:override_password) => String.t(),
optional(:transactions) => TransactionsOptions.t()
}
def create_public_token(institution_id, initial_products, options \\ %{}, config) do
options_payload =
Map.take(options, [:webhook, :override_username, :override_password, :transactions])
payload =
%{}
|> Map.put(:institution_id, institution_id)
|> Map.put(:initial_products, initial_products)
|> Map.put(:options, options_payload)
Plaid.Client.call(
"/sandbox/public_token/create",
payload,
CreatePublicTokenResponse,
config
)
end
defmodule ResetItemLoginResponse do
@moduledoc """
[Plaid API /sandbox/item/reset_login response schema.](https://plaid.com/docs/api/sandbox/#sandboxitemreset_login)
"""
@behaviour Castable
@type t :: %__MODULE__{
reset_login: boolean(),
request_id: String.t()
}
defstruct [:reset_login, :request_id]
@impl true
def cast(generic_map) do
%__MODULE__{
reset_login: generic_map["reset_login"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Force an item into a "login required" state.
Does a `POST /sandbox/item/reset_login` call which forces an item into the
`ITEM_LOGIN_REQUIRED` state to simulate an item whose login is no longer valid.
Params:
* `access_token` - The access token associated with the Item to reset the login for.
## Examples
Sandbox.reset_item_login("access-prod-123xxx", client_id: "123", secret: "abc")
{:ok, %Sandbox.ResetItemLoginResponse{}}
"""
@spec reset_item_login(String.t(), Plaid.config()) ::
{:ok, ResetItemLoginResponse.t()} | {:error, Plaid.Error.t()}
def reset_item_login(access_token, config) do
Plaid.Client.call(
"/sandbox/item/reset_login",
%{access_token: access_token},
ResetItemLoginResponse,
config
)
end
@doc """
Change the verification status of an item.
Does a `POST /sandbox/item/set_verification_status` call to change the
status of an item in order to simulate the Automated Micro-deposit flow.
Params:
* `access_token` - The access token associated with the Item data is being requested for.
* `account_id` - The ID of the account whose verification status is to be modified.
* `verification_status` - The verification status to set the account to.
## Examples
Sandbox.set_item_verification_status("access-prod-123xxx", "39flxk4ek2xs", "verification_expired", client_id: "123", secret: "abc")
{:ok, %Plaid.SimpleResponse{request_id: "9bkemelske"}}
"""
@spec set_item_verification_status(String.t(), String.t(), String.t(), Plaid.config()) ::
{:ok, Plaid.SimpleResponse.t()} | {:error, Plaid.Error.t()}
def set_item_verification_status(access_token, account_id, verification_status, config) do
payload = %{
access_token: access_token,
account_id: account_id,
verification_status: verification_status
}
Plaid.Client.call(
"/sandbox/item/set_verification_status",
payload,
Plaid.SimpleResponse,
config
)
end
defmodule FireItemWebhookResponse do
@moduledoc """
[Plaid API /sandbox/item/fire_webhook response schema.](https://plaid.com/docs/api/sandbox/#sandboxitemfire_webhook)
"""
@behaviour Castable
@type t :: %__MODULE__{
webhook_fired: boolean(),
request_id: String.t()
}
defstruct [:webhook_fired, :request_id]
@impl true
def cast(generic_map) do
%__MODULE__{
webhook_fired: generic_map["webhook_fired"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Fire a fake webhook to an Item's webhook endpoint.
Does a `POST /sandbox/item/fire_webhook` call which triggers Plaid to send a
webhook with the given code to the Item's configured webhook endpoint.
Params:
* `access_token` - The access token associated with the Item to fire the webhook for.
* `webhook_code` - The webhook code to send.
> `webhook_code` only supports `DEFAULT_UPDATE` for now.
## Examples
Sandbox.fire_item_webhook("access-prod-123xxx", "DEFAULT_UPDATE", client_id: "123", secret: "abc")
{:ok, %Sandbox.FireItemWebhookResponse{}}
"""
@spec fire_item_webhook(String.t(), String.t(), Plaid.config()) ::
{:ok, FireItemWebhookResponse.t()} | {:error, Plaid.Error.t()}
def fire_item_webhook(access_token, webhook_code, config) do
Plaid.Client.call(
"/sandbox/item/fire_webhook",
%{access_token: access_token, webhook_code: webhook_code},
FireItemWebhookResponse,
config
)
end
@doc """
Simulate a bank transfer event in the Plaid Sandbox.
Does a `POST /sandbox/bank_transfer/simulate` call to simulate a bank transfer
in the plaid sandbox for testing purposes.
Params:
* `bank_transfer_id` - Plaid’s unique identifier for a bank transfer.
* `event_type` - The asynchronous event to be simulated. May be: posted, failed, or reversed.
Options:
* `:failure_reason` - The failure reason if the type of this transfer is "failed" or "reversed".
## Examples
Sandbox.simulate_bank_transfer("bt_123xxx", "posted", client_id: "123", secret: "abc")
{:ok, %Plaid.SimpleResponse{}}
"""
@spec simulate_bank_transfer(String.t(), String.t(), options, Plaid.config()) ::
{:ok, Plaid.SimpleResponse.t()} | {:error, Plaid.Error.t()}
when options: %{
optional(:failure_reason) => %{
optional(:ach_return_code) => String.t(),
optional(:description) => String.t()
}
}
def simulate_bank_transfer(bank_transfer_id, event_type, options \\ %{}, config) do
options_payload = Map.take(options, [:failure_reason])
payload =
%{}
|> Map.put(:bank_transfer_id, bank_transfer_id)
|> Map.put(:event_type, event_type)
|> Map.merge(options_payload)
Plaid.Client.call(
"/sandbox/bank_transfer/simulate",
payload,
Plaid.SimpleResponse,
config
)
end
@doc """
Manually fire a Bank Transfer webhook.
Does a `POST /sandbox/bank_transfer/fire_webhook` call to manually trigger
a bank transfer webhook.
Params:
* `webhook` - The URL to which the webhook should be sent.
## Examples
Sandbox.fire_bank_transfer_webhook("https://example.com/webhook", client_id: "123", secret: "abc")
{:ok, %Plaid.SimpleResponse{}}
"""
@spec fire_bank_transfer_webhook(String.t(), Plaid.config()) ::
{:ok, Plaid.SimpleResponse.t()} | {:error, Plaid.Error.t()}
def fire_bank_transfer_webhook(webhook, config) do
Plaid.Client.call(
"/sandbox/bank_transfer/fire_webhook",
%{webhook: webhook},
Plaid.SimpleResponse,
config
)
end
defmodule CreateProcessorTokenResponse do
@moduledoc """
[Plaid API /sandbox/processor_token/create response schema.](https://plaid.com/docs/api/sandbox/#sandboxprocessor_tokencreate)
"""
@behaviour Castable
@type t :: %__MODULE__{
processor_token: String.t(),
request_id: String.t()
}
defstruct [:processor_token, :request_id]
@impl true
def cast(generic_map) do
%__MODULE__{
processor_token: generic_map["processor_token"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Create a valid `processor_token` for an arbitrary institution ID and test credentials.
Does a `POST /sandbox/processor_token/create` call to create a valid `processor_token`
to use with all the processor endpoints in the sandbox.
Params:
* `institution_id` - The ID of the institution the Item will be associated with.
Options:
* `:override_username` - Test username to use for the creation of the Sandbox Item.
* `:override_password` - Test password to use for the creation of the Sandbox Item.
## Examples
Sandbox.create_processor_token("ins_1", client_id: "123", secret: "abc")
{:ok, %Sandbox.CreateProcessorTokenResponse{}}
"""
@spec create_processor_token(String.t(), options, Plaid.config()) ::
{:ok, CreateProcessorTokenResponse.t()} | {:error, Plaid.Error.t()}
when options: %{
optional(:override_username) => String.t(),
optional(:override_password) => String.t()
}
def create_processor_token(institution_id, options \\ %{}, config) do
options_payload = Map.take(options, [:override_username, :override_password])
payload = %{institution_id: institution_id, options: options_payload}
Plaid.Client.call(
"/sandbox/processor_token/create",
payload,
CreateProcessorTokenResponse,
config
)
end
end
|
lib/plaid/sandbox.ex
| 0.862583
| 0.528351
|
sandbox.ex
|
starcoder
|
defmodule Mazes.CircularMaze do
@behaviour Mazes.Maze
alias Mazes.Maze
@doc "Returns a circular maze with given size, either with all walls or no walls"
@impl true
def new(opts) do
radius = Keyword.get(opts, :radius, 10)
first_ring_vertex_count = 8
all_vertices_adjacent? = Keyword.get(opts, :all_vertices_adjacent?, false)
# the adjacency_matrix of a circular maze is like a triangle, starting at the innermost ring
# {1, 1}
# -----------------------------
# {1, 2} <-> {2, 2} <-> (loops back to {1, 2})
# ------------- | -------------
# {1, 3} <-> {2, 3} <-> {3, 3} <-> {4, 3} <-> (loops back to {1, 3})
# ------- | -------- | -------- | --------
adjacency_matrix = %{{1, 1} => %{}}
adjacency_matrix =
do_new(
2,
radius,
adjacency_matrix,
1,
first_ring_vertex_count,
all_vertices_adjacent?
)
%{
radius: radius,
adjacency_matrix: adjacency_matrix,
module: __MODULE__,
from: nil,
to: nil
}
end
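# Illustrative call: new(radius: 2) builds a maze whose innermost ring is the
# single vertex {1, 1} and whose second ring has the 8 vertices {1, 2}..{8, 2},
# matching first_ring_vertex_count above.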
defp do_new(ring, max_rings, adjacency_matrix, _, _, _) when ring > max_rings do
adjacency_matrix
end
# adjacency matrix has a triangle shape where first and last vertex in each row are neighbors
defp do_new(
ring,
max_rings,
adjacency_matrix,
previous_ring_vertex_count,
current_ring_vertex_count_growth,
all_vertices_adjacent?
) do
current_ring_vertex_count = previous_ring_vertex_count * current_ring_vertex_count_growth
current_ring_columns = Enum.to_list(1..current_ring_vertex_count)
next_ring_vertex_count_growth = get_next_ring_vertex_count_growth(ring + 1)
adjacency_matrix =
[current_ring_vertex_count | current_ring_columns]
|> Enum.chunk_every(3, 1, [1])
|> Enum.reduce(adjacency_matrix, fn [ccw, x, cw], adjacency_matrix_acc ->
current_vertex = {x, ring}
previous_row_neighbor =
{trunc(Float.ceil(x / current_ring_vertex_count_growth)), ring - 1}
adjacency_matrix_acc =
Map.update!(
adjacency_matrix_acc,
previous_row_neighbor,
&Map.put(&1, current_vertex, all_vertices_adjacent?)
)
ccw_neighbor = {ccw, ring}
cw_neighbor = {cw, ring}
neighbors = [ccw_neighbor, cw_neighbor, previous_row_neighbor]
adjacency_map = Enum.map(neighbors, &{&1, all_vertices_adjacent?}) |> Enum.into(%{})
Map.put(adjacency_matrix_acc, current_vertex, adjacency_map)
end)
do_new(
ring + 1,
max_rings,
adjacency_matrix,
current_ring_vertex_count,
next_ring_vertex_count_growth,
all_vertices_adjacent?
)
end
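# Tracing this helper with its defaults (next_growth_at_ring: 3, delta: 2),
# the per-ring column count appears to double at rings 3, 6, 12, 24, ... and
# to stay flat (growth of 1) for every other ring.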
defp get_next_ring_vertex_count_growth(ring, next_growth_at_ring \\ 3, delta \\ 2) do
if ring == next_growth_at_ring do
2
else
if ring >= next_growth_at_ring + delta do
get_next_ring_vertex_count_growth(ring, next_growth_at_ring * delta, delta)
else
1
end
end
end
@impl true
def center(_) do
{1, 1}
end
# Not part of the behavior, functions needed for drawing the grid
def rings(maze) do
Maze.vertices(maze)
|> Enum.group_by(&elem(&1, 1))
|> Enum.map(fn {ring, vertices} ->
%{ring: ring, column_count: length(vertices), vertices: vertices}
end)
end
def inner(maze, {_, y} = vertex),
do: Enum.find(Maze.neighboring_vertices(maze, vertex), &(elem(&1, 1) == y - 1))
def cw(maze, {x, y} = vertex) do
Enum.find(Maze.neighboring_vertices(maze, vertex), &(&1 == {x + 1, y})) ||
Enum.find(Maze.neighboring_vertices(maze, vertex), &(&1 == {1, y}))
end
end
|
lib/mazes/circular_maze.ex
| 0.849878
| 0.645588
|
circular_maze.ex
|
starcoder
|
defimpl Transmog.Parser, for: List do
@moduledoc """
Implementation of `Transmog.Parser` for lists. Parses lists which are already
considered valid key paths. A list is only invalid if it is empty. You might
want to use lists instead of the string notation if you need to represent
special values that the parser does not currently support.
## Examples
[42, 3.14] #=> References a map or list with key path [42, 3.14]
iex> list = ["credentials", "name.first"]
iex> {:ok, key_path} = Transmog.Parser.parse(list)
iex> key_path
["credentials", "name.first"]
iex> list = []
iex> Transmog.Parser.parse(list)
{:error, :invalid_key_path}
"""
alias Transmog.InvalidKeyPathError
@doc """
`parse/1` parses a list into a key path. A key path is already represented by
a list and therefore this function will return the list as is unless the list
is empty. An empty list is not a valid key path.
This is an alternative if you would prefer to pass a key path with special
values like numbers, strings with periods or colors, etc.
## Examples
iex> list = [1, ":a", %{}]
iex> {:ok, key_path} = Transmog.Parser.parse(list)
iex> key_path
[1, ":a", %{}]
"""
@spec parse(list :: list(term)) :: {:ok, list(term)} | {:error, :invalid_key_path}
def parse([]), do: {:error, :invalid_key_path}
def parse(list) when is_list(list), do: {:ok, list}
@doc """
`parse!/1` parses a list into a key path. A key path is already represented by
a list and therefore this function will return the list as is unless the list
is empty. If the list is empty then an error will be raised.
The list will be unwrapped automatically when it is returned.
## Examples
iex> list = [1, nil]
iex> Transmog.Parser.parse!(list)
[1, nil]
iex> list = []
iex> Transmog.Parser.parse!(list)
** (Transmog.InvalidKeyPathError) key path is not valid ([])
"""
@spec parse!(list :: list(term)) :: list(term)
def parse!([]), do: raise(InvalidKeyPathError.new([]))
def parse!(list) when is_list(list), do: list
end
|
lib/transmog/parser/list.ex
| 0.874955
| 0.413004
|
list.ex
|
starcoder
|
defmodule InlineSVG do
@moduledoc """
Render inline SVG.
## Initialization
```elixir
defmodule SVGHelper do
use InlineSVG, root: "assets/static/svg", default_collection: "generic"
end
```
This will generate functions for each SVG file, effectively caching them at
compile time.
## Usage
### render SVG from default collection
```elixir
svg("home")
```
It will load the SVG file from `assets/static/svg/generic/home.svg`:
```html
<svg>...</svg>
```
### render SVG from other collections
You can break up SVG files into collections, and use the second argument of
`svg/2` to specify the name of collection:
```elixir
svg("user", "fontawesome")
```
It will load the SVG file from `assets/static/svg/fontawesome/user.svg`:
```html
<svg>...</svg>
```
### render SVG with custom HTML attributes
You can also pass optional HTML attributes into the function to set those
attributes on the SVG:
```elixir
svg("home", class: "logo", id: "bounce-animation")
svg("home", "fontawesome", class: "logo", id: "bounce-animation")
```
It will output:
```html
<svg class="logo" id="bounce-animation">...</svg>
<svg class="logo" id="bounce-animation">...</svg>
```
## Options
There are several configuration options for meeting your needs.
### `:root`
Specify the directory from which to load SVG files.
You must specify it by your own.
### `:function_prefix`
Specify the prefix of the generated functions.
By default, the value is `""`, so the generated function name is `svg`.
If this value is `"_"`, the generated function name is `_svg`.
### `:default_collection`
Specify the default collection to use.
The default value is `generic`.
## Use in Phoenix
An example:
```elixir
defmodule DemoWeb.SVGHelper do
use InlineSVG,
root: "assets/static/svg",
function_prefix: "_",
default_collection: "generic"
def svg(arg1) do
Phoenix.HTML.raw(_svg(arg1))
end
def svg(arg1, arg2) do
Phoenix.HTML.raw(_svg(arg1, arg2))
end
def svg(arg1, arg2, arg3) do
Phoenix.HTML.raw(_svg(arg1, arg2, arg3))
end
end
```
"""
alias InlineSVG.HTML
@doc """
The macro precompiles the SVG files into functions.
"""
defmacro __using__(opts \\ []) do
root = Keyword.fetch!(opts, :root)
{root, _} = Code.eval_quoted(root)
if !File.dir?(root) do
raise "invalid :root option"
end
function_prefix = Keyword.get(opts, :function_prefix, "")
default_collection = Keyword.get(opts, :default_collection, "generic")
[recompile_hooks(root) | generate_svg_fns(root, function_prefix, default_collection)]
end
# Trigger recompile when SVG files change.
# Read more at https://hexdocs.pm/mix/1.13/Mix.Tasks.Compile.Elixir.html
defp recompile_hooks(root) do
quote bind_quoted: [root: root] do
@root root
paths =
@root
|> Path.join("**/*.svg")
|> Path.wildcard()
|> Enum.filter(&File.regular?(&1))
@paths_hash :erlang.md5(paths)
for path <- paths do
@external_resource path
end
def __mix_recompile__?() do
@root
|> Path.join("**/*.svg")
|> Path.wildcard()
|> Enum.filter(&File.regular?(&1))
|> :erlang.md5() != @paths_hash
end
end
end
defp generate_svg_fns(root, function_prefix, default_collection) do
root
|> scan_svgs()
|> Enum.flat_map(&cache_svg(&1, function_prefix, default_collection))
end
defp scan_svgs(root) do
root
|> Path.join("**/*.svg")
|> Path.wildcard()
|> Stream.filter(&File.regular?(&1))
|> Enum.map(fn svg_path ->
[collection_name, svg_name] =
svg_path
|> Path.relative_to(root)
|> Path.rootname()
|> String.split("/", parts: 2)
{collection_name, svg_name, svg_path}
end)
end
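# Illustrative mapping: with root "assets/static/svg", the file
# "assets/static/svg/generic/home.svg" becomes {"generic", "home", svg_path}.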
defp cache_svg({collection, name, path}, function_prefix, default_collection) do
content = read_svg(path)
# parse HTML at compile time.
parsed_html =
content
|> HTML.parse_html!()
|> Macro.escape()
generic_functions =
if collection == default_collection do
quote do
def unquote(:"#{function_prefix}svg")(unquote(name)) do
unquote(:"#{function_prefix}svg")(unquote(name), unquote(collection), [])
end
def unquote(:"#{function_prefix}svg")(unquote(name), opts) when is_list(opts) do
unquote(:"#{function_prefix}svg")(unquote(name), unquote(collection), opts)
end
end
end
explicit_functions =
quote do
def unquote(:"#{function_prefix}svg")(unquote(name), unquote(collection)) do
unquote(:"#{function_prefix}svg")(unquote(name), unquote(collection), [])
end
def unquote(:"#{function_prefix}svg")(unquote(name), unquote(collection), []) do
unquote(content)
end
def unquote(:"#{function_prefix}svg")(unquote(name), unquote(collection), opts) do
unquote(parsed_html)
|> HTML.insert_attrs(opts)
|> HTML.to_html()
end
end
[generic_functions, explicit_functions]
end
defp read_svg(path) do
path
|> File.read!()
|> String.trim()
end
end
|
lib/inline_svg.ex
| 0.728459
| 0.881666
|
inline_svg.ex
|
starcoder
|
defmodule Sanbase.Clickhouse.HistoricalBalance.EthSpent do
@moduledoc ~s"""
Module providing functions for fetching ethereum spent
"""
alias Sanbase.Clickhouse.HistoricalBalance
alias Sanbase.Clickhouse.HistoricalBalance.EthBalance
@type slug :: String.t()
@type address :: String.t() | list(String.t())
@typedoc ~s"""
An interval represented as string. It has the format of number followed by one of:
ns, ms, s, m, h, d or w - each representing some time unit
"""
@type interval :: String.t()
@type eth_spent_over_time :: %{
datetime: DateTime.t(),
eth_spent: number()
}
@type eth_spent_over_time_result :: {:ok, list(eth_spent_over_time)} | {:error, String.t()}
@doc ~s"""
For a given address or list of addresses returns the ethereum balance change for the
from-to period. The returned list indicates the address, before balance, after balance
and the balance change.
This is a special case of balance_change/4. As ethereum is used a lot for calculating
ethereum spent, this case avoids a call to the database to obtain the contract.
"""
@spec eth_balance_change(address, from :: DateTime.t(), to :: DateTime.t()) ::
HistoricalBalance.Behaviour.balance_change_result()
def eth_balance_change(addresses, from, to) do
EthBalance.balance_change(addresses, "ETH", 18, from, to)
end
@doc ~s"""
For a given address or list of addresses returns the ethereum balance change for each bucket
of size `interval` in the from-to time period
"""
@spec eth_balance_change(address, from :: DateTime.t(), to :: DateTime.t(), interval) ::
HistoricalBalance.Behaviour.historical_balance_change_result()
def eth_balance_change(addresses, from, to, interval) do
EthBalance.historical_balance_change(addresses, "ETH", 18, from, to, interval)
end
@doc ~s"""
For a given address or list of addresses calculate the ethereum spent.
Ethereum spent is defined as follows:
- If the combined balance of the addresses at `from` datetime is bigger than
the combined balance at `to` datetime, the eth spent is the absolute value
of the difference between the two balances
- Zero otherwise
"""
@spec eth_spent(address | list(address), DateTime.t(), DateTime.t()) ::
{:ok, number()} | {:error, String.t()}
def eth_spent(addresses, from, to) do
case eth_balance_change(addresses, from, to) do
{:ok, balance_changes} ->
eth_spent =
balance_changes
|> Enum.map(fn {_, {_, _, change}} -> change end)
|> Enum.sum()
|> case do
change when change < 0 -> abs(change)
_ -> 0
end
{:ok, eth_spent}
{:error, error} ->
{:error, error}
end
end
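# Illustrative arithmetic: if the combined balance change over the period is
# -5.0 ETH, eth_spent/3 returns {:ok, 5.0}; if the balance increased by 3.0 ETH,
# it returns {:ok, 0}.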
@doc ~s"""
For a given address or list of addresses calculate the ethereum spent.
Ethereum spent is defined as follows:
- If the combined balance of the addresses decreases compared to the previous
time bucket, the absolute value of the change is the ethereum spent
- If the combined balance of the addresses increases compared to the previous
time bucket, the ethereum spent is equal to 0
"""
@spec eth_spent_over_time(address | list(address), DateTime.t(), DateTime.t(), interval) ::
eth_spent_over_time_result()
def eth_spent_over_time(addresses, from, to, interval)
when is_binary(addresses) or is_list(addresses) do
case eth_balance_change(addresses, from, to, interval) do
{:ok, balance_changes} ->
eth_spent_over_time =
balance_changes
|> Enum.map(fn
%{balance_change: change, datetime: dt} when change < 0 ->
%{datetime: dt, eth_spent: abs(change)}
%{datetime: dt} ->
%{datetime: dt, eth_spent: 0}
end)
{:ok, eth_spent_over_time}
{:error, error} ->
{:error, error}
end
end
end
|
lib/sanbase/clickhouse/historical_balance/eth_spent.ex
| 0.885706
| 0.557454
|
eth_spent.ex
|
starcoder
|
defmodule Cldr.List do
@moduledoc """
Cldr module to formats lists.
If we have a list of days like `["Monday", "Tuesday", "Wednesday"]`
then we can format that list for a given locale by:
iex> Cldr.List.to_string(["Monday", "Tuesday", "Wednesday"], MyApp.Cldr, locale: "en")
{:ok, "Monday, Tuesday, and Wednesday"}
"""
@type pattern_type :: :or | :or_narrow | :or_short | :standard | :standard_narrow |
:standard_short | :unit | :unit_narrow | :unit_short
@doc """
Formats a list into a string according to the list pattern rules for a locale.
## Arguments
* `list` is any list of terms that can be passed through `Kernel.to_string/1`
* `options` is a keyword list
## Options
* `:locale` is any configured locale. See `Cldr.known_locales()`. The default
is `locale: Cldr.get_locale/1`
* `:format` is one of those returned by
`Cldr.List.known_list_formats/0`. The default is `format: :standard`
## Examples
iex> Cldr.List.to_string(["a", "b", "c"], MyApp.Cldr, locale: "en")
{:ok, "a, b, and c"}
iex> Cldr.List.to_string(["a", "b", "c"], MyApp.Cldr, locale: "en", format: :unit_narrow)
{:ok, "a b c"}
iex> Cldr.List.to_string(["a", "b", "c"], MyApp.Cldr, locale: "fr")
{:ok, "a, b et c"}
iex> Cldr.List.to_string([1,2,3,4,5,6], MyApp.Cldr)
{:ok, "1, 2, 3, 4, 5, and 6"}
iex> Cldr.List.to_string(["a"], MyApp.Cldr)
{:ok, "a"}
iex> Cldr.List.to_string([1,2], MyApp.Cldr)
{:ok, "1 and 2"}
"""
@spec to_string([term(), ...], Cldr.backend() | Keyword.t(), Keyword.t()) ::
{:ok, String.t()} | {:error, {atom, binary}}
def to_string(list, backend \\ default_backend(), options \\ [])
def to_string(list, options, []) when is_list(options) do
{backend, options} = Keyword.pop(options, :backend, default_backend())
to_string(list, backend, options)
end
def to_string(list, backend, options) do
module = Module.concat(backend, List)
module.to_string(list, options)
end
@doc """
Formats a list using `to_string/2` but raises if there is
an error.
## Examples
iex> Cldr.List.to_string!(["a", "b", "c"], MyApp.Cldr, locale: "en")
"a, b, and c"
iex> Cldr.List.to_string!(["a", "b", "c"], MyApp.Cldr, locale: "en", format: :unit_narrow)
"a b c"
"""
@spec to_string!([term(), ...], Cldr.backend() | Keyword.t(), Keyword.t()) :: String.t() | no_return()
def to_string!(list, backend \\ default_backend(), options \\ [])
def to_string!(list, options, []) when is_list(options) do
{_locale, backend} = Cldr.locale_and_backend_from(options)
to_string!(list, backend, options)
end
def to_string!(list, backend, options) do
{_locale, backend} = Cldr.locale_and_backend_from(options[:locale], backend)
module = Module.concat(backend, List)
module.to_string!(list, options)
end
@doc """
Intersperses a list's elements into a list format according to the list
pattern rules for a locale.
This function can be helpful when creating a list from `Phoenix`
safe strings which are of the format `{:safe, "some string"}`
## Arguments
* `list` is any list of terms
* `options` is a keyword list
## Options
* `:locale` is any configured locale. See `Cldr.known_locale_names/1`. The default
is `locale: Cldr.get_locale/1`
* `:format` is atom returned by
`Cldr.List.known_list_formats/0`. The default is `:standard`
## Examples
iex> Cldr.List.intersperse(["a", "b", "c"], MyApp.Cldr, locale: "en")
{:ok, ["a", ", ", "b", ", and ", "c"]}
iex> Cldr.List.intersperse(["a", "b", "c"], MyApp.Cldr, locale: "en", format: :unit_narrow)
{:ok, ["a", " ", "b", " ", "c"]}
iex> Cldr.List.intersperse(["a", "b", "c"], MyApp.Cldr, locale: "fr")
{:ok, ["a", ", ", "b", " et ", "c"]}
iex> Cldr.List.intersperse([1,2,3,4,5,6], MyApp.Cldr)
{:ok, [1, ", ", 2, ", ", 3, ", ", 4, ", ", 5, ", and ", 6]}
iex> Cldr.List.intersperse(["a"], MyApp.Cldr)
{:ok, ["a"]}
iex> Cldr.List.intersperse([1,2], MyApp.Cldr)
{:ok, [1, " and ", 2]}
"""
@spec intersperse(list(term()), Cldr.backend(), Keyword.t()) ::
{:ok, list(String.t())} | {:error, {module(), String.t()}}
def intersperse(list, backend \\ nil, options \\ [])
def intersperse(list, options, []) when is_list(options) do
{_locale, backend} = Cldr.locale_and_backend_from(options)
module = Module.concat(backend, List)
module.intersperse(list, options)
end
def intersperse(list, backend, options) do
{_locale, backend} = Cldr.locale_and_backend_from(options[:locale], backend)
module = Module.concat(backend, List)
module.intersperse(list, options)
end
@doc """
Formats a list using `intersperse/2` but raises if there is
an error.
## Examples
iex> Cldr.List.intersperse!(["a", "b", "c"], MyApp.Cldr, locale: "en")
["a", ", ", "b", ", and ", "c"]
iex> Cldr.List.intersperse!(["a", "b", "c"], MyApp.Cldr, locale: "en", format: :unit_narrow)
["a", " ", "b", " ", "c"]
"""
@spec intersperse!(list(term()), Cldr.backend(), Keyword.t()) :: list(String.t()) | no_return()
def intersperse!(list, backend \\ nil, options \\ []) do
{_locale, backend} = Cldr.locale_and_backend_from(options[:locale], backend)
module = Module.concat(backend, List)
module.intersperse!(list, options)
end
@doc """
Returns the list patterns for a locale.
List patterns provide rules for combining multiple
items into a language format appropriate for a locale.
## Example
iex> Cldr.List.list_patterns_for "en", MyApp.Cldr
%{
or: %{
2 => [0, " or ", 1],
end: [0, ", or ", 1],
middle: [0, ", ", 1],
start: [0, ", ", 1]
},
or_narrow: %{
2 => [0, " or ", 1],
end: [0, ", or ", 1],
middle: [0, ", ", 1],
start: [0, ", ", 1]
},
or_short: %{
2 => [0, " or ", 1],
end: [0, ", or ", 1],
middle: [0, ", ", 1],
start: [0, ", ", 1]
},
standard: %{
2 => [0, " and ", 1],
end: [0, ", and ", 1],
middle: [0, ", ", 1],
start: [0, ", ", 1]
},
standard_narrow: %{
2 => [0, ", ", 1],
end: [0, ", ", 1],
middle: [0, ", ", 1],
start: [0, ", ", 1]
},
standard_short: %{
2 => [0, " & ", 1],
end: [0, ", & ", 1],
middle: [0, ", ", 1],
start: [0, ", ", 1]
},
unit: %{
2 => [0, ", ", 1],
end: [0, ", ", 1],
middle: [0, ", ", 1],
start: [0, ", ", 1]
},
unit_narrow: %{
2 => [0, " ", 1],
end: [0, " ", 1],
middle: [0, " ", 1],
start: [0, " ", 1]
},
unit_short: %{
2 => [0, ", ", 1],
end: [0, ", ", 1],
middle: [0, ", ", 1],
start: [0, ", ", 1]
}
}
"""
def list_patterns_for(locale, backend \\ default_backend()) do
{locale, backend} = Cldr.locale_and_backend_from(locale, backend)
module = Module.concat(backend, List)
module.list_patterns_for(locale)
end
@doc """
Returns the formats of list patterns available for a locale.
Returns a list of `atom`s of the list formats that are
available in CLDR for a locale.
## Example
iex> Cldr.List.list_formats_for("en", MyApp.Cldr)
[:or, :or_narrow, :or_short, :standard, :standard_narrow,
:standard_short, :unit, :unit_narrow, :unit_short]
"""
def list_formats_for(locale, backend \\ nil) do
{locale, backend} = Cldr.locale_and_backend_from(locale, backend)
module = Module.concat(backend, List)
module.list_formats_for(locale)
end
@deprecated "Use Cldr.List.list_formats_for/2"
defdelegate list_styles_for(locale, backend), to: __MODULE__, as: :list_formats_for
# TODO Remove at version 3.0
@doc false
defdelegate list_pattern_styles_for(locale, backend), to: __MODULE__, as: :list_formats_for
@doc """
Return the list of known list formats.
## Example
iex> Cldr.List.known_list_formats()
[:or, :or_narrow, :or_short, :standard, :standard_narrow,
:standard_short, :unit, :unit_narrow, :unit_short]
"""
@root_locale Cldr.Config.root_locale_name()
@config %Cldr.Config{locales: [@root_locale]}
@known_list_formats Cldr.Locale.Loader.get_locale(@root_locale, @config)
|> Map.get(:list_formats) |> Map.keys
def known_list_formats do
@known_list_formats
end
@deprecated "Use Cldr.List.known_list_formats/0"
def known_list_styles do
known_list_formats()
end
@doc false
# TODO remove for Cldr 3.0
if Code.ensure_loaded?(Cldr) && function_exported?(Cldr, :default_backend!, 0) do
def default_backend do
Cldr.default_backend!()
end
else
def default_backend do
Cldr.default_backend()
end
end
end
|
lib/cldr/list.ex
| 0.915858
| 0.5083
|
list.ex
|
starcoder
|
defmodule Mix.TaskHelpers.Strings do
@doc """
Downcase a string
"""
def lowercase(value), do: String.downcase(value)
@doc """
Sentence case a string
"""
def sentencecase(value), do: String.capitalize(value)
@doc """
Upcase a string
"""
def uppercase(value), do: String.upcase(value)
@doc """
Pascal Case a string
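Example:
Input: "hello_world"
Output: "HelloWorld"
With `option` set to `:lower`, the output is "helloWorld".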
"""
def pascalcase(word, option \\ :upper) do
~r/(?:^|[-_])|(?=[A-Z])/
|> Regex.split(to_string(word))
|> Enum.filter(&(&1 != ""))
|> camelize_list(option)
|> Enum.join()
end
defp camelize_list([], _), do: []
defp camelize_list([h | tail], :lower) do
[String.downcase(h)] ++ camelize_list(tail, :upper)
end
defp camelize_list([h | tail], :upper) do
[String.capitalize(h)] ++ camelize_list(tail, :upper)
end
@doc """
Returns a snakecase string. Example:
Input: "HelloWorld"
Output: "hello_world"
"""
def snakecase(value) when is_bitstring(value) do
Macro.underscore(value)
end
@doc """
Returns a dashcase string. Example:
Input: "HelloWorld"
Output: "hello-world"
"""
def dashcase(value) do
value
|> snakecase()
|> String.replace("_", "-")
end
@doc """
Returns a spacecase string. Example:
Input: "HelloWorld"
Output: "hello world"
"""
def spacecase(value) do
value
|> snakecase()
|> String.replace("_", " ")
end
@doc """
Module case a string
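Example:
Input: "hello-world"
Output: "HelloWorld"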
"""
def modulecase(value) do
value
|> String.downcase()
|> String.replace("_", " ")
|> String.replace("-", " ")
|> String.split(" ")
|> Enum.map(&String.capitalize/1)
|> Enum.join("")
end
@doc """
Returns true if value is a single word
"""
def single_word?(value) do
count =
value
|> snakecase()
|> String.split("_")
|> length()
count == 1
end
@doc """
Returns true if value contains multiple words
"""
def multi_word?(value) do
!single_word?(value)
end
end
|
apps/artemis/lib/mix/task_helpers/strings.ex
| 0.775902
| 0.46478
|
strings.ex
|
starcoder
|
defmodule Epicenter.Test.SchemaAssertions do
import ExUnit.Assertions
alias Epicenter.Test.SchemaAssertions.Database
alias Epicenter.Test.SchemaAssertions.Schema
@doc """
Assert that the given schema module exists, it has a corresponding table, and its fields are correct.
See `assert_schema_fields/2` for details about field assertion.
"""
def assert_schema(schema_module, field_tuples) when is_list(field_tuples) do
assert_schema_module_exists(schema_module, field_tuples)
assert_table_exists(schema_module, field_tuples)
assert_schema_fields(schema_module, field_tuples)
assert_schema_source_matches_table_name(schema_module)
end
def assert_schema_module_exists(schema_module, field_tuples) do
field_list = fields_for_generator(field_tuples)
table_name = Schema.table_name(schema_module)
if not Schema.module_exists?(schema_module) do
"""
Expected schema module “#{Schema.module_name(schema_module)}” to exist, but it doesn’t.
A possible remedy is to run this on the command line:
mix phx.gen.schema --binary-id #{Schema.module_name(schema_module, :drop_prefix)} #{table_name} #{field_list}
"""
|> flunk()
end
end
defp fields_for_generator(field_tuples) do
field_tuples
|> Schema.reject_autogenerated_fields()
|> colon_separated()
end
def assert_table_exists(schema_module, field_tuples) do
table_name = Schema.table_name(schema_module)
fields_for_migration =
field_tuples
|> Schema.reject_autogenerated_fields()
|> Enum.map(&"add #{inspect_contents(&1)}")
if not Schema.table_exists?(schema_module) do
"""
Expected database table “#{table_name}” to exist, but it doesn’t.
1. You could create a new migration with this mix task:
mix ecto.gen.migration create_#{table_name}
2. Your migration could look like this:
def change() do
create table(:#{table_name}) do
#{fields_for_migration |> indented_list(7)}
timestamps()
end
end
"""
|> flunk()
end
end
def assert_schema_source_matches_table_name(schema_module) do
source = Schema.source(schema_module)
table_name = Schema.table_name(schema_module)
if source != table_name do
"""
Expected schema source “#{source}” to match table name “#{table_name}”, but it doesn’t.
You can modify your schema to use the correct table name like this:
defmodule #{schema_module} do
schema "#{table_name}" do
...
end
end
"""
|> flunk()
end
end
@doc """
Assert that a schema and its corresponding database table have the correct fields.
Currently the error message is pretty generic, but there is probably enough information to provide the exact steps
for fixing any problems.
## Example
assert_schema_fields(Person, [{:id, :id}, {:first_name, :string}, {:age, :integer}])
Fields are tuples to allow for asserting on extra metadata in the future, like:
`[{:first_name, :string, :required}, ...]`
"""
def assert_schema_fields(schema_module, field_tuples) when is_list(field_tuples) do
table_name = Schema.table_name(schema_module)
assertion_fields = field_tuples
database_fields = schema_module |> Schema.table_name() |> Database.fields()
schema_fields = schema_module |> Schema.fields_with_types()
all_field_names =
for field_set <- [assertion_fields, database_fields, schema_fields],
field <- field_set,
uniq: true,
do: field |> elem(0)
table_rows =
for field_name <- all_field_names |> Enum.sort() do
[
field_name,
assertion_fields |> field_metadata(field_name) |> inspect(),
database_fields |> field_metadata(field_name) |> inspect(),
schema_fields |> field_metadata(field_name) |> inspect()
]
end
# metadata values were passed through inspect/1 above, so a missing field shows up as "nil"
if table_rows |> List.flatten() |> Enum.any?(&(&1 == "nil")) do
table =
TableRex.Table.new(table_rows, ["", "ASSERTION", "DATABASE", "SCHEMA"])
|> TableRex.Table.render!(horizontal_style: :off, vertical_style: :off)
"""
Mismatch between asserted fields, fields in database, and fields in schema:
#{table}
1. To add to or remove from the assertion, edit the test.
2. To add to or remove from the database:
a. Create a migration with one of:
* mix ecto.gen.migration add_column1_column2_to_#{table_name}
* mix ecto.gen.migration remove_column1_column2_from_#{table_name}
b. In the newly-generated migration, modify the change function:
def change() do
alter table(:#{table_name}) do
add :column_name, :type # [, options]
remove :column_name, :type # [, options]
end
end
3. To add to or remove from the schema, edit the “#{inspect(schema_module)}” schema.
"""
|> flunk()
end
for field_name <- all_field_names |> Enum.sort() do
assertion_type = assertion_fields |> field_metadata(field_name)
database_type = database_fields |> field_metadata(field_name)
schema_type = schema_fields |> field_metadata(field_name)
with {:error, message} <- match_types(assertion_type, database_type, schema_type) do
flunk("""
Schema type mismatch!
field : #{field_name}
message: #{message}
Assertion type : #{inspect(assertion_type)}
Database type : #{inspect(database_type)}
Elixir schema type: #{inspect(schema_type)}
""")
end
end
end
@datetime_error_message "You should use a schema type of :utc_datetime when persisting timestamps without a timezone"
defp match_types(:boolean, %{data_type: "boolean"}, :boolean), do: :ok
defp match_types(:binary_id, %{data_type: "uuid"}, :binary_id), do: :ok
defp match_types(:string, %{data_type: "text"}, :string), do: :ok
defp match_types(:string, %{data_type: "USER-DEFINED", udt_name: "citext"}, :string), do: :ok
defp match_types(:naive_datetime, _, _), do: {:error, @datetime_error_message}
defp match_types(_, _, :naive_datetime), do: {:error, @datetime_error_message}
defp match_types(:utc_datetime, %{data_type: "timestamp without time zone"}, :utc_datetime), do: :ok
defp match_types(:date, %{data_type: "date"}, :date), do: :ok
defp match_types(:string, %{data_type: "character varying"}, :string), do: {:error, "You should use a text postgres type for string data"}
defp match_types(:integer, %{data_type: "bigint"}, :integer), do: :ok
defp match_types(:bigserial, %{data_type: "bigint"}, :integer), do: :ok
defp match_types({:array, :string}, %{data_type: "ARRAY", element_data_type: "text"}, {:array, :string}), do: :ok
defp match_types(:map, %{data_type: "jsonb"}, {:parameterized, Ecto.Embedded, _}), do: :ok
defp match_types(:map, %{data_type: "jsonb"}, :map), do: :ok
defp match_types(_assertion_type, _database_type, _schema_type), do: {:error, "These types did not match"}
defp colon_separated(tuples),
do:
tuples
|> Enum.map(fn
{k, {v1, v2}} -> "#{k}:#{v1}:#{v2}"
{k, v} -> "#{k}:#{v}"
end)
|> Enum.join(" ")
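# Illustrative example: colon_separated([{:name, :string}, {:tags, {:array, :string}}])
# returns "name:string tags:array:string".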
defp field_metadata(field_list, field_name) do
Enum.find_value(field_list, fn field ->
if elem(field, 0) == field_name,
do: elem(field, 1),
else: nil
end)
end
defp indent(string, indent_size),
do: String.duplicate(" ", indent_size) <> string
defp indented_list(list, indent_size, trailing_character \\ ""),
do: list |> Enum.map(&indent(&1, indent_size)) |> Enum.join(trailing_character <> "\n")
defp inspect_contents(tuple) when is_tuple(tuple) do
tuple |> Tuple.to_list() |> Enum.map(&inspect/1) |> Enum.join(", ")
end
defmodule Database do
alias Epicenter.Repo
def fields(table_name) do
for [name, data_type, udt_name, element_data_type] <- field_query(table_name),
do: {Euclid.Extra.Atom.from_string(name), %{data_type: data_type, udt_name: udt_name, element_data_type: element_data_type}}
end
defp field_query(table_name),
do:
query(
"""
SELECT information_schema.columns.column_name, information_schema.columns.data_type, information_schema.columns.udt_name, information_schema.element_types.data_type
FROM information_schema.columns
LEFT JOIN information_schema.element_types
ON information_schema.columns.dtd_identifier=information_schema.element_types.collection_type_identifier
AND information_schema.element_types.object_name = $1
AND information_schema.element_types.object_type = 'TABLE'
WHERE information_schema.columns.table_name = $1
""",
[table_name]
)
def table_names(),
do: "select table_name from information_schema.tables where table_schema = 'public'" |> query() |> List.flatten()
def has_table?(table_name),
do: table_name in table_names()
def query(string, args \\ []),
do: Repo.query!(string, args).rows
end
defmodule Schema do
def field_type(module, field),
do: module.__schema__(:type, field)
def fields(module),
do: module.__schema__(:fields) |> Enum.sort()
def fields_with_types(module),
do: for(field <- fields(module), do: {field, field_type(module, field)})
def module_exists?(module),
do: function_exported?(module, :__info__, 1)
def module_name(module),
do: inspect(module)
def module_name(module, :drop_prefix),
do: module |> module_name() |> String.split(".") |> Enum.reject(&(&1 in ~w{Epicenter EpicenterWeb})) |> Enum.join(".")
def reject_autogenerated_fields(fields),
do: fields |> Enum.reject(fn field -> elem(field, 0) in [:id, :inserted_at, :updated_at] end)
def source(module), do: module.__schema__(:source)
def table_exists?(module),
do: module |> table_name() |> Database.has_table?()
def table_name(module),
do: module |> module_name() |> String.split(".") |> List.last() |> Inflex.underscore() |> Inflex.pluralize()
end
end
|
test/support/schema_assertions.ex
| 0.775477
| 0.654674
|
schema_assertions.ex
|
starcoder
|
defmodule Saucexages.IO.FileWriter do
@moduledoc """
Functions for writing and maintaining SAUCE files.
This writer is primarily for working with larger files. You may elect to use `Saucexages.IO.BinaryWriter` if you want a more flexible writer, provided that your files are small and fit in memory.
The general approach this writer takes is to be pragmatic about what is and is not a SAUCE and where according to the SAUCE spec data should be located. The general strategy of this writer is to avoid costly seeks and rewrites of very large files. Instead, the writer generally tries to perform most operations from a SAUCE-centric point-of-view. That is, most operations are focused on scanning backwards through a file in a deterministic, constant way.
A good example of pragmatic behavior is how this writer deals with EOF characters. For example, according to the SAUCE spec, a SAUCE should always be written after an EOF character. This does not mean that the SAUCE will be immediately after the EOF. To avoid scanning entire large files, we merely check for the presence of an EOF character relative to the SAUCE block. If an EOF character is not before the SAUCE block, we insert one. This can result in an extra byte written, but with the benefit that seeking through the whole file is no longer necessary.
"""
require Saucexages.Sauce
alias Saucexages.{Sauce, SauceBlock}
alias Saucexages.IO.{SauceBinary, SauceFile}
alias Saucexages.Codec.{Encoder}
@doc """
Writes the given SAUCE info to the file at the given `path`.
"""
@spec write(Path.t(), SauceBlock.t()) :: :ok
def write(path, sauce_block) do
case File.open(path, [:binary, :write, :read], fn (io_device) -> do_write(io_device, sauce_block) end) do
{:ok, sauce_response} -> sauce_response
err -> err
end
end
defp do_write(fd, sauce_block) do
with {:ok, encoded_sauce_bin} <- Encoder.encode_record(sauce_block),
{:ok, encoded_comments_bin} <- Encoder.encode_comments(sauce_block),
{:ok, contents_size} <- SauceFile.contents_size(fd),
{:ok, _write_position} <- :file.position(fd, contents_size),
{:ok, eof_prefix?} <- eof_prefixed?(fd) do
item = if eof_prefix? do
[encoded_comments_bin, encoded_sauce_bin]
else
[<<Sauce.eof_character>>, encoded_comments_bin, encoded_sauce_bin]
end
# truncate the file in case there is any randomness after the point where we want to write the SAUCE or an old SAUCE
:file.truncate(fd)
IO.binwrite(fd, item)
end
end
@doc """
Removes any comments, if present from a SAUCE and rewrites the SAUCE accordingly.
Can be used to remove a SAUCE comments block or to clean erroneous comment information such as mismatched comment lines or double comment blocks.
"""
@spec remove_comments(Path.t()) :: :ok | {:error, term()}
def remove_comments(path) do
case File.open(path, [:binary, :write, :read], &do_remove_comments/1) do
{:ok, sauce_response} -> sauce_response
err -> err
end
end
defp do_remove_comments(fd) do
with :ok <- sauce_seekable(fd),
{:ok, _sauce_offset} <- :file.position(fd, {:eof, -Sauce.sauce_record_byte_size()}),
{:ok, sauce_record_bin} <- :file.read(fd, Sauce.sauce_record_byte_size()),
:ok <- SauceBinary.verify_sauce_record(sauce_record_bin),
{:ok, comment_lines} <- SauceBinary.comment_lines(sauce_record_bin) do
maybe_truncate_comments(fd, sauce_record_bin, comment_lines)
else
{:error, :no_sauce} ->
:ok
{:error, _reason} = err ->
err
err -> {:error, {"Error reading contents.", err}}
end
end
@doc """
Removes a SAUCE record from a file.
Both the SAUCE record and comments block will be removed.
"""
@spec remove_sauce(Path.t()) :: :ok | {:error, term()}
def remove_sauce(path) when is_binary(path) do
case File.open(path, [:binary, :write, :read], &do_remove_sauce/1) do
{:ok, sauce_response} -> sauce_response
err -> err
end
end
defp do_remove_sauce(fd) do
with {:ok, file_size} <- :file.position(fd, :eof),
true <- file_size >= Sauce.sauce_record_byte_size(),
{:ok, contents_size} <- SauceFile.contents_size(fd) do
maybe_truncate(fd, file_size, contents_size)
else
false -> {:error, :no_sauce}
err -> err
end
end
defp write_encoded(fd, encoded_sauce_bin, encoded_comments_bin, position) do
with {:ok, _write_position} <- :file.position(fd, position),
{:ok, eof_prefix?} <- eof_prefixed?(fd) do
item = if eof_prefix? do
[encoded_comments_bin, encoded_sauce_bin]
else
[<<Sauce.eof_character>>, encoded_comments_bin, encoded_sauce_bin]
end
IO.binwrite(fd, item)
end
end
defp eof_prefixed?(fd) do
case :file.position(fd, :cur) do
{:ok, 0} -> {:ok, false}
{:ok, _pos} -> cursor_eof_prefixed?(fd)
{:error, _reason} = err -> err
end
end
defp cursor_eof_prefixed?(fd) do
with {:ok, _pos} <- :file.position(fd, {:cur, -1}),
{:ok, previous_bin} <- :file.read(fd, 1) do
{:ok, previous_bin == <<Sauce.eof_character()>>}
else
{:error, _reason} = err -> err
_ -> {:error, "Unable to check EOF prefix."}
end
end
defp maybe_truncate_comments(fd, sauce_record_bin, comment_lines) when comment_lines > 0 do
with {:ok, file_size} <- :file.position(fd, :eof),
{:ok, updated_sauce_bin} <- reset_sauce_comments(sauce_record_bin),
comment_block_offset = Sauce.sauce_byte_size(comment_lines),
comment_block_size = Sauce.comment_block_byte_size(comment_lines),
{:ok, comments_offset} <- :file.position(fd, {:eof, -comment_block_offset}),
{:ok, comments_bin} <- :file.read(fd, comment_block_size) do
# TODO: refactor - this is extremely yuck since we need a lot of sanity checks, branches, and multiple writes (truncate + write).
# Alternative approaches:
# 1. Copy the file, make the changes, and swap the new file
# 2. Rewrite the file byte by byte by reading until the comments/sauce position, and writing the new SAUCE
# 3. Do all of this as is, but exclusive which isn't guaranteed for some file systems
# 4. Write new SAUCE over old one, starting at where comments may or may not be. This leaves the file in an invalid state though until we finish by truncating, vs. the existing approach truncates first, leaving the file valid if something blows up before the update is written.
if SauceBinary.matches_comment_block?(comments_bin) do
case maybe_truncate(fd, file_size, comments_offset) do
:ok -> write_encoded(fd, updated_sauce_bin, <<>>, comments_offset)
{:error, _reason} = err -> err
end
else
write_encoded(fd, updated_sauce_bin, <<>>, {:eof, -Sauce.sauce_record_byte_size()})
end
else
err -> err
end
end
defp maybe_truncate_comments(_fd, _sauce_record_bin, _comment_lines) do
:ok
end
defp reset_sauce_comments(sauce_bin) when is_binary(sauce_bin) do
encoded_comment_lines = Encoder.encode_integer(0, Sauce.field_size(:comment_lines))
SauceBinary.write_field(sauce_bin, :comment_lines, encoded_comment_lines)
end
defp maybe_truncate(fd, file_size, contents_size) when file_size > contents_size do
with {:ok, _pos} <- :file.position(fd, contents_size) do
:file.truncate(fd)
end
end
defp maybe_truncate(_fd, file_size, contents_size) when file_size == contents_size do
:ok
end
defp sauce_seekable(fd) do
with {:ok, file_size} <- :file.position(fd, :eof),
true <- file_size >= Sauce.sauce_record_byte_size() do
:ok
else
false -> {:error, :no_sauce}
{:error, _reason} = err -> err
end
end
end
|
lib/saucexages/io/file_writer.ex
| 0.759047
| 0.519399
|
file_writer.ex
|
starcoder
|
defmodule StarkInfra.PixStatement do
alias __MODULE__, as: PixStatement
alias StarkInfra.Utils.Rest
alias StarkInfra.Utils.Check
alias StarkInfra.User.Project
alias StarkInfra.User.Organization
alias StarkInfra.Error
@moduledoc """
Groups PixStatement related functions
"""
@doc """
The PixStatement struct stores information about all the transactions that
happened on a specific day at your settlment account according to the Central Bank.
It must be created by the user before it can be accessed.
This feature is only available for direct participants.
When you initialize a PixStatement, the entity will not be automatically
created in the Stark Infra API. The 'create' function sends the structs
to the Stark Infra API and returns the created struct.
## Parameters (required):
- `:after` [Date]: transactions that happened at this date are stored in the PixStatement, must be the same as before. ex: ~D[2020-03-10]
- `:before` [Date]: transactions that happened at this date are stored in the PixStatement, must be the same as after. ex: ~D[2020-03-10]
- `:type` [string]: types of entities to include in statement. Options: ["interchange", "interchangeTotal", "transaction"]
## Attributes (return-only):
- `:id` [string]: unique id returned when the PixStatement is created. ex: "5656565656565656"
- `:status` [string]: current PixStatement status. ex: ["success", "failed"]
- `:transaction_count` [integer]: number of transactions that happened during the day that the PixStatement was requested. ex: 11
- `:created` [DateTime]: creation datetime for the PixStatement. ex: ~U[2020-03-10 10:30:00Z]
- `:updated` [DateTime]: latest update datetime for the PixStatement. ex: ~U[2020-03-10 10:30:00Z]
"""
@enforce_keys [
:after,
:before,
:type
]
defstruct [
:after,
:before,
:type,
:id,
:status,
:transaction_count,
:created,
:updated
]
@type t() :: %__MODULE__{}
@doc """
Create a PixStatement linked to your Workspace in the Stark Infra API
## Parameters (required):
- `:statement` [PixStatement struct]: PixStatement struct to be created in the API.
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixStatement struct with updated attributes.
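## Example (illustrative; the response shape follows the API):

    {:ok, statement} = StarkInfra.PixStatement.create(
      %StarkInfra.PixStatement{after: ~D[2022-01-20], before: ~D[2022-01-20], type: "transaction"}
    )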
"""
@spec create(
PixStatement.t() | map(),
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixStatement.t()} |
{:error, [error: Error.t()]}
def create(keys, options \\ []) do
Rest.post_single(
resource(),
keys,
options
)
end
@doc """
Same as create(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec create!(
PixStatement.t() | map(),
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixStatement.t()} |
{:error, [error: Error.t()]}
def create!(keys, options \\ []) do
Rest.post_single!(
resource(),
keys,
options
)
end
@doc """
Retrieve the PixStatement struct linked to your Workspace in the Stark Infra API by its id.
## Parameters (required):
- `:id` [string]: struct unique id. ex: "5656565656565656"
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixStatement struct that corresponds to the given id.
"""
@spec get(
id: binary,
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixStatement.t()} |
{:error, [error: Error.t()]}
def get(id, options \\ []) do
Rest.get_id(resource(), id, options)
end
@doc """
Same as get(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec get!(
id: binary,
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixStatement.t()} |
{:error, [error: Error.t()]}
def get!(id, options \\ []) do
Rest.get_id!(resource(), id, options)
end
@doc """
Receive a stream of PixStatements structs previously created in the Stark Infra API
## Options:
- `:limit` [integer, default nil]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
- `:ids` [list of strings, default nil]: list of ids to filter retrieved structs. ex: ["5656565656565656", "4545454545454545"]
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- stream of PixStatement structs with updated attributes
"""
@spec query(
limit: integer,
ids: [binary],
user: Project.t() | Organization.t() | nil
) ::
({:cont, [PixStatement.t()]} |
{:error, [Error.t()]})
def query(options \\ []) do
Rest.get_list(resource(), options)
end
@doc """
Same as query(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec query!(
limit: integer,
ids: [binary],
user: Project.t() | Organization.t() | nil
) ::
({:cont, [PixStatement.t()]} |
{:error, [Error.t()]})
def query!(options \\ []) do
Rest.get_list!(resource(), options)
end
@doc """
Receive a list of up to 100 PixStatements structs previously created in the Stark Infra API
## Options:
- `:cursor` [string, default nil]: cursor returned on the previous page function call
- `:limit` [integer, default 100]: maximum number of structs to be retrieved. Max = 100. ex: 35
- `:ids` [list of strings, default nil]: list of ids to filter retrieved structs. ex: ["5656565656565656", "4545454545454545"]
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- list of PixStatement structs with updated attributes
- cursor to retrieve the next page of PixStatement structs
"""
@spec page(
cursor: binary,
limit: integer,
ids: [binary],
user: Project.t() | Organization.t() | nil
) ::
({:cont, [PixStatement.t()]} |
{:error, [Error.t()]})
def page(options \\ []) do
Rest.get_page(resource(), options)
end
@doc """
Same as page(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec page!(
cursor: binary,
limit: integer,
ids: [binary],
user: Project.t() | Organization.t() | nil
) ::
({:cont, [PixStatement.t()]} |
{:error, [Error.t()]})
def page!(options \\ []) do
Rest.get_page!(resource(), options)
end
@doc """
Retrieve a specific PixStatement by its ID in a .csv file.
## Parameters (required):
- `:id` [string]: struct unique id. ex: "5656565656565656"
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- .zip file containing a PixStatement in .csv format
"""
@spec csv(
id: binary,
user: Project.t() | Organization.t() | nil
) ::
{:ok, binary} |
{:error, [error: Error.t()]}
def csv(id, options \\ []) do
Rest.get_content(resource(), id, "csv", options, options[:user])
end
@doc """
Same as csv(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec csv!(
id: binary,
user: Project.t() | Organization.t() | nil
) ::
{:ok, binary} |
{:error, [error: Error.t()]}
def csv!(id, options \\ []) do
Rest.get_content!(resource(), id, "csv", options, options[:user])
end
@doc false
def resource() do
{
"PixStatement",
&resource_maker/1
}
end
@doc false
def resource_maker(json) do
%PixStatement{
after: json[:after],
before: json[:before],
type: json[:type],
id: json[:id],
status: json[:status],
transaction_count: json[:transaction_count],
created: json[:created] |> Check.datetime(),
updated: json[:updated] |> Check.datetime()
}
end
end
|
lib/pix_statement/pix_statement.ex
| 0.90214
| 0.724468
|
pix_statement.ex
|
starcoder
|
defmodule ElasticsearchElixirBulkProcessor.Helpers.Events do
@doc ~S"""
Return the total byte size of a list of strings
## Examples
iex> ElasticsearchElixirBulkProcessor.Helpers.Events.byte_sum(["abcd", "a", "b"])
6
iex> ElasticsearchElixirBulkProcessor.Helpers.Events.byte_sum([])
0
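Multibyte (UTF-8) characters count per byte, not per grapheme:
iex> ElasticsearchElixirBulkProcessor.Helpers.Events.byte_sum(["é"])
2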
"""
def byte_sum([]),
do: 0
def byte_sum(string_list) when is_list(string_list),
do: Stream.map(string_list, &byte_size/1) |> Enum.sum()
@doc ~S"""
Split list of strings into first chunk of given byte size and rest of the list.
## Examples
iex> ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
...> |> ElasticsearchElixirBulkProcessor.Helpers.Events.split_first_bytes(3)
{["0", "1", "2"], ["3", "4", "5", "6", "7", "8", "9"]}
iex> ["00", "11", "22", "33"]
...> |> ElasticsearchElixirBulkProcessor.Helpers.Events.split_first_bytes(3)
{["00", "11"], ["22", "33"]}
iex> ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
...> |> ElasticsearchElixirBulkProcessor.Helpers.Events.split_first_bytes(0)
{[], ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]}
"""
def split_first_bytes(list, first_byte_size) do
list
|> Enum.reduce(
{[], []},
fn element, acc -> build_up_first_chunk_elements(element, acc, first_byte_size) end
)
end
defp build_up_first_chunk_elements(element, {first, rest}, first_byte_size)
when is_binary(element) do
if byte_sum(first) >= first_byte_size do
{first, rest ++ [element]}
else
{first ++ [element], rest}
end
end
@doc ~S"""
Split list of strings into chunks of given byte size and rest of the list.
## Examples
iex> ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
...> |> ElasticsearchElixirBulkProcessor.Helpers.Events.chunk_bytes(3)
[["0", "1", "2"], ["3", "4", "5"], ["6", "7", "8"], ["9"]]
iex> ["00", "11", "22", "33", "44"]
...> |> ElasticsearchElixirBulkProcessor.Helpers.Events.chunk_bytes(3)
[["00", "11"], ["22", "33"], ["44"]]
"""
def chunk_bytes(list, chunk_byte_size) do
list
|> Enum.reduce(
[[]],
fn element, acc -> build_up_chunk_elements(element, acc, chunk_byte_size) end
)
|> Enum.reverse()
end
defp build_up_chunk_elements(element, [head | tail], chunk_byte_size) when is_binary(element) do
if byte_sum(head) >= chunk_byte_size do
[[element] | [head | tail]]
else
[head ++ [element] | tail]
end
end
end
|
lib/elasticsearch_elixir_bulk_processor/helpers/events.ex
| 0.794744
| 0.557905
|
events.ex
|
starcoder
|
defmodule Tailwind.Phoenix.Combined do
@moduledoc """
Helper to make useful notifications to be used in LiveView controllers where
Index and Show is combined.
It implements handle_info/2 LiveView callbacks for events,
updates the current assigns if a notification arrives.
The value function gets passed the socket as first and only parameter.
This way you can use any assign in the socket to use the correct query.
## Examples
```elixir
defmodule MyWeb.MyDataLive.Combined do
use MyWeb, :live_view
use Tailwind.Phoenix.Combined,
notifier: MyWeb.Notifier,
key_index: :datas,
key_show: :data,
pattern: %My.MyData{},
value: &list_datas/1,
to: &Routes.my_data_path(&1, :index)
...
end
```
"""
@doc """
When used, implements handle_info/2 for index.ex.
"""
defmacro __using__(opts) do
notifier = Keyword.get(opts, :notifier)
index_assign_key = Keyword.get(opts, :key_index)
show_assign_key = Keyword.get(opts, :key_show)
data_pattern = Keyword.get(opts, :pattern)
assign_value_fn = Keyword.get(opts, :value)
return_to_fn = Keyword.get(opts, :to)
quote do
@impl true
def handle_info({:deleted, unquote(data_pattern) = data} = msg, socket) do
socket =
if socket.assigns[unquote(show_assign_key)] == nil do
socket
else
to = unquote(return_to_fn).(socket)
socket
|> Tailwind.Phoenix.redirect_if_id(socket.assigns[unquote(show_assign_key)].id, data,
to: to
)
|> unquote(notifier).to_flash(msg)
end
socket =
if socket.assigns[unquote(index_assign_key)] == nil do
socket
else
socket
|> unquote(notifier).to_flash(msg)
|> update(unquote(index_assign_key), fn _ -> unquote(assign_value_fn).(socket) end)
end
{:noreply, socket}
end
@impl true
def handle_info({action, unquote(data_pattern) = data} = msg, socket) do
socket =
if socket.assigns[unquote(show_assign_key)] == nil do
socket
else
socket
|> unquote(notifier).to_flash(msg)
|> Tailwind.Phoenix.update_if_id(
unquote(show_assign_key),
socket.assigns[unquote(show_assign_key)].id,
data
)
end
socket =
if socket.assigns[unquote(index_assign_key)] == nil do
socket
else
socket
|> unquote(notifier).to_flash(msg)
|> update(unquote(index_assign_key), fn _ -> unquote(assign_value_fn).(socket) end)
end
{:noreply, socket}
end
defdelegate handle_info(data, socket), to: unquote(notifier)
end
end
end
|
lib/tailwind/phoenix/combined.ex
| 0.744192
| 0.812347
|
combined.ex
|
starcoder
|
defmodule Remsign.Utils do
import Logger, only: [log: 2]
def get_in_default(m, kl, d) do
case get_in(m, kl) do
nil -> d
r -> r
end
end
def make_nonce, do: :crypto.strong_rand_bytes(16) |> Base.encode16(case: :lower)
def validate_clock(t, skew) do
case Timex.parse(t, "{ISO:Extended:Z}") do
{:ok, ts} ->
d = Timex.diff(DateTime.utc_now, ts, :seconds)
(abs(d) < skew)
{:error, e} ->
log(:error, "Timestamp format for #{inspect(t)} invalid: #{inspect(e)}")
false
end
end
def keyname(m) when is_binary(m) do
try do
Joken.token(m) |> Joken.peek |> Map.get("sub")
rescue
ArgumentError -> nil
end
end
def keyname(_), do: nil
defp unwrap_h(m, kl, skew, nstore) do
jp = Joken.token(m) |> Joken.peek
alg = case JOSE.JWS.peek_protected(m) |> Poison.decode do
{:ok, %{ "alg" => algo }} -> algo
_ -> "HS256" # default
end
keyid = jp["sub"]
sig = %Joken.Signer{ jws: %{ "alg" => alg },
jwk: kl.(keyid, :public)}
log(:debug, "Verification: keyid = #{inspect(keyid)}, alg = #{alg}, sig = #{inspect(sig)}")
v = m |>
Joken.token |>
Joken.with_signer(sig) |>
Joken.with_validation("iat", fn t -> validate_clock(t, skew) end) |>
Joken.with_validation("jti", nstore) |>
Joken.verify
log(:debug, "Verify result = #{inspect(v)}")
v.claims["payload"]
end
def unwrap(m, kl, skew, nstore) do
case Poison.decode(m) do
{:error, _} -> unwrap_h(m, kl, skew, nstore)
{:ok, d} -> d
end
end
def wrap(m, keyid, alg, sig, opts \\ []) do
ts = Keyword.get(opts, :ts, DateTime.utc_now)
nonce = Keyword.get(opts, :nonce, Remsign.Utils.make_nonce)
signer = %Joken.Signer{jws: %{ "alg" => alg }, jwk: sig}
log(:debug, "Message signer: #{inspect(signer)}")
m |>
Joken.token |>
Joken.with_sub(keyid) |>
Joken.with_iat(ts) |>
Joken.with_jti(nonce) |>
Joken.with_signer(signer) |>
Joken.sign |>
Joken.get_compact
end
defp valid_nonce_h?(_, l) when is_integer(l) and (l < 8 or l > 32), do: false
defp valid_nonce_h?(n, _) when is_binary(n) do
String.downcase(n) |> String.match?(~r/^[0-9a-f]+$/)
end
defp valid_nonce?(n) when is_binary(n), do: valid_nonce_h?(n, byte_size(n))
defp valid_nonce?(_), do: false
@doc """
Attempt to insert a valid nonce into a ConCache store.
If the nonce is not valid, return false
If the item is already present return false.
Otherwise return true.
"""
def cc_store_nonce(cc, n, ttl \\ 60) when is_binary(n) do
case valid_nonce?(n) do
true ->
case ConCache.insert_new(cc, n, %ConCache.Item{value: true, ttl: ttl}) do
:ok -> true
{:error, :already_exists} ->
log(:debug, "Nonce #{inspect(n)} already exists in nonce cache")
false
end
false ->
log(:warn, "Invalid nonce format")
false # invalid nonce format
end
end
@doc """
Generate an RSA public/private keypair with a modulus
size of `mod`. Shells out to OpenSSL to actually perform
key generation.
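
Example (illustrative; requires the `openssl` binary on the PATH):

    {pub, priv} = Remsign.Utils.generate_rsa(2048)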
"""
def generate_rsa(mod, opts \\ []) when is_integer(mod) do
ossl = Keyword.get(opts, :openssl, "openssl")
{priv, 0} = System.cmd(ossl, [ "genrsa", to_string(mod) ], [stderr_to_stdout: true])
priv = :public_key.pem_decode(priv) |> List.first |> :public_key.pem_entry_decode(:RSAPrivateKey) |> JOSE.JWK.from_key |> JOSE.JWK.to_map
pub = JOSE.JWK.to_public(priv) |> JOSE.JWK.to_map
{elem(pub,1), elem(priv, 1)}
end
defp known_hash_h("sha"), do: :sha
defp known_hash_h("sha1"), do: :sha
defp known_hash_h("sha224"), do: :sha224
defp known_hash_h("sha256"), do: :sha256
defp known_hash_h("sha384"), do: :sha384
defp known_hash_h("sha512"), do: :sha512
defp known_hash_h("md5"), do: :md5
defp known_hash_h("md4"), do: :md4
defp known_hash_h("md2"), do: :md2
defp known_hash_h(_), do: nil
@doc """
Return an atom corresponding to a hash type, or nil if the hash is unknown
"""
def known_hash(h) when is_binary(h) do
String.replace(h, ~r/[^A-Za-z0-9]/, "") |> String.downcase |> known_hash_h
end
end
|
lib/remsign/utils.ex
| 0.533154
| 0.41564
|
utils.ex
|
starcoder
|
defmodule ExState.Definition do
@moduledoc """
`ExState.Definition` provides macros to define a workflow state chart.
A workflow is defined with a name:
workflow "make_deal" do
#...
end
## Subject
The subject of the workflow is used to associate the workflow for lookup
in the database. The subject is added to the context under the defined key and
can be used in callbacks `use_step?/2`, and `guard_transition/3`. Subject
names and types are defined using the `subject` keyword:
subject :deal, Deal
## Initial State
A workflow must have an initial state:
initial_state :pending
This state must be defined using a seperate state definition.
## States
States have a name, and optional sub-states, steps, and transitions:
state :pending do
initial_state :preparing
state :preparing do
on :review, :reviewing
end
state :reviewing do
on :cancel, :cancelled
end
end
state :cancelled
Transitions may be a list of targets, in which case the first target state
which is allowed by `guard_transition/3` will be used.
state :pending do
initial_state :preparing
state :preparing do
on :prepared, [:reviewing, :sending]
end
state :reviewing do
on :cancel, :cancelled
end
state :sending do
on :send, :sent
end
end
def guard_transition(:preparing, :reviewing, %{shipment: shipment}) do
if shipment.requires_review? do
:ok
else
{:error, "no review required"}
end
end
def guard_transition(:preparing, :sending, %{shipment: shipment}) do
if shipment.requires_review? do
{:error, "review required"}
else
:ok
end
end
def guard_transition(_, _, _), do: :ok
Transitions may also use the null event, which occurs immediately on entering
a state. This is useful for determining the initial state dynamically.
state :unknown do
on :_, [:a, :b]
end
state :a
state :b
def guard_transition(:unknown, :a, %{order: order}) do
if order.use_a?, do: :ok, else: {:error, :use_b}
end
## Steps
Steps must be completed in order of definition:
state :preparing do
step :read
step :sign
step :confirm
end
Steps can be defined in parallel, meaning any step from the block can be
completed independent of order:
state :preparing do
parallel do
step :read
step :sign
step :confirm
end
end
Step completed events can be handled to transition to new states:
state :preparing do
step :read
step :sign
step :confirm
on_completed :confirm, :done
end
state :done
States can be ignored on a subject basis through `use_step?/2`:
def use_step?(:sign, %{deal: deal}) do
deal.requires_signature?
end
def use_step?(_, _), do: true
## Virtual States
States definitions can be reused through virtual states:
virtual :completion_states do
state :working do
step :read
step :sign
step :confirm
end
end
state :completing_a do
using :completion_states
on_completed :confirm, :completing_b
end
state :completing_b do
using :completion_states
on_completed :confirm, :done
end
state :done
## Decisions
Decisions are steps that have defined options. The selection of an
option can be used to determine state transitions:
state :preparing do
step :read
step :review_terms
on_decision :review_terms, :accept, :signing
on_decision :review_terms, :reject, :rejected
end
state :signing do
step :sign
on_completed :sign, :done
end
state :rejected
state :done
## Transitions
By default, transitions reference sibling states:
state :one do
on :done, :two
end
state :two
Transitions can reference states one level up the hierarchy (a sibling of the
parent state) by using `{:<, :state}`, in the following form:
state :one do
state :a do
on :done, {:<, :two}
end
end
state :two
Transitions can also explicitly denote legal events in the current state
using `:_`. The following adds a transition to the current state:
state :one do
on :done, :two
end
state :two do
on :done, :_
end
Transitions to the current state will reset completed steps in the current
state by default. Step state can be preserved by using the `reset: false`
option.
state :one do
step :a
on :done, :two
on :retry, :_, reset: true
end
state :two do
step :b
on :done, :_, reset: false
end
## Guards
Guards validate that certain dynamic conditions are met in order to
allow state transitions:
def guard_transition(:one, :two, %{note: note}) do
if String.length(note.text) > 5 do
:ok
else
{:error, "Text must be greater than 5 characters long"}
end
end
def guard_transition(_, _, _), do: :ok
Execution will stop the state transition if `{:error, reason}` is returned
from the guard, and will allow the transition if `:ok` is returned.
## Actions
Actions are side effects that happen on events. Events can be
transitions, entering a state, or exiting a state.
state :one do
on_entry :send_notification
on_entry :log_activity
on :done, :two, action: [:update_done_at]
end
state :two do
step :send_something
end
def update_done_at(%{note: note} = context) do
{:updated, Map.put(context, :note, %{note | done_at: now()})}
end
Actions can return a `{:updated, context}` tuple to add the updated
context to the execution state. A default `Execution.execute_actions/1`
function is provided which executes triggered actions in a fire-and-forget
fashion. See `ExState.persist/1` for an example of transactionally
executing actions.
Actions should also not explicitly guard state transitions. Guards should use
`guard_transition/3`.
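## Usage

A sketch of driving a workflow module (illustrative; `MakeDeal` is a module
using this definition, and the `{:ok, execution}` return shapes come from
`ExState.Execution` and are assumed here):

    execution = MakeDeal.new(%{deal: deal})
    {:ok, execution} = MakeDeal.complete(execution, :read)
    {:ok, execution} = MakeDeal.transition(execution, :review)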
"""
alias ExState.Execution
alias ExState.Definition.Chart
@type state() :: atom()
@type step() :: atom()
@type context() :: map()
@callback use_step?(step(), context()) :: boolean()
@callback guard_transition(state(), state(), context()) :: :ok | {:error, any()}
@optional_callbacks use_step?: 2, guard_transition: 3
defmacro __using__(_) do
quote do
@behaviour unquote(__MODULE__)
require ExState.Definition.Compiler
import unquote(__MODULE__), only: [workflow: 2]
end
end
defmacro workflow(name, body) do
chart = ExState.Definition.Compiler.compile(name, body, __CALLER__)
quote do
Module.put_attribute(__MODULE__, :chart, unquote(chart))
def definition, do: @chart
def name, do: @chart.name
def subject, do: @chart.subject
def initial_state, do: @chart.initial_state
def describe, do: Chart.describe(@chart)
def states, do: Chart.states(@chart)
def steps, do: Chart.steps(@chart)
def events, do: Chart.events(@chart)
def state(id), do: Chart.state(@chart, id)
def state(id1, id2), do: Chart.state(@chart, id1, id2)
def new(), do: new(nil)
def new(context), do: Execution.new(@chart, __MODULE__, context)
def continue(state_name), do: continue(state_name, %{})
def continue(state_name, context),
do: Execution.continue(@chart, __MODULE__, state_name, context)
def put_context(execution, context),
do: Execution.put_context(execution, context)
def put_context(execution, key, value),
do: Execution.put_context(execution, key, value)
def with_completed(execution, state, step, decision \\ nil),
do: Execution.with_completed(execution, state, step, decision)
def will_transition?(execution, event), do: Execution.will_transition?(execution, event)
def complete?(execution), do: Execution.complete?(execution)
def transition(execution, event), do: Execution.transition(execution, event)
def transition!(execution, event), do: Execution.transition!(execution, event)
def transition_maybe(execution, event), do: Execution.transition_maybe(execution, event)
def complete(execution, step), do: Execution.complete(execution, step)
def decision(execution, step, decision), do: Execution.decision(execution, step, decision)
def execute_actions(execution), do: Execution.execute_actions(execution)
def execute_actions!(execution), do: Execution.execute_actions!(execution)
def dump(execution), do: Execution.dump(execution)
def updated({:ok, context}), do: {:updated, context}
def updated(x), do: x
def updated({:ok, value}, key), do: {:updated, {key, value}}
def updated(x, _), do: x
end
end
end
|
lib/ex_state/definition.ex
| 0.893815
| 0.68187
|
definition.ex
|
starcoder
|
defmodule MerklePatriciaTree.Proof do
require Integer
alias MerklePatriciaTree.Trie
alias MerklePatriciaTree.Trie.Node
alias MerklePatriciaTree.Trie.Helper
alias MerklePatriciaTree.ListHelper
alias MerklePatriciaTree.DB
@doc """
Builds a proof tree for the given path by going through each node on this path
and making a new partial tree.
"""
@spec construct_proof({Trie.t(), Trie.key(), Trie.t()}) :: {Trie.value() | nil, Trie.t()}
def construct_proof({trie, key, proof_db}) do
## Inserting the value of the root hash into the proof db
insert_proof_db(trie.root_hash, trie.db, proof_db)
## Constructing the proof trie going through the rest of the nodes
next_node = Trie.get_next_node(trie.root_hash, trie)
construct_proof(next_node, Helper.get_nibbles(key), proof_db)
end
defp construct_proof(trie, nibbles = [nibble | rest], proof) do
case Node.decode_trie(trie) do
:empty ->
{nil, proof}
{:branch, branches} ->
# branch node
case Enum.at(branches, nibble) do
[] ->
{nil, proof}
node_hash when is_binary(node_hash) and byte_size(node_hash) == 32 ->
insert_proof_db(node_hash, trie.db, proof)
construct_proof(
Trie.get_next_node(node_hash, trie),
rest,
proof
)
node_hash ->
construct_proof(Trie.get_next_node(node_hash, trie), rest, proof)
end
{:leaf, prefix, value} ->
case nibbles do
^prefix -> {value, proof}
_ -> {nil, proof}
end
{:ext, shared_prefix, next_node} when is_list(next_node) ->
# extension, continue walking tree if we match
case ListHelper.get_postfix(nibbles, shared_prefix) do
# did not match extension node
nil ->
{nil, proof}
rest ->
construct_proof(Trie.get_next_node(next_node, trie), rest, proof)
end
{:ext, shared_prefix, next_node} ->
case ListHelper.get_postfix(nibbles, shared_prefix) do
nil ->
{nil, proof}
rest ->
insert_proof_db(next_node, trie.db, proof)
construct_proof(Trie.get_next_node(next_node, trie), rest, proof)
end
end
end
defp construct_proof(trie, [], proof) do
case Node.decode_trie(trie) do
{:branch, branches} ->
{List.last(branches), proof}
{:leaf, [], v} ->
{v, proof}
_ ->
{nil, proof}
end
end
@doc """
Verifies that a particular path leads to the given value.
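
Example (illustrative; `proof_db` is the partial trie populated by
`construct_proof/1` and `root_hash` is the original trie's root hash):

    true = MerklePatriciaTree.Proof.verify_proof("key", value, root_hash, proof_db)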
"""
@spec verify_proof(Trie.key(), Trie.value(), binary(), Trie.t()) :: boolean()
def verify_proof(key, value, hash, proof) do
case decode_node(hash, proof) do
:error -> false
node -> int_verify_proof(Helper.get_nibbles(key), node, value, proof)
end
end
defp int_verify_proof(path, {:ext, shared_prefix, next_node}, value, proof) do
case ListHelper.get_postfix(path, shared_prefix) do
nil -> false
rest -> int_verify_proof(rest, decode_node(next_node, proof), value, proof)
end
end
defp int_verify_proof([], {:branch, branch}, value, _) do
List.last(branch) == value
end
defp int_verify_proof([nibble | rest], {:branch, branch}, value, proof) do
case Enum.at(branch, nibble) do
[] ->
false
next_node ->
int_verify_proof(rest, decode_node(next_node, proof), value, proof)
end
end
defp int_verify_proof(path, {:leaf, shared_prefix, node_val}, value, _) do
node_val == value and path == shared_prefix
end
defp int_verify_proof(_path, _node, _value, _proof), do: false
defp decode_node(hash, proof) when is_binary(hash) and byte_size(hash) == 32 do
case read_from_db(proof, hash) do
{:ok, node} -> decode_node(ExRLP.decode(node), proof)
_ -> :error
end
end
defp decode_node(node, _proof), do: Node.decode_node(node)
## DB operations
defp insert_proof_db(hash, db, proof) do
{:ok, node} = DB.get(db, hash)
DB.put!(proof.db, hash, node)
end
defp read_from_db(db, hash), do: DB.get(db, hash)
end
|
lib/proof.ex
| 0.768038
| 0.439206
|
proof.ex
|
starcoder
|
defmodule Game.Character do
@moduledoc """
Character GenServer client
A character is a player (session genserver) or an NPC (genserver). They should
handle the following casts:
- `{:targeted, player}`
- `{:apply_effects, effects, player}`
"""
alias Data.NPC
alias Data.User
alias Game.Character.Via
@typedoc """
Tagged tuple of a user or npc struct
Valid options:
- `{:user, user}`
- `{:npc, npc}`
"""
@type t :: tuple()
@doc """
Let the target know they are being targeted
"""
@spec being_targeted(tuple(), Character.t()) :: :ok
def being_targeted(target, player) do
GenServer.cast({:via, Via, who(target)}, {:targeted, player})
end
@doc """
Apply effects on the target
"""
@spec apply_effects(tuple(), [Effect.t()], Character.t(), String.t()) :: :ok
def apply_effects(target, effects, from, description) do
GenServer.cast({:via, Via, who(target)}, {:apply_effects, effects, from, description})
end
@doc """
Reply to the sending character what effects were applied
"""
@spec effects_applied(Character.t(), [Effect.t()], Character.t()) :: :ok
def effects_applied(from, effects, target) do
GenServer.cast({:via, Via, who(from)}, {:effects_applied, effects, target})
end
@doc """
Get character information about the character
"""
@spec info(Character.t()) :: Character.t()
def info(target) do
GenServer.call({:via, Via, who(target)}, :info)
end
@doc """
Notify a character of an event
"""
@spec notify(Character.t(), map()) :: :ok
def notify(target, event) do
GenServer.cast({:via, Via, who(target)}, {:notify, event})
end
@doc """
Converts a tuple with a struct to a tuple with an id
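
Example: `who({:user, %Data.User{id: 1}})` returns `{:user, 1}`.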
"""
@spec who({:npc, integer()} | {:npc, NPC.t()}) :: {:npc, integer()}
@spec who({:user, integer()} | {:user, User.t()}) :: {:user, integer()}
def who(target)
def who({:npc, id}) when is_integer(id), do: {:npc, id}
def who({:npc, npc}), do: {:npc, npc.id}
def who({:user, id}) when is_integer(id), do: {:user, id}
def who({:user, user}), do: {:user, user.id}
end
|
lib/game/character.ex
| 0.826991
| 0.518424
|
character.ex
|
starcoder
|
defmodule TileRack do
@moduledoc """
Encapsulate the shape of each piece, initially in the player's rack
then later on the board.
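
## Example

A sketch (illustrative; the returned CSS class depends on which square is queried):

    rack = TileRack.new()
    TileRack.square_class(rack, 0, 1)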
"""
defstruct [:rack_squares, :width, :height, :currently_selected, :on_board, :raw_chars]
# This rack layout drives the entire set of pieces.
# It is parsed to find the location and shape of each piece.
@rack """
+-------------+
| |
| 1 22 333 44 |
| 4 |
| 5555 666 |
| 6 88 |
| 777 88 |
| 7 LLLL |
| L NN |
| 99 NNN |
| 99 PP |
| PPP VVV |
| UUU V |
| U U IIIII V |
| |
| FF TTT W |
| FF T WW |
| F T WW |
| X Z |
| Y XXX ZZZ |
| Y X Z |
| YY |
| Y |
| |
+-------------+
"""
def new() do
# Remove the borders, leaving just the set of rack pieces.
rack_lines = @rack
|> String.split(~r/\n/, trim: true)
|> Enum.reject(fn line -> String.match?(line, ~r/\+-+\+/) end)
|> Enum.map(fn line -> String.replace(line, ~r/^ *\|/, "") end)
|> Enum.map(fn line -> String.replace(line, ~r/\|.*$/, "") end)
raw_chars = rack_lines
|> Enum.join
|> String.split(~r//, trim: true)
# Track the locations of each individual square that makes up each piece.
# Note that the indexes for some squares happen to match printable
# characters so the list of arrays appears to contain random text.
rack_squares = raw_chars
|> Enum.with_index
|> Enum.reduce(%{},
fn({char, index}, acc) ->
case {char, acc[char]} do
{" ", _} -> acc
{_, nil} -> put_in(acc, [char], [index])
{_, value} -> put_in(acc, [char], [index | value])
end
end)
height = Enum.count(rack_lines)
width = div(Enum.count(raw_chars), height)
%__MODULE__{
width: width,
height: height,
rack_squares: rack_squares,
currently_selected: nil,
raw_chars: raw_chars,
on_board: %{} # each key will be the piece ID; the values are the locations on the board.
}
end
# TODO: enhance this method or get rid of it
def click(%TileRack{}, _x, _y) do
~s(phx-click=rack-click)
end
def value(%TileRack{ raw_chars: raw_chars, width: width}, x, y) do
square_index = x + y * width
case Enum.at(raw_chars, square_index) do
nil -> ~s(phx-value=".")
value -> ~s(phx-value="#{ value }")
end
end
# Return the CSS class(es) that should be applied to this square.
# Simple case: the default rack.
def square_class(%TileRack{ raw_chars: raw_chars,
on_board: %{},
currently_selected: nil,
width: width
}, x, y) do
square_index = x + y * width
case Enum.at(raw_chars, square_index) do
" " -> "" # blank
nil -> "bad" # blank
_ -> "piece-square" # some portion of a piece
end
end
# Return the CSS class(es) that should be applied to this square.
# Less simple case: something selected, but nothing on board.
def square_class(%TileRack{ raw_chars: raw_chars,
on_board: %{},
currently_selected: currently_selected,
width: width
}, x, y) do
square_index = x + y * width
case {Enum.at(raw_chars, square_index), currently_selected} do
{ch, ch} -> "selected-piece" # highlight the selected piece
{" ", _} -> "" # blank
{_a, _b} -> "piece-square" # some portion of a piece
end
end
def column_ids(%__MODULE__{width: width}) do
(0..(width - 1))
end
def row_ids(%__MODULE__{height: height}) do
(0..(height - 1))
end
end
|
lib/live_view_demo/tile_rack.ex
| 0.698535
| 0.592991
|
tile_rack.ex
|
starcoder
|
defmodule Paddle.Filters do
@moduledoc ~S"""
Module used internally by Paddle to manipulate LDAP filters.
"""
@type easy_filter :: keyword | %{optional(atom | binary) => binary}
@type eldap_filter :: tuple
@type filter :: easy_filter | eldap_filter | nil
@type t :: filter
@spec construct_filter(filter) :: eldap_filter
@doc ~S"""
Construct a eldap filter from the given keyword list or map.
If given an `:eldap` filter (a tuple), it is returned as is.
If given `nil`, it will return an empty filter (`:eldap.and([])`).
Examples:
iex> Paddle.Filters.construct_filter(uid: "testuser")
{:equalityMatch, {:AttributeValueAssertion, 'uid', 'testuser'}}
iex> Paddle.Filters.construct_filter(%{uid: "testuser"})
{:equalityMatch, {:AttributeValueAssertion, 'uid', 'testuser'}}
iex> Paddle.Filters.construct_filter(%{"uid" => "testuser"})
{:equalityMatch, {:AttributeValueAssertion, 'uid', 'testuser'}}
iex> Paddle.Filters.construct_filter(:eldap.substrings('uid', initial: 'b'))
{:substrings, {:SubstringFilter, 'uid', [initial: 'b']}}
iex> Paddle.Filters.construct_filter(nil)
{:and, []}
iex> Paddle.Filters.construct_filter([])
{:and, []}
iex> Paddle.Filters.construct_filter(uid: "testuser", cn: "Test User")
{:and,
[equalityMatch: {:AttributeValueAssertion, 'uid', 'testuser'},
equalityMatch: {:AttributeValueAssertion, 'cn', 'Test User'}]}
"""
def construct_filter(filter) when is_tuple(filter), do: filter
def construct_filter(nil), do: :eldap.and([])
def construct_filter(filter) when is_map(filter), do: filter
|> Enum.into([])
|> construct_filter
def construct_filter([{key, value}]) when is_binary(value) do
:eldap.equalityMatch('#{key}', '#{value}')
end
def construct_filter(kwdn) when is_list(kwdn) do
criteria = kwdn
|> Enum.map(fn {key, value} -> :eldap.equalityMatch('#{key}', '#{value}') end)
:eldap.and(criteria)
end
@spec merge_filter(filter, filter) :: filter
@doc ~S"""
Merge two filters with an "and" operation.
Examples:
iex> Paddle.Filters.merge_filter([uid: "testuser"], [cn: "Test User"])
{:and,
[equalityMatch: {:AttributeValueAssertion, 'uid', 'testuser'},
equalityMatch: {:AttributeValueAssertion, 'cn', 'Test User'}]}
iex> Paddle.Filters.merge_filter([uid: "testuser"], :eldap.substrings('cn', [initial: 'Tes']))
{:and,
[equalityMatch: {:AttributeValueAssertion, 'uid', 'testuser'},
substrings: {:SubstringFilter, 'cn', [initial: 'Tes']}]}
iex> Paddle.Filters.merge_filter([uid: "testuser"], [])
[uid: "testuser"]
iex> Paddle.Filters.merge_filter([], [cn: "Test User"])
[cn: "Test User"]
iex> Paddle.Filters.merge_filter([], nil)
{:and, []}
"""
def merge_filter(lhs, rhs)
for lhs <- [[], nil], rhs <- [[], nil] do
def merge_filter(unquote(lhs), unquote(rhs)), do: :eldap.and([])
end
for null_filter <- [[], nil] do
def merge_filter(filter, unquote(null_filter)), do: filter
def merge_filter(unquote(null_filter), filter), do: filter
end
def merge_filter({:and, lcond}, {:and, rcond}), do: {:and, lcond ++ rcond}
def merge_filter({:and, lcond}, rhs), do: {:and, [construct_filter(rhs) | lcond]}
def merge_filter(lhs, {:and, rcond}), do: {:and, [construct_filter(lhs) | rcond]}
def merge_filter(lhs, rhs) do
:eldap.and([construct_filter(lhs), construct_filter(rhs)])
end
@spec class_filter([binary]) :: eldap_filter
@doc ~S"""
Construct a filter that matches a list of objectClasses.
Examples:
iex> Paddle.Filters.class_filter ["posixAccount", "account"]
{:and,
[equalityMatch: {:AttributeValueAssertion, 'objectClass', 'posixAccount'},
equalityMatch: {:AttributeValueAssertion, 'objectClass', 'account'}]}
"""
def class_filter(classes) when is_list(classes) do
classes
|> Enum.map(&:eldap.equalityMatch('objectClass', '#{&1}'))
|> :eldap.and
end
def class_filter(class), do: :eldap.equalityMatch('objectClass', '#{class}')
end
|
lib/paddle/filters.ex
| 0.888242
| 0.519399
|
filters.ex
|
starcoder
|
defmodule AWS.DynamoDBStreams do
@moduledoc """
Amazon DynamoDB
Amazon DynamoDB Streams provides API actions for accessing streams and
processing stream records.
To learn more about application development with Streams, see [Capturing Table Activity with DynamoDB
Streams](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Streams.html)
in the Amazon DynamoDB Developer Guide.
"""
@doc """
Returns information about a stream, including the current status of the stream,
its Amazon Resource Name (ARN), the composition of its shards, and its
corresponding DynamoDB table.
You can call `DescribeStream` at a maximum rate of 10 times per second.
Each shard in the stream has a `SequenceNumberRange` associated with it. If the
`SequenceNumberRange` has a `StartingSequenceNumber` but no
`EndingSequenceNumber`, then the shard is still open (able to receive more
stream records). If both `StartingSequenceNumber` and `EndingSequenceNumber` are
present, then that shard is closed and can no longer receive more data.
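
Example (illustrative; `client` is an `AWS.Client` struct and `stream_arn`
is the ARN of an existing stream):

    {:ok, body, _response} =
      AWS.DynamoDBStreams.describe_stream(client, %{"StreamArn" => stream_arn})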
"""
def describe_stream(client, input, options \\ []) do
request(client, "DescribeStream", input, options)
end
@doc """
Retrieves the stream records from a given shard.
Specify a shard iterator using the `ShardIterator` parameter. The shard iterator
specifies the position in the shard from which you want to start reading stream
records sequentially. If there are no stream records available in the portion of
the shard that the iterator points to, `GetRecords` returns an empty list. Note
that it might take multiple calls to get to a portion of the shard that contains
stream records.
`GetRecords` can retrieve a maximum of 1 MB of data or 1000 stream records,
whichever comes first.
"""
def get_records(client, input, options \\ []) do
request(client, "GetRecords", input, options)
end
@doc """
Returns a shard iterator.
A shard iterator provides information about how to retrieve the stream records
from within a shard. Use the shard iterator in a subsequent `GetRecords` request
to read the stream records from the shard.
A shard iterator expires 15 minutes after it is returned to the requester.
"""
def get_shard_iterator(client, input, options \\ []) do
request(client, "GetShardIterator", input, options)
end
@doc """
Returns an array of stream ARNs associated with the current account and
endpoint.
If the `TableName` parameter is present, then `ListStreams` will return only the
streams ARNs for that table.
You can call `ListStreams` at a maximum rate of 5 times per second.
"""
def list_streams(client, input, options \\ []) do
request(client, "ListStreams", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "dynamodb"}
host = build_host("streams.dynamodb", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.0"},
{"X-Amz-Target", "DynamoDBStreams_20120810.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/dynamodb_streams.ex
| 0.8731
| 0.57341
|
dynamodb_streams.ex
|
starcoder
|
defmodule IntermodalContainers.ContainerCode.Parser do
@moduledoc false
alias IntermodalContainers.ContainerCode
alias IntermodalContainers.ContainerCode.SizeCodes
alias IntermodalContainers.ContainerCode.TypeCodes
@type result() :: {:ok, %ContainerCode{}} | {:error, String.t()}
@spec parse(String.t()) :: result()
def parse(code) do
try do
parse_step({code, 0, %ContainerCode{}})
rescue
UndefinedFunctionError -> {:error, "invalid container code"}
end
end
defp parse_step({_rest, 0, %{}} = parse_state) do
consume(parse_state, 1, &accept_l/1)
end
defp parse_step({_rest, 1, %{}} = parse_state) do
consume(parse_state, 1, &accept_w_h/1)
end
defp parse_step({_rest, 2, _} = parse_state) do
consume(parse_state, 2, &accept_type/1)
end
defp parse_step({_, 4, result}), do: {:ok, result}
defp accept_l(size_code) do
l = lookup(:length, size_code)
cond do
byte_size(size_code) != 1 ->
{:error,
"length code should be 1 character long. #{size_code} is #{byte_size(size_code)}"}
l == nil ->
{:error, "unrecognized length code"}
true ->
{:ok, &update_state(&1, length: l)}
end
end
defp accept_w_h(size_code) do
w = lookup(:width, size_code)
h = lookup(:height, size_code)
cond do
byte_size(size_code) != 1 ->
{:error, "width height code should be 1 character long"}
w == nil ->
{:error, "unrecognixed width code"}
h == nil ->
{:error, "unrecognized height code"}
true ->
{:ok, &update_state(&1, width: w, height: h)}
end
end
defp accept_type(type_code) do
type = lookup(:type, type_code)
cond do
byte_size(type_code) != 2 ->
{:error, "type code should be 2 characters, got #{byte_size(type_code)}"}
type == nil ->
{:error, "unrecognized type code"}
true ->
{:ok, &update_state(&1, type: type)}
end
end
defp update_state(%ContainerCode{} = state, key, code) do
Map.update!(state, key, fn _old -> code end)
end
defp update_state(%ContainerCode{} = state, [{key, code} | rest]) do
update_state(state, key, code)
|> update_state(rest)
end
defp update_state(%ContainerCode{} = state, []), do: state
defp lookup(:length, code), do: SizeCodes.get_length(code)
defp lookup(:width, code), do: SizeCodes.get_width(code)
defp lookup(:height, code), do: SizeCodes.get_height(code)
defp lookup(:type, code), do: TypeCodes.get(code)
def consume({code, position, res}, size, accept_fn) do
{target, rest} = String.split_at(code, size)
accept_fn.(target)
|> advance(rest, position + size, res)
end
def advance({:error, _reason} = err, _code, _position, _parsed_container_number), do: err
def advance({:ok, update_fn}, remainder, next_position, intermediate_result) do
parse_step({remainder, next_position, update_fn.(intermediate_result)})
end
end
|
lib/intermodal_containers/container_code/parser.ex
| 0.704872
| 0.407392
|
parser.ex
|
starcoder
|
defmodule Depot.Visibility.PortableUnixVisibilityConverter do
@moduledoc """
`Depot.Visibility.UnixVisibilityConverter` supporting `Depot.Visibility.portable()`.
This is a good default visibility converter for adapters using unix based permissions.
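
## Example

A sketch of the default mapping (illustrative; assumes
`Depot.Visibility.guard_portable/1` returns `{:ok, visibility}` for
`:public`/`:private`):

    config = Depot.Visibility.PortableUnixVisibilityConverter.config([])
    0o644 = Depot.Visibility.PortableUnixVisibilityConverter.for_file(config, :public)
    0o700 = Depot.Visibility.PortableUnixVisibilityConverter.for_directory(config, :private)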
"""
alias Depot.Visibility.UnixVisibilityConverter
defmodule Config do
@moduledoc false
@type t :: %__MODULE__{
file_public: UnixVisibilityConverter.permission(),
file_private: UnixVisibilityConverter.permission(),
directory_public: UnixVisibilityConverter.permission(),
directory_private: UnixVisibilityConverter.permission()
}
defstruct file_public: 0o644,
file_private: 0o600,
directory_public: 0o755,
directory_private: 0o700
end
@behaviour UnixVisibilityConverter
@impl UnixVisibilityConverter
def config(config) do
struct!(%Config{}, config)
end
@impl UnixVisibilityConverter
def for_file(%Config{} = config, visibility) do
with {:ok, visibility} <- Depot.Visibility.guard_portable(visibility) do
case visibility do
:public -> config.file_public
:private -> config.file_private
end
end
end
@impl UnixVisibilityConverter
def for_directory(%Config{} = config, visibility) do
with {:ok, visibility} <- Depot.Visibility.guard_portable(visibility) do
case visibility do
:public -> config.directory_public
:private -> config.directory_private
end
end
end
@impl UnixVisibilityConverter
def from_file(%Config{} = config, permission) do
cond do
permission === config.file_public -> :public
permission === config.file_private -> :private
true -> :public
end
end
@impl UnixVisibilityConverter
def from_directory(%Config{} = config, permission) do
cond do
permission === config.directory_public -> :public
permission === config.directory_private -> :private
true -> :public
end
end
end
|
lib/depot/visibility/portable_unix_visibility_converter.ex
| 0.855429
| 0.408395
|
portable_unix_visibility_converter.ex
|
starcoder
|
defmodule Hui.Query do
@moduledoc """
Hui.Query module provides underpinning HTTP-based request functions for Solr, including:
- `get/2`, `get!/2`
- `post/2`, `post!/2`
"""
use HTTPoison.Base
alias Hui.URL
alias Hui.Encoder
alias Hui.Query
@type querying_struct :: Query.Standard.t() | Query.Common.t() | Query.DisMax.t()
@type faceting_struct :: Query.Facet.t() | Query.FacetRange.t() | Query.FacetInterval.t()
@type highlighting_struct ::
Query.Highlight.t()
| Query.HighlighterUnified.t()
| Query.HighlighterOriginal.t()
| Query.HighlighterFastVector.t()
@type misc_struct :: Query.MoreLikeThis.t() | Query.Suggest.t() | Query.SpellCheck.t()
@type solr_struct :: querying_struct | faceting_struct | highlighting_struct | misc_struct
@type solr_query :: Keyword.t() | map | solr_struct | [solr_struct]
@type solr_update_query :: binary | Query.Update.t()
@type solr_url :: Hui.URL.t()
@doc """
Issues a get request of Solr query to a specific endpoint.
The query can be a keyword list or a list of Hui query structs (`t:solr_query/0`).
## Example - parameters
```
url = %Hui.URL{url: "http://..."}
# query via a list of keywords, which are unbound and sent to Solr directly
Hui.Query.get(url, q: "glen cova", facet: "true", "facet.field": ["type", "year"])
# query via Hui structs
alias Hui.Query
Hui.Query.get(url, %Query.DisMax{q: "glen cova"})
Hui.Query.get(url, [%Query.DisMax{q: "glen"}, %Query.Facet{field: ["type", "year"]}])
```
The use of structs is more idiomatic and succinct. It is bound to qualified Solr fields.
See `t:Hui.URL.t/0` struct about specifying HTTP headers and HTTPoison options
of a request, e.g. `timeout`, `recv_timeout`, `max_redirect` etc.
"""
@spec get(solr_url, solr_query) :: {:ok, HTTPoison.Response.t()} | {:error, HTTPoison.Error.t()}
@impl true
def get(%URL{} = solr_url, solr_query) do
endpoint = to_string(solr_url)
query = Encoder.encode(solr_query)
get([endpoint, "?", query] |> IO.iodata_to_binary(), solr_url.headers, solr_url.options)
end
@doc """
Issues a get request of Solr query to a specific endpoint, raising an exception in case of failure.
If the request does not fail, the response is returned.
See `get/2` for more detailed information.
"""
@spec get!(solr_url, solr_query) :: HTTPoison.Response.t()
@impl true
def get!(%URL{} = solr_url, solr_query) do
endpoint = to_string(solr_url)
query = Encoder.encode(solr_query)
get!([endpoint, "?", query] |> IO.iodata_to_binary(), solr_url.headers, solr_url.options)
end
@doc """
Issues a POST update request to a specific Solr endpoint, for data indexing and deletion.
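## Example (illustrative; assumes a Solr update endpoint and a JSON update
document, with a matching content-type header set on the URL struct)
```
url = %Hui.URL{url: "http://localhost:8983/solr/collection/update", headers: [{"content-type", "application/json"}]}
Hui.Query.post(url, ~s({"delete": {"id": "1"}}))
```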
"""
@spec post(solr_url, solr_update_query) ::
{:ok, HTTPoison.Response.t()} | {:error, HTTPoison.Error.t()}
@impl true
def post(%URL{} = solr_url, solr_query) do
endpoint = to_string(solr_url)
data = if is_binary(solr_query), do: solr_query, else: Encoder.encode(solr_query)
post(endpoint, data, solr_url.headers, solr_url.options)
end
@doc """
Issues a POST update request to a specific Solr endpoint, raising an exception in case of failure.
"""
@spec post!(solr_url, solr_update_query) :: HTTPoison.Response.t()
@impl true
def post!(%URL{} = solr_url, solr_query) do
endpoint = to_string(solr_url)
data = if is_binary(solr_query), do: solr_query, else: Encoder.encode(solr_query)
post!(endpoint, data, solr_url.headers, solr_url.options)
end
# implement HTTPoison.Base callback:
# decode JSON data, return other response formats as raw text
@impl true
def process_response_body(""), do: ""
def process_response_body(body) do
case Poison.decode(body) do
{:ok, solr_results} -> solr_results
_error -> body
end
end
end
|
lib/hui/query.ex
| 0.895235
| 0.699742
|
query.ex
|
starcoder
|
defmodule Snap do
def snap(0) do
camera = Camera.normal({800, 600})
obj1 = %Sphere{radius: 140, pos: {0, 0, 700}}
obj2 = %Sphere{radius: 50, pos: {200, 0, 600}}
obj3 = %Sphere{radius: 50, pos: {-80, 0, 400}}
image = Tracer.tracer(camera, [obj1, obj2, obj3])
PPM.write("snap0.ppm", image)
end
def snap(1) do
camera = Camera.normal({800, 600})
obj1 = %Sphere{radius: 140, pos: {0, 0, 700}, color: {1, 0.5, 0}}
obj2 = %Sphere{radius: 50, pos: {200, 0, 600}, color: {0, 0.8, 0.2}}
obj3 = %Sphere{radius: 50, pos: {-80, 0, 400}, color: {0.1, 0.1, 1}}
image = TracerColor.tracer(camera, [obj1, obj2, obj3])
PPM.write("snap1.ppm", image)
end
def snap(2) do
camera = Camera.normal({1920, 1080})
obj1 = %Sphere{radius: 140, pos: {0, 0, 700}, color: {1, 0.5, 0}}
obj2 = %Sphere{radius: 50, pos: {200, 0, 600}, color: {0, 0.8, 0.2}}
obj3 = %Sphere{radius: 50, pos: {-80, 0, 400}, color: {0.1, 0.1, 1}}
light1 = %Light{pos: {-1000, 1000, 700}, color: {1.0, 0.3, 0.3}}
light2 = %Light{pos: {800, 800, 0}, color: {0.3, 1.0, 0.3}}
light3 = %Light{pos: {800, -800, 0}, color: {0.3, 0.3, 1.0}}
world =
%World{objects: [obj1, obj2, obj3],
lights: [light1, light2, light3],
background: {0.0, 0.0, 0.0},
ambient: {0.6, 0.6, 0.6}}
image = TracerLight.tracer(camera, world)
PPM.write("snap2.ppm", image)
end
def snap(3) do
camera = Camera.normal({1920, 1080})
obj1 = %Sphere{radius: 140, pos: {0, 0, 700}, color: {1, 0.5, 0}, brilliance: 1.0}
obj2 = %Sphere{radius: 50, pos: {200, 0, 600}, color: {0, 0.8, 0.2}, brilliance: 0.4}
obj3 = %Sphere{radius: 50, pos: {-80, 0, 400}, color: {0.1, 0.1, 1}, brilliance: 0.8}
light1 = %Light{pos: {-1000, 1000, 700}, color: {1.0, 0.3, 0.3}}
light2 = %Light{pos: {800, 800, 0}, color: {0.3, 1.0, 0.3}}
light3 = %Light{pos: {800, -800, 0}, color: {0.3, 0.3, 1.0}}
world = %World{objects: [obj1, obj2, obj3],
lights: [light1, light2, light3],
background: {0.0, 0.0, 0.0},
ambient: {0.1, 0.1, 0.1}}
image = TracerReflection.tracer(camera, world)
PPM.write("snap3.ppm", image)
end
def snap(4) do
camera = Camera.normal({1920, 1080})
obj1 = %Sphere{radius: 140, pos: { 0, 0, 700}, color: {1, 1, 1}, brilliance: 0.4}
obj2 = %Sphere{radius: 50, pos: {200, 0, 600}, color: {1, 1, 1}, brilliance: 0.8}
obj3 = %Sphere{radius: 50, pos: {-80, 0, 400}, color: {1, 1, 1}, brilliance: 0.5}
light1 = %Light{pos: {-1000, -1000, 700}, color: {1.0, 0.0, 0.0}}
light2 = %Light{pos: {800, 800, 0}, color: {0.1, 1.0, 0.0}}
light3 = %Light{pos: {800, -800, 0}, color: {0.0, 0.0, 1.0}}
world = %World{objects: [obj1, obj2, obj3],
lights: [light1, light2, light3],
background: {0.0, 0.0, 0.0},
ambient: {0.1, 0.1, 0.1},
depth: 3}
image = TracerReflection.tracer(camera, world)
PPM.write("snap4.ppm", image)
end
def snap(5) do
camera = Camera.normal({3200, 1800})
obj1 = %Sphere{radius: 140, pos: {0, 0, 700}, color: {1.0, 0.5, 0}, brilliance: 0.4}
obj2 = %Sphere{radius: 50, pos: {200, 0, 600}, color: {0, 0.8, 0.2}, brilliance: 0.4}
obj3 = %Sphere{radius: 50, pos: {-80, 0, 400}, color: {0.1, 0.1, 1.0}, brilliance: 0.8}
light1 = %Light{pos: {-1000, -1000, 700}, color: {1.0, 0.3, 0.3}}
light2 = %Light{pos: {800, 800, 0}, color: {0.3, 1.0, 0.3}}
light3 = %Light{pos: {800, -800, 0}, color: {0.3, 0.3, 1.0}}
world = %World{objects: [obj1, obj2, obj3],
lights: [light1, light2, light3],
background: {0.0, 0.0, 0.0},
ambient: {0.1, 0.1, 0.1},
depth: 3}
image = TracerReflection.tracer(camera, world)
PPM.write("snap5.ppm", image)
end
def snaps() do
snap(1)
snap(2)
snap(3)
snap(4)
snap(5)
end
end
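# A hedged sketch following the pattern above: one more scene, reusing the
# same Camera/Sphere/Light/World/TracerReflection/PPM modules this file
# already depends on.
defmodule SnapExtra do
  def snap do
    camera = Camera.normal({800, 600})
    obj = %Sphere{radius: 100, pos: {0, 0, 500}, color: {0.9, 0.9, 0.2}, brilliance: 0.6}
    light = %Light{pos: {0, 1000, 0}, color: {1.0, 1.0, 1.0}}
    world = %World{objects: [obj],
                   lights: [light],
                   background: {0.0, 0.0, 0.0},
                   ambient: {0.2, 0.2, 0.2},
                   depth: 2}
    image = TracerReflection.tracer(camera, world)
    PPM.write("snap_extra.ppm", image)
  end
end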
|
Tracer/snap.ex
| 0.54577
| 0.740151
|
snap.ex
|
starcoder
|
defmodule StarkInfra.Utils.Check do
@moduledoc false
alias EllipticCurve.PrivateKey
alias StarkInfra.Project
alias StarkInfra.Organization
def environment(environment) do
case environment do
:production -> environment
:sandbox -> environment
nil -> raise "please set an environment"
_any -> raise "environment must be either :production or :sandbox"
end
end
def limit(limit) when is_nil(limit) do
nil
end
def limit(limit) do
min(limit, 100)
end
def datetime(data) when is_nil(data) do
nil
end
def datetime(data) when is_binary(data) do
{:ok, datetime, _utc_offset} = data |> DateTime.from_iso8601()
datetime
end
def date(data) when is_nil(data) do
nil
end
def date(data) when is_binary(data) do
data |> Date.from_iso8601!()
end
def date(data = %DateTime{}) do
%Date{year: data.year, month: data.month, day: data.day}
end
def date(data) do
data
end
def date_or_datetime(data) do
try do
date(data)
rescue
ArgumentError -> datetime(data)
end
end
def private_key(private_key) do
try do
{:ok, parsed_key} = PrivateKey.fromPem(private_key)
:secp256k1 = parsed_key.curve.name
parsed_key
rescue
_e -> raise "private_key must be valid secp256k1 ECDSA string in pem format"
else
parsed_key -> parsed_key
end
end
def options(options) do
options
|> Enum.into(%{})
|> fill_limit()
|> fill_date_field(:after)
|> fill_date_field(:before)
end
defp fill_limit(options) do
  if Map.has_key?(options, :limit) do
    options
  else
    Map.put(options, :limit, nil)
  end
end
defp fill_date_field(options, field) do
if !Map.has_key?(options, field) do
Map.put(options, field, nil)
else
Map.update!(options, field, &date/1)
end
end
def user(user) when is_nil(user) do
case Application.fetch_env(:starkinfra, :project) do
{:ok, project_info} -> project_info |> StarkInfra.project()
:error -> organization_user()
end
end
def user(user = %Project{}) do
user
end
def user(user = %Organization{}) do
user
end
defp organization_user() do
case Application.fetch_env(:starkinfra, :organization) do
{:ok, organization_info} -> organization_info |> StarkInfra.organization()
:error -> raise "no default user was located in configs and no user was passed in the request"
end
end
def language() do
case Application.fetch_env(:starkinfra, :language) do
{:ok, 'en-US'} -> 'en-US'
{:ok, "en-US"} -> 'en-US'
{:ok, 'pt-BR'} -> 'pt-BR'
{:ok, "pt-BR"} -> 'pt-BR'
:error -> 'en-US'
end
end
def enforced_keys(parameters, enforced_keys) do
case get_missing_keys(parameters |> Enum.into(%{}), enforced_keys) do
[] -> parameters
missing_keys -> raise "the following parameters are missing: " <> Enum.join(missing_keys, ", ")
end
end
def get_missing_keys(parameters, [key | other_enforced_keys]) do
missing_keys = get_missing_keys(parameters, other_enforced_keys)
case Map.has_key?(parameters, key) do
true -> missing_keys
false -> [key | missing_keys]
end
end
def get_missing_keys(_parameters, []) do
[]
end
end
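# A hedged usage sketch for enforced_keys/2 above; the key names are
# illustrative, not real StarkInfra resource fields.
defmodule StarkInfra.Utils.Check.Example do
  alias StarkInfra.Utils.Check

  def validate!(params) do
    # returns params untouched, or raises naming the missing keys
    Check.enforced_keys(params, [:name, :tax_id])
  end
end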
|
lib/utils/checks.ex
| 0.540196
| 0.566978
|
checks.ex
|
starcoder
|
defmodule Crit.Sql do
alias Crit.Repo
import Crit.Servers.Institution.Server, only: [server: 1]
@moduledoc """
These functions use the Institution's shortname to send the right
SQL to the right place.
There's a function for each `Ecto.Repo` function (that is used by
this application). Each works by asking the `Server` to
provide arguments for it to `apply`. The caller does the work
because otherwise tests would have to arrange for the
`Server` and the test to share the same `SQL.Sandbox`
connection, which is awkward because the `Server` is by
default started before any test setup runs.
For probably no good reason, `Ecto.Multi` functions are called
directly, except that `multi_opts` is used to rewrite their options
to an appropriate form, then `Sql.transaction` makes sure the right
`Ecto.Repo` is used.
2020/18/03
"""
def all(queryable, opts \\ [], short_name) do
run_using_institution_data(short_name, :all, {[queryable], opts})
end
def delete_all(queryable, opts \\ [], short_name) do
run_using_institution_data(short_name, :delete_all, {[queryable], opts})
end
def exists?(queryable, opts \\ [], short_name) do
run_using_institution_data(short_name, :exists?, {[queryable], opts})
end
def get(queryable, id, opts \\ [], short_name) do
run_using_institution_data(short_name, :get, {[queryable, id], opts}) end
def get!(queryable, id, opts \\ [], short_name) do
run_using_institution_data(short_name, :get!, {[queryable, id], opts}) end
def get_by(queryable, clauses, opts \\ [], short_name) do
run_using_institution_data(short_name, :get_by, {[queryable, clauses], opts})
end
def insert(struct_or_changeset, opts \\ [], short_name) do
run_using_institution_data(short_name, :insert, {[struct_or_changeset], opts})
end
def insert!(struct_or_changeset, opts \\ [], short_name) do
run_using_institution_data(short_name, :insert!, {[struct_or_changeset], opts})
end
def insert_all(schema_or_source, entries, opts \\ [], short_name) do
run_using_institution_data(short_name, :insert_all, {[schema_or_source, entries], opts})
end
def one(queryable, opts \\ [], short_name) do
run_using_institution_data(short_name, :one, {[queryable], opts})
end
def one!(queryable, opts \\ [], short_name) do
run_using_institution_data(short_name, :one!, {[queryable], opts})
end
def update(changeset, opts \\ [], short_name) do
run_using_institution_data(short_name, :update, {[changeset], opts}) end
defp run_using_institution_data(short_name, sql_command, data_for_server) do
command = Tuple.insert_at(data_for_server, 0, :adjusted_repo_call)
[repo, arglist] =
GenServer.call(server(short_name), command)
apply(repo, sql_command, arglist)
end
# ------------------------------------------------------------------------
# This may be a decent way of handling institution-specific SQL when institutions
# have separate databases, not just separate prefixes.
def multi_opts(short_name, opts \\ []) do
GenServer.call(server(short_name), {:multi_opts, opts})
end
# When multiple repos are used, this will need to forward to the
# `Server`.
def transaction(multi, _short_name) do
Repo.transaction(multi)
end
end
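# A hedged usage sketch: each wrapper above takes the institution's short
# name as the trailing argument, so call sites read like Repo calls plus a
# routing key. `MyApp.User` and "critter4us" are placeholders.
defmodule Crit.Sql.Example do
  def find_user(id, short_name \\ "critter4us") do
    Crit.Sql.get(MyApp.User, id, short_name)
  end
end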
|
lib/crit/sql.ex
| 0.604399
| 0.629205
|
sql.ex
|
starcoder
|
defmodule ExAliyunOts.Filter do
@moduledoc false
require ExAliyunOts.Const.ComparatorType, as: ComparatorType
require ExAliyunOts.Const.LogicOperator, as: LogicOperator
@operator_mapping %{
and: LogicOperator.and(),
not: LogicOperator.not(),
or: LogicOperator.or()
}
@comparator_mapping %{
==: ComparatorType.eq(),
>: ComparatorType.gt(),
>=: ComparatorType.ge(),
!=: ComparatorType.not_eq(),
<: ComparatorType.lt(),
<=: ComparatorType.le()
}
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/35193.html) | [English](https://www.alibabacloud.com/help/doc-detail/35193.html)
## Example
import MyApp.TableStore
get_row table_name1, [{"key", "key1"}],
columns_to_get: ["name", "level"],
filter: filter(
({"name", ignore_if_missing: true, latest_version_only: true} == var_name and "age" > 1) or
("class" == "1")
)
batch_get [
get(
table_name2,
[{"key", "key1"}],
filter: filter "age" >= 10
)
]
## Options
* `ignore_if_missing`, used when the attribute column may not exist.
  * if an attribute column does not exist and `ignore_if_missing: true` is set in the filter expression, that row is ignored (excluded) from the returned result;
  * if the attribute column exists, the returned result is unaffected whether true or false is set.
* `latest_version_only`, used when the attribute column has multiple versions.
  * if `latest_version_only: true` is set, only the latest version's value is checked for a match; this is the default;
  * if `latest_version_only: false` is set, the values of all versions are checked for a match.
"""
@doc row: :row
defmacro filter(filter_expr) do
build_filter(filter_expr)
end
@doc false
def build_filter({combinator, _, _} = ast) when combinator in [:and, :not, :or] do
composite_filter(ast)
end
def build_filter({combinator, _, _} = ast) when combinator in [:==, :>, :>=, :!=, :<, :<=] do
single_filter(ast)
end
def build_filter({binding_name, _, nil} = ast) when is_atom(binding_name) do
ast
end
def build_filter(ast) do
raise ExAliyunOts.RuntimeError, "Invalid filter expression: #{Macro.to_string(ast)}"
end
defp composite_filter({combinator, _, expressions}) do
sub_filters = Enum.map(expressions, &build_filter/1)
quote do
require ExAliyunOts.Const.FilterType
%ExAliyunOts.Var.Filter{
filter_type: ExAliyunOts.Const.FilterType.composite_column(),
filter: %ExAliyunOts.Var.CompositeColumnValueFilter{
combinator: unquote(@operator_mapping[combinator]),
sub_filters: unquote(sub_filters)
}
}
end
end
defp single_filter({comparator, _, [column_name, column_value]}) do
quote location: :keep do
require ExAliyunOts.Const.FilterType
comparator = unquote(@comparator_mapping[comparator])
case unquote(column_name) do
{column_name, column_options} ->
%ExAliyunOts.Var.Filter{
filter_type: ExAliyunOts.Const.FilterType.single_column(),
filter: %ExAliyunOts.Var.SingleColumnValueFilter{
comparator: comparator,
column_name: column_name,
column_value: unquote(column_value),
ignore_if_missing: Keyword.get(column_options, :ignore_if_missing, false),
latest_version_only: Keyword.get(column_options, :latest_version_only, true)
}
}
column_name ->
%ExAliyunOts.Var.Filter{
filter_type: ExAliyunOts.Const.FilterType.single_column(),
filter: %ExAliyunOts.Var.SingleColumnValueFilter{
comparator: comparator,
column_name: column_name,
column_value: unquote(column_value),
ignore_if_missing: false,
latest_version_only: true
}
}
end
end
end
end
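# A hedged sketch of the macro above: importing the module lets a plain
# comparison expression expand into the %ExAliyunOts.Var.Filter{} structs
# built by single_filter/1 and composite_filter/1.
defmodule ExAliyunOts.Filter.Example do
  import ExAliyunOts.Filter

  def adults_named(name) do
    filter("age" >= 18 and "name" == name)
  end
end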
|
lib/ex_aliyun_ots/filter.ex
| 0.673514
| 0.459197
|
filter.ex
|
starcoder
|
defmodule OrderWatchdog do
@moduledoc """
A module for keeping track of the hall orders which are in the process of being handled, and for triggering a resend of orders that stay unhandled for too long.
"""
use GenServer, restart: :permanent
require Logger
@baseWaitingTime Application.compile_env(:elevator, :orderWatchdogWaitingTime)
@randomInterval Application.compile_env(:elevator, :orderWatchdogRandomInterval)
# Public functions
# --------------------------------------------
@doc "Initializes the `OrderWatchdog` by calling `multiTriggerImpatientOrderPush/0`."
def init(orders) do
multiTriggerImpatientOrderPush()
{:ok, orders}
end
@doc "Starts the `OrderWatchdog` in a supervision tree, see `Supervisor`."
def start_link([]), do: GenServer.start_link(__MODULE__, [], name: __MODULE__)
# API
# --------------------------------------------
@doc "Requests the `OrderWatchdog`s to add an order, and begin a watchdog timer on it for resending."
def addOrder(order), do: GenServer.multi_call(__MODULE__, {:addOrder, order})
@doc "Signals to the `OrderWatchdog`s that a floor has been cleared, which triggers a deletion of all orders in the `OrderWatchdog`s handled by the signalling node."
def floorFinished(floor),
do: Enum.map([:up, :down, :cab], fn type -> finishedOrder({floor, type, node()}) end)
@doc "Triggers all `OrderWatchdog`s to push their list of orders to the other n-1 `OrderWatchdog`s, effectively synchronizing the `OrderWatchdog`s."
def multiTriggerImpatientOrderPush(),
do: GenServer.abcast(__MODULE__, :triggerImpatientOrderPush)
# Calls/Casts
# --------------------------------------------
# Handles the OrderWatchdog being asked to watch a new order. Sends a delayed message to itself to check on the order.
def handle_call({:addOrder, newOrder}, _from, orders) do
unless Enum.any?(orders, fn {_timestamp, order} -> order == newOrder end) do
newEntry = {:os.system_time(:milli_seconds), newOrder}
Process.send_after(
__MODULE__,
{:orderExpired, newEntry},
@baseWaitingTime + Enum.random(0..@randomInterval)
)
{:reply, :ok, [newEntry | orders]}
else
{:reply, :ok, orders}
end
end
# Handles the OrderWatchdog being informed that a given order is finished, triggering it to remove it from its list.
def handle_cast({:finishedOrder, finishedOrder}, orders) do
remainingOrders = Enum.reject(orders, fn {_timestamp, order} -> order == finishedOrder end)
{:noreply, remainingOrders}
end
# Handles someone pushing their impatientOrderList to the OrderWatchdog, triggering it to add the discrepancy to its list and watch them.
def handle_cast({:impatientOrderPush, remoteList}, localList) do
fullList = mergeOrderLists([remoteList, localList])
fullList
|> Enum.reject(fn entry -> entry in localList end)
|> Enum.map(fn entry ->
Process.send_after(
__MODULE__,
{:orderExpired, entry},
@baseWaitingTime + Enum.random(0..@randomInterval)
)
end)
{:noreply, fullList}
end
# Handles someone triggering an impatient order push from the OrderWatchdog, pushing its list of impatient orders to all other reachable OrderWatchdogs.
def handle_cast(:triggerImpatientOrderPush, orders) do
GenServer.abcast(Node.list(), __MODULE__, {:impatientOrderPush, orders})
{:noreply, orders}
end
# Gets a nudge to check whether an expired order has been dealt with in the meantime. If not, it re-issues it as a button press to the OrderDistributor.
def handle_info({:orderExpired, {timestamp, impatientOrder}}, orderList) do
if Enum.any?(orderList, fn entry -> entry == {timestamp, impatientOrder} end) do
finishedOrder(impatientOrder)
with {floor, type, _handledBy} <- impatientOrder do
OrderDistributor.buttonPressed({floor, type})
end
end
{:noreply, orderList}
end
# Private functions
# --------------------------------------------
# Signals to all OrderWatchdogs that a specific impatient order is finished, and should be removed.
defp finishedOrder(order), do: GenServer.abcast(__MODULE__, {:finishedOrder, order})
# Merges different order lists, concatenating them and removing duplicates.
defp mergeOrderLists(orderLists) do
orderLists
|> Enum.concat()
|> Enum.sort(fn el1, el2 -> el1 >= el2 end)
|> Enum.uniq()
end
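# Hedged illustration of mergeOrderLists/1 (not used by the watchdog
# itself): duplicates across lists collapse to one entry, newest first.
@doc false
def mergeOrderListsExample do
  mergeOrderLists([[{2, :a}], [{1, :b}, {2, :a}]])
  # => [{2, :a}, {1, :b}]
end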
end
|
elevator/lib/orderWatchdog.ex
| 0.655777
| 0.47725
|
orderWatchdog.ex
|
starcoder
|
defmodule Cassette.Plug.AuthenticationHandler do
@moduledoc """
Behaviour and macro module to define callbacks for the authentication handlers the plug uses.
Most of this works out-of-the-box, but it might be interesting to override
`Cassette.Plug.AuthenticationHandler.invalid_authentication/2` and present a more friendy error page
```elixir
defmodule MyErrorHandler do
use Cassette.Plug.AuthenticationHandler
def invalid_authentication(conn, _options) do
render(conn, "error")
end
end
```
And while plugging in your router:
```elixir
plug Cassette.Plug, handler: MyErrorHandler
```
Check `Cassette.Plug.DefaultHandler` for the default behaviour.
"""
@doc """
Initializes this handler with the given options.
They will be forwarded to the other functions.
"""
@callback init(args :: term) :: term
@doc """
Called to compute the service that must be authenticated against.
Usually this is the URL of the page the user is trying to access and may be computed using values in `conn`
"""
@callback service(conn :: Plug.Conn.t, options :: term) :: Plug.Conn.t
@doc """
Called when there is no authentication in the request (i.e., no `ticket` in the query string).
The usual implementation is to redirect to CAS.
"""
@callback unauthenticated(conn :: Plug.Conn.t, options :: term) :: Plug.Conn.t
@doc """
Called when authentication is provided but fails (i.e., ticket is no longer valid or is invalid).
This might be your Forbidden page.
"""
@callback invalid_authentication(conn :: Plug.Conn.t, options :: term) :: Plug.Conn.t
@doc """
Called to extract the current authenticated user and/or the authentication token from `conn`
"""
@callback user_or_token(conn :: Plug.Conn.t, options :: term) :: {Cassette.User.t | nil, {:ok, String.t} | :error}
@doc """
Called when successfully authenticated the user on `conn`
"""
@callback user_authenticated(conn :: Plug.Conn.t, user :: Cassette.User.t, options :: term) :: Plug.Conn.t
@spec default :: Cassette.Plug.AuthenticationHandler
@doc """
Returns the default implementation for this behaviour
"""
def default do
Cassette.Plug.DefaultHandler
end
defmacro __using__(_options) do
quote do
@behaviour Cassette.Plug.AuthenticationHandler
import Plug.Conn
def init(options), do: options
@doc """
Builds the current request url to be used as the CAS service
"""
def service(conn, options) do
url(conn, options)
end
@doc """
Redirects the user to the cas login page with the service computed by `service/2`
"""
def unauthenticated(conn, options) do
cassette = Keyword.get(options, :cassette, Cassette)
location = "#{cassette.config.base_url}/login?service=#{URI.encode(service(conn, options))}"
conn |> put_resp_header("location", location) |> send_resp(307, "") |> halt
end
@doc """
Renders a Forbidden response
"""
def invalid_authentication(conn, _options) do
conn |> send_resp(403, "Forbidden") |> halt
end
@doc """
Get the current user from session and the ticket from the query string
"""
def user_or_token(conn, _options) do
{get_session(conn, "cas_user"), Map.fetch(conn.query_params, "ticket")}
end
@doc """
Stores the current user in the session under the `cas_user` key
"""
def user_authenticated(conn, user, _options) do
conn |> put_session("cas_user", user)
end
@spec url(Plug.Conn.t, term) :: String.t
@doc """
Computes the service from the URL requested in the `conn` argument.
It will remove the `ticket` from the query string parameters, since the ticket was not part of the URL it was generated for.
"""
def url(conn, _options) do
["#{conn.scheme}://#{conn.host}#{url_port_string(conn)}#{conn.request_path}", query_string(conn)]
|> Enum.reject(fn(v) -> is_nil(v) || v == "" end)
|> Enum.join("?")
end
@spec query_string(Plug.Conn.t) :: String.t
defp query_string(conn = %Plug.Conn{query_params: %Plug.Conn.Unfetched{aspect: :query_params}}) do
query_string(conn |> Plug.Conn.fetch_query_params)
end
defp query_string(conn) do
conn.query_params
|> Enum.reject(fn({k, _}) -> k == "ticket" end)
|> URI.encode_query
end
@spec url_port_string(Plug.Conn.t) :: String.t
defp url_port_string(%Plug.Conn{port: 80, scheme: :http}), do: ""
defp url_port_string(%Plug.Conn{port: 443, scheme: :https}), do: ""
defp url_port_string(conn = %Plug.Conn{}), do: ":#{conn.port}"
defoverridable [init: 1, user_or_token: 2, service: 2, unauthenticated: 2, invalid_authentication: 2, user_authenticated: 3]
end
end
end
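# A hedged sketch of overriding one of the defoverridable callbacks above:
# a handler that forces https in the CAS service URL regardless of the
# incoming scheme.
defmodule Cassette.Plug.ForceHttpsHandler do
  use Cassette.Plug.AuthenticationHandler

  def service(conn, _options) do
    "https://#{conn.host}#{conn.request_path}"
  end
end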
|
lib/cassette/plug/authentication_handler.ex
| 0.879955
| 0.767167
|
authentication_handler.ex
|
starcoder
|
defmodule Explorer.PolarsBackend.DataFrame do
@moduledoc false
alias Explorer.DataFrame, as: DataFrame
alias Explorer.PolarsBackend.Native
alias Explorer.PolarsBackend.Series, as: PolarsSeries
alias Explorer.PolarsBackend.Shared
alias Explorer.Series, as: Series
@type t :: %__MODULE__{resource: binary(), reference: reference()}
defstruct resource: nil, reference: nil
@behaviour Explorer.Backend.DataFrame
# IO
@impl true
def read_csv(
filename,
_names,
dtypes,
delimiter,
null_character,
skip_rows,
header?,
encoding,
max_rows,
with_columns
) do
max_rows = if max_rows == Inf, do: nil, else: max_rows
df =
Native.df_read_csv(
filename,
1000,
header?,
max_rows,
skip_rows,
nil,
delimiter,
true,
with_columns,
dtypes,
null_character,
encoding
)
case df do
{:ok, df} -> {:ok, Shared.to_dataframe(df)}
{:error, error} -> {:error, error}
end
end
@impl true
def write_csv(%DataFrame{data: df}, filename, header?, delimiter) do
<<delimiter::utf8>> = delimiter
case Native.df_to_csv_file(df, filename, header?, delimiter) do
{:ok, _} -> {:ok, filename}
{:error, error} -> {:error, error}
end
end
@impl true
def to_binary(%DataFrame{} = df, header?, delimiter) do
<<delimiter::utf8>> = delimiter
Shared.apply_native(df, :df_to_csv, [header?, delimiter])
end
# Conversion
@impl true
def from_map(map) do
series_list = Enum.map(map, &from_map_handler/1)
{:ok, df} = Native.df_new(series_list)
Shared.to_dataframe(df)
end
defp from_map_handler({key, value}) when is_atom(key) do
colname = Atom.to_string(key)
from_map_handler({colname, value})
end
defp from_map_handler({colname, value}) when is_list(value) do
series = Series.from_list(value)
from_map_handler({colname, series})
end
defp from_map_handler({colname, %Series{} = series}) when is_binary(colname) do
series |> PolarsSeries.rename(colname) |> Shared.to_polars_s()
end
@impl true
def to_map(%DataFrame{data: df}, convert_series?) do
Enum.reduce(df, %{}, &to_map_reducer(&1, &2, convert_series?))
end
defp to_map_reducer(series, acc, convert_series?) do
series_name =
series
|> Native.s_name()
|> then(fn {:ok, name} ->
String.to_atom(name)
end)
series = Shared.to_series(series)
series = if convert_series?, do: PolarsSeries.to_list(series), else: series
Map.put(acc, series_name, series)
end
# Introspection
@impl true
def names(df), do: Shared.apply_native(df, :df_columns)
@impl true
def dtypes(df), do: df |> Shared.apply_native(:df_dtypes) |> Enum.map(&Shared.normalise_dtype/1)
@impl true
def shape(df), do: Shared.apply_native(df, :df_shape)
@impl true
def n_rows(%DataFrame{groups: []} = df) do
{rows, _cols} = shape(df)
rows
end
def n_rows(%DataFrame{groups: groups} = df) do
groupby = Shared.apply_native(df, :df_groups, [groups])
n =
groupby
|> pull("groups")
|> Series.to_list()
|> Enum.map(fn indices -> df |> ungroup([]) |> take(indices) |> n_rows() end)
groupby |> select(["groups"], :drop) |> mutate(n: n) |> group_by(groups)
end
@impl true
def n_cols(df), do: Shared.apply_native(df, :df_width)
# Single table verbs
@impl true
def head(df, rows), do: Shared.apply_native(df, :df_head, [rows])
@impl true
def tail(df, rows), do: Shared.apply_native(df, :df_tail, [rows])
@impl true
def select(df, columns, :keep) when is_list(columns),
do: Shared.apply_native(df, :df_select, [columns])
def select(%{groups: groups} = df, columns, :drop) when is_list(columns),
do: df |> Shared.to_polars_df() |> drop(columns) |> Shared.to_dataframe(groups)
defp drop(polars_df, colnames),
do:
Enum.reduce(colnames, polars_df, fn name, df ->
{:ok, df} = Native.df_drop(df, name)
df
end)
@impl true
def filter(df, %Series{} = mask),
do: Shared.apply_native(df, :df_filter, [Shared.to_polars_s(mask)])
@impl true
def mutate(%DataFrame{groups: []} = df, columns) do
columns |> Enum.reduce(df, &mutate_reducer/2) |> Shared.to_dataframe()
end
def mutate(%DataFrame{groups: groups} = df, columns) do
df
|> Shared.apply_native(:df_groups, [groups])
|> pull("groups")
|> Series.to_list()
|> Enum.map(fn indices -> df |> ungroup([]) |> take(indices) |> mutate(columns) end)
|> Enum.reduce(fn df, acc -> Shared.apply_native(acc, :df_vstack, [df.data]) end)
|> group_by(groups)
end
defp mutate_reducer({colname, %Series{} = series}, %DataFrame{} = df) when is_binary(colname) do
check_series_length(df, series, colname)
series = series |> PolarsSeries.rename(colname) |> Shared.to_polars_s()
Shared.apply_native(df, :df_with_column, [series])
end
defp mutate_reducer({colname, callback}, %DataFrame{} = df)
when is_function(callback),
do: mutate_reducer({colname, callback.(df)}, df)
defp mutate_reducer({colname, values}, df) when is_list(values),
do: mutate_reducer({colname, Series.from_list(values)}, df)
defp mutate_reducer({colname, value}, %DataFrame{} = df)
when is_binary(colname),
do: mutate_reducer({colname, value |> List.duplicate(n_rows(df))}, df)
defp check_series_length(df, series, colname) do
df_len = n_rows(df)
s_len = Series.length(series)
if s_len != df_len,
do:
raise(ArgumentError,
message:
"Length of new column #{colname} (#{s_len}) must match number of rows in the " <>
"dataframe (#{df_len})."
)
end
@impl true
def arrange(%DataFrame{groups: []} = df, columns),
do:
Enum.reduce(columns, df, fn {direction, column}, df ->
Shared.apply_native(df, :df_sort, [column, direction == :desc])
end)
def arrange(%DataFrame{groups: groups} = df, columns) do
df
|> Shared.apply_native(:df_groups, [groups])
|> pull("groups")
|> Series.to_list()
|> Enum.map(fn indices -> df |> ungroup([]) |> take(indices) |> arrange(columns) end)
|> Enum.reduce(fn df, acc -> Shared.apply_native(acc, :df_vstack, [df.data]) end)
|> group_by(groups)
end
@impl true
def distinct(%DataFrame{groups: []} = df, columns, true),
do: Shared.apply_native(df, :df_drop_duplicates, [true, columns])
def distinct(%DataFrame{groups: []} = df, columns, false),
do:
df
|> Shared.apply_native(:df_drop_duplicates, [true, columns])
|> select(columns, :keep)
def distinct(%DataFrame{groups: groups} = df, columns, keep_all?) do
df
|> Shared.apply_native(:df_groups, [groups])
|> pull("groups")
|> Series.to_list()
|> Enum.map(fn indices ->
df |> ungroup([]) |> take(indices) |> distinct(columns, keep_all?)
end)
|> Enum.reduce(fn df, acc -> Shared.apply_native(acc, :df_vstack, [df.data]) end)
|> group_by(groups)
end
@impl true
def rename(df, names) when is_list(names),
do: Shared.apply_native(df, :df_set_column_names, [names])
@impl true
def dummies(df, names),
do:
df
|> select(names, :keep)
|> Shared.apply_native(:df_to_dummies)
@impl true
def sample(df, n, with_replacement?, seed) when is_integer(n) do
indices =
df
|> n_rows()
|> Native.s_seedable_random_indices(n, with_replacement?, seed)
take(df, indices)
end
@impl true
def pull(df, column), do: Shared.apply_native(df, :df_column, [column])
@impl true
def slice(df, offset, length), do: Shared.apply_native(df, :df_slice, [offset, length])
@impl true
def take(df, row_indices), do: Shared.apply_native(df, :df_take, [row_indices])
@impl true
def drop_nil(df, columns), do: Shared.apply_native(df, :df_drop_nulls, [columns])
@impl true
def pivot_longer(df, id_cols, value_cols, names_to, values_to) do
df = Shared.apply_native(df, :df_melt, [id_cols, value_cols])
df
|> names()
|> Enum.map(fn
"variable" -> names_to
"value" -> values_to
name -> name
end)
|> then(&rename(df, &1))
end
@impl true
def pivot_wider(df, id_cols, names_from, values_from, names_prefix) do
df = Shared.apply_native(df, :df_pivot_wider, [id_cols, names_from, values_from])
df =
df
|> names()
|> Enum.map(fn name ->
if name in id_cols, do: name, else: names_prefix <> name
end)
|> then(&rename(df, &1))
df
end
# Two table verbs
@impl true
def join(left, right, on, :right), do: join(right, left, on, :left)
def join(left, right, on, how) do
how = Atom.to_string(how)
{left_on, right_on} = Enum.reduce(on, {[], []}, &join_on_reducer/2)
Shared.apply_native(left, :df_join, [Shared.to_polars_df(right), left_on, right_on, how])
end
defp join_on_reducer(colname, {left, right}) when is_binary(colname),
do: {[colname | left], [colname | right]}
defp join_on_reducer({new_left, new_right}, {left, right}),
do: {[new_left | left], [new_right | right]}
# Groups
@impl true
def group_by(%DataFrame{groups: groups} = df, new_groups),
do: %DataFrame{df | groups: groups ++ new_groups}
@impl true
def ungroup(df, []), do: %DataFrame{df | groups: []}
def ungroup(df, groups),
do: %DataFrame{df | groups: Enum.filter(df.groups, &(&1 not in groups))}
@impl true
def summarise(%DataFrame{groups: groups} = df, with_columns) do
with_columns =
Enum.map(with_columns, fn {key, values} -> {key, Enum.map(values, &Atom.to_string/1)} end)
df
|> Shared.apply_native(:df_groupby_agg, [groups, with_columns])
|> ungroup([])
|> DataFrame.arrange(groups)
end
end
defimpl Enumerable, for: Explorer.PolarsBackend.DataFrame do
alias Explorer.PolarsBackend.Native
alias Explorer.PolarsBackend.Series, as: PolarsSeries
def count(df), do: Native.df_width(df)
def slice(df) do
{:ok, size} = count(df)
{:ok, size, &slicing_fun(df, &1, &2)}
end
defp slicing_fun(df, start, length) do
for idx <- start..(start + length - 1) do
{:ok, df} = Native.df_select_at_idx(df, idx)
df
end
end
def reduce(_df, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(df, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(df, &1, fun)}
def reduce(df, {:cont, acc}, fun) do
case Native.df_columns(df) do
{:ok, []} ->
{:done, acc}
{:ok, [head | _tail]} ->
{:ok, next_col} = Native.df_column(df, head)
{:ok, df} = Native.df_drop(df, head)
reduce(df, fun.(next_col, acc), fun)
end
end
def member?(df, %PolarsSeries{} = series) do
{:ok, columns} = Native.df_get_columns(df)
Enum.any?(columns, &Native.s_series_equal(&1, series, false))
end
end
defimpl Inspect, for: Explorer.PolarsBackend.DataFrame do
alias Explorer.PolarsBackend.Native
def inspect(df, _opts) do
case Native.df_as_str(df) do
{:ok, str} -> str
{:error, error} -> raise "#{error}"
end
end
end
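# A hedged sketch of the from_map/1 shapes handled above: atom or string
# keys, plain lists or prebuilt Series, all normalized by from_map_handler/1.
defmodule Explorer.PolarsBackend.DataFrame.Example do
  def build do
    Explorer.PolarsBackend.DataFrame.from_map(%{
      :id => [1, 2, 3],
      "name" => Explorer.Series.from_list(["a", "b", "c"])
    })
  end
end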
|
lib/explorer/polars_backend/data_frame.ex
| 0.789234
| 0.441854
|
data_frame.ex
|
starcoder
|
defmodule ResourceManager.Credentials.TOTPs do
@moduledoc """
TOTPs (Time-based One-Time Passwords) are a type of credential used by a subject as
an implementation of two-factor authentication.
This type of credential generates a one-time password which uses the current time as
a source of uniqueness.
"""
use ResourceManager.Domain, schema_model: ResourceManager.Credentials.Schemas.TOTP
alias ResourceManager.Credentials.Schemas.TOTP
@doc """
Generate Time-Based One-Time Password totp verification code.
In order to generate the correct verification code we have to follow the steps below:
- Receive the user's local timestamp (or use the server's);
- Decode the TOTP secret in base32 (it's encoded on totp creation);
- Get the moving factor based on the configured period and the actual or given timestamp;
- Generate the HMAC using the secret and the moving factor;
- Truncate the HMAC in order to get the last 31 bits, extracting the offset first;
- Parse the truncated bits into a string with the size of the configured digits;
## Options:
- `time` - The user local time in unix format. Default is `System.os_time(:second)`
"""
@spec generate_totp_code(totp :: TOTP.t(), opts :: Keyword.t()) :: String.t()
def generate_totp_code(%TOTP{} = totp, opts \\ []) do
time = opts[:time] || System.os_time(:second)
# Decoding the secret in base32
key =
totp.secret
|> String.upcase()
|> Base.decode32!(padding: false)
# Extracting moving factor
moving_factor = <<Integer.floor_div(time, totp.period)::64>>
# Generating hmac from secret and moving factor
hmac = :crypto.mac(:hmac, :sha, key, moving_factor)
# Truncating hmac and extracting verification code
<<_::19-binary, _::4, offset::4>> = hmac
<<_::size(offset)-binary, p::4-binary, _::binary>> = hmac
<<_::1, truncated_bits::31>> = p
# Parsing truncated bits into verification code
truncated_bits
|> rem(1_000_000)
|> to_string()
|> String.pad_leading(totp.digits, "0")
end
@doc "Generates an QRCode for the given totp uri encoded in base64"
@spec generate_qr_code_base64(totp :: TOTP.t()) :: String.t()
def generate_qr_code_base64(%TOTP{} = totp) do
totp.otp_uri
|> EQRCode.encode()
|> EQRCode.png()
|> Base.encode64(padding: false)
end
@doc "Checks if the given totp code matchs the secret"
@spec valid_code?(totp :: TOTP.t(), totp_code :: String.t()) :: boolean()
def valid_code?(%TOTP{} = totp, totp_code) when is_binary(totp_code),
do: generate_totp_code(totp) == totp_code
end
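# A hedged, self-contained sketch of the same HOTP truncation used by
# generate_totp_code/2 above, fixed to 6 digits and a 30-second period;
# the secret here is an illustrative base32 string.
defmodule TotpExample do
  def code(secret_base32, unix_time) do
    key = secret_base32 |> String.upcase() |> Base.decode32!(padding: false)
    moving_factor = <<Integer.floor_div(unix_time, 30)::64>>
    hmac = :crypto.mac(:hmac, :sha, key, moving_factor)
    # offset is the low nibble of the last byte of the 20-byte SHA-1 HMAC
    <<_::19-binary, _::4, offset::4>> = hmac
    <<_::size(offset)-binary, p::4-binary, _::binary>> = hmac
    <<_::1, truncated::31>> = p
    truncated |> rem(1_000_000) |> to_string() |> String.pad_leading(6, "0")
  end
end
# TotpExample.code("JBSWY3DPEHPK3PXP", System.os_time(:second))
# #=> a six-digit string such as "492039"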
|
apps/resource_manager/lib/credentials/totps.ex
| 0.787646
| 0.602266
|
totps.ex
|
starcoder
|
defmodule Absinthe.Schema do
import Absinthe.Schema.Notation
@moduledoc """
Define a GraphQL schema.
See also `Absinthe.Schema.Notation` for a reference of the macros imported by
this module available to build types for your schema.
## Basic Usage
To define a schema, `use Absinthe.Schema` within
a module. This marks your module as adhering to the
`Absinthe.Schema` behaviour, and sets up some macros
and utility functions for your use:
```
defmodule App.Schema do
use Absinthe.Schema
# ... define it here!
end
```
Now, define a `query` (and optionally, `mutation`
and `subscription`).
We'll define a `query` that has one field, `item`, to support
querying for an item record by its ID:
```
# Just for the example. You're probably using Ecto or
# something much more interesting than a module attribute-based
# database!
@fake_db %{
"foo" => %{id: "foo", name: "Foo", value: 4},
"bar" => %{id: "bar", name: "Bar", value: 5}
}
query do
@desc "Get an item by ID"
field :item, :item do
@desc "The ID of the item"
arg :id, type: :id
resolve fn %{id: id}, _ ->
{:ok, Map.get(@fake_db, id)}
end
end
end
```
For more information on object types (especially how the `resolve`
function works above), see `Absinthe.Type.Object`.
You may also notice we've declared that the resolved value of the field
to be of `type: :item`. We now need to define exactly what an `:item` is,
and what fields it contains.
```
@desc "A valuable Item"
object :item do
field :id, :id
@desc "The item's name"
field :name, :string
field :value, :integer, description: "Recently appraised value"
end
```
We can also load types from other modules using the `import_types`
macro:
```
defmodule App.Schema do
use Absinthe.Schema
import_types App.Schema.Scalars
import_types App.Schema.Objects
# ... schema definition
end
```
Our `:item` type above could then move into `App.Schema.Objects`:
```
defmodule App.Schema.Objects do
use Absinthe.Schema.Notation
object :item do
# ... type definition
end
# ... other objects!
end
```
## Default Resolver
By default, if a `resolve` function is not provided for a field, Absinthe
will attempt to extract the value of the field using `Map.get/2` with the
(atom) name of the field.
You can change this behavior by setting your own custom default resolve
function in your schema. For example, given we have a field, `name`:
```
field :name, :string
```
And we're trying to extract values from a horrible backend API that gives us
maps with uppercase (!) string keys:
```
%{"NAME" => "A name"}
```
Here's how we could set our custom resolver to expect those keys:
```
default_resolve fn
_, %{source: source, definition: %{name: name}} when is_map(source) ->
{:ok, Map.get(source, String.upcase(name))}
_, _ ->
{:ok, nil}
end
```
Note this will now act as the default resolver for all fields in our schema
without their own `resolve` function.
"""
@typedoc """
A module defining a schema.
"""
@type t :: atom
alias Absinthe.Type
alias Absinthe.Language
alias __MODULE__
defmacro __using__(opts \\ []) do
quote do
use Absinthe.Schema.Notation, unquote(opts)
import unquote(__MODULE__), only: :macros
import_types Absinthe.Type.BuiltIns
@after_compile unquote(__MODULE__)
end
end
@doc false
def __after_compile__(env, _bytecode) do
[
env.module.__absinthe_errors__,
Schema.Rule.check(env.module)
]
|> List.flatten
|> case do
[] ->
nil
details ->
raise Absinthe.Schema.Error, details
end
end
@default_query_name "RootQueryType"
@doc """
Defines a root Query object
"""
defmacro query(raw_attrs, [do: block]) do
attrs = raw_attrs
|> Keyword.put_new(:name, @default_query_name)
Absinthe.Schema.Notation.scope(__CALLER__, :object, :query, attrs, block)
end
@doc """
Defines a root Query object
"""
defmacro query([do: block]) do
Absinthe.Schema.Notation.scope(__CALLER__, :object, :query, [name: @default_query_name], block)
end
@default_mutation_name "RootMutationType"
@doc """
Defines a root Mutation object
"""
defmacro mutation(raw_attrs, [do: block]) do
attrs = raw_attrs
|> Keyword.put_new(:name, @default_mutation_name)
Absinthe.Schema.Notation.scope(__CALLER__, :object, :mutation, attrs, block)
end
@doc """
Defines a root Mutation object
"""
defmacro mutation([do: block]) do
Absinthe.Schema.Notation.scope(__CALLER__, :object, :mutation, [name: @default_mutation_name], block)
end
@default_subscription_name "RootSubscriptionType"
@doc """
Defines a root Subscription object
"""
defmacro subscription(raw_attrs, [do: block]) do
attrs = raw_attrs
|> Keyword.put_new(:name, @default_subscription_name)
Absinthe.Schema.Notation.scope(__CALLER__, :object, :subscription, attrs, block)
end
@doc """
Defines a root Subscription object
"""
defmacro subscription([do: block]) do
Absinthe.Schema.Notation.scope(__CALLER__, :object, :subscription, [name: @default_subscription_name], block)
end
@doc """
Defines a custom default resolve function for the schema.
"""
defmacro default_resolve(func) do
Module.put_attribute(__CALLER__.module, :absinthe_custom_default_resolve, func)
:ok
end
# Lookup a directive that in used by/available to a schema
@doc """
Lookup a directive.
"""
@spec lookup_directive(t, atom | binary) :: Type.Directive.t | nil
def lookup_directive(schema, name) do
schema.__absinthe_directive__(name)
end
@doc """
Lookup a type by name, identifier, or by unwrapping.
"""
@spec lookup_type(atom, Type.wrapping_t | Type.t | Type.identifier_t, Keyword.t) :: Type.t | nil
def lookup_type(schema, type, options \\ [unwrap: true]) do
cond do
Type.wrapped?(type) ->
if Keyword.get(options, :unwrap) do
lookup_type(schema, type |> Type.unwrap)
else
type
end
is_atom(type) ->
schema.__absinthe_type__(type)
is_binary(type) ->
schema.__absinthe_type__(type)
true ->
type
end
end
@doc """
List all types on a schema
"""
@spec types(t) :: [Type.t]
def types(schema) do
schema.__absinthe_types__
|> Map.keys
|> Enum.map(&lookup_type(schema, &1))
end
@doc """
List all directives on a schema
"""
@spec directives(t) :: [Type.Directive.t]
def directives(schema) do
schema.__absinthe_directives__
|> Map.keys
|> Enum.map(&lookup_directive(schema, &1))
end
@doc """
List all implementors of an interface on a schema
"""
@spec implementors(t, atom) :: [Type.Object.t]
def implementors(schema, ident) when is_atom(ident) do
schema.__absinthe_interface_implementors__
|> Map.get(ident, [])
|> Enum.map(&lookup_type(schema, &1))
end
def implementors(schema, %Type.Interface{} = iface) do
implementors(schema, iface.__reference__.identifier)
end
@doc false
@spec type_from_ast(t, Language.type_reference_t) :: Absinthe.Type.t | nil
def type_from_ast(schema, %Language.NonNullType{type: inner_type}) do
case type_from_ast(schema, inner_type) do
nil -> nil
type -> %Type.NonNull{of_type: type}
end
end
def type_from_ast(schema, %Language.ListType{type: inner_type}) do
case type_from_ast(schema, inner_type) do
nil -> nil
type -> %Type.List{of_type: type}
end
end
def type_from_ast(schema, ast_type) do
Schema.types(schema)
|> Enum.find(fn
%{name: name} ->
name == ast_type.name
end)
end
end
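# A hedged minimal schema using the macros above; it passes the
# __after_compile__ rule checks as long as a root query type is defined.
defmodule Absinthe.ExampleSchema do
  use Absinthe.Schema

  query do
    field :hello, :string do
      resolve fn _args, _info -> {:ok, "world"} end
    end
  end
end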
|
lib/absinthe/schema.ex
| 0.906348
| 0.852353
|
schema.ex
|
starcoder
|
defmodule Monad.State do
@moduledoc """
Like `Monad.Reader`, the state monad can share an environment between
different operations. Additionally, it can store an arbitrary value.
## Example
iex> use Monad.Operators
iex> env = %{dev: %{url: "http://www.example.com/dev"}, prod: %{url: "https://www.example.com/prod"}}
iex> s = state(&{env, &1})
...> ~>> (fn x -> state(&{x[&1], &1}) end)
...> ~>> (fn x -> state(&{x[:url], &1}) end)
...> ~>> (fn x -> state(&{x <> "/index.html", &1}) end)
iex> fun = runState s
iex> fun.(:dev)
{"http://www.example.com/dev/index.html", :dev}
iex> fun.(:prod)
{"https://www.example.com/prod/index.html", :prod}
iex> import Curry
iex> state = curry(& &1 + &2) <|> state(& {5, &1 * 2}) <~> state(& {9, &1 * &1})
iex> fun = runState state
iex> fun.(2)
{14, 16}
iex> fun.(-12)
{14, 576}
"""
use Monad.Behaviour
@opaque t :: %__MODULE__{fun: (term -> {term, term})}
@doc false
defstruct fun: nil
@doc """
Wraps `fun` in a state monad.
iex> s = state &{&1 * 2, "Doubled"}
iex> s.fun.(42)
{84, "Doubled"}
"""
@spec state((term -> {term, term})) :: t
def state(fun) when is_function(fun, 1), do: %Monad.State{fun: fun}
@doc """
Returns the function wrapped in the state monad.
iex> s = state &{&1 * 2, "Doubled"}
iex> fun = runState s
iex> fun.(42)
{84, "Doubled"}
"""
@spec runState(t) :: (term -> {term, term})
def runState(state), do: state.fun
@doc """
Callback implementation of `Monad.Behaviour.return/1`.
Converts `value` into function that takes some state and returns the value
as-is and the state. Then the function is wrapped into a state monad.
iex> s = return 42
iex> fun = runState s
iex> fun.("My state")
{42, "My state"}
"""
@spec return(term) :: t
def return(value), do: state(fn s -> {value, s} end)
@doc """
Callback implementation of `Monad.Behaviour.bind/2`.
iex> s1 = state &{&1 <> "!", &1}
iex> s2 = bind(s1, fn x -> state(fn s -> {x <> "?", s} end) end)
iex> fun = runState s2
iex> fun.("State")
{"State!?", "State"}
"""
@spec bind(t, (term -> t)) :: t
def bind(state_monad, fun) when is_function(fun, 1) do
state(fn x ->
{val1, state1} = (state_monad |> runState).(x)
{val2, state2} = (val1 |> fun.() |> runState).(state1)
{val2, state2}
end)
end
end
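# A hedged sketch composing the primitives above: thread a counter through
# two binds, capturing the state seen before each increment.
defmodule Monad.StateExample do
  import Monad.State

  def bump_twice do
    state(fn n -> {n, n + 1} end)
    |> bind(fn first -> state(fn n -> {{first, n}, n + 1} end) end)
    |> runState()
  end
end
# Monad.StateExample.bump_twice().(0) #=> {{0, 1}, 2}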
|
lib/monad/state.ex
| 0.857857
| 0.530601
|
state.ex
|
starcoder
|
defmodule Goth.Token do
@moduledoc ~S"""
Interface for retrieving access tokens, from either the `Goth.TokenStore`
or the Google token API. The first request for a token will hit the API,
but subsequent requests will retrieve the token from Goth's token store.
Goth will automatically refresh access tokens in the background as necessary,
10 seconds before they are to expire. After the initial synchronous request to
retrieve an access token, your application should never have to wait for a
token again.
The first call to retrieve an access token for a particular scope blocks while
it hits the API. Subsequent calls pull from the `Goth.TokenStore`,
and should return immediately
iex> Goth.Token.for_scope("https://www.googleapis.com/auth/pubsub")
{:ok, %Goth.Token{token: "<PASSWORD>",
type: "Bearer",
scope: "https://www.googleapis.com/auth/pubsub",
expires: 1453653825,
account: :default}}
If the passed credentials contain multiple service accounts, you can change
the first parameter to be {client_email, scopes} to specify which account
to target.
iex> Goth.Token.for_scope({"<EMAIL>", "https://www.googleapis.com/auth/pubsub"})
{:ok, %Goth.Token{token: "<PASSWORD>",
type: "Bearer",
scope: "https://www.googleapis.com/auth/pubsub",
expires: 1453653825,
account: "<EMAIL>"}}
For using the token on subsequent requests to the Google API, just concatenate
the `type` and `token` to create the authorization header. An example using
[HTTPoison](https://hex.pm/packages/httpoison):
{:ok, token} = Goth.Token.for_scope("https://www.googleapis.com/auth/pubsub")
HTTPoison.get(url, [{"Authorization", "#{token.type} #{token.token}"}])
"""
alias Goth.TokenStore
alias Goth.Client
@type t :: %__MODULE__{
token: String.t(),
type: String.t(),
scope: String.t(),
sub: String.t() | nil,
expires: non_neg_integer,
account: String.t()
}
defstruct [:token, :type, :scope, :sub, :expires, :account]
@doc """
Get a `%Goth.Token{}` for a particular `scope`. `scope` can be a single
scope or multiple scopes joined by a space.
`sub` needs to be specified if impersonation is used to prevent cache
leaking between users.
## Example
iex> Token.for_scope("https://www.googleapis.com/auth/pubsub")
{:ok, %Goth.Token{expires: ..., token: "...", type: "..."} }
"""
def for_scope(info, sub \\ nil)
@spec for_scope(scope :: String.t(), sub :: String.t() | nil) :: {:ok, t}
def for_scope(scope, sub) when is_binary(scope) do
case TokenStore.find({:default, scope}, sub) do
:error -> retrieve_and_store!({:default, scope}, sub)
{:ok, token} -> {:ok, token}
end
end
@spec for_scope(info :: {String.t() | atom(), String.t()}, sub :: String.t() | nil) :: {:ok, t}
def for_scope({account, scope}, sub) do
case TokenStore.find({account, scope}, sub) do
:error -> retrieve_and_store!({account, scope}, sub)
{:ok, token} -> {:ok, token}
end
end
@doc """
Parse a successful JSON response from Google's token API and extract a `%Goth.Token{}`
"""
def from_response_json(scope, sub \\ nil, json)
@spec from_response_json(String.t(), String.t() | nil, String.t()) :: t
def from_response_json(scope, sub, json) when is_binary(scope) do
{:ok, attrs} = json |> Jason.decode()
%__MODULE__{
token: attrs["access_token"],
type: attrs["token_type"],
scope: scope,
sub: sub,
expires: :os.system_time(:seconds) + attrs["expires_in"],
account: :default
}
end
@spec from_response_json(
{atom() | String.t(), String.t()},
String.t() | nil,
String.t()
) :: t
def from_response_json({account, scope}, sub, json) do
{:ok, attrs} = json |> Jason.decode()
%__MODULE__{
token: attrs["access_token"],
type: attrs["token_type"],
scope: scope,
sub: sub,
expires: :os.system_time(:seconds) + attrs["expires_in"],
account: account
}
end
@doc """
Retrieve a new access token from the API. This is useful for expired tokens,
although `Goth` automatically handles refreshing tokens for you, so you should
rarely if ever actually need to call this method manually.
"""
@spec refresh!(t() | {any(), any()}) :: {:ok, t()}
def refresh!(%__MODULE__{account: account, scope: scope, sub: sub}),
do: refresh!({account, scope}, sub)
def refresh!(%__MODULE__{account: account, scope: scope}), do: refresh!({account, scope})
@spec refresh!({any(), any()}, any()) :: {:ok, t()}
def refresh!({account, scope}, sub \\ nil), do: retrieve_and_store!({account, scope}, sub)
def queue_for_refresh(%__MODULE__{} = token) do
diff = token.expires - :os.system_time(:seconds)
if diff < 10 do
# just do it immediately
Task.async(fn ->
__MODULE__.refresh!(token)
end)
else
:timer.apply_after((diff - 10) * 1000, __MODULE__, :refresh!, [token])
end
end
defp retrieve_and_store!({account, scope}, sub) do
{:ok, token} = Client.get_access_token({account, scope}, sub: sub)
TokenStore.store({account, scope}, sub, token)
{:ok, token}
end
end
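# Hedged sketch of the scheduling above: a token expiring in 60 seconds is
# refreshed ~50 seconds from now; one already inside the 10-second window
# refreshes immediately in a Task.
#
#   {:ok, token} = Goth.Token.for_scope("https://www.googleapis.com/auth/pubsub")
#   Goth.Token.queue_for_refresh(token)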
|
lib/goth/token.ex
| 0.885706
| 0.527864
|
token.ex
|
starcoder
|
defmodule Poison.EncodeError do
defexception value: nil, message: nil
def message(%{value: value, message: nil}) do
"unable to encode value: #{inspect value}"
end
def message(%{message: message}) do
message
end
end
defmodule Poison.Encode do
def encode_name(value) do
cond do
is_binary(value) ->
value
is_atom(value) ->
Atom.to_string(value)
true ->
raise Poison.EncodeError, value: value,
message: "expected string or atom key, got: #{inspect value}"
end
end
end
defprotocol Poison.Encoder do
@fallback_to_any true
def encode(value, options)
end
defimpl Poison.Encoder, for: Atom do
def encode(nil, _), do: "null"
def encode(true, _), do: "true"
def encode(false, _), do: "false"
def encode(atom, options) do
Poison.Encoder.BitString.encode(Atom.to_string(atom), options)
end
end
defimpl Poison.Encoder, for: BitString do
use Bitwise
def encode("", _), do: "\"\""
def encode(string, options) do
[?", escape(string, options[:escape]), ?"]
end
defp escape("", _), do: []
for {char, seq} <- Enum.zip('"\\\n\t\r\f\b', '"\\ntrfb') do
defp escape(<<unquote(char)>> <> rest, mode) do
[unquote("\\" <> <<seq>>) | escape(rest, mode)]
end
end
defp escape(<<char>> <> rest, mode) when char < 0x1F do
[seq(char) | escape(rest, mode)]
end
defp escape(<<char :: utf8>> <> rest, :unicode) when char in 0x80..0xFFFF do
[seq(char) | escape(rest, :unicode)]
end
# http://en.wikipedia.org/wiki/UTF-16#Example_UTF-16_encoding_procedure
# http://unicodebook.readthedocs.org/unicode_encodings.html#utf-16-surrogate-pairs
defp escape(<<char :: utf8>> <> rest, :unicode) when char > 0xFFFF do
code = char - 0x10000
[seq(0xD800 ||| (code >>> 10)),
seq(0xDC00 ||| (code &&& 0x3FF))
| escape(rest, :unicode)]
end
defp escape(<<char :: utf8>> <> rest, :javascript) when char in [0x2028, 0x2029] do
[seq(char) | escape(rest, :javascript)]
end
defp escape(string, mode) do
size = chunk_size(string, mode, 0)
<<chunk :: binary-size(size), rest :: binary>> = string
[chunk | escape(rest, mode)]
end
defp chunk_size(<<char>> <> _, _mode, acc) when char < 0x1F or char in '"\\' do
acc
end
defp chunk_size(<<char>> <> rest, mode, acc) when char < 0x80 do
chunk_size(rest, mode, acc + 1)
end
defp chunk_size(<<_ :: utf8>> <> _, :unicode, acc) do
acc
end
defp chunk_size(<<char :: utf8>> <> _, :javascript, acc) when char in [0x2028, 0x2029] do
acc
end
defp chunk_size(<<codepoint :: utf8>> <> rest, mode, acc) do
size = cond do
codepoint < 0x800 -> 2
codepoint < 0x10000 -> 3
true -> 4
end
chunk_size(rest, mode, acc + size)
end
defp chunk_size(_, _, acc), do: acc
@compile {:inline, seq: 1}
defp seq(char) do
case Integer.to_char_list(char, 16) do
s when length(s) < 2 -> ["\\u000" | s]
s when length(s) < 3 -> ["\\u00" | s]
s when length(s) < 4 -> ["\\u0" | s]
s -> ["\\u" | s]
end
end
end
defimpl Poison.Encoder, for: Integer do
def encode(integer, _options) do
Integer.to_string(integer)
end
end
defimpl Poison.Encoder, for: Float do
def encode(float, _options) do
:io_lib_format.fwrite_g(float)
end
end
defimpl Poison.Encoder, for: Map do
alias Poison.Encoder
import Poison.Encode, only: [encode_name: 1]
def encode(map, _) when map_size(map) < 1, do: "{}"
def encode(map, options) do
fun = &[?,, Encoder.BitString.encode(encode_name(&1), options), ?:,
Encoder.encode(&2, options) | &3]
[?{, tl(:maps.fold(fun, [], map)), ?}]
end
end
defimpl Poison.Encoder, for: List do
alias Poison.Encoder
@compile :inline_list_funcs
def encode([], _), do: "[]"
def encode([head], options) do
[?[, Encoder.encode(head, options), ?]]
end
def encode([head | rest], options) do
tail = :lists.flatmap(&[?,, Encoder.encode(&1, options)], rest)
[?[, Encoder.encode(head, options), tail, ?]]
end
end
defimpl Poison.Encoder, for: [Range, Stream, HashSet] do
def encode(collection, options) do
fun = &[?,, Poison.Encoder.encode(&1, options)]
case Enum.flat_map(collection, fun) do
[] -> "[]"
[_ | tail] -> [?[, tail, ?]]
end
end
end
defimpl Poison.Encoder, for: HashDict do
alias Poison.Encoder
import Poison.Encode, only: [encode_name: 1]
def encode(dict, options) do
fun = fn {key, value} ->
[?,, Encoder.BitString.encode(encode_name(key), options), ?:,
Encoder.encode(value, options)]
end
case Enum.flat_map(dict, fun) do
[] -> "{}"
[_ | tail] -> [?{, tail, ?}]
end
end
end
defimpl Poison.Encoder, for: Any do
def encode(%{__struct__: _} = struct, options) do
Poison.Encoder.Map.encode(Map.from_struct(struct), options)
end
def encode(value, _options) do
raise Poison.EncodeError, value: value
end
end
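# A hedged sketch of extending the protocol above for a custom struct by
# delegating to the Map implementation (Money is a placeholder):
defmodule Money do
  defstruct amount: 0, currency: "USD"
end

defimpl Poison.Encoder, for: Money do
  def encode(%Money{amount: amount, currency: currency}, options) do
    Poison.Encoder.Map.encode(%{amount: amount, currency: currency}, options)
  end
end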
|
lib/poison/encoder.ex
| 0.601594
| 0.403214
|
encoder.ex
|
starcoder
|
defmodule Faker.Superhero.En do
import Faker, only: [sampler: 2]
@moduledoc """
Functions for Superhero data in English
"""
@doc """
Returns a Superhero name
"""
@spec name() :: String.t
def name, do: name(:crypto.rand_uniform(1, 11))
defp name(1), do: "#{prefix()} #{descriptor()} #{suffix()}"
defp name(2), do: "#{prefix()} #{descriptor()}"
defp name(3), do: "#{descriptor()} #{suffix()}"
defp name(n) when is_integer(n) do
"#{descriptor()}"
end
@doc """
Returns a random prefix
"""
@spec prefix() :: String.t
sampler :prefix, ["The", "Magnificent", "Ultra", "Supah", "Illustrious",
"Agent", "Cyborg", "Dark", "Giant", "Mr", "Doctor", "Red", "Green",
"General", "Captain"]
@doc"""
Returns a random suffix
"""
@spec suffix() :: String.t
sampler :suffix, ["I", "II", "III", "IX", "XI", "Claw", "Man", "Woman",
"Machine", "Strike", "X", "Eyes", "Dragon", "Skull", "Fist", "Ivy", "Boy",
"Girl", "Knight", "Wolf", "Lord", "Brain", "the Hunter", "of Hearts",
"Spirit", "Strange", "the Fated", "Brain", "Thirteen"]
@doc"""
Returns a random descriptor
"""
@spec descriptor() :: String.t
sampler :descriptor, ["A-Bomb", "Abomination", "Absorbing", "Ajax",
"Alien", "Amazo", "Ammo", "Angel", "Animal", "Annihilus", "Ant",
"Apocalypse", "Aqua", "Aqualad", "Arachne", "Archangel", "Arclight",
"Ares", "Ariel", "Armor", "Arsenal", "Astro Boy", "Atlas", "Atom",
"Aurora", "Azrael", "Aztar", "Bane", "Banshee", "Bantam", "Bat", "Beak",
"Beast", "Beetle", "Ben", "Beyonder", "Binary", "Bird", "Bishop",
"Bizarro", "Blade", "Blaquesmith", "Blink", "Blizzard", "Blob", "Bloodaxe",
"Bloodhawk", "Bloodwraith", "Bolt", "Bomb Queen", "Boom Boom", "Boomer",
"Booster Gold", "Box", "Brainiac", "Brother Voodoo", "Buffy", "Bullseye",
"Bumblebee", "Bushido", "Cable", "Callisto", "Cannonball", "Carnage", "Cat",
"Century", "Cerebra", "Chamber", "Chameleon", "Changeling", "Cheetah",
"Chromos", "<NAME>", "Clea", "Cloak", "Cogliostro", "<NAME>",
"Colossus", "Copycat", "Corsair", "Cottonmouth", "Crystal", "Curse",
"Cy-Gor", "Cyborg", "Cyclops", "Cypher", "Dagger", "Daredevil", "Darkhawk",
"Darkseid", "Darkside", "Darkstar", "Dash", "Deadpool", "Deadshot",
"Deathlok", "Deathstroke", "Demogoblin", "Destroyer", "<NAME>",
"Domino", "Doomsday", "Doppelganger", "Dormammu", "Ego", "Electro",
"Elektra", "Elongated Man", "Energy", "ERG", "Etrigan", "Evilhawk",
"Exodus", "Falcon", "Faora", "Feral", "Firebird", "Firelord", "Firestar",
"Firestorm", "Fixer", "Flash", "Forge", "Frenzy", "Galactus", "Gambit",
"Gamora", "Garbage", "Genesis", "Ghost", "Giganta", "Gladiator",
"Goblin Queen", "Gog", "Goku", "Goliath", "<NAME>",
"Granny Goodness", "Gravity", "Groot", "Guardian", "Gardner", "Hancock",
"Havok", "Hawk", "Heat Wave", "Hell", "Hercules", "Hobgoblin", "Hollow",
"Hope Summers", "Hulk", "Huntress", "Husk", "Hybrid", "Hyperion", "Impulse",
"Ink", "Iron Fist", "Isis", "Jack of Hearts", "Jack-Jack", "Jigsaw",
"Joker", "Jolt", "Jubilee", "Juggernaut", "Junkpile", "Justice", "Kang",
"Klaw", "Kool-Aid Man", "Krypto", "Leader", "Leech", "Lizard", "Lobo",
"Loki", "Longshot", "Luna", "Lyja", "Magneto", "Magog", "Magus",
"Mandarin", "<NAME>", "Match", "Maverick", "Maxima",
"<NAME>", "Medusa", "Meltdown", "Mephisto", "Mera", "Metallo",
"Metamorpho", "Meteorite", "Metron", "Mimic", "Misfit", "Mockingbird",
"Mogo", "Moloch", "Molten Man", "Monarch", "Moon Knight", "Moonstone",
"Morlun", "Morph", "Multiple", "Mysterio", "Mystique", "Namor", "Namorita",
"<NAME>", "<NAME>", "<NAME>", "<NAME>",
"Northstar", "Nova", "Omega Red", "Omniscient", "Onslaught", "Osiris",
"Overtkill", "Penance", "Penguin", "Phantom", "Phoenix", "Plastique",
"Polaris", "Predator", "Proto-Goblin", "Psylocke", "Punisher", "Pyro",
"Quantum", "Question", "Quicksilver", "Quill", "<NAME>",
"<NAME>", "Rambo", "Raven", "Redeemer", "<NAME>", "Rhino",
"<NAME>", "Riddler", "Ripcord", "Rocket Raccoon", "Rogue", "Ronin",
"Rorschach", "Sabretooth", "Sage", "Sasquatch", "Scarecrow", "Scorpia",
"Scorpion", "Sentry", "Shang-Chi", "Shatterstar", "She-Hulk", "She-Thing",
"Shocker", "Shriek", "Shrinking Violet", "Sif", "Silk", "Silverclaw",
"Sinestro", "Siren", "Siryn", "Skaar", "Snowbird", "Sobek", "Songbird",
"Space Ghost", "Spawn", "Spectre", "Speedball", "Speedy", "Spider", "Spyke",
"Stacy X", "Star-Lord", "Stardust", "Starfire", "Steel", "Storm", "Sunspot",
"Swarm", "Sylar", "Synch", "T", "Tempest", "Thanos", "Thing", "Thor",
"Thunderbird", "Thundra", "Tiger Shark", "Tigra", "Tinkerer", "Titan",
"Toad", "Toxin", "Toxin", "Trickster", "Triplicate", "Triton", "Two-Face",
"Ultron", "Vagabond", "Valkyrie", "Vanisher", "Venom", "Vibe", "Vindicator",
"Violator", "Violet", "Vision", "Vulcan", "Vulture", "Walrus",
"War Machine", "Warbird", "Warlock", "Warp", "Warpath", "Wasp", "Watcher",
"White Queen", "Wildfire", "Winter Soldier", "Wiz Kid", "Wolfsbane",
"Wolverine", "Wondra", "Wyatt Wingfoot", "Yellow", "Yellowjacket", "Ymir",
"Zatanna", "Zoom"]
@doc """
Returns a random Superhero power
"""
@spec power() :: String.t
sampler :power, ["Ability Shift", "Absorption", "Accuracy", "Adaptation",
"Aerokinesis", "Agility", "Animal Attributes", "Animal Control",
"Animal Oriented Powers", "Animation", "Anti-Gravity", "Apotheosis",
"Astral Projection", "Astral Trap", "Astral Travel", "Atmokinesis",
"Audiokinesis", "Banish", "Biokinesis", "Bullet Time", "Camouflage",
"Changing Armor", "Chlorokinesis", "Chronokinesis", "Clairvoyance",
"Cloaking", "Cold Resistance", "Cross-Dimensional Awareness",
"Cross-Dimensional Travel", "Cryokinesis", "Danger Sense",
"Darkforce Manipulation", "Death Touch", "Density Control", "Dexterity",
"Duplication", "Durability", "Echokinesis", "Elasticity",
"Electrical Transport", "Electrokinesis", "Elemental Transmogrification",
"Empathy", "Endurance", "Energy Absorption", "Energy Armor", "Energy Beams",
"Energy Blasts", "Energy Constructs", "Energy Manipulation",
"Energy Resistance", "Enhanced Hearing", "Enhanced Memory",
"Enhanced Senses", "Enhanced Sight", "Enhanced Smell", "Enhanced Touch",
"Entropy Projection", "Fire Resistance", "Flight", "Force Fields",
"Geokinesis", "Gliding", "Gravitokinesis", "Grim Reaping", "Healing Factor",
"Heat Generation", "Heat Resistance", "Human physical perfection",
"Hydrokinesis", "Hyperkinesis", "Hypnokinesis", "Illumination", "Illusions",
"Immortality", "Insanity", "Intangibility", "Intelligence",
"Intuitive aptitude", "Invisibility", "Invulnerability", "Jump",
"Lantern Power Ring", "Latent Abilities", "Levitation", "Longevity",
"Magic", "Magic Resistance", "Magnetokinesis", "Matter Absorption",
"Melting", "Mind Blast", "Mind Control", "Mind Control Resistance",
"Molecular Combustion", "Molecular Dissipation", "Molecular Immobilization",
"Molecular Manipulation", "Natural Armor", "Natural Weapons", "Nova Force",
"Omnilingualism", "Omnipotence", "Omnitrix", "Orbing", "Phasing",
"Photographic Reflexes", "Photokinesis", "Physical Anomaly",
"Portal Creation", "Possession", "Power Absorption", "Power Augmentation",
"Power Cosmic", "Power Nullifier", "Power Sense", "Power Suit",
"Precognition", "Probability Manipulation", "Projection", "Psionic Powers",
"Psychokinesis", "Pyrokinesis", "Qwardian Power Ring", "Radar Sense",
"Radiation Absorption", "Radiation Control", "Radiation Immunity",
"Reality Warping", "Reflexes", "Regeneration", "Resurrection",
"Seismic Power", "Self-Sustenance", "Separation", "Shapeshifting",
"Size Changing", "Sonar", "Sonic Scream", "Spatial Awareness", "Stamina",
"Stealth", "Sub-Mariner", "Substance Secretion", "Summoning",
"Super Breath", "Super Speed", "Super Strength", "Symbiote Costume",
"Technopath/Cyberpath", "Telekinesis", "Telepathy", "Telepathy Resistance",
"Teleportation", "Terrakinesis", "The Force", "Thermokinesis",
"Thirstokinesis", "Time Travel", "Timeframe Control", "Toxikinesis",
"Toxin and Disease Resistance", "Umbrakinesis", "Underwater breathing",
"Vaporising Beams", "Vision - Cryo", "Vision - Heat", "Vision - Infrared",
"Vision - Microscopic", "Vision - Night", "Vision - Telescopic",
"Vision - Thermal", "Vision - X-Ray", "Vitakinesis", "Wallcrawling",
"Weapon-based Powers", "Weapons Master", "Web Creation", "Wishing"]
end
|
lib/faker/superhero/en.ex
| 0.636692
| 0.404272
|
en.ex
|
starcoder
|
defmodule AWS.OpsWorksCM do
@moduledoc """
AWS OpsWorks CM
AWS OpsWorks for configuration management (CM) is a service that runs and
manages configuration management servers. You can use AWS OpsWorks CM to
create and manage AWS OpsWorks for Chef Automate and AWS OpsWorks for
Puppet Enterprise servers, and add or remove nodes for the servers to
manage.
**Glossary of terms**
<ul> <li> **Server**: A configuration management server that can be
highly-available. The configuration management server runs on an Amazon
Elastic Compute Cloud (EC2) instance, and may use various other AWS
services, such as Amazon Relational Database Service (RDS) and Elastic Load
Balancing. A server is a generic abstraction over the configuration manager
that you want to use, much like Amazon RDS. In AWS OpsWorks CM, you do not
start or stop servers. After you create servers, they continue to run until
they are deleted.
</li> <li> **Engine**: The engine is the specific configuration manager
that you want to use. Valid values in this release include `ChefAutomate`
and `Puppet`.
</li> <li> **Backup**: This is an application-level backup of the data that
the configuration manager stores. AWS OpsWorks CM creates an S3 bucket for
backups when you launch the first server. A backup maintains a snapshot of
a server's configuration-related attributes at the time the backup starts.
</li> <li> **Events**: Events are always related to a server. Events are
written during server creation, when health checks run, when backups are
created, when system maintenance is performed, etc. When you delete a
server, the server's events are also deleted.
</li> <li> **Account attributes**: Every account has attributes that are
assigned in the AWS OpsWorks CM database. These attributes store
information about configuration limits (servers, backups, etc.) and your
customer account.
</li> </ul> **Endpoints**
AWS OpsWorks CM supports the following endpoints, all HTTPS. You must
connect to one of the following endpoints. Your servers can only be
accessed or managed within the endpoint in which they are created.
<ul> <li> opsworks-cm.us-east-1.amazonaws.com
</li> <li> opsworks-cm.us-east-2.amazonaws.com
</li> <li> opsworks-cm.us-west-1.amazonaws.com
</li> <li> opsworks-cm.us-west-2.amazonaws.com
</li> <li> opsworks-cm.ap-northeast-1.amazonaws.com
</li> <li> opsworks-cm.ap-southeast-1.amazonaws.com
</li> <li> opsworks-cm.ap-southeast-2.amazonaws.com
</li> <li> opsworks-cm.eu-central-1.amazonaws.com
</li> <li> opsworks-cm.eu-west-1.amazonaws.com
</li> </ul> For more information, see [AWS OpsWorks endpoints and
quotas](https://docs.aws.amazon.com/general/latest/gr/opsworks-service.html)
in the AWS General Reference.
**Throttling limits**
All API operations allow for five requests per second with a burst of 10
requests per second.
"""
@doc """
Associates a new node with the server. For more information about how to
disassociate a node, see `DisassociateNode`.
On a Chef server: This command is an alternative to `knife bootstrap`.
Example (Chef): `aws opsworks-cm associate-node --server-name *MyServer*
--node-name *MyManagedNode* --engine-attributes
"Name=*CHEF_ORGANIZATION*,Value=default"
"Name=*CHEF_NODE_PUBLIC_KEY*,Value=*public-key-pem*"`
On a Puppet server, this command is an alternative to the `puppet cert
sign` command that signs a Puppet node CSR.
Example (Puppet): `aws opsworks-cm associate-node --server-name *MyServer*
--node-name *MyManagedNode* --engine-attributes
"Name=*PUPPET_NODE_CSR*,Value=*csr-pem*"`
A node can only be associated with servers that are in a `HEALTHY`
state. Otherwise, an `InvalidStateException` is thrown. A
`ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not
valid. The AssociateNode API call can be integrated into Auto Scaling
configurations, AWS CloudFormation templates, or the user data of a
server's instance.
"""
def associate_node(client, input, options \\ []) do
request(client, "AssociateNode", input, options)
end
@doc """
Creates an application-level backup of a server. While the server is in the
`BACKING_UP` state, the server cannot be changed, and no additional backup
can be created.
Backups can be created for servers in `RUNNING`, `HEALTHY`, and `UNHEALTHY`
states. By default, you can create a maximum of 50 manual backups.
This operation is asynchronous.
A `LimitExceededException` is thrown when the maximum number of manual
backups is reached. An `InvalidStateException` is thrown when the server is
not in any of the following states: RUNNING, HEALTHY, or UNHEALTHY. A
`ResourceNotFoundException` is thrown when the server is not found. A
`ValidationException` is thrown when parameters of the request are not
valid.
"""
def create_backup(client, input, options \\ []) do
request(client, "CreateBackup", input, options)
end
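# A minimal call sketch (hedged: client construction varies by aws-elixir
# version, and the server name is hypothetical; `CreateBackup` takes the
# AWS JSON shape, e.g. a "ServerName" key):
#
#     client = %AWS.Client{access_key_id: "...", secret_access_key: "...",
#                          region: "us-east-1", endpoint: "amazonaws.com",
#                          proto: "https", port: 443}
#     {:ok, _result, _response} =
#       AWS.OpsWorksCM.create_backup(client, %{"ServerName" => "my-chef-server"})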
@doc """
Creates and immediately starts a new server. The server is ready to use when
it is in the `HEALTHY` state. By default, you can create a maximum of 10
servers.
This operation is asynchronous.
A `LimitExceededException` is thrown when you have created the maximum
number of servers (10). A `ResourceAlreadyExistsException` is thrown when a
server with the same name already exists in the account. A
`ResourceNotFoundException` is thrown when you specify a backup ID that is
not valid or is for a backup that does not exist. A `ValidationException`
is thrown when parameters of the request are not valid.
If you do not specify a security group by adding the `SecurityGroupIds`
parameter, AWS OpsWorks creates a new security group.
*Chef Automate:* The default security group opens the Chef server to the
world on TCP port 443. If a KeyName is present, AWS OpsWorks enables SSH
access. SSH is also open to the world on TCP port 22.
*Puppet Enterprise:* The default security group opens TCP ports 22, 443,
4433, 8140, 8142, 8143, and 8170. If a KeyName is present, AWS OpsWorks
enables SSH access. SSH is also open to the world on TCP port 22.
By default, your server is accessible from any IP address. We recommend
that you update your security group rules to allow access from known IP
addresses and address ranges only. To edit security group rules, open
Security Groups in the navigation pane of the EC2 management console.
To specify your own domain for a server, and provide your own self-signed
or CA-signed certificate and private key, specify values for
`CustomDomain`, `CustomCertificate`, and `CustomPrivateKey`.
"""
def create_server(client, input, options \\ []) do
request(client, "CreateServer", input, options)
end
@doc """
Deletes a backup. You can delete both manual and automated backups. This
operation is asynchronous.
An `InvalidStateException` is thrown when a backup deletion is already in
progress. A `ResourceNotFoundException` is thrown when the backup does not
exist. A `ValidationException` is thrown when parameters of the request are
not valid.
"""
def delete_backup(client, input, options \\ []) do
request(client, "DeleteBackup", input, options)
end
@doc """
Deletes the server and the underlying AWS CloudFormation stacks (including
the server's EC2 instance). When you run this command, the server state is
updated to `DELETING`. After the server is deleted, it is no longer
returned by `DescribeServer` requests. If the AWS CloudFormation stack
cannot be deleted, the server cannot be deleted.
This operation is asynchronous.
An `InvalidStateException` is thrown when a server deletion is already in
progress. A `ResourceNotFoundException` is thrown when the server does not
exist. A `ValidationException` is raised when parameters of the request are
not valid.
"""
def delete_server(client, input, options \\ []) do
request(client, "DeleteServer", input, options)
end
@doc """
Describes your OpsWorks-CM account attributes.
This operation is synchronous.
"""
def describe_account_attributes(client, input, options \\ []) do
request(client, "DescribeAccountAttributes", input, options)
end
@doc """
Describes backups. The results are ordered by time, with newest backups
first. If you do not specify a BackupId or ServerName, the command returns
all backups.
This operation is synchronous.
A `ResourceNotFoundException` is thrown when the backup does not exist. A
`ValidationException` is raised when parameters of the request are not
valid.
"""
def describe_backups(client, input, options \\ []) do
request(client, "DescribeBackups", input, options)
end
@doc """
Describes events for a specified server. Results are ordered by time, with
newest events first.
This operation is synchronous.
A `ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not
valid.
"""
def describe_events(client, input, options \\ []) do
request(client, "DescribeEvents", input, options)
end
@doc """
Returns the current status of an existing association or disassociation
request.
A `ResourceNotFoundException` is thrown when no recent association or
disassociation request with the specified token is found, or when the
server does not exist. A `ValidationException` is raised when parameters of
the request are not valid.
"""
def describe_node_association_status(client, input, options \\ []) do
request(client, "DescribeNodeAssociationStatus", input, options)
end
@doc """
Lists all configuration management servers that are identified with your
account. Only the stored results from Amazon DynamoDB are returned. AWS
OpsWorks CM does not query other services.
This operation is synchronous.
A `ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not
valid.
"""
def describe_servers(client, input, options \\ []) do
request(client, "DescribeServers", input, options)
end
@doc """
Disassociates a node from an AWS OpsWorks CM server, and removes the node
from the server's managed nodes. After a node is disassociated, the node
key pair is no longer valid for accessing the configuration manager's API.
For more information about how to associate a node, see `AssociateNode`.
A node can only be disassociated from a server that is in a `HEALTHY`
state. Otherwise, an `InvalidStateException` is thrown. A
`ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not
valid.
"""
def disassociate_node(client, input, options \\ []) do
request(client, "DisassociateNode", input, options)
end
@doc """
Exports a specified server engine attribute as a base64-encoded string. For
example, you can export user data that you can use in EC2 to associate
nodes with a server.
This operation is synchronous.
A `ValidationException` is raised when parameters of the request are not
valid. A `ResourceNotFoundException` is thrown when the server does not
exist. An `InvalidStateException` is thrown when the server is in any of
the following states: CREATING, TERMINATED, FAILED or DELETING.
"""
def export_server_engine_attribute(client, input, options \\ []) do
request(client, "ExportServerEngineAttribute", input, options)
end
@doc """
Returns a list of tags that are applied to the specified AWS OpsWorks for
Chef Automate or AWS OpsWorks for Puppet Enterprise servers or backups.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Restores a backup to a server that is in a `CONNECTION_LOST`, `HEALTHY`,
`RUNNING`, `UNHEALTHY`, or `TERMINATED` state. When you run RestoreServer,
the server's EC2 instance is deleted, and a new EC2 instance is configured.
RestoreServer maintains the existing server endpoint, so configuration
management of the server's client devices (nodes) should continue to work.
Restoring from a backup is performed by creating a new EC2 instance. If
restoration is successful, and the server is in a `HEALTHY` state, AWS
OpsWorks CM switches traffic over to the new instance. After restoration is
finished, the old EC2 instance is maintained in a `Running` or `Stopped`
state, but is eventually terminated.
This operation is asynchronous.
An `InvalidStateException` is thrown when the server is not in a valid
state. A `ResourceNotFoundException` is thrown when the server does not
exist. A `ValidationException` is raised when parameters of the request are
not valid.
"""
def restore_server(client, input, options \\ []) do
request(client, "RestoreServer", input, options)
end
@doc """
Manually starts server maintenance. This command can be useful if an
earlier maintenance attempt failed, and the underlying cause of maintenance
failure has been resolved. The server is in an `UNDER_MAINTENANCE` state
while maintenance is in progress.
Maintenance can only be started on servers in `HEALTHY` and `UNHEALTHY`
states. Otherwise, an `InvalidStateException` is thrown. A
`ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not
valid.
"""
def start_maintenance(client, input, options \\ []) do
request(client, "StartMaintenance", input, options)
end
@doc """
Applies tags to an AWS OpsWorks for Chef Automate or AWS OpsWorks for
Puppet Enterprise server, or to server backups.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes specified tags from an AWS OpsWorks-CM server or backup.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates settings for a server.
This operation is synchronous.
"""
def update_server(client, input, options \\ []) do
request(client, "UpdateServer", input, options)
end
@doc """
Updates engine-specific attributes on a specified server. The server enters
the `MODIFYING` state when this operation is in progress. Only one update
can occur at a time. You can use this command to reset a Chef server's
public key (`CHEF_PIVOTAL_KEY`) or a Puppet server's admin password
(`<PASSWORD>`).
This operation is asynchronous.
This operation can only be called for servers in `HEALTHY` or `UNHEALTHY`
states. Otherwise, an `InvalidStateException` is raised. A
`ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not
valid.
"""
def update_server_engine_attributes(client, input, options \\ []) do
request(client, "UpdateServerEngineAttributes", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "opsworks-cm"}
host = build_host("opsworks-cm", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "OpsWorksCM_V2016_11_01.#{action}"}
]
payload = Poison.Encoder.encode(input, %{})
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/ops_works_c_m.ex
| 0.899091
| 0.458773
|
ops_works_c_m.ex
|
starcoder
|
defmodule Automata do
@moduledoc """
"""
@typedoc """
All automata start with a %State{}.
"""
@type state :: module()
@typedoc "The error state returned by `Automata.WorldInfo` and `Automata.AutomatonInfo`"
@type failed :: [{Exception.kind(), reason :: term, Exception.stacktrace()}]
@typedoc "A map representing the results of running an agent"
@type agent_result :: %{
failures: non_neg_integer,
total: non_neg_integer
}
defmodule AutomatonInfo do
@moduledoc """
A struct that keeps local information specific to the automaton for a world.
It is received by formatters and contains the following fields:
* `:name` - the automaton name
* `:module` - the automaton module
* `:state` - the automaton state
* `:time` - the duration in microseconds of the automatons' init sequence
* `:tags` - the automaton tags
* `:logs` - the captured logs
"""
defstruct [:name, :module, :state, time: 0, tags: %{}, logs: ""]
@type t :: %__MODULE__{
name: atom,
module: module,
state: Automaton.state(),
time: non_neg_integer,
tags: map,
logs: String.t()
}
end
defmodule WorldInfo do
@moduledoc """
A struct that keeps global information about all automata for the world.
It is received by formatters and contains the following fields:
* `:name` - the world name
* `:state` - the automata state (see `t:Automata.state/0`)
* `:automata` - all automata in the world
"""
defstruct [:name, :state, automata: []]
@type t :: %__MODULE__{name: module, state: Automata.state(), automata: [Automata.t()]}
end
defmodule TimeoutError do
defexception [:timeout, :type]
@impl true
def message(%{timeout: timeout, type: type}) do
"""
#{type} timed out after #{timeout}ms. You can change the timeout:
1. per automaton by setting "timeout: x" on automaton state (accepts :infinity)
2. per automata by setting "@moduletag timeout: x" (accepts :infinity)
3. globally via "Automata.start(timeout: x)" configuration
where "x" is the timeout given as integer in milliseconds (defaults to 60_000).
"""
end
end
defmodule MultiError do
@moduledoc """
Raised to signal multiple automata errors which happened in a world.
"""
defexception errors: []
@impl true
def message(%{errors: errors}) do
"got the following errors:\n\n" <>
Enum.map_join(errors, "\n\n", fn {kind, error, stack} ->
Exception.format_banner(kind, error, stack)
end)
end
end
use Application
@doc false
def start(_type, []) do
children = [
# Automata.AutomataServer,
# Automata.CaptureServer,
# Automata.OnExitHandler
]
{:ok, _pid} = run()
opts = [strategy: :one_for_one]
Supervisor.start_link(children, opts)
end
# @doc """
# Starts Automata and automatically runs the world(s) right before the VM
# terminates.
# It accepts a set of `options` to configure `Automata`
# (the same ones accepted by `configure/1`).
# If you want to run world manually, you can set the `:autorun` option
# to `false` and use run/0 to run world.
# """
@spec start(Keyword.t()) :: :ok
def start(options \\ []) do
{:ok, _} = Application.ensure_all_started(:automata)
configure(options)
if Application.fetch_env!(:automata, :autorun) do
Application.put_env(:automata, :autorun, false)
System.at_exit(fn
0 ->
options = persist_defaults(configuration())
:ok = Automata.Operator.run(options)
_ ->
:ok
end)
else
:ok
end
end
@doc """
Configures Automata.
## Options
Automata supports the following options:
* `:trace` - sets Automata into trace mode, this allows agents to print info
on an episode(s) while running.
Any arbitrary configuration can also be passed to `configure/1` or `start/1`,
and these options can then be used in places such as the builtin automaton types
configurations. These other options will be ignored by the Automata core itself.
"""
@spec configure(Keyword.t()) :: :ok
def configure(options) do
Enum.each(options, fn {k, v} ->
Application.put_env(:automata, k, v)
end)
end
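# Example: enable trace mode before starting the world (an arbitrary
# option; anything the core does not recognize is ignored, per the docs):
#
#     Automata.configure(trace: true)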
@doc """
Returns Automata configuration.
"""
@spec configuration() :: Keyword.t()
def configuration do
Application.get_all_env(:automata)
end
@doc """
Runs the world. It is invoked automatically
if Automata is started via `start/1`.
"""
@spec run() :: {:ok, pid}
def run do
options = persist_defaults(configuration())
{:ok, _pid} = Automata.Operator.run(options)
end
@doc """
Begins update of Behavior Tree. More agent types to come.
TODO: remove test env, get from config
"""
@spec spawn() :: :update | :tick
def spawn do
if Mix.env() == :test do
send(TestMockSeq1Server, :update)
else
send(MockMAB1Server, :tick)
end
end
# Persists default values in application
# environment before the automata start.
defp persist_defaults(config) do
config |> Keyword.take([:seed, :trace]) |> configure()
config
end
def status(automaton_name) do
Automaton.AgentServer.status(automaton_name)
end
end
|
lib/automata.ex
| 0.867794
| 0.603844
|
automata.ex
|
starcoder
|
defmodule Geo.WKT.Encoder do
@moduledoc false
alias Geo.{
Point,
PointZ,
PointM,
PointZM,
LineString,
LineStringZ,
Polygon,
PolygonZ,
MultiPoint,
MultiPointZ,
MultiLineString,
MultiLineStringZ,
MultiPolygon,
MultiPolygonZ,
GeometryCollection
}
@doc """
Takes a Geometry and returns a WKT string in an `{:ok, binary}` tuple, or
an `{:error, exception}` tuple if encoding fails
"""
@spec encode(Geo.geometry()) :: {:ok, binary} | {:error, Exception.t()}
def encode(geom) do
{:ok, encode!(geom)}
rescue
exception ->
{:error, exception}
end
@doc """
Takes a Geometry and returns a WKT string. Raises if the geometry cannot
be encoded
"""
@spec encode!(Geo.geometry()) :: binary
def encode!(geom) do
get_srid_binary(geom.srid) <> do_encode(geom)
end
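# Example (derived from the clauses below): a Point with an SRID picks up
# the "SRID=...;" prefix:
#
#     Geo.WKT.Encoder.encode!(%Geo.Point{coordinates: {30, -90}, srid: 4326})
#     #=> "SRID=4326;POINT(30 -90)"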
defp do_encode(%Point{coordinates: {x, y}}) do
"POINT(#{x} #{y})"
end
defp do_encode(%PointZ{coordinates: {x, y, z}}) do
"POINT Z(#{x} #{y} #{z})"
end
defp do_encode(%PointM{coordinates: {x, y, m}}) do
"POINT M(#{x} #{y} #{m})"
end
defp do_encode(%PointZM{coordinates: {x, y, z, m}}) do
"POINT ZM(#{x} #{y} #{z} #{m})"
end
defp do_encode(%LineString{coordinates: coordinates}) do
coordinate_string = create_line_string_str(coordinates)
"LINESTRING#{coordinate_string}"
end
defp do_encode(%LineStringZ{coordinates: coordinates}) do
coordinate_string = create_line_string_str(coordinates)
"LINESTRINGZ#{coordinate_string}"
end
defp do_encode(%Polygon{coordinates: coordinates}) do
coordinate_string = create_polygon_str(coordinates)
"POLYGON#{coordinate_string}"
end
defp do_encode(%PolygonZ{coordinates: coordinates}) do
coordinate_string = create_polygon_str(coordinates)
"POLYGON#{coordinate_string}"
end
defp do_encode(%MultiPoint{coordinates: coordinates}) do
coordinate_string = create_line_string_str(coordinates)
"MULTIPOINT#{coordinate_string}"
end
defp do_encode(%MultiPointZ{coordinates: coordinates}) do
coordinate_string = create_line_string_str(coordinates)
"MULTIPOINTZ#{coordinate_string}"
end
defp do_encode(%MultiLineString{coordinates: coordinates}) do
coordinate_string = create_polygon_str(coordinates)
"MULTILINESTRING#{coordinate_string}"
end
defp do_encode(%MultiLineStringZ{coordinates: coordinates}) do
coordinate_string = create_polygon_str(coordinates)
"MULTILINESTRINGZ#{coordinate_string}"
end
defp do_encode(%MultiPolygon{coordinates: coordinates}) do
coordinate_string = create_multi_polygon_str(coordinates)
"MULTIPOLYGON#{coordinate_string}"
end
defp do_encode(%MultiPolygonZ{coordinates: coordinates}) do
coordinate_string = create_multi_polygon_str(coordinates)
"MULTIPOLYGONZ#{coordinate_string}"
end
defp do_encode(%GeometryCollection{geometries: geometries}) do
geom_str = Enum.map(geometries, &do_encode(&1)) |> Enum.join(",")
"GEOMETRYCOLLECTION(#{geom_str})"
end
defp create_line_string_str(coordinates) do
coordinate_str =
coordinates
|> Enum.map(&create_coord_str(&1))
|> Enum.join(",")
"(#{coordinate_str})"
end
defp create_polygon_str(coordinates) do
coordinate_str =
coordinates
|> Enum.map(&create_line_string_str(&1))
|> Enum.join(",")
"(#{coordinate_str})"
end
defp create_multi_polygon_str(coordinates) do
coordinate_str =
coordinates
|> Enum.map(&create_polygon_str(&1))
|> Enum.join(",")
"(#{coordinate_str})"
end
defp create_coord_str({x, y}), do: "#{x} #{y}"
defp create_coord_str({x, y, z}), do: "#{x} #{y} #{z}"
defp get_srid_binary(nil), do: ""
defp get_srid_binary(0), do: ""
defp get_srid_binary(srid), do: "SRID=#{srid};"
end
|
lib/geo/wkt/encoder.ex
| 0.809201
| 0.702976
|
encoder.ex
|
starcoder
|
defmodule AWS.ServiceQuotas do
@moduledoc """
Service Quotas is a web service that you can use to manage many of your AWS
service quotas. Quotas, also referred to as limits, are the maximum values
for a resource, item, or operation. This guide provide descriptions of the
Service Quotas actions that you can call from an API. For the Service
Quotas user guide, which explains how to use Service Quotas from the
console, see [What is Service
Quotas](https://docs.aws.amazon.com/servicequotas/latest/userguide/intro.html).
<note> AWS provides SDKs that consist of libraries and sample code for
programming languages and platforms (Java, Ruby, .NET, iOS, Android,
etc...,). The SDKs provide a convenient way to create programmatic access
to Service Quotas and AWS. For information about the AWS SDKs, including
how to download and install them, see the [Tools for Amazon Web
Services](https://docs.aws.amazon.com/aws.amazon.com/tools) page.
</note>
"""
@doc """
Associates the Service Quotas template with your organization so that when
new accounts are created in your organization, the template submits
increase requests for the specified service quotas. Use the Service Quotas
template to request an increase for any adjustable quota value. After you
define the Service Quotas template, use this operation to associate, or
enable, the template.
"""
def associate_service_quota_template(client, input, options \\ []) do
request(client, "AssociateServiceQuotaTemplate", input, options)
end
@doc """
Removes a service quota increase request from the Service Quotas template.
"""
def delete_service_quota_increase_request_from_template(client, input, options \\ []) do
request(client, "DeleteServiceQuotaIncreaseRequestFromTemplate", input, options)
end
@doc """
Disables the Service Quotas template. Once the template is disabled, it
does not request quota increases for new accounts in your organization.
Disabling the quota template does not apply the quota increase requests
from the template.
**Related operations**
<ul> <li> To enable the quota template, call
`AssociateServiceQuotaTemplate`.
</li> <li> To delete a specific service quota from the template, use
`DeleteServiceQuotaIncreaseRequestFromTemplate`.
</li> </ul>
"""
def disassociate_service_quota_template(client, input, options \\ []) do
request(client, "DisassociateServiceQuotaTemplate", input, options)
end
@doc """
Retrieves the default service quota values. The Value returned for each
quota is the AWS default value, even if the quota has been increased.
"""
def get_a_w_s_default_service_quota(client, input, options \\ []) do
request(client, "GetAWSDefaultServiceQuota", input, options)
end
@doc """
Retrieves the `ServiceQuotaTemplateAssociationStatus` value from the
service. Use this action to determine if the Service Quota template is
associated, or enabled.
"""
def get_association_for_service_quota_template(client, input, options \\ []) do
request(client, "GetAssociationForServiceQuotaTemplate", input, options)
end
@doc """
Retrieves the details for a particular increase request.
"""
def get_requested_service_quota_change(client, input, options \\ []) do
request(client, "GetRequestedServiceQuotaChange", input, options)
end
@doc """
Returns the details for the specified service quota. This operation
provides a different Value than the `GetAWSDefaultServiceQuota` operation.
This operation returns the applied value for each quota.
`GetAWSDefaultServiceQuota` returns the default AWS value for each quota.
"""
def get_service_quota(client, input, options \\ []) do
request(client, "GetServiceQuota", input, options)
end
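# A minimal call sketch (hedged: the quota code below is a placeholder;
# real codes look like "L-XXXXXXXX" and can be discovered via
# `list_service_quotas/3`):
#
#     {:ok, %{"Quota" => quota}, _response} =
#       AWS.ServiceQuotas.get_service_quota(client, %{
#         "ServiceCode" => "ec2",
#         "QuotaCode" => "L-XXXXXXXX"
#       })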
@doc """
Returns the details of the service quota increase request in your template.
"""
def get_service_quota_increase_request_from_template(client, input, options \\ []) do
request(client, "GetServiceQuotaIncreaseRequestFromTemplate", input, options)
end
@doc """
Lists all default service quotas for the specified AWS service or all AWS
services. ListAWSDefaultServiceQuotas is similar to `ListServiceQuotas`
except for the Value object. The Value object returned by
`ListAWSDefaultServiceQuotas` is the default value assigned by AWS. This
request returns a list of all service quotas for the specified service. For
each quota in the listing, the default value shown is the value that AWS
provides for that quota.
<note> Always check the `NextToken` response parameter when calling any of
the `List*` operations. These operations can return an unexpected list of
results, even when there are more results available. When this happens, the
`NextToken` response parameter contains a value to pass the next call to
the same API to request the next part of the list.
</note>
"""
def list_a_w_s_default_service_quotas(client, input, options \\ []) do
request(client, "ListAWSDefaultServiceQuotas", input, options)
end
@doc """
Requests a list of the changes to quotas for a service.
"""
def list_requested_service_quota_change_history(client, input, options \\ []) do
request(client, "ListRequestedServiceQuotaChangeHistory", input, options)
end
@doc """
Requests a list of the changes to specific service quotas. This command
provides additional granularity over the
`ListRequestedServiceQuotaChangeHistory` command. Once a quota change
request has reached `CASE_CLOSED, APPROVED,` or `DENIED`, the history has
been kept for 90 days.
"""
def list_requested_service_quota_change_history_by_quota(client, input, options \\ []) do
request(client, "ListRequestedServiceQuotaChangeHistoryByQuota", input, options)
end
@doc """
Returns a list of the quota increase requests in the template.
"""
def list_service_quota_increase_requests_in_template(client, input, options \\ []) do
request(client, "ListServiceQuotaIncreaseRequestsInTemplate", input, options)
end
@doc """
Lists all service quotas for the specified AWS service. This request
returns a list of the service quotas for the specified service. In the
listing, the default values are the values that AWS provides for the quotas.
<note> Always check the `NextToken` response parameter when calling any of
the `List*` operations. These operations can return an unexpected list of
results, even when there are more results available. When this happens, the
`NextToken` response parameter contains a value to pass the next call to
the same API to request the next part of the list.
</note>
"""
def list_service_quotas(client, input, options \\ []) do
request(client, "ListServiceQuotas", input, options)
end
@doc """
Lists the AWS services available in Service Quotas. Not all AWS services
are available in Service Quotas. To list the service quotas for a specific
service, use `ListServiceQuotas`.
"""
def list_services(client, input, options \\ []) do
request(client, "ListServices", input, options)
end
@doc """
Defines and adds a quota to the service quota template. To add a quota to
the template, you must provide the `ServiceCode`, `QuotaCode`, `AwsRegion`,
and `DesiredValue`. Once you add a quota to the template, use
`ListServiceQuotaIncreaseRequestsInTemplate` to see the list of quotas in
the template.
"""
def put_service_quota_increase_request_into_template(client, input, options \\ []) do
request(client, "PutServiceQuotaIncreaseRequestIntoTemplate", input, options)
end
@doc """
Retrieves the details of a service quota increase request. The response to
this command provides the details in the `RequestedServiceQuotaChange`
object.
"""
def request_service_quota_increase(client, input, options \\ []) do
request(client, "RequestServiceQuotaIncrease", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "servicequotas"}
host = build_host("servicequotas", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "ServiceQuotasV20190624.#{action}"}
]
payload = Poison.Encoder.encode(input, %{})
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/service_quotas.ex
| 0.864625
| 0.57075
|
service_quotas.ex
|
starcoder
|
defmodule XUtil.Math do
@moduledoc "Various mathematical helpers; corresponds to X-Plane's hl_math.h"
def reinterpolate(input, from_bits, to_bits)
when is_integer(input) and input >= 0 and is_integer(from_bits) and is_integer(to_bits) do
from_max = floor(:math.pow(2, from_bits)) - 1
to_max = floor(:math.pow(2, to_bits)) - 1
round(input * to_max / from_max)
end
def quantized_int_to_float(input, from_bits, out_min, out_max)
when is_integer(input) and input >= 0 and is_integer(from_bits) and out_min < out_max do
from_max = floor(:math.pow(2, from_bits)) - 1
limit(out_min + input * (out_max - out_min) / from_max, out_min, out_max)
end
def quantize_float(val, in_min, in_max, to_bits) when is_number(val) and in_min < in_max and is_integer(to_bits) do
to_max = floor(:math.pow(2, to_bits)) - 1
round(to_max / (in_max - in_min) * (val - in_min))
end
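# Worked example for the quantization helpers above: widening an 8-bit
# sample to 16 bits maps full scale to full scale, and a float round-trips
# through a 10-bit quantization with at most one step of error:
#
#     XUtil.Math.reinterpolate(255, 8, 16)                  #=> 65535
#     XUtil.Math.quantize_float(0.5, 0.0, 1.0, 10)          #=> 512
#     XUtil.Math.quantized_int_to_float(512, 10, 0.0, 1.0)  #=> 0.5004887585532747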
def limit(input, min_val, max_val) when is_number(input) and min_val < max_val do
input
|> min(max_val)
|> max(min_val)
end
@doc """
Wraps the input in the specified range, primarily useful for degree measurements.
wrap_lon() and wrap_lat() are implemented in terms of this.
iex(1)> XUtil.Math.wrap(-180.0, -180, 180)
-180.0
iex(1)> XUtil.Math.wrap(-180.1, -180, 180) |> Float.round(1)
179.9
iex(1)> XUtil.Math.wrap(179.9, -180, 180) |> Float.round(1)
179.9
iex(1)> XUtil.Math.wrap(180, -180, 180)
-180
iex(1)> XUtil.Math.wrap(181, -180, 180)
-179
iex(1)> XUtil.Math.wrap(-181, -180, 180)
179
"""
def wrap(input, min, max) when is_integer(input) and is_integer(min) and is_integer(max) and min < max do
range_size = max - min
remainder = rem(input - min, range_size)
if remainder < 0, do: max + remainder, else: min + remainder
end
def wrap(input, min, max) when is_float(input) and is_number(min) and is_number(max) and min < max do
min + fmod_positive(input - min, max - min)
end
@doc "An implementation of C's fmod() --- modular division on floating point values"
def fmod(input, max) when is_float(input) do
input - max * trunc(input / max)
end
@doc "Like fmod, but returns a positive (wrapped) remainder when C's fmod() would return a negative"
def fmod_positive(input, max) when is_float(input) do
input - max * floor(input / max)
end
def nearly_equal(f1, f2, tolerance \\ 0.000001) do
abs(f1 - f2) <= tolerance
end
def pythagorean_distance({x1, y1}, {x2, y2}) do
dx = x1 - x2
dy = y1 - y2
:math.sqrt(dx * dx + dy * dy)
end
def pythagorean_distance(%{lon: x1, lat: y1}, %{lon: x2, lat: y2}) do
dx = x1 - x2
dy = y1 - y2
:math.sqrt(dx * dx + dy * dy)
end
def pythagorean_distance(p, q, r), do: :math.sqrt(p * p + q * q + r * r)
def meters_sec_to_knots(speed_msc), do: speed_msc * 1.9438445
def meters_to_feet(meters), do: meters * 3.2808399
def feet_to_meters(feet), do: feet / 3.2808399
@pi 3.14159265359
@pi_over_180 3.14159265359 / 180.0
@mean_earth_radius_meters 6_371_008.8
@doc """
The great-circle distance between the two aircraft locations, in meters
See: https://en.wikipedia.org/wiki/Great-circle_distance
We're using the Haversine formula for better accuracy at short distances: https://en.wikipedia.org/wiki/Haversine_formula
"""
def great_circle(%{lon: lon1, lat: lat1}, %{lon: lon2, lat: lat2}), do: great_circle({lon1, lat1}, {lon2, lat2})
def great_circle({lon1, lat1}, {lon2, lat2}) do
a = :math.sin((lat2 - lat1) * @pi_over_180 / 2)
b = :math.sin((lon2 - lon1) * @pi_over_180 / 2)
s = a * a + b * b * :math.cos(lat1 * @pi_over_180) * :math.cos(lat2 * @pi_over_180)
2 * :math.atan2(:math.sqrt(s), :math.sqrt(1 - s)) * @mean_earth_radius_meters
end
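# Sanity check for the haversine above: one degree of longitude along the
# equator spans roughly 1/360 of the mean circumference:
#
#     XUtil.Math.great_circle({0.0, 0.0}, {1.0, 0.0})
#     #=> about 111_195 meters (2 * pi * 6_371_008.8 / 360)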
@doc """
The bearing, in degrees, from the first lon/lat to the second
https://stackoverflow.com/a/3209935/1417451
"""
def bearing_deg({lon1, lat1}, {lon2, lat2}) do
lon1_rad = lon1 * @pi_over_180
lat1_rad = lat1 * @pi_over_180
lon2_rad = lon2 * @pi_over_180
lat2_rad = lat2 * @pi_over_180
y = :math.sin(lon2_rad - lon1_rad) * :math.cos(lat2_rad)
x = :math.cos(lat1_rad) * :math.sin(lat2_rad) - :math.sin(lat1_rad) * :math.cos(lat2_rad) * :math.cos(lon2_rad - lon1_rad)
theta = :math.atan2(y, x)
fmod(theta * 180 / @pi + 360, 360.0)
end
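# Examples (following the formula above): due north is 0 degrees, due east 90:
#
#     XUtil.Math.bearing_deg({0.0, 0.0}, {0.0, 1.0})  #=> 0.0
#     XUtil.Math.bearing_deg({0.0, 0.0}, {1.0, 0.0})  #=> 90.0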
# International nm is defined as exactly 1852 meters: https://en.wikipedia.org/wiki/Nautical_mile
def meters_to_nautical_miles(meters), do: meters / 1852
def mean_lon_lat({lon0, lat0}, {lon1, lat1}) do
{(lon0 + lon1) * 0.5, (lat0 + lat1) * 0.5}
end
@doc "Wraps -180 -> -180; 179 -> 179; 180 -> -180"
def wrap_lon(lon), do: wrap(lon, -180, 180)
def wrap_lat(lat), do: wrap(lat, -90, 90)
def wrap_lon_lat(lon, lat), do: {wrap_lon(lon), wrap_lat(lat)}
def wrap_lon_lat({lon, lat}), do: {wrap_lon(lon), wrap_lat(lat)}
def wrap_lon_lat([lon | [lat]]), do: {wrap_lon(lon), wrap_lat(lat)}
end
|
lib/x_util/math.ex
| 0.846498
| 0.576184
|
math.ex
|
starcoder
|
defmodule Membrane.Element.Msdk.H264.Encoder do
@moduledoc """
Membrane element that encodes raw video frames to H264 format using
hardware-accelerated API available for Intel® Gen graphics hardware platforms.
The element expects each frame to be received in a separate buffer, so the parser
(`Membrane.Element.RawVideo.Parser`) may be required in a pipeline before
the encoder (e.g. when input is read from `Membrane.Element.File.Source`).
Additionally, the encoder has to receive proper caps with picture format and dimensions
before any encoding takes place.
Please check `t:t/0` for available options.
"""
use Membrane.Filter
alias __MODULE__.Native
alias Membrane.Buffer
alias Membrane.Caps.Video.{H264, Raw}
use Bunch
use Bunch.Typespec
def_input_pad :input,
demand_unit: :buffers,
caps: {Raw, format: one_of([:I420, :I422]), aligned: true}
def_output_pad :output,
caps: {H264, stream_format: :byte_stream, alignment: :au}
@h264_profile :high
@default_bitrate 1_000
@default_target_usage :medium
@list_type target_usages :: [
:quality,
:veryslow,
:slower,
:slow,
:medium,
:balanced,
:fast,
:faster,
:veryfast,
:speed
]
def_options bitrate: [
description: """
Encoded bit rate (Kbits per second).
""",
type: :int,
default: @default_bitrate
],
target_usage: [
description: """
TargetUsage (TU) is similar to the "preset" concept in FFmpeg.
It provides a quick way to change quality/speed tradeoffs in codec settings.
""",
type: :atom,
spec: target_usages(),
default: @default_target_usage
]
@impl true
def handle_init(opts) do
{:ok, opts |> Map.merge(%{encoder_ref: nil})}
end
@impl true
def handle_demand(:output, _size, :buffers, _ctx, %{encoder_ref: nil} = state) do
# Wait until we have an encoder
{:ok, state}
end
def handle_demand(:output, size, :buffers, _ctx, state) do
{{:ok, demand: {:input, size}}, state}
end
@impl true
def handle_process(:input, %Buffer{payload: payload}, ctx, state) do
%{encoder_ref: encoder_ref} = state
case Native.encode(payload, encoder_ref) do
{:ok, frames} ->
bufs = wrap_frames(frames)
in_caps = ctx.pads.input.caps
caps =
{:output,
%H264{
alignment: :au,
framerate: in_caps.framerate,
height: in_caps.height,
width: in_caps.width,
profile: @h264_profile,
stream_format: :byte_stream
}}
# redemand is needed until the internal buffer of encoder is filled (no buffers will be
# generated before that) but it is a noop if the demand has been fulfilled
actions = [{:caps, caps} | bufs] ++ [redemand: :output]
{{:ok, actions}, state}
{:error, reason} ->
{{:error, reason}, state}
_ ->
{{:error, "Invalid native encode response"}, state}
end
end
@impl true
def handle_caps(:input, %Raw{} = caps, _ctx, state) do
{framerate_num, framerate_denom} = caps.framerate
case Native.create(
caps.width,
caps.height,
caps.format,
state.bitrate,
state.target_usage,
framerate_num,
framerate_denom
) do
{:ok, encoder_ref} ->
{{:ok, redemand: :output}, %{state | encoder_ref: encoder_ref}}
{:error, reason} ->
{{:error, reason}, state}
_ ->
{{:error, "Invalid native create response"}, state}
end
end
@impl true
def handle_end_of_stream(:input, _ctx, state) do
with {:ok, frames} <- Native.flush(state.encoder_ref),
bufs <- wrap_frames(frames) do
actions = bufs ++ [end_of_stream: :output, notify: {:end_of_stream, :input}]
{{:ok, actions}, state}
else
{:error, reason} -> {{:error, reason}, state}
end
end
@impl true
def handle_prepared_to_stopped(_ctx, state) do
{:ok, %{state | encoder_ref: nil}}
end
defp wrap_frames([]), do: []
defp wrap_frames(frames) do
frames |> Enum.map(fn frame -> %Buffer{payload: frame} end) ~> [buffer: {:output, &1}]
end
end
|
lib/membrane_element_msdk_h264/encoder.ex
| 0.82573
| 0.548553
|
encoder.ex
|
starcoder
|
defmodule AFK.Keycode.Key do
@moduledoc """
Represents a basic keyboard keycode, like letters, numbers, etc.
All standard keys on a keyboard except the modifiers can be represented by
`Key` keycodes. The currently supported keys are `t:key/0`.
"""
@enforce_keys [:key]
defstruct [:key]
@type key ::
unquote(
AFK.Scancode.keys()
|> Enum.map(&elem(&1, 1))
|> Enum.reverse()
|> Enum.reduce(&{:|, [], [&1, &2]})
)
@type t :: %__MODULE__{
key: key
}
@doc """
Creates a basic key keycode.
## Examples
iex> new(:a)
%AFK.Keycode.Key{key: :a}
iex> new(:up)
%AFK.Keycode.Key{key: :up}
"""
@spec new(key) :: t
def new(key)
for {_value, key} <- AFK.Scancode.keys() do
def new(unquote(key)), do: struct!(__MODULE__, key: unquote(key))
end
defimpl AFK.Scancode.Protocol do
@spec scancode(keycode :: AFK.Keycode.Key.t()) :: AFK.Scancode.t()
def scancode(keycode)
for {value, key} <- AFK.Scancode.keys() do
def scancode(%AFK.Keycode.Key{key: unquote(key)}), do: unquote(value)
end
end
defimpl AFK.ApplyKeycode do
@spec apply_keycode(keycode :: AFK.Keycode.Key.t(), state :: AFK.State.t(), key :: atom) :: AFK.State.t()
def apply_keycode(keycode, state, key) do
keycode_used? =
Enum.any?(state.indexed_keys, fn
{_index, {_key, ^keycode}} -> true
_else -> false
end)
if keycode_used? do
state
else
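# Walk candidate indexes upward from 0 and claim the lowest slot that is
# not already holding a key, so held keys pack densely from index 0.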
lowest_available_index =
Enum.reduce_while(Stream.iterate(0, &(&1 + 1)), state.indexed_keys, fn index, acc ->
case acc[index] do
nil -> {:halt, index}
_else -> {:cont, acc}
end
end)
indexed_keys = Map.put(state.indexed_keys, lowest_available_index, {key, keycode})
%{state | indexed_keys: indexed_keys}
end
end
@spec unapply_keycode(keycode :: AFK.Keycode.Key.t(), state :: AFK.State.t(), key :: atom) :: AFK.State.t()
def unapply_keycode(keycode, state, key) do
index =
Enum.find_value(state.indexed_keys, fn
{index, {^key, ^keycode}} -> index
_else -> nil
end)
indexed_keys = Map.delete(state.indexed_keys, index)
%{state | indexed_keys: indexed_keys}
end
end
end
|
lib/afk/keycode/key.ex
| 0.849628
| 0.436622
|
key.ex
|
starcoder
|
defmodule ArangoDB.Ecto do
@moduledoc """
Ecto 2.x adapter for ArangoDB.
At the moment the `from`, `where`, `order_by`, `limit`
`offset` and `select` clauses are supported.
"""
alias ArangoDB.Ecto.Utils
def truncate(repo, coll) do
%Arango.Collection{name: coll}
|> Arango.Collection.truncate()
|> Arango.Request.perform(Utils.get_config(repo))
|> case do
{:ok, _} -> :ok
{:error, _} = result -> result
end
end
def query(repo, aql, vars \\ []) do
res = ArangoDB.Ecto.Adapter.exec_query!(repo, aql, vars)
{:ok, res}
rescue
e in RuntimeError -> {:error, e.message}
end
def query!(repo, aql, vars \\ []) do
ArangoDB.Ecto.Adapter.exec_query!(repo, aql, vars)
end
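# A minimal usage sketch (`MyApp.Repo` and the collection are hypothetical;
# `@age` is an AQL bind variable filled from the vars list):
#
#     {:ok, adults} =
#       ArangoDB.Ecto.query(MyApp.Repo,
#         "FOR u IN users FILTER u.age >= @age RETURN u", age: 18)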
@behaviour Ecto.Adapter
# Delegates for Adapter behaviour
defmacro __before_compile__(_env) do
end
defdelegate autogenerate(field_type), to: ArangoDB.Ecto.Adapter
defdelegate child_spec(repo, options), to: ArangoDB.Ecto.Adapter
defdelegate delete(repo, schema_meta, filters, options), to: ArangoDB.Ecto.Adapter
defdelegate dumpers(primitive_type, ecto_type), to: ArangoDB.Ecto.Adapter
defdelegate ensure_all_started(repo, type), to: ArangoDB.Ecto.Adapter
defdelegate execute(repo, query_meta, query, params, process, options),
to: ArangoDB.Ecto.Adapter
defdelegate insert(repo, schema_meta, fields, on_conflict, returning, options),
to: ArangoDB.Ecto.Adapter
defdelegate insert_all(repo, schema_meta, header, list, on_conflict, returning, options),
to: ArangoDB.Ecto.Adapter
defdelegate loaders(primitive_type, ecto_type), to: ArangoDB.Ecto.Adapter
defdelegate prepare(atom, query), to: ArangoDB.Ecto.Adapter
defdelegate update(repo, schema_meta, fields, filters, returning, options),
to: ArangoDB.Ecto.Adapter
@behaviour Ecto.Adapter.Migration
# Delegates for Migration behaviour
defdelegate supports_ddl_transaction?, to: ArangoDB.Ecto.Migration
defdelegate execute_ddl(repo, ddl, opts), to: ArangoDB.Ecto.Migration
@behaviour Ecto.Adapter.Storage
# Delegates for Storage behaviour
defdelegate storage_up(options), to: ArangoDB.Ecto.Storage
defdelegate storage_down(options), to: ArangoDB.Ecto.Storage
end
|
lib/arangodb_ecto.ex
| 0.730482
| 0.404184
|
arangodb_ecto.ex
|
starcoder
|
defmodule Conform.Utils do
@moduledoc false
import IO.ANSI, only: [green: 0, yellow: 0, red: 0]
@doc "Print an debugging message"
def debug(message), do: log("==> #{message}")
@doc "Print an informational message"
def info(message), do: log("==> #{message}", green)
@doc "Print a warning message"
def warn(message), do: log("==> #{message}", yellow)
@doc "Print a notice message"
def notice(message), do: log("#{message}", yellow)
@doc "Print an error message"
def error(message), do: log("==> #{message}", red)
# Prints a message to standard output, optionally colorized.
case Mix.env do
:test ->
defp log(message, color \\ nil) do
case color do
nil -> message
_ -> colorize(message, color)
end
end
_ ->
defp log(message, color \\ nil) do
case color do
nil -> IO.puts message
_ -> IO.puts colorize(message, color)
end
end
end
# Colorizes a message using ANSI escapes
defp colorize(message, color), do: color <> message <> IO.ANSI.reset
@doc """
Recursively merges two keyword lists. Values themselves are also merged (depending on type),
such that the resulting keyword list is a true merge of the second keyword list over the first.
## Examples
iex> old = [one: [one_sub: [a: 1, b: 2]], two: {1, "foo", :bar}, three: 'just a charlist', four: [1, 2, 3]]
...> new = [one: [one_sub: [a: 2, c: 1]], two: {1, "foo", :baz, :qux}, three: 'a new charlist', four: [1, 2, 4, 6]]
...> #{__MODULE__}.merge(old, new)
[one: [one_sub: [a: 2, b: 2, c: 1]], two: {1, "foo", :baz, :qux}, three: 'a new charlist', four: [1, 2, 4, 6]]
"""
def merge(old, new) when is_list(old) and is_list(new),
do: merge(old, new, [])
defp merge([{_old_key, old_value} = h | t], new, acc) when is_tuple(h) do
case :lists.keytake(elem(h, 0), 1, new) do
{:value, {new_key, new_value}, rest} ->
# Value is present in new, so merge the value
merged = merge_term(old_value, new_value)
merge(t, rest, [{new_key, merged}|acc])
false ->
# Value doesn't exist in new, so add it
merge(t, new, [h|acc])
end
end
defp merge([], new, acc) do
Enum.reverse(acc, new)
end
defp merge_term([hold|told] = old, [hnew|tnew] = new) when is_list(new) do
cond do
:io_lib.char_list(old) && :io_lib.char_list(new) ->
new
Keyword.keyword?(old) && Keyword.keyword?(new) ->
Keyword.merge(old, new, fn (_key, old_val, new_val) -> merge_term(old_val, new_val) end)
|> Enum.sort_by(fn {k, _} -> k end)
true ->
[merge_term(hold, hnew) | merge_term(told, tnew)]
end
end
defp merge_term([], new) when is_list(new), do: new
defp merge_term(old, []) when is_list(old), do: old
defp merge_term(old, new) when is_tuple(old) and is_tuple(new) do
merged = old
|> Tuple.to_list
|> Enum.with_index
|> Enum.reduce([], fn
{[], idx}, acc ->
[elem(new, idx)|acc]
{val, idx}, acc when is_list(val) ->
case :io_lib.char_list(val) do
true ->
[elem(new, idx) | acc]
false ->
merged = merge_term(val, elem(new, idx))
[merged | acc]
end
{val, idx}, acc when is_tuple(val) ->
[merge_term(val, elem(new, idx)) | acc]
{val, idx}, acc ->
[(elem(new, idx) || val) | acc]
end)
|> Enum.reverse
merged_count = Enum.count(merged)
extra_count = :erlang.size(new) - merged_count
case extra_count do
0 -> merged
_ ->
extra = new
|> Tuple.to_list
|> Enum.slice(merged_count, extra_count)
List.to_tuple(merged ++ extra)
end
end
defp merge_term(old, nil), do: old
defp merge_term(_old, new), do: new
@doc """
Recursively sorts a keyword list such that keys are in ascending alphabetical order
## Example
iex> kwlist = [a: 1, c: 2, b: 3, d: [z: 99, w: 50, x: [a_2: 1, a_1: 2]]]
...> #{__MODULE__}.sort_kwlist(kwlist)
[a: 1, b: 3, c: 2, d: [w: 50, x: [a_1: 2, a_2: 1], z: 99]]
"""
def sort_kwlist(list) when is_list(list) do
case Keyword.keyword?(list) do
true ->
do_sort_kwlist(list, [])
|> Enum.sort_by(fn {k, _} -> k end)
false -> list
end
end
def sort_kwlist(val), do: val
defp do_sort_kwlist([{k, v}|t], acc) when is_list(v) do
result = sort_kwlist(v)
do_sort_kwlist(t, [{k, result} | acc])
end
defp do_sort_kwlist([{k, v}|t], acc), do: do_sort_kwlist(t, [{k, v} | acc])
defp do_sort_kwlist([], acc), do: acc
@doc """
Loads all modules that extend a given module in the current code path.
"""
@spec load_plugins_of(atom()) :: [] | [atom]
def load_plugins_of(type) when is_atom(type) do
type |> available_modules |> Enum.reduce([], &load_plugin/2)
end
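# Example (hypothetical behaviour module): collect every loaded module that
# declares `@behaviour My.Plugin.Behaviour`:
#
#     plugins = Conform.Utils.load_plugins_of(My.Plugin.Behaviour)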
defp load_plugin(module, modules) do
if Code.ensure_loaded?(module), do: [module | modules], else: modules
end
defp available_modules(plugin_type) do
:code.all_loaded
|> Stream.map(fn {module, _path} -> {module, get_in(module.module_info, [:attributes, :behaviour])} end)
|> Stream.filter(fn {_module, behaviours} -> is_list(behaviours) && plugin_type in behaviours end)
|> Enum.map(fn {module, _} -> module end)
end
@doc """
Convert a list of results from the conf ETS table (key_path/value tuples)
into a tree in the form of nested keyword lists. An example:
- If we have a key of ['lager', 'handlers']
- And given the following results from Conform.Conf.find for that key:
[{['lager', 'handlers', 'console', 'level'], :info},
{['lager', 'handlers', 'file', 'info'], '/var/log/info.log'},
{['lager', 'handlers', 'file', 'error'], '/var/log/error.log'}]
- The following tree would be produced
[console: [level: :info],
file: [info: '/var/log/info.log', error: '/var/log/error.log']]]]
"""
@spec results_to_tree([{[char_list], term}], [char_list] | nil) :: Keyword.t
def results_to_tree(selected, key \\ []) do
Enum.reduce(selected, [], fn {key_path, v}, acc ->
key_path = Enum.map(key_path -- key, &List.to_atom/1)
{_, acc} = Enum.reduce(key_path, {[], acc}, fn
k, {[], acc} ->
case get_in(acc, [k]) do
kw when is_list(kw) -> {[k], acc}
_ -> {[k], put_in(acc, [k], [])}
end
k, {ps, acc} ->
case get_in(acc, ps++[k]) do
kw when is_list(kw) -> {ps++[k], acc}
_ -> {ps++[k], put_in(acc, ps++[k], [])}
end
end)
put_in(acc, key_path, v)
end)
end
end
|
lib/conform/utils/utils.ex
| 0.554229
| 0.459258
|
utils.ex
|
starcoder
|