code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Ecto.Query.JoinBuilder do
  @moduledoc false

  alias Ecto.Query.BuilderUtil
  alias Ecto.Query.Query
  alias Ecto.Query.QueryExpr
  alias Ecto.Query.JoinExpr

  @doc """
  Escapes a join expression (not including the `on` expression).

  It returns a tuple containing the binds, the on expression (if available)
  and the association expression.

  ## Examples

      iex> escape(quote(do: x in "foo"), [])
      { :x, "foo", nil }

      iex> escape(quote(do: "foo"), [])
      { nil, "foo", nil }

      iex> escape(quote(do: x in Sample), [])
      { :x, { :__aliases__, [alias: false], [:Sample] }, nil }

      iex> escape(quote(do: c in p.comments), [p: 0])
      { :c, nil, {{:{}, [], [:&, [], [0]]}, :comments} }
  """
  @spec escape(Macro.t, Keyword.t) :: { [atom], Macro.t | nil, Macro.t | nil }
  # `var in source` - bind the variable and escape the right-hand side.
  # NOTE: the original file contained this exact clause twice; the second
  # copy was unreachable dead code and has been removed.
  def escape({ :in, _, [{ var, _, context }, expr] }, vars)
      when is_atom(var) and is_atom(context) do
    { _, expr, assoc } = escape(expr, vars)
    { var, expr, assoc }
  end

  # A bare alias (e.g. `Sample`) - no bind, no association.
  def escape({ :__aliases__, _, _ } = module, _vars) do
    { nil, module, nil }
  end

  # A bare source string (e.g. `"foo"`) - no bind, no association.
  def escape(string, _vars) when is_binary(string) do
    { nil, string, nil }
  end

  # Anything else must be an association access (e.g. `p.comments`).
  def escape(dot, vars) do
    case BuilderUtil.escape_dot(dot, vars) do
      { _, _ } = var_field ->
        { [], nil, var_field }
      :error ->
        raise Ecto.QueryError, reason: "malformed `join` query expression"
    end
  end

  @doc """
  Builds a quoted expression.

  The quoted expression should evaluate to a query at runtime.
  If possible, it does all calculations at compile time to avoid
  runtime work.
  """
  @spec build_with_binds(Macro.t, atom, [Macro.t], Macro.t, Macro.t, Macro.t, Macro.Env.t) :: { Macro.t, Keyword.t, non_neg_integer | nil }
  def build_with_binds(query, qual, binding, expr, on, count_bind, env) do
    binding = BuilderUtil.escape_binding(binding)
    { join_bind, join_expr, join_assoc } = escape(expr, binding)
    is_assoc? = not nil?(join_assoc)

    validate_qual(qual)
    validate_on(on, is_assoc?)
    validate_bind(join_bind, binding)

    # NOTE(review): `count_bind`/`count_setter` assigned inside `if` are used
    # below - this relies on pre-1.0 Elixir scoping where such bindings leak.
    if join_bind && !count_bind do
      # If count_bind is not an integer, make it a variable.
      # The variable is the getter/setter storage.
      count_bind = quote(do: count_bind)
      count_setter = quote(do: unquote(count_bind) = BuilderUtil.count_binds(query))
    end

    binding = binding ++ [{ join_bind, count_bind }]
    join_on = escape_on(on, binding, env)

    join =
      quote do
        JoinExpr[qual: unquote(qual), source: unquote(join_expr), on: unquote(join_on),
                 file: unquote(env.file), line: unquote(env.line), assoc: unquote(join_assoc)]
      end

    if is_integer(count_bind) do
      # Number of binds known at compile time: apply the join directly.
      count_bind = count_bind + 1
      quoted = BuilderUtil.apply_query(query, __MODULE__, [join], env)
    else
      # Otherwise emit code that counts the binds at runtime.
      count_bind = quote(do: unquote(count_bind) + 1)
      quoted =
        quote do
          Query[joins: joins] = query = Ecto.Queryable.to_query(unquote(query))
          unquote(count_setter)
          query.joins(joins ++ [unquote(join)])
        end
    end

    { quoted, binding, count_bind }
  end

  @doc """
  Applies the join expression to the given queryable at runtime.
  """
  def apply(query, expr) do
    Ecto.Query.Query[joins: joins] = query = Ecto.Queryable.to_query(query)
    query.joins(joins ++ [expr])
  end

  # Escapes the `on` expression into a QueryExpr record; `nil` passes through.
  defp escape_on(nil, _binding, _env), do: nil
  defp escape_on(on, binding, env) do
    on = BuilderUtil.escape(on, binding)
    quote do: QueryExpr[expr: unquote(on), line: unquote(env.line), file: unquote(env.file)]
  end

  @qualifiers [:inner, :left, :right, :full]

  defp validate_qual(qual) when qual in @qualifiers, do: :ok
  defp validate_qual(_qual) do
    raise Ecto.QueryError,
      reason: "invalid join qualifier, accepted qualifiers are: " <>
              Enum.map_join(@qualifiers, ", ", &"`#{inspect &1}`")
  end

  # Non-association joins require an explicit `on` expression.
  defp validate_on(nil, false) do
    raise Ecto.QueryError,
      reason: "`join` expression requires explicit `on` " <>
              "expression unless it's an association join expression"
  end
  defp validate_on(_on, _is_assoc?), do: :ok

  # Reject a join variable that shadows an existing query binding.
  defp validate_bind(bind, all) do
    if bind && bind in all do
      raise Ecto.QueryError, reason: "variable `#{bind}` is already defined in query"
    end
  end
end
|
lib/ecto/query/join_builder.ex
| 0.815637
| 0.435121
|
join_builder.ex
|
starcoder
|
defmodule Shippex.ISO do
  @moduledoc """
  This module contains data and functions for obtaining geographic data in
  compliance with the ISO-3166-2 standard.
  """

  import Shippex.Util, only: [unaccent: 1]

  # The full ISO-3166-2 dataset, decoded once at compile time from the JSON
  # file bundled in priv/.
  @iso Shippex.Config.json_library().decode!(
         File.read!(:code.priv_dir(:shippex) ++ '/iso-3166-2.json')
       )

  @type country_code() :: binary()

  @doc """
  Returns all ISO-3166-2 data.
  """
  @spec data() :: %{country_code() => map()}
  def data(), do: @iso

  @doc """
  Returns a map of country codes and their full names. Takes in a list of
  optional atoms to tailor the results. For example, `:with_subdivisions` only
  includes countries with subdivisions.

      iex> countries = ISO.countries()
      iex> get_in(countries, ["US", "name"])
      "United States of America (the)"
      iex> get_in(countries, ["PR", "name"])
      "Puerto Rico"

      iex> countries = ISO.countries([:with_subdivisions])
      iex> countries["PR"]
      nil

      iex> countries = ISO.countries([:exclude_territories])
      iex> countries["PR"]
      nil
  """
  @spec countries([atom()]) :: %{String.t() => map()}
  def countries(opts \\ []) do
    with_subdivisions? = :with_subdivisions in opts
    exclude_territories? = :exclude_territories in opts

    Enum.reduce(@iso, %{}, fn {code, %{"subdivisions" => subs} = country}, acc ->
      cond do
        with_subdivisions? and subs == %{} -> acc
        exclude_territories? and territory?(code) -> acc
        true -> Map.put(acc, code, country)
      end
    end)
  end

  @doc """
  Returns true if the country with the given code is a territory of another
  country. This only applies to subdivisions that have their own country code.

      iex> ISO.territory?("PR")
      true
      iex> ISO.territory?("US")
      false
      iex> ISO.territory?("TX")
      false
  """
  @spec territory?(country_code()) :: boolean()
  def territory?(code) do
    country = @iso[code]

    # A territory appears both as its own country code and as a subdivision
    # (keyed "<parent>-<code>") of its parent, under a matching name.
    Enum.any?(@iso, fn
      {a_code, %{"subdivisions" => subdivisions}} ->
        case Map.get(subdivisions, "#{a_code}-#{code}") do
          %{"name" => name} ->
            equal_names?(name, country["name"]) or
              equal_names?(name, country["full_name"]) or
              equal_names?(name, country["short_name"])

          _ ->
            false
        end

      _ ->
        false
    end)
  end

  @doc """
  Converts a country's 2-letter code to its full name.

      iex> ISO.country_name("US")
      "United States of America (the)"
      iex> ISO.country_name("US", :informal)
      "United States of America"
      iex> ISO.country_name("US", :short_name)
      "UNITED STATES OF AMERICA"
      iex> ISO.country_name("TN")
      "Tunisia"
      iex> ISO.country_name("TX")
      nil
  """
  @spec country_name(country_code(), nil | :informal | :short_name) :: nil | String.t()
  def country_name(code, type \\ nil) do
    case @iso[code] do
      %{"name" => name, "short_name" => short_name} ->
        case type do
          nil -> name
          :short_name -> short_name
          # The informal name is the formal name minus a trailing
          # parenthesized qualifier, e.g. "(the)".
          :informal -> strip_parens(name)
        end

      _ ->
        nil
    end
  end

  @doc """
  Converts a full country name to its 2-letter ISO-3166-2 code.

      iex> ISO.country_code("United States")
      "US"
      iex> ISO.country_code("UNITED STATES")
      "US"
      iex> ISO.country_code("Mexico")
      "MX"
      iex> ISO.country_code("Venezuela")
      "VE"
      iex> ISO.country_code("Iran")
      "IR"
      iex> ISO.country_code("Taiwan")
      "TW"
      iex> ISO.country_code("Bolivia")
      "BO"
      iex> ISO.country_code("Not a country.")
      nil
  """
  @spec country_code(String.t()) :: nil | country_code()
  def country_code(country) do
    country
    |> String.upcase()
    |> do_country_code()
  end

  # Special-cased aliases for common informal names that do not match the
  # official ISO names.
  defp do_country_code("ASCENSION" <> _), do: "SH"
  defp do_country_code("BRITISH VIRGIN ISLANDS"), do: "VG"
  defp do_country_code("GREAT BRITAIN" <> _), do: "GB"
  defp do_country_code("IRAN" <> _), do: "IR"
  defp do_country_code("SAINT HELENA" <> _), do: "SH"
  defp do_country_code("SAINT MARTIN" <> _), do: "MF"
  defp do_country_code("SINT MAARTEN" <> _), do: "SX"
  defp do_country_code("SWAZILAND" <> _), do: "SZ"
  defp do_country_code("SYRIA" <> _), do: "SY"
  defp do_country_code("TAIWAN" <> _), do: "TW"
  defp do_country_code("TRISTAN" <> _), do: "SH"
  defp do_country_code("UNITED STATES" <> _), do: "US"
  defp do_country_code("UNITED KINGDOM" <> _), do: "GB"
  defp do_country_code("VENEZUELA" <> _), do: "VE"

  # Fall back to scanning the dataset, comparing the (upcased) input against
  # each country's short, formal and full names.
  defp do_country_code(country) do
    Enum.find_value(@iso, fn
      {code, %{"short_name" => ^country}} ->
        code

      {code, %{"short_name" => short_name, "name" => name, "full_name" => full_name}} ->
        cond do
          String.upcase(name) == country -> code
          String.upcase(full_name) == country -> code
          strip_parens(short_name) == country -> code
          true -> nil
        end

      _ ->
        nil
    end)
  end

  # Removes a trailing parenthesized qualifier (e.g. "(the)"), unaccents and
  # trims the result.
  defp strip_parens(short_name) do
    short_name
    |> String.replace(~r/\(([\w\s]+)\)$/i, "", global: true)
    |> unaccent()
    |> String.trim()
  end

  @doc """
  Converts a full subdivision name to its 2-letter ISO-3166-2 code. The country
  MUST be an ISO-compliant 2-letter country code.

      iex> ISO.subdivision_code("US", "Texas")
      "US-TX"
      iex> ISO.subdivision_code("US", "US-TX")
      "US-TX"
      iex> ISO.subdivision_code("CA", "AlberTa")
      "CA-AB"
      iex> ISO.subdivision_code("MX", "Veracruz")
      "MX-VER"
      iex> ISO.subdivision_code("MX", "Yucatán")
      "MX-YUC"
      iex> ISO.subdivision_code("MX", "Yucatan")
      "MX-YUC"
      iex> ISO.subdivision_code("MX", "YucatAN")
      "MX-YUC"
      iex> ISO.subdivision_code("CA", "US-TX")
      nil
      iex> ISO.subdivision_code("MX", "Not a subdivision.")
      nil
  """
  @spec subdivision_code(country_code(), String.t()) :: nil | String.t()
  def subdivision_code(country, subdivision)
      when is_binary(subdivision) and is_binary(country) do
    divisions = @iso[country]["subdivisions"]

    cond do
      # Bare subdivision code, e.g. "TX" for "US".
      Map.has_key?(divisions, "#{country}-#{subdivision}") ->
        "#{country}-#{subdivision}"

      # Already a fully-qualified code, e.g. "US-TX".
      Map.has_key?(divisions, subdivision) ->
        subdivision

      # Otherwise do a normalized name search (including known variations).
      true ->
        subdivision = filter_for_comparison(subdivision)

        divisions
        |> Enum.find(fn {_subdivision_code, %{"name" => full_subdivision} = s} ->
          variation = s["variation"]

          equal_names?(full_subdivision, subdivision) or
            (is_binary(variation) and equal_names?(variation, subdivision))
        end)
        |> case do
          nil -> nil
          {subdivision_code, _full_subdivision} -> subdivision_code
        end
    end
  end

  defp equal_names?(a, b) do
    filter_for_comparison(a) == filter_for_comparison(b)
  end

  # Normalizes a name for comparison: trim, upcase, decompose accents (NFD)
  # and strip everything but ASCII uppercase letters and whitespace.
  defp filter_for_comparison(string) do
    string
    |> String.trim()
    |> String.upcase()
    |> String.normalize(:nfd)
    # Fixed: the previous class `A-z` also matched `[ \ ] ^ _` and a
    # backtick, letting punctuation leak into comparisons. The string is
    # upcased above, so `A-Z` covers all base letters; combining marks from
    # NFD decomposition are stripped here.
    |> String.replace(~r/[^A-Z\s]/u, "")
  end

  @doc """
  Finds the country data for the given country. May be an ISO-3166-compliant
  country code or a string to perform a search with. Return a tuple in the
  format `{code, data}` if a country was found; otherwise `nil`.

      iex> {code, data} = ISO.find_country("United States")
      iex> code
      "US"
      iex> data |> Map.get("short_name")
      "UNITED STATES OF AMERICA"

      iex> {code, data} = ISO.find_country("US")
      iex> code
      "US"
      iex> data |> Map.get("short_name")
      "UNITED STATES OF AMERICA"

      iex> ISO.find_country("Invalid")
      nil
  """
  @spec find_country(country_code() | String.t()) :: nil | {country_code(), map()}
  def find_country(country) do
    if code?(country) do
      country
    else
      country_code(country)
    end
    |> case do
      nil -> nil
      code -> {code, @iso[code]}
    end
  end

  # A valid code is exactly two bytes and present in the dataset.
  defp code?(<<code::binary-size(2)>>), do: not is_nil(@iso[code])
  defp code?(_), do: false

  @doc """
  Takes a country input and subdivision and returns the validated,
  ISO-3166-compliant subdivision code in a tuple.

      iex> ISO.find_subdivision_code("US", "TX")
      {:ok, "US-TX"}
      iex> ISO.find_subdivision_code("US", "US-TX")
      {:ok, "US-TX"}
      iex> ISO.find_subdivision_code("US", "Texas")
      {:ok, "US-TX"}
      iex> ISO.find_subdivision_code("United States", "Texas")
      {:ok, "US-TX"}
      iex> ISO.find_subdivision_code("SomeCountry", "SG-SG")
      {:error, "Invalid country: SomeCountry"}
      iex> ISO.find_subdivision_code("SG", "SG-Invalid")
      {:error, "Invalid subdivision 'SG-Invalid' for country: SG (SG)"}
  """
  @spec find_subdivision_code(country_code(), String.t()) ::
          {:ok, String.t()} | {:error, String.t()}
  def find_subdivision_code(country, subdivision) do
    country_code =
      case @iso do
        %{^country => %{}} -> country
        _ -> country_code(country)
      end

    cond do
      is_nil(country_code) ->
        {:error, "Invalid country: #{country}"}

      code = subdivision_code(country_code, subdivision) ->
        {:ok, code}

      true ->
        {:error, "Invalid subdivision '#{subdivision}' for country: #{country} (#{country_code})"}
    end
  end

  @doc """
  Returns the subdivision data for the ISO-3166-compliant subdivision code.

      iex> ISO.get_subdivision("US-TX")
      {:ok, %{"category" => "state", "name" => "Texas"}}

      iex> ISO.get_subdivision("MX-CMX")
      {:ok, %{"category" => "federal district", "name" => "Ciudad de México"}}

      iex> ISO.get_subdivision("11-SG")
      {:error, :not_found}

      iex> ISO.get_subdivision("SG-Invalid")
      {:error, :not_found}

      iex> ISO.get_subdivision("Invalid")
      {:error, :not_found}
  """
  @spec get_subdivision(String.t()) :: {:ok, map()} | {:error, :invalid_country | :not_found}
  def get_subdivision(subdivision_code) do
    with <<country_code::binary-size(2), "-", _::binary>> <- subdivision_code,
         %{} = sub <- get_in(@iso, [country_code, "subdivisions", subdivision_code]) do
      {:ok, sub}
    else
      _ -> {:error, :not_found}
    end
  end
end
|
lib/shippex/iso.ex
| 0.804751
| 0.456955
|
iso.ex
|
starcoder
|
defmodule Pathex.Builder.Viewer do
  @moduledoc """
  Module with common functions for viewers
  """

  import Pathex.Common, only: [list_match: 2, pin: 1]

  # Helpers

  @doc """
  Builds a nested match pattern (quoted AST) from a `path`, wrapping
  `initial` innermost. The path is reversed so the last path item becomes
  the innermost pattern.

  Returns `{:ok, quoted}` or `{:error, {:bad_item, item}}` on the first
  unsupported path item.
  """
  def match_from_path(path, initial \\ {:x, [], Elixir}) do
    path
    |> Enum.reverse()
    |> Enum.reduce_while({:ok, initial}, fn
      # Map key held in a variable: pin it so it matches, not rebinds.
      {:map, {_, _, _} = key}, {:ok, acc} ->
        {:cont, {:ok, quote(do: %{^unquote(key) => unquote(acc)})}}

      # Literal map key.
      {:map, key}, {:ok, acc} ->
        {:cont, {:ok, quote(do: %{unquote(key) => unquote(acc)})}}

      # List index: delegate pattern construction to Pathex.Common.
      {:list, index}, {:ok, acc} ->
        {:cont, {:ok, list_match(index, acc)}}

      # Anything else is not a viewable path item - stop immediately.
      item, _ ->
        {:halt, {:error, {:bad_item, item}}}
    end)
  end

  # Non variable cases
  #
  # Each create_getter/2 clause returns quoted `pattern -> body` case
  # clauses that extract the value for one path step and pipe it into
  # `tail` (the quoted continuation).

  # Literal tuple index: guard on size so out-of-range indexes fall through.
  def create_getter({:tuple, index}, tail) when is_integer(index) and index >= 0 do
    quote do
      t when is_tuple(t) and tuple_size(t) > unquote(index) ->
        elem(t, unquote(index)) |> unquote(tail)
    end
  end

  # Literal keyword key: missing keys surface as Keyword.fetch's :error.
  def create_getter({:keyword, key}, tail) when is_atom(key) do
    quote do
      kwd when is_list(kwd) ->
        with {:ok, value} <- Keyword.fetch(kwd, unquote(key)) do
          value |> unquote(tail)
        end
    end
  end

  # Map key (literal or variable - `pin/1` pins variables).
  def create_getter({:map, key}, tail) do
    key = pin(key)

    quote do
      %{unquote(key) => x} -> x |> unquote(tail)
    end
  end

  # Literal list index: match the element positionally via list_match/2.
  def create_getter({:list, index}, tail) when is_integer(index) and index >= 0 do
    x = {:x, [], Elixir}
    match = list_match(index, x)

    quote do
      unquote(match) -> unquote(x) |> unquote(tail)
    end
  end

  # Variable cases
  #
  # The path item is a variable whose value is only known at runtime, so
  # the generated clauses also validate its type in guards.

  # Keyword key in a variable: require a keyword-shaped list and an atom key.
  def create_getter({:keyword, {_, _, _} = key}, tail) do
    quote do
      [{_, _} | _] = kwd when is_atom(unquote(key)) ->
        with {:ok, value} <- Keyword.fetch(kwd, unquote(key)) do
          value |> unquote(tail)
        end
    end
  end

  # List index in a variable: fetch at runtime with a sentinel so that a
  # legitimately stored `nil` is not mistaken for "not found".
  def create_getter({:list, index}, tail) do
    quote do
      l when is_list(l) and is_integer(unquote(index)) ->
        case Enum.at(l, unquote(index), :__pathex_var_not_found__) do
          :__pathex_var_not_found__ ->
            :error

          value ->
            value |> unquote(tail)
        end
    end
  end

  # Tuple index in a variable: bounds-checked in the guard.
  def create_getter({:tuple, {_, _, _} = index}, tail) do
    quote do
      t
      when is_tuple(t) and is_integer(unquote(index)) and
             unquote(index) >= 0 and
             tuple_size(t) > unquote(index) ->
        elem(t, unquote(index)) |> unquote(tail)
    end
  end

  def create_getter({:tuple, index}, _tail) when is_integer(index) and index < 0 do
    # Can't create getter for tuple with negative index. What can we do?
    []
  end

  @doc """
  Catch-all case clause appended after the getters: any unmatched input
  yields `:error`.
  """
  def fallback do
    quote do
      _ -> :error
    end
  end

  # Some bug in Macro.expand
  # `and` expressions are returned untouched; everything else is expanded
  # with this module in `requires` so local macros resolve.
  def expand_local({:and, _, _} = quoted), do: quoted

  def expand_local(quoted) do
    env = %Macro.Env{requires: [__MODULE__]}
    Macro.expand(quoted, env)
  end
end
|
lib/pathex/builder/viewer.ex
| 0.647575
| 0.53959
|
viewer.ex
|
starcoder
|
defmodule Assent.JWTAdapter.AssentJWT do
  @moduledoc """
  JWT adapter module for parsing JSON Web Tokens natively.

  See `Assent.JWTAdapter` for more.
  """
  alias Assent.{Config, JWTAdapter}

  @behaviour Assent.JWTAdapter

  @doc """
  Signs `claims` as a JWT using `alg` ("HS*", "RS*", "PS*" or "ES*") and the
  shared secret or PEM-encoded private key.

  Returns `{:ok, token}`, or an error tuple from JSON encoding or signing.
  """
  @impl JWTAdapter
  def sign(claims, alg, secret_or_private_key, opts) do
    header = jws(alg, opts)

    with {:ok, encoded_header} <- encode_json_base64(header, opts),
         {:ok, encoded_claims} <- encode_json_base64(claims, opts),
         {:ok, signature} <- sign_message("#{encoded_header}.#{encoded_claims}", alg, secret_or_private_key) do
      encoded_signature = Base.url_encode64(signature, padding: false)

      {:ok, "#{encoded_header}.#{encoded_claims}.#{encoded_signature}"}
    end
  end

  # Builds the JOSE header, adding "kid" when a private key id is configured.
  defp jws(alg, opts) do
    jws = %{"typ" => "JWT", "alg" => alg}

    case Keyword.get(opts, :private_key_id) do
      nil -> jws
      kid -> Map.put(jws, "kid", kid)
    end
  end

  defp encode_json_base64(map, opts) do
    with {:ok, json_library} <- Config.fetch(opts, :json_library),
         {:ok, json} <- json_library.encode(map) do
      {:ok, Base.url_encode64(json, padding: false)}
    end
  end

  # HMAC-based algorithms (HS256/HS384/HS512).
  defp sign_message(message, "HS" <> sha_bit_size, secret) do
    with {:ok, sha_alg} <- sha2_alg(sha_bit_size) do
      # :crypto.hmac/3 was removed in OTP 24; :crypto.mac/4 (OTP 22+) is the
      # supported replacement and produces identical output.
      {:ok, :crypto.mac(:hmac, sha_alg, secret, message)}
    end
  end

  # ECDSA algorithms (ES256/ES384/ES512): the DER signature must be converted
  # to the raw r || s form, with both coordinates left-padded to the curve's
  # coordinate size. Per https://tools.ietf.org/html/rfc7515#appendix-A.3.1
  defp sign_message(message, "ES" <> sha_bit_size, private_key) do
    with {:ok, sha_alg} <- sha2_alg(sha_bit_size),
         {:ok, key} <- decode_pem(private_key),
         der_signature <- :public_key.sign(message, sha_alg, key) do
      {:'ECDSA-Sig-Value', r, s} = :public_key.der_decode(:'ECDSA-Sig-Value', der_signature)
      r_bin = sha_bit_pad(int_to_bin(r), sha_bit_size)
      s_bin = sha_bit_pad(int_to_bin(s), sha_bit_size)

      {:ok, r_bin <> s_bin}
    end
  end

  # RSA algorithms: matches both "RS*" and "PS*" via the <<_, "S", ...>> head.
  defp sign_message(message, <<_, "S", sha_bit_size :: binary>>, private_key) do
    with {:ok, sha_alg} <- sha2_alg(sha_bit_size),
         {:ok, key} <- decode_pem(private_key) do
      {:ok, :public_key.sign(message, sha_alg, key)}
    end
  end

  defp sign_message(_message, alg, _jwk), do: {:error, "Unsupported JWT alg #{alg} or invalid JWK"}

  defp sha2_alg("256"), do: {:ok, :sha256}
  defp sha2_alg("384"), do: {:ok, :sha384}
  defp sha2_alg("512"), do: {:ok, :sha512}
  defp sha2_alg(bit_size), do: {:error, "Invalid SHA-2 algorithm bit size: #{bit_size}"}

  defp decode_pem(pem) do
    case :public_key.pem_decode(pem) do
      [entry] -> {:ok, :public_key.pem_entry_decode(entry)}
      _any -> {:error, "Private key should only have one entry"}
    end
  end

  # From erlang crypto lib
  defp int_to_bin(x) when x < 0, do: int_to_bin_neg(x, [])
  defp int_to_bin(x), do: int_to_bin_pos(x, [])

  defp int_to_bin_pos(0, [_ | _] = ds), do: :erlang.list_to_binary(ds)
  defp int_to_bin_pos(x, ds), do: int_to_bin_pos(:erlang.bsr(x, 8), [:erlang.band(x, 255) | ds])

  defp int_to_bin_neg(-1, [msb | _] = ds) when msb >= 128, do: :erlang.list_to_binary(ds)
  defp int_to_bin_neg(x, ds), do: int_to_bin_neg(:erlang.bsr(x, 8), [:erlang.band(x, 255) | ds])

  # Left-pads an ECDSA coordinate to the curve's fixed size: 32 bytes for
  # P-256, 48 for P-384 and 66 for P-521 (521 bits round up to 66 bytes).
  #
  # Fixed: the previous code passed `byte_size(binary) - target` as the pad
  # length, so short coordinates were never padded and long ones crashed
  # :binary.copy/2 with a negative count. The target size must be passed.
  defp sha_bit_pad(binary, "256"), do: lpad_binary(binary, 32)
  defp sha_bit_pad(binary, "384"), do: lpad_binary(binary, 48)
  defp sha_bit_pad(binary, "512"), do: lpad_binary(binary, 66)

  defp lpad_binary(binary, length) when byte_size(binary) < length do
    :binary.copy(<<0>>, length - byte_size(binary)) <> binary
  end
  defp lpad_binary(binary, _length), do: binary

  @doc """
  Verifies a JWT against the given secret or PEM/JWK public key.

  Returns `{:ok, map}` with the decoded `:header`, `:claims`, raw `:signature`
  and a `:verified?` boolean, or an error tuple when the token is malformed.
  """
  @impl JWTAdapter
  def verify(token, secret_or_public_key, opts) do
    with {:ok, encoded_jwt} <- split(token),
         {:ok, %{"alg" => alg} = header} <- decode_base64_json(encoded_jwt.header, opts),
         {:ok, claims} <- decode_base64_json(encoded_jwt.claims, opts),
         {:ok, signature} <- Base.url_decode64(encoded_jwt.signature, padding: false) do
      verified = verify_message("#{encoded_jwt.header}.#{encoded_jwt.claims}", signature, alg, secret_or_public_key)

      {:ok, %{
        header: header,
        claims: claims,
        signature: signature,
        verified?: verified
      }}
    end
  end

  defp split(token) do
    case String.split(token, ".") do
      [header, claims, signature] -> {:ok, %{header: header, claims: claims, signature: signature}}
      _any -> {:error, "Invalid JWT"}
    end
  end

  defp decode_base64_json(encoded, opts) do
    with {:ok, json_library} <- Config.fetch(opts, :json_library),
         {:ok, json} <- Base.url_decode64(encoded, padding: false),
         {:ok, map} <- json_library.decode(json) do
      {:ok, map}
    end
  end

  # "none" and missing keys are rejected outright (alg-stripping defense).
  defp verify_message(_message, _signature, "none", _secret), do: false
  defp verify_message(_message, _signature, _alg, nil), do: false

  # HMAC: recompute and compare in constant time.
  defp verify_message(message, signature, "HS" <> _rest = alg, secret) when is_binary(secret) do
    case sign_message(message, alg, secret) do
      {:ok, signature_2} -> Assent.constant_time_compare(signature_2, signature)
      _any -> false
    end
  end

  # ECDSA: rebuild the DER signature from the raw r || s halves.
  # Per https://tools.ietf.org/html/rfc7515#appendix-A.3.1
  defp verify_message(message, signature, "ES" <> sha_bit_size, public_key) do
    with {:ok, sha_alg} <- sha2_alg(sha_bit_size),
         {:ok, key} <- decode_key(public_key) do
      size = :erlang.byte_size(signature)
      {r_bin, s_bin} = :erlang.split_binary(signature, Integer.floor_div(size, 2))
      r = :crypto.bytes_to_integer(r_bin)
      s = :crypto.bytes_to_integer(s_bin)
      der_signature = :public_key.der_encode(:'ECDSA-Sig-Value', {:'ECDSA-Sig-Value', r, s})

      :public_key.verify(message, sha_alg, der_signature, key)
    end
  end

  # RSA ("RS*"/"PS*").
  defp verify_message(message, signature, <<_, "S", sha_bit_size :: binary>>, public_key) do
    with {:ok, sha_alg} <- sha2_alg(sha_bit_size),
         {:ok, key} <- decode_key(public_key) do
      :public_key.verify(message, sha_alg, signature, key)
    end
  end

  defp decode_key(pem) when is_binary(pem), do: decode_pem(pem)
  # Minimal JWK support: RSA public keys given as modulus/exponent.
  defp decode_key(%{"kty" => "RSA", "n" => n, "e" => e}) do
    with {:ok, n} <- Base.url_decode64(n, padding: false),
         {:ok, e} <- Base.url_decode64(e, padding: false) do
      {:ok, {:RSAPublicKey, :crypto.bytes_to_integer(n), :crypto.bytes_to_integer(e)}}
    end
  end
  defp decode_key(jwk) when is_map(jwk), do: {:error, "Can't decode JWK"}
end
|
lib/assent/jwt_adapter/assent_jwt.ex
| 0.783616
| 0.443661
|
assent_jwt.ex
|
starcoder
|
defprotocol Cat.Applicative do
  @moduledoc """
  Applicative defines
  * `pure(t(any), a) :: t(a)`
  * `ap(t((a -> b)), t(a)) :: t(b)`
  * `product(t(a), t(b)) :: t({a, b})`
  * `product_l(t(a), t(any)) :: t(a)`
  * `product_r(t(any), t(a)) :: t(a)`
  * `map2(t(a), t(b), (a, b -> c)) :: t(c)`

  **It must also be `Functor`.**

  Default implementations (at `Applicative.Default`):
  * `product(t(a), t(b)) :: t({a, b})`
  * `product_l(t(a), t(any)) :: t(a)`
  * `product_r(t(any), t(a)) :: t(a)`
  * `map2(t(a), t(b), (a, b -> c)) :: t(c)`

  Module provides implementations for:
  * `List`
  """

  @type t(_x) :: term

  @doc """
  Lifts `a` into the applicative context. `example` is only used to
  dispatch on the right implementation.
  """
  @spec pure(t(any), a) :: t(a) when a: var
  def pure(example, a)

  @doc """
  Applies functions wrapped in the context to values wrapped in the context.
  """
  @spec ap(t((a -> b)), t(a)) :: t(b) when a: var, b: var
  def ap(tf, ta)

  @doc """
  Combines both contexts, pairing the values.
  """
  @spec product(t(a), t(b)) :: t({a, b}) when a: var, b: var
  def product(ta, tb)

  @doc """
  Combines both contexts, keeping only the left value.
  """
  @spec product_l(t(a), t(any)) :: t(a) when a: var
  def product_l(ta, tb)

  @doc """
  Combines both contexts, keeping only the right value.
  """
  @spec product_r(t(any), t(b)) :: t(b) when b: var
  def product_r(ta, tb)

  @doc """
  Combines both contexts with the binary function `f`.
  """
  @spec map2(t(a), t(b), (a, b -> c)) :: t(c) when a: var, b: var, c: var
  def map2(ta, tb, f)
end
alias Cat.{Applicative, Functor}
defmodule Cat.Applicative.Arrow do
  @moduledoc """
  Curried ("arrow") variants of the `Cat.Applicative` operations: each
  function fixes one argument and returns a function over the rest,
  convenient for pipelines and composition.
  """

  @spec pure(Applicative.t(any)) :: (a -> Applicative.t(a)) when a: var
  def pure(example) do
    fn a -> Applicative.pure(example, a) end
  end

  @spec ap(Applicative.t((a -> b))) :: (Applicative.t(a) -> Applicative.t(b)) when a: var, b: var
  def ap(tf) do
    fn ta -> Applicative.ap(tf, ta) end
  end

  @spec product_l(Applicative.t(any)) :: (Applicative.t(a) -> Applicative.t(a)) when a: var
  def product_l(tb) do
    fn ta -> Applicative.product_l(ta, tb) end
  end

  @spec product_r(Applicative.t(any)) :: (Applicative.t(b) -> Applicative.t(b)) when b: var
  def product_r(ta) do
    fn tb -> Applicative.product_r(ta, tb) end
  end

  @spec map2((a, b -> c)) :: (Applicative.t(a), Applicative.t(b) -> Applicative.t(c)) when a: var, b: var, c: var
  def map2(f) do
    fn ta, tb -> Applicative.map2(ta, tb, f) end
  end
end
defmodule Cat.Applicative.Default do
  @moduledoc false

  alias Cat.Fun
  require Fun

  # Default `product` derived from `ap`: lift the curried pair constructor
  # over `ta`, then apply it across `tb`.
  @spec product(Applicative.t(a), Applicative.t(b)) :: Applicative.t({a, b}) when a: var, b: var
  def product(ta, tb) do
    fs = Functor.map(ta, fn a -> (fn b -> {a, b} end) end)
    Applicative.ap(fs, tb)
  end

  # Sequence both effects, keep only the left value.
  @spec product_l(Applicative.t(a), Applicative.t(any)) :: Applicative.t(a) when a: var
  def product_l(ta, tb), do: Applicative.map2(ta, tb, fn a, _ -> a end)

  # Sequence both effects, keep only the right value.
  @spec product_r(Applicative.t(any), Applicative.t(b)) :: Applicative.t(b) when b: var
  def product_r(ta, tb), do: Applicative.map2(ta, tb, fn _, b -> b end)

  # Default `map2` derived from `product`: pair up, then map the tupled fn.
  @spec map2(Applicative.t(a), Applicative.t(b), (a, b -> c)) :: Applicative.t(c) when a: var, b: var, c: var
  def map2(ta, tb, f), do: Functor.map(Applicative.product(ta, tb), Fun.tupled(f))

  defmodule FromMonad do
    @moduledoc false

    alias Cat.Monad

    # `ap` derived from `flat_map` for any type that is also a Monad.
    @spec ap(Applicative.t((a -> b)), Applicative.t(a)) :: Applicative.t(b) when a: var, b: var
    def ap(tf, ta), do:
      Monad.flat_map tf, fn f ->
        Functor.map(ta, f)
      end
  end
end
defimpl Applicative, for: List do
  @type t(a) :: [a]

  # Lifting a value into the list context yields a singleton list; the
  # example list is only used for protocol dispatch.
  @spec pure([any], a) :: [a] when a: var
  def pure(_, a), do: [a]

  # Applies every function in `tf` to every element of `ta`, ordered as
  # "all results of the first function, then all of the second, ...".
  # This is the same order the previous hand-rolled recursion produced.
  @spec ap([(a -> b)], [a]) :: [b] when a: var, b: var
  def ap(tf, ta) do
    Enum.flat_map(tf, fn f -> Enum.map(ta, f) end)
  end

  defdelegate product(ta, tb), to: Applicative.Default
  defdelegate product_l(ta, tb), to: Applicative.Default
  defdelegate product_r(ta, tb), to: Applicative.Default
  defdelegate map2(ta, tb, f), to: Applicative.Default
end
|
lib/cat/protocols/applicative.ex
| 0.890276
| 0.611121
|
applicative.ex
|
starcoder
|
defmodule CodeCorps.StripeService.Adapters.StripeConnectAccountAdapter do
  @moduledoc """
  Translates between `%Stripe.Account{}` records and the attribute maps used
  by the local `StripeConnectAccount` schema, in both directions.
  """

  alias CodeCorps.MapUtils
  alias CodeCorps.Adapter.MapTransformer

  # Mapping of stripe record attributes to locally stored attributes
  # Format is {:local_key, [:nesting, :of, :stripe, :keys]}
  @stripe_mapping [
    {:id_from_stripe, [:id]},
    {:business_name, [:business_name]},
    {:business_url, [:business_url]},
    {:charges_enabled, [:charges_enabled]},
    {:country, [:country]},
    {:default_currency, [:default_currency]},
    {:details_submitted, [:details_submitted]},
    {:display_name, [:display_name]},
    {:email, [:email]},
    {:external_account, [:external_account]},
    {:legal_entity_address_city, [:legal_entity, :address, :city]},
    {:legal_entity_address_country, [:legal_entity, :address, :country]},
    {:legal_entity_address_line1, [:legal_entity, :address, :line1]},
    {:legal_entity_address_line2, [:legal_entity, :address, :line2]},
    {:legal_entity_address_postal_code, [:legal_entity, :address, :postal_code]},
    {:legal_entity_address_state, [:legal_entity, :address, :state]},
    {:legal_entity_business_name, [:legal_entity, :business_name]},
    {:legal_entity_business_tax_id, [:legal_entity, :business_tax_id]},
    {:legal_entity_business_tax_id_provided, [:legal_entity, :business_tax_id_provided]},
    {:legal_entity_business_vat_id, [:legal_entity, :business_vat_id]},
    {:legal_entity_business_vat_id_provided, [:legal_entity, :business_vat_id_provided]},
    {:legal_entity_dob_day, [:legal_entity, :dob, :day]},
    {:legal_entity_dob_month, [:legal_entity, :dob, :month]},
    {:legal_entity_dob_year, [:legal_entity, :dob, :year]},
    {:legal_entity_first_name, [:legal_entity, :first_name]},
    {:legal_entity_gender, [:legal_entity, :gender]},
    {:legal_entity_last_name, [:legal_entity, :last_name]},
    {:legal_entity_maiden_name, [:legal_entity, :maiden_name]},
    {:legal_entity_personal_address_city, [:legal_entity, :personal_address, :city]},
    {:legal_entity_personal_address_country, [:legal_entity, :personal_address, :country]},
    {:legal_entity_personal_address_line1, [:legal_entity, :personal_address, :line1]},
    {:legal_entity_personal_address_line2, [:legal_entity, :personal_address, :line2]},
    {:legal_entity_personal_address_postal_code, [:legal_entity, :personal_address, :postal_code]},
    {:legal_entity_personal_address_state, [:legal_entity, :personal_address, :state]},
    {:legal_entity_phone_number, [:legal_entity, :phone_number]},
    {:legal_entity_personal_id_number, [:legal_entity, :personal_id_number]},
    {:legal_entity_personal_id_number_provided, [:legal_entity, :personal_id_number_provided]},
    {:legal_entity_ssn_last_4, [:legal_entity, :ssn_last_4]},
    {:legal_entity_ssn_last_4_provided, [:legal_entity, :ssn_last_4_provided]},
    {:legal_entity_type, [:legal_entity, :type]},
    {:legal_entity_verification_details, [:legal_entity, :verification, :details]},
    {:legal_entity_verification_details_code, [:legal_entity, :verification, :details_code]},
    {:legal_entity_verification_document, [:legal_entity, :verification, :document]},
    {:legal_entity_verification_status, [:legal_entity, :verification, :status]},
    {:payouts_enabled, [:payouts_enabled]},
    {:support_email, [:support_email]},
    {:support_phone, [:support_phone]},
    {:support_url, [:support_url]},
    {:tos_acceptance_date, [:tos_acceptance, :date]},
    {:tos_acceptance_ip, [:tos_acceptance, :ip]},
    {:tos_acceptance_user_agent, [:tos_acceptance, :user_agent]},
    {:type, [:type]},
    {:verification_disabled_reason, [:verification, :disabled_reason]},
    {:verification_due_by, [:verification, :due_by]},
    {:verification_fields_needed, [:verification, :fields_needed]}
  ]

  @doc """
  Transforms a set of local attributes into a map of parameters used to
  update a `%Stripe.Account{}`.
  """
  def from_params(%{} = attributes) do
    result =
      attributes
      |> remove_attributes()
      |> MapUtils.keys_to_atom()
      |> MapTransformer.transform_inverse(@stripe_mapping)

    {:ok, result}
  end

  # Like from_params/1, but drops :type, which Stripe does not allow to be
  # changed on an existing account.
  def from_params_update(%{} = attributes) do
    result =
      attributes
      |> remove_attributes()
      |> MapUtils.keys_to_atom()
      |> MapTransformer.transform_inverse(@stripe_mapping)
      |> Map.drop([:type])

    {:ok, result}
  end

  @doc """
  Transforms a `%Stripe.Account{}` and a set of local attributes into a
  map of parameters used to create or update a `StripeConnectAccount` record.
  """
  def to_params(%Stripe.Account{} = stripe_account, %{} = attributes) do
    result =
      stripe_account
      |> Map.from_struct
      |> MapTransformer.transform(@stripe_mapping)
      |> add_nested_attributes(stripe_account)
      |> MapUtils.keys_to_string()
      |> add_non_stripe_attributes(attributes)

    {:ok, result}
  end

  # Names of attributes which we need to store localy,
  # but are not part of the Stripe API record
  @non_stripe_attributes ["organization_id"]

  # Merges the whitelisted non-Stripe attributes into the params map.
  defp add_non_stripe_attributes(%{} = params, %{} = attributes) do
    attributes
    |> get_non_stripe_attributes
    |> add_to(params)
  end

  defp get_non_stripe_attributes(%{} = attributes) do
    attributes |> Map.take(@non_stripe_attributes)
  end

  defp add_to(%{} = attributes, %{} = params) do
    params |> Map.merge(attributes)
  end

  defp add_nested_attributes(map, stripe_account) do
    map |> add_external_account(stripe_account)
  end

  # Stripe returns a paginated list of external accounts; only the most
  # recently added one is stored locally.
  defp add_external_account(map, %Stripe.Account{external_accounts: %Stripe.List{data: list}}) do
    latest = list |> List.last
    map |> do_add_external_account(latest)
  end

  defp do_add_external_account(map, nil), do: map
  defp do_add_external_account(map, %Stripe.BankAccount{id: id}) do
    map |> Map.put(:external_account, id)
  end

  # Once the legal entity is verified, the verification document must not be
  # sent back to Stripe, so it is dropped from the attributes.
  defp remove_attributes(%{"legal_entity_verification_status" => "verified"} = attributes) do
    attributes |> Map.delete("legal_entity_verification_document")
  end
  defp remove_attributes(attributes), do: attributes
end
|
lib/code_corps/stripe_service/adapters/stripe_connect_account.ex
| 0.724286
| 0.445107
|
stripe_connect_account.ex
|
starcoder
|
defmodule Duration do
  # NOTE(review): `months` and `days` are typed `pos_integer` but default to
  # 0, and all components are stored non-negative - these two fields should
  # presumably be `non_neg_integer` as well; confirm before relying on specs.
  @type t :: %__MODULE__{
          years: non_neg_integer,
          months: pos_integer,
          days: pos_integer,
          hours: non_neg_integer,
          minutes: non_neg_integer,
          seconds: non_neg_integer
        }

  defstruct years: 0,
            months: 0,
            days: 0,
            hours: 0,
            minutes: 0,
            seconds: 0

  alias Duration.Parser

  @moduledoc """
  Convenient module to play with ISO 8601:2004 durations and `Timex.shift/2`.
  """

  @doc """
  Loads a `Duration.t` from an existing struct, an ISO 8601 duration string,
  or a keyword list of components (negative values are rejected).
  """
  @spec new(Duration.t) :: {:ok, Duration.t} | {:error, atom}
  @spec new(String.t) :: {:ok, Duration.t} | {:error, atom}
  @spec new(list) :: {:ok, Duration.t} | {:error, atom}
  def new(%Duration{} = val), do: {:ok, val}
  def new(val) when is_binary(val) do
    parse(val)
  end
  def new(opts) when is_list(opts) do
    params = opts |> Keyword.take([:years, :months, :days, :hours, :minutes, :seconds])
    params = params |> Enum.map(fn {k, v} -> {k, abs(v)} end)
    # The round-trip comparison rejects any option list that contained an
    # unknown key or a negative value (abs/1 would have changed it).
    if params == opts do
      {:ok, struct(__MODULE__, params)}
    else
      {:error, :wrong_options}
    end
  end

  @doc """
  Parse a duration string to a `Duration.t`.

  ## Examples

      iex> Duration.parse("PT3S")
      {:ok, %Duration{seconds: 3}}
  """
  @spec parse(String.t) :: {:ok, Duration.t} | {:error, atom}
  def parse(value) when is_binary(value) do
    case Parser.parse(value) do
      {:ok, params, _, _, _, _} ->
        {:ok, struct(__MODULE__, params)}
      _ ->
        {:error, :invalid_duration}
    end
  end
  def parse(_) do
    {:error, :parse_error}
  end

  @doc """
  Converts a `Duration.t` into `Timex.shift_options`, which can be used with `Timex.shift/2`.

  ## Examples

  Go forward

      iex > Duration.to_timex_options(%Duration{years: 1})
      {:ok, [days: 0, hours: 0, minutes: 0, months: 0, seconds: 0, years: 1]}

  Go backward

      iex > Duration.to_timex_options(%Duration{years: 1}, :backward)
      {:ok, [days: 0, hours: 0, minutes: 0, months: 0, seconds: 0, years: -1]}
  """
  def to_timex_options(%Duration{} = duration, direction \\ :forward) when direction in [:forward, :backward] do
    options =
      case direction do
        :forward -> Map.from_struct(duration) |> Map.to_list
        # Backward shifts negate every component.
        :backward -> Map.from_struct(duration) |> Enum.map(fn {k, v} -> {k, v * -1} end)
      end

    # Zero components carry no information for Timex.shift/2 and are dropped.
    {:ok, options |> Enum.filter(fn {_, v} -> v != 0 end)}
  end
end
defimpl String.Chars, for: Duration do
  @doc """
  Renders the duration in ISO 8601 form, e.g. `"P1YT2H"`. A zero duration
  renders as the canonical `"PT0S"`.
  """
  def to_string(value) do
    # Date components.
    output = "P"
    output = output <> if value.years > 0, do: "#{value.years}Y", else: ""
    output = output <> if value.months > 0, do: "#{value.months}M", else: ""
    output = output <> if value.days > 0, do: "#{value.days}D", else: ""

    # BUGFIX: the original wrote `(value.hours || value.minutes || value.seconds) > 0`,
    # but in Elixir `0` is truthy, so `||` always returned `value.hours` and the
    # "T" separator appeared only when hours > 0 (e.g. %Duration{minutes: 2}
    # rendered as "P2M" — two *months*). Use boolean comparisons instead.
    output = output <> if value.hours > 0 or value.minutes > 0 or value.seconds > 0, do: "T", else: ""

    # Time components.
    output = output <> if value.hours > 0, do: "#{value.hours}H", else: ""
    output = output <> if value.minutes > 0, do: "#{value.minutes}M", else: ""
    output = output <> if value.seconds > 0, do: "#{value.seconds}S", else: ""

    # Zero duration: normalize to "PT0S".
    output = if output in ["P", "PT"], do: "PT0S", else: output
    output
  end
end
# Optional integration: only define a Phoenix.HTML.Safe implementation when
# phoenix_html is available at compile time, so the library does not require
# it as a dependency.
if Code.ensure_loaded?(Phoenix.HTML.Safe) do
  defimpl Phoenix.HTML.Safe, for: Duration do
    # Delegates to String.Chars; the ISO 8601 rendering contains only
    # digits and ASCII letters, so it needs no escaping.
    def to_iodata(value), do: to_string(value)
  end
end
|
lib/duration.ex
| 0.886131
| 0.557665
|
duration.ex
|
starcoder
|
defmodule MangoPay.PayIn do
  @moduledoc """
  Functions for MangoPay [pay in](https://docs.mangopay.com/endpoints/v2.01/payins#e264_the-payin-object).
  """
  use MangoPay.Query.Base
  set_path "payins"

  @doc """
  Get a pay in.

  ## Examples

      {:ok, pay_in} = MangoPay.PayIn.get(id)
  """
  def get id do
    _get id
  end

  @doc """
  Get a pay in. Raises on failure.

  ## Examples

      pay_in = MangoPay.PayIn.get!(id)
  """
  def get! id do
    _get! id
  end

  @doc """
  Get card details for a web card pay in.

  ## Examples

      {:ok, pay_in} = MangoPay.PayIn.get_card_details("pay_in_id")
  """
  def get_card_details(payin_id) do
    _get [resource(), "card/web", payin_id, "extended"]
  end

  defmodule Card do
    @moduledoc false

    defmodule Web do
      @moduledoc """
      Functions for MangoPay [pay in](https://docs.mangopay.com/endpoints/v2.01/payins#e264_the-payin-object).
      """
      use MangoPay.Query.Base
      set_path "payins/card/web"

      @doc """
      Create a payin.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "DebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "Fees": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "ReturnURL": "http://www.my-site.com/returnURL/",
            "CreditedWalletId": "8494559",
            "CardType": "CB_VISA_MASTERCARD",
            "SecureMode": "DEFAULT",
            "Culture": "EN",
            "TemplateURLOptions": %{
              "Payline": "https://www.mysite.com/template/"
            },
            "StatementDescriptor": "Mar2016"
          }
          {:ok, payin} = MangoPay.PayIn.Card.Web.create(params)
      """
      def create params do
        _create params
      end

      @doc """
      Create a payin. Raises on failure.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "DebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "Fees": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "ReturnURL": "http://www.my-site.com/returnURL/",
            "CreditedWalletId": "8494559",
            "CardType": "CB_VISA_MASTERCARD",
            "SecureMode": "DEFAULT",
            "Culture": "EN",
            "TemplateURLOptions": %{
              "Payline": "https://www.mysite.com/template/"
            },
            "StatementDescriptor": "Mar2016"
          }
          payin = MangoPay.PayIn.Card.Web.create!(params)
      """
      def create! params do
        _create! params
      end
    end

    defmodule Direct do
      @moduledoc """
      Functions for MangoPay [pay in](https://docs.mangopay.com/endpoints/v2.01/payins#e264_the-payin-object).
      """
      use MangoPay.Query.Base
      set_path "payins/card/direct"

      @doc """
      Create a payin.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "CreditedWalletId": "8494559",
            "DebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "Fees": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "SecureModeReturnURL": "http://www.my-site.com/returnURL",
            "CardId": "14213157",
            "SecureMode": "DEFAULT",
            "Billing": %{
              "Address": %{
                "AddressLine1": "1 Mangopay Street",
                "AddressLine2": "The Loop",
                "City": "Paris",
                "Region": "Ile de France",
                "PostalCode": "75001",
                "Country": "FR"
              }
            },
            "StatementDescriptor": "Mar2016"
          }
          {:ok, payin} = MangoPay.PayIn.Card.Direct.create(params)
      """
      def create params do
        _create params
      end

      @doc """
      Create a payin. Raises on failure.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "CreditedWalletId": "8494559",
            "DebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "Fees": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "SecureModeReturnURL": "http://www.my-site.com/returnURL",
            "CardId": "14213157",
            "SecureMode": "DEFAULT",
            "Billing": %{
              "Address": %{
                "AddressLine1": "1 Mangopay Street",
                "AddressLine2": "The Loop",
                "City": "Paris",
                "Region": "Ile de France",
                "PostalCode": "75001",
                "Country": "FR"
              }
            },
            "StatementDescriptor": "Mar2016"
          }
          payin = MangoPay.PayIn.Card.Direct.create!(params)
      """
      def create! params do
        _create! params
      end
    end

    defmodule PreAuthorized do
      @moduledoc """
      Functions for MangoPay [pay in](https://docs.mangopay.com/endpoints/v2.01/payins#e264_the-payin-object).
      """
      use MangoPay.Query.Base
      set_path "payins/preauthorized/direct"

      @doc """
      Create a payin.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "CreditedWalletId": "8494559",
            "DebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "Fees": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "PreauthorizationId": "12639123"
          }
          {:ok, payin} = MangoPay.PayIn.Card.PreAuthorized.create(params)
      """
      def create params do
        _create params
      end

      @doc """
      Create a payin. Raises on failure.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "CreditedWalletId": "8494559",
            "DebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "Fees": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "PreauthorizationId": "12639123"
          }
          payin = MangoPay.PayIn.Card.PreAuthorized.create!(params)
      """
      def create! params do
        _create! params
      end
    end
  end

  defmodule BankWire do
    @moduledoc false

    defmodule Wallet do
      @moduledoc """
      Functions for MangoPay [pay in](https://docs.mangopay.com/endpoints/v2.01/payins#e264_the-payin-object).
      """
      use MangoPay.Query.Base
      # NOTE(review): this path is scoped under "clients/" (the platform's own
      # wallet), unlike the sibling Direct module — confirm against MangoPay docs.
      set_path "clients/payins/bankwire/direct"

      @doc """
      Create a payin.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "CreditedWalletId": "CREDIT_EUR",
            "DeclaredDebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "DeclaredFees": %{
              "Currency": "EUR",
              "Amount": 12
            }
          }
          {:ok, payin} = MangoPay.PayIn.BankWire.Wallet.create(params)
      """
      def create params do
        _create params
      end

      @doc """
      Create a payin. Raises on failure.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "CreditedWalletId": "CREDIT_EUR",
            "DeclaredDebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "DeclaredFees": %{
              "Currency": "EUR",
              "Amount": 12
            }
          }
          payin = MangoPay.PayIn.BankWire.Wallet.create!(params)
      """
      def create! params do
        _create! params
      end
    end

    defmodule Direct do
      @moduledoc """
      Functions for MangoPay [pay in](https://docs.mangopay.com/endpoints/v2.01/payins#e264_the-payin-object).
      """
      use MangoPay.Query.Base
      set_path "payins/bankwire/direct"

      @doc """
      Create a payin.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "CreditedWalletId": "8494559",
            "DeclaredDebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "DeclaredFees": %{
              "Currency": "EUR",
              "Amount": 12
            }
          }
          {:ok, payin} = MangoPay.PayIn.BankWire.Direct.create(params)
      """
      def create params do
        _create params
      end

      @doc """
      Create a payin. Raises on failure.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "CreditedWalletId": "8494559",
            "DeclaredDebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "DeclaredFees": %{
              "Currency": "EUR",
              "Amount": 12
            }
          }
          payin = MangoPay.PayIn.BankWire.Direct.create!(params)
      """
      def create! params do
        _create! params
      end
    end
  end

  defmodule DirectDebit do
    @moduledoc false

    defmodule Web do
      @moduledoc """
      Functions for MangoPay [pay in](https://docs.mangopay.com/endpoints/v2.01/payins#e264_the-payin-object).
      """
      use MangoPay.Query.Base
      set_path "payins/directdebit/web"

      @doc """
      Create a payin.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "CreditedWalletId": "8494559",
            "DeclaredDebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "DeclaredFees": %{
              "Currency": "EUR",
              "Amount": 12
            }
          }
          {:ok, payin} = MangoPay.PayIn.DirectDebit.Web.create(params)
      """
      def create params do
        _create params
      end

      @doc """
      Create a payin. Raises on failure.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "CreditedWalletId": "8494559",
            "DeclaredDebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "DeclaredFees": %{
              "Currency": "EUR",
              "Amount": 12
            }
          }
          payin = MangoPay.PayIn.DirectDebit.Web.create!(params)
      """
      def create! params do
        _create! params
      end
    end

    defmodule Direct do
      @moduledoc """
      Functions for MangoPay [pay in](https://docs.mangopay.com/endpoints/v2.01/payins#e264_the-payin-object).
      """
      use MangoPay.Query.Base
      set_path "payins/directdebit/direct"

      @doc """
      Create a payin.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "CreditedWalletId": "8494559",
            "DebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "Fees": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "MandateId": "24733034",
            "StatementDescriptor": "Nov2016"
          }
          {:ok, payin} = MangoPay.PayIn.DirectDebit.Direct.create(params)
      """
      def create params do
        _create params
      end

      @doc """
      Create a payin. Raises on failure.

      ## Examples

          params = %{
            "Tag": "custom meta",
            "AuthorId": "8494514",
            "CreditedUserId": "8494514",
            "CreditedWalletId": "8494559",
            "DebitedFunds": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "Fees": %{
              "Currency": "EUR",
              "Amount": 12
            },
            "MandateId": "24733034",
            "StatementDescriptor": "Nov2016"
          }
          payin = MangoPay.PayIn.DirectDebit.Direct.create!(params)
      """
      def create! params do
        _create! params
      end
    end
  end
end
|
lib/mango_pay/pay_in.ex
| 0.749271
| 0.455078
|
pay_in.ex
|
starcoder
|
defmodule TaskBunny.JobError do
  @moduledoc """
  A struct that holds error information that occurred during job processing.

  ## Attributes

  - job: the job module that failed
  - payload: the payload (arguments) for the job execution
  - error_type: the type of the error. :exception, :return_value, :timeout or :exit
  - exception: the inner exception (option)
  - stacktrace: the stacktrace (only available for the exception)
  - return_value: the return value from the job (only available for the return value error)
  - reason: the reason information passed with EXIT signal (only available for exit error)
  - raw_body: the raw body for the message
  - meta: the meta data given by RabbitMQ
  - failed_count: the number of failures for the job processing request
  - queue: the name of the queue
  - concurrency: the number of concurrent job processing of the worker
  - pid: the process ID of the worker
  - reject: sets true if the job is rejected for the failure (means it won't be retried again)
  """
  @type t :: %__MODULE__{
          job: atom | nil,
          payload: any,
          error_type: :exception | :return_value | :timeout | :exit | nil,
          exception: struct | nil,
          stacktrace: list(tuple) | nil,
          return_value: any,
          reason: any,
          raw_body: String.t(),
          meta: map,
          failed_count: integer,
          queue: String.t(),
          concurrency: integer,
          pid: pid | nil,
          reject: boolean
        }

  defstruct job: nil,
            payload: nil,
            error_type: nil,
            exception: nil,
            stacktrace: nil,
            return_value: nil,
            reason: nil,
            raw_body: "",
            meta: %{},
            failed_count: 0,
            queue: "",
            concurrency: 1,
            pid: nil,
            reject: false

  @doc """
  Take information related to the result and make some of them JSON encode safe.

  Since raw body can be bigger as you retry, you do not want to put the information.
  """
  @spec get_result_info(t) :: map()
  def get_result_info(job_error) do
    job_error
    |> Map.take([
      :error_type,
      :exception,
      :stacktrace,
      :return_value,
      :reason,
      :failed_count,
      :queue
    ])
    # inspect/1 turns arbitrary terms (exceptions, stacktraces) into strings
    # so the map is safe to JSON-encode.
    |> Enum.map(fn {k, v} -> {k, inspect(v)} end)
    |> Map.new()
  end

  # Builds a JobError for a raised exception.
  @doc false
  @spec handle_exception(atom, any, struct, any) :: t
  def handle_exception(job, payload, exception, stacktrace) do
    %__MODULE__{
      job: job,
      payload: payload,
      error_type: :exception,
      exception: exception,
      stacktrace: stacktrace
    }
  end

  # Builds a JobError for a process that exited during processing.
  @doc false
  @spec handle_exit(atom, any, any, any) :: t
  def handle_exit(job, payload, reason, stacktrace) do
    %__MODULE__{
      job: job,
      payload: payload,
      error_type: :exit,
      reason: reason,
      stacktrace: stacktrace
    }
  end

  # Builds a JobError for a job that returned a failure value.
  @doc false
  @spec handle_return_value(atom, any, any) :: t
  def handle_return_value(job, payload, return_value) do
    %__MODULE__{
      job: job,
      payload: payload,
      error_type: :return_value,
      return_value: return_value
    }
  end

  # Builds a JobError for a job that exceeded its allowed execution time.
  @doc false
  @spec handle_timeout(atom, any) :: t
  def handle_timeout(job, payload) do
    %__MODULE__{
      job: job,
      payload: payload,
      error_type: :timeout
    }
  end
end
|
lib/task_bunny/job_error.ex
| 0.743727
| 0.529263
|
job_error.ex
|
starcoder
|
defmodule Mix.SCM do
  # BUGFIX: the module description was declared with @doc (which attaches to
  # the *next* function and was silently overwritten by the following @doc);
  # it must be @moduledoc.
  @moduledoc """
  This module provides helper functions and defines the
  behavior required by any SCM used by mix.
  """

  @doc """
  Register required callbacks.
  """
  def behaviour_info(:callbacks) do
    [key: 0, consumes?: 1, available?: 2, get: 2, check?: 2, update: 2, clean: 2]
  end

  @doc """
  This behavior function should retrieve an atom representing
  the SCM key. In the dependency opts, a value for the given
  key must be found since it is used to print information about
  the requested dependency.
  """
  def key

  @doc """
  This behavior function receives a keywords list of `opts`
  and should return an updated list in case the SCM consumes
  the available options. For example, when a developer specifies
  a dependency:

      { "foo", "0.1.0", github: "foo/bar" }

  Each registered SCM will be asked if they consume this dependency,
  receiving [github: "foo/bar"] as argument. Since this option makes
  sense for the Git SCM, it will return an update list of options
  while other SCMs would simply return nil.
  """
  def consumes?(opts)

  @doc """
  This behavior function receives a `path`, `opts` and returns
  a boolean if the dependency is available.
  """
  def available?(path, opts)

  @doc """
  This behavior function gets unchecked dependencies.

  If the dependency is locked, it receives the lock under the
  `:lock` key in `opts`. In case no lock is given, it must
  return a new lock (if one exists). If a lock is given,
  it must preferably return the same lock, but can return
  a different one in case of failure.
  """
  def get(path, opts)

  @doc """
  This behavior function updates dependencies. It may be
  called either directly via `deps.update` or implicitly
  by `deps.get`. In the first scenario, no lock is received,
  while one is given in the second.
  """
  def update(path, opts)

  @doc """
  This behavior function checks if the dependency is locked and
  the current repository version matches the lock. Note that some
  SCMs do not require a lock, for such, this function can simply
  return true.
  """
  def check?(path, opts)

  @doc """
  This behavior function should clean the given dependency.
  """
  def clean(path, opts)

  @doc """
  Returns all available SCM.
  """
  def available do
    Mix.Server.call(:scm)
  end

  @doc """
  Register the scm repository with the given `key` and `mod`.
  """
  def register(mod) when is_atom(mod) do
    Mix.Server.cast({ :add_scm, mod })
  end

  @doc """
  Register builtin SCMs.
  """
  def register_builtin do
    register Mix.SCM.Git
    register Mix.SCM.Raw
  end
end
|
lib/mix/lib/mix/scm.ex
| 0.832985
| 0.512083
|
scm.ex
|
starcoder
|
defmodule Bliss.Interpreter do
  @moduledoc """
  The actual Bliss (Joy) interpreter. Takes parsed input.

  Note about the modes:
  - normal is :RUN mode, i.e. evaluating whatever gets handed in.
  - 'compile' is :LIBRA or :DEFINE mode, they are terminated with a period.
  """

  # Fresh interpreter options: start in :RUN mode with no flags and no
  # hidden definitions.
  def new_opts, do: %{mode: :RUN, flags: MapSet.new, hidden: MapSet.new}

  # The actual interpreter.
  @doc "No more elements on the current parsed line. Go back."
  def interpret(opts, [], dict, stack),
    do: {opts, dict, stack}

  @doc "Debug: dump the remaining program, dict, stack and opts, then continue."
  def interpret(opts, [:DBG | tl], dict, stack) do
    IO.puts("Exec:\t#{inspect tl}")
    IO.puts("Dict:\t#{inspect dict}")
    IO.puts("Stack:\t#{inspect stack}")
    IO.puts("Opts:\t#{inspect opts}")
    interpret(opts, tl, dict, stack)
  end

  # Mode switching
  @doc "Start LIBRA mode, mark on stack."
  def interpret(opts, [:LIBRA | tl], dict, stack),
    do: interpret(set_mode(opts, :LIBRA), tl, dict, [:LIBRA | stack])

  @doc "Start LIBRA mode - by way of DEFINE, mark on stack."
  def interpret(opts, [:DEFINE | tl], dict, stack),
    do: interpret(set_mode(opts, :LIBRA), tl, dict, [:LIBRA | stack])

  @doc "End LIBRA."
  def interpret(opts, [:END_LIBRA | tl], dict, stack),
    do: interpret(set_mode(opts, :RUN), tl, dict, Enum.drop_while(stack, &(&1 != :LIBRA)) |> tl())

  # RUN
  @doc "RUN: Invoke elixir from bliss."
  def interpret(%{mode: :RUN} = opts, [:apply | tl], dict, [[module, fun, arity] | stack]) do
    # Take `arity` arguments off the stack, apply the Elixir MFA, and push
    # the (cast) results back. Tuples are flattened onto the stack.
    {args, new_stack} = Enum.split(stack, arity)
    new_stack =
      case apply(module, fun, args) do
        args when is_tuple(args) -> Tuple.to_list(args) |> Enum.map(&cast/1)
        args -> [cast(args)]
      end
      |> Kernel.++(new_stack)
    interpret(opts, tl, dict, new_stack)
  end

  @doc "RUN: Push the first n elements from the stack to the line interpreter."
  def interpret(%{mode: :RUN} = opts, [:INTPR | tl], dict, [n | stack]) do
    {top, new_stack} = Enum.split(stack, n)
    interpret(opts, top ++ tl, dict, new_stack)
  end

  @doc "RUN: Push non-atom terms on the stack."
  def interpret(%{mode: :RUN} = opts, [hd | tl], dict, stack)
      when not is_atom(hd),
    do: interpret(opts, tl, dict, [hd | stack])

  @doc "RUN: Actually execute stuff."
  def interpret(%{mode: :RUN} = opts, [hd | tl], dict, stack)
      when is_atom(hd) do
    # Look the word up in the dict and splice its definition into the line.
    new_line =
      case Map.fetch(dict, hd) do
        {:ok, {_flags, defn}} -> defn ++ tl
        _ -> raise "Undefined operation: #{inspect(hd)}"
      end
    interpret(opts, new_line, dict, stack)
  end

  # LIBRA / DEFINE
  @doc "LIBRA: Add the hidden flag."
  def interpret(%{mode: :LIBRA} = opts, [:HIDE | tl], dict, stack),
    do: interpret(set_flag(opts, :HIDDEN), tl, dict, stack)

  @doc "LIBRA: Remove the hidden flag."
  def interpret(%{mode: :LIBRA} = opts, [:IN | tl], dict, stack),
    do: interpret(unset_flag(opts, :HIDDEN), tl, dict, stack)

  @doc "LIBRA: Special case: Wrong order of clauses in LIBRA, HIDE."
  def interpret(%{mode: :LIBRA} = opts, [:END, :";" | tl], dict, stack),
    do: interpret(opts, [:";", :END | tl], dict, stack)

  @doc "LIBRA: Special case: Wrong order of clauses in LIBRA, HIDE, END_LIBRA."
  def interpret(%{mode: :LIBRA} = opts, [:END, :"." | tl], dict, stack),
    do: interpret(opts, [:";", :END, :END_LIBRA | tl], dict, stack)

  @doc "LIBRA: End HIDE-IN section. Drop all hidden definitions."
  def interpret(%{mode: :LIBRA} = opts, [:END | tl], dict, stack) do
    {new_opts, new_dict} = clear_hidden(opts, dict)
    interpret(new_opts, tl, new_dict, stack)
  end

  @doc "LIBRA: Syntactic sugar for END_LIBRA."
  def interpret(%{mode: :LIBRA} = opts, [:"." | tl], dict, stack),
    do: interpret(opts, [:";", :END_LIBRA | tl], dict, stack)

  @doc "LIBRA: add a definition to the dict."
  def interpret(%{mode: :LIBRA} = opts, [:";" | tl], dict, stack) do
    # The stack holds `reversed-definition :== name ...`; split at :== to
    # recover the definition body and its name.
    {defn, name, rest} =
      stack
      |> Enum.split_while(&(&1 != :==))
      |> case do
        {defn, [:==, name | rest]} when is_atom(name) -> {defn, name, rest}
        {_defn, [:==, name | _rest]} -> raise "Bad name (not an atom): #{inspect(name)}"
      end
    opts = if MapSet.member?(opts.flags, :HIDDEN), do: put_hidden(opts, name), else: opts
    interpret(opts, tl,
              add_def(name, defn, opts.flags, opts.hidden, dict),
              Enum.drop_while(rest, &(&1 != :LIBRA)))
  end

  @doc "LIBRA: push intermediate terms on the stack."
  def interpret(%{mode: :LIBRA} = opts, [hd | tl], dict, stack),
    do: interpret(opts, tl, dict, [hd | stack])

  # Helpers.
  defp set_mode(opts, mode), do: %{opts | mode: mode}

  defp set_flag(%{flags: flags} = opts, new_flag),
    do: %{opts | flags: MapSet.put(flags, new_flag)}

  defp unset_flag(%{flags: flags} = opts, old_flag),
    do: %{opts | flags: MapSet.delete(flags, old_flag)}

  defp put_hidden(%{hidden: hidden} = opts, name),
    do: %{opts | hidden: MapSet.put(hidden, name)}

  # Drop all entries marked as hidden in the dict
  defp clear_hidden(%{hidden: hidden} = opts, dict),
    do: {opts |> unset_flag(:HIDDEN) |> Map.put(:hidden, MapSet.new), Map.drop(dict, MapSet.to_list(hidden))}

  # Add operator. New entries have the form {flags :: MapSet, definition :: [_ | _]}
  # Hidden words are inlined into the new definition so they survive clear_hidden/2.
  defp add_def(name, defn, flags, hidden, dict) do
    new_defn =
      Enum.reduce(defn, [], fn (elem, acc) ->
        if MapSet.member?(hidden, elem) do
          {_flags, hidden_def} = Map.get(dict, elem)
          hidden_def ++ acc
        else
          [elem | acc]
        end
      end)
    Map.put(dict, name, {flags, new_defn})
  end

  # Cast to Joy-known datatypes
  defp cast(args) when is_list(args), do: Enum.map(args, &cast/1)
  defp cast(args) when is_tuple(args), do: Tuple.to_list(args) |> Enum.map(&cast/1)
  defp cast(args) when is_map(args), do: Map.to_list(args) |> Enum.map(&cast/1)
  defp cast(args), do: args
end
|
bliss_01/lib/bliss/interpreter.ex
| 0.624752
| 0.415314
|
interpreter.ex
|
starcoder
|
defmodule Plug.Crypto.MessageEncryptor do
  @moduledoc ~S"""
  `MessageEncryptor` is a simple way to encrypt values which get stored
  somewhere you don't trust.

  The cipher text and initialization vector are base64 encoded and
  returned to you.

  This can be used in situations similar to the `MessageVerifier`, but where
  you don't want users to be able to determine the value of the payload.

  ## Example

      secret_key_base = "072d1e0157c008193fe48a670cce031faa4e..."
      encrypted_cookie_salt = "encrypted cookie"
      encrypted_signed_cookie_salt = "signed encrypted cookie"

      secret = KeyGenerator.generate(secret_key_base, encrypted_cookie_salt)
      sign_secret = KeyGenerator.generate(secret_key_base, encrypted_signed_cookie_salt)

      data = "hello world"
      encrypted = MessageEncryptor.encrypt_and_sign(data, secret, sign_secret)
      {:ok, decrypted} = MessageEncryptor.verify_and_decrypt(encrypted, secret, sign_secret)
  """

  alias Plug.Crypto.MessageVerifier

  @doc """
  Encrypts and signs a message.

  The message is padded to the cipher block size, encrypted with a random
  IV, base64 encoded together with the IV, and finally signed with
  `sign_secret` so it cannot be tampered with.
  """
  def encrypt_and_sign(message, secret, sign_secret, cipher \\ :aes_cbc256)
      when is_binary(message) and is_binary(secret) and is_binary(sign_secret) do
    iv = :crypto.strong_rand_bytes(16)

    message
    |> pad_message
    |> encrypt(cipher, secret, iv)
    |> Base.encode64()
    |> Kernel.<>("--#{Base.encode64(iv)}")
    |> MessageVerifier.sign(sign_secret)
  end

  @doc """
  Decrypts and verifies a message.

  We need to verify the message in order to avoid padding attacks.
  Reference: http://www.limited-entropy.com/padding-oracle-attacks
  """
  def verify_and_decrypt(encrypted, secret, sign_secret, cipher \\ :aes_cbc256)
      when is_binary(encrypted) and is_binary(secret) and is_binary(sign_secret) do
    case MessageVerifier.verify(encrypted, sign_secret) do
      {:ok, verified} ->
        # Base64 alphabets never contain "-", so "--" unambiguously
        # separates ciphertext from IV.
        [encrypted, iv] = String.split(verified, "--") |> Enum.map(&Base.decode64!/1)
        encrypted |> decrypt(cipher, secret, iv) |> unpad_message

      :error ->
        :error
    end
  end

  defp encrypt(message, cipher, secret, iv) do
    :crypto.block_encrypt(cipher, trim_secret(secret), iv, message)
  end

  defp decrypt(encrypted, cipher, secret, iv) do
    :crypto.block_decrypt(cipher, trim_secret(secret), iv, encrypted)
  end

  # PKCS#7-style padding: always appends 1..16 bytes, each holding the
  # padding length, so the result is a multiple of the 16-byte block size.
  defp pad_message(msg) do
    bytes_remaining = rem(byte_size(msg), 16)
    padding_size = 16 - bytes_remaining
    msg <> :binary.copy(<<padding_size>>, padding_size)
  end

  # BUGFIX: the original accepted padding_size == 0 (`<= 16`), which made
  # any message ending in a 0 byte "validly padded" (both sides of the
  # comparison are the empty binary). Valid padding is always 1..16.
  defp unpad_message(msg) do
    padding_size = :binary.last(msg)

    if padding_size in 1..16 do
      msg_size = byte_size(msg)

      if binary_part(msg, msg_size, -padding_size) == :binary.copy(<<padding_size>>, padding_size) do
        {:ok, binary_part(msg, 0, msg_size - padding_size)}
      else
        :error
      end
    else
      :error
    end
  end

  # AES-256 keys are 32 bytes; truncate longer derived secrets.
  defp trim_secret(secret) do
    case byte_size(secret) do
      large when large > 32 -> :binary.part(secret, 0, 32)
      _ -> secret
    end
  end
end
|
lib/plug/crypto/message_encryptor.ex
| 0.870989
| 0.441372
|
message_encryptor.ex
|
starcoder
|
defmodule Grizzly do
  @moduledoc """
  Grizzly functions for controlling Z-Wave devices and the
  Z-Wave network.

  ## Sending commands to Z-Wave

  The most fundamental function in `Grizzly` is `Grizzly.send_command/3`.
  There are two ways of using this function.

  First, by passing in a node id for a node on the network:

  ```elixir
  Grizzly.send_command(10, Grizzly.CommandClass.SwitchBinary.Get)
  {:ok, :on}

  Grizzly.send_command(10, Grizzly.CommandClass.SwitchBinary.Set, value: :off)
  ```

  This is useful for short lived deterministic communication like `iex`
  and scripts. This is because there is the overhead of connecting and
  disconnecting to the node for each call.

  For long lived applications that have non-deterministic sending of
  messages (some type of automated commands) and user expectations on
  device action we recommend using this function by passing in a
  `Grizzly.Node`, `Grizzly.Conn`, or `Grizzly.Controller`.

  ```elixir
  {:ok, zw_node} = Grizzly.get_node(10)
  {:ok, zw_node} = Grizzly.Node.connect(zw_node)

  {:ok, :on} = Grizzly.send_command(zw_node, Grizzly.CommandClass.SwitchBinary.Get)
  :ok = Grizzly.send_command(zw_node, Grizzly.CommandClass.SwitchBinary.Set, value: :on)
  ```

  This is useful because we maintain a heart beat with the node and overhead
  of establishing the connection is removed from `send_command`.

  In order for the consumer of Grizzly to use this in a long running application they
  will need to hold on to a reference to the connected Z-Wave Node.

  To know more commands and their arguments see the modules under the
  `Grizzly.CommandClass` name space.
  """

  alias Grizzly.{Conn, Command, Node, Controller, Notifications}
  alias Grizzly.Conn.Config
  alias Grizzly.Client.DTLS

  @type seq_number :: 0..255

  @typedoc """
  A type that represents things that have/can establish connections
  to the Z/IP network.

  1. `Conn.t` - A Connection struct
  2. `Grizzly.Controller` - The controller process, this is a global, started on
     application start process
  3. `Node.t` - This is a Z-Wave Node that has been connected to the network
  """
  @type connected :: Conn.t() | Controller | Node.t()

  # Option keys that belong to the connection rather than the command.
  @conn_opts [:owner]

  @doc """
  Returns the controller connection config, falling back to the default
  DTLS config when no `:grizzly, Grizzly.Controller` config is set.
  """
  @spec config() :: Config.t()
  def config() do
    case Application.get_env(:grizzly, Grizzly.Controller) do
      nil ->
        Config.new(
          ip: {0xFD00, 0xAAAA, 0, 0, 0, 0, 0, 1},
          port: 41230,
          client: DTLS
        )

      opts ->
        Config.new(opts)
    end
  end

  @doc """
  Send a command to the Z-Wave device, first checking if in inclusion/exclusion state.

  See individual command modules for information about what options it takes.
  """
  @spec send_command(
          connected | Node.node_id(),
          command_module :: module,
          command_opts :: keyword
        ) ::
          :ok | {:ok, any} | {:error, any}
  def send_command(connected, command_module, command_opts \\ [])

  def send_command(%Conn{} = conn, command_module, opts) do
    # an option in opts is either a command or connection option
    command_opts = Keyword.drop(opts, @conn_opts)
    conn_opts = opts -- command_opts

    with {:ok, command} <- Command.start(command_module, command_opts) do
      Conn.send_command(conn, command, conn_opts)
    else
      {:error, reason} ->
        {:error, reason}
    end
  end

  def send_command(Controller, command_module, command_opts) do
    send_command(Controller.conn(), command_module, command_opts)
  end

  # Node id 1 with no connection is the controller itself.
  def send_command(%Node{conn: nil, id: 1}, command_module, command_opts) do
    send_command(Controller, command_module, command_opts)
  end

  def send_command(%Node{conn: conn}, command_module, command_opts) do
    send_command(conn, command_module, command_opts)
  end

  def send_command(node_id, command_module, command_opts) when is_integer(node_id) do
    # Short-lived path: connect, send, then always disconnect.
    with {:ok, zw_node} <- Grizzly.get_node(node_id),
         {:ok, zw_node} <- Node.connect(zw_node) do
      response = send_command(zw_node, command_module, command_opts)
      :ok = Node.disconnect(zw_node)
      response
    else
      error -> error
    end
  end

  @doc """
  Close a connection
  """
  @spec close_connection(Conn.t()) :: :ok
  def close_connection(%Conn{} = conn) do
    Conn.close(conn)
  end

  @doc """
  Get a node from the network

  This does not make a DTLS connection to the `Node.t()`
  and if you want to connect to the node use `Grizzly.Node.connect/1`.
  """
  @spec get_node(Node.node_id()) :: {:ok, Node.t()} | {:error, :node_not_found}
  defdelegate get_node(node_id), to: Grizzly.Network

  @doc """
  Reset the Z-Wave Module to a clean state
  """
  @spec reset_controller() :: :ok | {:error, :network_busy}
  defdelegate reset_controller(), to: Grizzly.Network, as: :reset

  @doc """
  List the nodes on the Z-Wave network
  """
  @spec get_nodes() :: {:ok, [Node.t()]} | {:error, :unable_to_get_nodes}
  defdelegate get_nodes(), to: Grizzly.Network

  @doc """
  Check to see if the network is busy
  """
  @spec network_busy?() :: boolean()
  defdelegate network_busy?(), to: Grizzly.Network, as: :busy?

  @doc """
  Check to see if the network is ready
  """
  @spec network_ready?() :: boolean()
  defdelegate network_ready?(), to: Grizzly.Network, as: :ready?

  @doc """
  Put network in inclusion mode
  """
  @spec add_node([Grizzly.Inclusion.opt()]) ::
          :ok | {:error, {:invalid_option, Grizzly.Inclusion.invalid_opts_reason()}}
  defdelegate add_node(opts \\ []), to: Grizzly.Inclusion

  @doc """
  Put network in exclusion mode
  """
  @spec remove_node([Grizzly.Inclusion.opt()]) :: :ok
  defdelegate remove_node(opts \\ []), to: Grizzly.Inclusion

  @doc """
  Put network out of inclusion mode
  """
  @spec add_node_stop() :: :ok
  defdelegate add_node_stop(), to: Grizzly.Inclusion

  @doc """
  Put network out of exclusion mode
  """
  # BUGFIX: this spec was written as `remove_node() :: :ok`, annotating the
  # wrong function (and clashing with remove_node/1 above); it must name
  # remove_node_stop/0.
  @spec remove_node_stop() :: :ok
  defdelegate remove_node_stop(), to: Grizzly.Inclusion

  @doc """
  Whether the node's command class versions are known
  """
  @spec command_class_versions_known?(Node.t()) :: boolean
  defdelegate command_class_versions_known?(zw_node), to: Grizzly.Node

  @doc """
  Update the command class version of a node
  """
  @spec update_command_class_versions(Node.t()) :: Node.t()
  defdelegate update_command_class_versions(zw_node), to: Node

  @doc """
  Put the controller in learn mode for a few seconds
  """
  @spec start_learn_mode([Grizzly.Inclusion.opt()]) :: :ok
  defdelegate start_learn_mode(opts \\ []), to: Grizzly.Inclusion

  @doc """
  Get the version of a node's command class, if the node does not have a version for
  this command class this function will try to get it from the Z-Wave network.
  """
  @spec get_command_class_version(Node.t(), atom) ::
          {:ok, non_neg_integer} | {:error, atom}
  defdelegate get_command_class_version(node, command_class_name), to: Node

  @doc """
  Whether a node has a given command class
  """
  @spec has_command_class?(Node.t(), atom) :: boolean
  defdelegate has_command_class?(node, command_class_name), to: Node

  @doc """
  Whether a node is connected.
  """
  @spec connected?(Node.t()) :: boolean
  defdelegate connected?(node), to: Node

  @doc """
  Get the command classes supported by a node.
  """
  @spec command_class_names(Node.t()) :: [atom()]
  defdelegate command_class_names(node), to: Node

  @doc """
  Subscribe to notifications about a topic

  See `Grizzly.Notifications` for more information
  """
  @spec subscribe(Notifications.topic()) :: :ok | {:error, :already_subscribed}
  defdelegate subscribe(topic), to: Notifications
end
|
lib/grizzly.ex
| 0.902142
| 0.823612
|
grizzly.ex
|
starcoder
|
defmodule ExMachina do
@moduledoc """
Defines functions for generating data
In depth examples are in the [README](readme.html)
"""
defmodule UndefinedFactoryError do
  @moduledoc """
  Error raised when trying to build or create a factory that is undefined.
  """

  defexception [:message]

  # Builds the exception from the missing factory name, embedding a hint
  # showing how to define the `<name>_factory` function.
  def exception(factory_name) do
    message = """
    No factory defined for #{inspect(factory_name)}.

    Please check for typos or define your factory:

        def #{factory_name}_factory do
          ...
        end
    """

    %UndefinedFactoryError{message: message}
  end
end
use Application
@doc false
def start(_type, _args), do: ExMachina.Sequence.start_link()
defmacro __using__(_opts) do
quote do
@before_compile unquote(__MODULE__)
import ExMachina, only: [sequence: 1, sequence: 2, merge_attributes: 2]
def build(factory_name, attrs \\ %{}) do
ExMachina.build(__MODULE__, factory_name, attrs)
end
def build_pair(factory_name, attrs \\ %{}) do
ExMachina.build_pair(__MODULE__, factory_name, attrs)
end
def build_list(number_of_records, factory_name, attrs \\ %{}) do
ExMachina.build_list(__MODULE__, number_of_records, factory_name, attrs)
end
@spec create(any) :: no_return
def create(_) do
raise_function_replaced_error("create/1", "insert/1")
end
@spec create(any, any) :: no_return
def create(_, _) do
raise_function_replaced_error("create/2", "insert/2")
end
@spec create_pair(any, any) :: no_return
def create_pair(_, _) do
raise_function_replaced_error("create_pair/2", "insert_pair/2")
end
@spec create_list(any, any, any) :: no_return
def create_list(_, _, _) do
raise_function_replaced_error("create_list/3", "insert_list/3")
end
@spec raise_function_replaced_error(String.t(), String.t()) :: no_return
defp raise_function_replaced_error(old_function, new_function) do
raise """
#{old_function} has been removed.
If you are using ExMachina.Ecto, use #{new_function} instead.
If you are using ExMachina with a custom `save_record/2`, you now must use ExMachina.Strategy.
See the ExMachina.Strategy documentation for examples.
"""
end
defoverridable create: 1, create: 2, create_pair: 2, create_list: 3
end
end
@doc """
Shortcut for creating unique string values.
This is automatically imported into a model factory when you `use ExMachina`.
This is equivalent to `sequence(name, &"\#{name}\#{&1}")`. If you need to
customize the returned string, see `sequence/2`.
Note that sequences keep growing and are *not* reset by ExMachina. Most of the
time you won't need to reset the sequence, but when you do need to reset them,
you can use `ExMachina.Sequence.reset/0`.
## Examples
def user_factory do
%User{
# Will generate "username0" then "username1", etc.
username: sequence("username")
}
end
def article_factory do
%Article{
# Will generate "Article Title0" then "Article Title1", etc.
title: sequence("Article Title")
}
end
"""
@spec sequence(String.t()) :: String.t()
def sequence(name), do: ExMachina.Sequence.next(name)
@doc """
Create sequences for generating unique values.
This is automatically imported into a model factory when you `use ExMachina`.
The `name` can be any term, although it is typically an atom describing the
sequence. Each time a sequence is called with the same `name`, its number is
incremented by one.
The `formatter` function takes the sequence number, and returns a sequential
representation of that number – typically a formatted string.
## Examples
def user_factory do
%{
# Will generate "<EMAIL>" then "<EMAIL>", etc.
email: sequence(:email, &"<EMAIL>"),
# Will generate "admin" then "user", "other", "admin" etc.
role: sequence(:role, ["admin", "user", "other"])
}
end
"""
@spec sequence(any, (integer -> any) | nonempty_list) :: any
def sequence(name, formatter), do: ExMachina.Sequence.next(name, formatter)
@doc """
Builds a single factory.
This will defer to the `[factory_name]_factory/0` callback defined in the
factory module in which it is `use`d.
## Example
def user_factory do
%{name: "<NAME>", admin: false}
end
# Returns %{name: "<NAME>", admin: false}
build(:user)
# Returns %{name: "<NAME>", admin: true}
build(:user, admin: true)
If you want full control over the factory attributes, you can define the
factory with `[factory_name]_factory/1`. Note that you will need to merge the
attributes passed if you want to emulate ExMachina's default behavior.
## Example
def article_factory(attrs) do
title = Map.get(attrs, :title, "default title")
slug = Article.title_to_slug(title)
article = %Article{title: title, slug: slug}
# merge attributes on your own
merge_attributes(article, attrs)
end
# Returns %Article{title: "default title", slug: "default-title"}
build(:article)
# Returns %Article{title: "hello world", slug: "hello-world"}
build(:article, title: "hello world")
"""
@callback build(factory_name :: atom) :: any
@callback build(factory_name :: atom, attrs :: keyword | map) :: any
@doc false
def build(module, factory_name, attrs \\ %{}) do
attrs = Enum.into(attrs, %{})
function_name = build_function_name(factory_name)
cond do
factory_accepting_attributes_defined?(module, function_name) ->
apply(module, function_name, [attrs])
factory_without_attributes_defined?(module, function_name) ->
apply(module, function_name, []) |> merge_attributes(attrs)
true ->
raise UndefinedFactoryError, factory_name
end
end
defp build_function_name(factory_name) do
factory_name
|> Atom.to_string()
|> Kernel.<>("_factory")
|> String.to_atom()
end
defp factory_accepting_attributes_defined?(module, function_name) do
Code.ensure_loaded?(module) && function_exported?(module, function_name, 1)
end
defp factory_without_attributes_defined?(module, function_name) do
Code.ensure_loaded?(module) && function_exported?(module, function_name, 0)
end
@doc """
Helper function to merge attributes into a factory that could be either a map
or a struct.
## Example
# custom factory
def article_factory(attrs) do
title = Map.get(attrs, :title, "default title")
article = %Article{
title: title
}
merge_attributes(article, attrs)
end
Note that when trying to merge attributes into a struct, this function will
raise if one of the attributes is not defined in the struct.
"""
@spec merge_attributes(struct | map, map) :: struct | map | no_return
def merge_attributes(%{__struct__: _} = record, attrs), do: struct!(record, attrs)
def merge_attributes(record, attrs), do: Map.merge(record, attrs)
@doc """
Builds two factories.
This is just an alias for `build_list(2, factory_name, attrs)`.
## Example
# Returns a list of 2 users
build_pair(:user)
"""
@callback build_pair(factory_name :: atom) :: list
@callback build_pair(factory_name :: atom, attrs :: keyword | map) :: list
@doc false
def build_pair(module, factory_name, attrs \\ %{}) do
ExMachina.build_list(module, 2, factory_name, attrs)
end
@doc """
Builds any number of factories.
## Example
# Returns a list of 3 users
build_list(3, :user)
"""
@callback build_list(number_of_records :: integer, factory_name :: atom) :: list
@callback build_list(number_of_records :: integer, factory_name :: atom, attrs :: keyword | map) ::
list
@doc false
def build_list(module, number_of_records, factory_name, attrs \\ %{}) do
Stream.repeatedly(fn ->
ExMachina.build(module, factory_name, attrs)
end)
|> Enum.take(number_of_records)
end
defmacro __before_compile__(_env) do
quote do
@doc "Raises a helpful error if no factory is defined."
@spec factory(any) :: no_return
def factory(factory_name), do: raise(UndefinedFactoryError, factory_name)
end
end
end
|
lib/ex_machina.ex
| 0.875375
| 0.452717
|
ex_machina.ex
|
starcoder
|
defmodule AOC.Y2021.Day18 do
  @behaviour AOC.Solution

  # Snailfish numbers are modelled as nested two-element lists, e.g. [[1, 2], 3].

  def input_path() do
    "./lib/2021/input/day18.txt"
  end

  # Each input line is a snailfish number written as a nested list literal,
  # which happens to be valid Elixir syntax, so it is parsed by evaluation.
  def parse_input(input) do
    input
    |> String.split("\n", trim: true)
    |> Enum.map(fn line ->
      {number, _bindings} = Code.eval_string(line)
      number
    end)
  end

  # Part 1: sum all numbers left-to-right, then take the magnitude.
  def star_1(numbers) do
    numbers
    |> Enum.reduce(fn number, acc -> add_sf_numbers(acc, number) end)
    |> magnitude()
  end

  # Part 2: largest magnitude obtainable by adding two distinct numbers.
  # Snailfish addition is not commutative, so both orders are tried.
  def star_2(numbers) do
    best_magnitude(numbers)
  end

  # Addition pairs the operands and fully reduces the result.
  def add_sf_numbers(a, b), do: reduce([a, b])

  # Applies the reduction rules until a fixed point: exploding always takes
  # priority over splitting, and only one action is performed per iteration.
  def reduce(sfn) do
    case reduce_explode(sfn, 1) do
      {:explode, exploded, _carry_left, _carry_right} ->
        reduce(exploded)

      {:no_explode, same, _, _} ->
        case reduce_split(same) do
          {:split, split} -> reduce(split)
          {:no_split, ^same} -> same
        end
    end
  end

  # Magnitude: 3 * left + 2 * right, recursively; a plain integer is itself.
  def magnitude(number) when is_integer(number), do: number
  def magnitude([left, right]), do: 3 * magnitude(left) + 2 * magnitude(right)

  defguardp needs_split(n) when is_integer(n) and n >= 10

  # -- explode ---------------------------------------------------------------
  # Depth-first, leftmost-first search for a pair nested inside four pairs.
  # Returns {:explode | :no_explode, tree, add_left, add_right}; the add_*
  # values are the exploded halves still waiting to be applied to neighbours.

  defp reduce_explode(number, _depth) when is_integer(number),
    do: {:no_explode, number, 0, 0}

  defp reduce_explode([l, r], depth) when depth > 4,
    do: {:explode, 0, l, r}

  defp reduce_explode([left, right], depth) do
    case reduce_explode(left, depth + 1) do
      {:explode, new_left, carry_left, add_to_right} ->
        # The exploded pair's right half lands on the leftmost leaf of our
        # right subtree; the left half keeps bubbling up the call stack.
        new_right =
          if is_integer(right),
            do: right + add_to_right,
            else: add_left(right, add_to_right)

        {:explode, [new_left, new_right], carry_left, 0}

      {:no_explode, left_done, _, _} ->
        case reduce_explode(right, depth + 1) do
          {:explode, new_right, add_to_left, carry_right} ->
            new_left =
              if is_integer(left),
                do: left + add_to_left,
                else: add_right(left, add_to_left)

            {:explode, [new_left, new_right], 0, carry_right}

          {:no_explode, right_done, _, _} ->
            {:no_explode, [left_done, right_done], 0, 0}
        end
    end
  end

  # -- split -----------------------------------------------------------------
  # Splits the leftmost regular number >= 10 into [floor(n/2), ceil(n/2)].

  defp reduce_split(number) when needs_split(number),
    do: {:split, [div(number, 2), ceil(number / 2)]}

  defp reduce_split(number) when is_integer(number), do: {:no_split, number}

  defp reduce_split([left, right]) do
    case reduce_split(left) do
      {:split, new_left} ->
        {:split, [new_left, right]}

      {:no_split, left_done} ->
        case reduce_split(right) do
          {:split, new_right} -> {:split, [left, new_right]}
          {:no_split, right_done} -> {:no_split, [left_done, right_done]}
        end
    end
  end

  # Add `amount` to the leftmost / rightmost regular number of a subtree.
  defp add_left([l, r], amount) when is_integer(l), do: [l + amount, r]
  defp add_left([l, r], amount), do: [add_left(l, amount), r]

  defp add_right([l, r], amount) when is_integer(r), do: [l, r + amount]
  defp add_right([l, r], amount), do: [l, add_right(r, amount)]

  # Walks every ordered pair of distinct numbers, tracking the best magnitude.
  defp best_magnitude([first | rest]), do: best_magnitude(first, rest, rest, -1)

  defp best_magnitude(_number, [], [], best), do: best

  defp best_magnitude(_number, [], [next | remaining], best),
    do: best_magnitude(next, remaining, remaining, best)

  defp best_magnitude(number, [other | rest], remaining, best) do
    forward = magnitude(add_sf_numbers(number, other))
    backward = magnitude(add_sf_numbers(other, number))
    best_magnitude(number, rest, remaining, Enum.max([forward, backward, best]))
  end
end
|
lib/2021/day18.ex
| 0.658198
| 0.522689
|
day18.ex
|
starcoder
|
defmodule CanvasAPI.CanvasService do
  @moduledoc """
  A service for viewing and manipulating canvases.
  """

  # NOTE(review): `use CanvasAPI.Web, :service` presumably brings in `Repo`,
  # Ecto query macros (`from`, `assoc`) and changeset helpers (`put_assoc`)
  # used unqualified below — confirm in CanvasAPI.Web.
  use CanvasAPI.Web, :service

  import CanvasAPI.UUIDMatch

  alias CanvasAPI.{Account, Canvas, SlackNotifier, Team, User}

  # Associations preloaded on every canvas this service returns.
  @preload [:team, :template, creator: [:team]]

  @doc """
  Create a new canvas from the given params.

  The creator must provide a team and creator, and can optionally provide a
  template.

  Options:

  - `creator`: `%User{}` (**required**) The user who created the canvas
  - `team`: `%Team{}` (**required**) The team to create the canvas in
  - `template`: `map` A map with `"type" => "canvas"` and an ID for the new
    canvas's template. **Ignored if the template is not found.**

  ## Examples

  ```elixir
  CanvasService.create(
    %{"is_template" => true},
    creator: current_user,
    team: current_team,
    template: %{"id" => "6ijSghOIflAjKVki5j0dpL", "type" => "canvas"})
  ```
  """
  @spec create(map, Keyword.t) :: {:ok, %Canvas{}} | {:error, Ecto.Changeset.t}
  def create(params, opts) do
    %Canvas{}
    |> Canvas.changeset(params)
    |> put_assoc(:creator, opts[:creator])
    |> put_assoc(:team, opts[:team])
    |> Canvas.put_template(opts[:template])
    |> Repo.insert
    |> case do
      {:ok, canvas} ->
        # Best-effort notification; `delay: 300` is forwarded to
        # SlackNotifier.delay/2 (units not visible here — presumably ms).
        if opts[:notify],
          do: notify_slack(opts[:notify], canvas, [], delay: 300)

        {:ok, Repo.preload(canvas, @preload)}

      error ->
        error
    end
  end

  @doc """
  List canvases on behalf of a user.

  Available filters:

  - `user`: `%User{}` (**required**) A user to list canvases for
  - `only_templates`: `boolean` List only templates canvases, including global
    templates, if they are defined.

  ## Examples

  ```elixir
  CanvasService.list(user: current_user, only_templates: true)
  ```
  """
  @spec list(Keyword.t) :: [%Canvas{}] | []
  # Keyword-list patterns match exactly, so this clause only handles a call
  # with `user:` and no other options.
  def list(user: user) do
    from(assoc(user, :canvases),
      order_by: [asc: :inserted_at],
      preload: ^@preload)
    |> Repo.all
  end

  # Template listing additionally pulls in globally-shared templates and
  # sorts the combined list by title.
  def list(user: user, only_templates: true) do
    from(assoc(user, :canvases),
      where: [is_template: true],
      order_by: [asc: :inserted_at],
      preload: ^@preload)
    |> Repo.all
    |> merge_global_templates
    |> Enum.sort_by(&Canvas.title/1)
  end

  @doc """
  Get a canvas that is in an account's teams.

  The user must pass in an account.

  Options:

  - `account`: `%Account{}` (**required**) The account requesting the canvas

  ## Examples

  ```elixir
  CanvasService.get(
    "6ijSghOIflAjKVki5j0dpL",
    account: conn.private.current_account)
  ```
  """
  @spec get(String.t, Keyword.t) :: {:ok, %Canvas{}} | {:error, :not_found}
  def get(id, account: account) do
    # Scoping the query through the account's canvases enforces access:
    # a canvas outside the account's teams is simply not found.
    from(assoc(account, :canvases),
      preload: ^@preload)
    |> Repo.get(id)
    |> case do
      nil -> {:error, :not_found}
      canvas -> {:ok, canvas}
    end
  end

  @doc """
  Show a canvas, verifying that the account has view access.

  The user must pass in an account and a team identity, which is either an ID
  or a domain.

  Options:

  - `account`: `%Account{}` (**required**) The account requesting the canvas
  - `team_id`: `String.t` (**required**) The team identity the canvas is in

  ## Examples

  ```elixir
  CanvasService.show(
    "6ijSghOIflAjKVki5j0dpL",
    account: conn.private.current_account,
    team_id: "87ee9199-e2fa-49e6-9d99-16988af57fd5")
  ```
  """
  @spec show(String.t, Keyword.t) :: {:ok, %Canvas{}} | {:error, :not_found}
  def show(id, opts) do
    do_show(id, opts[:team_id])
    |> verify_can_show(opts[:account])
  end

  @spec do_show(String.t, String.t) :: %Canvas{} | nil
  # `match_uuid()` comes from CanvasAPI.UUIDMatch — presumably a macro that
  # pattern-matches UUID-shaped strings, so this clause handles team IDs and
  # the next clause treats anything else as a team domain. TODO confirm.
  defp do_show(id, team_id = match_uuid()) do
    from(Canvas, where: [team_id: ^team_id], preload: ^@preload)
    |> Repo.get(id)
  end

  defp do_show(id, domain) do
    from(c in Canvas,
      join: t in Team, on: c.team_id == t.id,
      where: t.domain == ^domain,
      preload: ^@preload)
    |> Repo.get(id)
  end

  @doc """
  Update a canvas.

  ## Examples

  ```elixir
  CanvasService.update(canvas, %{"is_template" => false})
  ```
  """
  @spec update(%Canvas{}, map, Keyword.t) :: {:ok, %Canvas{}}
                                           | {:error, Ecto.Changeset.t}
  def update(canvas, params, opts \\ []) do
    # Capture the pre-update channel list so only newly-added channels are
    # notified (see notify_slack/4, which diffs against this list).
    old_channel_ids = canvas.slack_channel_ids

    canvas
    |> Canvas.update_changeset(params)
    |> Canvas.put_template(opts[:template], ignore_blocks: true)
    |> Repo.update
    |> case do
      {:ok, canvas} ->
        if opts[:notify],
          do: notify_slack(opts[:notify], canvas, old_channel_ids)

        {:ok, Repo.preload(canvas, @preload)}

      error ->
        error
    end
  end

  @doc """
  Delete a canvas.

  If the canvas is not found, returns `nil`. If the delete was invalid, returns
  `{:error, changeset}`. If it was successful, returns `{:ok, canvas}`.

  ## Examples

  ```elixir
  CanvasService.delete("6ijSghOIflAjKVki5j0dpL", account: account)
  ```
  """
  @spec delete(String.t, Keyword.t) :: {:ok, %Canvas{}}
                                     | {:error, Ecto.Changeset.t}
                                     | {:error, :not_found}
  def delete(id, account: account) do
    # Reuses get/2 so the same account-scoping rules apply to deletion.
    get(id, account: account)
    |> case do
      {:ok, canvas} -> Repo.delete(canvas)
      {:error, :not_found} -> {:error, :not_found}
    end
  end

  @spec merge_global_templates([%Canvas{}]) :: [%Canvas{}]
  # Global templates are the templates created by a designated user, whose ID
  # is read from the TEMPLATE_USER_ID env var at runtime on every call.
  defp merge_global_templates(team_templates) do
    do_merge_global_templates(
      team_templates, System.get_env("TEMPLATE_USER_ID"))
  end

  @spec do_merge_global_templates([%Canvas{}], String.t | nil) :: [%Canvas{}]
  # No template user configured (unset or blank) -> nothing to merge.
  defp do_merge_global_templates(templates, nil), do: templates
  defp do_merge_global_templates(templates, ""), do: templates

  defp do_merge_global_templates(templates, id) do
    templates ++
      (from(c in Canvas,
        join: u in User, on: u.id == c.creator_id,
        where: u.id == ^id,
        where: c.is_template == true,
        preload: [creator: [:team]])
      |> Repo.all)
  end

  @spec notify_slack(%User{}, %Canvas{}, list, Keyword.t) :: any
  # Queues one delayed notification per channel that was newly added since
  # `old_channel_ids`. Silently no-ops when the team has no Slack token or no
  # bot token in its metadata (the `with` just returns the mismatch).
  defp notify_slack(notifier, canvas, old_channel_ids, opts \\ []) do
    with {:ok, token} <- Team.get_token(canvas.team, "slack"),
         token = get_in(token.meta, ~w(bot bot_access_token)) do
      (canvas.slack_channel_ids -- old_channel_ids)
      |> Enum.each(
        &SlackNotifier.delay(
          {:notify_new, [token, canvas.id, notifier.id, &1]}, opts))
    end
  end

  @spec verify_can_show(%Canvas{} | nil, %Account{}) :: {:ok, %Canvas{}}
                                                      | {:error, :not_found}
  defp verify_can_show(nil, _), do: {:error, :not_found}

  # Link-access policy: canvases with link_access "none" are only visible to
  # accounts that belong to the canvas's team; any other link_access value is
  # treated as viewable.
  defp verify_can_show(canvas, account) do
    not_found = {:error, :not_found}

    case canvas.link_access do
      "none" ->
        case account do
          nil -> not_found
          account ->
            account = Repo.preload(account, [:teams])
            if canvas.team in account.teams, do: {:ok, canvas}, else: not_found
        end

      _ ->
        {:ok, canvas}
    end
  end
end
|
lib/canvas_api/services/canvas_service.ex
| 0.909877
| 0.896659
|
canvas_service.ex
|
starcoder
|
defmodule Mirage do
  @moduledoc """
  This top level module is for transforming images.

  For reading and writing images, see the `Mirage.Image` module.
  """

  alias Mirage.Image

  # Resampling filters accepted by the native resize operations.
  @type filter_type ::
          :nearest
          | :triangle
          | :catmull_rom
          | :gaussian
          | :lanczos3

  @doc """
  Returns a resized image with the given dimensions using the given filter.

  The filter defaults to `:triangle` which performs well and looks decent.

  ## Examples

  With implied `:triangle` filter.

  ```
  Mirage.resize(image, 100, 100)
  ```

  With a custom filter algorithm.

  ```
  Mirage.resize(image, 100, 100, :lanczos3)
  ```
  """
  @spec resize(Image.t(), integer(), integer(), filter_type()) :: Image.t()
  def resize(image, width, height, filter \\ :triangle) do
    # Unwraps the NIF resource handle before crossing into native code.
    Mirage.Native.resize(image.resource, width, height, filter)
  end

  @doc """
  Returns a resized image using the specified filter algorithm defaults to
  `:triangle`. The image’s aspect ratio is preserved. The image is scaled to the
  maximum possible size that fits within the larger (relative to aspect ratio)
  of the bounds specified by nwidth and nheight, then cropped to fit within the
  other bound.

  ## Examples

  With implied `:triangle` filter.

  ```
  Mirage.resize_to_fill(image, 100, 100)
  ```

  With a custom filter algorithm.

  ```
  Mirage.resize_to_fill(image, 100, 100, :lanczos3)
  ```
  """
  @spec resize_to_fill(Image.t(), integer(), integer(), filter_type()) :: Image.t()
  def resize_to_fill(image, new_width, new_height, filter \\ :triangle) do
    Mirage.Native.resize_to_fill(image.resource, new_width, new_height, filter)
  end

  @doc """
  Returns a new image with the `top` image overlayed over the `bottom` image.

  ## Example

  ```
  {:ok, canvas} = Mirage.Image.empty(100, 100)

  canvas
  |> Mirage.overlay(image_a)
  |> Mirage.overlay(image_b)
  ```
  """
  @spec overlay(Image.t(), Image.t(), non_neg_integer(), non_neg_integer()) :: Image.t()
  def overlay(bottom, top, x \\ 0, y \\ 0) do
    # NOTE(review): unlike resize/4 and resize_to_fill/4 above, this passes
    # the `%Image{}` structs themselves rather than `bottom.resource` /
    # `top.resource` to the NIF — confirm Mirage.Native.overlay/4 accepts
    # structs; otherwise this is a bug.
    Mirage.Native.overlay(bottom, top, x, y)
  end
end
|
lib/mirage.ex
| 0.957566
| 0.953405
|
mirage.ex
|
starcoder
|
defmodule Exnoops.Wordbot do
  @moduledoc """
  Module to interact with Github's Noop: Wordbot

  See the [official `noop` documentation](https://noopschallenge.com/challenges/wordbot)
  for API information including the accepted parameters
  """

  require Logger

  import Exnoops.API

  # API path segment for this noop.
  @noop "wordbot"

  @doc """
  Query Wordbot for word(s)

  + Parameters are sent with a keyword list into the function

  ## Examples

      iex> Exnoops.Wordbot.get_word()
      {:ok, ["pepperoni"]}

      iex> Exnoops.Wordbot.get_word([count: 7])
      {:ok,
        [
          "extracorporeal",
          "behooves",
          "superregionals",
          "stepmother",
          "heckle",
          "clanks",
          "hippest"
        ]
      }

      iex> Exnoops.Wordbot.get_word([count: 4, set: "dinosaurs"])
      {:ok,
        [
          "Xixiposaurus",
          "Ischyrosaurus",
          "Crocodylomorph",
          "Tatankaceratops"
        ]
      }
  """
  @spec get_word(keyword()) :: {atom(), list()}
  def get_word(opts \\ []) when is_list(opts) do
    Logger.debug("Calling Wordbot.get_word()")

    # `with` unwraps the "words" payload on success; any non-matching
    # (error) result from the API client is passed through unchanged.
    with {:ok, %{"words" => words}} <- get("/" <> @noop, opts) do
      {:ok, words}
    end
  end

  @doc """
  Query Wordbot for word sets

  ## Examples

      iex> Exnoops.Wordbot.get_word_sets()
      {:ok,
        [
          "adjectives",
          "adverbs",
          "all",
          "animals",
          "cats",
          "common",
          "default",
          "dinosaurs",
          "dogs",
          "encouragement",
          "fabrics",
          "flowers",
          "fruits",
          "gemstones",
          "genres",
          "horses",
          "instruments",
          "knots",
          "menu",
          "metals",
          "moods",
          "nouns",
          "objects",
          "occupations",
          "prepositions",
          "rhymeless",
          "sports",
          "vegetables",
          "verbs",
          "verbs_past",
          "weather",
          "wrestlers"
        ]}
  """
  @spec get_word_sets :: {atom(), list()}
  def get_word_sets do
    Logger.debug("Calling Wordbot.get_word_sets()")

    with {:ok, %{"words" => sets}} <- get("/" <> @noop <> "/sets", []) do
      {:ok, sets}
    end
  end
end
|
lib/exnoops/wordbot.ex
| 0.648021
| 0.440289
|
wordbot.ex
|
starcoder
|
defmodule Appsignal.Instrumentation do
  # Tracer and span implementations are resolved from the application env at
  # COMPILE time so test doubles can be injected. NOTE(review): this freezes
  # the configuration when this module is compiled — runtime config changes
  # will not be picked up; confirm that is intended.
  @tracer Application.get_env(:appsignal, :appsignal_tracer, Appsignal.Tracer)
  @span Application.get_env(:appsignal, :appsignal_span, Appsignal.Span)

  @spec instrument(function()) :: any()
  @doc false
  def instrument(fun) do
    # Opens a child span of the current span, runs `fun` (passing the span in
    # when `fun` has arity 1), closes the span, and returns `fun`'s result.
    # NOTE(review): if `fun` raises, close_span/1 is skipped — confirm the
    # tracer copes with unclosed spans.
    span = @tracer.create_span("background_job", @tracer.current_span)
    result = call_with_optional_argument(fun, span)
    @tracer.close_span(span)
    result
  end

  @spec instrument(String.t(), function()) :: any()
  @doc """
  Instrument a function.

      def call do
        Appsignal.instrument("foo.bar", fn ->
          :timer.sleep(1000)
        end)
      end

  When passing a function that takes an argument, the function is called with
  the created span to allow adding extra information.

      def call(params) do
        Appsignal.instrument("foo.bar", fn span ->
          Appsignal.Span.set_sample_data(span, "params", params)
          :timer.sleep(1000)
        end)
      end
  """
  # The category defaults to the span name.
  def instrument(name, fun) do
    instrument(name, name, fun)
  end

  @spec instrument(String.t(), String.t(), function()) :: any()
  @doc """
  Instrument a function, and set the `"appsignal:category"` attribute to the
  value passed as the `category` argument.
  """
  def instrument(name, category, fun) do
    instrument(fn span ->
      # `_ =` discards the setter pipeline's result (silences the warning);
      # set_name/set_attribute are called for their side effects on the span.
      _ =
        span
        |> @span.set_name(name)
        |> @span.set_attribute("appsignal:category", category)

      call_with_optional_argument(fun, span)
    end)
  end

  @deprecated "Use Appsignal.instrument/3 instead."
  # Legacy arity; the first argument is ignored.
  def instrument(_, name, category, fun) do
    instrument(name, category, fun)
  end

  @spec instrument_root(String.t(), String.t(), function()) :: any()
  @doc false
  # Like instrument/3, but starts a NEW root span (parent: nil) in the given
  # namespace; `fun` never receives the span here, and a raise skips
  # close_span/1 (same caveat as instrument/1).
  def instrument_root(namespace, name, fun) do
    span = @tracer.create_span(namespace, nil)

    span
    |> @span.set_name(name)
    |> @span.set_attribute("appsignal:category", name)

    result = fun.()
    @tracer.close_span(span)
    result
  end

  @spec set_error(Exception.t(), Exception.stacktrace()) :: Appsignal.Span.t() | nil
  @doc """
  Set an error in the current root span.
  """
  def set_error(%_{__exception__: true} = exception, stacktrace) do
    @span.add_error(@tracer.root_span(), exception, stacktrace)
  end

  @spec set_error(Exception.kind(), any(), Exception.stacktrace()) :: Appsignal.Span.t() | nil
  @doc """
  Set an error in the current root span by passing a `kind` and `reason`.
  """
  def set_error(kind, reason, stacktrace) do
    @span.add_error(@tracer.root_span(), kind, reason, stacktrace)
  end

  @spec send_error(Exception.t(), Exception.stacktrace()) :: Appsignal.Span.t() | nil
  @doc """
  Send an error in a newly created `Appsignal.Span`.
  """
  def send_error(%_{__exception__: true} = exception, stacktrace) do
    send_error(exception, stacktrace, & &1)
  end

  @spec send_error(Exception.t(), Exception.stacktrace(), function()) :: Appsignal.Span.t() | nil
  @doc """
  Send an error in a newly created `Appsignal.Span`. Calls the passed function
  with the created `Appsignal.Span` before closing it.
  """
  # Clause order matters for send_error/3: this exception-struct clause must
  # precede the kind/reason clause below, which has no guard and would
  # otherwise also match these arguments.
  def send_error(%_{__exception__: true} = exception, stacktrace, fun) when is_function(fun) do
    @span.create_root("http_request", self())
    |> @span.add_error(exception, stacktrace)
    |> fun.()
    |> @span.close()
  end

  @spec send_error(Exception.kind(), any(), Exception.stacktrace()) :: Appsignal.Span.t() | nil
  def send_error(kind, reason, stacktrace) do
    send_error(kind, reason, stacktrace, & &1)
  end

  def send_error(kind, reason, stacktrace, fun) do
    @span.create_root("http_request", self())
    |> @span.add_error(kind, reason, stacktrace)
    |> fun.()
    |> @span.close()
  end

  # Arity-0 callbacks run without the span; arity-1 callbacks receive it.
  defp call_with_optional_argument(fun, _argument) when is_function(fun, 0), do: fun.()
  defp call_with_optional_argument(fun, argument) when is_function(fun, 1), do: fun.(argument)
end
|
lib/appsignal/instrumentation.ex
| 0.803752
| 0.507385
|
instrumentation.ex
|
starcoder
|
defmodule AWS.CognitoIdentity do
@moduledoc """
Amazon Cognito Federated Identities
Amazon Cognito Federated Identities is a web service that delivers scoped
temporary credentials to mobile devices and other untrusted environments.
It uniquely identifies a device and supplies the user with a consistent
identity over the lifetime of an application.
Using Amazon Cognito Federated Identities, you can enable authentication
with one or more third-party identity providers (Facebook, Google, or Login
with Amazon) or an Amazon Cognito user pool, and you can also choose to
support unauthenticated access from your app. Cognito delivers a unique
identifier for each user and acts as an OpenID token provider trusted by
AWS Security Token Service (STS) to access temporary, limited-privilege AWS
credentials.
For a description of the authentication flow from the Amazon Cognito
Developer Guide see [Authentication
Flow](https://docs.aws.amazon.com/cognito/latest/developerguide/authentication-flow.html).
For more information see [Amazon Cognito Federated
Identities](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-identity.html).
"""
@doc """
Creates a new identity pool. The identity pool is a store of user identity
information that is specific to your AWS account. The keys for
`SupportedLoginProviders` are as follows:
<ul> <li> Facebook: `graph.facebook.com`
</li> <li> Google: `accounts.google.com`
</li> <li> Amazon: `www.amazon.com`
</li> <li> Twitter: `api.twitter.com`
</li> <li> Digits: `www.digits.com`
</li> </ul> You must use AWS Developer credentials to call this API.
"""
def create_identity_pool(client, input, options \\ []) do
  # Thin delegation to the module's shared `request/4` helper (defined
  # outside this chunk) with the AWS action name "CreateIdentityPool".
  request(client, "CreateIdentityPool", input, options)
end
@doc """
Deletes identities from an identity pool. You can specify a list of 1-60
identities that you want to delete.
You must use AWS Developer credentials to call this API.
"""
def delete_identities(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "DeleteIdentities" action.
  request(client, "DeleteIdentities", input, options)
end
@doc """
Deletes an identity pool. Once a pool is deleted, users will not be able to
authenticate with the pool.
You must use AWS Developer credentials to call this API.
"""
def delete_identity_pool(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "DeleteIdentityPool" action.
  request(client, "DeleteIdentityPool", input, options)
end
@doc """
Returns metadata related to the given identity, including when the identity
was created and any associated linked logins.
You must use AWS Developer credentials to call this API.
"""
def describe_identity(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "DescribeIdentity" action.
  request(client, "DescribeIdentity", input, options)
end
@doc """
Gets details about a particular identity pool, including the pool name, ID
description, creation date, and current number of users.
You must use AWS Developer credentials to call this API.
"""
def describe_identity_pool(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "DescribeIdentityPool" action.
  request(client, "DescribeIdentityPool", input, options)
end
@doc """
Returns credentials for the provided identity ID. Any provided logins will
be validated against supported login providers. If the token is for
cognito-identity.amazonaws.com, it will be passed through to AWS Security
Token Service with the appropriate role for the token.
This is a public API. You do not need any credentials to call this API.
"""
def get_credentials_for_identity(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the
  # "GetCredentialsForIdentity" action (public, unauthenticated API).
  request(client, "GetCredentialsForIdentity", input, options)
end
@doc """
Generates (or retrieves) a Cognito ID. Supplying multiple logins will
create an implicit linked account.
This is a public API. You do not need any credentials to call this API.
"""
def get_id(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "GetId" action
  # (public, unauthenticated API).
  request(client, "GetId", input, options)
end
@doc """
Gets the roles for an identity pool.
You must use AWS Developer credentials to call this API.
"""
def get_identity_pool_roles(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "GetIdentityPoolRoles" action.
  request(client, "GetIdentityPoolRoles", input, options)
end
@doc """
Gets an OpenID token, using a known Cognito ID. This known Cognito ID is
returned by `GetId`. You can optionally add additional logins for the
identity. Supplying multiple logins creates an implicit link.
The OpenId token is valid for 10 minutes.
This is a public API. You do not need any credentials to call this API.
"""
def get_open_id_token(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "GetOpenIdToken" action
  # (public, unauthenticated API).
  request(client, "GetOpenIdToken", input, options)
end
@doc """
Registers (or retrieves) a Cognito `IdentityId` and an OpenID Connect token
for a user authenticated by your backend authentication process. Supplying
multiple logins will create an implicit linked account. You can only
specify one developer provider as part of the `Logins` map, which is linked
to the identity pool. The developer provider is the "domain" by which
Cognito will refer to your users.
You can use `GetOpenIdTokenForDeveloperIdentity` to create a new identity
and to link new logins (that is, user credentials issued by a public
provider or developer provider) to an existing identity. When you want to
create a new identity, the `IdentityId` should be null. When you want to
associate a new login with an existing authenticated/unauthenticated
identity, you can do so by providing the existing `IdentityId`. This API
will create the identity in the specified `IdentityPoolId`.
You must use AWS Developer credentials to call this API.
"""
def get_open_id_token_for_developer_identity(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the
  # "GetOpenIdTokenForDeveloperIdentity" action.
  request(client, "GetOpenIdTokenForDeveloperIdentity", input, options)
end
@doc """
Lists the identities in an identity pool.
You must use AWS Developer credentials to call this API.
"""
def list_identities(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "ListIdentities" action.
  request(client, "ListIdentities", input, options)
end
@doc """
Lists all of the Cognito identity pools registered for your account.
You must use AWS Developer credentials to call this API.
"""
def list_identity_pools(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "ListIdentityPools" action.
  request(client, "ListIdentityPools", input, options)
end
@doc """
Lists the tags that are assigned to an Amazon Cognito identity pool.
A tag is a label that you can apply to identity pools to categorize and
manage them in different ways, such as by purpose, owner, environment, or
other criteria.
You can use this action up to 10 times per second, per account.
"""
def list_tags_for_resource(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "ListTagsForResource" action.
  request(client, "ListTagsForResource", input, options)
end
@doc """
Retrieves the `IdentityID` associated with a `DeveloperUserIdentifier` or
the list of `DeveloperUserIdentifier` values associated with an
`IdentityId` for an existing identity. Either `IdentityID` or
`DeveloperUserIdentifier` must not be null. If you supply only one of these
values, the other value will be searched in the database and returned as a
part of the response. If you supply both, `DeveloperUserIdentifier` will be
matched against `IdentityID`. If the values are verified against the
database, the response returns both values and is the same as the request.
Otherwise a `ResourceConflictException` is thrown.
`LookupDeveloperIdentity` is intended for low-throughput control plane
operations: for example, to enable customer service to locate an identity
ID by username. If you are using it for higher-volume operations such as
user authentication, your requests are likely to be throttled.
`GetOpenIdTokenForDeveloperIdentity` is a better option for higher-volume
operations for user authentication.
You must use AWS Developer credentials to call this API.
"""
def lookup_developer_identity(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "LookupDeveloperIdentity" action.
  request(client, "LookupDeveloperIdentity", input, options)
end
@doc """
Merges two users having different `IdentityId`s, existing in the same
identity pool, and identified by the same developer provider. You can use
this action to request that discrete users be merged and identified as a
single user in the Cognito environment. Cognito associates the given source
user (`SourceUserIdentifier`) with the `IdentityId` of the
`DestinationUserIdentifier`. Only developer-authenticated users can be
merged. If the users to be merged are associated with the same public
provider, but as two different users, an exception will be thrown.
The number of linked logins is limited to 20. So, the number of linked
logins for the source user, `SourceUserIdentifier`, and the destination
user, `DestinationUserIdentifier`, together should not be larger than 20.
Otherwise, an exception will be thrown.
You must use AWS Developer credentials to call this API.
"""
def merge_developer_identities(client, input, options \\ []) do
  # Delegates to the shared `request/4` helper for the "MergeDeveloperIdentities" action.
  request(client, "MergeDeveloperIdentities", input, options)
end
@doc """
Sets the roles for an identity pool. These roles are used when making calls
to `GetCredentialsForIdentity` action.
You must use AWS Developer credentials to call this API.
"""
def set_identity_pool_roles(client, input, options \\ []) do
request(client, "SetIdentityPoolRoles", input, options)
end
@doc """
Assigns a set of tags to an Amazon Cognito identity pool. A tag is a label
that you can use to categorize and manage identity pools in different ways,
such as by purpose, owner, environment, or other criteria.
Each tag consists of a key and value, both of which you define. A key is a
general category for more specific values. For example, if you have two
versions of an identity pool, one for testing and another for production,
you might assign an `Environment` tag key to both identity pools. The value
of this key might be `Test` for one identity pool and `Production` for the
other.
Tags are useful for cost tracking and access control. You can activate your
tags so that they appear on the Billing and Cost Management console, where
you can track the costs associated with your identity pools. In an IAM
policy, you can constrain permissions for identity pools based on specific
tags or tag values.
You can use this action up to 5 times per second, per account. An identity
pool can have as many as 50 tags.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Unlinks a `DeveloperUserIdentifier` from an existing identity. Unlinked
developer users will be considered new identities next time they are seen.
If, for a given Cognito identity, you remove all federated identities as
well as the developer user identifier, the Cognito identity becomes
inaccessible.
You must use AWS Developer credentials to call this API.
"""
def unlink_developer_identity(client, input, options \\ []) do
request(client, "UnlinkDeveloperIdentity", input, options)
end
@doc """
Unlinks a federated identity from an existing account. Unlinked logins will
be considered new identities next time they are seen. Removing the last
linked login will make this identity inaccessible.
This is a public API. You do not need any credentials to call this API.
"""
def unlink_identity(client, input, options \\ []) do
request(client, "UnlinkIdentity", input, options)
end
@doc """
Removes the specified tags from an Amazon Cognito identity pool. You can
use this action up to 5 times per second, per account
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates an identity pool.
You must use AWS Developer credentials to call this API.
"""
def update_identity_pool(client, input, options \\ []) do
request(client, "UpdateIdentityPool", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, Poison.Parser.t() | nil, Poison.Response.t()}
        | {:error, Poison.Parser.t()}
        | {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
  # All Cognito Identity calls go through the same signed JSON-over-POST path.
  client = %{client | service: "cognito-identity"}
  host = build_host("cognito-identity", client)
  url = build_url(host, client)
  payload = Poison.Encoder.encode(input, %{})

  unsigned_headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "AWSCognitoIdentityService.#{action}"}
  ]

  signed_headers = AWS.Request.sign_v4(client, "POST", url, unsigned_headers, payload)

  case HTTPoison.post(url, payload, signed_headers, options) do
    # 200 with an empty body: success, nothing to decode.
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    # 200 with a body: decode the JSON result.
    {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body, %{}), response}

    # Any other HTTP status: the body carries the AWS error document.
    {:ok, %HTTPoison.Response{body: body}} ->
      {:error, Poison.Parser.parse!(body, %{})}

    # Transport-level failure.
    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Resolve the request host: the "local" pseudo-region targets localhost,
# anything else composes "<prefix>.<region>.<endpoint>".
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  Enum.join([endpoint_prefix, region, endpoint], ".")
end
# Compose the base URL from the client's configured protocol and port.
defp build_url(host, %{proto: scheme, port: port}) do
  "#{scheme}://#{host}:#{port}/"
end
end
|
lib/aws/cognito_identity.ex
| 0.896169
| 0.574574
|
cognito_identity.ex
|
starcoder
|
defmodule ESpec.AssertionHelpers do
@moduledoc """
Defines helper functions for modules which use ESpec.
These functions wrap arguments for ESpec.ExpectTo module.
See `ESpec.Assertion` module for corresponding 'assertion modules'
"""
alias ESpec.Assertions
# Elixir types for which a `be_<type>/0` helper is generated by the
# `Enum.each` metaprogramming block near the bottom of this module.
@elixir_types ~w(atom binary bitstring boolean float function integer list map number pid port reference tuple)a
# Each helper returns a `{AssertionModule, args}` tuple that ESpec.ExpectTo
# later dispatches on; no assertion is evaluated here.
# --- Equality / comparison -------------------------------------------------
# `be/1` is an alias for `eq/1` (`==` semantics); `eql/1` is strict (`===`).
def eq(value), do: {Assertions.Eq, value}
def eql(value), do: {Assertions.Eql, value}
def be(value), do: {Assertions.Eq, value}
def be(operator, value), do: {Assertions.Be, [operator, value]}
# The 3-arity `be` variants accept a closeness tolerance, either as a
# one-element keyword list or as a bare `{granularity, delta}` tuple.
def be(operator, value, [{granularity, delta}]),
do: {Assertions.Be, [operator, value, [{granularity, delta}]]}
def be(operator, value, {granularity, delta}),
do: {Assertions.Be, [operator, value, {granularity, delta}]}
def be_between(min, max), do: {Assertions.BeBetween, [min, max]}
def be_close_to(value, delta), do: {Assertions.BeCloseTo, [value, delta]}
def match(value), do: {Assertions.Match, value}
# Macro: the pattern must be escaped (not evaluated) and carried with the
# caller's environment and bindings so it can be matched later.
defmacro match_pattern(pattern) do
pattern = Macro.escape(pattern)
quote do
{Assertions.MatchPattern, [unquote(pattern), __ENV__, binding()]}
end
end
# --- Boolean / truthiness --------------------------------------------------
def be_true, do: {Assertions.Boolean.BeTrue, []}
def be_false, do: {Assertions.Boolean.BeFalse, []}
def be_truthy, do: {Assertions.Boolean.BeTruthy, []}
def be_falsy, do: {Assertions.Boolean.BeFalsy, []}
# --- Exceptions / throws ---------------------------------------------------
def raise_exception(exception, message) when is_atom(exception) and is_binary(message) do
{Assertions.RaiseException, [exception, message]}
end
def raise_exception(exception) when is_atom(exception),
do: {Assertions.RaiseException, [exception]}
def raise_exception(), do: {Assertions.RaiseException, []}
def throw_term(term), do: {Assertions.ThrowTerm, [term]}
def throw_term(), do: {Assertions.ThrowTerm, []}
# --- Change assertions -----------------------------------------------------
# Dispatch depends on the second argument's type: integer -> ChangeTo,
# list -> ChangeBy; the 3-arity form asserts a from/to transition.
def change(func) when is_function(func), do: {Assertions.Change, [func]}
def change(func, value) when is_function(func) and is_integer(value),
do: {Assertions.ChangeTo, [func, value]}
def change(func, value) when is_function(func) and is_list(value),
do: {Assertions.ChangeBy, [func, value]}
def change(func, before, value) when is_function(func),
do: {Assertions.ChangeFromTo, [func, before, value]}
# --- Enum / collection assertions ------------------------------------------
def have_all(func) when is_function(func), do: {Assertions.Enum.HaveAll, func}
def have_any(func) when is_function(func), do: {Assertions.Enum.HaveAny, func}
def have_count_by(func, val) when is_function(func),
do: {Assertions.Enum.HaveCountBy, [func, val]}
def be_empty, do: {Assertions.Enum.BeEmpty, []}
def have_max(value), do: {Assertions.Enum.HaveMax, value}
def have_max_by(func, value) when is_function(func),
do: {Assertions.Enum.HaveMaxBy, [func, value]}
def have_min(value), do: {Assertions.Enum.HaveMin, value}
def have_min_by(func, value) when is_function(func),
do: {Assertions.Enum.HaveMinBy, [func, value]}
# `match_list/1` is an alias for `contain_exactly/1`.
def match_list(value) when is_list(value), do: {Assertions.ContainExactly, value}
def contain_exactly(value) when is_list(value), do: {Assertions.ContainExactly, value}
def have(val), do: {Assertions.EnumString.Have, val}
def have_at(pos, val) when is_number(pos), do: {Assertions.EnumString.HaveAt, [pos, val]}
def have_first(value), do: {Assertions.ListString.HaveFirst, value}
def have_last(value), do: {Assertions.ListString.HaveLast, value}
# `have_size/1` and `have_length/1` are aliases for `have_count/1`.
def have_count(value), do: {Assertions.EnumString.HaveCount, value}
def have_size(value), do: {Assertions.EnumString.HaveCount, value}
def have_length(value), do: {Assertions.EnumString.HaveCount, value}
def have_hd(value), do: {Assertions.List.HaveHd, value}
def have_tl(value), do: {Assertions.List.HaveTl, value}
def have_byte_size(value), do: {Assertions.Binary.HaveByteSize, value}
# --- String assertions -----------------------------------------------------
def start_with(value), do: {Assertions.String.StartWith, value}
def end_with(value), do: {Assertions.String.EndWith, value}
def be_printable(), do: {Assertions.String.BePrintable, []}
def be_valid_string(), do: {Assertions.String.BeValidString, []}
def be_blank(), do: {Assertions.String.BeBlank, []}
# --- Map / PID assertions --------------------------------------------------
def have_key(value), do: {Assertions.Map.HaveKey, value}
def have_value(value), do: {Assertions.Map.HaveValue, value}
def be_alive(), do: {Assertions.PID.BeAlive, []}
# --- Type predicates -------------------------------------------------------
# Compile-time generation of `be_atom/0`, `be_binary/0`, etc. for every
# entry in @elixir_types. The type atom is escaped so it is embedded as a
# literal in each generated function body.
Enum.each(@elixir_types, fn type ->
def unquote(String.to_atom("be_#{type}"))() do
{Assertions.BeType, unquote(Macro.escape(type))}
end
end)
# `be_nil` maps to the :null tag rather than an Elixir type name.
def be_nil, do: {Assertions.BeType, :null}
def be_function(arity), do: {Assertions.BeType, [:function, arity]}
def be_struct, do: {Assertions.BeType, :struct}
def be_struct(name), do: {Assertions.BeType, [:struct, name]}
# --- Misc ------------------------------------------------------------------
def accepted(func, args \\ :any, opts \\ [pid: :any, count: :any]),
do: {Assertions.Accepted, [func, args, opts]}
def be_ok_result(), do: {Assertions.Result.BeOkResult, []}
def be_error_result(), do: {Assertions.Result.BeErrorResult, []}
end
|
lib/espec/assertion_helpers.ex
| 0.782538
| 0.929184
|
assertion_helpers.ex
|
starcoder
|
defmodule Cog.Support.ModelUtilities do
  @moduledoc """
  Utilities for making it easier to interact with models.

  Intended for use in interactive situations and testing fixture
  setup. From your interactive shell prompt, just type:

      iex> import #{inspect __MODULE__}

  and you'll be good to go.

  These functions should use the standard high-level methods for
  operating on models, as appropriate. For instance, insertion into
  the repo of bare model structs should be avoided in favor of using
  changesets. Similarly, models that are the roots of complex graphs
  of models (e.g., bundles, commands, rules) should be inserted using
  API functions that establish the entire graph.

  (That's actually sensible advice, anyway. If you find yourself doing
  bare inserts, here or elsewhere, think whether there is a more
  appropriate method.)

  Additionally, many of these functions may use "dummy data", either
  partially or in full, in order to be easier to use in interactive or
  testing scenarios.

  For example, when creating a user, we don't necessarily care about
  specifying a first name, last name, or even a real password; we just
  want a user in the system. As such, our `user/1` function fills
  in much of this data with sensible defaults. This is *definitely*
  not something to use in production code, though.

  Other functions may actually be legitimately useful in production
  code, to the extent that they do not make such use of fake data. If
  you find yourself wanting to use such code in production, *do not*
  use it directly from this module! Let's have a discussion and talk
  about pulling the specific functions out into a more appropriate
  place.

  To reiterate: this module is *explicitly not* for use in production
  code!
  """

  alias Cog.Models.Bundle
  alias Cog.Models.BundleVersion
  alias Cog.Models.ChatHandle
  alias Cog.Models.ChatProvider
  alias Cog.Models.Command
  alias Cog.Models.CommandVersion
  alias Cog.Models.Group
  alias Cog.Repository.Groups
  alias Cog.Models.Permission
  alias Cog.Models.Relay
  alias Cog.Models.RelayGroup
  alias Cog.Models.RelayGroupAssignment
  alias Cog.Models.RelayGroupMembership
  alias Cog.Models.Role
  alias Cog.Models.Token
  alias Cog.Models.User
  alias Cog.Models.UserCommandAlias
  alias Cog.Models.SiteCommandAlias
  alias Cog.Repo

  require Logger

  @doc """
  Create a user, filling in dummy data as appropriate.

  The password is set to the username itself.

  ## Options

  Sometimes you want a little more control, so you can pass an
  optional keyword list as a second argument to override some of the
  defaults.

  * `:first_name` - defaults to the capitalized `username`
  * `:last_name` - defaults to `"Mc\#{username}"` ;)
  * `:email_address` - defaults to `"\#{username}@example.com"`

  ## Example

      user("robot")
      #=> %Cog.Models.User{
      #     username: "robot",
      #     first_name: "Robot",
      #     last_name: "McRobot",
      #     email_address: "robot@example.com",
      #     password: "robot",
      #     ...extra fields elided...
      #   }
  """
  def user(username, options \\ []) do
    user = %User{}
    |> User.changeset(%{username: username,
                        first_name: Access.get(options, :first_name, String.capitalize(username)),
                        last_name: Access.get(options, :last_name, "Mc#{String.capitalize(username)}"),
                        email_address: Access.get(options, :email_address, "#{username}@example.com"),
                        password: username})
    |> Repo.insert!
    # Password is a virtual field that won't be present if we retrieve
    # this user from the database, so test comparisons can fail; restore
    # the value we set above (the username).
    %User{user | password: username}
  end

  @doc """
  Assign a new randomly-generated token to `user`. Returns the
  un-modified user for pipelines
  """
  def with_token(%User{}=user) do
    {:ok, _} = Token.insert_new(user, %{value: Token.generate})
    user
  end

  @doc """
  Associate a chat handle for the specified provider with
  `user`. Returns the un-modified user for piplines.
  """
  def with_chat_handle_for(%User{}=internal_user, provider) do
    provider = ChatProvider |> Repo.get_by!(name: String.downcase(provider))
    handle = internal_user.username
    {:ok, external_user} = Cog.Chat.Adapter.lookup_user(provider.name, handle)
    # TODO Provider ID!!!!!!
    params = %{provider_id: provider.id,
               handle: external_user.handle,
               chat_provider_user_id: external_user.id,
               user_id: internal_user.id}
    %ChatHandle{}
    |> ChatHandle.changeset(params)
    |> Repo.insert!

    internal_user
  end

  @doc """
  Grant a permission role and return the role. If a user is passed instead of
  a role, a group and role is created. The user and role are added to the new
  group. The role is granted the permission and the user is returned.
  """
  def with_permission(%User{username: username}=user, permission) do
    role = role("#{username}_role")
    group = group("#{username}_group")
    :ok = Permittable.grant_to(group, role)
    :ok = Groupable.add_to(user, group)
    # Grants `permission` to `role`; the role itself is discarded here
    # because this clause returns the user for pipelining.
    with_permission(role, permission)
    user
  end
  def with_permission(grantee, permission_name) when is_binary(permission_name) do
    with_permission(grantee, permission(permission_name))
  end
  def with_permission(%Role{}=role, %Permission{}=permission) do
    :ok = Permittable.grant_to(role, permission)
    role
  end

  @doc """
  Create or retrieve a permission with the given name.

  If either the bundle or the permission do not already exist, they
  are created.

  Example:

      permission("foo:bar")
  """
  def permission(full_name) do
    permission = full_name
    |> Cog.Queries.Permission.from_full_name
    |> Repo.one

    case permission do
      %Permission{} ->
        permission
      nil ->
        # TODO: For now this is fine, but we should come back and make
        # this use Cog.Repository.Permissions.create_permission/2
        # instead... that will require creating or looking up a bundle
        # version, though
        {ns, name} = Permission.split_name(full_name)
        b = case Repo.get_by(Bundle, name: ns) do
              nil ->
                Repo.insert! %Bundle{name: ns}
              bundle ->
                bundle
            end
        Permission.build_new(b, %{name: name}) |> Repo.insert!
    end
  end

  @doc """
  Create a group with the given name
  """
  def group(name) do
    %Group{} |> Group.changeset(%{name: name}) |> Repo.insert!
  end

  @doc """
  Adds a user or a role to a group based
  """
  def add_to_group(group, user_or_role) do
    {:ok, group} = Groups.manage_membership(group,
                                            %{"members" =>
                                              %{"add" => [user_or_role]}})
    group
  end

  @doc """
  Create a role with the given name
  """
  def role(name) do
    %Role{} |> Role.changeset(%{name: name}) |> Repo.insert!
  end

  @doc """
  Create a role and a permission with the given names, grant the
  permission to the role, and return both as a `{role, permission}` tuple.
  """
  def role_with_permission(role_name, permission_name) do
    role = role(role_name)
    permission = permission(permission_name)
    :ok = Permittable.grant_to(role, permission)
    {role, permission}
  end

  @doc """
  Create a command with the given name
  """
  def command(name, params \\ %{}) do
    bundle_name = Map.get(params, :bundle_name, "test-bundle")
    bundle_version = bundle_version(bundle_name)
    bundle = bundle_version.bundle

    command = %Command{}
    |> Command.changeset(%{name: name, bundle_id: bundle.id})
    |> Repo.insert!

    %CommandVersion{}
    |> CommandVersion.changeset(Map.merge(%{command_id: command.id, bundle_version_id: bundle_version.id}, params))
    |> Repo.insert!

    # Hang the freshly-created version graph off the returned command so
    # callers can traverse it without reloading from the database.
    bundle = Map.put(bundle, :bundle_versions, [bundle_version])
    command = Map.put(command, :bundle, bundle)
    command
  end

  @doc """
  Creates a bundle version
  """
  def bundle_version(name, opts \\ []) do
    bundle_template = %{
      "name" => name,
      "version" => "0.1.0",
      "cog_bundle_version" => 4,
      "commands" => %{}
    }
    # Merge caller-supplied options into the template, normalizing atom
    # keys to the string keys the bundle config format expects.
    bundle_config = Enum.into(opts, bundle_template, fn
      ({key, value}) when is_atom(key) ->
        {Atom.to_string(key), value}
      (opt) ->
        opt
    end)
    # TODO what's enabled here?
    # enabled = Keyword.get(opts, :enabled, false)
    {:ok, bundle_version} = Cog.Repository.Bundles.install(%{"name" => name,
                                                            "version" => bundle_config["version"],
                                                            "config_file" => bundle_config})
    bundle_version
  end

  @doc """
  Creates a relay record
  Options:
    :enabled - set's whether the relay should be enabled on create
    :desc - set's the relays description
  """
  def relay(name, token, opts \\ []) do
    relay = %Relay{}
    |> Relay.changeset(%{name: name,
                         token: token,
                         enabled: Keyword.get(opts, :enabled, false),
                         desc: Keyword.get(opts, :desc, nil)})
    |> Repo.insert!
    # The (hashed) token should never leak back out of a fixture helper.
    %{relay | token: nil}
  end

  @doc """
  Creates a relay group record
  """
  def relay_group(name, desc \\ nil) do
    %RelayGroup{}
    |> RelayGroup.changeset(%{name: name, desc: desc})
    |> Repo.insert!
  end

  @doc """
  Adds a relay to a relay group
  """
  def add_relay_to_group(group_id, relay_id) do
    %RelayGroupMembership{}
    |> RelayGroupMembership.changeset(%{group_id: group_id,
                                        relay_id: relay_id})
    |> Repo.insert!
  end

  @doc """
  Assigns a bundle to a relay group
  """
  def assign_bundle_to_group(group_id, bundle_id) do
    %RelayGroupAssignment{}
    |> RelayGroupAssignment.changeset(%{group_id: group_id,
                                        bundle_id: bundle_id})
    |> Repo.insert!
  end

  @doc """
  Creates a relay, relay-group and bundle. Then assigns the bundle and adds the
  relay to the relay-group
  Options:
    :token - sets the token for the new relay
    :relay_opts - set any additional options for the relay as described by `__MODULE__.relay/3`
    :bundle_opts - options to pass to create a new bundle version
  """
  @spec create_relay_bundle_and_group(String.t, [{atom(), any()}]) :: {%Relay{}, %BundleVersion{}, %RelayGroup{}}
  def create_relay_bundle_and_group(name, opts \\ []) do
    # NOTE(review): the default token below looks like an anonymization
    # placeholder; confirm the intended default value.
    relay = relay("relay-#{name}",
                  Keyword.get(opts, :token, "<PASSWORD>"),
                  Keyword.get(opts, :relay_opts, []))
    bundle_version = bundle_version("bundle-#{name}", Keyword.get(opts, :bundle_opts, []))
    Cog.Repository.Bundles.set_bundle_version_status(bundle_version, :enabled)
    relay_group = relay_group("group-#{name}")
    add_relay_to_group(relay_group.id, relay.id)
    assign_bundle_to_group(relay_group.id, bundle_version.bundle.id)
    {relay, bundle_version, relay_group}
  end

  @doc """
  Creates a command alias in the user namespace
  Returns user for use in pipelines
  """
  def with_alias(user, name, pipeline_text) do
    %UserCommandAlias{}
    |> UserCommandAlias.changeset(%{name: name,
                                    pipeline: pipeline_text,
                                    user_id: user.id})
    |> Repo.insert!

    user
  end

  @doc """
  Creates a command alias in the site namespace
  """
  def site_alias(name, pipeline_text) do
    %SiteCommandAlias{}
    |> SiteCommandAlias.changeset(%{name: name,
                                    pipeline: pipeline_text})
    |> Repo.insert!
  end

  @doc """
  Returns a command alias in the site namespace
  """
  def get_alias(name),
    do: Repo.get_by(SiteCommandAlias, name: name)

  @doc """
  Returns a command alias in the user namespace
  """
  def get_alias(name, user_id),
    do: Repo.get_by(UserCommandAlias, name: name, user_id: user_id)

  @doc """
  Remove everything from the database.

  As this removes bundles, too, we want to terminate any running
  bundle processes, so they won't interfere with anything you might
  reload later.
  """
  def clean_db! do
    # NOTE(review): `Namespace` is not aliased above, so this resolves to
    # the top-level `Elixir.Namespace` module — confirm it exists at runtime.
    [Bundle, User, Group, Relay, Role, Namespace]
    |> Enum.each(&Repo.delete_all/1)
    Supervisor.terminate_child(Cog.Relay.RelaySup, Cog.Bundle.Embedded)
  end
end
|
lib/cog/support/model_utilities.ex
| 0.638723
| 0.48054
|
model_utilities.ex
|
starcoder
|
defmodule Cldr.Calendar.Backend do
@moduledoc false
# Returns the quoted AST for a `Calendar` module, to be injected into a
# CLDR backend module at compile time. All locale data is unrolled into
# literal function clauses below, so lookups are compile-time baked.
def define_calendar_module(config) do
backend = config.backend
# `bind_quoted` evaluates `config`/`backend` once and makes them available
# as plain variables inside the quoted block.
quote location: :keep, bind_quoted: [config: config, backend: backend] do
defmodule Calendar do
@moduledoc """
Calendar support functions for formatting dates, times and datetimes.
`Cldr` defines formats for several calendars, the names of which
are returned by `Cldr.Calendar.known_calendars/0`.
Currently this implementation only supports the `:gregorian`
calendar which aligns with the proleptic Gregorian calendar
defined by Elixir, `Calendar.ISO`.
This module will be extacted in the future to become part of
a separate calendrical module.
"""
alias Cldr.Locale
# NOTE(review): `@default_calendar` and `LanguageTag` are expected to be
# in scope in the module this quote is injected into — confirm upstream.
# Each accessor below has: a default-argument head, a clause that unwraps
# a `%LanguageTag{}` into its locale name, per-locale/per-calendar literal
# clauses (generated in the loop further down), and error fallbacks.
@doc false
def era(locale \\ unquote(backend).get_locale(), calendar \\ @default_calendar)
def era(%LanguageTag{cldr_locale_name: cldr_locale_name}, calendar) do
era(cldr_locale_name, calendar)
end
@doc false
def period(locale \\ unquote(backend).get_locale(), calendar \\ @default_calendar)
def period(%LanguageTag{cldr_locale_name: cldr_locale_name}, calendar) do
period(cldr_locale_name, calendar)
end
@doc false
def quarter(locale \\ unquote(backend).get_locale(), calendar \\ @default_calendar)
def quarter(%LanguageTag{cldr_locale_name: cldr_locale_name}, calendar) do
quarter(cldr_locale_name, calendar)
end
@doc false
def month(locale \\ unquote(backend).get_locale(), calendar \\ @default_calendar)
def month(%LanguageTag{cldr_locale_name: cldr_locale_name}, calendar) do
month(cldr_locale_name, calendar)
end
@doc false
def day(locale \\ unquote(backend).get_locale(), calendar \\ @default_calendar)
def day(%LanguageTag{cldr_locale_name: cldr_locale_name}, calendar) do
day(cldr_locale_name, calendar)
end
# Compile-time unrolling: for every known locale, load its date data and
# emit one clause per known calendar returning the relevant literal map.
for locale_name <- Cldr.Config.known_locale_names(config) do
date_data =
locale_name
|> Cldr.Config.get_locale(config)
|> Map.get(:dates)
# Restrict to calendars CLDR actually knows about for this locale.
calendars =
date_data
|> Map.get(:calendars)
|> Map.take(Cldr.known_calendars())
|> Map.keys()
for calendar <- calendars do
# `Macro.escape` embeds the data map as a literal in the clause body.
def era(unquote(locale_name), unquote(calendar)) do
unquote(Macro.escape(get_in(date_data, [:calendars, calendar, :eras])))
end
def period(unquote(locale_name), unquote(calendar)) do
unquote(Macro.escape(get_in(date_data, [:calendars, calendar, :day_periods])))
end
def quarter(unquote(locale_name), unquote(calendar)) do
unquote(Macro.escape(get_in(date_data, [:calendars, calendar, :quarters])))
end
def month(unquote(locale_name), unquote(calendar)) do
unquote(Macro.escape(get_in(date_data, [:calendars, calendar, :months])))
end
def day(unquote(locale_name), unquote(calendar)) do
unquote(Macro.escape(get_in(date_data, [:calendars, calendar, :days])))
end
end
# Known locale, unknown calendar -> calendar error.
def era(unquote(locale_name), calendar), do: {:error, calendar_error(calendar)}
def period(unquote(locale_name), calendar), do: {:error, calendar_error(calendar)}
def quarter(unquote(locale_name), calendar), do: {:error, calendar_error(calendar)}
def month(unquote(locale_name), calendar), do: {:error, calendar_error(calendar)}
def day(unquote(locale_name), calendar), do: {:error, calendar_error(calendar)}
end
# Unknown locale -> locale error.
def era(locale, _calendar), do: {:error, Locale.locale_error(locale)}
def period(locale, _calendar), do: {:error, Locale.locale_error(locale)}
def quarter(locale, _calendar), do: {:error, Locale.locale_error(locale)}
def month(locale, _calendar), do: {:error, Locale.locale_error(locale)}
def day(locale, _calendar), do: {:error, Locale.locale_error(locale)}
end
end
end
end
|
lib/cldr/backend/calendar.ex
| 0.836721
| 0.517815
|
calendar.ex
|
starcoder
|
defmodule Plaid.Investments.Holdings do
  @moduledoc """
  Functions for Plaid `investments/holdings` endpoints.
  """

  import Plaid, only: [make_request_with_cred: 4, validate_cred: 1]

  alias Plaid.Utils

  @derive Jason.Encoder
  defstruct accounts: [], item: nil, securities: [], holdings: [], request_id: nil

  @type t :: %__MODULE__{
          accounts: [Plaid.Accounts.Account.t()],
          item: Plaid.Item.t(),
          securities: [Plaid.Investments.Security.t()],
          holdings: [Plaid.Investments.Holdings.Holding.t()],
          request_id: String.t()
        }
  @type params :: %{required(atom) => String.t() | map}
  @type config :: %{required(atom) => String.t()}

  @endpoint :"investments/holdings"

  defmodule Holding do
    @moduledoc """
    Plaid Investments Holding data structure.
    """

    @derive Jason.Encoder
    defstruct [
      :account_id,
      :security_id,
      :institution_price,
      :institution_price_as_of,
      :institution_value,
      :cost_basis,
      :quantity,
      :iso_currency_code,
      :unofficial_currency_code
    ]

    @type t :: %__MODULE__{
            account_id: String.t(),
            security_id: String.t(),
            institution_price: float,
            institution_price_as_of: String.t() | nil,
            institution_value: float,
            cost_basis: float | nil,
            quantity: float,
            iso_currency_code: String.t() | nil,
            unofficial_currency_code: String.t() | nil
          }
  end

  @doc """
  Gets user-authorized stock position data for investment-type Accounts

  Parameters
  ```
  %{
    access_token: "<PASSWORD>-<PASSWORD>",
    options: %{
      account_ids: ["<KEY>"]
    }
  }
  ```
  """
  @spec get(params, config | nil) ::
          {:ok, Plaid.Investments.Holdings.t()} | {:error, Plaid.Error.t()}
  def get(params, config \\ %{}) do
    creds = validate_cred(config)

    :post
    |> make_request_with_cred("#{@endpoint}/get", creds, params)
    |> Utils.handle_resp(@endpoint)
  end
end
|
lib/plaid/investments/holdings.ex
| 0.78968
| 0.607547
|
holdings.ex
|
starcoder
|
if Code.ensure_loaded?(Plug) do
  defmodule Guardian.Plug.LoadResource do
    @moduledoc """
    This plug loads the resource associated with a previously
    validated token. Tokens are found and validated using the `Verify*` plugs.

    By default, load resource will return an error if no resource can be found.
    You can override this behaviour using the `allow_blank: true` option.
    If `allow_blank` is not set to true, the plug will return an error
    if no resource can be found with `:no_resource_found`

    This, like all other Guardian plugs, requires a Guardian pipeline to be setup.
    It requires an implementation module, an error handler and a key.
    These can be set either:

    1. Upstream on the connection with `plug Guardian.Pipeline`
    2. Upstream on the connection with `Guardian.Pipeline.{put_module, put_error_handler, put_key}`
    3. Inline with an option of `:module`, `:error_handler`, `:key`

    Options:

    * `allow_blank` - boolean. If set to true, will try to load a resource but
      will not fail if no resource is found.
    * `key` - The location to find the information in the connection. Defaults to: `default`
    * `halt` - Whether to halt the connection in case of error. Defaults to `true`

    ## Example

    ```elixir
    # setup the upstream pipeline
    plug Guardian.Plug.LoadResource, allow_blank: true
    plug Guardian.Plug.LoadResource, key: :secret
    ```
    """
    alias Guardian.Plug.Pipeline

    @behaviour Plug

    @impl Plug
    @spec init(opts :: Keyword.t()) :: Keyword.t()
    def init(opts), do: opts

    @impl Plug
    @spec call(conn :: Plug.Conn.t(), opts :: Keyword.t()) :: Plug.Conn.t()
    def call(conn, opts) do
      # `allow_blank` is nil unless explicitly set; only literal `true`
      # suppresses the error response below.
      claims = Guardian.Plug.current_claims(conn, opts)

      claims
      |> resource(conn, opts)
      |> respond(Keyword.get(opts, :allow_blank))
    end

    # No claims on the connection: nothing to load.
    defp resource(nil, conn, opts), do: {:error, :no_resource_found, conn, opts}

    defp resource(claims, conn, opts) do
      impl = Pipeline.fetch_module!(conn, opts)

      case impl.resource_from_claims(claims) do
        {:ok, resource} -> {:ok, resource, conn, opts}
        {:error, reason} -> {:error, reason, conn, opts}
        _unexpected -> {:error, :no_resource_found, conn, opts}
      end
    end

    # Error with allow_blank: true -> pass the conn through untouched.
    defp respond({:error, _reason, conn, _opts}, true), do: conn
    defp respond({:error, reason, conn, opts}, _allow_blank), do: return_error(conn, reason, opts)

    defp respond({:ok, resource, conn, opts}, _allow_blank),
      do: Guardian.Plug.put_current_resource(conn, resource, opts)

    defp return_error(conn, reason, opts) do
      handler = Pipeline.fetch_error_handler!(conn, opts)
      conn = handler.auth_error(conn, {:no_resource_found, reason}, opts)
      Guardian.Plug.maybe_halt(conn, opts)
    end
  end
end
|
lib/guardian/plug/load_resource.ex
| 0.826852
| 0.793586
|
load_resource.ex
|
starcoder
|
defmodule Mix.Tasks.Compile.ElixirMake do
use Mix.Task
@recursive true
@moduledoc """
Runs `make` in the current project.
This task runs `make` in the current project; any output coming from `make` is
printed in real-time on stdout.
## Configuration
This compiler can be configured through the return value of the `project/0`
function in `mix.exs`; for example:
def project() do
[app: :myapp,
make_executable: "make",
make_makefile: "Othermakefile",
compilers: [:elixir_make] ++ Mix.compilers,
deps: deps()]
end
The following options are available:
* `:make_executable` - (binary or `:default`) it's the executable to use as the
`make` program. If not provided or if `:default`, it defaults to `"nmake"`
on Windows, `"gmake"` on FreeBSD and OpenBSD, and `"make"` on everything
else. You can, for example, customize which executable to use on a
specific OS and use `:default` for every other OS. If the `MAKE`
environment variable is present, that is used as the value of this option.
* `:make_makefile` - (binary or `:default`) it's the Makefile to
use. Defaults to `"Makefile"` for Unix systems and `"Makefile.win"` for
Windows systems if not provided or if `:default`.
* `:make_targets` - (list of binaries) it's the list of Make targets that
should be run. Defaults to `[]`, meaning `make` will run the first target.
* `:make_clean` - (list of binaries) it's a list of Make targets to be run
when `mix clean` is run. It's only run if a non-`nil` value for
`:make_clean` is provided. Defaults to `nil`.
* `:make_cwd` - (binary) it's the directory where `make` will be run,
relative to the root of the project.
* `:make_env` - (map of binary to binary) it's a map of extra environment
variables to be passed to `make`. You can also pass a function in here in
case `make_env` needs access to things that are not available during project
setup; the function should return a map of binary to binary.
* `:make_error_message` - (binary or `:default`) it's a custom error message
that can be used to give instructions as of how to fix the error (e.g., it
can be used to suggest installing `gcc` if you're compiling a C
dependency).
* `:make_args` - (list of binaries) it's a list of extra arguments to be
passed.
"""
@mac_error_msg """
You need to have gcc and make installed. Try running the
commands "gcc --version" and / or "make --version". If these programs
are not installed, you will be prompted to install them.
"""
@unix_error_msg """
You need to have gcc and make installed. If you are using
Ubuntu or any other Debian-based system, install the packages
"build-essential". Also install "erlang-dev" package if not
included in your Erlang/OTP version. If you're on Fedora, run
"dnf group install 'Development Tools'".
"""
@windows_error_msg ~S"""
One option is to install a recent version of
[Visual C++ Build Tools](http://landinghub.visualstudio.com/visual-cpp-build-tools)
either manually or using [Chocolatey](https://chocolatey.org/) -
`choco install VisualCppBuildTools`.
After installing Visual C++ Build Tools, look in the "Program Files (x86)"
directory and search for "Microsoft Visual Studio". Note down the full path
of the folder with the highest version number. Open the "run" command and
type in the following command (make sure that the path and version number
are correct):
cmd /K "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" amd64
This should open up a command prompt with the necessary environment variables
set, and from which you will be able to run the "mix compile", "mix deps.compile",
and "mix test" commands.
"""
# Mix task entry point: compile the project's Makefile targets, then make
# sure the build structure (symlinks/dirs) is in place.
@spec run(OptionParser.argv()) :: :ok | no_return
def run(args) do
  project_config = Mix.Project.config()
  Mix.shell().print_app()
  build(project_config, args)
  Mix.Project.build_structure()
  :ok
end
# This is called by Elixir when `mix clean` is run and `:elixir_make` is in
# the list of compilers. Only runs make if `:make_clean` targets were given.
def clean() do
  {clean_targets, config} =
    Mix.Project.config()
    |> Keyword.pop(:make_clean)

  if clean_targets do
    build(Keyword.put(config, :make_targets, clean_targets), [])
  end
end
# Resolves all make-related configuration from the project config, runs the
# build, and raises a descriptive error when make exits non-zero.
defp build(config, task_args) do
  executable =
    System.get_env("MAKE") ||
      os_specific_executable(Keyword.get(config, :make_executable, :default))

  # `:make_env` may be a zero-arity function, evaluated lazily here.
  env =
    case Keyword.get(config, :make_env, %{}) do
      fun when is_function(fun) -> fun.()
      map -> map
    end

  # In OTP 19, Erlang's `open_port/2` ignores the current working directory
  # when expanding relative paths, so `:make_cwd` must be made absolute.
  # This differs from earlier OTP versions and appears to be a bug, tracked
  # at http://bugs.erlang.org/browse/ERL-175.
  cwd = Path.expand(Keyword.get(config, :make_cwd, "."), File.cwd!())

  message = os_specific_error_msg(Keyword.get(config, :make_error_message, :default))

  args =
    args_for_makefile(executable, Keyword.get(config, :make_makefile, :default)) ++
      Keyword.get(config, :make_targets, []) ++
      Keyword.get(config, :make_args, [])

  case cmd(executable, args, cwd, env, "--verbose" in task_args) do
    0 -> :ok
    exit_status -> raise_build_error(executable, exit_status, message)
  end
end
# Runs `exec` with `args` in `cwd`, streaming its stdout and stderr to the
# terminal line by line as they are produced. Returns the exit status.
defp cmd(exec, args, cwd, env, verbose?) do
  if verbose?, do: print_verbose_info(exec, args)

  options = [
    into: IO.stream(:stdio, :line),
    stderr_to_stdout: true,
    cd: cwd,
    env: env
  ]

  # Output went to the IO.Stream, so only the exit status is of interest.
  {%IO.Stream{}, exit_status} = System.cmd(find_executable(exec), args, options)
  exit_status
end
# Resolves `exec` to its full path, raising a Mix error when it cannot be
# found on the PATH.
defp find_executable(exec) do
  case System.find_executable(exec) do
    nil ->
      Mix.raise("""
      "#{exec}" not found in the path. If you have set the MAKE environment variable,
      please make sure it is correct.
      """)

    path ->
      path
  end
end
# Aborts compilation, reporting the failed executable, its exit status,
# and the OS-specific help message.
defp raise_build_error(exec, exit_status, error_msg) do
  summary = ~s{Could not compile with "#{exec}" (exit status: #{exit_status}).\n}
  Mix.raise(summary <> error_msg)
end
# Returns a user-configured executable name unchanged; otherwise picks the
# conventional make flavour for the current OS.
defp os_specific_executable(exec) when is_binary(exec), do: exec

defp os_specific_executable(:default) do
  case :os.type() do
    {:win32, _} -> "nmake"
    {:unix, os} when os in [:freebsd, :openbsd] -> "gmake"
    _other -> "make"
  end
end
# Passes a custom error message through untouched; otherwise selects the
# help text matching the current OS.
defp os_specific_error_msg(msg) when is_binary(msg), do: msg

defp os_specific_error_msg(:default) do
  case :os.type() do
    {:win32, _} -> @windows_error_msg
    {:unix, :darwin} -> @mac_error_msg
    {:unix, _} -> @unix_error_msg
    _other -> ""
  end
end
# Command-line arguments that select which makefile to use. `nmake` takes
# `/F` (defaulting to "Makefile.win"); every other make flavour takes `-f`
# (defaulting to its built-in makefile discovery, i.e. no argument).
defp args_for_makefile("nmake", :default), do: args_for_makefile("nmake", "Makefile.win")
defp args_for_makefile("nmake", makefile), do: ["/F", makefile]
defp args_for_makefile(_exec, :default), do: []
defp args_for_makefile(_exec, makefile), do: ["-f", makefile]
# Echoes the exact command being run. Arguments containing spaces are
# quoted via `inspect/1` so the printed command is copy-pasteable.
defp print_verbose_info(exec, args) do
  quote_arg = fn arg ->
    if String.contains?(arg, " "), do: inspect(arg), else: arg
  end

  rendered = Enum.map_join(args, " ", quote_arg)
  Mix.shell().info("Compiling with make: #{exec} #{rendered}")
end
end
|
deps/elixir_make/lib/mix/tasks/compile.make.ex
| 0.788746
| 0.489564
|
compile.make.ex
|
starcoder
|
defmodule ExLibSRTP do
  @moduledoc """
  [libsrtp](https://github.com/cisco/libsrtp) bindings for Elixir.

  The workflow goes as follows:
  - create ExLibSRTP instance with `new/0`
  - add streams with `add_stream/2`
  - protect or unprotect packets with `protect/3`, `unprotect/3`, `protect_rtcp/3`, `unprotect_rtcp/3`
  - remove streams with `remove_stream/2`
  """
  alias ExLibSRTP.{Native, Policy}
  require Record

  # Opaque handle: the native libsrtp session reference tagged with this module.
  @opaque t :: {__MODULE__, native :: reference}

  # SSRC identifiers are unsigned 32-bit integers.
  @type ssrc_t :: 0..4_294_967_295

  defguard is_ssrc(ssrc) when ssrc in 0..4_294_967_295

  # Wraps/unwraps the native reference in a `{ExLibSRTP, ref}` tuple so the
  # handle can be pattern matched in function heads.
  defmacrop ref(native) do
    quote do
      {unquote(__MODULE__), unquote(native)}
    end
  end

  @doc """
  Creates a new SRTP session handle.
  """
  @spec new() :: t()
  def new() do
    ref(Native.create())
  end

  @doc """
  Adds a stream described by `policy` to the session.
  """
  @spec add_stream(t(), policy :: Policy.t()) :: :ok
  def add_stream(ref(native) = _srtp, %Policy{} = policy) do
    {ssrc_type, ssrc} = Native.marshal_ssrc(policy.ssrc)
    {keys, keys_mkis} = Native.marshal_master_keys(policy.key)

    Native.add_stream(
      native,
      ssrc_type,
      ssrc,
      keys,
      keys_mkis,
      policy.rtp,
      policy.rtcp,
      policy.window_size,
      policy.allow_repeat_tx
    )
  end

  @doc """
  Removes the stream identified by `ssrc` from the session.
  """
  @spec remove_stream(t(), ssrc :: ssrc_t()) :: :ok
  def remove_stream(ref(native) = _srtp, ssrc) when is_ssrc(ssrc) do
    Native.remove_stream(native, ssrc)
  end

  @doc """
  Updates an existing stream with a new `policy`.
  """
  @spec update(t(), policy :: Policy.t()) :: :ok
  def update(ref(native), %Policy{} = policy) do
    {ssrc_type, ssrc} = Native.marshal_ssrc(policy.ssrc)
    {keys, keys_mkis} = Native.marshal_master_keys(policy.key)

    Native.update(
      native,
      ssrc_type,
      ssrc,
      keys,
      keys_mkis,
      policy.rtp,
      policy.rtcp,
      policy.window_size,
      policy.allow_repeat_tx
    )
  end

  @doc """
  Protects an RTP packet. When `mki_index` is given, the corresponding
  Master Key Identifier is included in the protected packet.
  """
  @spec protect(t(), unprotected :: binary(), mki_index :: pos_integer() | nil) ::
          {:ok, protected :: binary()}
  def protect(srtp, unprotected, mki_index \\ nil)

  def protect(ref(native), unprotected, nil) do
    Native.protect(native, :rtp, unprotected, false, 0)
  end

  def protect(ref(native), unprotected, mki_index) when is_integer(mki_index) do
    Native.protect(native, :rtp, unprotected, true, mki_index)
  end

  @doc """
  Protects an RTCP packet. When `mki_index` is given, the corresponding
  Master Key Identifier is included in the protected packet.
  """
  @spec protect_rtcp(t(), unprotected :: binary(), mki_index :: pos_integer() | nil) ::
          {:ok, protected :: binary()}
  def protect_rtcp(srtp, unprotected, mki_index \\ nil)

  def protect_rtcp(ref(native), unprotected, nil) do
    Native.protect(native, :rtcp, unprotected, false, 0)
  end

  def protect_rtcp(ref(native), unprotected, mki_index) when is_integer(mki_index) do
    Native.protect(native, :rtcp, unprotected, true, mki_index)
  end

  @doc """
  Unprotects an SRTP packet. Set `use_mki` when the packet carries a
  Master Key Identifier.
  """
  # Note: spec error atom fixed from :reply_fail to :replay_fail to match
  # libsrtp's srtp_err_status_replay_fail.
  @spec unprotect(t(), protected :: binary(), use_mki :: boolean()) ::
          {:ok, unprotected :: binary()} | {:error, :auth_fail | :replay_fail | :bad_mki}
  def unprotect(ref(native) = _srtp, protected, use_mki \\ false) do
    Native.unprotect(native, :rtp, protected, use_mki)
  end

  @doc """
  Unprotects an SRTCP packet. Set `use_mki` when the packet carries a
  Master Key Identifier.
  """
  @spec unprotect_rtcp(t(), protected :: binary(), use_mki :: boolean()) ::
          {:ok, unprotected :: binary()} | {:error, :auth_fail | :replay_fail | :bad_mki}
  def unprotect_rtcp(ref(native) = _srtp, protected, use_mki \\ false) do
    Native.unprotect(native, :rtcp, protected, use_mki)
  end
end
|
lib/ex_libsrtp.ex
| 0.787359
| 0.416915
|
ex_libsrtp.ex
|
starcoder
|
defmodule Txpost do
  @moduledoc """
  Send and receive Bitcoin transactions from your Phoenix or Plug-based Elixir
  application.

  Txpost implements a standard for encoding and decoding Bitcoin transactions
  and other data in a concise binary format using [CBOR](https://cbor.io). A
  number of modules following the Plug specification can easily be slotted in
  your Phoenix or Plug-based application's pipeline. An optional Router module
  is available, allowing you to implement routing logic for different types of
  transactions from a single endpoint.

  * Receive Bitcoin transactions in a concise and efficient binary serialisation format
  * Simple and flexible schema for sending Bitcoin data with other data parameters
  * Send multiple transactions in a single request, or build streaming applications
  * Sign and verify data payloads with ECDSA signatures

  ### BRFC specifications

  Txpost is an implementation of the following BRFC specifications. They
  describe a standard for serialising Bitcoin transactions and associated
  parameters, along with arbitrary meta data, in a concise binary format using
  CBOR:

  * BRFC `c9a2975b3d19` - [CBOR Tx Payload specification](cbor-tx-payload.md)
  * BRFC `5b82a2ed7b16` - [CBOR Tx Envelope specification](cbor-tx-envelope.md)

  ## Installation

  The package can be installed by adding `txpost` to your list of dependencies
  in `mix.exs`.

      def deps do
        [
          {:txpost, "~> 0.1"}
        ]
      end

  Add `Txpost.Parsers.CBOR` to your endpoint's list of parsers.

      plug Plug.Parsers,
        parsers: [
          :json,
          Txpost.Parsers.CBOR
        ]

  Finally create any routes needed to handle transaction requests and add
  `Txpost.Plug` to the plug pipeline. For example, adding a route to a Phoenix
  router:

      defmodule MyAppWeb.Router do
        use MyAppWeb, :router

        pipeline :tx_api do
          plug :accepts, ["cbor"]
          plug Txpost.Plug
        end

        scope "/tx" do
          pipe_through :tx_api
          post "/create", MyAppWeb.TxController, :create
        end
      end

  ## Transaction routing

  The example above creates a single route passing all transactions to the same
  controller. You could create many routes for different transactions but in
  some applications it may be desirable to advertise a single endpoint to
  receive different types of transactions, each handled by different
  controllers. In this case `Txpost.Router` can be used to route transactions to
  different controllers, using any logic you need.

  A tx router is a module that implements the `c:Txpost.Router.handle_tx/2`
  callback.

      defmodule MyApp.TxRouter do
        use Txpost.Router

        def handle_tx(conn, _params) do
          case get_req_meta(conn) do
            %{"type" => "article"} -> ArticleController.call(conn, :create)
            %{"type" => "image"} -> ImageController.call(conn, :create)
          end
        end
      end

  For more details, see `Txpost.Router`.
  """
end
|
lib/txpost.ex
| 0.936205
| 0.72331
|
txpost.ex
|
starcoder
|
defmodule ABA do
  @moduledoc """
  ABA is an Elixir library for performing validation and lookups on ABA routing
  numbers. It stores all routing numbers and bank information in an ETS table.
  Therefore, you should initialize the application in a supervision tree.

  ## Installation

  Add `aba` to your list of dependencies in `mix.exs`:

  ```elixir
  def deps do
    [
      {:aba, "~> 0.1.0"}
    ]
  end
  ```

  ## Usage

  To perform routing number validation without an ETS table lookup:

  ```elixir
  iex> ABA.routing_number_valid?("111900659")
  true
  ```

  Otherwise, performing lookups can be done with:

  ```elixir
  iex> ABA.get_bank("111900659")
  {:ok, %ABA.Bank{routing_number: "111900659", name: "<NAME>",
    address: "255 2ND AVE SOUTH", city: "MINNEAPOLIS",
    state: "MN", zip: "55479"}}
  ```
  """
  use Application

  # Application callback: starts the registry process that owns the ETS table.
  @doc false
  def start(_type, _args) do
    # The deprecated `import Supervisor.Spec` was removed: the children list
    # below uses module-based child specs, so it was unused.
    children = [
      ABA.Registry
    ]

    opts = [strategy: :one_for_one, name: ABA.Supervisor]
    Supervisor.start_link(children, opts)
  end

  @doc """
  Looks up bank info via the routing number passed.

  Validates the routing number first; only valid numbers reach the ETS lookup.

  ## Examples

      iex> ABA.get_bank("111900659")
      {:ok, %ABA.Bank{routing_number: "111900659", name: "<NAME>",
        address: "255 2ND AVE SOUTH", city: "MINNEAPOLIS",
        state: "MN", zip: "55479"}}

      iex> ABA.get_bank("111XXX659")
      {:error, :invalid}

  """
  @spec get_bank(any) :: {:ok, ABA.Bank.t()} | {:error, :not_found | :invalid}
  def get_bank(routing_number) do
    if routing_number_valid?(routing_number) do
      case ABA.Registry.lookup(routing_number) do
        nil -> {:error, :not_found}
        bank -> {:ok, bank}
      end
    else
      {:error, :invalid}
    end
  end

  @doc """
  Validates the routing number. Can be passed any Elixir term.

  ## Examples

      iex> ABA.routing_number_valid?("111900659")
      true

      iex> ABA.routing_number_valid?("111900658")
      false

  """
  defdelegate routing_number_valid?(routing_number),
    to: ABA.Validator
end
|
lib/aba.ex
| 0.829665
| 0.857887
|
aba.ex
|
starcoder
|
defmodule Gobstopper.Service.Auth.Identity do
  @moduledoc """
  Provides interfaces to identities.

  Requires operations that have restricted access, meet those requirements.
  """
  require Logger

  alias Gobstopper.Service.Auth.Identity

  # Retries the insert when the generated identity collides on the unique
  # `identity` field; any other result is passed through unchanged.
  # NOTE(review): retries indefinitely while that error persists — confirm
  # collisions are transient (e.g. random identifier regeneration).
  defp unique_identity({ :error, %{ errors: [identity: _] } }), do: unique_identity(Gobstopper.Service.Repo.insert(Identity.Model.changeset(%Identity.Model{})))
  defp unique_identity(identity), do: identity

  @doc """
  Create an identity with the initial credential.

  Returns the session token on successful creation. Otherwise returns an
  error.
  """
  @spec create(atom, term) :: { :ok, String.t } | { :error, String.t }
  def create(type, credential) do
    # Each step is tagged so the `else` clause can tell which one failed.
    with { :identity, { :ok, identity } } <- { :identity, unique_identity(Gobstopper.Service.Repo.insert(Identity.Model.changeset(%Identity.Model{}))) },
         { :create_credential, :ok } <- { :create_credential, Identity.Credential.create(type, identity, credential) },
         { :jwt, { :ok, jwt, _ } } <- { :jwt, Guardian.encode_and_sign(identity) } do
      { :ok, jwt }
    else
      { :identity, { :error, changeset } } ->
        # Changeset details are only logged; callers get a generic message.
        Logger.debug("create identity: #{inspect(changeset.errors)}")
        { :error, "Failed to create credential" }
      { :create_credential, { :error, reason } } -> { :error, reason }
      { :jwt, { :error, _ } } -> { :error, "Could not create JWT" }
    end
  end

  @doc """
  Create a new credential to associate with an identity.

  Returns `:ok` on successful creation. Otherwise returns an error.
  """
  @spec create(atom, term, String.t) :: :ok | { :error, String.t }
  def create(type, credential, token) do
    with { :identity, identity = %Identity.Model{} } <- { :identity, verify_identity(token) },
         { :create_credential, :ok } <- { :create_credential, Identity.Credential.create(type, identity, credential) } do
      :ok
    else
      { :identity, nil } -> { :error, "Invalid token" }
      { :create_credential, { :error, reason } } -> { :error, reason }
    end
  end

  @doc """
  Update a credential associated with an identity.

  Returns `:ok` on successful update. Otherwise returns an error.
  """
  @spec update(atom, term, String.t) :: :ok | { :error, String.t }
  def update(type, credential, token) do
    case verify_identity(token) do
      nil -> { :error, "Invalid token" }
      identity -> Identity.Credential.change(type, identity, credential)
    end
  end

  @doc """
  Remove a credential associated with an identity.

  Returns `:ok` on successful removal. Otherwise returns an error.
  """
  @spec remove(atom, String.t) :: :ok | { :error, String.t }
  def remove(type, token) do
    case verify_identity(token) do
      nil -> { :error, "Invalid token" }
      identity -> Identity.Credential.revoke(type, identity)
    end
  end

  @doc """
  Login into an identity using the credential.

  Returns the session token on successful login. Otherwise returns an error.
  """
  @spec login(atom, term) :: { :ok, String.t } | { :error, String.t }
  def login(type, credential) do
    with { :identity, { :ok, identity } } <- { :identity, Identity.Credential.authenticate(type, credential) },
         { :jwt, { :ok, jwt, _ } } <- { :jwt, Guardian.encode_and_sign(identity) } do
      { :ok, jwt }
    else
      { :identity, { :error, reason } } -> { :error, reason }
      { :jwt, { :error, _ } } -> { :error, "Could not create JWT" }
    end
  end

  @doc """
  Logout of an identity's active session.

  Returns `:ok` on successful logout. Otherwise returns an error.
  """
  @spec logout(String.t) :: :ok | { :error, String.t }
  def logout(token) do
    case Guardian.revoke!(token) do
      :ok -> :ok
      _ -> { :error, "Could not logout of session" }
    end
  end

  # Decodes and verifies the JWT, resolving it back to an identity model.
  # Returns `nil` on any failure (invalid, expired, unresolvable token).
  # NOTE(review): `Guardian.serializer/0` is a pre-1.0 Guardian API — confirm
  # the pinned Guardian version still provides it.
  @spec verify_identity(String.t) :: Identity.Model.t | nil
  defp verify_identity(token) do
    with { :ok, %{ "sub" => sub } } <- Guardian.decode_and_verify(token),
         { :ok, identity } <- Guardian.serializer.from_token(sub) do
      identity
    else
      _ -> nil
    end
  end

  @doc """
  Verify an identity's session.

  Returns the unique ID of the identity if verifying a valid session token.
  Otherwise returns `nil`.
  """
  @spec verify(String.t) :: String.t | nil
  def verify(token) do
    case verify_identity(token) do
      nil -> nil
      identity -> identity.identity
    end
  end

  @doc """
  Check if a credential type is associated with an identity.

  Returns whether the credential exists or not, if successful. Otherwise returns
  an error.
  """
  @spec credential?(atom, String.t) :: { :ok, boolean } | { :error, String.t }
  def credential?(type, token) do
    case verify_identity(token) do
      nil -> { :error, "Invalid token" }
      identity -> { :ok, Identity.Credential.credential?(type, identity) }
    end
  end

  # Compile-time list of supported credential types, derived from the file
  # names under identity/credential/ (e.g. "email.ex" -> :email). Filters
  # out any file name that does not end in ".ex".
  @credential_types Enum.filter(for type <- Path.wildcard(Path.join(__DIR__, "identity/credential/*.ex")) do
    name = Path.basename(type)
    size = byte_size(name) - 3
    case name do
      <<credential :: size(size)-binary, ".ex">> -> String.to_atom(String.downcase(credential))
      _ -> nil
    end
  end, &(&1 != nil))

  @doc """
  Get the state of all credentials an identity could be associated with.

  Returns all the credentials presentable state if successful. Otherwise returns
  an error.
  """
  @spec all_credentials(String.t) :: { :ok, [{ atom, { :unverified | :verified, String.t } | { :none, nil } }] } | { :error, String.t }
  def all_credentials(token) do
    case verify_identity(token) do
      nil -> { :error, "Invalid token" }
      identity -> { :ok, (for type <- @credential_types, do: { type, Identity.Credential.info(type, identity) }) }
    end
  end
end
|
apps/gobstopper_service/lib/gobstopper.service/auth/identity.ex
| 0.863852
| 0.494263
|
identity.ex
|
starcoder
|
defmodule Monad.Behaviour do
  @moduledoc """
  A behaviour that provides the common code for monads.

  Creating a monad consists of three steps:

  1. Call `use Monad.Behaviour`
  2. Implement `return/1`
  3. Implement `bind/2`

  By completing the above steps, the monad will automatically conform to the
  `Functor` and `Applicative` protocols in addition to the `Monad` protocol.

  ## Example

  The following is an example showing how to use `Monad.Behaviour`.

      iex> defmodule Monad.Identity.Sample do
      ...>   use Elixir.Monad.Behaviour # The `Elixir` prefix is needed for the doctest.
      ...>
      ...>   defstruct value: nil
      ...>
      ...>   def return(value) do
      ...>     %Monad.Identity.Sample{value: value}
      ...>   end
      ...>
      ...>   def bind(%Monad.Identity.Sample{value: value}, fun) do
      ...>     fun.(value)
      ...>   end
      ...>
      ...>   def unwrap(%Monad.Identity.Sample{value: value}) do
      ...>     value
      ...>   end
      ...> end
      iex> m = Monad.Identity.Sample.return 42
      iex> Monad.Identity.Sample.unwrap m
      42
      iex> m2 = Elixir.Monad.bind m, (& Monad.Identity.Sample.return(&1 * 2))
      iex> Monad.Identity.Sample.unwrap m2
      84

  """
  @type t :: Monad.t()
  # A function suitable for the right-hand side of bind: takes an unwrapped
  # value and returns a new monad.
  @type bind_fun :: (term -> t)

  @callback return(value :: term) :: t
  @callback bind(monad :: t, fun :: bind_fun) :: t

  @doc """
  Calls `module`'s `return/1` function.

  Wraps the given value in the specified monad.
  """
  @spec return(atom, term) :: t
  def return(module, value), do: module.return(value)

  @doc """
  Calls `module`'s `bind/2` function.

  Unwraps `monad` then applies the wrapped value to `fun`. Returns a new monad.
  """
  @spec bind(atom, t, (term -> t)) :: t
  def bind(module, monad, fun), do: module.bind(monad, fun)

  @doc false
  defmacro __using__(_) do
    quote do
      @behaviour Monad.Behaviour

      # fmap is derived from bind/return: unwrap, apply `fun`, re-wrap.
      # `@for` is the module the protocol is being implemented for.
      defimpl Functor, for: __MODULE__ do
        def fmap(monad, fun) do
          return = &Monad.Behaviour.return(@for, &1)
          Monad.Behaviour.bind(@for, monad, &(&1 |> fun.() |> return.()))
        end
      end

      # apply is derived from bind/fmap: unwrap the wrapped function, then
      # map it over the wrapped value.
      defimpl Applicative, for: __MODULE__ do
        def apply(monad, monad_fun) do
          Monad.Behaviour.bind(@for, monad_fun, &Functor.fmap(monad, &1))
        end
      end

      # The Monad protocol simply delegates to the module's own bind/2.
      defimpl Monad, for: __MODULE__ do
        def bind(monad, fun) do
          Monad.Behaviour.bind(@for, monad, fun)
        end
      end
    end
  end
end
|
lib/monad/behaviour.ex
| 0.887467
| 0.574037
|
behaviour.ex
|
starcoder
|
defmodule ExotelEx.InMemoryMessenger do
  # In-memory stub implementation of the `ExotelEx.Messenger` behaviour:
  # returns canned response maps without performing any HTTP calls.
  @behaviour ExotelEx.Messenger

  # Public API

  @doc """
  The send_sms/4 function sends an sms to a
  given phone number from a given phone number.

  ## Example:
  ```
  iex(1)> ExotelEx.InMemoryMessenger.send_sms("15005550006", "15005550001", "test message")
  %{"SMSMessage" => %{
      "AccountSid" => "probe",
      "ApiVersion" => nil,
      "Body" => "test message",
      "DateCreated" => "2017-11-12 00:24:31",
      "DateSent" => nil,
      "DateUpdated" => "2017-11-12 00:24:31",
      "DetailedStatus" => "PENDING_TO_OPERATOR",
      "DetailedStatusCode" => 21010,
      "Direction" => "outbound-api",
      "From" => "01139595093/SCRPBX",
      "Price" => nil,
      "Sid" => "6dbfdc50133d0e51ec8d793356559868",
      "Status" => "queued",
      "To" => "08884733565",
      "Uri" => "/v1/Accounts/probe/SMS/Messages/6dbfdc50133d0e51ec8d793356559868.json"}}
  ```
  """
  @spec send_sms(String.t(), String.t(), String.t(), String.t()) :: map()
  def send_sms(from, to, body, _ \\ "") do
    # Echoes `from`, `to` and `body` back inside a fixed response shape.
    %{
      "SMSMessage" => %{
        "AccountSid" => "account_sid",
        "ApiVersion" => nil,
        "Body" => body,
        "DateCreated" => "2017-11-12 00:24:31",
        "DateSent" => nil,
        "DateUpdated" => "2017-11-12 00:24:31",
        "DetailedStatus" => "PENDING_TO_OPERATOR",
        "DetailedStatusCode" => 21010,
        "Direction" => "outbound-api",
        "From" => from,
        "Price" => nil,
        "Sid" => "3412jhkj4123h4kj123h4lk12j3h4lk12j34",
        "Status" => "queued",
        "To" => to,
        "Uri" => "/v1/Accounts/probe/SMS/Messages/3412jhkj4123h4kj123h4lk12j3h4lk12j34.json"
      }
    }
  end

  @doc """
  The sms_details/1 function gets an sms details.

  ## Example:
  ```
  iex(1)> ExotelEx.InMemoryMessenger.sms_details("sms_sid")
  %{"SMSMessage" => %{
      "AccountSid" => "probe",
      "ApiVersion" => nil,
      "Body" => "test message",
      "DateCreated" => "2017-11-12 00:24:31",
      "DateSent" => "2017-11-12 00:24:35",
      "DateUpdated" => "2017-11-12 00:24:36",
      "DetailedStatus" => "DELIVERED_TO_HANDSET",
      "DetailedStatusCode" => 20005,
      "Direction" => "outbound-api",
      "From" => "01139595093/SCRPBX",
      "Price" => "0.180000",
      "Sid" => "6dbfdc50133d0e51ec8d793356559868",
      "Status" => "sent",
      "To" => "08884733565",
      "Uri" => "/v1/Accounts/probe/SMS/Messages/6dbfdc50133d0e51ec8d793356559868.json"}}
  ```
  """
  @spec sms_details(String.t()) :: map()
  def sms_details(sms_sid) do
    %{
      "SMSMessage" => %{
        "AccountSid" => "account_sid",
        # NOTE(review): placeholder body baked into the stub response —
        # verify no caller relies on this value.
        "Body" => "<PASSWORD>",
        "DateCreated" => "2017-11-12 00:24:31",
        "DateSent" => nil,
        "DateUpdated" => "2017-11-12 00:24:31",
        "DetailedStatus" => "DELIVERED_TO_HANDSET",
        "DetailedStatusCode" => 20005,
        "Direction" => "outbound-api",
        "From" => "01139595093/SCRPBX",
        "Price" => nil,
        "Sid" => sms_sid,
        "Status" => "sent",
        "To" => "08884733565",
        "Uri" => "/v1/Accounts/probe/SMS/Messages/#{sms_sid}.json"
      }
    }
  end

  @doc """
  The time_to_next_bucket/0 function gets the time in ms to next bucket limit.

  ## Example:
  ```
  iex(1)> ExotelEx.InMemoryMessenger.time_to_next_bucket
  {:ok, 500} # 500 ms to next bucket reset
  ```
  """
  @spec time_to_next_bucket() :: tuple()
  def time_to_next_bucket do
    # Stub: pretends the rate-limit bucket resets immediately.
    {:ok, 0}
  end
end
|
lib/exotel_ex/messengers/in_memory_messenger.ex
| 0.672439
| 0.658935
|
in_memory_messenger.ex
|
starcoder
|
defmodule TimeZoneInfo.DataStore do
  @moduledoc """
  A behaviour to store data and serve them later on.
  """

  # Guaranteed to appear in `get_time_zones/1` results (except `links: :only`).
  @default_time_zone "Etc/UTC"

  @doc "Puts the given `data` into the store."
  @callback put(data :: TimeZoneInfo.data()) :: :ok | :error

  @doc """
  Returns the `transitions` for a given `time_zone`.
  """
  @callback get_transitions(time_zone :: Calendar.time_zone()) ::
              {:ok, transitions :: [TimeZoneInfo.transition()]} | {:error, :transitions_not_found}

  @doc """
  Returns `rules` for a given `rule_name`.
  """
  # Error atom aligned with the `get_rules/1` spec below (was the
  # inconsistent `:rule_not_found`).
  @callback get_rules(rule_name :: TimeZoneInfo.rule_name()) ::
              {:ok, rules :: [TimeZoneInfo.rule()]} | {:error, :rules_not_found}

  @doc """
  Returns the list of all available time zones with or without links. The option
  `:links` can be used to customize the list.

  Values for `:links`:
  - `:ignore` just the time zone names will be returned
  - `:only` just the link name will be returned
  - `:include` the time zone and link names will be returned

  The list will be sorted.
  """
  @callback get_time_zones(links: :ignore | :only | :include) :: [Calendar.time_zone()]

  @doc """
  Returns true if the `DataSore` is empty.
  """
  @callback empty? :: boolean

  @doc """
  Returns the version of the IANA database from which the data was generated.
  """
  @callback version :: String.t() | nil

  @doc """
  Deletes all data in the data store.
  """
  @callback delete! :: :ok

  @doc """
  Returns infos about the data store.
  """
  @callback info :: term()

  @optional_callbacks info: 0

  # Implementation

  # Resolves the backing store module from the app env, auto-detecting it on
  # first use when configured as `:detect`.
  defp impl do
    case Application.fetch_env!(:time_zone_info, :data_store) do
      :detect -> detect()
      module -> module
    end
  end

  # Prefers :persistent_term (OTP >= 21.2) and falls back to ETS; caches the
  # decision in the app env so detection only runs once.
  defp detect do
    module =
      case function_exported?(:persistent_term, :get, 0) do
        true -> __MODULE__.PersistentTerm
        false -> __MODULE__.ErlangTermStorage
      end

    Application.put_env(:time_zone_info, :data_store, module)
    module
  end

  @doc false
  @spec get_transitions(Calendar.time_zone()) ::
          {:ok, [TimeZoneInfo.transition()]} | {:error, :transitions_not_found}
  def get_transitions(time_zone), do: impl().get_transitions(time_zone)

  @doc false
  @spec get_time_zones(links: :ignore | :only | :include) :: [Calendar.time_zone()]
  def get_time_zones(opts) do
    time_zones = impl().get_time_zones(opts)

    case opts[:links] do
      :only ->
        time_zones

      _ ->
        # Always include the default time zone, keeping the list sorted.
        case Enum.member?(time_zones, @default_time_zone) do
          true -> time_zones
          false -> Enum.sort([@default_time_zone | time_zones])
        end
    end
  end

  @doc false
  @spec get_rules(TimeZoneInfo.rule_name()) ::
          {:ok, [TimeZoneInfo.rule()]} | {:error, :rules_not_found}
  def get_rules(rule_name), do: impl().get_rules(rule_name)

  @doc false
  @spec put(TimeZoneInfo.data()) :: :ok | :error
  def put(data), do: impl().put(data)

  @doc false
  @spec empty? :: boolean()
  def empty?, do: impl().empty?()

  @doc false
  @spec version :: String.t() | nil
  def version, do: impl().version()

  @doc false
  @spec delete! :: :ok
  def delete!, do: impl().delete!()

  @doc false
  @spec info :: term()
  def info do
    impl = impl()

    # `info/0` is an optional callback, so check before calling.
    case function_exported?(impl, :info, 0) do
      false -> :no_implementation_found
      true -> impl.info()
    end
  end
end
|
lib/time_zone_info/data_store.ex
| 0.894092
| 0.547283
|
data_store.ex
|
starcoder
|
defmodule Lemma do
  @moduledoc ~S"""
  A morphological parser (analyzer) / lemmatizer implemented with
  textbook standard method, using an abstraction called Finite State Transducer (FST).
  FST is implemented in [gen_fst](https://github.com/xiamx/gen_fst) package

  A parser can be initialized with desired language using `Lemma.new/1`.
  This initialized parser can be used to parse words with `Lemma.parse/2`

  ## Examples

  ```
  en_parser = Lemma.new :en
  en_parser |> Lemma.parse("plays")
  #=> "play"
  ```

  ## About morphological parsing / lemmatization

  > For grammatical reasons, documents are going to use different forms of a word, such as organize, organizes, and organizing. Additionally, there are families of derivationally related words with similar meanings, such as democracy, democratic, and democratization. In many situations, it seems as if it would be useful for a search for one of these words to return documents that contain another word in the set.
  > <br/> The goal of both stemming and lemmatization is to reduce inflectional forms and sometimes derivationally related forms of a word to a common base form. For instance:
  > <br/><br/>am, are, is ⇒ be
  > <br/>car, cars, car's, cars' ⇒ car
  > <br/><br/>The result of this mapping of text will be something like:
  > <br/>_the boy's cars are different colors_ ⇒
  > the boy car be differ color.
  > <br/> -- [Stanford NLP Group](https://nlp.stanford.edu/IR-book/html/htmledition/stemming-and-lemmatization-1.html)
  """
  import Lemma.MorphParserGenerator

  @doc """
  Initialize a morphological parser for the given language.

  Only English (`:en`) is supported currently.
  """
  @spec new(atom) :: GenFST.fst
  def new(:en) do
    # Irregular forms are registered first as explicit word-for-word rules,
    # then the regular inflection rules are applied over the full word lists.
    GenFST.new
    |> generate_rules(Lemma.En.IrregularAdjectives.rules)
    |> generate_rules(Lemma.En.IrregularAdverbs.rules)
    |> generate_rules(Lemma.En.IrregularNouns.rules)
    |> generate_rules(Lemma.En.IrregularVerbs.rules)
    |> generate_rules(Lemma.En.Verbs.all, Lemma.En.Rules.verbs)
    |> generate_rules(Lemma.En.Nouns.all, Lemma.En.Rules.nouns)
    |> generate_rules(Lemma.En.Adjectives.all, Lemma.En.Rules.adjs)
  end

  # Any other language is unsupported.
  def new(l) do
    raise "language #{l} not supported"
  end

  @doc """
  Use the given parser to parse a word or a list of words.
  """
  @spec parse(GenFST.fst, String.t | [String.t]) :: String.t | [String.t]
  # A non-empty list of words is parsed element-wise.
  def parse(parser, [_w | _ws] = words) do
    Enum.map(words, &(parse(parser, &1)))
  end

  # Words are lowercased before parsing, as the FST rules are lowercase.
  def parse(parser, word) do
    GenFST.parse(parser, String.downcase(word))
  end
end
|
lib/lemma.ex
| 0.879283
| 0.914711
|
lemma.ex
|
starcoder
|
defmodule Exnoops.Fizzbot do
  @moduledoc """
  Module to interact with Github's Noop: Fizzbot

  See the [official `noop` documentation](https://noopschallenge.com/challenges/fizzbot) for API information
  """
  require Logger
  import Exnoops.API

  # API path segment for this noop.
  @noop "fizzbot"

  @doc ~S"""
  Query Fizzbot for a question

  If you don't provide a question number, it will query the default endpoint which returns the instructions.

  **Note**: Due to the many possible keys in each response, minimal processing will occur.

  ## Examples

      iex> Exnoops.Fizzbot.ask_question()
      {:ok,
        %{
          "message" => "Thank you for your application to Noops Inc.\n\nOur automated fizzbot interview process will help us determine if you have what it takes to become a Noop.\n\nFor each question, you will GET the question and then give us the answer back to the same URL.\nYou will also find the URL for the next question in the nextQuestion parameter for each response.\n\nThe first question is at https://api.noopschallenge.com/fizzbot/questions/1.\n\nGood Luck\n",
          "nextQuestion" => "/fizzbot/questions/1"
        }
      }

      iex> Exnoops.Fizzbot.ask_question(1)
      {:ok,
        %{
          "message" => "What is your favorite programming language?\nMine is COBOL, of course.\nPOST your answer back to this URL in JSON format. If you are having difficulties, see the exampleResponse provided.",
          "exampleResponse" => %{ "answer" => "COBOL" }
        }
      }

      iex> Exnoops.Fizzbot.ask_question(1234567)
      {:ok,
        %{
          "message" => "FizzBuzz is the name of the game.\nHere's a list of numbers.\nSend me back a string as follows:\nFor each number:\nIf it is divisible by 3, print \"Fizz\".\nIf it is divisible by 5, print \"Buzz\".\nIf it is divisible by 3 and 5, print \"FizzBuzz\".\nOtherwise, print the number.\n\nEach entry in the string should be separated by a space.\n\nFor example, if the numbers are [1, 2, 3, 4, 5], you would send back:\n\n{\n  \"answer\": \"1 2 Fizz 4 Buzz\"\n}\n",
          "rules" => [
            %{ "number" => 3, "response" => "Fizz" },
            %{ "number" => 5, "response" => "Buzz" }
          ],
          "numbers" => [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ],
          "exampleResponse" => %{ "answer" => "1 2 Fizz 4 Buzz..." }
        }
      }
  """
  @spec ask_question(integer()) :: {atom(), map()}
  def ask_question(question_id \\ 0) when is_integer(question_id) do
    Logger.debug("Calling Fizzbot.ask_question(#{question_id})")

    # Question 0 means "no question": hit the bare noop endpoint for the
    # instructions instead of a /questions/<id> path.
    endpoint = if question_id == 0, do: "", else: "/questions/#{question_id}"

    # `get/2` already returns `{:ok, body}` or an error tuple; the previous
    # `case` simply returned its input unchanged, so pass it straight through.
    get("/" <> @noop <> endpoint, [])
  end

  @doc ~S"""
  Submit an answer to Fizzbot

  ## Examples

      iex> Exnoops.Fizzbot.answer_question(1, %{"answer" => "COBOL"})
      {:ok,
        %{
          "result" => "correct",
          "message" => "Of course. How interesting. Are you ready for your first REAL question?",
          "nextQuestion" => "/fizzbot/questions/1234567"
        }
      }
  """
  @spec answer_question(integer(), map()) :: {atom(), map()}
  def answer_question(question_id, %{"answer" => _} = answer) when is_integer(question_id) do
    Logger.debug("Calling Fizzbot.answer_question(#{question_id})")

    # As above: `post/2`'s result was returned verbatim by every branch of
    # the old `case`, so the wrapper is dropped.
    post("/" <> @noop <> "/questions/#{question_id}", answer)
  end
end
|
lib/exnoops/fizzbot.ex
| 0.57523
| 0.566049
|
fizzbot.ex
|
starcoder
|
defmodule YamlFrontMatter do
  @moduledoc """
  Parse a file or string containing front matter and a document body.

  Front matter is a block of yaml wrapped between two lines containing `---`.
  In this example, the front matter contains `title: Hello`, and the body is
  `Hello, world`:

  ```md
  ---
  title: Hello
  ---
  Hello, world
  ```

  After parsing the document, front matter is returned as a map, and the body as
  a string.

  ```elixir
  YamlFrontMatter.parse_file "hello_world.md"
  {:ok, %{"title" => "Hello"}, "Hello, world"}
  ```
  """

  @doc """
  Read a file, parse its contents, and return its front matter and body.

  Returns `{:ok, matter, body}` on success (`matter` is a map), or
  `{:error, error}` on error.

      iex> YamlFrontMatter.parse_file "test/fixtures/dummy.md"
      {:ok, %{"title" => "Hello"}, "Hello, world\\n"}

      iex> YamlFrontMatter.parse_file "test/fixtures/idontexist.md"
      {:error, :enoent}
  """
  def parse_file(path) do
    case File.read(path) do
      {:ok, contents} -> parse(contents)
      {:error, error} -> {:error, error}
    end
  end

  @doc """
  Read a file, parse its contents, and return its front matter and body.

  Returns `{matter, body}` on success (`matter` is a map), throws on error.

      iex> YamlFrontMatter.parse_file! "test/fixtures/dummy.md"
      {%{"title" => "Hello"}, "Hello, world\\n"}

      iex> try do
      ...>   YamlFrontMatter.parse_file! "test/fixtures/idontexist.md"
      ...> rescue
      ...>   e in YamlFrontMatter.Error -> e.message
      ...> end
      "File not found"

      iex> try do
      ...>   YamlFrontMatter.parse_file! "test/fixtures/invalid.md"
      ...> rescue
      ...>   e in YamlFrontMatter.Error -> e.message
      ...> end
      "Error parsing yaml front matter"
  """
  def parse_file!(path) do
    case parse_file(path) do
      {:ok, matter, body} -> {matter, body}
      {:error, :enoent} -> raise YamlFrontMatter.Error, message: "File not found"
      {:error, _} -> raise YamlFrontMatter.Error
    end
  end

  @doc """
  Parse a string and return its front matter and body.

  Returns `{:ok, matter, body}` on success (`matter` is a map), or
  `{:error, error}` on error.

      iex> YamlFrontMatter.parse "---\\ntitle: Hello\\n---\\nHello, world"
      {:ok, %{"title" => "Hello"}, "Hello, world"}

      iex> YamlFrontMatter.parse "---\\ntitle: Hello\\n--\\nHello, world"
      {:error, :invalid_front_matter}
  """
  def parse(string) do
    string
    |> split_string
    |> process_parts
  end

  @doc """
  Parse a string and return its front matter and body.

  Returns `{matter, body}` on success (`matter` is a map), throws on error.

      iex> YamlFrontMatter.parse! "---\\ntitle: Hello\\n---\\nHello, world"
      {%{"title" => "Hello"}, "Hello, world"}

      iex> try do
      ...>   YamlFrontMatter.parse! "---\\ntitle: Hello\\n--\\nHello, world"
      ...> rescue
      ...>   e in YamlFrontMatter.Error -> e.message
      ...> end
      "Error parsing yaml front matter"
  """
  def parse!(string) do
    case parse(string) do
      {:ok, matter, body} -> {matter, body}
      {:error, _} -> raise YamlFrontMatter.Error
    end
  end

  # Splits the input into at most three parts around the `---` delimiters.
  # The delimiter pattern requires a whitespace character before `---`, so a
  # newline is prepended (after trimming leading whitespace) to let front
  # matter at the very start of the string match too.
  defp split_string(string) do
    split_pattern = ~r/[\s\r\n]---[\s\r\n]/s
    Regex.split(split_pattern, "\n" <> String.trim_leading(string), parts: 3)
  end

  # A successful split yields [prefix, yaml, body]; anything else means the
  # front matter delimiters were missing or malformed.
  defp process_parts([_, yaml, body]) do
    case parse_yaml(yaml) do
      {:ok, yaml} -> {:ok, yaml, body}
      {:error, error} -> {:error, error}
    end
  end

  defp process_parts(_), do: {:error, :invalid_front_matter}

  # YamlElixir already returns `{:ok, parsed}` / `{:error, reason}`, so its
  # result is passed through directly (the old `case` was an identity).
  defp parse_yaml(yaml) do
    YamlElixir.read_from_string(yaml)
  end
end
|
lib/yaml_front_matter.ex
| 0.872687
| 0.860838
|
yaml_front_matter.ex
|
starcoder
|
defmodule CCSP.Chapter3.Start do
  @moduledoc """
  Convenience module for setting up and running more elaborate sections.
  """

  alias CCSP.Chapter3.CSP
  alias CCSP.Chapter3.QueensConstraint
  alias CCSP.Chapter3.MapColoringConstraint
  alias CCSP.Chapter3.WordSearch
  alias CCSP.Chapter3.WordSearchConstraint
  alias CCSP.Chapter3.SendMoreMoneyConstraint

  @doc """
  Solves the Australia map-coloring problem: assign one of three colors to
  each region so that no two adjacent regions share a color.
  Returns the search result, or `nil` if the CSP search found no solution.
  """
  def run_map_coloring() do
    variables = [
      "Western Australia",
      "Northern Territory",
      "South Australia",
      "Queensland",
      "New South Wales",
      "Victoria",
      "Tasmania"
    ]

    # Every region may take any of the three colors.
    domains = Map.new(variables, &{&1, ["red", "green", "blue"]})

    CSP.new(variables, domains)
    |> CSP.add_constraint(MapColoringConstraint.new("Western Australia", "Northern Territory"))
    |> CSP.add_constraint(MapColoringConstraint.new("Western Australia", "South Australia"))
    |> CSP.add_constraint(MapColoringConstraint.new("South Australia", "Northern Territory"))
    |> CSP.add_constraint(MapColoringConstraint.new("Queensland", "Northern Territory"))
    |> CSP.add_constraint(MapColoringConstraint.new("Queensland", "South Australia"))
    |> CSP.add_constraint(MapColoringConstraint.new("Queensland", "New South Wales"))
    |> CSP.add_constraint(MapColoringConstraint.new("New South Wales", "South Australia"))
    |> CSP.add_constraint(MapColoringConstraint.new("Victoria", "South Australia"))
    |> CSP.add_constraint(MapColoringConstraint.new("Victoria", "New South Wales"))
    |> CSP.add_constraint(MapColoringConstraint.new("Victoria", "Tasmania"))
    |> CSP.backtracking_search()
  end

  @doc """
  Solves the n-queens problem (default `n = 8`): place `n` queens on an
  `n x n` board so that none attack each other. Returns the search result
  or `{:error, message}` when no solution exists.
  """
  def run_queens(n \\ 8) do
    columns = Enum.to_list(1..n)

    # Each queen (identified by its column) may sit in any of the n rows.
    rows = Map.new(columns, &{&1, Enum.to_list(1..n)})

    CSP.new(columns, rows)
    |> CSP.add_constraint(QueensConstraint.new(columns))
    |> CSP.backtracking_search()
    |> check_solution()
  end

  @doc """
  Generates a random 9x9 grid, places a fixed word list into it via CSP
  search, and prints the resulting grid. Returns `{:error, message}` when
  no placement exists.
  """
  def run_word_search() do
    grid = WordSearch.generate_grid(9, 9)
    words = ["MATTHEW", "JOE", "MARY", "SARAH", "SALLY"]

    # Domain of each word: every grid location sequence it could occupy.
    locations = Map.new(words, &{&1, WordSearch.generate_domain(&1, grid)})

    solution =
      CSP.new(words, locations)
      |> CSP.add_constraint(WordSearchConstraint.new(words))
      |> CSP.backtracking_search()

    if nil == solution do
      {:error, "No solution found."}
    else
      # NOTE(review): here the search result is assumed to be `{:ok, map}`;
      # confirm against CSP.backtracking_search/1, which the other runners
      # treat as returning the assignment directly.
      {:ok, solution} = solution

      # Write each word's letters into the grid at its chosen locations.
      Enum.reduce(Map.to_list(solution), grid, fn {word, grid_locations}, acc ->
        # randomly reverse half the time
        # Enum.reverse(grid_locations)
        indexed_letters = Enum.zip(0..String.length(word), String.graphemes(word))

        Enum.reduce(indexed_letters, acc, fn {index, letter}, acc ->
          {row, column} =
            {Enum.at(grid_locations, index).row, Enum.at(grid_locations, index).column}

          List.update_at(
            acc,
            row,
            &List.update_at(
              &1,
              column,
              fn _ -> letter end
            )
          )
        end)
      end)
      |> WordSearch.display_grid()
    end
  end

  @doc """
  Solves the SEND + MORE = MONEY cryptarithmetic puzzle: assign a distinct
  digit to each letter so the addition holds. Returns the search result or
  `{:error, message}` when no solution exists.
  """
  def run_send_more_money() do
    letters = ["S", "E", "N", "D", "M", "O", "R", "Y"]

    possible_digits =
      letters
      |> Map.new(&{&1, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]})
      # "M" is the leading digit of MONEY, so it must be 1.
      |> Map.put("M", [1])

    CSP.new(letters, possible_digits)
    |> CSP.add_constraint(SendMoreMoneyConstraint.new(letters))
    |> CSP.backtracking_search()
    |> check_solution()
  end

  # Normalizes a search result: `nil` means the backtracking search failed,
  # anything else is passed through unchanged.
  defp check_solution(nil), do: {:error, "No solution found."}
  defp check_solution(result), do: result
end
|
lib/ccsp/chapter3/start.ex
| 0.585694
| 0.423249
|
start.ex
|
starcoder
|
defmodule Day13.Route do
  @moduledoc """
  Computes the shortest route to the destination.
  """

  alias Day13.Cubicle
  alias Day13.Route

  defstruct [:input, :confirmed, :unconfirmed]

  # A fresh route: nothing confirmed yet, and the start cell {1, 1} is
  # reachable in 0 steps.
  def new(input) do
    %Route{input: input, confirmed: %{}, unconfirmed: %{{1, 1} => 0}}
  end

  # Keeps expanding the frontier until the destination has a confirmed
  # distance, then returns that distance.
  def find(route, destination) do
    case distance(route, destination) do
      nil -> route |> step() |> find(destination)
      found -> found
    end
  end

  # Expands until no unconfirmed cell is within `steps`, then counts how
  # many cells were confirmed reachable.
  def cubicles_within(route, steps) do
    if unconfirmed_within(route, steps) do
      route |> step() |> cubicles_within(steps)
    else
      number_confirmed(route)
    end
  end

  # One Dijkstra-style expansion: confirm the closest unconfirmed cell and
  # fold its open neighbours into the frontier.
  def step(%Route{input: input, confirmed: confirmed, unconfirmed: unconfirmed} = route) do
    {nearest, dist} = Enum.min_by(unconfirmed, fn {_cell, d} -> d end)

    confirmed = Map.put(confirmed, nearest, dist)
    unconfirmed = Map.delete(unconfirmed, nearest)

    open_neighbours =
      nearest
      |> Cubicle.neighbours()
      |> Enum.reject(&Cubicle.wall?(&1, input))

    unconfirmed =
      Enum.reduce(open_neighbours, unconfirmed, &handle_neighbour(&1, &2, dist, confirmed))

    %{route | confirmed: confirmed, unconfirmed: unconfirmed}
  end

  # Confirmed distance to `target`, or nil if not yet reached.
  def distance(%Route{confirmed: confirmed}, target), do: Map.get(confirmed, target)

  # Adds/updates a neighbour in the frontier unless it is already confirmed
  # or already known at an equal-or-shorter distance.
  defp handle_neighbour(neighbour, unconfirmed, dist, confirmed) do
    cond do
      Map.has_key?(confirmed, neighbour) ->
        unconfirmed

      not Map.has_key?(unconfirmed, neighbour) ->
        Map.put(unconfirmed, neighbour, dist + 1)

      unconfirmed[neighbour] <= dist ->
        unconfirmed

      true ->
        Map.put(unconfirmed, neighbour, dist + 1)
    end
  end

  defp unconfirmed_within(%Route{unconfirmed: unconfirmed}, steps) do
    unconfirmed
    |> Map.values()
    |> Enum.any?(fn d -> d <= steps end)
  end

  defp number_confirmed(%Route{confirmed: confirmed}), do: Enum.count(confirmed)
end
|
2016/day13/lib/day13/route.ex
| 0.785103
| 0.50116
|
route.ex
|
starcoder
|
defmodule Netcode.ReadEncryptedPacket do
  @moduledoc """
  The following steps are taken when reading an encrypted packet, in this exact order:
  If the packet size is less than 18 bytes then it is too small to possibly be valid, ignore the packet.
  If the low 4 bits of the prefix byte are greater than or equal to 7, the packet type is invalid, ignore the packet.
  The server ignores packets with type connection challenge packet.
  The client ignores packets with type connection request packet and connection response packet.
  If the high 4 bits of the prefix byte (sequence bytes) are outside the range [1,8], ignore the packet.
  If the packet size is less than 1 + sequence bytes + 16, it cannot possibly be valid, ignore the packet.
  If the packet type fails the replay protection test, ignore the packet. See the section on replay protection below for details.
  If the per-packet type data fails to decrypt, ignore the packet.
  If the per-packet type data size does not match the expected size for the packet type, ignore the packet.
  * 0 bytes for connection denied packet
  * 308 bytes for connection challenge packet
  * 308 bytes for connection response packet
  * 8 bytes for connection keep-alive packet
  * [1,1200] bytes for connection payload packet
  * 0 bytes for connection disconnect packet
  * If all the above checks pass, the packet is processed.
  ----
  connect token expired (-6)
  invalid connect token (-5)
  connection timed out (-4)
  connection response timed out (-3)
  connection request timed out (-2)
  connection denied (-1)
  disconnected (0)
  sending connection request (1)
  sending connection response (2)
  connected (3)
  # The initial state is disconnected (0). Negative states represent error states. The goal state is connected (3).
  handle(:start, data)
  case data do
    size(data) -> :ignore
    check_prefix_byte(data) -> :ignore
    ignore_connection_challenge -> :ignore
    ignore_connection_request -> :ignore
    ignore_connection_response -> :ignore
    check_packet_size(data) -> :ignore
    check_replay_protection(data) -> :ignore
    data -> decrypt(data)
  end
  def decrypt(data) do
    case actual_decrypt(data) do
      {:ok, plain} = x ->
        type = get_packet_type(plain)
        case type do
          :connection_denied and size(plain) == 0 -> plain
          :connection_challenge and size(plain) == 308 -> plain
          :connection_response and size(plain) == 308 -> plain
          :connection_keep_alive and size(plain) == 8 -> plain
          :connection_payload and size(plain) => 1 and size(plain) <= 1200 -> plain
          :connection_disconnect and size(plain) == 0 -> plain
          _ -> :ignore
        end
    end
  end
  def check_packet_size(data) do
    case size(data) do
    end
  end
  ---
  If the packet is not the expected size of 1062 bytes, ignore the packet.
  If the version info in the packet doesn't match "NETCODE 1.00" (13 bytes, with null terminator), ignore the packet.
  If the protocol id in the packet doesn't match the expected protocol id of the dedicated server, ignore the packet.
  If the connect token expire timestamp is <= the current timestamp, ignore the packet.
  If the encrypted private connect token data doesn't decrypt with the private key, using the associated data constructed from: version info, protocol id and expire timestamp, ignore the packet.
  If the decrypted private connect token fails to be read for any reason, for example, having a number of server addresses outside of the expected range of [1,32], or having an address type value outside of range [0,1], ignore the packet.
  If the dedicated server public address is not in the list of server addresses in the private connect token, ignore the packet.
  If a client from the packet IP source address and port is already connected, ignore the packet.
  If a client with the client id contained in the private connect token data is already connected, ignore the packet.
  If the connect token has already been used by a different packet source IP address and port, ignore the packet.
  Otherwise, add the private connect token hmac + packet source IP address and port to the history of connect tokens already used.
  If no client slots are available, then the server is full. Respond with a connection denied packet.
  Add an encryption mapping for the packet source IP address and port so that packets read from that address and port are decrypted with the client to server key in the private connect token, and packets sent to that address and port are encrypted with the server to client key in the private connect token. This encryption mapping expires in timeout seconds of no packets being sent to or received from that address and port, or if a client fails to establish a connection with the server within timeout seconds.
  If for some reason this encryption mapping cannot be added, ignore the packet.
  Otherwise, respond with a connection challenge packet and increment the connection challenge sequence number.
  :processing_connection_requests
  :processing_connection_response
  :connected
  :
  """

  # NOTE(review): the moduledoc above contains design notes and pseudo-code
  # for the netcode packet-reading protocol, while the callbacks below are
  # the on/off switch example from the GenStateMachine documentation — this
  # module looks like unfinished scaffolding. Confirm before relying on it.
  use GenStateMachine, callback_mode: :state_functions

  # :off state — a :flip cast moves to :on and increments the counter data.
  def off(:cast, :flip, data) do
    {:next_state, :on, data + 1}
  end

  # Any other event in :off is delegated to the shared handler below.
  def off(event_type, event_content, data) do
    handle_event(event_type, event_content, data)
  end

  # :on state — a :flip cast moves back to :off; the counter is unchanged.
  def on(:cast, :flip, data) do
    {:next_state, :off, data}
  end

  # Any other event in :on is delegated to the shared handler below.
  def on(event_type, event_content, data) do
    handle_event(event_type, event_content, data)
  end

  # Shared handler: a synchronous :get_count call replies with the current
  # counter value without changing state.
  def handle_event({:call, from}, :get_count, data) do
    {:keep_state_and_data, [{:reply, from, data}]}
  end
end
|
lib/netcode/ReadEncryptedPackets.ex
| 0.780579
| 0.782205
|
ReadEncryptedPackets.ex
|
starcoder
|
defmodule Strava.Athlete do
  @moduledoc """
  Athletes are Strava users, Strava users are athletes.
  More info: https://strava.github.io/api/v3/athlete/
  """
  import Strava.Util, only: [parse_date: 1, struct_from_map: 2]

  @type t :: %__MODULE__{
    id: integer,
    resource_state: integer,
    firstname: String.t,
    lastname: String.t,
    profile_medium: String.t,
    profile: String.t,
    city: String.t,
    state: String.t,
    country: String.t,
    sex: String.t,
    friend: String.t,
    follower: String.t,
    premium: boolean,
    created_at: NaiveDateTime.t | String.t,
    updated_at: NaiveDateTime.t | String.t,
    follower_count: integer,
    friend_count: integer,
    mutual_friend_count: integer,
    athlete_type: String.t,
    date_preference: String.t,
    measurement_preference: String.t,
    email: String.t,
    ftp: integer,
    weight: float,
    clubs: list(Strava.Club.Summary.t),
    bikes: list(map),
    shoes: list(map),
  }

  defstruct [
    :id,
    :resource_state,
    :firstname,
    :lastname,
    :profile_medium,
    :profile,
    :city,
    :state,
    :country,
    :sex,
    :friend,
    :follower,
    :premium,
    :created_at,
    :updated_at,
    :follower_count,
    :friend_count,
    :mutual_friend_count,
    :athlete_type,
    :date_preference,
    :measurement_preference,
    :email,
    :ftp,
    :weight,
    :clubs,
    :bikes,
    :shoes,
  ]

  @doc """
  Retrieve details about the current athlete.
  ## Example
      Strava.Athlete.retrieve_current()
  More info: http://strava.github.io/api/v3/athlete/#get-details
  """
  # Spec fixed to include the optional client argument (arity 1 after the
  # default is expanded).
  @spec retrieve_current(Strava.Client.t) :: Strava.Athlete.t
  def retrieve_current(client \\ Strava.Client.new) do
    "athlete"
    |> Strava.request(client, as: %Strava.Athlete{})
    |> parse
  end

  @doc """
  Retrieve details about an athlete by ID.
  ## Example
      Strava.Athlete.retrieve(5324239)
  More info: http://strava.github.io/api/v3/athlete/#get-another-details
  """
  @spec retrieve(integer, Strava.Client.t) :: Strava.Athlete.t
  def retrieve(id, client \\ Strava.Client.new) do
    "athletes/#{id}"
    |> Strava.request(client, as: %Strava.Athlete{})
    |> parse
  end

  @doc """
  Retrieve an athlete's totals and stats.
  ## Example
      Strava.Athlete.stats(5324239)
  More info: http://strava.github.io/api/v3/athlete/#stats
  """
  @spec stats(integer, Strava.Client.t) :: Strava.Athlete.Stats.t
  def stats(id, client \\ Strava.Client.new) do
    "athletes/#{id}/stats"
    |> Strava.request(client, as: %Strava.Athlete.Stats{})
    |> Strava.Athlete.Stats.parse
  end

  @doc """
  Retrieve an athlete's friends.
  ## Example
      Strava.Athlete.friends(5324239)
  More info: http://strava.github.io/api/v3/follow/
  """
  # Spec fixed: this returns a list of athletes, not Strava.Athlete.Stats.t.
  @spec friends(integer, Strava.Client.t) :: list(Strava.Athlete.t)
  def friends(id, client \\ Strava.Client.new) do
    "athletes/#{id}/friends"
    |> Strava.request(client, as: [%Strava.Athlete{}])
    |> Enum.map(&Strava.Athlete.parse/1)
  end

  @doc """
  Retrieve an athlete's followers.
  ## Example
      Strava.Athlete.followers(5324239)
  More info: http://strava.github.io/api/v3/follow/
  """
  # Spec fixed: this returns a list of athletes, not Strava.Athlete.Stats.t.
  @spec followers(integer, Strava.Client.t) :: list(Strava.Athlete.t)
  def followers(id, client \\ Strava.Client.new) do
    "athletes/#{id}/followers"
    |> Strava.request(client, as: [%Strava.Athlete{}])
    |> Enum.map(&Strava.Athlete.parse/1)
  end

  @doc """
  Normalize an athlete fetched from the API: parse date strings into
  `NaiveDateTime` and convert raw club maps into `Strava.Club.Summary` structs.
  """
  @spec parse(Strava.Athlete.t) :: Strava.Athlete.t
  def parse(%Strava.Athlete{} = athlete) do
    athlete
    |> parse_dates
    |> parse_clubs
  end

  @spec parse_dates(Strava.Athlete.t) :: Strava.Athlete.t
  defp parse_dates(%Strava.Athlete{created_at: created_at, updated_at: updated_at} = athlete) do
    %Strava.Athlete{athlete |
      created_at: parse_date(created_at),
      updated_at: parse_date(updated_at),
    }
  end

  # Athletes fetched without the clubs field (nil) are left untouched.
  @spec parse_clubs(Strava.Athlete.t) :: Strava.Athlete.t
  defp parse_clubs(athlete)
  defp parse_clubs(%Strava.Athlete{clubs: nil} = athlete), do: athlete
  defp parse_clubs(%Strava.Athlete{clubs: clubs} = athlete) do
    %Strava.Athlete{athlete |
      clubs: Enum.map(clubs, fn club -> struct_from_map(club, Strava.Club.Summary) end),
    }
  end

  defmodule Summary do
    @moduledoc """
    Summary representation of an athlete, as returned by list endpoints.
    """
    @type t :: %__MODULE__{
      id: integer,
      resource_state: integer,
      firstname: String.t,
      lastname: String.t,
      profile_medium: String.t,
      profile: String.t,
      city: String.t,
      state: String.t,
      country: String.t,
      sex: String.t,
      friend: String.t,
      follower: String.t,
      premium: boolean,
      email: String.t,
      created_at: NaiveDateTime.t | String.t,
      updated_at: NaiveDateTime.t | String.t,
    }

    defstruct [
      :id,
      :resource_state,
      :firstname,
      :lastname,
      :profile_medium,
      :profile,
      :city,
      :state,
      :country,
      :sex,
      :friend,
      :follower,
      :premium,
      :email,
      :created_at,
      :updated_at,
    ]

    @doc """
    Normalize a summary athlete: parse date strings into `NaiveDateTime`.
    """
    @spec parse(Strava.Athlete.Summary.t) :: Strava.Athlete.Summary.t
    def parse(%Strava.Athlete.Summary{} = athlete) do
      athlete
      |> parse_dates
    end

    @spec parse_dates(Strava.Athlete.Summary.t) :: Strava.Athlete.Summary.t
    defp parse_dates(%Strava.Athlete.Summary{created_at: created_at, updated_at: updated_at} = athlete) do
      %Strava.Athlete.Summary{athlete |
        created_at: parse_date(created_at),
        updated_at: parse_date(updated_at),
      }
    end
  end

  defmodule Meta do
    @moduledoc """
    Minimal (meta) representation of an athlete: id and resource state only.
    """
    @type t :: %__MODULE__{
      id: integer,
      resource_state: integer,
    }

    defstruct [
      :id,
      :resource_state,
    ]
  end
end
|
lib/strava/athlete.ex
| 0.845767
| 0.442757
|
athlete.ex
|
starcoder
|
defmodule Mole.Content do
use Private
@moduledoc """
The Content context. Stores interesting information about the game, like the
images and the statistics.
"""
import Ecto.Query, warn: false
alias Mole.Repo
alias Mole.Accounts.User
alias Mole.Content.{Answer, Condition, Image, Set, Survey, SurveyServer}
def list_images do
1..4
|> Enum.map(&get_images_by_set/1)
|> List.flatten()
end
@doc """
Gets a single image.
Raises `Ecto.NoResultsError` if the Image does not exist.
## Examples
iex> get_image!(123)
%Image{}
iex> get_image!(456)
** (Ecto.NoResultsError)
"""
def get_image!(id), do: Repo.get!(Image, id)
@doc """
Get a set of images by set number
"""
def get_images_by_set(set_number) do
from(s in Set, select: s, where: [id: ^set_number], preload: :images)
|> Repo.one()
|> Map.get(:images)
end
@doc """
Creates a image.
## Examples
iex> create_image(%{field: value})
{:ok, %Image{}}
iex> create_image(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_image(attrs \\ %{}) do
%Image{}
|> Image.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a image.
## Examples
iex> update_image(image, %{field: new_value})
{:ok, %Image{}}
iex> update_image(image, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_image(%Image{} = image, attrs) do
image
|> Image.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Image.
## Examples
iex> delete_image(image)
{:ok, %Image{}}
iex> delete_image(image)
{:error, %Ecto.Changeset{}}
"""
def delete_image(%Image{} = image) do
Repo.delete(image)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking image changes.
## Examples
iex> change_image(image)
%Ecto.Changeset{source: %Image{}}
"""
def change_image(%Image{} = image) do
Image.changeset(image, %{})
end
@doc """
Count the number of images in the repo.
"""
def count_images(), do: Repo.aggregate(Image, :count, :id)
# Ecto query to select malignant images
@malignant_query from(
i in "images",
where: [malignant: true],
select: i.id
)
# Get the percent of malignant images in the local datastore.
@spec percent_malignant() :: float()
def percent_malignant() do
total_amount = count_images()
@malignant_query
|> Mole.Repo.aggregate(:count, :id)
|> Kernel./(total_amount)
|> Kernel.*(100)
|> round()
end
@doc "Produce a static path in which to access the image"
@spec static_path(String.t() | map()) :: String.t()
def static_path(id) when is_binary(id), do: "/images/moles/#{id}.png"
def static_path(%{origin_id: id}), do: static_path(id)
def static_path(%{id: id}), do: static_path(id)
@doc """
Returns the list of surveys.
## Examples
iex> list_surveys()
[%Survey{}, ...]
"""
def list_surveys do
Repo.all(Survey)
end
@doc """
Gets a single survey.
Raises `Ecto.NoResultsError` if the Survey does not exist.
## Examples
iex> get_survey!(123)
%Survey{}
iex> get_survey!(456)
** (Ecto.NoResultsError)
"""
def get_survey!(id), do: Repo.get!(Survey, id)
def get_survey(id), do: Repo.get(Survey, id)
@doc """
Creates a survey.
## Examples
iex> create_survey(%{field: value})
{:ok, %Survey{}}
iex> create_survey(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_survey(attrs \\ %{}) do
result =
%Survey{}
|> Survey.changeset(attrs)
|> Repo.insert()
SurveyServer.poke()
result
end
@doc """
Updates a survey.
## Examples
iex> update_survey(survey, %{field: new_value})
{:ok, %Survey{}}
iex> update_survey(survey, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_survey(%Survey{} = survey, attrs) do
survey
|> Survey.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Survey.
## Examples
iex> delete_survey(survey)
{:ok, %Survey{}}
iex> delete_survey(survey)
{:error, %Ecto.Changeset{}}
"""
def delete_survey(%Survey{} = survey) do
Repo.delete(survey)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking survey changes.
## Examples
iex> change_survey(survey)
%Ecto.Changeset{source: %Survey{}}
"""
def change_survey(%Survey{} = survey) do
Survey.changeset(survey, %{})
end
def get_survey_by_slug(slug) do
from(s in "surveys", where: [slug: ^slug], select: s.id)
|> Repo.one()
end
@static_headers ~w(moniker feedback learning condition in_time out_time)a
def write_survey(id) do
filename =
[static_path(), get_survey!(id).slug <> ".csv"]
|> Path.join()
file = File.open!(filename, [:write, :utf8])
images =
list_images()
|> Enum.map(&Map.take(&1, [:origin_id, :id]))
|> Enum.reduce(%{}, fn %{origin_id: oid, id: id}, acc ->
Map.put(acc, id, oid)
end)
users =
from(u in User, select: u, where: [survey_id: ^id], preload: [:answers])
|> Repo.all()
|> Enum.map(&Map.take(&1, [:answers, :moniker, :condition]))
|> Enum.map(&map_user_values(&1, images))
users
|> CSV.encode(headers: @static_headers ++ Map.values(images))
|> Enum.each(&IO.write(file, &1))
filename
end
defp map_user_values(%{answers: answers, condition: condition} = user, images) do
{in_time, out_time} = get_times_from_answers(answers)
Enum.reduce(answers, user, fn %{image_id: iid, correct: cor?}, acc ->
Map.put(acc, images[iid], cor?)
end)
|> Map.delete(:answers)
|> Map.put(:condition, condition)
|> Map.put(:feedback, condition |> Condition.feedback() |> to_string())
|> Map.put(:learning, condition |> Condition.learning() |> to_string())
|> Map.put(:in_time, in_time)
|> Map.put(:out_time, out_time)
end
defp get_times_from_answers(answers) do
sorted_times =
answers
|> Enum.map(& &1.inserted_at)
|> Enum.sort(&(NaiveDateTime.compare(&1, &2) != :gt))
{List.first(sorted_times), List.last(sorted_times)}
end
@doc """
Returns the list of answers.
## Examples
iex> list_answers()
[%Answer{}, ...]
"""
def list_answers do
Repo.all(Answer)
end
@doc """
Gets a single answer.
Raises `Ecto.NoResultsError` if the Answer does not exist.
## Examples
iex> get_answer!(123)
%Answer{}
iex> get_answer!(456)
** (Ecto.NoResultsError)
"""
def get_answer!(id), do: Repo.get!(Answer, id)
@doc """
Creates a answer.
## Examples
iex> create_answer(%{field: value})
{:ok, %Answer{}}
iex> create_answer(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_answer(attrs \\ %{}) do
%Answer{}
|> Answer.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a answer.
## Examples
iex> update_answer(answer, %{field: new_value})
{:ok, %Answer{}}
iex> update_answer(answer, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_answer(%Answer{} = answer, attrs) do
answer
|> Answer.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Answer.
## Examples
iex> delete_answer(answer)
{:ok, %Answer{}}
iex> delete_answer(answer)
{:error, %Ecto.Changeset{}}
"""
def delete_answer(%Answer{} = answer) do
Repo.delete(answer)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking answer changes.
## Examples
iex> change_answer(answer)
%Ecto.Changeset{source: %Answer{}}
"""
def change_answer(%Answer{} = answer) do
Answer.changeset(answer, %{})
end
def save_answers(gameplay, user) do
gameplay.played
|> Enum.map(fn %{id: id, correct?: correct?, time_spent: time} ->
%{user_id: user.id, correct: correct?, image_id: id, time_spent: time}
end)
|> Enum.each(&insert_or_update_answer/1)
end
private do
defp insert_or_update_answer(%{user_id: uid, image_id: iid} = attrs) do
q = from(a in Answer, select: a, where: [user_id: ^uid, image_id: ^iid])
case Repo.one(q) do
nil -> struct(Answer, attrs)
ans -> ans
end
|> Answer.changeset(attrs)
|> Repo.insert_or_update()
end
defp static_path, do: Path.join(["#{:code.priv_dir(:mole)}", "static"])
end
end
|
lib/mole/content/content.ex
| 0.860677
| 0.422207
|
content.ex
|
starcoder
|
defmodule Ecto.Adapters.Riak.DateTime do
  # Conversions between Erlang calendar tuples, Ecto.DateTime/Ecto.Interval
  # records (legacy Elixir record syntax), ISO-8601-style strings, and the
  # Solr date-math format used by the Riak adapter.

  @type year :: non_neg_integer
  @type month :: non_neg_integer
  @type day :: non_neg_integer
  @type hour :: non_neg_integer
  @type min :: non_neg_integer
  @type sec :: non_neg_integer
  @type msec :: non_neg_integer
  @type date :: { year, month, day }
  @type time :: { hour, min, sec }
  @type datetime :: { date, time }
  @type dt :: date | time | datetime
  @type ecto_dt :: Ecto.DateTime
  @type ecto_int :: Ecto.Interval
  @type ecto_type :: ecto_dt | ecto_int

  # Current time as an Erlang {date, time} tuple (UTC / local).
  @spec now() :: datetime
  def now(), do: :calendar.now_to_universal_time(:os.timestamp)

  def now_local(), do: :calendar.now_to_local_time(:os.timestamp)

  def now_ecto(), do: now |> to_datetime

  def now_local_ecto(), do: now_local |> to_datetime

  # Render as "YYYY-MM-DDThh:mm:ssZ". A bare date gets a zero time.
  @spec to_str(datetime | ecto_type) :: binary
  def to_str({ _, _, _ } = x) do
    to_str({ x, { 0, 0, 0 } })
  end

  def to_str(Ecto.DateTime[] = x) do
    ecto_to_erl(x) |> to_str()
  end

  def to_str(Ecto.Interval[] = x) do
    ecto_to_erl(x) |> to_str()
  end

  def to_str({ { year, month, day }, { hour, min, sec } }) do
    "#{pad(year, 4)}-#{pad(month, 2)}-#{pad(day, 2)}T#{pad(hour, 2)}:#{pad(min, 2)}:#{pad(sec, 2)}Z"
  end

  # Convert strings, dates, datetimes or intervals into an Ecto.DateTime.
  @spec to_datetime(dt) :: ecto_dt
  def to_datetime(x) when is_binary(x) do
    parse_string(x) |> to_datetime
  end

  def to_datetime(Ecto.DateTime[] = x), do: x

  def to_datetime(Ecto.Interval[] = x) do
    Ecto.DateTime[year: x.year, month: x.month, day: x.day,
                  hour: x.hour, min: x.min, sec: x.sec]
  end

  def to_datetime({ year, month, day }) do
    Ecto.DateTime.new(year: year, month: month, day: day)
  end

  def to_datetime({ { year, month, day }, { hour, min, sec } }) do
    Ecto.DateTime.new(year: year, month: month, day: day,
                      hour: hour, min: min, sec: sec)
  end

  # Convert strings, dates, datetimes or intervals into an Ecto.Interval.
  # Spec fixed: the return type is an interval, not a datetime.
  @spec to_interval(dt) :: ecto_int
  def to_interval(x) when is_binary(x) do
    # Fixed: this previously piped into `to_datetime`, so a binary input
    # produced an Ecto.DateTime instead of an Ecto.Interval.
    parse_string(x) |> to_interval
  end

  def to_interval(Ecto.Interval[] = x), do: x

  def to_interval(Ecto.DateTime[] = x) do
    Ecto.Interval[year: x.year, month: x.month, day: x.day,
                  hour: x.hour, min: x.min, sec: x.sec]
  end

  def to_interval({ year, month, day }) do
    Ecto.Interval.new(year: year, month: month, day: day)
  end

  def to_interval({ { year, month, day }, { hour, min, sec } }) do
    Ecto.Interval.new(year: year, month: month, day: day,
                      hour: hour, min: min, sec: sec)
  end

  ## ------------------------------------------------------------
  ## Predicates and Guards
  ## ------------------------------------------------------------

  # defmacro ecto_timestamp?(x) do
  #   quote do
  #     (is_record(unquote(x), Ecto.DateTime) or is_record(unquote(x), Ecto.Interval))
  #   end
  # end

  def ecto_timestamp?(x) do
    is_record(x, Ecto.DateTime) || is_record(x, Ecto.Interval)
  end

  def ecto_interval?(x) do
    ## treat nil values as valid
    if is_record(x, Ecto.Interval) do
      year = if x.year != nil, do: year?(x.year), else: true
      month = if x.month != nil, do: month?(x.month), else: true
      day = if x.day != nil, do: day?(x.day), else: true
      hour = if x.hour != nil, do: hour?(x.hour), else: true
      min = if x.min != nil, do: minute?(x.min), else: true
      sec = if x.sec != nil, do: second?(x.sec), else: true
      (year && month && day && hour && min && sec)
    else
      false
    end
  end

  def ecto_datetime?(x) do
    if is_record(x, Ecto.DateTime) do
      datetime?({ { x.year, x.month, x.day }, { x.hour, x.min, x.sec } })
    else
      false
    end
  end

  def datetime?(x) when is_tuple(x) do
    case x do
      { date, time } ->
        date?(date) && time?(time)
      _ ->
        false
    end
  end

  def date?(x) do
    case x do
      { year, month, day } ->
        year?(year) && month?(month) && day?(day)
      _ ->
        false
    end
  end

  def time?(x) do
    case x do
      { hr, min, sec } ->
        hour?(hr) && minute?(min) && second?(sec)
      _ ->
        false
    end
  end

  def year?(x), do: x > 0
  def month?(x), do: x >= 1 && x <= 12
  # NOTE(review): allows day 31 in every month; calendar validity is not
  # checked here.
  def day?(x), do: x >= 1 && x <= 31
  def hour?(x), do: x >= 0 && x <= 23
  def minute?(x), do: x >= 0 && x <= 59
  def second?(x), do: x >= 0 && x <= 59

  ## ------------------------------------------------------------
  ## Solr Specific

  # "NOW" is resolved to the current UTC time before formatting.
  def solr_datetime(x) when x == "NOW" do
    solr_datetime(now)
  end

  def solr_datetime(x) when is_binary(x), do: x

  def solr_datetime(Ecto.DateTime[] = x) do
    ecto_to_erl(x) |> solr_datetime
  end

  def solr_datetime(Ecto.Interval[] = x) do
    ecto_to_erl(x) |> solr_datetime
  end

  def solr_datetime({ year, month, day } = arg) do
    cond do
      year?(year) && month?(month) && day?(day) ->
        "#{pad(year, 4)}-#{pad(month, 2)}-#{pad(day, 2)}T00:00:00Z"
      true ->
        raise Ecto.Adapters.Riak.DateTimeError,
              message: "invalid solr_datetime: #{inspect arg}"
    end
  end

  def solr_datetime({ { _, _, _ }, { _, _, _ } } = dt) do
    to_str(dt)
  end

  # Solr date-math suffix for adding an interval, e.g. "+1YEARS+2MONTHS…".
  @spec solr_datetime_add(binary | ecto_dt) :: binary
  def solr_datetime_add(x) when is_binary(x), do: x

  def solr_datetime_add(x) do
    { { year, month, day }, { hour, min, sec } } = ecto_to_erl(x)
    "+#{year}YEARS+#{month}MONTHS+#{day}DAYS+#{hour}HOURS+#{min}MINUTES+#{sec}SECONDS"
  end

  # Solr date-math suffix for subtracting an interval.
  @spec solr_datetime_subtract(binary | ecto_type) :: binary
  def solr_datetime_subtract(x) when is_binary(x), do: x

  def solr_datetime_subtract(x) do
    { { year, month, day }, { hour, min, sec } } = ecto_to_erl(x)
    "-#{year}YEARS-#{month}MONTHS-#{day}DAYS-#{hour}HOURS-#{min}MINUTES-#{sec}SECONDS"
  end

  # Lower an Ecto record into an Erlang {date, time} tuple, mapping nil
  # fields to 0.
  defp ecto_to_erl(x) do
    year = if x.year != nil, do: x.year, else: 0
    month = if x.month != nil, do: x.month, else: 0
    day = if x.day != nil, do: x.day, else: 0
    hour = if x.hour != nil, do: x.hour, else: 0
    min = if x.min != nil, do: x.min, else: 0
    sec = if x.sec != nil, do: x.sec, else: 0
    { { year, month, day }, { hour, min, sec } }
  end

  ## ----------------------------------------------------------------------
  ## Util

  # Parse "YYYY-MM-DDThh:mm:ssZ" into an Erlang {date, time} tuple.
  defp parse_string(nil), do: nil

  defp parse_string(x) do
    [ year, month, day, hour, min, sec ] =
      String.split(x, ~r"-|:|T|Z", trim: true)
      |> Enum.map(fn bin ->
        { int, _ } = Integer.parse(bin)
        int
      end)
    { { year, month, day }, { hour, min, sec } }
  end

  # Left-pad an integer with zeros to `padding` digits.
  defp pad(int, padding) do
    str = to_string(int)
    padding = max(padding - byte_size(str), 0)
    do_pad(str, padding)
  end

  defp do_pad(str, 0), do: str
  defp do_pad(str, n), do: do_pad("0" <> str, n - 1)
end
|
lib/ecto/adapters/riak/datetime.ex
| 0.516352
| 0.552419
|
datetime.ex
|
starcoder
|
defmodule BPXE.Engine.SensorGateway do
  @moduledoc """
  *Note: This gateway is not described in BPMN 2.0. However, it's available through
  BPXE's extension schema.*
  This gateway senses which of first N-1 incoming sequence flows fired (i.e.
  their conditions were truthful) [where N is the total number of incoming
  sequence flows], maps these N-1 incoming sequence flows to first N-1 outgoing
  sequence flows, and once Nth incoming sequence fires, it sends 0-based
  indices of incoming sequences fired
  to Nth outgoing sequence flow.
  This gateway is used to facilitate things like `BPXE.Engine.InclusiveGateway`
  """
  use GenServer
  use BPXE.Engine.FlowNode
  alias BPXE.Engine.Process
  alias BPXE.Engine.Process.Log

  # Accumulates indices of incoming flows that have fired so far; persisted
  # across restarts via @persist_state.
  defstate fired: []
  @persist_state :fired

  def start_link(element, attrs, model, process) do
    GenServer.start_link(__MODULE__, {element, attrs, model, process})
  end

  def init({_element, attrs, model, process}) do
    state =
      %__MODULE__{}
      |> put_state(Base, %{attrs: attrs, model: model, process: process})

    state = initialize(state)
    {:ok, state}
  end

  defmodule Token do
    # Payload sent on the completion flow: the list of fired incoming-flow
    # indices plus the id of the token that completed the gateway.
    defstruct fired: [], token_id: nil
    use ExConstructor
  end

  # Handles a token arriving on incoming flow `id`.
  def handle_token({%BPXE.Token{} = token, id}, state) do
    base_state = get_state(state, BPXE.Engine.Base)

    Process.log(base_state.process, %Log.SensorGatewayActivated{
      pid: self(),
      id: base_state.attrs["id"],
      token_id: token.token_id
    })

    flow_node_state = get_state(state, BPXE.Engine.FlowNode)
    # Position of the firing flow within this node's incoming flows.
    index = Enum.find_index(flow_node_state.incoming, fn id_ -> id_ == id end)

    if index == 0 do
      # completion flow
      # NOTE(review): the moduledoc says the *Nth* incoming flow completes the
      # gateway, but the code treats incoming index 0 as the completion flow —
      # presumably `incoming` is ordered with the completion flow first;
      # confirm against BPXE.Engine.FlowNode's incoming-flow ordering.
      Process.log(base_state.process, %Log.SensorGatewayCompleted{
        pid: self(),
        id: base_state.attrs["id"],
        token_id: token.token_id
      })

      # Send the accumulated fired indices out on the first outgoing flow and
      # reset the accumulator for the next round.
      {:send,
       BPXE.Token.new(
         activation: BPXE.Token.activation(token),
         payload: Token.new(fired: state.fired, token_id: token.token_id)
       ), [flow_node_state.outgoing |> List.first()], %{state | fired: []}}
    else
      # regular flow
      # Forward the token to the outgoing flow at the same index and record
      # the fired flow (index recomputed as a 0-based position counted from
      # the end of the incoming list).
      {:send, token, [flow_node_state.outgoing |> Enum.at(index)],
       %{state | fired: [length(flow_node_state.incoming) - index - 1 | state.fired]}}
    end
  end
end
|
lib/bpxe/engine/sensor_gateway.ex
| 0.717507
| 0.431225
|
sensor_gateway.ex
|
starcoder
|
defmodule Livebook.Intellisense.Docs do
  @moduledoc false

  # Extracts and normalizes information about modules and their members:
  # documentation, signatures and typespecs.

  @type member_info :: %{
          kind: member_kind(),
          name: atom(),
          arity: non_neg_integer(),
          documentation: documentation(),
          signatures: list(signature()),
          specs: list(spec()),
          meta: meta()
        }

  @type member_kind :: :function | :macro | :type

  @type documentation :: {format :: String.t(), content :: String.t()} | :hidden | nil

  @type signature :: String.t()

  @type meta :: map()

  @typedoc """
  A single spec annotation in the Erlang Abstract Format.
  """
  @type spec :: term()

  @doc """
  Fetches documentation for the given module if available.
  """
  @spec get_module_documentation(module()) :: documentation()
  def get_module_documentation(module) do
    case Code.fetch_docs(module) do
      {:docs_v1, _anno, _lang, format, %{"en" => content}, _meta, _docs} ->
        {format, content}

      {:docs_v1, _anno, _lang, _format, :hidden, _meta, _docs} ->
        :hidden

      _other ->
        nil
    end
  end

  @doc """
  Fetches information about the given module members if available.
  The given `members` restrict the result to the relevant entries.
  Arity may be given as `:any`, in which case all entries matching
  the name are returned.
  Functions with default arguments are expanded, so that every
  effective arity appears as a separate member sourcing documentation
  from the original definition.
  ## Options
    * `:kinds` - a list of member kinds to limit the lookup to.
      Valid kinds are `:function`, `:macro` and `:type`. Defaults
      to all kinds
  """
  @spec lookup_module_members(
          module(),
          list({name :: atom(), arity :: non_neg_integer() | :any}),
          keyword()
        ) :: list(member_info())
  def lookup_module_members(module, members, opts \\ []) do
    wanted = MapSet.new(members)
    kinds = opts[:kinds] || [:function, :macro, :type]
    specs = fetch_specs(module, kinds)

    case Code.fetch_docs(module) do
      {:docs_v1, _anno, _lang, format, _module_doc, _meta, docs} ->
        Enum.flat_map(docs, fn
          {{kind, name, base_arity}, _line, signatures, doc, meta} ->
            if kind in kinds do
              # Expand default arguments: an entry with `defaults: k` stands
              # for arities base_arity-k .. base_arity.
              defaults = Map.get(meta, :defaults, 0)

              for arity <- (base_arity - defaults)..base_arity,
                  wanted?(wanted, name, arity) do
                %{
                  kind: kind,
                  name: name,
                  arity: arity,
                  documentation: normalize_documentation(doc, format),
                  signatures: signatures,
                  specs: Map.get(specs, {name, base_arity}, []),
                  meta: meta
                }
              end
            else
              []
            end

          _other ->
            []
        end)

      _other ->
        []
    end
  end

  # Specs only matter for functions/macros; fall back to an empty map when the
  # chunk is missing.
  defp fetch_specs(module, kinds) do
    if :function in kinds or :macro in kinds do
      case Code.Typespec.fetch_specs(module) do
        {:ok, specs} -> Map.new(specs)
        _ -> %{}
      end
    else
      %{}
    end
  end

  defp wanted?(wanted, name, arity) do
    MapSet.member?(wanted, {name, arity}) or MapSet.member?(wanted, {name, :any})
  end

  defp normalize_documentation(%{"en" => content}, format), do: {format, content}
  defp normalize_documentation(:hidden, _format), do: :hidden
  defp normalize_documentation(_doc, _format), do: nil

  @doc """
  Determines a more specific module type if any.
  """
  @spec get_module_subtype(module) ::
          :protocol | :implementation | :exception | :struct | :behaviour | nil
  def get_module_subtype(module) do
    cond do
      not ensure_loaded?(module) -> nil
      function_exported?(module, :__protocol__, 1) -> :protocol
      function_exported?(module, :__impl__, 1) -> :implementation
      function_exported?(module, :__struct__, 0) -> struct_subtype(module)
      function_exported?(module, :behaviour_info, 1) -> :behaviour
      true -> nil
    end
  end

  # Structs that define exception/1 are exceptions, otherwise plain structs.
  defp struct_subtype(module) do
    if function_exported?(module, :exception, 1), do: :exception, else: :struct
  end

  # On case insensitive file systems, attempting to load Elixir will log
  # a warning in the terminal as it wrongly loads elixir.beam, so we
  # explicitly list it.
  defp ensure_loaded?(Elixir), do: false
  defp ensure_loaded?(module), do: Code.ensure_loaded?(module)
end
|
lib/livebook/intellisense/docs.ex
| 0.81812
| 0.459743
|
docs.ex
|
starcoder
|
defmodule ExAlgo.Number.Catalan do
  @moduledoc """
  Catalan numbers are a sequence of natural numbers that occurs in many interesting
  counting problems like counting the number of expressions containing n pairs
  of parentheses that are correctly matched, the number of possible Binary Search
  Trees with n keys, the number of full binary trees etc.
  Here we present multiple ways to get Catalan numbers.
  The first few Catalan numbers are: 1, 1, 2, 5, 14, 42, 132, 429, 1430, 4862
  To see a list of Catalan numbers to aid in testing, visit:
  https://www.mymathtables.com/numbers/first-hundred-catalan-number-table.html
  """

  @doc """
  Recursive implementation of catalan number.
  NOTE: This is SLOW (exponential time); intended as a reference implementation.
  ## Example
      iex> 0..10 |> Enum.map(&Catalan.nth_recur/1)
      [1, 1, 2, 5, 14, 42, 132, 429, 1430, 4862, 16796]
  """
  @spec nth_recur(non_neg_integer()) :: non_neg_integer()
  def nth_recur(n) when n <= 1, do: 1
  def nth_recur(n) when n > 1, do: do_recur(n, 0, 0)

  # Folds the Catalan recurrence C(n) = sum_{i=0..n-1} C(i) * C(n-i-1);
  # terminates when the iterator reaches `n`.
  defp do_recur(n, n, catalan), do: catalan

  defp do_recur(n, iter, catalan) do
    do_recur(
      n,
      iter + 1,
      catalan + nth_recur(iter) * nth_recur(n - iter - 1)
    )
  end

  @doc """
  DP based implementation of catalan numbers.
  ## Example
      iex> 0..10 |> Enum.map(&Catalan.nth_dp/1)
      [1, 1, 2, 5, 14, 42, 132, 429, 1430, 4862, 16796]
      iex> Catalan.nth_dp(100)
      896_519_947_090_131_496_687_170_070_074_100_632_420_837_521_538_745_909_320
  """
  @spec nth_dp(non_neg_integer()) :: non_neg_integer()
  def nth_dp(n), do: n |> as_map() |> Map.get(n)

  @doc """
  Dynamic programming implementation of catalan numbers that returns a map of
  the catalan numbers indexed from 0 up to `n` (inclusive).
  ## Example
      iex> Catalan.as_map(10)
      %{
        0 => 1,
        1 => 1,
        2 => 2,
        3 => 5,
        4 => 14,
        5 => 42,
        6 => 132,
        7 => 429,
        8 => 1430,
        9 => 4862,
        10 => 16796
      }
  """
  @spec as_map(non_neg_integer()) :: map()
  # BUGFIX: `as_map(1)` previously returned `%{1 => 1}`, omitting the `0 => 1`
  # entry and contradicting the documented "0 up to n" contract (`nth_dp/1`
  # was unaffected since it only reads key `n`).
  def as_map(0), do: %{0 => 1}
  def as_map(1), do: %{0 => 1, 1 => 1}

  def as_map(n) when n > 1 do
    # Fill the table bottom-up: for each i, C(i) = sum_j C(j) * C(i-j-1).
    2..n
    |> Enum.flat_map(fn i -> Enum.map(0..(i - 1), &{i, &1}) end)
    |> Enum.reduce(init_catalans(n), fn {i, j}, acc ->
      %{acc | i => acc[i] + acc[j] * acc[i - j - 1]}
    end)
  end

  # Seeds the DP table with base cases C(0) = C(1) = 1 and zeros elsewhere.
  defp init_catalans(limit) do
    Map.new(0..limit, fn value -> {value, if(value > 1, do: 0, else: 1)} end)
  end
end
|
lib/ex_algo/number/catalan.ex
| 0.841435
| 0.745885
|
catalan.ex
|
starcoder
|
defmodule Pond.Acc do
import Pond
# Sentinel values: @idle marks an accumulator that has received no input yet;
# @halt is the message `value/1` sends to a pond to ask for its final result.
# Namespaced tuples avoid collisions with user-supplied values.
@idle {__MODULE__, :idle}
@halt {__MODULE__, :halt}
@moduledoc ~S"""
Functions for accumulating state.
State accumulators are useful when combined with
`Pond.Next` for piping while preserving previous
invocations state.
For example, piping our hello world example and
accumulating its values into a list:
iex> f = pond(:hello, fn
...> pond, state = :hello ->
...> {state, pond.(:world)}
...> pond, state ->
...> {state, pond.(state)}
...> end)
...>
...> f
...> |> Acc.into(Acc.list())
...> |> next()
...> |> next()
...> |> Acc.value()
[:hello, :world]
`Pond.Next` can use this module's functions in order
to accumulate state for functions that follow
the convention of returning `{value, next_fun}`.
This module provides some accumulators for common
cases. However, if your function return a different
structure you can easily use these as reference to
build your own.
Accumulators are themselves just *pond*s. So they
can be used independently. For example:
iex> f = Acc.list()
...> f = f.(:hello)
...> f = f.(:world)
...> Acc.value(f)
[:hello, :world]
"""
@type pond :: (... -> term())
@type acc :: (term() -> term())
@type acc_pond :: (term() -> acc())
@type acc_and_pond :: {acc_pond(), pond()}
@type reducer :: (term(), term() -> term())
@doc ~S"""
Combines a function and an accumulator in a tuple as expected by `Pond.Next`.
iex> f = pond(:hello, fn
...> pond, state = :hello ->
...> {state, pond.(:world)}
...> pond, state ->
...> {state, pond.(state)}
...> end)
...>
...> assert {acc, ^f} = f |> Acc.into(Acc.list())
...> assert acc == Acc.list()
...> assert is_function(acc, 1)
true
"""
@spec into(pond :: pond(), acc :: acc_pond()) :: acc_and_pond()
def into(pond, acc) do
{acc, pond}
end
@doc ~S"""
Extracts the current value from the accumulator.
Normally, this will be the last step of a pipe, in order
to extract the accumulated state.
See this module doc.
"""
@spec value(acc() | acc_and_pond()) :: term()
def value(acc)
# Accepts either the `{acc, pond}` tuple built by `into/2`...
def value({acc, _pond}) when is_function(acc, 1) do
acc.(@halt)
end
# ...or a bare accumulator function; both are asked for their result via @halt.
def value(acc) when is_function(acc, 1) do
acc.(@halt)
end
@doc ~S"""
Creates a new list accumulator.
Extracting value from this accumulator
returns a list of all values yield to it.
See this module doc.
"""
@spec list() :: acc_pond()
def list do
pond(@idle, fn
# Halted before receiving anything: empty list.
_pond, @idle, @halt ->
[]
# Halted: values were prepended, so reverse to restore arrival order.
_pond, state, @halt ->
state |> Enum.reverse()
# First value starts the list.
pond, @idle, state ->
pond.([state])
# Subsequent values are prepended (O(1) per value).
pond, acc, state ->
pond.([state | acc])
end)
end
@doc ~S"""
Creates an accumulator that stores only the
last value given to it.
iex> f = pond(:hello, fn
...> pond, state = :hello ->
...> {state, pond.(:world)}
...> pond, state ->
...> {state, pond.(state)}
...> end)
...>
...> f
...> |> Acc.into(Acc.last())
...> |> next()
...> |> next()
...> |> Acc.value()
:world
"""
@spec last() :: acc_pond()
def last do
pond(@idle, fn
# Halted before receiving anything: no value to return.
_pond, @idle, @halt ->
nil
# Halted: return the most recent value.
_pond, state, @halt ->
state
# Any new value simply replaces the previous one.
pond, _state, value ->
pond.(value)
end)
end
@doc ~S"""
Creates an accumulator that reduces state
iex> f = pond(:hello, fn
...> pond, state = :hello ->
...> {state, pond.(:world)}
...> pond, state ->
...> {state, pond.(state)}
...> end)
...>
...> f
...> |> Acc.into(Acc.reduce(&"#{&1} #{&2}"))
...> |> next()
...> |> next()
...> |> Acc.value()
"hello world"
"""
@spec reduce(reducer()) :: acc_pond()
def reduce(reducer) do
pond(@idle, fn
# Halted before receiving anything: no value to return.
_pond, @idle, @halt ->
nil
# Halted: return the reduced value.
_pond, state, @halt ->
state
# First value becomes the initial accumulator (like Enum.reduce/2).
pond, @idle, value ->
pond.(value)
# Fold each subsequent value into the accumulator.
pond, acc, value ->
pond.(reducer.(acc, value))
end)
end
@doc ~S"""
Creates an accumulator that reduces state
starting with an initial value.
iex> f = pond(:hello, fn
...> pond, state = :hello ->
...> {state, pond.(:world)}
...> pond, state ->
...> {state, pond.(state)}
...> end)
...>
...> f
...> |> Acc.into(Acc.reduce(&"#{&1} #{&2}", "yay"))
...> |> next()
...> |> next()
...> |> Acc.value()
"yay hello world"
"""
@spec reduce(reducer(), initial_value :: term()) :: acc_pond()
def reduce(reducer, initial_value) do
# With an explicit seed there is no @idle phase (like Enum.reduce/3).
pond(initial_value, fn
_pond, state, @halt ->
state
pond, acc, value ->
pond.(reducer.(acc, value))
end)
end
end
|
lib/pond/acc.ex
| 0.68616
| 0.577227
|
acc.ex
|
starcoder
|
defmodule Stripe.Orders do
  @moduledoc """
  Main API for working with Orders at Stripe. Through this API you can:
  - create orders
  - pay, retrieve and update orders
  - list and count orders
  - delete a single order or all orders
  Supports the Connect workflow by allowing any API key to be passed in
  explicitly (vs using the one from env/config).
  (API ref: https://stripe.com/docs/api/curl#order_object)
  """

  @endpoint "orders"

  @doc """
  Creates an Order with the given parameters - all of which are optional.
  ## Example
  ```
  new_order = [
    currency: "usd",
    email: "<EMAIL>",
    description: "An Test Account",
    metadata:[
      app_order_id: "ABC123"
      app_state_x: "xyz"
    ],
    items: [
      [
        type: "sku",
        parent: "sku_8rFqplprEgXbUJ"
      ]
    ]
  ]
  {:ok, res} = Stripe.Orders.create new_order
  ```
  """
  def create(params) do
    create(params, Stripe.config_or_env_key)
  end

  @doc """
  Creates a new Order with the given parameters - all of which are optional.
  Using a given stripe key to apply against the account associated.
  ## Example
  ```
  {:ok, res} = Stripe.Orders.create new_order, key
  ```
  """
  def create(params, key) do
    Stripe.make_request_with_key(:post, @endpoint, key, params)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Pays the Order with the given ID, using the configured key.
  """
  def pay(id, params) do
    pay(id, params, Stripe.config_or_env_key)
  end

  @doc """
  Pays the Order with the given ID.
  Using a given stripe key to apply against the account associated.
  """
  def pay(id, params, key) do
    Stripe.make_request_with_key(:post, "#{@endpoint}/#{id}/pay", key, params)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Retrieves a given Order with the specified ID. Returns 404 if not found.
  ## Example
  ```
  {:ok, order} = Stripe.Orders.get "order_id"
  ```
  """
  def get(id) do
    get(id, Stripe.config_or_env_key)
  end

  @doc """
  Retrieves a given Order with the specified ID. Returns 404 if not found.
  Using a given stripe key to apply against the account associated.
  ## Example
  ```
  {:ok, order} = Stripe.Orders.get "order_id", key
  ```
  """
  def get(id, key) do
    Stripe.make_request_with_key(:get, "#{@endpoint}/#{id}", key)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Updates an Order with the given parameters - all of which are optional.
  ## Example
  ```
  new_fields = [
    email: "<EMAIL>",
  ]
  {:ok, res} = Stripe.Orders.update(order_id, new_fields)
  ```
  """
  def update(order_id, params) do
    update(order_id, params, Stripe.config_or_env_key)
  end

  @doc """
  Updates an Order with the given parameters - all of which are optional.
  Using a given stripe key to apply against the account associated.
  ## Example
  ```
  {:ok, res} = Stripe.Orders.update(order_id, new_fields, key)
  ```
  """
  def update(order_id, params, key) do
    Stripe.make_request_with_key(:post, "#{@endpoint}/#{order_id}", key, params)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Returns a list of Orders starting after the given order id, with a default
  limit of 10 which you can override with `list/2`.
  ## Example
  ```
  {:ok, orders} = Stripe.Orders.list(starting_after, 20)
  ```
  """
  def list(starting_after, limit \\ 10) do
    # BUGFIX: `starting_after` was previously discarded (an empty string was
    # always passed down), so pagination never advanced.
    list(Stripe.config_or_env_key, starting_after, limit)
  end

  @doc """
  Returns a list of Orders starting after the given order id.
  Using a given stripe key to apply against the account associated.
  ## Example
  ```
  {:ok, orders} = Stripe.Orders.list(key, starting_after, 20)
  ```
  """
  def list(key, starting_after, limit) do
    Stripe.Util.list(@endpoint, key, starting_after, limit)
  end

  @doc """
  Deletes an Order with the specified ID
  ## Example
  ```
  {:ok, resp} = Stripe.Orders.delete "order_id"
  ```
  """
  def delete(id) do
    delete(id, Stripe.config_or_env_key)
  end

  @doc """
  Deletes an Order with the specified ID
  Using a given stripe key to apply against the account associated.
  ## Example
  ```
  {:ok, resp} = Stripe.Orders.delete "order_id", key
  ```
  """
  def delete(id, key) do
    Stripe.make_request_with_key(:delete, "#{@endpoint}/#{id}", key)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Deletes all Orders
  ## Example
  ```
  Stripe.Orders.delete_all
  ```
  """
  def delete_all do
    delete_all(Stripe.config_or_env_key)
  end

  @doc """
  Deletes all Orders
  Using a given stripe key to apply against the account associated.
  ## Example
  ```
  Stripe.Orders.delete_all key
  ```
  """
  def delete_all(key) do
    # BUGFIX: previously the order list was fetched with the default key even
    # when an explicit `key` was supplied; listing and deleting now share `key`.
    case all(key, [], "") do
      {:ok, orders} ->
        Enum.each(orders, fn order -> delete(order["id"], key) end)

      {:error, err} ->
        raise err
    end
  end

  # Page size used when walking the full collection in `all/3`.
  @max_fetch_size 100

  @doc """
  List all orders.
  ##Example
  ```
  {:ok, orders} = Stripe.Orders.all
  ```
  """
  def all(accum \\ [], starting_after \\ "") do
    all(Stripe.config_or_env_key, accum, starting_after)
  end

  @doc """
  List all orders, following pagination via `starting_after` and accumulating
  pages into `accum`.
  Using a given stripe key to apply against the account associated.
  ##Example
  ```
  {:ok, orders} = Stripe.Orders.all key, accum, starting_after
  ```
  """
  def all(key, accum, starting_after) do
    case Stripe.Util.list_raw("#{@endpoint}", key, @max_fetch_size, starting_after) do
      {:ok, resp} ->
        case resp[:has_more] do
          true ->
            # More pages: recurse, cursoring after the last order of this page.
            last_order = List.last(resp[:data])
            all(key, resp[:data] ++ accum, last_order["id"])

          false ->
            {:ok, resp[:data] ++ accum}
        end

      {:error, err} ->
        raise err
    end
  end

  @doc """
  Count total number of orders.
  ## Example
  ```
  {:ok, count} = Stripe.Orders.count
  ```
  """
  def count do
    count(Stripe.config_or_env_key)
  end

  @doc """
  Count total number of orders.
  Using a given stripe key to apply against the account associated.
  ## Example
  ```
  {:ok, count} = Stripe.Orders.count key
  ```
  """
  def count(key) do
    Stripe.Util.count("#{@endpoint}", key)
  end
end
|
lib/stripe/orders.ex
| 0.842378
| 0.778186
|
orders.ex
|
starcoder
|
defmodule Cizen.Automaton do
@moduledoc """
A saga framework to create an automaton.
Handle requests from `Saga.call/2` and `Saga.cast/2`:
case perform(%Receive{}) do
%Automaton.Cast{request: {:push, item}} ->
[item | state]
%Automaton.Call{request: :pop, from: from} ->
[head | tail] = state
Saga.reply(from, head)
tail
end
"""
alias Cizen.Dispatcher
alias Cizen.EffectHandler
alias Cizen.Saga
alias Cizen.Automaton.{PerformEffect, Yield}
defmodule Call do
@moduledoc """
An event for `Saga.call/2`
"""
@keys [:request, :from]
@enforce_keys @keys
defstruct @keys
end
defmodule Cast do
@moduledoc """
An event for `Saga.cast/2`
"""
@keys [:request]
@enforce_keys @keys
defstruct @keys
end
# Sentinel state: callbacks return this (via `finish/0`) to stop the automaton;
# `do_yield/3` matches on it to dispatch Saga.Finish.
@finish {__MODULE__, :finish}
def finish, do: @finish
@type finish :: {__MODULE__, :finish}
@type state :: term
@doc """
Invoked when the automaton is spawned.
Saga.Started event will be dispatched after this callback.
Returned value will be used as the next state to pass `c:yield/1` callback.
Returning `Automaton.finish()` will cause the automaton to finish.
If not defined, default implementation is used,
and it passes the given saga struct to `c:yield/1` callback.
"""
@callback spawn(Saga.t()) :: finish | state
@doc """
Invoked when other callbacks returns a next state.
Returned value will be used as the next state to pass `c:yield/1` callback.
Returning `Automaton.finish()` will cause the automaton to finish.
If not defined, default implementation is used,
and it returns `Automaton.finish()`.
"""
@callback yield(state) :: finish | state
@doc """
Invoked when the automaton is resumed.
Returned value will be used as the next state to pass `c:yield/1` callback.
Returning `Automaton.finish()` will cause the automaton to finish.
This callback is predefined. The default implementation is here:
```
def respawn(saga, state) do
spawn(saga)
state
end
```
"""
@callback respawn(Saga.t(), state) :: finish | state
# Process-dictionary key (in the saga process) holding the pid of the Task
# that runs the yield loop; used by `handle_result/1` to deliver effect values.
@automaton_pid_key :"$cizen.automaton.automaton_pid"
# Injects default callback implementations and wires the Saga callbacks to
# this module's dispatch functions.
defmacro __using__(_opts) do
quote do
alias Cizen.Automaton
import Cizen.Automaton, only: [perform: 1, finish: 0]
require Cizen.Pattern
use Saga
@behaviour Automaton
@impl Automaton
def spawn(struct) do
struct
end
@impl Automaton
def respawn(saga, state) do
__MODULE__.spawn(saga)
state
end
@impl Automaton
def yield(_state) do
finish()
end
defoverridable spawn: 1, respawn: 2, yield: 1
@impl Saga
def on_start(struct) do
id = Saga.self()
Automaton.start(id, struct)
end
@impl Saga
def on_resume(struct, state) do
id = Saga.self()
Automaton.resume(id, struct, state)
end
@impl Saga
def handle_event(event, state) do
Automaton.handle_event(event, state)
end
@impl Saga
def handle_call(message, from, state) do
Automaton.handle_call(message, from, state)
end
@impl Saga
def handle_cast(message, state) do
Automaton.handle_cast(message, state)
end
end
end
@doc """
Performs an effect.
`perform/1` blocks the current block until the effect is resolved,
and returns the result of the effect.
Note that `perform/1` does not work only on the current process.
"""
def perform(effect) do
event = %PerformEffect{effect: effect}
Saga.send_to(Saga.self(), event)
# Blocks this (Task) process until `handle_result/1` sends the resolved
# value straight into its mailbox.
receive do
response -> response
end
end
# Yield loop: announce each state via a Yield event, invoke the callback for
# the next state, and dispatch Saga.Finish upon reaching the finish sentinel.
defp do_yield(module, id, state) do
Dispatcher.dispatch(%Yield{saga_id: id, state: state})
case state do
@finish ->
Dispatcher.dispatch(%Saga.Finish{saga_id: id})
state ->
state = module.yield(state)
do_yield(module, id, state)
end
end
def start(id, saga) do
init_with(id, saga, %Saga.Started{saga_id: id}, :spawn, [saga])
end
def resume(id, saga, state) do
init_with(id, saga, %Saga.Resumed{saga_id: id}, :respawn, [saga, state])
end
# Runs spawn/respawn plus the yield loop in a linked Task so `perform/1` can
# block without blocking the saga process; the Task pid is stashed in the
# process dictionary for `handle_result/1`.
defp init_with(id, saga, event, function, arguments) do
module = Saga.module(saga)
{:ok, pid} =
Task.start_link(fn ->
Process.put(Saga.saga_id_key(), id)
try do
state = apply(module, function, arguments)
Dispatcher.dispatch(event)
do_yield(module, id, state)
rescue
reason -> Saga.exit(id, reason, __STACKTRACE__)
end
end)
Process.put(@automaton_pid_key, pid)
handler_state = EffectHandler.init(id)
{Saga.lazy_init(), handler_state}
end
# A PerformEffect event (sent by `perform/1`) kicks off effect resolution.
def handle_event(%PerformEffect{effect: effect}, handler) do
handle_result(EffectHandler.perform_effect(handler, effect))
end
# All other events are fed into the effect handler.
def handle_event(event, state) do
feed_event(state, event)
end
def handle_call(request, from, handler) do
feed_event(handler, %Call{request: request, from: from})
end
def handle_cast(request, handler) do
feed_event(handler, %Cast{request: request})
end
defp feed_event(handler, event) do
handle_result(EffectHandler.feed_event(handler, event))
end
# When an effect resolves, forward its value to the blocked automaton Task
# (see `perform/1`); otherwise just keep the updated handler state.
defp handle_result({:resolve, value, state}) do
pid = Process.get(@automaton_pid_key)
send(pid, value)
state
end
defp handle_result(state), do: state
end
|
lib/cizen/automaton.ex
| 0.88225
| 0.718224
|
automaton.ex
|
starcoder
|
defmodule SanbaseWeb.Graphql.DocumentProvider do
@moduledoc ~s"""
Custom Absinthe DocumentProvider for more effective caching.
Absinthe phases have one main difference compared to plugs - all phases must run
and cannot be halted. But phases can be jumped over by returning
`{:jump, result, destination_phase}`
This module makes use of 2 new phases - a `CacheDocument` phase and `Idempotent`
phase.
If the value is present in the cache it is put in the blueprint and the execution
jumps to the Idempotent phase, effectively skipping the Absinthe's Resolution
and Result phases. Result is the last phase in the pipeline so the Idempotent
phase is inserted after it.
If the value is not present in the cache, the Absinthe's default Resolution and
Result phases are being executed and the new DocumentCache and Idempotent phases
are doing nothing.
In the end there's a `before_send` hook that adds the result into the cache.
"""
@behaviour Absinthe.Plug.DocumentProvider
# NOTE(review): this alias appears unused within this module (the cache is
# accessed from the CacheDocument phase, which has its own alias).
alias SanbaseWeb.Graphql.Cache
@doc false
@impl true
# Extends the default pipeline with the three custom phases described in the
# moduledoc: complexity preprocessing, cache lookup, and the jump target.
def pipeline(%Absinthe.Plug.Request.Query{pipeline: pipeline}) do
pipeline
# Complexity preprocessing must run before Absinthe's own analysis.
|> Absinthe.Pipeline.insert_before(
Absinthe.Phase.Document.Complexity.Analysis,
SanbaseWeb.Graphql.Phase.Document.Complexity.Preprocess
)
# The cache lookup runs right before Resolution, so a hit can skip it.
|> Absinthe.Pipeline.insert_before(
Absinthe.Phase.Document.Execution.Resolution,
SanbaseWeb.Graphql.Phase.Document.Execution.CacheDocument
)
# Idempotent is the jump target, placed after Result (the final phase).
|> Absinthe.Pipeline.insert_after(
Absinthe.Phase.Document.Result,
SanbaseWeb.Graphql.Phase.Document.Execution.Idempotent
)
end
@doc false
@impl true
# No parsed document yet -> pass the query on to the next document provider;
# otherwise this provider takes over.
def process(%Absinthe.Plug.Request.Query{document: nil} = query, _), do: {:cont, query}
def process(%Absinthe.Plug.Request.Query{document: _} = query, _), do: {:halt, query}
end
defmodule SanbaseWeb.Graphql.Phase.Document.Execution.CacheDocument do
@moduledoc ~s"""
Custom phase for obtaining the result from cache.
In case the value is not present in the cache, the default Resolution and Result
phases are ran. Otherwise the custom Resolution phase is ran and Result is jumped
over.
When calculating the cache key only some of the fields in the whole blueprint are
taken into account. They are defined in the module attribute @cache_fields
The only values that are converted to something else during constructing
of the cache key are:
- DateTime - It is rounded by TTL so all datetiems in a range yield the same cache key
- Struct - All structs are converted to plain maps
"""
use Absinthe.Phase
alias SanbaseWeb.Graphql.Cache
@compile inline: [add_cache_key_to_blueprint: 2, queries_in_request: 1]
# Names of queries eligible for caching; resolved once at compile time.
@cached_queries SanbaseWeb.Graphql.AbsintheBeforeSend.cached_queries()
@spec run(Absinthe.Blueprint.t(), Keyword.t()) :: Absinthe.Phase.result_t()
def run(bp_root, _) do
queries_in_request = queries_in_request(bp_root)
# Skip the cache machinery entirely unless the request contains at least
# one cacheable query.
case Enum.any?(queries_in_request, &(&1 in @cached_queries)) do
false ->
{:ok, bp_root}
true ->
context = bp_root.execution.context
# Add keys that can affect the data the user can have access to
additional_keys_hash =
{context.permissions, context.product_id, context.auth.subscription, context.auth.plan,
context.auth.auth_method}
|> Sanbase.Cache.hash()
# The ttl/max_ttl_offset might be rewritten in case `caching_params`
# are provided. The rewriting happens in the absinthe before_send function
cache_key =
SanbaseWeb.Graphql.Cache.cache_key(
{"bp_root", additional_keys_hash},
sanitize_blueprint(bp_root),
ttl: 120,
max_ttl_offset: 120
)
bp_root = add_cache_key_to_blueprint(bp_root, cache_key)
case Cache.get(cache_key) do
nil ->
{:ok, bp_root}
result ->
# Storing it again `touch`es it and the TTL timer is restarted.
# This can lead to infinite storing the same value
Process.put(:do_not_cache_query, true)
{:jump, %{bp_root | result: result},
SanbaseWeb.Graphql.Phase.Document.Execution.Idempotent}
end
end
end
# Private functions
# Camelized names of the top-level query selections in the request.
defp queries_in_request(%{operations: operations}) do
operations
|> Enum.flat_map(fn %{selections: selections} ->
selections
|> Enum.map(fn %{name: name} -> Inflex.camelize(name, :lower) end)
end)
end
# Stores the computed cache key in the execution context so the before_send
# hook can later store the result under the same key.
defp add_cache_key_to_blueprint(
%{execution: %{context: context} = execution} = blueprint,
cache_key
) do
%{
blueprint
| execution: %{execution | context: Map.put(context, :query_cache_key, cache_key)}
}
end
# Leave only the fields that are needed to generate the cache key
# This let's us cache with values that are interpolated into the query string itself
# The datetimes are rounded so all datetimes in a bucket generate the same
# cache key
# NOTE(review): the moduledoc says DateTime is "rounded by TTL", but this
# clause passes it through unchanged — presumably the rounding happens inside
# Cache.cache_key/3; confirm.
defp sanitize_blueprint(%DateTime{} = dt), do: dt
# Queries whose base is a watchlist must not be cached (watchlist contents
# can change independently of the query arguments).
defp sanitize_blueprint(
{:argument_data, %{function: %{"args" => %{"baseProjects" => base_projects}}}} = tuple
) do
has_watchlist_base? =
Enum.any?(base_projects, fn elem ->
match?(%{"watchlistId" => _}, elem) or match?(%{"watchlistSlug" => _}, elem)
end)
has_watchlist_base? && Process.put(:do_not_cache_query, true)
tuple
end
defp sanitize_blueprint({:argument_data, _} = tuple), do: tuple
defp sanitize_blueprint({a, b}), do: {a, sanitize_blueprint(b)}
@cache_fields [
:name,
:argument_data,
:selection_set,
:selections,
:fragments,
:operations,
:alias
]
defp sanitize_blueprint(map) when is_map(map) do
Map.take(map, @cache_fields)
|> Enum.map(&sanitize_blueprint/1)
|> Map.new()
end
defp sanitize_blueprint(list) when is_list(list) do
Enum.map(list, &sanitize_blueprint/1)
end
defp sanitize_blueprint(data), do: data
end
defmodule SanbaseWeb.Graphql.Phase.Document.Execution.Idempotent do
  @moduledoc ~s"""
  A no-op phase placed right after Absinthe's Result phase.
  It exists solely as the jump target for the `CacheDocument` phase: on a
  cache hit execution lands here directly, so Absinthe's Resolution and
  Result phases are skipped.
  """
  use Absinthe.Phase

  @spec run(Absinthe.Blueprint.t(), Keyword.t()) :: Absinthe.Phase.result_t()
  def run(blueprint, _options) do
    {:ok, blueprint}
  end
end
defmodule SanbaseWeb.Graphql.Phase.Document.Complexity.Preprocess do
@moduledoc false
use Absinthe.Phase
@spec run(Absinthe.Blueprint.t(), Keyword.t()) :: Absinthe.Phase.result_t()
# Collects the metric names used by getMetric queries in the request and
# stashes them in the process dictionary so the subsequent complexity
# analysis can consume them. The blueprint itself is returned unchanged.
def run(bp_root, _) do
metrics =
bp_root.operations
|> Enum.flat_map(fn %{selections: selections} ->
selections_to_metrics(selections)
end)
case metrics do
[_ | _] = metrics -> Process.put(:__metric_name_from_get_metric_api__, metrics)
_ -> :ok
end
{:ok, bp_root}
end
# Maps top-level selections to metric names; only `getMetric` selections
# (matched by their underscored name) contribute.
defp selections_to_metrics(selections) do
selections
|> Enum.flat_map(fn
%{name: name, argument_data: %{metric: metric}} = struct ->
case Inflex.underscore(name) do
"get_metric" ->
get_metric_selections_to_metrics(struct.selections, metric)
_ ->
[]
end
_ ->
[]
end)
end
defp get_metric_selections_to_metrics(selections, metric) do
selections =
Enum.map(selections, fn
%{name: name} -> name |> Inflex.underscore()
_ -> nil
end)
|> Enum.reject(&is_nil/1)
# Put the metric name in the list 0, 1 or 2 times, depending
# on the selections. `timeseries_data` and `aggregated_timeseries_data`
# would go through the complexity code once, removing the metric
# name from the list both times - so it has to be there twice, while
# `timeseries_data_complexity` won't go through that path.
# `histogram_data` does not have complexity checks right now.
# This is equivalent to X -- (X -- Y) because the `--` operator
# has right to left associativity
common_parts = selections -- selections -- ["timeseries_data", "aggregated_timeseries_data"]
Enum.map(common_parts, fn _ -> metric end)
end
end
|
lib/sanbase_web/graphql/document/document_provider.ex
| 0.866613
| 0.461381
|
document_provider.ex
|
starcoder
|
defmodule DataTracer.SupervisorUtils do
@moduledoc """
Helpers for inspecting a running supervision tree: reading a supervisor's
restart settings, locating the chain of supervisors above a pid, and
estimating how many crashes a pid can absorb.
"""
@doc """
Get the restart settings for a supervisor
Example return value: `%{max_restarts: 3, max_seconds: 5}`
"""
def restart_settings(supervisor_pid) do
supervisor_state = :sys.get_state(supervisor_pid)
# NOTE(review): relies on the internal layout of the :supervisor state
# record (elements 5 and 6 being intensity/period) — this is an OTP
# implementation detail and may shift between OTP releases; confirm for
# the targeted OTP version.
max_restarts = supervisor_state |> elem(5)
max_seconds = supervisor_state |> elem(6)
%{max_restarts: max_restarts, max_seconds: max_seconds}
end
@doc """
Find the chain of parent superivors for a given process
Example return value: `{:ok, [#PID<0.275.0>, #PID<0.276.0>, #PID<0.283.0>]}`
Returns `:error` if unable to find the supervisors for the process
"""
def find_supervisors(application, pid) do
top_level_supervisor = get_supervisor(application)
# The recursive walk accumulates parents innermost-first, hence the reverse.
case find_supervisors(top_level_supervisor, pid, []) do
nil -> :error
supervisors -> {:ok, Enum.reverse(supervisors)}
end
end
# Depth-first search through the supervision tree; returns the parent chain
# (innermost supervisor first) or nil when `pid` is not found in this subtree.
defp find_supervisors(supervisor, pid, parents) do
Supervisor.which_children(supervisor)
|> Enum.find_value(fn
# Found the pid
{_, ^pid, _, _} ->
[supervisor | parents]
# Recurse down a supervisor
{_id, child_pid, :supervisor, _modules} ->
case find_supervisors(child_pid, pid, [supervisor | parents]) do
nil -> nil
results -> results
end
{_id, _child_pid, _type, _modules} ->
nil
end)
end
# Uses Erlang internals to find the top-level supervisor of an application
# NOTE(review): :application_controller / :application_master are
# undocumented internals — verify on OTP upgrades.
defp get_supervisor(application) do
{pid, _name} =
:application_controller.get_master(application)
|> :application_master.get_child()
pid
end
@doc """
Find the max number of crashes that the given PID can have before it brings
down the entire application.
NOTE: Currently assumes all the crashes happen at the same time (i.e.
`max_seconds` is ignored)
"""
def max_crashes(application, pid) do
# Each supervisor level multiplies the budget: a child may crash
# max_restarts times plus the final one that escalates to its parent.
with {:ok, supervisors} <- find_supervisors(application, pid),
restart_settings = Enum.map(supervisors, &restart_settings/1) do
Enum.reduce(restart_settings, 1, fn restart_settings, acc ->
num_crashes = restart_settings.max_restarts + 1
acc * num_crashes
end)
end
end
end
|
lib/data_tracer/supervisor_utils.ex
| 0.695545
| 0.498901
|
supervisor_utils.ex
|
starcoder
|
defmodule Ash.Query.Aggregate do
@moduledoc "Represents an aggregated association value"
defstruct [
:name,
:relationship_path,
:default_value,
:resource,
:query,
:field,
:kind,
:type,
:authorization_filter,
:load,
filterable?: true
]
@type t :: %__MODULE__{}
@kinds [:count, :first, :sum, :list]
@type kind :: unquote(Enum.reduce(@kinds, &{:|, [], [&1, &2]}))
alias Ash.Actions.Load
alias Ash.Engine.Request
alias Ash.Error.Query.{NoReadAction, NoSuchRelationship}
require Ash.Query
@doc false
def kinds, do: @kinds
def new(resource, name, kind, relationship, query, field, default \\ nil, filterable? \\ true) do
field_type =
if field do
related = Ash.Resource.Info.related(resource, relationship)
Ash.Resource.Info.attribute(related, field).type
end
with :ok <- validate_path(resource, List.wrap(relationship)),
{:ok, type} <- kind_to_type(kind, field_type),
{:ok, query} <- validate_query(query) do
{:ok,
%__MODULE__{
name: name,
resource: resource,
default_value: default || default_value(kind),
relationship_path: List.wrap(relationship),
field: field,
kind: kind,
type: type,
query: query,
filterable?: filterable?
}}
end
end
defp validate_path(_, []), do: :ok
defp validate_path(resource, [relationship | rest]) do
case Ash.Resource.Info.relationship(resource, relationship) do
nil ->
{:error, NoSuchRelationship.exception(resource: resource, name: relationship)}
%{type: :many_to_many, through: through, destination: destination} ->
cond do
!Ash.Resource.Info.primary_action(through, :read) ->
{:error, NoReadAction.exception(resource: through, when: "aggregating")}
!Ash.Resource.Info.primary_action(destination, :read) ->
{:error, NoReadAction.exception(resource: destination, when: "aggregating")}
!Ash.DataLayer.data_layer(through) == Ash.DataLayer.data_layer(resource) ->
{:error, "Cannot cross data layer boundaries when building an aggregate"}
true ->
validate_path(destination, rest)
end
relationship ->
cond do
!Ash.Resource.Info.primary_action(relationship.destination, :read) ->
NoReadAction.exception(resource: relationship.destination, when: "aggregating")
!Ash.DataLayer.data_layer(relationship.destination) ==
Ash.DataLayer.data_layer(resource) ->
{:error, "Cannot cross data layer boundaries when building an aggregate"}
true ->
validate_path(relationship.destination, rest)
end
end
end
# Default value an aggregate yields when no related rows exist, by kind:
# counts start at zero, lists start empty, first/sum have no natural default.
def default_value(:count), do: 0
def default_value(:list), do: []
def default_value(kind) when kind in [:first, :sum], do: nil
# An aggregate query may filter and sort, but loading, limiting, and
# offsetting are meaningless inside an aggregate, so reject them up front.
defp validate_query(nil), do: {:ok, nil}

defp validate_query(%{load: [_ | _]}),
  do: {:error, "Cannot load in an aggregate"}

defp validate_query(%{limit: limit}) when not is_nil(limit),
  do: {:error, "Cannot limit an aggregate (for now)"}

defp validate_query(%{offset: offset}) when not is_nil(offset) and offset != 0,
  do: {:error, "Cannot offset an aggregate (for now)"}

defp validate_query(query), do: {:ok, query}
@doc false
# Maps an aggregate kind (plus the aggregated field's type, when required)
# to the Ash type of the aggregate's result.
def kind_to_type(:count, _field_type), do: {:ok, Ash.Type.Integer}
def kind_to_type(kind, nil), do: {:error, "Must provide field type for #{kind}"}
def kind_to_type(:first, field_type), do: {:ok, field_type}
def kind_to_type(:sum, field_type), do: {:ok, field_type}
def kind_to_type(:list, field_type), do: {:ok, {:array, field_type}}
def kind_to_type(kind, _field_type), do: {:error, "Invalid aggregate kind: #{kind}"}
# Builds the engine requests required to load aggregates for `initial_query`.
#
# Returns `{auth_requests, value_requests, aggregates_in_query}`:
#   * `auth_requests`  - strict-check-only requests (only when `authorizing?`)
#   * `value_requests` - requests that fetch aggregate values out-of-band
#   * `aggregates_in_query` - aggregates the data layer can compute inline
def requests(initial_query, can_be_in_query?, authorizing?, calculations_in_query, request_path) do
  initial_query.aggregates
  |> Map.values()
  |> Enum.map(&{{&1.resource, &1.relationship_path, []}, &1})
  |> Enum.concat(aggregates_from_filter(initial_query))
  # Group by {resource, relationship_path, ref_path} and de-duplicate by name
  # within each group so the same aggregate is only fetched once.
  |> Enum.group_by(&elem(&1, 0))
  |> Enum.map(fn {key, value} ->
    {key, Enum.uniq_by(Enum.map(value, &elem(&1, 1)), & &1.name)}
  end)
  |> Enum.uniq()
  |> Enum.reduce({[], [], []}, fn {{aggregate_resource, relationship_path, ref_path},
                                   aggregates},
                                  {auth_requests, value_requests, aggregates_in_query} ->
    related = Ash.Resource.Info.related(aggregate_resource, relationship_path)

    relationship =
      Ash.Resource.Info.relationship(
        aggregate_resource,
        List.first(relationship_path)
      )

    # Resolve every hop after the first into its relationship struct so the
    # path can be reversed below.
    path_for_checking =
      relationship_path
      |> tl()
      |> Enum.reduce({[], relationship.destination}, fn rel, {path, resource} ->
        relationship = Ash.Resource.Info.relationship(resource, rel)
        {[relationship | path], relationship.destination}
      end)
      |> elem(0)
      |> Enum.reverse()

    # An aggregate may run inline only when directly referenced (empty
    # ref_path), allowed by the caller, and (when reversible) actually used
    # by the query's filter/sort/calculations.
    {in_query?, reverse_relationship} =
      case Load.reverse_relationship_path(relationship, path_for_checking) do
        :error ->
          {ref_path == [] && can_be_in_query?, nil}

        {:ok, reverse_relationship} ->
          {ref_path == [] && can_be_in_query? &&
             any_aggregate_matching_path_used_in_query?(
               initial_query,
               relationship_path,
               calculations_in_query
             ), reverse_relationship}
      end

    auth_request =
      if authorizing? do
        auth_request(
          related,
          initial_query,
          reverse_relationship,
          ref_path ++ relationship_path,
          request_path
        )
      else
        nil
      end

    new_auth_requests =
      if auth_request do
        [auth_request | auth_requests]
      else
        auth_requests
      end

    if in_query? do
      {new_auth_requests, value_requests, aggregates_in_query ++ aggregates}
    else
      # Only directly-referenced aggregates (empty ref_path) get a dedicated
      # value request; filter-referenced ones are handled elsewhere.
      if ref_path == [] do
        request =
          value_request(
            initial_query,
            related,
            reverse_relationship,
            relationship_path,
            aggregates,
            auth_request,
            aggregate_resource,
            request_path
          )

        {new_auth_requests, [request | value_requests], aggregates_in_query}
      else
        {new_auth_requests, value_requests, aggregates_in_query}
      end
    end
  end)
end
# Collects aggregates referenced by the query's filter — both directly and
# indirectly through calculations whose expressions use aggregates.
#
# Each entry is `{{resource, relationship_path, ref_path}, aggregate}`,
# matching the grouping key used by `requests/5`.
defp aggregates_from_filter(query) do
  aggs =
    query.filter
    |> Ash.Filter.used_aggregates(:all, true)
    |> Enum.reject(&(&1.relationship_path == []))
    |> Enum.map(fn ref ->
      {{ref.resource, ref.attribute.relationship_path, ref.attribute.relationship_path},
       ref.attribute}
    end)

  calculations =
    query.filter
    |> Ash.Filter.used_calculations(query.resource)
    |> Enum.flat_map(fn calculation ->
      expression = calculation.module.expression(calculation.opts, calculation.context)

      # Hydrate the calculation expression so aggregate references inside it
      # can be extracted; on failure the calculation contributes nothing.
      case Ash.Filter.hydrate_refs(expression, %{
             resource: query.resource,
             aggregates: query.aggregates,
             calculations: query.calculations,
             relationship_path: [],
             public?: false
           }) do
        {:ok, expression} ->
          Ash.Filter.used_aggregates(expression)

        _ ->
          []
      end
    end)
    |> Enum.map(fn aggregate ->
      {{query.resource, aggregate.relationship_path, []}, aggregate}
    end)

  # De-duplicate across both sources by aggregate name.
  Enum.uniq_by(aggs ++ calculations, &elem(&1, 1).name)
end
# Builds a strict-check-only request used to authorize reading the related
# resource an aggregate traverses. `data: []` because nothing is actually
# fetched by this request; it exists to produce an authorization filter.
defp auth_request(related, initial_query, reverse_relationship, relationship_path, request_path) do
  Request.new(
    resource: related,
    api: initial_query.api,
    async?: false,
    query: aggregate_query(related, reverse_relationship, request_path),
    path: request_path ++ [:aggregate, relationship_path],
    strict_check_only?: true,
    action: Ash.Resource.Info.primary_action(related, :read),
    name: "authorize aggregate: #{Enum.join(relationship_path, ".")}",
    data: []
  )
end
# Builds the request that fetches aggregate values for the records returned
# by the main `[:fetch, :data]` request.
#
# The resolver re-queries the aggregate resource filtered down to the fetched
# records' primary keys, attaches the aggregates at the data layer, and
# returns `{:ok, %{primary_key_map => %{aggregate_name => value}}}`.
defp value_request(
       initial_query,
       related,
       reverse_relationship,
       relationship_path,
       aggregates,
       auth_request,
       aggregate_resource,
       request_path
     ) do
  pkey = Ash.Resource.Info.primary_key(aggregate_resource)

  # Depend on the fetched data, plus the authorization filter when present.
  deps =
    if auth_request do
      [auth_request.path ++ [:authorization_filter], request_path ++ [:fetch, :data]]
    else
      [request_path ++ [:fetch, :data]]
    end

  Request.new(
    resource: aggregate_resource,
    api: initial_query.api,
    query: aggregate_query(related, reverse_relationship, request_path),
    path: request_path ++ [:aggregate_values, relationship_path],
    action: Ash.Resource.Info.primary_action(aggregate_resource, :read),
    name: "fetch aggregate: #{Enum.join(relationship_path, ".")}",
    data:
      Request.resolve(
        deps,
        fn data ->
          records = get_in(data, request_path ++ [:fetch, :data, :results])

          if records == [] do
            # Nothing fetched, so there are no aggregate values to compute.
            {:ok, %{}}
          else
            # Strip anything that would change which rows the aggregates
            # run over; we re-filter by primary key below.
            initial_query =
              Ash.Query.unset(initial_query, [:filter, :sort, :aggregates, :limit, :offset])

            # Filter to exactly the fetched records by primary key (single
            # record: plain keyword filter; multiple: an `or` of pkey maps).
            query =
              case records do
                [record] ->
                  filter = record |> Map.take(pkey) |> Enum.to_list()

                  Ash.Query.filter(
                    initial_query,
                    ^filter
                  )

                records ->
                  filter = [or: Enum.map(records, &Map.take(&1, pkey))]

                  Ash.Query.filter(
                    initial_query,
                    ^filter
                  )
              end

            # Scope each aggregate's query by the authorization filter, if any.
            aggregates =
              if auth_request do
                case get_in(data, auth_request.path ++ [:authorization_filter]) do
                  nil ->
                    aggregates

                  filter ->
                    Enum.map(aggregates, fn aggregate ->
                      %{
                        aggregate
                        | query: Ash.Query.filter(aggregate.query, ^filter)
                      }
                    end)
                end
              else
                aggregates
              end

            with {:ok, data_layer_query} <- Ash.Query.data_layer_query(query),
                 {:ok, data_layer_query} <-
                   add_data_layer_aggregates(
                     data_layer_query,
                     aggregates,
                     initial_query.resource
                   ),
                 {:ok, results} <-
                   Ash.DataLayer.run_query(
                     data_layer_query,
                     query.resource
                   ) do
              # Aggregate values may surface either as loaded fields on the
              # record or inside its `aggregates` map; merge both, with
              # loaded fields taking precedence.
              loaded_aggregates =
                aggregates
                |> Enum.map(& &1.load)
                |> Enum.reject(&is_nil/1)

              all_aggregates = Enum.map(aggregates, & &1.name)

              aggregate_values =
                Enum.reduce(results, %{}, fn result, acc ->
                  loaded_aggregate_values = Map.take(result, loaded_aggregates)

                  all_aggregate_values =
                    result.aggregates
                    |> Kernel.||(%{})
                    |> Map.take(all_aggregates)
                    |> Map.merge(loaded_aggregate_values)

                  Map.put(
                    acc,
                    Map.take(result, pkey),
                    all_aggregate_values
                  )
                end)

              {:ok, aggregate_values}
            else
              {:error, error} ->
                {:error, error}
            end
          end
        end
      )
  )
end
# Thin indirection over the data layer callback; kept as a named helper so
# the `with` chain in `value_request/8` reads cleanly.
defp add_data_layer_aggregates(data_layer_query, aggregates, aggregate_resource) do
  Ash.DataLayer.add_aggregates(data_layer_query, aggregates, aggregate_resource)
end
# Resolves the query used by auth/value requests. When a reverse relationship
# path exists, the main query's filter is re-rooted onto the related resource
# via that path; otherwise the fetched query is used as-is.
defp aggregate_query(resource, reverse_relationship, request_path) do
  Request.resolve(
    [request_path ++ [:fetch, :query]],
    fn data ->
      data_query = get_in(data, request_path ++ [:fetch, :query])

      if reverse_relationship do
        filter =
          Ash.Filter.put_at_path(
            data_query.filter,
            reverse_relationship
          )

        {:ok, Ash.Query.filter(resource, ^filter)}
      else
        {:ok, data_query}
      end
    end
  )
end
# True if any aggregate with the given relationship path is referenced by the
# query's filter, by its (expression-backed) calculations, or by its sort —
# i.e. whether the aggregate must be computed inside the query itself.
defp any_aggregate_matching_path_used_in_query?(query, relationship_path, calculations_in_query) do
  filter_aggregates =
    if query.filter do
      Ash.Filter.used_aggregates(query.filter)
    else
      []
    end

  used_calculations =
    Ash.Filter.used_calculations(
      query.filter,
      query.resource
    ) ++ calculations_in_query

  # Expand expression-backed calculations and collect the aggregates their
  # expressions reference.
  calculation_aggregates =
    used_calculations
    |> Enum.filter(&:erlang.function_exported(&1.module, :expression, 2))
    |> Enum.flat_map(fn calculation ->
      case Ash.Filter.hydrate_refs(
             calculation.module.expression(calculation.opts, calculation.context),
             %{
               resource: query.resource,
               aggregates: query.aggregates,
               calculations: query.calculations,
               relationship_path: [],
               public?: false
             }
           ) do
        {:ok, hydrated} ->
          Ash.Filter.used_aggregates(hydrated)

        _ ->
          []
      end
    end)

  if Enum.any?(
       filter_aggregates ++ calculation_aggregates,
       &(&1.relationship_path == relationship_path)
     ) do
    true
  else
    # Not used by filter/calculations; fall back to inspecting the sort.
    sort_aggregates =
      Enum.flat_map(query.sort, fn {field, _} ->
        case Map.fetch(query.aggregates, field) do
          :error ->
            []

          {:ok, agg} ->
            [agg]
        end
      end)

    sort_calculations =
      Enum.flat_map(query.sort, fn
        {%Ash.Query.Calculation{} = calc, _} ->
          [calc]

        {field, _} ->
          case Map.fetch(query.calculations, field) do
            :error ->
              []

            {:ok, calc} ->
              [calc]
          end
      end)

    sort_calc_aggregates =
      sort_calculations
      |> Enum.filter(&:erlang.function_exported(&1.module, :expression, 2))
      |> Enum.flat_map(fn calculation ->
        # NOTE(review): unlike the hydrate_refs context built earlier in this
        # function, this one omits `relationship_path: []` — confirm whether
        # that difference is intentional or an oversight.
        case Ash.Filter.hydrate_refs(
               calculation.module.expression(calculation.opts, calculation.context),
               %{
                 resource: query.resource,
                 aggregates: query.aggregates,
                 calculations: query.calculations,
                 public?: false
               }
             ) do
          {:ok, hydrated} ->
            Ash.Filter.used_aggregates(hydrated)

          _ ->
            []
        end
      end)

    Enum.any?(
      sort_aggregates ++ sort_calc_aggregates,
      &(&1.relationship_path == relationship_path)
    )
  end
end
defimpl Inspect do
  import Inspect.Algebra

  # Renders e.g. `#count<relationship.path>` when the aggregate has no query.
  def inspect(%{query: nil} = aggregate, opts) do
    container_doc(
      "#" <> to_string(aggregate.kind) <> "<",
      [Enum.join(aggregate.relationship_path, ".")],
      ">",
      opts,
      fn str, _ -> str end,
      separator: ""
    )
  end

  # Renders e.g. `#sum<relationship.path.field from #Ash.Query<...>>`.
  def inspect(%{query: query} = aggregate, opts) do
    # Only append the field segment when one is set.
    field =
      if aggregate.field do
        [aggregate.field]
      else
        []
      end

    container_doc(
      "#" <> to_string(aggregate.kind) <> "<",
      [
        concat([
          Enum.join(aggregate.relationship_path ++ field, "."),
          concat(" from ", to_doc(query, opts))
        ])
      ],
      ">",
      opts,
      fn str, _ -> str end
    )
  end
end
end
|
lib/ash/query/aggregate.ex
| 0.761893
| 0.45641
|
aggregate.ex
|
starcoder
|
defmodule Chunky.Sequence.OEIS.Sigma do
@moduledoc """
OEIS Sequences for Sigma values.
Some sigma sequences are in the `Sequence.OEIS.Core` module.
## Available Sequences
### Sigma_M of integers
Sequences of `sigma_M(n)` of integers:
- `create_sequence_a001158/1` - A001158 - Sum of cubes of divisors of N, sigma-3(n)
- `create_sequence_a001159/1` - A001159 - sum of 4th powers of divisors of n, sigma-4(n)
- `create_sequence_a001160/1` - A001160 - sum of 5th powers of divisors of n, sigma-5(n)
- `create_sequence_a013954/1` - A013954 - sum of 6th powers of divisors of n, sigma-6(n)
- `create_sequence_a013955/1` - A013955 - sum of 7th powers of divisors of n, sigma-7(n)
- `create_sequence_a013956/1` - A013956 - sum of 8th powers of divisors of n, sigma-8(n)
- `create_sequence_a013957/1` - A013957 - sum of 9th powers of divisors of n, sigma-9(n)
- `create_sequence_a013958/1` - A013958 - sum of 10th powers of divisors of n, sigma-10(n)
- `create_sequence_a013959/1` - A013959 - sum of 11th powers of divisors of n, sigma-11(n)
- `create_sequence_a013960/1` - A013960 - sum of 12th powers of divisors of n, sigma-12(n)
- `create_sequence_a013961/1` - A013961 - sum of 13th powers of divisors of n, sigma-13(n)
- `create_sequence_a013962/1` - A013962 - sum of 14th powers of divisors of n, sigma-14(n)
- `create_sequence_a013963/1` - A013963 - sum of 15th powers of divisors of n, sigma-15(n)
- `create_sequence_a013964/1` - A013964 - sum of 16th powers of divisors of n, sigma-16(n)
- `create_sequence_a013965/1` - A013965 - sum of 17th powers of divisors of n, sigma-17(n)
- `create_sequence_a013966/1` - A013966 - sum of 18th powers of divisors of n, sigma-18(n)
- `create_sequence_a013967/1` - A013967 - sum of 19th powers of divisors of n, sigma-19(n)
- `create_sequence_a013968/1` - A013968 - sum of 20th powers of divisors of n, sigma-20(n)
Variations on sums of divisors:
- `create_sequence_a002093/1` - A002093 - Highly Abundant Numbers
- `create_sequence_a003601/1` - A003601 - Arithmetic Numbers
"""
import Chunky.Sequence, only: [sequence_for_function: 1]
alias Chunky.Math
alias Chunky.Math.Predicates
@doc """
OEIS Sequence `A001158` - Sum of cubes of divisors of N, sigma-3(n), `𝝈3(n)`.
From [OEIS A001158](https://oeis.org/A001158):
> sigma_3(n): sum of cubes of divisors of n.
> (Formerly M4605 N1964)
**Sequence IDs**: `:a001158`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a001158) |> Sequence.take!(10)
[1, 9, 28, 73, 126, 252, 344, 585, 757, 1134]
"""
@doc offset: 1,
sequence: "Sum of Cubes of Divisors of N",
references: [{:oeis, :a001158, "https://oeis.org/A001158"}]
def create_sequence_a001158(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a001158/1)
end
@doc false
@doc offset: 1
def seq_a001158(idx) do
Math.sigma(idx, 3)
end
@doc """
OEIS Sequence `A001159` - sum of 4th powers of divisors of n, sigma-4(n), `𝝈4(n)`.
From [OEIS A001159](https://oeis.org/A001159):
> sigma_4(n): sum of 4th powers of divisors of n.
> (Formerly M5041 N2177)
**Sequence IDs**: `:a001159`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a001159) |> Sequence.take!(10)
[1, 17, 82, 273, 626, 1394, 2402, 4369, 6643, 10642]
"""
@doc offset: 1,
sequence: "Sum of 4th powers of Divisors of N",
references: [{:oeis, :a001159, "https://oeis.org/A001159"}]
def create_sequence_a001159(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a001159/1)
end
@doc false
@doc offset: 1
def seq_a001159(idx) do
Math.sigma(idx, 4)
end
@doc """
OEIS Sequence `A001160` - sum of 5th powers of divisors of n, sigma-5(n), `𝝈5(n)`.
From [OEIS A001160](https://oeis.org/A001160):
> sigma_5(n): sum of 5th powers of divisors of n.
> (Formerly M5240 N2279)
**Sequence IDs**: `:a001160`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a001160) |> Sequence.take!(10)
[1, 33, 244, 1057, 3126, 8052, 16808, 33825, 59293, 103158]
"""
@doc offset: 1,
sequence: "Sum of 5th powers of Divisors of N",
references: [{:oeis, :a001160, "https://oeis.org/A001160"}]
def create_sequence_a001160(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a001160/1)
end
@doc false
@doc offset: 1
def seq_a001160(idx) do
Math.sigma(idx, 5)
end
@doc """
OEIS Sequence `A002093` - Highly Abundant Numbers
From [OEIS A002093](https://oeis.org/A002093):
> Highly abundant numbers: numbers n such that sigma(n) > sigma(m) for all m < n.
> (Formerly M0553 N0200)
**Sequence IDs**: `:a002093`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a002093) |> Sequence.take!(25)
[1, 2, 3, 4, 6, 8, 10, 12, 16, 18, 20, 24, 30, 36, 42, 48, 60, 72, 84, 90, 96, 108, 120, 144, 168]
"""
@doc offset: 1,
     sequence: "Highly abundant numbers",
     references: [
       {:oeis, :a002093, "https://oeis.org/A002093"},
       {:wikipedia, :highly_abundant_number,
        "https://en.wikipedia.org/wiki/Highly_abundant_number"}
     ]
def create_sequence_a002093(_opts) do
  # Hand-rolled sequence map (rather than sequence_for_function/1) because
  # this sequence must carry state: the largest sigma value seen so far.
  %{
    next_fn: &seq_a002093/3,
    data: %{
      sigma_max: 0
    }
  }
end
@doc false
# Iterator init: start scanning from 0 with sigma_max carried in `data`.
def seq_a002093(:init, data, _value) do
  %{
    data: data,
    value: 0
  }
end

@doc false
def seq_a002093(:next, data, value) do
  # find the next number after value that has a sigma greater than sigma max
  s_m = data.sigma_max
  s_n = seq_a002093_greater_sigma(s_m, value + 1)
  # Record the new record-holder's sigma so the next step compares against it.
  next_sigma_max = Math.sigma(s_n)

  {
    :continue,
    %{
      data: data |> Map.put(:sigma_max, next_sigma_max),
      value: s_n
    }
  }
end

# Linear scan upward from `val` for the first integer whose sigma exceeds
# `sig_max`.
defp seq_a002093_greater_sigma(sig_max, val) do
  if Math.sigma(val) > sig_max do
    val
  else
    seq_a002093_greater_sigma(sig_max, val + 1)
  end
end
@doc """
OEIS Sequence `A003601` - Arithmetic Numbers
From [OEIS A003601](https://oeis.org/A003601):
> Numbers n such that the average of the divisors of n is an integer: sigma_0(n) divides sigma_1(n). Alternatively, tau(n) (A000005(n)) divides sigma(n) (A000203(n)).
> (Formerly M2389)
**Sequence IDs**: `:a003601`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a003601) |> Sequence.take!(10)
[1, 3, 5, 6, 7, 11, 13, 14, 15, 17]
"""
@doc offset: 1,
     sequence: "Numbers n such that the average of the divisors of n is an integer",
     references: [{:oeis, :a003601, "https://oeis.org/A003601"}]
def create_sequence_a003601(_opts) do
  sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a003601/2)
end

@doc false
@doc offset: 1
# Arity-2 sequence function: receives the previous value and produces the
# next arithmetic number after it.
def seq_a003601(_idx, last) do
  next_seq_a003601(last)
end

# Linear scan for the first arithmetic number strictly greater than `last`.
defp next_seq_a003601(last) do
  if Predicates.is_arithmetic_number?(last + 1) do
    last + 1
  else
    next_seq_a003601(last + 1)
  end
end
@doc """
OEIS Sequence `A013954` - sum of 6th powers of divisors of n, sigma-6(n), `𝝈6(n)`.
From [OEIS A013954](https://oeis.org/A013954):
> sigma_6(n): sum of 6th powers of divisors of n.
**Sequence IDs**: `:a013954`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013954) |> Sequence.take!(10)
[1, 65, 730, 4161, 15626, 47450, 117650, 266305, 532171, 1015690]
"""
@doc offset: 1,
sequence: "Sum of 6th powers of Divisors of N",
references: [{:oeis, :a013954, "https://oeis.org/A013954"}]
def create_sequence_a013954(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013954/1)
end
@doc false
@doc offset: 1
def seq_a013954(idx) do
Math.sigma(idx, 6)
end
@doc """
OEIS Sequence `A013955` - sum of 7th powers of divisors of n, sigma-7(n), `𝝈7(n)`.
From [OEIS A013955](https://oeis.org/A013955):
> sigma_7(n): sum of 7th powers of divisors of n.
**Sequence IDs**: `:a013955`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013955) |> Sequence.take!(10)
[1, 129, 2188, 16513, 78126, 282252, 823544, 2113665, 4785157, 10078254]
"""
@doc offset: 1,
sequence: "Sum of 7th powers of Divisors of N",
references: [{:oeis, :a013955, "https://oeis.org/A013955"}]
def create_sequence_a013955(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013955/1)
end
@doc false
@doc offset: 1
def seq_a013955(idx) do
Math.sigma(idx, 7)
end
@doc """
OEIS Sequence `A013956` - sum of 8th powers of divisors of n, sigma-8(n), `𝝈8(n)`.
From [OEIS A013956](https://oeis.org/A013956):
> sigma_8(n): sum of 8th powers of divisors of n.
**Sequence IDs**: `:a013956`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013956) |> Sequence.take!(10)
[1, 257, 6562, 65793, 390626, 1686434, 5764802, 16843009, 43053283, 100390882]
"""
@doc offset: 1,
sequence: "Sum of 8th powers of Divisors of N",
references: [{:oeis, :a013956, "https://oeis.org/A013956"}]
def create_sequence_a013956(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013956/1)
end
@doc false
@doc offset: 1
def seq_a013956(idx) do
Math.sigma(idx, 8)
end
@doc """
OEIS Sequence `A013957` - sum of 9th powers of divisors of n, sigma-9(n), `𝝈9(n)`.
From [OEIS A013957](https://oeis.org/A013957):
> sigma_9(n): sum of 9th powers of divisors of n.
**Sequence IDs**: `:a013957`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013957) |> Sequence.take!(10)
[1, 513, 19684, 262657, 1953126, 10097892, 40353608, 134480385, 387440173, 1001953638]
"""
@doc offset: 1,
sequence: "Sum of 9th powers of Divisors of N",
references: [{:oeis, :a013957, "https://oeis.org/A013957"}]
def create_sequence_a013957(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013957/1)
end
@doc false
@doc offset: 1
def seq_a013957(idx) do
Math.sigma(idx, 9)
end
@doc """
OEIS Sequence `A013958` - sum of 10th powers of divisors of n, sigma-10(n), `𝝈10(n)`.
From [OEIS A013958](https://oeis.org/A013958):
> sigma_10(n): sum of 10th powers of divisors of n.
**Sequence IDs**: `:a013958`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013958) |> Sequence.take!(10)
[1, 1025, 59050, 1049601, 9765626, 60526250, 282475250, 1074791425, 3486843451, 10009766650]
"""
@doc offset: 1,
sequence: "Sum of 10th powers of Divisors of N",
references: [{:oeis, :a013958, "https://oeis.org/A013958"}]
def create_sequence_a013958(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013958/1)
end
@doc false
@doc offset: 1
def seq_a013958(idx) do
Math.sigma(idx, 10)
end
@doc """
OEIS Sequence `A013959` - sum of 11th powers of divisors of n, sigma-11(n), `𝝈11(n)`.
From [OEIS A013959](https://oeis.org/A013959):
> sigma_11(n): sum of 11th powers of divisors of n.
**Sequence IDs**: `:a013959`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013959) |> Sequence.take!(10)
[1, 2049, 177148, 4196353, 48828126, 362976252, 1977326744, 8594130945, 31381236757, 100048830174]
"""
@doc offset: 1,
sequence: "Sum of 11th powers of Divisors of N",
references: [{:oeis, :a013959, "https://oeis.org/A013959"}]
def create_sequence_a013959(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013959/1)
end
@doc false
@doc offset: 1
def seq_a013959(idx) do
Math.sigma(idx, 11)
end
@doc """
OEIS Sequence `A013960` - sum of 12th powers of divisors of n, sigma-12(n), `𝝈12(n)`.
From [OEIS A013960](https://oeis.org/A013960):
> sigma_12(n): sum of 12th powers of divisors of n.
**Sequence IDs**: `:a013960`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013960) |> Sequence.take!(10)
[1, 4097, 531442, 16781313, 244140626, 2177317874, 13841287202, 68736258049, 282430067923, 1000244144722]
"""
@doc offset: 1,
sequence: "Sum of 12th powers of Divisors of N",
references: [{:oeis, :a013960, "https://oeis.org/A013960"}]
def create_sequence_a013960(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013960/1)
end
@doc false
@doc offset: 1
def seq_a013960(idx) do
Math.sigma(idx, 12)
end
@doc """
OEIS Sequence `A013961` - sum of 13th powers of divisors of n, sigma-13(n), `𝝈13(n)`.
From [OEIS A013961](https://oeis.org/A013961):
> sigma_13(n): sum of 13th powers of divisors of n.
**Sequence IDs**: `:a013961`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013961) |> Sequence.take!(10)
[1, 8193, 1594324, 67117057, 1220703126, 13062296532, 96889010408, 549822930945, 2541867422653, 10001220711318]
"""
@doc offset: 1,
sequence: "Sum of 13th powers of Divisors of N",
references: [{:oeis, :a013961, "https://oeis.org/A013961"}]
def create_sequence_a013961(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013961/1)
end
@doc false
@doc offset: 1
def seq_a013961(idx) do
Math.sigma(idx, 13)
end
@doc """
OEIS Sequence `A013962` - sum of 14th powers of divisors of n, sigma-14(n), `𝝈14(n)`.
From [OEIS A013962](https://oeis.org/A013962):
> sigma_14(n): sum of 14th powers of divisors of n.
**Sequence IDs**: `:a013962`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013962) |> Sequence.take!(10)
[1, 16385, 4782970, 268451841, 6103515626, 78368963450, 678223072850, 4398314962945, 22876797237931, 100006103532010]
"""
@doc offset: 1,
sequence: "Sum of 14th powers of Divisors of N",
references: [{:oeis, :a013962, "https://oeis.org/A013962"}]
def create_sequence_a013962(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013962/1)
end
@doc false
@doc offset: 1
def seq_a013962(idx) do
Math.sigma(idx, 14)
end
@doc """
OEIS Sequence `A013963` - sum of 15th powers of divisors of n, sigma-15(n), `𝝈15(n)`.
From [OEIS A013963](https://oeis.org/A013963):
> sigma_15(n): sum of 15th powers of divisors of n.
**Sequence IDs**: `:a013963`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013963) |> Sequence.take!(10)
[1, 32769, 14348908, 1073774593, 30517578126, 470199366252, 4747561509944, 35185445863425, 205891146443557, 1000030517610894]
"""
@doc offset: 1,
sequence: "Sum of 15th powers of Divisors of N",
references: [{:oeis, :a013963, "https://oeis.org/A013963"}]
def create_sequence_a013963(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013963/1)
end
@doc false
@doc offset: 1
def seq_a013963(idx) do
Math.sigma(idx, 15)
end
@doc """
OEIS Sequence `A013964` - sum of 16th powers of divisors of n, sigma-16(n), `𝝈16(n)`.
From [OEIS A013964](https://oeis.org/A013964):
> sigma_16(n): sum of 16th powers of divisors of n.
**Sequence IDs**: `:a013964`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013964) |> Sequence.take!(10)
[1, 65537, 43046722, 4295032833, 152587890626, 2821153019714, 33232930569602, 281479271743489, 1853020231898563, 10000152587956162]
"""
@doc offset: 1,
sequence: "Sum of 16th powers of Divisors of N",
references: [{:oeis, :a013964, "https://oeis.org/A013964"}]
def create_sequence_a013964(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013964/1)
end
@doc false
@doc offset: 1
def seq_a013964(idx) do
Math.sigma(idx, 16)
end
@doc """
OEIS Sequence `A013965` - sum of 17th powers of divisors of n, sigma-17(n), `𝝈17(n)`.
From [OEIS A013965](https://oeis.org/A013965):
> sigma_17(n): sum of 17th powers of divisors of n.
**Sequence IDs**: `:a013965`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013965) |> Sequence.take!(10)
[1, 131073, 129140164, 17180000257, 762939453126, 16926788715972, 232630513987208, 2251816993685505, 16677181828806733, 100000762939584198]
"""
@doc offset: 1,
sequence: "Sum of 17th powers of Divisors of N",
references: [{:oeis, :a013965, "https://oeis.org/A013965"}]
def create_sequence_a013965(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013965/1)
end
@doc false
@doc offset: 1
def seq_a013965(idx) do
Math.sigma(idx, 17)
end
@doc """
OEIS Sequence `A013966` - sum of 18th powers of divisors of n, sigma-18(n), `𝝈18(n)`.
From [OEIS A013966](https://oeis.org/A013966):
> sigma_18(n): sum of 18th powers of divisors of n.
**Sequence IDs**: `:a013966`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013966) |> Sequence.take!(10)
[1, 262145, 387420490, 68719738881, 3814697265626, 101560344351050, 1628413597910450, 18014467229220865, 150094635684419611, 1000003814697527770]
"""
@doc offset: 1,
sequence: "Sum of 18th powers of Divisors of N",
references: [{:oeis, :a013966, "https://oeis.org/A013966"}]
def create_sequence_a013966(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013966/1)
end
@doc false
@doc offset: 1
def seq_a013966(idx) do
Math.sigma(idx, 18)
end
@doc """
OEIS Sequence `A013967` - sum of 19th powers of divisors of n, sigma-19(n), `𝝈19(n)`.
From [OEIS A013967](https://oeis.org/A013967):
> sigma_19(n): sum of 19th powers of divisors of n.
**Sequence IDs**: `:a013967`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013967) |> Sequence.take!(10)
[1, 524289, 1162261468, 274878431233, 19073486328126, 609360902796252, 11398895185373144, 144115462954287105, 1350851718835253557, 10000019073486852414]
"""
@doc offset: 1,
sequence: "Sum of 19th powers of Divisors of N",
references: [{:oeis, :a013967, "https://oeis.org/A013967"}]
def create_sequence_a013967(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013967/1)
end
@doc false
@doc offset: 1
def seq_a013967(idx) do
Math.sigma(idx, 19)
end
@doc """
OEIS Sequence `A013968` - sum of 20th powers of divisors of n, sigma-20(n), `𝝈20(n)`.
From [OEIS A013968](https://oeis.org/A013968):
> sigma_20(n): sum of 20th powers of divisors of n.
**Sequence IDs**: `:a013968`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Sequence.OEIS.Sigma, :a013968) |> Sequence.take!(10)
[1, 1048577, 3486784402, 1099512676353, 95367431640626, 3656161927895954, 79792266297612002, 1152922604119523329, 12157665462543713203, 100000095367432689202]
"""
@doc offset: 1,
sequence: "Sum of 20th powers of Divisors of N",
references: [{:oeis, :a013968, "https://oeis.org/A013968"}]
def create_sequence_a013968(_opts) do
sequence_for_function(&Chunky.Sequence.OEIS.Sigma.seq_a013968/1)
end
@doc false
@doc offset: 1
def seq_a013968(idx) do
Math.sigma(idx, 20)
end
end
|
lib/sequence/oeis/sigma.ex
| 0.893979
| 0.801042
|
sigma.ex
|
starcoder
|
defmodule DBConnection.SojournError do
  @moduledoc false

  # Error raised/returned when the sbroker pool cannot satisfy a request.
  defexception [:message]

  @doc false
  def exception(message) do
    %__MODULE__{message: message}
  end
end
defmodule DBConnection.Sojourn do
  @moduledoc """
  A `DBConnection.Pool` using sbroker.

  ### Options

    * `:pool_size` - The number of connections (default: `10`)
    * `:pool_overflow` - The number of extra connections that can be created if
    required (default: `0`)
    * `:broker` - The `:sbroker` callback module (see `:sbroker`,
    default: `DBConnection.Sojourn.Broker`)
    * `:broker_start_opt` - Start options for the broker (see
    `:sbroker`, default: `[]`)
    * `:regulator` - The `:sregulator` callback module (see `:sregulator`,
    default: `DBConnection.Sojourn.Regulator`)
    * `:regulator_start_opt` - Start options for the regulator (see
    `:sregulator`, default; `[]`)
    * `:max_restarts` - the maximum amount of connection restarts allowed in a
    time frame (default `3`)
    * `:max_seconds` - the time frame in which `:max_restarts` applies (default
    `5`)
    * `:shutdown` - the shutdown strategy for connections (default `5_000`)

  All options are passed as the argument to the sbroker callback module. This
  pool overrides `:idle` to always be `:passive` and may not honour
  `:idle_timeout` if it tries to prevent the connection queue becoming too short
  or spreads out pings evenly.
  """

  @behaviour DBConnection.Pool

  # Default callback modules and the default checkout timeout (ms).
  # NOTE(review): @regulator is not referenced in this module's visible code;
  # presumably consumed by the Broker module — confirm before removing.
  @broker DBConnection.Sojourn.Broker
  @regulator DBConnection.Sojourn.Regulator
  @timeout 15_000

  import Supervisor.Spec

  @doc false
  def ensure_all_started(_opts, type) do
    Application.ensure_all_started(:sbroker, type)
  end

  @doc false
  def start_link(mod, opts) do
    apply(:sbroker, :start_link, broker_args(mod, opts))
  end

  @doc false
  def child_spec(mod, opts, child_opts \\ []) do
    worker(:sbroker, broker_args(mod, opts), child_opts ++ [modules: :dynamic])
  end

  @doc false
  def checkout(broker, opts) do
    case ask(broker, opts) do
      {:go, ref, {pid, mod, state}, _, _} ->
        {:ok, {pid, ref}, mod, state}

      :drop ->
        message = "connection not available and queuing is disabled"
        {:error, DBConnection.ConnectionError.exception(message)}

      {:drop, wait} ->
        # `wait` is reported by sbroker in native time units; convert to ms
        # for the error message.
        wait = :erlang.convert_time_unit(wait, :native, :milli_seconds)
        message = "connection not available " <>
          "and request was dropped from queue after #{wait}ms"
        {:error, DBConnection.ConnectionError.exception(message)}
    end
  end

  @doc false
  defdelegate checkin(ref, state, opts), to: DBConnection.Connection

  @doc false
  defdelegate disconnect(ref, err, state, opts), to: DBConnection.Connection

  @doc false
  defdelegate stop(ref, err, state, opts), to: DBConnection.Connection

  ## Helpers

  # Builds the argument list for :sbroker.start_link, registering a local or
  # custom name when `:name` is given.
  defp broker_args(mod, opts) do
    broker = Keyword.get(opts, :broker, @broker)
    start_opts = Keyword.get(opts, :broker_start_opt, [])
    args = [__MODULE__.Broker, {broker, mod, opts}, start_opts]

    case Keyword.get(opts, :name) do
      nil -> args
      name when is_atom(name) -> [{:local, name} | args]
      name -> [name | args]
    end
  end

  # Asks the broker for a connection, queueing unless `queue: false`.
  defp ask(broker, opts) do
    timeout = Keyword.get(opts, :timeout, @timeout)
    info = {self(), timeout}
    broker = via(broker, opts)

    case Keyword.get(opts, :queue, true) do
      true -> :sbroker.ask(broker, info)
      false -> nb_ask(broker, info)
    end
  end

  # Routes the ask through the sprotector overload protector unless disabled.
  defp via(broker, opts) do
    case Keyword.get(opts, :protector, true) do
      true -> {:via, :sprotector, {broker, :ask}}
      false -> broker
    end
  end

  # Non-blocking ask; normalizes the timed drop tuple to a bare :drop.
  defp nb_ask(broker, info) do
    case :sbroker.nb_ask(broker, info) do
      {:go, _, _, _, _} = go -> go
      {:drop, _} -> :drop
    end
  end
end
|
deps/db_connection/lib/db_connection/sojourn.ex
| 0.803868
| 0.436322
|
sojourn.ex
|
starcoder
|
defmodule Norm.Schema do
@moduledoc false
# Provides the definition for schemas
alias __MODULE__
defstruct specs: %{}, struct: nil
# Builds a schema. When given a struct, any key whose value does not
# implement `Norm.Conformer.Conformable` is dropped, so users can specify
# struct types without writing a spec for every field.
def build(%{__struct__: name} = struct) do
  specs =
    for {key, value} <- Map.from_struct(struct),
        not is_nil(Norm.Conformer.Conformable.impl_for(value)),
        into: %{} do
      {key, value}
    end

  %Schema{specs: specs, struct: name}
end

# Builds a schema from a plain map of key => spec.
def build(map) when is_map(map) do
  %Schema{specs: map}
end
# Returns the spec stored under `key`, or nil when the schema has no spec
# for that key.
def spec(schema, key) do
  case Enum.find(schema.specs, fn {name, _spec} -> name == key end) do
    {_name, spec} -> spec
    nil -> nil
  end
end
defimpl Norm.Conformer.Conformable do
  alias Norm.Conformer
  alias Norm.Conformer.Conformable

  # Schemas only conform maps (and structs, which are maps underneath).
  def conform(_, input, path) when not is_map(input) do
    {:error, [Conformer.error(path, input, "not a map")]}
  end

  # Conforming a struct
  def conform(%{specs: specs, struct: target}, input, path) when not is_nil(target) do
    # Ensure we're mapping the correct struct
    cond do
      Map.get(input, :__struct__) != target ->
        short_name =
          target
          |> Atom.to_string()
          |> String.replace("Elixir.", "")

        {:error, [Conformer.error(path, input, "#{short_name}")]}

      true ->
        # Conform the underlying map, then rebuild the target struct from
        # the conformed values.
        with {:ok, conformed} <- check_specs(specs, Map.from_struct(input), path) do
          {:ok, struct(target, conformed)}
        end
    end
  end

  # conforming a map.
  def conform(%Norm.Schema{specs: specs}, input, path) do
    # A schema without a target struct still accepts struct input; the
    # result is rebuilt as the input's own struct type.
    if Map.get(input, :__struct__) != nil do
      with {:ok, conformed} <- check_specs(specs, Map.from_struct(input), path) do
        {:ok, struct(input.__struct__, conformed)}
      end
    else
      check_specs(specs, input, path)
    end
  end

  # Runs each input key through its spec (if any) and partitions outcomes
  # into :ok/:error buckets; any error fails the whole conform with every
  # collected error.
  defp check_specs(specs, input, path) do
    results =
      input
      |> Enum.map(&check_spec(&1, specs, path))
      |> Enum.reduce(%{ok: [], error: []}, fn {key, {result, conformed}}, acc ->
        Map.put(acc, result, acc[result] ++ [{key, conformed}])
      end)

    errors =
      results.error
      |> Enum.flat_map(fn {_, error} -> error end)

    if Enum.any?(errors) do
      {:error, errors}
    else
      {:ok, Enum.into(results.ok, %{})}
    end
  end

  # Keys without a spec pass through unchanged.
  defp check_spec({key, value}, specs, path) do
    case Map.get(specs, key) do
      nil ->
        {key, {:ok, value}}

      spec ->
        {key, Conformable.conform(spec, value, path ++ [key])}
    end
  end
end
if Code.ensure_loaded?(StreamData) do
defimpl Norm.Generatable do
alias Norm.Generatable
def gen(%{struct: target, specs: specs}) do
case Enum.reduce(specs, %{}, &to_gen/2) do
{:error, error} ->
{:error, error}
generator ->
to_streamdata(generator, target)
end
end
defp to_streamdata(generator, nil) do
{:ok, StreamData.fixed_map(generator)}
end
defp to_streamdata(generator, target) do
sd =
generator
|> StreamData.fixed_map()
|> StreamData.bind(fn map -> StreamData.constant(struct(target, map)) end)
{:ok, sd}
end
def to_gen(_, {:error, error}), do: {:error, error}
def to_gen({key, spec}, generator) do
case Generatable.gen(spec) do
{:ok, g} ->
Map.put(generator, key, g)
{:error, error} ->
{:error, error}
end
end
end
end
defimpl Inspect do
import Inspect.Algebra
def inspect(schema, opts) do
map = if schema.struct do
struct(schema.struct, schema.specs)
else
schema.specs
end
concat(["#Norm.Schema<", to_doc(map, opts), ">"])
end
end
end
|
lib/norm/schema.ex
| 0.78316
| 0.489259
|
schema.ex
|
starcoder
|
defmodule JUnitFormatter do
  @moduledoc """
  An `ExUnit.Formatter` implementation that generates a XML in the format understood by JUnit.
  To accomplish this, there are some mappings that are not straight one to one.
  Therefore, here goes the mapping:
  - JUnit - `ExUnit`
  - Testsuites - :testsuite
  - Testsuite - `ExUnit.Case`
  - failures = failures
  - skipped = skip
  - errors = invalid
  - time = (sum of all times in seconds rounded down)
  - Testcase - `ExUnit.Test`
  - name = :case
  - test = :test
  - content (only if not successful)
  - skipped = {:state, {:skip, _}}
  - failed = {:state, {:failed, {_, reason, stacktrace}}}
  - reason = reason.message
  - content = `Exception.format_stacktrace/1`
  - error = {:invalid, module}
  The report is written to a file in the `_build` directory.
  """
  require Record
  use GenServer

  defmodule Stats do
    @moduledoc """
    A struct to keep track of test values and tests themselves.
    It is used to build the testsuite JUnit node.
    """
    defstruct errors: 0,
              failures: 0,
              skipped: 0,
              tests: 0,
              time: 0,
              test_cases: []

    @type t :: %__MODULE__{
            errors: non_neg_integer,
            failures: non_neg_integer,
            skipped: non_neg_integer,
            tests: non_neg_integer,
            time: non_neg_integer,
            test_cases: [ExUnit.Test.t()]
          }
  end

  # Formatter state: per-case Stats plus report-level properties (seed/date).
  defstruct cases: %{}, properties: %{}

  @impl true
  def init(opts) do
    {:ok,
     %__MODULE__{
       properties: %{
         seed: opts[:seed],
         date: DateTime.to_iso8601(DateTime.utc_now())
       }
     }}
  end

  @impl true
  def handle_cast({:suite_finished, _run_us, _load_us}, config) do
    # do the real magic
    suites = Enum.map(config.cases, &generate_testsuite_xml(&1, config.properties))
    # wrap result in a root node (not adding any attribute to root)
    result = :xmerl.export_simple([{:testsuites, [], suites}], :xmerl_xml)
    # save the report in an XML file
    file_name = get_report_file_path()
    :ok = File.write!(file_name, result, [:write])

    if Application.get_env(:junit_formatter, :print_report_file, false) do
      IO.puts(:stderr, "Wrote JUnit report to: #{file_name}")
    end

    {:noreply, config}
  end

  # A nil state means the test passed; it still counts toward totals/time.
  def handle_cast({:test_finished, %ExUnit.Test{state: nil} = test}, config) do
    config = adjust_case_stats(test, nil, config)
    {:noreply, config}
  end

  def handle_cast({:test_finished, %ExUnit.Test{state: {:skip, _}} = test}, config) do
    config = adjust_case_stats(test, :skipped, config)
    {:noreply, config}
  end

  # Excluded tests are reported as "skipped" in JUnit terms.
  def handle_cast({:test_finished, %ExUnit.Test{state: {:excluded, _}} = test}, config) do
    config = adjust_case_stats(test, :skipped, config)
    {:noreply, config}
  end

  def handle_cast({:test_finished, %ExUnit.Test{state: {:failed, _failed}} = test}, config) do
    config = adjust_case_stats(test, :failures, config)
    {:noreply, config}
  end

  # Invalid test modules map to JUnit "errors".
  def handle_cast({:test_finished, %ExUnit.Test{state: {:invalid, _module}} = test}, config) do
    config = adjust_case_stats(test, :errors, config)
    {:noreply, config}
  end

  # Ignore all other formatter events.
  def handle_cast(_event, config), do: {:noreply, config}

  @doc "Formats time from microseconds to seconds (the code divides by 1_000_000)"
  @spec format_time(integer) :: binary
  def format_time(time), do: '~.4f' |> :io_lib.format([time / 1_000_000]) |> List.to_string()

  @doc """
  Helper function to get the full path of the generated report file.
  It can be passed 2 configurations
  - report_dir: full path of a directory (defaults to `Mix.Project.app_path()`)
  - report_file: name of the generated file (defaults to "test-junit-report.xml")
  """
  @spec get_report_file_path() :: String.t()
  def get_report_file_path do
    prepend = Application.get_env(:junit_formatter, :prepend_project_name?, false)
    report_file = Application.get_env(:junit_formatter, :report_file, "test-junit-report.xml")
    report_dir = Application.get_env(:junit_formatter, :report_dir, Mix.Project.app_path())
    prefix = if prepend, do: "#{Mix.Project.config()[:app]}-", else: ""
    Path.join(report_dir, prefix <> report_file)
  end

  # PRIVATE ------------

  # Folds a finished test into the Stats struct for its case. `type` is the
  # Stats counter to bump (:skipped/:failures/:errors) or nil for a pass.
  # On first sight of a case a fresh Stats is created; struct/2 silently
  # ignores the `{nil, 1}` entry for passing tests.
  defp adjust_case_stats(%ExUnit.Test{case: name, time: time} = test, type, state) do
    cases =
      Map.update(
        state.cases,
        name,
        struct(Stats, [{type, 1}, test_cases: [test], time: time, tests: 1]),
        fn stats ->
          stats =
            struct(
              stats,
              test_cases: [test | stats.test_cases],
              time: stats.time + time,
              tests: stats.tests + 1
            )

          if type, do: Map.update!(stats, type, &(&1 + 1)), else: stats
        end
      )

    %{state | cases: cases}
  end

  # Builds one :testsuite xmerl tuple for a test case module, embedding the
  # report-level properties and one :testcase node per test.
  defp generate_testsuite_xml({name, %Stats{} = stats}, properties) do
    properties =
      for {name, value} <- properties do
        {:property, [name: name, value: value], []}
      end

    cases =
      for {test, idx} <- Enum.with_index(stats.test_cases, 1) do
        generate_testcases(test, idx)
      end

    {
      :testsuite,
      [
        errors: stats.errors,
        failures: stats.failures,
        name: name,
        tests: stats.tests,
        time: format_time(stats.time)
      ],
      [{:properties, [], properties} | cases]
    }
  end

  # Builds the :testcase node for a single test; `idx` is only used to label
  # failure output.
  defp generate_testcases(test, idx) do
    attrs = [
      classname: Atom.to_string(test.case),
      name: Atom.to_string(test.name),
      time: format_time(test.time)
    ]

    attrs = maybe_add_filename(attrs, test.tags.file, test.tags.line)

    {
      :testcase,
      attrs,
      generate_test_body(test, idx)
    }
  end

  # Passing tests have no body.
  defp generate_test_body(%ExUnit.Test{state: nil}, _idx), do: []

  defp generate_test_body(%ExUnit.Test{state: {atom, message}}, _idx)
       when atom in ~w[skip excluded]a do
    [{:skipped, [message: message], []}]
  end

  # Failures embed ExUnit's formatted failure text (as a charlist, which is
  # what xmerl expects for content).
  defp generate_test_body(%ExUnit.Test{state: {:failed, failures}} = test, idx) do
    body =
      test
      |> ExUnit.Formatter.format_test_failure(failures, idx, :infinity, fn _, msg -> msg end)
      |> :erlang.binary_to_list()

    [{:failure, [message: message(failures)], [body]}]
  end

  defp generate_test_body(%ExUnit.Test{state: {:invalid, %name{} = module}}, _idx),
    do: [{:error, [message: "Invalid module #{name}"], ['#{inspect(module)}']}]

  # Extracts a human-readable message from ExUnit's failure term(s); only the
  # first failure of a list is reported.
  defp message([msg | _]), do: message(msg)
  defp message({_, %ExUnit.AssertionError{message: reason}, _}), do: reason
  defp message({:error, reason, _}), do: "error: #{Exception.message(reason)}"
  defp message({type, reason, _}) when is_atom(type), do: "#{type}: #{inspect(reason)}"
  defp message({type, reason, _}), do: "#{inspect(type)}: #{inspect(reason)}"

  # Optionally adds a `file` attribute (relative path, optionally with the
  # line appended) depending on application configuration.
  defp maybe_add_filename(attrs, path, line) do
    if Application.get_env(:junit_formatter, :include_filename?) do
      path = Path.relative_to_cwd(path)

      file =
        if Application.get_env(:junit_formatter, :include_file_line?) do
          "#{path}:#{line}"
        else
          path
        end

      Keyword.put(attrs, :file, file)
    else
      attrs
    end
  end
end
|
lib/formatter.ex
| 0.863449
| 0.592195
|
formatter.ex
|
starcoder
|
defmodule Xgit.FilePath do
  @moduledoc ~S"""
  Describes a file path as stored in a git repo.
  Paths are always stored as a list of bytes. The git specification
  does not explicitly specify an encoding, but most commonly the
  path is interpreted as UTF-8.
  We use byte lists here to avoid confusion and possible misintepretation
  in Elixir's `String` type for non-UTF-8 paths.
  Paths are alternately referred to in git as "file name," "path,"
  "path name," and "object name." We're using the name `FilePath`
  to avoid collision with Elixir's built-in `Path` module and to make
  it clear that we're talking about the path to where a file is stored
  on disk.
  """
  use Bitwise
  use Xgit.FileMode

  import Xgit.Util.ForceCoverage

  alias Xgit.Util.Comparison

  @typedoc """
  Representation of a file's path within a git repo.
  Typically, though not necessarily, interpreted as UTF-8.
  """
  @type t :: [byte]

  @doc ~S"""
  Return `true` if the value is a valid file path.
  This performs the same checks as `check_path/2`, but folds away all of the potential
  error values to `false`.
  ## Parameters
  `path` is a UTF-8 byte list containing the path to be tested.
  ## Options
  * `windows?`: `true` to additionally verify that the path is permissible on Windows file systems
  * `macosx?`: `true` to additionally verify that the path is permissible on Mac OS X file systems
  """
  @spec valid?(path :: any, windows?: boolean, macosx?: boolean) :: boolean
  def valid?(path, opts \\ [])
  def valid?(path, opts) when is_list(path) and is_list(opts), do: check_path(path, opts) == :ok
  # Non-list inputs are never valid paths.
  def valid?(_path, _opts), do: cover(false)

  @typedoc ~S"""
  Error codes which can be returned by `check_path/2`.
  """
  @type check_path_reason ::
          :invalid_name | :empty_path | :absolute_path | :duplicate_slash | :trailing_slash

  @typedoc ~S"""
  Error codes which can be returned by `check_path_segment/2`.
  """
  @type check_path_segment_reason ::
          :invalid_name
          | :empty_name
          | :reserved_name
          | :invalid_utf8_sequence
          | :invalid_name_on_windows
          | :windows_device_name

  @doc ~S"""
  Check the provided path to see if it is a valid path within a git repository.
  The rules enforced here are slightly different from what is allowed in a `tree`
  object in that we allow `/` characters to build hierarchical paths.
  ## Parameters
  `path` is a UTF-8 byte list containing the path to be tested.
  ## Options
  * `windows?`: `true` to additionally verify that the path is permissible on Windows file systems
  * `macosx?`: `true` to additionally verify that the path is permissible on Mac OS X file systems
  ## Return Values
  * `:ok` if the name is permissible given the constraints chosen above
  * `{:error, :invalid_name}` if the name is not permissible
  * `{:error, :empty_path}` if the name is empty
  * `{:error, :absolute_path}` if the name starts with a `/`
  * `{:error, :duplicate_slash}` if the name contains two `/` characters in a row
  * `{:error, :trailing_slash}` if the name contains a trailing `/`
  See also: error return values from `check_path_segment/2`.
  """
  @spec check_path(path :: t, windows?: boolean, macosx?: boolean) ::
          :ok | {:error, check_path_reason} | {:error, check_path_segment_reason}
  def check_path(path, opts \\ [])
  def check_path([], opts) when is_list(opts), do: cover({:error, :empty_path})
  def check_path([?/ | _], opts) when is_list(opts), do: cover({:error, :absolute_path})

  # Validate one segment at a time, recursing through check_remaining_path/2
  # for the rest of the path after each `/`.
  def check_path(path, opts) when is_list(path) and is_list(opts) do
    {first_segment, remaining_path} = Enum.split_while(path, &(&1 != ?/))

    case check_path_segment(first_segment, opts) do
      :ok -> check_remaining_path(remaining_path, opts)
      {:error, reason} -> cover {:error, reason}
    end
  end

  # `remaining_path` always starts with `/` (or is empty) because
  # check_path/2 split on the first `/`.
  defp check_remaining_path([], _opts), do: cover(:ok)

  defp check_remaining_path([?/], _opts),
    do: cover({:error, :trailing_slash})

  defp check_remaining_path([?/, ?/ | _remainder], _opts),
    do: cover({:error, :duplicate_slash})

  defp check_remaining_path([?/ | remainder], opts), do: check_path(remainder, opts)

  @doc ~S"""
  Check the provided path segment to see if it is a valid path within a git `tree`
  object.
  ## Parameters
  `path` is a UTF-8 byte list containing the path segment to be tested.
  ## Options
  * `windows?`: `true` to additionally verify that the path is permissible on Windows file systems
  * `macosx?`: `true` to additionally verify that the path is permissible on Mac OS X file systems
  ## Return Values
  * `:ok` if the name is permissible given the constraints chosen above
  * `{:error, :invalid_name}` if the name is not permissible
  * `{:error, :empty_name}` if the name is empty
  * `{:error, :reserved_name}` if the name is reserved for git's use (i.e. `.git`)
  * `{:error, :invalid_utf8_sequence}` if the name contains certain incomplete UTF-8 byte sequences
    (only when `macosx?: true` is selected)
  * `{:error, :invalid_name_on_windows}` if the name contains characters that are
    not allowed on Windows file systems (only when `windows?: true` is selected)
  * `{:error, :windows_device_name}` if the name matches a Windows device name (`aux`, etc.)
    (only when `windows?: true` is selected)
  """
  @spec check_path_segment(path :: t, windows?: boolean, macosx?: boolean) ::
          :ok | {:error, check_path_segment_reason}
  def check_path_segment(path, opts \\ [])
  def check_path_segment([], opts) when is_list(opts), do: cover({:error, :empty_name})

  def check_path_segment(path_segment, opts) when is_list(path_segment) and is_list(opts) do
    windows? = Keyword.get(opts, :windows?, false)
    macosx? = Keyword.get(opts, :macosx?, false)

    # Run all segment checks in sequence; the first failure wins.
    with :ok <- refute_has_nil_bytes(path_segment),
         :ok <- refute_has_slash(path_segment),
         :ok <- check_windows_git_name(path_segment),
         :ok <- check_windows_characters(path_segment, windows?),
         :ok <- check_git_special_name(path_segment),
         :ok <- check_git_path_with_mac_ignorables(path_segment, macosx?),
         :ok <- check_truncated_utf8_for_mac(path_segment, macosx?),
         :ok <- check_illegal_windows_name_ending(path_segment, windows?),
         :ok <- check_windows_device_name(path_segment, windows?) do
      cover :ok
    else
      {:error, reason} -> cover {:error, reason}
    end
  end

  # NUL bytes are never allowed in a path segment.
  defp refute_has_nil_bytes(path_segment) do
    if Enum.any?(path_segment, &(&1 == 0)) do
      cover {:error, :invalid_name}
    else
      cover :ok
    end
  end

  # A segment (between slashes) may not itself contain a slash.
  defp refute_has_slash(path_segment) do
    if Enum.any?(path_segment, &(&1 == ?/)) do
      cover {:error, :invalid_name}
    else
      cover :ok
    end
  end

  # Rejects "git~1" (case-insensitive), the NTFS 8.3 short name that could
  # alias the `.git` directory. Always checked, regardless of `windows?`.
  defp check_windows_git_name(path_segment) do
    with 5 <- Enum.count(path_segment),
         'git~1' <- Enum.map(path_segment, &to_lower/1) do
      cover {:error, :invalid_name}
    else
      _ -> cover :ok
    end
  end

  defp check_windows_characters(_path_segment, false = _windows?), do: cover(:ok)

  defp check_windows_characters(path_segment, true = _windows?) do
    case Enum.find(path_segment, &invalid_on_windows?/1) do
      nil -> cover :ok
      _ -> cover {:error, :invalid_name_on_windows}
    end
  end

  # Characters forbidden in Windows file names, plus control chars 1..31.
  defp invalid_on_windows?(?"), do: cover(true)
  defp invalid_on_windows?(?*), do: cover(true)
  defp invalid_on_windows?(?:), do: cover(true)
  defp invalid_on_windows?(?<), do: cover(true)
  defp invalid_on_windows?(?>), do: cover(true)
  defp invalid_on_windows?(??), do: cover(true)
  defp invalid_on_windows?(?\\), do: cover(true)
  defp invalid_on_windows?(?|), do: cover(true)
  defp invalid_on_windows?(c) when c >= 1 and c <= 31, do: cover(true)
  defp invalid_on_windows?(_), do: cover(false)

  # Names reserved by git itself (plus any case-variant of ".git" with
  # certain trailing space/dot decorations, handled below).
  defp check_git_special_name('.'), do: cover({:error, :reserved_name})
  defp check_git_special_name('..'), do: cover({:error, :reserved_name})
  defp check_git_special_name('.git'), do: cover({:error, :reserved_name})

  defp check_git_special_name([?. | rem] = _name) do
    if normalized_git?(rem) do
      cover {:error, :reserved_name}
    else
      cover :ok
    end
  end

  defp check_git_special_name(_), do: cover(:ok)

  # True if `name` is a case-variant of "git" followed by an ignorable
  # space/dot suffix (e.g. ".GIT ", ".git.").
  defp normalized_git?(name) do
    if git_name_prefix?(name) do
      name
      |> Enum.drop(3)
      |> valid_git_suffix?()
    else
      cover false
    end
  end

  # The simpler approach would be to convert this to a string and use
  # String.downcase/1 on it. But that would create a lot of garbage to collect.
  # This approach is a bit more cumbersome, but more efficient.
  defp git_name_prefix?([?g | it]), do: it_name_prefix?(it)
  defp git_name_prefix?([?G | it]), do: it_name_prefix?(it)
  defp git_name_prefix?(_), do: cover(false)

  defp it_name_prefix?([?i | it]), do: t_name_prefix?(it)
  defp it_name_prefix?([?I | it]), do: t_name_prefix?(it)
  defp it_name_prefix?(_), do: cover(false)

  defp t_name_prefix?([?t | _]), do: cover(true)
  defp t_name_prefix?([?T | _]), do: cover(true)
  defp t_name_prefix?(_), do: cover(false)

  defp valid_git_suffix?([]), do: cover(true)
  defp valid_git_suffix?(' '), do: cover(true)
  defp valid_git_suffix?('.'), do: cover(true)
  defp valid_git_suffix?('. '), do: cover(true)
  defp valid_git_suffix?(' .'), do: cover(true)
  defp valid_git_suffix?(' . '), do: cover(true)
  defp valid_git_suffix?(_), do: cover(false)

  defp check_git_path_with_mac_ignorables(_path_segment, false = _macosx?), do: cover(:ok)

  # HFS+ folds away certain ignorable Unicode chars, so a name that matches
  # ".git" after folding is still reserved on macOS.
  defp check_git_path_with_mac_ignorables(path_segment, true = _macosx?) do
    if match_mac_hfs_path?(path_segment, '.git') do
      cover {:error, :reserved_name}
    else
      cover :ok
    end
  end

  defp check_truncated_utf8_for_mac(_path_segment, false = _macosx?), do: cover(:ok)

  # Rejects names ending in the lead byte of an (incomplete) multi-byte
  # UTF-8 sequence (0xE2/0xEF) within the last two bytes.
  # NOTE(review): the variable is named `tail3` but only the last 2 bytes are
  # inspected (`Enum.slice(path_segment, -2, 2)`) — confirm against jgit's
  # checkTruncatedUnicodeForMac whether 2 or 3 trailing bytes were intended.
  defp check_truncated_utf8_for_mac(path_segment, true = _macosx?) do
    tail3 = Enum.slice(path_segment, -2, 2)

    if Enum.any?(tail3, &(&1 == 0xE2 or &1 == 0xEF)) do
      cover {:error, :invalid_utf8_sequence}
    else
      cover :ok
    end
  end

  defp check_illegal_windows_name_ending(_path_segment, false = _windows?), do: cover(:ok)

  # Windows silently strips trailing spaces and dots, so names ending with
  # them are rejected.
  defp check_illegal_windows_name_ending(path_segment, true = _windows?) do
    last_char = List.last(path_segment)

    if last_char == ?\s || last_char == ?. do
      cover {:error, :invalid_name_on_windows}
    else
      cover :ok
    end
  end

  defp check_windows_device_name(_path_segment, false = _windows?), do: cover(:ok)

  # Device names are reserved even with an extension (e.g. "aux.txt"), hence
  # the take_while up to the first dot.
  defp check_windows_device_name(path_segment, true = _windows?) do
    lc_name =
      path_segment
      |> Enum.map(&to_lower/1)
      |> Enum.take_while(&(&1 != ?.))

    if windows_device_name?(lc_name) do
      cover {:error, :windows_device_name}
    else
      cover :ok
    end
  end

  defp windows_device_name?('aux'), do: cover(true)
  defp windows_device_name?('con'), do: cover(true)
  defp windows_device_name?('com' ++ [d]), do: positive_digit?(d)
  defp windows_device_name?('lpt' ++ [d]), do: positive_digit?(d)
  defp windows_device_name?('nul'), do: cover(true)
  defp windows_device_name?('prn'), do: cover(true)
  defp windows_device_name?(_), do: cover(false)

  # COM0/LPT0 are not reserved; only digits 1-9 count.
  defp positive_digit?(b) when b >= ?1 and b <= ?9, do: cover(true)
  defp positive_digit?(_), do: cover(false)

  @doc ~S"""
  Return `true` if the filename _could_ be read as a `.gitmodules` file when
  checked out to the working directory.
  This would seem like a simple comparison, but some filesystems have peculiar
  rules for normalizing filenames:
  NTFS has backward-compatibility support for 8.3 synonyms of long file
  names. (See
  https://web.archive.org/web/20160318181041/https://usn.pw/blog/gen/2015/06/09/filenames/
  for details.) NTFS is also case-insensitive.
  MacOS's HFS+ folds away ignorable Unicode characters in addition to case
  folding.
  ## Parameters
  `path` is a UTF-8 byte list containing the path to be tested.
  ## Options
  By default, this function will only check for the plain `.gitmodules` name.
  * `windows?`: `true` to additionally check for any path that might be treated
    as a `.gitmodules` file on Windows file systems
  * `macosx?`: `true` to additionally check for any path that might be treated
    as a `.gitmodules` file on Mac OS X file systems
  """
  @spec gitmodules?(path :: t, windows?: boolean, macosx?: boolean) :: boolean
  def gitmodules?(path, opts \\ [])
  def gitmodules?('.gitmodules', opts) when is_list(opts), do: cover(true)

  def gitmodules?(path, opts) when is_list(opts) do
    (Keyword.get(opts, :windows?, false) and ntfs_gitmodules?(path)) or
      (Keyword.get(opts, :macosx?, false) and mac_hfs_gitmodules?(path))
  end

  # NTFS match: either the 11-char name itself (case-insensitive) or one of
  # its 8-character 8.3 short-name synonyms.
  defp ntfs_gitmodules?(name) do
    case Enum.count(name) do
      8 -> ntfs_shortened_gitmodules?(Enum.map(name, &to_lower(&1)))
      11 -> Enum.map(name, &to_lower(&1)) == '.gitmodules'
      _ -> cover false
    end
  end

  defp ntfs_shortened_gitmodules?('gitmod~' ++ rem), do: ntfs_numeric_suffix?(rem)
  defp ntfs_shortened_gitmodules?('gi7eba~' ++ rem), do: ntfs_numeric_suffix?(rem)
  defp ntfs_shortened_gitmodules?('gi7eb~' ++ rem), do: ntfs_numeric_suffix?(rem)
  defp ntfs_shortened_gitmodules?('gi7e~' ++ rem), do: ntfs_numeric_suffix?(rem)
  defp ntfs_shortened_gitmodules?('gi7~' ++ rem), do: ntfs_numeric_suffix?(rem)
  defp ntfs_shortened_gitmodules?('gi~' ++ rem), do: ntfs_numeric_suffix?(rem)
  defp ntfs_shortened_gitmodules?('g~' ++ rem), do: ntfs_numeric_suffix?(rem)
  defp ntfs_shortened_gitmodules?('~' ++ rem), do: ntfs_numeric_suffix?(rem)
  defp ntfs_shortened_gitmodules?(_), do: cover(false)

  # The first digit of the numeric suffix must not be zero.
  defp ntfs_numeric_suffix?([?0 | _rem]), do: cover(false)
  defp ntfs_numeric_suffix?(rem), do: ntfs_numeric_suffix_zero_ok?(rem)

  defp ntfs_numeric_suffix_zero_ok?([c | rem]) when c >= ?0 and c <= ?9,
    do: ntfs_numeric_suffix_zero_ok?(rem)

  defp ntfs_numeric_suffix_zero_ok?([]), do: cover(true)
  defp ntfs_numeric_suffix_zero_ok?(_), do: cover(false)

  defp mac_hfs_gitmodules?(path), do: match_mac_hfs_path?(path, '.gitmodules')

  # Case-insensitive match of `data` against `match`, skipping the HFS+
  # "ignorable" Unicode sequences listed below (each is a 3-byte UTF-8
  # sequence beginning 0xE2 or 0xEF).
  # http://www.utf8-chartable.de/unicode-utf8-table.pl?start=8192
  defp match_mac_hfs_path?(data, match, ignorable? \\ false)

  # U+200C 0xe2808c ZERO WIDTH NON-JOINER
  defp match_mac_hfs_path?([0xE2, 0x80, 0x8C | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+200D 0xe2808d ZERO WIDTH JOINER
  defp match_mac_hfs_path?([0xE2, 0x80, 0x8D | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+200E 0xe2808e LEFT-TO-RIGHT MARK
  defp match_mac_hfs_path?([0xE2, 0x80, 0x8E | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+200F 0xe2808f RIGHT-TO-LEFT MARK
  defp match_mac_hfs_path?([0xE2, 0x80, 0x8F | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+202A 0xe280aa LEFT-TO-RIGHT EMBEDDING
  defp match_mac_hfs_path?([0xE2, 0x80, 0xAA | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+202B 0xe280ab RIGHT-TO-LEFT EMBEDDING
  defp match_mac_hfs_path?([0xE2, 0x80, 0xAB | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+202C 0xe280ac POP DIRECTIONAL FORMATTING
  defp match_mac_hfs_path?([0xE2, 0x80, 0xAC | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+202D 0xe280ad LEFT-TO-RIGHT OVERRIDE
  defp match_mac_hfs_path?([0xE2, 0x80, 0xAD | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+202E 0xe280ae RIGHT-TO-LEFT OVERRIDE
  defp match_mac_hfs_path?([0xE2, 0x80, 0xAE | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # Any other U+20xx sequence is not ignorable and cannot match '.git*'.
  defp match_mac_hfs_path?([0xE2, 0x80, _ | _], _match, _ignorable?), do: cover(false)

  # U+206A 0xe281aa INHIBIT SYMMETRIC SWAPPING
  defp match_mac_hfs_path?([0xE2, 0x81, 0xAA | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+206B 0xe281ab ACTIVATE SYMMETRIC SWAPPING
  defp match_mac_hfs_path?([0xE2, 0x81, 0xAB | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+206C 0xe281ac INHIBIT ARABIC FORM SHAPING
  defp match_mac_hfs_path?([0xE2, 0x81, 0xAC | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+206D 0xe281ad ACTIVATE ARABIC FORM SHAPING
  defp match_mac_hfs_path?([0xE2, 0x81, 0xAD | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+206E 0xe281ae NATIONAL DIGIT SHAPES
  defp match_mac_hfs_path?([0xE2, 0x81, 0xAE | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  # U+206F 0xe281af NOMINAL DIGIT SHAPES
  defp match_mac_hfs_path?([0xE2, 0x81, 0xAF | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  defp match_mac_hfs_path?([0xE2, 0x81, _ | _], _match, _ignorable?), do: cover(false)

  # U+FEFF 0xefbbbf ZERO WIDTH NO-BREAK SPACE (byte-order mark)
  defp match_mac_hfs_path?([0xEF, 0xBB, 0xBF | data], match, _ignorable?),
    do: match_mac_hfs_path?(data, match, true)

  defp match_mac_hfs_path?([0xEF, _, _ | _], _match, _ignorable?), do: cover(false)

  # A lone 0xE2/0xEF lead byte (fewer than 3 bytes left) can't match.
  defp match_mac_hfs_path?([c | _] = _list, _match, _ignorable?)
       when c == 0xE2 or c == 0xEF,
       do: cover(false)

  # Ordinary byte: compare case-folded against the expected match byte.
  defp match_mac_hfs_path?([c | data], [m | match], ignorable?) do
    if to_lower(c) == m do
      match_mac_hfs_path?(data, match, ignorable?)
    else
      cover(false)
    end
  end

  defp match_mac_hfs_path?([], [], _ignorable?), do: cover(true)

  # defp match_mac_hfs_path?([], [], ignorable?), do: ignorable?
  # TO DO: Understand what jgit was trying to accomplish with ignorable.
  # https://github.com/elixir-git/xgit/issues/34

  defp match_mac_hfs_path?(_data, _match, _ignorable?), do: cover(false)

  # ASCII-only lower-casing; non-ASCII bytes pass through unchanged.
  defp to_lower(b) when b >= ?A and b <= ?Z, do: cover(b + 32)
  defp to_lower(b), do: cover(b)

  @doc ~S"""
  Returns `true` if `path` starts with `prefix`.
  Unlike `String.starts_with?/2`, only accepts a single prefix path.
  """
  @spec starts_with?(path :: t, prefix :: t) :: boolean
  def starts_with?(path, prefix)
  def starts_with?(_path, []), do: cover(true)
  def starts_with?([c | path], [c | prefix]), do: starts_with?(path, prefix)
  def starts_with?(_path, _prefix), do: cover(false)

  @doc ~S"""
  Ensure that a trailing `/` is present.
  **Exception:** If the path is empty, it will be returned as-is.
  """
  @spec ensure_trailing_separator(path :: t) :: t
  def ensure_trailing_separator([]), do: cover([])

  def ensure_trailing_separator(path) when is_list(path) do
    # We strip trailing `/` because there might be more than one.
    strip_trailing_separator(path) ++ '/'
  end

  @doc ~S"""
  Remove trailing `/` if present.
  """
  @spec strip_trailing_separator(path :: t) :: t
  def strip_trailing_separator([]), do: cover([])

  def strip_trailing_separator(path) when is_list(path) do
    if List.last(path) == ?/ do
      # Reverse/drop/reverse removes *all* trailing slashes in one pass.
      path
      |> Enum.reverse()
      |> Enum.drop_while(&(&1 == ?/))
      |> Enum.reverse()
    else
      cover path
    end
  end

  @doc ~S"""
  Compare two paths according to git path sort ordering rules.
  ## Return Value
  * `:lt` if `path1` sorts before `path2`.
  * `:eq` if they are the same.
  * `:gt` if `path1` sorts after `path2`.
  """
  @spec compare(
          path1 :: t,
          mode1 :: FileMode.t(),
          path2 :: t,
          mode2 :: FileMode.t()
        ) :: Comparison.result()
  def compare(path1, mode1, path2, mode2)
      when is_list(path1) and is_file_mode(mode1) and is_list(path2) and is_file_mode(mode2) do
    case core_compare(path1, mode1, path2, mode2) do
      :eq -> mode_compare(mode1, mode2)
      x -> cover x
    end
  end

  @doc ~S"""
  Compare two paths, checking for identical name.
  Unlike `compare/4`, this method returns `:eq` when the paths have
  the same characters in their names, even if the mode differs. It is
  intended for use in validation routines detecting duplicate entries.
  ## Parameters
  `mode2` is the mode of the second file. Trees are sorted as though
  `List.last(path2) == ?/`, even if no such character exists.
  ## Return Value
  Returns `:eq` if the names are identical and a conflict exists
  between `path1` and `path2`, as they share the same name.
  Returns `:lt` if all possible occurrences of `path1` sort
  before `path2` and no conflict can happen. In a properly sorted
  tree there are no other occurrences of `path1` and therefore there
  are no duplicate names.
  Returns `:gt` when it is possible for a duplicate occurrence of
  `path1` to appear later, after `path2`. Callers should
  continue to examine candidates for `path2` until the method returns
  one of the other return values.
  """
  @spec compare_same_name(path1 :: t, path2 :: t, mode2 :: FileMode.t()) ::
          Comparison.result()
  def compare_same_name(path1, path2, mode2),
    do: core_compare(path1, FileMode.tree(), path2, mode2)

  # Byte-wise comparison; when one path is a prefix of the other, the
  # shorter path is extended with a virtual `/` (tree) or NUL (non-tree)
  # via last_path_char/1, matching git's tree sort order.
  defp core_compare(path1, mode1, path2, mode2)

  defp core_compare([c | rem1], mode1, [c | rem2], mode2),
    do: core_compare(rem1, mode1, rem2, mode2)

  defp core_compare([c1 | _rem1], _mode1, [c2 | _rem2], _mode2),
    do: compare_chars(c1, c2)

  defp core_compare([c1 | _rem1], _mode1, [], mode2),
    do: compare_chars(band(c1, 0xFF), last_path_char(mode2))

  defp core_compare([], mode1, [c2 | _], _mode2),
    do: compare_chars(last_path_char(mode1), band(c2, 0xFF))

  defp core_compare([], _mode1, [], _mode2), do: cover(:eq)

  defp compare_chars(c, c), do: cover(:eq)
  defp compare_chars(c1, c2) when c1 < c2, do: cover(:lt)
  defp compare_chars(_, _), do: cover(:gt)

  defp last_path_char(mode) do
    if FileMode.tree?(mode) do
      cover ?/
    else
      cover 0
    end
  end

  # Gitlinks compare equal to anything by mode; otherwise tree-ness decides.
  defp mode_compare(mode1, mode2) do
    if FileMode.gitlink?(mode1) or FileMode.gitlink?(mode2) do
      cover :eq
    else
      compare_chars(last_path_char(mode1), last_path_char(mode2))
    end
  end
end
|
lib/xgit/file_path.ex
| 0.909581
| 0.581184
|
file_path.ex
|
starcoder
|
defmodule Mix.Tasks.Phx.Gen.Live.Slime do
@shortdoc "Generates LiveView, templates, and context for a resource"
@moduledoc """
Generates LiveView, templates, and context for a resource.
mix phx.gen.live Accounts User users name:string age:integer
The first argument is the context module followed by the schema module
and its plural name (used as the schema table name).
The context is an Elixir module that serves as an API boundary for
the given resource. A context often holds many related resources.
Therefore, if the context already exists, it will be augmented with
functions for the given resource.
When this command is run for the first time, a `ModalComponent` and
`LiveHelpers` module will be created, along with the resource level
LiveViews and components, including an `IndexLive`, `ShowLive`, `FormComponent`
for the new resource.
> Note: A resource may also be split
> over distinct contexts (such as `Accounts.User` and `Payments.User`).
The schema is responsible for mapping the database fields into an
Elixir struct. It is followed by an optional list of attributes,
with their respective names and types. See `mix phx.gen.schema`
for more information on attributes.
Overall, this generator will add the following files to `lib/`:
* a context module in `lib/app/accounts.ex` for the accounts API
* a schema in `lib/app/accounts/user.ex`, with an `users` table
* a view in `lib/app_web/views/user_view.ex`
* a LiveView in `lib/app_web/live/user_live/show_live.ex`
* a LiveView in `lib/app_web/live/user_live/index_live.ex`
* a LiveComponent in `lib/app_web/live/user_live/form_component.ex`
* a LiveComponent in `lib/app_web/live/modal_component.ex`
* a helpers modules in `lib/app_web/live/live_helpers.ex`
## The context app
A migration file for the repository and test files for the context and
controller features will also be generated.
The location of the web files (LiveView's, views, templates, etc) in an
umbrella application will vary based on the `:context_app` config located
in your applications `:generators` configuration. When set, the Phoenix
generators will generate web files directly in your lib and test folders
since the application is assumed to be isolated to web specific functionality.
If `:context_app` is not set, the generators will place web related lib
and test files in a `web/` directory since the application is assumed
to be handling both web and domain specific functionality.
Example configuration:
config :my_app_web, :generators, context_app: :my_app
Alternatively, the `--context-app` option may be supplied to the generator:
mix phx.gen.live Sales User users --context-app warehouse
## Web namespace
By default, the controller and view will be namespaced by the schema name.
You can customize the web module namespace by passing the `--web` flag with a
module name, for example:
mix phx.gen.live Sales User users --web Sales
Which would generate a LiveViews inside `lib/app_web/live/sales/user_live/` and a
view at `lib/app_web/views/sales/user_view.ex`.
## Customising the context, schema, tables and migrations
In some cases, you may wish to bootstrap HTML templates, LiveViews,
and tests, but leave internal implementation of the context or schema
to yourself. You can use the `--no-context` and `--no-schema` flags
for file generation control.
You can also change the table name or configure the migrations to
use binary ids for primary keys, see `mix phx.gen.schema` for more
information.
"""
use Mix.Task
alias Mix.Phoenix.{Context,Schema}
alias Mix.Tasks.Phx.Gen
import Mix.Tasks.Phx.Gen.Live, only: [print_shell_instructions: 1]
@doc false
  # Entry point for `mix phx.gen.live` (slime variant). Parses CLI args into a
  # context/schema pair, prompts the user, then generates files and prints
  # follow-up instructions. Must run inside the web app, not the umbrella root.
  def run(args) do
    if Mix.Project.umbrella?() do
      Mix.raise "mix phx.gen.live must be invoked from within your *_web application root directory"
    end
    # Build the Context/Schema structs from the positional CLI arguments.
    {context, schema} = Gen.Context.build(args)
    Gen.Context.prompt_for_code_injection(context)
    # `binding` is the EEx template binding; `inputs/1` precomputes the
    # form-field markup tuples for each schema attribute.
    binding = [context: context, schema: schema, inputs: inputs(schema)]
    # Template lookup order: project-local templates win over :phoenix_slime,
    # which wins over the stock :phoenix templates.
    paths = [".", :phoenix_slime, :phoenix]
    # Ask before overwriting any existing file (raises/aborts on decline).
    prompt_for_conflicts(context)
    context
    |> copy_new_files(binding, paths)
    |> maybe_inject_helpers()
    |> print_shell_instructions()
  end
defp prompt_for_conflicts(context) do
context
|> files_to_be_generated()
|> Kernel.++(context_files(context))
|> Mix.Phoenix.prompt_for_conflicts()
end
defp context_files(%Context{generate?: true} = context) do
Gen.Context.files_to_be_generated(context)
end
defp context_files(%Context{generate?: false}) do
[]
end
  # Lists every web-side file this generator produces as
  # `{template_kind, template_name, destination_path}` tuples.
  # `:eex` files are always rendered; `:new_eex` files are only written when
  # the destination does not already exist (shared helpers/components).
  defp files_to_be_generated(%Context{schema: schema, context_app: context_app}) do
    web_prefix = Mix.Phoenix.web_path(context_app)
    test_prefix = Mix.Phoenix.web_test_path(context_app)
    # Optional extra namespace directory from the `--web` flag (may be "").
    web_path = to_string(schema.web_path)
    live_subdir = "#{schema.singular}_live"
    [
      {:eex, "show.ex", Path.join([web_prefix, "live", web_path, live_subdir, "show.ex"])},
      {:eex, "index.ex", Path.join([web_prefix, "live", web_path, live_subdir, "index.ex"])},
      {:eex, "form_component.ex", Path.join([web_prefix, "live", web_path, live_subdir, "form_component.ex"])},
      # NOTE(review): templates named *.html.leex are written out with a
      # ".html.slimleex" extension — presumably the phoenix_slime LiveView
      # template extension; confirm this matches the installed slime engine.
      {:eex, "form_component.html.leex", Path.join([web_prefix, "live", web_path, live_subdir, "form_component.html.slimleex"])},
      {:eex, "index.html.leex", Path.join([web_prefix, "live", web_path, live_subdir, "index.html.slimleex"])},
      {:eex, "show.html.leex", Path.join([web_prefix, "live", web_path, live_subdir, "show.html.slimleex"])},
      {:eex, "live_test.exs", Path.join([test_prefix, "live", web_path, "#{schema.singular}_live_test.exs"])},
      {:new_eex, "modal_component.ex", Path.join([web_prefix, "live", "modal_component.ex"])},
      {:new_eex, "live_helpers.ex", Path.join([web_prefix, "live", "live_helpers.ex"])},
    ]
  end
defp copy_new_files(%Context{} = context, binding, paths) do
files = files_to_be_generated(context)
Mix.Phoenix.copy_from(paths, "priv/templates/phx.gen.live.slime", binding, files)
if context.generate?, do: Gen.Context.copy_new_files(context, paths, binding)
context
end
  # Injects `import <Web>.LiveHelpers` into the `*_web.ex` entry-point module,
  # unless it is already present. Always returns the context for piping.
  defp maybe_inject_helpers(%Context{context_app: ctx_app} = context) do
    web_prefix = Mix.Phoenix.web_path(ctx_app)
    # NOTE(review): assumes the web path has exactly two segments
    # (e.g. "lib/my_app_web") — Path.split/1 raises a MatchError otherwise.
    [lib_prefix, web_dir] = Path.split(web_prefix)
    # e.g. "lib/my_app_web.ex" — the module that defines live_view/0 etc.
    file_path = Path.join(lib_prefix, "#{web_dir}.ex")
    file = File.read!(file_path)
    inject = "import #{inspect(context.web_module)}.LiveHelpers"
    # Idempotent: skip injection when the import line already exists.
    if String.contains?(file, inject) do
      :ok
    else
      do_inject_helpers(context, file, file_path, inject)
    end
    context
  end
  # Performs the actual injection: appends the LiveHelpers import right after
  # the existing `import Phoenix.LiveView.Helpers` line. If that anchor line is
  # missing (app not generated with --live), prints manual instructions instead.
  defp do_inject_helpers(context, file, file_path, inject) do
    Mix.shell().info([:green, "* injecting ", :reset, Path.relative_to_cwd(file_path)])
    # String.replace/3 is a no-op when the anchor is absent, which is how the
    # failure case below is detected (file == new_file).
    new_file = String.replace(file, "import Phoenix.LiveView.Helpers", "import Phoenix.LiveView.Helpers\n  #{inject}")
    if file != new_file do
      File.write!(file_path, new_file)
    else
      Mix.shell().info """
      Could not find Phoenix.LiveView.Helpers imported in #{file_path}.
      This typically happens because your application was not generated
      with the --live flag:
          mix phx.new my_app --live
      Please make sure LiveView is installed and that #{inspect(context.web_module)}
      defines both `live_view/0` and `live_component/0` functions,
      and that both functions import #{inspect(context.web_module)}.LiveHelpers.
      """
    end
  end
defp live_route_instructions(schema) do
[
~s|live "/#{schema.plural}", #{inspect(schema.alias)}Live.Index, :index\n|,
~s|live "/#{schema.plural}/new", #{inspect(schema.alias)}Live.Index, :new\n|,
~s|live "/#{schema.plural}/:id/edit", #{inspect(schema.alias)}Live.Index, :edit\n\n|,
~s|live "/#{schema.plural}/:id", #{inspect(schema.alias)}Live.Show, :show\n|,
~s|live "/#{schema.plural}/:id/show/edit", #{inspect(schema.alias)}Live.Show, :edit|
]
end
  # Maps each schema attribute to a `{label_markup, input_markup, error_markup}`
  # tuple of Slime template snippets, dispatching on the attribute's Ecto type.
  # Array and association (references) attributes get `{nil, nil, nil}` — no
  # form field is rendered for them.
  defp inputs(%Schema{attrs: attrs}) do
    Enum.map(attrs, fn
      {_, {:array, _}} ->
        {nil, nil, nil}
      {_, {:references, _}} ->
        {nil, nil, nil}
      {key, :integer} ->
        {label(key), ~s(= number_input f, #{inspect(key)}, class: "form-control", autocomplete: "off"), error(key)}
      {key, :float} ->
        # `step: "any"` lets the browser accept arbitrary decimal values.
        {label(key), ~s(= number_input f, #{inspect(key)}, step: "any", class: "form-control", autocomplete: "off"),
         error(key)}
      {key, :decimal} ->
        {label(key), ~s(= number_input f, #{inspect(key)}, step: "any", class: "form-control", autocomplete: "off"),
         error(key)}
      {key, :boolean} ->
        {label(key), ~s(= checkbox f, #{inspect(key)}, class: "form-control", autocomplete: "off"), error(key)}
      {key, :text} ->
        {label(key), ~s(= textarea f, #{inspect(key)}, class: "form-control", autocomplete: "off"), error(key)}
      {key, :date} ->
        {label(key), ~s(= date_select f, #{inspect(key)}, class: "form-control", autocomplete: "off"), error(key)}
      {key, :time} ->
        {label(key), ~s(= time_select f, #{inspect(key)}, class: "form-control", autocomplete: "off"), error(key)}
      {key, :utc_datetime} ->
        {label(key), ~s(= datetime_select f, #{inspect(key)}, class: "form-control", autocomplete: "off"), error(key)}
      {key, :naive_datetime} ->
        {label(key), ~s(= datetime_select f, #{inspect(key)}, class: "form-control", autocomplete: "off"), error(key)}
      # Fallback: any other type (e.g. :string) renders as a plain text input.
      {key, _} ->
        {label(key), ~s(= text_input f, #{inspect(key)}, class: "form-control", autocomplete: "off"), error(key)}
    end)
  end
defp label(key) do
~s(= label f, #{inspect(key)}, class: "control-label")
end
defp error(field) do
~s(= error_tag f, #{inspect(field)})
end
end
|
lib/mix/tasks/phx.gen.live.slime.ex
| 0.866019
| 0.520435
|
phx.gen.live.slime.ex
|
starcoder
|
defmodule Kaffe.GroupMember do
  @moduledoc """
  Note: The `brod_group_member` behavior is used.
  Consume messages from a Kafka topic for a consumer group. There is one brod
  group member per topic! So as new topics are added to configuration so are
  the number of brod group members. Likewise, if you're using something like
  Heroku Kafka and have multiple dynos for your consumer, there will be a
  Kaffe.GroupMember per dyno and each group member will receive an equal set
  of partition assignments for each topic.
  The actual consumption is delegated to a series of subscribers, see
  `Kaffe.Subscriber` for details on how messages are processed.
  The subscribers are assigned generations. Each generation represents a
  specific configuration. In order to allow the partitions to be rebalanced on
  startup, there is a delay between receiving a set of assignments associated
  with that generation and actually creating the subscribers. If a new
  generation is received in the mean time, the older generation is discarded.
  See the following for more details:
  https://github.com/klarna/brod/blob/master/src/brod_group_member.erl
  https://github.com/klarna/brucke/blob/master/src/brucke_member.erl
  """
  use GenServer
  require Logger
  @behaviour :brod_group_member

  defmodule State do
    # NOTE(review): `configured_offset` is never written anywhere in this
    # module (the offset is read via `configured_offset/0` instead) — confirm
    # whether the field is dead.
    defstruct subscribers: [],
              subscriber_name: nil,
              group_coordinator_pid: nil,
              consumer_group: nil,
              worker_manager_pid: nil,
              topic: nil,
              configured_offset: nil,
              current_gen_id: nil,
              partitions_listener: nil
  end

  ## ==========================================================================
  ## Public API
  ## ==========================================================================

  # Starts one group member per {subscriber_name, topic} pair; the process is
  # registered under a composite name so multiple topics can coexist.
  def start_link(subscriber_name, consumer_group, worker_manager_pid, topic) do
    GenServer.start_link(
      __MODULE__,
      [
        subscriber_name,
        consumer_group,
        worker_manager_pid,
        topic
      ],
      name: name(subscriber_name, topic)
    )
  end

  # brod_group_member callback. Not expected to be called in this setup;
  # logged so an unexpected invocation is visible.
  # NOTE(review): Logger.warn/1 is deprecated in favor of Logger.warning/1 on
  # Elixir >= 1.15.
  # Should not receive this
  def get_committed_offsets(_group_member_pid, _topic_partitions) do
    Logger.warn("event#get_committed_offsets")
  end

  # brod_group_member callback. Not expected to be called in this setup.
  # Should not receive this
  def assign_partitions(_pid, _members, _topic_partitions) do
    Logger.warn("event#assign_partitions")
  end

  # brod_group_member callback: the coordinator hands us a new generation of
  # partition assignments.
  def assignments_received(pid, _member_id, generation_id, assignments) do
    GenServer.cast(pid, {:assignments_received, generation_id, assignments})
  end

  # brod_group_member callback: the coordinator revokes all assignments
  # (e.g. during a rebalance).
  def assignments_revoked(pid) do
    GenServer.cast(pid, {:assignments_revoked})
  end

  ## ==========================================================================
  ## Callbacks
  ## ==========================================================================

  # Starts the brod consumer for the topic, then the group coordinator which
  # will call back into this process with assignments.
  def init([subscriber_name, consumer_group, worker_manager_pid, topic]) do
    :ok = kafka().start_consumer(subscriber_name, topic, [])
    {:ok, pid} =
      group_coordinator().start_link(
        subscriber_name,
        consumer_group,
        [topic],
        group_config(),
        __MODULE__,
        self()
      )
    Logger.info(
      "event#init=#{__MODULE__}
      group_coordinator=#{inspect(pid)}
      subscriber_name=#{subscriber_name}
      consumer_group=#{consumer_group}"
    )
    {:ok,
     %State{
       subscriber_name: subscriber_name,
       group_coordinator_pid: pid,
       consumer_group: consumer_group,
       worker_manager_pid: worker_manager_pid,
       topic: topic,
       partitions_listener: Kaffe.Config.Consumer.configuration().partitions_listener
     }}
  end

  # Handle the partition assignments. Wait the configured duration before allocating the
  # subscribers to give each consumer a chance to handle the latest generation of the
  # configuration.
  def handle_cast({:assignments_received, gen_id, assignments}, state) do
    Logger.info("event#assignments_received=#{name(state.subscriber_name, state.topic)} generation_id=#{gen_id}")
    state.partitions_listener.assigned(assignments)
    # Delayed allocation: if a newer generation arrives before the timer
    # fires, the handle_info clause below discards this one.
    Process.send_after(self(), {:allocate_subscribers, gen_id, assignments}, rebalance_delay())
    {:noreply, %{state | current_gen_id: gen_id}}
  end

  # Tear down all subscribers; new ones will be created for the next
  # generation of assignments.
  def handle_cast({:assignments_revoked}, state) do
    Logger.info("event#assignments_revoked=#{name(state.subscriber_name, state.topic)}")
    state.partitions_listener.revoked()
    stop_subscribers(state.subscribers)
    {:noreply, %{state | :subscribers => []}}
  end

  # If we're not at the latest generation, discard the assignment for whatever is next.
  def handle_info({:allocate_subscribers, gen_id, _assignments}, %{current_gen_id: current_gen_id} = state) when gen_id < current_gen_id do
    Logger.debug("Discarding old generation #{gen_id} for current generation: #{current_gen_id}")
    {:noreply, state}
  end

  # If we are at the latest, allocate a subscriber per partition.
  def handle_info({:allocate_subscribers, gen_id, assignments}, state) do
    Logger.info("event#allocate_subscribers=#{inspect(self())} generation_id=#{gen_id}")
    if state.subscribers != [] do
      # Did we try to allocate without deallocating? We'd like to know.
      Logger.info("event#subscribers_not_empty=#{inspect(self())}")
      stop_subscribers(state.subscribers)
    end
    subscribers =
      Enum.map(assignments, fn assignment ->
        Logger.debug("Allocating subscriber for assignment: #{inspect(assignment)}")
        # brod hands assignments as {:brod_received_assignment, topic, partition, offset}.
        {:brod_received_assignment, topic, partition, offset} = assignment
        worker_pid = worker_manager().worker_for(state.worker_manager_pid, topic, partition)
        {:ok, pid} =
          subscriber().subscribe(
            state.subscriber_name,
            state.group_coordinator_pid,
            worker_pid,
            gen_id,
            topic,
            partition,
            compute_offset(offset, configured_offset())
          )
        pid
      end)
    {:noreply, %{state | :subscribers => subscribers}}
  end

  ## ==========================================================================
  ## Helpers
  ## ==========================================================================

  defp stop_subscribers(subscribers) do
    Enum.each(subscribers, fn s ->
      subscriber().stop(s)
    end)
  end

  # When brod reports no committed offset (:undefined), fall back to the
  # configured begin_offset; otherwise resume from the committed offset.
  defp compute_offset(:undefined, configured_offset) do
    [begin_offset: configured_offset]
  end
  defp compute_offset(offset, _configured_offset) do
    [begin_offset: offset]
  end

  defp configured_offset do
    Kaffe.Config.Consumer.begin_offset()
  end

  defp rebalance_delay do
    Kaffe.Config.Consumer.configuration().rebalance_delay_ms
  end

  defp group_config do
    Kaffe.Config.Consumer.configuration().group_config
  end

  # The following accessors allow test doubles to be swapped in via app env.
  defp kafka do
    Application.get_env(:kaffe, :kafka_mod, :brod)
  end

  defp group_coordinator do
    Application.get_env(:kaffe, :group_coordinator_mod, :brod_group_coordinator)
  end

  defp worker_manager do
    Application.get_env(:kaffe, :worker_manager_mod, Kaffe.WorkerManager)
  end

  defp subscriber do
    Application.get_env(:kaffe, :subscriber_mod, Kaffe.Subscriber)
  end

  # NOTE(review): this helper is never called — init/1 reads the listener from
  # Kaffe.Config.Consumer.configuration() instead. Confirm whether it is dead.
  defp partitions_listener do
    Application.get_env(:kaffe, :partitions_listener)
  end

  # Composite registered name: one process per subscriber/topic pair.
  defp name(subscriber_name, topic) do
    :"#{__MODULE__}.#{subscriber_name}.#{topic}"
  end
end
|
lib/kaffe/consumer_group/subscriber/group_member.ex
| 0.847983
| 0.436202
|
group_member.ex
|
starcoder
|
defmodule MdnsLite do
  @moduledoc """
  A simple implementation of an mDNS (multicast DNS (Domain Name Server))
  server. mDNS uses multicast UDP rather than TCP. Its primary use is to
  provide DNS support for the `local` domain. `MdnsLite` listens on a
  well-known ip address/port. If a request arrives that it recognizes, it
  constructs the appropriate DNS response.
  `MdnsLite` responds to a limited number of DNS requests; they are all handled
  in the `MdnsLite.Query` module. Of particular note is the SRV request. The
  response will be a list of known services and how to contact them (domain and
  port) as described in the configuration file.
  `MdnsLite` uses a "network monitor", a module that listens for changes in a
  network. Its purpose is to ensure that the network interfaces are up to
  date. The current version of `MdnsLite` has an `MdnsLite.InetMonitor` which
  periodically checks, via `inet:getifaddrs()`, for changes in the network. For
  example, a change could be the re-assignment of IP addresses.
  This module is initialized, at runtime, with host information and service
  descriptions found in the `config.exs` file. The descriptions will be used
  by `MdnsLite` to construct a response to a query.
  Please refer to the `README.md` for further information.
  """

  @doc """
  Set the list of host names
  `host` can have the value of `:hostname` in which case the value will be
  replaced with the value of `:inet.gethostname()`, otherwise you can provide a
  string value. You can specify an alias hostname in which case `host` will be
  `["hostname", "alias-example"]`. The second value must be a string. When you
  use an alias, an "A" query can be made to `alias-example.local` as well as
  to `hostname.local`. This can also be configured at runtime via
  `MdnsLite.set_host/1`:
  ```elixir
  iex> MdnsLite.set_host([:hostname, "nerves"])
  :ok
  ```
  """
  defdelegate set_host(host), to: MdnsLite.Configuration

  @doc """
  Add services for mdns_lite to advertise
  The `services` section lists the services that the host offers, such as
  providing an HTTP server. You must supply the `protocol`, `transport` and
  `port` values for each service. You may also specify `weight` and/or `host`.
  They each default to a zero value. Please consult the RFC for an explanation
  of these values. Services can be configured in `config.exs` as shown above,
  or at runtime:
  ```elixir
  iex> services = [
    # service type: _http._tcp.local - used in match
    %{
      name: "Web Server",
      protocol: "http",
      transport: "tcp",
      port: 80,
    },
    # service_type: _ssh._tcp.local - used in match
    %{
      name: "Secure Socket",
      protocol: "ssh",
      transport: "tcp",
      port: 22,
    }
  ]
  iex> MdnsLite.add_mdns_services(services)
  :ok
  ```
  """
  defdelegate add_mdns_services(services), to: MdnsLite.Configuration

  @doc """
  Remove services
  Services can also be removed at runtime via `remove_mdns_services/1` with the
  service name to remove:
  ```elixir
  iex> service_names = ["Web Server", "Secure Socket"]
  iex> MdnsLite.remove_mdns_services(service_names)
  :ok
  # Remove just a single service
  iex> MdnsLite.remove_mdns_services("Secure Socket")
  :ok
  ```
  """
  defdelegate remove_mdns_services(service_names), to: MdnsLite.Configuration
end
|
lib/mdns_lite.ex
| 0.857112
| 0.812421
|
mdns_lite.ex
|
starcoder
|
defmodule Broadway.Message do
  @moduledoc """
  The struct that carries a single message through a Broadway pipeline.

  Messages are created by the producers and then flow downstream, being
  transformed along the way — either by a module implementing the `Broadway`
  behaviour via the `c:Broadway.handle_message/3` callback, or internally by
  Broadway's built-in stages.

  Rather than touching the struct fields directly, manipulate messages
  through the functions provided in this module.
  """

  alias __MODULE__, as: Message
  alias Broadway.{Acknowledger, NoopAcknowledger}

  @type t :: %Message{
          data: term,
          metadata: %{optional(atom) => term},
          acknowledger: {module, ack_ref :: term, data :: term},
          batcher: atom,
          batch_key: term,
          batch_mode: :bulk | :flush,
          status:
            :ok
            | {:failed, reason :: binary}
            | {:throw | :error | :exit, term, Exception.stacktrace()}
        }

  @enforce_keys [:data, :acknowledger]
  defstruct data: nil,
            metadata: %{},
            acknowledger: nil,
            batcher: :default,
            batch_key: :default,
            batch_mode: :bulk,
            status: :ok

  @doc """
  Applies `fun` to the message's data and stores the result back in the message.

  Typically called from a `c:Broadway.handle_message/3` implementation to
  replace the payload with its processed form.
  """
  @spec update_data(message :: Message.t(), fun :: (term -> term)) :: Message.t()
  def update_data(%Message{data: current} = message, fun) when is_function(fun, 1) do
    %Message{message | data: fun.(current)}
  end

  @doc """
  Replaces the message's data with the given value.

  Typically called from a `c:Broadway.handle_message/3` implementation when
  the new payload has already been computed.
  """
  @spec put_data(message :: Message.t(), term) :: Message.t()
  def put_data(%Message{} = message, new_data) do
    %Message{message | data: new_data}
  end

  @doc """
  Routes the message to the named batcher.
  """
  @spec put_batcher(message :: Message.t(), batcher :: atom) :: Message.t()
  def put_batcher(%Message{} = message, batcher) when is_atom(batcher) do
    %Message{message | batcher: batcher}
  end

  @doc """
  Sets the message's batch key.

  Within a batcher, messages sharing a `batch_key` are grouped into the same
  batch, bounded by `batch_size` and `batch_timeout` — both of which are
  tracked per key, so one batcher can accumulate several keys at once
  regardless of its concurrency level.

  When a batcher runs multiple batch processors (concurrency > 1), every
  message with a given batch key lands on the same processor: distinct keys
  may run concurrently, but a single key is always processed serially by one
  batcher processor.
  """
  @spec put_batch_key(message :: Message.t(), batch_key :: term) :: Message.t()
  def put_batch_key(%Message{} = message, batch_key) do
    %Message{message | batch_key: batch_key}
  end

  @doc """
  Chooses how the message's batch is delivered.

  With `:bulk` (the default), the batch is delivered once the batch size or
  the batch timeout is reached. With `:flush`, the batch is delivered right
  after processing — not necessarily as a single-element batch, but without
  waiting: everything received from the processor goes out immediately.
  """
  @spec put_batch_mode(message :: Message.t(), mode :: :bulk | :flush) :: Message.t()
  def put_batch_mode(%Message{} = message, mode) when mode in [:bulk, :flush] do
    %Message{message | batch_mode: mode}
  end

  @doc """
  Reconfigures this message's acknowledger.

  Delegates to the acknowledger's `c:Broadway.Acknowledger.configure/3`
  callback. That callback is optional, so this function raises when the
  acknowledger module does not export `configure/3`.
  """
  @spec configure_ack(message :: Message.t(), options :: keyword) :: Message.t()
  def configure_ack(%Message{} = message, options) when is_list(options) do
    %{acknowledger: {module, ack_ref, ack_data}} = message

    # configure/3 is an optional callback — only call it if it is exported.
    unless Code.ensure_loaded?(module) and function_exported?(module, :configure, 3) do
      raise "the configure/3 callback is not defined by acknowledger #{inspect(module)}"
    end

    {:ok, new_ack_data} = module.configure(ack_ref, ack_data, options)
    %{message | acknowledger: {module, ack_ref, new_ack_data}}
  end

  @doc """
  Marks the message as failed.

  Failed messages go straight to their acknowledger at the end of the current
  step and are not forwarded further down the pipeline. No log is emitted,
  but the `c:Broadway.handle_failed/2` callback is triggered.
  """
  @spec failed(message :: Message.t(), reason :: binary) :: Message.t()
  def failed(%Message{} = message, reason) do
    %Message{message | status: {:failed, reason}}
  end

  @doc """
  Acknowledges the given message (or list of messages) right away, without
  waiting for the rest of the pipeline.

  Each acked message's acknowledger is swapped for a no-op one, so the final
  ack at the end of the pipeline is harmless. Returns the updated message, or
  the updated list when a list was given.
  """
  @spec ack_immediately(message :: Message.t()) :: Message.t()
  @spec ack_immediately(messages :: [Message.t(), ...]) :: [Message.t(), ...]
  def ack_immediately(message_or_messages)

  def ack_immediately(%Message{} = message) do
    message |> List.wrap() |> ack_immediately() |> hd()
  end

  def ack_immediately(messages) when is_list(messages) and messages != [] do
    {successful, failed} = Enum.split_with(messages, &(&1.status == :ok))
    _ = Acknowledger.ack_messages(successful, failed)

    Enum.map(messages, fn message ->
      %{message | acknowledger: {NoopAcknowledger, _ack_ref = nil, _data = nil}}
    end)
  end
end
|
lib/broadway/message.ex
| 0.860325
| 0.597872
|
message.ex
|
starcoder
|
defmodule Mayo.Number do
  @doc """
  Validates that a number is at least `limit`. Non-numbers pass through
  untouched.

      iex> Mayo.Number.min(4, 3)
      4

      iex> Mayo.Number.min(1, 3)
      {:error, %Mayo.Error{type: "number.min"}}
  """
  def min(value, limit) when is_number(value) do
    if value >= limit, do: value, else: fail("number.min")
  end

  def min(value, _limit), do: value

  @doc """
  Validates that a number is at most `limit`. Non-numbers pass through
  untouched.

      iex> Mayo.Number.max(4, 5)
      4

      iex> Mayo.Number.max(6, 5)
      {:error, %Mayo.Error{type: "number.max"}}
  """
  def max(value, limit) when is_number(value) do
    if value <= limit, do: value, else: fail("number.max")
  end

  def max(value, _limit), do: value

  @doc """
  Validates that a number is positive (zero is accepted).

      iex> Mayo.Number.positive(2)
      2

      iex> Mayo.Number.positive(0)
      0

      iex> Mayo.Number.positive(-2)
      {:error, %Mayo.Error{type: "number.positive"}}
  """
  def positive(value) do
    # A non-error result from min/2 falls through the `with` unchanged.
    with {:error, err} <- Mayo.Number.min(value, 0) do
      {:error, %{err | type: "number.positive"}}
    end
  end

  @doc """
  Validates that a number is negative (zero is accepted).

      iex> Mayo.Number.negative(-2)
      -2

      iex> Mayo.Number.negative(0)
      0

      iex> Mayo.Number.negative(2)
      {:error, %Mayo.Error{type: "number.negative"}}
  """
  def negative(value) do
    with {:error, err} <- Mayo.Number.max(value, 0) do
      {:error, %{err | type: "number.negative"}}
    end
  end

  @doc """
  Validates that a number is odd. Non-numbers pass through untouched.

      iex> Mayo.Number.odd(1)
      1

      iex> Mayo.Number.odd(2)
      {:error, %Mayo.Error{type: "number.odd"}}
  """
  # rem/2 stays in the guard: for floats it fails the guard silently rather
  # than raising, so floats fall through to the error clause.
  def odd(value) when is_number(value) and rem(value, 2) == 1, do: value
  def odd(value) when is_number(value), do: fail("number.odd")
  def odd(value), do: value

  @doc """
  Validates that a number is even. Non-numbers pass through untouched.

      iex> Mayo.Number.even(2)
      2

      iex> Mayo.Number.even(1)
      {:error, %Mayo.Error{type: "number.even"}}
  """
  def even(value) when is_number(value) and rem(value, 2) == 0, do: value
  def even(value) when is_number(value), do: fail("number.even")
  def even(value), do: value

  @doc """
  Validates that a number is an integer. Non-numbers pass through untouched.

      iex> Mayo.Number.integer(2)
      2

      iex> Mayo.Number.integer(3.14)
      {:error, %Mayo.Error{type: "number.integer"}}
  """
  def integer(value) when is_integer(value), do: value
  def integer(value) when is_number(value), do: fail("number.integer")
  def integer(value), do: value

  @doc """
  Validates that a number is a float. Non-numbers pass through untouched.

      iex> Mayo.Number.float(3.14)
      3.14

      iex> Mayo.Number.float(3)
      {:error, %Mayo.Error{type: "number.float"}}
  """
  def float(value) when is_float(value), do: value
  def float(value) when is_number(value), do: fail("number.float")
  def float(value), do: value

  @doc """
  Validates that a number is strictly greater than `limit`. Non-numbers pass
  through untouched.

      iex> Mayo.Number.greater(4, 3)
      4

      iex> Mayo.Number.greater(3, 3)
      {:error, %Mayo.Error{type: "number.greater"}}
  """
  def greater(value, limit) when is_number(value) do
    if value > limit, do: value, else: fail("number.greater")
  end

  def greater(value, _limit), do: value

  @doc """
  Validates that a number is strictly less than `limit`. Non-numbers pass
  through untouched.

      iex> Mayo.Number.less(2, 3)
      2

      iex> Mayo.Number.less(3, 3)
      {:error, %Mayo.Error{type: "number.less"}}
  """
  def less(value, limit) when is_number(value) do
    if value < limit, do: value, else: fail("number.less")
  end

  def less(value, _limit), do: value

  # Shared constructor for validation failures.
  defp fail(type), do: {:error, %Mayo.Error{type: type}}
end
|
lib/mayo/number.ex
| 0.794744
| 0.49347
|
number.ex
|
starcoder
|
defmodule WandCore.WandFile do
  alias WandCore.WandFile
  alias WandCore.Interfaces.File
  @requirement "~> 1.0"
  @vsn "1.0.0"
  @moduledoc """
  Module describing the internal state of a wand file, along with helper functions to manipulate the dependencies and serialize the module to disk.
  ## Wand.json
  The format for wand.json looks like this:
  <pre>
  {
    "version": #{@vsn},
    "dependencies": {
      "dependency_name': dependency,
    }
  }
  </pre>
  A dependency can have the following serialized formats in wand.json:
  ### Simple dependency
  The value can be a string of the version requirement: `"poison": "~> 3.1.0"`.
  ### Dependency with just opts
  If, say, pulling from git, the value can be just a map of options: `"poison": {"git": "https://github.com/devinus/poison.git"}`
  ### Dependency with a version and opts
  Lastly, a dependency can be a list of `[requirement, opts]`. For example: `"poison": ["~> 3.1.0", {"only": ":test"}`
  """

  # Fixed: the previous @type declared `dependencies` as a map
  # (%{optional(atom()) => Dependency.t()}), but the struct default is `[]`
  # and every function treats it as a list (prepend in add/2, Enum.reject in
  # remove/2, Enum.find in exists?/2).
  @type t :: %__MODULE__{
          version: String.t(),
          dependencies: [WandCore.WandFile.Dependency.t()]
        }
  @type success :: {:ok, t}
  @type error :: {:error, any()}
  @type success_or_error :: success | error

  defstruct version: @vsn,
            dependencies: []

  defmodule Dependency do
    @type name :: String.t()
    @type requirement :: String.t() | nil
    @type source :: :hex | :git | :path
    @type t :: %__MODULE__{name: String.t(), requirement: requirement, opts: WandCore.Opts.t()}
    @enforce_keys [:name]
    @moduledoc """
    A dependency describes the information for a specific mix dependency, including its name, requirement string, and any options See `WandCore.WandFile` for more information.
    """
    defstruct name: nil, requirement: nil, opts: %{}

    @doc """
    Determine if the dependency is referring to a hex repository, a git repository, or a local path
    """
    @spec source(t) :: source
    def source(%Dependency{opts: opts}) do
      # :in_umbrella dependencies are local paths from the umbrella root.
      cond do
        Map.get(opts, :git) -> :git
        Map.get(opts, :path) -> :path
        Map.get(opts, :in_umbrella) -> :path
        true -> :hex
      end
    end
  end

  @doc """
  Add a new Dependency to a WandFile, unless the name already exists in the file
  """
  @spec add(t, Dependency.t()) :: success_or_error
  def add(%WandFile{} = file, %Dependency{} = dependency) do
    case exists?(file, dependency.name) do
      false ->
        # Prepend: dependency order in the file is not significant.
        file = update_in(file.dependencies, &[dependency | &1])
        {:ok, file}
      true ->
        {:error, {:already_exists, dependency.name}}
    end
  end

  @doc """
  Load a wand.json file from disk, and parse it into a WandFile
  """
  @spec load(Path.t()) :: success_or_error
  def load(path \\ "wand.json") do
    with {:ok, contents} <- read(path),
         {:ok, data} <- parse(contents),
         {:ok, wand_file} <- validate(data) do
      {:ok, wand_file}
    else
      error -> error
    end
  end

  @doc """
  Remove a dependency to a WandFile by name. Returns the file (always succeeds)
  """
  @spec remove(t, Dependency.name()) :: t
  def remove(%WandFile{} = file, name) do
    update_in(file.dependencies, fn dependencies ->
      Enum.reject(dependencies, &(&1.name == name))
    end)
  end

  @doc """
  Save the WandFile as a JSON file to the path indicated.
  """
  @spec save(t, Path.t()) :: :ok | error
  def save(%WandFile{} = file, path \\ "wand.json") do
    contents = WandCore.Poison.encode!(file, pretty: true)
    # `File` is aliased to WandCore.Interfaces.File (injectable for tests).
    File.impl().write(path, contents)
  end

  # Validates the decoded JSON: version must parse and satisfy @requirement,
  # and the dependencies map must decode into Dependency structs.
  defp validate(data) do
    with {:ok, version} <- validate_version(extract_version(data)),
         {:ok, dependencies} <- validate_dependencies(Map.get(data, :dependencies, %{})) do
      {:ok, %WandCore.WandFile{version: to_string(version), dependencies: dependencies}}
    else
      error -> error
    end
  end

  defp validate_dependencies(dependencies) when not is_map(dependencies),
    do: {:error, :invalid_dependencies}

  # Converts each {name, serialized} entry into a Dependency, then partitions
  # structs from {:error, _} tuples; the first error (if any) wins.
  defp validate_dependencies(dependencies) do
    {dependencies, errors} =
      Enum.map(dependencies, fn
        {name, [requirement, opts]} ->
          create_dependency(name, requirement, opts)
        {name, [opts]} ->
          create_dependency(name, nil, opts)
        {name, requirement} ->
          create_dependency(name, requirement, %{})
      end)
      |> Enum.split_with(fn
        %Dependency{} -> true
        _ -> false
      end)
    case errors do
      [] -> {:ok, dependencies}
      [error | _] -> error
    end
  end

  defp validate_version({:error, _} = error), do: error

  defp validate_version({:ok, version}) do
    if Version.match?(version, @requirement) do
      {:ok, version}
    else
      {:error, :version_mismatch}
    end
  end

  defp extract_version(%{version: version}) when is_binary(version) do
    case Version.parse(version) do
      :error -> {:error, :invalid_version}
      {:ok, version} -> {:ok, version}
    end
  end

  defp extract_version(%{version: _}), do: {:error, :invalid_version}
  defp extract_version(_data), do: {:error, :missing_version}

  # No requirement given (opts-only dependency, e.g. git/path sources).
  defp create_dependency(name, nil, opts) do
    name = to_string(name)
    opts = WandCore.Opts.decode(opts)
    %Dependency{name: name, opts: opts}
  end

  defp create_dependency(name, requirement, opts) do
    name = to_string(name)
    opts = WandCore.Opts.decode(opts)
    case Version.parse_requirement(requirement) do
      :error -> {:error, {:invalid_dependency, name}}
      _ -> %Dependency{name: name, requirement: requirement, opts: opts}
    end
  end

  defp exists?(%WandFile{dependencies: dependencies}, name) do
    Enum.find(dependencies, &(&1.name == name)) != nil
  end

  defp parse(contents) do
    case WandCore.Poison.decode(contents, keys: :atoms) do
      {:ok, data} -> {:ok, data}
      {:error, _reason} -> {:error, :json_decode_error}
    end
  end

  defp read(path) do
    case File.impl().read(path) do
      {:ok, contents} -> {:ok, contents}
      {:error, reason} -> {:error, {:file_read_error, reason}}
    end
  end
end
|
lib/wand_file.ex
| 0.676834
| 0.45302
|
wand_file.ex
|
starcoder
|
defmodule Phoenix.Token do
@moduledoc """
Tokens provide a way to generate, verify bearer
tokens for use in Channels or API authentication.
## Basic Usage
When generating a unique token for usage in an API or Channel
it is advised to use a unique identifier for the user typically
the id from a database. For example:
iex> user_id = 1
iex> token = Phoenix.Token.sign(endpoint, "user", user_id)
iex> Phoenix.Token.verify(endpoint, "user", token)
{:ok, 1}
In that example we have a user's id, we generate a token and send
it to the client. We could send it to the client in multiple ways.
One is via the meta tag:
      <%= tag :meta, name: "channel_token",
                     content: Phoenix.Token.sign(@conn, "user", @current_user.id) %>
Or an endpoint that returns it:
def create(conn, params) do
user = User.create(params)
render conn, "user.json",
%{token: Phoenix.Token.sign(conn, "user", user.id), user: user}
end
When using it with a socket a typical example might be:
defmodule MyApp.UserSocket do
use Phoenix.Socket
def connect(%{"token" => token}, socket) do
# Max age of 2 weeks (1209600 seconds)
case Phoenix.Token.verify(socket, "user", token, max_age: 1209600) do
{:ok, user_id} ->
socket = assign(socket, :user, Repo.get!(User, user_id))
{:ok, socket}
{:error, _} ->
:error
end
end
end
In this example the phoenix.js client will be sending up the token
in the connect command.
`Phoenix.Token` can also be used for validating APIs, handling
password resets, e-mail confirmation and more.
"""
alias Plug.Crypto.KeyGenerator
alias Plug.Crypto.MessageVerifier
@doc """
Signs your data into a token you can send down to clients
## Options
* `:key_iterations` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 1000;
* `:key_length` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 32;
* `:key_digest` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to `:sha256';
"""
def sign(context, salt, data, opts \\ []) when is_binary(salt) do
secret = get_endpoint(context) |> get_secret(salt, opts)
message = %{
data: data,
signed: now_ms()
} |> :erlang.term_to_binary()
MessageVerifier.sign(message, secret)
end
@doc """
Decrypts the token into the originaly present data.
## Options
* `:max_age` - verifies the token only if it has been generated
"max age" ago in seconds. A reasonable value is 2 weeks (`1209600`
seconds);
* `:key_iterations` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 1000;
* `:key_length` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 32;
* `:key_digest` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to `:sha256';
"""
def verify(context, salt, token, opts \\ [])
def verify(context, salt, token, opts) when is_binary(salt) and is_binary(token) do
secret = get_endpoint(context) |> get_secret(salt, opts)
case MessageVerifier.verify(token, secret) do
{:ok, message} ->
%{data: data, signed: signed} = :erlang.binary_to_term(message)
if (max_age = opts[:max_age]) && (signed + max_age) < now_ms() do
{:error, :expired}
else
{:ok, data}
end
:error ->
{:error, :invalid}
end
end
def verify(_context, salt, nil, _opts) when is_binary(salt) do
{:error, :missing}
end
defp get_endpoint(%Plug.Conn{} = conn), do: Phoenix.Controller.endpoint_module(conn)
defp get_endpoint(%Phoenix.Socket{} = socket), do: socket.endpoint
defp get_endpoint(endpoint) when is_atom(endpoint), do: endpoint
# Gathers configuration and generates the key secrets and signing secrets.
defp get_secret(endpoint, salt, opts) do
secret_key_base = endpoint.config(:secret_key_base)
iterations = Keyword.get(opts, :key_iterations, 1000)
length = Keyword.get(opts, :key_length, 32)
digest = Keyword.get(opts, :key_digest, :sha256)
key_opts = [iterations: iterations,
length: length,
digest: digest,
cache: Plug.Keys]
KeyGenerator.generate(secret_key_base, salt, key_opts)
end
defp time_to_ms({mega, sec, _micro}), do: (mega * 1000000 + sec) * 1000
defp now_ms, do: :os.timestamp() |> time_to_ms()
end
|
lib/phoenix/token.ex
| 0.838647
| 0.520862
|
token.ex
|
starcoder
|
defmodule Jackalope do
  use Supervisor

  require Logger

  # The public docs live in README.md between the MDOC markers; they are
  # read at compile time, so README.md must exist when this module compiles.
  @moduledoc "README.md"
             |> File.read!()
             |> String.split("<!-- MDOC !-->")
             |> Enum.fetch!(1)

  # Insecure localhost broker; intended for test/dev only (see the
  # `server` option documentation below).
  @default_mqtt_server {
    Tortoise311.Transport.Tcp,
    host: "localhost", port: 1883
  }

  # Cap on pending (unexpired) work orders kept while disconnected.
  @default_max_work_list_size 100

  @doc """
  Start a Jackalope session.

  This will start a supervised group of processes; part of the group
  will keep track of the topic filter subscription state, and hold a
  list of yet to be published messages, as well as the requested
  subscription changes; the other part of the process tree will keep
  the MQTT connection specific parts, making sure we got a
  connection. See the main documentation on the `Jackalope` module for
  more information on the process architecture.

  `Jackalope.start_link/1` takes a keyword list containing option
  values, that configure the instance, as an argument. The available
  options and their defaults are:

  - `client_id` (default: "jackalope"), string that will be used as
    the client_id of the MQTT connection; see `t Tortoise311.client_id`
    for more information on valid client ids. Notice that the
    client_id needs to be unique on the server, so two clients may not
    have the same client_id.

  - `initial_topics` (optional) specifies a list of topic_filters
    Jackalope should connect to when a connection has been
    established. Notice that this list is also used should a reconnect
    happen later in the life-cycle. Note that Jackalope does not support
    dynamic subscriptions or unsubscribing. This is the only mechanism
    for subscribing.

  - `handler` (default: `Jackalope.Handler.Logger`) specifies the
    module implementing the callbacks (implementing
    `Jackalope.Handler` behaviour) to use. This module reacts to
    the events Jackalope communicates about the connection
    life-cycle, including receiving a message on a
    subscribed topic filter. Read the documentation for
    `Jackalope.Handler` for more information on the events and
    callbacks.

  - `server` (default: #{inspect(@default_mqtt_server)}) specifies the
    connection type, and its options, to use when connecting to the
    MQTT server. The default specification will attempt to connect to
    a broker running on localhost:1883, on an insecure
    connection. This value should only be used for testing and
    development.

    Server options for use with AWS IoT:

        [
          verify: :verify_peer,
          host: mqtt_host(), # must return the full name, *without wild cards*, for e.g. "abcdefghijklmo-ats.iot.us-east-1.amazonaws.com"
          port: mqtt_port(), # must return the correct port, e.g. 443
          alpn_advertised_protocols: ["x-amzn-mqtt-ca"],
          server_name_indication: to_charlist(mqtt_host()),
          cert: cert, # the device's X509 certificate in DER format
          key: key, # the device's private key in DER format
          cacerts: [signer_cert] ++ aws_ca_certs(), # the device's signer cert, plus AWS IoT CA certs in DER format to be returned by aws_ca_certs()
          versions: [:"tlsv1.2"],
          customize_hostname_check: [match_fun: :public_key.pkix_verify_hostname_match_fun(:https)]
        ]

  - `max_work_list_size` (default: #{@default_max_work_list_size}) specifies the maximum
    number of unexpired work orders Jackalope will retain in its work list
    (the commands yet to be sent to the MQTT server). When the maximum is
    reached, the oldest work order is dropped before adding a new work order.

  - `last_will` (default: nil) specifies the last will message that
    should get published on the MQTT broker if the connection is
    closed or dropped unexpectedly. If we want to specify a last will
    topic we should define a keyword list containing the following:

    - `topic` (Required) the topic to post the last will message to;
      this should be specified as a string and it should be a valid
      MQTT topic; consult `t Tortoise311.topic` for more info on valid
      MQTT topics.

    - `payload` (default: nil) the payload of the last will message;
      notice that we will attempt to JSON encode the payload term
      (unless it is nil), so it will fail if the data fails the JSON
      encode.

    - `qos` (default: 0) either 0 or 1, denoting the quality of
      service the last will message should get published with; note
      that QoS=2 is not supported by AWS IoT.

  - `backoff` (default: [min_interval: 1_000, max_interval: 30_000])
    gives the bounds of an exponential backoff algorithm used when retrying
    from failed connections.
  """
  @spec start_link(keyword()) :: Supervisor.on_start()
  def start_link(opts) do
    Supervisor.start_link(__MODULE__, opts)
  end

  @impl Supervisor
  def init(opts) do
    client_id = Keyword.get(opts, :client_id, "jackalope")
    jackalope_handler = Keyword.get(opts, :handler, Jackalope.Handler.Logger)
    max_work_list_size = Keyword.get(opts, :max_work_list_size, @default_max_work_list_size)

    # The Session holds the work list and subscription state; the
    # Supervisor subtree owns the actual MQTT connection.
    children = [
      {Jackalope.Session,
       [
         handler: jackalope_handler,
         max_work_list_size: max_work_list_size
       ]},
      {Jackalope.Supervisor,
       [
         handler: jackalope_handler,
         client_id: client_id,
         connection_options: connection_options(opts),
         last_will: Keyword.get(opts, :last_will)
       ]}
    ]

    # Supervision strategy is rest for one, as a crash in Jackalope
    # would result in inconsistent state in Jackalope; we would not be
    # able to know about the subscription state; so we teardown the
    # tortoise311 if Jackalope crash. Should the Jackalope.Supervisor
    # crash, Jackalope should resubscribe to the topic filters it
    # currently know about, so that should be okay.
    Supervisor.init(children, strategy: :rest_for_one)
  end

  @doc """
  Request the MQTT client to reconnect to the broker.

  This can be useful on devices that have multiple network
  interfaces.
  """
  @spec reconnect() :: :ok
  defdelegate reconnect(), to: Jackalope.Session

  @doc """
  Publish a message to the MQTT broker.

  The `payload` will get published on `topic`. `Jackalope` will keep
  the message in a queue until we got a connection, at which point it
  will dispatch the publish. This of course present us with a problem:
  what if we place a publish request to "unlock the front door" while
  the client is offline? We don't want to receive a message that the
  front door has been unlocked two hours later when the MQTT client
  reconnect; To solve that problem we have a `ttl` option we can
  specify on the publish.

  ```elixir
  Jackalope.publish("doors/front_door", %{action: "unlock"}, qos: 1, ttl: 5_000)
  ```

  The available publish options are:

  - `qos` (default `1`) sets the quality of service of the message
    delivery; Notice that only quality of service 0 and 1 are
    supported by AWS IoT.

  - `retain` (default `false`) sets whether the broker should retain the message.
    Note that AWS IoT does not support this feature.

  - `ttl` (default `3_600_000`) sets how long (in msecs) publishing the message will be
    retried until it has expired.

  Notice that Jackalope will JSON encode the `payload`; so the data
  should be JSON encodable.
  """
  @spec publish(String.t(), any, options) ::
          :ok | {:error, :invalid_qos}
        when options: [
               {:qos, 0..2} | {:retain, boolean} | {:ttl, non_neg_integer}
             ]
  defdelegate publish(topic, payload, opts \\ []), to: Jackalope.Session

  # TODO Get rid of this stuff
  # Builds the options handed to the MQTT connection: transport spec,
  # backoff bounds and the initial subscriptions.
  defp connection_options(opts) do
    server =
      Keyword.get(opts, :server, @default_mqtt_server)
      |> do_configure_server()

    # Default backoff options is 1 sec to 30 secs, doubling each time.
    backoff_opts = Keyword.get(opts, :backoff) || [min_interval: 1_000, max_interval: 30_000]
    Logger.info("[Jackalope] Connecting with backoff options #{inspect(backoff_opts)}")
    initial_topics = Keyword.get(opts, :initial_topics)

    # Every initial topic filter is subscribed to at QoS 1.
    subscriptions =
      for topic_filter <- List.wrap(initial_topics),
          do: {topic_filter, 1}

    [
      server: server,
      backoff: backoff_opts,
      subscriptions: subscriptions
    ]
  end

  # Pass normal Tortoise311 transports through as is; assume that the
  # configuration is correct!
  defp do_configure_server({Tortoise311.Transport.Tcp, _opts} = keep), do: keep
  defp do_configure_server({Tortoise311.Transport.SSL, _opts} = keep), do: keep

  # Attempt to create setup a connection that works with AWS IoT
  defp do_configure_server(aws_iot_opts) when is_list(aws_iot_opts) do
    # TODO improve the user experience when working with AWS IoT and
    # then remove this raise
    raise ArgumentError, "Please specify a Tortoise311 transport for the server"
  end
end
|
lib/jackalope.ex
| 0.820469
| 0.535402
|
jackalope.ex
|
starcoder
|
defmodule Semigroup do
  # NOTE: the original @typedoc was copied from a Functor module (it
  # described `fmap`/`lift_left`); it now documents this dictionary, and
  # the type lists all three struct fields instead of just `<>`.
  @typedoc """
  Semigroup dictionary.

  * `<>`: (a, a) -> a — an associative binary operation; mandatory
  * `sconcat`: [a] -> a — reduces a non-empty list with `<>`; default provided
  * `stimes`: (a, pos_integer) -> a — combines a value with itself n times; default provided
  """
  @type t :: %__MODULE__{
    <>: (any, any -> any),
    sconcat: ([any] -> any),
    stimes: (any, pos_integer -> any),
  }

  @doc """
  An associative operation.

      (<>) : (a, a) -> a

  For example, a list Semigroup would concatenate:

      sg = Semigroup.define([{:<>, &Kernel.++/2}])
      Map.fetch!(sg, :<>).([1, 2, 3], [4, 5, 6])
      #=> [1, 2, 3, 4, 5, 6]
  """
  @callback any <> any :: any

  # Hand-rolled struct definition (operator keys prevent `defstruct`);
  # the defaults raise so a dictionary built without `define/1` fails loudly.
  def __struct__, do: %{
    __struct__: __MODULE__,
    <>: fn _, _ -> raise("Semigroup: missing definition for concat") end,
    sconcat: fn _ -> raise("Semigroup: missing definition for sconcat") end,
    stimes: fn _, _ -> raise("Semigroup: missing definition for stimes") end,
  }

  # Struct-literal constructor: all three keys are required, mirroring
  # the error message `@enforce_keys` would produce.
  def __struct__(kv) do
    required_keys = [:<>, :sconcat, :stimes]

    {map, keys} =
      Enum.reduce(kv, {__struct__(), required_keys}, fn {key, val}, {map, keys} ->
        {Map.replace!(map, key, val), List.delete(keys, key)}
      end)

    case keys do
      [] ->
        map

      _ ->
        raise ArgumentError,
              "the following keys must also be given when building " <>
                "struct #{inspect(__MODULE__)}: #{inspect(keys)}"
    end
  end

  @doc """
  Builds a Semigroup dictionary from an enumerable of `{key, fun}` pairs.

  `:<>` is mandatory; `:sconcat` and `:stimes` receive law-abiding
  defaults derived from `:<>` when omitted.
  """
  def define(t) do
    t = Map.new(t)
    concat = Map.fetch!(t, :<>)
    # Non-empty list fold; raises FunctionClauseError on [].
    sconcat = Map.get(t, :sconcat, fn [x | xs] -> List.foldr(xs, x, concat) end)
    stimes = Map.get(t, :stimes, fn x, n -> stimes_default(concat, n, x) end)

    %__MODULE__{
      <>: concat,
      sconcat: sconcat,
      stimes: stimes,
    }
  end

  # Guard intentionally rejects n <= 0 (FunctionClauseError), mirroring
  # Haskell's `stimes` precondition.
  def stimes_default(cct, n, x) when n > 0, do: sdgf(cct, x, n)

  # Exponentiation by squaring: O(log n) applications of the operation.
  import Integer

  def sdgf(cct, x, n) when is_even(n), do: sdgf(cct, cct.(x, x), div(n, 2))
  def sdgf(_, x, 1), do: x
  def sdgf(cct, x, n), do: sdgg(cct, cct.(x, x), div(n, 2), x)

  # Variant carrying `z`, the product of the odd "leftover" factors.
  def sdgg(cct, x, n, z) when is_even(n), do: sdgg(cct, cct.(x, x), div(n, 2), z)
  def sdgg(cct, x, 1, z), do: cct.(x, z)
  def sdgg(cct, x, n, z), do: sdgg(cct, cct.(x, x), div(n, 2), cct.(x, z))
end
|
typeclassopedia/lib/semigroup.ex
| 0.854536
| 0.555194
|
semigroup.ex
|
starcoder
|
defmodule TableRex.Table do
  @moduledoc """
  A set of functions for working with tables.

  The `Table` is represented internally as a struct though the
  fields are private and must not be accessed directly. Instead,
  use the functions in this module.
  """
  alias TableRex.Cell
  alias TableRex.Column
  alias TableRex.Renderer
  alias TableRex.Table

  # NOTE: `rows` is kept in reverse insertion order (newest first) —
  # see add_row/2 and put_cell_meta/4 below.
  defstruct title: nil, header_row: [], rows: [], columns: %{}, default_column: %Column{}

  @type t :: %__MODULE__{}

  @default_renderer Renderer.Text

  @doc """
  Creates a new blank table.

  The table created will not be able to be rendered until it has some row data.

  ## Examples

      iex> Table.new
      %TableRex.Table{}

  """
  @spec new() :: Table.t
  def new, do: %Table{}

  @doc """
  Creates a new table with an initial set of rows and an optional header and title.
  """
  @spec new(list, list, String.t) :: Table.t
  def new(rows, header_row \\ [], title \\ nil) when is_list(rows) and is_list(header_row) do
    new()
    |> put_title(title)
    |> put_header(header_row)
    |> add_rows(rows)
  end

  # Mutation API

  @doc """
  Sets a string as the optional table title.

  Set to `nil` or `""` to remove an already set title from renders.
  """
  @spec put_title(Table.t, String.t | nil) :: Table.t
  def put_title(%Table{} = table, ""), do: put_title(table, nil)
  def put_title(%Table{} = table, title) when is_binary(title) or is_nil(title) do
    %Table{table | title: title}
  end

  @doc """
  Sets a list as the optional header row.

  Set to `nil` or `[]` to remove an already set header from renders.
  """
  @spec put_header(Table.t, list | nil) :: Table.t
  def put_header(%Table{} = table, nil), do: put_header(table, [])
  def put_header(%Table{} = table, header_row) when is_list(header_row) do
    # Raw values are normalized to %Cell{} structs.
    new_header_row = Enum.map(header_row, &Cell.to_cell(&1))
    %Table{table | header_row: new_header_row}
  end

  @doc """
  Sets column level information such as padding and alignment.
  """
  @spec put_column_meta(Table.t, integer | atom | Enum.t, Keyword.t) :: Table.t
  def put_column_meta(%Table{} = table, col_index, col_meta) when is_integer(col_index) and is_list(col_meta) do
    col_meta = col_meta |> Enum.into(%{})
    col = get_column(table, col_index) |> Map.merge(col_meta)
    new_columns = Map.put(table.columns, col_index, col)
    %Table{table | columns: new_columns}
  end

  def put_column_meta(%Table{} = table, :all, col_meta) when is_list(col_meta) do
    col_meta = col_meta |> Enum.into(%{})
    # First update default column, then any already set columns.
    table = put_in(table.default_column, Map.merge(table.default_column, col_meta))
    new_columns = Enum.reduce(table.columns, %{}, fn({col_index, col}, acc) ->
      new_col = Map.merge(col, col_meta)
      Map.put(acc, col_index, new_col)
    end)
    %Table{table | columns: new_columns}
  end

  # Applies the same meta to every column index in the enumerable.
  def put_column_meta(%Table{} = table, col_indexes, col_meta) when is_list(col_meta) do
    Enum.reduce(col_indexes, table, &put_column_meta(&2, &1, col_meta))
  end

  @doc """
  Sets cell level information such as alignment.
  """
  @spec put_cell_meta(Table.t, integer, integer, Keyword.t) :: Table.t
  def put_cell_meta(%Table{} = table, col_index, row_index, cell_meta) when is_integer(col_index) and is_integer(row_index) and is_list(cell_meta) do
    cell_meta = cell_meta |> Enum.into(%{})
    # Rows are stored in reverse insertion order, so a caller-facing
    # row_index is translated to a negative index from the end.
    inverse_row_index = -(row_index + 1)
    rows = List.update_at(table.rows, inverse_row_index, fn(row) ->
      List.update_at(row, col_index, &Map.merge(&1, cell_meta))
    end)
    %Table{table | rows: rows}
  end

  @doc """
  Sets cell level information for the header cells.
  """
  @spec put_header_meta(Table.t, integer | Enum.t, Keyword.t) :: Table.t
  def put_header_meta(%Table{} = table, col_index, cell_meta) when is_integer(col_index) and is_list(cell_meta) do
    cell_meta = cell_meta |> Enum.into(%{})
    header_row = List.update_at(table.header_row, col_index, &Map.merge(&1, cell_meta))
    %Table{table | header_row: header_row}
  end

  # Applies the same meta to every header cell index in the enumerable.
  def put_header_meta(%Table{} = table, col_indexes, cell_meta) when is_list(cell_meta) do
    Enum.reduce(col_indexes, table, &put_header_meta(&2, &1, cell_meta))
  end

  @doc """
  Adds a single row to the table.
  """
  @spec add_row(Table.t, list) :: Table.t
  def add_row(%Table{} = table, row) when is_list(row) do
    new_row = Enum.map(row, &Cell.to_cell(&1))
    # O(1) prepend; rows are therefore newest-first internally.
    %Table{table | rows: [new_row | table.rows]}
  end

  @doc """
  Adds multiple rows to the table.
  """
  @spec add_rows(Table.t, list) :: Table.t
  def add_rows(%Table{} = table, rows) when is_list(rows) do
    # Reversed so the combined list stays in reverse insertion order.
    rows = rows
    |> Enum.reverse
    |> Enum.map(fn row ->
      Enum.map(row, &Cell.to_cell(&1))
    end)
    %Table{table | rows: rows ++ table.rows}
  end

  @doc """
  Removes column meta for all columns, effectively resetting
  column meta back to the default options across the board.
  """
  @spec clear_all_column_meta(Table.t) :: Table.t
  def clear_all_column_meta(%Table{} = table) do
    %Table{table | columns: %{}}
  end

  @doc """
  Removes all row data from the table, keeping everything else.
  """
  @spec clear_rows(Table.t) :: Table.t
  def clear_rows(%Table{} = table) do
    %Table{table | rows: []}
  end

  # Retrieval API

  # Falls back to the table's default column when no explicit meta is set.
  defp get_column(%Table{} = table, col_index) when is_integer(col_index) do
    Map.get(table.columns, col_index, table.default_column)
  end

  @doc """
  Retrieves the value of a column meta option at the specified col_index.

  If no value has been set, default values are returned.
  """
  @spec get_column_meta(Table.t, integer, atom) :: any
  def get_column_meta(%Table{} = table, col_index, key) when is_integer(col_index) and is_atom(key) do
    get_column(table, col_index)
    |> Map.fetch!(key)
  end

  @doc """
  Returns a boolean detailing if the passed table has any row data set.
  """
  @spec has_rows?(Table.t) :: boolean
  def has_rows?(%Table{rows: []}), do: false
  def has_rows?(%Table{rows: rows}) when is_list(rows), do: true

  @doc """
  Returns a boolean detailing if the passed table has a header row set.
  """
  @spec has_header?(Table.t) :: boolean
  def has_header?(%Table{header_row: []}), do: false
  def has_header?(%Table{header_row: header_row}) when is_list(header_row), do: true

  # Rendering API

  @doc """
  Renders the current table state to string, ready for display via `IO.puts/2` or other means.

  At least one row must have been added before rendering.

  Returns `{:ok, rendered_string}` on success and `{:error, reason}` on failure.
  """
  @spec render(Table.t, list) :: Renderer.render_return
  def render(%Table{} = table, opts \\ []) when is_list(opts) do
    {renderer, opts} = Keyword.pop(opts, :renderer, @default_renderer)
    # Caller options override the renderer's defaults.
    opts = opts |> Enum.into(renderer.default_options)
    if Table.has_rows?(table) do
      renderer.render(table, opts)
    else
      {:error, "Table must have at least one row before being rendered"}
    end
  end

  @doc """
  Renders the current table state to string, ready for display via `IO.puts/2` or other means.

  At least one row must have been added before rendering.

  Returns the rendered string on success, or raises `TableRex.Error` on failure.
  """
  @spec render!(Table.t, list) :: String.t | no_return
  def render!(%Table{} = table, opts \\ []) when is_list(opts) do
    case render(table, opts) do
      {:ok, rendered_string} -> rendered_string
      {:error, reason} -> raise TableRex.Error, message: reason
    end
  end
end
|
lib/table_rex/table.ex
| 0.904405
| 0.563798
|
table.ex
|
starcoder
|
defmodule TILEX.Behaviours do
  defmodule Greet do
    @moduledoc """
    Defines a behaviour to greet people hello and goodbye in different languages.

    Behaviours provide a way to:
    - define a set of functions that have to be implemented by a module;
    - ensure that a module implements all the functions in that set.

    Many modules share the same public API. For example: Each plug is a module
    which has to implement at least two public functions: init/1 and call/2.
    """

    # BUG FIX: the callbacks were typed `:: nil`, but every implementation
    # below returns the greeting string, so they are typed `String.t`.
    @callback say_hello(name :: String.t) :: String.t
    @callback say_bye(name :: String.t) :: String.t

    @doc """
    Dynamic dispatching of say_hello/1.

    ## Examples

        iex> alias TILEX.Behaviours.Greet
        iex> Greet.dispatch_say_hello(TILEX.Behaviours.SpanishGreet, "Rob")
        "Hola Rob"

        iex> alias TILEX.Behaviours.Greet
        iex> Greet.dispatch_say_hello(TILEX.Behaviours.EnglishGreet, "Rob")
        "Hello Rob"

    """
    def dispatch_say_hello(implementation, name) do
      implementation.say_hello(name)
    end
  end

  defmodule EnglishGreet do
    @moduledoc """
    Implementation of the Greet behaviour for the English language.
    """
    @behaviour Greet

    @doc """
    Greets `name` in English.

    ## Examples

        iex> TILEX.Behaviours.EnglishGreet.say_hello("Rafa")
        "Hello Rafa"

    """
    def say_hello(name), do: "Hello " <> name

    @doc """
    Says goodbye to `name` in English.

    ## Examples

        iex> TILEX.Behaviours.EnglishGreet.say_bye("Rafa")
        "Goodbye Rafa"

    """
    def say_bye(name), do: "Goodbye " <> name
  end

  defmodule SpanishGreet do
    @moduledoc """
    Implementation of the Greet behaviour for the Spanish language.
    """
    @behaviour Greet

    @doc """
    Greets `name` in Spanish.

    ## Examples

        iex> TILEX.Behaviours.SpanishGreet.say_hello("Rafa")
        "Hola Rafa"

    """
    def say_hello(name), do: "Hola " <> name

    @doc """
    Says goodbye to `name` in Spanish.

    ## Examples

        iex> TILEX.Behaviours.SpanishGreet.say_bye("Rafa")
        "Adios Rafa"

    """
    def say_bye(name), do: "Adios " <> name
  end
end
|
lib/behaviours.ex
| 0.683314
| 0.676092
|
behaviours.ex
|
starcoder
|
defmodule Exop.Chain do
  @moduledoc """
  Provides macros to organize a number of Exop.Operation modules into an invocation chain.

  ## Example

      defmodule CreateUser do
        use Exop.Chain

        alias Operations.{User, Backoffice, Notifications}

        operation User.Create
        operation Backoffice.SaveStats
        operation Notifications.SendEmail
      end

      # CreateUser.run(name: "John Doe", age: 37, gender: "m")

  `Exop.Chain` defines `run/1` function that takes `keyword()` or `map()` of params.
  Those params will be passed into the first operation in the chain.

  Bear in mind that each of chained operations (except the first one) awaits a returned result of
  a previous operation as incoming params.

  So in the example above `CreateUser.run(name: "John Doe", age: 37, gender: "m")` will invoke
  the chain by passing `[name: "John Doe", age: 37, gender: "m"]` params to the first `User.Create`
  operation. The result of `User.Create` operation will be passed to `Backoffice.SaveStats`
  operation as its params and so on.

  Once any of operations in the chain returns non-ok-tuple result (error result, interruption,
  auth error etc.) the chain execution interrupts and error result returned (as the chain
  (`CreateUser`) result).
  """

  defmacro __using__(opts \\ []) do
    quote do
      import unquote(__MODULE__)

      # Accumulated in reverse registration order; `run/1` reverses it back.
      Module.register_attribute(__MODULE__, :operations, accumulate: true)

      # With `name_in_error: true`, failed results are tagged with the
      # operation module that produced them.
      @error_includes_operation_name unquote(opts)[:name_in_error] == true

      @before_compile unquote(__MODULE__)
    end
  end

  @doc """
  Registers an operation module as the next step of the chain.

  `additional_params` are merged into the params the operation receives.
  """
  defmacro operation(operation, additional_params \\ []) do
    quote bind_quoted: [operation: operation, additional_params: additional_params] do
      # Fail at compile time if the operation module does not exist.
      {:module, operation} = Code.ensure_compiled(operation)

      @operations %{operation: operation, additional_params: additional_params}
    end
  end

  @doc """
  Alias for `operation/2`.
  """
  defmacro step(operation, additional_params \\ []) do
    quote bind_quoted: [operation: operation, additional_params: additional_params] do
      {:module, operation} = Code.ensure_compiled(operation)

      @operations %{operation: operation, additional_params: additional_params}
    end
  end

  defmacro __before_compile__(_env) do
    quote do
      alias Exop.Validation

      @type interrupt_result :: {:interrupt, any}
      @type auth_result :: :ok | no_return
      # throws:
      #   {:error, {:auth, :undefined_policy}} |
      #   {:error, {:auth, :unknown_policy}} |
      #   {:error, {:auth, :unknown_action}} |
      #   {:error, {:auth, atom}}

      @not_ok :exop_not_ok

      @doc """
      Invokes all operations defined in a chain. Returns either a result of the last operation
      in the chain or the first result that differs from ok-tuple (validation error, for example).
      """
      @spec run(Keyword.t() | map() | nil) ::
              {:ok, any} | Validation.validation_error() | interrupt_result | auth_result
      def run(received_params) do
        try do
          ok_result = @operations |> Enum.reverse() |> invoke_operations({:ok, received_params})
          {:ok, ok_result}
        catch
          {@not_ok, not_ok_result, operation} ->
            add_operation_name(@error_includes_operation_name, not_ok_result, operation)

          {@not_ok, not_ok_result} ->
            not_ok_result
        end
      end

      defp add_operation_name(true, not_ok_result, operation), do: {operation, not_ok_result}
      defp add_operation_name(_, not_ok_result, _), do: not_ok_result

      @spec invoke_operations([%{operation: atom(), additional_params: Keyword.t()}], any()) ::
              any()
      defp invoke_operations([], result) do
        result
      end

      # Last operation in the chain: unwrap the ok-tuple so `run/1` can
      # re-wrap it once. (The original had an unreachable bare `@not_ok`
      # expression after each `throw/1`; that dead code has been removed.)
      defp invoke_operations(
             [%{operation: operation, additional_params: additional_params} | []],
             {:ok, params} = _result
           ) do
        params = params |> merge_params(additional_params) |> resolve_params_values()

        case apply(operation, :run, [params]) do
          result when is_tuple(result) and elem(result, 0) == :error ->
            throw({@not_ok, result, operation})

          {:ok, result} ->
            result

          result ->
            throw({@not_ok, result})
        end
      end

      # Intermediate operation: its ok-result becomes the next operation's params.
      defp invoke_operations(
             [%{operation: operation, additional_params: additional_params} | tail],
             {:ok, params} = _result
           ) do
        params = params |> merge_params(additional_params) |> resolve_params_values()

        case apply(operation, :run, [params]) do
          result when is_tuple(result) and elem(result, 0) == :error ->
            throw({@not_ok, result, operation})

          {:ok, _} = result ->
            invoke_operations(tail, result)

          result ->
            throw({@not_ok, result})
        end
      end

      # Any non-ok intermediate result aborts the chain immediately.
      defp invoke_operations(_operations, not_ok = _result) do
        throw({@not_ok, not_ok})
      end

      # Normalizes both param sources to maps before merging;
      # additional params win on key conflicts.
      @spec merge_params(map() | keyword(), map() | keyword()) :: map()
      defp merge_params(params, additional_params)
           when is_map(params) and is_map(additional_params) do
        Map.merge(params, additional_params)
      end

      defp merge_params(params, additional_params)
           when is_list(params) and is_map(additional_params) do
        params |> Enum.into(%{}) |> Map.merge(additional_params)
      end

      defp merge_params(params, additional_params)
           when is_map(params) and is_list(additional_params) do
        Map.merge(params, Enum.into(additional_params, %{}))
      end

      defp merge_params(params, additional_params)
           when is_list(params) and is_list(additional_params) do
        params |> Enum.into(%{}) |> Map.merge(Enum.into(additional_params, %{}))
      end

      # Zero-arity functions as param values are invoked lazily here,
      # right before the operation runs.
      @spec resolve_params_values(map()) :: map()
      defp resolve_params_values(params) do
        Enum.reduce(params, %{}, fn {k, v}, acc ->
          v = if is_function(v), do: v.(), else: v
          Map.put(acc, k, v)
        end)
      end
    end
  end
end
|
lib/exop/chain.ex
| 0.885983
| 0.598371
|
chain.ex
|
starcoder
|
defmodule Livebook.ANSI.Modifier do
  @moduledoc false

  # Generates an `ansi_prefix_to_modifier/1` clause in the calling module
  # that matches the given ANSI code (without the leading "\e[") followed
  # by `terminator` and returns `{:ok, modifier, rest}`.
  defmacro defmodifier(modifier, code, terminator \\ "m") do
    quote bind_quoted: [modifier: modifier, code: code, terminator: terminator] do
      defp ansi_prefix_to_modifier(unquote("#{code}#{terminator}") <> rest) do
        {:ok, unquote(modifier), rest}
      end
    end
  end
end
defmodule LivebookWeb.ANSI do
@moduledoc false
import Livebook.ANSI.Modifier
# modifier ::
# :reset
# | {:font_weight, :bold | :light | :reset}
# | {:font_style, :italic | :reset}
# | {:text_decoration, :underline | :line_through | :overline | :reset}
# | {:foreground_color, color() | :reset}
# | {:background_color, color() | :reset}
# | :ignored
# color :: atom() | {:grayscale24, 0..23} | {:rgb6, 0..5, 0..5, 0..5}
@doc """
Takes a string with ANSI escape codes and build a HTML safe string
with `span` tags having classes corresponding to the escape codes.

Any HTML in the string is escaped.

## Options

  * `:renderer` - a function used to render styled HTML content.
    The function receives HTML styles string and HTML-escaped content (iodata).
    By default the renderer wraps the whole content in a single `<span>` tag with the given style.
    Note that the style may be an empty string for plain text.
"""
@spec ansi_string_to_html(String.t(), keyword()) :: Phoenix.HTML.safe()
def ansi_string_to_html(string, opts \\ []) do
  renderer = Keyword.get(opts, :renderer, &default_renderer/2)

  # Split on the ANSI escape introducer; `head` contains no escape codes.
  [head | ansi_prefixed_strings] = String.split(string, "\e[")
  {:safe, head_html} = Phoenix.HTML.html_escape(head)
  head_html = renderer.("", head_html)

  # Each pair has the form of {modifiers, html_content}. Modifiers
  # accumulate across chunks until a reset clears them.
  {pairs, _} =
    Enum.map_reduce(ansi_prefixed_strings, %{}, fn string, modifiers ->
      {modifiers, rest} =
        case ansi_prefix_to_modifier(string) do
          {:ok, modifier, rest} ->
            modifiers = add_modifier(modifiers, modifier)
            {modifiers, rest}

          {:error, _rest} ->
            # Unrecognized sequence: keep it verbatim in the output.
            {modifiers, "\e[" <> string}
        end

      {:safe, content} = Phoenix.HTML.html_escape(rest)
      {{modifiers, content}, modifiers}
    end)

  # Drop empty chunks so no empty tags are emitted.
  pairs = Enum.filter(pairs, fn {_modifiers, content} -> content not in ["", []] end)
  tail_html = pairs_to_html(pairs, renderer)
  Phoenix.HTML.raw([head_html, tail_html])
end
# Below goes a number of `ansi_prefix_to_modifier` function definitions,
# that take a string like "32msomething" (starting with ANSI code without the leading "\e[")
# and parse the prefix into the corresponding modifier.
# The function returns either {:ok, modifier, rest} or {:error, rest}

defmodifier(:reset, 0)

@colors [:black, :red, :green, :yellow, :blue, :magenta, :cyan, :white]

# Standard foreground (30-37) / background (40-47) colors, plus the
# bright variants (90-97 / 100-107).
for {color, index} <- Enum.with_index(@colors) do
  defmodifier({:foreground_color, color}, 30 + index)
  defmodifier({:background_color, color}, 40 + index)
  defmodifier({:foreground_color, :"light_#{color}"}, 90 + index)
  defmodifier({:background_color, :"light_#{color}"}, 100 + index)
end

defmodifier({:foreground_color, :reset}, 39)
defmodifier({:background_color, :reset}, 49)
defmodifier({:font_weight, :bold}, 1)
defmodifier({:font_weight, :light}, 2)
defmodifier({:font_style, :italic}, 3)
defmodifier({:text_decoration, :underline}, 4)
defmodifier({:text_decoration, :line_through}, 9)
defmodifier({:font_weight, :reset}, 22)
defmodifier({:font_style, :reset}, 23)
defmodifier({:text_decoration, :reset}, 24)
defmodifier({:text_decoration, :overline}, 53)
defmodifier({:text_decoration, :reset}, 55)
# 8-bit foreground color sequence: "38;5;<n>m".
defp ansi_prefix_to_modifier("38;5;" <> string) do
  case bit8_prefix_to_color(string) do
    {:ok, color, rest} -> {:ok, {:foreground_color, color}, rest}
    {:error, _} = error -> error
  end
end

# 8-bit background color sequence: "48;5;<n>m".
defp ansi_prefix_to_modifier("48;5;" <> string) do
  case bit8_prefix_to_color(string) do
    {:ok, color, rest} -> {:ok, {:background_color, color}, rest}
    {:error, _} = error -> error
  end
end
# Parses a decimal 8-bit color code terminated by "m" (e.g. "196mrest")
# into {:ok, color, rest}; anything else yields {:error, string}.
defp bit8_prefix_to_color(string) do
  with {n, "m" <> rest} when n in 0..255 <- Integer.parse(string) do
    {:ok, color_from_code(n), rest}
  else
    _ -> {:error, string}
  end
end
# Recognized-but-ignored SGR codes (blink, conceal, font selection, frames, etc.).
ignored_codes = [5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 25, 27, 51, 52, 54]

for code <- ignored_codes do
  defmodifier(:ignored, code)
end

# Cursor movement (A-D, H) and erase (J, K) sequences are ignored too.
defmodifier(:ignored, 1, "A")
defmodifier(:ignored, 1, "B")
defmodifier(:ignored, 1, "C")
defmodifier(:ignored, 1, "D")
defmodifier(:ignored, 2, "J")
defmodifier(:ignored, 2, "K")
defmodifier(:ignored, "", "H")

# Fallback: not a recognized escape sequence.
defp ansi_prefix_to_modifier(string), do: {:error, string}
# Maps an 8-bit color code onto the internal color representation:
# 0-7 named colors, 8-15 bright ("light_") variants, 16-231 a 6x6x6
# RGB cube, 232-255 a 24-step grayscale ramp.
defp color_from_code(code) when code in 0..7, do: Enum.at(@colors, code)

defp color_from_code(code) when code in 8..15 do
  base = Enum.at(@colors, code - 8)
  :"light_#{base}"
end

defp color_from_code(code) when code in 16..231 do
  offset = code - 16
  # offset = 36r + 6g + b with each component in 0..5
  {red, green_blue} = {div(offset, 36), rem(offset, 36)}
  {:rgb6, red, div(green_blue, 6), rem(green_blue, 6)}
end

defp color_from_code(code) when code in 232..255, do: {:grayscale24, code - 232}
# Folds one parsed modifier into the accumulated modifier map:
# :ignored is a no-op, :reset clears everything, {key, :reset}
# clears one property, and any other pair sets that property.
defp add_modifier(modifiers, modifier) do
  case modifier do
    :ignored -> modifiers
    :reset -> %{}
    {key, :reset} -> Map.delete(modifiers, key)
    {key, value} -> Map.put(modifiers, key, value)
  end
end
# Turns a list of {modifiers, html_content} pairs into styled HTML iodata.
defp pairs_to_html(pairs, iodata \\ [], renderer)

defp pairs_to_html([], iodata, _renderer), do: iodata

defp pairs_to_html([{modifiers, left}, {modifiers, right} | rest], iodata, renderer) do
  # Adjacent chunks carrying identical modifiers are coalesced so no
  # redundant tags are produced.
  pairs_to_html([{modifiers, [left, right]} | rest], iodata, renderer)
end

defp pairs_to_html([{modifiers, content} | rest], iodata, renderer) do
  rendered = renderer.(modifiers_to_css(modifiers), content)
  pairs_to_html(rest, [iodata, rendered], renderer)
end
@doc """
Default HTML renderer: wraps `content` in a `<span>` carrying the given
inline style, or returns the content untouched when the style is empty.
"""
def default_renderer("", content), do: content

def default_renderer(style, content) do
  opening = "<span style=\"" <> style <> "\">"
  [opening, content, "</span>"]
end
# Converts the active modifier map into a single inline-CSS string by
# concatenating the declaration produced for each modifier.
defp modifiers_to_css(modifiers) do
  # Enum.map_join/2 produces the same string as Enum.map/2 |> Enum.join/1
  # without allocating the intermediate list.
  Enum.map_join(modifiers, &modifier_to_css/1)
end
# Translates a single {property, value} modifier into one CSS declaration
# (trailing semicolon included, so declarations can be concatenated).
defp modifier_to_css(modifier) do
  case modifier do
    {:font_weight, :bold} -> "font-weight: 600;"
    {:font_weight, :light} -> "font-weight: 200;"
    {:font_style, :italic} -> "font-style: italic;"
    {:text_decoration, :underline} -> "text-decoration: underline;"
    {:text_decoration, :line_through} -> "text-decoration: line-through;"
    {:text_decoration, :overline} -> "text-decoration: overline;"
    {:foreground_color, color} -> "color: #{color_to_css(color)};"
    {:background_color, color} -> "background-color: #{color_to_css(color)};"
  end
end
# Maps a colour term onto its CSS value. Named colours resolve to CSS
# custom properties; the clauses for the 8 base colours and their
# "light_" variants are generated pairwise below.
for base <- [:black, :red, :green, :yellow, :blue, :magenta, :cyan, :white] do
  defp color_to_css(unquote(base)), do: unquote("var(--ansi-color-#{base})")

  defp color_to_css(unquote(:"light_#{base}")),
    do: unquote("var(--ansi-color-light-#{base})")
end

# 6x6x6 colour cube: scale each 0..5 channel onto 0..255.
defp color_to_css({:rgb6, r, g, b}) do
  [red, green, blue] = for channel <- [r, g, b], do: div(255 * channel, 5)
  "rgb(#{red}, #{green}, #{blue})"
end

# 24-step grayscale ramp: scale the 0..23 level onto 0..255.
defp color_to_css({:grayscale24, level}) do
  value = div(255 * level, 23)
  "rgb(#{value}, #{value}, #{value})"
end
end
|
lib/livebook_web/ansi.ex
| 0.828245
| 0.479626
|
ansi.ex
|
starcoder
|
defmodule AWS.CodePipeline do
  @moduledoc """
  AWS CodePipeline

  ## Overview

  This is the AWS CodePipeline API Reference.

  This guide provides descriptions of the actions and data types for AWS
  CodePipeline. Some functionality for your pipeline can only be configured
  through the API. For more information, see the [AWS CodePipeline User Guide](https://docs.aws.amazon.com/codepipeline/latest/userguide/welcome.html).

  You can use the AWS CodePipeline API to work with pipelines, stages, actions,
  and transitions.

  *Pipelines* are models of automated release processes. Each pipeline is uniquely
  named, and consists of stages, actions, and transitions.

  You can work with pipelines by calling:

  * `CreatePipeline`, which creates a uniquely named pipeline.

  * `DeletePipeline`, which deletes the specified pipeline.

  * `GetPipeline`, which returns information about the pipeline
  structure and pipeline metadata, including the pipeline Amazon Resource Name
  (ARN).

  * `GetPipelineExecution`, which returns information about a specific
  execution of a pipeline.

  * `GetPipelineState`, which returns information about the current
  state of the stages and actions of a pipeline.

  * `ListActionExecutions`, which returns action-level details for
  past executions. The details include full stage and action-level details,
  including individual action duration, status, any errors that occurred during
  the execution, and input and output artifact location details.

  * `ListPipelines`, which gets a summary of all of the pipelines
  associated with your account.

  * `ListPipelineExecutions`, which gets a summary of the most recent
  executions for a pipeline.

  * `StartPipelineExecution`, which runs the most recent revision of
  an artifact through the pipeline.

  * `StopPipelineExecution`, which stops the specified pipeline
  execution from continuing through the pipeline.

  * `UpdatePipeline`, which updates a pipeline with edits or changes
  to the structure of the pipeline.

  Pipelines include *stages*. Each stage contains one or more actions that must
  complete before the next stage begins. A stage results in success or failure. If
  a stage fails, the pipeline stops at that stage and remains stopped until either
  a new version of an artifact appears in the source location, or a user takes
  action to rerun the most recent artifact through the pipeline. You can call
  `GetPipelineState`, which displays the status of a pipeline, including the
  status of stages in the pipeline, or `GetPipeline`, which returns the entire
  structure of the pipeline, including the stages of that pipeline. For more
  information about the structure of stages and actions, see [AWS CodePipeline Pipeline Structure
  Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/pipeline-structure.html).

  Pipeline stages include *actions* that are categorized into categories such as
  source or build actions performed in a stage of a pipeline. For example, you can
  use a source action to import artifacts into a pipeline from a source such as
  Amazon S3. Like stages, you do not work with actions directly in most cases, but
  you do define and interact with actions when working with pipeline operations
  such as `CreatePipeline` and `GetPipelineState`. Valid action categories are:

  * Source

  * Build

  * Test

  * Deploy

  * Approval

  * Invoke

  Pipelines also include *transitions*, which allow the transition of artifacts
  from one stage to the next in a pipeline after the actions in one stage
  complete.

  You can work with transitions by calling:

  * `DisableStageTransition`, which prevents artifacts from
  transitioning to the next stage in a pipeline.

  * `EnableStageTransition`, which enables transition of artifacts
  between stages in a pipeline.

  ## Using the API to integrate with AWS CodePipeline

  For third-party integrators or developers who want to create their own
  integrations with AWS CodePipeline, the expected sequence varies from the
  standard API user. To integrate with AWS CodePipeline, developers need to work
  with the following items:

  **Jobs**, which are instances of an action. For example, a job for a source
  action might import a revision of an artifact from a source.

  You can work with jobs by calling:

  * `AcknowledgeJob`, which confirms whether a job worker has received
  the specified job.

  * `GetJobDetails`, which returns the details of a job.

  * `PollForJobs`, which determines whether there are any jobs to act
  on.

  * `PutJobFailureResult`, which provides details of a job failure.

  * `PutJobSuccessResult`, which provides details of a job success.

  **Third party jobs**, which are instances of an action created by a partner
  action and integrated into AWS CodePipeline. Partner actions are created by
  members of the AWS Partner Network.

  You can work with third party jobs by calling:

  * `AcknowledgeThirdPartyJob`, which confirms whether a job worker
  has received the specified job.

  * `GetThirdPartyJobDetails`, which requests the details of a job for
  a partner action.

  * `PollForThirdPartyJobs`, which determines whether there are any
  jobs to act on.

  * `PutThirdPartyJobFailureResult`, which provides details of a job
  failure.

  * `PutThirdPartyJobSuccessResult`, which provides details of a job
  success.
  """

  # NOTE: generated AWS SDK module — every API function below is a thin
  # wrapper that posts a JSON payload via AWS.Request.request_post/5.
  alias AWS.Client
  alias AWS.Request

  # Service descriptor consumed by AWS.Request to sign and route calls.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: "CodePipeline",
      api_version: "2015-07-09",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "codepipeline",
      global?: false,
      protocol: "json",
      service_id: "CodePipeline",
      signature_version: "v4",
      signing_name: "codepipeline",
      target_prefix: "CodePipeline_20150709"
    }
  end

  @doc """
  Returns information about a specified job and whether that job has been received
  by the job worker.

  Used for custom actions only.
  """
  def acknowledge_job(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "AcknowledgeJob", input, options)
  end

  @doc """
  Confirms a job worker has received the specified job.

  Used for partner actions only.
  """
  def acknowledge_third_party_job(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "AcknowledgeThirdPartyJob", input, options)
  end

  @doc """
  Creates a new custom action that can be used in all pipelines associated with
  the AWS account.

  Only used for custom actions.
  """
  def create_custom_action_type(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateCustomActionType", input, options)
  end

  @doc """
  Creates a pipeline.

  In the pipeline structure, you must include either `artifactStore` or
  `artifactStores` in your pipeline, but you cannot use both. If you create a
  cross-region action in your pipeline, you must use `artifactStores`.
  """
  def create_pipeline(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreatePipeline", input, options)
  end

  @doc """
  Marks a custom action as deleted.

  `PollForJobs` for the custom action fails after the action is marked for
  deletion. Used for custom actions only.

  To re-create a custom action after it has been deleted you must use a string in
  the version field that has never been used before. This string can be an
  incremented version number, for example. To restore a deleted custom action, use
  a JSON file that is identical to the deleted action, including the original
  string in the version field.
  """
  def delete_custom_action_type(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteCustomActionType", input, options)
  end

  @doc """
  Deletes the specified pipeline.
  """
  def delete_pipeline(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeletePipeline", input, options)
  end

  @doc """
  Deletes a previously created webhook by name.

  Deleting the webhook stops AWS CodePipeline from starting a pipeline every time
  an external event occurs. The API returns successfully when trying to delete a
  webhook that is already deleted. If a deleted webhook is re-created by calling
  PutWebhook with the same name, it will have a different URL.
  """
  def delete_webhook(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteWebhook", input, options)
  end

  @doc """
  Removes the connection between the webhook that was created by CodePipeline and
  the external tool with events to be detected.

  Currently supported only for webhooks that target an action type of GitHub.
  """
  def deregister_webhook_with_third_party(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeregisterWebhookWithThirdParty", input, options)
  end

  @doc """
  Prevents artifacts in a pipeline from transitioning to the next stage in the
  pipeline.
  """
  def disable_stage_transition(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DisableStageTransition", input, options)
  end

  @doc """
  Enables artifacts in a pipeline to transition to a stage in a pipeline.
  """
  def enable_stage_transition(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "EnableStageTransition", input, options)
  end

  @doc """
  Returns information about an action type created for an external provider, where
  the action is to be used by customers of the external provider.

  The action can be created with any supported integration model.
  """
  def get_action_type(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetActionType", input, options)
  end

  @doc """
  Returns information about a job.

  Used for custom actions only.

  When this API is called, AWS CodePipeline returns temporary credentials for the
  S3 bucket used to store artifacts for the pipeline, if the action requires
  access to that S3 bucket for input or output artifacts. This API also returns
  any secret values defined for the action.
  """
  def get_job_details(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetJobDetails", input, options)
  end

  @doc """
  Returns the metadata, structure, stages, and actions of a pipeline.

  Can be used to return the entire structure of a pipeline in JSON format, which
  can then be modified and used to update the pipeline structure with
  `UpdatePipeline`.
  """
  def get_pipeline(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetPipeline", input, options)
  end

  @doc """
  Returns information about an execution of a pipeline, including details about
  artifacts, the pipeline execution ID, and the name, version, and status of the
  pipeline.
  """
  def get_pipeline_execution(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetPipelineExecution", input, options)
  end

  @doc """
  Returns information about the state of a pipeline, including the stages and
  actions.

  Values returned in the `revisionId` and `revisionUrl` fields indicate the source
  revision information, such as the commit ID, for the current state.
  """
  def get_pipeline_state(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetPipelineState", input, options)
  end

  @doc """
  Requests the details of a job for a third party action.

  Used for partner actions only.

  When this API is called, AWS CodePipeline returns temporary credentials for the
  S3 bucket used to store artifacts for the pipeline, if the action requires
  access to that S3 bucket for input or output artifacts. This API also returns
  any secret values defined for the action.
  """
  def get_third_party_job_details(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetThirdPartyJobDetails", input, options)
  end

  @doc """
  Lists the action executions that have occurred in a pipeline.
  """
  def list_action_executions(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListActionExecutions", input, options)
  end

  @doc """
  Gets a summary of all AWS CodePipeline action types associated with your
  account.
  """
  def list_action_types(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListActionTypes", input, options)
  end

  @doc """
  Gets a summary of the most recent executions for a pipeline.
  """
  def list_pipeline_executions(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListPipelineExecutions", input, options)
  end

  @doc """
  Gets a summary of all of the pipelines associated with your account.
  """
  def list_pipelines(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListPipelines", input, options)
  end

  @doc """
  Gets the set of key-value pairs (metadata) that are used to manage the resource.
  """
  def list_tags_for_resource(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListTagsForResource", input, options)
  end

  @doc """
  Gets a listing of all the webhooks in this AWS Region for this account.

  The output lists all webhooks and includes the webhook URL and ARN and the
  configuration for each webhook.
  """
  def list_webhooks(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListWebhooks", input, options)
  end

  @doc """
  Returns information about any jobs for AWS CodePipeline to act on.

  `PollForJobs` is valid only for action types with "Custom" in the owner field.
  If the action type contains "AWS" or "ThirdParty" in the owner field, the
  `PollForJobs` action returns an error.

  When this API is called, AWS CodePipeline returns temporary credentials for the
  S3 bucket used to store artifacts for the pipeline, if the action requires
  access to that S3 bucket for input or output artifacts. This API also returns
  any secret values defined for the action.
  """
  def poll_for_jobs(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PollForJobs", input, options)
  end

  @doc """
  Determines whether there are any third party jobs for a job worker to act on.

  Used for partner actions only.

  When this API is called, AWS CodePipeline returns temporary credentials for the
  S3 bucket used to store artifacts for the pipeline, if the action requires
  access to that S3 bucket for input or output artifacts.
  """
  def poll_for_third_party_jobs(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PollForThirdPartyJobs", input, options)
  end

  @doc """
  Provides information to AWS CodePipeline about new revisions to a source.
  """
  def put_action_revision(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutActionRevision", input, options)
  end

  @doc """
  Provides the response to a manual approval request to AWS CodePipeline.

  Valid responses include Approved and Rejected.
  """
  def put_approval_result(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutApprovalResult", input, options)
  end

  @doc """
  Represents the failure of a job as returned to the pipeline by a job worker.

  Used for custom actions only.
  """
  def put_job_failure_result(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutJobFailureResult", input, options)
  end

  @doc """
  Represents the success of a job as returned to the pipeline by a job worker.

  Used for custom actions only.
  """
  def put_job_success_result(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutJobSuccessResult", input, options)
  end

  @doc """
  Represents the failure of a third party job as returned to the pipeline by a job
  worker.

  Used for partner actions only.
  """
  def put_third_party_job_failure_result(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutThirdPartyJobFailureResult", input, options)
  end

  @doc """
  Represents the success of a third party job as returned to the pipeline by a job
  worker.

  Used for partner actions only.
  """
  def put_third_party_job_success_result(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutThirdPartyJobSuccessResult", input, options)
  end

  @doc """
  Defines a webhook and returns a unique webhook URL generated by CodePipeline.

  This URL can be supplied to third party source hosting providers to call every
  time there's a code change. When CodePipeline receives a POST request on this
  URL, the pipeline defined in the webhook is started as long as the POST request
  satisfied the authentication and filtering requirements supplied when defining
  the webhook. RegisterWebhookWithThirdParty and DeregisterWebhookWithThirdParty
  APIs can be used to automatically configure supported third parties to call the
  generated webhook URL.
  """
  def put_webhook(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutWebhook", input, options)
  end

  @doc """
  Configures a connection between the webhook that was created and the external
  tool with events to be detected.
  """
  def register_webhook_with_third_party(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "RegisterWebhookWithThirdParty", input, options)
  end

  @doc """
  Resumes the pipeline execution by retrying the last failed actions in a stage.

  You can retry a stage immediately if any of the actions in the stage fail. When
  you retry, all actions that are still in progress continue working, and failed
  actions are triggered again.
  """
  def retry_stage_execution(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "RetryStageExecution", input, options)
  end

  @doc """
  Starts the specified pipeline.

  Specifically, it begins processing the latest commit to the source location
  specified as part of the pipeline.
  """
  def start_pipeline_execution(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "StartPipelineExecution", input, options)
  end

  @doc """
  Stops the specified pipeline execution.

  You choose to either stop the pipeline execution by completing in-progress
  actions without starting subsequent actions, or by abandoning in-progress
  actions. While completing or abandoning in-progress actions, the pipeline
  execution is in a `Stopping` state. After all in-progress actions are completed
  or abandoned, the pipeline execution is in a `Stopped` state.
  """
  def stop_pipeline_execution(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "StopPipelineExecution", input, options)
  end

  @doc """
  Adds to or modifies the tags of the given resource.

  Tags are metadata that can be used to manage a resource.
  """
  def tag_resource(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "TagResource", input, options)
  end

  @doc """
  Removes tags from an AWS resource.
  """
  def untag_resource(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UntagResource", input, options)
  end

  @doc """
  Updates an action type that was created with any supported integration model,
  where the action type is to be used by customers of the action type provider.

  Use a JSON file with the action definition and `UpdateActionType` to provide the
  full structure.
  """
  def update_action_type(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateActionType", input, options)
  end

  @doc """
  Updates a specified pipeline with edits or changes to its structure.

  Use a JSON file with the pipeline structure and `UpdatePipeline` to provide the
  full structure of the pipeline. Updating the pipeline increases the version
  number of the pipeline by 1.
  """
  def update_pipeline(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdatePipeline", input, options)
  end
end
|
lib/aws/generated/code_pipeline.ex
| 0.933454
| 0.871693
|
code_pipeline.ex
|
starcoder
|
defmodule Day22 do
  @moduledoc """
  Day 22 ("reactor reboot") solver: applies a list of on/off cuboid
  instructions and counts the cubes that remain on, materialising every
  touched cube as a `{x, y, z}` tuple in a `MapSet`.
  """

  alias Day22.RangeParser

  @doc """
  Counts the lit cubes considering only the initialization region
  (every axis clamped to -50..50).
  """
  def part_one(input) do
    solve(input, &parse_line(&1, -50, 50))
  end

  @doc """
  Counts the lit cubes over the full, unclamped instruction ranges.

  Note: materialising every cube is only practical for small ranges.
  """
  def part_two(input) do
    solve(input, &parse_line/1)
  end

  # Shared driver for both parts: streams the puzzle input, parses each
  # line with `parser`, then folds the on/off cube sets into one set.
  defp solve(input, parser) do
    22
    |> input.contents_of(:stream)
    |> Stream.map(&String.trim/1)
    |> Stream.map(parser)
    |> Enum.reduce(MapSet.new(), fn
      {:on, cubes}, acc -> MapSet.union(acc, cubes)
      {:off, cubes}, acc -> MapSet.difference(acc, cubes)
    end)
    |> Enum.count()
  end

  @doc """
  Parses one instruction line into `{:on | :off, cubes}`.

  The 3-arity variant clamps every axis to `min..max`.
  """
  def parse_line(<<"on ", ranges::binary>>), do: {:on, parse_ranges(ranges)}
  def parse_line(<<"off ", ranges::binary>>), do: {:off, parse_ranges(ranges)}
  def parse_line(<<"on ", ranges::binary>>, min, max), do: {:on, parse_ranges(ranges, min, max)}
  def parse_line(<<"off ", ranges::binary>>, min, max), do: {:off, parse_ranges(ranges, min, max)}

  @doc """
  Expands an unclamped range expression into a `MapSet` of `{x, y, z}` cubes.
  """
  def parse_ranges(ranges) do
    [min_x, max_x, min_y, max_y, min_z, max_z] = bounds(ranges)

    cubes(min_x..max_x, min_y..max_y, min_z..max_z)
  end

  @doc """
  Same as `parse_ranges/1`, but drops cubes outside `min..max` on any axis.
  """
  def parse_ranges(ranges, min, max) do
    [min_x, max_x, min_y, max_y, min_z, max_z] = bounds(ranges)

    cubes(
      range(min_x, max_x, min, max),
      range(min_y, max_y, min, max),
      range(min_z, max_z, min, max)
    )
  end

  @doc """
  Clamps `from..to` to `min..max`, returning `[]` when they do not overlap.
  """
  def range(from, _to, _min, max) when from > max, do: []
  def range(_from, to, min, _max) when to < min, do: []
  def range(from, to, min, max), do: max(from, min)..min(to, max)

  # Runs RangeParser over the range expression and folds standalone "-"
  # tokens into negative integers, yielding the six axis bounds.
  defp bounds(ranges) do
    {:ok, parsed, _, _, _, _} = RangeParser.ranges(ranges)
    handle_negatives(parsed, [])
  end

  # Cartesian product of the three axis enumerables (ranges, or [] when an
  # axis was clamped away), collected straight into a MapSet.
  defp cubes(xs, ys, zs) do
    for x <- xs, y <- ys, z <- zs, into: MapSet.new() do
      {x, y, z}
    end
  end

  # RangeParser emits "-" as a separate token; merge it with the following
  # integer to restore the sign.
  defp handle_negatives([], acc), do: Enum.reverse(acc)
  defp handle_negatives(["-", i | rest], acc), do: handle_negatives(rest, [-i | acc])
  defp handle_negatives([i | rest], acc), do: handle_negatives(rest, [i | acc])
end
|
year_2021/lib/day_22.ex
| 0.512693
| 0.446796
|
day_22.ex
|
starcoder
|
defmodule MealTracker.Command do
  @moduledoc """
  Defines the interface and utilities for the command-line sub-commands.

  ## Attributes

  There are attributes that integrate a command module with the rest of the system:

  * `@shortdoc` - makes the command public with a short description that shows up in `track help`

  ## Documentation

  Users can read the documentation for a command by executing `track help command_name`. The
  documentation that will be shown is the `@moduledoc` of the command's module.
  """

  @doc """
  A command needs to implement `run` which receives a list of command-line args.
  """
  @callback run(command_line_args :: [binary]) :: any

  @doc false
  defmacro __using__(_opts) do
    quote do
      # Register every supported attribute with persist: true so that it is
      # kept in the compiled module and can be read back later through
      # __info__(:attributes) (see shortdoc/1 below).
      Enum.each(
        MealTracker.Command.supported_attributes(),
        &Module.register_attribute(__MODULE__, &1, persist: true)
      )

      @behaviour MealTracker.Command

      import MealTracker.DateUtils
      import MealTracker.PathUtils
    end
  end

  # Attributes a command module may declare (see the moduledoc).
  @doc false
  def supported_attributes, do: [:shortdoc]

  @doc """
  Returns all loaded command modules.

  Only modules already loaded into the VM are found; call `load_all/0`
  first if the command modules might not be loaded yet.
  """
  def all_modules() do
    for {module, _} <- :code.all_loaded(), command?(module), do: module
  end

  @doc """
  Returns `true` if the given module is a command.
  """
  def command?(module) do
    # Charlist prefix match on the module's name, then a check that the
    # module actually implements the behaviour's run/1 callback.
    match?('Elixir.MealTracker.' ++ _, Atom.to_charlist(module)) and
      ensure_command?(module)
  end

  # True when the module can be loaded and exports run/1.
  defp ensure_command?(module) do
    Code.ensure_loaded?(module) and function_exported?(module, :run, 1)
  end

  @doc """
  Resolves a command name to its module under `MealTracker.Commands`,
  returning `nil` when no such module can be loaded.
  """
  def command_to_module(command) do
    MealTracker.Commands
    |> Module.concat(command_to_module_name(command))
    |> Code.ensure_loaded()
    |> case do
      {:module, module} -> module
      _ -> nil
    end
  end

  @doc """
  Converts a command name to the matching module name.
  """
  def command_to_module_name(command) do
    command
    |> String.replace("-", "_")
    |> Macro.camelize()
  end

  @doc """
  Load all command modules.
  """
  def load_all() do
    {:ok, mods} = :application.get_key(:meal_tracker, :modules)

    mods
    |> Enum.filter(fn mod ->
      String.starts_with?(Atom.to_string(mod), "Elixir.MealTracker.Commands")
    end)
    # ensure_command?/1 is used here for its Code.ensure_loaded?/1 side
    # effect; the boolean result is discarded by Enum.each/2.
    |> Enum.each(&ensure_command?/1)
  end

  @doc """
  Retrieves the contents of the `moduledoc` attribute from the given `module`.

  Returns `false` when the docs are hidden (`@moduledoc false`) and `nil`
  when no documentation chunk is available.
  """
  def moduledoc(module) when is_atom(module) do
    # Pattern matches the docs_v1 documentation chunk.
    case Code.fetch_docs(module) do
      {:docs_v1, _, _, _, %{"en" => moduledoc}, _, _} -> moduledoc
      {:docs_v1, _, _, _, :hidden, _, _} -> false
      _ -> nil
    end
  end

  @doc """
  Converts a module name into the matching command name.

  `nesting` is the number of leading namespace segments to drop
  (by default 2, e.g. `MealTracker.Commands`).
  """
  def module_name_to_command(module, nesting \\ 2)

  def module_name_to_command(atom, nesting) when is_atom(atom) do
    module_name_to_command(inspect(atom), nesting)
  end

  def module_name_to_command(module, nesting) do
    module
    |> String.split(".")
    |> Enum.drop(nesting)
    |> Enum.map_join(".", &dasherize/1)
  end

  @doc """
  Retrieves the contents of the `shortdoc` attribute from the `module`.
  """
  def shortdoc(module) do
    # Persisted attributes (see __using__/1) appear in __info__(:attributes)
    # as {name, [value]} entries.
    case List.keyfind(module.__info__(:attributes), :shortdoc, 0) do
      {:shortdoc, [shortdoc]} -> shortdoc
      _ -> nil
    end
  end

  # Converts one CamelCase segment to dash-case: lowercases the first
  # character and lets do_dasherize/2 walk the rest with one character of
  # preceding context.
  defp dasherize(<<h, t::binary>>) do
    <<to_lower_char(h)>> <> do_dasherize(t, h)
  end

  defp dasherize(""), do: ""

  # Uppercase char whose *following* char is not uppercase and not a dash:
  # this closes an uppercase run, so a dash is inserted before it.
  defp do_dasherize(<<h, t, rest::binary>>, _)
       when h >= ?A and h <= ?Z and not (t >= ?A and t <= ?Z) and t != ?- do
    <<?-, to_lower_char(h), t>> <> do_dasherize(rest, t)
  end

  # Uppercase char whose *preceding* char is not uppercase and not a dash:
  # a new word starts here, so a dash is inserted before it.
  defp do_dasherize(<<h, t::binary>>, prev)
       when h >= ?A and h <= ?Z and not (prev >= ?A and prev <= ?Z) and prev != ?- do
    <<?-, to_lower_char(h)>> <> do_dasherize(t, h)
  end

  # Any other character is lowercased (a no-op for non-letters) and copied.
  defp do_dasherize(<<h, t::binary>>, _) do
    <<to_lower_char(h)>> <> do_dasherize(t, h)
  end

  defp do_dasherize(<<>>, _) do
    <<>>
  end

  # ASCII-only lowercasing: ?A..?Z sit exactly 32 below their lowercase forms.
  defp to_lower_char(char) when char >= ?A and char <= ?Z, do: char + 32
  defp to_lower_char(char), do: char
end
|
lib/meal_tracker/command.ex
| 0.78403
| 0.510008
|
command.ex
|
starcoder
|
defmodule AWS.KMS do
@moduledoc """
AWS Key Management Service
AWS Key Management Service (AWS KMS) is an encryption and key management web
service.
This guide describes the AWS KMS operations that you can call programmatically.
For general information about AWS KMS, see the [ *AWS Key Management Service Developer Guide* ](https://docs.aws.amazon.com/kms/latest/developerguide/).
AWS provides SDKs that consist of libraries and sample code for various
programming languages and platforms (Java, Ruby, .Net, macOS, Android, etc.).
The SDKs provide a convenient way to create programmatic access to AWS KMS and
other AWS services. For example, the SDKs take care of tasks such as signing
requests (see below), managing errors, and retrying requests automatically. For
more information about the AWS SDKs, including how to download and install them,
see [Tools for Amazon Web Services](http://aws.amazon.com/tools/).
We recommend that you use the AWS SDKs to make programmatic API calls to AWS
KMS.
Clients must support TLS (Transport Layer Security) 1.0. We recommend TLS 1.2.
Clients must also support cipher suites with Perfect Forward Secrecy (PFS) such
as Ephemeral Diffie-Hellman (DHE) or Elliptic Curve Ephemeral Diffie-Hellman
(ECDHE). Most modern systems such as Java 7 and later support these modes.
## Signing Requests
Requests must be signed by using an access key ID and a secret access key. We
strongly recommend that you *do not* use your AWS account (root) access key ID
and secret key for everyday work with AWS KMS. Instead, use the access key ID
and secret access key for an IAM user. You can also use the AWS Security Token
Service to generate temporary security credentials that you can use to sign
requests.
All AWS KMS operations require [Signature Version
4](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
## Logging API Requests
AWS KMS supports AWS CloudTrail, a service that logs AWS API calls and related
events for your AWS account and delivers them to an Amazon S3 bucket that you
specify. By using the information collected by CloudTrail, you can determine
what requests were made to AWS KMS, who made the request, when it was made, and
so on. To learn more about CloudTrail, including how to turn it on and find your
log files, see the [AWS CloudTrail User Guide](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/).
## Additional Resources
For more information about credentials and request signing, see the following:
* [AWS Security Credentials](https://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)
- This topic provides general information about the types of credentials used
for accessing AWS.
* [Temporary Security Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html)
- This section of the *IAM User Guide* describes how to create and use temporary
security credentials.
* [Signature Version 4 Signing Process](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html)
- This set of topics walks you through the process of signing a request using an
access key ID and a secret access key.
## Commonly Used API Operations
Of the API operations discussed in this guide, the following will prove the most
useful for most applications. You will likely perform operations other than
these, such as creating keys and assigning policies, by using the console.
* `Encrypt`
* `Decrypt`
* `GenerateDataKey`
* `GenerateDataKeyWithoutPlaintext`
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "KMS",
api_version: "2014-11-01",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "kms",
global?: false,
protocol: "json",
service_id: "KMS",
signature_version: "v4",
signing_name: "kms",
target_prefix: "TrentService"
}
end
@doc """
Cancels the deletion of a customer master key (CMK).
When this operation succeeds, the key state of the CMK is `Disabled`. To enable
the CMK, use `EnableKey`.
For more information about scheduling and canceling deletion of a CMK, see
[Deleting Customer Master Keys](https://docs.aws.amazon.com/kms/latest/developerguide/deleting-keys.html)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:CancelKeyDeletion](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `ScheduleKeyDeletion`
"""
def cancel_key_deletion(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "CancelKeyDeletion", input, options)
end
@doc """
Connects or reconnects a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
to its associated AWS CloudHSM cluster.
The custom key store must be connected before you can create customer master
keys (CMKs) in the key store or use the CMKs it contains. You can disconnect and
reconnect a custom key store at any time.
To connect a custom key store, its associated AWS CloudHSM cluster must have at
least one active HSM. To get the number of active HSMs in a cluster, use the
[DescribeClusters](https://docs.aws.amazon.com/cloudhsm/latest/APIReference/API_DescribeClusters.html) operation. To add HSMs to the cluster, use the
[CreateHsm](https://docs.aws.amazon.com/cloudhsm/latest/APIReference/API_CreateHsm.html)
operation. Also, the [ `kmsuser` crypto user](https://docs.aws.amazon.com/kms/latest/developerguide/key-store-concepts.html#concept-kmsuser)
(CU) must not be logged into the cluster. This prevents AWS KMS from using this
account to log in.
The connection process can take an extended amount of time to complete; up to 20
minutes. This operation starts the connection process, but it does not wait for
it to complete. When it succeeds, this operation quickly returns an HTTP 200
response and a JSON object with no properties. However, this response does not
indicate that the custom key store is connected. To get the connection state of
the custom key store, use the `DescribeCustomKeyStores` operation.
During the connection process, AWS KMS finds the AWS CloudHSM cluster that is
associated with the custom key store, creates the connection infrastructure,
connects to the cluster, logs into the AWS CloudHSM client as the `kmsuser` CU,
and rotates its password.
The `ConnectCustomKeyStore` operation might fail for various reasons. To find
the reason, use the `DescribeCustomKeyStores` operation and see the
`ConnectionErrorCode` in the response. For help interpreting the
`ConnectionErrorCode`, see `CustomKeyStoresListEntry`.
To fix the failure, use the `DisconnectCustomKeyStore` operation to disconnect
the custom key store, correct the error, use the `UpdateCustomKeyStore`
operation if necessary, and then use `ConnectCustomKeyStore` again.
If you are having trouble connecting or disconnecting a custom key store, see
[Troubleshooting a Custom Key Store](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
in the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.
**Required permissions**:
[kms:ConnectCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations
  * `CreateCustomKeyStore`
  * `DeleteCustomKeyStore`
  * `DescribeCustomKeyStores`
  * `DisconnectCustomKeyStore`
  * `UpdateCustomKeyStore`
"""
def connect_custom_key_store(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "ConnectCustomKeyStore", input, options)
end
@doc """
Creates a friendly name for a customer master key (CMK).
You can use an alias to identify a CMK in the AWS KMS console, in the
`DescribeKey` operation and in [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations),
such as `Encrypt` and `GenerateDataKey`.
You can also change the CMK that's associated with the alias (`UpdateAlias`) or
delete the alias (`DeleteAlias`) at any time. These operations don't affect the
underlying CMK.
You can associate the alias with any customer managed CMK in the same AWS
Region. Each alias is associated with only one CMK at a time, but a CMK can have
multiple aliases. A valid CMK is required. You can't create an alias without a
CMK.
The alias must be unique in the account and Region, but you can have aliases
with the same name in different Regions. For detailed information about aliases,
see [Using aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html)
in the *AWS Key Management Service Developer Guide*.
This operation does not return a response. To get the alias that you created,
use the `ListAliases` operation.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on an alias in a
different AWS account.
## Required permissions
  *
[kms:CreateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the alias (IAM policy).
  *
[kms:CreateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
on the CMK (key policy).
For details, see [Controlling access to aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *AWS Key Management Service Developer Guide*.
## Related operations:
  * `DeleteAlias`
  * `ListAliases`
  * `UpdateAlias`
"""
def create_alias(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateAlias", input, options)
end
@doc """
Creates a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
that is associated with an [AWS CloudHSM cluster](https://docs.aws.amazon.com/cloudhsm/latest/userguide/clusters.html)
that you own and manage.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive integration of
AWS KMS with the isolation and control of a single-tenant key store.
Before you create the custom key store, you must assemble the required elements,
including an AWS CloudHSM cluster that fulfills the requirements for a custom
key store. For details about the required elements, see [Assemble the Prerequisites](https://docs.aws.amazon.com/kms/latest/developerguide/create-keystore.html#before-keystore)
in the *AWS Key Management Service Developer Guide*.
When the operation completes successfully, it returns the ID of the new custom
key store. Before you can use your new custom key store, you need to use the
`ConnectCustomKeyStore` operation to connect the new key store to its AWS
CloudHSM cluster. Even if you are not going to use your custom key store
immediately, you might want to connect it to verify that all settings are
correct and then disconnect it until you are ready to use it.
For help with failures, see [Troubleshooting a Custom Key Store](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
in the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.
**Required permissions**:
[kms:CreateCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy).
## Related operations:
  * `ConnectCustomKeyStore`
  * `DeleteCustomKeyStore`
  * `DescribeCustomKeyStores`
  * `DisconnectCustomKeyStore`
  * `UpdateCustomKeyStore`
"""
def create_custom_key_store(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateCustomKeyStore", input, options)
end
@doc """
Adds a grant to a customer master key (CMK).
The grant allows the grantee principal to use the CMK when the conditions
specified in the grant are met. When setting permissions, grants are an
alternative to key policies.
To create a grant that allows a [cryptographic operation](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations)
only when the request includes a particular [encryption context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context),
use the `Constraints` parameter. For details, see `GrantConstraints`.
You can create grants on symmetric and asymmetric CMKs. However, if the grant
allows an operation that the CMK does not support, `CreateGrant` fails with a
`ValidationException`.
  * Grants for symmetric CMKs cannot allow operations that are not
supported for symmetric CMKs, including `Sign`, `Verify`, and `GetPublicKey`.
(There are limited exceptions to this rule for legacy operations, but you should
not create a grant for an operation that AWS KMS does not support.)
  * Grants for asymmetric CMKs cannot allow operations that are not
supported for asymmetric CMKs, including operations that [generate data keys](https://docs.aws.amazon.com/kms/latest/APIReference/API_GenerateDataKey)
or [data key pairs](https://docs.aws.amazon.com/kms/latest/APIReference/API_GenerateDataKeyPair),
or operations related to [automatic key rotation](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html),
[imported key material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html),
or CMKs in [custom key stores](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
  * Grants for asymmetric CMKs with a `KeyUsage` of `ENCRYPT_DECRYPT`
cannot allow the `Sign` or `Verify` operations. Grants for asymmetric CMKs with
a `KeyUsage` of `SIGN_VERIFY` cannot allow the `Encrypt` or `Decrypt`
operations.
  * Grants for asymmetric CMKs cannot include an encryption context
grant constraint. An encryption context is not supported on asymmetric CMKs.
For information about symmetric and asymmetric CMKs, see [Using Symmetric and Asymmetric
CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*. For more information about
grants, see
[Grants](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html) in the * *AWS Key Management Service Developer Guide* *.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master
Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation on a CMK in a different
AWS account, specify the key ARN in the value of the `KeyId` parameter.
**Required permissions**:
[kms:CreateGrant](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
  * `ListGrants`
  * `ListRetirableGrants`
  * `RetireGrant`
  * `RevokeGrant`
"""
def create_grant(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "CreateGrant", input, options)
end
@doc """
Creates a unique customer managed [customer master key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#master-keys)
(CMK) in your AWS account and Region.
You can use the `CreateKey` operation to create symmetric or asymmetric CMKs.
  * **Symmetric CMKs** contain a 256-bit symmetric key that never
leaves AWS KMS unencrypted. To use the CMK, you must call AWS KMS. You can use a
symmetric CMK to encrypt and decrypt small amounts of data, but they are
typically used to generate [data keys](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys)
and [data key pairs](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-key-pairs).
For details, see `GenerateDataKey` and `GenerateDataKeyPair`.
  * **Asymmetric CMKs** can contain an RSA key pair or an Elliptic
Curve (ECC) key pair. The private key in an asymmetric CMK never leaves AWS KMS
unencrypted. However, you can use the `GetPublicKey` operation to download the
public key so it can be used outside of AWS KMS. CMKs with RSA key pairs can be
used to encrypt or decrypt data or sign and verify messages (but not both). CMKs
with ECC key pairs can be used only to sign and verify messages.
For information about symmetric and asymmetric CMKs, see [Using Symmetric and Asymmetric
CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*.
To create different types of CMKs, use the following guidance:
## Definitions
### Asymmetric CMKs
To create an asymmetric CMK, use the `CustomerMasterKeySpec` parameter to
specify the type of key material in the CMK. Then, use the `KeyUsage` parameter
to determine whether the CMK will be used to encrypt and decrypt or sign and
verify. You can't change these properties after the CMK is created.
### Symmetric CMKs
When creating a symmetric CMK, you don't need to specify the
`CustomerMasterKeySpec` or `KeyUsage` parameters. The default value for
`CustomerMasterKeySpec`, `SYMMETRIC_DEFAULT`, and the default value for
`KeyUsage`, `ENCRYPT_DECRYPT`, are the only valid values for symmetric CMKs.
### Imported Key Material
To import your own key material, begin by creating a symmetric CMK with no key
material. To do this, use the `Origin` parameter of `CreateKey` with a value of
`EXTERNAL`. Next, use `GetParametersForImport` operation to get a public key and
import token, and use the public key to encrypt your key material. Then, use
`ImportKeyMaterial` with your import token to import the key material. For
step-by-step instructions, see [Importing Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the * *AWS Key Management Service Developer Guide* *. You cannot import the
key material into an asymmetric CMK.
### Custom Key Stores
To create a symmetric CMK in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html),
use the `CustomKeyStoreId` parameter to specify the custom key store. You must
also use the `Origin` parameter with a value of `AWS_CLOUDHSM`. The AWS CloudHSM
cluster that is associated with the custom key store must have at least two
active HSMs in different Availability Zones in the AWS Region.
You cannot create an asymmetric CMK in a custom key store. For information about
custom key stores in AWS KMS see [Using Custom Key Stores](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in the * *AWS Key Management Service Developer Guide* *.
**Cross-account use**: No. You cannot use this operation to create a CMK in a
different AWS account.
**Required permissions**:
[kms:CreateKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) (IAM policy). To use the `Tags` parameter,
[kms:TagResource](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy). For examples and information about related permissions, see [Allow a user to create
CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/iam-policies.html#iam-policy-example-create-key)
in the *AWS Key Management Service Developer Guide*.
## Related operations:
  * `DescribeKey`
  * `ListKeys`
  * `ScheduleKeyDeletion`
"""
def create_key(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateKey", input, options)
end
@doc """
Decrypts ciphertext that was encrypted by an AWS KMS customer master key (CMK)
using any of the following operations:
  * `Encrypt`
  * `GenerateDataKey`
  * `GenerateDataKeyPair`
  * `GenerateDataKeyWithoutPlaintext`
  * `GenerateDataKeyPairWithoutPlaintext`
You can use this operation to decrypt ciphertext that was encrypted under a
symmetric or asymmetric CMK.
When the CMK is asymmetric, you must specify the CMK and the encryption
algorithm that was used to encrypt the ciphertext. For information about
symmetric and asymmetric CMKs, see [Using Symmetric and Asymmetric CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*.
The Decrypt operation also decrypts ciphertext that was encrypted outside of AWS
KMS by the public key in an AWS KMS asymmetric CMK. However, it cannot decrypt
ciphertext produced by other libraries, such as the [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/) or
[Amazon S3 client-side encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html).
These libraries return a ciphertext format that is incompatible with AWS KMS.
If the ciphertext was encrypted under a symmetric CMK, the `KeyId` parameter is
optional. AWS KMS can get this information from metadata that it adds to the
symmetric ciphertext blob. This feature adds durability to your implementation
by ensuring that authorized users can decrypt ciphertext decades after it was
encrypted, even if they've lost track of the CMK ID. However, specifying the CMK
is always recommended as a best practice. When you use the `KeyId` parameter to
specify a CMK, AWS KMS only uses the CMK you specify. If the ciphertext was
encrypted under a different CMK, the `Decrypt` operation fails. This practice
ensures that you use the CMK that you intend.
Whenever possible, use key policies to give users permission to call the
`Decrypt` operation on a particular CMK, instead of using IAM policies.
Otherwise, you might create an IAM user policy that gives the user `Decrypt`
permission on all CMKs. This user could decrypt ciphertext that was encrypted by
CMKs in other accounts if the key policy for the cross-account CMK permits it.
If you must use an IAM policy for `Decrypt` permissions, limit the user to
particular CMKs or particular trusted accounts. For details, see [Best practices for IAM
policies](https://docs.aws.amazon.com/kms/latest/developerguide/iam-policies.html#iam-policies-best-practices)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. You can decrypt a ciphertext using a CMK in a
different AWS account.
**Required permissions**:
[kms:Decrypt](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
  * `Encrypt`
  * `GenerateDataKey`
  * `GenerateDataKeyPair`
  * `ReEncrypt`
"""
def decrypt(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "Decrypt", input, options)
end
@doc """
Deletes the specified alias.
Because an alias is not a property of a CMK, you can delete and change the
aliases of a CMK without affecting the CMK. Also, aliases do not appear in the
response from the `DescribeKey` operation. To get the aliases of all CMKs, use
the `ListAliases` operation.
Each CMK can have multiple aliases. To change the alias of a CMK, use
`DeleteAlias` to delete the current alias and `CreateAlias` to create a new
alias. To associate an existing alias with a different customer master key
(CMK), call `UpdateAlias`.
**Cross-account use**: No. You cannot perform this operation on an alias in a
different AWS account.
## Required permissions
  *
[kms:DeleteAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the alias (IAM policy).
  *
[kms:DeleteAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
on the CMK (key policy).
For details, see [Controlling access to aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *AWS Key Management Service Developer Guide*.
## Related operations:
  * `CreateAlias`
  * `ListAliases`
  * `UpdateAlias`
"""
def delete_alias(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DeleteAlias", input, options)
end
@doc """
Deletes a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
This operation does not delete the AWS CloudHSM cluster that is associated with
the custom key store, or affect any users or keys in the cluster.
The custom key store that you delete cannot contain any AWS KMS [customer master keys
(CMKs)](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#master_keys).
Before deleting the key store, verify that you will never need to use any of the
CMKs in the key store for any [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations).
Then, use `ScheduleKeyDeletion` to delete the AWS KMS customer master keys
(CMKs) from the key store. When the scheduled waiting period expires, the
`ScheduleKeyDeletion` operation deletes the CMKs. Then it makes a best effort to
delete the key material from the associated cluster. However, you might need to
manually [delete the orphaned key material](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-orphaned-key)
from the cluster and its backups.
After all CMKs are deleted from AWS KMS, use `DisconnectCustomKeyStore` to
disconnect the key store from AWS KMS. Then, you can delete the custom key
store.
Instead of deleting the custom key store, consider using
`DisconnectCustomKeyStore` to disconnect it from AWS KMS. While the key store is
disconnected, you cannot create or use the CMKs in the key store. But, you do
not need to delete CMKs and you can reconnect a disconnected custom key store at
any time.
If the operation succeeds, it returns a JSON object with no properties.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive integration of
AWS KMS with the isolation and control of a single-tenant key store.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.
**Required permissions**:
[kms:DeleteCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
  * `ConnectCustomKeyStore`
  * `CreateCustomKeyStore`
  * `DescribeCustomKeyStores`
  * `DisconnectCustomKeyStore`
  * `UpdateCustomKeyStore`
"""
def delete_custom_key_store(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeleteCustomKeyStore", input, options)
end
@doc """
Deletes key material that you previously imported.
This operation makes the specified customer master key (CMK) unusable. For more
information about importing key material into AWS KMS, see [Importing Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*.
When the specified CMK is in the `PendingDeletion` state, this operation does
not change the CMK's state. Otherwise, it changes the CMK's state to
`PendingImport`.
After you delete key material, you can use `ImportKeyMaterial` to reimport the
same key material into the CMK.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:DeleteImportedKeyMaterial](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
  * `GetParametersForImport`
  * `ImportKeyMaterial`
"""
def delete_imported_key_material(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DeleteImportedKeyMaterial", input, options)
end
@doc """
Gets information about [custom key stores](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in the account and region.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive integration of
AWS KMS with the isolation and control of a single-tenant key store.
By default, this operation returns information about all custom key stores in
the account and region. To get only information about a particular custom key
store, use either the `CustomKeyStoreName` or `CustomKeyStoreId` parameter (but
not both).
To determine whether the custom key store is connected to its AWS CloudHSM
cluster, use the `ConnectionState` element in the response. If an attempt to
connect the custom key store failed, the `ConnectionState` value is `FAILED` and
the `ConnectionErrorCode` element in the response indicates the cause of the
failure. For help interpreting the `ConnectionErrorCode`, see
`CustomKeyStoresListEntry`.
Custom key stores have a `DISCONNECTED` connection state if the key store has
never been connected or you use the `DisconnectCustomKeyStore` operation to
disconnect it. If your custom key store state is `CONNECTED` but you are having
trouble using it, make sure that its associated AWS CloudHSM cluster is active
and contains the minimum number of HSMs required for the operation, if any.
For help repairing your custom key store, see the [Troubleshooting Custom Key Stores](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
topic in the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.
**Required permissions**:
[kms:DescribeCustomKeyStores](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
  * `ConnectCustomKeyStore`
  * `CreateCustomKeyStore`
  * `DeleteCustomKeyStore`
  * `DisconnectCustomKeyStore`
  * `UpdateCustomKeyStore`
"""
def describe_custom_key_stores(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeCustomKeyStores", input, options)
end
@doc """
Provides detailed information about a customer master key (CMK).
You can run `DescribeKey` on a [customer managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk)
or an [AWS managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk).
This detailed information includes the key ARN, creation date (and deletion
date, if applicable), the key state, and the origin and expiration date (if any)
of the key material. For CMKs in custom key stores, it includes information
about the custom key store, such as the key store ID and the AWS CloudHSM
cluster ID. It includes fields, like `KeySpec`, that help you distinguish
symmetric from asymmetric CMKs. It also provides information that is
particularly important to asymmetric CMKs, such as the key usage (encryption or
signing) and the encryption algorithms or signing algorithms that the CMK
supports.
`DescribeKey` does not return the following information:
  * Aliases associated with the CMK. To get this information, use
`ListAliases`.
  * Whether automatic key rotation is enabled on the CMK. To get this
information, use `GetKeyRotationStatus`. Also, some key states prevent a CMK
from being automatically rotated. For details, see [How Automatic Key Rotation Works](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html#rotate-keys-how-it-works)
in *AWS Key Management Service Developer Guide*.
  * Tags on the CMK. To get this information, use `ListResourceTags`.
  * Key policies and grants on the CMK. To get this information, use
`GetKeyPolicy` and `ListGrants`.
If you call the `DescribeKey` operation on a *predefined AWS alias*, that is, an
AWS alias with no key ID, AWS KMS creates an [AWS managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#master_keys).
Then, it associates the alias with the new CMK, and returns the `KeyId` and
`Arn` of the new CMK in the response.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:DescribeKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
  * `GetKeyPolicy`
  * `GetKeyRotationStatus`
  * `ListAliases`
  * `ListGrants`
  * `ListKeys`
  * `ListResourceTags`
  * `ListRetirableGrants`
"""
def describe_key(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DescribeKey", input, options)
end
@doc """
Sets the state of a customer master key (CMK) to disabled.
This change temporarily prevents use of the CMK for [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations).
For more information about how key state affects the use of a CMK, see [How Key State Affects the Use of a Customer Master
Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the * *AWS Key Management Service Developer Guide* *.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:DisableKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `EnableKey`
"""
def disable_key(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DisableKey", input, options)
end
@doc """
Disables [automatic rotation of the key material](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
for the specified symmetric customer master key (CMK).
You cannot enable automatic rotation of asymmetric CMKs, CMKs with imported key
material, or CMKs in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:DisableKeyRotation](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
  * `EnableKeyRotation`
  * `GetKeyRotationStatus`
"""
def disable_key_rotation(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "DisableKeyRotation", input, options)
end
@doc """
Disconnects the [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
from its associated AWS CloudHSM cluster.
While a custom key store is disconnected, you can manage the custom key store
and its customer master keys (CMKs), but you cannot create or use CMKs in the
custom key store. You can reconnect the custom key store at any time.
While a custom key store is disconnected, all attempts to create customer master
keys (CMKs) in the custom key store or to use existing CMKs in [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations)
will fail. This action can prevent users from storing and accessing sensitive
data.
To find the connection state of a custom key store, use the
`DescribeCustomKeyStores` operation. To reconnect a custom key store, use the
`ConnectCustomKeyStore` operation.
If the operation succeeds, it returns a JSON object with no properties.
This operation is part of the [custom key store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive integration of
AWS KMS with the isolation and control of a single-tenant key store.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.
**Required permissions**:
[kms:DisconnectCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
* `ConnectCustomKeyStore`
* `CreateCustomKeyStore`
* `DeleteCustomKeyStore`
* `DescribeCustomKeyStores`
* `UpdateCustomKeyStore`
"""
def disconnect_custom_key_store(%Client{} = client, input, options \\ []) do
  # Issues the "DisconnectCustomKeyStore" KMS action through the common POST helper.
  meta = metadata()
  action = "DisconnectCustomKeyStore"
  Request.request_post(client, meta, action, input, options)
end
@doc """
Sets the key state of a customer master key (CMK) to enabled.
This allows you to use the CMK for [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations).
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:EnableKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `DisableKey`
"""
def enable_key(%Client{} = client, input, options \\ []) do
  # Delegates the "EnableKey" action to the shared request pipeline.
  meta = metadata()
  Request.request_post(client, meta, "EnableKey", input, options)
end
@doc """
Enables [automatic rotation of the key material](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
for the specified symmetric customer master key (CMK).
You cannot enable automatic rotation of asymmetric CMKs, CMKs with imported key
material, or CMKs in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:EnableKeyRotation](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `DisableKeyRotation`
* `GetKeyRotationStatus`
"""
def enable_key_rotation(%Client{} = client, input, options \\ []) do
  # Posts the KMS "EnableKeyRotation" action on behalf of the caller.
  action = "EnableKeyRotation"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Encrypts plaintext into ciphertext by using a customer master key (CMK).
The `Encrypt` operation has two primary use cases:
* You can encrypt small amounts of arbitrary data, such as a
personal identifier or database password, or other sensitive information.
* You can use the `Encrypt` operation to move encrypted data from
one AWS Region to another. For example, in Region A, generate a data key and use
the plaintext key to encrypt your data. Then, in Region A, use the `Encrypt`
operation to encrypt the plaintext data key under a CMK in Region B. Now, you
can move the encrypted data and the encrypted data key to Region B. When
necessary, you can decrypt the encrypted data key and the encrypted data
entirely within Region B.
You don't need to use the `Encrypt` operation to encrypt a data key. The
`GenerateDataKey` and `GenerateDataKeyPair` operations return a plaintext data
key and an encrypted copy of that data key.
When you encrypt data, you must specify a symmetric or asymmetric CMK to use in
the encryption operation. The CMK must have a `KeyUsage` value of
`ENCRYPT_DECRYPT.` To find the `KeyUsage` of a CMK, use the `DescribeKey`
operation.
If you use a symmetric CMK, you can use an encryption context to add additional
security to your encryption operation. If you specify an `EncryptionContext`
when encrypting data, you must specify the same encryption context (a
case-sensitive exact match) when decrypting the data. Otherwise, the request to
decrypt fails with an `InvalidCiphertextException`. For more information, see
[Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *AWS Key Management Service Developer Guide*.
If you specify an asymmetric CMK, you must also specify the encryption
algorithm. The algorithm must be compatible with the CMK type.
When you use an asymmetric CMK to encrypt or reencrypt data, be sure to record
the CMK and encryption algorithm that you choose. You will be required to
provide the same CMK and encryption algorithm when you decrypt the data. If the
CMK and algorithm do not match the values used to encrypt the data, the decrypt
operation fails.
You are not required to supply the CMK ID and encryption algorithm when you
decrypt with symmetric CMKs because AWS KMS stores this information in the
ciphertext blob. AWS KMS cannot store metadata in ciphertext generated with
asymmetric keys. The standard format for asymmetric key ciphertext does not
include configurable fields.
The maximum size of the data that you can encrypt varies with the type of CMK
and the encryption algorithm that you choose.
* Symmetric CMKs
* `SYMMETRIC_DEFAULT`: 4096 bytes
* `RSA_2048`
* `RSAES_OAEP_SHA_1`: 214 bytes
* `RSAES_OAEP_SHA_256`: 190 bytes
* `RSA_3072`
* `RSAES_OAEP_SHA_1`: 342 bytes
* `RSAES_OAEP_SHA_256`: 318 bytes
* `RSA_4096`
* `RSAES_OAEP_SHA_1`: 470 bytes
* `RSAES_OAEP_SHA_256`: 446 bytes
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:Encrypt](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
"""
def encrypt(%Client{} = client, input, options \\ []) do
  # Forwards the caller's payload to the KMS "Encrypt" action.
  meta = metadata()
  Request.request_post(client, meta, "Encrypt", input, options)
end
@doc """
Generates a unique symmetric data key for client-side encryption.
This operation returns a plaintext copy of the data key and a copy that is
encrypted under a customer master key (CMK) that you specify. You can use the
plaintext key to encrypt your data outside of AWS KMS and store the encrypted
data key with the encrypted data.
`GenerateDataKey` returns a unique data key for each request. The bytes in the
plaintext key are not related to the caller or the CMK.
To generate a data key, specify the symmetric CMK that will be used to encrypt
the data key. You cannot use an asymmetric CMK to generate data keys. To get the
type of your CMK, use the `DescribeKey` operation. You must also specify the
length of the data key. Use either the `KeySpec` or `NumberOfBytes` parameters
(but not both). For 128-bit and 256-bit data keys, use the `KeySpec` parameter.
To get only an encrypted copy of the data key, use
`GenerateDataKeyWithoutPlaintext`. To generate an asymmetric data key pair, use
the `GenerateDataKeyPair` or `GenerateDataKeyPairWithoutPlaintext` operation. To
get a cryptographically secure random byte string, use `GenerateRandom`.
You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
## How to use your data key
We recommend that you use the following pattern to encrypt data locally in your
application. You can write your own code or use a client-side encryption
library, such as the [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/), the
[Amazon DynamoDB Encryption Client](https://docs.aws.amazon.com/dynamodb-encryption-client/latest/devguide/),
or [Amazon S3 client-side encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html)
to do these tasks for you.
To encrypt data outside of AWS KMS:
1. Use the `GenerateDataKey` operation to get a data key.
2. Use the plaintext data key (in the `Plaintext` field of the
response) to encrypt your data outside of AWS KMS. Then erase the plaintext data
key from memory.
3. Store the encrypted data key (in the `CiphertextBlob` field of
the response) with the encrypted data.
To decrypt data outside of AWS KMS:
1. Use the `Decrypt` operation to decrypt the encrypted data key.
The operation returns a plaintext copy of the data key.
2. Use the plaintext data key to decrypt data outside of AWS KMS,
then erase the plaintext data key from memory.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:GenerateDataKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKeyPair`
* `GenerateDataKeyPairWithoutPlaintext`
* `GenerateDataKeyWithoutPlaintext`
"""
def generate_data_key(%Client{} = client, input, options \\ []) do
  # Invokes the KMS "GenerateDataKey" action via the shared POST helper.
  action = "GenerateDataKey"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Generates a unique asymmetric data key pair.
The `GenerateDataKeyPair` operation returns a plaintext public key, a plaintext
private key, and a copy of the private key that is encrypted under the symmetric
CMK you specify. You can use the data key pair to perform asymmetric
cryptography outside of AWS KMS.
`GenerateDataKeyPair` returns a unique data key pair for each request. The bytes
in the keys are not related to the caller or the CMK that is used to encrypt the
private key.
You can use the public key that `GenerateDataKeyPair` returns to encrypt data or
verify a signature outside of AWS KMS. Then, store the encrypted private key
with the data. When you are ready to decrypt data or sign a message, you can use
the `Decrypt` operation to decrypt the encrypted private key.
To generate a data key pair, you must specify a symmetric customer master key
(CMK) to encrypt the private key in a data key pair. You cannot use an
asymmetric CMK or a CMK in a custom key store. To get the type and origin of
your CMK, use the `DescribeKey` operation.
If you are using the data key pair to encrypt data, or for any operation where
you don't immediately need a private key, consider using the
`GenerateDataKeyPairWithoutPlaintext` operation.
`GenerateDataKeyPairWithoutPlaintext` returns a plaintext public key and an
encrypted private key, but omits the plaintext private key that you need only to
decrypt ciphertext or sign a message. Later, when you need to decrypt the data
or sign a message, use the `Decrypt` operation to decrypt the encrypted private
key in the data key pair.
You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:GenerateDataKeyPair](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPairWithoutPlaintext`
* `GenerateDataKeyWithoutPlaintext`
"""
def generate_data_key_pair(%Client{} = client, input, options \\ []) do
  # Invokes the KMS "GenerateDataKeyPair" action with the supplied input map.
  meta = metadata()
  Request.request_post(client, meta, "GenerateDataKeyPair", input, options)
end
@doc """
Generates a unique asymmetric data key pair.
The `GenerateDataKeyPairWithoutPlaintext` operation returns a plaintext public
key and a copy of the private key that is encrypted under the symmetric CMK you
specify. Unlike `GenerateDataKeyPair`, this operation does not return a
plaintext private key.
To generate a data key pair, you must specify a symmetric customer master key
(CMK) to encrypt the private key in the data key pair. You cannot use an
asymmetric CMK or a CMK in a custom key store. To get the type and origin of
your CMK, use the `KeySpec` field in the `DescribeKey` response.
You can use the public key that `GenerateDataKeyPairWithoutPlaintext` returns to
encrypt data or verify a signature outside of AWS KMS. Then, store the encrypted
private key with the data. When you are ready to decrypt data or sign a message,
you can use the `Decrypt` operation to decrypt the encrypted private key.
`GenerateDataKeyPairWithoutPlaintext` returns a unique data key pair for each
request. The bytes in the key are not related to the caller or CMK that is used
to encrypt the private key.
You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:GenerateDataKeyPairWithoutPlaintext](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
* `GenerateDataKeyWithoutPlaintext`
"""
def generate_data_key_pair_without_plaintext(%Client{} = client, input, options \\ []) do
  # Invokes the KMS "GenerateDataKeyPairWithoutPlaintext" action; the action
  # name is bound locally to keep the call within the line-length convention.
  action = "GenerateDataKeyPairWithoutPlaintext"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Generates a unique symmetric data key.
This operation returns a data key that is encrypted under a customer master key
(CMK) that you specify. To request an asymmetric data key pair, use the
`GenerateDataKeyPair` or `GenerateDataKeyPairWithoutPlaintext` operations.
`GenerateDataKeyWithoutPlaintext` is identical to the `GenerateDataKey`
operation except that it returns only the encrypted copy of the data key. This
operation is useful for systems that need to encrypt data at some point, but not
immediately. When you need to encrypt the data, you call the `Decrypt` operation
on the encrypted copy of the key.
It's also useful in distributed systems with different levels of trust. For
example, you might store encrypted data in containers. One component of your
system creates new containers and stores an encrypted data key with each
container. Then, a different component puts the data into the containers. That
component first decrypts the data key, uses the plaintext data key to encrypt
data, puts the encrypted data into the container, and then destroys the
plaintext data key. In this system, the component that creates the containers
never sees the plaintext data key.
`GenerateDataKeyWithoutPlaintext` returns a unique data key for each request.
The bytes in the keys are not related to the caller or CMK that is used to
encrypt the private key.
To generate a data key, you must specify the symmetric customer master key (CMK)
that is used to encrypt the data key. You cannot use an asymmetric CMK to
generate a data key. To get the type of your CMK, use the `DescribeKey`
operation.
If the operation succeeds, you will find the encrypted copy of the data key in
the `CiphertextBlob` field.
You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:GenerateDataKeyWithoutPlaintext](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
* `GenerateDataKeyPairWithoutPlaintext`
"""
def generate_data_key_without_plaintext(%Client{} = client, input, options \\ []) do
  # Issues the "GenerateDataKeyWithoutPlaintext" action through the request helper.
  action = "GenerateDataKeyWithoutPlaintext"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns a random byte string that is cryptographically secure.
By default, the random byte string is generated in AWS KMS. To generate the byte
string in the AWS CloudHSM cluster that is associated with a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html),
specify the custom key store ID.
For more information about entropy and random number generation, see the [AWS Key Management Service Cryptographic
Details](https://d0.awsstatic.com/whitepapers/KMS-Cryptographic-Details.pdf)
whitepaper.
**Required permissions**:
[kms:GenerateRandom](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
"""
def generate_random(%Client{} = client, input, options \\ []) do
  # Delegates the "GenerateRandom" action to the common POST pipeline.
  meta = metadata()
  Request.request_post(client, meta, "GenerateRandom", input, options)
end
@doc """
Gets a key policy attached to the specified customer master key (CMK).
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:GetKeyPolicy](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `PutKeyPolicy`
"""
def get_key_policy(%Client{} = client, input, options \\ []) do
  # Posts the KMS "GetKeyPolicy" action for the given client and input.
  action = "GetKeyPolicy"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Gets a Boolean value that indicates whether [automatic rotation of the key material](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
is enabled for the specified customer master key (CMK).
You cannot enable automatic rotation of asymmetric CMKs, CMKs with imported key
material, or CMKs in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
The key rotation status for these CMKs is always `false`.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
* Disabled: The key rotation status does not change when you disable
a CMK. However, while the CMK is disabled, AWS KMS does not rotate the backing
key.
* Pending deletion: While a CMK is pending deletion, its key
rotation status is `false` and AWS KMS does not rotate the backing key. If you
cancel the deletion, the original key rotation status is restored.
**Cross-account use**: Yes. To perform this operation on a CMK in a different
AWS account, specify the key ARN in the value of the `KeyId` parameter.
**Required permissions**:
[kms:GetKeyRotationStatus](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `DisableKeyRotation`
* `EnableKeyRotation`
"""
def get_key_rotation_status(%Client{} = client, input, options \\ []) do
  # Forwards the "GetKeyRotationStatus" action to the shared request helper.
  meta = metadata()
  Request.request_post(client, meta, "GetKeyRotationStatus", input, options)
end
@doc """
Returns the items you need to import key material into a symmetric, customer
managed customer master key (CMK).
For more information about importing key material into AWS KMS, see [Importing Key
Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*.
This operation returns a public key and an import token. Use the public key to
encrypt the symmetric key material. Store the import token to send with a
subsequent `ImportKeyMaterial` request.
You must specify the key ID of the symmetric CMK into which you will import key
material. This CMK's `Origin` must be `EXTERNAL`. You must also specify the
wrapping algorithm and type of wrapping key (public key) that you will use to
encrypt the key material. You cannot perform this operation on an asymmetric CMK
or on any CMK in a different AWS account.
To import key material, you must use the public key and import token from the
same response. These items are valid for 24 hours. The expiration date and time
appear in the `GetParametersForImport` response. You cannot use an expired token
in an `ImportKeyMaterial` request. If your key and token expire, send another
`GetParametersForImport` request.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:GetParametersForImport](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `ImportKeyMaterial`
* `DeleteImportedKeyMaterial`
"""
def get_parameters_for_import(%Client{} = client, input, options \\ []) do
  # Invokes the KMS "GetParametersForImport" action via the common POST helper.
  action = "GetParametersForImport"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns the public key of an asymmetric CMK.
Unlike the private key of an asymmetric CMK, which never leaves AWS KMS
unencrypted, callers with `kms:GetPublicKey` permission can download the public
key of an asymmetric CMK. You can share the public key to allow others to
encrypt messages and verify signatures outside of AWS KMS. For information about
symmetric and asymmetric CMKs, see [Using Symmetric and Asymmetric CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*.
You do not need to download the public key. Instead, you can use the public key
within AWS KMS by calling the `Encrypt`, `ReEncrypt`, or `Verify` operations
with the identifier of an asymmetric CMK. When you use the public key within AWS
KMS, you benefit from the authentication, authorization, and logging that are
part of every AWS KMS operation. You also reduce the risk of encrypting data that
cannot be decrypted. These features are not effective outside of AWS KMS. For
details, see [Special Considerations for Downloading Public Keys](https://docs.aws.amazon.com/kms/latest/developerguide/download-public-key.html#download-public-key-considerations).
To help you use the public key safely outside of AWS KMS, `GetPublicKey` returns
important information about the public key in the response, including:
*
[CustomerMasterKeySpec](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-CustomerMasterKeySpec): The type of key material in the public key, such as `RSA_4096` or
`ECC_NIST_P521`.
*
[KeyUsage](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-KeyUsage):
Whether the key is used for encryption or signing.
*
[EncryptionAlgorithms](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-EncryptionAlgorithms) or
[SigningAlgorithms](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-SigningAlgorithms):
A list of the encryption algorithms or the signing algorithms for the key.
Although AWS KMS cannot enforce these restrictions on external operations, it is
crucial that you use this information to prevent the public key from being used
improperly. For example, you can prevent a public signing key from being used
to encrypt data, or prevent a public key from being used with an encryption
algorithm that is not supported by AWS KMS. You can also avoid errors, such as
using the wrong signing algorithm in a verification operation.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:GetPublicKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `CreateKey`
"""
def get_public_key(%Client{} = client, input, options \\ []) do
  # Delegates the "GetPublicKey" action to the shared request pipeline.
  meta = metadata()
  Request.request_post(client, meta, "GetPublicKey", input, options)
end
@doc """
Imports key material into an existing symmetric AWS KMS customer master key
(CMK) that was created without key material.
After you successfully import key material into a CMK, you can [reimport the same key
material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html#reimport-key-material)
into that CMK, but you cannot import different key material.
You cannot perform this operation on an asymmetric CMK or on any CMK in a
different AWS account. For more information about creating CMKs with no key
material and then importing key material, see [Importing Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*.
Before using this operation, call `GetParametersForImport`. Its response
includes a public key and an import token. Use the public key to encrypt the key
material. Then, submit the import token from the same `GetParametersForImport`
response.
When calling this operation, you must specify the following values:
* The key ID or key ARN of a CMK with no key material. Its `Origin`
must be `EXTERNAL`.
To create a CMK with no key material, call `CreateKey` and set the value of its
`Origin` parameter to `EXTERNAL`. To get the `Origin` of a CMK, call
`DescribeKey`.
* The encrypted key material. To get the public key to encrypt the
key material, call `GetParametersForImport`.
* The import token that `GetParametersForImport` returned. You must
use a public key and token from the same `GetParametersForImport` response.
* Whether the key material expires and if so, when. If you set an
expiration date, AWS KMS deletes the key material from the CMK on the specified
date, and the CMK becomes unusable. To use the CMK again, you must reimport the
same key material. The only way to change an expiration date is by reimporting
the same key material and specifying a new expiration date.
When this operation is successful, the key state of the CMK changes from
`PendingImport` to `Enabled`, and you can use the CMK.
If this operation fails, use the exception to help determine the problem. If the
error is related to the key material, the import token, or wrapping key, use
`GetParametersForImport` to get a new public key and import token for the CMK
and repeat the import procedure. For help, see [How To Import Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html#importing-keys-overview)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:ImportKeyMaterial](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `DeleteImportedKeyMaterial`
* `GetParametersForImport`
"""
def import_key_material(%Client{} = client, input, options \\ []) do
  # Posts the KMS "ImportKeyMaterial" action with the caller-supplied input.
  action = "ImportKeyMaterial"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Gets a list of aliases in the caller's AWS account and region.
For more information about aliases, see `CreateAlias`.
By default, the `ListAliases` operation returns all aliases in the account and
region. To get only the aliases associated with a particular customer master key
(CMK), use the `KeyId` parameter.
The `ListAliases` response can include aliases that you created and associated
with your customer managed CMKs, and aliases that AWS created and associated
with AWS managed CMKs in your account. You can recognize AWS aliases because
their names have the format `aws/<service-name>`, such as `aws/dynamodb`.
The response might also include aliases that have no `TargetKeyId` field. These
are predefined aliases that AWS has created but has not yet associated with a
CMK. Aliases that AWS creates in your account, including predefined aliases, do
not count against your [AWS KMS aliases quota](https://docs.aws.amazon.com/kms/latest/developerguide/limits.html#aliases-limit).
**Cross-account use**: No. `ListAliases` does not return aliases in other AWS
accounts.
**Required permissions**:
[kms:ListAliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) (IAM policy)
For details, see [Controlling access to
aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *AWS Key Management Service Developer Guide*.
## Related operations:
* `CreateAlias`
* `DeleteAlias`
* `UpdateAlias`
"""
def list_aliases(%Client{} = client, input, options \\ []) do
  # Delegates to the generated request helper with the "ListAliases" action name.
  meta = metadata()
  Request.request_post(client, meta, "ListAliases", input, options)
end
@doc """
Gets a list of all grants for the specified customer master key (CMK).
You must specify the CMK in all requests. You can filter the grant list by grant
ID or grantee principal.
The `GranteePrincipal` field in the `ListGrants` response usually contains the
user or role designated as the grantee principal in the grant. However, when the
grantee principal in the grant is an AWS service, the `GranteePrincipal` field
contains the [service principal](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html#principal-services),
which might represent several different grantee principals.
**Cross-account use**: Yes. To perform this operation on a CMK in a different
AWS account, specify the key ARN in the value of the `KeyId` parameter.
**Required permissions**:
[kms:ListGrants](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `CreateGrant`
* `ListRetirableGrants`
* `RetireGrant`
* `RevokeGrant`
"""
def list_grants(%Client{} = client, input, options \\ []) do
  # Thin wrapper: POSTs a "ListGrants" request built from the service metadata.
  meta = metadata()
  Request.request_post(client, meta, "ListGrants", input, options)
end
@doc """
Gets the names of the key policies that are attached to a customer master key
(CMK).
This operation is designed to get policy names that you can use in a
`GetKeyPolicy` operation. However, the only valid policy name is `default`.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:ListKeyPolicies](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `GetKeyPolicy`
* `PutKeyPolicy`
"""
def list_key_policies(%Client{} = client, input, options \\ []) do
  # Delegates to the generated request helper with the "ListKeyPolicies" action name.
  meta = metadata()
  Request.request_post(client, meta, "ListKeyPolicies", input, options)
end
@doc """
Gets a list of all customer master keys (CMKs) in the caller's AWS account and
Region.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:ListKeys](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
* `CreateKey`
* `DescribeKey`
* `ListAliases`
* `ListResourceTags`
"""
def list_keys(%Client{} = client, input, options \\ []) do
  # Thin wrapper: POSTs a "ListKeys" request built from the service metadata.
  meta = metadata()
  Request.request_post(client, meta, "ListKeys", input, options)
end
@doc """
Returns all tags on the specified customer master key (CMK).
For general information about tags, including the format and syntax, see
[Tagging AWS resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html) in
the *Amazon Web Services General Reference*. For information about using tags in
AWS KMS, see [Tagging keys](https://docs.aws.amazon.com/kms/latest/developerguide/tagging-keys.html).
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:ListResourceTags](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `TagResource`
* `UntagResource`
"""
def list_resource_tags(%Client{} = client, input, options \\ []) do
  # Delegates to the generated request helper with the "ListResourceTags" action name.
  meta = metadata()
  Request.request_post(client, meta, "ListResourceTags", input, options)
end
@doc """
Returns all grants in which the specified principal is the `RetiringPrincipal`
in the grant.
You can specify any principal in your AWS account. The grants that are returned
include grants for CMKs in your AWS account and other AWS accounts.
You might use this operation to determine which grants you may retire. To retire
a grant, use the `RetireGrant` operation.
**Cross-account use**: You must specify a principal in your AWS account.
However, this operation can return grants in any AWS account. You do not need
`kms:ListRetirableGrants` permission (or any other additional permission) in any
AWS account other than your own.
**Required permissions**:
[kms:ListRetirableGrants](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy) in your AWS account.
## Related operations:
* `CreateGrant`
* `ListGrants`
* `RetireGrant`
* `RevokeGrant`
"""
def list_retirable_grants(%Client{} = client, input, options \\ []) do
  # Thin wrapper: POSTs a "ListRetirableGrants" request built from the service metadata.
  meta = metadata()
  Request.request_post(client, meta, "ListRetirableGrants", input, options)
end
@doc """
Attaches a key policy to the specified customer master key (CMK).
For more information about key policies, see [Key Policies](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html)
in the *AWS Key Management Service Developer Guide*. For help writing and
formatting a JSON policy document, see the [IAM JSON Policy Reference](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies.html)
in the * *IAM User Guide* *. For examples of adding a key policy in multiple
programming languages, see [Setting a key policy](https://docs.aws.amazon.com/kms/latest/developerguide/programming-key-policies.html#put-policy)
in the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:PutKeyPolicy](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `GetKeyPolicy`
"""
def put_key_policy(%Client{} = client, input, options \\ []) do
  # Delegates to the generated request helper with the "PutKeyPolicy" action name.
  meta = metadata()
  Request.request_post(client, meta, "PutKeyPolicy", input, options)
end
@doc """
Decrypts ciphertext and then reencrypts it entirely within AWS KMS.
You can use this operation to change the customer master key (CMK) under which
data is encrypted, such as when you [manually rotate](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html#rotate-keys-manually)
a CMK or change the CMK that protects a ciphertext. You can also use it to
reencrypt ciphertext under the same CMK, such as to change the [encryption context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
of a ciphertext.
The `ReEncrypt` operation can decrypt ciphertext that was encrypted by using an
AWS KMS CMK in an AWS KMS operation, such as `Encrypt` or `GenerateDataKey`. It
can also decrypt ciphertext that was encrypted by using the public key of an
[asymmetric CMK](https://docs.aws.amazon.com/kms/latest/developerguide/symm-asymm-concepts.html#asymmetric-cmks)
outside of AWS KMS. However, it cannot decrypt ciphertext produced by other
libraries, such as the [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/) or
[Amazon S3 client-side encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html).
These libraries return a ciphertext format that is incompatible with AWS KMS.
When you use the `ReEncrypt` operation, you need to provide information for the
decrypt operation and the subsequent encrypt operation.
* If your ciphertext was encrypted under an asymmetric CMK, you must
use the `SourceKeyId` parameter to identify the CMK that encrypted the
ciphertext. You must also supply the encryption algorithm that was used. This
information is required to decrypt the data.
* If your ciphertext was encrypted under a symmetric CMK, the
`SourceKeyId` parameter is optional. AWS KMS can get this information from
metadata that it adds to the symmetric ciphertext blob. This feature adds
durability to your implementation by ensuring that authorized users can decrypt
ciphertext decades after it was encrypted, even if they've lost track of the CMK
ID. However, specifying the source CMK is always recommended as a best practice.
When you use the `SourceKeyId` parameter to specify a CMK, AWS KMS uses only the
CMK you specify. If the ciphertext was encrypted under a different CMK, the
`ReEncrypt` operation fails. This practice ensures that you use the CMK that you
intend.
* To reencrypt the data, you must use the `DestinationKeyId`
parameter to specify the CMK that re-encrypts the data after it is decrypted. You
can select a symmetric or asymmetric CMK. If the destination CMK is an
asymmetric CMK, you must also provide the encryption algorithm. The algorithm
that you choose must be compatible with the CMK.
When you use an asymmetric CMK to encrypt or reencrypt data, be sure to record
the CMK and encryption algorithm that you choose. You will be required to
provide the same CMK and encryption algorithm when you decrypt the data. If the
CMK and algorithm do not match the values used to encrypt the data, the decrypt
operation fails.
You are not required to supply the CMK ID and encryption algorithm when you
decrypt with symmetric CMKs because AWS KMS stores this information in the
ciphertext blob. AWS KMS cannot store metadata in ciphertext generated with
asymmetric keys. The standard format for asymmetric key ciphertext does not
include configurable fields.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. The source CMK and destination CMK can be in
different AWS accounts. Either or both CMKs can be in a different account than
the caller.
**Required permissions**:
*
[kms:ReEncryptFrom](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) permission on the source CMK (key policy)
*
[kms:ReEncryptTo](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
permission on the destination CMK (key policy)
To permit reencryption from or to a CMK, include the `"kms:ReEncrypt*"`
permission in your [key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html).
This permission is automatically included in the key policy when you use the
console to create a CMK. But you must include it manually when you create a CMK
programmatically or when you use the `PutKeyPolicy` operation to set a key
policy.
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
"""
def re_encrypt(%Client{} = client, input, options \\ []) do
  # Thin wrapper: POSTs a "ReEncrypt" request built from the service metadata.
  meta = metadata()
  Request.request_post(client, meta, "ReEncrypt", input, options)
end
@doc """
Retires a grant.
To clean up, you can retire a grant when you're done using it. You should revoke
a grant when you intend to actively deny operations that depend on it. The
following are permitted to call this API:
* The AWS account (root user) under which the grant was created
* The `RetiringPrincipal`, if present in the grant
* The `GranteePrincipal`, if `RetireGrant` is an operation specified
in the grant
You must identify the grant to retire by its grant token or by a combination of
the grant ID and the Amazon Resource Name (ARN) of the customer master key
(CMK). A grant token is a unique variable-length base64-encoded string. A grant
ID is a 64 character unique identifier of a grant. The `CreateGrant` operation
returns both.
**Cross-account use**: Yes. You can retire a grant on a CMK in a different AWS
account.
**Required permissions**: Permission to retire a grant is specified in the
grant. You cannot control access to this operation in a policy. For more
information, see [Using grants](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html) in
the *AWS Key Management Service Developer Guide*.
## Related operations:
* `CreateGrant`
* `ListGrants`
* `ListRetirableGrants`
* `RevokeGrant`
"""
def retire_grant(%Client{} = client, input, options \\ []) do
  # Delegates to the generated request helper with the "RetireGrant" action name.
  meta = metadata()
  Request.request_post(client, meta, "RetireGrant", input, options)
end
@doc """
Revokes the specified grant for the specified customer master key (CMK).
You can revoke a grant to actively deny operations that depend on it.
**Cross-account use**: Yes. To perform this operation on a CMK in a different
AWS account, specify the key ARN in the value of the `KeyId` parameter.
**Required permissions**:
[kms:RevokeGrant](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `CreateGrant`
* `ListGrants`
* `ListRetirableGrants`
* `RetireGrant`
"""
def revoke_grant(%Client{} = client, input, options \\ []) do
  # Thin wrapper: POSTs a "RevokeGrant" request built from the service metadata.
  meta = metadata()
  Request.request_post(client, meta, "RevokeGrant", input, options)
end
@doc """
Schedules the deletion of a customer master key (CMK).
You may provide a waiting period, specified in days, before deletion occurs. If
you do not provide a waiting period, the default period of 30 days is used. When
this operation is successful, the key state of the CMK changes to
`PendingDeletion`. Before the waiting period ends, you can use
`CancelKeyDeletion` to cancel the deletion of the CMK. After the waiting period
ends, AWS KMS deletes the CMK and all AWS KMS data associated with it, including
all aliases that refer to it.
Deleting a CMK is a destructive and potentially dangerous operation. When a CMK
is deleted, all data that was encrypted under the CMK is unrecoverable. To
prevent the use of a CMK without deleting it, use `DisableKey`.
If you schedule deletion of a CMK from a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html),
when the waiting period expires, `ScheduleKeyDeletion` deletes the CMK from AWS
KMS. Then AWS KMS makes a best effort to delete the key material from the
associated AWS CloudHSM cluster. However, you might need to manually [delete the orphaned key
material](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-orphaned-key)
from the cluster and its backups.
For more information about scheduling a CMK for deletion, see [Deleting Customer Master
Keys](https://docs.aws.amazon.com/kms/latest/developerguide/deleting-keys.html)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:ScheduleKeyDeletion](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `CancelKeyDeletion`
* `DisableKey`
"""
def schedule_key_deletion(%Client{} = client, input, options \\ []) do
  # Delegates to the generated request helper with the "ScheduleKeyDeletion" action name.
  meta = metadata()
  Request.request_post(client, meta, "ScheduleKeyDeletion", input, options)
end
@doc """
Creates a [digital signature](https://en.wikipedia.org/wiki/Digital_signature) for a message or message digest by using the private key in an asymmetric CMK.
To verify the signature, use the `Verify` operation, or use the public key in
the same asymmetric CMK outside of AWS KMS. For information about symmetric and
asymmetric CMKs, see [Using Symmetric and Asymmetric
CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*.
Digital signatures are generated and verified by using an asymmetric key pair, such
as an RSA or ECC pair that is represented by an asymmetric customer master key
(CMK). The key owner (or an authorized user) uses their private key to sign a
message. Anyone with the public key can verify that the message was signed with
that particular private key and that the message hasn't changed since it was
signed.
To use the `Sign` operation, provide the following information:
* Use the `KeyId` parameter to identify an asymmetric CMK with a
`KeyUsage` value of `SIGN_VERIFY`. To get the `KeyUsage` value of a CMK, use the
`DescribeKey` operation. The caller must have `kms:Sign` permission on the CMK.
* Use the `Message` parameter to specify the message or message
digest to sign. You can submit messages of up to 4096 bytes. To sign a larger
message, generate a hash digest of the message, and then provide the hash digest
in the `Message` parameter. To indicate whether the message is a full message or
a digest, use the `MessageType` parameter.
* Choose a signing algorithm that is compatible with the CMK.
When signing a message, be sure to record the CMK and the signing algorithm.
This information is required to verify the signature.
To verify the signature that this operation generates, use the `Verify`
operation. Or use the `GetPublicKey` operation to download the public key and
then use the public key to verify the signature outside of AWS KMS.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:Sign](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `Verify`
"""
def sign(%Client{} = client, input, options \\ []) do
  # Thin wrapper: POSTs a "Sign" request built from the service metadata.
  meta = metadata()
  Request.request_post(client, meta, "Sign", input, options)
end
@doc """
Adds or edits tags on a [customer managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk).
Each tag consists of a tag key and a tag value, both of which are case-sensitive
strings. The tag value can be an empty (null) string.
To add a tag, specify a new tag key and a tag value. To edit a tag, specify an
existing tag key and a new tag value.
You can use this operation to tag a [customer managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk),
but you cannot tag an [AWS managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk),
an [AWS owned CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-owned-cmk),
or an alias.
For general information about tags, including the format and syntax, see
[Tagging AWS resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html) in
the *Amazon Web Services General Reference*. For information about using tags in
AWS KMS, see [Tagging keys](https://docs.aws.amazon.com/kms/latest/developerguide/tagging-keys.html).
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:TagResource](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `UntagResource`
* `ListResourceTags`
"""
def tag_resource(%Client{} = client, input, options \\ []) do
  # Delegates to the generated request helper with the "TagResource" action name.
  meta = metadata()
  Request.request_post(client, meta, "TagResource", input, options)
end
@doc """
Deletes tags from a [customer managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk).
To delete a tag, specify the tag key and the CMK.
When it succeeds, the `UntagResource` operation doesn't return any output. Also,
if the specified tag key isn't found on the CMK, it doesn't throw an exception
or return a response. To confirm that the operation worked, use the
`ListResourceTags` operation.
For general information about tags, including the format and syntax, see
[Tagging AWS resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html) in
the *Amazon Web Services General Reference*. For information about using tags in
AWS KMS, see [Tagging keys](https://docs.aws.amazon.com/kms/latest/developerguide/tagging-keys.html).
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:UntagResource](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `TagResource`
* `ListResourceTags`
"""
def untag_resource(%Client{} = client, input, options \\ []) do
  # Thin wrapper: POSTs an "UntagResource" request built from the service metadata.
  meta = metadata()
  Request.request_post(client, meta, "UntagResource", input, options)
end
@doc """
Associates an existing AWS KMS alias with a different customer master key (CMK).
Each alias is associated with only one CMK at a time, although a CMK can have
multiple aliases. The alias and the CMK must be in the same AWS account and
region.
The current and new CMK must be the same type (both symmetric or both
asymmetric), and they must have the same key usage (`ENCRYPT_DECRYPT` or
`SIGN_VERIFY`). This restriction prevents errors in code that uses aliases. If
you must assign an alias to a different type of CMK, use `DeleteAlias` to delete
the old alias and `CreateAlias` to create a new alias.
You cannot use `UpdateAlias` to change an alias name. To change an alias name,
use `DeleteAlias` to delete the old alias and `CreateAlias` to create a new
alias.
Because an alias is not a property of a CMK, you can create, update, and delete
the aliases of a CMK without affecting the CMK. Also, aliases do not appear in
the response from the `DescribeKey` operation. To get the aliases of all CMKs in
the account, use the `ListAliases` operation.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
## Required permissions
*
[kms:UpdateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the alias (IAM policy).
*
[kms:UpdateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
on the current CMK (key policy).
*
[kms:UpdateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the new CMK (key policy).
For details, see [Controlling access to
aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *AWS Key Management Service Developer Guide*.
## Related operations:
* `CreateAlias`
* `DeleteAlias`
* `ListAliases`
"""
def update_alias(%Client{} = client, input, options \\ []) do
  # Delegates to the generated request helper with the "UpdateAlias" action name.
  meta = metadata()
  Request.request_post(client, meta, "UpdateAlias", input, options)
end
@doc """
Changes the properties of a custom key store.
Use the `CustomKeyStoreId` parameter to identify the custom key store you want
to edit. Use the remaining parameters to change the properties of the custom key
store.
You can only update a custom key store that is disconnected. To disconnect the
custom key store, use `DisconnectCustomKeyStore`. To reconnect the custom key
store after the update completes, use `ConnectCustomKeyStore`. To find the
connection state of a custom key store, use the `DescribeCustomKeyStores`
operation.
Use the parameters of `UpdateCustomKeyStore` to edit your keystore settings.
* Use the **NewCustomKeyStoreName** parameter to change the friendly
name of the custom key store to the value that you specify.
* Use the **KeyStorePassword** parameter to tell AWS KMS the current
password of the [ `kmsuser` crypto user (CU)](https://docs.aws.amazon.com/kms/latest/developerguide/key-store-concepts.html#concept-kmsuser)
in the associated AWS CloudHSM cluster. You can use this parameter to [fix connection
failures](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-password)
that occur when AWS KMS cannot log into the associated cluster because the
`kmsuser` password has changed. This value does not change the password in the
AWS CloudHSM cluster.
* Use the **CloudHsmClusterId** parameter to associate the custom
key store with a different, but related, AWS CloudHSM cluster. You can use this
parameter to repair a custom key store if its AWS CloudHSM cluster becomes
corrupted or is deleted, or when you need to create or restore a cluster from a
backup.
If the operation succeeds, it returns a JSON object with no properties.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive integration of
AWS KMS with the isolation and control of a single-tenant key store.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.
**Required permissions**:
[kms:UpdateCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
* `ConnectCustomKeyStore`
* `CreateCustomKeyStore`
* `DeleteCustomKeyStore`
* `DescribeCustomKeyStores`
* `DisconnectCustomKeyStore`
"""
def update_custom_key_store(%Client{} = client, input, options \\ []) do
  # Thin wrapper: POSTs an "UpdateCustomKeyStore" request built from the service metadata.
  meta = metadata()
  Request.request_post(client, meta, "UpdateCustomKeyStore", input, options)
end
@doc """
Updates the description of a customer master key (CMK).
To see the description of a CMK, use `DescribeKey`.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:UpdateKeyDescription](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `CreateKey`
* `DescribeKey`
"""
def update_key_description(%Client{} = client, input, options \\ []) do
  # Delegates to the generated request helper with the "UpdateKeyDescription" action name.
  meta = metadata()
  Request.request_post(client, meta, "UpdateKeyDescription", input, options)
end
@doc """
Verifies a digital signature that was generated by the `Sign` operation.
Verification confirms that an authorized user signed the message with the
specified CMK and signing algorithm, and the message hasn't changed since it was
signed. If the signature is verified, the value of the `SignatureValid` field in
the response is `True`. If the signature verification fails, the `Verify`
operation fails with an `KMSInvalidSignatureException` exception.
A digital signature is generated by using the private key in an asymmetric CMK.
The signature is verified by using the public key in the same asymmetric CMK.
For information about symmetric and asymmetric CMKs, see [Using Symmetric and Asymmetric
CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*.
To verify a digital signature, you can use the `Verify` operation. Specify the
same asymmetric CMK, message, and signing algorithm that were used to produce
the signature.
You can also verify the digital signature by using the public key of the CMK
outside of AWS KMS. Use the `GetPublicKey` operation to download the public key
in the asymmetric CMK and then use the public key to verify the signature
outside of AWS KMS. The advantage of using the `Verify` operation is that it is
performed within AWS KMS. As a result, it's easy to call, the operation is
performed within the FIPS boundary, it is logged in AWS CloudTrail, and you can
use key policy and IAM policy to determine who is authorized to use the CMK to
verify signatures.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:Verify](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `Sign`
"""
def verify(%Client{} = client, input, options \\ []) do
  # Thin wrapper: POSTs a "Verify" request built from the service metadata.
  meta = metadata()
  Request.request_post(client, meta, "Verify", input, options)
end
end
|
lib/aws/generated/kms.ex
| 0.896761
| 0.572006
|
kms.ex
|
starcoder
|
defmodule Membrane.Core.Element.PadModel do
@moduledoc false
# Utility functions for verifying and manipulating pads and their data.
alias Membrane.Element.Pad
alias Membrane.Core.Element.State
use Bunch
@type pads_data_t :: %{Pad.ref_t() => Pad.Data.t()}
@type pad_info_t :: %{
required(:accepted_caps) => any,
required(:availability) => Pad.availability_t(),
required(:direction) => Pad.direction_t(),
required(:mode) => Pad.mode_t(),
required(:options) => %{
optional(:demand_unit) => Membrane.Buffer.Metric.unit_t(),
optional(:other_demand_unit) => Membrane.Buffer.Metric.unit_t()
},
optional(:current_id) => non_neg_integer
}
@type pads_info_t :: %{Pad.name_t() => pad_info_t}
@type pads_t :: %{
data: pads_data_t,
info: pads_info_t,
dynamic_currently_linking: [Pad.ref_t()]
}
@type unknown_pad_error_t :: {:error, {:unknown_pad, Pad.name_t()}}
@spec assert_instance(Pad.ref_t(), State.t()) :: :ok | unknown_pad_error_t
def assert_instance(pad_ref, state) do
  # :ok when the pad ref is a key of the pads data map,
  # otherwise an {:error, {:unknown_pad, ref}} tuple.
  case Map.has_key?(state.pads.data, pad_ref) do
    true -> :ok
    false -> {:error, {:unknown_pad, pad_ref}}
  end
end
@spec assert_instance!(Pad.ref_t(), State.t()) :: :ok
def assert_instance!(ref, state) do
  # Bang variant: raises a MatchError instead of returning an error tuple.
  :ok = assert_instance(ref, state)
end
# Expands to code that fetches the data for `pad_ref` via `get_data/2` and
# checks it against `pattern`. The expansion yields `:ok` on a match,
# `{:error, {:invalid_pad_data, ...}}` when the fetched data does not match,
# and passes through `get_data/2`'s error tuple (unknown pad) unchanged.
defmacro assert_data(pad_ref, pattern, state) do
  quote do
    with {:ok, data} <- unquote(__MODULE__).get_data(unquote(pad_ref), unquote(state)) do
      if match?(unquote(pattern), data) do
        :ok
      else
        {:error,
         {:invalid_pad_data, ref: unquote(pad_ref), pattern: unquote(pattern), data: data}}
      end
    end
  end
end
# Bang variant of `assert_data/3`: the expansion raises a MatchError
# unless `assert_data/3` yields `:ok`.
defmacro assert_data!(pad_ref, pattern, state) do
  quote do
    :ok = unquote(__MODULE__).assert_data(unquote(pad_ref), unquote(pattern), unquote(state))
  end
end
@spec filter_refs_by_data(constraints :: map, State.t()) :: [Pad.ref_t()]
def filter_refs_by_data(constraints \\ %{}, state)
def filter_refs_by_data(constraints, state) when constraints == %{} do
state.pads.data |> Map.keys()
end
def filter_refs_by_data(constraints, state) do
state.pads.data
|> Enum.filter(fn {_name, data} -> data |> constraints_met?(constraints) end)
|> Keyword.keys()
end
@spec filter_data(constraints :: map, State.t()) :: %{atom => Pad.Data.t()}
def filter_data(constraints \\ %{}, state)
def filter_data(constraints, state) when constraints == %{} do
state.pads.data
end
def filter_data(constraints, state) do
state.pads.data
|> Enum.filter(fn {_name, data} -> data |> constraints_met?(constraints) end)
|> Map.new()
end
@spec get_data(Pad.ref_t(), keys :: atom | [atom], State.t()) ::
{:ok, Pad.Data.t() | any} | unknown_pad_error_t
def get_data(pad_ref, keys \\ [], state) do
with :ok <- assert_instance(pad_ref, state) do
state
|> Bunch.Access.get_in(data_keys(pad_ref, keys))
~> {:ok, &1}
end
end
@spec get_data!(Pad.ref_t(), keys :: atom | [atom], State.t()) :: Pad.Data.t() | any
def get_data!(pad_ref, keys \\ [], state) do
{:ok, pad_data} = get_data(pad_ref, keys, state)
pad_data
end
@spec set_data(Pad.ref_t(), keys :: atom | [atom], State.t()) ::
State.stateful_t(:ok | unknown_pad_error_t)
def set_data(pad_ref, keys \\ [], v, state) do
with {:ok, state} <- {assert_instance(pad_ref, state), state} do
state
|> Bunch.Access.put_in(data_keys(pad_ref, keys), v)
~> {:ok, &1}
end
end
@spec set_data!(Pad.ref_t(), keys :: atom | [atom], State.t()) ::
State.stateful_t(:ok | unknown_pad_error_t)
def set_data!(pad_ref, keys \\ [], v, state) do
{:ok, state} = set_data(pad_ref, keys, v, state)
state
end
@spec update_data(Pad.ref_t(), keys :: atom | [atom], (data -> {:ok | error, data}), State.t()) ::
State.stateful_t(:ok | error | unknown_pad_error_t)
when data: Pad.Data.t() | any, error: {:error, reason :: any}
def update_data(pad_ref, keys \\ [], f, state) do
with {:ok, state} <- {assert_instance(pad_ref, state), state},
{:ok, state} <-
state
|> Bunch.Access.get_and_update_in(data_keys(pad_ref, keys), f) do
{:ok, state}
else
{{:error, reason}, state} -> {{:error, reason}, state}
end
end
@spec update_data!(Pad.ref_t(), keys :: atom | [atom], (data -> data), State.t()) :: State.t()
when data: Pad.Data.t() | any
def update_data!(pad_ref, keys \\ [], f, state) do
:ok = assert_instance(pad_ref, state)
state
|> Bunch.Access.update_in(data_keys(pad_ref, keys), f)
end
@spec get_and_update_data(
Pad.ref_t(),
keys :: atom | [atom],
(data -> {success | error, data}),
State.t()
) :: State.stateful_t(success | error | unknown_pad_error_t)
when data: Pad.Data.t() | any, success: {:ok, data}, error: {:error, reason :: any}
def get_and_update_data(pad_ref, keys \\ [], f, state) do
with {:ok, state} <- {assert_instance(pad_ref, state), state},
{{:ok, out}, state} <-
state
|> Bunch.Access.get_and_update_in(data_keys(pad_ref, keys), f) do
{{:ok, out}, state}
else
{{:error, reason}, state} -> {{:error, reason}, state}
end
end
@spec get_and_update_data!(
Pad.ref_t(),
keys :: atom | [atom],
(data -> {data, data}),
State.t()
) :: State.stateful_t(data)
when data: Pad.Data.t() | any
def get_and_update_data!(pad_ref, keys \\ [], f, state) do
:ok = assert_instance(pad_ref, state)
state
|> Bunch.Access.get_and_update_in(data_keys(pad_ref, keys), f)
end
@spec pop_data(Pad.ref_t(), State.t()) ::
State.stateful_t({:ok, Pad.Data.t() | any} | unknown_pad_error_t)
def pop_data(pad_ref, state) do
with {:ok, state} <- {assert_instance(pad_ref, state), state} do
state
|> Bunch.Access.pop_in(data_keys(pad_ref))
~> {:ok, &1}
end
end
@spec pop_data!(Pad.ref_t(), State.t()) :: State.stateful_t(Pad.Data.t() | any)
def pop_data!(pad_ref, state) do
{{:ok, pad_data}, state} = pop_data(pad_ref, state)
{pad_data, state}
end
@spec delete_data(Pad.ref_t(), State.t()) :: State.stateful_t(:ok | unknown_pad_error_t)
def delete_data(pad_ref, state) do
with {:ok, {_out, state}} <- pop_data(pad_ref, state) do
{:ok, state}
end
end
@spec delete_data!(Pad.ref_t(), State.t()) :: State.t()
def delete_data!(pad_ref, state) do
{:ok, state} = delete_data(pad_ref, state)
state
end
@spec constraints_met?(Pad.Data.t(), map) :: boolean
defp constraints_met?(data, constraints) do
constraints |> Enum.all?(fn {k, v} -> data[k] === v end)
end
@spec data_keys(Pad.ref_t(), keys :: atom | [atom]) :: [atom]
defp data_keys(pad_ref, keys \\ []) do
[:pads, :data, pad_ref | Bunch.listify(keys)]
end
end
|
lib/membrane/core/element/pad_model.ex
| 0.856167
| 0.492798
|
pad_model.ex
|
starcoder
|
defmodule Elasticlunr.Dsl.BoolQuery do
  @moduledoc """
  A compound query combining `should`, `must`, `must_not` and `filter` clauses,
  scored against an `Elasticlunr.Index`.
  """
  use Elasticlunr.Dsl.Query
  alias Elasticlunr.Index
  alias Elasticlunr.Dsl.{NotQuery, Query, QueryRepository}
  defstruct ~w[rewritten should must must_not filter minimum_should_match]a
  @type clause :: struct() | list(struct())
  @type t :: %__MODULE__{
          filter: clause(),
          should: clause(),
          must: nil | struct(),
          must_not: nil | struct(),
          rewritten: boolean(),
          minimum_should_match: integer()
        }

  # Builds a bool query from keyword options; `minimum_should_match` defaults
  # per `extract_minimum_should_match/1` when not given explicitly.
  @spec new(keyword) :: t()
  def new(opts) do
    attrs = %{
      should: Keyword.get(opts, :should, []),
      must: Keyword.get(opts, :must),
      must_not: Keyword.get(opts, :must_not),
      filter: Keyword.get(opts, :filter),
      rewritten: Keyword.get(opts, :rewritten, false),
      minimum_should_match: extract_minimum_should_match(opts)
    }
    struct!(__MODULE__, attrs)
  end

  # Rewrites all sub-queries against the index; `must_not` is folded into the
  # filter list as a NotQuery. The result is marked `rewritten: true`.
  @impl true
  def rewrite(
        %__MODULE__{
          filter: filter,
          must: must,
          must_not: must_not,
          should: should,
          minimum_should_match: minimum_should_match
        },
        %Index{} = index
      ) do
    should =
      should
      |> Kernel.||([])
      |> Enum.map(&QueryRepository.rewrite(&1, index))
    must =
      case must do
        nil ->
          nil
        mod when is_struct(mod) ->
          QueryRepository.rewrite(mod, index)
      end
    filters = filter || []
    # NOTE(review): filters that were already rewritten above (via must_not)
    # get rewritten a second time by the Enum.map below — presumably rewrite
    # is idempotent; confirm against QueryRepository.
    filters =
      case must_not do
        nil ->
          filters
        must_not when is_struct(must_not) ->
          query =
            must_not
            |> QueryRepository.rewrite(index)
            |> NotQuery.new()
          [query] ++ filters
      end
      |> Enum.map(&QueryRepository.rewrite(&1, index))
    opts = [
      must: must,
      should: should,
      filter: filters,
      rewritten: true,
      minimum_should_match: minimum_should_match
    ]
    new(opts)
  end

  # Scoring first rewrites the query if needed, then combines filter/must
  # results with per-should-clause scores.
  @impl true
  def score(%__MODULE__{rewritten: false} = query, %Index{} = index, options) do
    query
    |> rewrite(index)
    |> score(index, options)
  end
  def score(
        %__MODULE__{
          must: must,
          filter: filter,
          should: should,
          minimum_should_match: minimum_should_match
        },
        %Index{} = index,
        _options
      ) do
    filter_results = filter_result(filter, index)
    filter_results = filter_must(must, filter_results, index)
    # `filtered` is nil when there is no filter/must restriction; otherwise it
    # is the list of refs that should clauses are restricted to.
    {docs, filtered} =
      case filter_results do
        false ->
          {%{}, nil}
        value ->
          Enum.reduce(value, {%{}, []}, fn %{ref: ref, score: score}, {docs, filtered} ->
            filtered = [ref] ++ filtered
            doc = %{
              ref: ref,
              matched: 0,
              positions: %{},
              score: score || 0
            }
            docs = Map.put(docs, ref, doc)
            {docs, filtered}
          end)
      end
    # Each should clause contributes score/positions and bumps `matched`.
    {docs, _filtered} =
      should
      |> Enum.reduce({docs, filtered}, fn query, {docs, filtered} ->
        opts =
          case filtered do
            nil ->
              []
            filtered ->
              [filtered: filtered]
          end
        results = QueryRepository.score(query, index, opts)
        docs =
          results
          |> Enum.reduce(docs, fn doc, docs ->
            ob =
              Map.get(docs, doc.ref, %{
                ref: doc.ref,
                score: 0,
                matched: 0,
                positions: %{}
              })
            %{matched: matched, score: score, positions: positions} = ob
            # credo:disable-for-lines:3
            positions =
              Map.get(doc, :positions, %{})
              |> Enum.reduce(positions, fn {field, tokens}, positions ->
                p = Map.get(positions, field, [])
                p = Enum.reduce(tokens, p, &(&2 ++ [&1]))
                Map.put(positions, field, p)
              end)
            doc_score = Map.get(doc, :score, 0)
            ob = %{ob | positions: positions, matched: matched + 1, score: score + doc_score}
            Map.put(docs, doc.ref, ob)
          end)
        {docs, filtered}
      end)
    # Returns a lazy Stream of docs meeting the minimum-should-match threshold
    # with a positive score.
    docs
    |> Stream.map(&elem(&1, 1))
    |> Stream.filter(fn doc -> doc.matched >= minimum_should_match && doc.score > 0 end)
  end

  # Applies each filter in turn, restricting the next filter to the refs
  # matched so far. Returns false when there are no filters.
  defp filter_result(nil, _index), do: false
  defp filter_result([], _index), do: false
  defp filter_result(filter, index) do
    filter
    |> Enum.reduce(false, fn query, acc ->
      q =
        case acc do
          false ->
            []
          val ->
            [filtered: Enum.map(val, & &1.ref)]
        end
      QueryRepository.filter(query, index, q)
    end)
  end

  # Scores the must clause, restricted to refs surviving the filters (if any).
  defp filter_must(nil, filter_results, _index), do: filter_results
  defp filter_must(must_query, filter_results, index) when is_struct(must_query) do
    q =
      case filter_results do
        false ->
          []
        results ->
          [filtered: Enum.map(results, & &1.ref)]
      end
    QueryRepository.score(must_query, index, q)
  end

  # Parses a raw (string-keyed) bool query into a %__MODULE__{}.
  @impl true
  def parse(options, _query_options, repo) do
    default_mapper = fn query ->
      case Query.split_root(query) do
        {key, value} ->
          repo.parse(key, value, query)
        _ ->
          repo.parse("match_all", [])
      end
    end
    []
    |> patch_options(:should, options, default_mapper)
    |> patch_options(:filter, options, default_mapper)
    |> patch_options(:must, options, repo)
    |> patch_options(:must_not, options, repo)
    |> patch_options(:minimum_should_match, options)
    |> __MODULE__.new()
  end

  # Normalizes "should" into a list of parsed sub-queries.
  defp patch_options(opts, :should, options, mapper) do
    case Map.get(options, "should") do
      nil ->
        opts
      should when is_list(should) ->
        should =
          should
          |> Enum.map(mapper)
        Keyword.put(opts, :should, should)
      should ->
        Keyword.put(opts, :should, [mapper.(should)])
    end
  end
  defp patch_options(opts, :filter, options, mapper) do
    case Map.get(options, "filter") do
      nil ->
        opts
      filter when is_list(filter) ->
        filter = Enum.map(filter, mapper)
        Keyword.put(opts, :filter, filter)
      filter ->
        Keyword.put(opts, :filter, [mapper.(filter)])
    end
  end
  defp patch_options(opts, :must, options, repo) do
    case Map.get(options, "must") do
      nil ->
        opts
      must when is_map(must) ->
        {key, options} = Query.split_root(must)
        must = repo.parse(key, options, must)
        Keyword.put(opts, :must, must)
    end
  end
  defp patch_options(opts, :must_not, options, repo) do
    case Map.get(options, "must_not") do
      nil ->
        opts
      must_not ->
        {key, options} = Query.split_root(must_not)
        q = repo.parse(key, options, must_not)
        Keyword.put(opts, :must_not, q)
    end
  end
  # Only applies minimum_should_match when it does not exceed the number of
  # should clauses.
  # NOTE(review): if "minimum_should_match" is given but "should" is absent,
  # `Keyword.get(opts, :should)` is nil and `Enum.count/1` will raise — confirm
  # whether callers guarantee a should clause in that case.
  defp patch_options(opts, :minimum_should_match, options) do
    options
    |> Map.get("minimum_should_match")
    |> case do
      nil ->
        opts
      value when is_integer(value) ->
        value <= Keyword.get(opts, :should) |> Enum.count()
    end
    |> case do
      true ->
        minimum_should_match = Map.get(options, "minimum_should_match")
        Keyword.put(opts, :minimum_should_match, minimum_should_match)
      _ ->
        opts
    end
  end

  # Default: require one should match only when should clauses exist and
  # there is no must/filter restriction.
  defp extract_minimum_should_match(opts) do
    default_value =
      case not is_empty_clause?(opts[:should]) and
             (is_empty_clause?(opts[:must]) or is_empty_clause?(opts[:filter])) do
        true -> 1
        false -> 0
      end
    Keyword.get(opts, :minimum_should_match, default_value)
  end
  defp is_empty_clause?(nil), do: true
  defp is_empty_clause?(list) when is_list(list), do: Enum.empty?(list)
  defp is_empty_clause?(%{}), do: false
end
|
lib/elasticlunr/dsl/query/bool_query.ex
| 0.766905
| 0.473779
|
bool_query.ex
|
starcoder
|
defmodule Durango.Dsl.Function.Names do
  @moduledoc """
  Known AQL function names mapped to their accepted arities (an integer or a
  range). Duplicate keys are intentional: some names appear in more than one
  AQL documentation section (e.g. `has`, `distance`, `reverse`).
  """
  @functions [
    # {name, arity}
    # https://docs.arangodb.com/3.3/AQL/Functions/TypeCast.html
    document: 1,
    document: 2,
    collections: 0,
    has: 2,
    to_bool: 1,
    to_number: 1,
    to_string: 1,
    to_array: 1,
    to_list: 1,
    is_null: 1,
    is_bool: 1,
    is_number: 1,
    is_string: 1,
    is_array: 1,
    is_list: 1,
    is_object: 1,
    is_document: 1,
    is_datestring: 1,
    typename: 1,
    # https://docs.arangodb.com/3.3/AQL/Functions/String.html
    char_length: 1,
    concat: 1..255,
    concat_separator: 2..255,
    contains: 2..3,
    count: 1,
    # Fixed typo: was `lenght: 1`, which would have emitted the nonexistent
    # AQL function LENGHT and made the real LENGTH unusable.
    length: 1,
    find_first: 2..4,
    find_last: 2..4,
    json_parse: 1,
    json_stringify: 1,
    left: 2,
    like: 2..3,
    lower: 1,
    ltrim: 1..2,
    md5: 1,
    random_token: 1,
    regex_test: 2..3,
    regex_replace: 3..4,
    reverse: 1,
    right: 2,
    rtrim: 1..2,
    sha1: 1,
    split: 2..3,
    substitute: 2..4,
    substring: 2..3,
    trim: 1..2,
    upper: 1,
    # https://docs.arangodb.com/3.3/AQL/Functions/Numeric.html
    abs: 1,
    acos: 1,
    asin: 1,
    atan: 1..2,
    atan2: 2,
    average: 1,
    ceil: 1,
    cos: 1,
    degrees: 1,
    exp: 1,
    exp2: 1,
    floor: 1,
    log: 1,
    log2: 1,
    log10: 1,
    max: 1,
    median: 1,
    min: 1,
    percentile: 2..3,
    pi: 0,
    pow: 2,
    radians: 1,
    rand: 0,
    range: 2..3,
    round: 1,
    sin: 1,
    sqrt: 1,
    stddev_population: 1,
    stddev_sample: 1,
    sum: 1,
    tan: 1,
    variance_population: 1,
    variance_sample: 1,
    date_now: 0,
    date_iso8601: 1,
    date_timestamp: 1,
    date_dayofweek: 1,
    date_year: 1,
    date_month: 1,
    date_day: 1,
    date_hour: 1,
    date_minute: 1,
    date_second: 1,
    date_millisecond: 1,
    date_dateofyear: 1,
    date_isoweek: 1,
    date_leapyear: 1,
    date_quarter: 1,
    date_days_in_month: 1,
    date_format: 2,
    date_add: 3,
    date_subtract: 3,
    date_diff: 3..4,
    date_compare: 3..4,
    # https://docs.arangodb.com/3.3/AQL/Functions/Array.html
    append: 2..3,
    first: 1,
    flatten: 1..2,
    intersection: 2..255,
    last: 1,
    minus: 2..255,
    nth: 2,
    outersection: 2..255,
    pop: 1,
    position: 2..3,
    push: 2..3,
    remove_nth: 2,
    remove_value: 2..3,
    remove_values: 2,
    reverse: 1,
    shift: 1,
    slice: 2..3,
    union: 2..255,
    union_distinct: 2..255,
    unique: 2..255,
    unshift: 2..3,
    # https://docs.arangodb.com/3.3/AQL/Functions/Document.html
    attributes: 1..3,
    has: 2,
    is_same_collection: 2,
    keep: 2..255,
    matches: 2..3,
    merge: 1..255,
    merge_recursive: 1..255,
    parse_identifier: 1,
    translate: 2..3,
    unset: 2..255,
    unset_recursive: 2..255,
    values: 1..2,
    zip: 2,
    near: 3..5,
    within: 4..5,
    within_rectangle: 5,
    distance: 4,
    is_in_polygon: 3,
    # https://docs.arangodb.com/3.3/AQL/Functions/Fulltext.html
    fulltext: 3..4,
    # https://docs.arangodb.com/3.3/AQL/Functions/Miscellaneous.html
    not_null: 1..255,
    first_list: 1,
    first_document: 1,
    collection_count: 1,
    current_user: 0,
    hash: 1,
    fail: 1,
    call: 2,
    apply: 2,
    noopt: 1,
    passthru: 1,
    sleep: 1,
    v8: 1,
    # https://docs.arangodb.com/devel/AQL/Functions/Geo.html#geo-utility-functions
    distance: 4, # DISTANCE(latitude1, longitude1, latitude2, longitude2) -> distance
    geo_contains: 2, # GEO_CONTAINS(geoJsonA, geoJsonB) → bool
    geo_distance: 2, # GEO_DISTANCE(geoJsonA, geoJsonB) → distance
    geo_equals: 2, # GEO_EQUALS(geoJsonA, geoJsonB) → bool
    geo_intersects: 2, # GEO_INTERSECTS(geoJsonA, geoJsonB) → bool
    geo_linestring: 1, # GEO_LINESTRING(points) → geoJson
    geo_multilinestring: 1, # GEO_MULTILINESTRING(points) → geoJson
    geo_multipoint: 1, # GEO_MULTIPOINT(points) → geoJson
    geo_point: 2, # GEO_POINT(longitude, latitude) → geoJson
    geo_polygon: 1, # GEO_POLYGON(points) → geoJson
  ]

  @doc "Returns the full `{name, arity}` keyword list (duplicates preserved)."
  @spec functions() :: keyword()
  def functions(), do: @functions

  @doc "Returns just the known function names (duplicates preserved)."
  @spec names_list() :: [atom()]
  def names_list(), do: Keyword.keys(@functions)
end
|
lib/dsl/function/names.ex
| 0.60964
| 0.438905
|
names.ex
|
starcoder
|
defmodule MIDISynth.Command do
  @moduledoc """
  Convert MIDI commands to raw bytes
  """

  # A MIDI data byte: any value that fits in 7 bits.
  defguardp is_data_byte(value) when value >= 0 and value <= 127

  @typedoc "A 7-bit integer"
  @type int7 :: 0..127

  @typedoc """
  A MIDI note

  For non-percussion instruments, the frequency of a note
  is `440 * 2^((n − 69) / 12)` where `n` is the note number.
  Middle C is 60
  """
  @type note :: int7()

  @typedoc """
  The duration in milliseconds for which to hold down a note.
  """
  @type duration :: non_neg_integer()

  @typedoc """
  The velocity to strike the note.

  127 = maximum velocity
  """
  @type velocity :: int7()

  @typedoc "A MIDI program"
  @type program :: int7()

  @typedoc "A MIDI channel number"
  @type channel :: 0..15

  @typedoc "A channel volume"
  @type volume :: int7()

  @doc """
  Change the MIDI controller value of a channel.
  """
  @spec change_control(channel(), int7(), int7()) :: <<_::24>>
  def change_control(channel, control_number, control_value \\ 0)
      when is_data_byte(control_number) and is_data_byte(control_value) do
    # Status nibble 0xB = control change.
    <<0xB::4, channel::4, control_number, control_value>>
  end

  @doc """
  Turn a note in a channel on.
  """
  @spec note_on(channel(), note(), velocity()) :: <<_::24>>
  def note_on(channel, note, velocity) do
    # Status nibble 0x9 = note on.
    <<0x9::4, channel::4, note, velocity>>
  end

  @doc """
  Turn a note in a channel off.
  """
  @spec note_off(channel(), note()) :: <<_::24>>
  def note_off(channel, note) do
    # Status nibble 0x8 = note off; release velocity fixed at 64 (mid-range).
    <<0x8::4, channel::4, note, 64>>
  end

  @doc """
  Turn all active notes in a channel off.
  """
  @spec note_off_all(channel()) :: <<_::24>>
  def note_off_all(channel) do
    # Controller 123 = "all notes off".
    change_control(channel, 123, 0)
  end

  @doc """
  Change the current program (e.g. instrument) of a channel.
  """
  @spec change_program(channel(), program()) :: <<_::16>>
  def change_program(channel, prog) when is_data_byte(prog) do
    # Status nibble 0xC = program change (two-byte message).
    <<0xC::4, channel::4, prog>>
  end

  @doc """
  Change the volume of a MIDI channel.

  This change is applied to all playing and future notes.
  """
  @spec change_volume(channel(), int7()) :: <<_::24>>
  def change_volume(channel, volume) when is_data_byte(volume) do
    # Controller 7 = channel volume.
    change_control(channel, 7, volume)
  end

  @doc """
  Bend the pitch of notes playing in a channel.

  Values below 0x2000 will decrease the pitch, and higher values will increase it.
  """
  @spec pitch_bend(channel(), integer()) :: <<_::24>>
  def pitch_bend(channel, bend) when bend >= 0 and bend < 0x4000 do
    # Split the 14-bit bend value; the wire format carries LSB before MSB.
    <<coarse::7, fine::7>> = <<bend::14>>
    <<0xE::4, channel::4, fine, coarse>>
  end

  @doc """
  Change the sound bank of a channel.
  """
  @spec change_sound_bank(channel(), integer()) :: <<_::48>>
  def change_sound_bank(channel, bank) when bank >= 0 and bank < 0x4000 do
    # Bank select is two control changes: controller 0 (MSB) then 0x20 (LSB).
    <<coarse::7, fine::7>> = <<bank::14>>
    change_control(channel, 0, coarse) <> change_control(channel, 0x20, fine)
  end

  @doc """
  Change the panoramic (pan) of a channel.

  This shifts the sound from the left or right ear in when playing stereo.
  Values below 64 moves the sound to the left, and above to the right.
  """
  @spec pan(channel(), int7()) :: <<_::24>>
  def pan(channel, pan) when is_data_byte(pan) do
    # Controller 10 = pan.
    change_control(channel, 10, pan)
  end
end
|
lib/midi_synth/command.ex
| 0.859177
| 0.459015
|
command.ex
|
starcoder
|
defmodule AWS.AutoScalingPlans do
  @moduledoc """
  AWS Auto Scaling

  Use AWS Auto Scaling to create scaling plans for your applications to
  automatically scale your scalable AWS resources.

  ## API Summary

  You can use the AWS Auto Scaling service API to accomplish the following tasks:

    * Create and manage scaling plans

    * Define target tracking scaling policies to dynamically scale your
  resources based on utilization

    * Scale Amazon EC2 Auto Scaling groups using predictive scaling and
  dynamic scaling to scale your Amazon EC2 capacity faster

    * Set minimum and maximum capacity limits

    * Retrieve information on existing scaling plans

    * Access current forecast data and historical forecast data for up
  to 56 days previous

  To learn more about AWS Auto Scaling, including information about granting IAM
  users required permissions for AWS Auto Scaling actions, see the [AWS Auto Scaling User
  Guide](https://docs.aws.amazon.com/autoscaling/plans/userguide/what-is-aws-auto-scaling.html).
  """

  alias AWS.Client
  alias AWS.Request

  # Static service descriptor used on every request.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2018-01-06",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "autoscaling-plans",
      global?: false,
      protocol: "json",
      service_id: "Auto Scaling Plans",
      signature_version: "v4",
      signing_name: "autoscaling-plans",
      target_prefix: "AnyScaleScalingPlannerFrontendService"
    }
  end

  @doc """
  Creates a scaling plan.
  """
  def create_scaling_plan(%Client{} = client, input, options \\ []) do
    request(client, "CreateScalingPlan", input, options)
  end

  @doc """
  Deletes the specified scaling plan.

  Deleting a scaling plan deletes the underlying `ScalingInstruction` for all of
  the scalable resources that are covered by the plan.

  If the plan has launched resources or has scaling activities in progress, you
  must delete those resources separately.
  """
  def delete_scaling_plan(%Client{} = client, input, options \\ []) do
    request(client, "DeleteScalingPlan", input, options)
  end

  @doc """
  Describes the scalable resources in the specified scaling plan.
  """
  def describe_scaling_plan_resources(%Client{} = client, input, options \\ []) do
    request(client, "DescribeScalingPlanResources", input, options)
  end

  @doc """
  Describes one or more of your scaling plans.
  """
  def describe_scaling_plans(%Client{} = client, input, options \\ []) do
    request(client, "DescribeScalingPlans", input, options)
  end

  @doc """
  Retrieves the forecast data for a scalable resource.

  Capacity forecasts are represented as predicted values, or data points, that are
  calculated using historical data points from a specified CloudWatch load metric.
  Data points are available for up to 56 days.
  """
  def get_scaling_plan_resource_forecast_data(%Client{} = client, input, options \\ []) do
    request(client, "GetScalingPlanResourceForecastData", input, options)
  end

  @doc """
  Updates the specified scaling plan.

  You cannot update a scaling plan if it is in the process of being created,
  updated, or deleted.
  """
  def update_scaling_plan(%Client{} = client, input, options \\ []) do
    request(client, "UpdateScalingPlan", input, options)
  end

  # All operations are POSTed to the same JSON endpoint; only the action differs.
  defp request(%Client{} = client, action, input, options) do
    Request.request_post(client, metadata(), action, input, options)
  end
end
|
lib/aws/generated/auto_scaling_plans.ex
| 0.888623
| 0.587884
|
auto_scaling_plans.ex
|
starcoder
|
defmodule MetricsReporter.LatencyStatsCalculator do
  @moduledoc """
  Aggregates latency histogram messages into percentage bins, cumulative
  distributions and percentile statistics.

  Bin keys are strings holding integer exponents (powers of two); the public
  function names spell "cumulative" as "cumalative" — kept as-is since they
  are part of the module's API.
  """

  # Aggregates the time-ordered histogram list into `expected_latency_bins`
  # and converts counts to percentages of the total.
  def calculate_latency_percentage_bins_data(latency_bins_list, expected_latency_bins) do
    time_sorted_latency_bins_list = time_sort_latency_bins_list(latency_bins_list)
    aggregated_latency_bins_data = aggregate_latency_bins_data(time_sorted_latency_bins_list)
    total_count = calculate_latency_bins_total(aggregated_latency_bins_data)
    do_calculate_latency_percentage_bins_data(aggregated_latency_bins_data, expected_latency_bins, total_count)
  end

  # Produces the running (cumulative) percentage per bin, in ascending
  # numeric key order. Values are clamped to 100 once reached.
  def calculate_cumalative_latency_percentage_bins_data(latency_percentage_bins_data) do
    sorted_keys = Map.keys(latency_percentage_bins_data)
      |> Enum.sort(&(String.to_integer(&1)< String.to_integer(&2)))
    {_, cumalative_latency_percentage_bins_data} = sorted_keys
      |> Enum.reduce({0, %{}}, fn key, {cumalative_percentage, cumalative_latency_percentage_bins_data} ->
        val = Map.get(latency_percentage_bins_data, key)
        cumalative_percentage = calculate_cumalative_percentage(val, cumalative_percentage)
        cumalative_latency_percentage_bins_data = Map.put(cumalative_latency_percentage_bins_data, key, cumalative_percentage)
        {cumalative_percentage, cumalative_latency_percentage_bins_data}
      end)
    cumalative_latency_percentage_bins_data
  end

  # Sums the "latency_bins" maps of all messages, bin-by-bin.
  defp aggregate_latency_bins_data(latency_bins_list) do
    latency_bins_list
    |> Enum.reduce(%{}, &Map.merge(&1["latency_bins"], &2, fn _k, v1, v2 -> v1 + v2 end))
  end

  # Zeroed bins keyed "0".."64" (the full 2^0..2^64 exponent range).
  def get_empty_latency_percentage_bins_data do
    0..64 |> Enum.reduce(%{}, fn(pow, map) -> Map.put(map, to_string(pow), 0) end)
  end

  # Zeroed bins for an explicit list of bin keys.
  def get_empty_latency_percentage_bins_data(expected_latency_bins) do
    Map.new(expected_latency_bins, & {&1, 0})
  end

  defp get_percentiles do
    ["50.0", "95.0", "99.0", "99.9", "99.99"]
  end

  # For each percentile, finds the first bin whose cumulative percentage
  # reaches it; if none does, the largest bin key is used as the fallback
  # (that is what the `{:cont, Enum.max_by(...)}` accumulator provides).
  def calculate_latency_percentile_bin_stats(cumalative_latency_percentage_bins_msg) do
    sorted_bin_keys = Map.keys(cumalative_latency_percentage_bins_msg)
      |> Enum.sort(&(String.to_integer(&1)< String.to_integer(&2)))
    _latency_percent_bin_stats = get_percentiles()
      |> Enum.reduce(%{}, fn percentile, percentile_map ->
        bin = Enum.reduce_while(sorted_bin_keys, 0, fn bin, _acc ->
          percentage_at_bin = cumalative_latency_percentage_bins_msg[bin]
          if String.to_float(percentile) > percentage_at_bin do
            {:cont, Enum.max_by(sorted_bin_keys, fn bin ->
              String.to_integer(bin)
            end)}
          else
            {:halt, bin}
          end
        end)
        Map.put(percentile_map, percentile, bin)
      end)
  end

  # All percentiles mapped to bin "0" (used when no data is available).
  def generate_empty_latency_percentile_bin_stats do
    get_percentiles() |>
      Enum.reduce(%{}, fn percentile, percentile_map ->
        Map.put(percentile_map, percentile, "0")
      end)
  end

  defp calculate_latency_bins_total(latency_bins_data) do
    Map.keys(latency_bins_data)
    |> Enum.reduce(0, &(latency_bins_data[&1] + &2))
  end

  # Folds the aggregated bins into the expected bins: each expected bin
  # absorbs the counts of all source bins with keys <= its own, consuming
  # them left to right; counts are then converted to percentages and the
  # exponent keys replaced by their 2^k values.
  defp do_calculate_latency_percentage_bins_data(aggregated_latency_bins_data, expected_latency_bins, total_count) do
    sorted_keys = Map.keys(aggregated_latency_bins_data)
      |> Enum.sort(&(String.to_integer(&1) < String.to_integer(&2)))
    {_, aggregated_map} =
      expected_latency_bins
      |> Enum.sort(&(String.to_integer(&1) < String.to_integer(&2)))
      |> Enum.reduce({sorted_keys, %{}}, fn(bin, {keys_list, acc_map}) ->
        {acc_count, updated_keys_list} =
          Enum.reduce_while(keys_list, {0, keys_list}, fn (key, {acc, remaining_keys}) ->
            if String.to_integer(key) <= String.to_integer(bin) do
              {:cont, {acc + Map.get(aggregated_latency_bins_data, key), List.delete(remaining_keys, key)}}
            else
              {:halt, {acc, remaining_keys}}
            end
          end)
        {updated_keys_list, Map.put(acc_map, bin, acc_count)}
      end)
    map = get_empty_latency_percentage_bins_data(expected_latency_bins)
      |> Map.merge(aggregated_map, fn _k, _v1, v2 ->
        calculate_percentile(v2, total_count)
      end)
    map
      |> Map.new(fn {k, v} ->
        pow_key = :math.pow(2, String.to_integer(k)) |> round |> to_string
        {pow_key, v} end)
  end

  # Sorts histogram messages by their "time" field, ascending.
  defp time_sort_latency_bins_list(latency_bins_list) do
    latency_bins_list
    |> Enum.sort(&(&1["time"] < &2["time"]))
  end

  # Percentage of total, rounded to 4 decimal places; 0 when there is no data.
  defp calculate_percentile(value, total_count) do
    if total_count == 0 do
      0
    else
      Float.round((value / total_count) * 100, 4)
    end
  end

  # Running-total percentage, clamped to 100 and normalized to integer 0
  # when empty.
  defp calculate_cumalative_percentage(value, cumalative_percentage) do
    cond do
      (value + cumalative_percentage >= 100) ->
        100
      (value + cumalative_percentage == 0) ->
        0
      true ->
        Float.round(value + cumalative_percentage, 4)
    end
  end
end
|
monitoring_hub/apps/metrics_reporter/lib/metrics_reporter/latency_stats_calculator.ex
| 0.563258
| 0.712545
|
latency_stats_calculator.ex
|
starcoder
|
defmodule Video.TrimmedSource do
  @moduledoc """
  A source video together with a cut range, expressed as the timed coordinates
  of the cut's start and end. Trimming never re-reads the file; it only moves
  `coord_from`/`coord_to` within the uncut coordinate list.
  """

  @known_params [
    :source,
    :coord_from,
    :coord_to,
    :duration_ms_uncut,
    :coords_uncut
  ]
  @type t :: %__MODULE__{
          source: binary(),
          coord_from: Video.TimedPoint.t(),
          coord_to: Video.TimedPoint.t(),
          duration_ms_uncut: Video.Timestamp.t(),
          coords_uncut: [Video.TimedPoint.t()]
        }
  @enforce_keys @known_params
  defstruct @known_params

  # Builds an (uncut) trimmed source from a file path.
  def new_from_path(name) when is_binary(name) do
    name
    |> Video.Source.new_from_path()
    |> new_from_source
  end

  # Builds an uncut trimmed source spanning the source's full coordinate list.
  # Propagates the error value when timed_points/1 does not yield a list.
  def new_from_source(%Video.Source{} = source) do
    with coords when is_list(coords) <- Video.Source.timed_points(source) do
      coord_from = coords |> hd()
      coord_to = coords |> List.last()
      %__MODULE__{
        source: source.source,
        coords_uncut: coords,
        coord_from: coord_from,
        coord_to: coord_to,
        duration_ms_uncut: Video.Source.video_length_ms(source)
      }
      |> assert_valid()
    end
  end

  @doc """
  Extract the coordinates for the configured time frame. It also returns the
  interpolated points that are closest to the given timestamps as convenience.
  """
  @typep coords_ret() :: %{
           coords: [Video.TimedPoint.t()],
           first: Video.TimedPoint.t(),
           last: Video.TimedPoint.t()
         }
  @spec coords(t()) :: coords_ret()
  def coords(%__MODULE__{} = tsv) do
    {from_ms, to_ms} = in_ms(tsv)
    coords(tsv, from_ms, to_ms)
  end

  # Walks the uncut coordinates once, accumulating (in reverse) the points
  # inside [from_ms, to_ms] and inserting interpolated boundary points where
  # the cut falls between two recorded coordinates.
  @spec coords(t(), integer(), integer()) :: coords_ret() | {:error, binary()}
  def coords(%__MODULE__{} = tsv, from_ms, to_ms)
      when is_integer(from_ms) and is_integer(to_ms) do
    rev_coords =
      tsv.coords_uncut
      |> Enum.reduce_while(%{prev: nil, acc: []}, fn coord, %{prev: prev, acc: acc} ->
        cond do
          # i.e. before start
          coord.time_offset_ms < from_ms ->
            {:cont, %{prev: coord, acc: []}}

          # i.e. first coordinate after start, insert interpolated start
          coord.time_offset_ms <= to_ms && prev && acc == [] ->
            t = calc_t(from_ms, prev, coord)
            interp = Video.TimedPoint.interpolate(prev, coord, t)
            {:cont, %{prev: coord, acc: [coord, interp]}}

          # i.e. between start/stop
          coord.time_offset_ms <= to_ms ->
            {:cont, %{prev: coord, acc: [coord | acc]}}

          # i.e. first coordinate after stop, insert interpolated end
          true ->
            t = calc_t(to_ms, prev, coord)
            interp = Video.TimedPoint.interpolate(prev, coord, t)
            {:halt, %{prev: coord, acc: [interp | acc]}}
        end
      end)
      |> Map.get(:acc)
      |> Enum.uniq()

    if rev_coords == [] do
      {:error, "Cutting #{tsv.source} from #{from_ms}ms to #{to_ms}ms yields an empty video"}
    else
      # acc is built newest-first, so its head is the last point of the cut.
      last = hd(rev_coords)
      coords = Enum.reverse(rev_coords)
      first = hd(coords)
      %{coords: coords, first: first, last: last}
    end
  end

  @doc """
  Extract a part of the video as denoted by the timestamps.
  """
  @spec extract(
          t(),
          Video.Timestamp.t() | integer() | :start,
          Video.Timestamp.t() | integer() | :end
        ) ::
          t() | {:error, binary()}
  def extract(%__MODULE__{} = tsv, :start, to),
    do: extract(tsv, 0, to)

  def extract(%__MODULE__{} = tsv, from, :end),
    do: extract(tsv, from, tsv.duration_ms_uncut)

  def extract(%__MODULE__{} = tsv, "" <> from, to),
    do: extract(tsv, Video.Timestamp.in_milliseconds(from), to)

  def extract(%__MODULE__{} = tsv, from, "" <> to),
    do: extract(tsv, from, Video.Timestamp.in_milliseconds(to))

  def extract(%__MODULE__{} = tsv, from, to)
      when is_integer(from) and is_integer(to) and from <= to do
    %{first: from_pt, last: to_pt} = coords(tsv, from, to)
    %{tsv | coord_from: from_pt, coord_to: to_pt} |> assert_valid()
  end

  # Interpolation fraction of `interp` between the two surrounding points.
  defp calc_t(interp, prev, next),
    do: (interp - prev.time_offset_ms) / (next.time_offset_ms - prev.time_offset_ms)

  @doc """
  Recursively finds all valid source videos within the given folder, and converts
  them into trimmed source videos. No cutting is being done.
  """
  @spec new_from_folder(binary()) :: [t()]
  def new_from_folder(path) do
    path
    |> Video.Source.new_from_folder()
    |> Parallel.map(&new_from_source/1)
    |> Enum.reject(fn
      # IO.puts/2 returns :ok (truthy), so error tuples are logged AND dropped.
      {:error, msg} -> IO.puts(:stderr, msg)
      _ -> false
    end)
  end

  # Current cut range as {from_ms, to_ms}.
  defp in_ms(%__MODULE__{coord_from: %{time_offset_ms: from}, coord_to: %{time_offset_ms: to}}) do
    {from, to}
  end

  # Returns the struct unchanged when the range is ordered, else an error tuple.
  defp assert_valid(%__MODULE__{} = tsv) do
    {from_ms, to_ms} = in_ms(tsv)

    if from_ms <= to_ms,
      do: tsv,
      else: {:error, "Invalid time range requested: from #{from_ms} >= to #{to_ms}"}
  end

  @doc """
  Returns the duration in milliseconds from the current cut-off points
  """
  @spec duration_ms(t()) :: integer()
  def duration_ms(%__MODULE__{} = tsv) do
    {from_ms, to_ms} = in_ms(tsv)
    to_ms - from_ms
  end

  # Source path plus cut boundaries, using :start/:end when the cut spans the
  # whole video.
  @spec source(t()) :: {binary(), Video.Timestamp.t() | :start, Video.Timestamp.t() | :end}
  def source(%__MODULE__{} = tsv) do
    {from, to} = in_ms(tsv)
    from = if from == 0, do: :start, else: Video.Timestamp.from_milliseconds(from)
    to = if to == tsv.duration_ms_uncut, do: :end, else: Video.Timestamp.from_milliseconds(to)
    {Video.Path.source_base_with_ending(tsv.source), from, to}
  end

  # Stable identifier of source + cut range, suitable for hashing/caching.
  def hash_ident(%__MODULE__{source: p, coord_from: f, coord_to: t}) do
    "#{Video.Path.source_base_with_ending(p)} #{f.time_offset_ms} #{inspect(t.time_offset_ms)}"
  end
end
|
lib/video/trimmed_source.ex
| 0.88836
| 0.528047
|
trimmed_source.ex
|
starcoder
|
defmodule LibPE.ResourceTable do
@moduledoc """
Parses windows resource tables
By convention these are always three levels:
Type > Name > Language
"""
alias LibPE.ResourceTable
use Bitwise
defstruct characteristics: 0,
timestamp: 0,
major_version: 0,
minor_version: 0,
entries: []
defmodule DirEntry do
@moduledoc false
defstruct name: nil,
entry: nil,
raw_entry: 0,
raw_name: 0
end
defmodule DataBlob do
@moduledoc false
defstruct data_rva: 0,
data: nil,
codepage: 0,
reserved: 0
end
defmodule EncodeContext do
@moduledoc false
defstruct [
:image_offset,
:names,
:tables,
:output,
:data_entries
]
def append(%EncodeContext{output: output} = ex, data) do
%EncodeContext{ex | output: output <> data}
end
end
def parse(resources, image_offset) do
parse(resources, resources, image_offset)
end
defp parse(
<<characteristics::little-size(32), timestamp::little-size(32),
major_version::little-size(16), minor_version::little-size(16),
number_of_name_entries::little-size(16), number_of_id_entries::little-size(16),
rest::binary>>,
resources,
image_offset
) do
{entries, _rest} =
List.duplicate(%DirEntry{}, number_of_name_entries + number_of_id_entries)
|> Enum.map_reduce(rest, fn _entry, rest ->
parse_entry(rest, resources, image_offset)
end)
%ResourceTable{
characteristics: characteristics,
timestamp: timestamp,
major_version: major_version,
minor_version: minor_version,
entries: entries
}
end
@doc """
Allows updating a resource. At the moment this call is destructive as it does
not allow defining more than one name or language per resource entry.
Each defined resource entry set with `set_resource` will have its PE name
set to `1` and its language set to the provided language code, by default `1033`
Example:
> LibPE.ResourceTable.set_resource(table, "RT_MANIFEST", manifest)
Known resource types are:
```
{"RT_ACCELERATOR", 9, "Accelerator table."},
{"RT_ANICURSOR", 21, "Animated cursor."},
{"RT_ANIICON", 22, "Animated icon."},
{"RT_BITMAP", 2, "Bitmap resource."},
{"RT_CURSOR", 1, "Hardware-dependent cursor resource."},
{"RT_DIALOG", 5, "Dialog box."},
{"RT_DLGINCLUDE", 17,
"Allows a resource editing tool to associate a string with an .rc file. Typically, the string is the name of the header file that provides symbolic names. The resource compiler parses the string but otherwise ignores the value. For example,"},
{"RT_FONT", 8, "Font resource."},
{"RT_FONTDIR", 7, "Font directory resource."},
{"RT_GROUP_CURSOR", 12, "Hardware-independent cursor resource."},
{"RT_GROUP_ICON", 14, "Hardware-independent icon resource."},
{"RT_HTML", 23, "HTML resource."},
{"RT_ICON", 3, "Hardware-dependent icon resource."},
{"RT_MANIFEST", 24, "Side-by-Side Assembly Manifest."},
{"RT_MENU", 4, "Menu resource."},
{"RT_MESSAGETABLE", 11, "Message-table entry."},
{"RT_PLUGPLAY", 19, "Plug and Play resource."},
{"RT_RCDATA", 10, "Application-defined resource (raw data)."},
{"RT_STRING", 6, "String-table entry."},
{"RT_VERSION", 16, "Version resource."},
{"RT_VXD", 20, "VXD."}
```
"""
def set_resource(
      table = %ResourceTable{entries: entries},
      resource_type,
      data,
      codepage \\ 0,
      language \\ 1033
    )
    when is_binary(data) do
  # Resolve the symbolic type/codepage/language names to their numeric ids.
  type = LibPE.ResourceTypes.encode(resource_type)
  if type == nil, do: raise("ResourceType #{resource_type} is unknown")
  page = LibPE.Codepage.encode(codepage)
  if page == nil, do: raise("Codepage #{codepage} is unknown")
  lang = LibPE.Language.encode(language)
  if lang == nil, do: raise("Language #{language} is unknown")

  # Reuse the existing type-level entry if present, otherwise start fresh.
  entry =
    Enum.find(entries, %DirEntry{name: type}, fn %DirEntry{name: name} -> type == name end)

  # Destructive: the type entry is replaced with exactly one name (`1`)
  # holding exactly one language leaf with the new payload.
  entry = %DirEntry{
    entry
    | entry: %ResourceTable{
        entries: [
          %DirEntry{
            name: 1,
            entry: %ResourceTable{
              entries: [
                %DirEntry{name: lang, entry: %DataBlob{codepage: page, data: data}}
              ]
            }
          }
        ]
      }
  }

  # New types are appended and re-sorted; existing types are replaced in place.
  idx = Enum.find_index(entries, fn %DirEntry{name: name} -> type == name end)

  entries =
    if idx == nil do
      sorted_entries(%ResourceTable{table | entries: entries ++ [entry]})
    else
      List.update_at(entries, idx, fn _ -> entry end)
    end

  %ResourceTable{table | entries: entries}
end
@doc """
Encodes the resource table into its binary `.rsrc` section representation.

Encoding is done in two identical passes: the first pass only computes the
layout (offsets of subtables, name strings and data blobs), the second pass
walks the same structure again and emits the resolved offsets.
"""
def encode(resource_table, image_offset) do
  initial = %EncodeContext{
    image_offset: image_offset,
    tables: %{},
    names: %{},
    output: "",
    data_entries: []
  }

  # First pass: establish offsets and full size, then discard the bytes.
  sizing_pass =
    resource_table
    |> encode_pass(initial)
    |> Map.put(:output, "")

  # Second pass: same walk, now inserting all correct offsets.
  final_pass = encode_pass(resource_table, sizing_pass)
  final_pass.output
end

# One full encoding pass over every layer of the resource section.
defp encode_pass(resource_table, context) do
  do_encode(resource_table, context)
  |> encode_tables(resource_table)
  |> encode_data_entries()
  |> encode_names()
  |> encode_data_leaves()
end
@high 0x80000000
# Emits the 16-byte resource directory table header for `table`, followed by
# one 8-byte entry per child (names first, then integer ids).
defp do_encode(
       %ResourceTable{
         characteristics: characteristics,
         timestamp: timestamp,
         major_version: major_version,
         minor_version: minor_version
       } = table,
       context = %EncodeContext{}
     ) do
  entries = sorted_entries(table)
  # The header stores separate counts for named entries and integer-id entries.
  number_of_id_entries = Enum.count(entries, fn %DirEntry{name: name} -> is_integer(name) end)
  number_of_name_entries = length(entries) - number_of_id_entries

  context =
    EncodeContext.append(
      context,
      <<characteristics::little-size(32), timestamp::little-size(32),
        major_version::little-size(16), minor_version::little-size(16),
        number_of_name_entries::little-size(16), number_of_id_entries::little-size(16)>>
    )

  # Append each child's directory entry right after the header.
  Enum.reduce(entries, context, fn entry, context -> encode_entry(entry, context) end)
end
# Returns the directory entries in on-disk order: named entries first, then
# integer-id entries, each group sorted ascending by name.
#
# Rewritten from a reduce that appended with `++ [entry]` (O(n^2)) to a
# single Enum.split_with/2 plus Enum.sort_by/2.
defp sorted_entries(%ResourceTable{entries: entries}) do
  {id_entries, name_entries} =
    Enum.split_with(entries, fn %DirEntry{name: name} -> is_integer(name) end)

  Enum.sort_by(name_entries, & &1.name) ++ Enum.sort_by(id_entries, & &1.name)
end
# Recursively emits every nested directory table. Direct children are written
# first (recording their offsets, flagged with @high to mark "subdirectory"),
# then the walk recurses into grandchildren - keeping sibling tables adjacent.
defp encode_tables(context, %ResourceTable{} = table) do
  # Reducing recursively other DirectoryTables
  entries = sorted_entries(table)

  context =
    Enum.reduce(entries, context, fn %DirEntry{entry: entry},
                                     context = %EncodeContext{tables: tables, output: output} ->
      case entry do
        dir = %ResourceTable{} ->
          # Offset of this subtable = current output length; the @high bit
          # marks the directory-vs-data distinction in the parent's entry.
          offset = byte_size(output) ||| @high
          # IO.puts("table offset: #{byte_size(output)}")
          context = %EncodeContext{context | tables: Map.put(tables, dir, offset)}
          do_encode(dir, context)

        _other ->
          context
      end
    end)

  # Second sweep: recurse one level deeper.
  Enum.reduce(entries, context, fn %DirEntry{entry: entry}, context ->
    case entry do
      table = %ResourceTable{} -> encode_tables(context, table)
      _other -> context
    end
  end)
end
# Parses one 8-byte directory entry. The high bit of each 32-bit field picks
# the interpretation: for the name, offset-to-unicode-string vs. integer id;
# for the entry, offset-to-subdirectory vs. offset-to-data-leaf.
defp parse_entry(
       <<raw_name::little-size(32), raw_entry::little-size(32), rest::binary>>,
       resources,
       image_offset
     ) do
  name =
    if (raw_name &&& @high) == 0 do
      # Plain integer id.
      raw_name
    else
      # Offset (minus the flag bit) to a length-prefixed UTF-16LE string.
      name_offset = raw_name &&& bnot(@high)

      # `length` counts UTF-16 code units, i.e. 2 * length bytes total,
      # matched here as two `length`-byte segments.
      <<_::binary-size(name_offset), length::little-size(16), name::binary-size(length),
        name2::binary-size(length), _rest::binary>> = resources

      :unicode.characters_to_binary(name <> name2, {:utf16, :little}, :utf8)
    end

  entry =
    if (raw_entry &&& @high) == @high do
      # Subdirectory: recursively parse the nested table.
      entry_offset = raw_entry &&& bnot(@high)
      <<_::binary-size(entry_offset), data::binary>> = resources
      parse(data, resources, image_offset)
    else
      # Data leaf entry.
      parse_data_entry(raw_entry, resources, image_offset)
    end

  # Raw offsets are kept (without the flag bit) for round-tripping.
  {%DirEntry{
     name: name,
     entry: entry,
     raw_name: raw_name &&& bnot(@high),
     raw_entry: raw_entry &&& bnot(@high)
   }, rest}
end
# Emits one 8-byte directory entry. During the first encoding pass the final
# offsets of names, subtables and data entries are unknown, so the referenced
# objects are registered in the context with placeholder offsets; the second
# pass finds them resolved and writes the real values.
defp encode_entry(
       %DirEntry{
         name: name,
         raw_name: raw_name,
         entry: entry,
         raw_entry: raw_entry
       },
       context = %EncodeContext{
         names: names,
         tables: tables,
         data_entries: data_entries
       }
     ) do
  {raw_name, context} =
    cond do
      # Integer ids are stored inline.
      is_integer(name) -> {name, context}
      # Name already registered: use its (possibly resolved) offset.
      names[name] != nil -> {names[name], context}
      # First sighting: register the name with a placeholder offset.
      true -> {raw_name, %EncodeContext{context | names: Map.put(names, name, 0)}}
    end

  {raw_entry, context} =
    case entry do
      dir = %ResourceTable{} ->
        if tables[dir] != nil do
          {tables[dir], context}
        else
          # Register the subtable for encode_tables/2 to lay out.
          {raw_entry, %EncodeContext{context | tables: Map.put(tables, dir, 0)}}
        end

      %DataBlob{data: blob, codepage: codepage} ->
        key = %DataBlob{data: blob, codepage: codepage}

        if fetch(data_entries, key) != nil do
          {fetch!(data_entries, key).offset, context}
        else
          # Register the data blob for encode_data_entries/1.
          {raw_entry,
           %EncodeContext{
             context
             | data_entries: put(data_entries, key, %{data_rva: 0, offset: 0})
           }}
        end
    end

  EncodeContext.append(context, <<raw_name::little-size(32), raw_entry::little-size(32)>>)
end
# Appends the unicode name strings to the output and records their offsets
# (flagged with @high, which marks "offset points at a name string").
#
# Fix: the 16-bit length prefix must count UTF-16 code units, so it is now
# derived from the encoded byte size. The previous String.length/1 counted
# graphemes, which under-counts names containing surrogate pairs or
# combining characters and would corrupt the parse (parse_entry reads
# 2 * length bytes).
defp encode_names(%EncodeContext{names: names} = context) do
  Enum.sort(names)
  |> Enum.reduce(context, fn {name, _offset},
                             context = %EncodeContext{output: output, names: names} ->
    # Align to a 2-byte boundary before writing the string.
    output = output <> String.duplicate(<<0>>, rem(byte_size(output), 2))
    offset = byte_size(output) ||| @high
    # IO.puts("name offset #{byte_size(output)}")
    names = Map.put(names, name, offset)
    bin = :unicode.characters_to_binary(name, :utf8, {:utf16, :little})
    output = output <> <<div(byte_size(bin), 2)::little-size(16), bin::binary>>
    %EncodeContext{context | names: names, output: output}
  end)
end
# Parses a 16-byte data-entry leaf at `entry_offset` and loads the payload it
# references. `data_rva` is a relative virtual address, so the position inside
# the section binary is data_rva - image_offset.
defp parse_data_entry(entry_offset, resources, image_offset) do
  <<_::binary-size(entry_offset), data_rva::little-size(32), size::little-size(32),
    codepage::little-size(32), reserved::little-size(32), _rest::binary>> = resources

  data = binary_part(resources, data_rva - image_offset, size)

  %DataBlob{
    data_rva: data_rva,
    data: data,
    codepage: codepage,
    reserved: reserved
  }
end
# Emits the 16-byte data-entry records and records each record's offset so
# directory entries can point at it during the second pass. `data_rva` is 0
# on the first pass and is resolved later by encode_data_leaves/1.
defp encode_data_entries(%EncodeContext{data_entries: data_entries} = context) do
  Enum.reduce(data_entries, context, fn {%DataBlob{codepage: codepage, data: blob} = key,
                                         %{data_rva: data_rva}},
                                        context = %EncodeContext{
                                          output: output,
                                          data_entries: data_entries
                                        } ->
    # output = output <> String.duplicate(<<0>>, rem(byte_size(output), 2))
    offset = byte_size(output)
    # IO.puts("blob offset = #{offset}")
    size = byte_size(blob)
    reserved = 0

    output =
      output <>
        <<data_rva::little-size(32), size::little-size(32), codepage::little-size(32),
          reserved::little-size(32)>>

    # Remember where this record landed; keep the (possibly placeholder) RVA.
    data_entries = put(data_entries, key, %{offset: offset, data_rva: data_rva})
    %EncodeContext{context | data_entries: data_entries, output: output}
  end)
end
# Appends the raw resource payloads, 8-byte aligned, and records each blob's
# RVA (section offset + image_offset) for use in the second encoding pass.
defp encode_data_leaves(
       %EncodeContext{data_entries: entries, image_offset: image_offset} = context
     ) do
  # some binaries do this, others don't
  context = EncodeContext.append(context, <<0::little-size(32)>>)

  entries
  |> Enum.reduce(context, fn {%DataBlob{data: blob} = key, offsets},
                             context = %EncodeContext{
                               output: output,
                               data_entries: entries
                             } ->
    # Pad to an 8-byte boundary before and after each payload.
    output = LibPE.binary_pad_trailing(output, ceil(byte_size(output) / 8) * 8)
    data_rva = byte_size(output) + image_offset
    output = output <> blob
    output = LibPE.binary_pad_trailing(output, ceil(byte_size(output) / 8) * 8)
    entries = put(entries, key, %{offsets | data_rva: data_rva})
    %EncodeContext{context | data_entries: entries, output: output}
  end)
end
@doc """
Prints a human-readable dump of a resource table to stdout.

Accepts `nil` for images without a resource section.
"""
def dump(nil), do: IO.puts("NO RESOURCE TABLE")
def dump(data), do: dump(data, 0)
# Dumps a directory table by dumping each of its entries at the current
# nesting level (the header fields are skipped, see comment below).
defp dump(
       %ResourceTable{
         characteristics: _characteristics,
         timestamp: _timestamp,
         major_version: _major_version,
         minor_version: _minor_version,
         entries: entries
       },
       level
     ) do
  # Those values are always 0 it seems
  # IO.puts(
  #   "#{dup(level)} flags: #{characteristics}, timestamp: #{timestamp}, version: #{major_version}:#{
  #     minor_version
  #   }"
  # )
  Enum.each(entries, fn entry ->
    dump(entry, level)
  end)
end
# Dumps one directory entry. The nesting level carries fixed meaning in the
# PE resource tree: level 0 is the resource type, 1 the name, 2 the language.
defp dump(%DirEntry{name: name, entry: entry}, level) do
  label =
    cond do
      level == 0 -> "TYPE: #{inspect(LibPE.ResourceTypes.decode(name))}"
      level == 1 -> "NAME: #{inspect(name)}"
      level == 2 -> "LANG: #{inspect(LibPE.Language.decode(name))}"
      true -> inspect(name)
    end

  IO.puts("#{dup(level)} DIRENTRY: #{label}")
  dump(entry, level + 1)
end
# Leaf case: prints the payload size and decoded codepage of a data blob.
defp dump(%DataBlob{data_rva: _data_rva, data: data, codepage: codepage}, level) do
  IO.puts(
    "#{dup(level)} DATA size: #{byte_size(data)}, codepage: #{
      inspect(LibPE.Codepage.decode(codepage))
    }"
  )
end
# Indentation helper for dump/2: repeats the indent unit `level` times.
defp dup(level), do: String.duplicate(" ", level)
# Tiny association-list helpers: data entries are kept as an ordered list of
# {key, value} tuples because their insertion order matters for PE layout.

# Insert or replace the value stored under `key`.
defp put(list, key, value), do: List.keystore(list, key, 0, {key, value})

# Look up `key`, returning nil when absent.
defp fetch(list, key) do
  {^key, value} = List.keyfind(list, key, 0, {key, nil})
  value
end

# Look up `key`, crashing with a MatchError when absent.
defp fetch!(list, key) do
  {^key, value} = List.keyfind(list, key, 0)
  value
end
end
|
lib/libpe/resource_table.ex
| 0.845624
| 0.654674
|
resource_table.ex
|
starcoder
|
defmodule DES do
  @moduledoc """
  Encrypt and Decrypt files using DES symmetric algorithm.

  NOTE(review): unless a key schedule is passed explicitly, the round keys
  are derived from the hard-coded key `'12345678'`.

  ## Examples

  ``` bash
  $ ./bin enc my_file my_file.enc
  $ ./bin dec my_file.enc my_file
  ```

      DES.encrypt('12345678')
      DES.decrypt('12345678')
  """

  # Block size in bytes (DES operates on 64-bit blocks); also reused as the
  # zero-pad width when rendering bytes as bit strings.
  @block_size_bytes 8
  # DES always performs 16 Feistel rounds.
  @num_rounds 16

  # Permutation P applied to the S-box output inside each round.
  @p [
    16, 7, 20, 21, 29, 12, 28, 17,
    1, 15, 23, 26, 5, 18, 31, 10,
    2, 8, 24, 14, 32, 27, 3, 9,
    19, 13, 30, 6, 22, 11, 4, 25
  ]

  # The eight DES S-boxes: each maps a 6-bit group (row from the outer bits,
  # column from the middle four bits) to a 4-bit value.
  @s_box [
    [[14, 4, 13, 1, 2, 15, 11, 8, 3, 10, 6, 12, 5, 9, 0, 7],
    [0, 15, 7, 4, 14, 2, 13, 1, 10, 6, 12, 11, 9, 5, 3, 8],
    [4, 1, 14, 8, 13, 6, 2, 11, 15, 12, 9, 7, 3, 10, 5, 0],
    [15, 12, 8, 2, 4, 9, 1, 7, 5, 11, 3, 14, 10, 0, 6, 13],
    ],
    [[15, 1, 8, 14, 6, 11, 3, 4, 9, 7, 2, 13, 12, 0, 5, 10],
    [3, 13, 4, 7, 15, 2, 8, 14, 12, 0, 1, 10, 6, 9, 11, 5],
    [0, 14, 7, 11, 10, 4, 13, 1, 5, 8, 12, 6, 9, 3, 2, 15],
    [13, 8, 10, 1, 3, 15, 4, 2, 11, 6, 7, 12, 0, 5, 14, 9],
    ],
    [[10, 0, 9, 14, 6, 3, 15, 5, 1, 13, 12, 7, 11, 4, 2, 8],
    [13, 7, 0, 9, 3, 4, 6, 10, 2, 8, 5, 14, 12, 11, 15, 1],
    [13, 6, 4, 9, 8, 15, 3, 0, 11, 1, 2, 12, 5, 10, 14, 7],
    [1, 10, 13, 0, 6, 9, 8, 7, 4, 15, 14, 3, 11, 5, 2, 12],
    ],
    [[7, 13, 14, 3, 0, 6, 9, 10, 1, 2, 8, 5, 11, 12, 4, 15],
    [13, 8, 11, 5, 6, 15, 0, 3, 4, 7, 2, 12, 1, 10, 14, 9],
    [10, 6, 9, 0, 12, 11, 7, 13, 15, 1, 3, 14, 5, 2, 8, 4],
    [3, 15, 0, 6, 10, 1, 13, 8, 9, 4, 5, 11, 12, 7, 2, 14],
    ],
    [[2, 12, 4, 1, 7, 10, 11, 6, 8, 5, 3, 15, 13, 0, 14, 9],
    [14, 11, 2, 12, 4, 7, 13, 1, 5, 0, 15, 10, 3, 9, 8, 6],
    [4, 2, 1, 11, 10, 13, 7, 8, 15, 9, 12, 5, 6, 3, 0, 14],
    [11, 8, 12, 7, 1, 14, 2, 13, 6, 15, 0, 9, 10, 4, 5, 3],
    ],
    [[12, 1, 10, 15, 9, 2, 6, 8, 0, 13, 3, 4, 14, 7, 5, 11],
    [10, 15, 4, 2, 7, 12, 9, 5, 6, 1, 13, 14, 0, 11, 3, 8],
    [9, 14, 15, 5, 2, 8, 12, 3, 7, 0, 4, 10, 1, 13, 11, 6],
    [4, 3, 2, 12, 9, 5, 15, 10, 11, 14, 1, 7, 6, 0, 8, 13],
    ],
    [[4, 11, 2, 14, 15, 0, 8, 13, 3, 12, 9, 7, 5, 10, 6, 1],
    [13, 0, 11, 7, 4, 9, 1, 10, 14, 3, 5, 12, 2, 15, 8, 6],
    [1, 4, 11, 13, 12, 3, 7, 14, 10, 15, 6, 8, 0, 5, 9, 2],
    [6, 11, 13, 8, 1, 4, 10, 7, 9, 5, 0, 15, 14, 2, 3, 12],
    ],
    [[13, 2, 8, 4, 6, 15, 11, 1, 10, 9, 3, 14, 5, 0, 12, 7],
    [1, 15, 13, 8, 10, 3, 7, 4, 12, 5, 6, 11, 0, 14, 9, 2],
    [7, 11, 4, 1, 9, 12, 14, 2, 0, 6, 10, 13, 15, 3, 5, 8],
    [2, 1, 14, 7, 4, 10, 8, 13, 15, 12, 9, 0, 3, 5, 6, 11],
    ]
  ]

  # Permuted choice 1: selects 56 of the 64 key bits.
  @cp_1 [
    57, 49, 41, 33, 25, 17, 9,
    1, 58, 50, 42, 34, 26, 18,
    10, 2, 59, 51, 43, 35, 27,
    19, 11, 3, 60, 52, 44, 36,
    63, 55, 47, 39, 31, 23, 15,
    7, 62, 54, 46, 38, 30, 22,
    14, 6, 61, 53, 45, 37, 29,
    21, 13, 5, 28, 20, 12, 4
  ]

  # Permuted choice 2: compresses the rotated halves to a 48-bit round key.
  @cp_2 [
    14, 17, 11, 24, 1, 5, 3, 28,
    15, 6, 21, 10, 23, 19, 12, 4,
    26, 8, 16, 7, 27, 20, 13, 2,
    41, 52, 31, 37, 47, 55, 30, 40,
    51, 45, 33, 48, 44, 49, 39, 56,
    34, 53, 46, 42, 50, 36, 29, 32
  ]

  # Expansion table E: expands a 32-bit half block to 48 bits.
  @e [
    32, 1, 2, 3, 4, 5,
    4, 5, 6, 7, 8, 9,
    8, 9, 10, 11, 12, 13,
    12, 13, 14, 15, 16, 17,
    16, 17, 18, 19, 20, 21,
    20, 21, 22, 23, 24, 25,
    24, 25, 26, 27, 28, 29,
    28, 29, 30, 31, 32, 1
  ]

  # Initial permutation IP.
  @pi [
    58, 50, 42, 34, 26, 18, 10, 2,
    60, 52, 44, 36, 28, 20, 12, 4,
    62, 54, 46, 38, 30, 22, 14, 6,
    64, 56, 48, 40, 32, 24, 16, 8,
    57, 49, 41, 33, 25, 17, 9, 1,
    59, 51, 43, 35, 27, 19, 11, 3,
    61, 53, 45, 37, 29, 21, 13, 5,
    63, 55, 47, 39, 31, 23, 15, 7
  ]

  # Final permutation IP^-1 (inverse of @pi).
  @pi_1 [
    40, 8, 48, 16, 56, 24, 64, 32,
    39, 7, 47, 15, 55, 23, 63, 31,
    38, 6, 46, 14, 54, 22, 62, 30,
    37, 5, 45, 13, 53, 21, 61, 29,
    36, 4, 44, 12, 52, 20, 60, 28,
    35, 3, 43, 11, 51, 19, 59, 27,
    34, 2, 42, 10, 50, 18, 58, 26,
    33, 1, 41, 9, 49, 17, 57, 25
  ]

  # Left-rotation amounts for the key halves, one per round.
  @shift_order [1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1]

  # (The former `require System/IO/String` lines were no-ops - `require` is
  # only needed for macros - and have been removed.)
  use Bitwise

  # Parse argv inputs to make sure user called the module right. Exits on fail.
  @spec parse_input() :: {atom(), String.t(), String.t()}
  defp parse_input() do
    case System.argv() do
      [operation, in_file, out_file] ->
        IO.puts "Processando arquivo '#{in_file}'..."

        operation =
          case operation do
            "enc" ->
              :enc

            "dec" ->
              :dec

            _ ->
              IO.puts "Invalid operation !!!"
              exit(:normal)
          end

        {operation, in_file, out_file}

      _ ->
        IO.puts "Pass the parameters like this:\n./bin [enc|dec] [in_file] [out_file]"
        exit(:normal)
    end
  end

  # Expands a 32-bit half block to 48 bits using table E.
  @spec expansion([integer()]) :: [integer()]
  defp expansion(block) do
    permute(block, @e)
  end

  # Builds the 16 round keys by rotating the key halves per @shift_order and
  # compressing each rotated pair through @cp_2.
  defp shifts(list, left, right, index \\ 0)

  defp shifts(list, _left, _right, index) when index == length(@shift_order) do
    list
  end

  defp shifts(list, left, right, index) do
    left = shift(left, Enum.at(@shift_order, index))
    right = shift(right, Enum.at(@shift_order, index))
    round_key = Enum.map(@cp_2, fn x -> Enum.at(left ++ right, x - 1) end)
    shifts(list ++ [round_key], left, right, index + 1)
  end

  # Derives the 16 round keys (each a list of 6-bit-group bytes) from a
  # 64-bit key. (Spec fixed: the return value is a list of key lists.)
  @spec generate_keys(charlist()) :: [[integer()]]
  defp generate_keys(key) do
    k = permute(key, @cp_1, false)
    left = Enum.slice(k, 0..27)
    right = Enum.slice(k, 28..55)

    shifts([], left, right)
    |> Enum.map(fn bits ->
      bits
      |> Enum.chunk_every(@block_size_bytes)
      |> Enum.map(fn chunk -> String.to_integer(Enum.join(chunk), 2) end)
    end)
  end

  # Rotates a 28-element key half left by `shift_size` positions.
  @spec shift([String.t()], integer()) :: [String.t()]
  defp shift(block, shift_size) do
    Enum.slice(block, shift_size..27) ++ Enum.slice(block, 0..(shift_size - 1))
  end

  # Renders each byte as a zero-padded base-2 string.
  @spec to_binary_list_string(charlist(), integer()) :: [String.t()]
  defp to_binary_list_string(block, pad_leading \\ @block_size_bytes) do
    Enum.map(block, fn x -> Integer.to_string(x, 2) |> String.pad_leading(pad_leading, "0") end)
  end

  # Renders a block of bytes as a flat list of "0"/"1" graphemes.
  @spec to_binary_string(charlist(), integer()) :: [String.t()]
  defp to_binary_string(block, pad_leading \\ @block_size_bytes) do
    to_binary_list_string(block, pad_leading) |> Enum.join() |> String.graphemes()
  end

  # Reorders the bits of `block` according to `table` (1-based positions).
  # Returns a list of bytes when `to_integer` is true, raw bit graphemes
  # otherwise.
  @spec permute(charlist(), [integer()], boolean()) :: [integer()] | [String.t()]
  defp permute(block, table, to_integer \\ true) do
    binary_string = to_binary_string(block)
    permuted = Enum.map(table, fn x -> Enum.at(binary_string, x - 1) end)

    if to_integer do
      permuted
      |> Enum.chunk_every(@block_size_bytes)
      |> Enum.map(fn x -> String.to_integer(Enum.join(x), 2) end)
    else
      permuted
    end
  end

  # S-box substitution: maps each of the eight 6-bit groups to 4 bits.
  # Row is selected by the two outer bits, column by the four middle bits.
  defp substitute(block) do
    blocks = block |> to_binary_string() |> Enum.chunk_every(6)
    edges = for i <- blocks, do: [Enum.at(i, 0), Enum.at(i, 5)] |> Enum.join() |> String.to_integer(2)
    middles = for k <- blocks, do: Enum.slice(k, 1..4) |> Enum.join() |> String.to_integer(2)

    result =
      for index <- 0..7,
          do: Enum.at(@s_box, index) |> Enum.at(Enum.at(edges, index)) |> Enum.at(Enum.at(middles, index))

    result
    |> to_binary_string(4)
    |> Enum.chunk_every(@block_size_bytes)
    |> Enum.map(fn x -> String.to_integer(Enum.join(x), 2) end)
  end

  # Initial permutation IP applied to a plain 64-bit block.
  @spec initial_permutation(charlist()) :: [integer()]
  defp initial_permutation(block), do: permute(block, @pi)

  # Final permutation IP^-1 applied after the last round.
  @spec final_permutation([integer()]) :: [integer()]
  defp final_permutation(block), do: permute(block, @pi_1)

  # Read a file, given a path, show content on stdout and return it
  @spec read_file(String.t()) :: String.t()
  defp read_file(file_path) do
    text = File.read!(file_path) |> String.trim()
    IO.puts "============================== Text ==========================="
    IO.puts text
    IO.puts "==============================================================="
    text
  end

  # Write a content to a file
  @spec write_to(String.t(), String.t()) :: atom()
  defp write_to(file_path, content) do
    File.write file_path, content
    IO.puts "Result saved to file '#{file_path}'..."
  end

  # Splits a block into equal left/right halves.
  @spec split_block([integer()]) :: {[integer()], [integer()]}
  defp split_block(block) do
    half = div(length(block), 2)
    [left, right] = Enum.chunk_every(block, half)
    {left, right}
  end

  # Element-wise XOR of two equally sized lists.
  @spec xor([integer()], [integer()]) :: [integer()]
  defp xor(key, right_e) do
    for {x, y} <- Enum.zip(key, right_e), do: bxor(x, y)
  end

  # One Feistel round: the new left half is the old right half; the new right
  # half is left XOR f(right, round_key). Extracted from the four duplicated
  # copies in encrypt_block/decrypt_block.
  defp feistel_round(block, round_key) do
    {left, right} = split_block(block)

    mixed =
      round_key
      |> xor(expansion(right))
      |> substitute()
      |> permute(@p)

    right ++ xor(left, mixed)
  end

  # Runs the 16 encryption rounds over one 64-bit block.
  # Backwards compatible generalization: when `keys` is '' (the default) the
  # schedule is derived from the historical hard-coded key '12345678';
  # previously a passed schedule was silently ignored.
  @spec encrypt_block([integer()], [[integer()]] | charlist(), integer()) :: [integer()]
  defp encrypt_block(block, keys \\ '', n \\ 0)

  defp encrypt_block(block, keys, n) when n == 0 do
    keys = if keys == '', do: generate_keys('12345678'), else: keys
    block = initial_permutation(block)
    encrypt_block(feistel_round(block, Enum.at(keys, n)), keys, n + 1)
  end

  defp encrypt_block(block, _keys, n) when n == @num_rounds do
    # After the last round the halves are swapped before IP^-1.
    # (A leftover debug IO.inspect was removed here.)
    {left, right} = split_block(block)
    final_permutation(right ++ left)
  end

  defp encrypt_block(block, keys, n) do
    encrypt_block(feistel_round(block, Enum.at(keys, n)), keys, n + 1)
  end

  # Encrypt the given plain text, which must be a list of 8-byte blocks.
  @spec encrypt_blocks([[integer()]]) :: [[integer()]]
  defp encrypt_blocks(plain) do
    Enum.map(plain, &encrypt_block/1)
  end

  # Runs the 16 decryption rounds (round keys applied in reverse order).
  @spec decrypt_block([integer()], [[integer()]] | charlist(), integer()) :: [integer()]
  defp decrypt_block(block, keys \\ '', n \\ 0)

  defp decrypt_block(block, keys, n) when n == 0 do
    keys = if keys == '', do: generate_keys('12345678'), else: keys
    block = initial_permutation(block)
    decrypt_block(feistel_round(block, Enum.at(keys, @num_rounds - 1 - n)), keys, n + 1)
  end

  defp decrypt_block(block, _keys, n) when n == @num_rounds do
    {left, right} = split_block(block)
    final_permutation(right ++ left)
  end

  defp decrypt_block(block, keys, n) do
    decrypt_block(feistel_round(block, Enum.at(keys, @num_rounds - 1 - n)), keys, n + 1)
  end

  # Splits text into 8-byte blocks, padding the final block with spaces.
  @spec split_blocks(charlist()) :: [[integer()]]
  defp split_blocks(text) do
    leftover = List.duplicate(hd(' '), @block_size_bytes)
    Enum.chunk_every(text, @block_size_bytes, @block_size_bytes, leftover)
  end

  # Decrypt the given cyphered text, which must be a list of blocks.
  defp decrypt_blocks(cyphered) do
    Enum.map(cyphered, &decrypt_block/1)
  end

  @doc """
  Encrypts the given charlist, returning the cyphered output as a string.
  """
  @spec encrypt([integer()]) :: String.t()
  def encrypt(plain) do
    IO.puts "Encrypting..."
    split_blocks(plain) |> encrypt_blocks() |> Enum.join()
  end

  @doc """
  Decrypts the given charlist, returning the plain output as a string.
  """
  @spec decrypt([integer()]) :: String.t()
  def decrypt(plain) do
    IO.puts "Decrypting..."
    split_blocks(plain) |> decrypt_blocks() |> Enum.join()
  end

  @doc """
  Entry point used when calling the module as a stand-alone script.
  """
  @spec main() :: atom()
  def main() do
    {operation, in_file, out_file} = parse_input()
    text = read_file(in_file) |> String.trim() |> to_charlist()

    result =
      case operation do
        :enc -> encrypt(text)
        :dec -> decrypt(text)
      end

    write_to(out_file, result)
    :ok
  end
end
DES.main()
|
des/lib/des.ex
| 0.656548
| 0.866302
|
des.ex
|
starcoder
|
defmodule MarathonEventExporter.SSEParser do
  @moduledoc """
  A GenServer to turn a stream of bytes (usually from an HTTP response) into a
  stream of server-sent events.

  Parsed events are delivered to registered listener processes as
  `{:sse, event}` messages.

  See https://html.spec.whatwg.org/multipage/server-sent-events.html
  (particularly sections 9.2.4 and 9.2.5) for the protocol specification.
  """
  use GenServer

  # One parsed server-sent event: accumulated `data`, `event` type and `id`.
  defmodule Event do
    defstruct data: "", event: "", id: ""
  end

  defimpl String.Chars, for: Event do
    def to_string(event) do
      "#Event<#{event.event} #{inspect event.data} id=#{inspect event.id}>"
    end
  end

  # Parser state: registered listener pids, the event currently being
  # accumulated, and the partial line carried between feed_data/2 calls.
  defmodule State do
    defstruct listeners: MapSet.new(), event: %Event{}, line_part: ""
  end
  ## Client API
  @doc """
  Starts a new SSEParser.
  """
  def start_link(opts) do
    GenServer.start_link(__MODULE__, :ok, opts)
  end
  @doc """
  Feeds some input data into the parser.
  """
  def feed_data(server, data) do
    GenServer.call(server, {:feed_data, data})
  end
  @doc """
  Registers a listener process to receive parsed events.
  """
  def register_listener(server, pid) do
    GenServer.call(server, {:register_listener, pid})
  end
  @doc """
  Unregisters a listener process.
  """
  def unregister_listener(server, pid) do
    GenServer.call(server, {:unregister_listener, pid})
  end
  ## Server callbacks
  def init(:ok) do
    {:ok, %State{}}
  end
  def handle_call({:feed_data, data}, _from, state) do
    new_state = data_received(data, state)
    {:reply, :ok, new_state}
  end
  def handle_call({:register_listener, pid}, _from, state) do
    new_listeners = MapSet.put(state.listeners, pid)
    {:reply, :ok, %{state | listeners: new_listeners}}
  end
  def handle_call({:unregister_listener, pid}, _from, state) do
    new_listeners = MapSet.delete(state.listeners, pid)
    {:reply, :ok, %{state | listeners: new_listeners}}
  end
  # Exposes the internal state (presumably a test hook - no production
  # client wrapper exists for it in this module).
  def handle_call(:_get_state, _from, state) do
    {:reply, {:ok, state}, state}
  end
  ## Internals
  # Sends `event` to every registered listener as an `{:sse, event}` message.
  def emit_event(event, state) do
    Enum.each(state.listeners, fn l -> send(l, {:sse, event}) end)
  end
  ## Parser
  # This clause handles the end of the input.
  defp data_received("", state) do
    state
  end
  # These three clauses handle newlines.
  #
  # NOTE(review): a "\r\n" pair split across two feed_data/2 calls is seen as
  # two separate newlines (the lone "\r" clause fires at the end of one
  # chunk, then the "\n" clause at the start of the next), which can produce
  # a spurious empty line and hence an extra event boundary - confirm
  # whether upstream chunking makes this possible.
  defp data_received("\r\n" <> data, state), do: line_complete(data, state)
  defp data_received("\r" <> data, state), do: line_complete(data, state)
  defp data_received("\n" <> data, state), do: line_complete(data, state)
  # This clause handles anything not matched above, which is all non-newlines
  # characters.
  # NOTE(review): appends one byte at a time to line_part; fine for modest
  # streams, but quadratic-ish on very long lines.
  defp data_received(<<char, data :: binary>>, state) do
    %State{line_part: line} = state
    new_state = %{state | line_part: line <> <<char>>}
    data_received(data, new_state)
  end
  # A full line has been accumulated: process it, then continue with the rest.
  defp line_complete(data, state) do
    new_state = line_received(state.line_part, %{state | line_part: ""})
    data_received(data, new_state)
  end
  # Handle an empty line, which indicates the end of an event.
  # Events with no data at all are dropped, per the SSE dispatch rules.
  defp line_received("", state) do
    if state.event.data != "" do
      # Remove one trailing newline (if there is one).
      data = String.replace_suffix(state.event.data, "\n", "")
      emit_event(%{state.event | data: data}, state)
    end
    %{state | event: %Event{}}
  end
  # Handle a comment by ignoring it.
  defp line_received(":" <> _, state), do: state
  # Parse the line into the field and value for further processing.
  # A single leading space after the colon is stripped, per the spec.
  defp line_received(line, state) do
    {field, value} = case String.split(line, ":", parts: 2) do
      [field] -> {field, ""}
      [field, " " <> value] -> {field, value}
      [field, value] -> {field, value}
    end
    process_field(field, value, state)
  end
  # Append the data value to the data field with a trailing newline.
  defp process_field("data", value, state) do
    new_event = %{state.event | data: state.event.data <> value <> "\n"}
    %{state | event: new_event}
  end
  # Set the event field to the event value.
  defp process_field("event", value, state) do
    new_event = %{state.event | event: value}
    %{state | event: new_event}
  end
  # Set the id field to the id value if the value does not contain a NUL.
  defp process_field("id", value, state) do
    cond do
      String.contains?(value, <<0>>) -> state
      true ->
        new_event = %{state.event | id: value}
        %{state | event: new_event}
    end
  end
  # Ignore any other field.
  defp process_field(_field, _value, state), do: state
end
|
lib/sse_parser.ex
| 0.652352
| 0.467453
|
sse_parser.ex
|
starcoder
|
defmodule BitcoinRpc do
@moduledoc """
Module to connect to a bitcoin node and make requests to the node through JSON RPC calls.
## Configuration
config :bitcoin_rpc,
host: "localhost",
port: "18333",
user: "myuser",
pass: "<PASSWORD>",
callback: nil # required only for BitcoinRpc.Transactions. Ex: {BitcoinWallet, :notify_queue}
## Examples
iex> BitcoinRpc.get_new_address() # get a new address for the default account
{:ok, "2N3jU3uoaU7s3fVv5RPyNrxjW8DbdvQAPvk"}
"""
# Thin wrappers around bitcoin_rpc/2, one per upstream JSON-RPC method.
@doc """
Invokes the `abandontransaction` RPC for the given txid.

https://bitcoin.org/en/developer-reference#abandontransaction
"""
def abandon_transaction(txid), do: bitcoin_rpc("abandontransaction", [txid])
@doc """
Invokes the `addmultisigaddress` RPC.

https://bitcoin.org/en/developer-reference#addmultisigaddress
"""
def add_multi_sig_address(required, addresses), do: bitcoin_rpc("addmultisigaddress", [required, addresses])
@doc """
Invokes the `addnode` RPC with the given node and command.

https://bitcoin.org/en/developer-reference#addnode
"""
def add_node(node, command), do: bitcoin_rpc("addnode", [node, command])
@doc """
Invokes the `addwitnessaddress` RPC.

https://bitcoin.org/en/developer-reference#addwitnessaddress
"""
def add_witness_address(address), do: bitcoin_rpc("addwitnessaddress", [address])
@doc """
Invokes the `backupwallet` RPC with the given destination.

https://bitcoin.org/en/developer-reference#backupwallet
"""
def backup_wallet(destination), do: bitcoin_rpc("backupwallet", [destination])
@doc """
Invokes the `bumpfee` RPC, optionally with an options map.

https://bitcoin.org/en/developer-reference#bumpfee
"""
def bump_fee(txid), do: bitcoin_rpc("bumpfee", [txid])
def bump_fee(txid, options), do: bitcoin_rpc("bumpfee", [txid, options])
@doc """
Invokes the `clearbanned` RPC.

https://bitcoin.org/en/developer-reference#clearbanned
"""
def clear_banned(), do: bitcoin_rpc("clearbanned")
@doc """
Invokes the `createmultisig` RPC.

https://bitcoin.org/en/developer-reference#createmultisig
"""
def create_multi_sig(required, addresses), do: bitcoin_rpc("createmultisig", [required, addresses])
@doc """
Invokes the `createrawtransaction` RPC, optionally with a locktime.

https://bitcoin.org/en/developer-reference#createrawtransaction
"""
def create_raw_transaction(inputs, outputs), do: bitcoin_rpc("createrawtransaction", [inputs, outputs])
def create_raw_transaction(inputs, outputs, locktime), do: bitcoin_rpc("createrawtransaction", [inputs, outputs, locktime])
@doc """
Invokes the `decoderawtransaction` RPC.

https://bitcoin.org/en/developer-reference#decoderawtransaction
"""
def decode_raw_transaction(transaction), do: bitcoin_rpc("decoderawtransaction", [transaction])
@doc """
Invokes the `decodescript` RPC.

https://bitcoin.org/en/developer-reference#decodescript
"""
def decode_script(script), do: bitcoin_rpc("decodescript", [script])
@doc """
Invokes the `disconnectnode` RPC.

https://bitcoin.org/en/developer-reference#disconnectnode
"""
def disconnect_node(address), do: bitcoin_rpc("disconnectnode", [address])
@doc """
Invokes the `dumpprivkey` RPC.

https://bitcoin.org/en/developer-reference#dumpprivkey
"""
def dump_priv_key(address), do: bitcoin_rpc("dumpprivkey", [address])
@doc """
Invokes the `dumpwallet` RPC.

https://bitcoin.org/en/developer-reference#dumpwallet
"""
def dump_wallet(filename), do: bitcoin_rpc("dumpwallet", [filename])
@doc """
Invokes the `encryptwallet` RPC.

https://bitcoin.org/en/developer-reference#encryptwallet
"""
def encrypt_wallet(passphrase), do: bitcoin_rpc("encryptwallet", [passphrase])
@doc """
Invokes the `estimatefee` RPC for the given number of blocks.

https://bitcoin.org/en/developer-reference#estimatefee
"""
def estimate_fee(blocks), do: bitcoin_rpc("estimatefee", [blocks])
@doc """
Invokes the `estimatepriority` RPC.

https://bitcoin.org/en/developer-reference#estimatepriority
"""
def estimate_priority(blocks), do: bitcoin_rpc("estimatepriority", [blocks])
@doc """
Invokes the `fundrawtransaction` RPC.

https://bitcoin.org/en/developer-reference#fundrawtransaction
"""
def fund_raw_transaction(hexstring, options), do: bitcoin_rpc("fundrawtransaction", [hexstring, options])
@doc """
Invokes the `generate` RPC, optionally limiting iterations via `maxtries`.

https://bitcoin.org/en/developer-reference#generate
"""
def generate(blocks), do: bitcoin_rpc("generate", [blocks])
def generate(blocks, maxtries), do: bitcoin_rpc("generate", [blocks, maxtries])
@doc """
Invokes the `generatetoaddress` RPC.

https://bitcoin.org/en/developer-reference#generatetoaddress
"""
def generate_to_address(blocks, address), do: bitcoin_rpc("generatetoaddress", [blocks, address])
def generate_to_address(blocks, address, maxtries), do: bitcoin_rpc("generatetoaddress", [blocks, address, maxtries])
@doc """
Invokes the `getaccountaddress` RPC.

https://bitcoin.org/en/developer-reference#getaccountaddress
"""
def get_account_address(account), do: bitcoin_rpc("getaccountaddress", [account])
@doc """
Invokes the `getaccount` RPC for the given address.

https://bitcoin.org/en/developer-reference#getaccount
"""
def get_account(address), do: bitcoin_rpc("getaccount", [address])
@doc """
Invokes the `getaddednodeinfo` RPC, optionally for a single node.

https://bitcoin.org/en/developer-reference#getaddednodeinfo
"""
def get_added_node_info(details), do: bitcoin_rpc("getaddednodeinfo", [details])
def get_added_node_info(details, node), do: bitcoin_rpc("getaddednodeinfo", [details, node])
@doc """
Invokes the `getaddressesbyaccount` RPC.

https://bitcoin.org/en/developer-reference#getaddressesbyaccount
"""
def get_addresses_by_account(account), do: bitcoin_rpc("getaddressesbyaccount", [account])
@doc """
Invokes the `getbalance` RPC. Defaults: account `""`, 1 confirmation,
watch-only addresses excluded.

https://bitcoin.org/en/developer-reference#getbalance
"""
def get_balance(account \\ "", confirmations \\ 1, watch_only \\ false), do: bitcoin_rpc("getbalance", [account, confirmations, watch_only])
@doc """
Invokes the `getbestblockhash` RPC.

https://bitcoin.org/en/developer-reference#getbestblockhash
"""
def get_best_block_hash(), do: bitcoin_rpc("getbestblockhash", [])
@doc """
Invokes the `getblock` RPC. `format` defaults to 1.

https://bitcoin.org/en/developer-reference#getblock
"""
def get_block(hash, format \\ 1), do: bitcoin_rpc("getblock", [hash, format])
@doc """
Invokes the `getblockchaininfo` RPC.

https://bitcoin.org/en/developer-reference#getblockchaininfo
"""
def get_block_chain_info(), do: bitcoin_rpc("getblockchaininfo", [])
@doc """
Invokes the `getblockcount` RPC.

https://bitcoin.org/en/developer-reference#getblockcount
"""
def get_block_count(), do: bitcoin_rpc("getblockcount", [])
@doc """
Invokes the `getblockhash` RPC for the given block height.

https://bitcoin.org/en/developer-reference#getblockhash
"""
def get_block_hash(block_height), do: bitcoin_rpc("getblockhash", [block_height])
@doc """
Invokes the `getblockheader` RPC. `format` defaults to true.

https://bitcoin.org/en/developer-reference#getblockheader
"""
def get_block_header(header_hash, format \\ true), do: bitcoin_rpc("getblockheader", [header_hash, format])
@doc """
Invokes the `getblocktemplate` RPC with the given params (default none).

https://bitcoin.org/en/developer-reference#getblocktemplate
"""
def get_block_template(params \\ []), do: bitcoin_rpc("getblocktemplate", params)
@doc """
Invokes the `getchaintips` RPC.

https://bitcoin.org/en/developer-reference#getchaintips
"""
def get_chain_tips(), do: bitcoin_rpc("getchaintips", [])
@doc """
Invokes the `getconnectioncount` RPC.

https://bitcoin.org/en/developer-reference#getconnectioncount
"""
def get_connection_count(), do: bitcoin_rpc("getconnectioncount", [])
@doc """
Invokes the `getdifficulty` RPC.

https://bitcoin.org/en/developer-reference#getdifficulty
"""
def get_difficulty(), do: bitcoin_rpc("getdifficulty", [])
@doc """
Deprecated upstream; returns `:deprecated` without calling the node.

https://bitcoin.org/en/developer-reference#getgenerate
"""
def get_generate(), do: :deprecated
@doc """
Deprecated upstream; returns `:deprecated` without calling the node.

https://bitcoin.org/en/developer-reference#gethashespersec
"""
def get_hashes_per_sec(), do: :deprecated
@doc """
Invokes the `getinfo` RPC.

https://bitcoin.org/en/developer-reference#getinfo
"""
def get_info(), do: bitcoin_rpc("getinfo", [])
@doc """
Invokes the `getmemoryinfo` RPC.

https://bitcoin.org/en/developer-reference#getmemoryinfo
"""
def get_memory_info(), do: bitcoin_rpc("getmemoryinfo", [])
@doc """
Invokes the `getmempoolancestors` RPC.

https://bitcoin.org/en/developer-reference#getmempoolancestors
"""
def get_mem_pool_ancestors(address, format \\ false), do: bitcoin_rpc("getmempoolancestors", [address, format])
@doc """
Invokes the `getmempooldescendants` RPC.

https://bitcoin.org/en/developer-reference#getmempooldescendants
"""
def get_mem_pool_descendants(address, format \\ false), do: bitcoin_rpc("getmempooldescendants", [address, format])
@doc """
Invokes the `getmempoolentry` RPC.

https://bitcoin.org/en/developer-reference#getmempoolentry
"""
def get_mem_pool_entry(address), do: bitcoin_rpc("getmempoolentry", [address])
@doc """
Invokes the `getmempoolinfo` RPC.

https://bitcoin.org/en/developer-reference#getmempoolinfo
"""
def get_mem_pool_info(), do: bitcoin_rpc("getmempoolinfo", [])
@doc """
Invokes the `getmininginfo` RPC.

https://bitcoin.org/en/developer-reference#getmininginfo
"""
def get_mining_info(), do: bitcoin_rpc("getmininginfo", [])
@doc """
Invokes the `getnettotals` RPC.

https://bitcoin.org/en/developer-reference#getnettotals
"""
def get_net_totals(), do: bitcoin_rpc("getnettotals", [])
@doc """
Invokes the `getnetworkhashps` RPC. Defaults: 120 blocks at height -1.

https://bitcoin.org/en/developer-reference#getnetworkhashps
"""
def get_network_hash_ps(blocks \\ 120, height \\ -1), do: bitcoin_rpc("getnetworkhashps", [blocks, height])
@doc """
Invokes the `getnetworkinfo` RPC.

https://bitcoin.org/en/developer-reference#getnetworkinfo
"""
def get_network_info(), do: bitcoin_rpc("getnetworkinfo", [])
@doc """
Invokes the `getnewaddress` RPC. Defaults: account `""`, p2sh-segwit type.

https://bitcoin.org/en/developer-reference#getnewaddress
"""
def get_new_address(account \\ "", address_type \\ "p2sh-segwit"), do: bitcoin_rpc("getnewaddress", [account, address_type])
@doc """
https://bitcoin.org/en/developer-reference#getpeerinfo
"""
def get_peer_info(), do: bitcoin_rpc("getpeerinfo", [])
@doc """
https://bitcoin.org/en/developer-reference#getrawchangeaddress
"""
def get_raw_change_address(), do: bitcoin_rpc("getrawchangeaddress", [])
@doc """
https://bitcoin.org/en/developer-reference#getrawmempool
"""
def get_raw_mem_pool(format \\ false), do: bitcoin_rpc("getrawmempool", [format])
@doc """
https://bitcoin.org/en/developer-reference#getrawtransaction
"""
def get_raw_transaction(txid, format \\ false), do: bitcoin_rpc("getrawtransaction", [txid, format])
@doc """
Returns the total received by addresses of `account`, counting payments
with at least `confirmations` confirmations.

https://bitcoin.org/en/developer-reference#getreceivedbyaccount
"""
def get_received_by_account(account, confirmations \\ 1), do: bitcoin_rpc("getreceivedbyaccount", [account, confirmations])
@doc """
Returns the total received by an address.

NOTE(review): the RPC takes an address; the `account` parameter name looks
misleading -- confirm callers pass an address here.

https://bitcoin.org/en/developer-reference#getreceivedbyaddress
"""
def get_received_by_address(account, confirmations \\ 1), do: bitcoin_rpc("getreceivedbyaddress", [account, confirmations])
@doc """
Gets detailed information about an in-wallet transaction.

https://bitcoin.org/en/developer-reference#gettransaction
"""
def get_transaction(txid, watch_only \\ false), do: bitcoin_rpc("gettransaction", [txid, watch_only])
@doc """
Returns details about an unspent transaction output (UTXO).

https://bitcoin.org/en/developer-reference#gettxout
"""
def get_tx_out(txid, vout, unconfirmed \\ false), do: bitcoin_rpc("gettxout", [txid, vout, unconfirmed])
@doc """
Returns a hex-encoded proof that the transaction was included in a block.

https://bitcoin.org/en/developer-reference#gettxoutproof
"""
def get_tx_out_proof(txid), do: bitcoin_rpc("gettxoutproof", [txid])
def get_tx_out_proof(txid, hash), do: bitcoin_rpc("gettxoutproof", [txid, hash])
@doc """
Returns statistics about the UTXO set.

https://bitcoin.org/en/developer-reference#gettxoutsetinfo
"""
def get_tx_out_set_info(), do: bitcoin_rpc("gettxoutsetinfo", [])
@doc """
Returns the wallet's total unconfirmed balance.

https://bitcoin.org/en/developer-reference#getunconfirmedbalance
"""
def get_unconfirmed_balance(), do: bitcoin_rpc("getunconfirmedbalance", [])
@doc """
Returns information about the wallet.

https://bitcoin.org/en/developer-reference#getwalletinfo
"""
def get_wallet_info(), do: bitcoin_rpc("getwalletinfo", [])
@doc """
Deprecated RPC; never contacts the node and always returns `:deprecated`.

https://bitcoin.org/en/developer-reference#getwork
"""
def get_work(), do: :deprecated
@doc """
Lists all available RPC commands, or shows help for the command `name`.

https://bitcoin.org/en/developer-reference#help
"""
def help(), do: bitcoin_rpc("help", [])
def help(name), do: bitcoin_rpc("help", [name])
@doc """
Adds a watch-only address to the wallet; `rescan` re-scans the chain for
past transactions involving it.

https://bitcoin.org/en/developer-reference#importaddress
"""
def import_address(address, account \\ "", rescan \\ true), do: bitcoin_rpc("importaddress", [address, account, rescan])
@doc """
Imports several addresses/scripts in one call.

https://bitcoin.org/en/developer-reference#importmulti
"""
def import_multi(imports, options \\ %{ rescan: true }), do: bitcoin_rpc("importmulti", [imports, options])
@doc """
Imports a private key into the wallet.

https://bitcoin.org/en/developer-reference#importprivkey
"""
def import_priv_key(private_key, account \\ "", rescan \\ true), do: bitcoin_rpc("importprivkey", [private_key, account, rescan])
@doc """
Imports funds (no rescan) given a raw transaction and its merkle proof.

https://bitcoin.org/en/developer-reference#importprunedfunds
"""
def import_pruned_funds(transaction, proof), do: bitcoin_rpc("importprunedfunds", [transaction, proof])
@doc """
Imports keys from a wallet-dump file on the node's filesystem.

https://bitcoin.org/en/developer-reference#importwallet
"""
def import_wallet(filename), do: bitcoin_rpc("importwallet", [filename])
@doc """
Pre-generates keys so the keypool holds at least `size` keys.

https://bitcoin.org/en/developer-reference#keypoolrefill
"""
def key_pool_refill(size \\ 100), do: bitcoin_rpc("keypoolrefill", [size])
@doc """
Lists accounts and their balances.

https://bitcoin.org/en/developer-reference#listaccounts
"""
def list_accounts(confirmations \\ 1, watch_only \\ false), do: bitcoin_rpc("listaccounts", [confirmations, watch_only])
@doc """
Lists groups of addresses with shared common ownership.

https://bitcoin.org/en/developer-reference#listaddressgroupings
"""
def list_address_groupings(), do: bitcoin_rpc("listaddressgroupings", [])
@doc """
Lists all banned IPs/subnets.

https://bitcoin.org/en/developer-reference#listbanned
"""
def list_banned(), do: bitcoin_rpc("listbanned", [])
@doc """
Lists temporarily locked (unspendable) outputs.

https://bitcoin.org/en/developer-reference#listlockunspent
"""
def list_lock_unspent(), do: bitcoin_rpc("listlockunspent", [])
@doc """
Lists account balances for received payments.

https://bitcoin.org/en/developer-reference#listreceivedbyaccount
"""
def list_received_by_account(confirmations \\ 1, empty \\ false, watch_only \\ false), do: bitcoin_rpc("listreceivedbyaccount", [confirmations, empty, watch_only])
@doc """
Lists balances by receiving address.

https://bitcoin.org/en/developer-reference#listreceivedbyaddress
"""
def list_received_by_address(confirmations \\ 1, empty \\ false, watch_only \\ false), do: bitcoin_rpc("listreceivedbyaddress", [confirmations, empty, watch_only])
@doc """
List transactions since the given block.
https://bitcoin.org/en/developer-reference#listsinceblock
Example:
BitcoinWallet.RpcClient.list_since_block("00000000000001867fb937891ba3777bd7eaee1dca3a0ba74ae2684bf21b1333", 1)
{:ok,
%{
"lastblock" => "00000000005c7e378ee6845be8826f707607437b990ffeea6b23c269710209ec",
"removed" => [],
"transactions" => [
%{
"account" => "",
"address" => "2N3jU3uoaU7s3fVv5RPyNrxjW8DbdvQAPvk",
"amount" => 0.8425725,
"bip125-replaceable" => "no",
"blockhash" => "00000000005c7e378ee6845be8826f707607437b990ffeea6b23c269710209ec",
"blockindex" => 22,
"blocktime" => 1525548900,
"category" => "receive",
"confirmations" => 1,
"label" => "",
"time" => 1525548332,
"timereceived" => 1525548332,
"txid" => "ea87edc568f998b2c8871e1c8b7d677987e4605cc89a666c0f15d374dd52a9ee",
"vout" => 0,
"walletconflicts" => []
}
]
}}
"""
def list_since_block(), do: bitcoin_rpc("listsinceblock", [])
def list_since_block(hash), do: bitcoin_rpc("listsinceblock", [hash])
def list_since_block(hash, confirmations), do: bitcoin_rpc("listsinceblock", [hash, confirmations])
def list_since_block(hash, confirmations, watch_only), do: bitcoin_rpc("listsinceblock", [hash, confirmations, watch_only])
@doc """
Lists the most recent wallet transactions (`count` entries, skipping `skip`).

https://bitcoin.org/en/developer-reference#listtransactions
"""
def list_transactions(account \\ "", count \\ 10, skip \\ 0, watch_only \\ false), do: bitcoin_rpc("listtransactions", [account, count, skip, watch_only])
@doc """
Lists unspent outputs with between `min` and `max` confirmations, optionally
filtered to `addresses`.

https://bitcoin.org/en/developer-reference#listunspent
"""
def list_unspent(min \\ 1, max \\ 9999999, addresses \\ []), do: bitcoin_rpc("listunspent", [min, max, addresses])
@doc """
Locks (`unlock` = false) or unlocks (`unlock` = true) the given outputs so
the wallet will not (or will again) spend them automatically.

https://bitcoin.org/en/developer-reference#lockunspent
"""
def lock_unspent(unlock), do: bitcoin_rpc("lockunspent", [unlock])
def lock_unspent(unlock, outputs), do: bitcoin_rpc("lockunspent", [unlock, outputs])
@doc """
Moves `amount` between two wallet accounts (bookkeeping only, no on-chain
transaction). The fourth argument is ignored by the node ("dummy").

https://bitcoin.org/en/developer-reference#move
"""
def move(from, to, amount), do: bitcoin_rpc("move", [from, to, amount])
def move(from, to, amount, unused), do: bitcoin_rpc("move", [from, to, amount, unused])
def move(from, to, amount, unused, comment), do: bitcoin_rpc("move", [from, to, amount, unused, comment])
@doc """
Pings all connected peers.

https://bitcoin.org/en/developer-reference#ping-rpc
"""
def ping(), do: bitcoin_rpc("ping", [])
@doc """
Treats the given block as if it were received first ("precious").

https://bitcoin.org/en/developer-reference#preciousblock
"""
def precious_block(hash), do: bitcoin_rpc("preciousblock", [hash])
@doc """
Adjusts the mining priority/fee-delta of a mempool transaction.

https://bitcoin.org/en/developer-reference#prioritisetransaction
"""
def prioritise_transaction(txid, priority, fee), do: bitcoin_rpc("prioritisetransaction", [txid, priority, fee])
@doc """
Prunes the block chain up to the given height.

https://bitcoin.org/en/developer-reference#pruneblockchain
"""
def prune_block_chain(height), do: bitcoin_rpc("pruneblockchain", [height])
@doc """
Removes a transaction previously added with `import_pruned_funds/2`.

https://bitcoin.org/en/developer-reference#removeprunedfunds
"""
def remove_pruned_funds(txid), do: bitcoin_rpc("removeprunedfunds", [txid])
@doc """
Spends `amount` from `from_account` to `address`.

https://bitcoin.org/en/developer-reference#sendfrom
"""
def send_from(from_account, address, amount, confirmations \\ 1, comment \\ "", comment_to \\ ""), do: bitcoin_rpc("sendfrom", [from_account, address, amount, confirmations, comment, comment_to])
@doc """
Sends to multiple recipients in one transaction; `addresses` maps
address => amount.

https://bitcoin.org/en/developer-reference#sendmany
"""
def send_many(account, addresses, confirmations \\ 1, comment \\ "", subtract_fee \\ [], replaceable \\ true, conf_target \\ 3, estimate_mode \\ "ECONOMICAL"), do:
bitcoin_rpc("sendmany", [account, addresses, confirmations, comment, subtract_fee, replaceable, conf_target, estimate_mode])
@doc """
Broadcasts a signed raw transaction; `high_fees` allows absurdly high fees.

https://bitcoin.org/en/developer-reference#sendrawtransaction
"""
def send_raw_transaction(transaction, high_fees \\ false), do: bitcoin_rpc("sendrawtransaction", [transaction, high_fees])
@doc """
Sends `amount` to `address` from the default wallet.

https://bitcoin.org/en/developer-reference#sendtoaddress
"""
def send_to_address(address, amount, comment \\"", comment_to \\"", subtract_fee \\ false), do: bitcoin_rpc("sendtoaddress", [address, amount, comment, comment_to, subtract_fee])
@doc """
Associates `address` with the wallet account `account`.

https://bitcoin.org/en/developer-reference#setaccount
"""
def set_account(address, account), do: bitcoin_rpc("setaccount", [address, account])
@doc """
Adds or removes an IP/subnet from the ban list; `command` is "add" or
"remove".

https://bitcoin.org/en/developer-reference#setban
"""
def set_ban(ip, command), do: bitcoin_rpc("setban", [ip, command])
def set_ban(ip, command, bantime), do: bitcoin_rpc("setban", [ip, command, bantime])
def set_ban(ip, command, bantime, absolute), do: bitcoin_rpc("setban", [ip, command, bantime, absolute])
@doc """
Deprecated RPC; never contacts the node and always returns `:deprecated`.

https://bitcoin.org/en/developer-reference#setgenerate
"""
def set_generate(), do: :deprecated
@doc """
Enables or disables all P2P network activity.

https://bitcoin.org/en/developer-reference#setnetworkactive
"""
def set_network_active(activate), do: bitcoin_rpc("setnetworkactive", [activate])
@doc """
Sets the wallet's per-kilobyte transaction fee.

https://bitcoin.org/en/developer-reference#settxfee
"""
def set_tx_fee(fee), do: bitcoin_rpc("settxfee", [fee])
@doc """
Signs `message` with the private key belonging to `address`.

https://bitcoin.org/en/developer-reference#signmessage
"""
def sign_message(address, message), do: bitcoin_rpc("signmessage", [address, message])
@doc """
Signs `message` with the given private key directly.

https://bitcoin.org/en/developer-reference#signmessagewithprivkey
"""
def sign_message_with_priv_key(private_key, message), do: bitcoin_rpc("signmessagewithprivkey", [private_key, message])
@doc """
Signs a raw transaction, optionally supplying input dependencies, private
keys and a signature-hash type.

https://bitcoin.org/en/developer-reference#signrawtransaction
"""
def sign_raw_transaction(transaction), do: bitcoin_rpc("signrawtransaction", [transaction])
def sign_raw_transaction(transaction, dependencies), do: bitcoin_rpc("signrawtransaction", [transaction, dependencies])
def sign_raw_transaction(transaction, dependencies, private_keys), do: bitcoin_rpc("signrawtransaction", [transaction, dependencies, private_keys])
def sign_raw_transaction(transaction, dependencies, private_keys, sig_hash), do: bitcoin_rpc("signrawtransaction", [transaction, dependencies, private_keys, sig_hash])
@doc """
Safely shuts down the node.

https://bitcoin.org/en/developer-reference#stop
"""
def stop(), do: bitcoin_rpc("stop", [])
@doc """
Submits a serialized block to the network.

https://bitcoin.org/en/developer-reference#submitblock
"""
def submit_block(block), do: bitcoin_rpc("submitblock", [block])
def submit_block(block, parameters), do: bitcoin_rpc("submitblock", [block, parameters])
@doc """
Returns information about the given address.

https://bitcoin.org/en/developer-reference#validateaddress
"""
def validate_address(address), do: bitcoin_rpc("validateaddress", [address])
@doc """
Verifies the block-chain database, optionally with a thoroughness level and
a number of blocks to check.

https://bitcoin.org/en/developer-reference#verifychain
"""
def verify_chain(), do: bitcoin_rpc("verifychain", [])
def verify_chain(check_level), do: bitcoin_rpc("verifychain", [check_level])
def verify_chain(check_level, number_of_blocks), do: bitcoin_rpc("verifychain", [check_level, number_of_blocks])
@doc """
Verifies that `signature` was produced by `address` for `message`.

https://bitcoin.org/en/developer-reference#verifymessage
"""
def verify_message(address, signature, message), do: bitcoin_rpc("verifymessage", [address, signature, message])
@doc """
Verifies a merkle proof produced by `get_tx_out_proof/1,2`.

https://bitcoin.org/en/developer-reference#verifytxoutproof
"""
def verify_tx_out_proof(proof), do: bitcoin_rpc("verifytxoutproof", [proof])
@doc """
Removes the wallet decryption key from memory, locking the wallet.

https://bitcoin.org/en/developer-reference#walletlock
"""
def wallet_lock(), do: bitcoin_rpc("walletlock", [])
@doc """
Unlocks the wallet with `passphrase` for `seconds` seconds.

https://bitcoin.org/en/developer-reference#walletpassphrase
"""
def wallet_passphrase(passphrase, seconds \\ 300), do: bitcoin_rpc("walletpassphrase", [passphrase, seconds])
@doc """
Changes the wallet passphrase from `current` to `passphrase`.

https://bitcoin.org/en/developer-reference#walletpassphrasechange
"""
def wallet_passphrase_change(current, passphrase), do: bitcoin_rpc("walletpassphrasechange", [current, passphrase])
# Performs a JSON-RPC call against the configured bitcoind node.
#
# Connection settings (:host, :port, :user, :pass) are read at runtime from
# the :bitcoin_rpc application environment. Returns {:ok, result} when the
# node answers with "error" => nil, {:error, reason} when the node reports an
# error, or the raw HTTPoison/Poison error tuple on transport or decoding
# failure.
defp bitcoin_rpc(method, params \\ []) do
host = Application.get_env(:bitcoin_rpc, :host)
port = Application.get_env(:bitcoin_rpc, :port)
user = Application.get_env(:bitcoin_rpc, :user)
pass = Application.get_env(:bitcoin_rpc, :pass)
# Map keys here are quoted atoms (:"jsonrpc", :"method", ...); Poison
# serializes them as plain JSON strings.
command = %{"jsonrpc": "2.0", "method": method, "params": params, "id": ""}
headers = ["Authorization": "Basic " <> Base.encode64(user <> ":" <> pass)]
options = [timeout: 30000, recv_timeout: 20000]
url = "http://" <> host <> ":" <> to_string(port) <> "/"
body = Poison.encode!(command)
with {:ok, response} <- HTTPoison.post(url, body, headers, options),
{:ok, metadata} <- Poison.decode(response.body),
%{"error" => nil, "result" => result} <- metadata do
{:ok, result}
else
# NOTE(review): a reply with "error" => nil but no "result" key also lands
# here and yields {:error, nil} -- confirm that is intended.
%{"error" => reason} -> {:error, reason}
error -> error
end
end
end
|
lib/bitcoin_rpc.ex
| 0.806358
| 0.439447
|
bitcoin_rpc.ex
|
starcoder
|
defmodule Timex.Format.Time.Formatter do
@moduledoc """
This module defines the behaviour for custom Time formatters
"""
# NOTE(review): `use Behaviour`/`defcallback` are deprecated since Elixir 1.2
# in favor of bare @callback attributes -- kept here for compatibility with
# the Elixir versions this library supports.
use Behaviour
use Timex
import Timex.Macros
alias Timex.Translator
alias Timex.Format.Time.Formatters.Default
alias Timex.Format.Time.Formatters.Humanized
# Pulling in this module gives the formatter the Time alias and declares the
# behaviour, so implementations are checked for format/1 and lformat/2.
defmacro __using__(_) do
quote do
alias Timex.Time
@behaviour Timex.Format.Time.Formatter
end
end
# Format a timestamp in the default locale.
defcallback format(timestamp :: Types.timestamp) :: String.t | {:error, term}
# Format a timestamp, translated for the given locale.
defcallback lformat(timestamp :: Types.timestamp, locale :: String.t) :: String.t | {:error, term}
@doc """
Formats a Time tuple/Erlang timestamp, as a string, using the provided
formatter. If a formatter is not provided, the formatter used is
`Timex.Format.Time.Formatters.Default`. As a handy shortcut, you can reference
the other built-in formatter (Humanized) via the :humanized atom as shown below.
# Examples
iex> #{__MODULE__}.format({1435, 180354, 590264})
"P45Y6M5DT21H12M34.590264S"
"""
@spec format(Types.timestamp) :: String.t | {:error, term}
def format(timestamp), do: lformat(timestamp, Translator.default_locale, Default)
@doc """
Same as format/1, but takes a formatter name as an argument
## Examples
iex> #{__MODULE__}.format({1435, 180354, 590264}, :humanized)
"45 years, 6 months, 5 days, 21 hours, 12 minutes, 34 seconds, 590.264 milliseconds"
"""
@spec format(Types.timestamp, atom) :: String.t | {:error, term}
def format(timestamp, formatter), do: lformat(timestamp, Translator.default_locale, formatter)
@doc """
Same as format/1, but takes a locale name as an argument, and translates the format string,
if the locale has translations.
"""
@spec lformat(Types.timestamp, String.t) :: String.t | {:error, term}
def lformat(timestamp, locale), do: lformat(timestamp, locale, Default)
@doc """
Same as lformat/2, but takes a formatter as an argument
"""
@spec lformat(Types.timestamp, String.t, atom) :: String.t | {:error, term}
def lformat({mega,s,micro} = timestamp, locale, formatter)
when is_timestamp(mega,s,micro) and is_binary(locale) and is_atom(formatter) do
# :humanized is a shortcut for the built-in Humanized formatter; any other
# atom is assumed to be a module implementing this behaviour's lformat/2.
case formatter do
:humanized -> Humanized.lformat(timestamp, locale)
_ -> formatter.lformat(timestamp, locale)
end
end
# Anything that is not a well-formed {mega, sec, micro} timestamp (or has a
# non-binary locale / non-atom formatter) is rejected here.
def lformat(_, _, _), do: {:error, :invalid_timestamp}
end
|
lib/format/time/formatter.ex
| 0.892199
| 0.466116
|
formatter.ex
|
starcoder
|
defmodule Blogit.Components.Metas do
@moduledoc """
A queryable `Blogit.Component` process that holds the meta data of every
post in the blog as its state.

Meta-only queries are cheaper than asking `Blogit.Components.Posts` for the
full posts, and the replies are much smaller: they carry only the preview
HTML of the posts, never the whole content.

Supported `call` messages:

* `{:list, from, size}` - returns the post metas sorted by their
  `created_at` field, newest first, with the first `from` entries dropped
  and at most `size` returned (`size` may be `:infinity`).
* `:list_pinned` - returns `{name, title}` tuples for the posts whose
  `pinned` field is `true`, sorted by `updated_at`, newest first.

This component is supervised by `Blogit.Components.Supervisor` and added to
it by `Blogit.Server`. When the posts get updated the cached state is reset
to `nil` and lazily recomputed on the next request.
"""
use Blogit.Component
alias Blogit.Components.Posts
alias Blogit.Models.Post.Meta
# State: the component's language plus the cached sorted meta list
# (`nil` means the cache must be rebuilt on the next query).
def init({language, _}), do: {:ok, %{language: language, metas: nil}}
# Drop the cached metas; they will be recomputed lazily.
def handle_cast(:reset, %{language: language}) do
{:noreply, %{language: language, metas: nil}}
end
def handle_call({:list, from, size}, _from, %{metas: cached, language: language}) do
sorted = fetch_metas(cached, language)
limit =
case size do
:infinity -> length(sorted)
n -> n
end
page =
sorted
|> Enum.drop(from)
|> Enum.take(limit)
{:reply, page, %{language: language, metas: sorted}}
end
def handle_call(:list_pinned, _from, %{metas: cached, language: language}) do
sorted = fetch_metas(cached, language)
pinned =
sorted
|> Enum.filter(fn meta -> meta.pinned end)
|> Meta.sorted(:updated_at)
|> Enum.map(&{&1.name, &1.title})
{:reply, pinned, %{language: language, metas: sorted}}
end
# Rebuild the sorted meta list from the Posts component when the cache is
# empty; otherwise reuse it unchanged.
defp fetch_metas(nil, language) do
Posts.name(language)
|> GenServer.call(:all)
|> Enum.map(fn post -> post.meta end)
|> Meta.sorted()
end
defp fetch_metas(cached, _language), do: cached
end
|
lib/blogit/components/metas.ex
| 0.702938
| 0.550728
|
metas.ex
|
starcoder
|
defmodule Floki do
alias Floki.{Finder, HTMLParser, FilterOut, HTMLTree}
@moduledoc """
Floki is a simple HTML parser that enables search for nodes using CSS selectors.
## Example
Assuming that you have the following HTML:
```html
<!doctype html>
<html>
<body>
<section id="content">
<p class="headline">Floki</p>
<a href="http://github.com/philss/floki">Github page</a>
<span data-model="user">philss</span>
</section>
</body>
</html>
```
Examples of queries that you can perform:
* Floki.find(html, "#content")
* Floki.find(html, ".headline")
* Floki.find(html, "a")
* Floki.find(html, "[data-model=user]")
* Floki.find(html, "#content a")
* Floki.find(html, ".headline, a")
Each HTML node is represented by a tuple like:
{tag_name, attributes, children_nodes}
Example of node:
{"p", [{"class", "headline"}], ["Floki"]}
So even if the only child node is the element text, it is represented
inside a list.
You can write a simple HTML crawler (with support of [HTTPoison](https://github.com/edgurgel/httpoison)) with a few lines of code:
html
|> Floki.find(".pages a")
|> Floki.attribute("href")
|> Enum.map(fn(url) -> HTTPoison.get!(url) end)
It is simple as that!
"""
@type html_tree :: tuple | list
@doc """
Parses a HTML string.
## Examples
iex> Floki.parse("<div class=js-action>hello world</div>")
{"div", [{"class", "js-action"}], ["hello world"]}
iex> Floki.parse("<div>first</div><div>second</div>")
[{"div", [], ["first"]}, {"div", [], ["second"]}]
"""
@spec parse(binary) :: html_tree
def parse(html) do
HTMLParser.parse(html)
end
# HTML void elements: raw_html/1 renders these without a closing tag.
@self_closing_tags ["area", "base", "br", "col", "command", "embed", "hr", "img", "input", "keygen", "link", "meta", "param", "source", "track", "wbr"]
@doc """
Converts HTML tree to raw HTML.
Note that the resultant HTML may be different from the original one.
Spaces after tags and doctypes are ignored.
## Examples
iex> Floki.parse(~s(<div class="wrapper">my content</div>)) |> Floki.raw_html
~s(<div class="wrapper">my content</div>)
"""
@spec raw_html(html_tree) :: binary
def raw_html(html_tree), do: raw_html(html_tree, "")
# Tail-recursive renderer that appends each node to the accumulator `html`.
# NOTE(review): repeated binary concatenation makes this quadratic for large
# trees; iodata would be cheaper -- left as-is to preserve exact behavior.
defp raw_html([], html), do: html
# A single node (tuple) is wrapped into a list before rendering.
defp raw_html(tuple, html) when is_tuple(tuple), do: raw_html([tuple], html)
# Text nodes are appended verbatim (no escaping).
defp raw_html([string|tail], html) when is_binary(string), do: raw_html(tail, html <> string)
defp raw_html([{:comment, comment}|tail], html), do: raw_html(tail, html <> "<!--#{comment}-->")
defp raw_html([{type, attrs, children}|tail], html) do
raw_html(tail, html <> tag_for(type, tag_attrs(attrs), children))
end
# Renders an attribute list to a string such as ~s(a="1" b).
# NOTE(review): String.strip/1 is deprecated since Elixir 1.3 in favor of
# String.trim/1 -- kept for compatibility with older Elixir versions.
defp tag_attrs(attr_list) do
attr_list
|> Enum.reduce("", &build_attrs/2)
|> String.strip
end
# {attr, value} pairs render as attr="value"; bare attributes render alone.
defp build_attrs({attr, value}, attrs), do: ~s(#{attrs} #{attr}="#{value}")
defp build_attrs(attr, attrs), do: "#{attrs} #{attr}"
# Void elements are rendered self-closed; their children are discarded.
defp tag_for(type, attrs, _children) when type in @self_closing_tags do
case attrs do
"" -> "<#{type}/>"
_ -> "<#{type} #{attrs}/>"
end
end
defp tag_for(type, attrs, children) do
case attrs do
"" -> "<#{type}>#{raw_html(children)}</#{type}>"
_ -> "<#{type} #{attrs}>#{raw_html(children)}</#{type}>"
end
end
@doc """
Find elements inside a HTML tree or string.
## Examples
iex> Floki.find("<p><span class=hint>hello</span></p>", ".hint")
[{"span", [{"class", "hint"}], ["hello"]}]
iex> Floki.find("<body><div id=important><div>Content</div></div></body>", "#important")
[{"div", [{"id", "important"}], [{"div", [], ["Content"]}]}]
iex> Floki.find("<p><a href='https://google.com'>Google</a></p>", "a")
[{"a", [{"href", "https://google.com"}], ["Google"]}]
iex> Floki.find([{ "div", [], [{"a", [{"href", "https://google.com"}], ["Google"]}]}], "div a")
[{"a", [{"href", "https://google.com"}], ["Google"]}]
"""
@spec find(binary | html_tree, binary) :: html_tree
def find(html, selector) when is_binary(html) do
html_as_tuple = parse(html)
{tree, results} = Finder.find(html_as_tuple, selector)
Enum.map(results, fn(html_node) -> HTMLTree.to_tuple(tree, html_node) end)
end
def find(html_tree_as_tuple, selector) do
{tree, results} = Finder.find(html_tree_as_tuple, selector)
Enum.map(results, fn(html_node) -> HTMLTree.to_tuple(tree, html_node) end)
end
@doc """
Applies `transformation` to the given HTML tree (or to each tree in a list)
via `Floki.Finder.apply_transformation/2`.
"""
def transform(html_tree_list, transformation) when is_list(html_tree_list) do
Enum.map(html_tree_list, fn(html_tree) ->
Finder.apply_transformation(html_tree, transformation)
end)
end
def transform(html_tree, transformation) do
Finder.apply_transformation(html_tree, transformation)
end
@doc """
Returns the text nodes from a HTML tree.
By default, it will perform a deep search through the HTML tree.
You can disable deep search with the option `deep` assigned to false.
You can include content of script tags with the option `js` assigned to true.
You can specify a separator between nodes content.
## Examples
iex> Floki.text("<div><span>hello</span> world</div>")
"hello world"
iex> Floki.text("<div><span>hello</span> world</div>", deep: false)
" world"
iex> Floki.text("<div><script>hello</script> world</div>")
" world"
iex> Floki.text("<div><script>hello</script> world</div>", js: true)
"hello world"
iex> Floki.text("<ul><li>hello</li><li>world</li></ul>", sep: " ")
"hello world"
iex> Floki.text([{"div", [], ["hello world"]}])
"hello world"
iex> Floki.text([{"p", [], ["1"]},{"p", [], ["2"]}])
"12"
"""
@spec text(html_tree | binary) :: binary
# NOTE(review): passing any opts replaces the whole default list, so e.g.
# text(html, js: true) leaves opts[:deep] as nil -- the catch-all clauses
# below keep the intended defaults (deep search, no separator) in that case.
def text(html, opts \\ [deep: true, js: false, sep: ""]) do
html_tree =
if is_binary(html) do
parse(html)
else
html
end
# Unless js: true, <script> contents are removed before text extraction.
cleaned_html_tree =
case opts[:js] do
true -> html_tree
_ -> filter_out(html_tree, "script")
end
search_strategy =
case opts[:deep] do
false -> Floki.FlatText
_ -> Floki.DeepText
end
case opts[:sep] do
nil -> search_strategy.get(cleaned_html_tree)
sep -> search_strategy.get(cleaned_html_tree, sep)
end
end
@doc """
Returns a list with attribute values for a given selector.
## Examples
iex> Floki.attribute("<a href='https://google.com'>Google</a>", "a", "href")
["https://google.com"]
iex> Floki.attribute([{"a", [{"href", "https://google.com"}], ["Google"]}], "a", "href")
["https://google.com"]
"""
@spec attribute(binary | html_tree, binary, binary) :: list
def attribute(html, selector, attribute_name) do
html
|> find(selector)
|> attribute_values(attribute_name)
end
@doc """
Returns a list with attribute values from elements.
## Examples
iex> Floki.attribute("<a href=https://google.com>Google</a>", "href")
["https://google.com"]
iex> Floki.attribute([{"a", [{"href", "https://google.com"}], ["Google"]}], "href")
["https://google.com"]
"""
@spec attribute(binary | html_tree, binary) :: list
def attribute(html_tree, attribute_name) when is_binary(html_tree) do
html_tree
|> parse
|> attribute_values(attribute_name)
end
def attribute(elements, attribute_name) do
attribute_values(elements, attribute_name)
end
defp attribute_values(element, attr_name) when is_tuple(element) do
attribute_values([element], attr_name)
end
# Collects the value of attr_name from each element, preserving order;
# elements without the attribute are skipped.
defp attribute_values(elements, attr_name) do
values = Enum.reduce elements, [], fn({_, attributes, _}, acc) ->
case attribute_match?(attributes, attr_name) do
{_attr_name, value} ->
[value|acc]
_ ->
acc
end
end
Enum.reverse(values)
end
# Returns the {name, value} pair for attribute_name, or nil if absent.
defp attribute_match?(attributes, attribute_name) do
Enum.find attributes, fn({attr_name, _}) ->
attr_name == attribute_name
end
end
@doc """
Returns the nodes from a HTML tree that don't match the filter selector.
## Examples
iex> Floki.filter_out("<div><script>hello</script> world</div>", "script")
{"div", [], [" world"]}
iex> Floki.filter_out([{"body", [], [{"script", [], []},{"div", [], []}]}], "script")
[{"body", [], [{"div", [], []}]}]
iex> Floki.filter_out("<div><!-- comment --> text</div>", :comment)
{"div", [], [" text"]}
"""
@spec filter_out(binary | html_tree, binary) :: list
def filter_out(html_tree, selector) when is_binary(html_tree) do
html_tree
|> parse
|> FilterOut.filter_out(selector)
end
def filter_out(elements, selector) do
FilterOut.filter_out(elements, selector)
end
end
|
lib/floki.ex
| 0.784897
| 0.64646
|
floki.ex
|
starcoder
|
defmodule StarkInfra.Utils.Parse do
alias EllipticCurve.Signature
alias EllipticCurve.PublicKey
alias EllipticCurve.Ecdsa
alias StarkInfra.Utils.Check
alias StarkInfra.Utils.JSON
alias StarkInfra.Utils.API
alias StarkInfra.User.Project
alias StarkInfra.User.Organization
alias StarkInfra.Error
alias StarkInfra.Utils.Request
@moduledoc false
@doc """
Create a single Event struct received from event listening at subscribed user endpoint.
If the provided digital signature does not check out with the StarkInfra public key, an "invalidSignature"
error will be returned.
## Parameters (required):
- `content` [string]: response content from request received at user endpoint (not parsed)
- `signature` [string]: base-64 digital signature received at response header "Digital-Signature"
- `resource_maker` [function]: function used to build the resource struct from the decoded JSON
## Options:
- `cache_pid` [PID, default nil]: PID of the process that holds the public key cache, returned on previous parses. If not provided, a new cache process will be generated.
- `key` [string, default nil]: when given, the resource is built from `decoded_json[key]` instead of the whole decoded body.
- `user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- Event struct with updated attributes
- Cache PID that holds the Stark Infra public key in order to avoid unnecessary requests to the API on future parses
"""
@spec parse_and_verify(
content: binary,
signature: binary,
cache_pid: PID,
resource_maker: any,
key: binary,
user: Project.t() | Organization.t()
) ::
{:ok, {Event.t(), binary}} | {:error, [Error.t()]}
def parse_and_verify(parameters \\ []) do
# Fill defaults for the optional parameters. :key MUST have a default:
# it is read unconditionally below, and without `key: nil` this function
# raised a KeyError whenever the caller did not pass :key explicitly.
parameters =
Enum.into(
parameters |> Check.enforced_keys([:content, :signature]),
%{cache_pid: nil, user: nil, key: nil}
)
parse(parameters.user, parameters.content, parameters.signature, parameters.cache_pid, parameters.resource_maker, parameters.key, 0)
end
@doc """
Same as parse_and_verify/1, but it will unwrap the error tuple and raise in case of errors.
"""
@spec parse_and_verify!(
content: binary,
signature: binary,
cache_pid: PID,
resource_maker: any,
key: binary,
user: Project.t() | Organization.t()
) :: {Event.t(), any}
def parse_and_verify!(parameters \\ []) do
case parse_and_verify(parameters) do
{:ok, {event, cache_pid_}} -> {event, cache_pid_}
{:error, errors} -> raise API.errors_to_string(errors)
end
end
# No cache process yet: start a fresh Agent to hold the public key and retry.
defp parse(user, content, signature, cache_pid, resource_maker, key, counter) when is_nil(cache_pid) do
{:ok, new_cache_pid} = Agent.start_link(fn -> %{} end)
parse(user, content, signature, new_cache_pid, resource_maker, key, counter)
end
# Verify the signature; on a failed check, clear the cached public key and
# retry once (counter + 1) in case the cached key was stale.
defp parse(user, content, signature, cache_pid, resource_maker, key, counter) do
case verify_signature(user, content, signature, cache_pid, counter) do
{:ok, true} ->
{:ok, {content |> parse_content(resource_maker, key), cache_pid}}
{:ok, false} ->
parse(user, content, signature, cache_pid |> update_public_key(nil), resource_maker, key, counter + 1)
{:error, errors} -> {:error, errors}
end
end
# Without a key, the whole decoded JSON body is fed to the resource maker.
defp parse_content(content, resource_maker, key) when is_nil(key) do
API.from_api_json(
JSON.decode!(content),
resource_maker
)
end
# With a key, only decoded_json[key] is fed to the resource maker.
defp parse_content(content, resource_maker, key) do
API.from_api_json(
JSON.decode!(content)[key],
resource_maker
)
end
# The public key was already refreshed once and the signature still does not
# match: give up with an invalidSignature error.
defp verify_signature(_user, _content, _signature_base_64, _cache_pid, counter)
when counter > 1 do
{
:error,
[
%Error{
code: "invalidSignature",
message: "The provided signature and content do not match the Stark Infra public key"
}
]
}
end
# Base-64 input: decode it into a Signature struct first; a malformed
# signature is reported without ever touching the public key.
defp verify_signature(user, content, signature_base_64, cache_pid, counter)
when is_binary(signature_base_64) and counter <= 1 do
try do
signature_base_64 |> Signature.fromBase64!()
rescue
_error -> {
:error,
[
%Error{
code: "invalidSignature",
message: "The provided signature is not valid"
}
]
}
else
signature -> verify_signature(
user,
content,
signature,
cache_pid,
counter
)
end
end
# Decoded Signature struct: check it against the (possibly cached) public key.
defp verify_signature(user, content, signature, cache_pid, _counter) do
case get_stark_infra_public_key(user, cache_pid) do
{:ok, public_key} ->
{
:ok,
(fn p ->
Ecdsa.verify?(
content,
signature,
p |> PublicKey.fromPem!()
)
end).(public_key)
}
{:error, errors} ->
{:error, errors}
end
end
# Returns the cached public key, fetching it from the API on a cache miss.
defp get_stark_infra_public_key(user, cache_pid) do
get_public_key(cache_pid) |> fill_public_key(user, cache_pid)
end
defp fill_public_key(public_key, user, cache_pid) when is_nil(public_key) do
case Request.fetch(:get, "public-key", query: %{limit: 1}, user: user) do
{:ok, response} -> {:ok, response |> extract_public_key(cache_pid)}
{:error, errors} -> {:error, errors}
end
end
defp fill_public_key(public_key, _user, _cache_pid) do
{:ok, public_key}
end
# Pulls the PEM content out of the API response and stores it in the cache.
defp extract_public_key(response, cache_pid) do
public_key =
JSON.decode!(response)["publicKeys"]
|> hd
|> (fn x -> x["content"] end).()
update_public_key(cache_pid, public_key)
public_key
end
defp get_public_key(cache_pid) do
Agent.get(cache_pid, fn map -> Map.get(map, :StarkInfra_public_key) end)
end
# Stores (or clears, when nil) the cached public key; returns the cache PID
# so the call can be piped.
defp update_public_key(cache_pid, public_key) do
Agent.update(cache_pid, fn map -> Map.put(map, :StarkInfra_public_key, public_key) end)
cache_pid
end
end
|
lib/utils/parse.ex
| 0.839931
| 0.51129
|
parse.ex
|
starcoder
|
defmodule Csvto.Builder do
@moduledoc """
Conveniences for building a Csvto
This module can be `use`-d into a Module to build a Csvto
## Example
```
defmodule MyCsvto do
use Csvto.Builder
csv :product do
field :name, :string, name: "Name"
field :number, :string, name: "Number"
field :description, :string, name: "Desc"
field :price, :float, name: "Price", validate: &(&1 >= 0)
field :images, {:array, :string}, name: "Images", separator: "|"
end
end
```
## Types and casting
When defining a schema, types of fields need to be given. The data coming from a csv file will be validated and cast
according to specified type.
Types are split into two categories, base type and compositional type
### Base types
The base types are
Type | Value example
:-------------------------|:------------------------------------
`:integer` | 1, 2, 3
`:float` | 1.0, 2.0, 3.0
`:boolean` | yes, no, no, off, 1, 0, true, false
`:string` | string
`:binary` | binary
`:decimal` | 1.0, 2.0, 3.0
`:naive_datetime` | `ISO8601` datetime
`:datetime` | `ISO8601` with timezone
`:date` | `ISO8601` date
`:time` | `ISO8601` time
### Compositional type
There is only one compositional type: `{:array, subtype}`
For the subtype, you can replace it with any valid simple types, such as `:string`
While parsing an array from a csv field, a `:separator` option can be specified to define how the
subtype values are separated; by default, it is `"|"`
## Reflection
Any Csvto defined with `use #{__MODULE__}` will generate the `__csvto__` function that can be
used for runtime introspection of the shcemas:
* `__csvto__(:schemas)` - Lists all the schemas defined in this module
* `__csvto__(:schema, name)` - Returns the `Csvto.Schema` identified by the given name on this module
"""
@type t :: struct
# Registers the accumulating :csvto_schemas attribute on the caller and
# imports csv/2; __before_compile__/1 later turns the accumulated schemas
# into reflection functions.
defmacro __using__(_opts) do
Module.register_attribute(__CALLER__.module, :csvto_schemas, accumulate: true)
quote do
import Csvto.Builder, only: [csv: 2]
@before_compile unquote(__MODULE__)
end
end
# Emits the __csvto__/1,2 reflection functions and a from/3 clause for every
# schema registered via csv/2, followed by catch-all clauses that raise an
# ArgumentError for unknown schema names.
defmacro __before_compile__(env) do
  schemas = Module.get_attribute(env.module, :csvto_schemas) |> Enum.reverse
  # Fixed: these two captures were corrupted to `"e_define_.../1` (invalid
  # syntax); they must be `&`-captures of the private quote_define_* helpers.
  schema_reflections = Enum.map(schemas, &quote_define_schema_reflection/1)
  schema_froms = Enum.map(schemas, &quote_define_from/1)
  quote do
    def __csvto__(:schemas), do: unquote(schemas |> Macro.escape)
    unquote(schema_reflections)
    # Catch-all: any schema name not generated above is an error.
    def __csvto__(:schema, schema) do
      raise ArgumentError, "undefined schema #{schema} for #{inspect __MODULE__}"
    end
    # Bodiless head declaring the default for opts, followed by the
    # per-schema clauses and a catch-all that raises.
    def from(path, schema, opts \\ [])
    unquote(schema_froms)
    def from(path, schema, opts) do
      raise ArgumentError, "undefined schema #{schema} for #{inspect __MODULE__}"
    end
  end
end
# Generates a `__csvto__(:schema, name)` clause that returns the escaped
# schema struct for this compile-time-known name.
defp quote_define_schema_reflection(schema) do
name = schema.name
quote do
def __csvto__(:schema, unquote(name)), do: unquote(schema |> Macro.escape)
end
end
# Generates a `from(path, name, opts)` clause that dispatches to the
# schema-specific `__from_<name>__/2` reader generated by `__from__/1`.
defp quote_define_from(schema) do
name = schema.name
quote do
def from(path, unquote(name), opts), do: unquote(:"__from_#{name}__")(path, opts)
end
end
@doc """
Defines a named csv schema.

Registers per-schema bookkeeping attributes on the caller module, walks the
`do` block collecting `field`/`fields` declarations, builds the schema
struct, and emits the generated reader plus any generated validator
functions.
"""
defmacro csv(name, [do: block]) do
module = __CALLER__.module
Module.register_attribute(module, :csvto_fields, accumulate: false)
Module.register_attribute(module, :csvto_field_index, accumulate: false)
Module.register_attribute(module, :csvto_index_mode, accumulate: false)
Module.register_attribute(module, :csvto_schema, accumulate: false)
Module.put_attribute(module, :csvto_fields, [])
Module.put_attribute(module, :csvto_field_index, -1)
Module.put_attribute(module, :csvto_index_mode, nil)
Module.put_attribute(module, :csvto_schema, name)
{_, validators} = escape(module, __CALLER__.file, block)
csvto_fields = Module.get_attribute(module, :csvto_fields) |> Enum.reverse
index_mode = Module.get_attribute(module, :csvto_index_mode)
schema = Csvto.Builder.build_schema(module, name, index_mode, csvto_fields)
Module.put_attribute(module, :csvto_schemas, schema)
[Csvto.Builder.__from__(name)] ++
validators
end
# Walks the schema body; each `field`/`fields` node is expanded eagerly via
# `__define_field__/7` (for its side effects on the module attributes) and
# its generated validator AST (or nil) is collected into the accumulator.
defp escape(module, file, block) do
Macro.prewalk(block, [], fn
{:field, env, args} = node, acc ->
{node, [apply(__MODULE__, :__define_field__, [module, file, Keyword.get(env, :line), :single | args]) | acc]}
{:fields, env, args} = node, acc ->
{node, [apply(__MODULE__, :__define_field__, [module, file, Keyword.get(env, :line), :aggregate | args]) | acc]}
node, acc ->
{node, acc}
end)
end
@doc """
Builds a `Csvto.Schema` struct from the accumulated field definitions.

Raises `ArgumentError` when the schema declared no fields. The internal
bookkeeping index mode (`:name` or `{:index, last_index}`) is normalized
to the public form (`:name` or `:index`) before being stored.
"""
def build_schema(module, name, _index_mode, []),
  do: raise(ArgumentError, "no fields are defined for schema #{name} in #{inspect(module)}")

def build_schema(module, name, index_mode, fields) do
  # Collapse {:index, last_index} down to the bare :index tag.
  index_mode =
    case index_mode do
      :name -> :name
      {:index, _} -> :index
    end

  %Csvto.Schema{module: module, name: name, index_mode: index_mode, fields: fields}
end
@doc false
# Invoked at compile time for each field/fields declaration. Validates the
# declaration against the schema state accumulated so far, registers the
# resulting Csvto.Field on the module, and returns the generated validator
# function AST (or nil).
def __define_field__(module, file, line, field_type, name, type, opts \\ []) do
meta = %{
module: module,
field_type: field_type,
field_index: Module.get_attribute(module, :csvto_field_index) + 1,
index_mode: Module.get_attribute(module, :csvto_index_mode),
schema: Module.get_attribute(module, :csvto_schema),
fields: Module.get_attribute(module, :csvto_fields),
file: file,
line: line
}
check_type!(meta, name, type)
meta = check_index_mode!(meta, name, opts)
check_duplicate_declaration!(meta, name)
check_aggregate_field!(meta, name, type, opts)
{validator, code} = convert_validator(meta, name, opts)
field = build_field(field_type, name, type, meta[:index_mode], validator, opts)
Module.put_attribute(module, :csvto_fields, [field|meta[:fields]])
Module.put_attribute(module, :csvto_field_index, meta[:field_index])
Module.put_attribute(module, :csvto_index_mode, meta[:index_mode])
code
end
# Builds the Csvto.Field struct; all option keys other than
# required/name/validator are passed through verbatim in `opts`.
defp build_field(field_type, name, type, index_mode, validator, opts) do
default = default_for_type(type, opts)
field_opts = opts |> Enum.into(%{}) |> Map.drop(~w{required name validator}a)
# In name mode columns are located by header name, so no index is stored.
field_index = case index_mode do
:name -> nil
{:index, index} -> index
end
%Csvto.Field{
name: name,
type: type,
field_type: field_type,
required?: Keyword.get(opts, :required, true),
field_name: Keyword.get(opts, :name),
field_index: field_index,
validator: validator,
default: default,
opts: field_opts
}
end
# Raises if a field with the same name was already declared in this schema.
# NOTE(review): the message reads `field.line`, but build_field/6 above does
# not set a :line key on Csvto.Field — confirm the struct defines it, else
# this raise itself fails with a KeyError.
defp check_duplicate_declaration!(meta, name) do
case Enum.find(meta[:fields], &(&1.name == name)) do
nil ->
:ok
field ->
raise ArgumentError, "duplicate field declaration for field #{inspect name} on #{meta[:line]} which has been defined on #{field.line}"
end
end
# Normalizes the :validator option into {validator_reference, generated_code}:
# nil, an atom (named function), {atom, opts}, or — for a captured anonymous
# function — the name of a generated wrapper plus its definition AST.
defp convert_validator(meta, name, opts) do
case Keyword.get(opts, :validator) do
nil ->
{nil, nil}
{:&, _, _} = validator ->
do_define_validator_fun_1(meta[:schema], name, validator)
validator when is_atom(validator) ->
{validator, nil}
{validator, opts} when is_atom(validator) ->
{{validator, opts}, nil}
validator ->
raise ArgumentError, "illegal validator for field #{name} defined on line #{meta[:line]}, validator should be anonymous function with 1 capture, atom or {atom, any} but got #{inspect validator}"
end
end
# Wraps a captured 1-arity validator in a named function so it can be
# referenced by name at runtime; returns {fun_name, definition_ast}.
defp do_define_validator_fun_1(schema, name, validator) do
validator_name = "__csvto_validate_#{schema}_#{name}__" |> String.to_atom
{validator_name, quote do
def unquote(validator_name)(value), do: (unquote(validator)).(value)
end}
end
# First field declared: infer the schema's index mode from whether the field
# carries a :name option (name mode) or not (positional mode starting at 0).
defp check_index_mode!(%{index_mode: nil} = meta, _name, opts) do
index_mode = case Keyword.get(opts, :name) do
nil ->
{:index, 0}
_ ->
:name
end
%{meta | index_mode: index_mode}
end
# Positional mode established: subsequent fields must not declare :name;
# their index defaults to previous index + 1 unless :index is given.
defp check_index_mode!(%{index_mode: {:index, index}} = meta, field_name, opts) do
index_mode = case Keyword.get(opts, :name) do
nil ->
index = Keyword.get(opts, :index, index + 1)
{:index, index}
_name ->
raise ArgumentError, "cannot define name option for field #{inspect field_name} defined on #{meta.line}, either all fields or none of them should declare name option"
end
%{meta | index_mode: index_mode}
end
# Name mode established: every subsequent field must declare :name.
defp check_index_mode!(%{index_mode: :name} = meta, field_name, opts) do
index_mode = case Keyword.get(opts, :name) do
nil ->
raise ArgumentError, "forget to define name option for field #{inspect field_name} defined on #{meta.line}, either all fields or none of them should declare name option"
_name ->
:name
end
%{meta | index_mode: index_mode}
end
# Ensures the declared type is a supported primitive; aggregate fields
# additionally require an {:array, subtype} type. Returns the type, or
# raises ArgumentError with the offending declaration's line number.
defp check_type!(meta, field_name, type) do
  if not Csvto.Type.primitive?(type) do
    raise ArgumentError, "invalid type #{inspect type} for field #{inspect field_name} defined on line #{meta[:line]}"
  end

  case meta[:field_type] do
    :single ->
      type

    :aggregate ->
      if Csvto.Type.array?(type) do
        type
      else
        raise ArgumentError, "invalid type #{inspect type} for aggregate field defined on line #{meta[:line]}, expect {:array, type} but got #{inspect type}"
      end
  end
end
# Validates constraints on aggregate fields. In index mode only one aggregate
# field may exist (it consumes all remaining columns); in name mode the
# aggregate field requires a :name whose prefix must not overlap any other
# field's name. Fixed "aggrate" typo in the error message.
defp check_aggregate_field!(%{field_type: :aggregate} = meta, field_name, _type, opts) do
  case meta[:index_mode] do
    {:index, _} ->
      if Enum.find(meta[:fields], &(&1.field_type == :aggregate)) do
        raise ArgumentError,
              "more than one aggregate field in #{inspect(meta[:schema])}: only one aggregate field can be defined in the index mode"
      end

      :ok

    :name ->
      name =
        case Keyword.fetch(opts, :name) do
          {:ok, name} ->
            name

          :error ->
            raise ArgumentError,
                  "name option is required for the aggregate field #{inspect(field_name)}"
        end

      if name_conflict_field = Enum.find(meta[:fields], &has_conflict_name?(name, &1)) do
        raise ArgumentError,
              "the name option of field #{inspect(field_name)} conflicts with the field #{inspect(name_conflict_field.name)}: #{name} and #{name_conflict_field.field_name} overlap each other"
      end

      :ok
  end
end

# A plain field may not appear after an aggregate field in index mode, since
# the aggregate field already consumes all remaining columns.
defp check_aggregate_field!(meta, _field_name, _type, _opts) do
  case meta[:index_mode] do
    {:index, _} ->
      if preceding_aggregate_field = Enum.find(meta[:fields], &(&1.field_type == :aggregate)) do
        raise ArgumentError,
              "#{inspect(preceding_aggregate_field.name)} should be the last field: aggregate field can only be the last field in index mode"
      end

      :ok

    _ ->
      :ok
  end
end
# Two column names conflict when one is a prefix of the other; only existing
# aggregate fields participate in this prefix check.
defp has_conflict_name?(name, %{field_type: :aggregate, field_name: another_name}) do
  String.starts_with?(name, another_name) or String.starts_with?(another_name, name)
end

defp has_conflict_name?(_name, _field), do: false

# A field's default value comes solely from its :default option.
defp default_for_type(_type, opts), do: Keyword.get(opts, :default)
@doc false
# NOTE(review): appears unused within this file — quote_define_schema_reflection/1
# generates the same clause; confirm external callers before removing.
def __schema__(schema_name, schema) do
quote do
def __csvto__(:schema, unquote(schema_name)) do
unquote(schema |> Macro.escape)
end
end
end
@doc false
# Generates the per-schema `__from_<name>__/2` reader delegating to
# Csvto.Reader.from/4.
def __from__(schema_name) do
quote do
def unquote(:"__from_#{schema_name}__")(path, opts) do
Csvto.Reader.from(path, __MODULE__, unquote(schema_name), opts)
end
end
end
end
|
lib/csvto/builder.ex
| 0.903431
| 0.770206
|
builder.ex
|
starcoder
|
defmodule Ferryman.Client do
  @moduledoc """
  This module provides the Client API to communicate with a Ferryman Server.

  ## Overview

  To start communicating with the Ferryman server, let's first start our redis process:

      iex> {:ok, redis} = Redix.start_link()

  Now we can simply call the functions, the server has implemented:

      iex> Ferryman.Client.call(redis, "mychannel", "add", [1, 2])
      {:ok, 3}
  """

  @doc """
  Executes a function on the server asynchronously, without a response.

  It is unknown whether the Ferryman server successfully handled the message.
  Returns `{:ok, subscriber_count}` from the PUBLISH, or `{:error, reason}`
  when encoding or publishing fails.
  """
  def cast(redis, channel, method, params) do
    req = JSONRPC2.Request.request({method, params})

    with {:ok, json_req} <- Jason.encode(req) do
      Redix.command(redis, ["PUBLISH", channel, json_req])
    end
  end

  @doc """
  Executes a function on the server and returns the first response.

  Returns `{:error, :no_subscriber}` when no server is subscribed to the
  channel. Encoding/transport errors from `multicall/5` are passed through
  instead of being mislabeled as `:no_subscriber` (previous behavior).

  ## Example

      iex> Ferryman.Client.call(redis, "mychannel", "add", [1, 2])
      {:ok, 3}
  """
  def call(redis, channel, method, params, timeout \\ 1) do
    case multicall(redis, channel, method, params, timeout) do
      [value | _] -> value
      [] -> {:error, :no_subscriber}
      {:error, _reason} = error -> error
    end
  end

  @doc """
  Executes a function on all subscribed servers and returns a list of responses.

  ## Example

      iex> Ferryman.Client.multicall(redis, "mychannel", "add", [1, 2])
      [{:ok, 3}]
  """
  def multicall(redis, channel, method, params, timeout \\ 1) do
    id = random_key()
    req = JSONRPC2.Request.request({method, params, id})

    with {:ok, json_req} <- Jason.encode(req),
         {:ok, server_count} <- Redix.command(redis, ["PUBLISH", channel, json_req]) do
      # PUBLISH returns the subscriber count; collect one reply per server.
      for _n <- :lists.seq(1, server_count), do: get_value(redis, id, timeout)
    end
  end

  # Blocks up to `timeout` seconds (BLPOP) for one reply pushed to the `id`
  # list, then decodes the JSON-RPC response and extracts its "result" field.
  # Non-matching steps (timeout, decode error) fall through untagged.
  defp get_value(redis, id, timeout) do
    with {:ok, [_key, value]} <- Redix.command(redis, ["BLPOP", id, timeout]),
         {:ok, %{"result" => result}} <- Jason.decode(value) do
      {:ok, result}
    end
  end

  # 10 random bytes, Base64-encoded — used as the per-request reply-list key.
  defp random_key() do
    Base.encode64(:crypto.strong_rand_bytes(10))
  end
end
|
lib/client.ex
| 0.770292
| 0.508422
|
client.ex
|
starcoder
|
defmodule Rajska do
@moduledoc """
Rajska is an elixir authorization library for [Absinthe](https://github.com/absinthe-graphql/absinthe).
It provides the following middlewares:
- `Rajska.QueryAuthorization`
- `Rajska.QueryScopeAuthorization`
- `Rajska.ObjectAuthorization`
- `Rajska.ObjectScopeAuthorization`
- `Rajska.FieldAuthorization`
## Installation
The package can be installed by adding `rajska` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:rajska, "~> 1.3.0"},
]
end
```
## Usage
Create your Authorization module, which will implement the `Rajska.Authorization` behaviour and contain the logic to validate user permissions and will be called by Rajska middlewares. Rajska provides some helper functions by default, such as `c:Rajska.Authorization.role_authorized?/2` and `c:Rajska.Authorization.has_user_access?/3`, but you can override them with your application needs.
```elixir
defmodule Authorization do
use Rajska,
valid_roles: [:user, :admin]
end
```
Available options and their default values:
```elixir
valid_roles: [:admin],
super_role: :admin,
default_rule: :default
```
Add your Authorization module to your `Absinthe.Schema` [context/1](https://hexdocs.pm/absinthe/Absinthe.Schema.html#c:context/1) callback and the desired middlewares to the [middleware/3](https://hexdocs.pm/absinthe/Absinthe.Middleware.html#module-the-middleware-3-callback) callback:
```elixir
def context(ctx), do: Map.put(ctx, :authorization, Authorization)
def middleware(middleware, field, %Absinthe.Type.Object{identifier: identifier})
when identifier in [:query, :mutation] do
middleware
|> Rajska.add_query_authorization(field, Authorization)
|> Rajska.add_object_authorization()
end
def middleware(middleware, field, object) do
Rajska.add_field_authorization(middleware, field, object)
end
```
The only exception is [Object Scope Authorization](#object-scope-authorization), which isn't a middleware, but an [Absinthe Phase](https://hexdocs.pm/absinthe/Absinthe.Phase.html). To use it, add it to your pipeline after the resolution:
```elixir
# router.ex
alias Absinthe.Phase.Document.Execution.Resolution
alias Absinthe.Pipeline
alias Rajska.ObjectScopeAuthorization
forward "/graphql", Absinthe.Plug,
schema: MyProjectWeb.Schema,
socket: MyProjectWeb.UserSocket,
pipeline: {__MODULE__, :pipeline} # Add this line
def pipeline(config, pipeline_opts) do
config
|> Map.fetch!(:schema_mod)
|> Pipeline.for_document(pipeline_opts)
|> Pipeline.insert_after(Resolution, ObjectScopeAuthorization)
end
```
Since Scope Authorization middleware must be used with Query Authorization, it is automatically called when adding the former.
"""
alias Rajska.Authorization
@doc """
Injects a default implementation of the `Rajska.Authorization` behaviour
into the caller module.

Options: `:valid_roles` (defaults to `[super_role]`), `:super_role`
(defaults to `:admin`) and `:default_rule` (defaults to `:default`).
All injected functions are overridable.
"""
defmacro __using__(opts \\ []) do
super_role = Keyword.get(opts, :super_role, :admin)
valid_roles = Keyword.get(opts, :valid_roles, [super_role])
default_rule = Keyword.get(opts, :default_rule, :default)
quote do
@behaviour Authorization
@spec config() :: Keyword.t()
def config do
Keyword.merge(unquote(opts), [
valid_roles: unquote(valid_roles),
super_role: unquote(super_role),
default_rule: unquote(default_rule)
])
end
def get_current_user(%{current_user: current_user}), do: current_user
def get_ip(%{ip: ip}), do: ip
def get_user_role(%{role: role}), do: role
def get_user_role(nil), do: nil
def default_rule, do: unquote(default_rule)
def valid_roles, do: [:all | unquote(valid_roles)]
def not_scoped_roles, do: [:all, unquote(super_role)]
defguard is_super_role(role) when role === unquote(super_role)
def super_role?(role) when is_super_role(role), do: true
def super_role?(_user_role), do: false
def role_authorized?(_user_role, :all), do: true
def role_authorized?(role, _allowed_role) when is_super_role(role), do: true
def role_authorized?(user_role, allowed_role) when is_atom(allowed_role), do: user_role === allowed_role
def role_authorized?(user_role, allowed_roles) when is_list(allowed_roles), do: user_role in allowed_roles
def has_user_access?(%user_struct{id: user_id} = current_user, %scope{} = struct, unquote(default_rule)) do
super_user? = current_user |> get_user_role() |> super_role?()
owner? = (user_struct === scope) && (user_id === struct.id)
super_user? || owner?
end
def unauthorized_message(_resolution), do: "unauthorized"
def unauthorized_query_scope_message(_resolution, object_type) do
"Not authorized to access this #{replace_underscore(object_type)}"
end
defp replace_underscore(string) when is_binary(string), do: String.replace(string, "_", " ")
defp replace_underscore(atom) when is_atom(atom) do
atom
|> Atom.to_string()
|> replace_underscore()
end
def unauthorized_object_scope_message(_result_object, object) do
"Not authorized to access object #{object.identifier}"
end
def unauthorized_object_message(_resolution, object), do: "Not authorized to access object #{object.identifier}"
def unauthorized_field_message(_resolution, field), do: "Not authorized to access field #{field}"
def super_user?(context) do
context
|> get_current_user()
|> get_user_role()
|> super_role?()
end
def context_role_authorized?(context, allowed_role) do
context
|> get_current_user()
|> get_user_role()
|> role_authorized?(allowed_role)
end
def context_user_authorized?(context, scoped_struct, rule) do
context
|> get_current_user()
|> has_user_access?(scoped_struct, rule)
end
defoverridable Authorization
end
end
@doc false
# Dispatches `fnc_name` to the Authorization module stored in Absinthe's
# context; raises when the context carries no :authorization key.
def apply_auth_mod(context, fnc_name, args \\ [])
def apply_auth_mod(%{authorization: authorization}, fnc_name, args) do
apply(authorization, fnc_name, args)
end
def apply_auth_mod(_context, _fnc_name, _args) do
raise "Rajska authorization module not found in Absinthe's context"
end
defdelegate add_query_authorization(middleware, field, authorization), to: Rajska.Schema
defdelegate add_object_authorization(middleware), to: Rajska.Schema
defdelegate add_field_authorization(middleware, field, object), to: Rajska.Schema
end
|
lib/rajska.ex
| 0.829457
| 0.808483
|
rajska.ex
|
starcoder
|
defmodule Indicatorex.MACD do
  @moduledoc """
  Moving Average Convergence/Divergence (MACD) indicator built on top of
  `Indicatorex.EMA`.
  """

  @type t :: %Indicatorex.MACD{
          fast: number(),
          slow: number(),
          dif: number(),
          v: [Indicatorex.MACD.Sericalize.t()]
        }
  defstruct fast: 0, slow: 0, dif: 0, v: [%Indicatorex.MACD.Sericalize{}]

  @doc """
  MACD calc function.

  `fast` and `slow` are the EMA spans (fast must be strictly less than slow),
  `diff` is the span of the signal-line EMA over the fast/slow difference.
  """
  @spec calc([number()], number(), number(), number()) ::
          {:error, String.t()} | {:ok, Indicatorex.MACD.t()}
  def calc(data, fast \\ 12, slow \\ 26, diff \\ 9), do: run(data, fast, slow, diff)

  defp run(_, fast, slow, _) when slow <= fast and is_integer(slow + fast),
    do: {:error, "fast must less than slow"}

  defp run(data, fast, slow, diff) when is_integer(fast + slow + diff) do
    alias Indicatorex.EMA

    {:ok, %EMA{span: ^fast, v: ema_f}} = EMA.calc(data, fast)
    {:ok, %EMA{span: ^slow, v: ema_s}} = EMA.calc(data, slow)
    {:ok, dif_fs} = differ(ema_f, ema_s)
    {:ok, %EMA{span: ^diff, v: ema_d}} = EMA.calc(dif_fs, diff)

    case macd(ema_f, ema_s, ema_d) do
      {:ok, v} -> {:ok, %Indicatorex.MACD{fast: fast, slow: slow, dif: diff, v: v}}
      error -> error
    end
  end

  # Zips the fast EMA, slow EMA and signal EMA into Sericalize entries.
  # Accumulates by prepending (O(1)) and reverses once at the end, instead of
  # appending with `++` on every step (O(n²) overall in the original).
  defp macd(ema_f, ema_s, ema_d, resp \\ [])
  defp macd([], [], [], []), do: {:error, "run macd empty"}
  defp macd([], [], [], resp), do: {:ok, Enum.reverse(resp)}

  defp macd([fh | ft], [sh | st], [dh | dt], resp) do
    alias Indicatorex.MACD.Sericalize

    dif = fh - sh

    entry = %Sericalize{
      ema_f: fh,
      ema_s: sh,
      dea: dh,
      dif: dif,
      macd: 2 * (dif - dh)
    }

    macd(ft, st, dt, [entry | resp])
  end

  @doc """
  ema_f and ema_s differ list generate.

  Returns `{:ok, element_wise_differences}` or `{:error, reason}` when the
  lists are empty or their lengths differ.
  """
  @spec differ([number()], [number()]) :: {:error, String.t()} | {:ok, [number()]}
  def differ(f, s), do: diff(f, s)

  defp diff(ema_f, ema_s, resp \\ [])

  # Length mismatch is only checked on entry (resp == []); recursion keeps
  # the lists in lock-step afterwards.
  defp diff(ema_f, ema_s, []) when length(ema_f) != length(ema_s),
    do:
      {:error,
       "faster and slower length not match, faster is #{length(ema_f)}, slower is #{length(ema_s)}"}

  defp diff([], [], []), do: {:error, "diff empty"}
  defp diff([], [], resp), do: {:ok, Enum.reverse(resp)}
  # Prepend + single reverse instead of `++` append per element.
  defp diff([fh | ft], [sh | st], resp), do: diff(ft, st, [fh - sh | resp])
end
|
lib/macd.ex
| 0.70202
| 0.523481
|
macd.ex
|
starcoder
|
defmodule Applicative do
  @typedoc """
  Applicative dictionary

  intuitive types:
  * `pure`   : a -> f a                        # mandatory
  * `apA`    : (f (a -> b), f a) -> f b        # mandatory unless liftA2 given
  * `liftA2` : ((a, b -> c), f a, f b) -> f c  # derived from apA if absent
  * `leftA`  : (f a, f b) -> f a               # default implementation provided
  * `rightA` : (f a, f b) -> f b               # default implementation provided
  """
  @type t :: %__MODULE__{
    functor: Functor.t,
    pure: (any -> any),
    apA: (any, any -> any),
    liftA2: (any, any, any -> any),
    leftA: (any, any -> any),
    rightA: (any, any -> any),
  }

  def __struct__, do: %{
    __struct__: __MODULE__,
    functor: Functor.__struct__,
    pure: fn _ -> raise("Applicative: missing definition for pure") end,
    apA: fn _, _ -> raise("Applicative: missing definition for apA") end,
    # arity 3: liftA2 is always invoked as liftA2.(f, mx, my); the stub was
    # previously arity 2, so callers hit BadArityError instead of this message.
    liftA2: fn _, _, _ -> raise("Applicative: missing definition for liftA2") end,
    leftA: fn _, _ -> raise("Applicative: missing definition for leftA") end,
    # fixed: previously raised with the message for leftA
    rightA: fn _, _ -> raise("Applicative: missing definition for rightA") end,
  }

  def __struct__(kv) do
    required_keys = [:functor, :pure, :apA, :liftA2, :leftA, :rightA]

    {map, keys} =
      Enum.reduce(kv, {__struct__(), required_keys}, fn {key, val}, {map, keys} ->
        {Map.replace!(map, key, val), List.delete(keys, key)}
      end)

    case keys do
      [] ->
        map

      _ ->
        raise ArgumentError,
              "the following keys must also be given when building " <>
                "struct #{inspect(__MODULE__)}: #{inspect(keys)}"
    end
  end

  @doc """
  Builds an Applicative dictionary from a map or keyword list containing
  `:functor`, `:pure` and at least one of `:apA`/`:liftA2`; the missing one
  of that pair plus `:leftA`/`:rightA` are derived when not supplied.
  """
  def define(t) do
    t = Map.new(t)
    functor = Map.fetch!(t, :functor)
    pure = Map.fetch!(t, :pure)

    if not (Map.has_key?(t, :apA) or Map.has_key?(t, :liftA2)) do
      raise KeyError, term: t, key: ":apA, or :liftA2"
    end

    {apA, liftA2} = case t do
      %{apA: apA, liftA2: liftA2} -> {apA, liftA2}
      # liftA2 f mx my = pure(curry f) <*> mx <*> my
      %{apA: apA} -> {apA, fn f, mx, my -> pure.(fn x -> fn y -> f.(x, y) end end) |> apA.(mx) |> apA.(my) end}
      # apA mf mx = liftA2 ($) mf mx
      %{liftA2: liftA2} -> {fn mf, mx -> liftA2.(fn f, x -> f.(x) end, mf, mx) end, liftA2}
    end

    leftA = Map.get(t, :leftA, fn mx, my -> liftA2.(fn x, _ -> x end, mx, my) end)
    rightA = Map.get(t, :rightA, fn mx, my -> liftA2.(fn _, y -> y end, mx, my) end)

    %__MODULE__{
      functor: functor,
      pure: pure,
      apA: apA,
      liftA2: liftA2,
      leftA: leftA,
      rightA: rightA
    }
  end

  # liftA f mx = fmap f mx (delegates to the underlying functor's map)
  def liftA(f, mx, dict), do: dict.functor.map.(f, mx)

  # liftA3 lifts a ternary function over three applicative values.
  def liftA3(f, mx, my, mz, dict), do: dict.liftA2.(fn x, y -> fn z -> f.(x, y, z) end end, mx, my) |> dict.apA.(mz)
end
|
typeclassopedia/lib/applicative.ex
| 0.837487
| 0.634685
|
applicative.ex
|
starcoder
|
defmodule WiseHomex do
@moduledoc """
Api Client for Wise Home
## Usage:
### Getting a configuration
First, get a configuration struct by invoking `new_config/2` or `new_config/3` with either `:api_key`, `:plain` or `:auth_header` as first argument.
```
config = WiseHomex.new_config(:api_key, "your_api_key")
config = WiseHomex.new_config(:plain, {"user_name", "password"})
config = WiseHomex.new_config(:auth_header, "auth_header")
```
Optionally you can call `new_config/3` with a keyword list for overriding the default configuration values, `base_url`, `timeout` and `api_version`
```
config = WiseHomex.new_config(:api_key, "your_api_key", timeout: 60_000, base_url: "https://another.wisehome.server.dk", api_version: "v4")
```
### Making requests
Next, use that `config` to do requests to the Wise Home API.
```
config |> WiseHomex.get_gateways()
```
Most `GET`-requests have a `query` that will be encoded and included.
```
config |> WiseHomex.get_gateways(%{"include" => "sim"})
```
Many `POST` and `PATCH` requests take a map for `attributes` and `relationships` for the created or updated entity, for example:
```
attributes = %{move_in_date: "2019-01-01", move_out_date: "2019-02-01"}
relationships = %{
"household" => %{
data: %{
type: "households",
id: "123"
}
},
"tenant" => %{
data: %{
type: "accounts",
id: "987"
}
}
}
config |> WiseHomex.create_tenancy(attributes, relationships)
```
If the request is successful, you will receive a response of `{:ok, data}` where data is the included Ecto models. If the response is empty, the response will be `{:ok, :empty}`.
If unsuccessful, the response will be one of
```
{:invalid_request, map | nil}
{:not_authorized, map | nil}
{:not_found, map | nil}
:server_error
:bad_gateway
{:service_not_available, map | nil}
:econnrefused
:connect_timeout
:closed
```
"""
@behaviour WiseHomex.ApiClientBehaviour
use WiseHomex.Creator
@doc """
Get a guest configuration without an api key for the API Client
"""
defdelegate anonymous_config(opts \\ []), to: WiseHomex.Config
@doc """
Get a new configuration for the API Client
"""
defdelegate new_config(auth_type, credentials, opts \\ []), to: WiseHomex.Config
# Get the ApiClient implementation to use; swappable via application config
# (e.g. a mock implementation in tests).
defp api_client(), do: Application.get_env(:wise_homex, :api_client_impl, WiseHomex.ApiClientImpl)
# Account invitation
@doc """
Create an account invitation
"""
def create_account_invitation(config, account_id, attrs),
do: api_client().create_account_invitation(config, account_id, attrs)
# Add start readings
@doc """
Add zero-valued start readings for all devices that does not have any readings near the start of a statement period
"""
def add_start_readings(config, statement_id), do: api_client().add_start_readings(config, statement_id)
# Angel Note
@doc """
Get an angel note by a `target_type` and a `target_id`
"""
def get_angel_note(config, target_type, target_id), do: api_client().get_angel_note(config, target_type, target_id)
# Bmeters Keys
@doc """
Upload a bmeters key file
"""
def upload_bmeters_keys(config, opts), do: api_client().upload_bmeters_keys(config, opts)
# Calculate missing readings
@doc """
Calculate missing readings for a statement
"""
def calculate_missing_readings(config, statement_id),
do: api_client().calculate_missing_readings(config, statement_id)
# Device
@doc """
Authorize a device
"""
def authorize_device(config, device_id), do: api_client().authorize_device(config, device_id)
@doc """
Deauthorize a device
"""
def deauthorize_device(config, device_id), do: api_client().deauthorize_device(config, device_id)
@doc """
Import devices from CSV
"""
def import_devices(config, attrs, rels), do: api_client().import_devices(config, attrs, rels)
# Device Balancer
@doc """
Rebalance devices
"""
def rebalance_devices(config, admin_id), do: api_client().rebalance_devices(config, admin_id)
# Email Settings
@doc """
Update EmailSettings for a device
"""
def update_account_email_settings(config, account_id, id, attrs),
do: api_client().update_account_email_settings(config, account_id, id, attrs)
# Gateway
@doc """
Lock a gateway
"""
def lock_gateway(config, id), do: api_client().lock_gateway(config, id)
@doc """
Restart a gateway
"""
def restart_gateway(config, id), do: api_client().restart_gateway(config, id)
@doc """
Unlock a gateway
"""
def unlock_gateway(config, id, seconds), do: api_client().unlock_gateway(config, id, seconds)
# KEM uploads
@doc """
Upload a KEM file
"""
def upload_kem(config, opts), do: api_client().upload_kem(config, opts)
# Ping
@doc """
Ping the Wise Home API to check availability and get authentication status.
* Optional includes: `user`, `account`
Example:
```
config |> WiseHomex.ping(%{"include" => "user,account"})
```
"""
def ping(config, query), do: api_client().ping(config, query)
# Property Syncs
@doc """
Trigger sync of an existing property
"""
def sync_property(config, property_id), do: api_client().sync_property(config, property_id)
# Property Syncs UNIK
@doc """
Create a synced property from unik
"""
def create_synced_property_unik(config, property_number, company_number, admin_id),
do: api_client().create_synced_property_unik(config, property_number, company_number, admin_id)
# Reports
@doc """
Get reports for a device
"""
def get_device_reports(config, id), do: api_client().get_device_reports(config, id)
@doc """
Create latest report for a device
"""
def create_latest_report(config, device_id, query \\ %{}),
do: api_client().create_latest_report(config, device_id, query)
# Radiator
@doc """
Import radiators
"""
def import_radiators(config, attrs), do: api_client().import_radiators(config, attrs)
# READy — matching of READy installations against devices
def ready_installation_matches(config, rels, query \\ %{}),
do: api_client().ready_installation_matches(config, rels, query)
def ready_installation_already_matched(config, rels, query \\ %{}),
do: api_client().ready_installation_already_matched(config, rels, query)
def link_ready_installation_match(config, rels, query \\ %{}),
do: api_client().link_ready_installation_match(config, rels, query)
def unlink_ready_installation_match(config, rels, query \\ %{}),
do: api_client().unlink_ready_installation_match(config, rels, query)
# Statement
@doc """
Create the next statement from a current statement
"""
def create_next_statement(config, prev_statement_id, query \\ %{}),
do: api_client().create_next_statement(config, prev_statement_id, query)
# Wmbus Cache
@doc """
Get wmbus cache
"""
def get_wmbus_cache(config, gateway_id, query \\ %{}), do: api_client().get_wmbus_cache(config, gateway_id, query)
@doc """
Refresh wmbus cache
"""
def refresh_wmbus_cache(config, gateway_id), do: api_client().refresh_wmbus_cache(config, gateway_id)
end
|
lib/wise_homex.ex
| 0.849191
| 0.776453
|
wise_homex.ex
|
starcoder
|
defmodule Exop.TypeValidation do
@moduledoc """
Type checks for contract parameters.

`type_supported?/2` validates a declared type at contract-definition time;
`check_value/2` checks a runtime value against a declared type.
"""
@known_types ~w(boolean integer float string tuple struct map list atom function keyword module uuid)a
# Generate one accepting type_supported?/2 clause per known type.
Enum.each(@known_types, fn type ->
def type_supported?(unquote(type), _opts), do: :ok
end)
# A missing type declaration is fine; when a :struct option is present, the
# referenced struct module must actually exist.
def type_supported?(nil, nil), do: :ok
def type_supported?(nil, []), do: :ok
def type_supported?(nil, opts) when is_list(opts) do
if Keyword.has_key?(opts, :struct) do
opts
|> Keyword.get(:struct)
|> check_struct_exists()
else
:ok
end
end
def type_supported?(nil, _opts) do
:ok
end
def type_supported?(unknown_type, _opts) do
{:error, {:unknown_type, unknown_type}}
end
def known_types, do: @known_types
# Runtime checks: one clause per type, guard-based where possible.
# NOTE(review): :struct is in @known_types but has no check_value/2 clause,
# so such values fall through to the catch-all `false` — confirm struct
# values are validated elsewhere.
def check_value(check_item, :boolean) when is_boolean(check_item), do: true
def check_value(check_item, :integer) when is_integer(check_item), do: true
def check_value(check_item, :float) when is_float(check_item), do: true
def check_value(check_item, :string) when is_binary(check_item), do: true
def check_value(check_item, :tuple) when is_tuple(check_item), do: true
def check_value(check_item, :map) when is_map(check_item), do: true
def check_value(check_item, :list) when is_list(check_item), do: true
def check_value(check_item, :atom) when is_atom(check_item), do: true
def check_value(check_item, :function) when is_function(check_item), do: true
# Shallow keyword check: only the first element is verified to be an
# {atom, _} pair.
def check_value([] = _check_item, :keyword), do: true
def check_value([{atom, _} | _] = _check_item, :keyword) when is_atom(atom), do: true
def check_value(check_item, :module) when is_atom(check_item) do
Code.ensure_loaded?(check_item)
end
def check_value(check_item, :uuid) when is_binary(check_item), do: validate_uuid(check_item)
def check_value(_, _), do: false
# Validates the canonical 8-4-4-4-12 hex UUID layout; c/1 throws :error on
# any non-hex character, which is converted to false below.
@spec validate_uuid(binary()) :: boolean()
defp validate_uuid(
<<a1, a2, a3, a4, a5, a6, a7, a8, ?-, b1, b2, b3, b4, ?-, c1, c2, c3, c4, ?-, d1, d2, d3,
d4, ?-, e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12>>
) do
<<c(a1), c(a2), c(a3), c(a4), c(a5), c(a6), c(a7), c(a8), ?-, c(b1), c(b2), c(b3), c(b4), ?-,
c(c1), c(c2), c(c3), c(c4), ?-, c(d1), c(d2), c(d3), c(d4), ?-, c(e1), c(e2), c(e3), c(e4),
c(e5), c(e6), c(e7), c(e8), c(e9), c(e10), c(e11), c(e12)>>
catch
:error -> false
else
_ -> true
end
defp validate_uuid(_), do: false
# Maps a hex digit to its lowercase form; throws :error for anything else.
defp c(?0), do: ?0
defp c(?1), do: ?1
defp c(?2), do: ?2
defp c(?3), do: ?3
defp c(?4), do: ?4
defp c(?5), do: ?5
defp c(?6), do: ?6
defp c(?7), do: ?7
defp c(?8), do: ?8
defp c(?9), do: ?9
defp c(?A), do: ?a
defp c(?B), do: ?b
defp c(?C), do: ?c
defp c(?D), do: ?d
defp c(?E), do: ?e
defp c(?F), do: ?f
defp c(?a), do: ?a
defp c(?b), do: ?b
defp c(?c), do: ?c
defp c(?d), do: ?d
defp c(?e), do: ?e
defp c(?f), do: ?f
defp c(_), do: throw(:error)
# A struct "exists" when its module compiles and exports __struct__/0.
defp check_struct_exists(struct_name) when is_atom(struct_name) do
with {:module, _} <- Code.ensure_compiled(struct_name),
true <- function_exported?(struct_name, :__struct__, 0) do
:ok
else
_ -> {:error, {:unknown_struct, struct_name}}
end
end
defp check_struct_exists(%_{}), do: :ok
defp check_struct_exists(unknown_struct), do: {:error, {:unknown_struct, unknown_struct}}
end
|
lib/exop/validations/type_validation.ex
| 0.596198
| 0.530723
|
type_validation.ex
|
starcoder
|
defmodule Vivaldi.Simulation.Runner do
  @moduledoc """
  Implements all the boilerplate stuff required for both the centralized and distributed algorithm.
  """

  alias Vivaldi.Simulation.Vector

  @doc """
  Runs a simulation: places `n` points on a circle of `radius`, derives their
  pairwise latency matrix, then iteratively recomputes coordinates with
  `compute_next_x_i_func`. Returns `{coordinates, latency_matrix, error}`.
  """
  def run(n, radius, compute_next_x_i_func) do
    points = create_coordinate_cluster(n, type: :circular, radius: radius)
    latencies = get_latency_matrix(points)
    initial_x = get_initial_x(n, radius)

    computed_x =
      compute_coordinates(n, latencies, initial_x, 0.1, 2000, 0, compute_next_x_i_func)

    computed_x_list = for i <- 0..(n - 1), do: computed_x[i]
    computed_latencies = get_latency_matrix(computed_x_list)
    error = compute_total_error(n, latencies, computed_latencies)

    {computed_x_list, computed_latencies, error}
  end

  @doc """
  Updates one node's coordinate per iteration (round-robin over the nodes)
  until `max_iterations` is reached, then returns the coordinate map.
  """
  def compute_coordinates(n, latencies, x, t, max_iterations, iteration, compute_next_x_i_func) do
    if iteration == max_iterations do
      x
    else
      i = rem(iteration, n)
      updated = Map.put(x, i, compute_next_x_i_func.(n, latencies, x, i, t))

      compute_coordinates(n, latencies, updated, t, max_iterations, iteration + 1, compute_next_x_i_func)
    end
  end

  @doc """
  Mean relative error between the expected and computed latency matrices over
  the lower triangle; diagonal entries contribute zero and are excluded from
  the denominator.
  """
  def compute_total_error(n, expected_latencies, computed_latencies) do
    pairwise_errors =
      Enum.flat_map(0..(n - 1), fn i ->
        a_i = Enum.at(expected_latencies, i)
        b_i = Enum.at(computed_latencies, i)

        Enum.map(0..i, fn j ->
          a_ij = Enum.at(a_i, j)
          b_ij = Enum.at(b_i, j)

          if a_ij != 0 do
            abs(a_ij - b_ij) / a_ij
          else
            0
          end
        end)
      end)

    Enum.sum(pairwise_errors) / (Enum.count(pairwise_errors) - n)
  end

  @doc "Initial coordinate map: every node starts at the 2-D origin."
  def get_initial_x(n, _radius) do
    Map.new(0..(n - 1), fn i -> {i, [0, 0]} end)
  end

  @doc "Places `n` points evenly on a circle of radius `r` around the origin."
  def create_coordinate_cluster(n, type: :circular, radius: r) do
    angle_step = 2 * :math.pi() / n

    for i <- 0..(n - 1) do
      theta = i * angle_step
      [r * :math.cos(theta), r * :math.sin(theta)]
    end
  end

  @doc "Pairwise distance matrix between all coordinates."
  def get_latency_matrix(coordinates) do
    count = Enum.count(coordinates)

    for i <- 0..(count - 1) do
      x_i = Enum.at(coordinates, i)

      for j <- 0..(count - 1) do
        Vector.distance(x_i, Enum.at(coordinates, j))
      end
    end
  end
end
|
vivaldi/lib/simulation/runner.ex
| 0.785061
| 0.550668
|
runner.ex
|
starcoder
|
defmodule Mix.Tasks.Changix.Gen.Changelog do
  @moduledoc """
  Generates a new changelog entry.

  ## Command line options:

    - `--folder` or `-f`. Optional, defaults to `/changelog`.
    - `--kind` or `-k`. Optional, defaults to an empty string.
    - `--quiet` or `-q`. Optional.
    - `title`. Mandatory.

  ## Examples:

  ```
  mix changix.gen.changelog My new feature
  mix changix.gen.changelog --folder /priv/changelog --kind bugfix Fixed login
  mix changix.gen.changelog -f /priv/changelog -k bugfix Fixed login
  ```
  """
  alias Mix.Generator
  use Mix.Task

  @shortdoc "Generates a new changelog entry"

  @template "priv/templates/changelog.md.eex"
  @default_folder "changelog"

  @impl Mix.Task
  def run(args) do
    {switches, title_parts, _invalid} =
      OptionParser.parse(args,
        switches: [folder: :string, kind: :string, quiet: :boolean],
        aliases: [f: :folder, k: :kind, q: :quiet]
      )

    # Every non-switch argument is part of the mandatory title.
    if blank?(title_parts) do
      Mix.raise("expected mix changix.gen.changelog to take the changelog title")
    end

    kind = Keyword.get(switches, :kind, "")
    folder = Keyword.get(switches, :folder, @default_folder)
    quiet = Keyword.get(switches, :quiet, false)
    folder = File.cwd!() |> Path.join(folder)
    changed_at = local_now()

    Generator.create_directory(folder, quiet: quiet)

    Generator.copy_template(
      :changix |> Application.app_dir() |> Path.join(@template),
      Path.join(folder, file_name(changed_at, title_parts)),
      [
        title: Enum.join(title_parts, " "),
        changed_at: NaiveDateTime.to_iso8601(changed_at),
        kind: kind
      ],
      quiet: quiet
    )
  end

  # Current wall-clock time in the local timezone as a NaiveDateTime
  # (second precision).
  defp local_now do
    {{year, month, day}, {hour, minute, second}} = :erlang.localtime()
    {:ok, ndt} = NaiveDateTime.new(year, month, day, hour, minute, second)
    ndt
  end

  # Builds "<yyyymmddhhmmss>-<lowercased_title_joined_by_underscores>.md".
  defp file_name(changed_at, title_parts) do
    file_title = title_parts |> Enum.map(&String.downcase/1) |> Enum.join("_")
    changed_at = changed_at |> NaiveDateTime.to_string() |> String.replace(["-", " ", ":"], "")
    "#{changed_at}-#{file_title}.md"
  end

  # A title is blank when absent, or when its single part is whitespace-only.
  # Two or more parts are never blank.
  defp blank?([]), do: true
  defp blank?([title_part]), do: String.trim(title_part) == ""
  defp blank?([_ | _]), do: false
end
|
lib/mix/tasks/changix.gen.changelog.ex
| 0.88513
| 0.585546
|
changix.gen.changelog.ex
|
starcoder
|
defmodule DataTracer.Server do
  use GenServer
  require Logger

  @moduledoc """
  Reads and writes the traced data to ETS

  Data format `{key, timestamp, value}`
  """

  @table_name :data_tracer

  defmodule State do
    # table_name: registered ETS table name; table: the table returned by :ets.new/2.
    defstruct [:table_name, :table]
  end

  @doc """
  Options:

    * `:table` - The ETS table to use for writing (optional)
  """
  def start_link(opts, name \\ __MODULE__) do
    GenServer.start_link(__MODULE__, opts, name: name)
  end

  @impl GenServer
  def init(opts) do
    Logger.debug("DataTracer starting!")
    table_name = Keyword.get(opts, :table, @table_name)
    table = new(table_name)
    {:ok, %State{table_name: table_name, table: table}}
  end

  @doc """
  Returns all entries as `[key, timestamp, value]` lists, newest first.

  Reads the table directly (no GenServer round-trip); the table is
  `:protected` and `:named_table`, so reads work from any process.
  """
  def all(opts \\ []) do
    table_name = Keyword.get(opts, :table, @table_name)

    :ets.match(table_name, {:"$1", :"$2", :"$3"})
    |> Enum.sort(fn [_, a, _], [_, b, _] ->
      case NaiveDateTime.compare(a, b) do
        :lt -> false
        :eq -> true
        :gt -> true
      end
    end)
  end

  @doc """
  Returns the most recently stored value, or `nil` when the table is empty.
  """
  def last(opts \\ []) do
    # Previously this destructured List.first/1 unconditionally and crashed
    # with a MatchError on an empty table; return nil instead.
    case all(opts) do
      [[_key, _timestamp, value] | _rest] -> value
      [] -> nil
    end
  end

  @doc """
  Stores `value` under the optional `:key` with timestamp `:time`
  (defaults to now, UTC). Returns `value` so it can be used inline
  in a pipeline. `:tracer` selects the server process.
  """
  def store(value, opts \\ []) do
    key = Keyword.get(opts, :key)
    time = Keyword.get(opts, :time, NaiveDateTime.utc_now())
    tracer = Keyword.get(opts, :tracer, __MODULE__)
    GenServer.call(tracer, {:store_key, key, time, value})
    value
  end

  @doc """
  Returns the list of values stored under `key`, or `nil` when none exist.
  """
  def lookup(key, opts \\ []) do
    table_name = Keyword.get(opts, :table, @table_name)

    :ets.lookup(table_name, key)
    |> Enum.map(fn {_, _, val} -> val end)
    |> case do
      [] -> nil
      val -> val
    end
  end

  @doc """
  Deletes all traced data by dropping and recreating the table.
  """
  def clear(opts \\ []) do
    tracer = Keyword.get(opts, :tracer, __MODULE__)
    GenServer.call(tracer, :clear)
  end

  @impl GenServer
  def handle_call({:store_key, key, timestamp, value}, _from, state) do
    %State{table: table} = state

    # Warn level is deliberate: stored traces should stand out in the logs.
    if key do
      Logger.warn("Storing #{inspect(key)}:#{inspect(timestamp)} => #{inspect(value, pretty: true)}")
    else
      Logger.warn("Storing #{inspect(timestamp)} => #{inspect(value, pretty: true)}")
    end

    :ets.insert(table, {key, timestamp, value})
    {:reply, :ok, state}
  end

  def handle_call(:clear, _from, state) do
    %State{table_name: table_name} = state
    :ets.delete(table_name)
    table = new(table_name)
    {:reply, :ok, %State{state | table: table}}
  end

  # :duplicate_bag keeps every insert, so multiple values can share a key.
  defp new(table_name) do
    :ets.new(table_name, [:duplicate_bag, :protected, :named_table])
  end
end
|
lib/data_tracer/server.ex
| 0.765418
| 0.494263
|
server.ex
|
starcoder
|
defmodule ExComponentSchema.Validator.Error do
  # credo:disable-for-this-file Credo.Check.Readability.ModuleDoc

  # Top-level validation error: `error` holds one of the keyword-specific
  # structs below; `path` is the location in the validated document.
  defstruct [:error, :path]

  # One nested struct per JSON-Schema-style validation keyword. Most carry
  # the expected constraint and/or the offending actual value.

  defmodule AdditionalItems do
    defstruct([:additional_indices])
  end

  defmodule AdditionalProperties do
    defstruct([])
  end

  defmodule AllOf do
    defstruct([:invalid])
  end

  defmodule AnyOf do
    defstruct([:invalid])
  end

  defmodule Const do
    defstruct([:expected])
  end

  defmodule Contains do
    defstruct([:empty?, :invalid])
  end

  defmodule ContentEncoding do
    defstruct([:expected])
  end

  defmodule ContentMediaType do
    defstruct([:expected])
  end

  defmodule Dependencies do
    defstruct([:property, :missing])
  end

  # NOTE: this nested module aliases `Enum` to it for the remainder of the
  # enclosing module — safe here because no code below calls Elixir's Enum.
  defmodule Enum do
    defstruct([:enum, :actual])
  end

  # NOTE: `False` is the module Elixir.False, distinct from the atom `false`.
  defmodule False do
    defstruct([])
  end

  defmodule Format do
    defstruct([:expected])
  end

  defmodule IfThenElse do
    defstruct([:branch, :errors])
  end

  defmodule InvalidAtIndex do
    defstruct([:index, :errors])
  end

  defmodule ItemsNotAllowed do
    defstruct([])
  end

  defmodule MaxItems do
    defstruct([:expected, :actual])
  end

  defmodule MaxLength do
    defstruct([:expected, :actual])
  end

  defmodule MaxProperties do
    defstruct([:expected, :actual])
  end

  defmodule Maximum do
    defstruct([:expected, :exclusive?])
  end

  defmodule MinItems do
    defstruct([:expected, :actual])
  end

  defmodule MinLength do
    defstruct([:expected, :actual])
  end

  defmodule MinProperties do
    defstruct([:expected, :actual])
  end

  defmodule Minimum do
    defstruct([:expected, :exclusive?])
  end

  defmodule MultipleOf do
    defstruct([:expected])
  end

  defmodule Not do
    defstruct([])
  end

  defmodule OneOf do
    defstruct([:valid_indices, :invalid])
  end

  defmodule Pattern do
    defstruct([:expected])
  end

  defmodule PropertyNames do
    defstruct([:invalid])
  end

  defmodule Required do
    defstruct([:missing])
  end

  defmodule Type do
    defstruct([:expected, :actual])
  end

  # Component-specific error (not a standard JSON Schema keyword).
  defmodule Component do
    defstruct([:expected, :actual, :no_comp_property])
  end

  defmodule UniqueItems do
    defstruct([])
  end
end
|
lib/ex_component_schema/validator/error.ex
| 0.536799
| 0.59887
|
error.ex
|
starcoder
|
defmodule Robot.Links.Server do
  @moduledoc """
  Robot model as a GenServer
  """
  use GenServer

  import Robot.Links
  import Collision.Detector

  # Client

  @doc """
  ## Example
      {:ok, pid} = Robot.Links.Server.start_link([])
  """
  def start_link(ops) do
    GenServer.start_link(__MODULE__, :ok, ops)
  end

  @doc """
  Renders the robot in its default (all joints at 0.0) pose.

  ## Example
      {:ok, pid} = Robot.Links.Server.start_link([])
      Robot.Links.Server.draw_default(pid)
  """
  def draw_default(pid) do
    GenServer.cast(pid, :draw_default)
  end

  @doc """
  Renders a previously built robot mesh.

  ## Example
      {:ok, pid} = Robot.Links.Server.start_link([])
      joints = %Robot.Joints{joint1: :math.pi/2, joint2: 0.0, joint3: 0.0, joint4: :math.pi/2, joint5: 0.0, joint6: 0.0}
      robot_model = Robot.Links.Server.get_mesh_with_joints(pid, joints)
      Robot.Links.Server.draw(pid, robot_model)
  """
  def draw(pid, robot_model) do
    GenServer.cast(pid, {:draw, robot_model})
  end

  @doc """
  Builds and returns the robot mesh for the given joint angles.

  ## Example
      {:ok, pid} = Robot.Links.Server.start_link([])
      joints = %Robot.Joints{joint1: :math.pi/2, joint2: 0.0, joint3: 0.0, joint4: :math.pi/2, joint5: 0.0, joint6: 0.0}
      robot_model = Robot.Links.Server.get_mesh_with_joints(pid, joints)
  """
  def get_mesh_with_joints(pid, joints) do
    GenServer.call(pid, {:get_mesh_with_joints, joints})
  end

  # Server (callbacks)

  @impl true
  def init(_ops) do
    # Each file holds a serialized Erlang term produced elsewhere.
    # NOTE(security): :erlang.binary_to_term/1 builds arbitrary terms
    # (including new atoms) — only load trusted local files here.
    state =
      [:geometries, :link_base, :link_1, :link_2, :link_3, :link_4, :link_5, :link_6]
      |> Map.new(fn key ->
        {key, "#{key}.txt" |> File.read!() |> :erlang.binary_to_term()}
      end)

    names = Enum.map(state[:geometries], fn map -> map[:name] end)
    {:ok, Map.put(state, :names, names)}
  end

  @impl true
  def handle_cast(:draw_default, state) do
    joints = %Robot.Joints{joint1: 0.0, joint2: 0.0, joint3: 0.0, joint4: 0.0, joint5: 0.0, joint6: 0.0}

    state
    |> build_robot_model(joints)
    |> render()

    {:noreply, state}
  end

  @impl true
  def handle_cast({:draw, robot_model}, state) do
    render(robot_model)
    {:noreply, state}
  end

  @impl true
  def handle_call({:get_mesh_with_joints, joints}, _from, state) do
    {:reply, build_robot_model(state, joints), state}
  end

  # Assembles the full robot mesh: the fixed base link plus links 1..6,
  # each rotated by its corresponding joint angle.
  defp build_robot_model(state, joints) do
    empty_model = %Robot.Mesh{points: [], indices: [], translates: [], rotates: []}

    [
      {:link_base, 'gkmodel0_base_link_geom0', 0.0},
      {:link_1, 'gkmodel0_link_1_geom0', joints.joint1},
      {:link_2, 'gkmodel0_link_2_geom0', joints.joint2},
      {:link_3, 'gkmodel0_link_3_geom0', joints.joint3},
      {:link_4, 'gkmodel0_link_4_geom0', joints.joint4},
      {:link_5, 'gkmodel0_link_5_geom0', joints.joint5},
      {:link_6, 'gkmodel0_link_6_geom0', joints.joint6}
    ]
    |> Enum.reduce(empty_model, fn {link_key, geom_name, angle}, model ->
      add_link_to_robot_model_by_name(
        model,
        state[:geometries],
        state[:names],
        state[link_key],
        geom_name,
        angle
      )
    end)
  end

  # Sends the mesh to the renderer. draw/4 is imported from Robot.Links
  # (distinct from this module's draw/2 client function).
  defp render(robot_model) do
    draw(robot_model.points, robot_model.indices, robot_model.translates, robot_model.rotates)
  end
end
|
dpp/lib/robot/robot_links_server.ex
| 0.787564
| 0.498901
|
robot_links_server.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.