| code (string, 114 to 1.05M chars) | path (string, 3 to 312 chars) | quality_prob (float64, 0.5 to 0.99) | learning_prob (float64, 0.2 to 1) | filename (string, 3 to 168 chars) | kind (string, 1 value) |
|---|---|---|---|---|---|
use Croma
defmodule RaftKV.ValuePerKey do
@moduledoc """
Behaviour module to define interface functions to manipulate stored value for each key.
The implementations of `c:command/4` and `c:query/4` must be pure (i.e., they must consist of only deterministic computations).
To introduce side effects for your key-value operations, see `RaftKV.LeaderHook`.
See also `RaftedValue.Data`.
"""
@type key :: any
@type value :: any
@type command_arg :: any
@type command_ret :: any
@type query_arg :: any
@type query_ret :: any
@type load :: non_neg_integer
@type size :: non_neg_integer
@doc """
Generic read/write operation on the stored value.
This callback function is invoked by `RaftKV.command/4` or `RaftKV.command_on_all_keys_in_shard/3`.
Commands are replicated across members of the consensus group and executed in all members
in order to reproduce the same value in all nodes.
The callback function must return a 4-tuple.
- 0th element : Return value for the caller of `RaftKV.command/4`.
- 1st element : Approximate load (in an arbitrary unit) required by execution of the command.
- 2nd element : The next version of the value after the command. If you return `nil`, the key is removed.
- 3rd element : Size of the next version of the value (in an arbitrary unit). Neglected if you specify `nil` for the 2nd element.
"""
@callback command(nil | value, size, key, command_arg) :: {command_ret, load, nil | value, size}
@doc """
Read-only operation on the stored value.
This callback function is invoked by `RaftKV.query/4`.
This function must return a 2-tuple.
- 0th element : Return value for the caller of `RaftKV.query/4`.
- 1st element : Approximate load (in an arbitrary unit) required by execution of the query.
Note that (most of the time) read-only queries can bypass the Raft log replication (which is necessary in the case of commands),
thanks to leader leases in the Raft protocol.
Load values to return in `c:command/4` and `c:query/4` should reflect this difference.
"""
@callback query(value, size, key, query_arg) :: {query_ret, load}
end
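# A minimal sketch (not part of the original file) of a `RaftKV.ValuePerKey`
# implementation: a hypothetical per-key counter whose commands and queries
# are pure. The module name, the `:increment`/`:get` arguments, and the load
# and size units are all assumptions for illustration.
#
#     defmodule Counter do
#       @behaviour RaftKV.ValuePerKey
#
#       # Incrementing a missing key starts it at 1; returns
#       # {reply, load, next_value, next_size}.
#       @impl true
#       def command(nil, _size, _key, :increment), do: {:ok, 10, 1, 8}
#       def command(value, _size, _key, :increment), do: {:ok, 10, value + 1, 8}
#
#       # Queries bypass log replication, so they report a smaller load.
#       @impl true
#       def query(value, _size, _key, :get), do: {value, 1}
#     end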
| lib/raft_kv/value_per_key.ex | 0.889972 | 0.541954 | value_per_key.ex | starcoder |
defmodule Game.Format.Table do
@moduledoc """
Format a table
"""
import Game.Format.Context
alias Game.Color
alias Game.Format
@doc """
Format an ASCII table
"""
def format(legend, rows, column_sizes) do
width = total_width(column_sizes)
context()
|> assign_many(:rows, rows, &row(&1, column_sizes))
|> assign(:line, horizontal_line(width))
|> assign(:legend, pad_trailing(legend, width - 4))
|> Format.template(template("table"))
end
def horizontal_line(width) do
context()
|> assign(:line, pad_trailing("", width - 2, "-"))
|> Format.template("+[line]+")
end
@doc """
Find the total width of the table from column sizes
Counts the borders and padding spaces
iex> Game.Format.Table.total_width([5, 10, 3])
1+5+3+10+3+3+3
"""
def total_width(column_sizes) do
Enum.reduce(column_sizes, 0, fn column_size, size -> column_size + size + 1 + 2 end) + 1
end
def row(row, column_sizes) do
row =
row
|> Enum.with_index()
|> Enum.map(fn {column, index} ->
column_size = Enum.at(column_sizes, index)
column = to_string(column)
column = limit_visible(column, column_size)
" #{pad_trailing(column, column_size)} "
end)
|> Enum.join("|")
"|#{row}|"
end
@doc """
Pad the end of a string with spaces
iex> Game.Format.Table.pad_trailing("string", 7)
"string "
iex> Game.Format.Table.pad_trailing("string", 6)
"string"
iex> Game.Format.Table.pad_trailing("", 5, "-")
"-----"
"""
def pad_trailing(string, width, pad_string \\ " ") do
no_color_string = Color.strip_color(string)
no_color_string_length = String.length(no_color_string)
case width - no_color_string_length do
str_length when str_length > 0 ->
padder = String.pad_trailing("", width - no_color_string_length, pad_string)
string <> padder
_ ->
string
end
end
@doc """
Limit strings to visible characters
iex> Game.Format.Table.limit_visible("string", 3)
"str"
iex> Game.Format.Table.limit_visible("{cyan}string{/cyan}", 3)
"{cyan}str{/cyan}"
"""
def limit_visible(string, limit) do
string
|> String.to_charlist()
|> _limit_visible(limit)
|> to_string()
end
defp _limit_visible(characters, limit, pass \\ false)
defp _limit_visible([], _limit, _), do: []
defp _limit_visible([char | left], limit, _) when [char] == '{' do
[char | _limit_visible(left, limit, true)]
end
defp _limit_visible([char | left], limit, _) when [char] == '}' do
[char | _limit_visible(left, limit, false)]
end
defp _limit_visible([char | left], limit, true) do
[char | _limit_visible(left, limit, true)]
end
defp _limit_visible([_char | left], limit, _) when limit <= 0 do
_limit_visible(left, limit)
end
defp _limit_visible([char | left], limit, false) do
[char | _limit_visible(left, limit - 1)]
end
def template("table") do
"""
[line]
| [legend] |
[line]
[rows]
[line]
"""
end
end
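# A short usage sketch (assumed, not from the original file): rendering a
# two-column table. Color tags are kept in the output but, thanks to
# `limit_visible/2` and `pad_trailing/3`, do not count against the visible
# column widths.
#
#     legend = "Who"
#     rows = [["guard", "{cyan}town{/cyan}"], ["bandit", "forest"]]
#     Game.Format.Table.format(legend, rows, [10, 20])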
| lib/game/format/table.ex | 0.817866 | 0.462291 | table.ex | starcoder |
defmodule Throttlex do
@moduledoc """
Throttler Main Interface.
It is possible to set up multiple time buckets at the same time. This is
useful when you want to have a non-linear behavior. For example, having
a single bucket, let's say per minute and with limit `10`, the throttling
logic is applied only when that limit is reached every minute. But we can
have a bucket per minute, another one for 5 minutes and so on. If any of
the time-bucket limits are reached, then the throttling logic is applied.
When used, the defined throttle counter module expects the `:otp_app`
option. The `:otp_app` should point to an OTP application. For example,
the throttler:
defmodule MyApp.Throttler do
use Throttlex, otp_app: :my_app
end
Could be configured with:
config :my_app, MyApp.Throttler,
buckets: [
bucket0: [gc_interval: 180, slot_size: 60], #=> 1 min
bucket1: [gc_interval: 900, slot_size: 300], #=> 5 min
bucket2: [gc_interval: 1800, slot_size: 600] #=> 10 min
]
The final piece of configuration is to set up `MyApp.Throttler` as a
supervisor within the application’s supervision tree, which we can do in
`lib/my_app/application.ex` inside the `start/2` function:
def start(_type, _args) do
children = [
MyApp.Throttler
]
...
## Options
See `c:start_link/1`.
"""
@doc false
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
@behaviour Throttlex
import Throttlex.Utils
alias Throttlex.Bucket
{otp_app, buckets} = Throttlex.Supervisor.compile_config(__MODULE__, opts)
@otp_app otp_app
@buckets buckets
## API
@doc false
def __buckets__, do: @buckets
@doc false
def child_spec(opts) do
%{
id: __MODULE__,
start: {
Throttlex.Supervisor,
:start_link,
[__MODULE__, @otp_app, opts]
},
type: :supervisor
}
end
@doc false
def start_link(opts \\ []) do
Throttlex.Supervisor.start_link(__MODULE__, @otp_app, opts)
end
@doc false
def stop(sup, timeout \\ 5000) do
Supervisor.stop(sup, :normal, timeout)
end
@doc false
def incr(counter, timestamp \\ now(), slot_size \\ nil) do
Bucket.incr(@buckets, counter, timestamp, slot_size)
end
@doc false
def value(counter, timestamp \\ now(), slot_size \\ nil) do
Bucket.value(@buckets, counter, timestamp, slot_size)
end
@doc false
def stats do
Bucket.stats(@buckets)
end
@doc false
def reset do
Bucket.reset(@buckets)
end
@doc false
def to_list do
Bucket.to_list(@buckets)
end
@doc false
def gc_run do
Bucket.gc_run(@buckets)
end
@doc false
def time_slots do
Bucket.time_slots(@buckets)
end
end
end
@doc """
Starts a new throttle counter with the configured buckets.
## Options
* `:buckets`: A keyword list of buckets and their options, where each key
names a bucket and the value is the options for that bucket. For
example: `buckets: [b1: opts, ...]`.
Defaults to `[]`.
See `Throttlex.Bucket.Counter.start_link/1` for bucket options.
"""
@callback start_link(opts :: Keyword.t()) :: GenServer.on_start()
@doc """
Shuts down the throttle counter represented by the given pid.
"""
@callback stop(pid, timeout) :: :ok
@doc """
Increments the value for `counter` in the time slot given by `timestamp`
and `slot_size`.
Returns a list with the current count for each bucket.
## Example
Throttler.incr(:errors)
"""
@callback incr(
counter :: Throttlex.Bucket.Counter.counter(),
timestamp :: integer,
time_slot :: pos_integer | nil
) :: [integer]
@doc """
Returns the value for `counter` in the time slot given by `timestamp`
and `slot_size`.
Returns a list with the current count for each bucket.
## Example
Throttler.value(:errors)
"""
@callback value(
counter :: Throttlex.Bucket.Counter.counter(),
timestamp :: integer,
time_slot :: pos_integer | nil
) :: [integer]
@doc """
Returns the gathered stats for the given server `name`.
Returns a list with the stats for each bucket.
## Example
Throttler.stats()
"""
@callback stats :: [map]
@doc """
Resets or sets to `0` all counters for the bucket linked to the given
server `name`.
## Example
Throttler.reset()
"""
@callback reset :: :ok
@doc """
Returns a list of all counters for each bucket.
## Example
Throttler.to_list()
"""
@callback to_list :: [term]
@doc """
Forces the garbage collector to run.
## Example
Throttler.gc_run()
"""
@callback gc_run :: :ok
@doc """
Returns a list with the slot size for each bucket.
## Example
Throttler.time_slots()
"""
@callback time_slots :: [pos_integer]
end
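# A brief usage sketch (assumed, not from the original file), reusing the
# `MyApp.Throttler` example from the moduledoc above. The limit of 10 is
# illustrative.
#
#     # Increment the :errors counter in every configured bucket; throttle
#     # if any bucket's count exceeds its limit.
#     counts = MyApp.Throttler.incr(:errors)
#     throttle? = Enum.any?(counts, &(&1 > 10))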
| lib/throttlex.ex | 0.911456 | 0.434281 | throttlex.ex | starcoder |
defmodule Adap.Unit do
@moduledoc "Behaviour describing an ADAP distributed processing unit"
@callback start_link(args :: term) :: {:ok,pid}
@callback cast(pid,fun) :: :ok
@callback node(args :: term) :: node
end
defmodule Adap.Unit.Router do
@moduledoc """
Route an element to a node/process started on demand: `Adap.Unit.Router.cast({mod, arg} = unit_spec, elem)` will:
- route the query to `mod.node(arg)`
- check whether a process for the spec `{mod, arg}` is running locally
- if not, start a process tree with `mod.start_link(arg)`
- route the query to the existing or newly created process with `mod.cast(pid, elem)`
Processes are monitored in order to restart them on demand when they die.
A process specification is defined as a tuple `{module, args}`: the module
must implement the `Adap.Unit` behaviour with the callbacks described above.
A unit can represent a GenServer, a pool of GenServers, a pool of
GenServers spread across nodes, etc. The reference unit is a simple GenServer:
- which terminates itself after a given time to live
- where the routed element is an anonymous function with one parameter
- where casting sends the function to the server and applies it with the server state as parameter
You can `use Adap.Unit.Simple` to get the default implementation for this
kind of processing unit.
"""
use GenServer
def start_link, do: GenServer.start_link(__MODULE__,[], name: __MODULE__)
def cast({m,a},fun) do
GenServer.cast({__MODULE__,m.node(a)},{:route,{m,a},fun})
end
def init(_) do
{:ok, %{pids: Map.new(), specs: Map.new()}}
end
def handle_cast({:route, {m, a}=spec, fun}, %{pids: pids, specs: specs}=state) do
if (pid = Map.get(pids, spec)) do
m.cast(pid, fun)
{:noreply, state}
else
{:ok, pid} = m.start_link(a)
m.cast(pid, fun)
{:noreply, %{state | pids: Map.put(pids, spec, pid), specs: Map.put(specs, pid, spec)}}
end
end
def handle_info({:EXIT, pid, _}, %{pids: pids, specs: specs}=state) do
# no need to supervise backends, since they will be restarted by next query
{:noreply, %{state | pids: Map.delete(pids, Map.fetch!(specs, pid)), specs: Map.delete(specs, pid)}}
end
def terminate(_, %{pids: pids}) do
Enum.each(pids, fn {_, pid} -> Process.exit(pid, :shutdown) end)
end
end
defmodule Adap.Unit.Simple do
defmacro __using__(opts) do
quote do
@behaviour Adap.Unit
use GenServer
def start_link(arg), do: GenServer.start_link(__MODULE__, arg)
def cast(pid, fun), do: GenServer.cast(pid, {:apply, fun})
def node(_), do: node()
def handle_cast({:apply, fun}, state) do
fun.(state)
{:noreply, state, unquote(opts[:ttl])}
end
def handle_info(:timeout, state) do
{:stop, :normal, state}
end
end
end
end
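# A minimal sketch (assumed, not from the original file) of a unit built on
# `Adap.Unit.Simple` and used through `Adap.Unit.Router` (which is assumed
# to be started on each node). The 5_000 ms TTL is illustrative.
#
#     defmodule MyUnit do
#       use Adap.Unit.Simple, ttl: 5_000
#       def init(arg), do: {:ok, arg}
#     end
#
#     # Starts (or reuses) the local process for {MyUnit, :some_arg} and
#     # applies the anonymous function to its state.
#     Adap.Unit.Router.cast({MyUnit, :some_arg}, fn state -> IO.inspect(state) end)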
| lib/unit.ex | 0.794903 | 0.527803 | unit.ex | starcoder |
defmodule Earmark.Transform do
import Earmark.Helpers, only: [replace: 3]
@moduledoc """
Public Interface to functions operating on the AST
exposed by `Earmark.as_ast`
"""
@doc """
**EXPERIMENTAL**
But well tested, just expect API changes in the 1.4 branch
Takes an ast, and optional options (I love this pun), which can be
a map or keyword list of which the following keys will be used:
- `smartypants:` `boolean`
- `initial_indent:` `number`
- `indent:` `number`
iex(1)> transform({"p", [], [{"em", [], "help"}, "me"]})
"<p>\\n <em>\\n help\\n </em>\\n me\\n</p>\\n"
Right now only transformation to HTML is supported.
The transform is also agnostic to any annotation map that is added to the AST.
Only the `:meta` key is reserved; passing annotation maps with a `:meta` key
into the AST might alter the result or raise an exception. Otherwise...
iex(2)> transform({"p", [], [{"em", [], ["help"], %{inner: true}}], %{level: 1}})
"<p>\\n <em>\\n help\\n </em>\\n</p>\\n"
"""
def transform(ast, options \\ %{initial_indent: 0, indent: 2})
def transform(ast, options) when is_list(options) do
transform(ast, options|>Enum.into(%{initial_indent: 0, indent: 2}))
end
def transform(ast, options) when is_map(options) do
options1 = options
|> Map.put_new(:indent, 2)
to_html(ast, options1)
end
defp to_html(ast, options) do
_to_html(ast, options, Map.get(options, :initial_indent, 0)) |> IO.iodata_to_binary
end
defp _to_html(ast, options, level, verbatim \\ false)
defp _to_html(elements, options, level, verbatim) when is_list(elements) do
elements
|> Enum.map(&_to_html(&1, options, level, verbatim))
end
defp _to_html(element, options, level, false) when is_binary(element) do
escape(element, options, level)
end
defp _to_html(element, options, level, true) when is_binary(element) do
[make_indent(options, level), element]
end
# Void tags: `area`, `br`, `hr`, `img`, and `wbr` are rendered slightly differently
defp _to_html({"area", _, _}=tag, options, level, _verbatim), do: void_tag(tag, options, level)
defp _to_html({"br", _, _}=tag, options, level, _verbatim), do: void_tag(tag, options, level)
defp _to_html({"hr", _, _}=tag, options, level, _verbatim), do: void_tag(tag, options, level)
defp _to_html({"img", _, _}=tag, options, level, _verbatim), do: void_tag(tag, options, level)
defp _to_html({"wbr", _, _}=tag, options, level, _verbatim), do: void_tag(tag, options, level)
defp _to_html({:comment, _, children}, options, level, _verbatim) do
indent = make_indent(options, level)
[ indent,
"<!--", Enum.intersperse(children, ["\n", indent, " "]), "-->"]
end
defp _to_html({tag, atts, []}, options, level, _verbatim) do
[ make_indent(options, level),
open_tag(tag, atts),
"</",
tag,
">\n" ]
end
defp _to_html({"code", atts, children}, options, _level, _verbatim) do
[ make_indent(options, 0),
open_tag("code", atts),
Enum.join(children, "\n")|>Earmark.Helpers.escape(),
"</code>"]
end
defp _to_html({"pre", atts, children}, options, level, _verbatim) do
[ make_indent(options, level),
open_tag("pre", atts),
_to_html(children, options, level),
"</pre>\n"]
end
defp _to_html({"pre", atts, children, meta}, options, level, _verbatim) do
verbatim = Map.get(meta, :meta, %{}) |> Map.get(:verbatim, false)
[ make_indent(options, level),
open_tag("pre", atts),
_to_html(children, options, level, verbatim),
"</pre>\n"]
end
defp _to_html({tag, atts, children}, options, level, _verbatim) do
[ make_indent(options, level),
open_tag(tag, atts),
"\n",
_to_html(children, options, level+1),
close_tag(tag, options, level)]
end
defp _to_html({tag, atts, children, meta}, options, level, _verbatim) do
verbatim = Map.get(meta, :meta, %{}) |> Map.get(:verbatim, false)
[ make_indent(options, level),
open_tag(tag, atts),
"\n",
_to_html(children, options, level+1, verbatim),
close_tag(tag, options, level)]
end
defp close_tag(tag, options, level) do
[make_indent(options, level), "</", tag, ">\n"]
end
defp escape(element, options, level)
defp escape("", _options, _level) do
[]
end
defp escape(element, options, level) do
element1 =
element
|> smartypants(options)
|> Earmark.Helpers.escape(true)
[make_indent(options, level), element1, "\n"]
end
defp make_att(name_value_pair, tag)
defp make_att({name, value}, _) do
[" ", name, "=\"", value, "\""]
end
defp make_indent(%{indent: indent}, level) do
Stream.cycle([" "])
|> Enum.take(level*indent)
end
defp open_tag(tag, atts, void? \\ false) do
closer =
if void?, do: " />", else: ">"
["<", tag, atts |> Enum.map(&make_att(&1, tag)), closer]
end
@dashes_rgx ~r{--}
@dbl1_rgx ~r{(^|[-—/\(\[\{"”“\s])'}
@single_rgx ~r{\'}
@dbl2_rgx ~r{(^|[-—/\(\[\{‘\s])\"}
@dbl3_rgx ~r{"}
defp smartypants(text, options)
defp smartypants(text, %{smartypants: true}) do
text
|> replace(@dashes_rgx, "—")
|> replace(@dbl1_rgx, "\\1‘")
|> replace(@single_rgx, "’")
|> replace(@dbl2_rgx, "\\1“")
|> replace(@dbl3_rgx, "”")
|> String.replace("...", "…")
end
defp smartypants(text, _options), do: text
defp void_tag({tag, atts, []}, options, level) do
[ make_indent(options, level),
open_tag(tag, atts, true),
"\n" ]
end
end
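# A short usage sketch (assumed, not from the original file): rendering an
# AST produced by `Earmark.as_ast/1` with smartypants substitutions enabled.
#
#     {:ok, ast, _messages} = Earmark.as_ast(~s(*hello* "world"...))
#     Earmark.Transform.transform(ast, smartypants: true)
#     #=> HTML with curly quotes and an ellipsis in place of "..."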
| lib/earmark/transform.ex | 0.713931 | 0.470189 | transform.ex | starcoder |
defmodule Univrse.Key do
@moduledoc """
A Univrse Key is a CBOR data structure that represents a cryptographic key.
Univrse Keys closely mirror JSON Web Keys, and it should prove simple to
convert keys between the two specifications.
Keys are used in the `t:Univrse.Signature.t/0` and `t:Univrse.Recipient.t/0`
specifications.
"""
alias Univrse.Recipient
import Univrse.Util, only: [tag_binary: 1, untag: 1]
defdelegate decrypt(env, key, opts \\ []), to: Recipient
defdelegate encrypt(env, key, headers, opts \\ []), to: Recipient
defstruct type: nil,
params: %{}
@typedoc "Key struct"
@type t :: %__MODULE__{
type: String.t,
params: ec_params | oct_params
}
@typedoc "Elliptic curve key params"
@type ec_params :: %{
crv: String.t,
x: binary,
y: binary,
d: binary
} | %{
crv: String.t,
x: binary,
y: binary
}
@typedoc "Octet sequence key params"
@type oct_params :: %{
k: binary
}
@typedoc "Key initialisation params"
@type init_params :: {:ec, atom} | {:oct, integer}
@doc """
Decodes the given CBOR encoded key into a Key struct.
"""
@spec decode(binary) :: {:ok, t} | {:error, any}
def decode(data) when is_binary(data) do
with {:ok, map, _rest} <- CBOR.decode(data) do
%{"kty" => type} = params = untag(map)
params = params
|> Map.take(["crv", "x", "y", "d", "k"])
|> Enum.reduce(%{}, fn {k, v}, p -> Map.put(p, String.to_atom(k), v) end)
{:ok, %__MODULE__{type: type, params: params}}
end
end
@doc """
Encodes the Key as a CBOR encoded binary.
"""
@spec encode(t) :: binary
def encode(%__MODULE__{type: type, params: params}) do
params
|> Map.put(:kty, type)
|> tag_binary()
|> CBOR.encode()
end
@doc """
Securely generates a new key of the given `t:init_params`.
## Supported key types
* `{:ec, :secp256k1}` - Elliptic curve key on the `secp256k1` curve
* `{:oct, 128}` - Octet sequence key of 128 bits
* `{:oct, 256}` - Octet sequence key of 256 bits
* `{:oct, 512}` - Octet sequence key of 512 bits
"""
@spec generate_key(init_params | t) :: t
def generate_key({:ec, :secp256k1}) do
{pubkey, privkey} = :crypto.generate_key(:ecdh, :secp256k1)
<<_::size(8), x::binary-size(32), y::binary-size(32)>> = pubkey
params = %{
crv: "secp256k1",
x: x,
y: y,
d: privkey
}
%__MODULE__{type: "EC", params: params}
end
def generate_key({:oct, bits})
when is_number(bits) and bits in [128, 256, 512]
do
params = %{
k: :crypto.strong_rand_bytes(div(bits, 8))
}
%__MODULE__{type: "oct", params: params}
end
def generate_key(%__MODULE__{type: "EC", params: %{crv: "secp256k1"}}),
do: generate_key({:ec, :secp256k1})
def generate_key(%__MODULE__{type: "oct", params: %{k: k}}),
do: generate_key({:oct, bit_size(k)})
@doc """
Returns a public key from the current key, which can be safely shared with
other parties.
Only for use with `EC` key types.
"""
@spec to_public(t) :: t
def to_public(%__MODULE__{type: "EC"} = key),
do: update_in(key.params, & Map.take(&1, [:crv, :x, :y]))
end
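# A brief usage sketch (assumed, not from the original file): generating a
# secp256k1 key, deriving its shareable public half, and round-tripping it
# through CBOR.
#
#     key = Univrse.Key.generate_key({:ec, :secp256k1})
#     pubkey = Univrse.Key.to_public(key)
#     {:ok, decoded} = pubkey |> Univrse.Key.encode() |> Univrse.Key.decode()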
| lib/univrse/key.ex | 0.819533 | 0.596286 | key.ex | starcoder |
defmodule AWS.ServiceCatalog do
@moduledoc """
AWS Service Catalog
**Overview**
[AWS Service Catalog](https://aws.amazon.com/servicecatalog/) allows
organizations to create and manage catalogs of IT services that are
approved for use on AWS. This documentation provides reference material for
the AWS Service Catalog end user API. To get the most out of this
documentation, you need to be familiar with the terminology discussed in
[AWS Service Catalog
Concepts](http://docs.aws.amazon.com/servicecatalog/latest/userguide/what-is_concepts.html).
*Additional Resources*
<ul> <li> [AWS Service Catalog Administrator
Guide](http://docs.aws.amazon.com/servicecatalog/latest/adminguide/introduction.html)
</li> <li> [AWS Service Catalog User
Guide](http://docs.aws.amazon.com/servicecatalog/latest/userguide/introduction.html)
</li> </ul>
"""
@doc """
Accepts an offer to share a portfolio.
"""
def accept_portfolio_share(client, input, options \\ []) do
request(client, "AcceptPortfolioShare", input, options)
end
@doc """
Associates the specified principal ARN with the specified portfolio.
"""
def associate_principal_with_portfolio(client, input, options \\ []) do
request(client, "AssociatePrincipalWithPortfolio", input, options)
end
@doc """
Associates a product with a portfolio.
"""
def associate_product_with_portfolio(client, input, options \\ []) do
request(client, "AssociateProductWithPortfolio", input, options)
end
@doc """
Creates a new constraint.
"""
def create_constraint(client, input, options \\ []) do
request(client, "CreateConstraint", input, options)
end
@doc """
Creates a new portfolio.
"""
def create_portfolio(client, input, options \\ []) do
request(client, "CreatePortfolio", input, options)
end
@doc """
Creates a new portfolio share.
"""
def create_portfolio_share(client, input, options \\ []) do
request(client, "CreatePortfolioShare", input, options)
end
@doc """
Creates a new product.
"""
def create_product(client, input, options \\ []) do
request(client, "CreateProduct", input, options)
end
@doc """
Create a new provisioning artifact for the specified product. This
operation will not work with a product that has been shared with you.
"""
def create_provisioning_artifact(client, input, options \\ []) do
request(client, "CreateProvisioningArtifact", input, options)
end
@doc """
Deletes the specified constraint.
"""
def delete_constraint(client, input, options \\ []) do
request(client, "DeleteConstraint", input, options)
end
@doc """
Deletes the specified portfolio. This operation will not work with a
portfolio that has been shared with you or if it has products, users,
constraints, or shared accounts associated with it.
"""
def delete_portfolio(client, input, options \\ []) do
request(client, "DeletePortfolio", input, options)
end
@doc """
Deletes the specified portfolio share.
"""
def delete_portfolio_share(client, input, options \\ []) do
request(client, "DeletePortfolioShare", input, options)
end
@doc """
Deletes the specified product. This operation will not work with a product
that has been shared with you or is associated with a portfolio.
"""
def delete_product(client, input, options \\ []) do
request(client, "DeleteProduct", input, options)
end
@doc """
Deletes the specified provisioning artifact. This operation will not work
on a provisioning artifact associated with a product that has been shared
with you, or on the last provisioning artifact associated with a product (a
product must have at least one provisioning artifact).
"""
def delete_provisioning_artifact(client, input, options \\ []) do
request(client, "DeleteProvisioningArtifact", input, options)
end
@doc """
Retrieves detailed information for a specified constraint.
"""
def describe_constraint(client, input, options \\ []) do
request(client, "DescribeConstraint", input, options)
end
@doc """
Retrieves detailed information and any tags associated with the specified
portfolio.
"""
def describe_portfolio(client, input, options \\ []) do
request(client, "DescribePortfolio", input, options)
end
@doc """
Retrieves information about a specified product.
This operation is functionally identical to `DescribeProductView` except
that it takes as input `ProductId` instead of `ProductViewId`.
"""
def describe_product(client, input, options \\ []) do
request(client, "DescribeProduct", input, options)
end
@doc """
Retrieves information about a specified product, run with administrator
access.
"""
def describe_product_as_admin(client, input, options \\ []) do
request(client, "DescribeProductAsAdmin", input, options)
end
@doc """
Retrieves information about a specified product.
This operation is functionally identical to `DescribeProduct` except that
it takes as input `ProductViewId` instead of `ProductId`.
"""
def describe_product_view(client, input, options \\ []) do
request(client, "DescribeProductView", input, options)
end
@doc """
Retrieves detailed information about the specified provisioning artifact.
"""
def describe_provisioning_artifact(client, input, options \\ []) do
request(client, "DescribeProvisioningArtifact", input, options)
end
@doc """
Provides information about parameters required to provision a specified
product in a specified manner. Use this operation to obtain the list of
`ProvisioningArtifactParameters` parameters available to call the
`ProvisionProduct` operation for the specified product.
"""
def describe_provisioning_parameters(client, input, options \\ []) do
request(client, "DescribeProvisioningParameters", input, options)
end
@doc """
Retrieves a paginated list of the full details of a specific request. Use
this operation after calling a request operation (`ProvisionProduct`,
`TerminateProvisionedProduct`, or `UpdateProvisionedProduct`).
"""
def describe_record(client, input, options \\ []) do
request(client, "DescribeRecord", input, options)
end
@doc """
Disassociates a previously associated principal ARN from a specified
portfolio.
"""
def disassociate_principal_from_portfolio(client, input, options \\ []) do
request(client, "DisassociatePrincipalFromPortfolio", input, options)
end
@doc """
Disassociates the specified product from the specified portfolio.
"""
def disassociate_product_from_portfolio(client, input, options \\ []) do
request(client, "DisassociateProductFromPortfolio", input, options)
end
@doc """
Lists details of all portfolios for which sharing was accepted by this
account.
"""
def list_accepted_portfolio_shares(client, input, options \\ []) do
request(client, "ListAcceptedPortfolioShares", input, options)
end
@doc """
Retrieves detailed constraint information for the specified portfolio and
product.
"""
def list_constraints_for_portfolio(client, input, options \\ []) do
request(client, "ListConstraintsForPortfolio", input, options)
end
@doc """
Returns a paginated list of all paths to a specified product. A path is how
the user has access to a specified product, and is necessary when
provisioning a product. A path also determines the constraints put on the
product.
"""
def list_launch_paths(client, input, options \\ []) do
request(client, "ListLaunchPaths", input, options)
end
@doc """
Lists the account IDs that have been granted access to the specified
portfolio.
"""
def list_portfolio_access(client, input, options \\ []) do
request(client, "ListPortfolioAccess", input, options)
end
@doc """
Lists all portfolios in the catalog.
"""
def list_portfolios(client, input, options \\ []) do
request(client, "ListPortfolios", input, options)
end
@doc """
Lists all portfolios that the specified product is associated with.
"""
def list_portfolios_for_product(client, input, options \\ []) do
request(client, "ListPortfoliosForProduct", input, options)
end
@doc """
Lists all principal ARNs associated with the specified portfolio.
"""
def list_principals_for_portfolio(client, input, options \\ []) do
request(client, "ListPrincipalsForPortfolio", input, options)
end
@doc """
Lists all provisioning artifacts associated with the specified product.
"""
def list_provisioning_artifacts(client, input, options \\ []) do
request(client, "ListProvisioningArtifacts", input, options)
end
@doc """
Returns a paginated list of all performed requests, in the form of
RecordDetails objects that are filtered as specified.
"""
def list_record_history(client, input, options \\ []) do
request(client, "ListRecordHistory", input, options)
end
@doc """
Requests a *Provision* of a specified product. A *ProvisionedProduct* is a
resourced instance for a product. For example, provisioning a
CloudFormation-template-backed product results in launching a
CloudFormation stack and all the underlying resources that come with it.
You can check the status of this request using the `DescribeRecord`
operation.
"""
def provision_product(client, input, options \\ []) do
request(client, "ProvisionProduct", input, options)
end
@doc """
Rejects an offer to share a portfolio.
"""
def reject_portfolio_share(client, input, options \\ []) do
request(client, "RejectPortfolioShare", input, options)
end
@doc """
Returns a paginated list of all the ProvisionedProduct objects that are
currently available (not terminated).
"""
def scan_provisioned_products(client, input, options \\ []) do
request(client, "ScanProvisionedProducts", input, options)
end
@doc """
Returns a paginated list of all the `Products` objects to which the caller
has access.
The output of this operation can be used as input for other operations,
such as `DescribeProductView`.
"""
def search_products(client, input, options \\ []) do
request(client, "SearchProducts", input, options)
end
@doc """
Retrieves summary and status information about all products created within
the caller's account. If a portfolio ID is provided, this operation
retrieves information for only those products that are associated with the
specified portfolio.
"""
def search_products_as_admin(client, input, options \\ []) do
request(client, "SearchProductsAsAdmin", input, options)
end
@doc """
Requests termination of an existing ProvisionedProduct object. If there are
`Tags` associated with the object, they are terminated when the
ProvisionedProduct object is terminated.
This operation does not delete any records associated with the
ProvisionedProduct object.
You can check the status of this request using the `DescribeRecord`
operation.
"""
def terminate_provisioned_product(client, input, options \\ []) do
request(client, "TerminateProvisionedProduct", input, options)
end
@doc """
Updates an existing constraint.
"""
def update_constraint(client, input, options \\ []) do
request(client, "UpdateConstraint", input, options)
end
@doc """
Updates the specified portfolio's details. This operation will not work
with a portfolio that has been shared with you.
"""
def update_portfolio(client, input, options \\ []) do
request(client, "UpdatePortfolio", input, options)
end
@doc """
Updates an existing product.
"""
def update_product(client, input, options \\ []) do
request(client, "UpdateProduct", input, options)
end
@doc """
Requests updates to the configuration of an existing ProvisionedProduct
object. If there are tags associated with the object, they cannot be
updated or added with this operation. Depending on the specific updates
requested, this operation may update with no interruption, with some
interruption, or replace the ProvisionedProduct object entirely.
You can check the status of this request using the `DescribeRecord`
operation.
"""
def update_provisioned_product(client, input, options \\ []) do
request(client, "UpdateProvisionedProduct", input, options)
end
@doc """
Updates an existing provisioning artifact's information. This operation
will not work on a provisioning artifact associated with a product that has
been shared with you.
"""
def update_provisioning_artifact(client, input, options \\ []) do
request(client, "UpdateProvisioningArtifact", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "servicecatalog"}
host = get_host("servicecatalog", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AWS242ServiceCatalogService.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
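# A short usage sketch (assumed, not from the original file). The client is
# a map with the fields consumed by `request/4`, `get_host/2`, and
# `get_url/2` above; all values here are placeholders.
#
#     client = %{region: "us-east-1", endpoint: "amazonaws.com",
#                proto: "https", port: "443",
#                access_key_id: "...", secret_access_key: "..."}
#     AWS.ServiceCatalog.search_products(client, %{"PageSize" => 20})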
| lib/aws/service_catalog.ex | 0.901438 | 0.468669 | service_catalog.ex | starcoder |
defmodule Rmc.FOne2018.CarMotion do
alias __MODULE__
@moduledoc false
# struct CarMotionData
# {
# float m_worldPositionX; // World space X position
# float m_worldPositionY; // World space Y position
# float m_worldPositionZ; // World space Z position
# float m_worldVelocityX; // Velocity in world space X
# float m_worldVelocityY; // Velocity in world space Y
# float m_worldVelocityZ; // Velocity in world space Z
# int16 m_worldForwardDirX; // World space forward X direction (normalised)
# int16 m_worldForwardDirY; // World space forward Y direction (normalised)
# int16 m_worldForwardDirZ; // World space forward Z direction (normalised)
# int16 m_worldRightDirX; // World space right X direction (normalised)
# int16 m_worldRightDirY; // World space right Y direction (normalised)
# int16 m_worldRightDirZ; // World space right Z direction (normalised)
# float m_gForceLateral; // Lateral G-Force component
# float m_gForceLongitudinal; // Longitudinal G-Force component
# float m_gForceVertical; // Vertical G-Force component
# float m_yaw; // Yaw angle in radians
# float m_pitch; // Pitch angle in radians
# float m_roll; // Roll angle in radians
# };
@derive Jason.Encoder
defstruct [
:position,
:velocity,
:forward_dir,
:right_dir,
:g_force_lateral,
:g_force_longitudinal,
:g_force_vertical,
:yaw,
:pitch,
:roll
]
def parse_motions(<<
position_x::little-float-size(32),
position_y::little-float-size(32),
position_z::little-float-size(32),
velocity_x::little-float-size(32),
velocity_y::little-float-size(32),
velocity_z::little-float-size(32),
forward_dir_x::little-signed-size(16),
forward_dir_y::little-signed-size(16),
forward_dir_z::little-signed-size(16),
right_dir_x::little-signed-size(16),
right_dir_y::little-signed-size(16),
right_dir_z::little-signed-size(16),
g_force_lateral::little-float-size(32),
g_force_longitudinal::little-float-size(32),
g_force_vertical::little-float-size(32),
yaw::little-float-size(32),
pitch::little-float-size(32),
roll::little-float-size(32),
motions::binary
>>) do
[
%CarMotion{
position: [
position_x,
position_y,
position_z
],
velocity: [
velocity_x,
velocity_y,
velocity_z
],
forward_dir: [
forward_dir_x,
forward_dir_y,
forward_dir_z
],
right_dir: [
right_dir_x,
right_dir_y,
right_dir_z
],
g_force_lateral: g_force_lateral,
g_force_longitudinal: g_force_longitudinal,
g_force_vertical: g_force_vertical,
yaw: yaw,
pitch: pitch,
roll: roll
}
| parse_motions(motions)
]
end
def parse_motions(<<>>), do: []
end
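# A minimal decoding sketch (assumed, not from the original file): each car's
# motion record is 60 bytes (six 32-bit floats, six 16-bit integers, six more
# 32-bit floats), and `parse_motions/1` consumes the binary recursively.
#
#     # `motion_data` is the packet body after the F1 2018 packet header
#     # (assumed to have been split off by the caller).
#     cars = Rmc.FOne2018.CarMotion.parse_motions(motion_data)
#     Enum.map(cars, & &1.position)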
| lib/rmc/f_one2018/car_motion.ex | 0.560734 | 0.525551 | car_motion.ex | starcoder |
defmodule PipeTo do
@doc """
PipeTo operator.
This operator will replace the placeholder argument `_` in the right-hand
side function call with left-hand side expression.
### Examples
iex> 1 ~> Enum.at(1..3, _)
2
It can mix with `|>` operation
### Examples
iex> 1 ~> Enum.at(1..3, _) |> Kernel.*(5)
10
When using `~>` without the placeholder `_`, it acts just like the `|>` pipe operator.
### Examples
iex> [1, 2, 3] ~> Enum.take(2)
[1, 2]
"""
defmacro left ~> right do
[{h, _} | t] = __MODULE__.unpipe({:~>, [], [left, right]})
# Basically follows `lib/elixir/lib/kernel.ex` left |> right
# https://github.com/elixir-lang/elixir/blob/master/lib/elixir/lib/kernel.ex#L3134
fun = fn {x, pos}, acc ->
case x do
{op, _, [_]} when op == :+ or op == :- ->
message =
"piping into a unary operator is deprecated, please use the " <>
"qualified name. For example, Kernel.+(5), instead of +5"
IO.warn(message, Macro.Env.stacktrace(__CALLER__))
_ ->
:ok
end
Macro.pipe(acc, x, pos)
end
:lists.foldl(fun, h, t)
end
@doc """
Breaks a pipeline expression into a list. This is where the target position being calculated.
PipeTo.unpipe(quote do: 5 ~> div(100, _) ~> div(2))
# => [{5, 0},
# {{:div, [context: Elixir, import: Kernel], [100]}, 1},
# {{:div, [], [2]}, 0}]
"""
@spec unpipe(Macro.t) :: [Macro.t]
def unpipe(expr) do
:lists.reverse(unpipe(expr, []))
end
defp unpipe({:~>, _, [left, right]}, acc) do
unpipe(right, unpipe(left, acc))
end
defp unpipe(ast = {_, _, args}, acc) when is_list(args) do
placeholder_index =
Enum.find_index(args, &is_placeholder/1)
fixed_ast = remove_placeholder(ast, placeholder_index)
[{fixed_ast, pipe_position(placeholder_index)} | acc]
end
defp unpipe(other, acc) do
[{other, 0} | acc]
end
defp is_placeholder({:_, _, _}), do: true
defp is_placeholder(_), do: false
defp pipe_position(nil), do: 0
defp pipe_position(index), do: index
defp remove_placeholder(ast, nil), do: ast
defp remove_placeholder({fun, meta, args}, index) do
{fun, meta, List.delete_at(args, index)}
end
end
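# A worked sketch (assumed, not from the original file) of how `~>` splices
# the left-hand side into the `_` slot:
#
#     import PipeTo
#     5 ~> div(100, _)   #=> 20, i.e. div(100, 5)
#     5 ~> div(100)      #=> 0,  i.e. div(5, 100), plain |> behaviour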
| lib/pipe_to.ex | 0.834811 | 0.574723 | pipe_to.ex | starcoder |
defmodule Playground.Scenario.Counters.Many.Atomics.Many do
use Playground.Scenario
def scenario_type do
{:iterations, Stream.map(10..20, &round(:math.pow(2, &1)))}
end
def scenario_banner do
"""
Scenario: Deeper Comparison of various Atomics access patterns
Tasks:
- Sequentially update <count> 1-arity atomics (sequential, ordered)
- Sequentially update <count> 1-arity atomics (sequential, randomised)
- Concurrently update <count> 1-arity atomics (concurrent, sequential, unordered tasks)
- Concurrently update <count> 1-arity atomics (concurrent, sequential, ordered tasks)
- Concurrently update <count> 1-arity atomics (concurrent, randomised, unordered tasks)
- Concurrently update <count> 1-arity atomics (concurrent, randomised, ordered tasks)
"""
end
def scenario_arguments do
[]
end
def scenario_iteration(count) do
IO.write("#{String.pad_leading(Integer.to_string(count), 11)}: ")
atomics_refs = List.to_tuple(for _ <- 1..count, do: :atomics.new(1, signed: false))
run_tasks("sequential, ordered atomics:add/3", 1, fn _ ->
for x <- 1..count do
atomics_ref = elem(atomics_refs, x - 1)
:atomics.add(atomics_ref, 1, 1)
end
end)
atomics_refs = List.to_tuple(for _ <- 1..count, do: :atomics.new(1, signed: false))
run_tasks("sequential, randomised atomics:add/3", 1, fn _ ->
for x <- random_count(count) do
atomics_ref = elem(atomics_refs, x - 1)
:atomics.add(atomics_ref, 1, 1)
end
end)
atomics_refs = List.to_tuple(for _ <- 1..count, do: :atomics.new(1, signed: false))
run_tasks("concurrent, sequential, unordered atomics:add/3", count, fn x ->
atomics_ref = elem(atomics_refs, x - 1)
:atomics.add(atomics_ref, 1, 1)
end)
atomics_refs = List.to_tuple(for _ <- 1..count, do: :atomics.new(1, signed: false))
run_tasks("concurrent, sequential, ordered atomics:add/3", count, [ordered: true], fn x ->
atomics_ref = elem(atomics_refs, x - 1)
:atomics.add(atomics_ref, 1, 1)
end)
atomics_refs = List.to_tuple(for _ <- 1..count, do: :atomics.new(1, signed: false))
run_tasks("concurrent, randomised, unordered atomics:add/3", random_count(count), fn x ->
atomics_ref = elem(atomics_refs, x - 1)
:atomics.add(atomics_ref, 1, 1)
end)
atomics_refs = List.to_tuple(for _ <- 1..count, do: :atomics.new(1, signed: false))
run_tasks(
"concurrent, randomised, ordered atomics:add/3",
random_count(count),
[ordered: true],
fn x ->
atomics_ref = elem(atomics_refs, x - 1)
:atomics.add(atomics_ref, 1, 1)
end
)
end
end
| lib/playground/scenario/counters/many/atomics/many.ex | 0.678114 | 0.777764 | many.ex | starcoder |
defmodule DataQuacker.Schema.State do
@moduledoc false
alias DataQuacker.Schema.State
defstruct cursor: [], flags: %{}, schema: %{}, matchers: [], rows: [], fields: %{}
def new(), do: %State{}
def clear_fields(state) do
%State{state | fields: %{}}
end
def flag(%State{flags: flags} = state, flag, value) do
flags = Map.put(flags, flag, value)
%State{state | flags: flags}
end
def flagged?(%State{flags: flags}, flag) do
Map.get(flags, flag, false)
end
def cursor_at?(%State{cursor: []}, type), do: is_nil(type)
def cursor_at?(%State{cursor: cursor}, type) do
elem(hd(cursor), 0) == type
end
def target(%State{cursor: cursor}) do
target_from_cursor(cursor)
end
def cursor_exit(%State{cursor: cursor} = state, levels \\ 1) do
%State{state | cursor: Enum.drop(cursor, levels)}
end
def register(%State{cursor: cursor} = state, :schema, {schema_name, schema}) do
cursor = [{:schema, schema_name} | cursor]
schema = Map.merge(new_schema(schema_name), schema)
%State{state | schema: schema, cursor: cursor}
end
def register(%State{cursor: cursor, rows: rows} = state, :row, {row_index, row}) do
cursor = [{:row, row_index} | cursor]
row = Map.merge(new_row(row_index), row)
rows = rows ++ [row]
%State{state | rows: rows, cursor: cursor}
end
def register(%State{cursor: cursor, fields: fields} = state, :field, {field_name, field}) do
cursor = [{:field, field_name} | cursor]
needle = field_needle(cursor)
field = Map.merge(new_field(field_name), field)
fields = put_in(fields, Enum.reverse(needle), field)
%State{state | fields: fields, cursor: cursor}
end
def register(%State{matchers: matchers, cursor: cursor} = state, :matcher, rule) do
matcher = %{rule: rule, target: target_from_cursor(cursor)}
matchers = [matcher | matchers]
%State{state | matchers: matchers}
end
def update(%State{schema: existing_schema} = state, :schema, schema) do
schema = Map.merge(existing_schema, schema)
%State{state | schema: schema}
end
def update(%State{cursor: cursor, rows: rows} = state, :row, row) do
index = elem(hd(cursor), 1)
rows = List.update_at(rows, index, &Map.merge(&1, row))
%State{state | rows: rows}
end
def update(%State{cursor: cursor, fields: fields} = state, :field, field) do
needle = field_needle(cursor)
fields = update_in(fields, Enum.reverse(needle), &Map.merge(&1, field))
%State{state | fields: fields}
end
def get(%State{cursor: cursor, rows: rows}, :row) do
Enum.at(rows, elem(hd(cursor), 1))
end
def get(%State{cursor: cursor, fields: fields}, :field) do
needle = field_needle(cursor)
get_in(fields, Enum.reverse(needle))
end
defp new_schema(name) do
%{__name__: name, matchers: [], rows: []}
end
defp new_row(index) do
%{__index__: index, fields: %{}, validators: [], transformers: [], skip_if: nil}
end
defp new_field(name) do
%{
__name__: name,
__type__: nil,
source: nil,
subfields: %{},
validators: [],
transformers: [],
skip_if: nil
}
end
defp fields_cursor(cursor) do
cursor |> Enum.split_while(&(elem(&1, 0) == :field)) |> elem(0)
end
defp target_from_cursor(cursor) do
Enum.map(cursor, &elem(&1, 1))
end
defp field_needle(cursor) do
cursor |> fields_cursor() |> target_from_cursor() |> Enum.intersperse(:subfields)
end
end
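# An illustrative walk-through (assumed, not from the original file) of how
# the cursor tracks nesting while a schema is being built:
#
#     alias DataQuacker.Schema.State
#
#     State.new()
#     |> State.register(:schema, {:invoices, %{}})
#     |> State.register(:row, {0, %{}})
#     |> State.register(:field, {:total, %{}})
#     |> State.target()
#     #=> [:total, 0, :invoices] (innermost cursor entry first)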
| lib/schema/state.ex | 0.596198 | 0.716479 | state.ex | starcoder |
defmodule Biggie do
@moduledoc """
Provides a client for programmatically interfacing with BigQuery
Relevant models:
https://hexdocs.pm/google_api_big_query/GoogleApi.BigQuery.V2.Api.Jobs.html#bigquery_jobs_list/3
https://hexdocs.pm/google_api_big_query/GoogleApi.BigQuery.V2.Model.Job.html#content
https://hexdocs.pm/google_api_big_query/GoogleApi.BigQuery.V2.Model.JobConfiguration.html#content
https://hexdocs.pm/google_api_big_query/GoogleApi.BigQuery.V2.Model.JobConfigurationQuery.html#content
"""
alias Biggie.Api
alias Biggie.Models.{
Job,
JobConfiguration,
JobConfigurationQuery
}
require Logger
@poll_interval 4_000 # four seconds
@doc """
Runs a query job
Given an SQL query to execute and a list of options,
runs a query job on BigQuery. Returns a reference to
the job for accessing query results later on.
Options available here: https://hexdocs.pm/google_api_big_query/GoogleApi.BigQuery.V2.Model.JobConfigurationQuery.html#module-attributes
"""
def run_query_job(sql, labels \\ %{}, opts \\ %{}) do
job =
opts
|> Map.put(:query, sql)
|> JobConfigurationQuery.assemble()
|> JobConfiguration.with_query()
|> JobConfiguration.with_labels(labels)
|> Job.with_configuration()
Api.Jobs.insert(body: job)
end
@doc """
Fetches a list of results for the given job. If the job is not finished,
the process will poll until it is.
"""
def fetch_results(job_id, offset \\ 0, limit \\ 500, acc \\ []) do
case poll_for_results(job_id, offset, limit) do
{:ok, %{rows: nil}} -> acc
{:ok, %{rows: rows}} ->
fetch_results(job_id, offset + limit, limit, rows ++ acc)
{:error, reason} ->
Logger.error("Could not fetch job results: #{inspect(reason)}")
raise "could not fetch job results: #{inspect(reason)}"
end
end
@doc """
Lists rows in the given table.
"""
def tabledata_list(dataset_id, table_id, offset \\ 0, limit \\ 500) do
Api.Tabledata.list([dataset_id, table_id], [
maxResults: limit,
startIndex: offset
])
end
@doc """
Polls for query results
If the query is done, results will be returned. If the query
is still running, sleeps for an interval defined in the
@poll_interval module attribute and then tries again.
"""
def poll_for_results(job_id, offset, limit, poll \\ 0)
def poll_for_results(_job_id, _offset, _limit, poll) when poll > 5 do
{:error, :timeout}
end
def poll_for_results(job_id, offset, limit, poll) do
case Api.Jobs.get_query_results(job_id, [maxResults: limit, startIndex: offset]) do
{:ok, %{jobComplete: true} = results} -> {:ok, results}
{:ok, %{jobComplete: false}} ->
:timer.sleep(@poll_interval)
poll_for_results(job_id, offset, limit, poll + 1)
error -> error
end
end
end
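# A short usage sketch (assumed, not from the original file): run a query
# job, then fetch its rows once it completes. The `jobReference.jobId`
# access assumes the GoogleApi.BigQuery job model linked in the moduledoc.
#
#     {:ok, job} =
#       Biggie.run_query_job(
#         "SELECT name FROM `dataset.users` LIMIT 10",
#         %{"team" => "analytics"}
#       )
#     rows = Biggie.fetch_results(job.jobReference.jobId)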
| lib/biggie.ex | 0.657209 | 0.485905 | biggie.ex | starcoder |
defmodule Abacus do
@moduledoc """
Abacus is a tool to simplify the handling of units.
For example :
```
# This module is used during the documentation to
# show some examples.
defmodule AbacusTest.Length do
use Abacus.SystemMetric
# :cm is the unit used as a reference in the metric system 'Length'
unit :cm
unit :mm, (1/10)
unit :dm, 10
unit :m, 100
unit :km, 100000
end
```
This module provides functions for referencing a metric type:
- `Length.cm/0`
- `Length.mm/0`
- `Length.dm/0`
- `Length.m/0`
- `Length.km/0`
and to create a value enclosed in a metric system:
- `Length.cm/1`
- `Length.mm/1`
- `Length.dm/1`
- `Length.m/1`
- `Length.km/1`
Here is an example of using Abacus:
```
a_distance = Length.cm(12)
a_distance_in_km = Abacus.from(a_distance, to: Length.km)
```
A `metric_type` is defined by a module and a subtype. For example `Length` and `:cm`.
"""
@typedoc """
This type represents a unit of measure (defined with using Abacus.SystemMetric)
"""
@type metric_type :: {
module,
atom,
number
}
@typedoc """
This type represents a value wrapped in a metric system
"""
@type typed_value :: {
metric_type,
float
}
@typedoc """
This type represents a results of a comparison
"""
@type comparison_result :: :eq | :lt | :gt
defmodule SystemMetric do
@doc false
defmacro __using__(_opts) do
quote do
import Abacus.SystemMetric
@base nil
end
end
@doc """
A macro to generate the base of the system.
This unit is the reference of each other units.
For example :
```
defmodule Example do
use Abacus.SystemMetric
unit :cm
end
```
"""
defmacro unit(name) do
quote do
if @base do
raise RuntimeError, message: "Base is already defined"
end
@base unquote(name)
def unquote(name)(), do: {__MODULE__, unquote(name), unquote(1.0)}
def unquote(name)(value) do
{
apply(__MODULE__, unquote(name), []),
value * 1.0
}
end
end
end
@doc """
A macro to generate an unit using the `base` as a reference.
This is referenced by a name (`:km` for example) and by a
reference to the base, in the case of `:km` in a system
referenced by `:cm` : 100000.
For example:
```
defmodule Example do
use Abacus.SystemMetric
unit :cm
unit :m, 100 # (100 cm == 1 m)
unit :dm, 10 # (10 cm == 1 dm)
end
```
"""
defmacro unit(name, expr) do
quote do
unless @base do
raise RuntimeError, message: "Base must be defined"
end
unit_name = unquote(name)
if @base == unit_name do
raise RuntimeError, message: "#{unit_name} is already defined"
end
def unquote(name)(), do: {__MODULE__, unquote(name), unquote(expr)}
def unquote(name)(value) do
{
apply(__MODULE__, unquote(name), []),
value * 1.0
}
end
end
end
end
@doc """
Retrieves the wrapped numeric value in a `typed_value()`.
For example:
iex> x = AbacusTest.Length.cm(12)
...> Abacus.unwrap(x)
12.0
"""
@spec unwrap(typed_value()) :: number()
def unwrap({_, elt}), do: elt
@doc """
Converts a `typed_value()` to another subtype of its metric system.
For example:
iex> x = AbacusTest.Length.cm(120)
...> Abacus.from(x, to: AbacusTest.Length.m)
{AbacusTest.Length.m, 1.2}
"""
@spec from(typed_value(), [to: metric_type()]) :: typed_value()
def from({{module, _, coeff}, elt}, to: {module, _, coeff_basis} = basis) do
divider = 1 / coeff_basis
basis_elt = (elt * coeff) * divider
{basis, basis_elt}
end
def from({{module, _, _}, _}, to: {other_module, _, _}) do
raise RuntimeError, message: "[#{module}] is not compatible with [#{other_module}]"
end
@doc """
Applies a function to the numeric value of a typed value and re-packs
the result of the function in the same subtype.
For example:
iex> AbacusTest.Length.km(120)
...> |> Abacus.map(fn(x) -> x * 2 end)
{AbacusTest.Length.km, 240.0}
"""
@spec map(typed_value(), (number() -> number())) :: typed_value()
def map({type, elt}, f) do
{type, f.(elt)}
end
@doc """
Applies a function to the two numeric values of two `typed_values()` in
the same metric system, and re-packages the result
of the function in a `typed_value()` of the subtype of the left `typed_values()`.
For example:
iex> a = AbacusTest.Length.dm(100)
...> b = AbacusTest.Length.dm(2)
...> Abacus.map2(a, b, &(&1 * &2))
{AbacusTest.Length.dm, 200.0}
"""
@spec map2(
typed_value(),
typed_value(),
(number(), number() -> number())
) :: typed_value
def map2({{module, _, _} = t, elt}, {{module, _, _}, _} = elt2, f) do
converted = from(elt2, to: t)
{t, f.(elt, unwrap(converted))}
end
def map2({{module, _, _}, _}, {{other_module, _, _}, _}, _) do
raise RuntimeError, message: "[#{module}] is not compatible with [#{other_module}]"
end
@doc """
`List.foldl` for a list of `typed_value()` from the same metric system.
For example:
iex> Abacus.fold(
...> [
...> AbacusTest.Length.cm(10),
...> AbacusTest.Length.dm(1),
...> AbacusTest.Length.m(12)
...> ],
...> AbacusTest.Length.cm(12),
...> fn(x, acc) -> Abacus.map2(x, acc, &(&1+&2)) end,
...> to: AbacusTest.Length.cm
...>)
{AbacusTest.Length.cm, 1232.0}
"""
@spec fold(
[typed_value()],
any(),
(typed_value(), any() -> any()),
[to: metric_type()]
) :: any()
def fold(list, default, f, to: basis) do
List.foldl(list, default, fn(x, acc) ->
converted = Abacus.from(x, to: basis)
f.(converted, acc)
end)
end
@doc """
Calculates the sum of a list of `typed_value()` of the same
metric system, projected into a specific subtype.
For example:
iex> Abacus.sum(
...> [
...> AbacusTest.Length.cm(10),
...> AbacusTest.Length.dm(1),
...> AbacusTest.Length.m(12)
...> ],
...> to: AbacusTest.Length.dm
...> )
{AbacusTest.Length.dm, 122.0}
"""
@spec sum([typed_value()], [to: metric_type]) :: typed_value()
def sum(list, to: {module, basis_name, _coeff} = basis) do
fold(
list, apply(module, basis_name, [0]),
&add/2,
to: basis
)
end
@doc """
Comparison between two `typed_value()` of the same metric system.
The function returns:
- `:eq` for `equals`
- `:lt` if the left-values is **lower than** the right-values
- `:gt` if the left-values is **greater than** the right-values
For example:
iex> x = AbacusTest.Length.m(1)
...> y = AbacusTest.Length.cm(100)
...> Abacus.compare(x, with: y)
:eq
"""
@spec compare(typed_value(), [with: typed_value()]) :: comparison_result
def compare({t, _} = left, with: right) do
a = unwrap(left)
b = unwrap(from(right, to: t))
cond do
a > b -> :gt
b > a -> :lt
true -> :eq
end
end
@doc """
Makes the addition between two `typed_value()` of the same metric system.
The return value will have the subtype of the left `typed_value()`.
iex> a = AbacusTest.Length.cm(12)
...> b = AbacusTest.Length.m(2)
...> Abacus.add(a, b)
{AbacusTest.Length.cm, 212.0}
"""
@spec add(typed_value(), typed_value()) :: typed_value()
def add(a, b) do
map2(a, b, &(&1 + &2))
end
@doc """
Makes the subtraction between two `typed_value()` of the same metric system.
The return value will have the subtype of the left `typed_value()`.
iex> a = AbacusTest.Length.cm(12)
...> b = AbacusTest.Length.m(2)
...> Abacus.sub(b, a)
{AbacusTest.Length.m, 1.88}
"""
@spec sub(typed_value(), typed_value()) :: typed_value()
def sub(a, b) do
map2(a, b, &(&1 - &2))
end
@doc """
Multiplies a `typed_value()` by a `number()`. The subtype of the return value
will be the subtype of the left `typed_value()`.
iex> a = AbacusTest.Length.cm(12)
...> Abacus.mult(a, 10)
{AbacusTest.Length.cm, 120.0}
"""
@spec mult(typed_value(), number) :: typed_value()
def mult(a, b) do
map(a, fn(x) -> x * b end)
end
@doc """
Divides a `typed_value()` by a `number()`. The subtype of the return value
will be the subtype of the left `typed_value()`.
iex> a = AbacusTest.Length.cm(12)
...> Abacus.div(a, 2)
{AbacusTest.Length.cm, 6.0}
"""
@spec div(typed_value(), number) :: typed_value()
def div(a, b) do
mult(a, 1/b)
end
end
| lib/abacus.ex | 0.944625 | 0.878835 | abacus.ex | starcoder |
defmodule BSV.Address do
@moduledoc """
A Bitcoin address is a 26-35 character string, beginning with the number 1,
that represents the hash of a public key.
An address is derived by calculating the RIPEMD-160 hash of the SHA-256 hash
of the public key, and then Base58Check encoding it.
Addresses are used in [`P2PKH`](`BSV.Contract.P2PKH`) outputs, a common
script template used to send Bitcoin payments.
"""
alias BSV.{Hash, PubKey}
defstruct pubkey_hash: nil
@typedoc """
Bitcoin address
An Elixir struct containing the public key hash.
"""
@type t() :: %__MODULE__{
pubkey_hash: <<_::160>>
}
@typedoc """
Bitcoin address string
Base58Check encoded public key hash.
"""
@type address_str() :: String.t()
@version_bytes %{
main: <<0x00>>,
test: <<0x6F>>
}
@doc """
Converts the given `t:BSV.PubKey.t/0` into an `t:BSV.Address.t/0`.
## Examples
iex> Address.from_pubkey(@pubkey)
%Address{
pubkey_hash: <<83, 143, 209, 121, 200, 190, 15, 40, 156, 115, 14, 51, 181, 246, 163, 84, 27, 233, 102, 143>>
}
"""
@spec from_pubkey(PubKey.t() | binary()) :: t()
def from_pubkey(%PubKey{} = pubkey) do
pubkey
|> PubKey.to_binary()
|> from_pubkey()
end
def from_pubkey(pubkey)
when is_binary(pubkey) and byte_size(pubkey) in [33, 65]
do
pubkey_hash = Hash.sha256_ripemd160(pubkey)
struct(__MODULE__, pubkey_hash: pubkey_hash)
end
@doc """
Decodes the given `t:BSV.Address.address_str/0` into an `t:BSV.Address.t/0`.
Returns the result in an `:ok` / `:error` tuple pair.
## Examples
iex> Address.from_string("18cqNbEBxkAttxcZLuH9LWhZJPd1BNu1A5")
{:ok, %Address{
pubkey_hash: <<83, 143, 209, 121, 200, 190, 15, 40, 156, 115, 14, 51, 181, 246, 163, 84, 27, 233, 102, 143>>
}}
"""
@spec from_string(address_str()) :: {:ok, t()} | {:error, term()}
def from_string(address) when is_binary(address) do
version_byte = @version_bytes[BSV.network()]
case B58.decode58_check(address) do
{:ok, {<<pubkey_hash::binary-20>>, ^version_byte}} ->
{:ok, struct(__MODULE__, pubkey_hash: pubkey_hash)}
{:ok, {<<_pubkey_hash::binary-20>>, version_byte}} ->
{:error, {:invalid_base58_check, version_byte, BSV.network()}}
_error ->
{:error, :invalid_address}
end
end
@doc """
Decodes the given `t:BSV.Address.address_str/0` into an `t:BSV.Address.t/0`.
As `from_string/1` but returns the result or raises an exception.
"""
@spec from_string!(address_str()) :: t()
def from_string!(address) when is_binary(address) do
case from_string(address) do
{:ok, address} ->
address
{:error, error} ->
raise BSV.DecodeError, error
end
end
@doc """
Encodes the given `t:BSV.Address.t/0` as a Base58Check encoded
`t:BSV.Address.address_str/0`.
## Example
iex> Address.to_string(@address)
"18cqNbEBxkAttxcZLuH9LWhZJPd1BNu1A5"
"""
@spec to_string(t()) :: address_str()
def to_string(%__MODULE__{pubkey_hash: pubkey_hash}) do
version_byte = @version_bytes[BSV.network()]
B58.encode58_check!(pubkey_hash, version_byte)
end
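  # Hedged round-trip sketch (a `pubkey` like the @pubkey attribute used in
  # the doctests above is assumed): encoding and decoding are inverses.
  #
  #     address = Address.from_pubkey(pubkey)
  #     encoded = Address.to_string(address)
  #     {:ok, ^address} = Address.from_string(encoded)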
end
|
lib/bsv/address.ex
| 0.923506
| 0.538559
|
address.ex
|
starcoder
|
defmodule D7 do
def parse(text) do
text
|> String.trim()
|> String.split("\n")
|> Enum.map(&String.split(&1, " contain "))
    |> Enum.map(fn [outer, inners] ->
      {String.trim_trailing(outer, " bags"),
inners
|> String.trim_trailing(".")
|> String.split(", ")
|> Enum.map(&String.replace(&1, [" bags", " bag"], ""))
|> Enum.map(&String.split(&1, " ", parts: 2))
|> Enum.map(fn
["no", _] -> []
[count, inner] -> {String.to_integer(count), inner}
end)
|> List.flatten()}
end)
end
def solve_p1(data) do
data
|> build_graph
|> do_solve_p1("shiny gold")
|> MapSet.size()
end
def solve_p2(data) do
data
|> build_graph
|> do_solve_p2("shiny gold", 1)
end
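  # Part 1: in the containment digraph an edge points from the containing bag
  # to the contained bag, so every bag that can eventually hold `v` is found
  # by walking in-neighbours recursively; collect them into a MapSet so each
  # colour is counted once.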
defp do_solve_p1(g, v) do
in_neighbours = :digraph.in_neighbours(g, v)
in_neighbours
|> MapSet.new()
|> MapSet.union(
in_neighbours
|> Enum.map(&do_solve_p1(g, &1))
|> Enum.reduce(MapSet.new(), &MapSet.union/2)
)
end
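  # Part 2: each out-edge carries the contained count as its label, so the
  # bags inside `v` total count * factor per edge, plus the contents of each
  # inner bag scaled by the new factor.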
defp do_solve_p2(g, v, factor) do
:digraph.out_edges(g, v)
|> Enum.map(&:digraph.edge(g, &1))
|> Enum.reduce(0, fn {_, _, v, [count: c]}, sum ->
sum + c * factor + do_solve_p2(g, v, c * factor)
end)
end
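  # Builds an Erlang :digraph with one vertex per bag colour and an edge
  # outer -> inner labelled with the contained count.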
defp build_graph(data) do
    for {outer, inners} <- data, {count, inner} <- inners do
      {outer, {count, inner}}
end
|> Enum.reduce(:digraph.new(), fn {outer, {count, inner}}, g ->
:digraph.add_vertex(g, outer)
:digraph.add_vertex(g, inner)
:digraph.add_edge(g, outer, inner, count: count)
g
end)
end
end
t = """
pale turquoise bags contain 3 muted cyan bags, 5 striped teal bags.
light tan bags contain 5 posh tomato bags.
shiny coral bags contain 2 muted bronze bags.
wavy orange bags contain 4 faded tomato bags.
light plum bags contain 3 drab orange bags, 4 faded coral bags.
pale purple bags contain 5 bright crimson bags.
bright blue bags contain 1 pale beige bag, 1 light teal bag.
pale bronze bags contain 1 dotted salmon bag, 1 striped blue bag, 2 clear tan bags.
muted maroon bags contain 5 pale crimson bags.
clear lavender bags contain 4 vibrant black bags, 2 posh red bags.
pale cyan bags contain 4 light olive bags, 2 dull lime bags, 4 faded black bags, 4 plaid red bags.
faded blue bags contain 1 posh tan bag, 1 dotted violet bag, 3 posh gold bags.
wavy teal bags contain 3 pale brown bags.
striped red bags contain 2 light bronze bags, 3 dark cyan bags.
drab brown bags contain 3 striped magenta bags, 3 clear silver bags.
posh salmon bags contain 4 bright purple bags, 5 mirrored green bags, 3 pale gold bags, 5 dull crimson bags.
light black bags contain 2 wavy coral bags.
striped tan bags contain 3 clear blue bags, 3 mirrored teal bags, 5 striped red bags.
posh plum bags contain 3 drab orange bags.
striped blue bags contain 4 bright violet bags, 5 dotted gray bags, 3 dotted violet bags, 1 dotted blue bag.
shiny white bags contain 4 dotted orange bags, 1 faded silver bag, 1 drab coral bag.
plaid maroon bags contain 3 light gold bags.
shiny fuchsia bags contain 2 dotted olive bags, 3 vibrant white bags, 3 dark salmon bags, 4 pale white bags.
muted bronze bags contain 4 vibrant bronze bags, 2 posh yellow bags, 1 shiny turquoise bag.
wavy cyan bags contain 2 striped crimson bags, 4 plaid tan bags.
vibrant indigo bags contain 4 pale gold bags, 3 posh gold bags, 1 drab red bag, 4 dull crimson bags.
drab tan bags contain 3 dark indigo bags, 3 striped black bags.
dull tan bags contain 3 drab blue bags, 3 pale green bags, 3 dotted red bags, 3 striped maroon bags.
dark red bags contain 4 bright chartreuse bags.
drab beige bags contain 5 bright teal bags, 1 faded cyan bag, 2 muted yellow bags, 1 dim lime bag.
dim black bags contain 5 wavy fuchsia bags, 3 muted tomato bags, 4 faded blue bags.
pale red bags contain 2 drab gray bags, 5 dull coral bags, 4 striped purple bags.
light cyan bags contain 4 mirrored gold bags, 3 vibrant bronze bags.
posh orange bags contain 2 dark silver bags, 3 striped chartreuse bags.
shiny cyan bags contain 5 dark turquoise bags.
vibrant cyan bags contain 2 light turquoise bags, 2 clear cyan bags, 4 dark cyan bags, 4 dotted orange bags.
vibrant gray bags contain 5 dark purple bags, 5 dark lime bags.
posh green bags contain 4 bright salmon bags, 2 muted tan bags.
posh tan bags contain 2 dotted indigo bags, 1 dull purple bag.
clear blue bags contain 5 dull purple bags.
wavy gray bags contain 4 pale cyan bags, 2 pale tomato bags.
posh red bags contain 4 dim green bags, 2 pale teal bags.
light tomato bags contain 5 dotted chartreuse bags.
faded yellow bags contain 3 plaid orange bags, 4 mirrored maroon bags.
dark black bags contain 1 faded gold bag, 3 striped purple bags, 2 dim teal bags.
light red bags contain 5 dark magenta bags, 3 striped purple bags.
dotted violet bags contain no other bags.
faded gold bags contain 1 dotted aqua bag, 2 light turquoise bags, 5 wavy violet bags.
dotted beige bags contain 5 vibrant turquoise bags, 5 clear maroon bags, 3 dim tomato bags, 4 pale maroon bags.
pale lavender bags contain 5 muted red bags, 3 dark teal bags, 3 faded black bags, 1 dim fuchsia bag.
clear tomato bags contain 3 dull white bags, 3 mirrored gold bags, 1 dark black bag.
vibrant silver bags contain 4 plaid orange bags, 2 shiny chartreuse bags, 3 dark salmon bags, 4 light silver bags.
plaid cyan bags contain 1 dark black bag.
drab green bags contain 1 plaid white bag.
posh turquoise bags contain 3 posh plum bags, 3 light gold bags, 1 bright crimson bag.
pale lime bags contain 3 pale olive bags, 3 vibrant chartreuse bags, 1 dotted tan bag, 5 striped cyan bags.
wavy fuchsia bags contain 3 shiny chartreuse bags, 3 vibrant tomato bags, 3 posh salmon bags, 1 light cyan bag.
shiny beige bags contain 1 muted orange bag, 3 clear olive bags.
posh tomato bags contain 5 muted tomato bags, 5 drab coral bags, 4 pale gold bags.
dark fuchsia bags contain 3 wavy blue bags, 5 faded indigo bags.
dotted green bags contain 5 dull plum bags, 5 muted lavender bags, 3 faded magenta bags, 4 clear white bags.
light salmon bags contain 2 muted purple bags, 5 shiny turquoise bags, 4 muted red bags, 5 posh red bags.
plaid purple bags contain 1 muted indigo bag, 4 pale silver bags, 4 dull crimson bags, 1 posh cyan bag.
light chartreuse bags contain 2 posh brown bags.
dotted black bags contain 3 dull gray bags, 5 muted gray bags, 5 pale maroon bags, 1 vibrant green bag.
mirrored teal bags contain 4 light gold bags, 5 striped maroon bags, 2 pale maroon bags.
dotted coral bags contain 3 dull teal bags.
shiny lime bags contain 3 mirrored gray bags.
shiny chartreuse bags contain no other bags.
clear red bags contain 4 dim tan bags, 4 dotted brown bags, 2 striped orange bags.
clear magenta bags contain 3 pale blue bags, 2 pale crimson bags.
faded black bags contain 5 dark turquoise bags.
wavy violet bags contain 5 light purple bags.
vibrant crimson bags contain 1 shiny gold bag, 1 dotted tomato bag, 1 plaid black bag, 1 drab olive bag.
plaid violet bags contain 1 shiny tan bag, 3 muted gray bags, 4 drab chartreuse bags.
vibrant lime bags contain 3 dark magenta bags, 2 dotted white bags, 4 muted tan bags.
faded violet bags contain 2 drab red bags.
plaid black bags contain 2 clear aqua bags, 2 wavy silver bags, 4 dim violet bags, 2 plaid red bags.
dull orange bags contain 1 clear tan bag, 1 plaid crimson bag, 1 pale chartreuse bag.
vibrant magenta bags contain 4 striped blue bags.
plaid fuchsia bags contain 1 vibrant lime bag, 4 faded indigo bags, 2 wavy fuchsia bags, 1 dim purple bag.
mirrored olive bags contain 1 dark teal bag, 1 pale brown bag, 1 light violet bag, 1 shiny yellow bag.
wavy plum bags contain 5 vibrant cyan bags, 1 pale gold bag, 2 wavy gray bags, 5 pale gray bags.
striped magenta bags contain 5 faded silver bags, 1 mirrored teal bag.
light indigo bags contain 1 light olive bag.
mirrored crimson bags contain 2 vibrant crimson bags.
muted crimson bags contain 2 plaid olive bags.
muted yellow bags contain 1 muted brown bag, 4 striped lavender bags, 1 bright violet bag.
posh lavender bags contain 2 wavy coral bags, 3 light tan bags.
striped indigo bags contain 4 dark brown bags.
dotted red bags contain 4 pale olive bags, 3 dark teal bags, 1 posh fuchsia bag.
plaid indigo bags contain 5 faded gold bags, 4 clear coral bags, 3 dull purple bags, 5 pale brown bags.
plaid magenta bags contain 5 faded black bags, 3 drab aqua bags, 5 vibrant green bags.
light coral bags contain 1 bright gray bag, 4 wavy orange bags, 2 drab coral bags, 1 dark coral bag.
clear brown bags contain 4 plaid aqua bags, 4 plaid coral bags, 5 drab red bags.
drab black bags contain 2 dotted white bags, 1 muted purple bag, 3 posh gold bags.
dull magenta bags contain 1 dull tomato bag, 4 posh gray bags, 4 wavy white bags, 1 pale cyan bag.
muted violet bags contain 3 posh chartreuse bags, 1 dotted magenta bag.
vibrant turquoise bags contain 4 plaid brown bags, 5 drab indigo bags, 4 mirrored green bags.
striped aqua bags contain 2 posh violet bags, 5 shiny blue bags, 3 pale tomato bags.
light teal bags contain 4 faded cyan bags, 2 clear turquoise bags.
clear turquoise bags contain 3 shiny aqua bags, 5 posh tomato bags.
dark maroon bags contain 2 shiny gold bags, 5 faded green bags.
vibrant beige bags contain 5 plaid turquoise bags, 2 shiny gold bags, 2 clear tan bags, 1 wavy black bag.
muted chartreuse bags contain 2 muted white bags, 2 striped tan bags, 1 muted brown bag, 5 posh lime bags.
pale maroon bags contain 5 pale teal bags, 4 dim violet bags, 5 posh teal bags.
vibrant white bags contain 5 light bronze bags, 1 wavy silver bag.
plaid olive bags contain 5 mirrored teal bags, 5 faded gray bags, 4 light olive bags.
dull silver bags contain 5 clear cyan bags, 1 dim tan bag, 5 dim black bags.
bright crimson bags contain 5 dotted indigo bags.
muted teal bags contain 3 striped aqua bags, 4 dotted gray bags, 2 bright salmon bags.
wavy salmon bags contain 5 posh tomato bags.
bright purple bags contain 4 light maroon bags, 2 dotted violet bags.
plaid green bags contain 3 faded black bags, 2 plaid red bags, 4 clear turquoise bags.
wavy gold bags contain 1 bright white bag.
drab violet bags contain 4 vibrant tomato bags.
posh maroon bags contain 3 clear tan bags, 3 light gold bags, 1 dim lime bag.
muted beige bags contain 4 clear white bags, 5 light maroon bags, 2 clear orange bags.
dull turquoise bags contain 4 posh blue bags, 1 mirrored green bag, 5 dotted orange bags, 5 wavy fuchsia bags.
shiny red bags contain 4 dark magenta bags.
light turquoise bags contain 3 dark cyan bags.
posh olive bags contain 3 clear green bags, 5 bright bronze bags, 5 light olive bags.
dim chartreuse bags contain 5 vibrant plum bags.
dark salmon bags contain 1 dotted orange bag, 3 light brown bags, 3 dotted chartreuse bags.
dull teal bags contain 4 wavy gold bags, 5 faded red bags, 4 light turquoise bags.
dark green bags contain 3 pale silver bags, 5 clear tan bags.
bright cyan bags contain 2 striped maroon bags, 1 clear silver bag, 1 dark maroon bag.
wavy crimson bags contain 3 wavy tan bags.
mirrored white bags contain 4 mirrored teal bags, 2 muted silver bags.
dark magenta bags contain 1 drab blue bag, 4 light white bags, 3 dark black bags.
light gold bags contain 4 posh red bags, 1 striped maroon bag, 5 bright purple bags, 4 dotted violet bags.
pale tomato bags contain 5 drab coral bags, 3 posh teal bags, 4 dotted blue bags.
dull purple bags contain 4 posh gold bags.
mirrored gold bags contain 3 mirrored teal bags, 1 striped maroon bag, 2 dotted indigo bags.
dotted turquoise bags contain 2 dim violet bags.
muted magenta bags contain 1 muted red bag.
bright maroon bags contain 3 wavy fuchsia bags, 2 dark magenta bags, 2 dim maroon bags, 1 dotted teal bag.
bright aqua bags contain 5 dim green bags, 2 striped tan bags, 1 faded olive bag.
striped silver bags contain 5 striped aqua bags, 2 striped purple bags, 3 dim blue bags, 3 faded olive bags.
light brown bags contain 4 posh red bags, 3 clear gold bags.
dark brown bags contain 4 clear olive bags.
dim crimson bags contain 4 dotted teal bags, 3 dark salmon bags.
wavy black bags contain 3 drab coral bags, 1 striped purple bag, 2 light brown bags, 4 plaid red bags.
striped coral bags contain 2 plaid blue bags, 5 drab tan bags, 5 light violet bags.
dull blue bags contain 3 pale gold bags, 1 posh crimson bag.
pale blue bags contain 4 dotted blue bags, 3 muted beige bags, 1 faded red bag.
mirrored yellow bags contain 4 muted cyan bags, 2 mirrored tan bags.
dim maroon bags contain 1 muted aqua bag.
drab plum bags contain 2 shiny magenta bags.
clear maroon bags contain 2 drab orange bags, 3 shiny red bags, 1 clear brown bag.
muted red bags contain 5 pale teal bags, 3 dim fuchsia bags, 1 light maroon bag.
dark plum bags contain 5 light salmon bags, 4 dim olive bags.
faded indigo bags contain 1 mirrored red bag, 3 faded lime bags.
bright coral bags contain 3 posh gold bags, 3 vibrant crimson bags.
dark indigo bags contain 5 faded silver bags, 2 dull tomato bags.
drab white bags contain 5 light silver bags.
vibrant violet bags contain 2 pale gray bags, 4 bright white bags, 3 light aqua bags.
mirrored maroon bags contain 1 pale cyan bag, 4 clear bronze bags.
wavy chartreuse bags contain 1 bright brown bag, 4 dim lime bags.
dull brown bags contain 3 bright olive bags.
vibrant yellow bags contain 3 shiny white bags, 2 clear blue bags.
posh bronze bags contain 1 light bronze bag.
dim aqua bags contain 5 wavy yellow bags, 3 muted purple bags, 3 pale crimson bags.
clear gray bags contain 4 faded coral bags, 1 striped violet bag, 5 pale crimson bags, 4 muted lavender bags.
muted plum bags contain 4 mirrored cyan bags.
dim lime bags contain 2 clear plum bags, 2 dim green bags, 5 posh tan bags.
dim red bags contain 2 plaid brown bags.
drab bronze bags contain 4 faded plum bags, 4 clear plum bags, 1 posh cyan bag, 1 dark cyan bag.
shiny purple bags contain 5 posh beige bags, 4 pale fuchsia bags, 2 wavy brown bags, 2 shiny maroon bags.
dull beige bags contain 1 mirrored indigo bag, 2 drab cyan bags, 1 dim fuchsia bag.
wavy blue bags contain 4 dotted maroon bags, 3 light maroon bags.
faded white bags contain 2 vibrant purple bags, 1 muted purple bag.
pale teal bags contain no other bags.
bright tan bags contain 3 clear indigo bags, 2 pale orange bags.
dull crimson bags contain 3 pale maroon bags, 3 vibrant bronze bags.
posh black bags contain 1 muted tomato bag.
vibrant maroon bags contain 5 mirrored crimson bags, 4 wavy beige bags.
dark lime bags contain 4 dim yellow bags, 1 pale beige bag, 1 vibrant beige bag.
bright white bags contain 2 dark turquoise bags.
faded plum bags contain 1 vibrant indigo bag, 5 dotted maroon bags, 1 vibrant bronze bag.
pale violet bags contain 2 pale lavender bags, 4 light brown bags, 5 vibrant tomato bags.
wavy purple bags contain 2 shiny tomato bags, 2 clear maroon bags, 3 posh bronze bags, 4 dull aqua bags.
plaid yellow bags contain 4 pale tomato bags, 2 dotted magenta bags, 5 wavy violet bags.
bright silver bags contain 5 light gold bags, 2 posh tan bags, 4 faded gray bags.
dark orange bags contain 4 dull yellow bags, 5 dull salmon bags.
wavy coral bags contain 5 dim lavender bags, 2 mirrored teal bags, 1 shiny chartreuse bag, 2 light gold bags.
pale aqua bags contain 1 faded plum bag, 5 vibrant plum bags.
dotted brown bags contain 2 light purple bags, 4 dim beige bags, 5 pale white bags.
plaid blue bags contain 1 shiny red bag, 5 light silver bags, 5 clear orange bags.
dim violet bags contain 3 posh teal bags.
dull yellow bags contain 5 muted aqua bags.
shiny yellow bags contain 5 muted aqua bags, 2 drab white bags, 5 muted purple bags.
dotted maroon bags contain 4 dim green bags, 2 faded silver bags.
bright brown bags contain 4 striped bronze bags.
posh crimson bags contain 2 posh red bags, 1 dotted indigo bag, 4 muted red bags.
dim orange bags contain 2 light tan bags, 4 dotted salmon bags.
mirrored beige bags contain 4 dim bronze bags, 5 vibrant salmon bags, 4 dim maroon bags.
shiny olive bags contain 5 dotted orange bags.
wavy tan bags contain 1 wavy brown bag, 1 faded silver bag.
mirrored magenta bags contain 5 drab teal bags, 3 striped bronze bags, 3 striped magenta bags, 5 dark tan bags.
muted brown bags contain 1 light indigo bag, 4 dotted blue bags.
vibrant aqua bags contain 1 shiny red bag, 5 wavy gold bags.
dark violet bags contain 2 dim orange bags, 5 dark purple bags, 2 pale yellow bags.
plaid teal bags contain 3 vibrant bronze bags.
shiny gold bags contain 5 drab red bags, 2 mirrored green bags, 2 muted tomato bags, 1 striped magenta bag.
wavy bronze bags contain 2 vibrant green bags, 2 plaid orange bags, 2 vibrant orange bags.
dark tomato bags contain 4 posh indigo bags.
drab red bags contain no other bags.
clear silver bags contain 1 plaid olive bag.
striped cyan bags contain 3 wavy silver bags, 2 faded indigo bags.
dark purple bags contain 1 wavy violet bag, 5 clear olive bags, 3 drab indigo bags, 5 striped purple bags.
dotted plum bags contain 4 vibrant purple bags, 3 muted lavender bags, 1 wavy coral bag.
posh yellow bags contain 5 light salmon bags, 2 light bronze bags.
dim tomato bags contain 3 pale white bags.
drab lime bags contain 3 drab chartreuse bags, 4 clear silver bags, 4 drab aqua bags.
plaid coral bags contain 2 bright olive bags.
mirrored salmon bags contain 3 plaid green bags.
faded chartreuse bags contain 2 light cyan bags, 5 pale tomato bags.
pale crimson bags contain 5 bright white bags, 3 shiny turquoise bags.
pale brown bags contain 4 bright silver bags.
shiny bronze bags contain 5 dull chartreuse bags, 4 dotted gray bags, 3 shiny blue bags, 1 dull blue bag.
faded red bags contain 2 muted gray bags.
dark lavender bags contain 5 dim coral bags, 4 muted gray bags, 1 shiny yellow bag.
faded purple bags contain 2 dotted white bags.
mirrored silver bags contain 3 pale maroon bags, 2 pale cyan bags, 4 dark chartreuse bags, 3 bright plum bags.
dull violet bags contain 1 dim coral bag, 3 wavy lavender bags.
faded beige bags contain 4 plaid bronze bags, 1 light salmon bag, 2 light brown bags.
vibrant orange bags contain 1 faded cyan bag, 2 vibrant olive bags, 2 bright plum bags.
vibrant purple bags contain 5 dotted orange bags, 1 striped aqua bag, 4 clear white bags, 3 dim olive bags.
faded crimson bags contain 1 clear lavender bag, 3 dim lavender bags, 3 dim cyan bags, 2 wavy tan bags.
dark yellow bags contain 1 plaid silver bag, 3 wavy maroon bags.
vibrant brown bags contain 5 dotted chartreuse bags, 4 clear silver bags, 4 dull lavender bags.
wavy lavender bags contain 4 dull gray bags.
bright green bags contain 5 striped gold bags.
faded magenta bags contain 5 dull beige bags.
posh white bags contain 2 dark coral bags.
muted green bags contain 1 wavy lavender bag, 1 striped aqua bag.
plaid turquoise bags contain 1 striped brown bag, 4 mirrored maroon bags.
plaid lavender bags contain 4 striped tan bags, 2 posh brown bags, 5 shiny brown bags.
pale fuchsia bags contain 5 light brown bags, 3 vibrant lime bags.
light gray bags contain 3 drab violet bags.
dim green bags contain no other bags.
light olive bags contain 4 vibrant bronze bags.
dotted aqua bags contain 5 muted red bags.
vibrant fuchsia bags contain 5 dull crimson bags, 5 dotted violet bags.
clear crimson bags contain 5 dotted tomato bags, 3 posh crimson bags, 5 vibrant magenta bags.
pale orange bags contain 5 mirrored indigo bags, 5 muted purple bags, 4 plaid orange bags.
mirrored lavender bags contain 4 dotted orange bags, 3 posh violet bags.
dotted tan bags contain 5 mirrored gray bags.
dim bronze bags contain 3 mirrored olive bags, 3 plaid magenta bags, 5 dim black bags, 2 drab blue bags.
clear olive bags contain 3 bright purple bags, 4 dim lime bags, 5 dim fuchsia bags.
dotted blue bags contain 1 dim green bag, 3 drab red bags, 2 posh gold bags.
shiny gray bags contain 3 pale lavender bags, 1 clear gold bag, 2 drab violet bags, 2 clear bronze bags.
bright plum bags contain 3 muted yellow bags, 4 posh chartreuse bags, 3 posh brown bags, 3 dim orange bags.
shiny violet bags contain 5 dim coral bags.
posh lime bags contain 3 plaid silver bags.
light beige bags contain 5 pale yellow bags, 3 light bronze bags, 5 pale turquoise bags.
drab gray bags contain 4 bright purple bags, 5 faded gold bags, 2 dim green bags.
muted aqua bags contain 4 clear white bags.
mirrored blue bags contain 2 vibrant green bags, 2 drab gray bags.
posh magenta bags contain 2 striped magenta bags, 5 dim cyan bags, 5 plaid orange bags, 1 wavy black bag.
dim yellow bags contain 2 muted purple bags, 1 striped black bag, 3 wavy coral bags.
dull indigo bags contain 3 posh fuchsia bags, 1 dotted beige bag.
posh gray bags contain 5 drab black bags.
dark blue bags contain 4 dim gold bags, 3 drab olive bags, 1 light cyan bag, 2 light tomato bags.
mirrored red bags contain 5 shiny white bags, 1 mirrored green bag, 4 wavy black bags, 1 dark brown bag.
mirrored violet bags contain 3 dim tan bags, 4 dark fuchsia bags, 4 pale turquoise bags.
shiny black bags contain 5 clear orange bags, 2 vibrant silver bags, 2 plaid maroon bags, 3 light olive bags.
vibrant chartreuse bags contain 4 dull salmon bags, 3 bright beige bags, 1 faded blue bag, 2 plaid brown bags.
dull cyan bags contain 3 faded green bags.
bright gold bags contain 3 posh plum bags.
vibrant olive bags contain 5 mirrored coral bags, 3 dotted lime bags, 5 drab blue bags, 2 dotted green bags.
faded maroon bags contain 4 mirrored green bags, 2 light lime bags, 3 light bronze bags.
vibrant red bags contain 2 posh lime bags, 1 dull maroon bag.
dull bronze bags contain 5 vibrant chartreuse bags.
vibrant bronze bags contain 2 vibrant tomato bags, 3 mirrored teal bags.
pale black bags contain 1 muted silver bag, 5 mirrored teal bags, 2 shiny blue bags.
dull maroon bags contain 1 clear cyan bag.
dotted crimson bags contain 5 posh black bags, 1 dotted teal bag, 4 vibrant salmon bags, 4 shiny silver bags.
striped tomato bags contain 5 drab tomato bags, 2 faded coral bags, 2 dim salmon bags.
bright bronze bags contain 5 plaid red bags, 4 striped yellow bags.
dark aqua bags contain 4 bright purple bags, 1 striped gold bag.
striped teal bags contain 4 striped black bags, 3 clear indigo bags.
dark olive bags contain 2 pale cyan bags, 5 mirrored tan bags.
dark teal bags contain 1 posh gold bag, 1 plaid orange bag, 1 vibrant bronze bag, 1 mirrored teal bag.
faded green bags contain 5 vibrant fuchsia bags, 3 dim olive bags.
posh brown bags contain 4 drab lime bags, 2 mirrored fuchsia bags, 3 shiny lime bags, 2 dim violet bags.
striped lavender bags contain 2 plaid red bags, 5 dark brown bags, 3 clear turquoise bags.
shiny indigo bags contain 2 striped lavender bags, 1 light gray bag.
plaid orange bags contain 1 pale teal bag, 5 dim violet bags, 5 vibrant bronze bags, 3 light maroon bags.
dull salmon bags contain 1 striped bronze bag, 4 shiny aqua bags, 4 dark brown bags.
plaid white bags contain 1 dim green bag.
drab lavender bags contain 2 dotted maroon bags, 3 pale aqua bags, 1 light olive bag.
striped olive bags contain 5 striped magenta bags.
mirrored gray bags contain 2 dim lavender bags, 2 shiny chartreuse bags.
dull coral bags contain 1 mirrored gold bag, 5 clear gold bags, 5 clear olive bags, 2 posh tomato bags.
pale beige bags contain 2 dark indigo bags, 4 dim beige bags.
posh teal bags contain 1 dim green bag, 3 dim fuchsia bags, 1 pale teal bag, 2 dotted indigo bags.
mirrored turquoise bags contain 4 dim olive bags, 2 plaid tomato bags.
dim turquoise bags contain 2 bright yellow bags, 1 striped lavender bag.
light violet bags contain 4 faded gold bags, 3 clear plum bags, 1 dark teal bag.
dark cyan bags contain 3 drab red bags, 4 pale maroon bags.
shiny crimson bags contain 5 dotted turquoise bags, 1 vibrant fuchsia bag, 5 dotted lime bags, 2 wavy green bags.
muted lime bags contain 2 light brown bags, 5 plaid tomato bags, 4 plaid aqua bags.
vibrant tan bags contain 4 wavy fuchsia bags.
wavy maroon bags contain 4 dull white bags, 5 dark crimson bags, 5 mirrored salmon bags, 4 vibrant purple bags.
muted indigo bags contain 5 shiny red bags.
clear violet bags contain 2 plaid green bags.
drab teal bags contain 2 pale cyan bags, 1 shiny turquoise bag.
wavy green bags contain 5 plaid indigo bags, 3 muted silver bags, 5 light brown bags.
striped crimson bags contain 3 shiny aqua bags.
drab purple bags contain 3 drab orange bags, 3 dark aqua bags, 1 bright lavender bag.
plaid aqua bags contain 1 plaid tomato bag.
striped green bags contain 2 vibrant tomato bags, 2 faded plum bags.
drab tomato bags contain 4 posh black bags, 3 dull brown bags, 1 drab cyan bag.
dotted bronze bags contain 1 clear indigo bag.
pale white bags contain 5 mirrored green bags, 2 dark turquoise bags, 3 dull olive bags, 4 drab indigo bags.
drab yellow bags contain 2 dotted orange bags, 4 light turquoise bags, 4 light salmon bags, 2 dotted tomato bags.
pale green bags contain 3 muted gold bags, 3 shiny turquoise bags.
dark tan bags contain 4 muted cyan bags, 5 dotted tomato bags, 2 dark indigo bags.
plaid tan bags contain 4 vibrant olive bags, 1 plaid aqua bag, 3 dotted coral bags, 4 bright violet bags.
shiny aqua bags contain 2 dull brown bags, 1 vibrant cyan bag, 2 dim lime bags, 5 light bronze bags.
bright turquoise bags contain 2 plaid yellow bags, 3 posh lavender bags, 1 pale yellow bag.
vibrant gold bags contain 1 clear tomato bag, 4 clear turquoise bags, 4 dark bronze bags.
posh cyan bags contain 3 dotted green bags, 5 plaid tomato bags, 3 wavy crimson bags, 2 striped olive bags.
clear salmon bags contain 3 plaid fuchsia bags, 5 muted bronze bags, 5 dull green bags, 2 pale brown bags.
drab maroon bags contain 5 dull crimson bags, 2 shiny white bags, 5 light purple bags.
clear tan bags contain 5 dull lime bags, 5 muted red bags, 2 clear cyan bags.
pale gray bags contain 3 shiny white bags.
vibrant teal bags contain 1 clear green bag, 1 dull beige bag.
vibrant plum bags contain 4 muted red bags, 2 faded blue bags, 5 vibrant tomato bags.
dark coral bags contain 2 plaid tomato bags, 1 bright yellow bag, 2 mirrored gray bags.
wavy indigo bags contain 1 mirrored tan bag, 1 wavy lavender bag.
dotted fuchsia bags contain 2 vibrant silver bags, 3 mirrored tan bags.
muted gold bags contain 3 muted red bags, 1 clear olive bag.
striped fuchsia bags contain 2 clear aqua bags, 4 mirrored coral bags, 3 muted gray bags, 2 dark beige bags.
striped maroon bags contain no other bags.
bright lavender bags contain 2 faded indigo bags, 1 dotted violet bag, 5 posh tomato bags, 3 clear indigo bags.
posh gold bags contain 1 dim violet bag, 2 shiny chartreuse bags.
drab olive bags contain 4 faded silver bags.
pale chartreuse bags contain 4 striped teal bags.
mirrored lime bags contain 5 dotted plum bags, 1 light yellow bag, 3 pale fuchsia bags.
clear bronze bags contain 4 plaid brown bags.
wavy tomato bags contain 4 faded lavender bags, 3 dull aqua bags, 1 drab green bag, 3 vibrant gray bags.
dim brown bags contain 4 dull teal bags, 2 vibrant black bags, 1 mirrored gold bag.
bright fuchsia bags contain 4 faded lavender bags, 1 dull crimson bag, 1 mirrored brown bag, 5 dark indigo bags.
muted gray bags contain 1 dull olive bag.
dull white bags contain 1 light maroon bag, 4 dark lavender bags, 2 posh red bags.
dull black bags contain 5 dark green bags, 4 bright lime bags, 4 mirrored gray bags.
dull tomato bags contain 4 dotted tomato bags.
pale coral bags contain 1 mirrored white bag, 5 clear aqua bags, 4 dim blue bags.
posh indigo bags contain 2 bright bronze bags.
plaid gold bags contain 4 dark maroon bags, 4 shiny lavender bags, 1 plaid tomato bag, 3 bright yellow bags.
plaid gray bags contain 4 muted bronze bags, 2 posh chartreuse bags, 5 pale tomato bags, 3 drab coral bags.
dim beige bags contain 4 dim blue bags, 4 dark lavender bags.
bright olive bags contain no other bags.
clear aqua bags contain 4 wavy fuchsia bags, 5 dim green bags.
posh violet bags contain 2 vibrant indigo bags, 3 posh tomato bags, 4 clear gold bags, 5 dim green bags.
faded coral bags contain 4 light purple bags, 4 mirrored salmon bags, 5 pale maroon bags.
dotted chartreuse bags contain 2 clear aqua bags, 4 plaid coral bags.
striped orange bags contain 5 bright tan bags, 5 pale white bags, 5 mirrored lavender bags.
bright tomato bags contain 5 muted white bags.
shiny silver bags contain 1 dotted orange bag, 2 light olive bags, 1 striped gold bag.
striped yellow bags contain 1 shiny turquoise bag.
faded aqua bags contain 5 shiny cyan bags, 3 dotted indigo bags, 4 faded fuchsia bags.
striped beige bags contain 1 bright white bag, 5 dim lavender bags, 5 striped black bags, 1 wavy black bag.
shiny orange bags contain 4 dark aqua bags.
striped white bags contain 3 vibrant fuchsia bags, 1 dotted teal bag, 5 dotted green bags, 2 shiny white bags.
bright black bags contain 5 pale blue bags, 2 drab teal bags, 1 dull gray bag.
shiny lavender bags contain 1 pale gold bag, 2 bright crimson bags, 2 pale maroon bags.
shiny maroon bags contain 2 wavy white bags, 2 muted aqua bags, 3 plaid gold bags.
drab cyan bags contain 4 posh crimson bags, 5 drab red bags, 5 bright purple bags.
dark bronze bags contain 5 posh teal bags.
shiny turquoise bags contain 2 shiny gold bags, 5 mirrored teal bags, 5 mirrored gray bags, 1 drab cyan bag.
dark turquoise bags contain 1 dim violet bag, 5 mirrored teal bags.
light lime bags contain 4 drab chartreuse bags.
light yellow bags contain 5 wavy olive bags, 2 wavy gray bags, 4 bright red bags, 5 shiny violet bags.
posh aqua bags contain 3 vibrant salmon bags.
drab silver bags contain 3 pale tan bags.
pale tan bags contain 3 wavy white bags.
light white bags contain 4 wavy black bags, 2 dark teal bags, 2 faded blue bags.
shiny teal bags contain 5 wavy gold bags.
shiny tomato bags contain 3 faded violet bags.
wavy brown bags contain 1 dim black bag, 1 bright yellow bag.
dim tan bags contain 2 clear teal bags, 5 drab teal bags, 4 posh lime bags.
faded cyan bags contain 1 pale plum bag, 4 posh gold bags, 4 posh yellow bags.
dotted magenta bags contain 4 drab tomato bags, 5 drab yellow bags, 2 clear maroon bags.
clear green bags contain 5 striped maroon bags, 4 shiny aqua bags.
clear fuchsia bags contain 5 dotted chartreuse bags, 5 pale plum bags, 2 muted red bags.
bright lime bags contain 3 dark salmon bags, 3 bright cyan bags, 4 striped black bags, 4 posh violet bags.
bright violet bags contain 1 light olive bag, 2 dark coral bags, 1 dull beige bag, 5 plaid maroon bags.
vibrant lavender bags contain 5 dim green bags, 1 plaid violet bag, 4 dotted coral bags.
wavy silver bags contain 2 mirrored green bags, 4 clear olive bags, 5 dark beige bags, 5 plaid orange bags.
clear orange bags contain 4 dotted orange bags, 3 bright silver bags, 5 dotted tomato bags, 4 striped purple bags.
light crimson bags contain 5 striped teal bags, 1 striped coral bag, 1 pale tomato bag, 2 dark crimson bags.
bright gray bags contain 3 posh tan bags.
mirrored indigo bags contain 1 mirrored green bag.
dull chartreuse bags contain 4 pale gold bags, 2 drab lavender bags, 3 shiny cyan bags.
pale salmon bags contain 5 drab purple bags, 2 dark olive bags, 4 mirrored silver bags.
bright indigo bags contain 5 striped beige bags, 5 shiny lime bags.
dim lavender bags contain 5 striped maroon bags.
bright magenta bags contain 2 plaid coral bags, 5 shiny aqua bags, 1 light purple bag.
muted purple bags contain 4 shiny turquoise bags, 1 shiny chartreuse bag, 3 muted tomato bags, 1 dotted aqua bag.
muted cyan bags contain 4 wavy black bags, 2 faded plum bags, 1 dull coral bag, 3 light tomato bags.
shiny plum bags contain 5 pale gray bags, 3 vibrant aqua bags.
dull lime bags contain 4 drab cyan bags, 1 posh gold bag, 4 bright purple bags, 3 posh tan bags.
faded teal bags contain 3 bright purple bags, 4 dotted magenta bags, 4 plaid olive bags.
clear chartreuse bags contain 2 dark brown bags, 1 pale lavender bag, 2 dark coral bags.
dotted tomato bags contain 1 dim green bag, 2 posh tomato bags.
pale silver bags contain 2 pale teal bags, 4 light purple bags, 4 bright yellow bags, 4 clear plum bags.
posh purple bags contain 2 faded tan bags, 3 clear aqua bags, 4 striped lavender bags, 3 dark teal bags.
striped bronze bags contain 4 drab red bags, 5 mirrored gray bags.
striped black bags contain 3 wavy coral bags, 3 faded blue bags, 5 bright olive bags, 2 dark bronze bags.
dark chartreuse bags contain 5 posh tomato bags.
muted white bags contain 2 mirrored gray bags, 5 dark cyan bags, 3 dotted indigo bags.
clear indigo bags contain 3 dark coral bags, 1 pale green bag, 2 plaid orange bags, 4 dim lime bags.
faded tan bags contain 3 dull purple bags, 2 dim orange bags.
clear yellow bags contain 1 vibrant fuchsia bag, 5 faded silver bags, 5 faded black bags.
dark gray bags contain 5 striped cyan bags.
clear plum bags contain 2 drab indigo bags, 5 pale maroon bags.
posh fuchsia bags contain 2 muted lavender bags, 5 posh red bags.
vibrant black bags contain 1 posh gold bag, 1 shiny white bag.
dim salmon bags contain 5 muted tan bags, 2 muted green bags, 2 pale bronze bags.
faded brown bags contain 3 dim tan bags.
mirrored brown bags contain 1 drab bronze bag, 3 wavy coral bags, 4 posh fuchsia bags.
dim gold bags contain 2 mirrored lavender bags, 5 pale gray bags.
faded fuchsia bags contain 3 wavy lavender bags, 5 shiny blue bags, 4 muted tomato bags.
mirrored chartreuse bags contain 2 faded aqua bags, 4 dark coral bags, 4 wavy beige bags, 5 dark orange bags.
muted fuchsia bags contain 3 light olive bags.
dotted silver bags contain 5 dotted turquoise bags, 3 dark cyan bags, 2 plaid red bags.
plaid lime bags contain 5 dull blue bags.
dim gray bags contain 4 striped magenta bags, 3 dotted indigo bags, 2 dim violet bags, 3 light olive bags.
wavy lime bags contain 2 bright salmon bags, 3 shiny cyan bags, 4 light gray bags, 4 shiny plum bags.
striped lime bags contain 5 posh teal bags.
dull red bags contain 3 mirrored tan bags, 3 dim tomato bags, 5 striped crimson bags.
faded lavender bags contain 1 bright indigo bag, 1 dim purple bag, 5 mirrored gray bags, 4 clear cyan bags.
wavy aqua bags contain 1 wavy gray bag, 3 dark crimson bags.
faded turquoise bags contain 1 drab plum bag, 5 dull gray bags, 4 plaid black bags, 1 wavy crimson bag.
dotted salmon bags contain 4 posh teal bags.
clear coral bags contain 5 drab tomato bags.
vibrant blue bags contain 4 dim lavender bags, 4 dark cyan bags.
muted coral bags contain 5 vibrant olive bags, 1 clear plum bag, 1 clear blue bag.
vibrant coral bags contain 5 vibrant silver bags, 2 plaid brown bags, 4 wavy brown bags.
mirrored aqua bags contain 4 bright lavender bags, 4 striped lavender bags, 1 posh fuchsia bag.
clear gold bags contain 1 posh tan bag, 1 dark beige bag, 5 striped gold bags.
dull aqua bags contain 4 dull plum bags, 2 light indigo bags.
clear cyan bags contain 5 light maroon bags, 5 posh tan bags, 3 dim lavender bags.
dotted olive bags contain 1 mirrored maroon bag, 2 dotted red bags, 4 drab lime bags.
wavy yellow bags contain 4 light silver bags, 4 dotted orange bags.
faded salmon bags contain 1 shiny lavender bag, 4 muted tomato bags, 3 plaid coral bags, 3 pale green bags.
dim blue bags contain 2 dim black bags.
faded tomato bags contain 3 shiny magenta bags.
light magenta bags contain 5 dim olive bags, 3 muted lavender bags.
muted turquoise bags contain 4 posh gold bags, 2 wavy beige bags, 3 posh magenta bags.
light purple bags contain 3 light maroon bags.
pale indigo bags contain 2 light green bags, 5 plaid bronze bags.
dim fuchsia bags contain no other bags.
plaid chartreuse bags contain 3 shiny silver bags, 1 posh teal bag.
plaid salmon bags contain 5 drab lime bags, 4 light aqua bags, 2 striped tan bags.
drab coral bags contain no other bags.
dotted gold bags contain 2 clear teal bags, 2 posh salmon bags, 1 plaid green bag, 5 muted tomato bags.
dull olive bags contain 2 dim lavender bags.
dotted purple bags contain 5 vibrant white bags, 5 wavy black bags.
dark crimson bags contain 4 posh chartreuse bags, 3 muted green bags, 3 dull plum bags, 5 muted beige bags.
mirrored tomato bags contain 2 clear crimson bags, 4 mirrored indigo bags, 2 muted black bags, 2 dark gray bags.
muted blue bags contain 5 pale brown bags.
wavy beige bags contain 5 plaid red bags.
muted black bags contain 3 muted magenta bags, 2 clear tomato bags, 1 pale red bag.
mirrored bronze bags contain 5 bright olive bags, 5 vibrant cyan bags, 2 drab cyan bags.
vibrant green bags contain 2 dull brown bags, 4 wavy white bags, 3 pale teal bags, 4 dark bronze bags.
dotted orange bags contain 5 light gold bags, 5 vibrant tomato bags, 3 light silver bags, 4 drab cyan bags.
clear purple bags contain 4 dull fuchsia bags.
mirrored green bags contain 2 muted red bags, 2 dim lavender bags.
wavy red bags contain 1 posh yellow bag, 2 shiny coral bags.
drab chartreuse bags contain 5 dull purple bags, 2 bright purple bags, 3 faded silver bags, 4 muted lavender bags.
dim indigo bags contain 1 light red bag, 4 wavy olive bags.
drab crimson bags contain 5 muted indigo bags, 5 vibrant crimson bags.
wavy olive bags contain 3 light black bags, 2 wavy plum bags.
dark silver bags contain 4 dull fuchsia bags, 3 dotted chartreuse bags.
pale olive bags contain 1 dark purple bag, 1 drab yellow bag, 1 vibrant coral bag.
posh silver bags contain 4 faded magenta bags, 5 muted coral bags, 4 posh cyan bags, 2 faded gray bags.
clear white bags contain 4 shiny blue bags.
light silver bags contain 3 mirrored green bags, 2 muted red bags, 1 muted tomato bag, 3 clear olive bags.
faded gray bags contain 5 dim green bags, 5 pale teal bags, 4 posh crimson bags, 3 dotted indigo bags.
clear teal bags contain 2 posh chartreuse bags, 2 posh blue bags.
dull green bags contain 5 muted tan bags, 3 faded gray bags, 2 dark tan bags.
dotted yellow bags contain 1 dim aqua bag, 5 dotted blue bags, 1 plaid teal bag, 2 dim salmon bags.
dotted indigo bags contain 1 muted red bag.
pale gold bags contain 1 mirrored green bag, 2 faded gray bags, 4 drab olive bags.
pale magenta bags contain 1 pale gray bag.
drab orange bags contain 4 dull beige bags, 1 dim gray bag.
light maroon bags contain no other bags.
dim silver bags contain 2 dull olive bags, 2 muted lavender bags, 5 dark fuchsia bags, 5 dotted tan bags.
shiny magenta bags contain 5 light purple bags.
shiny green bags contain 2 dim green bags, 1 pale plum bag, 2 striped teal bags.
drab magenta bags contain 1 plaid yellow bag, 3 bright crimson bags, 4 shiny salmon bags.
shiny salmon bags contain 4 dark bronze bags, 1 pale aqua bag, 5 posh red bags, 2 light gold bags.
mirrored cyan bags contain 2 bright olive bags, 2 bright aqua bags, 4 shiny turquoise bags.
drab aqua bags contain 1 drab olive bag, 5 shiny white bags, 2 dim gray bags.
wavy white bags contain 1 plaid red bag.
clear black bags contain 1 dim fuchsia bag, 5 pale white bags, 3 drab fuchsia bags.
dotted cyan bags contain 3 wavy aqua bags, 4 shiny brown bags, 4 faded tan bags.
dim magenta bags contain 4 striped orange bags, 2 mirrored turquoise bags, 3 vibrant turquoise bags, 3 pale chartreuse bags.
faded silver bags contain 5 dim fuchsia bags, 2 bright purple bags.
faded olive bags contain 3 dull lavender bags, 2 striped salmon bags, 1 bright yellow bag.
faded lime bags contain 4 posh tan bags, 4 dotted lavender bags, 3 striped magenta bags.
dark white bags contain 2 bright beige bags, 3 shiny chartreuse bags.
striped brown bags contain 5 muted lavender bags.
dotted lime bags contain 1 mirrored green bag, 4 dotted chartreuse bags, 2 shiny cyan bags, 1 bright purple bag.
bright beige bags contain 4 dull gray bags, 3 wavy violet bags, 5 light silver bags, 5 drab white bags.
dark beige bags contain 3 light gold bags, 1 muted tomato bag, 4 pale teal bags, 4 posh crimson bags.
dim olive bags contain 4 dark cyan bags.
plaid plum bags contain 1 clear tan bag, 4 posh brown bags.
wavy magenta bags contain 4 dotted plum bags, 2 dull tan bags.
drab gold bags contain 4 dark cyan bags, 2 clear yellow bags.
muted orange bags contain 2 faded tan bags, 5 vibrant salmon bags.
pale yellow bags contain 3 light turquoise bags, 3 plaid maroon bags, 2 dull salmon bags.
plaid tomato bags contain 2 dark beige bags.
dim purple bags contain 5 posh salmon bags, 2 dim lime bags, 2 dotted white bags.
bright red bags contain 4 dark teal bags, 3 shiny cyan bags.
bright salmon bags contain 4 pale aqua bags, 3 clear orange bags, 3 plaid black bags, 5 faded aqua bags.
plaid crimson bags contain 4 plaid tan bags, 4 dim aqua bags.
striped violet bags contain 2 dotted olive bags, 2 dotted red bags, 4 shiny gold bags.
vibrant tomato bags contain 3 dim lime bags.
drab turquoise bags contain 5 shiny purple bags, 1 light green bag, 1 pale chartreuse bag.
wavy turquoise bags contain 1 dark teal bag, 5 shiny fuchsia bags, 4 muted brown bags, 4 bright green bags.
striped plum bags contain 3 bright chartreuse bags, 1 dotted violet bag, 1 posh maroon bag.
mirrored plum bags contain 4 plaid indigo bags, 5 dotted white bags.
muted salmon bags contain 5 faded magenta bags, 3 plaid blue bags.
shiny brown bags contain 3 drab salmon bags.
drab blue bags contain 3 drab olive bags, 5 muted red bags, 2 bright purple bags.
striped gray bags contain 3 clear olive bags, 2 muted coral bags.
bright yellow bags contain 2 dull olive bags, 5 dark turquoise bags, 5 posh teal bags.
dim plum bags contain 2 dim cyan bags, 5 vibrant crimson bags.
muted lavender bags contain 2 clear bronze bags.
muted silver bags contain 3 vibrant salmon bags, 5 muted cyan bags, 1 dotted black bag.
mirrored orange bags contain 2 dull olive bags, 4 striped beige bags, 3 shiny aqua bags, 2 striped salmon bags.
dim teal bags contain 2 drab blue bags.
plaid brown bags contain 1 pale maroon bag, 4 light salmon bags, 1 vibrant indigo bag, 5 clear cyan bags.
bright chartreuse bags contain 4 plaid coral bags, 2 dull crimson bags, 3 plaid aqua bags, 2 faded blue bags.
dotted gray bags contain 1 dotted indigo bag, 2 posh crimson bags.
drab indigo bags contain 1 dotted violet bag, 1 dim fuchsia bag, 4 muted red bags, 4 striped maroon bags.
plaid bronze bags contain 3 drab plum bags, 1 posh violet bag, 2 dark tan bags, 3 plaid white bags.
striped salmon bags contain 3 clear brown bags.
dull lavender bags contain 4 pale green bags.
dull gray bags contain 4 drab indigo bags, 4 light salmon bags, 2 plaid coral bags, 3 striped magenta bags.
mirrored purple bags contain 2 vibrant brown bags, 1 plaid teal bag, 4 drab red bags, 4 plaid turquoise bags.
dull gold bags contain 3 drab teal bags.
light fuchsia bags contain 2 clear tan bags, 1 posh tan bag.
dim white bags contain 1 bright beige bag.
light green bags contain 1 dotted red bag, 4 muted gray bags, 5 dotted orange bags, 3 dim chartreuse bags.
bright teal bags contain 5 bright bronze bags, 2 pale green bags.
striped purple bags contain 1 dark beige bag.
muted tan bags contain 4 pale silver bags, 2 bright lavender bags, 4 drab cyan bags.
light blue bags contain 5 drab white bags, 1 pale olive bag.
dim cyan bags contain 1 wavy fuchsia bag, 5 posh teal bags.
striped chartreuse bags contain 4 bright beige bags, 1 muted lavender bag.
light lavender bags contain 2 posh lavender bags, 1 dim lavender bag.
striped gold bags contain 1 posh salmon bag, 3 mirrored gray bags, 1 faded silver bag.
light orange bags contain 4 dark black bags.
plaid silver bags contain 5 posh salmon bags, 3 vibrant tomato bags.
dotted white bags contain 3 dim fuchsia bags, 4 shiny gold bags, 2 bright olive bags, 4 muted purple bags.
faded bronze bags contain 3 pale green bags, 3 light yellow bags, 1 clear teal bag.
striped turquoise bags contain 4 mirrored aqua bags, 2 wavy orange bags, 1 pale lavender bag, 4 drab aqua bags.
clear lime bags contain 1 plaid green bag, 3 pale gold bags, 2 bright gray bags.
drab salmon bags contain 3 dull tomato bags.
plaid red bags contain 5 pale brown bags.
posh coral bags contain 2 dim violet bags, 4 dotted teal bags, 2 plaid red bags, 4 muted green bags.
light bronze bags contain 4 light purple bags.
faded orange bags contain 2 light teal bags.
dotted lavender bags contain 2 light olive bags, 3 muted tomato bags.
bright orange bags contain 3 light gray bags, 4 striped purple bags, 5 dull tomato bags.
mirrored coral bags contain 3 faded gray bags, 5 pale green bags, 4 pale aqua bags, 4 muted bronze bags.
dull plum bags contain 4 posh crimson bags, 4 clear cyan bags, 4 shiny white bags, 2 dotted maroon bags.
shiny blue bags contain 5 dim lime bags, 2 dim gray bags, 5 dark cyan bags, 3 posh teal bags.
mirrored fuchsia bags contain 4 muted tomato bags, 5 dotted chartreuse bags, 1 light red bag, 2 bright yellow bags.
dotted teal bags contain 1 dotted gray bag, 1 muted brown bag.
mirrored tan bags contain 2 clear aqua bags, 4 dim violet bags, 1 wavy gray bag.
posh chartreuse bags contain 2 faded blue bags, 4 dark coral bags, 2 light maroon bags, 5 dark purple bags.
shiny tan bags contain 1 wavy salmon bag, 2 shiny red bags, 5 clear coral bags, 3 wavy gold bags.
vibrant salmon bags contain 3 clear brown bags, 3 pale gold bags, 5 clear blue bags.
plaid beige bags contain 5 vibrant lavender bags, 2 dim brown bags, 4 dull yellow bags.
muted olive bags contain 3 vibrant blue bags, 5 shiny crimson bags, 5 pale beige bags, 2 dotted chartreuse bags.
clear beige bags contain 4 drab coral bags, 4 dark maroon bags, 1 light indigo bag.
dull fuchsia bags contain 2 pale magenta bags, 1 dotted indigo bag.
dark gold bags contain 3 posh crimson bags, 3 mirrored lavender bags.
pale plum bags contain 1 light bronze bag, 5 dotted violet bags, 2 dark salmon bags.
drab fuchsia bags contain 4 dull brown bags, 5 muted bronze bags.
mirrored black bags contain 1 muted silver bag, 3 plaid gray bags, 4 bright purple bags.
posh blue bags contain 3 dull beige bags, 5 dull olive bags.
posh beige bags contain 3 vibrant turquoise bags, 3 dotted lime bags.
light aqua bags contain 2 mirrored teal bags, 1 vibrant lime bag, 1 dim olive bag.
muted tomato bags contain 5 dim lavender bags.
dim coral bags contain 4 shiny magenta bags, 4 drab violet bags, 5 clear brown bags.
"""
IO.inspect(t |> D7.parse() |> D7.solve_p1())
IO.inspect(t |> D7.parse() |> D7.solve_p2())
|
2020/d7.ex
| 0.555676
| 0.566738
|
d7.ex
|
starcoder
|
defmodule Pbuf.Tests.Sub.User do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
id: 0,
status: :USER_STATUS_UNKNOWN,
name: nil
]
@type t :: %__MODULE__{
id: non_neg_integer,
status: Pbuf.Tests.Sub.UserStatus.t,
name: Pbuf.Tests.Sub.User.Name.t
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:uint32, data.id, <<8>>),
Encoder.enum_field(Pbuf.Tests.Sub.UserStatus, data.status, <<16>>),
Encoder.field(:struct, data.name, <<26>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:uint32, :id, acc, data)
end
def decode(acc, <<16, data::binary>>) do
Decoder.enum_field(Pbuf.Tests.Sub.UserStatus, :status, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.struct_field(Pbuf.Tests.Sub.User.Name, :name, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
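    # a protobuf field key is a varint: the upper bits carry the field tag
    # and the low three bits carry the wire type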
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
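# Hedged round-trip sketch for the generated module above (field values are
# illustrative only; equality after decoding is expected but not guaranteed
# by this file alone):
#
#     user = Pbuf.Tests.Sub.User.new(
#       id: 1,
#       status: :USER_STATUS_NORMAL,
#       name: Pbuf.Tests.Sub.User.Name.new(first: "Ada", last: "Lovelace")
#     )
#     {:ok, ^user} = user |> Pbuf.Tests.Sub.User.encode!() |> Pbuf.Tests.Sub.User.decode()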
defmodule Pbuf.Tests.Sub.User.Name do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
first: "",
last: ""
]
@type t :: %__MODULE__{
first: String.t,
last: String.t
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.first, <<10>>),
Encoder.field(:string, data.last, <<18>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :first, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.field(:string, :last, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Pbuf.Tests.Sub.UserStatus do
@moduledoc false
@type t :: :USER_STATUS_UNKNOWN | 0 | :USER_STATUS_NORMAL | 1 | :USER_STATUS_DELETED | 2
@spec to_int(t | non_neg_integer) :: integer
def to_int(:USER_STATUS_DELETED), do: 2
def to_int(2), do: 2
def to_int(:USER_STATUS_NORMAL), do: 1
def to_int(1), do: 1
def to_int(:USER_STATUS_UNKNOWN), do: 0
def to_int(0), do: 0
def to_int(invalid) do
raise Pbuf.Encoder.Error,
type: __MODULE__,
value: invalid,
tag: nil,
message: "#{inspect(invalid)} is not a valid enum value for #{__MODULE__}"
end
@spec from_int(integer) :: t
def from_int(2), do: :USER_STATUS_DELETED
def from_int(1), do: :USER_STATUS_NORMAL
def from_int(0), do: :USER_STATUS_UNKNOWN
def from_int(_unknown), do: :invalid
end
|
test/schemas/generated/user.pb.ex
| 0.705886
| 0.463566
|
user.pb.ex
|
starcoder
|
defmodule Tensorflex do
@moduledoc """
A simple and fast library for running Tensorflow graph models in Elixir.
Tensorflex is written around the [Tensorflow C
API](https://www.tensorflow.org/install/install_c), and allows Elixir
developers to leverage Machine Learning and Deep Learning solutions in their
projects.
__NOTE__:
- Make sure that the C API version and Python API version (assuming you are
using the Python API for first training your models) are the latest. As of
July 2018, the latest version is `r1.9`.
- Since Tensorflex provides Inference capability for pre-trained graph
models, it is assumed you have adequate knowledge of the pre-trained models
you are using (such as the input data type/dimensions, input and output
operation names, etc.). Some basic understanding of the [Tensorflow Python
API](https://www.tensorflow.org/api_docs/python/) can come in very handy.
  - Tensorflex consists of multiple NIFs, so exercise caution while using it:
  providing incorrect operation names when running sessions, passing tensors
  with dimensions different from what the actual pre-trained graph requires,
  or providing tensor datatypes different from the ones required by the graph
  can all lead to failure. While these are not easy errors to make, do ensure
  that you test your solution well before deployment.
"""
alias Tensorflex.{NIFs, Graph, Tensor, Matrix}
defp empty_list?([[]]), do: true
  defp empty_list?(list) when is_list(list), do: false
@doc """
Used for loading a Tensorflow `.pb` graph model in Tensorflex.
Reads in a pre-trained Tensorflow protobuf (`.pb`) Graph model binary file.
Returns a tuple `{:ok, %Graph}`.
`%Graph` is an internal Tensorflex struct which holds the name of the graph
file and the binary definition data that is read in via the `.pb` file.
## Examples:
_Reading in a graph_
As an example, we can try reading in the
[Inception](http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz)
convolutional neural network based image classification graph model by Google.
The graph file is named `classify_image_graph_def.pb`:
```elixir
iex(1)> {:ok, graph} = Tensorflex.read_graph "classify_image_graph_def.pb"
2018-07-23 15:31:35.949345: W tensorflow/core/framework/op_def_util.cc:346] Op BatchNormWithGlobalNormalization is deprecated. It will cease to work in GraphDef version 9. Use tf.nn.batch_normalization().
{:ok,
%Tensorflex.Graph{
def: #Reference<0.3018278404.759824385.5268>,
name: "classify_image_graph_def.pb"
}}
```
Generally to check that the loaded graph model is correct and contains
computational operations, the `get_graph_ops/1` function is useful:
```elixir
iex(2)> Tensorflex.get_graph_ops graph
["DecodeJpeg/contents", "DecodeJpeg", "Cast", "ExpandDims/dim", "ExpandDims",
"ResizeBilinear/size", "ResizeBilinear", "Sub/y", "Sub", "Mul/y", "Mul",
"conv/conv2d_params", "conv/Conv2D", "conv/batchnorm/beta",
"conv/batchnorm/gamma", "conv/batchnorm/moving_mean",
"conv/batchnorm/moving_variance", "conv/batchnorm", "conv/CheckNumerics",
"conv/control_dependency", "conv", "conv_1/conv2d_params", "conv_1/Conv2D",
"conv_1/batchnorm/beta", "conv_1/batchnorm/gamma",
"conv_1/batchnorm/moving_mean", "conv_1/batchnorm/moving_variance",
"conv_1/batchnorm", "conv_1/CheckNumerics", "conv_1/control_dependency",
"conv_1", "conv_2/conv2d_params", "conv_2/Conv2D", "conv_2/batchnorm/beta",
"conv_2/batchnorm/gamma", "conv_2/batchnorm/moving_mean",
"conv_2/batchnorm/moving_variance", "conv_2/batchnorm", "conv_2/CheckNumerics",
"conv_2/control_dependency", "conv_2", "pool/CheckNumerics",
"pool/control_dependency", "pool", "conv_3/conv2d_params", "conv_3/Conv2D",
"conv_3/batchnorm/beta", "conv_3/batchnorm/gamma",
"conv_3/batchnorm/moving_mean", "conv_3/batchnorm/moving_variance", ...]
```
_Incorrect usage will `raise`_:
```elixir
iex(3)> {:ok, graph} = Tensorflex.read_graph "Makefile"
** (ArgumentError) file is not a protobuf .pb file
(tensorflex) lib/tensorflex.ex:27: Tensorflex.read_graph/1
iex(3)> {:ok, graph} = Tensorflex.read_graph "Makefile.pb"
** (ArgumentError) graph definition file does not exist
(tensorflex) lib/tensorflex.ex:23: Tensorflex.read_graph/1
```
"""
def read_graph(filepath) do
unless File.exists?(filepath) do
raise ArgumentError, "graph definition file does not exist"
end
unless (Path.extname(filepath) == ".pb") do
raise ArgumentError, "file is not a protobuf .pb file"
end
{:ok, ref} = NIFs.read_graph(filepath)
{:ok, %Graph{def: ref, name: filepath}}
end
@doc """
Used for listing all the operations in a Tensorflow `.pb` graph.
Reads in a Tensorflex ```%Graph``` struct obtained from `read_graph/1`.
Returns a list of all the operation names (as strings) that populate the
graph model.
## Examples
- _Google Inception CNN Model_
([source](http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz))
```elixir
iex(1)> {:ok, graph} = Tensorflex.read_graph "classify_image_graph_def.pb"
2018-07-23 15:31:35.949345: W tensorflow/core/framework/op_def_util.cc:346] Op BatchNormWithGlobalNormalization is deprecated. It will cease to work in GraphDef version 9. Use tf.nn.batch_normalization().
{:ok,
%Tensorflex.Graph{
def: #Reference<0.3018278404.759824385.5268>,
name: "classify_image_graph_def.pb"
}}
iex(2)> Tensorflex.get_graph_ops graph
["DecodeJpeg/contents", "DecodeJpeg", "Cast", "ExpandDims/dim", "ExpandDims",
"ResizeBilinear/size", "ResizeBilinear", "Sub/y", "Sub", "Mul/y", "Mul",
"conv/conv2d_params", "conv/Conv2D", "conv/batchnorm/beta",
"conv/batchnorm/gamma", "conv/batchnorm/moving_mean",
"conv/batchnorm/moving_variance", "conv/batchnorm", "conv/CheckNumerics",
"conv/control_dependency", "conv", "conv_1/conv2d_params", "conv_1/Conv2D",
"conv_1/batchnorm/beta", "conv_1/batchnorm/gamma",
"conv_1/batchnorm/moving_mean", "conv_1/batchnorm/moving_variance",
"conv_1/batchnorm", "conv_1/CheckNumerics", "conv_1/control_dependency",
"conv_1", "conv_2/conv2d_params", "conv_2/Conv2D", "conv_2/batchnorm/beta",
"conv_2/batchnorm/gamma", "conv_2/batchnorm/moving_mean",
"conv_2/batchnorm/moving_variance", "conv_2/batchnorm", "conv_2/CheckNumerics",
"conv_2/control_dependency", "conv_2", "pool/CheckNumerics",
"pool/control_dependency", "pool", "conv_3/conv2d_params", "conv_3/Conv2D",
"conv_3/batchnorm/beta", "conv_3/batchnorm/gamma",
"conv_3/batchnorm/moving_mean", "conv_3/batchnorm/moving_variance", ...]
```
- _Iris Dataset MLP Model_
([source](http://www.anshumanc.ml/gsoc/2018/06/14/gsoc/))
```elixir
iex(1)> {:ok, graph} = Tensorflex.read_graph "graphdef_iris.pb"
{:ok,
%Tensorflex.Graph{
def: #Reference<0.4109712726.1847984130.24506>,
name: "graphdef_iris.pb"
}}
iex(2)> Tensorflex.get_graph_ops graph
["input", "weights1", "weights1/read", "biases1", "biases1/read", "weights2", "weights2/read", "biases2", "biases2/read", "MatMul", "Add", "Relu", "MatMul_1", "Add_1", "output"]
```
- _Toy Computational Graph Model_
([source](https://github.com/anshuman23/tensorflex/tree/master/examples/toy-example))
```elixir
iex(1)> {:ok, graph} = Tensorflex.read_graph "graphdef_toy.pb"
{:ok,
%Tensorflex.Graph{
def: #Reference<0.1274892327.1580335105.235135>,
name: "graphdef_toy.pb"
}}
iex(2)> Tensorflex.get_graph_ops graph
["input", "weights", "weights/read", "biases", "biases/read", "MatMul", "add", "output"]
```
- _RNN LSTM Sentiment Analysis Model_
([source](https://github.com/anshuman23/tensorflex/pull/25))
```elixir
iex(1)> {:ok, graph} = Tensorflex.read_graph "frozen_model_lstm.pb"
{:ok,
%Tensorflex.Graph{
def: #Reference<0.713975820.1050542081.11558>,
name: "frozen_model_lstm.pb"
}}
iex(2)> Tensorflex.get_graph_ops graph
["Placeholder_1", "embedding_lookup/params_0", "embedding_lookup",
"transpose/perm", "transpose", "rnn/Shape", "rnn/strided_slice/stack",
"rnn/strided_slice/stack_1", "rnn/strided_slice/stack_2", "rnn/strided_slice",
"rnn/stack/1", "rnn/stack", "rnn/zeros/Const", "rnn/zeros", "rnn/stack_1/1",
"rnn/stack_1", "rnn/zeros_1/Const", "rnn/zeros_1", "rnn/Shape_1",
"rnn/strided_slice_2/stack", "rnn/strided_slice_2/stack_1",
"rnn/strided_slice_2/stack_2", "rnn/strided_slice_2", "rnn/time",
"rnn/TensorArray", "rnn/TensorArray_1", "rnn/TensorArrayUnstack/Shape",
"rnn/TensorArrayUnstack/strided_slice/stack",
"rnn/TensorArrayUnstack/strided_slice/stack_1",
"rnn/TensorArrayUnstack/strided_slice/stack_2",
"rnn/TensorArrayUnstack/strided_slice", "rnn/TensorArrayUnstack/range/start",
"rnn/TensorArrayUnstack/range/delta", "rnn/TensorArrayUnstack/range",
"rnn/TensorArrayUnstack/TensorArrayScatter/TensorArrayScatterV3",
"rnn/while/Enter", "rnn/while/Enter_1", "rnn/while/Enter_2",
"rnn/while/Enter_3", "rnn/while/Merge", "rnn/while/Merge_1",
"rnn/while/Merge_2", "rnn/while/Merge_3", "rnn/while/Less/Enter",
"rnn/while/Less", "rnn/while/LoopCond", "rnn/while/Switch",
"rnn/while/Switch_1", "rnn/while/Switch_2", "rnn/while/Switch_3", ...]
```
"""
def get_graph_ops(%Graph{def: ref, name: _filepath}) do
NIFs.get_graph_ops(ref)
end
@doc """
Creates a 2-D Tensorflex matrix from custom input specifications.
Takes three input arguments: number of rows in matrix (`nrows`), number of
columns in matrix (`ncols`), and a list of lists of the data that will form the
matrix (`datalist`).
Returns a `%Matrix` Tensorflex struct type.
## Examples:
_Creating a new matrix_
```elixir
iex(1)> mat = Tensorflex.create_matrix(2,3,[[2.2,1.3,44.5],[5.5,6.1,3.333]])
%Tensorflex.Matrix{
data: #Reference<0.759278808.823525378.128525>,
ncols: 3,
nrows: 2
}
```
All `%Matrix` Tensorflex matrices can be passed in to the other matrix
inspection and manipulation functions: `matrix_pos/3`, `size_of_matrix/1`,
`matrix_to_lists/1`, and `append_to_matrix/2`:
```elixir
iex(1)> mat = Tensorflex.create_matrix(4,4,[[123,431,23,1],[1,2,3,4],[5,6,7,8],[768,564,44,5]])
%Tensorflex.Matrix{
data: #Reference<0.878138179.2435973124.131489>,
ncols: 4,
nrows: 4
}
iex(2)> mat = Tensorflex.append_to_matrix(mat, [[1,1,1,1]])
%Tensorflex.Matrix{
data: #Reference<0.878138179.2435973124.131489>,
ncols: 4,
nrows: 5
}
iex(3)> Tensorflex.matrix_to_lists mat
[
[123.0, 431.0, 23.0, 1.0],
[1.0, 2.0, 3.0, 4.0],
[5.0, 6.0, 7.0, 8.0],
[768.0, 564.0, 44.0, 5.0],
[1.0, 1.0, 1.0, 1.0]
]
iex(4)> Tensorflex.matrix_pos(mat,5,3)
1.0
iex(5)> Tensorflex.size_of_matrix mat
{5, 4}
```
_Incorrect usage will `raise`_:
```elixir
iex(1)> Tensorflex.create_matrix(1,2,[[1,2,3]])
** (ArgumentError) argument error
(tensorflex) Tensorflex.NIFs.create_matrix(1, 2, [[1, 2, 3]])
(tensorflex) lib/tensorflex.ex:247: Tensorflex.create_matrix/3
iex(1)> Tensorflex.create_matrix(2,1,[[1,2,3]])
** (ArgumentError) argument error
(tensorflex) Tensorflex.NIFs.create_matrix(2, 1, [[1, 2, 3]])
(tensorflex) lib/tensorflex.ex:247: Tensorflex.create_matrix/3
iex(1)> Tensorflex.create_matrix(2,3,[[1.1,23,3.4], []])
** (ArgumentError) argument error
(tensorflex) Tensorflex.NIFs.create_matrix(2, 3, [[1.1, 23, 3.4], []])
(tensorflex) lib/tensorflex.ex:247: Tensorflex.create_matrix/3
iex(1)> Tensorflex.create_matrix(1,2,[[]])
** (ArgumentError) data provided cannot be an empty list
(tensorflex) lib/tensorflex.ex:243: Tensorflex.create_matrix/3
iex(1)> Tensorflex.create_matrix(-1,2,[[3,4]])
** (FunctionClauseError) no function clause matching in Tensorflex.create_matrix/3
```
"""
def create_matrix(nrows, ncols, datalist) when nrows > 0 and ncols > 0 do
if(empty_list? datalist) do
raise ArgumentError, "data provided cannot be an empty list"
end
ref = NIFs.create_matrix(nrows, ncols, datalist)
%Matrix{nrows: nrows, ncols: ncols, data: ref}
end
@doc """
Used for accessing an element of a Tensorflex matrix.
Takes in three input arguments: a Tensorflex `%Matrix` struct matrix, and the
row (`row`) and column (`col`) values of the required element in the matrix.
Both `row` and `col` here are __NOT__ zero indexed.
Returns the value as float.
## Examples
```elixir
iex(1)> mat = Tensorflex.create_matrix(2,3,[[2.2,1.3,44.5],[5.5,6.1,3.333]])
%Tensorflex.Matrix{
data: #Reference<0.759278808.823525378.128525>,
ncols: 3,
nrows: 2
}
iex(2)> Tensorflex.matrix_pos(mat,2,1)
5.5
iex(3)> Tensorflex.matrix_pos(mat,1,3)
44.5
```
"""
def matrix_pos(%Matrix{nrows: _nrows, ncols: _ncols, data: ref}, row, col) when row > 0 and col > 0 do
NIFs.matrix_pos(ref, row, col)
end
@doc """
Used for obtaining the size of a Tensorflex matrix.
Takes a Tensorflex `%Matrix` struct matrix as input.
Returns a tuple `{nrows, ncols}` where `nrows` represents the number of rows of
the matrix and `ncols` represents the number of columns of the matrix.
## Examples
```elixir
iex(1)> mat = Tensorflex.create_matrix(2,3,[[2.2,1.3,44.5],[5.5,6.1,3.333]])
%Tensorflex.Matrix{
data: #Reference<0.759278808.823525378.128525>,
ncols: 3,
nrows: 2
}
iex(2)> Tensorflex.size_of_matrix mat
{2, 3}
```
"""
def size_of_matrix(%Matrix{nrows: nrows, ncols: ncols, data: _ref}) do
{nrows, ncols}
end
@doc """
Appends a single row to the back of a Tensorflex matrix.
Takes a Tensorflex `%Matrix` matrix as input and a single row of data (with
the same number of columns as the original matrix) as a list of lists
(`datalist`) to append to the original matrix.
Returns the extended and modified `%Matrix` struct matrix.
## Examples
```elixir
iex(1)> m = Tensorflex.create_matrix(2,3,[[23,23,23],[32,32,32]])
%Tensorflex.Matrix{
data: #Reference<0.153563642.2042232833.193025>,
ncols: 3,
nrows: 2
}
iex(2)> m = Tensorflex.append_to_matrix(m,[[2,2,2]])
%Tensorflex.Matrix{
data: #Reference<0.153563642.2042232833.193025>,
ncols: 3,
nrows: 3
}
iex(3)> m = Tensorflex.append_to_matrix(m,[[3,3,3]])
%Tensorflex.Matrix{
data: #Reference<0.153563642.2042232833.193025>,
ncols: 3,
nrows: 4
}
iex(4)> m |> Tensorflex.matrix_to_lists
[[23.0, 23.0, 23.0], [32.0, 32.0, 32.0], [2.0, 2.0, 2.0], [3.0, 3.0, 3.0]]
```
_Incorrect usage will `raise`_:
```elixir
iex(5)> m = Tensorflex.append_to_matrix(m,[[2,2,2],[3,3,3]])
** (ArgumentError) data columns must be same as matrix and number of rows must be 1
(tensorflex) lib/tensorflex.ex:345: Tensorflex.append_to_matrix/2
iex(5)> m = Tensorflex.append_to_matrix(m,[[2,2,2,2]])
** (ArgumentError) data columns must be same as matrix and number of rows must be 1
(tensorflex) lib/tensorflex.ex:345: Tensorflex.append_to_matrix/2
```
"""
def append_to_matrix(%Matrix{nrows: nrows, ncols: ncols, data: ref}, datalist) do
    # exactly one row is allowed, and its flattened length must equal the column count
    unless length(datalist) == 1 and (datalist |> List.flatten() |> length()) == ncols do
raise ArgumentError, "data columns must be same as matrix and number of rows must be 1"
end
new_ref = NIFs.append_to_matrix(ref, datalist)
%Matrix{nrows: nrows+1, ncols: ncols, data: new_ref}
end
@doc """
Converts a Tensorflex matrix (back) to a list of lists format.
Takes a Tensorflex `%Matrix` struct matrix as input.
Returns a list of lists representing the data stored in the matrix.
__NOTE__: If the matrix contains very high dimensional data, typically
obtained from a function like `load_csv_as_matrix/2`, then it is not
recommended to convert the matrix back to a list of lists format due to a
possibility of memory errors.
## Examples
```elixir
iex(1)> Tensorflex.create_matrix(2,3,[[23,23,23],[32,32,32]]) |> Tensorflex.matrix_to_lists
[[23.0, 23.0, 23.0], [32.0, 32.0, 32.0]]
```
"""
def matrix_to_lists(%Matrix{nrows: _nrows, ncols: _ncols, data: ref}) do
NIFs.matrix_to_lists(ref)
end
@doc """
Creates a `TF_DOUBLE` tensor from Tensorflex matrices containing the values
and dimensions specified.
Takes two arguments: a `%Matrix` matrix (`matrix1`) containing the values the
tensor should have and another `%Matrix` matrix (`matrix2`) containing the
dimensions of the required tensor.
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding tensor data and type.
## Examples:
```elixir
iex(1)> vals = Tensorflex.create_matrix(2,3,[[12.0,45.2,2.11],[36.7,8.09,9.81]])
%Tensorflex.Matrix{
data: #Reference<0.1251941183.3671982081.254268>,
ncols: 3,
nrows: 2
}
iex(2)> dims = Tensorflex.create_matrix(1,2,[[2,3]])
%Tensorflex.Matrix{
data: #Reference<0.1251941183.3671982081.254723>,
ncols: 2,
nrows: 1
}
iex(3)> {:ok, tensor} = Tensorflex.float64_tensor vals,dims
{:ok,
%Tensorflex.Tensor{
datatype: :tf_double,
tensor: #Reference<0.1251941183.3671982081.255216>
}}
```
"""
def float64_tensor(%Matrix{nrows: _val_rows, ncols: _val_cols, data: val_ref}, %Matrix{nrows: _dim_rows, ncols: _dim_cols, data: dim_ref}) do
{:ok, ref} = NIFs.float64_tensor(val_ref, dim_ref)
{:ok, %Tensor{datatype: :tf_double, tensor: ref}}
end
@doc """
Creates a `TF_DOUBLE` constant value one-dimensional tensor from the floating
point value specified.
Takes in a float value as input.
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding tensor data and type.
## Examples
```elixir
iex(1)> {:ok, tensor} = Tensorflex.float64_tensor 123.123
{:ok,
%Tensorflex.Tensor{
datatype: :tf_double,
tensor: #Reference<0.2778616536.4219338753.155412>
}}
```
_Incorrect usage will `raise`_:
```elixir
iex(2)> {:ok, tensor} = Tensorflex.float64_tensor "123.123"
** (FunctionClauseError) no function clause matching in Tensorflex.float64_tensor/1
iex(2)> {:ok, tensor} = Tensorflex.float64_tensor 123
** (FunctionClauseError) no function clause matching in Tensorflex.float64_tensor/1
```
"""
def float64_tensor(floatval) when is_float(floatval) do
{:ok, ref} = NIFs.float64_tensor(floatval)
{:ok, %Tensor{datatype: :tf_double, tensor: ref}}
end
@doc """
Creates a `TF_FLOAT` tensor from Tensorflex matrices containing the values
and dimensions specified.
Takes two arguments: a `%Matrix` matrix (`matrix1`) containing the values the
tensor should have and another `%Matrix` matrix (`matrix2`) containing the
dimensions of the required tensor.
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding tensor data and type.
## Examples:
```elixir
iex(1)> vals = Tensorflex.create_matrix(2,3,[[12.0,45.2,2.11],[36.7,8.09,9.81]])
%Tensorflex.Matrix{
data: #Reference<0.1251941183.3671982081.254268>,
ncols: 3,
nrows: 2
}
iex(2)> dims = Tensorflex.create_matrix(1,2,[[2,3]])
%Tensorflex.Matrix{
data: #Reference<0.1251941183.3671982081.254723>,
ncols: 2,
nrows: 1
}
iex(3)> {:ok, tensor} = Tensorflex.float32_tensor vals,dims
{:ok,
%Tensorflex.Tensor{
datatype: :tf_float,
tensor: #Reference<0.1251941183.3671982081.255228>
}}
```
"""
def float32_tensor(%Matrix{nrows: _val_rows, ncols: _val_cols, data: val_ref}, %Matrix{nrows: _dim_rows, ncols: _dim_cols, data: dim_ref}) do
{:ok, ref} = NIFs.float32_tensor(val_ref, dim_ref)
{:ok, %Tensor{datatype: :tf_float, tensor: ref}}
end
@doc """
Creates a `TF_FLOAT` constant value one-dimensional tensor from the floating
point value specified.
Takes in a float value as input.
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding tensor data and type.
## Examples
```elixir
iex(1)> {:ok, tensor} = Tensorflex.float32_tensor 123.123
{:ok,
%Tensorflex.Tensor{
datatype: :tf_float,
tensor: #Reference<0.2011963375.1804468228.236110>
}}
```
_Incorrect usage will `raise`_:
```elixir
iex(2)> {:ok, tensor} = Tensorflex.float32_tensor "123.123"
** (FunctionClauseError) no function clause matching in Tensorflex.float32_tensor/1
iex(2)> {:ok, tensor} = Tensorflex.float32_tensor 123
** (FunctionClauseError) no function clause matching in Tensorflex.float32_tensor/1
```
"""
def float32_tensor(floatval) when is_float(floatval) do
{:ok, ref} = NIFs.float32_tensor(floatval)
{:ok, %Tensor{datatype: :tf_float, tensor: ref}}
end
@doc """
Creates a `TF_INT32` tensor from Tensorflex matrices containing the values
and dimensions specified.
Takes two arguments: a `%Matrix` matrix (`matrix1`) containing the values the
tensor should have and another `%Matrix` matrix (`matrix2`) containing the
dimensions of the required tensor.
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding tensor data and type.
__NOTE__: In case floating point values are passed in the values matrix
(`matrix1`) as arguments for this function, the tensor will still be created
and all the float values will be typecast to integers.
## Examples:
```elixir
iex(1)> vals = Tensorflex.create_matrix(2,3,[[123,45,333],[2,2,899]])
%Tensorflex.Matrix{
data: #Reference<0.1256144000.2868510721.170449>,
ncols: 3,
nrows: 2
}
iex(2)> dims = Tensorflex.create_matrix(1,2,[[2,3]])
%Tensorflex.Matrix{
data: #Reference<0.1256144000.2868510721.170894>,
ncols: 2,
nrows: 1
}
iex(3)> {:ok, tensor} = Tensorflex.int32_tensor vals,dims
{:ok,
%Tensorflex.Tensor{
datatype: :tf_int32,
tensor: #Reference<0.1256144000.2868510721.171357>
}}
```
"""
def int32_tensor(%Matrix{nrows: _val_rows, ncols: _val_cols, data: val_ref}, %Matrix{nrows: _dim_rows, ncols: _dim_cols, data: dim_ref}) do
{:ok, ref} = NIFs.int32_tensor(val_ref, dim_ref)
{:ok, %Tensor{datatype: :tf_int32, tensor: ref}}
end
@doc """
Creates a `TF_INT32` constant value one-dimensional tensor from the integer
value specified.
Takes in an integer value as input.
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding tensor data and type.
## Examples
```elixir
iex(1)> {:ok, tensor} = Tensorflex.int32_tensor 123
{:ok,
%Tensorflex.Tensor{
datatype: :tf_int32,
tensor: #Reference<0.1927663658.3415343105.162588>
}}
```
_Incorrect usage will `raise`_:
```elixir
iex(2)> {:ok, tensor} = Tensorflex.int32_tensor 123.123
** (FunctionClauseError) no function clause matching in Tensorflex.int32_tensor/1
iex(2)> {:ok, tensor} = Tensorflex.int32_tensor "123.123"
** (FunctionClauseError) no function clause matching in Tensorflex.int32_tensor/1
```
"""
def int32_tensor(intval) when is_integer(intval) do
{:ok, ref} = NIFs.int32_tensor(intval)
{:ok, %Tensor{datatype: :tf_int32, tensor: ref}}
end
@doc """
Creates a `TF_STRING` constant value string tensor from the string value
specified.
Takes in a string value as input.
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding tensor data and type.
## Examples
```elixir
iex(1)> {:ok, tensor} = Tensorflex.string_tensor "123.123"
{:ok,
%Tensorflex.Tensor{
datatype: :tf_string,
tensor: #Reference<0.2069282048.194904065.41126>
}}
```
_Incorrect usage will `raise`_:
```elixir
iex(2)> {:ok, tensor} = Tensorflex.string_tensor 123.123
** (FunctionClauseError) no function clause matching in Tensorflex.string_tensor/1
iex(2)> {:ok, tensor} = Tensorflex.string_tensor 123
** (FunctionClauseError) no function clause matching in Tensorflex.string_tensor/1
```
"""
def string_tensor(stringval) when is_binary(stringval) do
{:ok, ref} = NIFs.string_tensor(stringval)
{:ok, %Tensor{datatype: :tf_string, tensor: ref}}
end
@doc """
Allocates a `TF_INT32` tensor of specified dimensions.
This function is generally used to allocate output tensors that do not hold
any value data yet, but _will_ after the session is run for Inference. Output
tensors of the required dimensions are allocated and then passed to the
`run_session/5` function to hold the output values generated as predictions.
Takes a Tensorflex `%Matrix` struct matrix as input.
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding the potential tensor data and
type.
## Examples
As an example, we can allocate an `int32` output tensor that will be a vector
of 250 values (`1x250` matrix). Therefore, after the session is run, the output
will be an `integer` vector containing 250 values:
```elixir
iex(1)> {:ok, tensor} = Tensorflex.create_matrix(1,2,[[1,250]]) |> Tensorflex.int32_tensor_alloc
{:ok,
%Tensorflex.Tensor{
datatype: :tf_int32,
tensor: #Reference<0.961157994.2087059457.18950>
}}
```
"""
def int32_tensor_alloc(%Matrix{nrows: _dim_rows, ncols: _dim_cols, data: dim_ref}) do
{:ok, ref} = NIFs.int32_tensor_alloc(dim_ref)
{:ok, %Tensor{datatype: :tf_int32, tensor: ref}}
end
@doc """
Allocates a `TF_FLOAT` tensor of specified dimensions.
This function is generally used to allocate output tensors that do not hold
any value data yet, but _will_ after the session is run for Inference. Output
tensors of the required dimensions are allocated and then passed to the
`run_session/5` function to hold the output values generated as predictions.
Takes a Tensorflex `%Matrix` struct matrix as input.
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding the potential tensor data and
type.
## Examples
As an example, we can allocate a `float32` output tensor that will be a
vector of 250 values (`1x250` matrix). Therefore, after the session is run, the
output will be a `float` vector containing 250 values:
```elixir
iex(1)> {:ok, tensor} = Tensorflex.create_matrix(1,2,[[1,250]]) |> Tensorflex.float32_tensor_alloc
{:ok,
%Tensorflex.Tensor{
datatype: :tf_float,
tensor: #Reference<0.961157994.2087059457.19014>
}}
```
"""
def float32_tensor_alloc(%Matrix{nrows: _dim_rows, ncols: _dim_cols, data: dim_ref}) do
{:ok, ref} = NIFs.float32_tensor_alloc(dim_ref)
{:ok, %Tensor{datatype: :tf_float, tensor: ref}}
end
@doc """
Allocates a `TF_DOUBLE` tensor of specified dimensions.
This function is generally used to allocate output tensors that do not hold
any value data yet, but _will_ after the session is run for Inference. Output
tensors of the required dimensions are allocated and then passed to the
`run_session/5` function to hold the output values generated as predictions.
Takes a Tensorflex `%Matrix` struct matrix as input.
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding the potential tensor data and
type.
## Examples
As an example, we can allocate a `float64` output tensor that will be a
vector of 250 values (`1x250` matrix). Therefore, after the session is run, the
output will be a `double` vector containing 250 values:
```elixir
iex(1)> {:ok, tensor} = Tensorflex.create_matrix(1,2,[[1,250]]) |> Tensorflex.float64_tensor_alloc
{:ok,
%Tensorflex.Tensor{
datatype: :tf_double,
tensor: #Reference<0.961157994.2087059457.19025>
}}
```
"""
def float64_tensor_alloc(%Matrix{nrows: _dim_rows, ncols: _dim_cols, data: dim_ref}) do
{:ok, ref} = NIFs.float64_tensor_alloc(dim_ref)
{:ok, %Tensor{datatype: :tf_double, tensor: ref}}
end
@doc """
Used to get the datatype of a created tensor.
Takes in a `%Tensor` struct tensor as input.
Returns a tuple `{:ok, datatype}` where `datatype` is an atom representing
the list of Tensorflow `TF_DataType` tensor datatypes. Click
[here](https://github.com/anshuman23/tensorflex/blob/master/c_src/c_api.h#L98-L122)
to view a list of all possible datatypes.
## Examples
```elixir
iex(1)> {:ok, tensor} = Tensorflex.string_tensor "example"
{:ok,
%Tensorflex.Tensor{
datatype: :tf_string,
tensor: #Reference<0.4132928949.2894987267.194583>
}}
iex(2)> Tensorflex.tensor_datatype tensor
{:ok, :tf_string}
```
"""
def tensor_datatype(%Tensor{datatype: datatype, tensor: _ref}) do
{:ok, datatype}
end
@doc """
Loads `JPEG` images into Tensorflex directly as a `TF_UINT8` tensor of
dimensions `image height x image width x number of color channels`.
This function is very useful if you wish to do image classification using
Convolutional Neural Networks, or other Deep Learning Models. One of the most
widely adopted and robust image classification models is the
[Inception](http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz)
model by Google. It classifies images into over 1000 classes
with highly accurate results. The `load_image_as_tensor/1` function is an
essential component for the prediction pipeline of the Inception model (and for
other similar image classification models) to work in Tensorflex.
Reads in the path to a `JPEG` image file (`.jpg` or `.jpeg`).
Returns a tuple `{:ok, %Tensor}` where `%Tensor` represents an internal
Tensorflex struct type that is used for holding the tensor data and type. Here
the created Tensor is a `uint8` tensor (`TF_UINT8`).
__NOTE__: For now, only 3 channel RGB `JPEG` color images can be passed as
arguments. Support for grayscale images and other image formats such as `PNG`
will be added in the future.
## Examples
To exemplify the working of the `load_image_as_tensor/1` function we will
cover the entire prediction pipeline for the Inception model. However, this
makes use of many other Tensorflex functions such as `run_session/5` and the
other tensor functions so it would be advisable to go through them first. Also,
the Inception model can be downloaded
[here](http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz).
We will be making use of the `cropped_panda.jpg` image file that comes along
with the model to test out the model in Tensorflex.
First the graph is loaded:
```elixir
iex(1)> {:ok, graph} = Tensorflex.read_graph("classify_image_graph_def.pb")
2018-07-25 14:20:29.079139: W tensorflow/core/framework/op_def_util.cc:346] Op BatchNormWithGlobalNormalization is deprecated. It will cease to work in GraphDef version 9. Use tf.nn.batch_normalization().
{:ok,
%Tensorflex.Graph{
def: #Reference<0.542869014.389152771.105680>,
name: "classify_image_graph_def.pb"
}}
```
Then we load the image as a `uint8` tensor:
```elixir
iex(2)> {:ok, input_tensor} = Tensorflex.load_image_as_tensor("cropped_panda.jpg")
{:ok,
%Tensorflex.Tensor{
datatype: :tf_uint8,
tensor: #Reference<0.1203951739.122552322.52747>
}}
```
Then we create the output tensor, which will hold our output vector values.
For the Inception model, the output is received as a `1008x1 float32` tensor,
as there are 1008 classes in the model:
```elixir
iex(3)> {:ok, output_tensor} = Tensorflex.create_matrix(1,2,[[1008,1]]) |> Tensorflex.float32_tensor_alloc
{:ok,
%Tensorflex.Tensor{
datatype: :tf_float,
tensor: #Reference<0.1203951739.122552322.52794>
}}
```
Next, we obtain the results by running the session:
```elixir
iex(4)> results = Tensorflex.run_session(graph, input_tensor, output_tensor, "DecodeJpeg", "softmax")
2018-07-25 14:33:40.992813: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: SSE4.1 SSE4.2 AVX AVX2 FMA
[
[1.059142014128156e-4, 2.8240500250831246e-4, 8.30648496048525e-5,
1.2982363114133477e-4, 7.32232874725014e-5, 8.014426566660404e-5,
6.63459359202534e-5, 0.003170756157487631, 7.931600703159347e-5,
3.707312498590909e-5, 3.0997329304227605e-5, 1.4232713147066534e-4,
1.0381334868725389e-4, 1.1057958181481808e-4, 1.4321311027742922e-4,
1.203602587338537e-4, 1.3130248407833278e-4, 5.850398520124145e-5,
2.641105093061924e-4, 3.1629020668333396e-5, 3.906813799403608e-5,
2.8646905775531195e-5, 2.2863158665131778e-4, 1.2222197256051004e-4,
5.956588938715868e-5, 5.421260357252322e-5, 5.996063555357978e-5,
4.867801326327026e-4, 1.1005574924638495e-4, 2.3433618480339646e-4,
1.3062104699201882e-4, 1.317620772169903e-4, 9.388553007738665e-5,
7.076268957462162e-5, 4.281177825760096e-5, 1.6863139171618968e-4,
9.093972039408982e-5, 2.611844101920724e-4, 2.7584232157096267e-4,
5.157176201464608e-5, 2.144951868103817e-4, 1.3628098531626165e-4,
8.007588621694595e-5, 1.7929042223840952e-4, 2.2831936075817794e-4,
6.216531619429588e-5, 3.736453436431475e-5, 6.782123091397807e-5,
1.1538144462974742e-4, ...]
]
```
Finally, we need to find which class has the maximum probability and identify
its label. Since `results` is a list of lists, it's easier to work with the
flattened list. Then we need to find the index of the element in the new list
which has the maximum value. Therefore:
```elixir
iex(5)> max_prob = List.flatten(results) |> Enum.max
0.8849328756332397
iex(6)> Enum.find_index(results |> List.flatten, fn(x) -> x == max_prob end)
169
```
We can thus see that the class with the maximum probability predicted
(`0.8849328756332397`) for the image is `169`. We will now find what the `169`
label corresponds to. For this we can look back into the unzipped Inception
folder, where there is a file called
`imagenet_2012_challenge_label_map_proto.pbtxt`. On opening this file, we can
find the string class identifier for the `169` class index. This is `n02510455`
and is present on Line 1556 in the file. Finally, we need to match this string
identifier to a set of identification labels by referring to the
`imagenet_synset_to_human_label_map.txt` file. Here we can see that
corresponding to the string class `n02510455` the human labels are `giant
panda, panda, panda bear, coon bear, Ailuropoda melanoleuca` (Line 3691 in the
file). Thus, we have correctly identified the animal in the image as a panda
using Tensorflex.
"""
def load_image_as_tensor(imagepath) do
unless File.exists?(imagepath) do
raise ArgumentError, "image file does not exist"
end
unless (Path.extname(imagepath) == ".jpg" or Path.extname(imagepath) == ".jpeg") do
raise ArgumentError, "file is not a JPEG image file"
end
{:ok, ref} = NIFs.load_image_as_tensor(imagepath)
{:ok, %Tensor{datatype: :tf_uint8, tensor: ref}}
end
@doc """
Loads high-dimensional data from a `CSV` file as a Tensorflex 2-D matrix in a
super-fast manner.
The `load_csv_as_matrix/2` function is very fast: when compared with
`read_csv`, the equivalent function from the Python-based `pandas` data
analysis library, on the `test.csv` file from the MNIST Kaggle data
([source](https://www.kaggle.com/c/digit-recognizer/data)), the following
execution times were obtained:
- `read_csv`: `2.549233` seconds
- `load_csv_as_matrix/2`: `1.711494` seconds
This function takes in 2 arguments: a path to a valid CSV file (`filepath`)
and other optional arguments `opts`. These include whether or not a header
needs to be discarded in the CSV, and what the delimiter type is. These are
specified by passing in an atom `:true` or `:false` to the `header:` key, and
setting a string value for the `delimiter:` key. By default, the header is
considered to be present (`:true`) and the delimiter is set to `,`.
Returns a `%Matrix` Tensorflex struct type.
## Examples:
We first exemplify the working with the `test.csv` file which belongs to the
MNIST Kaggle CSV data
([source](https://www.kaggle.com/c/digit-recognizer/data)), which contains
`28000` rows and `784` columns (without the header). It is comma delimited and
also contains a header. From the `test.csv` file, we also create a custom file
without the header present which we refer to as `test_without_header.csv` in the
examples below:
```elixir
iex(1)> mat = Tensorflex.load_csv_as_matrix("test.csv")
%Tensorflex.Matrix{
data: #Reference<0.4024686574.590479361.258459>,
ncols: 784,
nrows: 28000
}
iex(2)> Tensorflex.matrix_pos mat, 5,97
80.0
iex(3)> Tensorflex.matrix_pos mat, 5,96
13.0
```
On a visual inspection of the very large `test.csv` file, one can see that
the values in these particular positions are correct. Now we show usage for the
same file but without header, `test_without_header.csv`:
```elixir
iex(1)> no_header = Tensorflex.load_csv_as_matrix("test/test_without_header.csv", header: :false)
%Tensorflex.Matrix{
data: #Reference<0.4024686574.590479364.257078>,
ncols: 784,
nrows: 28000
}
iex(2)> Tensorflex.matrix_pos no_header,5,97
80.0
iex(3)> Tensorflex.matrix_pos no_header,5,96
13.0
```
Next we see the delimiter functionality. Assume we have two simple
`CSV` files, `sample1.csv` and `sample2.csv`:
_sample1.csv_:
```elixir
1,2,3,4,5
6,7,8,9,10
11,12,13,14,15
```
_sample2.csv_:
```elixir
col1-col2-col3-col4
1-2-3-4
5-6-7-8
9-10-11-12
```
The examples are as follows:
```elixir
iex(1)> m1 = Tensorflex.load_csv_as_matrix("sample1.csv", header: :false)
%Tensorflex.Matrix{
data: #Reference<0.3878093040.3013214209.247502>,
ncols: 5,
nrows: 3
}
iex(2)> Tensorflex.matrix_to_lists m1
[
[1.0, 2.0, 3.0, 4.0, 5.0],
[6.0, 7.0, 8.0, 9.0, 10.0],
[11.0, 12.0, 13.0, 14.0, 15.0]
]
iex(3)> m2 = Tensorflex.load_csv_as_matrix("sample2.csv", header: :true, delimiter: "-")
%Tensorflex.Matrix{
data: #Reference<0.4024686574.590479361.258952>,
ncols: 4,
nrows: 3
}
iex(4)> Tensorflex.matrix_to_lists m2
[[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0], [9.0, 10.0, 11.0, 12.0]]
```
_Incorrect usage will `raise`_:
```elixir
iex(1)> not_working = Tensorflex.load_csv_as_matrix("test.csv", header: :no_header, delimiter: ",")
** (ArgumentError) header indicator atom must be either :true or :false
(tensorflex) lib/tensorflex.ex:122: Tensorflex.load_csv_as_matrix/2
```
"""
def load_csv_as_matrix(filepath, opts \\ []) do
unless File.exists?(filepath) do
raise ArgumentError, "csv file does not exist"
end
unless (Path.extname(filepath) == ".csv") do
raise ArgumentError, "file is not a CSV file"
end
defaults = [header: :true, delimiter: ","]
opts = Keyword.merge(defaults, opts) |> Enum.into(%{})
%{header: header, delimiter: delimiter} = opts
if(header != :true and header != :false) do
raise ArgumentError, "header indicator atom must be either :true or :false"
end
ref = NIFs.load_csv_as_matrix(filepath, header, delimiter)
{nrows, ncols} = NIFs.size_of_matrix(ref)
%Matrix{nrows: nrows, ncols: ncols, data: ref}
end
@doc """
Runs a Tensorflow session to generate predictions for a given graph, input
data, and required input/output operations.
This function is the final step of the Inference (prediction) pipeline and
generates output for a given set of input data, a pre-trained graph model, and
the specified input and output operations of the graph.
Takes in five arguments: a pre-trained Tensorflow graph `.pb` model read in
from the `read_graph/1` function (`graph`), an input tensor with the dimensions
and data required for the input operation of the graph to run (`tensor1`), an
output tensor allocated with the right dimensions (`tensor2`), the name of the
input operation of the graph that needs where the input data is fed
(`input_opname`), and the output operation name in the graph where the outputs
are obtained (`output_opname`). The input tensor is generally created from the
matrices manually or using the `load_csv_as_matrix/2` function, and then passed
through to one of the tensor creation functions. For image classification the
`load_image_as_tensor/1` can also be used to create the input tensor from an
image. The output tensor is created using the tensor allocation functions
(generally containing `alloc` at the end of the function name).
Returns a List of Lists (similar to the `matrix_to_lists/1` function)
containing the generated predictions as per the output tensor dimensions.
These examples can be observed for understanding the prediction pipeline:
* A blog post [here](http://www.anshumanc.ml/gsoc/2018/06/14/gsoc/) covers
generating predictions and running sessions using an MLP model on the Iris
Dataset
* Generating predictions from the Inception model by Google is covered in the
`load_image_as_tensor/1` function examples.
* Working with an RNN-LSTM example for sentiment analysis is covered
[here](https://github.com/anshuman23/tensorflex/pull/25).
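The steps above, condensed into a minimal sketch (the file, tensor, and
operation names are taken from the Inception example documented in
`load_image_as_tensor/1`):
```elixir
{:ok, graph} = Tensorflex.read_graph("classify_image_graph_def.pb")
{:ok, input_tensor} = Tensorflex.load_image_as_tensor("cropped_panda.jpg")
{:ok, output_tensor} = Tensorflex.create_matrix(1,2,[[1008,1]]) |> Tensorflex.float32_tensor_alloc
predictions = Tensorflex.run_session(graph, input_tensor, output_tensor, "DecodeJpeg", "softmax")
```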
"""
def run_session(%Graph{def: graphdef, name: _filepath}, %Tensor{datatype: _input_datatype, tensor: input_ref}, %Tensor{datatype: _output_datatype, tensor: output_ref}, input_opname, output_opname) do
NIFs.run_session(graphdef, input_ref, output_ref, input_opname, output_opname)
end
@doc """
Adds a scalar value to every element of a matrix.
Takes two arguments: a `%Matrix` matrix and a scalar value (integer or float).
Returns the modified `%Matrix` matrix.
## Examples
```elixir
iex(1)> m = Tensorflex.create_matrix(2,3,[[1,2,3],[4,5,6]])
%Tensorflex.Matrix{
data: #Reference<0.2262135929.2234908676.182623>,
ncols: 3,
nrows: 2
}
iex(2)> m = Tensorflex.add_scalar_to_matrix(m, 5)
%Tensorflex.Matrix{
data: #Reference<0.2262135929.2234908673.182139>,
ncols: 3,
nrows: 2
}
iex(3)> Tensorflex.matrix_to_lists m
[[6.0, 7.0, 8.0], [9.0, 10.0, 11.0]]
```
"""
def add_scalar_to_matrix(%Matrix{nrows: nrows, ncols: ncols, data: ref}, scalar) do
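    # scalar/1 is float division by 1; it coerces an integer scalar to a float for the NIF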
new_ref = NIFs.add_scalar_to_matrix(ref, scalar/1)
%Matrix{nrows: nrows, ncols: ncols, data: new_ref}
end
@doc """
Subtracts a scalar value from every element of a matrix.
Takes two arguments: a `%Matrix` matrix and a scalar value (integer or float).
Returns the modified `%Matrix` matrix.
## Examples
```elixir
iex(1)> m = Tensorflex.create_matrix(2,3,[[1,2,3],[4,5,6]])
%Tensorflex.Matrix{
data: #Reference<0.2262135929.2234908676.182623>,
ncols: 3,
nrows: 2
}
iex(2)> m = Tensorflex.subtract_scalar_from_matrix m,5
%Tensorflex.Matrix{
data: #Reference<0.11868180.3310747649.147467>,
ncols: 3,
nrows: 2
}
iex(3)> Tensorflex.matrix_to_lists m
[[-4.0, -3.0, -2.0], [-1.0, 0.0, 1.0]]
```
"""
def subtract_scalar_from_matrix(%Matrix{nrows: nrows, ncols: ncols, data: ref}, scalar) do
new_ref = NIFs.subtract_scalar_from_matrix(ref, scalar/1)
%Matrix{nrows: nrows, ncols: ncols, data: new_ref}
end
@doc """
Multiplies every element of a matrix by a scalar value.
Takes two arguments: a `%Matrix` matrix and a scalar value (integer or float).
Returns the modified `%Matrix` matrix.
## Examples
```elixir
iex(1)> m = Tensorflex.create_matrix(2,3,[[1,2,3],[4,5,6]])
%Tensorflex.Matrix{
data: #Reference<0.2262135929.2234908676.182623>,
ncols: 3,
nrows: 2
}
iex(2)> m = Tensorflex.multiply_matrix_with_scalar m,5
%Tensorflex.Matrix{
data: #Reference<0.2093133110.1968832513.7094>,
ncols: 3,
nrows: 2
}
iex(3)> Tensorflex.matrix_to_lists m
[[5.0, 10.0, 15.0], [20.0, 25.0, 30.0]]
```
"""
def multiply_matrix_with_scalar(%Matrix{nrows: nrows, ncols: ncols, data: ref}, scalar) do
new_ref = NIFs.multiply_matrix_with_scalar(ref, scalar/1)
%Matrix{nrows: nrows, ncols: ncols, data: new_ref}
end
@doc """
Divides every element of a matrix by a scalar value.
Takes two arguments: a `%Matrix` matrix and a scalar value (integer or float).
Returns the modified `%Matrix` matrix.
## Examples
```elixir
iex(1)> m = Tensorflex.create_matrix(2,3,[[1,2,3],[4,5,6]])
%Tensorflex.Matrix{
data: #Reference<0.2262135929.2234908676.182623>,
ncols: 3,
nrows: 2
}
iex(2)> m = Tensorflex.divide_matrix_by_scalar m,5
%Tensorflex.Matrix{
data: #Reference<0.3723154058.2507014148.17262>,
ncols: 3,
nrows: 2
}
iex(3)> Tensorflex.matrix_to_lists m
[[0.2, 0.4, 0.6], [0.8, 1.0, 1.2]]
```
"""
def divide_matrix_by_scalar(%Matrix{nrows: nrows, ncols: ncols, data: ref}, scalar) do
new_ref = NIFs.divide_matrix_by_scalar(ref, scalar/1)
%Matrix{nrows: nrows, ncols: ncols, data: new_ref}
end
@doc """
Adds two matrices of same dimensions together.
Takes in two `%Matrix` matrices as arguments.
Returns the resultant `%Matrix` matrix.
## Examples
```elixir
iex(1)> m1 = Tensorflex.create_matrix(2,3,[[1,2,3],[4,5,6]])
%Tensorflex.Matrix{
data: #Reference<0.3124708718.3046244353.117555>,
ncols: 3,
nrows: 2
}
iex(2)> m2 = Tensorflex.create_matrix(2,3,[[4,5,6],[1,2,3]])
%Tensorflex.Matrix{
data: #Reference<0.3124708718.3046244354.115855>,
ncols: 3,
nrows: 2
}
iex(3)> m_added = Tensorflex.add_matrices m1,m2
%Tensorflex.Matrix{
data: #Reference<0.3124708718.3046244353.118873>,
ncols: 3,
nrows: 2
}
iex(4)> Tensorflex.matrix_to_lists m_added
[[5.0, 7.0, 9.0], [5.0, 7.0, 9.0]]
```
"""
def add_matrices(%Matrix{nrows: nrows1, ncols: _ncols1, data: ref1}, %Matrix{nrows: _nrows2, ncols: ncols2, data: ref2}) do
new_ref = NIFs.add_matrices(ref1, ref2)
%Matrix{nrows: nrows1, ncols: ncols2, data: new_ref}
end
@doc """
Subtracts `matrix2` from `matrix1`.
Takes in two `%Matrix` matrices as arguments.
Returns the resultant `%Matrix` matrix.
## Examples
```elixir
iex(1)> m1 = Tensorflex.create_matrix(2,3,[[1,2,3],[4,5,6]])
%Tensorflex.Matrix{
data: #Reference<0.3124708718.3046244353.117555>,
ncols: 3,
nrows: 2
}
iex(2)> m2 = Tensorflex.create_matrix(2,3,[[4,5,6],[1,2,3]])
%Tensorflex.Matrix{
data: #Reference<0.3124708718.3046244354.115855>,
ncols: 3,
nrows: 2
}
iex(3)> m_subtracted = Tensorflex.subtract_matrices m1,m2
%Tensorflex.Matrix{
data: #Reference<0.3124708718.3046244353.120058>,
ncols: 3,
nrows: 2
}
iex(4)> Tensorflex.matrix_to_lists m_subtracted
[[-3.0, -3.0, -3.0], [3.0, 3.0, 3.0]]
```
"""
def subtract_matrices(%Matrix{nrows: nrows1, ncols: _ncols1, data: ref1}, %Matrix{nrows: _nrows2, ncols: ncols2, data: ref2}) do
new_ref = NIFs.subtract_matrices(ref1, ref2)
%Matrix{nrows: nrows1, ncols: ncols2, data: new_ref}
end
@doc """
Converts the data stored in a 2-D tensor back to a 2-D matrix.
Takes in a single argument: a `%Tensor` tensor (of any `TF_DataType`).
Returns a `%Matrix` 2-D matrix.
__NOTE__: Tensorflex doesn't currently support 3-D matrices, and therefore
tensors that are 3-D (such as those created using the `load_image_as_tensor/1`
function) cannot be converted back to a matrix, yet. Support for 3-D matrices
will be added soon.
## Examples
`tensor_to_matrix/1` converts any 2-D `%Tensor` tensor back to matrix form.
Consider `sample1.csv` back from the examples of `load_csv_as_matrix/2`:
```elixir
iex(1)> vals = Tensorflex.load_csv_as_matrix("sample1.csv", header: :false)
%Tensorflex.Matrix{
data: #Reference<0.124471106.2360737795.170799>,
ncols: 5,
nrows: 3
}
iex(2)> dims = Tensorflex.create_matrix(1,2,[[3,5]])
%Tensorflex.Matrix{
data: #Reference<0.124471106.2360737795.170827>,
ncols: 2,
nrows: 1
}
iex(3)> {:ok, float64_tensor} = Tensorflex.float64_tensor vals,dims
{:ok,
%Tensorflex.Tensor{
datatype: :tf_double,
tensor: #Reference<0.124471106.2360737794.171586>
}}
iex(4)> m_float64 = Tensorflex.tensor_to_matrix float64_tensor
%Tensorflex.Matrix{
data: #Reference<0.124471106.2360737794.171596>,
ncols: 5,
nrows: 3
}
iex(5)> Tensorflex.matrix_to_lists m_float64
[
[1.0, 2.0, 3.0, 4.0, 5.0],
[6.0, 7.0, 8.0, 9.0, 10.0],
[11.0, 12.0, 13.0, 14.0, 15.0]
]
iex(6)> {:ok, float32_tensor} = Tensorflex.float32_tensor vals,dims
{:ok,
%Tensorflex.Tensor{
datatype: :tf_float,
tensor: #Reference<0.124471106.2360737794.172555>
}}
iex(7)> m_float32 = Tensorflex.tensor_to_matrix float32_tensor
%Tensorflex.Matrix{
data: #Reference<0.124471106.2360737794.172563>,
ncols: 5,
nrows: 3
}
iex(8)> Tensorflex.matrix_to_lists m_float32
[
[1.0, 2.0, 3.0, 4.0, 5.0],
[6.0, 7.0, 8.0, 9.0, 10.0],
[11.0, 12.0, 13.0, 14.0, 15.0]
]
iex(9)> {:ok, int32_tensor} = Tensorflex.int32_tensor vals,dims
{:ok,
%Tensorflex.Tensor{
datatype: :tf_int32,
tensor: #Reference<0.124471106.2360737794.172578>
}}
iex(10)> m_int32 = Tensorflex.tensor_to_matrix int32_tensor
%Tensorflex.Matrix{
data: #Reference<0.124471106.2360737794.172586>,
ncols: 5,
nrows: 3
}
iex(11)> Tensorflex.matrix_to_lists m_int32
[
[1.0, 2.0, 3.0, 4.0, 5.0],
[6.0, 7.0, 8.0, 9.0, 10.0],
[11.0, 12.0, 13.0, 14.0, 15.0]
]
```
The matrix values obtained in the conversions, `m_int32`, `m_float32`,
`m_float64` are identical to the `vals` matrix we had generated from the
`sample1.csv` file:
```elixir
iex(12)> Tensorflex.matrix_to_lists vals
[
[1.0, 2.0, 3.0, 4.0, 5.0],
[6.0, 7.0, 8.0, 9.0, 10.0],
[11.0, 12.0, 13.0, 14.0, 15.0]
]
```
"""
def tensor_to_matrix(%Tensor{datatype: _datatype, tensor: ref}) do
matrix_ref = NIFs.tensor_to_matrix(ref)
{nrows, ncols} = NIFs.size_of_matrix matrix_ref
%Matrix{nrows: nrows, ncols: ncols, data: matrix_ref}
end
end
|
lib/tensorflex.ex
| 0.934253
| 0.940517
|
tensorflex.ex
|
starcoder
|
defmodule ReplacingWalk do
@moduledoc """
A hastily constructed replacing walk for use
with `DeferredConfig`; not
very performant, but for transforming data
in options and config, can be convenient.
"""
require Logger
@doc """
Recursive replacing walk that uses `recognize` and
`transform` functions to return a transformed version
of arbitrary data.
iex> ReplacingWalk.walk [1, 2, 3], &(&1 == 2), &(&1 * &1)
[1,4,3]
iex> ReplacingWalk.walk( [1, [2, [3, 2]]],
...> &(&1 == 2),
...> &(&1 * &1)
...> )
[1,[4, [3, 4]]]
It works for Maps:
iex> ReplacingWalk.walk %{2 => 1, 1 => 2}, &(&1 == 2), &(&1 * &1)
%{4 => 1, 1 => 4}
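Keyword lists, being lists of `{k, v}` tuples, are walked the same
way (an added example; it follows from the list and tuple clauses):
      iex> ReplacingWalk.walk [port: 1, host: 2], &(&1 == 2), &(&1 * &1)
      [port: 1, host: 4]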
Structs in general are considered leaf nodes; we support
structs that implement `Enumerable`, but **currently we expect
their `Enumerable` implementation to work like a `Map`**.
If you feed this an `Enumerable` struct that doesn't iterate
like a `Map` -- i.e., doesn't iterate over `{k, v}` -- it will die.
(See an example in tests.)
We may change that behavior in the future -- either removing
support for arbitrary Enumerables, or providing another protocol
that can be implemented to make a data type replacing-walkable.
Created quickly for
`:deferred_config`, so it's probably got some holes;
tests that break it are welcome.
"""
# lists
def walk(_data = [], _recognize, _transform), do: []
def walk([item | ls], recognize, transform) do
item = item |> maybe_transform_leaf(recognize, transform)
[ walk(item, recognize, transform) |
walk(ls, recognize, transform) ]
end
# structs (enumerable and not; see notes about Enumerable)
def walk(m = %{ :__struct__ => _ }, recognize, transform) do
if Enumerable.impl_for(m) do
m |> walk_map(recognize, transform)
else
m |> maybe_transform_leaf(recognize, transform)
end
end
  # maps (the empty-map clause must come first: a bare %{} pattern
  # matches any map and would be unreachable after the general clause)
  def walk(m, _recognize, _transform) when is_map(m) and map_size(m) == 0, do: %{}
  def walk(m, recognize, transform) when is_map(m) do
    m |> walk_map(recognize, transform)
  end
# kv tuples (very common in config)
  def walk({_, _} = t, recognize, transform) do
    t = maybe_transform_leaf(t, recognize, transform)
    if is_tuple(t) do
      {k, v} = t
      {k |> walk(recognize, transform),
       v |> walk(recognize, transform)}
    else
      t
    end
  end
# any other data (other tuples; structs; str, atoms, nums..)
def walk(other, recognize, transform) do
recognize.(other) |> maybe_do(transform, other)
end
# -- impl details for map and maplike enum support
defp walk_map(m, recognize, transform) do
m = m |> maybe_transform_leaf(recognize, transform)
# due to above, may not be enumerable any more.
# also, could be untransformed enumerable, but with
# non-map-like iteration, which we *can't* detect without trying.
    # a non-map-like enumerable makes Enum.reduce raise (not throw),
    # so rescue is used here rather than catch
    try do
      Enum.reduce(m, %{}, fn {k, v}, acc ->
        k = recognize.(k) |> maybe_do(transform, k)
        acc |> Map.put(k, walk(v, recognize, transform))
      end)
    rescue
      _ ->
        Logger.error("replacing walk: reduce failed for: #{inspect(m)}")
        m
    end
end
defp maybe_transform_leaf(o, recognize, transform) do
recognize.(o) |> maybe_do(transform, o)
end
defp maybe_do(_should_i = true, op, item), do: op.(item)
defp maybe_do(_shouldnt, _op, item), do: item
end
|
lib/replacing_walk.ex
| 0.722233
| 0.584419
|
replacing_walk.ex
|
starcoder
|
defmodule Esolix.Langs.Befunge93 do
@moduledoc """
Documentation for the Befunge93 Module
"""
# + Addition: Pop two values a and b, then push the result of a+b
# - Subtraction: Pop two values a and b, then push the result of b-a
# * Multiplication: Pop two values a and b, then push the result of a*b
# / Integer division: Pop two values a and b, then push the result of b/a, rounded down. According to the specifications, if a is zero, ask the user what result they want.
# % Modulo: Pop two values a and b, then push the remainder of the integer division of b/a.
# ! Logical NOT: Pop a value. If the value is zero, push 1; otherwise, push zero.
# ` Greater than: Pop two values a and b, then push 1 if b>a, otherwise zero.
# > PC direction right
# < PC direction left
# ^ PC direction up
# v PC direction down
# ? Random PC direction
# _ Horizontal IF: pop a value; set direction to right if value=0, set to left otherwise
# | Vertical IF: pop a value; set direction to down if value=0, set to up otherwise
# " Toggle stringmode (push each character's ASCII value all the way up to the next ")
# : Duplicate top stack value
# \ Swap top stack values
# $ Pop (remove) top stack value and discard
# . Pop top of stack and output as integer
# , Pop top of stack and output as ASCII character
# # Bridge: jump over next command in the current direction of the current PC
# g A "get" call (a way to retrieve data in storage). Pop two values y and x, then push the ASCII value of the character at that position in the program. If (x,y) is out of bounds, push 0
# p A "put" call (a way to store a value for later use). Pop three values y, x and v, then change the character at the position (x,y) in the program to the character with ASCII value v
# & Get integer from user and push it
# ~ Get character from user and push it
# @ End program
# 0 – 9 Push corresponding number onto the stack
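  # A short worked example (added for illustration): the one-line program
  # `"A",25*,@` prints "A" followed by a newline:
  #   "   enter string mode
  #   A   push 65 (the ASCII value of "A")
  #   "   leave string mode
  #   ,   pop 65, output as the character "A"
  #   2   push 2
  #   5   push 5
  #   *   pop 5 and 2, push 10
  #   ,   pop 10, output as the character "\n" (ASCII 10)
  #   @   end program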
# Data Structure used:
alias Esolix.DataStructures.Stack
import ExUnit.CaptureIO
# TODO: Add string input mode as alternative to interactive input mode
@max_width 80
@max_height 25
defmodule Befunge93Stack do
@moduledoc false
defstruct [:stack, :code, :x, :y, :direction, :string_mode?]
end
# Custom Module Errors
defmodule InvalidDirectionError do
@moduledoc false
defexception [:message]
def exception(direction) do
message = "Expected 'v', '>', '^' or '<', got '#{direction}'"
%InvalidDirectionError{message: message}
end
end
@spec eval(String.t(), keyword()) :: String.t()
@doc """
Runs Befunge93 Code and returns the IO output as a string.
## Examples
iex> result = Befunge93.eval("some hello world code")
"Hello World!"
"""
def eval(code, _params \\ []) do
capture_io(fn ->
execute(code)
end)
end
@spec eval_file(String.t(), keyword()) :: String.t()
@doc """
Runs Befunge93 Code from a file and returns the IO output as a string.
## Examples
iex> result = Befunge93.eval_file("path/to/some/hello_world.file")
"Hello World!"
"""
def eval_file(file, params \\ []) do
file
|> extract_file_contents()
|> eval(params)
end
@spec execute(String.t(), keyword()) :: :ok
@doc """
Run Befunge93 Code
## Examples
iex> Befunge93.execute("some hello world code")
"Hello World!"
:ok
"""
def execute(code, _params \\ []) do
# validate_code(code)
%Befunge93Stack{
stack: Stack.init(),
code: prep_code(code),
x: 0,
y: 0,
direction: ?>,
string_mode?: false
}
|> run()
:ok
end
@spec execute_file(String.t(), keyword()) :: :ok
@doc """
Runs Befunge93 Code from a file.
## Examples
iex> Befunge93.execute_file("path/to/some/hello_world.file")
"Hello World!"
:ok
"""
def execute_file(file, params \\ []) do
# validate_file(file)
file
|> extract_file_contents()
|> execute(params)
end
defp prep_code(code) do
code
|> String.split("\n")
|> Enum.map(&String.to_charlist(&1))
end
defp instruction(%Befunge93Stack{code: code, x: x, y: y}) do
instruction_at(code, x, y)
end
defp instruction_at(code, x, y) do
line = Enum.at(code, y)
if line, do: Enum.at(line, x), else: nil
end
defp overwrite_at(code, x, y, value) do
List.replace_at(
code,
y,
Enum.at(code, y)
|> List.replace_at(x, value)
)
end
defp run(%Befunge93Stack{} = bf_stack) do
instruction = instruction(bf_stack)
# debug(bf_stack)
if instruction == ?@ do
# End Program
bf_stack
else
run_instruction(bf_stack, instruction)
|> run()
end
end
defp run_instruction(
%Befunge93Stack{
stack: stack,
code: code,
x: x,
y: y,
direction: direction,
string_mode?: string_mode?
} = bf_stack,
instruction
) do
{x_next, y_next} = next_coordinates(x, y, direction)
bf_stack = %{bf_stack | x: x_next, y: y_next}
case {string_mode?, instruction} do
      # String mode handling
{true, ?"} ->
%{bf_stack | string_mode?: false}
{true, instruction} ->
%{bf_stack | stack: Stack.push(stack, instruction)}
{false, instruction} ->
# Execution Mode Handling
case instruction do
?+ ->
%{bf_stack | stack: Stack.add(stack)}
?- ->
%{bf_stack | stack: Stack.sub(stack, order: :reverse)}
?* ->
%{bf_stack | stack: Stack.mul(stack)}
?/ ->
%{bf_stack | stack: Stack.div(stack, order: :reverse)}
?% ->
%{bf_stack | stack: Stack.apply(stack, &Integer.mod/2, order: :reverse)}
?! ->
%{bf_stack | stack: Stack.logical_not(stack)}
?` ->
%{bf_stack | stack: Stack.greater_than(stack)}
?_ ->
{a, stack} = Stack.pop(stack)
direction = if a not in [0, nil], do: ?<, else: ?>
{x, y} = next_coordinates(x, y, direction)
%{bf_stack | stack: stack, x: x, y: y, direction: direction}
?| ->
{a, stack} = Stack.pop(stack)
direction = if a not in [0, nil], do: ?^, else: ?v
{x, y} = next_coordinates(x, y, direction)
%{bf_stack | stack: stack, x: x, y: y, direction: direction}
?" ->
%{bf_stack | string_mode?: true}
?: ->
%{bf_stack | stack: Stack.duplicate(stack)}
?\\ ->
{[a, b], stack} = Stack.popn(stack, 2)
%{bf_stack | stack: stack |> Stack.push([a, b])}
?$ ->
{_, stack} = Stack.pop(stack)
%{bf_stack | stack: stack}
?. ->
{a, stack} = Stack.pop(stack)
IO.write(a)
%{bf_stack | stack: stack}
?, ->
{a, stack} = Stack.pop(stack)
IO.write([a])
%{bf_stack | stack: stack}
?# ->
{x, y} = next_coordinates(x_next, y_next, direction)
%{bf_stack | x: x, y: y}
?g ->
{y_get, stack} = Stack.pop(stack)
{x_get, stack} = Stack.pop(stack)
%{bf_stack | stack: Stack.push(stack, instruction_at(code, x_get, y_get))}
?p ->
{[y_get, x_get, value], stack} = Stack.popn(stack, 3)
%{bf_stack | stack: stack, code: overwrite_at(code, x_get, y_get, value)}
?& ->
input = IO.gets("Enter integer") |> String.trim() |> String.to_integer()
%{bf_stack | stack: Stack.push(stack, input)}
?~ ->
input = IO.gets("Enter character") |> String.to_charlist() |> Enum.at(0)
%{bf_stack | stack: Stack.push(stack, input)}
?\s ->
bf_stack
nil ->
bf_stack
other ->
cond do
# Handle directions
other in '>v<^?' && direction != other ->
direction = maybe_randomize(other)
{x, y} = next_coordinates(x, y, direction)
%{bf_stack | x: x, y: y, direction: direction}
other in '0123456789' ->
%{
bf_stack
| stack: Stack.push(stack, List.to_string([other]) |> String.to_integer())
}
true ->
bf_stack
end
end
end
end
defp next_coordinates(x, y, direction) do
case direction do
?> ->
{x + 1, y}
?^ ->
{x, y - 1}
?< ->
{x - 1, y}
?v ->
{x, y + 1}
direction ->
raise InvalidDirectionError, direction
end
|> check_out_of_bounds()
end
defp maybe_randomize(direction) do
if direction == ??, do: Enum.random('>v<^'), else: direction
end
defp check_out_of_bounds({x, y}) do
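    # the 80x25 playfield is toroidal: moving off one edge wraps to the opposite edge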
cond do
x >= @max_width ->
{Integer.mod(x, @max_width), y}
x < 0 ->
{@max_width + x, y}
      y >= @max_height ->
{x, Integer.mod(y, @max_height)}
y < 0 ->
{x, @max_height + y}
true ->
{x, y}
end
end
defp extract_file_contents(file) do
File.read!(file)
end
defp debug(%Befunge93Stack{code: code, x: x, y: y, direction: direction} = bf_stack) do
IO.puts("---------------------------------------------\n\n\n")
IO.inspect(bf_stack)
IO.inspect([direction], label: "dir")
IO.inspect([instruction(bf_stack)], label: "instr")
IO.puts("\n\n")
IO.write(" ")
    Enum.each(0..x, fn _ ->
IO.write(" ")
end)
IO.write("|\n")
IO.write(" ")
    Enum.each(0..x, fn _ ->
IO.write(" ")
end)
IO.write("v\n")
Enum.with_index(code)
|> Enum.each(fn {line, y_i} ->
if y_i == y do
IO.write("-> ")
IO.write(line)
IO.write("\n")
else
IO.write(" ")
IO.write(line)
IO.write("\n")
end
end)
IO.gets("Press enter for next step\n\n")
end
end
|
lib/langs/befunge93.ex
| 0.653238
| 0.609815
|
befunge93.ex
|
starcoder
|
defmodule PhoenixUp do
@moduledoc false
@doc """
Inflects path, scope, alias and more from the given name. (This function is extracted from the Phoenix framework source, and modified.)
## Examples
How to use:
```
PhoenixUp.inflect("some_name")
PhoenixUp.inflect(SomeName)
PhoenixUp.inflect(Some.Name)
PhoenixUp.inflect("some/name")
```
Response examples:
```
[alias: "User",
human: "User",
base: "Phoenix",
base_path: "phoenix",
web_module: "PhoenixWeb",
web_path: "phoenix_web",
module: "Phoenix.User",
test_module: "Phoenix.UserTest",
scoped: "User",
singular: "user",
path: "user"]
[alias: "User",
human: "User",
base: "Phoenix",
base_path: "phoenix",
web_module: "PhoenixWeb",
web_path: "phoenix_web",
module: "Phoenix.Admin.User",
test_module: "Phoenix.Admin.UserTest",
scoped: "Admin.User",
singular: "user",
path: "admin/user"]
[alias: "SuperUser",
human: "Super user",
base: "Phoenix",
base_path: "phoenix",
web_module: "PhoenixWeb",
web_path: "phoenix_web",
module: "Phoenix.Admin.SuperUser",
test_module: "Phoenix.Admin.SuperUserTest",
scoped: "Admin.SuperUser",
singular: "super_user",
path: "admin/super_user"]
```
"""
def inflect(singular) do
base = get_base()
web_module = base |> web_module() |> inspect()
web_path = base |> web_path()
scoped = camelize(singular)
path = underscore(scoped)
base_path = underscore(base)
singular = String.split(path, "/") |> List.last()
module = Module.concat(base, scoped) |> inspect
test_module = "#{module}Test"
alias = String.split(module, ".") |> List.last()
human = humanize(singular)
[
alias: alias,
human: human,
base: base,
base_path: base_path,
web_module: web_module,
web_path: web_path,
module: module,
test_module: test_module,
scoped: scoped,
singular: singular,
path: path
]
end
defp web_path(base) do
"#{Macro.underscore(base)}_web"
end
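  # e.g. MyApp -> MyAppWeb; a base module already ending in "Web" is kept as-is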
defp web_module(base) do
if base |> to_string() |> String.ends_with?("Web") do
Module.concat([base])
else
Module.concat(["#{base}Web"])
end
end
defp get_base do
app_base(otp_app())
end
defp app_base(app) do
case Application.get_env(app, :namespace, app) do
^app -> app |> to_string() |> camelize()
mod -> mod |> inspect()
end
end
defp otp_app do
Mix.Project.config() |> Keyword.fetch!(:app)
end
defp camelize(value), do: Macro.camelize(value)
defp underscore(value), do: Macro.underscore(value)
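  # e.g. humanize("super_user") -> "Super user"; a trailing "_id" is dropped: "user_id" -> "User"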
defp humanize(atom) when is_atom(atom),
do: humanize(Atom.to_string(atom))
defp humanize(bin) when is_binary(bin) do
bin =
if String.ends_with?(bin, "_id") do
binary_part(bin, 0, byte_size(bin) - 3)
else
bin
end
bin |> String.replace("_", " ") |> String.capitalize()
end
end
|
lib/phoenix_up.ex
| 0.808559
| 0.609088
|
phoenix_up.ex
|
starcoder
|
defmodule Xgit.Tag do
@moduledoc ~S"""
Represents a git `tag` object in memory.
"""
alias Xgit.ContentSource
alias Xgit.Object
alias Xgit.ObjectId
alias Xgit.ObjectType
alias Xgit.PersonIdent
alias Xgit.Ref
use Xgit.ObjectType
import Xgit.Util.ForceCoverage
import Xgit.Util.ParseHeader, only: [next_header: 1]
@typedoc ~S"""
This struct describes a single `tag` object so it can be manipulated in memory.
## Struct Members
* `:object`: (`Xgit.ObjectId`) object referenced by this tag
* `:type`: (`Xgit.ObjectType`) type of the target object
* `:name`: (bytelist) name of the tag
* `:tagger`: (`Xgit.PersonIdent`) person who created the tag
* `:message`: (bytelist) user-entered tag message (encoding unspecified)
**TO DO:** Support signatures and other extensions.
https://github.com/elixir-git/xgit/issues/202
"""
@type t :: %__MODULE__{
object: ObjectId.t(),
type: ObjectType.t(),
name: [byte],
tagger: PersonIdent.t() | nil,
message: [byte]
}
@enforce_keys [:object, :type, :name, :message]
defstruct [:object, :type, :name, :message, tagger: nil]
@doc ~S"""
Return `true` if the value is a tag struct that is valid.
"""
@spec valid?(tag :: any) :: boolean
def valid?(tag)
def valid?(%__MODULE__{
object: object_id,
type: object_type,
name: name,
tagger: tagger,
message: message
})
when is_binary(object_id) and is_object_type(object_type) and is_list(name) and
is_list(message) do
ObjectId.valid?(object_id) &&
not Enum.empty?(name) &&
(tagger == nil || PersonIdent.valid?(tagger)) &&
not Enum.empty?(message)
end
def valid?(_), do: cover(false)
@doc ~S"""
Return `true` if the value provided is valid as a tag name.
"""
@spec valid_name?(name :: any) :: boolean
def valid_name?(name) when is_list(name), do: Ref.valid_name?("refs/tags/#{name}")
def valid_name?(_name), do: cover(false)
@typedoc ~S"""
Error response codes returned by `from_object/1`.
"""
@type from_object_reason :: :not_a_tag | :invalid_tag
@doc ~S"""
Renders a tag structure from an `Xgit.Object`.
## Return Values
`{:ok, tag}` if the object contains a valid `tag` object.
`{:error, :not_a_tag}` if the object contains an object of a different type.
`{:error, :invalid_tag}` if the object says that it is of type `tag`, but
cannot be parsed as such.
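## Example
A small usage sketch (the `object` here is assumed to have been read
from an object store beforehand; that step is outside this module):
    case Xgit.Tag.from_object(object) do
      {:ok, %Xgit.Tag{name: name}} -> name
      {:error, reason} -> reason
    end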
"""
@spec from_object(object :: Object.t()) :: {:ok, tag :: t} | {:error, from_object_reason}
def from_object(object)
def from_object(%Object{type: :tag, content: content} = _object) do
content
|> ContentSource.stream()
|> Enum.to_list()
|> from_object_internal()
end
def from_object(%Object{} = _object), do: cover({:error, :not_a_tag})
defp from_object_internal(data) do
with {:object, {'object', object_id_str, data}} <- {:object, next_header(data)},
{:object_id, {object_id, []}} <- {:object_id, ObjectId.from_hex_charlist(object_id_str)},
{:type_str, {'type', type_str, data}} <- {:type_str, next_header(data)},
{:type, type} when is_object_type(type) <- {:type, ObjectType.from_bytelist(type_str)},
{:name, {'tag', [_ | _] = name, data}} <- {:name, next_header(data)},
{:tagger_id, tagger, data} <- optional_tagger(data),
message when is_list(message) <- drop_if_lf(data) do
# TO DO: Support signatures and other extensions.
# https://github.com/elixir-git/xgit/issues/202
cover {:ok,
%__MODULE__{
object: object_id,
type: type,
name: name,
tagger: tagger,
message: message
}}
else
_ -> cover {:error, :invalid_tag}
end
end
defp optional_tagger(data) do
with {:tagger, {'tagger', tagger_str, data}} <- {:tagger, next_header(data)},
{:tagger_id, %PersonIdent{} = tagger} <-
{:tagger_id, PersonIdent.from_byte_list(tagger_str)} do
cover {:tagger_id, tagger, data}
else
{:tagger, :no_header_found} ->
cover {:tagger_id, nil, data}
{:tagger_id, x} ->
cover {:tagger_error, x}
end
end
defp drop_if_lf([10 | data]), do: cover(data)
defp drop_if_lf([]), do: cover([])
defp drop_if_lf(_), do: cover(:error)
@doc ~S"""
Renders this tag structure into a corresponding `Xgit.Object`.
If the tag structure is not valid, will raise `ArgumentError`.
"""
@spec to_object(tag :: t) :: Object.t()
def to_object(tag)
def to_object(
%__MODULE__{
object: object_id,
type: object_type,
name: tag_name,
tagger: %PersonIdent{} = tagger,
message: message
} = tag
) do
unless valid?(tag) do
raise ArgumentError, "Xgit.Tag.to_object/1: tag is not valid"
end
rendered_tag =
'object #{object_id}\n' ++
'type #{object_type}\n' ++
'tag #{tag_name}\n' ++
'tagger #{PersonIdent.to_external_string(tagger)}\n' ++
'\n' ++
message
# TO DO: Support signatures and other extensions.
# https://github.com/elixir-git/xgit/issues/202
cover %Object{
type: :tag,
content: rendered_tag,
size: Enum.count(rendered_tag),
id: ObjectId.calculate_id(rendered_tag, :tag)
}
end
end
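# Round-trip sketch for the functions above. The object ID and identity are
# placeholders; note that `name` and `message` are charlists per the struct spec:
#
#     tag = %Xgit.Tag{
#       object: "2cbe984de9b2f8d1e3b2b55e13a24c9bb3eca2be",
#       type: :commit,
#       name: 'v1.0',
#       tagger: %Xgit.PersonIdent{name: "A U Thor", email: "author@example.com",
#                                 when: 1_142_878_501_000, tz_offset: 150},
#       message: 'release 1.0\n'
#     }
#     object = Xgit.Tag.to_object(tag)
#     {:ok, _parsed} = Xgit.Tag.from_object(object)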
|
lib/xgit/tag.ex
| 0.847889
| 0.406626
|
tag.ex
|
starcoder
|
defmodule DocuSign.Model.AccountTabSettings do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:allowTabOrder,
:allowTabOrderMetadata,
:approveDeclineTabsEnabled,
:approveDeclineTabsMetadata,
:calculatedFieldsEnabled,
:calculatedFieldsMetadata,
:checkboxTabsEnabled,
:checkboxTabsMetadata,
:dataFieldRegexEnabled,
:dataFieldRegexMetadata,
:dataFieldSizeEnabled,
:dataFieldSizeMetadata,
:firstLastEmailTabsEnabled,
:firstLastEmailTabsMetadata,
:listTabsEnabled,
:listTabsMetadata,
:noteTabsEnabled,
:noteTabsMetadata,
:radioTabsEnabled,
:radioTabsMetadata,
:savingCustomTabsEnabled,
:savingCustomTabsMetadata,
:senderToChangeTabAssignmentsEnabled,
:senderToChangeTabAssignmentsMetadata,
:sharedCustomTabsEnabled,
:sharedCustomTabsMetadata,
:tabDataLabelEnabled,
:tabDataLabelMetadata,
:tabLocationEnabled,
:tabLocationMetadata,
:tabLockingEnabled,
:tabLockingMetadata,
:tabScaleEnabled,
:tabScaleMetadata,
:tabTextFormattingEnabled,
:tabTextFormattingMetadata,
:textTabsEnabled,
:textTabsMetadata
]
@type t :: %__MODULE__{
        :allowTabOrder => String.t(),
        :allowTabOrderMetadata => DocuSign.Model.SettingsMetadata.t(),
        :approveDeclineTabsEnabled => String.t(),
        :approveDeclineTabsMetadata => DocuSign.Model.SettingsMetadata.t(),
        :calculatedFieldsEnabled => String.t(),
        :calculatedFieldsMetadata => DocuSign.Model.SettingsMetadata.t(),
        :checkboxTabsEnabled => String.t(),
        :checkboxTabsMetadata => DocuSign.Model.SettingsMetadata.t(),
        :dataFieldRegexEnabled => String.t(),
        :dataFieldRegexMetadata => DocuSign.Model.SettingsMetadata.t(),
        :dataFieldSizeEnabled => String.t(),
        :dataFieldSizeMetadata => DocuSign.Model.SettingsMetadata.t(),
        :firstLastEmailTabsEnabled => String.t(),
        :firstLastEmailTabsMetadata => DocuSign.Model.SettingsMetadata.t(),
        :listTabsEnabled => String.t(),
        :listTabsMetadata => DocuSign.Model.SettingsMetadata.t(),
        :noteTabsEnabled => String.t(),
        :noteTabsMetadata => DocuSign.Model.SettingsMetadata.t(),
        :radioTabsEnabled => String.t(),
        :radioTabsMetadata => DocuSign.Model.SettingsMetadata.t(),
        :savingCustomTabsEnabled => String.t(),
        :savingCustomTabsMetadata => DocuSign.Model.SettingsMetadata.t(),
        :senderToChangeTabAssignmentsEnabled => String.t(),
        :senderToChangeTabAssignmentsMetadata => DocuSign.Model.SettingsMetadata.t(),
        :sharedCustomTabsEnabled => String.t(),
        :sharedCustomTabsMetadata => DocuSign.Model.SettingsMetadata.t(),
        :tabDataLabelEnabled => String.t(),
        :tabDataLabelMetadata => DocuSign.Model.SettingsMetadata.t(),
        :tabLocationEnabled => String.t(),
        :tabLocationMetadata => DocuSign.Model.SettingsMetadata.t(),
        :tabLockingEnabled => String.t(),
        :tabLockingMetadata => DocuSign.Model.SettingsMetadata.t(),
        :tabScaleEnabled => String.t(),
        :tabScaleMetadata => DocuSign.Model.SettingsMetadata.t(),
        :tabTextFormattingEnabled => String.t(),
        :tabTextFormattingMetadata => DocuSign.Model.SettingsMetadata.t(),
        :textTabsEnabled => String.t(),
        :textTabsMetadata => DocuSign.Model.SettingsMetadata.t()
      }
end
defimpl Poison.Decoder, for: DocuSign.Model.AccountTabSettings do
import DocuSign.Deserializer
def decode(value, options) do
value
|> deserialize(:allowTabOrderMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:approveDeclineTabsMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:calculatedFieldsMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:checkboxTabsMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:dataFieldRegexMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:dataFieldSizeMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:firstLastEmailTabsMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:listTabsMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:noteTabsMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:radioTabsMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:savingCustomTabsMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(
:senderToChangeTabAssignmentsMetadata,
:struct,
DocuSign.Model.SettingsMetadata,
options
)
|> deserialize(:sharedCustomTabsMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:tabDataLabelMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:tabLocationMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:tabLockingMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:tabScaleMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:tabTextFormattingMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
|> deserialize(:textTabsMetadata, :struct, DocuSign.Model.SettingsMetadata, options)
end
end
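# Decoding sketch (hedged): assumes Poison is available and the related
# DocuSign.Model.SettingsMetadata model is compiled; the JSON payload and its
# fields are illustrative only:
#
#     json = ~s({"textTabsEnabled": "true", "textTabsMetadata": {"rights": "editable"}})
#     Poison.decode!(json, as: %DocuSign.Model.AccountTabSettings{})
#     #=> %DocuSign.Model.AccountTabSettings{textTabsEnabled: "true",
#     #     textTabsMetadata: %DocuSign.Model.SettingsMetadata{...}}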
|
lib/docusign/model/account_tab_settings.ex
| 0.592902
| 0.401306
|
account_tab_settings.ex
|
starcoder
|
defmodule AdventOfCode.Day03 do
@moduledoc """
Day 3.
Part 1: We need to find overlapping square inches that are within two or more claims.
"""
@day3 "day03.txt"
def read do
File.stream!(@day3, [], :line)
|> Stream.map(&String.trim/1)
|> Stream.map(&parse_input/1)
|> Enum.reduce(%{}, fn (mapped_data, acc) ->
count_squares(mapped_data, acc)
end)
|> Map.values()
|> Enum.filter(&(&1 > 1))
|> length()
end
def parse_input("#" <> rest) do
rest
|> String.split()
|> clean_input()
end
defp clean_input([id, "@", xy, dimensions]) do
[x, y] =
xy
|> String.trim(":")
|> String.split(",")
|> Enum.map(&String.to_integer/1)
[width, height] =
dimensions
|> String.split("x")
|> Enum.map(&String.to_integer/1)
%{
id: String.to_integer(id),
from_left: x,
from_top: y,
width: width,
height: height
}
end
def count_squares(%{id: _id, from_left: left, from_top: top, width: width, height: height}, squares) do
start_x = left + 1
end_x = start_x + (width - 1)
start_y = -top - 1
end_y = start_y - (height - 1)
for x <- start_x..end_x, y <- start_y..end_y do
{x, y}
end
|> Enum.reduce(squares, fn (coordinate, acc) ->
Map.update(acc, coordinate, 1, &(&1 + 1))
end)
end
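  # Illustration of the coordinate bookkeeping above: a 2x2 claim one square
  # from the left and top covers four coordinates (y counts downward as
  # negative values), each seen once:
  #
  #     count_squares(%{id: 1, from_left: 1, from_top: 1, width: 2, height: 2}, %{})
  #     #=> %{{2, -2} => 1, {2, -3} => 1, {3, -2} => 1, {3, -3} => 1}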
@doc """
Part 2.
Find the one ID that does _not_ have any points that overlap with other IDs.
So, if we fetch the coordinate keys and if *all* of the values are only seen
once, then we can assume that that specific ID is unique and doesn't overlap anything else.
"""
def part2 do
parsed_data =
File.stream!(@day3, [], :line)
|> Stream.map(&String.trim/1)
|> Stream.map(&parse_input/1)
coordinates_count =
parsed_data
|> Enum.reduce(%{}, fn (mapped_data, acc) ->
count_squares(mapped_data, acc)
end)
Enum.reduce(parsed_data, [], fn (box_data, acc) ->
if all_unique?(count_squares(box_data, %{}), coordinates_count) do
[box_data | acc]
else
        # keep previously found unique claims
        acc
end
end)
end
def all_unique?(coordinates, coordinates_count) do
coordinates
|> Map.keys()
|> Enum.all?(fn (coordinate) ->
Map.get(coordinates_count, coordinate) == 1
end)
end
end
|
2018/elixir/advent_of_code/lib/day03/day03.ex
| 0.757256
| 0.507507
|
day03.ex
|
starcoder
|
defmodule Poker do
defmodule Card do
# look ahead to non-digit character
@suit ~r/(?=\D)/
@ranks ~w[a 2 3 4 5 6 7 8 9 10 J Q K A]
@num_ranks length(@ranks)
    # Assign each card a value less than 1, dependent on its rank,
# so that:
# - cards can be compared to each other
# - the value can be tallied along with hand scores
@single_card_value Decimal.new("0.01")
defstruct [:rank, :suit]
def new(card_string) do
[rank, suit] = String.split(card_string, @suit, trim: true)
%Card{rank: rank, suit: suit}
end
def single_card_value, do: @single_card_value
def num_ranks, do: @num_ranks
def to_string(%Card{rank: rank, suit: suit}), do: rank <> suit
def value(%Card{rank: rank}) do
@ranks
|> Enum.find_index(&(&1 == rank))
|> Decimal.cast()
|> Decimal.mult(@single_card_value)
end
def compare_value(card, {min, max} = acc) do
cond do
Decimal.lt?(value(card), value(min)) ->
{card, max}
Decimal.gt?(value(card), value(max)) ->
{min, card}
true ->
acc
end
end
def equal?(%Card{rank: rank1}, %Card{rank: rank2}), do: rank1 == rank2
def multiple?(card, ranks, num_cards) do
Enum.count(ranks, &(&1 == card.rank)) >= num_cards
end
end
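  # Behavior sketch for Card (values follow the rank table above):
  #
  #     card = Card.new("10H")  #=> %Card{rank: "10", suit: "H"}
  #     Card.value(card)        #=> #Decimal<0.09>  ("10" is rank index 9, times 0.01)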
defmodule Hand do
alias Poker.Score
defstruct cards: [], score: 0
def new(hand) do
cards = Enum.map(hand, &Card.new/1)
Score.calculate(%Hand{cards: cards})
end
def to_list(%Hand{cards: cards}), do: Enum.map(cards, &Card.to_string/1)
def equal?(%Hand{score: score1}, %Hand{score: score2}) do
Decimal.equal?(score1, score2)
end
def better?(%Hand{score: score1}, %Hand{score: score2}) do
Decimal.gt?(score1, score2)
end
def min_max(%Hand{cards: [first | rest]}) do
Enum.reduce(rest, {first, first}, &Card.compare_value/2)
end
def ranks(%Hand{cards: cards}), do: Enum.map(cards, & &1.rank)
def suits(%Hand{cards: cards}), do: Enum.map(cards, & &1.suit)
def find_multicard(%Hand{cards: cards} = hand, num_cards) do
cards
|> Enum.reduce([], &add_multicard({ranks(hand), num_cards}, &1, &2))
|> Enum.uniq_by(& &1.rank)
end
defp add_multicard({ranks, num_cards}, card, acc) do
if Card.multiple?(card, ranks, num_cards), do: [card | acc], else: acc
end
end
defmodule Score do
defmodule HighLowCard do
def score(hand) do
{min, max} = Hand.min_max(hand)
min
|> Card.value()
|> Decimal.div(2)
|> Decimal.add(Card.value(max))
end
end
defmodule OnePair do
@points 1
@pair 2
def score(hand) do
hand
|> Hand.find_multicard(@pair)
|> tally_points()
end
defp tally_points([]), do: 0
defp tally_points(cards) do
cards
        # compare by numeric card value; rank strings don't sort correctly (e.g. "A" < "J")
        |> Enum.max_by(&Decimal.to_float(Card.value(&1)))
|> Card.value()
|> Decimal.add(@points)
end
end
defmodule TwoPair do
@points 2
@pair 2
def score(hand) do
hand
|> Hand.find_multicard(@pair)
|> tally_points()
end
defp tally_points([]), do: 0
defp tally_points([_card]), do: 0
defp tally_points(cards) do
cards
        # compare by numeric card value; rank strings don't sort correctly (e.g. "A" < "J")
        |> Enum.max_by(&Decimal.to_float(Card.value(&1)))
|> Card.value()
|> Decimal.add(@points)
end
end
defmodule ThreeOfAKind do
@points 3
@triplet 3
def score(hand) do
hand
|> Hand.find_multicard(@triplet)
|> tally_points(hand)
end
defp tally_points([], _hand), do: 0
defp tally_points([card], hand) do
remaining_cards_score = remaining_cards_score(hand, card)
card
|> Card.value()
|> Decimal.add(@points)
|> Decimal.add(remaining_cards_score)
end
defp remaining_cards_score(%Hand{cards: cards}, card) do
threes = Enum.filter(cards, &Card.equal?(&1, card))
%Hand{cards: cards -- threes}
|> HighLowCard.score()
end
end
defmodule Straight do
@points 5
@high_ace "A"
@low_ace "a"
def score(hand) do
hand
|> straight()
|> tally_points()
end
def straight(hand) do
cards = Enum.sort_by(hand.cards, &Card.value(&1))
numbered_straight(cards) || ace_low_straight(cards)
end
defp tally_points(nil), do: 0
defp tally_points(cards) do
cards
|> Enum.reverse()
|> hd()
|> Card.value()
|> Decimal.mult(Card.num_ranks())
|> Decimal.add(@points)
end
defp numbered_straight(cards) do
straight =
cards
|> Enum.chunk_every(2, 1, :discard)
|> Enum.all?(&single_card_gap?/1)
if straight, do: cards, else: nil
end
defp single_card_gap?([card1, card2]) do
card2
|> Card.value()
|> Decimal.sub(Card.value(card1))
|> Decimal.equal?(Card.single_card_value())
end
defp ace_low_straight(cards) do
cards
|> Enum.split_with(&(&1.rank == @high_ace))
|> check_ace_low_straight()
end
defp check_ace_low_straight({[ace], other_cards}) do
ace_low = Card.new(@low_ace <> ace.suit)
numbered_straight([ace_low | other_cards])
end
defp check_ace_low_straight(_other), do: nil
end
defmodule Flush do
@points 8
def score(hand) do
if flush?(hand) do
hand.cards
|> Enum.sort_by(&Card.value(&1), &>=/2)
|> hd()
|> Card.value()
|> Decimal.add(@points)
else
0
end
end
def flush?(hand) do
hand
|> Hand.suits()
|> Enum.uniq()
|> length()
|> Kernel.==(1)
end
end
defmodule FullHouse do
@points 13
@triplet 3
@pair 2
def score(hand) do
ranks = Hand.ranks(hand)
{three_set, other_cards} = split_hand(hand, ranks)
if full_house?(three_set, other_cards, ranks) do
score =
three_set
|> hd()
|> Card.value()
|> Decimal.mult(Card.num_ranks())
|> Decimal.add(@points)
|> Decimal.add(hand.score)
throw({:halt, %Hand{hand | score: score}})
else
0
end
end
defp split_hand(%Hand{cards: cards}, ranks) do
cards
|> Enum.sort_by(&Card.value(&1))
|> Enum.split_with(&Card.multiple?(&1, ranks, @triplet))
end
defp full_house?([], _pair, _ranks), do: false
defp full_house?(_triplet, pair, ranks) do
Enum.all?(pair, &Card.multiple?(&1, ranks, @pair))
end
end
defmodule FourOfAKind do
@points 21
@square 4
def score(hand) do
hand
|> split_hand()
|> tally_points(hand)
end
defp split_hand(%Hand{cards: cards} = hand) do
ranks = Hand.ranks(hand)
Enum.split_with(cards, &Card.multiple?(&1, ranks, @square))
end
defp tally_points({[], _cards}, _hand), do: 0
defp tally_points({square, _other_card}, hand) do
score =
square
|> hd()
|> Card.value()
|> Decimal.add(@points)
|> Decimal.add(hand.score)
throw({:halt, %Hand{hand | score: score}})
end
end
defmodule StraightFlush do
@points 34
def score(hand) do
straight = Straight.straight(hand)
if straight && Flush.flush?(hand) do
score =
straight
|> Enum.reverse()
|> hd()
|> Card.value()
|> Decimal.add(@points)
|> Decimal.add(hand.score)
throw({:halt, %Hand{hand | score: score}})
else
0
end
end
end
@hand_types [
HighLowCard,
OnePair,
TwoPair,
ThreeOfAKind,
Straight,
Flush,
FullHouse,
FourOfAKind,
StraightFlush
]
def calculate(hand) do
Enum.reduce(@hand_types, hand, &sum_points/2)
catch
{:halt, hand} ->
hand
end
defp sum_points(module, hand) do
score =
module
|> apply(:score, [hand])
|> Decimal.add(hand.score)
%Hand{hand | score: score}
end
end
@doc """
Given a list of poker hands, return a list containing the highest scoring hand.
If two or more hands tie, return the list of tied hands in the order they were received.
The basic rules and hand rankings for Poker can be found at:
https://en.wikipedia.org/wiki/List_of_poker_hands
For this exercise, we'll consider the game to be using no Jokers,
so five-of-a-kind hands will not be tested. We will also consider
the game to be using multiple decks, so it is possible for multiple
players to have identical cards.
Aces can be used in low (A 2 3 4 5) or high (10 J Q K A) straights, but do not count as
a high card in the former case.
For example, (A 2 3 4 5) will lose to (2 3 4 5 6).
You can also assume all inputs will be valid, and do not need to perform error checking
when parsing card values. All hands will be a list of 5 strings, containing a number
(or letter) for the rank, followed by the suit.
Ranks (lowest to highest): 2 3 4 5 6 7 8 9 10 J Q K A
Suits (order doesn't matter): C D H S
Example hand: ~w(4S 5H 4C 5D 4H) # Full house, 5s over 4s
"""
@spec best_hand(list(list(String.t()))) :: list(list(String.t()))
def best_hand([hand]), do: [hand]
def best_hand(hands) do
[first_hand | rest] = Enum.map(hands, &Hand.new/1)
rest
|> Enum.reduce(initial_best_hands(first_hand), &compare_best_hand/2)
|> Map.fetch!(:best_hands)
|> Enum.reverse()
end
defp initial_best_hands(hand) do
%{best_hands: [Hand.to_list(hand)], best_hand: hand}
end
defp compare_best_hand(hand, %{best_hand: best_hand} = acc) do
cond do
Hand.equal?(hand, best_hand) ->
Map.update!(acc, :best_hands, &[Hand.to_list(hand) | &1])
Hand.better?(hand, best_hand) ->
acc
|> Map.put(:best_hands, [Hand.to_list(hand)])
|> Map.put(:best_hand, hand)
true ->
acc
end
end
end
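# Usage sketch, using the example hand from the doc above:
#
#     Poker.best_hand([~w(4S 5H 4C 5D 4H), ~w(2C 3D 4S 5H 6C)])
#     #=> [["4S", "5H", "4C", "5D", "4H"]]  # the full house outscores the straight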
|
elixir/poker/lib/poker.ex
| 0.628863
| 0.516047
|
poker.ex
|
starcoder
|
defmodule AWS.Ivschat do
@moduledoc """
## Introduction
The Amazon IVS Chat control-plane API enables you to create and manage Amazon
IVS Chat resources.
You also need to integrate with the [ Amazon IVS Chat Messaging API](https://docs.aws.amazon.com/ivs/latest/chatmsgapireference/chat-messaging-api.html),
to enable users to interact with chat rooms in real time.
The API is an AWS regional service. For a list of supported regions and Amazon
IVS Chat HTTPS service endpoints, see the Amazon IVS Chat information on the
[Amazon IVS page](https://docs.aws.amazon.com/general/latest/gr/ivs.html) in the *AWS General Reference*.
## Notes on terminology:
* You create service applications using the Amazon IVS Chat API. We
refer to these as *applications*.
* You create front-end client applications (browser and Android/iOS
apps) using the Amazon IVS Chat Messaging API. We refer to these as *clients*.
## Resources
The following resource is part of Amazon IVS Chat:
* **Room** — The central Amazon IVS Chat resource through which
clients connect to and exchange chat messages. See the Room endpoints for more
information.
## API Access Security
Your Amazon IVS Chat applications (service applications and clients) must be
authenticated and authorized to access Amazon IVS Chat resources. Note the
differences between these concepts:
* *Authentication* is about verifying identity. Requests to the
Amazon IVS Chat API must be signed to verify your identity.
* *Authorization* is about granting permissions. Your IAM roles need
to have permissions for Amazon IVS Chat API requests.
Users (viewers) connect to a room using secure access tokens that you create
using the `CreateChatToken` endpoint through the AWS SDK. You call
CreateChatToken for every user’s chat session, passing identity and
authorization information about the user.
## Signing API Requests
HTTP API requests must be signed with an AWS SigV4 signature using your AWS
security credentials. The AWS Command Line Interface (CLI) and the AWS SDKs take
care of signing the underlying API calls for you. However, if your application
calls the Amazon IVS Chat HTTP API directly, it’s your responsibility to sign
the requests.
You generate a signature using valid AWS credentials for an IAM role that has
permission to perform the requested action. For example, DeleteMessage requests
must be made using an IAM role that has the `ivschat:DeleteMessage` permission.
For more information:
* Authentication and generating signatures — See [Authenticating
Requests (Amazon Web Services Signature Version
4)](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html)
in the *Amazon Web Services General Reference*.
* Managing Amazon IVS permissions — See [Identity and Access Management](https://docs.aws.amazon.com/ivs/latest/userguide/security-iam.html)
on the Security page of the *Amazon IVS User Guide*.
## Messaging Endpoints
* `DeleteMessage` — Sends an event to a specific room which directs
clients to delete a specific message; that is, unrender it from view and delete
it from the client’s chat history. This event’s `EventName` is
`aws:DELETE_MESSAGE`. This replicates the [
DeleteMessage](https://docs.aws.amazon.com/ivs/latest/chatmsgapireference/actions-deletemessage-publish.html)
WebSocket operation in the Amazon IVS Chat Messaging API.
* `DisconnectUser` — Disconnects all connections using a specified
user ID from a room. This replicates the [
DisconnectUser](https://docs.aws.amazon.com/ivs/latest/chatmsgapireference/actions-disconnectuser-publish.html)
WebSocket operation in the Amazon IVS Chat Messaging API.
* `SendEvent` — Sends an event to a room. Use this within your
application’s business logic to send events to clients of a room; e.g., to
notify clients to change the way the chat UI is rendered.
## Chat Token Endpoint
* `CreateChatToken` — Creates an encrypted token that is used to
establish an individual WebSocket connection to a room. The token is valid for
one minute, and a connection (session) established with the token is valid for
the specified duration.
## Room Endpoints
* `CreateRoom` — Creates a room that allows clients to connect and
pass messages.
* `DeleteRoom` — Deletes the specified room.
* `GetRoom` — Gets the specified room.
* `ListRooms` — Gets summary information about all your rooms in the
AWS region where the API request is processed.
* `UpdateRoom` — Updates a room’s configuration.
## Tags Endpoints
* `ListTagsForResource` — Gets information about AWS tags for the
specified ARN.
* `TagResource` — Adds or updates tags for the AWS resource with the
specified ARN.
* `UntagResource` — Removes tags from the resource with the
specified ARN.
All the above are HTTP operations. There is a separate *messaging* API for
managing Chat resources; see the [ Amazon IVS Chat Messaging API Reference](https://docs.aws.amazon.com/ivs/latest/chatmsgapireference/chat-messaging-api.html).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2020-07-14",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "ivschat",
global?: false,
protocol: "rest-json",
service_id: "ivschat",
signature_version: "v4",
signing_name: "ivschat",
target_prefix: nil
}
end
@doc """
Creates an encrypted token that is used to establish an individual WebSocket
connection to a room.
The token is valid for one minute, and a connection (session) established with
the token is valid for the specified duration.
Encryption keys are owned by Amazon IVS Chat and never used directly by your
application.
"""
def create_chat_token(%Client{} = client, input, options \\ []) do
url_path = "/CreateChatToken"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Creates a room that allows clients to connect and pass messages.
"""
def create_room(%Client{} = client, input, options \\ []) do
url_path = "/CreateRoom"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Sends an event to a specific room which directs clients to delete a specific
message; that is, unrender it from view and delete it from the client’s chat
history.
This event’s `EventName` is `aws:DELETE_MESSAGE`. This replicates the [
DeleteMessage](https://docs.aws.amazon.com/ivs/latest/chatmsgapireference/actions-deletemessage-publish.html)
WebSocket operation in the Amazon IVS Chat Messaging API.
"""
def delete_message(%Client{} = client, input, options \\ []) do
url_path = "/DeleteMessage"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Deletes the specified room.
"""
def delete_room(%Client{} = client, input, options \\ []) do
url_path = "/DeleteRoom"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Disconnects all connections using a specified user ID from a room.
This replicates the [
DisconnectUser](https://docs.aws.amazon.com/ivs/latest/chatmsgapireference/actions-disconnectuser-publish.html)
WebSocket operation in the Amazon IVS Chat Messaging API.
"""
def disconnect_user(%Client{} = client, input, options \\ []) do
url_path = "/DisconnectUser"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Gets the specified room.
"""
def get_room(%Client{} = client, input, options \\ []) do
url_path = "/GetRoom"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Gets summary information about all your rooms in the AWS region where the API
request is processed.
Results are sorted in descending order of `updateTime`.
"""
def list_rooms(%Client{} = client, input, options \\ []) do
url_path = "/ListRooms"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Gets information about AWS tags for the specified ARN.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Sends an event to a room.
Use this within your application’s business logic to send events to clients of a
room; e.g., to notify clients to change the way the chat UI is rendered.
"""
def send_event(%Client{} = client, input, options \\ []) do
url_path = "/SendEvent"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Adds or updates tags for the AWS resource with the specified ARN.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Removes tags from the resource with the specified ARN.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
{query_params, input} =
[
{"tagKeys", "tagKeys"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Updates a room’s configuration.
"""
def update_room(%Client{} = client, input, options \\ []) do
url_path = "/UpdateRoom"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
end
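# Usage sketch (hedged): assumes the aws-elixir client conventions; the
# credentials, region, room ARN, and user ID below are placeholders:
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-west-2")
#     {:ok, %{"token" => _token}, _http_response} =
#       AWS.Ivschat.create_chat_token(client, %{
#         "roomIdentifier" => "arn:aws:ivschat:us-west-2:123456789012:room/AbCdEfGhIjKl",
#         "userId" => "user-1"
#       })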
|
lib/aws/generated/ivschat.ex
| 0.883198
| 0.503906
|
ivschat.ex
|
starcoder
|
defmodule Deque do
@moduledoc """
A fast deque implementation using 2 rotating lists.
"""
@opaque t :: %__MODULE__{
size: integer,
max_size: integer,
list1: list,
list2: list,
}
@type value :: term
defstruct size: 0, max_size: nil, list1: [], list2: []
@spec new(integer) :: t
def new(max_size \\ 100) do
%Deque{max_size: max_size}
end
@spec append(t, value) :: t
def append(%Deque{size: size, max_size: max_size, list1: [], list2: list2}=deque, value) when size < max_size do
%{deque | size: size + 1, list2: [value|list2]}
end
def append(%Deque{size: size, max_size: max_size, list2: list2}=deque, value) when size < max_size do
%{deque | size: size + 1, list2: [value|list2]}
end
def append(%Deque{list1: [], list2: list2}=deque, value) do
%{deque | list1: Enum.reverse(list2), list2: []} |> append(value)
end
def append(%Deque{list1: [_|list1], list2: list2}=deque, value) do
%{deque | list1: list1, list2: [value|list2]}
end
@spec appendleft(t, value) :: t
def appendleft(%Deque{size: size, max_size: max_size, list1: list1, list2: []}=deque, value) when size < max_size do
%{deque | size: size + 1, list1: [value|list1]}
end
def appendleft(%Deque{size: size, max_size: max_size, list1: list1}=deque, value) when size < max_size do
%{deque | size: size + 1, list1: [value|list1]}
end
def appendleft(%Deque{list1: list1, list2: []}=deque, value) do
%{deque | list1: [], list2: Enum.reverse(list1)} |> appendleft(value)
end
def appendleft(%Deque{list1: list1, list2: [_|list2]}=deque, value) do
%{deque | list1: [value|list1], list2: list2}
end
@spec pop(t) :: {value | nil, t}
def pop(%Deque{list1: [], list2: []}=deque) do
{nil, deque}
end
def pop(%Deque{size: size, list2: [value|list2]}=deque) do
{value, %{deque | size: size - 1, list2: list2}}
end
def pop(%Deque{list1: list1}=deque) do
%{deque | list1: [], list2: Enum.reverse(list1)} |> pop
end
@spec popleft(t) :: {value | nil, t}
def popleft(%Deque{list1: [], list2: []}=deque) do
{nil, deque}
end
def popleft(%Deque{size: size, list1: [value|list1]}=deque) do
{value, %{deque | size: size - 1, list1: list1}}
end
def popleft(%Deque{list2: list2}=deque) do
%{deque | list1: Enum.reverse(list2), list2: []} |> popleft
end
@spec last(t) :: value | nil
def last(%Deque{list1: [], list2: []}), do: nil
def last(%Deque{list2: [value|_]}), do: value
def last(%Deque{list1: list1}=deque) do
%{deque | list1: [], list2: Enum.reverse(list1)} |> last
end
@spec first(t) :: value | nil
def first(%Deque{list1: [], list2: []}), do: nil
def first(%Deque{list1: [value|_]}), do: value
def first(%Deque{list2: list2}=deque) do
%{deque | list1: Enum.reverse(list2), list2: []} |> first
end
@spec clear(t) :: t
def clear(%Deque{max_size: max_size}), do: new(max_size)
@spec take_while(t, (term -> boolean)) :: t
def take_while(%Deque{list1: [], list2: []}=deque, _func), do: deque
def take_while(%Deque{list1: list1, list2: list2}=deque, func) do
case lazy_take_while(list2, func) do
# If the tail list halts, then everything in head list is invalid.
{:halt, list2_n, list2} ->
%{deque | size: list2_n, list1: [], list2: Enum.reverse(list2)}
{list2_n, list2} ->
# Halting does not matter when filtering the head list. Reverse the list
# before attempting to filter it, it will automatically be reversed again.
{list1_n, list1} =
with {:halt, list1_n, list1} <- lazy_take_while(Enum.reverse(list1), func) do
{list1_n, list1}
end
%{deque | size: list1_n + list2_n, list1: list1, list2: Enum.reverse(list2)}
end
end
## Private
defp lazy_take_while(list, func), do: lazy_take_while(list, [], 0, func)
defp lazy_take_while([], acc, n, _func), do: {n, acc}
defp lazy_take_while([h | t], acc, n, func) do
if func.(h) do
lazy_take_while(t, [h | acc], n + 1, func)
else
{:halt, n, acc}
end
end
## Protocols
defimpl Enumerable do
def reduce(_, {:halt, acc}, _fun) do
{:halted, acc}
end
def reduce(deque, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(deque, &1, fun)}
end
def reduce(%Deque{list1: list1, list2: list2}, {:cont, acc}, fun) do
reduce({list1, list2}, {:cont, acc}, fun)
end
def reduce({[], []}, {:cont, acc}, _fun) do
{:done, acc}
end
def reduce({[h|list1], list2}, {:cont, acc}, fun) do
reduce({list1, list2}, fun.(h, acc), fun)
end
def reduce({[], list2}, {:cont, acc}, fun) do
reduce({Enum.reverse(list2), []}, {:cont, acc}, fun)
end
def member?(%Deque{list1: list1, list2: list2}, element) do
{:ok, element in list1 or element in list2}
end
def count(%Deque{size: size}) do
{:ok, size}
end
def slice(%Deque{}) do
{:error, __MODULE__}
end
end
defimpl Collectable do
def into(original) do
{original, fn
deque, {:cont, value} -> Deque.append(deque, value)
deque, :done -> deque
_, :halt -> :ok
end}
end
end
defimpl Inspect do
import Inspect.Algebra
def inspect(deque, opts) do
concat ["#Deque<", Inspect.List.inspect(Enum.to_list(deque), opts), ">"]
end
end
end
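# Usage sketch: a Deque behaves as a bounded double-ended queue, dropping the
# oldest element once `max_size` is reached (`Enum.into/2` works via Collectable):
#
#     deque = Enum.into(1..4, Deque.new(3))
#     Enum.to_list(deque)                     #=> [2, 3, 4]
#     {value, _deque} = Deque.popleft(deque)  # value == 2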
|
lib/deque.ex
| 0.768993
| 0.609205
|
deque.ex
|
starcoder
|
defmodule SetTwo.ChallengeTwelve do
import SetTwo.ChallengeTen
@key SetTwo.ChallengeEleven.generate_random_aes_key()
@doc """
Cracks and discovers the secret encrypted with encryption_oracle_2
http://cryptopals.com/sets/2/challenges/12/
"""
@spec crack_code() :: binary
def crack_code do
ciphertext = encryption_oracle_2("")
    # detect ECB by encrypting repeated bytes, which yields repeated ciphertext blocks
    unless :binary.copy("A", 64) |> encryption_oracle_2() |> SetOne.ChallengeEight.encrypted_aes_ecb?() do
      raise ArgumentError, message: "Not ECB encrypted"
    end
block_size = determine_block_size(ciphertext)
1..num_blocks(ciphertext, block_size)
|> Enum.reduce("", fn x, acc ->
acc <> brute_force_decrypt(x * block_size, acc)
end)
|> String.replace(<<0>>, "")
end
@doc """
Brute force decrypts to discover one byte of plaintext.
input_block is a mixture of A's and what is already known.
The total size of input_block is one short of the byte to be discovered.
Iterates through possible combinations until it arrives at a ciphertext that matches.
"""
@spec brute_force_decrypt(pos_integer, binary) :: binary
def brute_force_decrypt(block_size, known) when byte_size(known) == block_size, do: ""
def brute_force_decrypt(block_size, known) do
input_block = :binary.copy("A", block_size - 1 - byte_size(known))
sample = Kernel.binary_part(encryption_oracle_2(input_block), 0, block_size)
result =
0..255
|> Stream.filter(fn x ->
sample ==
Kernel.binary_part(encryption_oracle_2(input_block <> known <> <<x>>), 0, block_size)
end)
|> Enum.take(1)
|> hd
<<result>> <> brute_force_decrypt(block_size, known <> <<result>>)
end
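# Illustration of the byte-at-a-time attack above, with a hypothetical 4-byte
# block size: to recover the first secret byte s0, input_block = "AAA", so block 0
# of encryption_oracle_2("AAA") encrypts "AAA" <> s0. Trying
# encryption_oracle_2("AAA" <> known <> <<x>>) for x in 0..255 until block 0
# matches identifies s0; the "A" prefix then shrinks by one per recovered byte.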
@doc """
Determines the block size of a given ciphertext encrypted with ECB
Brute forces it.
### Examples
iex> ciphertext = SetTwo.ChallengeTwelve.encryption_oracle_2("")
iex> SetTwo.ChallengeTwelve.determine_block_size(ciphertext)
16
"""
@spec determine_block_size(binary) :: pos_integer
def determine_block_size(ciphertext) do
1..100
|> Stream.filter(fn x ->
String.ends_with?(encryption_oracle_2(:binary.copy("A", x)), ciphertext)
end)
|> Enum.take(1)
|> hd
end
@doc """
An encryption oracle that takes plaintext, pads it with a secret
It then encrypts it with a constant, random key.
"""
@spec encryption_oracle_2(binary) :: binary
def encryption_oracle_2(plaintext) do
plaintext
|> pad_secret
|> encrypt_aes_128_ecb(@key)
end
@doc """
Helper function
Counts the number of blocks in a ciphertext when given the block size
"""
@spec num_blocks(binary, pos_integer) :: pos_integer
def num_blocks(ciphertext, block_size) do
ciphertext
|> :binary.bin_to_list()
|> Enum.chunk_every(block_size)
|> length
end
defp pad_secret(plaintext), do: plaintext <> secret()
defp secret do
"<KEY>"
|> Base.decode64!()
end
end
|
lib/set_2/challenge_12.ex
| 0.768386
| 0.462898
|
challenge_12.ex
|
starcoder
|
defmodule EliteInvestigations.Elite do
@moduledoc """
Context for dealing with actual Elite: Dangerous data.
"""
import Ecto.Query, warn: false
alias EliteInvestigations.Elite.Story
alias EliteInvestigations.Repo
@doc """
Returns the list of stories.
## Options
* `:order_by` How to sort the stories -- defaults to sorting by `date` then `nid`, newest first
## Examples
```
iex> list_stories()
[%Story{}, ...]
```
"""
def list_stories(opts \\ []) do
order = Keyword.get(opts, :order_by, desc: :date, desc: :nid)
Repo.all(from s in Story, order_by: ^order)
end
@doc """
Gets a single story.
Raises `Ecto.NoResultsError` if the story does not exist.
## Examples
```
iex> get_story!(123)
%Story{}
iex> get_story!(456)
** (Ecto.NoResultsError)
```
"""
def get_story!(id), do: Repo.get!(Story, id)
@doc """
Gets a single story by Galnet ID.
Raises `Ecto.NoResultsError` if the story does not exist.
## Examples
```
iex> get_story!(123)
%Story{}
iex> get_story!(456)
** (Ecto.NoResultsError)
```
"""
def get_story_by_nid!(nid), do: Repo.get_by!(Story, nid: nid)
@doc """
Indicates whether the story with the given Galnet ID exists.
"""
def story_exists?(nid), do: !is_nil(Repo.get_by(Story, nid: nid))
@doc """
Creates a story.
## Examples
```
iex> create_story(%{field: value})
{:ok, %Story{}}
iex> create_story(%{field: bad_value})
{:error, %Ecto.Changeset{}}
```
"""
def create_story(attrs \\ %{}) do
%Story{}
|> Story.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a story.
## Examples
```
iex> update_story(story, %{field: new_value})
{:ok, %Story{}}
iex> update_story(story, %{field: bad_value})
{:error, %Ecto.Changeset{}}
```
"""
def update_story(story = %Story{}, attrs) do
story
|> Story.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a story.
## Examples
```
iex> delete_story(story)
{:ok, %Story{}}
iex> delete_story(story)
{:error, %Ecto.Changeset{}}
```
"""
def delete_story(story = %Story{}) do
Repo.delete(story)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking story changes.
## Examples
```
iex> change_story(story)
%Ecto.Changeset{source: %Story{}}
```
"""
def change_story(story = %Story{}) do
Story.changeset(story, %{})
end
@doc """
Searches stories for `search_text`.
Matches are ranked by weighting title matches heavier than body matches.
Returns the list of matching `Story` structs, best matches first.
"""
def search_stories(search_text) do
sub_query =
from s in Story,
select: %{
s_nid: s.nid,
s_title: s.title,
document:
fragment(
"setweight(to_tsvector(?), 'A') || setweight(to_tsvector(?), 'B')",
s.title,
s.body
)
}
query =
from s in Story,
join: q in subquery(sub_query),
on: q.s_nid == s.nid,
where: fragment("? @@ websearch_to_tsquery(?)", q.document, ^search_text),
order_by: fragment("ts_rank(?, websearch_to_tsquery(?)) DESC", q.document, ^search_text)
Repo.all(query)
end
end
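# Usage sketch (the search term is illustrative; ranking relies on the
# PostgreSQL full-text search fragments above):
#
#     EliteInvestigations.Elite.search_stories("thargoid attack")
#     #=> [%Story{}, ...]  # best matches first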
|
lib/elite_investigations/elite.ex
| 0.858006
| 0.809012
|
elite.ex
|
starcoder
|
defmodule PhoenixStarter.Users.UserRole do
@moduledoc """
Authorizations for `PhoenixStarter.Users.User`.
`UserRole` is a struct with a `name` field as an atom and a `permissions`
field, which is a list of strings.
Permissions should be specified in the format: `"scope.action"`. For
example, `"me.update_profile"` or `"users.update"`.
"""
defstruct [:name, :permissions]
@type t :: %__MODULE__{name: atom, permissions: list(String.t())}
@doc """
Returns a list of valid roles.
"""
@spec roles :: list(atom)
def roles do
[:admin, :ops_admin, :user]
end
@doc """
Returns a `PhoenixStarter.Users.UserRole` struct for the given role name.
"""
@spec role(atom) :: t
def role(role)
def role(:admin) do
%__MODULE__{
name: :admin,
permissions: ["me.update_profile"]
}
end
def role(:ops_admin) do
%__MODULE__{
name: :ops_admin,
permissions: ["ops.dashboard"]
}
end
def role(:user) do
%__MODULE__{
name: :user,
permissions: ["me.update_profile"]
}
end
def role(role) do
raise ArgumentError, """
#{inspect(role)} given but no such role defined
"""
end
@spec permitted?(t, String.t()) :: boolean()
def permitted?(%__MODULE__{} = role, permission), do: permission in role.permissions
defmodule Type do
@moduledoc """
An `Ecto.ParameterizedType` representing a `PhoenixStarter.Users.UserRole`.
Stored as a `string` in the database but expanded as a struct with
hydrated `permissions` field, for easy usage.
"""
use Ecto.ParameterizedType
alias PhoenixStarter.Users.UserRole
@impl true
def type(_params), do: :string
@impl true
def init(opts) do
roles = Keyword.get(opts, :roles, nil)
unless is_list(roles) and Enum.all?(roles, &is_atom/1) do
raise ArgumentError, """
PhoenixStarter.Users.UserRole.Type must have a `roles` option specified as a list of atoms.
For example:
field :my_field, PhoenixStarter.Users.UserRole.Type, roles: [:admin, :user]
"""
end
on_load = Map.new(roles, &{Atom.to_string(&1), &1})
on_dump = Map.new(roles, &{&1, Atom.to_string(&1)})
%{on_load: on_load, on_dump: on_dump, roles: roles}
end
@impl true
def cast(data, params) do
case params do
%{on_load: %{^data => as_atom}} -> {:ok, UserRole.role(as_atom)}
%{on_dump: %{^data => _}} -> {:ok, UserRole.role(data)}
_ -> :error
end
end
@impl true
def load(nil, _, _), do: {:ok, nil}
def load(data, _loader, %{on_load: on_load}) do
case on_load do
%{^data => as_atom} -> {:ok, UserRole.role(as_atom)}
_ -> :error
end
end
@impl true
def dump(nil, _, _), do: {:ok, nil}
def dump(data, _dumper, %{on_dump: on_dump}) when is_atom(data) do
case on_dump do
%{^data => as_string} -> {:ok, as_string}
_ -> :error
end
end
def dump(%UserRole{name: data}, _dumper, %{on_dump: on_dump}) do
case on_dump do
%{^data => as_string} -> {:ok, as_string}
_ -> :error
end
end
@impl true
def equal?(a, b, _params), do: a == b
@impl true
def embed_as(_, _), do: :self
end
end
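# Usage sketch (permissions follow the role definitions above):
#
#     role = PhoenixStarter.Users.UserRole.role(:ops_admin)
#     PhoenixStarter.Users.UserRole.permitted?(role, "ops.dashboard")      #=> true
#     PhoenixStarter.Users.UserRole.permitted?(role, "me.update_profile")  #=> false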
|
lib/phoenix_starter/users/user_role.ex
| 0.855731
| 0.50238
|
user_role.ex
|
starcoder
|
defmodule TinyXml do
@moduledoc """
XML navigation and data extraction functions.
"""
import TinyXml.Utils, only: [from_string: 1]
require Record
Record.defrecord(:xmlText, Record.extract(:xmlText, from_lib: "xmerl/include/xmerl.hrl"))
Record.defrecord(
:xmlAttribute,
Record.extract(:xmlAttribute, from_lib: "xmerl/include/xmerl.hrl")
)
@doc """
Given an `xml_string` or `xml_node` and a `path` (XPath expression),
returns the list of `xml_node` results of the XPath expression, or an empty list.
"""
@spec all(String.t(), String.t()) :: [tuple()] | []
def all(xml_string, path) when is_binary(xml_string) do
xml_string
|> from_string()
|> all(path)
end
@spec all(tuple(), String.t()) :: [tuple()] | []
def all(xml_node, path), do: xpath(xml_node, path)
@doc """
Given an `xml_string` or `xml_node` and a `path` (XPath expression),
returns the first `xml_node` result of the XPath expression or `nil`.
"""
@spec first(String.t(), String.t()) :: tuple() | nil
def first(xml_string, path) when is_binary(xml_string) do
xml_string
|> from_string()
|> first(path)
end
@spec first(tuple(), String.t()) :: tuple() | nil
def first(xml_node, path) do
case xpath(xml_node, path) do
[head | _] -> head
_ -> nil
end
end
@doc """
Extracts the text value from `xml_node`. If the value doesn't exist, returns `nil`.
## Examples
xml = "
<Person>
<Name>Max</Name>
<Surname>Power</Surname>
</Person>
"
xml
|> TinyXml.first("/Person/Name")
|> TinyXml.text()
#=> "Max"
xml
|> TinyXml.first("/Person/Name/invalid")
|> TinyXml.text()
#=> nil
"""
@spec text(tuple()) :: String.t() | nil
def text(xml_node) do
case xpath(xml_node, "./text()") do
[xmlText(value: value)] -> to_string(value)
_ -> nil
end
end
@doc """
Extracts an attribute from `xml_node`. If the attribute doesn't exist, returns `nil`.
## Examples
xml = "
<Person id="123">
<Name>Max</Name>
<Surname>Power</Surname>
</Person>
"
xml
|> TinyXml.first("/Person")
|> TinyXml.attribute("id")
#=> "123"
xml
|> TinyXml.first("/Person/invalid")
|> TinyXml.attribute("id")
#=> nil
"""
@spec attribute(tuple(), String.t()) :: String.t() | nil
def attribute(xml_node, attribute) do
case xpath(xml_node, "./@#{attribute}") do
[xmlAttribute(value: value)] -> to_string(value)
_ -> nil
end
end
@spec xpath(tuple() | nil, String.t()) :: [tuple()] | []
defp xpath(nil, _), do: []
defp xpath(xml, path), do: :xmerl_xpath.string(to_charlist(path), xml)
end
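# Usage sketch, with `xml` bound to the Person document from the doc examples:
#
#     xml |> TinyXml.all("/Person/*") |> Enum.map(&TinyXml.text/1)
#     #=> ["Max", "Power"]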
|
lib/tiny_xml.ex
| 0.779951
| 0.476397
|
tiny_xml.ex
|
starcoder
|
defmodule Membrane.RTP.TWCCSender.CongestionControl do
@moduledoc """
The module implements [Google congestion control algorithm](https://datatracker.ietf.org/doc/html/draft-ietf-rmcat-gcc-02).
"""
alias Membrane.Time
require Membrane.Logger
# disable Credo naming checks to use the RFC notation
# credo:disable-for-this-file /(ModuleAttributeNames|VariableNames)/
# state noise covariance
@q 0.001
# filter coefficient for the measured noise variance, between [0.1, 0.001]
@chi 0.01
# decrease rate factor
@beta 0.85
  # coefficients for the adaptive threshold (referred to as "K_u", "K_d" in the RFC)
@coeff_K_u 0.01
@coeff_K_d 0.00018
# alpha factor for exponential moving average
@ema_smoothing_factor 0.95
@last_receive_rates_probe_size 25
@last_receive_bandwidth_probe_size 25
defstruct [
# inter-packet delay estimate (in ms)
m_hat: 0.0,
# system error covariance
e: 0.1,
# estimate for the state noise variance
var_v_hat: 0.0,
# initial value for the adaptive threshold, 12.5ms
del_var_th: 12.5,
# last rates at which packets were received
last_receive_rates: [],
# current delay-based controller state
state: :increase,
# timestamp indicating when we started to overuse the link
overuse_start_ts: nil,
# timestamp indicating when we started to underuse the link
underuse_start_ts: nil,
# latest timestamp indicating when the receiver-side bandwidth was increased
last_bandwidth_increase_ts: Time.vm_time(),
# receiver-side bandwidth estimation in bps
a_hat: 300_000.0,
# sender-side bandwidth estimation in bps
as_hat: 300_000.0,
# latest estimates of receiver-side bandwidth
r_hats: [],
    # time window for measuring the received bitrate, between [0.5, 1]s (referred to as "T" in the RFC)
target_receive_interval: Time.milliseconds(750),
# accumulator for packet sizes in bits that have been received through target_receive_interval
packet_received_sizes: 0,
# starting timestamp for current packet received interval
packet_received_interval_start: nil,
# last timestamp for current packet received interval
packet_received_interval_end: nil,
    # time required to trigger a signal (referred to as "overuse_time_th" in the RFC)
signal_time_threshold: Time.milliseconds(10)
]
@type t :: %__MODULE__{
m_hat: float(),
e: float(),
var_v_hat: float(),
del_var_th: float(),
last_receive_rates: [float()],
state: :increase | :decrease | :hold,
overuse_start_ts: Time.t() | nil,
underuse_start_ts: Time.t() | nil,
last_bandwidth_increase_ts: Time.t(),
a_hat: float(),
as_hat: float(),
r_hats: [float()],
packet_received_sizes: non_neg_integer(),
packet_received_interval_start: Time.t() | nil,
packet_received_interval_end: Time.t() | nil
}
@spec update(t(), Time.t(), [Time.t() | :not_received], [Time.t()], [pos_integer()], Time.t()) ::
t()
def update(
%__MODULE__{} = cc,
reference_time,
receive_deltas,
send_deltas,
packet_sizes,
rtt
) do
cc
|> update_metrics(receive_deltas, send_deltas)
|> store_packet_received_sizes(reference_time, receive_deltas, packet_sizes)
|> update_receiver_bandwidth(packet_sizes, rtt)
|> update_sender_bandwidth(receive_deltas)
end
defp update_metrics(cc, [], []), do: cc
defp update_metrics(cc, [:not_received | recv_deltas], [send_delta | send_deltas]) do
case {recv_deltas, send_deltas} do
{[], []} ->
cc
{recv_deltas, [next_send_delta | other_send_deltas]} ->
update_metrics(cc, recv_deltas, [send_delta + next_send_delta | other_send_deltas])
end
end
defp update_metrics(cc, [recv_delta | recv_deltas], [send_delta | send_deltas])
when recv_delta < 0 do
case {recv_deltas, send_deltas} do
{[], []} ->
cc
{[:not_received | other_recv_deltas], [next_send_delta | other_send_deltas]} ->
update_metrics(cc, [recv_delta | other_recv_deltas], [
send_delta + next_send_delta | other_send_deltas
])
{[next_recv_delta | other_recv_deltas], [next_send_delta | other_send_deltas]} ->
update_metrics(cc, [recv_delta + next_recv_delta | other_recv_deltas], [
send_delta + next_send_delta | other_send_deltas
])
end
end
defp update_metrics(cc, [recv_delta | recv_deltas], [send_delta | send_deltas]) do
%__MODULE__{
m_hat: prev_m_hat,
e: e,
var_v_hat: var_v_hat,
del_var_th: del_var_th,
last_receive_rates: last_receive_rates
} = cc
[recv_delta_ms, send_delta_ms] = Enum.map([recv_delta, send_delta], &Time.to_milliseconds/1)
interpacket_delta = recv_delta_ms - send_delta_ms
z = interpacket_delta - prev_m_hat
last_receive_rates = [1 / max(recv_delta_ms, 1) | last_receive_rates]
f_max = Enum.max(last_receive_rates)
alpha = :math.pow(1 - @chi, 30 / (1000 * f_max))
var_v_hat = max(alpha * var_v_hat + (1 - alpha) * z * z, 1)
k = (e + @q) / (var_v_hat + e + @q)
e = (1 - k) * (e + @q)
coeff = min(z, 3 * :math.sqrt(var_v_hat))
m_hat = prev_m_hat + coeff * k
abs_m_hat = abs(m_hat)
del_var_th =
if abs_m_hat - del_var_th <= 15 do
coeff_K = if abs_m_hat < del_var_th, do: @coeff_K_d, else: @coeff_K_u
gain = recv_delta_ms * coeff_K * (abs_m_hat - del_var_th)
max(min(del_var_th + gain, 600), 6)
else
del_var_th
end
cc = %__MODULE__{
cc
| m_hat: m_hat,
var_v_hat: var_v_hat,
e: e,
last_receive_rates: Enum.take(last_receive_rates, @last_receive_rates_probe_size),
del_var_th: del_var_th
}
cc
|> make_signal(prev_m_hat)
|> then(fn {signal, cc} -> update_state(cc, signal) end)
|> update_metrics(recv_deltas, send_deltas)
end
defp make_signal(%__MODULE__{m_hat: m_hat, del_var_th: del_var_th} = cc, prev_m_hat)
when m_hat < -del_var_th do
now = Time.vm_time()
underuse_start_ts = cc.underuse_start_ts || now
trigger_underuse? =
now - underuse_start_ts >= cc.signal_time_threshold and m_hat <= prev_m_hat
if trigger_underuse? do
{:underuse, %__MODULE__{cc | underuse_start_ts: now, overuse_start_ts: nil}}
else
{:no_signal, %__MODULE__{cc | underuse_start_ts: underuse_start_ts, overuse_start_ts: nil}}
end
end
defp make_signal(%__MODULE__{m_hat: m_hat, del_var_th: del_var_th} = cc, prev_m_hat)
when m_hat > del_var_th do
now = Time.vm_time()
overuse_start_ts = cc.overuse_start_ts || now
trigger_overuse? = now - overuse_start_ts >= cc.signal_time_threshold and m_hat >= prev_m_hat
if trigger_overuse? do
{:overuse, %__MODULE__{cc | underuse_start_ts: nil, overuse_start_ts: now}}
else
{:no_signal, %__MODULE__{cc | underuse_start_ts: nil, overuse_start_ts: overuse_start_ts}}
end
end
defp make_signal(cc, _prev_m_hat),
do: {:normal, %__MODULE__{cc | underuse_start_ts: nil, overuse_start_ts: nil}}
# +----+--------+-----------+------------+--------+
# | \ State | Hold | Increase |Decrease|
# | \ | | | |
# | Signal\ | | | |
# +--------+----+-----------+------------+--------+
# | Over-use | Decrease | Decrease | |
# +-------------+-----------+------------+--------+
# | Normal | Increase | | Hold |
# +-------------+-----------+------------+--------+
# | Under-use | | Hold | Hold |
# +-------------+-----------+------------+--------+
defp update_state(cc, signal)
defp update_state(%__MODULE__{state: :hold} = cc, :overuse),
do: %__MODULE__{cc | state: :decrease}
defp update_state(%__MODULE__{state: :hold} = cc, :normal),
do: %__MODULE__{cc | state: :increase}
defp update_state(%__MODULE__{state: :increase} = cc, :overuse),
do: %__MODULE__{cc | state: :decrease}
defp update_state(%__MODULE__{state: :increase} = cc, :underuse),
do: %__MODULE__{cc | state: :hold}
defp update_state(%__MODULE__{state: :decrease} = cc, :normal),
do: %__MODULE__{cc | state: :hold}
defp update_state(%__MODULE__{state: :decrease} = cc, :underuse),
do: %__MODULE__{cc | state: :hold}
defp update_state(cc, _signal), do: cc
defp store_packet_received_sizes(cc, reference_time, receive_deltas, packet_sizes) do
%__MODULE__{
packet_received_interval_start: packet_received_interval_start,
packet_received_interval_end: packet_received_interval_end,
packet_received_sizes: packet_received_sizes
} = cc
{receive_deltas, packet_sizes} =
receive_deltas
|> Enum.zip(packet_sizes)
|> Enum.filter(fn {delta, _size} -> delta != :not_received end)
|> Enum.unzip()
timestamp_received = Enum.scan(receive_deltas, reference_time, &(&1 + &2))
{earliest_packet_ts, latest_packet_ts} = Enum.min_max(timestamp_received)
packet_received_interval_start = packet_received_interval_start || earliest_packet_ts
packet_received_interval_end = packet_received_interval_end || latest_packet_ts
%__MODULE__{
cc
| packet_received_interval_start: min(packet_received_interval_start, earliest_packet_ts),
packet_received_interval_end: max(packet_received_interval_end, latest_packet_ts),
packet_received_sizes: packet_received_sizes + Enum.sum(packet_sizes)
}
end
defp update_receiver_bandwidth(%__MODULE__{state: :decrease} = cc, _packet_sizes, _rtt),
do: %__MODULE__{cc | a_hat: @beta * cc.a_hat}
defp update_receiver_bandwidth(
%__MODULE__{
state: :increase,
packet_received_interval_end: packet_received_interval_end,
packet_received_interval_start: packet_received_interval_start
} = cc,
packet_sizes,
rtt
)
when packet_received_interval_end - packet_received_interval_start >=
cc.target_receive_interval do
%__MODULE__{
r_hats: r_hats,
a_hat: prev_a_hat,
last_bandwidth_increase_ts: last_bandwidth_increase_ts,
packet_received_sizes: packet_received_sizes
} = cc
packet_received_interval_ms =
Time.to_milliseconds(packet_received_interval_end - packet_received_interval_start)
r_hat = 1 / (packet_received_interval_ms / 1000) * packet_received_sizes
now = Time.vm_time()
last_bandwidth_increase_ts = last_bandwidth_increase_ts || now
time_since_last_update_ms = Time.to_milliseconds(now - last_bandwidth_increase_ts)
a_hat =
case bitrate_increase_mode(r_hat, cc) do
:multiplicative ->
eta = :math.pow(1.08, min(time_since_last_update_ms / 1000, 1))
eta * prev_a_hat
:additive ->
response_time_ms = 100 + Time.to_milliseconds(rtt)
alpha = 0.5 * min(time_since_last_update_ms / response_time_ms, 1)
expected_packet_size_bits = Enum.sum(packet_sizes) / length(packet_sizes)
prev_a_hat + max(1000, alpha * expected_packet_size_bits)
end
a_hat = min(1.5 * r_hat, a_hat)
%__MODULE__{
cc
| a_hat: a_hat,
r_hats: Enum.take([r_hat | r_hats], @last_receive_bandwidth_probe_size),
last_bandwidth_increase_ts: now,
packet_received_interval_end: nil,
packet_received_interval_start: nil,
packet_received_sizes: 0
}
end
defp update_receiver_bandwidth(cc, _packet_sizes, _rtt), do: cc
defp update_sender_bandwidth(%__MODULE__{as_hat: as_hat, a_hat: a_hat} = cc, receive_deltas) do
lost = Enum.count(receive_deltas, &(&1 == :not_received))
loss_ratio = lost / length(receive_deltas)
as_hat =
cond do
loss_ratio < 0.02 -> 1.05 * as_hat
loss_ratio > 0.1 -> as_hat * (1 - 0.5 * loss_ratio)
true -> as_hat
end
%__MODULE__{cc | as_hat: min(as_hat, 1.5 * a_hat)}
end
defp bitrate_increase_mode(_r_hat, %__MODULE__{r_hats: prev_r_hats})
when length(prev_r_hats) < @last_receive_bandwidth_probe_size,
do: :multiplicative
defp bitrate_increase_mode(r_hat, %__MODULE__{r_hats: prev_r_hats}) do
exp_average = exponential_moving_average(@ema_smoothing_factor, prev_r_hats)
std_dev = std_dev(prev_r_hats)
if abs(r_hat - exp_average) <= 3 * std_dev do
:additive
else
:multiplicative
end
end
defp exponential_moving_average(_alpha, []), do: 0
defp exponential_moving_average(alpha, [latest_observation | older_observations]) do
alpha * latest_observation +
(1 - alpha) * exponential_moving_average(alpha, older_observations)
end
defp std_dev(observations) when observations != [] do
n_obs = length(observations)
mean = Enum.sum(observations) / n_obs
observations
|> Enum.reduce(0, fn obs, acc -> acc + (obs - mean) * (obs - mean) end)
|> then(&(&1 / n_obs))
|> :math.sqrt()
end
end
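# Call-shape sketch (hedged): all values below are illustrative, not taken
# from a real TWCC feedback report:
#
#     alias Membrane.RTP.TWCCSender.CongestionControl
#     alias Membrane.Time
#     cc = %CongestionControl{}
#     cc =
#       CongestionControl.update(cc, Time.milliseconds(0),
#         [Time.milliseconds(5), :not_received],
#         [Time.milliseconds(4), Time.milliseconds(4)],
#         [8_000, 8_000], Time.milliseconds(30))
#     cc.as_hat  # updated sender-side bandwidth estimate in bps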
|
lib/membrane/rtp/twcc_sender/congestion_control.ex
| 0.900217
| 0.542682
|
congestion_control.ex
|
starcoder
|
defmodule Day11 do
def part1(input) do
solve(input, &update_grid_part1/2)
end
def part2(input) do
solve(input, &update_grid_part2/2)
end
def solve(input, update) do
grid = parse(input)
seats = Map.keys(grid)
Stream.iterate(grid, fn grid ->
update.(grid, seats)
end)
|> Enum.reduce_while(nil, fn
grid, grid -> {:halt, grid}
grid, _prev -> {:cont, grid}
end)
|> Enum.count(fn {_, state} -> state === :occupied end)
end
defp update_grid_part1(grid, seats) do
Enum.map(seats, fn location ->
      num_occupied = count_occupied_neighbors(grid, location)
state = case Map.get(grid, location) do
:empty when num_occupied === 0 -> :occupied
:occupied when num_occupied >= 4 -> :empty
state -> state
end
{location, state}
end)
|> Map.new
end
defp update_grid_part2(grid, seats) do
Enum.map(seats, fn location ->
      num_occupied = count_occupied_neighbors(grid, location, :floor)
state = case Map.get(grid, location) do
:empty when num_occupied === 0 -> :occupied
:occupied when num_occupied >= 5 -> :empty
state -> state
end
{location, state}
end)
|> Map.new
end
  defp count_occupied_neighbors(grid, location, ignore \\ nil) do
[{-1, -1}, {0, -1}, {1, -1},
{-1, 0}, {1, 0},
{-1, 1}, {0, 1}, {1, 1}]
|> Enum.reduce(0, fn direction, acc ->
case find_neighbor(grid, location, direction, ignore) do
:occupied -> acc + 1
:empty -> acc
:floor -> acc
:outside -> acc
end
end)
end
defp find_neighbor(grid, location, direction, ignore) do
location = vec_add(location, direction)
case Map.get(grid, location, :outside) do
^ignore -> find_neighbor(grid, location, direction, ignore)
other -> other
end
end
defp vec_add({x, y}, {dx, dy}), do: {x + dx, y + dy}
defp parse(input) do
init = {{0, 0}, %{}}
{_, grid} =
input
|> Enum.reduce(init, fn row, {{x, y}, grid} ->
{_, grid} = row
|> String.to_charlist()
|> Enum.reduce({x, grid}, fn char, {x, grid} ->
grid = case char do
?L ->
Map.put(grid, {x, y}, :empty)
?. ->
Map.put(grid, {x, y}, :floor)
end
{x + 1, grid}
end)
{{x, y + 1}, grid}
end)
grid
end
end
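# Usage sketch (hypothetical input file of "L"/"." seat rows):
#
#     input = "day11.txt" |> File.read!() |> String.split("\n", trim: true)
#     Day11.part1(input)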
|
day11/lib/day11.ex
| 0.643217
| 0.648849
|
day11.ex
|
starcoder
|
defmodule FileUtils do
@moduledoc """
File utilities for Elixir. This is a collection of utilities for operating on
files and directories, some of which should probably be in the standard
library but aren't.
"""
@type posix :: :file.posix
@type badarg :: {:error, :badarg}
defp validate_opt({:error, _reason} = error, _opt, _validate), do: error
defp validate_opt(opts, opt, validate) do
if validate.(opts[opt]), do: opts, else: {:error, :badarg}
end
@doc """
Create a tree of file / directory specifications under the given root. Each
  file / directory has a name and an optional permission (default permissions
are `0o755` for directories and `0o644` for files). Files also contain a
binary which is written to the newly created file. Directories contain a list
of additional specifications, which are recursed to build a full directory
tree.
Example:
iex> FileUtils.install_file_tree(System.tmp_dir, [
...> {"test-data", [
...> {"data", <<0, 1, 2, 3, 4>>},
...> {"read_only", 0o444, <<4, 3, 2, 1, 0>>},
...> {"no_access", 0o000, <<255, 255>>},
...> {"subdir", 0o555, [
...> {"more_data", "The quick brown fox..."}
...> ]}
...> ]}
...> ])
:ok
If the function succeeds, `:ok` is returned. If it fails then `{:error,
reason, pathname}` is returned with the reason being a `:posix` reason and the
pathname being the file / directory which caused the error (minus the root
directory portion).
"""
@type entry :: {Path.t, non_neg_integer, binary} |
{Path.t, binary} |
{Path.t, non_neg_integer, [entry]} |
{Path.t, [entry]}
@type install_tree_error :: {:error, posix, Path.t} | {:error, :badarg, term}
@spec install_file_tree(Path.t, [entry]) :: :ok | install_tree_error
def install_file_tree(rootdir, entries) when is_binary(rootdir) and is_list(entries) do
case File.mkdir_p(rootdir) do
:ok -> install_entry_list(rootdir, entries) |> trim_error_path(rootdir)
{:error, reason} -> {:error, reason, ""}
end
end
defp trim_error_path(:ok, _rootdir), do: :ok
defp trim_error_path({:error, :badarg, _term} = error, _rootdir), do: error
defp trim_error_path({:error, reason, pathname}, rootdir) do
{:error, reason, Path.relative_to(pathname, rootdir)}
end
defp install_entry_list(_workdir, []), do: :ok
defp install_entry_list(workdir, [entry | rest]) do
case install_entry(workdir, entry) do
:ok -> install_entry_list(workdir, rest)
{:error, _reason, _pathname} = error -> error
end
end
defp install_entry(workdir, {filename, perm, data}) when is_binary(filename) and perm in 0..0o777 and is_binary(data) do
pathname = Path.join(workdir, filename)
install_entry_using_actions(pathname, [
fn -> File.write(pathname, data, [:exclusive]) end,
fn -> File.chmod(pathname, perm) end
])
end
defp install_entry(workdir, {dirname, perm, entries}) when is_binary(dirname) and perm in 0..0o777 and is_list(entries) do
pathname = Path.join(workdir, dirname)
install_entry_using_actions(pathname, [
fn -> File.mkdir(pathname) end,
fn -> install_entry_list(pathname, entries) end,
fn -> File.chmod(pathname, perm) end
])
end
defp install_entry(workdir, {filename, data}) when is_binary(data) do
install_entry(workdir, {filename, 0o644, data})
end
defp install_entry(workdir, {dirname, entries}) when is_list(entries) do
install_entry(workdir, {dirname, 0o755, entries})
end
defp install_entry(_workdir, badarg) do
{:error, :badarg, badarg}
end
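  # Runs the given actions in order, stopping at the first failure; a plain
  # `{:error, reason}` returned by an action is tagged with the offending
  # pathname.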
defp install_entry_using_actions(pathname, actions, result \\ :ok)
defp install_entry_using_actions(_pathname, [], :ok), do: :ok
defp install_entry_using_actions(pathname, [action | rest], :ok) do
install_entry_using_actions(pathname, rest, action.())
end
defp install_entry_using_actions(pathname, _actions, {:error, reason}) do
{:error, reason, pathname}
end
defp install_entry_using_actions(_pathname, _actions, {:error, _reason, _path} = error) do
error
end
@doc """
Returns information about the path. If it exists, it returns a {:ok, info}
tuple, where info is a `File.Stat` struct. Returns {:error, reason} with a
`:file.posix` reason if a failure occurs.
This is exactly the same operation as `File.stat/2` except in the case where
the path is a symbolic link. In this circumstance `lstat/2` returns
information about the link, where `File.stat/2` returns information about the
file the link references.
### Options
The accepted options are:
* `:time` - `:local` | `:universal` | `:posix`; default: `:local`
"""
@type lstat_opt :: {:time, (:local | :universal | :posix)}
@spec lstat(Path.t, [lstat_opt]) :: {:ok, File.Stat.t} | {:error, posix} | badarg
def lstat(path, opts \\ []) when is_binary(path) and is_list(opts) do
    opts = Keyword.merge([time: :local], opts)
    :file.read_link_info(path, opts) |> lstat_rec_to_struct
end
defp lstat_rec_to_struct({:ok, stat_record}) do
{:ok, File.Stat.from_record(stat_record)}
end
defp lstat_rec_to_struct(error), do: error
@doc """
Same as `lstat/2`, but returns the `File.Stat` directly and throws
`File.Error` if an error is returned.
"""
@spec lstat!(Path.t, [lstat_opt]) :: File.Stat.t | no_return
def lstat!(path, opts \\ []) when is_binary(path) and is_list(opts) do
case FileUtils.lstat(path, opts) do
{:ok, info} -> info
{:error, reason} ->
raise File.Error, reason: reason, action: "read file stats", path: path
end
end
@doc """
  Returns `{:ok, stream}`, where the stream walks one or more directory trees,
  or `{:error, :badarg}` if an option is invalid. The trees are walked depth
  first, with the directory entries returned in sorted order. Each directory
  entry is returned as a `{path, stat}` tuple, where the path is the full path
  to the entry and the stat is a `File.Stat` struct.
  The following options may be set:
* `:symlink_stat` - If set to `true` then symbolic links will return `stat`
information about the link instead of the file object
referred to by the link. The default is `false`.
* `:time` - Form of the time data returned in the `File.Stat`
struct. This may be one of `:local`, `:universal`, or
`:posix`. The default is `:local`.
If an error occurs while walking the tree (a file node is deleted, a symlink
is invalid, etc.) then the node with the problem will be skipped.
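  ### Example
  A minimal sketch (the directory layout is illustrative):
      {:ok, stream} = FileUtils.path_tree_walk("/tmp/test-data")
      stream |> Enum.map(fn {path, stat} -> {path, stat.type} end)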
"""
@type path_tree_walk_option :: {:symlink_stat, boolean} |
{:time, (:local | :universal | :posix)}
  @spec path_tree_walk([Path.t] | Path.t, [path_tree_walk_option]) :: {:ok, Enumerable.t} | {:error, :badarg}
def path_tree_walk(rootdir, opts \\ [])
when (is_list(rootdir) or is_binary(rootdir)) and is_list(opts) do
default_opts = [
symlink_stat: false,
time: :local
]
default_opts
    |> Keyword.merge(opts)
|> validate_opt(:symlink_stat, &is_boolean/1)
|> validate_opt(:time, &(&1 in [:local, :universal, :posix]))
|> tree_walk_opts_to_funcs
|> stream_for_tree_walk(rootdir)
end
defp tree_walk_opts_to_funcs({:error, _reason} = error), do: error
defp tree_walk_opts_to_funcs(opts) when is_list(opts) do
[
stat: stat_fn(opts[:symlink_stat], opts[:time])
]
end
defp stat_fn(false, time_fmt), do: &File.stat(&1, time: time_fmt)
defp stat_fn(true, time_fmt), do: &lstat(&1, time: time_fmt)
defp stream_for_tree_walk({:error, _reason} = error, _rootdir), do: error
defp stream_for_tree_walk(funcs, rootdirs) when is_list(rootdirs) do
{:ok, Stream.resource(
fn -> {Enum.sort(rootdirs), funcs} end,
&path_tree_walk_next/1,
fn _ -> nil end
)}
end
defp stream_for_tree_walk(funcs, rootdir), do: stream_for_tree_walk(funcs, [rootdir])
defp path_tree_walk_next({[], funcs}), do: {:halt, {[], funcs}}
defp path_tree_walk_next({[dir_fn | rest], funcs}) when is_function(dir_fn) do
{[], {dir_fn.() ++ rest, funcs}}
end
defp path_tree_walk_next({[path | rest], funcs}) do
case funcs[:stat].(path) do
{:ok, stat} ->
if File.dir?(path) do
{[{path, stat}], {[walk_to(path) | rest], funcs}}
else
{[{path, stat}], {rest, funcs}}
end
{:error, _reason} ->
{[], {rest, funcs}}
end
end
defp walk_to(path) do
fn ->
case File.ls(path) do
{:ok, filenames} when is_list(filenames) ->
filenames |> Enum.sort |> Enum.map(&Path.join(path, &1))
{:error, _reason} -> []
end
end
end
end
|
lib/fileutils.ex
| 0.725843
| 0.428951
|
fileutils.ex
|
starcoder
|
defmodule TtrCore.Board.Routes do
@moduledoc false
use TtrCore.Board.Router
defroute Atlanta, to: Charleston, distance: 2
defroute Atlanta, to: Miami, distance: 5, trains: [:passenger]
defroute Atlanta, to: Raleigh, distance: 2, trains: [:any, :any]
defroute Atlanta, to: Nashville, distance: 1
defroute Atlanta, to: New.Orleans, distance: 4, trains: [:box, :tanker]
defroute Boston, to: Montreal, distance: 2, trains: [:any, :any]
defroute Boston, to: New.York, distance: 1, trains: [:coal, :box]
defroute Calgary, to: Vancouver, distance: 3
defroute Calgary, to: Seattle, distance: 4
defroute Calgary, to: Helena, distance: 4
defroute Calgary, to: Winnipeg, distance: 6, trains: [:reefer]
defroute Charleston, to: Raleigh, distance: 2
defroute Charleston, to: Miami, distance: 4, trains: [:freight]
defroute Chicago, to: Pittsburgh, distance: 3, trains: [:tanker, :hopper]
defroute Chicago, to: Toronto, distance: 4, trains: [:reefer]
defroute Chicago, to: Duluth, distance: 3, trains: [:coal]
defroute Chicago, to: Omaha, distance: 4, trains: [:passenger]
defroute Chicago, to: St.Louis, distance: 2, trains: [:caboose, :reefer]
defroute Dallas, to: Little.Rock, distance: 2
defroute Dallas, to: Houston, distance: 1, trains: [:any, :any]
defroute Dallas, to: Oklahoma.City, distance: 2, trains: [:any, :any]
defroute Dallas, to: El.Paso, distance: 4, trains: [:coal]
defroute Denver, to: Kansas.City, distance: 4, trains: [:tanker, :hopper]
defroute Denver, to: Omaha, distance: 4, trains: [:freight]
defroute Denver, to: Helena, distance: 4, trains: [:caboose]
defroute Denver, to: Salt.Lake.City, distance: 3, trains: [:coal, :box]
defroute Denver, to: Phoenix, distance: 5, trains: [:reefer]
defroute Denver, to: Sante.Fe, distance: 2
defroute Denver, to: Oklahoma.City, distance: 4, trains: [:coal]
defroute Duluth, to: Winnipeg, distance: 4, trains: [:hopper]
defroute Duluth, to: Sault.St.Marie, distance: 3
defroute Duluth, to: Toronto, distance: 6, trains: [:freight]
defroute Duluth, to: Omaha, distance: 2, trains: [:any, :any]
defroute Duluth, to: Helena, distance: 6, trains: [:tanker]
defroute El.Paso, to: Sante.Fe, distance: 2
defroute El.Paso, to: Oklahoma.City, distance: 5, trains: [:box]
defroute El.Paso, to: Houston, distance: 4, trains: [:coal]
defroute El.Paso, to: Los.Angeles, distance: 6, trains: [:hopper]
defroute El.Paso, to: Phoenix, distance: 3
defroute Helena, to: Winnipeg, distance: 4, trains: [:passenger]
defroute Helena, to: Omaha, distance: 5, trains: [:coal]
defroute Helena, to: Salt.Lake.City, distance: 3, trains: [:freight]
defroute Helena, to: Seattle, distance: 6, trains: [:box]
defroute Houston, to: New.Orleans, distance: 2
defroute Kansas.City, to: Omaha, distance: 1
defroute Kansas.City, to: St.Louis, distance: 2, trains: [:passenger, :freight]
defroute Kansas.City, to: Oklahoma.City, distance: 2, trains: [:any, :any]
defroute Las.Vegas, to: Los.Angeles, distance: 2
defroute Las.Vegas, to: Salt.Lake.City, distance: 3, trains: [:tanker]
defroute Little.Rock, to: St.Louis, distance: 2
defroute Little.Rock, to: Nashville, distance: 3, trains: [:reefer]
defroute Little.Rock, to: New.Orleans, distance: 3, trains: [:caboose]
defroute Little.Rock, to: Oklahoma.City, distance: 2
defroute Los.Angeles, to: San.Francisco, distance: 3, trains: [:box, :freight]
defroute Los.Angeles, to: Las.Vegas, distance: 2
defroute Los.Angeles, to: Phoenix, distance: 3
defroute Miami, to: New.Orleans, distance: 6
defroute Montreal, to: Toronto, distance: 3
defroute Montreal, to: Sault.St.Marie, distance: 5, trains: [:hopper]
defroute Montreal, to: New.York, distance: 3, trains: [:passenger]
defroute Nashville, to: Pittsburgh, distance: 4, trains: [:box]
defroute Nashville, to: Raleigh, distance: 3, trains: [:hopper]
defroute Nashville, to: St.Louis, distance: 2
defroute New.York, to: Pittsburgh, distance: 2, trains: [:reefer, :caboose]
defroute New.York, to: Washington, distance: 2, trains: [:tanker, :hopper]
defroute Oklahoma.City, to: Sante.Fe, distance: 3, trains: [:passenger]
defroute Phoenix, to: Sante.Fe, distance: 3
defroute Pittsburgh, to: Toronto, distance: 2
defroute Pittsburgh, to: Washington, distance: 2
defroute Pittsburgh, to: Raleigh, distance: 2
defroute Pittsburgh, to: St.Louis, distance: 5, trains: [:caboose]
defroute Portland, to: Seattle, distance: 1
defroute Portland, to: Salt.Lake.City, distance: 6, trains: [:passenger]
defroute Portland, to: San.Francisco, distance: 5, trains: [:freight, :caboose]
defroute Raleigh, to: Washington, distance: 2, trains: [:any, :any]
defroute Salt.Lake.City, to: San.Francisco, distance: 5, trains: [:tanker, :reefer]
defroute Sault.St.Marie, to: Winnipeg, distance: 6
defroute Sault.St.Marie, to: Toronto, distance: 2
defroute Seattle, to: Vancouver, distance: 1, trains: [:any, :any]
end
|
lib/ttr_core/board/routes.ex
| 0.695958
| 0.68725
|
routes.ex
|
starcoder
|
defmodule Rajska.QueryScopeAuthorization do
@moduledoc """
Absinthe middleware to perform query scoping.
## Usage
[Create your Authorization module and add it and QueryAuthorization to your Absinthe.Schema](https://hexdocs.pm/rajska/Rajska.html#module-usage). Since Scope Authorization middleware must be used with Query Authorization, it is automatically called when adding the former. Then set the scoped module and argument field:
```elixir
mutation do
field :create_user, :user do
arg :params, non_null(:user_params)
middleware Rajska.QueryAuthorization, permit: :all
resolve &AccountsResolver.create_user/2
end
field :update_user, :user do
arg :id, non_null(:integer)
arg :params, non_null(:user_params)
middleware Rajska.QueryAuthorization, [permit: :user, scoped: User] # same as {User, :id}
resolve &AccountsResolver.update_user/2
end
field :delete_user, :user do
arg :id, non_null(:integer)
middleware Rajska.QueryAuthorization, permit: :admin
resolve &AccountsResolver.delete_user/2
end
end
```
In the above example, `:all` and `:admin` permissions don't require the `:scoped` keyword, as defined in the `c:Rajska.Authorization.not_scoped_roles/0` function, but you can modify this behavior by overriding it.
Valid values for the `:scoped` keyword are:
- `false`: disables scoping
- `User`: a module that will be passed to `c:Rajska.Authorization.has_user_access?/3`. It must implement a `Rajska.Authorization` behaviour and a `__schema__(:source)` function (used to check if the module is valid in `Rajska.Schema.validate_query_auth_config!/2`)
- `{User, :id}`: where `:id` is the query argument that will also be passed to `c:Rajska.Authorization.has_user_access?/3`
- `{User, [:params, :id]}`: where `id` is the query argument as above, but it's not defined directly as an `arg` for the query. Instead, it's nested inside the `params` argument.
- `{User, :user_group_id, :optional}`: where `user_group_id` (it could also be a nested argument) is an optional argument for the query. If it's present, the scoping will be applied, otherwise no scoping is applied.
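For example, an optional scope on a nested argument could look like this (the field, type and resolver names are illustrative):
```elixir
field :user_group, :user_group do
  arg :user_group_id, :integer
  middleware Rajska.QueryAuthorization, permit: :user, scoped: {UserGroup, :user_group_id, :optional}
  resolve &AccountsResolver.get_user_group/2
end
```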
"""
@behaviour Absinthe.Middleware
alias Absinthe.{Resolution, Type}
def call(%Resolution{state: :resolved} = resolution, _config), do: resolution
def call(resolution, [_ | [scoped: false]]), do: resolution
def call(resolution, [{:permit, permission} | scoped_config]) do
not_scoped_roles = Rajska.apply_auth_mod(resolution.context, :not_scoped_roles)
case Enum.member?(not_scoped_roles, permission) do
true -> resolution
false -> scope_user!(resolution, scoped_config)
end
end
def scope_user!(%Resolution{source: source} = resolution, scoped: :source) do
apply_scope_authorization(resolution, get_scoped_field_value(source, :id), source.__struct__)
end
def scope_user!(%Resolution{source: source} = resolution, scoped: {:source, scoped_field}) do
apply_scope_authorization(resolution, get_scoped_field_value(source, scoped_field), source.__struct__)
end
def scope_user!(%Resolution{arguments: args} = resolution, scoped: {scoped_struct, scoped_field}) do
apply_scope_authorization(resolution, get_scoped_field_value(args, scoped_field), scoped_struct)
end
def scope_user!(%Resolution{arguments: args} = resolution, scoped: {scoped_struct, scoped_field, :optional}) do
case get_scoped_field_value(args, scoped_field) do
nil -> update_result(true, resolution)
field_value -> apply_scope_authorization(resolution, field_value, scoped_struct)
end
end
def scope_user!(%Resolution{arguments: args} = resolution, scoped: scoped_struct) do
apply_scope_authorization(resolution, get_scoped_field_value(args, :id), scoped_struct)
end
def scope_user!(
%Resolution{
definition: %{
name: name,
schema_node: %{type: %Type.List{of_type: _}}
}
},
_scoped_config
) do
raise "Error in query #{name}: Scope Authorization can't be used with a list query object type"
end
def scope_user!(%Resolution{definition: %{name: name}}, _scoped_config) do
raise "Error in query #{name}: no scoped argument found in middleware Scope Authorization"
end
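  # The scoped field may be a nested path, e.g.
  # get_scoped_field_value(%{params: %{id: 1}}, [:params, :id]) #=> 1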
defp get_scoped_field_value(args, fields) when is_list(fields), do: get_in(args, fields)
defp get_scoped_field_value(args, field) when is_atom(field), do: Map.get(args, field)
def apply_scope_authorization(%Resolution{definition: %{name: name}}, nil, _scoped_struct) do
raise "Error in query #{name}: no argument found in middleware Scope Authorization"
end
def apply_scope_authorization(%{context: context} = resolution, field_value, scoped_struct) do
context
|> Rajska.apply_auth_mod(:has_context_access?, [context, scoped_struct, field_value])
|> update_result(resolution)
end
defp update_result(true, resolution), do: resolution
defp update_result(
false,
%Resolution{definition: %{schema_node: %{type: object_type}}} = resolution
) do
put_error(resolution, "Not authorized to access this #{replace_underscore(object_type)}")
end
defp update_result({:error, msg}, resolution), do: put_error(resolution, msg)
defp put_error(resolution, message), do: Resolution.put_result(resolution, {:error, message})
defp replace_underscore(string) when is_binary(string), do: String.replace(string, "_", " ")
defp replace_underscore(atom) when is_atom(atom) do
atom
|> Atom.to_string()
|> replace_underscore()
end
end
|
lib/middlewares/scope_authorization.ex
| 0.844489
| 0.856992
|
scope_authorization.ex
|
starcoder
|
defmodule Asteroid.ObjectStore.DeviceCode.Mnesia do
@moduledoc """
Mnesia implementation of the `Asteroid.ObjectStore.DeviceCode` behaviour
## Options
The options (`Asteroid.ObjectStore.DeviceCode.opts()`) are:
- `:table_name`: an `atom()` for the table name. Defaults to `:asteroid_device_code`
- `:tab_def`: Mnesia's table definitions of the `:mnesia.create_table/2` function. Defaults to
the options below. User-defined `:tab_def` will be merged on a key basis, i.e. defaults will
not be erased
- `:purge_interval`: the `integer()` interval in seconds the purge process will be triggered,
or `:no_purge` to disable purge. Defaults to `300` (5 minutes)
## Default Mnesia table definition
```elixir
[
    attributes: [:id, :user_code, :data],
    index: [:user_code]
]
```
## Purge process
The purge process uses the `Singleton` library. Therefore the purge process will be unique
per cluster (and that's probably what you want if you use Mnesia).
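  ## Example
  A minimal sketch, assuming `device_code` is an `Asteroid.Token.DeviceCode` struct:
  ```elixir
  opts = [table_name: :asteroid_device_code]
  :ok = Asteroid.ObjectStore.DeviceCode.Mnesia.install(opts)
  {:ok, _pid} = Asteroid.ObjectStore.DeviceCode.Mnesia.start_link(opts)
  :ok = Asteroid.ObjectStore.DeviceCode.Mnesia.put(device_code, opts)
  {:ok, _fetched} = Asteroid.ObjectStore.DeviceCode.Mnesia.get(device_code.id, opts)
  ```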
"""
require Logger
alias Asteroid.Token.DeviceCode
@behaviour Asteroid.ObjectStore.DeviceCode
@impl true
def install(opts) do
:mnesia.stop()
:mnesia.create_schema([node()])
:mnesia.start()
table_name = opts[:table_name] || :asteroid_device_code
tab_def =
[
attributes: [:id, :user_code, :data],
index: [:user_code]
]
|> Keyword.merge(opts[:tab_def] || [])
case :mnesia.create_table(table_name, tab_def) do
{:atomic, :ok} ->
Logger.info("#{__MODULE__}: created device code store #{table_name}")
:ok
{:aborted, {:already_exists, _}} ->
Logger.info("#{__MODULE__}: device code store #{table_name} already exists")
:ok
{:aborted, reason} ->
Logger.error(
"#{__MODULE__}: failed to create device code store #{table_name} " <>
"(reason: #{inspect(reason)})"
)
{:error, reason}
end
end
@impl true
def start_link(opts) do
case :mnesia.start() do
:ok ->
opts = Keyword.merge([purge_interval: 300], opts)
      # We launch the process anyway because we need to return a pid;
      # the purge process itself does nothing when the interval is `:no_purge`.
Singleton.start_child(__MODULE__.Purge, opts, __MODULE__)
{:error, _} = error ->
error
end
end
@impl true
def get(device_code_id, opts) do
table_name = opts[:table_name] || :asteroid_device_code
case :mnesia.dirty_read(table_name, device_code_id) do
[] ->
Logger.debug("#{__MODULE__}: getting device code `#{device_code_id}`, " <> "value: `nil`")
{:ok, nil}
[
{^table_name, ^device_code_id, user_code, data}
] ->
device_code =
DeviceCode.new(
id: device_code_id,
user_code: user_code,
data: data
)
Logger.debug(
"#{__MODULE__}: getting device code `#{device_code_id}`, " <>
"value: `#{inspect(device_code)}`"
)
{:ok, device_code}
_ ->
{:error, "Multiple results from Mnesia"}
end
catch
:exit, reason ->
{:error, reason}
end
@impl true
  def get_from_user_code(user_code, opts) do
    table_name = opts[:table_name] || :asteroid_device_code
    case :mnesia.dirty_match_object({table_name, :_, user_code, :_}) do
      [{_table_name, device_code_id, _user_code, _data}] ->
        get(device_code_id, opts)
      [] ->
        {:ok, nil}
      _ ->
        {:error, "Multiple results from Mnesia"}
    end
  catch
    :exit, reason ->
      {:error, reason}
  end
@impl true
def put(device_code, opts) do
table_name = opts[:table_name] || :asteroid_device_code
record = {
table_name,
device_code.id,
device_code.user_code,
device_code.data
}
:mnesia.dirty_write(table_name, record)
Logger.debug(
"#{__MODULE__}: stored device code `#{device_code.id}`, " <>
"value: `#{inspect(device_code)}`"
)
:ok
catch
:exit, reason ->
{:error, reason}
end
@impl true
def delete(device_code_id, opts) do
table_name = opts[:table_name] || :asteroid_device_code
:mnesia.dirty_delete(table_name, device_code_id)
Logger.debug("#{__MODULE__}: deleted device code `#{device_code_id}`")
:ok
catch
:exit, reason ->
{:error, reason}
end
end
|
lib/asteroid/object_store/device_code/mnesia.ex
| 0.910814
| 0.842021
|
mnesia.ex
|
starcoder
|
defmodule ScenicStarter.Scene.Transforms do
use Scenic.Scene
alias Scenic.Graph
import Scenic.Primitives
import Scenic.Components
alias ScenicStarter.Component.Nav
alias ScenicStarter.Component.Notes
@notes """
\"Transforms\" demonstrates using transforms to position, rotate and scale.
The upper sliders apply transforms to the group containing the inset UI.
  The lower slider rotates the quad independently of the upper sliders.
"""
@start_x 150
@start_y 300
@start_scale 1.0
@graph Graph.build(font: :roboto, font_size: 20, theme: :dark)
|> group(
fn g ->
g
|> group(
fn g ->
g
|> text("X")
|> text("Y", translate: {0, 20})
|> text("Scale", translate: {0, 40})
|> text("Angle", translate: {0, 60})
end,
translate: {60, 20},
text_align: :right
)
|> group(
fn g ->
g
              |> slider({{0, 500}, @start_x}, id: :pos_x)
|> slider({{180, 400}, @start_y}, id: :pos_y, translate: {0, 20})
|> slider({{0.2, 3.0}, @start_scale}, id: :scale, translate: {0, 40})
|> slider({{-1.5708, 1.5708}, 0}, id: :rotate_ui, translate: {0, 60})
end,
translate: {70, 6}
)
end,
translate: {0, 70}
)
|> group(
fn g ->
g
|> text("Inner UI group", translate: {0, 30})
|> quad({{0, 20}, {30, 0}, {36, 26}, {25, 40}},
id: :quad,
fill: {:linear, {0, 0, 40, 40, :yellow, :purple}},
stroke: {2, :khaki},
# pin: {400,310}
translate: {140, 0},
scale: 1.4
)
|> slider({{-1.5708, 1.5708}, 0}, id: :rotate_quad, translate: {0, 50}, width: 200)
end,
translate: {@start_x, @start_y},
pin: {100, 25},
id: :ui_group
)
# Nav and Notes are added last so that they draw on top
|> Nav.add_to_graph(__MODULE__)
|> Notes.add_to_graph(@notes)
# ============================================================================
# setup
# --------------------------------------------------------
def init(_, _opts) do
state = %{
graph: @graph,
x: @start_x,
y: @start_y
}
{:ok, state, push: @graph}
end
# --------------------------------------------------------
def filter_event(
{:value_changed, :pos_x, x},
_,
%{
graph: graph,
y: y
} = state
) do
graph = Graph.modify(graph, :ui_group, &update_opts(&1, translate: {x, y}))
{:halt, %{state | graph: graph, x: x}, push: graph}
end
# --------------------------------------------------------
def filter_event(
{:value_changed, :pos_y, y},
_,
%{
graph: graph,
x: x
} = state
) do
graph = Graph.modify(graph, :ui_group, &update_opts(&1, translate: {x, y}))
{:halt, %{state | graph: graph, y: y}, push: graph}
end
# --------------------------------------------------------
def filter_event({:value_changed, :scale, scale}, _, %{graph: graph} = state) do
graph = Graph.modify(graph, :ui_group, &update_opts(&1, scale: scale))
{:halt, %{state | graph: graph}, push: graph}
end
# --------------------------------------------------------
def filter_event({:value_changed, :rotate_ui, angle}, _, %{graph: graph} = state) do
graph = Graph.modify(graph, :ui_group, &update_opts(&1, rotate: angle))
{:halt, %{state | graph: graph}, push: graph}
end
# --------------------------------------------------------
def filter_event({:value_changed, :rotate_quad, angle}, _, %{graph: graph} = state) do
graph = Graph.modify(graph, :quad, &update_opts(&1, rotate: angle))
{:halt, %{state | graph: graph}, push: graph}
end
end
|
lib/scenes/transforms.ex
| 0.794385
| 0.508178
|
transforms.ex
|
starcoder
|
defmodule Sanbase.Price.MetricAdapter do
@behaviour Sanbase.Metric.Behaviour
alias Sanbase.Price
@aggregations [:any, :sum, :avg, :min, :max, :last, :first, :median]
@default_aggregation :last
@timeseries_metrics ["price_usd", "price_btc", "price_eth", "volume_usd", "marketcap_usd"]
@histogram_metrics []
@metrics @histogram_metrics ++ @timeseries_metrics
@access_map Enum.into(@metrics, %{}, fn metric -> {metric, :free} end)
@min_plan_map Enum.into(@metrics, %{}, fn metric -> {metric, :free} end)
@free_metrics Enum.filter(@access_map, fn {_, level} -> level == :free end)
|> Enum.map(&elem(&1, 0))
@restricted_metrics Enum.filter(@access_map, fn {_, level} -> level == :restricted end)
|> Enum.map(&elem(&1, 0))
@default_complexity_weight 0.3
@impl Sanbase.Metric.Behaviour
def has_incomplete_data?(_), do: false
@impl Sanbase.Metric.Behaviour
def complexity_weight(_), do: @default_complexity_weight
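  # Example call (the arguments are illustrative):
  #
  #   timeseries_data("price_usd", %{slug: "bitcoin"},
  #     ~U[2021-01-01 00:00:00Z], ~U[2021-01-07 00:00:00Z], "1d", nil)
  #
  # A `nil` aggregation falls back to the default (`:last`).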
@impl Sanbase.Metric.Behaviour
def timeseries_data(metric, %{slug: slug}, from, to, interval, aggregation) do
aggregation = aggregation || @default_aggregation
Price.timeseries_metric_data(slug, metric, from, to, interval, aggregation: aggregation)
end
@impl Sanbase.Metric.Behaviour
def aggregated_timeseries_data(metric, %{slug: slug}, from, to, aggregation) do
aggregation = aggregation || @default_aggregation
Price.aggregated_metric_timeseries_data(slug, metric, from, to, aggregation: aggregation)
end
@impl Sanbase.Metric.Behaviour
def slugs_by_filter(metric, from, to, operator, threshold, aggregation) do
aggregation = aggregation || @default_aggregation
Price.slugs_by_filter(metric, from, to, operator, threshold, aggregation)
end
@impl Sanbase.Metric.Behaviour
def slugs_order(metric, from, to, direction, aggregation) do
aggregation = aggregation || @default_aggregation
Price.slugs_order(metric, from, to, direction, aggregation)
end
@impl Sanbase.Metric.Behaviour
def first_datetime(_metric, %{slug: slug}) do
Price.first_datetime(slug)
end
@impl Sanbase.Metric.Behaviour
def last_datetime_computed_at(_metric, %{slug: slug}) do
Price.last_datetime_computed_at(slug)
end
@impl Sanbase.Metric.Behaviour
def metadata(metric) do
{:ok,
%{
metric: metric,
min_interval: "5m",
default_aggregation: @default_aggregation,
available_aggregations: @aggregations,
available_selectors: [:slug],
data_type: :timeseries,
complexity_weight: @default_complexity_weight
}}
end
@impl Sanbase.Metric.Behaviour
def human_readable_name(metric) do
case metric do
"price_usd" -> {:ok, "Price in USD"}
"price_btc" -> {:ok, "Price in BTC"}
"price_eth" -> {:ok, "Price in ETH"}
"marketcap_usd" -> {:ok, "Marketcap in USD"}
"volume_usd" -> {:ok, "Volume in USd"}
end
end
@impl Sanbase.Metric.Behaviour
def available_aggregations(), do: @aggregations
@impl Sanbase.Metric.Behaviour
def available_timeseries_metrics(), do: @timeseries_metrics
@impl Sanbase.Metric.Behaviour
def available_histogram_metrics(), do: @histogram_metrics
@impl Sanbase.Metric.Behaviour
def available_metrics(), do: @metrics
@impl Sanbase.Metric.Behaviour
def available_metrics(%{slug: "TOTAL_ERC20"}), do: @metrics
def available_metrics(%{slug: slug}) do
case Price.has_data?(slug) do
{:ok, true} -> {:ok, @metrics}
{:ok, false} -> {:ok, []}
{:error, error} -> {:error, error}
end
end
@impl Sanbase.Metric.Behaviour
def available_slugs() do
Sanbase.Cache.get_or_store({:slugs_with_prices, 1800}, fn ->
Price.available_slugs()
end)
end
@impl Sanbase.Metric.Behaviour
def available_slugs(metric) when metric in @metrics do
available_slugs()
end
@impl Sanbase.Metric.Behaviour
def free_metrics(), do: @free_metrics
@impl Sanbase.Metric.Behaviour
def restricted_metrics(), do: @restricted_metrics
@impl Sanbase.Metric.Behaviour
def access_map(), do: @access_map
@impl Sanbase.Metric.Behaviour
def min_plan_map(), do: @min_plan_map
end
|
lib/sanbase/prices/metric_adapter.ex
| 0.807309
| 0.456713
|
metric_adapter.ex
|
starcoder
|
defmodule EQRCode.SpecTable do
@type error_correction_level :: :l | :m | :q | :h
@type version :: 1..40
@type mode :: :numeric | :alphanumeric | :byte | :kenji | :eci
def error_correction_level(), do: [:l, :m, :q, :h]
@mode [
numeric: 0b0001,
alphanumeric: 0b0010,
byte: 0b0100,
kanji: 0b1000,
eci: 0b0111
]
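  # Note: `mode_indicator/1` takes `:kanji`, while the capacity table below
  # (and the `mode` type above) spell it `:kenji`; table-driven lookups must
  # use the latter atom.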
@error_corretion_bits [
l: 0b01,
m: 0b00,
q: 0b11,
h: 0b10
]
@version_information_bits [
{7, 0b000111110010010100},
{8, 0b001000010110111100},
{9, 0b001001101010011001},
{10, 0b001010010011010011},
{11, 0b001011101111110110},
{12, 0b001100011101100010},
{13, 0b001101100001000111},
{14, 0b001110011000001101},
{15, 0b001111100100101000},
{16, 0b010000101101111000},
{17, 0b010001010001011101},
{18, 0b010010101000010111},
{19, 0b010011010100110010},
{20, 0b010100100110100110},
{21, 0b010101011010000011},
{22, 0b010110100011001001},
{23, 0b010111011111101100},
{24, 0b011000111011000100},
{25, 0b011001000111100001},
{26, 0b011010111110101011},
{27, 0b011011000010001110},
{28, 0b011100110000011010},
{29, 0b011101001100111111},
{30, 0b011110110101110101},
{31, 0b011111001001010000},
{32, 0b100000100111010101},
{33, 0b100001011011110000},
{34, 0b100010100010111010},
{35, 0b100011011110011111},
{36, 0b100100101100001011},
{37, 0b100101010000101110},
{38, 0b100110101001100100},
{39, 0b100111010101000001},
{40, 0b101000110001101001}
]
@spec version_information_bits(version()) :: 1..1_114_111
def version_information_bits(version)
for {version, bits} <- @version_information_bits do
def version_information_bits(unquote(version)), do: unquote(bits)
end
  @spec error_corretion_bits(error_correction_level()) :: 0..3
def error_corretion_bits(error_correction_level)
for {level, bits} <- @error_corretion_bits do
def error_corretion_bits(unquote(level)), do: unquote(bits)
end
def mode_indicator(mode \\ :byte)
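  # e.g. mode_indicator(:byte) #=> 0b0100; with no argument it defaults to :byte.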
  for {mode, indicator_bits} <- @mode do
    def mode_indicator(unquote(mode)), do: unquote(indicator_bits)
end
# {:version, :error_correction_level, :mode, :capacity, :character_count_indicator_bits}
@table [
{1, :l, :numeric, 41, 10},
{1, :l, :alphanumeric, 25, 9},
{1, :l, :byte, 17, 8},
{1, :l, :kenji, 10, 8},
{1, :m, :numeric, 34, 10},
{1, :m, :alphanumeric, 20, 9},
{1, :m, :byte, 14, 8},
{1, :m, :kenji, 8, 8},
{1, :q, :numeric, 27, 10},
{1, :q, :alphanumeric, 16, 9},
{1, :q, :byte, 11, 8},
{1, :q, :kenji, 7, 8},
{1, :h, :numeric, 17, 10},
{1, :h, :alphanumeric, 10, 9},
{1, :h, :byte, 7, 8},
{1, :h, :kenji, 4, 8},
{2, :l, :numeric, 77, 10},
{2, :l, :alphanumeric, 47, 9},
{2, :l, :byte, 32, 8},
{2, :l, :kenji, 20, 8},
{2, :m, :numeric, 63, 10},
{2, :m, :alphanumeric, 38, 9},
{2, :m, :byte, 26, 8},
{2, :m, :kenji, 16, 8},
{2, :q, :numeric, 48, 10},
{2, :q, :alphanumeric, 29, 9},
{2, :q, :byte, 20, 8},
{2, :q, :kenji, 12, 8},
{2, :h, :numeric, 34, 10},
{2, :h, :alphanumeric, 20, 9},
{2, :h, :byte, 14, 8},
{2, :h, :kenji, 8, 8},
{3, :l, :numeric, 127, 10},
{3, :l, :alphanumeric, 77, 9},
{3, :l, :byte, 53, 8},
{3, :l, :kenji, 32, 8},
{3, :m, :numeric, 101, 10},
{3, :m, :alphanumeric, 61, 9},
{3, :m, :byte, 42, 8},
{3, :m, :kenji, 26, 8},
{3, :q, :numeric, 77, 10},
{3, :q, :alphanumeric, 47, 9},
{3, :q, :byte, 32, 8},
{3, :q, :kenji, 20, 8},
{3, :h, :numeric, 58, 10},
{3, :h, :alphanumeric, 35, 9},
{3, :h, :byte, 24, 8},
{3, :h, :kenji, 15, 8},
{4, :l, :numeric, 187, 10},
{4, :l, :alphanumeric, 114, 9},
{4, :l, :byte, 78, 8},
{4, :l, :kenji, 48, 8},
{4, :m, :numeric, 149, 10},
{4, :m, :alphanumeric, 90, 9},
{4, :m, :byte, 62, 8},
{4, :m, :kenji, 38, 8},
{4, :q, :numeric, 111, 10},
{4, :q, :alphanumeric, 67, 9},
{4, :q, :byte, 46, 8},
{4, :q, :kenji, 28, 8},
{4, :h, :numeric, 82, 10},
{4, :h, :alphanumeric, 50, 9},
{4, :h, :byte, 34, 8},
{4, :h, :kenji, 21, 8},
{5, :l, :numeric, 255, 10},
{5, :l, :alphanumeric, 154, 9},
{5, :l, :byte, 106, 8},
{5, :l, :kenji, 65, 8},
{5, :m, :numeric, 202, 10},
{5, :m, :alphanumeric, 122, 9},
{5, :m, :byte, 84, 8},
{5, :m, :kenji, 52, 8},
{5, :q, :numeric, 144, 10},
{5, :q, :alphanumeric, 87, 9},
{5, :q, :byte, 60, 8},
{5, :q, :kenji, 37, 8},
{5, :h, :numeric, 106, 10},
{5, :h, :alphanumeric, 64, 9},
{5, :h, :byte, 44, 8},
{5, :h, :kenji, 27, 8},
{6, :l, :numeric, 322, 10},
{6, :l, :alphanumeric, 195, 9},
{6, :l, :byte, 134, 8},
{6, :l, :kenji, 82, 8},
{6, :m, :numeric, 255, 10},
{6, :m, :alphanumeric, 154, 9},
{6, :m, :byte, 106, 8},
{6, :m, :kenji, 65, 8},
{6, :q, :numeric, 178, 10},
{6, :q, :alphanumeric, 108, 9},
{6, :q, :byte, 74, 8},
{6, :q, :kenji, 45, 8},
{6, :h, :numeric, 139, 10},
{6, :h, :alphanumeric, 84, 9},
{6, :h, :byte, 58, 8},
{6, :h, :kenji, 36, 8},
{7, :l, :numeric, 370, 10},
{7, :l, :alphanumeric, 224, 9},
{7, :l, :byte, 154, 8},
{7, :l, :kenji, 95, 8},
{7, :m, :numeric, 293, 10},
{7, :m, :alphanumeric, 178, 9},
{7, :m, :byte, 122, 8},
{7, :m, :kenji, 75, 8},
{7, :q, :numeric, 207, 10},
{7, :q, :alphanumeric, 125, 9},
{7, :q, :byte, 86, 8},
{7, :q, :kenji, 53, 8},
{7, :h, :numeric, 154, 10},
{7, :h, :alphanumeric, 93, 9},
{7, :h, :byte, 64, 8},
{7, :h, :kenji, 39, 8},
{8, :l, :numeric, 461, 10},
{8, :l, :alphanumeric, 279, 9},
{8, :l, :byte, 192, 8},
{8, :l, :kenji, 118, 8},
{8, :m, :numeric, 365, 10},
{8, :m, :alphanumeric, 221, 9},
{8, :m, :byte, 152, 8},
{8, :m, :kenji, 93, 8},
{8, :q, :numeric, 259, 10},
{8, :q, :alphanumeric, 157, 9},
{8, :q, :byte, 108, 8},
{8, :q, :kenji, 66, 8},
{8, :h, :numeric, 202, 10},
{8, :h, :alphanumeric, 122, 9},
{8, :h, :byte, 84, 8},
{8, :h, :kenji, 52, 8},
{9, :l, :numeric, 552, 10},
{9, :l, :alphanumeric, 335, 9},
{9, :l, :byte, 230, 8},
{9, :l, :kenji, 141, 8},
{9, :m, :numeric, 432, 10},
{9, :m, :alphanumeric, 262, 9},
{9, :m, :byte, 180, 8},
{9, :m, :kenji, 111, 8},
{9, :q, :numeric, 312, 10},
{9, :q, :alphanumeric, 189, 9},
{9, :q, :byte, 130, 8},
{9, :q, :kenji, 80, 8},
{9, :h, :numeric, 235, 10},
{9, :h, :alphanumeric, 143, 9},
{9, :h, :byte, 98, 8},
{9, :h, :kenji, 60, 8},
{10, :l, :numeric, 652, 12},
{10, :l, :alphanumeric, 395, 11},
{10, :l, :byte, 271, 16},
{10, :l, :kenji, 167, 10},
{10, :m, :numeric, 513, 12},
{10, :m, :alphanumeric, 311, 11},
{10, :m, :byte, 213, 16},
{10, :m, :kenji, 131, 10},
{10, :q, :numeric, 364, 12},
{10, :q, :alphanumeric, 221, 11},
{10, :q, :byte, 151, 16},
{10, :q, :kenji, 93, 10},
{10, :h, :numeric, 288, 12},
{10, :h, :alphanumeric, 174, 11},
{10, :h, :byte, 119, 16},
{10, :h, :kenji, 74, 10},
{11, :l, :numeric, 772, 12},
{11, :l, :alphanumeric, 468, 11},
{11, :l, :byte, 321, 16},
{11, :l, :kenji, 198, 10},
{11, :m, :numeric, 604, 12},
{11, :m, :alphanumeric, 366, 11},
{11, :m, :byte, 251, 16},
{11, :m, :kenji, 155, 10},
{11, :q, :numeric, 427, 12},
{11, :q, :alphanumeric, 259, 11},
{11, :q, :byte, 177, 16},
{11, :q, :kenji, 109, 10},
{11, :h, :numeric, 331, 12},
{11, :h, :alphanumeric, 200, 11},
{11, :h, :byte, 137, 16},
{11, :h, :kenji, 85, 10},
{12, :l, :numeric, 883, 12},
{12, :l, :alphanumeric, 535, 11},
{12, :l, :byte, 367, 16},
{12, :l, :kenji, 226, 10},
{12, :m, :numeric, 691, 12},
{12, :m, :alphanumeric, 419, 11},
{12, :m, :byte, 287, 16},
{12, :m, :kenji, 177, 10},
{12, :q, :numeric, 489, 12},
{12, :q, :alphanumeric, 296, 11},
{12, :q, :byte, 203, 16},
{12, :q, :kenji, 125, 10},
{12, :h, :numeric, 374, 12},
{12, :h, :alphanumeric, 227, 11},
{12, :h, :byte, 155, 16},
{12, :h, :kenji, 96, 10},
{13, :l, :numeric, 1022, 12},
{13, :l, :alphanumeric, 619, 11},
{13, :l, :byte, 425, 16},
{13, :l, :kenji, 262, 10},
{13, :m, :numeric, 796, 12},
{13, :m, :alphanumeric, 483, 11},
{13, :m, :byte, 331, 16},
{13, :m, :kenji, 204, 10},
{13, :q, :numeric, 580, 12},
{13, :q, :alphanumeric, 352, 11},
{13, :q, :byte, 241, 16},
{13, :q, :kenji, 149, 10},
{13, :h, :numeric, 427, 12},
{13, :h, :alphanumeric, 259, 11},
{13, :h, :byte, 177, 16},
{13, :h, :kenji, 109, 10},
{14, :l, :numeric, 1101, 12},
{14, :l, :alphanumeric, 667, 11},
{14, :l, :byte, 458, 16},
{14, :l, :kenji, 282, 10},
{14, :m, :numeric, 871, 12},
{14, :m, :alphanumeric, 528, 11},
{14, :m, :byte, 362, 16},
{14, :m, :kenji, 223, 10},
{14, :q, :numeric, 621, 12},
{14, :q, :alphanumeric, 376, 11},
{14, :q, :byte, 258, 16},
{14, :q, :kenji, 159, 10},
{14, :h, :numeric, 468, 12},
{14, :h, :alphanumeric, 283, 11},
{14, :h, :byte, 194, 16},
{14, :h, :kenji, 120, 10},
{15, :l, :numeric, 1250, 12},
{15, :l, :alphanumeric, 758, 11},
{15, :l, :byte, 520, 16},
{15, :l, :kenji, 320, 10},
{15, :m, :numeric, 991, 12},
{15, :m, :alphanumeric, 600, 11},
{15, :m, :byte, 412, 16},
{15, :m, :kenji, 254, 10},
{15, :q, :numeric, 703, 12},
{15, :q, :alphanumeric, 426, 11},
{15, :q, :byte, 292, 16},
{15, :q, :kenji, 180, 10},
{15, :h, :numeric, 530, 12},
{15, :h, :alphanumeric, 321, 11},
{15, :h, :byte, 220, 16},
{15, :h, :kenji, 136, 10},
{16, :l, :numeric, 1408, 12},
{16, :l, :alphanumeric, 854, 11},
{16, :l, :byte, 586, 16},
{16, :l, :kenji, 361, 10},
{16, :m, :numeric, 1082, 12},
{16, :m, :alphanumeric, 656, 11},
{16, :m, :byte, 450, 16},
{16, :m, :kenji, 277, 10},
{16, :q, :numeric, 775, 12},
{16, :q, :alphanumeric, 470, 11},
{16, :q, :byte, 322, 16},
{16, :q, :kenji, 198, 10},
{16, :h, :numeric, 602, 12},
{16, :h, :alphanumeric, 365, 11},
{16, :h, :byte, 250, 16},
{16, :h, :kenji, 154, 10},
{17, :l, :numeric, 1548, 12},
{17, :l, :alphanumeric, 938, 11},
{17, :l, :byte, 644, 16},
{17, :l, :kenji, 397, 10},
{17, :m, :numeric, 1212, 12},
{17, :m, :alphanumeric, 734, 11},
{17, :m, :byte, 504, 16},
{17, :m, :kenji, 310, 10},
{17, :q, :numeric, 876, 12},
{17, :q, :alphanumeric, 531, 11},
{17, :q, :byte, 364, 16},
{17, :q, :kenji, 224, 10},
{17, :h, :numeric, 674, 12},
{17, :h, :alphanumeric, 408, 11},
{17, :h, :byte, 280, 16},
{17, :h, :kenji, 173, 10},
{18, :l, :numeric, 1725, 12},
{18, :l, :alphanumeric, 1046, 11},
{18, :l, :byte, 718, 16},
{18, :l, :kenji, 442, 10},
{18, :m, :numeric, 1346, 12},
{18, :m, :alphanumeric, 816, 11},
{18, :m, :byte, 560, 16},
{18, :m, :kenji, 345, 10},
{18, :q, :numeric, 948, 12},
{18, :q, :alphanumeric, 574, 11},
{18, :q, :byte, 394, 16},
{18, :q, :kenji, 243, 10},
{18, :h, :numeric, 746, 12},
{18, :h, :alphanumeric, 452, 11},
{18, :h, :byte, 310, 16},
{18, :h, :kenji, 191, 10},
{19, :l, :numeric, 1903, 12},
{19, :l, :alphanumeric, 1153, 11},
{19, :l, :byte, 792, 16},
{19, :l, :kenji, 488, 10},
{19, :m, :numeric, 1500, 12},
{19, :m, :alphanumeric, 909, 11},
{19, :m, :byte, 624, 16},
{19, :m, :kenji, 384, 10},
{19, :q, :numeric, 1063, 12},
{19, :q, :alphanumeric, 644, 11},
{19, :q, :byte, 442, 16},
{19, :q, :kenji, 272, 10},
{19, :h, :numeric, 813, 12},
{19, :h, :alphanumeric, 493, 11},
{19, :h, :byte, 338, 16},
{19, :h, :kenji, 208, 10},
{20, :l, :numeric, 2061, 12},
{20, :l, :alphanumeric, 1249, 11},
{20, :l, :byte, 858, 16},
{20, :l, :kenji, 528, 10},
{20, :m, :numeric, 1600, 12},
{20, :m, :alphanumeric, 970, 11},
{20, :m, :byte, 666, 16},
{20, :m, :kenji, 410, 10},
{20, :q, :numeric, 1159, 12},
{20, :q, :alphanumeric, 702, 11},
{20, :q, :byte, 482, 16},
{20, :q, :kenji, 297, 10},
{20, :h, :numeric, 919, 12},
{20, :h, :alphanumeric, 557, 11},
{20, :h, :byte, 382, 16},
{20, :h, :kenji, 235, 10},
{21, :l, :numeric, 2232, 12},
{21, :l, :alphanumeric, 1352, 11},
{21, :l, :byte, 929, 16},
{21, :l, :kenji, 572, 10},
{21, :m, :numeric, 1708, 12},
{21, :m, :alphanumeric, 1035, 11},
{21, :m, :byte, 711, 16},
{21, :m, :kenji, 438, 10},
{21, :q, :numeric, 1224, 12},
{21, :q, :alphanumeric, 742, 11},
{21, :q, :byte, 509, 16},
{21, :q, :kenji, 314, 10},
{21, :h, :numeric, 969, 12},
{21, :h, :alphanumeric, 587, 11},
{21, :h, :byte, 403, 16},
{21, :h, :kenji, 248, 10},
{22, :l, :numeric, 2409, 12},
{22, :l, :alphanumeric, 1460, 11},
{22, :l, :byte, 1003, 16},
{22, :l, :kenji, 618, 10},
{22, :m, :numeric, 1872, 12},
{22, :m, :alphanumeric, 1134, 11},
{22, :m, :byte, 779, 16},
{22, :m, :kenji, 480, 10},
{22, :q, :numeric, 1358, 12},
{22, :q, :alphanumeric, 823, 11},
{22, :q, :byte, 565, 16},
{22, :q, :kenji, 348, 10},
{22, :h, :numeric, 1056, 12},
{22, :h, :alphanumeric, 640, 11},
{22, :h, :byte, 439, 16},
{22, :h, :kenji, 270, 10},
{23, :l, :numeric, 2620, 12},
{23, :l, :alphanumeric, 1588, 11},
{23, :l, :byte, 1091, 16},
{23, :l, :kenji, 672, 10},
{23, :m, :numeric, 2059, 12},
{23, :m, :alphanumeric, 1248, 11},
{23, :m, :byte, 857, 16},
{23, :m, :kenji, 528, 10},
{23, :q, :numeric, 1468, 12},
{23, :q, :alphanumeric, 890, 11},
{23, :q, :byte, 611, 16},
{23, :q, :kenji, 376, 10},
{23, :h, :numeric, 1108, 12},
{23, :h, :alphanumeric, 672, 11},
{23, :h, :byte, 461, 16},
{23, :h, :kenji, 284, 10},
{24, :l, :numeric, 2812, 12},
{24, :l, :alphanumeric, 1704, 11},
{24, :l, :byte, 1171, 16},
{24, :l, :kenji, 721, 10},
{24, :m, :numeric, 2188, 12},
{24, :m, :alphanumeric, 1326, 11},
{24, :m, :byte, 911, 16},
{24, :m, :kenji, 561, 10},
{24, :q, :numeric, 1588, 12},
{24, :q, :alphanumeric, 963, 11},
{24, :q, :byte, 661, 16},
{24, :q, :kenji, 407, 10},
{24, :h, :numeric, 1228, 12},
{24, :h, :alphanumeric, 744, 11},
{24, :h, :byte, 511, 16},
{24, :h, :kenji, 315, 10},
{25, :l, :numeric, 3057, 12},
{25, :l, :alphanumeric, 1853, 11},
{25, :l, :byte, 1273, 16},
{25, :l, :kenji, 784, 10},
{25, :m, :numeric, 2395, 12},
{25, :m, :alphanumeric, 1451, 11},
{25, :m, :byte, 997, 16},
{25, :m, :kenji, 614, 10},
{25, :q, :numeric, 1718, 12},
{25, :q, :alphanumeric, 1041, 11},
{25, :q, :byte, 715, 16},
{25, :q, :kenji, 440, 10},
{25, :h, :numeric, 1286, 12},
{25, :h, :alphanumeric, 779, 11},
{25, :h, :byte, 535, 16},
{25, :h, :kenji, 330, 10},
{26, :l, :numeric, 3283, 12},
{26, :l, :alphanumeric, 1990, 11},
{26, :l, :byte, 1367, 16},
{26, :l, :kenji, 842, 10},
{26, :m, :numeric, 2544, 12},
{26, :m, :alphanumeric, 1542, 11},
{26, :m, :byte, 1059, 16},
{26, :m, :kenji, 652, 10},
{26, :q, :numeric, 1804, 12},
{26, :q, :alphanumeric, 1094, 11},
{26, :q, :byte, 751, 16},
{26, :q, :kenji, 462, 10},
{26, :h, :numeric, 1425, 12},
{26, :h, :alphanumeric, 864, 11},
{26, :h, :byte, 593, 16},
{26, :h, :kenji, 365, 10},
{27, :l, :numeric, 3517, 14},
{27, :l, :alphanumeric, 2132, 13},
{27, :l, :byte, 1465, 16},
{27, :l, :kenji, 902, 12},
{27, :m, :numeric, 2701, 14},
{27, :m, :alphanumeric, 1637, 13},
{27, :m, :byte, 1125, 16},
{27, :m, :kenji, 692, 12},
{27, :q, :numeric, 1933, 14},
{27, :q, :alphanumeric, 1172, 13},
{27, :q, :byte, 805, 16},
{27, :q, :kenji, 496, 12},
{27, :h, :numeric, 1501, 14},
{27, :h, :alphanumeric, 910, 13},
{27, :h, :byte, 625, 16},
{27, :h, :kenji, 385, 12},
{28, :l, :numeric, 3669, 14},
{28, :l, :alphanumeric, 2223, 13},
{28, :l, :byte, 1528, 16},
{28, :l, :kenji, 940, 12},
{28, :m, :numeric, 2857, 14},
{28, :m, :alphanumeric, 1732, 13},
{28, :m, :byte, 1190, 16},
{28, :m, :kenji, 732, 12},
{28, :q, :numeric, 2085, 14},
{28, :q, :alphanumeric, 1263, 13},
{28, :q, :byte, 868, 16},
{28, :q, :kenji, 534, 12},
{28, :h, :numeric, 1581, 14},
{28, :h, :alphanumeric, 958, 13},
{28, :h, :byte, 658, 16},
{28, :h, :kenji, 405, 12},
{29, :l, :numeric, 3909, 14},
{29, :l, :alphanumeric, 2369, 13},
{29, :l, :byte, 1628, 16},
{29, :l, :kenji, 1002, 12},
{29, :m, :numeric, 3035, 14},
{29, :m, :alphanumeric, 1839, 13},
{29, :m, :byte, 1264, 16},
{29, :m, :kenji, 778, 12},
{29, :q, :numeric, 2181, 14},
{29, :q, :alphanumeric, 1322, 13},
{29, :q, :byte, 908, 16},
{29, :q, :kenji, 559, 12},
{29, :h, :numeric, 1677, 14},
{29, :h, :alphanumeric, 1016, 13},
{29, :h, :byte, 698, 16},
{29, :h, :kenji, 430, 12},
{30, :l, :numeric, 4158, 14},
{30, :l, :alphanumeric, 2520, 13},
{30, :l, :byte, 1732, 16},
{30, :l, :kenji, 1066, 12},
{30, :m, :numeric, 3289, 14},
{30, :m, :alphanumeric, 1994, 13},
{30, :m, :byte, 1370, 16},
{30, :m, :kenji, 843, 12},
{30, :q, :numeric, 2358, 14},
{30, :q, :alphanumeric, 1429, 13},
{30, :q, :byte, 982, 16},
{30, :q, :kenji, 604, 12},
{30, :h, :numeric, 1782, 14},
{30, :h, :alphanumeric, 1080, 13},
{30, :h, :byte, 742, 16},
{30, :h, :kenji, 457, 12},
{31, :l, :numeric, 4417, 14},
{31, :l, :alphanumeric, 2677, 13},
{31, :l, :byte, 1840, 16},
{31, :l, :kenji, 1132, 12},
{31, :m, :numeric, 3486, 14},
{31, :m, :alphanumeric, 2113, 13},
{31, :m, :byte, 1452, 16},
{31, :m, :kenji, 894, 12},
{31, :q, :numeric, 2473, 14},
{31, :q, :alphanumeric, 1499, 13},
{31, :q, :byte, 1030, 16},
{31, :q, :kenji, 634, 12},
{31, :h, :numeric, 1897, 14},
{31, :h, :alphanumeric, 1150, 13},
{31, :h, :byte, 790, 16},
{31, :h, :kenji, 486, 12},
{32, :l, :numeric, 4686, 14},
{32, :l, :alphanumeric, 2840, 13},
{32, :l, :byte, 1952, 16},
{32, :l, :kenji, 1201, 12},
{32, :m, :numeric, 3693, 14},
{32, :m, :alphanumeric, 2238, 13},
{32, :m, :byte, 1538, 16},
{32, :m, :kenji, 947, 12},
{32, :q, :numeric, 2670, 14},
{32, :q, :alphanumeric, 1618, 13},
{32, :q, :byte, 1112, 16},
{32, :q, :kenji, 684, 12},
{32, :h, :numeric, 2022, 14},
{32, :h, :alphanumeric, 1226, 13},
{32, :h, :byte, 842, 16},
{32, :h, :kenji, 518, 12},
{33, :l, :numeric, 4965, 14},
{33, :l, :alphanumeric, 3009, 13},
{33, :l, :byte, 2068, 16},
{33, :l, :kenji, 1273, 12},
{33, :m, :numeric, 3909, 14},
{33, :m, :alphanumeric, 2369, 13},
{33, :m, :byte, 1628, 16},
{33, :m, :kenji, 1002, 12},
{33, :q, :numeric, 2805, 14},
{33, :q, :alphanumeric, 1700, 13},
{33, :q, :byte, 1168, 16},
{33, :q, :kenji, 719, 12},
{33, :h, :numeric, 2157, 14},
{33, :h, :alphanumeric, 1307, 13},
{33, :h, :byte, 898, 16},
{33, :h, :kenji, 553, 12},
{34, :l, :numeric, 5253, 14},
{34, :l, :alphanumeric, 3183, 13},
{34, :l, :byte, 2188, 16},
{34, :l, :kenji, 1347, 12},
{34, :m, :numeric, 4134, 14},
{34, :m, :alphanumeric, 2506, 13},
{34, :m, :byte, 1722, 16},
{34, :m, :kenji, 1060, 12},
{34, :q, :numeric, 2949, 14},
{34, :q, :alphanumeric, 1787, 13},
{34, :q, :byte, 1228, 16},
{34, :q, :kenji, 756, 12},
{34, :h, :numeric, 2301, 14},
{34, :h, :alphanumeric, 1394, 13},
{34, :h, :byte, 958, 16},
{34, :h, :kenji, 590, 12},
{35, :l, :numeric, 5529, 14},
{35, :l, :alphanumeric, 3351, 13},
{35, :l, :byte, 2303, 16},
{35, :l, :kenji, 1417, 12},
{35, :m, :numeric, 4343, 14},
{35, :m, :alphanumeric, 2632, 13},
{35, :m, :byte, 1809, 16},
{35, :m, :kenji, 1113, 12},
{35, :q, :numeric, 3081, 14},
{35, :q, :alphanumeric, 1867, 13},
{35, :q, :byte, 1283, 16},
{35, :q, :kenji, 790, 12},
{35, :h, :numeric, 2361, 14},
{35, :h, :alphanumeric, 1431, 13},
{35, :h, :byte, 983, 16},
{35, :h, :kenji, 605, 12},
{36, :l, :numeric, 5836, 14},
{36, :l, :alphanumeric, 3537, 13},
{36, :l, :byte, 2431, 16},
{36, :l, :kenji, 1496, 12},
{36, :m, :numeric, 4588, 14},
{36, :m, :alphanumeric, 2780, 13},
{36, :m, :byte, 1911, 16},
{36, :m, :kenji, 1176, 12},
{36, :q, :numeric, 3244, 14},
{36, :q, :alphanumeric, 1966, 13},
{36, :q, :byte, 1351, 16},
{36, :q, :kenji, 832, 12},
{36, :h, :numeric, 2524, 14},
{36, :h, :alphanumeric, 1530, 13},
{36, :h, :byte, 1051, 16},
{36, :h, :kenji, 647, 12},
{37, :l, :numeric, 6153, 14},
{37, :l, :alphanumeric, 3729, 13},
{37, :l, :byte, 2563, 16},
{37, :l, :kenji, 1577, 12},
{37, :m, :numeric, 4775, 14},
{37, :m, :alphanumeric, 2894, 13},
{37, :m, :byte, 1989, 16},
{37, :m, :kenji, 1224, 12},
{37, :q, :numeric, 3417, 14},
{37, :q, :alphanumeric, 2071, 13},
{37, :q, :byte, 1423, 16},
{37, :q, :kenji, 876, 12},
{37, :h, :numeric, 2625, 14},
{37, :h, :alphanumeric, 1591, 13},
{37, :h, :byte, 1093, 16},
{37, :h, :kenji, 673, 12},
{38, :l, :numeric, 6479, 14},
{38, :l, :alphanumeric, 3927, 13},
{38, :l, :byte, 2699, 16},
{38, :l, :kenji, 1661, 12},
{38, :m, :numeric, 5039, 14},
{38, :m, :alphanumeric, 3054, 13},
{38, :m, :byte, 2099, 16},
{38, :m, :kenji, 1292, 12},
{38, :q, :numeric, 3599, 14},
{38, :q, :alphanumeric, 2181, 13},
{38, :q, :byte, 1499, 16},
{38, :q, :kenji, 923, 12},
{38, :h, :numeric, 2735, 14},
{38, :h, :alphanumeric, 1658, 13},
{38, :h, :byte, 1139, 16},
{38, :h, :kenji, 701, 12},
{39, :l, :numeric, 6743, 14},
{39, :l, :alphanumeric, 4087, 13},
{39, :l, :byte, 2809, 16},
{39, :l, :kenji, 1729, 12},
{39, :m, :numeric, 5313, 14},
{39, :m, :alphanumeric, 3220, 13},
{39, :m, :byte, 2213, 16},
{39, :m, :kenji, 1362, 12},
{39, :q, :numeric, 3791, 14},
{39, :q, :alphanumeric, 2298, 13},
{39, :q, :byte, 1579, 16},
{39, :q, :kenji, 972, 12},
{39, :h, :numeric, 2927, 14},
{39, :h, :alphanumeric, 1774, 13},
{39, :h, :byte, 1219, 16},
{39, :h, :kenji, 750, 12},
{40, :l, :numeric, 7089, 14},
{40, :l, :alphanumeric, 4296, 13},
{40, :l, :byte, 2953, 16},
{40, :l, :kenji, 1817, 12},
{40, :m, :numeric, 5596, 14},
{40, :m, :alphanumeric, 3391, 13},
{40, :m, :byte, 2331, 16},
{40, :m, :kenji, 1435, 12},
{40, :q, :numeric, 3993, 14},
{40, :q, :alphanumeric, 2420, 13},
{40, :q, :byte, 1663, 16},
{40, :q, :kenji, 1024, 12},
{40, :h, :numeric, 3057, 14},
{40, :h, :alphanumeric, 1852, 13},
{40, :h, :byte, 1273, 16},
{40, :h, :kenji, 784, 12}
]
@ec_levels @table |> Enum.map(&elem(&1, 1)) |> Enum.uniq()
@modes @table |> Enum.map(&elem(&1, 2)) |> Enum.uniq()
@spec find_version(non_neg_integer(), error_correction_level(), mode()) :: {:error, :no_version_found} | {:ok, version()}
def find_version(bin_len, ec_level \\ :h, mode \\ :byte)
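  # Clauses are generated in ascending capacity order, so the first match is
  # the smallest version that fits, e.g. (per the capacity table above):
  #
  #   find_version(17, :l, :byte) #=> {:ok, 1}
  #   find_version(18, :l, :byte) #=> {:ok, 2}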
for ec_level <- @ec_levels,
mode <- @modes,
{version, _, _, cap, _} <-
Enum.filter(@table, &match?({_, ^ec_level, ^mode, _, _}, &1)) |> Enum.sort_by(&elem(&1, 3)) do
def find_version(bin_len, unquote(ec_level), unquote(mode)) when bin_len <= unquote(cap),
do: {:ok, unquote(version)}
end
def find_version(_bin_len, _ec_level, _mode), do: {:error, :no_version_found}
@spec character_count_indicator_bits(version(), error_correction_level(), mode()) :: non_neg_integer()
def character_count_indicator_bits(version, ec_level, mode \\ :byte)
for {version, ec_level, mode, _, cci_len} <- @table do
def character_count_indicator_bits(unquote(version), unquote(ec_level), unquote(mode)), do: unquote(cci_len)
end
def character_count_indicator_bits(_version, _ec_level, _mode), do: 0
# {:version, :error_correction_level, :ec_codewords_per_block, :group1_block_len, :group1_codewords_per_block, :group2_block_len, :group2_codewords_per_block}
@error_correction_table [
{1, :l, 7, 1, 19, 0, 0},
{1, :m, 10, 1, 16, 0, 0},
{1, :q, 13, 1, 13, 0, 0},
{1, :h, 17, 1, 9, 0, 0},
{2, :l, 10, 1, 34, 0, 0},
{2, :m, 16, 1, 28, 0, 0},
{2, :q, 22, 1, 22, 0, 0},
{2, :h, 28, 1, 16, 0, 0},
{3, :l, 15, 1, 55, 0, 0},
{3, :m, 26, 1, 44, 0, 0},
{3, :q, 18, 2, 17, 0, 0},
{3, :h, 22, 2, 13, 0, 0},
{4, :l, 20, 1, 80, 0, 0},
{4, :m, 18, 2, 32, 0, 0},
{4, :q, 26, 2, 24, 0, 0},
{4, :h, 16, 4, 9, 0, 0},
{5, :l, 26, 1, 108, 0, 0},
{5, :m, 24, 2, 43, 0, 0},
{5, :q, 18, 2, 15, 2, 16},
{5, :h, 22, 2, 11, 2, 12},
{6, :l, 18, 2, 68, 0, 0},
{6, :m, 16, 4, 27, 0, 0},
{6, :q, 24, 4, 19, 0, 0},
{6, :h, 28, 4, 15, 0, 0},
{7, :l, 20, 2, 78, 0, 0},
{7, :m, 18, 4, 31, 0, 0},
{7, :q, 18, 2, 14, 4, 15},
{7, :h, 26, 4, 13, 1, 14},
{8, :l, 24, 2, 97, 0, 0},
{8, :m, 22, 2, 38, 2, 39},
{8, :q, 22, 4, 18, 2, 19},
{8, :h, 26, 4, 14, 2, 15},
{9, :l, 30, 2, 116, 0, 0},
{9, :m, 22, 3, 36, 2, 37},
{9, :q, 20, 4, 16, 4, 17},
{9, :h, 24, 4, 12, 4, 13},
{10, :l, 18, 2, 68, 2, 69},
{10, :m, 26, 4, 43, 1, 44},
{10, :q, 24, 6, 19, 2, 20},
{10, :h, 28, 6, 15, 2, 16},
{11, :l, 20, 4, 81, 0, 0},
{11, :m, 30, 1, 50, 4, 51},
{11, :q, 28, 4, 22, 4, 23},
{11, :h, 24, 3, 12, 8, 13},
{12, :l, 24, 2, 92, 2, 93},
{12, :m, 22, 6, 36, 2, 37},
{12, :q, 26, 4, 20, 6, 21},
{12, :h, 28, 7, 14, 4, 15},
{13, :l, 26, 4, 107, 0, 0},
{13, :m, 22, 8, 37, 1, 38},
{13, :q, 24, 8, 20, 4, 21},
{13, :h, 22, 12, 11, 4, 12},
{14, :l, 30, 3, 115, 1, 116},
{14, :m, 24, 4, 40, 5, 41},
{14, :q, 20, 11, 16, 5, 17},
{14, :h, 24, 11, 12, 5, 13},
{15, :l, 22, 5, 87, 1, 88},
{15, :m, 24, 5, 41, 5, 42},
{15, :q, 30, 5, 24, 7, 25},
{15, :h, 24, 11, 12, 7, 13},
{16, :l, 24, 5, 98, 1, 99},
{16, :m, 28, 7, 45, 3, 46},
{16, :q, 24, 15, 19, 2, 20},
{16, :h, 30, 3, 15, 13, 16},
{17, :l, 28, 1, 107, 5, 108},
{17, :m, 28, 10, 46, 1, 47},
{17, :q, 28, 1, 22, 15, 23},
{17, :h, 28, 2, 14, 17, 15},
{18, :l, 30, 5, 120, 1, 121},
{18, :m, 26, 9, 43, 4, 44},
{18, :q, 28, 17, 22, 1, 23},
{18, :h, 28, 2, 14, 19, 15},
{19, :l, 28, 3, 113, 4, 114},
{19, :m, 26, 3, 44, 11, 45},
{19, :q, 26, 17, 21, 4, 22},
{19, :h, 26, 9, 13, 16, 14},
{20, :l, 28, 3, 107, 5, 108},
{20, :m, 26, 3, 41, 13, 42},
{20, :q, 30, 15, 24, 5, 25},
{20, :h, 28, 15, 15, 10, 16},
{21, :l, 28, 4, 116, 4, 117},
{21, :m, 26, 17, 42, 0, 0},
{21, :q, 28, 17, 22, 6, 23},
{21, :h, 30, 19, 16, 6, 17},
{22, :l, 28, 2, 111, 7, 112},
{22, :m, 28, 17, 46, 0, 0},
{22, :q, 30, 7, 24, 16, 25},
{22, :h, 24, 34, 13, 0, 0},
{23, :l, 30, 4, 121, 5, 122},
{23, :m, 28, 4, 47, 14, 48},
{23, :q, 30, 11, 24, 14, 25},
{23, :h, 30, 16, 15, 14, 16},
{24, :l, 30, 6, 117, 4, 118},
{24, :m, 28, 6, 45, 14, 46},
{24, :q, 30, 11, 24, 16, 25},
{24, :h, 30, 30, 16, 2, 17},
{25, :l, 26, 8, 106, 4, 107},
{25, :m, 28, 8, 47, 13, 48},
{25, :q, 30, 7, 24, 22, 25},
{25, :h, 30, 22, 15, 13, 16},
{26, :l, 28, 10, 114, 2, 115},
{26, :m, 28, 19, 46, 4, 47},
{26, :q, 28, 28, 22, 6, 23},
{26, :h, 30, 33, 16, 4, 17},
{27, :l, 30, 8, 122, 4, 123},
{27, :m, 28, 22, 45, 3, 46},
{27, :q, 30, 8, 23, 26, 24},
{27, :h, 30, 12, 15, 28, 16},
{28, :l, 30, 3, 117, 10, 118},
{28, :m, 28, 3, 45, 23, 46},
{28, :q, 30, 4, 24, 31, 25},
{28, :h, 30, 11, 15, 31, 16},
{29, :l, 30, 7, 116, 7, 117},
{29, :m, 28, 21, 45, 7, 46},
{29, :q, 30, 1, 23, 37, 24},
{29, :h, 30, 19, 15, 26, 16},
{30, :l, 30, 5, 115, 10, 116},
{30, :m, 28, 19, 47, 10, 48},
{30, :q, 30, 15, 24, 25, 25},
{30, :h, 30, 23, 15, 25, 16},
{31, :l, 30, 13, 115, 3, 116},
{31, :m, 28, 2, 46, 29, 47},
{31, :q, 30, 42, 24, 1, 25},
{31, :h, 30, 23, 15, 28, 16},
{32, :l, 30, 17, 115, 0, 0},
{32, :m, 28, 10, 46, 23, 47},
{32, :q, 30, 10, 24, 35, 25},
{32, :h, 30, 19, 15, 35, 16},
{33, :l, 30, 17, 115, 1, 116},
{33, :m, 28, 14, 46, 21, 47},
{33, :q, 30, 29, 24, 19, 25},
{33, :h, 30, 11, 15, 46, 16},
{34, :l, 30, 13, 115, 6, 116},
{34, :m, 28, 14, 46, 23, 47},
{34, :q, 30, 44, 24, 7, 25},
{34, :h, 30, 59, 16, 1, 17},
{35, :l, 30, 12, 121, 7, 122},
{35, :m, 28, 12, 47, 26, 48},
{35, :q, 30, 39, 24, 14, 25},
{35, :h, 30, 22, 15, 41, 16},
{36, :l, 30, 6, 121, 14, 122},
{36, :m, 28, 6, 47, 34, 48},
{36, :q, 30, 46, 24, 10, 25},
{36, :h, 30, 2, 15, 64, 16},
{37, :l, 30, 17, 122, 4, 123},
{37, :m, 28, 29, 46, 14, 47},
{37, :q, 30, 49, 24, 10, 25},
{37, :h, 30, 24, 15, 46, 16},
{38, :l, 30, 4, 122, 18, 123},
{38, :m, 28, 13, 46, 32, 47},
{38, :q, 30, 48, 24, 14, 25},
{38, :h, 30, 42, 15, 32, 16},
{39, :l, 30, 20, 117, 4, 118},
{39, :m, 28, 40, 47, 7, 48},
{39, :q, 30, 43, 24, 22, 25},
{39, :h, 30, 10, 15, 67, 16},
{40, :l, 30, 19, 118, 6, 119},
{40, :m, 28, 18, 47, 31, 48},
{40, :q, 30, 34, 24, 34, 25},
{40, :h, 30, 20, 15, 61, 16}
]
@spec code_words_len(version(), error_correction_level()) :: non_neg_integer()
def code_words_len(version, error_correction_level)
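  # Total data codewords for a version/EC level, e.g. for {5, :q} the table
  # row {5, :q, 18, 2, 15, 2, 16} yields 2 * 15 + 2 * 16 = 62.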
for {version, error_correction_level, _, g1_blocks, g1_codewords, g2_blocks, g2_codewords} <- @error_correction_table do
def code_words_len(unquote(version), unquote(error_correction_level)), do: unquote(g1_blocks * g1_codewords + g2_blocks * g2_codewords)
end
def code_words_len(_version, _error_correction_level), do: 0
@spec ec_codewords_per_block(version(), error_correction_level()) :: non_neg_integer()
def ec_codewords_per_block(version, error_correction_level)
for {version, error_correction_level, ec_codewords_per_block, _, _, _, _} <- @error_correction_table do
def ec_codewords_per_block(unquote(version), unquote(error_correction_level)), do: unquote(ec_codewords_per_block)
end
def ec_codewords_per_block(_version, _error_correction_level), do: 0
@spec group1_block_len(version(), error_correction_level()) :: non_neg_integer()
def group1_block_len(version, error_correction_level)
for {version, error_correction_level, _, group1_block_len, _, _, _} <- @error_correction_table do
def group1_block_len(unquote(version), unquote(error_correction_level)), do: unquote(group1_block_len)
end
def group1_block_len(_version, _error_correction_level), do: 0
@spec group1_codewords_per_block(version(), error_correction_level()) :: non_neg_integer()
def group1_codewords_per_block(version, error_correction_level)
for {version, error_correction_level, _, _, group1_codewords_per_block, _, _} <- @error_correction_table do
def group1_codewords_per_block(unquote(version), unquote(error_correction_level)), do: unquote(group1_codewords_per_block)
end
def group1_codewords_per_block(_version, _error_correction_level), do: 0
@spec group2_block_len(version(), error_correction_level()) :: non_neg_integer()
def group2_block_len(version, error_correction_level)
for {version, error_correction_level, _, _, _, group2_block_len, _} <- @error_correction_table do
def group2_block_len(unquote(version), unquote(error_correction_level)), do: unquote(group2_block_len)
end
def group2_block_len(_version, _error_correction_level), do: 0
@spec group2_codewords_per_block(version(), error_correction_level()) :: non_neg_integer()
def group2_codewords_per_block(version, error_correction_level)
for {version, error_correction_level, _, _, _, _, group2_codewords_per_block} <- @error_correction_table do
def group2_codewords_per_block(unquote(version), unquote(error_correction_level)), do: unquote(group2_codewords_per_block)
end
def group2_codewords_per_block(_version, _error_correction_level), do: 0
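  # Illustrative note (hedged): each accessor above unrolls the table into one
  # function clause per row at compile time. Taking the row
  # {40, :h, 30, 20, 15, 61, 16} as an example:
  #
  #   code_words_len(40, :h)          #=> 20 * 15 + 61 * 16 = 1276
  #   ec_codewords_per_block(40, :h)  #=> 30
  #   group1_block_len(40, :h)        #=> 20
  #
  # Any version/level pair not in the table falls through to the 0 default.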
  # {version, remainder_bits}
@remainer [
{1, 0},
{2, 7},
{3, 7},
{4, 7},
{5, 7},
{6, 7},
{7, 0},
{8, 0},
{9, 0},
{10, 0},
{11, 0},
{12, 0},
{13, 0},
{14, 3},
{15, 3},
{16, 3},
{17, 3},
{18, 3},
{19, 3},
{20, 3},
{21, 4},
{22, 4},
{23, 4},
{24, 4},
{25, 4},
{26, 4},
{27, 4},
{28, 3},
{29, 3},
{30, 3},
{31, 3},
{32, 3},
{33, 3},
{34, 3},
{35, 0},
{36, 0},
{37, 0},
{38, 0},
{39, 0},
{40, 0}
]
@spec remainer(any) :: 0..7
def remainer(_version)
for {version, remainer} <- @remainer do
def remainer(unquote(version)), do: unquote(remainer)
end
def remainer(_version), do: 0
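  # Illustrative note (hedged): remainder bits are read straight from the
  # table above, e.g.
  #
  #   remainer(2)   #=> 7
  #   remainer(14)  #=> 3
  #   remainer(40)  #=> 0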
end
|
lib/eqrcode/spec_table.ex
| 0.614394
| 0.656466
|
spec_table.ex
|
starcoder
|
defprotocol Contex.PlotContent do
@moduledoc """
Defines what a charting component needs to implement to be rendered within a `Contex.Plot`
"""
@doc """
  Generates SVG as a string or improper list of strings, *without* the containing <svg> element.
"""
def to_svg(plot, plot_options)
@doc """
  Generates SVG content for a legend appropriate for the plot content.
"""
def get_svg_legend(plot)
@doc """
Sets the size for the plot content. This is called after the main layout and margin calculations
are performed by the container plot.
"""
def set_size(plot, width, height)
end
defmodule Contex.Plot do
@moduledoc """
  Manages the layout of various plot elements, including titles, axis labels, and legends, and
  calculates appropriate margins depending on the options set.
"""
import Contex.SVG
alias __MODULE__
alias Contex.{Dataset, PlotContent}
defstruct [
:title,
:subtitle,
:x_label,
:y_label,
:height,
:width,
:plot_content,
:margins,
:plot_options,
default_style: true
]
@type t() :: %__MODULE__{}
@type plot_text() :: String.t() | nil
@type row() :: list() | tuple()
@default_plot_options [
show_x_axis: true,
show_y_axis: true,
legend_setting: :legend_none
]
@default_padding 10
@top_title_margin 20
@top_subtitle_margin 15
@y_axis_margin 20
@y_axis_tick_labels 70
@legend_width 100
@x_axis_margin 20
@x_axis_tick_labels 70
@default_style """
<style type="text/css"><![CDATA[
text {fill: black}
line {stroke: black}
]]></style>
"""
@doc """
Creates a new plot with specified dataset and plot type. Other plot attributes can be set via a
keyword list of options.
"""
@spec new(Contex.Dataset.t(), module(), integer(), integer(), keyword()) :: Contex.Plot.t()
def new(%Dataset{} = dataset, type, width, height, attrs \\ []) do
# TODO
# Seems like should just add new/3 to PlotContent protocol, but my efforts to do this failed.
plot_content = apply(type, :new, [dataset, attrs])
attributes =
Keyword.merge(@default_plot_options, attrs)
|> parse_attributes()
%Plot{
title: attributes.title,
subtitle: attributes.subtitle,
x_label: attributes.x_label,
y_label: attributes.y_label,
width: width,
height: height,
plot_content: plot_content,
plot_options: attributes.plot_options
}
|> calculate_margins()
end
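  # Hedged usage sketch (assumes Contex.BarChart and a two-column dataset):
  #
  #   dataset = Contex.Dataset.new([{"a", 1}, {"b", 2}], ["category", "value"])
  #   plot = Contex.Plot.new(dataset, Contex.BarChart, 600, 400, title: "Example")
  #   {:safe, svg_iodata} = Contex.Plot.to_svg(plot)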
@doc """
Creates a new plot with specified plot content.
"""
@spec new(integer(), integer(), Contex.PlotContent.t()) :: Contex.Plot.t()
def new(width, height, plot_content) do
plot_options = %{show_x_axis: true, show_y_axis: true, legend_setting: :legend_none}
%Plot{plot_content: plot_content, width: width, height: height, plot_options: plot_options}
|> calculate_margins()
end
@doc """
Replaces the plot dataset and updates the plot content. Accepts list of lists/tuples
representing the new data and a list of strings with new headers.
"""
@spec dataset(Contex.Plot.t(), list(row()), list(String.t())) :: Contex.Plot.t()
def dataset(%Plot{} = plot, data, headers) do
dataset = Dataset.new(data, headers)
plot_content = apply(plot.plot_content.__struct__, :new, [dataset])
%{plot | plot_content: plot_content}
end
@doc """
Replaces the plot dataset and updates the plot content. Accepts a dataset or a list of lists/tuples
representing the new data. The plot's dataset's original headers are preserved.
"""
@spec dataset(Contex.Plot.t(), Contex.Dataset.t() | list(row())) :: Contex.Plot.t()
def dataset(%Plot{} = plot, %Dataset{} = dataset) do
plot_content = apply(plot.plot_content.__struct__, :new, [dataset])
%{plot | plot_content: plot_content}
end
def dataset(%Plot{} = plot, data) do
dataset =
case plot.plot_content.dataset.headers do
nil ->
Dataset.new(data)
headers ->
Dataset.new(data, headers)
end
plot_content = apply(plot.plot_content.__struct__, :new, [dataset])
%{plot | plot_content: plot_content}
end
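  # Hedged example: swap in new rows while keeping the existing headers, as
  # handled by the clause above:
  #
  #   plot = Contex.Plot.dataset(plot, [{"c", 3}, {"d", 4}])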
@doc """
Updates attributes for the plot. Takes a keyword list of attributes, which can include both "plot options"
items passed individually as well as `:title`, `:subtitle`, `:x_label` and `:y_label`.
"""
@spec attributes(Contex.Plot.t(), keyword()) :: Contex.Plot.t()
def attributes(%Plot{} = plot, attrs) do
attributes_map = Enum.into(attrs, %{})
plot_options =
Map.merge(
plot.plot_options,
Map.take(attributes_map, [:show_x_axis, :show_y_axis, :legend_setting])
)
plot
|> Map.merge(
Map.take(attributes_map, [:title, :subtitle, :x_label, :y_label, :width, :height])
)
|> Map.put(:plot_options, plot_options)
|> calculate_margins()
end
@doc """
Updates plot options for the plot.
"""
def plot_options(%Plot{} = plot, new_plot_options) do
existing_plot_options = plot.plot_options
%{plot | plot_options: Map.merge(existing_plot_options, new_plot_options)}
|> calculate_margins()
end
@doc """
Sets the title and sub-title for the plot. Empty string or nil will remove the
title or sub-title
"""
@spec titles(Contex.Plot.t(), plot_text(), plot_text()) :: Contex.Plot.t()
def titles(%Plot{} = plot, title, subtitle) do
Plot.attributes(plot, title: title, subtitle: subtitle)
end
@doc """
Sets the x-axis & y-axis labels for the plot. Empty string or nil will remove them.
"""
@spec axis_labels(Contex.Plot.t(), plot_text(), plot_text()) :: Contex.Plot.t()
def axis_labels(%Plot{} = plot, x_label, y_label) do
Plot.attributes(plot, x_label: x_label, y_label: y_label)
end
@doc """
Updates the size for the plot
"""
@spec size(Contex.Plot.t(), integer(), integer()) :: Contex.Plot.t()
def size(%Plot{} = plot, width, height) do
Plot.attributes(plot, width: width, height: height)
end
@doc """
Generates SVG output marked as safe for the configured plot.
"""
def to_svg(%Plot{width: width, height: height, plot_content: plot_content} = plot) do
%{left: left, right: right, top: top, bottom: bottom} = plot.margins
content_height = height - (top + bottom)
content_width = width - (left + right)
# TODO: Legend calcs need to be redone if it can be displayed at the top
legend_left = left + content_width + @default_padding
legend_top = top + @default_padding
plot_content = PlotContent.set_size(plot_content, content_width, content_height)
output = [
      ~s|<svg version="1.1" xmlns="http://www.w3.org/2000/svg" |,
~s|xmlns:xlink="http://www.w3.org/1999/xlink" class="chart" |,
~s|viewBox="0 0 #{width} #{height}" role="img">|,
get_default_style(plot),
get_titles_svg(plot, content_width),
get_axis_labels_svg(plot, content_width, content_height),
~s|<g transform="translate(#{left},#{top})">|,
PlotContent.to_svg(plot_content, plot.plot_options),
"</g>",
get_svg_legend(plot_content, legend_left, legend_top, plot.plot_options),
"</svg>"
]
{:safe, output}
end
@doc """
Generates a complete XML document string.
"""
@spec to_xml(Contex.Plot.t()) :: iolist()
def to_xml(%Plot{} = plot) do
plot
|> Plot.to_svg()
|> elem(1)
|> List.insert_at(0, ~s|<?xml version="1.0" encoding="utf-8"?>|)
end
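  # Hedged example: `to_xml/1` returns an iolist, so flatten it when writing
  # to a file or string:
  #
  #   plot |> Contex.Plot.to_xml() |> IO.iodata_to_binary()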
defp get_default_style(%Plot{} = plot) do
if plot.default_style, do: @default_style, else: ""
end
defp get_svg_legend(plot_content, legend_left, legend_top, %{legend_setting: :legend_right}) do
[
~s|<g transform="translate(#{legend_left}, #{legend_top})">|,
PlotContent.get_svg_legend(plot_content),
"</g>"
]
end
defp get_svg_legend(_plot_content, _legend_left, _legend_top, _opts), do: ""
defp get_titles_svg(
%Plot{title: title, subtitle: subtitle, margins: margins} = _plot,
content_width
)
when is_binary(title) or is_binary(subtitle) do
centre = margins.left + content_width / 2.0
title_y = @top_title_margin
title_svg =
case is_non_empty_string(title) do
true ->
text(centre, title_y, title, class: "exc-title", text_anchor: "middle")
_ ->
""
end
subtitle_y =
case is_non_empty_string(title) do
true -> @top_subtitle_margin + @top_title_margin
_ -> @top_subtitle_margin
end
subtitle_svg =
case is_non_empty_string(subtitle) do
true ->
text(centre, subtitle_y, subtitle, class: "exc-subtitle", text_anchor: "middle")
_ ->
""
end
[title_svg, subtitle_svg]
end
defp get_titles_svg(_, _), do: ""
defp get_axis_labels_svg(
%Plot{x_label: x_label, y_label: y_label, margins: margins} = _plot,
content_width,
content_height
)
when is_binary(x_label) or is_binary(y_label) do
x_label_x = margins.left + content_width / 2.0
x_label_y = margins.top + content_height + @x_axis_tick_labels
# -90 rotation screws with coordinates
y_label_x = -1.0 * (margins.top + content_height / 2.0)
y_label_y = @y_axis_margin
x_label_svg =
case is_non_empty_string(x_label) do
true ->
text(x_label_x, x_label_y, x_label, class: "exc-subtitle", text_anchor: "middle")
_ ->
""
end
y_label_svg =
case is_non_empty_string(y_label) do
true ->
text(y_label_x, y_label_y, y_label,
class: "exc-subtitle",
text_anchor: "middle",
transform: "rotate(-90)"
)
false ->
""
end
[x_label_svg, y_label_svg]
end
defp get_axis_labels_svg(_, _, _), do: ""
defp parse_attributes(attrs) do
%{
title: Keyword.get(attrs, :title),
subtitle: Keyword.get(attrs, :subtitle),
x_label: Keyword.get(attrs, :x_label),
y_label: Keyword.get(attrs, :y_label),
plot_options:
Enum.into(Keyword.take(attrs, [:show_x_axis, :show_y_axis, :legend_setting]), %{})
}
end
defp calculate_margins(%Plot{} = plot) do
left = Map.get(plot.plot_options, :left_margin, calculate_left_margin(plot))
top = Map.get(plot.plot_options, :top_margin, calculate_top_margin(plot))
right = Map.get(plot.plot_options, :right_margin, calculate_right_margin(plot))
bottom = Map.get(plot.plot_options, :bottom_margin, calculate_bottom_margin(plot))
margins = %{left: left, top: top, right: right, bottom: bottom}
%{plot | margins: margins}
end
defp calculate_left_margin(%Plot{} = plot) do
margin = 0
margin = margin + if plot.plot_options.show_y_axis, do: @y_axis_tick_labels, else: 0
margin = margin + if is_non_empty_string(plot.y_label), do: @y_axis_margin, else: 0
margin
end
defp calculate_right_margin(%Plot{} = plot) do
margin = @default_padding
margin =
margin + if plot.plot_options.legend_setting == :legend_right, do: @legend_width, else: 0
margin
end
defp calculate_bottom_margin(%Plot{} = plot) do
margin = 0
margin = margin + if plot.plot_options.show_x_axis, do: @x_axis_tick_labels, else: 0
margin = margin + if is_non_empty_string(plot.x_label), do: @x_axis_margin, else: 0
margin
end
defp calculate_top_margin(%Plot{} = plot) do
margin = @default_padding
margin =
margin +
if is_non_empty_string(plot.title), do: @top_title_margin + @default_padding, else: 0
margin = margin + if is_non_empty_string(plot.subtitle), do: @top_subtitle_margin, else: 0
margin
end
defp is_non_empty_string(val) when is_nil(val), do: false
defp is_non_empty_string(val) when val == "", do: false
defp is_non_empty_string(val) when is_binary(val), do: true
defp is_non_empty_string(_), do: false
end
# TODO: Probably move to appropriate module files...
defimpl Contex.PlotContent, for: Contex.BarChart do
def to_svg(plot, options), do: Contex.BarChart.to_svg(plot, options)
def get_svg_legend(plot), do: Contex.BarChart.get_svg_legend(plot)
def set_size(plot, width, height), do: Contex.BarChart.set_size(plot, width, height)
end
defimpl Contex.PlotContent, for: Contex.PointPlot do
def to_svg(plot, _options), do: Contex.PointPlot.to_svg(plot)
def get_svg_legend(plot), do: Contex.PointPlot.get_svg_legend(plot)
def set_size(plot, width, height), do: Contex.PointPlot.set_size(plot, width, height)
end
defimpl Contex.PlotContent, for: Contex.LinePlot do
def to_svg(plot, _options), do: Contex.LinePlot.to_svg(plot)
def get_svg_legend(plot), do: Contex.LinePlot.get_svg_legend(plot)
def set_size(plot, width, height), do: Contex.LinePlot.set_size(plot, width, height)
end
defimpl Contex.PlotContent, for: Contex.GanttChart do
def to_svg(plot, options), do: Contex.GanttChart.to_svg(plot, options)
# Contex.PointPlot.get_legend_svg(plot)
def get_svg_legend(_plot), do: ""
def set_size(plot, width, height), do: Contex.GanttChart.set_size(plot, width, height)
end
defimpl Contex.PlotContent, for: Contex.PieChart do
def to_svg(plot, _options), do: Contex.PieChart.to_svg(plot)
def get_svg_legend(plot), do: Contex.PieChart.get_svg_legend(plot)
def set_size(plot, width, height), do: Contex.PieChart.set_size(plot, width, height)
end
|
lib/chart/plot.ex
| 0.872998
| 0.733535
|
plot.ex
|
starcoder
|
defmodule AWS.ServiceCatalog do
@moduledoc """
AWS Service Catalog
[AWS Service Catalog](https://aws.amazon.com/servicecatalog/) enables organizations to create and manage catalogs of IT services that are approved for
use on AWS.
To get the most out of this documentation, you should be familiar with the
terminology discussed in [AWS Service Catalog
Concepts](http://docs.aws.amazon.com/servicecatalog/latest/adminguide/what-is_concepts.html).
"""
@doc """
Accepts an offer to share the specified portfolio.
"""
def accept_portfolio_share(client, input, options \\ []) do
request(client, "AcceptPortfolioShare", input, options)
end
@doc """
Associates the specified budget with the specified resource.
"""
def associate_budget_with_resource(client, input, options \\ []) do
request(client, "AssociateBudgetWithResource", input, options)
end
@doc """
Associates the specified principal ARN with the specified portfolio.
"""
def associate_principal_with_portfolio(client, input, options \\ []) do
request(client, "AssociatePrincipalWithPortfolio", input, options)
end
@doc """
Associates the specified product with the specified portfolio.
A delegated admin is authorized to invoke this command.
"""
def associate_product_with_portfolio(client, input, options \\ []) do
request(client, "AssociateProductWithPortfolio", input, options)
end
@doc """
Associates a self-service action with a provisioning artifact.
"""
def associate_service_action_with_provisioning_artifact(client, input, options \\ []) do
request(client, "AssociateServiceActionWithProvisioningArtifact", input, options)
end
@doc """
Associate the specified TagOption with the specified portfolio or product.
"""
def associate_tag_option_with_resource(client, input, options \\ []) do
request(client, "AssociateTagOptionWithResource", input, options)
end
@doc """
Associates multiple self-service actions with provisioning artifacts.
"""
def batch_associate_service_action_with_provisioning_artifact(client, input, options \\ []) do
request(client, "BatchAssociateServiceActionWithProvisioningArtifact", input, options)
end
@doc """
Disassociates a batch of self-service actions from the specified provisioning
artifact.
"""
def batch_disassociate_service_action_from_provisioning_artifact(client, input, options \\ []) do
request(client, "BatchDisassociateServiceActionFromProvisioningArtifact", input, options)
end
@doc """
Copies the specified source product to the specified target product or a new
product.
You can copy a product to the same account or another account. You can copy a
product to the same region or another region.
This operation is performed asynchronously. To track the progress of the
operation, use `DescribeCopyProductStatus`.
"""
def copy_product(client, input, options \\ []) do
request(client, "CopyProduct", input, options)
end
@doc """
Creates a constraint.
A delegated admin is authorized to invoke this command.
"""
def create_constraint(client, input, options \\ []) do
request(client, "CreateConstraint", input, options)
end
@doc """
Creates a portfolio.
A delegated admin is authorized to invoke this command.
"""
def create_portfolio(client, input, options \\ []) do
request(client, "CreatePortfolio", input, options)
end
@doc """
Shares the specified portfolio with the specified account or organization node.
Shares to an organization node can only be created by the management account of
an organization or by a delegated administrator. You can share portfolios to an
organization, an organizational unit, or a specific account.
Note that if a delegated admin is de-registered, they can no longer create
portfolio shares.
`AWSOrganizationsAccess` must be enabled in order to create a portfolio share to
an organization node.
You can't share a shared resource. This includes portfolios that contain a
shared product.
"""
def create_portfolio_share(client, input, options \\ []) do
request(client, "CreatePortfolioShare", input, options)
end
@doc """
Creates a product.
A delegated admin is authorized to invoke this command.
"""
def create_product(client, input, options \\ []) do
request(client, "CreateProduct", input, options)
end
@doc """
Creates a plan.
A plan includes the list of resources to be created (when provisioning a new
product) or modified (when updating a provisioned product) when the plan is
executed.
You can create one plan per provisioned product. To create a plan for an
  existing provisioned product, the product status must be AVAILABLE or TAINTED.
To view the resource changes in the change set, use
`DescribeProvisionedProductPlan`. To create or modify the provisioned product,
use `ExecuteProvisionedProductPlan`.
"""
def create_provisioned_product_plan(client, input, options \\ []) do
request(client, "CreateProvisionedProductPlan", input, options)
end
@doc """
Creates a provisioning artifact (also known as a version) for the specified
product.
You cannot create a provisioning artifact for a product that was shared with
you.
"""
def create_provisioning_artifact(client, input, options \\ []) do
request(client, "CreateProvisioningArtifact", input, options)
end
@doc """
Creates a self-service action.
"""
def create_service_action(client, input, options \\ []) do
request(client, "CreateServiceAction", input, options)
end
@doc """
Creates a TagOption.
"""
def create_tag_option(client, input, options \\ []) do
request(client, "CreateTagOption", input, options)
end
@doc """
Deletes the specified constraint.
A delegated admin is authorized to invoke this command.
"""
def delete_constraint(client, input, options \\ []) do
request(client, "DeleteConstraint", input, options)
end
@doc """
Deletes the specified portfolio.
You cannot delete a portfolio if it was shared with you or if it has associated
products, users, constraints, or shared accounts.
A delegated admin is authorized to invoke this command.
"""
def delete_portfolio(client, input, options \\ []) do
request(client, "DeletePortfolio", input, options)
end
@doc """
Stops sharing the specified portfolio with the specified account or organization
node.
Shares to an organization node can only be deleted by the management account of
an organization or by a delegated administrator.
Note that if a delegated admin is de-registered, portfolio shares created from
that account are removed.
"""
def delete_portfolio_share(client, input, options \\ []) do
request(client, "DeletePortfolioShare", input, options)
end
@doc """
Deletes the specified product.
You cannot delete a product if it was shared with you or is associated with a
portfolio.
A delegated admin is authorized to invoke this command.
"""
def delete_product(client, input, options \\ []) do
request(client, "DeleteProduct", input, options)
end
@doc """
Deletes the specified plan.
"""
def delete_provisioned_product_plan(client, input, options \\ []) do
request(client, "DeleteProvisionedProductPlan", input, options)
end
@doc """
Deletes the specified provisioning artifact (also known as a version) for the
specified product.
You cannot delete a provisioning artifact associated with a product that was
shared with you. You cannot delete the last provisioning artifact for a product,
because a product must have at least one provisioning artifact.
"""
def delete_provisioning_artifact(client, input, options \\ []) do
request(client, "DeleteProvisioningArtifact", input, options)
end
@doc """
Deletes a self-service action.
"""
def delete_service_action(client, input, options \\ []) do
request(client, "DeleteServiceAction", input, options)
end
@doc """
Deletes the specified TagOption.
You cannot delete a TagOption if it is associated with a product or portfolio.
"""
def delete_tag_option(client, input, options \\ []) do
request(client, "DeleteTagOption", input, options)
end
@doc """
Gets information about the specified constraint.
"""
def describe_constraint(client, input, options \\ []) do
request(client, "DescribeConstraint", input, options)
end
@doc """
Gets the status of the specified copy product operation.
"""
def describe_copy_product_status(client, input, options \\ []) do
request(client, "DescribeCopyProductStatus", input, options)
end
@doc """
Gets information about the specified portfolio.
A delegated admin is authorized to invoke this command.
"""
def describe_portfolio(client, input, options \\ []) do
request(client, "DescribePortfolio", input, options)
end
@doc """
Gets the status of the specified portfolio share operation.
This API can only be called by the management account in the organization or by
a delegated admin.
"""
def describe_portfolio_share_status(client, input, options \\ []) do
request(client, "DescribePortfolioShareStatus", input, options)
end
@doc """
Gets information about the specified product.
"""
def describe_product(client, input, options \\ []) do
request(client, "DescribeProduct", input, options)
end
@doc """
Gets information about the specified product.
This operation is run with administrator access.
"""
def describe_product_as_admin(client, input, options \\ []) do
request(client, "DescribeProductAsAdmin", input, options)
end
@doc """
Gets information about the specified product.
"""
def describe_product_view(client, input, options \\ []) do
request(client, "DescribeProductView", input, options)
end
@doc """
Gets information about the specified provisioned product.
"""
def describe_provisioned_product(client, input, options \\ []) do
request(client, "DescribeProvisionedProduct", input, options)
end
@doc """
Gets information about the resource changes for the specified plan.
"""
def describe_provisioned_product_plan(client, input, options \\ []) do
request(client, "DescribeProvisionedProductPlan", input, options)
end
@doc """
Gets information about the specified provisioning artifact (also known as a
version) for the specified product.
"""
def describe_provisioning_artifact(client, input, options \\ []) do
request(client, "DescribeProvisioningArtifact", input, options)
end
@doc """
Gets information about the configuration required to provision the specified
product using the specified provisioning artifact.
If the output contains a TagOption key with an empty list of values, there is a
TagOption conflict for that key. The end user cannot take action to fix the
conflict, and launch is not blocked. In subsequent calls to `ProvisionProduct`,
do not include conflicted TagOption keys as tags, or this causes the error
"Parameter validation failed: Missing required parameter in Tags[*N*]:*Value*".
Tag the provisioned product with the value
`sc-tagoption-conflict-portfolioId-productId`.
"""
def describe_provisioning_parameters(client, input, options \\ []) do
request(client, "DescribeProvisioningParameters", input, options)
end
@doc """
Gets information about the specified request operation.
Use this operation after calling a request operation (for example,
`ProvisionProduct`, `TerminateProvisionedProduct`, or
`UpdateProvisionedProduct`).
If a provisioned product was transferred to a new owner using
`UpdateProvisionedProductProperties`, the new owner will be able to describe all
  past records for that product. The previous owner will no longer be able to
  describe the records, but will still be able to use `ListRecordHistory` to see
  the product's history from when they owned it.
"""
def describe_record(client, input, options \\ []) do
request(client, "DescribeRecord", input, options)
end
@doc """
Describes a self-service action.
"""
def describe_service_action(client, input, options \\ []) do
request(client, "DescribeServiceAction", input, options)
end
@doc """
Finds the default parameters for a specific self-service action on a specific
provisioned product and returns a map of the results to the user.
"""
def describe_service_action_execution_parameters(client, input, options \\ []) do
request(client, "DescribeServiceActionExecutionParameters", input, options)
end
@doc """
Gets information about the specified TagOption.
"""
def describe_tag_option(client, input, options \\ []) do
request(client, "DescribeTagOption", input, options)
end
@doc """
  Disables the portfolio sharing feature through AWS Organizations.
This feature will not delete your current shares but it will prevent you from
creating new shares throughout your organization. Current shares will not be in
sync with your organization structure if it changes after calling this API. This
API can only be called by the management account in the organization.
This API can't be invoked if there are active delegated administrators in the
organization.
Note that a delegated administrator is not authorized to invoke
`DisableAWSOrganizationsAccess`.
"""
def disable_a_w_s_organizations_access(client, input, options \\ []) do
request(client, "DisableAWSOrganizationsAccess", input, options)
end
@doc """
Disassociates the specified budget from the specified resource.
"""
def disassociate_budget_from_resource(client, input, options \\ []) do
request(client, "DisassociateBudgetFromResource", input, options)
end
@doc """
Disassociates a previously associated principal ARN from a specified portfolio.
"""
def disassociate_principal_from_portfolio(client, input, options \\ []) do
request(client, "DisassociatePrincipalFromPortfolio", input, options)
end
@doc """
Disassociates the specified product from the specified portfolio.
A delegated admin is authorized to invoke this command.
"""
def disassociate_product_from_portfolio(client, input, options \\ []) do
request(client, "DisassociateProductFromPortfolio", input, options)
end
@doc """
Disassociates the specified self-service action association from the specified
provisioning artifact.
"""
def disassociate_service_action_from_provisioning_artifact(client, input, options \\ []) do
request(client, "DisassociateServiceActionFromProvisioningArtifact", input, options)
end
@doc """
Disassociates the specified TagOption from the specified resource.
"""
def disassociate_tag_option_from_resource(client, input, options \\ []) do
request(client, "DisassociateTagOptionFromResource", input, options)
end
@doc """
  Enables the portfolio sharing feature through AWS Organizations.
  This API will allow Service Catalog to receive updates on your organization in
  order to sync your shares with the current structure. This API can only be
  called by the management account in the organization.
  By calling this API, Service Catalog will call
  organizations:EnableAWSServiceAccess on your behalf so that your shares stay
  in sync with any changes in your AWS Organizations structure.
Note that a delegated administrator is not authorized to invoke
`EnableAWSOrganizationsAccess`.
"""
def enable_a_w_s_organizations_access(client, input, options \\ []) do
request(client, "EnableAWSOrganizationsAccess", input, options)
end
@doc """
Provisions or modifies a product based on the resource changes for the specified
plan.
"""
def execute_provisioned_product_plan(client, input, options \\ []) do
request(client, "ExecuteProvisionedProductPlan", input, options)
end
@doc """
Executes a self-service action against a provisioned product.
"""
def execute_provisioned_product_service_action(client, input, options \\ []) do
request(client, "ExecuteProvisionedProductServiceAction", input, options)
end
@doc """
Get the Access Status for AWS Organization portfolio share feature.
This API can only be called by the management account in the organization or by
a delegated admin.
"""
def get_a_w_s_organizations_access_status(client, input, options \\ []) do
request(client, "GetAWSOrganizationsAccessStatus", input, options)
end
@doc """
  This API takes either a `ProvisionedProductId` or a `ProvisionedProductName`,
along with a list of one or more output keys, and responds with the key/value
pairs of those outputs.
"""
def get_provisioned_product_outputs(client, input, options \\ []) do
request(client, "GetProvisionedProductOutputs", input, options)
end
@doc """
Lists all portfolios for which sharing was accepted by this account.
"""
def list_accepted_portfolio_shares(client, input, options \\ []) do
request(client, "ListAcceptedPortfolioShares", input, options)
end
@doc """
Lists all the budgets associated to the specified resource.
"""
def list_budgets_for_resource(client, input, options \\ []) do
request(client, "ListBudgetsForResource", input, options)
end
@doc """
Lists the constraints for the specified portfolio and product.
"""
def list_constraints_for_portfolio(client, input, options \\ []) do
request(client, "ListConstraintsForPortfolio", input, options)
end
@doc """
Lists the paths to the specified product.
A path is how the user has access to a specified product, and is necessary when
provisioning a product. A path also determines the constraints put on the
product.
"""
def list_launch_paths(client, input, options \\ []) do
request(client, "ListLaunchPaths", input, options)
end
@doc """
Lists the organization nodes that have access to the specified portfolio.
This API can only be called by the management account in the organization or by
a delegated admin.
If a delegated admin is de-registered, they can no longer perform this
operation.
"""
def list_organization_portfolio_access(client, input, options \\ []) do
request(client, "ListOrganizationPortfolioAccess", input, options)
end
@doc """
Lists the account IDs that have access to the specified portfolio.
A delegated admin can list the accounts that have access to the shared
portfolio. Note that if a delegated admin is de-registered, they can no longer
perform this operation.
"""
def list_portfolio_access(client, input, options \\ []) do
request(client, "ListPortfolioAccess", input, options)
end
@doc """
Lists all portfolios in the catalog.
"""
def list_portfolios(client, input, options \\ []) do
request(client, "ListPortfolios", input, options)
end
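  # Hedged usage sketch (client construction depends on your aws-elixir
  # version; `AWS.Client.create/3` is assumed here):
  #
  #   client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
  #   {:ok, result, _response} = AWS.ServiceCatalog.list_portfolios(client, %{})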
@doc """
Lists all portfolios that the specified product is associated with.
"""
def list_portfolios_for_product(client, input, options \\ []) do
request(client, "ListPortfoliosForProduct", input, options)
end
@doc """
Lists all principal ARNs associated with the specified portfolio.
"""
def list_principals_for_portfolio(client, input, options \\ []) do
request(client, "ListPrincipalsForPortfolio", input, options)
end
@doc """
Lists the plans for the specified provisioned product or all plans to which the
user has access.
"""
def list_provisioned_product_plans(client, input, options \\ []) do
request(client, "ListProvisionedProductPlans", input, options)
end
@doc """
Lists all provisioning artifacts (also known as versions) for the specified
product.
"""
def list_provisioning_artifacts(client, input, options \\ []) do
request(client, "ListProvisioningArtifacts", input, options)
end
@doc """
Lists all provisioning artifacts (also known as versions) for the specified
self-service action.
"""
def list_provisioning_artifacts_for_service_action(client, input, options \\ []) do
request(client, "ListProvisioningArtifactsForServiceAction", input, options)
end
@doc """
Lists the specified requests or all performed requests.
"""
def list_record_history(client, input, options \\ []) do
request(client, "ListRecordHistory", input, options)
end
@doc """
Lists the resources associated with the specified TagOption.
"""
def list_resources_for_tag_option(client, input, options \\ []) do
request(client, "ListResourcesForTagOption", input, options)
end
@doc """
Lists all self-service actions.
"""
def list_service_actions(client, input, options \\ []) do
request(client, "ListServiceActions", input, options)
end
@doc """
Returns a paginated list of self-service actions associated with the specified
Product ID and Provisioning Artifact ID.
"""
def list_service_actions_for_provisioning_artifact(client, input, options \\ []) do
request(client, "ListServiceActionsForProvisioningArtifact", input, options)
end
@doc """
Returns summary information about stack instances that are associated with the
specified `CFN_STACKSET` type provisioned product.
You can filter for stack instances that are associated with a specific AWS
account name or region.
"""
def list_stack_instances_for_provisioned_product(client, input, options \\ []) do
request(client, "ListStackInstancesForProvisionedProduct", input, options)
end
@doc """
Lists the specified TagOptions or all TagOptions.
"""
def list_tag_options(client, input, options \\ []) do
request(client, "ListTagOptions", input, options)
end
@doc """
Provisions the specified product.
A provisioned product is a resourced instance of a product. For example,
provisioning a product based on a CloudFormation template launches a
CloudFormation stack and its underlying resources. You can check the status of
this request using `DescribeRecord`.
If the request contains a tag key with an empty list of values, there is a tag
conflict for that key. Do not include conflicted keys as tags, or this causes
the error "Parameter validation failed: Missing required parameter in
Tags[*N*]:*Value*".
"""
def provision_product(client, input, options \\ []) do
request(client, "ProvisionProduct", input, options)
end
@doc """
Rejects an offer to share the specified portfolio.
"""
def reject_portfolio_share(client, input, options \\ []) do
request(client, "RejectPortfolioShare", input, options)
end
@doc """
Lists the provisioned products that are available (not terminated).
To use additional filtering, see `SearchProvisionedProducts`.
"""
def scan_provisioned_products(client, input, options \\ []) do
request(client, "ScanProvisionedProducts", input, options)
end
@doc """
Gets information about the products to which the caller has access.
"""
def search_products(client, input, options \\ []) do
request(client, "SearchProducts", input, options)
end
@doc """
Gets information about the products for the specified portfolio or all products.
"""
def search_products_as_admin(client, input, options \\ []) do
request(client, "SearchProductsAsAdmin", input, options)
end
@doc """
Gets information about the provisioned products that meet the specified
criteria.
"""
def search_provisioned_products(client, input, options \\ []) do
request(client, "SearchProvisionedProducts", input, options)
end
@doc """
Terminates the specified provisioned product.
This operation does not delete any records associated with the provisioned
product.
You can check the status of this request using `DescribeRecord`.
"""
def terminate_provisioned_product(client, input, options \\ []) do
request(client, "TerminateProvisionedProduct", input, options)
end
@doc """
Updates the specified constraint.
"""
def update_constraint(client, input, options \\ []) do
request(client, "UpdateConstraint", input, options)
end
@doc """
Updates the specified portfolio.
You cannot update a product that was shared with you.
"""
def update_portfolio(client, input, options \\ []) do
request(client, "UpdatePortfolio", input, options)
end
@doc """
Updates the specified product.
"""
def update_product(client, input, options \\ []) do
request(client, "UpdateProduct", input, options)
end
@doc """
Requests updates to the configuration of the specified provisioned product.
If there are tags associated with the object, they cannot be updated or added.
Depending on the specific updates requested, this operation can update with no
interruption, with some interruption, or replace the provisioned product
entirely.
You can check the status of this request using `DescribeRecord`.
"""
def update_provisioned_product(client, input, options \\ []) do
request(client, "UpdateProvisionedProduct", input, options)
end
@doc """
Requests updates to the properties of the specified provisioned product.
"""
def update_provisioned_product_properties(client, input, options \\ []) do
request(client, "UpdateProvisionedProductProperties", input, options)
end
@doc """
Updates the specified provisioning artifact (also known as a version) for the
specified product.
You cannot update a provisioning artifact for a product that was shared with
you.
"""
def update_provisioning_artifact(client, input, options \\ []) do
request(client, "UpdateProvisioningArtifact", input, options)
end
@doc """
Updates a self-service action.
"""
def update_service_action(client, input, options \\ []) do
request(client, "UpdateServiceAction", input, options)
end
@doc """
Updates the specified TagOption.
"""
def update_tag_option(client, input, options \\ []) do
request(client, "UpdateTagOption", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "servicecatalog"}
host = build_host("servicecatalog", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AWS242ServiceCatalogService.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/service_catalog.ex
| 0.876727
| 0.474936
|
service_catalog.ex
|
starcoder
|
defmodule Conduit.Message do
@moduledoc """
The Conduit message.
This module defines a `Conduit.Message` struct and the main functions
for working with Conduit messages.
Note this struct is used for sending and receiving messages from a
message queue.
## Public fields
These fields are for you to use in your application. The values in
`user_id`, `correlation_id`, `message_id`, `content_type`,
`content_encoding`, `created_by`, `created_at`, `headers`, and
`status` may have special meaning based on the adapter you use.
  See your adapter's documentation to understand how to use them correctly.
* `source` - For incoming messages, this will be set to the queue the message was
consumed from.
* `destination` - For outgoing messages, this will be set to the destination queue (or
routing key) it is published to.
* `user_id` - An ID representing which user the message pertains to.
* `correlation_id` - An ID for a chain of messages, where the current message is one in
that chain.
* `message_id` - A unique ID for this message.
* `content_type` - The media type of the message body.
* `content_encoding` - The encoding of the message body.
* `created_by` - The name of the app that created the message.
* `created_at` - A timestamp or epoch representing when the message was created.
* `headers` - Information applicable to a specific message stored as a keyword list.
* `body` - The contents of the message.
* `status` - The operation to perform on the message. This only applies to messages
that are being received.
## Private fields
These fields are reserved for library/framework usage.
* `private` - shared library data as a map
"""
@type source :: binary | fun | nil
@type destination :: binary | fun | nil
@type user_id :: binary | integer | fun | nil
@type correlation_id :: binary | integer | fun | nil
@type message_id :: binary | integer | fun | nil
@type content_type :: String.t() | fun | nil
@type content_encoding :: String.t() | fun | nil
@type created_by :: binary | fun | nil
@type created_at :: String.t() | integer | fun | nil
@type headers :: %{String.t() => any}
@type body :: any
@type status :: :ack | :nack
@type assigns :: %{atom => any}
@type private :: %{atom => any}
@type t :: %__MODULE__{
source: source,
destination: destination,
user_id: user_id,
correlation_id: correlation_id,
message_id: message_id,
content_type: content_type,
content_encoding: content_encoding,
created_by: created_by,
created_at: created_at,
headers: headers,
body: body,
status: status,
assigns: assigns,
private: private
}
defstruct source: nil,
destination: nil,
user_id: nil,
correlation_id: nil,
message_id: nil,
content_type: nil,
content_encoding: nil,
created_by: nil,
created_at: nil,
headers: %{},
body: nil,
status: :ack,
assigns: %{},
private: %{}
@doc """
Creates a new message with the fields and headers specified.
## Examples
iex> import Conduit.Message
iex> old_message =
iex> %Conduit.Message{}
iex> |> put_correlation_id("123")
iex> |> put_header("retries", 1)
iex> new_message = Conduit.Message.take(old_message,
iex> headers: ["retries"], fields: [:correlation_id])
iex> new_message.correlation_id
"123"
iex> get_header(new_message, "retries")
1
"""
@spec take(from :: __MODULE__.t(), opts :: [fields: [atom], headers: [String.t()]]) :: __MODULE__.t()
def take(from, opts) do
%__MODULE__{}
|> merge_fields(from, Keyword.get(opts, :fields, []))
|> merge_headers(from, Keyword.get(opts, :headers, []))
end
@allowed_fields [
:source,
:destination,
:user_id,
:correlation_id,
:message_id,
:content_type,
:content_encoding,
:created_by,
:created_at,
:status
]
@doc """
Merges fields to one message from another.
## Examples
iex> import Conduit.Message
iex> old_message = put_correlation_id(%Conduit.Message{}, "123")
iex> new_message = Conduit.Message.merge_fields(%Conduit.Message{}, old_message)
iex> new_message.correlation_id
"123"
iex> new_message = Conduit.Message.merge_fields(%Conduit.Message{}, old_message, [:correlation_id])
iex> new_message.correlation_id
"123"
"""
@spec merge_fields(to :: __MODULE__.t(), from :: __MODULE__.t(), fields :: [atom]) :: __MODULE__.t()
def merge_fields(%__MODULE__{} = to, %__MODULE__{} = from, fields \\ @allowed_fields) do
fields =
@allowed_fields
|> MapSet.new()
|> MapSet.intersection(MapSet.new(fields))
|> MapSet.to_list()
Map.merge(to, Map.take(from, fields))
end
@doc """
Merges headers to one message from another.
## Examples
iex> import Conduit.Message
iex> old_message = put_header(%Conduit.Message{}, "retries", 1)
iex> new_message = Conduit.Message.merge_headers(%Conduit.Message{}, old_message, ["retries"])
iex> get_header(new_message, "retries")
1
"""
@spec merge_headers(to :: __MODULE__.t(), from :: __MODULE__.t(), headers :: [String.t()]) :: __MODULE__.t()
def merge_headers(%__MODULE__{} = to, %__MODULE__{} = from, headers) do
headers = Map.take(from.headers, headers)
%{to | headers: Map.merge(to.headers, headers)}
end
@doc """
Assigns the source of the message.
## Examples
iex> import Conduit.Message
iex> message =
iex> %Conduit.Message{}
iex> |> put_source("my.queue")
iex> |> put_header("routing_key", "my.routing_key")
iex> message.source
"my.queue"
iex> message = put_source(message, fn mess ->
iex> get_header(mess, "routing_key")
iex> end)
iex> message.source
"my.routing_key"
"""
@spec put_source(__MODULE__.t(), source) :: __MODULE__.t()
def put_source(%__MODULE__{} = message, source) when is_function(source) do
put_source(message, call_fun(source, message))
end
def put_source(%__MODULE__{} = message, source) do
%{message | source: source}
end
@doc """
Assigns a source to the message when one isn't set already.
## Examples
iex> import Conduit.Message
iex> message = put_new_source(%Conduit.Message{}, "my.queue")
iex> message = put_new_source(message, "your.queue")
iex> message.source
"my.queue"
iex> message = put_new_source(%Conduit.Message{}, fn _mess -> "my.queue" end)
iex> message = put_new_source(message, fn _mess -> "your.queue" end)
iex> message.source
"my.queue"
"""
@spec put_new_source(__MODULE__.t(), source) :: __MODULE__.t()
def put_new_source(%__MODULE__{source: nil} = message, source) do
put_source(message, source)
end
def put_new_source(%__MODULE__{} = message, _) do
message
end
@doc """
Assigns the destination of the message.
## Examples
iex> import Conduit.Message
iex> message =
iex> %Conduit.Message{}
iex> |> put_source("over.there")
iex> |> put_destination("my.queue")
iex> message.destination
"my.queue"
iex> message = put_destination(message, fn mess -> mess.source <> ".error" end)
iex> message.destination
"over.there.error"
"""
@spec put_destination(__MODULE__.t(), destination) :: __MODULE__.t()
def put_destination(%__MODULE__{} = message, destination) when is_function(destination) do
put_destination(message, call_fun(destination, message))
end
def put_destination(%__MODULE__{} = message, destination) do
%{message | destination: destination}
end
@doc """
Assigns a destination to the message when one isn't set already.
## Examples
iex> import Conduit.Message
iex> message = put_new_destination(%Conduit.Message{}, "your.queue")
iex> message = put_new_destination(message, "my.queue")
iex> message.destination
"your.queue"
iex> message = put_new_destination(%Conduit.Message{}, fn _mess -> "your.queue" end)
iex> message = put_new_destination(message, fn _mess -> "my.queue" end)
iex> message.destination
"your.queue"
"""
@spec put_new_destination(__MODULE__.t(), destination) :: __MODULE__.t()
def put_new_destination(%__MODULE__{destination: nil} = message, destination) do
put_destination(message, destination)
end
def put_new_destination(%__MODULE__{} = message, _) do
message
end
@doc """
Assigns a user_id to the message.
## Examples
iex> import Conduit.Message
iex> message = put_user_id(%Conduit.Message{}, 1)
iex> message.user_id
1
iex> message = put_user_id(message, fn _mess -> 2 end)
iex> message.user_id
2
"""
@spec put_user_id(__MODULE__.t(), user_id) :: __MODULE__.t()
def put_user_id(%__MODULE__{} = message, user_id) when is_function(user_id) do
put_user_id(message, call_fun(user_id, message))
end
def put_user_id(%__MODULE__{} = message, user_id) do
%{message | user_id: user_id}
end
@doc """
Assigns a correlation_id to the message.
## Examples
iex> import Conduit.Message
iex> message = put_correlation_id(%Conduit.Message{}, 1)
iex> message.correlation_id
1
iex> message = put_correlation_id(message, fn _mess -> 2 end)
iex> message.correlation_id
2
"""
@spec put_correlation_id(__MODULE__.t(), correlation_id) :: __MODULE__.t()
def put_correlation_id(%__MODULE__{} = message, correlation_id)
when is_function(correlation_id) do
put_correlation_id(message, call_fun(correlation_id, message))
end
def put_correlation_id(%__MODULE__{} = message, correlation_id) do
%{message | correlation_id: correlation_id}
end
@doc """
Assigns a correlation_id to the message when one isn't set already.
## Examples
iex> import Conduit.Message
iex> message = put_new_correlation_id(%Conduit.Message{}, 1)
iex> message = put_new_correlation_id(message, 2)
iex> message.correlation_id
1
iex> message = put_new_correlation_id(%Conduit.Message{}, fn _mess -> 1 end)
iex> message = put_new_correlation_id(message, fn _mess -> 2 end)
iex> message.correlation_id
1
"""
@spec put_new_correlation_id(__MODULE__.t(), correlation_id) :: __MODULE__.t()
def put_new_correlation_id(%__MODULE__{correlation_id: nil} = message, correlation_id) do
put_correlation_id(message, correlation_id)
end
def put_new_correlation_id(%__MODULE__{} = message, _) do
message
end
@doc """
Assigns a message_id to the message.
## Examples
iex> import Conduit.Message
iex> message = put_message_id(%Conduit.Message{}, 1)
iex> message.message_id
1
iex> message = put_message_id(%Conduit.Message{}, fn _mess -> 1 end)
iex> message.message_id
1
"""
@spec put_message_id(__MODULE__.t(), message_id) :: __MODULE__.t()
def put_message_id(%__MODULE__{} = message, message_id) when is_function(message_id) do
put_message_id(message, call_fun(message_id, message))
end
def put_message_id(%__MODULE__{} = message, message_id) do
%{message | message_id: message_id}
end
@doc """
Assigns a message_id to the message when one isn't set already.
## Examples
iex> import Conduit.Message
iex> message = put_new_message_id(%Conduit.Message{}, 1)
iex> message = put_new_message_id(message, 2)
iex> message.message_id
1
iex> message = put_new_message_id(%Conduit.Message{}, fn _mess -> 1 end)
iex> message = put_new_message_id(message, fn _mess -> 2 end)
iex> message.message_id
1
"""
@spec put_new_message_id(__MODULE__.t(), message_id) :: __MODULE__.t()
def put_new_message_id(%__MODULE__{message_id: nil} = message, message_id) do
put_message_id(message, message_id)
end
def put_new_message_id(%__MODULE__{} = message, _) do
message
end
@doc """
Assigns a content_type to the message.
## Examples
iex> import Conduit.Message
iex> message = put_content_type(%Conduit.Message{}, "application/json")
iex> message.content_type
"application/json"
iex> message = put_content_type(%Conduit.Message{}, fn _mess -> "application/json" end)
iex> message.content_type
"application/json"
"""
@spec put_content_type(__MODULE__.t(), content_type) :: __MODULE__.t()
def put_content_type(%__MODULE__{} = message, content_type) when is_function(content_type) do
put_content_type(message, call_fun(content_type, message))
end
def put_content_type(%__MODULE__{} = message, content_type) do
%{message | content_type: content_type}
end
@doc """
Assigns a content_encoding to the message.
## Examples
iex> import Conduit.Message
iex> message = put_content_encoding(%Conduit.Message{}, "gzip")
iex> message.content_encoding
"gzip"
iex> message = put_content_encoding(%Conduit.Message{}, fn _mess -> "gzip" end)
iex> message.content_encoding
"gzip"
"""
@spec put_content_encoding(__MODULE__.t(), content_encoding) :: __MODULE__.t()
def put_content_encoding(%__MODULE__{} = message, content_encoding)
when is_function(content_encoding) do
put_content_encoding(message, call_fun(content_encoding, message))
end
def put_content_encoding(%__MODULE__{} = message, content_encoding) do
%{message | content_encoding: content_encoding}
end
@doc """
Assigns a created_by to the message.
## Examples
iex> import Conduit.Message
iex> message = put_created_by(%Conduit.Message{}, "my_app")
iex> message.created_by
"my_app"
iex> message = put_created_by(%Conduit.Message{}, fn _mess ->"my_app" end)
iex> message.created_by
"my_app"
"""
@spec put_created_by(__MODULE__.t(), created_by) :: __MODULE__.t()
def put_created_by(%__MODULE__{} = message, created_by) when is_function(created_by) do
put_created_by(message, call_fun(created_by, message))
end
def put_created_by(%__MODULE__{} = message, created_by) do
%{message | created_by: created_by}
end
@doc """
Assigns a created_at to the message.
## Examples
iex> import Conduit.Message
iex> message = put_created_at(%Conduit.Message{}, 1)
iex> message.created_at
1
iex> message = put_created_at(%Conduit.Message{}, fn _mess -> 1 end)
iex> message.created_at
1
"""
@spec put_created_at(__MODULE__.t(), created_at) :: __MODULE__.t()
def put_created_at(%__MODULE__{} = message, created_at) when is_function(created_at) do
put_created_at(message, call_fun(created_at, message))
end
def put_created_at(%__MODULE__{} = message, created_at) do
%{message | created_at: created_at}
end
@fields [
:source,
:destination,
:user_id,
:correlation_id,
:message_id,
:content_type,
:content_encoding,
:created_by,
:created_at
]
@doc """
Returns all non-`nil` fields from the message as a map.
The following fields will be returned:
#{@fields |> Enum.map(&"* `#{inspect(&1)}`") |> Enum.join("\n")}
## Examples
iex> import Conduit.Message
iex> message =
iex> %Conduit.Message{}
iex> |> put_message_id("1")
iex> |> put_correlation_id("2")
iex> get_fields(message)
%{
message_id: "1",
correlation_id: "2"
}
"""
@spec get_fields(__MODULE__.t()) :: %{atom() => term()}
def get_fields(%__MODULE__{} = message) do
message
|> Map.take(@fields)
|> Enum.filter(fn {_, value} -> value != nil end)
|> Enum.into(%{})
end
@doc """
Returns a header from the message specified by `key`.
## Examples
iex> import Conduit.Message
iex> message = put_header(%Conduit.Message{}, "retries", 1)
iex> get_header(message, "retries")
1
"""
@spec get_header(__MODULE__.t(), String.t()) :: any
def get_header(%__MODULE__{headers: headers}, key) when is_binary(key) do
get_in(headers, [key])
end
@doc """
Assigns a header for the message specified by `key`.
## Examples
iex> import Conduit.Message
iex> message = put_header(%Conduit.Message{}, "retries", 1)
iex> get_header(message, "retries")
1
iex> message = put_header(message, "retries", fn mess -> get_header(mess, "retries") + 1 end)
iex> get_header(message, "retries")
2
"""
@spec put_header(__MODULE__.t(), String.t(), any) :: __MODULE__.t()
def put_header(%__MODULE__{} = message, key, value)
when is_function(value) and is_binary(key) do
put_header(message, key, call_fun(value, message))
end
def put_header(%__MODULE__{headers: headers} = message, key, value) when is_binary(key) do
%{message | headers: put_in(headers, [key], value)}
end
@doc """
  Assigns multiple headers to the message from a map of keys to values.
## Examples
iex> import Conduit.Message
iex> message = put_headers(%Conduit.Message{}, %{"retries" => 1})
iex> get_header(message, "retries")
1
iex> message = put_headers(message, %{"retries" => fn mess -> get_header(mess, "retries") + 1 end})
iex> get_header(message, "retries")
2
"""
@spec put_headers(__MODULE__.t(), %{String.t() => any}) :: __MODULE__.t()
def put_headers(%__MODULE__{} = message, headers) when is_map(headers) do
Enum.reduce(headers, message, fn {key, value}, mess ->
put_header(mess, key, value)
end)
end
@doc """
Deletes a header from the message specified by `key`.
## Examples
iex> import Conduit.Message
iex> message = put_header(%Conduit.Message{}, "retries", 1)
iex> message = delete_header(message, "retries")
iex> get_header(message, "retries")
nil
"""
@spec delete_header(__MODULE__.t(), String.t()) :: __MODULE__.t()
def delete_header(%__MODULE__{headers: headers} = message, key) do
%{message | headers: Map.delete(headers, key)}
end
@doc """
Assigns the content of the message.
## Examples
iex> import Conduit.Message
iex> message = put_body(%Conduit.Message{}, "hi")
iex> message.body
"hi"
iex> message = put_body(message, fn _mess -> "bye" end)
iex> message.body
"bye"
"""
@spec put_body(__MODULE__.t(), body) :: __MODULE__.t()
def put_body(%__MODULE__{} = message, body) when is_function(body) do
put_body(message, call_fun(body, message))
end
def put_body(%__MODULE__{} = message, body) do
%{message | body: body}
end
@doc """
  Assigns the status of the message as acknowledged. This will be used
  to signal to the message queue that processing the message was successful
  and that the message can be discarded.
## Examples
iex> import Conduit.Message
iex> message = ack(%Conduit.Message{})
iex> message.status
:ack
"""
@spec ack(__MODULE__.t()) :: __MODULE__.t()
def ack(message) do
%{message | status: :ack}
end
@doc """
  Assigns the status of the message as negatively acknowledged. This will be used
  to signal to the message queue that processing the message was not successful.
## Examples
iex> import Conduit.Message
iex> message = nack(%Conduit.Message{})
iex> message.status
:nack
"""
@spec nack(__MODULE__.t()) :: __MODULE__.t()
def nack(message) do
%{message | status: :nack}
end
@doc """
Retrieves a named value from the message.
## Examples
iex> import Conduit.Message
iex> message = assign(%Conduit.Message{}, :user_id, 1)
iex> assigns(message, :user_id)
1
"""
  @spec assigns(__MODULE__.t(), atom) :: any
def assigns(%__MODULE__{assigns: assigns}, key) do
get_in(assigns, [key])
end
@doc """
Assigns a named value to the message.
## Examples
iex> import Conduit.Message
iex> message = assign(%Conduit.Message{}, :user_id, 1)
iex> assigns(message, :user_id)
1
"""
@spec assign(__MODULE__.t(), atom, any) :: __MODULE__.t()
def assign(%__MODULE__{assigns: assigns} = message, key, value) when is_atom(key) do
%{message | assigns: Map.put(assigns, key, value)}
end
@doc """
Retrieves a named value from the message. This is intended for libraries and framework use.
## Examples
iex> import Conduit.Message
iex> message = put_private(%Conduit.Message{}, :message_id, 1)
iex> get_private(message, :message_id)
1
"""
@spec get_private(__MODULE__.t(), atom) :: term
def get_private(%__MODULE__{private: private}, key) do
get_in(private, [key])
end
@doc """
Assigns a named value to the message. This is intended for libraries and framework use.
## Examples
iex> import Conduit.Message
iex> message = put_private(%Conduit.Message{}, :message_id, 1)
iex> get_private(message, :message_id)
1
"""
@spec put_private(__MODULE__.t(), atom, any) :: __MODULE__.t()
def put_private(%__MODULE__{private: private} = message, key, value) when is_atom(key) do
%{message | private: Map.put(private, key, value)}
end
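# Resolves function-valued headers and bodies: a zero-arity function is
# called as-is, a one-arity function receives the message, and any other
# arity raises Conduit.BadArityError.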
defp call_fun(fun, message) do
call_fun(fun, message, :erlang.fun_info(fun, :arity))
end
defp call_fun(fun, _message, {:arity, 0}), do: fun.()
defp call_fun(fun, message, {:arity, 1}), do: fun.(message)
defp call_fun(_fun, _message, {:arity, n}) do
message = """
Expected function with arity of 0 or 1, but got one with arity #{n}.
"""
raise Conduit.BadArityError, message
end
end
|
lib/conduit/message.ex
| 0.886313
| 0.447521
|
message.ex
|
starcoder
|
defmodule Forth do
@opaque evaluator :: %{stack: [integer], ops: %{String.t() => [atom | String.t() | integer]}}
@doc """
Create a new evaluator.
"""
@basic_ops %{
"dup" => [:dup],
"drop" => [:drop],
"swap" => [:swap],
"over" => [:over],
"+" => [:+],
"-" => [:-],
"*" => [:*],
"/" => [:/]
}
@spec new() :: evaluator
def new() do
%{stack: [], ops: @basic_ops}
end
@doc """
Evaluate an input string, updating the evaluator state.
"""
@spec eval(evaluator, String.t()) :: evaluator
def eval(ev, s) do
String.downcase(s)
|> tokenize()
|> evaluate(ev)
end
@separator ~r/[\s \x00-\x19]/
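# Split input on whitespace and control characters, converting integer
# literals to integers and leaving all other words as strings.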
def tokenize(string) do
string
|> String.split(@separator, trim: true)
|> Enum.map(&parse_integer/1)
end
def parse_integer(word) do
case Integer.parse(word) do
{int, ""} -> int
_ -> word
end
end
# End of input
def evaluate([], ev), do: ev
# Integer input
def evaluate([i | tokens], %{stack: stack} = ev) when is_number(i),
do: evaluate(tokens, %{ev | stack: [i | stack]})
# Stack manipulation
def evaluate([op | tokens], %{stack: stack} = ev) when is_atom(op) do
new_stack =
case {op, stack} do
{:+, [a, b | rest]} -> [a + b | rest]
{:-, [a, b | rest]} -> [b - a | rest]
{:*, [a, b | rest]} -> [a * b | rest]
{:/, [0 | _rest]} -> raise __MODULE__.DivisionByZero
{:/, [a, b | rest]} -> [div(b, a) | rest]
{:dup, [a | rest]} -> [a, a | rest]
{:drop, [_a | rest]} -> rest
{:swap, [a, b | rest]} -> [b, a | rest]
{:over, [a, b | rest]} -> [b, a, b | rest]
{_op, _stack} -> raise __MODULE__.StackUnderflow
end
evaluate(tokens, %{ev | stack: new_stack})
end
# New words
def evaluate([":" | tokens], %{ops: ops} = ev) do
[name | instructions] = Enum.take_while(tokens, fn t -> t != ";" end)
if is_number(name) do
raise __MODULE__.InvalidWord, word: name
else
rest = tokens |> Enum.drop_while(fn t -> t != ";" end) |> tl
new_ops = Map.put(ops, name, instructions)
evaluate(rest, %{ev | ops: new_ops})
end
end
# Check for known words
def evaluate([token | tokens], %{ops: ops} = ev) do
if not Map.has_key?(ops, token) do
raise __MODULE__.UnknownWord, word: token
else
evaluate(ops[token] ++ tokens, ev)
end
end
@doc """
Return the current stack as a string with the element on top of the stack
being the rightmost element in the string.
"""
@spec format_stack(evaluator) :: String.t()
def format_stack(%{stack: stack}) do
stack
|> Enum.reverse()
|> Enum.map_join(" ", &Integer.to_string/1)
end
defmodule StackUnderflow do
defexception []
def message(_), do: "stack underflow"
end
defmodule InvalidWord do
defexception word: nil
def message(e), do: "invalid word: #{inspect(e.word)}"
end
defmodule UnknownWord do
defexception word: nil
def message(e), do: "unknown word: #{inspect(e.word)}"
end
defmodule DivisionByZero do
defexception []
def message(_), do: "division by zero"
end
end
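# A minimal usage sketch:
#
#   Forth.new()
#   |> Forth.eval(": square dup * ; 3 square")
#   |> Forth.format_stack()
#   #=> "9"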
|
elixir/forth/lib/forth.ex
| 0.641759
| 0.658486
|
forth.ex
|
starcoder
|
defmodule Scenic.Primitive.Style.Theme do
@moduledoc """
Themes are a way to bundle up a set of colors that are intended to be used
by components invoked by a scene.
There is a set of pre-defined themes.
You can also pass in a map of color values.
Unlike other styles, the currently set theme is passed down to child components.
Each component gets to pick, choose, or ignore any colors in a given theme.
### Predefined Themes
* `:dark` - This is the default and most common. Use when the background is dark.
* `:light` - Use when the background is light colored.
### Specialty Themes
The remaining themes are designed to color the standard components and don't really
make much sense when applied to the root of a graph. You could, but it would be...
interesting.
The most obvious place to use them is with [`Button`](Scenic.Component.Button.html)
components.
* `:primary` - Blue background. This is the primary button type indicator.
* `:secondary` - Grey background. The non-primary type indicator.
* `:success` - Green background.
* `:danger` - Red background. Use for irreversible or dangerous actions.
* `:warning` - Orange background.
* `:info` - Lightish blue background.
* `:text` - Transparent background.
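### Custom Themes
Instead of a predefined name you can pass in your own map. A minimal sketch
(the color choices here are only illustrative) must contain at least the keys
checked by `validate/1`:
custom_theme = %{
text: :white,
background: :dark_green,
border: :light_grey,
active: {40, 40, 40},
thumb: :cornflower_blue,
focus: :cornflower_blue
}
Extra color entries beyond the required ones are allowed and are validated too.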
"""
use Scenic.Primitive.Style
alias Scenic.Primitive.Style.Paint.Color
@theme_light %{
text: :black,
background: :white,
border: :dark_grey,
active: {215, 215, 215},
thumb: :cornflower_blue,
focus: :blue,
highlight: :saddle_brown
}
@theme_dark %{
text: :white,
background: :black,
border: :light_grey,
active: {40, 40, 40},
thumb: :cornflower_blue,
focus: :cornflower_blue,
highlight: :sandy_brown
}
# specialty themes
@primary Map.merge(@theme_dark, %{background: {72, 122, 252}, active: {58, 94, 201}})
@secondary Map.merge(@theme_dark, %{background: {111, 117, 125}, active: {86, 90, 95}})
@success Map.merge(@theme_dark, %{background: {99, 163, 74}, active: {74, 123, 56}})
@danger Map.merge(@theme_dark, %{background: {191, 72, 71}, active: {164, 54, 51}})
@warning Map.merge(@theme_light, %{background: {239, 196, 42}, active: {197, 160, 31}})
@info Map.merge(@theme_dark, %{background: {94, 159, 183}, active: {70, 119, 138}})
@text Map.merge(@theme_dark, %{text: {72, 122, 252}, background: :clear, active: :clear})
@themes %{
light: @theme_light,
dark: @theme_dark,
primary: @primary,
secondary: @secondary,
success: @success,
danger: @danger,
warning: @warning,
info: @info,
text: @text
}
# ============================================================================
# data verification and serialization
@doc false
def validate(theme)
def validate(:light), do: {:ok, :light}
def validate(:dark), do: {:ok, :dark}
def validate(:primary), do: {:ok, :primary}
def validate(:secondary), do: {:ok, :secondary}
def validate(:success), do: {:ok, :success}
def validate(:danger), do: {:ok, :danger}
def validate(:warning), do: {:ok, :warning}
def validate(:info), do: {:ok, :info}
def validate(:text), do: {:ok, :text}
def validate(
%{
text: _,
background: _,
border: _,
active: _,
thumb: _,
focus: _
} = theme
) do
# we know all the required colors are there.
# now make sure they are all valid colors, including any custom added ones.
theme
|> Enum.reduce({:ok, theme}, fn
_, {:error, msg} ->
{:error, msg}
{key, color}, {:ok, _} = acc ->
case Color.validate(color) do
{:ok, _} -> acc
{:error, msg} -> err_color(key, msg)
end
end)
end
def validate(name) when is_atom(name) do
{
:error,
"""
#{IO.ANSI.red()}Invalid theme name
Received: #{inspect(name)}
#{IO.ANSI.yellow()}
Named themes must be from the following list:
:light, :dark, :primary, :secondary, :success, :danger, :warning, :info, :text#{IO.ANSI.default_color()}
"""
}
end
def validate(%{} = map) do
{
:error,
"""
#{IO.ANSI.red()}Invalid theme specification
Received: #{inspect(map)}
#{IO.ANSI.yellow()}
You passed in a map, but it didn't include all the required color specifications.
It must contain a valid color for each of the following entries.
:text, :background, :border, :active, :thumb, :focus
#{IO.ANSI.default_color()}
"""
}
end
def validate(data) do
{
:error,
"""
#{IO.ANSI.red()}Invalid theme specification
Received: #{inspect(data)}
#{IO.ANSI.yellow()}
Themes can be a name from this list:
:light, :dark, :primary, :secondary, :success, :danger, :warning, :info, :text
Or it may also be a map defining colors for the values of
:text, :background, :border, :active, :thumb, :focus
If you pass in a map, you may add your own colors in addition to the required ones.#{IO.ANSI.default_color()}
"""
}
end
defp err_color(key, msg) do
{
:error,
"""
#{IO.ANSI.red()}Invalid color in map
Map entry: #{inspect(key)}
#{msg}
"""
}
end
# --------------------------------------------------------
@doc false
def normalize(theme) when is_atom(theme), do: Map.get(@themes, theme)
def normalize(theme) when is_map(theme), do: theme
# --------------------------------------------------------
@doc false
def preset(theme), do: Map.get(@themes, theme)
end
|
lib/scenic/primitive/style/theme.ex
| 0.850918
| 0.476397
|
theme.ex
|
starcoder
|
defmodule Automaton.Types.TWEANN.Cortex do
@moduledoc """
An NN synchronizing element. It knows the PID of every sensor and
actuator, so that it will know when all the actuators have received their
inputs, and that it's time for the sensors to again gather and
fan out sensory data to the neurons in the input layer.
It can also act as a supervisor to all the neuron, sensor, and actuator elements
in the NN system.
A cortex is represented by the tuple: {id, sensor_ids, actuator_ids, n_ids}
• id, a unique id (useful for datastores)
• sensor_ids, ids of the sensors that produce and pass the sensory signals to the
neurons in the input layer.
• actuator_ids, list of actuator ids that the neural output layer is connected to.
• n_ids, list of all neuron ids in the NN
"""
require Logger
defstruct id: nil, sensor_ids: [], actuator_ids: [], n_ids: []
@doc """
The `gen/1` function spawns the cortex element, which immediately starts to wait
for the state message from the same process that spawned it, the exoself. The
initial state message contains the sensor, actuator, and neuron PId lists. The
message also specifies how many total Sense-Think-Act cycles the Cortex
should execute before terminating the NN system. Once we implement the
learning algorithm, the termination criteria will depend on the fitness of the
NN, or some other useful property.
"""
def gen(exoself_pid) do
spawn(fn -> loop(exoself_pid) end)
end
@doc """
The cortex’s goal is to synchronize the NN system such that when the actuators
have received all their control signals, the sensors are once again triggered
to gather new sensory information. Thus the cortex waits for the sync messages
from the actuator PIds in its system, and once it has received all the sync
messages, it triggers the sensors and then drops back to waiting for a new set
of sync messages. The cortex stores 2 copies of the actuator PIds: the a_pids,
and the memory a_pids (m_a_pids). Once all the actuators have sent it the sync
messages, it can restore the a_pids list from the m_a_pids. Finally, there is
also the step variable, which decrements every time a full Sense-Think-Act
cycle completes; once it reaches 0, the NN system begins its termination
and backup process.
"""
def loop(exoself_pid) do
receive do
{^exoself_pid, {id, s_pids, a_pids, n_pids}, total_steps} ->
for s_pid <- s_pids, do: send(s_pid, {self(), :sync})
loop(id, exoself_pid, s_pids, {a_pids, a_pids}, n_pids, total_steps)
end
end
def loop(id, exoself_pid, s_pids, {_a_pids, m_a_pids}, n_pids, 0) do
Logger.debug("Cortex:#{inspect(id)} finished, now backing up and terminating.")
neuron_ids_and_weights = get_backup(n_pids, [])
send(exoself_pid, {self(), :backup, neuron_ids_and_weights})
for lst <- [s_pids, m_a_pids, n_pids] do
for pid <- lst, do: send(pid, {self(), :terminate})
end
end
def loop(id, exoself_pid, s_pids, {[a_pid | a_pids], m_a_pids}, n_pids, step) do
receive do
{^a_pid, :sync} ->
loop(id, exoself_pid, s_pids, {a_pids, m_a_pids}, n_pids, step)
:terminate ->
Logger.info("Cortex:#{inspect(id)} is terminating.")
for lst <- [s_pids, m_a_pids, n_pids] do
for pid <- lst, do: send(pid, {self(), :terminate})
end
end
end
def loop(id, exoself_pid, s_pids, {[], m_a_pids}, n_pids, step) do
for s_pid <- s_pids, do: send(s_pid, {self(), :sync})
loop(id, exoself_pid, s_pids, {m_a_pids, m_a_pids}, n_pids, step - 1)
end
@doc """
During backup, cortex contacts all the neurons in its NN and requests for the
neuron’s Ids and their Input_IdPs. Once the updated Input_IdPs from all the
neurons have been accumulated, the list is sent to exoself for the actual
backup and storage.
"""
def get_backup([n_pid | n_pids], acc) do
send(n_pid, {self(), :get_backup})
receive do
{^n_pid, n_id, weight_tuples} ->
get_backup(n_pids, [{n_id, weight_tuples} | acc])
end
end
def get_backup([], acc), do: acc
end
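# A minimal sketch of how an exoself process might start a cortex, assuming
# the sensor/actuator/neuron processes (s_pids/a_pids/n_pids) are already
# spawned and cortex_id is a unique id:
#
#   cortex_pid = Cortex.gen(self())
#   send(cortex_pid, {self(), {cortex_id, s_pids, a_pids, n_pids}, 1000})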
|
lib/automata/automaton_types/neuroevolution/cortex.ex
| 0.710327
| 0.753126
|
cortex.ex
|
starcoder
|
defmodule AOC.Day11.SpacePolice do
alias AOC.Day11.Intcode
@moduledoc false
@type grid :: map
@type point :: {integer, integer}
def part1(path) do
Intcode.stream_puzzle_input(path)
|> Intcode.puzzle_input_to_map()
|> paint(%{{0, 0} => {0, 0}}, {0, 0}, :north)
|> count_painted_panels()
end
def part2(path) do
Intcode.stream_puzzle_input(path)
|> Intcode.puzzle_input_to_map()
|> paint(%{{0, 0} => {1, 0}}, {0, 0}, :north)
|> print()
end
def read_program_input(path) do
File.read!(path)
|> String.trim()
|> String.split(",")
end
@spec read_grid(grid, point) :: {integer, integer}
def read_grid(grid, location) do
Map.get(grid, location, {0, 0})
end
@spec update_grid(grid, point, integer) :: grid
def update_grid(grid, location, value) when is_integer(value) do
{_old_value, num_painted} = read_grid(grid, location)
Map.put(grid, location, {value, num_painted + 1})
end
def rotate_and_move(location, facing, rotation) when is_atom(facing) and is_integer(rotation) do
rotation =
case rotation do
0 -> :left
1 -> :right
end
new_facing = rotate(facing, rotation)
new_location = move(location, new_facing)
{new_location, new_facing}
end
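# The grid uses mathematical orientation: north increases y, east increases x.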
def move(location, facing) do
{x, y} = location
case facing do
:north -> {x, y + 1}
:east -> {x + 1, y}
:south -> {x, y - 1}
:west -> {x - 1, y}
end
end
def rotate(facing, :left) do
case facing do
:north -> :west
:east -> :north
:south -> :east
:west -> :south
end
end
def rotate(facing, :right) do
case facing do
:north -> :east
:east -> :south
:south -> :west
:west -> :north
end
end
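# Drives the Intcode "brain": feed it the color under the robot, paint the
# panel with its first output, rotate and move with the second, and repeat
# until the program halts.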
def paint(memory, grid, location, facing) do
{color, _num_painted} = read_grid(grid, location)
memory = Intcode.append_input(memory, color)
with {:waiting, memory} <- Intcode.compute(memory),
{[new_color, rotation], memory} <- Intcode.empty_outputs(memory),
grid <- update_grid(grid, location, new_color),
{location, facing} <- rotate_and_move(location, facing, rotation) do
paint(memory, grid, location, facing)
else
{:error, _memory} ->
:error_compute
{:terminate, _memory} ->
grid
end
end
def count_painted_panels(grid) do
map_size(grid)
end
def print(grid) do
{{min_x, min_y}, {max_x, max_y}} =
Map.keys(grid)
|> Enum.reduce({{0, 0}, {0, 0}}, fn {x, y}, {{min_x, min_y}, {max_x, max_y}} ->
{{min(x, min_x), min(y, min_y)}, {max(x, max_x), max(y, max_y)}}
end)
IO.puts("")
Enum.each(max_y..min_y, fn y ->
Enum.each(min_x..max_x, fn x ->
{color, _num_painted} = read_grid(grid, {x, y})
if color == 0 do
IO.write(" ")
else
IO.write(" # ")
end
end)
IO.puts("")
end)
:ok
end
end
|
aoc-2019/lib/aoc/day11/space_police.ex
| 0.690559
| 0.540318
|
space_police.ex
|
starcoder
|
defmodule Statistics.Distributions.Binomial do
alias Statistics.Math
@moduledoc """
Binomial distribution.
This models the number of successes in a fixed number
of binary trials, each with the same known probability
(each such trial is often called a Bernoulli trial).
"""
@doc """
The probability mass function
## Examples
iex> Statistics.Distributions.Binomial.pmf(4, 0.5).(2)
0.375
"""
@spec pmf(non_neg_integer, number) :: fun
def pmf(n, p) do
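# P(X = k) = C(n, k) * p^k * (1 - p)^(n - k)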
fn k ->
cond do
k < 0 ->
0.0
n < k ->
0.0
true ->
xk = Math.to_int(k)
Math.combination(n, xk) * Math.pow(p, xk) * Math.pow(1 - p, n - xk)
end
end
end
@doc """
The cumulative distribution function
## Examples
iex> Statistics.Distributions.Binomial.cdf(4, 0.5).(2)
0.6875
"""
@spec cdf(non_neg_integer, number) :: fun
def cdf(n, p) do
fn k ->
0..Math.to_int(Math.floor(k))
|> Enum.to_list()
|> Enum.map(fn i -> Math.combination(n, i) * Math.pow(p, i) * Math.pow(1 - p, n - i) end)
|> Enum.sum()
end
end
@doc """
The percentile-point function (the inverse CDF)
## Examples
iex> Statistics.Distributions.Binomial.ppf(10, 0.5).(0.5)
5
"""
@spec ppf(non_neg_integer, number) :: fun
def ppf(n, p) do
fn x ->
ppf_tande(x, n, p, cdf(n, p), 0)
end
end
# trial-and-error method: walk integer guesses upward
# until the CDF reaches the target probability
defp ppf_tande(x, n, p, npcdf, g) do
g_cdf = npcdf.(g)
cond do
x > g_cdf ->
ppf_tande(x, n, p, npcdf, g + 1)
x <= g_cdf ->
g
end
end
@doc """
Draw a random number from a binomial distribution with the given parameters
Uses the [rejection sampling method](https://en.wikipedia.org/wiki/Rejection_sampling)
## Examples
iex> Statistics.Distributions.Binomial.rand(10, 0.5)
5.0
"""
@spec rand(non_neg_integer, number) :: non_neg_integer
def rand(n, p), do: rand(n, p, pmf(n, p))
defp rand(n, p, rpmf) do
x = Math.rand() * n
if rpmf.(x) > Math.rand() do
Float.round(x)
else
# keep trying
rand(n, p, rpmf)
end
end
end
|
lib/statistics/distributions/binomial.ex
| 0.910112
| 0.710653
|
binomial.ex
|
starcoder
|
defmodule Cizen.Saga do
@moduledoc """
The saga behaviour
## Example
defmodule SomeSaga do
use Cizen.Saga
defstruct []
@impl true
def init(_id, %__MODULE__{} = saga) do
saga
end
@impl true
def handle_event(_id, _event, state) do
state
end
end
"""
@type t :: struct
@type state :: any
# `pid | {atom, node} | atom` is the same as the argument of Process.monitor/1.
@type lifetime :: pid | {atom, node} | atom | nil
use GenServer
alias Cizen.CizenSagaRegistry
alias Cizen.Dispatcher
alias Cizen.Event
alias Cizen.Filter
alias Cizen.SagaID
require Filter
@doc """
Invoked when the saga is started.
A `Saga.Started` event will be dispatched after this callback.
The returned value will be used as the next state passed to the `c:handle_event/3` callback.
"""
@callback init(SagaID.t(), t()) :: state
@doc """
Invoked when the saga receives an event.
Returned value will be used as the next state to pass `c:handle_event/3` callback.
"""
@callback handle_event(SagaID.t(), Event.t(), state) :: state
@doc """
Invoked when the saga is resumed.
The returned value will be used as the next state passed to the `c:handle_event/3` callback.
This callback is predefined. The default implementation is here:
```
def resume(id, saga, state) do
init(id, saga)
state
end
```
"""
@callback resume(SagaID.t(), t(), state) :: state
defmacro __using__(_opts) do
quote do
@behaviour Cizen.Saga
@impl true
def resume(id, saga, state) do
init(id, saga)
state
end
defoverridable resume: 3
end
end
defmodule Finish do
@moduledoc "A event fired to finish"
defstruct([:id])
end
defmodule Started do
@moduledoc "A event fired on start"
defstruct([:id])
end
defmodule Resumed do
@moduledoc "A event fired on resume"
defstruct([:id])
end
defmodule Ended do
@moduledoc "A event fired on end"
defstruct([:id])
end
defmodule Finished do
@moduledoc "A event fired on finish"
defstruct([:id])
end
defmodule Crashed do
@moduledoc "A event fired on crash"
defstruct([:id, :reason, :stacktrace])
end
@doc """
Starts a saga which finishes when the current process exits.
"""
@spec fork(t) :: SagaID.t()
def fork(saga) do
lifetime = self()
id = SagaID.new()
{:ok, _pid} = GenServer.start_link(__MODULE__, {:start, id, saga, lifetime})
id
end
@doc """
Starts a saga linked to the current process
"""
@spec start_link(t) :: GenServer.on_start()
def start_link(saga) do
id = SagaID.new()
GenServer.start_link(__MODULE__, {:start, id, saga, nil})
end
@doc """
Returns the pid for the given saga ID.
"""
@spec get_pid(SagaID.t()) :: {:ok, pid} | :error
defdelegate get_pid(saga_id), to: CizenSagaRegistry
@doc """
Returns the saga struct for the given saga ID.
"""
@spec get_saga(SagaID.t()) :: {:ok, t()} | :error
defdelegate get_saga(saga_id), to: CizenSagaRegistry
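# Sentinel value: when the init/resume callback returns {lazy_init(), state},
# dispatching the Started/Resumed event is skipped (see init_with/6).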
@lazy_init {__MODULE__, :lazy_init}
def lazy_init, do: @lazy_init
@doc """
Returns the module for a saga.
"""
@spec module(t) :: module
def module(saga) do
saga.__struct__
end
@doc """
Resumes a saga with the given state.
"""
@spec resume(SagaID.t(), t(), state, pid | nil) :: GenServer.on_start()
def resume(id, saga, state, lifetime \\ nil) do
GenServer.start(__MODULE__, {:resume, id, saga, state, lifetime})
end
def start_saga(id, saga, lifetime) do
{:ok, _pid} = GenServer.start(__MODULE__, {:start, id, saga, lifetime})
end
def end_saga(id) do
GenServer.stop({:via, Registry, {CizenSagaRegistry, id}}, :shutdown)
catch
:exit, _ -> :ok
after
Dispatcher.dispatch(Event.new(nil, %Ended{id: id}))
end
def send_to(id, message) do
Registry.dispatch(CizenSagaRegistry, id, fn entries ->
for {pid, _} <- entries, do: send(pid, message)
end)
end
def exit(id, reason, trace) do
GenServer.stop({:via, Registry, {CizenSagaRegistry, id}}, {:shutdown, {reason, trace}})
end
@impl true
def init({:start, id, saga, lifetime}) do
init_with(id, saga, lifetime, %Started{id: id}, :init, [id, saga])
end
@impl true
def init({:resume, id, saga, state, lifetime}) do
init_with(id, saga, lifetime, %Resumed{id: id}, :resume, [id, saga, state])
end
defp init_with(id, saga, lifetime, event, function, arguments) do
Registry.register(CizenSagaRegistry, id, saga)
Dispatcher.listen(Filter.new(fn %Event{body: %Finish{id: ^id}} -> true end))
module = module(saga)
unless is_nil(lifetime), do: Process.monitor(lifetime)
state =
case apply(module, function, arguments) do
{@lazy_init, state} ->
state
state ->
Dispatcher.dispatch(Event.new(id, event))
state
end
{:ok, {id, module, state}}
end
@impl true
def handle_info(%Event{body: %Finish{id: id}}, {id, module, state}) do
{:stop, {:shutdown, :finish}, {id, module, state}}
end
@impl true
def handle_info(%Event{} = event, {id, module, state}) do
state = module.handle_event(id, event, state)
{:noreply, {id, module, state}}
rescue
reason -> {:stop, {:shutdown, {reason, __STACKTRACE__}}, {id, module, state}}
end
@impl true
def handle_info({:DOWN, _, :process, _, _}, state) do
{:stop, {:shutdown, :finish}, state}
end
@impl true
def terminate(:shutdown, {_id, _module, _state}) do
:shutdown
end
def terminate({:shutdown, :finish}, {id, _module, _state}) do
dispatch_async(Event.new(id, %Finished{id: id}))
:shutdown
end
def terminate({:shutdown, {reason, trace}}, {id, _module, _state}) do
dispatch_async(Event.new(id, %Crashed{id: id, reason: reason, stacktrace: trace}))
:shutdown
end
@impl true
def handle_call(:get_saga_id, _from, state) do
[saga_id] = Registry.keys(CizenSagaRegistry, self())
{:reply, saga_id, state}
end
def handle_call(request, _from, state) do
result = handle_request(request)
{:reply, result, state}
end
@doc false
def handle_request({:register, registry, saga_id, key, value}) do
Registry.register(registry, key, {saga_id, value})
end
def handle_request({:unregister, registry, key}) do
Registry.unregister(registry, key)
end
def handle_request({:unregister_match, registry, key, pattern, guards}) do
Registry.unregister_match(registry, key, pattern, guards)
end
def handle_request({:update_value, registry, key, callback}) do
Registry.update_value(registry, key, fn {saga_id, value} -> {saga_id, callback.(value)} end)
end
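# Dispatches the event from a short-lived task that first listens for the
# event itself and then waits for it to come back (or 60s) before exiting,
# so the dispatch can outlive the terminating saga process.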
defp dispatch_async(event) do
event_id = event.id
Task.start(fn ->
Dispatcher.listen(Filter.new(fn %Event{id: ^event_id} -> true end))
Dispatcher.dispatch(event)
receive do
_ -> :ok
after
60_000 -> :ok
end
end)
end
end
|
lib/cizen/saga.ex
| 0.804636
| 0.665723
|
saga.ex
|
starcoder
|
defmodule Tirexs.Resources.APIs do
@moduledoc """
This module provides a set of API helpers. Helpers are useful for building
the URN part of a particular request. Most commonly the result would
be used for dealing directly with the variety of available `Tirexs.HTTP` functions.
## Examples:
iex> APIs._refresh({ [force: true] })
"_refresh?force=true"
iex> APIs._refresh(["bear_test", "duck_test"], { [force: false] })
"bear_test,duck_test/_refresh?force=false"
iex> APIs._field_mapping(["bear_test", "duck_test"], "message", {[ ignore_unavailable: true ]})
"bear_test,duck_test/_mapping/message/field?ignore_unavailable=true"
iex> APIs._field_mapping("_all", "tw*", ["*.id", "*.text"])
"_all/_mapping/tw*/field/*.id,*.text"
NOTICE: All of the helpers have the same interface and behaviour, and almost don't care about the details.
This means you can accidentally create a completely unsupported API call.
## For instance:
iex> APIs._refresh(["bear_test", "duck_test"], ["a", "b"], {[ human: true ]})
"bear_test,duck_test/_refresh/a,b?human=true"
`Tirexs.Resources.urn/x` is responsible for concatenating the parts together.
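## Usage with Tirexs.HTTP
The resulting URN is typically handed straight to one of the `Tirexs.HTTP`
functions, e.g. (a sketch, assuming a reachable cluster with default
connection settings):
APIs._refresh("bear_test") |> Tirexs.HTTP.post()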
## Feature requests
Feature requests are welcome and should be discussed. But take a moment to find
out whether your idea fits with the scope and aims of the project. Please provide
as much detail and context as possible (from `CONTRIBUTING.md`).
"""
alias Tirexs.Resources.Document
defdelegate _bulk(), to: Document
defdelegate _bulk(a), to: Document
defdelegate _bulk(a,b), to: Document
defdelegate _bulk(a,b,c), to: Document
defdelegate _mget(), to: Document
defdelegate _mget(a), to: Document
defdelegate _mget(a,b), to: Document
defdelegate _mget(a,b,c), to: Document
defdelegate _source(a), to: Document
defdelegate _source(a,b), to: Document
defdelegate _source(a,b,c), to: Document
defdelegate _source(a,b,c,d), to: Document
defdelegate _update(a), to: Document
defdelegate _update(a,b), to: Document
defdelegate _update(a,b,c), to: Document
defdelegate _update(a,b,c,d), to: Document
defdelegate index(a), to: Document
defdelegate index(a,b), to: Document
defdelegate index(a,b,c), to: Document
defdelegate index(a,b,c,d), to: Document
defdelegate doc(a), to: Document
defdelegate doc(a,b), to: Document
defdelegate doc(a,b,c), to: Document
defdelegate doc(a,b,c,d), to: Document
alias Tirexs.Resources.Search
defdelegate _explain(a), to: Search
defdelegate _explain(a,b), to: Search
defdelegate _explain(a,b,c), to: Search
defdelegate _explain(a,b,c,d), to: Search
defdelegate _search_shards(a), to: Search
defdelegate _search_shards(a,b), to: Search
defdelegate _search_shards(a,b,c), to: Search
defdelegate _field_stats(), to: Search
defdelegate _field_stats(a), to: Search
defdelegate _field_stats(a,b), to: Search
defdelegate _validate_query(), to: Search
defdelegate _validate_query(a), to: Search
defdelegate _validate_query(a,b), to: Search
defdelegate _validate_query(a,b,c), to: Search
defdelegate _count(), to: Search
defdelegate _count(a), to: Search
defdelegate _count(a,b), to: Search
defdelegate _count(a,b,c), to: Search
defdelegate _search_exists(), to: Search
defdelegate _search_exists(a), to: Search
defdelegate _search_exists(a,b), to: Search
defdelegate _search_exists(a,b,c), to: Search
defdelegate _search(), to: Search
defdelegate _search(a), to: Search
defdelegate _search(a,b), to: Search
defdelegate _search(a,b,c), to: Search
defdelegate _search_scroll(), to: Search
defdelegate _search_scroll(a), to: Search
defdelegate _search_scroll_all(), to: Search
defdelegate percolator(a,b), to: Search
defdelegate _percolate(a), to: Search
defdelegate _percolate(a,b), to: Search
defdelegate _percolate(a,b,c), to: Search
defdelegate _percolate(a,b,c,d), to: Search
defdelegate _percolate_count(a), to: Search
defdelegate _percolate_count(a,b), to: Search
defdelegate _percolate_count(a,b,c), to: Search
defdelegate _percolate_count(a,b,c,d), to: Search
alias Tirexs.Resources.Indices
## Mapping Management
defdelegate _all_mapping(), to: Indices
defdelegate _mapping(), to: Indices
defdelegate _mapping(a), to: Indices
defdelegate _mapping(a,b), to: Indices
defdelegate _mapping(a,b,c), to: Indices
defdelegate _field_mapping(a), to: Indices
defdelegate _field_mapping(a,b), to: Indices
defdelegate _field_mapping(a,b,c), to: Indices
defdelegate _field_mapping(a,b,c,d), to: Indices
## Index Settings
defdelegate _analyze(), to: Indices
defdelegate _analyze(a), to: Indices
defdelegate _analyze(a,b), to: Indices
defdelegate _analyze(a,b,c), to: Indices
defdelegate _warmer(), to: Indices
defdelegate _warmer(a), to: Indices
defdelegate _warmer(a,b), to: Indices
defdelegate _warmer(a,b,c), to: Indices
defdelegate _template(), to: Indices
defdelegate _template(a), to: Indices
defdelegate _template(a,b), to: Indices
defdelegate _template(a,b,c), to: Indices
defdelegate _settings(), to: Indices
defdelegate _settings(a), to: Indices
defdelegate _settings(a,b), to: Indices
defdelegate _settings(a,b,c), to: Indices
## Index Management
defdelegate _open(), to: Indices
defdelegate _open(a), to: Indices
defdelegate _open(a,b), to: Indices
defdelegate _open(a,b,c), to: Indices
defdelegate _close(), to: Indices
defdelegate _close(a), to: Indices
defdelegate _close(a,b), to: Indices
defdelegate _close(a,b,c), to: Indices
## Alias Management
defdelegate _aliases(), to: Indices
defdelegate _aliases(a), to: Indices
defdelegate _aliases(a,b), to: Indices
defdelegate _aliases(a,b,c), to: Indices
defdelegate _alias(), to: Indices
defdelegate _alias(a), to: Indices
defdelegate _alias(a,b), to: Indices
defdelegate _alias(a,b,c), to: Indices
## Status Management
defdelegate _refresh(), to: Indices
defdelegate _refresh(a), to: Indices
defdelegate _refresh(a,b), to: Indices
defdelegate _refresh(a,b,c), to: Indices
defdelegate _flush(), to: Indices
defdelegate _flush(a), to: Indices
defdelegate _flush(a,b), to: Indices
defdelegate _flush(a,b,c), to: Indices
defdelegate _forcemerge(), to: Indices
defdelegate _forcemerge(a), to: Indices
defdelegate _forcemerge(a,b), to: Indices
defdelegate _forcemerge(a,b,c), to: Indices
defdelegate _upgrade(), to: Indices
defdelegate _upgrade(a), to: Indices
defdelegate _upgrade(a,b), to: Indices
defdelegate _upgrade(a,b,c), to: Indices
defdelegate _cache_clear(), to: Indices
defdelegate _cache_clear(a), to: Indices
defdelegate _cache_clear(a,b), to: Indices
defdelegate _cache_clear(a,b,c), to: Indices
## Monitoring Management
defdelegate _stats(), to: Indices
defdelegate _stats(a), to: Indices
defdelegate _stats(a,b), to: Indices
defdelegate _stats(a,b,c), to: Indices
defdelegate _segments(), to: Indices
defdelegate _segments(a), to: Indices
defdelegate _segments(a,b), to: Indices
defdelegate _segments(a,b,c), to: Indices
defdelegate _recovery(), to: Indices
defdelegate _recovery(a), to: Indices
defdelegate _recovery(a,b), to: Indices
defdelegate _recovery(a,b,c), to: Indices
defdelegate _shard_stores(), to: Indices
defdelegate _shard_stores(a), to: Indices
defdelegate _shard_stores(a,b), to: Indices
defdelegate _shard_stores(a,b,c), to: Indices
end
|
lib/tirexs/resources/apis.ex
| 0.762954
| 0.461927
|
apis.ex
|
starcoder
|
defmodule Flop do
@moduledoc """
Flop is a helper library for filtering, ordering and pagination with Ecto.
## Usage
The simplest way of using this library is just to use
`Flop.validate_and_run/3` and `Flop.validate_and_run!/3`. Both functions
take a queryable and a parameter map, validate the parameters, run the query
and return the query results and the meta information.
iex> Flop.Repo.insert_all(Flop.Pet, [
...> %{name: "Harry", age: 4, species: "C. lupus"},
...> %{name: "Maggie", age: 1, species: "O. cuniculus"},
...> %{name: "Patty", age: 2, species: "C. aegagrus"}
...> ])
iex> params = %{order_by: ["name", "age"], page: 1, page_size: 2}
iex> {:ok, {results, meta}} =
...> Flop.validate_and_run(
...> Flop.Pet,
...> params,
...> repo: Flop.Repo
...> )
iex> Enum.map(results, & &1.name)
["Harry", "Maggie"]
iex> meta.total_count
3
iex> meta.total_pages
2
iex> meta.has_next_page?
true
Under the hood, these functions just call `Flop.validate/2` and `Flop.run/3`,
which in turn calls `Flop.all/3` and `Flop.meta/3`. If you need finer control
over if and when to execute each step, you can call those functions directly.
See `Flop.Meta` for descriptions of the meta fields.
## Global configuration
You can set some global options like the default Ecto repo via the application
environment. All global options can be overridden by passing them directly to
the functions or configuring the options for a schema module via
`Flop.Schema`.
import Config
config :flop, repo: MyApp.Repo
See `t:Flop.option/0` for a description of all available options.
## Schema options
You can set some options for a schema by deriving `Flop.Schema`. The options
are evaluated at the validation step.
defmodule Pet do
use Ecto.Schema
@derive {Flop.Schema,
filterable: [:name, :species],
sortable: [:name, :age],
default_limit: 20,
max_limit: 100}
schema "pets" do
field :name, :string
field :age, :integer
field :species, :string
field :social_security_number, :string
end
end
You need to pass the schema to `Flop.validate/2` or any function that
includes the validation step with the `:for` option.
iex> params = %{"order_by" => ["name", "age"], "limit" => 5}
iex> {:ok, flop} = Flop.validate(params, for: Flop.Pet)
iex> flop.limit
5
iex> params = %{"order_by" => ["name", "age"], "limit" => 200}
iex> {:error, changeset} = Flop.validate(params, for: Flop.Pet)
iex> [{:limit, {msg, _}}] = changeset.errors
iex> msg
"must be less than or equal to %{number}"
iex> params = %{"order_by" => ["name", "age"], "limit" => 200}
iex> {:error, changeset} =
...> Flop.validate_and_run(
...> Flop.Pet,
...> params,
...> for: Flop.Pet
...> )
iex> [{:limit, {msg, _}}] = changeset.errors
iex> msg
"must be less than or equal to %{number}"
## Ordering
To add an ordering clause to a query, you need to set the `:order_by` and
optionally the `:order_directions` parameter. `:order_by` should be the list
of fields, while `:order_directions` is a list of `t:Flop.order_direction/0`.
`:order_by` and `:order_directions` are zipped when generating the `ORDER BY`
clause. If no order directions are given, `:asc` is used as default.
iex> params = %{
...> "order_by" => ["name", "age"],
...> "order_directions" => ["asc", "desc"]
...> }
iex> {:ok, flop} = Flop.validate(params)
iex> flop.order_by
[:name, :age]
iex> flop.order_directions
[:asc, :desc]
Flop uses these two fields instead of a keyword list, so that the order
instructions can be easily passed in a query string.
## Pagination
For queries using `OFFSET` and `LIMIT`, you have the choice between
page-based pagination parameters:
%{page: 5, page_size: 20}
and offset-based pagination parameters:
%{offset: 100, limit: 20}
For cursor-based pagination, you can either use `:first`/`:after` or
`:last`/`:before`. You also need to pass the `:order_by` parameter or set a
default order for the schema via `Flop.Schema`.
iex> Flop.Repo.insert_all(Flop.Pet, [
...> %{name: "Harry", age: 4, species: "C. lupus"},
...> %{name: "Maggie", age: 1, species: "O. cuniculus"},
...> %{name: "Patty", age: 2, species: "C. aegagrus"}
...> ])
iex>
iex> # forward (first/after)
iex>
iex> params = %{first: 2, order_by: [:species, :name]}
iex> {:ok, {results, meta}} = Flop.validate_and_run(Flop.Pet, params)
iex> Enum.map(results, & &1.name)
["Patty", "Harry"]
iex> meta.has_next_page?
true
iex> end_cursor = meta.end_cursor
"g3QAAAACZAAEbmFtZW0AAAAFSGFycnlkAAdzcGVjaWVzbQAAAAhDLiBsdXB1cw=="
iex> params = %{first: 2, after: end_cursor, order_by: [:species, :name]}
iex> {:ok, {results, meta}} = Flop.validate_and_run(Flop.Pet, params)
iex> Enum.map(results, & &1.name)
["Maggie"]
iex> meta.has_next_page?
false
iex>
iex> # backward (last/before)
iex>
iex> params = %{last: 2, order_by: [:species, :name]}
iex> {:ok, {results, meta}} = Flop.validate_and_run(Flop.Pet, params)
iex> Enum.map(results, & &1.name)
["Harry", "Maggie"]
iex> meta.has_previous_page?
true
iex> start_cursor = meta.start_cursor
"g3QAAAACZAAEbmFtZW0AAAAFSGFycnlkAAdzcGVjaWVzbQAAAAhDLiBsdXB1cw=="
iex> params = %{last: 2, before: start_cursor, order_by: [:species, :name]}
iex> {:ok, {results, meta}} = Flop.validate_and_run(Flop.Pet, params)
iex> Enum.map(results, & &1.name)
["Patty"]
iex> meta.has_previous_page?
false
By default, it is assumed that the query result is a list of maps or structs.
If your query returns a different data structure, you can pass the
`:get_cursor_value_func` option to retrieve the cursor values. See
`t:Flop.option/0` and `Flop.Cursor` for more information.
You can restrict which pagination types are available. See `t:Flop.option/0`
for details.
## Filters
Filters can be passed as a list of maps. It is recommended to define the
filterable fields for a schema using `Flop.Schema`.
iex> Flop.Repo.insert_all(Flop.Pet, [
...> %{name: "Harry", age: 4, species: "C. lupus"},
...> %{name: "Maggie", age: 1, species: "O. cuniculus"},
...> %{name: "Patty", age: 2, species: "C. aegagrus"}
...> ])
iex>
iex> params = %{filters: [%{field: :name, op: :=~, value: "Mag"}]}
iex> {:ok, {results, meta}} = Flop.validate_and_run(Flop.Pet, params)
iex> meta.total_count
1
iex> [pet] = results
iex> pet.name
"Maggie"
See `t:Flop.Filter.op/0` for a list of all available filter operators.
## GraphQL and Relay
The parameters used for cursor-based pagination follow the Relay
specification, so you can just pass the arguments you get from the client on
to Flop.
`Flop.Relay` can convert the query results returned by
`Flop.validate_and_run/3` into `Edges` and `PageInfo` formats required for
Relay connections.
For example, if you have a context module like this:
defmodule MyApp.Flora do
import Ecto.Query, warn: false
alias MyApp.Flora.Plant
def list_plants_by_continent(%Continent{} = continent, %{} = args) do
Plant
|> where(continent_id: ^continent.id)
|> Flop.validate_and_run(args, for: Plant)
end
end
Then your Absinthe resolver for the `plants` connection may look something
like this:
def list_plants(args, %{source: %Continent{} = continent}) do
with {:ok, result} <-
Flora.list_plants_by_continent(continent, args) do
{:ok, Flop.Relay.connection_from_result(result)}
end
end
"""
use Ecto.Schema
import Ecto.Changeset
alias Ecto.Changeset
alias Ecto.Query
alias Ecto.Queryable
alias Flop.Cursor
alias Flop.CustomTypes.ExistingAtom
alias Flop.CustomTypes.OrderDirection
alias Flop.Filter
alias Flop.Meta
require Ecto.Query
require Logger
@typedoc """
Options that can be passed to most of the functions or that can be set via
the application environment.
- `:for` - The schema module to be used for validation. `Flop.Schema` must be
derived for the given module. This option is optional and can not be set
globally. If it is not set, schema specific validation will be omitted. Used
by the validation functions and passed on by any function calling a
validation function.
- `:default_limit` - Sets a global default limit for queries that is used if
no default limit is set for a schema and no limit is set in the parameters.
Can only be set in the application configuration.
- `:filtering` (boolean) - Can be set to `false` to silently ignore filter
parameters.
- `:get_cursor_value_func` - 2-arity function used to get the (unencoded)
cursor value from a record. Only used with cursor-based pagination. The
first argument is the record, the second argument is the list of fields used
in the `ORDER BY` clause. Needs to return a map with the order fields as
keys and the record values of these fields as values. Defaults to
`Flop.Cursor.get_cursor_from_map/2`.
- `:max_limit` - Sets a global maximum limit for queries that is used if no
maximum limit is set for a schema. Can only be set in the application
configuration.
- `:pagination_types` - Defines which pagination types are allowed. Passing
parameters for other pagination types will result in a validation error. By
default, all pagination types are allowed. See also
`t:Flop.pagination_type/0`. Note that an offset value of `0` and a limit
are still accepted even if offset-based pagination is disabled.
- `:ordering` (boolean) - Can be set to `false` to silently ignore order
parameters. Default orders are still applied.
- `:repo` - The Ecto Repo module to use for the database query. Used by all
functions that execute a database query.
All options can be passed directly to the functions. Some of the options can
be set on a schema level via `Flop.Schema`.
All options except `:for` can be set globally via the application environment.
import Config
config :flop,
default_limit: 25,
filtering: false,
get_cursor_value_func: &MyApp.Repo.get_cursor_value/2,
max_limit: 100,
ordering: false,
pagination_types: [:first, :last, :page],
repo: MyApp.Repo
The look up order is:
1. option passed to function
2. option set for schema using `Flop.Schema` (only `:max_limit`,
`:default_limit` and `:pagination_types`)
3. option set in global config (except `:for`)
4. default value (only `:get_cursor_value_func`)
"""
@type option ::
{:for, module}
| {:default_limit, pos_integer}
| {:filtering, boolean}
| {:get_cursor_value_func, (any, [atom] -> map)}
| {:max_limit, pos_integer}
| {:ordering, boolean}
| {:pagination_types, [pagination_type()]}
| {:repo, module}
@typedoc """
Represents the supported order direction values.
"""
@type order_direction ::
:asc
| :asc_nulls_first
| :asc_nulls_last
| :desc
| :desc_nulls_first
| :desc_nulls_last
@typedoc """
Represents the pagination type.
- `:offset` - pagination using the `offset` and `limit` parameters
- `:page` - pagination using the `page` and `page_size` parameters
- `:first` - cursor-based pagination using the `first` and `after` parameters
- `:last` - cursor-based pagination using the `last` and `before` parameters
"""
@type pagination_type :: :offset | :page | :first | :last
@typedoc """
Represents the query parameters for filtering, ordering and pagination.
### Fields
- `after`: Used for cursor-based pagination. Must be used with `first` or a
default limit.
- `before`: Used for cursor-based pagination. Must be used with `last` or a
default limit.
- `limit`, `offset`: Used for offset-based pagination.
- `first`: Used for cursor-based pagination. Can be used alone to begin
pagination or together with `after`.
- `last`: Used for cursor-based pagination.
- `page`, `page_size`: Used for offset-based pagination as an alternative to
`offset` and `limit`.
- `order_by`: List of fields to order by. Fields can be restricted by
deriving `Flop.Schema` in your Ecto schema.
- `order_directions`: List of order directions applied to the fields defined
in `order_by`. If empty or the list is shorter than the `order_by` list,
`:asc` will be used as a default for each missing order direction.
- `filters`: List of filters, see `t:Flop.Filter.t/0`.
"""
@type t :: %__MODULE__{
after: String.t() | nil,
before: String.t() | nil,
filters: [Filter.t()] | nil,
first: pos_integer | nil,
last: pos_integer | nil,
limit: pos_integer | nil,
offset: non_neg_integer | nil,
order_by: [atom | String.t()] | nil,
order_directions: [order_direction()] | nil,
page: pos_integer | nil,
page_size: pos_integer | nil
}
@primary_key false
embedded_schema do
field :after, :string
field :before, :string
field :first, :integer
field :last, :integer
field :limit, :integer
field :offset, :integer
field :order_by, {:array, ExistingAtom}
field :order_directions, {:array, OrderDirection}
field :page, :integer
field :page_size, :integer
embeds_many :filters, Filter
end
@doc """
Adds clauses for filtering, ordering and pagination to a
`t:Ecto.Queryable.t/0`.
The parameters are represented by the `t:Flop.t/0` type. Any `nil` values
will be ignored.
## Examples
iex> flop = %Flop{limit: 10, offset: 19}
iex> Flop.query(Flop.Pet, flop)
#Ecto.Query<from p0 in Flop.Pet, limit: ^10, offset: ^19>
Or enhance an already defined query:
iex> require Ecto.Query
iex> flop = %Flop{limit: 10}
iex> Flop.Pet |> Ecto.Query.where(species: "dog") |> Flop.query(flop)
#Ecto.Query<from p0 in Flop.Pet, where: p0.species == \"dog\", limit: ^10>
Note that when using cursor-based pagination, the applied limit will be
`first + 1` or `last + 1`. The extra record is removed by `Flop.run/3`.
"""
@spec query(Queryable.t(), Flop.t()) :: Queryable.t()
def query(q, flop) do
q
|> filter(flop)
|> order_by(flop)
|> paginate(flop)
end
@doc """
Applies the given Flop to the given queryable and returns all matching
entries.
iex> Flop.all(Flop.Pet, %Flop{}, repo: Flop.Repo)
[]
You can also configure a default repo in your config files:
config :flop, repo: MyApp.Repo
This allows you to omit the third argument:
iex> Flop.all(Flop.Pet, %Flop{})
[]
Note that when using cursor-based pagination, the applied limit will be
`first + 1` or `last + 1`. The extra record is removed by `Flop.run/3`, but
not by this function.
"""
@doc since: "0.6.0"
@spec all(Queryable.t(), Flop.t(), [option()]) :: [any]
def all(q, flop, opts \\ []) do
repo = opts[:repo] || default_repo() || raise no_repo_error("all")
apply(repo, :all, [query(q, flop)])
end
@doc """
Applies the given Flop to the given queryable, retrieves the data and the
meta data.
This function does not validate the given flop parameters. You can validate
the parameters with `Flop.validate/2` or `Flop.validate!/2`, or you can use
`Flop.validate_and_run/3` or `Flop.validate_and_run!/3` instead of this
function.
iex> {data, meta} = Flop.run(Flop.Pet, %Flop{})
iex> data == []
true
iex> match?(%Flop.Meta{}, meta)
true
"""
@doc since: "0.6.0"
@spec run(Queryable.t(), Flop.t(), [option()]) :: {[any], Meta.t()}
def run(q, flop, opts \\ [])
def run(
q,
%Flop{
before: nil,
first: first,
last: nil
} = flop,
opts
)
when is_integer(first) do
results = all(q, flop, opts)
{Enum.take(results, first), meta(results, flop, opts)}
end
def run(
q,
%Flop{
after: nil,
first: nil,
last: last
} = flop,
opts
)
when is_integer(last) do
results = all(q, flop, opts)
page_data =
results
|> Enum.take(last)
|> Enum.reverse()
{page_data, meta(results, flop, opts)}
end
def run(q, flop, opts) do
{all(q, flop, opts), meta(q, flop, opts)}
end
@doc """
Validates the given flop parameters and retrieves the data and meta data on
success.
iex> {:ok, {[], %Flop.Meta{}}} =
...> Flop.validate_and_run(Flop.Pet, %Flop{}, for: Flop.Pet)
iex> {:error, %Ecto.Changeset{} = changeset} =
...> Flop.validate_and_run(Flop.Pet, %Flop{limit: -1})
iex> changeset.errors
[
limit: {"must be greater than %{number}",
[validation: :number, kind: :greater_than, number: 0]}
]
## Options
- `for`: Passed to `Flop.validate/2`.
- `repo`: The `Ecto.Repo` module. Required if no default repo is configured.
- `get_cursor_value_func`: An arity-2 function to be used to retrieve an
unencoded cursor value from a query result item and the `order_by` fields.
Defaults to `Flop.Cursor.get_cursor_from_map/2`.
"""
@doc since: "0.6.0"
@spec validate_and_run(Queryable.t(), map | Flop.t(), [option()]) ::
{:ok, {[any], Meta.t()}} | {:error, Changeset.t()}
def validate_and_run(q, flop, opts \\ []) do
validate_opts = Keyword.take(opts, [:for, :pagination_types])
with {:ok, flop} <- validate(flop, validate_opts) do
{:ok, run(q, flop, opts)}
end
end
@doc """
Same as `Flop.validate_and_run/3`, but raises on error.
"""
@doc since: "0.6.0"
@spec validate_and_run!(Queryable.t(), map | Flop.t(), [option()]) ::
{[any], Meta.t()}
def validate_and_run!(q, flop, opts \\ []) do
validate_opts = Keyword.take(opts, [:for, :pagination_types])
flop = validate!(flop, validate_opts)
run(q, flop, opts)
end
@doc """
Returns the total count of entries matching the filter conditions of the
Flop.
The pagination and ordering options are disregarded.
iex> Flop.count(Flop.Pet, %Flop{}, repo: Flop.Repo)
0
You can also configure a default repo in your config files:
config :flop, repo: MyApp.Repo
This allows you to omit the third argument:
iex> Flop.count(Flop.Pet, %Flop{})
0
"""
@doc since: "0.6.0"
@spec count(Queryable.t(), Flop.t(), [option()]) :: non_neg_integer
def count(q, flop, opts \\ []) do
repo = opts[:repo] || default_repo() || raise no_repo_error("count")
apply(repo, :aggregate, [filter(q, flop), :count])
end
@doc """
Returns meta information for the given query and flop that can be used for
building the pagination links.
iex> Flop.meta(Flop.Pet, %Flop{limit: 10}, repo: Flop.Repo)
%Flop.Meta{
current_offset: 0,
current_page: 1,
end_cursor: nil,
flop: %Flop{limit: 10},
has_next_page?: false,
has_previous_page?: false,
next_offset: nil,
next_page: nil,
page_size: 10,
previous_offset: nil,
previous_page: nil,
start_cursor: nil,
total_count: 0,
total_pages: 0
}
The function returns both the current offset and the current page, regardless
of the pagination type. If the offset lies in between pages, the current page
number is rounded up. This means that it is possible that the values for
`current_page` and `next_page` can be identical. This can only occur if you
use offset/limit based pagination with arbitrary offsets, but in that case,
you will use the `previous_offset`, `current_offset` and `next_offset` values
to render the pagination links anyway, so this shouldn't be a problem.
Unless cursor-based pagination is used, this function will run a query to
get the total count of matching records.
"""
@doc since: "0.6.0"
@spec meta(Queryable.t() | [any], Flop.t(), [option()]) :: Meta.t()
def meta(query_or_results, flop, opts \\ [])
def meta(
results,
%Flop{
first: first,
order_by: order_by,
before: nil,
last: nil
} = flop,
opts
)
when is_list(results) and is_integer(first) do
{start_cursor, end_cursor} =
results
|> Enum.take(first)
|> Cursor.get_cursors(order_by, opts)
%Meta{
flop: flop,
start_cursor: start_cursor,
end_cursor: end_cursor,
has_next_page?: length(results) > first,
has_previous_page?: false,
page_size: first
}
end
def meta(
results,
%Flop{
after: nil,
first: nil,
order_by: order_by,
last: last
} = flop,
opts
)
when is_list(results) and is_integer(last) do
{start_cursor, end_cursor} =
results
|> Enum.take(last)
|> Enum.reverse()
|> Cursor.get_cursors(order_by, opts)
%Meta{
flop: flop,
start_cursor: start_cursor,
end_cursor: end_cursor,
has_next_page?: false,
has_previous_page?: length(results) > last,
page_size: last
}
end
def meta(q, flop, opts) do
repo = opts[:repo] || default_repo() || raise no_repo_error("meta")
total_count = count(q, flop, repo: repo)
page_size = flop.page_size || flop.limit
total_pages = get_total_pages(total_count, page_size)
current_offset = get_current_offset(flop)
current_page = get_current_page(flop, total_pages)
{has_previous_page?, previous_offset, previous_page} =
get_previous(current_offset, current_page, page_size)
{has_next_page?, next_offset, next_page} =
get_next(
current_offset,
current_page,
page_size,
total_count,
total_pages
)
%Meta{
current_offset: current_offset,
current_page: current_page,
flop: flop,
has_next_page?: has_next_page?,
has_previous_page?: has_previous_page?,
next_offset: next_offset,
next_page: next_page,
page_size: page_size,
previous_offset: previous_offset,
previous_page: previous_page,
total_count: total_count,
total_pages: total_pages
}
end
defp get_previous(offset, current_page, limit) do
has_previous? = offset > 0
previous_offset = if has_previous?, do: max(0, offset - limit), else: nil
previous_page = if current_page > 1, do: current_page - 1, else: nil
{has_previous?, previous_offset, previous_page}
end
defp get_next(_, _, nil = _page_size, _, _) do
{false, nil, nil}
end
defp get_next(current_offset, _, page_size, total_count, _)
when current_offset + page_size >= total_count do
{false, nil, nil}
end
defp get_next(current_offset, current_page, page_size, _, total_pages) do
{true, current_offset + page_size, min(total_pages, current_page + 1)}
end
defp get_total_pages(0, _), do: 0
defp get_total_pages(_, nil), do: 1
defp get_total_pages(total_count, limit), do: ceil(total_count / limit)
defp get_current_offset(%Flop{offset: nil, page: nil}), do: 0
defp get_current_offset(%Flop{offset: nil, page: page, page_size: page_size}),
do: (page - 1) * page_size
defp get_current_offset(%Flop{offset: offset}), do: offset
defp get_current_page(%Flop{offset: nil, page: nil}, _), do: 1
defp get_current_page(%Flop{offset: nil, page: page}, _), do: page
defp get_current_page(%Flop{limit: limit, offset: offset, page: nil}, total),
do: min(ceil(offset / limit) + 1, total)
## Ordering
@doc """
Applies the `order_by` and `order_directions` parameters of a `t:Flop.t/0`
to an `t:Ecto.Queryable.t/0`.
Used by `Flop.query/2`.
"""
@spec order_by(Queryable.t(), Flop.t()) :: Queryable.t()
def order_by(q, %Flop{order_by: nil}), do: q
# For backwards cursor pagination
def order_by(
q,
%Flop{
last: last,
order_by: fields,
order_directions: directions,
first: nil,
after: nil,
offset: nil
}
)
when is_integer(last) do
reversed_order =
fields
|> prepare_order(directions)
|> reverse_ordering()
Query.order_by(q, ^reversed_order)
end
def order_by(q, %Flop{order_by: fields, order_directions: directions}) do
Query.order_by(q, ^prepare_order(fields, directions))
end
@spec prepare_order([atom], [order_direction()]) :: [
{order_direction(), atom}
]
defp prepare_order(fields, directions) do
directions = directions || []
field_count = length(fields)
direction_count = length(directions)
directions =
if direction_count < field_count,
do: directions ++ List.duplicate(:asc, field_count - direction_count),
else: directions
Enum.zip(directions, fields)
end
## Pagination
@doc """
Applies the pagination parameters of a `t:Flop.t/0` to an
`t:Ecto.Queryable.t/0`.
The function supports both `offset`/`limit` based pagination and
`page`/`page_size` based pagination.
If you validated the `t:Flop.t/0` with `Flop.validate/1` before, you can be
sure that the given `t:Flop.t/0` only has pagination parameters set for one
pagination method. If you pass an unvalidated `t:Flop.t/0` that has
pagination parameters set for multiple pagination methods, this function
will arbitrarily only apply one of the pagination methods.
Used by `Flop.query/2`.
"""
@spec paginate(Queryable.t(), Flop.t()) :: Queryable.t()
def paginate(q, %Flop{limit: limit, offset: offset})
when (is_integer(limit) and limit >= 1) or
(is_integer(offset) and offset >= 0) do
q
|> limit(limit)
|> offset(offset)
end
def paginate(q, %Flop{page: page, page_size: page_size})
when is_integer(page) and is_integer(page_size) and
page >= 1 and page_size >= 1 do
q
|> limit(page_size)
|> offset((page - 1) * page_size)
end
def paginate(q, %Flop{
first: first,
after: nil,
before: nil,
last: nil,
limit: nil
})
when is_integer(first),
do: limit(q, first + 1)
def paginate(
q,
%Flop{
first: first,
after: after_,
order_by: order_by,
order_directions: order_directions,
before: nil,
last: nil,
limit: nil
}
)
when is_integer(first) do
orderings = prepare_order(order_by, order_directions)
q
|> apply_cursor(after_, orderings)
|> limit(first + 1)
end
def paginate(
q,
%Flop{
last: last,
before: before,
order_by: order_by,
order_directions: order_directions,
first: nil,
after: nil,
limit: nil
}
)
when is_integer(last) do
prepared_order_reversed =
order_by
|> prepare_order(order_directions)
|> reverse_ordering()
q
|> apply_cursor(before, prepared_order_reversed)
|> limit(last + 1)
end
def paginate(q, _), do: q
## Offset/limit pagination
@spec limit(Queryable.t(), pos_integer | nil) :: Queryable.t()
defp limit(q, nil), do: q
defp limit(q, limit), do: Query.limit(q, ^limit)
@spec offset(Queryable.t(), non_neg_integer | nil) :: Queryable.t()
defp offset(q, nil), do: q
defp offset(q, offset), do: Query.offset(q, ^offset)
## Cursor pagination helpers
@spec apply_cursor(Queryable.t(), map() | nil, [order_direction()]) ::
Queryable.t()
defp apply_cursor(q, nil, _), do: q
defp apply_cursor(q, cursor, ordering) do
cursor = Cursor.decode!(cursor)
where_dynamic = cursor_dynamic(ordering, cursor)
Query.where(q, ^where_dynamic)
end
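# Builds a dynamic WHERE clause comparing the ordered fields
# lexicographically against the decoded cursor values, so only records
# strictly after (or before, for descending fields) the cursor row match.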
defp cursor_dynamic([], _), do: nil
defp cursor_dynamic([{direction, field}], cursor) do
case direction do
dir when dir in [:asc, :asc_nulls_first, :asc_nulls_last] ->
Query.dynamic([r], field(r, ^field) > ^cursor[field])
dir when dir in [:desc, :desc_nulls_first, :desc_nulls_last] ->
Query.dynamic([r], field(r, ^field) < ^cursor[field])
end
end
defp cursor_dynamic([{direction, field} | [{_, _} | _] = tail], cursor) do
field_cursor = cursor[field]
case direction do
dir when dir in [:asc, :asc_nulls_first, :asc_nulls_last] ->
Query.dynamic(
[r],
field(r, ^field) >= ^field_cursor and
(field(r, ^field) > ^field_cursor or ^cursor_dynamic(tail, cursor))
)
dir when dir in [:desc, :desc_nulls_first, :desc_nulls_last] ->
Query.dynamic(
[r],
field(r, ^field) <= ^field_cursor and
(field(r, ^field) < ^field_cursor or ^cursor_dynamic(tail, cursor))
)
end
end
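  # The recursion above builds a lexicographic row comparison. For the ordering
  # [asc: :name, asc: :id] and the decoded cursor %{name: "Bob", id: 7}, the
  # generated predicate is equivalent to:
  #
  #     name >= "Bob" and (name > "Bob" or id > 7)
  #
  # which matches exactly the rows that sort after the cursor in (name, id)
  # order.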
@spec reverse_ordering([order_direction()]) :: [order_direction()]
defp reverse_ordering(order_directions) do
Enum.map(order_directions, fn
{:desc, field} -> {:asc, field}
{:desc_nulls_last, field} -> {:asc_nulls_first, field}
{:desc_nulls_first, field} -> {:asc_nulls_last, field}
{:asc, field} -> {:desc, field}
{:asc_nulls_last, field} -> {:desc_nulls_first, field}
{:asc_nulls_first, field} -> {:desc_nulls_last, field}
end)
end
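  # Backward (last/before) pagination reuses the forward cursor logic by
  # flipping every direction first, e.g. [desc: :name, asc: :id] becomes
  # [asc: :name, desc: :id].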
## Filter
@doc """
Applies the `filter` parameter of a `t:Flop.t/0` to an `t:Ecto.Queryable.t/0`.
Used by `Flop.query/2`.
"""
@spec filter(Queryable.t(), Flop.t()) :: Queryable.t()
def filter(q, %Flop{filters: nil}), do: q
def filter(q, %Flop{filters: []}), do: q
def filter(q, %Flop{filters: filters}) when is_list(filters) do
Enum.reduce(filters, q, &filter(&2, &1))
end
def filter(_, %Filter{field: field, op: _, value: value})
when is_nil(field) or is_nil(value) do
raise ArgumentError
end
def filter(q, %Filter{field: field, op: :==, value: value}),
do: Query.where(q, ^[{field, value}])
def filter(q, %Filter{field: field, op: :!=, value: value}),
do: Query.where(q, [r], field(r, ^field) != ^value)
def filter(q, %Filter{field: field, op: :=~, value: value}) do
query_value = "%#{value}%"
Query.where(q, [r], ilike(field(r, ^field), ^query_value))
end
def filter(q, %Filter{field: field, op: :>=, value: value}),
do: Query.where(q, [r], field(r, ^field) >= ^value)
def filter(q, %Filter{field: field, op: :<=, value: value}),
do: Query.where(q, [r], field(r, ^field) <= ^value)
def filter(q, %Filter{field: field, op: :>, value: value}),
do: Query.where(q, [r], field(r, ^field) > ^value)
def filter(q, %Filter{field: field, op: :<, value: value}),
do: Query.where(q, [r], field(r, ^field) < ^value)
def filter(q, %Filter{field: field, op: :in, value: value}),
do: Query.where(q, [r], field(r, ^field) in ^value)
def filter(q, %Filter{field: field, op: :like, value: value}) do
query_value = "%#{value}%"
Query.where(q, [r], like(field(r, ^field), ^query_value))
end
def filter(q, %Filter{field: field, op: :like_and, value: value}) do
query_values = split_search_text(value)
dynamic =
Enum.reduce(query_values, Query.dynamic(true), fn value, dynamic ->
Query.dynamic([r], ^dynamic and like(field(r, ^field), ^value))
end)
Query.where(q, [r], ^dynamic)
end
def filter(q, %Filter{field: field, op: :like_or, value: value}) do
query_values = split_search_text(value)
dynamic =
Enum.reduce(query_values, Query.dynamic(false), fn value, dynamic ->
Query.dynamic([r], ^dynamic or like(field(r, ^field), ^value))
end)
Query.where(q, [r], ^dynamic)
end
def filter(q, %Filter{field: field, op: :ilike, value: value}) do
query_value = "%#{value}%"
Query.where(q, [r], ilike(field(r, ^field), ^query_value))
end
def filter(q, %Filter{field: field, op: :ilike_and, value: value}) do
query_values = split_search_text(value)
dynamic =
Enum.reduce(query_values, Query.dynamic(true), fn value, dynamic ->
Query.dynamic([r], ^dynamic and ilike(field(r, ^field), ^value))
end)
Query.where(q, [r], ^dynamic)
end
def filter(q, %Filter{field: field, op: :ilike_or, value: value}) do
query_values = split_search_text(value)
dynamic =
Enum.reduce(query_values, Query.dynamic(false), fn value, dynamic ->
Query.dynamic([r], ^dynamic or ilike(field(r, ^field), ^value))
end)
Query.where(q, [r], ^dynamic)
end
defp split_search_text(text) do
text |> String.split() |> Enum.map(&"%#{&1}%")
end
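  # Each whitespace-separated word becomes its own wildcard pattern:
  #
  #     split_search_text("george michael")
  #     #=> ["%george%", "%michael%"]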
## Validation
@doc """
Validates a `t:Flop.t/0`.
## Examples
iex> params = %{"limit" => 10, "offset" => 0, "texture" => "fluffy"}
iex> Flop.validate(params)
{:ok,
%Flop{
filters: [],
limit: 10,
offset: 0,
order_by: nil,
order_directions: nil,
page: nil,
page_size: nil
}}
iex> flop = %Flop{offset: -1}
iex> {:error, changeset} = Flop.validate(flop)
iex> changeset.valid?
false
iex> changeset.errors
[
offset: {"must be greater than or equal to %{number}",
[validation: :number, kind: :greater_than_or_equal_to, number: 0]}
]
It also makes sure that only one pagination method is used.
iex> params = %{limit: 10, offset: 0, page: 5, page_size: 10}
iex> {:error, changeset} = Flop.validate(params)
iex> changeset.valid?
false
iex> changeset.errors
[limit: {"cannot combine multiple pagination types", []}]
If you derived `Flop.Schema` in your Ecto schema to define the filterable
and sortable fields, you can pass the module name to the function to validate
that only allowed fields are used. The function will also apply any default
values set for the schema.
iex> params = %{"order_by" => ["species"]}
iex> {:error, changeset} = Flop.validate(params, for: Flop.Pet)
iex> changeset.valid?
false
iex> [order_by: {msg, [_, {_, enum}]}] = changeset.errors
iex> msg
"has an invalid entry"
iex> enum
[:name, :age]
Note that currently, trying to use an existing field that is not allowed as
seen above will result in the error message `has an invalid entry`, while
trying to use a field name that does not exist in the schema (or more
precisely: a field name that doesn't exist as an atom) will result in
the error message `is invalid`. This might change in the future.
"""
@spec validate(Flop.t() | map, [option()]) ::
{:ok, Flop.t()} | {:error, Changeset.t()}
def validate(flop, opts \\ [])
def validate(%Flop{} = flop, opts) do
flop
|> Map.from_struct()
|> validate(opts)
end
def validate(%{} = params, opts) do
result =
params
|> Flop.Validation.changeset(opts)
|> apply_action(:replace)
case result do
{:ok, _} = r ->
r
{:error, %Changeset{} = changeset} = r ->
Logger.debug("Invalid Flop: #{inspect(changeset)}")
r
end
end
@doc """
Same as `Flop.validate/2`, but raises an `Ecto.InvalidChangesetError` if the
parameters are invalid.
"""
@doc since: "0.5.0"
@spec validate!(Flop.t() | map, [option()]) :: Flop.t()
def validate!(flop, opts \\ []) do
case validate(flop, opts) do
{:ok, flop} ->
flop
{:error, changeset} ->
raise Ecto.InvalidChangesetError, action: :replace, changeset: changeset
end
end
@doc """
Updates the `order_by` and `order_directions` values of a `Flop` struct.
- If the field is not in the current `order_by` value, it will be prepended to
the list. The order direction for the field will be set to `:asc`.
- If the field is already at the front of the `order_by` list, the order
direction will be reversed.
- If the field is already in the list, but not at the front, it will be moved
to the front and the order direction will be set to `:asc`.
## Example
iex> flop = push_order(%Flop{}, :name)
iex> flop.order_by
[:name]
iex> flop.order_directions
[:asc]
iex> flop = push_order(flop, :age)
iex> flop.order_by
[:age, :name]
iex> flop.order_directions
[:asc, :asc]
iex> flop = push_order(flop, :age)
iex> flop.order_by
[:age, :name]
iex> flop.order_directions
[:desc, :asc]
iex> flop = push_order(flop, :species)
iex> flop.order_by
[:species, :age, :name]
iex> flop.order_directions
[:asc, :desc, :asc]
iex> flop = push_order(flop, :age)
iex> flop.order_by
[:age, :species, :name]
iex> flop.order_directions
[:asc, :asc, :asc]
"""
@spec push_order(Flop.t(), atom) :: Flop.t()
@doc since: "0.10.0"
def push_order(
%Flop{order_by: order_by, order_directions: order_directions} = flop,
field
)
when is_atom(field) do
previous_index = get_index(order_by, field)
previous_direction = get_order_direction(order_directions, previous_index)
new_direction = new_order_direction(previous_index, previous_direction)
{order_by, order_directions} =
get_new_order(
order_by,
order_directions,
field,
new_direction,
previous_index
)
%{flop | order_by: order_by, order_directions: order_directions}
end
defp get_index(nil, _field), do: nil
defp get_index(order_by, field), do: Enum.find_index(order_by, &(&1 == field))
defp get_order_direction(_, nil), do: nil
defp get_order_direction(nil, _), do: :asc
defp get_order_direction(directions, index), do: Enum.at(directions, index)
defp new_order_direction(0, :asc), do: :desc
defp new_order_direction(0, :asc_nulls_first), do: :desc_nulls_last
defp new_order_direction(0, :asc_nulls_last), do: :desc_nulls_first
defp new_order_direction(0, :desc), do: :asc
defp new_order_direction(0, :desc_nulls_first), do: :asc_nulls_last
defp new_order_direction(0, :desc_nulls_last), do: :asc_nulls_first
defp new_order_direction(_, _), do: :asc
defp get_new_order(
order_by,
order_directions,
field,
new_direction,
previous_index
) do
{order_by, order_directions} =
if previous_index do
{List.delete_at(order_by, previous_index),
List.delete_at(order_directions, previous_index)}
else
{order_by, order_directions}
end
{[field | order_by || []], [new_direction | order_directions || []]}
end
defp default_repo, do: Application.get_env(:flop, :repo)
# coveralls-ignore-start
defp no_repo_error(function_name),
do: """
No repo specified. You can specify the repo either by passing it
explicitly:
Flop.#{function_name}(MyApp.Item, %Flop{}, repo: MyApp.Repo)
Or you can configure a default repo in your config:
config :flop, repo: MyApp.Repo
"""
# coveralls-ignore-end
end
|
lib/flop.ex
| 0.821223
| 0.572962
|
flop.ex
|
starcoder
|
defmodule HTTPRange.ParseError do
@type t :: %__MODULE__{part: atom, subject: binary | nil, value: binary}
defexception [:part, :subject, :value]
def message(%{part: :ranges, subject: nil, value: value}) do
"expected a range set to be defined in #{inspect(value)}"
end
def message(%{part: :ranges, subject: subject, value: value}) do
"invalid range specification #{inspect(subject)} in #{inspect(value)}"
end
def message(%{part: :unit, subject: nil, value: value}) do
"expected a unit to be defined in #{inspect(value)}"
end
def message(%{value: value}) do
"invalid header value #{inspect(value)}"
end
end
defmodule HTTPRange.Parser do
alias HTTPRange.{ParseError, RangeSpec, Range}
@type range :: Range.t()
@spec parse(binary) :: {:ok, RangeSpec.t()} | {:error, ParseError.t()}
def parse(input) when is_binary(input) do
case String.split(input, "=") do
[_, ""] ->
{:error, %ParseError{part: :ranges, subject: nil, value: input}}
["", _] ->
{:error, %ParseError{part: :unit, subject: nil, value: input}}
[unit, ranges] ->
ranges
|> String.split(",")
|> parse_ranges(%RangeSpec{unit: unit})
|> case do
{:ok, range} ->
{:ok, range}
{:error, range} ->
{:error, %ParseError{part: :ranges, subject: range, value: input}}
end
      _ ->
        # No single "=" separator: the header value as a whole is malformed.
        {:error, %ParseError{part: nil, subject: nil, value: input}}
    end
end
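  # Informal example (the exact RangeSpec shape depends on
  # RangeSpec.add_range/2, which is not shown here):
  #
  #     parse("bytes=0-499,500-")
  #     #=> {:ok, %RangeSpec{unit: "bytes", ...}} with ranges covering
  #     #   %Range{first: 0, last: 499} and the open-ended %Range{first: 500}
  #
  #     parse("bytes=")
  #     #=> {:error, %ParseError{part: :ranges, subject: nil, value: "bytes="}}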
@spec parse_ranges([binary], RangeSpec.t()) :: {:ok, RangeSpec.t()} | {:error, binary}
defp parse_ranges([], %RangeSpec{} = header), do: {:ok, header}
defp parse_ranges([h | t], %RangeSpec{} = spec) do
with {:ok, range} <- parse_range(h) do
parse_ranges(t, RangeSpec.add_range(spec, range))
end
end
@spec parse_range(binary) :: {:ok, range} | {:error, binary}
defp parse_range(candidate) do
case String.split(candidate, "-") do
["", length] ->
case parse_fragment(length) do
{:ok, parsed} -> {:ok, %Range{first: -parsed}}
:error -> {:error, candidate}
end
[first, ""] ->
case parse_fragment(first) do
{:ok, parsed} -> {:ok, %Range{first: parsed}}
:error -> {:error, candidate}
end
[first, last] ->
with {:ok, parseda} <- parse_fragment(first),
{:ok, parsedb} <- parse_fragment(last) do
{:ok, %Range{first: parseda, last: parsedb}}
else
:error -> {:error, candidate}
end
_ ->
        # Return just the failing candidate; the full header value needed to
        # construct a ParseError struct is only available in the caller.
{:error, candidate}
end
end
@spec parse_fragment(binary) :: {:ok, integer} | :error
defp parse_fragment(candidate) do
case Integer.parse(candidate) do
{parsed, ""} -> {:ok, parsed}
_ -> :error
end
end
end
|
lib/http_range_parser/parser.ex
| 0.87079
| 0.63114
|
parser.ex
|
starcoder
|
defmodule Bf2nasm.Optimizer.PassTwo do
def pattern(ast) do
pattern([], ast)
end
def pattern(processed, []) do
processed
end
def pattern(processed, [inner | tail]) when is_list(inner) do
cond do
      is_closed(inner) ->
        new_inner = optimize_closed(inner)
        pattern(processed, new_inner ++ tail)
      true ->
        new_inner = pattern(inner)
        pattern(processed ++ [new_inner], tail)
end
end
def pattern(processed, [head | tail]) do
    pattern(processed ++ [head], tail)
end
defp is_closed(inner) do
is_closed(inner, 0, 0)
end
def is_closed([{:inc, n, _pos}|tail], 0, inc) do
is_closed(tail, 0, inc+n)
end
def is_closed([{:inc, _n, _pos}|tail], pointer_pos, inc) do
is_closed(tail, pointer_pos, inc)
end
def is_closed([{:incptr, n, _pos}|tail], pointer_pos, inc) do
is_closed(tail, pointer_pos+n, inc)
end
def is_closed([], 0, -1) do
true
end
def is_closed(_, _, _) do
false
end
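  # Example: the body of the Brainfuck loop "[->+<]" arrives as (informally)
  # [{:inc, -1, pos}, {:incptr, 1, pos}, {:inc, 1, pos}, {:incptr, -1, pos}].
  # It is "closed" because the pointer returns to offset 0 and cell 0 nets -1,
  # so the loop runs exactly `cell` times and can be rewritten without looping.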
def optimize_closed(inner) do
env = Map.new()
optimize_closed(inner, env, 0)
end
def optimize_closed(list = [{_cmd, _args, pos} | _], env, offset) do
optimize_closed(list, env, offset, pos)
end
def optimize_closed([{:inc, n, _pos} | tail], env, offset, pos) do
delta = Map.get(env, offset, 0)
env = Map.put(env, offset, delta+n)
optimize_closed(tail, env, offset, pos)
end
def optimize_closed([{:incptr, n, _pos} | tail], env, offset, pos) do
optimize_closed(tail, env, offset+n, pos)
end
def optimize_closed([], env = %{0 => -1}, 0, pos) do
(env
|> Map.to_list()
|> Enum.with_index()
|> Enum.flat_map(fn x ->
env_to_instr(x, pos)
end)
    ) ++ [{:set, 0, pos}]
end
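  # For the "[->+<]" example above, the loop collapses to
  # [{:add_value_to, 1, pos}, {:set, 0, pos}]: add the current cell into the
  # cell at offset 1, then zero the current cell.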
defp env_to_instr({{0, -1}, _}, _pos) do
[]
end
defp env_to_instr({{offset, 1}, _}, pos) do
[{:add_value_to, offset, pos}]
end
defp env_to_instr({{offset, -1}, _}, pos) do
[{:sub_value_to, offset, pos}]
end
defp env_to_instr({{offset, n}, i}, pos) do
[{:add_multiple_of_value_to, {offset, n, rem(i,2)}, pos}]
end
end
|
lib/optimizer/pass_two.ex
| 0.509276
| 0.615926
|
pass_two.ex
|
starcoder
|
defmodule ExWareki.Number do
@moduledoc """
Number module provides handlers of Japanese-formatted-number (kansuji) string.
"""
@doc """
parse/1 converts kansuji(kanji-number) into arabic numbers
## Examples
iex> ExWareki.Number.parse(1234567890)
{:ok, 1234567890}
iex> ExWareki.Number.parse(012)
{:ok, 12}
"""
def parse(n) when is_number(n), do: {:ok, n}
@doc """
parse/1 converts kansuji(kanji-number) into arabic numbers
## Examples
iex> ExWareki.Number.parse("1234567890")
{:ok, 1234567890}
iex> ExWareki.Number.parse("012")
      {:ok, 12}
"""
def parse(str_n) do
    case String.match?(str_n, ~r/^[0-9]+$/) do
true ->
{:ok, String.to_integer(str_n)}
false ->
kansuji2arabic(str_n)
end
end
def parse!(v) do
case parse(v) do
{:ok, r} -> r
{:error, _} -> raise "cannot parse value to number: #{v}"
end
end
def kansuji2arabic("零"), do: {:ok, 0}
def kansuji2arabic("元"), do: {:ok, 1}
def kansuji2arabic("一"), do: {:ok, 1}
def kansuji2arabic("壱"), do: {:ok, 1}
def kansuji2arabic("二"), do: {:ok, 2}
def kansuji2arabic("弐"), do: {:ok, 2}
def kansuji2arabic("三"), do: {:ok, 3}
def kansuji2arabic("参"), do: {:ok, 3}
def kansuji2arabic("四"), do: {:ok, 4}
def kansuji2arabic("五"), do: {:ok, 5}
def kansuji2arabic("六"), do: {:ok, 6}
def kansuji2arabic("七"), do: {:ok, 7}
def kansuji2arabic("八"), do: {:ok, 8}
def kansuji2arabic("九"), do: {:ok, 9}
def kansuji2arabic("十"), do: {:ok, 10}
def kansuji2arabic("拾"), do: {:ok, 10}
def kansuji2arabic("百"), do: {:ok, 100}
def kansuji2arabic("千"), do: {:ok, 1000}
def kansuji2arabic("万"), do: {:ok, 10000}
def kansuji2arabic("萬"), do: {:ok, 10000}
def kansuji2arabic("億"), do: {:ok, 100000000}
@doc """
kansuji2arabic/1 converts kansuji(kanji-number) into arabic numbers
## Examples
iex> ExWareki.Number.kansuji2arabic("五億四千六百三十一万九千八百二十一")
{:ok, 546319821}
iex> ExWareki.Number.kansuji2arabic("十三")
{:ok, 13}
iex> ExWareki.Number.kansuji2arabic("二千十九")
{:ok, 2019}
iex> ExWareki.Number.kansuji2arabic("十万百十")
{:ok, 100110}
"""
def kansuji2arabic(kansuji) do
n =
String.codepoints(kansuji)
|> Enum.map(fn x -> kansuji2arabic!(x) end)
|> reduce_numbers()
{:ok, n}
end
defp reduce_numbers(numbers) do
numbers
|> Enum.reduce(%{stack: [], sum: 0, cont_unit: false}, fn (v, acc) -> reduce_base(v, acc) end)
|> sum_up()
end
defp sum_up(%{stack: stack, sum: sum}) do
ds = stack |> Enum.sum
sum + ds
end
defp reduce_base(v, %{stack: stack, sum: sum, cont_unit: cont_unit}) do
cond do
v >= 10000 ->
ds = stack |> Enum.sum()
%{stack: [], sum: sum + ds * v, cont_unit: false}
v >= 10 ->
case length(stack) do
0 ->
%{stack: [v], sum: sum, cont_unit: true}
_ ->
[head | tail] = stack
case cont_unit do
true ->
%{stack: [v + head | tail], sum: sum, cont_unit: true}
false ->
%{stack: [head * v] ++ tail, sum: sum, cont_unit: true}
end
end
true ->
%{stack: [v] ++ stack, sum: sum, cont_unit: false}
end
end
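  # Walkthrough for "二千十九" (tokens [2, 1000, 10, 9]): 2 is stacked; 1000
  # multiplies the stack head to 2000 and sets cont_unit; 10 is then added to
  # the head (2010) because a unit just ran; 9 is stacked separately; sum_up/1
  # adds the remaining stack to yield 2019.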
@doc """
kansuji2arabic!/1 converts kansuji(kanji-number) into arabic numbers
## Examples
iex> ExWareki.Number.kansuji2arabic!("五億四千六百三十一万九千八百二十一")
546319821
"""
def kansuji2arabic!(kansuji) do
case kansuji2arabic(kansuji) do
{:ok, n} -> n
{:error, _} -> raise "cannot convert kansuji(kanji-number) into arabic numbers"
end
end
end
|
lib/ex_wareki/number.ex
| 0.734405
| 0.474631
|
number.ex
|
starcoder
|
defmodule AshPhoenix.LiveView do
@moduledoc """
Utilities for keeping ash query results up to date in a live view.
"""
@type socket :: term
@type assign :: atom
@type assigns :: map
@type topic :: String.t()
@type liveness_options :: Keyword.t()
require Logger
@opts [
subscribe: [
type: {:custom, __MODULE__, :subscriptions, []},
doc: "A topic or list of topics that should cause this data to update."
],
refetch?: [
type: :boolean,
doc: "A boolean flag indicating whether a refetch is allowed to happen. Defaults to `true`"
],
results: [
type: {:in, [:keep, :lose]},
doc:
"For list and page queries, by default the records shown are never changed (unless the page changes)",
default: :keep
],
load_until_connected?: [
type: :boolean,
doc:
"If the socket is not connected, then the value of the provided assign is set to `:loading`. Has no effect if `initial` is provided."
],
initial: [
type: :any,
doc: "Results to use instead of running the query immediately."
],
api: [
type: :atom,
doc:
"For paginated views, in order to use `change_page/3`, you must set the Api to use for pagination"
],
refetch_interval: [
type: :non_neg_integer,
doc: "An interval (in ms) to periodically refetch the query"
],
refetch_window: [
type: :non_neg_integer,
doc:
"The minimum time (in ms) between refetches, including refetches caused by notifications."
]
]
@doc false
def subscriptions(subscription) when is_binary(subscription), do: {:ok, subscription}
def subscriptions(subscriptions) do
if is_list(subscriptions) and Enum.all?(subscriptions, &is_binary/1) do
{:ok, subscriptions}
else
{:error, "expected subscriptions to be a list of strings, got: #{inspect(subscriptions)}"}
end
end
@doc """
Runs the callback, and stores the information required to keep it live in the socket assigns.
The data will be assigned to the provided key, e.g `keep_live(socket, :me, ...)` would assign the results
to `:me` (accessed as `@me` in the template).
Additionally, you'll need to define a `handle_info/2` callback for your liveview to receive any
notifications, and pass that notification into `handle_live/3`. See `handle_live/3` for more.
## Pagination
To make paginated views convenient, as well as making it possible to keep those views live, Ash does not
simply rerun the query when it gets an update, as that could involve shuffling the records around on the
page. Eventually this will be configurable, but for now, Ash simply adjusts the query to only include the
records that are on the page. If a record would be removed from a page due to a data change, it will simply
be left there. For the best performance, use `keyset` pagination. If you *need* the ability to jump to a
page by number, you'll want to use `offset` pagination, but keep in mind that it performs worse on large
tables.
  To support this, have your callback function accept a second parameter, which will receive the page options (`page_opts`) to use.
## Options:
#{NimbleOptions.docs(@opts)}
  For readable millisecond values, you can use the functions in Erlang's `:timer` module,
  such as `:timer.hours/1`, `:timer.minutes/1`, and `:timer.seconds/1`.
#### refetch_interval
If this option is set, a message is sent as `{:refetch, assign_name, opts}` on that interval.
You can then match on that event, like so:
```
def handle_info({:refetch, assign, opts}, socket) do
{:noreply, handle_live(socket, :refetch, assign, opts)}
end
```
This is the equivalent of `:timer.send_interval(interval, {:refetch, assign, opts})`, so feel free to
roll your own solution if you have complex refetching requirements.
#### refetch_window
Normally, when a pubsub message is received the query is rerun. This option will cause the query to wait at least
this amount of time before doing a refetch. This is accomplished with `Process.send_after/4`, and recording the
last time each query was refetched. For example if a refetch happens at time `0`, and the `refetch_window` is
10,000 ms, we would refetch, and record the time. Then if another refetch should happen 5,000 ms later, we would
look and see that we need to wait another 5,000ms. So we use `Process.send_after/4` to send a
`{:refetch, assign, opts}` message in 5,000ms. The time that a refetch was requested is tracked, so if the
data has since been refetched, it won't be refetched again.
#### Future Plans
One interesting thing here is that, given that we know the scope of data that a resource cares about,
we should be able to make optimizations to this code, to support partial refetches, or even just updating
the data directly. However, this will need to be carefully considered, as the risks involve showing users
data they could be unauthorized to see, or having state in the socket that is inconsistent.
"""
require Ash.Query
@type callback_result :: struct() | list(struct()) | Ash.Page.page() | nil
@type callback :: (socket -> callback_result) | (socket, Keyword.t() | nil -> callback_result)
@spec keep_live(socket, assign, callback, liveness_options) :: socket
def keep_live(socket, assign, callback, opts \\ []) do
opts = NimbleOptions.validate!(opts, @opts)
if opts[:load_until_connected?] && match?(%Phoenix.LiveView.Socket{}, socket) &&
!Phoenix.LiveView.connected?(socket) do
assign(socket, assign, :loading)
else
if opts[:refetch_interval] do
:timer.send_interval(opts[:refetch_interval], {:refetch, assign, []})
end
case socket do
%Phoenix.LiveView.Socket{} ->
if Phoenix.LiveView.connected?(socket) do
for topic <- List.wrap(opts[:subscribe]) do
(opts[:pub_sub] || socket.endpoint).subscribe(topic)
end
end
_ ->
for topic <- List.wrap(opts[:subscribe]) do
(opts[:pub_sub] || socket.endpoint).subscribe(topic)
end
end
live_config = Map.get(socket.assigns, :ash_live_config, %{})
result =
case Keyword.fetch(opts, :initial) do
{:ok, result} ->
mark_page_as_first(result)
:error ->
callback
|> run_callback(socket, nil)
|> mark_page_as_first()
end
this_config = %{
last_fetched_at: System.monotonic_time(:millisecond),
callback: callback,
opts: opts
}
socket
|> assign(assign, result)
|> assign(:ash_live_config, Map.put(live_config, assign, this_config))
end
end
def change_page(socket, assign, target) do
live_config = socket.assigns.ash_live_config
config = Map.get(live_config, assign)
target =
if target in ["prev", "next", "first", "last"] do
String.to_existing_atom(target)
else
case Integer.parse(target) do
{int, ""} ->
int
_ ->
target
end
end
current_page =
case Map.get(socket.assigns, assign) do
{:ok, data} -> data
data -> data
end
unless config.opts[:api] do
raise "Must set api to use change_page/3"
end
new_result = config.opts[:api].page!(current_page, target)
{_query, rerun_opts} = new_result.rerun
new_page_opts = Keyword.merge(config.opts[:page] || [], rerun_opts[:page])
new_opts = Keyword.put(config.opts, :page, new_page_opts)
new_live_config = Map.update!(live_config, assign, &Map.put(&1, :opts, new_opts))
socket
|> assign(assign, new_result)
|> assign(:ash_live_config, new_live_config)
end
def page_from_params(params, default_limit, count? \\ false) do
params = params || %{}
params
|> Map.take(["after", "before", "limit", "offset"])
|> Enum.reject(fn {_, val} -> is_nil(val) || val == "" end)
|> Enum.map(fn {key, value} -> {String.to_existing_atom(key), value} end)
|> Enum.map(fn {key, value} ->
case Integer.parse(value) do
{int, ""} ->
{key, int}
_ ->
{key, value}
end
end)
|> Keyword.put_new(:limit, default_limit)
|> Keyword.put(:count, count? || params["count"] == "true")
end
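  # Example (hypothetical params): with a default limit of 10,
  #
  #     page_from_params(%{"offset" => "20"}, 10)
  #     #=> [count: false, limit: 10, offset: 20]
  #
  # String values are parsed to integers where possible, and :limit is only
  # added when the params did not already set one.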
def page_params(%Ash.Page.Keyset{} = keyset) do
cond do
keyset.after ->
[after: keyset.after]
keyset.before ->
[before: keyset.before]
true ->
[]
end
|> set_count(keyset)
end
def page_params(%Ash.Page.Offset{} = offset) do
if offset.offset do
[limit: offset.limit, offset: offset.offset]
else
[limit: offset.limit]
end
|> set_count(offset)
end
defp set_count(params, %{count: count}) when not is_nil(count) do
Keyword.put(params, :count, true)
end
defp set_count(params, _), do: params
def prev_page?(page) do
page_link_params(page, "prev") != :invalid
end
def next_page?(page) do
page_link_params(page, "next") != :invalid
end
def page_link_params(_, "first") do
[]
end
def page_link_params(%{__first__?: true}, "prev"), do: :invalid
def page_link_params(%Ash.Page.Offset{more?: false}, "next"), do: :invalid
def page_link_params(%Ash.Page.Keyset{more?: false, after: nil, before: before}, "prev")
when not is_nil(before) do
:invalid
end
def page_link_params(%Ash.Page.Keyset{more?: false, after: after_keyset, before: nil}, "next")
when not is_nil(after_keyset) do
:invalid
end
def page_link_params(%Ash.Page.Offset{} = offset, "next") do
cond do
offset.count && offset.offset + offset.limit >= offset.count ->
:invalid
Enum.count(offset.results) < offset.limit ->
:invalid
true ->
[limit: offset.limit, offset: (offset.offset || 0) + offset.limit]
end
end
def page_link_params(%Ash.Page.Offset{offset: 0}, "prev") do
:invalid
end
def page_link_params(%Ash.Page.Offset{} = offset, "prev") do
[limit: offset.limit, offset: max((offset.offset || 0) - offset.limit, 0)]
end
def page_link_params(%Ash.Page.Offset{count: count} = offset, "last") when not is_nil(count) do
[offset: count - offset.limit, limit: offset.limit]
end
def page_link_params(%Ash.Page.Keyset{results: [], after: after_keyset} = keyset, "prev") do
[before: after_keyset, limit: keyset.limit]
end
def page_link_params(%Ash.Page.Keyset{results: [], before: before_keyset} = keyset, "next") do
[after: before_keyset, limit: keyset.limit]
end
def page_link_params(%Ash.Page.Keyset{results: [first | _]} = keyset, "prev") do
[before: first.__metadata__.keyset, limit: keyset.limit]
end
def page_link_params(%Ash.Page.Keyset{results: results} = keyset, "next") do
[after: List.last(results).__metadata__.keyset, limit: keyset.limit]
end
def page_link_params(%Ash.Page.Offset{count: count, limit: limit} = offset, target)
when not is_nil(count) and is_integer(target) do
target = max(target, 1)
last_page = last_page(offset)
target = min(target, last_page)
[offset: (target - 1) * limit, limit: limit]
end
def page_link_params({:ok, data}, target) do
page_link_params(data, target)
end
def page_link_params(_page, _target) do
:invalid
end
def can_link_to_page?(page, target) do
page_link_params(page, target) != :invalid
end
def last_page(%Ash.Page.Offset{count: count, limit: limit}) when is_integer(count) do
if rem(count, limit) == 0 do
div(count, limit)
else
div(count, limit) + 1
end
end
def last_page(_), do: :unknown
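  # e.g. a page with count: 95 and limit: 10 yields last_page/1 == 10, since
  # the 95 records round up to a partial tenth page.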
def on_page?(page, num) do
page_number(page) == num
end
def page_number(%{offset: offset, limit: limit}) do
if rem(offset, limit) == 0 do
div(offset, limit)
else
div(offset, limit) + 1
end
end
def page_number(_), do: false
@doc """
Incorporates an `Ash.Notifier.Notification` into the query results, based on the liveness configuration.
You will want to match on receiving a notification from Ash, and the easiest way to do that is to match
on the payload like so:
```
@impl true
def handle_info(%{topic: topic, payload: %Ash.Notifier.Notification{}}, socket) do
{:noreply, handle_live(socket, topic, [:query1, :query2, :query3])}
end
```
Feel free to intercept notifications and do your own logic to respond to events. Ultimately, all
that matters is that you also call `handle_live/3` if you want it to update your query results.
The assign or list of assigns passed as the third argument must be the same names passed into
`keep_live`. If you only want some queries to update based on some events, you can define multiple
matches on events, and only call `handle_live/3` with the assigns that should be updated for that
notification.
"""
@spec handle_live(socket, topic | :refetch, assign | list(assign)) :: socket
def handle_live(socket, notification, assigns, refetch_info \\ [])
def handle_live(socket, notification, assigns, refetch_info) when is_list(assigns) do
Enum.reduce(assigns, socket, &handle_live(&2, notification, &1, refetch_info))
end
def handle_live(socket, topic, assign, refetch_info) when is_binary(topic) do
config = Map.get(socket.assigns.ash_live_config, assign)
if config.opts[:subscribe] do
if topic in List.wrap(config.opts[:subscribe]) do
handle_live(socket, :refetch, assign, refetch_info)
else
socket
end
else
handle_live(socket, :refetch, assign, refetch_info)
end
end
def handle_live(socket, :refetch, assign, refetch_info) do
config = Map.get(socket.assigns.ash_live_config, assign)
diff = System.monotonic_time(:millisecond) - (config[:last_fetched_at] || 0)
requested_before_last_refetch? =
refetch_info[:requested_at] && refetch_info[:requested_at] <= config[:last_fetched_at]
cond do
requested_before_last_refetch? ->
socket
      config.opts[:refetch_window] && diff < config.opts[:refetch_window] ->
        # Wait out the remainder of the refetch window before refetching.
        Process.send_after(
          self(),
          {:refetch, assign, [requested_at: System.monotonic_time(:millisecond)]},
          config.opts[:refetch_window] - diff
        )
socket
true ->
result =
case Map.get(socket.assigns, assign) do
%page_struct{} = page when page_struct in [Ash.Page.Keyset, Ash.Page.Offset] ->
socket
|> refetch_page(config.callback, page, config.opts)
|> mark_page_as_first()
list when is_list(list) ->
refetch_list(socket, config.callback, list, config.opts)
_ ->
run_callback(config.callback, socket, nil)
end
new_config =
config
|> Map.put(:last_fetched_at, System.monotonic_time(:millisecond))
new_full_config = Map.put(socket.assigns.ash_live_config, assign, new_config)
socket
|> assign(assign, result)
|> assign(:ash_live_config, new_full_config)
end
end
defp refetch_list(socket, callback, current_list, opts) do
cond do
opts[:results] == :lose ->
run_callback(callback, socket, nil)
current_list == [] ->
[]
true ->
first = List.first(current_list).__struct__
pkey = Ash.Resource.Info.primary_key(first)
case run_callback(callback, socket, nil) do
%struct{} = page when struct in [Ash.Page.Keyset, Ash.Page.Offset] ->
Enum.map(current_list, fn result ->
Enum.find(
page.results,
result,
&(Map.take(&1, pkey) == Map.take(result, pkey))
)
end)
list when is_list(list) ->
Enum.map(current_list, fn result ->
Enum.find(
list,
result,
&(Map.take(&1, pkey) == Map.take(result, pkey))
)
end)
value ->
value
end
end
end
defp refetch_page(socket, callback, current_page, opts) do
cond do
opts[:results] == :lose ->
run_callback(callback, socket, opts[:page])
current_page.results == [] ->
current_page
true ->
first = List.first(current_page.results).__struct__
pkey = Ash.Resource.Info.primary_key(first)
filter =
case pkey do
[key] ->
[{key, [in: Enum.map(current_page.results, &Map.get(&1, key))]}]
keys ->
[or: Enum.map(current_page.results, &Map.take(&1, keys))]
end
page_opts = Keyword.put(opts[:page] || [], :filter, filter)
resulting_page = run_callback(callback, socket, page_opts)
preserved_records =
current_page.results
|> Enum.map(fn result ->
Enum.find(
resulting_page.results,
result,
&(Map.take(&1, pkey) == Map.take(result, pkey))
)
end)
%{resulting_page | results: preserved_records}
end
end
defp run_callback(callback, socket, page_opts) when is_function(callback, 2) do
callback.(socket, page_opts)
end
defp run_callback(callback, socket, _page_opts) when is_function(callback, 1) do
callback.(socket)
end
defp mark_page_as_first(%Ash.Page.Keyset{} = page) do
if page.after || page.before do
page
else
Map.put(page, :__first__?, true)
end
end
defp mark_page_as_first(%Ash.Page.Offset{} = page) do
if is_nil(page.offset) || page.offset == 0 do
Map.put(page, :__first__?, true)
else
page
end
end
defp mark_page_as_first(page), do: page
defp assign(%Phoenix.LiveView.Socket{} = socket, one, two) do
Phoenix.LiveView.assign(socket, one, two)
end
defp assign(socket, one, two) do
Phoenix.Socket.assign(socket, one, two)
end
end
|
lib/ash_phoenix/live_view.ex
| 0.901495
| 0.651327
|
live_view.ex
|
starcoder
|
defmodule Vantagex.Forex do
@moduledoc """
Contains functions related to the Forex functions from Alpha Vantage
"""
import Vantagex.Helper
@module_id "FX"
@doc """
Uses Alpha Vantage's CURRENCY_EXCHANGE_RATE function.
Returns the realtime exchange rate for any pair of digital currency (e.g. Bitcoin)
or physical currency (e.g. USD)
Args:
* `from_currency` - The currency to get the exchange rate for. e.g. "USD" or "BTC"
* `to_currency` - The destination currency for the exchange rate. e.g. "USD" or "BTC"
* `opts` - A list of extra options to pass to the function.
Allowed options:
* `datatype` - `:map | :json | :csv` specifies the return format. Defaults to :map
## Examples:
iex> Vantagex.Forex.exchange_rate("USD", "COP")
%{
"Realtime Currency Exchange Rate" => %{
"1. From_Currency Code" => "USD",
"2. From_Currency Name" => "United States Dollar",
"3. To_Currency Code" => "COP",
"4. To_Currency Name" => "Colombian Peso",
"5. Exchange Rate" => "3130.00000000",
"6. Last Refreshed" => "2019-02-16 22:33:37",
"7. Time Zone" => "UTC"
}
}
iex> Vantagex.Forex.exchange_rate("USD", "COP", datatype: :json)
"{\\n \"Realtime Currency Exchange Rate\": {\\n \"1. From_Currency Code\": \"USD\",\\n \"2. From_Currency Name\": \"United States Dollar\",\\n \"3. To_Currency Code\": \"COP\",\\n \"4. To_Currency Name\": \"Colombian Peso\",\\n \"5. Exchange Rate\": \"3130.00000000\",\\n \"6. Last Refreshed\": \"2019-02-16 22:34:00\",\\n \"7. Time Zone\": \"UTC\"\\n }\\n}"
"""
@spec exchange_rate(String.t(), String.t(), Keyword.t()) :: Map.t() | String.t()
def exchange_rate(from_currency, to_currency, opts \\ []) do
params = %{
from_currency: from_currency,
to_currency: to_currency,
datatype: Keyword.get(opts, :datatype)
} |> clean_params()
resolve_request(:currency_exchange_rate, params)
end
@doc """
Uses Alpha Vantage `FX_INTRADAY` function.
Returns intraday time series of the FX currency pair specified
Args:
* `from_symbol` - three letter string representing the currency. e.g. `"EUR"`
* `to_symbol` - three letter string representing the currency. e.g. `"USD"`
* `interval` - an integer representing the time interval between data points of the time series. e.g. `5`
* `opts` - A list of extra options to pass to the function.
Allowed options:
* `outputsize` - `:compact | :full` when set to compact returns the latest 100
datapoints; when set to full returns the full length intraday time series. Defaults to compact
* `datatype` - `:map | :json | :csv` specifies the return format. Defaults to :map
## Examples
iex> Vantagex.Forex.intraday("USD", "COP", 5)
%{
"Meta Data" => %{
"1. Information" => "FX Intraday (5min) Time Series",
"2. From Symbol" => "USD",
"3. To Symbol" => "COP",
"4. Last Refreshed" => "2019-02-17 22:40:00",
"5. Interval" => "5min",
"6. Output Size" => "Compact",
"7. Time Zone" => "UTC"
},
"Time Series FX (5min)" => %{
"2019-02-17 17:45:00" => %{
"1. open" => "3130.0000",
"2. high" => "3130.0000",
"3. low" => "3130.0000",
"4. close" => "3130.0000"
},
...
}
}
"""
@spec intraday(String.t(), String.t(), integer(), Keyword.t()) :: Map.t() | String.t()
def intraday(from_symbol, to_symbol, interval, opts \\ []) do
params = %{
from_symbol: from_symbol,
to_symbol: to_symbol,
interval: "#{interval}min",
outputsize: Keyword.get(opts, :outputsize),
datatype: Keyword.get(opts, :datatype)
} |> clean_params()
resolve_request(:intraday, params, @module_id)
end
@doc """
Uses Alpha Vantage's `FX_DAILY` function.
Returns the daily time series of the FX currency pair specified.
Args:
* `from_symbol` - three letter string representing the currency. e.g. `"EUR"`
* `to_symbol` - three letter string representing the currency. e.g. `"USD"`
* `opts` - A list of extra options to pass to the function.
Allowed options:
* `outputsize` - `:compact | :full` when set to compact returns the latest 100
datapoints; when set to full returns the full length intraday time series. Defaults to compact
* `datatype` - `:map | :json | :csv` specifies the return format. Defaults to :map
## Examples
iex> Vantagex.Forex.daily("USD", "COP")
%{
"Meta Data" => %{
"1. Information" => "Forex Daily Prices (open, high, low, close)",
"2. From Symbol" => "USD",
"3. To Symbol" => "COP",
"4. Output Size" => "Compact",
"5. Last Refreshed" => "2019-02-19 06:40:00",
"6. Time Zone" => "GMT+8"
},
"Time Series FX (Daily)" => %{
"2018-11-12" => %{
"1. open" => "3178.5000",
"2. high" => "3178.5000",
"3. low" => "3170.3000",
"4. close" => "3174.3000"
},
"2018-12-06" => %{
"1. open" => "3159.0000",
"2. high" => "3191.8000",
"3. low" => "3154.3000",
"4. close" => "3184.2000"
},
...
}
}
"""
@spec daily(String.t(), String.t(), Keyword.t()) :: String.t() | Map.t()
def daily(from_symbol, to_symbol, opts \\ []) do
params = %{
from_symbol: from_symbol,
to_symbol: to_symbol,
outputsize: Keyword.get(opts, :outputsize),
datatype: Keyword.get(opts, :datatype)
} |> clean_params()
resolve_request(:daily, params, @module_id)
end
@doc """
Uses Alpha Vantage's `FX_WEEKLY` function
Returns the weekly time series of the FX currency pair specified.
Args:
* `from_symbol` - three letter string representing the currency. e.g. `"EUR"`
* `to_symbol` - three letter string representing the currency. e.g. `"USD"`
* `opts` - A list of extra options to pass to the function.
Allowed options:
* `outputsize` - `:compact | :full` when set to compact returns the latest 100
datapoints; when set to full returns the full length intraday time series. Defaults to compact
* `datatype` - `:map | :json | :csv` specifies the return format. Defaults to :map
## Examples
iex> Vantagex.Forex.weekly("USD", "EUR")
%{
"Meta Data" => %{
"1. Information" => "Forex Weekly Prices (open, high, low, close)",
"2. From Symbol" => "USD",
"3. To Symbol" => "EUR",
"4. Last Refreshed" => "2019-02-19 07:05:00",
"5. Time Zone" => "GMT+8"
},
"Time Series FX (Weekly)" => %{
"2018-09-02" => %{
"1. open" => "0.8597",
"2. high" => "0.8630",
"3. low" => "0.8522",
"4. close" => "0.8620"
},
"2016-09-18" => %{
"1. open" => "0.8897",
"2. high" => "0.8967",
"3. low" => "0.8867",
"4. close" => "0.8959"
},
...
}
}
"""
@spec weekly(String.t(), String.t(), Keyword.t()) :: String.t() | Map.t()
def weekly(from_symbol, to_symbol, opts \\ []) do
params = %{
from_symbol: from_symbol,
to_symbol: to_symbol,
outputsize: Keyword.get(opts, :outputsize),
datatype: Keyword.get(opts, :datatype)
} |> clean_params()
resolve_request(:weekly, params, @module_id)
end
@doc """
Uses Alpha Vantage's `FX_MONTHLY` function
Returns the monthly time series of the FX currency pair specified.
Args:
* `from_symbol` - three letter string representing the currency. e.g. `"EUR"`
* `to_symbol` - three letter string representing the currency. e.g. `"USD"`
* `opts` - A list of extra options to pass to the function.
Allowed options:
* `outputsize` - `:compact | :full` when set to compact returns the latest 100
datapoints; when set to full returns the full length intraday time series. Defaults to compact
* `datatype` - `:map | :json | :csv` specifies the return format. Defaults to :map
## Examples
iex> Vantagex.Forex.monthly("EUR", "USD")
%{
"Meta Data" => %{
"1. Information" => "Forex Monthly Prices (open, high, low, close)",
"2. From Symbol" => "EUR",
"3. To Symbol" => "USD",
"4. Last Refreshed" => "2019-02-19 07:10:00",
"5. Time Zone" => "GMT+8"
},
"Time Series FX (Monthly)" => %{
"2011-06-30" => %{
"1. open" => "1.4412",
"2. high" => "1.4696",
"3. low" => "1.4070",
"4. close" => "1.4491"
},
"2010-06-30" => %{
"1. open" => "1.2306",
"2. high" => "1.2467",
"3. low" => "1.1874",
"4. close" => "1.2234"
},
...
}
}
"""
@spec monthly(String.t(), String.t(), Keyword.t()) :: String.t() | Map.t()
def monthly(from_symbol, to_symbol, opts \\ []) do
params = %{
from_symbol: from_symbol,
to_symbol: to_symbol,
outputsize: Keyword.get(opts, :outputsize),
datatype: Keyword.get(opts, :datatype)
} |> clean_params()
resolve_request(:monthly, params, @module_id)
end
end
|
lib/vantagex/forex.ex
| 0.927716
| 0.642713
|
forex.ex
|
starcoder
|
if Code.ensure_loaded(Postgrex) do
Postgrex.Types.define(Flippant.PostgrexTypes, [], json: Jason)
defmodule Flippant.Adapter.Postgres do
@moduledoc """
This adapter provides Postgres 9.5+ backed rule storage.
The adapter relies on a table with the following structure:
* `name` - A `text` or `varchar` column with a unique constraint. The
adapter makes heavy use of `UPSERT` functionality, which relies on unique
names.
* `rules` - A `jsonb` column where rules will be stored. The use of jsonb and
jsonb specific operators means the Postgres version must be 9.5 or greater.
In the likely chance that you're managing a database using Ecto you can
create a migration to add the `flippant_features` table with the following
statement (or an equivalent):
CREATE TABLE IF NOT EXISTS flippant_features (
name varchar(140) NOT NULL CHECK (name <> ''),
rules jsonb NOT NULL DEFAULT '{}'::jsonb,
CONSTRAINT unique_name UNIQUE(name)
)
If you prefer you can also use the adapters `setup/0` function to create
the table automatically.
"""
use GenServer
import Postgrex, only: [query!: 3, transaction: 2]
import Flippant.Rules, only: [enabled_for_actor?: 2]
@defaults [postgres_opts: [database: "flippant_test"], table: "flippant_features"]
@doc """
Starts the Postgres adapter.
## Options
* `:postgres_opts` - Options that can be passed to Postgrex, the underlying
library used to connect to Postgres. At a minimum the `database` must be set,
otherwise it will attempt to connect to the `flippant_test` database.
* `table` - The table where rules will be stored. Defaults to `flippant_features`.
"""
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
# Callbacks
def init(opts) do
{:ok, _} = Application.ensure_all_started(:postgrex)
opts = Keyword.merge(@defaults, opts)
{:ok, pid} =
opts
|> Keyword.get(:postgres_opts, [])
|> Keyword.put(:types, Flippant.PostgrexTypes)
|> Postgrex.start_link()
{:ok, %{pid: pid, table: Keyword.get(opts, :table)}}
end
def handle_cast({:add, feature}, %{pid: pid, table: table} = state) do
query!(pid, "INSERT INTO #{table} (name) VALUES ($1) ON CONFLICT (name) DO NOTHING", [
feature
])
{:noreply, state}
end
def handle_cast({:add, feature, {group, values}}, %{pid: pid, table: table} = state) do
query!(
pid,
"""
INSERT INTO #{table} AS t (name, rules) VALUES ($1, $2)
ON CONFLICT (name) DO UPDATE
SET rules = jsonb_set(t.rules, $3, array_to_json(
ARRAY(
SELECT DISTINCT(UNNEST(ARRAY(
SELECT jsonb_array_elements(COALESCE(t.rules#>$3, '[]'::jsonb))
) || $4))
)
)::jsonb)
""",
[feature, %{group => values}, [group], values]
)
{:noreply, state}
end
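  # Sketch of the upsert above (hypothetical feature/group): casting
  # {:add, "search", {"staff", ["alice"]}} inserts the "search" feature if it
  # is missing, or merges ["alice"] into the existing "staff" array with
  # duplicates removed, e.g. rules {"staff": ["bob"]} become
  # {"staff": ["alice", "bob"]} (element order is not guaranteed by DISTINCT).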
def handle_cast(:clear, %{pid: pid, table: table} = state) do
query!(pid, "TRUNCATE #{table} RESTART IDENTITY", [])
{:noreply, state}
end
def handle_cast({:remove, feature}, %{pid: pid, table: table} = state) do
query!(pid, "DELETE FROM #{table} WHERE name = $1", [feature])
{:noreply, state}
end
def handle_cast({:remove, feature, group, []}, %{pid: pid, table: table} = state) do
query!(pid, "UPDATE #{table} SET rules = rules - $1 WHERE name = $2", [group, feature])
{:noreply, state}
end
def handle_cast({:remove, feature, group, values}, %{pid: pid, table: table} = state) do
query!(
pid,
"""
UPDATE #{table} SET rules = jsonb_set(rules, $1, array_to_json(
ARRAY(
SELECT UNNEST(ARRAY(SELECT jsonb_array_elements(COALESCE(rules#>$1, '[]'::jsonb))))
EXCEPT
SELECT UNNEST(ARRAY(SELECT jsonb_array_elements($2)))
)
)::jsonb)
WHERE name = $3
""",
[[group], values, feature]
)
{:noreply, state}
end
def handle_cast({:rename, old_name, new_name}, %{pid: pid, table: table} = state) do
{:ok, _} =
transaction(pid, fn conn ->
query!(conn, "DELETE FROM #{table} WHERE name = $1", [new_name])
query!(conn, "UPDATE #{table} SET name = $1 WHERE name = $2", [new_name, old_name])
end)
{:noreply, state}
end
def handle_cast({:restore, loaded}, %{pid: pid, table: table} = state) do
{:ok, _} =
transaction(pid, fn conn ->
for {feature, rules} <- loaded do
query!(conn, "INSERT INTO #{table} AS t (name, rules) VALUES ($1, $2)", [
feature,
rules
])
end
end)
{:noreply, state}
end
def handle_cast(:setup, %{pid: pid, table: table} = state) do
query!(
pid,
"""
CREATE TABLE IF NOT EXISTS #{table} (
name varchar(140) NOT NULL CHECK (name <> ''),
rules jsonb NOT NULL DEFAULT '{}'::jsonb,
CONSTRAINT unique_name UNIQUE(name)
)
""",
[]
)
{:noreply, state}
end
def handle_call({:breakdown, actor}, _from, %{pid: pid, table: table} = state) do
breakdown =
case query!(pid, "SELECT jsonb_object_agg(name, rules) FROM #{table}", []) do
%{rows: [[object]]} when is_map(object) ->
Enum.reduce(object, %{}, fn {feature, rules}, acc ->
Map.put(acc, feature, breakdown_value(rules, actor))
end)
_ ->
%{}
end
{:reply, breakdown, state}
end
def handle_call({:enabled?, feature, actor}, _from, %{pid: pid, table: table} = state) do
%{rows: rows} = query!(pid, "SELECT rules FROM #{table} WHERE name = $1", [feature])
enabled? =
case rows do
[[rules]] -> enabled_for_actor?(rules, actor)
_ -> false
end
{:reply, enabled?, state}
end
def handle_call({:exists?, feature, :any}, _from, %{pid: pid, table: table} = state) do
%{rows: [[exists?]]} =
query!(pid, "SELECT EXISTS (SELECT 1 FROM #{table} WHERE name = $1)", [feature])
{:reply, exists?, state}
end
def handle_call({:exists?, feature, group}, _from, %{pid: pid, table: table} = state) do
%{rows: [[exists?]]} =
query!(pid, "SELECT EXISTS (SELECT 1 FROM #{table} WHERE name = $1 AND rules ? $2)", [
feature,
group
])
{:reply, exists?, state}
end
def handle_call({:features, :all}, _from, %{pid: pid, table: table} = state) do
%{rows: rows} = query!(pid, "SELECT name FROM #{table} ORDER BY name ASC", [])
{:reply, List.flatten(rows), state}
end
def handle_call({:features, group}, _from, %{pid: pid, table: table} = state) do
%{rows: rows} =
query!(pid, "SELECT name FROM #{table} WHERE rules ? $1 ORDER BY name ASC", [group])
{:reply, List.flatten(rows), state}
end
# Helpers
defp breakdown_value(rules, :all) do
Enum.into(rules, %{})
end
defp breakdown_value(rules, actor) do
enabled_for_actor?(rules, actor)
end
end
end
|
lib/flippant/adapters/postgres.ex
| 0.682256
| 0.418875
|
postgres.ex
|
starcoder
|
defmodule Harald.HCI.ACLData do
@moduledoc """
Reference: version 5.2, vol 4, part E, 5.4.2.
"""
alias Harald.Host.L2CAP
alias Harald.HCI.Packet
@enforce_keys [
:handle,
:packet_boundary_flag,
:broadcast_flag,
:data_total_length,
:data
]
defstruct [
:handle,
:packet_boundary_flag,
:broadcast_flag,
:data_total_length,
:data
]
def decode(
<<
2,
handle::little-size(16),
data_total_length::little-size(16),
data::binary-size(data_total_length)
>> = encoded_bin
) do
with {:ok, decoded_data} <- L2CAP.decode(data) do
<<
encoded_broadcast_flag::size(2),
encoded_packet_boundary_flag::size(2),
connection_handle::size(12)
>> = <<handle::size(16)>>
decoded = %__MODULE__{
handle: connection_handle,
packet_boundary_flag: decode_packet_boundary_flag(encoded_packet_boundary_flag),
broadcast_flag: decode_broadcast_flag(encoded_broadcast_flag),
data_total_length: data_total_length,
data: decoded_data
}
{:ok, decoded}
else
{:error, {:not_implemented, error, _bin}} ->
{:error, {:not_implemented, error, encoded_bin}}
end
end
def encode(%__MODULE__{
broadcast_flag: broadcast_flag,
data: data,
data_total_length: data_total_length,
handle: connection_handle,
packet_boundary_flag: packet_boundary_flag
}) do
encoded_packet_boundary_flag = encode_flag(packet_boundary_flag)
encoded_broadcast_flag = encode_flag(broadcast_flag)
{:ok, encoded_data} = L2CAP.encode(data)
indicator = Packet.indicator(:acl_data)
<<encoded_handle::little-size(16)>> = <<
encoded_broadcast_flag::size(2),
encoded_packet_boundary_flag::size(2),
connection_handle::size(12)
>>
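    # Byte-order note: matching the flag/handle bitfield as little-endian here
    # and then writing `encoded_handle` big-endian below swaps the two bytes,
    # putting the low handle byte first on the wire -- the little-endian
    # layout that decode/1 reads back. The double swap is intentional.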
encoded = <<
indicator,
encoded_handle::size(16),
data_total_length::little-size(16),
encoded_data::binary-size(data_total_length)
>>
{:ok, encoded}
end
def new(handle, packet_boundary_flag, broadcast_flag, %data_module{} = data) do
{:ok, data_bin} = data_module.encode(data)
acl_data = %__MODULE__{
handle: handle,
packet_boundary_flag: packet_boundary_flag,
broadcast_flag: broadcast_flag,
data_total_length: byte_size(data_bin),
data: data
}
{:ok, acl_data}
end
defp decode_broadcast_flag(0b00 = bc_flag) do
%{description: "Point-to-point (ACL-U, AMP-U, or LE-U)", value: bc_flag}
end
defp decode_broadcast_flag(0b01 = bc_flag) do
%{description: "BR/EDR broadcast (ASB-U)", value: bc_flag}
end
defp decode_broadcast_flag(0b10 = bc_flag) do
%{description: "Reserved for future use.", value: bc_flag}
end
defp decode_broadcast_flag(0b11 = bc_flag) do
%{description: "Reserved for future use.", value: bc_flag}
end
defp decode_packet_boundary_flag(0b00 = pb_flag) do
%{
description:
"First non-automatically-flushable packet of a higher layer message (start of a non-automatically-flushable L2CAP PDU) from Host to Controller.",
value: pb_flag
}
end
defp decode_packet_boundary_flag(0b01 = pb_flag) do
%{
description: "Continuing fragment of a higher layer message",
value: pb_flag
}
end
defp decode_packet_boundary_flag(0b10 = pb_flag) do
%{
description:
"First automatically flushable packet of a higher layer message (start of an automatically-flushable L2CAP PDU).",
value: pb_flag
}
end
defp decode_packet_boundary_flag(0b11 = pb_flag) do
%{
description: "A complete L2CAP PDU. Automatically flushable.",
value: pb_flag
}
end
defp encode_flag(%{value: encoded_flag}) when encoded_flag in [0b00, 0b01, 0b10, 0b11] do
encoded_flag
end
end
|
src/lib/harald/hci/acl_data.ex
| 0.593963
| 0.40031
|
acl_data.ex
|
starcoder
|
defmodule Parselix do
@moduledoc """
Provides the macro for creating parser and some helper functions.
## Examples
### Function style
@doc "Replaces error messages."
def error_message(parser, message) do
fn target, position ->
case parser.(target, position) do
{:error, _, _} -> {:error, message, position}
x -> x
end
end
end
@doc "Parse lowercase characters."
def lowercases do
        fn target, position ->
parser = lowercase() |> many_1()
parser.(target, position)
end
end
### Function style with parser_body macro
@doc "Parse uppercase characters."
def uppercases do
parser_body do
uppercase() |> many_1()
end
end
### Macro style
@doc "Picks one value from the result of the given parser."
parser :pick, [parser, index] do
parser |> map(&Enum.at(&1, index))
end
@doc "Parses the end of text."
parser :eof do
fn
"", position -> {:ok, :eof, "", position}
          _, position -> {:error, "Expected EOF.", position}
end
end
# Private
parserp :private_dump, [parser] do
parser |> map(fn _ -> :empty end)
end
"""
defmacro __using__(_opts) do
quote do
import Parselix
alias Parselix.Position
alias Parselix.Meta
end
end
defmodule Position, do: defstruct index: 0, vertical: 0, horizontal: 0
defmodule Meta, do: defstruct label: nil, value: nil, position: %Position{}
@typedoc """
A successful result.
`{:ok, RESULT, REMAINDER, NEW_POSITION}`
"""
@type ok :: {:ok, any, String.t, %Position{}}
@typedoc """
A failed result.
`{:error, ERROR_MESSAGE, POSITION}`
"""
@type error :: {:error, String.t, %Position{}}
@type parser :: (String.t, %Position{} -> ok | error)
def position(index \\ 0, vertical \\ 0, horizontal \\ 0), do: %Position{index: index, vertical: vertical, horizontal: horizontal}
  def parse(parser, target, position \\ position()) do
parser.(target, position)
end
def get_position(current, target, consumed) when is_integer(consumed) do
used = String.slice target, 0, consumed
used_list = String.codepoints used
    # Count every CR and LF, then subtract CRLF pairs so "\r\n" counts as one
    # newline (Regex.scan finds all occurrences, unlike Regex.run).
    crlf_count = length(Regex.scan(~r/\r\n/, used))
    vertical = Enum.count(used_list, fn x -> x == "\n" or x == "\r" end) - crlf_count
get_horizontal = fn
[head | tail], count, get_horizontal -> case head do
x when x == "\r" or x == "\n" -> get_horizontal.(tail, 0, get_horizontal)
_ -> get_horizontal.(tail, count + 1, get_horizontal)
end
[], count, _ -> count
end
horizontal = get_horizontal.(used_list, current.horizontal, get_horizontal)
%Position{
index: current.index + (String.length used),
vertical: current.vertical + vertical,
horizontal: horizontal
}
end
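  # Example (with the CRLF counting above): consuming the first five
  # codepoints of "ab\ncd" from the default position gives
  #
  #     get_position(%Position{}, "ab\ncd", 5)
  #     #=> %Position{index: 5, vertical: 1, horizontal: 2}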
def get_position(current, target, remainder) when is_binary(remainder) do
get_position current, target, String.length(target) - String.length(remainder)
end
def format_result(result, name \\ nil, target, current_position) do
name = if is_nil(name), do: "A parser", else: "\"#{name}\""
case result do
{:ok, _, _, _} = x -> x
{:ok, children, remainder} when is_binary(remainder) ->
{:ok, children, remainder, get_position(current_position, target, remainder)}
{:ok, children, consumed} when is_integer(consumed) ->
{:ok, children, String.slice(target, Range.new(consumed, -1)), get_position(current_position, target, consumed)}
{:error, _, _} = x -> x
{:error, message} -> {:error, message, current_position}
      x -> {:error, "#{name} returned a malformed result.\n#{inspect x}", current_position}
end
end
@doc """
Wraps a parser body.
"""
defmacro parser_body(do: block) do
quote do
fn target, position ->
parser = unquote(block)
parser.(target, position)
end
end
end
@doc """
Defines a parser.
"""
defmacro parser(name, argument_names \\ [], do: block) do
quote do
def unquote(name)(unquote_splicing(argument_names)) do
parser_body do: unquote(block)
end
end
end
@doc """
Defines a private parser.
"""
defmacro parserp(name, argument_names \\ [], do: block) do
quote do
defp unquote(name)(unquote_splicing(argument_names)) do
parser_body do: unquote(block)
end
end
end
end
|
lib/parselix.ex
| 0.829319
| 0.451871
|
parselix.ex
|
starcoder
|
defmodule Snap.HTTPClient.Adapters.Finch do
@moduledoc """
Built in adapter using `Finch`.
You can also configure this adapter by explicitly setting the `http_client_adapter`
in the `Snap.Cluster` configuration with a tuple `{Snap.HTTPClient.Adapters.Finch, config}`.
For example:
```
config :my_app, MyApp.Cluster,
http_client_adapter: {Snap.HTTPClient.Adapters.Finch, pool_size: 20}
```
You can check the `t:config/0` for docs about the available configurations.
"""
@behaviour Snap.HTTPClient
require Logger
alias Snap.HTTPClient.Error
alias Snap.HTTPClient.Response
@typedoc """
Available options for configuring the Finch adapter. For more information about the options,
you can check [Finch's official docs](https://hexdocs.pm/finch/Finch.html#start_link/1-pool-configuration-options).
* `pool_size`: Set the pool size. Defaults to `5`.
* `conn_opts`: Connection options passed to `Mint.HTTP.connect/4`. Defaults to `[]`.
"""
@type config :: [
pool_size: pos_integer(),
conn_opts: keyword()
]
@default_pool_size 5
@default_conn_opts []
@impl true
def child_spec(config) do
if not Code.ensure_loaded?(Finch) do
Logger.error("""
Can't start Snap.HTTPClient.Adapters.Finch because :finch is not available.
Please make sure to add :finch as a dependency:
{:finch, "~> 0.8"}
Or set your own Snap.HTTPClient:
config :my_app, MyApp.Cluster, http_client_adapter: MyHTTPClient
""")
raise "missing finch dependency"
end
Application.ensure_all_started(:finch)
cluster = Keyword.fetch!(config, :cluster)
url = Keyword.fetch!(config, :url)
size = Keyword.get(config, :pool_size, @default_pool_size)
conn_opts = Keyword.get(config, :conn_opts, @default_conn_opts)
finch_config = [
name: connection_pool_name(cluster),
pools: %{
url => [size: size, count: 1, conn_opts: conn_opts]
}
]
{Finch, finch_config}
end
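  # Sketch of the resulting child spec (hypothetical values): for
  # config = [cluster: MyApp.Cluster, url: "http://localhost:9200"], this
  # returns
  #
  #     {Finch,
  #      [name: MyApp.Cluster.Pool,
  #       pools: %{"http://localhost:9200" => [size: 5, count: 1, conn_opts: []]}]}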
@impl true
def request(cluster, method, url, headers, body, opts \\ []) do
conn_pool_name = connection_pool_name(cluster)
method
|> Finch.build(url, headers, body)
|> Finch.request(conn_pool_name, opts)
|> handle_response()
end
defp handle_response({:ok, finch_response}) do
response = %Response{
headers: finch_response.headers,
status: finch_response.status,
body: finch_response.body
}
{:ok, response}
end
defp handle_response({:error, %{reason: reason} = origin}) when is_atom(reason) do
{:error, Error.new(reason, origin)}
end
defp handle_response({:error, origin}) do
{:error, Error.unknown(origin)}
end
defp connection_pool_name(cluster) do
Module.concat(cluster, Pool)
end
end
|
lib/snap/http_client/adapters/finch.ex
| 0.885866
| 0.734691
|
finch.ex
|
starcoder
|
defmodule Authex do
@moduledoc """
Defines an auth module.
This module provides a simple set of tools for the authorization and authentication
required by a typical API through use of JSON web tokens. To get started, we
need to define our auth module:
defmodule MyApp.Auth do
use Authex, otp_app: :my_app
end
We must then add the auth module to our supervision tree.
children = [
MyApp.Auth
]
## Configuration
While our auth module is defined, we will need to further configure it to our
requirements. At a minimum, we need to add a secret from which our tokens will
be signed with. There is a convenient mix task available for this:
mix authex.gen.secret
We should now add this secret to our config. In production this should be set
via an env var. We should use the `c:init/1` callback to configure this:
defmodule MyApp.Auth do
use Authex, otp_app: :my_app
def init(config) do
secret = System.get_env("AUTH_SECRET")
config = Keyword.put(config, :secret, secret)
{:ok, config}
end
end
Any other config can either be set with the `c:start_link/1` or `c:init/1` callbacks,
or via application config. Below are some of the values available:
config :my_app, MyApp.Auth, [
# REQUIRED
# The secret used to sign tokens with.
secret: "mysecret",
# OPTIONAL
# A blacklist repo, or false if disabled.
blacklist: false,
# A banlist repo, or false if disabled.
banlist: false,
# The default algorithm used to sign tokens.
default_alg: :hs256,
# The default iss claim used in tokens.
default_iss: nil,
# The default aud claim used in tokens.
default_aud: nil,
# The default time to live for tokens in seconds.
default_ttl: 3600,
# The default module, function, and arg used to generate the jti claim.
jti_mfa: {UUID, :uuid4, [:hex]},
# The plug called when an unauthorized status is determined.
unauthorized: Authex.UnauthorizedPlug,
# The plug called when a forbidden status is determined.
forbidden: Authex.ForbiddenPlug
]
## Tokens
At the heart of Authex is the `Authex.Token` struct. This struct is simply
a wrapper around the typical JWT claims. The only additional items are the
`:scopes` and `:meta` keys. There are 3 base actions required for these tokens -
creation, signing, and verification.
#### Creating
We can easily create token structs using the `c:token/2` function.
MyApp.Auth.token(sub: 1, scopes: ["admin/read"])
The above would create a token struct for a user with an id of 1 and with
"admin/read" authorization.
#### Signing
Once we have a token struct, we can sign it using the `c:sign/2` function to
create a compact token binary. This is what we will use for authentication and
authorization for our API.
[sub: 1, scopes: ["admin/read"]]
|> MyApp.Auth.token()
|> MyApp.Auth.sign()
#### Verifying
Once we have a compact token binary, we can verify it and turn it back to an
token struct using the `c:verify/2` function.
[sub: 1, scopes: ["admin/read"]]
|> MyApp.Auth.token()
|> MyApp.Auth.sign()
|> MyApp.Auth.verify()
## Serializers
Typically, we want to be able to create tokens from another source of data.
This could be something like a `User` struct. We also will want to take a token
and turn it back into a `User` struct.
To do this, we must create a serializer. A serializer is simply a module that
adopts the `Authex.Serializer` behaviour. For more information on creating
serializers, please see the `Authex.Serializer` documentation.
Once we have created our serializer, we define it in our config.
config :my_app, MyApp.Auth, [
serializer: MyApp.Auth.UserSerializer,
]
We can now easily create tokens and compact tokens from our custom data using
the `c:for_token/2` and `c:for_compact_token/3` functions.
user = %MyApp.User{id: 1, scopes: []}
{:ok, token} = MyApp.Auth.for_token(user) # returns a token struct
{:ok, compact_token} = MyApp.Auth.for_compact_token(user) # returns a compact token
We can also turn tokens and compact tokens back into our custom data using the
`c:from_token/2` and `c:from_compact_token/2` functions.
user = %MyApp.User{id: 1, scopes: []}
{:ok, token} = MyApp.Auth.for_token(user)
{:ok, user} = MyApp.Auth.from_token(token)
{:ok, compact_token} = MyApp.Auth.for_compact_token(user)
{:ok, user} = MyApp.Auth.from_compact_token(compact_token)
## Repositories
Usually, use of JSON web tokens requires some form of persistence to blacklist
tokens through their `:jti` claim. Authex also adds the ability to ban a
token through its `:sub` claim.
To do this, we must create a repository. A repository is simply a module that
adopts the `Authex.Repo` behaviour. For more information on creating
repositories, please see the `Authex.Repo` documentation.
Once we have created our blacklist or banlist repo, we define it in our config.
config :my_app, MyApp.Auth, [
blacklist: MyApp.Auth.Blacklist,
banlist: MyApp.Auth.Banlist
]
During the verification process used by `c:verify/2`, any blacklist or banlist
defined in our config will be checked against. Please be aware of any performance
penalty that may be incurred through use of database-backed repos without use
of caching.
## Plugs
Authex provides a number of plugs to handle the typical authentication and
authorization process required by an API using your auth module.
For more information on handling authentication, please see the `Authex.AuthenticationPlug`
documentation.
For more information on handling authorization, please see the `Authex.AuthorizationPlug`
documentation.
"""
@type alg :: :hs256 | :hs384 | :hs512
@type signer_option :: {:alg, alg()} | {:secret, binary()}
@type signer_options :: [signer_option()]
@type verifier_option ::
{:alg, alg()}
| {:time, integer()}
| {:secret, binary()}
| {:banlist, Authex.Banlist.t()}
| {:blacklist, Authex.Blacklist.t()}
@type verifier_options :: [verifier_option()]
@type t :: module()
@doc """
Starts the auth process.
Returns `{:ok, pid}` on success.
Returns `{:error, {:already_started, pid}}` if the auth process is already
started or `{:error, term}` in case anything else goes wrong.
## Options
See the configuration in the moduledoc for options.
"""
@callback start_link(config :: Keyword.t()) :: GenServer.on_start()
@doc """
A callback executed when the auth process starts.
This should be used to dynamically set any config during runtime - such as the
secret key used to sign tokens with.
Returns `{:ok, config}`
## Example
def init(config) do
secret = System.get_env("AUTH_SECRET")
config = Keyword.put(config, :secret, secret)
{:ok, config}
end
"""
@callback init(config :: Keyword.t()) :: {:ok, Keyword.t()}
@doc """
Creates a new token.
A token is a struct that wraps the typical JWT claims but also adds a couple
new fields. Please see the `Authex.Token` documentation for more details.
Returns an `Authex.Token` struct,
## Options
* `:time` - The base time (timestamp format) in which to use.
* `:ttl` - The time-to-live for the token in seconds. The lifetime is based
on the time provided via the options, or the current time if not provided.
## Example
MyApp.Auth.token(sub: 1, scopes: ["admin/read"])
"""
@callback token(claims :: Authex.Token.claims(), options :: Authex.Token.options()) ::
Authex.Token.t()
@doc """
Signs a token, creating a compact token.
The compact token is a binary that can be used for authentication and authorization
purposes. Typically, this would be placed in an HTTP header, such as:
```bash
Authorization: Bearer mytoken
```
Returns `compact_token` or raises an `Authex.Error`.
## Options
* `:secret` - The secret key to sign the token with.
* `:alg` - The algorithm to sign the token with.
Any option provided would override the default set in the config.
"""
@callback sign(token :: Authex.Token.t(), signer_options()) ::
Authex.Token.compact() | no_return()
@doc """
Verifies a compact token.
Verification is a multi-step process that ensures:
1. The token has not been tampered with.
2. The current time is not before the `nbf` value.
3. The current time is not after the `exp` value.
4. The token `jti` is not included in the blacklist (if provided).
5. The token `sub` is not included in the banlist (if provided).
If all checks pass, the token is deemed verified.
## Options
* `:time` - The base time (timestamp format) in which to use.
* `:secret` - The secret key to verify the token with.
* `:alg` - The algorithm to verify the token with.
* `:banlist` - The banlist module to verify with.
* `:blacklist` - The blacklist module to verify with.
Any option provided would override the default set in the config.
Returns `{:ok, token}` or `{:error, reason}`
## Example
{:ok, token} = MyApp.Auth.verify(compact_token)
"""
@callback verify(compact_token :: Authex.Token.compact(), options :: verifier_options()) ::
{:ok, Authex.Token.t()} | {:error, term()}
@doc """
Converts an `Authex.Token` struct into a resource.
This uses the serializer defined in the auth config. It will invoke the
`c:Authex.Serializer.from_token/2` callback defined in the serializer module.
Please see the `Authex.Serializer` documentation for more details on implementing
a serializer.
Returns `{:ok, resource}` or `{:error, reason}`
## Options
Any additional options your serializer might need.
## Example
{:ok, user} = MyApp.Auth.from_token(token)
"""
@callback from_token(token :: Authex.Token.t(), options :: Keyword.t()) ::
{:ok, term()} | {:error, term()}
@doc """
Verifies and converts a compact token into a resource.
Once verified, this invokes `c:from_token/2` with the verified token. Please see
`c:from_token/2` for additional details.
Returns `{:ok, resource}` or `{:error, reason}`
## Options
Please see the options available in `c:verify/2`. You can also include any
additional options your serializer might need.
## Example
{:ok, user} = MyApp.Auth.from_compact_token(compact_token)
"""
@callback from_compact_token(
compact_token :: Authex.Token.compact(),
verifier_options()
) :: {:ok, term()} | {:error, atom}
@doc """
Converts a resource into an `Authex.Token` struct.
This uses the serializer defined in the auth config. It will invoke the
`c:Authex.Serializer.for_token/2` callback defined in the serializer module.
Please see the `Authex.Serializer` documentation for more details on implementing
a serializer.
Returns `{:ok, token}` or `{:error, reason}`
## Options
Please see the options available in `c:token/2`.
## Example
{:ok, token} = MyApp.Auth.for_token(user)
"""
@callback for_token(term(), options :: Authex.Token.options()) ::
{:ok, Authex.Token.t()} | {:error, term()}
@doc """
Converts a resource into a compact token.
Returns `{:ok, compact_token}` or `{:error, reason}`
## Options
Please see the options available in `c:token/2`.
## Example
{:ok, compact_token} = MyApp.Auth.for_compact_token(user)
"""
@callback for_compact_token(term(), token_opts :: Authex.Token.options(), signer_options()) ::
{:ok, Authex.Token.compact()} | {:error, term()}
@doc """
Gets the current user from a `Plug.Conn`.
"""
@callback current_user(Plug.Conn.t()) :: {:ok, term()} | :error
@doc """
Gets the current scopes from a `Plug.Conn`.
"""
@callback current_scopes(Plug.Conn.t()) :: {:ok, list} | :error
@doc """
Checks whether a token subject is banned.
This uses the banlist repo defined in the auth config. The key is the `:sub`
key in the token.
Returns a boolean.
## Example
MyApp.Auth.banned?(token)
"""
@callback banned?(token :: Authex.Token.t()) :: boolean
@doc """
Bans a token subject.
This uses the banlist repo defined in the auth config. The key is the `:sub`
key in the token.
Returns `:ok` on success, or `:error` on failure.
## Example
MyApp.Auth.ban(token)
"""
@callback ban(token :: Authex.Token.t()) :: :ok | :error
@doc """
Unbans a token subject.
This uses the banlist repo defined in the auth config. The key is the `:sub`
key in the token.
Returns `:ok` on success, or `:error` on failure.
## Example
MyApp.Auth.unban(token)
"""
@callback unban(token :: Authex.Token.t()) :: :ok | :error
@doc """
Checks whether a token jti is blacklisted.
This uses the blacklist repo defined in the auth config. The key is the `:jti`
key in the token.
Returns a boolean.
## Example
MyApp.Auth.blacklisted?(token)
"""
@callback blacklisted?(token :: Authex.Token.t()) :: boolean
@doc """
Blacklists a token jti.
This uses the blacklist repo defined in the auth config. The key is the `:jti`
key in the token.
Returns `:ok` on success, or `:error` on failure.
## Example
MyApp.Auth.blacklist(token)
"""
@callback blacklist(token :: Authex.Token.t()) :: :ok | :error
@doc """
Unblacklists a token jti.
This uses the blacklist repo defined in the auth config. The key is the `:jti`
key in the token.
Returns `:ok` on success, or `:error` on failure.
## Example
MyApp.Auth.unblacklist(token)
"""
@callback unblacklist(token :: Authex.Token.t()) :: :ok | :error
@doc """
Saves the config that is currently associated with our auth module.
"""
@callback save_config() :: :ok | :error
@doc """
Sets the config that is used with our auth module.
"""
@callback save_config(keyword()) :: :ok | :error
@doc """
Sets a single config that is used with our auth module.
"""
@callback save_config(atom(), any()) :: :ok | :error
@doc """
Fetches a config value.
"""
@callback config(key :: atom(), default :: any()) :: any()
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@behaviour Authex
@otp_app Keyword.fetch!(opts, :otp_app)
@table_name :"#{__MODULE__}.Config"
@impl Authex
def start_link(config \\ []) do
config = @otp_app |> Application.get_env(__MODULE__, []) |> Keyword.merge(config)
with {:ok, pid} <- GenServer.start_link(__MODULE__, config, name: __MODULE__) do
save_config()
{:ok, pid}
end
end
@impl Authex
def init(config) do
{:ok, config}
end
@impl Authex
def token(claims \\ [], opts \\ []) do
Authex.Token.new(__MODULE__, claims, opts)
end
@impl Authex
def sign(%Authex.Token{} = token, opts \\ []) do
__MODULE__
|> Authex.Signer.new(opts)
|> Authex.Signer.compact(token)
end
@impl Authex
def verify(compact_token, opts \\ []) do
Authex.Verifier.run(__MODULE__, compact_token, opts)
end
@impl Authex
def from_token(%Authex.Token{} = token, opts \\ []) do
serializer = config(:serializer)
Authex.Serializer.from_token(serializer, token, opts)
end
@impl Authex
def from_compact_token(compact_token, opts \\ []) when is_binary(compact_token) do
with {:ok, token} <- verify(compact_token, opts) do
from_token(token, opts)
end
end
@impl Authex
def for_token(resource, opts \\ []) do
serializer = config(:serializer)
Authex.Serializer.for_token(serializer, resource, opts)
end
@impl Authex
def for_compact_token(resource, token_opts \\ [], signer_opts \\ []) do
with {:ok, token} <- for_token(resource, token_opts) do
{:ok, sign(token, signer_opts)}
end
end
@impl Authex
def current_user(%Plug.Conn{private: private}) do
Map.fetch(private, :authex_current_user)
end
@impl Authex
def current_user(_) do
:error
end
@impl Authex
def current_scopes(%Plug.Conn{private: private}) do
with {:ok, token} <- Map.fetch(private, :authex_token) do
Map.fetch(token, :scopes)
end
end
@impl Authex
def current_scopes(_) do
:error
end
@impl Authex
def banned?(%Authex.Token{sub: sub}) do
banlist = config(:banlist, false)
Authex.Repo.exists?(banlist, sub)
end
@impl Authex
def ban(%Authex.Token{sub: sub}) do
banlist = config(:banlist, false)
Authex.Repo.insert(banlist, sub)
end
@impl Authex
def unban(%Authex.Token{sub: sub}) do
banlist = config(:banlist, false)
Authex.Repo.delete(banlist, sub)
end
@impl Authex
def blacklisted?(%Authex.Token{jti: jti}) do
blacklist = config(:blacklist, false)
Authex.Repo.exists?(blacklist, jti)
end
@impl Authex
def blacklist(%Authex.Token{jti: jti}) do
blacklist = config(:blacklist, false)
Authex.Repo.insert(blacklist, jti)
end
@impl Authex
def unblacklist(%Authex.Token{jti: jti}) do
blacklist = config(:blacklist, false)
Authex.Repo.delete(blacklist, jti)
end
@impl Authex
def config(key, default \\ nil) do
@table_name
|> Authex.Config.read()
|> Keyword.get(key, default)
end
@impl Authex
def save_config do
GenServer.call(__MODULE__, :save_config)
end
@impl Authex
def save_config(config) when is_list(config) do
GenServer.call(__MODULE__, {:save_config, config})
end
@impl Authex
def save_config(key, value) do
GenServer.call(__MODULE__, {:save_config, key, value})
end
def child_spec(config) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [config]}
}
end
# GenServer callbacks
def handle_call(:save_config, _from, config) do
Authex.Config.save(@table_name, config)
{:reply, :ok, config}
end
def handle_call({:save_config, config}, _from, _config) do
Authex.Config.save(@table_name, config)
{:reply, :ok, config}
end
def handle_call({:save_config, key, value}, _from, old_config) do
config = Keyword.put(old_config, key, value)
Authex.Config.save(@table_name, config)
{:reply, :ok, config}
end
defoverridable init: 1
end
end
end
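# Serializer sketch referenced in the moduledoc above. MyApp.User and its fields
# are hypothetical, and the exact `Authex.Serializer` callback signatures are an
# assumption inferred from how they are invoked in `from_token/2` and `for_token/2`:
#
#     defmodule MyApp.Auth.UserSerializer do
#       @behaviour Authex.Serializer
#
#       @impl true
#       def for_token(%MyApp.User{id: id, scopes: scopes}, _opts) do
#         {:ok, MyApp.Auth.token(sub: id, scopes: scopes)}
#       end
#
#       @impl true
#       def from_token(%Authex.Token{sub: id, scopes: scopes}, _opts) do
#         {:ok, %MyApp.User{id: id, scopes: scopes}}
#       end
#     end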
|
lib/authex.ex
| 0.816113
| 0.477615
|
authex.ex
|
starcoder
|
defmodule I18nHelpers.Ecto.TranslatableFields do
@moduledoc ~S"""
Provides macros for defining translatable fields and associations.
This module's purpose is to provide the `I18nHelpers.Ecto.Translator` module
with a way to access the list of fields and associations from the Ecto Schema
that needs to be translated, and with virtual fields allowing to store the
translations for the current locale.
`__using__/1` provides the caller module with two functions,
`get_translatable_fields/0` and `get_translatable_assocs/0`, listing all the
translatable fields and the translatable associations respectively.
Fields that are to be translated are expected to hold maps
field :title, :map
where each key represents a locale and each value contains the text for
that locale. Below is an example of such map:
%{
"en" => "My Favorite Books",
"fr" => "Mes Livres Préférés",
"nl" => "<NAME>",
"en-GB" => "My Favourite Books"
}
Each of those fields must come with a virtual field which is used to
hold the translation for the current locale.
field :title, :map
field :translated_title, :string, virtual: true
Such a translatable field must be included in the translatable fields list:
def get_translatable_fields, do: [:title]
This module provides the macro `translatable_field/1`, which executes
the three steps above (add the field as `:map`, add the virtual field and
add the field to the translatable fields list) in one line:
translatable_field :title
Macros marking associations as translatable are also provided:
* translatable_belongs_to/2
* translatable_has_many/2
* translatable_has_one/2
* translatable_many_to_many/3
The macros above add the given association field name to the translatable
associations list, which is accessible with `get_translatable_assocs/0`.
"""
alias I18nHelpers.Ecto.TranslatableType
@callback get_translatable_fields() :: [atom]
@callback get_translatable_assocs() :: [atom]
defmacro __using__(_args) do
this_module = __MODULE__
quote do
@behaviour unquote(this_module)
import unquote(this_module),
only: [
translatable_field: 1,
translatable_belongs_to: 2,
translatable_has_many: 2,
translatable_has_one: 2,
translatable_many_to_many: 3
]
Module.register_attribute(__MODULE__, :translatable_fields, accumulate: true)
Module.register_attribute(__MODULE__, :translatable_assocs, accumulate: true)
@before_compile unquote(this_module)
end
end
defmacro __before_compile__(_env) do
quote do
def get_translatable_fields(), do: @translatable_fields
def get_translatable_assocs(), do: @translatable_assocs
end
end
@doc ~S"""
Defines a translatable field on the schema.
This macro will generate two fields:
* a field with the given name and type `:map` and
* a virtual field with the given name prepended by `"translated_"` and type `:string`.
For example
translatable_field :title
will generate
field :title, :map
field :translated_title, :string, virtual: true
The macro will add the given field name into the translatable fields list.
"""
defmacro translatable_field(field_name) do
quote do
fields = Module.get_attribute(__MODULE__, :struct_fields)
unless List.keyfind(fields, unquote(field_name), 0) do
field(unquote(field_name), TranslatableType)
end
field(String.to_atom("translated_" <> Atom.to_string(unquote(field_name))), :string,
virtual: true
)
Module.put_attribute(__MODULE__, :translatable_fields, unquote(field_name))
end
end
@doc ~S"""
Defines a translatable `belongs_to` association.
The macro will add the given field name into the translatable associations list.
"""
defmacro translatable_belongs_to(field_name, module_name) do
quote do
belongs_to(unquote(field_name), unquote(module_name))
Module.put_attribute(__MODULE__, :translatable_assocs, unquote(field_name))
end
end
@doc ~S"""
Defines a translatable `has_many` association.
The macro will add the given field name into the translatable associations list.
"""
defmacro translatable_has_many(field_name, module_name) do
quote do
has_many(unquote(field_name), unquote(module_name))
Module.put_attribute(__MODULE__, :translatable_assocs, unquote(field_name))
end
end
@doc ~S"""
Defines a translatable `has_one` association.
The macro will add the given field name into the translatable associations list.
"""
defmacro translatable_has_one(field_name, module_name) do
quote do
has_one(unquote(field_name), unquote(module_name))
Module.put_attribute(__MODULE__, :translatable_assocs, unquote(field_name))
end
end
@doc ~S"""
Defines a translatable `many_to_many` association.
The macro will add the given field name into the translatable associations list.
"""
defmacro translatable_many_to_many(field_name, module_name, opts \\ []) do
quote do
many_to_many(unquote(field_name), unquote(module_name), unquote(opts))
Module.put_attribute(__MODULE__, :translatable_assocs, unquote(field_name))
end
end
end
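# Illustrative schema sketch (the Post schema, table and association are
# assumptions, not part of this library):
#
#     defmodule MyApp.Post do
#       use Ecto.Schema
#       use I18nHelpers.Ecto.TranslatableFields
#
#       schema "posts" do
#         translatable_field :title
#         translatable_has_many :comments, MyApp.Comment
#       end
#     end
#
#     MyApp.Post.get_translatable_fields() #=> [:title]
#     MyApp.Post.get_translatable_assocs() #=> [:comments]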
|
lib/ecto/translatable_fields.ex
| 0.858689
| 0.692122
|
translatable_fields.ex
|
starcoder
|
defmodule I18n2Elm do
@moduledoc ~S"""
Transforms a folder of i18n key/value JSON files into a series of Elm types \
and functions.
Expects a PATH to one or more JSON files from which to generate Elm code.
i18n2elm PATH [--module-name PATH]
The JSON files at the given PATH will be converted to Elm types and functions.
## Options
* `--module-name` - the module name prefix for the printed Elm modules; \
defaults to 'Translations'.
"""
require Logger
alias I18n2Elm.{Parser, Printer}
@spec main([String.t()]) :: :ok
def main(args) do
Logger.debug(fn -> "Arguments: #{inspect(args)}" end)
{options, paths, errors} = OptionParser.parse(args, strict: [module_name: :string])
if Enum.empty?(paths) do
IO.puts(@moduledoc)
exit(:normal)
end
if length(errors) > 0 do
IO.puts("Error: Found one or more errors in the supplied options")
exit({:unknown_arguments, errors})
end
files = resolve_all_paths(paths)
Logger.debug(fn -> "Files: #{inspect(files)}" end)
if Enum.empty?(files) do
IO.puts("Error: Could not find any JSON files in path: #{inspect(paths)}")
exit(:no_files)
end
output_path = create_output_dir(options)
generate(files, output_path)
end
@spec resolve_all_paths([String.t()]) :: [String.t()]
defp resolve_all_paths(paths) do
paths
|> Enum.filter(&File.exists?/1)
|> Enum.reduce([], fn filename, files ->
cond do
File.dir?(filename) ->
walk_directory(filename) ++ files
String.ends_with?(filename, ".json") ->
[filename | files]
true ->
files
end
end)
end
@spec walk_directory(String.t()) :: [String.t()]
defp walk_directory(dir) do
dir
|> File.ls!()
|> Enum.reduce([], fn file, files ->
filename = "#{dir}/#{file}"
cond do
File.dir?(filename) ->
walk_directory(filename) ++ files
String.ends_with?(file, ".json") ->
[filename | files]
true ->
files
end
end)
end
@spec create_output_dir(list) :: String.t()
defp create_output_dir(options) do
output_path = Keyword.get(options, :module_name, "Translations")
File.mkdir_p!(output_path)
output_path
end
@spec generate([String.t()], String.t()) :: :ok
def generate(translation_files, module_name) do
translations = Parser.parse_translation_files(translation_files)
printed_translations = Printer.print_translations(translations, module_name)
printed_translations
|> Enum.each(fn {file_path, file_content} ->
{:ok, file} = File.open(file_path, [:write])
IO.binwrite(file, file_content)
File.close(file)
Logger.info("Created file: #{file_path}")
end)
end
end
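# Example invocation (paths are illustrative):
#
#     I18n2Elm.main(["priv/i18n", "--module-name", "Translations"])
#
# This reads every *.json file found under priv/i18n and writes the generated
# Elm modules into the "Translations" directory.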
|
lib/i18n2elm.ex
| 0.536799
| 0.429669
|
i18n2elm.ex
|
starcoder
|
defmodule WhiteBread.Context.StepFunction do
alias WhiteBread.RegexExtension
defstruct string: nil,
regex: nil,
function: nil,
type: nil
def new(%Regex{} = regex, func) when is_function(func, 2) do
%__MODULE__{
regex: regex,
function: func,
type: :regex
}
end
def new(string, func) when is_function(func, 2) do
%__MODULE__{
string: string,
function: func,
type: :string
}
end
# All stored funcs must be arity two
def new(match, func) when is_function(func, 1) do
wrapped_func = fn(state, _extra) ->
func.(state)
end
new(match, wrapped_func)
end
# All stored funcs must be arity two
def new(match, func) when is_function(func, 0) do
wrapped_func = fn(state, _extra) ->
func.()
{:ok, state}
end
new(match, wrapped_func)
end
def type(%__MODULE__{type: type}) do
type
end
@doc ~S"""
Indicates if the step function matches the given string.
String step functions match if the whole string matches.
Regex step functions delegate to `Regex.match?/2`.
## Example - String step functions
iex> step_function = StepFunction.new("hello midgard", fn -> :hooray end)
iex> StepFunction.match?(step_function, "hello midgard")
true
iex> step_function = StepFunction.new("hello asgard", fn -> :hooray end)
iex> StepFunction.match?(step_function, "hello midgard")
false
## Example - Regex step functions
iex> step_function = StepFunction.new(~r/^hello (.+)$/, fn -> :hooray end)
iex> StepFunction.match?(step_function, "hello midgard")
true
iex> step_function = StepFunction.new(~r/^hello ([a-z]+)$/, fn -> :hooray end)
iex> StepFunction.match?(step_function, "hello midgard 9")
false
"""
def match?(%__MODULE__{type: :string} = data, string) do
string == data.string
end
def match?(%__MODULE__{type: :regex} = data, string) do
Regex.match?(data.regex, string)
end
def call(%__MODULE__{type: :string, function: func}, step, state) do
args = [state, {:table_data, step.table_data}]
apply(func, args)
end
def call(%__MODULE__{type: :regex, function: func, regex: regex}, step, state) do
key_matches = RegexExtension.atom_keyed_named_captures(
regex,
step.text
)
extra =
key_matches
|> Map.put(:table_data, step.table_data)
|> Map.put(:doc_string, step.doc_string)
apply(func, [state, extra])
end
end
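# Sketch of invoking a regex step. The step's :text, :table_data and :doc_string
# fields are assumptions taken from how `call/3` reads them above:
#
#     step_fn =
#       StepFunction.new(~r/^I have (?<count>\d+) apples$/, fn state, %{count: count} ->
#         {:ok, Map.put(state, :apples, String.to_integer(count))}
#       end)
#
#     step = %{text: "I have 3 apples", table_data: [], doc_string: ""}
#     StepFunction.call(step_fn, step, %{})  #=> {:ok, %{apples: 3}}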
|
lib/white_bread/context/step_function.ex
| 0.74512
| 0.464719
|
step_function.ex
|
starcoder
|
defmodule ExCO2Mini.Reader do
require Logger
use GenServer
alias ExCO2Mini.Decoder
@moduledoc """
Reads data packets from the USB CO₂ sensor, decodes them, and sends events
to the subscribed process(es).
Due to the `ioctl` calls required, this module will open a `Port` to a tiny
C wrapper, rather than reading the device directly.
"""
defmodule State do
@moduledoc false
@enforce_keys [:port, :log_name]
defstruct(
port: nil,
log_name: nil,
subscribers: MapSet.new(),
send_from: nil
)
end
@doc """
Starts reading from the USB CO₂ sensor device.
`opts` is a keyword list. It accepts all of the options that `GenServer.start_link/3` does, as well as the following:
* `opts[:device]` **(required)** — The path to the CO₂Mini device, e.g. `/dev/hidraw0` or a symlink to the device.
* `opts[:subscribers]` — A list of initial subscribers (default: none).
* This can be used to save a call to `subscribe/2`, and/or to ensure correct data routing in the case of a supervisor restart.
* `opts[:send_from_name]` — If true, then `opts[:name]` must be included as well. Messages sent by this reader will use that name instead of the PID.
Returns `{:ok, pid}` once the reader (and wrapper process) has been successfully started.
"""
def start_link(opts) do
device = Keyword.fetch!(opts, :device)
subscribers = Keyword.get(opts, :subscribers, [])
send_from = if Keyword.get(opts, :send_from_name), do: Keyword.fetch!(opts, :name)
log_name =
Keyword.get(opts, :name, __MODULE__)
|> Atom.to_string()
|> String.replace("Elixir.", "")
GenServer.start_link(__MODULE__, {device, subscribers, send_from, log_name}, opts)
end
@doc """
Starts sending data values to a target process.
`target` can be a PID or a registered name (atom or module).
A given name or PID can only be subscribed once. Subsequent calls to this
function will be effectively ignored, and `target` will still only receive
one message per data packet. However, a process may receive multiple
messages per packet if it is subscribed under both its PID and its name.
"""
def subscribe(reader, target \\ self()) do
GenServer.call(reader, {:subscribe, target})
end
@impl true
def init({device, subscribers, send_from, log_name}) do
Process.flag(:trap_exit, true)
port = open_port(device)
state = %State{
port: port,
subscribers: MapSet.new(subscribers),
send_from: send_from || self(),
log_name: log_name
}
Logger.info("#{state.log_name} started.")
{:ok, state}
end
@impl true
def handle_call({:subscribe, pid}, _from, state) do
state = %State{state | subscribers: MapSet.put(state.subscribers, pid)}
{:reply, :ok, state}
end
@impl true
def handle_info({port, {:data, bytes}}, %State{port: port} = state) do
{_key, _value} = data = Decoder.decode(bytes)
Enum.each(state.subscribers, fn pid ->
send(pid, {state.send_from, data})
end)
{:noreply, state}
end
@impl true
def handle_info({:EXIT, port, reason}, %State{port: port} = state) do
err = "#{state.log_name} port has died: #{inspect(reason)}"
Logger.error(err)
{:stop, err, state}
end
# Used for testing.
defp open_port(:dummy = dev), do: dev
defp open_port(device) do
args = decoder_key() ++ [device]
Port.open({:spawn_executable, reader_executable()}, [:binary, {:args, args}])
end
defp reader_executable do
:code.priv_dir(:ex_co2_mini)
|> Path.join("reader")
end
defp decoder_key do
Decoder.key()
|> Enum.map(&Integer.to_string/1)
end
end
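# Usage sketch (the device path and registered name are assumptions):
#
#     {:ok, _pid} =
#       ExCO2Mini.Reader.start_link(device: "/dev/hidraw0", name: MyApp.CO2Reader)
#
#     :ok = ExCO2Mini.Reader.subscribe(MyApp.CO2Reader)
#
#     receive do
#       {_reader, {key, value}} -> IO.inspect({key, value}, label: "CO₂ sample")
#     end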
|
lib/ex_co2_mini/reader.ex
| 0.784855
| 0.612628
|
reader.ex
|
starcoder
|
defmodule Tensorex.Analyzer do
@moduledoc """
Functions for tensor (or matrix) analysis.
Matrices are represented as 2-rank tensors.
"""
import Tensorex
import Tensorex.Operator
@doc """
Performs the householder conversion.
Returns a tuple of the converted vecter and the reflection matrix (`P`). The dot product of the
reflection matrix and the given vector (`V`) (`P·V`) results to the converted vector.
iex> Tensorex.Analyzer.householder(Tensorex.from_list([2, 3.5, -1.6, 8.2]))
{
%Tensorex{data: %{[0] => -9.276313923105448}, shape: [4]},
%Tensorex{data: %{[0, 0] => -0.21560288025811625, [0, 1] => -0.3773050404517033 , [0, 2] => 0.172482304206493 , [0, 3] => -0.8839718090582762 ,
[1, 0] => -0.3773050404517033 , [1, 1] => 0.8828901314218394 , [1, 2] => 0.05353593992144486, [1, 3] => -0.27437169209740486,
[2, 0] => 0.172482304206493 , [2, 1] => 0.05353593992144486, [2, 2] => 0.9755264274644824 , [2, 3] => 0.12542705924452796,
[3, 0] => -0.8839718090582762 , [3, 1] => -0.27437169209740486, [3, 2] => 0.12542705924452796, [3, 3] => 0.3571863213717944 }, shape: [4, 4]}
}
iex> Tensorex.Analyzer.householder(Tensorex.from_list([3.8, 0.0, 0.0, 0.0, 0.0]))
{
%Tensorex{data: %{[0] => 3.8}, shape: [5]},
%Tensorex{data: %{[0, 0] => 1,
[1, 1] => 1,
[2, 2] => 1,
[3, 3] => 1,
[4, 4] => 1}, shape: [5, 5]}
}
"""
@spec householder(Tensorex.t()) :: {Tensorex.t(), Tensorex.t()}
def householder(%Tensorex{data: %{[0] => _} = store, shape: [dimension]} = vector)
when map_size(store) === 1 do
{vector, kronecker_delta(dimension)}
end
def householder(%Tensorex{shape: [dimension]} = vector) do
dot = self_dot(vector)
norm = if vector[[0]] < 0, do: :math.sqrt(dot), else: -:math.sqrt(dot)
normal_vector =
update_in(vector[[0]], &(&1 - norm))
|> divide(dot * :math.sqrt(2 * abs(vector[[0]] - norm)))
reflector =
kronecker_delta(dimension)
|> subtract(
normal_vector
|> multiply(normal_vector)
|> divide(self_dot(normal_vector))
|> multiply(2)
)
{%{vector | data: %{[0] => norm}}, reflector}
end
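# Note: the reflector built above is the standard Householder matrix
# P = I - 2·(n⊗n)/(n·n) for the normal vector n, chosen so that P·V zeroes
# every component of V except the first (see the doctests above).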
@spec self_dot(Tensorex.t()) :: number
defp self_dot(%Tensorex{data: store, shape: [_]}) do
store |> Stream.map(fn {_, value} -> value * value end) |> Enum.sum()
end
@doc """
Bidiagonalizes a matrix.
Returns a 3-element tuple containing the left-side orthogonal matrix (`U`), the bidiagonalized
matrix (`A`), and the right-side orthogonal matrix (`V`). Their dot product (`U·A·V`)
equals the given matrix.
iex> Tensorex.Analyzer.bidiagonalize(Tensorex.from_list([[1, 3],
...> [2, 4]]))
{
%Tensorex{data: %{[0, 0] => -0.4472135954999581, [0, 1] => -0.8944271909999159,
[1, 0] => -0.8944271909999159, [1, 1] => 0.447213595499958 }, shape: [2, 2]},
%Tensorex{data: %{[0, 0] => -2.23606797749979 , [0, 1] => -4.919349550499538 ,
[1, 1] => -0.8944271909999157}, shape: [2, 2]},
%Tensorex{data: %{[0, 0] => 1,
[1, 1] => 1 }, shape: [2, 2]}
}
iex> Tensorex.Analyzer.bidiagonalize(Tensorex.from_list([[1, 3],
...> [2, 4],
...> [8, 7]]))
{
%Tensorex{data: %{[0, 0] => -0.12038585308576932, [0, 1] => -0.6785172735171086, [0, 2] => -0.7246527140056269 ,
[1, 0] => -0.24077170617153842, [1, 1] => -0.6882103774244954, [1, 2] => 0.6843942298942036 ,
[2, 0] => -0.9630868246861537 , [2, 1] => 0.2568672535457624, [2, 2] => -0.08051696822284758}, shape: [3, 3]},
%Tensorex{data: %{[0, 0] => -8.306623862918075 , [0, 1] => -8.065852156746537 ,
[1, 1] => -2.9903225554289703}, shape: [3, 2]},
%Tensorex{data: %{[0, 0] => 1 ,
[1, 1] => 1 }, shape: [2, 2]}
}
iex> Tensorex.Analyzer.bidiagonalize(Tensorex.from_list([[1, 3, 4],
...> [2, 4, 7],
...> [6, 8, 9]]))
{
%Tensorex{data: %{[0, 0] => -0.1561737618886061 , [0, 1] => -0.5866584511916395, [0, 2] => -0.7946330082138469 ,
[1, 0] => -0.31234752377721214, [1, 1] => -0.7338871586541569, [1, 2] => 0.6031986925986929 ,
[2, 0] => -0.9370425713316364 , [2, 1] => 0.3424054614166589, [2, 2] => -0.06862739616392308}, shape: [3, 3]},
%Tensorex{data: %{[0, 0] => -6.4031242374328485 , [0, 1] => 14.537587950366257 ,
[1, 1] => 4.644937425077393 , [1, 2] => -1.277069945154351 ,
[2, 2] => 0.6724472155230544 }, shape: [3, 3]},
%Tensorex{data: %{[0, 0] => 1 ,
[1, 1] => -0.6338226109370237, [1, 2] => -0.7734784404655207 ,
[2, 1] => -0.7734784404655207, [2, 2] => 0.633822610937024 }, shape: [3, 3]}
}
iex> Tensorex.Analyzer.bidiagonalize(Tensorex.from_list([[2, -4, 8],
...> [3, 5, 6],
...> [1, 9, 11],
...> [7, 12, 13]]))
{
%Tensorex{data: %{[0, 0] => -0.25197631533948517, [0, 1] => 0.11018150006547239 , [0, 2] => -0.9605551708365737 , [0, 3] => 0.041252119101685414,
[1, 0] => -0.37796447300922725, [1, 1] => 0.020828566102646506, [1, 2] => 0.06187677855532897, [1, 3] => -0.9235151824699903 ,
[2, 0] => -0.12598815766974245, [2, 1] => -0.9885657272873108 , [2, 2] => -0.0793161285954799 , [2, 3] => 0.0239528433493658 ,
[3, 0] => -0.881917103688197 , [3, 1] => 0.1008167184069181 , [3, 2] => 0.2592565906575201 , [3, 3] => 0.3805840665510335 }, shape: [4, 4]},
%Tensorex{data: %{[0, 0] => -7.937253933193772 , [0, 1] => 21.267756353632144 ,
[1, 1] => 11.647368945587411 , [1, 2] => 1.3952492335365543 ,
[2, 2] => -8.128629398872933 }, shape: [4, 3]},
%Tensorex{data: %{[0, 0] => 1 ,
[1, 1] => -0.5923904504775179 , [1, 2] => -0.8056510126494246 ,
[2, 1] => -0.8056510126494246 , [2, 2] => 0.592390450477518 }, shape: [3, 3]}
}
iex> Tensorex.Analyzer.bidiagonalize(Tensorex.from_list([[1, 3, 5],
...> [2, 4, 6]]))
{
%Tensorex{data: %{[0, 0] => -0.4472135954999581, [0, 1] => -0.8944271909999159,
[1, 0] => -0.8944271909999159, [1, 1] => 0.447213595499958 }, shape: [2, 2]},
%Tensorex{data: %{[0, 0] => -2.23606797749979 , [0, 1] => 9.055385138137419 ,
[1, 1] => 1.9877674693472376, [1, 2] => -0.22086305214969304}, shape: [2, 3]},
%Tensorex{data: %{[0, 0] => 1 ,
[1, 1] => -0.5432512781572743, [1, 2] => -0.8395701571521512 ,
[2, 1] => -0.8395701571521512, [2, 2] => 0.5432512781572743 }, shape: [3, 3]}
}
iex> Tensorex.Analyzer.bidiagonalize(Tensorex.from_list([[1, 3, 5]]))
{
%Tensorex{data: %{[0, 0] => 1}, shape: [1, 1]},
%Tensorex{data: %{[0, 0] => 1, [0, 1] => -5.830951894845301 }, shape: [1, 3]},
%Tensorex{data: %{[0, 0] => 1,
[1, 1] => -0.5144957554275265, [1, 2] => -0.8574929257125442,
[2, 1] => -0.8574929257125442, [2, 2] => 0.5144957554275265}, shape: [3, 3]}
}
"""
@spec bidiagonalize(Tensorex.t()) :: {Tensorex.t(), Tensorex.t(), Tensorex.t()}
def bidiagonalize(%Tensorex{shape: [1, columns]} = matrix) when columns in 1..2 do
{kronecker_delta(1), matrix, kronecker_delta(columns)}
end
def bidiagonalize(%Tensorex{shape: [1, columns]} = matrix) do
{householdered, reflector} = householder(matrix[[0, 1..-1]])
right = put_in(kronecker_delta(columns)[[1..-1, 1..-1]], reflector)
{kronecker_delta(1), put_in(matrix[[0, 1..-1]], householdered), right}
end
def bidiagonalize(%Tensorex{shape: [_, 1]} = matrix) do
{householdered, reflector} = householder(matrix[[0..-1, 0]])
{reflector, put_in(matrix[[0..-1, 0]], householdered), kronecker_delta(1)}
end
def bidiagonalize(%Tensorex{shape: [rows, 2]} = matrix) do
{householdered, reflector} = householder(matrix[[0..-1, 0]])
sub_columns = reflector |> multiply(matrix[[0..-1, 1..1]], [{1, 0}])
{sub_left, sub_bidiagonalized, _} = sub_columns[[1..-1]] |> bidiagonalize()
left =
reflector |> multiply(put_in(kronecker_delta(rows)[[1..-1, 1..-1]], sub_left), [{1, 0}])
bidiagonalized =
matrix
|> put_in([[0..-1, 0]], householdered)
|> put_in([[0, 1]], sub_columns[[0, 0]])
|> put_in([[1..-1, 1]], sub_bidiagonalized[[0..-1, 0]])
{left, bidiagonalized, kronecker_delta(2)}
end
def bidiagonalize(%Tensorex{shape: [rows, columns]} = matrix) do
{householdered_column, column_reflector} = householder(matrix[[0..-1, 0]])
sub_columns = column_reflector |> multiply(matrix[[0..-1, 1..-1]], [{1, 0}])
{householdered_row, row_reflector} = householder(sub_columns[[0]])
{sub_left, sub_bidiagonalized, sub_right} =
sub_columns[[1..-1]] |> multiply(row_reflector, [{1, 0}]) |> bidiagonalize()
left =
column_reflector
|> multiply(put_in(kronecker_delta(rows)[[1..-1, 1..-1]], sub_left), [{1, 0}])
bidiagonalized =
matrix
|> put_in([[0..-1, 0]], householdered_column)
|> put_in([[0, 1..-1]], householdered_row)
|> put_in([[1..-1, 1..-1]], sub_bidiagonalized)
right =
kronecker_delta(columns)
|> put_in([[1..-1, 1..-1]], sub_right |> multiply(row_reflector, [{1, 1}]))
{left, bidiagonalized, right}
end
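# Note: the recursion above performs Golub-Kahan-style bidiagonalization by
# alternating Householder reflections on the leading column and the leading row,
# then recursing on the remaining submatrix.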
@doc """
Diagonalizes a square matrix.
Returns a 2-element tuple containing the diagonalized matrix (`D`) and the square matrix (`P`)
composed of eigenvectors of the given matrix. The dot product `P·D·P^-1` equals the
given matrix.
iex> Tensorex.Analyzer.eigen_decomposition(Tensorex.from_list([[8, 1],
...> [4, 5]]))
{
%Tensorex{data: %{[0, 0] => 9.0 ,
[1, 1] => 4.0 }, shape: [2, 2]},
%Tensorex{data: %{[0, 0] => 0.7071067811865475, [0, 1] => 0.24253562503633297,
[1, 0] => 0.7071067811865475, [1, 1] => -0.9701425001453319 }, shape: [2, 2]}
}
iex> Tensorex.Analyzer.eigen_decomposition(Tensorex.from_list([[2, 0],
...> [0, 3]]))
{
%Tensorex{data: %{[0, 0] => 2,
[1, 1] => 3}, shape: [2, 2]},
%Tensorex{data: %{[0, 0] => 1,
[1, 1] => 1}, shape: [2, 2]}
}
iex> Tensorex.Analyzer.eigen_decomposition(Tensorex.from_list([[2, 0],
...> [4, 3]]))
{
%Tensorex{data: %{[0, 0] => 3.0,
[1, 1] => 2.0 }, shape: [2, 2]},
%Tensorex{data: %{ [0, 1] => -0.24253562503633297,
[1, 0] => 1 , [1, 1] => 0.9701425001453319 }, shape: [2, 2]}
}
iex> Tensorex.Analyzer.eigen_decomposition(Tensorex.from_list([[ 1, 8, 4],
...> [-3, 2, -6],
...> [ 8, -9, 11]]))
{
%Tensorex{data: %{[0, 0] => 15.303170410844274 ,
[1, 1] => -3.3868958657320674,
[2, 2] => 2.0837254548877966 }, shape: [3, 3]},
%Tensorex{data: %{[0, 0] => 0.022124491408649645, [0, 1] => 0.8133941080334768, [0, 2] => 0.8433114989223975 ,
[1, 0] => -0.4151790326348706 , [1, 1] => -0.1674957147614615, [1, 2] => 0.32735161385148664,
[2, 0] => 0.909470657987536 , [2, 1] => -0.5570773829127975, [2, 2] => -0.4262236932575271 }, shape: [3, 3]}
}
iex> Tensorex.Analyzer.eigen_decomposition(Tensorex.from_list([[ 1, 8, 4, -8, 6],
...> [ 8, 2, -6, 15, 4],
...> [ 4, -6, 11, 7, 9],
...> [-8, 15, 7, 3, 2],
...> [ 6, 4, 9, 2, 6]]))
{
%Tensorex{data: %{[0, 0] => 22.48141136723747 ,
[1, 1] => -21.990125946333524 ,
[2, 2] => 15.981743258501801 ,
[3, 3] => 9.870440666608177 ,
[4, 4] => -3.3434693460139164 }, shape: [5, 5]},
%Tensorex{data: %{[0, 0] => 0.22485471488273154, [0, 1] => 0.4533959138705312 , [0, 2] => 0.15613132580124428, [0, 3] => 0.6959350745397415 , [0, 4] => 0.48494317564991224 ,
[1, 0] => 0.3411622703810978 , [1, 1] => -0.5996180449764692 , [1, 2] => -0.6337681498982659 , [1, 3] => 0.31120102182464826, [1, 4] => 0.15986982703698566 ,
[2, 0] => 0.5907934013280463 , [2, 1] => -0.3000622953270929 , [2, 2] => 0.5126463927515507 , [2, 3] => -0.38215013214304583, [2, 4] => 0.38997529200213504 ,
[3, 0] => 0.4381110287686999 , [3, 1] => 0.5856554959326528 , [3, 2] => -0.5216463205486719 , [3, 3] => -0.4370125068120504 , [3, 4] => 0.044402158764973956,
[4, 0] => 0.5404355150079331 , [4, 1] => 0.043134724190818236, [4, 2] => 0.19758475025904781, [4, 3] => 0.2860238476658191 , [4, 4] => -0.7649963886964449 }, shape: [5, 5]}
}
"""
@spec eigen_decomposition(Tensorex.t()) :: {Tensorex.t(), Tensorex.t()}
def eigen_decomposition(%Tensorex{data: %{[0, 1] => divider}, shape: [2, 2]} = matrix) do
a = (matrix[[0, 0]] + matrix[[1, 1]]) * 0.5
b = :math.sqrt(a * a + divider * matrix[[1, 0]] - matrix[[0, 0]] * matrix[[1, 1]])
{diagonal, square} =
[a + b, a - b]
|> Stream.with_index()
|> Enum.map_reduce(%{}, fn {lambda, index}, acc ->
x = (lambda - matrix[[0, 0]]) / divider
norm = :math.sqrt(1 + x * x)
store = acc |> Map.put([0, index], 1 / norm) |> Map.put([1, index], x / norm)
{{[index, index], lambda}, store}
end)
{%{matrix | data: Enum.into(diagonal, %{})}, %{matrix | data: Enum.into(square, %{})}}
end
def eigen_decomposition(%Tensorex{data: %{[1, 0] => c}, shape: [2, 2]} = matrix) do
a = (matrix[[0, 0]] + matrix[[1, 1]]) * 0.5
b = :math.sqrt(a * a - matrix[[0, 0]] * matrix[[1, 1]])
{diagonal, square} =
[a + b, a - b]
|> Stream.with_index()
|> Enum.map_reduce(%{}, fn {lambda, index}, acc ->
store =
if lambda == matrix[[0, 0]] do
x = (lambda - matrix[[1, 1]]) / c
norm = :math.sqrt(x * x + 1)
acc |> Map.put([0, index], x / norm) |> Map.put([1, index], 1 / norm)
else
acc |> Map.put([1, index], 1)
end
{{[index, index], lambda}, store}
end)
{%{matrix | data: Enum.into(diagonal, %{})}, %{matrix | data: square}}
end
def eigen_decomposition(%Tensorex{shape: [2, 2]} = matrix), do: {matrix, kronecker_delta(2)}
def eigen_decomposition(%Tensorex{shape: [3, 3]} = matrix) do
a = -matrix[[0, 0]] - matrix[[1, 1]] - matrix[[2, 2]]
b =
matrix[[0, 0]] * matrix[[1, 1]] + matrix[[0, 0]] * matrix[[2, 2]] +
matrix[[1, 1]] * matrix[[2, 2]] - matrix[[0, 1]] * matrix[[1, 0]] -
matrix[[0, 2]] * matrix[[2, 0]] - matrix[[1, 2]] * matrix[[2, 1]]
c =
matrix[[0, 0]] * matrix[[1, 2]] * matrix[[2, 1]] +
matrix[[1, 1]] * matrix[[0, 2]] * matrix[[2, 0]] +
matrix[[2, 2]] * matrix[[0, 1]] * matrix[[1, 0]] -
matrix[[0, 0]] * matrix[[1, 1]] * matrix[[2, 2]] -
matrix[[0, 1]] * matrix[[1, 2]] * matrix[[2, 0]] -
matrix[[0, 2]] * matrix[[1, 0]] * matrix[[2, 1]]
p = (3 * b - a * a) / 9
q = (27 * c + 2 * a * a * a - 9 * a * b) / 54
d = :math.sqrt(-q * q - p * p * p)
r = :math.sqrt(d * d + q * q)
h = :math.acos(-q / r) / 3
cbrt = :math.pow(r, 1 / 3)
cos = :math.cos(h)
sin = :math.sqrt(3) * :math.sin(h)
{diagonal, square} =
[cbrt * 2 * cos - a / 3, -cbrt * (sin + cos) - a / 3, cbrt * (sin - cos) - a / 3]
|> Stream.with_index()
|> Enum.map_reduce(%{}, fn {lambda, index}, acc ->
y =
(matrix[[0, 2]] * matrix[[1, 0]] / matrix[[1, 2]] - matrix[[0, 0]] + lambda) /
(matrix[[0, 1]] - matrix[[0, 2]] * (matrix[[1, 1]] - lambda) / matrix[[1, 2]])
z =
(matrix[[0, 1]] * matrix[[1, 0]] / (matrix[[1, 1]] - lambda) - matrix[[0, 0]] + lambda) /
(matrix[[0, 2]] - matrix[[0, 1]] * matrix[[1, 2]] / (matrix[[1, 1]] - lambda))
norm = :math.sqrt(1 + y * y + z * z)
store =
acc
|> Map.put([0, index], 1 / norm)
|> Map.put([1, index], y / norm)
|> Map.put([2, index], z / norm)
{{[index, index], lambda}, store}
end)
{%{matrix | data: Enum.into(diagonal, %{})}, %{matrix | data: square}}
end
def eigen_decomposition(%Tensorex{shape: [dimension, dimension]} = matrix) do
{left, %{data: eigens} = diagonalized, right} = singular_value_decomposition(matrix)
signed =
Enum.into(eigens, %{}, fn {[index | _] = indices, value} = element ->
if left[[0, index]] * right[[0, index]] < 0, do: {indices, -value}, else: element
end)
{%{diagonalized | data: signed}, right}
end
@doc """
Finds the singular values and the singular vectors of the given matrix.
Returns a 3-element tuple that contains the left singular vectors (`U`), the diagonal containing
singular values (`S`), and the right singular vectors (`V`). Their dot product (`U·S·V^T`)
equals the given matrix.
iex> Tensorex.Analyzer.singular_value_decomposition(Tensorex.from_list([[1, 2, 3],
...> [2, 3, 5],
...> [3, 8, 9],
...> [4, 5, 6]]))
{%Tensorex{data: %{[0, 0] => 0.2226615344045355 , [0, 1] => -0.05855885924201132, [0, 2] => -0.3843955833877411 ,
[1, 0] => 0.36536351530221217, [1, 1] => 0.15902949910885741, [1, 2] => -0.8106021820799661 ,
[2, 0] => 0.7400203406817314 , [2, 1] => -0.6059441250735513 , [2, 2] => 0.2714639225744238 ,
[3, 0] => 0.518942422779177 , [3, 1] => 0.7772465475679571 , [3, 2] => 0.3485275837286095 }, shape: [4, 3]},
%Tensorex{data: %{[0, 0] => 16.709361526261223 ,
[1, 1] => 1.6718956724884724 ,
[2, 2] => 1.0010006218857228 }, shape: [3, 3]},
%Tensorex{data: %{[0, 0] => 0.3141484053667126 , [0, 1] => 0.9274824856259514 , [0, 2] => 0.20269932970451207,
[1, 0] => 0.6018355900828712 , [1, 1] => -0.3596812252112713 , [1, 2] => 0.7130381046901498 ,
[2, 0] => 0.73423749694166 , [2, 1] => -0.10200811285199618, [2, 2] => -0.6711852523686888 }, shape: [3, 3]}}
"""
@spec singular_value_decomposition(Tensorex.t()) :: {Tensorex.t(), Tensorex.t(), Tensorex.t()}
def singular_value_decomposition(%Tensorex{shape: [_, columns]} = matrix) do
{_, bidiagonalized, right} = bidiagonalize(matrix)
tridiagonalized = bidiagonalized |> multiply(bidiagonalized, [{0, 0}])
eigen_values =
tridiagonalized
|> linearize()
|> bisection()
|> Stream.reject(&(&1 == 0))
|> Enum.to_list()
rank = length(eigen_values)
eigen_vectors =
eigen_values
|> Stream.map(fn eigen_value ->
coefficient_store =
Enum.into(tridiagonalized.data, %{}, fn
{[index, index] = indices, value} -> {indices, value - eigen_value}
element -> element
end)
coefficient = %{tridiagonalized | data: coefficient_store}
initial = put_in(zero([columns])[[0]], 1)
inverse_iteration(coefficient, initial)
end)
|> Stream.with_index()
|> Enum.reduce(zero([columns, rank]), fn {vector, index}, acc ->
put_in(acc[[0..-1, index]], vector)
end)
store =
eigen_values
|> Stream.map(&:math.sqrt/1)
|> Stream.with_index()
|> Enum.into(%{}, fn {value, index} -> {[index, index], value} end)
right_singular_vectors = right |> multiply(eigen_vectors, [{0, 0}])
{decomposited_vectors, decomposited_singular_values} =
matrix
|> multiply(right_singular_vectors, [{1, 0}])
|> qr_decomposition()
left_singular_vectors =
Enum.reduce(0..(rank - 1), decomposited_vectors, fn index, acc ->
if decomposited_singular_values[[index, index]] < 0 do
update_in(acc[[0..-1, index]], &negate/1)
else
acc
end
end)
{left_singular_vectors, %Tensorex{data: store, shape: [rank, rank]}, right_singular_vectors}
end
@spec inverse_iteration(Tensorex.t(), Tensorex.t()) :: Tensorex.t()
defp inverse_iteration(matrix, initial_vector) do
inverse_iteration(lu_decomposition(matrix), initial_vector, initial_vector)
end
defp inverse_iteration({pivot, lower, upper} = decomposited, initial_vector, prev_difference) do
result =
initial_vector
|> multiply(pivot, [{0, 0}])
|> substitute_forward(lower)
|> substitute_backward(upper)
|> normalize_vector()
|> arrange_vector()
case result |> subtract(initial_vector) |> arrange_vector() do
^prev_difference -> result
difference -> inverse_iteration(decomposited, result, difference)
end
end
@spec normalize_vector(Tensorex.t()) :: Tensorex.t()
defp normalize_vector(vector), do: vector |> divide(:math.sqrt(self_dot(vector)))
@spec arrange_vector(Tensorex.t()) :: Tensorex.t()
defp arrange_vector(%{data: store} = vector) when map_size(store) === 0, do: vector
defp arrange_vector(%{data: store} = vector) do
if elem(Enum.min(store), 1) < 0, do: negate(vector), else: vector
end
@doc """
Solves a system of linear equations.
Computes the solution vector (`X`) of the equation (`A·X = B`) where `A` is a matrix and `B` is
a matrix or a vector.
iex> Tensorex.Analyzer.solve(
...> Tensorex.from_list([[ 3, 2, 1],
...> [ 4, 7, 6],
...> [11, 8, 9]]),
...> Tensorex.from_list([6, 12, 18])
...> )
%Tensorex{data: %{[0] => 1.0000000000000002 ,
[1] => 2.0000000000000004 ,
[2] => -1.0000000000000007}, shape: [3]}
iex> Tensorex.Analyzer.solve(
...> Tensorex.from_list([[5]]),
...> Tensorex.from_list([10])
...> )
%Tensorex{data: %{[0] => 2.0}, shape: [1]}
"""
@spec solve(Tensorex.t(), Tensorex.t()) :: Tensorex.t()
def solve(
%Tensorex{shape: [dimension, dimension]} = coefficient,
%Tensorex{shape: [dimension, columns]} = constant
) do
{pivot, lower, upper} = lu_decomposition(coefficient)
Enum.reduce(0..(columns - 1), constant, fn index, acc ->
update_in(acc[[0..-1, index]], fn vector ->
vector
|> multiply(pivot, [{0, 0}])
|> substitute_forward(lower)
|> substitute_backward(upper)
end)
end)
end
def solve(
%Tensorex{shape: [dimension, dimension]} = coefficient,
%Tensorex{data: store, shape: [dimension]}
) do
new_store = Enum.into(store, %{}, fn {[index], value} -> {[index, 0], value} end)
solve(coefficient, %Tensorex{data: new_store, shape: [dimension, 1]})[[0..-1, 0]]
end
@spec substitute_forward(Tensorex.t(), Tensorex.t()) :: Tensorex.t()
defp substitute_forward(coefficient, %{shape: [1, 1]}), do: coefficient
defp substitute_forward(coefficient, %{shape: [dimension, dimension]} = lower) do
substitute_forward(coefficient, lower[[1..-1]])
end
defp substitute_forward(coefficient, %{shape: [1 | _]} = lower) do
update_in(coefficient[[-1]], fn x ->
x - (lower[[0, 0..-2]] |> multiply(coefficient[[0..-2]], [{0, 0}]))
end)
end
defp substitute_forward(coefficient, %{shape: [rows | _]} = lower) do
coefficient
|> update_in([[-rows]], fn x ->
x - (lower[[0, 0..(-rows - 1)]] |> multiply(coefficient[[0..(-rows - 1)]], [{0, 0}]))
end)
|> substitute_forward(lower[[1..-1]])
end
@spec substitute_backward(Tensorex.t(), Tensorex.t()) :: Tensorex.t()
defp substitute_backward(coefficient, %{shape: [1, 1]} = upper) do
update_in(coefficient[[-1]], fn x -> x / upper[[0, 0]] end)
end
defp substitute_backward(coefficient, %{shape: [dimension, dimension]} = upper) do
update_in(coefficient[[-1]], fn x -> x / upper[[-1, -1]] end)
|> substitute_backward(upper[[0..-2]])
end
defp substitute_backward(coefficient, %{shape: [1 | _]} = upper) do
update_in(coefficient[[0]], fn x ->
(x - (upper[[0, 1..-1]] |> multiply(coefficient[[1..-1]], [{0, 0}]))) / upper[[0, 0]]
end)
end
defp substitute_backward(coefficient, %{shape: [rows | _]} = upper) do
coefficient
|> update_in([[rows - 1]], fn x ->
(x - (upper[[-1, rows..-1]] |> multiply(coefficient[[rows..-1]], [{0, 0}]))) /
upper[[rows - 1, rows - 1]]
end)
|> substitute_backward(upper[[0..-2]])
end
@spec linearize(Tensorex.t()) :: [{number, number}, ...]
defp linearize(%{shape: [dimension | _]} = tridiagonalized) do
[{tridiagonalized[[0, 0]], 0}]
|> Enum.concat(
Stream.map(1..(dimension - 1), fn index ->
{tridiagonalized[[index, index]], tridiagonalized[[index - 1, index]]}
end)
)
end
@spec bisection([{number, number}, ...]) :: Enum.t()
defp bisection(linearized) do
{an, bm} = List.last(linearized)
radius =
Stream.zip(linearized, Stream.drop(linearized, 1))
|> Stream.map(fn {{ak, bj}, {_, bk}} -> abs(ak) + abs(bj) + abs(bk) end)
|> Stream.concat([abs(an) + abs(bm)])
|> Enum.max()
narrow_down(linearized, -radius, radius)
end
@spec narrow_down([{number, number}, ...], number, number) :: Enum.t()
defp narrow_down(linearized, a, b) do
case (a + b) * 0.5 do
c when a < c and c < b ->
nc = n(strum(linearized, c))
Stream.concat(
if(nc - n(strum(linearized, b)) < 1, do: [], else: narrow_down(linearized, c, b)),
if(n(strum(linearized, a)) - nc < 1, do: [], else: narrow_down(linearized, a, c))
)
c ->
[c]
end
end
@spec strum([{number, number}, ...], number) :: Enum.t()
defp strum([{a0, _} | linearized], lambda) do
[{1, lambda - a0} | linearized]
|> Stream.scan(fn {ak, bj}, {pj, pk} ->
{pk, (lambda - ak) * pk - bj * bj * pj}
end)
|> Stream.map(&elem(&1, 1))
end
@spec n(Enum.t()) :: non_neg_integer
defp n(strum) do
Enum.reduce(strum, {0, 1}, fn
y, acc when y == 0 -> acc
y, {count, prev_y} when y * prev_y < 0 -> {count + 1, y}
y, {count, _} -> {count, y}
end)
|> elem(0)
end
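# Note: `bisection/1`, `strum/2` and `n/1` above locate the eigenvalues of the
# symmetric tridiagonal matrix by bisection, counting sign changes of the Sturm
# sequence of leading principal minors.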
@doc """
Decomposes a square matrix into a row pivot matrix and a pair of triangular matrices.
Returns a 3-element tuple containing a row pivot matrix (`P`), a lower triangular matrix (`L`)
and an upper triangular matrix (`U`). Their dot product (`P·L·U`) equals the given
matrix.
iex> Tensorex.Analyzer.lu_decomposition(Tensorex.from_list([[10, 13, 15],
...> [ 5, 7, 9],
...> [ 9, 11, 13]]))
{
%Tensorex{data: %{[0, 0] => 1 ,
[1, 2] => 1 ,
[2, 1] => 1 }, shape: [3, 3]},
%Tensorex{data: %{[0, 0] => 1 ,
[1, 0] => 0.9, [1, 1] => 1 ,
[2, 0] => 0.5, [2, 1] => -0.7142857142857132, [2, 2] => 1 }, shape: [3, 3]},
%Tensorex{data: %{[0, 0] => 10 , [0, 1] => 13 , [0, 2] => 15 ,
[1, 1] => -0.7000000000000011, [1, 2] => -0.5 ,
[2, 2] => 1.1428571428571435}, shape: [3, 3]}
}
iex> Tensorex.Analyzer.lu_decomposition(Tensorex.from_list([[ 0, 13, 15],
...> [ 5, 7, 9],
...> [ 9, 11, 13]]))
{
%Tensorex{data: %{ [0, 1] => 1 ,
[1, 2] => 1 ,
[2, 0] => 1 }, shape: [3, 3]},
%Tensorex{data: %{[0, 0] => 1 ,
[1, 1] => 1 ,
[2, 0] => 0.5555555555555556, [2, 1] => 0.06837606837606834, [2, 2] => 1 }, shape: [3, 3]},
%Tensorex{data: %{[0, 0] => 9 , [0, 1] => 11 , [0, 2] => 13 ,
[1, 1] => 13 , [1, 2] => 15 ,
[2, 2] => 0.7521367521367526}, shape: [3, 3]}
}
"""
@spec lu_decomposition(Tensorex.t()) :: {Tensorex.t(), Tensorex.t(), Tensorex.t()}
def lu_decomposition(%Tensorex{shape: [1, 1]} = matrix) do
delta = kronecker_delta(1)
{delta, delta, matrix}
end
def lu_decomposition(%Tensorex{shape: [dimension, dimension]} = matrix) do
[pivot_index] =
Enum.max_by(matrix[[0..-1, 0]].data, &abs(elem(&1, 1)), fn -> {[0], nil} end) |> elem(0)
delta = kronecker_delta(dimension)
pivoted = matrix |> pivot_row(pivot_index)
column = pivoted[[1..-1, 0]] |> divide(pivoted[[0, 0]])
sub_tensor = pivoted[[1..-1, 1..-1]] |> subtract(column |> multiply(pivoted[[0, 1..-1]]))
{sub_pivot, sub_lower, sub_upper} = lu_decomposition(sub_tensor)
pivot =
delta
|> pivot_column(pivot_index)
|> multiply(delta |> put_in([[1..-1, 1..-1]], sub_pivot), [{1, 0}])
lower =
delta
|> put_in([[1..-1, 0]], sub_pivot |> multiply(column, [{1, 0}]))
|> put_in([[1..-1, 1..-1]], sub_lower)
upper =
pivoted
|> put_in([[1..-1, 0]], zero([dimension - 1]))
|> put_in([[1..-1, 1..-1]], sub_upper)
{pivot, lower, upper}
end
@spec pivot_row(Tensorex.t(), non_neg_integer) :: Tensorex.t()
defp pivot_row(matrix, 0), do: matrix
defp pivot_row(matrix, index) do
matrix |> put_in([[0]], matrix[[index]]) |> put_in([[index]], matrix[[0]])
end
@spec pivot_column(Tensorex.t(), non_neg_integer) :: Tensorex.t()
defp pivot_column(matrix, 0), do: matrix
defp pivot_column(matrix, index) do
matrix
|> put_in([[0..-1, 0]], matrix[[0..-1, index]])
|> put_in([[0..-1, index]], matrix[[0..-1, 0]])
end
@doc """
Decomposes a matrix into a pair of an orthogonal matrix and an upper triangular matrix.
Returns a 2-element tuple containing an orthogonal matrix (`Q`) and an upper triangular matrix
(`R`). Their dot product (`Q·R`) equals the given matrix.
iex> Tensorex.Analyzer.qr_decomposition(Tensorex.from_list([[1, 2],
...> [3, 4],
...> [5, 6]]))
{%Tensorex{data: %{[0, 0] => -0.16903085094570347, [0, 1] => 0.89708522714506 ,
[1, 0] => -0.50709255283711 , [1, 1] => 0.27602622373694213,
[2, 0] => -0.8451542547285165 , [2, 1] => -0.34503277967117735}, shape: [3, 2]},
%Tensorex{data: %{[0, 0] => -5.916079783099616 , [0, 1] => -7.437357441610946 ,
[1, 1] => 0.8280786712108249 }, shape: [2, 2]}}
iex> Tensorex.Analyzer.qr_decomposition(Tensorex.from_list([[1, 2, 3],
...> [3, 4, 5]]))
{%Tensorex{data: %{[0, 0] => -0.316227766016838 , [0, 1] => -0.9486832980505137,
[1, 0] => -0.9486832980505137, [1, 1] => 0.3162277660168382}, shape: [2, 2]},
%Tensorex{data: %{[0, 0] => -3.1622776601683795, [0, 1] => -4.42718872423573 , [0, 2] => -5.692099788303082,
[1, 1] => -0.6324555320336744, [1, 2] => -1.26491106406735}, shape: [2, 3]}}
"""
@spec qr_decomposition(Tensorex.t()) :: {Tensorex.t(), Tensorex.t()}
def qr_decomposition(%Tensorex{shape: [1, _]} = matrix) do
{kronecker_delta(1), matrix}
end
def qr_decomposition(%Tensorex{shape: [rows, 1]} = matrix) do
{vector, reflector} = householder(matrix[[0..-1, 0]])
{reflector[[0..(rows - 1), 0..0]], zero([1, 1]) |> put_in([[0, 0]], vector[[0]])}
end
def qr_decomposition(%Tensorex{shape: [rows, columns]} = matrix) do
diagonals = min(rows, columns)
{vector, reflector} = householder(matrix[[0..-1, 0]])
sub_columns = reflector |> multiply(matrix[[0..-1, 1..-1]], [{1, 0}])
{sub_orthogonal, sub_triangular} = qr_decomposition(sub_columns[[1..-1]])
orthogonal =
reflector
|> multiply(
put_in(
%{kronecker_delta(diagonals) | shape: [rows, diagonals]}[[1..-1, 1..-1]],
sub_orthogonal
),
[{1, 0}]
)
triangular =
zero([diagonals, columns])
|> put_in([[0]], put_in(%{vector | shape: [columns]}[[1..-1]], sub_columns[[0]]))
|> put_in([[1..-1, 1..-1]], sub_triangular)
{orthogonal, triangular}
end
end
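# Round-trip usage sketch (values are illustrative; `multiply/3` comes from
# Tensorex.Operator, as imported above):
#
#     a = Tensorex.from_list([[3, 2], [4, 7]])
#     {q, r} = Tensorex.Analyzer.qr_decomposition(a)
#     q |> Tensorex.Operator.multiply(r, [{1, 0}])  # ≈ a, up to rounding error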
|
lib/tensorex/analyzer.ex
| 0.839224
| 0.678855
|
analyzer.ex
|
starcoder
|
defmodule SFTPToolkit.Upload do
@moduledoc """
Module containing functions that ease uploading data to the SFTP server.
"""
use Bunch
@default_operation_timeout 5000
@default_chunk_size 32768
@default_remote_mode [:binary, :write, :creat]
@default_local_mode [:binary, :read, :read_ahead]
@doc """
  Uploads a single file by reading it in chunks, to avoid loading the whole
  file into memory as `:ssh_sftp.write_file/3` does by default.
## Arguments
Expects the following arguments:
* `sftp_channel_pid` - PID of the already opened SFTP channel,
* `local_path` - local path to the file,
* `remote_path` - remote path to the file on the SFTP server,
* `options` - additional options, see below.
## Options
* `operation_timeout` - SFTP operation timeout (it is a timeout
per each SFTP operation, not total timeout), defaults to 5000 ms,
* `chunk_size` - chunk size in bytes, defaults to 32KB,
* `remote_mode` - mode used while opening the remote file, defaults
to `[:binary, :write, :creat]`, see `:ssh_sftp.open/3` for possible
values,
* `local_mode` - mode used while opening the local file, defaults
to `[:binary, :read, :read_ahead]`, see `File.open/2` for possible
values.
## Return values
On success returns `:ok`.
On error returns `{:error, reason}`, where `reason` might be one
of the following:
* `{:local_open, info}` - the `File.open/2` on the local file failed,
* `{:remote_open, info}` - the `:ssh_sftp.open/4` on the remote file
failed,
* `{:upload, {:read, info}}` - the `IO.binread/2` on the local file
failed,
* `{:upload, {:write, info}}` - the `:ssh_sftp.write/4` on the remote
file failed,
* `{:local_close, info}` - the `File.close/1` on the local file failed,
* `{:remote_close, info}` - the `:ssh_sftp.close/2` on the remote file
failed.
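  ## Example
  A minimal sketch (assuming an `:ssh` connection `conn` has already been
  established):
      {:ok, channel_pid} = :ssh_sftp.start_channel(conn)
      :ok = SFTPToolkit.Upload.upload_file(channel_pid, "local.bin", "/remote/dir/remote.bin")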
"""
@spec upload_file(pid, Path.t(), Path.t(),
operation_timeout: timeout,
chunk_size: pos_integer,
remote_mode: [:read | :write | :creat | :trunc | :append | :binary],
local_mode: [File.mode()]
) :: :ok | {:error, any}
def upload_file(sftp_channel_pid, local_path, remote_path, options \\ []) do
chunk_size = Keyword.get(options, :chunk_size, @default_chunk_size)
operation_timeout = Keyword.get(options, :operation_timeout, @default_operation_timeout)
remote_mode = Keyword.get(options, :remote_mode, @default_remote_mode)
local_mode = Keyword.get(options, :local_mode, @default_local_mode)
withl local_open: {:ok, local_handle} <- File.open(local_path, local_mode),
remote_open:
{:ok, remote_handle} <-
:ssh_sftp.open(
sftp_channel_pid,
remote_path,
remote_mode,
operation_timeout
),
upload:
:ok <-
do_upload_file(
sftp_channel_pid,
local_handle,
remote_handle,
chunk_size,
operation_timeout
),
remote_close:
:ok <- :ssh_sftp.close(sftp_channel_pid, remote_handle, operation_timeout),
local_close: :ok <- File.close(local_handle) do
:ok
else
local_open: {:error, reason} -> {:error, {:local_open, reason}}
remote_open: {:error, reason} -> {:error, {:remote_open, reason}}
upload: {:error, reason} -> {:error, {:upload, reason}}
remote_close: {:error, reason} -> {:error, {:remote_close, reason}}
local_close: {:error, reason} -> {:error, {:local_close, reason}}
end
end
defp do_upload_file(
sftp_channel_pid,
local_handle,
remote_handle,
chunk_size,
operation_timeout
) do
case IO.binread(local_handle, chunk_size) do
:eof ->
:ok
{:error, reason} ->
{:error, {:read, reason}}
data ->
case :ssh_sftp.write(sftp_channel_pid, remote_handle, data, operation_timeout) do
:ok ->
do_upload_file(
sftp_channel_pid,
local_handle,
remote_handle,
chunk_size,
operation_timeout
)
{:error, reason} ->
{:error, {:write, reason}}
end
end
end
end
|
lib/sftp_toolkit/upload.ex
| 0.77373
| 0.531209
|
upload.ex
|
starcoder
|
defmodule Shellwords do
@moduledoc """
Functions for manipulating strings according to the word parsing rules of the UNIX Bourne shell.
**See:** [Shell & Utilities volume of the IEEE Std 1003.1-2008, 2016 Edition][standard]
[standard]: http://pubs.opengroup.org/onlinepubs/9699919799/utilities/contents.html
"""
@split_pattern ~r/\G\s*(?>([^\s\\\'\"]+)|'([^\']*)'|"((?:[^\"\\]|\\.)*)"|(\\.?)|(\S))(\s|\z)?/m
@doc """
  Splits a string into a list of tokens in the same way as the UNIX Bourne shell does.
Returns `{:ok, words}` on success and `{:error, reason}` on failure.
## Examples
```
iex> Shellwords.split("here are \\"two words\\"")
{:ok, ["here", "are", "two words"]}
```
```
iex> Shellwords.split("\\"unmatched quote")
{:error, "Unmatched quote: \\"\\\\\\"unmatched quote\\""}
```
"""
@spec split(String.t()) :: {:ok, [String.t()]} | {:error, String.t()}
def split(text) when is_binary(text) do
words =
@split_pattern
|> Regex.scan(text)
|> Enum.map(&handle_split_captures/1)
if :error in words do
{:error, "Unmatched quote: #{inspect(text)}"}
else
{:ok, words}
end
end
@doc """
  Splits a string into a list of tokens in the same way as the UNIX Bourne shell does.
Operates in the same fashion as `split/1` but raises an exception when errors are encountered.
"""
@spec split!(String.t()) :: [String.t()] | no_return
def split!(text) when is_binary(text) do
case split(text) do
{:ok, words} -> words
{:error, reason} -> raise ArgumentError, reason
end
end
defp handle_split_captures(captures) do
{word, sq, dq, esc, garbage} = parse_split_captures(captures)
dq = Regex.replace(~r/\\([$`"\\\n])/, dq, "\\1")
esc = Regex.replace(~r/\\([$`"\\\n])/, esc, "\\1")
    cond do
      String.length(garbage) > 0 -> :error
      String.length(word) > 0 -> word
      String.length(sq) > 0 -> sq
      String.length(dq) > 0 -> dq
      String.length(esc) > 0 -> esc
      # An empty quoted string ('' or "") leaves every capture empty; without this
      # fallback the cond would raise CondClauseError instead of yielding a word.
      true -> ""
    end
end
defp parse_split_captures([_whole_match, word, sq, dq, esc, garbage, _sep]) do
{word, sq, dq, esc, garbage}
end
defp parse_split_captures([_whole_match, word, sq, dq, esc, garbage]) do
{word, sq, dq, esc, garbage}
end
end
|
lib/shellwords.ex
| 0.895125
| 0.771628
|
shellwords.ex
|
starcoder
|
defmodule LogicSim.Node do
@moduledoc """
A node is the basic building block of LogicSim. A node is a GenServer that has 0 or more
inputs, and 0 or more outputs. Each node keeps track of which nodes are connected to each
of its outputs. When an output changes the node sends a message to its connected nodes
  telling them what value to set on the input they are connected to.
Nodes are modules that `use LogicSim.Node` as demonstrated below, optionally specifying a list of inputs,
a list of outputs, and/or a map with additional state:
```
defmodule LogicSim.Node.Or do
use LogicSim.Node, inputs: [:a, :b], outputs: [:a]
defmodule LogicSim.Node.OnOffSwitch do
use LogicSim.Node, outputs: [:a], additional_state: %{on: false}
```
and implement the callback calculate_outputs/2 to generate all output values given the current input
values as demonstrated here (the `Not` gate):
```
def calculate_outputs(_state, %{a: a} = _input_values) do
%{a: !a}
end
```
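  Once node modules are defined, they can be started and wired together. A minimal
  sketch (assuming `Not` and `OnOffSwitch` node modules like the ones above are
  compiled):
  ```
  switch = LogicSim.Node.OnOffSwitch.start_link!()
  not_gate = LogicSim.Node.Not.start_link!()
  :ok = LogicSim.Node.link_output_to_node(switch, :a, not_gate, :a)
  ```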
"""
@callback calculate_outputs(state :: map(), input_values :: map()) :: map()
# credo:disable-for-this-file Credo.Check.Refactor.LongQuoteBlocks
defmacro __using__(opts) do
inputs = Keyword.get(opts, :inputs, [])
outputs = Keyword.get(opts, :outputs, [])
additional_state = Keyword.get(opts, :additional_state, Macro.escape(%{}))
quote do
use GenServer
require Logger
@behaviour LogicSim.Node
@doc """
Starts the node with the given options
Possible Options:
listeners: a list of process pids that should be notified whenever the state of the node
changes. Listener will receive `{:logic_sim_node_state, this_nodes_pid, this_nodes_state}`
"""
def start_link(opts \\ []) do
output_nodes = Enum.reduce(unquote(outputs), %{}, &Map.put(&2, &1, %{}))
output_values = Enum.reduce(unquote(outputs), %{}, &Map.put(&2, &1, false))
input_values = Enum.reduce(unquote(inputs), %{}, &Map.put(&2, &1, false))
listeners = Keyword.get(opts, :listeners, [])
state =
unquote(additional_state)
|> Map.put(:inputs, unquote(inputs))
|> Map.put(:outputs, unquote(outputs))
|> Map.put(:output_nodes, output_nodes)
|> Map.put(:output_values, output_values)
|> Map.put(:input_values, input_values)
|> Map.put(:listeners, listeners)
GenServer.start_link(__MODULE__, state)
end
@doc """
Same as `start_link/1` but raises on error.
"""
def start_link!(opts \\ []) do
{:ok, server} = start_link(opts)
server
end
@doc false
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]}
}
end
## GenServer Client functions
@doc """
Links this nodes output to the input of another node.
Takes the output node, the output to attach from, the node to attach to, and
the node's input to attach to.
"""
def link_output_to_node(output_node, output, input_node, input) do
GenServer.call(output_node, {:link_output_to_node, output, input_node, input})
end
@doc """
Tells this node to set its input to the given value
Will be called by another node when its output is changed while linked to this input.
"""
def set_node_input(node, input, input_value) do
GenServer.cast(node, {:set_node_input, input, input_value})
end
@doc """
Returns state of server.
Also can be called from `LogicSim.Node.get_state` if you don't know the node type.
"""
def get_state(server) do
GenServer.call(server, :get_state)
end
## GenServer Server functions
@doc false
def init(state) do
state = %{state | output_values: calculate_outputs(state, state.input_values)}
Logger.debug("Init node of type #{__MODULE__} with state #{inspect(state)}")
{:ok, state}
end
defp send_state_to_listeners(%{listeners: listeners} = state) do
listeners
|> Enum.map(&send(&1, {:logic_sim_node_state, self(), state}))
end
def handle_call(:get_state, _from, state) do
{:reply, state, state}
end
def handle_call(
{:link_output_to_node, output, node, input},
_from,
%{output_nodes: output_nodes, output_values: output_values} = state
) do
Logger.debug(
"Linking #{inspect(__MODULE__)} #{inspect(self())} output #{inspect(output)} to #{
inspect(node)
} input #{inspect(input)}"
)
output_nodes = put_in(output_nodes, [output, node], input)
set_node_input(node, input, Map.fetch!(output_values, output))
state = %{state | output_nodes: output_nodes}
send_state_to_listeners(state)
{:reply, :ok, state}
end
def handle_cast(
{:set_node_input, input, input_value},
%{
input_values: input_values,
output_values: old_output_values
} = state
) do
if Map.get(input_values, input) != input_value do
Logger.debug(
"Setting input value for #{inspect(__MODULE__)} #{inspect(self())} #{inspect(input)} to #{
inspect(input_value)
}"
)
input_values = Map.put(input_values, input, input_value)
output_values = calculate_outputs(state, input_values)
Logger.debug(
"New output values for #{inspect(__MODULE__)} #{inspect(self())} are #{
inspect(output_values)
}"
)
state = %{state | input_values: input_values}
state =
output_values
|> Map.keys()
|> Enum.filter(fn key -> old_output_values[key] != output_values[key] end)
|> Enum.reduce(state, fn output, state_acc ->
set_output_value(output, output_values[output], state_acc, false)
end)
send_state_to_listeners(state)
{:noreply, state}
else
{:noreply, state}
end
end
## Internal functions
defp set_output_value(
output,
output_value,
%{output_nodes: output_nodes, output_values: output_values} = state,
send_to_listeners \\ true
) do
if Map.get(output_values, output) != output_value do
Logger.debug(
"Setting output value for #{inspect(__MODULE__)} #{inspect(self())} #{inspect(output)} to #{
inspect(output_value)
}"
)
output_values = Map.put(output_values, output, output_value)
output_nodes
|> Map.get(output)
|> Enum.each(fn {node, input} -> set_node_input(node, input, output_value) end)
state = %{state | output_values: output_values}
if send_to_listeners, do: send_state_to_listeners(state)
state
else
state
end
end
end
end
@doc """
Generic version of function that allows linking of two nodes without having to know or
call the specific node type's version.
"""
def link_output_to_node(server, output, node, input) do
GenServer.call(server, {:link_output_to_node, output, node, input})
end
def get_state(server) do
GenServer.call(server, :get_state)
end
end
|
lib/logic_sim/node.ex
| 0.869361
| 0.907885
|
node.ex
|
starcoder
|
defmodule Membrane do
@moduledoc ~S"""
  Membrane provides a wrapper for filtering data simply and efficiently.
  It filters a list of structs or maps down to those that satisfy the query. The query is inspired by
  Mongo, hence there are a lot of similarities.
## Installation
  Make sure Elixir >= 1.6, then add the following line to your `mix.exs` file
def deps do
...
{:membrane, "~> 0.1.0"}
end
Run `mix deps.get` to install the package
## Query
  The examples below cover exact, less-than, greater-than, and nested comparisons.
To get in depth idea on query parameters that the module supports, please visit
`Membrane.Query` module.
## Examples
  In the examples below, lists of maps are used; but they can be lists of structs or a mix of both structs and maps.
iex> data = [
...> %{id: 1, name: "Bob", action: "talk", age: 30},
...> %{id: 3, name: "Helen", action: "talk", age: 30},
...> %{id: 1, name: "Rocky", action: "bark", age: 10},
...> %{id: 3, name: "Rocky", action: "meow", age: 6},
...> %{id: 10, age: 3},
...> %{id: 1, type: "car", age: 12}
...> ]
[
%{action: "talk", age: 30, id: 1, name: "Bob"},
%{action: "talk", age: 30, id: 3, name: "Helen"},
%{action: "bark", age: 10, id: 1, name: "Rocky"},
%{action: "meow", age: 6, id: 3, name: "Rocky"},
%{age: 3, id: 10},
%{age: 12, id: 1, type: "car"}
]
## Filters data with id = 1
iex> Membrane.filter(data, id: 1)
[
%{age: 12, id: 1, type: "car"},
%{action: "bark", age: 10, id: 1, name: "Rocky"},
%{action: "talk", age: 30, id: 1, name: "Bob"}
]
## Filters data with id > 1 and has :action attribute
iex> Membrane.filter(data, id: [gt: 1], action: :exists)
[
%{action: "meow", age: 6, id: 3, name: "Rocky"},
%{action: "talk", age: 30, id: 3, name: "Helen"}
]
The module also supports nested structs.
## Examples
iex> data = [
...> %{id: 1, name: "Bob", action: "talk", data: %{age: 30, country: "USA"} },
...> %{id: 3, name: "Helen", action: "talk", data: %{age: 26, country: "USA"}},
...> %{id: 3, name: "William", action: "talk", data: %{age: 32, country: "France"}},
...> %{id: 1, name: "Rocky", action: "bark", age: 10},
...> ]
[
%{id: 1, name: "Bob", action: "talk", data: %{age: 30, country: "USA"} },
%{id: 3, name: "Helen", action: "talk", data: %{age: 26, country: "USA"}},
%{id: 3, name: "William", action: "talk", data: %{age: 32, country: "France"}},
%{id: 1, name: "Rocky", action: "bark", age: 10},
]
## Filters data with action = "talk" and age attribute in data >= 30
iex> Membrane.filter(data, action: "talk", data: [age: [gte: 30]])
[
%{id: 3, name: "William", action: "talk", data: %{age: 32, country: "France"}},
%{id: 1, name: "Bob", action: "talk", data: %{age: 30, country: "USA"} }
]
"""
alias Membrane.Query
@doc """
  Filters the data that satisfies the query. It takes `data` and `query`, plus an
  optional third argument `option`. The results are returned in reverse order.
  The default option is `:default`; pass `:negate` to invert the filter.
## Examples
iex> data = [%{a: 1, b: 2, c: 3}, %{a: 2, b: 3, c: -1}, %{a: 1, b: -3, c: 5}]
iex> Membrane.filter(data, [a: 1])
[%{a: 1, b: -3, c: 5}, %{a: 1, b: 2, c: 3}]
iex> Membrane.filter(data, [b: [gt: 1], c: [lte: 3]])
[%{a: 2, b: 3, c: -1}, %{a: 1, b: 2, c: 3}]
# As the argument is keyword list, the brackets can be dropped
iex> Membrane.filter(data, b: [gt: 1], c: [lte: 3])
[%{a: 2, b: 3, c: -1}, %{a: 1, b: 2, c: 3}]
# To negate the data, add :negate as the third argument
iex> Membrane.filter(data,[a: [lt: 2]], :negate)
[%{a: 2, b: 3, c: -1}]
"""
@spec filter(list, list, atom) :: list
def filter(data, query, option \\ :default) do
case option do
:default -> process(data, query, [], false)
:negate -> process(data, query, [], true)
_ -> raise("Invalid option: #{option}, It should be either `:default` or `:negate`")
end
end
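  # Walks the list recursively, prepending matching entries (or non-matching ones
  # when negated), which is why results come back in reverse order.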
defp process([h | t], args, list, negate) do
state = Query.process(h, args)
if(negate, do: !state, else: state)
|> if do
process(t, args, [h | list], negate)
else
process(t, args, list, negate)
end
end
defp process([], _args, list, _negate) do
list
end
end
|
lib/membrane.ex
| 0.879981
| 0.66555
|
membrane.ex
|
starcoder
|
defmodule Oban.Notifiers.PG do
@moduledoc """
A PG/PG2 based notifier implementation that runs with Distributed Erlang.
Out of the box, Oban uses PostgreSQL's `LISTEN/NOTIFY` for PubSub. For most applications, that
is fine, but Postgres-based PubSub isn't sufficient in some circumstances. In particular,
Postgres notifications won't work when your application connects through PGbouncer in
_transaction_ or _statement_ mode.
_Note: You must be using [Distributed Erlang][de] to use the PG notifier._
## Usage
Specify the `PG` notifier in your Oban configuration:
```elixir
config :my_app, Oban,
notifier: Oban.Notifiers.PG,
...
```
## Implementation Notes
* The notifier will use `pg` if available (OTP 23+) or fall back to `pg2` for
older OTP releases.
* Like the Postgres implementation, notifications are namespaced by `prefix`.
  * For compatibility, message payloads are always serialized to JSON before
broadcast and deserialized before relay to local processes.
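  Once configured, notifications go through the `Oban.Notifier` facade as usual.
  A minimal sketch (channel and payload are illustrative):
  ```elixir
  :ok = Oban.Notifier.listen([:gossip])
  :ok = Oban.Notifier.notify(Oban, :gossip, %{message: "hello"})
  ```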
[de]: https://elixir-lang.org/getting-started/mix-otp/distributed-tasks.html#our-first-distributed-code
"""
@behaviour Oban.Notifier
use GenServer
alias Oban.Config
defmodule State do
@moduledoc false
defstruct [:conf, :name, listeners: %{}]
end
@impl Oban.Notifier
def start_link(opts) do
{name, opts} = Keyword.pop(opts, :name, __MODULE__)
GenServer.start_link(__MODULE__, opts, name: name)
end
@impl Oban.Notifier
def listen(server, channels) do
GenServer.call(server, {:listen, channels})
end
@impl Oban.Notifier
def unlisten(server, channels) do
GenServer.call(server, {:unlisten, channels})
end
@impl Oban.Notifier
def notify(server, channel, payload) do
with %State{} = state <- GenServer.call(server, :get_state),
[_ | _] = pids <- members(state.conf.prefix) do
for pid <- pids, message <- payload_to_messages(channel, payload) do
send(pid, message)
end
:ok
end
end
@impl GenServer
def init(opts) do
state = struct!(State, opts)
start_pg()
:ok = join(state.conf.prefix)
{:ok, state}
end
@impl GenServer
def handle_call({:listen, channels}, {pid, _}, %State{listeners: listeners} = state) do
if Map.has_key?(listeners, pid) do
{:reply, :ok, state}
else
Process.monitor(pid)
{:reply, :ok, %{state | listeners: Map.put(listeners, pid, channels)}}
end
end
def handle_call({:unlisten, channels}, {pid, _}, %State{listeners: listeners} = state) do
orig_channels = Map.get(listeners, pid, [])
listeners =
case orig_channels -- channels do
[] -> Map.delete(listeners, pid)
new_channels -> Map.put(listeners, pid, new_channels)
end
{:reply, :ok, %{state | listeners: listeners}}
end
def handle_call(:get_state, _from, state), do: {:reply, state, state}
@impl GenServer
def handle_info({:notification, channel, payload}, %State{} = state) do
decoded = Jason.decode!(payload)
if in_scope?(decoded, state.conf) do
for {pid, channels} <- state.listeners, channel in channels do
send(pid, {:notification, channel, decoded})
end
end
{:noreply, state}
end
def handle_info(_message, state) do
{:noreply, state}
end
## PG Helpers
if Code.ensure_loaded?(:pg) do
defp start_pg do
:pg.start_link(__MODULE__)
end
defp members(prefix) do
:pg.get_members(__MODULE__, prefix)
end
defp join(prefix) do
:ok = :pg.join(__MODULE__, prefix, self())
end
else
defp start_pg, do: :ok
defp members(prefix) do
:pg2.get_members(namespace(prefix))
end
defp join(prefix) do
namespace = namespace(prefix)
:ok = :pg2.create(namespace)
:ok = :pg2.join(namespace, self())
end
defp namespace(prefix), do: {:oban, prefix}
end
## Message Helpers
defp payload_to_messages(channel, payload) do
Enum.map(payload, &{:notification, channel, &1})
end
defp in_scope?(%{"ident" => "any"}, _conf), do: true
defp in_scope?(%{"ident" => ident}, conf), do: Config.match_ident?(conf, ident)
defp in_scope?(_payload, _conf), do: true
end
|
lib/oban/notifiers/pg.ex
| 0.84296
| 0.702007
|
pg.ex
|
starcoder
|
defmodule Cog.Commands.Cat do
use Cog.Command.GenCommand.Base,
bundle: Cog.Util.Misc.embedded_bundle
alias Cog.Command.Service.DataStore
# Note, we use the `tee` namespace because the namespace we read from
# must be the same one that data was written into.
@data_namespace [ "commands", "tee" ]
@description "Retrieve saved pipeline output"
@long_description """
The cat command retrieves pipeline output that was previously saved using the tee command.
"""
@arguments "<name>"
rule "when command is #{Cog.Util.Misc.embedded_bundle}:cat allow"
option "merge", short: "m", type: "bool", required: false,
description: "Merge current pipeline map into saved pipeline map"
option "append", short: "a", type: "bool", required: false,
description: "Append current pipeline output to saved pipeline data, returning an array"
option "insert", short: "i", type: "string", required: false,
description: "Insert current pipeline output into saved pipeline map as the field specified for this option"
def handle_message(%{options: %{"merge" => true, "append" => true}} = req, state) do
{:error, req.reply_to, "The append and merge options cannot be specified together", state}
end
def handle_message(%{args: [key], options: opts} = req, state) do
case DataStore.fetch(@data_namespace, key) do
{:ok, data} ->
cond do
opts["insert"] ->
handle_transform(:insert, req, data, state)
opts["append"] ->
{:reply, req.reply_to, List.wrap(data) ++ List.wrap(req.cog_env), state}
opts["merge"] ->
handle_transform(:merge, req, data, state)
true ->
{:reply, req.reply_to, data, state}
end
{:error, reason} ->
{:error, req.reply_to, "Unable to retrieve data for #{key}: #{inspect reason}", state}
end
end
def handle_message(%{args: []} = req, state),
do: {:error, req.reply_to, "#{Cog.Util.Misc.embedded_bundle}:cat requires a name to be specified", state}
defp handle_transform(action, req, data, state) do
case transform_map_data(action, req.cog_env, data, req.options) do
{:ok, result} ->
{:reply, req.reply_to, result, state}
{:error, reason} ->
{:error, req.reply_to, reason, state}
end
end
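  # Pipeline values often arrive wrapped in a single-element list, so unwrap one
  # level on either side before merging or inserting.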
defp transform_map_data(action, [prev], curr, opts),
do: transform_map_data(action, prev, curr, opts)
defp transform_map_data(action, prev, [curr], opts),
do: transform_map_data(action, prev, curr, opts)
defp transform_map_data(:merge, prev, curr, _opts) when is_map(prev) and is_map(curr) do
{:ok, Map.merge(prev, curr)}
end
defp transform_map_data(:insert, prev, curr, opts) when is_map(prev) and is_map(curr) do
{:ok, Map.put(prev, opts["insert"], curr)}
end
defp transform_map_data(action, _prev, _curr, _opts) do
{:error, "The #{Atom.to_string(action)} option is only applicable for map values"}
end
end
|
lib/cog/commands/cat.ex
| 0.698946
| 0.54359
|
cat.ex
|
starcoder
|
defmodule EllipticCurve.PublicKey do
@moduledoc """
Used to convert public keys between struct and .der or .pem formats.
Functions:
- toPem()
- toDer()
- fromPem()
- fromPem!()
- fromDer()
- fromDer!()
"""
alias __MODULE__, as: PublicKey
alias EllipticCurve.Utils.{Der, BinaryAscii}
alias EllipticCurve.{Point, Curve, Math}
@doc """
Holds public key data. Is usually extracted from .pem files or from the private key itself.
Parameters:
- `:point` [%EllipticCurve.Utils.Point]: public key point data;
- `:curve` [%EllipticCurve.Curve]: public key curve information;
"""
defstruct [:point, :curve]
@doc """
Converts a public key in decoded struct format into a pem string
Parameters:
- `publicKey` [%EllipticCurve.PublicKey]: decoded public key struct;
Returns:
- `pem` [string]: public key in pem format
## Example:
iex> EllipticCurve.PublicKey.toPem(%EllipticCurve.PublicKey{...})
"-----<KEY>"
"""
def toPem(publicKey) do
publicKey
|> toDer()
|> Der.toPem("PUBLIC KEY")
end
@doc """
Converts a public key in decoded struct format into a der string (raw binary)
Parameters:
- `publicKey` [%EllipticCurve.PublicKey]: decoded public key struct;
Returns:
- `der` [string]: public key in der format
## Example:
iex> EllipticCurve.PublicKey.toDer(%EllipticCurve.PublicKey{...})
<<48, 86, 48, 16, 6, 7, 42, 134, 72, 206, 61, ...>>
"""
def toDer(publicKey) do
Der.encodeSequence([
Der.encodeSequence([
Der.encodeOid([1, 2, 840, 10045, 2, 1]),
Der.encodeOid(publicKey.curve.oid)
]),
Der.encodeBitString(toString(publicKey, true))
])
end
@doc false
def toString(publicKey, encoded \\ false) do
curveLength = Curve.getLength(publicKey.curve)
xString =
BinaryAscii.stringFromNumber(
publicKey.point.x,
curveLength
)
yString =
BinaryAscii.stringFromNumber(
publicKey.point.y,
curveLength
)
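    # When encoded, prefix with the DER BIT STRING unused-bits byte (0x00)
    # followed by the uncompressed-point marker (0x04).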
if encoded do
"\x00\x04" <> xString <> yString
else
xString <> yString
end
end
@doc """
Converts a public key in pem format into decoded struct format
Parameters:
- `pem` [string]: public key in pem format
Returns {:ok, publicKey}:
- `publicKey` [%EllipticCurve.PublicKey]: decoded public key struct;
## Example:
iex> EllipticCurve.PublicKey.fromPem("-----BEGIN PUBLIC KEY-----\<KEY>END PUBLIC KEY-----\n")
{:ok, %EllipticCurve.PublicKey{...}}
"""
def fromPem(pem) do
{:ok, fromPem!(pem)}
rescue
e in RuntimeError -> {:error, e}
end
@doc """
Converts a public key in pem format into decoded struct format
Parameters:
- `pem` [string]: public key in pem format
Returns:
- `publicKey` [%EllipticCurve.PublicKey]: decoded public key struct;
## Example:
iex> EllipticCurve.PublicKey.fromPem!("-----BEGIN PUBLIC KEY-----\<KEY>END PUBLIC KEY-----\n")
%EllipticCurve.PublicKey{...}
"""
def fromPem!(pem) do
pem
|> Der.fromPem()
|> fromDer!()
end
@doc """
Converts a public key in der (raw binary) format into decoded struct format
Parameters:
- `der` [string]: public key in der format
Returns {:ok, publicKey}:
- `publicKey` [%EllipticCurve.PublicKey]: decoded public key struct;
## Example:
iex> EllipticCurve.PublicKey.fromDer(<<48, 86, 48, 16, 6, 7, 42, 134, ...>>)
{:ok, %EllipticCurve.PublicKey{...}}
"""
def fromDer(der) do
{:ok, fromDer!(der)}
rescue
e in RuntimeError -> {:error, e}
end
@doc """
Converts a public key in der (raw binary) format into decoded struct format
Parameters:
- `der` [string]: public key in der format
Returns:
- `publicKey` [%EllipticCurve.PublicKey]: decoded public key struct;
## Example:
iex> EllipticCurve.PublicKey.fromDer!(<<48, 86, 48, 16, 6, 7, 42, 134, ...>>)
%EllipticCurve.PublicKey{...}
"""
def fromDer!(der) do
{s1, empty} = Der.removeSequence(der)
if byte_size(empty) != 0 do
raise "trailing junk after DER public key: #{BinaryAscii.hexFromBinary(empty)}"
end
{s2, pointBitString} = Der.removeSequence(s1)
{_oidPublicKey, rest} = Der.removeObject(s2)
{oidCurve, empty} = Der.removeObject(rest)
if byte_size(empty) != 0 do
raise "trailing junk after DER public key objects: #{BinaryAscii.hexFromBinary(empty)}"
end
curve = Curve.KnownCurves.getCurveByOid(oidCurve)
{pointString, empty} = Der.removeBitString(pointBitString)
if byte_size(empty) != 0 do
raise "trailing junk after public key point-string: #{BinaryAscii.hexFromBinary(empty)}"
end
binary_part(pointString, 2, byte_size(pointString) - 2)
|> fromString!(curve.name)
end
@doc false
def fromString(string, curve \\ :secp256k1, validatePoint \\ true) do
{:ok, fromString!(string, curve, validatePoint)}
rescue
e in RuntimeError -> {:error, e}
end
@doc false
def fromString!(string, curve \\ :secp256k1, validatePoint \\ true) do
curve = Curve.KnownCurves.getCurveByName(curve)
baseLength = Curve.getLength(curve)
xs = binary_part(string, 0, baseLength)
ys = binary_part(string, baseLength, byte_size(string) - baseLength)
point = %Point{
x: BinaryAscii.numberFromString(xs),
y: BinaryAscii.numberFromString(ys)
}
publicKey = %PublicKey{point: point, curve: curve}
cond do
validatePoint == false -> publicKey
Point.isAtInfinity?(point) ->
raise "Public Key point is at infinity"
Curve.contains?(curve, point) == false ->
raise "Point (#{point.x},#{point.y}) is not valid for curve #{curve.name}"
Point.isAtInfinity?(Math.multiply(point, curve."N", curve."N", curve."A", curve."P")) == false ->
raise "Point (#{point.x},#{point.y}) * #{curve.name}.N is not at infinity"
true -> publicKey
end
end
end
|
lib/publicKey.ex
| 0.953546
| 0.516108
|
publicKey.ex
|
starcoder
|
defmodule Rajska.FieldAuthorization do
@moduledoc """
Absinthe middleware to ensure field permissions.
Authorizes Absinthe's object [field](https://hexdocs.pm/absinthe/Absinthe.Schema.Notation.html#field/4) according to the result of the `c:Rajska.Authorization.has_user_access?/3` function, which receives the user role, the `source` object that is resolving the field and the field rule.
## Usage
[Create your Authorization module and add it and FieldAuthorization to your Absinthe.Schema](https://hexdocs.pm/rajska/Rajska.html#module-usage).
```elixir
object :user do
# Turn on both Object and Field scoping, but if the ObjectScope Phase is not included, this is the same as using `scope_field?`
meta :scope?, true
field :name, :string
field :is_email_public, :boolean
field :phone, :string, meta: [private: true]
field :email, :string, meta: [private: & !&1.is_email_public]
# Can also use custom rules for each field
field :always_private, :string, meta: [private: true, rule: :private]
end
object :field_scope_user do
meta :scope_field?, true
field :name, :string
field :phone, :string, meta: [private: true]
end
```
As seen in the example above, a function can also be passed as value to the meta `:private` key, in order to check if a field is private dynamically, depending of the value of another field.
"""
@behaviour Absinthe.Middleware
alias Absinthe.{
Resolution,
Type
}
def call(resolution, [object: %Type.Object{fields: fields} = object, field: field]) do
field_private? = fields[field] |> Type.meta(:private) |> field_private?(resolution.source)
scope? = get_scope!(object)
default_rule = Rajska.apply_auth_mod(resolution.context, :default_rule)
rule = Type.meta(fields[field], :rule) || default_rule
resolution
|> Map.get(:context)
|> authorized?(scope? && field_private?, resolution.source, rule)
|> put_result(resolution, field)
end
defp field_private?(true, _source), do: true
defp field_private?(private, source) when is_function(private), do: private.(source)
defp field_private?(_private, _source), do: false
defp get_scope!(object) do
scope? = Type.meta(object, :scope?)
scope_field? = Type.meta(object, :scope_field?)
case {scope?, scope_field?} do
{nil, nil} -> true
{nil, scope_field?} -> scope_field?
{scope?, nil} -> scope?
{_, _} -> raise "Error in #{inspect object.identifier}. If scope_field? is defined, then scope? must not be defined"
end
end
defp authorized?(_context, false, _source, _rule), do: true
defp authorized?(context, true, source, rule) do
Rajska.apply_auth_mod(context, :context_user_authorized?, [context, source, rule])
end
defp put_result(true, resolution, _field), do: resolution
defp put_result(false, resolution, field) do
Resolution.put_result(resolution, {:error, "Not authorized to access field #{field}"})
end
end
|
lib/middlewares/field_authorization.ex
| 0.86306
| 0.848345
|
field_authorization.ex
|
starcoder
|
defmodule Cassette.Config do
@moduledoc """
Struct that represents Cassette configuration
"""
defstruct username: "",
password: "",
base_url: "",
base_authority: "",
service: "",
tgt_ttl: 14_400,
st_ttl: 252,
validation_ttl: 300,
insecure: false
@typedoc """
The following keys are supported and may be defined in your application env
* `username` - the username to authenticate on cas server
* `password` - the password to authenticate on cas server
* `base_url` - the base url for your CAS server (do not include the `login/`)
* `base_authority` - simplifies role checking, please refer to
`Cassette.User.role?/2`
* `service` - the CAS service to use when validating service tickets
* `tgt_ttl` - the TGT cache time to live
* `st_ttl` - the ST cache time to live
* `validation_ttl` - the ST validation cache time to live
  * `insecure` - boolean to allow connections even when SSL checks fail
Any of those keys may be set in your Application environment
(or the mix `config/config.exs`) as:
```elixir
config :cassette, username: "john.doe"
```
`Cassette.Server`s call the `resolve/1` function on this module to resolve any
configuration using environment variables.
To use an environment variable set the value to
`{:system, "SOME_ENVIRONMENT_VARIABLE"}`.
Or in `config.exs`:
```elixir
config :cassette, username: {:system, "CASSETTE_USERNAME"}
  ```
  and configure your environment (probably in something like
`/etc/default/your_app`):
```shell
CASSETTE_USERNAME=acme
```
Please check the `Cassette.Config.default/0` function.
"""
@type t :: %__MODULE__{
username: String.t(),
password: String.t(),
base_url: String.t(),
base_authority: String.t(),
service: String.t(),
tgt_ttl: non_neg_integer(),
st_ttl: non_neg_integer(),
          validation_ttl: non_neg_integer(),
          insecure: boolean()
        }
@doc """
Returns a configuration based on what is set in application environment and
default values
Check `Cassette.Config.t` for key details
"""
@spec default() :: t
def default do
default_values = %Cassette.Config{}
env_or_default = fn key ->
case Application.fetch_env(:cassette, key) do
{:ok, {:system, var}} ->
System.get_env(var) || Map.get(default_values, key)
{:ok, value} ->
value
:error ->
Map.get(default_values, key)
end
end
default_values
|> Map.keys()
|> Enum.reduce(default_values, &Map.put(&2, &1, env_or_default.(&1)))
end
@doc """
Resolves config by fetching environment variables when values are in the form:
```elixir
{:system, "SOME_ENVIRONMENT_VARIABLE"}
```
The value will be fetched from the `SOME_ENVIRONMENT_VARIABLE` variable.
If that variable is `nil`, the default value in `Cassette.Config.t` will be
used
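  For example (a minimal sketch, assuming `CASSETTE_USERNAME` is set in the
  environment):
  ```elixir
  %Cassette.Config{username: {:system, "CASSETTE_USERNAME"}}
  |> Cassette.Config.resolve()
  ```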
"""
@spec resolve(t) :: t
def resolve(config = %Cassette.Config{}) do
default_values = %Cassette.Config{}
resolve_env_var = fn
key, {:system, var} ->
{key, System.get_env(var) || Map.get(default_values, key)}
key, value ->
{key, value}
end
env_or_default = fn map ->
fn key ->
resolve_env_var.(key, Map.get(map, key))
end
end
config
|> Map.keys()
|> Enum.into(%{}, env_or_default.(config))
end
def resolve(nil), do: default()
end
|
lib/cassette/config.ex
| 0.811303
| 0.749202
|
config.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.TimeOffsetSet do
@moduledoc """
This command is used to set Time Zone Offset (TZO) and Daylight Savings Time (DST) at the supporting
node.
Params:
* `:sign_tzo` - This field is used to indicate the sign (:plus or :minus) to apply to the Hour TZO and Minute TZO fields. (required)
* `:hour_tzo` - This field is used to indicate the number of hours that the originating time zone deviates from UTC. (required - 0..14)
* `:minute_tzo` - This field is used to indicate the number of minutes that the originating time zone deviates from UTC. (required - 0..59)
* `:sign_offset_dst` - This field is used to indicate the sign (:plus or :minus) for the Minute Offset DST field to apply to the
current time while in the Daylight Saving Time. (required)
* `:minute_offset_dst` - This field MUST indicate the number of minutes by which the current time is to be adjusted when
Daylight Saving Time starts. (required - 0..59)
* `:month_start_dst` - This field MUST indicate the month of the year when Daylight Saving Time starts. (required, 1..12)
* `:day_start_dst` - This field MUST indicate the day of the month when Daylight Saving Time starts. (required - 1..31)
* `:hour_start_dst` - This field MUST indicate the hour of the day when Daylight Saving Time starts. (required - 0..23)
* `:month_end_dst` - This field MUST indicate the month of the year when Daylight Saving Time ends. (required, 1..12)
* `:day_end_dst` - This field MUST indicate the day of the month when Daylight Saving Time ends. (required - 1..31)
* `:hour_end_dst` - This field MUST indicate the hour of the day when Daylight Saving Time ends. (required - 0..23)
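  A command can be built with `new/1`. A minimal sketch with illustrative values
  (UTC-5, a 30-minute DST shift):
      {:ok, command} =
        Grizzly.ZWave.Commands.TimeOffsetSet.new(
          sign_tzo: :minus, hour_tzo: 5, minute_tzo: 0,
          sign_offset_dst: :plus, minute_offset_dst: 30,
          month_start_dst: 3, day_start_dst: 8, hour_start_dst: 2,
          month_end_dst: 11, day_end_dst: 1, hour_end_dst: 2
        )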
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.Command
alias Grizzly.ZWave.CommandClasses.Time
@type param ::
{:sign_tzo, Time.sign()}
          | {:hour_tzo, 0..14}
| {:minute_tzo, 0..59}
| {:sign_offset_dst, Time.sign()}
| {:minute_offset_dst, byte}
| {:month_start_dst, 1..12}
| {:day_start_dst, 1..31}
          | {:hour_start_dst, 0..23}
| {:month_end_dst, 1..12}
| {:day_end_dst, 1..31}
          | {:hour_end_dst, 0..23}
@impl true
@spec new([param()]) :: {:ok, Command.t()}
def new(params) do
command = %Command{
name: :time_offset_set,
command_byte: 0x05,
command_class: Time,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl true
def encode_params(command) do
sign_tzo = Command.param!(command, :sign_tzo)
hour_tzo = Command.param!(command, :hour_tzo)
minute_tzo = Command.param!(command, :minute_tzo)
sign_offset_dst = Command.param!(command, :sign_offset_dst)
minute_offset_dst = Command.param!(command, :minute_offset_dst)
month_start_dst = Command.param!(command, :month_start_dst)
day_start_dst = Command.param!(command, :day_start_dst)
hour_start_dst = Command.param!(command, :hour_start_dst)
month_end_dst = Command.param!(command, :month_end_dst)
day_end_dst = Command.param!(command, :day_end_dst)
hour_end_dst = Command.param!(command, :hour_end_dst)
<<Time.encode_sign(sign_tzo)::size(1), hour_tzo::size(7), minute_tzo,
Time.encode_sign(sign_offset_dst)::size(1), minute_offset_dst::size(7), month_start_dst,
day_start_dst, hour_start_dst, month_end_dst, day_end_dst, hour_end_dst>>
end
@impl true
def decode_params(
<<sign_tzo_bit::size(1), hour_tzo::size(7), minute_tzo, sign_offset_dst_bit::size(1),
minute_offset_dst::size(7), month_start_dst, day_start_dst, hour_start_dst,
month_end_dst, day_end_dst, hour_end_dst>>
) do
{:ok,
[
sign_tzo: Time.decode_sign(sign_tzo_bit),
hour_tzo: hour_tzo,
minute_tzo: minute_tzo,
sign_offset_dst: Time.decode_sign(sign_offset_dst_bit),
minute_offset_dst: minute_offset_dst,
month_start_dst: month_start_dst,
day_start_dst: day_start_dst,
hour_start_dst: hour_start_dst,
month_end_dst: month_end_dst,
day_end_dst: day_end_dst,
hour_end_dst: hour_end_dst
]}
end
end
|
lib/grizzly/zwave/commands/time_offset_set.ex
| 0.915978
| 0.739658
|
time_offset_set.ex
|
starcoder
|
defprotocol Swoosh.Email.Recipient do
@moduledoc """
Recipient Protocol controls how data is formatted into an email recipient
## Deriving
The protocol allows leveraging the Elixir's `@derive` feature to simplify protocol implementation
in trivial cases. Accepted options are:
* `:name` (optional)
* `:address` (required)
## Example
defmodule MyUser do
@derive {Swoosh.Email.Recipient, name: :name, address: :email}
defstruct [:name, :email, :other_props]
end
or with optional name...
defmodule MySubscriber do
@derive {Swoosh.Email.Recipient, address: :email}
defstruct [:email, :preferences]
end
full implementation without deriving...
defmodule MyUser do
defstruct [:name, :email, :other_props]
end
defimpl Swoosh.Email.Recipient, for: MyUser do
def format(%MyUser{name: name, email: address} = value) do
{name, address}
end
end
"""
@type t :: term
@fallback_to_any true
@doc """
Formats `value` into a Swoosh recipient, a 2-tuple with recipient name and recipient address
"""
@spec format(t) :: Swoosh.Email.mailbox()
def format(value)
end
defimpl Swoosh.Email.Recipient, for: Any do
defmacro __deriving__(module, struct, opts) do
name_field = Keyword.get(opts, :name)
address_field = Keyword.fetch!(opts, :address)
keys = Map.keys(struct)
fields =
[{:name, name_field}, {:address, address_field}]
|> Enum.reject(fn {_, field} -> is_nil(field) end)
|> Enum.map(fn {var, field} ->
unless field in keys do
raise ArgumentError, "#{inspect(field)} does not exist in #{inspect(struct)}"
end
{field, {var, [generated: true], __MODULE__}}
end)
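    # The fields list becomes a map pattern (e.g. %{email: address, name: name}),
    # so the generated format/1 destructures the deriving struct directly.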
quote do
defimpl Swoosh.Email.Recipient, for: unquote(module) do
def format(%{unquote_splicing(fields)}) do
{unquote(if(name_field, do: Macro.var(:name, __MODULE__), else: "")), address}
end
end
end
end
def format(data) do
raise Protocol.UndefinedError,
protocol: @protocol,
value: data,
description: """
Swoosh.Email.Recipient needs to be implemented for #{inspect(data)}
Default implementations of Recipient include
* a string representing an email address like `<EMAIL>`
* or a two-element tuple `{name, address}`, where name and address are strings.
- name is allowed to be nil in this case
"""
end
end
defimpl Swoosh.Email.Recipient, for: Tuple do
def format({name, address}) when name in [nil, ""] and is_binary(address) and address != "" do
{"", address}
end
def format({name, address}) when is_binary(name) and is_binary(address) and address != "" do
{name, address}
end
def format(tuple) do
raise ArgumentError, """
    Unexpected tuple format, #{inspect(tuple)} cannot be formatted into a Recipient.
The expected format is {name :: String.t() | nil, address :: String.t()}, where address cannot be empty.
"""
end
end
defimpl Swoosh.Email.Recipient, for: BitString do
def format("") do
raise ArgumentError, """
    Cannot format an empty string into a Recipient.
"""
end
def format(address) when is_binary(address) do
{"", address}
end
end
|
lib/swoosh/email/recipient.ex
| 0.911256
| 0.551936
|
recipient.ex
|
starcoder
|
defmodule Needlework do
@moduledoc """
Needlework brings additional operators to Elixir that allows you to "thread" results of your functions into other function calls. Basically extending the `Kernel.|>/2` operator.
Just `use Needlework` in your modules and thread away!
Example:
```
defmodule MyModule do
use Needlework
@spec foo(func :: fun()) :: list()
def foo(func) do
func
~> Enum.map([1, 2, 3])
end
end
```
"""
defmacro __using__(_) do
quote do
import Needlework, only: :macros
end
end
@type ok :: {:ok, any()}
@type error :: {:error, any()}
@doc """
Wraps the value in `t:Needlework.ok/0` tuple.
Example:
iex> 5 |> Needlework.ok_unit()
{:ok, 5}
iex> {:ok, 5} |> Needlework.ok_unit()
{:ok, 5}
iex> {:error, ""} |> Needlework.ok_unit()
{:error, ""}
"""
@spec ok_unit(any) :: {:ok | :error, any}
def ok_unit({:ok, _} = value), do: value
def ok_unit({:error, _} = value), do: value
def ok_unit(value), do: {:ok, value}
@doc """
Bind operator.
  If the value on the left is a plain value -> converts it to a `t:Needlework.ok/0` | `t:Needlework.error/0` tuple,
  then destructures the tuple. If it was a `t:Needlework.ok/0` tuple -> passes the value for evaluation.
  If it was a `t:Needlework.error/0` tuple -> skips the evaluation
Example:
iex> import Needlework
iex> foo = fn x -> {:ok, x * 2} end
iex> 2 <|> foo.() <|> foo.() <|> foo.()
{:ok, 16}
iex> bar = fn _ -> {:error, "impossible"} end
iex> 2 <|> foo.() <|> bar.() <|> foo.()
{:error, "impossible"}
"""
defmacro left <|> right do
quote do
unquote(left)
|> Needlework.ok_unit()
|> (fn
{:ok, value} -> value |> unquote(right)
{:error, reason} -> {:error, reason}
end).()
end
end
@doc """
  Same as `Needlework.<|>/2`, but places the value where `_` appears.
  If no `_` is present, it works like `Needlework.<|>/2`
Examples:
iex> import Needlework
iex> foo = fn x, y -> {:ok, x ++ y} end
iex> [1, 2, 3] <~> foo.(_, [1, 2, 3]) <~> foo.([4, 5, 6], _)
{:ok, [4, 5, 6, 1, 2, 3, 1, 2, 3]}
iex> [1, 2, 3] <~> foo.([1, 2, 3]) <~> foo.([4, 5, 6])
{:ok, [1, 2, 3, 1, 2, 3, 4, 5, 6]}
iex> bar = fn _, _ -> {:error, "reason"} end
iex> [1, 2, 3] <~> bar.([1, 2, 3]) <~> foo.([4, 5, 6])
{:error, "reason"}
"""
defmacro left <~> right do
quote do
unquote(left)
|> Needlework.ok_unit()
|> (fn
{:ok, value} -> value ~>> unquote(right)
{:error, reason} -> {:error, reason}
end).()
end
end
@doc """
  Allows threading the value on the left as the last argument of the call on the right
Example:
iex> import Needlework
iex> [1, 2, 3] ~> Kernel.++([4, 5, 6])
[4, 5, 6, 1, 2, 3]
iex> fn x -> x*2 end ~> Enum.map([1, 2, 3])
[2, 4, 6]
"""
defmacro left ~> right do
new_right = add_last_arg(right, left)
quote do
unquote(new_right)
end
end
@doc """
  Allows threading the value on the left into a specific spot on the right.
  The value from the left is placed where `_` appears.
  If no `_` is present, it works like a regular `Kernel.|>/2`
Example:
iex> import Needlework
iex> [1, 2, 3] ~>> Kernel.++([4, 5, 6], _)
[4, 5, 6, 1, 2, 3]
      iex> [1, 2, 3] ~>> Kernel.++([4, 5, 6])
      [1, 2, 3, 4, 5, 6]
iex> [] ~>> Enum.reduce([1, 2, 3], _, fn x, acc -> [x | acc] end)
[3, 2, 1]
"""
defmacro left ~>> right do
case replace_underscore_args(right, left) do
{:ok, new_args} ->
new_right = put_elem(right, 2, new_args)
quote do
unquote(new_right)
end
{:error, _} ->
quote do
unquote(left)
|> unquote(right)
end
end
end
defp replace_underscore_args({_, _, args}, replacement) do
Enum.reduce(args, {:error, []}, fn
{:_, _, _}, {_, args} -> {:ok, args ++ [replacement]}
val, {res, args} -> {res, args ++ [val]}
end)
end
defp add_last_arg({name, context, args}, argument), do: {name, context, args ++ [argument]}
end
|
lib/needlework.ex
| 0.884573
| 0.793346
|
needlework.ex
|
starcoder
|
defmodule Recurly.Plan do
@moduledoc """
Module for handling plans in Recurly.
See the [developer docs on plans](https://dev.recurly.com/docs/list-plans)
for more details
"""
use Recurly.Resource
alias Recurly.{Resource,Money,Plan,AddOn}
@endpoint "/plans"
schema :plan do
field :accounting_code, :string
field :add_ons, AddOn, list: true
field :bypass_hosted_confirmation, :boolean
field :cancel_url, :string
field :display_phone_number, :string
field :display_quantity, :boolean
field :description, :string
field :name, :string
field :plan_code, :string
field :plan_interval_length, :integer
field :plan_interval_unit, :string
field :revenue_schedule_type, :string
field :setup_fee_accounting_code, :string
field :setup_fee_in_cents, Money
field :setup_fee_revenue_schedule_type, :string
field :success_url, :string
field :total_billing_cycles, :string
field :trial_interval_length, :integer
field :trial_interval_unit, :string
field :unit_amount_in_cents, Money
field :tax_code, :string
field :tax_exempt, :boolean
end
@doc """
Creates a stream of plans given some options.
## Parameters
- `options` Keyword list of the request options. See options in the
[plan list section](https://dev.recurly.com/docs/list-plans)
of the docs.
## Examples
See `Recurly.Resource.stream/3` for more detailed examples of
working with resource streams.
```
# stream of plans sorted from most recently
# updated to least recently updated
stream = Recurly.Plan.stream(sort: :updated_at)
```
"""
def stream(options \\ []) do
Resource.stream(Plan, @endpoint, options)
end
@doc """
Finds a plan given a plan code. Returns the plan or an error.
## Parameters
- `plan_code` String plan code
## Examples
```
alias Recurly.NotFoundError
case Recurly.Plan.find("myplancode") do
{:ok, plan} ->
# Found the plan
{:error, %NotFoundError{}} ->
# 404 plan was not found
end
```
"""
def find(plan_code) do
Resource.find(%Plan{}, path(plan_code))
end
@doc """
Creates a plan from a changeset.
## Parameters
- `changeset` Keyword list changeset
## Examples
```
alias Recurly.ValidationError
changeset = [
plan_code: "gold",
name: "Gold",
plan_interval_length: 1,
plan_interval_unit: "month",
unit_amount_in_cents: [
USD: 200,
EUR: 300
]
]
case Recurly.Plan.create(changeset) do
{:ok, plan} ->
# created the plan
{:error, %ValidationError{errors: errors}} ->
# will give you a list of validation errors
end
```
"""
def create(changeset) do
Resource.create(%Plan{}, changeset, @endpoint)
end
@doc """
Generates the path to a plan given the plan code
## Parameters
- `plan_code` String plan code
"""
def path(plan_code) do
Path.join(@endpoint, plan_code)
end
end
|
lib/recurly/plan.ex
| 0.860794
| 0.790854
|
plan.ex
|
starcoder
|
defmodule BST do
@moduledoc """
A binary search tree abstract data structure.
"""
alias BST.Node
defstruct [:comparator, :root]
@typedoc "The data structure stored on the data key for each node in the tree"
@type element :: term()
@typedoc """
Function used to determine whether to place new nodes as a left or right subtree.
Returns
- 0 if a == b
- negative integer if a < b
- positive integer if a > b
"""
@type comparator :: (a :: element(), b :: element() -> integer())
@type tree :: %__MODULE__{root: Node.t() | nil, comparator: comparator()}
@doc """
Creates a new `tree`.
## Examples
iex> tree = BST.new([0])
iex> tree.root
%BST.Node{data: 0, left: nil, right: nil}
iex> tree = BST.new([0, 1])
iex> tree.root
%BST.Node{data: 0, left: nil, right: %BST.Node{data: 1, left: nil, right: nil}}
iex> tree = BST.new([%{id: 1, name: "Alice"}, %{id: 2, name: "Bob"}], fn a, b -> a.id - b.id end)
iex> tree.root
%BST.Node{
data: %{id: 1, name: "Alice"},
left: nil,
right: %BST.Node{data: %{id: 2, name: "Bob"}, left: nil, right: nil}
}
"""
@spec new([element()], comparator()) :: tree()
def new(elements \\ [], comparator \\ fn a, b -> a - b end)
when is_list(elements) and is_function(comparator) do
tree = %__MODULE__{comparator: comparator}
Enum.reduce(elements, tree, fn element, tree ->
insert(tree, element)
end)
end
@doc """
Adds a node to a `tree`.
Resolves conflicts using `fun` where `a` is the existing `element` and `b` is
the new `element`. Defaults to replacing with the new `element`.
## Examples
iex> tree = BST.new([1])
iex> tree = BST.insert(tree, 2)
iex> tree.root
%BST.Node{data: 1, left: nil, right: %BST.Node{data: 2, left: nil, right: nil}}
"""
@spec insert(tree(), element(), (element(), element() -> element())) :: tree()
def insert(%__MODULE__{} = tree, element, fun \\ fn _a, b -> b end) do
%__MODULE__{tree | root: insert_node(tree.root, element, tree.comparator, fun)}
end
defp insert_node(nil, element, _comparator, _fun), do: %Node{data: element}
defp insert_node(%Node{data: elem1, left: left, right: right} = node, elem2, comparator, fun) do
case compare(elem2, elem1, comparator) do
:eq -> %Node{node | data: fun.(elem1, elem2)}
:lt -> %Node{node | left: insert_node(left, elem2, comparator, fun)}
:gt -> %Node{node | right: insert_node(right, elem2, comparator, fun)}
end
end
@doc """
Removes a node from `tree` if one is found with data matching `element`.
## Examples
iex> tree = BST.new([0, 1])
iex> tree = BST.remove(tree, 1)
iex> tree.root
%BST.Node{data: 0, left: nil, right: nil}
iex> tree = BST.new([%{id: 1, name: "Alice"}, %{id: 2, name: "Bob"}], fn a, b -> a.id - b.id end)
iex> tree = BST.remove(tree, %{id: 1})
iex> tree.root
%BST.Node{data: %{id: 2, name: "Bob"}, left: nil, right: nil}
"""
@spec remove(tree(), element()) :: tree()
def remove(%__MODULE__{} = tree, element) do
%__MODULE__{tree | root: remove_node(tree.root, element, tree.comparator)}
end
defp remove_node(nil, _element, _comparator), do: nil
defp remove_node(%Node{data: elem1, left: left, right: right} = node, elem2, comparator) do
case compare(elem2, elem1, comparator) do
:eq -> promote(left, right, comparator)
:lt -> %Node{node | left: remove_node(left, elem2, comparator)}
:gt -> %Node{node | right: remove_node(right, elem2, comparator)}
end
end
defp promote(nil, nil, _comparator), do: nil
defp promote(%Node{} = left, nil, _comparator), do: left
defp promote(nil, %Node{} = right, _comparator), do: right
defp promote(%Node{} = left, %Node{} = right, comparator) do
%Node{data: element} = leftmost_child(right)
right = remove_node(right, element, comparator)
%Node{data: element, left: left, right: right}
end
defp leftmost_child(%Node{left: nil} = node), do: node
defp leftmost_child(%Node{left: %Node{} = node}), do: leftmost_child(node)
@doc """
Updates a node using `fun`, but only if a node is found with the data in `element`.
`fun` will be invoked with the existing node's data as the first argument, and `element` as
the second argument. This allows the node to be looked up and updated all in one pass.
If `fun` returns `nil`, the node will be removed.
## Examples
iex> tree =
...> BST.new([], fn a, b -> a.id - b.id end)
...> |> BST.insert(%{id: 1, name: "Alice", hobbies: ["Painting"]})
...> |> BST.insert(%{id: 2, name: "Bob", hobbies: ["Programming"]})
...> |> BST.update(%{id: 2, hobbies: ["Biking"]}, fn a, b ->
...> %{a | hobbies: [hd(b.hobbies) | a.hobbies]}
...> end)
iex> tree.root
%BST.Node{
data: %{hobbies: ["Painting"], id: 1, name: "Alice"},
left: nil,
right: %BST.Node{
data: %{hobbies: ["Biking", "Programming"], id: 2, name: "Bob"},
left: nil,
right: nil
}
}
iex> tree =
...> BST.update(tree, %{id: 1}, fn a, _b ->
...> case Enum.reject(a.hobbies, &(&1 == "Painting")) do
...> [] -> nil
...> hobbies -> %{a | hobbies: hobbies}
...> end
...> end)
iex> tree.root
%BST.Node{
data: %{hobbies: ["Biking", "Programming"], id: 2, name: "Bob"},
left: nil,
right: nil
}
"""
@spec update(tree(), element(), (element(), element() -> element() | nil)) :: tree()
def update(%__MODULE__{} = tree, element, fun) when is_function(fun, 2) do
%__MODULE__{tree | root: update_node(tree.root, element, tree.comparator, fun)}
end
defp update_node(nil, _element, _comparator, _fun), do: nil
defp update_node(%Node{data: elem1, left: left, right: right} = node, elem2, comparator, fun) do
case compare(elem2, elem1, comparator) do
:eq -> update_or_promote(node, elem2, comparator, fun)
:lt -> %Node{node | left: update_node(left, elem2, comparator, fun)}
:gt -> %Node{node | right: update_node(right, elem2, comparator, fun)}
end
end
defp update_or_promote(%Node{data: elem1} = node, elem2, comparator, fun) do
case fun.(elem1, elem2) do
nil -> promote(node.left, node.right, comparator)
element -> %Node{node | data: element}
end
end
@doc """
Removes all nodes from a `tree`.
## Examples
iex> tree = BST.new([0])
iex> tree = BST.clear(tree)
iex> tree.root
nil
"""
@spec clear(tree()) :: tree()
def clear(%__MODULE__{} = tree), do: %__MODULE__{tree | root: nil}
@doc """
Returns a node's data matching `element`, or `nil` if not found.
## Examples
iex> tree = BST.new([0, 1])
iex> BST.find(tree, 1)
1
iex> tree = BST.new([%{id: 1, name: "Alice"}, %{id: 2, name: "Bob"}], fn a, b -> a.id - b.id end)
iex> BST.find(tree, %{id: 1})
%{id: 1, name: "Alice"}
"""
@spec find(tree(), element()) :: element() | nil
def find(%__MODULE__{} = tree, element) do
find_node(tree.root, element, tree.comparator)
end
defp find_node(nil, _element, _comparator), do: nil
defp find_node(%Node{data: elem1, left: left, right: right}, elem2, comparator) do
case compare(elem2, elem1, comparator) do
:eq -> elem1
:lt -> find_node(left, elem2, comparator)
:gt -> find_node(right, elem2, comparator)
end
end
@doc """
Returns a list of a `tree`'s `element`s in order.
Invokes `fun` on each `element` to transform it before adding it to the list.
## Examples
iex> BST.new([0])
...> |> BST.insert(1)
...> |> BST.insert(-1)
...> |> BST.to_list()
[-1, 0, 1]
iex> tree =
...> BST.new([], fn a, b -> a.id - b.id end)
...> |> BST.insert(%{id: 1, name: "Alice"})
...> |> BST.insert(%{id: 3, name: "Charlie"})
...> |> BST.insert(%{id: 2, name: "Bob"})
iex> BST.to_list(tree, fn a -> a.name end)
["Alice", "Bob", "Charlie"]
"""
@spec to_list(tree(), (element() -> any())) :: [element()]
def to_list(%__MODULE__{} = tree, fun \\ fn a -> a end) do
tree.root
|> list_nodes([], fun)
|> Enum.reverse()
end
defp list_nodes(nil, acc, _fun), do: acc
defp list_nodes(%Node{data: data, left: left, right: right}, acc, fun) do
lower_values = list_nodes(left, acc, fun)
list_nodes(right, [fun.(data) | lower_values], fun)
end
@doc """
Returns the minimum `element` in a `tree`, or `nil` if empty.
## Examples
iex> tree = BST.new([2, 1, 3])
iex> BST.min(tree)
1
"""
@spec min(tree()) :: element() | nil
def min(%__MODULE__{root: nil} = _tree), do: nil
def min(%__MODULE__{root: node}), do: find_min(node)
defp find_min(%Node{data: data, left: nil}), do: data
defp find_min(%Node{left: %Node{} = node}), do: find_min(node)
@doc """
Returns the maximum `element` in a `tree`, or `nil` if empty.
## Examples
iex> tree = BST.new([2, 3, 1])
iex> BST.max(tree)
3
"""
@spec max(tree()) :: element() | nil
def max(%__MODULE__{root: nil} = _tree), do: nil
def max(%__MODULE__{root: node}), do: find_max(node)
defp find_max(%Node{data: data, right: nil}), do: data
defp find_max(%Node{right: %Node{} = node}), do: find_max(node)
@doc """
Returns the height of a `tree`.
## Examples
iex> tree = BST.new([0, -3, 5, 10])
iex> BST.height(tree)
2
"""
@spec height(tree()) :: integer()
def height(%__MODULE__{root: nil} = _tree), do: -1
def height(%__MODULE__{root: root}) do
root
|> node_height(0)
|> Enum.max()
end
defp node_height(nil, _height), do: []
defp node_height(%Node{left: left, right: right}, height) do
[height] ++ node_height(left, height + 1) ++ node_height(right, height + 1)
end
defp compare(a, b, comparator) do
val = comparator.(a, b)
cond do
val == 0 -> :eq
val < 0 -> :lt
val > 0 -> :gt
end
end
end
|
lib/bst.ex
| 0.931975
| 0.756875
|
bst.ex
|
starcoder
|
defmodule GGity.Stat do
@moduledoc false
@type dataset :: list(map())
@doc false
@spec identity(dataset(), map()) :: {dataset(), map()}
def identity(data, mapping), do: {data, mapping}
@spec count(dataset(), map()) :: {dataset(), map()}
def count(data, mapping) do
discrete_aesthetics = discrete_aesthetics(data, mapping)
permutations = permutations(discrete_aesthetics, data, mapping)
stat =
Enum.reduce(permutations, [], fn permutation, stat ->
[
Map.new(
Enum.map(discrete_aesthetics, fn aesthetic ->
{mapping[aesthetic], permutation[aesthetic]}
end)
)
|> Map.put(
:count,
Enum.count(data, fn row ->
Enum.map(permutation, fn {k, _v} -> row[mapping[k]] end) ==
Enum.map(permutation, fn {_k, v} -> v end)
end)
)
| stat
]
end)
|> Enum.sort_by(fn row -> row[mapping[:x]] end)
mapping = Map.put(mapping, :y, :count)
{stat, mapping}
end
@spec boxplot(dataset(), map()) :: {dataset(), map()}
def boxplot(data, mapping) do
discrete_aesthetics = discrete_aesthetics(data, mapping)
permutations = permutations(discrete_aesthetics, data, mapping)
stat =
Enum.reduce(permutations, [], fn permutation, stat ->
[
Map.new(
Enum.map(discrete_aesthetics, fn aesthetic ->
{mapping[aesthetic], permutation[aesthetic]}
end)
)
|> Map.merge(boxplot_stats_map(data, mapping, permutation))
| stat
]
end)
|> Enum.sort_by(fn row -> row[mapping[:x]] end)
{stat, mapping}
end
defp boxplot_stats_map(data, mapping, permutation) do
permutation_data =
data
|> Enum.filter(fn row ->
Enum.map(permutation, fn {k, _v} -> row[mapping[k]] end) ==
Enum.map(permutation, fn {_k, v} -> v end)
end)
|> Enum.map(fn row -> row[mapping[:y]] end)
|> Enum.sort()
sample_size = length(permutation_data)
quantiles =
for quantile <- [0.25, 0.5, 0.75],
do: {quantile, percentile(permutation_data, sample_size, quantile)},
into: %{}
interquartile_range = quantiles[0.75] - quantiles[0.25]
ymin_threshold = quantiles[0.25] - 1.5 * interquartile_range
ymax_threshold = quantiles[0.75] + 1.5 * interquartile_range
outliers =
for record <- permutation_data,
record > ymax_threshold or record < ymin_threshold,
do: record
%{
ymin:
Enum.filter(permutation_data, fn record -> record >= ymin_threshold end) |> Enum.min(),
lower: quantiles[0.25],
middle: quantiles[0.5],
upper: quantiles[0.75],
ymax:
Enum.filter(permutation_data, fn record -> record <= ymax_threshold end) |> Enum.max(),
outliers: outliers
}
end
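  # Quantile by linear interpolation between closest ranks (the same method
  # as R's default type 7). Assumes `data` is sorted ascending, which
  # boxplot_stats_map/3 guarantees.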
defp percentile([single_value], 1, _percentile), do: single_value
defp percentile(data, sample_size, percentile) when percentile >= 0 and percentile <= 1 do
p = percentile * (sample_size - 1) + 1
k = trunc(p)
d = p - k
{_first_half, [lower, upper | _rest_of_second_half]} = Enum.split(data, k - 1)
lower + d * (upper - lower)
end
defp discrete_aesthetics(data, mapping) do
discrete_variables =
data
|> hd()
|> Enum.filter(fn {_k, v} -> is_binary(v) end)
|> Enum.map(fn {k, _v} -> k end)
mapping
|> Enum.filter(fn {_k, v} -> v in discrete_variables end)
|> Keyword.keys()
end
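  # Collects the unique combinations of discrete-aesthetic values that occur
  # in the data, e.g. [%{fill: "a"}, %{fill: "b"}] when :fill is the only
  # discrete aesthetic.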
defp permutations(discrete_aesthetics, data, mapping) do
for row <- data,
uniq: true,
do:
discrete_aesthetics
|> Enum.map(fn aesthetic -> {aesthetic, row[mapping[aesthetic]]} end)
|> Map.new()
end
end
|
lib/ggity/stat.ex
| 0.840406
| 0.659823
|
stat.ex
|
starcoder
|
defmodule Jeff.Reply do
@moduledoc """
  Replies are sent from a peripheral device (PD) to an access control unit (ACU) in response to a command.
| Name | Code | Description | Data Type |
|-----------|------|--------------------------------------------|------------------|
| ACK | 0x40 | Command accepted, nothing else to report | - |
| NAK | 0x41 | Command not processed | ErrorCode |
| PDID | 0x45 | PD ID Report | IdReport |
| PDCAP | 0x46 | PD Capabilities Report | [Capability] |
| LSTATR | 0x48 | Local Status Report | Report data |
| ISTATR | 0x49 | Input Status Report | Report data |
| OSTATR | 0x4A | Output Status Report | Report data |
| RSTATR | 0x4B | Reader Status Report | Report data |
| RAW | 0x50 | Reader Data – Raw bit image of card data | CardData |
| FMT | 0x51 | Reader Data – Formatted character stream | CardData |
| KEYPAD | 0x53 | Keypad Data | KeypadData |
| COM | 0x54 | PD Communications Configuration Report | ComData |
| BIOREADR | 0x57 | Biometric Data | Biometric data |
| BIOMATCHR | 0x58 | Biometric Match Result | Result |
| CCRYPT | 0x76 | Client's ID, Random Number, and Cryptogram | EncryptionClient |
| BUSY | 0x79 | PD is Busy reply | - |
| RMAC_I | 0x78 | Initial R-MAC | Encryption Data |
| FTSTAT | 0x7A | File transfer status | Status details |
| PIVDATAR | 0x80 | PIV Data Reply | credential data |
| GENAUTHR | 0x81 | Authentication response | response details |
| CRAUTHR | 0x82 | Response to challenge | response details |
| MFGSTATR | 0x83 | MFG specific status | status details |
| MFGERRR | 0x84 | MFG specific error | error details |
| MFGREP | 0x90 | Manufacturer Specific Reply | Any |
| XRD | 0xB1 | Extended Read Response | APDU and details |
"""
use Bitwise
alias Jeff.Reply.{
Capability,
CardData,
ComData,
EncryptionClient,
ErrorCode,
IdReport,
KeypadData,
LocalStatus
}
@type t() :: %__MODULE__{
address: byte(),
code: byte(),
data: binary() | map() | list(),
name: atom()
}
defstruct [:address, :code, :data, :name]
@names %{
0x40 => ACK,
0x41 => NAK,
0x45 => PDID,
0x46 => PDCAP,
0x48 => LSTATR,
0x49 => ISTATR,
0x4A => OSTATR,
0x4B => RSTATR,
0x50 => RAW,
0x51 => FMT,
0x53 => KEYPAD,
0x54 => COM,
0x57 => BIOREADR,
0x58 => BIOMATCHR,
0x76 => CCRYPT,
0x79 => BUSY,
0x78 => RMAC_I,
0x7A => FTSTAT,
0x80 => PIVDATAR,
0x81 => GENAUTHR,
0x82 => CRAUTHR,
0x83 => MFGSTATR,
0x84 => MFGERRR,
0x90 => MFGREP,
0xB1 => XRD
}
@codes Map.new(@names, fn {code, name} -> {name, code} end)
def new(%{code: code, data: data, address: address}) do
name_or_code = Map.get(@names, code, code)
new(address, name_or_code, data)
end
def new(address, code, data \\ nil)
def new(address, code, data) when is_integer(code) do
%__MODULE__{
address: reply_mask(address),
code: code,
data: data,
name: UNKNOWN
}
end
def new(address, name, data) do
%__MODULE__{
address: reply_mask(address),
code: code(name),
data: decode(name, data),
name: name
}
end
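  # In OSDP a PD sets the most significant bit of the address byte when
  # replying; masking with 0b01111111 recovers the 7-bit device address.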
defp reply_mask(address) do
address &&& 0b01111111
end
defp decode(ACK, _data), do: Jeff.Reply.ACK
defp decode(NAK, data), do: ErrorCode.decode(data)
defp decode(PDID, data), do: IdReport.decode(data)
defp decode(PDCAP, data), do: Capability.decode(data)
defp decode(LSTATR, data), do: LocalStatus.decode(data)
defp decode(COM, data), do: ComData.decode(data)
defp decode(KEYPAD, data), do: KeypadData.decode(data)
defp decode(RAW, data), do: CardData.decode(data)
defp decode(CCRYPT, data), do: EncryptionClient.decode(data)
defp decode(RMAC_I, data), do: data
defp decode(_name, nil), do: nil
defp decode(_name, <<>>), do: nil
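  # Remaining named replies fall back to a decoder module of the same name
  # under Jeff.Reply (e.g. an FMT reply would dispatch to Jeff.Reply.FMT).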
defp decode(name, data), do: Module.concat(__MODULE__, name).decode(data)
def code(name), do: @codes[name]
def name(code), do: @names[code]
end
|
lib/jeff/reply.ex
| 0.741019
| 0.689606
|
reply.ex
|
starcoder
|
defmodule FalconPlusApi.Api.Expression do
alias Maxwell.Conn
alias FalconPlusApi.{Util, Sig, Api}
@doc """
* [Session](#/authentication) Required
### Request
```{
"right_value": "0",
"priority": 2,
"pause": 0,
"op": "==",
"note": "this is a test exp",
"max_step": 3,
"func": "all(#3)",
"expression": "each(metric=agent.alive endpoint=docker-agent)",
"action": {
"url": "http://localhost:1234/callback",
"uic": [
"test"
],
"callback": 1,
"before_callback_sms": 1,
"before_callback_mail": 0,
"after_callback_sms": 1,
"after_callback_mail": 0
}
}```
### Response
```Status: 200```
```{"message":"expression created"}```
"""
def create(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/expression>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.post
|> Api.get_result
end
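  # Illustrative call (the host is hypothetical; the accepted `opts` depend
  # on Api.set_opts/2):
  #
  #     FalconPlusApi.Api.Expression.create(sig, "http://falcon.example.com")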
@doc """
* [Session](#/authentication) Required
* ex. /api/v1/expression/5
### Response
```Status: 200```
```{"message":"expression:5 has been deleted"}```
"""
def delete(expression_id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/expression/#{expression_id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.delete
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
ex. /api/v1/expression/5
### Response
```Status: 200```
```{
"action": {
"id": 5,
"uic": "taipei",
"url": "",
"callback": 0,
"before_callback_sms": 0,
"before_callback_mail": 0,
"after_callback_sms": 0,
"after_callback_mail": 0
},
"expression": {
"id": 5,
"expression": "each(metric=agent.alive endpoint=docker-agent)",
"func": "all(#3)",
"op": "==",
"right_value": "0",
"max_step": 3,
"priority": 2,
"note": "this is a test exp",
"action_id": 177,
"create_user": "root",
"pause": 1
}
}```
"""
def info_by_id(expression_id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/expression/#{expression_id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Response
```Status: 200```
```[
{
"id": 2,
"expression": "each(metric=? xx=yy)",
"func": "all(#3)",
"op": "==",
"right_value": "0",
"max_step": 3,
"priority": 0,
"note": "",
"action_id": 18,
"create_user": "root",
"pause": 0
},
{
"id": 3,
"expression": "each(metric=ss.close.wait endpoint=docker-A)",
"func": "all(#1)",
"op": "!=",
"right_value": "0",
"max_step": 1,
"priority": 4,
"note": "boss docker-A 连接数大于10",
"action_id": 91,
"create_user": "root",
"pause": 0
},
{
"id": 4,
"expression": "each(metric=agent.alive endpoint=docker-agent)",
"func": "all(#3)",
"op": "==",
"right_value": "0",
"max_step": 3,
"priority": 2,
"note": "this is a test exp",
"action_id": 176,
"create_user": "root",
"pause": 1
}
]```
"""
def list(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/expression>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Request
```{
"right_value": "0",
"priority": 2,
"pause": 1,
"op": "==",
"note": "this is a test exp",
"max_step": 3,
"id": 5,
"func": "all(#3)",
"expression": "each(metric=agent.alive endpoint=docker-agent)",
"action": {
"url": "http://localhost:1234/callback",
"uic": [
"test",
"test2"
],
"callback": 0,
"before_callback_sms": 1,
"before_callback_mail": 0,
"after_callback_sms": 1,
"after_callback_mail": 0
}
}```
### Response
```Status: 200```
```{"message":"expression:5 has been updated"}```
"""
def update(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/expression>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.put
|> Api.get_result
end
end
|
lib/falcon_plus_api/api/expression.ex
| 0.624523
| 0.741253
|
expression.ex
|
starcoder
|
defmodule Noray.Vector do
@moduledoc """
  Functions for operating on vectors.
  These functions operate on `t:Noray.Tetrad.t/0`, but without confirming that the tetrad is actually a vector.
"""
import Noray.Tetrad
@opaque t :: Noray.Tetrad.t()
@doc """
Returns a new vector.
"""
@spec new(float(), float(), float()) :: t()
def new(x, y, z), do: tetrad(x: x, y: y, z: z, w: 0.0)
@doc """
Returns the x component of the vector.
"""
@spec x(t()) :: float()
defdelegate x(vector), to: Noray.Tetrad
@doc """
Returns the y component of the vector.
"""
@spec y(t()) :: float()
defdelegate y(vector), to: Noray.Tetrad
@doc """
Returns the z component of the vector.
"""
@spec z(t()) :: float()
defdelegate z(vector), to: Noray.Tetrad
@doc """
Returns the magnitude (i.e., length) of a vector.
"""
@spec magnitude(t()) :: float()
def magnitude(tetrad) do
x = tetrad(tetrad, :x)
y = tetrad(tetrad, :y)
z = tetrad(tetrad, :z)
w = tetrad(tetrad, :w)
:math.sqrt(x * x + y * y + z * z + w * w)
end
@doc """
Returns a normalized version of the vector.
"""
@spec normalize(t()) :: t()
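  # Assumes scale_inverse/2 (from Noray.Tetrad) divides each component by the
  # given scalar, so the result has magnitude 1.0; a zero-length vector would
  # divide by zero here.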
def normalize(vector) do
scale_inverse(vector, magnitude(vector))
end
@doc """
Returns the dot product of two vectors.
"""
@spec dot(t(), t()) :: float()
def dot(vector1, vector2) do
tetrad(vector1, :x) * tetrad(vector2, :x) +
tetrad(vector1, :y) * tetrad(vector2, :y) +
tetrad(vector1, :z) * tetrad(vector2, :z) +
tetrad(vector1, :w) * tetrad(vector2, :w)
end
@doc """
Returns the cross product of two vectors, using a left-handed coordinate system.
"""
@spec cross(t(), t()) :: t()
def cross(vector1, vector2) do
tetrad(
x: tetrad(vector1, :y) * tetrad(vector2, :z) - tetrad(vector1, :z) * tetrad(vector2, :y),
y: tetrad(vector1, :z) * tetrad(vector2, :x) - tetrad(vector1, :x) * tetrad(vector2, :z),
z: tetrad(vector1, :x) * tetrad(vector2, :y) - tetrad(vector1, :y) * tetrad(vector2, :x),
w: 0.0
)
end
end
|
lib/noray/vector.ex
| 0.944587
| 0.904651
|
vector.ex
|
starcoder
|
defmodule Sanbase.Clickhouse.EthTransfers do
@moduledoc ~s"""
Uses ClickHouse to work with ETH transfers.
"""
@type t :: %__MODULE__{
datetime: %DateTime{},
from_address: String.t(),
to_address: String.t(),
trx_hash: String.t(),
trx_value: float,
block_number: non_neg_integer,
trx_position: non_neg_integer,
type: String.t()
}
@type spent_over_time_type :: %{
eth_spent: float,
datetime: %DateTime{}
}
@type wallets :: list(String.t())
@type exchange_volume :: %{
datetime: non_neg_integer(),
exchange_inflow: float,
exchange_outflow: float
}
use Ecto.Schema
require Logger
alias Sanbase.ClickhouseRepo
alias Sanbase.Model.Project
alias Sanbase.DateTimeUtils
@table "eth_transfers"
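  # Wei per ETH (10^18); converts raw on-chain values to ETH.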
@eth_decimals 1_000_000_000_000_000_000
@primary_key false
@timestamps_opts [updated_at: false]
schema @table do
field(:datetime, :utc_datetime, source: :dt)
field(:from_address, :string, primary_key: true, source: :from)
field(:to_address, :string, primary_key: true, source: :to)
field(:trx_hash, :string, source: :transactionHash)
field(:trx_value, :float, source: :value)
field(:block_number, :integer, source: :blockNumber)
field(:trx_position, :integer, source: :transactionPosition)
field(:type, :string)
end
@spec changeset(any(), any()) :: no_return()
def changeset(_, _) do
raise "Should not try to change eth transfers"
end
@doc ~s"""
Return the `limit` biggest transfers for a list of wallets and time period.
Only transfers which `from` address is in the list and `to` address is
not in the list are selected.
"""
  @spec top_wallet_transfers(wallets, %DateTime{}, %DateTime{}, integer, :in | :out | :all) ::
          {:ok, list(t)} | {:error, String.t()}
def top_wallet_transfers([], _from, _to, _limit, _type), do: {:ok, []}
def top_wallet_transfers(wallets, from, to, limit, type) do
{query, args} = wallet_transactions_query(wallets, from, to, limit, type)
ClickhouseRepo.query_transform(query, args, fn
[timestamp, from_address, to_address, trx_hash, trx_value] ->
%{
datetime: DateTime.from_unix!(timestamp),
from_address: from_address,
to_address: to_address,
trx_hash: trx_hash,
trx_value: trx_value
}
end)
end
@doc ~s"""
The total ETH spent in the `from` - `to` interval
"""
@spec eth_spent(wallets, %DateTime{}, %DateTime{}) ::
{:ok, []} | {:ok, [{String.t(), float()}]} | {:error, String.t()}
def eth_spent([], _, _), do: {:ok, []}
def eth_spent(wallets, from, to) do
{query, args} = eth_spent_query(wallets, from, to)
ClickhouseRepo.query_transform(query, args, fn [from, value] ->
{from, value / @eth_decimals}
end)
  end
@doc ~s"""
Return a list of maps `%{datetime: datetime, eth_spent: ethspent}` that shows
how much ETH has been spent for the list of `wallets` for each `interval` in the
time period [`from`, `to`]
"""
@spec eth_spent_over_time(%Project{} | wallets, %DateTime{}, %DateTime{}, String.t()) ::
{:ok, list(spent_over_time_type)} | {:error, String.t()}
def eth_spent_over_time(%Project{} = project, from, to, interval) do
{:ok, eth_addresses} = Project.eth_addresses(project)
eth_spent_over_time(eth_addresses, from, to, interval)
end
def eth_spent_over_time([], _, _, _), do: {:ok, []}
def eth_spent_over_time(wallets, from, to, interval) when is_list(wallets) do
{query, args} = eth_spent_over_time_query(wallets, from, to, interval)
ClickhouseRepo.query_transform(query, args, fn [value, datetime_str] ->
%{
datetime: datetime_str |> DateTimeUtils.from_erl!(),
eth_spent: value / @eth_decimals
}
end)
end
@doc ~s"""
Returns the inflow and outflow volume for a list of exchange_addresses between two datetimes
"""
@spec exchange_volume(
list(String.t()),
%DateTime{},
%DateTime{}
) :: {:ok, list(exchange_volume)} | {:error, String.t()}
def exchange_volume(exchange_addresses, from, to) do
exchange_addresses = exchange_addresses |> Enum.map(&String.downcase/1)
{query, args} = exchange_volume_query(exchange_addresses, from, to)
query
|> ClickhouseRepo.query_transform(args, fn [dt, inflow, outflow] ->
%{
datetime: DateTime.from_unix!(dt),
exchange_inflow: inflow,
exchange_outflow: outflow
}
end)
  end
# Private functions
defp wallet_transactions_query(wallets, from, to, limit, :out) do
query = """
SELECT
toUnixTimestamp(dt),
from,
to,
transactionHash,
value / #{@eth_decimals}
FROM #{@table} FINAL
PREWHERE
from IN (?1) AND
NOT to IN (?1) AND
dt >= toDateTime(?2) AND
dt <= toDateTime(?3) AND
type = 'call'
ORDER BY value DESC
LIMIT ?4
"""
args = [
wallets,
from |> DateTime.to_unix(),
to |> DateTime.to_unix(),
limit
]
{query, args}
end
defp wallet_transactions_query(wallets, from, to, limit, :in) do
query = """
SELECT
toUnixTimestamp(dt),
from,
to,
transactionHash,
value / #{@eth_decimals}
FROM #{@table} FINAL
PREWHERE
from NOT IN (?1) AND
to IN (?1) AND
dt >= toDateTime(?2) AND
dt <= toDateTime(?3) AND
type = 'call'
ORDER BY value DESC
LIMIT ?4
"""
args = [
wallets,
from |> DateTime.to_unix(),
to |> DateTime.to_unix(),
limit
]
{query, args}
end
defp wallet_transactions_query(wallets, from, to, limit, :all) do
query = """
SELECT
toUnixTimestamp(dt),
from,
to,
transactionHash,
value / #{@eth_decimals}
FROM #{@table} FINAL
PREWHERE
(
(from IN (?1) AND NOT to IN (?1)) OR
(NOT from IN (?1) AND to IN (?1))
) AND
dt >= toDateTime(?2) AND
dt <= toDateTime(?3) AND
type = 'call'
ORDER BY value DESC
LIMIT ?4
"""
args = [
wallets,
from |> DateTime.to_unix(),
to |> DateTime.to_unix(),
limit
]
{query, args}
end
defp eth_spent_query(wallets, from, to) do
from_unix = DateTime.to_unix(from)
to_unix = DateTime.to_unix(to)
prewhere_clause =
wallets
|> Enum.map(fn list ->
list = list |> Enum.map(fn x -> ~s/'#{x}'/ end) |> Enum.join(", ")
"(from IN (#{list}) AND NOT to IN (#{list}))"
end)
|> Enum.join(" OR ")
query = """
SELECT from, SUM(value)
FROM (
SELECT any(value) as value, from
FROM #{@table}
PREWHERE (#{prewhere_clause})
AND dt >= toDateTime(?1)
AND dt <= toDateTime(?2)
AND type == 'call'
GROUP BY from, type, to, dt, transactionHash
)
GROUP BY from
"""
args = [
from_unix,
to_unix
]
{query, args}
end
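  # Builds a gap-filled time series: the numbers(?2) SELECT emits one
  # zero-valued row per interval bucket, UNION ALL'ed with the real sums so
  # that buckets without transfers still appear with value 0.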
defp eth_spent_over_time_query(wallets, from, to, interval) do
from_unix = DateTime.to_unix(from)
to_unix = DateTime.to_unix(to)
interval = DateTimeUtils.str_to_sec(interval)
span = div(to_unix - from_unix, interval) |> max(1)
query = """
SELECT SUM(value), time
FROM (
SELECT
toDateTime(intDiv(toUInt32(?4 + number * ?1), ?1) * ?1) as time,
toFloat64(0) AS value
FROM numbers(?2)
UNION ALL
SELECT toDateTime(intDiv(toUInt32(dt), ?1) * ?1) as time, sum(value) as value
FROM (
SELECT any(value) as value, dt
FROM #{@table}
PREWHERE from IN (?3) AND NOT to IN (?3)
AND dt >= toDateTime(?4)
AND dt <= toDateTime(?5)
AND type == 'call'
GROUP BY from, type, to, dt, transactionHash
)
GROUP BY time
)
GROUP BY time
ORDER BY time
"""
args = [
interval,
span,
wallets,
from_unix,
to_unix
]
{query, args}
end
defp exchange_volume_query(exchange_addresses, from, to) do
query = """
SELECT
toUnixTimestamp(dt) as datetime,
(inflow * price_usd) as exchange_inflow,
(outflow * price_usd) as exchange_outflow
FROM
(
SELECT dt, inflow, outflow
FROM
(
SELECT
toStartOfDay(dt) as dt,
sum(value) / #{@eth_decimals} as inflow
FROM #{@table}
PREWHERE
to IN (?1) AND NOT from IN (?1)
AND dt >= toDateTime(?2)
AND dt <= toDateTime(?3)
GROUP BY dt
)
ALL INNER JOIN
(
SELECT
toStartOfDay(dt) as dt,
sum(value) / #{@eth_decimals} as outflow
FROM #{@table}
PREWHERE
from IN (?1) AND NOT to IN (?1)
AND dt >= toDateTime(?2)
AND dt <= toDateTime(?3)
GROUP BY dt
) USING dt
)
ALL INNER JOIN
(
SELECT
toStartOfDay(dt) as dt, AVG(price_usd) as "price_usd"
FROM prices
PREWHERE
name = 'ETH_ethereum'
AND dt >= toDateTime(?2)
AND dt <= toDateTime(?3)
GROUP BY dt
) USING dt
ORDER BY dt
"""
args = [
exchange_addresses,
from,
to
]
{query, args}
end
end
|
lib/sanbase/clickhouse/eth_transfers.ex
| 0.853043
| 0.427456
|
eth_transfers.ex
|
starcoder
|
defmodule OpenHours.Schedule do
@moduledoc """
This module contains functions to work with schedules.
There are five settings to configure a schedule.
- `hours`: Map containing all the open hours intervals for a regular week.
- `holidays`: Dates in which the business is closed.
- `shifts`: Special dates where the business has a different hour schedule.
- `breaks`: Special dates where the business has interruption intervals.
- `time_zone`: Time zone of the schedule.
All the `%Time{}` values used for the schedule must be in local time.
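
  ## Example

  A minimal schedule (intervals are assumed here to be `{start, finish}`
  tuples of `%Time{}` structs, as consumed by `OpenHours.Interval`):

      schedule = %OpenHours.Schedule{
        hours: %{mon: [{~T[09:00:00], ~T[14:00:00]}]},
        holidays: [~D[2019-12-25]],
        time_zone: "Europe/Madrid"
      }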
"""
import OpenHours.Common
alias OpenHours.{Schedule, Interval}
@typedoc """
A struct representing the regular weekly schedule. Each key corresponds with the three first
characters of a week day. The values of each key are composed by lists of intervals.
"""
@type hour_schedule :: %{
optional(:mon) => list(Interval.t()),
optional(:tue) => list(Interval.t()),
optional(:wed) => list(Interval.t()),
optional(:thu) => list(Interval.t()),
optional(:fri) => list(Interval.t()),
optional(:sat) => list(Interval.t()),
optional(:sun) => list(Interval.t())
}
@typedoc """
A list of Date structs representing the days the business is closed
"""
@type holidays :: list(Date.t())
@typedoc """
A string containing a valid IANA time zone
"""
@type time_zone :: String.t()
@typedoc """
A tuple formed by a date and a list of intervals
"""
@type shift :: {Date.t(), list(Interval.t())}
@typedoc """
A list of shifts
"""
@type shifts :: list(shift)
@typedoc """
A tuple formed by a date and a list of intervals
"""
@type break :: {Date.t(), list(Interval.t())}
@typedoc """
A list of breaks
"""
@type breaks :: list(break)
@typedoc """
A struct containing all data of a schedule
"""
@type t :: %__MODULE__{
hours: hour_schedule,
shifts: shifts,
breaks: breaks,
holidays: holidays,
time_zone: time_zone
}
@enforce_keys [:time_zone]
defstruct hours: %{}, shifts: [], breaks: [], holidays: [], time_zone: nil
@doc """
It returns `true` if the supplied `%DateTime{}` is within the business hours. The result is
calculated based on all the settings of the schedule (hours, shifts, breaks and holidays).
The rules applied to establish if the date passed is in business hours is the following.
Shifts act as exceptions to the hours configured for a particular date; that is, if a date is
configured with both hours-based intervals and shifts, the shifts are in force and the intervals
are disregarded.
Periods occurring on holidays are disregarded.
Periods that overlaps with a break are treated as inactive. In case a date overlaps with a shift
and a break the shift will have priority. Priority works as follows:
Holidays > Shifts > Breaks > Hours
"""
@spec in_hours?(OpenHours.Schedule.t(), DateTime.t()) :: boolean()
def in_hours?(%Schedule{time_zone: schedule_tz} = schedule, %DateTime{time_zone: date_tz} = at)
when schedule_tz != date_tz do
{:ok, shifted_at} = DateTime.shift_zone(at, schedule_tz, Tzdata.TimeZoneDatabase)
in_hours?(schedule, shifted_at)
end
def in_hours?(%Schedule{} = schedule, %DateTime{} = at) do
with date <- DateTime.to_date(at),
false <- in_holidays?(schedule, date),
shifts <- shifts_for(schedule, date) do
case shifts do
nil -> is_within_business_hours?(schedule, at) && !is_within_breaks?(schedule, at)
_ -> Enum.any?(shifts, &Interval.within?(&1, at))
end
else
_ -> false
end
end
defp shifts_for(%Schedule{shifts: shifts}, %Date{} = at) do
case Enum.find(shifts, fn {shift_date, _} -> shift_date == at end) do
{_, shift_intervals} -> shift_intervals
_ -> nil
end
end
defp is_within_business_hours?(%Schedule{} = schedule, %DateTime{} = at) do
schedule.hours
|> Map.get(weekday(at), [])
|> Enum.any?(&Interval.within?(&1, at))
end
defp is_within_breaks?(%Schedule{breaks: breaks}, %DateTime{} = at) do
date = DateTime.to_date(at)
case Enum.find(breaks, fn {break_date, _intervals} -> break_date == date end) do
{_break_date, intervals} -> Enum.any?(intervals, &Interval.within?(&1, at))
_ -> false
end
end
defp in_holidays?(%Schedule{holidays: holidays}, %Date{} = at), do: Enum.member?(holidays, at)
end
|
lib/open_hours/schedule.ex
| 0.911071
| 0.79158
|
schedule.ex
|
starcoder
|
defmodule EVM.Builtin do
@moduledoc """
Implements the built-in functions as defined in Appendix E
of the Yellow Paper. These are contract functions that
natively exist in Ethereum.
"""
@doc """
A precompiled contract that recovers a public key from a signed hash
(Elliptic curve digital signature algorithm public key recovery function)
"""
@spec run_ecrec(EVM.Gas.t(), EVM.ExecEnv.t()) ::
{EVM.Gas.t(), EVM.SubState.t(), EVM.ExecEnv.t(), EVM.VM.output()}
def run_ecrec(gas, exec_env) do
EVM.Builtin.Ecrec.exec(gas, exec_env)
end
@doc """
Runs SHA256 hashing
"""
@spec run_sha256(EVM.Gas.t(), EVM.ExecEnv.t()) ::
{EVM.Gas.t(), EVM.SubState.t(), EVM.ExecEnv.t(), EVM.VM.output()}
def run_sha256(gas, exec_env) do
EVM.Builtin.Sha256.exec(gas, exec_env)
end
@doc """
Runs RIPEMD160 hashing
"""
@spec run_rip160(EVM.Gas.t(), EVM.ExecEnv.t()) ::
{EVM.Gas.t(), EVM.SubState.t(), EVM.ExecEnv.t(), EVM.VM.output()}
def run_rip160(gas, exec_env) do
EVM.Builtin.Rip160.exec(gas, exec_env)
end
@doc """
  Identity simply returns the input as the output.
"""
@spec run_id(EVM.Gas.t(), EVM.ExecEnv.t()) ::
{EVM.Gas.t(), EVM.SubState.t(), EVM.ExecEnv.t(), EVM.VM.output()}
def run_id(gas, exec_env) do
EVM.Builtin.ID.exec(gas, exec_env)
end
@doc """
Arbitrary-precision exponentiation under modulo
"""
@spec mod_exp(EVM.Gas.t(), EVM.ExecEnv.t()) ::
{EVM.Gas.t(), EVM.SubState.t(), EVM.ExecEnv.t(), EVM.VM.output()}
def mod_exp(gas, exec_env) do
EVM.Builtin.ModExp.exec(gas, exec_env)
end
@doc """
Elliptic curve addition
"""
@spec ec_add(EVM.Gas.t(), EVM.ExecEnv.t()) ::
{EVM.Gas.t(), EVM.SubState.t(), EVM.ExecEnv.t(), EVM.VM.output()}
def ec_add(gas, exec_env) do
EVM.Builtin.EcAdd.exec(gas, exec_env)
end
@doc """
Elliptic curve multiplication
"""
@spec ec_mult(EVM.Gas.t(), EVM.ExecEnv.t()) ::
{EVM.Gas.t(), EVM.SubState.t(), EVM.ExecEnv.t(), EVM.VM.output()}
def ec_mult(gas, exec_env) do
EVM.Builtin.EcMult.exec(gas, exec_env)
end
@doc """
Elliptic curve pairing
"""
@spec ec_pairing(EVM.Gas.t(), EVM.ExecEnv.t()) ::
{EVM.Gas.t(), EVM.SubState.t(), EVM.ExecEnv.t(), EVM.VM.output()}
def ec_pairing(gas, exec_env) do
EVM.Builtin.EcPairing.exec(gas, exec_env)
end
end
|
apps/evm/lib/evm/builtin.ex
| 0.837188
| 0.461441
|
builtin.ex
|
starcoder
|
defmodule EctoSchemaStore.Proxy do
@moduledoc """
Generates proxy functions that pass off to the desired store module.
  When used with a schema module, this is usually not possible with import because
  the store cannot be compiled until the schema is compiled. But the schema
  cannot import until the store is compiled, so generally, the functions cannot
  be brought into the schema directly. This solution works around that and allows the functions
  to be public so that you can call the store functions on the schema module.
You cannot use multiple store modules at once as that each would override the others.
There is no requirement to place this on the schema module, it can be included in any
Elixir module.
Functions Included:
* `one`
* `all`
* `insert`
* `insert!`
* `insert_fields`
* `insert_fields!`
* `update`
* `update!`
* `update_fields`
* `update_fields!`
* `update_or_create`
* `update_or_create!`
* `update_or_create_fields`
* `update_or_create_fields!`
* `delete`
* `delete!`
* `delete_all`
* `generate`
* `generate!`
* `generate_default`
* `generate_default!`
* `exists?`
* `to_map`
* `count_records`
* `preload_assocs`
* `find_or_create`
* `find_or_create!`
* `find_or_create_fields`
* `find_or_create_fields!`
* `validate_insert`
* `validate_update`
* `transaction`
* `refresh`
```
defmodule Person do
use Ecto.Schema
    use EctoSchemaStore.Proxy, store: PersonStore
end
```
"""
@proxiable_functions [
one: 1, one: 2,
all: 0, all: 1, all: 2,
insert: 0, insert: 1, insert: 2,
insert!: 1, insert!: 2,
insert_fields: 1, insert_fields: 2,
insert_fields!: 1, insert_fields!: 2,
update: 0, update: 1, update: 2,
update!: 1, update!: 2,
update_fields: 1, update_fields: 2,
update_fields!: 1, update_fields!: 2,
update_or_create: 2, update_or_create: 3,
update_or_create!: 2, update_or_create!: 3,
update_or_create_fields: 2, update_or_create_fields!: 2,
delete: 1, delete: 2,
delete!: 1, delete!: 2,
delete_all: 0, delete_all: 1, delete_all: 2,
generate: 0, generate: 1, generate: 2,
generate!: 0, generate!: 1, generate!: 2,
generate_default: 0, generate_default: 1,
generate_default!: 0, generate_default!: 1,
exists?: 1,
to_map: 1,
count_records: 0, count_records: 1,
preload_assocs: 2,
find_or_create: 2, find_or_create: 3,
find_or_create!: 2, find_or_create!: 3,
find_or_create_fields: 2, find_or_create_fields!: 2,
validate_insert: 1, validate_insert: 2,
validate_update: 2, validate_update: 3,
transaction: 1,
refresh: 1
]
defmacro __using__(opts) do
module = Keyword.get opts, :store
functions = @proxiable_functions
setup =
if is_nil module do
quote do
@__store_module__ String.to_atom "#{__MODULE__}.Store"
end
else
quote do
@__store_module__ unquote(module)
end
end
store_access =
quote do
def store do
@__store_module__
end
end
functions =
for {function, arguments} <- functions do
generate(function, arguments)
end
[setup, store_access] ++ functions
end
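  # Emits a quoted def of the given name and arity that forwards its
  # arguments to the configured store module at runtime via apply/3.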
def generate(function, arguments)
def generate(function, 0) do
quote do
def unquote(function)() do
apply(@__store_module__, unquote(function), [])
end
end
end
def generate(function, 1) do
quote do
def unquote(function)(arg1) do
apply(@__store_module__, unquote(function), [arg1])
end
end
end
def generate(function, 2) do
quote do
def unquote(function)(arg1, arg2) do
apply(@__store_module__, unquote(function), [arg1, arg2])
end
end
end
def generate(function, 3) do
quote do
def unquote(function)(arg1, arg2, arg3) do
apply(@__store_module__, unquote(function), [arg1, arg2, arg3])
end
end
end
end
|
lib/ecto_schema_store/proxy.ex
| 0.698124
| 0.875148
|
proxy.ex
|
starcoder
|
defmodule Aecore.Chain.ChainState do
@moduledoc """
Module used for calculating the block and chain states.
The chain state is a map, telling us what amount of tokens each account has.
"""
alias Aecore.Structures.SignedTx
alias Aecore.Structures.Account
alias Aeutil.Serialization
alias Aeutil.Bits
require Logger
@type account_chainstate() :: %{binary() => map()}
@spec calculate_and_validate_chain_state!(list(), account_chainstate(), integer()) :: account_chainstate()
def calculate_and_validate_chain_state!(txs, chain_state, block_height) do
txs
|> Enum.reduce(chain_state, fn(transaction, chain_state) ->
apply_tx!(transaction, chain_state, block_height)
end)
|> update_chain_state_locked(block_height)
end
@spec apply_tx!(SignedTx.t(), map(), integer()) :: map()
def apply_tx!(transaction, chain_state, block_height) do
if SignedTx.is_coinbase?(transaction) do
apply_fun_on_map(chain_state, transaction.data.to_acc,
fn a ->
Account.tx_in!(a,
transaction.data,
block_height)
end)
else
if !SignedTx.validate(transaction) do
throw {:error, "Invalid transaction"}
end
chain_state = apply_fun_on_map(chain_state, transaction.data.from_acc,
fn a ->
Account.tx_out!(a,
transaction.data,
block_height)
end)
      address = transaction.data.to_acc

      case Map.get(chain_state, address, Account.empty()) do
        account = %Account{} ->
          Map.put(chain_state,
                  address,
                  Account.tx_in!(account, transaction.data, block_height))

        _ ->
          throw {:error, "Invalid contract type on chainstate"}
      end
end
end
@doc """
Builds a merkle tree from the passed chain state and
returns the root hash of the tree.
"""
@spec calculate_chain_state_hash(account_chainstate()) :: binary()
def calculate_chain_state_hash(chain_state) do
merkle_tree_data =
for {account, data} <- chain_state do
{account, Serialization.pack_binary(data)}
end
if Enum.empty?(merkle_tree_data) do
<<0::256>>
else
merkle_tree =
merkle_tree_data
|> List.foldl(:gb_merkle_trees.empty(), fn node, merkle_tree ->
:gb_merkle_trees.enter(elem(node, 0), elem(node, 1), merkle_tree)
end)
:gb_merkle_trees.root_hash(merkle_tree)
end
end
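  # Sums balances across all accounts, returning
  # {total_tokens, total_unlocked_tokens, total_locked_tokens}.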
  @spec calculate_total_tokens(account_chainstate()) :: {integer(), integer(), integer()}
def calculate_total_tokens(chain_state) do
Enum.reduce(chain_state, {0, 0, 0}, fn({_account, object}, acc) ->
case object do
account = %Account{} ->
{total_tokens, total_unlocked_tokens, total_locked_tokens} = acc
locked_tokens =
Enum.reduce(account.locked, 0, fn(%{amount: amount}, locked_sum) ->
locked_sum + amount
end)
new_total_tokens = total_tokens + account.balance + locked_tokens
new_total_unlocked_tokens = total_unlocked_tokens + account.balance
new_total_locked_tokens = total_locked_tokens + locked_tokens
{new_total_tokens, new_total_unlocked_tokens, new_total_locked_tokens}
_ ->
acc
end
end)
end
  @spec update_chain_state_locked(account_chainstate(), integer()) :: account_chainstate()
  def update_chain_state_locked(chain_state, block_height) do
Enum.reduce(chain_state, %{}, fn({address, object}, acc) ->
case object do
account = %Account{} ->
          Map.put(acc, address, Account.update_locked(account, block_height))
other ->
Map.put(acc, address, other)
end
end)
end
@spec bech32_encode(binary()) :: String.t()
def bech32_encode(bin) do
Bits.bech32_encode("cs", bin)
end
defp apply_fun_on_map(map, key, function) do
Map.put(map, key, function.(Map.get(map, key)))
end
end
|
apps/aecore/lib/aecore/chain/chain_state.ex
| 0.805594
| 0.557604
|
chain_state.ex
|
starcoder
|
defmodule Calendar.ISO do
@moduledoc """
  A calendar implementation that follows ISO 8601.

  This calendar implements the proleptic Gregorian calendar and
  is therefore compatible with the calendar used in most countries
  today. Proleptic means the Gregorian rules for leap years are
  applied for all time; consequently, dates before the year 1583,
  when the Gregorian calendar was adopted, may differ from those
  given by other calendars.
Note that while ISO 8601 allows times and datetimes to specify
24:00:00 as the zero hour of the next day, this notation is not
supported by Elixir.
"""
@behaviour Calendar
@unix_epoch 62_167_219_200
unix_start = (315_537_897_600 + @unix_epoch) * -1_000_000
unix_end = 315_569_519_999_999_999 - @unix_epoch * 1_000_000
@unix_range_microseconds unix_start..unix_end
@type year :: -9999..9999
@type month :: 1..12
@type day :: 1..31
@seconds_per_minute 60
@seconds_per_hour 60 * 60
# Note that this does _not_ handle leap seconds.
@seconds_per_day 24 * 60 * 60
@last_second_of_the_day @seconds_per_day - 1
@microseconds_per_second 1_000_000
@parts_per_day @seconds_per_day * @microseconds_per_second
@days_per_nonleap_year 365
@days_per_leap_year 366
@months_in_year 12
@doc false
def __match_date__ do
quote do
[
<<y1, y2, y3, y4, ?-, m1, m2, ?-, d1, d2>>,
y1 >= ?0 and y1 <= ?9 and y2 >= ?0 and y2 <= ?9 and y3 >= ?0 and y3 <= ?9 and y4 >= ?0 and
y4 <= ?9 and m1 >= ?0 and m1 <= ?9 and m2 >= ?0 and m2 <= ?9 and d1 >= ?0 and d1 <= ?9 and
d2 >= ?0 and d2 <= ?9,
{
(y1 - ?0) * 1000 + (y2 - ?0) * 100 + (y3 - ?0) * 10 + (y4 - ?0),
(m1 - ?0) * 10 + (m2 - ?0),
(d1 - ?0) * 10 + (d2 - ?0)
}
]
end
end
@doc false
def __match_time__ do
quote do
[
<<h1, h2, ?:, i1, i2, ?:, s1, s2>>,
h1 >= ?0 and h1 <= ?9 and h2 >= ?0 and h2 <= ?9 and i1 >= ?0 and i1 <= ?9 and i2 >= ?0 and
i2 <= ?9 and s1 >= ?0 and s1 <= ?9 and s2 >= ?0 and s2 <= ?9,
{
(h1 - ?0) * 10 + (h2 - ?0),
(i1 - ?0) * 10 + (i2 - ?0),
(s1 - ?0) * 10 + (s2 - ?0)
}
]
end
end
@doc """
Returns the `t:Calendar.iso_days/0` format of the specified date.
## Examples
iex> Calendar.ISO.naive_datetime_to_iso_days(0, 1, 1, 0, 0, 0, {0, 6})
{0, {0, 86400000000}}
iex> Calendar.ISO.naive_datetime_to_iso_days(2000, 1, 1, 12, 0, 0, {0, 6})
{730485, {43200000000, 86400000000}}
iex> Calendar.ISO.naive_datetime_to_iso_days(2000, 1, 1, 13, 0, 0, {0, 6})
{730485, {46800000000, 86400000000}}
iex> Calendar.ISO.naive_datetime_to_iso_days(-1, 1, 1, 0, 0, 0, {0, 6})
{-365, {0, 86400000000}}
"""
@doc since: "1.5.0"
@impl true
@spec naive_datetime_to_iso_days(
Calendar.year(),
Calendar.month(),
Calendar.day(),
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
) :: Calendar.iso_days()
def naive_datetime_to_iso_days(year, month, day, hour, minute, second, microsecond) do
{date_to_iso_days(year, month, day), time_to_day_fraction(hour, minute, second, microsecond)}
end
@doc """
Converts the `t:Calendar.iso_days/0` format to the datetime format specified by this calendar.
## Examples
iex> Calendar.ISO.naive_datetime_from_iso_days({0, {0, 86400}})
{0, 1, 1, 0, 0, 0, {0, 6}}
iex> Calendar.ISO.naive_datetime_from_iso_days({730_485, {0, 86400}})
{2000, 1, 1, 0, 0, 0, {0, 6}}
iex> Calendar.ISO.naive_datetime_from_iso_days({730_485, {43200, 86400}})
{2000, 1, 1, 12, 0, 0, {0, 6}}
iex> Calendar.ISO.naive_datetime_from_iso_days({-365, {0, 86400000000}})
{-1, 1, 1, 0, 0, 0, {0, 6}}
"""
@doc since: "1.5.0"
@spec naive_datetime_from_iso_days(Calendar.iso_days()) :: {
Calendar.year(),
Calendar.month(),
Calendar.day(),
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
}
@impl true
def naive_datetime_from_iso_days({days, day_fraction}) do
{year, month, day} = date_from_iso_days(days)
{hour, minute, second, microsecond} = time_from_day_fraction(day_fraction)
{year, month, day, hour, minute, second, microsecond}
end
@doc """
Returns the normalized day fraction of the specified time.
## Examples
iex> Calendar.ISO.time_to_day_fraction(0, 0, 0, {0, 6})
{0, 86400000000}
iex> Calendar.ISO.time_to_day_fraction(12, 34, 56, {123, 6})
{45296000123, 86400000000}
"""
@doc since: "1.5.0"
@impl true
@spec time_to_day_fraction(
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
) :: Calendar.day_fraction()
def time_to_day_fraction(0, 0, 0, {0, _}) do
{0, @parts_per_day}
end
def time_to_day_fraction(hour, minute, second, {microsecond, _}) do
combined_seconds = hour * @seconds_per_hour + minute * @seconds_per_minute + second
{combined_seconds * @microseconds_per_second + microsecond, @parts_per_day}
end
@doc """
Converts a day fraction to this Calendar's representation of time.
## Examples
iex> Calendar.ISO.time_from_day_fraction({1, 2})
{12, 0, 0, {0, 6}}
iex> Calendar.ISO.time_from_day_fraction({13, 24})
{13, 0, 0, {0, 6}}
"""
@doc since: "1.5.0"
@impl true
@spec time_from_day_fraction(Calendar.day_fraction()) ::
{Calendar.hour(), Calendar.minute(), Calendar.second(), Calendar.microsecond()}
def time_from_day_fraction({0, _}) do
{0, 0, 0, {0, 6}}
end
def time_from_day_fraction({parts_in_day, parts_per_day}) do
total_microseconds = divide_by_parts_per_day(parts_in_day, parts_per_day)
{hours, rest_microseconds1} =
div_mod(total_microseconds, @seconds_per_hour * @microseconds_per_second)
{minutes, rest_microseconds2} =
div_mod(rest_microseconds1, @seconds_per_minute * @microseconds_per_second)
{seconds, microseconds} = div_mod(rest_microseconds2, @microseconds_per_second)
{hours, minutes, seconds, {microseconds, 6}}
end
defp divide_by_parts_per_day(parts_in_day, @parts_per_day), do: parts_in_day
defp divide_by_parts_per_day(parts_in_day, parts_per_day),
do: div(parts_in_day * @parts_per_day, parts_per_day)
# Converts year, month, day to count of days since 0000-01-01.
@doc false
def date_to_iso_days(0, 1, 1) do
0
end
def date_to_iso_days(1970, 1, 1) do
719_528
end
def date_to_iso_days(year, month, day) when year in -9999..9999 do
true = day <= days_in_month(year, month)
days_in_previous_years(year) + days_before_month(month) + leap_day_offset(year, month) + day -
1
end
# Converts count of days since 0000-01-01 to {year, month, day} tuple.
@doc false
def date_from_iso_days(days) when days in -3_652_059..3_652_424 do
{year, day_of_year} = days_to_year(days)
extra_day = if leap_year?(year), do: 1, else: 0
{month, day_in_month} = year_day_to_year_date(extra_day, day_of_year)
{year, month, day_in_month + 1}
end
defp div_mod(int1, int2) do
div = div(int1, int2)
rem = int1 - div * int2
if rem >= 0 do
{div, rem}
else
{div - 1, rem + int2}
end
end
@doc """
Returns how many days there are in the given year-month.
## Examples
iex> Calendar.ISO.days_in_month(1900, 1)
31
iex> Calendar.ISO.days_in_month(1900, 2)
28
iex> Calendar.ISO.days_in_month(2000, 2)
29
iex> Calendar.ISO.days_in_month(2001, 2)
28
iex> Calendar.ISO.days_in_month(2004, 2)
29
iex> Calendar.ISO.days_in_month(2004, 4)
30
iex> Calendar.ISO.days_in_month(-1, 5)
31
"""
@spec days_in_month(year, month) :: 28..31
@impl true
def days_in_month(year, month)
def days_in_month(year, 2) do
if leap_year?(year), do: 29, else: 28
end
def days_in_month(_, month) when month in [4, 6, 9, 11], do: 30
def days_in_month(_, month) when month in 1..12, do: 31
@doc """
Returns how many months there are in the given year.
## Example
iex> Calendar.ISO.months_in_year(2004)
12
"""
@doc since: "1.7.0"
@impl true
@spec months_in_year(year) :: 12
def months_in_year(_year) do
@months_in_year
end
@doc """
Returns if the given year is a leap year.
## Examples
iex> Calendar.ISO.leap_year?(2000)
true
iex> Calendar.ISO.leap_year?(2001)
false
iex> Calendar.ISO.leap_year?(2004)
true
iex> Calendar.ISO.leap_year?(1900)
false
iex> Calendar.ISO.leap_year?(-4)
true
"""
@spec leap_year?(year) :: boolean()
@impl true
def leap_year?(year) when is_integer(year) do
rem(year, 4) === 0 and (rem(year, 100) !== 0 or rem(year, 400) === 0)
end
@doc """
Calculates the day of the week from the given `year`, `month`, and `day`.
It is an integer from 1 to 7, where 1 is Monday and 7 is Sunday.
## Examples
iex> Calendar.ISO.day_of_week(2016, 10, 31)
1
iex> Calendar.ISO.day_of_week(2016, 11, 1)
2
iex> Calendar.ISO.day_of_week(2016, 11, 2)
3
iex> Calendar.ISO.day_of_week(2016, 11, 3)
4
iex> Calendar.ISO.day_of_week(2016, 11, 4)
5
iex> Calendar.ISO.day_of_week(2016, 11, 5)
6
iex> Calendar.ISO.day_of_week(2016, 11, 6)
7
iex> Calendar.ISO.day_of_week(-99, 1, 31)
4
"""
@spec day_of_week(year, month, day) :: 1..7
@impl true
def day_of_week(year, month, day)
when is_integer(year) and is_integer(month) and is_integer(day) do
Integer.mod(date_to_iso_days(year, month, day) + 5, 7) + 1
end
@doc """
Converts the given time into a string.
## Examples
iex> Calendar.ISO.time_to_string(2, 2, 2, {2, 6})
"02:02:02.000002"
iex> Calendar.ISO.time_to_string(2, 2, 2, {2, 2})
"02:02:02.00"
iex> Calendar.ISO.time_to_string(2, 2, 2, {2, 0})
"02:02:02"
"""
@spec time_to_string(
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
) :: String.t()
@impl true
def time_to_string(hour, minute, second, microsecond) do
time_to_string(hour, minute, second, microsecond, :extended)
end
def time_to_string(hour, minute, second, {_, 0}, format) do
time_to_string_format(hour, minute, second, format)
end
def time_to_string(hour, minute, second, {microsecond, precision}, format) do
time_to_string_format(hour, minute, second, format) <>
"." <> (microsecond |> zero_pad(6) |> binary_part(0, precision))
end
defp time_to_string_format(hour, minute, second, :extended) do
zero_pad(hour, 2) <> ":" <> zero_pad(minute, 2) <> ":" <> zero_pad(second, 2)
end
defp time_to_string_format(hour, minute, second, :basic) do
zero_pad(hour, 2) <> zero_pad(minute, 2) <> zero_pad(second, 2)
end
@doc """
Converts the given date into a string.
## Examples
iex> Calendar.ISO.date_to_string(2015, 2, 28)
"2015-02-28"
iex> Calendar.ISO.date_to_string(2017, 8, 1)
"2017-08-01"
iex> Calendar.ISO.date_to_string(-99, 1, 31)
"-0099-01-31"
"""
@spec date_to_string(year, month, day) :: String.t()
@impl true
def date_to_string(year, month, day) do
date_to_string(year, month, day, :extended)
end
defp date_to_string(year, month, day, :extended) do
zero_pad(year, 4) <> "-" <> zero_pad(month, 2) <> "-" <> zero_pad(day, 2)
end
defp date_to_string(year, month, day, :basic) do
zero_pad(year, 4) <> zero_pad(month, 2) <> zero_pad(day, 2)
end
@doc """
Converts the datetime (without time zone) into a string.
## Examples
iex> Calendar.ISO.naive_datetime_to_string(2015, 2, 28, 1, 2, 3, {4, 6})
"2015-02-28 01:02:03.000004"
iex> Calendar.ISO.naive_datetime_to_string(2017, 8, 1, 1, 2, 3, {4, 5})
"2017-08-01 01:02:03.00000"
"""
@impl true
@spec naive_datetime_to_string(
year,
month,
day,
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
) :: String.t()
def naive_datetime_to_string(year, month, day, hour, minute, second, microsecond) do
date_to_string(year, month, day) <> " " <> time_to_string(hour, minute, second, microsecond)
end
@doc """
Converts the datetime (with time zone) into a string.
## Examples
iex> time_zone = "Europe/Berlin"
iex> Calendar.ISO.datetime_to_string(2017, 8, 1, 1, 2, 3, {4, 5}, time_zone, "CET", 3600, 0)
"2017-08-01 01:02:03.00000+01:00 CET Europe/Berlin"
iex> Calendar.ISO.datetime_to_string(2017, 8, 1, 1, 2, 3, {4, 5}, time_zone, "CDT", 3600, 3600)
"2017-08-01 01:02:03.00000+02:00 CDT Europe/Berlin"
iex> time_zone = "America/Los_Angeles"
iex> Calendar.ISO.datetime_to_string(2015, 2, 28, 1, 2, 3, {4, 5}, time_zone, "PST", -28800, 0)
"2015-02-28 01:02:03.00000-08:00 PST America/Los_Angeles"
iex> Calendar.ISO.datetime_to_string(2015, 2, 28, 1, 2, 3, {4, 5}, time_zone, "PDT", -28800, 3600)
"2015-02-28 01:02:03.00000-07:00 PDT America/Los_Angeles"
"""
@impl true
@spec datetime_to_string(
year,
month,
day,
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond(),
Calendar.time_zone(),
Calendar.zone_abbr(),
Calendar.utc_offset(),
Calendar.std_offset()
) :: String.t()
def datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
) do
date_to_string(year, month, day) <>
" " <>
time_to_string(hour, minute, second, microsecond) <>
offset_to_string(utc_offset, std_offset, time_zone) <>
zone_to_string(utc_offset, std_offset, zone_abbr, time_zone)
end
@doc """
Determines if the date given is valid according to the proleptic Gregorian calendar.
## Examples
iex> Calendar.ISO.valid_date?(2015, 2, 28)
true
iex> Calendar.ISO.valid_date?(2015, 2, 30)
false
iex> Calendar.ISO.valid_date?(-1, 12, 31)
true
iex> Calendar.ISO.valid_date?(-1, 12, 32)
false
"""
@doc since: "1.5.0"
@impl true
@spec valid_date?(year, month, day) :: boolean
def valid_date?(year, month, day) do
month in 1..12 and year in -9999..9999 and
(is_integer(day) and day >= 1 and day <= days_in_month(year, month))
end
@doc """
  Determines if the time given is valid according to the proleptic Gregorian calendar.
Note that leap seconds are considered valid, but the use of 24:00:00 as the
zero hour of the day is considered invalid.
## Examples
iex> Calendar.ISO.valid_time?(10, 50, 25, {3006, 6})
true
iex> Calendar.ISO.valid_time?(23, 59, 60, {0, 0})
true
iex> Calendar.ISO.valid_time?(24, 0, 0, {0, 0})
false
"""
@doc since: "1.5.0"
@impl true
@spec valid_time?(Calendar.hour(), Calendar.minute(), Calendar.second(), Calendar.microsecond()) ::
boolean
def valid_time?(hour, minute, second, {microsecond, precision}) do
hour in 0..23 and minute in 0..59 and second in 0..60 and microsecond in 0..999_999 and
precision in 0..6
end
@doc """
See `c:Calendar.day_rollover_relative_to_midnight_utc/0` for documentation.
"""
@doc since: "1.5.0"
@impl true
@spec day_rollover_relative_to_midnight_utc() :: {0, 1}
def day_rollover_relative_to_midnight_utc() do
{0, 1}
end
defp offset_to_string(utc, std, zone, format \\ :extended)
defp offset_to_string(0, 0, "Etc/UTC", _format), do: "Z"
defp offset_to_string(utc, std, _zone, format) do
total = utc + std
second = abs(total)
minute = second |> rem(3600) |> div(60)
hour = div(second, 3600)
format_offset(total, hour, minute, format)
end
defp format_offset(total, hour, minute, :extended) do
sign(total) <> zero_pad(hour, 2) <> ":" <> zero_pad(minute, 2)
end
defp format_offset(total, hour, minute, :basic) do
sign(total) <> zero_pad(hour, 2) <> zero_pad(minute, 2)
end
defp zone_to_string(0, 0, _abbr, "Etc/UTC"), do: ""
defp zone_to_string(_, _, abbr, zone), do: " " <> abbr <> " " <> zone
defp sign(total) when total < 0, do: "-"
defp sign(_), do: "+"
defp zero_pad(val, count) when val >= 0 do
num = Integer.to_string(val)
:binary.copy("0", max(count - byte_size(num), 0)) <> num
end
defp zero_pad(val, count) do
"-" <> zero_pad(-val, count)
end
## Helpers
@doc false
def from_unix(integer, unit) when is_integer(integer) do
total = System.convert_time_unit(integer, unit, :microsecond)
if total in @unix_range_microseconds do
microseconds = Integer.mod(total, @microseconds_per_second)
seconds = @unix_epoch + Integer.floor_div(total, @microseconds_per_second)
precision = precision_for_unit(unit)
{date, time} = iso_seconds_to_datetime(seconds)
{:ok, date, time, {microseconds, precision}}
else
{:error, :invalid_unix_time}
end
end
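  # Derives the number of sub-second decimal digits a unit carries,
  # e.g. :millisecond -> 3 and :microsecond -> 6 (capped at 6).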
defp precision_for_unit(unit) do
subsecond = div(System.convert_time_unit(1, :second, unit), 10)
precision_for_unit(subsecond, 0)
end
defp precision_for_unit(0, precision), do: precision
defp precision_for_unit(_, 6), do: 6
defp precision_for_unit(number, precision),
do: precision_for_unit(div(number, 10), precision + 1)
@doc false
def date_to_iso8601(year, month, day, format \\ :extended) do
date_to_string(year, month, day, format)
end
@doc false
def time_to_iso8601(hour, minute, second, microsecond, format \\ :extended) do
time_to_string(hour, minute, second, microsecond, format)
end
@doc false
def naive_datetime_to_iso8601(
year,
month,
day,
hour,
minute,
second,
microsecond,
format \\ :extended
) do
date_to_string(year, month, day, format) <>
"T" <> time_to_string(hour, minute, second, microsecond, format)
end
@doc false
def datetime_to_iso8601(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
_zone_abbr,
utc_offset,
std_offset,
format \\ :extended
) do
date_to_string(year, month, day, format) <>
"T" <>
time_to_string(hour, minute, second, microsecond, format) <>
offset_to_string(utc_offset, std_offset, time_zone, format)
end
@doc false
def parse_microsecond("." <> rest) do
case parse_microsecond(rest, 0, "") do
{"", 0, _} ->
:error
{microsecond, precision, rest} when precision in 1..6 ->
pad = String.duplicate("0", 6 - byte_size(microsecond))
{{String.to_integer(microsecond <> pad), precision}, rest}
{microsecond, _precision, rest} ->
{{String.to_integer(binary_part(microsecond, 0, 6)), 6}, rest}
end
end
def parse_microsecond("," <> rest) do
parse_microsecond("." <> rest)
end
def parse_microsecond(rest) do
{{0, 0}, rest}
end
defp parse_microsecond(<<head, tail::binary>>, precision, acc) when head in ?0..?9,
do: parse_microsecond(tail, precision + 1, <<acc::binary, head>>)
defp parse_microsecond(rest, precision, acc), do: {acc, precision, rest}
@doc false
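  # Accepts "Z", "+hh:mm"/"-hh:mm", "+hhmm"/"-hhmm" and "+hh"/"-hh";
  # "-00:00" is explicitly rejected (it denotes an unknown local offset
  # in RFC 3339).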
def parse_offset(""), do: {nil, ""}
def parse_offset("Z"), do: {0, ""}
def parse_offset("-00:00"), do: :error
def parse_offset(<<?+, hour::2-bytes, ?:, min::2-bytes, rest::binary>>),
do: parse_offset(1, hour, min, rest)
def parse_offset(<<?-, hour::2-bytes, ?:, min::2-bytes, rest::binary>>),
do: parse_offset(-1, hour, min, rest)
def parse_offset(<<?+, hour::2-bytes, min::2-bytes, rest::binary>>),
do: parse_offset(1, hour, min, rest)
def parse_offset(<<?-, hour::2-bytes, min::2-bytes, rest::binary>>),
do: parse_offset(-1, hour, min, rest)
def parse_offset(<<?+, hour::2-bytes, rest::binary>>), do: parse_offset(1, hour, "00", rest)
def parse_offset(<<?-, hour::2-bytes, rest::binary>>), do: parse_offset(-1, hour, "00", rest)
def parse_offset(_), do: :error
defp parse_offset(sign, hour, min, rest) do
with {hour, ""} when hour < 24 <- Integer.parse(hour),
{min, ""} when min < 60 <- Integer.parse(min) do
{(hour * 60 + min) * 60 * sign, rest}
else
_ -> :error
end
end
@doc false
def iso_days_to_unit({days, {parts, ppd}}, unit) do
day_microseconds = days * @parts_per_day
microseconds = divide_by_parts_per_day(parts, ppd)
System.convert_time_unit(day_microseconds + microseconds, :microsecond, unit)
end
@doc false
def add_day_fraction_to_iso_days({days, {parts, ppd}}, add, ppd) do
normalize_iso_days(days, parts + add, ppd)
end
def add_day_fraction_to_iso_days({days, {parts, ppd}}, add, add_ppd) do
parts = parts * add_ppd
add = add * ppd
gcd = Integer.gcd(ppd, add_ppd)
result_parts = div(parts + add, gcd)
result_ppd = div(ppd * add_ppd, gcd)
normalize_iso_days(days, result_parts, result_ppd)
end
defp normalize_iso_days(days, parts, ppd) do
days_offset = div(parts, ppd)
parts = rem(parts, ppd)
if parts < 0 do
{days + days_offset - 1, {parts + ppd, ppd}}
else
{days + days_offset, {parts, ppd}}
end
end
# Note that this function does not add the extra leap day for a leap year.
# If you want to add that leap day when appropriate,
# add the result of leap_day_offset/2 to the result of days_before_month/1.
defp days_before_month(1), do: 0
defp days_before_month(2), do: 31
defp days_before_month(3), do: 59
defp days_before_month(4), do: 90
defp days_before_month(5), do: 120
defp days_before_month(6), do: 151
defp days_before_month(7), do: 181
defp days_before_month(8), do: 212
defp days_before_month(9), do: 243
defp days_before_month(10), do: 273
defp days_before_month(11), do: 304
defp days_before_month(12), do: 334
defp leap_day_offset(_year, month) when month < 3, do: 0
defp leap_day_offset(year, _month) do
if leap_year?(year), do: 1, else: 0
end
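  # Estimates the year by assuming 365-day years, then walks the estimate
  # up or down until the day count falls within that year's span.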
defp days_to_year(days) when days < 0 do
year_estimate = -div(-days, @days_per_nonleap_year) - 1
{year, days_before_year} =
days_to_year(year_estimate, days, days_to_end_of_epoch(year_estimate))
leap_year_pad = if leap_year?(year), do: 1, else: 0
{year, leap_year_pad + @days_per_nonleap_year + days - days_before_year}
end
defp days_to_year(days) do
year_estimate = div(days, @days_per_nonleap_year)
{year, days_before_year} =
days_to_year(year_estimate, days, days_in_previous_years(year_estimate))
{year, days - days_before_year}
end
defp days_to_year(year, days1, days2) when year < 0 and days1 >= days2 do
days_to_year(year + 1, days1, days_to_end_of_epoch(year + 1))
end
defp days_to_year(year, days1, days2) when year >= 0 and days1 < days2 do
days_to_year(year - 1, days1, days_in_previous_years(year - 1))
end
defp days_to_year(year, _days1, days2) do
{year, days2}
end
defp days_to_end_of_epoch(year) when year < 0 do
previous_year = year + 1
div(previous_year, 4) - div(previous_year, 100) + div(previous_year, 400) +
previous_year * @days_per_nonleap_year
end
defp days_in_previous_years(0), do: 0
defp days_in_previous_years(year) do
previous_year = year - 1
Integer.floor_div(previous_year, 4) - Integer.floor_div(previous_year, 100) +
Integer.floor_div(previous_year, 400) + previous_year * @days_per_nonleap_year +
@days_per_leap_year
end
# Note that 0 is the first day of the month.
defp year_day_to_year_date(_extra_day, day_of_year) when day_of_year < 31 do
{1, day_of_year}
end
defp year_day_to_year_date(extra_day, day_of_year) when day_of_year < 59 + extra_day do
{2, day_of_year - 31}
end
defp year_day_to_year_date(extra_day, day_of_year) when day_of_year < 90 + extra_day do
{3, day_of_year - (59 + extra_day)}
end
defp year_day_to_year_date(extra_day, day_of_year) when day_of_year < 120 + extra_day do
{4, day_of_year - (90 + extra_day)}
end
defp year_day_to_year_date(extra_day, day_of_year) when day_of_year < 151 + extra_day do
{5, day_of_year - (120 + extra_day)}
end
defp year_day_to_year_date(extra_day, day_of_year) when day_of_year < 181 + extra_day do
{6, day_of_year - (151 + extra_day)}
end
defp year_day_to_year_date(extra_day, day_of_year) when day_of_year < 212 + extra_day do
{7, day_of_year - (181 + extra_day)}
end
defp year_day_to_year_date(extra_day, day_of_year) when day_of_year < 243 + extra_day do
{8, day_of_year - (212 + extra_day)}
end
defp year_day_to_year_date(extra_day, day_of_year) when day_of_year < 273 + extra_day do
{9, day_of_year - (243 + extra_day)}
end
defp year_day_to_year_date(extra_day, day_of_year) when day_of_year < 304 + extra_day do
{10, day_of_year - (273 + extra_day)}
end
defp year_day_to_year_date(extra_day, day_of_year) when day_of_year < 334 + extra_day do
{11, day_of_year - (304 + extra_day)}
end
defp year_day_to_year_date(extra_day, day_of_year) do
{12, day_of_year - (334 + extra_day)}
end
defp iso_seconds_to_datetime(seconds) do
{days, rest_seconds} = div_mod(seconds, @seconds_per_day)
date = date_from_iso_days(days)
time = seconds_to_time(rest_seconds)
{date, time}
end
defp seconds_to_time(seconds) when seconds in 0..@last_second_of_the_day do
{hour, rest_seconds} = div_mod(seconds, @seconds_per_hour)
{minute, second} = div_mod(rest_seconds, @seconds_per_minute)
{hour, minute, second}
end
end
|
lib/elixir/lib/calendar/iso.ex
| 0.918116
| 0.598165
|
iso.ex
|
starcoder
|
defmodule Supervisor do
@moduledoc ~S"""
A behaviour module for implementing supervisors.
A supervisor is a process which supervises other processes, which we
refer to as *child processes*. Supervisors are used to build a hierarchical
process structure called a *supervision tree*. Supervision trees provide
  fault-tolerance and encapsulate how our applications start and shut down.
A supervisor may be started directly with a list of children via
`start_link/2` or you may define a module-based supervisor that implements
the required callbacks. The sections below use `start_link/2` to start
supervisors in most examples, but it also includes a specific section
on module-based ones.
## Examples
In order to start a supervisor, we need to first define a child process
that will be supervised. As an example, we will define a GenServer that
represents a stack:
defmodule Stack do
use GenServer
def start_link(state) do
GenServer.start_link(__MODULE__, state, name: __MODULE__)
end
## Callbacks
def init(stack) do
{:ok, stack}
end
def handle_call(:pop, _from, [h | t]) do
{:reply, h, t}
end
def handle_cast({:push, h}, t) do
{:noreply, [h | t]}
end
end
The stack is a small wrapper around lists. It allows us to put
an element on the top of the stack, by prepending to the list,
and to get the top of the stack by pattern matching.
We can now start a supervisor that will start and supervise our
stack process. The first step is to define a list of **child
specifications** that control how each child behaves. Each child
specification is a map, as shown below:
children = [
# The Stack is a child started via Stack.start_link([:hello])
%{
id: Stack,
start: {Stack, :start_link, [[:hello]]}
}
]
# Now we start the supervisor with the children and a strategy
{:ok, pid} = Supervisor.start_link(children, strategy: :one_for_one)
# After started, we can query the supervisor for information
Supervisor.count_children(pid)
#=> %{active: 1, specs: 1, supervisors: 0, workers: 1}
Notice that when starting the GenServer, we are registering it
with name `Stack`, which allows us to call it directly and get
what is on the stack:
GenServer.call(Stack, :pop)
#=> :hello
GenServer.cast(Stack, {:push, :world})
#=> :ok
GenServer.call(Stack, :pop)
#=> :world
However, there is a bug in our stack server. If we call `:pop` and
the stack is empty, it is going to crash because no clause matches:
GenServer.call(Stack, :pop)
** (exit) exited in: GenServer.call(Stack, :pop, 5000)
Luckily, since the server is being supervised by a supervisor, the
supervisor will automatically start a new one, with the initial stack
of `[:hello]`:
GenServer.call(Stack, :pop)
#=> :hello
Supervisors support different strategies; in the example above, we
have chosen `:one_for_one`. Furthermore, each supervisor can have many
workers and supervisors as children, each of them with their specific
configuration, shutdown values, and restart strategies.
The rest of this document will cover how child processes are started,
how they can be specified, different supervision strategies and more.
## Start and shutdown
When the supervisor starts, it traverses all child specifications and
then starts each child in the order they are defined. This is done by
calling the function defined under the `:start` key in the child
specification and typically defaults to `start_link/1`.
The `start_link/1` function (or a custom start function) is then called for each child process.
The `start_link/1` function must return `{:ok, pid}` where `pid` is the
process identifier of a new process that is linked to the supervisor.
The child process usually starts its work by executing the `init/1`
callback. Generally speaking, the `init` callback is where we initialize
and configure the child process.
The shutdown process happens in reverse order.
When a supervisor shuts down, it terminates all children in the opposite
order they are listed. The termination happens by sending a shutdown exit
signal, via `Process.exit(child_pid, :shutdown)`, to the child process and
then awaiting for a time interval for the child process to terminate. This
interval defaults to 5000 milliseconds. If the child process does not
terminate in this interval, the supervisor abruptly terminates the child
with reason `:brutal_kill`. The shutdown time can be configured in the
child specification which is fully detailed in the next section.
If the child process is not trapping exits, it will shutdown immediately
when it receives the first exit signal. If the child process is trapping
exits, then the `terminate` callback is invoked, and the child process
must terminate in a reasonable time interval before being abruptly
terminated by the supervisor.
In other words, if it is important that a process cleans up after itself
when your application or the supervision tree is shutting down, then
this process must trap exits and its child specification should specify
the proper `:shutdown` value, ensuring it terminates within a reasonable
interval.
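For example, a worker that needs to run cleanup on shutdown could trap
exits on initialization and define a `terminate/2` callback (a sketch,
not part of the stack example above):
    def init(arg) do
      Process.flag(:trap_exit, true)
      {:ok, arg}
    end
    def terminate(_reason, _state) do
      # release any held resources here
      :ok
    end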
Now that we understand the start and shutdown process, let's take a
complete look at all of the options provided in the child specification.
## Child specification
The child specification describes how the supervisor starts, shuts down, and
restarts child processes.
The child specification contains 5 keys. The first two are required
and the remaining ones are optional:
* `:id` - a value used to identify the child specification
internally by the supervisor; defaults to the given module.
In case of conflicting `:id`, the supervisor will refuse
to initialize and require explicit IDs. This key is required.
* `:start` - a tuple with the module-function-args to be invoked
to start the child process. This key is required.
* `:restart` - an atom that defines when a terminated child process
should be restarted (see the "Restart values" section below).
This key is optional and defaults to `:permanent`.
* `:shutdown` - an atom that defines how a child process should be
terminated (see the "Shutdown values" section below). This key
is optional and defaults to `5000` if the type is `:worker` or
`:infinity` if the type is `:supervisor`.
* `:type` - if the child process is a `:worker` or a `:supervisor`.
This key is optional and defaults to `:worker`.
There is a sixth key, called `:modules`, which is rarely changed and
it is set automatically based on the value in `:start`.
Let's understand what the `:shutdown` and `:restart` options control.
### Shutdown values (:shutdown)
The following shutdown values are supported in the `:shutdown` option:
* `:brutal_kill` - the child process is unconditionally and immediately
terminated using `Process.exit(child, :kill)`.
* any integer >= 0 - the amount of time in milliseconds that the
supervisor will wait for children to terminate after emitting a
`Process.exit(child, :shutdown)` signal. If the child process is
not trapping exits, the initial `:shutdown` signal will terminate
the child process immediately. If the child process is trapping
exits, it has the given amount of time in milliseconds to terminate.
If it doesn't terminate within the specified time, the child process
is unconditionally terminated by the supervisor via
`Process.exit(child, :kill)`.
* `:infinity` - works as an integer except the supervisor will wait
indefinitely for the child to terminate. If the child process is a
supervisor, the recommended value is `:infinity` to give the supervisor
and its children enough time to shutdown. This option can be used with
regular workers but doing so is discouraged and requires extreme care.
If not used carefully and the child process does not terminate, it means
your application will never terminate as well.
### Restart values (:restart)
The `:restart` option controls what the supervisor should consider to
be a successful termination or not. If the termination is successful,
the supervisor won't restart the child. If the child process crashed,
the supervisor will start a new one.
The following restart values are supported in the `:restart` option:
* `:permanent` - the child process is always restarted.
* `:temporary` - the child process is never restarted, regardless
of the supervision strategy.
* `:transient` - the child process is restarted only if it
terminates abnormally, i.e., with an exit reason other than
`:normal`, `:shutdown` or `{:shutdown, term}`.
For a more complete understanding of the exit reasons and their
impact, see the "Exit reasons and restarts" section.
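As an illustration, the stack child could be made `:transient` by adding
the `:restart` key to the specification shown earlier:
    %{
      id: Stack,
      start: {Stack, :start_link, [[:hello]]},
      restart: :transient
    }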
## child_spec/1
When starting a supervisor, we pass a list of child specifications. Those
specifications are maps that tell how the supervisor should start, stop and
restart each of its children:
%{
id: Stack,
start: {Stack, :start_link, [[:hello]]}
}
The map above defines a supervisor with `:id` of `Stack` that is started
by calling `Stack.start_link([:hello])`.
However, specifying the child specification for each child as a map can be
quite error prone, as we may change the Stack implementation and forget to
update its specification. That's why Elixir allows you to pass a tuple with
the module name and the `start_link` argument instead of the specification:
children = [
{Stack, [:hello]}
]
The supervisor will then invoke `Stack.child_spec([:hello])` to retrieve a
child specification. Now the `Stack` module is responsible for building its
own specification. By default, `use GenServer` defines a `Stack.child_spec/1`
function which returns the same child specification we had before:
%{
id: Stack,
start: {Stack, :start_link, [[:hello]]}
}
It is also possible to simply pass the `Stack` module as a child:
children = [
Stack
]
When only the module name is given, it is equivalent to `{Stack, []}`. In this
case, we will end up with a child specification that looks like this:
%{
id: Stack,
start: {Stack, :start_link, [[]]}
}
By replacing the map specification with `{Stack, [:hello]}` or `Stack`, we keep
the child specification encapsulated in the Stack module, using the default
implementation defined by `use GenServer`. We can now share our `Stack` worker
with other developers and they can add it directly to their supervision tree
without worrying about the low-level details of the worker.
If you need to access or modify how a worker or a supervisor runs, you can use
the `Supervisor.child_spec/2` function. For example, to run the stack with a
different `:id` and a `:shutdown` value of 10 seconds (10_000 milliseconds):
children = [
Supervisor.child_spec({Stack, [:hello]}, id: MyStack, shutdown: 10_000)
]
The call to `Supervisor.child_spec/2` above will return the following specification:
%{
id: MyStack,
start: {Stack, :start_link, [[:hello]]},
shutdown: 10_000
}
You may also configure the child specification in the Stack module itself to
use a different `:id` or `:shutdown` value by passing options to `use GenServer`:
defmodule Stack do
use GenServer, id: MyStack, shutdown: 10_000
The options above will customize the `Stack.child_spec/1` function defined
by `use GenServer`. It accepts the same options as the `Supervisor.child_spec/2`
function.
You may also completely override the `child_spec/1` function in the Stack module
and return your own child specification. Note there is no guarantee the `child_spec/1`
function will be called by the Supervisor process, as other processes may invoke
it to retrieve the child specification before reaching the supervisor.
## Exit reasons and restarts
A supervisor restarts a child process depending on its `:restart`
configuration. For example, when `:restart` is set to `:transient`, the
supervisor does not restart the child in case it exits with reason `:normal`,
`:shutdown` or `{:shutdown, term}`.
So one may ask: which exit reason should I choose when exiting? There are
three options:
* `:normal` - in such cases, the exit won't be logged, there is no restart
in transient mode, and linked processes do not exit
* `:shutdown` or `{:shutdown, term}` - in such cases, the exit won't be
logged, there is no restart in transient mode, and linked processes exit
with the same reason unless they're trapping exits
* any other term - in such cases, the exit will be logged, there are
restarts in transient mode, and linked processes exit with the same
reason unless they're trapping exits
Notice that a supervisor that reaches its maximum restart intensity will exit
with the `:shutdown` reason. In this case the supervisor will only be restarted if its
child specification was defined with the `:restart` option set to `:permanent`
(the default).
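For example, a `:transient` GenServer that has finished its work can stop
itself without triggering a restart by returning a `:shutdown` stop tuple
from one of its callbacks (a sketch, not part of the stack example):
    def handle_info(:work_done, state) do
      {:stop, :shutdown, state}
    end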
## Module-based supervisors
In the example above, a supervisor was started by passing the supervision
structure to `start_link/2`. However, supervisors can also be created by
explicitly defining a supervision module:
defmodule MyApp.Supervisor do
# Automatically defines child_spec/1
use Supervisor
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
def init(_arg) do
children = [
{Stack, [:hello]}
]
Supervisor.init(children, strategy: :one_for_one)
end
end
The difference between the two approaches is that a module-based
supervisor gives you more direct control over how the supervisor
is initialized. Instead of calling `Supervisor.start_link/2` with
a list of children that are automatically initialized, we have
defined a supervisor alongside its `c:init/1` callback and manually
initialized the children by calling `Supervisor.init/2`, passing
the same arguments we would have given to `start_link/2`.
You may want to use a module-based supervisor if:
* You need to perform some particular action on supervisor
initialization, like setting up an ETS table.
* You want to perform partial hot-code swapping of the
tree. The module-based approach allows you to add and remove
children on a case-by-case basis.
Note `use Supervisor` defines a `child_spec/1` function, allowing
the defined module itself to be put under a supervision tree.
The generated `child_spec/1` can be customized with the following
options:
* `:id` - the child specification id, defaults to the current module
* `:start` - how to start the child process (defaults to calling `__MODULE__.start_link/1`)
* `:restart` - when the supervisor should be restarted, defaults to `:permanent`
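For example, a module-based supervisor that should not be restarted by
its own parent could be defined as follows (a sketch; the module name is
made up):
    defmodule MyApp.TemporarySupervisor do
      use Supervisor, restart: :temporary
      def start_link(arg) do
        Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
      end
      def init(_arg) do
        Supervisor.init([], strategy: :one_for_one)
      end
    end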
## `start_link/2`, `init/2`, and strategies
So far we have started the supervisor passing a single child as a tuple
as well as a strategy called `:one_for_one`:
Supervisor.start_link([
{Stack, [:hello]}
], strategy: :one_for_one)
or from inside the `c:init/1` callback:
Supervisor.init([
{Stack, [:hello]}
], strategy: :one_for_one)
Although we have mentioned that the supervisor automatically expands
`{Stack, [:hello]}` to a child specification by calling
`Stack.child_spec([:hello])`, we haven't formally defined all of the
arguments accepted by `start_link/2` and `init/2`. Let's rectify that
now.
The first argument given to `start_link/2` is a list of children which may
be either:
* a map representing the child specification itself - as outlined in the
"Child specification" section
* a tuple with a module as first element and the start argument as second -
such as `{Stack, [:hello]}`. In this case, `Stack.child_spec([:hello])`
is called to retrieve the child specification
* a module - such as `Stack`. In this case, `Stack.child_spec([])`
is called to retrieve the child specification
The second argument is a keyword list of options:
* `:strategy` - the restart strategy option. It can be either
`:one_for_one`, `:rest_for_one` or `:one_for_all`. See the
"Strategies" section.
* `:max_restarts` - the maximum number of restarts allowed in
a time frame. Defaults to `3`.
* `:max_seconds` - the time frame in which `:max_restarts` applies.
Defaults to `5`.
The `:strategy` option is required and by default a maximum of 3 restarts
is allowed within 5 seconds.
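For example, to allow up to 5 restarts within 10 seconds (an illustration
of the options above):
    Supervisor.start_link(
      [{Stack, [:hello]}],
      strategy: :one_for_one,
      max_restarts: 5,
      max_seconds: 10
    )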
### Strategies
Supervisors support different supervision strategies (through the
`:strategy` option, as seen above):
* `:one_for_one` - if a child process terminates, only that
process is restarted.
* `:one_for_all` - if a child process terminates, all other child
processes are terminated and then all child processes (including
the terminated one) are restarted.
* `:rest_for_one` - if a child process terminates, the "rest" of
the child processes, i.e., the child processes after the terminated
one in start order, are terminated. Then the terminated child
process and the rest of the child processes are restarted.
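For example, with `:rest_for_one` the order of children matters: if the
first agent below crashes, the second is also terminated and restarted
(a sketch using unnamed agents to avoid name registration clashes):
    children = [
      Supervisor.child_spec({Agent, fn -> [] end}, id: :agent_a),
      Supervisor.child_spec({Agent, fn -> [] end}, id: :agent_b)
    ]
    Supervisor.start_link(children, strategy: :rest_for_one)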
There is also a deprecated strategy called `:simple_one_for_one`, which
has been replaced by `DynamicSupervisor`. The `:simple_one_for_one`
supervisor was similar to `:one_for_one`, but was better suited for
dynamically attaching children. Many functions in this module behave
slightly differently when this strategy is used. See the
`DynamicSupervisor` module for more information and migration strategies.
## Name registration
A supervisor is bound to the same name registration rules as a `GenServer`.
Read more about these rules in the documentation for `GenServer`.
"""
@doc false
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
import Supervisor.Spec
@behaviour Supervisor
@doc """
Returns a specification to start this module under a supervisor.
See `Supervisor`.
"""
def child_spec(arg) do
default = %{
id: __MODULE__,
start: {__MODULE__, :start_link, [arg]},
type: :supervisor
}
Supervisor.child_spec(default, unquote(Macro.escape(opts)))
end
defoverridable child_spec: 1
@doc false
def init(arg)
end
end
@doc """
Callback invoked to start the supervisor and during hot code upgrades.
Developers typically invoke `Supervisor.init/2` at the end of their
init callback to return the proper supervision flags.
"""
@callback init(args :: term) ::
{:ok, {:supervisor.sup_flags(), [:supervisor.child_spec()]}}
| :ignore
@typedoc "Return values of `start_link` functions"
@type on_start ::
{:ok, pid}
| :ignore
| {:error, {:already_started, pid} | {:shutdown, term} | term}
@typedoc "Return values of `start_child` functions"
@type on_start_child ::
{:ok, child}
| {:ok, child, info :: term}
| {:error, {:already_started, child} | :already_present | term}
@type child :: pid | :undefined
@typedoc "The Supervisor name"
@type name :: atom | {:global, term} | {:via, module, term}
@typedoc "Option values used by the `start*` functions"
@type option :: {:name, name} | init_option()
@typedoc "Options used by the `start*` functions"
@type options :: [option, ...]
@typedoc "The supervisor reference"
@type supervisor :: pid | name | {atom, node}
@typedoc "Options given to `start_link/2` and `init/2`"
@type init_option ::
{:strategy, strategy}
| {:max_restarts, non_neg_integer}
| {:max_seconds, pos_integer}
@typedoc "Supported strategies"
@type strategy :: :one_for_one | :one_for_all | :rest_for_one
# Note we have inlined all types for readability
@typedoc "The supervisor specification"
@type child_spec :: %{
required(:id) => term(),
required(:start) => {module(), atom(), [term()]},
optional(:restart) => :permanent | :transient | :temporary,
optional(:shutdown) => :brutal_kill | non_neg_integer() | :infinity,
optional(:type) => :worker | :supervisor,
optional(:modules) => [module()] | :dynamic
}
@doc """
Starts a supervisor with the given children.
The `children` argument is a list of modules, 2-element tuples with module
and arguments, or maps with the child specification. A strategy is required
to be provided through the `:strategy` option. See
"start_link/2, init/2 and strategies" for examples and other options.
The options can also be used to register a supervisor name.
The supported values are described under the "Name registration"
section in the `GenServer` module docs.
If the supervisor and its child processes are successfully spawned
(if the start function of each child process returns `{:ok, child}`,
`{:ok, child, info}`, or `:ignore`) this function returns
`{:ok, pid}`, where `pid` is the PID of the supervisor. If the supervisor
is given a name and a process with the specified name already exists,
the function returns `{:error, {:already_started, pid}}`, where `pid`
is the PID of that process.
If the start function of any of the child processes fails or returns an error
tuple or an erroneous value, the supervisor first terminates with reason
`:shutdown` all the child processes that have already been started, and then
terminates itself and returns `{:error, {:shutdown, reason}}`.
Note that a supervisor started with this function is linked to the parent
process and exits not only on crashes but also if the parent process exits
with `:normal` reason.
"""
@spec start_link([:supervisor.child_spec() | {module, term} | module], options) :: on_start
def start_link(children, options) when is_list(children) do
{sup_opts, start_opts} = Keyword.split(options, [:strategy, :max_seconds, :max_restarts])
start_link(Supervisor.Default, init(children, sup_opts), start_opts)
end
@doc """
Receives a list of children to initialize and a set of options.
This is typically invoked at the end of the `c:init/1` callback of
module-based supervisors. See the sections "Module-based supervisors"
and "start_link/2, init/2 and strategies" in the module
documentation for more information.
This function returns a tuple containing the supervisor
flags and child specifications.
## Examples
def init(_arg) do
Supervisor.init([
{Stack, [:hello]}
], strategy: :one_for_one)
end
## Options
* `:strategy` - the restart strategy option. It can be either
`:one_for_one`, `:rest_for_one`, `:one_for_all`, or the deprecated
`:simple_one_for_one`.
* `:max_restarts` - the maximum number of restarts allowed in
a time frame. Defaults to `3`.
* `:max_seconds` - the time frame in which `:max_restarts` applies.
Defaults to `5`.
The `:strategy` option is required and by default a maximum of 3 restarts
is allowed within 5 seconds. Check the `Supervisor` module for a detailed
description of the available strategies.
"""
# TODO: Warn if simple_one_for_one strategy is used on Elixir v1.8.
@since "1.5.0"
@spec init([:supervisor.child_spec() | {module, term} | module], [init_option]) :: {:ok, tuple}
def init(children, options) when is_list(children) and is_list(options) do
unless strategy = options[:strategy] do
raise ArgumentError, "expected :strategy option to be given"
end
intensity = Keyword.get(options, :max_restarts, 3)
period = Keyword.get(options, :max_seconds, 5)
flags = %{strategy: strategy, intensity: intensity, period: period}
{:ok, {flags, Enum.map(children, &init_child/1)}}
end
defp init_child(module) when is_atom(module) do
init_child({module, []})
end
defp init_child({module, arg}) when is_atom(module) do
try do
module.child_spec(arg)
rescue
e in UndefinedFunctionError ->
case __STACKTRACE__ do
[{^module, :child_spec, [^arg], _} | _] ->
raise ArgumentError, child_spec_error(module)
stack ->
reraise e, stack
end
end
end
defp init_child(map) when is_map(map) do
map
end
defp init_child({_, _, _, _, _, _} = tuple) do
tuple
end
defp init_child(other) do
raise ArgumentError, """
supervisors expect each child to be one of:
* a module
* a {module, arg} tuple
* a child specification as a map with at least the :id and :start fields
* or a tuple with 6 elements generated by Supervisor.Spec (deprecated)
Got: #{inspect(other)}
"""
end
defp child_spec_error(module) do
if Code.ensure_loaded?(module) do
"""
The module #{inspect(module)} was given as a child to a supervisor
but it does not implement child_spec/1.
If you own the given module, please define a child_spec/1 function
that receives an argument and returns a child specification as a map.
For example:
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :worker,
restart: :permanent,
shutdown: 500
}
end
Note that "use Agent", "use GenServer" and so on automatically define
this function for you.
However, if you don't own the given module and it doesn't implement
child_spec/1, instead of passing the module name directly as a supervisor
child, you will have to pass a child specification as a map:
%{
id: #{inspect(module)},
start: {#{inspect(module)}, :start_link, [arg1, arg2]}
}
See the Supervisor documentation for more information.
"""
else
"The module #{inspect(module)} was given as a child to a supervisor but it does not exist."
end
end
@doc """
Builds and overrides a child specification.
Similar to `start_link/2` and `init/2`, it expects a
`module`, `{module, arg}` or a map as the child specification.
If a module is given, the specification is retrieved by calling
`module.child_spec(arg)`.
After the child specification is retrieved, the fields on `config`
are directly applied on the child spec. If `config` has keys that
do not map to any child specification field, an error is raised.
See the "Child specification" section in the module documentation
for all of the available keys for overriding.
## Examples
This function is often used to set an `:id` option when
the same module needs to be started multiple times in the
supervision tree:
Supervisor.child_spec({Agent, fn -> :ok end}, id: {Agent, 1})
#=> %{id: {Agent, 1},
#=> start: {Agent, :start_link, [fn -> :ok end]}}
"""
@spec child_spec(child_spec() | {module, arg :: term} | module, keyword) :: child_spec()
def child_spec(module_or_map, overrides)
def child_spec({_, _, _, _, _, _} = tuple, _overrides) do
raise ArgumentError,
"old tuple-based child specification #{inspect(tuple)} " <>
"is not supported in Supervisor.child_spec/2"
end
def child_spec(module_or_map, overrides) do
Enum.reduce(overrides, init_child(module_or_map), fn
{key, value}, acc when key in [:id, :start, :restart, :shutdown, :type, :modules] ->
Map.put(acc, key, value)
{key, _value}, _acc ->
raise ArgumentError, "unknown key #{inspect(key)} in child specification override"
end)
end
@doc """
Starts a module-based supervisor process with the given `module` and `arg`.
To start the supervisor, the `c:init/1` callback will be invoked in the given
`module`, with `arg` as its argument. The `c:init/1` callback must return a
supervisor specification which can be created with the help of the `init/2`
function.
If the `c:init/1` callback returns `:ignore`, this function returns
`:ignore` as well and the supervisor terminates with reason `:normal`.
If it fails or returns an incorrect value, this function returns
`{:error, term}` where `term` is a term with information about the
error, and the supervisor terminates with reason `term`.
The `:name` option can also be given in order to register a supervisor
name, the supported values are described in the "Name registration"
section in the `GenServer` module docs.
"""
# It is important to keep the 2-arity spec because it is a catch-all
# for start_link(children, options).
@spec start_link(module, term) :: on_start
@spec start_link(module, term, GenServer.options()) :: on_start
def start_link(module, arg, options \\ []) when is_list(options) do
case Keyword.get(options, :name) do
nil ->
:supervisor.start_link(module, arg)
atom when is_atom(atom) ->
:supervisor.start_link({:local, atom}, module, arg)
{:global, _term} = tuple ->
:supervisor.start_link(tuple, module, arg)
{:via, via_module, _term} = tuple when is_atom(via_module) ->
:supervisor.start_link(tuple, module, arg)
other ->
raise ArgumentError, """
expected :name option to be one of:
* nil
* atom
* {:global, term}
* {:via, module, term}
Got: #{inspect(other)}
"""
end
end
@doc """
Adds a child specification to `supervisor` and starts that child.
`child_spec` should be a valid child specification. The child process will
be started as defined in the child specification.
If a child specification with the specified id already exists, `child_spec` is
discarded and this function returns an error with `:already_started` or
`:already_present` if the corresponding child process is running or not,
respectively.
If the child process start function returns `{:ok, child}` or `{:ok, child,
info}`, then child specification and PID are added to the supervisor and
this function returns the same value.
If the child process start function returns `:ignore`, the child specification
is added to the supervisor, the PID is set to `:undefined` and this function
returns `{:ok, :undefined}`.
If the child process start function returns an error tuple or an erroneous
value, or if it fails, the child specification is discarded and this function
returns `{:error, error}` where `error` is a term containing information about
the error and child specification.
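## Examples
A hedged sketch of adding a child at runtime (using the `Stack` module
from the module documentation):
    {:ok, sup} = Supervisor.start_link([], strategy: :one_for_one)
    {:ok, _pid} = Supervisor.start_child(sup, {Stack, [:hello]})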
"""
@spec start_child(supervisor, :supervisor.child_spec() | {module, term} | module | [term]) ::
on_start_child
def start_child(supervisor, {_, _, _, _, _, _} = child_spec) do
call(supervisor, {:start_child, child_spec})
end
# TODO: Deprecate this on Elixir v1.8. Remove and update typespec on v2.0.
def start_child(supervisor, args) when is_list(args) do
call(supervisor, {:start_child, args})
end
def start_child(supervisor, child_spec) do
call(supervisor, {:start_child, Supervisor.child_spec(child_spec, [])})
end
@doc """
Terminates the given child identified by child id.
If there is a running child process, it is terminated. The child specification is
kept unless the child is temporary.
A non-temporary child process may later be restarted by the supervisor.
The child process can also be restarted explicitly by calling `restart_child/2`.
Use `delete_child/2` to remove the child specification.
If successful, this function returns `:ok`. If there is no child
specification for the given child id, this function returns
`{:error, :not_found}`.
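## Examples
A hedged sketch, assuming a supervisor `sup` with a child whose id is
`Stack`:
    :ok = Supervisor.terminate_child(sup, Stack)
    {:ok, _pid} = Supervisor.restart_child(sup, Stack)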
"""
@spec terminate_child(supervisor, term()) :: :ok | {:error, error}
when error: :not_found | :simple_one_for_one
# TODO: Deprecate this on Elixir v1.8
def terminate_child(supervisor, child_id)
def terminate_child(supervisor, pid) when is_pid(pid) do
call(supervisor, {:terminate_child, pid})
end
def terminate_child(supervisor, child_id) do
call(supervisor, {:terminate_child, child_id})
end
@doc """
Deletes the child specification identified by `child_id`.
The corresponding child process must not be running; use `terminate_child/2`
to terminate it if it's running.
If successful, this function returns `:ok`. This function may return an error
with an appropriate error tuple if the `child_id` is not found, or if the
corresponding child process is running or being restarted.
"""
@spec delete_child(supervisor, term()) :: :ok | {:error, error}
when error: :not_found | :simple_one_for_one | :running | :restarting
def delete_child(supervisor, child_id) do
call(supervisor, {:delete_child, child_id})
end
@doc """
Restarts a child process identified by `child_id`.
The child specification must exist and the corresponding child process must not
be running.
Note that for temporary children, the child specification is automatically deleted
when the child terminates, and thus it is not possible to restart such children.
If the child process start function returns `{:ok, child}` or `{:ok, child, info}`,
the PID is added to the supervisor and this function returns the same value.
If the child process start function returns `:ignore`, the PID remains set to
`:undefined` and this function returns `{:ok, :undefined}`.
This function may return an error with an appropriate error tuple if the
`child_id` is not found, or if the corresponding child process is running or
being restarted.
If the child process start function returns an error tuple or an erroneous value,
or if it fails, this function returns `{:error, error}`.
"""
@spec restart_child(supervisor, term()) :: {:ok, child} | {:ok, child, term} | {:error, error}
when error: :not_found | :simple_one_for_one | :running | :restarting | term
def restart_child(supervisor, child_id) do
call(supervisor, {:restart_child, child_id})
end
@doc """
Returns a list with information about all children of the given supervisor.
Note that calling this function when supervising a large number of children
under low memory conditions can cause an out of memory exception.
This function returns a list of `{id, child, type, modules}` tuples, where:
* `id` - as defined in the child specification
* `child` - the PID of the corresponding child process, `:restarting` if the
process is about to be restarted, or `:undefined` if there is no such
process
* `type` - `:worker` or `:supervisor`, as specified by the child specification
* `modules` - as specified by the child specification
"""
@spec which_children(supervisor) :: [
{term() | :undefined, child | :restarting, :worker | :supervisor, :supervisor.modules()}
]
def which_children(supervisor) do
call(supervisor, :which_children)
end
@doc """
Returns a map containing count values for the given supervisor.
The map contains the following keys:
* `:specs` - the total count of children, dead or alive
* `:active` - the count of all actively running child processes managed by
this supervisor
* `:supervisors` - the count of all supervisors whether or not these
child supervisors are still alive
* `:workers` - the count of all workers, whether or not these child workers
are still alive
"""
@spec count_children(supervisor) :: %{
specs: non_neg_integer,
active: non_neg_integer,
supervisors: non_neg_integer,
workers: non_neg_integer
}
def count_children(supervisor) do
call(supervisor, :count_children) |> :maps.from_list()
end
@doc """
Synchronously stops the given supervisor with the given `reason`.
It returns `:ok` if the supervisor terminates with the given
reason. If it terminates with another reason, the call exits.
This function keeps OTP semantics regarding error reporting.
If the reason is any other than `:normal`, `:shutdown` or
`{:shutdown, _}`, an error report is logged.
"""
@spec stop(supervisor, reason :: term, timeout) :: :ok
def stop(supervisor, reason \\ :normal, timeout \\ :infinity) do
GenServer.stop(supervisor, reason, timeout)
end
@compile {:inline, call: 2}
defp call(supervisor, req) do
GenServer.call(supervisor, req, :infinity)
end
end
|
lib/elixir/lib/supervisor.ex
| 0.848502
| 0.68084
|
supervisor.ex
|
starcoder
|
defmodule AWS.Datapipeline do
@moduledoc """
AWS Data Pipeline configures and manages a data-driven workflow called a
pipeline.
AWS Data Pipeline handles the details of scheduling and ensuring that data
dependencies are met so that your application can focus on processing the data.
AWS Data Pipeline provides a JAR implementation of a task runner called AWS Data
Pipeline Task Runner. AWS Data Pipeline Task Runner provides logic for common
data management scenarios, such as performing database queries and running data
analysis using Amazon Elastic MapReduce (Amazon EMR). You can use AWS Data
Pipeline Task Runner as your task runner, or you can write your own task runner
to provide custom data management.
AWS Data Pipeline implements two main sets of functionality. Use the first set
to create a pipeline and define data sources, schedules, dependencies, and the
transforms to be performed on the data. Use the second set in your task runner
application to receive the next task ready for processing. The logic for
performing the task, such as querying the data, running data analysis, or
converting the data from one format to another, is contained within the task
runner. The task runner performs the task assigned to it by the web service,
reporting progress to the web service as it does so. When the task is done, the
task runner reports the final success or failure of the task to the web service.
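## Example
A hedged usage sketch (credentials and region are placeholders; client
construction follows the `AWS.Client` conventions of this library):
    client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
    {:ok, result, _response} = AWS.Datapipeline.list_pipelines(client, %{})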
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2012-10-29",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "datapipeline",
global?: false,
protocol: "json",
service_id: nil,
signature_version: "v4",
signing_name: "datapipeline",
target_prefix: "DataPipeline"
}
end
@doc """
Validates the specified pipeline and starts processing pipeline tasks.
If the pipeline does not pass validation, activation fails.
If you need to pause the pipeline to investigate an issue with a component, such
as a data source or script, call `DeactivatePipeline`.
To activate a finished pipeline, modify the end date for the pipeline and then
activate it.
"""
def activate_pipeline(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ActivatePipeline", input, options)
end
@doc """
Adds or modifies tags for the specified pipeline.
"""
def add_tags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddTags", input, options)
end
@doc """
Creates a new, empty pipeline.
Use `PutPipelineDefinition` to populate the pipeline.
"""
def create_pipeline(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePipeline", input, options)
end
@doc """
Deactivates the specified running pipeline.
The pipeline is set to the `DEACTIVATING` state until the deactivation process
completes.
To resume a deactivated pipeline, use `ActivatePipeline`. By default, the
pipeline resumes from the last completed execution. Optionally, you can specify
the date and time to resume the pipeline.
"""
def deactivate_pipeline(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeactivatePipeline", input, options)
end
@doc """
Deletes a pipeline, its pipeline definition, and its run history.
AWS Data Pipeline attempts to cancel instances associated with the pipeline that
are currently being processed by task runners.
Deleting a pipeline cannot be undone. You cannot query or restore a deleted
pipeline. To temporarily pause a pipeline instead of deleting it, call
`SetStatus` with the status set to `PAUSE` on individual components. Components
that are paused by `SetStatus` can be resumed.
"""
def delete_pipeline(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePipeline", input, options)
end
@doc """
Gets the object definitions for a set of objects associated with the pipeline.
Object definitions are composed of a set of fields that define the properties of
the object.
"""
def describe_objects(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeObjects", input, options)
end
@doc """
Retrieves metadata about one or more pipelines.
The information retrieved includes the name of the pipeline, the pipeline
identifier, its current state, and the user account that owns the pipeline.
Using account credentials, you can retrieve metadata about pipelines that you or
your IAM users have created. If you are using an IAM user account, you can
retrieve metadata about only those pipelines for which you have read
permissions.
To retrieve the full pipeline definition instead of metadata about the pipeline,
call `GetPipelineDefinition`.
"""
def describe_pipelines(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribePipelines", input, options)
end
@doc """
Task runners call `EvaluateExpression` to evaluate a string in the context of
the specified object.
For example, a task runner can evaluate SQL queries stored in Amazon S3.
"""
def evaluate_expression(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "EvaluateExpression", input, options)
end
@doc """
Gets the definition of the specified pipeline.
You can call `GetPipelineDefinition` to retrieve the pipeline definition that
you provided using `PutPipelineDefinition`.
"""
def get_pipeline_definition(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetPipelineDefinition", input, options)
end
@doc """
Lists the pipeline identifiers for all active pipelines that you have permission
to access.
"""
def list_pipelines(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPipelines", input, options)
end
@doc """
Task runners call `PollForTask` to receive a task to perform from AWS Data
Pipeline.
The task runner specifies which tasks it can perform by setting a value for the
`workerGroup` parameter. The task returned can come from any of the pipelines
that match the `workerGroup` value passed in by the task runner and that were
launched using the IAM user credentials specified by the task runner.
If tasks are ready in the work queue, `PollForTask` returns a response
immediately. If no tasks are available in the queue, `PollForTask` uses
long-polling and holds on to a poll connection for up to 90 seconds, during
which time the first newly scheduled task is handed to the task runner. To
accommodate this, set the socket timeout in your task runner to 90 seconds. The
task runner should not call `PollForTask` again on the same `workerGroup` until
it receives a response, and this can take up to 90 seconds.
"""
def poll_for_task(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PollForTask", input, options)
end
@doc """
Adds tasks, schedules, and preconditions to the specified pipeline.
You can use `PutPipelineDefinition` to populate a new pipeline.
`PutPipelineDefinition` also validates the configuration as it adds it to the
pipeline. Changes to the pipeline are saved unless one of the following
validation errors exists in the pipeline.
1. An object is missing a name or identifier field.
2. A string or reference field is empty.
3. The number of objects in the pipeline exceeds the maximum allowed
objects.
4. The pipeline is in a FINISHED state.
Pipeline object definitions are passed to the `PutPipelineDefinition` action and
returned by the `GetPipelineDefinition` action.
"""
def put_pipeline_definition(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutPipelineDefinition", input, options)
end
@doc """
Queries the specified pipeline for the names of objects that match the specified
set of conditions.
"""
def query_objects(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "QueryObjects", input, options)
end
@doc """
Removes existing tags from the specified pipeline.
"""
def remove_tags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RemoveTags", input, options)
end
@doc """
Task runners call `ReportTaskProgress` when assigned a task to acknowledge that
it has the task.
If the web service does not receive this acknowledgement within 2 minutes, it
assigns the task in a subsequent `PollForTask` call. After this initial
acknowledgement, the task runner only needs to report progress every 15 minutes
to maintain its ownership of the task. You can change this reporting time from
15 minutes by specifying a `reportProgressTimeout` field in your pipeline.
If a task runner does not report its status after 5 minutes, AWS Data Pipeline
assumes that the task runner is unable to process the task and reassigns the
task in a subsequent response to `PollForTask`. Task runners should call
`ReportTaskProgress` every 60 seconds.
"""
def report_task_progress(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ReportTaskProgress", input, options)
end
@doc """
Task runners call `ReportTaskRunnerHeartbeat` every 15 minutes to indicate that
they are operational.
If the AWS Data Pipeline Task Runner is launched on a resource managed by AWS
Data Pipeline, the web service can use this call to detect when the task runner
application has failed and restart a new instance.
"""
def report_task_runner_heartbeat(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ReportTaskRunnerHeartbeat", input, options)
end
@doc """
Requests that the status of the specified physical or logical pipeline objects
be updated in the specified pipeline.
This update might not occur immediately, but is eventually consistent. The
status that can be set depends on the type of object (for example, DataNode or
Activity). You cannot perform this operation on `FINISHED` pipelines and
attempting to do so returns `InvalidRequestException`.
"""
def set_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetStatus", input, options)
end
@doc """
Task runners call `SetTaskStatus` to notify AWS Data Pipeline that a task is
completed and provide information about the final status.
A task runner makes this call regardless of whether the task was successful. A
task runner does not need to call `SetTaskStatus` for tasks that are canceled by
the web service during a call to `ReportTaskProgress`.
"""
def set_task_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetTaskStatus", input, options)
end
@doc """
Validates the specified pipeline definition to ensure that it is well formed and
can be run without error.
"""
def validate_pipeline_definition(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ValidatePipelineDefinition", input, options)
end
end
|
lib/aws/generated/datapipeline.ex
| 0.898037
| 0.740878
|
datapipeline.ex
|
starcoder
|
defmodule BUPE.Config do
@moduledoc ~S"""
Configuration structure that holds all the available options for EPUB.
Most of these fields are used in the Package Definition document, this
document includes bibliographic and structural metadata about an EPUB
Publication, and is thus the primary source of information about how to
process and display it.
## EPUB specification fields
* `title` - Represents an instance of a name given to the EPUB Publication.
* `creator` - Represents the name of a person, organization, etc.
responsible for the creation of the content
* `contributor` - Represents the name of a person, organization, etc. that
played a secondary role in the creation of the content.
* `date` - Defines the publication date. The publication date is not the
same as the last modification date. See: [Date and Time Formats][datetime]
* `modified` - The modification date must be expressed in Coordinated
Universal Time (UTC) and must be terminated by the Z time zone indicator.
* `identifier` - Contains a single identifier associated with the EPUB
Publication, such as a UUID, DOI, ISBN or ISSN. Default: UUID
* `language` - Specifies the language used in the contents. Default: `"en"`
* `version` - Specifies the EPUB specification version to which the
Publication conforms. Default: "3.0"
* `unique_identifier` - Specifies a primary identifier that is unique to
one and only one particular EPUB Publication
* `source` - Identifies the source publication from which this EPUB
Publication is derived.
* `type` - Indicates that the given Publication is of a specialized type
(e.g., annotations packaged in EPUB format or a dictionary). See the
[EPUB Publication Types Registry][types] document for more information.
For more information about other fields such as `description`, `format`,
`coverage`, `publisher`, `relation`, `rights`, `subject`, etc. please see
the [Package Metadata][meta] section of the EPUB specification.
## Support configuration
* `pages` - List of XHTML files which will be included in the EPUB document,
please keep in mind that the sequence here will set the navigation order in
the EPUB document.
* `styles` - List of CSS files which will be included in the EPUB document
* `scripts` - List of JS files which will be included in the EPUB document
* `images` - List of images which will be included in the EPUB document.
* `cover` - Specifies if you want a default cover page, default: `true`
* `logo` - Image for the cover page
[meta]: http://www.idpf.org/epub/30/spec/epub30-publications.html#sec-package-metadata
[datetime]: http://www.w3.org/TR/NOTE-datetime
[types]: http://idpf.github.io/epub-registries/types/
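## Example
A minimal configuration (only the enforced `:title` and `:pages` keys are
set; the file names are made up):
    %BUPE.Config{
      title: "Sample Book",
      pages: ["chapter_1.xhtml", "chapter_2.xhtml"]
    }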
"""
alias BUPE.Item
@type title :: String.t()
@type creator :: String.t()
@type contributor :: String.t()
@type t :: %__MODULE__{
title: title,
creator: creator,
contributor: contributor,
date: String.t(),
identifier: String.t(),
language: String.t(),
version: String.t(),
unique_identifier: String.t(),
source: String.t(),
type: String.t(),
modified: String.t(),
description: String.t(),
format: String.t(),
coverage: String.t(),
publisher: String.t(),
relation: String.t(),
rights: String.t(),
subject: String.t(),
pages: [Path.t() | Item.t()],
nav: list(),
styles: [Path.t() | Item.t()],
scripts: [Path.t() | Item.t()],
images: [Path.t() | Item.t()],
cover: boolean,
logo: String.t(),
audio: [map()],
fonts: [map()]
}
@enforce_keys [:title, :pages]
defstruct title: nil,
creator: nil,
contributor: nil,
date: nil,
identifier: nil,
language: "en",
version: "3.0",
unique_identifier: nil,
source: nil,
type: nil,
modified: nil,
description: nil,
format: nil,
coverage: nil,
publisher: nil,
relation: nil,
rights: nil,
subject: nil,
pages: [],
nav: [],
styles: [],
scripts: [],
images: [],
cover: true,
logo: nil,
audio: [],
fonts: []
end
|
lib/bupe/config.ex
| 0.784979
| 0.660537
|
config.ex
|
starcoder
|
defmodule Deriv do
@type literal() :: {:const, number()}
| {:const, atom()}
| {:var, atom()}
@type expression() :: {:add, expression(), expression()}
| {:sub, expression(), expression()}
| {:mul, expression(), expression()}
| {:exp, base(), exp()}
| literal()
@type base() :: expression()
@type exp() :: expression()
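# Worked example (hypothetical expression): d/dx (2*x + x^2)
#
#     expr =
#       {:add, {:mul, {:const, 2}, {:var, :x}},
#              {:exp, {:var, :x}, {:const, 2}}}
#
#     Deriv.deriv(expr, :x) |> Deriv.simplify()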
# Completely untested and incomplete code, very bad idea
def deriv({:const, _}, _), do: {:const, 0}
def deriv({:var, v}, v), do: {:const, 1}
def deriv({:var, _y}, _), do: {:const, 0}
def deriv({:mul, e1, e2}, v), do: {:add, {:mul, deriv(e1, v), e2}, {:mul, e1, deriv(e2, v)}}
def deriv({:add, e1, e2}, v), do: {:add, deriv(e1, v), deriv(e2, v)}
def deriv({:sub, e1, e2}, v), do: {:sub, deriv(e1, v), deriv(e2, v)}
def deriv({:exp, {:const, _}, {:var, _}}, _v), do: {:error, :toohard} # requires log-handling
def deriv({:exp, {:const, _}, {:const, _}}, _v), do: {:const, 0}
def deriv({:exp, {:var, v}, {:var, v}}, v), do: {:error, :whywouldyoudothis}
def deriv({:exp, e1, {:const, c}}, v) do
{:mul, {:const, c}, {:mul, deriv(e1, v), {:exp, e1, {:sub, {:const, c}, {:const, 1}}}}}
end
# Catch-all for exponent forms not handled above (the original clause
# here was an unreachable deriv/1 definition).
def deriv({:exp, _base, _exp}, _v), do: {:error, :toohard}
def simplify({:mul, e1, e2}) do
case simplify(e1) do
{:const, 0} ->
{:const, 0}
{:const, 1} ->
simplify(e2)
{:var, v} ->
  case simplify(e2) do
    {:const, 0} ->
      {:const, 0}
    {:const, 1} ->
      {:var, v}
    {:var, ^v} ->
      {:exp, {:var, v}, {:const, 2}}
    # Fall back to the plain product; the original inner case had no
    # catch-all clause and crashed on any other simplified form.
    sim2 ->
      {:mul, {:var, v}, sim2}
  end
sim1 ->
case simplify(e2) do
{:const, 0} ->
{:const, 0}
{:const, 1} ->
sim1
sim2 ->
{:mul, sim1, sim2} # no outer simplification made
end
end
end
def simplify({:add, e1, e2}) do
case simplify(e1) do
{:const, 0} -> simplify(e2)
{:const, c} when is_number(c) ->
case simplify(e2) do
{:const, c2} when is_number(c2) ->
{:const, c+c2} # 2 numbers that can be added, c2 0-case covered
sim2 -> {:add, {:const, c}, sim2}
end
sim1 -> case simplify(e2) do
{:const, 0} -> sim1
sim2 -> {:add, sim1, sim2}
end
end
end
def simplify({:exp, e1, e2}) do
case simplify(e1) do
{:const, 0} ->
{:const, 0}
{:const, 1} ->
{:const, 1}
sim1 -> case simplify(e2) do
{:const, 0} ->
{:const, 1}
{:const, 1} ->
sim1
sim2 ->
{:exp, sim1, sim2}
end
end
end
# Base case / fallback: literals and any form not handled above
# (e.g. {:sub, _, _}) are returned unchanged; without this clause
# simplify/1 crashed on the literal sub-expressions it recurses into.
def simplify(other), do: other
end
|
exercises/derivative/deriv.ex
| 0.55097
| 0.610918
|
deriv.ex
|
starcoder
|
defmodule ExTermbox.Bindings do
use InlineNif,
ex_termbox: :code.priv_dir(:ex_termbox) |> Path.join("termbox_bindings")
@moduledoc """
Provides the low-level bindings to the termbox library. This module loads the
NIFs defined in `c_src/` and thinly wraps the C interface.
For event-handling, it's recommended to use the `ExTermbox.EventManager` API
instead of the raw interface exposed here.
For more complex applications, it's recommended to use the high-level
rendering API provided by Ratatouille (a terminal UI kit based on the bindings
here). Ratatouille manages things like initialization, updates and shutdown
automatically, and provides a declarative, HTML-like interface for rendering
content to the screen. See the repo for details:
<https://github.com/ndreynolds/ratatouille>
See also the termbox header file for additional documentation of the functions
here:
<https://github.com/nsf/termbox/blob/master/src/termbox.h>
Note that the "NIF <function>/<arity> not loaded" messages below are fallbacks
normally replaced by the natively-implemented functions at load. If you're
seeing this message, it means the native bindings could not be loaded. Please
open an issue with the error and relevant system information.
### Event Polling
The event polling API differs slightly from the termbox API in order to make
it in the Erlang ecosystem. Instead of blocking poll calls, it uses
asynchronous message passing to deliver events to the caller.
It's recommended to use the `ExTermbox.EventManager` gen_server to subscribe
to terminal events instead of using these bindings directly. It supports
multiple subscriptions and more gracefully handles errors.
#### Implementation Notes
In the `start_polling/1` NIF, an OS-level thread is created which performs the
blocking event polling (i.e., a `select` call). This allows the NIF to return
quickly and avoid causing the scheduler too much trouble. It would be very bad
to block the scheduler thread until an event is received.
While using threads solves this problem, it unfortunately also introduces new
ones. The bindings implement some locking mechanisms to try to coordinate
threading logic and prevent polling from occurring simultaneously, but this
sort of logic is hard to get right (one of the reasons we use Elixir/Erlang).
No issues are currently known, but please report any you happen to encounter.
#### Timeouts
You might have noticed that there's no binding for `tb_peek_event` (which
accepts a timeout). That's because it's easy enough to implement a timeout
ourselves with `start_polling/1` and `receive` with `after`, e.g.:
{:ok, _resource} = Bindings.start_polling(self())
receive do
{:event, event} ->
# handle the event...
after
1_000 ->
:ok = Bindings.stop_polling(self())
# do something else...
end
"""
alias ExTermbox.{Cell, Constants, Position}
@on_load :load_nifs
def load_nifs do
nif_path(:ex_termbox)
|> to_charlist()
|> :erlang.load_nif(0)
end
@doc """
Initializes the termbox library. Must be called before any other bindings are
called.
Returns `:ok` on success and otherwise one of the following errors:
* `{:error, :already_running}` - the library was already initialized.
* `{:error, code}` - where code is an integer error code from termbox.
"""
@spec init :: :ok | {:error, integer() | :already_running}
def init do
error("NIF init/0 not loaded")
end
@doc """
Finalizes the termbox library. Should be called when the terminal application
is exited, and before your program or OTP application stops.
Returns `:ok` on success and otherwise one of the following errors:
* `{:error, :not_running}` - the library cannot be shut down because it is not
initialized.
* `{:error, code}` - where `code` is an integer error code from termbox.
"""
@spec shutdown :: :ok | {:error, integer() | :not_running}
def shutdown do
error("NIF shutdown/0 not loaded")
end
@doc """
Returns `{:ok, width}` where `width` is the width of the terminal window in
characters.
If termbox was not initialized, returns `{:error, :not_running}` (call
`init/0` first).
"""
@spec width :: {:ok, integer()} | {:error, :not_running}
def width do
error("NIF width/0 not loaded")
end
@doc """
Returns `{:ok, height}` where `height` is the height of the terminal window in
characters.
If termbox was not initialized, returns `{:error, :not_running}` (call
`init/0` first).
"""
@spec height :: {:ok, integer()} | {:error, :not_running}
def height do
error("NIF height/0 not loaded")
end
@doc """
Clears the internal back buffer, setting the foreground and background to the
defaults, or those specified by `set_clear_attributes/2`.
Returns `:ok` if successful. If termbox was not initialized, returns
`{:error, :not_running}` (call `init/0` first).
"""
@spec clear :: :ok | {:error, :not_running}
def clear do
error("NIF clear/0 not loaded")
end
@doc """
Sets the default foreground and background colors used when `clear/0` is
called.
Returns `:ok` if successful. If termbox was not initialized, returns
`{:error, :not_running}` (call `init/0` first).
"""
@spec set_clear_attributes(Constants.color(), Constants.color()) ::
:ok | {:error, :not_running}
def set_clear_attributes(_fg, _bg) do
error("NIF set_clear_attributes/2 not loaded")
end
@doc """
Synchronizes the internal back buffer and the terminal.
Returns `:ok` if successful. If termbox was not initialized, returns
`{:error, :not_running}` (call `init/0` first).
"""
@spec present :: :ok | {:error, :not_running}
def present do
error("NIF present/0 not loaded")
end
@doc """
Sets the position of the cursor to the coordinates `(x, y)`, or hide the
cursor by passing `ExTermbox.Constants.hide_cursor/0` for both x and y.
Returns `:ok` if successful. If termbox was not initialized, returns
`{:error, :not_running}` (call `init/0` first).
"""
@spec set_cursor(non_neg_integer(), non_neg_integer()) ::
:ok | {:error, :not_running}
def set_cursor(_x, _y) do
error("NIF set_cursor/2 not loaded")
end
@doc """
Puts a cell in the internal back buffer at the cell's position. Note that this is
implemented in terms of `change_cell/5`.
Returns `:ok` if successful. If termbox was not initialized, returns
`{:error, :not_running}` (call `init/0` first).
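## Examples
A hedged sketch (assumes termbox has already been initialized via
`init/0` and that the named constants exist in `ExTermbox.Constants`):
    alias ExTermbox.{Bindings, Cell, Constants, Position}
    :ok = Bindings.put_cell(%Cell{
      position: %Position{x: 0, y: 0},
      ch: ?H,
      fg: Constants.color(:white),
      bg: Constants.color(:default)
    })
    :ok = Bindings.present()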
"""
@spec put_cell(Cell.t()) :: :ok | {:error, :not_running}
def put_cell(%Cell{position: %Position{x: x, y: y}, ch: ch, fg: fg, bg: bg}) do
change_cell(x, y, ch, fg, bg)
end
@doc """
Changes the attributes of the cell at the specified position in the internal
back buffer. Prefer using `put_cell/1`, which supports passing an
`ExTermbox.Cell` struct.
Returns `:ok` if successful. If termbox was not initialized, returns
`{:error, :not_running}` (call `init/0` first).
"""
@spec change_cell(
non_neg_integer(),
non_neg_integer(),
non_neg_integer(),
Constants.color(),
Constants.color()
) :: :ok | {:error, :not_running}
def change_cell(_x, _y, _ch, _fg, _bg) do
error("NIF change_cell/5 not loaded")
end
@doc """
Sets or retrieves the input mode (see `ExTermbox.Constants.input_modes/0`).
See the [termbox source](https://github.com/nsf/termbox/blob/master/src/termbox.h)
for additional documentation.
Returns `{:ok, input_mode}` when successful, where `input_mode` is an integer
representing the current mode. If termbox was not initialized, returns
`{:error, :not_running}` (call `init/0` first).
"""
@spec select_input_mode(Constants.input_mode()) ::
{:ok, integer()} | {:error, :not_running}
def select_input_mode(_mode) do
error("NIF select_input_mode/1 not loaded")
end
@doc """
Sets or retrieves the output mode (see `ExTermbox.Constants.output_modes/0`).
See the [termbox source](https://github.com/nsf/termbox/blob/master/src/termbox.h)
for additional documentation.
Returns `{:ok, output_mode}` when successful, where `output_mode` is an
integer representing the current mode. If termbox was not initialized, returns
`{:error, :not_running}` (call `init/0` first).
"""
@spec select_output_mode(Constants.output_mode()) ::
{:ok, integer()} | {:error, :not_running}
def select_output_mode(_mode) do
error("NIF select_output_mode/1 not loaded")
end
@doc """
Starts polling for terminal events asynchronously. The function accepts a PID
  as an argument and returns immediately. When an event is received, it is sent to
the specified process. It continues polling until either `stop_polling/0` or
`shutdown/0` is called. An error is returned when this function is called
again before polling has been stopped.
If successful, returns `{:ok, resource}`, where `resource` is an Erlang
resource object representing a handle for the poll thread. Otherwise, one of
the following errors is returned:
  * `{:error, :not_running}` - termbox should be initialized before events are
polled.
  * `{:error, :already_polling}` - `start_polling/1` was previously called and
    has not since been stopped.
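  ## Example

      # Illustrative sketch; the exact shape of the event messages is an
      # assumption here — inspect what arrives before matching on it.
      {:ok, _resource} = ExTermbox.Bindings.start_polling(self())
      receive do
        event -> IO.inspect(event)
      end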
"""
@spec start_polling(pid()) ::
{:ok, reference()} | {:error, :not_running | :already_polling}
def start_polling(recipient_pid) when is_pid(recipient_pid) do
error("NIF start_polling/1 not loaded")
end
@doc """
Cancels a previous call to `start_polling/1` and blocks until polling has
stopped. The polling loop checks every 10 ms for a stop condition, so calls
can take up to 10 ms to return.
This can be useful, for example, if the `start_polling/1` recipient process
dies and the polling needs to be restarted by another process.
Returns `:ok` on success and otherwise one of the following errors:
  * `{:error, :not_running}` - termbox should be initialized before any polling
    functions are called.
  * `{:error, :not_polling}` - polling cannot be stopped because it was already
    stopped or never started.
"""
@spec stop_polling() :: :ok | {:error, :not_running | :not_polling}
def stop_polling do
error("NIF stop_polling/1 not loaded")
end
defp error(reason), do: :erlang.nif_error(reason)
end
|
lib/ex_termbox/bindings.ex
| 0.883638
| 0.59302
|
bindings.ex
|
starcoder
|
defmodule Serum.Post do
@moduledoc """
Defines a struct representing a blog post page.
## Fields
* `file`: Source path
* `title`: Post title
* `date`: Post date (formatted)
* `raw_date`: Post date (erlang tuple style)
* `tags`: A list of tags
* `url`: Absolute URL of the blog post in the website
* `html`: Post contents converted into HTML
* `preview`: Preview text of the post
  * `output`: Destination path
  * `extras`: Map of extra values associated with the post
  * `template`: Name of a custom template to use, or `nil` for the default
"""
alias Serum.Fragment
alias Serum.Post.PreviewGenerator
alias Serum.Project
alias Serum.Renderer
alias Serum.Result
alias Serum.Tag
alias Serum.Template
alias Serum.Template.Storage, as: TS
@type t :: %__MODULE__{
file: binary(),
title: binary(),
date: binary(),
raw_date: :calendar.datetime(),
tags: [Tag.t()],
url: binary(),
html: binary(),
preview: binary(),
output: binary(),
extras: map(),
template: binary() | nil
}
defstruct [
:file,
:title,
:date,
:raw_date,
:tags,
:url,
:html,
:preview,
:output,
:extras,
:template
]
@spec new(binary(), {map(), map()}, binary(), Project.t()) :: t()
def new(path, {header, extras}, html, %Project{} = proj) do
tags = Tag.batch_create(header[:tags] || [], proj)
datetime = header[:date]
date_str = Timex.format!(datetime, proj.date_format)
raw_date = to_erl_datetime(datetime)
preview = PreviewGenerator.generate_preview(html, proj.preview_length)
output_name = Path.basename(path, ".md") <> ".html"
%__MODULE__{
file: path,
title: header[:title],
tags: tags,
html: html,
preview: preview,
raw_date: raw_date,
date: date_str,
url: Path.join([proj.base_url, proj.posts_path, output_name]),
output: Path.join([proj.dest, proj.posts_path, output_name]),
template: header[:template],
extras: extras
}
end
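  @doc """
  Converts a post struct into a plain map for use in template bindings,
  dropping fields that templates do not need.

  Illustrative sketch (field values are hypothetical):

      Serum.Post.compact(post)
      #=> %{type: :post, title: "Hello", date: "2019-01-01", ...}
  """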
@spec compact(t()) :: map()
def compact(%__MODULE__{} = post) do
post
|> Map.drop(~w(__struct__ file html output)a)
|> Map.put(:type, :post)
end
@spec to_erl_datetime(term()) :: :calendar.datetime()
defp to_erl_datetime(obj) do
case Timex.to_erl(obj) do
{{_y, _m, _d}, {_h, _i, _s}} = erl_datetime -> erl_datetime
{_y, _m, _d} = erl_date -> {erl_date, {0, 0, 0}}
_ -> {{0, 1, 1}, {0, 0, 0}}
end
end
@spec to_fragment(t()) :: Result.t(Fragment.t())
def to_fragment(post) do
metadata = compact(post)
template_name = post.template || "post"
bindings = [page: metadata, contents: post.html]
with %Template{} = template <- TS.get(template_name, :template),
{:ok, html} <- Renderer.render_fragment(template, bindings) do
Fragment.new(post.file, post.output, metadata, html)
else
nil -> {:error, "the template \"#{template_name}\" is not available"}
{:error, _} = error -> error
end
end
defimpl Fragment.Source do
alias Serum.Post
alias Serum.Result
@spec to_fragment(Post.t()) :: Result.t(Fragment.t())
def to_fragment(post) do
Post.to_fragment(post)
end
end
end
|
lib/serum/post.ex
| 0.821939
| 0.446736
|
post.ex
|
starcoder
|
defmodule Geo.WKB.Decoder do
@moduledoc false
use Bitwise
alias Geo.{
Point,
PointZ,
PointM,
PointZM,
LineString,
Polygon,
GeometryCollection,
Utils
}
alias Geo.WKB.Reader
@doc """
  Takes a WKB string and returns `{:ok, geometry}`, or `{:error, exception}`
  if decoding fails.
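  ## Example

      # Illustrative; the hex string below is WKB for POINT(1 1):
      {:ok, point} = decode("0101000000000000000000F03F000000000000F03F")
      point.coordinates
      #=> {1.0, 1.0}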
"""
@spec decode(binary, [Geo.geometry()]) :: {:ok, Geo.geometry()} | {:error, Exception.t()}
def decode(wkb, geometries \\ []) do
{:ok, decode!(wkb, geometries)}
rescue
exception ->
{:error, exception}
end
@doc """
  Takes a WKB string and returns a Geometry, raising an exception if decoding
  fails.
"""
@spec decode!(binary, [Geo.geometry()]) :: Geo.geometry() | no_return
def decode!(wkb, geometries \\ []) do
wkb_reader = Reader.new(wkb)
{type, wkb_reader} = Reader.read(wkb_reader, 8)
type = String.to_integer(type, 16)
{srid, wkb_reader} =
if (type &&& 0x20000000) != 0 do
{srid, wkb_reader} = Reader.read(wkb_reader, 8)
{String.to_integer(srid, 16), wkb_reader}
else
{nil, wkb_reader}
end
type = Utils.hex_to_type(type &&& 0xDF_FF_FF_FF)
{coordinates, wkb_reader} = decode_coordinates(type, wkb_reader)
geometries =
case type do
%Geo.GeometryCollection{} ->
coordinates =
coordinates
|> Enum.map(fn x -> %{x | srid: srid} end)
%{type | geometries: coordinates, srid: srid}
_ ->
geometries ++ [%{type | coordinates: coordinates, srid: srid}]
end
if Reader.eof?(wkb_reader) do
return_geom(geometries)
else
wkb_reader.wkb |> decode!(geometries)
end
end
defp return_geom(%GeometryCollection{} = geom) do
geom
end
defp return_geom(geom) when is_list(geom) do
if length(geom) == 1 do
hd(geom)
else
geom
end
end
defp decode_coordinates(%Point{}, wkb_reader) do
{x, wkb_reader} = Reader.read(wkb_reader, 16)
x = Utils.hex_to_float(x)
{y, wkb_reader} = Reader.read(wkb_reader, 16)
y = Utils.hex_to_float(y)
{{x, y}, wkb_reader}
end
defp decode_coordinates(%PointZ{}, wkb_reader) do
{x, wkb_reader} = Reader.read(wkb_reader, 16)
x = Utils.hex_to_float(x)
{y, wkb_reader} = Reader.read(wkb_reader, 16)
y = Utils.hex_to_float(y)
{z, wkb_reader} = Reader.read(wkb_reader, 16)
z = Utils.hex_to_float(z)
{{x, y, z}, wkb_reader}
end
defp decode_coordinates(%PointM{}, wkb_reader) do
{x, wkb_reader} = Reader.read(wkb_reader, 16)
x = Utils.hex_to_float(x)
{y, wkb_reader} = Reader.read(wkb_reader, 16)
y = Utils.hex_to_float(y)
{m, wkb_reader} = Reader.read(wkb_reader, 16)
m = Utils.hex_to_float(m)
{{x, y, m}, wkb_reader}
end
defp decode_coordinates(%PointZM{}, wkb_reader) do
{x, wkb_reader} = Reader.read(wkb_reader, 16)
x = Utils.hex_to_float(x)
{y, wkb_reader} = Reader.read(wkb_reader, 16)
y = Utils.hex_to_float(y)
{z, wkb_reader} = Reader.read(wkb_reader, 16)
z = Utils.hex_to_float(z)
{m, wkb_reader} = Reader.read(wkb_reader, 16)
m = Utils.hex_to_float(m)
{{x, y, z, m}, wkb_reader}
end
defp decode_coordinates(%LineString{}, wkb_reader) do
{number_of_points, wkb_reader} = Reader.read(wkb_reader, 8)
number_of_points = number_of_points |> String.to_integer(16)
Enum.map_reduce(Enum.to_list(0..(number_of_points - 1)), wkb_reader, fn _x, acc ->
decode_coordinates(%Point{}, acc)
end)
end
defp decode_coordinates(%Polygon{}, wkb_reader) do
{number_of_lines, wkb_reader} = Reader.read(wkb_reader, 8)
number_of_lines = number_of_lines |> String.to_integer(16)
Enum.map_reduce(Enum.to_list(0..(number_of_lines - 1)), wkb_reader, fn _x, acc ->
decode_coordinates(%LineString{}, acc)
end)
end
defp decode_coordinates(%GeometryCollection{}, wkb_reader) do
{_number_of_items, wkb_reader} = Reader.read(wkb_reader, 8)
geometries = decode!(wkb_reader.wkb)
{List.wrap(geometries), Reader.new("00")}
end
defp decode_coordinates(_geom, wkb_reader) do
{_number_of_items, wkb_reader} = Reader.read(wkb_reader, 8)
decoded_geom = wkb_reader.wkb |> decode!()
coordinates =
if is_list(decoded_geom) do
Enum.map(decoded_geom, fn x ->
x.coordinates
end)
else
[decoded_geom.coordinates]
end
{coordinates, Reader.new("00")}
end
end
|
lib/geo/wkb/decoder.ex
| 0.853516
| 0.650689
|
decoder.ex
|
starcoder
|
defmodule RethinkDB.Record do
@moduledoc false
defstruct data: "", profile: nil
end
defmodule RethinkDB.Collection do
@moduledoc false
defstruct data: [], profile: nil
defimpl Enumerable, for: __MODULE__ do
def reduce(%{data: data}, acc, fun) do
Enumerable.reduce(data, acc, fun)
end
def count(%{data: data}), do: Enumerable.count(data)
def member?(%{data: data}, el), do: Enumerable.member?(data, el)
def slice(%{data: data}), do: Enumerable.slice(data)
end
end
defmodule RethinkDB.Feed do
@moduledoc false
defstruct token: nil, data: nil, pid: nil, note: nil, profile: nil, opts: nil
defimpl Enumerable, for: __MODULE__ do
def reduce(changes, acc, fun) do
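      # Lazily unfolds the feed: an exhausted batch pulls the next one from
      # the server via `RethinkDB.next/1`, a plain collection terminates the
      # stream, and each batch's `data` is flattened into single elements.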
stream =
Stream.unfold(changes, fn
x = %RethinkDB.Feed{data: []} ->
{:ok, r} = RethinkDB.next(x)
{r, struct(r, data: [])}
x = %RethinkDB.Feed{} ->
{x, struct(x, data: [])}
x = %RethinkDB.Collection{} ->
{x, nil}
nil ->
nil
end)
|> Stream.flat_map(fn el ->
el.data
end)
stream.(acc, fun)
end
def count(_changes), do: raise("count/1 not supported for changes")
def member?(_changes, _values), do: raise("member/2 not supported for changes")
def slice(_changes), do: raise("slice/1 is not supported for changes")
end
end
defmodule RethinkDB.Response do
@moduledoc false
defstruct token: nil, data: "", profile: nil
def parse(raw_data, token, pid, opts) do
d = Poison.decode!(raw_data)
data = RethinkDB.Pseudotypes.convert_reql_pseudotypes(d["r"], opts)
{code, resp} =
case d["t"] do
1 -> {:ok, %RethinkDB.Record{data: hd(data)}}
2 -> {:ok, %RethinkDB.Collection{data: data}}
3 -> {:ok, %RethinkDB.Feed{token: token, data: data, pid: pid, note: d["n"], opts: opts}}
4 -> {:ok, %RethinkDB.Response{token: token, data: d}}
16 -> {:error, %RethinkDB.Response{token: token, data: d}}
17 -> {:error, %RethinkDB.Response{token: token, data: d}}
18 -> {:error, %RethinkDB.Response{token: token, data: d}}
end
{code, %{resp | :profile => d["p"]}}
end
end
|
lib/rethinkdb/response.ex
| 0.644449
| 0.473962
|
response.ex
|
starcoder
|
defmodule Membrane.Element.Base.Mixin.SourceBehaviour do
@moduledoc """
Module defining behaviour for source and filter elements.
When used declares behaviour implementation, provides default callback definitions
and imports macros.
For more information on implementing elements, see `Membrane.Element.Base`.
"""
alias Membrane.{Buffer, Element}
alias Membrane.Core.Element.PadsSpecsParser
alias Element.{CallbackContext, Pad}
alias Element.Base.Mixin.CommonBehaviour
@doc """
Callback that is called when buffers should be emitted by the source or filter.
It will be called only for output pads in the pull mode, as in their case demand
is triggered by the input pad of the subsequent element.
  In source elements, an appropriate amount of data should be sent here. If the
  data is not yet available, the element should store the unsupplied demand and
  supply it when possible.
In filter elements, this callback should usually return `:demand` action with
size sufficient (at least approximately) for supplying incoming demand. This
will result in calling `c:Membrane.Element.Base.Filter.handle_process_list/4` or
`c:Membrane.Element.Base.Sink.handle_write_list/4`, which is to supply
the demand. If it does not, or does only partially,
`c:Membrane.Element.Base.Mixin.SourceBehaviour.handle_demand/5` is called
again, until there is any data available on the input pad.
For output pads in the push mode, element should generate buffers without this
callback.
"""
@callback handle_demand(
pad :: Pad.ref_t(),
size :: non_neg_integer,
unit :: Buffer.Metric.unit_t(),
context :: CallbackContext.Demand.t(),
state :: Element.state_t()
) :: CommonBehaviour.callback_return_t()
@doc """
Macro that defines known output pads for the element type.
Allows to use `one_of/1` and `range/2` functions from `Membrane.Caps.Matcher`
without module prefix.
It automatically generates documentation from the given definition
and adds compile-time caps specs validation.
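  ## Example

      # Hypothetical pad spec; option names are illustrative:
      def_output_pads output: [caps: :any, mode: :pull]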
"""
defmacro def_output_pads(pads) do
PadsSpecsParser.def_pads(pads, :output)
end
defmacro __using__(_) do
quote location: :keep do
@behaviour unquote(__MODULE__)
import unquote(__MODULE__), only: [def_output_pads: 1]
end
end
end
|
lib/membrane/element/base/mixin/source_behaviour.ex
| 0.849316
| 0.467514
|
source_behaviour.ex
|
starcoder
|
defmodule PrimaAuth0Ex.Token do
@moduledoc """
Module to verify the integrity and validate the claims of tokens.
"""
use Joken.Config
add_hook JokenJwks, strategy: PrimaAuth0Ex.JwksStrategy
add_hook Joken.Hooks.RequiredClaims, [:aud, :iat, :exp]
@impl true
def token_config do
[skip: [:audience], iss: issuer()]
|> default_claims()
|> add_claim("aud", nil, &validate_audience/3)
|> add_claim("permissions", nil, &validate_permissions/3)
end
@impl Joken.Hooks
def after_validate(_hook_options, {:ok, claims} = result, {_token_config, _claims, context} = input) do
if missing_required_permissions_claim?(claims, context) do
{:halt, {:error, [message: "Invalid token", missing_claims: "permissions"]}}
else
{:cont, result, input}
end
end
def after_validate(_, result, input), do: {:cont, result, input}
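  # Verifies the token's signature (unless `ignore_signature` is set) and then
  # validates its claims against the expected audience and required
  # permissions.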
@spec verify_and_validate_token(String.t(), String.t(), list(String.t()), boolean()) ::
{:ok, Joken.claims()} | {:error, atom | Keyword.t()}
def verify_and_validate_token(token, audience, required_permissions, ignore_signature) do
context = %{audience: audience, required_permissions: required_permissions}
if ignore_signature do
validate_token(token, context)
else
verify_and_validate(token, __default_signer__(), context)
end
end
@doc """
Returns the list of permissions held by a token.
In case of missing permissions claim or malformed token it defaults to an empty list.
Note that this function does not verify the signature of the token.
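  ## Example

      # Illustrative; `token` is a hypothetical JWT whose payload contains
      # {"permissions": ["read:users"]}:
      PrimaAuth0Ex.Token.peek_permissions(token)
      #=> ["read:users"]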
"""
@spec peek_permissions(String.t()) :: [String.t()]
def peek_permissions(token) do
with {:ok, claims} <- Joken.peek_claims(token),
permissions <- Map.get(claims, "permissions", []) do
permissions
else
_any_error -> []
end
end
defp validate_token(token, context) do
with {:ok, claims} <- Joken.peek_claims(token),
do: validate(claims, context)
end
defp issuer, do: Application.fetch_env!(:prima_auth0_ex, :server)[:issuer]
defp validate_audience(token_audience, _claims, context) do
expected_audience = context[:audience]
unless expected_audience do
raise ArgumentError, "It is required to set an expected audience in order to validate tokens"
end
do_validate_audience(token_audience, expected_audience)
end
defp do_validate_audience(found, expected) when is_list(found), do: expected in found
defp do_validate_audience(found, expected), do: found == expected
defp validate_permissions(token_permissions, _claims, context) do
required_permissions = context[:required_permissions]
Enum.all?(required_permissions, &(&1 in token_permissions))
end
defp missing_required_permissions_claim?(claims, context) do
permissions_required? =
context
|> Map.get(:required_permissions, [])
|> Enum.count()
|> Kernel.>(0)
permissions_required? and not Map.has_key?(claims, "permissions")
end
end
|
lib/prima_auth0_ex/token.ex
| 0.830044
| 0.515864
|
token.ex
|
starcoder
|
defmodule Appsignal.Instrumentation.Decorators do
@moduledoc false
require Appsignal.Utils
@span Appsignal.Utils.compile_env(:appsignal, :appsignal_span, Appsignal.Span)
use Decorator.Define,
instrument: 0,
instrument: 1,
transaction: 0,
transaction: 1,
transaction_event: 0,
transaction_event: 1,
channel_action: 0
import Appsignal.Utils, only: [module_name: 1]
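  # Usage sketch (illustrative): decorate a function to instrument it, e.g.
  #
  #     use Appsignal.Instrumentation.Decorators
  #
  #     @decorate transaction()
  #     def perform(args), do: do_perform(args)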
def instrument(namespace, body, context) when is_atom(namespace) do
namespace
|> Atom.to_string()
|> instrument(body, context)
end
def instrument(namespace, body, context) when is_binary(namespace) do
do_instrument(body, Map.put(context, :namespace, namespace))
end
def instrument(body, context) do
do_instrument(body, context)
end
defp do_instrument(body, %{module: module, name: name, arity: arity, namespace: namespace}) do
quote do
Appsignal.Instrumentation.instrument(
"#{module_name(unquote(module))}.#{unquote(name)}_#{unquote(arity)}",
fn span ->
_ = unquote(@span).set_namespace(span, unquote(namespace))
unquote(body)
end
)
end
end
defp do_instrument(body, %{module: module, name: name, namespace: namespace}) do
quote do
Appsignal.Instrumentation.instrument(
"#{module_name(unquote(module))}.#{unquote(name)}",
fn span ->
_ = unquote(@span).set_namespace(span, unquote(namespace))
unquote(body)
end
)
end
end
defp do_instrument(body, %{module: module, name: name, arity: arity, category: category}) do
quote do
Appsignal.Instrumentation.instrument(
"#{module_name(unquote(module))}.#{unquote(name)}_#{unquote(arity)}",
unquote(category),
fn -> unquote(body) end
)
end
end
defp do_instrument(body, %{module: module, name: name, arity: arity}) do
quote do
Appsignal.Instrumentation.instrument(
"#{module_name(unquote(module))}.#{unquote(name)}_#{unquote(arity)}",
fn -> unquote(body) end
)
end
end
defp do_instrument(body, %{module: module, name: name}) do
quote do
Appsignal.Instrumentation.instrument(
"#{module_name(unquote(module))}.#{unquote(name)}",
fn -> unquote(body) end
)
end
end
def transaction(body, context) do
transaction("background_job", body, context)
end
def transaction(namespace, body, context) when is_atom(namespace) do
namespace
|> Atom.to_string()
|> transaction(body, context)
end
def transaction(namespace, body, %{module: module, name: name, arity: arity})
when is_binary(namespace) do
quote do
Appsignal.Instrumentation.instrument_root(
unquote(namespace),
"#{module_name(unquote(module))}.#{unquote(name)}_#{unquote(arity)}",
fn -> unquote(body) end
)
end
end
def transaction_event(body, context) do
instrument(body, context)
end
def transaction_event(category, body, context) do
do_instrument(body, Map.put(context, :category, category))
end
def channel_action(body, %{module: module, args: [action, _payload, _socket]}) do
quote do
Appsignal.Instrumentation.instrument_root(
"channel",
"#{module_name(unquote(module))}.#{unquote(action)}",
fn -> unquote(body) end
)
end
end
end
|
lib/appsignal/instrumentation/decorators.ex
| 0.528777
| 0.580709
|
decorators.ex
|
starcoder
|