code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule ExZipper.Zipper.Editing do
  @moduledoc """
  Editing operations for zippers: replacing the focus, inserting siblings
  and children, and removing nodes.
  """
  alias ExZipper.Zipper

  @doc """
  Replaces the current focus with the node passed as the second argument.

  ## Examples
      iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
      iex> zipper = Zipper.down(zipper)
      iex> Zipper.node(zipper)
      1
      iex> zipper = Zipper.replace(zipper, 10)
      iex> Zipper.node(zipper)
      10
  """
  @spec replace(Zipper.t, any()) :: Zipper.t
  def replace(%Zipper{} = zipper, new_focus), do: %{zipper | focus: new_focus}

  @doc """
  Replaces the current focus with the result of applying the given function
  to the current focus.

  ## Examples
      iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
      iex> zipper = Zipper.down(zipper)
      iex> Zipper.node(zipper)
      1
      iex> zipper = Zipper.edit(zipper, &(&1 * 10))
      iex> Zipper.node(zipper)
      10
  """
  @spec edit(Zipper.t, (any() -> any())) :: Zipper.t
  def edit(%Zipper{focus: focus} = zipper, func) do
    replace(zipper, func.(focus))
  end

  @doc """
  Inserts a new node as a new sibling to the immediate left of the current
  focus. Does not change focus. Returns an error if called on the root.

  ## Examples
      iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
      iex> Zipper.insert_left(zipper, 0)
      {:error, :insert_left_of_root}
      iex> zipper |> Zipper.down |> Zipper.insert_left(0) |> Zipper.root |> Zipper.node
      [0,1,[],[2,3,[4,5]]]
  """
  @spec insert_left(Zipper.t, any()) :: Zipper.maybe_zipper
  # The root has no crumbs, hence no place for a left sibling.
  def insert_left(%Zipper{crumbs: nil}, _node), do: {:error, :insert_left_of_root}

  def insert_left(%Zipper{crumbs: crumbs} = zipper, node) do
    # Prepend to the left-sibling list kept in the breadcrumb trail.
    %{zipper | crumbs: %{crumbs | left: [node | crumbs.left]}}
  end

  @doc """
  Inserts a new node as a new sibling to the immediate right of the current
  focus. Does not change focus. Returns an error if called on the root.

  ## Examples
      iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
      iex> Zipper.insert_right(zipper, 0)
      {:error, :insert_right_of_root}
      iex> zipper |> Zipper.down |> Zipper.insert_right(0) |> Zipper.root |> Zipper.node
      [1,0,[],[2,3,[4,5]]]
  """
  @spec insert_right(Zipper.t, any()) :: Zipper.maybe_zipper
  # The root has no crumbs, hence no place for a right sibling.
  def insert_right(%Zipper{crumbs: nil}, _node), do: {:error, :insert_right_of_root}

  def insert_right(%Zipper{crumbs: crumbs} = zipper, node) do
    # Prepend to the right-sibling list kept in the breadcrumb trail.
    %{zipper | crumbs: %{crumbs | right: [node | crumbs.right]}}
  end

  @doc """
  Inserts a child as the leftmost child of the current focus. Returns an error
  if called on a leaf.

  ## Examples
      iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
      iex> zipper |> Zipper.insert_child(6) |> Zipper.node
      [6,1,[],[2,3,[4,5]]]
      iex> zipper |> Zipper.down |> Zipper.insert_child(6)
      {:error, :insert_child_of_leaf}
  """
  @spec insert_child(Zipper.t, any()) :: Zipper.maybe_zipper
  def insert_child(%Zipper{} = zipper, new_child) do
    if Zipper.branch?(zipper) do
      children = [new_child | Zipper.children(zipper)]
      %{zipper | focus: Zipper.make_node(zipper, zipper.focus, children)}
    else
      {:error, :insert_child_of_leaf}
    end
  end

  @doc """
  Appends a child as the rightmost child of the current focus. Returns an
  error if called on a leaf.

  ## Examples
      iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
      iex> zipper |> Zipper.append_child(6) |> Zipper.node
      [1,[],[2,3,[4,5]],6]
      iex> zipper |> Zipper.down |> Zipper.append_child(6)
      {:error, :append_child_of_leaf}
  """
  @spec append_child(Zipper.t, any()) :: Zipper.maybe_zipper
  def append_child(%Zipper{} = zipper, new_child) do
    if Zipper.branch?(zipper) do
      children = Zipper.children(zipper) ++ [new_child]
      %{zipper | focus: Zipper.make_node(zipper, zipper.focus, children)}
    else
      {:error, :append_child_of_leaf}
    end
  end

  @doc """
  Removes the current focus from the zipper, moving focus to the node previous
  to the current focus in a depth-first walk. Will return an error if called
  on the root.

  ## Examples
      iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
      iex> Zipper.remove(zipper)
      {:error, :remove_root}
      iex> zipper |> Zipper.down |> Zipper.remove |> Zipper.node
      [[],[2,3,[4,5]]]
  """
  @spec remove(Zipper.t) :: Zipper.maybe_zipper
  def remove(%Zipper{crumbs: nil}), do: {:error, :remove_root}

  def remove(%Zipper{} = zipper) do
    case Zipper.left(zipper) do
      {:error, _reason} ->
        # No left sibling: focus moves up to the parent, whose child list
        # loses its (former) first element.
        parent = Zipper.up(zipper)
        [_removed | remaining] = Zipper.children(parent)
        %{parent | focus: Zipper.make_node(zipper, parent.focus, remaining)}

      left_sibling ->
        # A left sibling exists: focus moves there, and the removed node is
        # dropped from that sibling's right-hand crumb list.
        [_removed | remaining_right] = left_sibling.crumbs.right
        %{left_sibling | crumbs: %{left_sibling.crumbs | right: remaining_right}}
    end
  end
end
|
lib/ex_zipper/zipper/editing.ex
| 0.761627
| 0.461927
|
editing.ex
|
starcoder
|
defmodule ExUnit.Case do
  @moduledoc """
  This module is meant to be used in other modules
  as a way to configure and prepare them for testing.

  When used, it allows the following options:

  * :async - configure Elixir to run that specific test case
    in parallel with others. Must be used for performance
    when your test cases do not change any global state;

  ## Callbacks

  This module defines four callbacks. `setup_all()` and
  `teardown_all()` which are executed before and after
  all tests respectively and `setup(test)` and `teardown(test)`
  which are executed before and after each test, receiving
  the test name as argument.

  ## Examples

      defmodule AssertionTest do
        use ExUnit.Case, async: true

        def test_always_pass do
          assert true
        end
      end
  """

  # NOTE(review): this module uses pre-1.0 Elixir syntax (`//` for default
  # arguments and `def name, args, guards, do: body` for dynamically named
  # definitions); it will not compile on modern Elixir compilers.
  @doc false
  defmacro __using__(opts // []) do
    # Register the calling module with the ExUnit server at compile time,
    # choosing the async or the sync pool based on the :async option.
    if Keyword.get(opts, :async, false) do
      ExUnit.Server.add_async_case(__CALLER__.module)
    else
      ExUnit.Server.add_sync_case(__CALLER__.module)
    end
    # Inject assertion helpers plus overridable no-op setup/teardown hooks
    # into the using module.
    quote do
      import ExUnit.Assertions
      import ExUnit.Case
      def setup(_), do: :ok
      def teardown(_), do: :ok
      def setup_all, do: :ok
      def teardown_all, do: :ok
      defoverridable [setup: 1, teardown: 1, setup_all: 0, teardown_all: 0]
    end
  end

  @doc """
  Provides a convenient macro that allows a test to be
  defined with a string. This macro automatically inserts
  the atom :ok as the last line of the test. That said,
  a passing test always returns :ok, but, more important,
  it forces Elixir to not tail call optimize the test and
  therefore avoiding hiding lines from the backtrace.

  ## Examples

      test "true is equal to true" do
        assert true == true
      end
  """
  defmacro test(message, contents) do
    # Wrap the test body so it always ends in :ok (see @doc above for why).
    contents =
      case contents do
        [do: block] ->
          # NOTE(review): this branch injects `unquote(contents)` — the whole
          # `[do: block]` keyword list — rather than `unquote(block)`. The
          # block still executes (as the value of the keyword list entry), but
          # this looks like it was intended to be `unquote(block)`; confirm
          # against upstream history.
          quote do
            unquote(contents)
            :ok
          end
        _ ->
          quote do
            try(unquote(contents))
            :ok
          end
      end
    quote do
      # Derive the generated function name from the message: binaries become
      # :"test <message>", other terms become :"test_<message>".
      message = unquote(message)
      message = if is_binary(message) do
        :"test #{message}"
      else
        :"test_#{message}"
      end
      # Pre-1.0 dynamic definition: defines a function whose name is the
      # runtime value of `message`, with no args and no guards.
      def message, [], [], do: unquote(contents)
    end
  end
end
|
lib/ex_unit/lib/ex_unit/case.ex
| 0.836855
| 0.541227
|
case.ex
|
starcoder
|
defmodule XUtil.Map do
  @moduledoc """
  Syntactic sugar for working with maps.
  """

  @doc """
  Builds a one-argument function that looks up `key` in the map it receives,
  raising if the key is absent. Useful for composing with Enum algorithms.

  Examples:
      iex(1)> [%{a: 2, b: 1}, %{a: 3, b: 2}, %{a: 1, b: 3}] |> Enum.sort_by(XUtil.Map.select_key(:a))
      [%{a: 1, b: 3}, %{a: 2, b: 1}, %{a: 3, b: 2}]
      iex(1)> [%{a: 2, b: 1}, %{a: 3, b: 2}, %{a: 1, b: 3}] |> Enum.map(XUtil.Map.select_key(:a))
      [2, 3, 1]
  """
  def select_key(key) when is_atom(key) do
    fn map -> Map.fetch!(map, key) end
  end

  @doc """
  Builds a one-argument predicate that checks whether the value stored under
  `key` equals `sought_val`. Useful for composing with Enum algorithms.

  Examples:
      iex(1)> [%{a: 1, b: 3}, %{a: 3, b: 1}, %{a: 3, b: 2}] |> Enum.filter(XUtil.Map.key_equals(:a, 3))
      [%{a: 3, b: 1}, %{a: 3, b: 2}]
  """
  def key_equals(key, sought_val) when is_atom(key) do
    fn map -> Map.fetch!(map, key) == sought_val end
  end

  @doc """
  Applies `updater` to every value in the map, leaving keys untouched.

  Examples:
      iex(1)> XUtil.Map.transform_values(%{a: 1, b: 2, c: 3, d: 4}, &(&1 * &1))
      %{a: 1, b: 4, c: 9, d: 16}
      iex(1)> XUtil.Map.transform_values(%{a: 1, b: 2, c: 3, d: 4}, &Integer.to_string/1)
      %{a: "1", b: "2", c: "3", d: "4"}
  """
  def transform_values(%{} = m, updater) when is_function(updater) do
    for {key, val} <- m, into: %{}, do: {key, updater.(val)}
  end

  @doc """
  Applies `updater` to every key in the map, leaving the associated values
  untouched. Duplicated keys are removed; the latest one prevails.

  Examples:
      iex(1)> XUtil.Map.transform_keys(%{"a" => 1, "b" => 2, "c" => 3}, &String.to_atom/1)
      %{a: 1, b: 2, c: 3}
      iex(1)> XUtil.Map.transform_keys(%{"a" => 1, "b" => 2, "c" => 3}, fn _ -> "foo" end)
      %{"foo" => 3}
  """
  def transform_keys(%{} = m, updater) when is_function(updater) do
    for {key, val} <- m, into: %{}, do: {updater.(key), val}
  end

  @doc """
  Filters based on the {key, value} pairs in the map, returning a new map
  containing only the pairs for which `filterer` returned a truthy value.

  Examples:
      iex(1)> XUtil.Map.filter(%{a: 1, b: 2, c: 3, d: 4}, fn {_k, v} -> rem(v, 2) == 0 end)
      %{b: 2, d: 4}
      iex(1)> XUtil.Map.filter(%{a: 1, b: 2, c: 3, d: 4}, fn {k, _v} -> k == :a or k == :b end)
      %{a: 1, b: 2}
  """
  def filter(%{} = m, filterer) when is_function(filterer) do
    for pair <- m, filterer.(pair), into: %{}, do: pair
  end

  @doc """
  Filters based solely on the values: an entry is kept when `filterer`
  returns a truthy value for its value.

  Examples:
      iex(1)> XUtil.Map.filter_values(%{a: 1, b: 2, c: 3, d: 4}, fn v -> rem(v, 2) == 0 end)
      %{b: 2, d: 4}
  """
  def filter_values(%{} = m, filterer) when is_function(filterer) do
    filter(m, &filterer.(elem(&1, 1)))
  end

  @doc """
  Filters based solely on the keys: an entry is kept when `filterer`
  returns a truthy value for its key.

  Examples:
      iex(1)> XUtil.Map.filter_keys(%{a: 1, b: 2, c: 3, d: 4}, fn k -> k == :a or k == :b end)
      %{a: 1, b: 2}
  """
  def filter_keys(%{} = m, filterer) when is_function(filterer) do
    filter(m, &filterer.(elem(&1, 0)))
  end

  @doc """
  Opposite of filter: drops any key-value pairs for which `rejecter`
  returns a truthy value.

  Examples:
      iex(1)> XUtil.Map.reject(%{a: 1, b: 2, c: 3, d: 4}, fn {_k, v} -> rem(v, 2) == 0 end)
      %{a: 1, c: 3}
      iex(1)> XUtil.Map.reject(%{a: 1, b: 2, c: 3, d: 4}, fn {k, _v} -> k == :a or k == :b end)
      %{c: 3, d: 4}
  """
  def reject(%{} = m, rejecter) when is_function(rejecter) do
    # `!` (rather than `not`) mirrors Enum.reject's truthiness semantics.
    for pair <- m, !rejecter.(pair), into: %{}, do: pair
  end

  @doc """
  Rejects based solely on the values.

  Examples:
      iex(1)> XUtil.Map.reject_values(%{a: 1, b: 2, c: 3, d: 4}, fn v -> rem(v, 2) == 0 end)
      %{a: 1, c: 3}
  """
  def reject_values(%{} = m, rejecter) when is_function(rejecter) do
    reject(m, &rejecter.(elem(&1, 1)))
  end

  @doc """
  Rejects based solely on the keys.

  Examples:
      iex(1)> XUtil.Map.reject_keys(%{a: 1, b: 2, c: 3, d: 4}, fn k -> k == :a or k == :b end)
      %{c: 3, d: 4}
  """
  def reject_keys(%{} = m, rejecter) when is_function(rejecter) do
    reject(m, &rejecter.(elem(&1, 0)))
  end

  @doc "nil if the value doesn't exist at all in the map, otherwise the key for the first matching value we find"
  def key_for_value(%{} = m, val) do
    Enum.reduce_while(m, nil, fn
      # Halt on the first entry whose value compares equal (==, so 1 matches 1.0).
      {key, v}, _acc when v == val -> {:halt, key}
      _pair, acc -> {:cont, acc}
    end)
  end
end
|
lib/x_util/map.ex
| 0.801858
| 0.721204
|
map.ex
|
starcoder
|
defmodule VegaLite.Export do
  @moduledoc """
  Various export methods for a `VegaLite` specification.
  """
  alias VegaLite.Utils

  @doc """
  Saves a `VegaLite` specification to file in one of
  the supported formats.

  ## Options

    * `:format` - the format to export the graphic as,
      must be either of: `:json`, `:html`. By default
      the format is inferred from the file extension.
  """
  @spec save!(VegaLite.t(), binary(), keyword()) :: :ok
  def save!(vl, path, opts \\ []) do
    # Only infer from the extension when :format was not given explicitly.
    format = Keyword.get_lazy(opts, :format, fn -> format_from_path(path) end)
    File.write!(path, render(vl, format))
  end

  # Derives the export format atom from the file extension of `path`.
  defp format_from_path(path) do
    path |> Path.extname() |> String.trim_leading(".") |> String.to_existing_atom()
  end

  # Renders the specification in the requested format, raising on anything
  # other than :json or :html.
  defp render(vl, :json), do: to_json(vl)
  defp render(vl, :html), do: to_html(vl)

  defp render(_vl, format) do
    raise ArgumentError,
          "unsupported export format, expected :json or :html, got: #{inspect(format)}"
  end

  @doc """
  Returns the underlying Vega-Lite specification as JSON.
  """
  @spec to_json(VegaLite.t()) :: String.t()
  def to_json(vl) do
    # Jason is an optional dependency; fail loudly if it is missing.
    Utils.assert_jason!("to_json/1")

    vl
    |> VegaLite.to_spec()
    |> Jason.encode!()
  end

  @doc """
  Builds an HTML page that renders the given graphic.

  The HTML page loads necessary JavaScript dependencies from a CDN
  and then renders the graphic in a root element.
  """
  @spec to_html(VegaLite.t()) :: binary()
  def to_html(vl) do
    json = to_json(vl)

    """
    <!DOCTYPE html>
    <html lang="en">
    <head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Vega-Lite graphic</title>
    <script src="https://cdn.jsdelivr.net/npm/vega@5.20.2"></script>
    <script src="https://cdn.jsdelivr.net/npm/vega-lite@5.1.0"></script>
    <script src="https://cdn.jsdelivr.net/npm/vega-embed@6.17.0"></script>
    </head>
    <body>
    <div id="graphic"></div>
    <script type="text/javascript">
    var spec = JSON.parse("#{escape_double_quotes(json)}");
    vegaEmbed("#graphic", spec);
    </script>
    </body>
    </html>
    """
  end

  # The JSON is embedded inside a double-quoted JS string literal, so its
  # double quotes must be backslash-escaped.
  defp escape_double_quotes(json) do
    String.replace(json, "\"", "\\\"")
  end
end
|
lib/vega_lite/export.ex
| 0.854202
| 0.455501
|
export.ex
|
starcoder
|
defmodule Starship.Reactor.Websocket.Frame do
  @moduledoc """
  A websocket frame helper, used to parse and generate websocket frames.

  Frames are matched bit by bit against the layout: 1 FIN bit, 3 reserved
  bits (skipped), 4 opcode bits, 1 MASK bit, 7+ payload-length bits, then
  the optional 32-bit masking key and the payload itself.
  """
  import Bitwise, only: [bxor: 2]

  @typep fin_bit :: :fin | :not_fin
  @typep mask_bit :: :masked | :unmasked
  @typep payload :: bitstring | binary | nil

  @typedoc "A websocket opcode"
  @type opcode :: :continuation | :text | :binary | :close | :ping | :pong

  @typedoc "A properly parsed websocket frame"
  @type frame :: {:ok, fin_bit, mask_bit, opcode, payload}

  @typep reason :: :no_fin_and_opcode_match | :unmasked_frame | :not_implemented_yet

  @typedoc "Errors that occur when parsing a websocket frame"
  @type parse_error :: {:error, reason}

  ## MASK Bit
  # Single-bit flag: set when the payload is XOR-masked with a 4-byte key.
  @unmasked <<0::size(1)>>
  @masked <<1::size(1)>>

  ## FIN Bit
  # Single-bit flag: set on the final fragment of a message.
  @not_fin <<0::size(1)>>
  @fin <<1::size(1)>>

  ## Opcodes
  # Each opcode is a 4-bit value written out bit by bit.
  # 0x0
  @continuation <<0::size(1), 0::size(1), 0::size(1), 0::size(1)>>
  # 0x1
  @text <<0::size(1), 0::size(1), 0::size(1), 1::size(1)>>
  # 0x2
  @binary <<0::size(1), 0::size(1), 1::size(1), 0::size(1)>>
  # 0x8
  @close <<1::size(1), 0::size(1), 0::size(1), 0::size(1)>>
  # 0x9
  @ping <<1::size(1), 0::size(1), 0::size(1), 1::size(1)>>
  # 0xA
  @pong <<1::size(1), 0::size(1), 1::size(1), 0::size(1)>>

  @doc """
  Parses a websocket frame into a readable payload (bitstring, binary, or nil values).

  The `opcode` argument is only used for continuation frames, where it names
  the opcode of the message being continued.
  """
  @spec parse_frame(binary, opcode) :: frame | parse_error
  # Final text frame: unmask and return the payload.
  def parse_frame(<<@fin::bits, _::bits-size(3), @text::bits, rest::bits>> = _frame, _opcode) do
    case parse(rest) do
      {:ok, payload} -> {:ok, :fin, :masked, :text, payload}
      error -> error
    end
  end

  def parse_frame(<<@fin::bits, _::bits-size(3), @binary::bits, _rest::bits>>, _opcode) do
    # credo:disable-for-next-line
    # TODO: Parse Final Binary Frame
    {:error, :not_implemented_yet}
  end

  # Final close frame: unmask and return the (status code + reason) payload.
  def parse_frame(<<@fin::bits, _::bits-size(3), @close::bits, rest::bits>>, _opcode) do
    case parse(rest) do
      {:ok, payload} -> {:ok, :fin, :masked, :close, payload}
      error -> error
    end
  end

  def parse_frame(<<@fin::bits, _::bits-size(3), @ping::bits, rest::bits>>, _opcode) do
    # NOTE(review): unlike the other clauses, this returns `rest` (length byte,
    # masking key and still-masked payload) without unmasking it — confirm
    # whether callers expect the raw remainder here.
    if masked?(rest) do
      {:ok, :fin, :masked, :ping, rest}
    else
      {:error, :unmasked_frame}
    end
  end

  # Final pong frame: the payload is discarded (returned as nil).
  def parse_frame(<<@fin::bits, _::bits-size(3), @pong::bits, rest::bits>>, _opcode) do
    if masked?(rest) do
      {:ok, :fin, :masked, :pong, nil}
    else
      {:error, :unmasked_frame}
    end
  end

  # Final continuation frame: tagged with the caller-supplied message opcode.
  def parse_frame(<<@fin::bits, _::bits-size(3), @continuation::bits, rest::bits>>, opcode) do
    case parse(rest) do
      {:ok, payload} -> {:ok, :fin, :masked, opcode, payload}
      error -> error
    end
  end

  # Non-final continuation frame.
  def parse_frame(<<@not_fin::bits, _::bits-size(3), @continuation::bits, rest::bits>>, opcode) do
    case parse(rest) do
      {:ok, payload} -> {:ok, :not_fin, :masked, opcode, payload}
      error -> error
    end
  end

  # Non-final text frame (first fragment of a fragmented text message).
  def parse_frame(<<@not_fin::bits, _::bits-size(3), @text::bits, rest::bits>>, _opcode) do
    case parse(rest) do
      {:ok, payload} -> {:ok, :not_fin, :masked, :text, payload}
      error -> error
    end
  end

  def parse_frame(<<@not_fin::bits, _::bits-size(3), @binary::bits, _rest::bits>>, _opcode) do
    # credo:disable-for-next-line
    # TODO: Parse Not Final Binary Frame
    {:error, :not_implemented_yet}
  end

  # Anything else (unknown opcode / FIN combination) is rejected.
  def parse_frame(_frame, _opcode), do: {:error, :no_fin_and_opcode_match}

  # Parses everything after the opcode: mask bit, payload length, masking key,
  # and the masked payload. Only masked frames are accepted.
  @spec parse(bitstring) :: {:ok, binary} | parse_error
  defp parse(<<@masked::bits, rest::bits>> = _frame) do
    {payload_length, masked_payload} = parse_payload_length(rest)
    <<masking_key::bits-size(32), payload::bits>> = masked_payload
    {:ok, decode_payload(masking_key, payload, payload_length)}
  end

  defp parse(<<@unmasked::bits, _rest::bits>> = _frame), do: {:error, :unmasked_frame}

  # Decodes the 7-bit payload length and its extensions: 126 means the real
  # length follows as 16 bits, 127 means it follows as 64 bits, anything
  # smaller is the length itself.
  @spec parse_payload_length(bitstring) :: {non_neg_integer, bitstring}
  defp parse_payload_length(<<first_len::unsigned-integer-7, rest::bits>> = _frame) do
    case first_len do
      126 ->
        <<actual_len::unsigned-integer-16, masked_payload::bits>> = rest
        {actual_len, masked_payload}
      127 ->
        <<actual_len::unsigned-integer-64, masked_payload::bits>> = rest
        {actual_len, masked_payload}
      _ ->
        {first_len, rest}
    end
  end

  # Unmasks the payload by XOR-ing each byte with the corresponding byte of
  # the 4-byte masking key (key byte index cycles via rem(i, 4)).
  defp decode_payload(_masking_key, _payload, 0), do: <<>>

  defp decode_payload(masking_key, payload, payload_length) do
    # NOTE(review): builds the result by repeated binary concatenation; fine
    # for small control payloads, but O(n) appends for large frames.
    Enum.reduce(0..(payload_length - 1), "", fn i, decoded ->
      <<mask>> = binary_part(masking_key, rem(i, 4), 1)
      <<encoded>> = binary_part(payload, i, 1)
      decoded <> <<bxor(encoded, mask)>>
    end)
  end

  # True when the first bit after the opcode (the MASK bit) is set.
  @spec masked?(binary) :: boolean
  defp masked?(<<@masked::bits, _rest::bits>> = _frame), do: true
  defp masked?(<<@unmasked::bits, _rest::bits>> = _frame), do: false

  # Builds an unmasked server-to-client frame for the given opcode.
  # NOTE(review): the 7-bit length field is only valid for payloads up to
  # 125 bytes (126/127 are reserved for extended lengths, see
  # parse_payload_length/1) — confirm callers never exceed this.
  @spec generate_frame(binary, atom) :: binary
  def generate_frame(payload, :text) do
    <<@fin::bits, 0::size(3), @text::bits, @unmasked::bits, byte_size(payload)::size(7),
      payload::binary>>
  end

  def generate_frame(payload, :pong) do
    # NOTE(review): unlike :text and :close, this omits the mask bit and the
    # payload-length byte — verify this is intentional.
    <<@fin::bits, 0::size(3), @pong::bits, payload::binary>>
  end

  def generate_frame(payload, :close) do
    <<@fin::bits, 0::size(3), @close::bits, @unmasked::bits, byte_size(payload)::size(7),
      payload::binary>>
  end
end
|
lib/starship/reactor/websocket/frame.ex
| 0.601125
| 0.408395
|
frame.ex
|
starcoder
|
defmodule PushInfluxDB do
  @moduledoc """
  GenServer module to push channel information to InfluxDB.
  """
  use GenServer
  require Logger
  alias FSRealtime.InConnection

  def start_link(_) do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  @impl true
  def init(args) do
    {:ok, args}
  end

  @doc """
  Channels dispatcher: writes the per-campaign points and the per-leg totals
  to InfluxDB.

  Returns `{:ok, :pushed}` for a non-empty list and `{:ok, nil}` otherwise.

  ## Examples
      iex> PushInfluxDB.push_aggr_channel(
      [[count: 3, campaign_id: 1, leg_type: 1],
      [count: 3, campaign_id: 2, leg_type: 1]])
      {:ok, :pushed}
  """
  # Match non-empty lists directly instead of the O(n) `length/1 > 0` guard.
  def push_aggr_channel([_ | _] = chan_result) do
    Logger.debug("pushing series...")
    write_points(chan_result)
    # Write total to influxDB for both Legs (1: aleg, 2: bleg).
    write_total(chan_result, 1)
    write_total(chan_result, 2)
    {:ok, :pushed}
  end

  def push_aggr_channel(_) do
    {:ok, nil}
  end

  @doc """
  Builds one series point per channel row and writes the batch to InfluxDB.

  Logs the number of points on success, or the failure reason on error.

  ## Examples
      iex> PushInfluxDB.write_points(
      [[count: 3, campaign_id: 1, leg_type: 1],
      [count: 3, campaign_id: 2, leg_type: 1]])
      :ok
  """
  def write_points(chan_result) do
    series = Enum.map(chan_result, fn x -> parse_channels(x) end)

    case series |> InConnection.write(async: true, precision: :second) do
      :ok ->
        cnt = Enum.count(series)
        Logger.info("#{cnt} points")

      {:error, reason} ->
        Logger.error("error writing points - #{reason}")
    end
  end

  @doc """
  Builds an `%FSChannelsCampaignSeries{}` point from one channel-count row.

  The `host` tag comes from the `:local_host` application environment
  (assumed to be "127.0.0.1" in the example below).

  ## Examples
      iex> PushInfluxDB.parse_channels([count: 3, campaign_id: 1, leg_type: 1])
      %FSChannelsCampaignSeries{fields: %FSChannelsCampaignSeries.Fields{value: 3},
      tags: %FSChannelsCampaignSeries.Tags{campaign_id: 1, leg_type: 1, host: "127.0.0.1"},
      timestamp: nil}
  """
  def parse_channels(data) do
    serie = %FSChannelsCampaignSeries{}

    # Fill tags and fields in a single struct update.
    %{
      serie
      | tags: %{
          serie.tags
          | campaign_id: data[:campaign_id],
            leg_type: data[:leg_type],
            host: Application.fetch_env!(:fs_realtime, :local_host)
        },
        fields: %{serie.fields | value: data[:count]}
    }
  end

  @doc """
  Writes the aggregated total for one leg to InfluxDB.

  `leg_type` defines which leg to total (1: aleg, 2: bleg).

  ## Examples
      iex> PushInfluxDB.write_total([
      [count: 3, campaign_id: 1, leg_type: 1],
      [count: 2, campaign_id: 1, leg_type: 2],
      [count: 3, campaign_id: 2, leg_type: 1],
      [count: 2, campaign_id: 3, leg_type: 1]
      ], 1)
      :ok
  """
  def write_total(chan_result, leg_type \\ 1) do
    leg? = &(&1[:leg_type] == leg_type)

    # Sum the counts of the rows belonging to the requested leg.
    total_leg =
      chan_result
      |> Enum.filter(leg?)
      |> Enum.reduce(0, fn x, acc -> x[:count] + acc end)

    serie = %FSChannelsSeries{}

    serie = %{
      serie
      | tags: %{
          serie.tags
          | leg_type: leg_type,
            host: Application.fetch_env!(:fs_realtime, :local_host)
        }
    }

    serie = %{serie | fields: %{serie.fields | value: total_leg}}

    case serie |> InConnection.write(async: true, precision: :second) do
      :ok ->
        Logger.debug(fn ->
          "total: #{total_leg} on leg: #{leg_type}"
        end)

      _ ->
        Logger.error("error writing total")
    end
  end

  @doc """
  Asynchronously pushes channel information via the GenServer.
  """
  def async_push_aggr_channel(result) do
    GenServer.cast(__MODULE__, {:push_aggr_channel, result})
  end

  @impl true
  def handle_cast({:push_aggr_channel, result}, state) do
    # Assert the push succeeded; a failure crashes this server (let it crash).
    {:ok, _} = push_aggr_channel(result)
    {:noreply, state}
  end
end
|
lib/push_influxdb.ex
| 0.835953
| 0.400544
|
push_influxdb.ex
|
starcoder
|
defmodule Anise.SubscriptionCase do
  @moduledoc """
  Conveniences for testing Absinthe subscriptions.

  # Usage

  ## Setup

  You need to invoke the `use` macro and pass the required options.

  ```elixir
  @endpoint MyAppWeb.Endpoint
  use Anise.SubscriptionCase,
    schema: MyAppWeb.Schema,
    socket: MyAppWeb.UserSocket
  ```

  The example above is from an application-specific SubscriptionCase, like
  the ones that Phoenix generates.

  ## Usage

  ```elixir
  test "sub with fulfilment", %{socket: socket, conn: conn} do
    assert %{
      payload: %{subscriptionId: sub_id},
      status: :ok
    } = subscribe(socket, @subscription)
    graphql(conn, "/api", @mutation, %{email: "<EMAIL>", name: "Boris"})
    assert subscription_fulfilment = %{
      result: %{data: %{"userAdded" => %{"name" => "Boris"}}}
    }
  end
  ```
  """
  use ExUnit.CaseTemplate
  alias Absinthe.Phoenix.SubscriptionTest

  # Evaluated at compile time of this module; ensures ExUnit is running.
  Application.ensure_all_started(:ex_unit)

  # ExUnit.CaseTemplate hook: the quoted block below is injected into every
  # module that does `use Anise.SubscriptionCase, ...`.
  using(opts) do
    quote do
      import Plug.Conn
      import Phoenix.ConnTest
      import Phoenix.ChannelTest
      # validates schema and import base absinthe testings functions
      use Absinthe.Phoenix.SubscriptionTest,
        schema: Keyword.get(unquote(opts), :schema)
      setup do
        # connects to socket
        {:ok, socket} =
          Phoenix.ChannelTest.connect(
            Keyword.get(unquote(opts), :socket),
            Keyword.get(unquote(opts), :socket_params) || %{}
          )
        # Join socket
        {:ok, socket} = Absinthe.Phoenix.SubscriptionTest.join_absinthe(socket)
        {:ok, socket: socket, conn: Phoenix.ConnTest.build_conn()}
      end
      import unquote(__MODULE__)
    end
  end

  import Phoenix.ChannelTest

  # Pushes a subscription document over the socket and asserts the server
  # replied :ok; returns the reply payload.
  @doc false
  def subscribe(socket, subscription_query) do
    ref = SubscriptionTest.push_doc(socket, subscription_query)
    assert_reply(ref, :ok, _)
  end

  @doc """
  When the subscription is fulfilled, this receives the pushed data.

  ## WARNING

  Payloads are patterns — don't forget to pin variables!

  ### Examples

  Good

  ```elixir
  expected = %{result: %{data: %{"userAdded" => %{"name" => "Boris"}}}}
  assert_subscription_fulfilment fulfilment
  assert fulfilment = expected
  ```

  Bad

  ```elixir
  expected = %{result: %{data: %{"userAdded" => %{"name" => "Boris"}}}}
  assert_subscription_fulfilment expected
  ```
  """
  defmacro assert_subscription_fulfilment(expected) do
    quote do
      assert_push("subscription:data", unquote(expected))
    end
  end

  @doc """
  Same as `assert_subscription_fulfilment/1` but refutes the push instead.
  """
  defmacro refute_subscription_fulfilment(expected) do
    quote do
      refute_push("subscription:data", unquote(expected))
    end
  end
end
|
lib/subscription_case.ex
| 0.831588
| 0.536434
|
subscription_case.ex
|
starcoder
|
defmodule Makeup.Styles.HTML.StyleMap do
  @moduledoc """
  This module contains all styles, and facilities to map style names (binaries or atoms) to styles.

  Style names are of the form `<name>_style`: the style name `:abap`, for
  example, refers to `abap_style/0`. The supported names include `:abap`,
  `:algol`, `:algol_nu` and every other style listed below.
  """
  alias Makeup.Styles.HTML

  # %% Start Pygments %%
  # One zero-arity accessor per Pygments style, plus lookup maps (by binary
  # and by atom) further down. This section is generated from Pygments.
  @doc """
  The *abap* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#abap).
  """
  def abap_style, do: HTML.AbapStyle.style()

  @doc """
  The *algol* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#algol).
  """
  def algol_style, do: HTML.AlgolStyle.style()

  @doc """
  The *algol_nu* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#algol_nu).
  """
  def algol_nu_style, do: HTML.Algol_NuStyle.style()

  @doc """
  The *arduino* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#arduino).
  """
  def arduino_style, do: HTML.ArduinoStyle.style()

  @doc """
  The *autumn* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#autumn).
  """
  def autumn_style, do: HTML.AutumnStyle.style()

  @doc """
  The *borland* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#borland).
  """
  def borland_style, do: HTML.BorlandStyle.style()

  @doc """
  The *bw* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#bw).
  """
  def bw_style, do: HTML.BlackWhiteStyle.style()

  @doc """
  The *colorful* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#colorful).
  """
  def colorful_style, do: HTML.ColorfulStyle.style()

  @doc """
  The *default* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#default).
  """
  def default_style, do: HTML.DefaultStyle.style()

  @doc """
  The *emacs* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#emacs).
  """
  def emacs_style, do: HTML.EmacsStyle.style()

  @doc """
  The *friendly* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#friendly).
  """
  def friendly_style, do: HTML.FriendlyStyle.style()

  @doc """
  The *fruity* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#fruity).
  """
  def fruity_style, do: HTML.FruityStyle.style()

  @doc """
  The *igor* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#igor).
  """
  def igor_style, do: HTML.IgorStyle.style()

  @doc """
  The *lovelace* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#lovelace).
  """
  def lovelace_style, do: HTML.LovelaceStyle.style()

  @doc """
  The *manni* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#manni).
  """
  def manni_style, do: HTML.ManniStyle.style()

  @doc """
  The *monokai* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#monokai).
  """
  def monokai_style, do: HTML.MonokaiStyle.style()

  @doc """
  The *murphy* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#murphy).
  """
  def murphy_style, do: HTML.MurphyStyle.style()

  @doc """
  The *native* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#native).
  """
  def native_style, do: HTML.NativeStyle.style()

  @doc """
  The *paraiso_dark* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#paraiso_dark).
  """
  def paraiso_dark_style, do: HTML.ParaisoDarkStyle.style()

  @doc """
  The *paraiso_light* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#paraiso_light).
  """
  def paraiso_light_style, do: HTML.ParaisoLightStyle.style()

  @doc """
  The *pastie* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#pastie).
  """
  def pastie_style, do: HTML.PastieStyle.style()

  @doc """
  The *perldoc* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#perldoc).
  """
  def perldoc_style, do: HTML.PerldocStyle.style()

  @doc """
  The *rainbow_dash* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#rainbow_dash).
  """
  def rainbow_dash_style, do: HTML.RainbowDashStyle.style()

  @doc """
  The *rrt* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#rrt).
  """
  def rrt_style, do: HTML.RrtStyle.style()

  @doc """
  The *tango* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#tango).
  """
  def tango_style, do: HTML.TangoStyle.style()

  @doc """
  The *trac* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#trac).
  """
  def trac_style, do: HTML.TracStyle.style()

  @doc """
  The *vim* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#vim).
  """
  def vim_style, do: HTML.VimStyle.style()

  @doc """
  The *vs* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#vs).
  """
  def vs_style, do: HTML.VisualStudioStyle.style()

  @doc """
  The *xcode* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#xcode).
  """
  def xcode_style, do: HTML.XcodeStyle.style()

  # All styles
  # Compile-time lookup maps: binary name -> style, built once at compile
  # time (each HTML.*Style.style() call is evaluated during compilation).
  @pygments_style_map_binaries %{
    "abap" => HTML.AbapStyle.style(),
    "algol" => HTML.AlgolStyle.style(),
    "algol_nu" => HTML.Algol_NuStyle.style(),
    "arduino" => HTML.ArduinoStyle.style(),
    "autumn" => HTML.AutumnStyle.style(),
    "borland" => HTML.BorlandStyle.style(),
    "bw" => HTML.BlackWhiteStyle.style(),
    "colorful" => HTML.ColorfulStyle.style(),
    "default" => HTML.DefaultStyle.style(),
    "emacs" => HTML.EmacsStyle.style(),
    "friendly" => HTML.FriendlyStyle.style(),
    "fruity" => HTML.FruityStyle.style(),
    "igor" => HTML.IgorStyle.style(),
    "lovelace" => HTML.LovelaceStyle.style(),
    "manni" => HTML.ManniStyle.style(),
    "monokai" => HTML.MonokaiStyle.style(),
    "murphy" => HTML.MurphyStyle.style(),
    "native" => HTML.NativeStyle.style(),
    "paraiso_dark" => HTML.ParaisoDarkStyle.style(),
    "paraiso_light" => HTML.ParaisoLightStyle.style(),
    "pastie" => HTML.PastieStyle.style(),
    "perldoc" => HTML.PerldocStyle.style(),
    "rainbow_dash" => HTML.RainbowDashStyle.style(),
    "rrt" => HTML.RrtStyle.style(),
    "tango" => HTML.TangoStyle.style(),
    "trac" => HTML.TracStyle.style(),
    "vim" => HTML.VimStyle.style(),
    "vs" => HTML.VisualStudioStyle.style(),
    "xcode" => HTML.XcodeStyle.style(),
  }

  # Same map keyed by atoms instead of binaries.
  @pygments_style_map_atoms %{
    abap: HTML.AbapStyle.style(),
    algol: HTML.AlgolStyle.style(),
    algol_nu: HTML.Algol_NuStyle.style(),
    arduino: HTML.ArduinoStyle.style(),
    autumn: HTML.AutumnStyle.style(),
    borland: HTML.BorlandStyle.style(),
    bw: HTML.BlackWhiteStyle.style(),
    colorful: HTML.ColorfulStyle.style(),
    default: HTML.DefaultStyle.style(),
    emacs: HTML.EmacsStyle.style(),
    friendly: HTML.FriendlyStyle.style(),
    fruity: HTML.FruityStyle.style(),
    igor: HTML.IgorStyle.style(),
    lovelace: HTML.LovelaceStyle.style(),
    manni: HTML.ManniStyle.style(),
    monokai: HTML.MonokaiStyle.style(),
    murphy: HTML.MurphyStyle.style(),
    native: HTML.NativeStyle.style(),
    paraiso_dark: HTML.ParaisoDarkStyle.style(),
    paraiso_light: HTML.ParaisoLightStyle.style(),
    pastie: HTML.PastieStyle.style(),
    perldoc: HTML.PerldocStyle.style(),
    rainbow_dash: HTML.RainbowDashStyle.style(),
    rrt: HTML.RrtStyle.style(),
    tango: HTML.TangoStyle.style(),
    trac: HTML.TracStyle.style(),
    vim: HTML.VimStyle.style(),
    vs: HTML.VisualStudioStyle.style(),
    xcode: HTML.XcodeStyle.style(),
  }
  # %% End Pygments %%

  # Custom themes:
  # NOTE(review): samba is NOT present in the two lookup maps above, so
  # `fetch_from_atom(:samba)` / `fetch_from_string("samba")` return `:error`
  # even though `samba_style/0` exists — confirm whether that is intentional.
  @doc """
  The *samba* style, based on the tango style, but with visual distinction between
  classes and variables, and lighter punctuation.
  """
  def samba_style, do: HTML.SambaStyle.style()

  @doc """
  The style with the `name`, given as an atom. Returns `{:ok, style}` or `:error`
  """
  def fetch_from_atom(atom), do: Map.fetch(@pygments_style_map_atoms, atom)

  @doc """
  The style with the `name`, given as a binary. Returns `{:ok, style}` or `:error`
  """
  def fetch_from_string(name), do: Map.fetch(@pygments_style_map_binaries, name)

  @doc """
  The style with the `name`, given as an atom. Raises if the style doesn't exist.
  """
  def fetch_from_atom!(atom), do: Map.fetch!(@pygments_style_map_atoms, atom)

  @doc """
  The style with the `name`, given as a binary. Raises if the style doesn't exist.
  """
  def fetch_from_string!(name), do: Map.fetch!(@pygments_style_map_binaries, name)

  @doc """
  Returns all atoms that are style names.
  """
  def all_style_keys_as_atoms do
    Map.keys(@pygments_style_map_atoms)
  end

  @doc """
  Returns all binaries that are style names.
  """
  def all_style_keys_as_binaries do
    Map.keys(@pygments_style_map_binaries)
  end

  @doc """
  The complete style map, with atoms as keys.
  For the complete list, see above.
  """
  def style_map_with_atom_keys do
    @pygments_style_map_atoms
  end

  @doc """
  The complete style map, with strings as keys.
  For the complete list, see above.
  """
  def style_map_with_binary_keys do
    @pygments_style_map_binaries
  end
end
|
lib/makeup/styles/html/style_map.ex
| 0.783285
| 0.611904
|
style_map.ex
|
starcoder
|
defmodule Re.Addresses.Neighborhoods do
  @moduledoc """
  Context for neighborhoods.
  """
  import Ecto.Query

  alias Re.{
    Address,
    Addresses.District,
    Listing,
    Repo,
    Slugs
  }

  # Distinct neighborhood names taken from the addresses of active listings.
  @all_query from(
               a in Address,
               join: l in Listing,
               where: l.address_id == a.id and l.status == "active",
               select: a.neighborhood,
               distinct: a.neighborhood
             )

  # District statuses that are exposed through `districts/0`.
  @covered_for_show ["partially_covered", "covered"]

  @doc "Lists every neighborhood that has at least one active listing."
  def all, do: Repo.all(@all_query)

  @doc """
  Fetches the district record for the given address.

  Returns `{:ok, district}` or `{:error, :not_found}`.
  """
  def get_description(address) do
    District
    |> Repo.get_by(
      state_slug: address.state_slug,
      city_slug: address.city_slug,
      name_slug: address.neighborhood_slug
    )
    |> case do
      nil -> {:error, :not_found}
      district -> {:ok, district}
    end
  end

  @doc "Lists the publicly shown districts, ordered by their sort order."
  def districts do
    Repo.all(from(d in District, where: d.status in @covered_for_show, order_by: d.sort_order))
  end

  @doc "Lists the districts whose UUIDs appear in the given list."
  def districts_by_uuids(uuids \\ []) do
    Repo.all(from(d in District, where: d.uuid in ^uuids))
  end

  @doc "Fetches one district matching `params`, tagging the result."
  def get_district(params) do
    case Repo.get_by(District, params) do
      nil -> {:error, :not_found}
      district -> {:ok, district}
    end
  end

  @doc """
  Temporary mapping to find nearby neighborhood
  """
  def nearby("Botafogo"), do: "Humaitá"
  def nearby("Copacabana"), do: "Ipanema"
  def nearby("Flamengo"), do: "Laranjeiras"
  def nearby("Gávea"), do: "Leblon"
  def nearby("Humaitá"), do: "Botafogo"
  def nearby("Ipanema"), do: "Copacabana"
  def nearby("Itanhangá"), do: "São Conrado"
  def nearby("Jardim Botânico"), do: "Lagoa"
  def nearby("Lagoa"), do: "Humaitá"
  def nearby("Laranjeiras"), do: "Flamengo"
  def nearby("Leblon"), do: "Gávea"
  def nearby("São Conrado"), do: "Itanhangá"

  # True when a district with status "covered" matches the address slugs.
  defp do_is_covered(neighborhood) do
    search = [
      name_slug: neighborhood.neighborhood_slug,
      city_slug: neighborhood.city_slug,
      state_slug: neighborhood.state_slug,
      status: "covered"
    ]

    case Repo.get_by(District, search) do
      nil -> false
      _district -> true
    end
  end

  @doc "Checks whether an address (or a plain neighborhood map) is covered."
  def is_covered(%Address{} = address), do: do_is_covered(address)

  def is_covered(neighborhood) do
    neighborhood
    |> sluggify_attributes()
    |> do_is_covered()
  end

  # Derives the *_slug fields required by the coverage lookup from the plain
  # city/neighborhood/state names.
  defp sluggify_attributes(neighborhood) do
    Map.merge(neighborhood, %{
      city_slug: Slugs.sluggify(neighborhood.city),
      neighborhood_slug: Slugs.sluggify(neighborhood.neighborhood),
      state_slug: Slugs.sluggify(neighborhood.state)
    })
  end
end
|
apps/re/lib/addresses/neighborhoods.ex
| 0.704058
| 0.467757
|
neighborhoods.ex
|
starcoder
|
defmodule AdventOfCode.Day5 do
  @moduledoc """
  Advent of Code 2020, day 5: decodes boarding-pass seat encodings.

  An encoding is 7 row characters (`F`/`B`) followed by 3 column characters
  (`L`/`R`); each character halves the remaining range (binary space
  partitioning). A seat id is `row * 8 + column`.
  """

  @encoding_regex ~r/^([FB]{7})([LR]{3})$/

  # Which half of the current range each encoding character selects.
  @ranges %{
    :rows => %{
      "F" => :lower,
      "B" => :upper
    },
    :columns => %{
      "L" => :lower,
      "R" => :upper
    }
  }

  @doc """
  Resolves a list of `F`/`B` (rows) or `L`/`R` (columns) graphemes to the
  final row (0..127) or column (0..7) index.
  """
  @spec compute_range([binary], :columns | :rows) :: integer
  def compute_range(input, :rows) do
    %{:rows => encodings} = @ranges
    compute_range(input, {0, 127}, encodings)
  end

  def compute_range(input, :columns) do
    %{:columns => encodings} = @ranges
    compute_range(input, {0, 7}, encodings)
  end

  # When the input is exhausted the range has converged and its second
  # element is the selected index.
  defp compute_range([], {_, pos}, _) do
    pos
  end

  defp compute_range([encoded_pos | rest], {min, max}, encodings) do
    %{^encoded_pos => movement} = encodings

    new_pos =
      case movement do
        :upper -> {Integer.floor_div(min + max, 2), max}
        :lower -> {min, Integer.floor_div(min + max, 2)}
      end

    compute_range(rest, new_pos, encodings)
  end

  @doc """
  Scans a *sorted* list of `{row, column, id}` seats and accumulates seats
  that are missing between two occupied neighbours on the same row.
  """
  @spec find_missing_seat([{integer, integer, integer}], [{integer, integer, integer}]) :: [
          {integer, integer, integer}
        ]
  def find_missing_seat([], acc) do
    acc
  end

  def find_missing_seat([{_, _, _}], acc) do
    acc
  end

  def find_missing_seat([{row, prev_col, _} | [{row, next_col, _} | _] = cont], acc) do
    empty_col = prev_col + 1

    case empty_col == next_col do
      true ->
        find_missing_seat(cont, acc)

      false ->
        find_missing_seat(cont, [{row, empty_col, row * 8 + empty_col} | acc])
    end
  end

  # Consecutive seats lie on different rows: nothing to report here.
  def find_missing_seat([{_, _, _} | [{_, _, _} | _] = cont], acc) do
    find_missing_seat(cont, acc)
  end

  @doc """
  Solves both parts for the `day5_input` file: returns the decoded seats,
  the highest seat id, and the missing seat(s).
  """
  def day5() do
    rows_columns_ids =
      "day5_input"
      |> AdventOfCode.read_file()
      |> Enum.map(&decode_seat/1)
      # BUGFIX: malformed lines used to remain in the list as `:invalid`,
      # crashing the tuple matches in the reduce/sort below. Drop them.
      |> Enum.reject(&(&1 == :invalid))

    max_id =
      Enum.reduce(rows_columns_ids, 0, fn {_, _, act_id}, max_id ->
        max(max_id, act_id)
      end)

    missing_seat =
      rows_columns_ids
      |> Enum.sort()
      |> find_missing_seat([])

    {rows_columns_ids, max_id, missing_seat}
  end

  # Decodes one boarding-pass line into `{row, column, id}`, or `:invalid`
  # when the line does not match the expected format.
  defp decode_seat(encoding) do
    case Regex.run(@encoding_regex, encoding) do
      nil ->
        :invalid

      [_, row_encoding, column_encoding] ->
        column =
          column_encoding
          |> String.graphemes()
          |> compute_range(:columns)

        row =
          row_encoding
          |> String.graphemes()
          |> compute_range(:rows)

        {row, column, row * 8 + column}
    end
  end
end
|
lib/day5.ex
| 0.737725
| 0.407805
|
day5.ex
|
starcoder
|
defmodule Weirding.Chain do
  @moduledoc false
  # This module provides a basic markov chain for generating text. It also defines
  # a GenServer. The GenServer is designed to load the pre-built chain when the
  # weirding application boots, convert the chain from binary back into a term,
  # and store it in persistent_term.
  use GenServer

  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  # Build is called by a mix task in order to build the latest corpus into a chain
  # which is then written into a file using term_to_binary. The result maps
  # each word to the list of words observed after it (latest occurrence first).
  @doc false
  def build(corpus) do
    corpus
    |> cleanup
    |> create_wordlist(%{})
  end

  # Splits on whitespace, strips double quotes and leading dashes.
  defp cleanup(str) do
    str
    |> String.split()
    |> Enum.map(& String.replace(&1, "\"", ""))
    |> Enum.map(& String.replace_leading(&1, "-", ""))
  end

  defp create_wordlist([], wordlist), do: wordlist
  defp create_wordlist([_], wordlist), do: wordlist

  defp create_wordlist([word, next_word | rest], wordlist) do
    new_wordlist = update_in(wordlist, [word], & [next_word | &1 || []])
    create_wordlist([next_word | rest], new_wordlist)
  end

  @doc false
  # Generates `n` words of text, starting from a random word of the chain.
  def generate(n) when n > 0 do
    chain = chain()
    {word, _followers} = Enum.random(chain)
    acc = [String.capitalize(word)]
    generate(chain, word, acc, n - 1)
  end

  defp generate(_, _, acc, 0) do
    acc
    |> Enum.reverse()
    |> Enum.join(" ")
  end

  defp generate(chain, word, acc, n) do
    # BUGFIX: the original evaluated `Enum.random(chain[word])` *before*
    # matching on nil, so a word with no followers (the final word of the
    # corpus) raised Protocol.UndefinedError and the nil branch was
    # unreachable. Check the follower list first.
    case chain[word] do
      nil ->
        # No followers: grab a new random word and keep going without
        # consuming a word from the budget (matches the original intent).
        {next, _followers} = Enum.random(chain)
        generate(chain, next, [next | acc], n)

      followers ->
        next = Enum.random(followers)
        generate(chain, next, [next | acc], n - 1)
    end
  end

  @impl true
  def init(_opts) do
    file = Path.absname("priv/chain.txt", Application.app_dir(:weirding))
    term = :erlang.binary_to_term(File.read!(file))
    :persistent_term.put({Weirding, :chain}, term)
    {:ok, %{}}
  end

  defp chain, do: :persistent_term.get({Weirding, :chain})
end
|
lib/weirding/chain.ex
| 0.675122
| 0.423935
|
chain.ex
|
starcoder
|
defmodule Irateburgers.TopBurgers do
  @moduledoc """
  Read-model for querying the top rated burgers of all time.
  """

  defmodule Record do
    @moduledoc """
    This struct represents a single row in the Top-Burgers table:

        1. "Resolution Breaker", 4.5, 8 Reviews
        2. "Brooklyn Cheeseburger", 4.3, 5 Reviews
        ...
    """
    use Ecto.Schema
    alias Ecto.Changeset
    alias Irateburgers.ErrorHelpers

    @primary_key false
    embedded_schema do
      field :burger_id, :binary_id
      field :name, :string
      field :version, :integer
      field :average_rating, :float
      field :num_reviews, :integer
    end

    @type t :: %__MODULE__{}

    # Builds a validated Record from a keyword list or map; returns
    # `{:error, errors}` when any schema field is missing or invalid.
    @spec new(Keyword.t | map) :: {:ok, Record.t} | {:error, term}
    def new(params) do
      case changeset(%__MODULE__{}, Map.new(params)) do
        cs = %{valid?: true} -> {:ok, Changeset.apply_changes(cs)}
        cs -> {:error, ErrorHelpers.errors_on(cs)}
      end
    end

    # Casts and requires every field declared in the schema.
    @spec changeset(Record.t, map) :: Changeset.t
    def changeset(struct, params) do
      struct
      |> Changeset.cast(params, __schema__(:fields))
      |> Changeset.validate_required(__schema__(:fields))
    end
  end

  defmodule Model do
    @moduledoc """
    This structure defines the model of the all-time top burgers.
    It maintains a map of Records by burger-id, which will be incrementally
    updated as Reviews are submitted, and a list of records sorted by average
    review.
    The position in the global event log is maintained to ensure old/duplicated
    events are not applied.
    """
    use Ecto.Schema
    alias Irateburgers.{BurgerCreated, BurgerReviewed}
    alias Irateburgers.TopBurgers.{Record, Model}

    @primary_key false
    embedded_schema do
      field :last_event_id, :integer, default: 0
      field :by_id, {:map, Record}, default: %{}
      embeds_many :by_rating, Record
    end

    @type t :: %__MODULE__{}

    # Ignore events that were already processed when the model was initialized
    @spec apply_event(Model.t, map) :: Model.t
    def apply_event(
      model = %Model{last_event_id: n},
      _event = %{id: m})
      when
        is_integer(m) and
        is_integer(n) and
        m <= n
      do
      model
    end

    # Add a new burger record to the model; it initially has no reviews.
    # `by_rating: nil` invalidates the cached sort (rebuilt by top_burgers/2).
    def apply_event(
      model = %Model{},
      %BurgerCreated{burger_id: id, name: name, version: version})
      do
      {:ok, record} = Record.new(
        burger_id: id,
        name: name,
        version: version,
        average_rating: 0,
        num_reviews: 0)
      %{model | by_id: Map.put(model.by_id, id, record), by_rating: nil}
    end

    # Update the average rating for a burger after being reviewed; also
    # invalidates the cached rating-sorted list.
    def apply_event(
      model = %Model{},
      event = %BurgerReviewed{burger_id: burger_id})
      do
      new_burger =
        model
        |> find_burger(burger_id)
        |> update_average_rating(event.rating)
      new_by_id = Map.put(model.by_id, burger_id, new_burger)
      %{model | by_id: new_by_id, by_rating: nil}
    end

    @doc """
    Gets a burger record by ID from the model. Returns `nil` when unknown.
    """
    @spec find_burger(Model.t, binary) :: Record.t | nil
    def find_burger(model = %Model{}, burger_id) when is_binary(burger_id) do
      model.by_id[burger_id]
    end

    # Update the given burger record to include a new review with given rating
    @spec update_average_rating(Record.t, integer) :: Record.t
    defp update_average_rating(
      burger = %Record{average_rating: avg, num_reviews: n},
      rating)
      when is_integer(rating) do
      %{burger |
        num_reviews: n + 1,
        average_rating: incremental_average(avg, n, rating)
      }
    end

    # Recomputes a running mean of `count` samples after adding `value`,
    # without keeping the individual samples around.
    @spec incremental_average(number, integer, integer) :: float
    defp incremental_average(avg, count, value) do
      avg * (count / (count + 1)) + (value / (count + 1))
    end

    @doc """
    Sorts the burger records by rating if necessary and takes the top `count`
    returns {records, new_model} so the sorting can be cached
    """
    @spec top_burgers(Model.t, integer) :: {[Record.t], Model.t}
    def top_burgers(
      model = %Model{by_rating: nil},
      count)
      when is_integer(count) do
      new_model = sort_by_rating(model)
      top_burgers(new_model, count)
    end

    def top_burgers(model = %Model{by_rating: records}, count)
      when
        is_list(records) and
        is_integer(count)
      do
      {Enum.take(records, count), model}
    end

    # Sorts the burger records by average_rating descending,
    # updates `model.by_rating` with the result
    @spec sort_by_rating(Model.t) :: Model.t
    defp sort_by_rating(model = %Model{by_id: burgers = %{}}) do
      sorted =
        burgers
        |> Map.values()
        |> Enum.sort_by(&Map.get(&1, :average_rating), &>=/2)
      %{model | by_rating: sorted}
    end
  end

  defmodule Server do
    @moduledoc """
    This process maintains the state of the TopBurgers, and listens for events.
    On initialization, it reads all relevant past events from the event log.
    """
    alias Irateburgers.{
      BurgerCreated,
      BurgerReviewed,
      EventListenerRegistry,
      Repo
    }
    alias Irateburgers.TopBurgers.Model

    @spec start_link() :: {:ok, pid} | {:error, term}
    def start_link do
      Agent.start_link(&init/0, name: __MODULE__)
    end

    # Register this process in the `EventListenerRegistry` for new events,
    # and stream in the history of past events.
    @spec init() :: Model.t
    def init do
      {:ok, _pid} = Registry.register(
        EventListenerRegistry, BurgerCreated, &Model.apply_event/2)
      {:ok, _pid} = Registry.register(
        EventListenerRegistry, BurgerReviewed, &Model.apply_event/2)
      # Repo.stream_events returns a lazy stream, so it must be consumed
      # inside the transaction.
      {:ok, model} = Repo.transaction(fn ->
        events = Repo.stream_events(
          types: [BurgerCreated, BurgerReviewed], position: 0)
        Enum.reduce(events, %Model{}, fn x, acc ->
          Model.apply_event(acc, x)
        end)
      end)
      model
    end

    @doc """
    Get the top `count` burgers by average rating
    """
    @spec top_burgers(integer) :: [Record.t]
    def top_burgers(count) when is_integer(count) do
      Agent.get_and_update(__MODULE__, &Model.top_burgers(&1, count))
    end
  end
end
|
lib/burger/read_models/top_burgers.ex
| 0.840488
| 0.416322
|
top_burgers.ex
|
starcoder
|
defmodule Cldr.Time do
  @moduledoc """
  Provides an API for the localization and formatting of a `Time`
  struct or any map with the keys `:hour`, `:minute`,
  `:second` and optionally `:microsecond`.

  CLDR provides standard format strings for `Time` which
  are represented by the names `:short`, `:medium`, `:long`
  and `:full`. This allows for locale-independent
  formatting since each locale may define the underlying
  format string as appropriate.
  """
  alias Cldr.DateTime.{Format, Formatter}
  alias Cldr.LanguageTag

  # The standard CLDR format names supported by this module.
  @format_types [:short, :medium, :long, :full]

  defmodule Formats do
    # A struct with one key per standard format type (:short, :medium, ...).
    defstruct Module.get_attribute(Cldr.Time, :format_types)
  end

  @doc """
  Formats a time according to a format string
  as defined in CLDR and described in [TR35](http://unicode.org/reports/tr35/tr35-dates.html)

  ## Returns

  * `{:ok, formatted_time}` or

  * `{:error, reason}`.

  ## Arguments

  * `time` is a `%DateTime{}` or `%NaiveDateTime{}` struct or any map that contains the keys
    `hour`, `minute`, `second` and optionally `calendar` and `microsecond`

  * `options` is a keyword list of options for formatting.

  ## Options

  * `format:` `:short` | `:medium` | `:long` | `:full` or a format string.
    The default is `:medium`

  * `locale:` any locale returned by `Cldr.known_locale_names()`. The default is
    `Cldr.get_current_locale()`

  * `number_system:` a number system into which the formatted date digits should
    be transliterated

  * `era: :variant` will use a variant for the era if one is available in the locale.
    In the "en" locale, for example, `era: :variant` will return "BCE" instead of "BC".

  * `period: :variant` will use a variant for the time period and flexible time period if
    one is available in the locale. For example, in the "en" locale `period: :variant` will
    return "pm" instead of "PM"

  ## Examples

      iex> Cldr.Time.to_string ~T[07:35:13.215217]
      {:ok, "7:35:13 am"}

      iex> Cldr.Time.to_string ~T[07:35:13.215217], format: :short
      {:ok, "7:35 am"}

      iex> Cldr.Time.to_string ~T[07:35:13.215217], format: :medium, locale: "fr"
      {:ok, "07:35:13"}

      iex> Cldr.Time.to_string ~T[07:35:13.215217], format: :medium
      {:ok, "7:35:13 am"}

      iex> {:ok, datetime} = DateTime.from_naive(~N[2000-01-01 23:59:59.0], "Etc/UTC")
      iex> Cldr.Time.to_string datetime, format: :long
      {:ok, "11:59:59 pm UTC"}

  """
  def to_string(time, options \\ [])

  def to_string(%{hour: _hour, minute: _minute} = time, options) do
    options = Keyword.merge(default_options(), options)
    # Plain time maps have no :calendar key; fall back to the ISO calendar.
    calendar = Map.get(time, :calendar) || Calendar.ISO

    with {:ok, locale} <- Cldr.validate_locale(options[:locale]),
         {:ok, cldr_calendar} <- Formatter.type_from_calendar(calendar),
         {:ok, format_string} <-
           format_string_from_format(options[:format], locale, cldr_calendar),
         {:ok, formatted} <- Formatter.format(time, format_string, locale, options) do
      {:ok, formatted}
    else
      {:error, reason} -> {:error, reason}
    end
  end

  # Anything without at least :hour and :minute cannot be formatted.
  def to_string(time, _options) do
    error_return(time, [:hour, :minute, :second])
  end

  defp default_options do
    [format: :medium, locale: Cldr.get_current_locale()]
  end

  @doc """
  Formats a time according to a format string
  as defined in CLDR and described in [TR35](http://unicode.org/reports/tr35/tr35-dates.html).

  ## Arguments

  * `time` is a `%DateTime{}` or `%NaiveDateTime{}` struct or any map that contains the keys
    `hour`, `minute`, `second` and optionally `calendar` and `microsecond`

  * `options` is a keyword list of options for formatting. The valid options are:

  ## Options

  * `format:` `:short` | `:medium` | `:long` | `:full` or a format string.
    The default is `:medium`

  * `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
    or a `Cldr.LanguageTag` struct. The default is `Cldr.get_current_locale/0`

  * `number_system:` a number system into which the formatted date digits should
    be transliterated

  * `era: :variant` will use a variant for the era if one is available in the locale.
    In the "en" locale, for example, `era: :variant` will return "BCE" instead of "BC".

  * `period: :variant` will use a variant for the time period and flexible time period if
    one is available in the locale. For example, in the "en" locale `period: :variant` will
    return "pm" instead of "PM"

  ## Returns

  * `formatted time string` or

  * raises an exception.

  ## Examples

      iex> Cldr.Time.to_string! ~T[07:35:13.215217]
      "7:35:13 am"

      iex> Cldr.Time.to_string! ~T[07:35:13.215217], format: :short
      "7:35 am"

      iex> Cldr.Time.to_string ~T[07:35:13.215217], format: :short, period: :variant
      {:ok, "7:35 am"}

      iex> Cldr.Time.to_string! ~T[07:35:13.215217], format: :medium, locale: "fr"
      "07:35:13"

      iex> Cldr.Time.to_string! ~T[07:35:13.215217], format: :medium
      "7:35:13 am"

      iex> {:ok, datetime} = DateTime.from_naive(~N[2000-01-01 23:59:59.0], "Etc/UTC")
      iex> Cldr.Time.to_string! datetime, format: :long
      "11:59:59 pm UTC"

  """
  def to_string!(time, options \\ [])

  def to_string!(time, options) do
    case to_string(time, options) do
      {:ok, string} -> string
      {:error, {exception, message}} -> raise exception, message
    end
  end

  # A standard format name: resolve it to the locale's format string.
  defp format_string_from_format(format, %LanguageTag{cldr_locale_name: locale_name}, calendar)
       when format in @format_types do
    with {:ok, formats} <- Format.time_formats(locale_name, calendar) do
      {:ok, Map.get(formats, format)}
    end
  end

  # A map with a number system: resolve the inner format, keep the system.
  defp format_string_from_format(
         %{number_system: number_system, format: format},
         locale,
         calendar
       ) do
    {:ok, format_string} = format_string_from_format(format, locale, calendar)
    {:ok, %{number_system: number_system, format: format_string}}
  end

  # Any other atom is not a recognised standard format name.
  defp format_string_from_format(format, _locale, _calendar) when is_atom(format) do
    {:error,
     {Cldr.InvalidTimeFormatType,
      "Invalid time format type. " <> "The valid types are #{inspect(@format_types)}."}}
  end

  # A binary is assumed to already be a valid format string.
  defp format_string_from_format(format_string, _locale, _calendar)
       when is_binary(format_string) do
    {:ok, format_string}
  end

  defp error_return(map, requirements) do
    {:error,
     {ArgumentError,
      "Invalid time. Time is a map that requires at least #{inspect(requirements)} fields. " <>
        "Found: #{inspect(map)}"}}
  end
end
|
lib/cldr/datetime/time.ex
| 0.943217
| 0.767842
|
time.ex
|
starcoder
|
defmodule ExPolars.Plot do
  @moduledoc """
  Plotting tools.

  Renders an `ExPolars.DataFrame` through the `Deneb` charting library by
  serialising the frame to CSV and building Vega-Lite style chart specs.
  """
  alias ExPolars.DataFrame, as: DF
  alias Deneb.{Chart, Encoding, Mark, Plot}

  @doc """
  Renders `df` with one of `Deneb.Plot`'s named plot functions.

  `type` must be the name (atom) of a function exported by `Deneb.Plot`
  taking `(csv, opts)` — TODO confirm against the Deneb API.
  """
  def plot_by_type(df, type, opts \\ []) do
    {:ok, csv} = DF.to_csv(df)
    apply(Plot, type, [csv, opts])
  end

  @doc """
  Plots a single `x`/`y` chart of the given `mark` (e.g. `:point`, `:line`).

  Column types are inferred via `DF.dtype(df, col, :vega)`; an optional
  `:color` option selects a column used for the color encoding.
  Defaults to an 800x600 chart.
  """
  def plot_single(df, mark, x, y, opts \\ []) do
    default = [width: 800, height: 600]
    {color, opts} = get_color(df, opts)
    # Caller-supplied opts win over the default width/height.
    opts = Keyword.merge(default, opts)
    xtype = DF.dtype(df, x, :vega)
    ytype = DF.dtype(df, y, :vega)
    {:ok, csv} = DF.to_csv(df)

    mark
    |> Mark.new(true)
    |> Chart.new(
      Encoding.new(%{
        x: %{field: x, type: xtype},
        y: %{field: y, type: ytype},
        color: color
      })
    )
    |> Chart.to_json(opts)
    |> Deneb.to_json(csv)
  end

  @doc """
  Plots a grid of charts using Vega's `repeat` operator.

  Three call shapes are supported:

  * `rows` and `columns` both lists — a full row x column matrix;
  * `x` a single column name and `ys` a list — one chart per y column;
  * `xs` a list and `y` a single column name — one chart per x column.

  Axis types default to `"quantitative"` and may be overridden with the
  `:xtype`/`:ytype` options; `:columns` controls grid width for the
  single-axis variants.
  """
  def plot_repeat(df, mark, rows, columns, opts \\ [])

  def plot_repeat(df, mark, rows, columns, opts) when is_list(rows) and is_list(columns) do
    {color, opts} = get_color(df, opts)
    {xtype, opts} = Keyword.pop(opts, :xtype, "quantitative")
    {ytype, opts} = Keyword.pop(opts, :ytype, "quantitative")
    default = []
    opts = Keyword.merge(default, opts)
    {:ok, csv} = DF.to_csv(df)

    mark
    |> Mark.new(true)
    |> Chart.new(
      Encoding.new(%{
        x: %{field: %{repeat: "column"}, type: xtype},
        y: %{field: %{repeat: "row"}, type: ytype, axis: %{minExtend: 30}},
        color: color
      })
    )
    |> Chart.repeat(%{row: rows, column: columns}, opts)
    |> Deneb.to_json(csv)
  end

  def plot_repeat(df, mark, x, ys, opts) when is_binary(x) and is_list(ys) do
    {color, opts} = get_color(df, opts)
    {columns, opts} = Keyword.pop(opts, :columns, 2)
    {ytype, opts} = Keyword.pop(opts, :ytype, "quantitative")
    default = []
    opts = Keyword.merge(default, opts)
    {:ok, csv} = DF.to_csv(df)

    mark
    |> Mark.new(true)
    |> Chart.new(
      Encoding.new(%{
        x: %{field: x, type: DF.dtype(df, x, :vega)},
        y: %{field: %{repeat: "repeat"}, type: ytype},
        color: color
      })
    )
    |> Chart.repeat(ys, opts)
    |> Deneb.to_json(csv)
    |> Map.put("columns", columns)
  end

  def plot_repeat(df, mark, xs, y, opts) when is_list(xs) and is_binary(y) do
    {color, opts} = get_color(df, opts)
    {columns, opts} = Keyword.pop(opts, :columns, 2)
    {xtype, opts} = Keyword.pop(opts, :xtype, "quantitative")
    default = []
    opts = Keyword.merge(default, opts)
    {:ok, csv} = DF.to_csv(df)

    mark
    |> Mark.new(true)
    |> Chart.new(
      Encoding.new(%{
        x: %{field: %{repeat: "repeat"}, type: xtype},
        y: %{field: y, type: DF.dtype(df, y, :vega)},
        color: color
      })
    )
    |> Chart.repeat(xs, opts)
    |> Deneb.to_json(csv)
    |> Map.put("columns", columns)
  end

  # Pops the :color option, turning a column name into a Vega color
  # encoding; returns `{nil, opts}` when no color column was requested.
  defp get_color(df, opts) do
    {color_field, opts} = Keyword.pop(opts, :color)

    case color_field do
      nil -> {nil, opts}
      v -> {%{field: v, type: DF.dtype(df, v, :vega)}, opts}
    end
  end
end
|
lib/ex_polars/plot.ex
| 0.756313
| 0.60212
|
plot.ex
|
starcoder
|
defmodule XDR.FixedOpaque do
  @moduledoc """
  This module manages the `Fixed-Length Opaque Data` type based on the RFC4506 XDR Standard.
  """
  @behaviour XDR.Declaration

  defstruct [:opaque, :length]

  alias XDR.Error.FixedOpaque, as: FixedOpaqueError

  @typedoc """
  `XDR.FixedOpaque` structure type specification.
  """
  @type t :: %XDR.FixedOpaque{opaque: binary | nil, length: integer}

  @doc """
  Builds an `XDR.FixedOpaque` struct from the given `opaque` binary and its `length`.
  """
  @spec new(opaque :: binary, length :: integer) :: t
  def new(opaque, length), do: %XDR.FixedOpaque{opaque: opaque, length: length}

  @impl XDR.Declaration
  @doc """
  Encode a `XDR.FixedOpaque` structure into a XDR format.
  """
  @spec encode_xdr(opaque :: t | map()) ::
          {:ok, binary} | {:error, :not_binary | :not_number | :invalid_length}
  def encode_xdr(%{opaque: opaque}) when not is_binary(opaque), do: {:error, :not_binary}
  def encode_xdr(%{length: length}) when not is_integer(length), do: {:error, :not_number}

  def encode_xdr(%{opaque: opaque, length: length}) when length != byte_size(opaque),
    do: {:error, :invalid_length}

  # Already aligned to a 4-byte boundary: emit the binary as-is.
  def encode_xdr(%{opaque: opaque, length: length}) when rem(length, 4) === 0, do: {:ok, opaque}

  # Not aligned yet: zero-pad one byte at a time until the length is a
  # multiple of four, then re-enter the aligned clause above.
  def encode_xdr(%{opaque: opaque, length: length}) when rem(length, 4) != 0 do
    opaque
    |> Kernel.<>(<<0>>)
    |> new(length + 1)
    |> encode_xdr()
  end

  @impl XDR.Declaration
  @doc """
  Encode a `XDR.FixedOpaque` structure into a XDR format.
  If the `opaque` is not valid, an exception is raised.
  """
  @spec encode_xdr!(opaque :: t | map()) :: binary()
  def encode_xdr!(opaque) do
    case encode_xdr(opaque) do
      {:ok, encoded} -> encoded
      {:error, reason} -> raise(FixedOpaqueError, reason)
    end
  end

  @impl XDR.Declaration
  @doc """
  Decode the Fixed-Length Opaque Data in XDR format to a `XDR.FixedOpaque` structure.
  """
  @spec decode_xdr(bytes :: binary(), opaque :: t | map()) ::
          {:ok, {t, binary()}}
          | {:error, :not_binary | :not_valid_binary | :not_number | :exceed_length}
  def decode_xdr(bytes, _opaque) when not is_binary(bytes), do: {:error, :not_binary}

  def decode_xdr(bytes, _opaque) when rem(byte_size(bytes), 4) != 0,
    do: {:error, :not_valid_binary}

  def decode_xdr(_bytes, %{length: length}) when not is_integer(length), do: {:error, :not_number}

  def decode_xdr(bytes, %{length: length}) when length > byte_size(bytes),
    do: {:error, :exceed_length}

  def decode_xdr(bytes, %{length: length}) do
    padding = get_required_padding(length)

    # Split off the payload, discard the alignment padding, keep the tail.
    <<payload::bytes-size(length), _padding::bytes-size(padding), remainder::binary>> = bytes

    {:ok, {new(payload, length), remainder}}
  end

  @impl XDR.Declaration
  @doc """
  Decode the Fixed-Length Array in XDR format to a `XDR.FixedOpaque` structure.
  If the binaries are not valid, an exception is raised.
  """
  @spec decode_xdr!(bytes :: binary(), opaque :: t | map()) :: {t, binary()}
  def decode_xdr!(bytes, opaque) do
    case decode_xdr(bytes, opaque) do
      {:ok, decoded} -> decoded
      {:error, reason} -> raise(FixedOpaqueError, reason)
    end
  end

  # Number of zero bytes needed to pad `length` up to a 4-byte boundary.
  @spec get_required_padding(length :: integer()) :: integer()
  defp get_required_padding(length) do
    case rem(length, 4) do
      0 -> 0
      remainder -> 4 - remainder
    end
  end
end
|
lib/xdr/fixed_opaque.ex
| 0.919949
| 0.685107
|
fixed_opaque.ex
|
starcoder
|
defmodule Timex.Parsers.DateFormat.Tokenizers.Default do
  @moduledoc """
  Responsible for tokenizing date/time format strings
  which use the Default formatter.
  """
  alias Timex.Parsers.DateFormat.ParserState, as: State
  alias Timex.Parsers.DateFormat.Directive, as: Directive

  # These are all the default formatter's directives, keyed by the mnemonic
  # that may appear between braces in a format string.
  @directives [
    # Years
    {"YYYY", Directive.get(:year4)},
    {"YY", Directive.get(:year2)},
    {"C", Directive.get(:century)},
    {"WYYYY", Directive.get(:iso_year4)},
    {"WYY", Directive.get(:iso_year2)},
    # Months
    {"M", Directive.get(:month)},
    {"Mshort", Directive.get(:mshort)},
    {"Mfull", Directive.get(:mfull)},
    # Days
    {"D", Directive.get(:day)},
    {"Dord", Directive.get(:oday)},
    # Weeks
    {"Wiso", Directive.get(:iso_weeknum)},
    {"Wmon", Directive.get(:week_mon)},
    {"Wsun", Directive.get(:week_sun)},
    {"WDmon", Directive.get(:wday_mon)},
    {"WDsun", Directive.get(:wday_sun)},
    {"WDshort", Directive.get(:wdshort)},
    {"WDfull", Directive.get(:wdfull)},
    # Hours
    {"h24", Directive.get(:hour24)},
    {"h12", Directive.get(:hour12)},
    {"m", Directive.get(:min)},
    {"s", Directive.get(:sec)},
    {"ss", Directive.get(:sec_fractional)},
    {"s-epoch", Directive.get(:sec_epoch)},
    {"am", Directive.get(:am)},
    {"AM", Directive.get(:AM)},
    # Timezones
    {"Zname", Directive.get(:zname)},
    {"Z", Directive.get(:zoffs)},
    {"Z:", Directive.get(:zoffs_colon)},
    {"Z::", Directive.get(:zoffs_sec)},
    # Preformatted Directives
    {"ISO", Directive.get(:iso_8601)},
    {"ISOz", Directive.get(:iso_8601z)},
    {"ISOdate", Directive.get(:iso_date)},
    {"ISOtime", Directive.get(:iso_time)},
    {"ISOweek", Directive.get(:iso_week)},
    {"ISOweek-day", Directive.get(:iso_weekday)},
    {"ISOord", Directive.get(:iso_ordinal)},
    {"RFC822", Directive.get(:rfc_822)},
    {"RFC822z", Directive.get(:rfc_822z)},
    {"RFC1123", Directive.get(:rfc_1123)},
    {"RFC1123z", Directive.get(:rfc_1123z)},
    {"RFC3339", Directive.get(:rfc_3339)},
    {"RFC3339z", Directive.get(:rfc_3339z)},
    {"ANSIC", Directive.get(:ansic)},
    {"UNIX", Directive.get(:unix)},
    {"kitchen", Directive.get(:kitchen)}
  ]

  @doc """
  Takes a format string and extracts parsing directives for the parser.

  Returns a list of `%Directive{}` (plain characters become `type: :char`
  directives), or `{:error, reason}` for malformed input.

  ## Example

      Timex.Parsers.DateFormat.Tokenizers.Default.tokenize("{YYYY}-{0M}-{D}")
      #=> [%Directive{token: :year4, ...}, %Directive{token: :month, pad: 1, ...}, ...]
  """
  def tokenize(s) when s in [nil, ""], do: {:error, "Format string cannot be nil or empty!"}
  # Accept charlists by converting them to a binary first.
  def tokenize(s) when is_list(s), do: tokenize("#{s}")

  def tokenize(s) do
    do_tokenize(s)
  end

  # The tokenizer is a small state machine; `status` is one of
  # :next (outside a directive), :padding (just after `{`), :token (inside).
  defp do_tokenize(format),
    do: do_tokenize(format, %State{}, :next)

  # Consumed the whole string outside a directive: done. Tokens were
  # accumulated by prepending, so reverse them once at the end.
  defp do_tokenize(<<>>, %State{tokens: tokens}, :next),
    do: tokens |> Enum.reverse()

  # Invalid strings
  defp do_tokenize(<<>>, %State{start_index: start_index}, status)
       when status != :next,
       do: {:error, "Unclosed directive starting at column #{start_index}"}

  # BUGFIX: this clause interpolated `_format` while keeping the underscore
  # prefix, which triggers a "use of underscored variable" compiler warning.
  # The binding is renamed so its use is legitimate; the message is unchanged.
  defp do_tokenize(<<?{, rest :: binary>>, %State{col: col}, status)
       when status != :next,
       do: {:error, "Invalid nesting of directives at column #{col}: #{rest}"}

  defp do_tokenize(<<?}, _format :: binary>>, %State{col: col}, status)
       when status != :token,
       do: {:error, "Missing open brace for closing brace at column #{col}!"}

  # Start of directive
  defp do_tokenize(<<?{, format :: binary>>, state, :next) do
    state = %{state | :col => state.col + 1, :start_index => state.col}
    do_tokenize(format, state, :padding)
  end

  # End of directive: resolve the collected mnemonic and emit its directive.
  defp do_tokenize(<<?}, format :: binary>>, %State{padding: pad, token: token, tokens: tokens} = state, :token) do
    case get_directive(token) do
      :invalid -> {:error, "Invalid token beginning at column #{state.start_index}!"}
      {_, %Directive{} = directive} ->
        state = %{state |
          :col => state.col + 1,
          :padding => 0,
          :token => "",
          :tokens => [%{directive | :pad => pad || false, :pad_type => state.pad_type, :raw => token} | tokens]
        }
        do_tokenize(format, state, :next)
    end
  end

  # Determine padding: leading `0`s (zero-pad) or `_`s (space-pad) are
  # counted before the mnemonic itself begins.
  defp do_tokenize(<<c :: utf8, format :: binary>>, %State{padding: pad} = state, :padding)
       when c in [?0, ?_],
       do: do_tokenize(format, %{state | :col => state.col + 1, :padding => pad + 1, :pad_type => pad_type(c)}, :padding)

  defp do_tokenize(<<c :: utf8, format :: binary>>, %State{token: token} = state, :padding),
    do: do_tokenize(format, %{state | :col => state.col + 1, :token => token <> <<c>>}, :token)

  # Parse mnemonic
  defp do_tokenize(<<c :: utf8, format :: binary>>, %State{token: token} = state, :token) do
    state = %{state | :col => state.col + 1, :token => token <> <<c>>}
    do_tokenize(format, state, :token)
  end

  # Handle non-token characters: pass them through as literal :char directives.
  defp do_tokenize(<<char :: utf8, format :: binary>>, %State{col: col, tokens: tokens} = state, status) do
    directive = %Directive{type: :char, token: char, raw: <<char>>}
    state = %{state | :col => col + 1, :tokens => [directive | tokens]}
    do_tokenize(format, state, status)
  end

  defp pad_type(?0), do: :zero
  defp pad_type(?_), do: :space

  # Looks the mnemonic up in @directives; :invalid when unknown.
  defp get_directive(dir) do
    List.keyfind(@directives, dir, 0) || :invalid
  end
end
|
lib/parsers/dateformat/tokenizers/default.ex
| 0.723602
| 0.479565
|
default.ex
|
starcoder
|
defmodule Legion.Identity.Information.AddressBook.Address do
  @moduledoc """
  Represents an entry found in address book.

  ## Schema fields

  - `:user_id`: The identifier of the user that address belongs to.
  - `:type`: Type of the address, e.g. "home", "work".
  - `:name`: Name of the address, e.g. "Home at Rotterdam".
  - `:description`: Description of the address. Most of the time this is a short
  explanation of the address in a particular language to help to navigate the
  readers of the address properly.
  - `:state`: Name of the state, e.g. "Kansas". Some countries might not respect
  the political divisions to this type of attribute.
  - `:city`: Name of the city, e.g. "Rotterdam".
  - `:neighborhood`: Name of the neighborhood, e.g. "Beukelsdijk".
  - `:zip_code`: Zip code of the location.
  """
  use Legion.Stereotype, :model

  alias Legion.Identity.Information.Registration, as: User
  alias Legion.Identity.Information.AddressBook.AddressType
  alias Legion.Identity.Information.Political.Country
  alias Legion.Types.Point

  @typedoc """
  Unique identifier for the address.
  """
  @type id() :: pos_integer()

  @typedoc """
  Represents a polymorphic type, containing either the struct itself or a unique identifier.
  """
  @type address_or_id() :: Address.id() | Address

  @typedoc """
  Shows the type of the address.
  """
  @type address_type() :: :home | :work | :other

  # NOTE(review): Application.get_env/2 evaluated in the module body freezes
  # this configuration at compile time; Application.compile_env/3 would make
  # that intent explicit — confirm the config is not expected to change at
  # runtime.
  @env Application.get_env(:legion, Legion.Identity.Information.AddressBook)
  # Each *_len value is presumably a range/enumerable of allowed lengths;
  # Enum.min/Enum.max below derive the validation bounds from it.
  @name_len Keyword.fetch!(@env, :name_length)
  @description_len Keyword.fetch!(@env, :description_length)
  @state_len Keyword.fetch!(@env, :state_length)
  @city_len Keyword.fetch!(@env, :city_length)
  @neighborhood_len Keyword.fetch!(@env, :neighborhood_length)
  @zip_code_len Keyword.fetch!(@env, :zip_code_length)

  schema "user_addresses" do
    belongs_to :user, User
    field :type, AddressType
    field :name, :string
    field :description, :string
    field :state, :string
    field :city, :string
    field :neighborhood, :string
    field :zip_code, :string
    field :location, Point
    belongs_to :country, Country, foreign_key: :country_name, references: :name, type: :string

    timestamps()
  end

  # Casts and validates address attributes; length limits come from the
  # application environment (see the module attributes above).
  def changeset(struct, params \\ %{}) do
    struct
    |> cast(params, [
      :user_id,
      :type,
      :name,
      :description,
      :state,
      :city,
      :neighborhood,
      :zip_code,
      :country_name,
      :location
    ])
    |> validate_required([:user_id, :type, :name, :country_name])
    |> validate_length(:name, min: Enum.min(@name_len), max: Enum.max(@name_len))
    |> validate_length(:description,
      min: Enum.min(@description_len),
      max: Enum.max(@description_len)
    )
    |> validate_length(:state, min: Enum.min(@state_len), max: Enum.max(@state_len))
    |> validate_length(:city, min: Enum.min(@city_len), max: Enum.max(@city_len))
    |> validate_length(:neighborhood,
      min: Enum.min(@neighborhood_len),
      max: Enum.max(@neighborhood_len)
    )
    |> validate_length(:zip_code, min: Enum.min(@zip_code_len), max: Enum.max(@zip_code_len))
    |> validate_geo_inclusion()
    |> foreign_key_constraint(:user_id)
    |> foreign_key_constraint(:country_name)
  end

  # When both a country and a location are present, ensures the location
  # lies within the country's borders. Skipped if either is absent.
  defp validate_geo_inclusion(changeset) do
    country_name = get_field(changeset, :country_name)
    location = get_change(changeset, :location)

    if not is_nil(country_name) and not is_nil(location) do
      country = Repo.get_by!(Country, name: country_name)

      if Country.does_contain_point?(country, location) do
        changeset
      else
        add_error(changeset, :location, "is not contained by the country")
      end
    else
      changeset
    end
  end
end
|
apps/legion/lib/identity/information/address_book/address.ex
| 0.815122
| 0.512815
|
address.ex
|
starcoder
|
defmodule Chex.Piece.King do
  @moduledoc false

  alias Chex.{Board, Color, Piece, Square}

  @behaviour Piece

  # All legal king moves: the eight surrounding squares, plus castling
  # destinations when available, minus any square the opponent attacks.
  def possible_moves(game, square, color) do
    candidates = maybe_prepend_castling(possible_squares(square), game, color)
    candidates -- Board.all_attacking_squares(game, Color.flip(color))
  end

  # A king attacks exactly its eight neighbouring squares.
  def attacking_squares(_game, square, _color), do: possible_squares(square)

  # No castling while the side to move is itself in check.
  defp maybe_prepend_castling(moves, %{check: c}, c), do: moves
  # No castling rights remain for either side.
  defp maybe_prepend_castling(moves, %{castling: []}, _color), do: moves

  defp maybe_prepend_castling(moves, game, color) do
    home_rank = rank_for_color(color)

    moves
    |> maybe_add_move({:g, home_rank}, can_kingside_castle(game, color))
    |> maybe_add_move({:c, home_rank}, can_queenside_castle(game, color))
  end

  defp maybe_add_move(moves, square, true), do: [square | moves]
  defp maybe_add_move(moves, _square, _not_allowed), do: moves

  # The 3x3 neighbourhood around `square`, excluding the square itself and
  # anything that falls off the board.
  defp possible_squares({file, rank} = square) do
    offsets = for dr <- [-1, 0, 1], df <- [-1, 0, 1], do: {df, dr}

    offsets
    |> Enum.map(fn {df, dr} -> {Board.file_offset(file, df), rank + dr} end)
    |> List.delete(square)
    |> Enum.filter(&Square.valid?/1)
  end

  # Castling rights are stored as piece-letter atoms; translate a piece
  # name + colour into the corresponding (already existing) atom.
  defp piece_to_right(name, color) do
    {name, color}
    |> Piece.to_string()
    |> String.to_existing_atom()
  end

  defp rank_for_color(:white), do: 1
  defp rank_for_color(:black), do: 8

  # Squares between king and kingside rook: must be empty and unattacked.
  defp kingside_squares(color) do
    home_rank = rank_for_color(color)
    [f: home_rank, g: home_rank]
  end

  defp queenside_squares(color, purpose \\ :occupied)

  # The b-file square must merely be empty; it may be attacked.
  defp queenside_squares(color, :occupied) do
    home_rank = rank_for_color(color)
    [d: home_rank, c: home_rank, b: home_rank]
  end

  defp queenside_squares(color, :attacking) do
    home_rank = rank_for_color(color)
    [d: home_rank, c: home_rank]
  end

  defp can_kingside_castle(game, color) do
    squares = kingside_squares(color)
    enemy_attacks = Board.all_attacking_squares(game, Color.flip(color))

    clear? = not Enum.any?(squares, &Board.occupied?(game, &1))
    safe? = not Enum.any?(squares, &(&1 in enemy_attacks))
    right? = piece_to_right(:king, color) in game.castling

    clear? and safe? and right?
  end

  defp can_queenside_castle(game, color) do
    enemy_attacks = Board.all_attacking_squares(game, Color.flip(color))

    clear? = not Enum.any?(queenside_squares(color), &Board.occupied?(game, &1))
    safe? = not Enum.any?(queenside_squares(color, :attacking), &(&1 in enemy_attacks))
    right? = piece_to_right(:queen, color) in game.castling

    clear? and safe? and right?
  end
end
|
lib/chex/piece/king.ex
| 0.758332
| 0.436562
|
king.ex
|
starcoder
|
defmodule DoIt.Argument do
  @moduledoc false

  import DoIt.Helper, only: [validate_list_type: 2]

  # Closed set of primitive types an argument definition may declare.
  @argument_types [:boolean, :integer, :float, :string]

  @type t :: %__MODULE__{
          name: atom,
          type: atom,
          description: String.t(),
          allowed_values: list
        }
  @enforce_keys [:name, :type, :description]
  defstruct [:name, :type, :description, :allowed_values]

  # Validates a full argument definition, raising DoIt.ArgumentDefinitionError
  # on the first invalid field.
  def validate_definition(%DoIt.Argument{} = argument) do
    argument
    |> validate_definition_name
    |> validate_definition_type
    |> validate_definition_description
    |> validate_definition_allowed_values
  end

  # name: required, must be an atom.
  def validate_definition_name(%DoIt.Argument{name: nil}),
    do: raise(DoIt.ArgumentDefinitionError, message: "name is required for argument definition")

  def validate_definition_name(%DoIt.Argument{name: name} = argument) when is_atom(name),
    do: argument

  def validate_definition_name(%DoIt.Argument{name: _}),
    do: raise(DoIt.ArgumentDefinitionError, message: "name must be an atom")

  # type: required, must be one of @argument_types.
  def validate_definition_type(%DoIt.Argument{type: nil}),
    do: raise(DoIt.ArgumentDefinitionError, message: "type is required for argument definition")

  def validate_definition_type(%DoIt.Argument{type: type} = argument)
      when type in @argument_types,
      do: argument

  def validate_definition_type(%DoIt.Argument{type: type}),
    do:
      raise(DoIt.ArgumentDefinitionError,
        message:
          "unrecognized argument type '#{type}', allowed types are #{
            @argument_types
            |> Enum.map(&Atom.to_string/1)
            |> Enum.join(", ")
          }"
      )

  # description: required, must be a string.
  def validate_definition_description(%DoIt.Argument{description: nil}),
    do:
      raise(DoIt.ArgumentDefinitionError,
        message: "description is required for argument definition"
      )

  def validate_definition_description(%DoIt.Argument{description: description} = argument)
      when is_binary(description),
      do: argument

  def validate_definition_description(%DoIt.Argument{description: _}),
    do: raise(DoIt.ArgumentDefinitionError, message: "description must be a string")

  # allowed_values: optional; must be a list whose members match the declared
  # type. Not applicable to booleans.
  def validate_definition_allowed_values(%DoIt.Argument{allowed_values: nil} = argument),
    do: argument

  def validate_definition_allowed_values(%DoIt.Argument{type: type, allowed_values: _})
      when type == :boolean,
      do:
        raise(DoIt.ArgumentDefinitionError,
          message: "allowed_values cannot be used with type boolean"
        )

  def validate_definition_allowed_values(
        %DoIt.Argument{type: type, allowed_values: allowed_values} = argument
      )
      when is_list(allowed_values) do
    case validate_list_type(allowed_values, type) do
      true ->
        argument

      _ ->
        raise DoIt.ArgumentDefinitionError,
          message: "all values in allowed_values must be of type #{Atom.to_string(type)}"
    end
  end

  def validate_definition_allowed_values(%DoIt.Argument{allowed_values: _}),
    do: raise(DoIt.ArgumentDefinitionError, message: "allowed_values must be a list")

  # Pairs positional CLI values with the declared argument names.
  # Returns {:ok, [{name, raw_value}, ...]} or {:error, message} on an
  # arity mismatch.
  def parse_input(arguments, parsed) do
    cond do
      Enum.count(arguments) != Enum.count(parsed) ->
        {:error,
         "wrong number of arguments (given #{Enum.count(parsed)} expected #{Enum.count(arguments)})"}

      Enum.empty?(arguments) ->
        {:ok, []}

      true ->
        argument_keys =
          arguments
          |> Enum.map(fn %{name: name} -> name end)
          # NOTE(review): names are reversed before zipping with `parsed` —
          # presumably the caller supplies `arguments` in reverse declaration
          # order; confirm against the call site, otherwise pairing scrambles.
          |> Enum.reverse()

        {
          :ok,
          Enum.zip(argument_keys, parsed)
        }
    end
  end

  # Casts and validates every parsed {name, value} pair against its
  # definition. Returns {:ok, pairs} or {:error, [messages]} with all
  # failure messages collected.
  def validate_input([], _), do: {:ok, []}

  def validate_input(arguments, parsed) do
    case parsed
         |> Enum.map(fn
           {key, value} ->
             argument = Enum.find(arguments, fn %DoIt.Argument{name: name} -> name == key end)

             {argument, value}
             |> validate_input_value()
             |> validate_input_allowed_values()
         end)
         |> List.flatten()
         |> Enum.map(fn {%DoIt.Argument{name: name}, value} -> {name, value} end)
         |> Enum.split_with(fn
           {_, {:error, _}} -> false
           _ -> true
         end) do
      {valid_arguments, []} ->
        {:ok, valid_arguments}

      {_, invalid_arguments} ->
        {
          :error,
          Enum.map(invalid_arguments, fn {_, {:error, message}} -> message end)
        }
    end
  end

  # Casts a raw value (or list of values) to the argument's declared type.
  # On failure returns {argument, {:error, message}}, which later clauses
  # pass through untouched.
  def validate_input_value({_, {:error, _}} = error), do: error

  def validate_input_value({%DoIt.Argument{} = argument, values}) when is_list(values) do
    validate_input_value({argument, values}, [])
  end

  def validate_input_value({%DoIt.Argument{type: :integer} = argument, value})
      when is_integer(value),
      do: {argument, value}

  # Implicit try: the rescue catches String.to_integer/1 failures.
  def validate_input_value({%DoIt.Argument{name: name, type: :integer} = argument, value}) do
    {argument, String.to_integer(value)}
  rescue
    ArgumentError ->
      {argument,
       {:error, "invalid integer value '#{value}' for argument #{Atom.to_string(name)}"}}
  end

  def validate_input_value({%DoIt.Argument{type: :float} = argument, value}) when is_float(value),
    do: {argument, value}

  def validate_input_value({%DoIt.Argument{name: name, type: :float} = argument, value}) do
    {argument, String.to_float(value)}
  rescue
    ArgumentError ->
      {argument, {:error, "invalid float value '#{value}' for argument #{Atom.to_string(name)}"}}
  end

  # Fallback (covers :string and :boolean): coerce via string interpolation.
  def validate_input_value({%DoIt.Argument{} = argument, value}) do
    {argument, "#{value}"}
  end

  # List variant: cast each element in order, stopping at the first error.
  def validate_input_value({%DoIt.Argument{} = argument, [value | values]}, acc) do
    case validate_input_value({argument, value}) do
      {%DoIt.Argument{}, {:error, _}} = error ->
        error

      {%DoIt.Argument{}, val} ->
        validate_input_value({argument, values}, acc ++ [val])
    end
  end

  def validate_input_value({%DoIt.Argument{} = argument, []}, acc) do
    {argument, acc}
  end

  # Checks an (already cast) value against the definition's allowed_values,
  # when present. Errors from the cast step pass straight through.
  def validate_input_allowed_values({_, {:error, _}} = error), do: error

  def validate_input_allowed_values({%DoIt.Argument{allowed_values: nil} = argument, value}) do
    {argument, value}
  end

  def validate_input_allowed_values({%DoIt.Argument{} = argument, values}) when is_list(values) do
    validate_input_allowed_values({argument, values}, [])
  end

  def validate_input_allowed_values(
        {%DoIt.Argument{name: name, allowed_values: allowed_values} = argument, value}
      ) do
    case Enum.find(allowed_values, fn allowed -> value == allowed end) do
      nil ->
        {argument,
         {:error, "value '#{value}' isn't allowed for argument #{Atom.to_string(name)}"}}

      _ ->
        {argument, value}
    end
  end

  # List variant: check each element, stopping at the first error.
  def validate_input_allowed_values({%DoIt.Argument{} = argument, [value | values]}, acc) do
    case validate_input_allowed_values({argument, value}) do
      {%DoIt.Argument{}, {:error, _}} = error ->
        error

      {%DoIt.Argument{}, val} ->
        validate_input_allowed_values({argument, values}, acc ++ [val])
    end
  end

  def validate_input_allowed_values({%DoIt.Argument{} = argument, []}, acc) do
    {argument, acc}
  end
end
|
lib/do_it/argument.ex
| 0.637144
| 0.553566
|
argument.ex
|
starcoder
|
defmodule Minesweeper do
  @moduledoc """
  Annotates a minesweeper board: each empty cell adjacent to mines is
  replaced by the count of neighbouring mines.
  """

  @mine "*"

  @doc """
  Annotate empty spots next to mines with the number of mines next to them.

  Takes a list of equal-length strings made of `"*"` (mine) and `" "`
  (empty); returns the board with counts filled in. Mines are left as-is and
  cells with zero adjacent mines keep their original character.
  """
  @spec annotate([String.t()]) :: [String.t()]
  def annotate([]), do: []

  def annotate(board) do
    # Split every row into graphemes once up front instead of re-splitting
    # for each neighbour lookup.
    grid = Enum.map(board, &String.codepoints/1)

    grid
    |> Enum.with_index()
    |> Enum.map(fn {row, y} -> annotate_row(grid, row, y) end)
  end

  # Re-renders one row, annotating each cell.
  defp annotate_row(grid, row, y) do
    row
    |> Enum.with_index()
    |> Enum.map_join(fn {char, x} -> annotate_cell(grid, char, y, x) end)
  end

  # Mines stay mines; empty cells get their adjacent-mine count (or stay
  # unchanged when the count is zero).
  defp annotate_cell(_grid, @mine, _y, _x), do: @mine

  defp annotate_cell(grid, char, y, x) do
    count =
      grid
      |> neighbor_cells(y, x)
      |> Enum.count(&(&1 == @mine))

    if count > 0, do: Integer.to_string(count), else: char
  end

  # The up-to-eight neighbouring cell characters of (y, x).
  #
  # Negative indices are filtered out explicitly: the original version let a
  # same-row left index of -1 through, and Enum.at/2 wraps negative indices
  # to the END of the row — miscounting, e.g., [" *"] as "2*". Out-of-range
  # positive indices simply yield nil, which never matches @mine.
  defp neighbor_cells(grid, y, x) do
    for dy <- [-1, 0, 1],
        dx <- [-1, 0, 1],
        {dy, dx} != {0, 0},
        y + dy >= 0,
        x + dx >= 0 do
      grid |> Enum.at(y + dy, []) |> Enum.at(x + dx)
    end
  end
end
|
elixir/minesweeper/lib/minesweeper.ex
| 0.778102
| 0.553747
|
minesweeper.ex
|
starcoder
|
defmodule TicTacToe.Game do
  @moduledoc """
  Pure tic-tac-toe game state: a 9-cell board (indexes 0..8, row-major) and
  the player whose turn it is. `:x` always moves first.
  """

  @type position :: integer()
  @type player_type :: :x | :o
  @type board_type :: list(player_type())

  @board for _ <- 1..9, do: nil
  @player_x :x
  @player_o :o

  @type t :: %__MODULE__{
          board: board_type(),
          current_player: player_type()
        }

  defstruct board: @board,
            current_player: @player_x

  @doc """
  Creates a new game with an empty board.
  """
  @spec new :: %TicTacToe.Game{}
  def new() do
    %TicTacToe.Game{}
  end

  @doc """
  Moves `player` to `position` in `game`.

  Returns `{:ok, game}` with the board updated and the turn passed to the
  other player, or `{:error, reason}` when the position is taken, the game
  is decided, it is not `player`'s turn, or the player/position is invalid.
  """
  @spec move(t(), player_type(), integer()) ::
          {:error, String.t()} | {:ok, t()}
  # Guard uses is_integer/1 (the original used is_number/1, which let float
  # positions through and crashed Enum.at/List.update_at downstream; floats
  # now fall to the catch-all error clause instead).
  def move(game, player, position)
      when player in [@player_x, @player_o] and
             is_integer(position) and
             position >= 0 and position <= 8 do
    if current_player?(game, player) && can_move?(game, position) do
      {
        :ok,
        %TicTacToe.Game{
          game
          | board: List.update_at(game.board, position, fn _ -> player end),
            current_player: next_player(player)
        }
      }
    else
      {:error, "invalid move"}
    end
  end

  def move(_game, _player, _position) do
    {:error, "invalid player"}
  end

  @doc """
  Whether a move can be made at `position`: nobody has won yet, the board
  is not full, and the cell is empty.
  """
  @spec can_move?(t(), integer()) :: boolean()
  def can_move?(%TicTacToe.Game{board: board}, position) do
    !get_winner(board) && !board_full?(board) && Enum.at(board, position) === nil
  end

  defp current_player?(game, player) do
    game.current_player === player
  end

  @spec next_player(player_type()) :: player_type()
  defp next_player(@player_x), do: @player_o
  defp next_player(@player_o), do: @player_x

  @doc """
  Returns the winning player on `board`, or `nil` when nobody has won.
  """
  @spec get_winner(board_type()) :: player_type() | nil
  # Rows
  def get_winner([a, a, a, _, _, _, _, _, _] = _board) when a != nil, do: a
  def get_winner([_, _, _, a, a, a, _, _, _] = _board) when a != nil, do: a
  def get_winner([_, _, _, _, _, _, a, a, a] = _board) when a != nil, do: a
  # Columns
  def get_winner([a, _, _, a, _, _, a, _, _] = _board) when a != nil, do: a
  def get_winner([_, a, _, _, a, _, _, a, _] = _board) when a != nil, do: a
  def get_winner([_, _, a, _, _, a, _, _, a] = _board) when a != nil, do: a
  # Diagonals
  def get_winner([a, _, _, _, a, _, _, _, a] = _board) when a != nil, do: a
  def get_winner([_, _, a, _, a, _, a, _, _] = _board) when a != nil, do: a
  def get_winner(_), do: nil

  @doc """
  Whether every cell on `board` is taken.
  """
  @spec board_full?(board_type()) :: boolean
  def board_full?(board) do
    not Enum.any?(board, &is_nil/1)
  end

  @doc """
  Returns the game outcome: `{:winner, player}`, `{:tie}`, or `{:continue}`.
  """
  @spec get_state(t()) :: {atom()} | {atom(), player_type()}
  def get_state(%TicTacToe.Game{board: board}) do
    # get_winner/1 is computed once (the original evaluated it twice).
    case get_winner(board) do
      nil ->
        if board_full?(board), do: {:tie}, else: {:continue}

      winner ->
        {:winner, winner}
    end
  end
end
|
server/lib/tic_tac_toe/game.ex
| 0.815012
| 0.440349
|
game.ex
|
starcoder
|
defmodule SpaceEx.Types.Decoders do
  alias SpaceEx.API.Type
  alias SpaceEx.{Protobufs, ObjectReference}

  @moduledoc false

  # For each "raw" protobuf wrapper type, encode a dummy value at compile
  # time and capture the first byte of the wire encoding (the field tag
  # byte). The wire format omits that leading byte for raw values, so
  # decode/3 re-prepends it before handing the bytes to the protobuf
  # decoder.
  [
    {Protobufs.Raw.Bool, true},
    {Protobufs.Raw.Bytes, <<1, 2, 3>>},
    {Protobufs.Raw.String, "dummy"},
    {Protobufs.Raw.Float, 1.23},
    {Protobufs.Raw.Double, 1.23},
    {Protobufs.Raw.SInt32, 123},
    {Protobufs.Raw.UInt32, 123},
    {Protobufs.Raw.UInt64, 123}
  ]
  |> Enum.each(fn {module, example} ->
    <<first_byte, _::binary>> =
      module.new(value: example)
      |> module.encode

    # Generated at compile time: one clause per Raw module, returning its
    # tag byte.
    def raw_first_byte(unquote(module)), do: <<unquote(first_byte)>>
  end)

  # Raw scalar: prepend the captured tag byte, decode, unwrap the value.
  def decode(bytes, %Type.Raw{module: module}, _conn) do
    bytes = raw_first_byte(module) <> bytes
    module.decode(bytes).value
  end

  # Full protobuf message: decode directly with its module.
  def decode(bytes, %Type.Protobuf{module: module}, _conn) do
    module.decode(bytes)
  end

  # Container types decode their wrapper, then recursively decode elements.
  def decode(bytes, %Type.List{subtype: subtype}, conn) do
    Protobufs.List.decode(bytes).items
    |> Enum.map(&decode(&1, subtype, conn))
  end

  def decode(bytes, %Type.Set{subtype: subtype}, conn) do
    Protobufs.Set.decode(bytes).items
    |> MapSet.new(&decode(&1, subtype, conn))
  end

  # Tuples decode element-wise against a positional list of subtypes.
  def decode(bytes, %Type.Tuple{subtypes: subtypes}, conn) do
    Protobufs.Tuple.decode(bytes).items
    |> decode_tuple(subtypes, conn)
    |> List.to_tuple()
  end

  def decode(bytes, %Type.Dictionary{key_type: key_type, value_type: value_type}, conn) do
    Protobufs.Dictionary.decode(bytes).entries
    |> Map.new(fn entry ->
      {
        decode(entry.key, key_type, conn),
        decode(entry.value, value_type, conn)
      }
    end)
  end

  # Enumerations map wire values to atoms via the generated module.
  def decode(bytes, %Type.Enumeration{module: module}, _conn) do
    module.wire_to_atom(bytes)
  end

  # Remote objects become references carrying the raw id and connection.
  def decode(bytes, %Type.Class{name: name}, conn) do
    %ObjectReference{id: bytes, class: name, conn: conn}
  end

  # Zips items with their subtypes; assumes both lists have equal length
  # (a mismatch raises FunctionClauseError).
  defp decode_tuple([], [], _conn), do: []

  defp decode_tuple([item | items], [type | types], conn) do
    [decode(item, type, conn) | decode_tuple(items, types, conn)]
  end
end
|
lib/space_ex/types/decoders.ex
| 0.592431
| 0.435001
|
decoders.ex
|
starcoder
|
defmodule Riptide.Store.Memory do
  @moduledoc """
  This store persists all data into an ETS table. It will not survive restarts and is best used for local development or in conjunction with `Riptide.Store.Composite` to keep a portion of the tree in memory.

  ## Configuration

  ```elixir
  config :riptide,
    store: %{
      read: {Riptide.Store.Memory, []},
      write: {Riptide.Store.Memory, []},
    }
  ```

  ## Options
  - `:table` - name for ETS table, defaults to `:riptide_table` (optional)
  """
  @behaviour Riptide.Store

  # Creates the named ETS table and loads the optional snapshot file.
  # :ordered_set keeps path-list keys in term order, which makes the
  # range scans in iterate_keys/3 possible.
  @impl true
  def init(opts) do
    opts
    |> opts_table()
    |> :ets.new([
      :ordered_set,
      :public,
      :named_table,
      read_concurrency: true,
      write_concurrency: true
    ])

    :ok = snapshot_load(opts)
    :ok
  end

  defp opts_table(opts), do: Keyword.get(opts, :table, :riptide_table)
  defp opts_snapshot(opts), do: Keyword.get(opts, :snapshot, nil)

  # Persists the entire tree as JSON to the snapshot file; no-op when the
  # :snapshot option is unset.
  def snapshot_write(opts) do
    case opts_snapshot(opts) do
      nil ->
        :ok

      result ->
        data = Riptide.Store.query(%{}, __MODULE__, opts)
        json = Jason.encode!(data)
        File.write!(result, json)
        :ok
    end
  end

  # Restores a previously written snapshot. Any failure (no snapshot
  # configured, missing file, bad JSON) is deliberately swallowed and
  # treated as an empty store.
  def snapshot_load(opts) do
    with path when path != nil <- opts_snapshot(opts),
         {:ok, data} <- File.read(path),
         {:ok, json} <- Jason.decode(data),
         :ok = mutation(Dynamic.flatten(json), [], opts) do
      :ok
    else
      _ -> :ok
    end
  end

  # Applies deletes first (each delete path removes the whole key range
  # under it), then inserts merges (values stored JSON-encoded), then
  # refreshes the snapshot.
  @impl true
  def mutation(merges, deletes, opts) do
    table = opts_table(opts)

    deletes
    |> Enum.each(fn {path, _} ->
      # Build the [min, max) key range that covers every key under `path`.
      {last, rest} = List.pop_at(path, -1)
      {min, max} = Riptide.Store.Prefix.range(last, %{})
      min = rest ++ min
      max = rest ++ max

      table
      |> iterate_keys(min, max)
      |> Enum.each(fn path -> :ets.delete(table, path) end)
    end)

    :ets.insert(
      table,
      merges
      |> Stream.map(fn {path, value} -> {path, Jason.encode!(value)} end)
      |> Enum.to_list()
    )

    :ok = snapshot_write(opts)
    :ok
  end

  # Lazily answers each queried path with its matching {path, value} pairs.
  @impl true
  def query(paths, opts) do
    table = opts_table(opts)
    Stream.map(paths, fn {path, opts} -> {path, query_path(table, path, opts)} end)
  end

  # Streams all stored entries whose key falls in the range derived from
  # `path` + query opts, JSON-decoding values on the way out.
  defp query_path(table, path, opts) do
    {last, rest} = List.pop_at(path, -1)
    {min, max} = Riptide.Store.Prefix.range(last, opts)
    min = rest ++ min
    max = rest ++ max

    table
    |> iterate_keys(min, max)
    |> Stream.map(&:ets.lookup(table, &1))
    |> Stream.map(&List.first/1)
    # The seed key (min) may not exist; its lookup yields nil — drop it.
    |> Stream.filter(fn item -> item !== nil end)
    |> Stream.map(fn {path, value} -> {path, Jason.decode!(value)} end)
  end

  # Lazily walks table keys in [min, max) using :ets.next/2. Emits `min`
  # itself first (callers tolerate it being absent from the table).
  defp iterate_keys(table, min, max) do
    Stream.resource(
      fn -> :start end,
      fn
        :start ->
          {[min], min}

        key ->
          case :ets.next(table, key) do
            :"$end_of_table" -> {:halt, nil}
            result when result >= max -> {:halt, nil}
            result -> {[result], result}
          end
      end,
      fn _ -> :skip end
    )
  end
end
|
packages/elixir/lib/riptide/store/store_memory.ex
| 0.857783
| 0.76533
|
store_memory.ex
|
starcoder
|
defmodule Crossbar do
  @moduledoc """
  The `Crossbar` module provides commands for configuring, starting, and
  stopping the Crossbar server. You might want to use it for running tests or
  interactive development.

  Why does this module implement the `GenEvent` behavior? It can be hooked into
  the `ExUnit.EventManager`:

      ExUnit.start(formatters: [ExUnit.CLIFormatter, Crossbar])

  Useful for interactive development, the Crossbar.io server can be started by
  calling `start`:

      Crossbar.start()

  ## Crossbar.io Dependency

  This module requires the [Crossbar.io
  executable](http://crossbar.io/docs/Quick-Start/). Via
  [pip](https://pypi.python.org/pypi/pip/):

      pip install crossbar
  """
  # NOTE(review): GenEvent, Dict, :random and :crypto.rand_bytes/1 are all
  # deprecated in modern Elixir/OTP — this module targets an older
  # toolchain. Revisit if the project's Elixir version is upgraded.
  use GenEvent
  require Logger
  require EEx

  # Module Attributes

  # NOTE(review): @timeout is not referenced anywhere visible in this module.
  @timeout 1000
  @crossbar_exec "/usr/local/bin/crossbar"
  @crossbar_path Application.app_dir(:spell, ".crossbar")
  @crossbar_args ["--cbdir", @crossbar_path]
  @crossbar_template Application.app_dir(:spell, "priv/config.json.eex")

  # Structs

  # Tracks the OS port running the crossbar executable plus the command
  # used to start it (needed again by the matching `crossbar stop`).
  defstruct [:port, executable: @crossbar_exec, arguments: @crossbar_args]

  @typep t :: %__MODULE__{
    port: Port.t,
    executable: String.t,
    arguments: [String.t]}

  # Public Interface

  @doc """
  Add an event manager with the `Crossbar` handler to `Spell.Supervisor`.
  """
  @spec start(Keyword.t) :: {:ok, pid} | {:error, any}
  def start(options \\ get_config()) do
    import Supervisor.Spec
    Supervisor.start_child(Spell.Supervisor,
                           worker(__MODULE__, [options],
                                  restart: :transient,
                                  shutdown: 10000))
  end

  @doc """
  Stop the Crossbar.io server. This can only be used with `start/1`.
  """
  @spec stop() :: :ok | {:error, any}
  def stop() do
    GenEvent.stop(Crossbar)
    Supervisor.delete_child(Spell.Supervisor, Crossbar)
  end

  @doc """
  Start an event manager with the `Crossbar` handler.

  Stop the process with:

      GenEvent.stop(pid)
  """
  @spec start_link(Keyword.t) :: {:ok, pid} | {:error, any}
  def start_link(options \\ get_config()) do
    {:ok, pid} = GenEvent.start_link(name: __MODULE__)
    :ok = GenEvent.add_handler(pid, __MODULE__, [options])
    {:ok, pid}
  end

  @doc """
  Return the port which the crossbar transport is listening on.

  The default value of `8080` for websocket and `9000` for raw_socket can be
  overrode using the environment variables `CROSSBAR_PORT_WS` and `CROSSBAR_PORT_RS`.
  """
  @spec get_port(String.t) :: :inet.port
  def get_port("websocket"), do: get_port_from_env("CROSSBAR_PORT_WS", 8080)
  def get_port("rawsocket"), do: get_port_from_env("CROSSBAR_PORT_RS", 9000)

  @doc """
  Return the port which the crossbar raw_socket auth is listening on.

  The default value of `9001` for raw_socket can be
  overrode using the environment variable `CROSSBAR_AUTH_PORT_RS`.
  """
  @spec get_auth_port(String.t) :: :inet.port
  def get_auth_port("rawsocket"), do: get_port_from_env("CROSSBAR_AUTH_PORT_RS", 9001)

  @doc """
  Return the crossbar host.
  """
  @spec get_host :: String.t
  def get_host, do: "localhost"

  @doc """
  Get the crossbar resource path.
  """
  @spec get_path(String.t) :: String.t
  def get_path("websocket"), do: "/ws"
  def get_path("rawsocket"), do: ""

  @doc """
  Get the crossbar config.
  """
  @spec get_config(String.t) :: Keyword.t
  def get_config(transport \\ app_transport) do
    [host: get_host(),
     port: get_port(transport),
     path: get_path(transport),
     realm: get_realm()]
  end

  @doc """
  Get the config as a uri.
  """
  @spec uri(Keyword.t, String.t) :: String.t
  def uri(options \\ get_config(), transport \\ app_transport) do
    uri_for(transport, options)
  end

  @doc """
  Hack to get the auth uri.

  TODO: support this as part of templating out the config file.
  """
  @spec uri_auth(Keyword.t, String.t) :: String.t
  def uri_auth(options \\ get_config(), transport \\ app_transport) do
    auth_uri_for(transport, options)
  end

  @doc """
  Get the default realm.
  """
  @spec get_realm :: String.t
  def get_realm do
    "realm1"
  end

  @doc """
  Create the Crossbar.io config dir for `config`.

  ## Options

  * `:crossbar_path :: String.t` the path to the Crossbar.io config directory.

  See `get_config/0` for other options.
  """
  def create_config(config \\ []) do
    {path, config} = Dict.pop(config, :crossbar_path, @crossbar_path)
    case File.mkdir_p(path) do
      :ok ->
        # Render the EEx template with the merged options and write the
        # resulting config.json into the config dir.
        json_config = Dict.merge(get_all_config(), config)
          |> template_config()
        Path.join(path, "config.json")
          |> File.write(json_config)
      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Generate a WAMP URI using the provided prefix and a random suffix.
  """
  def create_uri(prefix, length \\ 10) do
    reseed()
    "#{prefix}.#{rand_bin(length)}"
  end

  # GenEvent Callbacks

  @doc """
  Initialize the GenEvent handler with opts.
  """
  @spec init(Keyword.t) :: {:ok, t} | {:error, term}
  def init(opts) do
    :ok = create_config()
    executable = Dict.get(opts, :executable, @crossbar_exec)
    arguments = Dict.get(opts, :arguments, @crossbar_args)
    Logger.debug(fn ->
      command = Enum.intersperse([executable, "start" | arguments], " ")
      ["Starting crossbar: ", command]
    end)
    # Spawn the crossbar daemon as an OS port so we get its stdout as
    # messages and notice when it dies.
    port = Port.open({:spawn_executable, executable}, port_opts(arguments))
    # Wait for crossbar to start.
    case await do
      :ok ->
        Logger.debug("Crossbar started.")
        {:ok, %__MODULE__{port: port,
                          executable: executable,
                          arguments: arguments}}
      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Handle the `ExUnit.EventManager` events. When the test suite is
  finished stop the crossbar server.
  """
  def handle_event({:suite_finished, _, _}, state) do
    # `crossbar stop` must be invoked with the same --cbdir arguments the
    # daemon was started with.
    {msg, exit_code} =
      System.cmd(state.executable, ["stop" | state.arguments])
    Logger.debug("Exited crossbar [status: " <>
      "#{Integer.to_string(exit_code)}] -- #{inspect msg}")
    {:ok, state}
  end

  def handle_event(_event, state) do
    {:ok, state}
  end

  def handle_info({_transport, _pid, {:terminating, {:remote, :closed}}}, _state) do
    # Remove the handler when receiving a remote closed message
    # Logger.debug(fn -> "Crossbar out: #{inspect(message)}" end)
    :remove_handler
  end

  def handle_info({port, {:data, message}}, %{port: port} = state) do
    # Handle the stdout data coming in from the port
    Logger.debug(fn -> "Crossbar.io stdout: #{inspect(message)}" end)
    {:ok, state}
  end

  def handle_info({:EXIT, _pid, :normal}, state) do
    # Swallow the notification of a websocket connection dying
    {:ok, state}
  end

  def terminate(reason, state) do
    Logger.debug(fn -> "Crossbar.io terminating due to: #{reason}" end)
    # Reuse the suite_finished handler to shut the daemon down.
    handle_event({:suite_finished, nil, nil}, state)
  end

  # Private Functions

  # Template out a crossbar config file
  EEx.function_from_file :defp, :template_config, @crossbar_template, [:assigns]

  # Polls the websocket endpoint until crossbar accepts connections, at
  # `interval` ms between attempts, up to `retries` attempts.
  @spec await(Keyword.t) :: :ok | {:error, :timeout | term}
  defp await(config \\ get_config("websocket"), interval \\ 250, retries \\ 40)
  defp await(_config, _interval, 0), do: {:error, :timeout}
  defp await(config, interval, retries) do
    case Spell.Transport.WebSocket.connect(Spell.Config.serializer, config) do
      {:error, :econnrefused} ->
        # Flush the error message of the linked websocket crashing
        receive do
          {:EXIT, pid, :normal} when is_pid(pid) -> :ok
        after
          0 -> :ok
        end
        :timer.sleep(interval)
        await(config, interval, retries - 1)
      {:ok, _pid} -> :ok
      {:error, reason} -> {:error, reason}
    end
  end

  @spec port_opts([String.t]) :: Keyword.t
  defp port_opts(arguments) do
    [{:args, ["start" | arguments]},
     :binary,
     :use_stdio,
     :stderr_to_stdout]
  end

  defp app_transport, do: Spell.Config.transport_name

  # All values the config.json.eex template needs.
  defp get_all_config do
    [host: get_host(),
     websocket_port: get_port("websocket"),
     raw_socket_port: get_port("rawsocket"),
     raw_socket_auth_port: get_auth_port("rawsocket"),
     websocket_path: get_path("websocket"),
     realm: get_realm()]
  end

  defp uri_for("websocket", options) do
    "ws://#{options[:host]}:#{options[:port]}#{options[:path]}"
  end
  defp uri_for("rawsocket", options) do
    "raw_socket://#{options[:host]}:#{options[:port]}"
  end

  defp auth_uri_for("websocket", options) do
    uri_for("websocket", options) <> "_auth"
  end
  defp auth_uri_for("rawsocket", options) do
    uri_for("rawsocket", Keyword.put(options, :port, get_auth_port("rawsocket")))
  end

  defp get_port_from_env(env_var, default) do
    case System.get_env(env_var) do
      nil -> default
      port when is_binary(port) -> String.to_integer(port)
    end
  end

  # Seed the (legacy) :random PRNG with fresh entropy for create_uri/2.
  defp reseed() do
    <<a :: 32, b :: 32, c :: 32>> = :crypto.rand_bytes(12)
    :random.seed(a, b, c)
  end

  # Builds a random binary of `length` characters drawn from ASCII 48..122.
  defp rand_bin(length, range \\ Enum.into(48..122, []), acc \\ [])
  defp rand_bin(0, _, acc) do
    :erlang.list_to_binary(acc)
  end
  defp rand_bin(length, range, acc) do
    n = length(range) |> :random.uniform
    rand_bin(length - 1, range, [Enum.at(range, n - 1) | acc])
  end
end
|
lib/crossbar.ex
| 0.728941
| 0.483161
|
crossbar.ex
|
starcoder
|
defmodule Polyglot.Parser do
  @moduledoc """
  Tokeniser/parser for ICU-MessageFormat-style strings, e.g. `"Hello {name}"`
  or `"{count, plural, one {# item} other {# items}}"`.
  """

  @doc """
  Parses `str` into an AST: a list of plain strings, `:hash` markers,
  `{:variable, name}` tuples and `{method, arg, clauses}` tuples.

  Returns `{:ok, tree}` on success or `{:error, reason}` on unbalanced
  brackets or a trailing escape character (the original raised a MatchError
  for these inputs).
  """
  def parse(str) do
    case tokenise(str, {"", [], 0}) do
      {:ok, tokens} ->
        tokens
        |> Enum.reject(&(&1 == ""))
        |> parse_tree([])

      {:error, _reason} = error ->
        error
    end
  end

  # Tokenise a string into text chunks and the marker atoms
  # :open / :close / :comma / :hash.
  # State is {current_text_buffer, reversed_tokens, bracket_depth}.
  defp tokenise("", {buffer, tokens, 0}) do
    {:ok, Enum.reverse([buffer | tokens])}
  end

  defp tokenise("", _state) do
    {:error, "Unmatched opening bracket"}
  end

  defp tokenise(<<c::binary-size(1), rest::binary>>, {buffer, tokens, b_depth}) do
    case {b_depth, c} do
      {_, "{"} ->
        tokenise(rest, {"", [:open, buffer | tokens], b_depth + 1})

      {n, "}"} when n > 0 ->
        tokenise(rest, {"", [:close, buffer | tokens], b_depth - 1})

      {_, "}"} ->
        {:error, "Unmatched closing bracket"}

      # Commas are only significant inside brackets.
      {n, ","} when n > 0 ->
        tokenise(rest, {"", [:comma, buffer | tokens], b_depth})

      {_, "#"} ->
        tokenise(rest, {"", [:hash, buffer | tokens], b_depth})

      # Backslash escapes the next character (e.g. \{ or \#). A backslash
      # at the very end of input is an error rather than a crash.
      {_, "\\"} ->
        case rest do
          <<escaped::binary-size(1), rest::binary>> ->
            tokenise(rest, {buffer <> escaped, tokens, b_depth})

          "" ->
            {:error, "Trailing escape character"}
        end

      {_, c} ->
        tokenise(rest, {buffer <> c, tokens, b_depth})
    end
  end

  # Parse tokens out into nested lists of list | tuple | string | atom.
  defp parse_tree(tokens, output) do
    case tokens do
      # `{name}` — a simple variable reference.
      [:open, arg, :close | rest] ->
        arg = arg |> String.trim() |> String.downcase()
        parse_tree(rest, [{:variable, arg} | output])

      # `{name, method, ...clauses...}` — select/plural/ordinal/range.
      [:open, arg, :comma, method, :comma | rest] ->
        {:partial, {body, rest}} = parse_tree_body(rest)

        arg = arg |> String.trim() |> String.downcase()
        parse_tree(rest, [{method_to_atom(method), arg, body} | output])

      # A bare `{` opening a nested clause.
      [:open | rest] ->
        {:partial, {clause, rest}} = parse_tree(rest, [])
        parse_tree(rest, [clause | output])

      [:close | rest] ->
        {:partial, {Enum.reverse(output), rest}}

      [x | rest] ->
        parse_tree(rest, [x | output])

      [] ->
        {:ok, Enum.reverse(output)}
    end
  end

  # An unknown method name raises CaseClauseError, matching the original
  # behaviour.
  defp method_to_atom(method) do
    case String.trim(method) do
      "select" -> :select
      "selectordinal" -> :ordinal
      "plural" -> :plural
      "range" -> :range
    end
  end

  defp parse_tree_body(tokens), do: parse_body(tokens, %{})

  # Parse `key {clause} key {clause} ...` bodies into a map of
  # key => clause AST.
  defp parse_body([value, :open | rest], output) do
    {:partial, {clause, rest}} = parse_tree(rest, [])
    parse_body(rest, Map.put(output, String.trim(value), clause))
  end

  defp parse_body([:close | rest], output) do
    {:partial, {output, rest}}
  end

  # Whitespace-only fillers between clauses are skipped; anything else is
  # surfaced as unexpected.
  defp parse_body([other | rest], output) do
    case String.trim(other) do
      "" -> parse_body(rest, output)
      text -> {:unexpected, text}
    end
  end
end
|
lib/polyglot/parser.ex
| 0.672762
| 0.595669
|
parser.ex
|
starcoder
|
defmodule AWS.MigrationHubConfig do
  @moduledoc """
  The AWS Migration Hub home region APIs are available specifically for working
  with your Migration Hub home region.

  You can use these APIs to determine a home region, as well as to create and work
  with controls that describe the home region.

    * You must make API calls for write actions (create, notify,
      associate, disassociate, import, or put) while in your home region, or a
      `HomeRegionNotSetException` error is returned.

    * API calls for read actions (list, describe, stop, and delete) are
      permitted outside of your home region.

    * If you call a write API outside the home region, an
      `InvalidInputException` is returned.

    * You can call `GetHomeRegion` action to obtain the account's
      Migration Hub home region.

  For specific API usage, see the sections that follow in this AWS Migration Hub
  Home Region API reference.
  """
  # NOTE: generated AWS SDK module — every action is a thin JSON-protocol
  # POST via AWS.Request.

  alias AWS.Client
  alias AWS.Request

  # Static service descriptor used by AWS.Request to sign (SigV4, scope
  # "mgh") and route calls for this service.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2019-06-30",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "migrationhub-config",
      global?: false,
      protocol: "json",
      service_id: "MigrationHub Config",
      signature_version: "v4",
      signing_name: "mgh",
      target_prefix: "AWSMigrationHubMultiAccountService"
    }
  end

  @doc """
  This API sets up the home region for the calling account only.
  """
  def create_home_region_control(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateHomeRegionControl", input, options)
  end

  @doc """
  This API permits filtering on the `ControlId` and `HomeRegion` fields.
  """
  def describe_home_region_controls(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeHomeRegionControls", input, options)
  end

  @doc """
  Returns the calling account’s home region, if configured.

  This API is used by other AWS services to determine the regional endpoint for
  calling AWS Application Discovery Service and Migration Hub. You must call
  `GetHomeRegion` at least once before you call any other AWS Application
  Discovery Service and AWS Migration Hub APIs, to obtain the account's Migration
  Hub home region.
  """
  def get_home_region(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetHomeRegion", input, options)
  end
end
|
lib/aws/generated/migration_hub_config.ex
| 0.826116
| 0.404449
|
migration_hub_config.ex
|
starcoder
|
defmodule AWS.DirectConnect do
@moduledoc """
AWS Direct Connect links your internal network to an AWS Direct Connect location
over a standard Ethernet fiber-optic cable.
One end of the cable is connected to your router, the other to an AWS Direct
Connect router. With this connection in place, you can create virtual interfaces
directly to the AWS cloud (for example, to Amazon EC2 and Amazon S3) and to
Amazon VPC, bypassing Internet service providers in your network path. A
connection provides access to all AWS Regions except the China (Beijing) and
(China) Ningxia Regions. AWS resources in the China Regions can only be accessed
through locations associated with those Regions.
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor used by AWS.Request to sign (SigV4) and route
# calls for the Direct Connect JSON protocol ("OvertureService" targets).
def metadata do
  %AWS.ServiceMetadata{
    abbreviation: nil,
    api_version: "2012-10-25",
    content_type: "application/x-amz-json-1.1",
    credential_scope: nil,
    endpoint_prefix: "directconnect",
    global?: false,
    protocol: "json",
    service_id: "Direct Connect",
    signature_version: "v4",
    signing_name: "directconnect",
    target_prefix: "OvertureService"
  }
end
@doc """
Accepts a proposal request to attach a virtual private gateway or transit
gateway to a Direct Connect gateway.
"""
def accept_direct_connect_gateway_association_proposal(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"AcceptDirectConnectGatewayAssociationProposal",
input,
options
)
end
@doc """
Deprecated.
Use `AllocateHostedConnection` instead.
Creates a hosted connection on an interconnect.
Allocates a VLAN number and a specified amount of bandwidth for use by a hosted
connection on the specified interconnect.
Intended for use by AWS Direct Connect Partners only.
"""
def allocate_connection_on_interconnect(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AllocateConnectionOnInterconnect", input, options)
end
@doc """
Creates a hosted connection on the specified interconnect or a link aggregation
group (LAG) of interconnects.
Allocates a VLAN number and a specified amount of capacity (bandwidth) for use
by a hosted connection on the specified interconnect or LAG of interconnects.
AWS polices the hosted connection for the specified capacity and the AWS Direct
Connect Partner must also police the hosted connection for the specified
capacity.
Intended for use by AWS Direct Connect Partners only.
"""
def allocate_hosted_connection(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AllocateHostedConnection", input, options)
end
@doc """
Provisions a private virtual interface to be owned by the specified AWS account.
Virtual interfaces created using this action must be confirmed by the owner
using `ConfirmPrivateVirtualInterface`. Until then, the virtual interface is in
the `Confirming` state and is not available to handle traffic.
"""
def allocate_private_virtual_interface(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AllocatePrivateVirtualInterface", input, options)
end
@doc """
Provisions a public virtual interface to be owned by the specified AWS account.
The owner of a connection calls this function to provision a public virtual
interface to be owned by the specified AWS account.
Virtual interfaces created using this function must be confirmed by the owner
using `ConfirmPublicVirtualInterface`. Until this step has been completed, the
virtual interface is in the `confirming` state and is not available to handle
traffic.
When creating an IPv6 public virtual interface, omit the Amazon address and
customer address. IPv6 addresses are automatically assigned from the Amazon pool
of IPv6 addresses; you cannot specify custom IPv6 addresses.
"""
def allocate_public_virtual_interface(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AllocatePublicVirtualInterface", input, options)
end
@doc """
Provisions a transit virtual interface to be owned by the specified AWS account.
Use this type of interface to connect a transit gateway to your Direct Connect
gateway.
The owner of a connection provisions a transit virtual interface to be owned by
the specified AWS account.
After you create a transit virtual interface, it must be confirmed by the owner
using `ConfirmTransitVirtualInterface`. Until this step has been completed, the
transit virtual interface is in the `requested` state and is not available to
handle traffic.
"""
def allocate_transit_virtual_interface(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AllocateTransitVirtualInterface", input, options)
end
@doc """
Associates an existing connection with a link aggregation group (LAG).
The connection is interrupted and re-established as a member of the LAG
(connectivity to AWS is interrupted). The connection must be hosted on the same
AWS Direct Connect endpoint as the LAG, and its bandwidth must match the
bandwidth for the LAG. You can re-associate a connection that's currently
associated with a different LAG; however, if removing the connection would cause
the original LAG to fall below its setting for minimum number of operational
connections, the request fails.
Any virtual interfaces that are directly associated with the connection are
automatically re-associated with the LAG. If the connection was originally
associated with a different LAG, the virtual interfaces remain associated with
the original LAG.
For interconnects, any hosted connections are automatically re-associated with
the LAG. If the interconnect was originally associated with a different LAG, the
hosted connections remain associated with the original LAG.
"""
def associate_connection_with_lag(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateConnectionWithLag", input, options)
end
@doc """
Associates a hosted connection and its virtual interfaces with a link
aggregation group (LAG) or interconnect.
If the target interconnect or LAG has an existing hosted connection with a
conflicting VLAN number or IP address, the operation fails. This action
temporarily interrupts the hosted connection's connectivity to AWS as it is
being migrated.
Intended for use by AWS Direct Connect Partners only.
"""
def associate_hosted_connection(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateHostedConnection", input, options)
end
@doc """
Associates a virtual interface with a specified link aggregation group (LAG) or
connection.
Connectivity to AWS is temporarily interrupted as the virtual interface is being
migrated. If the target connection or LAG has an associated virtual interface
with a conflicting VLAN number or a conflicting IP address, the operation fails.
Virtual interfaces associated with a hosted connection cannot be associated with
a LAG; hosted connections must be migrated along with their virtual interfaces
using `AssociateHostedConnection`.
To reassociate a virtual interface to a new connection or LAG, the requester
must own either the virtual interface itself or the connection to which the
virtual interface is currently associated. Additionally, the requester must own
the connection or LAG for the association.
"""
def associate_virtual_interface(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateVirtualInterface", input, options)
end
@doc """
Confirms the creation of the specified hosted connection on an interconnect.
Upon creation, the hosted connection is initially in the `Ordering` state, and
remains in this state until the owner confirms creation of the hosted
connection.
"""
def confirm_connection(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ConfirmConnection", input, options)
end
@doc """
Accepts ownership of a private virtual interface created by another AWS account.
After the virtual interface owner makes this call, the virtual interface is
created and attached to the specified virtual private gateway or Direct Connect
gateway, and is made available to handle traffic.
"""
def confirm_private_virtual_interface(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ConfirmPrivateVirtualInterface", input, options)
end
@doc """
Accepts ownership of a public virtual interface created by another AWS account.
After the virtual interface owner makes this call, the specified virtual
interface is created and made available to handle traffic.
"""
def confirm_public_virtual_interface(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ConfirmPublicVirtualInterface", input, options)
end
@doc """
Accepts ownership of a transit virtual interface created by another AWS account.
After the owner of the transit virtual interface makes this call, the specified
transit virtual interface is created and made available to handle traffic.
"""
def confirm_transit_virtual_interface(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ConfirmTransitVirtualInterface", input, options)
end
@doc """
Creates a BGP peer on the specified virtual interface.
You must create a BGP peer for the corresponding address family (IPv4/IPv6) in
order to access AWS resources that also use that address family.
If logical redundancy is not supported by the connection, interconnect, or LAG,
the BGP peer cannot be in the same address family as an existing BGP peer on the
virtual interface.
When creating a IPv6 BGP peer, omit the Amazon address and customer address.
IPv6 addresses are automatically assigned from the Amazon pool of IPv6
addresses; you cannot specify custom IPv6 addresses.
For a public virtual interface, the Autonomous System Number (ASN) must be
private or already whitelisted for the virtual interface.
"""
def create_bgp_peer(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateBGPPeer", input, options)
end
@doc """
Creates a connection between a customer network and a specific AWS Direct
Connect location.
A connection links your internal network to an AWS Direct Connect location over
a standard Ethernet fiber-optic cable. One end of the cable is connected to your
router, the other to an AWS Direct Connect router.
To find the locations for your Region, use `DescribeLocations`.
You can automatically add the new connection to a link aggregation group (LAG)
by specifying a LAG ID in the request. This ensures that the new connection is
allocated on the same AWS Direct Connect endpoint that hosts the specified LAG.
If there are no available ports on the endpoint, the request fails and no
connection is created.
"""
def create_connection(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateConnection", input, options)
end
@doc """
Creates a Direct Connect gateway, which is an intermediate object that enables
you to connect a set of virtual interfaces and virtual private gateways.
A Direct Connect gateway is global and visible in any AWS Region after it is
created. The virtual interfaces and virtual private gateways that are connected
through a Direct Connect gateway can be in different AWS Regions. This enables
you to connect to a VPC in any Region, regardless of the Region in which the
virtual interfaces are located, and pass traffic between them.
"""
def create_direct_connect_gateway(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDirectConnectGateway", input, options)
end
@doc """
Creates an association between a Direct Connect gateway and a virtual private
gateway.
The virtual private gateway must be attached to a VPC and must not be associated
with another Direct Connect gateway.
"""
def create_direct_connect_gateway_association(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"CreateDirectConnectGatewayAssociation",
input,
options
)
end
@doc """
Creates a proposal to associate the specified virtual private gateway or transit
gateway with the specified Direct Connect gateway.
You can associate a Direct Connect gateway and virtual private gateway or
transit gateway that is owned by any AWS account.
"""
def create_direct_connect_gateway_association_proposal(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"CreateDirectConnectGatewayAssociationProposal",
input,
options
)
end
@doc """
Creates an interconnect between an AWS Direct Connect Partner's network and a
specific AWS Direct Connect location.
An interconnect is a connection that is capable of hosting other connections.
The AWS Direct Connect partner can use an interconnect to provide AWS Direct
Connect hosted connections to customers through their own network services. Like
a standard connection, an interconnect links the partner's network to an AWS
Direct Connect location over a standard Ethernet fiber-optic cable. One end is
connected to the partner's router, the other to an AWS Direct Connect router.
You can automatically add the new interconnect to a link aggregation group (LAG)
by specifying a LAG ID in the request. This ensures that the new interconnect is
allocated on the same AWS Direct Connect endpoint that hosts the specified LAG.
If there are no available ports on the endpoint, the request fails and no
interconnect is created.
For each end customer, the AWS Direct Connect Partner provisions a connection on
their interconnect by calling `AllocateHostedConnection`. The end customer can
then connect to AWS resources by creating a virtual interface on their
connection, using the VLAN assigned to them by the AWS Direct Connect Partner.
Intended for use by AWS Direct Connect Partners only.
"""
def create_interconnect(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateInterconnect", input, options)
end
@doc """
Creates a link aggregation group (LAG) with the specified number of bundled
physical dedicated connections between the customer network and a specific AWS
Direct Connect location.
A LAG is a logical interface that uses the Link Aggregation Control Protocol
(LACP) to aggregate multiple interfaces, enabling you to treat them as a single
interface.
All connections in a LAG must use the same bandwidth (either 1Gbps or 10Gbps)
and must terminate at the same AWS Direct Connect endpoint.
You can have up to 10 dedicated connections per LAG. Regardless of this limit,
if you request more connections for the LAG than AWS Direct Connect can allocate
on a single endpoint, no LAG is created.
You can specify an existing physical dedicated connection or interconnect to
include in the LAG (which counts towards the total number of connections). Doing
so interrupts the current physical dedicated connection, and re-establishes them
as a member of the LAG. The LAG will be created on the same AWS Direct Connect
endpoint to which the dedicated connection terminates. Any virtual interfaces
associated with the dedicated connection are automatically disassociated and
re-associated with the LAG. The connection ID does not change.
If the AWS account used to create a LAG is a registered AWS Direct Connect
Partner, the LAG is automatically enabled to host sub-connections. For a LAG
owned by a partner, any associated virtual interfaces cannot be directly
configured.
"""
def create_lag(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateLag", input, options)
end
@doc """
Creates a private virtual interface.
A virtual interface is the VLAN that transports AWS Direct Connect traffic. A
private virtual interface can be connected to either a Direct Connect gateway or
a Virtual Private Gateway (VGW). Connecting the private virtual interface to a
Direct Connect gateway enables the possibility for connecting to multiple VPCs,
including VPCs in different AWS Regions. Connecting the private virtual
interface to a VGW only provides access to a single VPC within the same Region.
Setting the MTU of a virtual interface to 9001 (jumbo frames) can cause an
update to the underlying physical connection if it wasn't updated to support
jumbo frames. Updating the connection disrupts network connectivity for all
virtual interfaces associated with the connection for up to 30 seconds. To check
whether your connection supports jumbo frames, call `DescribeConnections`. To
check whether your virtual interface supports jumbo frames, call
`DescribeVirtualInterfaces`.
"""
def create_private_virtual_interface(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePrivateVirtualInterface", input, options)
end
@doc """
Creates a public virtual interface.
A virtual interface is the VLAN that transports AWS Direct Connect traffic. A
public virtual interface supports sending traffic to public services of AWS such
as Amazon S3.
When creating an IPv6 public virtual interface (`addressFamily` is `ipv6`),
leave the `customer` and `amazon` address fields blank to use auto-assigned IPv6
space. Custom IPv6 addresses are not supported.
"""
def create_public_virtual_interface(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePublicVirtualInterface", input, options)
end
@doc """
Creates a transit virtual interface.
A transit virtual interface should be used to access one or more transit
gateways associated with Direct Connect gateways. A transit virtual interface
enables the connection of multiple VPCs attached to a transit gateway to a
Direct Connect gateway.
If you associate your transit gateway with one or more Direct Connect gateways,
the Autonomous System Number (ASN) used by the transit gateway and the Direct
Connect gateway must be different. For example, if you use the default ASN 64512
for both your transit gateway and Direct Connect gateway, the association
request fails.
Setting the MTU of a virtual interface to 8500 (jumbo frames) can cause an
update to the underlying physical connection if it wasn't updated to support
jumbo frames. Updating the connection disrupts network connectivity for all
virtual interfaces associated with the connection for up to 30 seconds. To check
whether your connection supports jumbo frames, call `DescribeConnections`. To
check whether your virtual interface supports jumbo frames, call
`DescribeVirtualInterfaces`.
"""
def create_transit_virtual_interface(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateTransitVirtualInterface", input, options)
end
@doc """
Deletes the specified BGP peer on the specified virtual interface with the
specified customer address and ASN.
You cannot delete the last BGP peer from a virtual interface.
"""
def delete_bgp_peer(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteBGPPeer", input, options)
end
@doc """
Deletes the specified connection.
Deleting a connection only stops the AWS Direct Connect port hour and data
transfer charges. If you are partnering with any third parties to connect with
the AWS Direct Connect location, you must cancel your service with them
separately.
"""
def delete_connection(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteConnection", input, options)
end
@doc """
Deletes the specified Direct Connect gateway.
You must first delete all virtual interfaces that are attached to the Direct
Connect gateway and disassociate all virtual private gateways associated with
the Direct Connect gateway.
"""
def delete_direct_connect_gateway(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDirectConnectGateway", input, options)
end
@doc """
Deletes the association between the specified Direct Connect gateway and virtual
private gateway.
We recommend that you specify the `associationID` to delete the association.
Alternatively, if you own a virtual gateway and a Direct Connect gateway
association, you can specify the `virtualGatewayId` and `directConnectGatewayId`
to delete an association.
"""
def delete_direct_connect_gateway_association(%Client{} = client, input, options \\ []) do
  Request.request_post(
    client,
    metadata(),
    "DeleteDirectConnectGatewayAssociation",
    input,
    options
  )
end
@doc """
Deletes the association proposal request between the specified Direct Connect
gateway and virtual private gateway or transit gateway.
"""
def delete_direct_connect_gateway_association_proposal(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DeleteDirectConnectGatewayAssociationProposal",
input,
options
)
end
@doc """
Deletes the specified interconnect.
Intended for use by AWS Direct Connect Partners only.
"""
def delete_interconnect(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteInterconnect", input, options)
end
@doc """
Deletes the specified link aggregation group (LAG).
You cannot delete a LAG if it has active virtual interfaces or hosted
connections.
"""
def delete_lag(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteLag", input, options)
end
@doc """
Deletes a virtual interface.
"""
def delete_virtual_interface(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteVirtualInterface", input, options)
end
@doc """
Deprecated.
Use `DescribeLoa` instead.
Gets the LOA-CFA for a connection.
The Letter of Authorization - Connecting Facility Assignment (LOA-CFA) is a
document that your APN partner or service provider uses when establishing your
cross connect to AWS at the colocation facility. For more information, see
[Requesting Cross Connects at AWS Direct Connect Locations](https://docs.aws.amazon.com/directconnect/latest/UserGuide/Colocation.html)
in the *AWS Direct Connect User Guide*.
"""
def describe_connection_loa(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConnectionLoa", input, options)
end
@doc """
Displays the specified connection or all connections in this Region.
"""
def describe_connections(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConnections", input, options)
end
@doc """
Deprecated.
Use `DescribeHostedConnections` instead.
Lists the connections that have been provisioned on the specified interconnect.
Intended for use by AWS Direct Connect Partners only.
"""
def describe_connections_on_interconnect(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConnectionsOnInterconnect", input, options)
end
@doc """
Describes one or more association proposals for connection between a virtual
private gateway or transit gateway and a Direct Connect gateway.
"""
def describe_direct_connect_gateway_association_proposals(
%Client{} = client,
input,
options \\ []
) do
Request.request_post(
client,
metadata(),
"DescribeDirectConnectGatewayAssociationProposals",
input,
options
)
end
@doc """
Lists the associations between your Direct Connect gateways and virtual private
gateways.
You must specify a Direct Connect gateway, a virtual private gateway, or both.
If you specify a Direct Connect gateway, the response contains all virtual
private gateways associated with the Direct Connect gateway. If you specify a
virtual private gateway, the response contains all Direct Connect gateways
associated with the virtual private gateway. If you specify both, the response
contains the association between the Direct Connect gateway and the virtual
private gateway.
"""
def describe_direct_connect_gateway_associations(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeDirectConnectGatewayAssociations",
input,
options
)
end
@doc """
Lists the attachments between your Direct Connect gateways and virtual
interfaces.
You must specify a Direct Connect gateway, a virtual interface, or both. If you
specify a Direct Connect gateway, the response contains all virtual interfaces
attached to the Direct Connect gateway. If you specify a virtual interface, the
response contains all Direct Connect gateways attached to the virtual interface.
If you specify both, the response contains the attachment between the Direct
Connect gateway and the virtual interface.
"""
def describe_direct_connect_gateway_attachments(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeDirectConnectGatewayAttachments",
input,
options
)
end
@doc """
Lists all your Direct Connect gateways or only the specified Direct Connect
gateway.
Deleted Direct Connect gateways are not returned.
"""
def describe_direct_connect_gateways(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDirectConnectGateways", input, options)
end
@doc """
Lists the hosted connections that have been provisioned on the specified
interconnect or link aggregation group (LAG).
Intended for use by AWS Direct Connect Partners only.
"""
def describe_hosted_connections(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeHostedConnections", input, options)
end
@doc """
Deprecated.
Use `DescribeLoa` instead.
Gets the LOA-CFA for the specified interconnect.
The Letter of Authorization - Connecting Facility Assignment (LOA-CFA) is a
document that is used when establishing your cross connect to AWS at the
colocation facility. For more information, see [Requesting Cross Connects at AWS Direct Connect
Locations](https://docs.aws.amazon.com/directconnect/latest/UserGuide/Colocation.html)
in the *AWS Direct Connect User Guide*.
"""
def describe_interconnect_loa(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeInterconnectLoa", input, options)
end
@doc """
Lists the interconnects owned by the AWS account or only the specified
interconnect.
"""
def describe_interconnects(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeInterconnects", input, options)
end
@doc """
Describes all your link aggregation groups (LAG) or the specified LAG.
"""
def describe_lags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeLags", input, options)
end
@doc """
Gets the LOA-CFA for a connection, interconnect, or link aggregation group
(LAG).
The Letter of Authorization - Connecting Facility Assignment (LOA-CFA) is a
document that is used when establishing your cross connect to AWS at the
colocation facility. For more information, see [Requesting Cross Connects at AWS Direct Connect
Locations](https://docs.aws.amazon.com/directconnect/latest/UserGuide/Colocation.html)
in the *AWS Direct Connect User Guide*.
"""
def describe_loa(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeLoa", input, options)
end
@doc """
Lists the AWS Direct Connect locations in the current AWS Region.
These are the locations that can be selected when calling `CreateConnection` or
`CreateInterconnect`.
"""
def describe_locations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeLocations", input, options)
end
@doc """
Describes the tags associated with the specified AWS Direct Connect resources.
"""
def describe_tags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTags", input, options)
end
@doc """
Lists the virtual private gateways owned by the AWS account.
You can create one or more AWS Direct Connect private virtual interfaces linked
to a virtual private gateway.
"""
def describe_virtual_gateways(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeVirtualGateways", input, options)
end
@doc """
Displays all virtual interfaces for an AWS account.
Virtual interfaces deleted fewer than 15 minutes before you make the request are
also returned. If you specify a connection ID, only the virtual interfaces
associated with the connection are returned. If you specify a virtual interface
ID, then only a single virtual interface is returned.
A virtual interface (VLAN) transmits the traffic between the AWS Direct Connect
location and the customer network.
"""
def describe_virtual_interfaces(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeVirtualInterfaces", input, options)
end
@doc """
Disassociates a connection from a link aggregation group (LAG).
The connection is interrupted and re-established as a standalone connection (the
connection is not deleted; to delete the connection, use the `DeleteConnection`
request). If the LAG has associated virtual interfaces or hosted connections,
they remain associated with the LAG. A disassociated connection owned by an AWS
Direct Connect Partner is automatically converted to an interconnect.
If disassociating the connection would cause the LAG to fall below its setting
for minimum number of operational connections, the request fails, except when
it's the last member of the LAG. If all connections are disassociated, the LAG
continues to exist as an empty LAG with no physical connections.
"""
def disassociate_connection_from_lag(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisassociateConnectionFromLag", input, options)
end
@doc """
Lists the virtual interface failover test history.
"""
def list_virtual_interface_test_history(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListVirtualInterfaceTestHistory", input, options)
end
@doc """
Starts the virtual interface failover test that verifies your configuration
meets your resiliency requirements by placing the BGP peering session in the
DOWN state.
You can then send traffic to verify that there are no outages.
You can run the test on public, private, transit, and hosted virtual interfaces.
You can use
[ListVirtualInterfaceTestHistory](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_ListVirtualInterfaceTestHistory.html) to view the virtual interface test history.
If you need to stop the test before the test interval completes, use
[StopBgpFailoverTest](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_StopBgpFailoverTest.html).
"""
def start_bgp_failover_test(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartBgpFailoverTest", input, options)
end
@doc """
Stops the virtual interface failover test.
"""
def stop_bgp_failover_test(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopBgpFailoverTest", input, options)
end
@doc """
Adds the specified tags to the specified AWS Direct Connect resource.
Each resource can have a maximum of 50 tags.
Each tag consists of a key and an optional value. If a tag with the same key is
already associated with the resource, this action updates its value.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Removes one or more tags from the specified AWS Direct Connect resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Updates the specified attributes of the Direct Connect gateway association.
Add or remove prefixes from the association.
"""
def update_direct_connect_gateway_association(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"UpdateDirectConnectGatewayAssociation",
input,
options
)
end
@doc """
Updates the attributes of the specified link aggregation group (LAG).
You can update the following attributes:
* The name of the LAG.
* The value for the minimum number of connections that must be
operational for the LAG itself to be operational.
When you create a LAG, the default value for the minimum number of operational
connections is zero (0). If you update this value and the number of operational
connections falls below the specified value, the LAG automatically goes down to
avoid over-utilization of the remaining connections. Adjust this value with
care, as it could force the LAG down if it is set higher than the current number
of operational connections.
"""
def update_lag(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateLag", input, options)
end
@doc """
Updates the specified attributes of the specified virtual private interface.
Setting the MTU of a virtual interface to 9001 (jumbo frames) can cause an
update to the underlying physical connection if it wasn't updated to support
jumbo frames. Updating the connection disrupts network connectivity for all
virtual interfaces associated with the connection for up to 30 seconds. To check
whether your connection supports jumbo frames, call `DescribeConnections`. To
check whether your virtual interface supports jumbo frames, call
`DescribeVirtualInterfaces`.
"""
def update_virtual_interface_attributes(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "UpdateVirtualInterfaceAttributes", input, options)
end
end
|
lib/aws/generated/direct_connect.ex
| 0.851999
| 0.431345
|
direct_connect.ex
|
starcoder
|
defmodule SystemRegistry.Node do
  # Represents a single node (path element) in the SystemRegistry tree:
  # its parent path, full path, final key, and the owning process (`from`)
  # when the node is a leaf.
  defstruct parent: [], node: [], key: nil, from: nil

  # NOTE(review): this typespec describes a keyword list, but the module
  # defines a struct — presumably it should be `%__MODULE__{...}`; confirm.
  @type t :: [
          parent: [term],
          node: [term],
          key: term,
          from: pid | nil
        ]

  alias SystemRegistry.Storage.Binding, as: B
  alias SystemRegistry.Transaction

  import SystemRegistry.Utils

  # Looks up the binding registered under `{key, scope}` in the Binding
  # registry. Returns nil when absent, otherwise the stripped registry
  # entry (see SystemRegistry.Utils.strip/1).
  def binding(key, scope) do
    case Registry.lookup(B, {key, scope}) do
      [] -> nil
      binding -> strip(binding)
    end
  end

  # Returns the parent path of `node` by dropping its last element.
  def parent(node) do
    [_l | internal_nodes] = Enum.reverse(node)
    Enum.reverse(internal_nodes)
  end

  # A node is a leaf iff it has an owning process (`from` is set).
  def is_leaf?(%__MODULE__{from: nil}), do: false
  def is_leaf?(%__MODULE__{}), do: true

  # Builds a leaf struct for the given path. The key is the last path
  # element; `from` defaults to the calling process unless `:pid` is given
  # in `opts`.
  def leaf(node, opts \\ []) do
    pid = opts[:pid] || self()
    [l | internal_nodes] = Enum.reverse(node)
    parent = Enum.reverse(internal_nodes)
    %__MODULE__{parent: parent, node: node, key: l, from: pid}
  end

  @doc """
  Return the leaf nodes in full path form.

  ## Examples

      iex> SystemRegistry.Node.leaf_nodes(%{a: 2})
      [[:a]]

      iex> SystemRegistry.Node.leaf_nodes(%{a: %{b: 1, c: %{d: 2}}})
      [[:a, :b], [:a, :c, :d]]

      iex> SystemRegistry.Node.leaf_nodes(%{})
      []
  """
  def leaf_nodes(map) do
    leaf_nodes([], map)
  end

  # Non-map values terminate the recursion: the accumulated (reversed)
  # key path is one complete leaf path.
  defp leaf_nodes(pred, map) when not is_map(map) do
    [Enum.reverse(pred)]
  end

  # Maps recurse into every entry; an empty map therefore contributes no
  # paths (its key disappears from the result).
  defp leaf_nodes(pred, map) do
    Enum.flat_map(map, fn {k, v} -> leaf_nodes([k | pred], v) end)
  end

  # Prunes empty maps from `value` walking upward along `path`, deleting
  # any parent that becomes empty as a result, and removing the registry
  # binding for each pruned intermediate path.
  def trim_tree(value, [], _), do: value

  def trim_tree(value, [key | t] = path, bind_key) when is_map(value) do
    case get_in(value, path) do
      map when map == %{} ->
        case t do
          [] ->
            # Single-element path: delete the empty subtree at the root.
            # NOTE(review): no binding removal happens on this branch —
            # presumably intentional; confirm against Transaction usage.
            Map.delete(value, key)

          _ ->
            # Drop the last path element (rebinding `key` to it), delete
            # the empty map from its parent, then keep pruning upward.
            [key | u_path] = Enum.reverse(path)
            u_path = Enum.reverse(u_path)
            Transaction.remove_binding(bind_key, path)

            update_in(value, u_path, &Map.delete(&1, key))
            |> trim_tree(u_path, bind_key)
        end

      _ ->
        # Subtree is non-empty (or absent): nothing to prune.
        value
    end
  end

  # Returns `%__MODULE__{}` structs for every internal (non-leaf) node
  # along the given path, ordered from the root downward.
  def internal_nodes(node) do
    node
    |> Enum.reverse()
    |> tl()
    |> internal_node()
  end

  # Bodiless head declaring the accumulator default for internal_node/2.
  defp internal_node(_, _ \\ [])
  defp internal_node([], internal_nodes), do: internal_nodes

  # `path` arrives reversed (leaf first), so each step peels the deepest
  # remaining key and prepends its struct, yielding root-first order.
  defp internal_node([key | path], internal_nodes) do
    parent = Enum.reverse(path)
    internal_node = %__MODULE__{parent: parent, node: parent ++ [key], key: key}
    internal_node(path, [internal_node | internal_nodes])
  end
end
|
lib/system_registry/node.ex
| 0.812607
| 0.444866
|
node.ex
|
starcoder
|
defmodule Oasis.Parser do
  @moduledoc false

  # Casts a raw request value (string, list, or map) into the Elixir term
  # described by its JSON-schema `type` definition. Values with no matching
  # schema clause are returned unchanged; malformed values raise
  # ArgumentError via raise_argument_error/0.

  def parse(type, value) when is_bitstring(value) do
    do_parse(type, value)
  end

  def parse(%{"type" => "null"}, nil), do: nil

  def parse(%{"type" => "array"} = type, value) when is_list(value) do
    # post request body as :urlencoded or :multipart
    do_parse_array(type, value)
  end

  def parse(%{"type" => "object"} = type, value) when is_map(value) do
    # post request body as :urlencoded or :multipart
    do_parse_object(type, value)
  end

  def parse(_type, value), do: value

  # Array/object values arriving as strings are JSON-decoded.
  defp do_parse(%{"type" => type}, value)
       when is_bitstring(value) and type in ["array", "object"] do
    case Jason.decode(value) do
      {:ok, value} ->
        value

      {:error, _error} ->
        raise_argument_error()
    end
  end

  defp do_parse(%{"type" => "boolean"}, "true"), do: true
  defp do_parse(%{"type" => "boolean"}, "false"), do: false
  # Any other string is passed through so downstream validation can reject it.
  defp do_parse(%{"type" => "boolean"}, value), do: value

  defp do_parse(%{"type" => "number"}, value) do
    case Float.parse(value) do
      {value, ""} ->
        value

      _ ->
        raise_argument_error()
    end
  end

  defp do_parse(%{"type" => "integer"}, value) do
    # Mirror the "number" clause: require the whole string to be consumed
    # and fail with the same ArgumentError, instead of the raw
    # :erlang.binary_to_integer error that String.to_integer/1 raises.
    case Integer.parse(value) do
      {value, ""} ->
        value

      _ ->
        raise_argument_error()
    end
  end

  defp do_parse(%{"type" => "string"}, value) do
    value
  end

  defp do_parse(_type, value) do
    value
  end

  # Homogeneous arrays: every element is cast with the single item schema.
  defp do_parse_array(%{"items" => %{"type" => _type} = schema}, list) do
    Enum.map(list, fn value ->
      parse(schema, value)
    end)
  end

  # Tuple-style arrays: each element is cast with its positional schema.
  defp do_parse_array(%{"items" => items}, list)
       when is_list(items) and length(items) == length(list) do
    items
    |> Enum.zip(list)
    |> Enum.map(fn {type, value} ->
      parse(type, value)
    end)
  end

  # A length mismatch between positional schemas and values is an error.
  defp do_parse_array(%{"items" => items}, list)
       when is_list(items) and length(items) != length(list) do
    raise_argument_error()
  end

  defp do_parse_array(_, list), do: list

  # With "additionalProperties": keys matching no declared/pattern schema
  # are cast with the additional-properties schema.
  defp do_parse_object(
         %{"properties" => properties, "additionalProperties" => additional_properties} = type,
         map
       )
       when is_map(properties) and is_map(additional_properties) do
    properties_of_dependencies = properties_from_schema_dependencies(type)
    properties = Map.merge(properties, properties_of_dependencies)
    pattern_properties = pattern_properties(type)

    Enum.reduce(map, map, fn {name, value}, acc ->
      type = type_to_parse_property(name, properties, pattern_properties, additional_properties)
      Map.put(acc, name, parse(type, value))
    end)
  end

  # Without "additionalProperties": keys matching no schema are left as-is.
  defp do_parse_object(%{"properties" => properties} = type, map) when is_map(properties) do
    properties_of_dependencies = properties_from_schema_dependencies(type)
    properties = Map.merge(properties, properties_of_dependencies)
    pattern_properties = pattern_properties(type)

    Enum.reduce(map, map, fn {name, value}, acc ->
      type = type_to_parse_property(name, properties, pattern_properties)

      if type == nil do
        acc
      else
        Map.put(acc, name, parse(type, value))
      end
    end)
  end

  defp do_parse_object(_, map), do: map

  # Collects the "properties" of every schema-style entry under
  # "dependencies" so dependent properties are cast like declared ones.
  defp properties_from_schema_dependencies(type) do
    type
    |> Map.get("dependencies", %{})
    |> Enum.reduce(%{}, fn
      {_name, definition}, acc when is_map(definition) ->
        # schema dependencies
        properties = Map.get(definition, "properties", %{})
        Map.merge(acc, properties)

      _, acc ->
        acc
    end)
  end

  # Precompiles "patternProperties" into {regex, schema} pairs.
  defp pattern_properties(type) do
    type
    |> Map.get("patternProperties", %{})
    |> Enum.reduce([], fn {pattern, definition}, acc ->
      regex = Regex.compile!(pattern)
      [{regex, definition} | acc]
    end)
  end

  # Resolution order for a property's schema: declared "properties" first,
  # then the first matching "patternProperties" entry, then
  # "additionalProperties" (possibly nil).
  defp type_to_parse_property(name, properties, pattern_properties, additional_properties \\ nil) do
    type = Map.get(properties, name)

    if type == nil do
      # may find a matched property from "patternProperties" by name
      pattern = type_from_pattern_properties(name, pattern_properties)

      if pattern == nil do
        # `additional_properties` may be nil or a schema (with only a type
        # for any additional properties)
        additional_properties
      else
        {_, type} = pattern
        type
      end
    else
      type
    end
  end

  defp type_from_pattern_properties(_name, []), do: nil

  defp type_from_pattern_properties(name, pattern_properties) do
    Enum.find(pattern_properties, fn {regex, _definition} ->
      Regex.match?(regex, name)
    end)
  end

  defp raise_argument_error() do
    raise ArgumentError, "argument error"
  end
end
|
lib/oasis/parser.ex
| 0.673084
| 0.473718
|
parser.ex
|
starcoder
|
defmodule Movement.EntriesCommitProcessor do
  # Actions that represent "nothing to do"; filtered out for every revision.
  @no_action_keys ~w(noop autocorrect)
  # Actions that are allowed to propagate to slave (non-master) revisions.
  @included_slave_actions ~w(new remove renew merge_on_corrected merge_on_proposed merge_on_proposed_force merge_on_corrected_force)

  @doc """
  For list of translations, new data (like the content of a file upload) and a given function,
  returns the list of operations that will be executed. The operations will be neither persisted nor run.

  The list contains the operations for keys that exists in the current translations list. For the removal of
  keys, use the process_for_remove/3 function.
  """
  @spec process(Movement.Context.t()) :: Movement.Context.t()
  def process(context = %Movement.Context{entries: entries, assigns: assigns, operations: operations}) do
    # Index existing translations by key so each entry's lookup is O(1).
    grouped_translations = group_by_key(assigns[:translations])

    new_operations =
      entries
      |> Enum.map(fn entry ->
        current_translation = fetch_current_translation(grouped_translations, entry.key)

        # Candidate translation built from the uploaded entry; the
        # `:comparer` function decides which operation (if any) turns the
        # current translation into this suggestion.
        suggested_translation = %Movement.SuggestedTranslation{
          text: entry.value,
          key: entry.key,
          file_comment: entry.comment,
          file_index: entry.index,
          value_type: entry.value_type,
          plural: entry.plural,
          locked: entry.locked,
          revision_id: Map.get(assigns[:revision], :id),
          version_id: assigns[:version] && Map.get(assigns[:version], :id),
          placeholders: entry.placeholders
        }

        operation = assigns[:comparer].(current_translation, suggested_translation)
        %{operation | options: assigns[:options]}
      end)
      |> filter_for_revision(assigns[:revision])

    %{context | operations: Enum.concat(operations, new_operations)}
  end

  @doc """
  For list of translations and new data (like the content of a file upload),
  returns the list of operations concerning removed keys from the content that will be executed.
  """
  @spec process_for_remove(Movement.Context.t()) :: Movement.Context.t()
  def process_for_remove(context = %Movement.Context{entries: entries, assigns: assigns, operations: operations}) do
    grouped_entries = group_by_key(entries)
    grouped_entries_keys = Map.keys(grouped_entries)

    new_operations =
      assigns[:translations]
      # Keep translations that are still live but whose key no longer
      # appears in the uploaded entries: those are removal candidates.
      |> Enum.filter(&(!&1.removed && &1.key not in grouped_entries_keys))
      |> Enum.map(fn current_translation ->
        suggested_translation = %{current_translation | marked_as_removed: true}
        assigns[:comparer].(suggested_translation, suggested_translation)
      end)

    %{context | operations: Enum.concat(operations, new_operations)}
  end

  defp group_by_key(list), do: Enum.group_by(list, & &1.key)

  # Returns the first translation grouped under `key`, or nil when the key
  # is unknown or the grouped value has an unexpected shape.
  defp fetch_current_translation(grouped_translations, key) do
    grouped_translations
    |> Map.get(key)
    |> case do
      [value | _rest] when is_map(value) -> value
      _ -> nil
    end
  end

  # Master revisions accept every action that is not a no-op.
  defp filter_for_revision(operations, %{master: true}) do
    operations
    |> Enum.filter(fn %{action: operation} -> operation not in @no_action_keys end)
  end

  # Slave revisions accept only the whitelisted subset of actions.
  defp filter_for_revision(operations, _) do
    Enum.filter(
      operations,
      fn %{action: operation} ->
        operation in @included_slave_actions and operation not in @no_action_keys
      end
    )
  end
end
|
lib/movement/entries_commit_processor.ex
| 0.78838
| 0.410668
|
entries_commit_processor.ex
|
starcoder
|
defmodule Liquex.Parser.Tag.ControlFlow do
  @moduledoc false

  import NimbleParsec

  alias Liquex.Parser.Argument
  alias Liquex.Parser.Literal
  alias Liquex.Parser.Tag

  # Parses a (possibly compound) boolean expression: `<arg> <op> <arg>`
  # comparisons joined by `and` / `or`, where a bare literal or argument is
  # also accepted as a standalone (truthy) expression.
  @spec boolean_expression(NimbleParsec.t()) :: NimbleParsec.t()
  def boolean_expression(combinator \\ empty()) do
    # Comparison operators; ">=" / "<=" are listed before ">" / "<" so the
    # longer token wins the choice.
    operator =
      choice([
        string("=="),
        string("!="),
        string(">="),
        string("<="),
        string(">"),
        string("<"),
        string("contains")
      ])
      |> map({String, :to_atom, []})

    boolean_operator =
      choice([
        replace(string("and"), :and),
        replace(string("or"), :or)
      ])

    # One `left op right` comparison, wrapped so it stays a single term.
    boolean_operation =
      tag(Argument.argument(), :left)
      |> ignore(Literal.whitespace())
      |> unwrap_and_tag(operator, :op)
      |> ignore(Literal.whitespace())
      |> tag(Argument.argument(), :right)
      |> wrap()

    combinator
    |> choice([boolean_operation, Literal.literal(), Argument.argument()])
    |> ignore(Literal.whitespace())
    |> repeat(
      boolean_operator
      |> ignore(Literal.whitespace())
      |> choice([boolean_operation, Literal.literal(), Argument.argument()])
      |> ignore(Literal.whitespace())
    )
  end

  # {% if <expr> %} … {% elsif %}* … {% else %}? … {% endif %}
  @spec if_expression(NimbleParsec.t()) :: NimbleParsec.t()
  def if_expression(combinator \\ empty()) do
    if_tag =
      expression_tag("if")
      |> tag(parsec(:document), :contents)

    combinator
    |> tag(if_tag, :if)
    |> repeat(elsif_tag())
    |> optional(else_tag())
    |> ignore(Tag.tag_directive("endif"))
  end

  # {% unless <expr> %} … {% endunless %}; mirrors if_expression/1.
  @spec unless_expression(NimbleParsec.t()) :: NimbleParsec.t()
  def unless_expression(combinator \\ empty()) do
    combinator
    |> expression_tag("unless")
    |> tag(parsec(:document), :contents)
    |> tag(:unless)
    |> repeat(elsif_tag())
    |> optional(else_tag())
    |> ignore(Tag.tag_directive("endunless"))
  end

  # {% case x %} {% when lit[, lit…] %} …+ {% else %}? … {% endcase %}
  @spec case_expression(NimbleParsec.t()) :: NimbleParsec.t()
  def case_expression(combinator \\ empty()) do
    # A single {% when %} clause: one or more comma-separated literals,
    # tagged :expression, followed by the clause body.
    when_tag =
      ignore(Tag.open_tag())
      |> ignore(string("when"))
      |> ignore(Literal.whitespace(empty(), 1))
      |> tag(
        Literal.literal()
        |> repeat(
          ignore(string(","))
          |> ignore(Literal.whitespace(empty(), 1))
          |> Literal.literal()
        ),
        :expression
      )
      |> ignore(Tag.close_tag())
      |> tag(parsec(:document), :contents)

    # The opening {% case <argument> %} tag.
    case_tag =
      ignore(Tag.open_tag())
      |> ignore(string("case"))
      |> ignore(Literal.whitespace(empty(), 1))
      |> concat(Argument.argument())
      |> ignore(Tag.close_tag())

    combinator
    |> tag(case_tag, :case)
    |> ignore(Literal.whitespace())
    |> times(tag(when_tag, :when), min: 1)
    |> optional(else_tag())
    |> ignore(Tag.tag_directive("endcase"))
  end

  # {% else %} followed by its body; shared by if/unless/case.
  @spec else_tag(NimbleParsec.t()) :: NimbleParsec.t()
  def else_tag(combinator \\ empty()) do
    combinator
    |> ignore(Tag.tag_directive("else"))
    |> tag(parsec(:document), :contents)
    |> tag(:else)
  end

  # {% elsif <expr> %} followed by its body.
  defp elsif_tag(combinator \\ empty()) do
    combinator
    |> expression_tag("elsif")
    |> tag(parsec(:document), :contents)
    |> tag(:elsif)
  end

  # {% <tag_name> <boolean expression> %}
  @spec expression_tag(NimbleParsec.t(), String.t()) :: NimbleParsec.t()
  defp expression_tag(combinator \\ empty(), tag_name) do
    combinator
    |> ignore(Tag.open_tag())
    |> ignore(string(tag_name))
    |> ignore(Literal.whitespace())
    |> tag(boolean_expression(), :expression)
    |> ignore(Tag.close_tag())
  end
end
|
lib/liquex/parser/tag/control_flow.ex
| 0.669961
| 0.493287
|
control_flow.ex
|
starcoder
|
defmodule Tyyppi do
  @moduledoc """
  The main interface to `Tyyppi` library. Usually, functions and macros
  presented in this module are enough to work with `Tyyppi`.
  """

  use Boundary, exports: [Function, Matchers, Stats]

  require Logger

  alias Tyyppi.{Matchers, Stats, T}

  import Tyyppi.T, only: [normalize_params: 1, param_names: 1, parse_definition: 1]

  @doc false
  defguard is_params(params) when is_list(params) or is_atom(params)

  @doc """
  Sigil to simplify specification of `Tyyppi.T.t(term())` type, it’s literally the wrapper for `Tyyppi.parse/1`.

  ## Examples

      iex> import Tyyppi
      iex> ~t[integer()]
      %Tyyppi.T{
        definition: {:type, 0, :integer, []},
        module: nil,
        name: nil,
        params: [],
        quoted: {:integer, [], []},
        source: nil,
        type: :built_in
      }
      ...> ~t[atom]
      %Tyyppi.T{
        definition: {:type, 0, :atom, []},
        module: nil,
        name: nil,
        params: [],
        quoted: {:atom, [], []},
        source: nil,
        type: :built_in
      }
  """
  defmacro sigil_t({:<<>>, _meta, [string]}, []) when is_binary(string) do
    # `:elixir_interpolation.unescape_chars/1` was renamed to
    # `unescape_string/1` in Elixir 1.12, hence the version branch.
    if Version.compare(System.version(), "1.12.0") == :lt do
      quote bind_quoted: [string: string] do
        string
        |> :elixir_interpolation.unescape_chars()
        |> Code.string_to_quoted!()
        |> Tyyppi.parse_quoted()
      end
    else
      quote bind_quoted: [string: string] do
        string
        |> :elixir_interpolation.unescape_string()
        |> Code.string_to_quoted!()
        |> Tyyppi.parse_quoted()
      end
    end
  end

  # Interpolated sigil: unescape the pieces and build the binary at
  # runtime before parsing it as a type.
  defmacro sigil_t({:<<>>, meta, pieces}, []) do
    tokens =
      case :elixir_interpolation.unescape_tokens(pieces) do
        {:ok, unescaped_tokens} -> unescaped_tokens
        {:error, reason} -> raise ArgumentError, to_string(reason)
        {:error, reason, _} -> raise ArgumentError, to_string(reason)
      end

    quote do
      unquote({:<<>>, meta, tokens})
      |> Code.string_to_quoted!()
      |> Tyyppi.parse_quoted()
    end
  end

  @doc """
  Parses the type as by spec and returns its `Tyyppi.T` representation.

  _Example:_

      iex> require Tyyppi
      ...> parsed = Tyyppi.parse(GenServer.on_start())
      ...> with %Tyyppi.T{definition: {:type, _, :union, [type | _]}} <- parsed, do: type
      {:type, 0, :tuple, [{:atom, 0, :ok}, {:type, 704, :pid, []}]}
      ...> parsed.module
      GenServer
      ...> parsed.name
      :on_start
      ...> parsed.params
      []
      ...> parsed.quoted
      {{:., [], [GenServer, :on_start]}, [], []}
      ...> parsed.type
      :type
  """
  # Union type: `a | b | …`.
  defmacro parse({:|, _, [_, _]} = type) do
    quote bind_quoted: [union: Macro.escape(type)] do
      union
      |> T.union()
      |> T.parse_definition()
      |> Stats.type()
    end
  end

  # Function type: `(args -> result)`; `(... -> result)` means any arity.
  defmacro parse([{:->, _, [args, result]}]) do
    type =
      case args do
        [{:..., _, _}] -> {:type, 0, :any}
        args -> {:type, 0, :product, Enum.map(args, &parse_definition/1)}
      end

    result = parse_definition(result)

    quote bind_quoted: [type: Macro.escape(type), result: Macro.escape(result)] do
      Stats.type({:type, 0, :fun, [type, result]})
    end
  end

  # Remote type with a literal module: `module.fun(params)`.
  defmacro parse({{:., _, [module, fun]}, _, params}) when is_params(params) do
    params = params |> normalize_params() |> length()

    quote bind_quoted: [module: module, fun: fun, params: params] do
      Stats.type({module, fun, params})
    end
  end

  # Remote type with an aliased module: `Mod.Sub.fun(params)`.
  defmacro parse({{:., _, [{:__aliases__, _, aliases}, fun]}, _, params})
           when is_params(params) do
    params = params |> normalize_params() |> length()

    quote bind_quoted: [aliases: aliases, fun: fun, params: params] do
      Stats.type({Module.concat(aliases), fun, params})
    end
  end

  # Map and struct literals are handled by do_parse_map/2.
  defmacro parse({:%{}, _meta, fields} = quoted) when is_list(fields),
    do: do_parse_map(quoted, __CALLER__)

  defmacro parse({:%, _meta, [struct, {:%{}, meta, fields}]}),
    do: do_parse_map({:%{}, meta, [{:__struct__, struct} | fields]}, __CALLER__)

  # Tuple literals (2-tuples have a distinct quoted form from n-tuples).
  defmacro parse({_, _} = tuple), do: do_lookup(tuple)
  defmacro parse({:{}, _, content} = tuple) when is_list(content), do: do_lookup(tuple)

  # Built-in / local types: `atom()`, `integer()`, …
  defmacro parse({fun, _, params}) when is_atom(fun) and is_params(params) do
    quote bind_quoted: [fun: fun, params: param_names(params)] do
      Stats.type({:type, 0, fun, params})
    end
  end

  # Fallback: anything else goes through the generic definition lookup.
  defmacro parse(any) do
    Logger.debug("[🚰 T.parse/1]: " <> inspect(any))
    do_lookup(any)
  end

  # Builds a `Tyyppi.T` for a (possibly struct) map literal, translating
  # `optional(k) => t` / `required(k) => t` fields into map field types;
  # plain keys are treated as required.
  # (No @doc here: @doc on private functions is discarded with a warning.)
  defp do_parse_map({:%{}, _meta, fields} = quoted, caller) when is_list(fields) do
    fields =
      fields
      |> Enum.map(fn
        {{:optional, _, [name]}, type} ->
          {:type, 0, :map_field_assoc, Enum.map([name, type], &parse_quoted(&1).definition)}

        {{:required, _, [name]}, type} ->
          {:type, 0, :map_field_exact, Enum.map([name, type], &parse_quoted(&1).definition)}

        {name, type} ->
          {:type, 0, :map_field_exact, Enum.map([name, type], &parse_quoted(&1).definition)}
      end)
      |> Macro.escape()

    file = caller.file
    quoted = Macro.escape(quoted, prune_metadata: true)

    quote location: :keep do
      %Tyyppi.T{
        definition: {:type, 0, :map, unquote(fields)},
        module: nil,
        name: nil,
        params: [],
        quoted: unquote(quoted),
        source: unquote(file),
        type: :type
      }
    end
  end

  # Generic runtime lookup of a quoted type definition.
  defp do_lookup(any) do
    quote bind_quoted: [any: Macro.escape(any)] do
      any
      |> T.parse_definition()
      |> Stats.type()
    end
  end

  @doc """
  Returns `true` if the `term` passed as the second parameter is of type `type`.

  The first parameter is expected to be a `type` as in specs, e. g. `atom()` or
  `GenServer.on_start()`.

  _Examples:_

      iex> require Tyyppi
      ...> Tyyppi.of?(atom(), :ok)
      true
      ...> Tyyppi.of?(atom(), 42)
      false
      ...> Tyyppi.of?(GenServer.on_start(), {:error, {:already_started, self()}})
      true
      ...> Tyyppi.of?(GenServer.on_start(), :foo)
      false
  """
  defmacro of?(type, term) do
    quote do
      %Tyyppi.T{module: module, definition: definition} = Tyyppi.parse(unquote(type))
      Matchers.of?(module, definition, unquote(term))
    end
  end

  @spec of_type?(Tyyppi.T.t(wrapped), any()) :: boolean() when wrapped: term()
  @doc """
  Returns `true` if the `term` passed as the second parameter is of type `type`.

  The first parameter is expected to be of type `Tyyppi.T.t(term())`.

  _Examples:_

      iex> require Tyyppi
      ...> type = Tyyppi.parse(atom())
      %Tyyppi.T{
        definition: {:type, 0, :atom, []},
        module: nil,
        name: nil,
        params: [],
        quoted: {:atom, [], []},
        source: nil,
        type: :built_in
      }
      ...> Tyyppi.of_type?(type, :ok)
      true
      ...> Tyyppi.of_type?(type, 42)
      false
      ...> type = Tyyppi.parse(GenServer.on_start())
      ...> Tyyppi.of_type?(type, {:error, {:already_started, self()}})
      true
      ...> Tyyppi.of_type?(type, :foo)
      false
  """
  # Reading the application env at compile time is intentional here: in
  # non-strict mode the check compiles down to a constant `true`.
  if Application.get_env(:tyyppi, :strict, false) do
    def of_type?(%T{module: module, definition: definition}, term),
      do: Matchers.of?(module, definition, term)

    def of_type?(nil, term) do
      Logger.debug("[🚰 Tyyppi.of_type?/2]: " <> inspect(term))
      false
    end
  else
    def of_type?(_, _), do: true
  end

  @doc """
  **Experimental:** applies the **local** function given as an argument
  in the form `&Module.fun/arity` or **anonymous** function with arguments.
  Validates the arguments given and the result produced by the call.

  Only named types are supported at the moment.

  If the number of arguments does not fit the arity of the type, returns
  `{:error, {:arity, n}}` where `n` is the number of arguments passed.

  If arguments did not pass the validation, returns `{:error, {:args, [arg1, arg2, ...]}}`
  where `argN` are the arguments passed.

  If both arity and types of arguments are ok, _evaluates_ the function and checks the
  result against the type. Returns `{:ok, result}` _or_ `{:error, {:result, result}}`
  if the validation did not pass.

  _Example:_

  ```elixir
  require Tyyppi

  Tyyppi.apply(MyModule.callback(), &MyModule.on_info/1, 2)
  #⇒ {:ok, [foo_squared: 4]}

  Tyyppi.apply(MyModule.callback(), &MyModule.on_info/1, :ok)
  #⇒ {:error, {:args, :ok}}

  Tyyppi.apply(MyModule.callback(), &MyModule.on_info/1, [])
  #⇒ {:error, {:arity, 0}}
  ```
  """
  defmacro apply(type, fun, args) do
    quote do
      %Tyyppi.T{module: module, definition: definition} = Tyyppi.parse(unquote(type))
      Tyyppi.Function.apply(module, definition, unquote(fun), unquote(args))
    end
  end

  @doc """
  **Experimental:** applies the **external** function given as an argument
  in the form `&Module.fun/arity` or **anonymous** function with arguments.
  Validates the arguments given and the result produced by the call.

  _Examples:_

      iex> require Tyyppi
      ...> Tyyppi.apply((atom() -> binary()),
      ...>   fn a -> to_string(a) end, [:foo])
      {:ok, "foo"}
      ...> result = Tyyppi.apply((atom() -> binary()),
      ...>   fn -> "foo" end, [:foo])
      ...> match?({:error, {:fun, _}}, result)
      true
      ...> Tyyppi.apply((atom() -> binary()),
      ...>   fn _ -> 42 end, ["foo"])
      {:error, {:args, ["foo"]}}
      ...> Tyyppi.apply((atom() -> binary()),
      ...>   fn _ -> 42 end, [:foo])
      {:error, {:result, 42}}
  """
  defmacro apply(fun, args) do
    quote do
      with %{module: module, name: fun, arity: arity} <-
             Map.new(Elixir.Function.info(unquote(fun))),
           {:ok, specs} <- Code.Typespec.fetch_specs(module),
           {{fun, arity}, [spec]} <- Enum.find(specs, &match?({{^fun, ^arity}, _}, &1)),
           do: Tyyppi.Function.apply(module, spec, unquote(fun), unquote(args)),
           else: (result -> {:error, {:no_spec, result}})
    end
  end

  @doc false
  defdelegate parse_quoted(type), to: Tyyppi.T

  @doc false
  defdelegate void_validation(value), to: Tyyppi.Value.Validations, as: :any

  @doc false
  defdelegate void_coercion(value), to: Tyyppi.Value.Coercions, as: :any

  @doc false
  defmacro coproduct(types), do: {:|, [], types}

  # Builds the quoted setup injected by `__using__/1`: either imports or
  # requires Tyyppi, and swaps Kernel.defstruct for Tyyppi.Struct.defstruct.
  defp setup_ast(import?) do
    [
      if(import?,
        do: quote(generated: true, do: import(Tyyppi)),
        else: quote(generated: true, do: require(Tyyppi))
      ),
      quote generated: true, location: :keep do
        import Kernel, except: [defstruct: 1]
        import Tyyppi.Struct, only: [defstruct: 1]
        alias Tyyppi.Value
      end
    ]
  end

  # Struct guards with map-field access require OTP ≥ 23 and Elixir ≥ 1.11.
  defp can_struct_guard? do
    String.to_integer(System.otp_release()) > 22 and
      Version.compare(System.version(), "1.11.0") != :lt
  end

  # Emits an `is_<struct>/1` guard for `struct`, degrading to a plain
  # `is_map/1` check on runtimes that cannot express struct guards.
  defp maybe_struct_guard(struct) do
    name = struct |> Module.split() |> List.last() |> Macro.underscore()
    name = :"is_#{name}"

    if can_struct_guard?() do
      quote generated: true, location: :keep do
        @doc "Helper guard to match instances of struct #{inspect(unquote(struct))}"
        @doc since: "0.9.0", guard: true
        defguard unquote(name)(value)
                 when is_map(value) and value.__struct__ == unquote(struct)
      end
    else
      quote generated: true, location: :keep do
        @doc """
        Stub guard to match instances of struct #{inspect(unquote(struct))}.
        Upgrade to 11.0/23 to make it work.
        """
        @doc since: "0.9.0", guard: true
        defguard unquote(name)(value) when is_map(value)
      end
    end
  end

  @doc false
  defmacro formulae_guard, do: maybe_struct_guard(Formulae)

  @doc false
  defmacro __using__(opts \\ []) do
    import? = Keyword.get(opts, :import, false)

    guards =
      case __CALLER__.context_modules do
        [] -> []
        [_some | _] -> [maybe_struct_guard(Tyyppi.Value)]
      end

    guards ++ setup_ast(import?)
  end

  @doc false
  @spec can_flatten?(type :: module()) :: boolean()
  def can_flatten?(type) do
    {:flatten, 2} in Keyword.take(type.__info__(:functions), [:flatten])
  end

  @doc false
  @spec any :: Tyyppi.T.t(term())
  def any, do: parse(any())
end
|
lib/tyyppi.ex
| 0.781539
| 0.401394
|
tyyppi.ex
|
starcoder
|
defmodule Cldr.Number do
@moduledoc """
Formats numbers and currencies based upon CLDR's decimal formats specification.
The format specification is documentated in [Unicode TR35](http://unicode.org/reports/tr35/tr35-numbers.html#Number_Formats).
There are several classes of formatting including non-scientific, scientific,
rules based (for spelling and ordinal formats), compact formats that display `1k`
rather than `1,000` and so on. See `Cldr.Number.to_string/2` for specific formatting
options.
### Non-Scientific Notation Formatting
The following description applies to formats that do not use scientific
notation or significant digits:
* If the number of actual integer digits exceeds the maximum integer digits,
then only the least significant digits are shown. For example, 1997 is
formatted as "97" if the maximum integer digits is set to 2.
* If the number of actual integer digits is less than the minimum integer
digits, then leading zeros are added. For example, 1997 is formatted as
"01997" if the minimum integer digits is set to 5.
* If the number of actual fraction digits exceeds the maximum fraction
digits, then half-even rounding it performed to the maximum fraction
digits. For example, 0.125 is formatted as "0.12" if the maximum fraction
digits is 2. This behavior can be changed by specifying a rounding
increment and a rounding mode.
* If the number of actual fraction digits is less than the minimum fraction
digits, then trailing zeros are added. For example, 0.125 is formatted as
"0.1250" if the minimum fraction digits is set to 4.
* Trailing fractional zeros are not displayed if they occur j positions after
the decimal, where j is less than the maximum fraction digits. For example,
0.10004 is formatted as "0.1" if the maximum fraction digits is four or
less.
### Scientific Notation Formatting
Numbers in scientific notation are expressed as the product of a mantissa and
a power of ten, for example, 1234 can be expressed as 1.234 x 10^3. The
mantissa is typically in the half-open interval [1.0, 10.0) or sometimes
[0.0, 1.0), but it need not be. In a pattern, the exponent character
immediately followed by one or more digit characters indicates scientific
notation. Example: "0.###E0" formats the number 1234 as "1.234E3".
* The number of digit characters after the exponent character gives the
minimum exponent digit count. There is no maximum. Negative exponents are
formatted using the localized minus sign, not the prefix and suffix from
the pattern. This allows patterns such as "0.###E0 m/s". To prefix positive
exponents with a localized plus sign, specify '+' between the exponent and
the digits: "0.###E+0" will produce formats "1E+1", "1E+0", "1E-1", and so
on. (In localized patterns, use the localized plus sign rather than '+'.)
* The minimum number of integer digits is achieved by adjusting the exponent.
Example: 0.00123 formatted with "00.###E0" yields "12.3E-4". This only
happens if there is no maximum number of integer digits. If there is a
maximum, then the minimum number of integer digits is fixed at one.
* The maximum number of integer digits, if present, specifies the exponent
grouping. The most common use of this is to generate engineering notation,
in which the exponent is a multiple of three, for example, "##0.###E0". The
number 12345 is formatted using "##0.####E0" as "12.345E3".
* When using scientific notation, the formatter controls the digit counts
using significant digits logic. The maximum number of significant digits
limits the total number of integer and fraction digits that will be shown
in the mantissa; it does not affect parsing. For example, 12345 formatted
with "##0.##E0" is "12.3E3". Exponential patterns may not contain grouping
separators.
### Significant Digits
There are two ways of controlling how many digits are shows: (a)
significant digits counts, or (b) integer and fraction digit counts. Integer
and fraction digit counts are described above. When a formatter is using
significant digits counts, it uses however many integer and fraction digits
are required to display the specified number of significant digits. It may
ignore min/max integer/fraction digits, or it may use them to the extent
possible.
"""
alias Cldr.Config
alias Cldr.Number.Formatter
alias Cldr.Number.Format.Options
@type format_type ::
:standard
| :decimal_short
| :decimal_long
| :currency_short
| :currency_long
| :percent
| :accounting
| :scientific
| :currency
@short_format_styles Options.short_format_styles()
@root_locale_name Config.root_locale_name()
@root_locale Map.fetch!(Config.all_language_tags(), @root_locale_name)
@doc """
Return a valid number system from a provided locale and number
system name or type.
The number system or number system type must be valid for the
given locale. If a number system type is provided, the
underlying number system is returned.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`
* `system_name` is any number system name returned by
`Cldr.known_number_systems/0` or a number system type
returned by `Cldr.known_number_system_types/0`
* `backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module
## Examples
iex> Cldr.Number.validate_number_system "en", :latn, TestBackend.Cldr
{:ok, :latn}
iex> Cldr.Number.validate_number_system "en", :default, TestBackend.Cldr
{:ok, :latn}
iex> Cldr.Number.validate_number_system "en", :unknown, TestBackend.Cldr
{:error,
{Cldr.UnknownNumberSystemError, "The number system :unknown is unknown"}}
iex> Cldr.Number.validate_number_system "zz", :default, TestBackend.Cldr
{:error, {Cldr.InvalidLanguageError, "The language \\"zz\\" is invalid"}}
"""
@spec validate_number_system(
Cldr.Locale.locale_name() | Cldr.LanguageTag.t(),
Cldr.Number.System.system_name() | Cldr.Number.System.types(),
Cldr.backend()
) ::
{:ok, Cldr.Number.System.system_name()} | {:error, {module(), String.t()}}
def validate_number_system(locale, number_system, backend \\ default_backend()) do
Cldr.Number.System.system_name_from(number_system, locale, backend)
end
@doc """
Returns a number formatted into a string according to a format pattern and options.

## Arguments

* `number` is an integer, float or Decimal to be formatted

* `backend` is any `Cldr` backend. That is, any module that
  contains `use Cldr`

* `options` is a keyword list defining how the number is to be formatted. The
  valid options are:

## Options

* `format`: the format style or a format string defining how the number is
  formatted. See `Cldr.Number.Format` for how format strings can be constructed.
  See `Cldr.Number.Format.format_styles_for/3` to return available format styles
  for a locale. The default `format` is `:standard`.

* If `:format` is set to `:long` or `:short` then the formatting depends on
  whether `:currency` is specified. If not specified then the number is
  formatted as `:decimal_long` or `:decimal_short`. If `:currency` is
  specified the number is formatted as `:currency_long` or
  `:currency_short` and `:fractional_digits` is set to 0 as a default.

* `:format` may also be a format defined by CLDR's Rules Based Number
  Formats (RBNF). Further information is found in the module `Cldr.Rbnf`.
  The most commonly used formats in this category are to spell out the
  number in the locale's language. The applicable formats are `:spellout`,
  `:spellout_year`, `:ordinal`. A number can also be formatted as roman
  numbers by using the format `:roman` or `:roman_lower`.

* `currency`: is the currency for which the number is formatted. For
  available currencies see `Cldr.Currency.known_currencies/0`. This option
  is required if `:format` is set to `:currency`. If `currency` is set
  and no `:format` is set, `:format` will be set to `:currency` as well.

* `currency_symbol`: Allows overriding a currency symbol. The alternatives
  are:
  * `:iso` the ISO currency code will be used instead of the default
    currency symbol.
  * `:narrow` uses the narrow symbol defined for the locale. The same
    narrow symbol can be defined for more than one currency and therefore this
    should be used with care. If no narrow symbol is defined, the standard
    symbol is used.
  * `:symbol` uses the standard symbol defined in CLDR. A symbol is unique
    for each currency and can be safely used.
  * "string" uses `string` as the currency symbol
  * `:standard` (the default and recommended) uses the CLDR-defined symbol
    based upon the currency format for the locale.

* `:cash`: a boolean which indicates whether a number being formatted as a
  `:currency` is to be considered a cash value or not. Currencies can be
  rounded differently depending on whether `:cash` is `true` or `false`.
  *This option is deprecated in favour of `currency_digits: :cash`.*

* `:currency_digits` indicates which of the rounding and digits should be
  used. The options are `:accounting` which is the default, `:cash` or
  `:iso`

* `:rounding_mode`: determines how a number is rounded to meet the precision
  of the format requested. The available rounding modes are `:down`,
  `:half_up`, `:half_even`, `:ceiling`, `:floor`, `:half_down`, `:up`. The
  default is `:half_even`.

* `:number_system`: determines which of the number systems for a locale
  should be used to define the separators and digits for the formatted
  number. If `number_system` is an `atom` then `number_system` is
  interpreted as a number system. See
  `Cldr.Number.System.number_systems_for/2`. If the `:number_system` is
  `binary` then it is interpreted as a number system name. See
  `Cldr.Number.System.number_system_names_for/2`. The default is `:default`.

* `:locale`: determines the locale in which the number is formatted. See
  `Cldr.known_locale_names/0`. The default is `Cldr.get_locale/0` which is the
  locale currently in effect for this `Process` and which is set by
  `Cldr.put_locale/1`.

* If `:fractional_digits` is set to a positive integer value then the number
  will be rounded to that number of digits and displayed accordingly - overriding
  settings that would be applied by default. For example, currencies have
  fractional digits defined reflecting each currencies minor unit. Setting
  `:fractional_digits` will override that setting.

* If `:maximum_integer_digits` is set to a positive integer value then the
  number is left truncated before formatting. For example if the number `1234`
  is formatted with the option `maximum_integer_digits: 2`, the number is
  truncated to `34` and formatted.

* If `:round_nearest` is set to a positive integer value then the number
  will be rounded to nearest increment of that value - overriding
  settings that would be applied by default.

* `:minimum_grouping_digits` overrides the CLDR definition of minimum grouping
  digits. For example in the locale `es` the number `1345` is formatted by default
  as `1345` because the locale defines the `minimum_grouping_digits` as `2`. If
  `minimum_grouping_digits: 1` is set as an option the number is formatted as
  `1.345`. The `:minimum_grouping_digits` is added to the grouping defined by
  the number format. If the sum of these two digits is greater than the number
  of digits in the integer (or fractional) part of the number then no grouping
  is performed.

## Locale extensions affecting formatting

A locale identifier can specify options that affect number formatting.
These options are:

* `cu`: defines what currency is implied when no currency is specified in
  the call to `to_string/2`.

* `cf`: defines whether to use currency or accounting format for
  formatting currencies. This overrides the `format: :currency` and `format: :accounting`
  options.

* `nu`: defines the number system to be used if none is specified by the `:number_system`
  option to `to_string/2`

These keys are part of the [u extension](https://unicode.org/reports/tr35/#u_Extension) and
that document should be consulted for details on how to construct a locale identifier with these
extensions.

## Returns

* `{:ok, string}` or

* `{:error, {exception, message}}`

## Examples

    iex> Cldr.Number.to_string 12345, TestBackend.Cldr
    {:ok, "12,345"}

    iex> Cldr.Number.to_string 12345, TestBackend.Cldr, locale: "fr"
    {:ok, "12 345"}

    iex> Cldr.Number.to_string 1345.32, TestBackend.Cldr, currency: :EUR, locale: "es", minimum_grouping_digits: 1
    {:ok, "1.345,32 €"}

    iex> Cldr.Number.to_string 1345.32, TestBackend.Cldr, currency: :EUR, locale: "es"
    {:ok, "1345,32 €"}

    iex> Cldr.Number.to_string 12345, TestBackend.Cldr, locale: "fr", currency: "USD"
    {:ok, "12 345,00 $US"}

    iex> Cldr.Number.to_string 12345, TestBackend.Cldr, format: "#E0"
    {:ok, "1.2345E4"}

    iex> Cldr.Number.to_string 12345, TestBackend.Cldr, format: :accounting, currency: "THB"
    {:ok, "THB 12,345.00"}

    iex> Cldr.Number.to_string -12345, TestBackend.Cldr, format: :accounting, currency: "THB"
    {:ok, "(THB 12,345.00)"}

    iex> Cldr.Number.to_string 12345, TestBackend.Cldr, format: :accounting, currency: "THB",
    ...> locale: "th"
    {:ok, "฿12,345.00"}

    iex> Cldr.Number.to_string 12345, TestBackend.Cldr, format: :accounting, currency: "THB",
    ...> locale: "th", number_system: :native
    {:ok, "฿๑๒,๓๔๕.๐๐"}

    iex> Cldr.Number.to_string 1244.30, TestBackend.Cldr, format: :long
    {:ok, "1 thousand"}

    iex> Cldr.Number.to_string 1244.30, TestBackend.Cldr, format: :long, currency: "USD"
    {:ok, "1,244 US dollars"}

    iex> Cldr.Number.to_string 1244.30, TestBackend.Cldr, format: :short
    {:ok, "1K"}

    iex> Cldr.Number.to_string 1244.30, TestBackend.Cldr, format: :short, currency: "EUR"
    {:ok, "€1K"}

    iex> Cldr.Number.to_string 1234, TestBackend.Cldr, format: :spellout
    {:ok, "one thousand two hundred thirty-four"}

    iex> Cldr.Number.to_string 1234, TestBackend.Cldr, format: :spellout_verbose
    {:ok, "one thousand two hundred and thirty-four"}

    iex> Cldr.Number.to_string 1989, TestBackend.Cldr, format: :spellout_year
    {:ok, "nineteen eighty-nine"}

    iex> Cldr.Number.to_string 123, TestBackend.Cldr, format: :ordinal
    {:ok, "123rd"}

    iex> Cldr.Number.to_string 123, TestBackend.Cldr, format: :roman
    {:ok, "CXXIII"}

    iex> Cldr.Number.to_string 123, TestBackend.Cldr, locale: "th-u-nu-thai"
    {:ok, "๑๒๓"}

    iex> Cldr.Number.to_string 123, TestBackend.Cldr, format: :currency, locale: "en-u-cu-thb"
    {:ok, "THB 123.00"}

## Errors

An error tuple `{:error, reason}` will be returned if an error is detected.
The two most likely causes of an error return are:

* A format cannot be compiled. In this case the error tuple will look like:

```
iex> Cldr.Number.to_string(12345, TestBackend.Cldr, format: "0#")
{:error, {Cldr.FormatCompileError,
  "Decimal format compiler: syntax error before: \\"#\\""}}
```

* The format style requested is not defined for the `locale` and
  `number_system`. This happens typically when the number system is
  `:algorithmic` rather than the more common `:numeric`. In this case the error
  return looks like:

```
iex> Cldr.Number.to_string(1234, TestBackend.Cldr, locale: "he", number_system: "hebr")
{:error, {Cldr.UnknownFormatError,
  "The locale \\"he\\" with number system :hebr does not define a format :standard"}}
```

"""
@spec to_string(number | Decimal.t(), Cldr.backend() | Keyword.t() | map(), Keyword.t() | map()) ::
        {:ok, String.t()} | {:error, {atom, String.t()}}
def to_string(number, backend \\ default_backend(), options \\ [])

# No backend supplied, just options. The backend is taken from the
# `:backend` option if present, otherwise the default backend is used.
# Clause order matters: this must be tried before the general clauses.
def to_string(number, options, []) when is_list(options) do
  {backend, options} = Keyword.pop_lazy(options, :backend, &default_backend/0)
  to_string(number, backend, options)
end

# Decimal -0 is formatted like 0, without the sign
def to_string(%Decimal{coef: 0, sign: -1} = number, backend, options) do
  %Decimal{number | sign: 1}
  |> to_string(backend, options)
end

# Pre-processed options which is nearly twice as
# fast as non-preprocessed. See
# Cldr.Number.Options.validate_options/3
def to_string(number, backend, %Options{} = options) do
  # Dispatches to the private to_string/4 clauses which pattern match on
  # the resolved format (atom style, RBNF rule name or format string).
  case to_string(number, options.format, backend, options) do
    {:error, reason} -> {:error, reason}
    string -> {:ok, string}
  end
end

# Keyword options are validated and normalised into an %Options{} struct,
# then re-dispatched to the fast-path clause above.
def to_string(number, backend, options) when is_list(options) do
  with {:ok, options} <- Options.validate_options(number, backend, options) do
    to_string(number, backend, options)
  end
end
@doc """
Formats a number into a string, like `to_string/3`, but raises an
exception instead of returning an error tuple when formatting fails.

## Options

* `number` is an integer, float or Decimal to be formatted

* `options` is a keyword list defining how the number is to be formatted. See
  `Cldr.Number.to_string/2`

## Returns

* a formatted number as a string or

* raises an exception

## Examples

    iex> Cldr.Number.to_string! 12345, TestBackend.Cldr
    "12,345"

    iex> Cldr.Number.to_string! 12345, TestBackend.Cldr, locale: "fr"
    "12 345"

"""
@spec to_string!(
        number | Decimal.t(),
        Cldr.backend() | Keyword.t() | map(),
        Keyword.t() | map()
      ) ::
        String.t() | no_return()
def to_string!(number, backend \\ default_backend(), options \\ [])

def to_string!(number, backend, options) do
  # Unwrap a successful result; convert an error tuple into an exception.
  number
  |> to_string(backend, options)
  |> case do
    {:ok, formatted} -> formatted
    {:error, {exception, message}} -> raise exception, message
  end
end
# Maps a public format name (as passed in the :format option) to the RBNF
# rule set that implements it and the RBNF module (under the backend
# namespace) where that rule set lives.
@format_mapping [
  {:ordinal, :digits_ordinal, Ordinal},
  {:spellout, :spellout_numbering, Spellout},
  {:spellout_verbose, :spellout_numbering_verbose, Spellout},
  {:spellout_year, :spellout_numbering_year, Spellout},
]

# Generates one private to_string/4 clause per RBNF-backed format at
# compile time. Each generated clause delegates to evaluate_rule/5.
for {format, function, module} <- @format_mapping do
  defp to_string(number, unquote(format), backend, options) do
    evaluate_rule(number, unquote(module), unquote(function), options.locale, backend)
  end
end
# For Roman numerals. Roman numeral RBNF rules are defined on the
# root locale only, so @root_locale is used directly.
defp to_string(number, :roman, backend, _options) do
  Module.concat(backend, Rbnf.NumberSystem).roman_upper(number, @root_locale)
end

defp to_string(number, :roman_lower, backend, _options) do
  Module.concat(backend, Rbnf.NumberSystem).roman_lower(number, @root_locale)
end

# For the :currency_long format only
defp to_string(number, :currency_long = format, backend, options) do
  Formatter.Currency.to_string(number, format, backend, options)
end

# For all other short formats (members of @short_format_styles)
defp to_string(number, format, backend, options)
     when is_atom(format) and format in @short_format_styles do
  Formatter.Short.to_string(number, format, backend, options)
end

# For executing arbitrary RBNF rules that might exist for a given locale.
# If the locale defines no RBNF rules at all, fail fast with a rule error.
defp to_string(_number, format, _backend, %{locale: %{rbnf_locale_name: nil} = locale}) do
  {:error, Cldr.Rbnf.rbnf_rule_error(locale, format)}
end

# Any remaining atom format is assumed to name an RBNF rule defined for
# the locale (or the root locale as a fallback).
defp to_string(number, format, backend, options) when is_atom(format) do
  with {:ok, module, locale} <- find_rbnf_format_module(options.locale, format, backend) do
    apply(module, format, [number, locale])
  end
end

# For all other formats: a binary is treated as a decimal format string.
defp to_string(number, format, backend, options) when is_binary(format) do
  Formatter.Decimal.to_string(number, format, backend, options)
end

# Propagates an error tuple produced upstream while resolving the
# format (all valid atom and binary formats are handled above).
defp to_string(_number, {:error, _} = error, _backend, _options) do
  error
end
# Resolves the RBNF module implementing `format`, looking first in the
# given locale and then falling back to the root locale (called "und").
# Returns {:ok, module, locale_used} or an error tuple.
defp find_rbnf_format_module(locale, format, backend) do
  root_locale = Map.put(@root_locale, :backend, backend)

  case find_rbnf_module(locale, format, backend) do
    nil ->
      case find_rbnf_module(root_locale, format, backend) do
        nil -> {:error, Cldr.Rbnf.rbnf_rule_error(locale, format)}
        rbnf_module -> {:ok, rbnf_module, root_locale}
      end

    rbnf_module ->
      {:ok, rbnf_module, locale}
  end
end
# Returns the first RBNF module (under the backend namespace) whose rule
# sets for `locale` include `format`, or nil if none does.
defp find_rbnf_module(locale, format, backend) do
  locale
  |> Cldr.Rbnf.categories_for_locale!()
  |> Enum.find_value(fn category ->
    rbnf_module = Module.concat([backend, :Rbnf, category])
    rule_sets = rbnf_module.rule_sets(locale)

    # `if` without else yields nil, which Enum.find_value skips.
    if rule_sets && format in rule_sets, do: rbnf_module
  end)
end
# Invokes the RBNF rule set `function` (e.g. :spellout_numbering) defined
# in the backend's RBNF module `module` (e.g. Spellout) for the locale,
# returning an error tuple when the locale lacks that rule set.
defp evaluate_rule(number, module, function, locale, backend) do
  rbnf_module = Module.concat([backend, :Rbnf, module])

  # rule_sets/1 may return nil; normalise to a list for the membership test.
  known_rule_sets = rbnf_module.rule_sets(locale) || []

  if function in known_rule_sets do
    apply(rbnf_module, function, [number, locale])
  else
    {:error, Cldr.Rbnf.rbnf_rule_error(locale, function)}
  end
end
@doc """
Formats a number using the locale's `:at_least` pattern for
a locale and number system (e.g. `"1,234+"`).

## Arguments

* `number` is an integer, float or Decimal to be formatted

* `backend` is any `Cldr` backend. That is, any module that
  contains `use Cldr`

* `options` is a keyword list defining how the number is to be formatted.
  See `Cldr.Number.to_string/3` for a description of the available
  options.

## Example

    iex> Cldr.Number.to_at_least_string 1234, TestBackend.Cldr
    {:ok, "1,234+"}

"""
@spec to_at_least_string(number | Decimal.t(), Cldr.backend(), Keyword.t() | map()) ::
        {:ok, String.t()} | {:error, {module(), String.t()}}
def to_at_least_string(number, backend \\ default_backend(), options \\ [])

# Only options were supplied: take the backend from the options (or the
# default backend) and re-dispatch.
def to_at_least_string(number, options, []) when is_list(options) do
  backend = Keyword.get_lazy(options, :backend, &default_backend/0)
  to_at_least_string(number, backend, Keyword.delete(options, :backend))
end

def to_at_least_string(number, backend, options) do
  other_format(number, :at_least, backend, options)
end
@doc """
Formats a number using the locale's `:at_most` pattern for
a locale and number system (e.g. `"≤1,234"`).

## Arguments

* `number` is an integer, float or Decimal to be formatted

* `backend` is any `Cldr` backend. That is, any module that
  contains `use Cldr`

* `options` is a keyword list defining how the number is to be formatted.
  See `Cldr.Number.to_string/3` for a description of the available
  options.

## Example

    iex> Cldr.Number.to_at_most_string 1234, TestBackend.Cldr
    {:ok, "≤1,234"}

"""
@spec to_at_most_string(number | Decimal.t(), Cldr.backend(), Keyword.t() | map()) ::
        {:ok, String.t()} | {:error, {module(), String.t()}}
def to_at_most_string(number, backend \\ default_backend(), options \\ [])

# Only options were supplied: take the backend from the options (or the
# default backend) and re-dispatch.
def to_at_most_string(number, options, []) when is_list(options) do
  backend = Keyword.get_lazy(options, :backend, &default_backend/0)
  to_at_most_string(number, backend, Keyword.delete(options, :backend))
end

def to_at_most_string(number, backend, options) do
  other_format(number, :at_most, backend, options)
end
@doc """
Formats a number using the locale's `:approximately` pattern for
a locale and number system (e.g. `"~1,234"`).

## Arguments

* `number` is an integer, float or Decimal to be formatted

* `backend` is any `Cldr` backend. That is, any module that
  contains `use Cldr`

* `options` is a keyword list defining how the number is to be formatted.
  See `Cldr.Number.to_string/3` for a description of the available
  options.

## Example

    iex> Cldr.Number.to_approx_string 1234, TestBackend.Cldr
    {:ok, "~1,234"}

"""
@spec to_approx_string(number | Decimal.t(), Cldr.backend(), Keyword.t() | map()) ::
        {:ok, String.t()} | {:error, {module(), String.t()}}
def to_approx_string(number, backend \\ default_backend(), options \\ [])

# Only options were supplied: take the backend from the options (or the
# default backend) and re-dispatch.
def to_approx_string(number, options, []) when is_list(options) do
  backend = Keyword.get_lazy(options, :backend, &default_backend/0)
  to_approx_string(number, backend, Keyword.delete(options, :backend))
end

def to_approx_string(number, backend, options) do
  other_format(number, :approximately, backend, options)
end
@doc """
Formats the first and last numbers of a range and applies
the `:range` format for a locale and number system.

## Arguments

* `number` is an integer, float or Decimal to be formatted

* `backend` is any `Cldr` backend. That is, any module that
  contains `use Cldr`

* `options` is a keyword list defining how the number is to be formatted.
  See `Cldr.Number.to_string/3` for a description of the available
  options.

## Example

    iex> Cldr.Number.to_range_string 1234..5678, TestBackend.Cldr
    {:ok, "1,234–5,678"}

"""
@spec to_range_string(Range.t(), Cldr.backend(), Keyword.t() | map()) ::
        {:ok, String.t()} | {:error, {module(), String.t()}}
def to_range_string(number, backend \\ default_backend(), options \\ [])

# Only options were supplied: take the backend from the options (or the
# default backend) and re-dispatch.
def to_range_string(number, options, []) when is_list(options) do
  backend = Keyword.get_lazy(options, :backend, &default_backend/0)
  to_range_string(number, backend, Keyword.delete(options, :backend))
end

def to_range_string(range, backend, options) do
  %Range{first: first, last: last} = range

  # Options are validated against the range's first number, then both
  # endpoints are formatted and substituted into the :range pattern.
  with {:ok, options} <- Options.validate_options(first, backend, options),
       {:ok, format} <- Options.validate_other_format(:range, backend, options),
       {:ok, first_string} <- to_string(first, backend, options),
       {:ok, last_string} <- to_string(last, backend, options) do
    range_string =
      [first_string, last_string]
      |> Cldr.Substitution.substitute(format)
      |> IO.iodata_to_binary()

    {:ok, range_string}
  end
end
@spec other_format(
        number | Decimal.t(),
        :approximately | :at_least | :at_most,
        Cldr.backend(),
        Keyword.t()
      ) ::
        {:ok, String.t()} | {:error, {module(), String.t()}}

# Shared implementation for the :at_least, :at_most and :approximately
# formats: format the number normally, then substitute the result into
# the locale's pattern for the requested format.
defp other_format(number, other_format, backend, options) do
  with {:ok, options} <- Options.validate_options(number, backend, options),
       {:ok, format} <- Options.validate_other_format(other_format, backend, options),
       {:ok, formatted} <- to_string(number, backend, options) do
    {:ok, [formatted] |> Cldr.Substitution.substitute(format) |> IO.iodata_to_binary()}
  end
end
@doc """
Converts a number from the latin digits `0..9` into
another number system. Returns `{:ok, string}` or
`{:error, reason}`.

* `number` is an integer, float. Decimal is supported only for
  `:numeric` number systems, not `:algorithmic`. See `Cldr.Number.System.to_system/3`
  for further information.

* `system` is any number system returned by `Cldr.known_number_systems/0`

## Examples

    iex> Cldr.Number.to_number_system 123, :hant, TestBackend.Cldr
    {:ok, "一百二十三"}

    iex> Cldr.Number.to_number_system 123, :hebr, TestBackend.Cldr
    {:ok, "קכ״ג"}

"""
# Fixed: the previous spec declared a bare `String.t()` success value, but
# as documented above (and shown by both doctests) the function returns an
# `{:ok, string}` tuple on success.
@spec to_number_system(number, atom, Cldr.backend()) ::
        {:ok, String.t()} | {:error, {module(), String.t()}}
def to_number_system(number, system, backend \\ default_backend()) do
  Cldr.Number.System.to_system(number, system, backend)
end
@doc """
Converts a number from the latin digits `0..9` into
another number system. Returns the converted number
or raises an exception on error.

* `number` is an integer, float. Decimal is supported only for
  `:numeric` number systems, not `:algorithmic`. See `Cldr.Number.System.to_system/3`
  for further information.

* `system` is any number system returned by `Cldr.Number.System.known_number_systems/0`

## Example

    iex> Cldr.Number.to_number_system! 123, :hant, TestBackend.Cldr
    "一百二十三"

"""
@spec to_number_system!(number, atom, Cldr.backend()) :: String.t() | no_return()
def to_number_system!(number, system, backend \\ default_backend()) do
  # Raising variant; delegates directly to Cldr.Number.System.to_system!/3.
  Cldr.Number.System.to_system!(number, system, backend)
end
@doc """
Return the precision (number of digits) of a number

This function delegates to `Cldr.Digits.number_of_digits/1`

## Example

    iex> Cldr.Number.precision 1.234
    4

"""
# Thin alias over Cldr.Digits.number_of_digits/1; kept as a defdelegate so
# no wrapper body is required.
defdelegate precision(number), to: Cldr.Digits, as: :number_of_digits
@doc false
# TODO remove for Cldr 3.0
# Compile-time feature detection: later Cldr releases provide
# `Cldr.default_backend!/0` (raising variant); older ones only
# `Cldr.default_backend/0`. Whichever exists when this module is
# compiled is used.
if Code.ensure_loaded?(Cldr) && function_exported?(Cldr, :default_backend!, 0) do
  def default_backend do
    Cldr.default_backend!()
  end
else
  def default_backend do
    Cldr.default_backend()
  end
end
end
|
lib/cldr/number.ex
| 0.936713
| 0.812012
|
number.ex
|
starcoder
|
defmodule Aggregate.Tasks do
  @moduledoc """
  Task filtering and aggregation functions.

  All timestamps are converted to the local machine's time zone.
  """

  # Number of seconds in one day; used when splitting multi-day tasks.
  @day_seconds 24 * 60 * 60

  @doc """
  Filters and groups the tasks in the supplied stream based on the specified
  regular expression and period.

  The supported periods are: `:day`, `:week`, `:month`.

  > Note: Active tasks are ignored.
  """
  def per_period_for_a_task(stream, query, task_regex, group_period) do
    stream
    |> Stream.filter(fn entry -> Regex.match?(task_regex, entry.task) end)
    |> per_period(query, group_period)
  end

  @doc """
  Groups the tasks in the supplied stream based on the specified period.

  Returns a list of `{period_string, total_duration, entries}` tuples,
  sorted according to `query.sort_by` and `query.order`.

  > Note: Active tasks are ignored.
  """
  def per_period(stream, query, group_period) do
    result =
      stream
      |> without_active_tasks()
      |> with_local_time_zone()
      # Tasks spanning midnight are split into one entry per day so that
      # each entry is attributed to a single period.
      |> Enum.flat_map(fn entry -> split_task_per_day(entry) end)
      |> as_list()
      |> Enum.map(fn {entry, start_day} ->
        {entry, task_start_day_to_period(start_day, group_period)}
      end)
      |> Enum.group_by(fn {_, period} -> period end)
      # For each period keep the entries and sum their durations (minutes).
      |> Enum.map(fn {period, entries} ->
        entries = entries |> Enum.map(fn {entry, _} -> entry end)
        {period, entries |> Enum.reduce(0, fn entry, acc -> acc + entry.duration end), entries}
      end)
      |> Enum.sort_by(fn {period, duration, _} ->
        case query.sort_by do
          "task" -> period
          "duration" -> duration
          # NOTE(review): "start" sorts by period here; periods are derived
          # from task start days so this appears intentional — confirm.
          "start" -> period
        end
      end)

    case query.order do
      "desc" -> result |> Enum.reverse()
      _ -> result
    end
  end

  @doc """
  Gathers all tasks that overlap and groups them by day.

  Returns a list of `{day, overlapping_entries}` tuples; only days with at
  least one overlap are included.

  > Note: Active tasks are ignored.
  """
  def with_overlapping_periods(stream) do
    tasks_list =
      stream
      |> without_active_tasks()
      |> with_local_time_zone()
      |> as_list()

    # Index tasks by id so overlapping ids can be mapped back to entries.
    tasks_map =
      tasks_list
      |> Enum.map(fn entry -> {entry.id, entry} end)
      |> Map.new()

    tasks_list
    |> Enum.flat_map(fn entry -> split_task_per_day(entry) end)
    # Expand each entry into the minute slots (offsets from midnight) that
    # it occupies on its day.
    |> Enum.map(fn {entry, start_day} ->
      {:ok, start_day} = NaiveDateTime.from_iso8601("#{start_day}T00:00:00")
      offset = div(NaiveDateTime.diff(entry.start, start_day, :second), 60)

      minutes =
        offset..(offset + entry.duration - 1)
        |> Enum.map(fn minute -> {minute + 1, entry.id} end)

      {start_day, minutes}
    end)
    |> Enum.group_by(fn {start_day, _} ->
      start_day
    end)
    |> Enum.map(fn {start_day, entry_minutes} ->
      # Merge the per-entry minute maps; a minute claimed by more than one
      # entry accumulates a list of ids, which marks an overlap.
      overlapping_entries =
        entry_minutes
        |> Enum.reduce(
          %{},
          fn {_, current_minutes}, acc ->
            Map.merge(
              current_minutes |> Map.new(),
              acc,
              fn _k, v1, v2 -> if is_list(v2), do: [v1 | v2], else: [v1 | [v2]] end
            )
          end
        )
        |> Enum.flat_map(fn {_, entry_ids} ->
          if is_list(entry_ids) && length(entry_ids) > 1, do: entry_ids, else: []
        end)
        |> Enum.uniq()

      {
        start_day,
        overlapping_entries
        |> Enum.map(fn entry_id -> Map.get(tasks_map, entry_id) end)
        |> Enum.sort_by(fn entry -> entry.start end)
      }
    end)
    # Keep only days that actually contain overlapping tasks.
    |> Enum.filter(fn {_, entry_ids} -> length(entry_ids) > 0 end)
  end

  @doc """
  Returns a list of all tasks that have no duration.
  """
  def with_no_duration(stream) do
    stream
    |> Stream.filter(fn entry -> entry.duration <= 0 end)
    |> as_list()
  end

  @doc """
  Groups the supplied stream of tasks by task name and calculates their
  total duration.

  Returns a list of `{task, total_duration, entries}` tuples sorted
  according to `query.sort_by` and `query.order`.

  > Note: Active tasks are ignored.
  """
  def with_total_duration(stream, query) do
    result =
      stream
      |> without_active_tasks()
      |> with_local_time_zone()
      |> as_list()
      |> Enum.group_by(fn entry -> entry.task end)
      |> Enum.map(fn {task, entries} ->
        {task, entries |> Enum.reduce(0, fn entry, acc -> acc + entry.duration end), entries}
      end)
      |> Enum.sort_by(fn {task, duration, _} ->
        case query.sort_by do
          "task" -> task
          "duration" -> duration
          # NOTE(review): "start" falls back to sorting by duration here —
          # grouped totals have no single start time; confirm intended.
          "start" -> duration
        end
      end)

    case query.order do
      "desc" -> result |> Enum.reverse()
      _ -> result
    end
  end

  @doc """
  Returns a sorted list of tasks based on the supplied stream.

  > Note: Active tasks are included.
  """
  def as_sorted_list(stream, query) do
    result =
      stream
      |> with_local_time_zone()
      |> as_list()
      |> Enum.sort_by(fn entry ->
        case query.sort_by do
          "task" -> entry.task
          # Sorting on the ISO-8601 string representation gives
          # chronological order.
          "start" -> NaiveDateTime.to_string(entry.start)
          "duration" -> entry.duration
        end
      end)

    case query.order do
      "desc" -> result |> Enum.reverse()
      _ -> result
    end
  end

  @doc """
  Converts the specified date to a string representation of the required
  period type.

  `:day` yields `"YYYY-MM-DD"`, `:week` the date of that week's Monday,
  and `:month` `"YYYY-MM"`.
  """
  def task_start_day_to_period(start_day, group_period) do
    case group_period do
      :day ->
        start_day |> Date.to_string()

      :week ->
        # Walk back (day_of_week - 1) days to reach the week's Monday
        # (ISO day-of-week: Monday == 1).
        week_monday =
          Date.add(
            start_day,
            -(Calendar.ISO.day_of_week(start_day.year, start_day.month, start_day.day) - 1)
          )

        week_monday |> Date.to_string()

      :month ->
        "#{start_day.year}-#{start_day.month |> Integer.to_string() |> String.pad_leading(2, "0")}"
    end
  end

  @doc """
  Splits the specified task into an entry per day, based on the task's
  start and duration.

  Returns a list of `{entry, start_day}` tuples. A task contained within a
  single day yields one tuple; a task crossing midnight is cut at each day
  boundary, with durations (minutes) adjusted accordingly.
  """
  def split_task_per_day(entry) do
    start_day = entry.start |> NaiveDateTime.to_date()
    start_day_string = start_day |> Date.to_string()

    # entry.duration is in minutes; compute the task's end timestamp.
    entry_end =
      entry.start
      |> NaiveDateTime.add(entry.duration * 60, :second)

    end_day = entry_end |> NaiveDateTime.to_date()
    end_day_string = end_day |> Date.to_string()

    if start_day_string != end_day_string do
      # Task crosses at least one midnight. First compute the portion
      # belonging to the start day (up to 24:00:00).
      {:ok, start_day_end} = "#{start_day_string}T23:59:59" |> NaiveDateTime.from_iso8601()
      start_day_end = start_day_end |> NaiveDateTime.add(1, :second)
      current_day_seconds = NaiveDateTime.diff(start_day_end, entry.start, :second)
      remaining_entry_seconds = entry.duration * 60 - current_day_seconds
      current_day_task = %{entry | duration: div(current_day_seconds, 60)}

      # Then emit one entry per subsequent day, each starting at midnight
      # and consuming at most a full day of the remaining seconds.
      {result, _} =
        0..div(remaining_entry_seconds, @day_seconds)
        |> Enum.reduce(
          {[{current_day_task, start_day}], remaining_entry_seconds},
          fn day_number, {new_entries, remaining_seconds} ->
            new_entry_day = start_day |> Date.add(day_number + 1)
            new_entry_day_string = new_entry_day |> Date.to_string()

            {:ok, new_entry_start} =
              "#{new_entry_day_string}T00:00:00" |> NaiveDateTime.from_iso8601()

            new_entry_duration =
              if remaining_seconds >= @day_seconds do
                @day_seconds
              else
                remaining_seconds
              end

            new_entry = %{
              entry
              | start: new_entry_start,
                duration: div(new_entry_duration, 60)
            }

            {
              [{new_entry, new_entry_day} | new_entries],
              remaining_seconds - new_entry_duration
            }
          end
        )

      # The reduce may produce a trailing zero-duration slice; drop it.
      result |> Enum.filter(fn {entry, _} -> entry.duration > 0 end)
    else
      [{entry, start_day}]
    end
  end

  @doc """
  Converts each task's UTC start timestamp to the local system's time zone.
  """
  def with_local_time_zone(list) do
    list
    |> Enum.map(fn entry ->
      start_with_time_zone =
        entry.start
        |> NaiveDateTime.to_erl()
        |> :calendar.universal_time_to_local_time()
        |> NaiveDateTime.from_erl!()

      %{entry | start: start_with_time_zone}
    end)
  end

  @doc """
  Filters all active tasks (duration <= 0) out of the supplied stream.
  """
  def without_active_tasks(stream) do
    stream
    |> Stream.filter(fn entry -> entry.duration > 0 end)
  end

  @doc """
  Converts the supplied stream to a list.
  """
  def as_list(stream) do
    stream
    |> Enum.to_list()
  end
end
|
lib/aggregate/tasks.ex
| 0.89826
| 0.635731
|
tasks.ex
|
starcoder
|
defmodule Dataloader.KV do
  @moduledoc """
  Simple KV based Dataloader source.

  This module is a simple key value based data loader source. You
  must supply a function that accepts ids, and returns a map of values
  keyed by id.

  ## Examples
  """

  # :load_function - fun(batch_key, ids) returning results keyed by id
  # :opts          - task options (:max_concurrency, :timeout)
  # :batches       - pending ids awaiting load, keyed by batch key
  # :results       - already-loaded results, keyed by batch key then id
  defstruct [
    :load_function,
    opts: [],
    batches: %{},
    results: %{}
  ]

  @doc """
  Creates a new KV source wrapping `load_function`.

  ## Options

  * `:max_concurrency` - maximum number of concurrent load tasks.
    Defaults to twice the number of online schedulers.
  * `:timeout` - per-run timeout in milliseconds. Defaults to 30_000.
  """
  def new(load_function, opts \\ []) do
    max_concurrency = opts[:max_concurrency] || System.schedulers_online() * 2

    %__MODULE__{
      load_function: load_function,
      opts: [
        max_concurrency: max_concurrency,
        timeout: opts[:timeout] || 30_000
      ]
    }
  end

  defimpl Dataloader.Source do
    # Folds freshly loaded batch results (or errors) into the existing
    # results map.
    defp merge_results(existing_results, new_results) do
      new_results
      |> Enum.reduce(existing_results, fn {batch_info, data}, acc ->
        case data do
          {:error, reason} ->
            # A failed batch records the error against every requested id.
            merge_errors(acc, batch_info, reason)

          {:ok, data} ->
            merge(acc, Map.new([data]))
        end
      end)
    end

    # Expands a batch-level failure into per-id `{:error, reason}` entries
    # so each requested id can report the failure on fetch.
    defp merge_errors(acc, {batch_key, batch}, reason) do
      errors =
        batch
        |> Enum.reduce(%{}, fn key, acc ->
          Map.put(acc, key, {:error, reason})
        end)

      merge(acc, %{batch_key => errors})
    end

    # Deep-merges two results maps: per-batch maps are merged id-wise,
    # with new values overriding old ones.
    defp merge(acc, results) do
      Map.merge(acc, results, fn _, v1, v2 ->
        Map.merge(v1, v2)
      end)
    end

    # A nil result is ignored rather than cached.
    def put(source, _batch, _id, nil) do
      source
    end

    def put(source, batch, id, result) do
      results = Map.update(source.results, batch, %{id => result}, &Map.put(&1, id, result))
      %{source | results: results}
    end

    # Queues `id` for loading unless a (successful) result is already
    # cached for it.
    def load(source, batch_key, id) do
      if fetched?(source.results, batch_key, id) do
        source
      else
        update_in(source.batches, fn batches ->
          Map.update(batches, batch_key, MapSet.new([id]), &MapSet.put(&1, id))
        end)
      end
    end

    # A cached {:error, _} does not count as fetched, so failed ids are
    # retried on the next load.
    defp fetched?(results, batch_key, id) do
      case results do
        %{^batch_key => %{^id => {:error, _}}} -> false
        %{^batch_key => %{^id => _}} -> true
        _ -> false
      end
    end

    def fetch(source, batch_key, id) do
      with {:ok, batch} <- Map.fetch(source.results, batch_key) do
        case Map.fetch(batch, id) do
          :error -> {:error, "Unable to find id #{inspect(id)}"}
          {:ok, {:error, reason}} -> {:error, reason}
          {:ok, item} -> {:ok, item}
        end
      else
        :error ->
          {:error, "Unable to find batch #{inspect(batch_key)}"}
      end
    end

    # Runs all pending batches (concurrently, via Dataloader.run_tasks/2)
    # and folds their results into the cache, clearing the pending set.
    def run(source) do
      fun = fn {batch_key, ids} ->
        {batch_key, source.load_function.(batch_key, ids)}
      end

      results = Dataloader.async_safely(Dataloader, :run_tasks, [source.batches, fun])

      %{source | batches: %{}, results: merge_results(source.results, results)}
    end

    def pending_batches?(source) do
      source.batches != %{}
    end

    def timeout(%{opts: opts}) do
      opts[:timeout]
    end
  end
end
|
lib/dataloader/kv.ex
| 0.828072
| 0.418845
|
kv.ex
|
starcoder
|
defmodule Tint.HSV do
  @moduledoc """
  A color in the HSV (hue, saturation, value) colorspace.
  """

  import Tint.Utils.Cast

  alias Tint.Utils.Interval

  defstruct [:hue, :saturation, :value]

  # hue is in degrees [0, 360); saturation and value are ratios in [0, 1].
  @type t :: %__MODULE__{
          hue: float,
          saturation: float,
          value: float
        }

  @hue_interval Interval.new(0, 360)
  # The maximum is excluded — presumably because 360° wraps around to 0°;
  # confirm against Interval semantics.
  @hue_excl_interval %{@hue_interval | exclude_max: true}

  @doc """
  Builds a new HSV color from hue, saturation and value color parts. Please
  always use this function to build a new HSV color.

  ## Examples

      iex> Tint.HSV.new(25.8, 0.882, 1)
      #Tint.HSV<25.8°,88.2%,100%>

  """
  @spec new(number | String.t(), number | String.t(), number | String.t()) :: t
  def new(hue, saturation, value) do
    %__MODULE__{
      hue: cast_value_with_interval!(hue, :float, @hue_excl_interval),
      saturation: cast_ratio!(saturation),
      value: cast_ratio!(value)
    }
  end

  @doc """
  Converts a tuple containing hue, saturation and value into a `Tint.HSV`
  struct.
  """
  @spec from_tuple(
          {number | String.t(), number | String.t(), number | String.t()}
        ) :: t
  def from_tuple({hue, saturation, value}) do
    new(hue, saturation, value)
  end

  @doc """
  Converts HSV color into a tuple containing the hue, saturation and value
  parts.
  """
  @spec to_tuple(t) :: {float, float, float}
  def to_tuple(%__MODULE__{} = color) do
    {color.hue, color.saturation, color.value}
  end

  @doc """
  Determines whether the given color is a grayscale color which basically means
  that saturation or the value is 0.
  """
  @doc since: "1.0.0"
  @spec grayscale?(t) :: boolean
  def grayscale?(%__MODULE__{} = color) do
    color.saturation == 0 || color.value == 0
  end

  @doc """
  Checks whether the hue of the given color is in the specified bounds. This
  can be used to cluster colors by their chromaticity.

  The range is half-open: `min` is inclusive, `max` is exclusive.
  """
  @doc since: "1.0.0"
  @spec hue_between?(t, min :: number, max :: number) :: boolean
  def hue_between?(%__MODULE__{} = color, min, max) when min <= max do
    color.hue >= min && color.hue < max
  end

  defimpl Inspect do
    import Inspect.Algebra
    import Tint.Utils.Formatter

    # Renders e.g. #Tint.HSV<25.8°,88.2%,100%> (also used by doctests).
    def inspect(color, _opts) do
      concat([
        "#Tint.HSV<",
        format_degrees(color.hue),
        ",",
        format_percentage(color.saturation),
        ",",
        format_percentage(color.value),
        ">"
      ])
    end

    # Formats a hue as degrees, e.g. "25.8°".
    defp format_degrees(value) do
      format_value(value) <> "°"
    end
  end
end
|
lib/tint/hsv.ex
| 0.941399
| 0.574574
|
hsv.ex
|
starcoder
|
defmodule AWS.ACMPCA do
@moduledoc """
This is the *ACM Private CA API Reference*. It provides descriptions,
syntax, and usage examples for each of the actions and data types involved
in creating and managing private certificate authorities (CA) for your
organization.
The documentation for each action shows the Query API request parameters
and the XML response. Alternatively, you can use one of the AWS SDKs to
access an API that's tailored to the programming language or platform that
you're using. For more information, see [AWS
SDKs](https://aws.amazon.com/tools/#SDKs).
<note> Each ACM Private CA API action has a throttling limit which
determines the number of times the action can be called per second. For
more information, see [API Rate Limits in ACM Private
CA](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaLimits.html#PcaLimits-api)
in the ACM Private CA user guide.
</note>
"""
# Every public function below is a thin wrapper: it forwards the AWS action
# name together with the caller-supplied input map and options to request/4,
# which performs the signed JSON POST against the acm-pca endpoint.
@doc """
Creates a root or subordinate private certificate authority (CA). You must
specify the CA configuration, the certificate revocation list (CRL)
configuration, the CA type, and an optional idempotency token to avoid
accidental creation of multiple CAs. The CA configuration specifies the
name of the algorithm and key size to be used to create the CA private key,
the type of signing algorithm that the CA uses, and X.500 subject
information. The CRL configuration specifies the CRL expiration period in
days (the validity period of the CRL), the Amazon S3 bucket that will
contain the CRL, and a CNAME alias for the S3 bucket that is included in
certificates issued by the CA. If successful, this action returns the
Amazon Resource Name (ARN) of the CA.
"""
def create_certificate_authority(client, input, options \\ []) do
request(client, "CreateCertificateAuthority", input, options)
end
@doc """
Creates an audit report that lists every time that your CA private key is
used. The report is saved in the Amazon S3 bucket that you specify on
input. The `IssueCertificate` and `RevokeCertificate` actions use the
private key.
"""
def create_certificate_authority_audit_report(client, input, options \\ []) do
request(client, "CreateCertificateAuthorityAuditReport", input, options)
end
@doc """
Assigns permissions from a private CA to a designated AWS service. Services
are specified by their service principals and can be given permission to
create and retrieve certificates on a private CA. Services can also be
given permission to list the active permissions that the private CA has
granted. For ACM to automatically renew your private CA's certificates, you
must assign all possible permissions from the CA to the ACM service
principal.
At this time, you can only assign permissions to ACM (`acm.amazonaws.com`).
Permissions can be revoked with the `DeletePermission` action and listed
with the `ListPermissions` action.
"""
def create_permission(client, input, options \\ []) do
request(client, "CreatePermission", input, options)
end
@doc """
Deletes a private certificate authority (CA). You must provide the Amazon
Resource Name (ARN) of the private CA that you want to delete. You can find
the ARN by calling the `ListCertificateAuthorities` action.
<note> Deleting a CA will invalidate other CAs and certificates below it in
your CA hierarchy.
</note> Before you can delete a CA that you have created and activated, you
must disable it. To do this, call the `UpdateCertificateAuthority` action
and set the **CertificateAuthorityStatus** parameter to `DISABLED`.
Additionally, you can delete a CA if you are waiting for it to be created
(that is, the status of the CA is `CREATING`). You can also delete it if
the CA has been created but you haven't yet imported the signed certificate
into ACM Private CA (that is, the status of the CA is
`PENDING_CERTIFICATE`).
When you successfully call `DeleteCertificateAuthority`, the CA's status
changes to `DELETED`. However, the CA won't be permanently deleted until
the restoration period has passed. By default, if you do not set the
`PermanentDeletionTimeInDays` parameter, the CA remains restorable for 30
days. You can set the parameter from 7 to 30 days. The
`DescribeCertificateAuthority` action returns the time remaining in the
restoration window of a private CA in the `DELETED` state. To restore an
eligible CA, call the `RestoreCertificateAuthority` action.
"""
def delete_certificate_authority(client, input, options \\ []) do
request(client, "DeleteCertificateAuthority", input, options)
end
@doc """
Revokes permissions that a private CA assigned to a designated AWS service.
Permissions can be created with the `CreatePermission` action and listed
with the `ListPermissions` action.
"""
def delete_permission(client, input, options \\ []) do
request(client, "DeletePermission", input, options)
end
@doc """
Lists information about your private certificate authority (CA). You
specify the private CA on input by its ARN (Amazon Resource Name). The
output contains the status of your CA. This can be any of the following:
<ul> <li> `CREATING` - ACM Private CA is creating your private certificate
authority.
</li> <li> `PENDING_CERTIFICATE` - The certificate is pending. You must use
your ACM Private CA-hosted or on-premises root or subordinate CA to sign
your private CA CSR and then import it into PCA.
</li> <li> `ACTIVE` - Your private CA is active.
</li> <li> `DISABLED` - Your private CA has been disabled.
</li> <li> `EXPIRED` - Your private CA certificate has expired.
</li> <li> `FAILED` - Your private CA has failed. Your CA can fail because
of problems such a network outage or backend AWS failure or other errors. A
failed CA can never return to the pending state. You must create a new CA.
</li> <li> `DELETED` - Your private CA is within the restoration period,
after which it is permanently deleted. The length of time remaining in the
CA's restoration period is also included in this action's output.
</li> </ul>
"""
def describe_certificate_authority(client, input, options \\ []) do
request(client, "DescribeCertificateAuthority", input, options)
end
@doc """
Lists information about a specific audit report created by calling the
`CreateCertificateAuthorityAuditReport` action. Audit information is
created every time the certificate authority (CA) private key is used. The
private key is used when you call the `IssueCertificate` action or the
`RevokeCertificate` action.
"""
def describe_certificate_authority_audit_report(client, input, options \\ []) do
request(client, "DescribeCertificateAuthorityAuditReport", input, options)
end
@doc """
Retrieves a certificate from your private CA. The ARN of the certificate is
returned when you call the `IssueCertificate` action. You must specify both
the ARN of your private CA and the ARN of the issued certificate when
calling the **GetCertificate** action. You can retrieve the certificate if
it is in the **ISSUED** state. You can call the
`CreateCertificateAuthorityAuditReport` action to create a report that
contains information about all of the certificates issued and revoked by
your private CA.
"""
def get_certificate(client, input, options \\ []) do
request(client, "GetCertificate", input, options)
end
@doc """
Retrieves the certificate and certificate chain for your private
certificate authority (CA). Both the certificate and the chain are base64
PEM-encoded. The chain does not include the CA certificate. Each
certificate in the chain signs the one before it.
"""
def get_certificate_authority_certificate(client, input, options \\ []) do
request(client, "GetCertificateAuthorityCertificate", input, options)
end
@doc """
Retrieves the certificate signing request (CSR) for your private
certificate authority (CA). The CSR is created when you call the
`CreateCertificateAuthority` action. Sign the CSR with your ACM Private
CA-hosted or on-premises root or subordinate CA. Then import the signed
certificate back into ACM Private CA by calling the
`ImportCertificateAuthorityCertificate` action. The CSR is returned as a
base64 PEM-encoded string.
"""
def get_certificate_authority_csr(client, input, options \\ []) do
request(client, "GetCertificateAuthorityCsr", input, options)
end
@doc """
Imports a signed private CA certificate into ACM Private CA. This action is
used when you are using a chain of trust whose root is located outside ACM
Private CA. Before you can call this action, the following preparations
must be in place:
<ol> <li> In ACM Private CA, call the `CreateCertificateAuthority` action
to create the private CA that you plan to back with the imported
certificate.
</li> <li> Call the `GetCertificateAuthorityCsr` action to generate a
certificate signing request (CSR).
</li> <li> Sign the CSR using a root or intermediate CA hosted either by an
on-premises PKI hierarchy or a commercial CA.
</li> <li> Create a certificate chain and copy the signed certificate and
the certificate chain to your working directory.
</li> </ol> The following requirements apply when you import a CA
certificate.
<ul> <li> You cannot import a non-self-signed certificate for use as a root
CA.
</li> <li> You cannot import a self-signed certificate for use as a
subordinate CA.
</li> <li> Your certificate chain must not include the private CA
certificate that you are importing.
</li> <li> Your ACM Private CA-hosted or on-premises CA certificate must be
the last certificate in your chain. The subordinate certificate, if any,
that your root CA signed must be next to last. The subordinate certificate
signed by the preceding subordinate CA must come next, and so on until your
chain is built.
</li> <li> The chain must be PEM-encoded.
</li> </ul>
"""
def import_certificate_authority_certificate(client, input, options \\ []) do
request(client, "ImportCertificateAuthorityCertificate", input, options)
end
@doc """
Uses your private certificate authority (CA) to issue a client certificate.
This action returns the Amazon Resource Name (ARN) of the certificate. You
can retrieve the certificate by calling the `GetCertificate` action and
specifying the ARN.
<note> You cannot use the ACM **ListCertificateAuthorities** action to
retrieve the ARNs of the certificates that you issue by using ACM Private
CA.
</note>
"""
def issue_certificate(client, input, options \\ []) do
request(client, "IssueCertificate", input, options)
end
@doc """
Lists the private certificate authorities that you created by using the
`CreateCertificateAuthority` action.
"""
def list_certificate_authorities(client, input, options \\ []) do
request(client, "ListCertificateAuthorities", input, options)
end
@doc """
Lists all the permissions, if any, that have been assigned by a private CA.
Permissions can be granted with the `CreatePermission` action and revoked
with the `DeletePermission` action.
"""
def list_permissions(client, input, options \\ []) do
request(client, "ListPermissions", input, options)
end
@doc """
Lists the tags, if any, that are associated with your private CA. Tags are
labels that you can use to identify and organize your CAs. Each tag
consists of a key and an optional value. Call the `TagCertificateAuthority`
action to add one or more tags to your CA. Call the
`UntagCertificateAuthority` action to remove tags.
"""
def list_tags(client, input, options \\ []) do
request(client, "ListTags", input, options)
end
@doc """
Restores a certificate authority (CA) that is in the `DELETED` state. You
can restore a CA during the period that you defined in the
**PermanentDeletionTimeInDays** parameter of the
`DeleteCertificateAuthority` action. Currently, you can specify 7 to 30
days. If you did not specify a **PermanentDeletionTimeInDays** value, by
default you can restore the CA at any time in a 30 day period. You can
check the time remaining in the restoration period of a private CA in the
`DELETED` state by calling the `DescribeCertificateAuthority` or
`ListCertificateAuthorities` actions. The status of a restored CA is set to
its pre-deletion status when the **RestoreCertificateAuthority** action
returns. To change its status to `ACTIVE`, call the
`UpdateCertificateAuthority` action. If the private CA was in the
`PENDING_CERTIFICATE` state at deletion, you must use the
`ImportCertificateAuthorityCertificate` action to import a certificate
authority into the private CA before it can be activated. You cannot
restore a CA after the restoration period has ended.
"""
def restore_certificate_authority(client, input, options \\ []) do
request(client, "RestoreCertificateAuthority", input, options)
end
@doc """
Revokes a certificate that was issued inside ACM Private CA. If you enable
a certificate revocation list (CRL) when you create or update your private
CA, information about the revoked certificates will be included in the CRL.
ACM Private CA writes the CRL to an S3 bucket that you specify. For more
information about revocation, see the `CrlConfiguration` structure. ACM
Private CA also writes revocation information to the audit report. For more
information, see `CreateCertificateAuthorityAuditReport`.
<note> You cannot revoke a root CA self-signed certificate.
</note>
"""
def revoke_certificate(client, input, options \\ []) do
request(client, "RevokeCertificate", input, options)
end
@doc """
Adds one or more tags to your private CA. Tags are labels that you can use
to identify and organize your AWS resources. Each tag consists of a key and
an optional value. You specify the private CA on input by its Amazon
Resource Name (ARN). You specify the tag by using a key-value pair. You can
apply a tag to just one private CA if you want to identify a specific
characteristic of that CA, or you can apply the same tag to multiple
private CAs if you want to filter for a common relationship among those
CAs. To remove one or more tags, use the `UntagCertificateAuthority`
action. Call the `ListTags` action to see what tags are associated with
your CA.
"""
def tag_certificate_authority(client, input, options \\ []) do
request(client, "TagCertificateAuthority", input, options)
end
@doc """
Remove one or more tags from your private CA. A tag consists of a key-value
pair. If you do not specify the value portion of the tag when calling this
action, the tag will be removed regardless of value. If you specify a
value, the tag is removed only if it is associated with the specified
value. To add tags to a private CA, use the `TagCertificateAuthority`. Call
the `ListTags` action to see what tags are associated with your CA.
"""
def untag_certificate_authority(client, input, options \\ []) do
request(client, "UntagCertificateAuthority", input, options)
end
@doc """
Updates the status or configuration of a private certificate authority
(CA). Your private CA must be in the `ACTIVE` or `DISABLED` state before
you can update it. You can disable a private CA that is in the `ACTIVE`
state or make a CA that is in the `DISABLED` state active again.
"""
def update_certificate_authority(client, input, options \\ []) do
request(client, "UpdateCertificateAuthority", input, options)
end
# -- Internal request plumbing ---------------------------------------------
# Encodes the input as JSON, signs it with AWS Signature V4 and POSTs it to
# the acm-pca endpoint. A 200 response with an empty body yields
# {:ok, nil, response}; a 200 with a body yields the decoded JSON; any other
# status has its body decoded and returned as {:error, decoded_body}.
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "acm-pca"}
host = build_host("acm-pca", client)
url = build_url(host, client)
# The JSON-1.1 target header selects the API action on the AWS side.
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "ACMPrivateCA.#{action}"}
]
payload = Poison.Encoder.encode(input, %{})
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
# NOTE(review): non-200 bodies are assumed to be JSON; Poison.Parser.parse!/2
# will raise if AWS ever returns a non-JSON error body — confirm acceptable.
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
# A client configured with region "local" always resolves to localhost
# (used for testing against a local endpoint).
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
# Standard AWS host shape: "<service>.<region>.<endpoint>".
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Assembles the final URL from the client's protocol, host and port.
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/acmpca.ex
| 0.872619
| 0.662878
|
acmpca.ex
|
starcoder
|
defmodule Hui.URL do
@moduledoc """
Struct and utilities for working with Solr URLs and parameters.
Use the module `t:Hui.URL.t/0` struct to specify
Solr core or collection URLs with request handlers.
"""
defstruct [:url, handler: "select", headers: [], options: []]
@type headers :: HTTPoison.Base.headers
@type options :: Keyword.t
@typedoc """
Struct for a Solr endpoint with a request handler and any associated HTTP headers and options.
## Example
```
%Hui.URL{handler: "suggest", url: "http://localhost:8983/solr/collection"}
```
- `url`: typical endpoint including the core or collection name. This may also be a load balancer
endpoint fronting several Solr upstreams.
- `handler`: name of a Solr request handler that processes requests.
- `headers`: HTTP headers.
- `options`: [HTTPoison options](https://hexdocs.pm/httpoison/HTTPoison.html#request/5).
"""
@type t :: %__MODULE__{url: nil | binary, handler: nil | binary, headers: nil | headers, options: nil | options}
@typedoc """
Solr parameters as keyword list or structs.
"""
@type url_params :: Keyword.t | Hui.Q.t | Hui.D.t | Hui.F.t | Hui.F.Range.t | Hui.F.Interval.t
@doc """
Returns a configured default Solr endpoint as `t:Hui.URL.t/0` struct.
iex> Hui.URL.default_url!
%Hui.URL{handler: "select", url: "http://localhost:8983/solr/gettingstarted", headers: [{"accept", "application/json"}], options: [recv_timeout: 10000]}
The default endpoint can be specified in application configuration as below:
```
config :hui, :default,
url: "http://localhost:8983/solr/gettingstarted",
handler: "select", # optional
headers: [{"accept", "application/json"}],
options: [recv_timeout: 10000]
```
"""
@spec default_url! :: t | nil
def default_url! do
# Despite the bang name this function does not raise: a missing :default
# configuration simply yields nil.
{status, default_url} = configured_url(:default)
case status do
:ok -> default_url
:error -> nil
end
end
@doc """
Retrieve url configuration as `t:Hui.URL.t/0` struct.
## Example
iex> Hui.URL.configured_url(:suggester)
{:ok, %Hui.URL{handler: "suggest", url: "http://localhost:8983/solr/collection"}}
The above retrieves the following endpoint configuration e.g. from `config.exs`:
```
config :hui, :suggester,
url: "http://localhost:8983/solr/collection",
handler: "suggest"
```
"""
# NOTE(review): the spec below lists `nil` as a possible return, but every
# branch returns {:ok, t} or {:error, binary} — confirm and tighten the spec.
@spec configured_url(atom) :: {:ok, t} | {:error, binary} | nil
def configured_url(config_key) do
# Each field is re-read from the :hui application environment on every call.
url = Application.get_env(:hui, config_key)[:url]
handler = Application.get_env(:hui, config_key)[:handler]
headers = if Application.get_env(:hui, config_key)[:headers], do: Application.get_env(:hui, config_key)[:headers], else: []
options = if Application.get_env(:hui, config_key)[:options], do: Application.get_env(:hui, config_key)[:options], else: []
# URL is mandatory; when no handler is configured the struct default
# ("select") applies.
case {url,handler} do
{nil, _} -> {:error, "URL not found in configuration"}
{_, nil} -> {:ok, %Hui.URL{url: url, headers: headers, options: options}}
{_, _} -> {:ok, %Hui.URL{url: url, handler: handler, headers: headers, options: options}}
end
end
@doc """
Encodes keyword list or structs of Solr parameters into a query string.
Solr parameters such as the filter query `fq`, `facet.field` can be set multiple times.
These can be specified in a list (e.g. `fq: [filter1, filter]`). Dot-notated
parameters (facet.field, hl.fl) can be specified with string keys,
e.g. `"facet.field": "type"`, `"hl.fl": "words"`.
## Example - keyword list
iex> Hui.URL.encode_query(q: "loch", start: 10, rows: 10)
"q=loch&start=10&rows=10"
iex> Hui.URL.encode_query(q: "loch", fq: ["type:image", "year:[2001 TO 2007]"])
"q=loch&fq=type%3Aimage&fq=year%3A%5B2001+TO+2007%5D"
iex> Hui.URL.encode_query(q: "loch", facet: true, "facet.field": ["type", "year"])
"q=loch&facet=true&facet.field=type&facet.field=year"
iex> Hui.URL.encode_query("not a valid parameter")
""
## Example - `t:Hui.Q.t/0` query struct
iex> x = %Hui.Q{q: "edinburgh", fl: "id,title", fq: ["type:image"], rows: 15}
%Hui.Q{
cache: nil,
collection: nil,
debug: nil,
debugQuery: nil,
defType: nil,
df: nil,
distrib: nil,
echoParams: nil,
explainOther: nil,
fl: "id,title",
fq: ["type:image"],
"json.nl": nil,
"json.wrf": nil,
logParamsList: nil,
omitHeader: nil,
q: "edinburgh",
"q.op": nil,
rows: 15,
segmentTerminateEarly: nil,
shards: nil,
"shards.info": nil,
"shards.preference": nil,
"shards.tolerant": nil,
sort: nil,
sow: nil,
start: nil,
timeAllowed: nil,
tr: nil,
wt: nil
}
iex> x |> Hui.URL.encode_query
"fl=id%2Ctitle&fq=type%3Aimage&q=edinburgh&rows=15"
## Other Examples - faceting, highlighting structs
iex> x = %Hui.F{field: ["year", "type"]}
%Hui.F{
contains: nil,
"contains.ignoreCase": nil,
"enum.cache.minDf": nil,
excludeTerms: nil,
exists: nil,
facet: true,
field: ["year", "type"],
interval: nil,
limit: nil,
matches: nil,
method: nil,
mincount: nil,
missing: nil,
offset: nil,
"overrequest.count": nil,
"overrequest.ratio": nil,
pivot: [],
"pivot.mincount": nil,
prefix: nil,
query: [],
range: nil,
sort: nil,
threads: nil
}
iex> x |> Hui.URL.encode_query
"facet=true&facet.field=year&facet.field=type"
iex> %Hui.H{fl: "title,words", usePhraseHighlighter: true, fragsize: 250, snippets: 3 } |> Hui.URL.encode_query
"hl.fl=title%2Cwords&hl.fragsize=250&hl=true&hl.snippets=3&hl.usePhraseHighlighter=true"
See `Hui.Q`, `Hui.F`, `Hui.F.Range`, `Hui.F.Interval` for more examples
"""
@spec encode_query(url_params) :: binary
# Clause order is significant here: the struct-specific clauses must appear
# before the generic is_map/1 clause, and the list clauses dispatch on the
# :__struct__ tag left at the head after Map.to_list/1.
# NOTE(review): the Hui.H3 clause relies on Enum.sort/1 placing the
# {:__struct__, Hui.H3} tuple first (":__struct__" sorts before the other
# field atoms) so the sorted list still reaches the Hui.H3 list clause —
# confirm with a test.
def encode_query(%Hui.H3{} = url_params), do: encode_query(url_params |> Map.to_list |> Enum.sort)
def encode_query(%Hui.F.Range{} = url_params), do: encode_query(url_params |> Map.to_list, "facet.range", url_params.range, url_params.per_field)
def encode_query(%Hui.F.Interval{} = url_params), do: encode_query(url_params |> Map.to_list, "facet.interval", url_params.interval, url_params.per_field)
def encode_query(url_params) when is_map(url_params), do: encode_query(url_params |> Map.to_list)
def encode_query([{:__struct__, Hui.Q} | tail]), do: tail |> encode_query
def encode_query([{:__struct__, Hui.F} | tail]), do: Enum.map(tail, &prefix/1) |> encode_query
def encode_query([{:__struct__, Hui.H} | tail]), do: Enum.map(tail, &prefix(&1, "hl")) |> encode_query
def encode_query([{:__struct__, Hui.H1} | tail]), do: Enum.map(tail, &prefix(&1, "hl")) |> encode_query
def encode_query([{:__struct__, Hui.H2} | tail]), do: Enum.map(tail, &prefix(&1, "hl")) |> encode_query
def encode_query([{:__struct__, Hui.H3} | tail]), do: Enum.map(tail, &prefix(&1, "hl")) |> encode_query
def encode_query([{:__struct__, Hui.S} | tail]), do: Enum.map(tail, &prefix(&1, "suggest")) |> encode_query
def encode_query([{:__struct__, Hui.Sp} | tail]), do: Enum.map(tail, &prefix(&1, "spellcheck")) |> encode_query
def encode_query([{:__struct__, Hui.M} | tail]), do: Enum.map(tail, &prefix(&1, "mlt")) |> encode_query
def encode_query(enumerable) when is_list(enumerable), do: Enum.reject(enumerable, &invalid_param?/1) |> Enum.map_join("&", &encode/1)
def encode_query(_), do: ""
# 4-arity variant: applies the Solr per-field prefix syntax used by
# facet range/interval structs (e.g. "f.<field>.facet.range.gap").
def encode_query([{:__struct__, _struct} | tail], prefix, field, per_field), do: Enum.map(tail, &prefix(&1, prefix, field, per_field)) |> encode_query
@doc "Returns the string representation (URL path) of the given `t:Hui.URL.t/0` struct."
@spec to_string(t) :: binary
defdelegate to_string(uri), to: String.Chars.Hui.URL
# A list value expands to repeated "key=value" pairs joined with "&".
defp encode({k,v}) when is_list(v), do: Enum.reject(v, &invalid_param?/1) |> Enum.map_join("&", &encode({k,&1}))
defp encode({k,v}) when is_binary(v), do: "#{k}=#{URI.encode_www_form(v)}"
# when the value is also a struct, e.g. %Hui.F.Range/Interval{}
defp encode({_k,v}) when is_map(v), do: encode_query(v)
defp encode({k,v}), do: "#{k}=#{v}"
defp encode([]), do: ""
defp encode(v), do: v
# kv pairs with empty, nil or [] values are dropped, as is the
# :__struct__ bookkeeping tuple
defp invalid_param?(""), do: true
defp invalid_param?(nil), do: true
defp invalid_param?([]), do: true
defp invalid_param?(x) when is_tuple(x), do: is_nil(elem(x,1)) or elem(x,1) == "" or elem(x, 1) == [] or elem(x,0) == :__struct__
defp invalid_param?(_x), do: false
# render kv pairs according to Solr prefix /per field syntax
# e.g. `field: "year"` to `"facet.field": "year"`, `f.[field].facet.gap`
# NOTE(review): a standalone prefix/1 clause next to prefix/2..4 with default
# arguments may conflict ("def prefix/1 conflicts with defaults") on newer
# Elixir versions — confirm this still compiles on the project's Elixir.
defp prefix({k,v}) when k == :facet, do: {k,v}
defp prefix({k,v}, prefix \\ "facet", field \\ "", per_field \\ false) do
case {k,prefix} do
{:facet, _} -> {:facet, v}
{:hl, _} -> {:hl, v}
{:suggest, _} -> {:suggest, v}
{:spellcheck, _} -> {:spellcheck, v}
{:mlt, _} -> {:mlt, v}
{:range, "facet.range"} -> {:"facet.range", v} # render the same way despite per field setting
{:method, "facet.range"} -> {:"facet.range.method", v} # ditto
{:interval, "facet.interval"} -> {:"facet.interval", v} # ditto
{:per_field, _} -> {k, nil} # do not render this field
{:per_field_method, _} -> if per_field, do: {:"f.#{field}.#{prefix}.method", v}, else: {k, nil}
{_, _} -> if per_field, do: {:"f.#{field}.#{prefix}.#{k}", v}, else: {:"#{prefix}.#{k}", v}
end
end
end
# `String.Chars` implementation for %Hui.URL{} so Kernel.to_string/1 and
# string interpolation render the endpoint as "<url>/<handler>".
defimpl String.Chars, for: Hui.URL do
  def to_string(%Hui.URL{url: url, handler: handler}) do
    # Build as iodata and flatten once; interpolation is avoided because it
    # would silently render nil fields as "" instead of raising.
    [url, "/", handler]
    |> IO.iodata_to_binary()
  end
end
|
lib/hui/url.ex
| 0.917497
| 0.680043
|
url.ex
|
starcoder
|
defmodule AWS.CodeStar do
@moduledoc """
AWS CodeStar
This is the API reference for AWS CodeStar.
This reference provides descriptions of the operations and data types for the
AWS CodeStar API along with usage examples.
You can use the AWS CodeStar API to work with:
Projects and their resources, by calling the following:
* `DeleteProject`, which deletes a project.
* `DescribeProject`, which lists the attributes of a project.
* `ListProjects`, which lists all projects associated with your AWS
account.
* `ListResources`, which lists the resources associated with a
project.
* `ListTagsForProject`, which lists the tags associated with a
project.
* `TagProject`, which adds tags to a project.
* `UntagProject`, which removes tags from a project.
* `UpdateProject`, which updates the attributes of a project.
Teams and team members, by calling the following:
* `AssociateTeamMember`, which adds an IAM user to the team for a
project.
* `DisassociateTeamMember`, which removes an IAM user from the team
for a project.
* `ListTeamMembers`, which lists all the IAM users in the team for a
project, including their roles and attributes.
* `UpdateTeamMember`, which updates a team member's attributes in a
project.
Users, by calling the following:
* `CreateUserProfile`, which creates a user profile that contains
data associated with the user across all projects.
* `DeleteUserProfile`, which deletes all user profile information
across all projects.
* `DescribeUserProfile`, which describes the profile of a user.
* `ListUserProfiles`, which lists all user profiles.
* `UpdateUserProfile`, which updates the profile for a user.
"""
@doc """
Adds an IAM user to the team for an AWS CodeStar project.
"""
def associate_team_member(client, input, options \\ []) do
request(client, "AssociateTeamMember", input, options)
end
@doc """
Creates a project, including project resources.
This action creates a project based on a submitted project request. A set of
source code files and a toolchain template file can be included with the project
request. If these are not provided, an empty project is created.
"""
def create_project(client, input, options \\ []) do
request(client, "CreateProject", input, options)
end
@doc """
Creates a profile for a user that includes user preferences, such as the display
name and email address assocciated with the user, in AWS CodeStar.
The user profile is not project-specific. Information in the user profile is
displayed wherever the user's information appears to other users in AWS
CodeStar.
"""
def create_user_profile(client, input, options \\ []) do
request(client, "CreateUserProfile", input, options)
end
@doc """
Deletes a project, including project resources.
Does not delete users associated with the project, but does delete the IAM roles
that allowed access to the project.
"""
def delete_project(client, input, options \\ []) do
request(client, "DeleteProject", input, options)
end
@doc """
Deletes a user profile in AWS CodeStar, including all personal preference data
associated with that profile, such as display name and email address.
It does not delete the history of that user, for example the history of commits
made by that user.
"""
def delete_user_profile(client, input, options \\ []) do
request(client, "DeleteUserProfile", input, options)
end
@doc """
Describes a project and its resources.
"""
def describe_project(client, input, options \\ []) do
request(client, "DescribeProject", input, options)
end
@doc """
Describes a user in AWS CodeStar and the user attributes across all projects.
"""
def describe_user_profile(client, input, options \\ []) do
request(client, "DescribeUserProfile", input, options)
end
@doc """
Removes a user from a project.
Removing a user from a project also removes the IAM policies from that user that
allowed access to the project and its resources. Disassociating a team member
does not remove that user's profile from AWS CodeStar. It does not remove the
user from IAM.
"""
def disassociate_team_member(client, input, options \\ []) do
request(client, "DisassociateTeamMember", input, options)
end
@doc """
Lists all projects in AWS CodeStar associated with your AWS account.
"""
def list_projects(client, input, options \\ []) do
request(client, "ListProjects", input, options)
end
@doc """
Lists resources associated with a project in AWS CodeStar.
"""
def list_resources(client, input, options \\ []) do
request(client, "ListResources", input, options)
end
@doc """
Gets the tags for a project.
"""
def list_tags_for_project(client, input, options \\ []) do
request(client, "ListTagsForProject", input, options)
end
@doc """
Lists all team members associated with a project.
"""
def list_team_members(client, input, options \\ []) do
request(client, "ListTeamMembers", input, options)
end
@doc """
Lists all the user profiles configured for your AWS account in AWS CodeStar.
"""
def list_user_profiles(client, input, options \\ []) do
request(client, "ListUserProfiles", input, options)
end
@doc """
Adds tags to a project.
"""
def tag_project(client, input, options \\ []) do
request(client, "TagProject", input, options)
end
@doc """
Removes tags from a project.
"""
def untag_project(client, input, options \\ []) do
request(client, "UntagProject", input, options)
end
@doc """
Updates a project in AWS CodeStar.
"""
def update_project(client, input, options \\ []) do
request(client, "UpdateProject", input, options)
end
@doc """
Updates a team member's attributes in an AWS CodeStar project.
For example, you can change a team member's role in the project, or change
whether they have remote access to project resources.
"""
def update_team_member(client, input, options \\ []) do
request(client, "UpdateTeamMember", input, options)
end
@doc """
Updates a user's profile in AWS CodeStar.
The user profile is not project-specific. Information in the user profile is
displayed wherever the user's information appears to other users in AWS
CodeStar.
"""
def update_user_profile(client, input, options \\ []) do
request(client, "UpdateUserProfile", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
# Builds, signs (SigV4), and sends an AWS JSON-1.1 request for the given
# CodeStar `action`.
defp request(client, action, input, options) do
  # Every operation in this module targets the "codestar" service endpoint.
  client = %{client | service: "codestar"}
  host = build_host("codestar", client)
  url = build_url(host, client)

  headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    # The target header selects the API action within the service.
    {"X-Amz-Target", "CodeStar_20170419.#{action}"}
  ]

  payload = encode!(client, input)
  # Sign only after the payload is final: the signature covers the body.
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
  post(client, url, payload, headers, options)
end
# Sends the signed request and normalizes the response:
#   - HTTP 200 with a non-empty body is decoded as JSON; an empty body
#     yields `nil` for the decoded part
#   - any other status becomes `{:error, {:unexpected_response, response}}`
#   - transport-level errors are passed through unchanged
defp post(client, url, payload, headers, options) do
  case AWS.Client.request(client, :post, url, payload, headers, options) do
    {:ok, %{status_code: 200, body: body} = response} ->
      # `if` without else: body stays nil when the response body is empty.
      body = if body != "", do: decode!(client, body)
      {:ok, body, response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    error = {:error, _reason} ->
      error
  end
end
# Resolves the host name for the request.
# The "local" region is special-cased (development/testing): it uses the
# configured endpoint verbatim, or "localhost" when none is set.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
  endpoint
end

defp build_host(_endpoint_prefix, %{region: "local"}) do
  "localhost"
end

# Normal case: "<prefix>.<region>.<endpoint suffix>",
# e.g. "codestar.us-east-1.amazonaws.com".
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Builds the base request URL from the client's protocol and port and the
# resolved host.
defp build_url(host, %{:proto => proto, :port => port}) do
  "#{proto}://#{host}:#{port}/"
end
# Encodes the request payload as JSON via the client's configured encoder.
defp encode!(client, payload) do
  AWS.Client.encode!(client, payload, :json)
end
# Decodes a JSON response body via the client's configured decoder.
defp decode!(client, payload) do
  AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/code_star.ex
| 0.854915
| 0.571348
|
code_star.ex
|
starcoder
|
defmodule Slug do
  @moduledoc """
  Transform strings from any language into slugs.

  It works by transliterating Unicode characters into alphanumeric strings (e.g.
  `字` into `zi`). All punctuation is stripped and whitespace between words are
  replaced by hyphens.
  """

  @doc """
  Returns `string` as a slug or `nil` if it failed.

  ## Options

    * `:separator` - Replace whitespaces with this string. Leading, trailing or
      repeated whitespaces are trimmed. Defaults to `-`.
    * `:lowercase` - Set to `false` if you wish to retain capitalization.
      Defaults to `true`.
    * `:truncate` - Truncates slug at this character length, shortened to the
      nearest word.
    * `:ignore` - Pass in a string (or list of strings) of characters to ignore.

  ## Examples

      iex> Slug.slugify("Hello, World!")
      "hello-world"

      iex> Slug.slugify("Madam, I'm Adam", separator: "")
      "madamimadam"

      iex> Slug.slugify("StUdLy CaPs", lowercase: false)
      "StUdLy-CaPs"

      iex> Slug.slugify("Call me maybe", truncate: 10)
      "call-me"

      iex> Slug.slugify("你好,世界", ignore: ["你", "好"])
      "你好-shi-jie"
  """
  @doc since: "1.0.0"
  @spec slugify(String.t(), Keyword.t()) :: String.t() | nil
  def slugify(string, opts \\ []) do
    separator = get_separator(opts)
    lowercase? = Keyword.get(opts, :lowercase, true)
    truncate_length = get_truncate_length(opts)
    # The separator's own characters must survive transliteration, in
    # addition to any user-supplied ignored characters.
    ignored_codepoints = to_charlist(separator) ++ get_ignored_codepoints(opts)
    # Character class matching the separator, whitespace, and punctuation —
    # these delimit the words of the slug.
    regex = "[" <> Regex.escape(separator) <> "[:space:][:punct:]]"

    string
    |> String.graphemes()
    |> Enum.map_join(&transliterate(&1, ignored_codepoints))
    |> String.replace("'", "")
    |> String.replace("`", "")
    |> String.split(Regex.compile!(regex, [:unicode]), trim: true)
    |> Enum.filter(&(&1 != ""))
    |> join(separator, truncate_length)
    |> lower_case(lowercase?)
    |> validate_slug()
  end

  # Resolves the :separator option: a non-negative integer is treated as a
  # Unicode codepoint, a binary is used as-is, anything else defaults to "-".
  defp get_separator(opts) do
    separator = Keyword.get(opts, :separator)

    case separator do
      separator when is_integer(separator) and separator >= 0 ->
        <<separator::utf8>>

      separator when is_binary(separator) ->
        separator

      _ ->
        "-"
    end
  end

  # Resolves the :truncate option: non-positive integers collapse to 0,
  # non-integers mean "no truncation" (nil).
  defp get_truncate_length(opts) do
    length = Keyword.get(opts, :truncate)

    case length do
      length when is_integer(length) and length <= 0 ->
        0

      length when is_integer(length) ->
        length

      _ ->
        nil
    end
  end

  # Normalizes the :ignore option (a string or a list of strings) into a
  # charlist of codepoints that must not be transliterated.
  defp get_ignored_codepoints(opts) do
    characters_to_ignore = Keyword.get(opts, :ignore)

    string =
      case characters_to_ignore do
        characters when is_list(characters) ->
          Enum.join(characters)

        characters when is_binary(characters) ->
          characters

        _ ->
          ""
      end

    normalize_to_codepoints(string)
  end

  # With no maximum length, joining is a plain Enum.join/2.
  defp join(words, separator, nil), do: Enum.join(words, separator)

  # Joins words until adding the next word (plus separator) would exceed
  # maximum_length — i.e. truncation happens at a word boundary.
  defp join(words, separator, maximum_length) do
    words
    |> Enum.reduce_while({[], 0}, fn word, {result, length} ->
      new_length =
        case length do
          0 -> String.length(word)
          _ -> length + String.length(separator) + String.length(word)
        end

      cond do
        new_length > maximum_length ->
          {:halt, {result, length}}

        new_length == maximum_length ->
          {:halt, {[word | result], new_length}}

        true ->
          {:cont, {[word | result], new_length}}
      end
    end)
    |> elem(0)
    |> Enum.reverse()
    |> Enum.join(separator)
  end

  defp lower_case(string, false), do: string
  defp lower_case(string, true), do: String.downcase(string)

  # An empty result is considered a failure and becomes nil.
  defp validate_slug(""), do: nil
  defp validate_slug(string), do: string

  # NFC-normalizes the string before extracting codepoints, so ignored
  # characters compare consistently regardless of input normalization.
  defp normalize_to_codepoints(string) do
    string
    |> :unicode.characters_to_nfc_binary()
    |> String.to_charlist()
  end

  # Bodiless head declaring the default value for `acc`.
  defp transliterate(string, acc \\ [], ignored_codepoints)

  # Binary input: convert to codepoints first, then recurse on the list.
  defp transliterate(string, acc, ignored_codepoints) when is_binary(string) do
    string
    |> normalize_to_codepoints()
    |> transliterate(acc, ignored_codepoints)
  end

  # Base case: the accumulator was built in reverse; flip and join it.
  defp transliterate([], acc, _ignored_codepoints) do
    acc
    |> Enum.reverse()
    |> Enum.join()
  end

  @alphanumerics Enum.concat([?A..?Z, ?a..?z, ?0..?9])

  # ASCII letters and digits pass through unchanged.
  defp transliterate([codepoint | rest], acc, ignored_codepoints)
       when codepoint in @alphanumerics do
    transliterate(rest, [<<codepoint>> | acc], ignored_codepoints)
  end

  # Transliteration lookup table, loaded once at compile time from
  # priv/data.etf of the :slugify application.
  @replacements Path.join(:code.priv_dir(:slugify), "data.etf")
                |> File.read!()
                |> :erlang.binary_to_term()

  # Ignored codepoints are kept verbatim; other codepoints are replaced via
  # the lookup table, or dropped entirely when no replacement is known.
  defp transliterate([codepoint | rest], acc, ignored_codepoints) do
    if codepoint in ignored_codepoints do
      transliterate(rest, [<<codepoint::utf8>> | acc], ignored_codepoints)
    else
      case Map.get(@replacements, codepoint) do
        nil ->
          transliterate(rest, acc, ignored_codepoints)

        replacement ->
          transliterate(rest, [replacement | acc], ignored_codepoints)
      end
    end
  end
end
|
lib/slug.ex
| 0.839257
| 0.577227
|
slug.ex
|
starcoder
|
defmodule ExULID.ULID do
  @moduledoc """
  This module provides data encoding and decoding functions
  according to [ULID](https://github.com/ulid/spec).
  """
  import ExULID.Crockford

  # Largest timestamp encodable in the ULID's 48-bit time component.
  @max_time 281474976710655 # (2 ^ 48) - 1

  @doc """
  Generates a ULID.
  """
  def generate do
    # :millisecond is the current Erlang time unit name; :milli_seconds is a
    # deprecated alias.
    :millisecond
    |> :os.system_time()
    |> generate()
  end

  @doc """
  Generates a ULID at the given timestamp (in milliseconds).
  """
  def generate(time) when not is_integer(time) do
    {:error, "time must be an integer, got #{inspect(time)}"}
  end

  def generate(time) when time < 0 do
    {:error, "time cannot be negative, got #{inspect(time)}"}
  end

  def generate(time) when time > @max_time do
    {:error, "time cannot be >= 2^48 milliseconds, got #{inspect(time)}"}
  end

  def generate(time) do
    # 80 bits (10 bytes) of randomness, per the ULID spec.
    rand = :crypto.strong_rand_bytes(10)
    # 10 base32 characters of time + 16 of randomness = 26 characters total.
    encode(time, 10) <> encode(rand, 16)
  end

  # Crockford-base32 encodes `data`, normalized to exactly `str_length`
  # characters. Encoding errors are propagated as {:error, _}.
  defp encode(data, str_length) do
    case encode32(data) do
      {:ok, encoded} ->
        format_encoded(encoded, str_length)

      {:error, _} = error ->
        error
    end
  end

  # Normalizes an encoded string to a fixed length: over-long values are
  # trimmed from the left, short values are zero-padded on the left.
  defp format_encoded(encoded, str_length) do
    case String.length(encoded) do
      n when n > str_length ->
        String.slice(encoded, -str_length..-1)

      n when n < str_length ->
        String.pad_leading(encoded, str_length, "0")

      ^str_length ->
        encoded
    end
  end

  @doc """
  Decodes the given ULID into a tuple of `{time, randomness}`,
  where `time` is the embedded unix timestamp in milliseconds.
  """
  def decode(<<time::bytes-size(10), id::bytes-size(16)>>) do
    case decode_time(time) do
      {:error, _} = error ->
        error

      decoded_time ->
        {decoded_time, id}
    end
  end

  # Any binary that is not exactly 10 + 16 bytes cannot be a ULID.
  def decode(ulid) do
    {:error, "the ULID must be 26 characters long, got #{inspect(ulid)}"}
  end

  # Decodes the 10-character time component back into an integer timestamp.
  defp decode_time(ulid) do
    decoded =
      ulid
      |> String.slice(0..9)
      |> decode32()

    case decoded do
      {:ok, decoded} ->
        binary_to_time(decoded)

      {:error, _} = error ->
        error
    end
  end

  # Rejects decoded time that is greater than or equal to 2 ^ 48
  # because it would not have been encodable in the first place.
  defp binary_to_time(binary) do
    binary
    |> :binary.decode_unsigned()
    |> validate_time()
  end

  defp validate_time(decoded) when is_integer(decoded) and decoded > @max_time do
    {:error, "the decoded time cannot be greater than 2^48, got #{inspect(decoded)}"}
  end

  defp validate_time(decoded) when is_integer(decoded), do: decoded
end
|
lib/ex_ulid/ulid.ex
| 0.829803
| 0.626653
|
ulid.ex
|
starcoder
|
defmodule Phoenix.HTML.Format do
  @moduledoc """
  Helpers related to formatting text.
  """

  @doc ~S"""
  Returns text transformed into HTML using simple formatting rules.

  Two or more consecutive newlines `\n\n` are considered as a paragraph
  and text between them is wrapped in `<p>` tags.
  One newline `\n` is considered as a linebreak and a `<br>` tag is inserted.

  ## Examples

      iex> text_to_html("Hello\n\nWorld") |> safe_to_string
      "<p>Hello</p>\n<p>World</p>\n"

      iex> text_to_html("Hello\nWorld") |> safe_to_string
      "<p>Hello<br>\nWorld</p>\n"

      iex> opts = [wrapper_tag: :div, attributes: [class: "p"]]
      ...> text_to_html("Hello\n\nWorld", opts) |> safe_to_string
      "<div class=\"p\">Hello</div>\n<div class=\"p\">World</div>\n"

  ## Options

    * `:escape` - if `false` does not html escape input (default: `true`)
    * `:wrapper_tag` - tag to wrap each paragraph (default: `:p`)
    * `:attributes` - html attributes of the wrapper tag (default: `[]`)
    * `:insert_brs` - if `true` insert `<br>` for single line breaks (default: `true`)
  """
  @spec text_to_html(Phoenix.HTML.unsafe, Keyword.t) :: Phoenix.HTML.safe
  def text_to_html(string, opts \\ []) do
    escape? = Keyword.get(opts, :escape, true)
    wrapper_tag = Keyword.get(opts, :wrapper_tag, :p)
    attributes = Keyword.get(opts, :attributes, [])
    insert_brs? = Keyword.get(opts, :insert_brs, true)

    string
    # Escape the raw input first, so the tags added below are not escaped.
    |> maybe_html_escape(escape?)
    |> String.split("\n\n", trim: true)
    |> Enum.filter(&not_blank?/1)
    |> Enum.map(&wrap_paragraph(&1, wrapper_tag, attributes, insert_brs?))
    |> Phoenix.HTML.html_escape
  end

  defp maybe_html_escape(string, true), do: Plug.HTML.html_escape(string)
  defp maybe_html_escape(string, false), do: string

  # A paragraph is blank when it consists only of spaces and newlines.
  defp not_blank?(" " <> rest), do: not_blank?(rest)
  defp not_blank?("\n" <> rest), do: not_blank?(rest)
  defp not_blank?(""), do: false
  defp not_blank?(_), do: true

  # Wraps a single paragraph in the configured tag; the trailing ?\n keeps
  # the generated markup readable.
  defp wrap_paragraph(text, tag, attributes, insert_brs?) do
    [Phoenix.HTML.Tag.content_tag(tag, insert_brs(text, insert_brs?), attributes), ?\n]
  end

  # With :insert_brs disabled, single newlines collapse into spaces.
  defp insert_brs(text, false) do
    text
    |> String.split("\n", trim: true)
    |> Enum.intersperse(?\s)
    |> Phoenix.HTML.raw
  end

  # With :insert_brs enabled, each single newline becomes a <br> tag.
  defp insert_brs(text, true) do
    text
    |> String.split("\n", trim: true)
    |> Enum.map(&Phoenix.HTML.raw/1)
    |> Enum.intersperse([Phoenix.HTML.Tag.tag(:br), ?\n])
  end
end
|
golf13/deps/phoenix_html/lib/phoenix_html/format.ex
| 0.917778
| 0.406126
|
format.ex
|
starcoder
|
defmodule CubDB do
@moduledoc """
`CubDB` is an embedded key-value database written in the Elixir language. It
runs locally, it is schema-less, and backed by a single file.
## Features
- Both keys and values can be any arbitrary Elixir (or Erlang) term.
- Simple `get/3`, `put/3`, and `delete/2` operations
- Arbitrary selection of ranges of entries sorted by key with `select/2`
- Atomic transactions with `put_multi/2`, `get_and_update_multi/4`, etc.
- Concurrent read operations, that do not block nor are blocked by writes
- Unexpected shutdowns won't corrupt the database or break atomicity
- Manual or automatic compaction to optimize space usage
To ensure consistency, performance, and robustness to data corruption, `CubDB`
database file uses an append-only, immutable B-tree data structure. Entries
are never changed in-place, and read operations are performend on immutable
snapshots.
More information can be found in the following sections:
- [Frequently Asked Questions](faq.html)
- [How To](howto.html)
## Usage
Start `CubDB` by specifying a directory for its database file (if not existing,
it will be created):
{:ok, db} = CubDB.start_link("my/data/directory")
Alternatively, to specify more options, a keyword list can be passed:
{:ok, db} = CubDB.start_link(data_dir: "my/data/directory", auto_compact: true)
_Important: avoid starting multiple `CubDB` processes on the same data
directory. Only one `CubDB` process should use a specific data directory at any
time._
`CubDB` functions can be called concurrently from different processes, but it
is important that only one `CubDB` process is started on the same data
directory.
The `get/2`, `put/3`, and `delete/2` functions work as you probably expect:
CubDB.put(db, :foo, "some value")
#=> :ok
CubDB.get(db, :foo)
#=> "some value"
CubDB.delete(db, :foo)
#=> :ok
CubDB.get(db, :foo)
#=> nil
Both keys and values can be any Elixir (or Erlang) term:
CubDB.put(db, {"some", 'tuple', :key}, %{foo: "a map value"})
#=> :ok
CubDB.get(db, {"some", 'tuple', :key})
#=> %{foo: "a map value"}
Multiple operations can be performed as an atomic transaction with
`put_multi/2`, `delete_multi/2`, and the other `[...]_multi` functions:
CubDB.put_multi(db, [a: 1, b: 2, c: 3, d: 4, e: 5, f: 6, g: 7, h: 8])
#=> :ok
Range of entries sorted by key are retrieved using `select/2`:
CubDB.select(db, min_key: :b, max_key: :e)
#=> {:ok, [b: 2, c: 3, d: 4, e: 5]}
But `select/2` can do much more than that. It can apply a pipeline of operations
(`map`, `filter`, `take`, `drop` and more) to the selected entries, it can
select the entries in normal or reverse order, and it can `reduce` the result
using an arbitrary function:
# Take the sum of the last 3 even values:
CubDB.select(db,
# select entries in reverse order
reverse: true,
# apply a pipeline of operations to the entries
pipe: [
# map each entry discarding the key and keeping only the value
map: fn {_key, value} -> value end,
# filter only even integers
filter: fn value -> is_integer(value) && Integer.is_even(value) end,
# take the first 3 values
take: 3
],
# reduce the result to a sum
reduce: fn n, sum -> sum + n end
)
#=> {:ok, 18}
Because `CubDB` uses an immutable data structure, write operations cause the
data file to grow. When necessary, `CubDB` runs a compaction operation to
optimize the file size and reclaim disk space. Compaction runs in the
background, without blocking other operations. By default, `CubDB` runs
compaction automatically when necessary (see documentation of
`set_auto_compact/2` for details). Alternatively, it can be started manually
by calling `compact/1`.
"""
@doc """
Returns a specification to start this module under a supervisor.

The default options listed in `Supervisor` are used.
"""
use GenServer

alias CubDB.Btree
alias CubDB.CatchUp
alias CubDB.CleanUp
alias CubDB.Compactor
alias CubDB.Reader
alias CubDB.Store

# File extensions for the live database file and for the temporary file
# written while a compaction is in progress.
@db_file_extension ".cub"
@compaction_file_extension ".compact"

# Default auto-compaction trigger: {minimum writes since last compaction,
# minimum dirt factor}.
@auto_compact_defaults {100, 0.25}

@type key :: any
@type value :: any
@type entry :: {key, value}
@type option :: {:auto_compact, {pos_integer, number} | boolean} | {:auto_file_sync, boolean}

# Operations accepted in the :pipe option of select/2.
@type pipe_operation ::
        {:map, fun}
        | {:filter, fun}
        | {:take, non_neg_integer}
        | {:drop, non_neg_integer}
        | {:take_while, fun}
        | {:drop_while, fun}

@type select_option ::
        {:min_key, any}
        | {:max_key, any}
        | {:min_key_inclusive, boolean}
        | {:max_key_inclusive, boolean}
        | {:pipe, [pipe_operation]}
        | {:reverse, boolean}
        | {:reduce, fun | {any, fun}}
        | {:timeout, timeout}
defmodule State do
  @moduledoc false

  # Internal server state of a CubDB process.
  #
  #   - btree: the current live B-tree
  #   - compactor / catch_up: pids of background operations (nil when idle)
  #   - clean_up_pending: whether a clean-up is deferred — presumably until
  #     readers/compaction release old files; TODO confirm against callbacks
  #   - old_btrees: superseded B-trees kept around (likely for in-flight
  #     readers — not visible in this chunk)
  #   - readers: in-progress reads keyed by reference; the value pairs a
  #     string with another reference — NOTE(review): exact semantics not
  #     visible here
  @type t :: %CubDB.State{
          btree: Btree.t(),
          data_dir: String.t(),
          task_supervisor: pid,
          compactor: pid | nil,
          catch_up: pid | nil,
          clean_up: pid,
          clean_up_pending: boolean,
          old_btrees: [Btree.t()],
          readers: %{required(reference) => {String.t(), reference}},
          auto_compact: {pos_integer, number} | false,
          auto_file_sync: boolean,
          subs: list(pid)
        }

  @enforce_keys [:btree, :data_dir, :clean_up]
  defstruct [
    :task_supervisor,
    btree: nil,
    data_dir: nil,
    compactor: nil,
    catch_up: nil,
    clean_up: nil,
    clean_up_pending: false,
    old_btrees: [],
    readers: %{},
    auto_compact: true,
    auto_file_sync: true,
    subs: []
  ]
end
@spec start_link(
        String.t()
        | [option | {:data_dir, String.t()} | GenServer.option()]
      ) :: GenServer.on_start()
@doc """
Starts the `CubDB` database process linked to the current process.

The argument is a keyword list of options:

  - `data_dir`: the directory path where the database files will be stored.
    This option is required. If the directory does not exist, it will be
    created. Only one `CubDB` instance can run per directory, so if you run
    several databases, they should each use their own separate data directory.

  - `auto_compact`: whether to perform compaction automatically. It defaults
    to `true`. See `set_auto_compact/2` for the possible values

  - `auto_file_sync`: whether to force flush the disk buffer on each write. It
    defaults to `true`. If set to `false`, write performance is faster, but
    durability of writes is not strictly guaranteed. See `set_auto_file_sync/2`
    for details.

`GenServer` options like `name` and `timeout` can also be given, and are
forwarded to `GenServer.start_link/3` as the third argument.

If only the `data_dir` is specified, it is possible to pass it as a single
string argument.

## Examples

    # Passing only the data dir
    {:ok, db} = CubDB.start_link("some/data/dir")

    # Passing data dir and other options
    {:ok, db} = CubDB.start_link(data_dir: "some/data/dir", auto_compact: true, name: :db)
"""
def start_link(data_dir_or_options) do
  # Split CubDB-specific options from GenServer options; invalid input is
  # returned as an error tuple instead of starting the server.
  case split_options(data_dir_or_options) do
    {:ok, {data_dir, options, gen_server_options}} ->
      GenServer.start_link(__MODULE__, [data_dir, options], gen_server_options)

    error ->
      error
  end
end
@doc """
Same as `start_link/1`, but takes the data directory and the option list as
two separate arguments.
"""
def start_link(data_dir, options),
  do: start_link(Keyword.merge(options, data_dir: data_dir))
@spec start(String.t() | [option | {:data_dir, String.t()} | GenServer.option()]) ::
        GenServer.on_start()
@doc """
Starts the `CubDB` database without a link.

See `start_link/2` for more information about options.
"""
def start(data_dir_or_options) do
  case split_options(data_dir_or_options) do
    {:ok, {data_dir, options, gen_server_options}} ->
      # Same as start_link/1, except the process is not linked to the caller.
      GenServer.start(__MODULE__, [data_dir, options], gen_server_options)

    error ->
      error
  end
end
@doc """
Same as `start/1`, but takes the data directory and the option list as two
separate arguments.
"""
def start(data_dir, options),
  do: start(Keyword.merge(options, data_dir: data_dir))
@spec stop(GenServer.server(), term(), timeout()) :: :ok
@doc """
Stops the `CubDB` database process synchronously, with the given `reason`
and `timeout` (see `GenServer.stop/3` for details).
"""
def stop(db, reason \\ :normal, timeout \\ :infinity),
  do: GenServer.stop(db, reason, timeout)
@spec get(GenServer.server(), key, value) :: value
@doc """
Returns the value stored under `key`, or `default` (`nil` unless given)
when no entry with that key exists.
"""
def get(db, key, default \\ nil), do: perform_read(db, {:get, key, default})
@spec fetch(GenServer.server(), key) :: {:ok, value} | :error
@doc """
Looks up `key` in the database.

Returns `{:ok, value}` when an entry with the given `key` exists, and
`:error` when it does not.
"""
def fetch(db, key), do: perform_read(db, {:fetch, key})
@spec has_key?(GenServer.server(), key) :: boolean
@doc """
Checks whether the database contains an entry with the given `key`.
"""
def has_key?(db, key), do: perform_read(db, {:has_key?, key})
@spec select(GenServer.server(), [select_option]) ::
        {:ok, any} | {:error, Exception.t()}
@doc """
Selects a range of entries from the database, and optionally performs a
pipeline of operations on them.

It returns `{:ok, result}` if successful, or `{:error, exception}` if an
exception is raised.

## Options

The `min_key` and `max_key` specify the range of entries that are selected. By
default, the range is inclusive, so all entries that have a key greater or
equal than `min_key` and less or equal then `max_key` are selected:

    # Select all entries where "a" <= key <= "d"
    CubDB.select(db, min_key: "b", max_key: "d")

The range boundaries can be excluded by setting `min_key_inclusive` or
`max_key_inclusive` to `false`:

    # Select all entries where "a" <= key < "d"
    CubDB.select(db, min_key: "b", max_key: "d", max_key_inclusive: false)

Any of `:min_key` and `:max_key` can be omitted, to leave the range
open-ended.

    # Select entries where key <= "a"
    CubDB.select(db, max_key: "a")

As `nil` is a valid key, setting `min_key` or `max_key` to `nil` does NOT
leave the range open ended:

    # Select entries where nil <= key <= "a"
    CubDB.select(db, min_key: nil, max_key: "a")

The `reverse` option, when set to true, causes the entries to be selected and
traversed in reverse order.

The `pipe` option specifies an optional list of operations performed
sequentially on the selected entries. The given order of operations is
respected. The available operations, specified as tuples, are:

  - `{:filter, fun}` filters entries for which `fun` returns a truthy value

  - `{:map, fun}` maps each entry to the value returned by the function `fun`

  - `{:take, n}` takes the first `n` entries

  - `{:drop, n}` skips the first `n` entries

  - `{:take_while, fun}` takes entries while `fun` returns a truthy value

  - `{:drop_while, fun}` skips entries while `fun` returns a truthy value

Note that, when selecting a key range, specifying `min_key` and/or `max_key`
is more performant than using `{:filter, fun}` or `{:take_while | :drop_while,
fun}`, because `min_key` and `max_key` avoid loading unnecessary entries from
disk entirely.

The `reduce` option specifies how the selected entries are aggregated. If
`reduce` is omitted, the entries are returned as a list. If `reduce` is a
function, it is used to reduce the collection of entries. If `reduce` is a
tuple, the first element is the starting value of the reduction, and the
second is the reducing function.

The `timeout` option specifies a timeout (in milliseconds or `:infinity`,
defaulting to `:infinity`) after which the operation will fail.

## Examples

To select all entries with keys between `:a` and `:c` as a list of `{key,
value}` entries we can do:

    {:ok, entries} = CubDB.select(db, min_key: :a, max_key: :c)

If we want to get all entries with keys between `:a` and `:c`, with `:c`
excluded, we can do:

    {:ok, entries} = CubDB.select(db,
      min_key: :a, max_key: :c, max_key_inclusive: false)

To select the last 3 entries, we can do:

    {:ok, entries} = CubDB.select(db, reverse: true, pipe: [take: 3])

If we want to obtain the sum of the first 10 positive numeric values
associated to keys from `:a` to `:f`, we can do:

    {:ok, sum} = CubDB.select(db,
      min_key: :a,
      max_key: :f,
      pipe: [
        map: fn {_key, value} -> value end, # map values
        filter: fn n -> is_number(n) and n > 0 end, # only positive numbers
        take: 10, # take only the first 10 entries in the range
      ],
      reduce: fn n, sum -> sum + n end # reduce to the sum of selected values
    )
"""
def select(db, options \\ []) when is_list(options) do
  # The :timeout option is extracted here and passed alongside the read
  # request; the full option list still travels with the request.
  timeout = Keyword.get(options, :timeout, :infinity)
  perform_read(db, {:select, options}, timeout)
end
@spec size(GenServer.server()) :: pos_integer
@doc """
Returns how many entries are currently stored in the database.
"""
def size(db), do: GenServer.call(db, :size, :infinity)
@spec dirt_factor(GenServer.server()) :: float
@doc """
Returns the dirt factor.

The dirt factor is a number, ranging from 0 to 1, giving an indication about
the amount of overhead disk space (or "dirt") that can be cleaned up with a
compaction operation. A value of 0 means that there is no overhead, so a
compaction would have no benefit. The closer to 1 the dirt factor is, the more
can be cleaned up in a compaction operation.
"""
def dirt_factor(db) do
  GenServer.call(db, :dirt_factor, :infinity)
end
@spec put(GenServer.server(), key, value) :: :ok
@doc """
Stores `value` under `key`, overwriting any entry previously associated with
`key`.
"""
def put(db, key, value), do: GenServer.call(db, {:put, key, value}, :infinity)
@spec put_new(GenServer.server(), key, value) :: :ok | {:error, :exists}
@doc """
Stores `value` under `key` only when no entry with `key` exists yet.

When the key is already present, the existing entry is left untouched and
`{:error, :exists}` is returned.
"""
def put_new(db, key, value),
  do: GenServer.call(db, {:put_new, key, value}, :infinity)
@spec delete(GenServer.server(), key) :: :ok
@doc """
Removes the entry stored under `key`. A no-op when the key is absent.
"""
def delete(db, key), do: GenServer.call(db, {:delete, key}, :infinity)
@spec update(GenServer.server(), key, value, (value -> value)) :: :ok
@doc """
Updates the entry corresponding to `key` using the given function.

If `key` is present in the database, `fun` is invoked with the corresponding
`value`, and the result is set as the new value of `key`. If `key` is not
found, `initial` is inserted as the value of `key`.

The return value is `:ok`, or `{:error, reason}` in case an error occurs.
"""
def update(db, key, initial, fun) do
  # Built on get_and_update_multi/4 so the read-modify-write happens in one
  # atomic transaction. The `with` turns the successful {:ok, nil} into :ok
  # and lets any {:error, reason} fall through unchanged.
  with {:ok, nil} <-
         get_and_update_multi(db, [key], fn entries ->
           case Map.fetch(entries, key) do
             :error ->
               # Key absent: insert the initial value.
               {nil, %{key => initial}, []}

             {:ok, value} ->
               # Key present: replace the value with fun.(value).
               {nil, %{key => fun.(value)}, []}
           end
         end),
       do: :ok
end
@spec get_and_update(GenServer.server(), key, (value -> {any, value} | :pop)) :: {:ok, any}
@doc """
Gets the value corresponding to `key` and updates it, in one atomic transaction.

`fun` is called with the current value associated to `key` (or `nil` if not
present), and must return a two element tuple: the result value to be
returned, and the new value to be associated to `key`. `fun` may also return
`:pop`, in which case the current value is deleted and returned.

The return value is `{:ok, result}`, or `{:error, reason}` in case an error occurs.

Note that in case the value to update returned by `fun` is the same as the
original value, no write is performed to disk.
"""
def get_and_update(db, key, fun) do
  with {:ok, result} <-
         get_and_update_multi(db, [key], fn entries ->
           value = Map.get(entries, key, nil)

           case fun.(value) do
             # Pin match: the new value equals the old one, so nothing is
             # written to disk.
             {result, ^value} -> {result, [], []}
             {result, new_value} -> {result, %{key => new_value}, []}
             # :pop deletes the key and returns its previous value.
             :pop -> {value, [], [key]}
           end
         end),
       do: {:ok, result}
end
@spec get_and_update_multi(
        GenServer.server(),
        [key],
        (%{optional(key) => value} -> {any, %{optional(key) => value} | nil, [key] | nil}),
        [opt]
      ) :: {:ok, any} | {:error, any}
      when opt: {:timeout, timeout}
@doc """
Gets and updates or deletes multiple entries in an atomic transaction.

Gets all values associated with keys in `keys_to_get`, and passes them as a
map of `%{key => value}` entries to `fun`. If a key is not found, it won't be
added to the map passed to `fun`. Updates the database and returns a result
according to the return value of `fun`. Returns {`:ok`, return_value} in case
of success, `{:error, reason}` otherwise.

The function `fun` should return a tuple of three elements: `{return_value,
entries_to_put, keys_to_delete}`, where `return_value` is an arbitrary value
to be returned, `entries_to_put` is a map of `%{key => value}` entries to be
written to the database, and `keys_to_delete` is a list of keys to be deleted.

The read and write operations are executed as an atomic transaction, so they
will either all succeed, or all fail. Note that `get_and_update_multi/4`
blocks other write operations until it completes.

The `options` argument is an optional keyword list of options, including:

  - `:timeout` - a timeout (in milliseconds or `:infinity`, defaulting to
    `5000`) for the operation, after which the function returns `{:error,
    :timeout}`. This is useful to avoid blocking other write operations for too
    long.

## Example

Assuming a database of names as keys, and integer monetary balances as values,
and we want to transfer 10 units from `"Anna"` to `"Joy"`, returning their
updated balance:

    {:ok, {anna, joy}} = CubDB.get_and_update_multi(db, ["Anna", "Joy"], fn entries ->
      anna = Map.get(entries, "Anna", 0)
      joy = Map.get(entries, "Joy", 0)

      if anna < 10, do: raise(RuntimeError, message: "Anna's balance is too low")

      anna = anna - 10
      joy = joy + 10

      {{anna, joy}, %{"Anna" => anna, "Joy" => joy}, []}
    end)

Or, if we want to transfer all of the balance from `"Anna"` to `"Joy"`,
deleting `"Anna"`'s entry, and returning `"Joy"`'s resulting balance:

    {:ok, joy} = CubDB.get_and_update_multi(db, ["Anna", "Joy"], fn entries ->
      anna = Map.get(entries, "Anna", 0)
      joy = Map.get(entries, "Joy", 0)

      joy = joy + anna

      {joy, %{"Joy" => joy}, ["Anna"]}
    end)
"""
def get_and_update_multi(db, keys_to_get, fun, options \\ []) do
  # NOTE(review): the GenServer call itself uses :infinity — the :timeout
  # option appears to be enforced server-side while running the transaction.
  GenServer.call(db, {:get_and_update_multi, keys_to_get, fun, options}, :infinity)
end
@spec put_and_delete_multi(GenServer.server(), %{key => value}, [key]) :: :ok
@doc """
Atomically writes the entries in `entries_to_put` (a map or a list of
`{key, value}` pairs) and removes every key in `keys_to_delete`.
"""
def put_and_delete_multi(db, entries_to_put, keys_to_delete),
  do: GenServer.call(db, {:put_and_delete_multi, entries_to_put, keys_to_delete}, :infinity)
@spec get_multi(GenServer.server(), [key]) :: %{key => value}
@doc """
Atomically fetches the entries for all the given `keys` in a single read.

Returns a map with one `key => value` pair per entry found; keys that are
not present in the database are simply absent from the result.

## Example

    CubDB.put_multi(db, a: 1, b: 2, c: nil)

    CubDB.get_multi(db, [:a, :b, :c, :x])
    # => %{a: 1, b: 2, c: nil}
"""
def get_multi(db, keys), do: perform_read(db, {:get_multi, keys})
@spec put_multi(GenServer.server(), %{key => value} | [entry]) :: :ok
@doc """
Atomically writes all the given entries (a map or a list of `{key, value}`
pairs).
"""
def put_multi(db, entries), do: put_and_delete_multi(db, entries, [])
@spec delete_multi(GenServer.server(), [key]) :: :ok
@doc """
Atomically removes the entries for all the given `keys`.
"""
def delete_multi(db, keys), do: put_and_delete_multi(db, %{}, keys)
@spec clear(GenServer.server()) :: :ok
@doc """
Deletes all entries, resulting in an empty database.

The deletion is atomic, and is much more performant than deleting each entry
manually.

The operation respects all the guarantees of consistency of other concurrent
operations. For example, if `select/2` was called before the call to `clear/1`
and is running concurrently, the `select/2` will still see all the entries.

If a compaction is in progress when `clear/1` is called, the compaction is
halted, and a new one started immediately after. The new compaction should be
very fast, as the database is empty as a result of the `clear/1` call.
"""
def clear(db) do
  GenServer.call(db, :clear, :infinity)
end
@spec compact(GenServer.server()) :: :ok | {:error, String.t()}
@doc """
Runs a database compaction.

As write operations are performed on a database, its file grows. Occasionally,
a compaction operation can be run to shrink the file to its optimal size.
Compaction runs in the background and does not block operations.

Only one compaction operation can run at any time, therefore if this function
is called when a compaction is already running, it returns `{:error,
:pending_compaction}`.

When compacting, `CubDB` will create a new data file, and eventually switch to
it and remove the old one as the compaction succeeds. For this reason, during
a compaction, there should be enough disk space for a second copy of the
database file.

Compaction can create disk contention, so it should not be performed
unnecessarily often.
"""
def compact(db) do
  GenServer.call(db, :compact, :infinity)
end
@spec set_auto_compact(GenServer.server(), boolean | {integer, integer | float}) ::
:ok | {:error, String.t()}
@doc """
Configures whether to perform automatic compaction, and how.
If set to `false`, no automatic compaction is performed. If set to `true`,
auto-compaction is performed, following a write operation, if at least 100
write operations occurred since the last compaction, and the dirt factor is at
least 0.25. These values can be customized by setting the `auto_compact`
option to `{min_writes, min_dirt_factor}`.
It returns `:ok`, or `{:error, reason}` if `setting` is invalid.
Compaction is performed in the background and does not block other operations,
but can create disk contention, so it should not be performed unnecessarily
often. When writing a lot into the database, such as when importing data from
an external source, it is advisable to turn off auto compaction, and manually
run compaction at the end of the import.
"""
# Updates the auto-compaction policy at runtime; `setting` is validated
# server-side by `parse_auto_compact/1`.
def set_auto_compact(db, setting) do
  GenServer.call(db, {:set_auto_compact, setting}, :infinity)
end
@spec file_sync(GenServer.server()) :: :ok
@doc """
Performs a `fsync`, forcing to flush all data that might be buffered by the OS
to disk.
Calling this function ensures that all writes up to this point are committed
to disk, and will be available after a restart.
If `CubDB` is started with the option `auto_file_sync: true`, calling this
function is not necessary, as every write operation will be automatically
flushed to the storage device.
If this function is NOT called, the operative system will control when the
file buffer is flushed to the storage device, which leads to better write
performance, but might affect durability of recent writes in case of a sudden
shutdown.
"""
# Forces an fsync of the data file so all buffered writes reach disk.
def file_sync(db) do
  GenServer.call(db, :file_sync, :infinity)
end
@spec set_auto_file_sync(GenServer.server(), boolean) :: :ok
@doc """
Configures whether to automatically force file sync upon each write operation.
If set to `false`, no automatic file sync is performed. That improves write
performance, but leaves to the operative system the decision of when to flush
disk buffers. This means that there is the possibility that recent writes
might not be durable in case of a sudden machine shutdown. In any case,
atomicity of multi operations is preserved, and partial writes will not
corrupt the database.
If set to `true`, the file buffer will be forced to flush upon every write
operation, ensuring durability even in case of sudden machine shutdowns, but
decreasing write performance.
"""
# Toggles automatic fsync-per-write at runtime.
def set_auto_file_sync(db, bool) do
  GenServer.call(db, {:set_auto_file_sync, bool}, :infinity)
end
@spec data_dir(GenServer.server()) :: String.t()
@doc """
Returns the path of the data directory, as given when the `CubDB` process was
started.
## Example
{:ok, db} = CubDB.start_link("some/data/directory")
CubDB.data_dir(db)
#=> "some/data/directory"
"""
# Returns the data directory the server was started with.
def data_dir(db) do
  GenServer.call(db, :data_dir, :infinity)
end
@spec current_db_file(GenServer.server()) :: String.t()
@doc """
Returns the path of the current database file.
The current database file will change after a compaction operation.
## Example
{:ok, db} = CubDB.start_link("some/data/directory")
CubDB.current_db_file(db)
#=> "some/data/directory/0.cub"
"""
# Returns the path of the current data file (changes after compaction).
def current_db_file(db) do
  GenServer.call(db, :current_db_file, :infinity)
end
@spec cubdb_file?(String.t()) :: boolean
@doc false
# Returns true when `file_name` looks like a CubDB-managed file: a
# hexadecimal base name carrying either the database or the compaction
# file extension.
def cubdb_file?(file_name) do
  extension = Path.extname(file_name)
  hex_name = Path.basename(file_name, extension)

  extension in [@db_file_extension, @compaction_file_extension] &&
    Regex.match?(~r/^[\da-fA-F]+$/, hex_name)
end
@spec compaction_file?(String.t()) :: boolean
@doc false
# True when `file_name` has the compaction-in-progress extension.
def compaction_file?(file_name) do
  Path.extname(file_name) == @compaction_file_extension
end
@doc false
# Registers the calling process to receive lifecycle notifications
# (e.g. :compaction_started, :compaction_completed, :clean_up_started).
def subscribe(db) do
  GenServer.call(db, {:subscribe, self()}, :infinity)
end
@doc false
# Extracts the hexadecimal sequence number encoded in a data-file name,
# e.g. "ff.cub" -> 255. Used to order data files chronologically.
def file_name_to_n(file_name) do
  extension = Path.extname(file_name)

  file_name
  |> Path.basename(extension)
  |> String.to_integer(16)
end
# OTP callbacks
@doc false
# GenServer init: validates options, locates the newest data file in
# `data_dir` (or creates "0" + @db_file_extension when none exists),
# then starts the clean-up helper and the task supervisor used for
# background compaction/catch-up work. Any failure stops the server.
def init([data_dir, options]) do
  auto_compact = parse_auto_compact!(Keyword.get(options, :auto_compact, true))
  auto_file_sync = Keyword.get(options, :auto_file_sync, true)

  with file_name when is_binary(file_name) or is_nil(file_name) <- find_db_file(data_dir),
       {:ok, store} <-
         Store.File.create(Path.join(data_dir, file_name || "0#{@db_file_extension}")),
       {:ok, clean_up} <- CleanUp.start_link(data_dir),
       {:ok, task_supervisor} <- Task.Supervisor.start_link() do
    {:ok,
     %State{
       btree: Btree.new(store),
       task_supervisor: task_supervisor,
       data_dir: data_dir,
       clean_up: clean_up,
       auto_compact: auto_compact,
       auto_file_sync: auto_file_sync
     }}
  else
    {:error, reason} ->
      {:stop, reason}
  end
end

@doc false
# Closes the Btree (and its underlying file) on shutdown.
def terminate(_reason, %State{btree: btree}) do
  Btree.stop(btree)
end
# Reads run in a separate, linked+monitored Task so they never block the
# server; the reader replies to `from` directly. A timer (when timeout is
# finite) later sends {:reader_timeout, pid} so we can kill slow readers.
# The reader is tracked by monitor ref together with the file it reads
# from, so clean-up can be deferred while old files are still in use.
def handle_call({:read, operation, timeout}, from, state) do
  %State{btree: btree, readers: readers} = state
  {:ok, pid} = Task.start_link(Reader, :run, [btree, from, operation])
  ref = Process.monitor(pid)

  timer =
    if timeout != :infinity do
      Process.send_after(self(), {:reader_timeout, pid}, timeout)
    else
      nil
    end

  %Btree{store: %Store.File{file_path: file_path}} = btree
  {:noreply, %State{state | readers: Map.put(readers, ref, {file_path, timer})}}
end

def handle_call(:size, _, state = %State{btree: btree}) do
  {:reply, Enum.count(btree), state}
end

def handle_call(:dirt_factor, _, state = %State{btree: btree}) do
  {:reply, Btree.dirt_factor(btree), state}
end

# Writes: insert, commit, optionally fsync, then possibly auto-compact.
def handle_call({:put, key, value}, _, state) do
  %State{btree: btree, auto_file_sync: auto_file_sync} = state
  btree = Btree.insert(btree, key, value) |> Btree.commit()
  btree = if auto_file_sync, do: Btree.sync(btree), else: btree
  {:reply, :ok, maybe_auto_compact(%State{state | btree: btree})}
end

# Like :put, but fails with {:error, :exists} when the key is present.
def handle_call({:put_new, key, value}, _, state) do
  %State{btree: btree, auto_file_sync: auto_file_sync} = state

  case Btree.insert_new(btree, key, value) do
    {:error, :exists} = reply ->
      {:reply, reply, state}

    btree ->
      btree = Btree.commit(btree)
      btree = if auto_file_sync, do: Btree.sync(btree), else: btree
      {:reply, :ok, maybe_auto_compact(%State{state | btree: btree})}
  end
end

# While a compaction is running, deletions are recorded as tombstones
# (mark_deleted) so the catch-up phase can replay them onto the new file.
def handle_call({:delete, key}, _, state) do
  %State{btree: btree, auto_file_sync: auto_file_sync} = state

  btree =
    case compaction_running?(state) do
      false -> Btree.delete(btree, key) |> Btree.commit()
      true -> Btree.mark_deleted(btree, key) |> Btree.commit()
    end

  btree = if auto_file_sync, do: Btree.sync(btree), else: btree
  {:reply, :ok, maybe_auto_compact(%State{state | btree: btree})}
end

# Atomic read-modify-write: the caller-supplied `fun` runs in a Task
# under a timeout; its result drives a combined put/delete batch.
def handle_call({:get_and_update_multi, keys_to_get, fun, options}, _, state) do
  %State{btree: btree} = state
  timeout = Keyword.get(options, :timeout, 5000)

  compute_update = fn ->
    key_values = Reader.perform(btree, {:get_multi, keys_to_get})
    fun.(key_values)
  end

  case run_with_timeout(compute_update, timeout) do
    {:ok, {result, entries_to_put, keys_to_delete}} ->
      state = do_put_and_delete_multi(state, entries_to_put, keys_to_delete)
      {:reply, {:ok, result}, state}

    {:error, cause} ->
      {:reply, {:error, cause}, state}
  end
end

def handle_call({:put_and_delete_multi, entries_to_put, keys_to_delete}, _, state) do
  state = do_put_and_delete_multi(state, entries_to_put, keys_to_delete)
  {:reply, :ok, state}
end

# Clearing invalidates any in-flight compaction, so a running one is
# halted and restarted — the restarted one is cheap on an empty tree.
def handle_call(:clear, _, state) do
  %State{btree: btree, auto_file_sync: auto_file_sync} = state
  btree = Btree.clear(btree) |> Btree.commit()
  btree = if auto_file_sync, do: Btree.sync(btree), else: btree
  state = %State{state | btree: btree}

  if compaction_running?(state) do
    state = halt_compaction(state)
    {:ok, compactor} = trigger_compaction(state)
    {:reply, :ok, %State{state | compactor: compactor}}
  else
    {:reply, :ok, maybe_auto_compact(state)}
  end
end

def handle_call(:compact, _, state) do
  case trigger_compaction(state) do
    {:ok, compactor} ->
      {:reply, :ok, %State{state | compactor: compactor}}

    error ->
      {:reply, error, state}
  end
end

def handle_call({:set_auto_compact, setting}, _, state) do
  case parse_auto_compact(setting) do
    {:ok, setting} -> {:reply, :ok, %State{state | auto_compact: setting}}
    {:error, reason} -> {:reply, {:error, reason}, state}
  end
end

def handle_call({:set_auto_file_sync, bool}, _, state) do
  {:reply, :ok, %State{state | auto_file_sync: bool}}
end

def handle_call({:subscribe, pid}, _, state = %State{subs: subs}) do
  {:reply, :ok, %State{state | subs: [pid | subs]}}
end

def handle_call(:file_sync, _, state = %State{btree: btree}) do
  btree = Btree.sync(btree)
  {:reply, :ok, %State{state | btree: btree}}
end

def handle_call(:data_dir, _, state = %State{data_dir: data_dir}) do
  {:reply, data_dir, state}
end

def handle_call(:current_db_file, _, state = %State{btree: btree}) do
  %Btree{store: store} = btree
  %Store.File{file_path: file_path} = store
  {:reply, file_path, state}
end
# A compaction finished: notify subscribers and start catching up the
# compacted tree with writes that happened meanwhile.
def handle_info(
      {:compaction_completed, pid, original_btree, compacted_btree},
      state = %State{compactor: pid}
    ) do
  for pid <- state.subs, do: send(pid, :compaction_completed)
  {:noreply, catch_up(compacted_btree, original_btree, state)}
end

# Stale message from a superseded compactor: ignore it.
# Fixed: the fallback clause returned the bare `state` instead of
# `{:noreply, state}`, which would crash the GenServer (bad return value).
def handle_info({:compaction_completed, _, _, _}, state), do: {:noreply, state}
# A catch-up round finished: either finalize (if no new writes landed)
# or start another round via `catch_up/3`.
def handle_info(
      {:catch_up, pid, compacted_btree, original_btree},
      state = %State{catch_up: pid}
    ) do
  {:noreply, catch_up(compacted_btree, original_btree, state)}
end

# Stale message from a superseded catch-up task: ignore it.
# Fixed: the fallback clause returned the bare `state` instead of
# `{:noreply, state}`, which would crash the GenServer (bad return value).
def handle_info({:catch_up, _, _, _}, state), do: {:noreply, state}
# Enforces read timeouts: kill the reader task that exceeded its allotted
# time (unlink first so the exit does not propagate to this server).
# Bookkeeping happens when its :DOWN message arrives below.
def handle_info({:reader_timeout, reader}, state) do
  Process.unlink(reader)
  Process.exit(reader, :timeout)
  {:noreply, state}
end

# Compactor process ended (normally or not): clear the slot.
def handle_info({:DOWN, _ref, :process, pid, _reason}, state = %State{compactor: pid}) do
  {:noreply, %State{state | compactor: nil}}
end

# Catch-up process ended: clear the slot.
def handle_info({:DOWN, _ref, :process, pid, _reason}, state = %State{catch_up: pid}) do
  {:noreply, %State{state | catch_up: nil}}
end

def handle_info({:DOWN, ref, :process, _pid, _reason}, state = %State{readers: readers}) do
  # Process _might_ be a reader, so we remove it from the readers
  case Map.pop(readers, ref) do
    {nil, _readers} ->
      {:noreply, state}

    {{_, timer}, readers} ->
      # Cancel the pending timeout timer, if any, then run any clean-up
      # that was deferred while this reader held an old file open.
      if timer != nil, do: Process.cancel_timer(timer, async: true, info: false)

      if state.clean_up_pending == true do
        {:noreply, trigger_clean_up(%State{state | readers: readers})}
      else
        {:noreply, %State{state | readers: readers}}
      end
  end
end
@spec perform_read(GenServer.server(), Reader.operation(), timeout) :: any
# Client-side helper: issues a read call, passing the timeout both to
# GenServer.call and to the server (which uses it to kill slow readers).
defp perform_read(db, operation, timeout \\ :infinity) do
  GenServer.call(db, {:read, operation, timeout}, timeout)
end

@spec do_put_and_delete_multi(State.t(), [entry], [key]) :: State.t()

# No-op fast paths when there is nothing to write or delete.
defp do_put_and_delete_multi(state, [], []), do: state
# NOTE(review): this clause accepts `entries_to_put` as an empty map —
# presumably callers may pass a map of entries; verify against callers.
defp do_put_and_delete_multi(state, entries_to_put, []) when entries_to_put == %{}, do: state

# Applies all puts and deletes as one batch, committing (and optionally
# syncing) once at the end, so the batch is atomic on disk.
defp do_put_and_delete_multi(state, entries_to_put, keys_to_delete) do
  %State{btree: btree, auto_file_sync: auto_file_sync} = state

  btree =
    Enum.reduce(entries_to_put || [], btree, fn {key, value}, btree ->
      Btree.insert(btree, key, value)
    end)

  btree =
    Enum.reduce(keys_to_delete || [], btree, fn key, btree ->
      # Tombstone deletes while a compaction runs (see handle_call :delete).
      case compaction_running?(state) do
        false -> Btree.delete(btree, key)
        true -> Btree.mark_deleted(btree, key)
      end
    end)

  btree = Btree.commit(btree)
  btree = if auto_file_sync, do: Btree.sync(btree), else: btree
  maybe_auto_compact(%State{state | btree: btree})
end
@spec run_with_timeout(fun, timeout) :: {:ok, any} | {:error, any}

# Runs `fun` in a Task, returning {:ok, result} on success, {:error,
# reason} if the task exits, or {:error, :timeout} when it does not
# finish in time (the task is shut down in that case).
defp run_with_timeout(fun, timeout) do
  task = Task.async(fun)
  outcome = Task.yield(task, timeout) || Task.shutdown(task)

  case outcome do
    {:ok, value} -> {:ok, value}
    {:exit, reason} -> {:error, reason}
    nil -> {:error, :timeout}
  end
end
@spec find_db_file(String.t()) :: String.t() | nil | {:error, any}

# Ensures `data_dir` exists, then returns the name of the most recent
# database file (highest hex sequence number), or nil when none exists.
# File-system errors fall through as {:error, reason} from the `with`.
defp find_db_file(data_dir) do
  with :ok <- File.mkdir_p(data_dir),
       {:ok, files} <- File.ls(data_dir) do
    files
    |> Enum.filter(&cubdb_file?/1)
    |> Enum.filter(&String.ends_with?(&1, @db_file_extension))
    |> Enum.sort_by(&file_name_to_n/1)
    |> List.last()
  end
end
@spec trigger_compaction(%State{}) :: {:ok, pid} | {:error, any}

# Starts a background compaction unless one is already in progress.
# Notifies subscribers, creates the new compaction store (removing any
# leftover compaction files), and supervises + monitors the compactor.
defp trigger_compaction(state = %State{btree: btree, data_dir: data_dir, clean_up: clean_up}) do
  if compaction_running?(state) do
    {:error, :pending_compaction}
  else
    for pid <- state.subs, do: send(pid, :compaction_started)
    {:ok, store} = new_compaction_store(data_dir)
    CleanUp.clean_up_old_compaction_files(clean_up, store)

    case Task.Supervisor.start_child(state.task_supervisor, Compactor, :run, [
           self(),
           btree,
           store
         ]) do
      {:ok, pid} = success ->
        # Monitor so the :DOWN clause can clear the compactor slot.
        Process.monitor(pid)
        success

      error ->
        error
    end
  end
end
@spec catch_up(Btree.t(), Btree.t(), State.t()) :: State.t()

# After a compaction (or catch-up round), checks whether new writes
# landed while it ran. If not, the compacted tree becomes current and
# clean-up of old files is triggered; otherwise another catch-up round
# is started to replay the writes onto the compacted tree.
defp catch_up(compacted_btree, original_btree, state) do
  %State{btree: latest_btree, task_supervisor: supervisor, old_btrees: old_btrees} = state

  if latest_btree == original_btree do
    compacted_btree = finalize_compaction(compacted_btree)
    # Keep the superseded tree around until no reader uses it anymore.
    state = %State{state | btree: compacted_btree, old_btrees: [latest_btree | old_btrees]}
    for pid <- state.subs, do: send(pid, :catch_up_completed)
    trigger_clean_up(state)
  else
    {:ok, pid} =
      Task.Supervisor.start_child(supervisor, CatchUp, :run, [
        self(),
        compacted_btree,
        original_btree,
        latest_btree
      ])

    Process.monitor(pid)
    %State{state | catch_up: pid}
  end
end

@spec finalize_compaction(Btree.t()) :: Btree.t()

# Syncs and closes the compacted store, renames its file from the
# compaction extension to the database extension, and reopens it as the
# new current Btree.
defp finalize_compaction(btree = %Btree{store: compacted_store}) do
  Btree.sync(btree)
  Store.close(compacted_store)

  new_path =
    String.replace_suffix(
      compacted_store.file_path,
      @compaction_file_extension,
      @db_file_extension
    )

  :ok = File.rename(compacted_store.file_path, new_path)
  {:ok, store} = Store.File.create(new_path)
  Btree.new(store)
end
@spec new_compaction_store(String.t()) :: {:ok, Store.t()} | {:error, any}

# Creates the store for a compaction target file, named with the next
# hex sequence number after the highest existing CubDB file in data_dir.
# (Assumes at least one CubDB file exists, which init/1 guarantees.)
defp new_compaction_store(data_dir) do
  with {:ok, file_names} <- File.ls(data_dir) do
    highest_n =
      file_names
      |> Enum.filter(&cubdb_file?/1)
      |> Enum.map(&file_name_to_n/1)
      |> Enum.sort()
      |> List.last()

    new_file_name = Integer.to_string(highest_n + 1, 16) <> @compaction_file_extension
    Store.File.create(Path.join(data_dir, new_file_name))
  end
end
@spec compaction_running?(%State{}) :: boolean
# A compaction is "running" during both its phases: compactor and catch-up.
defp compaction_running?(%State{compactor: nil, catch_up: nil}), do: false
defp compaction_running?(_), do: true

@spec halt_compaction(%State{}) :: %State{}
# Kills any running compactor / catch-up task and clears both slots.
defp halt_compaction(state = %State{compactor: nil, catch_up: nil}), do: state

defp halt_compaction(state = %State{compactor: pid1, catch_up: pid2}) do
  if pid1 != nil, do: Process.exit(pid1, :halt)
  if pid2 != nil, do: Process.exit(pid2, :halt)
  %State{state | compactor: nil, catch_up: nil}
end

@spec trigger_clean_up(%State{}) :: %State{}
# Cleans up old files immediately when safe, otherwise defers until the
# last reader of an old file finishes (see the :DOWN handler).
defp trigger_clean_up(state) do
  if can_clean_up?(state),
    do: clean_up_now(state),
    else: clean_up_when_possible(state)
end

@spec can_clean_up?(%State{}) :: boolean
# Safe to clean up only when every active reader reads the current file.
defp can_clean_up?(%State{btree: %Btree{store: store}, readers: readers}) do
  %Store.File{file_path: file_path} = store

  Enum.all?(readers, fn {_reader, {file, _}} ->
    file == file_path
  end)
end

@spec clean_up_now(%State{}) :: %State{}
# Stops superseded Btrees and removes their files via the CleanUp process.
defp clean_up_now(state = %State{btree: btree, clean_up: clean_up}) do
  for old_btree <- state.old_btrees do
    if Btree.alive?(old_btree), do: :ok = Btree.stop(old_btree)
  end

  :ok = CleanUp.clean_up(clean_up, btree)
  for pid <- state.subs, do: send(pid, :clean_up_started)
  %State{state | clean_up_pending: false, old_btrees: []}
end

@spec clean_up_when_possible(%State{}) :: %State{}
# Marks clean-up as pending; retried when a reader terminates.
defp clean_up_when_possible(state) do
  %State{state | clean_up_pending: true}
end
@spec maybe_auto_compact(%State{}) :: %State{}

# Kicks off a compaction when the auto-compaction policy says one is
# due. Returns the state unchanged when no compaction is needed or one
# could not be started (e.g. another compaction is already running).
defp maybe_auto_compact(state) do
  with true <- should_auto_compact?(state),
       {:ok, compactor} <- trigger_compaction(state) do
    %State{state | compactor: compactor}
  else
    _ -> state
  end
end
@spec should_auto_compact?(%State{}) :: boolean
# Auto-compaction policy: disabled outright, or due once both the write
# count since last compaction (`dirt`) and the dirt factor reach their
# configured thresholds.
defp should_auto_compact?(%State{auto_compact: false}), do: false

defp should_auto_compact?(%State{btree: btree, auto_compact: auto_compact}) do
  {min_writes, min_dirt_factor} = auto_compact
  %Btree{dirt: dirt} = btree
  dirt_factor = Btree.dirt_factor(btree)
  dirt >= min_writes and dirt_factor >= min_dirt_factor
end
@spec parse_auto_compact(any) :: {:ok, false | {pos_integer, number}} | {:error, any}

# Normalizes the :auto_compact option: `false` disables, `true` uses the
# defaults, and a {min_writes, min_dirt_factor} tuple is validated
# (non-negative writes, dirt factor within [0, 1]).
defp parse_auto_compact(false), do: {:ok, false}
defp parse_auto_compact(true), do: {:ok, @auto_compact_defaults}

defp parse_auto_compact({min_writes, min_dirt_factor})
     when is_integer(min_writes) and is_number(min_dirt_factor) and min_writes >= 0 and
            min_dirt_factor >= 0 and min_dirt_factor <= 1 do
  {:ok, {min_writes, min_dirt_factor}}
end

defp parse_auto_compact(_), do: {:error, "invalid auto compact setting"}
@spec parse_auto_compact!(any) :: false | {pos_integer, number}
# Bang variant of parse_auto_compact/1: raises ArgumentError on invalid
# settings. Used in init/1 where an invalid option is a caller bug.
defp parse_auto_compact!(setting) do
  case parse_auto_compact(setting) do
    {:ok, setting} -> setting
    {:error, reason} -> raise(ArgumentError, message: reason)
  end
end

@spec split_options(
        [option | {:data_dir, String.t()} | GenServer.option()]
        | String.t()
      ) :: {:ok, {String.t(), [option], GenServer.options()}} | {:error, term}
# Splits the start_link argument into {data_dir, cubdb_opts, gen_server_opts}.
# Accepts either a bare data_dir string or a keyword/option list
# containing :data_dir (or something coercible to a string).
defp split_options(data_dir) when is_binary(data_dir) do
  {:ok, {data_dir, [], []}}
end

defp split_options(data_dir_or_options) do
  case Keyword.pop(data_dir_or_options, :data_dir) do
    {nil, data_dir_or_options} ->
      # No :data_dir key: the whole argument may itself be the data dir
      # (e.g. a charlist); to_string raises when it is neither.
      try do
        {:ok, {to_string(data_dir_or_options), [], []}}
      rescue
        ArgumentError ->
          {:error, "Options must include :data_dir"}

        Protocol.UndefinedError ->
          {:error, "data_dir must be a string (or implement String.Chars)"}
      end

    {data_dir, options} ->
      # Separate the options GenServer understands from CubDB's own.
      {gen_server_opts, opts} =
        Keyword.split(options, [:name, :timeout, :spawn_opt, :hibernate_after, :debug])

      try do
        {:ok, {to_string(data_dir), opts, gen_server_opts}}
      rescue
        Protocol.UndefinedError ->
          {:error, "data_dir must be a string (or implement String.Chars)"}
      end
  end
end
end
|
lib/cubdb.ex
| 0.899709
| 0.587973
|
cubdb.ex
|
starcoder
|
defmodule ExAliyunOts.DSL do
require ExAliyunOts.Const.FilterType, as: FilterType
alias ExAliyunOts.TableStore.Condition
alias ExAliyunOts.TableStoreFilter.{Filter, ColumnPaginationFilter}
@type row_existence :: :ignore | :expect_exist | :expect_not_exist
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/35193.html) | [English](https://www.alibabacloud.com/help/doc-detail/35193.html)
## Example
import MyApp.TableStore
get_row table_name1, [{"key", "key1"}],
columns_to_get: ["name", "level"],
filter: filter(
({"name", ignore_if_missing: true, latest_version_only: true} == var_name and "age" > 1) or
("class" == "1")
)
batch_get [
get(
table_name2,
[{"key", "key1"}],
filter: filter "age" >= 10
)
]
## Options
* `ignore_if_missing`, used when attribute column not existed.
* if a attribute column is not existed, when set `ignore_if_missing: true` in filter expression, there will ignore this row data in the returned result;
* if a attribute column is existed, the returned result won't be affected no matter true or false was set.
* `latest_version_only`, used when attribute column has multiple versions.
* if set `latest_version_only: true`, there will only check the value of the latest version is matched or not, by default it's set as `latest_version_only: true`;
* if set `latest_version_only: false`, there will check the value of all versions are matched or not.
"""
@doc row: :row
defmacro filter(filter_expr) do
  # Delegates the AST transformation of the filter expression to
  # ExAliyunOts.Filter at macro-expansion time.
  ExAliyunOts.Filter.build_filter(filter_expr)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/44573.html) | [English](https://www.alibabacloud.com/help/doc-detail/44573.html)
## Example
import MyApp.TableStore
get_row table_name,
[{"key", "1"}],
start_column: "room",
filter: pagination(offset: 0, limit: 3)
Use `pagination/1` for `:filter` options when get row.
"""
@doc row: :row
@spec pagination(options :: Keyword.t()) :: map()
defmacro pagination(options) do
  # :offset and :limit are read at macro-expansion time, so `options`
  # must be a literal keyword list at the call site.
  offset = Keyword.get(options, :offset)
  limit = Keyword.get(options, :limit)

  quote do
    %Filter{
      type: unquote(FilterType.column_pagination()),
      filter: %ColumnPaginationFilter{offset: unquote(offset), limit: unquote(limit)}
    }
  end
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/35194.html) | [English](https://www.alibabacloud.com/help/doc-detail/35194.html)
## Example
import MyApp.TableStore
update_row "table", [{"pk", "pk1"}],
delete_all: ["attr1", "attr2"],
return_type: :pk,
condition: condition(:expect_exist)
The available `existence` options: `:expect_exist` | `:expect_not_exist` | `:ignore`, here are some use cases for your reference:
Use `condition(:expect_exist)`, expect the primary keys to row is existed.
* for `put_row/5`, if the primary keys have auto increment column type, meanwhile the target primary keys row is existed,
only use `condition(:expect_exist)` can successfully overwrite the row.
* for `update_row/4`, if the primary keys have auto increment column type, meanwhile the target primary keys row is existed,
only use `condition(:expect_exist)` can successfully update the row.
* for `delete_row/4`, no matter what primary keys type are, use `condition(:expect_exist)` can successfully delete the row.
Use `condition(:expect_not_exist)`, expect the primary_keys to row is not existed.
* for `put_row/5`, if the primary keys have auto increment type,
- while the target primary keys row is existed, only use `condition(:expect_exist)` can successfully put the row;
- while the target primary keys row is not existed, only use `condition(:ignore)` can successfully put the row.
Use `condition(:ignore)`, ignore the row existence check
* for `put_row/5`, if the primary keys have auto increment column type, meanwhile the target primary keys row is not existed,
only use `condition(:ignore)` can successfully put the row.
* for `update_row/4`, if the primary keys have auto increment column type, meanwhile the target primary keys row is not existed,
only use `condition(:ignore)` can successfully update the row.
* for `delete_row/4`, no matter what primary keys type are, use `condition(:ignore)` can successfully delete the row if existed.
The `batch_write/3` operation is a collection of put_row / update_row / delete_row operations.
"""
@doc row: :row
@spec condition(row_existence) :: map()
defmacro condition(row_existence) do
  # `map_row_existence/1` is invoked at expansion time, so `row_existence`
  # must be a literal atom (:ignore | :expect_exist | :expect_not_exist);
  # anything else raises ExAliyunOts.RuntimeError at compile time.
  quote do
    %Condition{row_existence: unquote(map_row_existence(row_existence))}
  end
end
@doc """
Similar to `condition/1` and support use filter expression (please see `filter/1`) as well, please refer them for details.
## Example
import MyApp.TableStore
delete_row "table",
[{"key", "key1"}, {"key2", "key2"}],
condition: condition(:expect_exist, "attr_column" == "value2")
"""
@doc row: :row
defmacro condition(row_existence, filter_expr) do
  # Like condition/1, but additionally compiles a filter expression into
  # the column condition (see filter/1 for the expression syntax).
  row_existence = map_row_existence(row_existence)
  column_condition = ExAliyunOts.Filter.build_filter(filter_expr)

  quote do
    %Condition{
      row_existence: unquote(row_existence),
      column_condition: unquote(column_condition)
    }
  end
end
# Generates one map_row_existence/1 clause per RowExistenceExpectation
# constant, mapping the downcased atom used in the DSL (e.g.
# :expect_exist) to the protobuf enum value.
ExAliyunOts.TableStore.RowExistenceExpectation.constants()
|> Enum.map(fn {_value, row_existence} ->
  downcase_row_existence = row_existence |> Atom.to_string() |> String.downcase() |> String.to_atom()

  defp map_row_existence(unquote(downcase_row_existence)) do
    unquote(row_existence)
  end
end)

# Fallback: any other value is a caller error, reported at expansion time.
defp map_row_existence(row_existence) do
  raise ExAliyunOts.RuntimeError,
        "Invalid existence: #{inspect(row_existence)} in condition, please use one of :ignore | :expect_exist | :expect_not_exist option."
end
end
|
lib/ex_aliyun_ots/dsl.ex
| 0.861188
| 0.50653
|
dsl.ex
|
starcoder
|
defmodule BitstylesPhoenix.Component.Form do
use BitstylesPhoenix.Component
import BitstylesPhoenix.Component.Error
alias Phoenix.HTML.Form, as: PhxForm
@moduledoc """
Components for rendering input elements.
## Common attributes
All helpers in this module accept the following attributes.
- `form` *(required)* - The form to render the input form.
- `field` *(required)* - The name of the field for the input.
- `label` - The text to be used as label. Defaults to `Phoenix.HTML.Form.humanize/1`.
- `label_opts` - The options passed to the label element generated with `Phoenix.HTML.Form.label/4`.
See `Phoenix.HTML.Form.form_for/4` or LiveView `form` component for details on how to render a form.
"""
# Maps the `type` attribute of `ui_input/1` to the corresponding
# Phoenix.HTML.Form input helper (see the doc table built from this map).
@input_mapping %{
  color: :color_input,
  checkbox: :checkbox,
  date: :date_input,
  datetime_local: :datetime_local_input,
  email: :email_input,
  file: :file_input,
  number: :number_input,
  # Fixed: this value was corrupted to ":<PASSWORD>" (anonymization
  # artifact); the Phoenix.HTML.Form helper is `password_input/3`.
  password: :password_input,
  range: :range_input,
  search: :search_input,
  telephone: :telephone_input,
  text: :text_input,
  time: :time_input,
  url: :url_input
}
@wrapper_assigns_keys [:field, :form, :label, :label_opts, :hidden_label]
@type_doc_table @input_mapping
|> Enum.map(fn {type, helper} ->
"| `:#{type}` | `Phoenix.HTML.Form.#{helper}/3` |\n"
end)
@doc """
Renders various types of `<input>` element, with the associated `<label>`s, and any errors for that field.
## Attributes
- `type` - The type of the input (see table below for available types). Defaults to `type="text"`.
- `hidden_label` - Only show the label for screen readers if set to `true`.
- All options from above (see top level module doc).
- All other attributes will be passed in as input options to the underlying input
helpers from `Phoenix.HTML.Form` (see table below for used helpers).
Defaults to `maxlength="255"` for `email`, `text` and `password` type.
Set maxlength to `false` to prevent setting maxlength.
For reference which input helper is used check out the following mapping:
| type | Helper |
| :--: | ------ |
#{@type_doc_table}
See the [bitstyles form docs](https://bitcrowd.github.io/bitstyles/?path=/docs/base-forms--fieldset) for examples of inputs, selects, textareas, labels etc. in use.
See the [bitstyles form docs](https://bitcrowd.github.io/bitstyles/?path=/docs/ui-data-forms--login-form) for examples of form layouts.
"""
story("Text field with label", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_input form={@form} field={:name} />
...> """
"""
<label for="user_name">
Name
</label>
<input id="user_name" maxlength="255" name="user[name]" type="text"/>
"""
''')
story("Text field with error", '''
iex> assigns=%{form: @error_form}
...> render ~H"""
...> <.ui_input form={@form} field={:name} />
...> """
"""
<label for="user_name">
Name
</label>
<input id="user_name" maxlength="255" name="user[name]" type="text"/>
<span class="u-fg--warning" phx-feedback-for="user[name]">
is too short
</span>
"""
''')
story("Text field with multiple errors", '''
iex> assigns=%{form: @error_form}
...> render ~H"""
...> <.ui_input form={@form} field={:email} />
...> """
"""
<label for="user_email">
Email
</label>
<input id="user_email" maxlength="255" name="user[email]" type="text"/>
<ul class=\"u-padding-xl-left\">
<li>
<span class=\"u-fg--warning\" phx-feedback-for=\"user[email]\">
is invalid
</span>
</li>
<li>
<span class=\"u-fg--warning\" phx-feedback-for=\"user[email]\">
must end with @bitcrowd.net
</span>
</li>
</ul>
"""
''')
story("Text field with hidden label", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_input form={@form} field={:name} hidden_label={true} />
...> """
"""
<label class="u-sr-only" for="user_name">
Name
</label>
<input id="user_name" maxlength="255" name="user[name]" type="text"/>
"""
''')
story("Text field with label (without maxlength)", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_input form={@form} field={:name} maxlength={false}/>
...> """
"""
<label for="user_name">
Name
</label>
<input id="user_name" name="user[name]" type="text"/>
"""
''')
story("Text field with options", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_input
...> form={@form}
...> field={:totp}
...> label="Authentication code"
...> label_opts={[class: "extra"]}
...> placeholder="6-digit code"
...> required={true}
...> value=""
...> inputmode="numeric"
...> pattern="[0-9]*"
...> autocomplete="one-time-code"
...> maxlength={6} />
...> """
"""
<label class="extra" for="user_totp">
Authentication code
</label>
<input autocomplete="one-time-code" id="user_totp" inputmode="numeric" maxlength="6" name="user[totp]" pattern="[0-9]*" placeholder="6-digit code" required="required" type="text" value=""/>
"""
''')
story("Email field with label", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_input form={@form} field={:email} type={:email} />
...> """
"""
<label for="user_email">
Email
</label>
<input id="user_email" maxlength="255" name="user[email]" type="email"/>
"""
''')
story("Search field with placholder", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_input
...> form={@form}
...> field={:email_or_name}
...> type={:search}
...> placeholder="Search by email or name"
...> autofocus={true} />
...> """
"""
<label for="user_email_or_name">
Email or name
</label>
<input autofocus="autofocus" id="user_email_or_name" name="user[email_or_name]" placeholder="Search by email or name" type="search"/>
"""
''')
story("File field for pdfs", '''
iex> assigns=%{form: @file_form}
...> render ~H"""
...> <.ui_input form={@form} field={:file} type={:file} accept="application/pdf" />
...> """
"""
<label for="user_file">
File
</label>
<input accept="application/pdf" id="user_file" name="user[file]" type="file"/>
"""
''')
story("Checkbox", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_input form={@form} field={:accept} type={:checkbox} />
...> """
"""
<label for="user_accept">
<input name="user[accept]" type="hidden" value="false"/>
<input id="user_accept" name="user[accept]" type="checkbox" value="true"/>
Accept
</label>
"""
''')
story("Checkbox with label class", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_input form={@form} field={:accept} type={:checkbox} label_opts={[class: "extra"]}/>
...> """
"""
<label class="extra" for="user_accept">
<input name="user[accept]" type="hidden" value="false"/>
<input id="user_accept" name="user[accept]" type="checkbox" value="true"/>
Accept
</label>
"""
''')
# Renders an <input> with label and errors. Checkboxes nest the input
# inside the label (wrapped); all other types render label then input
# (unwrapped). All non-wrapper attributes pass through to the underlying
# Phoenix.HTML.Form helper selected by `@type` (see @input_mapping).
def ui_input(assigns) do
  extra = assigns_to_attributes(assigns, @wrapper_assigns_keys ++ [:type])

  assigns =
    assigns
    |> assign_new(:type, fn -> :text end)
    |> assign(:extra, extra)
    |> assign(:wrapper, wrapper_assigns(assigns))

  ~H"""
  <%= if @type == :checkbox do %>
    <.ui_wrapped_input {@wrapper}>
      <%= render_input(:checkbox, @form, @field, @extra) %>
    </.ui_wrapped_input>
  <% else %>
    <.ui_unwrapped_input {@wrapper}>
      <%= render_input(@type, @form, @field, @extra) %>
    </.ui_unwrapped_input>
  <% end %>
  """
end
# Dispatches to the Phoenix.HTML.Form helper for `type`, after applying
# default validations to the options.
defp render_input(type, form, field, opts) do
  apply(PhxForm, input_type(type), [form, field, default_validations(opts, type)])
end

# Resolves the helper name for a type; unknown types fall back to text.
defp input_type(type) do
  Map.get(@input_mapping, type, :text_input)
end

# Text-like inputs default to maxlength=255 (callers can override, or
# pass maxlength: false to omit it).
defp default_validations(extra, type) when type in [:email, :text, :password] do
  Keyword.put_new(extra, :maxlength, 255)
end

defp default_validations(extra, _), do: extra
@doc ~S"""
Renders `<textarea>` elements, with the associated `<label>`s, and any errors for that field.
## Attributes
- `hidden_label` - Only show the label for screen readers if set to `true`.
- All options from above (see top level module doc).
- All other attributes will be passed in as input options to `Phoenix.HTML.Form.textarea/3`.
See the [bitstyles textarea docs](https://bitcrowd.github.io/bitstyles/?path=/docs/base-forms--textarea-and-label) for examples of textareas and labels in use.
"""
story("Textarea", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_textarea form={@form} field={:about_me} />
...> """
"""
<label for="user_about_me">
About me
</label>
<textarea id="user_about_me" name="user[about_me]">
</textarea>
"""
''')
story("Textarea with options", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_textarea
...> form={@form}
...> field={:metadata}
...> label="Metadata"
...> label_opts={[class: "extra"]}
...> value="Value here"
...> rows={10}
...> />
...> """
"""
<label class="extra" for="user_metadata">
Metadata
</label>
<textarea id="user_metadata" name="user[metadata]" rows="10">
Value here
</textarea>
"""
''')
story("Textarea with hidden label", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_textarea form={@form} field={:address} hidden_label/>
...> """
"""
<label class="u-sr-only" for="user_address">
Address
</label>
<textarea id="user_address" name="user[address]">
</textarea>
"""
''')
story("Textarea with error", '''
iex> assigns=%{form: @error_form}
...> render ~H"""
...> <.ui_textarea form={@form} field={:name} />
...> """
"""
<label for="user_name">
Name
</label>
<textarea id="user_name" name="user[name]">
</textarea>
<span class="u-fg--warning" phx-feedback-for="user[name]">
is too short
</span>
"""
''')
# Renders a <textarea> with label and errors; all non-wrapper attributes
# pass through to Phoenix.HTML.Form.textarea/3.
def ui_textarea(assigns) do
  extra = assigns_to_attributes(assigns, @wrapper_assigns_keys)

  assigns =
    assigns
    |> assign(:extra, extra)
    |> assign(:wrapper, wrapper_assigns(assigns))

  ~H"""
  <.ui_unwrapped_input {@wrapper}>
    <%= PhxForm.textarea(@form, @field, @extra) %>
  </.ui_unwrapped_input>
  """
end
@doc ~S"""
Renders a `<select>` element, with a `<label>` and any errors for that field.
## Attributes
- `hidden_label` - Only show the label for screen readers if set to `true`.
- `options` - The options passed to `Phoenix.HTML.Form.select/4`.
- All options from above (see top level module doc).
- All other attributes will be passed in as input options to `Phoenix.HTML.Form.select/4`.
See the [bitstyles select docs](https://bitcrowd.github.io/bitstyles/?path=/docs/base-forms--select-and-label) for examples of textareas and labels in use.
"""
story("Select box", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_select form={@form} field={:week} options={1..2} />
...> """
"""
<label for="user_week">
Week
</label>
<select id="user_week" name="user[week]">
<option value="1">
1
</option>
<option value="2">
2
</option>
</select>
"""
''')
story("Select box without label", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_select form={@form} field={:week} options={1..2} hidden_label/>
...> """
"""
<label class="u-sr-only" for="user_week">
Week
</label>
<select id="user_week" name="user[week]">
<option value="1">
1
</option>
<option value="2">
2
</option>
</select>
"""
''')
story("Select box with options", '''
iex> assigns=%{form: @user_form, options: [{"Ducks", "ducks"}, {"Cats", "cats"}]}
...> render ~H"""
...> <.ui_select form={@form} field={:preference} options={@options} label="What do you like best?" label_opts={[class: "extra"]}/>
...> """
"""
<label class="extra" for="user_preference">
What do you like best?
</label>
<select id="user_preference" name="user[preference]">
<option value="ducks">
Ducks
</option>
<option value="cats">
Cats
</option>
</select>
"""
''')
# Renders a <select> together with its <label> and any field errors.
# `:options` goes to `Phoenix.HTML.Form.select/4` as the options list; all
# other non-wrapper assigns are forwarded as input attributes.
def ui_select(assigns) do
  select_opts = assigns_to_attributes(assigns, @wrapper_assigns_keys ++ [:options])
  wrapper = wrapper_assigns(assigns)

  assigns =
    assigns
    |> assign(:wrapper, wrapper)
    |> assign(:extra, select_opts)

  ~H"""
  <.ui_unwrapped_input {@wrapper}>
    <%= PhxForm.select(@form, @field, @options, @extra) %>
  </.ui_unwrapped_input>
  """
end
# Extracts only the assigns understood by the label/error wrapper component
# (the keys listed in `@wrapper_assigns_keys`).
defp wrapper_assigns(assigns) do
  Map.take(assigns, @wrapper_assigns_keys)
end
@doc """
Component for rendering custom inputs together with a label and errors.
## Attributes
- `hidden_label` - Only show the label for screen readers if set to `true`.
- All options from above (see top level module doc).
"""
story("Custom inputs", '''
iex> assigns=%{form: @error_form}
...> render ~H"""
...> <.ui_unwrapped_input form={@form} field={:name} label="Custom">
...> Custom content
...> <input type="text" whatever="foo" />
...> </.ui_unwrapped_input>
...> """
"""
<label for="user_name">
Custom
</label>
Custom content
<input type="text" whatever="foo"/>
<span class="u-fg--warning" phx-feedback-for="user[name]">
is too short
</span>
"""
''')
# Renders a <label>, the caller-supplied inner block, and any field errors,
# in that order, without wrapping the inner content in the label.
# When `:hidden_label` is truthy the label gets the `u-sr-only` class so it
# is visible to screen readers only.
def ui_unwrapped_input(assigns) do
  text = Map.get_lazy(assigns, :label, fn -> default_label(assigns.field) end)
  opts = Map.get(assigns, :label_opts, [])

  # Merge the screen-reader-only class into whatever class the caller set.
  class = classnames([opts[:class], {"u-sr-only", assigns[:hidden_label]}])
  opts = Keyword.put(opts, :class, class)

  label = PhxForm.label(assigns.form, assigns.field, text, opts)
  assigns = assign(assigns, :label, label)

  ~H"""
  <%= @label %><%= render_slot(@inner_block) %><.ui_errors form={@form} field={@field} />
  """
end
@doc """
Component for rendering custom wrapped inputs in a label and with errors.
## Attributes
- All options from above (see top level module doc).
"""
story("Custom wrapped inputs", '''
iex> assigns=%{form: @user_form}
...> render ~H"""
...> <.ui_wrapped_input form={@form} field={:name} label="Current name">
...> <input type="checkbox" id="user_name" whatever="foo" />
...> </.ui_wrapped_input>
...> """
"""
<label for="user_name">
<input type="checkbox" id="user_name" whatever="foo"/>
Current name
</label>
"""
''')
# Renders the caller-supplied input *inside* the <label> (e.g. checkboxes),
# followed by the label text, then any field errors after the label.
def ui_wrapped_input(assigns) do
  field = assigns.field

  assigns =
    assigns
    |> assign_new(:label_opts, fn -> [] end)
    |> assign_new(:label, fn -> default_label(field) end)

  ~H"""
  <%= PhxForm.label @form, @field, @label_opts do %>
    <%= render_slot(@inner_block) %>
    <%= @label %>
  <% end %>
  <.ui_errors form={@form} field={@field} />
  """
end
defp default_label(field), do: PhxForm.humanize(field)
end
|
lib/bitstyles_phoenix/component/form.ex
| 0.837021
| 0.436622
|
form.ex
|
starcoder
|
defmodule ExRack.DHT do
  @moduledoc false
  # Polls a DHT temperature/humidity sensor every @period ms, publishes each
  # successful reading via :telemetry and to subscribed processes, and caches
  # the latest values for synchronous reads.

  use GenServer

  # Poll interval in milliseconds.
  @period 10_000

  # Client

  def start_link(state) do
    GenServer.start_link(__MODULE__, state, name: __MODULE__)
  end

  # Reads this module's application configuration (`:gpio`, `:sensor`, ...)
  # as a map.
  def config do
    Application.fetch_env!(:exrack_firmware, ExRack.DHT)
    |> Map.new()
  end

  # Latest cached temperature (nil until the first successful read).
  def temperature do
    GenServer.call(__MODULE__, :temperature)
  end

  # Latest cached humidity (nil until the first successful read).
  def humidity do
    GenServer.call(__MODULE__, :humidity)
  end

  # Both cached readings as %{temperature: ..., humidity: ...}.
  def data do
    GenServer.call(__MODULE__, :data)
  end

  # Subscribes the calling process; after each successful sensor read it will
  # receive `{:dht, %{temperature: t, humidity: h}}`.
  def subscribe() do
    GenServer.call(__MODULE__, :subscribe)
  end

  # Server

  @impl true
  def init(state) do
    state = Map.merge(state, %{temperature: nil, humidity: nil, subscribed: []})
    schedule_work()
    {:ok, state}
  end

  @impl true
  def handle_info(:work, %{:gpio => gpio, :sensor => sensor, :subscribed => subscribed} = state) do
    state =
      case DHT.read(gpio, sensor) do
        {:ok, %{:humidity => humidity, :temperature => temperature}} ->
          # BUGFIX: the event name was misspelled as [:dht, :humididty], so
          # handlers attached to [:dht, :humidity] never fired.
          :telemetry.execute([:dht, :humidity], %{percent: humidity}, %{sensor: sensor})
          :telemetry.execute([:dht, :temperature], %{celsius: temperature}, %{sensor: sensor})

          Enum.each(subscribed, fn pid ->
            if Process.alive?(pid) do
              Process.send(pid, {:dht, %{:temperature => temperature, :humidity => humidity}}, [])
            end
          end)

          Map.merge(state, %{:temperature => temperature, :humidity => humidity})

        _ ->
          # Sensor reads fail transiently (DHT sensors are timing-sensitive);
          # keep the previous cached values and try again next cycle.
          state
      end

    schedule_work()
    {:noreply, state}
  end

  @impl true
  def handle_call(:temperature, _from, %{:temperature => temperature} = state) do
    {:reply, temperature, state}
  end

  @impl true
  def handle_call(:humidity, _from, %{:humidity => humidity} = state) do
    {:reply, humidity, state}
  end

  @impl true
  def handle_call(:data, _from, %{:humidity => humidity, :temperature => temperature} = state) do
    {:reply, %{:temperature => temperature, :humidity => humidity}, state}
  end

  @impl true
  def handle_call(:subscribe, {pid, _}, state) do
    # Ignore duplicate subscriptions so a process is notified only once per
    # reading.
    subscribed =
      if pid in state.subscribed do
        state.subscribed
      else
        [pid | state.subscribed]
      end

    {:reply, :ok, Map.put(state, :subscribed, subscribed)}
  end

  defp schedule_work do
    Process.send_after(self(), :work, @period)
  end
end
|
exrack_firmware/lib/exrack_firmware/dht.ex
| 0.729712
| 0.414247
|
dht.ex
|
starcoder
|
defmodule Geolix.Adapter.MMDB2 do
  @moduledoc """
  Adapter for Geolix to work with MMDB2 databases.

  ## Adapter Configuration

  To start using the adapter with a compatible database you need to add the
  required configuration entry to your `:geolix` configuration:

      config :geolix,
        databases: [
          %{
            id: :my_mmdb_database,
            adapter: Geolix.Adapter.MMDB2,
            source: "/absolute/path/to/my/database.mmdb"
          }
        ]

  ## Database Configuration

  In order to work this adapter requires a `:source` configuration value to
  point to a valid MMDB2 format database.

  ### Compressed Databases

  Some limited support is built in to allow working with compressed databases
  if the filename matches one of the following patterns:

  - `*.gz` - It is expected to be a `gzip` compressed file
  - `*.tar` - It is expected to be a tarball and the first file in the archive ending in `.mmdb` will be loaded.
  - `*.tar.gz` - Combination of the above

  ### MMDB2 Decoder Options

  If not configured or passed otherwise the following options are used for
  decoding:

      %{
        double_precision: 8,
        float_precision: 4,
        map_keys: :atoms
      }

  You can pass a custom option to the lookup request:

      iex(1)> mmdb2_opts = %{
      ......>   double_precision: 8,
      ......>   float_precision: 4,
      ......>   map_keys: :strings
      ......> }
      iex(2)> Geolix.lookup({1, 1, 1, 1}, mmdb2_decoder_options: mmdb2_opts)

  Or configure your values as the defaults if not passed:

      config :geolix,
        databases: [
          %{
            id: :my_mmdb_database,
            adapter: Geolix.Adapter.MMDB2,
            source: "/absolute/path/to/my/database.mmdb",
            mmdb2_decoder_options: %{
              double_precision: 8,
              float_precision: 4,
              map_keys: :strings
            }
          }
        ]

  ### Result Transformation

  By default a result is transformed to a struct matching your database type.
  This setting can be modified by passing an option to the lookup request:

      iex> Geolix.lookup({1, 1, 1, 1}, as: :raw)

  Or configure a default if not passed:

      config :geolix,
        databases: [
          %{
            id: :my_mmdb_database,
            adapter: Geolix.Adapter.MMDB2,
            source: "/absolute/path/to/my/database.mmdb",
            result_as: :raw
          }
        ]

  Possible options:

  - `:raw` - Return results as found in the database
  - `:struct` - Return values after transforming them to a result struct (default)

  Passing `as: :raw` skips the struct transformation and returns the value as
  read from your database. This option may be necessary if you have configured
  custom `:mmdb2_decoder_options`.
  """

  alias Geolix.Adapter.MMDB2.Database
  alias Geolix.Adapter.MMDB2.Loader
  alias Geolix.Adapter.MMDB2.Storage

  @behaviour Geolix.Adapter

  # One storage worker per database id holds the loaded database contents.
  @impl Geolix.Adapter
  def database_workers(%{id: database_id}), do: [Storage.child_spec(database_id)]

  # Loading/unloading is delegated to the Loader, which also handles the
  # compressed source formats described in the moduledoc.
  @impl Geolix.Adapter
  def load_database(database), do: Loader.load_database(database)

  # IP lookups and metadata reads are delegated to the Database module.
  @impl Geolix.Adapter
  def lookup(ip, opts, database), do: Database.lookup(ip, opts, database)

  @impl Geolix.Adapter
  def metadata(database), do: Database.metadata(database)

  @impl Geolix.Adapter
  def unload_database(database), do: Loader.unload_database(database)
end
|
lib/mmdb2.ex
| 0.873458
| 0.649071
|
mmdb2.ex
|
starcoder
|
defmodule SpiderMan.Configuration do
  alias SpiderMan.{Pipeline, Producer, Storage, Utils}

  # Baseline settings for the three spider components. This is the
  # lowest-priority layer in the settings merge (see "Settings Priority" in
  # the moduledoc) and is also interpolated into the docs below.
  @default_settings [
    downloader_options: [
      producer: Producer.ETS,
      processor: [max_demand: 1],
      rate_limiting: [allowed_messages: 10, interval: 1000],
      pipelines: [Pipeline.DuplicateFilter],
      post_pipelines: [],
      context: %{}
    ],
    spider_options: [
      producer: Producer.ETS,
      processor: [max_demand: 1],
      pipelines: [],
      post_pipelines: [],
      context: %{}
    ],
    item_processor_options: [
      producer: Producer.ETS,
      storage: Storage.JsonLines,
      pipelines: [Pipeline.DuplicateFilter],
      post_pipelines: [],
      context: %{},
      batchers: [
        default: [
          concurrency: 1,
          batch_size: 50,
          batch_timeout: 1000
        ]
      ]
    ]
  ]

  @moduledoc """
  Handles settings for spiders.

  ## Startup Spiders

      config :spider_man, :spiders, [
        SpiderA,
        {SpiderB, settings = [...]},
        ...
      ]

  All spiders defined in `:spiders` are started automatically when the
  `:spider_man` application starts.

  ## Global Settings

      config :spider_man, global_settings: settings = [...]

  These `settings` apply to all spiders.

  ## Settings for Spider on config files

      config :spider_man, SpiderA, settings = [...]

  These `settings` apply only to `SpiderA`.

  ## Default Settings

  ```elixir
  #{inspect(@default_settings, pretty: true)}
  ```

  ## Settings Priority

  1. Settings for Spider directly.
    1.1 `settings` defined in `spiders` for the Spider.
    1.2 Passed as the second argument when calling `SpiderMan.start/2`.
  2. Returned by the callback function: `SpiderModule.settings/0`.
  3. Settings for Spider on config files.
  4. Global Settings.
  5. Default Settings.
  """

  # Renders the NimbleOptions documentation for all known settings.
  # The catch-all `:*` key is removed since it carries no documentation.
  def configuration_docs do
    configuration_spec()
    |> Keyword.delete(:*)
    |> NimbleOptions.docs()
  end

  # Builds the NimbleOptions schema used to validate and document spider
  # settings (see `validate_settings!/2`).
  def configuration_spec do
    # Schema for each entry under item_processor_options -> batchers.
    batcher_keys_spec = [
      concurrency: [type: :pos_integer, default: 1],
      batch_size: [type: :pos_integer, default: 100],
      batch_timeout: [type: :pos_integer, default: 1000],
      partition_by: [type: {:fun, 1}],
      spawn_opt: [type: :keyword_list],
      hibernate_after: [type: :pos_integer]
    ]

    [
      print_stats: [type: :boolean, default: true, doc: "Print the stats of spider, "],
      log2file: [type: {:or, [:boolean, :string]}, default: true, doc: "Save the log to files, "],
      status: [
        type: {:in, [:running, :suspended]},
        default: :running,
        doc: "Set the startup status for the spider, "
      ],
      spider_module: [type: :atom, doc: "Set the callback module for the spider, "],
      ets_file: [
        type: :string,
        doc: "Set the filename for the spider, and load spider's state from ets files."
      ],
      downloader_options: [
        type: :keyword_list,
        keys:
          [
            requester: [
              type: {:or, [:atom, :mod_arg]},
              # NOTE(review): this default is a tuple wrapped in another tuple
              # ({{mod, arg}}), which matches neither :atom nor :mod_arg —
              # confirm whether {SpiderMan.Requester.Finch, []} was intended.
              default: {{SpiderMan.Requester.Finch, []}}
            ]
          ] ++ component_spec(:downloader),
        doc: "see [Downloader Options](#t:settings/0-downloader-options).",
        subsection: "### Downloader options"
      ],
      spider_options: [
        type: :keyword_list,
        keys: component_spec(:spider),
        doc: "see [Spider Options](#t:settings/0-spider-options).",
        subsection: "### Spider options"
      ],
      item_processor_options: [
        type: :keyword_list,
        keys:
          [
            storage: [
              type: :atom,
              default: Storage.JsonLines,
              doc: "Set a storage module what are store items, "
            ],
            batchers: [
              type: :keyword_list,
              default: [
                default: [
                  concurrency: 1,
                  batch_size: 50,
                  batch_timeout: 1000
                ]
              ],
              keys: [*: [type: :keyword_list, keys: batcher_keys_spec]],
              doc:
                "See [Batchers Options](https://hexdocs.pm/broadway/Broadway.html#start_link/2-batchers-options), ",
              subsection: "#### Batchers options"
            ]
          ] ++ component_spec(:item_processor),
        doc: "see [ItemProcessor Options](#t:settings/0-itemprocessor_options).",
        subsection: "### ItemProcessor options"
      ],
      # Unknown top-level keys are accepted without validation.
      *: [type: :any]
    ]
  end

  # Shared schema for the options common to all three components
  # (:downloader, :spider, :item_processor), with per-component defaults.
  defp component_spec(component) do
    pipelines_spec = [
      type:
        {:list,
         {:or,
          [
            :mod_arg,
            {:fun, 1},
            {:fun, 2},
            {:custom, __MODULE__, :validate_pipeline, []}
          ]}},
      default: [],
      doc: "Each msg will handle by each pipelines, "
    ]

    processor_spec = [
      type: :keyword_list,
      default: [],
      keys: [
        concurrency: [type: :pos_integer, default: System.schedulers_online() * 2],
        min_demand: [type: :non_neg_integer],
        max_demand: [type: :non_neg_integer, default: 10],
        partition_by: [type: {:fun, 1}],
        spawn_opt: [type: :keyword_list],
        hibernate_after: [type: :pos_integer]
      ],
      doc:
        "See [Processors Options](https://hexdocs.pm/broadway/Broadway.html#start_link/2-processors-options), "
    ]

    rate_limiting_spec = [
      type: :non_empty_keyword_list,
      keys: [
        allowed_messages: [required: true, type: :pos_integer],
        interval: [required: true, type: :pos_integer]
      ],
      doc:
        "See [Producers Options - rate_limiting](https://hexdocs.pm/broadway/Broadway.html#start_link/2-producers-options), "
    ]

    # Per-component overrides. Note: inside each tuple the right-hand names
    # still refer to the outer (pre-rebinding) specs, so :downloader's
    # post_pipelines uses the original pipelines_spec with default [].
    {processor_spec, rate_limiting_spec, pipelines_spec, extra} =
      case component do
        :downloader ->
          {
            Keyword.put(processor_spec, :default, max_demand: 1),
            Keyword.put(rate_limiting_spec, :default, allowed_messages: 10, interval: 1000),
            Keyword.put(pipelines_spec, :default, [Pipeline.DuplicateFilter]),
            [post_pipelines: pipelines_spec]
          }

        :spider ->
          {
            Keyword.put(processor_spec, :default, max_demand: 1),
            rate_limiting_spec,
            pipelines_spec,
            []
          }

        :item_processor ->
          {
            processor_spec,
            rate_limiting_spec,
            Keyword.put(pipelines_spec, :default, [Pipeline.DuplicateFilter]),
            []
          }
      end

    [
      producer: [type: {:or, [:atom, :mod_arg]}, default: Producer.ETS],
      context: [type: :any, default: %{}],
      processor: processor_spec,
      rate_limiting: rate_limiting_spec,
      pipelines: pipelines_spec
    ] ++ extra
  end

  # Custom NimbleOptions validator: a pipeline may be a {fun/2, arg} tuple,
  # an {mod, fun, arg} tuple, or a bare module.
  def validate_pipeline(v = {fun, _arg}) when is_function(fun, 2), do: {:ok, v}
  def validate_pipeline(v = {mod, f, _arg}) when is_atom(mod) and is_atom(f), do: {:ok, v}
  def validate_pipeline(mod) when is_atom(mod), do: {:ok, mod}
  def validate_pipeline(v), do: {:error, "bad pipeline: #{inspect(v)}"}

  # Merges all settings layers in priority order (defaults -> global ->
  # config file -> SpiderModule.settings/0 -> direct settings) and validates
  # the result against `configuration_spec/0`. Raises if the spider module
  # cannot be loaded or the merged settings are invalid.
  def validate_settings!(spider, spider_settings) do
    global_settings = Application.get_env(:spider_man, :global_settings, [])
    local_settings = Application.get_env(:spider_man, spider, [])

    settings =
      @default_settings
      |> Utils.merge_settings(global_settings)
      |> Utils.merge_settings(local_settings)

    # The callback module defaults to the spider name itself.
    spider_module =
      Keyword.get_lazy(spider_settings, :spider_module, fn ->
        Keyword.get(settings, :spider_module, spider)
      end)

    with {:module, _} <- Code.ensure_loaded(spider_module),
         true <- function_exported?(spider_module, :settings, 0) do
      Utils.merge_settings(settings, spider_module.settings())
    else
      {:error, _} ->
        raise "could not load module: #{inspect(spider_module)} for spider:#{inspect(spider)}!"

      false ->
        # Module loads but defines no settings/0 callback - nothing to merge.
        settings
    end
    |> Utils.merge_settings(spider_settings)
    |> Keyword.merge(spider: spider, spider_module: spider_module)
    |> NimbleOptions.validate!(configuration_spec())
  end
end
|
lib/spider_man/configuration.ex
| 0.866472
| 0.659213
|
configuration.ex
|
starcoder
|
defmodule Sanbase.Alert.Trigger.TrendingWordsTriggerSettings do
  @moduledoc ~s"""
  Trigger settings for trending words alert.

  The alert supports the following operations:

  1. Send the list of trending words at predefined time every day
  2. Send an alert if some word enters the list of trending words.
  3. Send an alert if some project enters the list of trending words
  4. Send an alert if some project from a watchlist enters the list
  of trending words
  """

  @behaviour Sanbase.Alert.Trigger.Settings.Behaviour

  use Vex.Struct

  import Sanbase.Math, only: [to_integer: 1]
  import Sanbase.Alert.Validation
  import Sanbase.Alert.Utils

  alias __MODULE__
  alias Sanbase.Alert.Type
  alias Sanbase.SocialData.TrendingWords

  # Runtime-only fields are excluded when the settings are serialized.
  @derive {Jason.Encoder, except: [:filtered_target, :triggered?, :payload, :template_kv]}

  @trigger_type "trending_words"
  # How many of the top trending words are fetched and considered.
  @trending_words_size 10

  @enforce_keys [:type, :channel, :operation]
  defstruct type: @trigger_type,
            channel: nil,
            operation: %{},
            target: "default",
            # Private fields, not stored in DB.
            filtered_target: %{list: []},
            triggered?: false,
            payload: %{},
            template_kv: %{},
            extra_explanation: nil,
            include_default_explanation: false,
            template: nil

  # NOTE(review): the struct also defines a `:template` field that is missing
  # from this typespec — confirm and add it if intended.
  @type t :: %__MODULE__{
          type: Type.trigger_type(),
          channel: Type.channel(),
          operation: Type.operation(),
          # Private fields, not stored in DB.
          filtered_target: Type.filtered_target(),
          triggered?: boolean(),
          payload: Type.payload(),
          template_kv: Type.template_kv(),
          extra_explanation: Type.extra_explanation(),
          include_default_explanation: boolean()
        }

  # Validations
  validates(:operation, &valid_trending_words_operation?/1)
  validates(:channel, &valid_notification_channel?/1)
  validates(:target, &valid_target?/1)

  @spec type() :: String.t()
  def type(), do: @trigger_type

  # No extra processing is needed when a trigger of this type is created or
  # updated.
  def post_create_process(_trigger), do: :nochange
  def post_update_process(_trigger), do: :nochange

  # Fetches the current top trending words from the social data service.
  @spec get_data(%__MODULE__{}) :: TrendingWords.result()
  def get_data(%__MODULE__{}) do
    TrendingWords.get_currently_trending_words(@trending_words_size)
  end

  # private functions

  defimpl Sanbase.Alert.Settings, for: TrendingWordsTriggerSettings do
    @default_explanation "A coin's appearance in trending words may suggest an increased risk of local tops and short-term price correction."

    alias Sanbase.Model.Project

    def triggered?(%TrendingWordsTriggerSettings{triggered?: triggered}), do: triggered

    # No targets to check against - the alert cannot trigger.
    def evaluate(%TrendingWordsTriggerSettings{filtered_target: %{list: []}} = settings, _trigger) do
      %TrendingWordsTriggerSettings{settings | triggered?: false}
    end

    def evaluate(%TrendingWordsTriggerSettings{} = settings, _trigger) do
      case TrendingWordsTriggerSettings.get_data(settings) do
        {:ok, top_words} when is_list(top_words) and top_words != [] ->
          build_result(top_words, settings)

        # Fetch errors and empty results both mean "not triggered".
        _ ->
          %TrendingWordsTriggerSettings{settings | triggered?: false}
      end
    end

    def cache_key(%TrendingWordsTriggerSettings{} = settings) do
      construct_cache_key([settings.operation, settings.target])
    end

    # Operation 1: fire only inside a 15-minute window after the configured
    # daily trigger time.
    defp build_result(
           top_words,
           %{operation: %{send_at_predefined_time: true, trigger_time: trigger_time}} = settings
         ) do
      trigger_time = Sanbase.DateTimeUtils.time_from_iso8601!(trigger_time)
      now = Time.utc_now()
      after_15_mins = Time.add(now, 15 * 60, :second)

      case Sanbase.DateTimeUtils.time_in_range?(trigger_time, now, after_15_mins) do
        true ->
          template_kv = %{settings.target => template_kv(settings, top_words)}
          %TrendingWordsTriggerSettings{settings | triggered?: true, template_kv: template_kv}

        false ->
          %TrendingWordsTriggerSettings{settings | triggered?: false}
      end
    end

    # Operation 2: fire when any watched word is among the trending words.
    defp build_result(
           top_words,
           %{operation: %{trending_word: true}, filtered_target: %{list: words}} = settings
         ) do
      top_words = top_words |> Enum.map(&String.downcase(&1.word))

      trending_words =
        MapSet.intersection(MapSet.new(top_words), MapSet.new(words))
        |> Enum.to_list()

      case trending_words do
        [] ->
          %TrendingWordsTriggerSettings{settings | triggered?: false}

        [_ | _] = words ->
          # NOTE(review): the template_kv map is keyed by the whole list of
          # matched words here, while other clauses key by a string target —
          # presumably consumers handle both; verify.
          template_kv = %{words => template_kv(settings, words)}
          %TrendingWordsTriggerSettings{settings | triggered?: true, template_kv: template_kv}
      end
    end

    # Operation 3: fire when a watched project's name, ticker or slug is
    # among the trending words.
    defp build_result(
           top_words,
           %{operation: %{trending_project: true}, filtered_target: %{list: slugs}} = settings
         ) do
      projects = Project.List.by_slugs(slugs)

      top_words =
        top_words
        |> Enum.map(&String.downcase(&1.word))

      project_words =
        Enum.flat_map(projects, &[&1.name, &1.ticker, &1.slug])
        |> MapSet.new()
        |> Enum.map(&String.downcase/1)

      trending_words_mapset =
        MapSet.intersection(MapSet.new(top_words), MapSet.new(project_words))

      case Enum.empty?(trending_words_mapset) do
        true ->
          # If there are no trending words in the intersection there is no
          # point of checking the projects separately
          %TrendingWordsTriggerSettings{settings | triggered?: false}

        false ->
          # Build one payload entry per project that is actually trending.
          template_kv =
            Enum.reduce(projects, %{}, fn project, acc ->
              case Project.is_trending?(project, trending_words_mapset) do
                true -> Map.put(acc, project.slug, template_kv(settings, project))
                false -> acc
              end
            end)

          %TrendingWordsTriggerSettings{
            settings
            | triggered?: template_kv != %{},
              template_kv: template_kv
          }
      end
    end

    # Payload for operation 1: a table of all trending words with scores,
    # aligned by padding each word to the longest word's length.
    defp template_kv(
           %{operation: %{send_at_predefined_time: true, trigger_time: trigger_time}} = settings,
           top_words
         ) do
      max_len = get_max_len(top_words)

      top_words_strings =
        top_words
        |> Enum.sort_by(fn tw -> tw.score end, &>=/2)
        |> Enum.map(fn tw ->
          ~s/#{String.pad_trailing(tw.word, max_len)} | #{to_integer(tw.score)}/
        end)

      trending_words_str = Enum.join(top_words_strings, "\n")

      # Having only the trigger_time won't be enough for the payload - include
      # also the date
      kv = %{
        type: TrendingWordsTriggerSettings.type(),
        datetime: "#{Date.utc_today()} #{trigger_time}",
        operation: settings.operation,
        trending_words_list: top_words,
        trending_words_str: trending_words_str,
        sonar_url: SanbaseWeb.Endpoint.sonar_url()
      }

      template = """
      🔔 Trending words at: {{datetime}}
      ```
      {{trending_words_str}}
      ```
      """

      {template, kv}
      |> extend_with_datetime_link()
      |> maybe_extend_with_explanation(settings)
    end

    # Payload for operation 2 with exactly one matched word.
    defp template_kv(%{operation: %{trending_word: true}} = settings, [word]) do
      kv = %{
        type: TrendingWordsTriggerSettings.type(),
        operation: settings.operation,
        trending_words_list: [word],
        trending_words_str: "**#{word}**",
        trending_words_url: SanbaseWeb.Endpoint.trending_word_url(word)
      }

      template = """
      🔔 The word {{trending_words_str}} is in the top 10 trending words on crypto social media.
      """

      {template, kv}
      |> extend_with_datetime_link()
      |> maybe_extend_with_explanation(settings)
    end

    # Payload for operation 2 with two or more matched words: renders
    # "**a**,**b** and **c**".
    defp template_kv(%{operation: %{trending_word: true}} = settings, [_, _ | _] = words) do
      {last, previous} = List.pop_at(words, -1)
      words_str = (Enum.map(previous, &"**#{&1}**") |> Enum.join(",")) <> " and **#{last}**"

      kv = %{
        type: TrendingWordsTriggerSettings.type(),
        operation: settings.operation,
        trending_words_list: words,
        trending_words_str: words_str,
        trending_words_url: SanbaseWeb.Endpoint.trending_word_url(words)
      }

      template = """
      🔔 The words {{trending_words_str}} are in the top 10 trending words on crypto social media.
      """

      {template, kv}
      |> extend_with_datetime_link()
      |> maybe_extend_with_explanation(settings)
    end

    # Payload for operation 3: a single trending project.
    defp template_kv(%{operation: %{trending_project: true}} = settings, project) do
      kv = %{
        type: TrendingWordsTriggerSettings.type(),
        operation: settings.operation,
        project_name: project.name,
        project_ticker: project.ticker,
        project_slug: project.slug
      }

      template = """
      🔔 \#{{project_ticker}} | **{{project_name}}** is in the top 10 trending words on crypto social media.
      """

      {template, kv}
      |> extend_with_datetime_link()
      |> maybe_extend_with_explanation(settings)
    end

    defp get_max_len(top_words) do
      top_words
      |> Enum.map(&String.length(&1.word))
      |> Enum.max()
    end

    # Appends a link to the trending-words page for the current timestamp and
    # adds the human-readable/ISO datetimes to the kv map.
    defp extend_with_datetime_link({template, kv}) do
      now = DateTime.utc_now() |> DateTime.truncate(:second)
      datetime_iso = now |> DateTime.to_iso8601()
      datetime_human_readable = now |> Sanbase.DateTimeUtils.to_human_readable()

      template =
        template <> "[Trending words at {{datetime_human_readable}}]({{trending_words_url}})\n"

      kv =
        kv
        |> Map.put(:datetime_human_readable, datetime_human_readable)
        |> Map.put(:datetime_iso, datetime_iso)
        |> Map.put(
          :trending_words_url,
          SanbaseWeb.Endpoint.trending_words_datetime_url(datetime_iso)
        )

      {template, kv}
    end

    # Appends the extra explanation (user-provided, or the default one when
    # include_default_explanation is set) to the template and kv map.
    defp maybe_extend_with_explanation({template, kv}, settings) do
      default_explanation =
        case settings.include_default_explanation do
          true -> @default_explanation
          false -> nil
        end

      explanation = settings.extra_explanation || default_explanation

      {maybe_extend_template(template, explanation), Map.put(kv, :extra_explanation, explanation)}
    end

    defp maybe_extend_template(template, nil), do: template

    defp maybe_extend_template(template, _extra_explanation) do
      template <> "{{extra_explanation}}\n"
    end
  end
end
|
lib/sanbase/alerts/trigger/settings/trending_words_trigger_settings.ex
| 0.785555
| 0.489015
|
trending_words_trigger_settings.ex
|
starcoder
|
defmodule Oban.Telemetry do
  @moduledoc """
  Telemetry integration for event metrics, logging and error reporting.

  Oban currently emits an event when a job has executed: `[:oban, :success]` if the job succeeded
  or `[:oban, :failure]` if there was an error or the process crashed.

  All job events share the same details about the job that was executed. In addition, failed jobs
  provide the error type, the error itself, and the stacktrace. The following chart shows which
  metadata you can expect for each event:

  | event      | metadata                                                                       |
  | ---------- | ---------------------------------------------------------------------------- |
  | `:success` | `:id, :args, :queue, :worker, :attempt, :max_attempt`                         |
  | `:failure` | `:id, :args, :queue, :worker, :attempt, :max_attempt, :kind, :error, :stack`  |

  For `:failure` events the metadata will include details about what caused the failure. The
  `:kind` value is determined by how an error occurred. Here are the possible kinds:

  * `:error` — from an `{:error, error}` return value. Some Erlang functions may also throw an
    `:error` tuple, which will be reported as `:error`.
  * `:exception` — from a rescued exception
  * `:exit` — from a caught process exit
  * `:throw` — from a caught value, this doesn't necessarily mean that an error occurred and the
    error value is unpredictable

  ## Default Logger

  A default log handler that emits structured JSON is provided, see `attach_default_logger/0` for
  usage. Otherwise, if you would prefer more control over logging or would like to instrument
  events you can write your own handler.

  ## Examples

  A handler that only logs a few details about failed jobs:

  ```elixir
  defmodule MicroLogger do
    require Logger

    def handle_event([:oban, :failure], %{duration: duration}, meta, nil) do
      Logger.warning("[#\{meta.queue}] #\{meta.worker} failed in #\{duration}")
    end
  end

  :telemetry.attach("oban-logger", [:oban, :failure], &MicroLogger.handle_event/4, nil)
  ```

  Another great use of execution data is error reporting. Here is an example of integrating with
  [Honeybadger][honey], but only reporting jobs that have failed 3 times or more:

  ```elixir
  defmodule ErrorReporter do
    def handle_event([:oban, :failure], _timing, %{attempt: attempt} = meta, nil) do
      if attempt >= 3 do
        context = Map.take(meta, [:id, :args, :queue, :worker])

        Honeybadger.notify(meta.error, context, meta.stack)
      end
    end
  end

  :telemetry.attach("oban-errors", [:oban, :failure], &ErrorReporter.handle_event/4, nil)
  ```

  [honey]: https://honeybadger.io
  """
  @moduledoc since: "0.4.0"

  require Logger

  @doc """
  Attaches a default structured JSON Telemetry handler for logging.

  This function attaches a handler that outputs logs with the following fields:

  * `source` — always "oban"
  * `event` — either `:success` or `:failure` depending on whether the job succeeded or errored
  * `args` — a map of the job's raw arguments
  * `worker` — the job's worker module
  * `queue` — the job's queue
  * `duration` — the job's runtime duration in microseconds

  ## Examples

      :ok = Oban.Telemetry.attach_default_logger()
  """
  @doc since: "0.4.0"
  @spec attach_default_logger() :: :ok | {:error, :already_exists}
  def attach_default_logger do
    events = [[:oban, :success], [:oban, :failure]]

    :telemetry.attach_many("oban-default-logger", events, &handle_event/4, :no_config)
  end

  # Handler for the default logger: encodes the job outcome as one JSON line.
  # The closure defers JSON encoding until the Logger level check passes.
  @doc false
  @spec handle_event([atom()], map(), map(), :no_config) :: :ok
  def handle_event([:oban, event], measurement, meta, :no_config)
      when event in [:success, :failure] do
    Logger.info(fn ->
      Jason.encode!(%{
        source: "oban",
        event: event,
        args: meta[:args],
        worker: meta[:worker],
        queue: meta[:queue],
        duration: measurement[:duration]
      })
    end)
  end
end
|
lib/oban/telemetry.ex
| 0.869645
| 0.927495
|
telemetry.ex
|
starcoder
|
defmodule Benchee.Statistics do
@moduledoc """
Statistics related functionality that is meant to take the raw benchmark run
times and then compute statistics like the average and the standard deviation.
"""
alias Benchee.{Conversion.Duration, Scenario, Suite, Utility.Parallel}
alias Benchee.Statistics.Mode
alias Benchee.Statistics.Percentile
require Integer
# All statistics computed for one collection of samples; sample_size defaults
# to 0 so an empty struct represents "no measurements".
defstruct [
  :average,
  :ips,
  :std_dev,
  :std_dev_ratio,
  :std_dev_ips,
  :median,
  :percentiles,
  :mode,
  :minimum,
  :maximum,
  sample_size: 0
]

# The mode is a single number, a list of numbers that occur equally often,
# or nil when no sample occurs more than once.
@type mode :: [number] | number | nil

@type t :: %__MODULE__{
        average: float,
        ips: float | nil,
        std_dev: float,
        std_dev_ratio: float,
        std_dev_ips: float | nil,
        median: number,
        percentiles: %{number => float},
        mode: mode,
        minimum: number,
        maximum: number,
        sample_size: integer
      }

@type samples :: [number]
@doc """
Takes a job suite with job run times, returns a map representing the
statistics of the job suite as follows:
* average - average run time of the job in μs (the lower the better)
* ips - iterations per second, how often can the given function be
executed within one second (the higher the better)
* std_dev - standard deviation, a measurement how much results vary
(the higher the more the results vary)
* std_dev_ratio - standard deviation expressed as how much it is relative to
the average
* std_dev_ips - the absolute standard deviation of iterations per second
(= ips * std_dev_ratio)
* median - when all measured times are sorted, this is the middle
value (or average of the two middle values when the number of times is
even). More stable than the average and somewhat more likely to be a
typical value you see.
* percentiles - a map of percentile ranks. These are the values below
which x% of the run times lie. For example, 99% of run times are shorter
than the 99th percentile (99th %) rank.
is a value for which 99% of the run times are shorter.
* mode - the run time(s) that occur the most. Often one value, but
can be multiple values if they occur the same amount of times. If no value
occurs at least twice, this value will be nil.
* minimum - the smallest (fastest) run time measured for the job
* maximum - the biggest (slowest) run time measured for the job
* sample_size - the number of run time measurements taken
## Parameters
* `suite` - the job suite represented as a map after running the measurements,
required to have the run_times available under the `run_times` key
## Examples
iex> scenarios = [
...> %Benchee.Scenario{
...> job_name: "<NAME>",
...> run_time_data: %Benchee.CollectionData{
...> samples: [200, 400, 400, 400, 500, 500, 500, 700, 900]
...> },
...> memory_usage_data: %Benchee.CollectionData{
...> samples: [200, 400, 400, 400, 500, 500, 500, 700, 900]
...> },
...> input_name: "Input",
...> input: "Input"
...> }
...> ]
iex> suite = %Benchee.Suite{scenarios: scenarios}
iex> Benchee.Statistics.statistics(suite)
%Benchee.Suite{
scenarios: [
%Benchee.Scenario{
job_name: "<NAME>",
input_name: "Input",
input: "Input",
run_time_data: %Benchee.CollectionData{
samples: [200, 400, 400, 400, 500, 500, 500, 700, 900],
statistics: %Benchee.Statistics{
average: 500.0,
ips: 2000_000.0,
std_dev: 200.0,
std_dev_ratio: 0.4,
std_dev_ips: 800_000.0,
median: 500.0,
percentiles: %{50 => 500.0, 99 => 900.0},
mode: [500, 400],
minimum: 200,
maximum: 900,
sample_size: 9
}
},
memory_usage_data: %Benchee.CollectionData{
samples: [200, 400, 400, 400, 500, 500, 500, 700, 900],
statistics: %Benchee.Statistics{
average: 500.0,
ips: nil,
std_dev: 200.0,
std_dev_ratio: 0.4,
std_dev_ips: nil,
median: 500.0,
percentiles: %{50 => 500.0, 99 => 900.0},
mode: [500, 400],
minimum: 200,
maximum: 900,
sample_size: 9
}
}
}
],
system: nil
}
"""
@spec statistics(Suite.t()) :: Suite.t()
def statistics(suite = %Suite{scenarios: scenarios}) do
  config = suite.configuration
  # The median (50th percentile) is always computed even if not requested.
  percentiles = Enum.uniq([50 | config.percentiles])

  # Statistics for each scenario are independent, so compute them in parallel.
  scenarios_with_statistics =
    Parallel.map(scenarios, fn scenario ->
      # ips (iterations per second) only makes sense for run times, not for
      # memory usage, hence add_ips only on the run time statistics.
      run_time_stats = scenario.run_time_data.samples |> job_statistics(percentiles) |> add_ips
      memory_stats = job_statistics(scenario.memory_usage_data.samples, percentiles)

      %{
        scenario
        | run_time_data: %{scenario.run_time_data | statistics: run_time_stats},
          memory_usage_data: %{scenario.memory_usage_data | statistics: memory_stats}
      }
    end)

  %Suite{suite | scenarios: sort(scenarios_with_statistics)}
end
@doc """
Calculates statistical data based on a series of run times for a job
in microseconds.
## Examples
iex> run_times = [200, 400, 400, 400, 500, 500, 500, 700, 900]
iex> Benchee.Statistics.job_statistics(run_times, [50, 99])
%Benchee.Statistics{
average: 500.0,
ips: nil,
std_dev: 200.0,
std_dev_ratio: 0.4,
std_dev_ips: nil,
median: 500.0,
percentiles: %{50 => 500.0, 99 => 900.0},
mode: [500, 400],
minimum: 200,
maximum: 900,
sample_size: 9
}
iex> Benchee.Statistics.job_statistics([100], [50, 99])
%Benchee.Statistics{
average: 100.0,
ips: nil,
std_dev: 0,
std_dev_ratio: 0.0,
std_dev_ips: nil,
median: 100.0,
percentiles: %{50 => 100.0, 99 => 100.0},
mode: nil,
minimum: 100,
maximum: 100,
sample_size: 1
}
iex> Benchee.Statistics.job_statistics([], [])
%Benchee.Statistics{
average: nil,
ips: nil,
std_dev: nil,
std_dev_ratio: nil,
std_dev_ips: nil,
median: nil,
percentiles: nil,
mode: nil,
minimum: nil,
maximum: nil,
sample_size: 0
}
"""
@spec job_statistics(samples, list) :: __MODULE__.t()
# No measurements: every statistic stays at its struct default (nil),
# only the sample size of 0 is recorded.
def job_statistics([], _percentiles), do: %__MODULE__{sample_size: 0}

def job_statistics(measurements, percentiles) do
  sample_size = length(measurements)
  average = Enum.sum(measurements) / sample_size
  deviation = standard_deviation(measurements, average, sample_size)
  percentile_values = Percentile.percentiles(measurements, percentiles)

  %__MODULE__{
    average: average,
    std_dev: deviation,
    # Guard against division by zero for an all-zero sample set.
    std_dev_ratio: if(average == 0, do: 0, else: deviation / average),
    # The median is by definition the 50th percentile, which statistics/1
    # always requests; a missing key here would be a bug, hence fetch!.
    median: Map.fetch!(percentile_values, 50),
    percentiles: percentile_values,
    mode: Mode.mode(measurements),
    minimum: Enum.min(measurements),
    maximum: Enum.max(measurements),
    sample_size: sample_size
  }
end
# A single sample has no spread, so its deviation is defined as 0
# (also avoids dividing by `sample_size - 1 == 0` below).
defp standard_deviation(_samples, _average, 1), do: 0

# Sample (Bessel-corrected) standard deviation: sqrt of the sum of
# squared deviations divided by (n - 1).
defp standard_deviation(samples, average, sample_size) do
  samples
  |> Enum.reduce(0, fn sample, acc -> acc + :math.pow(sample - average, 2) end)
  |> Kernel./(sample_size - 1)
  |> :math.sqrt()
end
# Nothing was measured: there is no average to derive ips from.
defp add_ips(%__MODULE__{sample_size: 0} = stats), do: stats
# A zero average would divide by zero below, so leave ips unset.
defp add_ips(%__MODULE__{average: 0.0} = stats), do: stats

defp add_ips(stats) do
  # Iterations per second: how many runs of `average` nanoseconds fit
  # into one second.
  iterations_per_second =
    Duration.convert_value({1, :second}, :nanosecond) / stats.average

  %__MODULE__{
    stats
    | ips: iterations_per_second,
      std_dev_ips: iterations_per_second * stats.std_dev_ratio
  }
end
@doc """
Calculate additional percentiles and add them to the
`run_time_data.statistics`. Should only be used after `statistics/1`, to
calculate extra values that may be needed for reporting.
## Examples
iex> scenarios = [
...> %Benchee.Scenario{
...> job_name: "<NAME>",
...> run_time_data: %Benchee.CollectionData{
...> samples: [200, 400, 400, 400, 500, 500, 500, 700, 900]
...> },
...> memory_usage_data: %Benchee.CollectionData{
...> samples: [200, 400, 400, 400, 500, 500, 500, 700, 900]
...> },
...> input_name: "Input",
...> input: "Input"
...> }
...> ]
iex> %Benchee.Suite{scenarios: scenarios}
...> |> Benchee.Statistics.statistics
...> |> Benchee.Statistics.add_percentiles([25, 75])
%Benchee.Suite{
scenarios: [
%Benchee.Scenario{
job_name: "<NAME>",
input_name: "Input",
input: "Input",
run_time_data: %Benchee.CollectionData{
samples: [200, 400, 400, 400, 500, 500, 500, 700, 900],
statistics: %Benchee.Statistics{
average: 500.0,
ips: 2_000_000.0,
std_dev: 200.0,
std_dev_ratio: 0.4,
std_dev_ips: 800_000.0,
median: 500.0,
percentiles: %{25 => 400.0, 50 => 500.0, 75 => 600.0, 99 => 900.0},
mode: [500, 400],
minimum: 200,
maximum: 900,
sample_size: 9
}
},
memory_usage_data: %Benchee.CollectionData{
samples: [200, 400, 400, 400, 500, 500, 500, 700, 900],
statistics: %Benchee.Statistics{
average: 500.0,
ips: nil,
std_dev: 200.0,
std_dev_ratio: 0.4,
std_dev_ips: nil,
median: 500.0,
percentiles: %{50 => 500.0, 99 => 900.0},
mode: [500, 400],
minimum: 200,
maximum: 900,
sample_size: 9
}
}
}
]
}
"""
def add_percentiles(%Suite{scenarios: scenarios} = suite, percentile_ranks) do
  updated_scenarios =
    Parallel.map(scenarios, fn scenario ->
      # Compute the requested extra ranks from the raw samples and merge
      # them into the percentiles already present on the statistics,
      # keeping previously computed values intact.
      additional =
        Percentile.percentiles(scenario.run_time_data.samples, percentile_ranks)

      update_in(
        scenario.run_time_data.statistics.percentiles,
        &Map.merge(&1, additional)
      )
    end)

  %Suite{suite | scenarios: updated_scenarios}
end
@spec sort([Scenario.t()]) :: [Scenario.t()]
# Orders scenarios fastest-first by average run time, using average
# memory usage as the tie-breaker (tuples compare element-wise).
defp sort(scenarios) do
  Enum.sort_by(
    scenarios,
    &{&1.run_time_data.statistics.average, &1.memory_usage_data.statistics.average}
  )
end
end
|
lib/benchee/statistics.ex
| 0.940776
| 0.736045
|
statistics.ex
|
starcoder
|
defmodule LcdDisplay.HD44780.Util do
  @moduledoc """
  A collection of utility functions that are used for display drivers.
  """

  @type row_col_pos :: {non_neg_integer, non_neg_integer}

  @typedoc """
  Typically 2x16 or 4x20.
  """
  @type display_config :: %{
          required(:rows) => LcdDisplay.HD44780.Driver.num_rows(),
          required(:cols) => LcdDisplay.HD44780.Driver.num_cols(),
          any => any
        }

  @doc """
  Determines a Display Data RAM (DDRAM) address based on the display configuration (rows and columns)
  and the zero-indexed cursor position (row and column).

  ## Examples

      iex> LcdDisplay.HD44780.Util.determine_ddram_address({0,0}, %{rows: 2, cols: 16})
      0

      iex> LcdDisplay.HD44780.Util.determine_ddram_address({0,15}, %{rows: 2, cols: 16})
      15

      iex> LcdDisplay.HD44780.Util.determine_ddram_address({1,0}, %{rows: 2, cols: 16})
      64

      iex> LcdDisplay.HD44780.Util.determine_ddram_address({1,15}, %{rows: 2, cols: 16})
      79
  """
  @spec determine_ddram_address(row_col_pos, display_config) :: non_neg_integer
  def determine_ddram_address({row_pos, col_pos} = _row_col_pos, %{rows: num_rows, cols: num_cols} = _display_config)
      when is_number(num_rows) and is_number(num_cols) and
             is_number(row_pos) and is_number(col_pos) and
             # BUGFIX: the second condition previously re-checked `num_rows >= 1`,
             # leaving `num_cols` unvalidated; it must check `num_cols >= 1`.
             num_rows >= 1 and num_cols >= 1 and
             row_pos >= 0 and col_pos >= 0 do
    # Clamp out-of-range positions to the last column/row instead of
    # computing an address outside the display area.
    col_pos = min(col_pos, num_cols - 1)
    row_pos = min(row_pos, num_rows - 1)

    # Address = row base offset + column.
    num_cols
    |> ddram_row_offsets()
    |> elem(row_pos)
    |> Kernel.+(col_pos)
  end

  @doc """
  Determine a list of row offsets based on how many columns the display has.

  ```
  0x00: | ROW 0 | ROW 2 |
  0x40: | ROW 1 | ROW 3 |
  ```

  For more info, please refer to [Hitachi HD44780 datasheet](https://cdn-shop.adafruit.com/datasheets/HD44780.pdf) page 10.

  ## Examples

      iex> LcdDisplay.HD44780.Util.ddram_row_offsets(8)
      {0, 64, 8, 72}

      iex> LcdDisplay.HD44780.Util.ddram_row_offsets(16)
      {0, 64, 16, 80}

      iex> LcdDisplay.HD44780.Util.ddram_row_offsets(20)
      {0, 64, 20, 84}
  """
  @spec ddram_row_offsets(LcdDisplay.HD44780.Driver.num_cols()) :: {0, 64, pos_integer, pos_integer}
  def ddram_row_offsets(num_cols) when is_number(num_cols) and num_cols >= 1 do
    # Rows 2 and 3 are logical continuations of rows 0 and 1 respectively,
    # so their base addresses are shifted right by one row width.
    {
      0x00,
      0x40,
      0x00 + num_cols,
      0x40 + num_cols
    }
  end

  @doc """
  Adjusts the backlight-related values in the display driver state.

  ## Examples

      # Default to the white LED when no color is specified.
      iex> LcdDisplay.HD44780.Util.adjust_backlight_config(%{backlight: true, red: false, green: false, blue: false})
      %{backlight: true, blue: true, green: true, red: true}

      # Turn off all colors when the backlight is turned off.
      iex> LcdDisplay.HD44780.Util.adjust_backlight_config(%{backlight: false, red: true, green: true, blue: true})
      %{backlight: false, blue: false, green: false, red: false}

      # Else do nothing
      iex> LcdDisplay.HD44780.Util.adjust_backlight_config(%{backlight: true, red: true, green: false, blue: false})
      %{backlight: true, blue: false, green: false, red: true}
  """
  @spec adjust_backlight_config(map) :: map
  def adjust_backlight_config(%{backlight: backlight, red: red, green: green, blue: blue} = display) do
    display
    |> Map.merge(
      # Step 1: Default to the white LED (all channels on) when no color is specified.
      if(!red && !green && !blue, do: %{red: true, green: true, blue: true}, else: %{})
    )
    |> Map.merge(
      # Step 2: Turn off all colors when the backlight is turned off.
      if(backlight, do: %{}, else: %{red: false, green: false, blue: false})
    )
  end

  @doc """
  Shuffles the RGB boolean values in the display driver state.
  """
  @spec shuffle_color(map) :: map
  def shuffle_color(display) do
    # Pick one of two channel-count patterns (one color on, or two colors on),
    # then shuffle which channels those are. White (all on) and none (all off)
    # are deliberately excluded from the candidates.
    display
    |> Map.merge(
      ~w(red green blue)a
      |> Enum.zip(
        [[true, false, false], [true, true, false]]
        |> Enum.shuffle()
        |> Enum.at(0)
        |> Enum.shuffle()
      )
      |> Enum.into(%{})
    )
  end
end
|
lib/lcd_display/driver/hd44780_util.ex
| 0.887662
| 0.831964
|
hd44780_util.ex
|
starcoder
|
defmodule Keyword do
  @moduledoc """
  A keyword is a list of tuples where the first element
  of the tuple is an atom and the second element can be
  any value. The list is sorted by the first element of
  each tuple.

  A keyword may have duplicated keys, so it is not strictly
  a dictionary. However most of the functions in this module
  allow it to behave exactly as a dictionary. For example,
  `Keyword.get` will get the first entry matching the given
  key, regardless if duplicated entries exist. Similarly,
  `Keyword.put` and `Keyword.delete` ensure all duplicated
  entries for a given key are removed when invoked.

  This module uses `==` as operator to check if two keys
  are equal or not.
  """

  # NOTE(review): this module is written in pre-1.0 Elixir syntax:
  # `//` marks default arguments (modern `\\`) and `lc ... inlist ...`
  # is the old list-comprehension form (modern `for ... <- ...`).
  # It will not compile on a modern Elixir compiler; kept byte-identical.
  #
  # All functions below rely on the invariant that the keyword list is
  # sorted by key, which allows early termination (`key < k` clauses).

  @doc """
  Creates a Keyword from enum. Differently from `Keyword.new`
  that behaves as a dict, `Keyword.from_enum` does not remove
  duplicated entries.
  """
  def from_enum(enum) do
    # Sorting establishes the sorted-by-key invariant the rest of this
    # module depends on.
    Enum.qsort(enum)
  end

  @doc """
  Returns an empty keywords list, i.e. an empty list.
  """
  def new do
    []
  end

  @doc """
  Creates a Keyword from an enumerable. Similarly to dicts,
  duplicated entries are removed, the latest one prevails.

  ## Examples

      Keyword.new [{:b,1},{:a,2}]
      #=> [a: 2, b: 1]
  """
  def new(pairs) do
    # put/3 keeps the list sorted and drops earlier duplicates,
    # so the last occurrence of a key wins.
    Enum.reduce pairs, [], fn {k, v}, keywords ->
      put(keywords, k, v)
    end
  end

  @doc """
  Creates a Keyword from an enumerable with the
  help of the transformation function. Duplicated
  entries are removed, the latest one prevails.

  ## Examples

      Keyword.new [:a, :b], fn x -> {x,x} end
      #=> [a: :a, b: :b]
  """
  def new(pairs, transform) do
    # `transform` must return a {key, value} tuple for each element.
    Enum.reduce pairs, [], fn i, keywords ->
      { k, v } = transform.(i)
      put(keywords, k, v)
    end
  end

  @doc """
  Gets the value for a specific key.
  If the key does not exist, returns the default value
  (nil if no default value is given).

  If duplicated entries exist, the first one is returned.
  Use get_values/2 to retrieve all entries.

  ## Examples

      Keyword.get [a: 1], :a      #=> 1
      Keyword.get [a: 1], :b      #=> nil
      Keyword.get [a: 1], :b, 3   #=> 3
  """
  # Sorted invariant: once the head key exceeds the target, the key
  # cannot appear later in the list, so we can return the default.
  def get(keywords, key, default // nil)
  def get([{k, _}|_], key, default) when key < k, do: default
  def get([{k, _}|d], key, default) when key > k, do: get(d, key, default)
  def get([{_, value}|_], _key, _default), do: value
  def get([], _, default), do: default

  @doc """
  Gets the value for a specific key. If the key does not exist,
  an error is raised.

  ## Examples

      Keyword.get! [a: 1], :a  #=> 1
      Keyword.get! [a: 1], :b  #=> raises KeyError[key: :b]
  """
  # Same traversal as get/3, but raises instead of returning a default.
  def get!([{k, _}|_], key) when key < k, do: raise(Keyword.KeyError, key: key)
  def get!([{k, _}|d], key) when key > k, do: get!(d, key)
  def get!([{_, value}|_], _key), do: value
  def get!([], key), do: raise(Keyword.KeyError, key: key)

  @doc """
  Gets all values for a specific key.

  ## Examples

      Keyword.get_values [a: 1, a: 2], :a
      #=> [1,2]
  """
  # Duplicated keys are adjacent (list is sorted), so we collect values
  # until the key changes.
  def get_values([{k, _}|_], key) when key < k, do: []
  def get_values([{k, _}|d], key) when key > k, do: get_values(d, key)
  def get_values([{_, value}|d], key), do: [value|get_values(d, key)]
  def get_values([], _), do: []

  @doc """
  Returns all keys from the keywords list. Duplicated
  keys appear duplicated in the final list of keys.

  ## Examples

      Keyword.keys [a: 1, b: 2]  #=> [:a,:b]
  """
  def keys(keywords) do
    # Old-style list comprehension (modern: `for {key, _} <- keywords, do: key`).
    lc { key, _ } inlist keywords, do: key
  end

  @doc """
  Returns all values.

  ## Examples

      Keyword.values [a: 1, b: 2]  #=> [1,2]
  """
  def values(keywords) do
    lc { _, value } inlist keywords, do: value
  end

  @doc """
  Deletes all entries in the keywords list for a specific key.
  If the key does not exist, returns the keywords list unchanged.

  Use `delete_first` to delete just the first entry in case of
  duplicated keys.

  ## Examples

      Keyword.delete [a: 1, b: 2], :a  #=> [b: 2]
      Keyword.delete [b: 2], :a        #=> [b: 2]
  """
  # NOTE(review): `delete_first` is referenced above but is not defined
  # in this chunk — confirm it exists elsewhere.
  def delete([{k, _}|_] = keywords, key) when key < k, do: keywords
  def delete([{k, _} = e|tail], key) when key > k, do: [e|delete(tail, key)]
  def delete([{_, _}|tail], key), do: delete(tail, key)
  def delete([], _), do: []

  @doc """
  Sets the given `value` under `key`.

  If a previous value is already stored, all entries are
  removed and the value is overridden.

  ## Examples

      Keyword.put [a: 1, b: 2], :a, 3
      #=> [a: 3, b: 2]
  """
  # NOTE(review): the original docs referenced `put_other/3`, which is
  # not defined in this chunk — confirm before relying on it.
  # Insertion keeps the list sorted; on an exact key match all remaining
  # duplicates are removed via delete/2.
  def put([{k, _} = e|keywords], key, value) when key < k and is_atom(key) do
    [{key, value},e|keywords]
  end

  def put([{k, _} = e|keywords], key, value) when key > k do
    [e|put(keywords, key, value)]
  end

  def put([{key, _}|keywords], key, value) when is_atom(key) do
    [{key, value}|delete(keywords, key)]
  end

  def put([], key, value) when is_atom(key) do
    [{key, value}]
  end

  @doc """
  Merges two keywords lists into one. If they have duplicated
  entries, the one given as second argument wins.

  ## Examples

      Keyword.merge [a: 1, b: 2], [a: 3, d: 4]
      #=> [a:3, b:2, d: 4]
  """
  def merge(d1, d2) do
    merge(d1, d2, fn _k, _v1, v2 -> v2 end)
  end

  @doc """
  Merges two keywords lists into one. If they have duplicated
  entries, the given function is invoked to solve conflicts.

  ## Examples

      Keyword.merge [a: 1, b: 2], [a: 3, d: 4], fn _k, v1, v2 ->
        v1 + v2
      end
      #=> [a:4, b:2, d: 4]
  """
  # Merge of two sorted lists (merge-sort style): advance whichever side
  # has the smaller head key; equal keys are resolved via `fun`.
  def merge([{k1, _} = e1|d1], [{k2, _} = e2|d2], fun) when k1 < k2 and is_atom(k1) do
    [e1|merge(d1, [e2|d2], fun)]
  end

  def merge([{k1, _} = e1|d1], [{k2, _} = e2|d2], fun) when k1 > k2 and is_atom(k2) do
    [e2|merge([e1|d1], d2, fun)]
  end

  def merge([{k1, v1}|d1], [{k1, v2}|d2], fun) do
    [{k1, fun.(k1, v1, v2)}|merge(d1, d2, fun)]
  end

  def merge([], d2, _fun), do: d2
  def merge(d1, [], _fun), do: d1

  @doc """
  Returns whether a given key exists in the given keywords.

  ## Examples

      Keyword.key?([a: 1], :a)
      #=> true
      Keyword.key?([a: 1], :b)
      #=> false
  """
  def key?([{k, _}|_], key) when key < k, do: false
  def key?([{k, _}|d], key) when key > k, do: key?(d, key)
  def key?([{_, _}|_], _key), do: true
  def key?([], _), do: false
end
|
lib/elixir/lib/keyword.ex
| 0.8835
| 0.691413
|
keyword.ex
|
starcoder
|
defmodule EWallet.Exchange do
  @moduledoc """
  Provides exchange functionalities.
  """
  alias EWallet.Exchange.Calculation
  alias EWalletDB.{ExchangePair, Token}

  # The private types for calculation parameters
  @typep non_neg_or_nil() :: non_neg_integer() | nil

  @doc """
  Retrieves the exchange rate of the given token pair, adjusted for the `subunit_to_unit`
  differences of the two tokens and thus can be used directly on tokens with different
  `subunit_to_unit` values.

  Returns `{:ok, rate, pair}` if the exchange pair is found.
  """
  @spec get_rate(from_token :: %Token{}, to_token :: %Token{}) ::
          {:ok, Decimal.t(), %ExchangePair{}} | {:error, atom()}
  def get_rate(from_token, to_token) do
    case ExchangePair.fetch_exchangable_pair(from_token, to_token) do
      {:ok, pair} ->
        rate = Decimal.new(pair.rate)
        # Scale the raw pair rate by the ratio of subunit sizes so that the
        # returned rate applies directly to subunit amounts.
        subunit_scale = Decimal.div(to_token.subunit_to_unit, from_token.subunit_to_unit)
        {:ok, Decimal.mult(rate, subunit_scale), pair}

      {:error, _} = error ->
        error
    end
  end

  @doc """
  Validates that the given `from_amount` and `to_amount` matches the exchange pair.

  For same-token transactions, returns `true` if `from_amount` and `to_amount` are equal,
  otherwise returns `false`.

  For cross-token transactions, returns `true` if the amounts match the rate
  of the exchange pair, otherwise returns `false`.
  """
  # BUGFIX (spec only): the return type previously omitted the
  # `{:error, atom(), String.t()}` triple that several clauses return.
  @spec validate(
          from_amount :: non_neg_or_nil() | Decimal.t(),
          from_token :: %Token{},
          to_amount :: non_neg_or_nil() | Decimal.t(),
          to_token :: %Token{}
        ) :: {:ok, Calculation.t()} | {:error, atom()} | {:error, atom(), String.t()}
  # Converts `from_amount` and `to_amount` to Decimal before operating on them
  def validate(from_amount, from_token, to_amount, to_token) when is_number(from_amount) do
    validate(Decimal.new(from_amount), from_token, to_amount, to_token)
  end

  def validate(from_amount, from_token, to_amount, to_token) when is_number(to_amount) do
    validate(from_amount, from_token, Decimal.new(to_amount), to_token)
  end

  # Same-token: valid if `from_amount` and `to_amount` are equal, error if not.
  # Matching the same `amount` variable twice requires the two amounts to be equal.
  def validate(amount, %{uuid: uuid} = token, amount, %{uuid: uuid}) do
    {:ok, build_result(amount, token, amount, token, Decimal.new(1), nil)}
  end

  def validate(from_amount, %{uuid: uuid}, to_amount, %{uuid: uuid}) do
    {:error, :exchange_invalid_rate,
     "expected the same 'from_amount' and 'to_amount' when given the same token, " <>
       "got #{from_amount} and #{to_amount}"}
  end

  # Cross-token: valid if the amounts match the exchange rate.
  def validate(from_amount, from_token, to_amount, to_token) do
    with {:ok, rate, pair} <- get_rate(from_token, to_token),
         expected_to_amount <- Decimal.mult(from_amount, rate),
         {:ok, expected_to_amount} <- normalize(expected_to_amount),
         true <-
           Decimal.equal?(to_amount, expected_to_amount) ||
             {:error, :exchange_invalid_rate, expected_to_amount} do
      {:ok, build_result(from_amount, from_token, to_amount, to_token, rate, pair)}
    else
      {:error, :exchange_amounts_too_small, expected_to_amount} ->
        {:error, :exchange_amounts_too_small,
         "expected the 'from_amount' and 'to_amount' to be greater than zero, " <>
           "got #{from_amount} and #{expected_to_amount}"}

      {:error, :exchange_invalid_rate, expected_to_amount} ->
        {:error, :exchange_invalid_rate,
         "expected 'from_amount' to be #{from_amount} and 'to_amount' to be #{expected_to_amount}, " <>
           "got #{from_amount} and #{to_amount}"}

      {:error, _} = error ->
        error
    end
  end

  @doc """
  Calculate the exchange transaction.

  If `from_amount` is nil, the `from_amount` will be calculated from the given inputs.
  If `to_amount` is nil, the `to_amount` will be calculated from the given inputs.

  If both `from_amount` and `to_amount` are nil, `{:error, :invalid_parameter, description}`
  will be returned.

  If both `from_amount` and `to_amount` are given, `{:error, :invalid_parameter, description}`
  error is returned.
  """
  # BUGFIX (spec only): the third parameter was misspelled `fo_amount`.
  @spec calculate(
          from_amount :: non_neg_or_nil() | Decimal.t(),
          from_token :: %Token{},
          to_amount :: non_neg_or_nil() | Decimal.t(),
          to_token :: %Token{}
        ) :: {:ok, Calculation.t()} | {:error, atom()} | {:error, atom(), String.t()}
  # Returns an :invalid_parameter error if both `from_amount` and `to_amount` are missing
  def calculate(nil, _, nil, _) do
    {:error, :invalid_parameter, "an exchange requires from amount, to amount, or both"}
  end

  # Converts `from_amount` and `to_amount` to Decimal before operating on them
  def calculate(from_amount, from_token, to_amount, to_token) when is_number(from_amount) do
    calculate(Decimal.new(from_amount), from_token, to_amount, to_token)
  end

  def calculate(from_amount, from_token, to_amount, to_token) when is_number(to_amount) do
    calculate(from_amount, from_token, Decimal.new(to_amount), to_token)
  end

  # Same-token: populates `to_amount` into `from_amount`
  def calculate(nil, %{uuid: uuid} = token, to_amount, %{uuid: uuid}) do
    {:ok, build_result(to_amount, token, to_amount, token, Decimal.new(1), nil)}
  end

  # Same-token: populates `from_amount` into `to_amount`
  def calculate(from_amount, %{uuid: uuid} = token, nil, %{uuid: uuid}) do
    {:ok, build_result(from_amount, token, from_amount, token, Decimal.new(1), nil)}
  end

  # Cross-token: calculates for the missing `from_amount`
  def calculate(nil, from_token, to_amount, to_token) do
    with {:ok, rate, pair} <- get_rate(from_token, to_token),
         from_amount <- Decimal.div(to_amount, rate),
         {:ok, from_amount} <- normalize(from_amount) do
      {:ok, build_result(from_amount, from_token, to_amount, to_token, rate, pair)}
    else
      {:error, :exchange_amounts_too_small, from_amount} ->
        {:error, :exchange_amounts_too_small,
         "expected the 'from_amount' and 'to_amount' to be greater than zero, " <>
           "got #{from_amount} and #{to_amount}"}

      error ->
        error
    end
  end

  # Cross-token: calculates for the missing `to_amount`
  def calculate(from_amount, from_token, nil, to_token) do
    with {:ok, rate, pair} <- get_rate(from_token, to_token),
         to_amount <- Decimal.mult(from_amount, rate),
         {:ok, to_amount} <- normalize(to_amount) do
      {:ok, build_result(from_amount, from_token, to_amount, to_token, rate, pair)}
    else
      {:error, :exchange_amounts_too_small, to_amount} ->
        {:error, :exchange_amounts_too_small,
         "expected the 'from_amount' and 'to_amount' to be greater than zero, " <>
           "got #{from_amount} and #{to_amount}"}

      error ->
        error
    end
  end

  # Returns an :invalid_parameter error if both `from_amount` and `to_amount` are provided
  def calculate(_from_amount, _, _to_amount, _) do
    {:error, :invalid_parameter, "unable to calculate if amounts are already provided"}
  end

  # Round the subunit amount to integer. Returns :error if the result is 0 or lower,
  # the exchange amounts should never be 0 or less.
  defp normalize(amount) do
    rounded = Decimal.round(amount, 0)
    zero = Decimal.new(0)

    case greater_than(rounded, zero) do
      true -> {:ok, rounded}
      false -> {:error, :exchange_amounts_too_small, rounded}
    end
  end

  # NOTE(review): `Decimal.compare/2` returned a Decimal (1/0/-1) in
  # decimal < 2.0 but returns :lt/:eq/:gt atoms in decimal >= 2.0.
  # This comparison assumes the older API — confirm the pinned version.
  defp greater_than(left, right), do: Decimal.compare(left, right) == Decimal.new(1)

  # Builds the final Calculation struct. Amounts must be integral Decimals
  # (normalize/1 rounds them) because Decimal.to_integer/1 raises otherwise.
  defp build_result(from_amount, from_token, to_amount, to_token, rate, pair) do
    %Calculation{
      from_amount: Decimal.to_integer(from_amount),
      from_token: from_token,
      to_amount: Decimal.to_integer(to_amount),
      to_token: to_token,
      actual_rate: Decimal.to_float(rate),
      pair: pair,
      calculated_at: NaiveDateTime.utc_now()
    }
  end
end
|
apps/ewallet/lib/ewallet/exchange/exchange.ex
| 0.924125
| 0.662851
|
exchange.ex
|
starcoder
|
defmodule Saucexages.MediaInfoMeta do
  @moduledoc false

  # Compile-time metadata record describing one SAUCE media type.
  # Instances of this struct are listed in Saucexages.MediaInfo's
  # @file_type_mapping table and looked up by :media_type_id.
  require Saucexages.DataType
  alias Saucexages.DataType

  # :media_type_id, :file_type, :data_type_id and :name identify the type;
  # :t_info_1..:t_info_4, :t_flags and :t_info_s hold the *meaning* (an atom
  # such as :character_width or :ansi_flags) of the corresponding
  # type-dependent SAUCE fields, or nil when a field is unused for the type.
  @enforce_keys [:media_type_id, :file_type, :data_type_id, :name]
  defstruct [:media_type_id, :file_type, :data_type_id, :name, :t_info_1, :t_info_2, :t_info_3, :t_info_4, :t_flags, :t_info_s]

  # NOTE(review): the t_info_* fields are typed non_neg_integer() here but are
  # populated with atoms (field-meaning identifiers) in MediaInfo's mapping
  # table — the specs look inaccurate; confirm against the rest of the library.
  @type t :: %__MODULE__{
          media_type_id: atom(),
          file_type: non_neg_integer(),
          data_type_id: DataType.data_type_id(),
          name: String.t(),
          t_info_1: non_neg_integer(),
          t_info_2: non_neg_integer(),
          t_info_3: non_neg_integer(),
          t_info_4: non_neg_integer(),
          t_flags: non_neg_integer(),
          t_info_s: String.t(),
        }
end
defmodule Saucexages.MediaInfo do
@moduledoc """
This module is used to provide facilities for working with type information described in the SAUCE spec. Typically, this consists at a minimum of a data type, possibly combined with a file type. The combination of the two together comprise the actual information type the SAUCE data is representing.
This module uses the concept of a `media_type_id` atom to abstract and simplify the underlying media types. This allows a simple, compact, and human-readable representation when working with SAUCE information. `media_type_id` can be further used to decode and understand any type dependent fields within a SAUCE record.
The type dependent fields include the following as describe by the SAUCE spec that may vary in meaning across media types:
* `t_info_1`
* `t_info_2`
* `t_info_3`
* `t_info_4`
* `t_flags`
* `t_info_s`
Additionally, this module provides a number of convenience wrappers and tools for interrogating SAUCE information, fields, and related info. If you are building any sort of UI, behavior, or module that relies on type-specific field information (ex: aspect ratio, number of lines, etc.) then you should carefully scan the functions available in this module as most typical use-cases are considered.
## Use-Cases
This module provides many functions to support the type dependent fields in SAUCE records.
Some basic use-cases include:
* Getting domain-specific names of SAUCE record values such as `character_width`, `pixel_height`, and `ansi_flags` among many others.
* Reading and writing fields based on type-specific information, for example fields that are required or specific to that type
* Extracting font, flags, colors, and other specific information, particularly in the case of character types such as ANSi art.
* Selecting the appropriate types based on actual file type/mime type/metadata
* Validating type information to avoid reading or writing invalid data for encoding/decoding SAUCE data
## Notes
There is a very nasty edge-case in which `file_type` may be used to store data. According to the spec, this is only the case for the data type - `binary text`. As such, this module wraps this edge-case while handling all other data types cleanly as possible. Note that due to this edge-case, it is not possible to assume the `media_type_id` is redundant with `file_type`. As such, the `file_type` data is fully represented as a separate field to avoid data truncation or corruption.
"""
require Saucexages.AnsiFlags
require Saucexages.DataType
require Saucexages.Font
alias Saucexages.{MediaInfoMeta, AnsiFlags, DataType, Font}
alias __MODULE__, as: MediaInfo
@enforce_keys [:file_type, :data_type]
@file_type_fields [:file_type, :data_type, :file_size, :t_info_1, :t_info_2, :t_info_3, :t_info_4, :t_flags, :t_info_s]
@type_specific_fields [:t_info_1, :t_info_2, :t_info_3, :t_info_4, :t_flags, :t_info_s]
defstruct [:file_type, :data_type, :t_info_s, t_info_1: 0, t_info_2: 0, t_info_3: 0, t_info_4: 0, t_flags: 0, file_size: 0]
@type file_type :: non_neg_integer()
@type type_handle :: {file_type(), DataType.data_type()}
@type media_type_id :: :none | :ascii | :ansi | :ansimation | :rip | :pcboard | :avatar | :html | :source | :tundra_draw | :gif | :pcx | :lmb_iff | :tga | :fli | :flc | :bmp | :gl | :dl | :wpg_bitmap | :png | :jpg | :mpg | :avi | :dxf | :dwg | :wpg_vector | :"3ds" | :mod | :"669" | :stm | :s3m | :mtm | :far | :ult | :amf | :dmf | :okt | :rol | :cmf | :mid | :sadt | :voc | :wav | :smp8 | :smp8s | :smp16 | :smp16s | :patch8 | :patch16 | :xm | :hsc | :it | :binary_text | :xbin | :zip | :arj | :lzh | :arc | :tar | :zoo | :rar | :uc2 | :pak | :sqz | :executable
@type t :: %__MODULE__{
file_type: file_type(),
data_type: non_neg_integer(),
file_size: non_neg_integer(),
t_info_1: non_neg_integer(),
t_info_2: non_neg_integer(),
t_info_3: non_neg_integer(),
t_info_4: non_neg_integer(),
t_flags: non_neg_integer(),
t_info_s: String.t(),
}
@file_type_mapping [
%MediaInfoMeta{
media_type_id: :none,
file_type: 0,
data_type_id: :none,
name: "Undefined"
},
%MediaInfoMeta{
media_type_id: :ascii,
file_type: 0,
data_type_id: :character,
name: "ASCII",
t_info_1: :character_width,
t_info_2: :number_of_lines,
t_flags: :ansi_flags,
t_info_s: :font_id
},
%MediaInfoMeta{
media_type_id: :ansi,
file_type: 1,
name: "ANSi",
data_type_id: :character,
t_info_1: :character_width,
t_info_2: :number_of_lines,
t_flags: :ansi_flags,
t_info_s: :font_id
},
%MediaInfoMeta{
media_type_id: :ansimation,
file_type: 2,
name: "ANSiMation",
data_type_id: :character,
t_info_1: :character_width,
t_info_2: :number_of_lines,
t_flags: :ansi_flags,
t_info_s: :font_id
},
%MediaInfoMeta{
media_type_id: :rip,
file_type: 3,
name: "RIP Script",
data_type_id: :character,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :number_of_colors
},
%MediaInfoMeta{
media_type_id: :pcboard,
file_type: 4,
name: "PC Board",
data_type_id: :character,
t_info_1: :character_width,
t_info_2: :number_of_lines,
},
%MediaInfoMeta{
media_type_id: :avatar,
file_type: 5,
name: "Avatar",
data_type_id: :character,
t_info_1: :character_width,
t_info_2: :number_of_lines,
},
%MediaInfoMeta{
media_type_id: :html,
file_type: 6,
name: "HTML",
data_type_id: :character,
},
%MediaInfoMeta{
media_type_id: :source,
file_type: 7,
name: "Source Code",
data_type_id: :character,
},
%MediaInfoMeta{
media_type_id: :tundra_draw,
file_type: 8,
name: "Tundra Draw",
data_type_id: :character,
t_info_1: :character_width,
t_info_2: :number_of_lines,
},
%MediaInfoMeta{
media_type_id: :gif,
file_type: 0,
name: "GIF",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :pcx,
file_type: 1,
name: "PCX",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :lmb_iff,
file_type: 2,
name: "LMB/IFF",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :tga,
file_type: 3,
name: "TGA",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :fli,
file_type: 4,
name: "FLI",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :flc,
file_type: 5,
name: "FLC",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :bmp,
file_type: 6,
name: "BMP",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :gl,
file_type: 7,
name: "GL",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :dl,
file_type: 8,
name: "DL",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :wpg_bitmap,
file_type: 9,
name: "WPG",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :png,
file_type: 10,
name: "PNG",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :jpg,
file_type: 11,
name: "JPG",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :mpg,
file_type: 12,
name: "MPG",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :avi,
file_type: 13,
name: "AVI",
data_type_id: :bitmap,
t_info_1: :pixel_width,
t_info_2: :pixel_height,
t_info_3: :pixel_depth,
},
%MediaInfoMeta{
media_type_id: :dxf,
file_type: 0,
name: "DXF",
data_type_id: :vector,
},
%MediaInfoMeta{
media_type_id: :dwg,
file_type: 1,
name: "DWG",
data_type_id: :vector,
},
%MediaInfoMeta{
media_type_id: :wpg_vector,
file_type: 2,
name: "WPG",
data_type_id: :vector,
},
%MediaInfoMeta{
media_type_id: :"3ds",
file_type: 3,
name: "3DS",
data_type_id: :vector,
},
%MediaInfoMeta{
media_type_id: :mod,
file_type: 0,
name: "MOD",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :"669",
file_type: 1,
name: "669",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :stm,
file_type: 2,
name: "STM",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :s3m,
file_type: 3,
name: "S3M",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :mtm,
file_type: 4,
name: "MTM",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :far,
file_type: 5,
name: "FAR",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :ult,
file_type: 6,
name: "ULT",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :amf,
file_type: 7,
name: "AMF",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :dmf,
file_type: 8,
name: "DMF",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :okt,
file_type: 9,
name: "OKT",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :rol,
file_type: 10,
name: "ROL",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :cmf,
file_type: 11,
name: "CMF",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :mid,
file_type: 12,
name: "MID",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :sadt,
file_type: 13,
name: "SADT",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :voc,
file_type: 14,
name: "VOC",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :wav,
file_type: 15,
name: "WAV",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :smp8,
file_type: 16,
name: "SMP8",
data_type_id: :audio,
t_info_1: :sample_rate,
},
%MediaInfoMeta{
media_type_id: :smp8s,
file_type: 17,
name: "SMP8S",
data_type_id: :audio,
t_info_1: :sample_rate,
},
%MediaInfoMeta{
media_type_id: :smp16,
file_type: 18,
name: "SMP16",
data_type_id: :audio,
t_info_1: :sample_rate,
},
%MediaInfoMeta{
media_type_id: :smp16s,
file_type: 19,
name: "SMP16S",
data_type_id: :audio,
t_info_1: :sample_rate,
},
%MediaInfoMeta{
media_type_id: :patch8,
file_type: 20,
name: "PATCH8",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :patch16,
file_type: 21,
name: "PATCH16",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :xm,
file_type: 22,
name: "XM",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :hsc,
file_type: 23,
name: "HSC",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :it,
file_type: 24,
name: "IT",
data_type_id: :audio,
},
%MediaInfoMeta{
media_type_id: :binary_text,
file_type: 0,
# file_type: nil,
name: "Binary Text",
data_type_id: :binary_text,
t_flags: :ansi_flags,
t_info_s: :font_id
},
%MediaInfoMeta{
media_type_id: :xbin,
file_type: 0,
name: "XBIN",
data_type_id: :xbin,
t_info_1: :character_width,
t_info_2: :number_of_lines,
},
%MediaInfoMeta{
media_type_id: :zip,
file_type: 0,
name: "ZIP",
data_type_id: :archive,
},
%MediaInfoMeta{
media_type_id: :arj,
file_type: 1,
name: "ARJ",
data_type_id: :archive,
},
%MediaInfoMeta{
media_type_id: :lzh,
file_type: 2,
name: "LZH",
data_type_id: :archive,
},
%MediaInfoMeta{
media_type_id: :arc,
file_type: 3,
name: "ARC",
data_type_id: :archive,
},
%MediaInfoMeta{
media_type_id: :tar,
file_type: 4,
name: "TAR",
data_type_id: :archive,
},
%MediaInfoMeta{
media_type_id: :zoo,
file_type: 5,
name: "ZOO",
data_type_id: :archive,
},
%MediaInfoMeta{
media_type_id: :rar,
file_type: 6,
name: "RAR",
data_type_id: :archive,
},
%MediaInfoMeta{
media_type_id: :uc2,
file_type: 7,
name: "UC2",
data_type_id: :archive,
},
%MediaInfoMeta{
media_type_id: :pak,
file_type: 8,
name: "PAK",
data_type_id: :archive,
},
%MediaInfoMeta{
media_type_id: :sqz,
file_type: 9,
name: "sqz",
data_type_id: :archive,
},
%MediaInfoMeta{
media_type_id: :executable,
file_type: 0,
name: "Executable",
data_type_id: :executable,
},
]
defguardp is_binary_text(file_type, data_type) when (data_type == 5 or data_type == :binary_text) and is_integer(file_type) and file_type >= 0
@doc """
Creates a new MediaInfo based on the given file_type and data_type.
"""
@spec new(file_type(), DataType.data_type(), Enum.t()) :: t()
def new(file_type, data_type, opts \\ []) do
struct(%__MODULE__{file_type: file_type, data_type: data_type}, opts)
end
@doc """
Lists all meta information about file types.
Useful for building interfaces, specialized parsing, and dynamic access.
"""
@spec media_meta() :: [MediaInfoMeta.t()]
defmacro media_meta() do
@file_type_mapping
|> Macro.escape()
end
@doc """
Lists all the meta information about the given media_type_id.
## Examples
iex> Saucexages.MediaInfo.media_meta_by(:ansi)
%Saucexages.MediaInfoMeta{
data_type_id: :character,
file_type: 1,
media_type_id: :ansi,
name: "ANSi",
t_flags: :ansi_flags,
t_info_1: :character_width,
t_info_2: :number_of_lines,
t_info_3: nil,
t_info_4: nil,
t_info_s: :font_id
}
"""
@spec media_meta_by(media_type_id()) :: MediaInfoMeta.t() | nil
def media_meta_by(media_type_id)
for %{media_type_id: media_type_id} = mapping <- @file_type_mapping do
def media_meta_by(unquote(media_type_id)) do
unquote(
mapping
|> Macro.escape()
)
end
end
def media_meta_by(_media_type_id) do
nil
end
@doc """
Lists all known file types.
"""
@spec media_type_ids() :: [media_type_id()]
defmacro media_type_ids() do
Enum.map(@file_type_mapping, fn (%{media_type_id: media_type_id}) -> media_type_id end)
end
@doc """
Lists all known file type ids per the given `data_type_id` or `data_type`.
## Examples
iex> Saucexages.MediaInfo.media_type_ids_for(:character)
[:ascii, :ansi, :ansimation, :rip, :pcboard, :avatar, :html, :source, :tundra_draw]
iex> Saucexages.MediaInfo.media_type_ids_for(1)
[:ascii, :ansi, :ansimation, :rip, :pcboard, :avatar, :html, :source, :tundra_draw]
"""
@spec media_type_ids_for(DataType.data_type() | DataType.data_type_id()) :: [media_type_id()]
defmacro media_type_ids_for(data_type)
for %{data_type_id: data_type_id, data_type: data_type} <- DataType.data_type_meta() do
defmacro media_type_ids_for(unquote(data_type_id)) do
media_type_ids_by(unquote(data_type_id))
end
defmacro media_type_ids_for(unquote(data_type)) do
DataType.data_type_id(unquote(data_type))
|> media_type_ids_by()
end
end
defmacro media_type_ids_for(_data_type) do
[]
end
# Collects the media type ids whose mapping carries the given data type id,
# skipping entries without a media_type_id.
defp media_type_ids_by(data_type_id) do
  for %{data_type_id: ^data_type_id, media_type_id: media_type_id} <- @file_type_mapping,
      not is_nil(media_type_id) do
    media_type_id
  end
end
@doc """
Lists all known file type ids per the given `data_type_id` or `data_type`.
For file types that don't exist for a given data type, ex: `binary_text`, an empty list will be returned.
## Examples
iex> Saucexages.MediaInfo.file_types_for(:character)
[0, 1, 2, 3, 4, 5, 6, 7, 8]
iex> Saucexages.MediaInfo.file_types_for(1)
[0, 1, 2, 3, 4, 5, 6, 7, 8]
iex> Saucexages.MediaInfo.file_types_for(:binary_text)
[]
"""
@spec file_types_for(DataType.data_type() | DataType.data_type_id()) :: [file_type()] | []
defmacro file_types_for(data_type)
for %{data_type_id: data_type_id, data_type: data_type} <- DataType.data_type_meta() do
defmacro file_types_for(unquote(data_type_id)) do
file_types_from(unquote(data_type_id))
end
defmacro file_types_for(unquote(data_type)) do
DataType.data_type_id(unquote(data_type))
|> file_types_from()
end
end
defmacro file_types_for(_data_type) do
[]
end
# Collects the file types recorded for the given data type id. Entries without a
# file_type and the :binary_text entry are excluded (see the `file_types_for/1`
# doctest: binary text yields []).
defp file_types_from(data_type_id) when is_atom(data_type_id) do
  for %{data_type_id: ^data_type_id, file_type: file_type, media_type_id: media_type_id} <- @file_type_mapping,
      not is_nil(file_type),
      media_type_id != :binary_text do
    file_type
  end
end
@doc """
Extracts the `data_type_id` associated for a known `media_type_id`.
If the file type or data type is unknown, :none is returned.
## Examples
iex> Saucexages.MediaInfo.data_type_id(:ansi)
:character
iex> Saucexages.MediaInfo.data_type_id(:mod)
:audio
iex> Saucexages.MediaInfo.data_type_id(:fried_chicken)
:none
"""
@spec data_type_id(media_type_id()) :: DataType.data_type_id()
def data_type_id(media_type_id)
for %{media_type_id: media_type_id, data_type_id: data_type_id} <- @file_type_mapping do
def data_type_id(unquote(media_type_id)) do
unquote(data_type_id)
end
end
def data_type_id(_media_type_id) do
:none
end
@doc """
Extracts the integer data type from a given `media_type_id`.
## Examples
iex> Saucexages.MediaInfo.data_type(:ansi)
1
iex> Saucexages.MediaInfo.data_type(:gif)
2
iex> Saucexages.MediaInfo.data_type(:chicken_salad)
0
"""
@spec data_type(media_type_id()) :: DataType.data_type()
def data_type(media_type_id) do
data_type_id(media_type_id)
|> DataType.data_type()
end
@doc """
Lists all dynamic file type fields that can be found in a SAUCE record.
The meaning of each of these fields varies by `media_type_id`, and therefore by the combination of `data_type` and usually `file_type`.
Useful for building interfaces, specialized parsing, and dynamic access.
## Examples
iex> Saucexages.MediaInfo.type_fields()
[:t_info_1, :t_info_2, :t_info_3, :t_info_4, :t_flags, :t_info_s]
"""
@spec type_fields() :: [atom()]
defmacro type_fields() do
@type_specific_fields
end
@doc """
Lists all dynamic file type fields that can be found in a SAUCE record for the given `media_type_id`.
Useful for building interfaces, specialized parsing, and dynamic access.
If you need the mapping between SAUCE fields and what they mean for a file type, see `type_field_mapping/1`.
If you need the names of each field specific to the given file type, see `type_field_names/1`.
## Examples
iex> Saucexages.MediaInfo.type_fields(:ansi)
[:t_flags, :t_info_1, :t_info_2, :t_info_s]
iex> Saucexages.MediaInfo.type_fields(:gif)
[:t_info_1, :t_info_2, :t_info_3]
"""
@spec type_fields(media_type_id()) :: [atom()]
defmacro type_fields(media_type_id)
for %{media_type_id: media_type_id} = mapping <- @file_type_mapping do
defmacro type_fields(unquote(media_type_id)) do
unquote(
mapping
|> Macro.escape()
)
|> Map.take(@type_specific_fields)
|> Enum.flat_map(
fn {k, v} ->
case v do
nil -> []
v when is_atom(v) -> [k]
_ -> []
end
end
)
end
end
defmacro type_fields(media_type_id) when is_atom(media_type_id) do
[]
end
@doc """
Lists all dynamic file type fields that can be found in a SAUCE record for the given `file_type` and `data_type`.
Useful for building interfaces, specialized parsing, and dynamic access.
If you need the mapping between SAUCE fields and what they mean for a file type, see `type_field_map/1`.
If you need the names of each field specific to the given file type, see `type_field_names/1`.
## Examples
iex> Saucexages.MediaInfo.type_fields(1, 1)
[:t_flags, :t_info_1, :t_info_2, :t_info_s]
iex> Saucexages.MediaInfo.type_fields(0, 2)
[:t_info_1, :t_info_2, :t_info_3]
"""
@spec type_fields(file_type(), DataType.data_type()) :: [atom()]
defmacro type_fields(file_type, data_type)
for %{media_type_id: media_type_id, file_type: file_type, data_type_id: data_type_id} <- @file_type_mapping do
data_type = DataType.data_type(data_type_id)
defmacro type_fields(unquote(file_type), unquote(data_type)) do
type_fields(unquote(media_type_id))
end
end
defmacro type_fields(file_type, data_type) when is_binary_text(file_type, data_type)do
type_fields(:binary_text)
end
defmacro type_fields(_file_type, _data_type) do
[]
end
@doc """
Returns the `media_type_id` associated with the given `file_type` integer value and `data_type_id` or `data_type`.
## Examples
iex> Saucexages.MediaInfo.new(1, 1) |> Saucexages.MediaInfo.media_type_id()
:ansi
iex> Saucexages.MediaInfo.new(10, 2) |> Saucexages.MediaInfo.media_type_id()
:png
iex> Saucexages.MediaInfo.new(27, :binary_text) |> Saucexages.MediaInfo.media_type_id()
:binary_text
"""
@spec media_type_id(t()) :: media_type_id()
def media_type_id(%{file_type: file_type, data_type: data_type}) do
media_type_id(file_type, data_type)
end
@doc """
Returns the `media_type_id` associated with the given `file_type` integer value and `data_type_id` or `data_type`.
## Examples
iex> Saucexages.MediaInfo.media_type_id(1, :character)
:ansi
iex> Saucexages.MediaInfo.media_type_id(10, :bitmap)
:png
iex> Saucexages.MediaInfo.media_type_id(10, 2)
:png
"""
@spec media_type_id(file_type, DataType.data_type_id() | DataType.data_type()) :: media_type_id()
def media_type_id(file_type, data_type_id)
for %{media_type_id: media_type_id, file_type: file_type, data_type_id: data_type_id} <- @file_type_mapping, !is_nil(file_type) do
data_type = DataType.data_type(data_type_id)
def media_type_id(unquote(file_type), unquote(data_type_id)) do
unquote(media_type_id)
end
def media_type_id(unquote(file_type), unquote(data_type)) do
unquote(media_type_id)
end
end
def media_type_id(file_type, data_type) when is_binary_text(file_type, data_type) do
:binary_text
end
def media_type_id(_file_type, _data_type_id) do
:none
end
@doc """
Returns the `file_type` associated with the given `media_type_id`.
## Examples
iex> Saucexages.MediaInfo.file_type(:ansi)
1
iex> Saucexages.MediaInfo.file_type(:png)
10
"""
@spec file_type(media_type_id()) :: file_type()
def file_type(media_type_id)
for %{media_type_id: media_type_id, file_type: file_type} <- @file_type_mapping do
def file_type(unquote(media_type_id)) do
unquote(file_type)
end
end
def file_type(media_type_id) when is_atom(media_type_id) do
0
end
@doc """
Returns a tuple of `file_type` and `data_type` associated with the given `media_type_id` to be used with SAUCE data directly, for example writing a SAUCE.
## Examples
iex> Saucexages.MediaInfo.type_handle(:ansi)
{1, 1}
iex> Saucexages.MediaInfo.type_handle(:png)
{10, 2}
"""
@spec type_handle(media_type_id) :: type_handle()
def type_handle(media_type_id)
for %{media_type_id: media_type_id, file_type: file_type, data_type_id: data_type_id} <- @file_type_mapping do
def type_handle(unquote(media_type_id)) do
{unquote(file_type), DataType.data_type(unquote(data_type_id))}
end
end
def type_handle(_media_type_id) do
{0, 0}
end
@doc """
Returns a map consisting of only basic info about the `media_type_id` associated with the given `file_type` map.
If the file type is unknown, :undefined is returned.
The following keys are always returned:
* `media_type_id` - The unique identifier for a file type.
* `data_type_id` - The unique identifier for a data type.
* `name` - The friendly name for a file type.
## Examples
iex> Saucexages.MediaInfo.new(1, 1) |> Saucexages.MediaInfo..basic_info()
%{data_type_id: :character, media_type_id: :ansi, name: "ANSi"}
iex> Saucexages.MediaInfo.new(10, 2) |> Saucexages.MediaInfo.basic_info()
%{data_type_id: :bitmap, media_type_id: :png, name: "PNG"}
iex> Saucexages.MediaInfo.new(10, :binary_text) |> Saucexages.MediaInfo.basic_info()
%{data_type_id: :binary_text, media_type_id: :binary_text, name: "Binary Text"}
iex> Saucexages.MediaInfo.new(1, 10) |> Saucexages.MediaInfo.basic_info()
%{data_type_id: :none, media_type_id: :none, name: "Undefined"}
"""
@spec basic_info(t()) :: map()
def basic_info(file_type) when is_map(file_type) do
media_type_id(file_type)
|> basic_info()
end
@doc """
Returns a map consisting of only basic info about the given `media_type_id`.
The following keys are always returned:
* `media_type_id` - The unique identifier for a file type.
* `data_type_id` - The unique identifier for a data type.
* `name` - The friendly name for a file type.
## Examples
iex> Saucexages.MediaInfo.basic_info(:ansi)
%{data_type_id: :character, media_type_id: :ansi, name: "ANSi"}
iex> Saucexages.MediaInfo.basic_info(:png)
%{data_type_id: :bitmap, media_type_id: :png, name: "PNG"}
iex> Saucexages.MediaInfo.basic_info(:binary_text)
%{data_type_id: :binary_text, media_type_id: :binary_text, name: "Binary Text"}
"""
@spec basic_info(media_type_id()) :: map()
def basic_info(media_type_id)
for %{media_type_id: media_type_id, data_type_id: data_type_id, name: name} <- @file_type_mapping do
def basic_info(unquote(media_type_id)) do
%{
media_type_id: unquote(media_type_id),
data_type_id: unquote(data_type_id),
name: unquote(name),
}
end
end
def basic_info(media_type_id) when is_atom(media_type_id) do
basic_info(:none)
end
@doc """
Returns a detailed map of any media info that can be converted per-file type, along with basic file type information.
Useful for editors or specialized processing.
Any type-specific fields that have special meaning will be converted accordingly. Type-specific fields without meaning will be left `as is` to account for cases when developers, apps, users, etc. have added this data contrary to the SAUCE spec.
## Examples
iex> Saucexages.MediaInfo.details(%Saucexages.MediaInfo{file_type: 1, data_type: 1, t_flags: 17, t_info_1: 80, t_info_2: 250, t_info_s: "IBM VGA"})
%{
ansi_flags: %Saucexages.AnsiFlags{
aspect_ratio: :modern,
letter_spacing: :none,
non_blink_mode?: true
},
character_width: 80,
data_type: 1,
data_type_id: :character,
file_size: 0,
file_type: 1,
media_type_id: :ansi,
font_id: :ibm_vga,
name: "ANSi",
number_of_lines: 250,
t_info_3: 0,
t_info_4: 0
}
"""
@spec details(t()) :: map()
def details(media_info) when is_map(media_info) do
# We could use a macro to do all this, but it's nicer to be slightly explicit as we may want additional functions called in this chain anyway
with base_map when is_map(base_map) <- basic_info(media_info) do
do_read_fields(media_info, base_map)
else
_ -> nil
end
end
@doc """
Returns a detailed map of any media info that can be converted per-file type. Only the detailed information is returned.
Useful for editors or specialized processing.
## Examples
iex> Saucexages.MediaInfo.media_details(%Saucexages.MediaInfo{file_type: 1, data_type: 1, t_flags: 17, t_info_1: 80, t_info_2: 250, t_info_s: "IBM VGA"})
%{
ansi_flags: %Saucexages.AnsiFlags{
aspect_ratio: :modern,
letter_spacing: :none,
non_blink_mode?: true
},
character_width: 80,
data_type: 1,
file_size: 0,
file_type: 1,
font_id: :ibm_vga,
number_of_lines: 250,
t_info_3: 0,
t_info_4: 0
}
"""
@spec media_details(t()) :: map()
def media_details(media_info) when is_map(media_info) do
do_read_fields(media_info, %{})
end
def media_details(_media_info) do
nil
end
# Converts the SAUCE type-dependent fields of `media_info` and merges the
# results into the `file_type_info` accumulator map.
defp do_read_fields(media_info, file_type_info) do
  media_info
  |> read_fields(@file_type_fields)
  |> Enum.into(file_type_info)
end
@doc """
Returns a keyword list of all the type dependent fields for a given file type.
Useful for building interfaces, specialized parsing, and dynamic access.
## Examples
iex> Saucexages.MediaInfo.type_field_mapping(:ansi)
[
t_flags: :ansi_flags,
t_info_1: :character_width,
t_info_2: :number_of_lines,
t_info_s: :font_id
]
"""
@spec type_field_mapping(media_type_id()) :: Enum.t()
def type_field_mapping(media_type_id)
for %{media_type_id: media_type_id} = mapping <- @file_type_mapping do
def type_field_mapping(unquote(media_type_id)) do
unquote(
mapping
|> Macro.escape()
)
|> Map.take(@type_specific_fields)
|> Enum.reject(fn ({_k, v}) -> is_nil(v) end)
end
end
def type_field_mapping(_media_type_id) do
[]
end
@doc """
Returns a list of all field names used by a given file type.
Useful for building interfaces, specialized parsing, and dynamic access.
## Examples
iex> Saucexages.MediaInfo.type_field_names(:ansi)
[:ansi_flags, :character_width, :number_of_lines, :font_id]
iex> Saucexages.MediaInfo.type_field_names(:gif)
[:pixel_width, :pixel_height, :pixel_depth]
"""
@spec type_field_names(media_type_id()) :: [atom()]
def type_field_names(media_type_id)
for %{media_type_id: media_type_id} = mapping <- @file_type_mapping do
def type_field_names(unquote(media_type_id)) do
unquote(
mapping
|> Macro.escape()
)
|> Map.take(@type_specific_fields)
|> Enum.flat_map(
fn ({_k, v}) ->
case v do
nil -> []
v when is_atom(v) -> [v]
_ -> []
end
end
)
end
end
def type_field_names(media_type_id) when is_atom(media_type_id) do
[]
end
@doc """
Returns the field name associated with a given file type and file type field.
## Examples
iex> Saucexages.MediaInfo.field_type(:ansi, :t_info_s)
:font_id
iex> Saucexages.MediaInfo.field_type(:ansi, :t_flags)
:ansi_flags
"""
@spec field_type(media_type_id(), atom()) :: atom()
def field_type(media_type_id, field_type_id)
for %{media_type_id: media_type_id} = mapping <- @file_type_mapping do
def field_type(unquote(media_type_id), field_type_id) do
case unquote(
mapping
|> Macro.escape()
) do
%{^field_type_id => field_type_value} -> field_type_value
_ -> nil
end
end
end
def field_type(_media_type_id, _field_type_id) do
nil
end
@doc """
Reads data dynamically for a given file info map based on its meta information and converts the data if possible.
Returns a 2-tuple containing the new field id (if any) as the first element, and the field data (if any) as the second element.
If the value does not exist in the map, you may pass a `default_value` to be used in these cases.
Optionally, a 3-arity conversion function that takes the same parameters, `media_type_id`, `field_type_id`, and `value` may be passed for custom conversions.
## Examples
iex> Saucexages.MediaInfo.read_media_field(%{file_type: 1, data_type: 1, t_flags: 17}, :t_flags)
{:ansi_flags,
%Saucexages.AnsiFlags{
aspect_ratio: :modern,
letter_spacing: :none,
non_blink_mode?: true
}}
iex> Saucexages.MediaInfo.read_media_field(%{file_type: 1, data_type: 1, t_flags: 17, t_info_1: 80}, :t_info_2, 22)
{:number_of_lines, 22}
"""
@spec read_media_field(t(), atom(), term(), function()) :: {atom(), term()}
def read_media_field(media_info, field_type_id, default_value \\ nil, conversion_fn \\ &read_field_value/3)
def read_media_field(%{file_type: _file_type, data_type: _data_type} = media_info, field_type_id, default_value, conversion_fn) do
media_type_id = media_type_id(media_info)
with %{^field_type_id => value} <- media_info do
read_field(media_type_id, field_type_id, value, conversion_fn)
else
_ -> read_field(media_type_id, field_type_id, default_value, conversion_fn)
end
end
def read_media_field(_media_info, field_type_id, default_value, _conversion_fn) do
{field_type_id, default_value}
end
@doc """
Reads the fields given in the `field_ids` list dynamically and returns data based on file type information, converting data when possible. The field ids may be any field that is part of a MediaInfo. A new map will be returned with any valid fields converted accordingly.
Unknown field ids will be ignored.
Optionally, a 3-arity conversion function that takes the same parameters, `media_type_id`, `field_type_id`, and `value` may be passed for custom conversions.
## Examples
iex> Saucexages.MediaInfo.read_fields(%Saucexages.MediaInfo{file_type: 1, data_type: 1, t_flags: 17, t_info_1: 80, t_info_2: 250, t_info_s: "IBM VGA"}, [:t_info_s, :t_info_1, :t_info_2])
%{character_width: 80, font_id: :ibm_vga, number_of_lines: 250}
iex> Saucexages.MediaInfo.read_fields(%Saucexages.MediaInfo{file_type: 1, data_type: 1, t_flags: 17, t_info_1: 80, t_info_2: 250, t_info_s: "IBM VGA"}, [:t_info_s, :cheese, :data_type])
%{font_id: :ibm_vga, data_type: 1}
"""
@spec read_fields(t(), [atom()]) :: map()
def read_fields(media_info, field_ids, conversion_fn \\ &read_field_value/3)
def read_fields(media_info, field_ids, conversion_fn) do
media_type_id = media_type_id(media_info)
Enum.reduce(
field_ids,
%{},
fn (field_id, acc) ->
with %{^field_id => value} <- media_info,
{k, v} <- read_field(media_type_id, field_id, value, conversion_fn) do
Map.put(acc, k, v)
else
_ -> acc
end
end
)
end
@doc """
Reads data dynamically for a given file type based on its meta information and converts the data if possible.
Returns a 2-tuple containing the new field id (if any) as the first element, and the field data (if any) as the second element.
Optionally, a 3-arity conversion function that takes the same parameters, `media_type_id`, `field_type_id`, and `value` may be passed for custom conversions.
## Examples
iex> Saucexages.MediaInfo.read_field(:ansi, :t_flags, 17)
{:ansi_flags, %Saucexages.AnsiFlags{aspect_ratio: :modern, letter_spacing: :none, non_blink_mode?: true}}
iex> Saucexages.MediaInfo.read_field(:png, :t_info_1, 640)
{:pixel_width, 640}
"""
@spec read_field(media_type_id(), atom(), term(), function()) :: {atom(), term()}
def read_field(media_type_id, field_type_id, value, conversion_fn \\ &read_field_value/3)
def read_field(media_type_id, :file_type, value, conversion_fn) when is_function(conversion_fn) do
{:file_type, conversion_fn.(media_type_id, :file_type, value)}
end
for %{media_type_id: media_type_id} = mapping <- @file_type_mapping do
def read_field(unquote(media_type_id), field_type_id, value, conversion_fn) when is_function(conversion_fn) do
case unquote(
mapping
|> Macro.escape()
) do
%{^field_type_id => nil} -> {field_type_id, value}
%{^field_type_id => field_type_value} ->
{field_type_value, conversion_fn.(unquote(media_type_id), field_type_value, value)}
_ ->
{field_type_id, value}
end
end
end
def read_field(_media_type_id, field_type, value, _conversion_fn) do
{field_type, value}
end
# Default conversion used by read_field/4. Known symbolic fields are decoded
# into richer values; everything else passes through untouched.
# (Params renamed: the first argument receives a media_type_id, not a file_type.)
defp read_field_value(_media_type_id, :ansi_flags, value) do
  AnsiFlags.ansi_flags(value)
end

defp read_field_value(_media_type_id, :font_id, value) do
  Font.font_id(value)
end

defp read_field_value(_media_type_id, _field_type_value, value) do
  value
end
@doc """
Returns a mapped version of the t_info_1 field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_info_1(t()) :: {atom, term()}
def t_info_1(%MediaInfo{} = media_info) do
read_media_field(media_info, :t_info_1, 0)
end
@doc """
Returns a mapped version of the t_info_1 field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_info_1(media_type_id(), term()) :: {atom, term()}
def t_info_1(media_type_id, value) do
read_field(media_type_id, :t_info_1, value)
end
@doc """
Returns a mapped version of the t_info_2 field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_info_2(t()) :: {atom, term()}
def t_info_2(%MediaInfo{} = media_info) do
read_media_field(media_info, :t_info_2, 0)
end
@doc """
Returns a mapped version of the t_info_2 field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_info_2(media_type_id(), term()) :: {atom, term()}
def t_info_2(media_type_id, value) do
read_field(media_type_id, :t_info_2, value)
end
@doc """
Returns a mapped version of the t_info_3 field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_info_3(t()) :: {atom, term()}
def t_info_3(%MediaInfo{} = media_info) do
read_media_field(media_info, :t_info_3, 0)
end
@doc """
Returns a mapped version of the t_info_3 field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_info_3(media_type_id(), term()) :: {atom, term()}
def t_info_3(media_type_id, value) do
read_field(media_type_id, :t_info_3, value)
end
@doc """
Returns a mapped version of the t_info_4 field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_info_4(t()) :: {atom, term()}
def t_info_4(%MediaInfo{} = media_info) do
read_media_field(media_info, :t_info_4, 0)
end
@doc """
Returns a mapped version of the t_info_4 field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_info_4(media_type_id(), term()) :: {atom, term()}
def t_info_4(media_type_id, value) do
read_field(media_type_id, :t_info_4, value)
end
@doc """
Returns a mapped version of the t_flags field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_flags(t()) :: {atom, term()}
def t_flags(%MediaInfo{} = media_info) do
read_media_field(media_info, :t_flags, 0)
end
@doc """
Returns a mapped version of the t_flags field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_flags(media_type_id(), term()) :: {atom, term()}
def t_flags(media_type_id, value) do
read_field(media_type_id, :t_flags, value)
end
@doc """
Returns a mapped version of the t_info_s field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_info_s(t()) :: {atom, term()}
def t_info_s(%MediaInfo{} = media_info) do
read_media_field(media_info, :t_info_s, nil)
end
@doc """
Returns a mapped version of the t_info_s field as a tuple containing the field type as the first element and the field value as the second element.
"""
@spec t_info_s(media_type_id(), term()) :: {atom, term()}
def t_info_s(media_type_id, value) do
read_field(media_type_id, :t_info_s, value)
end
@doc """
Checks if the combination of 'file_type' and 'data_type' corresponds to a valid, known file type.
## Examples
iex> Saucexages.MediaInfo.new(1, 1) |> Saucexages.MediaInfo.media_type_id?()
true
iex> Saucexages.MediaInfo.new(999, 5) |> Saucexages.MediaInfo.media_type_id?()
true
iex> Saucexages.MediaInfo.new(999, 1) |> Saucexages.MediaInfo.media_type_id?()
false
"""
@spec media_type_id?(t()) :: boolean()
def media_type_id?(%MediaInfo{file_type: file_type, data_type: data_type} = _media_info) do
media_type_id?(file_type, data_type)
end
def media_type_id?(_media_info) do
false
end
@doc """
Checks if the combination of 'file_type' and 'data_type' corresponds to a valid, known file type.
## Examples
iex> Saucexages.MediaInfo.media_type_id?(1, 1)
true
iex> Saucexages.MediaInfo.media_type_id?(999, 999)
false
iex> Saucexages.MediaInfo.media_type_id?(999, 5)
true
iex> Saucexages.MediaInfo.media_type_id?(-1, 5)
false
"""
@spec media_type_id?(file_type(), DataType.data_type()) :: boolean()
def media_type_id?(file_type, data_type)
for %{file_type: file_type, data_type_id: data_type_id} <- @file_type_mapping, !is_nil(file_type) do
data_type = DataType.data_type(data_type_id)
def media_type_id?(unquote(file_type), unquote(data_type)) do
true
end
end
def media_type_id?(file_type, data_type) when is_binary_text(file_type, data_type) do
#binary text
true
end
def media_type_id?(_file_type, _data_type) do
false
end
end
|
lib/saucexages/media_info.ex
| 0.799364
| 0.735796
|
media_info.ex
|
starcoder
|
defmodule ApiWeb.LegacyStops do
@moduledoc """
Enables backwards compatibility for changes to stop IDs and "splitting" of stops, e.g. from a
station having one non-platform-specific child stop to having a child stop for each platform.
Mappings from old to new stop IDs are expressed as:
%{"2020-01-01" => %{"old_id" => {"new_id", ["new_1", "new_2"]}}}
In this example, stop `old_id` was renamed to `new_id` and two sibling stops were added (if no
rename or additions occurred, these elements would be `nil` and `[]` respectively). Requests
using API versions earlier than 2020-01-01 that use `old_id` in a stop ID filter should behave
as though `new_id`, `new_1`, and `new_2` were *also* specified. Renames are distinguished from
additions so that only renames can be considered when a request is for a single stop.
An "old" ID may appear in multiple versions. Chained mappings where one version's "new" ID is a
later version's "old" ID are also possible.
"""
alias Model.Stop
@mappings %{
"2018-07-23" => %{
"Back Bay" =>
{nil, ["Back Bay-01", "Back Bay-02", "Back Bay-03", "Back Bay-05", "Back Bay-07"]},
"North Station" =>
{nil,
[
"North Station-01",
"North Station-02",
"North Station-03",
"North Station-04",
"North Station-05",
"North Station-06",
"North Station-07",
"North Station-08",
"North Station-09",
"North Station-10"
]},
"South Station" =>
{nil,
[
"South Station-01",
"South Station-02",
"South Station-03",
"South Station-04",
"South Station-05",
"South Station-06",
"South Station-07",
"South Station-08",
"South Station-09",
"South Station-10",
"South Station-11",
"South Station-12",
"South Station-13"
]}
},
"2019-02-12" => %{
"70001" => {nil, ["Forest Hills-01", "Forest Hills-02"]},
"70036" => {nil, ["Oak Grove-01", "Oak Grove-02"]},
"70061" => {nil, ["Alewife-01", "Alewife-02"]},
"70105" => {nil, ["Braintree-01", "Braintree-02"]},
"70150" => {nil, ["71150"]},
"70151" => {nil, ["71151"]},
"70200" => {nil, ["71199"]},
"70201" => {nil, ["Government Center-Brattle"]},
"70202" => {nil, ["Government Center-Brattle"]}
},
"2021-01-09" => %{
"Abington" => {"PB-0194-S", []},
"Anderson/ Woburn" => {"NHRML-0127", ["NHRML-0127-01", "NHRML-0127-02"]},
"Andover" => {"WR-0228-02", []},
"Ashland" => {"WML-0252", ["WML-0252-01", "WML-0252-02"]},
"Attleboro" => {"NEC-1969", ["NEC-1969-03", "NEC-1969-04"]},
"Auburndale" => {"WML-0102-02", []},
"Ayer" => {"FR-0361", ["FR-0361-01", "FR-0361-02"]},
"Back Bay" => {"NEC-2276", []},
"Back Bay-01" => {"NEC-2276-01", []},
"Back Bay-02" => {"NEC-2276-02", []},
"Back Bay-03" => {"NEC-2276-03", []},
"Back Bay-05" => {"WML-0012-05", []},
"Back Bay-07" => {"WML-0012-07", []},
"Ballardvale" => {"WR-0205-02", []},
"Bellevue" => {"NB-0072-S", []},
"Belmont" => {"FR-0064", ["FR-0064-01", "FR-0064-02"]},
"Beverly Farms" => {"GB-0229", ["GB-0229-01", "GB-0229-02"]},
"Beverly" => {"ER-0183", ["ER-0183-01", "ER-0183-02"]},
"Blue Hill Avenue" => {"DB-2222", ["DB-2222-01", "DB-2222-02"]},
"Boston Landing" => {"WML-0035", ["WML-0035-01", "WML-0035-02"]},
"Bourne" => {"CM-0564-S", []},
"Bradford" => {"WR-0325", ["WR-0325-01", "WR-0325-02"]},
"Braintree" => {"MM-0109", ["MM-0109-CS", "MM-0109-S"]},
"Brandeis/ Roberts" => {"FR-0115", ["FR-0115-01", "FR-0115-02"]},
"Bridgewater" => {"MM-0277-S", []},
"Brockton" => {"MM-0200", ["MM-0200-CS", "MM-0200-S"]},
"Buzzards Bay" => {"CM-0547-S", []},
"Campello" => {"MM-0219-S", []},
"Canton Center" => {"SB-0156-S", []},
"Canton Junction" =>
{"NEC-2139", ["NEC-2139-01", "NEC-2139-02", "SB-0150-04", "SB-0150-06"]},
"Chelsea" => {"ER-0046", ["ER-0046-01", "ER-0046-02"]},
"Cohasset" => {"GRB-0199-S", []},
"Concord" => {"FR-0201", ["FR-0201-01", "FR-0201-02"]},
"Dedham Corp Center" => {"FB-0118", ["FB-0118-01", "FB-0118-02"]},
"East Weymouth" => {"GRB-0146-S", []},
"Endicott" => {"FB-0109", ["FB-0109-01", "FB-0109-02"]},
"Fairmount" => {"DB-2205", ["DB-2205-01", "DB-2205-02"]},
"Fitchburg" => {"FR-0494-CS", []},
"Forest Hills" => {"NEC-2237", ["NEC-2237-03", "NEC-2237-05"]},
"Forge Park / 495" => {"FB-0303-S", []},
"Four Corners / Geneva" => {"DB-2249", ["DB-2249-01", "DB-2249-02"]},
"Foxboro" => {"FS-0049-S", []},
"Framingham" => {"WML-0214", ["WML-0214-01", "WML-0214-02"]},
"Franklin" => {"FB-0275-S", []},
"Gloucester" => {"GB-0316-S", []},
"Grafton" => {"WML-0364", ["WML-0364-01", "WML-0364-02"]},
"Greenbush" => {"GRB-0276-S", []},
"Greenwood" => {"WR-0085", ["WR-0085-01", "WR-0085-02"]},
"Halifax" => {"PB-0281", ["PB-0281-CS", "PB-0281-S"]},
"Hamilton/ Wenham" => {"ER-0227-S", []},
"Hanson" => {"PB-0245-S", []},
"Hastings" => {"FR-0137", ["FR-0137-01", "FR-0137-02"]},
"Haverhill" => {"WR-0329", ["WR-0329-01", "WR-0329-02"]},
"Hersey" => {"NB-0109-S", []},
"Highland" => {"NB-0076-S", []},
"Holbrook/ Randolph" => {"MM-0150-S", []},
"Hyannis" => {"CM-0790-S", []},
"Hyde Park" => {"NEC-2203", ["NEC-2203-02", "NEC-2203-03"]},
"Ipswich" => {"ER-0276-S", []},
"Islington" => {"FB-0125", ["FB-0125-01", "FB-0125-02"]},
"JFK/UMASS" => {"MM-0023-S", []},
"<NAME>" => {"FR-0132", ["FR-0132-01", "FR-0132-02"]},
"Kingston" => {"KB-0351-S", []},
"Lawrence" => {"WR-0264-02", []},
"Lincoln" => {"FR-0167", ["FR-0167-01", "FR-0167-02"]},
"Littleton / Rte 495" => {"FR-0301", ["FR-0301-01", "FR-0301-02"]},
"Lowell" => {"NHRML-0254", ["NHRML-0254-03", "NHRML-0254-04"]},
"Lynn" => {"ER-0115", ["ER-0115-01", "ER-0115-02"]},
"Malden Center" => {"WR-0045-S", []},
"Manchester" => {"GB-0254", ["GB-0254-01", "GB-0254-02"]},
"Mansfield" => {"NEC-2040", ["NEC-2040-01", "NEC-2040-02"]},
"Melrose Cedar Park" => {"WR-0067", ["WR-0067-01", "WR-0067-02"]},
"Melrose Highlands" => {"WR-0075", ["WR-0075-01", "WR-0075-02"]},
"Middleborough/ Lakeville" => {"MM-0356-S", []},
"Mishawum" => {"NHRML-0116", ["NHRML-0116-01", "NHRML-0116-02"]},
"Montello" => {"MM-0186", ["MM-0186-CS", "MM-0186-S"]},
"Montserrat" => {"GB-0198", ["GB-0198-01", "GB-0198-02"]},
"Morton Street" => {"DB-2230", ["DB-2230-01", "DB-2230-02"]},
"Nantasket Junction" => {"GRB-0183-S", []},
"Natick Center" => {"WML-0177", ["WML-0177-01", "WML-0177-02"]},
"Needham Center" => {"NB-0127-S", []},
"Needham Heights" => {"NB-0137-S", []},
"Needham Junction" => {"NB-0120-S", []},
"Newburyport" => {"ER-0362", ["ER-0362-01", "ER-0362-02"]},
"Newmarket" => {"DB-2265", ["DB-2265-01", "DB-2265-02"]},
"Newtonville" => {"WML-0081-02", []},
"Norfolk" => {"FB-0230-S", []},
"North Beverly" => {"ER-0208", ["ER-0208-01", "ER-0208-02"]},
"North Billerica" => {"NHRML-0218", ["NHRML-0218-01", "NHRML-0218-02"]},
"North Leominster" => {"FR-0451", ["FR-0451-01", "FR-0451-02"]},
"North Scituate" => {"GRB-0233-S", []},
"North Station" => {"BNT-0000", []},
"North Station-01" => {"BNT-0000-01", []},
"North Station-02" => {"BNT-0000-02", []},
"North Station-03" => {"BNT-0000-03", []},
"North Station-04" => {"BNT-0000-04", []},
"North Station-05" => {"BNT-0000-05", []},
"North Station-06" => {"BNT-0000-06", []},
"North Station-07" => {"BNT-0000-07", []},
"North Station-08" => {"BNT-0000-08", []},
"North Station-09" => {"BNT-0000-09", []},
"North Station-10" => {"BNT-0000-10", []},
"North Wilmington" => {"WR-0163-S", []},
"Norwood Central" => {"FB-0148", ["FB-0148-01", "FB-0148-02"]},
"Norwood Depot" => {"FB-0143", ["FB-0143-01", "FB-0143-02"]},
"place-dudly" => {"place-nubn", []},
"Plimptonville" => {"FB-0177-S", []},
"Plymouth" => {"PB-0356-S", []},
"Porter Square" => {"FR-0034", ["FR-0034-01", "FR-0034-02"]},
"Prides Crossing" => {"GB-0222", ["GB-0222-01", "GB-0222-02"]},
"Providence" => {"NEC-1851", ["NEC-1851-01", "NEC-1851-02", "NEC-1851-03", "NEC-1851-05"]},
"Quincy Center" => {"MM-0079-S", []},
"Reading" => {"WR-0120-S", []},
"Readville" => {"DB-0095", ["FB-0095-04", "FB-0095-05", "NEC-2192-02", "NEC-2192-03"]},
"River Works / GE Employees Only" => {"ER-0099", ["ER-0099-01", "ER-0099-02"]},
"Rockport" => {"GB-0353-S", []},
"Roslindale Village" => {"NB-0064-S", []},
"Route 128" => {"NEC-2173", ["NEC-2173-01", "NEC-2173-02"]},
"Rowley" => {"ER-0312-S", []},
"Ruggles" => {"NEC-2265", []},
"Ruggles-01" => {"NEC-2265-01", []},
"Ruggles-02" => {"NEC-2265-02", []},
"Ruggles-03" => {"NEC-2265-03", []},
"Salem" => {"ER-0168-S", []},
"Sharon" => {"NEC-2108", ["NEC-2108-01", "NEC-2108-02"]},
"Shirley" => {"FR-0394", ["FR-0394-01", "FR-0394-02"]},
"Silver Hill" => {"FR-0147", ["FR-0147-01", "FR-0147-02"]},
"South Acton" => {"FR-0253", ["FR-0253-01", "FR-0253-02"]},
"South Attleboro" => {"NEC-1919", ["NEC-1919-01", "NEC-1919-02"]},
"South Station" => {"NEC-2287", []},
"South Station-01" => {"NEC-2287-01", []},
"South Station-02" => {"NEC-2287-02", []},
"South Station-03" => {"NEC-2287-03", []},
"South Station-04" => {"NEC-2287-04", []},
"South Station-05" => {"NEC-2287-05", []},
"South Station-06" => {"NEC-2287-06", []},
"South Station-07" => {"NEC-2287-07", []},
"South Station-08" => {"NEC-2287-08", []},
"South Station-09" => {"NEC-2287-09", []},
"South Station-10" => {"NEC-2287-10", []},
"South Station-11" => {"NEC-2287-11", []},
"South Station-12" => {"NEC-2287-12", []},
"South Station-13" => {"NEC-2287-13", []},
"South Weymouth" => {"PB-0158-S", []},
"Southborough" => {"WML-0274", ["WML-0274-01", "WML-0274-02"]},
"Stoughton" => {"SB-0189-S", []},
"Swampscott" => {"ER-0128", ["ER-0128-01", "ER-0128-02"]},
"Talbot Avenue" => {"DB-2240", ["DB-2240-01", "DB-2240-02"]},
"TF Green Airport" => {"NEC-1768-03", []},
"Uphams Corner" => {"DB-2258", ["DB-2258-01", "DB-2258-02"]},
"Wachusett" => {"FR-3338-CS", []},
"Wakefield" => {"WR-0099", ["WR-0099-01", "WR-0099-02"]},
"Walpole" => {"FB-0191-S", []},
"Waltham" => {"FR-0098", ["FR-0098-01", "FR-0098-S"]},
"Wareham Village" => {"CM-0493-S", []},
"Waverley" => {"FR-0074", ["FR-0074-01", "FR-0074-02"]},
"Wedgemere" => {"NHRML-0073", ["NHRML-0073-01", "NHRML-0073-02"]},
"Wellesley Farms" => {"WML-0125", ["WML-0125-01", "WML-0125-02"]},
"Wellesley Hills" => {"WML-0135", ["WML-0135-01", "WML-0135-02"]},
"Wellesley Square" => {"WML-0147", ["WML-0147-01", "WML-0147-02"]},
"West Concord" => {"FR-0219", ["FR-0219-01", "FR-0219-02"]},
"West Gloucester" => {"GB-0296", ["GB-0296-01", "GB-0296-02"]},
"West Hingham" => {"GRB-0162-S", []},
"West Medford" => {"NHRML-0055", ["NHRML-0055-01", "NHRML-0055-02"]},
"West Natick" => {"WML-0199", ["WML-0199-01", "WML-0199-02"]},
"West Newton" => {"WML-0091-02", []},
"West Roxbury" => {"NB-0080-S", []},
"Westborough" => {"WML-0340", ["WML-0340-01", "WML-0340-02"]},
"Weymouth Landing/ East Braintree" => {"GRB-0118-S", []},
"Whitman" => {"PB-0212-S", []},
"Wickford Junction" => {"NEC-1659-03", []},
"Wilmington" => {"NHRML-0152", ["NHRML-0152-01", "NHRML-0152-02"]},
"Winchester Center" => {"NHRML-0078", ["NHRML-0078-01", "NHRML-0078-02"]},
"Windsor Gardens" => {"FB-0166-S", []},
"Worcester" => {"WML-0442-CS", []},
"Wyoming Hill" => {"WR-0062", ["WR-0062-01", "WR-0062-02"]},
"Yawkey" => {"WML-0025", ["WML-0025-05", "WML-0025-07"]}
}
}
@doc """
Transforms a list of stop IDs as per the given API version and options.
* `only_renames: true` expands the list using only stop ID renames, ignoring additions.
* `mappings: %{...}` supplies a custom set of mappings (see the moduledoc for details).
"""
@spec expand([Stop.id()], String.t(), keyword) :: [Stop.id()]
def expand(stop_ids, api_version, opts \\ []) do
all_mappings = Keyword.get(opts, :mappings, @mappings)
only_renames = Keyword.get(opts, :only_renames, false)
mappings =
all_mappings
|> Stream.filter(fn {version, _mapping} -> api_version < version end)
|> Enum.sort_by(fn {version, _mapping} -> version end)
|> Enum.map(fn {_version, mapping} -> mapping end)
Enum.reduce(mappings, stop_ids, fn mapping, ids ->
Enum.reduce(ids, [], fn stop_id, acc ->
{renamed_to, added} = Map.get(mapping, stop_id, {nil, []})
# credo:disable-for-lines:4 Credo.Check.Refactor.Nesting
new_ids =
if only_renames,
do: List.wrap(renamed_to),
else: Enum.reject([renamed_to | added], &is_nil/1)
acc ++ [stop_id | new_ids]
end)
end)
end
end
|
apps/api_web/lib/api_web/legacy_stops.ex
| 0.608827
| 0.423398
|
legacy_stops.ex
|
starcoder
|
defmodule Sage.Executor do
  @moduledoc """
  This module is responsible for Sage execution.
  """
  import Record
  alias Sage.Executor.Retries
  require Logger

  # Executor state carried through every transaction/compensation step.
  @type state ::
          record(:state,
            last_effect_or_error:
              {name :: term(), reason :: term()}
              | {:raise, exception :: Exception.t()}
              | {:throw, reason :: term()}
              | {:exit, reason :: term()},
            effects_so_far: map(),
            attempts: non_neg_integer(),
            aborted?: boolean(),
            tasks: [{Task.t(), Keyword.t()}],
            on_compensation_error: :raise | module(),
            tracers_and_state: {MapSet.t(), term()}
          )

  defrecordp(:state,
    last_effect_or_error: nil,
    effects_so_far: %{},
    attempts: 1,
    aborted?: false,
    tasks: [],
    on_compensation_error: :raise,
    tracers_and_state: {MapSet.new(), []}
  )

  # List of Sage.Executor functions we do not want to purge from stacktrace
  @stacktrace_functions_whitelist [:execute]

  # # TODO: Inline functions for performance optimization
  # @compile {:inline, encode_integer: 1, encode_float: 1}

  @doc """
  Executes all stages of `sage`, compensating on failure.

  Returns `{:ok, last_effect, all_effects}` on success, `{:error, reason}` when a
  stage failed and was fully compensated, and re-raises/exits/throws when the
  failure itself was a raise/exit/throw. Raises `Sage.EmptyError` for a sage
  with no stages.
  """
  @spec execute(sage :: Sage.t(), attrs :: any()) :: {:ok, result :: any(), effects :: Sage.effects()} | {:error, any()}
  def execute(sage, attrs \\ [])
  def execute(%Sage{stages: []}, _opts), do: raise(Sage.EmptyError)

  def execute(%Sage{} = sage, attrs) do
    %{
      stages: stages,
      final_hooks: final_hooks,
      on_compensation_error: on_compensation_error,
      tracers: tracers
    } = sage

    initial_state =
      state(on_compensation_error: on_compensation_error, tracers_and_state: {MapSet.to_list(tracers), attrs})

    final_hooks = MapSet.to_list(final_hooks)

    # Stages are stored in reverse order of definition.
    stages
    |> Enum.reverse()
    |> execute_transactions([], attrs, initial_state)
    |> maybe_notify_final_hooks(final_hooks, attrs)
    |> return_or_reraise()
  end

  # Transactions

  # All stages done and no async tasks pending: success.
  defp execute_transactions([], _executed_stages, _opts, state(tasks: []) = state) do
    state(last_effect_or_error: last_effect, effects_so_far: effects_so_far) = state
    {:ok, last_effect, effects_so_far}
  end

  # All stages done but async tasks are still outstanding: await them first.
  defp execute_transactions([], executed_stages, attrs, state) do
    state(tasks: tasks) = state

    {:next_transaction, state}
    |> maybe_await_for_tasks(tasks)
    |> handle_transaction_result()
    |> execute_next_stage([], executed_stages, attrs)
  end

  defp execute_transactions([stage | stages], executed_stages, attrs, state) do
    state(tasks: tasks) = state

    {stage, state}
    |> maybe_await_for_tasks(tasks)
    |> maybe_execute_transaction(attrs)
    |> handle_transaction_result()
    |> execute_next_stage(stages, executed_stages, attrs)
  end

  defp maybe_await_for_tasks({stage, state}, []), do: {stage, state}

  # If next stage has async transaction, we don't need to await for tasks
  defp maybe_await_for_tasks({{_name, {:run_async, _transaction, _compensation, _opts}} = stage, state}, _tasks),
    do: {stage, state}

  defp maybe_await_for_tasks({stage, state}, tasks) do
    state = state(state, tasks: [])

    tasks
    |> Enum.map(&await_for_task/1)
    |> Enum.reduce({stage, state}, &handle_task_result/2)
  end

  # Yields on the task, converting crashes/timeouts into the same tagged
  # tuples returned by synchronous transactions.
  defp await_for_task({name, {task, yield_opts}}) do
    timeout = Keyword.get(yield_opts, :timeout, 5000)

    case Task.yield(task, timeout) || Task.shutdown(task) do
      {:ok, result} ->
        {name, result}

      {:exit, {{:nocatch, reason}, _stacktrace}} ->
        {name, {:throw, reason}}

      {:exit, {exception, stacktrace}} ->
        {name, {:raise, {exception, stacktrace}}}

      {:exit, reason} ->
        {name, {:exit, reason}}

      nil ->
        {name, {:raise, %Sage.AsyncTransactionTimeoutError{name: name, timeout: timeout}}}
    end
  end

  # Compensations already started: record the async effect but keep the
  # original failure reason as the error to report.
  defp handle_task_result({name, result}, {:start_compensations, state}) do
    state(last_effect_or_error: failure_reason, tracers_and_state: tracers_and_state) = state
    tracers_and_state = maybe_notify_tracers(tracers_and_state, :finish_transaction, name)
    state = state(state, tracers_and_state: tracers_and_state)

    case handle_transaction_result({name, :async, result, state}) do
      {:next_transaction, {^name, :async}, state} ->
        state = state(state, last_effect_or_error: failure_reason)
        {:start_compensations, state}

      {:start_compensations, {^name, :async}, state} ->
        state = state(state, last_effect_or_error: failure_reason)
        {:start_compensations, state}
    end
  end

  defp handle_task_result({name, result}, {stage, state}) do
    state(tracers_and_state: tracers_and_state) = state
    tracers_and_state = maybe_notify_tracers(tracers_and_state, :finish_transaction, name)
    state = state(state, tracers_and_state: tracers_and_state)

    case handle_transaction_result({name, :async, result, state}) do
      {:next_transaction, {^name, :async}, state} ->
        {stage, state}

      {:start_compensations, {^name, :async}, state} ->
        {:start_compensations, state}
    end
  end

  defp maybe_execute_transaction({:start_compensations, state}, _opts), do: {:start_compensations, state}

  defp maybe_execute_transaction({{name, operation}, state}, attrs) do
    state(effects_so_far: effects_so_far, tracers_and_state: tracers_and_state) = state
    tracers_and_state = maybe_notify_tracers(tracers_and_state, :start_transaction, name)
    return = execute_transaction(operation, name, effects_so_far, attrs)

    # Async transactions notify :finish_transaction when the task is awaited.
    tracers_and_state =
      case return do
        {%Task{}, _async_opts} -> tracers_and_state
        _other -> maybe_notify_tracers(tracers_and_state, :finish_transaction, name)
      end

    state = state(state, tracers_and_state: tracers_and_state)
    {name, operation, return, state}
  end

  defp execute_transaction({:run, transaction, _compensation, []}, name, effects_so_far, attrs) do
    apply_transaction_fun(name, transaction, effects_so_far, attrs)
  rescue
    exception -> {:raise, {exception, __STACKTRACE__}}
  catch
    :exit, reason -> {:exit, reason}
    :throw, reason -> {:throw, reason}
  end

  defp execute_transaction({:run_async, transaction, _compensation, tx_opts}, name, effects_so_far, attrs) do
    # Propagate Logger metadata into the task process.
    logger_metadata = Logger.metadata()

    task =
      Task.Supervisor.async_nolink(Sage.AsyncTransactionSupervisor, fn ->
        _ = Logger.metadata(logger_metadata)
        apply_transaction_fun(name, transaction, effects_so_far, attrs)
      end)

    {task, tx_opts}
  end

  defp apply_transaction_fun(name, {mod, fun, args} = mfa, effects_so_far, attrs) do
    case apply(mod, fun, [effects_so_far, attrs | args]) do
      {:ok, effect} ->
        {:ok, effect}

      {:error, reason} ->
        {:error, reason}

      {:abort, reason} ->
        {:abort, reason}

      other ->
        {:raise, %Sage.MalformedTransactionReturnError{stage: name, transaction: mfa, return: other}}
    end
  end

  defp apply_transaction_fun(name, fun, effects_so_far, attrs) do
    case apply(fun, [effects_so_far, attrs]) do
      {:ok, effect} ->
        {:ok, effect}

      {:error, reason} ->
        {:error, reason}

      {:abort, reason} ->
        {:abort, reason}

      other ->
        {:raise, %Sage.MalformedTransactionReturnError{stage: name, transaction: fun, return: other}}
    end
  end

  defp handle_transaction_result({:start_compensations, state}), do: {:start_compensations, state}
  defp handle_transaction_result({:next_transaction, state}), do: {:next_transaction, state}

  # Async transaction started: remember the task, result is handled later.
  defp handle_transaction_result({name, operation, {%Task{}, _async_opts} = async_step, state}) do
    state(tasks: tasks) = state
    state = state(state, tasks: [{name, async_step} | tasks], aborted?: false)
    {:next_transaction, {name, operation}, state}
  end

  defp handle_transaction_result({name, operation, {:ok, effect}, state}) do
    state(effects_so_far: effects_so_far) = state

    state =
      state(state, last_effect_or_error: effect, effects_so_far: Map.put(effects_so_far, name, effect), aborted?: false)

    {:next_transaction, {name, operation}, state}
  end

  # :abort fails like :error but also forbids retries during compensation.
  defp handle_transaction_result({name, operation, {:abort, reason}, state}) do
    state(effects_so_far: effects_so_far) = state

    state =
      state(state,
        last_effect_or_error: {name, reason},
        effects_so_far: Map.put(effects_so_far, name, reason),
        aborted?: true
      )

    {:start_compensations, {name, operation}, state}
  end

  defp handle_transaction_result({name, operation, {:error, reason}, state}) do
    state(effects_so_far: effects_so_far) = state

    state =
      state(state,
        last_effect_or_error: {name, reason},
        effects_so_far: Map.put(effects_so_far, name, reason),
        aborted?: false
      )

    {:start_compensations, {name, operation}, state}
  end

  defp handle_transaction_result({name, operation, {:raise, exception}, state}) do
    state = state(state, last_effect_or_error: {:raise, exception})
    {:start_compensations, {name, operation}, state}
  end

  defp handle_transaction_result({name, operation, {:throw, reason}, state}) do
    state = state(state, last_effect_or_error: {:throw, reason})
    {:start_compensations, {name, operation}, state}
  end

  defp handle_transaction_result({name, operation, {:exit, reason}, state}) do
    state = state(state, last_effect_or_error: {:exit, reason})
    {:start_compensations, {name, operation}, state}
  end

  # Compensation

  defp execute_compensations(compensated_stages, [stage | stages], attrs, state) do
    {stage, state}
    |> execute_compensation(attrs)
    |> handle_compensation_result()
    |> execute_next_stage(compensated_stages, stages, attrs)
  end

  # Everything compensated: surface the original failure.
  defp execute_compensations(_compensated_stages, [], _opts, state) do
    state(last_effect_or_error: last_error) = state

    case last_error do
      {:exit, reason} -> {:exit, reason}
      {:raise, {exception, stacktrace}} -> {:raise, {exception, stacktrace}}
      {:raise, exception} -> {:raise, exception}
      {:throw, reason} -> {:throw, reason}
      {_name, reason} -> {:error, reason}
    end
  end

  defp execute_compensation({{name, {_type, _transaction, :noop, _tx_opts} = operation}, state}, _opts) do
    {name, operation, :ok, nil, state}
  end

  defp execute_compensation({{name, {_type, _transaction, compensation, _tx_opts} = operation}, state}, attrs) do
    state(effects_so_far: effects_so_far, tracers_and_state: tracers_and_state) = state
    {effect_to_compensate, effects_so_far} = Map.pop(effects_so_far, name)
    tracers_and_state = maybe_notify_tracers(tracers_and_state, :start_compensation, name)
    return = safe_apply_compensation_fun(name, compensation, effect_to_compensate, effects_so_far, attrs)
    tracers_and_state = maybe_notify_tracers(tracers_and_state, :finish_compensation, name)
    state = state(state, effects_so_far: effects_so_far, tracers_and_state: tracers_and_state)
    {name, operation, return, effect_to_compensate, state}
  end

  defp safe_apply_compensation_fun(name, compensation, effect_to_compensate, effects_so_far, attrs) do
    apply_compensation_fun(compensation, effect_to_compensate, effects_so_far, attrs)
  rescue
    exception -> {:raise, {exception, __STACKTRACE__}}
  catch
    :exit, reason -> {:exit, reason}
    :throw, error -> {:throw, error}
  else
    :ok ->
      :ok

    :abort ->
      :abort

    {:retry, retry_opts} ->
      {:retry, retry_opts}

    {:continue, effect} ->
      {:continue, effect}

    other ->
      {:raise, %Sage.MalformedCompensationReturnError{stage: name, compensation: compensation, return: other}}
  end

  defp apply_compensation_fun({mod, fun, args}, effect_to_compensate, effects_so_far, attrs),
    do: apply(mod, fun, [effect_to_compensate, effects_so_far, attrs | args])

  defp apply_compensation_fun(fun, effect_to_compensate, effects_so_far, attrs),
    do: apply(fun, [effect_to_compensate, effects_so_far, attrs])

  defp handle_compensation_result({name, operation, :ok, _compensated_effect, state}) do
    {:next_compensation, {name, operation}, state}
  end

  defp handle_compensation_result({name, operation, :abort, _compensated_effect, state}) do
    state = state(state, aborted?: true)
    {:next_compensation, {name, operation}, state}
  end

  # Retries are ignored once the sage was aborted.
  defp handle_compensation_result(
         {name, operation, {:retry, _retry_opts}, _compensated_effect, state(aborted?: true) = state}
       ) do
    {:next_compensation, {name, operation}, state}
  end

  defp handle_compensation_result(
         {name, operation, {:retry, retry_opts}, _compensated_effect, state(aborted?: false) = state}
       ) do
    state(attempts: attempts) = state

    if Retries.retry_with_backoff?(attempts, retry_opts) do
      state = state(state, attempts: attempts + 1)
      {:retry_transaction, {name, operation}, state}
    else
      {:next_compensation, {name, operation}, state}
    end
  end

  # :continue only applies when this stage is the one that failed.
  defp handle_compensation_result(
         {name, operation, {:continue, effect}, _compensated_effect,
          state(last_effect_or_error: {name, _reason}, aborted?: false) = state}
       ) do
    state(effects_so_far: effects_so_far) = state
    state = state(state, last_effect_or_error: effect, effects_so_far: Map.put(effects_so_far, name, effect))
    {:next_transaction, {name, operation}, state}
  end

  defp handle_compensation_result({name, operation, {:continue, _effect}, _compensated_effect, state}) do
    {:next_compensation, {name, operation}, state}
  end

  defp handle_compensation_result({name, operation, {:raise, _} = error, compensated_effect, state}) do
    state = state(state, last_effect_or_error: error)
    {:compensation_error, {name, operation, compensated_effect}, state}
  end

  defp handle_compensation_result({name, operation, {:exit, _reason} = error, compensated_effect, state}) do
    state = state(state, last_effect_or_error: error)
    {:compensation_error, {name, operation, compensated_effect}, state}
  end

  defp handle_compensation_result({name, operation, {:throw, _error} = error, compensated_effect, state}) do
    state = state(state, last_effect_or_error: error)
    {:compensation_error, {name, operation, compensated_effect}, state}
  end

  # Shared

  defp execute_next_stage({:next_transaction, {name, operation}, state}, stages, executed_stages, attrs) do
    execute_transactions(stages, [{name, operation} | executed_stages], attrs, state)
  end

  defp execute_next_stage({:next_transaction, state}, [], [{prev_name, _prev_op} | executed_stages], attrs) do
    state(effects_so_far: effects_so_far) = state
    state = state(state, last_effect_or_error: Map.get(effects_so_far, prev_name))
    execute_transactions([], executed_stages, attrs, state)
  end

  defp execute_next_stage({:start_compensations, {name, operation}, state}, compensated_stages, stages, attrs) do
    execute_compensations(compensated_stages, [{name, operation} | stages], attrs, state)
  end

  defp execute_next_stage({:start_compensations, state}, compensated_stages, stages, attrs) do
    execute_compensations(compensated_stages, stages, attrs, state)
  end

  defp execute_next_stage({:next_compensation, {name, operation}, state}, compensated_stages, stages, attrs) do
    execute_compensations([{name, operation} | compensated_stages], stages, attrs, state)
  end

  defp execute_next_stage({:retry_transaction, {name, operation}, state}, compensated_stages, stages, attrs) do
    execute_transactions([{name, operation} | compensated_stages], stages, attrs, state)
  end

  defp execute_next_stage(
         {:compensation_error, _compensation_error, state(on_compensation_error: :raise) = state},
         _compensated_stages,
         _stages,
         _opts
       ) do
    state(last_effect_or_error: error) = state
    return_or_reraise(error)
  end

  # Delegate a failed compensation (plus all remaining ones) to the
  # configured error-handler module.
  defp execute_next_stage({:compensation_error, compensation_error, state}, _compensated_stages, stages, attrs) do
    state(
      last_effect_or_error: error,
      effects_so_far: effects_so_far,
      on_compensation_error: on_compensation_error
    ) = state

    {name, operation, compensated_effect} = compensation_error

    compensations_to_run =
      [{name, operation} | stages]
      |> Enum.reduce([], fn
        {_name, {_type, _transaction, :noop, _tx_opts}}, acc ->
          acc

        {^name, {_type, _transaction, compensation, _tx_opts}}, acc ->
          acc ++ [{name, compensation, compensated_effect}]

        {name, {_type, _transaction, compensation, _tx_opts}}, acc ->
          acc ++ [{name, compensation, Map.fetch!(effects_so_far, name)}]
      end)

    _ =
      Logger.warn("""
      [Sage] compensation #{inspect(name)} failed to compensate effect:
      #{inspect(compensated_effect)}
      #{compensation_error_message(error)}
      """)

    case error do
      {:raise, {exception, stacktrace}} ->
        apply(on_compensation_error, :handle_error, [{:exception, exception, stacktrace}, compensations_to_run, attrs])

      {:raise, exception} ->
        apply(on_compensation_error, :handle_error, [{:exception, exception, []}, compensations_to_run, attrs])

      {:exit, reason} ->
        apply(on_compensation_error, :handle_error, [{:exit, reason}, compensations_to_run, attrs])

      {:throw, error} ->
        apply(on_compensation_error, :handle_error, [{:throw, error}, compensations_to_run, attrs])
    end
  end

  defp compensation_error_message({:raise, {exception, stacktrace}}) do
    """
    Because exception was raised:
    #{Exception.format(:error, exception, stacktrace)}
    """
  end

  defp compensation_error_message({:raise, exception}) do
    """
    Because exception was raised:
    #{Exception.format(:error, exception)}
    """
  end

  defp compensation_error_message({:exit, reason}) do
    "Because of exit with reason: #{inspect(reason)}."
  end

  defp compensation_error_message({:throw, error}) do
    "Because of thrown error: #{inspect(error)}."
  end

  defp maybe_notify_tracers({[], _tracing_state} = tracers, _action, _name) do
    tracers
  end

  defp maybe_notify_tracers({tracers, tracing_state}, action, name) do
    tracing_state =
      Enum.reduce(tracers, tracing_state, fn tracer, tracing_state ->
        apply_and_catch_errors(tracer, :handle_event, [name, action, tracing_state])
      end)

    {tracers, tracing_state}
  end

  defp maybe_notify_final_hooks(result, [], _opts), do: result

  defp maybe_notify_final_hooks(result, finalize_callbacks, attrs) do
    status = if elem(result, 0) == :ok, do: :ok, else: :error

    :ok =
      finalize_callbacks
      |> Enum.map(fn
        {module, function, args} ->
          args = [status, attrs | args]
          {{module, function, args}, apply_and_catch_errors(module, function, args)}

        callback ->
          args = [status, attrs]
          {{callback, args}, apply_and_catch_errors(callback, args)}
      end)
      |> Enum.each(&maybe_log_errors/1)

    result
  end

  defp apply_and_catch_errors(module, function, arguments) do
    apply(module, function, arguments)
  catch
    # __STACKTRACE__ replaces the deprecated System.stacktrace/0 and matches
    # the rescue clauses used elsewhere in this module.
    :error, exception -> {:raise, {exception, __STACKTRACE__}}
    :exit, reason -> {:exit, reason}
    :throw, reason -> {:throw, reason}
  end

  defp apply_and_catch_errors(function, arguments) do
    apply(function, arguments)
  catch
    :error, exception -> {:raise, {exception, __STACKTRACE__}}
    :exit, reason -> {:exit, reason}
    :throw, reason -> {:throw, reason}
  end

  defp maybe_log_errors({from, {:raise, {exception, stacktrace}}}) do
    Logger.error("""
    [Sage] Exception during #{callback_to_string(from)} final hook execution is ignored:
    #{Exception.format(:error, exception, stacktrace)}
    """)
  end

  defp maybe_log_errors({from, {:throw, reason}}) do
    Logger.error(
      "[Sage] Throw during #{callback_to_string(from)} final hook execution is ignored. " <> "Error: #{inspect(reason)}"
    )
  end

  defp maybe_log_errors({from, {:exit, reason}}) do
    Logger.error(
      "[Sage] Exit during #{callback_to_string(from)} final hook execution is ignored. " <>
        "Exit reason: #{inspect(reason)}"
    )
  end

  defp maybe_log_errors({_from, _other}) do
    :ok
  end

  defp callback_to_string({m, f, a}), do: "#{to_string(m)}.#{to_string(f)}/#{to_string(length(a))}"
  defp callback_to_string({f, _a}), do: inspect(f)

  defp return_or_reraise({:ok, effect, other_effects}), do: {:ok, effect, other_effects}
  defp return_or_reraise({:exit, reason}), do: exit(reason)
  defp return_or_reraise({:throw, reason}), do: throw(reason)
  defp return_or_reraise({:raise, {exception, stacktrace}}), do: filter_and_reraise(exception, stacktrace)
  defp return_or_reraise({:raise, exception}), do: raise(exception)
  defp return_or_reraise({:error, reason}), do: {:error, reason}

  defp filter_and_reraise(exception, stacktrace) do
    stacktrace =
      Enum.reject(stacktrace, &match?({__MODULE__, fun, _, _} when fun not in @stacktrace_functions_whitelist, &1))

    reraise exception, stacktrace
  end
end
|
lib/sage/executor.ex
| 0.52975
| 0.46223
|
executor.ex
|
starcoder
|
defmodule P1000 do
  @moduledoc """
  :timer.tc(&Main.main/0)
  Query
  A(x, y) : A_x + y
  B(x, y) : B_i + A_i (x <= i <= y)
  # Examples
  iex> an = [9, 8, 1, 9, 6, 10, 8]
  ...> |> Enum.with_index() |> Enum.reduce(%{}, &(Map.put(&2, elem(&1, 1), elem(&1, 0))))
  ...> P1000.solve(7, 3, an, [["B", 2, 5], ["A", 7, 9], ["B", 4, 7]])
  [0, 8, 1, 18, 12, 10, 17]
  iex> {n, q} = {10, 10}
  ...> an = [1, 4, 1, 5, 9, 2, 6, 5, 3, 5]
  ...> |> Enum.with_index() |> Enum.reduce(%{}, &(Map.put(&2, elem(&1, 1), elem(&1, 0))))
  ...> queries = [
  ...>   ["A", 2, 7],
  ...>   ["A", 5, 9],
  ...>   ["B", 1, 4],
  ...>   ["B", 6, 10],
  ...>   ["A", 10, 3],
  ...>   ["B", 1, 2],
  ...>   ["A", 8, 3],
  ...>   ["B", 4, 9],
  ...>   ["A", 6, 2],
  ...>   ["B", 1, 10]
  ...> ]
  ...> P1000.solve(n, q, an, queries)
  [3, 33, 2, 15, 36, 8, 18, 21, 9, 13]
  """

  # Reads n, q, the array A and the q queries from stdin, then prints the
  # final B array separated by spaces.
  def main do
    [n, q] =
      IO.read(:line)
      |> String.trim()
      |> String.split(" ")
      |> Enum.map(&String.to_integer/1)

    an =
      IO.read(:line)
      |> String.trim()
      |> String.split(" ")
      |> Enum.with_index()
      |> Map.new(fn {value, index} -> {index, String.to_integer(value)} end)

    queries =
      for _ <- 0..(q - 1) do
        IO.read(:line)
        |> String.trim()
        |> String.split(" ")
        |> Enum.map(fn
          token when token in ["A", "B"] -> token
          token -> String.to_integer(token)
        end)
      end

    solve(n, q, an, queries) |> Enum.join(" ") |> IO.puts()
  end

  # Applies each query in order against 0-indexed maps and returns B as a list.
  # "A" adds y to A_x; "B" adds the current A_i into B_i for i in x..y.
  def solve(n, _q, an, queries) do
    zeros = Map.new(0..(n - 1), &{&1, 0})

    {_final_an, bn} =
      Enum.reduce(queries, {an, zeros}, fn
        ["A", x, y], {values, sums} ->
          {Map.put(values, x - 1, values[x - 1] + y), sums}

        ["B", x, y], {values, sums} ->
          updated_sums =
            Enum.reduce((x - 1)..(y - 1), sums, fn i, acc ->
              Map.put(acc, i, sums[i] + values[i])
            end)

          {values, updated_sums}
      end)

    Enum.map(0..(n - 1), &bn[&1])
  end
end
"""
defmodule Main do
def main do
[n, q] = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
an = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.with_index() |> Enum.reduce(%{}, &(Map.put(&2, elem(&1, 1), String.to_integer(elem(&1, 0)))))
queries = for _ <- 0..(q-1) do
IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(fn
v when v in ["A", "B"] -> v
v -> String.to_integer(v)
end)
end
solve(n, q, an, queries) |> Enum.join(" ") |> IO.puts()
end
def solve(n, _q, an, queries) do
{_, bn} = Enum.reduce(queries, {an, Enum.reduce(0..(n-1), %{}, &(Map.put(&2, &1, 0)))}, fn
["A", x, y], {an, bn} -> {Map.put(an, x - 1, an[x - 1] + y), bn}
["B", x, y], {an, bn} -> {an, Enum.reduce((x - 1)..(y - 1), bn, &(Map.put(&2, &1, bn[&1] + an[&1])))}
end)
(n-1)..0 |> Enum.reduce([], fn i, acc -> [bn[i] | acc] end)
end
end
"""
|
lib/1000/p1000.ex
| 0.547706
| 0.570571
|
p1000.ex
|
starcoder
|
defmodule Bitcoin.Script do
  @moduledoc """
  Bitcoin Script.
  Bitcoin uses a scripting system for transactions. Forth-like, Script is simple, stack-based
  and processed from left to right. It is purposefully not Turing-complete, with no loops.
  This module contains functions to oparet on scripts. The actual implementation of the Script
  engine can be found in `Bitcoin.Script.Interpreter`
  Common pattern of `opts` is passed to verification and run functions. They set up the context
  for script verification. Here are the opts used:
  * `tx` - `Bitcoin.Protocol.Messages.Tx` in which the script is present
  * `input_number` - input number of that transaction (needed for sighash)
  * `sub_script` - also used for sighash, (often equal to pk_script)
  * `flags` - script validation flags, see below
  Flags are in the format of hash e.g. %{p2sh: true, dersig: true}. The reason for using that
  instead of a simple list is ability to do function matching.
  """
  # Script.Serialization handles parsing the script from a binary into a list with opcodes as symbols.
  # Script.Control implements parsing OP_IF
  # Script.Number handles parsing and serializing script integers (CStriptNum)
  use Bitcoin.Script.Opcodes
  # TODO block sigop limit (MAX_BLOCK_SIGOPS = MAX_BLOCK_SIZE/50), so we need to be abel to export that count
  # TODO verify signature encoding https://github.com/bitcoin/bips/blob/master/bip-0066.mediawiki
  use Bitcoin.Script.P2SH
  alias Bitcoin.Script.Serialization
  alias Bitcoin.Script.Interpreter
  @doc """
  Parse binary script into a form consumable by the interpreter (ops list). Parsed script looks like this:
  [:OP_10, :OP_10, :OP_ADD, <<20>>, :OP_EQUAL]
  """
  defdelegate parse(binary), to: Serialization
  @doc """
  Represent parsed script (list of :OP_CODES and binary data), in it's original binary form.
  """
  defdelegate to_binary(script), to: Serialization
  @doc """
  Returns string representation of the provided parsed script in the same form as bitcoind decodescript command
  """
  defdelegate to_string(script), to: Serialization
  @doc """
  Parse script from the string in a format that is outputed by bitcoid.
  E.g. "2 OP_IF 0 OP_ELSE 1 OP_ENDIF"
  """
  defdelegate parse_string(string), to: Serialization
  @doc """
  Parse script from a strig form familiar from test cases.
  E.g. "128 SIZE 2 EQUAL"
  Binaries appear in the 0x form or literaly in single quotes.
  """
  defdelegate parse_string2(string), to: Serialization
  @doc """
  Execute the provided script. Returns the resulting stack or {:error, reason} tuple.
  """
  @spec exec(list | binary, map) :: list | {:error, term}
  def exec(script, opts \\ %{})
  # Binary scripts are parsed before delegating to the interpreter.
  def exec(binary, opts) when is_binary(binary), do: binary |> parse |> exec(opts)
  defdelegate exec(script, opts), to: Interpreter
  defdelegate exec(stack, script, opts), to: Interpreter
  # The reason for this function is that we need to parse sig script and pk separately.
  # Otherwise sig script could do some nasty stuff with malformed PUSHDATA
  # Then we have to run it separately
  # default opts are done this weird way because additional function cases come from uses above (to avoid warnings)
  @doc """
  Verifies a signature script against a public key script.
  Runs `sig_script`, then runs `pk_script` on the resulting stack, and casts the
  final stack to a boolean. Any raise/throw during parsing or execution is
  treated as a failed verification and returns `false`.
  """
  def verify_sig_pk(sig_bin, pk_bin), do: verify_sig_pk(sig_bin, pk_bin, %{})
  def verify_sig_pk(sig_bin, pk_bin, opts) when is_binary(sig_bin) and is_binary(pk_bin), do: verify_sig_pk(sig_bin |> parse, pk_bin |> parse, opts)
  def verify_sig_pk(sig_script, pk_script, opts) do
    try do
      sig_script
      |> exec(opts)
      |> exec(pk_script, opts)
      |> cast_to_bool
    catch _,_ ->
      # Deliberately broad: any failure in a script means verification fails.
      false
    end
  end
  # Returns true if top item of the stack is non-zero
  @doc """
  Executes `script` and returns `true` if the resulting stack's top item is
  non-zero, `false` otherwise (including on any execution error).
  """
  def verify(script, opts \\ %{}) do
    # TODO we should get rid of exceptions, make parser return {:error and non matched script cases should just be :invalid
    try do
      script |> exec(opts) |> cast_to_bool
    catch _, _ ->
      false
    end
  end
  # Cast stack to boolean
  @doc """
  Casts an execution result (stack or `{:error, _}`) to a boolean: an error or
  empty stack is `false`; otherwise the top stack item decides.
  """
  def cast_to_bool({:error, _}), do: false
  def cast_to_bool([]), do: false
  def cast_to_bool([x | _]), do: Interpreter.bool(x)
end
|
lib/bitcoin/script.ex
| 0.76921
| 0.606964
|
script.ex
|
starcoder
|
defmodule Stripe.Card do
  @moduledoc """
  Work with Stripe card objects.

  You can:

  - Create a card
  - Retrieve a card
  - Update a card
  - Delete a card

  If you have been using an old version of the library, note that the functions which take an
  `owner_type` argument are now deprecated. The owner type is indicated by setting either the
  `recipient` or `customer` field.

  Stripe API reference: https://stripe.com/docs/api#cards
  """

  use Stripe.Entity
  import Stripe.Request

  @type t :: %__MODULE__{
          id: Stripe.id(),
          object: String.t(),
          account: Stripe.id() | Stripe.Account.t() | nil,
          address_city: String.t() | nil,
          address_country: String.t() | nil,
          address_line1: String.t() | nil,
          address_line1_check: String.t() | nil,
          address_line2: String.t() | nil,
          address_state: String.t() | nil,
          address_zip: String.t() | nil,
          address_zip_check: String.t() | nil,
          available_payout_methods: list(String.t()) | nil,
          brand: String.t(),
          country: String.t() | nil,
          currency: String.t() | nil,
          customer: Stripe.id() | Stripe.Customer.t() | nil,
          cvc_check: String.t() | nil,
          default_for_currency: boolean | nil,
          deleted: boolean | nil,
          dynamic_last4: String.t() | nil,
          exp_month: integer,
          exp_year: integer,
          fingerprint: String.t() | nil,
          funding: String.t(),
          last4: String.t(),
          metadata: Stripe.Types.metadata(),
          name: String.t() | nil,
          recipient: Stripe.id() | Stripe.Recipient.t() | nil,
          tokenization_method: String.t() | nil
        }

  defstruct [
    :id,
    :object,
    :account,
    :address_city,
    :address_country,
    :address_line1,
    :address_line1_check,
    :address_line2,
    :address_state,
    :address_zip,
    :address_zip_check,
    :available_payout_methods,
    :brand,
    :country,
    :currency,
    :customer,
    :cvc_check,
    :default_for_currency,
    :deleted,
    :dynamic_last4,
    :exp_month,
    :exp_year,
    :fingerprint,
    :funding,
    :last4,
    :metadata,
    :name,
    :recipient,
    :tokenization_method
  ]

  # Cards live under their owning customer in the Stripe REST API.
  defp plural_endpoint(%{customer: id}) do
    "customers/" <> id <> "/sources"
  end

  @doc """
  Create a card.

  This requires a `token` created by a library like Stripe.js.
  For PCI compliance reasons you should not send a card's number or CVC
  to your own server.

  If you want to create a card with your server without a token, you
  can use the low-level API.
  """
  # Fix: removed the trailing comma after the last map entry in this spec
  # (and the two specs below) — trailing commas are a syntax error in Elixir.
  @spec create(params, Keyword.t()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params: %{
               :customer => Stripe.id() | Stripe.Customer.t(),
               :source => Stripe.id() | Stripe.Source.t(),
               optional(:metadata) => Stripe.Types.metadata()
             }
  def create(%{customer: _, source: _} = params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(params |> plural_endpoint())
    # :customer only selects the endpoint; it must not be sent in the body.
    |> put_params(params |> Map.delete(:customer))
    |> put_method(:post)
    |> make_request()
  end

  @doc """
  Retrieve a card.
  """
  @spec retrieve(Stripe.id() | t, map, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
  def retrieve(id, %{customer: _} = params, opts \\ []) do
    endpoint = params |> plural_endpoint()

    new_request(opts)
    |> put_endpoint(endpoint <> "/#{get_id!(id)}")
    |> put_method(:get)
    |> make_request()
  end

  @doc """
  Update a card.

  Takes the `id` and a map of changes.
  """
  # Fix: the params spec previously required `:id => String.t()`, but the id
  # is the first positional argument — the function head requires a
  # `:customer` key in `params` instead.
  @spec update(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params: %{
               :customer => Stripe.id() | Stripe.Customer.t(),
               optional(:address_city) => String.t(),
               optional(:address_country) => String.t(),
               optional(:address_line1) => String.t(),
               optional(:address_line2) => String.t(),
               optional(:address_state) => String.t(),
               optional(:address_zip) => String.t(),
               optional(:exp_month) => String.t(),
               optional(:exp_year) => String.t(),
               optional(:metadata) => Stripe.Types.metadata(),
               optional(:name) => String.t()
             }
  def update(id, %{customer: _} = params, opts \\ []) do
    endpoint = params |> plural_endpoint()

    new_request(opts)
    |> put_endpoint(endpoint <> "/#{get_id!(id)}")
    |> put_method(:post)
    |> put_params(params |> Map.delete(:customer))
    |> make_request()
  end

  @doc """
  Delete a card.
  """
  @spec delete(Stripe.id() | t, map, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
  def delete(id, %{customer: _} = params, opts \\ []) do
    endpoint = params |> plural_endpoint()

    new_request(opts)
    |> put_endpoint(endpoint <> "/#{get_id!(id)}")
    |> put_method(:delete)
    |> make_request()
  end

  @doc """
  List all cards.
  """
  @spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
        when params: %{
               :customer => Stripe.id() | Stripe.Customer.t(),
               optional(:ending_before) => t | Stripe.id(),
               optional(:limit) => 1..100,
               optional(:starting_after) => t | Stripe.id()
             }
  def list(%{customer: _} = params, opts \\ []) do
    endpoint = params |> plural_endpoint()
    # Sources endpoints return all payment sources; restrict to cards.
    params = params |> Map.put(:object, "card")

    new_request(opts)
    |> prefix_expansions()
    |> put_endpoint(endpoint)
    |> put_method(:get)
    |> put_params(params |> Map.delete(:customer))
    |> make_request()
  end
end
|
lib/stripe/payment_methods/card.ex
| 0.827689
| 0.672597
|
card.ex
|
starcoder
|
defmodule ExPixBRCode.Payments.Models.DynamicPixPaymentWithDueDate do
  @moduledoc """
  A dynamic Pix payment with due date.

  This has extra complexity when dealing with interests and due dates.
  """

  use ExPixBRCode.ValueObject

  alias ExPixBRCode.Changesets

  @required [:revisao, :chave, :txid, :status]
  @optional [:solicitacaoPagador]

  @calendario_required [:criacao, :apresentacao, :dataDeVencimento]
  @calendario_optional [:validadeAposVencimento]

  # validadeAposVencimento must fit in a positive Int32.
  @max_value_validade_apos_vencimento 2_147_483_647

  @devedor_required [:cpf, :cnpj, :nome]

  @recebedor_required [:nome, :logradouro, :cidade, :uf, :cep]
  @recebedor_optional [:nomeFantasia]
  @recebedor_one_of [:cpf, :cnpj]

  @valor_required [:final]
  @valor_optional [:original, :abatimento, :desconto, :juros, :multa]

  embedded_schema do
    field :revisao, :integer
    field :chave, :string
    field :txid, :string

    field :status, Ecto.Enum,
      values: ~w(ATIVA CONCLUIDA REMOVIDA_PELO_USUARIO_RECEBEDOR REMOVIDA_PELO_PSP)a

    field :solicitacaoPagador, :string

    embeds_one :calendario, Calendario, primary_key: false do
      field :criacao, :utc_datetime
      field :apresentacao, :utc_datetime
      field :dataDeVencimento, :date
      field :validadeAposVencimento, :integer, default: 30
    end

    embeds_one :devedor, Devedor, primary_key: false do
      field :cpf, :string
      field :cnpj, :string
      field :nome, :string
    end

    embeds_one :valor, Valor, primary_key: false do
      field :original, :decimal
      field :abatimento, :decimal
      field :desconto, :decimal
      field :juros, :decimal
      field :multa, :decimal
      field :final, :decimal
    end

    embeds_one :recebedor, Recebedor, primary_key: false do
      field :cpf, :string
      field :cnpj, :string
      field :nome, :string
      field :nomeFantasia, :string
      field :logradouro, :string
      field :cidade, :string
      field :uf, :string
      field :cep, :string
    end

    embeds_many :infoAdicionais, InfoAdicionais, primary_key: false do
      field :nome, :string
      field :valor, :string
    end
  end

  @spec changeset(
          {map, map}
          | %{
              :__struct__ => atom | %{:__changeset__ => map, optional(any) => any},
              optional(atom) => any
            },
          any
        ) :: Ecto.Changeset.t()
  @doc false
  def changeset(model \\ %__MODULE__{}, params) do
    model
    |> cast(coalesce_params(params), @required ++ @optional)
    |> validate_required(@required)
    |> validate_length(:txid, max: 35)
    |> validate_length(:solicitacaoPagador, max: 140)
    |> validate_number(:revisao, greater_than_or_equal_to: 0)
    |> cast_embed(:calendario, required: true, with: &calendario_changeset/2)
    |> cast_embed(:devedor, required: true, with: &devedor_changeset/2)
    # Fix: this was `require: true`, an unknown option that cast_embed
    # ignores — :valor was effectively optional.
    |> cast_embed(:valor, required: true, with: &valor_changeset/2)
    |> cast_embed(:recebedor, required: true, with: &recebedor_changeset/2)
    |> cast_embed(:infoAdicionais, with: &info_adicionais_changeset/2)
    # Fix: validate_length only understands :is/:min/:max/:count; the former
    # `less_than_or_equal_to:` option was silently ignored.
    |> validate_length(:infoAdicionais, max: 77)
  end

  # Normalize an explicit nil infoAdicionais (string- or atom-keyed) to an
  # empty list so casting the embed does not fail.
  defp coalesce_params(%{"infoAdicionais" => nil} = params),
    do: Map.put(params, "infoAdicionais", [])

  defp coalesce_params(%{infoAdicionais: nil} = params), do: Map.put(params, :infoAdicionais, [])
  defp coalesce_params(params), do: params

  defp calendario_changeset(model, params) do
    model
    |> cast(params, @calendario_required ++ @calendario_optional)
    |> validate_required(@calendario_required)
    # The validadeAposVencimento field accepts only positive Int32 values.
    |> validate_number(:validadeAposVencimento,
      less_than_or_equal_to: @max_value_validade_apos_vencimento,
      greater_than_or_equal_to: 0
    )
  end

  defp devedor_changeset(model, params) do
    model
    |> cast(params, @devedor_required)
    |> devedor_validate_required()
  end

  # devedor is optional as a whole, but when a document is given exactly one
  # of cpf/cnpj must be present, together with nome, and the document must be
  # valid.
  defp devedor_validate_required(changeset) do
    cpf = get_field(changeset, :cpf)
    cnpj = get_field(changeset, :cnpj)
    name = get_field(changeset, :nome)

    cond do
      not is_nil(cpf) and not is_nil(cnpj) ->
        add_error(changeset, :devedor, "only one of cpf or cnpj must be present")

      (not is_nil(cpf) or not is_nil(cnpj)) and is_nil(name) ->
        add_error(changeset, :devedor, "when either cpf or cnpj is present so must be 'nome'")

      not is_nil(cpf) ->
        Changesets.validate_document(changeset, :cpf)

      true ->
        Changesets.validate_document(changeset, :cnpj)
    end
  end

  defp recebedor_changeset(model, params) do
    model
    |> cast(params, @recebedor_optional ++ @recebedor_one_of ++ @recebedor_required)
    |> validate_required(@recebedor_required)
    |> validate_either_cpf_or_cnpj()
  end

  # recebedor must carry exactly one valid document (cpf or cnpj).
  defp validate_either_cpf_or_cnpj(changeset) do
    cpf = get_field(changeset, :cpf)
    cnpj = get_field(changeset, :cnpj)

    cond do
      is_nil(cpf) and is_nil(cnpj) ->
        add_error(changeset, :recebedor, "one of cpf or cnpj must be present")

      not is_nil(cpf) and not is_nil(cnpj) ->
        add_error(changeset, :recebedor, "only one of cpf or cnpj must be present")

      not is_nil(cpf) ->
        Changesets.validate_document(changeset, :cpf)

      true ->
        Changesets.validate_document(changeset, :cnpj)
    end
  end

  defp info_adicionais_changeset(model, params) do
    model
    |> cast(params, [:nome, :valor])
    |> validate_required([:nome, :valor])
    # Fix: same invalid `less_than_or_equal_to:` option replaced by :max.
    |> validate_length(:nome, max: 50)
    |> validate_length(:valor, max: 200)
  end

  defp valor_changeset(model, params) do
    model
    |> cast(params, @valor_required ++ @valor_optional)
    |> validate_required(@valor_required)
    |> validate_number(:abatimento, greater_than_or_equal_to: 0)
    |> validate_number(:desconto, greater_than_or_equal_to: 0)
    |> validate_number(:juros, greater_than_or_equal_to: 0)
    |> validate_number(:multa, greater_than_or_equal_to: 0)
  end
end
|
lib/ex_pix_brcode/payments/models/dynamic_pix_payment_with_due_date.ex
| 0.741768
| 0.571049
|
dynamic_pix_payment_with_due_date.ex
|
starcoder
|
defmodule Elexir.FSM do
  @moduledoc false

  require Logger

  # A path's weight is the sum of the counts carried alongside each state.
  defp calculate_path_weight(path) do
    path
    |> Enum.map(& elem(&1, 1))
    |> Enum.sum
  end

  # Backtracked all the way to the initial marker: done. Return the
  # accumulated complete paths, lightest first.
  defp _find_all_paths([{nil, _}], _context, acc) do
    Enum.sort_by(acc, &calculate_path_weight/1, &<=/2)
  end

  # Context exhausted: likewise done.
  defp _find_all_paths(_path, [], acc) do
    Enum.sort_by(acc, &calculate_path_weight/1, &<=/2)
  end

  # Degenerate :begin -> :end edge: nothing to record, keep backtracking.
  defp _find_all_paths(
    [{:end, _}|[{:begin, _}|_] = last_path],
    [_|past_context],
    acc
  ) do
    _find_all_paths(last_path, past_context, acc)
  end

  # Reached :end on a non-trivial path: strip the nil/:begin markers and
  # record the completed path, then continue searching from one step back.
  defp _find_all_paths([{:end, _}|last_path], [_|past_context], acc) do
    [_, {:begin, _}|completed_path] = Enum.reverse(last_path)
    new_acc = [completed_path|acc]
    _find_all_paths(last_path, past_context, new_acc)
  end

  # Current FSM snapshot is empty: drop it and backtrack.
  defp _find_all_paths([_|path], [fsm|context], acc) when fsm == %{} do
    _find_all_paths(path, context, acc)
  end

  defp _find_all_paths(
    [{state, _}|[{last_state, _}|_] = last_path] = path,
    [fsm|past_context],
    acc
  ) do
    :ok = Logger.debug("Transitioning from state #{inspect last_state} to state #{inspect state}.")
    :ok = Logger.debug("Current context is #{inspect fsm}.")
    :ok = Logger.debug("Path is #{inspect path}.")

    case take_next_state(fsm, {last_state, state}) do
      {nil, _} ->
        # Dead end: pop one step off the path and one snapshot off the context.
        [_|next_path] = last_path
        [_|next_context] = past_context
        _find_all_paths(next_path, next_context, acc)

      {next_state, new_fsm} ->
        next_path = [next_state|path]
        # The snapshot is pushed twice: once to consume going forward and once
        # to restore when backtracking through this step.
        _find_all_paths(next_path, [new_fsm, new_fsm|past_context], acc)
    end
  end

  @doc """
  Finds every complete `:begin`-to-`:end` path through `state_machine` (a map
  of `{from, to} => %{next_state => count}` as produced by
  `string_to_state_machine/1`), sorted by ascending path weight.

  Returns `{:error, :einval}` when the machine has no `:begin` transition.
  """
  def find_all_paths(state_machine) when is_map state_machine do
    initial_transition = {nil, :begin}
    next_states = state_machine[initial_transition]

    if is_nil(next_states) do
      :ok = Logger.error("No next-states for :begin transition in the given state machine!")
      # Bug fix: this error tuple used to be computed but not returned — the
      # `if` had no `else`, so execution fell through and crashed below on
      # `Map.values(nil)`.
      {:error, :einval}
    else
      begin_count = Enum.sum(Map.values(next_states)) - 1
      initial_path = [{:begin, begin_count}, {nil, begin_count}]
      _find_all_paths(initial_path, [state_machine, state_machine], [])
    end
  end

  # Pops one next-state entry for `transition` out of the snapshot.
  # Yields {nil, _} when the transition is unknown or exhausted.
  defp _take_next_state(fsm, transition) do
    Map.get_and_update(fsm, transition, fn
      nil ->
        {nil, fsm}
      next_states ->
        case Enum.take(next_states, 1) do
          [] ->
            {nil, []}
          [{state, _} = entry] ->
            {entry, Map.delete(next_states, state)}
        end
    end)
  end

  # Like _take_next_state/2, but removes the transition key entirely once its
  # next-state set is empty.
  defp take_next_state(fsm, transition) do
    with {entry, %{^transition => next_states}} when next_states == %{} <-
      _take_next_state(fsm, transition)
    do
      {entry, Map.delete(fsm, transition)}
    end
  end

  defp merge_maps_by_adding_values(map1, map2) do
    Map.merge(map1, map2, fn(_, a, b) -> a + b end)
  end

  # Rewrites every state present in `replace_map` to `new_state`, merging the
  # transition counts of entries that collapse onto each other.
  defp replace_states(fsm, replace_map, new_state) do
    fsm
    |> Enum.map(fn {{s1, s2}, next_states} ->
      new_s1 = (Map.has_key?(replace_map, s1) && new_state) || s1
      new_s2 = (Map.has_key?(replace_map, s2) && new_state) || s2
      new_next_states =
        Enum.reduce(next_states, %{}, fn({state, count}, acc) ->
          if Map.has_key?(replace_map, state) do
            merge_maps_by_adding_values(acc, %{new_state => count})
          else
            merge_maps_by_adding_values(acc, %{state => count})
          end
        end)
      {{new_s1, new_s2}, new_next_states}
    end)
    |> Enum.reduce(%{}, fn({{s1, s2}, next_states}, acc) ->
      Map.merge(acc, %{{s1, s2} => next_states}, fn(_, a, b) ->
        merge_maps_by_adding_values(a, b)
      end)
    end)
  end

  # Integer ceiling for non-negative numbers.
  defp ceiling(0), do: 0
  defp ceiling(num) when trunc(num) / num < 1.0, do: trunc(num + 1)
  defp ceiling(num) when trunc(num) / num == 1.0, do: trunc(num)

  @doc """
  Collapses every state that participates in fewer than `threshold`
  (a fraction in 0..1) of all transitions into a single `:hole` state.
  """
  def merge_low_probability_states(state_machine, threshold)
  when 0 <= threshold and threshold <= 1 do
    transitions_per_state =
      state_machine
      |> Map.values
      |> Enum.reduce(%{}, fn(next_states, acc) ->
        acc
        |> merge_maps_by_adding_values(next_states)
        |> Map.delete(:end)
      end)

    total_transitions = Enum.sum Map.values(transitions_per_state)
    min_transitions = ceiling(total_transitions * threshold)

    low_probability_states =
      transitions_per_state
      |> Stream.filter(& elem(&1, 1) < min_transitions)
      |> Stream.filter(& elem(&1, 0) != :end)
      |> Enum.into(%{})

    :ok = Logger.debug("Found #{total_transitions} transitions in state machine.")
    :ok = Logger.debug("Found the following states with fewer than #{min_transitions} transitions: #{inspect low_probability_states}.")

    replace_states(state_machine, low_probability_states, :hole)
  end

  # Accumulates one regex per path; a {:hole, _} entry becomes a \S+ wildcard.
  defp _paths_to_patterns([], {_, regexps}) do
    Enum.reverse(regexps)
  end

  defp _paths_to_patterns([[] | paths], {pattern, regexps}) do
    _paths_to_patterns(paths, {"", [~r"^#{pattern}$" | regexps]})
  end

  defp _paths_to_patterns([[{:hole, _}|rest] | paths], {"", regexps}) do
    _paths_to_patterns([rest | paths], {"\\S+", regexps})
  end

  defp _paths_to_patterns([[{:hole, _}|rest] | paths], {pattern, regexps}) do
    _paths_to_patterns([rest | paths], {"#{pattern} \\S+", regexps})
  end

  defp _paths_to_patterns([[{token, _}|rest] | paths], {"", regexps}) do
    _paths_to_patterns([rest | paths], {token, regexps})
  end

  defp _paths_to_patterns([[{token, _}|rest] | paths], {pattern, regexps}) do
    _paths_to_patterns([rest | paths], {"#{pattern} #{token}", regexps})
  end

  @doc """
  Turns a list of token paths (as returned by `find_all_paths/1`) into a list
  of anchored regular expressions, one per path.
  """
  def paths_to_patterns([[]]), do: []

  def paths_to_patterns([[{token, _}|rest] | paths]) do
    _paths_to_patterns([rest | paths], {token, []})
  end

  defp merge_state_machines(fsm1, fsm2) do
    Map.merge(fsm1, fsm2, fn(_, next_states1, next_states2) ->
      merge_maps_by_adding_values(next_states1, next_states2)
    end)
  end

  # Terminal case: the {_, :end} transition maps to no next states.
  defp _line_to_state_machine([], {_, :end} = transition, acc) do
    entry = %{transition => %{}}
    merge_state_machines(acc, entry)
  end

  # Out of tokens: close the line with an :end transition.
  defp _line_to_state_machine([], {_, state} = transition, acc) do
    next_transition = {state, :end}
    entry = %{transition => %{:end => 1}}
    next_acc = merge_state_machines(acc, entry)
    _line_to_state_machine([], next_transition, next_acc)
  end

  defp _line_to_state_machine([next_state|rest], {_, state} = transition, acc) do
    next_transition = {state, next_state}
    entry = %{transition => %{next_state => 1}}
    next_acc = merge_state_machines(acc, entry)
    _line_to_state_machine(rest, next_transition, next_acc)
  end

  defp line_to_state_machine(string) do
    _line_to_state_machine(String.split(string), {nil, :begin}, %{})
  end

  @doc """
  Builds a state machine from newline-separated lines of whitespace-separated
  tokens. Each line contributes a `{nil, :begin} -> ... -> :end` chain;
  repeated transitions accumulate their counts.
  """
  def string_to_state_machine(string) when is_binary string do
    string
    |> String.split("\n")
    |> Enum.map(&line_to_state_machine/1)
    |> Enum.reduce(%{}, fn(line_fsm, acc) ->
      merge_state_machines(acc, line_fsm)
    end)
  end
end
|
lib/elexir/fsm.ex
| 0.591841
| 0.403567
|
fsm.ex
|
starcoder
|
defmodule SpatialHash do
  @moduledoc """
  Documentation for SpatialHash.
  """

  @type point :: list(number)
  @type point_range :: list(%Range{})
  @type grid_dim :: {number, number, number}
  @type grid :: list(grid_dim)
  @type geometry :: {number, number}
                  | %{type: String.t, coordinates: list}
                  | %Geo.Point{}
                  | %Geo.MultiPoint{}
                  | %Geo.LineString{}
                  | %Geo.MultiLineString{}
                  | %Geo.Polygon{}
                  | %Geo.MultiPolygon{}

  # Tolerance for absorbing floating-point error at cell boundaries.
  @eps 0.000001

  @doc """
  Returns an array containing the hash elements for the given point for each dimension.
  ## Examples
  iex> SpatialHash.hash([-0.2, -1.3], [{-180, 180, 0.05}, {-90, 90, 0.2}])
  [3596, 443]
  iex> SpatialHash.hash([0.2, -80.2], [{-180, 180, 0.05}, {-90, 90, 0.1}])
  [3604, 98]
  iex> SpatialHash.hash([0.2, -80.2])
  [180200, 9800]
  """
  @spec hash(point, grid) :: point
  def hash(point), do: hash(point, world_grid())
  def hash([], []), do: []

  def hash([coord | coords], [dim | dims]) do
    [do_hash(coord, dim) | hash(coords, dims)]
  end

  # Maps one coordinate to its integer cell index along a single dimension.
  # Values sitting within @eps below a cell boundary are nudged up into the
  # next cell to counter floating-point rounding.
  @spec do_hash(number, grid_dim) :: number
  defp do_hash(value, {lower, _upper, step}) do
    raw = (value - lower) / step
    gap = Float.ceil(raw) - raw
    cell = round(Float.floor(raw))
    if gap > 0 and gap < @eps, do: cell + 1, else: cell
  end

  @doc """
  Returns array of hash ranges for a given axis-aligned envelope
  ## Examples
  iex> SpatialHash.hash_range(%Envelope{
  ...> min_x: -90.082756,
  ...> min_y: 29.949766,
  ...> max_x: -90.079484,
  ...> max_y: 29.952280
  ...> }, [{-180, 180, 0.01}, {-90, 90, 0.01}])
  [8991..8992, 11994..11995]
  iex> SpatialHash.hash_range(
  ...> %{type: "LineString", coordinates: [
  ...> { -90.082746, 29.950955},
  ...> {-90.081453, 29.952280},
  ...> {-90.079489, 29.949770}
  ...> ]})
  [89917..89920, 119949..119952]
  iex> SpatialHash.hash_range(
  ...> %{type: "Point",
  ...> coordinates: { -90.082746, 29.950955}})
  [89917..89917, 119950..119950]
  """
  @spec hash_range(%Envelope{} | geometry, grid) :: point_range
  def hash_range(shape), do: hash_range(shape, world_grid())

  def hash_range(%Envelope{} = env, [dim_x, dim_y]) do
    [
      do_hash(env.min_x, dim_x)..do_hash(env.max_x, dim_x),
      do_hash(env.min_y, dim_y)..do_hash(env.max_y, dim_y)
    ]
  end

  # Geometries are reduced to their bounding envelope first.
  def hash_range(%{coordinates: coords}, dims),
    do: hash_range(Envelope.from_geo(coords), dims)

  @doc """
  Convenience function for creating a grid for use with longitude/latitude grids.
  You can specify a grid spacing, or it will default to `0.001`.
  ## Examples
  iex> SpatialHash.world_grid()
  [{-180, 180, 0.001}, {-90, 90, 0.001}]
  iex> SpatialHash.world_grid(0.03)
  [{-180, 180, 0.03}, {-90, 90, 0.03}]
  """
  @spec world_grid(number) :: grid
  def world_grid(step \\ 0.001) do
    [{-180, 180, step}, {-90, 90, step}]
  end
end
|
lib/spatial_hash.ex
| 0.914319
| 0.464234
|
spatial_hash.ex
|
starcoder
|
defmodule Adventofcode.Day12NBodyProblem do
use Adventofcode
@moduledoc """
Each moon has a 3-dimensional position (x, y, and z)
and a 3-dimensional velocity.
The position of each moon is given in your scan;
the x, y, and z velocity of each moon starts at 0.
Simulate the motion of the moons in time steps.
Within each time step, first update the velocity of every moon by applying gravity.
Then, once all moons' velocities have been updated,
update the position of every moon by applying velocity.
Time progresses by one step once all of the positions are updated.
"""
alias __MODULE__.{Axis, Compare, Energy, Gravity, Moon, Parser, Point, Printer, Simulation}
@doc """
Part 1: parse the scan, simulate 1000 steps, and return the total energy in
the system.
"""
def part_1(input) do
input
|> Parser.parse()
|> Simulation.run(1000)
|> Simulation.total_energy()
end
@doc """
Part 2: simulate until every axis has returned to its initial state and
return the step count at which the whole system repeats (the LCM of the
per-axis cycle lengths — see `Simulation.sum_repeats_every/1`).
"""
def part_2(input) do
input
|> Parser.parse()
|> Simulation.run_until_repeat()
|> Simulation.get_repeats_every()
end
defmodule Simulation do
@moduledoc """
Drives the time-step loop. State: the current `step`, one `Axis` per spatial
dimension, and `repeats_every` (0 until the full-system cycle is known).
"""
defstruct step: 0, axes: [], repeats_every: 0
def step(%Simulation{step: step}), do: step
def get_repeats_every(%Simulation{repeats_every: step}), do: step
# Stop when the current step equals the requested step count.
def run(%Simulation{step: step} = simulation, step), do: simulation
def run(%Simulation{step: step} = simulation, until_step) when step < until_step do
simulation
|> update_axes
|> increment_step
|> check_repeats_every()
|> run(until_step)
end
# Every axis has reported its cycle length (repeats_every > 0): done.
def run_until_repeat(%Simulation{repeats_every: r} = simulation) when r > 0 do
simulation
end
# Otherwise advance one step at a time, recomputing repeats_every after each.
def run_until_repeat(%Simulation{step: step} = simulation) do
simulation
|> run(step + 1)
|> update_repeats_every()
|> run_until_repeat()
end
def total_energy(%Simulation{} = simulation) do
simulation
|> Moon.list_from_simulation()
|> Energy.total()
end
defp update_axes(simulation) do
%{simulation | axes: simulation.axes |> Enum.map(&Axis.update/1)}
end
# Lets each axis record the current step as its cycle length if it has just
# returned to its initial state.
defp check_repeats_every(%{step: step} = simulation) do
axes = simulation.axes |> Enum.map(&Axis.check_repeats_every(&1, step))
%{simulation | axes: axes}
end
defp increment_step(simulation) do
%{simulation | step: simulation.step + 1}
end
defp update_repeats_every(simulation) do
%{simulation | repeats_every: sum_repeats_every(simulation)}
end
# The whole system repeats at the LCM of the per-axis cycles. If any axis
# has not cycled yet (repeats_every == 0), lcm/2 yields 0 for the total.
defp sum_repeats_every(simulation) do
simulation.axes
|> Enum.map(& &1.repeats_every)
|> Enum.reduce(&lcm/2)
end
# Least Common Multiple
defp lcm(_, 0), do: 0
defp lcm(a, b), do: div(a * b, Integer.gcd(a, b))
end
defmodule Axis do
@moduledoc """
One spatial axis of the simulation: the current per-moon `Point`s, the
initial configuration, and `repeats_every` — the step at which this axis
first returned to its initial state (0 while unknown).
"""
@enforce_keys [:current, :initial]
defstruct current: [], initial: [], repeats_every: 0
def update(axis) do
axis
|> update_current()
end
defp update_current(axis) do
%{axis | current: next(axis)}
end
# One time step: apply gravity from all other points on this axis, then
# apply the resulting velocities.
defp next(%Axis{current: current}) do
current
|> Enum.map(&Point.update_velocity(&1, current -- [&1]))
|> Enum.map(&Point.update_position/1)
end
# Record `step` as the cycle length the first time the axis state matches
# its initial state; a non-zero value is never overwritten.
def check_repeats_every(%{repeats_every: 0} = axis, step) do
%{axis | repeats_every: check_repeats(axis.current, axis.initial, step)}
end
def check_repeats_every(axis, _step), do: axis
defp check_repeats(initial, initial, step), do: step
defp check_repeats(_, _, _), do: 0
end
defmodule Point do
  @moduledoc """
  A single moon's state along one axis: a scalar position and velocity.

  Applying velocity simply adds it to the position — e.g. position 1 with
  velocity -2 moves to -1 — and never modifies the velocity itself.
  """

  @enforce_keys [:position]
  defstruct position: 0, velocity: 0

  @doc "Builds a point from a position/velocity pair."
  def new(position, velocity) do
    %__MODULE__{position: position, velocity: velocity}
  end

  @doc "Adjusts velocity by the gravitational pull of all `other_points`."
  def update_velocity(point, other_points) do
    pull = Gravity.apply(point, other_points)
    %{point | velocity: point.velocity + pull}
  end

  @doc "Moves the point by its current velocity."
  def update_position(%__MODULE__{position: pos, velocity: vel} = point) do
    %{point | position: pos + vel}
  end
end
defmodule Gravity do
  @moduledoc """
  Gravity on a single axis: each other point pulls a point's velocity by
  +1, -1 or 0 depending on whether the other point lies ahead of, behind,
  or exactly at this point's position. For example, if Ganymede is at x=3
  and Callisto at x=5, Ganymede's x velocity changes by +1 and Callisto's
  by -1; equal positions contribute nothing.
  """

  @doc "Net velocity change for `point` caused by `other_points`."
  def apply(point, other_points) do
    Enum.reduce(other_points, 0, fn other, pull ->
      pull + Compare.compare(other.position, point.position)
    end)
  end
end
defmodule Compare do
  @moduledoc "Three-way numeric comparison helper."

  @doc "Returns -1, 0 or 1 as `a` is less than, equal to, or greater than `b`."
  def compare(a, b) when is_number(a) and is_number(b) do
    do_compare(a, b)
  end

  defp do_compare(a, b) do
    cond do
      a < b -> -1
      a > b -> 1
      true -> 0
    end
  end
end
defmodule Energy do
  @moduledoc """
  Total energy of the system: each moon contributes its potential energy
  (sum of absolute position coordinates) multiplied by its kinetic energy
  (sum of absolute velocity coordinates).
  """

  @doc "Sums `potential * kinetic` over all moons."
  def total(moons) do
    moons
    |> Enum.map(&energy_moon/1)
    |> Enum.sum()
  end

  # One moon's energy from its 3-tuple position and velocity.
  defp energy_moon(%{position: pos, velocity: vel}) do
    energy(pos) * energy(vel)
  end

  # Sum of absolute values of a coordinate tuple.
  defp energy(triple) do
    triple
    |> Tuple.to_list()
    |> Enum.map(&abs/1)
    |> Enum.sum()
  end
end
defmodule Moon do
@moduledoc """
Reassembles the per-axis `Point` lists back into 3-D moons (position and
velocity tuples) for energy calculation and printing.
"""
defstruct position: {0, 0, 0}, velocity: {0, 0, 0}
def list_from_simulation(%Simulation{} = simulation) do
simulation.axes
# One list of points per axis (x, y, z)...
|> Enum.map(& &1.current)
# ...transposed so each tuple groups one moon's x/y/z points.
|> List.zip()
|> Enum.map(&Tuple.to_list/1)
|> Enum.map(&moon_data_from_points/1)
|> Enum.map(fn [pos, vel] -> %Moon{position: pos, velocity: vel} end)
end
# [x_pt, y_pt, z_pt] -> [{x_pos, y_pos, z_pos}, {x_vel, y_vel, z_vel}]
defp moon_data_from_points(points) do
points
|> Enum.map(&[&1.position, &1.velocity])
|> List.zip()
end
end
defmodule Printer do
@moduledoc "Debug output of moon positions and velocities."
def print(%Simulation{} = simulation) do
IO.puts("After #{simulation.step} steps:\n#{print_moons(simulation)}")
end
defp print_moons(simulation) do
simulation
|> Moon.list_from_simulation()
|> Enum.map_join("\n", &print_moon/1)
end
# ~3b formats each integer right-aligned in a field of width 3.
defp print_moon(%Moon{position: {x, y, z}, velocity: {xv, yv, zv}}) do
:io_lib.format("pos=<x=~3b, y=~3b, z=~3b>, vel=<x=~3b, y=~3b, z=~3b>", [x, y, z, xv, yv, zv])
end
end
defmodule Parser do
@moduledoc """
Parses the puzzle input into three `Axis` structs (x, y, z), each holding
the respective coordinate of every moon as a `Point` with zero velocity.
"""
def parse(input) do
%Simulation{axes: parse_axes(input)}
end
defp parse_axes(input) do
~r/-?\d+/
|> Regex.scan(input)
|> List.flatten()
|> Enum.map(&String.to_integer/1)
# One Point per coordinate; velocity defaults to 0.
|> Enum.map(&%Point{position: &1})
# Group into [x, y, z] per moon, then transpose so each resulting list
# holds one coordinate across all moons.
|> Enum.chunk_every(3)
|> List.zip()
|> Enum.map(&Tuple.to_list/1)
|> Enum.map(&%Axis{current: &1, initial: &1})
end
end
end
|
lib/day_12_n_body_problem.ex
| 0.888096
| 0.775605
|
day_12_n_body_problem.ex
|
starcoder
|
defmodule SmartCity.Data do
@moduledoc """
Message struct shared amongst all SmartCity microservices.
```javascript
const DataMessage = {
"dataset_id": "", // UUID
"ingestion_id":"", // UUID
"extraction_start_time": "", // iso8601
"payload": {},
"_metadata": { // cannot be used safely
"orgName": "", // ~r/^[a-zA-Z_]+$/
"dataName": "", // ~r/^[a-zA-Z_]+$/
"stream": true
},
"operational": {
"timing": [{
"startTime": "", // iso8601
"endTime": "", // iso8601
"app": "", // microservice generating timing data
"label": "" // label for this particular timing data
}]
}
}
```
"""
alias SmartCity.Data
alias SmartCity.Data.Timing
alias SmartCity.Helpers
@type t :: %SmartCity.Data{
:dataset_id => String.t(),
:ingestion_id => String.t(),
:extraction_start_time => DateTime.t(),
:operational => %{
:timing => list(SmartCity.Data.Timing.t())
},
:payload => String.t(),
:_metadata => %{
:org => String.t(),
:name => String.t(),
:stream => boolean()
},
:version => String.t()
}
@type payload :: String.t()
@derive Jason.Encoder
@enforce_keys [:dataset_id, :ingestion_id, :extraction_start_time, :payload, :_metadata, :operational]
defstruct version: "0.1",
_metadata: %{org: nil, name: nil, stream: false},
dataset_id: nil,
ingestion_id: nil,
extraction_start_time: nil,
payload: nil,
operational: %{timing: []}
@doc """
Returns a new `SmartCity.Data` struct. `SmartCity.Data.Timing`
structs will be created along the way.
Can be created from:
- map with string keys
- map with atom keys
- JSON
## Examples
iex> SmartCity.Data.new(%{dataset_id: "a_guid", ingestion_id: "b_guid", extraction_start_time: "2019-05-06T19:51:41+00:00", payload: "the_data", _metadata: %{org: "scos", name: "example"}, operational: %{timing: [%{app: "app name", label: "function name", start_time: "2019-05-06T19:51:41+00:00", end_time: "2019-05-06T19:51:51+00:00"}]}})
{:ok, %SmartCity.Data{
dataset_id: "a_guid",
ingestion_id: "b_guid",
extraction_start_time: "2019-05-06T19:51:41+00:00",
payload: "the_data",
_metadata: %{org: "scos", name: "example"},
operational: %{
timing: [%SmartCity.Data.Timing{ app: "app name", end_time: "2019-05-06T19:51:51+00:00", label: "function name", start_time: "2019-05-06T19:51:41+00:00"}]
}
}}
"""
@spec new(map() | String.t()) :: {:ok, SmartCity.Data.t()} | {:error, String.t()}
# JSON input: decode, then re-dispatch on the resulting map.
def new(msg) when is_binary(msg) do
with {:ok, decoded} <- Jason.decode(msg) do
new(decoded)
end
end
# String-keyed input (e.g. freshly decoded JSON): normalize nested maps to
# atom keys and re-dispatch to the atom-keyed clause below.
def new(%{"dataset_id" => _} = msg) do
%{
dataset_id: msg["dataset_id"],
ingestion_id: msg["ingestion_id"],
extraction_start_time: msg["extraction_start_time"],
operational: Helpers.to_atom_keys(msg["operational"]),
payload: msg["payload"],
_metadata: Helpers.to_atom_keys(msg["_metadata"])
}
|> new()
end
# Atom-keyed input: build the struct, converting every timing map into a
# Timing struct.
def new(%{
dataset_id: dataset_id,
ingestion_id: ingestion_id,
extraction_start_time: extraction_start_time,
operational: operational,
payload: payload,
_metadata: metadata
}) do
timings = Map.get(operational, :timing, [])
struct =
struct(__MODULE__, %{
dataset_id: dataset_id,
ingestion_id: ingestion_id,
extraction_start_time: extraction_start_time,
payload: payload,
_metadata: metadata,
operational: %{operational | timing: Enum.map(timings, &Timing.new/1)}
})
{:ok, struct}
# struct/2 or Timing.new/1 may raise on malformed input; surface it as an
# error tuple instead of crashing the caller.
rescue
e -> {:error, e}
end
# Anything else is not a recognizable data message.
def new(msg) do
{:error, "Invalid data message: #{inspect(msg)}"}
end
@doc """
Defines the string that will be the payload of the last message in a dataset.
"""
defmacro end_of_data(), do: quote(do: "END_OF_DATA")
@doc """
Encodes `SmartCity.Data` into JSON. Typically used right before sending as a Kafka message.
"""
@spec encode(SmartCity.Data.t()) ::
{:ok, String.t()} | {:error, Jason.EncodeError.t() | Exception.t()}
def encode(%__MODULE__{} = message) do
Jason.encode(message)
end
@doc """
Encodes `SmartCity.Data` into JSON. Typically used right before sending as a Kafka message.
Raises an error if it fails to convert to a JSON string.
"""
@spec encode!(SmartCity.Data.t()) :: String.t()
def encode!(%__MODULE__{} = message) do
Jason.encode!(message)
end
@doc """
Adds a `SmartCity.Data.Timing` to the list of timings in this `SmartCity.Data`. The timing will be validated to ensure both start and end times have been set.
Returns a `SmartCity.Data` struct with `new_timing` prepended to existing timings list.
## Parameters
- message: A `SmartCity.Data`
- new_timing: A timing you want to add. Must have `start_time` and `end_time` set
"""
@spec add_timing(
SmartCity.Data.t(),
SmartCity.Data.Timing.t()
) :: SmartCity.Data.t()
def add_timing(
%__MODULE__{operational: %{timing: timing}} = message,
%Data.Timing{} = new_timing
) do
case Timing.validate(new_timing) do
{:ok, new_timing} -> put_in_operational(message, :timing, [new_timing | timing])
{:error, errors} -> raise ArgumentError, "Invalid Timing: #{errors}"
end
end
@doc """
Creates a new `SmartCity.Data` struct using `new/1` and adds timing information to the message.
Returns a `SmartCity.Data` struct with `new_timing` prepended to existing timings list.
## Parameters
- message: A `SmartCity.Data`
- app: The application that is asking to create the new `SmartCity.Data`. Ex. `reaper` or `voltron`
"""
@spec timed_new(map(), String.t()) :: {:ok, SmartCity.Data.t()} | {:error, String.t()}
def timed_new(msg, app) do
label = inspect(&Data.new/1)
case Timing.measure(app, label, fn -> new(msg) end) do
{:ok, msg, timing} -> {:ok, msg |> add_timing(timing)}
error -> error
end
end
@doc """
Transforms the `SmartCity.Data` `payload` field with the given unary function and replaces it in the message.
Additionally, returns a `SmartCity.Data` struct with `new_timing` prepended to existing timings list.
## Parameters
- message: A `SmartCity.Data`
- app: The application that is asking to create the new `SmartCity.Data`. Ex. `reaper` or `voltron`
- function: an arity 1 (/1) function that will transform the payload in the provided message
"""
@spec timed_transform(
SmartCity.Data.t(),
String.t(),
(payload() -> {:ok, term()} | {:error, term()})
) :: {:ok, SmartCity.Data.t()} | {:error, String.t()}
def timed_transform(%Data{} = msg, app, function) when is_function(function, 1) do
label = inspect(function)
case Timing.measure(app, label, fn -> function.(msg.payload) end) do
{:ok, result, timing} -> {:ok, msg |> add_timing(timing) |> Map.replace!(:payload, result)}
error -> error
end
end
@doc """
Get all timings on this Data
Returns a list of `SmartCity.Data.Timing` structs or `[]`
## Parameters
- data_message: The message to extract timings from
"""
@spec get_all_timings(SmartCity.Data.t()) :: list(SmartCity.Data.Timing.t())
def get_all_timings(%__MODULE__{operational: %{timing: timing}}), do: timing
# Private functions
defp put_in_operational(%__MODULE__{operational: operational} = message, key, value) do
%{message | operational: Map.put(operational, key, value)}
end
end
|
lib/smart_city/data.ex
| 0.872809
| 0.643105
|
data.ex
|
starcoder
|
defmodule CanvasAPI.CanvasWatchService do
@moduledoc """
A service for viewing and manipulating canvas watches.

A watch ties a `User` to a `Canvas`. All lookups are scoped to the
account passed in `opts[:account]` (or unscoped when it is `nil`).
"""
use CanvasAPI.Web, :service
alias CanvasAPI.{Account, Canvas, CanvasService, User, UserService,
CanvasWatch}
# Associations eagerly loaded on every query result.
@preload [:user, canvas: [:team]]
@doc """
Insert a new canvas watch.

Requires `attrs["canvas_id"]`; the watching user is derived from
`opts[:account]` and the canvas's team.
"""
@spec insert(attrs, Keyword.t) :: {:ok, CanvasWatch.t}
| {:error, Changeset.t}
def insert(attrs, opts) do
%CanvasWatch{}
|> CanvasWatch.changeset(attrs)
|> put_canvas(attrs["canvas_id"], opts[:account])
|> put_user(opts[:account])
|> Repo.insert
end
# Resolves the canvas by ID (scoped to the account) and puts it on the
# changeset; adds a changeset error when the id is missing or not found.
@spec put_canvas(Changeset.t, String.t | nil, Account.t) :: Changeset.t
defp put_canvas(changeset, id, account) when is_binary(id) do
id
|> CanvasService.get(account: account)
|> case do
{:ok, canvas} ->
put_assoc(changeset, :canvas, canvas)
{:error, _} ->
add_error(changeset, :canvas, "was not found")
end
end
defp put_canvas(changeset, _, _),
do: add_error(changeset, :canvas, "is required")
# Looks up the account's user on the canvas's team and puts it on the
# changeset. When no canvas was resolved, the changeset is returned as-is.
@spec put_user(Changeset.t, Account.t) :: Changeset.t
defp put_user(changeset, account) do
with canvas = %Canvas{} <- get_field(changeset, :canvas) do
# NOTE(review): the bare {:ok, user} match assumes the account always
# has a user on the canvas's team and will crash otherwise — confirm.
{:ok, user} = UserService.find_by_team(account, team_id: canvas.team_id)
put_assoc(changeset, :user, user)
else
_ -> changeset
end
end
@doc """
Get a canvas watch by ID.

Note that `id` is matched against the watch's `canvas_id` column, not the
watch's own primary key.
"""
@spec get(String.t, Keyword.t) :: {:ok, CanvasWatch.t}
| {:error, :watch_not_found}
def get(id, opts) do
opts[:account]
|> watch_query
# maybe_lock/1 is presumably imported via `use CanvasAPI.Web, :service` —
# NOTE(review): confirm its locking semantics.
|> maybe_lock
|> where(canvas_id: ^id)
|> Repo.one
|> case do
watch = %CanvasWatch{} ->
{:ok, watch}
nil ->
{:error, :watch_not_found}
end
end
@doc """
List canvas watches.

Accepts `opts[:canvas]` and/or `opts[:filter]` to narrow the result set.
"""
@spec list(Keyword.t) :: [CanvasWatch.t]
def list(opts \\ []) do
opts[:account]
|> watch_query
|> filter(canvas: opts[:canvas])
|> filter(opts[:filter])
|> Repo.all
end
# Applies each recognized filter entry in turn; unknown entries are ignored
# by the do_filter/2 catch-all.
@spec filter(Ecto.Query.t, Keyword.t | map | nil) :: Ecto.Query.t
defp filter(query, filter) when is_map(filter) or is_list(filter) do
filter
|> Enum.reduce(query, &do_filter/2)
end
defp filter(query, _), do: query
@spec do_filter({String.t | atom, any}, Ecto.Query.t) :: Ecto.Query.t
defp do_filter({:canvas, canvas = %Canvas{}}, query),
do: where(query, canvas_id: ^canvas.id)
defp do_filter({"canvas.id", canvas_id}, query),
do: where(query, canvas_id: ^canvas_id)
defp do_filter(_, query),
do: query
@doc """
Delete a canvas watch.

Runs inside a transaction; a missing watch (or a failed delete) rolls the
transaction back with the error as the rollback reason.
"""
@spec delete(String.t, Keyword.t) :: {:ok, CanvasWatch.t}
| {:error, :watch_not_found}
def delete(id, opts) do
Repo.transaction(fn ->
with {:ok, watch} <- get(id, opts) do
Repo.delete(watch)
end
|> case do
{:ok, watch} -> watch
{:error, error} -> Repo.rollback(error)
end
end)
end
# Base query: all watches (account nil), or only watches whose user belongs
# to the given account; @preload associations are loaded either way.
@spec watch_query(Account.t | nil) :: Ecto.Query.t
defp watch_query(nil), do: CanvasWatch |> preload(^@preload)
defp watch_query(account) do
CanvasWatch
|> join(:left, [w], u in User, w.user_id == u.id)
|> where([_, u], u.account_id == ^account.id)
|> preload(^@preload)
end
end
|
lib/canvas_api/services/canvas_watch_service.ex
| 0.799442
| 0.46873
|
canvas_watch_service.ex
|
starcoder
|
defmodule VintageNet.WiFi.WPASupplicantLL do
use GenServer
require Logger
@moduledoc """
This modules provides a low-level interface for interacting with the `wpa_supplicant`
Example use:
```elixir
iex> {:ok, ws} = VintageNet.WiFi.WPASupplicantLL.start_link("/tmp/vintage_net/wpa_supplicant/wlan0")
{:ok, #PID<0.1795.0>}
iex> VintageNet.WiFi.WPASupplicantLL.subscribe(ws)
:ok
iex> VintageNet.WiFi.WPASupplicantLL.control_request(ws, "ATTACH")
{:ok, "OK\n"}
iex> VintageNet.WiFi.WPASupplicantLL.control_request(ws, "SCAN")
{:ok, "OK\n"}
iex> flush
{VintageNet.WiFi.WPASupplicant, 51, "CTRL-EVENT-SCAN-STARTED "}
{VintageNet.WiFi.WPASupplicant, 51, "CTRL-EVENT-BSS-ADDED 0 78:8a:20:87:7a:50"}
{VintageNet.WiFi.WPASupplicant, 51, "CTRL-EVENT-SCAN-RESULTS "}
{VintageNet.WiFi.WPASupplicant, 51, "CTRL-EVENT-NETWORK-NOT-FOUND "}
:ok
iex> VintageNet.WiFi.WPASupplicantLL.control_request(ws, "BSS 0")
{:ok,
"id=0\nbssid=78:8a:20:82:7a:50\nfreq=2437\nbeacon_int=100\ncapabilities=0x0431\nqual=0\nnoise=-89\nlevel=-71\ntsf=0000333220048880\nage=14\nie=0008426f7062654c414e010882848b968c1298240301062a01003204b048606c0b0504000a00002d1aac011bffffff00000000000000000001000000000000000000003d1606080c000000000000000000000000000000000000007f080000000000000040dd180050f2020101000003a4000027a4000042435e0062322f00dd0900037f01010000ff7fdd1300156d00010100010237e58106788a20867a5030140100000fac040100000fac040100000fac020000\nflags=[WPA2-PSK-CCMP][ESS]\nssid=BopbeLAN\nsnr=18\nest_throughput=48000\nupdate_idx=1\nbeacon_ie=0008426f7062654c414e010882848b968c1298240301060504010300002a01003204b048606c0b0504000a00002d1aac011bffffff00000000000000000001000000000000000000003d1606080c000000000000000000000000000000000000007f080000000000000040dd180050f2020101000003a4000027a4000042435e0062322f00dd0900037f01010000ff7fdd1300156d00010100010237e58106788a20867a5030140100000fac040100000fac040100000fac020000\n"}
```
"""
defmodule State do
@moduledoc false
# control_file: path to wpa_supplicant's control socket
# socket: our local-domain UDP socket used to talk to it
# requests: FIFO of pending GenServer callers awaiting a response
# notification_pid: the single subscribed process for notifications
defstruct control_file: nil,
socket: nil,
requests: [],
notification_pid: nil
end
@doc """
Start the WPASupplicant low-level interface
Pass the path to the wpa_supplicant control file
"""
@spec start_link(Path.t()) :: GenServer.on_start()
def start_link(path) do
GenServer.start_link(__MODULE__, path)
end
@doc """
Send a control request to the wpa_supplicant and wait for its response.
"""
@spec control_request(GenServer.server(), binary()) :: {:ok, binary()} | {:error, any()}
def control_request(server, request) do
GenServer.call(server, {:control_request, request})
end
@doc """
Subscribe to wpa_supplicant notifications

Only one subscriber is kept; a later call replaces the previous one.
Notifications arrive as `{VintageNet.WiFi.WPASupplicantLL, priority, message}`.
"""
@spec subscribe(GenServer.server(), pid()) :: :ok
def subscribe(server, pid \\ self()) do
GenServer.call(server, {:subscribe, pid})
end
@impl true
def init(path) do
# Blindly create the control interface's directory in case we beat
# wpa_supplicant.
_ = File.mkdir_p(Path.dirname(path))
# The path to our end of the socket so that wpa_supplicant can send us
# notifications and responses
our_path = path <> ".ex"
# Blindly remove an old file just in case it exists from a previous run
_ = File.rm(our_path)
{:ok, socket} =
:gen_udp.open(0, [:local, :binary, {:active, true}, {:ip, {:local, our_path}}])
state = %State{control_file: path, socket: socket}
{:ok, state}
end
@impl true
def handle_call({:control_request, message}, from, state) do
case :gen_udp.send(state.socket, {:local, state.control_file}, 0, message) do
:ok ->
# Don't reply yet: the answer arrives as a UDP message and is matched
# to the oldest queued caller in handle_info/2. NOTE(review): this
# assumes wpa_supplicant answers requests in order — confirm.
new_requests = state.requests ++ [from]
{:noreply, %{state | requests: new_requests}}
error ->
{:reply, error, state}
end
end
@impl true
def handle_call({:subscribe, pid}, _from, state) do
{:reply, :ok, %{state | notification_pid: pid}}
end
# Unsolicited notifications look like "<N>text" where N is an ASCII digit
# priority; forward them to the subscriber or log and drop them.
@impl true
def handle_info(
{:udp, socket, _, 0, <<?<, priority, ?>, notification::binary()>>},
%{socket: socket, notification_pid: pid} = state
) do
if pid do
# `priority - ?0` converts the ASCII digit to its integer value.
send(pid, {__MODULE__, priority - ?0, notification})
else
_ = Logger.info("wpa_supplicant_ll dropping notification: #{notification}")
end
{:noreply, state}
end
# Any other UDP payload is the response to the oldest pending request.
@impl true
def handle_info({:udp, socket, _, 0, response}, %{socket: socket} = state) do
case List.pop_at(state.requests, 0) do
{nil, _requests} ->
_ = Logger.warn("wpa_supplicant sent an unexpected message: '#{response}'")
{:noreply, state}
{from, new_requests} ->
GenServer.reply(from, {:ok, response})
{:noreply, %{state | requests: new_requests}}
end
end
end
|
lib/vintage_net/wifi/wpa_supplicant_ll.ex
| 0.76454
| 0.501709
|
wpa_supplicant_ll.ex
|
starcoder
|
defmodule Blockchain.Transaction do
@moduledoc """
This module encodes the transaction object, defined in Section 4.3
of the Yellow Paper (http://gavwood.com/Paper.pdf). We are focused
on implementing 𝛶, as defined in Eq.(1).
"""
alias Blockchain.Account
alias Block.Header
# Fields follow the Yellow Paper notation.
# Tn — sender's transaction count
defstruct nonce: 0,
# Tp — wei paid per unit of gas
gas_price: 0,
# Tg — maximum gas usable by this transaction
gas_limit: 0,
# Tt — recipient address, or <<>> for contract creation
to: <<>>,
# Tv — wei transferred to the recipient (or endowed to a new contract)
value: 0,
# Tw — signature recovery identifier
v: nil,
# Tr — ECDSA signature r
r: nil,
# Ts — ECDSA signature s
s: nil,
# Ti — EVM init code (contract creation only)
init: <<>>,
# Td — input data (message calls only)
data: <<>>
@type t :: %__MODULE__{
nonce: EVM.val(),
gas_price: EVM.val(),
gas_limit: EVM.val(),
to: EVM.address() | <<_::0>>,
value: EVM.val(),
v: Blockchain.Transaction.Signature.hash_v(),
r: Blockchain.Transaction.Signature.hash_r(),
s: Blockchain.Transaction.Signature.hash_s(),
init: EVM.MachineCode.t(),
data: binary()
}
@doc """
Encodes a transaction such that it can be RLP-encoded.
This is defined at L_T Eq.(14) in the Yellow Paper.
## Examples
iex> Blockchain.Transaction.serialize(%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"})
[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]
iex> Blockchain.Transaction.serialize(%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<>>, value: 8, v: 27, r: 9, s: 10, init: <<1, 2, 3>>})
[<<5>>, <<6>>, <<7>>, <<>>, <<8>>, <<1, 2, 3>>, <<27>>, <<9>>, <<10>>]
iex> Blockchain.Transaction.serialize(%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<>>, value: 8, v: 27, r: 9, s: 10, init: <<1, 2, 3>>}, false)
[<<5>>, <<6>>, <<7>>, <<>>, <<8>>, <<1, 2, 3>>]
iex> Blockchain.Transaction.serialize(%Blockchain.Transaction{ data: "", gas_limit: 21000, gas_price: 20000000000, init: "", nonce: 9, r: 0, s: 0, to: "55555555555555555555", v: 1, value: 1000000000000000000 })
["\t", <<4, 168, 23, 200, 0>>, "R\b", "55555555555555555555", <<13, 224, 182, 179, 167, 100, 0, 0>>, "", <<1>>, "", ""]
"""
@spec serialize(t) :: ExRLP.t()
def serialize(trx, include_vrs \\ true) do
base = [
trx.nonce |> BitHelper.encode_unsigned(),
trx.gas_price |> BitHelper.encode_unsigned(),
trx.gas_limit |> BitHelper.encode_unsigned(),
trx.to,
trx.value |> BitHelper.encode_unsigned(),
if(trx.to == <<>>, do: trx.init, else: trx.data)
]
if include_vrs do
base ++
[
trx.v |> BitHelper.encode_unsigned(),
trx.r |> BitHelper.encode_unsigned(),
trx.s |> BitHelper.encode_unsigned()
]
else
base
end
end
@doc """
Decodes a transaction that was previously encoded
using `Transaction.serialize/1`. Note, this is the
inverse of L_T Eq.(14) defined in the Yellow Paper.
## Examples
iex> Blockchain.Transaction.deserialize([<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>])
%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"}
iex> Blockchain.Transaction.deserialize([<<5>>, <<6>>, <<7>>, <<>>, <<8>>, <<1, 2, 3>>, <<27>>, <<9>>, <<10>>])
%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<>>, value: 8, v: 27, r: 9, s: 10, init: <<1, 2, 3>>}
iex> Blockchain.Transaction.deserialize(["\t", <<4, 168, 23, 200, 0>>, "R\b", "55555555555555555555", <<13, 224, 182, 179, 167, 100, 0, 0>>, "", <<1>>, "", ""])
%Blockchain.Transaction{
data: "",
gas_limit: 21000,
gas_price: 20000000000,
init: "",
nonce: 9,
r: 0,
s: 0,
to: "55555555555555555555",
v: 1,
value: 1000000000000000000
}
"""
@spec deserialize(ExRLP.t()) :: t
def deserialize(rlp) do
[
nonce,
gas_price,
gas_limit,
to,
value,
init_or_data,
v,
r,
s
] = rlp
{init, data} = if to == <<>>, do: {init_or_data, <<>>}, else: {<<>>, init_or_data}
%__MODULE__{
nonce: :binary.decode_unsigned(nonce),
gas_price: :binary.decode_unsigned(gas_price),
gas_limit: :binary.decode_unsigned(gas_limit),
to: to,
value: :binary.decode_unsigned(value),
init: init,
data: data,
v: :binary.decode_unsigned(v),
r: :binary.decode_unsigned(r),
s: :binary.decode_unsigned(s)
}
end
@doc """
Validates the validity of a transaction that is required to be
true before we're willing to execute a transaction. This is
specified in Section 6.2 of the Yellow Paper Eq.(65) and Eq.(66).
TODO: Consider returning a set of reasons, instead of a singular reason.
## Examples
# Sender address is nil
iex> trx = %Blockchain.Transaction{data: <<>>, gas_limit: 1_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5, r: 1, s: 2, v: 3}
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :invalid_sender}
# Sender account is nil
iex> private_key = <<1::256>>
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 1_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :missing_account}
# Has sender account, but nonce mismatch
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 1_000, gas_price: 1, init: <<1>>, nonce: 4, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 1000, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :nonce_mismatch}
# Insufficient starting gas
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 1_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 1000, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :insufficient_intrinsic_gas}
# Insufficient endowment
iex> private_key = <<fdf8:f53e:61e4::18>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 100_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 1000, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :insufficient_balance}
iex> private_key = <<fdf8:f53e:61e4::18>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 100_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 100_001, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{})
{:invalid, :insufficient_balance}
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 100_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 100_006, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{gas_limit: 50_000, gas_used: 49_999})
{:invalid, :over_gas_limit}
iex> private_key = <<fdf8:f53e:61e4::18>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> trx =
...> %Blockchain.Transaction{data: <<>>, gas_limit: 100_000, gas_price: 1, init: <<1>>, nonce: 5, to: <<>>, value: 5}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 100_006, nonce: 5})
...> |> Blockchain.Transaction.is_valid?(trx, %Block.Header{gas_limit: 500_000, gas_used: 49_999})
:valid
"""
@spec is_valid?(EVM.state(), t, Header.t()) :: :valid | {:invalid, atom()}
def is_valid?(state, trx, block_header) do
g_0 = intrinsic_gas_cost(trx, block_header)
v_0 = trx.gas_limit * trx.gas_price + trx.value
case Blockchain.Transaction.Signature.sender(trx) do
{:error, _reason} ->
{:invalid, :invalid_sender}
{:ok, sender_address} ->
sender_account = Account.get_account(state, sender_address)
if sender_account do
cond do
sender_account.nonce != trx.nonce -> {:invalid, :nonce_mismatch}
g_0 > trx.gas_limit -> {:invalid, :insufficient_intrinsic_gas}
v_0 > sender_account.balance -> {:invalid, :insufficient_balance}
trx.gas_limit > Header.available_gas(block_header) -> {:invalid, :over_gas_limit}
true -> :valid
end
else
{:invalid, :missing_account}
end
end
end
@doc """
Performs transaction execution, as defined in Section 6
of the Yellow Paper, defined there as 𝛶, Eq.(1) and Eq.(59),
Eq.(70), Eq.(79) and Eq.(80).
From the Yellow Paper, T_o is the original transactor, which can differ from the
sender in the case of a message call or contract creation
not directly triggered by a transaction but coming from
the execution of EVM-code.
# TODO: Add rich examples in `transaction_test.exs`
## Examples
# Create contract
iex> beneficiary = <<0x05::160>>
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> contract_address = Blockchain.Contract.new_contract_address(sender, 6)
iex> machine_code = EVM.MachineCode.compile([:push1, 3, :push1, 5, :add, :push1, 0x00, :mstore, :push1, 32, :push1, 0, :return])
iex> trx = %Blockchain.Transaction{nonce: 5, gas_price: 3, gas_limit: 100_000, to: <<>>, value: 5, init: machine_code}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> {state, gas, logs} = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 400_000, nonce: 5})
...> |> Blockchain.Transaction.execute_transaction(trx, %Block.Header{beneficiary: beneficiary})
iex> {gas, logs}
{53780, <<>>}
iex> Blockchain.Account.get_accounts(state, [sender, beneficiary, contract_address])
[%Blockchain.Account{balance: 238655, nonce: 6}, %Blockchain.Account{balance: 161340}, %Blockchain.Account{balance: 5, code_hash: <<243, 247, 169, 254, 54, 79, 170, 185, 59, 33, 109, 165, 10, 50, 20, 21, 79, 34, 160, 162, 180, 21, 178, 58, 132, 200, 22, 158, 139, 99, 110, 227>>}]
# Message call
iex> beneficiary = <<0x05::160>>
iex> private_key = <<1::256>>
iex> sender = <<126, 95, 69, 82, 9, 26, 105, 18, 93, 93, 252, 183, 184, 194, 101, 144, 41, 57, 91, 223>> # based on simple private key
iex> contract_address = Blockchain.Contract.new_contract_address(sender, 6)
iex> machine_code = EVM.MachineCode.compile([:push1, 3, :push1, 5, :add, :push1, 0x00, :mstore, :push1, 0, :push1, 32, :return])
iex> trx = %Blockchain.Transaction{nonce: 5, gas_price: 3, gas_limit: 100_000, to: contract_address, value: 5, init: machine_code}
...> |> Blockchain.Transaction.Signature.sign_transaction(private_key)
iex> {state, gas, logs} = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(sender, %Blockchain.Account{balance: 400_000, nonce: 5})
...> |> Blockchain.Account.put_code(contract_address, machine_code)
...> |> Blockchain.Transaction.execute_transaction(trx, %Block.Header{beneficiary: beneficiary})
iex> {gas, logs}
{21780, <<>>}
iex> Blockchain.Account.get_accounts(state, [sender, beneficiary, contract_address])
[%Blockchain.Account{balance: 334655, nonce: 6}, %Blockchain.Account{balance: 65340}, %Blockchain.Account{balance: 5, code_hash: <<216, 114, 80, 103, 17, 50, 164, 75, 162, 123, 123, 99, 162, 105, 226, 15, 215, 200, 136, 216, 29, 106, 193, 119, 1, 173, 138, 37, 219, 39, 23, 231>>}]
"""
@spec execute_transaction(EVM.state(), t, Header.t()) ::
{EVM.state(), EVM.Gas.t(), EVM.SubState.logs()}
def execute_transaction(state, trx, block_header) do
# TODO: Check transaction validity.
{:ok, sender} = Blockchain.Transaction.Signature.sender(trx)
state_0 = begin_transaction(state, sender, trx)
# sender and originator are the same for transaction execution
originator = sender
# stack depth starts at zero for transaction execution
stack_depth = 0
# apparent value is the full value for transaction execution
apparent_value = trx.value
# gas is equal to what was just subtracted from sender account less intrinsic gas cost
gas = trx.gas_limit - intrinsic_gas_cost(trx, block_header)
# TODO: Sender versus originator?
{state_p, remaining_gas, sub_state} =
case trx.to do
# Λ
<<>> ->
Blockchain.Contract.create_contract(
state_0,
sender,
originator,
gas,
trx.gas_price,
trx.value,
trx.init,
stack_depth,
block_header
)
recipient ->
# Note, we only want to take the first 3 items from the tuples, as designated Θ_3 in the literature
# Θ_3
{state, remaining_gas_, sub_state_, _output} =
Blockchain.Contract.message_call(
state_0,
sender,
originator,
recipient,
recipient,
gas,
trx.gas_price,
trx.value,
apparent_value,
trx.data,
stack_depth,
block_header
)
{state, remaining_gas_, sub_state_}
end
refund = calculate_total_refund(trx, remaining_gas, sub_state.refund)
state_after_gas = finalize_transaction_gas(state_p, sender, trx, refund, block_header)
state_after_suicides =
Enum.reduce(sub_state.suicide_list, state_after_gas, fn address, state ->
Account.del_account(state, address)
end)
expended_gas = trx.gas_limit - remaining_gas
# { σ', Υ^g, Υ^l }, as defined in Eq.(79) and Eq.(80)
{state_after_suicides, expended_gas, sub_state.logs}
end
@doc """
Performs first step of transaction, which adjusts the sender's
balance and nonce, as defined in Eq.(67), Eq.(68) and Eq.(69)
of the Yellow Paper.
Note: we pass in sender here so we do not need to compute it
several times (since we'll use it elsewhere).
TODO: we execute this as two separate updates; we may want to
combine a series of updates before we update our state.
## Examples
iex> state = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(<<0x01::160>>, %Blockchain.Account{balance: 1000, nonce: 7})
iex> state = Blockchain.Transaction.begin_transaction(state, <<0x01::160>>, %Blockchain.Transaction{gas_price: 3, gas_limit: 100})
iex> Blockchain.Account.get_account(state, <<0x01::160>>)
%Blockchain.Account{balance: 700, nonce: 8}
"""
@spec begin_transaction(EVM.state(), EVM.address(), t) :: EVM.state()
def begin_transaction(state, sender, trx) do
state
|> Account.dec_wei(sender, trx.gas_limit * trx.gas_price)
|> Account.increment_nonce(sender)
end
@doc """
Finalizes the gas payout, repaying the sender for excess or refunded gas
and paying the miner his due. This is defined according to Eq.(73), Eq.(74),
Eq.(75) and Eq.(76) of the Yellow Paper.
Again, we take a sender so that we don't have to re-compute the sender
address several times.
## Examples
iex> trx = %Blockchain.Transaction{gas_price: 10, gas_limit: 30}
iex> state = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
...> |> Blockchain.Account.put_account(<<0x0fc00:e968:6179::de52:7100>>, %Blockchain.Account{balance: 11})
...> |> Blockchain.Account.put_account(<<0x02::160>>, %Blockchain.Account{balance: 22})
iex> Blockchain.Transaction.finalize_transaction_gas(state, <<0x01::160>>, trx, 5, %Block.Header{beneficiary: <<0x02::160>>})
...> |> Blockchain.Account.get_accounts([<<0x01::160>>, <<0x02::160>>])
[
%Blockchain.Account{balance: 61},
%Blockchain.Account{balance: 272},
]
"""
@spec finalize_transaction_gas(EVM.state(), EVM.address(), t, EVM.Gas.t(), Block.Header.t()) ::
EVM.state()
def finalize_transaction_gas(state, sender, trx, total_refund, block_header) do
state
# Eq.(74)
|> Account.add_wei(sender, total_refund * trx.gas_price)
# Eq.(75)
|> Account.add_wei(block_header.beneficiary, (trx.gas_limit - total_refund) * trx.gas_price)
end
@doc """
Caluclates the amount which should be refunded based on the current transactions
final usage. This includes the remaining gas plus refunds from clearing storage.
The specs calls for capping the refund at half of the total amount of gas used.
This function is defined as `g*` in Eq.(72) in the Yellow Paper.
## Examples
iex> Blockchain.Transaction.calculate_total_refund(%Blockchain.Transaction{gas_limit: 100}, 10, 5)
15
iex> Blockchain.Transaction.calculate_total_refund(%Blockchain.Transaction{gas_limit: 100}, 10, 99)
55
iex> Blockchain.Transaction.calculate_total_refund(%Blockchain.Transaction{gas_limit: 100}, 10, 0)
10
iex> Blockchain.Transaction.calculate_total_refund(%Blockchain.Transaction{gas_limit: 100}, 11, 99)
55
"""
@spec calculate_total_refund(t, EVM.Gas.t(), EVM.SubState.refund()) :: EVM.Gas.t()
def calculate_total_refund(trx, remaining_gas, refund) do
# TODO: Add a math helper, finally
max_refund = round(:math.floor((trx.gas_limit - remaining_gas) / 2))
remaining_gas + min(max_refund, refund)
end
@doc """
Defines the "intrinsic gas cost," that is the amount of gas
this transaction requires to be paid prior to execution. This
is defined as g_0 in Eq.(62), Eq.(63) and Eq.(64) of the
Yellow Paper.
## Examples
iex> Blockchain.Transaction.intrinsic_gas_cost(%Blockchain.Transaction{to: <<1::160>>, init: <<>>, data: <<1, 2, 0, 3>>}, %Block.Header{number: 5})
3 * 68 + 4 + 21000
iex> Blockchain.Transaction.intrinsic_gas_cost(%Blockchain.Transaction{to: <<1::160>>, init: <<>>, data: <<1, 2, 0, 3>>}, %Block.Header{number: 5_000_000})
3 * 68 + 4 + 21000
iex> Blockchain.Transaction.intrinsic_gas_cost(%Blockchain.Transaction{to: <<1::160>>, init: <<>>, data: <<>>}, %Block.Header{number: 5_000_000})
21000
iex> Blockchain.Transaction.intrinsic_gas_cost(%Blockchain.Transaction{to: <<>>, init: <<1, 2, 0, 3>>, data: <<>>}, %Block.Header{number: 5})
3 * 68 + 4 + 21000
iex> Blockchain.Transaction.intrinsic_gas_cost(%Blockchain.Transaction{to: <<>>, init: <<1, 2, 0, 3>>, data: <<>>}, %Block.Header{number: 5_000_000})
3 * 68 + 4 + 32000 + 21000
"""
@spec intrinsic_gas_cost(t, Header.t()) :: EVM.Gas.t()
def intrinsic_gas_cost(trx, block_header) do
EVM.Gas.g_txdata(trx.init) + EVM.Gas.g_txdata(trx.data) +
if(
trx.to == <<>> and Header.is_after_homestead?(block_header),
do: EVM.Gas.g_txcreate(),
else: 0
) + EVM.Gas.g_transaction()
end
end
|
lib/blockchain/transaction.ex
| 0.793946
| 0.533519
|
transaction.ex
|
starcoder
|
defmodule Log.Level do
  @moduledoc """
  Provides functions to configure and order different levels.

  The level list may be customized via the `:log` application's `:levels`
  config. It must be a list of atoms containing every default level; extra
  custom levels are allowed. Severity ordering is the list order.
  """

  @default [:trace, :debug, :info, :warn, :error, :fatal]

  # Validated once at compile time: levels must be atoms and must include
  # every default level.
  @levels (case Application.get_env(:log, :levels, @default) do
             [] ->
               raise ArgumentError, "At least one level is required for Log"

             levels ->
               cond do
                 Enum.any?(levels, fn level -> !is_atom(level) end) ->
                   raise ArgumentError, "Levels must all be atoms"

                 !Enum.all?(@default, fn level -> level in levels end) ->
                   # Bug fix: the check requires ALL default levels, but the old
                   # message only mentioned :debug, :info, :warn and :error.
                   raise(
                     ArgumentError,
                     "Levels :trace, :debug, :info, :warn, :error and :fatal are required"
                   )

                 true ->
                   levels
               end
           end)

  # Weight = position in the configured list (lower = less severe).
  @level_weights Enum.with_index(@levels) |> Map.new()

  # Length of the longest level name — useful for aligned log output.
  @level_name_max_length @levels
                         |> Enum.map(&Kernel.to_string/1)
                         |> Enum.map(&String.length/1)
                         |> Enum.max()

  @type t :: atom()
  @type weight :: non_neg_integer()

  @doc "Returns the length (in characters) of the longest level name."
  @spec name_max_length() :: pos_integer()
  def name_max_length, do: @level_name_max_length

  @doc "Returns all configured levels, least severe first."
  @spec all() :: [t()]
  def all, do: @levels

  @doc """
  Returns the level unchanged when it is configured, or an
  `{:error, message}` tuple otherwise.
  """
  @spec parse(level :: atom()) :: t() | {:error, String.t()}
  def parse(level)

  # One clause per known level, generated at compile time.
  for level <- @levels do
    def parse(unquote(level)), do: unquote(level)
  end

  def parse(level), do: {:error, "Level #{inspect(level)} doesn't exist"}

  @doc "Like `parse/1`, but raises `ArgumentError` for unknown levels."
  @spec parse!(level :: atom()) :: t() | no_return()
  def parse!(level) do
    case parse(level) do
      {:error, msg} -> raise ArgumentError, msg
      result -> result
    end
  end

  @doc "Returns the weight of a level (`nil` for unknown levels)."
  @spec get_weight(level :: t()) :: weight()
  def get_weight(level), do: Map.get(@level_weights, level)

  @doc "Compares two known levels by severity."
  @spec compare(left :: t(), right :: t()) :: :lt | :eq | :gt
  def compare(left, right) do
    left_weight = get_weight(left)
    right_weight = get_weight(right)

    cond do
      left_weight < right_weight -> :lt
      left_weight == right_weight -> :eq
      left_weight > right_weight -> :gt
    end
  end

  @doc "Returns the least severe configured level."
  @spec min() :: t()
  def min, do: all() |> List.first()

  @doc "Returns the most severe configured level."
  @spec max() :: t()
  def max, do: all() |> List.last()
end
|
lib/log/level/level.ex
| 0.803444
| 0.436202
|
level.ex
|
starcoder
|
defmodule Scidata.CIFAR10 do
@moduledoc """
Utilities for downloading and parsing the CIFAR10 dataset
(https://www.cs.toronto.edu/~kriz/cifar.html).
"""
alias Scidata.Utils
@default_data_path "tmp/cifar10"
@base_url 'https://www.cs.toronto.edu/~kriz/'
@dataset_file 'cifar-10-binary.tar.gz'
# Each record in the CIFAR10 binary format is 3073 bytes: one label byte
# followed by a 3072-byte (3 channels x 32 x 32) image. Folds every record
# into two flat binaries: {all_images, all_labels}.
defp parse_images(content) do
for <<example::size(3073)-binary <- content>>, reduce: {<<>>, <<>>} do
{images, labels} ->
<<label::size(8)-bitstring, image::size(3072)-binary>> = example
{images <> image, labels <> label}
end
end
@doc """
Downloads the CIFAR10 dataset or fetches it locally.
## Options
* `:data_path` - path where the dataset .gz should be stored locally
* `:transform_images` - accepts a tuple like
`{binary_data, tensor_type, data_shape}` which can be used for
converting the `binary_data` to a tensor with a function like
fn {labels_binary, type, _shape} ->
labels_binary
|> Nx.from_binary(type)
|> Nx.new_axis(-1)
|> Nx.equal(Nx.tensor(Enum.to_list(0..9)))
|> Nx.to_batched_list(32)
end
* `:transform_labels` - similar to `:transform_images` but applied to
dataset labels
## Examples
iex> Scidata.CIFAR10.download()
{{<<59, 43, 50, 68, 98, 119, 139, 145, 149, 149, 131, 125, 142, 144, 137, 129,
137, 134, 124, 139, 139, 133, 136, 139, 152, 163, 168, 159, 158, 158, 152,
148, 16, 0, 18, 51, 88, 120, 128, 127, 126, 116, 106, 101, 105, 113, 109,
112, ...>>, {:u, 8}, {50000, 3, 32, 32}},
{<<6, 9, 9, 4, 1, 1, 2, 7, 8, 3, 4, 7, 7, 2, 9, 9, 9, 3, 2, 6, 4, 3, 6, 6, 2,
6, 3, 5, 4, 0, 0, 9, 1, 3, 4, 0, 3, 7, 3, 3, 5, 2, 2, 7, 1, 1, 1, ...>>,
{:u, 8}, {50000}}}
"""
def download(opts \\ []) do
data_path = opts[:data_path] || @default_data_path
transform_images = opts[:transform_images] || fn out -> out end
transform_labels = opts[:transform_labels] || fn out -> out end
gz = Utils.unzip_cache_or_download(@base_url, @dataset_file, data_path)
with {:ok, files} <- :erl_tar.extract({:binary, gz}, [:memory, :compressed]) do
# Only the five training batches (data_batch_*) are parsed — the test
# batch is ignored. Each batch file is parsed in its own Task.
{imgs, labels} =
files
|> Enum.filter(fn {fname, _} -> String.match?(List.to_string(fname), ~r/data_batch/) end)
|> Enum.map(fn {_, content} -> Task.async(fn -> parse_images(content) end) end)
|> Enum.map(&Task.await(&1, :infinity))
|> Enum.reduce({<<>>, <<>>}, fn {image, label}, {image_acc, label_acc} ->
{image_acc <> image, label_acc <> label}
end)
{transform_images.({imgs, {:u, 8}, {50000, 3, 32, 32}}),
transform_labels.({labels, {:u, 8}, {50000}})}
end
end
end
|
lib/cifar10.ex
| 0.689933
| 0.593079
|
cifar10.ex
|
starcoder
|
defmodule AMQPX.RoutingKeyMatcher do
  @doc """
  Checks if a routing key matches a topic-style pattern.

  `*` matches exactly one dot-separated word; `#` matches zero or more words.
  Both dotted strings and pre-split word lists are accepted.

      iex> AMQPX.RoutingKeyMatcher.matches?("a", "a")
      true
      iex> AMQPX.RoutingKeyMatcher.matches?("a", "b")
      false
      iex> AMQPX.RoutingKeyMatcher.matches?("a.c", "a.c")
      true
      iex> AMQPX.RoutingKeyMatcher.matches?("a.c", "b.c")
      false
      iex> AMQPX.RoutingKeyMatcher.matches?("c.a", "c.b")
      false
      iex> AMQPX.RoutingKeyMatcher.matches?("a.b.c", "a.*.c")
      true
      iex> AMQPX.RoutingKeyMatcher.matches?("a.c", "a.*.c")
      false
      iex> AMQPX.RoutingKeyMatcher.matches?("a.b.c.d", "a.*.d")
      false
      iex> AMQPX.RoutingKeyMatcher.matches?("a", "#")
      true
      iex> AMQPX.RoutingKeyMatcher.matches?("a.b.c", "#")
      true
      iex> AMQPX.RoutingKeyMatcher.matches?("a", "a.#")
      true
      iex> AMQPX.RoutingKeyMatcher.matches?("a", "#.a")
      true
      iex> AMQPX.RoutingKeyMatcher.matches?("a.b", "a.#")
      true
      iex> AMQPX.RoutingKeyMatcher.matches?("a.c", "a.#.c")
      true
      iex> AMQPX.RoutingKeyMatcher.matches?("a.b.c", "a.#.c")
      true
      iex> AMQPX.RoutingKeyMatcher.matches?("a.b.c.d", "a.#.d")
      true
  """
  # Dotted strings are split into word lists and re-dispatched.
  def matches?(key, pattern) when is_binary(key) and is_binary(pattern) do
    matches?(String.split(key, "."), String.split(pattern, "."))
  end

  # Both sides exhausted: match.
  def matches?([], []), do: true
  # A trailing "#" may match zero words.
  def matches?([], ["#"]), do: true

  def matches?([_word | key_rest] = key, ["#" | pattern_rest] = pattern) do
    # "#" may consume exactly this word, this word plus more, or nothing.
    matches?(key_rest, pattern_rest) or matches?(key_rest, pattern) or
      matches?(key, pattern_rest)
  end

  # "*" consumes exactly one word, whatever it is.
  def matches?([_word | key_rest], ["*" | pattern_rest]), do: matches?(key_rest, pattern_rest)
  # Identical literal words consume each other.
  def matches?([word | key_rest], [word | pattern_rest]), do: matches?(key_rest, pattern_rest)
  # Anything else is a mismatch.
  def matches?(key, pattern) when is_list(key) and is_list(pattern), do: false

  @doc """
  Returns true when the pattern contains a `*` or `#` wildcard segment.
  """
  def wildcard?(pattern) when is_binary(pattern) do
    pattern |> String.split(".") |> wildcard?()
  end

  def wildcard?(segments) when is_list(segments) do
    Enum.any?(segments, fn segment -> segment == "*" or segment == "#" end)
  end
end
|
lib/amqpx/routing_key_matcher.ex
| 0.683842
| 0.487185
|
routing_key_matcher.ex
|
starcoder
|
defmodule OffBroadway.Kafka.Producer do
  @moduledoc """
  Defines a Broadway Producer module which receives messages from Kafka
  to initiate the Broadway pipeline.
  Sends messages to the `handle_info/2` and `handle_demand/2` functions based
  on requests and tracks acknowledgements in state.
  """
  use GenStage
  use Retry
  require Logger
  @doc """
  Passes Kafka messages to the `handle_info/2` function.
  Called by Elsa message handler.
  """
  @spec handle_messages(pid(), term()) :: :ok
  # NOTE(review): send/2 returns the message it was given, not :ok, so the
  # @spec above is slightly optimistic — confirm callers ignore the result.
  def handle_messages(pid, messages) do
    send(pid, {:process_messages, messages})
  end
  @doc """
  Passes Kafka partition assignment changes to the `handle_info/2` function.
  Called by Elsa `assignments_revoked_handler`.
  """
  @spec assignments_revoked(pid()) :: :ok
  def assignments_revoked(pid) do
    # Blocks the calling (Elsa) process until the producer confirms that all
    # in-flight acknowledgements have drained; see handle_info/2 below.
    send(pid, {:assignments_revoked, self()})
    receive do
      {:assignments_revoked, :complete} -> :ok
    end
  end
  @doc """
  Starts an `OffBroadway.Kafka` producer process linked to the current
  process.
  """
  def start_link(opts) do
    GenStage.start_link(__MODULE__, opts)
  end
  @doc """
  Names the producer process and initializes state for the producer GenServer.
  If `args` contains a value for `:endpoints`, creates a handler config and
  passes it to the Elsa library to start a consumer group supervisor and stores
  the returned `pid` in the Broadway Producer state for reference.
  """
  @impl GenStage
  def init(args) do
    connection = Keyword.fetch!(args, :connection)
    # elsa_sup_pid is nil when args carry no :endpoints (Elsa managed elsewhere).
    state = %{
      demand: 0,
      events: [],
      connection: connection,
      acknowledgers: %{},
      elsa_sup_pid: maybe_start_elsa(args)
    }
    {:producer, state}
  end
  @doc """
  Handles message events based on demand.
  Updates the demand based on the existing demand in state and sends the
  requested number of message events.
  """
  @impl GenStage
  def handle_demand(demand, state) do
    # New demand is additive; any events already buffered in state are
    # dispatched immediately up to the combined demand.
    total_demand = demand + state.demand
    send_events(state, total_demand, state.events)
  end
  @doc """
  Handles incoming Kafka message events.
  Updates the total message events based on existing messages and incoming
  messages and sends the requested events.
  """
  @impl GenStage
  def handle_info({:process_messages, messages}, state) do
    total_events = state.events ++ messages
    send_events(state, state.demand, total_events)
  end
  @doc """
  Handles assignments revoked by the Kafka broker.
  Waits until acknowledgers report as empty and cleans out any events in queue.
  """
  @impl GenStage
  def handle_info({:assignments_revoked, caller}, state) do
    acknowledgers = Map.values(state.acknowledgers)
    # Poll every 100ms (Retry's `wait` macro) until every acknowledger reports
    # empty; the after/else clauses only normalize the macro result to a bool.
    wait constant_backoff(100) do
      Enum.all?(acknowledgers, &OffBroadway.Kafka.Acknowledger.is_empty?/1)
    after
      _ -> true
    else
      _ -> false
    end
    # Unblock the Elsa process waiting in assignments_revoked/1 and drop any
    # buffered events, which belong to partitions this producer no longer owns.
    send(caller, {:assignments_revoked, :complete})
    {:noreply, [], %{state | events: []}}
  end
  @doc """
  Handles termination of the Elsa consumer group supervisor process if one exists.
  """
  @impl GenStage
  def terminate(reason, %{elsa_sup_pid: elsa}) when is_pid(elsa) do
    Logger.error("#{__MODULE__}: Terminated elsa")
    Supervisor.stop(elsa)
    reason
  end
  def terminate(reason, _state) do
    reason
  end
  # Starts an Elsa consumer-group supervisor when :endpoints is configured and
  # returns its pid; returns nil (implicitly) otherwise.
  defp maybe_start_elsa(opts) do
    producer_pid = self()
    if Keyword.has_key?(opts, :endpoints) do
      endpoints = Keyword.fetch!(opts, :endpoints)
      group_consumer = Keyword.fetch!(opts, :group_consumer)
      if Keyword.get(opts, :create_topics, false) == true do
        topics = Keyword.fetch!(group_consumer, :topics)
        ensure_topic(endpoints, topics)
      end
      # Wire Elsa callbacks back to this producer process.
      new_group_consumer =
        group_consumer
        |> Keyword.put(:handler, OffBroadway.Kafka.ClassicHandler)
        |> Keyword.put(:handler_init_args, %{producer: self()})
        |> Keyword.put(:assignments_revoked_handler, fn ->
          OffBroadway.Kafka.Producer.assignments_revoked(producer_pid)
        end)
        |> Keyword.put(:direct_ack, true)
      config = Keyword.put(opts, :group_consumer, new_group_consumer)
      Logger.info("Starting Elsa from Producer - #{inspect(config)}")
      {:ok, pid} = Elsa.Supervisor.start_link(config)
      pid
    end
  end
  # Creates any of the configured topics that do not yet exist on the broker.
  defp ensure_topic(endpoints, topics) do
    Enum.each(topics, fn topic ->
      unless Elsa.topic?(endpoints, topic) do
        Elsa.create_topic(endpoints, topic)
      end
    end)
  end
  # Dispatches up to `total_demand` events, carrying leftover demand and
  # undispatched events forward in state.
  defp send_events(state, total_demand, total_events) do
    events_to_send = Enum.take(total_events, total_demand)
    num_events_to_send = length(events_to_send)
    new_state =
      %{
        state
        | demand: total_demand - num_events_to_send,
          events: Enum.drop(total_events, num_events_to_send)
      }
      |> ensure_acknowledgers(events_to_send)
    broadway_messages = wrap_events(new_state, events_to_send)
    add_offsets_to_acknowledger(broadway_messages)
    {:noreply, broadway_messages, new_state}
  end
  # Lazily starts one Acknowledger process per distinct ack_ref seen in the
  # outgoing batch, caching the pid in state.acknowledgers.
  defp ensure_acknowledgers(state, events) do
    events
    |> Enum.reduce(MapSet.new(), fn event, set -> MapSet.put(set, OffBroadway.Kafka.Acknowledger.ack_ref(event)) end)
    |> Enum.reduce(state, fn ack_ref, acc ->
      case Map.has_key?(acc.acknowledgers, ack_ref) do
        true ->
          acc
        false ->
          {:ok, pid} = OffBroadway.Kafka.Acknowledger.start_link(connection: acc.connection)
          %{acc | acknowledgers: Map.put(acc.acknowledgers, ack_ref, pid)}
      end
    end)
  end
  defp add_offsets_to_acknowledger([]), do: nil
  # Registers the offsets of the outgoing batch with each acknowledger.
  # NOTE(review): this collapses each ack_ref's offsets to min..max, which
  # assumes the batch holds a contiguous offset range per partition — confirm.
  defp add_offsets_to_acknowledger(broadway_messages) do
    broadway_messages
    |> Enum.map(fn %Broadway.Message{acknowledger: {_, ack_ref, %{offset: offset}}} -> {ack_ref, offset} end)
    |> Enum.group_by(fn {ack_ref, _} -> ack_ref end, fn {_, offset} -> offset end)
    |> Enum.map(fn {ack_ref, offsets} -> {ack_ref, Enum.min_max(offsets)} end)
    |> Enum.each(fn {ack_ref, {min, max}} ->
      OffBroadway.Kafka.Acknowledger.add_offsets(ack_ref.pid, min..max)
    end)
  end
  # Wraps each raw Kafka message in a %Broadway.Message{}, attaching the
  # acknowledger pid resolved from state for that message's ack_ref.
  defp wrap_events(state, messages) do
    messages
    |> Enum.map(fn message ->
      ack_ref = OffBroadway.Kafka.Acknowledger.ack_ref(message)
      acknowledger = Map.get(state.acknowledgers, ack_ref)
      %Broadway.Message{
        data: message,
        acknowledger: {OffBroadway.Kafka.Acknowledger, Map.put(ack_ref, :pid, acknowledger), %{offset: message.offset}}
      }
    end)
  end
end
|
lib/off_broadway/kafka/producer.ex
| 0.858837
| 0.485783
|
producer.ex
|
starcoder
|
defmodule Program do
  @moduledoc """
  An Intcode virtual machine.

  Holds program memory as a map of address -> value in `:state`, with
  `:pointer` as the instruction pointer and `:relative_pointer` as the base
  for relative (mode 2) addressing. `:input` is a list of pending inputs and
  `:output` accumulates outputs, most recent first.

  Also provides a decompiler (`decompile/1`), a compiler for the decompiled
  text format (`compile/1`) and an interactive inspector (`inspect/1`).
  """
  defstruct [:input, state: %{}, pointer: 0, relative_pointer: 0, output: []]

  @doc """
  Resolves a `{param, mode}` pair to a read value.
  Mode 0 = positional, 1 = immediate, 2 = relative; unset memory reads as 0.
  """
  def param_value(program, {param, mode}) do
    case mode do
      0 -> Map.get(program.state, param, 0)
      1 -> param
      2 -> Map.get(program.state, program.relative_pointer + param, 0)
    end
  end

  @doc """
  Resolves a `{param, mode}` pair to a write address.
  Immediate mode (1) is deliberately invalid for writes.
  """
  def param_index(program, {param, mode}) do
    case mode do
      0 -> param
      2 -> program.relative_pointer + param
    end
  end

  # Opcode 1: state[c] = a + b
  def add(program, a, b, c) do
    %{
      program
      | state:
          Map.put(
            program.state,
            param_index(program, c),
            param_value(program, a) + param_value(program, b)
          ),
        pointer: program.pointer + 4
    }
  end

  # Opcode 2: state[c] = a * b
  def multiply(program, a, b, c) do
    %{
      program
      | state:
          Map.put(
            program.state,
            param_index(program, c),
            param_value(program, a) * param_value(program, b)
          ),
        pointer: program.pointer + 4
    }
  end

  # Opcode 3: pop one input value into state[a]. Callers must guarantee input
  # is non-empty (step/1 returns {:block, _} otherwise).
  def save(program, a) do
    %{
      program
      | state:
          Map.put(
            program.state,
            param_index(program, a),
            hd(program.input)
          ),
        input: tl(program.input),
        pointer: program.pointer + 2
    }
  end

  # Opcode 4: prepend value a to the output list.
  def write(program, a) do
    %{
      program
      | output: [param_value(program, a) | program.output],
        pointer: program.pointer + 2
    }
  end

  # Opcode 5: jump to b if a is non-zero.
  def jump_true(program, a, b) do
    case param_value(program, a) do
      0 -> %{program | pointer: program.pointer + 3}
      _ -> %{program | pointer: param_value(program, b)}
    end
  end

  # Opcode 6: jump to b if a is zero.
  def jump_false(program, a, b) do
    case param_value(program, a) do
      0 -> %{program | pointer: param_value(program, b)}
      _ -> %{program | pointer: program.pointer + 3}
    end
  end

  # Opcode 7: state[c] = if a < b, do: 1, else: 0
  def less_than(program, a, b, c) do
    if param_value(program, a) < param_value(program, b) do
      %{
        program
        | state: Map.put(program.state, param_index(program, c), 1),
          pointer: program.pointer + 4
      }
    else
      %{
        program
        | state: Map.put(program.state, param_index(program, c), 0),
          pointer: program.pointer + 4
      }
    end
  end

  # Opcode 8: state[c] = if a == b, do: 1, else: 0
  def equals(program, a, b, c) do
    if param_value(program, a) == param_value(program, b) do
      %{
        program
        | state: Map.put(program.state, param_index(program, c), 1),
          pointer: program.pointer + 4
      }
    else
      %{
        program
        | state: Map.put(program.state, param_index(program, c), 0),
          pointer: program.pointer + 4
      }
    end
  end

  # Opcode 9: shift the relative base by a.
  def adjust_relative_pointer(program, a) do
    %{
      program
      | relative_pointer: program.relative_pointer + param_value(program, a),
        pointer: program.pointer + 2
    }
  end

  # Opcode 99: halt (handled specially in step/1; this is a table placeholder).
  def stop(_program), do: :ok

  @opcodes %{
    1 => &Program.add/4,
    2 => &Program.multiply/4,
    3 => &Program.save/2,
    4 => &Program.write/2,
    5 => &Program.jump_true/3,
    6 => &Program.jump_false/3,
    7 => &Program.less_than/4,
    8 => &Program.equals/4,
    9 => &Program.adjust_relative_pointer/2,
    99 => &Program.stop/1
  }

  @doc """
  Number of instruction parameters for `opcode` (handler arity minus the
  program argument). Raises `ArgumentError` for unknown opcodes, since
  `@opcodes[opcode]` is nil and `:erlang.fun_info/1` rejects it.
  """
  def get_arity(opcode), do: :erlang.fun_info(@opcodes[opcode])[:arity] - 1

  @doc """
  Splits `encoded_modes` (the digits above the two opcode digits) into a list
  of per-parameter modes, least-significant first.
  """
  def parse_modes(opcode, encoded_modes) do
    arity = get_arity(opcode)
    case arity do
      3 ->
        [
          rem(encoded_modes, 10),
          rem(div(encoded_modes, 10), 10),
          rem(div(encoded_modes, 100), 10)
        ]
      2 ->
        [rem(encoded_modes, 10), rem(div(encoded_modes, 10), 10)]
      1 ->
        [rem(encoded_modes, 10)]
      0 ->
        []
    end
  end

  @doc """
  Decodes a raw instruction word into `{opcode, modes}`.
  """
  def parse_operation(operation) do
    # right-most two digits
    opcode = rem(operation, 100)
    modes = parse_modes(opcode, div(operation, 100))
    {opcode, modes}
  end

  @doc """
  Executes one instruction. Returns `{:ok, program}`, `{:halt, program}` on
  opcode 99, or `{:block, program}` when opcode 3 finds no pending input.
  """
  def step(program) do
    {opcode, modes} = parse_operation(program.state[program.pointer])
    input = program.input
    case opcode do
      99 ->
        {:halt, program}
      3 when input == [] ->
        {:block, program}
      _ ->
        params =
          1..get_arity(opcode)
          |> Enum.map(fn x -> Map.get(program.state, program.pointer + x) end)
          |> Enum.zip(modes)
        args = [program | params]
        {:ok, apply(@opcodes[opcode], args)}
    end
  end

  defp run_impl({:halt, program}), do: program
  defp run_impl({:ok, program}), do: run_impl(step(program))
  defp run_impl({:block, _}),
    do: raise(ArgumentError, "Program blocked; call run_blocking to handle")

  @doc """
  Runs the program to completion, raising if it blocks on missing input.
  """
  def run(program), do: run_impl({:ok, program})

  defp run_blocking_impl({:ok, program}), do: run_blocking_impl(step(program))
  defp run_blocking_impl(halt_or_block_result), do: halt_or_block_result

  @doc """
  Runs until halt or block; returns the tagged `{:halt, _}` / `{:block, _}`.
  """
  def run_blocking(program), do: run_blocking_impl({:ok, program})

  @doc """
  Overwrites memory starting at `entry_point` with the values in `code`.
  """
  def hack(program, entry_point, code) do
    converted = code |> Enum.with_index(entry_point) |> Map.new(fn {v, k} -> {k, v} end)
    %{program | state: Map.merge(program.state, converted)}
  end

  @doc """
  Builds a `%Program{}` from a list of integers and optional input
  (a single value or a list; `nil` means no input).
  """
  def new(code, input \\ nil) do
    %Program{
      input: List.wrap(input),
      state: code |> Enum.with_index() |> Map.new(fn {v, k} -> {k, v} end)
    }
  end

  @doc """
  Renders the instruction at `index` as assembly-like text. With
  `evaluate: true`, non-destination parameters are shown as their current
  values instead of their addressing form. Unknown opcodes render as DATA.
  """
  def stringify_instruction(state, index, relative_pointer \\ 0, evaluate \\ false) do
    {opcode, modes} =
      try do
        parse_operation(state[index])
      rescue
        # Fix: unknown opcodes make get_arity/1 raise ArgumentError (from
        # :erlang.fun_info(nil)), not only FunctionClauseError; rescue both so
        # raw data words are rendered as DATA instead of crashing.
        _ in [FunctionClauseError, ArgumentError] -> {state[index], nil}
      end
    if modes != nil do
      params =
        case opcode do
          99 ->
            []
          _ ->
            1..get_arity(opcode)
            |> Enum.map(fn x -> state[index + x] end)
            |> Enum.zip(modes)
        end
      opcode_string =
        case opcode do
          1 -> "ADDI"
          2 -> "MULI"
          3 -> "SAVE"
          4 -> "OUTP"
          5 -> "TJMP"
          6 -> "FJMP"
          7 -> "CLTI"
          8 -> "CEQI"
          9 -> "AJRP"
          99 -> "HALT"
        end
      params_string =
        params
        |> Enum.with_index()
        |> Enum.map(fn {{param, mode}, i} ->
          case evaluate and i < get_arity(opcode) - 1 do
            true ->
              case mode do
                0 -> to_string(state[param])
                1 -> to_string(param)
                # Fix: relative mode reads state[relative_pointer + param];
                # previously `param` was ignored here, showing the wrong cell.
                2 -> to_string(state[relative_pointer + param])
              end
            false ->
              case mode do
                0 -> "[#{param}]"
                1 -> to_string(param)
                2 -> "[RP " <> if(param > 0, do: "+", else: "-") <> " #{abs(param)}]"
              end
          end
        end)
        |> Enum.map(fn x -> String.pad_leading(x, 10) end)
        |> Enum.join("\t")
        |> String.trim_trailing()
      opcode_string <> "\t" <> params_string
    else
      "DATA" <> "\t" <> String.pad_leading(to_string(opcode), 10)
    end
  end

  defp decompile(state, index, buffer) when index >= map_size(state),
    do: buffer |> Enum.reverse() |> Enum.join("\n")

  defp decompile(state, index, buffer) do
    index_string =
      index
      |> to_string
      |> String.pad_leading(4, "0")
    instruction = stringify_instruction(state, index)
    line = index_string <> "\t" <> instruction
    buffer = [line | buffer]
    opcode = rem(state[index], 100)
    # Valid opcodes advance past their parameters; data advances one word.
    index =
      if opcode in ([99] ++ Enum.to_list(1..9)),
        do: index + 1 + get_arity(opcode),
        else: index + 1
    decompile(state, index, buffer)
  end

  @doc """
  Decompiles the whole program into the textual format used by `compile/1`.
  """
  def decompile(program) do
    decompile(program.state, 0, [])
  end

  @doc """
  Compiles text in the `decompile/1` format back into a list of integers.
  """
  def compile(code) do
    code
    |> String.split("\n")
    |> Enum.map(fn line ->
      line
      |> String.split("\t")
      # drop index
      |> Enum.drop(1)
    end)
    |> Enum.flat_map(fn
      ["DATA", x] ->
        [x |> String.trim() |> Integer.parse() |> elem(0)]
      [instruction | args] ->
        opcode =
          case instruction do
            "ADDI" -> 1
            "MULI" -> 2
            "SAVE" -> 3
            "OUTP" -> 4
            "TJMP" -> 5
            "FJMP" -> 6
            "CLTI" -> 7
            "CEQI" -> 8
            "AJRP" -> 9
            "HALT" -> 99
          end
        # Reconstruct the mode digits: bare integer = immediate (1),
        # "[RP ...]" = relative (2), "[n]" = positional (0).
        modes =
          args
          |> Enum.map(&String.trim/1)
          |> Enum.map(fn arg ->
            case Integer.parse(arg) do
              {_, ""} -> 1
              :error -> if String.contains?(arg, "RP"), do: 2, else: 0
            end
          end)
          |> Enum.reverse()
          |> Integer.undigits()
        opcode = modes * 100 + opcode
        args =
          args
          |> Enum.map(&String.trim/1)
          |> Enum.map(fn arg ->
            case Integer.parse(arg) do
              {x, ""} ->
                x
              :error ->
                if String.contains?(arg, "RP") do
                  arg |> String.replace(~r/[\[\]RP +]/, "") |> Integer.parse() |> elem(0)
                else
                  arg |> String.replace(~r/[\[\]]/, "") |> Integer.parse() |> elem(0)
                end
            end
          end)
        List.flatten([opcode, args])
    end)
  end

  # Interactive REPL-style debugger loop. `count` is the number of executed
  # instructions; `history` is a stack of prior {status, program} snapshots
  # used by the (b)ack command.
  defp inspect({status, program}, count, history) do
    prompt = "\nWhat would you like to do? 'h' for help\n"
    help = """
    (h)elp: Display this text
    (n)ext: Evaluate the next instruction
    (b)ack: Rewind history back one step
    (c)ontinue: Continue evaluating the program until the program halts
    (p)rint: Prints the current instruction
    (i)nspect: Inspect what is stored at an index (expects one argument: the index)
    (r)ecent: Lists the recent events (expects one argument: the number of recent events; default: 10)
    (s)tats: Displays current status (current pointer, relative pointer, how many instructions have been executed)
    (t)able: Displays current program code state as a table
    (e)nter: Enter input
    (o)utput: Displays program output
    (q)uit: Quits the inspector and returns the current program
    """
    status_printer = fn count, pointer, relative ->
      """
      Instructions executed: #{count}
      Pointer: #{pointer}
      Relative pointer: #{relative}
      """
      |> IO.puts()
    end
    state_printer = fn state, index ->
      (["\t"] ++
         (0..9
          |> Enum.to_list()
          |> Enum.map(fn x -> x |> to_string |> String.pad_leading(7) end)
          |> Enum.intersperse("\t")))
      |> IO.puts()
      state
      |> Enum.map(fn {i, v} -> {div(i, 10) * 10, rem(i, 10), v} end)
      |> Enum.sort()
      |> Enum.group_by(fn {y, _x, _v} -> y end, fn {_y, x, v} -> {x, v} end)
      |> Enum.sort()
      |> Enum.map(fn {y, list} ->
        [String.pad_leading(to_string(y), 4, "0"), "\t"] ++
          (list
           |> Enum.sort()
           |> Enum.map(fn {x, v} ->
             if y + x == index do
               # note that the rest of the row will probably be screwed up because the reset character, but oh well!
               # red + bright + reset + 7 = 5 + 4 + 4 + 7 = 20
               String.pad_leading(
                 IO.ANSI.red() <> IO.ANSI.bright() <> to_string(v) <> IO.ANSI.reset(),
                 20
               )
             else
               v = String.pad_leading(to_string(v), 7)
               unless byte_size(v) > 7, do: v, else: String.slice(v, 0, 4) <> "..."
             end
           end)
           |> Enum.intersperse("\t"))
      end)
      |> Enum.join("\n")
      |> IO.puts()
    end
    input = IO.gets(prompt)
    command = input |> String.at(0) |> String.downcase()
    args = input |> String.split() |> Enum.drop(1)
    case command do
      "h" ->
        IO.puts(help)
        inspect({status, program}, count, history)
      "q" ->
        IO.puts("Okay! Thanks for inspecting!")
        program
      "n" ->
        {new_status, new_program} = step(program)
        # Only count transitions that actually executed something; repeated
        # stepping on a halted/blocked program leaves count unchanged.
        new_count =
          cond do
            new_status == :ok -> count + 1
            new_status == :halt and status != :halt -> count + 1
            new_status != :ok and status == :ok -> count + 1
            true -> count
          end
        if count == new_count do
          inspect({status, program}, count, history)
        else
          inspect({new_status, new_program}, new_count, [{status, program} | history])
        end
      "b" ->
        {previous, history} = List.pop_at(history, 0, {status, program})
        inspect(previous, max(0, count - 1), history)
      "c" ->
        result =
          Stream.unfold({{status, program}, count}, fn
            {{:halt, program}, count} -> {{:halt, program, count}, nil}
            {{:block, program}, count} -> {{:block, program, count}, nil}
            {{:ok, program}, count} -> {{:ok, program}, {step(program), count + 1}}
            nil -> nil
          end)
        reversed = Enum.reverse(result)
        [{new_status, new_program, new_count} | new_history] = reversed
        inspect({new_status, new_program}, new_count, new_history ++ history)
      "s" ->
        status_printer.(count, program.pointer, program.relative_pointer)
        inspect({status, program}, count, history)
      "t" ->
        state_printer.(program.state, program.pointer)
        inspect({status, program}, count, history)
      "i" ->
        {index, _} = args |> Enum.at(0, "-1") |> Integer.parse()
        value = Map.get(program.state, index)
        if value == nil,
          do:
            IO.warn(
              "No value present at index #{index} (defaults to -1 if index argument could not be parsed).",
              []
            ),
          else: IO.puts("The value present at index #{index} is #{value}.")
        inspect({status, program}, count, history)
      "p" ->
        IO.puts(
          stringify_instruction(program.state, program.pointer, program.relative_pointer, true)
        )
        inspect({status, program}, count, history)
      "e" ->
        input = Enum.at(args, 0)
        if input == nil do
          IO.puts("Entering input requires 1 arg")
          inspect({status, program}, count, history)
        else
          {input, _} = Integer.parse(input)
          program = %{program | input: [input | program.input]}
          inspect({status, program}, count, history)
        end
      "o" ->
        IO.inspect(program.output)
        inspect({status, program}, count, history)
      "r" ->
        {recent, _} = args |> Enum.at(0, "10") |> Integer.parse()
        history
        |> Enum.slice(0, recent)
        |> Enum.reverse()
        |> Enum.map(fn {_, program} ->
          stringify_instruction(program.state, program.pointer, program.relative_pointer, true)
        end)
        |> Enum.join("\n")
        |> IO.puts()
        inspect({status, program}, count, history)
      _ ->
        IO.puts("Sorry, I didn't quite understand that. Please try using 'h' for help.")
        inspect({status, program}, count, history)
    end
  end

  @doc """
  Starts the interactive inspector on `program`. Shadows `Kernel.inspect/1`
  inside this module by design.
  """
  def inspect(program) do
    inspect({:ok, program}, 0, [])
  end
end
|
lib/intcode.ex
| 0.550607
| 0.444685
|
intcode.ex
|
starcoder
|
defmodule Ockam.Vault do
  @moduledoc """
  Elixir wrapper around the Ockam vault NIF: secret generation, import/export,
  ECDH, HKDF-SHA256 and AEAD AES-GCM encrypt/decrypt. Secrets are opaque
  references held inside `Ockam.Vault.Secret` structs and are only usable
  through the vault that created them.
  """
  alias Ockam.Vault.NIF
  defmodule Secret do
    @moduledoc false
    # Opaque handle to a secret stored inside the native vault.
    defstruct [:reference]
    @opaque t :: %__MODULE__{}
  end
  defmodule SecretAttributes do
    @moduledoc false
    # Attributes passed to the NIF when generating/importing secrets.
    defstruct ty: :buffer, length: 0, persistence: :ephemeral, purpose: :key_agreement
  end
  defstruct [:reference]
  @opaque t :: %__MODULE__{}
  @spec create(Keyword.t()) :: {:ok, t} | {:error, term}
  # NOTE(review): `options` is accepted but currently ignored — confirm this
  # is intentional (reserved for future use).
  def create(options \\ []) when is_list(options) do
    with {:ok, reference} <- NIF.make_vault(), do: {:ok, %__MODULE__{reference: reference}}
  end
  @spec sha256(t, binary) :: {:ok, binary} | {:error, term}
  def sha256(%__MODULE__{reference: vault_ref}, data) when is_binary(data),
    do: NIF.sha256(vault_ref, data)
  # Generates a new secret. Accepts `:type` as a friendlier alias for the
  # NIF-level `:ty` attribute key.
  def generate_secret(%__MODULE__{reference: vault_ref}, attributes \\ []) do
    {type, attributes} = Keyword.pop(attributes, :type)
    attributes = if type, do: Keyword.put(attributes, :ty, type), else: attributes
    attributes = struct(SecretAttributes, attributes)
    with {:ok, secret_ref} <- NIF.generate_secret(vault_ref, attributes) do
      {:ok, %Secret{reference: secret_ref}}
    end
  end
  # Convenience: generates a Curve25519 private key and derives its public key.
  def generate_curve25519_keypair(vault) do
    with {:ok, private_key} <- generate_secret(vault, type: :curve25519_private),
         {:ok, public_key} <- get_public_key(vault, private_key) do
      {:ok, %{private: private_key, public: public_key}}
    end
  end
  # Imports raw secret material; `:length` is always derived from the binary.
  def import_secret(%__MODULE__{reference: vault_ref}, secret, attributes \\ []) do
    {type, attributes} = Keyword.pop(attributes, :type)
    attributes = if type, do: Keyword.put(attributes, :ty, type), else: attributes
    attributes = struct(SecretAttributes, attributes)
    attributes = Map.put(attributes, :length, byte_size(secret))
    with {:ok, secret_ref} <- NIF.import_secret(vault_ref, secret, attributes) do
      {:ok, %Secret{reference: secret_ref}}
    end
  end
  def export_secret(%__MODULE__{reference: vault_ref}, %Secret{reference: secret_ref}),
    do: NIF.export_secret(vault_ref, secret_ref)
  # Fetches a secret's attributes, translating the NIF's `:ty` key back to
  # the public `:type` key.
  # NOTE(review): on success this returns a bare keyword list rather than
  # {:ok, keyword} like the other functions — confirm callers expect that.
  def get_secret_attributes(%__MODULE__{reference: vault_ref}, %Secret{reference: secret_ref}) do
    with {:ok, attributes} <- NIF.get_secret_attributes(vault_ref, secret_ref) do
      {type, attributes} = attributes |> Map.from_struct() |> Map.to_list() |> Keyword.pop(:ty)
      if type, do: Keyword.put(attributes, :type, type), else: attributes
    end
  end
  def set_secret_type(%__MODULE__{reference: vault_ref}, %Secret{reference: secret_ref}, type),
    do: NIF.set_secret_type(vault_ref, secret_ref, type)
  @spec get_public_key(t, Secret.t()) :: {:ok, binary()} | {:error, term}
  def get_public_key(%__MODULE__{reference: vault_ref}, %Secret{reference: secret_ref}),
    do: NIF.get_public_key(vault_ref, secret_ref)
  # ECDH key agreement: our private key + peer's public key -> shared secret.
  def ecdh(%__MODULE__{reference: vault_ref}, %Secret{reference: private_key_ref}, peer_pubkey) do
    with {:ok, secret_ref} <- NIF.ecdh(vault_ref, private_key_ref, peer_pubkey) do
      {:ok, %Secret{reference: secret_ref}}
    end
  end
  # HKDF-SHA256 with no input key material (salt only).
  def hkdf_sha256(
        %__MODULE__{reference: vault_ref},
        %Secret{reference: salt_ref},
        nil,
        num_outputs
      ) do
    with {:ok, outputs} <- NIF.hkdf_sha256(vault_ref, salt_ref, nil, num_outputs) do
      {:ok, Enum.map(outputs, fn x -> %Secret{reference: x} end)}
    end
  end
  # HKDF-SHA256 with both salt and input key material; returns `num_outputs`
  # derived secrets.
  def hkdf_sha256(
        %__MODULE__{reference: vault_ref},
        %Secret{reference: salt_ref},
        %Secret{reference: ikm_ref},
        num_outputs
      ) do
    with {:ok, outputs} <- NIF.hkdf_sha256(vault_ref, salt_ref, ikm_ref, num_outputs) do
      {:ok, Enum.map(outputs, fn x -> %Secret{reference: x} end)}
    end
  end
  # AEAD AES-GCM encryption; `nonce` is an integer counter, `aad` is
  # additional authenticated data.
  def encrypt(
        %__MODULE__{reference: vault_ref},
        %Secret{reference: key_ref},
        nonce,
        aad,
        plaintext
      )
      when is_integer(nonce) do
    NIF.aead_aes_gcm_encrypt(vault_ref, key_ref, nonce, aad, plaintext)
  end
  # AEAD AES-GCM decryption of ciphertext with its appended auth tag.
  def decrypt(
        %__MODULE__{reference: vault_ref},
        %Secret{reference: key_ref},
        nonce,
        aad,
        ciphertext_and_tag
      )
      when is_integer(nonce) do
    NIF.aead_aes_gcm_decrypt(vault_ref, key_ref, nonce, aad, ciphertext_and_tag)
  end
  defmodule NIF do
    @moduledoc false
    use Rustler, otp_app: :ockam, crate: :ockam_nif
    # Each stub is replaced by the native implementation when the NIF loads;
    # calling one before load exits with :nif_not_loaded.
    def make_vault, do: exit(:nif_not_loaded)
    def random(_vault), do: exit(:nif_not_loaded)
    def sha256(_vault, _data), do: exit(:nif_not_loaded)
    def generate_secret(_vault, _attrs), do: exit(:nif_not_loaded)
    def import_secret(_vault, _data, _attrs), do: exit(:nif_not_loaded)
    def export_secret(_vault, _secret), do: exit(:nif_not_loaded)
    def get_secret_attributes(_vault, _secret), do: exit(:nif_not_loaded)
    def set_secret_type(_vault, _secret, _secret_type), do: exit(:nif_not_loaded)
    def get_public_key(_vault, _secret), do: exit(:nif_not_loaded)
    def ecdh(_vault, _private_key, _peer_pubkey), do: exit(:nif_not_loaded)
    def hkdf_sha256(_vault, _salt, _ikm, _num_derived_outputs), do: exit(:nif_not_loaded)
    def aead_aes_gcm_encrypt(_vault, _key, _nonce, _additional_data, _plaintext),
      do: exit(:nif_not_loaded)
    def aead_aes_gcm_decrypt(_vault, _key, _nonce, _additional_data, _ciphertext_and_tag),
      do: exit(:nif_not_loaded)
  end
end
|
implementations/elixir/lib/ockam/vault.ex
| 0.795499
| 0.434581
|
vault.ex
|
starcoder
|
defmodule Day11 do
  # Advent of Code day 11: a painting robot driven by an Intcode program
  # (IntComp) running in a separate process.
  defmodule Pos do
    # 2D grid position; y increases upward (see advance/2).
    defstruct x: 0, y: 0
  end
  # Turns the current facing. 0 = turn left, 1 = turn right.
  @spec do_turn(atom, integer) :: atom
  defp do_turn(direction, turn) do
    case turn do
      # left
      0 ->
        case direction do
          :up -> :left
          :right -> :up
          :down -> :right
          :left -> :down
        end
      # right
      1 ->
        case direction do
          :up -> :right
          :right -> :down
          :down -> :left
          :left -> :up
        end
    end
  end
  # Moves one step in the given direction.
  # NOTE(review): spec references Pos.t() but Pos defines no @type t —
  # compiles, but Dialyzer will flag it.
  @spec advance(Pos.t(), atom) :: Pos.t()
  defp advance(position, direction) do
    case direction do
      :up -> %Pos{x: position.x, y: position.y + 1}
      :right -> %Pos{x: position.x + 1, y: position.y}
      :down -> %Pos{x: position.x, y: position.y - 1}
      :left -> %Pos{x: position.x - 1, y: position.y}
    end
  end
  # One robot step: send current panel color to the Intcode process, receive
  # the paint color and then the turn, ask whether the program halted, and
  # recurse onto the next panel. Returns the final map of Pos -> color.
  @spec robot_loop(map(), Pos.t(), atom, pid) :: map()
  def robot_loop(paint_map, position, direction, int_pid) do
    # Send the position of the current square
    color = Map.get(paint_map, position, 0)
    # IO.puts("Color of {#{position.x}, #{position.y}} - #{color}")
    send(int_pid, {:value, color})
    new_map =
      receive do
        {:value, c} ->
          # IO.puts("Coloring {#{position.x}, #{position.y}} - #{c}")
          Map.put(paint_map, position, c)
      end
    new_dir =
      receive do
        {:value, turn} ->
          # IO.puts("Turning: #{turn}")
          do_turn(direction, turn)
      end
    # IO.puts("#{new_dir}: #{position.x}, #{position.y}")
    send(int_pid, {:is_halted})
    is_halted =
      receive do
        {:halted, is_halted} -> is_halted
      end
    if is_halted,
      do: new_map,
      else: robot_loop(new_map, advance(position, new_dir), new_dir, int_pid)
  end
  # Part 1: count of distinct panels painted at least once, starting on black.
  @spec part1(String.t()) :: integer
  def part1(file_name) do
    int_pid =
      Files.read_integers!(file_name)
      |> IntComp.run_as_process([], self())
    send(int_pid, {:run})
    paint_job = robot_loop(%{}, %Pos{}, :up, int_pid)
    length(Map.keys(paint_job))
  end
  # Part 2: start on a white panel, then render the painted registration
  # identifier as ASCII art ('*' = white, space = black).
  @spec part2(String.t()) :: String.t()
  def part2(file_name) do
    int_pid =
      Files.read_integers!(file_name)
      |> IntComp.run_as_process([], self())
    send(int_pid, {:run})
    paint_job = robot_loop(%{%Pos{} => 1}, %Pos{}, :up, int_pid)
    # Bounding box of all painted panels.
    {e_min_x, e_min_y, e_max_x, e_max_y} =
      Enum.reduce(
        Map.keys(paint_job),
        {0, 0, 0, 0},
        fn tile, {min_x, min_y, max_x, max_y} ->
          {min(min_x, tile.x), min(min_y, tile.y), max(max_x, tile.x), max(max_y, tile.y)}
        end
      )
    # Iterate y from max down to min so the top row prints first; 0x20 is ' '.
    lines =
      Enum.map(e_max_y..e_min_y, fn y ->
        to_string(
          for x <- e_min_x..e_max_x,
              do: if(Map.get(paint_job, %Pos{x: x, y: y}, 0) == 1, do: ?*, else: 0x20)
        )
      end)
    Enum.join(lines, "\n")
  end
end
|
lib/day11.ex
| 0.819749
| 0.668168
|
day11.ex
|
starcoder
|
defmodule Calendar.NaiveDateTime.Interval do
  @moduledoc """
  A `NaiveDateTime.Interval` consists of a start and an end `NaiveDateTime`.
  """
  @type t :: %__MODULE__{from: %NaiveDateTime{}, to: %NaiveDateTime{}}
  defstruct [:from, :to]

  @doc """
  Formats interval in ISO 8601 extended format.
  ## Example:
  # With a `NaiveDateTime.Interval`
  iex> %Calendar.NaiveDateTime.Interval{from: {{2016, 2, 27}, {10, 0, 0}} |> Calendar.NaiveDateTime.from_erl!, to: {{2016, 3, 1}, {11, 0, 0}} |> Calendar.NaiveDateTime.from_erl!} |> Calendar.NaiveDateTime.Interval.iso8601
  "2016-02-27T10:00:00/2016-03-01T11:00:00"
  # Also works with a `DateTime.Interval`
  iex> %Calendar.DateTime.Interval{from: {{2016, 2, 27}, {10, 0, 0}} |> Calendar.DateTime.from_erl!("Etc/UTC"), to: {{2016, 3, 1}, {11, 0, 0}} |> Calendar.DateTime.from_erl!("Etc/UTC")} |> Calendar.NaiveDateTime.Interval.iso8601
  "2016-02-27T10:00:00/2016-03-01T11:00:00"
  """
  def iso8601(interval),
    do: join(interval, &Calendar.NaiveDateTime.Format.iso8601/1)

  @doc """
  Formats interval in ISO 8601 basic format.
  ## Example:
  iex> %Calendar.NaiveDateTime.Interval{from: {{2016, 2, 27}, {10, 0, 0}}, to: {{2016, 3, 1}, {11, 0, 0}}} |> Calendar.NaiveDateTime.Interval.iso8601_basic
  "20160227T100000/20160301T110000"
  # Also works with a `Calendar.DateTime.Interval`
  iex> %Calendar.DateTime.Interval{from: {{2016, 2, 27}, {10, 0, 0}} |> Calendar.DateTime.from_erl!("Etc/UTC"), to: {{2016, 3, 1}, {11, 0, 0}} |> Calendar.DateTime.from_erl!("Etc/UTC")} |> Calendar.NaiveDateTime.Interval.iso8601_basic
  "20160227T100000/20160301T110000"
  """
  def iso8601_basic(interval),
    do: join(interval, &Calendar.NaiveDateTime.Format.iso8601_basic/1)

  # Formats both endpoints with the given formatter and joins them with "/"
  # as ISO 8601 prescribes for intervals.
  defp join(interval, formatter),
    do: formatter.(interval.from) <> "/" <> formatter.(interval.to)
end
|
lib/calendar/naive_date_time/interval.ex
| 0.908527
| 0.453867
|
interval.ex
|
starcoder
|
defmodule Day14 do
  alias Utils.Hash
  @moduledoc "Day 14: Disk Defragmentation"
  # Part 1: total number of used (1) squares in the 128x128 grid.
  def part1(input) do
    input
    |> to_grid
    |> Enum.map(& Enum.count(&1, fn(x) -> x == 1 end))
    |> Enum.sum
  end
  # Part 2: number of connected regions of used squares.
  def part2(input) do
    input
    |> to_grid
    |> group_rows
    |> num_groups
  end
  # Builds the 128x128 bit grid: one knot hash per row ("input-0".."input-127"),
  # converted from hex to bits and left-padded with 0s to exactly 128 bits.
  defp to_grid(input) do
    0..127
    |> Enum.map(fn n -> input <> "-#{n}" end)
    |> Enum.map(&Hash.knot_hash/1)
    |> Enum.map(& Integer.parse(&1, 16))
    |> Enum.map(& Integer.digits(elem(&1, 0), 2))
    |> Enum.map(
      fn l ->
        ([0] |> Stream.cycle |> Enum.take(128 - Enum.count(l))) ++ l
      end)
  end
  # Replaces each 1 with a provisional group id (unique positive integer);
  # horizontally adjacent 1s share an id. `count` threads the next fresh id
  # through all rows. Note: both the row order and each row's cells come out
  # reversed (built by prepending) — the reversal is consistent everywhere,
  # so column alignment between adjacent rows is preserved for group_pairs.
  defp group_rows(grid) do
    grid
    |> Enum.reduce({[], 0}, &group_row/2)
    |> elem(0)
  end
  defp group_row(row, {acc, count}) do
    {grouped, new_count} = group_row(row, count, [])
    {[grouped|acc], new_count}
  end
  defp group_row([], count, grouped), do: {grouped, count}
  defp group_row([0|t], count, []), do: group_row(t, count, [0])
  defp group_row([0|t], count, l), do: group_row(t, count, [0|l])
  defp group_row([1|t], count, []), do: group_row(t, count + 1, [count + 1])
  defp group_row([1|t], count, [0|l]), do: group_row(t, count + 1, [count + 1, 0 | l])
  defp group_row([1|t], count, l), do: group_row(t, count, [count | l])
  # Merges provisional ids that touch vertically, then counts distinct groups.
  # (sic: "partision" is a typo for "partition", kept to avoid a rename here.)
  defp num_groups(grouped_rows) do
    grouped_rows
    |> group_pairs
    |> Enum.uniq
    |> partision_into_connected_groups
    |> sum_groups(grouped_rows)
  end
  # Collects {current_id, id_above} pairs for every column where both the
  # current row and the row above hold a non-zero group id. Row 0 is compared
  # against an all-zero virtual row.
  defp group_pairs(grid, row \\ 0, pairs \\ [])
  defp group_pairs(_, 128, pairs), do: pairs
  defp group_pairs(grid, row, pairs) do
    cur_row = Enum.at(grid, row)
    prev_row = if row == 0 do
      [0] |> Stream.cycle |> Enum.take(128)
    else
      grid |> Enum.at(row - 1)
    end
    row_pairs = cur_row
    |> Enum.zip(prev_row)
    |> Enum.filter(fn {a, b} -> a != 0 && b != 0 end)
    group_pairs(grid, row + 1, pairs ++ row_pairs)
  end
  # Union-find-like merge: builds a map of provisional id -> canonical id.
  defp partision_into_connected_groups(pairs, groups \\ %{})
  defp partision_into_connected_groups([], groups), do: groups
  defp partision_into_connected_groups([{a, b}|t], groups) do
    keys = groups |> Map.keys
    new_groups = case {a in keys, b in keys} do
      {true, true} ->
        # Both already mapped: unify their canonical ids.
        update_same(Map.get(groups, a), Map.get(groups, b), groups)
      {true, false} ->
        Map.put(groups, b, Map.get(groups, a))
      {false, true} ->
        Map.put(groups, a, Map.get(groups, b))
      _ -> groups
      |> Map.put(a, a)
      |> Map.put(b, a)
    end
    partision_into_connected_groups(t, new_groups)
  end
  # Rewrites every entry canonicalized to b so it points at a instead.
  defp update_same(a, b, groups) when a == b, do: groups
  defp update_same(a, b, groups) do
    groups
    |> Map.to_list
    |> Enum.map(
      fn {k, v} ->
        if v == b do
          {k, a}
        else
          {k, v}
        end
      end)
    |> Enum.into(%{})
  end
  # Counts distinct canonical ids across the whole grid; ids absent from the
  # merge map are their own canonical id (Map.get default).
  defp sum_groups(map, group_rows) do
    group_rows
    |> Enum.map(& row_keys(&1, map, []))
    |> List.flatten
    |> Enum.uniq
    |> Enum.count
  end
  defp row_keys([], _, acc), do: acc
  defp row_keys([0|t], map, acc), do: row_keys(t, map, acc)
  defp row_keys([h|t], map, acc) do
    row_keys(t, map, [Map.get(map, h, h)|acc])
  end
end
|
apps/day14/lib/day14.ex
| 0.575827
| 0.560914
|
day14.ex
|
starcoder
|
defmodule OT.Fuzzer do
  @moduledoc """
  Provides fuzzing functions for fuzz testing OT functions.
  """
  # Each macro expands inline inside an ExUnit test, so the `assert` calls
  # resolve in the caller's context. `mod` must implement init_random/1,
  # random_op/1, apply!/2, and (per macro) compose/2, invert/1, transform/3.
  # Verifies compose/2: applying compose(a, b) once must equal applying
  # a then b sequentially.
  defmacro composition_fuzz(mod, length \\ 1_000) do
    quote do
      for _ <- 1..unquote(length) do
        initial_value = unquote(mod).init_random(64)
        # Edit the document
        op_a = unquote(mod).random_op(initial_value)
        data_a = unquote(mod).apply!(initial_value, op_a)
        # Make a subsequent edit
        op_b = unquote(mod).random_op(data_a)
        data_b = unquote(mod).apply!(data_a, op_b)
        # Compose the edits
        op_c = unquote(mod).compose(op_a, op_b)
        data_c = unquote(mod).apply!(initial_value, op_c)
        assert data_b == data_c
      end
    end
  end
  # Verifies invert/1 interacts correctly with transform/3: undoing op_a after
  # a concurrent op_b must yield the same document as op_b transformed over
  # the undo applied to the original.
  defmacro invert_fuzz(mod, length \\ 1_000) do
    quote do
      for _ <- 1..unquote(length) do
        initial_value = unquote(mod).init_random(64)
        # Make an edit
        op_a = unquote(mod).random_op(initial_value)
        # Apply the edit
        data_a = unquote(mod).apply!(initial_value, op_a)
        # Make a subsequent edit
        op_b = unquote(mod).random_op(data_a)
        # Apply the edit
        data_a_b = unquote(mod).apply!(data_a, op_b)
        # Invert the first edit
        op_a_i = unquote(mod).invert(op_a)
        # Transform the undo against subsequent edits
        op_a_i_p = unquote(mod).transform(op_a_i, op_b, :left)
        # Apply the undo
        data_a_b_aip = unquote(mod).apply!(data_a_b, op_a_i_p)
        # Transform the subsequent edit over the undo
        op_b_p = unquote(mod).transform(op_b, op_a_i, :right)
        # Apply the transformed subsequent edit
        data_bp = unquote(mod).apply!(initial_value, op_b_p)
        assert data_a_b_aip == data_bp
      end
    end
  end
  # Verifies the TP1 convergence property: two concurrent ops transformed
  # against each other (with opposite priorities) converge to the same result
  # regardless of application order.
  defmacro transformation_fuzz(mod, length \\ 1_000) do
    quote do
      for _ <- 1..unquote(length) do
        initial_value = unquote(mod).init_random(64)
        # Make to concurrent edits
        op_a = unquote(mod).random_op(initial_value)
        op_b = unquote(mod).random_op(initial_value)
        side = Enum.random([:left, :right])
        other_side = if side == :left, do: :right, else: :left
        # Transform the edits
        op_a_prime = unquote(mod).transform(op_a, op_b, side)
        op_b_prime = unquote(mod).transform(op_b, op_a, other_side)
        data_a =
          initial_value
          |> unquote(mod).apply!(op_a)
          |> unquote(mod).apply!(op_b_prime)
        data_b =
          initial_value
          |> unquote(mod).apply!(op_b)
          |> unquote(mod).apply!(op_a_prime)
        assert data_a == data_b
      end
    end
  end
end
|
test/support/fuzzer.ex
| 0.78964
| 0.879199
|
fuzzer.ex
|
starcoder
|
defmodule AWS.DeviceFarm do
  @moduledoc """
  AWS Device Farm is a service that enables mobile app developers to test
  Android, iOS, and Fire OS apps on physical phones, tablets, and other
  devices in the cloud.
  """

  @doc """
  Creates a device pool.
  """
  def create_device_pool(client, input, options \\ []),
    do: request(client, "CreateDevicePool", input, options)

  @doc """
  Creates a new project.
  """
  def create_project(client, input, options \\ []),
    do: request(client, "CreateProject", input, options)

  @doc """
  Specifies and starts a remote access session.
  """
  def create_remote_access_session(client, input, options \\ []),
    do: request(client, "CreateRemoteAccessSession", input, options)

  @doc """
  Uploads an app or test scripts.
  """
  def create_upload(client, input, options \\ []),
    do: request(client, "CreateUpload", input, options)

  @doc """
  Deletes a device pool given the pool ARN. Does not allow deletion of
  curated pools owned by the system.
  """
  def delete_device_pool(client, input, options \\ []),
    do: request(client, "DeleteDevicePool", input, options)

  @doc """
  Deletes an AWS Device Farm project, given the project ARN.
  **Note** Deleting this resource does not stop an in-progress run.
  """
  def delete_project(client, input, options \\ []),
    do: request(client, "DeleteProject", input, options)

  @doc """
  Deletes a completed remote access session and its results.
  """
  def delete_remote_access_session(client, input, options \\ []),
    do: request(client, "DeleteRemoteAccessSession", input, options)

  @doc """
  Deletes the run, given the run ARN.
  **Note** Deleting this resource does not stop an in-progress run.
  """
  def delete_run(client, input, options \\ []),
    do: request(client, "DeleteRun", input, options)

  @doc """
  Deletes an upload given the upload ARN.
  """
  def delete_upload(client, input, options \\ []),
    do: request(client, "DeleteUpload", input, options)

  @doc """
  Returns the number of unmetered iOS and/or unmetered Android devices that
  have been purchased by the account.
  """
  def get_account_settings(client, input, options \\ []),
    do: request(client, "GetAccountSettings", input, options)

  @doc """
  Gets information about a unique device type.
  """
  def get_device(client, input, options \\ []),
    do: request(client, "GetDevice", input, options)

  @doc """
  Gets information about a device pool.
  """
  def get_device_pool(client, input, options \\ []),
    do: request(client, "GetDevicePool", input, options)

  @doc """
  Gets information about compatibility with a device pool.
  """
  def get_device_pool_compatibility(client, input, options \\ []),
    do: request(client, "GetDevicePoolCompatibility", input, options)

  @doc """
  Gets information about a job.
  """
  def get_job(client, input, options \\ []),
    do: request(client, "GetJob", input, options)

  @doc """
  Gets the current status and future status of all offerings purchased by an
  AWS account. The response indicates how many offerings are currently
  available and the offerings that will be available in the next period. The
  API returns a `NotEligible` error if the user is not permitted to invoke
  the operation. Please contact
  [<EMAIL>](mailto:<EMAIL>)
  if you believe that you should be able to invoke this operation.
  """
  def get_offering_status(client, input, options \\ []),
    do: request(client, "GetOfferingStatus", input, options)

  @doc """
  Gets information about a project.
  """
  def get_project(client, input, options \\ []),
    do: request(client, "GetProject", input, options)

  @doc """
  Returns a link to a currently running remote access session.
  """
  def get_remote_access_session(client, input, options \\ []),
    do: request(client, "GetRemoteAccessSession", input, options)

  @doc """
  Gets information about a run.
  """
  def get_run(client, input, options \\ []),
    do: request(client, "GetRun", input, options)

  @doc """
  Gets information about a suite.
  """
  def get_suite(client, input, options \\ []),
    do: request(client, "GetSuite", input, options)

  @doc """
  Gets information about a test.
  """
  def get_test(client, input, options \\ []),
    do: request(client, "GetTest", input, options)

  @doc """
  Gets information about an upload.
  """
  def get_upload(client, input, options \\ []),
    do: request(client, "GetUpload", input, options)

  @doc """
  Installs an application to the device in a remote access session. For
  Android applications, the file must be in .apk format. For iOS
  applications, the file must be in .ipa format.
  """
  def install_to_remote_access_session(client, input, options \\ []),
    do: request(client, "InstallToRemoteAccessSession", input, options)

  @doc """
  Gets information about artifacts.
  """
  def list_artifacts(client, input, options \\ []),
    do: request(client, "ListArtifacts", input, options)

  @doc """
  Gets information about device pools.
  """
  def list_device_pools(client, input, options \\ []),
    do: request(client, "ListDevicePools", input, options)

  @doc """
  Gets information about unique device types.
  """
  def list_devices(client, input, options \\ []),
    do: request(client, "ListDevices", input, options)

  @doc """
  Gets information about jobs.
  """
  def list_jobs(client, input, options \\ []),
    do: request(client, "ListJobs", input, options)

  @doc """
  Returns a list of all historical purchases, renewals, and system renewal
  transactions for an AWS account. The list is paginated and ordered by a
  descending timestamp (most recent transactions are first). The API returns
  a `NotEligible` error if the user is not permitted to invoke the operation.
  Please contact
  [<EMAIL>](mailto:<EMAIL>)
  if you believe that you should be able to invoke this operation.
  """
  def list_offering_transactions(client, input, options \\ []),
    do: request(client, "ListOfferingTransactions", input, options)

  @doc """
  Returns a list of products or offerings that the user can manage through
  the API. Each offering record indicates the recurring price per unit and
  the frequency for that offering. The API returns a `NotEligible` error if
  the user is not permitted to invoke the operation. Please contact
  [<EMAIL>](mailto:<EMAIL>)
  if you believe that you should be able to invoke this operation.
  """
  def list_offerings(client, input, options \\ []),
    do: request(client, "ListOfferings", input, options)

  @doc """
  Gets information about projects.
  """
  def list_projects(client, input, options \\ []),
    do: request(client, "ListProjects", input, options)

  @doc """
  Returns a list of all currently running remote access sessions.
  """
  def list_remote_access_sessions(client, input, options \\ []),
    do: request(client, "ListRemoteAccessSessions", input, options)

  @doc """
  Gets information about runs, given an AWS Device Farm project ARN.
  """
  def list_runs(client, input, options \\ []),
    do: request(client, "ListRuns", input, options)

  @doc """
  Gets information about samples, given an AWS Device Farm project ARN
  """
  def list_samples(client, input, options \\ []),
    do: request(client, "ListSamples", input, options)

  @doc """
  Gets information about suites.
  """
  def list_suites(client, input, options \\ []),
    do: request(client, "ListSuites", input, options)

  @doc """
  Gets information about tests.
  """
  def list_tests(client, input, options \\ []),
    do: request(client, "ListTests", input, options)

  @doc """
  Gets information about unique problems.
  """
  def list_unique_problems(client, input, options \\ []),
    do: request(client, "ListUniqueProblems", input, options)

  @doc """
  Gets information about uploads, given an AWS Device Farm project ARN.
  """
  def list_uploads(client, input, options \\ []),
    do: request(client, "ListUploads", input, options)

  @doc """
  Immediately purchases offerings for an AWS account. Offerings renew with
  the latest total purchased quantity for an offering, unless the renewal was
  overridden. The API returns a `NotEligible` error if the user is not
  permitted to invoke the operation. Please contact
  [<EMAIL>](mailto:<EMAIL>)
  if you believe that you should be able to invoke this operation.
  """
  def purchase_offering(client, input, options \\ []),
    do: request(client, "PurchaseOffering", input, options)

  @doc """
  Explicitly sets the quantity of devices to renew for an offering, starting
  from the `effectiveDate` of the next period. The API returns a
  `NotEligible` error if the user is not permitted to invoke the operation.
  Please contact
  [<EMAIL>](mailto:<EMAIL>)
  if you believe that you should be able to invoke this operation.
  """
  def renew_offering(client, input, options \\ []),
    do: request(client, "RenewOffering", input, options)

  @doc """
  Schedules a run.
  """
  def schedule_run(client, input, options \\ []),
    do: request(client, "ScheduleRun", input, options)

  @doc """
  Ends a specified remote access session.
  """
  def stop_remote_access_session(client, input, options \\ []),
    do: request(client, "StopRemoteAccessSession", input, options)

  @doc """
  Initiates a stop request for the current test run. AWS Device Farm will
  immediately stop the run on devices where tests have not started executing,
  and you will not be billed for these devices. On devices where tests have
  started executing, Setup Suite and Teardown Suite tests will run to
  completion before stopping execution on those devices. You will be billed
  for Setup, Teardown, and any tests that were in progress or already
  completed.
  """
  def stop_run(client, input, options \\ []),
    do: request(client, "StopRun", input, options)

  @doc """
  Modifies the name, description, and rules in a device pool given the
  attributes and the pool ARN. Rule updates are all-or-nothing, meaning they
  can only be updated as a whole (or not at all).
  """
  def update_device_pool(client, input, options \\ []),
    do: request(client, "UpdateDevicePool", input, options)

  @doc """
  Modifies the specified project name, given the project ARN and a new name.
  """
  def update_project(client, input, options \\ []),
    do: request(client, "UpdateProject", input, options)

  # Signs and sends a JSON-1.1 request to the Device Farm endpoint and
  # normalizes the HTTP result into ok/error tuples.
  @spec request(map(), binary(), map(), list()) ::
    {:ok, Poison.Parser.t | nil, Poison.Response.t} |
    {:error, Poison.Parser.t} |
    {:error, HTTPoison.Error.t}
  defp request(client, action, input, options) do
    client = %{client | service: "devicefarm"}
    host = get_host("devicefarm", client)
    url = get_url(host, client)

    unsigned_headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "DeviceFarm_20150623.#{action}"}
    ]

    payload = Poison.Encoder.encode(input, [])
    signed_headers = AWS.Request.sign_v4(client, "POST", url, unsigned_headers, payload)

    case HTTPoison.post(url, payload, signed_headers, options) do
      {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
        # Successful call with an empty body: nothing to decode.
        {:ok, nil, response}

      {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
        {:ok, Poison.Parser.parse!(body), response}

      {:ok, %HTTPoison.Response{body: body}} ->
        # Non-200: the service returns a JSON error document.
        parsed = Poison.Parser.parse!(body)
        {:error, {parsed["__type"], parsed["message"]}}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # "local" region targets a localhost endpoint (useful for testing).
  defp get_host(endpoint_prefix, client) do
    if client.region == "local",
      do: "localhost",
      else: "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
  end

  defp get_url(host, %{proto: proto, port: port}), do: "#{proto}://#{host}:#{port}/"
end
|
lib/aws/device_farm.ex
| 0.790652
| 0.417925
|
device_farm.ex
|
starcoder
|
defmodule GroupManager.Data.WorldClock do
  @moduledoc """
  A record-backed collection of `LocalClock` entries — the group's shared
  notion of time, with (conceptually) one entry per member `NetID`.
  Provides guard-safe validity macros plus merge/increment/query helpers.
  """
  require Record
  require GroupManager.Data.LocalClock
  require Chatter.NetID
  alias GroupManager.Data.LocalClock
  alias Chatter.NetID

  # Underlying representation: the 2-tuple {:world_clock, time} where `time`
  # is a list of LocalClock entries.
  Record.defrecord :world_clock,
    time: []

  @type t :: record( :world_clock,
                     time: list(LocalClock.t) )

  # Creates an empty world clock (no member entries).
  @spec new() :: t
  def new()
  do
    world_clock(time: [])
  end

  # Guard-compatible validity check: a 2-tuple tagged :world_clock whose
  # `time` field is a list. Expands to a guard expression when used inside a
  # guard, and to a bound-variable expression otherwise (to avoid multiple
  # evaluation of `data`).
  defmacro is_valid(data) do
    case Macro.Env.in_guard?(__CALLER__) do
      true ->
        quote do
          is_tuple(unquote(data)) and tuple_size(unquote(data)) == 2 and
          :erlang.element(1, unquote(data)) == :world_clock and
          # time
          is_list(:erlang.element(2, unquote(data))) == true
        end
      false ->
        quote bind_quoted: [result: data] do
          is_tuple(result) and tuple_size(result) == 2 and
          :erlang.element(1, result) == :world_clock and
          # time
          is_list(:erlang.element(2, result)) == true
        end
    end
  end

  # Guard-compatible emptiness check: true when the `time` list is empty.
  defmacro is_empty(data) do
    case Macro.Env.in_guard?(__CALLER__) do
      true ->
        quote do
          # time
          :erlang.element(2, unquote(data)) == []
        end
      false ->
        quote do
          result = unquote(data)
          # time
          :erlang.element(2, result) == []
        end
    end
  end

  # Runtime (non-guard) validity predicate built on is_valid/1.
  @spec valid?(t) :: boolean
  def valid?(data)
  when is_valid(data)
  do
    true
  end
  def valid?(_), do: false

  # Runtime emptiness predicate; only defined for valid clocks.
  @spec empty?(t) :: boolean
  def empty?(data)
  when is_valid(data) and
       is_empty(data)
  do
    true
  end
  def empty?(data)
  when is_valid(data)
  do
    false
  end

  # Accessor for the list of LocalClock entries.
  @spec time(t) :: list(LocalClock.t)
  def time(clock)
  when is_valid(clock)
  do
    world_clock(clock, :time)
  end

  # Merges a single local clock into the world clock's entry list.
  @spec add(t, LocalClock.t) :: t
  def add(clock, local_clock)
  when is_valid(clock) and
       LocalClock.is_valid(local_clock)
  do
    world_clock(time: LocalClock.merge(world_clock(clock, :time), local_clock))
  end

  # Number of LocalClock entries held by the clock.
  @spec size(t) :: integer
  def size(clock)
  when is_valid(clock)
  do
    length(world_clock(clock, :time))
  end

  # Finds the first LocalClock entry belonging to the given member id,
  # or nil when the member has no entry.
  @spec get(t, NetID.t) :: LocalClock.t | nil
  def get(clock, id)
  when is_valid(clock) and
       NetID.is_valid(id)
  do
    world_clock(clock, :time) |> Enum.find(fn(x) -> LocalClock.member(x) == id end)
  end

  # Advances the given member's local clock: starts a fresh LocalClock if the
  # member has no entry yet, otherwise increments the existing one.
  @spec next(t, NetID.t) :: t
  def next(clock, id)
  when is_valid(clock) and
       NetID.is_valid(id)
  do
    case get(clock, id) do
      nil -> add(clock, LocalClock.new(id))
      local_clock -> add(clock, LocalClock.next(local_clock))
    end
  end

  # Merges two world clocks entry-wise via LocalClock.merge/2.
  @spec merge(t, t) :: t
  def merge(lhs, rhs)
  when is_valid(lhs) and
       is_valid(rhs)
  do
    world_clock(time: LocalClock.merge(world_clock(lhs, :time),
                                       world_clock(rhs, :time)))
  end

  # Counts entries belonging to the given member id. Dispatch between the two
  # count/2 clauses is by guard: NetID-shaped vs LocalClock-shaped argument.
  @spec count(t, NetID.t) :: integer
  def count(clock, id)
  when is_valid(clock) and
       NetID.is_valid(id)
  do
    List.foldl(world_clock(clock, :time), 0, fn(x, acc) ->
      clock_id = LocalClock.member(x)
      if( clock_id == id )
      do
        acc + 1
      else
        acc
      end
    end)
  end

  # Counts entries exactly equal to the given LocalClock (member and time).
  @spec count(t, LocalClock.t) :: integer
  def count(clock, id)
  when is_valid(clock) and
       LocalClock.is_valid(id)
  do
    List.foldl(world_clock(clock, :time), 0, fn(x, acc) ->
      if( x == id )
      do
        acc + 1
      else
        acc
      end
    end)
  end

  # All distinct member ids that have an entry in the clock.
  @spec extract_netids(t) :: list(NetID.t)
  def extract_netids(clock)
  when is_valid(clock)
  do
    Enum.map(world_clock(clock, :time), fn(x) -> LocalClock.member(x) end)
    |> Enum.uniq
  end

  # Serializes the entry list using the provided NetID -> id mapping.
  @spec encode_with(t, map) :: binary
  def encode_with(clock, id_map)
  when is_valid(clock) and
       is_map(id_map)
  do
    world_clock(clock, :time) |> LocalClock.encode_list_with(id_map)
  end

  # Inverse of encode_with/2: decodes entries from `bin`, returning the clock
  # and the unconsumed remainder of the binary.
  @spec decode_with(binary, map) :: {t, binary}
  def decode_with(bin, id_map)
  when is_binary(bin) and
       byte_size(bin) > 0 and
       is_map(id_map)
  do
    {elems, remaining} = LocalClock.decode_list_with(bin, id_map)
    {world_clock(time: elems), remaining}
  end
end
|
lib/group_manager/data/world_clock.ex
| 0.690455
| 0.499939
|
world_clock.ex
|
starcoder
|
defmodule Elasticlunr.Storage.Disk do
  @moduledoc """
  This storage provider writes data to the local disk of the running application.
  ```elixir
  config :elasticlunr,
    storage: Elasticlunr.Storage.Disk
  config :elasticlunr, Elasticlunr.Storage.Disk,
    directory: "/path/to/project/storage"
  ```
  """
  use Elasticlunr.Storage

  alias Elasticlunr.{DB, Deserializer, Index, Serializer}

  require Logger

  # Each index is persisted as two sibling files:
  # "<name>.index" (serialized index structure) and "<name>.data" (DB contents).
  @data_file_ext "data"
  @index_file_ext "index"
  @extensions [@data_file_ext, @index_file_ext]

  # Persists the index: writes the DB to the data file, then streams the
  # serialized index into the (gzip-compressed) index file.
  @impl true
  def write(%Index{db: db, name: name} = index) do
    directory = config(:directory, ".")
    data = Serializer.serialize(index)

    with %{data: data_file, index: index_file} <- filenames(directory, name),
         :ok <- DB.to(db, file: data_file) do
      write_serialized_index_to_file(index_file, data)
    end
  end

  # Loads an index by name: deserializes the index file, then restores the DB
  # from the data file and refreshes the document count.
  @impl true
  def read(name) do
    directory = config(:directory, ".")
    %{data: data_file, index: index_file} = filenames(directory, name)

    index =
      File.stream!(index_file, ~w[compressed]a)
      |> Deserializer.deserialize()

    # NOTE(review): the `else` only matches `false`; any other non-matching
    # value (e.g. an {:error, _} from DB.from/2) would raise WithClauseError.
    # Confirm the upstream contracts before extending this clause.
    with %Index{db: db} <- index,
         {:ok, db} <- DB.from(db, file: data_file) do
      Index.update_documents_size(%{index | db: db})
    else
      false ->
        Logger.info("[elasticlunr] unable to load data for index #{index.name}")
        index
    end
  end

  # Loads every index found on disk, identified by its ".index" file.
  @impl true
  def load_all do
    files()
    |> Stream.filter(&String.ends_with?(&1, @index_file_ext))
    |> Stream.map(fn file ->
      name = without_ext(file, @index_file_ext)
      read(name)
    end)
  end

  # Removes both files belonging to the named index.
  @impl true
  def delete(name) do
    directory = config(:directory, ".")
    %{data: data_file, index: index_file} = filenames(directory, name)

    with :ok <- File.rm(index_file) do
      File.rm(data_file)
    end
  end

  # Absolute paths of all storage files (both extensions) in the configured
  # directory.
  @spec files() :: list(binary())
  def files do
    directory = config(:directory, ".")
    # Enum.join replaces the previous map_join with an identity function.
    match = Path.join(directory, "*.{#{Enum.join(@extensions, ",")}}")

    Path.wildcard(match)
    |> Enum.map(&Path.expand/1)
  end

  # Streams each serialized chunk into the (gzip-compressed) file, one line
  # per chunk.
  @spec write_serialized_index_to_file(binary(), Enum.t()) :: :ok
  def write_serialized_index_to_file(path, data) do
    data
    |> Stream.into(File.stream!(path, ~w[compressed]a), &"#{&1}\n")
    |> Stream.run()
  end

  # Builds the pair of file paths for an index name. The data path is a
  # charlist because DB.to/DB.from expect one (presumably Erlang file APIs).
  defp filenames(directory, name) do
    %{
      index: Path.join(directory, "#{name}.#{@index_file_ext}"),
      data: Path.join(directory, "#{name}.#{@data_file_ext}") |> String.to_charlist()
    }
  end

  defp without_ext(file, ext), do: Path.basename(file, ".#{ext}")
end
|
lib/elasticlunr/storage/disk.ex
| 0.658966
| 0.675316
|
disk.ex
|
starcoder
|
defmodule Membrane.RTP.OutboundPacketTracker do
  @moduledoc """
  Tracks statistics of outbound packets.

  Besides tracking statistics, the tracker can also serialize a packet's
  header and payload stored inside an incoming buffer into a proper RTP
  packet.
  """
  use Membrane.Filter

  alias Membrane.{Buffer, RTP, Payload, Time}

  def_input_pad :input,
    caps: :any,
    demand_unit: :buffers

  def_output_pad :output,
    caps: :any

  def_options ssrc: [spec: RTP.ssrc_t()],
              payload_type: [spec: RTP.payload_type_t()],
              clock_rate: [spec: RTP.clock_rate_t()],
              alignment: [
                default: 1,
                spec: pos_integer(),
                description: """
                Number of bytes that each packet should be aligned to.
                Alignment is achieved by adding RTP padding.
                """
              ]

  defmodule State do
    @moduledoc false
    use Bunch.Access

    @type t :: %__MODULE__{
            any_buffer_sent?: boolean(),
            stats_acc: %{}
          }

    # any_buffer_sent?: set when a buffer is processed, cleared after each
    # stats report; stats_acc: running sender statistics.
    defstruct any_buffer_sent?: false,
              stats_acc: %{
                clock_rate: 0,
                timestamp: 0,
                rtp_timestamp: 0,
                sender_packet_count: 0,
                sender_octet_count: 0
              }
  end

  @impl true
  def handle_init(options) do
    # Seed the accumulator with the configured clock rate. The state is the
    # options map merged into the State fields (State keys win on conflict),
    # so option fields like ssrc/payload_type live beside the struct fields.
    state = %State{} |> put_in([:stats_acc, :clock_rate], options.clock_rate)
    {:ok, Map.merge(Map.from_struct(options), state)}
  end

  @impl true
  def handle_demand(:output, size, :buffers, _ctx, state) do
    {{:ok, demand: {:input, size}}, state}
  end

  @impl true
  def handle_process(:input, %Buffer{} = buffer, _ctx, state) do
    state = update_stats(buffer, state)
    # Pop RTP metadata off the buffer and use it to build the packet header,
    # overriding ssrc/payload_type with this tracker's configuration.
    # NOTE(review): `%{rtp_metadata | ...}` raises unless the metadata already
    # contains :ssrc and :payload_type (the %{} default would crash) — confirm
    # upstream elements always populate them.
    {rtp_metadata, metadata} = Map.pop(buffer.metadata, :rtp, %{})

    header =
      struct(RTP.Header, %{rtp_metadata | ssrc: state.ssrc, payload_type: state.payload_type})

    payload =
      RTP.Packet.serialize(%RTP.Packet{header: header, payload: buffer.payload},
        align_to: state.alignment
      )

    buffer = %Buffer{payload: payload, metadata: metadata}
    {{:ok, buffer: {:output, buffer}}, state}
  end

  @impl true
  def handle_other(:send_stats, _ctx, state) do
    stats = get_stats(state)
    # Reset the flag so an idle period reports :no_stats next time.
    state = %{state | any_buffer_sent?: false}
    {{:ok, notify: {:outband_stats, stats}}, state}
  end

  # Returns :no_stats when no buffer passed through since the last report.
  @spec get_stats(State.t()) :: map() | :no_stats
  defp get_stats(%State{any_buffer_sent?: false}), do: :no_stats
  defp get_stats(%State{stats_acc: stats}), do: stats

  # Folds one outgoing buffer into the accumulated sender statistics.
  defp update_stats(%Buffer{payload: payload, metadata: metadata}, state) do
    %{
      sender_octet_count: octet_count,
      sender_packet_count: packet_count
    } = state.stats_acc

    updated_stats = %{
      clock_rate: state.stats_acc.clock_rate,
      sender_octet_count: octet_count + Payload.size(payload),
      sender_packet_count: packet_count + 1,
      timestamp: Time.vm_time(),
      rtp_timestamp: metadata.rtp.timestamp
    }

    # Bug fix: mark that a buffer was processed. Previously the flag was never
    # set to true, so get_stats/1 always returned :no_stats once handle_other
    # had reset it.
    state
    |> Map.put(:any_buffer_sent?, true)
    |> Map.put(:stats_acc, updated_stats)
  end
end
|
lib/membrane/rtp/outbound_packet_tracker.ex
| 0.868493
| 0.452052
|
outbound_packet_tracker.ex
|
starcoder
|
defmodule Confex do
  @moduledoc File.read!("README.md")

  alias Confex.ConfigSourceable, as: Source

  @doc """
  Gets an optional string from the source.
  If the `key` does not exist in the source, returns `nil`.
  ## Examples
      iex> source = Confex.Sources.Map.new(str: "value", int: 123)
      iex> Confex.get_string(source, "str")
      "value"
      iex> Confex.get_string(source, "int")
      "123"
      iex> Confex.get_string(source, "not here")
      nil
  """
  def get_string(source, key) do
    case Source.get(source, key) do
      nil -> nil
      value -> to_string(value)
    end
  end

  @doc """
  Fetches the value for a specific `key` as a string and returns it in a tuple.
  If the `key` does not exist in the source, returns `:error`.
  ## Examples
      iex> source = Confex.Sources.Map.new(str: "value", int: 123)
      iex> Confex.fetch_string(source, "str")
      {:ok, "value"}
      iex> Confex.fetch_string(source, "int")
      {:ok, "123"}
      iex> Confex.fetch_string(source, "not here")
      :error
  """
  def fetch_string(source, key) do
    case get_string(source, key) do
      nil -> :error
      str -> {:ok, str}
    end
  end

  @doc """
  Fetches the value for a specific `key` as a string.
  If the `key` does not exist in the source, a `RuntimeError` is raised.
  ## Examples
      iex> source = Confex.Sources.Map.new(str: "value", int: 123)
      iex> Confex.fetch_string!(source, "str")
      "value"
      iex> Confex.fetch_string!(source, "int")
      "123"
      iex> Confex.fetch_string!(source, "not here")
      ** (RuntimeError) key "not here" not found in configuration
  """
  def fetch_string!(source, key) do
    case get_string(source, key) do
      nil -> raise_not_found(key)
      str -> str
    end
  end

  @doc """
  Gets an optional integer from the source.
  If the `key` does not exist in the source or the value cannot be parsed as
  an integer, returns `nil`.
  ## Examples
      iex> source = Confex.Sources.Map.new(str: "123", int: 456, not_int: "garbage")
      iex> Confex.get_int(source, "str")
      123
      iex> Confex.get_int(source, "int")
      456
      iex> Confex.get_int(source, "not here")
      nil
      iex> Confex.get_int(source, "not_int")
      nil
  """
  def get_int(source, key) do
    case fetch_int(source, key) do
      {:ok, int} -> int
      _ -> nil
    end
  end

  @doc """
  Fetches the value for a specific `key` as an integer and returns it in a tuple.
  If the `key` does not exist in the source, returns `:error`.
  If the `key` exists but could not be parsed as an integer, returns
  `{:error, :unparseable, value}`.
  ## Examples
      iex> source = Confex.Sources.Map.new(str: "123", int: 456, not_int: "garbage")
      iex> Confex.fetch_int(source, "str")
      {:ok, 123}
      iex> Confex.fetch_int(source, "int")
      {:ok, 456}
      iex> Confex.fetch_int(source, "not here")
      :error
      iex> Confex.fetch_int(source, "not_int")
      {:error, :unparseable, "garbage"}
  """
  def fetch_int(source, key) do
    # Dispatch on the value's shape. Binaries are accepted only when they
    # parse fully as an integer (no trailing characters), matching the
    # previous `{int, ""} = Integer.parse(value)` behavior but without using
    # try/rescue for control flow. As before, a value of any other type
    # (neither nil, integer, nor binary) raises.
    case Source.get(source, key) do
      nil ->
        :error

      value when is_integer(value) ->
        {:ok, value}

      value when is_binary(value) ->
        case Integer.parse(value) do
          {int, ""} -> {:ok, int}
          _ -> {:error, :unparseable, value}
        end
    end
  end

  @doc """
  Fetches the value for a specific `key` as an integer.
  If the `key` does not exist in the source or the value could not be parsed
  as an integer, a `RuntimeError` is raised.
  ## Examples
      iex> source = Confex.Sources.Map.new(str: "123", int: 456, not_int: "garbage")
      iex> Confex.fetch_int!(source, "str")
      123
      iex> Confex.fetch_int!(source, "int")
      456
      iex> Confex.fetch_int!(source, "not here")
      ** (RuntimeError) key "not here" not found in configuration
      iex> source = Confex.Sources.Map.new(str: "123", int: 456, not_int: "garbage")
      iex> Confex.fetch_int!(source, "not_int")
      ** (RuntimeError) key "not_int" with value \"garbage\" is not an integer
  """
  def fetch_int!(source, key) do
    case fetch_int(source, key) do
      {:ok, int} ->
        int

      :error ->
        raise_not_found(key)

      {:error, :unparseable, value} ->
        raise "key \"#{key}\" with value \"#{value}\" is not an integer"
    end
  end

  # Shared "missing key" error used by the bang variants.
  defp raise_not_found(key) do
    raise "key \"#{key}\" not found in configuration"
  end
end
|
lib/confex.ex
| 0.842004
| 0.404919
|
confex.ex
|
starcoder
|
defmodule TextDelta.Operation do
  @moduledoc """
  Operations represent a smallest possible change applicable to a text.
  In case of text, there are exactly 3 possible operations we might want to
  perform:
  - `t:TextDelta.Operation.insert/0`: insert a new piece of text or an embedded
    element
  - `t:TextDelta.Operation.retain/0`: preserve given number of characters in
    sequence
  - `t:TextDelta.Operation.delete/0`: delete given number of characters in
    sequence
  `insert` and `retain` operations can also have optional
  `t:TextDelta.Attributes.t/0` attached to them. This is how Delta manages rich
  text formatting without breaking the [Operational Transformation][ot]
  paradigm.
  [ot]: https://en.wikipedia.org/wiki/Operational_transformation
  """
  alias TextDelta.{Attributes, ConfigurableString}

  @typedoc """
  Insert operation represents an intention to add a text or an embedded element
  to a text state. Text additions are represented with binary strings and
  embedded elements are represented with either an integer or an object.
  Insert also allows us to attach attributes to the element being inserted.
  """
  @type insert ::
          %{insert: element}
          | %{insert: element, attributes: Attributes.t()}

  @typedoc """
  Retain operation represents an intention to keep a sequence of characters
  unchanged in the text. It is always a number and it is always positive.
  In addition to indicating preservation of existing text, retain also allows us
  to change formatting of retained text or element via optional attributes.
  """
  @type retain ::
          %{retain: non_neg_integer}
          | %{retain: non_neg_integer, attributes: Attributes.t()}

  @typedoc """
  Delete operation represents an intention to delete a sequence of characters
  from the text. It is always a number and it is always positive.
  """
  @type delete :: %{delete: non_neg_integer}

  @typedoc """
  An operation. Either `insert`, `retain` or `delete`.
  """
  @type t :: insert | retain | delete

  @typedoc """
  Atom representing type of operation.
  """
  @type type :: :insert | :retain | :delete

  @typedoc """
  The result of comparison operation.
  """
  @type comparison :: :eq | :gt | :lt

  @typedoc """
  An insertable rich text element. Either a piece of text, a number or an embed.
  """
  @type element :: String.t() | integer | map

  @doc """
  Creates a new insert operation.
  Attributes are optional and are ignored if empty map or `nil` is provided.
  ## Examples
  To indicate that we need to insert a text "hello" into the text, we can
  use following insert:
      iex> TextDelta.Operation.insert("hello")
      %{insert: "hello"}
  In addition, we can indicate that "hello" should be inserted with specific
  attributes:
      iex> TextDelta.Operation.insert("hello", %{bold: true, color: "magenta"})
      %{insert: "hello", attributes: %{bold: true, color: "magenta"}}
  We can also insert non-text objects, such as an image:
      iex> TextDelta.Operation.insert(%{img: "me.png"}, %{alt: "My photo"})
      %{insert: %{img: "me.png"}, attributes: %{alt: "My photo"}}
  """
  @spec insert(element, Attributes.t()) :: insert
  def insert(el, attrs \\ %{})
  # nil and empty attributes are normalized away to keep operations canonical.
  def insert(el, nil), do: %{insert: el}
  def insert(el, attrs) when map_size(attrs) == 0, do: %{insert: el}
  def insert(el, attrs), do: %{insert: el, attributes: attrs}

  @doc """
  Creates a new retain operation.
  Attributes are optional and are ignored if empty map or `nil` is provided.
  ## Examples
  To keep 5 next characters inside the text, we can use the following retain:
      iex> TextDelta.Operation.retain(5)
      %{retain: 5}
  To make those exact 5 characters bold, while keeping them, we can use
  attributes:
      iex> TextDelta.Operation.retain(5, %{bold: true})
      %{retain: 5, attributes: %{bold: true}}
  """
  @spec retain(non_neg_integer, Attributes.t()) :: retain
  def retain(len, attrs \\ %{})
  def retain(len, nil), do: %{retain: len}
  def retain(len, attrs) when map_size(attrs) == 0, do: %{retain: len}
  def retain(len, attrs), do: %{retain: len, attributes: attrs}

  @doc """
  Creates a new delete operation.
  ## Example
  To delete 3 next characters from the text, we can create a following
  operation:
      iex> TextDelta.Operation.delete(3)
      %{delete: 3}
  """
  @spec delete(non_neg_integer) :: delete
  def delete(len)
  def delete(len), do: %{delete: len}

  @doc """
  Returns atom representing type of the given operation.
  ## Example
      iex> TextDelta.Operation.type(%{retain: 5, attributes: %{bold: true}})
      :retain
  """
  @spec type(t) :: type
  def type(op)
  def type(%{insert: _}), do: :insert
  def type(%{retain: _}), do: :retain
  def type(%{delete: _}), do: :delete

  @doc """
  Returns length of text affected by a given operation.
  Length for `insert` operations is calculated by counting the length of text
  itself being inserted, length for `retain` or `delete` operations is a length
  of sequence itself. Attributes have no effect over the length.
  ## Examples
  For text inserts it is a length of text itself:
      iex> TextDelta.Operation.length(%{insert: "hello!"})
      6
  For embed inserts, however, length is always 1:
      iex> TextDelta.Operation.length(%{insert: 3})
      1
  For retain and deletes, the number itself is the length:
      iex> TextDelta.Operation.length(%{retain: 4})
      4
  """
  @spec length(t) :: non_neg_integer
  def length(op)
  # Clause order matters: the non-string guard must come first so embeds
  # (integers/maps) get length 1 before the string clause is tried.
  def length(%{insert: el}) when not is_bitstring(el), do: 1
  def length(%{insert: str}), do: ConfigurableString.length(str)
  def length(%{retain: len}), do: len
  def length(%{delete: len}), do: len

  @doc """
  Compares the length of two operations.
  ## Example
      iex> TextDelta.Operation.compare(%{insert: "hello!"}, %{delete: 3})
      :gt
  """
  @spec compare(t, t) :: comparison
  def compare(op_a, op_b) do
    # __MODULE__.length/1 is used to avoid clashing with Kernel.length/1.
    len_a = __MODULE__.length(op_a)
    len_b = __MODULE__.length(op_b)

    cond do
      len_a > len_b -> :gt
      len_a < len_b -> :lt
      true -> :eq
    end
  end

  @doc """
  Splits operations into two halves around the given index.
  Text `insert` is split via slicing the text itself, `retain` or `delete` is
  split by subtracting the sequence number. Attributes are preserved during
  splitting. This is mostly used for normalisation of deltas during iteration.
  ## Examples
  Text `inserts` are split via slicing the text itself:
      iex> TextDelta.Operation.slice(%{insert: "hello"}, 3)
      {%{insert: "hel"}, %{insert: "lo"}}
  `retain` and `delete` are split by subtracting the sequence number:
      iex> TextDelta.Operation.slice(%{retain: 5}, 2)
      {%{retain: 2}, %{retain: 3}}
  """
  @spec slice(t, non_neg_integer) :: {t, t}
  def slice(op, idx)

  def slice(%{insert: str} = op, idx) when is_bitstring(str) do
    {part_one, part_two} = ConfigurableString.split_at(str, idx)
    {Map.put(op, :insert, part_one), Map.put(op, :insert, part_two)}
  end

  # Embed inserts (length 1) cannot be split; keep the embed and pair it with
  # an empty string insert. Must come after the string clause above.
  def slice(%{insert: _} = op, _) do
    {op, %{insert: ""}}
  end

  def slice(%{retain: op_len} = op, idx) do
    {Map.put(op, :retain, idx), Map.put(op, :retain, op_len - idx)}
  end

  def slice(%{delete: op_len} = op, idx) do
    {Map.put(op, :delete, idx), Map.put(op, :delete, op_len - idx)}
  end

  @doc """
  Attempts to compact two given operations into one.
  If successful, will return a list with just a single, compacted operation. In
  any other case both operations will be returned back unchanged.
  Compacting works by combining same operations with the same attributes
  together. Easiest way to think about this function is that it produces an
  exact opposite effect of `TextDelta.Operation.slice/2`.
  Text `insert` is compacted by concatenating strings, `retain` or `delete` is
  compacted by adding the sequence numbers. Only operations with the same
  attribute set are compacted. This is mostly used to keep deltas short and
  canonical.
  ## Examples
  Text inserts are compacted into a single insert:
      iex> TextDelta.Operation.compact(%{insert: "hel"}, %{insert: "lo"})
      [%{insert: "hello"}]
  Retains and deletes are compacted by adding their sequence numbers:
      iex> TextDelta.Operation.compact(%{retain: 2}, %{retain: 3})
      [%{retain: 5}]
  """
  @spec compact(t, t) :: [t]
  def compact(op_a, op_b)

  # Clause order is behavior-critical throughout compact/2: attribute-equal
  # clauses must precede the bare (map_size == 1) clauses, and the embed
  # guards must precede string concatenation, with the catch-all last.
  def compact(%{retain: len_a, attributes: attrs_a}, %{
        retain: len_b,
        attributes: attrs_b
      })
      when attrs_a == attrs_b do
    [retain(len_a + len_b, attrs_a)]
  end

  def compact(%{retain: len_a} = a, %{retain: len_b} = b)
      when map_size(a) == 1 and map_size(b) == 1 do
    [retain(len_a + len_b)]
  end

  # Embeds are never concatenated — return both operations unchanged.
  def compact(%{insert: el_a} = op_a, %{insert: _} = op_b)
      when not is_bitstring(el_a) do
    [op_a, op_b]
  end

  def compact(%{insert: _} = op_a, %{insert: el_b} = op_b)
      when not is_bitstring(el_b) do
    [op_a, op_b]
  end

  def compact(%{insert: str_a, attributes: attrs_a}, %{
        insert: str_b,
        attributes: attrs_b
      })
      when attrs_a == attrs_b do
    [insert(str_a <> str_b, attrs_a)]
  end

  def compact(%{insert: str_a} = op_a, %{insert: str_b} = op_b)
      when map_size(op_a) == 1 and map_size(op_b) == 1 do
    [insert(str_a <> str_b)]
  end

  # Deletes carry no attributes, so they always compact.
  def compact(%{delete: len_a}, %{delete: len_b}) do
    [delete(len_a + len_b)]
  end

  def compact(op_a, op_b), do: [op_a, op_b]

  @doc """
  Checks if given operation is trimmable.
  Technically only `retain` operations are trimmable, but the creator of this
  library didn't feel comfortable exposing that knowledge outside of this
  module.
  ## Example
      iex> TextDelta.Operation.trimmable?(%{retain: 3})
      true
  """
  @spec trimmable?(t) :: boolean
  def trimmable?(op) do
    Map.has_key?(op, :retain) and !Map.has_key?(op, :attributes)
  end
end
|
lib/text_delta/operation.ex
| 0.947466
| 0.761538
|
operation.ex
|
starcoder
|
defmodule Draconic.Program do
  @moduledoc """
  Draconic is a DSL for building command line programs. It allows you to define your
  CLI via simplistic macro functions that get compiled into simple modules used at
  run time to execute the desired command users enter. It's built on top of the
  built in `OptionParser` so its flag definitions are a remnant of those supported
  by `OptionParser`. Although the goal was to unify aspects of an option flag as
  singular unit.

  With Draconic commands are defined as their own modules, and as behaviors you
  just implement the run method that will be invoked if the command is given.
  Associating these commands to a program is a simple call to a macro passing in the
  module defining the command.

  Commands in Draconic function like a tree, supporting nested (or "sub") commands
  with their own set of flags. Flags are parsed from top to bottom, following the
  path of commands, so global flags are parsed, the flags for the first command,
  the second and down to the nth. So the lowest command executed (the only one the
  `run` method will be called for) has access to all flags defined before it.

  #### Examples

  Define a program.

      defmodule CSVParser.CLI do
        use Draconic.Program

        alias CSVParser.CLI.Commands

        name "csv_parser"

        command Commands.Mapper
        command Commands.Lister
      end

  Configure your escript program.

      defmodule CSVParser.MixProject do
        # ...

        def escript do
          [
            main_module: CSVParser.CLI
          ]
        end

        # ...
      end

  Then just execute your program!

      csv_parser list --input example.csv
  """

  alias __MODULE__
  alias Draconic.Command
  alias Draconic.Flag

  @typedoc "A list of string values passed into the program."
  @type argv() :: [String.t()]

  @typedoc "The return value from CLI, `0` (or `nil`) or a non-zero error code."
  @type status_code() :: integer() | nil

  @typedoc """
  Contains the definition of a program, from things like its description to a
  map of commands available to be executed and even what default command should
  be executed if none is given. This struct is not only used to execute a program
  but it's also provided to a HelpRenderer which can then render help pages however
  it decides to do so.
  """
  # NOTE(review): the type previously omitted :version and :default_command
  # (both present in the struct) and declared help_command as the literal
  # `true` even though it may be set to false.
  @type t() :: %Program{
          module: module(),
          name: String.t(),
          version: String.t(),
          usage: String.t() | nil,
          description: String.t(),
          commands: [Command.t()],
          flags: Flag.flag_definition(),
          default_command: String.t(),
          help_renderer: module(),
          help_command: boolean()
        }

  @doc false
  defstruct module: nil,
            name: "",
            version: "",
            commands: [],
            usage: nil,
            description: "",
            flags: %{},
            default_command: "help",
            help_renderer: Draconic.BasicHelp,
            help_command: true

  @doc false
  defmacro __using__(_) do
    quote do
      import Draconic.Program

      @name "PROGRAM"
      @commands []
      @usage nil
      @description ""
      @flags %{}
      @help_renderer Draconic.BasicHelp
      @help_flag_name {:help, :h}
      @help_command true
      @default_command "help"
      @version "1.0"

      @before_compile Draconic.Program
    end
  end

  @doc "Set the name of the program, used in auto-usage generation."
  @spec name(String.t()) :: Macro.t()
  defmacro name(name) do
    quote do
      @name unquote(name)
    end
  end

  @doc "Set the programs description, used in help rendering."
  @spec desc(String.t()) :: Macro.t()
  defmacro desc(description) do
    quote do
      @description unquote(description)
    end
  end

  @doc """
  Explicitly set a usage string. This may or may not be considered by
  the help renderer.
  """
  @spec usage(String.t()) :: Macro.t()
  defmacro usage(usage) do
    quote do
      @usage unquote(usage)
    end
  end

  @doc "Define a command to use when no other command is given."
  @spec default_command(String.t()) :: Macro.t()
  defmacro default_command(cmd) do
    quote do
      @default_command unquote(cmd)
    end
  end

  @doc "Define a version for the program."
  @spec version(String.t()) :: Macro.t()
  defmacro version(vsn) do
    quote do
      @version unquote(vsn)
    end
  end

  @doc "Add a command module to the program."
  @spec command(module()) :: Macro.t()
  defmacro command(mod) do
    quote do
      @commands [unquote(mod) | @commands]
    end
  end

  @doc """
  Creates a string flag with the given name, description and default value associated
  the currently defined program.
  """
  # NOTE(review): `\\ nil` added for consistency with int/float/bool_flag;
  # existing three-argument callers are unaffected.
  @spec string_flag(Flag.flag_name(), String.t(), Flag.flag_type()) :: Macro.t()
  defmacro string_flag(name, description, default \\ nil) do
    quote do
      flag(unquote(name), :string, unquote(description), unquote(default))
    end
  end

  @doc """
  Creates an integer flag with the given name, description and default value associated
  the currently defined program.
  """
  @spec int_flag(Flag.flag_name(), String.t(), Flag.flag_type()) :: Macro.t()
  defmacro int_flag(name, description, default \\ nil) do
    quote do
      flag(unquote(name), :integer, unquote(description), unquote(default))
    end
  end

  @doc """
  Creates a float flag with the given name, description and default value associated
  the currently defined program.
  """
  @spec float_flag(Flag.flag_name(), String.t(), Flag.flag_type()) :: Macro.t()
  defmacro float_flag(name, description, default \\ nil) do
    quote do
      flag(unquote(name), :float, unquote(description), unquote(default))
    end
  end

  @doc """
  Creates a bool flag with the given name, description and default value associated
  the currently defined program.
  """
  @spec bool_flag(Flag.flag_name(), String.t(), Flag.flag_type()) :: Macro.t()
  defmacro bool_flag(name, description, default \\ nil) do
    quote do
      flag(unquote(name), :boolean, unquote(description), unquote(default))
    end
  end

  @doc """
  Creates a flag of the given type, with the given name, description and default value
  (if provided) and associates it to the root program.
  """
  @spec flag(Flag.flag_name(), Flag.flag_kind(), String.t(), Flag.flag_type()) :: Macro.t()
  defmacro flag(name, type, description, default \\ nil) do
    # A flag name is either `{name, alias}` or a bare name with no alias.
    {fname, falias} =
      case name do
        {n, a} -> {n, a}
        x -> {x, nil}
      end

    quote do
      @flags Map.put(@flags, unquote(fname), %Flag{
               name: unquote(fname),
               alias: unquote(falias),
               type: unquote(type),
               description: unquote(description),
               default: unquote(default)
             })
    end
  end

  @doc "Assign a help renderer module for this program, defaults to Draconic.BasicHelp"
  # NOTE(review): spec previously referenced the non-existent `Macrot.t()`.
  @spec help_renderer(module()) :: Macro.t()
  defmacro help_renderer(mod) do
    quote do
      @help_renderer unquote(mod)
    end
  end

  @doc "Provide a name (and potential alias) for the help command, defaults to `{:help, :h}`"
  @spec help_flag(Flag.flag_name()) :: Macro.t()
  defmacro help_flag(name) do
    quote do
      @help_flag_name unquote(name)
    end
  end

  @doc """
  Turn on, or off, the help command. If this value is `true` (default value) then running
  the program with the command "help" (or the value you set for it) will render the help
  page. Even if this is turned off, you can still use "help" (or a value you give for it)
  as the default command.
  """
  @spec provide_help_command(boolean()) :: Macro.t()
  defmacro provide_help_command(state) do
    quote do
      @help_command unquote(state)
    end
  end

  @doc false
  defmacro __before_compile__(_env) do
    quote do
      @doc "Return a map of command specs associated with this program, keyed by command name."
      @spec commands() :: %{optional(String.t()) => Draconic.Command.t()}
      def commands do
        @commands
        |> Enum.map(fn mod -> {mod.name(), mod.command_spec()} end)
        |> Enum.into(%{})
      end

      @doc "Return the long name of the help flag."
      @spec help_flag_name() :: atom()
      def help_flag_name do
        case @help_flag_name do
          {name, _} -> name
          x -> x
        end
      end

      @doc "Return a map of flags associated with this program, including the help flag."
      @spec flags() :: Draconic.Flag.flag_definition()
      def flags do
        {help_name, help_alias} = @help_flag_name

        help = %Flag{
          name: help_name,
          alias: help_alias,
          type: :boolean,
          description: "Print this page, providing useful information about the program.",
          default: false
        }

        Map.put(@flags, help_name, help)
      end

      @doc "Return the name of the default command to run if no other command is given."
      @spec default_command() :: String.t()
      def default_command, do: @default_command

      @doc "Return the module that will render the help page for this command."
      @spec help_renderer() :: module()
      def help_renderer, do: @help_renderer

      @doc "Return whether the help command is enabled for this program."
      @spec help_command() :: boolean()
      def help_command, do: @help_command

      @doc "Return the usage of the program, used for help rendering purposes."
      @spec usage() :: String.t() | nil
      def usage, do: @usage

      @doc "Return the name of the program."
      @spec name() :: String.t()
      def name, do: @name

      @doc "Return the long description of the program."
      @spec description() :: String.t()
      def description, do: @description

      @doc "Return the version of the CLI."
      @spec version() :: String.t()
      def version, do: @version

      @doc "Return a detailed set of information defining the program."
      @spec program_spec() :: Draconic.Program.t()
      def program_spec do
        %Program{
          module: __MODULE__,
          name: name(),
          version: version(),
          description: description(),
          usage: usage(),
          commands: commands(),
          flags: flags(),
          default_command: default_command(),
          help_renderer: help_renderer(),
          help_command: help_command()
        }
      end

      @doc "Entry point for the program, use this as the target of escripts `main_module`."
      @spec main(Draconic.Program.argv()) :: Draconic.Program.status_code()
      def main(args) do
        Draconic.Executor.execute(program_spec(), args)
      end
    end
  end
end
|
lib/draconic/program.ex
| 0.836621
| 0.464112
|
program.ex
|
starcoder
|
defmodule SPARQL.Query do
  @moduledoc """
  A structure for SPARQL queries.
  """

  defstruct [
    :base,
    :prefixes,
    :form,
    :expr,
    # This might be only temporary until we have a functionally complete
    # SPARQL language decoder and encoder.
    :query_string
  ]

  alias __MODULE__

  # NOTE(review): this was previously `@type t :: module`, which describes a
  # module atom; values of this type are `%SPARQL.Query{}` structs everywhere
  # it is used.
  @type t :: %__MODULE__{}

  @type forms :: :select | :construct | :describe | :ask

  @doc """
  Creates a `SPARQL.Query` struct.

  An existing `SPARQL.Query` struct is returned unchanged; a binary is
  translated via `translate/2`.

  See `translate/2` for more information about default prefixes, all of which
  applies also to this function.
  """
  def new(query, options \\ [])
  def new(%Query{} = query, _options), do: query
  def new(query, options) when is_binary(query), do: translate(query, options)

  @doc """
  Creates a `SPARQL.Query` struct from a SPARQL language string.

  By default the configured `RDF.default_prefixes/0` will be automatically
  defined for the query, so that you can use these prefixes without having them
  defined manually in your query.

  You can overwrite these default prefixes and define another set of prefixes
  with the `default_prefixes` option.

  If you don't want to use default prefixes for the given query you
  can pass `nil` or an empty map for the `default_prefixes` option.

  If you don't want to use default prefixes at all, just don't configure any and
  set the `rdf` configuration flag `use_standard_prefixes` to `false`.

  See the [API documentation of RDF.ex](https://hexdocs.pm/rdf/RDF.html)
  for more information about `RDF.default_prefixes/0` and `RDF.standard_prefixes/0`
  and how to configure them.
  """
  def translate(string, options \\ []) do
    # The prefix declarations are prepended to the query string before
    # decoding; on decode failure the `{:error, _}` tuple falls through.
    with prefixes =
           options
           |> Keyword.get(:default_prefixes, :default_prefixes)
           |> prefixes()
           |> encode_prefixes(),
         {:ok, query} <-
           SPARQL.Language.Decoder.decode(prefixes <> "\n" <> string, options) do
      query
    end
  end

  defp prefixes(nil), do: RDF.PrefixMap.new()
  defp prefixes(:default_prefixes), do: RDF.default_prefixes()
  defp prefixes(prefixes), do: RDF.PrefixMap.new(prefixes)

  # Renders a prefix map as SPARQL `PREFIX` declarations, one per line.
  defp encode_prefixes(prefixes) do
    prefixes
    |> Stream.map(fn {prefix, iri} ->
      "PREFIX #{to_string(prefix)}: <#{to_string(iri)}>"
    end)
    |> Enum.join("\n")
  end

  defimpl String.Chars do
    def to_string(query) do
      query.query_string
    end
  end
end
|
lib/sparql/query/query.ex
| 0.758153
| 0.563018
|
query.ex
|
starcoder
|
defmodule EnumX do
  @moduledoc ~S"""
  Some enumeration extensions.
  """

  @doc ~S"""
  Reduces the enumerable until `fun` returns `{:error, reason}`.

  The return value for `fun` is expected to be

    * `{:ok, acc}` to continue the reduction with `acc` as the new
      accumulator or
    * `{:error, acc}` to halt the reduction and return `acc` as the return
      value of this function

  ## Examples

  ```elixir
  iex> EnumX.reduce_while(1..100, 0, fn x, acc ->
  ...>   if x < 3, do: {:ok, acc + x}, else: {:error, acc}
  ...> end)
  {:error, 3}
  ```
  """
  @spec reduce_while(
          Enum.t(),
          any(),
          (Enum.element(), any() -> {:ok, any()} | {:error, any()})
        ) :: {:ok, any} | {:error, any}
  def reduce_while(enumerable, acc, fun) do
    # Drive the raw Enumerable protocol so the reduction can halt as soon as
    # `fun` reports an error; the tagged tuple doubles as the accumulator.
    step = fn element, {:ok, current} ->
      case fun.(element, current) do
        {:ok, _} = next -> {:cont, next}
        {:error, _} = failure -> {:halt, failure}
      end
    end

    enumerable
    |> Enumerable.reduce({:cont, {:ok, acc}}, step)
    |> elem(1)
  end

  @doc ~S"""
  Returns a list where each item is the result of invoking
  `fun` on each corresponding item of `enumerable`.

  For maps, the function expects a key-value tuple.

  ## Examples

  ```elixir
  iex> EnumX.map([1, 2, 3], fn x -> {:ok, x * 2} end)
  {:ok, [2, 4, 6]}

  iex> EnumX.map([a: 1, b: 2], fn {k, v} -> {:ok, {k, -v}} end)
  {:ok, [a: -1, b: -2]}
  ```
  """
  @spec map(Enum.t(), (Enum.element() -> {:ok, any()} | {:error, any})) ::
          {:ok, list()} | {:error, any}
  def map(enumerable, fun) do
    # Collect mapped values in reverse (O(1) prepend), reversing once at the
    # end; the first `{:error, _}` from `fun` is returned unchanged.
    collector = fn element, collected ->
      case fun.(element) do
        {:ok, mapped} -> {:ok, [mapped | collected]}
        {:error, _} = failure -> failure
      end
    end

    case reduce_while(enumerable, [], collector) do
      {:ok, collected} -> {:ok, Enum.reverse(collected)}
      {:error, _} = failure -> failure
    end
  end

  @doc ~S"""
  Invokes the given `fun` for each item in the `enumerable`.

  Returns `:ok` if all calls return `:ok`, returns `{:error, term}` if any fail.

  ## Examples

  ```elixir
  iex> EnumX.each([1, 2, 3], fn x -> IO.puts(to_string(x)) end)
  #=> "1"
  #=> "2"
  #=> "3"
  :ok
  ```

  Will halt on first error:

  ```elixir
  iex> EnumX.each([1, 2, 3], fn x -> if x != 2, do: IO.puts(to_string(x)), else: {:error, :is_two} end)
  #=> "1"
  {:error, :is_two}
  ```
  """
  @spec each(Enum.t(), (Enum.element() -> :ok | {:error, term})) :: :ok | {:error, term}
  def each(enumerable, fun) do
    # Reuse reduce_while with a dummy accumulator, then unwrap the tag so the
    # caller sees a bare `:ok` on success.
    result =
      reduce_while(enumerable, :ok, fn element, :ok ->
        case fun.(element) do
          :ok -> {:ok, :ok}
          {:error, _} = failure -> failure
        end
      end)

    case result do
      {:ok, :ok} -> :ok
      {:error, _} = failure -> failure
    end
  end
end
|
lib/common_x/enum_x.ex
| 0.899945
| 0.893216
|
enum_x.ex
|
starcoder
|
defmodule Gettext.Backend do
  @moduledoc """
  Behaviour that defines the macros that a Gettext backend has to implement.
  """

  @doc """
  Default handling for missing bindings.

  This function is called when there are missing bindings in a translation. It
  takes a `Gettext.MissingBindingsError` struct and the translation with the
  wrong bindings left as is with the `%{}` syntax.

  For example, if something like this is called:

      MyApp.Gettext.gettext("Hello %{name}, your favorite color is %{color}", name: "Jane", color: "blue")

  and our `it/LC_MESSAGES/default.po` looks like this:

      msgid "Hello %{name}, your favorite color is %{color}"
      msgstr "Ciao %{name}, il tuo colore preferito è %{colour}" # (typo)

  then Gettext will call:

      MyApp.Gettext.handle_missing_bindings(exception, "Ciao Jane, il tuo colore preferito è %{colour}")

  where `exception` is a struct that looks like this:

      %Gettext.MissingBindingsError{
        backend: MyApp.Gettext,
        domain: "default",
        locale: "it",
        msgid: "Ciao %{name}, il tuo colore preferito è %{colour}",
        bindings: [:colour],
      }

  The return value of the `c:handle_missing_bindings/2` callback is used as the
  translated string that the translation macros and functions return.

  The default implementation for this function uses `Logger.error/1` to warn
  about the missing binding and returns the translated message with the
  incomplete bindings.

  This function can be overridden. For example, to raise when there are missing
  bindings:

      def handle_missing_bindings(exception, _incomplete) do
        raise exception
      end

  """
  @callback handle_missing_bindings(Gettext.MissingBindingsError.t(), binary) ::
              binary | no_return

  @doc """
  Default handling for translations with a missing translation.

  When a Gettext function/macro is called with a string to translate into a locale but that
  locale doesn't provide a translation for that string, this callback is invoked. `msgid` is the
  string that Gettext tried to translate.

  This function should return `{:ok, translated}` if a translation can be fetched or constructed
  for the given string, or `{:default, msgid}` otherwise.
  """
  @callback handle_missing_translation(
              Gettext.locale(),
              domain :: String.t(),
              msgid :: String.t(),
              bindings :: map()
            ) :: {:ok | :default, String.t()}

  @doc """
  Default handling for plural translations with a missing translation.

  Same as `c:handle_missing_translation/4`, but for plural translations. In this case, `n` is
  the number used for pluralizing the translated string.
  """
  @callback handle_missing_plural_translation(
              Gettext.locale(),
              domain :: String.t(),
              msgid :: String.t(),
              msgid_plural :: String.t(),
              n :: non_neg_integer(),
              bindings :: map()
            ) :: {:ok | :default, String.t()}

  @doc """
  Translates the given `msgid` with a given context (`msgctxt`) in the given `domain`.

  `bindings` is a map of bindings to support interpolation.

  See also `Gettext.dpgettext/5`.
  """
  @macrocallback dpgettext(
                   domain :: Macro.t(),
                   msgctxt :: String.t(),
                   msgid :: String.t(),
                   bindings :: Macro.t()
                 ) :: Macro.t()

  @doc """
  Same as `dpgettext(domain, msgctxt, msgid, %{})`.

  See also `Gettext.dpgettext/5`.
  """
  @macrocallback dpgettext(domain :: Macro.t(), msgctxt :: String.t(), msgid :: String.t()) ::
                   Macro.t()

  @doc """
  Translates the given `msgid` in the given `domain`.

  `bindings` is a map of bindings to support interpolation.

  See also `Gettext.dgettext/4`.
  """
  @macrocallback dgettext(domain :: Macro.t(), msgid :: String.t(), bindings :: Macro.t()) ::
                   Macro.t()

  @doc """
  Same as `dgettext(domain, msgid, %{})`.

  See also `Gettext.dgettext/4`.
  """
  @macrocallback dgettext(domain :: Macro.t(), msgid :: String.t()) :: Macro.t()

  @doc """
  Translates the given `msgid` with the given context (`msgctxt`).

  `bindings` is a map of bindings to support interpolation.

  See also `Gettext.pgettext/4`.
  """
  @macrocallback pgettext(msgctxt :: String.t(), msgid :: String.t(), bindings :: Macro.t()) ::
                   Macro.t()

  @doc """
  Same as `pgettext(msgctxt, msgid, %{})`.

  See also `Gettext.pgettext/4`.
  """
  @macrocallback pgettext(msgctxt :: String.t(), msgid :: String.t()) :: Macro.t()

  @doc """
  Same as `dgettext("default", msgid, %{})`, but will use a per-backend
  configured default domain if provided.

  See also `Gettext.gettext/3`.
  """
  @macrocallback gettext(msgid :: String.t(), bindings :: Macro.t()) :: Macro.t()

  @doc """
  Same as `gettext(msgid, %{})`.

  See also `Gettext.gettext/3`.
  """
  @macrocallback gettext(msgid :: String.t()) :: Macro.t()

  @doc """
  Translates the given plural translation (`msgid` + `msgid_plural`) with the given context (`msgctxt`)
  in the given `domain`.

  `n` is an integer used to determine how to pluralize the
  translation. `bindings` is a map of bindings to support interpolation.

  See also `Gettext.dpngettext/7`.
  """
  @macrocallback dpngettext(
                   domain :: Macro.t(),
                   msgctxt :: String.t(),
                   msgid :: String.t(),
                   msgid_plural :: String.t(),
                   n :: Macro.t(),
                   bindings :: Macro.t()
                 ) :: Macro.t()

  @doc """
  Same as `dpngettext(domain, msgctxt, msgid, msgid_plural, n, %{})`.

  See also `Gettext.dpngettext/7`.
  """
  @macrocallback dpngettext(
                   domain :: Macro.t(),
                   msgctxt :: String.t(),
                   msgid :: String.t(),
                   msgid_plural :: String.t(),
                   n :: Macro.t()
                 ) :: Macro.t()

  @doc """
  Translates the given plural translation (`msgid` + `msgid_plural`) in the
  given `domain`.

  `n` is an integer used to determine how to pluralize the
  translation. `bindings` is a map of bindings to support interpolation.

  See also `Gettext.dngettext/6`.
  """
  @macrocallback dngettext(
                   domain :: Macro.t(),
                   msgid :: String.t(),
                   msgid_plural :: String.t(),
                   n :: Macro.t(),
                   bindings :: Macro.t()
                 ) :: Macro.t()

  @doc """
  Same as `dngettext(domain, msgid, msgid_plural, n, %{})`.

  See also `Gettext.dngettext/6`.
  """
  @macrocallback dngettext(
                   domain :: Macro.t(),
                   msgid :: String.t(),
                   msgid_plural :: String.t(),
                   n :: Macro.t()
                 ) :: Macro.t()

  @doc """
  Translates the given plural translation (`msgid` + `msgid_plural`) with the given context (`msgctxt`).

  `n` is an integer used to determine how to pluralize the
  translation. `bindings` is a map of bindings to support interpolation.

  See also `Gettext.pngettext/6`.
  """
  @macrocallback pngettext(
                   msgctxt :: String.t(),
                   msgid :: String.t(),
                   msgid_plural :: String.t(),
                   n :: Macro.t(),
                   bindings :: Macro.t()
                 ) :: Macro.t()

  @doc """
  Same as `pngettext(msgctxt, msgid, msgid_plural, n, %{})`.

  See also `Gettext.pngettext/6`.
  """
  @macrocallback pngettext(
                   msgctxt :: String.t(),
                   msgid :: String.t(),
                   msgid_plural :: String.t(),
                   n :: Macro.t()
                 ) :: Macro.t()

  @doc """
  Same as `dngettext("default", msgid, msgid_plural, n, bindings)`, but will
  use a per-backend configured default domain if provided.

  See also `Gettext.ngettext/5`.
  """
  @macrocallback ngettext(
                   msgid :: String.t(),
                   msgid_plural :: String.t(),
                   n :: Macro.t(),
                   bindings :: Macro.t()
                 ) :: Macro.t()

  @doc """
  Same as `ngettext(msgid, msgid_plural, n, %{})`.

  See also `Gettext.ngettext/5`.
  """
  @macrocallback ngettext(msgid :: String.t(), msgid_plural :: String.t(), n :: Macro.t()) ::
                   Macro.t()

  @doc """
  Marks the given translation for extraction and returns it unchanged.

  This macro can be used to mark a translation for extraction when `mix
  gettext.extract` is run. The return value is the given string, so that this
  macro can be used seamlessly in place of the string to extract.

  ## Examples

      MyApp.Gettext.dgettext_noop("errors", "Error found!")
      #=> "Error found!"

  """
  @macrocallback dgettext_noop(domain :: String.t(), msgid :: String.t()) :: Macro.t()

  @doc """
  Same as `dgettext_noop("default", msgid)`.
  """
  @macrocallback gettext_noop(msgid :: String.t()) :: Macro.t()

  @doc """
  Marks the given translation for extraction and returns
  `{msgid, msgid_plural}`.

  This macro can be used to mark a translation for extraction when `mix
  gettext.extract` is run. The return value of this macro is `{msgid,
  msgid_plural}`.

  ## Examples

      my_fun = fn {msgid, msgid_plural} ->
        # do something with msgid and msgid_plural
      end

      my_fun.(MyApp.Gettext.dngettext_noop("errors", "One error", "%{count} errors"))

  """
  @macrocallback dngettext_noop(
                   domain :: Macro.t(),
                   msgid :: String.t(),
                   msgid_plural :: String.t()
                 ) :: Macro.t()

  @doc """
  Same as `dngettext_noop("default", msgid, msgid_plural)`, but will use a
  per-backend configured default domain if provided.
  """
  @macrocallback ngettext_noop(msgid :: String.t(), msgid_plural :: String.t()) :: Macro.t()

  @doc """
  Stores an "extracted comment" for the next translation.

  This macro can be used to add comments (Gettext refers to such
  comments as *extracted comments*) to the next translation that will
  be extracted. Extracted comments will be prefixed with `#.` in POT
  files.

  Calling this function multiple times will accumulate the comments;
  when another Gettext macro (such as `c:gettext/2`) is called,
  the comments will be extracted and attached to that translation, and
  they will be flushed so as to start again.

  This macro always returns `:ok`.

  ## Examples

      MyApp.Gettext.gettext_comment("The next translation is awesome")
      MyApp.Gettext.gettext_comment("Another comment for the next translation")
      MyApp.Gettext.gettext("The awesome translation")

  """
  @macrocallback gettext_comment(comment :: String.t()) :: :ok
end
|
lib/gettext/backend.ex
| 0.904328
| 0.418756
|
backend.ex
|
starcoder
|
defmodule Sourceror do
@external_resource "README.md"
@moduledoc @external_resource
|> File.read!()
|> String.split("<!-- MDOC !-->")
|> Enum.fetch!(1)
alias Sourceror.TraversalState
@line_fields ~w[closing do end end_of_expression]a
# @start_fields ~w[line do]a
@end_fields ~w[end closing end_of_expression]a
@type comment :: %{
line: integer,
previous_eol_count: integer,
next_eol_count: integer,
text: String.t()
}
@type position :: keyword
@type range :: %{
start: position,
end: position
}
@type patch :: %{
optional(:preserve_indentation) => boolean,
range: range,
change: String.t() | (String.t() -> String.t())
}
@type traversal_function :: (Macro.t(), TraversalState.t() -> {Macro.t(), TraversalState.t()})
@code_module (if Version.match?(System.version(), "~> 1.13.0") do
Code
else
Sourceror.Code
end)
@doc """
A wrapper around `Code.string_to_quoted_with_comments!/2` for compatibility
with pre 1.13 Elixir versions.
"""
defmacro string_to_quoted!(string, opts) do
quote bind_quoted: [code_module: @code_module, string: string, opts: opts] do
code_module.string_to_quoted_with_comments!(string, opts)
end
end
@doc """
A wrapper around `Code.string_to_quoted_with_comments/2` for compatibility
with pre 1.13 Elixir versions.
"""
defmacro string_to_quoted(string, opts) do
quote bind_quoted: [code_module: @code_module, string: string, opts: opts] do
code_module.string_to_quoted_with_comments(string, opts)
end
end
@doc """
A wrapper around `Code.quoted_to_algebra/2` for compatibility with pre 1.13
Elixir versions.
"""
defmacro quoted_to_algebra(quoted, opts) do
quote bind_quoted: [code_module: @code_module, quoted: quoted, opts: opts] do
code_module.quoted_to_algebra(quoted, opts)
end
end
@doc """
Parses the source code into an extended AST suitable for source manipulation
as described in `Code.quoted_to_algebra/2`.
Two additional fields are added to nodes metadata:
* `:leading_comments` - a list holding the comments found *before* the node.
* `:trailing_comments` - a list holding the comments found before the end of
the node. For example, comments right before the `end` keyword.
Comments are the same maps returned by `Code.string_to_quoted_with_comments/2`.
"""
@spec parse_string(String.t()) :: {:ok, Macro.t()} | {:error, term()}
def parse_string(source) do
with {:ok, quoted, comments} <- string_to_quoted(source, to_quoted_opts()) do
{:ok, Sourceror.Comments.merge_comments(quoted, comments)}
end
end
@doc """
Same as `parse_string/1` but raises on error.
"""
@spec parse_string!(String.t()) :: Macro.t()
def parse_string!(source) do
{quoted, comments} = string_to_quoted!(source, to_quoted_opts())
Sourceror.Comments.merge_comments(quoted, comments)
end
defp to_quoted_opts do
[
literal_encoder: &{:ok, {:__block__, &2, [&1]}},
token_metadata: true,
unescape: false,
columns: true,
warn_on_unnecessary_quotes: false
]
end
@doc """
Parses a single expression from the given string. It tries to parse on a
per-line basis.
Returns `{:ok, quoted, rest}` on success or `{:error, source}` on error.
## Examples
iex> ~S"\""
...> 42
...>
...> :ok
...> "\"" |> Sourceror.parse_expression()
{:ok, {:__block__, [trailing_comments: [], leading_comments: [],
token: "42", line: 2, column: 1], [42]}, "\\n:ok"}
## Options
* `:from_line` - The line at where the parsing should start. Defaults to `1`.
"""
@spec parse_expression(String.t(), keyword) ::
{:ok, Macro.t(), String.t()} | {:error, String.t()}
def parse_expression(string, opts \\ []) do
from_line = Keyword.get(opts, :from_line, 1)
lines =
Regex.split(~r/\r\n|\r|\n/, String.trim(string))
|> Enum.drop(from_line - 1)
do_parse_expression(lines, "")
end
defp do_parse_expression([], acc), do: {:error, acc}
defp do_parse_expression([line | rest], acc) do
string = Enum.join([acc, line], "\n")
case parse_string(string) do
# Skip empty lines
{:ok, {:__block__, _, []}} ->
do_parse_expression(rest, string)
{:ok, quoted} ->
{:ok, quoted, Enum.join(rest, "\n")}
{:error, _reason} ->
do_parse_expression(rest, string)
end
end
@doc """
Converts a quoted expression to a string.
The comments line number will be ignored and the line number of the associated
node will be used when formatting the code.
## Options
* `:indent` - how many indentations to insert at the start of each line.
Note that this only prepends the indents without checking the indentation
of nested blocks. Defaults to `0`.
* `:indent_type` - the type of indentation to use. It can be one of `:spaces`,
`:single_space` or `:tabs`. Defaults to `:spaces`.
* `:format` - if set to `:splicing`, if the quoted expression is a list, it
will strip the square brackets. This is useful to print a single element
of a keyword list.
For more options see `Code.format_string!/1` and `Code.quoted_to_algebra/2`.
"""
@spec to_string(Macro.t(), keyword) :: String.t()
def to_string(quoted, opts \\ []) do
indent = Keyword.get(opts, :indent, 0)
line_length = Keyword.get(opts, :line_length, 98)
indent_str =
case Keyword.get(opts, :indent_type, :spaces) do
:spaces -> "\s\s"
:single_space -> "\s"
:tabs -> "\t"
end
extract_comments_opts = [collapse_comments: true, correct_lines: true] ++ opts
{quoted, comments} = Sourceror.Comments.extract_comments(quoted, extract_comments_opts)
to_algebra_opts = Keyword.merge(opts, comments: comments, escape: false)
text =
quoted
|> quoted_to_algebra(to_algebra_opts)
|> Inspect.Algebra.format(line_length)
|> IO.iodata_to_binary()
|> String.split("\n")
|> Enum.map_join("\n", fn line ->
String.duplicate(indent_str, indent) <> line
end)
if is_list(quoted) and opts[:format] == :splicing do
text |> String.slice(1..-2)
else
text
end
end
@doc """
Performs a depth-first post-order traversal of a quoted expression.
See `postwalk/3` for more information.
"""
@spec postwalk(Macro.t(), traversal_function) ::
Macro.t()
def postwalk(quoted, fun) do
{quoted, _} = postwalk(quoted, nil, fun)
quoted
end
@doc """
Performs a depth-first post-order traversal of a quoted expression with an
accumulator.
`fun` is a function that will receive the current node as a first argument and
the traversal state as the second one. It must return a `{quoted, state}`,
in the same way it would return `{quoted, acc}` when using `Macro.postwalk/3`.
The state is a map with the following keys:
* `:acc` - The accumulator. Defaults to `nil` if none is given.
"""
@spec postwalk(Macro.t(), term, traversal_function) ::
{Macro.t(), term}
def postwalk(quoted, acc, fun) do
{quoted, %{acc: acc}} = Macro.traverse(quoted, %TraversalState{acc: acc}, &{&1, &2}, fun)
{quoted, acc}
end
@doc """
Performs a depth-first pre-order traversal of a quoted expression.
See `prewalk/3` for more information.
"""
@spec prewalk(Macro.t(), traversal_function) ::
Macro.t()
def prewalk(quoted, fun) do
{quoted, _} = prewalk(quoted, nil, fun)
quoted
end
@doc """
Performs a depth-first pre-order traversal of a quoted expression with an
accumulator.
`fun` is a function that will receive the current node as a first argument and
the traversal state as the second one. It must return a `{quoted, state}`,
in the same way it would return `{quoted, acc}` when using `Macro.prewalk/3`.
The state is a map with the following keys:
* `:acc` - The accumulator. Defaults to `nil` if none is given.
"""
@spec prewalk(Macro.t(), term, traversal_function) ::
{Macro.t(), term}
def prewalk(quoted, acc, fun) do
{quoted, %{acc: acc}} = Macro.traverse(quoted, %TraversalState{acc: acc}, fun, &{&1, &2})
{quoted, acc}
end
@doc """
Shifts the line numbers of the node or metadata by the given `line_correction`.
This function will update the `:line`, `:closing`, `:do`, `:end` and
`:end_of_expression` line numbers of the node metadata if such fields are
present.
"""
@spec correct_lines(Macro.t() | Macro.metadata(), integer, Macro.metadata()) ::
Macro.t() | Macro.metadata()
def correct_lines(meta, line_correction, opts \\ [])
def correct_lines(meta, line_correction, opts) when is_list(meta) do
skip = Keyword.get(opts, :skip, [])
meta
|> apply_line_corrections(line_correction, skip)
|> maybe_correct_line(line_correction, skip)
end
def correct_lines(quoted, line_correction, _opts) do
Macro.update_meta(quoted, &correct_lines(&1, line_correction))
end
defp correct_line(meta, key, line_correction) do
case Keyword.get(meta, key, []) do
value when value != [] ->
value =
if value[:line] do
put_in(value, [:line], value[:line] + line_correction)
else
value
end
[{key, value}]
_ ->
[]
end
end
defp apply_line_corrections(meta, line_correction, skip) do
to_correct = @line_fields -- skip
corrections = Enum.map(to_correct, &correct_line(meta, &1, line_correction))
Enum.reduce(corrections, meta, fn correction, meta ->
Keyword.merge(meta, correction)
end)
end
defp maybe_correct_line(meta, line_correction, skip) do
if Keyword.has_key?(meta, :line) and :line not in skip do
Keyword.put(meta, :line, meta[:line] + line_correction)
else
meta
end
end
@doc """
Returns the metadata keyword list of the given AST node.

    iex> Sourceror.get_meta({:foo, [line: 5], []})
    [line: 5]
"""
@spec get_meta(Macro.t()) :: Macro.metadata()
def get_meta({_form, meta, _args}) when is_list(meta), do: meta
@doc """
Returns the argument list of the given AST node.

    iex> Sourceror.get_args({:foo, [], [{:__block__, [], [:ok]}]})
    [{:__block__, [], [:ok]}]
"""
@spec get_args(Macro.t()) :: [Macro.t()]
def get_args({_form, _meta, args}), do: args
@doc """
Updates the arguments of the given node by applying `fun` to them.

    iex> node = {:foo, [line: 1], [{:__block__, [line: 1], [2]}]}
    iex> updater = fn args -> Enum.map(args, &Sourceror.correct_lines(&1, 2)) end
    iex> Sourceror.update_args(node, updater)
    {:foo, [line: 1], [{:__block__, [line: 3], [2]}]}
"""
@spec update_args(Macro.t(), ([Macro.t()] -> [Macro.t()])) :: Macro.t()
def update_args({form, meta, args}, fun) when is_function(fun, 1) and is_list(args) do
  updated_args = fun.(args)
  {form, meta, updated_args}
end
@doc """
Returns the line of a node, or `default` when the metadata carries no
`:line` entry (defaults to 1).

A default of `nil` may also be provided if the line number is meant to be
coalesced with a value that is not known upfront.

    iex> Sourceror.get_line({:foo, [line: 5], []})
    5

    iex> Sourceror.get_line({:foo, [], []}, 3)
    3
"""
@spec get_line(Macro.t(), default :: integer | nil) :: integer | nil
def get_line(quoted, default \\ 1)

def get_line({_form, meta, _args}, default)
    when is_list(meta) and (is_integer(default) or is_nil(default)) do
  Keyword.get(meta, :line, default)
end
@doc """
Returns the column of a node, or `default` when the metadata carries no
`:column` entry (defaults to 1).

A default of `nil` may also be provided if the column number is meant to be
coalesced with a value that is not known upfront.

    iex> Sourceror.get_column({:foo, [column: 5], []})
    5

    iex> Sourceror.get_column({:foo, [], []}, 3)
    3
"""
@spec get_column(Macro.t(), default :: integer | nil) :: integer | nil
def get_column(quoted, default \\ 1)

def get_column({_form, meta, _args}, default)
    when is_list(meta) and (is_integer(default) or is_nil(default)) do
  Keyword.get(meta, :column, default)
end
@doc """
Returns the line where the given node ends. It recursively checks for `end`,
`closing` and `end_of_expression` line numbers. If none is found, the
default value is returned (defaults to 1).

    iex> Sourceror.get_end_line({:foo, [end: [line: 4]], []})
    4

    iex> Sourceror.get_end_line({:foo, [closing: [line: 2]], []})
    2

    iex> Sourceror.get_end_line({:foo, [end_of_expression: [line: 5]], []})
    5

    iex> Sourceror.get_end_line({:foo, [closing: [line: 2], end: [line: 4]], []})
    4

    iex> "\""
    ...> alias Foo.{
    ...> Bar
    ...> }
    ...> "\"" |> Sourceror.parse_string!() |> Sourceror.get_end_line()
    3
"""
@spec get_end_line(Macro.t(), integer) :: integer
def get_end_line(quoted, default \\ 1) when is_integer(default) do
  quoted
  |> get_end_position(line: default, column: 1)
  |> Keyword.get(:line)
end
@doc """
Returns the start position of a node.

    iex> quoted = Sourceror.parse_string!(" :foo")
    iex> Sourceror.get_start_position(quoted)
    [line: 1, column: 2]

    iex> quoted = Sourceror.parse_string!("\\n\\nfoo()")
    iex> Sourceror.get_start_position(quoted)
    [line: 3, column: 1]

    iex> quoted = Sourceror.parse_string!("Foo.{Bar}")
    iex> Sourceror.get_start_position(quoted)
    [line: 1, column: 1]

    iex> quoted = Sourceror.parse_string!("foo[:bar]")
    iex> Sourceror.get_start_position(quoted)
    [line: 1, column: 1]

    iex> quoted = Sourceror.parse_string!("foo(:bar)")
    iex> Sourceror.get_start_position(quoted)
    [line: 1, column: 1]
"""
@spec get_start_position(Macro.t(), position) :: position
def get_start_position(quoted, default \\ [line: 1, column: 1])

# Access syntax (`foo[:bar]`): the position of the accessed expression.
def get_start_position({{:., _, [Access, :get]}, _, [left | _]}, default),
  do: get_start_position(left, default)

# Qualified calls (`Foo.bar()`): the position of the leftmost segment.
def get_start_position({{:., _, [left | _]}, _, _}, default),
  do: get_start_position(left, default)

# Plain nodes: take line/column from the metadata, falling back to `default`.
def get_start_position({_, meta, _}, default) do
  Keyword.merge(default, Keyword.take(meta, [:line, :column]))
end
@doc """
Returns the end position of the quoted expression. It recursively checks for
`end`, `closing` and `end_of_expression` positions. If none is found, the
default value is returned (defaults to `[line: 1, column: 1]`).
iex> quoted = ~S"\""
...> A.{
...> B
...> }
...> "\"" |> Sourceror.parse_string!()
iex> Sourceror.get_end_position(quoted)
[line: 3, column: 1]
iex> quoted = ~S"\""
...> foo do
...> :ok
...> end
...> "\"" |> Sourceror.parse_string!()
iex> Sourceror.get_end_position(quoted)
[line: 3, column: 1]
iex> quoted = ~S"\""
...> foo(
...> :a,
...> :b
...> )
...> "\"" |> Sourceror.parse_string!()
iex> Sourceror.get_end_position(quoted)
[line: 4, column: 4]
"""
@spec get_end_position(Macro.t(), position) :: position
def get_end_position(quoted, default \\ [line: 1, column: 1]) do
# Walk every node of the AST, keeping the greatest end position seen.
{_, position} =
Macro.postwalk(quoted, default, fn
{_, _, _} = quoted, end_position ->
current_end_position = get_node_end_position(quoted, default)
# Keep whichever of the two positions is later in the source.
end_position =
if compare_positions(end_position, current_end_position) == :gt do
end_position
else
current_end_position
end
{quoted, end_position}
# Literals and other non 3-tuple terms carry no position metadata.
terminal, end_position ->
{terminal, end_position}
end)
position
end
# Returns the end position of a single node: the greatest of the node's own
# start position and any end-marker positions (the fields in @end_fields)
# present in its metadata.
defp get_node_end_position(quoted, default) do
meta = get_meta(quoted)
# The node's own position, falling back to `default` per coordinate.
start_position = [
line: meta[:line] || default[:line],
column: meta[:column] || default[:column]
]
get_meta(quoted)
|> Keyword.take(@end_fields)
|> Keyword.values()
|> Enum.map(&Keyword.take(&1, [:line, :column]))
|> Enum.concat([start_position])
# Identity max_by with a custom strict "greater than" sorter.
# NOTE(review): the sorter returns false on ties (`:gt` only), so which of
# two equal candidates wins depends on Enum.max_by/4 sorter semantics —
# confirm if tie order ever matters here.
|> Enum.max_by(
& &1,
fn prev, next ->
compare_positions(prev, next) == :gt
end,
fn -> default end
)
end
@doc """
Compares two positions.

Returns `:gt` if the first position comes after the second one, and `:lt` for
vice versa. If the two positions are equal, `:eq` is returned.

`nil` values for lines or columns are coalesced to `0` for integer
comparisons.
"""
@spec compare_positions(position, position) :: :gt | :eq | :lt
def compare_positions(left, right) do
  left = coalesce_position(left)
  right = coalesce_position(right)

  cond do
    left == right ->
      :eq

    # Tuples compare element-wise, so this orders by line, then column.
    {left[:line], left[:column]} > {right[:line], right[:column]} ->
      :gt

    true ->
      :lt
  end
end

# Replaces missing/nil coordinates with 0 so positions always compare as
# integers.
defp coalesce_position(position) do
  [line: position[:line] || 0, column: position[:column] || 0]
end
@doc """
Gets the range used by the given quoted expression in the source code.
The quoted expression must have at least line and column metadata, otherwise
it is not possible to calculate an accurate range, or to calculate it at all.
This function is most useful when used after `Sourceror.parse_string/1`,
before any kind of modification to the AST.
The range is a map with `:start` and `:end` positions.
iex> quoted = ~S"\""
...> def foo do
...> :ok
...> end
...> "\"" |> Sourceror.parse_string!()
iex> Sourceror.get_range(quoted)
%{start: [line: 1, column: 1], end: [line: 3, column: 4]}
iex> quoted = ~S"\""
...> Foo.{
...> Bar
...> }
...> "\"" |> Sourceror.parse_string!()
iex> Sourceror.get_range(quoted)
%{start: [line: 1, column: 1], end: [line: 3, column: 2]}
## Options
- `:include_comments` - When `true`, it includes the comments into the range. Defaults to `false`.
iex> ~S"\""
...> # Foo
...> :baz # Bar
...> "\""
...> |> Sourceror.parse_string!()
...> |> Sourceror.get_range(include_comments: true)
%{start: [line: 1, column: 1], end: [line: 2, column: 11]}
"""
# Fix: the previous @spec only declared get_range/1, but the `opts \\ []`
# default also defines get_range/2 (which carries the documented
# `:include_comments` option); the spec now covers the full arity.
@spec get_range(Macro.t(), opts :: keyword) :: range
def get_range(quoted, opts \\ []) do
  Sourceror.Range.get_range(quoted, opts)
end
@doc """
Prepends comments to the leading or trailing comments of a node.

The comments are stored under the `:leading_comments` or
`:trailing_comments` key of the node metadata. `position` defaults to
`:leading`.
"""
@spec prepend_comments(
quoted :: Macro.t(),
comments :: [comment],
position :: :leading | :trailing
) :: Macro.t()
def prepend_comments(quoted, comments, position \\ :leading)
when position in [:leading, :trailing] do
do_add_comments(quoted, comments, :prepend, position)
end
@doc """
Appends comments to the leading or trailing comments of a node.

The comments are stored under the `:leading_comments` or
`:trailing_comments` key of the node metadata. `position` defaults to
`:leading`.
"""
@spec append_comments(
quoted :: Macro.t(),
comments :: [comment],
position :: :leading | :trailing
) ::
Macro.t()
def append_comments(quoted, comments, position \\ :leading)
when position in [:leading, :trailing] do
do_add_comments(quoted, comments, :append, position)
end
# Shared implementation for prepend_comments/3 and append_comments/3: merges
# `comments` into the node's leading/trailing comment metadata.
defp do_add_comments({_, meta, _} = quoted, comments, mode, position) do
  key =
    case position do
      :leading -> :leading_comments
      :trailing -> :trailing_comments
    end

  existing = Keyword.get(meta, key, [])

  merged =
    case mode do
      :append -> existing ++ comments
      :prepend -> comments ++ existing
    end

  Macro.update_meta(quoted, &Keyword.put(&1, key, merged))
end
@doc false
# True when any end-marker field (the fields listed in @end_fields) is
# present in the node metadata.
@spec has_closing_line?(Macro.t()) :: boolean
def has_closing_line?({_, meta, _}) do
  # Idiom fix: the previous version built an intermediate boolean list with
  # `for` and then called Enum.any?/1; Enum.any?/2 short-circuits instead.
  Enum.any?(@end_fields, &Keyword.has_key?(meta, &1))
end
@doc """
Applies one or more patches to the given string.
This function limits itself to apply the patches in order, but it does not
check for overlapping ranges, so make sure to pass non-overlapping patches.
A patch is a map containing at least the range that it should patch, and the
change to be applied in the range, for example:
iex> original = ~S"\""
...> if not allowed? do
...> raise "Not allowed!"
...> end
...> "\""
iex> patch = %{
...> change: "unless allowed? do\\n raise \\"Not allowed!\\"\\nend",
...> range: %{start: [line: 1, column: 1], end: [line: 3, column: 4]}
...> }
iex> Sourceror.patch_string(original, [patch])
~S"\""
unless allowed? do
raise "Not allowed!"
end
"\""
The change can also be a function, in which case the original text in the patch
range will be given as an argument:
iex> original = ~S"\""
...> hello :world
...> "\""
iex> patch = %{
...> change: &String.upcase/1,
...> range: %{start: [line: 1, column: 7], end: [line: 1, column: 13]}
...> }
iex> Sourceror.patch_string(original, [patch])
~S"\""
hello :WORLD
"\""
By default, the patch will be automatically indented to match the indentation
of the range it wants to replace if the change is a text string:
iex> original = ~S"\""
...> foo do bar do
...> :ok
...> end end
...> "\""
iex> patch = %{
...> change: "baz do\\n :not_ok\\nend",
...> range: %{start: [line: 1, column: 8], end: [line: 3, column: 6]}
...> }
iex> Sourceror.patch_string(original, [patch])
~S"\""
foo do baz do
:not_ok
end end
"\""
If you don't want this behavior, you can add `preserve_indentation: false` to
your patch:
iex> original = ~S"\""
...> foo do bar do
...> :ok
...> end end
...> "\""
iex> patch = %{
...> change: "baz do\\n :not_ok\\nend",
...> range: %{start: [line: 1, column: 8], end: [line: 3, column: 6]},
...> preserve_indentation: false
...> }
iex> Sourceror.patch_string(original, [patch])
~S"\""
foo do baz do
:not_ok
end end
"\""
"""
@spec patch_string(String.t(), [patch]) :: String.t()
def patch_string(string, patches) do
# Apply patches bottom-up: sorting by start line, descending, means applying
# one patch never shifts the line numbers of the patches still pending.
patches = Enum.sort_by(patches, & &1.range.start[:line], &>=/2)
# Work over the reversed line list; do_patch_string/4 tracks the current
# (1-based) line number as it walks from the last line upwards.
lines =
string
|> String.split(~r/\n|\r\n|\r/)
|> Enum.reverse()
do_patch_string(lines, patches, [], length(lines))
|> Enum.join("\n")
end
# Walks the reversed line list from the last line upwards, applying patches
# whose range starts on the current line. `seen` accumulates already
# processed lines in top-down order.
defp do_patch_string(lines, [], seen, _), do: Enum.reverse(lines) ++ seen
defp do_patch_string([], _, seen, _), do: seen
defp do_patch_string([line | rest], [patch | patches], seen, current_line) do
cond do
# All single-line patches that start on this line are applied in one pass.
current_line == patch.range.start[:line] and single_line_patch?(patch) ->
applicable_patches =
Enum.filter([patch | patches], fn patch ->
current_line == patch.range.start[:line] and single_line_patch?(patch)
end)
patched = apply_single_line_patches(line, applicable_patches)
do_patch_string([patched | rest], patches -- applicable_patches, seen, current_line)
# A multiline patch starting here consumes this line plus the following
# lines, which are already in `seen`.
current_line == patch.range.start[:line] ->
seen = apply_multiline_patch([line | seen], patch)
do_patch_string(rest, patches, seen, current_line - 1)
# No patch starts on this line; keep it and move up.
true ->
do_patch_string(rest, [patch | patches], [line | seen], current_line - 1)
end
end
# A patch is single-line when its range starts and ends on the same line.
defp single_line_patch?(patch) do
  range = patch.range
  range.start[:line] == range.end[:line]
end
# Applies every single-line patch to `string`, right-to-left so that earlier
# column offsets stay valid as the text changes length.
defp apply_single_line_patches(string, patches) do
  patches
  |> Enum.sort_by(& &1.range.start[:column], &>=/2)
  |> Enum.reduce(string, fn patch, acc ->
    start_col = patch.range.start[:column] - 1
    span = patch.range.end[:column] - patch.range.start[:column]

    {prefix, tail} = String.split_at(acc, start_col)
    {target, suffix} = String.split_at(tail, span)

    replacement =
      if is_binary(patch.change), do: patch.change, else: patch.change.(target)

    prefix <> replacement <> suffix
  end)
end
# Applies a patch spanning multiple lines. `lines` is `[first | seen]`,
# where `first` is the line the patch starts on and `seen` holds the
# following lines (already processed, top-down).
defp apply_multiline_patch(lines, patch) do
line_span = patch.range.end[:line] - patch.range.start[:line] + 1
[first | rest] = lines
# Split the first line at the patch start column: the prefix is kept, the
# remainder belongs to the patched region.
{first, first_to_patch} = String.split_at(first, patch.range.start[:column] - 1)
{to_patch, rest} = Enum.split(rest, line_span - 1)
# On the last patched line, keep only the text after the end column.
{last, _} = List.pop_at(to_patch, -1, "")
{_, last} = String.split_at(last, patch.range.end[:column] - 1)
patch_text =
if is_binary(patch.change) do
patch.change
else
# Function changes receive the original text covered by the range.
original_text = Enum.join([first_to_patch | to_patch], "\n")
patch.change.(original_text)
end
[first_patch | middle_patch] = String.split(patch_text, ~r/\n|\r\n|\r/)
middle_patch =
if is_binary(patch.change) and Map.get(patch, :preserve_indentation, true) do
# Re-indent the continuation lines to match the line being replaced.
indent = get_indent(first)
indent =
if String.trim(first) != "" and get_indent(List.first(middle_patch) || "") > 0 do
# If the patch does not start at the start of the line and the next
# lines have an additional indentation, then we need to add it to
# prevent the "flattening" of the indentations, essentially to
# avoid this:
# foo do bar do
#   :ok
# end
# end
indent + 1
else
indent
end
middle_patch
|> Enum.map(&(String.duplicate("\s\s", indent) <> &1))
|> Enum.join("\n")
else
middle_patch
|> Enum.join("\n")
end
[first <> first_patch, middle_patch <> last | rest]
end
# Counts the number of leading two-space indentation units in `string`.
defp get_indent(string, count \\ 0)
defp get_indent("  " <> rest, count), do: get_indent(rest, count + 1)
defp get_indent(_other, count), do: count
end
|
lib/sourceror.ex
| 0.877273
| 0.506042
|
sourceror.ex
|
starcoder
|
defmodule Flop.Generators do
@moduledoc false
use ExUnitProperties
alias Flop.Filter
# All order directions supported by Flop.
@order_directions [
:asc,
:asc_nulls_first,
:asc_nulls_last,
:desc,
:desc_nulls_first,
:desc_nulls_last
]
# Generates a pet map: alphanumeric name and species, age in 1..500.
def pet do
gen all name <- string(:alphanumeric),
age <- integer(1..500),
species <- string(:alphanumeric) do
%{name: name, age: age, species: species}
end
end
# Generates offset/limit or page/page_size parameters; one of the two
# values is a positive integer and the other may be nil, in random order.
def pagination_parameters(type) when type in [:offset, :page] do
gen all val_1 <- positive_integer(),
val_2 <- one_of([positive_integer(), constant(nil)]) do
[a, b] = Enum.shuffle([val_1, val_2])
case type do
:offset -> %{offset: a, limit: b}
:page -> %{page: a, page_size: b}
end
end
end
# Generates cursor pagination parameters (first/after or last/before); the
# cursor may be a random string or nil.
def pagination_parameters(type) when type in [:first, :last] do
gen all val_1 <- positive_integer(),
val_2 <- one_of([string(:alphanumeric), constant(nil)]) do
case type do
:first -> %{first: val_1, after: val_2}
:last -> %{last: val_1, before: val_2}
end
end
end
# Generates a Flop.Filter on the :age or :name field, pairing the value
# with an operator that is valid for the value's type.
def filter do
gen all field <- member_of([:age, :name]),
value <- value_by_field(field),
op <- operator_by_type(value) do
%Filter{field: field, op: op, value: value}
end
end
# Value generators per filter field.
def value_by_field(:age), do: integer()
def value_by_field(:name), do: string(:alphanumeric, min_length: 1)
# Narrower value generators, presumably for comparison-operator tests.
def compare_value_by_field(:age), do: integer(1..30)
def compare_value_by_field(:name),
do: string(?a..?z, min_length: 1, max_length: 3)
# Operators valid for string values.
defp operator_by_type(a) when is_binary(a),
do:
member_of([
:==,
:!=,
:=~,
:<=,
:<,
:>=,
:>,
:like,
:like_and,
:like_or,
:ilike,
:ilike_and,
:ilike_or
])
# Operators valid for numeric values.
defp operator_by_type(a) when is_number(a),
do: member_of([:==, :!=, :<=, :<, :>=, :>])
# Returns a constant generator holding the schema's sortable fields in a
# shuffled order.
def cursor_fields(%{} = schema) do
schema
|> Flop.Schema.sortable()
|> Enum.shuffle()
|> constant()
end
# Generates a list of order directions, one per sortable field of the
# schema.
def order_directions(%{} = schema) do
field_count =
schema
|> Flop.Schema.sortable()
|> length()
@order_directions
|> member_of()
|> list_of(length: field_count)
end
end
|
test/support/generators.ex
| 0.593845
| 0.441974
|
generators.ex
|
starcoder
|
defmodule SRTM.DataCell do
@moduledoc false
alias SRTM.Error
# Struct fields:
#   :hgt_data        - raw binary contents of the .hgt file
#   :latitude, :longitude - integer degrees parsed from the file base name
#   :points_per_cell - grid samples per row/column (1201 SRTM-3, 3601 SRTM-1)
#   :last_used       - never set in this module; presumably managed by a
#                      caller-side cache — TODO confirm
defstruct [:hgt_data, :latitude, :longitude, :points_per_cell, :last_used]
# Builds a data cell from a .hgt file: reads the file, derives the grid
# density from the file size, and parses coordinates from the base name
# (e.g. "N46E008.hgt").
def from_file(path) do
with {:ok, hgt_data} <- read(path),
{:ok, ppc} <- get_ppc(hgt_data) do
{lat, lng} =
path
|> Path.basename(".hgt")
|> reverse_coordinates()
data_cell = %__MODULE__{
hgt_data: hgt_data,
latitude: lat,
longitude: lng,
points_per_cell: ppc
}
{:ok, data_cell}
end
end
# Looks up the elevation sample at the given coordinates. Returns nil when
# the sample is past the data or marks missing data, and raises when the
# coordinates fall outside this cell's range.
def get_elevation(%__MODULE__{points_per_cell: ppc, hgt_data: hgt_data} = dc, lat, lng) do
# Convert the coordinate offsets within the cell to a grid row/column.
row = trunc((dc.latitude + 1 - lat) * (ppc - 1))
col = trunc((lng - dc.longitude) * (ppc - 1))
# Each sample is a 2-byte big-endian signed integer.
byte_pos = (row * ppc + col) * 2
cond do
byte_pos < 0 or byte_pos > ppc * ppc * 2 ->
raise "Coordinates out of range"
byte_pos >= byte_size(hgt_data) ->
nil
# The byte pattern 0x80 0x00 (-32768) is treated as "no data".
:binary.at(hgt_data, byte_pos) == 0x80 && :binary.at(hgt_data, byte_pos + 1) == 0x00 ->
nil
true ->
hgt_data
|> binary_part(byte_pos, 2)
|> decode_elevation()
end
end
# Only values inside a plausible elevation window decode; others are nil.
defp decode_elevation(<<val::signed-big-integer-size(16)>>) when val in -1000..10000, do: val
defp decode_elevation(_binary), do: nil
# Parses a base name such as "N46E008" into {lat, lng} integer degrees,
# negating for the southern (S) and western (W) hemispheres.
defp reverse_coordinates(<<d0::size(8), lat::size(16), d1::size(8), lng::size(24)>>) do
lat = String.to_integer(<<lat::size(16)>>)
lng = String.to_integer(<<lng::size(24)>>)
lat = if d0 == ?S, do: lat * -1, else: lat
lng = if d1 == ?W, do: lng * -1, else: lng
{lat, lng}
end
# Wraps File.read/1, converting failures into an SRTM.Error; a successful
# {:ok, binary} falls through the `with` unchanged.
defp read(path) do
with {:error, reason} <- File.read(path) do
{:error,
%Error{reason: :io_error, message: "Reading of HGT file failed: #{inspect(reason)}"}}
end
end
@srtm_3 1201 * 1201 * 2
@srtm_1 3601 * 3601 * 2
# Derives points-per-cell from the file size.
# NOTE(review): "unkown" is a typo in both the reason atom and the message;
# renaming the atom would break callers matching on :unkown_file_type.
defp get_ppc(hgt_data) do
case byte_size(hgt_data) do
@srtm_3 -> {:ok, 1201}
@srtm_1 -> {:ok, 3601}
_ -> {:error, %Error{reason: :unkown_file_type, message: "File type unkown"}}
end
end
end
|
lib/data_cell.ex
| 0.736211
| 0.534005
|
data_cell.ex
|
starcoder
|
defmodule AWS.Lambda do
@moduledoc """
AWS Lambda
**Overview**
This is the *AWS Lambda API Reference*. The AWS Lambda Developer Guide
provides additional information. For the service overview, see [What is AWS
Lambda](http://docs.aws.amazon.com/lambda/latest/dg/welcome.html), and for
information about how the service works, see [AWS Lambda: How it
Works](http://docs.aws.amazon.com/lambda/latest/dg/lambda-introduction.html)
in the *AWS Lambda Developer Guide*.
"""
@doc """
Adds a permission to the resource policy associated with the specified AWS
Lambda function, allowing an event source that uses the *push* model (such
as Amazon S3 or a custom application) to invoke the function.

When using versioning, the permission applies to the function version or
alias given via the `Qualifier` request parameter.

Requires permission for the `lambda:AddPermission` action.
"""
def add_permission(client, function_name, input, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/policy"
  request(client, :post, path, [], input, options, 201)
end
@doc """
Creates an alias that points to the specified Lambda function version.
Alias names are unique for a given function.

Requires permission for the `lambda:CreateAlias` action.
"""
def create_alias(client, function_name, input, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/aliases"
  request(client, :post, path, [], input, options, 201)
end
@doc """
Identifies an Amazon Kinesis or DynamoDB stream as an event source for a
Lambda function, so that AWS Lambda invokes the function when records are
posted to the stream (the AWS Lambda *pull* model).

The request body carries the mapping information: which stream to read from
and which function to invoke. A stream can be associated with multiple
Lambda functions, and a function can have multiple event sources. With
versioning, a specific function version or alias may be given in the
function name parameter.

Requires permission for the `lambda:CreateEventSourceMapping` action.
"""
def create_event_source_mapping(client, input, options \\ []) do
  path = "/2015-03-31/event-source-mappings"
  request(client, :post, path, [], input, options, 202)
end
@doc """
Creates a new Lambda function from the request parameters and the .zip file
in the request body. Fails if the (case-sensitive) function name already
exists. The `Publish` parameter may be used to publish a version on create.

Requires permission for the `lambda:CreateFunction` action.
"""
def create_function(client, input, options \\ []) do
  path = "/2015-03-31/functions"
  request(client, :post, path, [], input, options, 201)
end
@doc """
Deletes the specified Lambda function alias.

Requires permission for the `lambda:DeleteAlias` action.
"""
def delete_alias(client, function_name, name, input, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/aliases/#{URI.encode(name)}"
  request(client, :delete, path, [], input, options, 204)
end
@doc """
Removes an event source mapping, so that AWS Lambda no longer invokes the
function for events in the associated source.

Requires permission for the `lambda:DeleteEventSourceMapping` action.
"""
def delete_event_source_mapping(client, uuid, input, options \\ []) do
  path = "/2015-03-31/event-source-mappings/#{URI.encode(uuid)}"
  request(client, :delete, path, [], input, options, 202)
end
@doc """
Deletes the specified Lambda function code and configuration.

Without a `Qualifier`, the function is deleted together with all of its
versions and any aliases pointing at them; with a `Qualifier`, only that
specific version is deleted. The associated resource policy is deleted as
well, but event source mappings must be deleted explicitly.

Requires permission for the `lambda:DeleteFunction` action.
"""
def delete_function(client, function_name, input, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}"
  request(client, :delete, path, [], input, options, 204)
end
@doc """
Returns the customer's account settings: limits information (such as code
size and concurrency limits) and resource usage statistics (such as code
storage usage and function count).
"""
def get_account_settings(client, options \\ []) do
  path = "/2016-08-19/account-settings"
  request(client, :get, path, [], nil, options, 200)
end
@doc """
Returns information about the specified alias, such as its ARN, description,
and the function version it points to.

Requires permission for the `lambda:GetAlias` action.
"""
def get_alias(client, function_name, name, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/aliases/#{URI.encode(name)}"
  request(client, :get, path, [], nil, options, 200)
end
@doc """
Returns configuration information for the specified event source mapping
(see `create_event_source_mapping/3`).

Requires permission for the `lambda:GetEventSourceMapping` action.
"""
def get_event_source_mapping(client, uuid, options \\ []) do
  path = "/2015-03-31/event-source-mappings/#{URI.encode(uuid)}"
  request(client, :get, path, [], nil, options, 200)
end
@doc """
Returns the configuration of the Lambda function plus a presigned URL (valid
for up to 10 minutes) to download the .zip file uploaded with
`create_function/3`.

The optional `Qualifier` parameter selects a specific function version;
without it, information about the `$LATEST` version is returned.

Requires permission for the `lambda:GetFunction` action.
"""
def get_function(client, function_name, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}"
  request(client, :get, path, [], nil, options, 200)
end
@doc """
Returns the configuration information of the Lambda function, as provided
when the function was uploaded with `create_function/3`.

The optional `Qualifier` parameter selects a specific function version or
alias; without it, the `$LATEST` version is described.

Requires permission for the `lambda:GetFunctionConfiguration` operation.
"""
def get_function_configuration(client, function_name, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/configuration"
  request(client, :get, path, [], nil, options, 200)
end
@doc """
Returns the resource policy associated with the specified Lambda function.

The `Qualifier` parameter may target the policy of a specific function
version or alias. See `add_permission/4` for adding permissions.

Requires permission for the `lambda:GetPolicy` action.
"""
def get_policy(client, function_name, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/policy"
  request(client, :get, path, [], nil, options, 200)
end
@doc """
Invokes a specific Lambda function.

With versioning, a specific function version or alias can be invoked by
passing it via the `Qualifier` parameter; without it the `$LATEST` version
is invoked. Invocations occur at least once in response to an event, so
functions must be idempotent.

The `"ClientContext"`, `"InvocationType"` and `"LogType"` entries of `input`
are sent as the `X-Amz-Client-Context`, `X-Amz-Invocation-Type` and
`X-Amz-Log-Type` headers rather than in the payload.

Requires permission for the `lambda:InvokeFunction` action.
"""
def invoke(client, function_name, input, options \\ []) do
  url = "/2015-03-31/functions/#{URI.encode(function_name)}/invocations"

  # Move invocation options out of the payload and into HTTP headers.
  # Fix: the previous code rebound `headers`/`input` inside `if` blocks,
  # which has no effect outside the block in modern Elixir (the headers were
  # silently dropped), and it relied on the removed `Dict` module. `input`
  # is assumed to be a map, as the original `Dict` usage implied.
  {headers, input} =
    Enum.reduce(
      [
        {"ClientContext", "X-Amz-Client-Context"},
        {"InvocationType", "X-Amz-Invocation-Type"},
        {"LogType", "X-Amz-Log-Type"}
      ],
      {[], input},
      fn {key, header_name}, {headers, input} ->
        case Map.fetch(input, key) do
          {:ok, value} -> {[{header_name, value} | headers], Map.delete(input, key)}
          :error -> {headers, input}
        end
      end
    )

  case request(client, :post, url, headers, input, options, nil) do
    {:ok, body, response} ->
      # Fix: the previous `%{body | "Key" => value}` update syntax raises
      # KeyError when the key is absent from the decoded body, and the same
      # `if`-scoping bug discarded these updates anyway. Map.put/3 is safe.
      body =
        body
        |> put_header_field(response, "X-Amz-Function-Error", "FunctionError")
        |> put_header_field(response, "X-Amz-Log-Result", "LogResult")

      {:ok, body, response}

    result ->
      result
  end
end

# Copies a response header value into `body` under `key` when the header is
# present (response.headers is assumed to support Access by string key, as
# the original code did).
defp put_header_field(body, response, header_name, key) do
  case response.headers[header_name] do
    nil -> body
    value -> Map.put(body, key, value)
  end
end
@doc """
Submits an asynchronous invocation request to AWS Lambda. This API is
deprecated — use `invoke/4` instead. Logs generated by the function
execution are available in the CloudWatch Logs console.

Requires permission for the `lambda:InvokeFunction` action.
"""
def invoke_async(client, function_name, input, options \\ []) do
  path = "/2014-11-13/functions/#{URI.encode(function_name)}/invoke-async"
  request(client, :post, path, [], input, options, 202)
end
@doc """
Returns the list of aliases created for a Lambda function, including for
each alias its ARN, description, name, and the function version it points
to.

Requires permission for the `lambda:ListAliases` action.
"""
def list_aliases(client, function_name, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/aliases"
  request(client, :get, path, [], nil, options, 200)
end
@doc """
Returns the list of event source mappings created with
`create_event_source_mapping/3`, with configuration information for each
mapping. Filters may be supplied to retrieve specific mappings, and with
versioning a function version or alias may be given via the `FunctionName`
parameter.

Requires permission for the `lambda:ListEventSourceMappings` action.
"""
def list_event_source_mappings(client, options \\ []) do
  path = "/2015-03-31/event-source-mappings"
  request(client, :get, path, [], nil, options, 200)
end
@doc """
Returns your Lambda functions along with each function's configuration.
Use `GetFunction` to retrieve a function's code.
With versioning enabled, only the $LATEST version of each function is
returned; see [AWS Lambda Function Versioning and
Aliases](http://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html).
Requires permission for the `lambda:ListFunctions` action.
"""
def list_functions(client, options \\ []) do
  request(client, :get, "/2015-03-31/functions", [], nil, options, 200)
end
@doc """
Lists all versions of a Lambda function. See [AWS Lambda Function
Versioning and
Aliases](http://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html).
"""
def list_versions_by_function(client, function_name, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/versions"
  request(client, :get, path, [], nil, options, 200)
end
@doc """
Publishes a new version of a function from the current snapshot of $LATEST:
AWS Lambda copies the code and configuration, which can no longer be
modified for that version. See [AWS Lambda Function Versioning and
Aliases](http://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html).
"""
def publish_version(client, function_name, input, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/versions"
  request(client, :post, path, [], input, options, 201)
end
@doc """
Removes an individual permission from a function's resource policy, looked
up by the statement ID that was supplied when the permission was added.
When versioning is in use, the removal applies to the function version or
alias given via the `Qualifier` parameter of the original `AddPermission`
request; see [AWS Lambda Function Versioning and
Aliases](http://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html).
Note that removing a permission causes an active event source relying on it
to lose access to the function.
Requires permission for the `lambda:RemovePermission` action.
"""
def remove_permission(client, function_name, statement_id, input, options \\ []) do
  path =
    "/2015-03-31/functions/#{URI.encode(function_name)}/policy/#{URI.encode(statement_id)}"

  request(client, :delete, path, [], input, options, 204)
end
@doc """
Updates the function version an alias points to and/or the alias
description. For more information, see [Introduction to AWS Lambda
Aliases](http://docs.aws.amazon.com/lambda/latest/dg/aliases-intro.html).
Requires permission for the lambda:UpdateAlias action.
"""
def update_alias(client, function_name, name, input, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/aliases/#{URI.encode(name)}"
  request(client, :put, path, [], input, options, 200)
end
@doc """
Updates an existing event source mapping, which is useful for changing
mapping parameters without losing the position in the stream. The function
receiving the stream records may be changed; changing the stream itself
requires creating a new mapping.
With versioning, the mapping can target a specific Lambda function version
or alias via the `FunctionName` parameter; see [AWS Lambda Function
Versioning and
Aliases](http://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html).
Disabling the mapping pauses polling; re-enabling resumes polling where it
stopped, so no records are lost. Deleting the mapping and recreating it,
however, resets the position.
Requires permission for the `lambda:UpdateEventSourceMapping` action.
"""
def update_event_source_mapping(client, uuid, input, options \\ []) do
  path = "/2015-03-31/event-source-mappings/#{URI.encode(uuid)}"
  request(client, :put, path, [], input, options, 202)
end
@doc """
Updates the code of an existing Lambda function; it cannot be used to
change the function configuration.
With versioning, this update always targets the $LATEST version; see [AWS
Lambda Function Versioning and
Aliases](http://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html).
Requires permission for the `lambda:UpdateFunctionCode` action.
"""
def update_function_code(client, function_name, input, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/code"
  request(client, :put, path, [], input, options, 200)
end
@doc """
Updates the configuration of an existing Lambda function using only the
parameters provided in the request; it cannot be used to change the
function's code.
With versioning, this update always targets the $LATEST version; see [AWS
Lambda Function Versioning and
Aliases](http://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html).
Requires permission for the `lambda:UpdateFunctionConfiguration` action.
"""
def update_function_configuration(client, function_name, input, options \\ []) do
  path = "/2015-03-31/functions/#{URI.encode(function_name)}/configuration"
  request(client, :put, path, [], input, options, 200)
end
# Signs the request with SigV4 and dispatches it, expecting
# `success_status_code` (or the default set when nil).
defp request(client, method, url, headers, input, options, success_status_code) do
  client = %{client | service: "lambda"}
  host = get_host("lambda", client)
  full_url = get_url(host, url, client)

  base_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
  payload = encode_payload(input)

  signed_headers =
    AWS.Request.sign_v4(client, method, full_url, base_headers ++ headers, payload)

  perform_request(method, full_url, payload, signed_headers, options, success_status_code)
end
# No explicit success code: any of 200/202/204 counts as success.
defp perform_request(method, url, payload, headers, options, nil) do
  case HTTPoison.request(method, url, payload, headers, options) do
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, response}

    {:ok, %HTTPoison.Response{status_code: code, body: body} = response}
    when code in [200, 202, 204] ->
      {:ok, Poison.Parser.parse!(body), response}

    {:ok, %HTTPoison.Response{body: body}} ->
      {:error, Poison.Parser.parse!(body)["message"]}

    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Explicit success code: an empty body yields `{:ok, nil, response}`,
# otherwise the JSON body is parsed; anything else is an error.
defp perform_request(method, url, payload, headers, options, success_status_code) do
  case HTTPoison.request(method, url, payload, headers, options) do
    {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body), response}

    {:ok, %HTTPoison.Response{body: body}} ->
      {:error, Poison.Parser.parse!(body)["message"]}

    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# The special region "local" targets a local endpoint instead of AWS.
defp get_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp get_host(endpoint_prefix, client) do
  "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
# Builds the full request URL from the client's protocol and port.
defp get_url(host, path, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}#{path}/"
end
# `nil` input becomes an empty body; everything else is JSON-encoded.
defp encode_payload(nil), do: ""
defp encode_payload(input), do: Poison.Encoder.encode(input, [])
end
|
lib/aws/lambda.ex
| 0.900691
| 0.516595
|
lambda.ex
|
starcoder
|
defmodule CoopMinesweeperWeb.FieldView do
  use CoopMinesweeperWeb, :view

  alias CoopMinesweeper.Game.{Field, Tile}

  # Renders the complete field for a player: the full tile grid plus the
  # field metadata.
  def render("player_field.json", %{field: %Field{tiles: tiles, size: size} = field}) do
    indexes = 0..(size - 1)

    grid =
      Enum.map(indexes, fn row ->
        Enum.map(indexes, fn col ->
          render("tile.json", tile: tiles[{row, col}])
        end)
      end)

    Map.merge(%{tiles: grid}, render("field_metadata.json", field: field))
  end

  # Renders an incremental update: changed tiles with their positions plus
  # the volatile parts of the field state.
  def render("field_changes.json", %{field: %Field{} = field, changes: changes}) do
    rendered_changes =
      Enum.map(changes, fn {{row, col}, tile} ->
        [[row, col], render("tile.json", tile: tile)]
      end)

    %{
      changes: rendered_changes,
      field: %{
        mines_left: field.mines_left,
        state: field.state,
        recent_player: field.recent_player
      }
    }
  end

  # Metadata describing a field's dimensions and current state.
  def render("field_metadata.json", %{field: %Field{} = field}) do
    %{
      size: field.size,
      mines: field.mines,
      mines_left: field.mines_left,
      state: field.state,
      recent_player: field.recent_player
    }
  end

  # Renders a summary list of fields.
  def render("field_list.json", %{field_list: field_list}) do
    render_many(field_list, __MODULE__, "field_list_entry.json", as: :list_entry)
  end

  def render("field_list_entry.json", %{
        list_entry: %{field: %Field{} = field, player_count: player_count}
      }) do
    %{
      id: field.id,
      size: field.size,
      mines: field.mines,
      mines_left: field.mines_left,
      player_count: player_count
    }
  end

  # Revealed tiles expose their adjacent-mine count; all other tiles only
  # report their state.
  def render("tile.json", %{tile: %Tile{state: :revealed, mines_close: mines_close}}) do
    %{state: :revealed, mines_close: mines_close}
  end

  def render("tile.json", %{tile: %Tile{state: state}}) do
    %{state: state}
  end
end
|
lib/coop_minesweeper_web/views/field_view.ex
| 0.623033
| 0.443299
|
field_view.ex
|
starcoder
|
defmodule SSHKit.SSH.Channel do
  @moduledoc """
  Defines a `SSHKit.SSH.Channel` struct representing a connection channel.
  A channel struct has the following fields:
  * `connection` - the underlying `SSHKit.SSH.Connection`
  * `type` - the type of the channel, i.e. `:session`
  * `id` - the unique channel id
  """
  alias SSHKit.SSH.Channel

  # `impl` is the module used for the underlying ssh operations
  # (`:ssh_connection` by default) so an alternative can be injected.
  defstruct [:connection, :type, :id, impl: :ssh_connection]

  @doc """
  Opens a channel on an SSH connection.
  On success, returns `{:ok, channel}`, where `channel` is a `Channel` struct.
  Returns `{:error, reason}` if a failure occurs.
  For more details, see [`:ssh_connection.session_channel/4`](http://erlang.org/doc/man/ssh_connection.html#session_channel-4).
  ## Options
  * `:timeout` - defaults to `:infinity`
  * `:initial_window_size` - defaults to 128 KiB
  * `:max_packet_size` - defaults to 32 KiB
  """
  def open(connection, options \\ []) do
    timeout = Keyword.get(options, :timeout, :infinity)
    ini_window_size = Keyword.get(options, :initial_window_size, 128 * 1024)
    max_packet_size = Keyword.get(options, :max_packet_size, 32 * 1024)
    impl = Keyword.get(options, :impl, :ssh_connection)
    case impl.session_channel(connection.ref, ini_window_size, max_packet_size, timeout) do
      {:ok, id} -> {:ok, build(connection, id, impl)}
      err -> err
    end
  end

  # Wraps a raw channel id in a `Channel` struct; only `:session` channels
  # are created here.
  defp build(connection, id, impl) do
    %Channel{connection: connection, type: :session, id: id, impl: impl}
  end

  @doc """
  Activates a subsystem on a channel.
  Returns `:success`, `:failure` or `{:error, reason}`.
  For more details, see [`:ssh_connection.subsystem/4`](http://erlang.org/doc/man/ssh_connection.html#subsystem-4).
  """
  @spec subsystem(channel :: struct(), subsystem :: String.t(), options :: list()) ::
        :success | :failure | {:error, reason :: String.t()}
  def subsystem(channel, subsystem, options \\ []) do
    timeout = Keyword.get(options, :timeout, :infinity)
    impl = Keyword.get(options, :impl, :ssh_connection)
    impl.subsystem(channel.connection.ref, channel.id, to_charlist(subsystem), timeout)
  end

  @doc """
  Closes an SSH channel.
  Returns `:ok`.
  For more details, see [`:ssh_connection.close/2`](http://erlang.org/doc/man/ssh_connection.html#close-2).
  """
  def close(channel) do
    channel.impl.close(channel.connection.ref, channel.id)
  end

  @doc """
  Executes a command on the remote host.
  Returns `:success`, `:failure` or `{:error, reason}`.
  For more details, see [`:ssh_connection.exec/4`](http://erlang.org/doc/man/ssh_connection.html#exec-4).
  ## Processing channel messages
  `loop/4` may be used to process any channel messages received as a result of
  executing `command` on the remote.
  """
  def exec(channel, command, timeout \\ :infinity)
  # Binary commands are converted to charlists for the Erlang ssh API.
  def exec(channel, command, timeout) when is_binary(command) do
    exec(channel, to_charlist(command), timeout)
  end
  def exec(channel, command, timeout) do
    channel.impl.exec(channel.connection.ref, channel.id, command, timeout)
  end

  @doc """
  Allocates PTTY.
  Returns `:success`.
  For more details, see [`:ssh_connection.ptty_alloc/4`](http://erlang.org/doc/man/ssh_connection.html#ptty_alloc-4).
  """
  def ptty(channel, options \\ [], timeout \\ :infinity) do
    channel.impl.ptty_alloc(channel.connection.ref, channel.id, options, timeout)
  end

  @doc """
  Sends data across an open SSH channel.
  `data` may be an enumerable, e.g. a `File.Stream` or `IO.Stream`.
  Returns `:ok`, `{:error, :timeout}` or `{:error, :closed}`.
  For more details, see [`:ssh_connection.send/5`](http://erlang.org/doc/man/ssh_connection.html#send-5).
  """
  def send(channel, type \\ 0, data, timeout \\ :infinity)
  # Binary or iolist data is sent directly.
  def send(channel, type, data, timeout) when is_binary(data) or is_list(data) do
    channel.impl.send(channel.connection.ref, channel.id, type, data, timeout)
  end
  # Any other enumerable is sent chunk by chunk, stopping at the first error.
  def send(channel, type, data, timeout) do
    Enum.reduce_while(data, :ok, fn
      (datum, :ok) -> {:cont, send(channel, type, datum, timeout)}
      (_, err) -> {:halt, err}
    end)
  end

  @doc """
  Sends an EOF message on an open SSH channel.
  Returns `:ok` or `{:error, :closed}`.
  For more details, see [`:ssh_connection.send_eof/2`](http://erlang.org/doc/man/ssh_connection.html#send_eof-2).
  """
  def eof(channel) do
    channel.impl.send_eof(channel.connection.ref, channel.id)
  end

  @doc """
  Receive the next message on an open SSH channel.
  Returns `{:ok, message}` or `{:error, :timeout}`.
  Only listens to messages from the channel specified as the first argument.
  ## Messages
  The message tuples returned by `recv/3` correspond to the underlying Erlang
  channel messages with the channel id replaced by the SSHKit channel struct:
  * `{:data, channel, type, data}`
  * `{:eof, channel}`
  * `{:exit_signal, channel, signal, msg, lang}`
  * `{:exit_status, channel, status}`
  * `{:closed, channel}`
  For more details, see [`:ssh_connection`](http://erlang.org/doc/man/ssh_connection.html).
  """
  def recv(channel, timeout \\ :infinity) do
    ref = channel.connection.ref
    id = channel.id
    receive do
      # In every `:ssh_cm` message tuple, element 1 is the channel id;
      # swap it for the full channel struct before returning.
      {:ssh_cm, ^ref, msg} when elem(msg, 1) == id ->
        msg = msg |> Tuple.delete_at(1) |> Tuple.insert_at(1, channel)
        {:ok, msg}
    after
      timeout -> {:error, :timeout}
    end
  end

  @doc """
  Flushes any pending messages for the given channel.
  Returns `:ok`.
  """
  def flush(channel, timeout \\ 0) do
    ref = channel.connection.ref
    id = channel.id
    receive do
      # NOTE(review): the recursive call uses the default timeout (0), not
      # the caller-supplied one - after the first message, only messages
      # already queued are drained.
      {:ssh_cm, ^ref, msg} when elem(msg, 1) == id -> flush(channel)
    after
      timeout -> :ok
    end
  end

  @doc """
  Adjusts the flow control window.
  Returns `:ok`.
  For more details, see [`:ssh_connection.adjust_window/3`](http://erlang.org/doc/man/ssh_connection.html#adjust_window-3).
  """
  def adjust(channel, size) when is_integer(size) do
    channel.impl.adjust_window(channel.connection.ref, channel.id, size)
  end

  @doc """
  Loops over channel messages until the channel is closed, or looping is stopped
  explicitly.
  Expects an accumulator on each call that determines how to proceed:
  1. `{:cont, acc}`
  The loop will wait for an inbound message. It will then pass the message and
  current `acc` to the looping function. `fun`'s return value is the
  accumulator for the next cycle.
  2. `{:cont, message, acc}`
  Sends a message to the remote end of the channel before waiting for a
  message as outlined in the `{:cont, acc}` case above. `message` may be one
  of the following:
  * `{0, data}` or `{1, data}` - sends normal or stderr data to the remote
  * `data` - is a shortcut for `{0, data}`
  * `:eof` - sends EOF
  3. `{:halt, acc}`
  Terminates the loop, returning `{:halted, acc}`.
  4. `{:suspend, acc}`
  Suspends the loop, returning `{:suspended, acc, continuation}`.
  `continuation` is a function that accepts a new accumulator value and that,
  when called, will resume the loop.
  `timeout` specifies the maximum wait time for receiving and sending individual
  messages.
  Once the final `{:closed, channel}` message is received, the loop will
  terminate and return `{:done, acc}`. The channel will be closed if it has
  not been closed before.
  """
  def loop(channel, timeout \\ :infinity, acc, fun)
  # `{:cont, msg, acc}`: send `msg` first, then continue waiting.
  def loop(channel, timeout, {:cont, msg, acc}, fun) do
    case lsend(channel, msg, timeout) do
      :ok -> loop(channel, timeout, {:cont, acc}, fun)
      err -> halt(channel, err)
    end
  end
  # `{:cont, acc}`: wait for the next message and hand it to `fun`.
  def loop(channel, timeout, {:cont, acc}, fun) do
    case recv(channel, timeout) do
      {:ok, msg} ->
        if elem(msg, 0) == :closed do
          # Final message: let `fun` see it one last time, then finish.
          {_, acc} = fun.(msg, acc)
          done(channel, acc)
        else
          :ok = ljust(channel, msg)
          loop(channel, timeout, fun.(msg, acc), fun)
        end
      err -> halt(channel, err)
    end
  end
  def loop(channel, _, {:halt, acc}, _) do
    halt(channel, acc)
  end
  def loop(channel, timeout, {:suspend, acc}, fun) do
    suspend(channel, acc, fun, timeout)
  end

  # Close the channel, drain any remaining messages, and report the halt.
  defp halt(channel, acc) do
    :ok = close(channel)
    :ok = flush(channel)
    {:halted, acc}
  end

  # Capture a continuation that resumes the loop with a new accumulator.
  defp suspend(channel, acc, fun, timeout) do
    {:suspended, acc, &loop(channel, timeout, &1, fun)}
  end

  defp done(_, acc) do
    {:done, acc}
  end

  # lsend/3 sends the optional message from a `{:cont, msg, acc}` loop
  # instruction: `nil` sends nothing, `:eof` sends EOF, `{type, data}`
  # selects the stream (0 = normal, 1 = stderr, per the loop/4 docs), and
  # bare data is shorthand for stream 0.
  defp lsend(_, nil, _), do: :ok
  defp lsend(channel, :eof, _), do: eof(channel)
  defp lsend(channel, {type, data}, timeout) do
    send(channel, type, data, timeout)
  end
  defp lsend(channel, data, timeout) do
    send(channel, 0, data, timeout)
  end

  # Received data consumes the flow-control window; widen it again by the
  # number of bytes received. Non-data messages need no adjustment.
  defp ljust(channel, {:data, _, _, data}) do
    adjust(channel, byte_size(data))
  end
  defp ljust(_, _), do: :ok
end
|
lib/sshkit/ssh/channel.ex
| 0.897215
| 0.562777
|
channel.ex
|
starcoder
|
defmodule PayDayLoan.LoadWorker do
  @moduledoc """
  Process to load requested keys into PDL cache.
  Whenever this process receives a ping, it attempts to load a batch
  of keys. Requested keys are ones returned by a call to
  `LoadState.requested_keys`.
  Pings should happen automatically via the PDL API. To force a
  ping manually, call `GenServer.cast(pdl.load_worker, :ping)`.
  """
  require Logger

  alias PayDayLoan.LoadState
  alias PayDayLoan.KeyCache

  # how long to wait (msec) after startup before we do the initial load
  @startup_dwell 10

  # `load_task_ref` is the monitor ref of the in-flight load task, or nil
  # when no load is running; it prevents overlapping load runs.
  @type state :: %{
    pdl: PayDayLoan.t,
    load_task_ref: nil | reference()
  }

  use GenServer

  @doc "Start in a supervision tree"
  @spec start_link({PayDayLoan.t, GenServer.options}) :: GenServer.on_start
  def start_link({init_state = %PayDayLoan{}, gen_server_opts}) do
    GenServer.start_link(__MODULE__, [init_state], gen_server_opts)
  end

  @spec init([PayDayLoan.t()]) :: {:ok, state}
  @impl true
  def init([pdl]) do
    # Schedule the first load shortly after startup.
    Process.send_after(self(), :ping, @startup_dwell)
    {:ok, %{pdl: pdl, load_task_ref: nil}}
  end

  @spec handle_cast(atom, state) :: {:noreply, state}
  @impl true
  # No load in progress - start one and remember its task ref.
  def handle_cast(:ping, %{pdl: pdl, load_task_ref: nil} = state) do
    load_task = start_load_task(pdl)
    {:noreply, %{state | load_task_ref: load_task.ref}}
  end
  # A load is already running - ignore the ping.
  def handle_cast(:ping, %{pdl: _pdl, load_task_ref: ref} = state) when is_reference(ref) do
    {:noreply, state}
  end

  @spec handle_info(atom, state) :: {:noreply, state}
  @impl true
  # Same ping handling for `Process.send_after`-style messages.
  def handle_info(:ping, %{pdl: pdl, load_task_ref: nil} = state) do
    load_task = start_load_task(pdl)
    {:noreply, %{state | load_task_ref: load_task.ref}}
  end
  def handle_info(:ping, %{pdl: _pdl, load_task_ref: ref} = state) when is_reference(ref) do
    {:noreply, state}
  end

  @spec handle_info(tuple, state) :: {:noreply, state}
  @impl true
  # The load task finished and replied `:ok`; stop monitoring it (`:flush`
  # discards any pending :DOWN message) and allow a new load run.
  def handle_info({ref, :ok}, state) do
    Process.demonitor(ref, [:flush])
    {:noreply, %{state | load_task_ref: nil}}
  end
  # The load task exited; clear the ref so a later ping can retry.
  def handle_info({:DOWN, _ref, :process, _pid, _reason}, state) do
    {:noreply, %{state | load_task_ref: nil}}
  end

  # Runs do_load/2 under the task supervisor without linking, so a crashing
  # load does not take this worker down.
  @spec start_load_task(PayDayLoan.t) :: Task.t()
  defp start_load_task(pdl) do
    Task.Supervisor.async_nolink(pdl.load_task_supervisor, fn -> do_load(pdl, false) end)
  end

  defp do_load(_pdl, true), do: :ok
  defp do_load(pdl, false) do
    # Take up to batch_size requested keys and mark them as loading, then
    # fill the remainder of the batch with keys needing a reload.
    requested_keys = LoadState.requested_keys(pdl.load_state_manager, pdl.batch_size)
    _ = LoadState.loading(pdl.load_state_manager, requested_keys)
    reload_keys =
      LoadState.reload_keys(
        pdl.load_state_manager,
        pdl.batch_size - length(requested_keys)
      )
    _ = LoadState.reload_loading(pdl.load_state_manager, reload_keys)
    load_batch(pdl, requested_keys ++ reload_keys)
    # loop until no more requested keys
    finished? = length(requested_keys) == 0 && length(reload_keys) == 0
    do_load(pdl, finished?)
  end

  defp load_batch(_pdl, batch_keys) when length(batch_keys) == 0 do
    :ok
  end
  defp load_batch(pdl, batch_keys) do
    # get data from the cache source (e.g., database)
    load_data = pdl.callback_module.bulk_load(batch_keys)
    # add it to the cache
    # we need to know which keys did not get handled so that we can
    # mark them as failed
    keys_not_loaded =
      load_data
      |> Enum.reduce(
        MapSet.new(batch_keys),
        fn {key, load_datum}, keys_remaining ->
          # load
          pdl
          |> load_element(key, load_datum)
          |> on_load_or_refresh(pdl, key)
          # remove from the set of loading keys
          MapSet.delete(keys_remaining, key)
        end
      )
    # mark these keys as failed because we requested them and they did not
    # get loaded into cache (e.g., the bulk load query did not return data)
    Enum.each(
      keys_not_loaded,
      fn key -> LoadState.failed(pdl.load_state_manager, key) end
    )
  end

  # either create a new element or update the existing one
  defp load_element(pdl, key, load_datum) do
    PayDayLoan.with_value(
      pdl,
      key,
      fn existing_value ->
        pdl.callback_module.refresh(existing_value, key, load_datum)
      end,
      fn ->
        pdl.callback_module.new(key, load_datum)
      end
    )
  end

  # update cache states
  defp on_load_or_refresh({:ok, value}, pdl, key) do
    _ = LoadState.loaded(pdl.load_state_manager, key)
    _ = KeyCache.add_to_cache(pdl.key_cache, key)
    pdl.backend.put(pdl, key, value)
  end
  defp on_load_or_refresh(:ignore, pdl, key) do
    # treat an :ignore the same as a failure to start
    # - we failed to add this to cache
    on_load_or_refresh({:error, :ignore}, pdl, key)
  end
  defp on_load_or_refresh({:error, _}, pdl, key) do
    LoadState.failed(pdl.load_state_manager, key)
    # NOTE the callback should handle failures - we don't need to
    # bring down the worker because of it
    :ok
  end
end
|
lib/pay_day_loan/load_worker.ex
| 0.772531
| 0.429429
|
load_worker.ex
|
starcoder
|
defmodule Cidr do
  @moduledoc """
  Parsing, formatting and arithmetic for IPv4 CIDR blocks (`a.b.c.d/mask`).
  """
  use Bitwise

  alias Cidr.Util

  # Length in bits of an IPv4 address.
  @bitlength 32

  @type t() :: %__MODULE__{
          a: integer(),
          b: integer(),
          c: integer(),
          d: integer(),
          mask: integer()
        }

  defstruct [:a, :b, :c, :d, :mask]

  @doc """
  new creates a new CIDR Block a.b.c.d/mask from a list of integers
  [a, b, c, d, mask]. Raises `ArgumentError` when any octet or the mask is
  out of range.
  """
  @spec new(list(integer)) :: t()
  def new([a, b, c, d, mask]) do
    validate_cidr_block([a, b, c, d, mask])
  end

  @doc """
  from_string parses a CIDR Block from its string representation, e.g.
  `"10.0.0.0/8"`. Raises on malformed input.
  """
  @spec from_string(String.t()) :: t()
  def from_string(cidr_block) do
    cidr_block
    |> String.split([".", "/"])
    |> Enum.map(&String.to_integer/1)
    |> validate_cidr_block()
  end

  @doc """
  to_string returns a string representation of the CIDR Block
  """
  @spec to_string(t()) :: String.t()
  def to_string(%__MODULE__{a: a, b: b, c: c, d: d, mask: mask}),
    do: "#{a}.#{b}.#{c}.#{d}/#{mask}"

  @doc """
  to_ip returns a string representation of the first IP address of the CIDR Block,
  dropping the mask length.
  """
  @spec to_ip(t()) :: String.t()
  def to_ip(%__MODULE__{a: a, b: b, c: c, d: d}), do: "#{a}.#{b}.#{c}.#{d}"

  @doc """
  to_bin returns the four network-order octets of the CIDR Block's base
  address as a binary.
  """
  @spec to_bin(t()) :: binary
  def to_bin(%__MODULE__{a: a, b: b, c: c, d: d}), do: <<a, b, c, d>>

  @doc """
  get_mask_bin returns the binary representation of the Subnet Mask
  """
  @spec get_mask_bin(t()) :: binary
  def get_mask_bin(%__MODULE__{mask: mask}) do
    # NOTE(review): assumes Util.ones/1 / Util.zeros/1 accept 0..32 and
    # produce `mask` one-bits followed by host zero-bits - confirm.
    <<Util.ones(mask)::bitstring, Util.zeros(@bitlength - mask)::bitstring>>
  end

  @doc """
  get_mask_int returns the Subnet Mask as an unsigned integer.
  """
  @spec get_mask_int(t()) :: integer
  def get_mask_int(cidr) do
    cidr
    |> get_mask_bin()
    |> :binary.decode_unsigned()
  end

  # Integer with only the host bits set (bitwise complement of the mask).
  @spec get_mask_inverse(t()) :: integer
  defp get_mask_inverse(%__MODULE__{mask: mask}) do
    <<Util.zeros(mask)::bitstring, Util.ones(@bitlength - mask)::bitstring>>
    |> :binary.decode_unsigned()
  end

  @doc """
  get_mask_string returns the string representation of the Subnet Mask
  """
  @spec get_mask_string(t()) :: String.t()
  def get_mask_string(cidr_block = %__MODULE__{}) do
    cidr_block
    |> get_mask_bin()
    |> :binary.bin_to_list()
    |> Enum.map(&Integer.to_string/1)
    |> Enum.join(".")
  end

  @doc """
  get_subnet_max_address returns the IP representation of the largest
  IP address in this CIDR Block
  """
  @spec get_subnet_max_address(t()) :: String.t()
  def get_subnet_max_address(cidr = %__MODULE__{mask: mask}) do
    subnet_mask = get_mask_int(cidr)
    inverse_mask = get_mask_inverse(cidr)

    max_int =
      cidr
      |> to_bin()
      |> :binary.decode_unsigned()
      |> Bitwise.band(subnet_mask)
      |> Bitwise.bor(inverse_mask)

    # Re-encode as exactly four octets. The previous
    # :binary.encode_unsigned/1 dropped leading zero bytes (e.g. for
    # 0.x.x.x networks), which made the <<a, b, c, d>> match fail.
    <<a, b, c, d>> = <<max_int::32>>

    to_ip(%__MODULE__{a: a, b: b, c: c, d: d, mask: mask})
  end

  # Octets must be 0..255; the mask may be 0..32 inclusive. The previous
  # guard (`mask < @bitlength`) incorrectly rejected valid /32 blocks.
  defp validate_cidr_block([a, b, c, d, mask])
       when a >= 0 and a < 256 and
            b >= 0 and b < 256 and
            c >= 0 and c < 256 and
            d >= 0 and d < 256 and
            mask >= 0 and mask <= @bitlength do
    %__MODULE__{a: a, b: b, c: c, d: d, mask: mask}
  end

  defp validate_cidr_block(_), do: raise(ArgumentError, message: "invalid CIDR block")

  @doc """
  address_count returns the number of addresses (hosts) in this CIDR Block
  """
  @spec address_count(t()) :: integer()
  # Integer shift instead of :math.pow/2 keeps the arithmetic exact.
  def address_count(%__MODULE__{mask: mask}), do: 1 <<< (@bitlength - mask)

  @doc """
  useable_address_count returns the number of *useable* addresses (hosts)
  in this CIDR Block. This is always 2 less than the address_count
  (network and broadcast addresses are excluded; note the result is not
  meaningful for /31 and /32 blocks).
  """
  @spec useable_address_count(t()) :: integer()
  def useable_address_count(cidr_block), do: address_count(cidr_block) - 2

  @doc """
  cidr_to_range returns a string summary of this CIDR Block Range,
  including the minimum and maximum IP address and the address_count.
  """
  @spec cidr_to_range(String.t()) :: String.t()
  def cidr_to_range(cidr_str) do
    cidr_block = from_string(cidr_str)

    "#{cidr_str}: #{to_ip(cidr_block)} - #{get_subnet_max_address(cidr_block)} (#{address_count(cidr_block)})\n"
  end
end
|
lib/cidr.ex
| 0.84916
| 0.459925
|
cidr.ex
|
starcoder
|
defmodule Bson.Encoder do
defprotocol Protocol do
  @moduledoc """
  `Bson.Encoder.Protocol` protocol defines Bson encoding according to Elixir terms and some Bson predefined structs (see `Bson`).
  List of the protocol implementations:
  * `Map` - Encodes a map into a document
  * `HashDict` - Encodes a HashDict into a document
  * `Keyword` - Encodes a Keyword into a document
  * `List` - Encodes a list of key-value pairs into a document, otherwise encodes the list into an array
  * `Integer` - Encodes integer in 32 or 64 bits
  * `Float` - Encodes float in 64 bits
  * `Atom` - Encodes special atoms (`false`, `true`, `nil`,
  `:nan`, `:+inf`, `:-inf`, `MIN_KEY` and `MAX_KEY`) in the appropriate format,
  others in the special type Symbol
  * `BitString` - as binary string
  * `Bson.Regex` - see specs
  * `Bson.ObjectId` - see specs
  * `Bson.JS` - see specs
  * `Bson.Bin` - see specs
  * `Bson.Timestamp` - see specs
  """
  @doc """
  Returns a binary representing a term in Bson format
  """
  def encode(term)
end
defmodule Error do
  @moduledoc """
  Container for error messages
  * `what` has triggered the error
  * `acc` contains what was already decoded for this term (ie the size of a string when the string itself could not be decoded)
  * `term` that failed to be encoded
  """
  defstruct what: nil, acc: [], term: nil

  defimpl Inspect, for: Error do
    # Render errors as a flat keyword list for readability.
    def inspect(error, _opts) do
      inspect(what: error.what, term: error.term, acc: error.acc)
    end
  end
end
@doc """
Creates a document using a collection of element, this is, a key-value pair
"""
def document(element_list) do
  # Reduce the elements into a reversed iodata accumulator. Binary keys
  # are used as-is, atom keys are converted to strings; any other element
  # shape halts with an Error recording what was already encoded.
  case Enumerable.reduce(element_list, {:cont, []},
    fn({key, value}, acc) when is_binary(key) -> accumulate_elist(key, value, acc)
      ({key, value}, acc) when is_atom(key) -> accumulate_elist(Atom.to_string(key), value, acc)
      (element, acc) -> {:halt, %Error{what: [:element], term: element, acc: acc |> Enum.reverse}}
    end) do
    {:halted, error} -> error
    {:done, acc} ->
      # wrap_document is defined elsewhere in this module - presumably it
      # adds the Bson document framing (length prefix / terminator).
      acc |> Enum.reverse |> IO.iodata_to_binary |> wrap_document
  end
end
defimpl Protocol, for: Integer do
  @doc """
  Encodes an integer as Bson int32 (type 0x10) when it fits in 32 signed
  bits, as int64 (type 0x12) when it fits in 64 signed bits, and returns
  an `Error` otherwise.

  iex> Bson.Encoder.Protocol.encode(2)
  {<<16>>, <<2, 0, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode(-2)
  {<<16>>, <<254, 255, 255, 255>>}
  iex> Bson.Encoder.Protocol.encode -0x80000001
  {<<18>>, <<255, 255, 255, 127, 255, 255, 255, 255>>}
  iex> Bson.Encoder.Protocol.encode 0x8000000000000001
  %Bson.Encoder.Error{what: [Integer], term: 0x8000000000000001}
  """
  # The upper bounds were previously off by one (`i <= 0x80000000` /
  # `i <= 0x8000000000000000`): those values do not fit in a signed
  # int32/int64 and were silently truncated by <<i::32-signed-little>>.
  def encode(i) when -0x80000000 <= i and i <= 0x7FFFFFFF, do: {<<0x10>>, <<i::32-signed-little>>}
  def encode(i) when -0x8000000000000000 <= i and i <= 0x7FFFFFFFFFFFFFFF, do: {<<0x12>>, <<i::64-signed-little>>}
  def encode(i), do: %Error{what: [Integer], term: i}
end
defimpl Protocol, for: Float do
  @doc """
  iex> Bson.Encoder.Protocol.encode(1.1)
  {<<1>>, <<154, 153, 153, 153, 153, 153, 241, 63>>}
  """
  # 0x01 = Bson double: 64-bit float, little endian.
  def encode(f), do: {<<0x01>>, <<(f)::size(64)-float-little>>}
end
defimpl Protocol, for: Atom do
  @doc """
  iex> Bson.Encoder.Protocol.encode(true)
  {<<8>>, <<1>>}
  iex> Bson.Encoder.Protocol.encode(nil)
  {<<10>>, <<>>}
  iex> Bson.Encoder.Protocol.encode(:max_key)
  {<<127>>, <<>>}
  iex> Bson.Encoder.Protocol.encode(:min_key)
  {<<255>>, <<>>}
  iex> Bson.Encoder.Protocol.encode(:nan)
  {<<1>>, <<0, 0, 0, 0, 0, 0, 248, 127>>}
  iex> Bson.Encoder.Protocol.encode(:'+inf')
  {<<1>>, <<0, 0, 0, 0, 0, 0, 240, 127>>}
  iex> Bson.Encoder.Protocol.encode(:'-inf')
  {<<1>>, <<0, 0, 0, 0, 0, 0, 240, 255>>}
  iex> Bson.Encoder.Protocol.encode(:atom)
  {<<14>>, [<<5, 0, 0, 0>>, "atom", <<0>>]}
  """
  # predefined Bson values: booleans (0x08), null (0x0a), the special
  # doubles NaN/+inf/-inf (0x01), and min/max key markers (0xff/0x7f)
  def encode(false), do: {<<0x08>>, <<0x00>>}
  def encode(true), do: {<<0x08>>, <<0x01>>}
  def encode(nil), do: {<<0x0a>>, <<>>}
  def encode(:nan), do: {<<0x01>>, <<0, 0, 0, 0, 0, 0, 248, 127>>}
  def encode(:'+inf'), do: {<<0x01>>, <<0, 0, 0, 0, 0, 0, 240, 127>>}
  def encode(:'-inf'), do: {<<0x01>>, <<0, 0, 0, 0, 0, 0, 240, 255>>}
  def encode(:min_key), do: {<<0xff>>, <<>>}
  def encode(:max_key), do: {<<0x7f>>, <<>>}
  # any other atom is encoded as a Bson symbol (0x0e), like a string
  def encode(atom), do: {<<0x0e>>, (atom |> Atom.to_string |> Bson.Encoder.wrap_string)}
end
defimpl Protocol, for: Bson.UTC do
  @doc """
  iex> Bson.Encoder.Protocol.encode(Bson.UTC.from_now({1390, 324703, 518471}))
  {<<9>>, <<30, 97, 207, 181, 67, 1, 0, 0>>}
  """
  # 0x09 = UTC datetime: the `ms` field as a little-endian signed int64.
  def encode(%Bson.UTC{ms: ms}) when is_integer(ms), do: {<<0x09>>, <<ms::64-little-signed>>}
  def encode(utc), do: %Error{what: [Bson.UTC], term: utc}
end
defimpl Protocol, for: Bson.Regex do
  @doc """
  iex> Bson.Encoder.Protocol.encode(%Bson.Regex{pattern: "p", opts: "i"})
  {<<11>>, ["p", <<0>>, "i", <<0>>]}
  """
  # 0x0b = regex: null-terminated pattern followed by null-terminated options.
  def encode(%Bson.Regex{pattern: p, opts: o}) when is_binary(p) and is_binary(o), do: {<<0x0b>>, [p, <<0x00>>, o, <<0x00>>]}
  def encode(regex), do: %Error{what: [Bson.Regex], term: regex}
end
defimpl Protocol, for: Bson.ObjectId do
  @doc """
  iex> Bson.Encoder.Protocol.encode(%Bson.ObjectId{oid: <<0xFF>>})
  {<<0x07>>, <<255>>}
  iex> Bson.Encoder.Protocol.encode(%Bson.ObjectId{oid: 123})
  %Bson.Encoder.Error{what: [Bson.ObjectId], term: %Bson.ObjectId{oid: 123}}
  """
  # 0x07 = ObjectId; only checks `oid` is a binary (its length is not
  # validated here, as the first doctest shows).
  def encode(%Bson.ObjectId{oid: oid}) when is_binary(oid), do: {<<0x07>>, oid}
  def encode(oid), do: %Error{what: [Bson.ObjectId], term: oid}
end
defimpl Protocol, for: Bson.JS do
  @doc """
  iex> Bson.Encoder.Protocol.encode(%Bson.JS{code: "1+1;"})
  {<<13>>, [<<5, 0, 0, 0>>, "1+1;", <<0>>]}
  iex> Bson.Encoder.Protocol.encode(%Bson.JS{code: "1+1;", scope: %{a: 0, b: "c"}})
  {<<15>>, <<34, 0, 0, 0, 5, 0, 0, 0, 49, 43, 49, 59, 0, 21, 0, 0, 0, 16, 97, 0, 0, 0, 0, 0, 2, 98, 0, 2, 0, 0, 0, 99, 0, 0>>}
  """
  # 0x0d = plain javascript code (no scope).
  def encode(%Bson.JS{code: js, scope: nil}) when is_binary(js) do
    {<<0x0d>>, Bson.Encoder.wrap_string(js)}
  end
  # 0x0f = code-with-scope: the scope map is encoded as a document first;
  # a failing scope is reported tagged with :js_context.
  def encode(%Bson.JS{code: js, scope: ctx}) when is_binary(js) and is_map(ctx) do
    case Bson.Encoder.document(ctx) do
      %Error{}=error -> %Error{error|what: {:js_context, error.what}}
      ctxBin ->
        {<<0x0f>>, [Bson.Encoder.wrap_string(js), ctxBin] |> IO.iodata_to_binary |> js_ctx}
    end
  end
  def encode(js), do: %Error{what: [Bson.JS], term: js}
  # Prefix with the total length; +4 accounts for the length field itself.
  defp js_ctx(jsctx), do: <<(byte_size(jsctx)+4)::32-little-signed, jsctx::binary>>
end
defimpl Protocol, for: Bson.Bin do
  @doc """
  Encodes a binary element (BSON type 0x05): 32-bit little-endian length,
  one subtype byte, then the raw bytes.

  iex> Bson.Encoder.Protocol.encode(%Bson.Bin{bin: "e", subtype: Bson.Bin.subtyx(:user)})
  {<<5>>,[<<1, 0, 0, 0>>, 128, "e"]}
  """
  def encode(%Bson.Bin{bin: payload, subtype: st}), do: encode(payload, st)

  def encode(payload, st) when is_binary(payload) and is_integer(st) do
    {<<0x05>>, [<<byte_size(payload)::32-little-signed>>, st, payload]}
  end

  def encode(payload, st), do: %Error{what: [Bson.Bin], term: {payload, st}}
end
defimpl Protocol, for: Bson.Timestamp do
  @doc """
  Encodes a BSON timestamp (type 0x11): two 32-bit little-endian signed
  integers, increment first, then timestamp.

  iex> Bson.Encoder.Protocol.encode(%Bson.Timestamp{inc: 1, ts: 2})
  {<<17>>,<<1, 0, 0, 0, 2, 0, 0, 0>>}
  """
  # BUG FIX: the upper bound was `<= 0x80000000`, which accepted
  # 2_147_483_648 — one past the 32-bit signed maximum — and let the binary
  # constructor silently truncate it to -2_147_483_648. The valid range for
  # a 32-bit signed field is -0x80000000..0x7FFFFFFF.
  def encode(%Bson.Timestamp{inc: i, ts: t})
    when is_integer(i) and -0x80000000 <= i and i <= 0x7FFFFFFF
    and is_integer(t) and -0x80000000 <= t and t <= 0x7FFFFFFF,
    do: {<<0x11>>, <<i::32-signed-little, t::32-signed-little>>}
  def encode(ts), do: %Error{what: [Bson.Timestamp], term: ts}
end
defimpl Protocol, for: BitString do
  @doc """
  Encodes a UTF-8 binary as a BSON string (type 0x02). Bitstrings that are
  not whole binaries cannot be encoded and yield an error.

  iex> Bson.Encoder.Protocol.encode("a")
  {<<2>>, [<<2, 0, 0, 0>>, "a", <<0>>]}
  """
  def encode(str) when is_binary(str) do
    {<<0x02>>, Bson.Encoder.wrap_string(str)}
  end

  def encode(other), do: %Error{what: [BitString], term: other}
end
defimpl Protocol, for: List do
  @doc """
  Encodes a list: key-value lists (first element is a `{key, value}` pair
  with an atom or binary key) become BSON documents (type 0x03); any other
  list becomes a BSON array (type 0x04).

  iex> Bson.Encoder.Protocol.encode([])
  {<<4>>,<<5, 0, 0, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode([2, 3])
  {<<4>>,<<19, 0, 0, 0, 16, 48, 0, 2, 0, 0, 0, 16, 49, 0, 3, 0, 0, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode([1,[nil]])
  {<<4>>,<<23, 0, 0, 0, 16, 48, 0, 1, 0, 0, 0, 4, 49, 0, 8, 0, 0, 0, 10, 48, 0, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode([1,[2, 3]])
  {<<4>>,<<34, 0, 0, 0, 16, 48, 0, 1, 0, 0, 0, 4, 49, 0, 19, 0, 0, 0, 16, 48, 0, 2, 0, 0, 0, 16, 49, 0, 3, 0, 0, 0, 0, 0>>}
  # Keyword and list of key-value pairs
  iex> Bson.Encoder.Protocol.encode([a: "r"])
  {<<3>>,<<14, 0, 0, 0, 2, 97, 0, 2, 0, 0, 0, 114, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode([{"a", "s"}])
  {<<3>>,<<14, 0, 0, 0, 2, 97, 0, 2, 0, 0, 0, 115, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode([{"a", "s"}, {:b, "r"}, 1, 2])
  %Bson.Encoder.Error{
    term: 1,
    what: [:element],
    acc: [[<<2>>, "a", <<0>>, [<<2, 0, 0, 0>>, "s", <<0>>]],
          [<<2>>, "b", <<0>>, [<<2, 0, 0, 0>>, "r", <<0>>]]]}
  iex> Bson.Encoder.Protocol.encode([2, 3])
  {<<4>>,<<19, 0, 0, 0, 16, 48, 0, 2, 0, 0, 0, 16, 49, 0, 3, 0, 0, 0, 0>>}
  """
  # BUG FIX: the last doctest example previously read `[2, 3, ]` — a
  # trailing comma in a list literal is a SyntaxError in Elixir, so the
  # doctest block could not compile.

  # Document-like list: first element is a {key, value} pair.
  def encode([{k, _}|_]=elist) when is_atom(k) or is_binary(k) do
    case Bson.Encoder.document(elist) do
      %Error{}=error -> error
      encoded_elist -> {<<0x03>>, encoded_elist}
    end
  end
  # Plain list: encode as a BSON array.
  def encode(list) do
    case Bson.Encoder.array(list) do
      %Error{}=error -> error
      encoded_list -> {<<0x04>>, encoded_list}
    end
  end
end
defimpl Protocol, for: [Map, HashDict, Keyword] do
  @doc """
  Encodes any dict-like collection as an embedded BSON document (type 0x03).

  # Map
  iex> Bson.Encoder.Protocol.encode(%{})
  {<<3>>,<<5, 0, 0, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode(%{a: "r"})
  {<<3>>,<<14, 0, 0, 0, 2, 97, 0, 2, 0, 0, 0, 114, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode(%{a: 1, b: 5})
  {<<3>>,<<19, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 16, 98, 0, 5, 0, 0, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode(%{a: 1, b: %{c: 3}})
  {<<3>>,<<27, 0, 0, 0, 16, 97, 0, 1, 0, 0, 0, 3, 98, 0, 12, 0, 0, 0, 16, 99, 0, 3, 0, 0, 0, 0, 0>>}
  # HashDict
  iex> Bson.Encoder.Protocol.encode(%HashDict{})
  {<<3>>,<<5, 0, 0, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode(HashDict.put(%HashDict{}, :a, "r"))
  {<<3>>,<<14, 0, 0, 0, 2, 97, 0, 2, 0, 0, 0, 114, 0, 0>>}
  iex> Bson.Encoder.Protocol.encode(%{a: "va", b: "vb", u: %Bson.UTC{ms: "e"}})
  %Bson.Encoder.Error{
    what: ["u", Bson.UTC],
    term: %Bson.UTC{ms: "e"},
    acc: [[[<<2>>, "a", <<0>>, [<<3, 0, 0, 0>>, "va", <<0>>]],
           [<<2>>, "b", <<0>>, [<<3, 0, 0, 0>>, "vb", <<0>>]]]]}
  iex> Bson.Encoder.Protocol.encode([1, 2, %Bson.UTC{ms: "e"}])
  %Bson.Encoder.Error{
    what: ["2", Bson.UTC],
    term: %Bson.UTC{ms: "e"},
    acc: [[[<<16>>, "0", <<0>>, <<1, 0, 0, 0>>],
           [<<16>>, "1", <<0>>, <<2, 0, 0, 0>>]]]}
  iex> Bson.Encoder.Protocol.encode(%{a: "va", b: "vb", c: %{c1: "vc1", cu: %Bson.UTC{ms: "e"}}})
  %Bson.Encoder.Error{
    what: ["c", "cu", Bson.UTC],
    term: %Bson.UTC{ms: "e"},
    acc: [[[<<2>>, "a", <<0>>, [<<3, 0, 0, 0>>, "va", <<0>>]],
           [<<2>>, "b", <<0>>, [<<3, 0, 0, 0>>, "vb", <<0>>]]],
          [[<<2>>, "c1", <<0>>, [<<4, 0, 0, 0>>, "vc1", <<0>>]]]]}
  iex> Bson.Encoder.Protocol.encode(%{a: "va", b: "vb", c: ["c0", %Bson.UTC{ms: "e"}]})
  %Bson.Encoder.Error{
    what: ["c", "1", Bson.UTC],
    term: %Bson.UTC{ms: "e"},
    acc: [[[<<2>>, "a", <<0>>, [<<3, 0, 0, 0>>, "va", <<0>>]],
           [<<2>>, "b", <<0>>, [<<3, 0, 0, 0>>, "vb", <<0>>]]],
          [[<<2>>, "0", <<0>>, [<<3, 0, 0, 0>>, "c0", <<0>>]]]]}
  """
  def encode(dict) do
    case Bson.Encoder.document(dict) do
      %Error{} = err -> err
      doc -> {<<0x03>>, doc}
    end
  end
end
@doc """
Creates a document for an array (list of items)

Each item is encoded as an element whose name is its zero-based index
rendered as a string ("0", "1", ...). Returns the wrapped binary document,
or an `%Error{}` carrying the partially-accumulated elements on failure.
"""
def array(item_list) do
  # Drive the enumerable via the raw Enumerable protocol so that encoding
  # can halt on the first element that fails, returning the error as-is.
  case Enumerable.reduce(item_list, {:cont, {[], 0}},
    fn(item, {acc, i}) ->
      case accumulate_elist(Integer.to_string(i), item, acc) do
        {:cont, acc} -> {:cont, {acc, i+1}}
        {:halt, error} -> {:halt, error}
      end
    end) do
    # :halted means accumulate_elist produced an %Error{}; propagate it.
    {:halted, error} -> error
    {:done, {bufferAcc, _}} ->
      # Elements were prepended as they were encoded; reverse to restore
      # input order before flattening and wrapping into a document.
      bufferAcc |> Enum.reverse |> IO.iodata_to_binary |> wrap_document
  end
end
@doc """
Wraps a bson document with size and trailing null character

The size prefix is a 32-bit little-endian signed integer and counts the
whole document: 4 size bytes + content + 1 terminator byte.
"""
def wrap_document(elist) do
  total = byte_size(elist) + 5
  <<total::32-little-signed, elist::binary, 0x00>>
end
@doc """
Wraps a string as a BSON string: a 32-bit little-endian size prefix
(byte length of the string plus the trailing null) followed by the string
bytes and a null terminator. Returned as iodata, not a flat binary.
"""
# DOC FIX: the previous @doc was a copy-paste of wrap_document's
# ("Wraps a bson document…"), which misdescribed this function.
def wrap_string(string), do: [<<(byte_size(string)+1)::32-little-signed>>, string, <<0x00>>]
@doc """
Accumulate element in an element list

On success the encoded element is prepended to the accumulator and the
reduction continues; on failure the accumulator (restored to input order)
is attached to the error and the reduction halts.
"""
def accumulate_elist(name, value, elist) do
  case element(name, value) do
    %Error{} = err ->
      {:halt, %Error{err | acc: [Enum.reverse(elist) | err.acc]}}

    encoded ->
      {:cont, [encoded | elist]}
  end
end
@doc """
Returns encoded element using its name and value

The element is iodata: type byte, name, null terminator, encoded value.
On failure the element name is prepended to the error's `what` path.
"""
def element(name, value) do
  case Bson.Encoder.Protocol.encode(value) do
    %Error{} = err -> %Error{err | what: [name | err.what]}
    {tag, payload} -> [tag, name, <<0x00>>, payload]
  end
end
end
|
hello_elixir/deps/bson/lib/bson_encoder.ex
| 0.896243
| 0.703362
|
bson_encoder.ex
|
starcoder
|
defmodule Set do
  @moduledoc %S"""
  This module specifies the Set API expected to be
  implemented by different representations.
  It also provides functions that redirect to the
  underlying Set, allowing a developer to work with
  different Set implementations using one API.
  To create a new set, use the `new` functions defined
  by each set type:
      HashSet.new #=> creates an empty HashSet
  For simplicity's sake, in the examples below every time
  `new` is used, it implies one of the module-specific
  calls like above.
  ## Protocols
  Sets are required to implement the `Enumerable` protocol,
  allowing one to write:
      Enum.each(set, fn k ->
        IO.inspect k
      end)
  ## Match
  Sets are required to implement all operations
  using the match (`===`) operator. Any deviation from
  this behaviour should be avoided and explicitly documented.
  """

  # NOTE(review): the `%S` sigil, `use Behaviour`, and `defcallback` are
  # pre-1.0 Elixir constructs; this module targets that era of the language.
  use Behaviour

  @type value :: any
  @type values :: [ value ]
  # Sets are records (tagged tuples) whose first element is the
  # implementation module — see the `target/1` macro below.
  @type t :: tuple

  defcallback new :: t
  defcallback new(Enum.t) :: t
  defcallback new(Enum.t, (any -> any)) :: t
  defcallback delete(t, value) :: t
  defcallback difference(t, t) :: t
  defcallback disjoint?(t, t) :: boolean
  defcallback empty(t) :: t
  defcallback equal?(t, t) :: boolean
  defcallback intersection(t, t) :: t
  defcallback member?(t, value) :: boolean
  defcallback put(t, value) :: t
  defcallback size(t) :: non_neg_integer
  defcallback subset?(t, t) :: boolean
  defcallback to_list(t) :: list()
  defcallback union(t, t) :: t

  # Expands to the implementation module stored as element 0 of the set
  # tuple, so each API function can delegate to the concrete set type.
  # Raises ArgumentError (via unsupported_set/1) for non-tuple arguments.
  defmacrop target(set) do
    quote do
      if is_tuple(unquote(set)) do
        elem(unquote(set), 0)
      else
        unsupported_set(unquote(set))
      end
    end
  end

  @doc """
  Deletes `value` from `set`.
  ## Examples
      iex> s = HashSet.new([1, 2, 3])
      ...> Set.delete(s, 4) |> Enum.sort
      [1, 2, 3]
      iex> s = HashSet.new([1, 2, 3])
      ...> Set.delete(s, 2) |> Enum.sort
      [1, 3]
  """
  @spec delete(t, value) :: t
  def delete(set, value) do
    target(set).delete(set, value)
  end

  @doc """
  Returns a set that is `set1` without the members of `set2`.
  ## Examples
      iex> Set.difference(HashSet.new([1,2]), HashSet.new([2,3,4])) |> Enum.sort
      [1]
  """
  @spec difference(t, t) :: t
  def difference(set1, set2) do
    # Dispatches on set1's implementation; set2 may be any set type the
    # implementation accepts.
    target(set1).difference(set1, set2)
  end

  @doc """
  Checks if `set1` and `set2` have no members in common.
  ## Examples
      iex> Set.disjoint?(HashSet.new([1, 2]), HashSet.new([3, 4]))
      true
      iex> Set.disjoint?(HashSet.new([1, 2]), HashSet.new([2, 3]))
      false
  """
  @spec disjoint?(t, t) :: boolean
  def disjoint?(set1, set2) do
    target(set1).disjoint?(set1, set2)
  end

  @doc """
  Returns an empty set of the same type as `set`.
  """
  @spec empty(t) :: t
  def empty(set) do
    target(set).empty(set)
  end

  @doc """
  Checks if `set1` and `set2` are equal.
  ## Examples
      iex> Set.equal?(HashSet.new([1, 2]), HashSet.new([2, 1, 1]))
      true
      iex> Set.equal?(HashSet.new([1, 2]), HashSet.new([3, 4]))
      false
  """
  @spec equal?(t, t) :: boolean
  def equal?(set1, set2) do
    target(set1).equal?(set1, set2)
  end

  @doc """
  Returns a set containing only members in common between `set1` and `set2`.
  ## Examples
      iex> Set.intersection(HashSet.new([1,2]), HashSet.new([2,3,4])) |> Enum.sort
      [2]
      iex> Set.intersection(HashSet.new([1,2]), HashSet.new([3,4])) |> Enum.sort
      []
  """
  @spec intersection(t, t) :: t
  def intersection(set1, set2) do
    target(set1).intersection(set1, set2)
  end

  @doc """
  Checks if `set` contains `value`.
  ## Examples
      iex> Set.member?(HashSet.new([1, 2, 3]), 2)
      true
      iex> Set.member?(HashSet.new([1, 2, 3]), 4)
      false
  """
  @spec member?(t, value) :: boolean
  def member?(set, value) do
    target(set).member?(set, value)
  end

  @doc """
  Inserts `value` into `set` if it does not already contain it.
  ## Examples
      iex> Set.put(HashSet.new([1, 2, 3]), 3) |> Enum.sort
      [1, 2, 3]
      iex> Set.put(HashSet.new([1, 2, 3]), 4) |> Enum.sort
      [1, 2, 3, 4]
  """
  @spec put(t, value) :: t
  def put(set, value) do
    target(set).put(set, value)
  end

  @doc """
  Returns the number of elements in `set`.
  ## Examples
      iex> Set.size(HashSet.new([1, 2, 3]))
      3
  """
  @spec size(t) :: non_neg_integer
  def size(set) do
    target(set).size(set)
  end

  @doc """
  Checks if `set1`'s members are all contained in `set2`.
  ## Examples
      iex> Set.subset?(HashSet.new([1, 2]), HashSet.new([1, 2, 3]))
      true
      iex> Set.subset?(HashSet.new([1, 2, 3]), HashSet.new([1, 2]))
      false
  """
  @spec subset?(t, t) :: boolean
  def subset?(set1, set2) do
    target(set1).subset?(set1, set2)
  end

  @doc """
  Converts `set` to a list.
  ## Examples
      iex> HashSet.to_list(HashSet.new([1, 2, 3])) |> Enum.sort
      [1,2,3]
  """
  @spec to_list(t) :: list
  def to_list(set) do
    target(set).to_list(set)
  end

  @doc """
  Returns a set containing all members of `set1` and `set2`.
  ## Examples
      iex> Set.union(HashSet.new([1,2]), HashSet.new([2,3,4])) |> Enum.sort
      [1,2,3,4]
  """
  @spec union(t, t) :: t
  def union(set1, set2) do
    target(set1).union(set1, set2)
  end

  # Raised (via target/1) when the argument is not a set record.
  defp unsupported_set(set) do
    raise ArgumentError, message: "unsupported set: #{inspect set}"
  end
end
|
lib/elixir/lib/set.ex
| 0.918288
| 0.62671
|
set.ex
|
starcoder
|
defmodule BSV.Transaction.Output do
  @moduledoc """
  Module for parsing and serialising transaction outputs.
  """
  alias BSV.Script
  alias BSV.Util
  alias BSV.Util.VarBin

  defstruct satoshis: 0, script: nil

  @typedoc "Transaction output"
  @type t :: %__MODULE__{
          satoshis: integer,
          script: BSV.Script.t() | nil
        }

  # Byte size of a standard P2PKH locking script, used to estimate the size
  # of outputs that don't have a script yet.
  @p2pkh_script_size 25

  @doc """
  Parse the given binary into a single transaction output. Returns a tuple
  containing the transaction output and the remaining binary data.

  ## Options

  The accepted options are:

  * `:encoding` - Optionally decode the binary with either the `:base64` or `:hex` encoding scheme.
  * `:filter` - Optional function applied to the parsed output struct before it is returned (defaults to the identity function).

  ## Examples

      BSV.Transaction.Output.parse(data)
      {%BSV.Trasaction.Output{}, ""}
  """
  @spec parse(binary, keyword) :: {__MODULE__.t(), binary}
  def parse(data, options \\ []) do
    encoding = Keyword.get(options, :encoding)
    filter = Keyword.get(options, :filter) || (& &1)

    # The first 8 bytes are the satoshi value, little-endian.
    {<<satoshis::little-64>>, data} =
      data
      |> Util.decode(encoding)
      |> VarBin.read_bytes(8)

    # Followed by a varint-prefixed locking script.
    {script, data} = VarBin.parse_bin(data)

    output =
      struct(__MODULE__,
        satoshis: satoshis,
        script: Script.parse(script)
      )
      |> filter.()

    {output, data}
  end

  @doc """
  Serialises the given transaction output struct into a binary.

  ## Options

  The accepted options are:

  * `:encoding` - Optionally encode the returned binary with either the `:base64` or `:hex` encoding scheme.

  ## Examples

      BSV.Transaction.Output.serialize(output)
      <<binary>>
  """
  # DOC FIX: the option was previously documented as `:encode`, but the code
  # reads `:encoding` — the documentation now matches the implementation.
  @spec serialize(__MODULE__.t(), keyword) :: binary
  def serialize(%__MODULE__{} = output, options \\ []) do
    encoding = Keyword.get(options, :encoding)

    script =
      case output.script do
        %Script{} = s -> Script.serialize(s) |> VarBin.serialize_bin()
        _ -> <<>>
      end

    <<output.satoshis::little-64, script::binary>>
    |> Util.encode(encoding)
  end

  @doc """
  Returns the size of the given output. If the output has a script, it's actual
  size is calculated, otherwise a P2PKH output is estimated.
  """
  @spec get_size(__MODULE__.t()) :: integer
  # 8 bytes for the satoshi value plus the estimated P2PKH script size.
  def get_size(%__MODULE__{script: script}) when is_nil(script),
    do: 8 + @p2pkh_script_size

  def get_size(%__MODULE__{} = output),
    do: serialize(output) |> byte_size
end
|
lib/bsv/transaction/output.ex
| 0.907753
| 0.538134
|
output.ex
|
starcoder
|
defmodule Traverse.Steps.Step do
  @moduledoc """
  Behaviour and GenServer scaffolding for a single workflow step.

  A step module `use`s this module, implements `run_step/2`, and reports
  completion via `done/0` / `done/1`, which notifies the parent process
  with a `{:step_done, {step_id, step_state, next_step}}` message.
  """

  @callback run_step(definition, state) :: :started | :next | nil | {next_step, step_state} when definition: %{}, state: %{}, step_state: %{} | nil, next_step: %{} | :next | nil

  defmodule Definition do
    # Runtime state held by every step process.
    defstruct workflow_id: nil,
              step_id: nil,
              step_definition: %{},
              state: %{},
              parent: nil
  end

  @doc """
  Starts a step process for `definition` and returns `{:ok, step_id}`.

  The GenServer module is resolved from the definition's `stepType` field;
  `String.to_existing_atom/1` prevents atom creation from external input.
  """
  def start_step(workflow_id, definition, state, parent \\ self()) do
    step_id = UUID.uuid4()
    GenServer.start_link(
      String.to_existing_atom("Elixir.#{definition.stepType}"),
      %Definition{workflow_id: workflow_id, step_id: step_id, step_definition: definition, state: state, parent: parent},
      name: {:global, step_id}
    )
    {:ok, step_id}
  end

  defmacro __using__(_) do
    quote location: :keep do
      use GenServer
      import Traverse.Steps.Step
      @behaviour Traverse.Steps.Step

      def init(definition) do
        # Kick off execution asynchronously so init/1 returns immediately.
        GenServer.cast(self(), :execute)
        {:ok, definition}
      end

      def handle_cast(:execute, definition) do
        case run_step(definition.step_definition, definition.state) do
          # :started means the step will call done/0,1 itself later.
          :started -> nil
          response -> done(response)
        end
        {:noreply, definition}
      end

      def handle_cast({:done, :next}, definition), do: handle_cast({:done, {:next, nil}}, definition)
      def handle_cast({:done, {:next, step_state}}, definition) do
        # Resolve the :next placeholder to the configured next step.
        handle_cast({:done, {Map.get(definition.step_definition, :next), step_state}}, definition)
      end
      # BUG FIX: this head was `{:done, next_step, step_state}` (a 3-tuple),
      # which could never match the `{:done, {next, state}}` 2-tuple messages
      # cast by done/1 and by the clauses above; the notification was also
      # cast to the state struct itself instead of the parent process.
      def handle_cast({:done, {next_step, step_state}}, definition) do
        GenServer.cast(definition.parent, {:step_done, {definition.step_id, step_state, next_step}})
        {:noreply, definition}
      end

      def done(), do: done(:next)
      def done(nil), do: done({nil, nil})
      # BUG FIX: the original guard `when options: :next or {:next, %{}} or …`
      # is not valid Elixir and would not compile. Accepted shapes are the
      # atom :next or a {next_step, step_state} tuple.
      def done(options) when options == :next or is_tuple(options) do
        GenServer.cast(self(), {:done, options})
      end
    end
  end
end
|
lib/traverse/steps/step.ex
| 0.542136
| 0.415492
|
step.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.