defmodule Curve25519 do
import Bitwise
@moduledoc """
Curve25519 Diffie-Hellman functions
"""
@typedoc """
public or secret key
"""
@type key :: binary
@p 57_896_044_618_658_097_711_785_492_504_343_953_926_634_992_332_820_282_019_728_792_003_956_564_819_949
@a 486_662
defp clamp(c) do
c
|> band(~~~7)
|> band(~~~(128 <<< (8 * 31)))
|> bor(64 <<< (8 * 31))
end
# :math.pow/2 yields floats, and we only need squaring here
defp square(x), do: x * x
defp expmod(_b, 0, _m), do: 1
defp expmod(b, e, m) do
t = b |> expmod(div(e, 2), m) |> square |> rem(m)
if (e &&& 1) == 1, do: rem(t * b, m), else: t
end
defp inv(x), do: x |> expmod(@p - 2, @p)
defp add({xn, zn}, {xm, zm}, {xd, zd}) do
x = (xm * xn - zm * zn) |> square |> (&(&1 * 4 * zd)).()
z = (xm * zn - zm * xn) |> square |> (&(&1 * 4 * xd)).()
{rem(x, @p), rem(z, @p)}
end
defp double({xn, zn}) do
x = (square(xn) - square(zn)) |> square
z = 4 * xn * zn * (square(xn) + @a * xn * zn + square(zn))
{rem(x, @p), rem(z, @p)}
end
defp curve25519(n, base) do
one = {base, 1}
two = double(one)
{{x, z}, _} = nth_mult(n, {one, two})
rem(x * inv(z), @p)
end
defp nth_mult(1, basepair), do: basepair
defp nth_mult(n, {one, two}) do
{pm, pm1} = n |> div(2) |> nth_mult({one, two})
if (n &&& 1) == 1, do: {add(pm, pm1, one), double(pm1)}, else: {double(pm), add(pm, pm1, one)}
end
@doc """
Generate a secret/public key pair
Returned tuple contains `{random_secret_key, derived_public_key}`
"""
@spec generate_key_pair :: {key, key}
def generate_key_pair do
# This algorithm is supposed to be resilient against poor RNG, but use the best we can
secret = :crypto.strong_rand_bytes(32)
{secret, derive_public_key(secret)}
end
@doc """
Derive a shared secret for a secret and public key
Given our secret key and our partner's public key, returns a
shared secret which can be derived by the partner in a complementary way.
"""
@spec derive_shared_secret(key, key) :: key | :error
def derive_shared_secret(<<our_secret::little-size(256)>>, <<their_public::little-size(256)>>) do
shared_secret =
our_secret
|> clamp
|> curve25519(their_public)
<<shared_secret::little-size(256)>>
end
def derive_shared_secret(_ours, _theirs), do: :error
@doc """
Derive the public key from a secret key
"""
@spec derive_public_key(key) :: key | :error
def derive_public_key(<<our_secret::little-size(256)>>) do
public_key =
our_secret
|> clamp
|> curve25519(9)
<<public_key::little-size(256)>>
end
def derive_public_key(_ours), do: :error
end
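# A minimal usage sketch (illustrative, not part of the module): both parties
# derive the same shared secret from their own secret key and the partner's
# public key.
#
#     {alice_secret, alice_public} = Curve25519.generate_key_pair()
#     {bob_secret, bob_public} = Curve25519.generate_key_pair()
#
#     shared_a = Curve25519.derive_shared_secret(alice_secret, bob_public)
#     shared_b = Curve25519.derive_shared_secret(bob_secret, alice_public)
#     true = shared_a == shared_b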
# ==== end of lib/curve25519.ex ====
defmodule Exred.Node.GrpcTwin do
@moduledoc """
Represents an external node that connects to Exred via gRPC.
It needs the gRPC Daemon Node running in the flow.
External nodes can connect to Exred using the gRPC protocol.
The external client needs to implement the exredrpc protocol
(see the exredrpc.proto file under the priv dir).
The protocol is a simple bi-directional streaming protocol.
Both the internal Exred node (this node) and the external node need to connect to the broker using a bond_id.
Once the two sides are bonded the Broker will start forwarding messages between them.
The external node just needs to make the RPC call and pass in the bond_id in the context as metadata.
The gRPC server on the Elixir side will initiate the bonding for the external node.
"""
@name "gRPC Twin"
@category "function"
@info @moduledoc
@config %{
name: %{
info: "Visible name",
value: @name,
type: "string",
attrs: %{max: 30}
},
bond_id: %{
info: "Common id used by this node and the external node to establish the connection",
value: "bondid",
type: "string",
attrs: %{max: 15}
}
}
@ui_attributes %{left_icon: "extension"}
use Exred.NodePrototype
require Logger
@impl true
def node_init(state) do
me = %Exredrpc.Twin.Ex{process: self()}
bond_id = state.config.bond_id.value
case Exredrpc.Broker.bond_ex(bond_id, me) do
:ok ->
Logger.info("registered with Broker using bond_id #{inspect(bond_id)}")
{:error, err} ->
Logger.error("failed to bond: #{inspect(err)}")
end
state
end
@impl true
# coming from the broker, forward it out to connected nodes
def handle_msg({:grpc_incoming, msg}, state) do
Logger.debug("got: #{inspect(msg)}")
{msg, state}
end
# coming from a node in the flow, send it to the broker to relay it out through grpc
def handle_msg(msg, state) do
Exredrpc.Broker.msg_from_ex(msg)
{nil, state}
end
end
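# Illustrative message flow through the callbacks above (the node prototype
# invokes these; the message shapes are hypothetical):
#
#     # broker -> flow: a gRPC message is re-emitted to connected nodes
#     {msg, state} = handle_msg({:grpc_incoming, %{payload: "hi"}}, state)
#
#     # flow -> broker: any other message is relayed out through gRPC
#     {nil, state} = handle_msg(%{payload: "hello"}, state)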
# ==== end of lib/exred_node_grpc_twin.ex ====
defmodule GroupManager do
@moduledoc """
`GroupManager` is the top level wrapper over the group management
services provided by other modules:
- `Chatter` is responsible for network communication
- `Chatter.PeerDB` is a wrapper over the Chatter's knowledge about peers, stored in ETS
- `GroupManager.TopologyDB` stores information about groups and their topology (ETS)
"""
use Application
require Logger
require Chatter.NetID
require GroupManager.Data.Item
alias Chatter.NetID
alias Chatter.MessageHandler
alias Chatter.SerializerDB
alias Chatter
alias GroupManager.TopologyDB
alias GroupManager.Receiver
alias GroupManager.Data.Item
alias GroupManager.Data.TimedItem
alias GroupManager.Data.TimedSet
alias GroupManager.Data.Message
@doc """
Helper macro used in function guards to validate group names.
"""
defmacro is_valid_group_name(name) do
case Macro.Env.in_guard?(__CALLER__) do
true ->
quote do
is_binary(unquote(name)) and byte_size(unquote(name)) > 0
end
false ->
quote bind_quoted: binding() do
is_binary(name) and byte_size(name) > 0
end
end
end
@doc false
def start(_type, args)
do
:random.seed(:os.timestamp)
# register encoder and decoder for the Message type
msg = Message.new("dummy")
extract_fn = fn(id) -> Message.extract_netids(id) end
encode_fn = fn(id, ids) -> Message.encode_with(id, ids) end
decode_fn = fn(bin, ids) -> Message.decode_with(bin, ids) end
dispatch_fn = fn(msg) -> Receiver.handle(Receiver.locate!, msg) end
encdec = MessageHandler.new(msg, extract_fn, encode_fn, decode_fn, dispatch_fn)
ser_db = SerializerDB.locate!
SerializerDB.add(ser_db, encdec)
{:ok, _encdec} = SerializerDB.get(ser_db, msg)
GroupManager.Supervisor.start_link(args)
end
@doc """
Calling this function registers our interest in a group. It first checks what
we already know about the `group_name` and combines the information with our
intent to participate in the group. This combined information is the group
topology stored in the `TopologyDB`.
When the local `TopologyDB` is updated with our request to participate, we send
the new topology over to others. `Chatter` makes sure we both multicast the
new topology and also broadcast to the `peers` (parameter, a list of `NetID`s).
Parameters:
- `group_name` a non-empty string
- `peers` a list of `NetID`s
Returns: :ok or an exception is raised
Example `NetID`:
```
iex(1)> GroupManager.my_id
{:net_id, {192, 168, 1, 100}, 29999}
```
Example usage:
```
iex(2)> GroupManager.join("G", [])
:ok
```
"""
@spec join(binary, list(NetID.t)) :: :ok
def join(group_name, peers)
when is_valid_group_name(group_name) and
is_list(peers)
do
:ok = NetID.validate_list!(peers)
# 1: prepare a new message with the help of TopologyDB
topo_db = TopologyDB.locate!
item = Item.new(my_id) |> Item.op(:get)
{:ok, _} = topo_db |> TopologyDB.add_item(group_name, item)
{:ok, msg} = topo_db |> TopologyDB.get(group_name)
# 2: broadcast the new message
:ok = Chatter.broadcast(peers, msg)
end
@doc """
See `join(group_name, peers)`.
The only difference is that this function determines the members of the group with
the help of the `GroupManager.members(group_name)` function. The group
membership information comes from the `TopologyDB`.
Once it has gathered the group members, it calls `join(group_name, peers)` with that
member list.
Example usage:
```
iex(2)> GroupManager.join("G")
:ok
```
"""
@spec join(binary) :: :ok
def join(group_name)
when is_valid_group_name(group_name)
do
others = members(group_name)
join(group_name, others)
end
@doc """
Calling this function tells others that we leave the group. It first checks what
we already know about the `group_name` and for each non-removal `Item` it generates
a remove item and merges this into the topology. This effectively replaces all
`:get` and `:add` items in the `TopologyDB` for our `NetID`.
When the local `TopologyDB` is updated with our request, we send
the new topology over to others. `Chatter` makes sure we both multicast the
new topology and also broadcast to the `peers` (parameter, a list of `NetID`s).
Parameters:
- `group_name` a non-empty string
Returns: :ok or an exception is raised
Example usage:
```
iex(2)> GroupManager.leave("G")
:ok
```
"""
@spec leave(binary) :: :ok
def leave(group_name)
when is_valid_group_name(group_name)
do
# 1: prepare a leave message with the help of TopologyDB
topo_db = TopologyDB.locate!
item = Item.new(my_id) |> Item.op(:rmv)
{:ok, _} = topo_db |> TopologyDB.add_item(group_name, item)
{:ok, msg} = topo_db |> TopologyDB.get(group_name)
# 2: remove all other group participation from the message
topology = Message.topology(msg)
msg = List.foldl(topology, msg, fn(x,acc) ->
del_item = TimedItem.item(x) |> Item.op(:rmv)
if( Item.member(del_item) == my_id )
do
local_clock = TimedItem.updated_at(x)
Message.add(acc, TimedItem.construct_next(del_item, local_clock))
else
acc
end
end)
# 3: update topo DB and get a new msg to be distributed
:ok = topo_db |> TopologyDB.add(msg)
{:ok, msg} = topo_db |> TopologyDB.get(group_name)
# 4: broadcast the new message
others = members(group_name)
:ok = Chatter.broadcast(others, msg)
end
@doc """
returns the list of nodes participating in a group in the form
of a list of `NetID`s.
Example usage:
```
iex(1)> GroupManager.my_id
{:net_id, {192, 168, 1, 100}, 29999}
iex(2)> GroupManager.join("G")
:ok
iex(3)> GroupManager.members("G")
[{:net_id, {192, 168, 1, 100}, 29999}]
```
"""
@spec members(binary) :: list(NetID.t)
def members(group_name)
when is_valid_group_name(group_name)
do
case TopologyDB.get(TopologyDB.locate!, group_name)
do
{:error, :not_found} -> []
{:ok, msg} -> Message.members(msg)
end
end
@doc """
returns the list of groups this node has ever seen. Note that this
checks the `TopologyDB` for the list of groups. `TopologyDB` may receive
updates from other nodes through UDP multicast, so the list of
groups may contain group names this node has never participated in.
Example usage:
```
iex(1)> GroupManager.groups
[]
iex(2)> GroupManager.join("G")
:ok
iex(3)> GroupManager.leave("G")
:ok
iex(4)> GroupManager.my_groups
[]
iex(5)> GroupManager.groups
["G"]
```
"""
@spec groups() :: list(binary)
def groups()
do
TopologyDB.groups_
end
@doc """
returns the list of groups we either want to receive messages from (`:get` `Item`)
or are actively participating in (`:add` `Item`).
Example usage:
```
iex(1)> GroupManager.join("G")
:ok
iex(2)> GroupManager.my_groups
["G"]
```
"""
@spec my_groups() :: list(binary)
def my_groups()
do
get_lst = TopologyDB.groups_(:get, my_id)
add_lst = TopologyDB.groups_(:add, my_id)
(get_lst ++ add_lst) |> Enum.uniq
end
@doc """
returns the topology of the given group in the form of `list(TimedItem.t)`.
The `TimedItem` element has an `Item` member that is the topology information
together with a `LocalClock` which tells when the change has happened.
Example usage:
```
iex(1)> GroupManager.topology("G")
[]
iex(2)> GroupManager.join("G")
:ok
iex(3)> GroupManager.topology("G")
[{:timed_item,
{:item, {:net_id, {192, 168, 1, 100}, 29999}, :get, 0, 4294967295, 0},
{:local_clock, {:net_id, {192, 168, 1, 100}, 29999}, 0}}]
iex(4)> GroupManager.add_item("G",0,255,11000)
{:ok,
{:timed_item, {:item, {:net_id, {192, 168, 1, 100}, 29999}, :add, 0, 255, 11000},
{:local_clock, {:net_id, {192, 168, 1, 100}, 29999}, 1}}}
iex(5)> GroupManager.topology("G")
[{:timed_item, {:item, {:net_id, {192, 168, 1, 100}, 29999}, :add, 0, 255, 11000},
{:local_clock, {:net_id, {192, 168, 1, 100}, 29999}, 1}},
{:timed_item, {:item, {:net_id, {192, 168, 1, 100}, 29999}, :get, 0, 4294967295, 0},
{:local_clock, {:net_id, {192, 168, 1, 100}, 29999}, 0}}]
```
Explanation:
- `iex(1)`: when topology is empty it returns an empty list
- `iex(2)`: joining the group, which means we add a :get `Item` to the topology
- `iex(3)`: the topology has a single :get `Item`
- `iex(4)`: register that we want to serve the range 0-255 with port=11000
- `iex(5)`: the topology now has two items, the `:get` and the `:add`
"""
@spec topology(binary) :: list(TimedItem.t)
def topology(group_name)
when is_valid_group_name(group_name)
do
case TopologyDB.get_(group_name)
do
{:error, :not_found} ->
[]
{:ok, msg} ->
Message.items(msg)
|> TimedSet.items
end
end
@doc """
see `topology(group_name)` for more information
This variant of the `topology` function returns the filtered list of
the topology items. The result set only has the items whose `op` field
equals the `filter` parameter.
The filter parameter can be `:add`, `:rmv` or `:get`.
"""
@spec topology(binary, :add|:rmv|:get) :: list(TimedItem.t)
def topology(group_name, filter)
when is_valid_group_name(group_name) and
filter in [:add, :rmv, :get]
do
case TopologyDB.get_(group_name)
do
{:error, :not_found} ->
[]
{:ok, msg} ->
Message.items(msg)
|> TimedSet.items
|> Enum.filter(fn(x) -> (filter == TimedItem.item(x) |> Item.op) end)
end
end
@doc """
adds a topology item as an `:add` `Item` which represents a range that the
given node wants to serve within the group. The responsibility of the group is
represented by a key range 0..0xffffffff. Each node tells what part of the range
it wants to serve. Further ranges can be added and removed at the node's
discretion, for example based on its capacity, speed, or other factors.
Parameters:
- `group_name`
- `from` and `to` represent the boundaries of the range
- `port` is a hint to other nodes based on the node's capacities
Example usage:
```
iex(1)> GroupManager.add_item("G",1,2,3)
{:ok,
{:timed_item, {:item, {:net_id, {192, 168, 1, 100}, 29999}, :add, 1, 2, 3},
{:local_clock, {:net_id, {192, 168, 1, 100}, 29999}, 0}}}
iex(2)> GroupManager.my_groups
["G"]
iex(3)> GroupManager.topology("G")
[{:timed_item, {:item, {:net_id, {192, 168, 1, 100}, 29999}, :add, 1, 2, 3},
{:local_clock, {:net_id, {192, 168, 1, 100}, 29999}, 0}}]
```
Explanation:
- `iex(1)`: add_item is also an implicit join to the group if it has not joined before
- `iex(2)`: `my_groups` show that we are now part of the `G` group
- `iex(3)`: the topology shows our new `:add` item
"""
@spec add_item(binary, integer, integer, integer) :: {:ok, TimedItem.t}
def add_item(group_name, from, to, port)
when is_valid_group_name(group_name)
do
# 1: prepare a new message with the help of TopologyDB
item = Item.new(my_id) |> Item.set(:add, from, to, port)
topo_db = TopologyDB.locate!
{:ok, timed_item} = topo_db |> TopologyDB.add_item(group_name, item)
{:ok, msg} = topo_db |> TopologyDB.get(group_name)
# 2: gather peers
peers = members(group_name)
# 3: broadcast the new message
:ok = Chatter.broadcast(peers, msg)
{:ok, timed_item}
end
@doc """
similar to `add_item` except it adds a :rmv item to signify it no longer serves
the given range
"""
@spec remove_item(binary, integer, integer, integer) :: {:ok, TimedItem.t}
def remove_item(group_name, from, to, port)
when is_valid_group_name(group_name)
do
# 1: prepare a new message with the help of TopologyDB
item = Item.new(my_id) |> Item.set(:rmv, from, to, port)
topo_db = TopologyDB.locate!
{:ok, timed_item} = topo_db |> TopologyDB.add_item(group_name, item)
{:ok, msg} = topo_db |> TopologyDB.get(group_name)
# 2: gather peers
peers = members(group_name)
# 3: broadcast the new message
:ok = Chatter.broadcast(peers, msg)
{:ok, timed_item}
end
@doc """
returns our local identifier as a `NetID`
Example usage:
```
iex(1)> GroupManager.my_id
{:net_id, {192, 168, 1, 100}, 29999}
```
"""
@spec my_id() :: NetID.t
def my_id(), do: Chatter.local_netid
end
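# A minimal lifecycle sketch assembled from the examples above (assumes the
# GroupManager application is running):
#
#     :ok = GroupManager.join("demo")   # adds a :get item for my_id and broadcasts
#     GroupManager.my_groups()          #=> ["demo"]
#     GroupManager.members("demo")      #=> [GroupManager.my_id()]
#     :ok = GroupManager.leave("demo")  # replaces our items with :rmv entries
#     GroupManager.my_groups()          #=> []
#     GroupManager.groups()             #=> ["demo"] - groups ever seen are kept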
# ==== end of lib/group_manager.ex ====
defmodule Benchee.Conversion.Count do
@moduledoc """
Unit scaling for counts, such that 1000000 can be converted to 1 Million.
Only benchee plugins should use this code.
"""
alias Benchee.Conversion.{Format, Scale, Unit}
@behaviour Scale
@behaviour Format
@one_billion 1_000_000_000
@one_million 1_000_000
@one_thousand 1_000
@units %{
billion: %Unit{
name: :billion,
magnitude: @one_billion,
label: "B",
long: "Billion"
},
million: %Unit{
name: :million,
magnitude: @one_million,
label: "M",
long: "Million"
},
thousand: %Unit{
name: :thousand,
magnitude: @one_thousand,
label: "K",
long: "Thousand"
},
one: %Unit{
name: :one,
magnitude: 1,
label: "",
long: ""
}
}
@type unit_atoms :: :one | :thousand | :million | :billion
@type units :: unit_atoms | Unit.t()
@doc """
Scales a value representing a count in ones into a larger unit if appropriate
## Examples
iex> {value, unit} = Benchee.Conversion.Count.scale(4_321.09)
iex> value
4.32109
iex> unit.name
:thousand
iex> {value, unit} = Benchee.Conversion.Count.scale(0.0045)
iex> value
0.0045
iex> unit.name
:one
"""
def scale(count) when count >= @one_billion do
scale_with_unit(count, :billion)
end
def scale(count) when count >= @one_million do
scale_with_unit(count, :million)
end
def scale(count) when count >= @one_thousand do
scale_with_unit(count, :thousand)
end
def scale(count) do
scale_with_unit(count, :one)
end
# Helper function for returning a tuple of {value, unit}
defp scale_with_unit(count, unit) do
{scale(count, unit), unit_for(unit)}
end
@doc """
Get a unit by its atom representation. If already handed a %Unit{} struct, it
just returns it.
## Examples
iex> Benchee.Conversion.Count.unit_for :thousand
%Benchee.Conversion.Unit{
name: :thousand,
magnitude: 1_000,
label: "K",
long: "Thousand"
}
iex> Benchee.Conversion.Count.unit_for(%Benchee.Conversion.Unit{
...> name: :thousand,
...> magnitude: 1_000,
...> label: "K",
...> long: "Thousand"
...>})
%Benchee.Conversion.Unit{
name: :thousand,
magnitude: 1_000,
label: "K",
long: "Thousand"
}
"""
def unit_for(unit) do
Scale.unit_for(@units, unit)
end
@doc """
Scales a value representing a count in ones into a specified unit
## Examples
iex> Benchee.Conversion.Count.scale(12345, :one)
12345.0
iex> Benchee.Conversion.Count.scale(12345, :thousand)
12.345
iex> Benchee.Conversion.Count.scale(12345, :billion)
1.2345e-5
iex> Benchee.Conversion.Count.scale(12345, :million)
0.012345
"""
def scale(count, unit) do
Scale.scale(count, unit, __MODULE__)
end
@doc """
Takes a value in a specified %Unit (or unit atom) and converts it to the equivalent value in another unit of measure.
## Examples
iex> {value, unit} = Benchee.Conversion.Count.convert({2500, :thousand}, :million)
iex> value
2.5
iex> unit.name
:million
"""
def convert(number_and_unit, desired_unit) do
Scale.convert(number_and_unit, desired_unit, __MODULE__)
end
@doc """
Finds the best unit for a list of counts. By default, chooses the most common
unit. In case of tie, chooses the largest of the most common units.
Pass `[strategy: :smallest]` to always return the smallest unit in the list.
Pass `[strategy: :largest]` to always return the largest unit in the list.
## Examples
iex> Benchee.Conversion.Count.best([23, 23_000, 34_000, 2_340_000]).name
:thousand
iex> Benchee.Conversion.Count.best([23, 23_000, 34_000, 2_340_000, 3_450_000]).name
:million
iex> Benchee.Conversion.Count.best([23, 23_000, 34_000, 2_340_000], strategy: :smallest).name
:one
iex> Benchee.Conversion.Count.best([23, 23_000, 34_000, 2_340_000], strategy: :largest).name
:million
"""
def best(list, opts \\ [strategy: :best])
def best(list, opts) do
Scale.best_unit(list, __MODULE__, opts)
end
@doc """
The raw count, unscaled.
## Examples
iex> Benchee.Conversion.Count.base_unit.name
:one
"""
def base_unit, do: unit_for(:one)
@doc """
Formats a number as a string, with a unit label. To specify the unit, pass
a tuple of `{value, unit_atom}` like `{1_234, :million}`
## Examples
iex> Benchee.Conversion.Count.format(45_678.9)
"45.68 K"
iex> Benchee.Conversion.Count.format(45.6789)
"45.68"
iex> Benchee.Conversion.Count.format({45.6789, :thousand})
"45.68 K"
iex> Benchee.Conversion.Count.format({45.6789, %Benchee.Conversion.Unit{long: "Thousand", magnitude: 1_000, label: "K"}})
"45.68 K"
"""
def format(count) do
Format.format(count, __MODULE__)
end
end
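# A combined usage sketch following the doctests above: scale a raw count,
# then format the scaled value with its unit label.
#
#     {value, unit} = Benchee.Conversion.Count.scale(4_321.09)
#     Benchee.Conversion.Count.format({value, unit}) #=> "4.32 K"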
# ==== end of lib/benchee/conversion/count.ex ====
defmodule Quark.FixedPoint do
@moduledoc ~S"""
Fixed point combinators generalize the idea of a recursive function. This can
be used to great effect, simplifying many definitions.
For example, here is the factorial function written in terms of `y/1`:
iex> fac = fn fac ->
...> fn
...> 0 -> 1
...> 1 -> 1
...> n -> n * fac.(n - 1)
...> end
...> end
...> factorial = y(fac)
...> factorial.(9)
362880
The resulting function will always be curried:
iex> import Quark.SKI, only: [s: 3]
...> one_run = y(&s/3)
...> {_, arity} = :erlang.fun_info(one_run, :arity)
...> arity
1
"""
import Quark.Partial
import Quark.Curry, only: [curry: 1]
defdelegate fix(), to: __MODULE__, as: :y
defdelegate fix(a), to: __MODULE__, as: :y
@doc ~S"""
The famous Y-combinator. The resulting function will always be curried.
## Examples
iex> fac = fn fac ->
...> fn
...> 0 -> 1
...> 1 -> 1
...> n -> n * fac.(n - 1)
...> end
...> end
...> factorial = y(fac)
...> factorial.(9)
362880
"""
@spec y(fun) :: fun
defpartial y(fun) do
(fn x -> x.(x) end).(fn y ->
curry(fun).(fn arg -> y.(y).(arg) end)
end)
end
@doc ~S"""
Alan Turing's fix-point combinator. This is the call-by-value formulation.
## Examples
iex> fac = fn fac ->
...> fn
...> 0 -> 1
...> 1 -> 1
...> n -> n * fac.(n - 1)
...> end
...> end
...> factorial = turing(fac)
...> factorial.(9)
362880
"""
@spec turing(fun) :: fun
defpartial turing(fun), do: turing_inner().(turing_inner()).(fun)
defpartialp turing_inner(x, y) do
cx = curry(x)
cy = curry(y)
cy.(&(cx.(cx).(cy).(&1)))
end
@doc ~S"""
A [normal order](https://wikipedia.org/wiki/Evaluation_strategy#Normal_order)
fixed point.
## Examples
iex> fac = fn fac ->
...> fn
...> 0 -> 1
...> 1 -> 1
...> n -> n * fac.(n - 1)
...> end
...> end
...> factorial = z(fac)
...> factorial.(9)
362880
"""
@spec z(fun, any) :: fun
defpartial z(g, v), do: g.(z(g)).(v)
end
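# `fix/1` is an alias for `y/1` (see the defdelegates above); a minimal sketch
# with a different recursive function:
#
#     sum_to = Quark.FixedPoint.fix(fn sum_to ->
#       fn
#         0 -> 0
#         n -> n + sum_to.(n - 1)
#       end
#     end)
#     sum_to.(4) #=> 10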
# ==== end of lib/quark/fixed_point.ex ====
defmodule Ockam.Bare.Union do
@moduledoc """
Extension for BARE schema
Supports simple tags for union types.
A union type can be defined as [type1: schema, type2: schema]
and can be encoded to and decoded from {:type1, data} or {:type2, data}.
"""
@type schema() :: :bare.spec()
@type extended_schema() :: schema() | [{atom(), schema()}]
@type match_error() :: {:error, {:unmatched_subtype, atom(), extended_schema()}}
## TODO: this might be moved to BARE lib
def encode({option, data}, schema) do
bare_schema = to_bare_schema(schema)
to_encode =
case Keyword.fetch(schema, option) do
{:ok, option_spec} ->
{option_spec, data}
:error ->
raise("Option #{inspect(option)} not found in spec #{inspect(schema)}")
end
:bare.encode(to_encode, bare_schema)
end
def encode(data, schema) do
bare_schema = to_bare_schema(schema)
:bare.encode(data, bare_schema)
end
def decode(data, extended_schema) do
bare_schema = to_bare_schema(extended_schema)
case :bare.decode(data, bare_schema) do
{:ok, decoded, ""} ->
match_extended_schema(decoded, extended_schema)
{:ok, wrong_data, rest} ->
{:error, {:unmatched_data, wrong_data, rest}}
{:error, error} ->
{:error, error}
end
end
@spec match_extended_schema({atom(), any()} | any(), extended_schema()) ::
{:ok, {atom(), any()}} | {:ok, any()} | match_error()
def match_extended_schema({subtype, decoded}, extended_schema) do
case List.keyfind(extended_schema, subtype, 1) do
nil -> {:error, {:unmatched_subtype, subtype, extended_schema}}
{tag, _subtype} -> {:ok, {tag, decoded}}
end
end
def match_extended_schema(decoded, _schema) do
{:ok, decoded}
end
## TODO: recursive tagged union: make it a part of bare.erl
@spec to_bare_schema(extended_schema()) :: schema()
def to_bare_schema(extended_schema) when is_list(extended_schema) do
{:union, Keyword.values(extended_schema)}
end
def to_bare_schema(extended_schema) do
extended_schema
end
end
defmodule Ockam.Bare.Variant do
@moduledoc """
Supports variant types.
Variant types are defined as {:variant, [atom() | {atom(), schema()}]}.
The tag is encoded as a bare enum, optionally followed by the field value in case
the variant has one.
"""
@type schema :: :bare.spec()
@type extended_schema() :: schema() | {:variant, [atom() | {atom(), schema()}]}
@spec encode(any(), extended_schema()) :: binary()
def encode(value, {:variant, ss} = schema) do
type = :bare.encode(enum_member(value), to_bare_schema(schema))
value = encode_value(enum_value(value), List.keyfind(ss, enum_member(value), 0))
<<type::binary, value::binary>>
end
def encode(value, schema), do: :bare.encode(value, schema)
@spec decode(binary(), extended_schema()) :: {:ok, any()} | {:error, any()}
def decode(data, {:variant, ss} = schema) do
case :bare.decode(data, to_bare_schema(schema)) do
{:ok, decoded, ""} ->
{:ok, decoded}
{:ok, decoded_tag, rest} ->
{_, subschema} = List.keyfind(ss, decoded_tag, 0)
with {:ok, decoded_value, ""} <- :bare.decode(rest, subschema) do
{:ok, {decoded_tag, decoded_value}}
end
{:error, reason} ->
{:error, reason}
end
end
def decode(data, schema), do: :bare.decode(data, schema)
def to_bare_schema({:variant, ext_schema}) do
{:enum, Enum.map(ext_schema, &enum_member/1)}
end
def to_bare_schema(schema), do: schema
def enum_member({tag, _}), do: tag
def enum_member(tag), do: tag
def enum_value({_tag, value}), do: value
def enum_value(_tag), do: nil
def encode_value(nil, nil), do: <<>>
def encode_value(value, {_tag, subschema}), do: :bare.encode(value, subschema)
end
defmodule Ockam.Bare.Extended do
@moduledoc """
Extension for BARE schema:
Supports simple tags for union types defined as [type1: schema(), type2: schema()] and
variants defined as {:variant, [atom() | {atom(), schema()}]}.
"""
alias Ockam.Bare.Union
alias Ockam.Bare.Variant
@type schema() :: any()
@type extended_schema() ::
schema() | [{atom(), schema()}] | {:variant, [atom() | {atom(), schema()}]}
## TODO: this might be moved to BARE lib
def encode(data, {:variant, _} = schema), do: Variant.encode(data, schema)
def encode(data, schema), do: Union.encode(data, schema)
def decode(data, {:variant, _} = schema), do: Variant.decode(data, schema)
def decode(data, schema), do: Union.decode(data, schema)
end
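# A usage sketch for the extended schemas (assuming the underlying bare.erl
# library, and that :string and :uint are valid schema atoms in it):
#
#     union_schema = [text: :string, number: :uint]
#     encoded = Ockam.Bare.Extended.encode({:text, "hello"}, union_schema)
#     {:ok, {:text, "hello"}} = Ockam.Bare.Extended.decode(encoded, union_schema)
#
#     variant_schema = {:variant, [:ping, pong: :uint]}
#     encoded = Ockam.Bare.Extended.encode({:pong, 1}, variant_schema)
#     {:ok, {:pong, 1}} = Ockam.Bare.Extended.decode(encoded, variant_schema)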
# ==== end of implementations/elixir/ockam/ockam/lib/ockam/protocol/bare_extended.ex ====
defmodule Graphvix.Subgraph do
@moduledoc """
[Internal] Models a subgraph or cluster for inclusion in a graph.
The functions included in this module are for internal use only. See
* `Graphvix.Graph.add_subgraph/3`
* `Graphvix.Graph.add_cluster/3`
for the public interface for creating and including subgraphs and clusters.
"""
import Graphvix.DotHelpers
defstruct [
id: nil,
vertex_ids: [],
global_properties: [node: [], edge: []],
subgraph_properties: [],
is_cluster: false
]
@doc false
def new(id, vertex_ids, is_cluster \\ false, properties \\ []) do
node_properties = Keyword.get(properties, :node, [])
edge_properties = Keyword.get(properties, :edge, [])
subgraph_properties = properties |> Keyword.delete(:node) |> Keyword.delete(:edge)
%Graphvix.Subgraph{
id: id_prefix(is_cluster) <> "#{id}",
is_cluster: is_cluster,
vertex_ids: vertex_ids,
global_properties: [
node: node_properties,
edge: edge_properties
],
subgraph_properties: subgraph_properties
}
end
@doc false
def to_dot(subgraph, graph) do
[vtab, _, _] = Graphvix.Graph.digraph_tables(graph)
vertices_from_graph = :ets.tab2list(vtab)
[
"subgraph #{subgraph.id} {",
global_properties_to_dot(subgraph),
subgraph_properties_to_dot(subgraph),
subgraph_vertices_to_dot(subgraph.vertex_ids, vertices_from_graph),
subgraph_edges_to_dot(subgraph, graph),
"}"
] |> List.flatten
|> compact()
|> Enum.map(&indent/1)
|> Enum.join("\n\n")
end
@doc false
def subgraph_edges_to_dot(subgraph, graph) do
subgraph
|> edges_with_both_vertices_in_subgraph(graph)
|> sort_elements_by_id()
|> elements_to_dot(fn {_, [:"$v" | v1], [:"$v" | v2], attributes} ->
"v#{v1} -> v#{v2} #{attributes_to_dot(attributes)}" |> String.trim |> indent
end)
end
@doc false
def both_vertices_in_subgraph?(vertex_ids, vid1, vid2) do
vid1 in vertex_ids && vid2 in vertex_ids
end
## Private
defp subgraph_vertices_to_dot(subgraph_vertex_ids, vertices_from_graph) do
subgraph_vertex_ids
|> vertices_in_this_subgraph(vertices_from_graph)
|> sort_elements_by_id()
|> elements_to_dot(fn {[_ | id], attributes} ->
[
"v#{id}",
attributes_to_dot(attributes)
] |> compact |> Enum.join(" ") |> indent
end)
end
defp vertices_in_this_subgraph(subgraph_vertex_ids, vertices_from_graph) do
vertices_from_graph
|> Enum.filter(fn {vid, _attributes} -> vid in subgraph_vertex_ids end)
end
defp subgraph_properties_to_dot(%{subgraph_properties: properties}) do
properties
|> Enum.map(fn {key, value} ->
indent(attribute_to_dot(key, value))
end)
|> compact()
|> return_joined_list_or_nil()
end
defp edges_with_both_vertices_in_subgraph(%{vertex_ids: vertex_ids}, graph) do
[_, etab, _] = Graphvix.Graph.digraph_tables(graph)
edges = :ets.tab2list(etab)
Enum.filter(edges, fn {_, vid1, vid2, _} ->
both_vertices_in_subgraph?(vertex_ids, vid1, vid2)
end)
end
defp id_prefix(_is_cluster = true), do: "cluster"
defp id_prefix(_is_cluster = false), do: "subgraph"
end
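# A construction sketch using new/4 above (vid1 and vid2 are hypothetical
# vertex ids taken from the parent graph):
#
#     Graphvix.Subgraph.new(0, [vid1, vid2], true, node: [color: "blue"], style: "filled")
#     #=> %Graphvix.Subgraph{
#     #=>   id: "cluster0",
#     #=>   is_cluster: true,
#     #=>   vertex_ids: [vid1, vid2],
#     #=>   global_properties: [node: [color: "blue"], edge: []],
#     #=>   subgraph_properties: [style: "filled"]
#     #=> }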
# ==== end of lib/graphvix/subgraph.ex ====
import TypeClass
defclass Witchcraft.Extend do
@moduledoc """
`Extend` is essentially "co`Chain`", meaning that it reverses the relationships
in `Chain`.
Instead of a flattening operation, we have `nest`, which wraps the data in
an additional layer of itself.
Instead of a `chain`ing function that acts on raw data and wraps it,
we have `extend`, which unwraps data, may modify it, and returns the unwrapped value.
## Type Class
An instance of `Witchcraft.Extend` must also implement `Witchcraft.Functor`,
and define `Witchcraft.Extend.nest/1`.
Functor [map/2]
↓
Extend [nest/1]
"""
alias __MODULE__
alias Witchcraft.Functor
extend Witchcraft.Functor
use Witchcraft.Internal, deps: [Witchcraft.Functor]
use Quark
@type t :: any()
@type colink :: (Extend.t() -> any())
where do
@doc """
Wrap some nestable data structure in another layer of itself
## Examples
iex> nest([1, 2, 3])
[[1, 2, 3], [2, 3], [3]]
"""
@spec nest(Extend.t()) :: Extend.t()
def nest(data)
end
properties do
def extend_composition(data) do
if is_function(data) do
use Witchcraft.Semigroup
a = &inspect/1
monoid = Enum.random([1, [], ""])
arg1 = generate(monoid)
arg2 = generate(monoid)
arg3 = generate(monoid)
f = fn x -> x <|> fn a -> a <> a end end
g = fn y -> y <|> fn b -> b <> b <> b end end
left =
a
|> Witchcraft.Extend.extend(g)
|> Witchcraft.Extend.extend(f)
right =
Witchcraft.Extend.curried_extend(a, fn x ->
x
|> Witchcraft.Extend.curried_extend(g)
|> f.()
end)
equal?(left.(arg1).(arg2).(arg3), right.(arg1).(arg2).(arg3))
else
a = generate(data)
f = fn x -> "#{inspect(x)}-#{inspect(x)}" end
g = fn y -> "#{inspect(y)} / #{inspect(y)} / #{inspect(y)}" end
left =
a
|> Witchcraft.Extend.curried_extend(g)
|> Witchcraft.Extend.curried_extend(f)
right =
Witchcraft.Extend.curried_extend(a, fn x ->
x
|> Witchcraft.Extend.curried_extend(g)
|> f.()
end)
equal?(left, right)
end
end
def naturality(data) do
a = generate(data)
if is_function(data) do
fun = &inspect/1
monoid = Enum.random([1, [], ""])
arg1 = generate(monoid)
arg2 = generate(monoid)
arg3 = generate(monoid)
left =
fun
|> Extend.nest()
|> Functor.lift(&Extend.nest/1)
right =
fun
|> Extend.nest()
|> Extend.nest()
equal?(left.(arg1).(arg2).(arg3), right.(arg1).(arg2).(arg3))
else
a
|> Extend.nest()
|> Functor.lift(&Extend.nest/1)
|> equal?(a |> Extend.nest() |> Extend.nest())
end
end
def extend_as_nest(data) do
if is_function(data) do
fun = &inspect/1
monoid = Enum.random([1, [], ""])
arg1 = generate(monoid)
arg2 = generate(monoid)
left = Witchcraft.Extend.extend(fun, &Quark.id/1)
right = Witchcraft.Extend.nest(fun)
equal?(left.(arg1).(arg2), right.(arg1).(arg2))
else
a = generate(data)
a
|> Witchcraft.Extend.extend(&Quark.id/1)
|> equal?(Witchcraft.Extend.nest(a))
end
end
def nest_as_extend(data) do
if is_function(data) do
f = fn x -> x <> x end
g = &inspect/1
monoid = Enum.random([1, [], ""])
arg1 = generate(monoid)
arg2 = generate(monoid)
left =
g
|> Extend.nest()
|> Functor.lift(&Functor.lift(&1, f))
right = Extend.nest(Functor.lift(g, f))
equal?(left.(arg1).(arg2), right.(arg1).(arg2))
else
a = generate(data)
f = &inspect/1
a
|> Extend.nest()
|> Functor.lift(&Functor.lift(&1, f))
|> equal?(Extend.nest(Functor.lift(a, f)))
end
end
end
@doc """
Similar to `Witchcraft.Chain.chain/2`, except that it reverses the input and output
types of the colinking function.
## Examples
Chain:
iex> Witchcraft.Chain.chain([1, 2, 3], fn x -> [x * 10] end)
[10, 20, 30]
Extend:
iex> extend([1, 2, 3], fn list -> List.first(list) * 10 end)
[10, 20, 30]
"""
@spec extend(Extend.t(), Extend.colink()) :: Extend.t()
def extend(data, colink) do
data
|> nest()
|> Functor.map(colink)
end
@doc """
`extend/2` with arguments flipped.
Makes piping composed colinks easier (see `compose_colink/2` and `pipe_compose_colink/2`).
## Examples
iex> fn list -> List.first(list) * 10 end
...> |> peel([1, 2, 3])
[10, 20, 30]
"""
@spec peel(Extend.colink(), Extend.t()) :: Extend.t()
def peel(colink, data), do: Extend.extend(data, colink)
@doc """
The same as `extend/2`, but with the colinking function curried.
## Examples
iex> [1, 2, 3]
...> |> curried_extend(fn(list, coeff) -> List.first(list) * coeff end)
...> |> extend(fn(funs) -> List.first(funs).(10) end)
[10, 20, 30]
"""
@spec curried_extend(Extend.t(), fun()) :: Extend.t()
def curried_extend(data, colink), do: Extend.extend(data, curry(colink))
@doc """
The same as `extend/2`, but with the colinking function curried.
## Examples
iex> fn(list) -> List.first(list) * 10 end
...> |> curried_peel([1, 2, 3])
[10, 20, 30]
"""
@spec curried_peel(Extend.t(), fun()) :: Extend.t()
def curried_peel(colink, data), do: curried_extend(data, colink)
@doc """
## Examples
iex> composed =
...> fn xs -> List.first(xs) * 10 end
...> |> compose_colink(fn ys -> List.first(ys) - 10 end)
...>
...> extend([1, 2, 3], composed)
[-90, -80, -70]
iex> fn xs -> List.first(xs) * 10 end
...> |> compose_colink(fn ys -> List.first(ys) - 10 end)
...> |> compose_colink(fn zs -> List.first(zs) * 50 end)
...> |> peel([1, 2, 3])
[400, 900, 1400]
iex> fn xs -> List.first(xs) * 10 end
...> |> compose_colink(fn ys -> List.first(ys) - 10 end)
...> |> compose_colink(fn zs -> List.first(zs) * 50 end)
...> |> compose_colink(fn zs -> List.first(zs) + 12 end)
...> |> peel([1, 2, 3])
[6400, 6900, 7400]
"""
@spec compose_colink(Extend.colink(), Extend.colink()) :: (Extend.t() -> any())
def compose_colink(g, f), do: fn x -> x |> curried_extend(f) |> g.() end
@doc """
`pipe_colink/2` with functions curried.
## Examples
iex> fn xs -> List.first(xs) * 10 end
...> |> pipe_compose_colink(fn ys -> List.first(ys) - 2 end)
...> |> peel([1, 2, 3])
[8, 18, 28]
iex> composed =
...> fn xs -> List.first(xs) * 10 end
...> |> pipe_compose_colink(fn ys -> List.first(ys) - 2 end)
...> |> pipe_compose_colink(fn zs -> List.first(zs) * 5 end)
...>
...> extend([1, 2, 3], composed)
[40, 90, 140]
iex> fn xs -> List.first(xs) * 10 end
...> |> pipe_compose_colink(fn ys -> List.first(ys) - 2 end)
...> |> pipe_compose_colink(fn zs -> List.first(zs) * 5 end)
...> |> pipe_compose_colink(fn zs -> List.first(zs) + 1 end)
...> |> peel([1, 2, 3])
[41, 91, 141]
"""
@spec pipe_compose_colink(Extend.colink(), Extend.colink()) :: (Extend.t() -> any())
def pipe_compose_colink(f, g), do: compose_colink(g, f)
end
definst Witchcraft.Extend, for: Function do
def nest(fun) do
use Quark
fn left ->
fn right ->
left
|> Witchcraft.Semigroup.append(right)
|> curry(fun).()
end
end
end
end
definst Witchcraft.Extend, for: List do
def nest([]), do: []
# Could be improved
def nest(entire = [_head | tail]), do: [entire | nest(tail)]
end
definst Witchcraft.Extend, for: Tuple do
custom_generator(_) do
import TypeClass.Property.Generator, only: [generate: 1]
{generate(nil), generate(nil)}
end
def nest({x, y}), do: {x, {x, y}}
end
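# Illustrative behavior of the instances above:
#
#     Witchcraft.Extend.nest([1, 2, 3]) #=> [[1, 2, 3], [2, 3], [3]]
#     Witchcraft.Extend.nest({1, 2})    #=> {1, {1, 2}}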
# ==== end of lib/witchcraft/extend.ex ====
defmodule ShittyLinqEx do
@moduledoc """
Documentation for `ShittyLinqEx`.
"""
@doc """
Inverts the order of the elements in a sequence.
## Parameters
- `list`: A sequence of values to reverse.
## Returns
A sequence whose elements correspond to those of the input sequence in reverse order.
## Examples
iex> import ShittyLinqEx, only: [reverse: 1]
iex> reverse(["A", "B", "C"])
["C", "B", "A"]
iex> import ShittyLinqEx, only: [reverse: 1]
iex> reverse([42, "orange", ":atom"])
[":atom", "orange", 42]
"""
@spec reverse(list) :: list
def reverse(list) when is_list(list), do: reverse(list, [])
def reverse([head | tail], acc), do: reverse(tail, [head | acc])
def reverse([], acc), do: acc
@doc """
Filters a sequence of values based on a predicate.
Where `source` is an enumerable to filter.
Where `predicate` is a function to test each element for a condition.
Returns an enumerable that contains elements from the input sequence that satisfy the condition.
## Examples
iex> import ShittyLinqEx, only: [where: 2]
iex> where(
...> ["apple", "passionfruit", "banana", "mango", "orange", "blueberry", "grape", "strawberry"],
...> fn fruit -> String.length(fruit) < 6 end)
["apple", "mango", "grape"]
iex> import ShittyLinqEx, only: [where: 2]
iex> where(
...> [0, 30, 20, 15, 90, 85, 40, 75],
...> fn number, index -> number <= index * 10 end)
[0, 20, 15, 40]
"""
def where(source, predicate) when is_list(source) and is_function(predicate, 1) do
where_list(source, predicate)
end
def where(source, predicate) when is_list(source) and is_function(predicate, 2) do
where_list(source, predicate, 0)
end
defp where_list([head | tail], fun) do
case fun.(head) do
true -> [head | where_list(tail, fun)]
_ -> where_list(tail, fun)
end
end
defp where_list([], _fun) do
[]
end
defp where_list([head | tail], fun, index) do
case fun.(head, index) do
true -> [head | where_list(tail, fun, index + 1)]
_ -> where_list(tail, fun, index + 1)
end
end
defp where_list([], _fun, _index) do
[]
end
end
# ==== end of lib/shitty_linq_ex.ex ====
defmodule Owl.Data do
@moduledoc """
A set of functions for `t:iodata/0` with tags.
"""
@typedoc """
A recursive data type that is similar to `t:iodata/0`, but additionally supports `t:Owl.Tag.t/1`.
Can be written to stdout using `Owl.IO.puts/2`.
"""
# improper lists are not here, just because they were not tested
@type t :: [binary() | non_neg_integer() | t() | Owl.Tag.t(t())] | Owl.Tag.t(t()) | binary()
@typedoc """
ANSI escape sequence.
An atom alias of ANSI escape sequence.
A binary representation of color like `"\e[38;5;33m"` (which is `IO.ANSI.color(33)` or `IO.ANSI.color(0, 2, 5)`).
"""
@type sequence ::
:black
| :red
| :green
| :yellow
| :blue
| :magenta
| :cyan
| :white
| :light_black
| :light_red
| :light_green
| :light_yellow
| :light_blue
| :light_magenta
| :light_cyan
| :light_white
| :black_background
| :red_background
| :green_background
| :yellow_background
| :blue_background
| :magenta_background
| :cyan_background
| :white_background
| :light_black_background
| :light_red_background
| :light_green_background
| :light_yellow_background
| :light_blue_background
| :light_magenta_background
| :light_cyan_background
| :light_white_background
| :default_color
| :default_background
| :blink_slow
| :blink_rapid
| :faint
| :bright
| :inverse
| :underline
| :italic
| :overlined
| :reverse
| binary()
@doc """
Builds a tag.
## Examples
iex> Owl.Data.tag(["hello ", Owl.Data.tag("world", :green), "!!!"], :red) |> inspect()
~s|#Owl.Tag[:red]<["hello ", #Owl.Tag[:green]<"world">, "!!!"]>|
iex> Owl.Data.tag("hello world", [:green, :red_background]) |> inspect()
~s|#Owl.Tag[:green, :red_background]<"hello world">|
"""
@spec tag(data, sequence() | [sequence()]) :: Owl.Tag.t(data) when data: t()
def tag(data, sequence_or_sequences) do
%Owl.Tag{
sequences: List.wrap(sequence_or_sequences),
data: data
}
end
@doc """
Removes information about sequences and keeps only content of the tag.
## Examples
iex> Owl.Data.tag("Hello", :red) |> Owl.Data.untag()
"Hello"
iex> Owl.Data.tag([72, 101, 108, 108, 111], :red) |> Owl.Data.untag()
'Hello'
iex> Owl.Data.tag(["Hello", Owl.Data.tag("world", :green)], :red) |> Owl.Data.untag()
["Hello", "world"]
iex> ["Hello ", Owl.Data.tag("world", :red), ["!"]] |> Owl.Data.untag()
["Hello ", "world", ["!"]]
"""
@spec untag(t()) :: iodata()
def untag(data) when is_list(data) do
Enum.map(data, &untag_child/1)
end
def untag(%Owl.Tag{data: data}) do
untag(data)
end
def untag(data) when is_binary(data) do
data
end
defp untag_child(data) when is_list(data) do
Enum.map(data, &untag_child/1)
end
defp untag_child(%Owl.Tag{data: data}) do
data
end
defp untag_child(data) when is_binary(data) do
data
end
defp untag_child(data) when is_integer(data) do
data
end
@doc """
Zips corresponding lines into one line.
The zipping finishes as soon as either input runs out of lines.
## Examples
iex> Owl.Data.zip("a\\nb\\nc", "d\\ne\\nf")
[["a", "d"], "\\n", ["b", "e"], "\\n", ["c", "f"]]
iex> Owl.Data.zip("a\\nb", "c")
[["a", "c"]]
iex> 1..3
...> |> Enum.map(&to_string/1)
...> |> Enum.map(&Owl.Box.new/1) |> Enum.reduce(&Owl.Data.zip/2) |> to_string()
\"""
┌─┐┌─┐┌─┐
│3││2││1│
└─┘└─┘└─┘
\""" |> String.trim_trailing()
"""
@spec zip(t(), t()) :: t()
def zip(data1, data2) do
lines1 = lines(data1)
lines2 = lines(data2)
lines1
|> Enum.zip_with(lines2, &[&1, &2])
|> unlines()
end
@doc """
Returns length of the data.
## Examples
iex> Owl.Data.length(["222"])
3
iex> Owl.Data.length([222])
1
iex> Owl.Data.length([[[]]])
0
iex> Owl.Data.length(["222", Owl.Data.tag(["333", "444"], :green)])
9
"""
@spec length(t()) :: non_neg_integer()
def length(data) when is_binary(data) do
String.length(data)
end
def length(data) when is_list(data) do
import Kernel, except: [length: 1]
Enum.reduce(data, 0, fn
item, acc when is_integer(item) -> length(<<item::utf8>>) + acc
item, acc -> length(item) + acc
end)
end
def length(%Owl.Tag{data: data}) do
import Kernel, except: [length: 1]
length(data)
end
@doc """
Splits data by new lines.
A special case of `split/2`.
## Example
iex> Owl.Data.lines(["first\\nsecond\\n", Owl.Data.tag("third\\nfourth", :red)])
["first", "second", Owl.Data.tag(["third"], :red), Owl.Data.tag(["fourth"], :red)]
"""
@spec lines(t()) :: [t()]
def lines(data) do
split(data, "\n")
end
@doc """
Creates a `t:t/0` from a list of `t:t/0`, inserting newline characters between the original elements.
## Examples
iex> Owl.Data.unlines(["a", "b", "c"])
["a", "\\n", "b", "\\n", "c"]
iex> ["first\\nsecond\\n", Owl.Data.tag("third\\nfourth", :red)]
...> |> Owl.Data.lines()
...> |> Owl.Data.unlines()
...> |> Owl.Data.to_ansidata()
Owl.Data.to_ansidata(["first\\nsecond\\n", Owl.Data.tag("third\\nfourth", :red)])
"""
@spec unlines([t()]) :: [t()]
def unlines(data) do
Enum.intersperse(data, "\n")
end
@doc """
Adds a `prefix` before each line of the `data`.
An important feature is that the styling of the data is preserved for each line.
## Example
iex> "first\\nsecond" |> Owl.Data.tag(:red) |> Owl.Data.add_prefix(Owl.Data.tag("test: ", :yellow))
[
[Owl.Data.tag("test: ", :yellow), Owl.Data.tag(["first"], :red)],
"\\n",
[Owl.Data.tag("test: ", :yellow), Owl.Data.tag(["second"], :red)]
]
"""
@spec add_prefix(t(), t()) :: t()
def add_prefix(data, prefix) do
data
|> lines()
|> Enum.map(fn line -> [prefix, line] end)
|> unlines()
end
@doc """
Transforms data to `t:IO.ANSI.ansidata/0` format which can be consumed by `IO` module.
## Examples
iex> "hello" |> Owl.Data.tag([:red, :cyan_background]) |> Owl.Data.to_ansidata()
[[[[[[[] | "\e[46m"] | "\e[31m"], "hello"] | "\e[39m"] | "\e[49m"] | "\e[0m"]
"""
@spec to_ansidata(t()) :: IO.ANSI.ansidata()
def to_ansidata(data) do
# splitting by \n and re-interspersing is needed so that background sequences reset at each line break and do not spread to the end of the line
data
|> lines()
|> unlines()
|> do_to_ansidata(%{})
|> IO.ANSI.format()
end
defp do_to_ansidata(
%Owl.Tag{sequences: sequences, data: data},
open_tags
) do
new_open_tags = sequences_to_state(open_tags, sequences)
close_tags =
Enum.reduce(new_open_tags, [], fn {sequence_type, sequence}, acc ->
case Map.get(open_tags, sequence_type) do
nil ->
return_to = default_value_by_sequence_type(sequence_type)
[return_to | acc]
previous_sequence ->
if previous_sequence == sequence do
acc
else
[previous_sequence | acc]
end
end
end)
[sequences, do_to_ansidata(data, new_open_tags), close_tags]
end
defp do_to_ansidata(list, open_tags) when is_list(list) do
Enum.map(list, &do_to_ansidata(&1, open_tags))
end
defp do_to_ansidata(term, _open_tags), do: term
defp maybe_wrap_to_tag([], [element]), do: element
defp maybe_wrap_to_tag([], data), do: data
defp maybe_wrap_to_tag(sequences1, [%Owl.Tag{sequences: sequences2, data: data}]) do
tag(data, collapse_sequences(sequences1 ++ sequences2))
end
defp maybe_wrap_to_tag(sequences, data) do
tag(data, collapse_sequences(sequences))
end
defp reverse_and_tag(sequences, [%Owl.Tag{sequences: last_sequences} | _] = data) do
maybe_wrap_to_tag(sequences -- last_sequences, Enum.reverse(data))
end
defp reverse_and_tag(sequences, data) do
maybe_wrap_to_tag(sequences, Enum.reverse(data))
end
# last write wins
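# e.g. collapse_sequences([:red, :green]) keeps only :green, since both are
# foreground sequences, while collapse_sequences([:red, :green_background])
# keeps both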
defp collapse_sequences(sequences) do
%{foreground: nil, background: nil}
|> sequences_to_state(sequences)
|> Map.values()
|> Enum.reject(&is_nil/1)
end
@doc """
Divides data into parts based on a pattern, preserving the sequences of tagged data in new tags.
## Example
iex> Owl.Data.split(["first second ", Owl.Data.tag("third fourth", :red)], " ")
["first", "second", Owl.Data.tag(["third"], :red), Owl.Data.tag(["fourth"], :red)]
iex> Owl.Data.split(["first second ", Owl.Data.tag("third fourth", :red)], ~r/\s+/)
["first", "second", Owl.Data.tag(["third"], :red), Owl.Data.tag(["fourth"], :red)]
"""
@spec split(t(), String.pattern() | Regex.t()) :: [t()]
def split(data, pattern) do
chunk_by(
data,
pattern,
fn value, pattern ->
[head | tail] = String.split(value, pattern, parts: 2)
head = if head == "", do: [], else: head
resolution = if tail == [], do: :cont, else: :chunk
{resolution, pattern, head, tail}
end
)
end
defp sequences_to_state(init, sequences) do
Enum.reduce(sequences, init, fn sequence, acc ->
Map.put(acc, sequence_type(sequence), sequence)
end)
end
for color <- [:black, :red, :green, :yellow, :blue, :magenta, :cyan, :white] do
defp sequence_type(unquote(color)), do: :foreground
defp sequence_type(unquote(:"light_#{color}")), do: :foreground
defp sequence_type(unquote(:"#{color}_background")), do: :background
defp sequence_type(unquote(:"light_#{color}_background")), do: :background
end
defp sequence_type(:default_color), do: :foreground
defp sequence_type(:default_background), do: :background
defp sequence_type(:blink_slow), do: :blink
defp sequence_type(:blink_rapid), do: :blink
defp sequence_type(:faint), do: :intensity
defp sequence_type(:bright), do: :intensity
defp sequence_type(:inverse), do: :inverse
defp sequence_type(:underline), do: :underline
defp sequence_type(:italic), do: :italic
defp sequence_type(:overlined), do: :overlined
defp sequence_type(:reverse), do: :reverse
# https://github.com/elixir-lang/elixir/blob/74bfab8ee271e53d24cb0012b5db1e2a931e0470/lib/elixir/lib/io/ansi.ex#L73
defp sequence_type("\e[38;5;" <> _), do: :foreground
# https://github.com/elixir-lang/elixir/blob/74bfab8ee271e53d24cb0012b5db1e2a931e0470/lib/elixir/lib/io/ansi.ex#L87
defp sequence_type("\e[48;5;" <> _), do: :background
defp default_value_by_sequence_type(:foreground), do: :default_color
defp default_value_by_sequence_type(:background), do: :default_background
defp default_value_by_sequence_type(:blink), do: :blink_off
defp default_value_by_sequence_type(:intensity), do: :normal
defp default_value_by_sequence_type(:inverse), do: :inverse_off
defp default_value_by_sequence_type(:underline), do: :no_underline
defp default_value_by_sequence_type(:italic), do: :not_italic
defp default_value_by_sequence_type(:overlined), do: :not_overlined
defp default_value_by_sequence_type(:reverse), do: :reverse_off
@doc """
Returns list of `t()` containing `count` elements each.
## Example
iex> Owl.Data.chunk_every(
...> ["first second ", Owl.Data.tag(["third", Owl.Data.tag(" fourth", :blue)], :red)],
...> 7
...> )
[
"first s",
["econd ", Owl.Data.tag(["t"], :red)],
Owl.Data.tag(["hird", Owl.Data.tag([" fo"], :blue)], :red),
Owl.Data.tag(["urth"], :blue)
]
"""
@spec chunk_every(data :: t(), count :: pos_integer()) :: [t()]
def chunk_every(data, count) when count > 0 do
chunk_by(
data,
{0, count},
fn value, {cut_left, count} ->
split_at = if cut_left == 0, do: count, else: cut_left
case String.split_at(value, split_at) do
{head, ""} ->
left = split_at - String.length(head)
resolution = if left == 0, do: :chunk, else: :cont
{resolution, {left, count}, head, []}
{head, rest} ->
{:chunk, {0, count}, head, [rest]}
end
end
)
end
defp chunk_by(data, chunk_acc, chunk_fun), do: chunk_by(data, chunk_acc, chunk_fun, [])
defp chunk_by([], _chunk_acc, _chunk_fun, _acc_sequences), do: []
defp chunk_by(data, chunk_acc, chunk_fun, acc_sequences) do
{_, before_pattern, after_pattern, chunk_acc, next_acc_sequences} =
do_chunk_by(data, chunk_acc, chunk_fun, [], acc_sequences)
[
reverse_and_tag(acc_sequences ++ next_acc_sequences, before_pattern)
| chunk_by(after_pattern, chunk_acc, chunk_fun, next_acc_sequences)
]
end
defp do_chunk_by([head | tail], chunk_acc, chunk_fun, acc, acc_sequences) do
case do_chunk_by(head, chunk_acc, chunk_fun, acc, acc_sequences) do
{:cont, new_head, new_tail, chunk_acc, new_acc_sequences} ->
new_tail
|> put_nonempty_head(tail)
|> do_chunk_by(chunk_acc, chunk_fun, new_head, new_acc_sequences)
{:chunk, new_head, new_tail, chunk_acc, new_acc_sequences} ->
new_tail = maybe_wrap_to_tag(new_acc_sequences -- acc_sequences, new_tail)
new_acc_sequences =
case new_head do
[%Owl.Tag{sequences: sequences} | _] -> new_acc_sequences -- sequences
_ -> new_acc_sequences
end
new_head =
case new_head do
[%Owl.Tag{data: []} | rest] -> rest
list -> list
end
new_tail = put_nonempty_head(new_tail, tail)
{:chunk, new_head, new_tail, chunk_acc, new_acc_sequences}
end
end
defp do_chunk_by([], chunk_acc, _chunk_fun, acc, acc_sequences) do
{:cont, acc, [], chunk_acc, acc_sequences}
end
defp do_chunk_by(
%Owl.Tag{sequences: sequences, data: data},
chunk_acc,
chunk_fun,
acc,
acc_sequences
) do
{resolution, before_pattern, after_pattern, chunk_acc, next_acc_sequences} =
do_chunk_by(data, chunk_acc, chunk_fun, [], acc_sequences ++ sequences)
before_pattern = reverse_and_tag(sequences, before_pattern)
next_acc_sequences =
case after_pattern do
[] -> next_acc_sequences -- sequences
[""] -> next_acc_sequences -- sequences
_ -> next_acc_sequences
end
{resolution, [before_pattern | acc], after_pattern, chunk_acc, next_acc_sequences}
end
defp do_chunk_by(value, chunk_acc, chunk_fun, acc, acc_sequences) when is_binary(value) do
{resolution, new_chunk_acc, head, rest} = chunk_fun.(value, chunk_acc)
{
resolution,
put_nonempty_head(head, acc),
rest,
new_chunk_acc,
acc_sequences
}
end
defp do_chunk_by(value, chunk_acc, chunk_fun, acc, acc_sequences) when is_integer(value) do
do_chunk_by(<<value::utf8>>, chunk_acc, chunk_fun, acc, acc_sequences)
end
defp put_nonempty_head([], tail), do: tail
defp put_nonempty_head(head, tail), do: [head | tail]
end
# ==== end of lib/owl/data.ex ====
defmodule Specter.PeerConnection do
@moduledoc """
Represents an RTCPeerConnection managed in the NIF. A running Specter instance may
have 0 or more peer connections at any time.
Users of Specter might choose between different topologies based on their use cases:
a Specter might be initialized per connection, and signaling messages passed between
different instances of the NIF; a Specter may be initialized per "room," and all peer
connections for that room created within the single NIF instance; a "room" may be split
across Erlang nodes, with tracks forwarded between the nodes.
"""
alias Specter.Native
@typedoc """
`t:Specter.PeerConnection.t/0` represents an instantiated RTCPeerConnection managed in the NIF.
"""
@opaque t() :: String.t()
@typedoc """
Options for creating a webrtc answer. Values default to false.
"""
@type answer_options_t() :: [] | [voice_activity_detection: boolean()]
@typedoc """
Options for creating a webrtc offer. Values default to false.
"""
@type offer_options_t() :: [] | [voice_activity_detection: boolean(), ice_restart: boolean()]
@typedoc """
The type of an SDP message, either an `:offer` or an `:answer`.
"""
@type sdp_type_t() :: :offer | :answer
@typedoc """
A UTF-8 encoded string encapsulating either an offer or an answer.
"""
@type sdp_t() :: String.t()
@typedoc """
A UTF-8 encoded string encapsulating an Offer or an Answer in JSON. The keys are as
follows:
| key | type |
| ------ | ---- |
| `type` | `offer`, `answer` |
| `sdp` | `sdp_t()` |
"""
@type session_description_t() :: String.t()
@typedoc """
An ICE candidate as JSON.
"""
@type ice_candidate_t() :: String.t()
@typedoc """
Possible states of ICE connection.
"""
@type ice_connection_state_t() ::
:unspecified
| :new
| :checking
| :connected
| :completed
| :disconnected
| :failed
| :closed
@typedoc """
Message sent as a result of a call to `ice_connection_state/2`.
"""
@type ice_connection_state_msg_t() ::
{:ice_connection_state, t(), ice_connection_state_t()}
@typedoc """
Possible states of ICE gathering process.
"""
@type ice_gathering_state_t() :: :complete | :gathering | :new | :unspecified
@typedoc """
Message sent as a result of a call to `ice_gathering_state/2`.
"""
@type ice_gathering_state_msg_t() ::
{:ice_gathering_state, t(), ice_gathering_state_t()}
@typedoc """
Possible states of session parameters negotiation.
"""
@type signaling_state_t() ::
:closed
| :have_local_offer
| :have_local_pranswer
| :have_remote_offer
| :have_remote_pranswer
| :stable
| :unspecified
@typedoc """
Message sent as a result of a call to `signaling_state/2`.
"""
@type signaling_state_msg_t() :: {:signaling_state, t(), signaling_state_t()}
@typedoc """
Possible states of peer connection.
"""
@type connection_state_t() ::
:closed | :connected | :connecting | :disconnected | :failed | :new | :unspecified
@typedoc """
Message sent as a result of a call to `connection_state/2`.
"""
@type connection_state_msg_t() :: {:connection_state, t(), connection_state_t()}
@typedoc """
Message sent as a result of a call to `add_track/3`.
"""
@type rtp_sender_msg_t() :: {:rtp_sender, t(), Specter.TrackLocalStaticSample.t(), String.t()}
@doc """
Creates a new RTCPeerConnection, using an API reference created with `new_api/3`. The
functionality wrapped by this function is async, so `:ok` is returned immediately.
Callers should listen for the `{:peer_connection_ready, peer_connection_t()}` message
to receive the results of this function.
| param | type | default |
| --------- | -------- | ------- |
| `specter` | `Specter.t()` | |
| `api` | `Specter.api_t()` | |
## Usage
iex> {:ok, specter} = Specter.init(ice_servers: ["stun:stun.l.google.com:19302"])
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc} = Specter.PeerConnection.new(specter, api)
...>
iex> {:ok, _pc} =
...> receive do
...> {:peer_connection_ready, ^pc} -> {:ok, pc}
...> after
...> 500 -> {:error, :timeout}
...> end
"""
@spec new(Specter.t(), Specter.api_t()) :: {:ok, t()} | {:error, term()}
def new(%Specter{native: ref}, api), do: Native.new_peer_connection(ref, api)
@doc """
Returns true or false, depending on whether the RTCPeerConnection is initialized.
## Usage
iex> {:ok, specter} = Specter.init(ice_servers: ["stun:stun.l.google.com:19302"])
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc}
iex> Specter.PeerConnection.exists?(specter, pc)
true
iex> {:ok, specter} = Specter.init(ice_servers: ["stun:stun.l.google.com:19302"])
iex> Specter.PeerConnection.exists?(specter, UUID.uuid4())
false
"""
@spec exists?(Specter.t(), t()) :: boolean() | no_return()
def exists?(%Specter{native: ref}, peer_connection) do
case Native.peer_connection_exists(ref, peer_connection) do
{:ok, value} ->
value
{:error, error} ->
raise "Unable to determine whether peer connection exists:\n#{inspect(error)}"
end
end
@doc """
Closes an open instance of an RTCPeerConnection.
## Usage
iex> {:ok, specter} = Specter.init(ice_servers: ["stun:stun.l.google.com:19302"])
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc}
...>
iex> Specter.PeerConnection.close(specter, pc)
:ok
iex> {:ok, _pc} =
...> receive do
...> {:peer_connection_closed, ^pc} -> {:ok, pc}
...> after
...> 500 -> {:error, :timeout}
...> end
...>
iex> Specter.PeerConnection.exists?(specter, pc)
false
"""
@spec close(Specter.t(), t()) :: :ok | {:error, term()}
def close(%Specter{native: ref}, pc), do: Native.close_peer_connection(ref, pc)
@doc """
Given an ICE candidate, add it to the given peer connection. Assumes trickle ICE.
Candidates must be JSON, with the keys `candidate`, `sdp_mid`, `sdp_mline_index`, and
`username_fragment`.
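## Usage
A hedged sketch (the candidate values below are illustrative, not from a real
session, and a JSON encoder such as `Jason` is assumed to be available):
    candidate =
      Jason.encode!(%{
        "candidate" => "candidate:0 1 UDP 2122252543 192.0.2.1 54321 typ host",
        "sdp_mid" => "0",
        "sdp_mline_index" => 0,
        "username_fragment" => "abcd"
      })
    :ok = Specter.PeerConnection.add_ice_candidate(specter, pc, candidate)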
"""
@spec add_ice_candidate(Specter.t(), t(), ice_candidate_t()) :: :ok | {:error, term()}
def add_ice_candidate(%Specter{native: ref}, pc, candidate),
do: Native.add_ice_candidate(ref, pc, candidate)
@doc """
Adds a track to the peer connection.
Sends back the UUID of the newly created RTP sender.
This will send message `t:rtp_sender_msg_t/0`.
## Usage
iex> {:ok, specter} = Specter.init()
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc}
iex> codec = %Specter.RtpCodecCapability{mime_type: "audio"}
iex> {:ok, track} = Specter.TrackLocalStaticSample.new(specter, codec, "audio", "specter")
iex> :ok = Specter.PeerConnection.add_track(specter, pc, track)
iex> assert_receive {:rtp_sender, ^pc, ^track, _rtp_sender}
...>
iex> {:error, :invalid_track} = Specter.PeerConnection.add_track(specter, pc, "invalid_track")
"""
@spec add_track(Specter.t(), t(), Specter.TrackLocalStaticSample.t()) :: :ok | {:error, term()}
def add_track(%Specter{native: ref}, pc, track) do
Native.add_track(ref, pc, track)
end
@doc """
Sends back the state of the peer connection.
This will send message `t:connection_state_msg_t/0`.
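## Usage
A minimal sketch, assuming `pc` is ready as in the earlier examples; the reply
shape follows `t:connection_state_msg_t/0`:
    :ok = Specter.PeerConnection.connection_state(specter, pc)
    # expect a {:connection_state, ^pc, state} message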
"""
@spec connection_state(Specter.t(), t()) :: :ok | {:error, term()}
def connection_state(%Specter{native: ref}, pc) do
Native.connection_state(ref, pc)
end
@doc """
Given an RTCPeerConnection where the remote description has been assigned via
`set_remote_description/4`, create an answer that can be passed to another connection.
| param | type | default |
| ----------------- | -------------------- | ------- |
| `specter` | `t()` | |
| `peer_connection` | `opaque` | |
| `options` | `answer_options_t()` | voice_activity_detection: false |
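## Usage
A hedged sketch, assuming a remote offer has already been applied via
`set_remote_description/3`; the `{:answer, ...}` message shape mirrors the
`{:offer, ...}` message seen in the `create_offer/3` examples and is an
assumption here:
    :ok = Specter.PeerConnection.create_answer(specter, pc)
    # expect an {:answer, ^pc, answer} message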
"""
@spec create_answer(Specter.t(), t(), answer_options_t()) :: :ok | {:error, term()}
def create_answer(%Specter{native: ref}, pc, opts \\ []),
do:
Native.create_answer(
ref,
pc,
Keyword.get(opts, :voice_activity_detection, false)
)
@doc """
Creates a data channel on an RTCPeerConnection.
Note: this can be useful when attempting to generate a valid offer, but where no media
tracks are expected to be sent or received. Callbacks from data channels have not yet
been implemented.
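## Usage
A minimal sketch; the `{:data_channel_created, ...}` message shape matches the
one asserted in the `pending_remote_description/2` examples below:
    :ok = Specter.PeerConnection.create_data_channel(specter, pc, "data")
    # expect a {:data_channel_created, ^pc} message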
"""
@spec create_data_channel(Specter.t(), t(), String.t()) :: :ok | {:error, term()}
def create_data_channel(%Specter{native: ref}, pc, label),
do: Native.create_data_channel(ref, pc, label)
@doc """
Given an RTCPeerConnection, create an offer that can be passed to another connection.
| param | type | default |
| ----------------- | ------------------- | ------- |
| `specter` | `t()` | |
| `peer_connection` | `opaque` | |
| `options` | `offer_options_t()` | voice_activity_detection: false |
| | | ice_restart: false |
"""
@spec create_offer(Specter.t(), t(), offer_options_t()) :: :ok | {:error, term()}
def create_offer(%Specter{native: ref}, pc, opts \\ []),
do:
Native.create_offer(
ref,
pc,
Keyword.get(opts, :voice_activity_detection, false),
Keyword.get(opts, :ice_restart, false)
)
@doc """
Sends back the value of the current session description on a peer connection. This will
send back JSON representing an offer or an answer when the peer connection has had
`set_local_description/3` called and has successfully negotiated ICE. In all other cases,
`nil` will be sent.
See `pending_local_description/2` and `local_description/2`.
## Usage
iex> {:ok, specter} = Specter.init()
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc}
iex> Specter.PeerConnection.current_local_description(specter, pc)
:ok
iex> assert_receive {:current_local_description, ^pc, nil}
"""
@spec current_local_description(Specter.t(), t()) :: :ok | {:error, term()}
def current_local_description(%Specter{native: ref}, pc),
do: Native.current_local_description(ref, pc)
@doc """
Sends back the value of the current remote session description on a peer connection. This will
send back JSON representing an offer or an answer when the peer connection has had
`set_remote_description/3` called and has successfully negotiated ICE. In all other cases,
`nil` will be sent.
See `pending_remote_description/2` and `remote_description/2`.
## Usage
iex> {:ok, specter} = Specter.init()
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc}
iex> Specter.PeerConnection.current_remote_description(specter, pc)
:ok
iex> assert_receive {:current_remote_description, ^pc, nil}
"""
@spec current_remote_description(Specter.t(), t()) :: :ok | {:error, term()}
def current_remote_description(%Specter{native: ref}, pc),
do: Native.current_remote_description(ref, pc)
@doc """
Sends back the state of the ICE connection for the given peer connection.
This will send message `t:ice_connection_state_msg_t/0`.
"""
@spec ice_connection_state(Specter.t(), t()) :: :ok | {:error, term()}
def ice_connection_state(%Specter{native: ref}, pc) do
Native.ice_connection_state(ref, pc)
end
@doc """
Sends back the state of the ICE gathering process.
This will send message `t:ice_gathering_state_msg_t/0`.
"""
@spec ice_gathering_state(Specter.t(), t()) :: :ok | {:error, term()}
def ice_gathering_state(%Specter{native: ref}, pc) do
Native.ice_gathering_state(ref, pc)
end
@doc """
Sends back the value of the local session description on a peer connection. This will
send back JSON representing an offer or an answer when the peer connection has had
`set_local_description/3` called. If ICE has been successfully negotiated, the current
local description will be sent back, otherwise the caller will receive the pending
local description.
See `current_local_description/2` and `pending_local_description/2`.
## Usage
iex> {:ok, specter} = Specter.init()
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc}
...>
iex> Specter.PeerConnection.local_description(specter, pc)
:ok
iex> assert_receive {:local_description, ^pc, nil}
...>
iex> :ok = Specter.PeerConnection.create_offer(specter, pc)
iex> assert_receive {:offer, ^pc, offer}
iex> :ok = Specter.PeerConnection.set_local_description(specter, pc, offer)
iex> assert_receive {:ok, ^pc, :set_local_description}
...>
iex> Specter.PeerConnection.local_description(specter, pc)
:ok
iex> assert_receive {:local_description, ^pc, ^offer}
"""
@spec local_description(Specter.t(), t()) :: :ok | {:error, term()}
def local_description(%Specter{native: ref}, pc),
do: Native.local_description(ref, pc)
@doc """
Sends back the value of the session description on a peer connection that is pending
connection, or nil.
## Usage
iex> {:ok, specter} = Specter.init()
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc}
...>
iex> Specter.PeerConnection.pending_local_description(specter, pc)
:ok
iex> assert_receive {:pending_local_description, ^pc, nil}
...>
iex> :ok = Specter.PeerConnection.create_offer(specter, pc)
iex> assert_receive {:offer, ^pc, offer}
iex> :ok = Specter.PeerConnection.set_local_description(specter, pc, offer)
iex> assert_receive {:ok, ^pc, :set_local_description}
...>
iex> Specter.PeerConnection.pending_local_description(specter, pc)
:ok
iex> assert_receive {:pending_local_description, ^pc, ^offer}
"""
@spec pending_local_description(Specter.t(), t()) :: :ok | {:error, term()}
def pending_local_description(%Specter{native: ref}, pc),
do: Native.pending_local_description(ref, pc)
@doc """
Sends back the value of the remote session description on a peer connection that is
pending connection, or nil.
See `current_remote_description/2` and `remote_description/2`.
## Usage
iex> {:ok, specter} = Specter.init()
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc_offer} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc_offer}
iex> :ok = Specter.PeerConnection.create_data_channel(specter, pc_offer, "foo")
iex> assert_receive {:data_channel_created, ^pc_offer}
iex> :ok = Specter.PeerConnection.create_offer(specter, pc_offer)
iex> assert_receive {:offer, ^pc_offer, offer}
...>
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc_answer} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc_answer}
...>
iex> Specter.PeerConnection.pending_remote_description(specter, pc_answer)
:ok
iex> assert_receive {:pending_remote_description, ^pc_answer, nil}
...>
iex> :ok = Specter.PeerConnection.set_remote_description(specter, pc_answer, offer)
iex> assert_receive {:ok, ^pc_answer, :set_remote_description}
...>
iex> Specter.PeerConnection.pending_remote_description(specter, pc_answer)
:ok
iex> assert_receive {:pending_remote_description, ^pc_answer, ^offer}
"""
@spec pending_remote_description(Specter.t(), t()) :: :ok | {:error, term()}
def pending_remote_description(%Specter{native: ref}, pc),
do: Native.pending_remote_description(ref, pc)
@doc """
Sends back the value of the remote session description on a peer connection. This will
send back JSON representing an offer or an answer when the peer connection has had
`set_remote_description/3` called. If ICE has been successfully negotiated, the current
remote description will be sent back, otherwise the caller will receive the pending
remote description.
See `current_remote_description/2` and `pending_remote_description/2`.
## Usage
iex> {:ok, specter} = Specter.init()
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc_offer} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc_offer}
iex> :ok = Specter.PeerConnection.create_data_channel(specter, pc_offer, "foo")
iex> assert_receive {:data_channel_created, ^pc_offer}
iex> :ok = Specter.PeerConnection.create_offer(specter, pc_offer)
iex> assert_receive {:offer, ^pc_offer, offer}
...>
iex> {:ok, media_engine} = Specter.new_media_engine(specter)
iex> {:ok, registry} = Specter.new_registry(specter, media_engine)
iex> {:ok, api} = Specter.new_api(specter, media_engine, registry)
iex> {:ok, pc_answer} = Specter.PeerConnection.new(specter, api)
iex> assert_receive {:peer_connection_ready, ^pc_answer}
...>
iex> Specter.PeerConnection.remote_description(specter, pc_answer)
:ok
iex> assert_receive {:remote_description, ^pc_answer, nil}
...>
iex> :ok = Specter.PeerConnection.set_remote_description(specter, pc_answer, offer)
iex> assert_receive {:ok, ^pc_answer, :set_remote_description}
...>
iex> Specter.PeerConnection.remote_description(specter, pc_answer)
:ok
iex> assert_receive {:remote_description, ^pc_answer, ^offer}
"""
@spec remote_description(Specter.t(), t()) :: :ok | {:error, term()}
def remote_description(%Specter{native: ref}, pc),
do: Native.remote_description(ref, pc)
@doc """
Given an offer or an answer session description, sets the local description on
a peer connection. The description should be in the form of JSON with the keys
`type` and `sdp`.
| param | type | default |
| ----------------- | --------------------------- | ------- |
| `specter` | `t:t/0` | |
| `peer_connection` | `opaque` | |
| `description` | `t:session_description_t/0` | |
"""
@spec set_local_description(Specter.t(), t(), session_description_t()) ::
:ok | {:error, term()}
def set_local_description(%Specter{native: ref}, pc, description),
do: Native.set_local_description(ref, pc, description)
@doc """
Given an offer or an answer in the form of SDP generated by a remote party, sets
the remote description on a peer connection. Expects a session description in the
form of JSON with the keys `type` and `sdp`.
| param | type | default |
| ----------------- | --------------------------- | ------- |
| `specter` | `t:t/0` | |
| `peer_connection` | `opaque` | |
| `description` | `t:session_description_t/0` | |
"""
@spec set_remote_description(Specter.t(), t(), session_description_t()) ::
:ok | {:error, term()}
def set_remote_description(%Specter{native: ref}, pc, description) do
Native.set_remote_description(ref, pc, description)
end
@doc """
Sends back the state of session parameters negotiation.
This will send message `t:signaling_state_msg_t/0`.
"""
@spec signaling_state(Specter.t(), t()) :: :ok | {:error, term()}
def signaling_state(%Specter{native: ref}, pc) do
Native.signaling_state(ref, pc)
end
end
|
lib/specter/peer_connection.ex
| 0.922032
| 0.601535
|
peer_connection.ex
|
starcoder
|
defmodule Alchemy.User do
@moduledoc """
This module contains functions and types related to discord users.
"""
alias Alchemy.UserGuild
use Alchemy.Discord.Types
@typedoc """
Represents a discord User. The default values exist to cover missing fields.
- `id`
represents a unique user id
- `username`
represents a user's current username
- `discriminator`
4 digit tag to differentiate usernames
- `avatar`
A string representing their avatar hash. Use `avatar_url` to
get the corresponding url from a `User` object
- `bot`
Whether or not the user is a bot - *default: `false`*
A bot usually doesn't have the authorization necessary to access the next two fields, so
they're usually missing.
- `verified`
Whether the account is verified - *default: `:hidden`*
- `email`
The user's email - *default: `:hidden`*
"""
@type t :: %__MODULE__{
id: String.t,
username: String.t,
discriminator: String.t,
avatar: String.t,
bot: boolean,
verified: :hidden | boolean,
email: :hidden | String.t
}
@derive [Poison.Encoder]
defstruct [:id,
:username,
:discriminator,
:avatar,
bot: false,
verified: :hidden,
email: :hidden
]
@typedoc """
A shortened version of a Guild struct, through the view of a User.
- `id`
Represents the guild's id.
- `name`
Represents the guild's name.
- `icon`
A string representing the guild's icon hash.
- `owner`
Whether the user linked to the guild owns it.
- `permissions`
Bitwise of the user's enabled/disabled permissions.
"""
@type user_guild :: %UserGuild{
id: snowflake,
name: String.t,
icon: String.t,
owner: boolean,
permissions: integer
}
defimpl String.Chars, for: __MODULE__ do
def to_string(user), do: user.username <> "#" <> user.discriminator
end
defmacrop is_valid_img(type, size) do
quote do
unquote(type) in ["png", "webp", "jpg", "gif"] and
unquote(size) in [128, 256, 512, 1024, 2048]
end
end
@doc """
Used to get the url for a user's avatar
`type` must be one of `"png"`, `"webp"`, `"jpg"`, `"gif"`
`size` must be one of `128`, `256`, `512`, `1024`, `2048`
## Examples
```elixir
> User.avatar_url(user)
https://cdn.discordapp.com/avatars/...
```
"""
@spec avatar_url(__MODULE__.t, String.t, integer) :: url
def avatar_url(user) do
avatar_url(user, "jpg", 128)
end
def avatar_url(user, type, size) when is_valid_img(type, size) do
base = "https://cdn.discordapp.com/avatars/#{user.id}/#{user.avatar}."
base <> "#{type}?size=#{size}"
end
def avatar_url(_user, _type, _size) do
raise ArgumentError, message: "invalid type and/or size"
end
@doc """
Returns a string that mentions a user when used in a message
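## Examples
A hypothetical example (the id shown is illustrative):
```elixir
> User.mention(user)
"<@123456789>"
```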
"""
def mention(user) do
"<@#{user.id}>"
end
end
|
lib/Structs/Users/user.ex
| 0.845033
| 0.716119
|
user.ex
|
starcoder
|
defmodule Massex do
@moduledoc """
Defines a whole value pattern container for masses, and utility methods for
working with them to improve handling within your applications.
"""
@gram_to_ounce_rate Decimal.from_float(28.3495)
@zero Decimal.new(0)
@enforce_keys [:unit, :amount]
defstruct [:unit, :amount]
@type t :: %__MODULE__{
unit: atom(),
amount: Decimal.t()
}
@doc """
Builds a `Massex` struct from an amount and unit
## Examples
iex> Massex.new(10, :gram)
%Massex{amount: Decimal.new(10), unit: :gram}
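Unit atoms and strings are normalized, so `:oz` or `"oz"` become `:ounce`
(a hedged sketch based on the `standardize_unit/1` clauses below; not a doctest):
    Massex.new("12.5", "oz")
    # => %Massex{amount: Decimal.new("12.5"), unit: :ounce}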
"""
@spec new(number() | Decimal.t() | String.t(), atom() | String.t()) ::
t() | :error
def new(amount, unit) do
with {:ok, standardized} <- standardize_unit(unit) do
case cast_amount(amount) do
:error -> :error
val -> %__MODULE__{unit: standardized, amount: val}
end
end
end
@doc """
Returns a `Massex` with the arithmetical absolute of the amount
"""
@spec abs(t()) :: t()
def abs(%__MODULE__{amount: amount, unit: unit}),
do: %__MODULE__{amount: Decimal.abs(amount), unit: unit}
@doc """
Adds two `Massex` structs together, returning a Massex
## Examples
iex> left = Massex.new(10, :gram)
iex> right = Massex.new(20, :gram)
iex> Massex.add(left, right)
%Massex{unit: :gram, amount: Decimal.new(30)}
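Mixed units are converted into the unit of the left operand using the module's
gram/ounce rate (a hedged sketch, not a doctest; the result assumes the 28.3495 rate):
    Massex.add(Massex.new(10, :gram), Massex.new(1, :ounce))
    # => %Massex{unit: :gram, amount: Decimal.new("38.3495")}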
"""
@spec add(t(), t() | number() | String.t()) :: t() | {:error, :invalid_amount}
def add(%__MODULE__{amount: leftval, unit: unit}, %__MODULE__{amount: rightval, unit: unit}),
do: %__MODULE__{amount: Decimal.add(leftval, rightval), unit: unit}
def add(%__MODULE__{amount: leftval, unit: leftunit}, %__MODULE__{
amount: rightval,
unit: rightunit
}),
do: %__MODULE__{
amount: Decimal.add(leftval, convert_amount(rightval, rightunit, leftunit)),
unit: leftunit
}
def add(%__MODULE__{amount: amount, unit: unit}, value) do
case cast_amount(value) do
:error -> {:error, :invalid_value}
val -> %__MODULE__{amount: Decimal.add(amount, val), unit: unit}
end
end
@doc """
Compares two `Massex` structs, returning 0 on equality, 1 if left is greater than right, or -1 if left is less than right
## Examples
iex> less = Massex.new(10, :gram)
iex> more = Massex.new(20, :gram)
iex> Massex.compare(less, less)
0
iex> Massex.compare(less, more)
-1
iex> Massex.compare(more, less)
1
"""
@spec compare(t(), t()) :: integer()
def compare(%__MODULE__{amount: leftval, unit: unit}, %__MODULE__{amount: rightval, unit: unit}) do
leftval
|> Decimal.cmp(rightval)
|> cmp_to_integer()
end
def compare(%__MODULE__{amount: leftval, unit: leftunit}, %__MODULE__{
amount: rightval,
unit: rightunit
}),
do:
with(
newval <- convert_amount(rightval, rightunit, leftunit),
do:
leftval
|> Decimal.cmp(newval)
|> cmp_to_integer()
)
defp cmp_to_integer(:eq), do: 0
defp cmp_to_integer(:gt), do: 1
defp cmp_to_integer(:lt), do: -1
@doc """
Divides a `Massex` by the provided denominator
## Examples
iex> base = Massex.new(10, :gram)
iex> Massex.divide(base, 2)
%Massex{amount: Decimal.new(5), unit: :gram}
"""
@spec divide(t(), number()) :: t()
def divide(%__MODULE__{amount: amount, unit: unit}, denominator),
do: %__MODULE__{amount: Decimal.div(amount, denominator), unit: unit}
@doc """
Returns true if two `Massex` represent the same amount of mass
## Examples
iex> left = Massex.new(10, :gram)
iex> right = Massex.new(10, :gram)
iex> Massex.equals?(left, right)
true
"""
@spec equals?(t(), t()) :: boolean()
def equals?(%__MODULE__{amount: left, unit: unit}, %__MODULE__{amount: right, unit: unit}),
do: Decimal.eq?(left, right)
def equals?(%__MODULE__{amount: left, unit: leftunit}, %__MODULE__{
amount: right,
unit: rightunit
}),
do: right |> convert_amount(rightunit, leftunit) |> Decimal.eq?(left)
@doc """
Multiplies a `Massex` by the provided amount
## Examples
iex> mass = Massex.new(10, :gram)
iex> Massex.multiply(mass, 10)
%Massex{amount: Decimal.new(100), unit: :gram}
"""
@spec multiply(t(), number()) :: t()
def multiply(%__MODULE__{amount: amount, unit: unit}, value),
do: %__MODULE__{amount: Decimal.mult(amount, value), unit: unit}
@doc """
Returns true if the amount of a `Massex` is less than zero
## Examples
iex> Massex.negative?(Massex.new(-10, :gram))
true
iex> Massex.negative?(Massex.new(10, :gram))
false
"""
@spec negative?(t()) :: boolean()
def negative?(%__MODULE__{amount: amount}), do: Decimal.negative?(amount)
@doc """
Returns true if the amount of a `Massex` is more than zero
## Examples
iex> Massex.positive?(Massex.new(-10, :gram))
false
iex> Massex.positive?(Massex.new(10, :gram))
true
"""
@spec positive?(t()) :: boolean()
def positive?(%__MODULE__{amount: amount}), do: Decimal.positive?(amount)
@doc """
Subtracts one Massex struct from another, returning a Massex
## Examples
iex> left = Massex.new(20, :gram)
iex> right = Massex.new(10, :gram)
iex> Massex.subtract(left, right)
%Massex{unit: :gram, amount: Decimal.new(10)}
iex> Massex.subtract(left, 10)
%Massex{unit: :gram, amount: Decimal.new(10)}
"""
@spec subtract(t(), t() | number() | String.t()) :: t() | {:error, :invalid_amount}
def subtract(%__MODULE__{amount: leftval, unit: unit}, %__MODULE__{amount: rightval, unit: unit}),
do: %__MODULE__{amount: Decimal.sub(leftval, rightval), unit: unit}
def subtract(%__MODULE__{amount: leftval, unit: leftunit}, %__MODULE__{
amount: rightval,
unit: rightunit
}),
do: %__MODULE__{
amount: Decimal.sub(leftval, convert_amount(rightval, rightunit, leftunit)),
unit: leftunit
}
def subtract(%__MODULE__{amount: amount, unit: unit}, value) do
case cast_amount(value) do
:error -> {:error, :invalid_value}
val -> %__MODULE__{amount: Decimal.sub(amount, val), unit: unit}
end
end
@doc """
Returns the `Decimal` amount backing the `Massex`
## Examples
iex> mass = Massex.new(20, :gram)
iex> Massex.to_decimal(mass)
Decimal.new(20)
"""
@spec to_decimal(t()) :: Decimal.t()
def to_decimal(%__MODULE__{amount: amount}), do: amount
@doc """
Returns true if the amount of a `Massex` is zero
## Examples
iex> Massex.zero?(Massex.new(-10, :gram))
false
iex> Massex.zero?(Massex.new(0, :gram))
true
"""
@spec zero?(t()) :: boolean()
def zero?(%__MODULE__{amount: amount}), do: Decimal.eq?(amount, @zero)
defp standardize_unit(:g), do: {:ok, :gram}
defp standardize_unit(:oz), do: {:ok, :ounce}
defp standardize_unit(:gram), do: {:ok, :gram}
defp standardize_unit(:ounce), do: {:ok, :ounce}
defp standardize_unit(unit) when is_binary(unit),
do: unit |> String.to_atom() |> standardize_unit()
defp standardize_unit(_), do: :error
defp cast_amount(%Decimal{} = amount), do: amount
defp cast_amount(amount) when is_float(amount), do: Decimal.from_float(amount)
defp cast_amount(amount) when is_number(amount), do: Decimal.new(amount)
defp cast_amount(amount) when is_binary(amount),
do: with({val, _} <- Decimal.parse(amount), do: val)
defp convert_amount(amount, :gram, :ounce), do: Decimal.div(amount, @gram_to_ounce_rate)
defp convert_amount(amount, :ounce, :gram), do: Decimal.mult(amount, @gram_to_ounce_rate)
end
|
lib/massex.ex
| 0.95253
| 0.757077
|
massex.ex
|
starcoder
|
defmodule Day10.ParseResult.TargetAddress do
@moduledoc """
`%TargetAddress{}` describes the type and identifier of a target node
in the system.
"""
alias __MODULE__
@type target_identifier() :: String.t()
@type target_type() :: :bot | :output
@type t :: %__MODULE__{identifier: target_identifier(), type: target_type()}
@enforce_keys [:identifier, :type]
defstruct [:identifier, :type]
@spec new(target_type(), target_identifier()) :: TargetAddress.t()
def new(type, identifier) when is_atom(type) do
%__MODULE__{
type: type,
identifier: identifier
}
end
@spec new(String.t(), target_identifier()) :: TargetAddress.t()
def new("bot", identifier), do: new(:bot, identifier)
def new("output", identifier), do: new(:output, identifier)
@spec for_bot(target_identifier()) :: TargetAddress.t()
def for_bot(identifier), do: new(:bot, identifier)
@spec for_output(target_identifier()) :: TargetAddress.t()
def for_output(identifier), do: new(:output, identifier)
end
defmodule Day10.ParseResult.Node do
@moduledoc """
`%Node{}` describes an instruction to configure a node in the system.
"""
alias Day10.ParseResult.{Node, TargetAddress}
@type node_identifier() :: String.t()
@type t :: %__MODULE__{
identifier: node_identifier(),
low_destination: TargetAddress.t(),
high_destination: TargetAddress.t()
}
@enforce_keys [:identifier, :low_destination, :high_destination]
defstruct [:identifier, :low_destination, :high_destination]
@spec new(node_identifier(), TargetAddress.t(), TargetAddress.t()) :: Node.t()
def new(identifier, low_destination, high_destination) do
%__MODULE__{
identifier: identifier,
low_destination: low_destination,
high_destination: high_destination
}
end
end
defmodule Day10.ParseResult.ChipAssignment do
@moduledoc """
`%ChipAssignment{}` describes an initial chip assignment instruction.
"""
alias Day10.ParseResult.{ChipAssignment, TargetAddress}
@type chip_value() :: integer()
@type t :: %__MODULE__{
value: chip_value(),
target_address: TargetAddress.t()
}
@enforce_keys [:value, :target_address]
defstruct [:value, :target_address]
@spec new(chip_value(), TargetAddress.t()) :: ChipAssignment.t()
def new(value, target_address) do
%__MODULE__{
value: value,
target_address: target_address
}
end
end
defmodule Day10.ParseResult do
@moduledoc """
`%ParseResult{}` is a tree representation of the parsed instruction set. It
separates the items it found, which are either of type `%Node{}` or of type
`%ChipAssignment{}`.
"""
alias __MODULE__
alias __MODULE__.{ChipAssignment, Node}
@type chip_assignments() :: list(ChipAssignment.t())
@type nodes() :: list(Node.t())
@type t :: %__MODULE__{
chip_assignments: chip_assignments(),
nodes: nodes()
}
@enforce_keys [:chip_assignments, :nodes]
defstruct [:chip_assignments, :nodes]
@doc """
`new/0` initializes a new _empty_ `%ParseResult{}`.
"""
@spec new() :: __MODULE__.t()
def new do
%__MODULE__{
nodes: [],
chip_assignments: []
}
end
@doc """
`add_node/2` takes a `%ParseResult{}` and a `%Node{}` and returns the updated
`%ParseResult{}` with the passed in `%Node{}` appended to the list of nodes.
"""
@spec add_node(ParseResult.t(), Node.t()) :: ParseResult.t()
def add_node(%ParseResult{nodes: nodes} = result, %Node{} = node) do
nodes = nodes ++ [node]
%ParseResult{result | nodes: nodes}
end
@doc """
`add_assignment/2` takes a `%ParseResult{}` and a `%ChipAssignment{}` and
returns the updated `%ParseResult{}` with the passed in `%ChipAssignment{}`
appended to the list of chip_assignments.
"""
@spec add_assignment(ParseResult.t(), ChipAssignment.t()) :: ParseResult.t()
def add_assignment(
%ParseResult{chip_assignments: assignments} = result,
%ChipAssignment{} = assignment
) do
assignments = assignments ++ [assignment]
%ParseResult{result | chip_assignments: assignments}
end
end
defmodule Day10.Parser do
@moduledoc """
Day10.Parser takes the challenge input and parses it into a `%ParseResult{}`
struct, which separates the found nodes (either bots or outputs) and their chip
assignments, ready to be processed later on.
"""
@assignment_regex ~r/value (\d+) goes to bot (\d+)/
@bot_regex ~r/bot (\d+) gives low to (bot|output) (\d+) and high to (bot|output) (\d+)/
alias Day10.ParseResult
alias ParseResult.{Node, ChipAssignment, TargetAddress}
@doc """
`parse/1` takes the input as a string and parses it as a `%ParseResult{}`
struct if all went well. It raises an exception when it encounters an invalid
instruction.
"""
@spec parse(String.t()) :: ParseResult.t()
def parse(contents) do
contents
|> String.split("\n", trim: true)
|> to_parse_result()
end
@spec to_parse_result(list(String.t())) :: ParseResult.t()
defp to_parse_result(lines) do
Enum.reduce(lines, ParseResult.new(), fn line, result ->
case parse_line(line) do
%Node{} = node ->
ParseResult.add_node(result, node)
%ChipAssignment{} = assignment ->
ParseResult.add_assignment(result, assignment)
end
end)
end
defp parse_line("bot" <> _rest = line), do: parse_bot(line)
defp parse_line("value" <> _rest = line), do: parse_value(line)
defp parse_line(instruction),
do: raise("uh-oh! Unsupported instruction: #{inspect(instruction)}")
@spec parse_bot(String.t()) :: Node.t()
defp parse_bot(line) do
case Regex.run(@bot_regex, line) do
[
_line,
source_bot_id,
low_target_type,
low_target_identifier,
high_target_type,
high_target_identifier
] ->
target_low = TargetAddress.new(low_target_type, low_target_identifier)
target_high = TargetAddress.new(high_target_type, high_target_identifier)
Node.new(source_bot_id, target_low, target_high)
_ ->
raise "Unsupported bot instruction: #{inspect(line)}"
end
end
@spec parse_value(String.t()) :: ChipAssignment.t()
defp parse_value(line) do
case Regex.run(@assignment_regex, line) do
[_line, value, bot_identifier] ->
target = TargetAddress.new(:bot, bot_identifier)
ChipAssignment.new(String.to_integer(value), target)
_ ->
raise "invalid assignment statement #{inspect(line)}"
end
end
end
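# A minimal usage sketch (not part of the original source; the input follows the
# Advent of Code day 10 instruction format and the values are illustrative):
#
#     input = "value 5 goes to bot 2\nbot 2 gives low to bot 1 and high to bot 0"
#     %Day10.ParseResult{chip_assignments: [_assignment], nodes: [_node]} =
#       Day10.Parser.parse(input)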
|
advent-of-code-2016/day_10/lib/parser.ex
| 0.87787
| 0.430925
|
parser.ex
|
starcoder
|
defmodule Joken.Hooks do
@moduledoc """
Behaviour for defining hooks into Joken's lifecycle.
Hooks are passed to `Joken` functions or added to `Joken.Config` through the
`Joken.Config.add_hook/2` macro. They can change the execution flow of a token configuration.
There are 2 kinds of hooks: before and after.
Both of them are executed in a `reduce_while` call and so must always return either:
- `{:halt, ...}` -> when you want to abort execution (other hooks won't be called)
- `{:cont, ...}` -> when you want to let other hooks execute
## Before hooks
A before hook receives as the first parameter its options and then a tuple with the input of
the function. For example, the `generate_claims` function receives the token configuration plus a
map of extra claims. Therefore, a `before_generate` hook receives:
- the hook options or `[]` if none are given;
- a tuple with two elements where the first is the token configuration and the second is the extra
claims map;
The return of a before hook is always the input of the next hook. Say you want to add an extra claim
with a hook. You could do so like in this example:
defmodule EnsureExtraClaimHook do
use Joken.Hooks
@impl true
def before_generate(_hook_options, {token_config, extra_claims}) do
{:cont, {token_config, Map.put(extra_claims, "must_exist", true)}}
end
end
You could also halt execution completely on a before hook. Just use the `:halt` return with an error
tuple:
defmodule StopTheWorldHook do
use Joken.Hooks
@impl true
def before_generate(_hook_options, _input) do
{:halt, {:error, :stop_the_world}}
end
end
## After hooks
After hooks work similarly to before hooks. The difference is that an after hook receives and returns the result of the
operation. So, instead of receiving 2 arguments it receives three:
- the hook options or `[]` if none are given;
- the result tuple which might be `{:error, reason}` or a tuple with `:ok` and its parameters;
- the input to the function call.
Let's see an example with `after_verify`. The verify function takes the token and a signer as arguments. So,
an `after_verify` might look like this:
defmodule CheckVerifyError do
use Joken.Hooks
require Logger
@impl true
def after_verify(_hook_options, result, input) do
case result do
{:error, :invalid_signature} ->
Logger.error("Check signer!!!")
{:halt, result}
{:ok, _claims} ->
{:cont, result, input}
end
end
end
In this example we have conditional logic for different results.
## `Joken.Config`
When you create a module that has `use Joken.Config` it automatically implements
this behaviour with overridable functions. You can simply override a callback
implementation directly and it will be triggered when using any of the generated
functions. Example:
defmodule HookToken do
use Joken.Config
@impl Joken.Hooks
def before_generate(_options, input) do
IO.puts("Before generating claims")
{:cont, input}
end
end
Now if we call `HookToken.generate_claims/1` it will call our callback.
Also in `Joken.Config` there is an imported macro for adding hooks with options. Example:
defmodule ManyHooks do
use Joken.Config
add_hook(JokenJwks, jwks_url: "http://someserver.com/.well-known/certs")
end
"""
alias Joken.Signer
@type halt_tuple :: {:halt, tuple}
@type hook_options :: Keyword.t()
@type generate_input :: {Joken.token_config(), extra :: Joken.claims()}
@type sign_input :: {Joken.claims(), Signer.t()}
@type verify_input :: {Joken.bearer_token(), Signer.t()}
@type validate_input :: {Joken.token_config(), Joken.claims(), context :: map()}
@doc "Called before `Joken.generate_claims/3`"
@callback before_generate(hook_options, generate_input) :: {:cont, generate_input} | halt_tuple
@doc "Called before `Joken.encode_and_sign/3`"
@callback before_sign(hook_options, sign_input) :: {:cont, sign_input} | halt_tuple
@doc "Called before `Joken.verify/3`"
@callback before_verify(hook_options, verify_input) :: {:cont, verify_input} | halt_tuple
@doc "Called before `Joken.validate/4`"
@callback before_validate(hook_options, validate_input) :: {:cont, validate_input} | halt_tuple
@doc "Called after `Joken.generate_claims/3`"
@callback after_generate(hook_options, Joken.generate_result(), generate_input) ::
{:cont, Joken.generate_result(), generate_input} | halt_tuple
@doc "Called after `Joken.encode_and_sign/3`"
@callback after_sign(
hook_options,
Joken.sign_result(),
sign_input
) :: {:cont, Joken.sign_result(), sign_input} | halt_tuple
@doc "Called after `Joken.verify/3`"
@callback after_verify(
hook_options,
Joken.verify_result(),
verify_input
) :: {:cont, Joken.verify_result(), verify_input} | halt_tuple
@doc "Called after `Joken.validate/4`"
@callback after_validate(
hook_options,
Joken.validate_result(),
validate_input
) :: {:cont, Joken.validate_result(), validate_input} | halt_tuple
defmacro __using__(_opts) do
quote do
@behaviour Joken.Hooks
@impl true
def before_generate(_hook_options, input), do: {:cont, input}
@impl true
def before_sign(_hook_options, input), do: {:cont, input}
@impl true
def before_verify(_hook_options, input), do: {:cont, input}
@impl true
def before_validate(_hook_options, input), do: {:cont, input}
@impl true
def after_generate(_hook_options, result, input), do: {:cont, result, input}
@impl true
def after_sign(_hook_options, result, input), do: {:cont, result, input}
@impl true
def after_verify(_hook_options, result, input), do: {:cont, result, input}
@impl true
def after_validate(_hook_options, result, input), do: {:cont, result, input}
defoverridable before_generate: 2,
before_sign: 2,
before_verify: 2,
before_validate: 2,
after_generate: 3,
after_sign: 3,
after_verify: 3,
after_validate: 3
end
end
@before_hooks [:before_generate, :before_sign, :before_verify, :before_validate]
@after_hooks [:after_generate, :after_sign, :after_verify, :after_validate]
def run_before_hook(hooks, hook_function, input) when hook_function in @before_hooks do
hooks
|> Enum.reduce_while(input, fn hook, input ->
{hook, opts} = unwrap_hook(hook)
case apply(hook, hook_function, [opts, input]) do
{:cont, _next_input} = res -> res
{:halt, _reason} = res -> res
_ -> {:halt, {:error, :wrong_hook_return}}
end
end)
|> case do
{:error, _reason} = err -> err
res -> {:ok, res}
end
end
def run_after_hook(hooks, hook_function, result, input) when hook_function in @after_hooks do
hooks
|> Enum.reduce_while({result, input}, fn hook, {result, input} ->
{hook, opts} = unwrap_hook(hook)
case apply(hook, hook_function, [opts, result, input]) do
{:cont, result, next_input} -> {:cont, {result, next_input}}
{:halt, _reason} = res -> res
_ -> {:halt, {:error, :wrong_hook_return}}
end
end)
|> case do
{result, input} when is_tuple(input) -> result
res -> res
end
end
defp unwrap_hook({_hook_module, _opts} = hook), do: hook
defp unwrap_hook(hook) when is_atom(hook), do: {hook, []}
end
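# A minimal sketch (not from the original source) of a hook that takes options,
# attached via the `add_hook/2` macro described in the moduledoc; the module
# names and the :verbose option are illustrative:
#
#     defmodule LogClaimsHook do
#       use Joken.Hooks
#
#       @impl true
#       def after_generate(opts, result, input) do
#         if opts[:verbose], do: IO.inspect(result, label: "generate result")
#         {:cont, result, input}
#       end
#     end
#
#     defmodule MyToken do
#       use Joken.Config
#       add_hook(LogClaimsHook, verbose: true)
#     end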
|
lib/joken/hooks.ex
| 0.903805
| 0.689433
|
hooks.ex
|
starcoder
|
defmodule OffBroadwayRedisStream.Producer do
@moduledoc """
A GenStage Producer for Redis Stream.
It acts as a consumer in the specified Redis consumer group. Introduction to Redis Stream can be found at: https://redis.io/topics/streams-intro.
Supports failover by automatically claiming pending messages of a dead node. A node is considered dead when it fails to send heartbeats.
Currently, it only supports Redis 6.0.2 and above
## Producer Options
* `:redis_client_opts` - Required. Redis client specific options. The default client is [Redix](https://hexdocs.pm/redix/Redix.html), and for Redix these options are used to start the Redix process via `Redix.start_link(opts)`. See the [Redix documentation](https://hexdocs.pm/redix/Redix.html#start_link/1)
* `:receive_interval` - Optional. The duration (in milliseconds) for which the producer
waits before making a request for more messages if there are no events in the stream. Default is 1000.
* `:stream` - Required. Redis stream name
* `:group` - Required. Redis consumer group. The group will be created with the `:group_start_id` ID if it is not already present.
* `:group_start_id` - Optional. Starting stream ID which should be used when the consumer group is *created*. Use `$` for the latest ID. See [XGROUP CREATE](https://redis.io/commands/xgroup). Default is `$`
* `:consumer_name` - Required. Redis consumer name for the Broadway instance in the consumer group. If you are running multiple consumers, make sure that each consumer has a unique name.
* `:heartbeat_interval` - Optional. The producer sends heartbeats at regular intervals; this is the interval duration in milliseconds. Default is 5000
* `:allowed_missed_heartbeats` - Optional. Number of allowed missed heartbeats for a consumer. The consumer is considered to be dead after this, and other consumers claim its pending messages. Default is 3
* `:make_stream` - Optional. Appends the MKSTREAM subcommand to `XGROUP CREATE`, which automatically creates the stream if it doesn't exist. See [XGROUP CREATE](https://redis.io/commands/xgroup). Default is false
## Acknowledgments
Both successful and failed messages are acknowledged by default. Use `Broadway.Message.configure_ack/2` to change this behaviour for failed messages. If a message is configured to retry, it will be attempted again in the next batch.
```elixir
if message.metadata.attempt < @max_attempts do
Message.configure_ack(message, retry: true)
else
message
end
```
The `attempt` field in the metadata can be used to control the maximum number of retries.
Use Broadway's `handle_failed/2` callback to handle failures, e.g. by moving messages to another stream or persisting failed jobs.
## Message Data
Message data is a 2-element list: the first item is the id of the message, the second is the data.
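## Example
A minimal sketch of a Broadway pipeline using this producer (the module name and
option values are illustrative, not from the original docs):
```elixir
defmodule MyApp.Pipeline do
  use Broadway

  def start_link(_opts) do
    Broadway.start_link(__MODULE__,
      name: __MODULE__,
      producer: [
        module:
          {OffBroadwayRedisStream.Producer,
           redis_client_opts: [host: "localhost"],
           stream: "orders",
           group: "order-processors",
           consumer_name: "consumer-1"}
      ],
      processors: [default: [concurrency: 10]]
    )
  end

  @impl true
  def handle_message(_processor, message, _context) do
    # message data is [id, data], as described above
    [_id, _data] = message.data
    message
  end
end
```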
"""
use GenStage
alias Broadway.Producer
alias Broadway.Message
alias OffBroadwayRedisStream.Acknowledger
alias OffBroadwayRedisStream.Heartbeat
alias OffBroadwayRedisStream.RedisClient
require Logger
@behaviour Producer
@default_opts [
heartbeat_interval: 5000,
receive_interval: 1000,
client: OffBroadwayRedisStream.RedixClient,
allowed_missed_heartbeats: 3,
max_pending_ack: 100_000,
redis_command_retry_timeout: 300,
group_start_id: "$",
make_stream: false
]
@impl GenStage
def init(opts) do
opts = Keyword.merge(@default_opts, opts)
validate!(opts)
client = opts[:client]
case client.init(opts) do
{:error, message} ->
raise ArgumentError, "invalid options given to #{inspect(client)}.init/1, " <> message
{:ok, redis_config} ->
init_consumer_group!(client, opts[:group_start_id], redis_config)
{:ok, heartbeat_pid} =
Heartbeat.start_link(client, redis_config, opts[:heartbeat_interval])
state =
Map.new(opts)
|> Map.merge(%{
demand: 0,
redis_client: client,
redis_config: redis_config,
receive_timer: nil,
last_id: "0",
last_checked: 0,
heartbeat_pid: heartbeat_pid,
pending_ack: [],
retryable: []
})
{:producer, state}
end
end
@impl GenStage
def handle_demand(demand, state) do
receive_messages(%{state | demand: state.demand + demand})
end
@impl GenStage
def handle_info(:receive_messages, %{receive_timer: nil} = state) do
{:noreply, [], state}
end
@impl GenStage
def handle_info(:receive_messages, state) do
receive_messages(%{state | receive_timer: nil})
end
@impl GenStage
def handle_info({:ack, ack_ids, retryable}, state) do
ids = state.pending_ack ++ ack_ids
state = %{state | retryable: state.retryable ++ retryable}
case redis_cmd(:ack, [ids], state, 0) do
:ok ->
{:noreply, [], %{state | pending_ack: []}}
{:error, error} ->
Logger.warn("Unable to acknowledge messages with Redis. Reason: #{inspect(error)}")
if length(ids) > state.max_pending_ack do
{:stop, "Pending ack count is more than maximum limit #{state.max_pending_ack}", state}
else
{:noreply, [], %{state | pending_ack: ids}}
end
end
end
@impl GenStage
def handle_info(_, state) do
{:noreply, [], state}
end
@impl GenStage
def terminate(_reason, state) do
case redis_cmd(:ack, [state.pending_ack], state, 2) do
:ok ->
:ok
{:error, error} ->
Logger.warn("Unable to acknowledge messages with Redis. Reason: #{inspect(error)}")
end
Heartbeat.stop(state.heartbeat_pid)
:ok
end
@impl Producer
def prepare_for_draining(%{receive_timer: receive_timer} = state) do
receive_timer && Process.cancel_timer(receive_timer)
{:noreply, [], %{state | receive_timer: nil}}
end
defp receive_messages(%{receive_timer: nil, demand: demand} = state) when demand > 0 do
{retryable_messages, state} = retryable_messages(state)
state = %{state | demand: state.demand - length(retryable_messages)}
{claimed_messages, last_checked} = maybe_claim_dead_consumer_messages(state)
state = %{state | demand: state.demand - length(claimed_messages), last_checked: last_checked}
{new_messages, last_id} = fetch_messages_from_redis(state)
state = %{state | demand: state.demand - length(new_messages), last_id: last_id}
messages = retryable_messages ++ claimed_messages ++ new_messages
receive_timer = maybe_schedule_timer(state, length(messages), state.demand)
{:noreply, messages, %{state | receive_timer: receive_timer}}
end
defp receive_messages(state) do
{:noreply, [], state}
end
defp maybe_schedule_timer(state, current, demand) do
case {current, demand} do
{0, _} -> schedule_receive_messages(state.receive_interval)
{_, 0} -> nil
_ -> schedule_receive_messages(0)
end
end
defp schedule_receive_messages(interval) do
Process.send_after(self(), :receive_messages, interval)
end
defp maybe_claim_dead_consumer_messages(state) do
now = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
expire_time = state.allowed_missed_heartbeats * state.heartbeat_interval
last_checked = state.last_checked
if now - last_checked > expire_time do
{redis_messages, state} = claim_dead_consumer_messages(state)
if length(redis_messages) > 0 do
%{stream: stream, group: group} = state
{wrap_messages(redis_messages, stream, group), last_checked}
else
{[], now}
end
else
{[], last_checked}
end
end
defp claim_dead_consumer_messages(state, acc \\ []) do
{:ok, consumers} = redis_cmd(:consumers_info, [], state)
expire_time = state.allowed_missed_heartbeats * state.heartbeat_interval
{dead_without_pending, dead_with_pending} = dead_consumers(consumers, expire_time)
prune_consumers(dead_without_pending, state)
{status, messages} = claim_consumers(dead_with_pending, state)
messages = acc ++ messages
state = %{state | demand: state.demand - length(messages)}
case status do
:ok -> {messages, state}
# someone else consumed messages
:reset -> claim_dead_consumer_messages(state, messages)
end
end
defp dead_consumers(consumers, expire_time) do
consumers
|> Enum.filter(&(&1["idle"] > expire_time))
|> Enum.reduce(
{[], []},
fn
%{"pending" => 0} = consumer, {without_pending, with_pending} ->
{[consumer | without_pending], with_pending}
consumer, {without_pending, with_pending} ->
{without_pending, [consumer | with_pending]}
end
)
end
defp claim_consumers(consumers, state) do
consumers
|> Enum.concat([:end])
|> Enum.reduce_while(
{[], state.demand},
fn
:end, {acc, _demand} ->
{:halt, {:ok, acc}}
consumer, {acc, demand} ->
case claim_consumer(state, consumer, demand) do
{:ok, messages} when length(messages) == demand ->
{:halt, {:ok, acc ++ messages}}
{:ok, messages} ->
{:cont, {acc ++ messages, demand - length(messages)}}
{:reset, messages} ->
{:halt, {:reset, acc ++ messages}}
end
end
)
end
defp claim_consumer(state, consumer, demand) do
count = min(consumer["pending"], demand)
{:ok, pending_messages} = redis_cmd(:pending, [consumer["name"], count], state)
ids = Enum.map(pending_messages, &Enum.at(&1, 0))
{:ok, messages} = redis_cmd(:claim, [consumer["idle"], ids], state)
received = length(messages)
cond do
received == demand ->
{:ok, messages}
received != length(ids) ->
# someone else consumed messages
{:reset, messages}
true ->
{:ok, messages}
end
end
@max_messages_per_batch 100_000
defp fetch_messages_from_redis(%{demand: demand} = state) when demand == 0,
do: {[], state.last_id}
defp fetch_messages_from_redis(state) do
%{
demand: demand,
stream: stream,
group: group,
consumer_name: consumer_name,
last_id: last_id
} = state
count = min(demand, @max_messages_per_batch)
case redis_cmd(:fetch, [count, last_id], state) do
{:ok, []} ->
{[], ">"}
{:ok, redis_messages} ->
last_id =
cond do
last_id == ">" ->
">"
length(redis_messages) < count ->
">"
true ->
[last_id, _] = List.last(redis_messages)
last_id
end
{wrap_messages(redis_messages, stream, group), last_id}
{:error, reason} ->
raise "cannot fetch messages from Redis (stream=#{stream} group=#{group} " <>
"consumer=#{consumer_name}). Reason: #{inspect(reason)}"
end
end
defp retryable_messages(state) do
%{demand: demand, retryable: retryable} = state
{messages, rest} = Enum.split(retryable, demand)
{prepare_failed_messages(messages), %{state | retryable: rest}}
end
defp wrap_messages(redis_messages, stream, group) do
Enum.map(redis_messages, fn [id, _] = data ->
ack_data = %{id: id, retry: false}
ack_ref = {self(), {stream, group}}
%Message{
data: data,
metadata: %{id: id, attempt: 1},
acknowledger: {Acknowledger, ack_ref, ack_data}
}
end)
end
defp prepare_failed_messages(messages) do
Enum.map(messages, fn message ->
{_, ack_ref, ack_data} = message.acknowledger
metadata = Map.update!(message.metadata, :attempt, &(&1 + 1))
%Message{
message
| metadata: metadata,
acknowledger: {Acknowledger, ack_ref, %{ack_data | retry: false}}
}
end)
end
@max_retries 2
defp redis_cmd(func, args, state, max_retries \\ @max_retries, retry_count \\ 0) do
%{redis_client: client, redis_config: redis_config} = state
case apply(client, func, args ++ [redis_config]) do
{:error, %RedisClient.ConnectionError{} = error} when retry_count < max_retries ->
Logger.warn(
"Failed to run #{func}, retry_count: #{retry_count}, reason: #{inspect(error.reason)}"
)
Process.sleep(state.redis_command_retry_timeout * (retry_count + 1))
redis_cmd(func, args, state, max_retries, retry_count + 1)
result ->
result
end
end
defp init_consumer_group!(client, group_start_id, redis_config) do
:ok = client.create_group(group_start_id, redis_config)
end
defp prune_consumers([], _state), do: :ok
defp prune_consumers(consumers, state) do
%{redis_client: client, redis_config: redis_config} = state
names = Enum.map(consumers, & &1["name"])
_ = client.delete_consumers(names, redis_config)
end
defp validate!(opts) do
case validate(opts) do
:ok -> :ok
{:error, error} -> raise ArgumentError, message: error
end
end
defp validate(opts) when is_list(opts) do
with :ok <- validate_option(:stream, opts[:stream]),
:ok <- validate_option(:group, opts[:group]),
:ok <- validate_option(:consumer_name, opts[:consumer_name]),
:ok <- validate_option(:receive_interval, opts[:receive_interval]),
:ok <- validate_option(:allowed_missed_heartbeats, opts[:allowed_missed_heartbeats]),
:ok <- validate_option(:heartbeat_interval, opts[:heartbeat_interval]),
:ok <- validate_option(:make_stream, opts[:make_stream]) do
:ok
end
end
defp validate_option(:group, value) when not is_binary(value) or value == "",
do: validation_error(:group, "a non empty string", value)
defp validate_option(:consumer_name, value) when not is_binary(value) or value == "",
do: validation_error(:consumer_name, "a non empty string", value)
defp validate_option(:stream, value) when not is_binary(value) or value == "",
do: validation_error(:stream, "a non empty string", value)
defp validate_option(:heartbeat_interval, value) when not is_integer(value) or value < 0,
do: validation_error(:heartbeat_interval, "a positive integer", value)
defp validate_option(:receive_interval, value) when not is_integer(value) or value < 0,
do: validation_error(:receive_interval, "a positive integer", value)
defp validate_option(:group_start_id, value) when not is_binary(value),
do: validation_error(:group_start_id, "a redis stream id or $", value)
defp validate_option(:allowed_missed_heartbeats, value)
when not is_integer(value) or value < 0,
do: validation_error(:allowed_missed_heartbeats, "a positive integer", value)
defp validate_option(:redis_command_retry_timeout, value)
when not is_integer(value) or value < 0,
do: validation_error(:redis_command_retry_timeout, "a positive integer", value)
defp validate_option(:make_stream, value) when not is_boolean(value),
do: validation_error(:make_stream, "a boolean", value)
defp validate_option(_, _), do: :ok
defp validation_error(option, expected, value) do
{:error, "expected #{inspect(option)} to be #{expected}, got: #{inspect(value)}"}
end
end
|
lib/producer.ex
| 0.897302
| 0.802594
|
producer.ex
|
starcoder
|
defmodule ElasticsearchElixirBulkProcessor.Helpers.BulkResponse do
@doc ~S"""
Given a list of items from a bulk response and the data sent as a list of requests, return the items that match the errors.
## Examples
iex> items = [%{"index" => %{}}, %{"update" => %{"error" => %{}}}, %{"create" => %{}}, %{"delete" => %{}}]
...> data = ["item", "item_with_errors", "item", "item"]
...> ElasticsearchElixirBulkProcessor.Helpers.BulkResponse.gather_error_items(items, data)
["item_with_errors"]
iex> items = [%{"index" => %{"error" => %{}}}, %{"update" => %{"error" => %{}}}, %{"create" => %{"error" => %{}}}, %{"delete" => %{"error" => %{}}}]
...> data = ["item1", "item2", "item3", "item4"]
...> ElasticsearchElixirBulkProcessor.Helpers.BulkResponse.gather_error_items(items, data)
["item1", "item2", "item3", "item4"]
"""
def gather_error_items(items, data) when is_list(data) do
data
|> Stream.zip(items)
|> Stream.filter(fn
{_, %{"index" => %{"error" => _}}} -> true
{_, %{"update" => %{"error" => _}}} -> true
{_, %{"create" => %{"error" => _}}} -> true
{_, %{"delete" => %{"error" => _}}} -> true
{_, _} -> false
end)
|> Enum.map(fn {data, _} -> data end)
end
@doc ~S"""
Given a list of items from a bulk response and the data sent as a string payload, return the items that match the errors.
## Examples
iex> items = [%{"index" => %{}}, %{"update" => %{"error" => %{}}}, %{"create" => %{}}, %{"delete" => %{}}]
...> data = "item\nitem_with_errors\nitem\nitem"
...> ElasticsearchElixirBulkProcessor.Helpers.BulkResponse.gather_error_items(items, data)
"item_with_errors"
iex> items = [%{"index" => %{"error" => %{}}}, %{"update" => %{"error" => %{}}}, %{"create" => %{"error" => %{}}}, %{"delete" => %{"error" => %{}}}]
...> data = "item1\nitem2\nitem3\nitem4"
...> ElasticsearchElixirBulkProcessor.Helpers.BulkResponse.gather_error_items(items, data)
"item1\nitem2\nitem3\nitem4"
"""
def gather_error_items(items, data) when is_binary(data) do
data_list =
data
|> String.split("\n")
gather_error_items(items, data_list)
|> Enum.join("\n")
end
@doc ~S"""
Given a list of items, return true if all have an error.
## Examples
iex> items = [%{"index" => %{"error" => %{}}}, %{"update" => %{"error" => %{}}}, %{"create" => %{"error" => %{}}}, %{"delete" => %{"error" => %{}}}]
...> ElasticsearchElixirBulkProcessor.Helpers.BulkResponse.all_items_error?(items)
true
iex> items = [%{"index" => %{}}, %{"update" => %{"error" => %{}}}, %{"create" => %{}}, %{"delete" => %{}}]
...> ElasticsearchElixirBulkProcessor.Helpers.BulkResponse.all_items_error?(items)
false
"""
def all_items_error?(items),
do:
Enum.all?(items, fn
%{"index" => %{"error" => _}} -> true
%{"update" => %{"error" => _}} -> true
%{"create" => %{"error" => _}} -> true
%{"delete" => %{"error" => _}} -> true
_ -> false
end)
end
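# A hedged sketch of combining these helpers when handling a bulk response;
# the `response`, `payload`, and `retry/1` names are illustrative assumptions:
#
#     %{"errors" => true, "items" => items} = response
#     items
#     |> ElasticsearchElixirBulkProcessor.Helpers.BulkResponse.gather_error_items(payload)
#     |> retry()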
|
lib/elasticsearch_elixir_bulk_processor/helpers/bulk_response.ex
| 0.636918
| 0.523116
|
bulk_response.ex
|
starcoder
|
defmodule Machinist do
@moduledoc """
`Machinist` is a small library that allows you to implement finite state machines
in a simple way. It provides a simple DSL to write combinations of
transitions based on events.
A good example is how we would implement the functioning of a door. With `machinist` it would look like this:
defmodule Door do
defstruct [state: :locked]
use Machinist
transitions do
from :locked, to: :unlocked, event: "unlock"
from :unlocked, to: :locked, event: "lock"
from :unlocked, to: :opened, event: "open"
from :opened, to: :closed, event: "close"
from :closed, to: :opened, event: "open"
from :closed, to: :locked, event: "lock"
end
end
By defining these rules with the `transitions` and `from` macros, `machinist` generates and injects into the `Door` module `transit/2` functions like this one:
def transit(%Door{state: :locked} = struct, event: "unlock") do
{:ok, %Door{struct | state: :unlocked}}
end
_The `transit/2` functions implement the behaviour_ `Machinist.Transition`,
so we can transit between states by relying on **state** + **event** pattern matching.
Let's see this in practice:
By default our `Door` is `locked`
iex> door_locked = %Door{}
%Door{state: :locked}
So let's change its state to `unlocked` and `opened`
iex> {:ok, door_unlocked} = Door.transit(door_locked, event: "unlock")
iex> {:ok, %Door{state: :unlocked}}
iex> {:ok, door_opened} = Door.transit(door_unlocked, event: "open")
iex> {:ok, %Door{state: :opened}}
If we try to make a transition that doesn't follow the rules, we get an error:
iex> Door.transit(door_opened, event: "lock")
{:error, :not_allowed}
### Group same-state `from` definitions
In the example above we could also group the `from :unlocked` definitions like this:
# ...
transitions do
from :locked, to: :unlocked, event: "unlock"
from :unlocked do
to :locked, event: "lock"
to :opened, event: "open"
end
from :opened, to: :closed, event: "close"
from :closed, to: :opened, event: "open"
from :closed, to: :locked, event: "lock"
end
# ...
This is an option for better organization and increased readability when you have
a large number of `from` definitions with the same state.
### Setting different attribute name that holds the state
By default `machinist` expects the struct being updated holds a `state` attribute,
if you hold state in a different attribute, just pass the name as an atom, as follows:
transitions attr: :door_state do
# ...
end
And then `machinist` will set the state in that attribute:
iex> Door.transit(door, event: "unlock")
{:ok, %Door{door_state: :unlocked}}
### Implementing different versions of a state machine
Let's suppose we want to build a selection process app that handles candidates'
applications, and candidates may go through different versions of the process. For example:
A Selection Process **V1** with the following sequence of stages: [Registration] -> [**Code test**] -> [Enrollment]
And a Selection Process **V2** with these ones: [Registration] -> [**Interview**] -> [Enrollment]
The difference here is that in **V1** candidates must take a **Code Test**, while in **V2** they have an **Interview**.
So, we could have a `%Candidate{}` struct that holds these attributes:
defmodule SelectionProcess.Candidate do
defstruct [:name, :state, test_score: 0]
end
And a `SelectionProcess` module that implements the state machine.
Notice that this time we don't implement the rules in the module that holds
the state; in this case it makes more sense for `SelectionProcess` to keep the rules,
since we want more than one state machine version handling candidates, as mentioned before.
This is our **V1** of the process:
defmodule SelectionProcess.V1 do
use Machinist
alias SelectionProcess.Candidate
@minimum_score 100
transitions Candidate do
from :new, to: :registered, event: "register"
from :registered, to: :started_test, event: "start_test"
from :started_test, to: &check_score/1, event: "send_test"
from :approved, to: :enrolled, event: "enroll"
end
defp check_score(%Candidate{test_score: score}) do
if score >= @minimum_score, do: :approved, else: :reproved
end
end
In this code we pass the `Candidate` module as a parameter to `transitions`
to tell `machinist` that we expect `V1.transit/2` functions with a `%Candidate{}`
struct as first argument and not `%SelectionProcess.V1{}`, which would be the default.
def transit(%Candidate{state: :new} = struct, event: "register") do
{:ok, %Candidate{struct | state: :registered}}
end
Also notice we provided the *function* `&check_score/1` to the option `to:` instead of an *atom*, in order to decide the state based on the candidate `test_score` value.
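For instance, sending the test with a passing score resolves the new state through `check_score/1`:
iex> candidate = %Candidate{name: "Ada", state: :started_test, test_score: 120}
iex> SelectionProcess.V1.transit(candidate, event: "send_test")
iex> {:ok, %Candidate{name: "Ada", state: :approved, test_score: 120}}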
In **version 2**, we replace the `Code Test` stage with the `Interview`, which has different state transitions:
defmodule SelectionProcess.V2 do
use Machinist
alias SelectionProcess.Candidate
transitions Candidate do
from :new, to: :registered, event: "register"
from :registered, to: :interview_scheduled, event: "schedule_interview"
from :interview_scheduled, to: :approved, event: "approve_interview"
from :interview_scheduled, to: :reproved, event: "reprove_interview"
from :approved, to: :enrolled, event: "enroll"
end
end
Now let's see how this could be used:
**V1:** A `registered` candidate wants to start its test.
iex> candidate1 = %Candidate{name: "Ada", state: :registered}
iex> SelectionProcess.V1.transit(candidate1, event: "start_test")
iex> {:ok, %Candidate{state: :started_test}}
**V2:** A `registered` candidate wants to schedule the interview
iex> candidate2 = %Candidate{name: "Jose", state: :registered}
iex> SelectionProcess.V2.transit(candidate2, event: "schedule_interview")
iex> {:ok, %Candidate{state: :interview_scheduled}}
That's great because we can implement many state machines for a single
entity, test different scenarios, and collect data to decide which one is better.
`machinist` gives us this flexibility since it's just pure Elixir.
### Transiting from any state to another
Sometimes we need to define a `from` _any state_ transition.
Still in the selection process example, a candidate can abandon the process at any given state, and we want to be able to transit them to `application_expired` from whatever state they are in. To do so, we just define a `from` with an underscore variable so the current state is ignored.
defmodule SelectionProcess.V2 do
use Machinist
alias SelectionProcess.Candidate
transitions Candidate do
# ...
from _state, to: :application_expired, event: "application_expired"
end
end
## How does the DSL work?
The use of `transitions` in combination with each `from` statement will be
transformed into functions that are injected into the module using `machinist`.
This implementation:
defmodule Door do
defstruct state: :locked
use Machinist
transitions do
from :locked, to: :unlocked, event: "unlock"
from :unlocked, to: :locked, event: "lock"
from :unlocked, to: :opened, event: "open"
from :opened, to: :closed, event: "close"
from :closed, to: :opened, event: "open"
from :closed, to: :locked, event: "lock"
end
end
is the same as:
defmodule Door do
defstruct state: :locked
def transit(%__MODULE__{state: :locked} = struct, event: "unlock") do
{:ok, %__MODULE__{struct | state: :unlocked}}
end
def transit(%__MODULE__{state: :unlocked} = struct, event: "lock") do
{:ok, %__MODULE__{struct | state: :locked}}
end
def transit(%__MODULE__{state: :unlocked} = struct, event: "open") do
{:ok, %__MODULE__{struct | state: :opened}}
end
def transit(%__MODULE__{state: :opened} = struct, event: "close") do
{:ok, %__MODULE__{struct | state: :closed}}
end
def transit(%__MODULE__{state: :closed} = struct, event: "open") do
{:ok, %__MODULE__{struct | state: :opened}}
end
def transit(%__MODULE__{state: :closed} = struct, event: "lock") do
{:ok, %__MODULE__{struct | state: :locked}}
end
# a catchall function in case of unmatched clauses
def transit(_, _), do: {:error, :not_allowed}
end
So, as we can see, `machinist` eliminates a lot of boilerplate, making
the code easier to maintain and less prone to errors.
"""
@doc false
defmacro __using__(_) do
quote do
@__attr__ :state
@behaviour Machinist.Transition
import unquote(__MODULE__)
@before_compile unquote(__MODULE__)
end
end
@doc """
Defines a block of transitions.
By default `transitions/1` expects the module using `Machinist` to have a struct
defined with a `state` attribute
transitions do
# ...
end
"""
defmacro transitions(do: block) do
quote do
@__struct__ __MODULE__
unquote(block)
end
end
@doc """
Defines a block of transitions for a specific struct, or a block of
transitions with the `attr` option defining the attribute that holds the state
## Examples
### A Candidate being handled by two different versions of a SelectionProcess
defmodule Candidate do
defstruct state: :new
end
defmodule SelectionProcess.V1 do
use Machinist
transitions Candidate do
from :new, to: :registered, event: "register"
end
end
defmodule SelectionProcess.V2 do
use Machinist
transitions Candidate do
from :new, to: :enrolled, event: "enroll"
end
end
### Providing the `attr` option to define the attribute holding the state
defmodule Candidate do
defstruct candidate_state: :new
use Machinist
transitions attr: :candidate_state do
from :new, to: :registered, event: "register"
end
end
"""
defmacro transitions(list_or_struct, block)
defmacro transitions([attr: attr], do: block) do
quote do
@__attr__ unquote(attr)
@__struct__ __MODULE__
unquote(block)
end
end
defmacro transitions(struct, do: block) do
quote do
@__struct__ unquote(struct)
unquote(block)
end
end
@doc """
Defines a block of transitions for a specific struct with `attr` option
defining the attribute holding the state
transitions Candidate, attr: :candidate_state do
# ...
end
"""
defmacro transitions(struct, [attr: attr], do: block) do
quote do
@__attr__ unquote(attr)
@__struct__ unquote(struct)
unquote(block)
end
end
@doc """
Defines a state transition with the given `state`, and the list of options `[to: new_state, event: event]`
from 1, to: 2, event: "next"
It's also possible to define a `from` any state transition to another specific one, by just passing an underscore variable in place of a real state value
from _state, to: :expired, event: "enrollment_expired"
"""
defmacro from(state, do: {_, _line, to_statements}) do
define_transitions(state, to_statements)
end
defmacro from(state, to: new_state, event: event) do
define_transition(state, to: new_state, event: event)
end
defp define_transitions(_state, []), do: []
defp define_transitions(state, [{:to, _line, [new_state, [event: event]]} | transitions]) do
[
define_transition(state, to: new_state, event: event)
| define_transitions(state, transitions)
]
end
defp define_transition(state, to: new_state, event: event) do
quote do
@impl true
def transit(%@__struct__{@__attr__ => unquote(state)} = resource, event: unquote(event)) do
value = __set_new_state__(resource, unquote(new_state))
{:ok, Map.put(resource, @__attr__, value)}
end
end
end
@doc false
defmacro __before_compile__(_) do
quote do
@impl true
def transit(_resource, _opts) do
{:error, :not_allowed}
end
defp __set_new_state__(resource, new_state) when is_function(new_state) do
new_state.(resource)
end
defp __set_new_state__(_, new_state), do: new_state
end
end
end
|
lib/machinist.ex
| 0.802246
| 0.740292
|
machinist.ex
|
starcoder
|
defmodule Mix.Tasks.Repeatex.Readme do
use Mix.Task
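@moduledoc """
Regenerates `README.md` from the `README.eex.md` template by evaluating the
`parse/1`, `schedule/2` and `format/1` helpers below against the current code.
"""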
require EEx
@readme_eex "README.eex.md"
@readme "README.md"
@shortdoc "Generate internal README from current code state"
@examples [
"every other day",
"every other monday",
"each tues",
"mon-sat every week",
"every 3rd of the month",
"1st and 3rd every 2 months",
"on the 3rd tuesday of every month"
]
@pending [
"weekly on thursdays",
"1st of every quarter",
"on the third tuesday of each month"
]
def run(_) do
examples = @examples |> Enum.filter(&Repeatex.parse/1)
|> Enum.map(&parse/1)
|> Enum.join("\n")
pending = @pending |> Enum.map(&( "- [ ] \"#{&1}\""))
|> Enum.join("\n")
content = EEx.eval_file(@readme_eex, [
parse: &parse/1,
schedule: &schedule/2,
format: &format/1,
today: today(),
examples: examples, pending: pending
])
File.write!(@readme, content)
end
def parse(description) do
"""
```elixir
Repeatex.parse("#{description}")
# #{Repeatex.parse(description) |> pretty_format}
```
"""
end
def schedule(repeatex, date) do
"""
```elixir
# #{Repeatex.description(repeatex)}:
repeatex = #{repeatex |> to_str}
Repeatex.next_date(repeatex, #{date |> to_str}) # => #{Repeatex.next_date(repeatex, date) |> to_str}
```
"""
end
def format(repeatex) do
"""
```elixir
repeatex = #{repeatex |> to_str}
Repeatex.description(repeatex)
# => #{Repeatex.description(repeatex) |> to_str}
```
"""
end
def today do
{date, _} = :calendar.local_time
date
end
def to_str(item, opts \\ []) do
opts = struct(Inspect.Opts, opts)
Inspect.Algebra.format(Inspect.Algebra.to_doc(item, opts), 100)
|> to_string
end
def pretty_format(item) do
Apex.Format.format(item, color: false)
|> String.replace("\n", "\n# ")
|> String.replace("Elixir.", "")
|> String.replace(~r/\[.\]\s/, "")
|> String.slice(0..-4)
end
end
|
lib/tasks/readme.ex
| 0.621885
| 0.67392
|
readme.ex
|
starcoder
|
defmodule ExlasticSearch.Repo do
@moduledoc """
API executor for Elasticsearch. The general pattern is to define an `ExlasticSearch.Model`
on an Ecto model, then call any of these functions to manage the model.
To configure the URL the repo points to, do:
```
config :exlasticsearch, ExlasticSearch.Repo,
url: "https://elasticsearch.url.io:9200"
"""
use Scrivener
use ExlasticSearch.Retry.Decorator
alias ExlasticSearch.{Indexable, Query, Aggregation, Response}
alias Elastix.{Index, Mapping, Document, Bulk, Search, HTTP}
require Logger
@chunk_size 2000
@type response :: {:ok, %HTTPoison.Response{}} | {:error, any}
@log_level Application.get_env(:exlasticsearch, __MODULE__, []) |> Keyword.get(:log_level, :debug)
@doc """
Creates an index as defined in `model`
"""
@spec create_index(atom) :: response
def create_index(model, index \\ :index) do
es_url()
|> Index.create(model.__es_index__(index), model.__es_settings__())
end
@doc """
Updates the index for `model`
"""
def update_index(model) do
url = es_url() <> "/#{model.__es_index__(:index)}/_settings"
HTTP.put(url, Poison.encode!(model.__es_settings__()))
end
@doc """
Close an index for `model`
"""
def close_index(model) do
url = es_url() <> "/#{model.__es_index__(:index)}/_close"
HTTP.post(url, "")
end
@doc """
open an index for `model`
"""
def open_index(model) do
url = es_url() <> "/#{model.__es_index__(:index)}/_open"
HTTP.post(url, "")
end
@doc """
Updates an index's mappings to the current definition in `model`
"""
@spec create_mapping(atom) :: response
def create_mapping(model, index \\ :index) do
es_url()
|> Mapping.put(model.__es_index__(index), model.__doc_type__(), model.__es_mappings__())
end
@doc """
Removes the index defined in `model`
"""
@spec delete_index(atom) :: response
def delete_index(model, index \\ :index) do
es_url()
|> Index.delete(model.__es_index__(index))
end
@doc """
Aliases one index version to another, for instance:
```
create_alias(MyModel, read: :index)
```
will create an alias of the read version of the model's index
against its indexing version
"""
@spec create_alias(atom, [{atom, atom}]) :: response
def create_alias(model, [{from, target}]) do
url = "#{es_url()}/_aliases"
from_index = model.__es_index__(from)
target_index = model.__es_index__(target)
json = Poison.encode!(%{
actions: [
%{
add: %{
index: from_index,
alias: target_index
},
}
]
})
HTTP.post(url, json)
end
@doc """
Deletes the read index and aliases the write index to it
"""
@spec rotate(atom) :: response
def rotate(model) do
with false <- model.__es_index__(:read) == model.__es_index__(:index),
_result <- delete_index(model, :read),
do: create_alias(model, index: :read)
end
@doc """
Retrieves the aliases for a given index
"""
@spec get_alias(atom, atom) :: response
def get_alias(model, index) when is_atom(index) do
index_name = model.__es_index__(index)
url = "#{es_url()}/#{index_name}/_alias/*"
HTTP.get(url)
end
@doc """
Checks if the index for `model` exists
"""
@spec exists?(atom) :: boolean
def exists?(model, index \\ :read) do
es_url()
|> Index.exists?(model.__es_index__(index))
|> case do
{:ok, result} -> result
_ -> false
end
end
@doc """
Refreshes `model`'s index
"""
def refresh(model) do
es_url()
|> Index.refresh(model.__es_index__())
end
@doc """
Adds a struct into its associated index. The struct will be passed through the `ExlasticSearch.Indexable`
protocol prior to insertion
"""
@spec index(struct) :: response
@decorate retry()
def index(%{__struct__: model} = struct) do
id = Indexable.id(struct)
document = build_document(struct)
es_url()
|> Document.index(model.__es_index__(:index), model.__doc_type__(), id, document)
|> log_response()
|> mark_failure()
end
@doc """
Gets an ES document by _id
"""
@spec get(struct) :: response
def get(%{__struct__: model} = struct, index_type \\ :read) do
es_url()
|> Document.get(model.__es_index__(index_type), model.__doc_type__(), Indexable.id(struct))
|> log_response()
|> decode(Response.Record, model)
end
@doc """
Creates a call to `search/3` by realizing `query` (using `ExlasticSearch.Query.realize/1`) and any provided search opts
"""
@spec search(Query.t, list) :: response
def search(%Query{queryable: model} = query, params),
do: search(model, Query.realize(query), params, query.index_type || :read)
@doc """
Searches the index and type associated with `model` according to query `search`
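A hedged sketch, where `MyModel` is a hypothetical module using `ExlasticSearch.Model` and the search map follows the Elasticsearch query DSL:
```
ExlasticSearch.Repo.search(MyModel, %{query: %{match: %{title: "hello"}}}, [])
```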
"""
@spec search(atom, map, list) :: response
def search(model, search, params, index_type \\ :read) do
es_url()
|> Search.search(model.__es_index__(index_type), [model.__doc_type__()], search, params)
|> log_response()
|> decode(Response.Search, model)
end
@doc """
Performs an aggregation against a query, and returns only the aggregation results.
"""
def aggregate(%Query{queryable: model} = query, %Aggregation{} = aggregation) do
search =
Query.realize(query)
|> Map.merge(Aggregation.realize(aggregation))
index_type = query.index_type || :read
es_url()
|> Search.search(model.__es_index__(index_type), [model.__doc_type__()], search, size: 0)
|> log_response() # TODO: figure out how to decode these, it's not trivial to type them
end
@doc """
Removes `struct` from the index of its model
"""
@spec delete(struct) :: response
@decorate retry()
def delete(%{__struct__: model} = struct) do
es_url()
|> Document.delete(model.__es_index__(:index), model.__doc_type__(), Indexable.id(struct))
|> log_response()
|> mark_failure()
end
@doc """
Generates an Elasticsearch bulk request. `operations` should be of the form:
```
[
{:index, struct},
{:delete, other_struct},
{:update, third_struct}
]
```
The function will handle formatting the bulk request properly and passing each
struct to the `ExlasticSearch.Indexable` protocol
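For example (`post` and `old_post` being hypothetical indexable structs):
```
ExlasticSearch.Repo.bulk([{:index, post}, {:delete, old_post}])
```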
"""
def bulk(operations, opts \\ []) do
bulk_request = operations
|> Enum.map(&bulk_operation/1)
|> Enum.concat()
es_url()
|> Bulk.post(bulk_request, [], opts)
|> log_response()
|> mark_failure()
end
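@doc """
Bulk indexes a stream of structs: chunks the stream and inserts each chunk
via `bulk/2` in parallel using Flow, with `parallelism` stages and a max
demand of `demand` per stage. Returns a flow that emits the size of each
inserted chunk; run it (e.g. with `Enum.to_list/1`) to execute.
"""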
def index_stream(stream, parallelism \\ 10, demand \\ 10) do
stream
|> Stream.chunk_every(@chunk_size)
|> Flow.from_enumerable(stages: parallelism, max_demand: demand)
|> Flow.map(&insert_chunk/1)
end
defp insert_chunk(chunk) do
chunk
|> Enum.map(& {:index, &1})
|> bulk()
length(chunk)
end
defp log_response(response) do
Logger.log(@log_level, fn -> "Elasticsearch response: #{inspect(response)}" end)
response
end
defp bulk_operation({:delete, %{__struct__: model} = struct}),
do: [%{delete: %{_id: Indexable.id(struct), _index: model.__es_index__(:index), _type: model.__doc_type__()}}]
defp bulk_operation({op_type, %{__struct__: model} = struct}) do
[
%{op_type => %{_id: Indexable.id(struct), _index: model.__es_index__(:index), _type: model.__doc_type__()}},
build_document(struct)
]
end
defp build_document(struct), do: struct |> Indexable.preload() |> Indexable.document()
defp es_url(), do: Application.get_env(:exlasticsearch, __MODULE__)[:url]
defp decode({:ok, %HTTPoison.Response{body: body}}, response, model) do
case response.parse(body, model) do
nil -> {:error, :not_found}
result -> {:ok, result}
end
end
defp decode(response, _, _), do: response
defp mark_failure({:ok, %HTTPoison.Response{body: %{"_shards" => %{"successful" => 0}}} = result}), do: {:error, result}
defp mark_failure({:ok, %HTTPoison.Response{body: %{"errors" => true}} = result}), do: {:error, result}
defp mark_failure(result), do: result
end
|
lib/exlasticsearch/repo.ex
| 0.839865
| 0.73954
|
repo.ex
|
starcoder
|
defmodule SimpleMarkdownExtensionBlueprint do
@moduledoc """
Adds syntax for issuing a blueprint command and embedding
the resulting SVG.
The command takes the form of `@blueprint[]` or `@blueprint()`.
Where inside the brackets are the arguments that can be
passed to a `blueprint` escript. The `@blueprint` prefix may
optionally be followed by options separated by a `-` to
customize how it should be added to the page.
These options are:
* Overriding the width of the element that comes after it,
by providing a `w` before the literal to be used for the
width (where no literal means it will use the default
pixel width). e.g. If it comes before the `[]` it will
affect the width of the SVG element, whereas if it comes
before an `embed` option it will affect the width of the
container.
* Overriding the height of the element that comes after it,
by providing a `h` before the literal to be used for the
height (where no literal means it will use the default
pixel height). e.g. If it comes before the `[]` it will
affect the height of the SVG element, whereas if it comes
before an `embed` option it will affect the height of the
container.
* Place the SVG in a scrollable container, by providing
the `embed` option.
## Example
@blueprint[plot app --messages --colour]
Which produces:
@blueprint[plot app --messages --colour]
## Fixed size example
@blueprint-w300px-h50px[plot app --messages --colour]
Which produces:
@blueprint-w300px-h50px[plot app --messages --colour]
## Relative size example
@blueprint-w50%-h50%[plot app --messages --colour]
Which produces:
@blueprint-w50%-h50%[plot app --messages --colour]
## Embed example
@blueprint-embed[plot app --messages --colour]
Which produces:
@blueprint-embed[plot app --messages --colour]
## Fixed size embed with relative size example
@blueprint-w100px-h50px-embed-w500%-h500%[plot app --messages --colour]
Which produces:
@blueprint-w300px-h150px-embed-w1000%-h1000%[plot app --messages --colour]
"""
defstruct [command: nil, width: "100%", height: "100%", embed: false, embed_width: "100%", embed_height: "100%"]
@doc """
The rule for matching blueprint commands.
"""
@spec rule() :: Parsey.rule
def rule() do
{
:blueprint,
%{
match: ~r/\A[[:blank:]]*?@blueprint(-[^\[\(]+)*?[\[\(](.*?)[\]\)]/,
capture: 0,
option: fn input, [_, { attr_index, attr_length }, { index, length }] ->
opt = case String.split(binary_part(input, index, length), " ", trim: true) do
["plot", graph|args] -> %SimpleMarkdownExtensionBlueprint{ command: { Module.safe_concat(Mix.Tasks.Blueprint.Plot, String.to_atom(String.capitalize(graph))), :run, [args] } }
end
if(attr_index > 0, do: String.split(binary_part(input, attr_index, attr_length), "-", trim: true), else: [])
|> Enum.reduce(opt, fn
"w" <> width, opt -> %{ opt | width: width }
"h" <> height, opt -> %{ opt | height: height }
"embed", opt -> %SimpleMarkdownExtensionBlueprint{ command: opt.command, embed: true, embed_width: opt.width, embed_height: opt.height }
end)
end,
rules: []
}
}
end
@doc """
Insert the blueprint command rule in an appropriate place
in the rule parser.
"""
@spec add_rule([Parsey.rule]) :: [Parsey.rule]
def add_rule(rules), do: rules ++ [rule()]
defimpl SimpleMarkdown.Renderer.HTML, for: SimpleMarkdown.Attribute.Blueprint do
def render(%{ option: opts = %SimpleMarkdownExtensionBlueprint{ command: { module, fun, [args] } } }) do
name = ".simple_markdown_extension_blueprint.dot"
:ok = apply(module, fun, [["-o", name|args]])
{ svg, 0 } = System.cmd("dot", ["-Tsvg", name])
File.rm!(name)
String.replace(svg, ~r/\A(.|\n)*?(?=<svg)/m, "", global: false)
|> String.trim()
|> set_attribute("width", opts.width)
|> set_attribute("height", opts.height)
|> set_view(opts.embed, opts.embed_width, opts.embed_height)
end
defp set_attribute(svg, _, ""), do: svg
defp set_attribute(svg, attr, value), do: String.replace(svg, ~r/\A(.*?)#{attr}=".*?"/, "\\1#{attr}=\"#{value}\"", global: false)
defp set_view(svg, false, _width, _height), do: svg
defp set_view(svg, true, width, height), do: "<iframe srcdoc='#{svg}' width='#{width}' height='#{height}'></iframe>"
end
end
|
lib/simple_markdown_extension_blueprint.ex
| 0.864754
| 0.528047
|
simple_markdown_extension_blueprint.ex
|
starcoder
|
defmodule Rondo.Tree do
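@moduledoc """
Builds the render tree for a component: traverses a descriptor, replacing
child elements with `Rondo.Component.Pointer` structs, resolving store
references, and registering actions in the `Rondo.Action.Store`.
"""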
defstruct [:descriptor, :root, :children, :actions]
alias Rondo.Path
alias Rondo.Component.Pointer
alias Rondo.Store.Reference
def init(nil, descriptor, component_path, state, store) do
tree = %__MODULE__{children: %{}, actions: MapSet.new()}
init(tree, descriptor, component_path, state, store)
end
def init(tree = %{descriptor: descriptor, actions: actions}, descriptor, component_path, state, store) do
store = Enum.reduce(actions, store, fn(action, store) ->
{_, store} = put_action(action, component_path, store, state)
store
end)
{tree, store}
end
def init(tree, descriptor, component_path, state, store) do
{root, {children, actions, store}} = traverse(descriptor, component_path, state, store)
tree = %{tree | descriptor: descriptor, root: root, children: children, actions: actions}
{tree, store}
end
def traverse(descriptor, component_path, state, store) do
acc = {%{}, MapSet.new(), store}
Rondo.Traverser.postwalk(descriptor, [], acc, fn
(%Rondo.Element{type: type, props: props, children: c} = el, path, {children, actions, store}) when not is_binary(type) ->
path = Path.create_child_path(component_path, path)
children = Map.put(children, path, el)
{%Pointer{type: type, props: props, children: c, path: path}, {children, actions, store}}
(%Reference{} = ref, _path, acc) ->
ref = resolve(ref, state.children, component_path)
{ref, acc}
(%Rondo.Action{reference: nil}, _path, acc) ->
{nil, acc}
(%Rondo.Action{} = action, _path, {children, actions, store}) ->
{instance, store} = put_action(action, component_path, store, state)
actions = MapSet.put(actions, action)
{instance, {children, actions, store}}
(%Rondo.Stream.Subscription{} = sub, _path, {children, actions, store}) ->
{sub, {children, actions, store}}
(node, _, acc) ->
{node, acc}
end)
end
defp put_action(action = %{reference: reference, events: events}, component_path, store, %{children: children}) do
case resolve(reference, children, component_path) do
nil ->
{nil, store}
descriptor ->
events = resolve_events(events, component_path, children, [])
action = %{action | reference: descriptor,
events: events}
Rondo.Action.Store.put(store, action)
end
end
defp resolve_events([], _, _, acc) do
:lists.reverse(acc)
end
defp resolve_events([event = %{reference: ref} | events], component_path, children, acc) do
case resolve(ref, children, component_path) do
nil ->
resolve_events(events, component_path, children, acc)
descriptor ->
event = %{event | reference: descriptor}
resolve_events(events, component_path, children, [event | acc])
end
end
defp resolve(%Reference{} = reference, children, component_path) do
case Reference.resolve(reference, children) do
:error ->
raise Reference.Error, reference: reference, component_path: component_path
{:ok, nil} ->
nil
{:ok, descriptor} ->
descriptor
end
end
defp resolve(%Rondo.Store{} = store, _, _) do
store
end
end
|
lib/rondo/tree.ex
| 0.675872
| 0.567967
|
tree.ex
|
starcoder
|
defmodule AWS.RUM do
@moduledoc """
With Amazon CloudWatch RUM, you can perform real-user monitoring to collect
client-side data about your web application performance from actual user
sessions in real time.
The data collected includes page load times, client-side errors, and user
behavior. When you view this data, you can see it all aggregated together and
also see breakdowns by the browsers and devices that your customers use.
You can use the collected data to quickly identify and debug client-side
performance issues. CloudWatch RUM helps you visualize anomalies in your
application performance and find relevant debugging data such as error messages,
stack traces, and user sessions. You can also use RUM to understand the range of
end-user impact including the number of users, geolocations, and browsers used.
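A minimal usage sketch (assuming `AWS.Client.create/3` builds a client from static credentials; the input map is abbreviated):
```
client = AWS.Client.create("access_key_id", "secret_access_key", "us-east-1")
AWS.RUM.create_app_monitor(client, %{"Name" => "my-app-monitor"})
```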
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2018-05-10",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "rum",
global?: false,
protocol: "rest-json",
service_id: "RUM",
signature_version: "v4",
signing_name: "rum",
target_prefix: nil
}
end
@doc """
Creates a Amazon CloudWatch RUM app monitor, which collects telemetry data from
your application and sends that data to RUM.
The data includes performance and reliability information such as page load
time, client-side errors, and user behavior.
You use this operation only to create a new app monitor. To update an existing
app monitor, use
[UpdateAppMonitor](https://docs.aws.amazon.com/cloudwatchrum/latest/APIReference/API_UpdateAppMonitor.html) instead.
After you create an app monitor, sign in to the CloudWatch RUM console to get
the JavaScript code snippet to add to your web application. For more
information, see [How do I find a code snippet that I've already
generated?](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-RUM-find-code-snippet.html)
"""
def create_app_monitor(%Client{} = client, input, options \\ []) do
url_path = "/appmonitor"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Deletes an existing app monitor.
This immediately stops the collection of data.
"""
def delete_app_monitor(%Client{} = client, name, input, options \\ []) do
url_path = "/appmonitor/#{AWS.Util.encode_uri(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Retrieves the complete configuration information for one app monitor.
"""
def get_app_monitor(%Client{} = client, name, options \\ []) do
url_path = "/appmonitor/#{AWS.Util.encode_uri(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieves the raw performance events that RUM has collected from your web
application, so that you can do your own processing or analysis of this data.
"""
def get_app_monitor_data(%Client{} = client, name, input, options \\ []) do
url_path = "/appmonitor/#{AWS.Util.encode_uri(name)}/data"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns a list of the Amazon CloudWatch RUM app monitors in the account.
"""
def list_app_monitors(%Client{} = client, input, options \\ []) do
url_path = "/appmonitors"
headers = []
{query_params, input} =
[
{"MaxResults", "maxResults"},
{"NextToken", "nextToken"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Displays the tags associated with a CloudWatch RUM resource.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Sends telemetry events about your application performance and user behavior to
CloudWatch RUM.
The code snippet that RUM generates for you to add to your application includes
`PutRumEvents` operations to send this data to RUM.
Each `PutRumEvents` operation can send a batch of events from one user session.
"""
def put_rum_events(%Client{} = client, id, input, options \\ []) do
url_path = "/appmonitors/#{AWS.Util.encode_uri(id)}/"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Assigns one or more tags (key-value pairs) to the specified CloudWatch RUM
resource.
Currently, the only resources that can be tagged are app monitors.
Tags can help you organize and categorize your resources. You can also use them
to scope user permissions by granting a user permission to access or change only
resources with certain tag values.
Tags don't have any semantic meaning to Amazon Web Services and are interpreted
strictly as strings of characters.
You can use the `TagResource` action with a resource that already has tags. If
you specify a new tag key for the resource, this tag is appended to the list of
tags associated with the resource. If you specify a tag key that is already
associated with the resource, the new tag value that you specify replaces the
previous value for that tag.
You can associate as many as 50 tags with a resource.
For more information, see [Tagging Amazon Web Services resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html).
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Removes one or more tags from the specified resource.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
{query_params, input} =
[
{"TagKeys", "tagKeys"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Updates the configuration of an existing app monitor.
When you use this operation, only the parts of the app monitor configuration
that you specify in this operation are changed. For any parameters that you
omit, the existing values are kept.
You can't use this operation to change the tags of an existing app monitor. To
change the tags of an existing app monitor, use
[TagResource](https://docs.aws.amazon.com/cloudwatchrum/latest/APIReference/API_TagResource.html). To create a new app monitor, use
[CreateAppMonitor](https://docs.aws.amazon.com/cloudwatchrum/latest/APIReference/API_CreateAppMonitor.html).
After you update an app monitor, sign in to the CloudWatch RUM console to get
the updated JavaScript code snippet to add to your web application. For more
information, see [How do I find a code snippet that I've already generated?](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-RUM-find-code-snippet.html)
"""
def update_app_monitor(%Client{} = client, name, input, options \\ []) do
url_path = "/appmonitor/#{AWS.Util.encode_uri(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:patch,
url_path,
query_params,
headers,
input,
options,
200
)
end
end
|
lib/aws/generated/rum.ex
| 0.788176
| 0.450118
|
rum.ex
|
starcoder
|
defmodule ICouch.Changes do
@moduledoc """
Module to handle changes feeds in CouchDB.
Changes structs should not be created or manipulated directly, please use
`ICouch.open_changes/2`.
Similar to a view, a changes feed can be in a "fetched" state or in an
"unfetched" state which can be tested with the `fetched?/1` function and
changed with the `fetch/1`, `fetch!/1` and `unfetch/1` function. In contrast
to a view, the sequence number is updated on each fetch so a consecutive
fetch will start off at the last sequence number.
The changes struct implements the enumerable protocol for easy handling with
Elixir's `Enum` module - however, this only works with fetched changes and
will fail with an `ArgumentError` otherwise.
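A minimal sketch, assuming `db` is an `ICouch.DB` handle and that `ICouch.open_changes/2` returns an unfetched changes struct as described above:
changes = ICouch.open_changes(db)
{:ok, changes} = ICouch.Changes.fetch(changes)
Enum.count(changes)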
"""
use ICouch.RequestError
defstruct [:db, :last_seq, :params, :results]
@type t :: %__MODULE__{
db: ICouch.DB.t,
last_seq: String.t | integer | nil,
params: map,
results: [map] | nil
}
@type changes_option_key :: :doc_ids | :conflicts | :descending |
:filter | :include_docs | :attachments | :att_encoding_info | :limit |
:since | :style | :view
@type changes_option_value :: boolean | String.t | integer | [String.t] |
:main_only | :all_docs
@doc """
Fetches all results of `changes`, turning it into a "fetched changes feed".
The last sequence number will be set and used as next "since" parameter.
"""
@spec fetch(changes :: t) :: {:ok, t} | {:error, term}
def fetch(%__MODULE__{params: params} = changes) do
case send_req(changes) do
{:ok, %{"results" => results, "last_seq" => last_seq}} ->
if params[:include_docs] do
{:ok, %{changes | last_seq: last_seq, results: Enum.map(results, fn
%{"doc" => doc} = row when doc != nil ->
%{row | "doc" => ICouch.Document.from_api!(doc)}
other ->
other
end)}}
else
{:ok, %{changes | last_seq: last_seq, results: results}}
end
{:ok, _} ->
{:error, :invalid_response}
other ->
other
end
end
@doc """
Same as `fetch/1` but returns the fetched changes feed directly on success or
raises an error on failure.
"""
@spec fetch!(changes :: t) :: t
def fetch!(changes),
do: req_result_or_raise! fetch(changes)
@doc """
Resets `changes` back to the "unfetched" state.
This will also reset the `last_seq` to `nil`.
"""
@spec unfetch(changes :: t) :: t
def unfetch(%__MODULE__{} = changes),
do: %{changes | last_seq: nil, results: nil}
@doc """
Tests whether `changes` is in "fetched" state or not.
"""
@spec fetched?(changes :: t) :: boolean
def fetched?(%__MODULE__{results: results}) when is_list(results),
do: true
def fetched?(%__MODULE__{}),
do: false
@doc """
Replaces `changes`'s options with the given ones.
This sets the changes feed back to the "unfetched" state, but leaves the
`last_seq` value untouched unless `since` is given as an option.
Note that when setting the `doc_ids` option, any given `filter` option will be
ignored while fetching changes.
"""
@spec set_options(changes :: t, options :: [ICouch.open_changes_option]) :: t
def set_options(%__MODULE__{} = changes, options) do
case Map.new(options) do
%{feed: _} ->
raise ArgumentError, message: "the \"feed\" option is not allowed here"
options ->
case Map.pop(options, :since) do
{nil, options} -> %{changes | params: options, results: nil}
{since, options} -> %{changes | last_seq: since, params: options, results: nil}
end
end
end
@doc """
Adds or updates a single option in `changes`.
This will also set the changes feed back to the "unfetched" state. To modify
the `last_seq` value, set the `since` option.
Note that when setting the `doc_ids` option, any given `filter` option will be
ignored while fetching changes.
"""
@spec put_option(changes :: t, key :: changes_option_key, value :: changes_option_value) :: t
def put_option(%__MODULE__{}, :feed, _),
do: raise ArgumentError, message: "the \"feed\" option is not allowed here"
def put_option(%__MODULE__{params: params} = changes, key, value),
do: %{changes | params: Map.put(params, key, value), results: nil}
@doc """
Deletes an option in `changes`.
This will also set the changes feed back to the "unfetched" state.
Returns `changes` unchanged if the option was not set (and it already was
"unfetched").
"""
@spec delete_option(changes :: t, key :: changes_option_key) :: t
def delete_option(%__MODULE__{params: params, results: results} = changes, key) do
if not Map.has_key?(params, key) and results == nil do
changes
else
%{changes | params: Map.delete(params, key), results: nil}
end
end
@doc """
Returns the value of an option in `changes` or `nil` if it was not set.
The `last_seq` value can be retrieved with the `since` option.
"""
@spec get_option(changes :: t, key :: changes_option_key) :: changes_option_value | nil
def get_option(%__MODULE__{last_seq: last_seq}, :since),
do: last_seq
def get_option(%__MODULE__{params: params}, key),
do: Map.get(params, key)
@doc """
Internal function to build a db endpoint.
"""
@spec db_endpoint(changes :: t) :: {String.t, map}
def db_endpoint(%__MODULE__{last_seq: last_seq, params: params}),
do: db_endpoint(last_seq, params)
defp send_req(%__MODULE__{db: db, last_seq: last_seq, params: %{doc_ids: doc_ids} = params}),
do: ICouch.DB.send_req(db, db_endpoint(last_seq, Map.delete(params, :doc_ids) |> Map.put(:filter, "_doc_ids")), :post, %{"doc_ids" => doc_ids})
defp send_req(%__MODULE__{db: db, last_seq: last_seq, params: params}),
do: ICouch.DB.send_req(db, db_endpoint(last_seq, params))
defp db_endpoint(nil, params),
do: {"_changes", params}
defp db_endpoint(last_seq, params),
do: {"_changes", Map.put(params, :since, last_seq)}
end
defimpl Enumerable, for: ICouch.Changes do
def count(%ICouch.Changes{results: nil}),
do: raise ArgumentError, message: "changes feed not fetched"
def count(%ICouch.Changes{results: results}),
do: {:ok, length(results)}
def member?(_changes, _element),
do: {:error, __MODULE__}
def slice(_),
do: {:error, __MODULE__}
def reduce(_, {:halt, acc}, _fun),
do: {:halted, acc}
def reduce(%ICouch.Changes{results: rest_results}, {:suspend, acc}, fun),
do: {:suspended, acc, &reduce(rest_results, &1, fun)}
def reduce(%ICouch.Changes{results: []}, {:cont, acc}, _fun),
do: {:done, acc}
def reduce(%ICouch.Changes{results: [h | t]} = changes, {:cont, acc}, fun),
do: reduce(%{changes | results: t}, fun.(h, acc), fun)
def reduce(%ICouch.Changes{results: nil}, _, _),
do: raise ArgumentError, message: "changes feed not fetched"
end
|
lib/icouch/changes.ex
| 0.864253
| 0.469155
|
changes.ex
|
starcoder
|
defmodule LevelWeb.Schema.InputObjects do
@moduledoc false
use Absinthe.Schema.Notation
@desc "The field and direction to sort users."
input_object :user_order do
@desc "The field by which to sort."
field :field, non_null(:user_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "The field and direction to sort users."
input_object :space_order do
@desc "The field by which to sort."
field :field, non_null(:space_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "The field and direction to sort space users."
input_object :space_user_order do
@desc "The field by which to sort."
field :field, non_null(:space_user_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "The field and direction to sort groups."
input_object :group_order do
@desc "The field by which to sort."
field :field, non_null(:group_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "The field and direction to sort posts."
input_object :post_order do
@desc "The field by which to sort."
field :field, non_null(:post_order_field), default_value: :posted_at
@desc "The sort direction."
field :direction, non_null(:order_direction), default_value: :desc
end
@desc "The field and direction to sort replies."
input_object :reply_order do
@desc "The field by which to sort."
field :field, non_null(:reply_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "The field and direction to sort reactions."
input_object :reaction_order do
@desc "The field by which to sort."
field :field, non_null(:reaction_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "The field and direction to sort notifications."
input_object :notification_order do
@desc "The field by which to sort."
field :field, non_null(:notification_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "Filtering criteria for post connections."
input_object :post_filters do
@desc """
Filter by whether the post is being followed by the user. A user is considered
to be "following" a post if they are explicitly subscribed to it, or if the
post was created in a group that the user belongs to.
"""
field :following_state, :following_state_filter, default_value: :all
@desc """
Filter by the different inbox states.
"""
field :inbox_state, :inbox_state_filter, default_value: :all
@desc """
Filter by the different post states.
"""
field :state, :post_state_filter, default_value: :all
@desc """
Filter by last activity.
"""
field :last_activity, :last_activity_filter, default_value: :all
@desc """
Filter by privacy.
"""
field :privacy, :privacy_filter, default_value: :all
@desc """
Filter by author handle.
"""
field :author, :string
@desc """
Filter by recipients.
"""
field :recipients, list_of(:string), default_value: []
end
@desc "Filtering criteria for notification connections."
input_object :notification_filters do
field :state, :notification_state_filter, default_value: :all
end
end
|
lib/level_web/schema/input_objects.ex
| 0.811601
| 0.47859
|
input_objects.ex
|
starcoder
|
defmodule Easing do
@moduledoc """
Easing function calculations
Cribbed from: https://easings.net/
"""
alias Easing.Range
@type easing_tuple :: {atom(), atom()}
@type range :: %Easing.Range{first: number(), last: number(), step: number()}
@type easing_function :: function()
@type easing_function_or_tuple :: easing_function() | easing_tuple()
@type easings :: [float()]
@spec to_list(range(), easing_function_or_tuple()) :: easings()
@doc """
Generates a list of animation frame values.
A `Range` is used for the `range` argument. See the example
## Examples:
iex> Easing.to_list(%Easing.Range{first: 0, last: 1, step: 0.1}, &Easing.sine_in(&1))
[0.0, 0.01231165940486223, 0.04894348370484647, 0.10899347581163221, 0.19098300562505255, 0.2928932188134524, 0.41221474770752675, 0.5460095002604533, 0.6909830056250525, 0.8435655349597688, 1.0]
iex> Easing.to_list(%Easing.Range{first: 0, last: 0.5, step: 0.1}, {:bounce, :in_out})
[0.0, 0.030000000000000027, 0.11375000000000002, 0.04499999999999993, 0.3487500000000001, 0.5]
"""
def to_list(%Range{} = range, easing) do
range
|> stream(easing)
|> Enum.to_list()
end
@spec stream(range(), easing_function_or_tuple()) :: Enumerable.t()
@doc """
Generates a stream of animation frame values.
A `Range` is used for the `range` argument. See the example
## Examples:
iex> Easing.stream(%Easing.Range{first: 0, last: 1, step: 0.1}, &Easing.sine_in(&1)) |> Enum.to_list()
[0.0, 0.01231165940486223, 0.04894348370484647, 0.10899347581163221, 0.19098300562505255, 0.2928932188134524, 0.41221474770752675, 0.5460095002604533, 0.6909830056250525, 0.8435655349597688, 1.0]
iex> Easing.stream(%Easing.Range{first: 0, last: 0.5, step: 0.1}, {:bounce, :in_out}) |> Enum.to_list()
[0.0, 0.030000000000000027, 0.11375000000000002, 0.04499999999999993, 0.3487500000000001, 0.5]
"""
def stream(%Range{} = range, easing_function) when is_function(easing_function) do
Stream.map(range, &easing_function.(&1))
end
def stream(%Range{} = range, easing_tuple) when is_tuple(easing_tuple) do
Stream.map(range, &run(easing_tuple, &1))
end
@spec linear_in(float()) :: float()
@doc """
Linear in easing function
## Example
iex> Easing.linear_in(0.1)
0.1
"""
def linear_in(progress), do: run({:linear, :in}, progress)
@spec linear_out(float()) ::float()
@doc """
Linear out easing function
## Example
iex> Easing.linear_out(0.1)
0.1
"""
def linear_out(progress), do: run({:linear, :out}, progress)
@spec linear_in_out(float()) :: float()
@doc """
Linear in-out easing function
## Example
iex> Easing.linear_in_out(0.1)
0.1
"""
def linear_in_out(progress), do: run({:linear, :in_out}, progress)
@spec sine_in(float()) :: float()
@doc """
Sine in easing function

## Example
iex> Easing.sine_in(0.1)
0.01231165940486223
"""
def sine_in(progress), do: run({:sine, :in}, progress)
@spec sine_out(float()) :: float()
@doc """
Sine out easing function

## Example
iex> Easing.sine_out(0.1)
0.15643446504023087
"""
def sine_out(progress), do: run({:sine, :out}, progress)
@spec sine_in_out(float()) :: float()
@doc """
Sine in-out easing function

## Example
iex> Easing.sine_in_out(0.1)
0.024471741852423234
"""
def sine_in_out(progress), do: run({:sine, :in_out}, progress)
@spec quadratic_in(float()) :: float()
@doc """
Quadratic in easing function

## Example
iex> Easing.quadratic_in(0.1)
0.010000000000000002
"""
def quadratic_in(progress), do: run({:quadratic, :in}, progress)
@spec quadratic_out(float()) :: float()
@doc """
Quadratic out easing function

## Example
iex> Easing.quadratic_out(0.1)
0.18999999999999995
"""
def quadratic_out(progress), do: run({:quadratic, :out}, progress)
@spec quadratic_in_out(float()) :: float()
@doc """
Quadratic in-out easing function

## Example
iex> Easing.quadratic_in_out(0.1)
0.020000000000000004
"""
def quadratic_in_out(progress), do: run({:quadratic, :in_out}, progress)
@spec cubic_in(float()) :: float()
@doc """
Cubic in easing function

## Example
iex> Easing.cubic_in(0.1)
0.0010000000000000002
"""
def cubic_in(progress), do: run({:cubic, :in}, progress)
@spec cubic_out(float()) :: float()
@doc """
Cubic out easing function

## Example
iex> Easing.cubic_out(0.1)
0.2709999999999999
"""
def cubic_out(progress), do: run({:cubic, :out}, progress)
@spec cubic_in_out(float()) :: float()
@doc """
Cubic in-out easing function

## Example
iex> Easing.cubic_in_out(0.1)
0.004000000000000001
"""
def cubic_in_out(progress), do: run({:cubic, :in_out}, progress)
@spec quartic_in(float()) :: float()
@doc """
Quartic in easing function

## Example
iex> Easing.quartic_in(0.1)
1.0000000000000002e-4
"""
def quartic_in(progress), do: run({:quartic, :in}, progress)
@spec quartic_out(float()) :: float()
@doc """
Quartic out easing function

## Example
iex> Easing.quartic_out(0.1)
0.3439
"""
def quartic_out(progress), do: run({:quartic, :out}, progress)
@spec quartic_in_out(float()) :: float()
@doc """
Quartic in-out easing function

## Example
iex> Easing.quartic_in_out(0.1)
8.000000000000001e-4
"""
def quartic_in_out(progress), do: run({:quartic, :in_out}, progress)
@spec quintic_in(float()) :: float()
@doc """
Quintic in easing function

## Example
iex> Easing.quintic_in(0.1)
1.0000000000000003e-5
"""
def quintic_in(progress), do: run({:quintic, :in}, progress)
@spec quintic_out(float()) :: float()
@doc """
Quintic out easing function

## Example
iex> Easing.quintic_out(0.1)
0.40950999999999993
"""
def quintic_out(progress), do: run({:quintic, :out}, progress)
@spec quintic_in_out(float()) :: float()
@doc """
Quintic in-out easing function

## Example
iex> Easing.quintic_in_out(0.1)
1.6000000000000004e-4
"""
def quintic_in_out(progress), do: run({:quintic, :in_out}, progress)
@spec exponential_in(float()) :: float()
@doc """
Exponential in easing function

## Example
iex> Easing.exponential_in(0.1)
0.001953125
"""
def exponential_in(progress), do: run({:exponential, :in}, progress)
@spec exponential_out(float()) :: float()
@doc """
Exponential out easing function

## Example
iex> Easing.exponential_out(0.1)
0.5
"""
def exponential_out(progress), do: run({:exponential, :out}, progress)
@spec exponential_in_out(float()) :: float()
@doc """
Exponential in-out easing function

## Example
iex> Easing.exponential_in_out(0.1)
0.001953125
"""
def exponential_in_out(progress), do: run({:exponential, :in_out}, progress)
@spec circular_in(float()) :: float()
@doc """
Circular in easing function

## Example
iex> Easing.circular_in(0.1)
0.005012562893380035
"""
def circular_in(progress), do: run({:circular, :in}, progress)
@spec circular_out(float()) :: float()
@doc """
Circular out easing function

## Example
iex> Easing.circular_out(0.1)
0.4358898943540673
"""
def circular_out(progress), do: run({:circular, :out}, progress)
@spec circular_in_out(float()) :: float()
@doc """
Circular in-out easing function

## Example
iex> Easing.circular_in_out(0.1)
0.010102051443364402
"""
def circular_in_out(progress), do: run({:circular, :in_out}, progress)
@spec back_in(float()) :: float()
@doc """
Back in easing function

## Example
iex> Easing.back_in(0.1)
-0.014314220000000004
"""
def back_in(progress), do: run({:back, :in}, progress)
@spec back_out(float()) :: float()
@doc """
Back out easing function

## Example
iex> Easing.back_out(0.1)
0.40882797999999987
"""
def back_out(progress), do: run({:back, :out}, progress)
@spec back_in_out(float()) :: float()
@doc """
Back in-out easing function

## Example
iex> Easing.back_in_out(0.1)
-0.037518552000000004
"""
def back_in_out(progress), do: run({:back, :in_out}, progress)
@spec elastic_in(float()) :: float()
@doc """
Elastic in easing function

## Example
iex> Easing.elastic_in(0.1)
0.001953125
"""
def elastic_in(progress), do: run({:elastic, :in}, progress)
@spec elastic_out(float()) :: float()
@doc """
Elastic out easing function

## Example
iex> Easing.elastic_out(0.1)
1.25
"""
def elastic_out(progress), do: run({:elastic, :out}, progress)
@spec elastic_in_out(float()) :: float()
@doc """
Elastic in-out easing function

## Example
iex> Easing.elastic_in_out(0.1)
3.39156597005722e-4
"""
def elastic_in_out(progress), do: run({:elastic, :in_out}, progress)
@spec bounce_in(float()) :: float()
@doc """
Bounce in easing function

## Example
iex> Easing.bounce_in(0.1)
0.01187500000000008
"""
def bounce_in(progress), do: run({:bounce, :in}, progress)
@spec bounce_out(float()) :: float()
@doc """
Bounce out easing function

## Example
iex> Easing.bounce_out(0.1)
0.07562500000000001
"""
def bounce_out(progress), do: run({:bounce, :out}, progress)
@spec bounce_in_out(float()) :: float()
@doc """
Bounce in-out easing function

## Example
iex> Easing.bounce_in_out(0.1)
0.030000000000000027
"""
def bounce_in_out(progress), do: run({:bounce, :in_out}, progress)
@spec run(easing_function_or_tuple(), float()) :: float()
@doc """
Easing calculation. Takes a tuple of atoms `{type, direction}` and a progress value
between 0 and 1 that represents the animation progress. (0 = beginning, 1 = end)
* types: `:linear`, `:sine`, `:quadratic`, `:cubic`, `:quartic`, `:quintic`, `:exponential`, `:circular`, `:back`, `:elastic`, `:bounce`
* directions: `:in`, `:out`, and `:in_out`
* progress: value between `0` and `1` that represents the % of the animation state.
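## Example
iex> Easing.run({:sine, :in}, 0.1)
0.01231165940486223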
"""
def run(easing_function, progress) do
cond do
progress == 0 -> 0.0
progress == 1 -> 1.0
true -> do_run(easing_function, progress)
end
end
# Linear
defp do_run({:linear, _direction}, progress), do: progress
# Sine
defp do_run({:sine, :in}, progress) do
1 - :math.cos((progress * :math.pi()) / 2)
end
defp do_run({:sine, :out}, progress) do
:math.sin((progress * :math.pi()) / 2)
end
defp do_run({:sine, :in_out}, progress) do
-1 * (:math.cos(:math.pi() * progress) - 1) / 2
end
# Quadratic
defp do_run({:quadratic, :in}, progress) do
:math.pow(progress, 2)
end
defp do_run({:quadratic, :out}, progress) do
1 - (1 - progress) * (1 - progress)
end
defp do_run({:quadratic, :in_out}, progress) do
if progress < 0.5 do
2 * :math.pow(progress, 2)
else
1 - :math.pow(-2 * progress + 2, 2) / 2
end
end
# Cubic
defp do_run({:cubic, :in}, progress) do
:math.pow(progress, 3)
end
defp do_run({:cubic, :out}, progress) do
1 - :math.pow(1 - progress, 3)
end
defp do_run({:cubic, :in_out}, progress) do
if progress < 0.5 do
4 * :math.pow(progress, 3)
else
1 - :math.pow(-2 * progress + 2, 3) / 2
end
end
# Quartic
defp do_run({:quartic, :in}, progress) do
:math.pow(progress, 4)
end
defp do_run({:quartic, :out}, progress) do
1 - :math.pow(1 - progress, 4)
end
defp do_run({:quartic, :in_out}, progress) do
if progress < 0.5 do
8 * :math.pow(progress, 4)
else
1 - :math.pow(-2 * progress + 2, 4) / 2
end
end
# Quintic
defp do_run({:quintic, :in}, progress) do
:math.pow(progress, 5)
end
defp do_run({:quintic, :out}, progress) do
1 - :math.pow(1 - progress, 5)
end
defp do_run({:quintic, :in_out}, progress) do
if progress < 0.5 do
16 * :math.pow(progress, 5)
else
1 - :math.pow(-2 * progress + 2, 5) / 2
end
end
# Exponential
defp do_run({:exponential, :in}, progress) do
:math.pow(2, 10 * progress - 10)
end
defp do_run({:exponential, :out}, progress) do
1 - :math.pow(2, -10 * progress)
end
defp do_run({:exponential, :in_out}, progress) do
cond do
progress < 0.5 -> :math.pow(2, 20 * progress - 10) / 2
true -> (2 - :math.pow(2, -20 * progress + 10)) / 2
end
end
# Circular
defp do_run({:circular, :in}, progress) do
1 - :math.sqrt(1 - :math.pow(progress, 2))
end
defp do_run({:circular, :out}, progress) do
:math.sqrt(1 - :math.pow(progress - 1, 2))
end
defp do_run({:circular, :in_out}, progress) do
if progress < 0.5 do
(1 - :math.sqrt(1 - :math.pow(2 * progress, 2))) / 2
else
(:math.sqrt(1 - :math.pow(-2 * progress + 2, 2)) + 1) / 2
end
end
# Back
defp do_run({:back, :in}, progress) do
c1 = 1.70158
c3 = c1 + 1
c3 * :math.pow(progress, 3) - c1 * :math.pow(progress, 2)
end
defp do_run({:back, :out}, progress) do
c1 = 1.70158
c3 = c1 + 1
1 + c3 * :math.pow(progress - 1, 3) + c1 * :math.pow(progress - 1, 2)
end
defp do_run({:back, :in_out}, progress) do
c1 = 1.70158
c2 = c1 * 1.525
if progress < 0.5 do
(:math.pow(2 * progress, 2) * ((c2 + 1) * 2 * progress - c2)) /2
else
(:math.pow(2 * progress - 2, 2) * ((c2 + 1) * (progress * 2 - 2) + c2) + 2) / 2
end
end
# Elastic
defp do_run({:elastic, :in}, progress) do
c4 = (2 * :math.pi()) / 3
-1 * :math.pow(2, 10 * progress - 10) * :math.sin((progress * 10 - 10.75) * c4)
end
defp do_run({:elastic, :out}, progress) do
c4 = (2 * :math.pi()) / 3;
:math.pow(2, -10 * progress) * :math.sin((progress * 10 - 0.75) * c4) + 1
end
defp do_run({:elastic, :in_out}, progress) do
c5 = (2 * :math.pi()) / 4.5
cond do
progress < 0.5 -> -1 * (:math.pow(2, 20 * progress - 10) * :math.sin((20 * progress - 11.125) * c5)) / 2
true -> (:math.pow(2, -20 * progress + 10) * :math.sin((20 * progress - 11.125) * c5)) / 2 + 1
end
end
# Bounce
defp do_run({:bounce, :in}, progress) do
1.0 - run({:bounce, :out}, 1.0 - progress)
end
defp do_run({:bounce, :out}, progress) do
n1 = 7.5625
d1 = 2.75
cond do
progress < 1 / d1 -> n1 * :math.pow(progress, 2)
progress < 2 / d1 ->
p1 = progress - 1.5 / d1
n1 * :math.pow(p1, 2) + 0.75
progress < 2.5 / d1 ->
p2 = progress - 2.25 / d1
n1 * :math.pow(p2, 2) + 0.9375
true ->
p3 = progress - 2.625 / d1
n1 * :math.pow(p3, 2) + 0.984375
end
end
defp do_run({:bounce, :in_out}, progress) do
if progress < 0.5 do
(1 - run({:bounce, :out}, 1 - 2 * progress)) / 2
else
(1 + run({:bounce, :out}, 2 * progress - 1)) / 2
end
end
end
|
lib/easing.ex
| 0.917626
| 0.502441
|
easing.ex
|
starcoder
|
defmodule CSV.Decoding.Preprocessing.Lines do
use CSV.Defaults
@moduledoc ~S"""
The CSV lines preprocessor module - aggregates lines in a stream that are part
of a common escape sequence.
"""
@doc """
Aggregates the common escape sequences of a stream with the given separator.
## Options
Options get transferred from the decoder. They are:
* `:separator` – The field separator
* `:escape_max_lines` – The maximum number of lines to collect in an
escaped field
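## Example
A sketch of the aggregation (assuming the default `\r\n` delimiter, taken from `CSV.Defaults`, is used to rejoin escaped lines):
["a,\"b", "c\"", "d,e"]
|> CSV.Decoding.Preprocessing.Lines.process()
|> Enum.to_list()
#=> ["a,\"b\r\nc\"", "d,e"]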
"""
def process(stream, options \\ []) do
stream
|> Stream.concat([:stream_end])
|> d_process(options)
end
defp d_process(stream, options) do
separator = options |> Keyword.get(:separator, @separator)
escape_max_lines = options |> Keyword.get(:escape_max_lines, @escape_max_lines)
stream
|> Stream.with_index()
|> Stream.transform(
fn -> {[], "", 0, 0} end,
&do_process(&1, &2, separator, escape_max_lines),
fn _ -> :ok end
)
end
defp do_process({:stream_end, _}, {escaped_lines, _, _, _}, _, _) do
{escaped_lines, {[], "", 0, 0}}
end
defp do_process({line, line_index}, {[], _, _, _}, separator, _) do
start_sequence(line, line_index, separator)
end
defp do_process(
{line, line_index},
{escaped_lines, sequence_start, sequence_start_index, num_escaped_lines},
separator,
escape_max_lines
)
when num_escaped_lines < escape_max_lines do
continue_sequence(
escaped_lines,
num_escaped_lines + 1,
line,
line_index,
separator,
sequence_start,
sequence_start_index
)
end
defp do_process({line, _}, {escaped_lines, _, _, _}, separator, escape_max_lines) do
reprocess(escaped_lines ++ [line], separator, escape_max_lines)
end
defp reprocess(lines, separator, escape_max_lines) do
[corrupt_line | potentially_valid_lines] = lines
{processed_lines, continuation} =
potentially_valid_lines
|> Stream.with_index()
|> Enum.flat_map_reduce({[], "", 0, 0}, &do_process(&1, &2, separator, escape_max_lines))
{
[corrupt_line] ++ processed_lines,
continuation
}
end
defp start_sequence(line, line_index, separator) do
{starts_sequence, sequence_start} = starts_sequence?(line, separator)
cond do
starts_sequence ->
{[], {[line], sequence_start, line_index, 1}}
true ->
{[line], {[], "", 0, 0}}
end
end
defp continue_sequence(
escaped_lines,
num_escaped_lines,
line,
line_index,
separator,
sequence_start,
sequence_start_index
) do
{ends_sequence, _} = ends_sequence?(line, separator)
cond do
ends_sequence ->
start_sequence((escaped_lines ++ [line]) |> Enum.join(@delimiter), line_index, separator)
true ->
{[],
{escaped_lines ++ [line], sequence_start <> @delimiter <> line, sequence_start_index,
num_escaped_lines}}
end
end
defp ends_sequence?(line, separator) do
ends_sequence?(line, "", true, separator)
end
defp ends_sequence?(<<@double_quote::utf8>> <> tail, _, quoted, separator) do
ends_sequence?(tail, <<@double_quote::utf8>>, !quoted, separator)
end
defp ends_sequence?(<<head::utf8>> <> tail, _, quoted, separator) do
ends_sequence?(tail, <<head::utf8>>, quoted, separator)
end
defp ends_sequence?("", _, quoted, _) do
{!quoted, ""}
end
defp starts_sequence?(line, separator) do
starts_sequence?(line, "", false, separator, "")
end
defp starts_sequence?(<<@double_quote::utf8>> <> tail, last_token, false, separator, _)
when last_token == <<separator::utf8>> do
starts_sequence?(tail, @double_quote, true, separator, tail)
end
defp starts_sequence?(<<@double_quote::utf8>> <> tail, "", false, separator, _) do
starts_sequence?(tail, @double_quote, true, separator, tail)
end
defp starts_sequence?(<<@double_quote::utf8>> <> tail, _, quoted, separator, sequence_start) do
starts_sequence?(tail, @double_quote, !quoted, separator, sequence_start)
end
defp starts_sequence?(<<head::utf8>> <> tail, _, quoted, separator, sequence_start) do
starts_sequence?(tail, <<head::utf8>>, quoted, separator, sequence_start)
end
defp starts_sequence?(<<head>> <> tail, _, quoted, separator, sequence_start) do
starts_sequence?(tail, <<head>>, quoted, separator, sequence_start)
end
defp starts_sequence?("", _, quoted, _, sequence_start) do
{quoted, sequence_start}
end
end
|
lib/csv/decoding/preprocessing/lines.ex
| 0.800107
| 0.485112
|
lines.ex
|
starcoder
|
defmodule Stargate.Producer do
@moduledoc """
Provides a producer websocket process and functions for producing
messages to the cluster.
Pass a keyword list of configuration options to the `start_link/1`
function or simply call `produce/2` passing a valid
Pulsar producer URL in place of a producer process.
"""
require Logger
use Stargate.Connection
use Puid
import Stargate.Supervisor, only: [via: 2]
alias Stargate.Producer.{Acknowledger, QueryParams}
@typedoc """
A URL defining the host and topic to which a Stargate producer can
connect for sending messages.
"""
@type url() :: String.t()
@typedoc """
A producer websocket process identified by a pid or via tuple.
The atom key for identifying the producer in the via tuple is of
the form `:"sg_prod_<tenant>_<namespace>_<topic>"`.
"""
@type producer :: GenServer.server()
@typedoc """
Pulsar messages produced by Stargate can be any of the following forms:
* raw binary payload (must be encodable to base64)
* a {key, value} tuple where key is the optional message key and value is the payload
* a map with a "payload" field and optional fields for a key, context, properties (key/value
pairs as a map), and list of strings identifying replication clusters.
Stargate uses the `context` field on a message produced to Pulsar to correlate receipt messages
from the cluster to sent messages. If you do not define a context in your message, Stargate
generates one automatically.
"""
@type message ::
String.t()
| {String.t(), String.t()}
| %{
required(:payload) => String.t(),
optional(:key) => String.t(),
optional(:context) => String.t(),
optional(:properties) => map(),
optional(:replicationClusters) => [String.t()]
}
@doc """
Produce a message or list of messages to the cluster by producer URL or producer process.
Messages can be any of the accepted forms (see `message` type).
Producing by URL is good for irregular and/or ad hoc producer needs that do not require
a persistent websocket connection and ideally with few to no query parameters
to configure producer options from the default. For higher volume producing, a persistent
connection with an addressable producer process is recommended.
Once the message(s) is produced, the calling process automatically blocks until
it receives acknowledgement from the cluster that the message(s) has been received.
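For example (host and topic are placeholders); producing by URL spins up a
temporary connection and tears it down after the acknowledgement:
    Stargate.Producer.produce(
      "ws://localhost:8080/ws/v2/producer/persistent/public/default/events",
      [{"key1", "hello"}, "plain payload"]
    )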
"""
@spec produce(url() | producer(), message() | [message()]) :: :ok | {:error, term()}
def produce(url, messages) when is_binary(url) do
with [protocol, _, host, _, _, _, persistence, tenant, ns, topic | _] <-
String.split(url, "/"),
opts <- temp_producer_opts(:temp, protocol, host, persistence, tenant, ns, topic),
{:ok, temp_producer} <- Stargate.Supervisor.start_link(opts),
:ok <- produce(via(:sg_reg_temp, {:producer, persistence, tenant, ns, topic}), messages) do
Process.unlink(temp_producer)
Supervisor.stop(temp_producer)
:ok
else
{:error, reason} -> {:error, reason}
error -> {:error, error}
end
end
def produce(producer, messages) when is_list(messages) do
Enum.each(messages, &produce(producer, &1))
end
def produce(producer, message) do
{payload, ctx} = construct_payload(message)
WebSockex.cast(producer, {:send, payload, ctx, self()})
receive do
:ack -> :ok
err -> err
end
end
@doc """
Produce a list of messages to a Stargate producer process. Messages can be any
of the accepted forms (see `message` type).
When calling `produce/3` the third argument must be an MFA tuple which is used by
the producer's acknowledger process to asynchronously perform acknowledgement that the
message was received by the cluster successfully. This is used to avoid blocking the
calling process for performance reasons.
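For example (the handler module and function here are hypothetical):
    Stargate.Producer.produce(producer, "payload", {MyApp.Handler, :handle_ack, []})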
"""
@spec produce(producer(), message() | [message()], {module(), atom(), [term()]}) ::
:ok | {:error, term()}
def produce(producer, messages, mfa) when is_list(messages) do
Enum.each(messages, &produce(producer, &1, mfa))
end
def produce(producer, message, mfa) do
{payload, ctx} = construct_payload(message)
WebSockex.cast(producer, {:send, payload, ctx, mfa})
end
defmodule State do
@moduledoc """
Defines the state stored by the producer websocket process. The Stargate producer
records the registry name associated to its supervision tree, the URL of the cluster and topic
it connects to, as well as the individual components that make up the URL including the
host, protocol (ws or wss), topic path parameters (persistent or non-persistent, tenant,
namespace, and topic) and any query parameters configuring the connection.
"""
defstruct [
:registry,
:url,
:host,
:protocol,
:persistence,
:tenant,
:namespace,
:topic,
:query_params
]
end
@doc """
Start a producer websocket process and link it to the current process.
Producer options require, at minimum:
* `host` is a tuple of the address or URL of the Pulsar cluster (broker service)
and the port on which the service is exposed.
* `tenant` is a string representing the tenant portion of the producer URL path parameter.
* `namespace` is a string representing the namespace portion of the producer URL path parameter.
* `topic` is a string representing the topic portion of the producer URL path parameter.
* `registry` is the name of the process registry associated to the client's supervision tree.
Stargate uses this to send messages back and forth between the producer and its acknowledger.
Additional optional parameters to a producer are:
* `protocol` can be one of "ws" or "wss"; defaults to "ws"
* `persistence` can be one of "persistent" or "non-persistent" per the Pulsar
specification of topics as being in-memory only or persisted to the brokers' disks.
Defaults to "persistent".
* `query_params` is a map containing any or all of the following:
* `send_timeout` the time at which a produce operation will time out; defaults to 30 seconds
* `batch_enabled` can be true or false to enable/disable the batching of messages.
Defaults to "false".
* `batch_max_msg` defines the maximum number of messages in a batch (if enabled).
Defaults to 1000.
* `max_pending_msg` defines the maximum size of the internal queue holding messages. Defaults
to 1000.
* `batch_max_delay` sets the time period within which message batches will be published.
Defaults to 10 milliseconds.
* `routing_mode` can be one of :round_robin or :single. _Pulsar has deprecated this parameter_.
* `compression_type` can be one of :lz4, :zlib, or :none. Defaults to :none
* `name` is used to enforce only one producer with the given name is publishing to
connected topic.
* `initial_seq_id` sets the baseline for the sequence ids assigned to published messages.
* `hashing_scheme` can be one of :java_string or :murmur3 when defining a hashing function to
use with partitioned topics. _Pulsar has deprecated this parameter_.
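A minimal start-up sketch (host and names are placeholders):
    Stargate.Producer.start_link(
      registry: :sg_reg_default,
      host: {"localhost", 8080},
      tenant: "public",
      namespace: "default",
      topic: "events"
    )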
"""
@spec start_link(keyword()) :: GenServer.on_start()
def start_link(args) do
query_params_config = Keyword.get(args, :query_params)
query_params = QueryParams.build_params(query_params_config)
registry = Keyword.fetch!(args, :registry)
state =
args
|> Stargate.Connection.connection_settings(:producer, query_params)
|> Map.put(:query_params, query_params_config)
|> Map.put(:registry, registry)
|> (fn fields -> struct(State, fields) end).()
server_opts =
args
|> Stargate.Connection.auth_settings()
|> Keyword.put(
:name,
via(
state.registry,
{:producer, "#{state.persistence}", "#{state.tenant}", "#{state.namespace}",
"#{state.topic}"}
)
)
WebSockex.start_link(state.url, __MODULE__, state, server_opts)
end
@impl WebSockex
def handle_cast({:send, payload, ctx, ack}, state) do
Acknowledger.produce(
via(
state.registry,
{:producer_ack, "#{state.persistence}", "#{state.tenant}", "#{state.namespace}",
"#{state.topic}"}
),
ctx,
ack
)
{:reply, {:text, payload}, state}
end
@impl WebSockex
def handle_frame({:text, msg}, state) do
Logger.debug("Received response : #{inspect(msg)}")
response =
msg
|> Jason.decode!()
|> format_response()
:ok =
state.registry
|> via(
{:producer_ack, "#{state.persistence}", "#{state.tenant}", "#{state.namespace}",
"#{state.topic}"}
)
|> Acknowledger.ack(response)
{:ok, state}
end
defp construct_payload(%{"payload" => _payload, "context" => context} = message) do
encoded_message =
message
|> Map.update!("payload", &Base.encode64(&1))
|> Jason.encode!()
{encoded_message, context}
end
defp construct_payload(%{"payload" => _payload} = message) do
context = generate()
encoded_message =
message
|> Map.put("context", context)
|> Map.update!("payload", &Base.encode64(&1))
|> Jason.encode!()
{encoded_message, context}
end
defp construct_payload({key, payload}) do
context = generate()
encoded_message =
%{
"key" => key,
"payload" => Base.encode64(payload),
"context" => context
}
|> Jason.encode!()
{encoded_message, context}
end
defp construct_payload(message) when is_binary(message) do
context = generate()
encoded_message =
%{
"payload" => Base.encode64(message),
"context" => context
}
|> Jason.encode!()
{encoded_message, context}
end
defp format_response(%{"result" => "ok", "context" => ctx}) do
{:ack, ctx}
end
defp format_response(%{"result" => error, "errorMsg" => explanation, "context" => ctx}) do
reason = "Error of type : #{error} ocurred; #{explanation}"
{:error, reason, ctx}
end
defp temp_producer_opts(name, protocol, host, persistence, tenant, namespace, topic) do
[
name: name,
host: host,
protocol: String.trim(protocol, ":"),
producer: [
persistence: persistence,
tenant: tenant,
namespace: namespace,
topic: topic
]
]
end
end
|
lib/stargate/producer.ex
| 0.856812
| 0.461138
|
producer.ex
|
starcoder
|
defmodule Roll35Core.Data.Agent do
@moduledoc """
A general interface for our various data agents.
Each agent represents one data table and provides functions to roll
against that table.
Individual agents are expected to `use Roll35Core.Data.Agent` as well
as defining the expected callbacks.
The implementation actually uses a `GenServer` instance instead of an
`Agent` instance because it allows us to handle initialization
asynchronously.
"""
alias Roll35Core.Types
alias Roll35Core.Util
require Roll35Core.Types
require Logger
@call_timeout 15_000
defmacro __using__(_) do
quote do
@behaviour Roll35Core.Data.Agent
use GenServer
require Roll35Core.Data.Agent
require Roll35Core.Types
require Logger
@spec load_data(Path.t()) :: term()
def load_data(path) do
path = Path.join(Application.app_dir(:roll35_core), path)
data = YamlElixir.read_from_file!(path)
result = process_data(data)
Logger.info("Finished initializing #{__MODULE__}.")
result
end
@spec start_link(name: GenServer.server(), datapath: Path.t()) :: GenServer.on_start()
def start_link(name: name, datapath: datapath) do
Logger.info("Starting #{__MODULE__}.")
GenServer.start_link(__MODULE__, datapath, name: name)
end
@impl GenServer
def init(datapath) do
{:ok, %{datapath: datapath}, {:continue, :init}}
end
@impl GenServer
def handle_continue(:init, state) do
{:noreply, load_data(state.datapath)}
end
@impl GenServer
def handle_call({:get, function}, _, state) do
{:reply, function.(state), state}
end
end
end
@doc """
Gets an agent value via the given anonymous function.
The function `function` is sent to the `agent` which invokes
the function passing the agent state. The result of the function
invocation is returned from this function.
`timeout` is an integer greater than zero which specifies how many
milliseconds are allowed before the agent executes the function
and returns the result value, or the atom `:infinity` to wait
indefinitely. If no result is received within the specified time,
the function call fails and the caller exits.
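For example, fetching just the top-level keys of an agent's state (the agent
reference is a placeholder):
    Roll35Core.Data.Agent.get(agent, &Map.keys/1)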
"""
@spec get(GenServer.server(), (any() -> a), timeout()) :: a when a: var
def get(server, function, timeout \\ @call_timeout) do
GenServer.call(server, {:get, function}, timeout)
end
@doc """
Get a random item from a ranked item list based on a rank and subrank
"""
@spec random_ranked(GenServer.server(), Types.rank(), Types.full_subrank()) :: Types.item()
def random_ranked(agent, rank, subrank) do
Logger.debug(
"Getting random item with rank #{inspect(rank)} and subrank #{inspect(subrank)} from #{
__MODULE__
}."
)
data = get(agent, & &1)
Util.random(data[rank][subrank])
end
@doc """
Get a random item from a ranked item list based on a rank.
"""
@spec random_ranked(GenServer.server(), Types.rank()) :: Types.item()
def random_ranked(agent, rank) do
Logger.debug(
"Getting random item with rank #{inspect(rank)} and random subrank from #{__MODULE__}."
)
data = get(agent, & &1)
subrank = Util.random(Map.keys(data[rank]))
Util.random(data[rank][subrank])
end
@doc """
Get a random item from a ranked item list.
"""
@spec random_ranked(GenServer.server()) :: Types.item()
def random_ranked(agent) do
Logger.debug("Getting random item with random rank and subrank from #{__MODULE__}.")
data = get(agent, & &1)
rank = Util.random(Map.keys(data))
subrank = Util.random(Map.keys(data[rank]))
Util.random(data[rank][subrank])
end
@doc """
Get a random item from a compound itemlist for a given rank.
"""
@spec random_compound(GenServer.server(), Types.rank()) :: Types.item()
def random_compound(agent, rank) do
Logger.debug("Getting random item with rank #{inspect(rank)} from #{__MODULE__}.")
data = get(agent, & &1)
Util.random(data[rank])
end
@doc """
Get a random item from a compound itemlist.
"""
@spec random_compound(GenServer.server()) :: Types.item()
def random_compound(agent) do
Logger.debug("Getting random item with random rank from #{__MODULE__}.")
data = get(agent, & &1)
rank = Util.random(Map.keys(data))
Util.random(data[rank])
end
@doc """
Get a specific base item by name.
"""
@spec get_base(GenServer.server(), String.t()) ::
{:ok, %{atom() => term()}} | {:error, String.t()}
def get_base(agent, name) do
Logger.debug("Fetching bse item \"#{name}\".")
data = get(agent, fn data -> data.base end)
norm_name =
name
|> String.normalize(:nfd)
|> String.downcase()
result =
Enum.find(data, fn item ->
item.name |> String.normalize(:nfd) |> String.downcase() == norm_name
end)
if result == nil do
possible =
data
|> Task.async_stream(
fn item ->
item_name =
item.name
|> String.normalize(:nfd)
|> String.downcase()
cond do
String.starts_with?(item_name, norm_name) -> {item.name, 1.2}
String.ends_with?(item_name, norm_name) -> {item.name, 1.2}
String.contains?(item_name, norm_name) -> {item.name, 1.1}
true -> {item.name, String.jaro_distance(norm_name, item_name)}
end
end,
max_concurrency: min(System.schedulers_online(), 4),
ordered: false
)
|> Stream.map(fn {:ok, v} -> v end)
|> Stream.filter(fn {_, d} -> d > 0.8 end)
# sort by similarity, best matches first
|> Enum.sort(fn {_, d1}, {_, d2} -> d1 >= d2 end)
|> Enum.take(4)
|> Enum.map(fn {i, _} -> i end)
if possible == [] do
{:error, "No matching items found."}
else
{:error,
"\"#{name}\" is not a recognized item, did you possibly mean one of: \"#{
Enum.join(possible, "\", \"")
}\"?"}
end
else
{:ok, result}
end
end
@doc """
Get a random base item, possibly limited by a tag list.
"""
@spec random_base(GenServer.server(), list(atom())) :: %{atom() => term()} | nil
def random_base(agent, tags \\ []) do
Logger.debug(
"Getting random base item with tags matching #{inspect(tags)} from #{__MODULE__}."
)
data = get(agent, fn data -> data.base end)
try do
data
|> Stream.filter(fn item ->
if Enum.empty?(tags) do
true
else
Enum.all?(tags, fn tag ->
tag == item.type or tag in item.tags
end)
end
end)
|> Enum.to_list()
|> Util.random()
rescue
_ in Enum.EmptyError -> nil
end
end
@doc """
Get a random enchantment limited by the given parameters.
"""
@spec random_enchantment(
GenServer.server(),
atom(),
non_neg_integer(),
list(String.t()),
list(atom())
) :: %{atom() => term()} | nil
def random_enchantment(agent, type, bonus, enchants \\ [], limit \\ []) do
Logger.debug(
"Getting random enchantment of type #{inspect(type)} and level #{inspect(bonus)} excluding #{
inspect(enchants)
} and limited by #{inspect(limit)} from #{__MODULE__}."
)
data = get(agent, fn data -> data.enchantments[type][bonus] end)
possible =
Enum.filter(data, fn item ->
cond do
Map.has_key?(item, :exclude) and Enum.any?(enchants, &(&1 in item.exclude)) ->
false
Map.has_key?(item, :limit) and Map.has_key?(item.limit, :only) and
not Enum.any?(limit, &(&1 in item.limit.only)) ->
false
Map.has_key?(item, :limit) and Map.has_key?(item.limit, :not) and
Enum.any?(limit, &(&1 in item.limit.not)) ->
false
true ->
true
end
end)
if Enum.empty?(possible) do
nil
else
Util.random(possible)
end
end
@doc """
Get a random armor or weapon pattern.
"""
@spec random_pattern(GenServer.server(), Types.rank(), Types.subrank(), keyword()) :: %{
atom() => term()
}
def random_pattern(agent, rank, subrank, opts \\ [])
def random_pattern(agent, rank, subrank, no_specific: true)
when Types.is_rank(rank) and Types.is_subrank(subrank) do
Logger.info(
"Getting random item of rank #{inspect(rank)} and subrank #{inspect(subrank)} from #{
__MODULE__
}, ignoring specific rolls."
)
data = get(agent, fn data -> data[rank][subrank] end)
data
|> Enum.filter(fn item -> :specific not in Map.keys(item.value) end)
|> Util.random()
end
def random_pattern(agent, rank, subrank, _)
when Types.is_rank(rank) and Types.is_subrank(subrank) do
Logger.info(
"Getting random item of rank #{inspect(rank)} and subrank #{inspect(subrank)} from #{
__MODULE__
}."
)
data = get(agent, fn data -> data[rank][subrank] end)
Util.random(data)
end
@doc """
Get a list of valid weapon or armor tags.
"""
@spec tags(GenServer.server()) :: list(atom())
def tags(agent) do
Logger.info("Fetching list of valid tags from #{__MODULE__}.")
get(agent, fn data -> data.tags end)
end
@doc """
Called during agent startup to process the data loaded from disk into the correct state.
"""
@callback process_data(list | map) :: term()
end
|
apps/roll35_core/lib/roll35_core/data/agent.ex
| 0.885724
| 0.401512
|
agent.ex
|
starcoder
|
defmodule Ecto.ReplaceableString do
@moduledoc """
A custom type that replace some terms for strings.
`#{inspect(__MODULE__)}` can be used when you want to replace some terms on dump/load
a field, or even both included, for example:
Before a field persisted to the database, `#{inspect(__MODULE__)}` replaces a matched pattern
with the replacement, the final replaced value will be saved into database.
field :content, #{inspect(__MODULE__)},
on_dump: [pattern: "hello", replacement: "hi", options: [global: false]]
After a field loaded from the database, `#{inspect(__MODULE__)}` replaces a matched pattern
with the replacement, the final replaced value will be extracted into the struct, but
no changes into the original database.
field :content, #{inspect(__MODULE__)},
on_load: [pattern: ~r/test/, replacement: "TEST"]
The `:pattern` and `:replacement` options are required as a pair; the `:options` option
is optional. These three options correspond directly to the arguments of `String.replace/4`.
If neither the `:on_dump` nor the `:on_load` option is set, the following cases are equivalent
in result, but using the plain `:string` type as a base type is recommended:
field :content, #{inspect(__MODULE__)}
# equals
field :content, :string
Please note that using this type means a string replacement is invoked on every read or write
operation on the corresponding field, so make sure the scenario justifies the extra work,
or find a better way to satisfy a similar use case.
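A complete schema sketch (schema and field names are hypothetical):
    defmodule MyApp.Post do
      use Ecto.Schema
      schema "posts" do
        field :content, #{inspect(__MODULE__)},
          on_dump: [pattern: "hello", replacement: "hi"]
      end
    end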
"""
use Ecto.ParameterizedType
@doc false
@impl true
def type(_params), do: :string
@doc false
@impl true
def init(opts) do
opts
|> Keyword.take([:on_dump, :on_load])
|> Enum.reduce(%{}, fn({key, opts}, acc) ->
prepare_init(key, opts, acc)
end)
end
@doc false
@impl true
def cast(nil, _params), do: {:ok, nil}
def cast(data, _params) when not is_bitstring(data), do: :error
def cast(data, _params) do
{:ok, data}
end
@doc false
@impl true
def load(nil, _loader, _params), do: {:ok, nil}
def load(data, _loader, %{on_load: %{pattern: pattern, replacement: replacement} = on_load}) do
data = String.replace(data, pattern, replacement, Map.get(on_load, :options, []))
{:ok, data}
end
def load(data, _loader, _params), do: {:ok, data}
@doc false
@impl true
def dump(nil, _dumper, _params), do: {:ok, nil}
def dump(data, _dumper, %{on_dump: %{pattern: pattern, replacement: replacement} = on_dump}) do
data = String.replace(data, pattern, replacement, Map.get(on_dump, :options, []))
{:ok, data}
end
def dump(data, _dumper, _params), do: {:ok, data}
@doc false
@impl true
def embed_as(_format, _params) do
# make sure `load/3` will be invoked when use `on_load` case.
:dump
end
@doc false
@impl true
def equal?(a, b, _params), do: a == b
defp prepare_init(_key, nil, acc), do: acc
defp prepare_init(key, opts, acc) do
opts
|> Keyword.take([:pattern, :replacement, :options])
|> Enum.into(%{})
|> validate(key, acc)
end
defp validate(opts, _key, acc) when opts === %{} do
acc
end
defp validate(%{pattern: pattern, replacement: replacement} = opts, key, acc)
when pattern != nil and is_bitstring(replacement) do
Map.put(acc, key, opts)
end
defp validate(opts, key, _acc) do
raise ArgumentError, """
#{inspect(__MODULE__)} type must have both a `:pattern` option (a string or a regex)
and a `:replacement` option (a string) for the `#{key}` key,
but got `#{inspect(opts)}`. These options correspond to the arguments of String.replace/4.
For example:
field :my_field, #{inspect(__MODULE__)},
on_dump: [pattern: "a,b,c", replacement: ","]
or
field :my_field, #{inspect(__MODULE__)},
on_dump: [pattern: "a,b,c", replacement: ",", options: [global: false]]
"""
end
end
|
lib/ecto/replaceable_string.ex
| 0.85318
| 0.470189
|
replaceable_string.ex
|
starcoder
|
defmodule FishermanServer.Utils do
@moduledoc """
Utils provides module-agnostic convenience functions
"""
@datetime_defaults [
year: 0,
month: 1,
day: 1,
zone_abbr: "EST",
hour: 0,
minute: 0,
second: 0,
microsecond: {0, 0},
utc_offset: 0,
std_offset: 0,
time_zone: "Etc/UTC"
]
@no_error_color "#a0cf93"
@error_color "#f79292"
@doc """
Casts unix milliseconds to a microsecond-friendly datetime
"""
def unix_millis_to_dt(millis) when is_integer(millis) do
(millis * 1_000)
|> DateTime.from_unix!(:microsecond)
end
def unix_millis_to_dt(_), do: nil
@doc """
Casts a postgres-json timestamp format to a datetime object
"""
def pg_json_millis_to_dt(millis) do
(millis <> "Z")
|> Timex.Parse.DateTime.Parser.parse!("{ISO:Extended:Z}")
end
@doc """
Encode the given datetime to be url safe. Use current utc time if not specified
"""
def encode_url_datetime(datetime = %DateTime{} \\ DateTime.utc_now()) do
datetime
|> DateTime.to_unix(:millisecond)
|> to_string()
end
@doc """
Decode a url-safe datetime string back to a DateTime struct
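Round-trip sketch:
    ~U[2021-05-01 12:00:00.000Z] |> encode_url_datetime() |> decode_url_datetime()
    #=> ~U[2021-05-01 12:00:00.000Z]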
"""
def decode_url_datetime(url_datetime) do
{millis, _} = Integer.parse(url_datetime)
DateTime.from_unix!(millis, :millisecond)
end
@doc """
Build a datetime object from a map. Uses defaults for non-provided but required DateTime
struct fields
"""
def datetime_from_map(dt_map \\ %{}) do
options =
Enum.map(dt_map, fn {k, v} ->
{String.to_existing_atom(k), parse_int!(v)}
end)
options =
Keyword.merge(@datetime_defaults, options)
|> Enum.into(%{})
struct(DateTime, options)
end
@doc """
Parse a string into an integer
"""
def parse_int!(string) do
{int, ""} = Integer.parse(string)
int
end
def string_to_bool("true"), do: true
def string_to_bool("false"), do: false
@doc """
Cast the map produced by a date select form field into a DateTime struct
"""
def datetime_form_to_url_datetime(dt_map \\ %{}) do
options = Enum.map(dt_map, fn {k, v} -> {String.to_existing_atom(k), v} end)
options =
Keyword.merge(@datetime_defaults, options)
|> Enum.into(%{})
struct(DateTime, options)
end
@doc """
Determines color of the shell record background on basis
of if the command produced an error or not
"""
def pick_color(%{error: error}) do
if Enum.member?(["", nil], error) do
@no_error_color
else
@error_color
end
end
end
|
fisherman_server/lib/fisherman_server/utils.ex
| 0.823541
| 0.570451
|
utils.ex
|
starcoder
|
defmodule Kino.SmartCell do
@moduledoc ~S'''
An interface for defining custom smart cells.
A smart cell is a UI wizard designed for producing a piece of code
that accomplishes a specific task. In other words, a smart cell is
like a code template parameterized through UI interactions.
This module builds on top of `Kino.JS.Live`, consequently keeping
all of its component and communication mechanics. The additional
callbacks specify how the UI maps to source code.
## Usage
Defining a custom cell is similar to writing a regular `Kino.JS.Live`
component, with a couple specifics.
First, we only need to define callbacks, so there is no need for
using `Kino.JS.Live.new/2`. The `c:Kino.JS.Live.init/2` callback
always receives `t:attrs/0` as the first argument.
Second, we add a few new bits, namely `use Kino.SmartCell` and the
two corresponding callback definitions.
Here is an outline of a custom module
defmodule Kino.SmartCell.Custom do
use Kino.JS
use Kino.JS.Live
use Kino.SmartCell, name: "Our custom wizard"
@impl true
def init(attrs, ctx) do
...
end
# Other Kino.JS.Live callbacks
...
@impl true
def to_attrs(ctx) do
...
end
@impl true
def to_source(attrs) do
...
end
end
Additionally, in order for Livebook to pick up the custom cell, we
need to register our module. This usually happens in `application.ex`
Kino.SmartCell.register(Kino.SmartCell.Custom)
## Example
As a minimal example, that's how we can define a cell that allows
editing the underlying code directly through a textarea.
defmodule Kino.SmartCell.Plain do
use Kino.JS
use Kino.JS.Live
use Kino.SmartCell, name: "Plain code editor"
@impl true
def init(attrs, ctx) do
source = attrs["source"] || ""
{:ok, assign(ctx, source: source)}
end
@impl true
def handle_connect(ctx) do
{:ok, %{source: ctx.assigns.source}, ctx}
end
@impl true
def handle_event("update", %{"source" => source}, ctx) do
broadcast_event(ctx, "update", %{"source" => source})
{:noreply, assign(ctx, source: source)}
end
@impl true
def to_attrs(ctx) do
%{"source" => ctx.assigns.source}
end
@impl true
def to_source(attrs) do
attrs["source"]
end
asset "main.js" do
"""
export function init(ctx, payload) {
ctx.importCSS("main.css");
ctx.root.innerHTML = `
<textarea id="source"></textarea>
`;
const textarea = ctx.root.querySelector("#source");
textarea.value = payload.source;
textarea.addEventListener("change", (event) => {
ctx.pushEvent("update", { source: event.target.value });
});
ctx.handleEvent("update", ({ source }) => {
textarea.value = source;
});
ctx.handleSync(() => {
// Synchronously invokes change listeners
document.activeElement &&
document.activeElement.dispatchEvent(new Event("change"));
});
}
"""
end
asset "main.css" do
"""
#source {
box-sizing: border-box;
width: 100%;
min-height: 100px;
}
"""
end
end
And then we would register it as
Kino.SmartCell.register(Kino.SmartCell.Plain)
Note that we register a synchronization handler on the client with
`ctx.handleSync(() => ...)`. This optional handler is invoked before
evaluation and it should flush any deferred UI changes to the server.
In our example we listen to textarea's "change" event, which is only
triggered on blur, so on synchronization we trigger it programmatically.
## Collaborative editor
If a smart cell requires editing some code (like SQL), it may use
a dedicated editor instance managed by Livebook. The editor handles
syntax highlighting and collaborative editing, similarly to the
built-in cells.
To enable the editor, we need to include `:editor` configuration in
options returned from the `c:Kino.JS.Live.init/2` callback.
@impl true
def init(attrs, ctx) do
# ...
{:ok, ctx, editor: [attribute: "code", language: "elixir"]}
end
### Options
* `:attribute` - the key to put the source text under in `attrs`.
Required
* `:language` - the editor language, used for syntax highlighting.
Defaults to `nil`
* `:placement` - editor placement within the smart cell, either
`:top` or `:bottom`. Defaults to `:bottom`
* `:default_source` - the initial editor source. Defaults to `""`
## Other options
Other than the editor configuration, the following options are
supported:
* `:reevaluate_on_change` - if the cell should be reevaluated
whenever the generated source code changes. This option may be
helpful in cases where the cell output is a crucial element of
the UI interactions. Defaults to `false`
'''
require Logger
import Kino.Utils, only: [has_function?: 3]
alias Kino.JS.Live.Context
@typedoc """
Attributes are an intermediate form of smart cell state, used to
persist and restore cells.
Attributes are computed using `c:to_attrs/1` and used to generate
the source code using `c:to_source/1`.
Note that attributes are serialized and deserialized as JSON for
persistence, hence make sure to use JSON-friendly data structures.
Persisted attributes are passed to `c:Kino.JS.Live.init/2` as the
first argument and should be used to restore the relevant state.
"""
@type attrs :: map()
@doc """
Invoked to compute the smart cell state as serializable attributes.
"""
@callback to_attrs(ctx :: Context.t()) :: attrs()
@doc """
Invoked to generate source code based on the given attributes.
"""
@callback to_source(attrs()) :: String.t()
@doc """
Invoked whenever the base evaluation context changes.
This callback receives the binding and environment available to the
smart cell code.
Note that this callback runs asynchronously and it receives the PID
of the smart cell server, so the result needs to be sent explicitly
and handled using `c:Kino.JS.Live.handle_info/2`.
**Important:** remember that data sent between processes is copied,
so avoid sending large data structures. In particular, when looking
at variables, instead of sending their values, extract and send
only the relevant metadata.
**Important:** avoid any heavy work in this callback, as it runs in
the same process that evaluates code, so we don't want to block it.
"""
@callback scan_binding(server :: pid(), Code.binding(), Macro.Env.t()) :: any()
@doc """
Invoked when the smart cell code is evaluated.
This callback receives the result of an evaluation, either the
return value or an exception if raised.
This callback runs asynchronously and has the same characteristics
as `c:scan_binding/3`.
"""
@callback scan_eval_result(server :: pid(), eval_result()) :: any()
@type eval_result ::
{:ok, result :: any()}
| {:error, Exception.kind(), error :: any(), Exception.stacktrace()}
@optional_callbacks scan_binding: 3, scan_eval_result: 2
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
@behaviour Kino.SmartCell
@smart_opts opts
@before_compile Kino.SmartCell
end
end
defmacro __before_compile__(env) do
opts = Module.get_attribute(env.module, :smart_opts)
name = Keyword.fetch!(opts, :name)
quote do
def child_spec(%{ref: ref, attrs: attrs, target_pid: target_pid}) do
%{
id: __MODULE__,
start: {Kino.SmartCell.Server, :start_link, [__MODULE__, ref, attrs, target_pid]},
restart: :temporary
}
end
def __smart_definition__() do
%{
kind: Atom.to_string(__MODULE__),
module: __MODULE__,
name: unquote(name)
}
end
end
end
@doc """
Returns a list of available smart cell definitions.
"""
def definitions() do
for module <- get_modules(), do: module.__smart_definition__()
end
@doc """
Registers a new smart cell.
This should usually be called in `application.ex` when starting
the application.
## Examples
Kino.SmartCell.register(Kino.SmartCell.Custom)
"""
@spec register(module()) :: :ok
def register(module) do
unless has_function?(module, :__smart_definition__, 0) do
raise ArgumentError, "module #{inspect(module)} does not define a smart cell"
end
modules = get_modules()
updated_modules = if module in modules, do: modules, else: modules ++ [module]
put_modules(updated_modules)
end
@registry_key :smart_cell_modules
defp get_modules() do
Application.get_env(:kino, @registry_key, [])
end
defp put_modules(modules) do
Application.put_env(:kino, @registry_key, modules)
end
@doc """
Generates unique variable names with the given prefix.
When `var_name` is `nil`, allocates and returns the next available
name. Otherwise, marks the given suffix as taken, provided that
`var_name` has the given prefix.
This function can be used to generate default variable names during
smart cell initialization, so that they don't overlap.
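For example (the underlying counter is global, so results depend on prior calls):
    Kino.SmartCell.prefixed_var_name("df", nil)
    #=> "df"
    Kino.SmartCell.prefixed_var_name("df", nil)
    #=> "df2"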
"""
@spec prefixed_var_name(String.t(), String.t() | nil) :: String.t()
def prefixed_var_name(prefix, var_name)
def prefixed_var_name(prefix, nil) do
case Kino.Counter.next(var_counter_key(prefix)) do
1 -> prefix
n -> "#{prefix}#{n}"
end
end
def prefixed_var_name(prefix, var_name) do
with {:ok, suffix} <- parse_var_prefix(var_name, prefix),
{:ok, n} <- parse_var_suffix(suffix) do
Kino.Counter.put_max(var_counter_key(prefix), n)
end
var_name
end
defp parse_var_prefix(string, prefix) do
if String.starts_with?(string, prefix) do
{:ok, String.replace_prefix(string, prefix, "")}
else
:error
end
end
defp parse_var_suffix(""), do: {:ok, 1}
defp parse_var_suffix(other) do
case Integer.parse(other) do
{n, ""} when n > 1 -> {:ok, n}
_ -> :error
end
end
defp var_counter_key(prefix), do: {:smart_cell_variable, prefix}
@doc """
Checks if the given string is a valid Elixir variable name.
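For example:
    Kino.SmartCell.valid_variable_name?("my_var")
    #=> true
    Kino.SmartCell.valid_variable_name?("MyVar")
    #=> false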
"""
@spec valid_variable_name?(String.t()) :: boolean()
def valid_variable_name?(string) when is_binary(string) do
atom = String.to_atom(string)
macro_classify_atom(atom) == :identifier
end
@doc """
Converts the given AST to a formatted code string.
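For example:
    Kino.SmartCell.quoted_to_string(quote do: 1 + 1)
    #=> "1 + 1"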
"""
@spec quoted_to_string(Macro.t()) :: String.t()
def quoted_to_string(quoted) do
quoted
|> Code.quoted_to_algebra()
|> Inspect.Algebra.format(90)
|> IO.iodata_to_binary()
end
# ---
# TODO: use Macro.classify_atom/1 on Elixir 1.14
defp macro_classify_atom(atom) do
case macro_inner_classify(atom) do
:alias -> :alias
:identifier -> :identifier
type when type in [:unquoted_operator, :not_callable] -> :unquoted
_ -> :quoted
end
end
defp macro_inner_classify(atom) when is_atom(atom) do
cond do
atom in [:%, :%{}, :{}, :<<>>, :..., :.., :., :"..//", :->] ->
:not_callable
atom in [:"::"] ->
:quoted_operator
Macro.operator?(atom, 1) or Macro.operator?(atom, 2) ->
:unquoted_operator
true ->
charlist = Atom.to_charlist(atom)
if macro_valid_alias?(charlist) do
:alias
else
case :elixir_config.identifier_tokenizer().tokenize(charlist) do
{kind, _acc, [], _, _, special} ->
if kind == :identifier and not :lists.member(?@, special) do
:identifier
else
:not_callable
end
_ ->
:other
end
end
end
end
defp macro_valid_alias?('Elixir' ++ rest), do: macro_valid_alias_piece?(rest)
defp macro_valid_alias?(_other), do: false
defp macro_valid_alias_piece?([?., char | rest]) when char >= ?A and char <= ?Z,
do: macro_valid_alias_piece?(macro_trim_leading_while_valid_identifier(rest))
defp macro_valid_alias_piece?([]), do: true
defp macro_valid_alias_piece?(_other), do: false
defp macro_trim_leading_while_valid_identifier([char | rest])
when char >= ?a and char <= ?z
when char >= ?A and char <= ?Z
when char >= ?0 and char <= ?9
when char == ?_ do
macro_trim_leading_while_valid_identifier(rest)
end
defp macro_trim_leading_while_valid_identifier(other) do
other
end
end
|
lib/kino/smart_cell.ex
| 0.907303
| 0.563498
|
smart_cell.ex
|
starcoder
|
defmodule SieveOfEratosthenes do
@moduledoc """
Implements Sieve of Eratosthenes algorithm for finding all prime numbers up to the given limit.
Uses Elixir Map to store the list of odd integers for sieving.
Taken from: https://github.com/ybod/elixir_primes
"""
alias Helper.Sequence
@doc """
Returns the list of the prime numbers up to the given limit. Limit must be integer and larger than 0.
## Examples
iex> SieveOfEratosthenes.get_primes_list(10)
[2, 3, 5, 7]
"""
@spec get_primes_list(pos_integer) :: [pos_integer]
def get_primes_list(limit) when limit == 2, do: [2]
def get_primes_list(limit) when limit < 2, do: []
def get_primes_list(limit) when limit > 2 do
map =
Sequence.get_odd(3, limit)
|> Map.new(&{&1, :prime})
sieve(map, 3, limit)
end
# Sieving: all primes already found, no need to look further
defp sieve(map, odd_num, limit) when odd_num * odd_num > limit, do: get_primes(map)
# Check if the next odd number can be found as a Map key.
# If found - it's a prime number and we need to remove all multiples of this prime from Map.
defp sieve(map, odd_num, limit) do
new_map =
if Map.has_key?(map, odd_num), do: delete_composite(odd_num, map, limit), else: map
sieve(new_map, odd_num + 2, limit)
end
defp get_primes(map) do
primes =
Map.keys(map)
|> Enum.sort()
[2 | primes]
end
defp delete_composite(first, map, limit) do
composite_nums = Sequence.get(first * first, limit, 2 * first)
Map.drop(map, composite_nums)
end
end
defmodule Helper.Sequence do
@moduledoc """
Helper functions for generating sequence of integers
"""
import Integer, only: [is_odd: 1]
@doc """
Returns the sequence of the odd integers starting from the first element and up to the given limit. First element must be 1 or greater!
## Examples
iex> Helper.Sequence.get_odd(3, 20)
[3, 5, 7, 9, 11, 13, 15, 17, 19]
"""
@spec get_odd(pos_integer, pos_integer) :: [pos_integer]
def get_odd(first, limit)
when is_odd(first) and first > 0 and limit >= first,
do: :lists.seq(first, limit, 2)
@doc """
Returns the sequence of integers starting from the first element and up to the given limit with the specified increment.
## Examples
iex> Helper.Sequence.get(1, 26, 3)
[1, 4, 7, 10, 13, 16, 19, 22, 25]
"""
@spec get(pos_integer, pos_integer, pos_integer) :: [pos_integer]
def get(first, limit, incr) when first + incr > limit, do: [first]
def get(first, limit, incr), do: :lists.seq(first, limit, incr)
end
|
elixir/plug-cowboy/lib/plug_cowboy_app/utils/eratostenes.ex
| 0.854156
| 0.559591
|
eratostenes.ex
|
starcoder
|
defmodule Dayron.Adapter do
@moduledoc ~S"""
Behaviour for creating Dayron Adapters
Adapters are wrappers around client libraries, responsible to send HTTP
requests and parse the response status and body.
## Example
defmodule Dayron.CustomAdapter do
@behaviour Dayron.Adapter
def get(url, headers, opts) do
make_a_get_request(url, headers, opts)
end
end
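A stub covering every callback, handy in tests (the `Dayron.Response` fields
shown are assumptions about that struct):
    defmodule Dayron.StubAdapter do
      @behaviour Dayron.Adapter
      def get(_url, _headers, _opts), do: {:ok, %Dayron.Response{status_code: 200, body: %{}}}
      def post(_url, body, _headers, _opts), do: {:ok, %Dayron.Response{status_code: 201, body: body}}
      def patch(_url, body, _headers, _opts), do: {:ok, %Dayron.Response{status_code: 200, body: body}}
      def delete(_url, _headers, _opts), do: {:ok, %Dayron.Response{status_code: 204, body: nil}}
    end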
"""
alias Dayron.Response
alias Dayron.ClientError
@type headers :: [{binary, binary}] | %{binary => binary}
@type body :: struct
@type response :: {:ok, Response.t} | {:error, ClientError.t}
@doc """
Issues a GET request to the given url. The headers param is an enumerable
consisting of two-item tuples that will be sent as request headers.
Returns `{:ok, response}` if the request is successful,
`{:error, reason}` otherwise.
## Options:
* `:timeout` - timeout to establish a connection, in milliseconds.
* `:recv_timeout` - timeout used when receiving a connection.
* `:stream_to` - a PID to stream the response to
* `:proxy` - a proxy to be used for the request;
it can be a regular url or a `{Host, Proxy}` tuple
* `:proxy_auth` - proxy authentication `{User, Password}` tuple
* `:ssl` - SSL options supported by the `ssl` erlang module
* `:follow_redirect` - a boolean that causes redirects to be followed
* `:max_redirect` - the maximum number of redirects to follow
* `:params` - an enumerable consisting of two-item tuples that will be
appended to the url as query string parameters
Timeouts can be an integer or `:infinity`.
Check the adapter implementations for default values.
"""
@callback get(binary, headers, Keyword.t) :: response
@doc """
Issues a POST request to the given url.
Returns `{:ok, response}` if the request is successful,
`{:error, reason}` otherwise.
## Arguments:
* `url` - target url as a binary string or char list
* `body` - request body. Usually a struct deriving `Poison.Encoder`
* `headers` - HTTP headers as an orddict
(e.g., `[{"Accept", "application/json"}]`)
* `options` - Keyword list of options
## Options:
* `:timeout` - timeout to establish a connection, in milliseconds.
* `:recv_timeout` - timeout used when receiving a connection.
* `:stream_to` - a PID to stream the response to
* `:proxy` - a proxy to be used for the request;
it can be a regular url or a `{Host, Proxy}` tuple
* `:proxy_auth` - proxy authentication `{User, Password}` tuple
* `:ssl` - SSL options supported by the `ssl` erlang module
* `:follow_redirect` - a boolean that causes redirects to be followed
* `:max_redirect` - the maximum number of redirects to follow
* `:params` - an enumerable consisting of two-item tuples that will be
appended to the url as query string parameters
Timeouts can be an integer or `:infinity`.
Check the adapter implementations for default values.
"""
@callback post(binary, body, headers, Keyword.t) :: response
@doc """
Issues a PATCH request to the given url.
Returns `{:ok, response}` if the request is successful,
`{:error, reason}` otherwise.
## Arguments:
* `url` - target url as a binary string or char list
* `body` - request body. Usually a struct deriving `Poison.Encoder`
* `headers` - HTTP headers as an orddict
(e.g., `[{"Accept", "application/json"}]`)
* `options` - Keyword list of options
## Options:
* `:timeout` - timeout to establish a connection, in milliseconds.
* `:recv_timeout` - timeout used when receiving a connection.
* `:stream_to` - a PID to stream the response to
* `:proxy` - a proxy to be used for the request;
it can be a regular url or a `{Host, Proxy}` tuple
* `:proxy_auth` - proxy authentication `{User, Password}` tuple
* `:ssl` - SSL options supported by the `ssl` erlang module
* `:follow_redirect` - a boolean that causes redirects to be followed
* `:max_redirect` - the maximum number of redirects to follow
* `:params` - an enumerable consisting of two-item tuples that will be
appended to the url as query string parameters
Timeouts can be an integer or `:infinity`.
Check the adapter implementations for default values.
"""
@callback patch(binary, body, headers, Keyword.t) :: response
@doc """
Issues a DELETE request to the given url.
Returns `{:ok, response}` if the request is successful,
`{:error, reason}` otherwise.
## Arguments:
* `url` - target url as a binary string or char list
* `headers` - HTTP headers as an orddict
(e.g., `[{"Accept", "application/json"}]`)
* `options` - Keyword list of options
## Options:
* `:timeout` - timeout to establish a connection, in milliseconds.
* `:recv_timeout` - timeout used when receiving a connection.
* `:stream_to` - a PID to stream the response to
* `:proxy` - a proxy to be used for the request;
it can be a regular url or a `{Host, Proxy}` tuple
* `:proxy_auth` - proxy authentication `{User, Password}` tuple
* `:ssl` - SSL options supported by the `ssl` erlang module
* `:follow_redirect` - a boolean that causes redirects to be followed
* `:max_redirect` - the maximum number of redirects to follow
* `:params` - an enumerable consisting of two-item tuples that will be
appended to the url as query string parameters
Timeouts can be an integer or `:infinity`.
Check the adapter implementations for default values.
"""
@callback delete(binary, headers, Keyword.t) :: response
end
|
lib/dayron/adapter.ex
| 0.953416
| 0.576125
|
adapter.ex
|
starcoder
|
defmodule Discovergy.Measurements do
@moduledoc """
The Measurements endpoint
"""
use Discovergy
alias Discovergy.Measurement
@doc """
Return the measurements for the specified meter in the specified time interval.
## Options
* `:fields` - list of measurement fields to return in the result (use
`Discovergy.Metadata.get_field_names/2` to get all available fields)
* `:resolution` - time distance between returned readings. Possible values:
`:raw` (default), `:three_minutes`, `:fifteen_minutes`, `:one_hour`, `:one_day`,
`:one_week`, `:one_month`, `:one_year`
* `:disaggregation` - Include load disaggregation as pseudo-measurement
fields, if available. Only applies if raw resolution is selected
* `:each` - Return data from the virtual meter itself (false) or all its
sub-meters (true). Only applies if meterId refers to a virtual meter
## Examples
iex> Discovergy.Measurements.get_readings(client, meter_id, from, to,
...> resolution: :one_month)
{:ok, [
%Discovergy.Measurement{
time: ~U[2019-07-16 22:00:00.000Z],
values: %{
"energy" => 8069238602135,
"energy1" => 0,
"energy2" => 0,
"energyOut" => 2033461000,
"energyOut1" => 0,
"energyOut2" => 0,
"power" => 512286,
"power1" => 189978,
"power2" => 129660,
"power3" => 192654,
"voltage1" => 230469,
"voltage2" => 231599,
"voltage3" => 230247
}
},
...
]}
"""
@spec get_readings(Client.t(), Meter.id(), DateTime.t(), DateTime.t(), Keyword.t()) ::
{:ok, [Measurement.t()]} | {:error, Error.t()}
def get_readings(%Client{} = client, meter_id, from, to \\ nil, opts \\ []) do
parameters =
[
meterId: meter_id,
from: DateTime.to_unix(from, :millisecond),
to: to && DateTime.to_unix(to, :millisecond),
fields: Enum.join(opts[:fields] || [], ","),
resolution: opts[:resolution],
disaggregation: opts[:disaggregation],
each: opts[:each]
]
|> Enum.reject(fn {_, v} -> v in [nil, ""] end)
with {:ok, measurements} <- get(client, "/readings", query: parameters) do
{:ok, Enum.map(measurements, &Measurement.into/1)}
end
end
@doc """
Return the last measurement for the specified meter.
## Options
* `:fields` - list of measurement fields to return in the result (use
`Discovergy.Metadata.get_field_names/2` to get all available fields)
* `:each` - Return data from the virtual meter itself (false) or all its
sub-meters (true). Only applies if meterId refers to a virtual meter
## Examples
iex> Discovergy.Measurements.get_last_reading(client, meter_id)
{:ok, %Discovergy.Measurement{
time: ~U[2019-07-16 22:00:00.000Z],
values: %{
"energy" => 441576730000,
"energyOut" => 2154853000,
"power" => 205980,
"power1" => 63090,
"power2" => 53780,
"power3" => 89100,
"voltage1" => 234100,
"voltage2" => 234000,
"voltage3" => 233800
}
}}
"""
@spec get_last_reading(Client.t(), Meter.id(), Keyword.t()) ::
{:ok, Measurement.t()} | {:error, Error.t()}
def get_last_reading(%Client{} = client, meter_id, opts \\ []) do
parameters =
[
meterId: meter_id,
fields: Enum.join(opts[:fields] || [], ","),
each: opts[:each]
]
|> Enum.reject(fn {_, v} -> v in [nil, ""] end)
with {:ok, measurement} <- get(client, "/last_reading", query: parameters) do
{:ok, Measurement.into(measurement)}
end
end
@doc """
Return various statistics calculated over all measurements for the specified
meter in the specified time interval.
## Options
* `:fields` - list of measurement fields to return in the result (use
`Discovergy.Metadata.get_field_names/2` to get all available fields)
## Examples
iex> Discovergy.Measurements.get_statistics(client, meter_id, from)
{:ok, %{
"energy" => %{
"count" => 53962,
"maximum" => 441687910000,
"mean" => 420770138841.7405,
"minimum" => 402102161000,
"variance" => 1.430940674699829e20
},
"energyOut" => %{
...
},
"power" => %{
...
},
"power1" => %{
...
},
"power2" => %{
...
},
"power3" => %{
...
},
"voltage1" => %{
...
},
"voltage2" => %{
...
},
"voltage3" => %{
...
}
}}
"""
@spec get_statistics(Client.t(), Meter.id(), DateTime.t(), DateTime.t(), Keyword.t()) ::
{:ok, map()} | {:error, Error.t()}
def get_statistics(%Client{} = client, meter_id, from, to \\ nil, opts \\ []) do
parameters =
[
meterId: meter_id,
from: DateTime.to_unix(from, :millisecond),
to: to && DateTime.to_unix(to, :millisecond),
fields: Enum.join(opts[:fields] || [], ",")
]
|> Enum.reject(fn {_, v} -> v in [nil, ""] end)
get(client, "/statistics", query: parameters)
end
@doc """
Return load profile for the given meter.
## Options
* `:resolution` - reading resolution. Possible values: `:raw`, `:one_day`,
`:one_month`, `:one_year`
## Examples
iex> Discovergy.Measurements.get_load_profile(client, meter_id,
...> ~D{2020-07-01},
...> ~D{2020-07-01},
...> resolution: :one_day)
{:ok, [
%{
"1.29" => 0.816,
"2.29" => 833.028,
"3.29" => 179.376,
"4.29" => 16.128,
"5.29" => 9.06,
"6.29" => 170.328,
"7.29" => 12.744,
"8.29" => 3.384,
"status" => 0,
"time" => 1593640800000
},
%{"1.6.0" => 0.96, "time" => 1593585000000},
%{"2.6.0" => 142.44, "time" => 1593601200000}
]}
"""
@spec get_load_profile(Client.t(), Meter.id(), Date.t(), Date.t(), Keyword.t()) ::
{:ok, [map]} | {:error, Error.t()}
def get_load_profile(%Client{} = client, meter_id, from, to, opts \\ []) do
{from_year, from_month, from_day} = Date.to_erl(from)
{to_year, to_month, to_day} = Date.to_erl(to)
parameters =
[
meterId: meter_id,
fromYear: from_year,
fromMonth: from_month,
fromDay: from_day,
toYear: to_year,
toMonth: to_month,
toDay: to_day,
resolution: opts[:resolution]
]
|> Enum.reject(&match?({_, nil}, &1))
get(client, "/load_profile", query: parameters)
end
@doc """
Return the raw, unmodified load profile file as sent by the specified RLM
meter on the specified date.
## Examples
iex> {:ok, <<2, profile::binary>>} =
...> Discovergy.Measurements.get_raw_load_profile(client, meter_id, ~D{2020-07-01})
{:ok, <<2, 80, 46, 48, 49, 40, 49, 50, 48, 48, 55, 48, 49, 48, 48, 49,
53, 48, 48, 41, 40, 48, 48, 48, 48, 48, 48, 48, 48, 41, 40, 49, 53, 41,
40, 56, 41, 40, 49, 46, 50, 57, 41, 40, 107, 87, 104, 41, ...>>}
iex> String.split(profile)
["P.01(1200701001500)(00000000)(15)(8)(1.29)(kWh)(2.29)(kWh)(3.29)(kvarh)(4.29)...",
"(0.0001)(0.0000)(0.0000)(0.0032)(0.0000)(0.0000)(0.0026)(0.0007)",
"(0.0000)(0.0000)(0.0000)(0.0035)(0.0000)(0.0000)(0.0029)(0.0005)",
"(0.0000)(0.0001)(0.0000)(0.0036)(0.0000)(0.0000)(0.0030)(0.0006)",
"""
@spec get_raw_load_profile(Client.t(), Meter.id(), Date.t()) ::
{:ok, String.t()} | {:error, Error.t()}
def get_raw_load_profile(%Client{} = client, meter_id, date) do
{year, month, day} = Date.to_erl(date)
parameters = [
meterId: meter_id,
year: year,
month: month,
day: day
]
get(client, "/raw_load_profile", query: parameters)
end
end
|
lib/discovergy/measurements.ex
| 0.951369
| 0.687827
|
measurements.ex
|
starcoder
|
defmodule Magic do
@moduledoc """
The `Magic` module provides convenience [sigil](https://elixir-lang.org/getting-started/sigils.html) wrappers for running system commands via `Port` and `System.cmd/3`
"""
@doc """
The macro catches all errors and exceptions and
transforms them into a normal `{:error, reason}` result.
Note: this may have some performance impact.
## Examples:
import Magic
defp_protected oops! do
raise ArgumentError, "oops!"
end
oops!() == {:error, %ArgumentError{message: "oops!"}}
"""
defmacro defp_protected(head, body) do
quote do
defp unquote(head) do
unquote(body[:do])
rescue
reason -> {:error, reason}
catch
reason -> {:error, reason}
end
end
end
@doc """
Executes the given command. Raises an exception if the command does not exist or the execution fails.
## Examples:
iex> import Magic
iex> ~x(echo 123)
{:ok, ["123"]}
iex> import Magic
iex> ~x(./lib ls)c
{:ok, ["app.ex", "http.ex", "magic.ex", "random.ex", "response.ex", "shotgun.ex"]}
c = CD: run the command in the given directory (the first token inside the sigil is the directory).
"""
def sigil_x(string, mod \\ []) do
execute(string, mod)
end
@doc """
Executes the given command. Instead of raising, returns `{:error, reason}` if the command does not exist or the execution fails.
## Examples:
iex> import Magic
iex> ~q(echo 123)
{:ok, ["123"]}
"""
def sigil_q(term, modifiers) do
execute(term, modifiers)
rescue
reason -> {:error, reason}
catch
reason -> {:error, reason}
end
@doc ~S"""
Runs a command asynchronously via `Port.open/2` and returns the resulting `Port`.
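Receiving output from the returned port (a sketch):
    import Magic
    port = ~b(echo hello)
    receive do
      {^port, {:data, data}} -> data
    end
    #=> "hello\n"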
"""
def sigil_b(string, []) do
sigil_b(string, [?s])
end
def sigil_b(string, [mod]) when mod == ?s do
async_run(string)
end
def sigil_b(string, [mod]) when mod == ?c do
[cd | cmd] = String.split(string, " ", parts: 2)
async_run(hd(cmd), cd: cd)
end
defp async_run(cmd, opts \\ []) do
opts = [:binary, :exit_status, {:parallelism, true}, :stderr_to_stdout] ++ opts
Port.open({:spawn, cmd}, opts)
end
defp execute(string, []) do
execute(string, [?s])
end
defp execute(string, [mod]) when mod == ?s do
[cmd | args] = String.split(string)
run(cmd, args)
end
defp execute(string, [mod]) when mod == ?c do
[cd | other] = String.split(string)
[cmd | args] = other
run(cmd, args, cd: cd)
end
defp execute(_string, _mods) do
raise ArgumentError, "modifier must be one of: c, s"
end
defp run(cmd, args, opts \\ []) do
case System.cmd(cmd, args, [stderr_to_stdout: true] ++ opts) do
{output, 0} ->
{:ok, output |> String.split("\n", trim: true)}
{reason, _} ->
raise reason
end
end
end
|
lib/magic.ex
| 0.75037
| 0.431824
|
magic.ex
|
starcoder
|
defmodule Tournament do
defmodule Tally do
alias __MODULE__, as: Tally
defstruct draws: 0, losses: 0, wins: 0
@draw_points 1
@win_points 3
def init_wins, do: %Tally{wins: 1}
def init_losses, do: %Tally{losses: 1}
def init_draws, do: %Tally{draws: 1}
def wins(%Tally{wins: wins}), do: wins
def losses(%Tally{losses: losses}), do: losses
def draws(%Tally{draws: draws}), do: draws
def increment_wins(%Tally{wins: wins} = tally) do
%Tally{tally | wins: wins + 1}
end
def increment_losses(%Tally{losses: losses} = tally) do
%Tally{tally | losses: losses + 1}
end
def increment_draws(%Tally{draws: draws} = tally) do
%Tally{tally | draws: draws + 1}
end
def matches_played(%Tally{draws: draws, losses: losses, wins: wins}) do
wins + draws + losses
end
def points(%Tally{draws: draws, wins: wins}) do
wins * @win_points + draws * @draw_points
end
end
defmodule Table do
@report_results ~w[matches_played wins draws losses points]a
@team_header "Team"
@team_column_padding 31
@score_column_padding 3
@score_header "| MP |  W |  D |  L |  P"
@divider "|"
def output(result) do
body =
result
|> Enum.map(&append_team_results/1)
|> Enum.join("\n")
header() <> body
end
defp header do
String.pad_trailing(@team_header, @team_column_padding) <>
@score_header <>
"\n"
end
defp append_team_results({team_name, tally}) do
String.pad_trailing(team_name, @team_column_padding) <>
team_scores_to_string(tally)
end
defp team_scores_to_string(tally) do
@report_results
|> Enum.map(&score_to_string(tally, &1))
|> Enum.join(" ")
end
defp score_to_string(tally, score) do
score =
Tally
|> apply(score, [tally])
|> to_string()
|> String.pad_leading(@score_column_padding)
@divider <> score
end
end
@input_separator ";"
@doc """
Given `input` lines representing two teams and whether the first of them won,
lost, or reached a draw, separated by semicolons, calculate the statistics
for each team's number of games played, won, drawn, lost, and total points
for the season, and return a nicely-formatted string table.
A win earns a team 3 points, a draw earns 1 point, and a loss earns nothing.
Order the outcome by most total points for the season, and settle ties by
listing the teams in alphabetical order.
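For example (exact column padding comes from the `Table` module):
    tally(["Allegoric Alaskans;Blithering Badgers;win"])
    #=> a table listing Allegoric Alaskans with 3 points and Blithering Badgers with 0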
"""
@spec tally(input :: list(String.t())) :: String.t()
def tally(input) do
input
|> Enum.reduce(%{}, &tally_result/2)
|> to_sorted_list()
|> Table.output()
end
defp tally_result(result, acc) do
result
|> String.split(@input_separator)
|> tally_outcome(acc)
end
defp tally_outcome([team1, team2, "win"], acc) do
acc
|> record_win(team1)
|> record_loss(team2)
end
defp tally_outcome([team1, team2, "loss"], acc) do
acc
|> record_loss(team1)
|> record_win(team2)
end
defp tally_outcome([team1, team2, "draw"], acc) do
acc
|> record_draw(team1)
|> record_draw(team2)
end
defp tally_outcome(_result, acc), do: acc
defp record_win(acc, team) do
Map.update(acc, team, Tally.init_wins(), &Tally.increment_wins/1)
end
defp record_loss(acc, team) do
Map.update(acc, team, Tally.init_losses(), &Tally.increment_losses/1)
end
defp record_draw(acc, team) do
Map.update(acc, team, Tally.init_draws(), &Tally.increment_draws/1)
end
defp to_sorted_list(results) do
results
|> Map.to_list()
|> Enum.sort_by(&order_points_descending_then_team_name/1)
end
defp order_points_descending_then_team_name({team_name, tally}) do
{-Tally.points(tally), team_name}
end
end
|
elixir/tournament/lib/tournament.ex
| 0.756088
| 0.663328
|
tournament.ex
|
starcoder
|
defmodule Arrow.Array do
defstruct [:reference]
alias Arrow.Conversion
alias Arrow.Native
@behaviour Access
@type t :: %Arrow.Array{}
@spec sum(t()) :: number() | nil
def sum(%Arrow.Array{} = array) do
Native.array_sum(array)
end
@spec min(t()) :: number() | nil
def min(%Arrow.Array{} = array) do
Native.array_min(array)
end
@spec max(t()) :: number() | nil
def max(%Arrow.Array{} = array) do
Native.array_max(array)
end
@spec len(t()) :: integer()
def len(%Arrow.Array{} = array) do
Native.len(array)
end
@spec to_list(t()) :: list(term())
def to_list(%Arrow.Array{} = array) do
values = Native.to_list(array)
case data_type(array) do
{:timestamp_us, 64} -> values |> Enum.map(&Conversion.unix_to_datetime(&1, :microsecond))
{:date, 32} -> values |> Enum.map(&Conversion.days_to_date/1)
_ -> values
end
end
@spec slice(t(), integer(), integer()) :: t()
def slice(%Arrow.Array{} = array, offset, length) do
Native.array_slice(array, offset, length)
end
@spec offset(t()) :: integer()
def offset(%Arrow.Array{} = array), do: Native.array_offset(array)
@spec is_null(t(), integer()) :: boolean()
def is_null(%Arrow.Array{} = array, idx), do: Native.array_is_null(array, idx)
@spec is_valid(t(), integer()) :: boolean()
def is_valid(%Arrow.Array{} = array, idx), do: Native.array_is_valid(array, idx)
@spec null_count(t()) :: integer()
def null_count(%Arrow.Array{} = array), do: Native.array_null_count(array)
@spec is_empty(t()) :: boolean()
def is_empty(%Arrow.Array{} = array), do: Native.array_is_empty(array)
@spec data_type(t()) :: Arrow.Type.t()
def data_type(%Arrow.Array{} = array), do: Native.array_data_type(array)
@impl true
@spec fetch(t(), Range.t()) :: {:ok, t()}
def fetch(%Arrow.Array{} = array, _.._ = range),
do: {:ok, slice(array, range.first, range.last - range.first + 1)}
@impl true
@spec fetch(t(), integer()) :: {:ok, t()}
def fetch(%Arrow.Array{} = array, key), do: {:ok, slice(array, key, 1)}
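# Access usage sketch (hypothetical; assumes some constructor producing an
# %Arrow.Array{} exists elsewhere in the library):
#
#     arr[0]      # slice of length 1 at index 0
#     arr[1..2]   # inclusive range: slice of length 2 starting at index 1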
@impl true
def get_and_update(_array, _key, _function) do
raise "Access.get_and_update/3 not implemented for Arrow.Array"
end
@impl true
def pop(_array, _key) do
raise "Access.pop/2 not implemented for Arrow.Array"
end
@spec debug(t()) :: String.t()
def debug(%Arrow.Array{} = array), do: Native.array_debug(array)
end
defimpl Inspect, for: Arrow.Array do
def inspect(array, _opts) do
Arrow.Array.debug(array)
end
end
|
lib/arrow/array.ex
| 0.841403
| 0.586316
|
array.ex
|
starcoder
|
defmodule Univrse.Alg.ECDH_AES do
@moduledoc """
ECDH_AES algorithm module.
Implements ECDH-ES+AES_GCM encryption and decryption.
https://tools.ietf.org/html/rfc7518#section-4.6
"""
alias Univrse.Key
alias Univrse.Alg.AES_GCM
@doc """
Decrypts the ciphertext with the key using the specified algorithm.
Accepted options:
* `epk` - Ephemeral public key
* `apu` - Agreement PartyUInfo
* `apv` - Agreement PartyVInfo
* Any accepted AES_GCM options
"""
@spec decrypt(String.t, binary, Key.t, keyword) :: {:ok, binary} | {:error, any}
def decrypt(alg, encrypted, key, opts \\ [])
def decrypt("ECDH-ES+A128GCM", encrypted, %Key{type: "EC", params: %{crv: "secp256k1", d: privkey}}, opts) do
epk = Keyword.get(opts, :epk, "")
secret = privkey
|> Curvy.get_shared_secret(epk)
|> concat_kdf(128, Keyword.put(opts, :alg, "ECDH-ES+A128GCM"))
AES_GCM.decrypt("A128GCM", encrypted, %Key{type: "oct", params: %{k: secret}}, opts)
end
def decrypt("ECDH-ES+A256GCM", encrypted, %Key{type: "EC", params: %{crv: "secp256k1", d: privkey}}, opts) do
epk = Keyword.get(opts, :epk, "")
secret = privkey
|> Curvy.get_shared_secret(epk)
|> concat_kdf(256, Keyword.put(opts, :alg, "ECDH-ES+A256GCM"))
AES_GCM.decrypt("A256GCM", encrypted, %Key{type: "oct", params: %{k: secret}}, opts)
end
def decrypt(_alg, _encrypted, _key, _opts),
do: {:error, :invalid_key}
@doc """
Encrypts the message with the key using the specified algorithm. Returns a
three-part tuple containing the encrypted ciphertext and any headers to add to
the Recipient.
Accepted options:
* `apu` - Agreement PartyUInfo
* `apv` - Agreement PartyVInfo
* Any accepted AES_GCM options
"""
@spec encrypt(String.t, binary, Key.t, keyword) :: {:ok, binary, map} | {:error, any}
def encrypt(alg, message, key, opts \\ [])
def encrypt("ECDH-ES+A128GCM", message, %Key{type: "EC", params: %{crv: "secp256k1", x: <<x::big-size(256)>>, y: <<y::big-size(256)>>}}, opts) do
ephemeral_key = Curvy.generate_key()
pubkey = Curvy.Key.from_point(%Curvy.Point{x: x, y: y})
secret = ephemeral_key
|> Curvy.get_shared_secret(pubkey)
|> concat_kdf(128, Keyword.put(opts, :alg, "ECDH-ES+A128GCM"))
with {:ok, encrypted, headers} <- AES_GCM.encrypt("A128GCM", message, %Key{type: "oct", params: %{k: secret}}, opts) do
epk = Curvy.Key.to_pubkey(ephemeral_key)
{:ok, encrypted, Map.put(headers, "epk", epk)}
end
end
def encrypt("ECDH-ES+A256GCM", message, %Key{type: "EC", params: %{crv: "secp256k1", x: <<x::big-size(256)>>, y: <<y::big-size(256)>>}}, opts) do
ephemeral_key = Curvy.generate_key()
pubkey = Curvy.Key.from_point(%Curvy.Point{x: x, y: y})
secret = ephemeral_key
|> Curvy.get_shared_secret(pubkey)
|> concat_kdf(256, Keyword.put(opts, :alg, "ECDH-ES+A256GCM"))
with {:ok, encrypted, headers} <- AES_GCM.encrypt("A256GCM", message, %Key{type: "oct", params: %{k: secret}}, opts) do
epk = Curvy.Key.to_pubkey(ephemeral_key)
{:ok, encrypted, Map.put(headers, "epk", epk)}
end
end
def encrypt(_alg, _message, _key, _opts),
do: {:error, :invalid_key}
# Implements Concat KDF as defined in NIST.800-56A.
defp concat_kdf(secret, keylen, opts) do
alg = Keyword.get(opts, :alg, "")
apu = Keyword.get(opts, :apu, "")
apv = Keyword.get(opts, :apv, "")
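# SHA-256 yields 256 bits; the pattern match below keeps only the first
# keylen bits (128 or 256), which is the truncation step of this
# single-round KDF.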
<<kdf::bits-size(keylen), _::binary>> = :crypto.hash(:sha256, <<
secret::binary,
keylen::big-size(32),
byte_size(alg)::big-size(32), alg::binary,
byte_size(apu)::big-size(32), apu::binary,
byte_size(apv)::big-size(32), apv::binary,
""
>>)
kdf
end
end
|
lib/univrse/alg/ecdh_aes.ex
| 0.696475
| 0.401072
|
ecdh_aes.ex
|
starcoder
|
defmodule Imagineer.Image.PNG.Palette do
alias Imagineer.Image.PNG
@moduledoc """
Packing and unpacking palettes.
PNGs can optionally store all colors that they
use in a palette, then have their pixels simply reference one of those colors
instead of encoding them all individually.
Sometimes this can reduce file size and/or improve compression. When is this
the case? This library author has no idea.
"""
def unpack(%PNG{color_type: 3, palette: palette, pixels: pixels} = image) do
%PNG{image | pixels: extract_pixels_from_palette(pixels, palette)}
end
# If the image doesn't have a color type of 3, it doesn't use a palette
def unpack(image) do
image
end
def pack(%PNG{color_type: 3} = image) do
build_palette_map(image)
|> replace_pixels
|> convert_palette_map_to_array
end
def pack(image) do
image
end
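# Illustrative sketch (values are made up): given a palette of
# [{255, 0, 0}, {0, 0, 255}], an indexed pixel row [{0}, {1}, {0}] unpacks
# to [{255, 0, 0}, {0, 0, 255}, {255, 0, 0}]; pack performs the inverse.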
defp build_palette_map(%PNG{pixels: pixels, background: background} = image) do
palette_map =
Enum.reduce(pixels, %{}, fn pixel_row, palette_map ->
Enum.reduce(pixel_row, palette_map, fn pixel, palette_map ->
# The size will be the index in the pixel array
Map.put_new(palette_map, pixel, map_size(palette_map))
end)
end)
# If there is a background, it could be a color that never appears on the
# image. If so, we have to ensure it is in the palette
palette_map =
if background do
Map.put_new(palette_map, background, map_size(palette_map))
else
palette_map
end
%PNG{image | palette: palette_map}
end
defp replace_pixels(%PNG{pixels: pixels, palette: palette_map} = image) do
%PNG{image | pixels: dereferenced_pixels(pixels, palette_map)}
end
defp dereferenced_pixels(pixels, palette_map) do
Enum.reduce(pixels, [], fn row, new_pixels ->
new_row =
Enum.reduce(row, [], fn pixel, new_row ->
# We wrap the index in a tuple since it is a pixel
[{Map.get(palette_map, pixel)} | new_row]
end)
|> Enum.reverse()
[new_row | new_pixels]
end)
|> Enum.reverse()
end
# At this point, `palette_map` is a map between pixel values and their index.
# Now, to place them into an list.
defp convert_palette_map_to_array(%PNG{palette: palette_map} = image) do
%PNG{image | palette: palette_array_from_map(palette_map)}
end
defp palette_array_from_map(palette_map) do
Enum.reduce(palette_map, :array.new(map_size(palette_map)), fn {pixel, index},
palette_array ->
:array.set(index, pixel, palette_array)
end)
end
defp extract_pixels_from_palette(palette_rows, palette) do
extract_pixels_from_palette(palette_rows, palette, [])
end
# In the base case, we will have a reversed list of lists. Each list refers to
# a row of pixels.
defp extract_pixels_from_palette([], _palette, extracted_palette) do
Enum.reverse(extracted_palette)
end
defp extract_pixels_from_palette([palette_row | palette_rows], palette, extracted_palette) do
row_pixels = extract_pixels_from_palette_row(palette_row, palette, [])
extract_pixels_from_palette(palette_rows, palette, [row_pixels | extracted_palette])
end
# In the base case, we are left with a row of pixels. Reverse them and we're
# finished.
defp extract_pixels_from_palette_row([], _palette, pixels) do
Enum.reverse(pixels)
end
defp extract_pixels_from_palette_row([{palette_index} | palette_indices], palette, pixels) do
pixel = :array.get(palette_index, palette)
extract_pixels_from_palette_row(palette_indices, palette, [pixel | pixels])
end
end
|
lib/imagineer/image/png/palette.ex
| 0.837121
| 0.639835
|
palette.ex
|
starcoder
|
defmodule Timex.Date do
@moduledoc """
Module for working with dates.
Functions that produce time intervals use UNIX epoch (or simply Epoch) as the
default reference date. Epoch is defined as UTC midnight of January 1, 1970.
Time intervals in this module don't account for leap seconds.
Supported tasks:
* get current date in the desired time zone
* convert dates between time zones and time units
* introspect dates to find out weekday, week number, number of days in a given month, etc.
* parse dates from string
* compare dates
* date arithmetic
"""
require Record
alias __MODULE__, as: Date
alias Timex.DateTime, as: DateTime
alias Timex.Time, as: Time
alias Timex.Timezone, as: Timezone
alias Timex.TimezoneInfo, as: TimezoneInfo
# Date types
@type year :: non_neg_integer
@type month :: 1..12
@type day :: 1..31
@type daynum :: 1..366
@type weekday :: 1..7
@type weeknum :: 1..53
@type num_of_days :: 28..31
# Time types
@type hour :: 0..23
@type minute :: 0..59
@type second :: 0..59
@type timestamp :: {megaseconds, seconds, microseconds }
@type megaseconds :: non_neg_integer
@type seconds :: non_neg_integer
@type microseconds :: non_neg_integer
# Complex types
@type time :: { hour, minute, second }
@type date :: { year, month, day }
@type datetime :: { date, time }
@type dtz :: { datetime, TimezoneInfo.t }
@type iso_triplet :: { year, weeknum, weekday }
# Constants
@valid_months 1..12
@million 1_000_000
@weekdays [
{"Monday", 1}, {"Tuesday", 2}, {"Wednesday", 3}, {"Thursday", 4},
{"Friday", 5}, {"Saturday", 6}, {"Sunday", 7}
]
@months [
{"January", 1}, {"February", 2}, {"March", 3},
{"April", 4}, {"May", 5}, {"June", 6},
{"July", 7}, {"August", 8}, {"September", 9},
{"October", 10}, {"November", 11}, {"December", 12}
]
# {is_leap_year, month, shift}
@ordinal_day_map [
{true, 1, 0}, {false, 1, 0},
{true, 2, 31}, {false, 2, 31},
{true, 3, 60}, {false, 3, 59},
{true, 4, 91}, {false, 4, 90},
{true, 5, 121}, {false, 5, 120},
{true, 6, 152}, {false, 6, 151},
{true, 7, 182}, {false, 7, 181},
{true, 8, 213}, {false, 8, 212},
{true, 9, 244}, {false, 9, 243},
{true, 10, 274}, {false, 10, 273},
{true, 11, 305}, {false, 11, 304},
{true, 12, 335}, {false, 12, 334}
]
@doc """
Get a TimezoneInfo object for the specified offset or name.
When the offset or name is invalid, an exception is raised.
## Examples
iex> date = #{__MODULE__}.from({2015, 4, 12})
iex> tz = #{__MODULE__}.timezone(:utc, date)
iex> tz.full_name
"UTC"
iex> date = #{__MODULE__}.from({2015, 4, 12})
iex> tz = #{__MODULE__}.timezone("America/Chicago", date)
iex> {tz.full_name, tz.abbreviation}
{"America/Chicago", "CDT"}
iex> date = #{__MODULE__}.from({2015, 4, 12})
iex> tz = #{__MODULE__}.timezone(+2, date)
iex> {tz.full_name, tz.abbreviation}
{"Etc/GMT-2", "GMT-2"}
"""
@spec timezone(:local | :utc | number | binary, DateTime.t | nil) :: TimezoneInfo.t
def timezone(:local, {{_,_,_},{_,_,_}}=datetime), do: Timezone.local(construct(datetime))
def timezone(:local, %DateTime{}=date), do: Timezone.local(date)
def timezone(:utc, _), do: %TimezoneInfo{}
defdelegate timezone(name, datetime), to: Timezone, as: :get
@doc """
Get current date.
## Examples
> #{__MODULE__}.now
%Timex.DateTime{year: 2015, month: 6, day: 26, hour: 23, minute: 56, second: 12}
"""
@spec now() :: DateTime.t
def now do
construct(calendar_universal_time(), %TimezoneInfo{})
end
@doc """
Get the current date, in a specific timezone.
## Examples
iex> %Timex.DateTime{timezone: tz} = #{__MODULE__}.now("America/Chicago")
iex> tz.abbreviation in ["CST", "CDT"]
true
iex> tz.full_name === "America/Chicago"
true
"""
@spec now(binary) :: DateTime.t
def now(tz) when is_binary(tz) do
Timezone.convert(now(), tz)
end
@doc """
Get representation of the current date in seconds or days since Epoch.
See convert/2 for converting arbitrary dates to various time units.
## Examples
> #{__MODULE__}.now(:secs)
1363439013
> #{__MODULE__}.now(:days)
15780
"""
@spec now(:secs | :days) :: integer
def now(:secs), do: to_secs(now())
def now(:days), do: to_days(now())
@doc """
Get current local date.
See also `universal/0`.
## Examples
> #{__MODULE__}.local
%Timex.DateTime{year: 2013, month: 3, day: 16, hour: 11, minute: 1, second: 12, timezone: %TimezoneInfo{}}
"""
@spec local() :: DateTime.t
def local do
date = construct(calendar_local_time())
tz = Timezone.local(date)
%{date | :timezone => tz}
end
@doc """
Convert a date to your local timezone.
See also `universal/1`.
## Examples
Date.now |> Date.local
"""
@spec local(date :: DateTime.t) :: DateTime.t
def local(%DateTime{:timezone => tz} = date) do
case Timezone.local(date) do
^tz -> date
new_zone -> Timezone.convert(date, new_zone)
end
end
@doc """
Get the current datetime in UTC.
See also `local/0`. Delegates to `now/0`, since they are identical in behavior
## Examples
> #{__MODULE__}.universal
%Timex.DateTime{timezone: %Timex.TimezoneInfo{full_name: "UTC"}}
"""
@spec universal() :: DateTime.t
defdelegate universal, to: __MODULE__, as: :now
@doc """
Convert a date to UTC
See also `local/1`.
## Examples
> localdate = Date.local
%Timex.DateTime{hour: 5, timezone: %Timex.TimezoneInfo{full_name: "America/Chicago"}}
> localdate |> Date.universal
%Timex.DateTime{hour: 10, timezone: %Timex.TimezoneInfo{full_name: "UTC"}}
"""
@spec universal(DateTime.t) :: DateTime.t
def universal(date), do: Timezone.convert(date, %TimezoneInfo{})
@doc """
The first day of year zero (calendar module's default reference date).
See also `epoch/0`.
## Examples
iex> date = %Timex.DateTime{year: 0, month: 1, day: 1, timezone: %Timex.TimezoneInfo{}}
iex> #{__MODULE__}.zero === date
true
"""
@spec zero() :: DateTime.t
def zero, do: construct({0, 1, 1}, {0, 0, 0}, %TimezoneInfo{})
@doc """
The date of Epoch, used as default reference date by this module
and also by the Time module.
See also `zero/0`.
## Examples
iex> date = %Timex.DateTime{year: 1970, month: 1, day: 1, timezone: %Timex.TimezoneInfo{}}
iex> #{__MODULE__}.epoch === date
true
"""
@spec epoch() :: DateTime.t
def epoch, do: construct({1970, 1, 1}, {0, 0, 0}, %TimezoneInfo{})
@doc """
Time interval between year 0 and Epoch, expressed in the specified units.
## Examples
iex> #{__MODULE__}.epoch(:timestamp)
{0,0,0}
iex> #{__MODULE__}.epoch(:secs)
62167219200
"""
@spec epoch(:timestamp) :: timestamp
@spec epoch(:secs | :days) :: integer
def epoch(:timestamp), do: to_timestamp(epoch())
def epoch(:secs), do: to_secs(epoch(), :zero)
@doc """
Construct a date from Erlang's date or datetime value.
You may specify the date's time zone as the second argument. If the argument
is omitted, UTC time zone is assumed.
When passing {year, month, day} as the first argument, the resulting date
will indicate midnight of that day in the specified timezone (UTC by
default).
NOTE: When using `from` the input value is normalized to prevent invalid
dates from being accidentally introduced. Use `set` with `validate: false`,
or create the %DateTime{} by hand if you do not want normalization.
## Examples
> Date.from(:erlang.universaltime) #=> %DateTime{...}
> Date.from(:erlang.localtime) #=> %DateTime{...}
> Date.from(:erlang.localtime, :local) #=> %DateTime{...}
> Date.from({2014,3,16}, "America/Chicago") #=> %DateTime{...}
"""
@spec from(datetime | date) :: DateTime.t
@spec from(datetime | date, :utc | :local | TimezoneInfo.t | binary) :: DateTime.t
def from({y,m,d} = date) when is_integer(y) and is_integer(m) and is_integer(d), do: from(date, :utc)
def from({{_,_,_},{_,_,_}} = datetime), do: from(datetime, :utc)
def from({{_,_,_},{_,_,_,_}} = datetime), do: from(datetime, :utc)
def from({_,_,_} = date, :utc), do: construct({date, {0,0,0}}, %TimezoneInfo{})
def from({{_,_,_},{_,_,_}} = datetime, :utc), do: construct(datetime, %TimezoneInfo{})
def from({{_,_,_},{_,_,_,_}} = datetime, :utc), do: construct(datetime, %TimezoneInfo{})
def from({_,_,_} = date, :local), do: from({date, {0,0,0}}, timezone(:local, {date, {0,0,0}}))
def from({{_,_,_},{_,_,_}} = datetime, :local), do: from(datetime, timezone(:local, datetime))
def from({{_,_,_}=date,{h,min,sec,_}} = datetime, :local),do: from(datetime, timezone(:local, {date,{h, min, sec}}))
def from({_,_,_} = date, %TimezoneInfo{} = tz), do: from({date, {0,0,0}}, tz)
def from({{_,_,_},{_,_,_}} = datetime, %TimezoneInfo{} = tz), do: construct(datetime, tz)
def from({{_,_,_},{_,_,_,_}} = datetime, %TimezoneInfo{} = tz), do: construct(datetime, tz)
def from({_,_,_} = date, tz) when is_binary(tz), do: from({date, {0, 0, 0}}, tz)
def from({{_,_,_}=d,{h,m,s}}, tz) when is_binary(tz), do: from({d,{h,m,s,0}},tz)
def from({{_,_,_}=date,{h,min,sec,_}} = datetime, tz) when is_binary(tz) do
case timezone(tz, {date, {h,min,sec}}) do
%TimezoneInfo{} = tzinfo ->
construct(datetime, tzinfo)
{:error, _} = error ->
error
end
end
@doc """
Construct a date from a time interval since Epoch or year 0.
UTC time zone is assumed. This assumption can be modified by setting the desired
time zone using set/2 after the date is constructed.
## Examples
> Date.from(13, :secs)
> Date.from(13, :days, :zero)
> Date.from(Time.now, :timestamp)
"""
@spec from(timestamp, :timestamp) :: DateTime.t
@spec from(number, :us | :secs | :days) :: DateTime.t
@spec from(timestamp, :timestamp, :epoch | :zero) :: DateTime.t
@spec from(number, :us | :secs | :days, :epoch | :zero) :: DateTime.t
def from(value, type, reference \\ :epoch)
def from({mega, sec, us}, :timestamp, :epoch), do: from((mega * @million + sec) * @million + us, :us)
def from({mega, sec, us}, :timestamp, :zero) do
from((mega * @million + sec) * @million + us, :us, :zero)
end
def from(us, :us, :epoch) do
construct(calendar_gregorian_microseconds_to_datetime(us, epoch(:secs)), %TimezoneInfo{})
end
def from(us, :us, :zero) do
construct(calendar_gregorian_microseconds_to_datetime(us, 0), %TimezoneInfo{})
end
def from(sec, :secs, :epoch) do
construct(:calendar.gregorian_seconds_to_datetime(trunc(sec) + epoch(:secs)), %TimezoneInfo{})
end
def from(sec, :secs, :zero) do
construct(:calendar.gregorian_seconds_to_datetime(trunc(sec)), %TimezoneInfo{})
end
def from(days, :days, :epoch) do
construct(:calendar.gregorian_days_to_date(trunc(days) + to_days(epoch(), :zero)), {0,0,0}, %TimezoneInfo{})
end
def from(days, :days, :zero) do
construct(:calendar.gregorian_days_to_date(trunc(days)), {0,0,0}, %TimezoneInfo{})
end
@doc """
Convert a date to a timestamp value consumable by the Time module.
See also `diff/2` if you want to specify an arbitrary reference date.
## Examples
iex> #{__MODULE__}.epoch |> #{__MODULE__}.to_timestamp
{0,0,0}
"""
@spec to_timestamp(DateTime.t) :: timestamp
@spec to_timestamp(DateTime.t, :epoch | :zero) :: timestamp
def to_timestamp(date, reference \\ :epoch)
def to_timestamp(%DateTime{:ms => ms} = date, :epoch) do
sec = to_secs(date)
{ div(sec, @million), rem(sec, @million), ms * 1000 }
end
def to_timestamp(%DateTime{:ms => ms} = date, :zero) do
sec = to_secs(date, :zero)
{ div(sec, @million), rem(sec, @million), ms * 1000 }
end
@doc """
Convert a date to an integer number of seconds since Epoch or year 0.
With `to_secs/3`, you can also specify an option `utc: false | true`,
which controls whether the DateTime is converted to UTC prior to calculating
the number of seconds from the reference date. By default, UTC conversion is
enabled.
See also `diff/2` if you want to specify an arbitrary reference date.
## Examples
iex> #{__MODULE__}.from({{1999, 1, 2}, {12,13,14}}) |> #{__MODULE__}.to_secs
915279194
"""
@spec to_secs(DateTime.t) :: integer
@spec to_secs(DateTime.t, :epoch | :zero) :: integer
@spec to_secs(DateTime.t, :epoch | :zero, [utc: false | true]) :: integer
def to_secs(date, reference \\ :epoch, options \\ [utc: true])
def to_secs(date, :epoch, utc: true), do: to_secs(date, :zero) - epoch(:secs)
def to_secs(date, :zero, utc: true) do
offset = Timex.Timezone.diff(date, %TimezoneInfo{})
case offset do
0 -> utc_to_secs(date)
_ -> utc_to_secs(date) + ( 60 * offset )
end
end
def to_secs(date, :epoch, utc: false), do: to_secs(date, :zero, utc: false) - epoch(:secs)
def to_secs(date, :zero, utc: false), do: utc_to_secs(date)
defp utc_to_secs(%DateTime{:year => y, :month => m, :day => d, :hour => h, :minute => min, :second => s}) do
:calendar.datetime_to_gregorian_seconds({{y, m, d}, {h, min, s}})
end
@doc """
Convert the date to an integer number of days since Epoch or year 0.
See also `diff/2` if you want to specify an arbitrary reference date.
## Examples
iex> #{__MODULE__}.from({1970, 1, 15}) |> #{__MODULE__}.to_days
14
"""
@spec to_days(DateTime.t) :: integer
@spec to_days(DateTime.t, :epoch | :zero) :: integer
def to_days(date, reference \\ :epoch)
def to_days(date, :epoch), do: to_days(date, :zero) - to_days(epoch(), :zero)
def to_days(%DateTime{:year => y, :month => m, :day => d}, :zero) do
:calendar.date_to_gregorian_days({y, m, d})
end
@doc """
Gets the current century
## Examples
iex> #{__MODULE__}.century
21
"""
@spec century() :: non_neg_integer
def century(), do: Date.now |> century
@doc """
Given a date, get the century this date is in.
## Examples
iex> #{__MODULE__}.now |> #{__MODULE__}.century
21
"""
@spec century(DateTime.t) :: non_neg_integer
def century(%DateTime{:year => y}) do
base_century = div(y, 100)
years_past = rem(y, 100)
cond do
years_past == 0 -> base_century
true -> base_century + 1
end
end
@doc """
Return weekday number (as defined by ISO 8601) of the specified date.
## Examples
iex> #{__MODULE__}.epoch |> #{__MODULE__}.weekday
4 # (i.e. Thursday)
"""
@spec weekday(DateTime.t) :: weekday
def weekday(%DateTime{:year => y, :month => m, :day => d}), do: :calendar.day_of_the_week({y, m, d})
@doc """
Returns the ordinal day number of the date.
## Examples
iex> #{__MODULE__}.from({{2015,6,26},{0,0,0}}) |> #{__MODULE__}.day
177
"""
@spec day(DateTime.t) :: daynum
def day(date) do
start_of_year = date |> set([month: 1, day: 1])
1 + diff(start_of_year, date, :days)
end
@doc """
Convert an ISO ordinal day number to the day it represents in the
current year. If no date is provided, a new one will be created, with
the time set to 0:00:00, in UTC. Otherwise, the date provided will
have its month and day reset to the date represented by the ordinal day.
## Examples
# Creating a DateTime from the given day
iex> expected = #{__MODULE__}.from({{2015, 6, 29}, {0,0,0}})
iex> (#{__MODULE__}.from_iso_day(180) === expected)
true
# Shifting a DateTime to the given day
iex> date = #{__MODULE__}.from({{2015,6,26}, {12,0,0}})
iex> expected = #{__MODULE__}.from({{2015, 6, 29}, {12,0,0}})
iex> (#{__MODULE__}.from_iso_day(180, date) === expected)
true
"""
@spec from_iso_day(non_neg_integer, DateTime.t | nil) :: DateTime.t
def from_iso_day(day, date \\ nil)
def from_iso_day(day, nil) do
{{year,_,_},_} = :calendar.universal_time
datetime = iso_day_to_date_tuple(year, day)
Date.from(datetime)
end
def from_iso_day(day, %DateTime{year: year} = date) do
{year, month, day_of_month} = iso_day_to_date_tuple(year, day)
%{date | :year => year, :month => month, :day => day_of_month}
end
defp iso_day_to_date_tuple(year, day) do
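# Normalize ordinal days that fall outside the current year (borrowing from
# or carrying into the adjacent year), then scan @ordinal_day_map for the
# month whose cumulative day offset the ordinal day falls within.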
{year, day} = cond do
day < 1 && :calendar.is_leap_year(year - 1) -> {year - 1, day + 366}
day < 1 -> {year - 1, day + 365}
day > 366 && :calendar.is_leap_year(year) -> {year, day - 366}
day > 365 -> {year, day - 365}
true -> {year, day}
end
{_, month, first_of_month} = Enum.take_while(@ordinal_day_map, fn {_, _, oday} -> oday <= day end) |> List.last
{year, month, day - first_of_month}
end
@doc """
Return a pair {year, week number} (as defined by ISO 8601) that date falls
on.
## Examples
iex> #{__MODULE__}.epoch |> #{__MODULE__}.iso_week
{1970,1}
"""
@spec iso_week(DateTime.t) :: {year, weeknum}
def iso_week(%DateTime{:year => y, :month => m, :day => d}) do
:calendar.iso_week_number({y, m, d})
end
def iso_week(date), do: iso_week(from(date, :utc))
@doc """
Get the day of the week corresponding to the given name.
## Examples
iex> #{__MODULE__}.day_to_num("Monday")
1
iex> #{__MODULE__}.day_to_num("monday")
1
iex> #{__MODULE__}.day_to_num("Mon")
1
iex> #{__MODULE__}.day_to_num("mon")
1
iex> #{__MODULE__}.day_to_num(:mon)
1
"""
@spec day_to_num(binary | atom()) :: integer
@weekdays |> Enum.each fn {day_name, day_num} ->
lower = day_name |> String.downcase
abbr_cased = day_name |> String.slice(0..2)
abbr_lower = lower |> String.slice(0..2)
symbol = abbr_lower |> String.to_atom
day_quoted = quote do
def day_to_num(unquote(day_name)), do: unquote(day_num)
def day_to_num(unquote(lower)), do: unquote(day_num)
def day_to_num(unquote(abbr_cased)), do: unquote(day_num)
def day_to_num(unquote(abbr_lower)), do: unquote(day_num)
def day_to_num(unquote(symbol)), do: unquote(day_num)
end
Module.eval_quoted __MODULE__, day_quoted, [], __ENV__
end
# Fallback clause for unrecognized day names
def day_to_num(x), do: {:error, "Invalid day name: #{x}"}
@doc """
Get the name of the day corresponding to the provided number
## Examples
iex> #{__MODULE__}.day_name(1)
"Monday"
iex> #{__MODULE__}.day_name(0)
{:error, "Invalid day num: 0"}
"""
@spec day_name(weekday) :: binary
@weekdays |> Enum.each fn {name, day_num} ->
def day_name(unquote(day_num)), do: unquote(name)
end
def day_name(x), do: {:error, "Invalid day num: #{x}"}
@doc """
Get the short name of the day corresponding to the provided number
## Examples
iex> #{__MODULE__}.day_shortname(1)
"Mon"
iex> #{__MODULE__}.day_shortname(0)
{:error, "Invalid day num: 0"}
"""
@spec day_shortname(weekday) :: binary
@weekdays |> Enum.each fn {name, day_num} ->
def day_shortname(unquote(day_num)), do: String.slice(unquote(name), 0..2)
end
def day_shortname(x), do: {:error, "Invalid day num: #{x}"}
@doc """
Get the number of the month corresponding to the given name.
## Examples
iex> #{__MODULE__}.month_to_num("January")
1
iex> #{__MODULE__}.month_to_num("january")
1
iex> #{__MODULE__}.month_to_num("Jan")
1
iex> #{__MODULE__}.month_to_num("jan")
1
iex> #{__MODULE__}.month_to_num(:jan)
1
"""
@spec month_to_num(binary) :: integer
@months |> Enum.each fn {month_name, month_num} ->
lower = month_name |> String.downcase
abbr_cased = month_name |> String.slice(0..2)
abbr_lower = lower |> String.slice(0..2)
symbol = abbr_lower |> String.to_atom
full_chars = month_name |> String.to_char_list
abbr_chars = abbr_cased |> String.to_char_list
month_quoted = quote do
def month_to_num(unquote(month_name)), do: unquote(month_num)
def month_to_num(unquote(lower)), do: unquote(month_num)
def month_to_num(unquote(abbr_cased)), do: unquote(month_num)
def month_to_num(unquote(abbr_lower)), do: unquote(month_num)
def month_to_num(unquote(symbol)), do: unquote(month_num)
def month_to_num(unquote(full_chars)), do: unquote(month_num)
def month_to_num(unquote(abbr_chars)), do: unquote(month_num)
end
Module.eval_quoted __MODULE__, month_quoted, [], __ENV__
end
# Fallback clause for unrecognized month names
def month_to_num(x), do: {:error, "Invalid month name: #{x}"}
@doc """
Get the name of the month corresponding to the provided number
## Examples
iex> #{__MODULE__}.month_name(1)
"January"
iex> #{__MODULE__}.month_name(0)
{:error, "Invalid month num: 0"}
"""
@spec month_name(month) :: binary
@months |> Enum.each fn {name, month_num} ->
def month_name(unquote(month_num)), do: unquote(name)
end
def month_name(x), do: {:error, "Invalid month num: #{x}"}
@doc """
Get the short name of the month corresponding to the provided number
## Examples
iex> #{__MODULE__}.month_shortname(1)
"Jan"
iex> #{__MODULE__}.month_shortname(0)
{:error, "Invalid month num: 0"}
"""
@spec month_shortname(month) :: binary
@months |> Enum.each fn {name, month_num} ->
def month_shortname(unquote(month_num)), do: String.slice(unquote(name), 0..2)
end
def month_shortname(x), do: {:error, "Invalid month num: #{x}"}
@doc """
Return a 3-tuple {year, week number, weekday} for the given date.
## Examples
iex> #{__MODULE__}.epoch |> #{__MODULE__}.iso_triplet
{1970, 1, 4}
"""
@spec iso_triplet(DateTime.t) :: {year, weeknum, weekday}
def iso_triplet(%DateTime{} = datetime) do
{ iso_year, iso_week } = iso_week(datetime)
{ iso_year, iso_week, weekday(datetime) }
end
@doc """
Given an ISO triplet `{year, week number, weekday}`, convert it to a
DateTime struct.
## Examples
iex> expected = #{__MODULE__}.from({2014, 1, 28})
iex> #{__MODULE__}.from_iso_triplet({2014, 5, 2}) === expected
true
"""
@spec from_iso_triplet(iso_triplet) :: DateTime.t
def from_iso_triplet({year, week, weekday}) do
{_, _, jan4weekday} = Date.from({year, 1, 4}) |> iso_triplet
offset = jan4weekday + 3
ordinal_date = ((week * 7) + weekday) - offset
datetime = iso_day_to_date_tuple(year, ordinal_date)
Date.from(datetime)
end
@doc """
Return the number of days in the month which the date falls on.
## Examples
iex> #{__MODULE__}.epoch |> #{__MODULE__}.days_in_month
31
"""
@spec days_in_month(DateTime.t) :: num_of_days
@spec days_in_month(year, month) :: num_of_days
def days_in_month(%DateTime{:year => year, :month => month}) when year >= 0 and month in @valid_months do
:calendar.last_day_of_the_month(year, month)
end
def days_in_month(year, month) when year >= 0 and month in @valid_months do
:calendar.last_day_of_the_month(year, month)
end
def days_in_month(year, month) do
valid_year? = year >= 0
valid_month? = month in @valid_months
cond do
!valid_year? && valid_month? ->
raise ArgumentError, message: "Invalid year passed to days_in_month/2: #{year}"
valid_year? && !valid_month? ->
raise ArgumentError, message: "Invalid month passed to days_in_month/2: #{month}"
true ->
raise ArgumentError, message: "Invalid year/month pair passed to days_in_month/2: {#{year}, #{month}}"
end
end
@doc """
Return a boolean indicating whether the given year is a leap year. You may
pass a date or a year number.
## Examples
iex> #{__MODULE__}.epoch |> #{__MODULE__}.is_leap?
false
iex> #{__MODULE__}.is_leap?(2012)
true
"""
@spec is_leap?(DateTime.t | year) :: boolean
def is_leap?(year) when is_integer(year), do: :calendar.is_leap_year(year)
def is_leap?(%DateTime{:year => year}), do: is_leap?(year)
@doc """
Return a boolean indicating whether the given date is valid.
## Examples
iex> #{__MODULE__}.from({{1,1,1}, {1,1,1}}) |> #{__MODULE__}.is_valid?
true
iex> %Timex.DateTime{} |> #{__MODULE__}.set([month: 13, validate: false]) |> #{__MODULE__}.is_valid?
false
iex> %Timex.DateTime{} |> #{__MODULE__}.set(hour: -1) |> #{__MODULE__}.is_valid?
false
"""
@spec is_valid?(dtz | DateTime.t) :: boolean
def is_valid?({date, time, tz}) do
:calendar.valid_date(date) and is_valid_time?(time) and is_valid_tz?(tz)
end
def is_valid?(%DateTime{:year => y, :month => m, :day => d, :hour => h, :minute => min, :second => sec, :timezone => tz}) do
:calendar.valid_date({y,m,d}) and is_valid_time?({h,min,sec}) and is_valid_tz?(tz)
end
defp is_valid_time?({hour,min,sec}) do
hour >= 0 and hour < 24 and min >= 0 and min < 60 and sec >= 0 and sec < 60
end
defp is_valid_tz?(%TimezoneInfo{:full_name => tzname}), do: Timezone.exists?(tzname)
defp is_valid_tz?(_), do: false
@doc """
Produce a valid date from a possibly invalid one.
All date's components will be clamped to the minimum or maximum valid value.
## Examples
iex> expected = #{__MODULE__}.from({{1, 12, 31}, {0, 59, 59}}, :local)
iex> date = {{1,12,31},{0,59,59}}
iex> localtz = Timex.Timezone.local(date)
iex> result = {{1,12,31},{0,59,59}, localtz} |> #{__MODULE__}.normalize |> #{__MODULE__}.local
iex> result === expected
true
"""
@spec normalize(datetime | dtz | {date, time, TimezoneInfo.t}) :: DateTime.t
def normalize({{_,_,_}=date, time}), do: normalize({date, time, %TimezoneInfo{}})
def normalize({{_,_,_}=date, time, tz}) do
construct(do_normalize(:date, date), do_normalize(:time, time), tz)
end
@spec do_normalize(atom(), term) :: DateTime.t
defp do_normalize(:date, {year, month, day}) do
year = do_normalize(:year, year)
month = do_normalize(:month, month)
day = do_normalize(:day, {year, month, day})
{year, month, day}
end
defp do_normalize(:year, year) when year < 0, do: 0
defp do_normalize(:year, year), do: year
defp do_normalize(:month, month) do
cond do
month < 1 -> 1
month > 12 -> 12
true -> month
end
end
defp do_normalize(:time, {hour,min,sec}) do
hour = do_normalize(:hour, hour)
min = do_normalize(:minute, min)
sec = do_normalize(:second, sec)
{hour, min, sec}
end
defp do_normalize(:time, {hour,min,sec,ms}) do
{h,m,s} = do_normalize(:time, {hour,min,sec})
msecs = do_normalize(:ms, ms)
{h, m, s, msecs}
end
defp do_normalize(:hour, hour) do
cond do
hour < 0 -> 0
hour > 23 -> 23
true -> hour
end
end
defp do_normalize(:minute, min) do
cond do
min < 0 -> 0
min > 59 -> 59
true -> min
end
end
defp do_normalize(:second, sec) do
cond do
sec < 0 -> 0
sec > 59 -> 59
true -> sec
end
end
defp do_normalize(:ms, ms) do
cond do
ms < 0 -> 0
ms > 999 -> 999
true -> ms
end
end
defp do_normalize(:timezone, tz), do: tz
defp do_normalize(:day, {year, month, day}) do
year = do_normalize(:year, year)
month = do_normalize(:month, month)
ndays = days_in_month(year, month)
cond do
day < 1 -> 1
day > ndays -> ndays
true -> day
end
end
@doc """
Return a new date with the specified fields replaced by new values.
Values are automatically validated and clamped to good values by default. If
you wish to skip validation, perhaps for performance reasons, pass `validate: false`.
Values are applied in order, so if you pass `[datetime: dt, date: d]`, the date value
from `date` will override `datetime`'s date value.
## Examples
iex> now = #{__MODULE__}.epoch
iex> #{__MODULE__}.set(now, date: {1,1,1})
%Timex.DateTime{year: 1, month: 1, day: 1, hour: 0, minute: 0, second: 0, timezone: %Timex.TimezoneInfo{}, calendar: :gregorian}
iex> #{__MODULE__}.set(now, hour: 8)
%Timex.DateTime{year: 1970, month: 1, day: 1, hour: 8, minute: 0, second: 0, timezone: %Timex.TimezoneInfo{}, calendar: :gregorian}
iex> #{__MODULE__}.set(now, [date: {2013,3,26}, hour: 30])
%Timex.DateTime{year: 2013, month: 3, day: 26, hour: 23, minute: 0, second: 0, timezone: %Timex.TimezoneInfo{}, calendar: :gregorian}
iex> #{__MODULE__}.set(now, [
...> datetime: {{2013,3,26}, {12,30,0}},
...> date: {2014,4,12}
...>])
%Timex.DateTime{year: 2014, month: 4, day: 12, hour: 12, minute: 30, second: 0, timezone: %Timex.TimezoneInfo{}, calendar: :gregorian}
iex> #{__MODULE__}.set(now, [minute: 74, validate: false])
%Timex.DateTime{year: 1970, month: 1, day: 1, hour: 0, minute: 74, second: 0, timezone: %Timex.TimezoneInfo{}, calendar: :gregorian}
"""
@spec set(DateTime.t, list({atom(), term})) :: DateTime.t
def set(%DateTime{} = date, options) do
validate? = case options |> List.keyfind(:validate, 0, true) do
{:validate, bool} -> bool
_ -> true
end
Enum.reduce options, date, fn option, result ->
case option do
{:validate, _} -> result
{:datetime, {{y, m, d}, {h, min, sec}}} ->
if validate? do
%{result |
:year => do_normalize(:year, y),
:month => do_normalize(:month, m),
:day => do_normalize(:day, {y,m,d}),
:hour => do_normalize(:hour, h),
:minute => do_normalize(:minute, min),
:second => do_normalize(:second, sec)
}
else
%{result | :year => y, :month => m, :day => d, :hour => h, :minute => min, :second => sec}
end
{:date, {y, m, d}} ->
if validate? do
{yn,mn,dn} = do_normalize(:date, {y,m,d})
%{result | :year => yn, :month => mn, :day => dn}
else
%{result | :year => y, :month => m, :day => d}
end
{:time, {h, m, s}} ->
if validate? do
%{result | :hour => do_normalize(:hour, h), :minute => do_normalize(:minute, m), :second => do_normalize(:second, s)}
else
%{result | :hour => h, :minute => m, :second => s}
end
{:day, d} ->
if validate? do
%{result | :day => do_normalize(:day, {result.year, result.month, d})}
else
%{result | :day => d}
end
{:timezone, tz} ->
tz = case tz do
%TimezoneInfo{} -> tz
_ -> Timezone.get(tz, result)
end
if validate? do
%{result | :timezone => do_normalize(:timezone, tz)}
else
%{result | :timezone => tz}
end
{name, val} when name in [:year, :month, :hour, :minute, :second, :ms] ->
if validate? do
Map.put(result, name, do_normalize(name, val))
else
Map.put(result, name, val)
end
{option_name, _} -> raise "Invalid option passed to Date.set: #{option_name}"
end
end
end
@doc """
Compare two dates returning one of the following values:
* `-1` -- the first date comes before the second one
* `0` -- both arguments represent the same date when coalesced to the same timezone.
* `1` -- the first date comes after the second one
You can optionally specify a granularity of any of
:years :months :weeks :days :hours :mins :secs :timestamp
and the dates will be compared with the corresponding accuracy.
The default granularity is :secs.
## Examples
iex> date1 = #{__MODULE__}.from({2014, 3, 4})
iex> date2 = #{__MODULE__}.from({2015, 3, 4})
iex> #{__MODULE__}.compare(date1, date2, :years)
-1
iex> #{__MODULE__}.compare(date2, date1, :years)
1
iex> #{__MODULE__}.compare(date1, date1)
0
"""
@spec compare(DateTime.t, DateTime.t | :epoch | :zero | :distant_past | :distant_future) :: -1 | 0 | 1
@spec compare(DateTime.t, DateTime.t, :years | :months | :weeks | :days | :hours | :mins | :secs | :timestamp) :: -1 | 0 | 1
def compare(date, :epoch), do: compare(date, epoch())
def compare(date, :zero), do: compare(date, zero())
def compare(_, :distant_past), do: +1
def compare(_, :distant_future), do: -1
def compare(date, date), do: 0
def compare(a, b), do: compare(a, b, :secs)
def compare( this, other, granularity)
when granularity in [:years, :months, :weeks, :days, :hours, :mins, :secs, :timestamp] do
difference = diff(this, other, granularity)
cond do
difference < 0 -> +1
difference == 0 -> 0
difference > 0 -> -1
end
end
def compare(_, _, _), do: {:error, "Invalid comparison granularity."}
@doc """
Determine if two dates represent the same point in time
## Examples
iex> date1 = #{__MODULE__}.from({2014, 3, 1})
iex> date2 = #{__MODULE__}.from({2014, 3, 1})
iex> #{__MODULE__}.equal?(date1, date2)
true
"""
@spec equal?(DateTime.t, DateTime.t) :: boolean
def equal?(this, other), do: compare(this, other) == 0
@doc """
Calculate time interval between two dates. If the second date comes after the
first one in time, return value will be positive; and negative otherwise.
You must specify one of the following units:
:years :months :weeks :days :hours :mins :secs :timestamp
and the result will be an integer value of those units or a timestamp.
"""
@spec diff(DateTime.t, DateTime.t, :timestamp) :: timestamp
@spec diff(DateTime.t, DateTime.t, :secs | :mins | :hours | :days | :weeks | :months | :years) :: integer
def diff(this, other, :timestamp) do
diff(this, other, :secs) |> Time.from(:secs)
end
def diff(this, other, :secs) do
to_secs(other, :zero) - to_secs(this, :zero)
end
def diff(this, other, :mins) do
(to_secs(other, :zero) - to_secs(this, :zero)) |> div(60)
end
def diff(this, other, :hours) do
(to_secs(other, :zero) - to_secs(this, :zero)) |> div(60) |> div(60)
end
def diff(this, other, :days) do
to_days(other, :zero) - to_days(this, :zero)
end
def diff(this, other, :weeks) do
# TODO: think of a more accurate method
diff(this, other, :days) |> div(7)
end
def diff(this, other, :months) do
%DateTime{:year => y1, :month => m1} = universal(this)
%DateTime{:year => y2, :month => m2} = universal(other)
((y2 - y1) * 12) + (m2 - m1)
end
def diff(this, other, :years) do
%DateTime{:year => y1} = universal(this)
%DateTime{:year => y2} = universal(other)
y2 - y1
end
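# Sketch (dates are illustrative): diff is positive when the second date is
# later than the first.
#
#     a = Date.from({2015, 1, 1})
#     b = Date.from({2015, 3, 1})
#     Date.diff(a, b, :months) #=> 2
#     Date.diff(b, a, :months) #=> -2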
@doc """
Add time to a date using a timestamp, i.e. {megasecs, secs, microsecs}
Same as shift(date, timestamp: timestamp).
"""
@spec add(DateTime.t, timestamp) :: DateTime.t
def add(%DateTime{} = date, {mega, sec, _}) do
shift(date, [secs: (mega * @million) + sec])
end
@doc """
Subtract time from a date using a timestamp, i.e. {megasecs, secs, microsecs}
Same as shift(date, timestamp: Time.invert(timestamp)).
"""
@spec subtract(DateTime.t, timestamp) :: DateTime.t
def subtract(%DateTime{} = date, {mega, sec, _}) do
shift(date, [secs: (-mega * @million) - sec])
end
@doc """
A single function for adjusting the date using various units: timestamp,
seconds, minutes, hours, days, weeks, months, years.
When shifting by timestamps, microseconds are ignored.
If the list contains `:month` and at least one other unit, an ArgumentError
is raised (due to ambiguity of such shifts). You can still shift by months
separately.
If `:year` is present, it is applied in the last turn.
The returned date is always valid. If after adding months or years the day
exceeds the maximum number of days in the resulting month, that month's last day
is used.
To prevent day skew, fix up the date after shifting. For example, if you want
to land on the last day of the next month, do the following:
shift(date, months: 1) |> set(day: 31)
Since `set/2` caps values that are out of range, you will get the
correct last day for each month.
## Examples
date = from({{2013,3,5}, {23,23,23}})
local(shift(date, secs: 24*3600*365))
#=> {{2014,3,5}, {23,23,23}}
local(shift(date, secs: -24*3600*(365*2 + 1))) # +1 day for leap year 2012
#=> {{2011,3,5}, {23,23,23}}
local(shift(date, [secs: 13, day: -1, week: 2]))
#=> {{2013,3,18}, {23,23,36}}
"""
@spec shift(DateTime.t, list({atom(), term})) :: DateTime.t
def shift(%DateTime{} = date, [{_, 0}]), do: date
def shift(%DateTime{} = date, [timestamp: {0,0,0}]), do: date
def shift(%DateTime{} = date, [timestamp: timestamp]), do: add(date, timestamp)
def shift(%DateTime{timezone: tz} = date, [{type, value}]) when type in [:secs, :mins, :hours] do
secs = to_secs(date, :epoch, utc: false)
secs = secs + case type do
:secs -> value
:mins -> value * 60
:hours -> value * 3600
end
shifted = from(secs, :secs)
%{shifted | :timezone => tz}
end
def shift(%DateTime{:hour => h, :minute => m, :second => s, :timezone => tz} = date, [days: value]) do
days = to_days(date)
days = days + value
shifted = from(days, :days) |> set([time: {h, m, s}])
%{shifted | :timezone => tz}
end
def shift(%DateTime{} = date, [weeks: value]) do
date |> shift([days: value * 7])
end
def shift(%DateTime{} = date, [months: value]) do
%DateTime{
:year => year, :month => month, :day => day,
:hour => h, :minute => m, :second => s,
:timezone => tz
} = date
month = month + value
# Calculate a valid year value
year = cond do
month == 0 -> year - 1
month < 0 -> year + div(month, 12) - 1
month > 12 -> year + div(month - 1, 12)
true -> year
end
validate({year, round_month(month), day}) |> construct({h, m, s}, tz)
end
def shift(%DateTime{} = date, [years: value]) do
%DateTime{
:year => year, :month => month, :day => day,
:hour => h, :minute => m, :second => s,
:timezone => tz
} = date
validate({year + value, month, day}) |> construct({h, m, s}, tz)
end
Record.defrecordp :shift_rec, secs: 0, days: 0, years: 0
# This clause will match lists with at least 2 values
def shift(%DateTime{} = date, spec) when is_list(spec) do
shift_rec(secs: sec, days: day, years: year)
= Enum.reduce spec, shift_rec(), fn
({:timestamp, {mega, tsec, _}}, shift_rec(secs: sec) = rec) ->
shift_rec(rec, [secs: sec + mega * @million + tsec])
({:secs, tsec}, shift_rec(secs: sec) = rec) ->
shift_rec(rec, [secs: sec + tsec])
({:mins, min}, shift_rec(secs: sec) = rec) ->
shift_rec(rec, [secs: sec + min * 60])
({:hours, hrs}, shift_rec(secs: sec) = rec) ->
shift_rec(rec, [secs: sec + hrs * 3600])
({:days, days}, shift_rec(days: day) = rec) ->
shift_rec(rec, [days: day + days])
({:weeks, weeks}, shift_rec(days: day) = rec) ->
shift_rec(rec, [days: day + weeks * 7])
({:years, years}, shift_rec(years: year) = rec) ->
shift_rec(rec, [years: year + years])
({:months, _}, _) ->
raise ArgumentError, message: ":months not supported in bulk shifts"
end
# The order in which we apply secs and days is not important.
# The year shift must always go last though.
date |> shift([secs: sec]) |> shift([days: day]) |> shift([years: year])
end
# Primary constructor for DateTime objects
defp construct({{_, _, _} = date, {_, _, _} = time}), do: construct(date, time, %TimezoneInfo{})
defp construct({{_, _, _} = date, {_, _, _, _} = time}), do: construct(date, time, %TimezoneInfo{})
defp construct({_,_,_} = date, {_,_,_} = time, nil), do: construct(date, time, %TimezoneInfo{})
defp construct({_,_,_} = date, {_,_,_,_} = time, nil), do: construct(date, time, %TimezoneInfo{})
defp construct(date, {h, min, sec}, %TimezoneInfo{} = tz), do: construct(date, {h, min, sec, 0}, tz)
defp construct({_,_,_}=date, {_,_,_,_}=time, %TimezoneInfo{} = tz) do
{y,m,d} = do_normalize(:date, date)
{h,min,sec,ms} = do_normalize(:time, time)
%DateTime{
year: y, month: m, day: d,
hour: h, minute: min, second: sec,
ms: ms,
timezone: tz
}
end
defp construct({_,_,_}=date, {_,_,_,_}=time, {_, name}) do
{y,m,d} = do_normalize(:date, date)
{h,min,sec,ms} = do_normalize(:time, time)
dt = %DateTime{
year: y, month: m, day: d,
hour: h, minute: min, second: sec,
ms: ms
}
%{dt | :timezone => Timezone.get(name, dt)}
end
defp construct(date, {h, min, sec}, tz), do: construct(date, {h, min, sec, 0}, tz)
defp construct({date, time}, tz), do: construct(date, time, tz)
defp validate({year, month, day}) do
# Check if we got past the last day of the month
max_day = days_in_month(year, month)
day = if day > max_day, do: max_day, else: day
{year, month, day}
end
defp mod(a, b), do: rem(rem(a, b) + b, b)
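# Map an arbitrary (possibly negative or greater-than-12) month count onto
# 1..12, treating a remainder of 0 as December.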
defp round_month(m) do
case mod(m, 12) do
0 -> 12
other -> other
end
end
defp calendar_universal_time() do
{_, _, us} = ts = Timex.Time.now
{d,{h,min,sec}} = :calendar.now_to_universal_time(ts)
{d,{h,min,sec,round(us/1000)}}
end
defp calendar_local_time() do
{_, _, us} = ts = Timex.Time.now
{d,{h,min,sec}} = :calendar.now_to_local_time(ts)
{d,{h,min,sec,round(us/1000)}}
end
defp calendar_gregorian_microseconds_to_datetime(us, addseconds) do
sec = div(us, @million)
u = rem(us, @million)
{d,{h,m,s}} = :calendar.gregorian_seconds_to_datetime(sec + addseconds)
{d,{h,m,s,round(u/1000)}}
end
end
|
lib/date/date.ex
| 0.915646
| 0.577257
|
date.ex
|
starcoder
|
defmodule Membrane.Demo.SimpleElement.Counter do
@moduledoc """
Membrane element counting incoming buffers.
Count of buffers divided by `divisor` (passed via `:input` pad options)
is sent as a `{:counter, number}` notification once every `interval`
(passed via element options).
"""
use Membrane.Filter
def_options interval: [
spec: Membrane.Time.non_neg_t(),
default: 1000,
description: """
Amount of time in milliseconds, telling how often
the count of buffers should be sent and zeroed.
"""
]
def_input_pad :input,
availability: :always,
mode: :pull,
demand_unit: :bytes,
caps: :any,
options: [
divisor: [
type: :integer,
default: 1,
description: "Number by which the counter will be divided before sending notification"
]
]
def_output_pad :output,
availability: :always,
mode: :pull,
caps: :any
@impl true
def handle_init(%__MODULE__{interval: interval}) do
state = %{
interval: interval,
counter: 0
}
{:ok, state}
end
@impl true
def handle_prepared_to_stopped(_ctx, state) do
{{:ok, stop_timer: :timer}, %{state | counter: 0}}
end
@impl true
def handle_prepared_to_playing(_ctx, state) do
{{:ok, start_timer: {:timer, state.interval}}, state}
end
@impl true
def handle_demand(:output, size, :bytes, _context, state) do
{{:ok, demand: {:input, size}}, state}
end
@impl true
def handle_process(:input, %Membrane.Buffer{} = buffer, _context, state) do
state = %{state | counter: state.counter + 1}
{{:ok, buffer: {:output, buffer}}, state}
end
@impl true
def handle_tick(:timer, ctx, state) do
# create the term to send
notification = {
:counter,
div(state.counter, ctx.pads.input.options.divisor)
}
# reset the counter
new_state = %{state | counter: 0}
{{:ok, notify: notification}, new_state}
end
end
|
simple_element/lib/membrane_demo/simple_element/counter.ex
| 0.885049
| 0.548432
|
counter.ex
|
starcoder
|
defmodule Conrex.ContourBuilder do
@moduledoc false
@typep point :: {number, number}
@typep segment :: {point, point}
@typep sequence :: [point] # list of connected points
@typep sequence_list :: [sequence] # collection of disconnected sequences
@spec build_rings([segment]) :: sequence_list
def build_rings(segments) do
segments
|> Enum.reduce([], &add_segment/2)
|> Enum.map(&close_ring/1)
end
# Takes each segment in a list and uses it to extend a sequence of points.
# If one segment point matches the head or tail of a sequence, the sequence is
# extended with the other segment point. If both points match different
# sequences, the sequences are joined. If neither match, the two points are
# added as a new sequence.
defp add_segment({pt_a, pt_b} = segment, sequences) do
case find_segment_matches(segment, sequences) do
# no match, add new sequence
{{nil, false}, {nil, false}} ->
[new_sequence(segment) | sequences]
# A matched, extend with B
{{sequence, should_prepend}, {nil, false}} ->
replace_sequence(sequences, sequence, extend_sequence(sequence, pt_b, should_prepend))
# B matched, extend with A
{{nil, false}, {sequence, should_prepend}} ->
replace_sequence(sequences, sequence, extend_sequence(sequence, pt_a, should_prepend))
# both matched, join sequences
{match_a, match_b} ->
join_sequences(sequences, match_a, match_b)
end
end
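# Worked example (points are illustrative): starting from no sequences,
# segment {p1, p2} creates [p1, p2]; a following {p2, p3} matches the tail
# and extends it to [p1, p2, p3]; {p0, p1} matches the head and prepends,
# giving [p0, p1, p2, p3].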
defp find_segment_matches(segment, sequences) do
initial_matches = {{nil, false}, {nil, false}}
Enum.reduce(sequences, initial_matches, fn sequence, matches -> match_segment(segment, sequence, matches) end)
end
# neither yet found, try both
defp match_segment({pt_a, pt_b}, sequence, {{nil, false}, {nil, false}}), do: {match_point(pt_a, sequence), match_point(pt_b, sequence)}
# A found, try to match B
defp match_segment({_pt_a, pt_b}, sequence, {match_a, {nil, false}}), do: {match_a, match_point(pt_b, sequence)}
# B found, try to match A
defp match_segment({pt_a, _pt_b}, sequence, {{nil, false}, match_b}), do: {match_point(pt_a, sequence), match_b}
# both found, do nothing
defp match_segment(_, _, matches), do: matches
defp match_point(point, sequence) do
cond do
List.first(sequence) == point -> {sequence, true}
List.last(sequence) == point -> {sequence, false}
true -> {nil, false}
end
end
defp new_sequence({pt_a, pt_b}), do: [pt_a, pt_b]
defp extend_sequence(sequence, point, true), do: [point | sequence]
defp extend_sequence(sequence, point, false), do: sequence ++ [point]
defp replace_sequence(sequences, old_sequence, sequence) do
List.replace_at(sequences, Enum.find_index(sequences, fn seq -> seq == old_sequence end), sequence)
end
defp remove_sequence(sequences, sequence) do
Enum.filter(sequences, fn seq -> seq != sequence end)
end
defp join_sequences(sequences, {seq_a, _}, {seq_b, _}) when seq_a == seq_b, do: sequences
defp join_sequences(sequences, {seq_a, a_at_head}, {seq_b, b_at_head}) do
case {a_at_head, b_at_head} do
# seq A extends seq B
{true, false} ->
sequences
|> replace_sequence(seq_b, seq_b ++ seq_a)
|> remove_sequence(seq_a)
# seq B extends seq A
{false, true} ->
sequences
|> replace_sequence(seq_a, seq_a ++ seq_b)
|> remove_sequence(seq_b)
# head-head — reverse one and join
{true, true} ->
sequences
|> replace_sequence(seq_b, Enum.reverse(seq_b) ++ seq_a)
|> remove_sequence(seq_a)
# tail-tail — reverse one and join
{false, false} ->
sequences
|> replace_sequence(seq_b, seq_b ++ Enum.reverse(seq_a))
|> remove_sequence(seq_a)
end
end
defp close_ring(sequence) do
sequence ++ [hd(sequence)]
end
end
|
lib/conrex/contour_builder.ex
| 0.827932
| 0.628977
|
contour_builder.ex
|
starcoder
|
defmodule SMPPEX.ESME.Sync do
@moduledoc """
`SMPPEX.ESME.Sync` is an implementation of `SMPPEX.ESME`. It allows sending PDUs
to SMSCs in a synchronous way, i.e. blocking till the response PDU comes.
`SMPPEX.ESME.Sync` is an `SMPPEX.ESME`, so all methods provided by `SMPPEX.ESME`,
such as `SMPPEX.ESME.send_pdu/2`, can also be used with it.
"""
use SMPPEX.ESME
alias SMPPEX.ESME
alias SMPPEX.Pdu
require Logger
@default_timeout 5000
# Public interface
@spec start_link(host :: term, port :: non_neg_integer, opts :: Keyword.t) :: GenServer.on_start
@doc """
Starts `SMPPEX.ESME.Sync`.
`opts` is a keyword list of `SMPPEX.ESME` options which is directly passed to
the underlying `SMPPEX.ESME.start_link/4` call.
"""
def start_link(host, port, opts \\ []) do
ESME.start_link(host, port, {__MODULE__, %{from: nil, pdu: nil, additional_pdus: [], state: :free}}, opts)
end
@spec request(esme :: pid, pdu :: Pdu.t, timeout :: non_neg_integer) :: {:ok, resp :: Pdu.t} | :timeout | :stop | {:error, reason :: term}
@doc """
Synchronously sends a PDU and waits at most `timeout` ms for a response PDU.
The default timeout is #{@default_timeout} ms.
The result value is one of:
* `{:ok, resp}`, where `resp` is a successfully received response PDU;
* `:timeout` if the response PDU was not received within `timeout`;
* `:stop` if the ESME stopped while a response was awaited;
* `{:error, reason}` if the request failed.
"""
def request(esme, pdu, timeout \\ @default_timeout) do
try do
ESME.call(esme, {:request, pdu}, timeout)
catch
:exit, {:timeout, _} -> :timeout
end
end
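# Hypothetical usage sketch (host, port, credentials and the handler
# functions are placeholders; assumes SMPPEX.Pdu.Factory is available for
# building PDUs):
#
#     {:ok, esme} = SMPPEX.ESME.Sync.start_link("127.0.0.1", 2775)
#     pdu = SMPPEX.Pdu.Factory.bind_transmitter("system_id", "password")
#     case SMPPEX.ESME.Sync.request(esme, pdu) do
#       {:ok, resp} -> handle_resp_pdu(resp)
#       :timeout -> retry_later()
#       other -> handle_failure(other)
#     end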
@type awaited :: {:pdu, pdu :: Pdu.t} | {:resp, resp_pdu :: Pdu.t, original_pdu :: Pdu.t} | {:timeout, pdu :: Pdu.t} | {:error, pdu :: Pdu.t, reason :: any}
@spec wait_for_pdus(esme :: pid, timeout :: non_neg_integer) :: [awaited] | :timeout | :stop
@doc """
Synchronously waits for incoming PDUs. If the ESME already has some received PDUs,
they are returned immediately.
The default timeout is #{@default_timeout} ms.
The result value is `:timeout`, `:stop` or a list of the following items:
* `{:pdu, pdu}`, where `pdu` is an incoming PDU;
* `{:resp, resp_pdu, original_pdu}` where `resp_pdu` is an incoming reply for a
previously sent `original_pdu`;
* `{:timeout, pdu}` for `pdu`s which have not received a response within the ESME timeout;
* `{:error, pdu, reason}` for outgoing PDUs which were not successfully sent due to `reason`;
* `{:ok, pdu}` for outgoing PDUs which were successfully sent.
A `:timeout` return value indicates that the ESME didn't receive any PDUs within `timeout`.
`:stop` value indicates that the ESME stopped while waiting for incoming PDUs.
"""
def wait_for_pdus(esme, timeout \\ @default_timeout) do
try do
ESME.call(esme, :wait_for_pdus, timeout)
catch
:exit, {:timeout, _} -> :timeout
end
end
@spec pdus(esme :: pid, timeout) :: [awaited]
@doc """
A nonblocking version of `wait_for_pdus/2`.
The difference is that it always immediately returns a list of items (possibly empty)
and never returns `:timeout` or `:stop`.
"""
def pdus(esme, timeout \\ @default_timeout) do
ESME.call(esme, :pdus, timeout)
end
@spec stop(esme :: pid) :: :ok
@doc """
Stops the ESME asynchronously.
"""
def stop(esme) do
ESME.stop(esme)
end
# ESME callbacks
@doc false
def handle_call({:request, pdu}, from, st) do
ESME.send_pdu(self(), pdu)
new_st = %{st | from: from, pdu: pdu, state: :wait_for_resp}
{:noreply, new_st}
end
def handle_call(:pdus, _from, st) do
do_get_pdus(st)
end
def handle_call(:wait_for_pdus, from, st) do
case st.additional_pdus do
[_ | _] -> do_get_pdus(st)
[] ->
new_st = %{st | from: from, state: :wait_for_pdus}
{:noreply, new_st}
end
end
@doc false
def handle_resp(pdu, original_pdu, st) do
case st.pdu != nil and Pdu.same?(original_pdu, st.pdu) and st.state == :wait_for_resp do
true ->
GenServer.reply(st.from, {:ok, pdu})
do_set_free(st)
false ->
do_push_to_waiting({:resp, pdu, original_pdu}, st)
end
end
@doc false
def handle_resp_timeout(pdu, st) do
    case st.pdu != nil and Pdu.same?(pdu, st.pdu) and st.state == :wait_for_resp do
true ->
GenServer.reply(st.from, :timeout)
do_set_free(st)
false ->
do_push_to_waiting({:timeout, pdu}, st)
end
end
@doc false
def handle_pdu(pdu, st) do
do_push_to_waiting({:pdu, pdu}, st)
end
@doc false
def handle_stop(st) do
case st.from do
nil -> :nop
from -> GenServer.reply(from, :stop)
end
end
@doc false
def handle_send_pdu_result(pdu, result, st) do
case result do
:ok -> do_push_to_waiting({:ok, pdu}, st)
{:error, error} ->
        case st.pdu != nil and Pdu.same?(pdu, st.pdu) and st.state == :wait_for_resp do
true ->
GenServer.reply(st.from, {:error, error})
do_set_free(st)
false ->
do_push_to_waiting({:error, pdu, error}, st)
end
end
end
defp do_push_to_waiting(pdu_info, st) do
pdus = [pdu_info | st.additional_pdus]
case st.state == :wait_for_pdus do
true ->
GenServer.reply(st.from, pdus)
%{do_set_free(st) | additional_pdus: []}
false ->
%{st | additional_pdus: pdus}
end
end
defp do_set_free(st), do: %{st | from: nil, pdu: nil, state: :free}
defp do_get_pdus(st) do
pdus = Enum.reverse(st.additional_pdus)
new_st = %{st | additional_pdus: []}
{:reply, pdus, new_st}
end
end
|
lib/smppex/esme/sync.ex
| 0.70619
| 0.453383
|
sync.ex
|
starcoder
|
defmodule Q do
@moduledoc """
Documentation for `Q`.
"""
alias __MODULE__
defstruct operations: [], names: MapSet.new()
@type changes :: map
@type run :: (changes -> {:ok | :error, any}) | {module, atom, [any]}
@typep operation :: {:run, run} | {:put, any} | {:inspect, Keyword.t()}
@typep operations :: [{name, operation}]
@typep names :: MapSet.t()
@type name :: any
@type t :: %__MODULE__{operations: operations, names: names}
@doc """
Returns an empty `Q` struct.
## Example
iex> Q.new() |> Q.to_list()
[]
"""
@spec new :: t
def new() do
%Q{}
end
@doc """
Adds a value to the changes so far under the given name.
## Example
Q.new()
|> Q.put(:params, params)
|> Q.exec()
"""
@spec put(t, name, any) :: t
def put(que, name, value) do
add_operation(que, name, {:put, value})
end
@doc """
Returns the list of operations stored in `que`.
Always use this function when you need to access the operations you
have defined in `Q`. Inspecting the `Q` struct internals
directly is discouraged.
"""
@spec to_list(t) :: [{name, term}]
def to_list(%Q{operations: operations}) do
operations
|> Enum.reverse()
end
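@doc """
Adds an inspect step to the queue: when executed, the changes accumulated
so far are printed with `IO.inspect/2` using the given `opts`. Pass
`only: key_or_keys` to print only a subset of the changes.
"""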
@spec inspect(t, Keyword.t()) :: t
def inspect(que, opts \\ []) do
  Map.update!(que, :operations, &[{:inspect, {:inspect, opts}} | &1])
end
@doc """
Adds a function to run as part of the queue.
The function should return either `{:ok, value}` or `{:error, value}`,
and receives the repo as the first argument, and the changes so far
as the second argument.
## Example
Q.new()
|> Q.run(:write, fn _, _ -> {:ok, nil} end)
|> Q.exec()
"""
@spec exec(t) :: {:ok, map} | {:error, name, any, map}
def exec(%Q{} = que) do
  operations = Enum.reverse(que.operations)

  with {:ok, operations} <- check_operations_valid(operations) do
    apply_operations(operations, que.names)
  end
  |> case do
    {:error, {name, value, acc}} -> {:error, name, value, acc}
    {name, value, acc} -> {:error, name, value, acc}
    {results, _names} -> {:ok, results}
  end
end
@doc """
Adds a function to run as part of the queue.
The function should return either `{:ok, value}` or `{:error, value}`,
and receives the repo as the first argument, and the changes so far
as the second argument.
## Example
Q.run(multi, :write, fn %{image: image} ->
with :ok <- File.write(image.name, image.contents) do
{:ok, nil}
end
end)
"""
@spec run(t, name, run) :: t
def run(que, name, run) when is_function(run, 1) do
add_operation(que, name, {:run, run})
end
@doc """
Adds a function to run as part of the queue.
Similar to `run/3`, but allows to pass module name, function and arguments.
The function should return either `{:ok, value}` or `{:error, value}`, and
receives the repo as the first argument, and the changes so far as the
second argument (prepended to those passed in the call to the function).
"""
@spec run(t, name, module, function, args) :: t when function: atom, args: [any]
def run(que, name, mod, fun, args)
when is_atom(mod) and is_atom(fun) and is_list(args) do
add_operation(que, name, {:run, {mod, fun, args}})
end
defp add_operation(%Q{} = que, name, operation) do
%{operations: operations, names: names} = que
if MapSet.member?(names, name) do
raise "#{Kernel.inspect(name)} is already a member of the Q: \n#{Kernel.inspect(que)}"
else
%{que | operations: [{name, operation} | operations], names: MapSet.put(names, name)}
end
end
defp check_operations_valid(operations) do
Enum.find_value(operations, &invalid_operation/1) || {:ok, operations}
end
defp invalid_operation({name, {:changeset, %{valid?: false} = changeset, _}}),
do: {:error, {name, changeset, %{}}}
defp invalid_operation({name, {:error, value}}),
do: {:error, {name, value, %{}}}
defp invalid_operation(_operation),
do: nil
defp apply_operations([], names), do: {%{}, names}
defp apply_operations(operations, names) do
  operations
  |> Enum.reduce_while({%{}, names}, &apply_operation(&1, &2))
end
# This clause must come before the generic clause below; otherwise it would
# be shadowed and the :inspect operation would never match.
defp apply_operation({_name, {:inspect, opts}}, {acc, names}) do
  if opts[:only] do
    acc |> Map.take(List.wrap(opts[:only])) |> IO.inspect(opts)
  else
    IO.inspect(acc, opts)
  end
  {:cont, {acc, names}}
end
defp apply_operation({name, operation}, {acc, names}) do
  case apply_operation(operation, acc) do
    {:ok, value} ->
      {:cont, {Map.put(acc, name, value), names}}
    {:error, value} ->
      {:halt, {name, value, acc}}
    other ->
      raise "expected Q callback named `#{Kernel.inspect(name)}` to return " <>
              "either {:ok, value} or {:error, value}, got: #{Kernel.inspect(other)}"
  end
end
defp apply_operation({:run, run}, acc),
do: apply_run_fun(run, acc)
defp apply_operation({:put, value}, _acc),
do: {:ok, value}
defp apply_run_fun({mod, fun, args}, acc), do: apply(mod, fun, [acc | args])
defp apply_run_fun(fun, acc), do: apply(fun, [acc])
end
|
lib/q.ex
| 0.86337
| 0.486454
|
q.ex
|
starcoder
|
defmodule DealerScrapingRobot do
@moduledoc """
Gets the ***"overly positive reviews"*** of the informed dealer and by default returns the first three results from the five first review pages.
"""
@default_pages 5
@default_count 3
@doc """
Main function that scrapes the overly positive reviews from a dealer.
## Examples
iex> DealerScrapingRobot.main(["--help"])
:ok
iex> DealerScrapingRobot.main()
:ok
iex> DealerScrapingRobot.main(["-d", "Nóis Capota Mais Num Breca"])
:ok
iex> DealerScrapingRobot.main(["-d", "Nóis Capota Mais Num Breca", "-p", "1"])
:ok
iex> DealerScrapingRobot.main(["-d", "Nóis Capota Mais Num Breca", "-c", "1"])
:ok
iex> DealerScrapingRobot.main(["-d", "Nóis Capota Mais Num Breca", "-p", "1", "-c", "1"])
:ok
"""
def main(args \\ []) do
args
|> parse_args
|> run
end
defp parse_args(args) do
{options, _, _} = OptionParser.parse(
args,
switches: [help: :boolean, dealer: :string, pages: :integer, count: :integer],
aliases: [h: :help, d: :dealer, p: :pages, c: :count]
)
options
end
defp run(:help) do
Bunt.puts [
:aqua,
"""
Runs scraping robot to get the overly positive reviews from www.dealerrater.com.
Arguments:
* -d/--dealer: Dealer Name
* -p/--pages: Review pages to analyze (default 5)
* -c/--count: Reviews to return (default 3)
Usage: $ ./dealer_scraping_robot -d <dealer_name> [-p <number_of_pages>] [-c <results_to_show>]
"""
]
end
defp run(dealer: dealer, pages: pages, count: count) do
dealer
|> DealerRater.find_dealer_page()
|> DealerRater.get_overly_positive_reviews(pages, count)
|> Jason.encode!(pretty: true)
|> IO.puts
end
defp run(options) do
  cond do
    options[:help] ->
      run(:help)

    List.keymember?(options, :dealer, 0) ->
      run(
        dealer: options[:dealer],
        pages: Keyword.get(options, :pages, @default_pages),
        count: Keyword.get(options, :count, @default_count)
      )

    true ->
      run(:help)
  end
end
end
|
lib/dealer_scraping_robot.ex
| 0.722233
| 0.448426
|
dealer_scraping_robot.ex
|
starcoder
|
defmodule Day20 do
@moduledoc """
Documentation for Day20.
"""
def part1 do
Day20.read_input("input.txt")
|> Day20.derive_all_edges()
|> Day20.pair_up()
|> Day20.tile_connections()
|> Day20.with_connections(2)
|> Day20.multiply()
|> IO.puts()
end
def read_input(filename) do
File.stream!(filename)
|> Stream.map(&String.trim/1)
|> Stream.chunk_by(fn line -> line == "" end)
|> Stream.reject(fn chunk -> chunk == [""] end)
|> Stream.map(fn chunk ->
[tile | data] = chunk
tile = Regex.run(~r/\d+/, tile) |> Enum.at(0)
{tile, data}
end)
|> Map.new()
end
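# Interprets an edge such as ".##." as a binary number: "." => 0, "#" => 1.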
def edge_to_num(edge) do
edge |> String.replace(".", "0") |> String.replace("#", "1") |> String.to_integer(2)
end
def get_left_edge(data) do
data |> Enum.map(fn line -> line |> String.slice(0..0) end) |> Enum.join()
end
def get_right_edge(data) do
data |> Enum.map(fn line -> line |> String.slice(-1..-1) end) |> Enum.join()
end
def derive_edges(tile_data) do
[
List.first(tile_data),
List.last(tile_data),
get_left_edge(tile_data),
get_right_edge(tile_data)
]
|> Enum.map(fn edge -> [edge, String.reverse(edge)] end)
|> List.flatten()
|> Enum.map(&edge_to_num/1)
end
def derive_all_edges(data) do
data |> Enum.map(fn {tile, data} -> {tile, Day20.derive_edges(data)} end) |> Map.new()
end
def pair_up(values) do
Enum.reduce(values, %{}, fn {id, edges}, map ->
Enum.reduce(edges, map, fn edge_num, map ->
Map.put(map, edge_num, [id | Map.get(map, edge_num, [])])
end)
end)
end
def tile_connections(edge_data) do
Enum.reduce(edge_data, %{}, fn
{_id, [tile1, tile2]}, map ->
map
|> Map.put(tile1, [tile2 | Map.get(map, tile1, [])])
|> Map.put(tile2, [tile1 | Map.get(map, tile2, [])])
{_id, [_tile1]}, map ->
map
end)
|> Enum.map(fn {tile, connections} -> {tile, Enum.uniq(connections)} end)
|> Map.new()
end
def with_connections(data, conn_count) do
data
|> Enum.filter(fn {_tile, connections} -> Enum.count(connections) == conn_count end)
|> Enum.map(fn {tile, _conn} -> tile end)
end
def multiply(corners) do
corners |> Enum.reduce(1, fn tile, acc -> String.to_integer(tile) * acc end)
end
def edges_till_corner(data, start_tile, alt \\ false) do
corners = with_connections(data, 2)
edges = with_connections(data, 3)
Stream.resource(
fn -> {start_tile, nil} end,
fn
{nil, nil} ->
{:halt, nil}
{first_tile, nil} ->
next_tile =
if alt do
Enum.reverse(data[first_tile])
else
data[first_tile]
end
|> Enum.find(fn next -> Enum.member?(edges, next) end)
{[first_tile, next_tile], {next_tile, first_tile}}
{last_tile, prev_tile} ->
valid_edges = edges |> Enum.reject(fn x -> x == prev_tile end)
found_edge =
data[last_tile] |> Enum.find(fn next -> Enum.member?(valid_edges, next) end)
if found_edge do
{[found_edge], {found_edge, last_tile}}
else
valid_corners = corners |> Enum.reject(fn x -> x == prev_tile end)
found_corner =
data[last_tile] |> Enum.find(fn next -> Enum.member?(valid_corners, next) end)
{[found_corner], {nil, nil}}
end
end,
fn _acc -> nil end
)
end
def build_row(last_tile, prev_row, next_index, _data) when next_index >= length(prev_row) do
[last_tile]
end
def build_row(last_tile, prev_row, next_index, data) do
next_tile =
MapSet.new(data[last_tile])
|> MapSet.intersection(
MapSet.new(
data[Enum.at(prev_row, next_index)]
|> Enum.reject(fn t -> Enum.member?(prev_row, t) end)
)
)
|> Enum.at(0)
[last_tile | build_row(next_tile, prev_row, next_index + 1, data)]
end
def generate_rows([], last_row, _data) do
[last_row]
end
def generate_rows(start_tiles, prev_row, data) do
[first_tile | remaining] = start_tiles
this_row = build_row(first_tile, prev_row, 1, data)
[prev_row | generate_rows(remaining, this_row, data)]
end
def arrange_tiles(data) do
corner1 = Enum.at(with_connections(data, 2), 0)
first_row = edges_till_corner(data, corner1) |> Enum.to_list()
first_col = edges_till_corner(data, corner1, true) |> Enum.to_list()
first_col |> Enum.slice(1..-1) |> generate_rows(first_row, data)
end
def strip_edges(tile_data) do
tile_data |> Enum.slice(1..-2) |> Enum.map(fn line -> String.slice(line, 1..-2) end)
end
def flip_vertical(tile_data) do
tile_data |> Enum.reverse()
end
def flip_horizontal(tile_data) do
tile_data |> Enum.map(fn row -> row |> String.reverse() end)
end
def pivot(tile_data) do
tile_data
|> Enum.map(&String.graphemes/1)
|> Enum.zip()
|> Enum.map(&Tuple.to_list/1)
|> Enum.map(&Enum.join/1)
end
def reorient_left_edge(this_tile, right_edge) do
case derive_edges(this_tile)
|> Enum.find_index(fn edge -> edge == right_edge end) do
# top, top_r, bottom, bottom_r, left, left_r, right, right_r
0 -> pivot(this_tile)
1 -> flip_vertical(pivot(this_tile))
2 -> flip_horizontal(pivot(this_tile))
3 -> flip_vertical(flip_horizontal(pivot(this_tile)))
4 -> this_tile
5 -> flip_vertical(this_tile)
6 -> flip_horizontal(this_tile)
7 -> flip_horizontal(flip_vertical(this_tile))
end
end
def reorient_right_edge(this_tile, left_edge) do
case derive_edges(this_tile)
|> Enum.find_index(fn edge -> edge == left_edge end) do
# top, top_r, bottom, bottom_r, left, left_r, right, right_r
0 -> flip_horizontal(pivot(this_tile))
1 -> flip_vertical(flip_horizontal(pivot(this_tile)))
2 -> pivot(this_tile)
3 -> flip_vertical(pivot(this_tile))
4 -> flip_horizontal(this_tile)
5 -> flip_horizontal(flip_vertical(this_tile))
6 -> this_tile
7 -> flip_vertical(this_tile)
end
end
def rearrange_tile_row([left_tile, right_tile] = tiles) do
[left_edge_set, right_edge_set] =
tiles |> Enum.map(&derive_edges/1) |> Enum.map(&MapSet.new/1)
matched_edge = MapSet.intersection(left_edge_set, right_edge_set) |> Enum.at(0)
[reorient_right_edge(left_tile, matched_edge), reorient_left_edge(right_tile, matched_edge)]
end
def rearrange_tile_row(tiles) do
[this_tile | rest] = tiles
tail = rearrange_tile_row(rest)
[
reorient_right_edge(this_tile, tail |> Enum.at(0) |> get_left_edge() |> edge_to_num())
| tail
]
end
def flip_row_vertical(row) do
row |> Enum.map(fn tile -> flip_vertical(tile) end)
end
def reorient_rows([top_row, bottom_row]) do
case [
List.first(List.first(top_row)),
List.last(List.first(top_row)),
List.first(List.first(bottom_row)),
List.last(List.first(bottom_row))
] do
[x, _, x, _] -> [flip_row_vertical(top_row), bottom_row]
[x, _, _, x] -> [flip_row_vertical(top_row), flip_row_vertical(bottom_row)]
[_, x, x, _] -> [top_row, bottom_row]
[_, x, _, x] -> [top_row, flip_row_vertical(bottom_row)]
end
end
def reorient_rows([top_row | more_rows]) do
bottom_rows = reorient_rows(more_rows)
top_of_bottom = List.first(List.first(List.first(bottom_rows)))
if List.first(List.first(top_row)) == top_of_bottom do
[flip_row_vertical(top_row) | bottom_rows]
else
[top_row | bottom_rows]
end
end
def rearange_tiles(tiles) do
tiles |> Enum.map(fn row -> rearrange_tile_row(row) end) |> reorient_rows()
end
def render_puzzle(assembled_tiles, tile_data) do
assembled_tiles
|> Enum.map(fn row ->
row
|> Enum.map(fn tile_id -> tile_data[tile_id] end)
|> rearrange_tile_row()
end)
|> reorient_rows()
|> Enum.map(fn row ->
row
|> Enum.map(fn tile -> strip_edges(tile) end)
|> Enum.zip()
|> Enum.map(&Tuple.to_list/1)
|> Enum.map(&Enum.join/1)
end)
|> List.flatten()
end
def monster() do
["..................#.", "#....##....##....###", ".#..#..#..#..#..#..."]
end
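# Builds a regex matching the monster across the flattened puzzle string:
# consecutive monster rows sit `gap_size` characters apart, where
# gap_size = puzzle_size - monster_width.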
def monster_regex(puzzle_size) do
gap_size = puzzle_size - (monster() |> List.first() |> String.length())
monster() |> Enum.map(fn line -> "(#{line})" end) |> Enum.join(".{#{gap_size}}")
end
def find_all(regex, puzzle) do
result = Regex.run(regex, puzzle, return: :index)
if result == nil do
[]
else
index = (result |> List.first() |> elem(0)) + 1
[result |> List.first() | find_all(regex, puzzle |> String.slice(index..-1))]
end
end
end
|
day20/lib/day20.ex
| 0.663015
| 0.510069
|
day20.ex
|
starcoder
|
defmodule Txpost.Payload do
@moduledoc """
Request payload module, implements BRFC `c9a2975b3d19` ([CBOR Tx Payload](cbor-tx-payload.md)).
BRFC `c9a2975b3d19` defines a simple structure for encoding a raw Bitcoin transaction
alongside arbitrary data attributes and meta data in a CBOR encoded binary.
The `:data` attribute is either a map with a single raw transaction
alongside any other attributes, or alternatively a list of such maps
containing multiple sets of raw transactions with additional attributes. This
allows multiple transactions to be encoded in a single payload.
The `:meta` attribute is a map which can contain any other arbitrary information
which can be used to help handle the request.
## Examples
Example payload containing a single transaction.
%Txpost.Payload{
data: %{
"rawtx" => <<1, 0 ,0 ,0, ...>>,
"type" => "article"
},
meta: %{
"path" => "/posts"
}
}
Example payload containing a list of transactions.
%Txpost.Payload{
data: [%{
"rawtx" => <<1, 0 ,0 ,0, ...>>,
"type" => "article"
}, %{
"rawtx" => <<1, 0 ,0 ,0, ...>>,
"type" => "article"
}],
meta: %{
"path" => "/posts"
}
}
"""
alias Txpost.Envelope
import Txpost.Utils.Params
import Txpost.Utils.Tags
defstruct data: nil, meta: %{}
@typedoc "CBOR Request Payload"
@type t :: %__MODULE__{
data: map | list(map),
meta: map
}
@doc """
Validates the given parameters and returns a [`Payload`](`t:t/0`) struct or
returns a validation error message.
Parameters can be passed as either a map or keyword list.
## Examples
iex> Txpost.Payload.build(data: %{"rawtx" => <<1, 0 ,0 ,0 ,0 ,0 ,0 ,0 ,0 ,0 >>})
{:ok, %Txpost.Payload{
data: %{"rawtx" => <<1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>},
meta: %{}
}}
Returns an error when given invalid params.
iex> Txpost.Payload.build(data: "not a map")
{:error, "Invalid param: data"}
"""
@spec build(map | keyword) :: {:ok, t} | {:error, String.t}
def build(params) when is_map(params) or is_list(params) do
params
|> normalize_params([:data, :meta])
|> validate_param(:data, &valid_data/1)
|> validate_param(:meta, &is_map/1, allow_blank: true)
|> case do
{:ok, params} ->
{:ok, struct(__MODULE__, params)}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Decodes the given CBOR binary and returns a [`Payload`](`t:t/0`) struct or
returns a validation error message.
## Examples
iex> Txpost.Payload.decode(<<161, 100, 100, 97, 116, 97, 161, 101, 114, 97, 119, 116, 120, 74, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>)
{:ok, %Txpost.Payload{
data: %{"rawtx" => <<1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>},
meta: %{}
}}
Returns an error when given invalid binary.
iex> Txpost.Payload.decode(<<0,1,2,3>>)
{:error, "Invalid payload binary"}
"""
@spec decode(binary) :: {:ok, t} | {:error, any}
def decode(data) when is_binary(data) do
case CBOR.decode(data) do
{:ok, data, _} when is_map(data) ->
data
|> detag
|> build
{:ok, _, _} ->
{:error, "Invalid payload binary"}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Encodes the given [`Payload`](`t:t/0`) struct and returns a CBOR binary.
## Examples
iex> Txpost.Payload.encode(%Txpost.Payload{
...> data: %{"rawtx" => <<1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>}
...> })
<<161, 100, 100, 97, 116, 97, 161, 101, 114, 97, 119, 116, 120, 74, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
"""
@spec encode(t) :: binary
def encode(%__MODULE__{} = payload) do
payload
|> to_map
|> tag_rawtx
|> entag
|> CBOR.encode
end
@doc """
Encodes the given [`Payload`](`t:t/0`) struct as a CBOR binary and wraps it
within an [`Envelope`](`t:Txpost.Envelope.t/0`) struct.
## Examples
iex> Txpost.Payload.encode_envelope(%Txpost.Payload{
...> data: %{"rawtx" => <<1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>}
...> })
%Txpost.Envelope{
payload: <<161, 100, 100, 97, 116, 97, 161, 101, 114, 97, 119, 116, 120, 74, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
}
"""
@spec encode_envelope(t) :: Envelope.t
def encode_envelope(%__MODULE__{} = payload),
do: struct(Envelope, payload: encode(payload))
@doc """
Returns the given [`Payload`](`t:t/0`) struct as a map with stringified keys.
The meta attribute is removed if it is an empty map.
## Examples
iex> Txpost.Payload.to_map(%Txpost.Payload{
...> data: %{"rawtx" => <<1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>}
...> })
%{
"data" => %{"rawtx" => <<1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>}
}
"""
@spec to_map(t) :: map
def to_map(%__MODULE__{} = payload) do
payload
|> Map.from_struct
|> Enum.reject(fn {_k, v} -> Enum.empty?(v) end)
|> Enum.map(fn {k, v} -> {Atom.to_string(k), v} end)
|> Enum.into(%{})
end
# Ensures the given value is a valid data map or list of maps.
defp valid_data(%{rawtx: rawtx}) when is_binary(rawtx), do: true
defp valid_data(%{"rawtx" => rawtx}) when is_binary(rawtx), do: true
defp valid_data(items) when is_list(items),
do: Enum.all?(items, &valid_data/1)
defp valid_data(_), do: false
# Wraps known binary elements in CBOR bytes tag
defp tag_rawtx(%{"data" => data} = payload)
when is_map(data) or is_list(data),
do: update_in(payload, ["data"], &tag_rawtx/1)
defp tag_rawtx([item | rest]),
do: [tag_rawtx(item) | tag_rawtx(rest)]
defp tag_rawtx(%{"rawtx" => rawtx} = data) when is_binary(rawtx),
do: Map.put(data, "rawtx", %CBOR.Tag{tag: :bytes, value: rawtx})
defp tag_rawtx(data), do: data
end
|
lib/txpost/payload.ex
| 0.901442
| 0.524577
|
payload.ex
|
starcoder
|
defmodule Day7 do
@moduledoc """
Documentation for Day7.
"""
@doc """
Read the puzzle input.
Returns a list of maps of the form %{"lhs" => lhs, "rhs" => rhs, "weight" => weight}
One such map per line in input file
"""
def read_input(source) do
source
|> File.stream!
|> Enum.map(fn line -> ~r/(?<lhs>.*) \((?<weight>\d+)\)( -> (?<rhs>.*))?/ |> Regex.named_captures(line) end)
end
@doc """
Part 1 - Find the root of the tree
"""
def find_root(input) do
{l, r} = node_biset(input)
l--r
end
defp node_biset(input) do
{l, r} = input |> Enum.reduce({[],[]}, &biset/2)
l_clean = l |> Enum.map(&String.trim/1)
r_clean = r |> Enum.map(&String.trim/1)
{l_clean, r_clean}
end
defp biset(%{"lhs" => lhs, "rhs" => "", "weight" => _}, {lh_nodes, rh_nodes}) do
{lh_nodes ++ [lhs], rh_nodes}
end
defp biset(%{"lhs" => lhs, "rhs" => rhs, "weight" => weight}, {lh_nodes, rh_nodes}) do
{lh_nodes ++ [lhs], rh_nodes ++ String.split(rhs, ~r/, /)}
end
@doc """
Part 2 - Weights of subtrees
"""
def make_tree(input) do
input
|> Enum.reduce(%{}, fn(%{"lhs" => node, "rhs" => children, "weight" => weight}, acc) -> Map.put(acc, node, %{:weight => str_to_i(weight), :children => String.split(children, ~r/, /, trim: true)}) end)
end
@doc """
Calculate a nested list of all nodes with weights of all subtrees
"""
def calc_weights(tree, node) do
%{:weight => weight, :children => children} = Map.fetch!(tree, node)
case children do
[] -> [{node, weight }]
children -> [{node, weight}] ++ Enum.map(children, fn child -> calc_weights(tree, child) end)
end
end
defp str_to_i(str) do
str
|> String.trim
|> String.to_integer
end
@doc """
Subtrees of a given node are balanced
if the sums over all weights in each subtree are equal,
i.e., the list of the sums of all subtrees has only
elements that are all the same. If that's not the case,
Enum.uniq will leave us with a list with more than one element.
"""
def subtrees_balanced?(node, tree) do
%{:children => children} = Map.get(tree, node)
count_uniq_sums =
children
|> Enum.map(fn n -> calc_weights(tree, n) |> sum_node_weights end)
|> Enum.uniq
|> Enum.count
count_uniq_sums == 1
end
@doc """
Collapse a given nested list of subtrees with weighted nodes to a single number:
the sum of all weights over all subtrees.
"""
def sum_node_weights({_node, weight}) do
  weight
end
def sum_node_weights(list_of_nodes) do
list_of_nodes
|> List.flatten
|> Enum.reduce(0, fn ({_, weight}, acc) -> weight + acc end)
end
def find_broken_node(tree, node) do
%{:children => children} = Map.get(tree, node)
children |> Enum.reject( fn x -> subtrees_balanced?(x, tree) end)
end
def subtree_weight(node, tree) do
calc_weights(tree, node) |> sum_node_weights
end
end
|
2017/day_07/elixir/lib/day7.ex
| 0.828384
| 0.459986
|
day7.ex
|
starcoder
|
defmodule Chex.Board do
@moduledoc false
@type value :: {Chex.name(), Chex.color(), Chex.square()}
@files [:a, :b, :c, :d, :e, :f, :g, :h]
# @ranks 1..8
@starting_position %{
{:a, 1} => {:rook, :white, {:a, 1}},
{:a, 2} => {:pawn, :white, {:a, 2}},
{:a, 7} => {:pawn, :black, {:a, 7}},
{:a, 8} => {:rook, :black, {:a, 8}},
{:b, 1} => {:knight, :white, {:b, 1}},
{:b, 2} => {:pawn, :white, {:b, 2}},
{:b, 7} => {:pawn, :black, {:b, 7}},
{:b, 8} => {:knight, :black, {:b, 8}},
{:c, 1} => {:bishop, :white, {:c, 1}},
{:c, 2} => {:pawn, :white, {:c, 2}},
{:c, 7} => {:pawn, :black, {:c, 7}},
{:c, 8} => {:bishop, :black, {:c, 8}},
{:d, 1} => {:queen, :white, {:d, 1}},
{:d, 2} => {:pawn, :white, {:d, 2}},
{:d, 7} => {:pawn, :black, {:d, 7}},
{:d, 8} => {:queen, :black, {:d, 8}},
{:e, 1} => {:king, :white, {:e, 1}},
{:e, 2} => {:pawn, :white, {:e, 2}},
{:e, 7} => {:pawn, :black, {:e, 7}},
{:e, 8} => {:king, :black, {:e, 8}},
{:f, 1} => {:bishop, :white, {:f, 1}},
{:f, 2} => {:pawn, :white, {:f, 2}},
{:f, 7} => {:pawn, :black, {:f, 7}},
{:f, 8} => {:bishop, :black, {:f, 8}},
{:g, 1} => {:knight, :white, {:g, 1}},
{:g, 2} => {:pawn, :white, {:g, 2}},
{:g, 7} => {:pawn, :black, {:g, 7}},
{:g, 8} => {:knight, :black, {:g, 8}},
{:h, 1} => {:rook, :white, {:h, 1}},
{:h, 2} => {:pawn, :white, {:h, 2}},
{:h, 7} => {:pawn, :black, {:h, 7}},
{:h, 8} => {:rook, :black, {:h, 8}}
}
@spec get_piece_name(Chex.game(), Chex.square()) :: Chex.name() | nil
def get_piece_name(%{board: board}, square) do
case board[square] do
{name, _color, _sq} -> name
_ -> nil
end
end
@spec get_piece_color(Chex.game(), Chex.square()) :: Chex.color() | nil
def get_piece_color(%{board: board}, square) do
case board[square] do
{_name, color, _sq} -> color
_ -> nil
end
end
@spec pickup_piece(Chex.game(), Chex.square()) ::
{:ok, {value(), Chex.game()}} | {:error, atom}
def pickup_piece(game, square) do
case Map.pop(game.board, square) do
{nil, _board} -> {:error, :no_piece_at_square}
{piece, board} -> {:ok, {piece, %{game | board: board}}}
end
end
@spec place_piece(Chex.game(), Chex.square(), value()) ::
{:ok, {value() | nil, Chex.game()}} | {:error, atom}
def place_piece(game, square, {_name, color, _start} = piece) do
game.board
|> Map.get_and_update(square, fn capture ->
{capture, piece}
end)
|> case do
{{_name, ^color, _start}, _board} ->
{:error, :occupied_by_own_color}
{capture, board} ->
{:ok, {capture, %{game | board: board}}}
end
end
@spec move(Chex.game(), Chex.square(), Chex.square()) ::
{:ok, {value(), value() | nil, Chex.game()}} | {:error, atom}
def move(game, from, to) do
with {:ok, {piece, game}} <- pickup_piece(game, from),
{:ok, {capture, game}} <- place_piece(game, to, piece) do
{:ok, {piece, capture, game}}
end
end
def starting_position, do: @starting_position
def files, do: @files
def file_index(file), do: Enum.find_index(@files, fn x -> x == file end)
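# Returns the file `offset` squares away, e.g. file_offset(:c, 2) => :e.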
def file_offset(file, 0), do: file
def file_offset(file, offset) do
offset_index = file_index(file)
index = offset_index + offset
# Prevent wrap-around for negative indexes. Ex: (:a, -1) would give :h
if index >= 0, do: Enum.at(@files, index)
end
def occupied_by_color?(%{board: board}, color, square) do
case board[square] do
{_name, ^color, _sq} -> true
_ -> false
end
end
def occupied?(%{board: board}, square) do
!is_nil(board[square])
end
@doc """
Get the square of the first matching piece.
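For example, given a game in the starting position,
`find_piece(game, {:king, :white})` returns `{:e, 1}`.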
"""
@spec find_piece(Chex.game(), Chex.piece()) :: Chex.square() | nil
def find_piece(%{board: board}, piece) do
Enum.reduce_while(board, nil, &finder(piece, &1, &2))
end
defp finder({name, color}, {square, {name, color, _}}, _acc), do: {:halt, square}
defp finder(_piece, {_square, _value}, acc), do: {:cont, acc}
@doc """
Find locations of the specified piece on the board.
"""
@spec find_pieces(Chex.game(), Chex.piece()) :: [Chex.square()] | []
def find_pieces(%{board: board}, piece) do
Enum.reduce(board, [], fn {square, {n, c, _}}, acc ->
if {n, c} == piece, do: [square | acc], else: acc
end)
end
def all_attacking_squares(game, color) do
game
|> all_occupied_by_color(color)
|> Enum.map(&Chex.Piece.attacking_squares(game, &1))
|> List.flatten()
|> Enum.uniq()
end
def all_possible_squares(game, color) do
game
|> all_occupied_by_color(color)
|> Enum.map(&Chex.Piece.possible_moves(game, &1))
|> List.flatten()
|> Enum.uniq()
end
def all_occupied_by_color(%{board: board}, color) do
board
|> Enum.map(fn {k, _v} -> k end)
|> Enum.filter(&occupied_by_color?(%{board: board}, color, &1))
end
end
|
lib/chex/board.ex
| 0.826537
| 0.531757
|
board.ex
|
starcoder
|
defmodule BlockBox.BlockElements do
@moduledoc """
Defines generator functions for all [block elements](https://api.slack.com/reference/block-kit/block-elements).
"""
alias BlockBox.CompositionObjects, as: CO
alias BlockBox.Utils, as: Utils
@type select_menu_type ::
:static_select
| :external_select
| :users_select
| :conversations_select
| :channels_select
@type multi_select_menu_type ::
:multi_static_select
| :multi_external_select
| :multi_users_select
| :multi_conversations_select
| :multi_channels_select
@doc """
Creates a [button element](https://api.slack.com/reference/block-kit/block-elements#button).
## Options
Options are not included by default.
* `:url` - String
* `:value` - String
* `:style` - String
* `:confirm` - `t:BlockBox.CompositionObjects.confirm_object/0`
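## Example
A minimal sketch (the label, action id and option values are illustrative):
    button("Approve", "approve_request", style: "primary", value: "req_42")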
"""
@spec button(String.t() | CO.text_object(), String.t(), keyword()) :: map()
def button(text, action_id, opts \\ [])
def button(text, action_id, opts) when is_binary(text) do
CO.text_object(text)
|> button(action_id, opts)
end
def button(text, action_id, opts) do
%{
type: "button",
text: text,
action_id: action_id
}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates a [datepicker element](https://api.slack.com/reference/block-kit/block-elements#datepicker).
## Options
Options are not included by default.
* `:placeholder` - `t:BlockBox.CompositionObjects.plain_text_object/0` or String
* `:initial_date` - String, "YYYY-MM-DD" format. Put "today" for the current date.
* `:confirm` - `t:BlockBox.CompositionObjects.confirm_object/0`
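## Example
A minimal sketch (the action id is illustrative):
    datepicker("pick_date", initial_date: "today")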
"""
@spec datepicker(String.t(), keyword()) :: map()
def datepicker(action_id, opts \\ []) do
opts = Utils.convert_text_opts(opts, [:placeholder])
opts =
case Keyword.get(opts, :initial_date) do
"today" -> opts |> Keyword.put(:initial_date, Utils.today())
_other -> opts
end
%{type: "datepicker", action_id: action_id}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates an [image element](https://api.slack.com/reference/block-kit/block-elements#image).
"""
@spec image(String.t(), String.t()) :: map()
def image(image_url, alt_text) do
%{
type: "image",
image_url: image_url,
alt_text: alt_text
}
end
@doc """
Creates an [overflow menu element](https://api.slack.com/reference/block-kit/block-elements#overflow).
## Options
Options are not included by default.
* `:confirm` - `t:BlockBox.CompositionObjects.confirm_object/0`
"""
@spec overflow_menu(String.t(), list(CO.option_object()), keyword()) :: map()
def overflow_menu(action_id, options, opts \\ []) do
%{
type: "overflow",
action_id: action_id,
options: options
}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates a [plain text input element](https://api.slack.com/reference/block-kit/block-elements#input).
## Options
Options are not included by default.
* `:placeholder` - `t:BlockBox.CompositionObjects.plain_text_object/0` or String
* `:initial_value` - String
* `:multiline` - boolean
* `:min_length` - non negative integer
* `:max_length` - positive integer
"""
@spec plain_text_input(String.t(), keyword()) :: map()
def plain_text_input(action_id, opts \\ []) do
opts = Utils.convert_text_opts(opts, [:placeholder])
%{type: "plain_text_input", action_id: action_id}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates a [radio button group element](https://api.slack.com/reference/block-kit/block-elements#radio).
## Options
Options are not included by default.
* `:initial_option` - `t:BlockBox.CompositionObjects.option_object/0` or an integer representing the index of the option you want to select
* `:confirm` - `t:BlockBox.CompositionObjects.confirm_object/0`
"""
@spec radio_buttons(String.t(), list(CO.option_object()), keyword()) :: map()
def radio_buttons(action_id, options, opts \\ []) do
opts = Utils.convert_initial_opts(opts)
%{
type: "radio_buttons",
action_id: action_id,
options: options
}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates a [checkbox group element](https://api.slack.com/reference/block-kit/block-elements#checkboxes).
## Options
Options are not included by default.
* `:initial_options` - list of `t:BlockBox.CompositionObjects.option_object/0`s, Also included is the ability to pass in a list of integers representing the index of the item you want to select in `:options`.
* `:confirm` - `t:BlockBox.CompositionObjects.confirm_object/0`
"""
@spec checkboxes(String.t(), list(CO.option_object()), keyword()) :: map()
def checkboxes(action_id, options, opts \\ []) do
opts = Utils.convert_initial_opts(opts)
%{
type: "checkboxes",
action_id: action_id,
options: options
}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates a [select menu element](https://api.slack.com/reference/block-kit/block-elements#select).
*ONLY ONE* of the following k/v pairs must be included in the options:
* `:options` - a list of `t:BlockBox.CompositionObjects.option_object/0`s
* `:option_groups` - a list of `t:BlockBox.CompositionObjects.option_group_object/0`s
## Options
Options are not included by default.
* `:initial_option` - `t:BlockBox.CompositionObjects.option_object/0`, only available with [static_select](https://api.slack.com/reference/block-kit/block-elements#static_select) or [external_select](https://api.slack.com/reference/block-kit/block-elements#external_select) types. Also included is the ability to pass in an integer representing the index of the item you want to select in `:options` or a 2-tuple representing an index of the 2D-list that is your `:option_groups`, only available with [static_select](https://api.slack.com/reference/block-kit/block-elements#static_select).
* `:min_query_length` - positive integer, only available with [external_select](https://api.slack.com/reference/block-kit/block-elements#external_select) type
* `:initial_user` - slack user ID, only available with [users_select](https://api.slack.com/reference/block-kit/block-elements#users_select) type
* `:initial_conversation` - slack conversation ID, only available with [conversations_select](https://api.slack.com/reference/block-kit/block-elements#conversation_select) type
* `:initial_channel` - slack channel ID, only available with [channels_select](https://api.slack.com/reference/block-kit/block-elements#channel_select) type
* `:confirm` - `t:BlockBox.CompositionObjects.confirm_object/0`
* `:response_url_enabled` - boolean, only works with menus in inputs blocks and modals. only available with [conversations_select](https://api.slack.com/reference/block-kit/block-elements#conversation_select) and [channels_select](https://api.slack.com/reference/block-kit/block-elements#channel_select)
* `:filter` - `t:BlockBox.CompositionObjects.filter_object/0`, only available with [conversations_select](https://api.slack.com/reference/block-kit/block-elements#conversation_select) type
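## Example
A minimal sketch of a static select; `opt_a` and `opt_b` stand for option
objects built with `BlockBox.CompositionObjects`:
    select_menu("Pick one", :static_select, "choice", options: [opt_a, opt_b])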
"""
@spec select_menu(
String.t() | CO.plain_text_object(),
select_menu_type | multi_select_menu_type,
String.t(),
keyword()
) :: map()
def select_menu(placeholder, type, action_id, opts \\ [])
def select_menu(placeholder, type, action_id, opts) when is_binary(placeholder) do
CO.text_object(placeholder)
|> select_menu(type, action_id, opts)
end
def select_menu(placeholder, type, action_id, opts) do
opts = Utils.convert_initial_opts(opts)
%{type: type, placeholder: placeholder, action_id: action_id}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates a [multi-select menu element](https://api.slack.com/reference/block-kit/block-elements#multi_select).
*ONLY ONE* of the following k/v pairs must be included in the options:
* `:options` - a list of `t:BlockBox.CompositionObjects.option_object/0`s
* `:option_groups` - a list of `t:BlockBox.CompositionObjects.option_group_object/0`s
## Options
Options are not included by default.
* `:initial_options` - list of `t:BlockBox.CompositionObjects.option_object/0`s, only available with [multi_static_select](https://api.slack.com/reference/block-kit/block-elements#static_multi_select) or [multi_external_select](https://api.slack.com/reference/block-kit/block-elements#external_multi_select) types. Also included is the ability to pass in a list of integers representing the index of the item you want to select in `:options` or a list of 2-tuples representing an index of the 2D-list that is your `:option_groups`, only available with [multi_static_select](https://api.slack.com/reference/block-kit/block-elements#static_multi_select).
* `:min_query_length` - positive integer, only available with [multi_external_select](https://api.slack.com/reference/block-kit/block-elements#external_multi_select) type
* `:initial_users` - list of slack user IDs, only available with [multi_users_select](https://api.slack.com/reference/block-kit/block-elements#users_multi_select) type
* `:initial_conversations` - list of slack conversation IDs, only available with [multi_conversations_select](https://api.slack.com/reference/block-kit/block-elements#conversation_multi_select) type
* `:initial_channels` - list of slack channel IDs, only available with [multi_channels_select](https://api.slack.com/reference/block-kit/block-elements#channel_multi_select) type
* `:confirm` - `t:BlockBox.CompositionObjects.confirm_object/0`
* `:filter` - `t:BlockBox.CompositionObjects.filter_object/0`, only available with [multi_conversations_select](https://api.slack.com/reference/block-kit/block-elements#conversation_multi_select) type
"""
@spec multi_select_menu(
String.t() | CO.plain_text_object(),
multi_select_menu_type,
String.t(),
keyword()
) :: map()
def multi_select_menu(placeholder, type, action_id, opts \\ []) do
select_menu(placeholder, type, action_id, opts)
end
end
|
lib/block_elements.ex
| 0.851922
| 0.445107
|
block_elements.ex
|
starcoder
|
defmodule Plymio.Codi.Pattern.Query do
# @moduledoc false
@moduledoc ~S"""
The *query* patterns build query functions
(e.g. `myfun?(arg)`) using existing base functions (e.g. `myfun(arg)`).
When the base function returns `{:ok, value}`, the query
function returns `true`. Otherwise `false` is returned.
Query functions can, optionally, be built with a `@doc`, `@since`
and/or `@spec`.
See `Plymio.Codi` for an overview and documentation terms.
Note if the base function is in another module, the base mfa
`{module, function, arity}` is validated i.e. the `function` must
exist in the `module` with the given `arity`.
If `:fun_doc` is not in the pattern opts, a default of `:query` is
used. (It can be disabled by explicitly setting `:fun_doc` to
`nil`)
## Pattern: *query*
Valid keys in the *cpo* are:
| Key | Aliases |
| :--- | :--- |
| `:query_module` | *:module, :fun_mod, :query_module, :function_module* |
| `:query_name` | *:name, :fun_name, :function_name* |
| `:query_args` | *:args, :fun_args, :function_args* |
| `:query_arity` | *:arity, :fun_arity, :function_arity* |
| `:query_doc` | *:doc, :fun_doc, :function_doc* |
| `:typespec_spec_args` | *:spec_args* |
| `:typespec_spec_result` | *:result, :spec_result, :fun_result, :function_result* |
| `:since` | |
## Examples
Here is the common case of a query function for a function in the
same module. Note the automatically generated `:query`-format `@doc`
and explicitly specified `@since`:
iex> {:ok, {forms, _}} = [
...> query: [as: :fun_tre, arity: 3, since: "1.7.9"]
...> ] |> produce_codi
...> forms |> harnais_helper_format_forms!
["@doc \"Query function for `fun_tre/3`\"",
"@since \"1.7.9\"",
"def(fun_tre?(var1, var2, var3)) do",
" case(fun_tre(var1, var2, var3)) do",
" {:ok, _} ->",
" true",
"",
" _ ->",
" false",
" end",
"end"]
Here the other function is in a different module(`ModuleA`):
iex> {:ok, {forms, _}} = [
...> query: [as: :fun_tre, arity: 3, to: ModuleA, since: "1.7.9"]
...> ] |> produce_codi
...> forms |> harnais_helper_format_forms!
["@doc \"Query function for `ModuleA.fun_tre/3`\"",
"@since \"1.7.9\"",
"def(fun_tre?(var1, var2, var3)) do",
" case(ModuleA.fun_tre(var1, var2, var3)) do",
" {:ok, _} ->",
" true",
"",
" _ ->",
" false",
" end",
"end"]
The `:fun_args` can be supplied to improve the definition. Note the
`:fun_doc` is set to `false`.
iex> {:ok, {forms, _}} = [
...> query: [as: :fun_tre, args: [:x, :y, :z], to: ModuleA, fun_doc: false]
...> ] |> produce_codi
...> forms |> harnais_helper_format_forms!
["@doc false",
"def(fun_tre?(x, y, z)) do",
" case(ModuleA.fun_tre(x, y, z)) do",
" {:ok, _} ->",
" true", "",
" _ ->",
" false",
" end",
"end"]
Similarly, if the *cpo* contains a `:spec_result` key, a `@spec` will
be generated. The second example has an explicit `:spec_args`.
> Note the @spec result is always boolean and any given value will be ignored.
iex> {:ok, {forms, _}} = [
...> query: [as: :fun_tre, args: [:x, :y, :z], module: ModuleA, result: true]
...> ] |> produce_codi
...> forms |> harnais_helper_format_forms!
["@doc \"Query function for `ModuleA.fun_tre/3`\"",
"@spec fun_tre?(any, any, any) :: boolean",
"def(fun_tre?(x, y, z)) do",
" case(ModuleA.fun_tre(x, y, z)) do",
" {:ok, _} ->",
" true",
"",
" _ ->",
" false",
" end",
"end"]
iex> {:ok, {forms, _}} = [
...> query: [as: :fun_tre, args: [:x, :y, :z], module: ModuleA,
...> spec_args: [:integer, :binary, :atom], result: :tuple]
...> ] |> produce_codi
...> forms |> harnais_helper_format_forms!
["@doc \"Query function for `ModuleA.fun_tre/3`\"",
"@spec fun_tre?(integer, binary, atom) :: boolean",
"def(fun_tre?(x, y, z)) do",
" case(ModuleA.fun_tre(x, y, z)) do",
" {:ok, _} ->",
" true", "",
" _ ->",
" false",
" end",
"end"]
## Pattern: *query_module*
The *query_module* pattern builds a query function for one or more
functions in a module. As with `:query` a `@doc` or `@since` can be generated at
the same time.
Valid keys in the *cpo* are:
| Key | Aliases |
| :--- | :--- |
| `:query_module` | *:to, :module, :fun_mod, :fun_module, :function_module* |
| `:query_doc` | *:doc, :fun_doc, :function_doc* |
| `:take` | |
| `:drop` | |
| `:filter` | |
| `:reject` | |
| `:since` | |
## Examples
Here a query function will be generated for all the functions in the module.
iex> {:ok, {forms, _}} = [
...> query_module: [module: ModuleA],
...> ] |> produce_codi
...> forms |> harnais_helper_format_forms!
["@doc \"Query function for `ModuleA.fun_due/2`\"",
"def(fun_due?(var1, var2)) do",
" case(ModuleA.fun_due(var1, var2)) do",
" {:ok, _} ->",
" true",
"",
" _ ->",
" false",
" end",
"end",
"@doc \"Query function for `ModuleA.fun_one/1`\"",
"def(fun_one?(var1)) do", " case(ModuleA.fun_one(var1)) do",
" {:ok, _} ->",
" true",
"", " _ ->", " false",
" end", "end", "@doc \"Query function for `ModuleA.fun_tre/3`\"",
"def(fun_tre?(var1, var2, var3)) do",
" case(ModuleA.fun_tre(var1, var2, var3)) do",
" {:ok, _} ->",
" true",
"",
" _ ->",
" false",
" end",
"end"]
In the same way as `:bang_module` the functions can be selected
using e.g. `:take`. Here `:since` is also given.
iex> {:ok, {forms, _}} = [
...> query_module: [module: ModuleA, take: :fun_due, since: "1.7.9"],
...> ] |> produce_codi
...> forms |> harnais_helper_format_forms!
["@doc \"Query function for `ModuleA.fun_due/2`\"",
"@since \"1.7.9\"",
"def(fun_due?(var1, var2)) do",
" case(ModuleA.fun_due(var1, var2)) do",
" {:ok, _} ->",
" true",
"",
" _ ->",
" false",
" end",
"end"]
"""
alias Plymio.Codi, as: CODI
alias Plymio.Codi.Utility.Depend, as: DEPEND
use Plymio.Fontais.Attribute
use Plymio.Codi.Attribute
import Plymio.Fontais.Guard,
only: [
is_value_unset_or_nil: 1
]
import Plymio.Fontais.Option,
only: [
opts_canonical_keys: 2,
opts_take_canonical_keys: 2,
opts_create_aliases_dict: 1
]
import Plymio.Codi.Utility,
only: [
cpo_resolve_query_module: 1,
cpo_resolve_query_name: 1,
cpo_resolve_query_args: 1,
cpo_resolve_query_doc: 1,
cpo_resolve_fun_name: 1
]
import Plymio.Codi.Utility.Module,
only: [
reduce_module_fva: 2,
state_validate_mfa: 2,
state_resolve_module_fva: 2
]
import Plymio.Funcio.Enum.Map.Collate,
only: [
map_collate0_enum: 2
]
import Plymio.Codi.CPO
@pattern_query_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_since,
@plymio_codi_key_alias_query_module,
@plymio_codi_key_alias_query_name,
@plymio_codi_key_alias_query_doc,
@plymio_codi_key_alias_query_args,
@plymio_codi_key_alias_query_arity,
@plymio_codi_key_alias_fun_name,
# limited aliases
{@plymio_codi_key_typespec_spec_args, [:spec_args]},
@plymio_codi_key_alias_typespec_spec_result,
@plymio_codi_key_alias_forms_edit
]
@pattern_query_dict_alias @pattern_query_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_query_normalise(opts, dict \\ nil) do
opts |> opts_take_canonical_keys(dict || @pattern_query_dict_alias)
end
@pattern_query_module_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_since,
@plymio_codi_key_alias_query_module,
@plymio_codi_key_alias_query_name,
@plymio_codi_key_alias_query_doc,
{@plymio_codi_key_take, nil},
{@plymio_codi_key_drop, nil},
{@plymio_codi_key_filter, nil},
{@plymio_codi_key_reject, nil},
@plymio_codi_key_alias_forms_edit
]
@pattern_query_module_dict_alias @pattern_query_module_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_query_module_normalise(opts, dict \\ nil) do
opts |> opts_canonical_keys(dict || @pattern_query_module_dict_alias)
end
@doc false
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_query do
with {:ok, cpo} <- cpo |> cpo_pattern_query_normalise,
{:ok, query_module} <- cpo |> cpo_resolve_query_module,
{:ok, query_name} <- cpo |> cpo_resolve_query_name,
{:ok, query_args} <- cpo |> cpo_resolve_query_args,
{:ok, cpo} <- cpo |> cpo_maybe_put_query_doc(@plymio_codi_doc_type_query),
{:ok, query_doc} <- cpo |> cpo_resolve_query_doc,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_name("#{query_name}?" |> String.to_atom()),
{:ok, real_name} <- cpo |> cpo_resolve_fun_name,
{:ok, {_, %CODI{} = state}} <-
state |> state_validate_mfa({query_module, query_name, length(query_args)}),
# base dependent cpo
{:ok, depend_cpo} <- cpo |> cpo_put_fun_module(query_module),
{:ok, depend_cpo} <- depend_cpo |> cpo_put_fun_doc(query_doc),
{:ok, depend_cpo} <- depend_cpo |> cpo_put_fun_arity(length(query_args)),
true <- true do
pattern_form =
query_module
|> case do
# local function
x when is_value_unset_or_nil(x) ->
quote do
def unquote(real_name)(unquote_splicing(query_args)) do
case unquote(query_name)(unquote_splicing(query_args)) do
{:ok, _} -> true
_ -> false
end
end
end
# explicit module
_ ->
quote do
def unquote(real_name)(unquote_splicing(query_args)) do
case unquote(query_module).unquote(query_name)(unquote_splicing(query_args)) do
{:ok, _} -> true
_ -> false
end
end
end
end
depend_args = [
{&cpo_has_fun_doc?/1,
[
&DEPEND.cpo_transform_doc_depend/1,
# the doc fun name is the query fun
{:cpo_put_fun_name, query_name}
]},
{&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
{&cpo_has_typespec_spec_opts?/1,
[
&DEPEND.cpo_transform_typespec_spec_depend/1,
# always boolean
{:cpo_put_typespec_spec_result, :boolean}
]}
]
with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
{:ok, {depend_cpos, %CODI{}}} <-
state |> DEPEND.create_depend_cpos(depend_cpo, depend_args) do
cpos = depend_cpos ++ [cpo]
{:ok, {cpos, state}}
else
{:error, %{__exception__: true}} = result -> result
end
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(%CODI{} = state, pattern, opts)
when pattern == @plymio_codi_pattern_query_module do
with {:ok, opts} <- opts |> cpo_pattern_query_module_normalise,
{:ok, query_module} <- opts |> cpo_resolve_query_module,
{:ok, {query_fva, %CODI{} = state}} <- state |> state_resolve_module_fva(query_module),
{:ok, query_fva} <- query_fva |> reduce_module_fva(opts),
{:ok, query_cpo} <- opts |> cpo_pattern_query_normalise,
{:ok, query_cpo} <- query_cpo |> cpo_mark_status_active,
{:ok, query_cpo} <- query_cpo |> cpo_put_pattern(@plymio_codi_pattern_query) do
query_fva
|> map_collate0_enum(fn {name, arity} ->
with {:ok, cpo} <- query_cpo |> cpo_put_query_name(name),
{:ok, _cpo} = result <- cpo |> cpo_put_query_arity(arity) do
result
else
{:error, %{__exception__: true}} = result -> result
end
end)
|> case do
{:error, %{__struct__: _}} = result -> result
{:ok, cpos} -> {:ok, {cpos, state}}
end
else
{:error, %{__exception__: true}} = result -> result
end
end
end
|
lib/codi/pattern/query/query.ex
| 0.903377
| 0.721069
|
query.ex
|
starcoder
|
defmodule Raxx.Router do
@moduledoc """
Routing for Raxx applications.
Routes are defined as a match and an action module.
Standard Elixir pattern matching is used to apply the match to an incoming request.
An action module another implementation of `Raxx.Server`
Sections group routes that all have the same middleware.
Middleware in a section maybe defined as a list,
this is useful when all configuration is known at compile-time.
Alternativly an arity 1 function can be used.
This can be used when middleware require runtime configuration.
The argument passed to this function is server initial state.
## Examples
defmodule MyRouter do
use Raxx.Router
section [{Raxx.Logger, level: :debug}], [
{%{method: :GET, path: ["ping"]}, Ping},
]
section &web/1, [
{%{method: :GET, path: []}, HomePage},
{%{method: :GET, path: ["users"]}, UsersPage},
{%{method: :GET, path: ["users", _id]}, UserPage},
{%{method: :POST, path: ["users"]}, CreateUser},
{_, NotFoundPage}
]
def web(state) do
[
{Raxx.Logger, level: state.log_level},
{MyMiddleware, foo: state.foo}
]
end
end
*The original API is kept for backwards compatibility.
See [previous docs](https://hexdocs.pm/raxx/0.17.2/Raxx.Router.html) for details.*
*If the sections DSL does not work for an application it is possible to instead just implement a `route/2` function.*
"""
@callback route(Raxx.Request.t(), term) :: Raxx.Stack.t()
@doc false
defmacro __using__(actions) when is_list(actions) do
# DEBT Remove this for 1.0 release
if actions != [] do
:elixir_errors.warn(__ENV__.line, __ENV__.file, """
Routes should not be passed as arguments to `use Raxx.Router`.
Instead make use of the `section/2` macro.
See documentation in `Raxx.Router` for details
""")
end
routes =
for {match, controller} <- actions do
{resolved_module, []} = Module.eval_quoted(__CALLER__, controller)
Raxx.Server.verify_implementation!(resolved_module)
# NOTE use resolved module to include any aliasing
controller_string = inspect(resolved_module)
match_string = Macro.to_string(match)
quote do
def route(request = unquote(match), state) do
Logger.metadata("raxx.action": unquote(controller_string))
Logger.metadata("raxx.route": unquote(match_string))
middlewares = []
Raxx.Stack.new(middlewares, {unquote(controller), state})
end
end
end
quote location: :keep do
if Enum.member?(Module.get_attribute(__MODULE__, :behaviour), Raxx.Server) do
:elixir_errors.warn(__ENV__.line, __ENV__.file, """
The module `#{inspect(__MODULE__)}` already included the behaviour `Raxx.Server`.
This is probably due to `use Raxx.Server`,
which is no longer necessary when implementing a router.
""")
else
@behaviour Raxx.Server
end
import unquote(__MODULE__)
@behaviour unquote(__MODULE__)
unquote(routes)
@impl Raxx.Server
def handle_head(request, state) do
stack = route(request, state)
Raxx.Server.handle_head(stack, request)
end
@impl Raxx.Server
def handle_data(data, stack) do
Raxx.Server.handle_data(stack, data)
end
@impl Raxx.Server
def handle_tail(trailers, stack) do
Raxx.Server.handle_tail(stack, trailers)
end
@impl Raxx.Server
def handle_info(message, stack) do
Raxx.Server.handle_info(stack, message)
end
end
end
@doc """
Define a set of routes with a common set of middlewares applied to them.
The first argument may be a list of middlewares;
or a function that accepts one argument, the initial state, and returns a list of middleware.
If all settings for a middleware can be decided at compile-time then a list is preferable.
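## Example
For example (the middleware options and `ShowStatus` module are illustrative):
    section [{Raxx.Logger, level: :info}], [
      {%{method: :GET, path: ["status"]}, ShowStatus}
    ]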
"""
defmacro section(middlewares, routes) do
state = quote do: state
resolved_middlewares =
quote do
case unquote(middlewares) do
middlewares when is_list(middlewares) ->
middlewares
stack_function when is_function(stack_function, 1) ->
stack_function.(unquote(state))
end
end
for {match, action} <- routes do
quote do
def route(unquote(match), unquote(state)) do
# Should this verify_implementation for the action/middlewares
# Perhaps Stack.new should do it
Raxx.Stack.new(unquote(resolved_middlewares), {unquote(action), unquote(state)})
end
end
end
end
end
|
lib/raxx/router.ex
| 0.844665
| 0.499268
|
router.ex
|
starcoder
|
defmodule Nebulex.Adapters.Multilevel do
@moduledoc ~S"""
Adapter module for Multi-level Cache.
This is just a simple layer on top of local or distributed cache
implementations that enables to have a cache hierarchy by levels.
Multi-level caches generally operate by checking the fastest,
level 1 (L1) cache first; if it hits, the adapter proceeds at
high speed. If that first cache misses, the next fastest cache
(level 2, L2) is checked, and so on, before accessing external
memory (that can be handled by a `cacheable` decorator).
For write functions, the "Write Through" policy is applied by default;
this policy ensures that the data is stored safely as it is written
throughout the hierarchy. However, it is possible to force the write
operation in a specific level (although it is not recommended) via
`level` option, where the value is a positive integer greater than 0.
We can define a multi-level cache as follows:
defmodule MyApp.Multilevel do
use Nebulex.Cache,
otp_app: :nebulex,
adapter: Nebulex.Adapters.Multilevel
defmodule L1 do
use Nebulex.Cache,
otp_app: :nebulex,
adapter: Nebulex.Adapters.Local
end
defmodule L2 do
use Nebulex.Cache,
otp_app: :nebulex,
adapter: Nebulex.Adapters.Partitioned
end
end
Where the configuration for the cache and its levels must be in your
application environment, usually defined in your `config/config.exs`:
config :my_app, MyApp.Multilevel,
model: :inclusive,
levels: [
{
MyApp.Multilevel.L1,
gc_interval: :timer.hours(12),
backend: :shards
},
{
MyApp.Multilevel.L2,
primary: [
gc_interval: :timer.hours(12),
backend: :shards
]
}
]
If your application was generated with a supervisor (by passing `--sup`
to `mix new`) you will have a `lib/my_app/application.ex` file containing
the application start callback that defines and starts your supervisor.
You just need to edit the `start/2` function to start the cache as a
supervisor on your application's supervisor:
def start(_type, _args) do
children = [
{MyApp.Multilevel, []},
...
  ]

  Supervisor.start_link(children, strategy: :one_for_one, name: MyApp.Supervisor)
end
See `Nebulex.Cache` for more information.
## Options
This adapter supports the following options and all of them can be given via
the cache configuration:
* `:levels` - This option is to define the levels, a list of tuples
`{cache_level :: Nebulex.Cache.t(), opts :: Keyword.t()}`, where
the first element is the module that defines the cache for that
level, and the second one is the options that will be passed to
that level in the `start_link/1` call (which depends on the adapter
this level is using). The order in which the levels are defined
is the same the multi-level cache will use. For example, the first
cache in the list will be the L1 cache (level 1) and so on;
the Nth element will be the LN cache. This option is mandatory,
if it is not set or empty, an exception will be raised.
* `:model` - Specifies the cache model: `:inclusive` or `:exclusive`;
defaults to `:inclusive`. In an inclusive cache, the same data can be
present in all caches/levels. In an exclusive cache, data can be present
in only one cache/level and a key cannot be found in the rest of caches
at the same time. This option affects `get` operation only; if
`:cache_model` is `:inclusive`, when the key is found in a level N,
that entry is duplicated backwards (to all previous levels: 1..N-1).
## Shared options
Almost all of the cache functions outlined in `Nebulex.Cache` module
accept the following options:
* `:level` - It may be an integer greater than 0 that specifies the cache
level where the operation will take place. By default, the evaluation
is performed throughout the whole cache hierarchy (all levels).
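For example, a sketch using the `MyApp.Multilevel` cache defined above:

    # Write through the whole hierarchy (default)
    MyApp.Multilevel.put("foo", "bar")

    # Write only to the level-2 cache
    MyApp.Multilevel.put("foo", "bar", level: 2)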
## Telemetry events
This adapter emits all of the recommended Telemetry events documented
in the `Nebulex.Cache` module (see the **"Adapter-specific events"** section).
Since the multi-level adapter is a layer/wrapper on top of other existing
adapters, each cache level may emit Telemetry events independently.
For example, for the `MyApp.Multilevel` cache defined before, the following
events will be emitted for the main multi-level cache:
* `[:my_app, :multilevel, :command, :start]`
* `[:my_app, :multilevel, :command, :stop]`
* `[:my_app, :multilevel, :command, :exception]`
For the L1 (configured with the local adapter):
* `[:my_app, :multilevel, :l1, :command, :start]`
* `[:my_app, :multilevel, :l1, :command, :stop]`
* `[:my_app, :multilevel, :l1, :command, :exception]`
For the L2 (configured with the partitioned adapter):
* `[:my_app, :multilevel, :l2, :command, :start]`
* `[:my_app, :multilevel, :l2, :primary, :command, :start]`
* `[:my_app, :multilevel, :l2, :command, :stop]`
* `[:my_app, :multilevel, :l2, :primary, :command, :stop]`
* `[:my_app, :multilevel, :l2, :command, :exception]`
* `[:my_app, :multilevel, :l2, :primary, :command, :exception]`
See also the [Telemetry guide](http://hexdocs.pm/nebulex/telemetry.html)
for more information and examples.
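As a quick sketch of consuming one of these events (the handler body is
illustrative; see the guide above for the exact measurement and metadata
shapes):

    :telemetry.attach(
      "multilevel-command-logger",
      [:my_app, :multilevel, :command, :stop],
      fn _event, measurements, metadata, _config ->
        # Inspect whatever the adapter attached to the event
        IO.inspect({measurements, metadata})
      end,
      nil
    )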
## Stats
Since the multi-level adapter works as a wrapper for the configured cache
levels, the support for stats depends on the underlying levels. Also, the
measurements are consolidated per level; they are not aggregated. For example,
if we enable the stats for the multi-level cache defined previously and run:
MyApp.Multilevel.stats()
The returned stats will look like:
%Nebulex.Stats{
measurements: %{
l1: %{evictions: 0, expirations: 0, hits: 0, misses: 0, writes: 0},
l2: %{evictions: 0, expirations: 0, hits: 0, misses: 0, writes: 0}
},
metadata: %{
l1: %{
cache: MyApp.Multilevel.L1,
started_at: ~U[2021-01-10 13:06:04.075084Z]
},
l2: %{
cache: MyApp.Multilevel.L2.Primary,
started_at: ~U[2021-01-10 13:06:04.089888Z]
},
cache: MyApp.Multilevel,
started_at: ~U[2021-01-10 13:06:04.066750Z]
}
}
**IMPORTANT:** Cache levels with stats disabled won't be included in
the returned stats (they are skipped). If a cache level uses an adapter
that does not support stats, you may get unexpected errors. Therefore,
as an overall recommendation, check out the documentation for the
adapters used by the underlying cache levels and ensure they implement
the `Nebulex.Adapter.Stats` behaviour.
### Stats with Telemetry
In case you are using Telemetry metrics, you can define the metrics per
level, for example:
last_value("nebulex.cache.stats.l1.hits",
event_name: "nebulex.cache.stats",
measurement: &get_in(&1, [:l1, :hits]),
tags: [:cache]
)
last_value("nebulex.cache.stats.l1.misses",
event_name: "nebulex.cache.stats",
measurement: &get_in(&1, [:l1, :misses]),
tags: [:cache]
)
> See the section **"Instrumenting Multi-level caches"** in the
[Telemetry guide](http://hexdocs.pm/nebulex/telemetry.html)
for more information.
## Extended API
This adapter provides one additional convenience function for retrieving
the cache model for the given cache `name`:
MyCache.model()
MyCache.model(:cache_name)
## Caveats of multi-level adapter
Because this adapter reuses other existing/configured adapters, it inherits
all their limitations too. Therefore, it is highly recommended to check the
documentation of the adapters to use.
"""
# Provide Cache Implementation
@behaviour Nebulex.Adapter
@behaviour Nebulex.Adapter.Entry
@behaviour Nebulex.Adapter.Queryable
@behaviour Nebulex.Adapter.Stats
# Inherit default transaction implementation
use Nebulex.Adapter.Transaction
import Nebulex.Adapter
import Nebulex.Helpers
alias Nebulex.Cache.Cluster
# Multi-level Cache Models
@models [:inclusive, :exclusive]
## Nebulex.Adapter
@impl true
defmacro __before_compile__(_env) do
quote do
@doc """
A convenience function to get the cache model.
"""
def model(name \\ __MODULE__) do
with_meta(name, fn _adapter, %{model: model} ->
model
end)
end
end
end
@impl true
def init(opts) do
# Required options
telemetry_prefix = Keyword.fetch!(opts, :telemetry_prefix)
telemetry = Keyword.fetch!(opts, :telemetry)
cache = Keyword.fetch!(opts, :cache)
name = opts[:name] || cache
# Maybe use stats
stats = get_boolean_option(opts, :stats)
# Get cache levels
levels =
get_option(
opts,
:levels,
"a list with at least one level definition",
&(Keyword.keyword?(&1) && length(&1) > 0)
)
# Get multilevel-cache model
model = get_option(opts, :model, ":inclusive or :exclusive", &(&1 in @models), :inclusive)
# Build multi-level specs
{children, meta_list, _} = children(levels, telemetry_prefix, telemetry, stats)
# Build adapter spec
child_spec =
Nebulex.Adapters.Supervisor.child_spec(
name: normalize_module_name([name, Supervisor]),
strategy: :one_for_one,
children: children
)
adapter_meta = %{
telemetry_prefix: telemetry_prefix,
telemetry: telemetry,
name: name,
levels: meta_list,
model: model,
stats: stats,
started_at: DateTime.utc_now()
}
{:ok, child_spec, adapter_meta}
end
# sobelow_skip ["DOS.BinToAtom"]
defp children(levels, telemetry_prefix, telemetry, stats) do
levels
|> Enum.reverse()
|> Enum.reduce({[], [], length(levels)}, fn {l_cache, l_opts}, {child_acc, meta_acc, n} ->
l_opts =
Keyword.merge(
[
telemetry_prefix: telemetry_prefix ++ [:"l#{n}"],
telemetry: telemetry,
stats: stats
],
l_opts
)
meta = %{cache: l_cache, name: l_opts[:name]}
{[{l_cache, l_opts} | child_acc], [meta | meta_acc], n - 1}
end)
end
## Nebulex.Adapter.Entry
@impl true
defspan get(adapter_meta, key, opts) do
fun = fn level, {default, prev} ->
if value = with_dynamic_cache(level, :get, [key, opts]) do
{:halt, {value, [level | prev]}}
else
{:cont, {default, [level | prev]}}
end
end
opts
|> levels(adapter_meta.levels)
|> Enum.reduce_while({nil, []}, fun)
|> maybe_replicate(key, adapter_meta.model)
end
@impl true
defspan get_all(adapter_meta, keys, _opts) do
Enum.reduce(keys, %{}, fn key, acc ->
if obj = get(adapter_meta, key, []),
do: Map.put(acc, key, obj),
else: acc
end)
end
@impl true
defspan put(adapter_meta, key, value, _ttl, on_write, opts) do
case on_write do
:put ->
:ok = eval(adapter_meta, :put, [key, value, opts], opts)
true
:put_new ->
eval(adapter_meta, :put_new, [key, value, opts], opts)
:replace ->
eval(adapter_meta, :replace, [key, value, opts], opts)
end
end
@impl true
defspan put_all(adapter_meta, entries, _ttl, on_write, opts) do
action = if on_write == :put_new, do: :put_new_all, else: :put_all
reducer = fn level, {_, level_acc} ->
case with_dynamic_cache(level, action, [entries, opts]) do
:ok ->
{:cont, {true, [level | level_acc]}}
true ->
{:cont, {true, [level | level_acc]}}
false ->
_ = delete_from_levels(level_acc, entries)
{:halt, {on_write == :put, level_acc}}
end
end
opts
|> levels(adapter_meta.levels)
|> Enum.reduce_while({true, []}, reducer)
|> elem(0)
end
@impl true
defspan delete(adapter_meta, key, opts) do
eval(adapter_meta, :delete, [key, opts], opts)
end
@impl true
defspan take(adapter_meta, key, opts) do
opts
|> levels(adapter_meta.levels)
|> do_take(nil, key, opts)
end
defp do_take([], result, _key, _opts), do: result
defp do_take([l_meta | rest], nil, key, opts) do
result = with_dynamic_cache(l_meta, :take, [key, opts])
do_take(rest, result, key, opts)
end
defp do_take(levels, result, key, _opts) do
_ = eval(levels, :delete, [key, []])
result
end
@impl true
defspan has_key?(adapter_meta, key) do
eval_while(adapter_meta, :has_key?, [key], false)
end
@impl true
defspan update_counter(adapter_meta, key, amount, _ttl, _default, opts) do
eval(adapter_meta, :incr, [key, amount, opts], opts)
end
@impl true
defspan ttl(adapter_meta, key) do
eval_while(adapter_meta, :ttl, [key], nil)
end
@impl true
defspan expire(adapter_meta, key, ttl) do
Enum.reduce(adapter_meta.levels, false, fn l_meta, acc ->
with_dynamic_cache(l_meta, :expire, [key, ttl]) or acc
end)
end
@impl true
defspan touch(adapter_meta, key) do
Enum.reduce(adapter_meta.levels, false, fn l_meta, acc ->
with_dynamic_cache(l_meta, :touch, [key]) or acc
end)
end
## Nebulex.Adapter.Queryable
@impl true
defspan execute(adapter_meta, operation, query, opts) do
{reducer, acc_in} =
case operation do
:all -> {&(&1 ++ &2), []}
_ -> {&(&1 + &2), 0}
end
Enum.reduce(adapter_meta.levels, acc_in, fn level, acc ->
level
|> with_dynamic_cache(operation, [query, opts])
|> reducer.(acc)
end)
end
@impl true
defspan stream(adapter_meta, query, opts) do
Stream.resource(
fn ->
adapter_meta.levels
end,
fn
[] ->
{:halt, []}
[level | levels] ->
elements =
level
|> with_dynamic_cache(:stream, [query, opts])
|> Enum.to_list()
{elements, levels}
end,
& &1
)
end
## Nebulex.Adapter.Transaction
@impl true
defspan transaction(adapter_meta, opts, fun) do
# One of the levels may be a distributed adapter, so make sure the
# lock is set on the right cluster nodes.
nodes =
adapter_meta.levels
|> Enum.reduce([node()], fn %{name: name, cache: cache}, acc ->
if cache.__adapter__ in [Nebulex.Adapters.Partitioned, Nebulex.Adapters.Replicated] do
Cluster.get_nodes(name || cache) ++ acc
else
acc
end
end)
|> Enum.uniq()
super(adapter_meta, Keyword.put(opts, :nodes, nodes), fun)
end
@impl true
defspan in_transaction?(adapter_meta) do
super(adapter_meta)
end
## Nebulex.Adapter.Stats
@impl true
defspan stats(adapter_meta) do
if adapter_meta.stats do
init_acc = %Nebulex.Stats{
metadata: %{
cache: adapter_meta.name || adapter_meta.cache,
started_at: adapter_meta.started_at
}
}
adapter_meta.levels
|> Enum.with_index(1)
|> Enum.reduce(init_acc, &update_stats/2)
end
end
# We can safely disable this warning since the atom created dynamically is
# always re-used; the number of levels is limited and known beforehand.
# sobelow_skip ["DOS.BinToAtom"]
defp update_stats({meta, idx}, stats_acc) do
if stats = with_dynamic_cache(meta, :stats, []) do
level_idx = :"l#{idx}"
measurements = Map.put(stats_acc.measurements, level_idx, stats.measurements)
metadata = Map.put(stats_acc.metadata, level_idx, stats.metadata)
%{stats_acc | measurements: measurements, metadata: metadata}
else
stats_acc
end
end
## Helpers
defp with_dynamic_cache(%{cache: cache, name: nil}, action, args) do
apply(cache, action, args)
end
defp with_dynamic_cache(%{cache: cache, name: name}, action, args) do
cache.with_dynamic_cache(name, fn ->
apply(cache, action, args)
end)
end
defp eval(%{levels: levels}, fun, args, opts) do
eval(levels, fun, args, opts)
end
defp eval(levels, fun, args, opts) when is_list(levels) do
opts
|> levels(levels)
|> eval(fun, args)
end
defp eval([level_meta | next], fun, args) do
Enum.reduce(next, with_dynamic_cache(level_meta, fun, args), fn l_meta, acc ->
^acc = with_dynamic_cache(l_meta, fun, args)
end)
end
defp levels(opts, levels) do
case Keyword.get(opts, :level) do
nil -> levels
level -> [Enum.at(levels, level - 1)]
end
end
defp eval_while(%{levels: levels}, fun, args, init) do
Enum.reduce_while(levels, init, fn level_meta, acc ->
if return = with_dynamic_cache(level_meta, fun, args),
do: {:halt, return},
else: {:cont, acc}
end)
end
defp delete_from_levels(levels, entries) do
for level_meta <- levels, {key, _} <- entries do
with_dynamic_cache(level_meta, :delete, [key, []])
end
end
defp maybe_replicate({nil, _}, _, _), do: nil
defp maybe_replicate({value, [level_meta | [_ | _] = levels]}, key, :inclusive) do
ttl = with_dynamic_cache(level_meta, :ttl, [key]) || :infinity
:ok =
Enum.each(levels, fn l_meta ->
_ = with_dynamic_cache(l_meta, :put, [key, value, [ttl: ttl]])
end)
value
end
defp maybe_replicate({value, _levels}, _key, _model) do
value
end
end
|
lib/nebulex/adapters/multilevel.ex
| 0.88054
| 0.623234
|
multilevel.ex
|
starcoder
|
defmodule Day12.NBody2 do
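@moduledoc """
Advent of Code 2019, day 12 (part 2): step the four-moon system until the
full state (positions and velocities) repeats. The step function is
reversible and the initial velocities are zero, so the first repeated
state is the initial one, and only states where all velocities are zero
again need to be checked against the history.
"""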
def part2 do
# puzzle input
positions = %{
:a => {-1, 7, 3},
:b => {12, 2, -13},
:c => {14, 18, -8},
:d => {17, 4, -4}
}
# example 1
# positions = %{
# :a => {-1, 0, 2},
# :b => {2, -10, -7},
# :c => {4, -8, 8},
# :d => {3, 5, -1}
# }
# example 2
# positions = %{
# :a => {-8, -10, 0},
# :b => {5, 5, 10},
# :c => {2, -7, 3},
# :d => {9, -8, -3}
# }
all_pairs = create_pairs(Map.keys(positions))
velocities = %{:a => {0, 0, 0}, :b => {0, 0, 0}, :c => {0, 0, 0}, :d => {0, 0, 0}}
iterate(all_pairs, positions, velocities, 1, %{{positions, velocities} => true})
end
def iterate(pairs, positions, velocities, iteration \\ 0, history \\ %{}) do
new_velocities = apply_gravity(pairs, positions, velocities)
new_positions = apply_velocities(positions, new_velocities)
# Velocities return to zero on every full cycle, so only these states
# need to be checked against the history.
if Map.values(new_velocities) == [{0, 0, 0}, {0, 0, 0}, {0, 0, 0}, {0, 0, 0}] do
  if history[{new_positions, new_velocities}] do
    IO.puts("Repeat found at iteration #{iteration}")
  else
    # Record the zero-velocity state itself so a later repeat of it is detected
    history = Map.put(history, {new_positions, new_velocities}, true)
    iterate(pairs, new_positions, new_velocities, iteration + 1, history)
  end
else
if rem(iteration, 100_000) == 0 do
IO.puts("Iteration #{iteration}")
IO.inspect({new_positions, new_velocities})
end
iterate(pairs, new_positions, new_velocities, iteration + 1, history)
end
end
@doc """
iex> Day12.NBody2.create_pairs ["a", "b"]
[{"a", "b"}]
iex> Day12.NBody2.create_pairs ["a", "b", "c"]
[{"a", "b"}, {"a", "c"}, {"b", "c"}]
"""
def create_pairs(keys) do
for key1 <- keys, key2 <- keys, key1 != key2 do
[key1, key2] |> Enum.sort() |> List.to_tuple()
end
|> Enum.uniq()
end
def apply_gravity([], _positions, velocities), do: velocities
def apply_gravity([{a, b} | pairs], positions, velocities) do
{va, vb} =
new_velocities(
{positions[a], positions[b]},
{Map.get(velocities, a, {0, 0, 0}), Map.get(velocities, b, {0, 0, 0})}
)
velocities =
velocities
|> Map.put(a, va)
|> Map.put(b, vb)
apply_gravity(pairs, positions, velocities)
end
@doc """
iex> Day12.NBody2.new_velocities({{3,4,5}, {5,4,3}}, {{0,0,0}, {0,0,0}})
{{1,0,-1}, {-1,0,1}}
"""
def new_velocities({{ax, ay, az}, {bx, by, bz}}, {{avx, avy, avz}, {bvx, bvy, bvz}}) do
{{avx + dv(ax, bx), avy + dv(ay, by), avz + dv(az, bz)},
{bvx + dv(bx, ax), bvy + dv(by, ay), bvz + dv(bz, az)}}
end
def dv(a, b) when a > b, do: -1
def dv(a, b) when a == b, do: 0
def dv(a, b) when a < b, do: 1
def apply_velocities(positions, velocities) do
positions
|> Enum.map(fn {key, pos} -> {key, apply_velocity(pos, velocities[key])} end)
|> Enum.into(%{})
end
def apply_velocity({x, y, z}, {vx, vy, vz}), do: {x + vx, y + vy, z + vz}
def calculate_energy({x, y, z}, {vx, vy, vz}),
do: (abs(x) + abs(y) + abs(z)) * (abs(vx) + abs(vy) + abs(vz))
end
|
lib/day12/n_body2.ex
| 0.68763
| 0.61568
|
n_body2.ex
|
starcoder
|
defmodule Ash.Type do
@list_constraints [
min_length: [
type: :non_neg_integer,
doc: "A minimum length for the items"
],
max_length: [
type: :non_neg_integer,
doc: "A maximum length for the items"
],
nil_items?: [
type: :boolean,
doc: "Whether or not the list can contain nil items",
default: true
]
]
@short_names [
map: Ash.Type.Map,
term: Ash.Type.Term,
atom: Ash.Type.Atom,
string: Ash.Type.String,
integer: Ash.Type.Integer,
boolean: Ash.Type.Boolean,
uuid: Ash.Type.UUID,
date: Ash.Type.Date,
utc_datetime: Ash.Type.UtcDatetime
]
@builtin_types Keyword.values(@short_names)
def builtin?(type) when type in @builtin_types, do: true
def builtin?(_), do: false
@doc_list_constraints Keyword.put(@list_constraints, :items,
type: :any,
doc:
"Constraints for the elements of the list. See the contained type's docs for more."
)
@moduledoc """
This behaviour is a superset of the `Ecto.Type` behaviour that also carries
API-level information, like what kinds of filters are allowed. Eventually,
this may be used for composite types or serialization.
It is much better to `use Ash.Type` than to say `@behaviour Ash.Type` and
define everything yourself.
## Built in types
#{
Enum.map_join(@short_names, fn {key, module} ->
"* `#{inspect(key)}` - `#{inspect(module)}`\n"
end)
}
### Composite Types
Currently, the only composite type supported is a list type, specified via:
`{:array, Type}`. The constraints available are:
#{Ash.OptionsHelpers.docs(@doc_list_constraints)}
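For example, constraints on a list of strings can be checked with
`apply_constraints/3` (an illustrative sketch; the exact error terms depend
on the contained type):

    Ash.Type.apply_constraints(
      {:array, Ash.Type.String},
      ["a", nil],
      min_length: 1, nil_items?: false
    )
    # => roughly {:error, [{"no nil/null values at index %{index}", index: 1}]}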
"""
@type constraints :: Keyword.t()
@callback storage_type() :: Ecto.Type.t()
@callback ecto_type() :: Ecto.Type.t()
@callback cast_input(term) :: {:ok, term} | {:error, Keyword.t()} | :error
@callback cast_stored(term) :: {:ok, term} | :error
@callback dump_to_native(term) :: {:ok, term} | :error
@callback constraints() :: constraints()
@callback apply_constraints(term, constraints()) ::
:ok | {:error, constraint_error() | list(constraint_error)}
@callback describe(constraints()) :: String.t() | nil
@callback equal?(term, term) :: boolean
@type constraint_error :: String.t() | {String.t(), Keyword.t()}
@type t :: atom | {:array, atom}
def describe(type, constraints) do
case get_type(type) do
{:array, type} ->
type.describe(constraints)
type ->
type.describe(constraints)
end
end
@doc false
def list_constraints, do: @list_constraints
@spec get_type(atom | module) :: atom | module | {:array, atom | module}
def get_type({:array, value}) do
{:array, get_type(value)}
end
def get_type(value) when is_atom(value) do
case Keyword.fetch(@short_names, value) do
{:ok, mod} -> mod
:error -> value
end
end
def get_type(value) do
value
end
@doc """
Returns the *underlying* storage type (the underlying type of the *ecto type* of the *ash type*)
"""
@spec storage_type(t()) :: Ecto.Type.t()
def storage_type({:array, type}), do: {:array, type.storage_type()}
def storage_type(type), do: type.storage_type()
@doc """
Returns the ecto compatible type for an Ash.Type.
If you `use Ash.Type`, this is created for you. For builtin types
this may return a corresponding ecto builtin type (atom)
"""
@spec ecto_type(t) :: Ecto.Type.t()
def ecto_type({:array, type}), do: {:array, ecto_type(type)}
for {name, mod} <- @short_names do
def ecto_type(unquote(name)), do: ecto_type(unquote(mod))
end
def ecto_type(type) do
type.ecto_type()
end
@spec ash_type?(term) :: boolean
@doc "Returns true if the value is a builtin type or adopts the `Ash.Type` behaviour"
def ash_type?({:array, value}), do: ash_type?(value)
def ash_type?(module) when is_atom(module) do
case Code.ensure_compiled(module) do
{:module, _} -> ash_type_module?(module)
_ -> false
end
end
def ash_type?(_), do: false
@doc """
Casts input (e.g. unknown) data to an instance of the type, or errors
Maps to `Ecto.Type.cast/2`
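A hedged example (assuming the builtin integer type casts numeric strings,
as `Ecto.Type` does):

    Ash.Type.cast_input({:array, Ash.Type.Integer}, [1, "2"])
    # => {:ok, [1, 2]}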
"""
@spec cast_input(t(), term) :: {:ok, term} | {:error, Keyword.t()} | :error
def cast_input({:array, _type}, term) when not is_list(term) do
{:error, message: "must be a list"}
end
def cast_input({:array, type}, term) do
term
|> Enum.with_index()
|> Enum.reverse()
|> Enum.reduce_while({:ok, []}, fn {item, index}, {:ok, casted} ->
case cast_input(type, item) do
:error ->
{:halt, {:error, index: index}}
{:error, keyword} ->
{:halt, {:error, Keyword.put(keyword, :index, index)}}
{:ok, value} ->
{:cont, {:ok, [value | casted]}}
end
end)
end
def cast_input(type, term) do
case type.cast_input(term) do
{:ok, value} ->
{:ok, value}
:error ->
{:error, "is invalid"}
{:error, other} ->
{:error, other}
end
end
@doc """
Casts a value from the data store to an instance of the type, or errors
Maps to `Ecto.Type.load/2`
"""
@spec cast_stored(t(), term) :: {:ok, term} | {:error, keyword()} | :error
def cast_stored({:array, type}, term) when is_list(term) do
term
|> Enum.with_index()
|> Enum.reverse()
|> Enum.reduce_while({:ok, []}, fn {item, index}, {:ok, casted} ->
case cast_stored(type, item) do
:error ->
{:halt, {:error, index: index}}
{:error, keyword} ->
{:halt, {:error, Keyword.put(keyword, :index, index)}}
{:ok, value} ->
{:cont, {:ok, [value | casted]}}
end
end)
end
def cast_stored(type, term) do
type.cast_stored(term)
end
@doc """
Confirms if a casted value matches the provided constraints.
"""
@spec apply_constraints(t(), term, constraints()) ::
        :ok | {:error, constraint_error() | list(constraint_error())}
def apply_constraints({:array, type}, term, constraints) when is_list(constraints) do
list_constraint_errors = list_constraint_errors(term, constraints)
case list_constraint_errors do
[] ->
nil_items? = Keyword.get(constraints, :nil_items?, true)
item_constraints = constraints[:items] || []
if item_constraints != [] || !nil_items? do
errors =
term
|> Enum.with_index()
|> Enum.reduce([], fn {item, index}, errors ->
errors =
if is_nil(item) && not nil_items? do
[{"no nil/null values at index %{index}", index: index} | errors]
else
errors
end
case apply_constraints(type, item, item_constraints) do
:ok ->
errors
{:error, new_errors} ->
new_errors =
new_errors
|> List.wrap()
|> Enum.map(fn
{template, replacements} ->
{template <> " at index %{index}",
Keyword.put(replacements, :index, index)}
string ->
{string <> " at index %{index}", index: index}
end)
List.wrap(new_errors) ++ errors
end
end)
if errors == [] do
:ok
else
{:error, errors}
end
else
:ok
end
errors ->
{:error, errors}
end
end
def apply_constraints({:array, _}, _, _) do
{:error, ["must be a list"]}
end
def apply_constraints(type, term, constraints) do
type.apply_constraints(term, constraints)
end
defp list_constraint_errors(term, constraints) do
length =
if Keyword.has_key?(constraints, :max_length) || Keyword.has_key?(constraints, :min_length) do
length(term)
else
0
end
constraints
|> Enum.reduce([], fn
{:min_length, min_length}, errors ->
if length < min_length do
[{"must have more than %{min} items", min: min_length} | errors]
else
errors
end
{:max_length, max_length}, errors ->
if length > max_length do
[{"must have fewer than %{max} items", max: max_length} | errors]
else
errors
end
_, errors ->
errors
end)
end
@spec constraints(t()) :: constraints()
def constraints({:array, _type}) do
@list_constraints
end
def constraints(type) do
type.constraints()
end
@doc """
Casts a value from the Elixir type to a value that the data store can persist
Maps to `Ecto.Type.dump/2`
"""
@spec dump_to_native(t(), term) :: {:ok, term} | {:error, keyword()} | :error
def dump_to_native({:array, type}, term) do
term
|> Enum.reverse()
|> Enum.reduce_while({:ok, []}, fn item, {:ok, dumped} ->
case dump_to_native(type, item) do
:error ->
{:halt, :error}
{:ok, value} ->
{:cont, {:ok, [value | dumped]}}
end
end)
end
def dump_to_native(type, term) do
type.dump_to_native(term)
end
@doc """
Determines if two values of a given type are equal.
Maps to `Ecto.Type.equal?/3`
"""
@spec equal?(t(), term, term) :: boolean
def equal?({:array, type}, [nil | xs], [nil | ys]), do: equal?({:array, type}, xs, ys)
def equal?({:array, type}, [x | xs], [y | ys]),
do: equal?(type, x, y) && equal?({:array, type}, xs, ys)
def equal?({:array, _}, [], []), do: true
def equal?({:array, _}, _, _), do: false
def equal?(type, left, right) do
type.equal?(left, right)
end
defmacro __using__(_) do
quote location: :keep do
@behaviour Ash.Type
parent = __MODULE__
defmodule EctoType do
@moduledoc false
@behaviour Ecto.Type
@parent parent
@impl true
def type do
storage_type = @parent.storage_type()
if Ash.Type.ash_type?(storage_type) do
Ash.Type.storage_type(@parent.storage_type())
else
storage_type
end
end
@impl true
def cast(term) do
@parent.cast_input(term)
end
@impl true
def load(term) do
@parent.cast_stored(term)
end
@impl true
def dump(term) do
@parent.dump_to_native(term)
end
@impl true
def equal?(left, right) do
@parent.equal?(left, right)
end
@impl true
def embed_as(_), do: :self
end
@impl true
def ecto_type, do: EctoType
@impl true
def equal?(left, right), do: left == right
@impl true
def constraints, do: []
@impl true
def describe([]), do: String.trim_leading(inspect(__MODULE__), "Ash.Type.")
def describe(constraints) do
"#{String.trim_leading(inspect(__MODULE__), "Ash.Type.")} | #{inspect(constraints)}"
end
@impl true
def apply_constraints(_, _), do: :ok
defoverridable equal?: 2, constraints: 0, apply_constraints: 2
end
end
defp ash_type_module?(module) do
Ash.implements_behaviour?(module, __MODULE__)
end
end
|
lib/ash/type/type.ex
| 0.841077
| 0.499268
|
type.ex
|
starcoder
|
defmodule Example.Todo do
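@moduledoc """
Ecto schema and persistence helpers for Todo records: create, update,
delete, and list operations that normalize results into `{:ok, _}` /
`{:error, _}` tuples with human-readable changeset errors.
"""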
use Ecto.Schema
schema "todos" do
field :title, :string
field :completed, :boolean
timestamps [
type: :naive_datetime_usec
]
end
@doc """
Creates a Todo in the database
"""
def create(%Ecto.Changeset{} = changeset) do
result = Example.Repo.insert(changeset)
case result do
{:ok, todo} ->
{:ok, to_result(todo)}
{:error, cs} ->
to_error(cs)
end
end
@doc """
Updates a Todo in the database
"""
def update(id, params) do
with %__MODULE__{} = todo <- Example.Repo.get(__MODULE__, id),
cs <- changeset(todo, params),
{:ok, updated} <- Example.Repo.update(cs) do
{:ok, to_result(updated)}
else
nil ->
:ok
{:error, cs} ->
to_error(cs)
end
end
@doc """
Deletes a Todo with the given id
"""
def delete(id) do
with %__MODULE__{} = todo <- Example.Repo.get(__MODULE__, id),
{:ok, _} <- Example.Repo.delete(todo) do
:ok
else
nil ->
:ok
{:error, cs} ->
to_error(cs)
end
end
@doc """
Deletes all Todos
"""
def delete_all() do
Example.Repo.delete_all(__MODULE__)
:ok
rescue
err in [Ecto.QueryError] ->
{:error, Exception.message(err)}
end
@doc """
Returns all Todos from the database
"""
def all() do
todos =
Example.Repo.all(__MODULE__)
|> Enum.map(&to_result/1)
{:ok, todos}
rescue
err in [Ecto.QueryError] ->
{:error, Exception.message(err)}
end
@doc """
Applies a set of parameters to a Todo struct, validating them,
the result is an Ecto.Changeset
"""
def changeset(%__MODULE__{} = todo, params \\ %{}) do
todo
|> Ecto.Changeset.cast(params, [:title, :completed])
|> Ecto.Changeset.validate_required([:title])
end
defp to_error(%Ecto.Changeset{} = cs) do
errs =
Ecto.Changeset.traverse_errors(cs, fn {msg, opts} ->
Enum.reduce(opts, msg, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
{:error, errs}
end
defp to_result(%__MODULE__{} = result) do
  # Strip Ecto metadata. Note that Map.delete/2 drops the :__meta__ key;
  # the result still carries :__struct__ but is no longer a complete
  # %__MODULE__{} struct, so it should be used for serialization only.
  result
  |> Map.delete(:__meta__)
end
end
|
lib/example/todo.ex
| 0.609989
| 0.447158
|
todo.ex
|
starcoder
|
defmodule GN.Evolution do
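@moduledoc """
Evolutionary operators over network layer definitions: offspring are
spawned by probabilistically duplicating a random slice of the seed
layers, removing layers, and mutating layer types and parameters
(integers get Gaussian noise, floats are resampled uniformly, and atoms
are swapped for a random activation function).
"""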
import GN.Gluon
@layer_types %{
dense: &dense/3,
activation: &activation/2,
dropout: &dropout/2,
batch_norm: &batch_norm/1,
leaky_relu: &leaky_relu/2,
flatten: &flatten/1
}
def layer_types() do
@layer_types
end
@mutation_rate 0.25
@std_dev 2
def spawn_offspring(seed_layers, mutation_rate \\ @mutation_rate) do
duplicate(seed_layers, mutation_rate)
|> remove(mutation_rate)
|> Enum.map(&mutate(&1, mutation_rate))
end
def duplicate(seed_layers, mutation_rate, all \\ false) do
cond do
should_mutate(mutation_rate) ->
end_index = length(seed_layers) - 1
duplicate_segment = random_slice(seed_layers, all)
insertion_point = find_insertion_point(end_index)
Enum.concat([
Enum.slice(seed_layers, 0..insertion_point),
duplicate_segment,
Enum.slice(seed_layers, (insertion_point + 1)..end_index)
])
true ->
seed_layers
end
end
def random_slice(seed_layers, true) do
seed_layers
end
def random_slice(seed_layers, _false) do
end_index = length(seed_layers) - 1
[slice_start, slice_finish] = find_slice(end_index)
Enum.slice(seed_layers, slice_start..slice_finish)
end
def find_slice(end_index) do
Enum.take_random(0..end_index, 2) |> Enum.sort()
end
def find_insertion_point(end_index) do
Enum.take_random(0..end_index, 1) |> hd()
end
def remove(seed_layers, mutation_rate) do
Enum.filter(seed_layers, fn _ -> !should_mutate(mutation_rate) end)
end
def mutate({seed_layer_type, seed_params} = _layer, mutation_rate) do
cond do
should_mutate(mutation_rate) -> mutate_layer(mutation_rate)
true -> {seed_layer_type, mutate_params(seed_params, mutation_rate)}
end
end
def mutate_layer(mutation_rate) do
[new_layer_type] = Enum.take_random(Map.keys(layer_types()), 1)
new_params =
seed_params(new_layer_type)
|> mutate_params(mutation_rate)
{new_layer_type, new_params}
end
def mutate_params(params, mutation_rate) do
for param <- params do
cond do
should_mutate(mutation_rate) ->
cond do
is_atom(param) ->
Enum.take_random(activation_functions(), 1) |> hd()
is_integer(param) ->
Statistics.Distributions.Normal.rand(param, @std_dev)
|> Statistics.Math.to_int()
is_float(param) ->
:rand.uniform()
end
true ->
param
end
end
end
def seed_params(layer_type) do
GN.Parameters.get(__MODULE__, :layer_types)
|> Map.get(layer_type, [])
end
def build_layer({layer_type, params}, py) do
with_py = [py | params]
Map.get(layer_types(), layer_type) |> apply(with_py)
end
def should_mutate(mutation_rate) do
:rand.uniform() < mutation_rate
end
end
|
lib/galapagos_nao/evolution.ex
| 0.691706
| 0.565269
|
evolution.ex
|
starcoder
|
defmodule EventStore.AdvisoryLocks do
@moduledoc false
# PostgreSQL provides a means for creating locks that have application-defined
# meanings. Advisory locks are faster, avoid table bloat, and are
# automatically cleaned up by the server at the end of the session.
use GenServer
defmodule State do
@moduledoc false
defstruct conn: nil, state: :connected, locks: %{}
end
defmodule Lock do
@moduledoc false
defstruct key: nil, opts: nil
end
alias EventStore.AdvisoryLocks.{Lock, State}
alias EventStore.Storage
def start_link(conn) do
GenServer.start_link(__MODULE__, %State{conn: conn}, name: __MODULE__)
end
def init(%State{} = state) do
{:ok, state}
end
@doc """
Attempt to obtain an advisory lock.
- `key` - an application specific integer to acquire a lock on.
- `opts` - an optional keyword list:
- `lock_released` - a 0-arity function called when the lock is released
(usually due to a lost database connection).
- `lock_reacquired` - a 0-arity function called when the lock has been
successfully reacquired.
Returns `:ok` when the lock is successfully acquired, or
`{:error, :lock_already_taken}` if the lock cannot be acquired immediately.
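Example (illustrative; `42` is an arbitrary application-defined key):

    :ok =
      EventStore.AdvisoryLocks.try_advisory_lock(42,
        lock_released: fn -> IO.puts("lock lost") end,
        lock_reacquired: fn -> IO.puts("lock reacquired") end
      )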
"""
@spec try_advisory_lock(key :: non_neg_integer(), opts :: list) ::
:ok | {:error, :lock_already_taken} | {:error, term}
def try_advisory_lock(key, opts \\ []) when is_integer(key) do
GenServer.call(__MODULE__, {:try_advisory_lock, key, self(), opts})
end
def disconnect do
GenServer.cast(__MODULE__, :disconnect)
end
def reconnect do
GenServer.cast(__MODULE__, :reconnect)
end
def handle_call({:try_advisory_lock, key, pid, opts}, _from, %State{} = state) do
%State{conn: conn} = state
case Storage.Lock.try_acquire_exclusive_lock(conn, key) do
:ok ->
{:reply, :ok, monitor_lock(key, pid, opts, state)}
reply ->
{:reply, reply, state}
end
end
def handle_cast(:disconnect, %State{locks: locks} = state) do
for {_ref, %Lock{opts: opts}} <- locks do
:ok = notify(:lock_released, opts)
end
{:noreply, %State{state | state: :disconnected}}
end
def handle_cast(:reconnect, %State{} = state) do
%State{conn: conn, locks: locks} = state
for {_ref, %Lock{key: key, opts: opts}} <- locks do
with :ok <- Storage.Lock.try_acquire_exclusive_lock(conn, key),
:ok <- notify(:lock_reacquired, opts) do
:ok
else
{:error, :lock_already_taken} -> :ok
end
end
{:noreply, %State{state | state: :connected}}
end
defp notify(notification, opts) do
case Keyword.get(opts, notification) do
fun when is_function(fun, 0) ->
apply(fun, [])
_ ->
:ok
end
end
defp monitor_lock(key, pid, opts, %State{locks: locks} = state) do
ref = Process.monitor(pid)
%State{state | locks: Map.put(locks, ref, %Lock{key: key, opts: opts})}
end
def handle_info({:DOWN, ref, :process, _pid, _reason}, %State{locks: locks} = state) do
state =
case Map.get(locks, ref) do
nil ->
state
%Lock{key: key} ->
:ok = release_lock(key, state)
%State{state | locks: Map.delete(locks, ref)}
end
{:noreply, state}
end
defp release_lock(key, %State{conn: conn, state: state}) do
case state do
:connected -> Storage.Lock.unlock(conn, key)
_ -> :ok
end
end
end
|
lib/event_store/advisory_locks.ex
| 0.706393
| 0.408011
|
advisory_locks.ex
|
starcoder
|
defmodule ChallengeGov.Challenges do
@moduledoc """
Context for Challenges
"""
@behaviour Stein.Filter
import Ecto.Query
alias ChallengeGov.Accounts
alias ChallengeGov.Challenges.Challenge
alias ChallengeGov.Challenges.ChallengeOwner
alias ChallengeGov.Challenges.FederalPartner
alias ChallengeGov.Challenges.Logo
alias ChallengeGov.Challenges.Phase
alias ChallengeGov.Challenges.WinnerImage
alias ChallengeGov.Challenges.ResourceBanner
alias ChallengeGov.Emails
alias ChallengeGov.Mailer
alias ChallengeGov.Phases
alias ChallengeGov.Repo
alias ChallengeGov.SavedChallenges
alias ChallengeGov.SecurityLogs
alias ChallengeGov.SupportingDocuments
alias ChallengeGov.Timeline.Event
alias Stein.Filter
# BOOKMARK: Functions for fetching valid attribute values
@doc false
def challenge_types(), do: Challenge.challenge_types()
@doc false
def legal_authority(), do: Challenge.legal_authority()
@doc false
def statuses(), do: Challenge.statuses()
@doc false
def sub_statuses(), do: Challenge.sub_statuses()
@doc false
def status_label(status) do
status_data = Enum.find(statuses(), fn s -> s.id == status end)
if status_data do
status_data.label
else
status
end
end
# BOOKMARK: Wizard functionality helpers
@doc false
def sections(), do: Challenge.sections()
@doc false
def section_index(section), do: Challenge.section_index(section)
@doc false
def next_section(section), do: Challenge.next_section(section)
@doc false
def prev_section(section), do: Challenge.prev_section(section)
@doc false
def to_section(section, action), do: Challenge.to_section(section, action)
# BOOKMARK: Create and update functions
@doc """
New changeset for a challenge
"""
def new(user) do
%Challenge{}
|> challenge_form_preload()
|> Challenge.create_changeset(%{}, user)
end
@doc """
Import challenges: no user, owner, documents or security logging
"""
def import_create(challenge_params) do
challenge_params =
challenge_params
|> check_non_federal_partners
result =
Ecto.Multi.new()
|> Ecto.Multi.insert(
:challenge,
Challenge.import_changeset(%Challenge{}, challenge_params)
)
|> attach_federal_partners(challenge_params)
|> Ecto.Multi.run(:logo, fn _repo, %{challenge: challenge} ->
Logo.maybe_upload_logo(challenge, challenge_params)
end)
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
{:ok, challenge}
{:error, :challenge, changeset, _} ->
{:error, changeset}
end
end
def create(%{"action" => action, "challenge" => challenge_params}, user, remote_ip \\ nil) do
challenge_params =
challenge_params
|> check_non_federal_partners
result =
Ecto.Multi.new()
|> Ecto.Multi.insert(
:challenge,
changeset_for_action(%Challenge{}, challenge_params, action)
)
|> attach_initial_owner(user)
|> attach_federal_partners(challenge_params)
|> attach_challenge_owners(challenge_params)
|> attach_documents(challenge_params)
|> Ecto.Multi.run(:logo, fn _repo, %{challenge: challenge} ->
Logo.maybe_upload_logo(challenge, challenge_params)
end)
|> add_to_security_log_multi(user, "create", remote_ip)
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
{:ok, challenge}
{:error, :challenge, changeset, _} ->
{:error, changeset}
end
end
@doc """
Changeset for editing a challenge (as an admin)
"""
def edit(challenge) do
challenge
|> challenge_form_preload()
|> Challenge.update_changeset(%{})
end
def update(challenge, params, user, remote_ip \\ nil)
def update(challenge, %{"action" => action, "challenge" => challenge_params}, user, remote_ip) do
section = Map.get(challenge_params, "section")
challenge_params =
challenge_params
|> check_non_federal_partners
result =
Ecto.Multi.new()
|> Ecto.Multi.update(:challenge, changeset_for_action(challenge, challenge_params, action))
|> attach_federal_partners(challenge_params)
|> attach_challenge_owners(challenge_params)
|> attach_documents(challenge_params)
|> Ecto.Multi.run(:logo, fn _repo, %{challenge: challenge} ->
Logo.maybe_upload_logo(challenge, challenge_params)
end)
|> Ecto.Multi.run(:resource_banner, fn _repo, %{challenge: challenge} ->
ResourceBanner.maybe_upload_resource_banner(challenge, challenge_params)
end)
|> add_to_security_log_multi(user, "update", remote_ip, %{action: action, section: section})
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
maybe_send_submission_confirmation(challenge, action)
{:ok, challenge}
{:error, _type, changeset, _changes} ->
{:error, changeset}
end
end
@doc """
Update a challenge
"""
def update(challenge, params, current_user, remote_ip) do
challenge = challenge_form_preload(challenge)
params =
params
|> Map.put_new("challenge_owners", [])
|> Map.put_new("federal_partners", [])
|> Map.put_new("non_federal_partners", [])
|> Map.put_new("events", [])
changeset =
if Accounts.has_admin_access?(current_user) do
Challenge.admin_update_changeset(challenge, params)
else
Challenge.update_changeset(challenge, params)
end
result =
Ecto.Multi.new()
|> Ecto.Multi.update(:challenge, changeset)
|> attach_federal_partners(params)
|> attach_challenge_owners(params)
|> Ecto.Multi.run(:event, fn _repo, %{challenge: challenge} ->
maybe_create_event(challenge, changeset)
end)
|> Ecto.Multi.run(:logo, fn _repo, %{challenge: challenge} ->
Logo.maybe_upload_logo(challenge, params)
end)
|> Ecto.Multi.run(:winner_image, fn _repo, %{challenge: challenge} ->
WinnerImage.maybe_upload_winner_image(challenge, params)
end)
|> add_to_security_log_multi(current_user, "update", remote_ip)
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
{:ok, challenge}
{:error, _type, changeset, _changes} ->
{:error, changeset}
end
end
# BOOKMARK: Create and update helper functions
defp changeset_for_action(struct, params, action) do
struct = challenge_form_preload(struct)
case action do
a when a == "back" or a == "save_draft" ->
Challenge.draft_changeset(struct, params, action)
_ ->
Challenge.section_changeset(struct, params, action)
end
end
defp challenge_form_preload(challenge) do
Repo.preload(challenge, [
:non_federal_partners,
:phases,
:events,
:user,
:challenge_owner_users,
:supporting_documents,
:sub_agency,
federal_partners: [:agency, :sub_agency],
federal_partner_agencies: [:sub_agencies],
agency: [:sub_agencies]
])
end
defp base_preload(challenge) do
preload(challenge, [
:non_federal_partners,
:events,
:user,
:challenge_owner_users,
:supporting_documents,
:sub_agency,
federal_partners: [:agency, :sub_agency],
federal_partner_agencies: [:sub_agencies],
phases: [winners: [:winners]],
agency: [:sub_agencies]
])
end
defp check_non_federal_partners(params) do
if Map.get(params, "non_federal_partners") == "" do
Map.put(params, "non_federal_partners", [])
else
params
end
end
def all_unpaginated(opts \\ []) do
base_query()
|> order_by([c], asc: c.end_date, asc: c.id)
|> Filter.filter(opts[:filter], __MODULE__)
|> Repo.all()
end
# BOOKMARK: Querying functions
@doc """
Get all published challenges (paginated)
"""
def all(opts \\ []) do
base_query()
|> where([c], c.status == "published")
|> order_by([c], asc: c.end_date, asc: c.id)
|> Filter.filter(opts[:filter], __MODULE__)
|> Repo.paginate(opts[:page], opts[:per])
end
def all_public(opts \\ []) do
base_query()
|> where([c], c.status == "published" or c.sub_status == "open")
|> where([c], c.end_date >= ^DateTime.utc_now())
|> order_by([c], asc: c.end_date, asc: c.id)
|> Filter.filter(opts[:filter], __MODULE__)
|> Repo.paginate(opts[:page], opts[:per])
end
@doc """
Get all challenges that are archived, or published but closed/archived
(by sub-status, end date, or archive date)
"""
def all_archived(opts \\ []) do
base_query()
|> where(
[c],
(c.status == "published" and
(c.sub_status == "archived" or c.sub_status == "closed" or
(c.archive_date <= ^DateTime.utc_now() or c.end_date <= ^DateTime.utc_now()))) or
c.status == "archived"
)
|> order_by([c], asc: c.end_date, asc: c.id)
|> Filter.filter(opts[:filter], __MODULE__)
|> Repo.paginate(opts[:page], opts[:per])
end
@doc """
Get all public challenges, non-paginated, for the sitemap
"""
def all_for_sitemap() do
base_query()
|> where([c], c.status == "published" or c.status == "archived")
|> order_by([c], asc: c.end_date, asc: c.id)
|> Repo.all()
end
@doc """
Get all published challenges for govdelivery topics
"""
def all_for_govdelivery() do
base_query()
|> where([c], c.status == "published" and c.sub_status != "archived")
|> where([c], is_nil(c.gov_delivery_topic))
|> Repo.all()
end
@doc """
Get all archived challenges for removal from govdelivery topics
"""
def all_for_removal_from_govdelivery() do
base_query()
|> where(
[c],
c.status == "archived" or (c.status == "published" and c.sub_status == "archived")
)
|> where(
[c],
c.archive_date < ^Timex.shift(DateTime.utc_now(), months: -3)
)
|> where([c], not is_nil(c.gov_delivery_topic))
|> Repo.all()
end
@doc """
Get all challenges with govdelivery topics
"""
def all_in_govdelivery() do
base_query()
|> where([c], not is_nil(c.gov_delivery_topic))
|> Repo.all()
end
def all_ready_for_publish() do
base_query()
|> where([c], c.status == "approved")
|> where([c], fragment("? <= ?", c.auto_publish_date, ^DateTime.utc_now()))
|> Repo.all()
end
@doc """
Get all challenges regardless of status (admin view)
"""
def admin_all(opts \\ []) do
base_query()
|> order_by([c], desc: c.status, desc: c.id)
|> Filter.filter(opts[:filter], __MODULE__)
|> Repo.paginate(opts[:page], opts[:per])
end
@doc """
Get all challenges pending GSA review for a user
"""
def all_pending_for_user(user, opts \\ []) do
user
|> base_all_for_user_query()
|> where([c], c.status == "gsa_review")
|> order_on_attribute(opts[:sort])
|> Filter.filter(opts[:filter], __MODULE__)
|> Repo.paginate(opts[:page], opts[:per])
end
@doc """
Get all challenges for a user
"""
def all_for_user(user, opts \\ []) do
user
|> base_all_for_user_query()
|> order_on_attribute(opts[:sort])
|> Filter.filter(opts[:filter], __MODULE__)
|> Repo.paginate(opts[:page], opts[:per])
end
defp base_query() do
Challenge
|> where([c], is_nil(c.deleted_at))
|> base_preload
end
defp base_all_for_user_query(%{id: id, role: "challenge_owner"}) do
base_query()
|> join(:inner, [c], co in assoc(c, :challenge_owners))
|> where([c, co], co.user_id == ^id and is_nil(co.revoked_at))
end
defp base_all_for_user_query(_), do: base_query()
@doc """
Get a challenge
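Accepts an integer id, a string id, or a custom URL slug, and returns
`{:ok, challenge}` or `{:error, :not_found}`:

    ChallengeGov.Challenges.get(42)
    ChallengeGov.Challenges.get("42")
    ChallengeGov.Challenges.get("my-custom-url")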
"""
def get(id) do
with false <- is_integer(id),
{id, _} <- Integer.parse(id) do
Challenge
|> where([c], c.id == ^id)
|> get_query()
else
true ->
Challenge
|> where([c], c.id == ^id)
|> get_query()
:error ->
Challenge
|> where([c], c.custom_url == ^id)
|> get_query()
end
|> case do
nil ->
{:error, :not_found}
challenge ->
challenge = Repo.preload(challenge, events: from(e in Event, order_by: e.occurs_on))
{:ok, challenge}
end
end
@doc """
Get a challenge by uuid
"""
def get_by_uuid(uuid) do
Challenge
|> where([c], c.uuid == ^uuid)
|> get_query()
|> case do
nil ->
{:error, :not_found}
challenge ->
challenge = Repo.preload(challenge, events: from(e in Event, order_by: e.occurs_on))
{:ok, challenge}
end
end
defp get_query(struct) do
struct
|> where([c], is_nil(c.deleted_at))
|> preload([
:supporting_documents,
:user,
:federal_partner_agencies,
:non_federal_partners,
:agency,
:sub_agency,
:challenge_owners,
:challenge_owner_users,
:events,
phases: ^{from(p in Phase, order_by: p.start_date), [winners: :winners]},
federal_partners: [:agency, :sub_agency]
])
|> Repo.one()
end
@doc """
Submit a new challenge for a user
"""
def old_create(user, params, remote_ip) do
result =
Ecto.Multi.new()
|> Ecto.Multi.insert(:challenge, create_challenge(user, params))
|> attach_initial_owner(user)
|> attach_federal_partners(params)
|> attach_challenge_owners(params)
|> attach_documents(params)
|> Ecto.Multi.run(:logo, fn _repo, %{challenge: challenge} ->
Logo.maybe_upload_logo(challenge, params)
end)
|> Ecto.Multi.run(:winner_image, fn _repo, %{challenge: challenge} ->
WinnerImage.maybe_upload_winner_image(challenge, params)
end)
|> add_to_security_log_multi(user, "create", remote_ip)
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
send_pending_challenge_email(challenge)
{:ok, challenge}
{:error, :challenge, changeset, _} ->
{:error, changeset}
{:error, {:document, _}, _, _} ->
user
|> Ecto.build_assoc(:challenges)
|> Challenge.create_changeset(params, user)
|> Ecto.Changeset.add_error(:document_ids, "are invalid")
|> Ecto.Changeset.apply_action(:insert)
end
end
defp create_challenge(user, params) do
user
|> Ecto.build_assoc(:challenges)
|> Map.put(:challenge_owner_users, [])
|> Map.put(:federal_partners, [])
|> Map.put(:federal_partner_agencies, [])
|> Challenge.create_changeset(params, user)
end
# Attach federal partners functions
defp attach_federal_partners(multi, %{"federal_partners" => ""}) do
attach_federal_partners(multi, %{"federal_partners" => []})
end
defp attach_federal_partners(multi, %{"federal_partners" => federal_partners}) do
multi =
Ecto.Multi.run(multi, :delete_agencies, fn _repo, changes ->
{:ok,
Repo.delete_all(
from(fp in FederalPartner, where: fp.challenge_id == ^changes.challenge.id)
)}
end)
Enum.reduce(federal_partners, multi, fn {id, federal_partner}, multi ->
%{"agency_id" => agency_id, "sub_agency_id" => sub_agency_id} = federal_partner
Ecto.Multi.run(multi, {:id, id, :agency, agency_id, :sub_agency, sub_agency_id}, fn _repo,
changes ->
maybe_create_federal_partner(agency_id, sub_agency_id, changes)
end)
end)
end
defp attach_federal_partners(multi, _params), do: multi
defp maybe_create_federal_partner(agency_id, sub_agency_id, changes)
when not is_nil(agency_id) and agency_id !== "" do
%FederalPartner{}
|> FederalPartner.changeset(%{
agency_id: agency_id,
sub_agency_id: sub_agency_id,
challenge_id: changes.challenge.id
})
|> Repo.insert()
end
defp maybe_create_federal_partner(_agency_id, _sub_agency_id, _changes), do: {:ok, nil}
# Attach challenge owners functions
defp attach_initial_owner(multi, user) do
Ecto.Multi.run(multi, {:user, user.id}, fn _repo, changes ->
%ChallengeOwner{}
|> ChallengeOwner.changeset(%{
user_id: user.id,
challenge_id: changes.challenge.id
})
|> Repo.insert()
end)
end
# Attach challenge owners functions
defp attach_challenge_owners(multi, %{challenge_owners: ids}) do
attach_challenge_owners(multi, %{"challenge_owners" => ids})
end
defp attach_challenge_owners(multi, %{"challenge_owners" => ids}) do
multi =
Ecto.Multi.run(multi, :delete_owners, fn _repo, changes ->
{:ok,
Repo.delete_all(
from(co in ChallengeOwner, where: co.challenge_id == ^changes.challenge.id)
)}
end)
Enum.reduce(ids, multi, fn user_id, multi ->
Ecto.Multi.run(multi, {:user, user_id}, fn _repo, changes ->
%ChallengeOwner{}
|> ChallengeOwner.changeset(%{
user_id: user_id,
challenge_id: changes.challenge.id
})
|> Repo.insert()
end)
end)
end
defp attach_challenge_owners(multi, _params), do: multi
# Attach supporting document functions
defp attach_documents(multi, %{document_ids: ids}) do
attach_documents(multi, %{"document_ids" => ids})
end
defp attach_documents(multi, %{"document_ids" => ids}) do
Enum.reduce(ids, multi, fn document_id, multi ->
Ecto.Multi.run(multi, {:document, document_id}, fn _repo, changes ->
document_id
|> SupportingDocuments.get()
|> attach_document(changes.challenge)
end)
end)
end
defp attach_documents(multi, _params), do: multi
defp attach_document({:ok, document}, challenge) do
SupportingDocuments.attach_to_challenge(document, challenge, "resources", "")
end
defp attach_document(result, _challenge), do: result
@doc """
Delete a challenge
"""
def delete(challenge) do
Repo.delete(challenge)
end
@doc """
Delete a challenge if allowed
"""
def delete(challenge, user, remote_ip) do
if allowed_to_delete(user, challenge) do
soft_delete(challenge, user, remote_ip)
else
{:error, :not_permitted}
end
end
def soft_delete(challenge, user, remote_ip) do
now = DateTime.truncate(Timex.now(), :second)
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:deleted_at, now)
|> Repo.update()
|> case do
{:ok, challenge} ->
add_to_security_log(user, challenge, "delete", remote_ip)
{:ok, challenge}
{:error, changeset} ->
{:error, changeset}
end
end
@doc """
Checks if a user is allowed to delete a challenge
"""
def allowed_to_delete(user, challenge) do
Accounts.has_admin_access?(user) or challenge.status == "draft"
end
@doc """
Checks if a user is allowed to edit a challenge
"""
def allowed_to_edit(user, challenge) do
if is_challenge_owner?(user, challenge) or
Accounts.has_admin_access?(user) do
{:ok, challenge}
else
{:error, :not_permitted}
end
end
def allowed_to_edit?(user, challenge) do
case allowed_to_edit(user, challenge) do
{:ok, _challenge} -> true
{:error, :not_permitted} -> false
end
end
@doc """
Checks if a user can send a bulletin
"""
def can_send_bulletin(user, challenge) do
if (is_challenge_owner?(user, challenge) or
Accounts.has_admin_access?(user)) and
challenge.gov_delivery_topic != nil and
challenge.gov_delivery_topic != "" do
{:ok, challenge}
else
{:error, :not_permitted}
end
end
@doc """
Checks if a user is in the list of owners for a challenge and not revoked
"""
def is_challenge_owner?(user, challenge) do
challenge.challenge_owners
|> Enum.reject(fn co ->
!is_nil(co.revoked_at)
end)
|> Enum.map(fn co ->
co.user_id
end)
|> Enum.member?(user.id)
end
@doc """
Restores access to a user's challenges
"""
def restore_access(user, challenge) do
ChallengeOwner
|> where([co], co.user_id == ^user.id and co.challenge_id == ^challenge.id)
|> Repo.update_all(set: [revoked_at: nil])
end
defp maybe_create_event(challenge, changeset) do
case is_nil(Ecto.Changeset.get_change(changeset, :status)) do
true ->
{:ok, challenge}
false ->
create_status_event(challenge)
{:ok, challenge}
end
end
# BOOKMARK: Helper functions
def find_start_date(challenge) do
challenge.start_date
end
def find_end_date(challenge) do
challenge.end_date
end
# BOOKMARK: Phase helper functions
@doc """
Returns currently active phase
"""
def current_phase(%{phases: phases}) when length(phases) > 0 do
phases
|> Enum.find(fn phase ->
Phases.is_current?(phase)
end)
|> case do
nil ->
{:error, :no_current_phase}
phase ->
{:ok, phase}
end
end
def current_phase(_challenge), do: {:error, :no_current_phase}
@doc """
Returns phase of a challenge after the phase passed in
"""
def next_phase(%{phases: phases}, current_phase) do
phase_index =
Enum.find_index(phases, fn phase ->
phase.id == current_phase.id
end)
case Enum.at(phases, phase_index + 1) do
nil ->
{:error, :not_found}
phase ->
{:ok, phase}
end
end
@doc """
Returns if a challenge has closed phases or not
"""
def has_closed_phases?(%{phases: phases}) do
Enum.any?(phases, fn phase ->
Phases.is_past?(phase)
end)
end
def is_multi_phase?(challenge) do
length(challenge.phases) > 1
end
@doc """
Create a new status event when the status changes (currently a no-op)
"""
def create_status_event(_), do: :ok
# BOOKMARK: Base status functions
def is_draft?(%{status: "draft"}), do: true
def is_draft?(_challenge), do: false
def in_review?(%{status: "gsa_review"}), do: true
def in_review?(_challenge), do: false
def is_approved?(%{status: "approved"}), do: true
def is_approved?(_challenge), do: false
def has_edits_requested?(%{status: "edits_requested"}), do: true
def has_edits_requested?(_challenge), do: false
def is_published?(%{status: "published"}), do: true
def is_published?(_challenge), do: false
def is_unpublished?(%{status: "unpublished"}), do: true
def is_unpublished?(_challenge), do: false
def is_open?(%{sub_status: "open"}), do: true
def is_open?(challenge = %{start_date: start_date, end_date: end_date})
when not is_nil(start_date) and not is_nil(end_date) do
now = DateTime.utc_now()
is_published?(challenge) and DateTime.compare(now, start_date) === :gt and
DateTime.compare(now, end_date) === :lt
end
def is_open?(_challenge), do: false
def is_closed?(%{sub_status: "closed"}), do: true
def is_closed?(challenge = %{end_date: end_date}) when not is_nil(end_date) do
now = DateTime.utc_now()
is_published?(challenge) and DateTime.compare(now, end_date) === :gt
end
def is_closed?(_challenge), do: false
def is_archived_new?(%{status: "archived"}), do: true
def is_archived_new?(%{status: "published", sub_status: "archived"}), do: true
def is_archived_new?(challenge = %{archive_date: archive_date}) when not is_nil(archive_date) do
now = DateTime.utc_now()
is_published?(challenge) and DateTime.compare(now, archive_date) === :gt
end
def is_archived_new?(challenge = %{phases: phases}) when length(phases) > 0 do
now = DateTime.utc_now()
phases_end_date =
Enum.max_by(phases, fn p ->
d = p.end_date
if d do
{d.year, d.month, d.day, d.hour, d.minute, d.second, d.microsecond}
end
end).end_date
is_published?(challenge) and DateTime.compare(now, phases_end_date) === :gt
end
def is_archived_new?(_challenge), do: false
def is_archived?(%{status: "archived"}), do: true
def is_archived?(_challenge), do: false
def set_sub_statuses() do
Challenge
|> where([c], c.status == "published")
|> Repo.all()
|> Enum.reduce(Ecto.Multi.new(), fn challenge, multi ->
Ecto.Multi.update(multi, {:challenge, challenge.id}, set_sub_status(challenge))
end)
|> Repo.transaction()
end
def set_sub_status(challenge) do
cond do
is_archived_new?(challenge) ->
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:sub_status, "archived")
is_closed?(challenge) ->
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:sub_status, "closed")
is_open?(challenge) ->
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:sub_status, "open")
true ->
challenge
|> Ecto.Changeset.change()
end
end
def set_status(current_user, challenge, status, remote_ip) do
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:status, status)
|> Repo.update()
|> case do
{:ok, challenge} ->
add_to_security_log(current_user, challenge, "status_change", remote_ip, %{
status: status
})
{:ok, challenge}
{:error, changeset} ->
{:error, changeset}
end
end
# BOOKMARK: Advanced status functions
@doc """
Checks if the challenge should be publicly accessible. Either published or archived
"""
def is_public?(challenge) do
is_published?(challenge) or is_archived?(challenge)
end
def is_submittable?(challenge) do
!in_review?(challenge) and (is_draft?(challenge) or has_edits_requested?(challenge))
end
def is_submittable?(challenge, user) do
(Accounts.has_admin_access?(user) or is_challenge_owner?(user, challenge)) and
is_submittable?(challenge)
end
def is_approvable?(challenge) do
in_review?(challenge) or is_unpublished?(challenge)
end
def is_approvable?(challenge, user) do
Accounts.has_admin_access?(user) and is_approvable?(challenge)
end
def can_request_edits?(challenge) do
in_review?(challenge) or has_edits_requested?(challenge) or is_open?(challenge) or
is_approved?(challenge)
end
def can_request_edits?(challenge, user) do
Accounts.has_admin_access?(user) and can_request_edits?(challenge)
end
def is_archivable?(challenge) do
is_published?(challenge) or is_unpublished?(challenge)
end
def is_archivable?(challenge, user) do
Accounts.has_admin_access?(user) and is_archivable?(challenge)
end
def is_unarchivable?(challenge) do
is_archived?(challenge)
end
def is_unarchivable?(challenge, user) do
Accounts.has_admin_access?(user) and is_unarchivable?(challenge)
end
def is_publishable?(challenge) do
is_approved?(challenge)
end
def is_publishable?(challenge, user) do
Accounts.has_admin_access?(user) and is_publishable?(challenge)
end
def is_unpublishable?(challenge) do
(is_approved?(challenge) or is_published?(challenge) or is_archived?(challenge)) and
!(is_closed?(challenge) or is_archived_new?(challenge))
end
def is_unpublishable?(challenge, user) do
Accounts.has_admin_access?(user) and is_unpublishable?(challenge)
end
def edit_with_wizard?(challenge) do
challenge.status != "gsa_review"
end
def is_editable?(_challenge) do
true
end
def is_editable?(challenge, user) do
(is_challenge_owner?(user, challenge) or Accounts.has_admin_access?(user)) and
edit_with_wizard?(challenge)
end
# BOOKMARK: Status altering functions
def submit(challenge, user, remote_ip) do
changeset =
challenge
|> Challenge.section_changeset(%{"section" => "review"}, "submit")
result =
Ecto.Multi.new()
|> Ecto.Multi.update(:challenge, changeset)
|> add_to_security_log_multi(user, "status_change", remote_ip, %{status: "gsa_review"})
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
send_pending_challenge_email(challenge)
{:ok, challenge}
{:error, :challenge, changeset, _changes} ->
{:error, changeset}
end
end
def approve(challenge, user, remote_ip) do
changeset = Challenge.approve_changeset(challenge)
result =
Ecto.Multi.new()
|> Ecto.Multi.update(:challenge, changeset)
|> add_to_security_log_multi(user, "status_change", remote_ip, %{status: "approved"})
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
{:ok, challenge}
{:error, _type, changeset, _changes} ->
{:error, changeset}
end
end
def reject(challenge, user, remote_ip, message \\ "") do
changeset = Challenge.reject_changeset(challenge, message)
result =
Ecto.Multi.new()
|> Ecto.Multi.update(:challenge, changeset)
|> add_to_security_log_multi(user, "status_change", remote_ip, %{
status: "edits_requested",
message: message
})
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
send_challenge_rejection_emails(challenge)
{:ok, challenge}
{:error, _type, changeset, _changes} ->
{:error, changeset}
end
end
def publish(challenge, user, remote_ip) do
changeset = Challenge.publish_changeset(challenge)
result =
Ecto.Multi.new()
|> Ecto.Multi.update(:challenge, changeset)
|> add_to_security_log_multi(user, "status_change", remote_ip, %{status: "published"})
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
{:ok, challenge}
{:error, _type, changeset, _changes} ->
{:error, changeset}
end
end
def unpublish(challenge, user, remote_ip) do
changeset = Challenge.unpublish_changeset(challenge)
result =
Ecto.Multi.new()
|> Ecto.Multi.update(:challenge, changeset)
|> add_to_security_log_multi(user, "status_change", remote_ip, %{status: "unpublished"})
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
{:ok, challenge}
{:error, _type, changeset, _changes} ->
{:error, changeset}
end
end
def archive(challenge, user, remote_ip) do
changeset =
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:status, "archived")
result =
Ecto.Multi.new()
|> Ecto.Multi.update(:challenge, changeset)
|> add_to_security_log_multi(user, "status_change", remote_ip, %{status: "archived"})
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
{:ok, challenge}
{:error, _type, changeset, _changes} ->
{:error, changeset}
end
end
def unarchive(challenge, user, remote_ip) do
changeset =
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:status, "published")
result =
Ecto.Multi.new()
|> Ecto.Multi.update(:challenge, changeset)
|> add_to_security_log_multi(user, "status_change", remote_ip, %{status: "published"})
|> Repo.transaction()
case result do
{:ok, %{challenge: challenge}} ->
{:ok, challenge}
{:error, _type, changeset, _changes} ->
{:error, changeset}
end
end
def create_announcement(challenge, announcement) do
challenge
|> Challenge.create_announcement_changeset(announcement)
|> Repo.update()
end
def remove_announcement(challenge) do
challenge
|> Challenge.remove_announcement_changeset()
|> Repo.update()
end
# BOOKMARK: Email functions
defp send_pending_challenge_email(challenge) do
challenge
|> Emails.pending_challenge_email()
|> Mailer.deliver_later()
end
defp send_challenge_rejection_emails(challenge) do
Enum.map(challenge.challenge_owner_users, fn owner ->
owner
|> Emails.challenge_rejection_email(challenge)
|> Mailer.deliver_later()
end)
end
defp maybe_send_submission_confirmation(challenge, action) when action === "submit" do
Enum.map(challenge.challenge_owner_users, fn owner ->
owner
|> Emails.challenge_submission(challenge)
|> Mailer.deliver_later()
end)
end
defp maybe_send_submission_confirmation(_challenge, _action), do: nil
# BOOKMARK: Security log functions
defp add_to_security_log_multi(multi, user, type, remote_ip, details \\ nil) do
Ecto.Multi.run(multi, :log, fn _repo, %{challenge: challenge} ->
add_to_security_log(user, challenge, type, remote_ip, details)
end)
end
def add_to_security_log(user, challenge, type, remote_ip, details \\ nil) do
SecurityLogs.track(%{
originator_id: user.id,
originator_role: user.role,
originator_identifier: user.email,
originator_remote_ip: remote_ip,
target_id: challenge.id,
target_type: "challenge",
target_identifier: challenge.title,
action: type,
details: details
})
end
# BOOKMARK: Misc functions
def remove_logo(challenge) do
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:logo_key, nil)
|> Ecto.Changeset.put_change(:logo_extension, nil)
|> Repo.update()
end
def subscriber_count(challenge) do
max(
SavedChallenges.count_for_challenge(challenge),
challenge.gov_delivery_subscribers
)
end
def update_subscribe_count(challenge, {:ok, count}) do
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:gov_delivery_subscribers, count)
|> Repo.update()
end
def update_subscribe_count(_challenge, _result), do: nil
def store_gov_delivery_topic(challenge, topic) do
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:gov_delivery_topic, topic)
|> Repo.update()
end
def clear_gov_delivery_topic(challenge) do
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:gov_delivery_topic, nil)
|> Ecto.Changeset.put_change(:gov_delivery_subscribers, 0)
|> Repo.update()
end
def remove_winner_image(challenge) do
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:winner_image_key, nil)
|> Ecto.Changeset.put_change(:winner_image_extension, nil)
|> Repo.update()
end
def remove_resource_banner(challenge) do
challenge
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_change(:resource_banner_key, nil)
|> Ecto.Changeset.put_change(:resource_banner_extension, nil)
|> Repo.update()
end
# BOOKMARK: Recurring tasks
def check_for_auto_publish do
Enum.map(all_ready_for_publish(), fn challenge ->
challenge
|> Challenge.publish_changeset()
|> Repo.update()
|> email_challenge_owners("challenge_auto_publish")
end)
end
defp email_challenge_owners({:ok, challenge}, template) do
challenge = Repo.preload(challenge, [:challenge_owner_users])
Enum.map(challenge.challenge_owner_users, fn owner ->
case template do
"challenge_auto_publish" ->
owner
|> Emails.challenge_auto_published(challenge)
|> Mailer.deliver_later()
_ ->
nil
end
end)
end
defp email_challenge_owners(_, _), do: nil
# Used in search filter
defp maybe_filter_id(query, id) do
case Integer.parse(id) do
{id, _} ->
or_where(query, [c], c.id == ^id)
_ ->
query
end
end
# BOOKMARK: Filter functions
@impl Stein.Filter
def filter_on_attribute({"search", value}, query) do
original_value = value
value = "%" <> value <> "%"
query
|> where([c], ilike(c.title, ^value) or ilike(c.description, ^value))
|> maybe_filter_id(original_value)
end
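  # For example, filtering on %{"search" => "42"} matches challenges whose
  # title or description contains "42" OR the challenge with id 42 (via
  # maybe_filter_id/2 above).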
def filter_on_attribute({"status", value}, query) do
where(query, [c], c.status == ^value)
end
def filter_on_attribute({"sub_status", value}, query) do
where(query, [c], c.sub_status == ^value)
end
def filter_on_attribute({"types", values}, query) do
Enum.reduce(values, query, fn value, query ->
where(query, [c], fragment("? @> ?::jsonb", c.types, ^[value]))
end)
end
def filter_on_attribute({"year", value}, query) do
{value, _} = Integer.parse(value)
where(query, [c], fragment("date_part('year', ?) = ?", c.end_date, ^value))
end
def filter_on_attribute({"agency_id", value}, query) do
where(query, [c], c.agency_id == ^value)
end
def filter_on_attribute({"user_id", value}, query) do
where(query, [c], c.user_id == ^value)
end
def filter_on_attribute({"user_ids", ids}, query) do
query
|> join(:inner, [c], co in assoc(c, :challenge_owners))
|> where([co], co.user_id in ^ids)
end
def filter_on_attribute({"start_date_start", value}, query) do
{:ok, datetime} = Timex.parse(value, "{YYYY}-{0M}-{0D}")
where(query, [c], c.start_date >= ^datetime)
end
def filter_on_attribute({"start_date_end", value}, query) do
{:ok, datetime} = Timex.parse(value, "{YYYY}-{0M}-{0D}")
where(query, [c], c.start_date <= ^datetime)
end
def filter_on_attribute({"end_date_start", value}, query) do
{:ok, datetime} = Timex.parse(value, "{YYYY}-{0M}-{0D}")
where(query, [c], c.end_date >= ^datetime)
end
def filter_on_attribute({"end_date_end", value}, query) do
{:ok, datetime} = Timex.parse(value, "{YYYY}-{0M}-{0D}")
where(query, [c], c.end_date <= ^datetime)
end
def filter_on_attribute(_, query), do: query
# BOOKMARK: Order functions
def order_on_attribute(query, %{"user" => direction}) do
query = join(query, :left, [c], a in assoc(c, :user))
case direction do
"asc" ->
order_by(query, [c, a], asc_nulls_last: a.first_name)
"desc" ->
order_by(query, [c, a], desc_nulls_last: a.first_name)
_ ->
query
end
end
def order_on_attribute(query, %{"agency" => direction}) do
query = join(query, :left, [c], a in assoc(c, :agency))
case direction do
"asc" ->
order_by(query, [c, a], asc_nulls_last: a.name)
"desc" ->
order_by(query, [c, a], desc_nulls_last: a.name)
_ ->
query
end
end
def order_on_attribute(query, sort_columns) do
columns_to_sort =
Enum.reduce(sort_columns, [], fn {column, direction}, acc ->
column = String.to_atom(column)
case direction do
"asc" ->
acc ++ [asc_nulls_last: column]
"desc" ->
acc ++ [desc_nulls_last: column]
        _ ->
          acc
end
end)
order_by(query, [c], ^columns_to_sort)
end
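  # For example, order_on_attribute(query, %{"title" => "asc"}) produces
  # order_by(query, [c], asc_nulls_last: :title); entries with an
  # unrecognized direction are skipped.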
end
|
lib/challenge_gov/challenges.ex
| 0.674908
| 0.422445
|
challenges.ex
|
starcoder
|
defmodule Rajska do
@moduledoc """
Rajska is an elixir authorization library for [Absinthe](https://github.com/absinthe-graphql/absinthe).
It provides the following middlewares:
- `Rajska.QueryAuthorization`
- `Rajska.QueryScopeAuthorization`
- `Rajska.ObjectAuthorization`
- `Rajska.ObjectScopeAuthorization`
- `Rajska.FieldAuthorization`
## Installation
The package can be installed by adding `rajska` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:rajska, "~> 0.1.0"},
]
end
```
## Usage
Create your Authorization module, which implements the `Rajska.Authorization` behaviour and contains the logic to validate user permissions; it will be called by the Rajska middlewares. Rajska provides some helper functions by default, such as `c:Rajska.Authorization.is_role_authorized?/2`, `c:Rajska.Authorization.has_user_access?/3` and `c:Rajska.Authorization.is_field_authorized?/3`, but you can override them to match your application's needs.
```elixir
defmodule Authorization do
use Rajska,
roles: [:user, :admin]
end
```
Note: if you pass a non-Keyword list to `roles`, as above, Rajska assumes your roles are in ascending order and that the last one is the super role. You can override this behavior by defining your own `c:Rajska.Authorization.is_super_role?/1` function or by passing a Keyword list in the format `[user: 0, admin: 1]`.
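For example, the same configuration with explicit tiers (equivalent to the plain list above):
```elixir
defmodule Authorization do
  use Rajska,
    roles: [user: 1, admin: 2]
end
```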
Add your Authorization module to your `Absinthe.Schema` [context/1](https://hexdocs.pm/absinthe/Absinthe.Schema.html#c:context/1) callback and the desired middlewares to the [middleware/3](https://hexdocs.pm/absinthe/Absinthe.Middleware.html#module-the-middleware-3-callback) callback:
```elixir
def context(ctx), do: Map.put(ctx, :authorization, Authorization)
def middleware(middleware, field, %Absinthe.Type.Object{identifier: identifier})
when identifier in [:query, :mutation, :subscription] do
middleware
|> Rajska.add_query_authorization(field, Authorization)
|> Rajska.add_object_authorization()
end
def middleware(middleware, field, object) do
Rajska.add_field_authorization(middleware, field, object)
end
```
Since Scope Authorization middleware must be used with Query Authorization, it is automatically called when adding the former.
"""
alias Rajska.Authorization
defmacro __using__(opts \\ []) do
all_role = Keyword.get(opts, :all_role, :all)
roles = Keyword.get(opts, :roles)
roles_with_tier = add_tier_to_roles!(roles)
roles_names = get_role_names(roles)
super_roles = get_super_roles(roles_with_tier)
quote do
@behaviour Authorization
@spec config() :: Keyword.t()
def config do
Keyword.merge(unquote(opts), [all_role: unquote(all_role), roles: unquote(roles_with_tier)])
end
def get_current_user(%{current_user: current_user}), do: current_user
def get_user_role(%{role: role}), do: role
def get_user_role(nil), do: nil
def user_role_names, do: unquote(roles_names)
def valid_roles, do: [:all | user_role_names()]
def not_scoped_roles, do: [:all | unquote(super_roles)]
defguard super_role?(role) when role in unquote(super_roles)
defguard all_role?(role) when role == unquote(all_role)
def is_super_role?(user_role) when super_role?(user_role), do: true
def is_super_role?(_user_role), do: false
def is_all_role?(user_role) when all_role?(user_role), do: true
def is_all_role?(_user_role), do: false
def is_role_authorized?(_user_role, unquote(all_role)), do: true
def is_role_authorized?(user_role, _allowed_role) when user_role in unquote(super_roles), do: true
def is_role_authorized?(user_role, allowed_role) when is_atom(allowed_role), do: user_role === allowed_role
def is_role_authorized?(user_role, allowed_roles) when is_list(allowed_roles), do: user_role in allowed_roles
def is_field_authorized?(nil, _scope_by, _source), do: false
def is_field_authorized?(%{id: user_id}, scope_by, source), do: user_id === Map.get(source, scope_by)
def has_user_access?(%user_struct{id: user_id} = current_user, scoped_struct, field_value) do
is_super_user? = current_user |> get_user_role() |> is_super_role?()
is_owner? = (user_struct === scoped_struct) && (user_id === field_value)
is_super_user? || is_owner?
end
def unauthorized_msg(_resolution), do: "unauthorized"
def is_super_user?(context) do
context
|> get_current_user()
|> get_user_role()
|> is_super_role?()
end
def is_context_authorized?(context, allowed_role) do
context
|> get_current_user()
|> get_user_role()
|> is_role_authorized?(allowed_role)
end
def is_context_field_authorized?(context, scope_by, source) do
context
|> get_current_user()
|> is_field_authorized?(scope_by, source)
end
def has_context_access?(context, scoped_struct, field_value) do
context
|> get_current_user()
|> has_user_access?(scoped_struct, field_value)
end
defoverridable Authorization
end
end
@doc false
def add_tier_to_roles!(roles) when is_list(roles) do
case Keyword.keyword?(roles) do
true -> roles
false -> Enum.with_index(roles, 1)
end
end
def add_tier_to_roles!(nil) do
raise "No roles configured in Rajska's authorization module"
end
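  # For example (illustrative):
  #   add_tier_to_roles!([:user, :admin])    #=> [user: 1, admin: 2]
  #   add_tier_to_roles!(user: 0, admin: 1)  #=> [user: 0, admin: 1]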
@doc false
def get_role_names(roles) when is_list(roles) do
case Keyword.keyword?(roles) do
true -> Enum.map(roles, fn {role, _tier} -> role end)
false -> roles
end
end
@doc false
def get_super_roles(roles) do
{_, max_tier} = Enum.max_by(roles, fn {_, tier} -> tier end)
roles
|> Enum.filter(fn {_, tier} -> tier === max_tier end)
|> Enum.map(fn {role, _} -> role end)
end
@doc false
def apply_auth_mod(context, fnc_name, args \\ [])
def apply_auth_mod(%{authorization: authorization}, fnc_name, args) do
apply(authorization, fnc_name, args)
end
def apply_auth_mod(_context, _fnc_name, _args) do
raise "Rajska authorization module not found in Absinthe's context"
end
defdelegate add_query_authorization(middleware, field, authorization), to: Rajska.Schema
defdelegate add_object_authorization(middleware), to: Rajska.Schema
defdelegate add_field_authorization(middleware, field, object), to: Rajska.Schema
end
|
lib/rajska.ex
| 0.851181
| 0.85446
|
rajska.ex
|
starcoder
|
defmodule Noizu.Scaffolding.EntityBehaviour do
@moduledoc """
This Behaviour provides some callbacks needed for the Noizu.ERP (EntityReferenceProtocol) to work smoothly.
Note the following naming conventions (where Path.To.Entity is the same path in each following case)
- Entities MyApp.(Path.To.Entity).MyFooEntity
- Tables MyApp.MyDatabase.(Path.To.Entity).MyFooTable
- Repos MyApp.(Path.To.Entity).MyFooRepo
If the above conventions are not used, a framework user must provide the appropriate `mnesia_table` and `repo_module` `use` options.
"""
alias Noizu.ElixirCore.CallingContext
#-----------------------------------------------------------------------------
# aliases, imports, uses,
#-----------------------------------------------------------------------------
require Logger
#-----------------------------------------------------------------------------
# Behaviour definition and types.
#-----------------------------------------------------------------------------
@type nmid :: integer | atom | String.t | tuple
@type entity_obj :: any
@type entity_record :: any
@type entity_tuple_reference :: {:ref, module, nmid}
@type entity_string_reference :: String.t
@type entity_reference :: entity_obj | entity_record | entity_tuple_reference | entity_string_reference
@type details :: any
@type error :: {:error, details}
@type options :: Map.t | nil
@doc """
Return identifier of ref, sref, entity or record
"""
@callback id(entity_reference) :: any | error
@doc """
Returns appropriate {:ref|:ext_ref, module, identifier} reference tuple
"""
@callback ref(entity_reference) :: entity_tuple_reference | error
@doc """
Returns appropriate string encoded ref. E.g. ref.user.1234
"""
@callback sref(entity_reference) :: entity_string_reference | error
@doc """
Returns entity, given an identifier, ref tuple, ref string or other known identifier type.
Where an entity is a EntityBehaviour implementing struct.
"""
@callback entity(entity_reference, options) :: entity_obj | error
@doc """
Returns entity, given an identifier, ref tuple, ref string or other known identifier type. Wrapping call in transaction if required.
Where an entity is a EntityBehaviour implementing struct.
"""
@callback entity!(entity_reference, options) :: entity_obj | error
@doc """
Returns record, given an identifier, ref tuple, ref string or other known identifier type.
Where a record is the raw mnesia table entry, as opposed to a EntityBehaviour based struct object.
"""
@callback record(entity_reference, options) :: entity_record | error
@doc """
Returns record, given an identifier, ref tuple, ref string or other known identifier type. Wrapping call in transaction if required.
Where a record is the raw mnesia table entry, as opposed to a EntityBehaviour based struct object.
"""
@callback record!(entity_reference, options) :: entity_record | error
@callback has_permission(entity_reference, any, any, options) :: boolean | error
@callback has_permission!(entity_reference, any, any, options) :: boolean | error
@doc """
Converts entity into record format. Aka extracts any fields used for indexing with the expected database table looking something like
```
%Table{
identifier: entity.identifier,
...
any_indexable_fields: entity.indexable_field,
...
entity: entity
}
```
The default implementation assumes the table structure is simply `%Table{identifier: entity.identifier, entity: entity}`; therefore you will need to
override this implementation if you have any indexable fields. Future versions of the entity behaviour will accept an indexable-field option
that will insert the expected fields and (if indicated) do simple type casting, such as transforming DateTime.t fields into UTC timestamps or
`{time_zone, year, month, day, hour, minute, second}` tuples for efficient range querying.
"""
@callback as_record(entity_obj, options :: Map.t) :: entity_record | error
@doc """
Returns the string used for preparing sref format strings. E.g. a `User` struct might use the string ``"user"`` as it's sref_module resulting in
sref strings like `ref.user.1234`.
"""
@callback sref_module() :: String.t
@doc """
Cast from json to struct.
"""
@callback from_json(Map.t, CallingContext.t) :: any
@doc """
Get the entity's repo module.
"""
@callback repo() :: atom
@doc """
Compress entity, default options
"""
@callback compress(entity :: any) :: any
@doc """
Compress entity
"""
@callback compress(entity :: any, options :: Map.t) :: any
@doc """
Expand entity, default options
"""
@callback expand(entity :: any) :: any
@doc """
Expand entity
"""
@callback expand(entity :: any, options :: Map.t) :: any
#-----------------------------------------------------------------------------
# Defines
#-----------------------------------------------------------------------------
@methods [:id, :ref, :sref, :entity, :entity!, :record, :record!, :erp_imp, :as_record, :sref_module, :from_json, :repo, :shallow, :miss_cb, :compress, :expand, :has_permission, :has_permission!, :poly_base]
#-----------------------------------------------------------------------------
# Using Implementation
#-----------------------------------------------------------------------------
defmacro __using__(options) do
# Only include implementation for these methods.
option_arg = Keyword.get(options, :only, @methods)
only = List.foldl(@methods, %{}, fn(method, acc) -> Map.put(acc, method, Enum.member?(option_arg, method)) end)
# Don't include implementation for these methods.
option_arg = Keyword.get(options, :override, [])
override = List.foldl(@methods, %{}, fn(method, acc) -> Map.put(acc, method, Enum.member?(option_arg, method)) end)
required? = List.foldl(@methods, %{}, fn(method, acc) -> Map.put(acc, method, only[method] && !override[method]) end)
# Repo module (entity/record implementation); module name with "Repo" appended if :auto
repo_module = Keyword.get(options, :repo_module, :auto)
mnesia_table = Keyword.get(options, :mnesia_table, :auto)
as_record_options = Keyword.get(options, :as_record_options, Macro.escape(%{}))
# Default Implementation Provider
default_implementation = Keyword.get(options, :default_implementation, Noizu.Scaffolding.EntityBehaviourDefault)
sm = Keyword.get(options, :sref_module, "unsupported")
sref_prefix = "ref." <> sm <> "."
quote do
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
import unquote(__MODULE__)
@behaviour Noizu.Scaffolding.EntityBehaviour
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
@expanded_repo unquote(default_implementation).expand_repo(__MODULE__, unquote(repo_module))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.sref_module) do
def sref_module(), do: unquote(sm)
end
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.poly_base) do
def poly_base(), do: __MODULE__
end
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.repo) do
def repo(), do: @expanded_repo
end
#-------------------------------------------------------------------------
# Default Implementation from default_implementation behaviour
#-------------------------------------------------------------------------
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.from_json) do
def from_json(json, context) do
@expanded_repo.from_json(json, context)
end
end
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.shallow) do
def shallow(identifier) do
%__MODULE__{identifier: identifier}
end
end
#unquote(Macro.expand(default_implementation, __CALLER__).prepare(mnesia_table, repo_module, sref_prefix))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.compress) do
def compress(entity), do: compress(entity, %{})
def compress(entity, _options), do: entity
end
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.expand) do
def expand(entity), do: expand(entity, %{})
def expand(entity, _options), do: entity
end
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.id), do: unquote(Macro.expand(default_implementation, __CALLER__).id_implementation(mnesia_table, sref_prefix))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.ref), do: unquote(Macro.expand(default_implementation, __CALLER__).ref_implementation(mnesia_table, sref_prefix))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.sref), do: unquote(Macro.expand(default_implementation, __CALLER__).sref_implementation(mnesia_table, sref_prefix))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.miss_cb), do: unquote(Macro.expand(default_implementation, __CALLER__).miss_cb_implementation())
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.entity), do: unquote(Macro.expand(default_implementation, __CALLER__).entity_implementation(mnesia_table, repo_module))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.entity!), do: unquote(Macro.expand(default_implementation, __CALLER__).entity_txn_implementation(mnesia_table, repo_module))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.record), do: unquote(Macro.expand(default_implementation, __CALLER__).record_implementation(mnesia_table, repo_module))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.record!), do: unquote(Macro.expand(default_implementation, __CALLER__).record_txn_implementation(mnesia_table, repo_module))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.erp_imp), do: unquote(Macro.expand(default_implementation, __CALLER__).erp_imp(mnesia_table))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.as_record), do: unquote(Macro.expand(default_implementation, __CALLER__).as_record_implementation(mnesia_table, as_record_options))
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.has_permission), do: unquote(Macro.expand(default_implementation, __CALLER__).has_permission_implementation())
@file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
if unquote(required?.has_permission!), do: unquote(Macro.expand(default_implementation, __CALLER__).has_permission_txn_implementation())
#@before_compile unquote(__MODULE__)
end # end quote
end #end defmacro __using__(options)
#defmacro __before_compile__(_env) do
# quote do
# def has_permission(_ref, _permission, _context, _options), do: false
# def has_permission!(_ref, _permission, _context, _options), do: false
# end # end quote
#end # end defmacro __before_compile__(_env)
end #end defmodule
|
lib/scaffolding/behaviours/entity_behaviour.ex
| 0.864996
| 0.59249
|
entity_behaviour.ex
|
starcoder
|
defmodule LineBot.Message do
@moduledoc """
Represents any of the possible [Message objects](https://developers.line.biz/en/reference/messaging-api/#message-objects).
"""
@type t ::
LineBot.Message.Text.t()
| LineBot.Message.Sticker.t()
| LineBot.Message.Image.t()
| LineBot.Message.Video.t()
| LineBot.Message.Audio.t()
| LineBot.Message.Location.t()
| LineBot.Message.Flex.t()
| LineBot.Message.Imagemap.t()
| LineBot.Message.Template.t()
defmacro __using__(_opts) do
quote do
defimpl Jason.Encoder do
def encode(struct, opts) do
struct
|> Map.from_struct()
|> Enum.reject(&(elem(&1, 1) == nil))
|> Map.new()
|> Jason.Encode.map(opts)
end
end
end
end
end
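# For illustration: the encoder above drops nil fields, so a bare text message
# encodes without the :quickReply key (JSON key order may vary):
#
#     Jason.encode!(%LineBot.Message.Text{text: "hello"})
#     #=> ~s({"text":"hello","type":"text"})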
defmodule LineBot.Message.Text do
use LineBot.Message
@moduledoc """
Represents a [Text message](https://developers.line.biz/en/reference/messaging-api/#text-message).
"""
@type t :: %__MODULE__{
text: String.t(),
type: :text,
quickReply: LineBot.Message.QuickReply.t() | nil
}
@enforce_keys [:text]
defstruct [:text, :quickReply, type: :text]
end
defmodule LineBot.Message.Sticker do
use LineBot.Message
@moduledoc """
Represents a [Sticker message](https://developers.line.biz/en/reference/messaging-api/#sticker-message).
"""
@type t :: %__MODULE__{
packageId: String.t(),
stickerId: String.t(),
type: :sticker,
quickReply: LineBot.Message.QuickReply.t() | nil
}
@enforce_keys [:packageId, :stickerId]
defstruct [:packageId, :stickerId, :quickReply, type: :sticker]
end
defmodule LineBot.Message.Image do
use LineBot.Message
@moduledoc """
Represents an [Image message](https://developers.line.biz/en/reference/messaging-api/#image-message).
"""
@type t :: %__MODULE__{
originalContentUrl: String.t(),
previewImageUrl: String.t(),
type: :image,
quickReply: LineBot.Message.QuickReply.t() | nil
}
@enforce_keys [:originalContentUrl, :previewImageUrl]
defstruct [:originalContentUrl, :previewImageUrl, :quickReply, type: :image]
end
defmodule LineBot.Message.Video do
use LineBot.Message
@moduledoc """
Represents a [Video message](https://developers.line.biz/en/reference/messaging-api/#video-message).
"""
@type t :: %__MODULE__{
originalContentUrl: String.t(),
previewImageUrl: String.t(),
type: :video,
quickReply: LineBot.Message.QuickReply.t() | nil
}
@enforce_keys [:originalContentUrl, :previewImageUrl]
defstruct [:originalContentUrl, :previewImageUrl, :quickReply, type: :video]
end
defmodule LineBot.Message.Audio do
use LineBot.Message
@moduledoc """
Represents an [Audio message](https://developers.line.biz/en/reference/messaging-api/#audio-message).
"""
@type t :: %__MODULE__{
originalContentUrl: String.t(),
duration: number(),
type: :audio,
quickReply: LineBot.Message.QuickReply.t() | nil
}
@enforce_keys [:originalContentUrl, :duration]
defstruct [:originalContentUrl, :duration, :quickReply, type: :audio]
end
defmodule LineBot.Message.Location do
use LineBot.Message
@moduledoc """
Represents a [Location message](https://developers.line.biz/en/reference/messaging-api/#location-message).
"""
@type t :: %__MODULE__{
title: String.t(),
address: String.t(),
latitude: float(),
longitude: float(),
quickReply: LineBot.Message.QuickReply.t() | nil
}
@enforce_keys [:title, :address, :latitude, :longitude]
defstruct [:title, :address, :latitude, :longitude, :quickReply, type: :location]
end
|
lib/line_bot/message.ex
| 0.806358
| 0.45175
|
message.ex
|
starcoder
|
defmodule Clova.Response.SpeechInfoObject do
@moduledoc """
A struct that represents a `SpeechInfoObject` entry of the clova response. For the representation
of the entire response see `Clova.Response`.
"""
defstruct lang: "ja", type: "PlainText", value: nil
end
defmodule Clova.Response.OutputSpeech do
@moduledoc """
A struct that represents an `outputSpeech` entry of the clova response. For the representation
of the entire response see `Clova.Response`.
"""
defstruct type: "SimpleSpeech", values: %Clova.Response.SpeechInfoObject{}
end
defmodule Clova.Response.Reprompt do
@moduledoc """
A struct that represents a `reprompt` entry of the clova response. For the representation
of the entire response see `Clova.Response`.
"""
defstruct outputSpeech: %Clova.Response.OutputSpeech{}
end
defmodule Clova.Response.Response do
@moduledoc """
A struct that represents the `response` portion of the clova response. For the representation
of the entire response see `Clova.Response`.
"""
defstruct outputSpeech: %Clova.Response.OutputSpeech{},
shouldEndSession: false,
card: nil,
directives: nil,
reprompt: nil
end
defmodule Clova.Response do
@moduledoc """
Defines a struct that contains the data that should be encoded into JSON as a response to a clova request.
An instance of this struct is initialised by the `Clova.DispatcherPlug` and passed to the
callbacks defined by the `Clova` behaviour.
"""
defstruct response: %Clova.Response.Response{}, sessionAttributes: %{}, version: "1.0"
@doc """
Appends the specified `speech` to the response. `speech` can be text or a URL. When
passing a URL, set the `type:` option to `:url`.
This function automatically upgrades a `SimpleSpeech`
response to a `SpeechList` response if the response already contained a non-nil `SimpleSpeech`
string. If the response was empty, a `SimpleSpeech` response is created.
## Options
* `type:` - Can be `:text` or `:url`. Defaults to `:text`.
* `lang:` - Should be a two-letter language code. Defaults to `"ja"`. See the [`SpeechInfoObject`](https://clova-developers.line.me/guide/#/CEK/References/CEK_API.md#CustomExtSpeechInfoObject) documentation for the currently supported languages. When the `type:` option is set to `:url`, this option is ignored and the language is set to the empty string.
"""
def add_speech(resp, speech, opts \\ []) do
type = Keyword.get(opts, :type, :text)
lang = Keyword.get(opts, :lang, "ja")
output_speech = add_speech_to_output_speech(resp.response.outputSpeech, speech, lang, type)
put_in(resp.response.outputSpeech, output_speech)
end
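  # A usage sketch (the URL is hypothetical); the second call upgrades the
  # response from SimpleSpeech to SpeechList automatically:
  #
  #     %Clova.Response{}
  #     |> Clova.Response.add_speech("Hello")
  #     |> Clova.Response.add_speech("https://example.com/audio.mp3", type: :url)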
@doc """
Adds the specified `speech` to the response's `reprompt` data. This is used by Clova to
reprompt the user for an utterance when clova is expecting a reply but none is detected.
The behavior is otherwise the same as `add_speech/3`.
"""
def add_reprompt(resp, speech, opts \\ []) do
type = Keyword.get(opts, :type, :text)
lang = Keyword.get(opts, :lang, "ja")
reprompt = resp.response.reprompt || %Clova.Response.Reprompt{}
output_speech = add_speech_to_output_speech(reprompt.outputSpeech, speech, lang, type)
reprompt = put_in(reprompt.outputSpeech, output_speech)
put_in(resp.response.reprompt, reprompt)
end
@doc """
Places the supplied `session_attributes` object into the response. The same data will be
included in any subsequent clova request during the session. Subsequent calls to this function
will overwrite the data from previous calls.
`session_attributes` should be formatted as a key, value map.
"""
def put_session_attributes(resp, session_attributes) do
put_in(resp.sessionAttributes, session_attributes)
end
@doc """
Sets the `shouldEndSession` flag of `response` to `true`.
"""
def end_session(response) do
put_in(response.response.shouldEndSession, true)
end
defp add_speech_to_output_speech(output_speech, speech, lang, :text) do
speech_info = %Clova.Response.SpeechInfoObject{value: speech, lang: lang}
add_speech_info_to_output_speech(output_speech, speech_info)
end
defp add_speech_to_output_speech(output_speech, speech, _lang, :url) do
speech_info = %Clova.Response.SpeechInfoObject{type: "URL", value: speech, lang: ""}
add_speech_info_to_output_speech(output_speech, speech_info)
end
defp add_speech_info_to_output_speech(output_speech = %{type: "SpeechList"}, speech_info) do
update_in(output_speech.values, &(&1 ++ [speech_info]))
end
defp add_speech_info_to_output_speech(output_speech = %{type: "SimpleSpeech"}, speech_info) do
if output_speech.values.value == nil do
put_in(output_speech.values, speech_info)
else
%{output_speech | type: "SpeechList", values: [output_speech.values, speech_info]}
end
end
end
|
lib/clova/response.ex
| 0.929744
| 0.540681
|
response.ex
|
starcoder
|
defmodule Sneex.Cpu do
@moduledoc """
This module defines a structure that represents the CPU's current state, and defines functions for
interacting with that state (getting it, updating it, etc.).
Design thoughts:
- CPU should have 2 functions:
- tick: this will act as a clock tick
- step: this will immediately execute the current command (regardless of remaining ticks)
- Have a module that will load the next command
- Have a module/struct that represents current command
Register layout (bit positions 23..0):
    bits 23..16              bits 15..8              bits 7..0
                             Accumulator (B)         Accumulator (A)    (B and A together: C)
    Data Bank Register
                             X Index Register (X)
                             Y Index Register (Y)
    0 0 0 0 0 0 0 0          Direct Page Register (D)
    0 0 0 0 0 0 0 0          Stack Pointer (S)
    Program Bank Register    Program Counter (PC)
"""
defstruct [
:acc,
:acc_size,
:x,
:y,
:index_size,
:data_bank,
:direct_page,
:program_bank,
:stack_ptr,
:pc,
:emu_mode,
:neg_flag,
:overflow_flag,
:carry_flag,
:zero_flag,
:irq_disable,
:decimal_mode,
:memory
]
use Bitwise
@type t :: %__MODULE__{
acc: word(),
acc_size: bit_size(),
x: word(),
y: word(),
index_size: bit_size(),
data_bank: byte(),
direct_page: word(),
program_bank: byte(),
stack_ptr: word(),
pc: word(),
emu_mode: emulation_mode(),
neg_flag: boolean(),
overflow_flag: boolean(),
carry_flag: boolean(),
zero_flag: boolean(),
irq_disable: boolean(),
decimal_mode: boolean(),
memory: Sneex.Memory.t()
}
@typep word :: Sneex.BasicTypes.word()
@typep long :: Sneex.BasicTypes.long()
@type emulation_mode :: :native | :emulation
@type bit_size :: :bit8 | :bit16
@doc "This is a simple constructor that will initialize all of the defaults for the CPU."
@spec new(Sneex.Memory.t()) :: __MODULE__.t()
def new(memory) do
%__MODULE__{
acc: 0,
acc_size: :bit8,
x: 0,
y: 0,
index_size: :bit8,
data_bank: 0,
direct_page: 0,
program_bank: 0,
stack_ptr: 0,
pc: 0,
emu_mode: :emulation,
neg_flag: false,
overflow_flag: false,
carry_flag: false,
zero_flag: false,
irq_disable: false,
decimal_mode: false,
memory: memory
}
end
@doc "
Gets the accumulator from the CPU.
It will take into account the bit size of the accumulator and only return 8 or 16 bits.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc(0xFF) |> Sneex.Cpu.acc()
0xFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc(0xFFFF) |> Sneex.Cpu.acc()
0xFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.emu_mode(:emulation) |> Sneex.Cpu.acc(0xFFFF) |> Sneex.Cpu.acc_size(:bit16) |> Sneex.Cpu.acc()
0xFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.emu_mode(:native) |> Sneex.Cpu.acc(0xFFFF) |> Sneex.Cpu.acc_size(:bit16) |> Sneex.Cpu.acc()
0xFFFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.emu_mode(:native) |> Sneex.Cpu.acc(0xFFFFFF) |> Sneex.Cpu.acc_size(:bit16) |> Sneex.Cpu.acc()
0xFFFF
"
@spec acc(__MODULE__.t()) :: word()
def acc(%__MODULE__{acc: a, acc_size: :bit8}), do: a &&& 0xFF
def acc(%__MODULE__{acc: a, emu_mode: :emulation}), do: a &&& 0xFF
def acc(%__MODULE__{acc: a}), do: a &&& 0xFFFF
@doc "Sets the accumulator value for the CPU."
@spec acc(__MODULE__.t(), word()) :: __MODULE__.t()
def acc(cpu = %__MODULE__{}, acc), do: %__MODULE__{cpu | acc: acc}
@doc "
Gets the lower 8 bits of the accumulator (referred to as A).
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc(0xDEAD) |> Sneex.Cpu.a()
0xAD
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc(0xBEEF) |> Sneex.Cpu.a()
0xEF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc(0x42) |> Sneex.Cpu.a()
0x42
"
@spec a(__MODULE__.t()) :: byte()
def a(%__MODULE__{acc: c}), do: c |> band(0x00FF)
@doc "
Gets the top 8 bits of the accumulator (referred to as B).
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc(0xDEAD) |> Sneex.Cpu.b()
0xDE
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc(0xBEEF) |> Sneex.Cpu.b()
0xBE
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc(0x42) |> Sneex.Cpu.b()
0x00
"
@spec b(__MODULE__.t()) :: byte()
def b(%__MODULE__{acc: c}), do: c |> band(0xFF00) |> bsr(8)
@doc "
Gets the full 16 bits of the accumulator (referred to as C), regardless of the current memory mode.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc(0xDEAD) |> Sneex.Cpu.c()
0xDEAD
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc(0xBEEF) |> Sneex.Cpu.c()
0xBEEF
"
@spec c(__MODULE__.t()) :: word()
def c(%__MODULE__{acc: c}), do: c |> band(0xFFFF)
@doc "Gets the size of the accumulator, either :bit8 or :bit16."
@spec acc_size(__MODULE__.t()) :: bit_size()
def acc_size(%__MODULE__{emu_mode: :emulation}), do: :bit8
def acc_size(%__MODULE__{acc_size: acc_size}), do: acc_size
@doc "Sets the size of the accumulator, either :bit8 or :bit16."
@spec acc_size(__MODULE__.t(), bit_size()) :: __MODULE__.t()
def acc_size(cpu = %__MODULE__{}, size), do: %__MODULE__{cpu | acc_size: size}
@doc "
Gets the x index from the CPU.
It will take into account the bit size of the index registers and only return 8 or 16 bits.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.x(0xFF) |> Sneex.Cpu.x()
0xFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.x(0xFFFF) |> Sneex.Cpu.x()
0xFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.x(0xFF) |> Sneex.Cpu.index_size(:bit16) |> Sneex.Cpu.emu_mode(:emulation) |> Sneex.Cpu.x()
0xFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.emu_mode(:native) |> Sneex.Cpu.x(0xFFFF) |> Sneex.Cpu.index_size(:bit16) |> Sneex.Cpu.x()
0xFFFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.emu_mode(:native) |> Sneex.Cpu.x(0xFFFFFF) |> Sneex.Cpu.index_size(:bit16) |> Sneex.Cpu.x()
0xFFFF
"
@spec x(__MODULE__.t()) :: word()
def x(%__MODULE__{x: x, index_size: :bit8}), do: x &&& 0xFF
def x(%__MODULE__{x: x, emu_mode: :emulation}), do: x &&& 0xFF
def x(%__MODULE__{x: x}), do: x &&& 0xFFFF
@doc "Sets the x index for the CPU."
@spec x(__MODULE__.t(), word()) :: __MODULE__.t()
def x(cpu = %__MODULE__{}, x), do: %__MODULE__{cpu | x: x}
@doc "
Gets the y index from the CPU.
It will take into account the bit size of the index registers and only return 8 or 16 bits.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.y(0xFF) |> Sneex.Cpu.y()
0xFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.y(0xFF) |> Sneex.Cpu.index_size(:bit16) |> Sneex.Cpu.emu_mode(:emulation) |> Sneex.Cpu.y()
0xFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.emu_mode(:native) |> Sneex.Cpu.y(0xFFFF) |> Sneex.Cpu.y()
0xFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.emu_mode(:native) |> Sneex.Cpu.y(0xFFFF) |> Sneex.Cpu.index_size(:bit16) |> Sneex.Cpu.y()
0xFFFF
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.emu_mode(:native) |> Sneex.Cpu.y(0xFFFFFF) |> Sneex.Cpu.index_size(:bit16) |> Sneex.Cpu.y()
0xFFFF
"
@spec y(__MODULE__.t()) :: word()
def y(%__MODULE__{y: y, index_size: :bit8}), do: y &&& 0xFF
def y(%__MODULE__{y: y, emu_mode: :emulation}), do: y &&& 0xFF
def y(%__MODULE__{y: y}), do: y &&& 0xFFFF
@doc "Sets the y index for the CPU."
@spec y(__MODULE__.t(), word()) :: __MODULE__.t()
def y(cpu = %__MODULE__{}, y), do: %__MODULE__{cpu | y: y}
@doc "Gets the size of the index registers, either :bit8 or :bit16"
@spec index_size(__MODULE__.t()) :: bit_size()
def index_size(%__MODULE__{emu_mode: :emulation}), do: :bit8
def index_size(%__MODULE__{index_size: size}), do: size
@doc "Sets the size of the index registers, either :bit8 or :bit16"
@spec index_size(__MODULE__.t(), bit_size()) :: __MODULE__.t()
def index_size(cpu = %__MODULE__{}, size), do: %__MODULE__{cpu | index_size: size}
@doc "
This allows reading the break (b) flag while the CPU is in emulation mode.
It converts the index size (x) flag into the break flag while the CPU is in emulation mode.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.break_flag()
true
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.index_size(:bit16) |> Sneex.Cpu.break_flag()
false
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.break_flag(false) |> Sneex.Cpu.emu_mode(:native) |> Sneex.Cpu.index_size()
:bit16
"
@spec break_flag(__MODULE__.t()) :: boolean
def break_flag(%__MODULE__{emu_mode: :emulation, index_size: :bit8}), do: true
def break_flag(%__MODULE__{emu_mode: :emulation, index_size: :bit16}), do: false
@doc "This allows setting the break (b) flag while the CPU is in emulation mode."
@spec break_flag(__MODULE__.t(), boolean()) :: __MODULE__.t()
def break_flag(cpu = %__MODULE__{emu_mode: :emulation}, true),
do: %__MODULE__{cpu | index_size: :bit8}
def break_flag(cpu = %__MODULE__{emu_mode: :emulation}, false),
do: %__MODULE__{cpu | index_size: :bit16}
@doc "
Get the current value for the data bank register
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.data_bank(0xAA) |> Sneex.Cpu.data_bank()
0xAA
"
@spec data_bank(__MODULE__.t()) :: byte()
def data_bank(%__MODULE__{data_bank: dbr}), do: dbr &&& 0xFF
@doc "Sets the current value for the data bank register"
@spec data_bank(__MODULE__.t(), byte()) :: __MODULE__.t()
def data_bank(cpu = %__MODULE__{}, dbr), do: %__MODULE__{cpu | data_bank: dbr}
@doc "
Get the current value for the direct page register
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.direct_page(0xBB) |> Sneex.Cpu.direct_page()
0xBB
"
@spec direct_page(__MODULE__.t()) :: word()
def direct_page(%__MODULE__{direct_page: dpr}), do: dpr &&& 0xFFFF
@doc "Sets the value for the direct page register"
@spec direct_page(__MODULE__.t(), word()) :: __MODULE__.t()
def direct_page(cpu = %__MODULE__{}, dpr), do: %__MODULE__{cpu | direct_page: dpr}
@doc "
Get the current value for the program bank register
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.program_bank(0xBB) |> Sneex.Cpu.program_bank()
0xBB
"
@spec program_bank(__MODULE__.t()) :: byte()
def program_bank(%__MODULE__{program_bank: pbr}), do: pbr &&& 0xFF
@doc "Set the value of the program bank register"
@spec program_bank(__MODULE__.t(), byte()) :: __MODULE__.t()
def program_bank(cpu = %__MODULE__{}, pbr), do: %__MODULE__{cpu | program_bank: pbr}
@doc "
Get the current value for the stack pointer
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.stack_ptr(0xABCD) |> Sneex.Cpu.stack_ptr()
0xABCD
"
@spec stack_ptr(__MODULE__.t()) :: word()
def stack_ptr(%__MODULE__{stack_ptr: sp}), do: sp &&& 0xFFFF
@doc "Set the value of the stack pointer"
@spec stack_ptr(__MODULE__.t(), word()) :: __MODULE__.t()
def stack_ptr(cpu = %__MODULE__{}, sp), do: %__MODULE__{cpu | stack_ptr: sp}
@doc "
Get the current value for the program counter.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.pc(0x1234) |> Sneex.Cpu.pc()
0x1234
"
@spec pc(__MODULE__.t()) :: word()
def pc(%__MODULE__{pc: pc}), do: pc &&& 0xFFFF
@doc "Set the value of the program counter"
@spec pc(__MODULE__.t(), word()) :: __MODULE__.t()
def pc(cpu = %__MODULE__{}, pc), do: %__MODULE__{cpu | pc: pc}
@doc "
Gets the effective program counter
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.pc(0x1234) |> Sneex.Cpu.program_bank(0xAB) |> Sneex.Cpu.effective_pc()
0xAB1234
"
def effective_pc(%__MODULE__{program_bank: pbr, pc: pc}), do: pbr |> bsl(16) |> bor(pc)
@doc "
Get the current value for the emulation mode, either :native or :emulation.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.emu_mode(:native) |> Sneex.Cpu.emu_mode()
:native
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.emu_mode(:emulation) |> Sneex.Cpu.emu_mode()
:emulation
"
@spec emu_mode(__MODULE__.t()) :: emulation_mode()
def emu_mode(%__MODULE__{emu_mode: em}), do: em
@doc "Set the current value for the emulation mode"
@spec emu_mode(__MODULE__.t(), emulation_mode()) :: __MODULE__.t()
def emu_mode(cpu = %__MODULE__{}, em), do: %__MODULE__{cpu | emu_mode: em}
@doc "
Get the current value for the negative flag.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.negative_flag(true) |> Sneex.Cpu.negative_flag()
true
"
@spec negative_flag(__MODULE__.t()) :: boolean()
def negative_flag(%__MODULE__{neg_flag: n}), do: n
@doc "Set the value of the negative flag"
@spec negative_flag(__MODULE__.t(), boolean()) :: __MODULE__.t()
def negative_flag(cpu = %__MODULE__{}, n), do: %__MODULE__{cpu | neg_flag: n}
@doc "
Get the current value for the overflow flag.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.overflow_flag(true) |> Sneex.Cpu.overflow_flag()
true
"
@spec overflow_flag(__MODULE__.t()) :: boolean()
def overflow_flag(%__MODULE__{overflow_flag: o}), do: o
@doc "Set the value of the overflow flag"
@spec overflow_flag(__MODULE__.t(), boolean()) :: __MODULE__.t()
def overflow_flag(cpu = %__MODULE__{}, o), do: %__MODULE__{cpu | overflow_flag: o}
@doc "
Get the current value for the decimal mode (true = decimal, false = binary).
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.decimal_mode(true) |> Sneex.Cpu.decimal_mode()
true
"
@spec decimal_mode(__MODULE__.t()) :: boolean()
def decimal_mode(%__MODULE__{decimal_mode: d}), do: d
@doc "Set the decimal mode: true = decimal, false = binary"
@spec decimal_mode(__MODULE__.t(), boolean()) :: __MODULE__.t()
def decimal_mode(cpu = %__MODULE__{}, d), do: %__MODULE__{cpu | decimal_mode: d}
@doc "
Get the current value for the interrupt disable.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.irq_disable(true) |> Sneex.Cpu.irq_disable()
true
"
@spec irq_disable(__MODULE__.t()) :: boolean()
def irq_disable(%__MODULE__{irq_disable: i}), do: i
@doc "Set the value of the interrupt disable (IRQ disable)"
@spec irq_disable(__MODULE__.t(), boolean()) :: __MODULE__.t()
def irq_disable(cpu = %__MODULE__{}, i), do: %__MODULE__{cpu | irq_disable: i}
@doc "
Get the current value for the zero flag.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.zero_flag(true) |> Sneex.Cpu.zero_flag()
true
"
@spec zero_flag(__MODULE__.t()) :: boolean()
def zero_flag(%__MODULE__{zero_flag: z}), do: z
@doc "Set the value of the zero flag"
@spec zero_flag(__MODULE__.t(), boolean()) :: __MODULE__.t()
def zero_flag(cpu = %__MODULE__{}, z), do: %__MODULE__{cpu | zero_flag: z}
@doc "
Get the current value for the carry flag.
## Examples
iex> <<>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.carry_flag(true) |> Sneex.Cpu.carry_flag()
true
"
@spec carry_flag(__MODULE__.t()) :: boolean()
def carry_flag(%__MODULE__{carry_flag: c}), do: c
@doc "Set the value of the carry flag"
@spec carry_flag(__MODULE__.t(), boolean()) :: __MODULE__.t()
def carry_flag(cpu = %__MODULE__{}, c), do: %__MODULE__{cpu | carry_flag: c}
@doc "Get the Sneex.Memory that is held by the CPU."
@spec memory(__MODULE__.t()) :: Sneex.Memory.t()
def memory(%__MODULE__{memory: m}), do: m
@doc "Read the next opcode (where the program counter currently points)."
@spec read_opcode(__MODULE__.t()) :: byte()
def read_opcode(cpu = %__MODULE__{memory: m}) do
eff_pc = cpu |> effective_pc()
Sneex.Memory.read_byte(m, eff_pc)
end
@doc "Read the 1, 2, or 3 byte operand that is 1 address past the program counter."
@spec read_operand(__MODULE__.t(), 1 | 2 | 3) :: byte() | word() | long()
def read_operand(cpu = %__MODULE__{memory: m}, 1) do
eff_pc = cpu |> effective_pc()
Sneex.Memory.read_byte(m, eff_pc + 1)
end
def read_operand(cpu = %__MODULE__{memory: m}, 2) do
eff_pc = cpu |> effective_pc()
Sneex.Memory.read_word(m, eff_pc + 1)
end
def read_operand(cpu = %__MODULE__{memory: m}, 3) do
eff_pc = cpu |> effective_pc()
Sneex.Memory.read_long(m, eff_pc + 1)
end
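  # For example (reads are little-endian, matching the read_data/3 doctests
  # below): with memory <<0xEA, 0x34, 0x12>> and both pc and program_bank at 0,
  # read_operand(cpu, 2) reads the word at address 1 and returns 0x1234.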
@doc "
Reads data from the memory. The address is the memory location where the read starts.
The amount of data read (1 or 2 bytes) is based off of the accumulator size.
## Examples
iex> <<1, 2>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc_size(:bit8) |> Sneex.Cpu.read_data(0x000000)
0x01
iex> <<1, 2>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc_size(:bit8) |> Sneex.Cpu.read_data(0x000001)
0x02
iex> <<1, 2>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc_size(:bit8) |> Sneex.Cpu.read_data(0x000000, 2)
0x0201
iex> <<1, 2>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc_size(:bit16) |> Sneex.Cpu.read_data(0x000000)
0x0201
iex> <<1, 2>> |> Sneex.Memory.new() |> Sneex.Cpu.new() |> Sneex.Cpu.acc_size(:bit16) |> Sneex.Cpu.read_data(0x000000, 1)
0x01
"
@spec read_data(__MODULE__.t(), long()) :: byte() | word()
def read_data(%__MODULE__{memory: m, acc_size: :bit8}, address) do
Sneex.Memory.read_byte(m, address)
end
def read_data(%__MODULE__{memory: m, acc_size: :bit16}, address) do
Sneex.Memory.read_word(m, address)
end
@spec read_data(__MODULE__.t(), long(), 1 | 2 | 3) :: byte() | word()
def read_data(%__MODULE__{memory: m}, address, 1) do
Sneex.Memory.read_byte(m, address)
end
def read_data(%__MODULE__{memory: m}, address, 2) do
Sneex.Memory.read_word(m, address)
end
def read_data(%__MODULE__{memory: m}, address, 3) do
Sneex.Memory.read_long(m, address)
end
@doc "
Writes data to the memory. The address is the memory location where the write starts.
The amount of data written (1 or 2 bytes) is based off of the accumulator size.
"
@spec write_data(__MODULE__.t(), long(), byte() | word()) :: __MODULE__.t()
def write_data(cpu = %__MODULE__{memory: m, acc_size: :bit8}, address, value) do
new_memory = Sneex.Memory.write_byte(m, address, value)
%__MODULE__{cpu | memory: new_memory}
end
def write_data(cpu = %__MODULE__{memory: m, acc_size: :bit16}, address, value) do
new_memory = Sneex.Memory.write_word(m, address, value)
%__MODULE__{cpu | memory: new_memory}
end
end
|
lib/sneex/cpu.ex
| 0.816991
| 0.567277
|
cpu.ex
|
starcoder
|
defmodule Commanded.PubSub do
@moduledoc """
Pub/sub behaviour for use by Commanded to subscribe to and broadcast messages.
"""
@doc """
Return an optional supervisor spec for pub/sub.
"""
@callback child_spec() :: [:supervisor.child_spec()]
@doc """
Subscribes the caller to the PubSub adapter's topic.
"""
@callback subscribe(String.t()) :: :ok | {:error, term}
@doc """
Broadcasts message on given topic.
* `topic` - The topic to broadcast to, ie: `"users:123"`
* `message` - The payload of the broadcast
"""
@callback broadcast(String.t(), term) :: :ok | {:error, term}
@doc """
Track the current process under the given `topic`, uniquely identified by
`key`.
"""
@callback track(String.t(), term) :: :ok | {:error, term}
@doc """
List tracked PIDs for a given topic.
"""
@callback list(String.t()) :: [{term, pid}]
@doc """
Return an optional supervisor spec for pub/sub.
"""
@spec child_spec() :: [:supervisor.child_spec()]
def child_spec, do: pubsub_provider().child_spec()
@doc """
Subscribes the caller to the PubSub adapter's topic.
"""
@spec subscribe(String.t()) :: :ok | {:error, term}
def subscribe(topic) when is_binary(topic), do: pubsub_provider().subscribe(topic)
@doc """
Broadcasts message on given topic.
"""
@spec broadcast(String.t(), term) :: :ok | {:error, term}
def broadcast(topic, message) when is_binary(topic),
do: pubsub_provider().broadcast(topic, message)
@doc """
Track the current process under the given `topic`, uniquely identified by
`key`.
"""
@spec track(String.t(), term) :: :ok
def track(topic, key) when is_binary(topic), do: pubsub_provider().track(topic, key)
@doc """
List tracked PIDs for a given topic.
"""
@spec list(String.t()) :: [{term, pid}]
def list(topic) when is_binary(topic), do: pubsub_provider().list(topic)
@doc """
Get the configured pub/sub adapter.
Defaults to a local pub/sub, restricted to running on a single node.
"""
@spec pubsub_provider() :: module()
def pubsub_provider do
case Application.get_env(:commanded, :pubsub, :local) do
:local ->
Commanded.PubSub.LocalPubSub
provider when is_atom(provider) ->
provider
config ->
if Keyword.keyword?(config) do
case Keyword.get(config, :phoenix_pubsub) do
nil ->
raise "Unsupported pubsub adapter: #{inspect(config)}"
_phoenix_pubsub ->
Commanded.PubSub.PhoenixPubSub
end
else
raise "Unsupported pubsub adapter: #{inspect(config)}"
end
end
end
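  # Configuration sketches (illustrative; the Phoenix PubSub option shape is
  # an assumption and depends on your Phoenix.PubSub setup):
  #
  #     config :commanded, pubsub: :local
  #
  #     config :commanded, pubsub: [phoenix_pubsub: [adapter: Phoenix.PubSub.PG2]]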
end
|
lib/commanded/pubsub.ex
| 0.859354
| 0.405979
|
pubsub.ex
|
starcoder
|
defmodule Ecto.Query.Builder.From do
@moduledoc false
alias Ecto.Query.Builder
@doc """
Handles from expressions.
The expressions may either contain an `in` expression or not.
The right side is always expected to be Queryable.
## Examples
iex> escape(quote do: MySchema)
{quote(do: MySchema), []}
iex> escape(quote do: p in posts)
{quote(do: posts), [p: 0]}
iex> escape(quote do: p in {"posts", MySchema})
{quote(do: {"posts", MySchema}), [p: 0]}
iex> escape(quote do: [p, q] in posts)
{quote(do: posts), [p: 0, q: 1]}
iex> escape(quote do: [_, _] in abc)
{quote(do: abc), [_: 0, _: 1]}
iex> escape(quote do: other)
{quote(do: other), []}
iex> escape(quote do: x() in other)
** (Ecto.Query.CompileError) binding list should contain only variables, got: x()
"""
@spec escape(Macro.t) :: {Macro.t, Keyword.t}
def escape({:in, _, [var, query]}) do
Builder.escape_binding(query, List.wrap(var))
end
def escape(query) do
{query, []}
end
@doc """
Builds a quoted expression.
The quoted expression should evaluate to a query at runtime.
If possible, it does all calculations at compile time to avoid
runtime work.
"""
@spec build(Macro.t, Macro.Env.t) :: {Macro.t, Keyword.t, non_neg_integer | nil}
def build(query, env) do
{query, binds} = escape(query)
{count_bind, quoted} =
case Macro.expand(query, env) do
schema when is_atom(schema) ->
# Get the source at runtime so no unnecessary compile time
# dependencies between modules are added
source = quote do: unquote(schema).__schema__(:source)
prefix = quote do: unquote(schema).__schema__(:prefix)
{1, query(prefix, source, schema)}
source when is_binary(source) ->
# When a binary is used, there is no schema
{1, query(nil, source, nil)}
{source, schema} when is_binary(source) ->
prefix = quote do: unquote(schema).__schema__(:prefix)
{1, query(prefix, source, schema)}
other ->
{nil, other}
end
quoted = Builder.apply_query(quoted, __MODULE__, [length(binds)], env)
{quoted, binds, count_bind}
end
defp query(prefix, source, schema) do
{:%, [], [Ecto.Query, {:%{}, [], [from: {source, schema}, prefix: prefix]}]}
end
@doc """
The callback applied by `build/2` to build the query.
"""
@spec apply(Ecto.Queryable.t, non_neg_integer) :: Ecto.Query.t
def apply(query, binds) do
query = Ecto.Queryable.to_query(query)
check_binds(query, binds)
query
end
defp check_binds(query, count) do
if count > 1 and count > Builder.count_binds(query) do
Builder.error! "`from` in query expression specified #{count} " <>
"binds but query contains #{Builder.count_binds(query)} binds"
end
end
end
|
data/web/deps/ecto/lib/ecto/query/builder/from.ex
| 0.899548
| 0.511534
|
from.ex
|
starcoder
|
defmodule Tesla.Middleware.Retry do
@moduledoc """
Retry using exponential backoff and full jitter. By default this middleware only retries in the
case of connection errors (`nxdomain`, `connrefused`, etc.). Application-level error
checking for retries can be customized through the `:should_retry` option by
providing a function that returns a boolean.
## Backoff algorithm
The backoff algorithm optimizes for tight bounds on completing a request successfully.
It does this by first calculating an exponential backoff factor based on the
number of retries that have been performed. It then multiplies this factor against the
base delay. The total maximum delay is found by taking the minimum of either the calculated delay
or the maximum delay specified. This creates an upper bound on the maximum delay
we can see.
In order to find the actual delay value we take a random number between 0 and
the maximum delay based on a uniform distribution. This randomness ensures that
our retried requests don't "harmonize" making it harder for the downstream
service to heal.
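As a worked example with the defaults (`delay: 50`, `max_delay: 5_000`): attempt 0 sleeps
a uniform random 1..50 ms, attempt 3 sleeps 1..400 ms (factor `2^3 = 8`, so `50 * 8 = 400`),
and from attempt 7 onward the upper bound is capped at 5_000 ms.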
## Example
```
defmodule MyClient do
use Tesla
plug Tesla.Middleware.Retry,
delay: 500,
max_retries: 10,
max_delay: 4_000,
should_retry: fn
{:ok, %{status: status}} when status in [400, 500] -> true
{:ok, _} -> false
{:error, _} -> true
end
end
```
## Options
- `:delay` - The base delay in milliseconds (defaults to 50)
- `:max_retries` - maximum number of retries (defaults to 5)
- `:max_delay` - maximum delay in milliseconds (defaults to 5000)
- `:should_retry` - function to determine if request should be retried
"""
@behaviour Tesla.Middleware
@defaults [
delay: 50,
max_retries: 5,
max_delay: 5_000
]
@impl Tesla.Middleware
def call(env, next, opts) do
opts = opts || []
context = %{
retries: 0,
delay: Keyword.get(opts, :delay, @defaults[:delay]),
max_retries: Keyword.get(opts, :max_retries, @defaults[:max_retries]),
max_delay: Keyword.get(opts, :max_delay, @defaults[:max_delay]),
should_retry: Keyword.get(opts, :should_retry, &match?({:error, _}, &1))
}
retry(env, next, context)
end
# If we have max retries set to 0 don't retry
defp retry(env, next, %{max_retries: 0}), do: Tesla.run(env, next)
# If we're on our last retry then just run and don't handle the error
defp retry(env, next, %{max_retries: max, retries: max}) do
Tesla.run(env, next)
end
# Otherwise we retry if we get a retriable error
defp retry(env, next, context) do
res = Tesla.run(env, next)
if context.should_retry.(res) do
backoff(context.max_delay, context.delay, context.retries)
context = update_in(context, [:retries], &(&1 + 1))
retry(env, next, context)
else
res
end
end
# Exponential backoff with jitter
defp backoff(cap, base, attempt) do
factor = :math.pow(2, attempt)
max_sleep = trunc(min(cap, base * factor))
delay = :rand.uniform(max_sleep)
:timer.sleep(delay)
end
end
|
lib/tesla/middleware/retry.ex
| 0.931921
| 0.852935
|
retry.ex
|
starcoder
|
defmodule StbImage do
@moduledoc """
Tiny image encoding and decoding.
The following formats are supported and have type u8:
* JPEG baseline & progressive (12 bpc/arithmetic not supported, same as stock IJG lib)
* PNG 1/2/4/8/16-bit-per-channel
* TGA
* BMP non-1bpp, non-RLE
* PSD (composited view only, no extra channels, 8/16 bit-per-channel)
* GIF (always reports as 4-channel)
* PIC (Softimage PIC)
* PNM (PPM and PGM binary only)
The following formats are supported and have type f32:
* HDR (radiance rgbE format) (type is f32)
There are also specific functions for working with GIFs.
"""
@doc """
The `StbImage` struct.
It has the following fields:
* `:data` - a blob with the image bytes in HWC (height-width-channels) order
* `:shape` - a tuple with the `{height, width, channels}`
* `:type` - the type unit in the binary (`{:u, 8}` or `{:f, 32}`)
The number of channels correlate directly to the color mode.
1 channel is greyscale, 2 is greyscale+alpha, 3 is RGB, and
4 is RGB+alpha.
"""
defstruct [:data, :shape, :type]
defguardp is_path(path) when is_binary(path) or is_list(path)
defguardp is_dimension(d) when is_integer(d) and d > 0
@doc """
Creates a StbImage directly.
`data` is a binary blob with the image bytes in HWC
(height-width-channels) order. `shape` is a tuple
with the `height`, `width`, and `channel` dimensions.
## Options
* `:type` - The type of the data. Defaults to `{:u, 8}`.
Must be one of `{:u, 8}` or `{:f, 32}`. The `:u8` and
`:f32` convenience atom syntax is also available.
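## Example
A minimal sketch (hypothetical pixel values): a 1x2 RGB `u8` image
needs `1 * 2 * 3` bytes of data.
img = StbImage.new(<<0, 0, 0, 255, 255, 255>>, {1, 2, 3})
{1, 2, 3} = img.shape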
"""
def new(data, {h, w, c} = shape, opts \\ [])
when is_binary(data) and is_dimension(h) and is_dimension(w) and c in 1..4 do
type = type(opts[:type] || :u8)
if byte_size(data) == h * w * c * bytes(type) do
%StbImage{data: data, shape: shape, type: type}
else
raise ArgumentError,
"cannot create StbImage because number of bytes do not match shape and type"
end
end
@compile {:no_warn_undefined, Nx}
@doc """
Converts a `StbImage` to a Nx tensor.
It accepts the same options as `Nx.from_binary/3`.
"""
def to_nx(%StbImage{data: data, type: type, shape: shape}, opts \\ []) do
data
|> Nx.from_binary(type, opts)
|> Nx.reshape(shape, names: [:height, :width, :channels])
end
@doc """
Creates a `StbImage` from a Nx tensor.
The tensor is expected to have shape `{h, w, c}`
and one of the supported types (u8/f32).
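## Example
A sketch, assuming the optional `Nx` dependency is available:
tensor = Nx.iota({2, 2, 3}, type: {:u, 8})
img = StbImage.from_nx(tensor)
{2, 2, 3} = img.shape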
"""
def from_nx(tensor) when is_struct(tensor, Nx.Tensor) do
new(Nx.to_binary(tensor), tensor_shape(Nx.shape(tensor)), type: tensor_type(Nx.type(tensor)))
end
defp tensor_type({:u, 8}), do: {:u, 8}
defp tensor_type({:f, 32}), do: {:f, 32}
defp tensor_type(type),
do: raise(ArgumentError, "unsupported tensor type: #{inspect(type)} (expected u8/f32)")
defp tensor_shape({_, _, c} = shape) when c in 1..4,
do: shape
defp tensor_shape(shape),
do:
raise(
ArgumentError,
"unsupported tensor shape: #{inspect(shape)} (expected height-width-channel)"
)
@doc """
Reads image from file at `path`.
## Options
* `:channels` - The number of desired channels.
Use `0` for auto-detection. Defaults to 0.
## Example
{:ok, img} = StbImage.read_file("/path/to/image")
{h, w, c} = img.shape
data = img.data
# If you know the image is a 4-channel image and auto-detection failed
{:ok, img} = StbImage.read_file("/path/to/image", channels: 4)
{h, w, c} = img.shape
img = img.data
"""
def read_file(path, opts \\ []) when is_path(path) and is_list(opts) do
channels = opts[:channels] || 0
case StbImage.Nif.read_file(path_to_charlist(path), channels) do
{:ok, img, shape, bytes} ->
{:ok, %StbImage{data: img, shape: shape, type: bytes_to_type(bytes)}}
{:error, reason} ->
{:error, List.to_string(reason)}
end
end
@doc """
Raising version of `read_file/2`.
"""
def read_file!(buffer, opts \\ []) do
case read_file(buffer, opts) do
{:ok, img} -> img
{:error, reason} -> raise ArgumentError, reason
end
end
@doc """
Reads image from `binary` representing an image.
## Options
* `:channels` - The number of desired channels.
Use `0` for auto-detection. Defaults to 0.
## Example
{:ok, buffer} = File.read("/path/to/image")
{:ok, img} = StbImage.read_binary(buffer)
{h, w, c} = img.shape
img = img.data
# If you know the image is a 4-channel image and auto-detection failed
{:ok, img} = StbImage.read_binary(buffer, channels: 4)
{h, w, c} = img.shape
img = img.data
"""
def read_binary(buffer, opts \\ []) when is_binary(buffer) and is_list(opts) do
channels = opts[:channels] || 0
case StbImage.Nif.read_binary(buffer, channels) do
{:ok, img, shape, bytes} ->
{:ok, %StbImage{data: img, shape: shape, type: bytes_to_type(bytes)}}
{:error, reason} ->
{:error, List.to_string(reason)}
end
end
@doc """
Raising version of `read_binary/2`.
"""
def read_binary!(buffer, opts \\ []) do
case read_binary(buffer, opts) do
{:ok, img} -> img
{:error, reason} -> raise ArgumentError, reason
end
end
@doc """
Reads GIF image from file at `path`.
## Example
{:ok, frames, delays} = StbImage.read_gif_file("/path/to/image")
frame = Enum.at(frames, 0)
{h, w, 3} = frame.shape
"""
def read_gif_file(path) when is_binary(path) or is_list(path) do
with {:ok, binary} <- File.read(path) do
read_gif_binary(binary)
end
end
@doc """
Decodes GIF image from a `binary` representing a GIF.
## Example
{:ok, buffer} = File.read("/path/to/image")
{:ok, frames, delays} = StbImage.read_gif_binary(buffer)
frame = Enum.at(frames, 0)
{h, w, 3} = frame.shape
"""
def read_gif_binary(binary) when is_binary(binary) do
with {:ok, frames, shape, delays} <- StbImage.Nif.read_gif_binary(binary) do
stb_frames = for frame <- frames, do: %StbImage{data: frame, shape: shape, type: {:u, 8}}
{:ok, stb_frames, delays}
end
end
@encoding_formats ~w(jpg png bmp tga hdr)a
@encoding_formats_string Enum.map_join(@encoding_formats, ", ", &inspect/1)
@doc """
Writes image to the file at `path`.
The supported formats are #{@encoding_formats_string}.
The format is determined from the file extension if possible,
you can also pass it explicitly via the `:format` option.
Returns `:ok` on success and `{:error, reason}` otherwise.
Make sure the directory you intend to write the file to exists,
otherwise an error is returned.
## Options
* `:format` - one of the supported image formats
"""
def write_file(%StbImage{data: data, shape: shape, type: type}, path, opts \\ []) do
{height, width, channels} = shape
format = opts[:format] || format_from_path!(path)
assert_write_type_and_format!(type, format)
case StbImage.Nif.write_file(path_to_charlist(path), format, data, height, width, channels) do
:ok -> :ok
{:error, reason} -> {:error, List.to_string(reason)}
end
end
@doc """
Raising version of `write_file/3`.
"""
def write_file!(image, path, opts \\ []) do
case write_file(image, path, opts) do
:ok -> :ok
{:error, reason} -> raise ArgumentError, reason
end
end
@doc """
Encodes image to a binary.
The supported formats are #{@encoding_formats_string}.
## Example
img = StbImage.new(raw_img, {h, w, channels})
binary = StbImage.to_binary(img, :png)
"""
def to_binary(%StbImage{data: data, shape: shape, type: type}, format) do
assert_write_type_and_format!(type, format)
{height, width, channels} = shape
case StbImage.Nif.to_binary(format, data, height, width, channels) do
{:ok, binary} -> binary
{:error, reason} -> raise ArgumentError, "#{reason}"
end
end
@doc """
Resizes the image into the given `output_h` and `output_w`.
## Example
img = StbImage.new(raw_img, {h, w, channels})
StbImage.resize(img, div(h, 2), div(w, 2))
"""
def resize(
%StbImage{data: data, shape: {height, width, channels}, type: type},
output_h,
output_w
)
when is_dimension(output_h) and is_dimension(output_w) do
case StbImage.Nif.resize(data, height, width, channels, output_h, output_w, bytes(type)) do
{:ok, output_pixels} ->
%StbImage{data: output_pixels, shape: {output_h, output_w, channels}, type: type}
{:error, reason} ->
raise ArgumentError, "#{reason}"
end
end
defp assert_write_type_and_format!(type, format) when format in [:png, :jpg, :bmp, :tga] do
if type != {:u, 8} do
raise ArgumentError, "incompatible type (#{inspect(type)}) for #{inspect(format)}"
end
end
defp assert_write_type_and_format!(type, format) when format in [:hdr] do
if type != {:f, 32} do
raise ArgumentError, "incompatible type (#{inspect(type)}) for #{inspect(format)}"
end
end
defp assert_write_type_and_format!(_, format) do
raise ArgumentError,
"got an unsupported encoding format #{inspect(format)}, " <>
"the format must be one of #{inspect(@encoding_formats)}"
end
defp format_from_path!(path) do
case Path.extname(path) do
".jpg" ->
:jpg
".jpeg" ->
:jpg
".png" ->
:png
".bmp" ->
:bmp
".tga" ->
:tga
".hdr" ->
:hdr
ext ->
raise "could not determine a supported encoding format for file #{inspect(path)} with extension #{inspect(ext)}, " <>
"please specify a supported :format option explicitly"
end
end
defp path_to_charlist(path) when is_list(path), do: path
defp path_to_charlist(path) when is_binary(path), do: String.to_charlist(path)
defp type(:u8), do: {:u, 8}
defp type(:f32), do: {:f, 32}
defp type({:u, 8}), do: {:u, 8}
defp type({:f, 32}), do: {:f, 32}
defp bytes({_, s}), do: div(s, 8)
defp bytes_to_type(1), do: {:u, 8}
defp bytes_to_type(4), do: {:f, 32}
end
|
lib/stb_image.ex
| 0.939975
| 0.668021
|
stb_image.ex
|
starcoder
|
import Kernel, except: [to_binary: 1]
defmodule Macro do
@moduledoc """
This module provides conveniences for working with macros.
"""
@doc """
Returns a list of binary operators. This is available
as a macro so it can be used in guard clauses.
"""
defmacro binary_ops do
[
:===, :!==,
:==, :!=, :<=, :>=,
:&&, :||, :<>, :++, :--, :**, ://, :::, :<-, :.., :|>, :=~,
:<, :>,
:+, :-, :*, :/, :=, :|, :.,
:and, :or, :xor, :when, :in, :inlist, :inbits,
:<<<, :>>>, :|||, :&&&, :^^^, :~~~
]
end
@doc """
Returns a list of unary operators. This is available
as a macro so it can be used in guard clauses.
"""
defmacro unary_ops do
[:!, :@, :^, :not, :+, :-]
end
@doc """
Receives an expression representing a possible definition
and extracts its arguments. It returns a tuple with the
function name and the arguments list or `:error` if not
a valid call syntax.
This is useful for macros that want to provide the same
arguments syntax available in def/defp/defmacro and friends.
## Examples
extract_args(quote do: foo) == { :foo, [] }
extract_args(quote do: foo()) == { :foo, [] }
extract_args(quote do: foo(1,2,3)) == { :foo, [1,2,3] }
extract_args(quote do: 1.(1,2,3)) == :error
"""
def extract_args(expr) do
:elixir_clauses.extract_args(expr)
end
@doc """
Recursively escapes a value so it can be inserted
into a syntax tree.
## Examples
iex> Macro.escape(:foo)
:foo
iex> Macro.escape({ :a, :b, :c })
{ :{}, [], [:a, :b, :c] }
"""
def escape(expr) do
:elixir_quote.escape(expr, false) |> elem(0)
end
@doc false
def escape_quoted(expr) do
:elixir_quote.escape(expr, true) |> elem(0)
end
@doc %B"""
Unescape the given chars. This is the unescaping behavior
used by default in Elixir single- and double-quoted strings.
Check `unescape_binary/2` for information on how to customize
the escaping map.
In this setup, Elixir will escape the following: `\a`, `\b`,
`\d`, `\e`, `\f`, `\n`, `\r`, `\s`, `\t` and `\v`. Octals are
also escaped according to the latin1 set they represent.
This function is commonly used on sigil implementations
(like `%r`, `%b` and others).
## Examples
iex> Macro.unescape_binary("example\\n")
"example\n"
In the example above, we pass a string with `\n` escaped
and we return a version with it unescaped.
"""
def unescape_binary(chars) do
:elixir_interpolation.unescape_chars(chars)
end
@doc %B"""
Unescape the given chars according to the map given.
Check `unescape_binary/1` if you want to use the same map
as Elixir single- and double-quoted strings.
## Map
The map must be a function. The function receives an integer
representing the number of the characters it wants to unescape.
Here is the default mapping function implemented by Elixir:
def unescape_map(?a), do: ?\a
def unescape_map(?b), do: ?\b
def unescape_map(?d), do: ?\d
def unescape_map(?e), do: ?\e
def unescape_map(?f), do: ?\f
def unescape_map(?n), do: ?\n
def unescape_map(?r), do: ?\r
def unescape_map(?s), do: ?\s
def unescape_map(?t), do: ?\t
def unescape_map(?v), do: ?\v
def unescape_map(e), do: e
If the `unescape_map` function returns false, the char is
not unescaped and `\` is kept in the char list.
## Octals
Octals will by default be escaped unless the map function
returns false for ?0.
## Hex
Hexadecimals will by default be escaped unless the map function
returns false for ?x.
## Examples
Using the unescape_map defined above is easy:
Macro.unescape_binary "example\\n", unescape_map(&1)
"""
def unescape_binary(chars, map) do
:elixir_interpolation.unescape_chars(chars, map)
end
@doc """
Unescape the given tokens according to the default map.
Check `unescape_binary/1` and `unescape_binary/2` for more
information about unescaping.
Only tokens that are binaries are unescaped, all others are
ignored. This function is useful when implementing your own
sigils. Check the implementation of `Kernel.__b__`
for examples.
"""
def unescape_tokens(tokens) do
:elixir_interpolation.unescape_tokens(tokens)
end
@doc """
Unescape the given tokens according to the given map.
Check `unescape_tokens/1` and `unescape_binary/2` for more information.
"""
def unescape_tokens(tokens, map) do
:elixir_interpolation.unescape_tokens(tokens, map)
end
@doc """
Converts the given expression to a binary.
## Examples
iex> Macro.to_binary(quote do: foo.bar(1, 2, 3))
"foo.bar(1, 2, 3)"
"""
def to_binary(tree)
# Variables
def to_binary({ var, _, atom }) when is_atom(atom) do
atom_to_binary(var, :utf8)
end
# Aliases
def to_binary({ :__aliases__, _, refs }) do
Enum.map_join(refs, ".", call_to_binary(&1))
end
# Blocks
def to_binary({ :__block__, _, [expr] }) do
to_binary(expr)
end
def to_binary({ :__block__, _, _ } = expr) do
block = adjust_new_lines block_to_binary(expr), "\n "
"(\n " <> block <> "\n)"
end
# Bits containers
def to_binary({ :<<>>, _, args }) do
"<<" <> Enum.map_join(args, ", ", to_binary(&1)) <> ">>"
end
# Tuple containers
def to_binary({ :{}, _, args }) do
"{" <> Enum.map_join(args, ", ", to_binary(&1)) <> "}"
end
# List containers
def to_binary({ :[], _, args }) do
"[" <> Enum.map_join(args, ", ", to_binary(&1)) <> "]"
end
# Fn keyword
def to_binary({ :fn, _, [[do: { :->, _, [{_,tuple}] } = arrow]] })
when not is_tuple(tuple) or elem(tuple, 0) != :__block__ do
"fn " <> arrow_to_binary(arrow) <> " end"
end
def to_binary({ :fn, _, [[do: { :->, _, [_] } = block]] }) do
"fn " <> block_to_binary(block) <> "\nend"
end
def to_binary({ :fn, _, [[do: block]] }) do
block = adjust_new_lines block_to_binary(block), "\n "
"fn\n " <> block <> "\nend"
end
# Partial call
def to_binary({ :&, _, [num] }) do
"&#{num}"
end
# left -> right
def to_binary({ :->, _, _ } = arrow) do
"(" <> arrow_to_binary(arrow, true) <> ")"
end
# Binary ops
def to_binary({ op, _, [left, right] }) when op in binary_ops do
op_to_binary(left) <> " #{op} " <> op_to_binary(right)
end
# Unary ops
def to_binary({ :not, _, [arg] }) do
"not " <> to_binary(arg)
end
def to_binary({ op, _, [arg] }) when op in unary_ops do
atom_to_binary(op, :utf8) <> to_binary(arg)
end
# All other calls
def to_binary({ target, _, args }) when is_list(args) do
{ list, last } = :elixir_tree_helpers.split_last(args)
case is_kw_blocks?(last) do
true -> call_to_binary_with_args(target, list) <> kw_blocks_to_binary(last)
false -> call_to_binary_with_args(target, args)
end
end
# Two-item tuples
def to_binary({ left, right }) do
to_binary({ :{}, [], [left, right] })
end
# Lists
def to_binary(list) when is_list(list) do
to_binary({ :[], [], list })
end
# All other structures
def to_binary(other), do: inspect(other, raw: true)
# Block keywords
defmacrop kw_keywords, do: [:do, :catch, :rescue, :after, :else]
defp is_kw_blocks?([_|_] = kw) do
Enum.all?(kw, match?({x, _} when x in kw_keywords, &1))
end
defp is_kw_blocks?(_), do: false
defp module_to_binary(atom) when is_atom(atom), do: inspect(atom, raw: true)
defp module_to_binary(other), do: call_to_binary(other)
defp call_to_binary(atom) when is_atom(atom), do: atom_to_binary(atom, :utf8)
defp call_to_binary({ :., _, [arg] }), do: module_to_binary(arg) <> "."
defp call_to_binary({ :., _, [left, right] }), do: module_to_binary(left) <> "." <> call_to_binary(right)
defp call_to_binary(other), do: to_binary(other)
defp call_to_binary_with_args(target, args) do
args = Enum.map_join(args, ", ", to_binary(&1))
call_to_binary(target) <> "(" <> args <> ")"
end
defp kw_blocks_to_binary(kw) do
Enum.reduce(kw_keywords, " ", fn(x, acc) ->
case Keyword.has_key?(kw, x) do
true -> acc <> kw_block_to_binary(x, Keyword.get(kw, x))
false -> acc
end
end) <> "end"
end
defp kw_block_to_binary(key, value) do
block = adjust_new_lines block_to_binary(value), "\n "
atom_to_binary(key, :utf8) <> "\n " <> block <> "\n"
end
defp block_to_binary({ :->, _, exprs }) do
Enum.map_join(exprs, "\n", fn({ left, right }) ->
left = comma_join_or_empty_paren(left, false)
left <> "->\n " <> adjust_new_lines block_to_binary(right), "\n "
end)
end
defp block_to_binary({ :__block__, _, exprs }) do
Enum.map_join(exprs, "\n", to_binary(&1))
end
defp block_to_binary(other), do: to_binary(other)
defp op_to_binary({ op, _, [_, _] } = expr) when op in binary_ops do
"(" <> to_binary(expr) <> ")"
end
defp op_to_binary(expr), do: to_binary(expr)
defp arrow_to_binary({ :->, _, pairs }, paren // false) do
Enum.map_join(pairs, "; ", fn({ left, right }) ->
left = comma_join_or_empty_paren(left, paren)
left <> "-> " <> to_binary(right)
end)
end
defp comma_join_or_empty_paren([], true), do: "() "
defp comma_join_or_empty_paren([], false), do: ""
defp comma_join_or_empty_paren(left, _) do
Enum.map_join(left, ", ", to_binary(&1)) <> " "
end
defp adjust_new_lines(block, replacement) do
bc <<x>> inbits block do
<< case x == ?\n do
true -> replacement
false -> <<x>>
end :: binary >>
end
end
@doc """
Receives an expression representation and expands it. The following
contents are expanded:
* Macros (local or remote);
* Aliases are expanded (if possible) and return atoms;
* All pseudo-variables (__FILE__, __MODULE__, etc);
* Module attributes reader (@foo);
In case the expression cannot be expanded, it returns the expression itself.
Notice that `Macro.expand` is not recursive and it does not
expand child expressions. In this example
Macro.expand(quote(do: !some_macro), __ENV__)
`!some_macro` will expand to something like:
case some_macro do
false -> true
nil -> true
_ -> false
end
Notice that the `!` operator is a macro that expands to a case.
Even though `some_macro` is also a macro, it is not expanded
because it is a child expression given to `!` as argument.
## Examples
In the example below, we have a macro that generates a module
with a function named `name_length` that returns the length
of the module name. The value of this function will be calculated
at compilation time and not at runtime.
Consider the implementation below:
defmacro defmodule_with_length(name, do: block) do
length = length(atom_to_list(name))
quote do
defmodule unquote(name) do
def name_length, do: unquote(length)
unquote(block)
end
end
end
When invoked like this:
defmodule_with_length My.Module do
def other_function, do: ...
end
The compilation will fail because `My.Module` when quoted
is not an atom, but a syntax tree as follow:
{:__aliases__, [], [:My, :Module] }
That said, we need to expand the aliases node above to an
atom, so we can retrieve its length. Expanding the node is
not straight-forward because we also need to expand the
caller aliases. For example:
alias MyHelpers, as: My
defmodule_with_length My.Module do
def other_function, do: ...
end
The final module name will be `MyHelpers.Module` and not
`My.Module`. With `Macro.expand`, such aliases are taken
into consideration. Local and remote macros are also
expanded. We could rewrite our macro above to use this
function as:
defmacro defmodule_with_length(name, do: block) do
expanded = Macro.expand(name, __CALLER__)
length = length(atom_to_list(expanded))
quote do
defmodule unquote(name) do
def name_length, do: unquote(length)
unquote(block)
end
end
end
"""
def expand(aliases, env) do
expand(aliases, env, nil)
end
defp expand({ :__aliases__, _, _ } = original, env, cache) do
case :elixir_aliases.expand(original, env.aliases, []) do
atom when is_atom(atom) -> atom
aliases ->
aliases = lc alias inlist aliases, do: expand(alias, env, cache)
case :lists.all(is_atom(&1), aliases) do
true -> :elixir_aliases.concat(aliases)
false -> original
end
end
end
# Expand @ calls
defp expand({ :@, _, [{ name, _, args }] } = original, env, _cache) when is_atom(args) or args == [] do
case (module = env.module) && Module.open?(module) do
true -> Module.get_attribute(module, name)
false -> original
end
end
# Expand pseudo-variables
defp expand({ :__MODULE__, _, atom }, env, _cache) when is_atom(atom), do: env.module
defp expand({ :__FILE__, _, atom }, env, _cache) when is_atom(atom), do: env.file
defp expand({ :__DIR__, _, atom }, env, _cache) when is_atom(atom), do: :filename.dirname(env.file)
defp expand({ :__ENV__, _, atom }, env, _cache) when is_atom(atom), do: env
# Expand possible macro import invocation
defp expand({ atom, line, args } = original, env, cache) when is_atom(atom) do
args = case is_atom(args) do
true -> []
false -> args
end
case not is_partial?(args) do
false -> original
true ->
module = env.module
extra = if function_exported?(module, :__info__, 1) do
[{ module, module.__info__(:macros) }]
else
[]
end
expand = :elixir_dispatch.expand_import(line, { atom, length(args) }, args,
env.module, extra, to_erl_env(env, cache))
case expand do
{ :ok, _, expanded } -> expanded
{ :error, _ } -> original
end
end
end
# Expand possible macro require invocation
defp expand({ { :., _, [left, right] }, line, args } = original, env, cache) when is_atom(right) do
receiver = expand(left, env)
case is_atom(receiver) and not is_partial?(args) do
false -> original
true ->
expand = :elixir_dispatch.expand_require(line, receiver, { right, length(args) },
args, env.module, to_erl_env(env, cache))
case expand do
{ :ok, _receiver, expanded } -> expanded
{ :error, _ } -> original
end
end
end
# Anything else is just returned
defp expand(other, _env, _cache), do: other
defp to_erl_env(env, nil), do: :elixir_scope.to_erl_env(env)
defp to_erl_env(_env, cache), do: cache
## Helpers
defp is_partial?(args) do
:lists.any(match?({ :&, _, [_] }, &1), args)
end
@doc """
Recurs the quoted expression checking if all sub terms are
safe (i.e. they represented data structured and don't actually
evaluate code) and returns `:ok` unless a given term is unsafe,
which is returned as `{ :unsafe, term }`.
"""
def safe_term(terms) do
do_safe_term(terms) || :ok
end
defp do_safe_term({ local, _, terms }) when local in [:{}, :[], :__aliases__] do
do_safe_term(terms)
end
defp do_safe_term({ unary, _, [term] }) when unary in [:+, :-] do
do_safe_term(term)
end
defp do_safe_term({ left, right }), do: do_safe_term(left) || do_safe_term(right)
defp do_safe_term(terms) when is_list(terms), do: Enum.find_value(terms, do_safe_term(&1))
defp do_safe_term(terms) when is_tuple(terms), do: { :unsafe, terms }
defp do_safe_term(_), do: nil
end
|
lib/elixir/lib/macro.ex
| 0.75392
| 0.66609
|
macro.ex
|
starcoder
|
defmodule Cforum.MessagesUsers do
import Ecto.Query, warn: false
alias Cforum.Repo
alias Cforum.Messages.Message
alias Cforum.MessagesTags.MessageTag
alias Cforum.Tags.Tag
alias Cforum.Votes.Vote
alias Cforum.Scores.Score
alias Cforum.Users.User
@doc """
Returns a list of messages for a user, limited to the forums specified in `forum_ids`
## Examples
iex> list_messages_for_user(%User{}, [1, 2], limit: [quantity: 10, offset: 0])
[%Message{}, ...]
"""
def list_messages_for_user(user, forum_ids, query_params \\ [order: nil, limit: nil]) do
from(
m in Message,
preload: [:user, thread: :forum],
where: m.user_id == ^user.user_id and m.deleted == false and m.forum_id in ^forum_ids
)
|> Cforum.PagingApi.set_limit(query_params[:limit])
|> Cforum.OrderApi.set_ordering(query_params[:order], desc: :created_at)
|> Repo.all()
|> Repo.preload(tags: from(t in Tag, order_by: [asc: :tag_name]))
end
@doc """
Counts the messages for a user, limited to the forums specified in `forum_ids`
## Examples
iex> count_messages_for_user(%User{}, [1, 2])
10
"""
def count_messages_for_user(user, forum_ids) do
from(
m in Message,
where: m.user_id == ^user.user_id and m.deleted == false and m.forum_id in ^forum_ids,
select: count("*")
)
|> Repo.one()
end
@doc """
Lists the `limit` best scored messages for a user (limited to forums listed in `forum_ids`).
Although this function is very similar to `list_messages_for_user`, we can't
really use that API due to limitations in the `order_by`.
## Examples
iex> list_best_scored_messages_for_user(%User{}, [1, 2])
[%Message{}, ...]
"""
def list_best_scored_messages_for_user(user, forum_ids, limit \\ 10) do
from(
m in Message,
preload: [:user, [thread: :forum]],
where: m.deleted == false and m.upvotes > 0 and m.user_id == ^user.user_id and m.forum_id in ^forum_ids,
order_by: fragment("upvotes - downvotes DESC"),
limit: ^limit
)
|> Repo.all()
|> Repo.preload(tags: from(t in Tag, order_by: [asc: :tag_name]))
end
defp int_list_scored_msgs_for_user_in_perspective(cuser, user, forum_ids, limit)
defp int_list_scored_msgs_for_user_in_perspective(%User{user_id: cuid}, user = %User{user_id: uid}, forum_ids, limit)
when cuid == uid do
from(
s in Score,
left_join: m1 in Message,
on: m1.message_id == s.message_id,
left_join: v in Vote,
on: s.vote_id == v.vote_id,
left_join: m2 in Message,
on: v.message_id == m2.message_id,
where: s.user_id == ^user.user_id,
where: is_nil(m1.message_id) or (m1.forum_id in ^forum_ids and m1.deleted == false),
where: is_nil(m2.message_id) or (m2.forum_id in ^forum_ids and m2.deleted == false),
order_by: [desc: :created_at]
)
|> Cforum.PagingApi.set_limit(limit)
end
defp int_list_scored_msgs_for_user_in_perspective(_, user, forum_ids, limit) do
from(
s in Score,
left_join: m1 in Message,
on: m1.message_id == s.message_id,
left_join: v in Vote,
on: s.vote_id == v.vote_id,
left_join: m2 in Message,
on: v.message_id == m2.message_id,
where: s.user_id == ^user.user_id,
where: is_nil(m1.message_id) or (m1.forum_id in ^forum_ids and m1.deleted == false),
where:
is_nil(m2.message_id) or (m2.forum_id in ^forum_ids and m2.deleted == false and m2.user_id == ^user.user_id),
where: s.value > 0,
order_by: [desc: :created_at]
)
|> Cforum.PagingApi.set_limit(limit)
end
@doc """
Lists scored messages for a user from the perspective of another user, i.e. leaves
out negative votings (a user gets a negative score for voting negative) unless
the user is looking at their own scores; limited to the forums defined in `forum_ids`
## Arguments
user: the user whose scores we are listing
current_user: the user from whose perspective we are looking
forum_ids: the list of forums we are interested in
limit: the number of messages we want to get
## Examples
iex> list_scored_msgs_for_user_in_perspective(%User{}, nil, [1, 2])
[%Message{}, ...]
iex> list_scored_msgs_for_user_in_perspective(%User{}, %User{}, [1, 2])
[%Message{}]
"""
def list_scored_msgs_for_user_in_perspective(user, current_user, forum_ids, limit \\ [quantity: 10, offset: 0]) do
current_user
|> int_list_scored_msgs_for_user_in_perspective(user, forum_ids, limit)
|> Repo.all()
|> Repo.preload(
message: [:user, tags: from(t in Tag, order_by: [asc: :tag_name]), thread: :forum],
vote: [message: [:user, tags: from(t in Tag, order_by: [asc: :tag_name]), thread: :forum]]
)
end
@doc """
Count the scored messages of the user in perspective; for a better explanation
look at `list_scored_msgs_for_user_in_perspective`
## Examples
iex> count_scored_msgs_for_user_in_perspective(%User{}, nil, [1, 2])
1
"""
def count_scored_msgs_for_user_in_perspective(user, current_user, forum_ids) do
current_user
|> int_list_scored_msgs_for_user_in_perspective(user, forum_ids, nil)
|> exclude(:preload)
|> exclude(:order_by)
|> select(count("*"))
|> Repo.one()
end
@doc """
Counts the messages for a user, grouped by month; for statistical purposes
## Examples
iex> count_messages_for_user_by_month(user)
[{"2017-01-01", 10}, ...]
"""
def count_messages_for_user_by_month(user, forum_ids) do
from(
m in Message,
select: {fragment("DATE_TRUNC('month', created_at) created_at"), count("*")},
where: m.user_id == ^user.user_id and m.deleted == false and m.forum_id in ^forum_ids,
group_by: fragment("DATE_TRUNC('month', created_at)"),
order_by: fragment("DATE_TRUNC('month', created_at)")
)
|> Repo.all()
end
@doc """
Count the number of messages for a user, grouped by tag and limited to the
forums defined in `forum_id`; returns a list of tuples consisting of
{tag slug, tag name, forum slug, forum short name, count}
## Examples
iex> count_messages_per_tag_for_user(%User{}, [1, 2])
[{"foo-bar", "Foo Bar", "self", "Selfforum", 10}, ...]
"""
def count_messages_per_tag_for_user(user, forum_ids, limit \\ 10) do
from(
mt in MessageTag,
inner_join: m in Message,
on: m.message_id == mt.message_id,
inner_join: t in Tag,
on: mt.tag_id == t.tag_id,
select: {t.slug, t.tag_name, count("*")},
where: m.deleted == false and m.user_id == ^user.user_id and m.forum_id in ^forum_ids,
group_by: [t.slug, t.tag_name],
order_by: fragment("COUNT(*) DESC"),
limit: ^limit
)
|> Repo.all()
end
end
|
lib/cforum/messages_users.ex
| 0.606498
| 0.434641
|
messages_users.ex
|
starcoder
|
defmodule Protobuf.JSON.Decode do
@moduledoc false
import Bitwise, only: [bsl: 2]
alias Protobuf.JSON.Utils
@compile {:inline,
fetch_field_value: 2,
decode_map: 2,
decode_repeated: 2,
decode_integer: 1,
decode_float: 1,
parse_float: 1,
decode_bytes: 1,
decode_key: 3,
parse_key: 2}
@int32_range -bsl(1, 31)..(bsl(1, 31) - 1)
@int64_range -bsl(1, 63)..(bsl(1, 63) - 1)
@uint32_range 0..(bsl(1, 32) - 1)
@uint64_range 0..(bsl(1, 64) - 1)
@int_ranges %{
int32: @int32_range,
int64: @int64_range,
sint32: @int32_range,
sint64: @int64_range,
sfixed32: @int32_range,
sfixed64: @int64_range,
fixed32: @int32_range,
fixed64: @int64_range,
uint32: @uint32_range,
uint64: @uint64_range
}
@int_types Map.keys(@int_ranges)
max_float = 3.402823466e38
@float_range {-max_float, max_float}
@float_types [:float, :double]
@duration_seconds_range -315_576_000_000..315_576_000_000
@spec from_json_data(term(), module()) :: struct()
def from_json_data(term, module)
# We start with all the Google built-in types that have specially-defined JSON decoding rules.
# These rules are listed here: https://developers.google.com/protocol-buffers/docs/proto3#json
# Note that we always have to keep the module names for the built-in types dynamic because
# these built-in types **do not ship with our library**.
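# As an illustration of the Duration clause below: "3s", "-3s" and
# "3.000000001s" are all accepted inputs (whole seconds with an optional
# fractional part, suffixed with "s"), per the proto3 JSON mapping.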
def from_json_data(string, Google.Protobuf.Duration = mod) when is_binary(string) do
case Integer.parse(string) do
{seconds, "s"} when seconds in @duration_seconds_range ->
mod.new!(seconds: seconds)
{seconds, "." <> nanos_with_s} when seconds in @duration_seconds_range ->
sign = if seconds < 0, do: -1, else: 1
case Utils.parse_nanoseconds(nanos_with_s) do
{nanos, "s"} -> mod.new!(seconds: seconds, nanos: nanos * sign)
:error -> throw({:bad_duration, string, nanos_with_s})
end
other ->
throw({:bad_duration, string, other})
end
end
def from_json_data(string, Google.Protobuf.Timestamp = mod) when is_binary(string) do
case Protobuf.JSON.RFC3339.decode(string) do
{:ok, seconds, nanos} -> mod.new!(seconds: seconds, nanos: nanos)
{:error, reason} -> throw({:bad_timestamp, string, reason})
end
end
def from_json_data(map, Google.Protobuf.Empty = mod) when map == %{} do
mod.new!()
end
def from_json_data(int, Google.Protobuf.Int32Value = mod),
do: mod.new!(value: decode_scalar(:int32, :unknown_name, int))
def from_json_data(int, Google.Protobuf.UInt32Value = mod),
do: mod.new!(value: decode_scalar(:uint32, :unknown_name, int))
def from_json_data(int, Google.Protobuf.UInt64Value = mod),
do: mod.new!(value: decode_scalar(:uint64, :unknown_name, int))
def from_json_data(int, Google.Protobuf.Int64Value = mod),
do: mod.new!(value: decode_scalar(:int64, :unknown_name, int))
def from_json_data(number, mod)
when mod in [
Google.Protobuf.FloatValue,
Google.Protobuf.DoubleValue
] and (is_float(number) or is_integer(number)) do
mod.new!(value: number * 1.0)
end
def from_json_data(bool, Google.Protobuf.BoolValue = mod) when is_boolean(bool) do
mod.new!(value: decode_scalar(:bool, :unknown_field, bool))
end
def from_json_data(string, Google.Protobuf.StringValue = mod) when is_binary(string) do
mod.new!(value: decode_scalar(:string, :unknown_field, string))
end
def from_json_data(bytes, Google.Protobuf.BytesValue = mod) when is_binary(bytes) do
mod.new!(value: decode_scalar(:bytes, :unknown_field, bytes))
end
def from_json_data(list, Google.Protobuf.ListValue = mod) when is_list(list) do
mod.new!(values: Enum.map(list, &from_json_data(&1, Google.Protobuf.Value)))
end
def from_json_data(struct, Google.Protobuf.Struct = mod) when is_map(struct) do
fields =
Map.new(struct, fn {key, val} -> {key, from_json_data(val, Google.Protobuf.Value)} end)
mod.new!(fields: fields)
end
def from_json_data(term, Google.Protobuf.Value = mod) do
cond do
is_nil(term) ->
mod.new!(kind: {:null_value, :NULL_VALUE})
is_binary(term) ->
mod.new!(kind: {:string_value, term})
is_integer(term) ->
mod.new!(kind: {:number_value, term * 1.0})
is_float(term) ->
mod.new!(kind: {:number_value, term})
is_boolean(term) ->
mod.new!(kind: {:bool_value, term})
is_list(term) ->
mod.new!(kind: {:list_value, from_json_data(term, Google.Protobuf.ListValue)})
is_map(term) ->
mod.new!(kind: {:struct_value, from_json_data(term, Google.Protobuf.Struct)})
true ->
throw({:bad_message, term, mod})
end
end
def from_json_data(data, Google.Protobuf.FieldMask = mod) when is_binary(data) do
paths = String.split(data, ",")
cond do
data == "" -> mod.new!(paths: [])
paths = Enum.map(paths, &convert_field_mask_to_underscore/1) -> mod.new!(paths: paths)
true -> throw({:bad_field_mask, data})
end
end
def from_json_data(data, module) when is_map(data) and is_atom(module) do
message_props = module.__message_props__()
regular = decode_regular_fields(data, message_props)
oneofs = decode_oneof_fields(data, message_props)
module
|> struct(regular)
|> struct(oneofs)
end
def from_json_data(data, module) when is_atom(module), do: throw({:bad_message, data, module})
defp convert_field_mask_to_underscore(mask) do
if mask =~ ~r/^[a-zA-Z0-9]+$/ do
Macro.underscore(mask)
else
throw({:bad_field_mask, mask})
end
end
defp decode_regular_fields(data, %{field_props: field_props}) do
Enum.flat_map(field_props, fn
{_field_num, %Protobuf.FieldProps{oneof: nil} = prop} ->
case fetch_field_value(prop, data) do
{:ok, value} ->
case decode_value(prop, value) do
nil -> []
value -> [{prop.name_atom, value}]
end
:error ->
[]
end
{_field_num, _prop} ->
[]
end)
end
defp decode_oneof_fields(data, %{field_props: field_props, oneof: oneofs}) do
for {oneof, index} <- oneofs,
{_field_num, %{oneof: ^index} = prop} <- field_props,
result = fetch_field_value(prop, data),
match?({:ok, _value}, result),
{:ok, value} = result,
not null_value?(value, prop) do
{oneof, prop.name_atom, decode_value(prop, value)}
end
|> Enum.reduce(%{}, fn {oneof, name, decoded_value}, acc ->
if Map.has_key?(acc, oneof) do
throw({:duplicated_oneof, oneof})
else
Map.put(acc, oneof, {name, decoded_value})
end
end)
end
defp null_value?(nil, %Protobuf.FieldProps{type: {:enum, Google.Protobuf.NullValue}}), do: false
defp null_value?(value, _props), do: is_nil(value)
defp fetch_field_value(%Protobuf.FieldProps{name: name_key, json_name: json_key}, data) do
case data do
%{^json_key => value} -> {:ok, value}
%{^name_key => value} -> {:ok, value}
_ -> :error
end
end
defp decode_value(%{optional?: true, type: type}, nil) when type != Google.Protobuf.Value,
do: nil
defp decode_value(%{map?: true} = prop, map), do: decode_map(prop, map)
defp decode_value(%{repeated?: true} = prop, list), do: decode_repeated(prop, list)
defp decode_value(%{repeated?: false} = prop, value), do: decode_singular(prop, value)
defp decode_map(%{type: module, name_atom: field}, map) when is_map(map) do
%{field_props: field_props, field_tags: field_tags} = module.__message_props__()
key_type = field_props[field_tags[:key]].type
val_prop = field_props[field_tags[:value]]
for {key, val} <- map, into: %{} do
{decode_key(key_type, key, field), decode_singular(val_prop, val)}
end
end
defp decode_map(prop, bad_map), do: throw({:bad_map, prop.name_atom, bad_map})
defp decode_key(type, key, field) when is_binary(key) do
case parse_key(type, key) do
{:ok, decoded} -> decoded
:error -> throw({:bad_map_key, field, type, key})
end
end
defp decode_key(type, key, field), do: throw({:bad_map_key, field, type, key})
# Map keys can be of any scalar type except float, double and bytes. They
# must always be wrapped in strings. Other types should not compile.
defp parse_key(:string, key), do: {:ok, key}
defp parse_key(:bool, "true"), do: {:ok, true}
defp parse_key(:bool, "false"), do: {:ok, false}
defp parse_key(type, key) when type in @int_types, do: parse_int(key)
defp parse_key(_type, _key), do: :error
defp decode_repeated(prop, value) when is_list(value) do
for val <- value, do: decode_singular(prop, val)
end
defp decode_repeated(prop, value) do
throw({:bad_repeated, prop.name_atom, value})
end
defp decode_singular(%{type: type} = prop, value)
when type in [:string, :bool, :bytes] or type in @int_types or type in @float_types do
decode_scalar(type, prop.name_atom, value)
end
defp decode_singular(%{type: {:enum, enum}} = prop, value) do
Map.get_lazy(enum.__reverse_mapping__(), value, fn ->
cond do
is_integer(value) and value in @int32_range -> value
is_nil(value) and enum == Google.Protobuf.NullValue -> :NULL_VALUE
true -> throw({:bad_enum, prop.name_atom, value})
end
end)
end
defp decode_singular(%{type: module, embedded?: true}, value) do
from_json_data(value, module)
end
defp decode_scalar(:string, name, value) do
if is_binary(value), do: value, else: throw({:bad_string, name, value})
end
defp decode_scalar(:bool, name, value) do
if is_boolean(value), do: value, else: throw({:bad_bool, name, value})
end
defp decode_scalar(type, name, value) when type in @int_types do
with {:ok, integer} <- decode_integer(value),
true <- integer in @int_ranges[type] do
integer
else
_ -> throw({:bad_int, name, value})
end
end
defp decode_scalar(type, name, value) when type in @float_types do
{float_min, float_max} = @float_range
# If the type is float, we check that it's in range. If the type is double, we don't need to
# do that cause the BEAM would throw an error for an out of bounds double anyways.
case decode_float(value) do
{:ok, float}
when type == :float and is_float(float) and (float < float_min or float > float_max) ->
# Float is out of range.
throw({:bad_float, name, value})
{:ok, value} ->
value
:error ->
throw({:bad_float, name, value})
end
end
defp decode_scalar(:bytes, name, value) do
with true <- is_binary(value),
{:ok, bytes} <- decode_bytes(value) do
bytes
else
_ -> throw({:bad_bytes, name})
end
end
defp decode_integer(integer) when is_integer(integer), do: {:ok, integer}
defp decode_integer(string) when is_binary(string), do: parse_int(string)
defp decode_integer(float) when is_float(float), do: parse_float_as_int(float)
defp decode_integer(_bad), do: :error
defp parse_int(string) do
case Integer.parse(string) do
{int, ""} -> {:ok, int}
_ -> :error
end
end
defp parse_float_as_int(float) do
truncated = trunc(float)
if float - truncated == 0.0 do
{:ok, truncated}
else
:error
end
end
defp decode_float(float) when is_float(float), do: {:ok, float}
defp decode_float(string) when is_binary(string), do: parse_float(string)
defp decode_float(_bad), do: :error
defp parse_float("-Infinity"), do: {:ok, :negative_infinity}
defp parse_float("Infinity"), do: {:ok, :infinity}
defp parse_float("NaN"), do: {:ok, :nan}
defp parse_float(string) do
case Float.parse(string) do
{float, ""} -> {:ok, float}
_ -> :error
end
end
# Both url-encoded and regular base64 are accepted, with and without padding.
defp decode_bytes(bytes) do
pattern = :binary.compile_pattern(["-", "_"])
if String.contains?(bytes, pattern) do
Base.url_decode64(bytes, padding: false)
else
Base.decode64(bytes, padding: false)
end
end
end
|
lib/protobuf/json/decode.ex
| 0.842086
| 0.545104
|
decode.ex
|
starcoder
|
defmodule TodoList do
@moduledoc """
"""
defstruct auto_id: 1, entries: %{}
@doc """
Creates a new TodoList
"""
def new(), do: %TodoList{}
@doc """
Add a new entry to a todo list
"""
def add_entry(todo_list, entry) do
entry = Map.put(entry, :id, todo_list.auto_id)
new_entries =
Map.put(
todo_list.entries,
todo_list.auto_id,
entry
)
%TodoList{todo_list | entries: new_entries, auto_id: todo_list.auto_id + 1}
end
@doc """
List all entries for a given date
"""
def entries(todo_list, date) do
todo_list.entries
|> Stream.filter(fn {_, entry} -> entry.date == date end)
|> Enum.map(fn {_, entry} -> entry end)
end
@doc """
Update an entry based upon an entry id, and a function for updating the entry
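For example, with a hypothetical entry id of 1:
update_entry(todo_list, 1, &Map.put(&1, :title, "Updated"))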
"""
def update_entry(todo_list, entry_id, updater_fun) do
case Map.fetch(todo_list.entries, entry_id) do
:error ->
todo_list
{:ok, old_entry} ->
old_entry_id = old_entry.id
new_entry = %{id: ^old_entry_id} = updater_fun.(old_entry)
new_entries = Map.put(todo_list.entries, new_entry.id, new_entry)
%TodoList{todo_list | entries: new_entries}
end
end
def update_entry(todo_list, %{} = new_entry) do
update_entry(todo_list, new_entry.id, fn _ -> new_entry end)
end
@doc """
Deletes an entry from the todo list, based on the id given
"""
def delete_entry(todo_list, entry_id) do
# remove the entry from the entries map rather than from the struct itself
%TodoList{todo_list | entries: Map.delete(todo_list.entries, entry_id)}
end
defimpl String.Chars, for: TodoList do
def to_string(_) do
"#TodoList"
end
end
defimpl Collectable, for: TodoList do
def into(original) do
{original, &into_callback/2}
end
defp into_callback(todo_list, {:cont, entry}) do
TodoList.add_entry(todo_list, entry)
end
defp into_callback(todo_list, :done), do: todo_list
defp into_callback(_todo_list, :halt), do: :ok
end
end
defmodule TodoList.CsvImporter do
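@moduledoc """
Imports a `TodoList` from a CSV file in which each line is expected to
hold a `yyyy/mm/dd` date and a title, e.g. `2023/12/19,Dentist`.
"""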
def import(filename) do
File.stream!(filename)
|> Stream.map(&String.replace(&1, "\n", ""))
|> Stream.map(&String.split(&1, ","))
|> Stream.map(&parse_date/1)
|> Stream.map(&create_map/1)
|> Enum.into(TodoList.new())
end
defp parse_date([date|tail]) do
[year, month, day] =
date
|> String.split("/")
|> Enum.map(&String.to_integer/1)
{:ok, date} = Date.new(year, month, day)
[ date | tail]
end
defp create_map([date | [task]]) do
%{date: date, title: task}
end
end
|
ch4/todo_list/lib/todo_list.ex
| 0.500732
| 0.412501
|
todo_list.ex
|
starcoder
|
defmodule Artemis.Helpers do
require Logger
@doc """
Generate a random string
"""
def random_string(string_length) do
string_length
|> :crypto.strong_rand_bytes()
|> Base.url_encode64()
|> binary_part(0, string_length)
end
@doc """
Detect if a value is present, i.e. not `nil`, an empty string, or `0`
"""
def present?(nil), do: false
def present?(""), do: false
def present?(0), do: false
def present?(_value), do: true
@doc """
Detect if a key's value is present
"""
def present?(entry, key) when is_list(entry) do
entry
|> Keyword.get(key)
|> present?
end
def present?(entry, key) when is_map(entry) do
entry
|> Map.get(key)
|> present?
end
@doc """
Renames a key in a map. If the key does not exist, original map is returned.
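Example:
rename_key(%{a: 1}, :a, :b)
Returns:
%{b: 1}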
"""
def rename_key(map, current_key, new_key) when is_map(map) do
case Map.has_key?(map, current_key) do
true ->
# move the value to the new key and drop the old one
map
|> Map.put(new_key, Map.get(map, current_key))
|> Map.delete(current_key)
false ->
map
end
end
@doc """
Takes the result of a `group_by` statement, applying the passed function
to each grouping's values. Returns a map.
"""
def reduce_group_by(grouped_data, function) do
Enum.reduce(grouped_data, %{}, fn {key, values}, acc ->
Map.put(acc, key, function.(values))
end)
end
@doc """
Takes a collection of values and an attribute and returns the max value for that attribute.
"""
def max_by_attribute(values, attribute, fun \\ fn x -> x end)
def max_by_attribute([], _, _), do: nil
def max_by_attribute(values, attribute, fun) do
values
|> Enum.max_by(&fun.(Map.get(&1, attribute)))
|> Map.get(attribute)
end
@doc """
Takes a collection of values and an attribute and returns the min value for that attribute.
"""
def min_by_attribute(values, attribute, fun \\ fn x -> x end)
def min_by_attribute([], _, _), do: nil
def min_by_attribute(values, attribute, fun) do
values
|> Enum.min_by(&fun.(Map.get(&1, attribute)))
|> Map.get(attribute)
end
@doc """
Returns a titlecased string. Example:
Input: hello world
Output: Hello World
"""
def titlecase(value) do
value
|> String.split(" ")
|> Enum.map(&String.capitalize(&1))
|> Enum.join(" ")
end
@doc """
Returns a simplified module name. Example:
Input: Elixir.MyApp.MyModule
Output: MyModule
"""
def module_name(module) do
module
|> Atom.to_string()
|> String.split(".")
|> List.last()
|> String.to_atom()
end
@doc """
Converts an atom or string to an integer
"""
def to_integer(value) when is_float(value), do: Kernel.trunc(value)
def to_integer(value) when is_atom(value), do: to_integer(Atom.to_string(value))
def to_integer(value) when is_bitstring(value), do: String.to_integer(value)
def to_integer(value), do: value
@doc """
Converts an atom or integer to a bitstring
"""
def to_string(value) when is_atom(value), do: Atom.to_string(value)
def to_string(value) when is_integer(value), do: Integer.to_string(value)
def to_string(value), do: value
@doc """
Converts a nested list to a nested map. Example:
Input: [[:one, :two, 3], [:one, :three, 3]]
Output: %{one: %{two: 3, three: 3}}
"""
def nested_list_to_map(nested_list) do
Enum.reduce(nested_list, %{}, fn item, acc ->
deep_merge(acc, list_to_map(item))
end)
end
@doc """
Converts a simple list to a nested map. Example:
Input: [:one, :two, 3]
Output: %{one: %{two: 3}}
"""
def list_to_map([head | tail]) when tail == [], do: head
def list_to_map([head | tail]) when is_integer(head), do: list_to_map([Integer.to_string(head) | tail])
def list_to_map([head | tail]), do: Map.put(%{}, head, list_to_map(tail))
@doc """
Deep merges two maps
See: https://stackoverflow.com/questions/38864001/elixir-how-to-deep-merge-maps/38865647#38865647
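Example:
deep_merge(%{a: %{b: 1}}, %{a: %{c: 2}})
Returns:
%{a: %{b: 1, c: 2}}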
"""
def deep_merge(left, right) do
Map.merge(left, right, &deep_resolve/3)
end
defp deep_resolve(_key, left = %{}, right = %{}) do
# Key exists in both maps, and both values are maps as well.
# These can be merged recursively.
deep_merge(left, right)
end
defp deep_resolve(_key, _left, right) do
# Key exists in both maps, but at least one of the values is
# NOT a map. We fall back to standard merge behavior, preferring
# the value on the right.
right
end
# Tasks
@doc """
Runs a list of tasks in parallel. Example:
async_await_many([&task_one/0, &task_two/0])
Returns:
["task_one/0 result", "task_two/0 result"]
## Maps
Also accepts a map:
async_await_many(%{
one: &task_one/0,
two: &task_two/0
})
Returns:
%{
one: "task_one/0 result",
two: "task_two/0 result"
}
"""
def async_await_many(tasks) when is_list(tasks) do
tasks
|> Enum.map(&Task.async(&1))
|> Enum.map(&Task.await/1)
end
def async_await_many(tasks) when is_map(tasks) do
values =
tasks
|> Map.values()
|> async_await_many
tasks
|> Map.keys()
|> Enum.zip(values)
|> Enum.into(%{})
end
@doc """
Recursively converts the keys of a map into an atom.
Options:
`:whitelist` -> List of strings to convert to atoms. When passed, only strings in whitelist will be converted.
Example:
keys_to_atoms(%{"nested" => %{"example" => "value"}})
Returns:
%{nested: %{example: "value"}}
"""
def keys_to_atoms(map, options \\ [])
def keys_to_atoms(%_{} = struct, _options), do: struct
def keys_to_atoms(map, options) when is_map(map) do
for {key, value} <- map, into: %{} do
key =
case is_bitstring(key) do
false ->
key
true ->
case Keyword.get(options, :whitelist) do
nil ->
String.to_atom(key)
whitelist ->
case Enum.member?(whitelist, key) do
false -> key
true -> String.to_atom(key)
end
end
end
{key, keys_to_atoms(value, options)}
end
end
def keys_to_atoms(value, _), do: value
@doc """
Recursively converts the keys of a map into a string.
Example:
keys_to_strings(%{nested: %{example: "value"}})
Returns:
%{"nested" => %{"example" => "value"}}
"""
def keys_to_strings(map, options \\ [])
def keys_to_strings(%_{} = struct, _options), do: struct
def keys_to_strings(map, options) when is_map(map) do
for {key, value} <- map, into: %{} do
key =
case is_atom(key) do
false -> key
true -> Atom.to_string(key)
end
{key, keys_to_strings(value, options)}
end
end
def keys_to_strings(value, _), do: value
@doc """
Serialize process id (pid) number to string
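Example:
serialize_pid(self())
Returns a string such as:
"<0.105.0>"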
"""
def serialize_pid(pid) when is_pid(pid) do
pid
|> :erlang.pid_to_list()
|> :erlang.list_to_binary()
end
@doc """
Deserialize process id (pid) string to pid
"""
def deserialize_pid("#PID" <> string), do: deserialize_pid(string)
def deserialize_pid(string) do
string
|> :erlang.binary_to_list()
|> :erlang.list_to_pid()
end
@doc """
Recursive version of `Map.delete/2`. Adds support for nested values:
Example:
map = %{
hello: "world",
nested: %{example: "value", hello: "world"}
}
deep_delete(map, [:nested, :example])
Returns:
%{
nested: %{example: "value"}
}
"""
def deep_delete(data, delete_key) when is_map(data) do
data
|> Map.delete(delete_key)
|> Enum.reduce(%{}, fn {key, value}, acc ->
Map.put(acc, key, deep_delete(value, delete_key))
end)
end
def deep_delete(data, _), do: data
@doc """
Recursive version of `Map.get/2`. Adds support for nested values:
Example:
map = %{
simple: "simple",
nested: %{example: "value", other: "value"}
}
deep_get(map, [:nested, :example])
Returns:
"value"
"""
def deep_get(data, keys, default \\ nil)
def deep_get(data, [current_key | remaining_keys], default) when is_map(data) do
value = Map.get(data, current_key)
case remaining_keys do
[] -> value
_ -> deep_get(value, remaining_keys, default)
end
end
def deep_get(_data, _, default), do: default
@doc """
Recursive version of `Map.take/2`. Adds support for nested values:
Example:
map = %{
simple: "simple",
nested: %{example: "value", other: "value"}
}
deep_take(map, [:simple, nested: [:example]])
Returns:
map = %{
simple: "simple",
nested: %{example: "value"}
}
"""
def deep_take(map, keys) when is_map(map) do
{nested_keys, simple_keys} = Enum.split_with(keys, &is_tuple/1)
simple = Map.take(map, simple_keys)
nested =
Enum.reduce(nested_keys, %{}, fn {key, keys}, acc ->
value =
map
|> Map.get(key)
|> deep_take(keys)
Map.put(acc, key, value)
end)
Map.merge(simple, nested)
end
@doc """
Print entire value without truncation
"""
def print(value) do
IO.inspect(value, limit: :infinity, printable_limit: :infinity)
end
@doc """
Benchmark execution time
Options:
log_level -> when not set, uses the default log level from the application config
Example:
Artemis.Helpers.benchmark("Sleep Performance", fn ->
:timer.sleep(5_000)
end, log_level: :info)
"""
def benchmark(callback), do: benchmark(nil, callback)
def benchmark(callback, options) when is_list(options), do: benchmark(nil, callback, options)
def benchmark(key, callback, options \\ []) do
start_time = Timex.now()
result = callback.()
end_time = Timex.now()
duration = Timex.diff(end_time, start_time, :milliseconds)
default_log_level = Artemis.Helpers.AppConfig.fetch!(:artemis, :benchmark, :default_log_level)
options = Keyword.put_new(options, :log_level, default_log_level)
message = [
type: "Benchmark",
key: key,
duration: "#{duration}ms"
]
log(message, options)
result
end
@doc """
Send values to Logger
"""
def log(values, options \\ [])
def log(values, options) when is_list(values) do
message = format_log_message(values)
log(message, options)
end
def log(message, options) do
log_level = get_log_level(options)
Logger.log(log_level, message)
end
defp format_log_message(values) do
values
|> Enum.map(fn {key, value} ->
case is_nil(value) do
true -> nil
false -> "[#{key}: #{value}]"
end
end)
|> Enum.reject(&is_nil/1)
|> Enum.join(" ")
end
defp get_log_level(options) do
default_log_level = :info
log_level =
options
|> Keyword.get(:log_level, Keyword.get(options, :level))
|> Kernel.||(default_log_level)
|> Artemis.Helpers.to_string()
case log_level do
"emergency" -> :emergency
"alert" -> :alert
"critical" -> :critical
"error" -> :error
"warning" -> :warning
"notice" -> :notice
"info" -> :info
_ -> :debug
end
end
@doc """
Log application start
"""
def log_application_start(name) do
type = "ApplicationStart"
log(type: type, key: name, start: Timex.now())
end
@doc """
Log rescued errors
"""
def rescue_log(stacktrace \\ nil, caller, error) do
default_values = [
caller: serialize_caller(caller),
error: Map.get(error, :__struct__),
message: Map.get(error, :message, inspect(error)),
stacktrace: serialize_stacktrace(stacktrace)
]
log_message = format_log_message(default_values)
Logger.error(log_message)
end
defp serialize_caller(caller) when is_map(caller), do: Map.get(caller, :__struct__)
defp serialize_caller(caller), do: caller
defp serialize_stacktrace(nil), do: nil
defp serialize_stacktrace(stacktrace) do
stacktrace =
stacktrace
|> Enum.map(&inspect(&1))
|> Enum.join("\n ")
"\n " <> stacktrace
end
@doc """
Send values to Error
"""
def error(values) when is_list(values) do
message = format_log_message(values)
Logger.error(message)
end
def error(message), do: Logger.error(message: message)
@doc """
Convert an Ecto Query into SQL
Example:
Customer
|> distinct_query(params, default: false)
|> order_query(params)
|> Artemis.Helpers.print_to_sql(Artemis.Repo)
|> Repo.all()
"""
def print_to_sql(query, repo) do
IO.inspect(Ecto.Adapters.SQL.to_sql(:all, repo, query))
query
end
end
|
apps/artemis/lib/artemis/helpers.ex
| 0.863435
| 0.511107
|
helpers.ex
|
starcoder
|
defmodule RefInspector do
@moduledoc """
Referer parser library.
## Preparation
1. Verify your supervision setup according to `RefInspector.Supervisor`
2. Revise the default configuration values of `RefInspector.Config` and
adjust to your project/environment where necessary
3. Download a copy of the parser database file(s) as outlined in
`RefInspector.Downloader`
## Usage
iex> RefInspector.parse("http://www.google.com/search?q=ref_inspector")
%RefInspector.Result{
medium: "search",
referer: "http://www.google.com/search?q=ref_inspector",
source: "Google",
term: "ref_inspector"
}
Passing a referer string will result in a `%RefInspector.Result{}` returned
with the following information (if available):
- `:referer` will contain the unmodified referer passed to the parser.
- `:medium` will be `:internal` (if configured), `:unknown` if no matching
database entry could be found, or a string matching the entry in the
database. Detecting a referer as `:internal` requires additional
configuration (see `RefInspector.Config`).
- `:source` will be `:unknown` if no known source could be detected.
Otherwise it will contain a string with the provider's name.
- `:term` will be `:none` if no query parameters were given or the provider
has no configured term parameters in the database (mostly relevant for
social or email referers). If a configured term parameter was found it will
be an unencoded string (possibly empty).
#### Note about Result Medium Atoms/Binaries
The medium atoms `:unknown` and `:internal` are specially treated to reflect
two special cases. One being reserved for completely unknown referers and
one being for configured domains to not be parsed.
Your database can still include `"unknown"` and `"internal"` sections. These
will be parsed fully and returned using a binary as the medium instead of
the aforementioned atoms.
"""
alias RefInspector.Database
alias RefInspector.Parser
alias RefInspector.Result
@doc """
Checks if RefInspector is ready to perform lookups.
The `true == ready?` definition is made on the assumption that if there is
at least one referer in the database then lookups can be performed.
Checking the state is done using the currently active database.
Any potentially concurrent reload requests are not considered.
"""
@spec ready?(atom) :: boolean
def ready?(instance \\ :ref_inspector_default), do: [] != Database.list(instance)
@doc """
Parses a referer.
Passing an empty referer (`""` or `nil`) will directly return an empty result
without accessing the database.
"""
@spec parse(URI.t() | String.t() | nil, Keyword.t()) :: Result.t()
def parse(ref, opts \\ [instance: :ref_inspector_default])
def parse(nil, _), do: %Result{referer: nil}
def parse("", _), do: %Result{referer: ""}
def parse(ref, opts) when is_binary(ref), do: ref |> URI.parse() |> parse(opts)
def parse(%URI{} = uri, opts) do
uri
|> Parser.parse(opts)
|> Map.put(:referer, URI.to_string(uri))
end
@doc """
Reloads all databases.
You can pass `[async: true|false]` to define if the reload should happen
in the background or block your calling process until completed.
"""
@spec reload(Keyword.t()) :: :ok
def reload(opts \\ []) do
[async: true, instance: :ref_inspector_default]
|> Keyword.merge(opts)
|> Database.reload()
end
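  # Usage sketch: to block the calling process until the default instance
  # has finished reloading its databases, disable the async behaviour:
  #
  #     :ok = RefInspector.reload(async: false)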
end
|
lib/ref_inspector.ex
| 0.888514
| 0.629291
|
ref_inspector.ex
|
starcoder
|
defmodule FlowAssertions.Ecto.ChangesetA do
use FlowAssertions.Define
alias FlowAssertions.Ecto.Messages
use FlowAssertions
alias Ecto.Changeset
alias FlowAssertions.MapA
@moduledoc """
Assertions for `Ecto.Changeset` structures.
"""
# ------------------------------------------------------------------------
@doc """
A pipeline-ready version of `assert changeset.valid?`
"""
defchain assert_valid(%Changeset{} = changeset) do
elaborate_assert(changeset.valid?, Messages.changeset_invalid,
expr: AssertionError.no_value,
left: changeset)
end
@doc """
A pipeline-ready version of `refute changeset.valid?`
"""
defchain assert_invalid(%Changeset{} = changeset) do
elaborate_assert(not changeset.valid?, Messages.changeset_valid,
expr: AssertionError.no_value,
left: changeset)
end
# ------------------------------------------------------------------------
@doc ~S"""
Applies `FlowAssertions.MapA.assert_fields/2` to the changes in the changeset.
To check that fields have been changed:
changeset |> assert_changes([:name, :tags])
To check specific changed values:
changeset |> assert_changes(name: "Bossie", tags: [])
"""
defchain assert_changes(%Changeset{} = changeset, keyword_list),
do: assert_fields(changeset.changes, keyword_list)
@doc """
Like `assert_changes/2` for cases where you care only about a single field.
This is just a convenience function for the grammatically obsessive.
changeset |> assert_change(:name)
changeset |> assert_change(name: "Bossie")
"""
def assert_change(cs, field_description) when not is_list(field_description),
do: assert_changes(cs, [field_description])
def assert_change(cs, field_description),
do: assert_changes(cs, field_description)
@doc """
The changeset must contain no changes.
"""
defchain assert_no_changes(%Changeset{} = changeset) do
changes = changeset.changes
elaborate_assert(changes == %{}, Messages.some_field_changes(changeset),
left: changeset)
end
@doc """
Require a changeset to have no changes in particular fields. Unmentioned fields may
have changes. When there's only a single field, it needn't be enclosed in a list.
changeset |> assert_no_changes([:name, :tags])
changeset |> assert_no_changes(:name)
"""
defchain assert_no_changes(%Changeset{} = changeset, field) when is_atom(field) do
struct_must_have_key!(changeset.data, field)
elaborate_refute(Map.has_key?(changeset.changes, field),
Messages.bad_field_change(field),
left: changeset)
end
defchain assert_no_changes(%Changeset{} = changeset, field_or_fields)
when is_list(field_or_fields),
do: Enum.map field_or_fields, &(assert_no_changes changeset, &1)
# ------------------------------------------------------------------------
@doc ~S"""
Assert that a changeset contains specific errors. In the simplest case,
it requires that each named field have at least one error, but doesn't require
any specific message:
changeset |> assert_errors([:name, :tags])
A message may also be required:
changeset
|> assert_errors(name: "may not be blank", tags: "is invalid")
The given string must be an exact match for one of the field's error messages.
If you want to check more than one error message for a given field,
enclose them in a list:
changeset
|> assert_errors(name: "may not be blank",
tags: ["is invalid", "has something else wrong"])
The list need not be a complete list of errors.
"""
defchain assert_errors(%Changeset{} = changeset, error_descriptions) do
errors_map = phoenix_errors_on(changeset)
assert_field_has_an_error = fn field ->
elaborate_assert(Map.has_key?(errors_map, field),
Messages.no_error_for_field(field),
left: changeset)
end
has_message_match? = fn expected, field_error_list ->
Enum.any?(field_error_list, fn error_message ->
good_enough?(error_message, expected)
end)
end
assert_message_match = fn field, expected ->
field_error_list = errors_map[field]
elaborate_assert(has_message_match?.(expected, field_error_list),
Messages.not_right_error_message(field),
left: field_error_list,
right: expected)
end
Enum.map(error_descriptions, fn
field when is_atom(field) ->
assert_field_has_an_error.(field)
{field, expecteds} when is_list(expecteds) ->
assert_field_has_an_error.(field)
for expected <- expecteds,
do: assert_message_match.(field, expected)
{field, expected} ->
assert_field_has_an_error.(field)
assert_message_match.(field, expected)
end)
end
@doc """
Like `assert_errors` but reads better when there's only a single error
to be checked:
assert_error(changeset, name: "is invalid")
If the message isn't to be checked, you can use a single atom:
assert_error(changeset, :name)
"""
defchain assert_error(cs, error_description) when is_atom(error_description),
do: assert_errors(cs, [error_description])
defchain assert_error(cs, error_description),
do: assert_errors(cs, error_description)
@doc """
Assert that a field or fields have no associated errors.
changeset |> assert_error_free([:in_service_datestring, :name])
You needn't use a list if there's only one field to check.
changeset |> assert_error_free(:in_service_datestring)
"""
defchain assert_error_free(changeset, field) when is_atom(field),
do: assert_error_free(changeset, [field])
defchain assert_error_free(changeset, fields) do
errors = phoenix_errors_on(changeset)
check = fn(field) ->
struct_must_have_key!(changeset.data, field)
elaborate_refute(Map.has_key?(errors, field),
Messages.unexpected_error(field),
left: changeset)
end
Enum.map(fields, check)
end
  # ------------------------------------------------------------------------
defchain assert_data(changeset, expected) do
assert_fields(changeset.data, expected)
end
@doc """
Assert that a field in the data part of a changeset matches a binding form.
changeset |> assert_data_shape(:field, %User{})
changeset |> assert_data_shape(:field, [_ | _])
"""
defmacro assert_data_shape(changeset, key, shape) do
quote do
eval_once = unquote(changeset)
assert_field_shape(eval_once.data, unquote(key), unquote(shape))
eval_once
end
end
# ----------------------------------------------------------------------------
@doc """
Operate on the single element of a list in a changeset field.
This is typically used with fields that take list values. Often,
you only want to test the empty list and a singleton list.
(When testing functions that produce their values with `Enum.map/2` or `for`,
creating a second list element gains you nothing.)
Using `with_singleton_content`, it's
convenient to apply assertions to the single element:
changeset
|> assert_valid
|> with_singleton_content(:changes, :service_gaps)
|> assert_shape(%VM.ServiceGap{})
|> Ex.Datespan.assert_datestrings(:first)
  The second value can be `:data`, `:changes`, or `:newest`. The first two use
  their respective fields in the changeset. The last uses `Ecto.Changeset.fetch_field!/2`, meaning:
1. If the field is present in `Changeset.changes`, that value is used.
2. Otherwise, the value in `Changeset.data` is used.
If `field` does not exist or isn't an `Enum`, `with_singleton_content` will fail in
the same way `FlowAssertions.EnumA.singleton_content/1` does.
"""
def with_singleton_content(%Changeset{} = changeset, :newest, field) do
which_key =
if Map.has_key?(changeset.changes, field), do: :changes, else: :data
with_singleton_content(changeset, which_key, field)
end
def with_singleton_content(%Changeset{} = changeset, version, field) do
changeset
|> Map.get(version)
|> MapA.with_singleton_content(field)
end
# ----------------------------------------------------------------------------
# Taken from Phoenix's `test/support/data_case.ex`.
defp phoenix_errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end
|
lib/changeset_a.ex
| 0.846292
| 0.512937
|
changeset_a.ex
|
starcoder
|
defmodule MdnsLite do
@moduledoc """
MdnsLite is a simple, limited, no frills mDNS implementation
Advertising hostnames and services is generally done using the application
config. See `MdnsLite.Options` for documentation.
To change the advertised hostnames or services at runtime, see `set_host/1`,
`add_mdns_service/1` and `remove_mdns_service/1`.
MdnsLite's mDNS record tables and caches can be inspected using
`MdnsLite.Info` if you're having trouble.
Finally, check out the MdnsLite `README.md` for more information.
"""
import MdnsLite.DNS
require Logger
alias MdnsLite.{DNS, Options, TableServer}
@typedoc """
A user-specified ID for referring to a service
Atoms are recommended, but binaries are still supported since they were used
in the past.
"""
@type service_id() :: atom() | binary()
@typedoc """
mDNS service description
Keys include:
* `:id` - an atom for referring to this service (only required if you want to
reference the service at runtime)
* `:port` - the TCP/UDP port number for the service (required)
  * `:transport` - the transport protocol. E.g., `"tcp"` (specify this and
    `:protocol`, or `:type`)
  * `:protocol` - the application protocol. E.g., `"ssh"` (specify this and
    `:transport`, or `:type`)
  * `:type` - the transport/protocol to advertise. E.g., `"_ssh._tcp"` (only
needed if `:protocol` and `:transport` aren't specified)
* `:weight` - the service weight. Defaults to `0`. (optional)
* `:priority` - the service priority. Defaults to `0`. (optional)
* `:txt_payload` - a list of strings to advertise
Example:
```
%{id: :my_ssh, port: 22, protocol: "ssh", transport: "tcp"}
```
"""
@type service() :: %{
:id => service_id(),
:port => 1..65535,
optional(:txt_payload) => [String.t()],
optional(:priority) => 0..255,
optional(:protocol) => String.t(),
optional(:transport) => String.t(),
optional(:type) => String.t(),
optional(:weight) => 0..255
}
@local_if_info %MdnsLite.IfInfo{ipv4_address: {127, 0, 0, 1}}
@default_timeout 500
@doc """
Set the list of host names
This replaces the list of hostnames that MdnsLite will respond to. The first
hostname in the list is special. Service advertisements will use it. The
remainder are aliases.
Hostnames should not have the ".local" extension. MdnsLite will add it.
To specify the hostname returned by `:inet.gethostname/0`, use `:hostname`.
To make MdnsLite respond to queries for "<hostname>.local" and
"nerves.local", run this:
```elixir
iex> MdnsLite.set_hosts([:hostname, "nerves"])
:ok
```
"""
@spec set_hosts([:hostname | String.t()]) :: :ok
def set_hosts(hosts) do
TableServer.update_options(&Options.set_hosts(&1, hosts))
end
@doc """
Start advertising a service
Services can be added at compile-time via the `:services` key in the `mdns_lite`
application environment or they can be added at runtime using this function.
See the `service` type for information on what's needed.
Example:
```elixir
iex> service = %{
id: :my_web_server,
protocol: "http",
transport: "tcp",
port: 80
}
iex> MdnsLite.add_mdns_service(service)
:ok
```
"""
@spec add_mdns_service(service()) :: :ok
def add_mdns_service(service) do
TableServer.update_options(&Options.add_service(&1, service))
end
@doc """
Stop advertising a service
Example:
```elixir
iex> MdnsLite.remove_mdns_service(:my_ssh)
:ok
```
"""
@spec remove_mdns_service(service_id()) :: :ok
def remove_mdns_service(id) do
TableServer.update_options(&Options.remove_service_by_id(&1, id))
end
@doc """
Lookup a hostname using mDNS
The hostname should be a .local name since the query only goes out via mDNS.
On success, an IP address is returned.
"""
  @spec gethostbyname(String.t(), non_neg_integer()) :: {:ok, :inet.ip_address()} | {:error, any()}
def gethostbyname(hostname, timeout \\ @default_timeout) do
q = dns_query(class: :in, type: :a, domain: to_charlist(hostname))
case query(q, timeout) do
%{answer: [first | _]} ->
ip = first |> dns_rr(:data) |> to_addr()
{:ok, ip}
%{answer: []} ->
{:error, :nxdomain}
end
end
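  # Usage sketch (hostname and address are invented):
  #
  #     MdnsLite.gethostbyname("my-device.local")
  #     #=> {:ok, {192, 168, 1, 23}}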
defp to_addr(addr) when is_tuple(addr), do: addr
defp to_addr(<<a, b, c, d>>), do: {a, b, c, d}
defp to_addr(<<a::16, b::16, c::16, d::16, e::16, f::16, g::16, h::16>>),
do: {a, b, c, d, e, f, g, h}
@doc false
@spec query(DNS.dns_query(), non_neg_integer()) :: %{
answer: [DNS.dns_rr()],
additional: [DNS.dns_rr()]
}
def query(dns_query() = q, timeout \\ @default_timeout) do
# 1. Try our configured records
# 2. Try the caches
# 3. Send the query
# 4. Wait for response to collect and return the matchers
with %{answer: []} <- MdnsLite.TableServer.query(q, @local_if_info),
%{answer: []} <- MdnsLite.Responder.query_all_caches(q) do
MdnsLite.Responder.multicast_all(q)
Process.sleep(timeout)
MdnsLite.Responder.query_all_caches(q)
end
end
end
|
lib/mdns_lite.ex
| 0.874171
| 0.792585
|
mdns_lite.ex
|
starcoder
|
defmodule ExDjango.Pbkdf2 do
@moduledoc """
Module to handle django pbkdf2_sha256 and pbkdf2_sha512 authentication.
  Comeonin didn't want to support pbkdf2_sha256, so the code was copied here.
  Since Comeonin password hashes start with $, this module works with both
  Django and Comeonin password hashes.
To generate a password hash, use the `hashpwsalt` function:
ExDjango.Pbkdf2.hashpwsalt("hard to guess")
ExDjango.Pbkdf2.hashpwsalt("hard to guess", 20_000, :sha256)
To check the password against a password hash, use the `checkpw` function:
ExDjango.Pbkdf2.checkpw("hard to guess", stored_hash)
There is also a `dummy_checkpw`, which can be used to stop an attacker guessing
a username by timing the responses.
See the documentation for each function for more details.
Most users will not need to use any of the other functions in this module.
## Pbkdf2
Pbkdf2 is a password-based key derivation function
that uses a password, a variable-length salt and an iteration
count and applies a pseudorandom function to these to
produce a key.
The original implementation used SHA-1 as the pseudorandom function,
but this version uses HMAC-SHA-256.
"""
use Bitwise
@max_length bsl(1, 32) - 1
@salt_length 12
@pbkdf2_rounds 20_000
@doc """
Generate a salt for use with the `hashpass` function.
  The minimum length of the salt is 12 and the maximum length
  is 1024. The default is 12.
"""
def gen_salt(salt_length \\ @salt_length)
def gen_salt(salt_length) when salt_length in 12..1024 do
:crypto.strong_rand_bytes(salt_length * 2)
|> Base.url_encode64
|> String.replace(~r/[-|_|=]/, "")
|> String.slice(0, salt_length)
end
def gen_salt(_) do
raise ArgumentError, message: "The salt is the wrong length."
end
@doc """
Hash the password using pbkdf2_sha256.
"""
def hashpass(password, salt, rounds \\ @pbkdf2_rounds, algorithm \\ :sha256) do
if is_binary(salt) do
pbkdf2(algorithm, password, salt, rounds) |> format(salt, rounds, algorithm)
else
raise ArgumentError, message: "Wrong type. The salt needs to be a string."
end
end
@doc """
Hash the password with a salt which is randomly generated.
  To change the complexity (and the time taken) of the password hash
  calculation, pass a different `rounds` value or adjust the
  `@pbkdf2_rounds` module attribute.
"""
def hashpwsalt(password, rounds \\ @pbkdf2_rounds, algorithm \\ :sha256) do
hashpass(password, gen_salt(), rounds, algorithm)
end
defp format(hash, salt, rounds, algorithm) do
"pbkdf2_#{algorithm}$#{rounds}$#{salt}$#{Base.encode64(hash)}"
end
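  # The result follows Django's storage format, e.g. (salt and digest
  # below are invented placeholders):
  #
  #     "pbkdf2_sha256$20000$abCDefGH1234$Base64DigestHere=="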
@doc """
Check the password.
The check is performed in constant time to avoid timing attacks.
"""
def checkpw(password, hash) when is_binary(password) and is_binary(hash) do
case String.starts_with?(hash, "$") do
# If hash starts with $ use Comeonin to check it
true ->
Comeonin.Pbkdf2.checkpw(password, hash)
false ->
case String.split(hash, "$") do
[algorithm, rounds, salt, hash] ->
pbkdf2(parse_algorithm(algorithm), password, salt, String.to_integer(rounds))
|> Base.encode64
|> Comeonin.Tools.secure_check(hash)
_ ->
false
end
end
end
def checkpw(_password, _hash) do
raise ArgumentError, message: "Wrong type. The password and hash need to be strings."
end
@doc """
Perform a dummy check for a user that does not exist.
This always returns false. The reason for implementing this check is
in order to make user enumeration by timing responses more difficult.
"""
def dummy_checkpw do
hashpwsalt("password")
false
end
@doc """
Calculate pbkdf2 hash
"""
def pbkdf2(:sha256, password, salt, rounds), do: pbkdf2(:sha256, password, salt, rounds, 32)
def pbkdf2(:sha512, password, salt, rounds), do: pbkdf2(:sha512, password, salt, rounds, 64)
defp pbkdf2(_algorithm, _password, _salt, _rounds, length) when length > @max_length do
raise ArgumentError, "length must be less than or equal to #{@max_length}"
end
defp pbkdf2(algorithm, password, salt, rounds, length) when byte_size(salt) in 12..1024 do
pbkdf2(algorithm, password, salt, rounds, length, 1, [], 0)
end
defp pbkdf2(_algorithm, _password, _salt, _rounds, _length) do
raise ArgumentError, message: "The salt is the wrong length."
end
defp pbkdf2(_algorithm, _password, _salt, _rounds, max_length, _block_index, acc, length)
when length >= max_length do
key = acc |> Enum.reverse |> IO.iodata_to_binary
<<bin::binary-size(max_length), _::binary>> = key
bin
end
defp pbkdf2(algorithm, password, salt, rounds, max_length, block_index, acc, length) do
initial = :crypto.hmac(algorithm, password, <<salt::binary, block_index::integer-size(32)>>)
block = iterate(algorithm, password, rounds - 1, initial, initial)
pbkdf2(algorithm, password, salt, rounds, max_length, block_index + 1,
[block | acc], byte_size(block) + length)
end
defp iterate(_algorithm, _password, 0, _prev, acc), do: acc
defp iterate(algorithm, password, round, prev, acc) do
next = :crypto.hmac(algorithm, password, prev)
iterate(algorithm, password, round - 1, next, :crypto.exor(next, acc))
end
defp parse_algorithm(algorithm) do
case String.split(algorithm, ["_", "-"]) do
[_, "sha256"] -> :sha256
[_, "sha512"] -> :sha512
_ -> raise ArgumentError, message: "Unknown password algorithm."
end
end
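  # For example (deterministic given the clauses above):
  #
  #     parse_algorithm("pbkdf2_sha256")  #=> :sha256
  #     parse_algorithm("pbkdf2-sha512")  #=> :sha512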
end
|
lib/pbkdf2.ex
| 0.763528
| 0.640945
|
pbkdf2.ex
|
starcoder
|
defmodule Golos.WitnessApi do
def call(method, params) do
Golos.call(["witness_api", method, params])
end
@doc """
Get witnesses by ids
## Example response
```
[%{"created" => "2016-10-18T11:21:18",
"hardfork_time_vote" => "2016-10-18T11:00:00",
"hardfork_version_vote" => "0.0.0", "id" => "2.3.101",
"last_aslot" => 3323895, "last_confirmed_block_num" => 3318746,
"last_sbd_exchange_update" => "2017-02-09T06:10:33",
"last_work" => "0000000000000000000000000000000000000000000000000000000000000000",
"owner" => "hipster", "pow_worker" => 0,
"props" => %{"account_creation_fee" => "1.000 GOLOS",
"maximum_block_size" => 65536, "sbd_interest_rate" => 1000},
"running_version" => "0.14.2",
"sbd_exchange_rate" => %{"base" => "1.742 GBG",
"quote" => "1.000 GOLOS"},
"signing_key" => "<KEY>",
"total_missed" => 10,
"url" => "https://golos.io/ru--delegaty/@hipster/delegat-hipster",
"virtual_last_update" => "2363092957490310521961963807",
"virtual_position" => "186709431624610119071729411416709427966",
"virtual_scheduled_time" => "2363094451567901047152350987",
"votes" => "102787791122912956"},
%{...} ]
```
"""
@spec get_witnesses([String.t()]) :: {:ok, [map]} | {:error, any}
def get_witnesses(ids) do
call("get_witnesses", [ids])
end
@doc """
Get witnesses by votes. Example response is the same as get_witnesses.
"""
@spec get_witnesses_by_vote(integer, integer) :: {:ok, [map]} | {:error, any}
def get_witnesses_by_vote(from, limit) do
call("get_witnesses_by_vote", [from, limit])
end
@doc """
Lookup witness accounts
Example response:
```
["creator", "creatorgalaxy", "crypto", "cryptocat", "cyberfounder", "cybertech-01", "d00m", "dacom", "dance", "danet"]
```
"""
@spec lookup_witness_accounts(String.t(), integer) :: {:ok, [String.t()]} | {:error, any}
def lookup_witness_accounts(lower_bound_name, limit) do
call("lookup_witness_accounts", [lower_bound_name, limit])
end
@doc """
Get witness count
Example response: `997`
"""
  @spec get_witness_count() :: {:ok, integer} | {:error, any}
def get_witness_count() do
call("get_witness_count", [])
end
@doc """
Get active witnesses
Example response:
```
["primus", "litvintech", "yaski", "serejandmyself", "dark.sun", "phenom",
"hipster", "gtx-1080-sc-0048", "lehard", "aleksandraz", "dr2073", "smailer",
"on0tole", "roelandp", "arcange", "testz", "vitaly-lvov", "xtar", "anyx",
"kuna", "creator"]
```
"""
@spec get_active_witnesses() :: {:ok, [String.t()]} | {:error, any}
def get_active_witnesses() do
call("get_active_witnesses", [])
end
@doc """
Returns witness schedule
Example response:
```
%{"current_shuffled_witnesses" => ["litrbooh", "gtx-1080-sc-0015",
"vitaly-lvov", "aleksandraz", "on0tole", "dark.sun", "jesta", "someguy123",
"pmartynov", "primus", "litvintech", "phenom", "hipster", "good-karma",
"arcange", "serejandmyself", "kuna", "dr2073", "lehard", "testz", "xtar"],
"current_virtual_time" => "2359603129137518468300462851", "id" => "2.7.0",
"majority_version" => "0.14.2",
"median_props" => %{"account_creation_fee" => "1.000 GOLOS",
"maximum_block_size" => 131072, "sbd_interest_rate" => 1000},
"next_shuffle_block_num" => 3108273}
```
"""
@spec get_witness_schedule() :: {:ok, map} | {:error, any}
def get_witness_schedule() do
call("get_witness_schedule", [])
end
@doc """
Returns feed history
Example response:
```
%{"current_median_history" => %{"base" => "1.000 GBG",
"quote" => "0.559 GOLOS"}, "id" => "2.14.0",
"price_history" => [%{"base" => "1.379 GBG", "quote" => "1.000 GOLOS"},
%{"base" => "1.379 GBG", "quote" => "1.000 GOLOS"},
%{"base" => "1.379 GBG", "quote" => "1.000 GOLOS"},
%{"base" => "1.000 GBG", ...}, %{...}, ...]}
```
"""
@spec get_feed_history() :: {:ok, map} | {:error, any}
def get_feed_history do
call("get_feed_history", [])
end
@doc """
Returns current median history price.
Example response:
```
%{"base" => "1.000 GBG", "quote" => "0.559 GOLOS"}
```
"""
@spec get_current_median_history_price() :: {:ok, map} | {:error, any}
def get_current_median_history_price() do
call("get_current_median_history_price", [])
end
@doc """
Get miner queue
Example response:
```
["gtx-1080-sc-0083", "gtx-1080-sc-0016", "gtx-1080-sc-0084", "gtx-1080-sc-0017",
"gtx-1080-sc-0085", "gtx-1080-sc-0018", "penguin-11", "gtx-1080-sc-0028",
"gtx-1080-sc-0023", "gtx-1080-sc-0080", ...]
```
"""
@spec get_miner_queue() :: {:ok, [String.t()]} | {:error, any}
def get_miner_queue() do
call("get_miner_queue", [])
end
end
|
lib/apis/witness_api.ex
| 0.812012
| 0.606528
|
witness_api.ex
|
starcoder
|
defmodule Cluster.Strategy.Kubernetes do
@moduledoc """
This clustering strategy works by loading all endpoints in the current Kubernetes
namespace with the configured label. It will fetch the addresses of all endpoints with
that label and attempt to connect. It will continually monitor and update its
connections every 5s.
In order for your endpoints to be found they should be returned when you run:
kubectl get endpoints -l app=myapp
It assumes that all nodes share a base name, are using longnames, and are unique
based on their FQDN, rather than the base hostname. In other words, in the following
longname, `<basename>@<domain>`, `basename` would be the value configured in
`kubernetes_node_basename`.
`domain` would be the value configured in `mode` and can be either of type `:ip`
(the pod's ip, can be obtained by setting an env variable to status.podIP) or
`:dns`, which is the pod's internal A Record. This A Record has the format
`<ip-with-dashes>.<namespace>.pod.cluster.local`, e.g
1-2-3-4.default.pod.cluster.local.
  Getting `:dns` to work requires a bit of fiddling in the container's CMD, for example:
# deployment.yaml
command: ["sh", "-c"]
args: ["POD_A_RECORD"]
args: ["export POD_A_RECORD=$(echo $POD_IP | sed 's/\./-/g') && /app/bin/app foreground"]
# vm.args
-name app@<%= "${POD_A_RECORD}.${NAMESPACE}.pod.cluster.local" %>
(in an app running as a Distillery release).
The benefit of using `:dns` over `:ip` is that you can establish a remote shell (as well as
run observer) by using `kubectl port-forward` in combination with some entries in `/etc/hosts`.
Defaults to `:ip`.
An example configuration is below:
config :libcluster,
topologies: [
k8s_example: [
strategy: #{__MODULE__},
config: [
mode: :ip,
kubernetes_node_basename: "myapp",
kubernetes_selector: "app=myapp",
polling_interval: 10_000]]]
"""
use GenServer
use Cluster.Strategy
import Cluster.Logger
alias Cluster.Strategy.State
@default_polling_interval 5_000
@kubernetes_master "kubernetes.default.svc.cluster.local"
@service_account_path "/var/run/secrets/kubernetes.io/serviceaccount"
def start_link(args), do: GenServer.start_link(__MODULE__, args)
@impl true
def init([%State{meta: nil} = state]) do
init([%State{state | :meta => MapSet.new()}])
end
def init([%State{} = state]) do
{:ok, load(state)}
end
@impl true
def handle_info(:timeout, state) do
handle_info(:load, state)
end
def handle_info(:load, %State{} = state) do
{:noreply, load(state)}
end
def handle_info(_, state) do
{:noreply, state}
end
defp load(%State{topology: topology, meta: meta} = state) do
new_nodelist = MapSet.new(get_nodes(state))
added = MapSet.difference(new_nodelist, meta)
removed = MapSet.difference(state.meta, new_nodelist)
new_nodelist =
case Cluster.Strategy.disconnect_nodes(
topology,
state.disconnect,
state.list_nodes,
MapSet.to_list(removed)
) do
:ok ->
new_nodelist
{:error, bad_nodes} ->
# Add back the nodes which should have been removed, but which couldn't be for some reason
Enum.reduce(bad_nodes, new_nodelist, fn {n, _}, acc ->
MapSet.put(acc, n)
end)
end
new_nodelist =
case Cluster.Strategy.connect_nodes(
topology,
state.connect,
state.list_nodes,
MapSet.to_list(added)
) do
:ok ->
new_nodelist
{:error, bad_nodes} ->
# Remove the nodes which should have been added, but couldn't be for some reason
Enum.reduce(bad_nodes, new_nodelist, fn {n, _}, acc ->
MapSet.delete(acc, n)
end)
end
Process.send_after(self(), :load, polling_interval(state))
%State{state | :meta => new_nodelist}
end
defp polling_interval(%State{config: config}) do
Keyword.get(config, :polling_interval, @default_polling_interval)
end
@spec get_token(String.t()) :: String.t()
defp get_token(service_account_path) do
path = Path.join(service_account_path, "token")
case File.exists?(path) do
true -> path |> File.read!() |> String.trim()
false -> ""
end
end
@spec get_namespace(String.t()) :: String.t()
if Mix.env() == :test do
defp get_namespace(_service_account_path), do: "__libcluster_test"
else
defp get_namespace(service_account_path) do
path = Path.join(service_account_path, "namespace")
if File.exists?(path) do
path |> File.read!() |> String.trim()
else
""
end
end
end
@spec get_nodes(State.t()) :: [atom()]
defp get_nodes(%State{topology: topology, config: config}) do
service_account_path =
Keyword.get(config, :kubernetes_service_account_path, @service_account_path)
token = get_token(service_account_path)
namespace = get_namespace(service_account_path)
    # use get/2 (not fetch!/2) so the nil warnings in the cond below are reachable
    app_name = Keyword.get(config, :kubernetes_node_basename)
    selector = Keyword.get(config, :kubernetes_selector)
master = Keyword.get(config, :kubernetes_master, @kubernetes_master)
cond do
app_name != nil and selector != nil ->
selector = URI.encode(selector)
endpoints_path = "api/v1/namespaces/#{namespace}/endpoints?labelSelector=#{selector}"
headers = [{'authorization', 'Bearer #{token}'}]
http_options = [ssl: [verify: :verify_none]]
case :httpc.request(
:get,
{'https://#{master}/#{endpoints_path}', headers},
http_options,
[]
) do
{:ok, {{_version, 200, _status}, _headers, body}} ->
parse_response(Keyword.get(config, :mode, :ip), app_name, Jason.decode!(body))
{:ok, {{_version, 403, _status}, _headers, body}} ->
%{"message" => msg} = Jason.decode!(body)
warn(topology, "cannot query kubernetes (unauthorized): #{msg}")
[]
{:ok, {{_version, code, status}, _headers, body}} ->
warn(topology, "cannot query kubernetes (#{code} #{status}): #{inspect(body)}")
[]
{:error, reason} ->
error(topology, "request to kubernetes failed!: #{inspect(reason)}")
[]
end
app_name == nil ->
warn(
topology,
"kubernetes strategy is selected, but :kubernetes_node_basename is not configured!"
)
[]
selector == nil ->
warn(
topology,
"kubernetes strategy is selected, but :kubernetes_selector is not configured!"
)
[]
:else ->
warn(topology, "kubernetes strategy is selected, but is not configured!")
[]
end
end
defp parse_response(:ip, app_name, resp) do
case resp do
%{"items" => items} when is_list(items) ->
Enum.reduce(items, [], fn
%{"subsets" => subsets}, acc when is_list(subsets) ->
addrs =
Enum.flat_map(subsets, fn
%{"addresses" => addresses} when is_list(addresses) ->
Enum.map(addresses, fn %{"ip" => ip} -> :"#{app_name}@#{ip}" end)
_ ->
[]
end)
acc ++ addrs
_, acc ->
acc
end)
_ ->
[]
end
end
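  # Sketch of the :ip transformation (payload abbreviated, address invented):
  # given app_name "myapp" and a response like
  #
  #     %{"items" => [%{"subsets" => [%{"addresses" => [%{"ip" => "10.48.33.136"}]}]}]}
  #
  # this returns [:"myapp@10.48.33.136"].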
defp parse_response(:dns, app_name, resp) do
case resp do
%{"items" => items} when is_list(items) ->
Enum.reduce(items, [], fn
%{"subsets" => subsets}, acc when is_list(subsets) ->
addrs =
Enum.flat_map(subsets, fn
%{"addresses" => addresses} when is_list(addresses) ->
Enum.map(addresses, fn %{"ip" => ip, "targetRef" => %{"namespace" => namespace}} ->
format_dns_record(app_name, ip, namespace)
end)
_ ->
[]
end)
acc ++ addrs
_, acc ->
acc
end)
_ ->
[]
end
end
defp format_dns_record(app_name, ip, namespace) do
ip = String.replace(ip, ".", "-")
:"#{app_name}@#{ip}.#{namespace}.pod.cluster.local"
end
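  # For example (values invented):
  #
  #     format_dns_record("myapp", "10.48.33.2", "default")
  #     #=> :"myapp@10-48-33-2.default.pod.cluster.local"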
end
|
lib/strategy/kubernetes.ex
| 0.824214
| 0.500793
|
kubernetes.ex
|
starcoder
|
defmodule MoroxiteServer.Providers.Reddit.Fetcher do
@moduledoc """
This module is going to download the json from the reddit api and parse it
"""
@valid_listings ["new", "hot", "rising", "top", "controversial"]
@behaviour Fetcher
@doc """
Build the link based on a ```reddit_name```
`listing_type` is the way the items will be ordered. Defaults to `"hot"`
"""
def build_link(reddit_name, listing_type \\ "hot")
def build_link(reddit_name, listing_type)
when listing_type in @valid_listings do
"https://www.reddit.com/r/#{reddit_name}/#{listing_type}.json"
end
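  # For example (subreddit name invented):
  #
  #     build_link("aww")         #=> "https://www.reddit.com/r/aww/hot.json"
  #     build_link("aww", "top")  #=> "https://www.reddit.com/r/aww/top.json"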
@doc """
Build a map based on the json downloaded from ```link```
`link` should be a string containing link to reddit json listing
"""
def build_map(link) do
    case get_json(link) do
      {:ok, body} ->
        {:ok, map} = Poison.decode(body)
        map

      # surface error tuples from get_json instead of a CaseClauseError
      {:remote_error, code} -> raise "Reddit responded with HTTP #{code} for #{link}"
      {:local_error, reason} -> raise "request to #{link} failed: #{inspect(reason)}"
    end
end
@doc """
Downloads the json given on ```link```
`link` should be a string containing link to reddit json listing
"""
def get_json(link) do
case HTTPoison.get(link) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} -> {:ok, body}
{:ok, %HTTPoison.Response{status_code: code}} -> {:remote_error, code}
{:error, %HTTPoison.Error{reason: reason}} -> {:local_error, reason}
end
end
@doc """
Removes all listing entities marked as content for users over 18 y/o.
  `map` is a map structured as the json received from a reddit json listing
"""
def filter_over_18(map) do
list = map["data"]["children"]
list = Enum.filter(list, &(!&1["data"]["over_18"]))
put_in(map["data"]["children"], list)
end
@doc """
Build a valid, in terms of the protocol, list of images and metadata
  `map` is a map structured as the json received from a reddit json listing
"""
def build_list(map) do
list = map
|> get_in(["data", "children"])
|> Enum.filter(&(get_in(&1, ["data", "preview", "enabled"])))
    list
    |> Enum.map(&parse_element/1)
    |> List.flatten()
end
@doc """
Build a valid map, in terms of the protocol, from a map based on the reddit
structure
  `element` is an element of the "children" array of a reddit listing
"""
def parse_element(element) do
source_reddit = element["data"]["subreddit_name_prefixed"]
author = element["data"]["author"]
tags = [source_reddit, author]
sources = element
|> get_in(["data", "preview", "images"])
|> Enum.map(&(get_in(&1, ["source"])))
Enum.map(sources, &(%{tags: tags,
url: &1["url"],
size: {(&1["width"]), &1["height"]}}))
end
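  # Sketch of the output for a single-image element (all values invented):
  # each source image becomes a map like
  #
  #     %{tags: ["r/aww", "some_author"],
  #       url: "https://i.redd.it/example.jpg",
  #       size: {1024, 768}}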
@doc """
Implementation of the fetch method for the Fetcher behaviour
"""
def fetch(identificator) do
list = identificator
|> build_link
|> build_map
|> filter_over_18
|> build_list
{"Reddit", list}
end
end
|
apps/moroxite_server/lib/moroxite_server/providers/reddit/fetcher.ex
| 0.767123
| 0.80329
|
fetcher.ex
|
starcoder
|
defmodule Artour.ImageView do
use Artour.Web, :view
@doc """
How single image instance should be represented in views
"""
def display_name(image) do
image.title
end
@doc """
Renders page to create new image
"""
def render("new.html", assigns) do
assigns = Map.merge(assigns, %{action: image_path(assigns[:conn], :create),
heading: Artour.SharedView.form_heading("image", :new),
save_another: true})
render "form_page.html", assigns
end
@doc """
Renders page to edit image
"""
def render("edit.html", assigns) do
assigns = Map.merge(assigns, %{action: image_path(assigns[:conn], :update, assigns[:image]),
heading: Artour.SharedView.form_heading(display_name(assigns[:image]), :edit),
show_delete: true})
render "form_page.html", assigns
end
@doc """
  Generates the contents of an HTML img tag srcset attribute
  for a given image instance.
  Assumes that image.filename_small is used as the src attribute.
  Doesn't include the large url, because it assumes the image is in a container.
"""
def srcset_for(conn, image, location) do
"#{url_for(conn, image, :small, location)} 400w, #{url_for(conn, image, :medium, location)} 800w"
end
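  # Sketch of the produced attribute value (urls invented):
  #
  #     "/media/images/cat_small.jpg 400w, /media/images/cat_medium.jpg 800w"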
@doc """
  Generates the contents of an HTML img tag srcset attribute
  for a given image instance.
  Assumes that image.filename_small is used as the src attribute.
  Used when the image is shown fullsize
  (not in a container, such as in an album).
"""
def srcset_for_fullsize(conn, image, location) do
"#{srcset_for(conn, image, location)}, #{url_for(conn, image, :large, location)} 1000w"
end
@doc """
Returns HTML img tag for a given image instance
lazy loaded version of img_tag_for/3
src is set to the small source file, and srcset is used for other sizes
location: atom that should be either :cloud or :local
"""
def lazy_img_tag_for(conn, image, location) do
tag(:img,
data:
[
src: url_for(conn, image, :small, location),
srcset: srcset_for(conn, image, location)
],
alt: image.description,
class: "lazy-image-placeholder"
)
end
@doc """
Returns HTML img tag for a given image instance
src is set to the small source file, and srcset is used for other sizes
location: atom that should be either :cloud or :local
"""
def img_tag_for(conn, image, location) do
img_tag url_for(conn, image, :small, location), alt: image.description, srcset: srcset_for(conn, image, location)
end
@doc """
Returns HTML img tag for a given image instance
size: atom should be the same as for url_for
location: atom that should be either :cloud or :local
"""
def img_tag_for(conn, image, size, location) do
img_tag url_for(conn, image, size, location), alt: image.description
end
@doc """
Returns base url for location
  location is local (admin) or cloud (potentially b2 or s3)
"""
def base_url_for(location_atom) do
case location_atom do
:local -> "/media/images/"
:cloud -> "/media/images/"
end
end
@doc """
Returns image filename for given size
"""
def filename_for_size(image, size) do
case size do
:large -> image.filename_large
:medium -> image.filename_medium
:small -> image.filename_small
:thumbnail -> image.filename_thumbnail
end
end
@doc """
Returns url for image
size is atom representing image size
location is either :cloud (public) or :local (admin)
"""
def url_for(conn, image, size, location) do
image_url = URI.encode(base_url_for(location) <> filename_for_size(image, size))
static_path(conn, image_url)
end
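  # Usage sketch (filename invented): url_for(conn, image, :small, :local)
  # yields something like "/media/images/cat_small.jpg" after the url is
  # passed through static_path/2.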
@doc """
Used on index page - returns abbreviated list of attribute names in the
  same order as the attribute_values_short function
"""
def attribute_names_short() do
["Title", "Thumbnail", "Description", "Format", "Date Completed"]
end
@doc """
Used on index page - takes image instance and returns abbreviated list of
formatted values
"""
def attribute_values_short(conn, image) do
[image.title, img_tag(url_for(conn, image, :thumbnail, :local), class: "thumbnail"), image.description, Artour.FormatView.display_name(image.format), Artour.DateHelpers.date_to_us_date(image.completion_date)]
end
@doc """
Used on show page - returns list of attribute names in the
same order as the attribute_values function
"""
def attribute_names() do
["Title", "Slug", "Description", "Filename Large", "Filename Medium", "Filename Small", "Filename Thumbnail", "Format", "Date Completed"]
end
@doc """
Used on show page - takes image instance and returns list of
formatted values
"""
def attribute_values(image) do
[image.title, image.slug, image.description, image.filename_large, image.filename_medium, image.filename_small, image.filename_thumbnail, Artour.FormatView.display_name(image.format), Artour.DateHelpers.date_to_us_date(image.completion_date)]
end
end
|
apps/artour/web/views/image_view.ex
| 0.779196
| 0.505249
|
image_view.ex
|
starcoder
|
defmodule AWS.Forecast do
@moduledoc """
Provides APIs for creating and managing Amazon Forecast resources.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2018-06-26",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "forecast",
global?: false,
protocol: "json",
service_id: "forecast",
signature_version: "v4",
signing_name: "forecast",
target_prefix: "AmazonForecast"
}
end
@doc """
Creates an Amazon Forecast dataset.
The information about the dataset that you provide helps Forecast understand how
to consume the data for model training. This includes the following:
    * `DataFrequency` - How frequently your historical time-series
  data is collected.
    * `Domain` and `DatasetType` - Each dataset has an
  associated dataset domain and a type within the domain. Amazon Forecast provides
  a list of predefined domains and types within each domain. For each unique
  dataset domain and type within the domain, Amazon Forecast requires your data to
  include a minimum set of predefined fields.
    * `Schema` - A schema specifies the fields in the dataset,
  including the field name and data type.
After creating a dataset, you import your training data into it and add the
dataset to a dataset group. You use the dataset group to create a predictor. For
more information, see `howitworks-datasets-groups`.
To get a list of all your datasets, use the `ListDatasets` operation.
For example Forecast datasets, see the [Amazon Forecast Sample GitHub repository](https://github.com/aws-samples/amazon-forecast-samples).
The `Status` of a dataset must be `ACTIVE` before you can import training data.
Use the `DescribeDataset` operation to get the status.
"""
def create_dataset(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDataset", input, options)
end
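  # A minimal call sketch, assuming a configured %AWS.Client{} and inventing
  # the dataset details (keys follow the CreateDataset request shape):
  #
  #     AWS.Forecast.create_dataset(client, %{
  #       "DatasetName" => "my_demand_dataset",
  #       "Domain" => "RETAIL",
  #       "DatasetType" => "TARGET_TIME_SERIES",
  #       "DataFrequency" => "D",
  #       "Schema" => %{"Attributes" => [
  #         %{"AttributeName" => "timestamp", "AttributeType" => "timestamp"}
  #       ]}
  #     })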
@doc """
Creates a dataset group, which holds a collection of related datasets.
You can add datasets to the dataset group when you create the dataset group, or
later by using the `UpdateDatasetGroup` operation.
After creating a dataset group and adding datasets, you use the dataset group
when you create a predictor. For more information, see
`howitworks-datasets-groups`.
To get a list of all your datasets groups, use the `ListDatasetGroups`
operation.
The `Status` of a dataset group must be `ACTIVE` before you can use the dataset
group to create a predictor. To get the status, use the `DescribeDatasetGroup`
operation.
"""
def create_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDatasetGroup", input, options)
end
@doc """
Imports your training data to an Amazon Forecast dataset.
You provide the location of your training data in an Amazon Simple Storage
Service (Amazon S3) bucket and the Amazon Resource Name (ARN) of the dataset
that you want to import the data to.
You must specify a `DataSource` object that includes an AWS Identity and Access
Management (IAM) role that Amazon Forecast can assume to access the data, as
Amazon Forecast makes a copy of your data and processes it in an internal AWS
system. For more information, see `aws-forecast-iam-roles`.
The training data must be in CSV format. The delimiter must be a comma (,).
You can specify the path to a specific CSV file, the S3 bucket, or to a folder
in the S3 bucket. For the latter two cases, Amazon Forecast imports all files up
to the limit of 10,000 files.
Because dataset imports are not aggregated, your most recent dataset import is
the one that is used when training a predictor or generating a forecast. Make
sure that your most recent dataset import contains all of the data you want to
model off of, and not just the new data collected since the previous import.
To get a list of all your dataset import jobs, filtered by specified criteria,
use the `ListDatasetImportJobs` operation.
"""
def create_dataset_import_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDatasetImportJob", input, options)
end
@doc """
Creates a forecast for each item in the `TARGET_TIME_SERIES` dataset that was
used to train the predictor.
This is known as inference. To retrieve the forecast for a single item at low
latency, use the operation. To export the complete forecast into your Amazon
Simple Storage Service (Amazon S3) bucket, use the `CreateForecastExportJob`
operation.
The range of the forecast is determined by the `ForecastHorizon` value, which
you specify in the `CreatePredictor` request. When you query a forecast, you can
request a specific date range within the forecast.
To get a list of all your forecasts, use the `ListForecasts` operation.
The forecasts generated by Amazon Forecast are in the same time zone as the
dataset that was used to create the predictor.
For more information, see `howitworks-forecast`.
The `Status` of the forecast must be `ACTIVE` before you can query or export the
forecast. Use the `DescribeForecast` operation to get the status.
"""
def create_forecast(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateForecast", input, options)
end
@doc """
Exports a forecast created by the `CreateForecast` operation to your Amazon
Simple Storage Service (Amazon S3) bucket.
The forecast file name will match the following conventions:
<ForecastExportJobName>_<ExportTimestamp>_<PartNumber>
where the <ExportTimestamp> component is in Java SimpleDateFormat
(yyyy-MM-ddTHH-mm-ssZ).
You must specify a `DataDestination` object that includes an AWS Identity and
Access Management (IAM) role that Amazon Forecast can assume to access the
Amazon S3 bucket. For more information, see `aws-forecast-iam-roles`.
For more information, see `howitworks-forecast`.
To get a list of all your forecast export jobs, use the `ListForecastExportJobs`
operation.
The `Status` of the forecast export job must be `ACTIVE` before you can access
the forecast in your Amazon S3 bucket. To get the status, use the
`DescribeForecastExportJob` operation.
"""
def create_forecast_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateForecastExportJob", input, options)
end
@doc """
Creates an Amazon Forecast predictor.
In the request, provide a dataset group and either specify an algorithm or let
Amazon Forecast choose an algorithm for you using AutoML. If you specify an
algorithm, you also can override algorithm-specific hyperparameters.
Amazon Forecast uses the algorithm to train a predictor using the latest version
of the datasets in the specified dataset group. You can then generate a forecast
using the `CreateForecast` operation.
To see the evaluation metrics, use the `GetAccuracyMetrics` operation.
You can specify a featurization configuration to fill and aggregate the data
fields in the `TARGET_TIME_SERIES` dataset to improve model training. For more
information, see `FeaturizationConfig`.
For RELATED_TIME_SERIES datasets, `CreatePredictor` verifies that the
`DataFrequency` specified when the dataset was created matches the
`ForecastFrequency`. TARGET_TIME_SERIES datasets don't have this restriction.
Amazon Forecast also verifies the delimiter and timestamp format. For more
information, see `howitworks-datasets-groups`.
By default, predictors are trained and evaluated at the 0.1 (P10), 0.5 (P50),
and 0.9 (P90) quantiles. You can choose custom forecast types to train and
evaluate your predictor by setting the `ForecastTypes`.
## AutoML
If you want Amazon Forecast to evaluate each algorithm and choose the one that
minimizes the `objective function`, set `PerformAutoML` to `true`. The
`objective function` is defined as the mean of the weighted losses over the
forecast types. By default, these are the p10, p50, and p90 quantile losses. For
more information, see `EvaluationResult`.
When AutoML is enabled, the following properties are disallowed:
* `AlgorithmArn`
* `HPOConfig`
* `PerformHPO`
* `TrainingParameters`
To get a list of all of your predictors, use the `ListPredictors` operation.
Before you can use the predictor to create a forecast, the `Status` of the
predictor must be `ACTIVE`, signifying that training has completed. To get the
status, use the `DescribePredictor` operation.
"""
def create_predictor(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePredictor", input, options)
end
@doc """
Exports backtest forecasts and accuracy metrics generated by the
`CreatePredictor` operation.
Two folders containing CSV files are exported to your specified S3 bucket.
The export file names will match the following conventions:
`<ExportJobName>_<ExportTimestamp>_<PartNumber>.csv`
The <ExportTimestamp> component is in Java SimpleDate format
(yyyy-MM-ddTHH-mm-ssZ).
You must specify a `DataDestination` object that includes an Amazon S3 bucket
and an AWS Identity and Access Management (IAM) role that Amazon Forecast can
assume to access the Amazon S3 bucket. For more information, see
`aws-forecast-iam-roles`.
The `Status` of the export job must be `ACTIVE` before you can access the export
in your Amazon S3 bucket. To get the status, use the
`DescribePredictorBacktestExportJob` operation.
"""
def create_predictor_backtest_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePredictorBacktestExportJob", input, options)
end
@doc """
Deletes an Amazon Forecast dataset that was created using the `CreateDataset`
operation.
You can only delete datasets that have a status of `ACTIVE` or `CREATE_FAILED`.
To get the status use the `DescribeDataset` operation.
Forecast does not automatically update any dataset groups that contain the
  deleted dataset. In order to update the dataset group, use the
  `UpdateDatasetGroup` operation, omitting the deleted dataset's ARN.
"""
def delete_dataset(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDataset", input, options)
end
@doc """
Deletes a dataset group created using the `CreateDatasetGroup` operation.
You can only delete dataset groups that have a status of `ACTIVE`,
`CREATE_FAILED`, or `UPDATE_FAILED`. To get the status, use the
`DescribeDatasetGroup` operation.
This operation deletes only the dataset group, not the datasets in the group.
"""
def delete_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDatasetGroup", input, options)
end
@doc """
Deletes a dataset import job created using the `CreateDatasetImportJob`
operation.
You can delete only dataset import jobs that have a status of `ACTIVE` or
`CREATE_FAILED`. To get the status, use the `DescribeDatasetImportJob`
operation.
"""
def delete_dataset_import_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDatasetImportJob", input, options)
end
@doc """
Deletes a forecast created using the `CreateForecast` operation.
You can delete only forecasts that have a status of `ACTIVE` or `CREATE_FAILED`.
To get the status, use the `DescribeForecast` operation.
You can't delete a forecast while it is being exported. After a forecast is
deleted, you can no longer query the forecast.
"""
def delete_forecast(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteForecast", input, options)
end
@doc """
Deletes a forecast export job created using the `CreateForecastExportJob`
operation.
You can delete only export jobs that have a status of `ACTIVE` or
`CREATE_FAILED`. To get the status, use the `DescribeForecastExportJob`
operation.
"""
def delete_forecast_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteForecastExportJob", input, options)
end
@doc """
Deletes a predictor created using the `CreatePredictor` operation.
You can delete only predictor that have a status of `ACTIVE` or `CREATE_FAILED`.
To get the status, use the `DescribePredictor` operation.
"""
def delete_predictor(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePredictor", input, options)
end
@doc """
Deletes a predictor backtest export job.
"""
def delete_predictor_backtest_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePredictorBacktestExportJob", input, options)
end
@doc """
Deletes an entire resource tree.
This operation will delete the parent resource and its child resources.
Child resources are resources that were created from another resource. For
example, when a forecast is generated from a predictor, the forecast is the
child resource and the predictor is the parent resource.
Amazon Forecast resources possess the following parent-child resource
hierarchies:
* **Dataset**: dataset import jobs
* **Dataset Group**: predictors, predictor backtest export jobs,
forecasts, forecast export jobs
* **Predictor**: predictor backtest export jobs, forecasts, forecast
export jobs
* **Forecast**: forecast export jobs
`DeleteResourceTree` will only delete Amazon Forecast resources, and will not
delete datasets or exported files stored in Amazon S3.
"""
def delete_resource_tree(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteResourceTree", input, options)
end
@doc """
Describes an Amazon Forecast dataset created using the `CreateDataset`
operation.
In addition to listing the parameters specified in the `CreateDataset` request,
this operation includes the following dataset properties:
* `CreationTime`
* `LastModificationTime`
* `Status`
"""
def describe_dataset(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDataset", input, options)
end
@doc """
Describes a dataset group created using the `CreateDatasetGroup` operation.
In addition to listing the parameters provided in the `CreateDatasetGroup`
request, this operation includes the following properties:
* `DatasetArns` - The datasets belonging to the group.
* `CreationTime`
* `LastModificationTime`
* `Status`
"""
def describe_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDatasetGroup", input, options)
end
@doc """
Describes a dataset import job created using the `CreateDatasetImportJob`
operation.
In addition to listing the parameters provided in the `CreateDatasetImportJob`
request, this operation includes the following properties:
* `CreationTime`
* `LastModificationTime`
* `DataSize`
* `FieldStatistics`
* `Status`
* `Message` - If an error occurred, information about the error.
"""
def describe_dataset_import_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDatasetImportJob", input, options)
end
@doc """
Describes a forecast created using the `CreateForecast` operation.
In addition to listing the properties provided in the `CreateForecast` request,
this operation lists the following properties:
* `DatasetGroupArn` - The dataset group that provided the training
data.
* `CreationTime`
* `LastModificationTime`
* `Status`
* `Message` - If an error occurred, information about the error.
"""
def describe_forecast(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeForecast", input, options)
end
@doc """
Describes a forecast export job created using the `CreateForecastExportJob`
operation.
In addition to listing the properties provided by the user in the
`CreateForecastExportJob` request, this operation lists the following
properties:
* `CreationTime`
* `LastModificationTime`
* `Status`
* `Message` - If an error occurred, information about the error.
"""
def describe_forecast_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeForecastExportJob", input, options)
end
@doc """
Describes a predictor created using the `CreatePredictor` operation.
In addition to listing the properties provided in the `CreatePredictor` request,
this operation lists the following properties:
* `DatasetImportJobArns` - The dataset import jobs used to import
training data.
* `AutoMLAlgorithmArns` - If AutoML is performed, the algorithms
that were evaluated.
* `CreationTime`
* `LastModificationTime`
* `Status`
* `Message` - If an error occurred, information about the error.
"""
def describe_predictor(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribePredictor", input, options)
end
@doc """
Describes a predictor backtest export job created using the
`CreatePredictorBacktestExportJob` operation.
In addition to listing the properties provided by the user in the
`CreatePredictorBacktestExportJob` request, this operation lists the following
properties:
* `CreationTime`
* `LastModificationTime`
* `Status`
* `Message` (if an error occurred)
"""
def describe_predictor_backtest_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribePredictorBacktestExportJob", input, options)
end
@doc """
Provides metrics on the accuracy of the models that were trained by the
`CreatePredictor` operation.
Use metrics to see how well the model performed and to decide whether to use the
predictor to generate a forecast. For more information, see [Predictor Metrics](https://docs.aws.amazon.com/forecast/latest/dg/metrics.html).
This operation generates metrics for each backtest window that was evaluated.
The number of backtest windows (`NumberOfBacktestWindows`) is specified using
the `EvaluationParameters` object, which is optionally included in the
`CreatePredictor` request. If `NumberOfBacktestWindows` isn't specified, the
number defaults to one.
The parameters of the `filling` method determine which items contribute to the
metrics. If you want all items to contribute, specify `zero`. If you want only
those items that have complete data in the range being evaluated to contribute,
specify `nan`. For more information, see `FeaturizationMethod`.
Before you can get accuracy metrics, the `Status` of the predictor must be
`ACTIVE`, signifying that training has completed. To get the status, use the
`DescribePredictor` operation.
"""
def get_accuracy_metrics(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAccuracyMetrics", input, options)
end
@doc """
Returns a list of dataset groups created using the `CreateDatasetGroup`
operation.
For each dataset group, this operation returns a summary of its properties,
including its Amazon Resource Name (ARN). You can retrieve the complete set of
properties by using the dataset group ARN with the `DescribeDatasetGroup`
operation.
"""
def list_dataset_groups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatasetGroups", input, options)
end
@doc """
Returns a list of dataset import jobs created using the `CreateDatasetImportJob`
operation.
For each import job, this operation returns a summary of its properties,
including its Amazon Resource Name (ARN). You can retrieve the complete set of
properties by using the ARN with the `DescribeDatasetImportJob` operation. You
can filter the list by providing an array of `Filter` objects.
"""
def list_dataset_import_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatasetImportJobs", input, options)
end
@doc """
Returns a list of datasets created using the `CreateDataset` operation.
For each dataset, a summary of its properties, including its Amazon Resource
Name (ARN), is returned. To retrieve the complete set of properties, use the ARN
with the `DescribeDataset` operation.
"""
def list_datasets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatasets", input, options)
end
@doc """
Returns a list of forecast export jobs created using the
`CreateForecastExportJob` operation.
For each forecast export job, this operation returns a summary of its
properties, including its Amazon Resource Name (ARN). To retrieve the complete
set of properties, use the ARN with the `DescribeForecastExportJob` operation.
You can filter the list using an array of `Filter` objects.
"""
def list_forecast_export_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListForecastExportJobs", input, options)
end
@doc """
Returns a list of forecasts created using the `CreateForecast` operation.
For each forecast, this operation returns a summary of its properties, including
its Amazon Resource Name (ARN). To retrieve the complete set of properties,
specify the ARN with the `DescribeForecast` operation. You can filter the list
using an array of `Filter` objects.
"""
def list_forecasts(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListForecasts", input, options)
end
@doc """
Returns a list of predictor backtest export jobs created using the
`CreatePredictorBacktestExportJob` operation.
This operation returns a summary for each backtest export job. You can filter
the list using an array of `Filter` objects.
To retrieve the complete set of properties for a particular backtest export job,
use the ARN with the `DescribePredictorBacktestExportJob` operation.
"""
def list_predictor_backtest_export_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPredictorBacktestExportJobs", input, options)
end
@doc """
Returns a list of predictors created using the `CreatePredictor` operation.
For each predictor, this operation returns a summary of its properties,
including its Amazon Resource Name (ARN). You can retrieve the complete set of
properties by using the ARN with the `DescribePredictor` operation. You can
filter the list using an array of `Filter` objects.
"""
def list_predictors(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPredictors", input, options)
end
@doc """
Lists the tags for an Amazon Forecast resource.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Stops a resource.
The resource undergoes the following states: `CREATE_STOPPING` and
`CREATE_STOPPED`. You cannot resume a resource once it has been stopped.
This operation can be applied to the following resources (and their
corresponding child resources):
* Dataset Import Job
* Predictor Job
* Forecast Job
* Forecast Export Job
* Predictor Backtest Export Job
"""
def stop_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopResource", input, options)
end
@doc """
Associates the specified tags to a resource with the specified `resourceArn`.
If existing tags on a resource are not specified in the request parameters, they
are not changed. When a resource is deleted, the tags associated with that
resource are also deleted.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Deletes the specified tags from a resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Replaces the datasets in a dataset group with the specified datasets.
The `Status` of the dataset group must be `ACTIVE` before you can use the
dataset group to create a predictor. Use the `DescribeDatasetGroup` operation to
get the status.
"""
def update_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateDatasetGroup", input, options)
end
end
|
lib/aws/generated/forecast.ex
| 0.937968
| 0.612585
|
forecast.ex
|
starcoder
|
defmodule CTE.Utils do
@moduledoc """
Basic utilities for helping developing functionality about the `CTE` data structures
"""
@doc """
render a path hierarchy as a .dot string that you can use for drawing your results with graphviz. w⦿‿⦿t!
Upon receiving something like this:
%{
nodes: %{
6 => %{
author: "Rolie",
comment: "Everything is easier, than with the Nested Sets.",
id: 6
},
8 => %{
author: "Olie",
comment: "I’m sold! And I’ll use its Elixir implementation! <3",
id: 8
},
9 => %{author: "Polie", comment: "w⦿‿⦿t!", id: 9}
},
paths: [
[6, 6, 0],
[6, 8, 1],
[8, 8, 0],
[6, 9, 1],
[9, 9, 0]
]
}
this will output a .dot formatted string that you could use later for generating
an image: `dot -Tpng <filename>.dot -o <filename>.png`
"""
@spec print_dot(map, Keyword.t()) :: String.t()
def print_dot(tree, opts \\ [])
def print_dot(%{paths: paths, nodes: nodes}, opts)
when is_list(paths) and is_map(nodes) do
labels = Keyword.get(opts, :labels, [])
[[root, _, _] | paths] = paths
root = Map.get(nodes, root)
acc = "digraph #{dot_bubble(root, labels)} {"
Enum.reduce(paths, acc, fn [ancestor, descendant, _path_length], acc ->
parent = Map.get(nodes, ancestor)
child = Map.get(nodes, descendant)
acc <> "\n " <> build_dot(parent, child, labels)
end) <>
"\n}\n"
end
def print_dot(_, _), do: {:error, :invalid_argument}
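# A hedged usage sketch, assuming a `tree` shaped like the example in the
# @doc above:
#
#     dot = CTE.Utils.print_dot(tree, labels: [:author, ": ", :comment])
#     File.write!("comments.dot", dot)
#     # then render it: dot -Tpng comments.dot -o comments.png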
@doc """
print the tree at the console, using a custom function for selecting the info to be displayed
The print_tree/3 function receives the tree structure returned by the CTE, the id of an existing node we
want to start printing the tree with, followed by the options.
## Options
* `:callback` - a function that is invoked for every node in the tree. It has two parameters:
* id - the id of the node we render, or a node-specific unique identifier, i.e. node name, etc.
* nodes - the nodes received in the tree structure, a map: %{node_id => node}
This function must return a tuple with two elements: the first is the name of the node we render,
the second any optional info you want to add.
* `:raw` - return a list of tuples if true. Each tuple will contain the path_length and
the text returned from the callback. Useful for custom formatting of the printed output.
Example, using the default options:
iex» {:ok, tree} = CTT.tree(1)
iex» CTE.Utils.print_tree(tree,1, callback: &({&2[&1].author <> ":", &2[&1].comment}))
Olie: Is Closure Table better than the Nested Sets?
└── Rolie: It depends. Do you need referential integrity?
└── Olie: Yeah.
"""
def print_tree(tree, id, opts \\ [])
def print_tree(%{paths: paths, nodes: nodes}, id, opts) do
user_callback = Keyword.get(opts, :callback, fn id, _nodes -> {id, "info..."} end)
tree =
paths
|> Enum.filter(fn [a, d, path_length] -> a != d && path_length == 1 end)
|> Enum.group_by(fn [a, _, _] -> a end, fn [_, d, _] -> d end)
|> Enum.reduce(%{}, fn {parent, children}, acc ->
descendants = children || []
Map.put(acc, parent, Enum.uniq(descendants))
end)
callback = fn
node_id when not is_nil(node_id) ->
{name, info} = user_callback.(node_id, nodes)
{{name, info}, Map.get(tree, node_id, [])}
end
_print_tree([id], callback, opts)
end
defp _print_tree(nodes, callback, opts) do
case print_tree(nodes, _path_length = [], _seen = %{}, callback, opts, []) do
{_seen, [] = out} -> out
{_, out} -> Enum.reverse(out)
end
end
# credits where credits due:
# - adapted from a Mix.Utils similar method
defp print_tree(nodes, path_length, seen, callback, opts, out) do
{nodes, seen} =
Enum.flat_map_reduce(nodes, seen, fn node, seen ->
{{name, info}, children} = callback.(node)
if Map.has_key?(seen, name) do
{[{name, info, []}], seen}
else
{[{name, info, children}], Map.put(seen, name, true)}
end
end)
print_every_node(nodes, path_length, seen, callback, opts, out)
end
defp print_every_node([], _path_length, seen, _callback, _opts, out), do: {seen, out}
defp print_every_node([{_name, info, children} | nodes], path_length, seen, callback, opts, out) do
raw? = Keyword.get(opts, :raw, false)
info = if(info, do: info, else: "")
out =
if raw? do
[{length(path_length), info} | out]
else
# info = if(info, do: " #{info}", else: "")
# IO.puts("#{path_length(path_length)}#{prefix(path_length, nodes)}#{name}#{info}")
IO.puts("#{path_length(path_length)}#{prefix(path_length, nodes)}#{info}")
out
end
{seen, out} = print_tree(children, [nodes != [] | path_length], seen, callback, opts, out)
print_every_node(nodes, path_length, seen, callback, opts, out)
end
defp path_length([]), do: ""
defp path_length(path_length), do: Enum.reverse(path_length) |> tl |> Enum.map(&entry(&1))
defp entry(true), do: "│ "
defp entry(false), do: " "
defp prefix([], _), do: ""
defp prefix(_, []), do: "└── "
defp prefix(_, _), do: "├── "
@spec build_dot(String.t(), String.t(), list) :: String.t()
defp build_dot(parent, child, []), do: "#{parent} -> #{child}"
defp build_dot(parent, child, labels) do
"#{dot_bubble(parent, labels)} -> #{dot_bubble(child, labels)}"
end
defp dot_bubble(node, labels) do
bubble_text =
labels
|> Enum.map(fn
l when is_function(l) ->
l.(node) || l
l ->
Map.get(node, l, l)
end)
|> Enum.join("")
"\"#{bubble_text}\""
end
end
|
lib/cte/utils.ex
| 0.807499
| 0.532607
|
utils.ex
|
starcoder
|
defmodule Sneex.Ops.Decrement do
@moduledoc """
This represents the op codes for decrementing a value (DEC, DEX, and DEY).
"""
defstruct [:disasm_override, :bit_size, :cycle_mods, :address_mode]
alias Sneex.Address.{Absolute, CycleCalculator, DirectPage, Indexed, Mode, Register}
alias Sneex.{Cpu, CpuHelper}
use Bitwise
@type t :: %__MODULE__{
disasm_override: nil | String.t(),
bit_size: :bit8 | :bit16,
cycle_mods: list(CycleCalculator.t()),
address_mode: any()
}
@spec new(Cpu.t() | byte()) :: nil | __MODULE__.t()
def new(cpu = %Cpu{}) do
cpu |> Cpu.read_opcode() |> new(cpu)
end
@spec new(byte(), Cpu.t()) :: nil | __MODULE__.t()
def new(0x3A, cpu) do
addr_mode = :acc |> Register.new()
bit_size = cpu |> Cpu.acc_size()
mods = [CycleCalculator.constant(2)]
%__MODULE__{bit_size: bit_size, cycle_mods: mods, address_mode: addr_mode}
end
def new(0xCE, cpu) do
addr_mode = true |> Absolute.new()
bit_size = cpu |> Cpu.acc_size()
mods = [CycleCalculator.constant(6), CycleCalculator.acc_is_16_bit(2)]
%__MODULE__{bit_size: bit_size, cycle_mods: mods, address_mode: addr_mode}
end
def new(0xC6, cpu) do
addr_mode = DirectPage.new()
bit_size = cpu |> Cpu.acc_size()
mods = [
CycleCalculator.constant(5),
CycleCalculator.acc_is_16_bit(2),
CycleCalculator.low_direct_page_is_not_zero(1)
]
%__MODULE__{bit_size: bit_size, cycle_mods: mods, address_mode: addr_mode}
end
def new(0xDE, cpu) do
addr_mode = true |> Absolute.new() |> Indexed.new(:x)
bit_size = cpu |> Cpu.acc_size()
mods = [CycleCalculator.constant(7), CycleCalculator.acc_is_16_bit(2)]
%__MODULE__{bit_size: bit_size, cycle_mods: mods, address_mode: addr_mode}
end
def new(0xD6, cpu) do
addr_mode = DirectPage.new() |> Indexed.new(:x)
bit_size = cpu |> Cpu.acc_size()
mods = [
CycleCalculator.constant(6),
CycleCalculator.acc_is_16_bit(2),
CycleCalculator.low_direct_page_is_not_zero(1)
]
%__MODULE__{bit_size: bit_size, cycle_mods: mods, address_mode: addr_mode}
end
def new(0xCA, cpu) do
addr_mode = :x |> Register.new()
bit_size = cpu |> Cpu.index_size()
mods = [CycleCalculator.constant(2)]
%__MODULE__{
disasm_override: "DEX",
bit_size: bit_size,
cycle_mods: mods,
address_mode: addr_mode
}
end
def new(0x88, cpu) do
addr_mode = :y |> Register.new()
bit_size = cpu |> Cpu.index_size()
mods = [CycleCalculator.constant(2)]
%__MODULE__{
disasm_override: "DEY",
bit_size: bit_size,
cycle_mods: mods,
address_mode: addr_mode
}
end
def new(_opcode, _cpu), do: nil
defimpl Sneex.Ops.Opcode do
def byte_size(%{address_mode: mode}, cpu), do: 1 + Mode.byte_size(mode, cpu)
def total_cycles(%{cycle_mods: mods}, cpu) do
cpu |> CycleCalculator.calc_cycles(mods)
end
def execute(%{address_mode: mode, bit_size: bit_size}, cpu) do
{data, cpu} = mode |> Mode.fetch(cpu) |> decrement(bit_size, cpu)
mode |> Mode.store(cpu, data)
end
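# Decrementing zero wraps around to the maximum value for the current
# bit size (0xFF for 8-bit, 0xFFFF for 16-bit).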
defp decrement(0, bit_size, cpu = %Cpu{}) do
new_value = bit_size |> determine_mask() |> band(0xFFFF)
%{negative: nf, zero: zf} = CpuHelper.check_flags_for_value(new_value, bit_size)
new_cpu = cpu |> Cpu.zero_flag(zf) |> Cpu.negative_flag(nf)
{new_value, new_cpu}
end
defp decrement(value, bit_size, cpu = %Cpu{}) do
new_value = bit_size |> determine_mask() |> band(value - 1)
%{negative: nf, zero: zf} = CpuHelper.check_flags_for_value(new_value, bit_size)
new_cpu = cpu |> Cpu.zero_flag(zf) |> Cpu.negative_flag(nf)
{new_value, new_cpu}
end
defp determine_mask(:bit8), do: 0xFF
defp determine_mask(:bit16), do: 0xFFFF
def disasm(%{disasm_override: nil, address_mode: mode}, cpu),
do: "DEC #{Mode.disasm(mode, cpu)}"
def disasm(%{disasm_override: override}, _cpu), do: override
end
end
|
lib/sneex/ops/decrement.ex
| 0.819677
| 0.415966
|
decrement.ex
|
starcoder
|
defmodule PromEx.Plugins.Application do
@moduledoc """
This plugin captures metrics regarding your application and its dependencies. Specifically,
it captures the versions of your application and its dependencies, and also
how many modules each dependency brings into the project.
This plugin supports the following options:
- `otp_app`: This is a REQUIRED option and is the name of your application in snake case (e.g. :my_cool_app).
- `deps`: This option is OPTIONAL and defines what dependencies the plugin should track. A value of `:all`
means that PromEx will fetch details on all application dependencies. A list of dependency names like
`[:phoenix, :ecto, :unplug]` means that PromEx will only fetch details regarding those dependencies.
- `git_sha_mfa`: This option is OPTIONAL and defines an MFA that will be called in order to fetch the
application's Git SHA at the time of deployment. By default, an Application Plugin function will be called
and will attempt to read the GIT_SHA environment variable to populate the value.
- `git_author_mfa`: This option is OPTIONAL and defines an MFA that will be called in order to fetch the
application's last Git commit author at the time of deployment. By default, an Application Plugin function
will be called and will attempt to read the GIT_AUTHOR environment variable to populate the value.
- `metric_prefix`: This option is OPTIONAL and is used to override the default metric prefix of
`[otp_app, :prom_ex, :application]`. If this changes, you will also want to set `application_metric_prefix`
in your `dashboard_assigns` to the snakecase version of your prefix; the default
`application_metric_prefix` is `{otp_app}_prom_ex_application`.
This plugin exposes the following metric groups:
- `:application_versions_manual_metrics`
To use plugin in your application, add the following to your application supervision tree:
```
def start(_type, _args) do
children = [
...
{
PromEx,
plugins: [
{PromEx.Plugins.Application, [otp_app: :my_cool_app]},
...
],
delay_manual_start: :no_delay
}
]
opts = [strategy: :one_for_one, name: WebApp.Supervisor]
Supervisor.start_link(children, opts)
end
```
This plugin exposes manual metrics so be sure to configure the PromEx `:delay_manual_start` as required.
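An example of overriding the Git SHA MFA (a hedged sketch; `MyApp.Release` and
its `git_sha/0` function are hypothetical):
```
{PromEx.Plugins.Application,
 [otp_app: :my_cool_app, git_sha_mfa: {MyApp.Release, :git_sha, []}]}
```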
"""
use PromEx.Plugin
require Logger
@impl true
def manual_metrics(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
apps = Keyword.get(opts, :deps, :all)
git_sha_mfa = Keyword.get(opts, :git_sha_mfa, {__MODULE__, :git_sha, []})
git_author_mfa = Keyword.get(opts, :git_author_mfa, {__MODULE__, :git_author, []})
metric_prefix = Keyword.get(opts, :metric_prefix, PromEx.metric_prefix(otp_app, :application))
[
Manual.build(
:application_versions_manual_metrics,
{__MODULE__, :apps_running, [otp_app, apps, git_sha_mfa, git_author_mfa]},
[
# Capture information regarding the primary application (i.e the user's application)
last_value(
metric_prefix ++ [:primary, :info],
event_name: [otp_app | [:application, :primary, :info]],
description: "Information regarding the primary application.",
measurement: :status,
tags: [:name, :version, :modules]
),
# Capture information regarding the application dependencies (i.e the user's libs)
last_value(
metric_prefix ++ [:dependency, :info],
event_name: [otp_app | [:application, :dependency, :info]],
description: "Information regarding the application's dependencies.",
measurement: :status,
tags: [:name, :version, :modules]
),
# Capture application Git SHA using user provided MFA
last_value(
metric_prefix ++ [:git_sha, :info],
event_name: [otp_app | [:application, :git_sha, :info]],
description: "The application's Git SHA at the time of deployment.",
measurement: :status,
tags: [:sha]
),
# Capture application Git author using user provided MFA
last_value(
metric_prefix ++ [:git_author, :info],
event_name: [otp_app | [:application, :git_author, :info]],
description: "The application's author of the last Git commit at the time of deployment.",
measurement: :status,
tags: [:author]
)
]
)
]
end
@impl true
def polling_metrics(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
poll_rate = Keyword.get(opts, :poll_rate, 5_000)
metric_prefix = Keyword.get(opts, :metric_prefix, PromEx.metric_prefix(otp_app, :application))
Polling.build(
:application_time_polling_metrics,
poll_rate,
{__MODULE__, :execute_time_metrics, []},
[
last_value(
metric_prefix ++ [:uptime, :milliseconds, :count],
event_name: [:prom_ex, :plugin, :application, :uptime, :count],
description: "The total number of wall clock milliseconds that have passed since the application started.",
measurement: :count,
unit: :millisecond
)
]
)
end
@doc false
def execute_time_metrics do
{wall_clock_time, _} = :erlang.statistics(:wall_clock)
:telemetry.execute([:prom_ex, :plugin, :application, :uptime, :count], %{count: wall_clock_time})
end
@doc false
def git_sha do
case System.fetch_env("GIT_SHA") do
{:ok, git_sha} ->
git_sha
:error ->
Logger.warn("GIT_SHA environment variable has not been defined")
"Git SHA not available"
end
end
@doc false
def git_author do
case System.fetch_env("GIT_AUTHOR") do
{:ok, git_sha} ->
git_sha
:error ->
Logger.warn("GIT_AUTHOR environment variable has not been defined")
"Git author not available"
end
end
@doc false
def apps_running(otp_app, apps, git_sha_mfa, git_author_mfa) do
started_apps =
Application.started_applications()
|> Enum.map(fn {app, _description, version} ->
{app, :erlang.iolist_to_binary(version)}
end)
|> Map.new()
started_apps = if apps == :all, do: started_apps, else: Map.take(started_apps, apps)
loaded_only_apps =
Application.loaded_applications()
|> Enum.map(fn {app, _description, version} ->
{app, :erlang.iolist_to_binary(version)}
end)
|> Map.new()
|> Map.drop(Map.keys(started_apps))
loaded_only_apps = if apps == :all, do: loaded_only_apps, else: Map.take(loaded_only_apps, apps)
# Emit primary app details
:telemetry.execute(
[otp_app | [:application, :primary, :info]],
%{
status: if(Map.has_key?(started_apps, otp_app), do: 1, else: 0)
},
%{
name: otp_app,
version:
Map.get_lazy(started_apps, otp_app, fn ->
Map.get(loaded_only_apps, otp_app, "undefined")
end),
modules: length(Application.spec(otp_app)[:modules])
}
)
started_apps = Map.delete(started_apps, otp_app)
loaded_only_apps = Map.delete(loaded_only_apps, otp_app)
# Loop through other desired apps and fetch details
started_apps
|> Enum.each(fn {app, version} ->
:telemetry.execute(
[otp_app | [:application, :dependency, :info]],
%{status: 1},
%{name: app, version: version, modules: length(Application.spec(app)[:modules])}
)
end)
loaded_only_apps
|> Enum.each(fn {app, version} ->
:telemetry.execute(
[otp_app | [:application, :dependency, :info]],
%{status: 0},
%{name: app, version: version, modules: length(Application.spec(app)[:modules])}
)
end)
# Publish Git SHA data
{module, function, args} = git_sha_mfa
git_sha = apply(module, function, args)
:telemetry.execute(
[otp_app | [:application, :git_sha, :info]],
%{status: 1},
%{sha: git_sha}
)
# Publish Git author data
{module, function, args} = git_author_mfa
git_author = apply(module, function, args)
:telemetry.execute(
[otp_app | [:application, :git_author, :info]],
%{status: 1},
%{author: git_author}
)
end
end
|
lib/prom_ex/plugins/application.ex
| 0.888075
| 0.712107
|
application.ex
|
starcoder
|
defmodule Nostrum.Struct.ApplicationCommand do
@moduledoc """
Typespecs for creating Application Commands
Official reference:
https://discord.com/developers/docs/interactions/application-commands
"""
@moduledoc since: "0.5.0"
alias Nostrum.Snowflake
@typedoc """
The name of the command, subcommand, or command_option; it must be between 1 and 32 characters in length and match the following regex `^[\w-]{1,32}$`
Only `USER` and `MESSAGE` commands may include uppercase letters and spaces.
"""
@type command_name :: String.t()
@typedoc """
The command, subcommand, or option's description; for `CHAT_INPUT` commands it must be between 1 and 100 characters in length.
For `USER` and `MESSAGE` commands it must be an empty string.
"""
@type command_description :: String.t()
@typedoc """
The type of application command you wish to create
- `1` for `CHAT_INPUT`, regular slash commands (default)
- `2` for `USER`, right-click menu commands on a specific user
- `3` for `MESSAGE`, right-click menu commands on a specific message
"""
@type command_type :: pos_integer()
@typedoc """
If you specify choices for a command, those become the only valid options for the user to select from.
"""
@type command_choice :: %{
name: String.t(),
value: String.t() | number()
}
@typedoc """
Indicates what type of argument the command expects.
- `1` for `SUB_COMMAND`
- `2` for `SUB_COMMAND_GROUP`
- `3` for `STRING`
- `4` for `INTEGER` *Note*: due to API limitations they can only be between -2^53 and 2^53
- `5` for `BOOLEAN`
- `6` for `USER`
- `7` for `CHANNEL`
- `8` for `ROLE`
- `9` for `MENTIONABLE` *Note*: Includes users and roles
- `10` for `NUMBER` *Note*: This has the same limitations as `INTEGER`
"""
@type command_option_type :: 1..10
@typedoc """
This defines a commands parameters, only valid for `CHAT_INPUT` commands.
## Notes
- required parameters on a command must precede optional ones
- for subcommands and subcommand groups, `:options` are its parameters
- `:options` and `:choices` are mutually exclusive
- `:autocomplete` may not be set to true if `:choices` is present
- if `:type` is 7 then `:channel_types` can be a list of allowed [channel types](https://discord.com/developers/docs/resources/channel#channel-object-channel-types)
"""
@type command_option :: %{
required(:name) => command_name(),
required(:description) => command_description(),
required(:type) => command_option_type(),
optional(:required) => boolean(),
optional(:choices) => [command_choice()],
optional(:options) => [command_option()],
optional(:channel_types) => [pos_integer()],
optional(:autocomplete) => boolean()
}
@typedoc """
This defines the map for creating an application command.
`:default_permission` controls whether the command is enabled for all users by default
For more information see [the official documentation](https://discord.com/developers/docs/interactions/application-commands)
"""
@type application_command_map :: %{
required(:name) => command_name(),
required(:description) => command_description(),
optional(:type) => command_type(),
optional(:default_permission) => boolean(),
optional(:options) => [command_option()]
}
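# An example `application_command_map` (a hedged sketch; the command itself is
# hypothetical — type 1 is CHAT_INPUT and option type 6 is USER, per the
# typedocs above):
#
#     %{
#       name: "greet",
#       description: "Greets the given user",
#       type: 1,
#       options: [
#         %{name: "user", description: "Who to greet", type: 6, required: true}
#       ]
#     }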
@typedoc """
When editing an existing application command, only the following fields may be updated,
missing fields will remain unchanged.
"""
@type application_command_edit_map :: %{
optional(:name) => command_name(),
optional(:description) => command_description(),
optional(:options) => [command_option()],
optional(:default_permission) => boolean()
}
@typedoc """
For editing the permissions for an application command
- `:id` is the id of the role or user
- `:type` is the type of the id, either `role` or `user`
- `:allow` is whether the role or user should be allowed to use the command
"""
@type application_command_permissions :: %{
id: Snowflake.t(),
type: application_command_permission_type(),
permission: boolean()
}
@typedoc """
- `1` for `ROLE`
- `2` for `USER`
"""
@type application_command_permission_type :: pos_integer()
end
|
lib/nostrum/struct/application_command.ex
| 0.845544
| 0.464659
|
application_command.ex
|
starcoder
|
defmodule Toml.Transform do
@moduledoc """
Defines the behavior for custom transformations of decoded TOML values.
See the documentation for `c:transform/2` for more details.
"""
@type t :: module
@type key :: binary | atom | term
@type keypath :: [binary] | [atom] | [term]
@type value ::
%{key => value}
| [value]
| number
| binary
| NaiveDateTime.t()
| DateTime.t()
| Date.t()
| Time.t()
defmacro __using__(_) do
quote do
@behaviour unquote(__MODULE__)
end
end
@doc """
This function is invoked for every key/value pair in the document, in a depth-first,
bottom-up, traversal of the document.
The transformer must return one of two values:
* `{:error, term}` - an error occurred in the transformer, and decoding should fail
* `term` - replace the value for the given key with the value returned from the
transformer in the final document
## Example
An example transformation would be the conversion of tables of a certain shape to a known struct value.
The struct:
defmodule Server do
defstruct [:name, :ip, :ports]
end
TOML which contains a table of this shape:
[servers.alpha]
ip = "192.168.1.1"
ports = [8080, 8081]
[servers.beta]
ip = "192.168.1.2"
ports = [8082, 8083]
And finally, the transformer implementation:
defmodule ServerTransform do
use Toml.Transform
# Transform IP strings to Erlang address tuples
def transform(:ip, ip) when is_binary(ip) do
case :inet.parse_ipv4_address(String.to_charlist(ip)) do
{:ok, result} ->
result
{:error, reason} ->
{:error, {:invalid_ip, ip, reason}}
end
end
# Non-binary values for IP addresses should return an error
def transform(:ip, ip), do: {:error, {:invalid_ip, ip, :expected_string}}
# Transform the server objects to Server structs
def transform(:servers, servers) when is_map(servers) do
for {name, server} <- servers do
struct(Server, Map.put(server, :name, name))
end
end
# Non-map values for servers should return an error
def transform(:servers, s), do: {:error, {:invalid_server, s}}
# Ignore all other values
def transform(_key, v), do: v
end
Assuming we decode with the following options:
Toml.decode!(content, keys: :atoms, transforms: [ServerTransform])
The result would be:
%{
servers: [
%Server{name: :alpha, ip: {192,168,1,1}, ports: [8080, 8081]},
%Server{name: :beta, ip: {192,168,1,2}, ports: [8082, 8083]}
]
}
"""
@callback transform(key, value) :: {:error, term} | term
# Given a list of transform functions, compose them into a single transformation pass
@doc false
def compose(mods) when is_list(mods) do
Enum.reduce(mods, nil, &compose_transforms/2)
end
# The first transform in the list does not require composition
defp compose_transforms(mod, nil), do: &mod.transform/2
# All subsequent transforms are composed with the previous
defp compose_transforms(mod, acc) when is_atom(mod) and is_function(acc) do
fn k, v ->
case acc.(k, v) do
{:error, _} = err ->
throw(err)
v2 ->
mod.transform(k, v2)
end
end
end
end
|
lib/transform.ex
| 0.931711
| 0.740831
|
transform.ex
|
starcoder
|
defmodule Advent20.Ferry do
@moduledoc """
Rain Risk
"""
def parse(input) do
input
|> String.split("\n", trim: true)
|> Stream.map(&parse_instruction/1)
end
defp parse_instruction("N" <> fields), do: {:vert, String.to_integer(fields)}
defp parse_instruction("S" <> fields), do: {:vert, -String.to_integer(fields)}
defp parse_instruction("E" <> fields), do: {:horiz, String.to_integer(fields)}
defp parse_instruction("W" <> fields), do: {:horiz, -String.to_integer(fields)}
defp parse_instruction("R" <> degrees), do: {:rotate, String.to_integer(degrees)}
defp parse_instruction("L" <> degrees), do: {:rotate, -String.to_integer(degrees)}
defp parse_instruction("F" <> fields), do: {:forward, String.to_integer(fields)}
@doc """
Part 1: Figure out where the navigation instructions lead.
What is the Manhattan distance between that location and the ship's starting position?
"""
def part_1(input) do
input
|> parse()
|> Enum.reduce(%{ship_pos: {0, 0}, rot: 90}, &update_boat/2)
|> manhattan_distance()
end
defp update_boat({:vert, fields}, %{ship_pos: {x, y}} = boat), do: %{boat | ship_pos: {x, y + fields}}
defp update_boat({:horiz, fields}, %{ship_pos: {x, y}} = boat), do: %{boat | ship_pos: {x + fields, y}}
defp update_boat({:rotate, degrees}, %{rot: rot} = boat), do: %{boat | rot: rem(rot + degrees, 360)}
defp update_boat({:forward, fields}, %{rot: 0} = boat), do: update_boat({:vert, fields}, boat)
defp update_boat({:forward, fields}, %{rot: rot} = boat) when rot in [270, -90],
do: update_boat({:horiz, -fields}, boat)
defp update_boat({:forward, fields}, %{rot: rot} = boat) when rot in [180, -180],
do: update_boat({:vert, -fields}, boat)
defp update_boat({:forward, fields}, %{rot: rot} = boat) when rot in [90, -270],
do: update_boat({:horiz, fields}, boat)
defp manhattan_distance(%{ship_pos: {x, y}}), do: abs(x) + abs(y)
@doc """
Part 2: Figure out where the navigation instructions actually lead.
What is the Manhattan distance between that location and the ship's starting position?
"""
def part_2(input) do
input
|> parse()
|> Enum.reduce(%{ship_pos: {0, 0}, waypoint_pos: {10, 1}}, &update_state/2)
|> manhattan_distance()
end
defp update_state({:vert, fields}, %{waypoint_pos: {x, y}} = state), do: %{state | waypoint_pos: {x, y + fields}}
defp update_state({:horiz, fields}, %{waypoint_pos: {x, y}} = state), do: %{state | waypoint_pos: {x + fields, y}}
defp update_state({:rotate, 90}, %{waypoint_pos: {x, y}} = state), do: %{state | waypoint_pos: {y, -x}}
defp update_state({:rotate, 180}, %{waypoint_pos: {x, y}} = state), do: %{state | waypoint_pos: {-x, -y}}
defp update_state({:rotate, 270}, %{waypoint_pos: {x, y}} = state), do: %{state | waypoint_pos: {-y, x}}
defp update_state({:rotate, -90}, %{waypoint_pos: {x, y}} = state), do: %{state | waypoint_pos: {-y, x}}
defp update_state({:rotate, -180}, %{waypoint_pos: {x, y}} = state), do: %{state | waypoint_pos: {-x, -y}}
defp update_state({:rotate, -270}, %{waypoint_pos: {x, y}} = state), do: %{state | waypoint_pos: {y, -x}}
defp update_state({:forward, fields}, %{waypoint_pos: {w_x, w_y}, ship_pos: {s_x, s_y}} = state),
do: %{state | ship_pos: {s_x + fields * w_x, s_y + fields * w_y}}
end
|
lib/advent20/12_ferry.ex
| 0.810329
| 0.675755
|
12_ferry.ex
|
starcoder
|
defmodule Co2Offset.Calculators.Calculator do
use Ecto.Schema
import Ecto.Changeset
alias Co2Offset.Calculators.Calculator
alias Co2Offset.Geo
schema "calculators" do
field :iata_from, :string
field :iata_to, :string
field :city_from, :string
field :city_to, :string
field :original_distance, :integer
field :airport_from, :map, virtual: true
field :airport_to, :map, virtual: true
timestamps()
end
def changeset(%Calculator{} = calculator, attrs) do
calculator
|> cast(attrs, [:iata_from, :iata_to])
|> validate_required([:iata_from, :iata_to])
|> validate_length(:iata_from, is: 3)
|> validate_length(:iata_to, is: 3)
|> put_airports()
|> validate_required([:airport_from, :airport_to])
|> put_cities()
|> validate_required([:city_from, :city_to])
|> put_original_distance()
|> validate_required([:original_distance])
end
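# A hedged usage sketch (the IATA codes are illustrative; they must resolve
# to airports in the Geo data for the changeset to be valid):
#
#     changeset(%Calculator{}, %{"iata_from" => "JFK", "iata_to" => "LHR"})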
defp put_airports(changeset) do
case changeset do
%Ecto.Changeset{valid?: true, changes: %{iata_from: iata_from, iata_to: iata_to}} ->
airport_from = Geo.get_airport_by_iata(iata_from)
airport_to = Geo.get_airport_by_iata(iata_to)
changeset
|> put_change(:airport_from, airport_from)
|> put_change(:airport_to, airport_to)
_ ->
changeset
end
end
defp put_cities(changeset) do
case changeset do
%Ecto.Changeset{
valid?: true,
changes: %{airport_from: airport_from, airport_to: airport_to}
} ->
changeset
|> put_change(:city_from, airport_from.city)
|> put_change(:city_to, airport_to.city)
_ ->
changeset
end
end
defp put_original_distance(changeset) do
case changeset do
%Ecto.Changeset{
valid?: true,
changes: %{airport_from: airport_from, airport_to: airport_to}
} ->
original_distance = Geo.distance_between_airports(airport_from, airport_to)
changeset
|> put_change(:original_distance, original_distance)
_ ->
changeset
end
end
end
|
lib/co2_offset/calculators/calculator.ex
| 0.61173
| 0.41401
|
calculator.ex
|
starcoder
|
defmodule Alchemy.Experiment do
defstruct [
name: "",
uuid: nil,
behaviors: [],
result: nil,
publisher: nil,
compare: nil,
cleaner: nil,
ignores: [],
]
alias __MODULE__
alias Alchemy.Observation
alias Alchemy.Result
require Logger
@doc """
Generates a new experiment. Alias for `Experiment.new/1`
"""
def experiment(title) do
%Experiment{name: title, uuid: uuid()}
|> comparator(fn(a, b) -> a == b end)
|> clean(fn value -> value end)
end
@doc """
Sets the function to use for publishing results. Can accept either a module or
a function. If a module name is passed in then the module is expected to have
a `publish/1` function which will be used for publishing results.
"""
def publisher(experiment, mod) when is_atom(mod) do
%{experiment | publisher: &mod.publish/1}
end
def publisher(experiment, f) when is_function(f) do
%{experiment | publisher: f}
end
@doc """
Adds a control function to the experiment.
Controls should be wrapped in a function in order to be lazily-evaluated
"""
def control(experiment, thunk) when is_function(thunk) do
add_behavior(experiment, :control, thunk)
end
@doc """
Adds a candidate function to the experiment.
The candidate needs to be wrapped in a function in order to be lazily-evaluated.
When the experiment is run the candidate will be evaluated and compared to the
control.
"""
def candidate(experiment, thunk) when is_function(thunk) do
add_behavior(experiment, :candidate, thunk)
end
@doc """
Adds a comparator to use when comparing the candidate to the control.
By default the comparator is:
``` elixir
fn(control, candidate) -> control == candidate end
```
"""
def comparator(experiment, thunk) when is_function(thunk) do
%{experiment | compare: thunk}
end
@doc """
Adds a clean function. This function will be run for each observation and
can be used to reduce noise when publishing results.
"""
def clean(experiment, f) do
%{experiment | cleaner: f}
end
@doc """
Adds an ignore clause to the experiment. This clause will be called if there
is a mismatch between the control values and the candidate values. This is a
useful function if you want to ignore known mismatches. Multiple ignore
clauses can be stacked together.
"""
def ignore(experiment, f) do
%{experiment | ignores: [f | experiment.ignores]}
end
@doc """
Runs the experiment.
If the `candidate` is provided then it will be run against the `control`. The
`control` must be provided for the experiment to be run. The `control`
is always returned. The execution order is randomized to account for any
ordering issues. Execution of these functions is done serially, because
concurrent execution would add extra layers of error handling that are best
handled in the user's application code rather than in this library.
"""
def run(experiment=%Experiment{}) do
observations =
experiment.behaviors
|> Enum.shuffle
|> Enum.map(&Observation.run(&1, experiment.cleaner)) # evaluate each lazily-built behavior
control =
observations
|> Enum.find(fn({c, _}) -> c == :control end)
|> elem(1)
candidates =
observations
|> Keyword.delete(:control)
|> Enum.map(fn(a) -> elem(a, 1) end)
result = Result.new(experiment, control, candidates)
publish(result, experiment.publisher)
case Result.raised?(control) do
true ->
reraise control.error.error, control.error.stacktrace
false ->
control.value
end
end
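# A hedged usage sketch (`old_lookup/0` and `new_lookup/0` are hypothetical):
#
#     experiment("lookup-rewrite")
#     |> control(fn -> old_lookup() end)
#     |> candidate(fn -> new_lookup() end)
#     |> run()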
defp publish(result, nil) do
Logger.debug(fn -> "Finished experiment: #{inspect result}" end)
result
end
defp publish(result, publisher) do
publisher.(result)
result
end
defp add_behavior(exp, type, thunk) do
behaviors = exp.behaviors ++ [{type, thunk}]
%Experiment{exp | behaviors: behaviors}
end
def control_value(%{control: control}) do
case control.value do
{:raised, e, stacktrace} ->
reraise e, stacktrace
value ->
value
end
end
defp uuid do
UUID.uuid1()
end
end
|
lib/alchemy/experiment.ex
| 0.835416
| 0.864196
|
experiment.ex
|
starcoder
|
defmodule Sanbase.Signal.Trigger.WalletTriggerSettings do
@moduledoc ~s"""
The wallet signal is triggered when the balance of a wallet or set of wallets
changes by a predefined amount for a specified asset (Ethereum, SAN tokens, Bitcoin, etc.)
The signal can follow a single address, a list of addresses
or a project. When a list of addresses or a project is followed, all the addresses
are considered to be owned by a single entity and the transfers between them
are excluded.
"""
use Vex.Struct
import Sanbase.{Validation, Signal.Validation}
import Sanbase.DateTimeUtils, only: [round_datetime: 2, str_to_sec: 1]
alias __MODULE__
alias Sanbase.Model.Project
alias Sanbase.Signal.Type
@derive {Jason.Encoder, except: [:filtered_target, :triggered?, :payload, :template_kv]}
@trigger_type "wallet_movement"
@enforce_keys [:type, :channel, :target, :asset]
defstruct type: @trigger_type,
channel: nil,
selector: nil,
target: nil,
operation: nil,
time_window: "1d",
# Private fields, not stored in DB.
filtered_target: %{list: []},
triggered?: false,
payload: %{},
template_kv: %{}
@type t :: %__MODULE__{
type: Type.trigger_type(),
channel: Type.channel(),
target: Type.complex_target(),
selector: map(),
operation: Type.operation(),
time_window: Type.time_window(),
# Private fields, not stored in DB.
filtered_target: Type.filtered_target(),
triggered?: boolean(),
payload: Type.payload(),
template_kv: Type.template_kv()
}
validates(:channel, &valid_notification_channel?/1)
validates(:target, &valid_crypto_address?/1)
validates(:selector, &valid_historical_balance_selector?/1)
validates(:operation, &valid_operation?/1)
validates(:time_window, &valid_time_window?/1)
@spec type() :: String.t()
def type(), do: @trigger_type
@doc ~s"""
Return a list of balance values for each address target for the necessary time range
"""
def get_data(
%__MODULE__{
filtered_target: %{list: target_list, type: :address},
selector: selector
} = settings
) do
{from, to} = get_timeseries_params(settings)
target_list
|> Enum.map(fn address ->
with {:ok, [{address, {balance_before, balance_after, _balance_change}}]} <-
balance_change(selector, address, from, to) do
{address,
[
%{datetime: from, balance: balance_before},
%{datetime: to, balance: balance_after}
]}
end
end)
|> Enum.reject(&match?({:error, _}, &1))
end
@doc ~s"""
Return a list of balance values for each project (slug) target for the necessary time range
"""
def get_data(
%__MODULE__{
filtered_target: %{list: target_list, type: :slug},
selector: selector
} = settings
) do
{from, to} = get_timeseries_params(settings)
target_list
|> Project.by_slug()
|> Enum.map(fn %Project{} = project ->
{:ok, eth_addresses} = Project.eth_addresses(project)
eth_addresses = Enum.map(eth_addresses, &String.downcase/1)
with {:ok, data} <- balance_change(selector, eth_addresses, from, to) do
{balance_before, balance_after} =
data
|> Enum.reduce(
{0, 0},
fn {_, {balance_before, balance_after, _change}},
{balance_before_acc, balance_after_acc} ->
{balance_before + balance_before_acc, balance_after + balance_after_acc}
end
)
data = [
%{datetime: from, balance: balance_before},
%{datetime: to, balance: balance_after}
]
{project.slug, data}
end
end)
|> Enum.reject(&match?({:error, _}, &1))
end
defp get_timeseries_params(%{time_window: time_window}) do
to = Timex.now()
from = Timex.shift(to, seconds: -str_to_sec(time_window))
{from, to}
end
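# Balance changes are cached. The datetimes in the cache key are rounded to
# 5-minute buckets (300s), so closely spaced evaluations share a cache entry.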
defp balance_change(selector, address, from, to) do
cache_key =
{:wallet_signal, selector, address, round_datetime(from, 300), round_datetime(to, 300)}
|> Sanbase.Cache.hash()
Sanbase.Signal.Evaluator.Cache.get_or_store(cache_key, fn ->
case Sanbase.Clickhouse.HistoricalBalance.balance_change(
selector,
address,
from,
to
) do
{:ok, result} ->
{:ok, result}
{:error, error} ->
{:error, error}
end
end)
end
defimpl Sanbase.Signal.Settings, for: WalletTriggerSettings do
import Sanbase.Signal.Utils
alias Sanbase.Signal.{OperationText, ResultBuilder}
def triggered?(%WalletTriggerSettings{triggered?: triggered}), do: triggered
def evaluate(%WalletTriggerSettings{} = settings, _trigger) do
case WalletTriggerSettings.get_data(settings) do
data when is_list(data) and data != [] ->
build_result(data, settings)
_ ->
%WalletTriggerSettings{settings | triggered?: false}
end
end
def build_result(data, %WalletTriggerSettings{} = settings) do
ResultBuilder.build(data, settings, &template_kv/2, value_key: :balance)
end
def cache_key(%WalletTriggerSettings{} = settings) do
construct_cache_key([
settings.type,
settings.target,
settings.selector,
settings.time_window,
settings.operation
])
end
defp template_kv(values, %{filtered_target: %{type: :address}} = settings) do
{operation_template, operation_kv} =
OperationText.to_template_kv(values, settings.operation)
{curr_value_template, curr_value_kv} =
OperationText.current_value(values, settings.operation)
asset_target_blockchain_kv = asset_target_blockchain_kv(settings.selector)
kv =
%{
type: WalletTriggerSettings.type(),
operation: settings.operation,
address: settings.target.address
}
|> Map.merge(operation_kv)
|> Map.merge(curr_value_kv)
|> Map.merge(asset_target_blockchain_kv)
template = """
The address {{address}}'s {{asset}} balance on the {{target_blockchain}} blockchain has #{
operation_template
}.
#{curr_value_template}
"""
{template, kv}
end
defp template_kv(%{identifier: slug} = values, %{filtered_target: %{type: :slug}} = settings) do
project = Project.by_slug(slug)
{operation_template, operation_kv} =
OperationText.to_template_kv(values, settings.operation)
{curr_value_template, curr_value_kv} =
OperationText.current_value(values, settings.operation)
asset_target_blockchain_kv = asset_target_blockchain_kv(settings.selector)
kv =
%{
type: WalletTriggerSettings.type(),
project_name: project.name,
project_ticker: project.ticker,
project_slug: project.slug,
operation: settings.operation
}
|> Map.merge(operation_kv)
|> Map.merge(curr_value_kv)
|> Map.merge(asset_target_blockchain_kv)
template = """
🔔 \#{{project_ticker}} | **{{project_name}}**'s {{asset}} balance on the {{target_blockchain}} blockchain has #{
operation_template
}.
#{curr_value_template}
"""
{template, kv}
end
defp asset_target_blockchain_kv(%{infrastructure: infrastructure} = selector) do
case infrastructure do
"ETH" -> %{asset: Map.get(selector, :slug, "ethereum"), target_blockchain: "Ethereum"}
"BNB" -> %{asset: Map.get(selector, :slug, "binance-coin"), target_blockchain: "Binance"}
"XRP" -> %{asset: Map.get(selector, :currency, "XRP"), target_blockchain: "Ripple"}
"BTC" -> %{asset: "bitcoin", target_blockchain: "Bitcoin"}
"BCH" -> %{asset: "bitcoin-cash", target_blockchain: "Bitcoin Cash"}
"LTC" -> %{asset: "litecoin", target_blockchain: "Litecoin"}
end
end
end
end
|
lib/sanbase/signals/trigger/settings/wallet_trigger_settings.ex
| 0.837653
| 0.40486
|
wallet_trigger_settings.ex
|
starcoder
|
defmodule Advent.Y2021.D13 do
@moduledoc """
https://adventofcode.com/2021/day/13
"""
@typep fold :: {:x | :y, non_neg_integer()}
@doc """
How many dots are visible after completing just the first fold instruction on
your transparent paper?
"""
@spec part_one(Enumerable.t()) :: non_neg_integer()
def part_one(input) do
{paper, [instruction | _rest]} = parse_input(input)
paper
|> fold(instruction)
|> MapSet.size()
end
@doc """
What code do you use to activate the infrared thermal imaging camera system?
"""
@spec part_two(Enumerable.t()) :: [String.t()]
def part_two(input) do
{paper, instructions} = parse_input(input)
paper =
Enum.reduce(instructions, paper, fn instruction, paper ->
fold(paper, instruction)
end)
{{x_min, _}, {x_max, _}} = Enum.min_max_by(paper, &elem(&1, 0))
{{_, y_min}, {_, y_max}} = Enum.min_max_by(paper, &elem(&1, 1))
for y <- y_min..y_max do
for x <- x_min..x_max, into: "" do
if MapSet.member?(paper, {x, y}), do: "#", else: "."
end
end
end
@spec print_part_two([String.t()]) :: :ok
def print_part_two(feed) do
feed
|> Enum.map(fn line ->
line
|> String.graphemes()
|> Enum.map_join(fn
"#" -> IO.ANSI.format([:green_background, :green, "#"])
"." -> IO.ANSI.format([:black_background, :black, "."])
end)
end)
|> Enum.each(&IO.puts/1)
end
@spec parse_input(Enumerable.t()) :: {MapSet.t(), [fold()]}
defp parse_input(input) do
Enum.reduce(input, {MapSet.new(), []}, fn
<<"fold along ", axis, "=", cut::binary>>, {paper, instructions} ->
{paper, instructions ++ [{List.to_atom([axis]), String.to_integer(cut)}]}
"", {paper, instructions} ->
{paper, instructions}
line, {paper, instructions} ->
[x, y] = line |> String.split(",") |> Enum.map(&String.to_integer/1)
{MapSet.put(paper, {x, y}), instructions}
end)
end
@spec fold(MapSet.t(), fold()) :: MapSet.t()
# NOTE: would like to find a way to consolidate the `fold/2` functions, but
# haven't found a way I like...
defp fold(paper, {:x, cut}) do
{a, b} = Enum.split_with(paper, fn {x, _y} -> x < cut end)
b =
b
|> Enum.reject(fn {x, _y} -> x == cut end)
|> Enum.map(fn {x, y} -> {transform(x, cut), y} end)
|> MapSet.new()
MapSet.union(MapSet.new(a), b)
end
defp fold(paper, {:y, cut}) do
{a, b} = Enum.split_with(paper, fn {_x, y} -> y < cut end)
b =
b
|> Enum.reject(fn {_x, y} -> y == cut end)
|> Enum.map(fn {x, y} -> {x, transform(y, cut)} end)
|> MapSet.new()
MapSet.union(MapSet.new(a), b)
end
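# Reflecting a coordinate `n` across the fold line at `cut` lands it at
# cut - (n - cut), i.e. n - 2 * (n - cut).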
@spec transform(integer(), integer()) :: integer()
defp transform(n, cut), do: n - 2 * (n - cut)
end
|
lib/advent/y2021/d13.ex
| 0.783575
| 0.540439
|
d13.ex
|
starcoder
|
defmodule ExCypher do
@moduledoc """
A DSL to build Cypher queries using elixir syntax
Use a simple macro to build your queries without any kind of string
interpolation.
### Example
Import `ExCypher` into your module, as follows, and feel free to build
your queries.
iex> defmodule SomeQuery do
...> import ExCypher
...>
...> def get_all_spaceships do
...> cypher do
...> match node(:s, [:Spaceship])
...> return :s
...> end
...> end
...> end
...> SomeQuery.get_all_spaceships
"MATCH (s:Spaceship) RETURN s"
This library only generates string queries. In order to execute them,
please consider using `ex-cypher` along with
[Bolt Sips](https://github.com/florinpatrascu/bolt_sips).
### Querying
When querying nodes in your graph database, the most common command is `MATCH`.
As you can see in the rest of this doc, the library kept the syntax the closest
as possible from the cypher's one, making the learning curve much smaller.
So, in order to query nodes, you can use the `match` function, along with
`ExCypher.Graph.Node.node/0` function to represent your nodes:
iex> cypher do: match(node(:n))
"MATCH (n)"
iex> cypher do: match(node(:p, [:Person]))
"MATCH (p:Person)"
iex> cypher do
...> match(node(:p, [:Person], %{name: "bob"}))
...> end
~S[MATCH (p:Person {name:"bob"})]
Note that you can combine the `ExCypher.Graph.Node.node/3` arguments as you
wish, with the node name, labels and properties.
Although having nodes in the database is essential, they alone won't make the
database useful. We must have access to their relationships. Thus, you can use
the `ExCypher.Graph.Relationship.rel/0` function to represent relationships
between nodes.
As in Cypher, you can use an arrow syntax to visually identify
the relationships direction, as you can see in there examples:
iex> cypher do
...> match node(:p, [:Person]) -- node(:c, [:Company])
...> end
"MATCH (p:Person)--(c:Company)"
iex> cypher do
...> match node(:p, [:Person]) -- node(:c, [:Company]) -- node()
...> end
"MATCH (p:Person)--(c:Company)--()"
iex> cypher do
...> match node(:p, [:Person]) -- rel(:WORKS_IN) -- node(:c, [:Company])
...> return :p
...> end
"MATCH (p:Person)-[WORKS_IN]-(c:Company) RETURN p"
iex> cypher do
...> match (node(:p, [:Person]) -- rel(:WORKS_IN) -> node(:c, [:Company]))
...> end
"MATCH (p:Person)-[WORKS_IN]->(c:Company)"
iex> cypher do
...> match (node(:c, [:Company]) <- rel(:WORKS_IN) -- node(:p, [:Person]))
...> end
"MATCH (c:Company)<-[WORKS_IN]-(p:Person)"
In the same way as nodes, `ExCypher.Graph.Relationship.rel/3` also allows you
to specify the relationship's name, labels and properties in different ways.
I strongly recommend taking a look at these functions' documentation to
get more working examples.
### Limiting, filtering and ordering results
Matching entire databases is not cool... Cypher allows you to filter the
returned nodes in several ways. Maybe the most trivial way to start with this
would be to attempt to order or limit your queries using, respectively,
`order` and `limit` functions:
iex> cypher do
...> match node(:s, [:Sharks])
...> order s.name
...> limit 10
...> return s.name, s.population
...> end
"MATCH (s:Sharks) ORDER BY s.name LIMIT 10 RETURN s.name, s.population"
By default, `ExCypher` sorts the returned nodes in ascending order.
If you'd like more control over this, use the following tuple syntax:
iex> cypher do
...> match node(:s, [:Sharks])
...> order {s.name, :asc}, {s.age, :desc}
...> return :s
...> end
"MATCH (s:Sharks) ORDER BY s.name ASC, s.age DESC RETURN s"
In addition to ordering and limiting the returned nodes, it's also essential
for a query language to have filtering support. Here, the `where`
function allows you to specify conditions that must be satisfied by each
returned node:
iex> cypher do
...> match node(:c, [:Creature])
...> where c.type == "cursed" or c.poisonous == true and c.population > 1000
...> return :c
...> end
~S|MATCH (c:Creature) WHERE c.type = "cursed" OR c.poisonous = true AND c.population > 1000 RETURN c|
We currently support all comparison operators used in Cypher; feel free
to use `<`, `>`, `<=`, `>=`, `!=` and `==`.
### Creating
Cypher allows the creation of nodes in a database via `CREATE` statement.
You can generate those queries in the same way with `create` function:
iex> cypher do
...> create node(:p, [:Player], %{nick: "like4boss", score: 100})
...> return p.name
...> end
~S[CREATE (p:Player {nick:"like4boss",score:100}) RETURN p.name]
iex> cypher do
...> create (node(:c, [:Country], %{name: "Brazil"}) -- rel([:HAS_CITY]) -> node([:City], %{name: "São Paulo"}))
...> return :c
...> end
~S|CREATE (c:Country {name:"Brazil"})-[:HAS_CITY]->(:City {name:"São Paulo"}) RETURN c|
Note that `create` also accepts the arrow-based relationship building syntax.
Another important tip: `create`, as is done in cypher, will always create a
new node, even if that node already exists. If you want to provide a
`CREATE UNIQUE` behavior, you must use `merge` instead:
iex> cypher do
...> merge node(:p, [:Player], %{nick: "like4boss"})
...> merge node(:p2, [:Player], %{nick: "marioboss"})
...> return p.name
...> end
~S|MERGE (p:Player {nick:"like4boss"}) MERGE (p2:Player {nick:"marioboss"}) RETURN p.name|
iex> cypher do
...> merge node(:p, [:Player], %{nick: "like4boss"})
...> merge node(:p2, [:Player], %{nick: "marioboss"})
...> merge (node(:p) -- rel([:IN_LOBBY]) -> node(:p2))
...> return p.name
...> end
~S|MERGE (p:Player {nick:"like4boss"}) MERGE (p2:Player {nick:"marioboss"}) MERGE (p)-[:IN_LOBBY]->(p2) RETURN p.name|
The `merge` command in cypher attempts to pattern match the provided graph in
the database and, whenever this pattern is not matched, it'll insert the entire
pattern in the database.
### WITH statement
Cypher also allows a query piping behavior using `WITH` statements. However,
`with` is one of Elixir's reserved keywords and cannot be overridden,
even with a macro.
Thus, you must use `pipe_with` instead:
iex> cypher do
...> match node(:c, [:Wizard], %{speciality: "healing"})
...> pipe_with {c.name, as: :name}, {c.age, as: :age}
...> return :name, :age
...> end
~S|MATCH (c:Wizard {speciality:"healing"}) WITH c.name AS name, c.age AS age RETURN name, age|
### Updating nodes
By default, we rely on the `set` function to update node labels and
properties. Here are a few running examples that'll show you
the `set` function syntax:
iex> # Setting a single property to a node
...> cypher do
...> match(node(:p, [:Person], %{name: "Andy"}))
...> set(p.name = "Bob")
...> return(p.name)
...> end
~S|MATCH (p:Person {name:"Andy"}) SET p.name = "Bob" RETURN p.name|
iex> # Setting several properties to a node
...> cypher do
...> match(node(:p, [:Person], %{name: "Andy"}))
...> set(p.name = "Bob", p.age = 34)
...> return(p.name)
...> end
~S|MATCH (p:Person {name:"Andy"}) SET p.name = "Bob", p.age = 34 RETURN p.name|
iex> # Setting several properties to a node at once
...> cypher do
...> match(node(:p, [:Person], %{name: "Andy"}))
...> set(p = %{name: "Bob", age: 34})
...> return(p.name)
...> end
~S|MATCH (p:Person {name:"Andy"}) SET p = {age:34,name:"Bob"} RETURN p.name|
### Removing properties
You can remove some properties from a node setting them to NULL, or to an
empty map:
iex> # Removing a node property
...> cypher do
...> match(node(:p, [:Person], %{name: "Andy"}))
...> set(p.name = nil)
...> return(p.name)
...> end
~S|MATCH (p:Person {name:"Andy"}) SET p.name = NULL RETURN p.name|
iex> # Removing several properties from a node
...> cypher do
...> match(node(:p, [:Person], %{name: "Andy"}))
...> set(p.name = %{})
...> return(p.name)
...> end
~S|MATCH (p:Person {name:"Andy"}) SET p.name = {} RETURN p.name|
### Upserting properties
You can also upsert properties on a node. If they don't exist, it'll
create them. If they exist, it won't. The syntax will look very familiar
to what you may know from elixir:
iex> cypher do
...> match(node(:p, [:Person], %{name: "Andy"}))
...> set(%{p | age: 40, role: "ship captain"})
...> return(p.name)
...> end
~S|MATCH (p:Person {name:"Andy"}) SET p += {age:40,role:"ship captain"} RETURN p.name|
### Using raw cypher functions
It's possible to use raw cypher functions in your queries too. Similarly to
`Ecto` library, use the `fragment` function:
iex> cypher do
...> match node(:random_winner, [:Person])
...> pipe_with {fragment("rand()"), as: :rand}, :random_winner
...> return :random_winner
...> limit 1
...> order :rand
...> end
~S|MATCH (random_winner:Person) WITH rand() AS rand, random_winner RETURN random_winner LIMIT 1 ORDER BY rand|
## Caveats with complex relationships
When building more complex associations, you must be aware of scopes and
how they'll affect the query building process. Whenever you run this:
iex> cypher do
...> create node(:p, [:Player], %{name: "mario"}),
...> node(:p2, [:Player], %{name: "luigi"})
...> end
~S|CREATE (p:Player {name:"mario"}), (p2:Player {name:"luigi"})|
You're actually calling the `create` function with two arguments.
However, when building more complex associations, operator precedence may
break the query building process. The following, for example, won't work.
```
cypher do
create node(:p, [:Player], %{name: "mario"}),
node(:p2, [:Player], %{name: "luigi"}),
node(:p) -- rel([:IS_FRIEND]) -> node(:p2)
end
```
This will result in a compilation error. Instead, let's mind operator
precedence and wrap the entire association in parentheses,
creating a new scope. Then we can take advantage of the full power of
macros:
iex> cypher do
...> create node(:p, [:Player], %{name: "mario"}),
...> node(:p2, [:Player], %{name: "luigi"}),
...> (node(:p) -- rel([:IS_FRIEND]) -> node(:p2))
...> end
~S|CREATE (p:Player {name:"mario"}), (p2:Player {name:"luigi"}), (p)-[:IS_FRIEND]->(p2)|
"""
alias ExCypher.{Buffer, Statement}
import ExCypher.Clause, only: [is_supported: 1]
@doc """
Wraps contents of a Cypher query and returns the query string.
"""
defmacro cypher(do: block) do
cypher_query(block, __CALLER__)
end
defp cypher_query(block, env) do
{:ok, pid} = Buffer.new_query()
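# Walk the AST in post-order; every supported clause (match, create,
# return, ...) is parsed into query fragments and pushed onto the buffer.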
Macro.postwalk(block, fn
{command, _ctx, args} when is_supported(command) ->
params = Statement.parse(command, args, env)
Buffer.put_buffer(pid, params)
term ->
term
end)
query = Buffer.generate_query(pid)
Buffer.stop_buffer(pid)
quote do
unquote(query)
|> Enum.reverse()
|> List.flatten()
|> Enum.join(" ")
|> String.replace(" , ", ", ")
end
end
end
|
lib/ex_cypher.ex
| 0.849191
| 0.50116
|
ex_cypher.ex
|
starcoder
|
defmodule SpaceApi.JsonParser do
alias SpaceApi.Space
@moduledoc """
The decoded JSON string is transformed into a SpaceApi.Space-struct here.
There is only one accessible function: _parse_json/2_
"""
@doc """
This function takes a JSON string that was already decoded into a map
(e.g. by `Poison.decode!/1`) and tries to parse it into a Space-struct.
The second parameter is optional: if you pass an existing Space-struct,
it will be used as the base for the new one.
Important: as of this writing, the latest version of the Space API is
0.13, so passing a newer document will raise an exception!
"""
@spec parse_json(map, %Space{}) :: %Space{}
def parse_json(json, space \\ %Space{}) do
version =
json
|> Map.get("api")
|> String.to_float()
if version > 0.13 do
raise "The version of this Space API-endpoint is newer than this code!"
end
space_init = %Space{space | raw_json: json}
parse(Map.keys(json), json, space_init)
end
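# A minimal usage sketch (the URL and the `Poison` decoder are
# assumptions for illustration, not part of this module):
#
#     "https://example.org/spaceapi.json"
#     |> fetch_body!()                    # hypothetical HTTP helper
#     |> Poison.decode!()
#     |> SpaceApi.JsonParser.parse_json()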
# all: Recursion ends here
defp parse([], _, space), do: space
# all: Space/Logo/URL could be fetched directly
defp parse([key | keys], json, space)
when key in ["space", "logo", "url"] do
parse(keys, json, %{space | String.to_atom(key) => Map.get(json, key)})
end
# v0.13: Grep lat, lon from location
defp parse(["location" | keys], json = %{"api" => "0.13"}, space) do
case Map.get(json, "location") do
%{"lat" => lat, "lon" => lon} ->
parse(keys, json, %Space{space | location: {lat, lon}})
_ ->
parse(keys, json, space)
end
end
# <= v0.12: Parse lat, lon separately
defp parse([key | keys], json, space) when key in ["lat", "lon"] do
{lat, lon} = space.location
case key do
"lat" ->
parse(keys, json, %Space{space | location: {Map.get(json, key), lon}})
"lon" ->
parse(keys, json, %Space{space | location: {lat, Map.get(json, key)}})
end
end
# v0.13: Create state from state-object
defp parse(["state" | keys], json = %{"api" => "0.13"}, space) do
json_state = Map.get(json, "state", %{})
state = { Map.get(json_state, "open", false),
Map.get(json_state, "lastchange", 0),
case Map.get(json_state, "message", "") do
message when is_binary(message) -> message
_ -> ""
end }
parse(keys, json, %Space{space | state: state})
end
# <= v0.12: Fill state based on separate open, lastchange fields
defp parse([key | keys], json, space) when key in ["open", "lastchange"] do
{open, lastchange, message} = space.state
case key do
"open" ->
parse(keys, json,
%Space{space | state: {Map.get(json, key), lastchange, message}})
"lastchange" ->
parse(keys, json,
%Space{space | state: {open, Map.get(json, key), message}})
end
end
# all: Just jump over the rest…
defp parse([_key | keys], json, space), do: parse(keys, json, space)
end
|
lib/json_parser.ex
| 0.743913
| 0.420957
|
json_parser.ex
|
starcoder
|
defmodule OptionParser do
@doc """
Parses the argv and returns a tuple with the parsed options
and the remaining arguments.
## Examples
OptionParser.parse(["--debug"])
#=> { [debug: true], [] }
OptionParser.parse(["--source", "lib"])
#=> { [source: "lib"], [] }
OptionParser.parse(["--source", "lib", "test/enum_test.exs", "--verbose"])
#=> { [source: "lib", verbose: true], ["test/enum_test.exs"] }
## Aliases
A set of aliases can be given as second argument:
OptionParser.parse(["-d"], aliases: [d: :debug])
#=> { [debug: true], [] }
## Flags
A set of flags can be given as an argument too. Those are considered
boolean and never consume the next value unless it is a boolean:
OptionParser.parse(["--unlock", "path/to/file"], flags: [:unlock])
#=> { [unlock: true], ["path/to/file"] }
OptionParser.parse(["--unlock", "false", "path/to/file"], flags: [:unlock])
#=> { [unlock: false], ["path/to/file"] }
## Negation switches
Any switch starting with `--no-` is always considered to be a
boolean and never parses the next value:
OptionParser.parse(["--no-op", "path/to/file"])
#=> { [no_op: true], ["path/to/file"] }
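## Inline values
A switch may also carry its value inline, separated by `=` (a sketch
based on the option splitting in the implementation below):
OptionParser.parse(["--source=lib"])
#=> { [source: "lib"], [] }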
"""
def parse(argv, opts // []) when is_list(argv) and is_list(opts) do
aliases = opts[:aliases] || []
flags = opts[:flags] || []
dict = Keyword.new(flags, fn(k) -> { k, false } end)
parse(argv, aliases, flags, dict, [], true)
end
@doc """
Similar to `parse/2` but only parses the head of the argv;
i.e., as soon as it finds a non-switch, it stops parsing.
Check `parse/2` for more info.
## Example
OptionParser.parse_head(["--source", "lib", "test/enum_test.exs", "--verbose"])
#=> { [source: "lib"], ["test/enum_test.exs", "--verbose"] }
"""
def parse_head(argv, opts // []) when is_list(argv) and is_list(opts) do
aliases = opts[:aliases] || []
flags = opts[:flags] || []
dict = Keyword.new(flags, fn(k) -> { k, false } end)
parse(argv, aliases, flags, dict, [], false)
end
## Helpers
defp parse(["-" <> option|t], aliases, flags, dict, args, all) do
{ option, value } = normalize_option(option, aliases)
if value == nil do
{ value, t } = if is_flag?(flags, option) do
flag_from_tail(t)
else
value_from_tail(t)
end
end
dict = store_option dict, option, value
parse(t, aliases, flags, dict, args, all)
end
defp parse([], _, _, dict, args, true) do
{ dict, List.reverse(args) }
end
defp parse([h|t], aliases, flags, dict, args, true) do
parse(t, aliases, flags, dict, [h|args], true)
end
defp parse(value, _, _, dict, _args, false) do
{ dict, value }
end
defp flag_from_tail([h|t]) when h in ["false", "true"], do: { h, t }
defp flag_from_tail(t) , do: { true, t }
defp value_from_tail(["-" <> _|_] = t), do: { true, t }
defp value_from_tail([h|t]), do: { h, t }
defp value_from_tail([]), do: { true, [] }
defp store_option(dict, option, value) when value in ["false", "true"] do
store_option(dict, option, binary_to_atom(value))
end
defp store_option(dict, option, value) do
Keyword.put dict, option, value
end
defp normalize_option(<<?-, option|:binary>>, aliases) do
normalize_option(option, aliases)
end
defp normalize_option(option, aliases) do
{ option, value } = split_option(option)
if is_no?(option), do: value = true
atom = option /> to_underscore /> binary_to_atom
{ aliases[atom] || atom, value }
end
defp split_option(option) do
case :binary.split(option, "=") do
[h] -> { h, nil }
[h|t] -> { h, Enum.join(t, "=") }
end
end
defp to_underscore(option) do
bc <<c>> inbits option, do: << if c == ?-, do: ?_, else: c >>
end
defp is_no?("no-" <> _), do: true
defp is_no?(_), do: false
defp is_flag?(flags, option), do: List.member?(flags, option)
end
|
lib/elixir/lib/option_parser.ex
| 0.599016
| 0.427217
|
option_parser.ex
|
starcoder
|
import Kernel, except: [inspect: 1]
defprotocol Binary.Inspect do
@moduledoc """
The `Binary.Inspect` protocol is responsible for
converting any structure to a Binary for textual
representation. All basic data structures (tuple,
list, function, pid, etc) implement the inspect
protocol. Other structures are advised to implement
the protocol in order to provide pretty printing.
"""
@only [BitString, List, Record, Tuple, Atom, Number, Any]
def inspect(thing)
end
defimpl Binary.Inspect, for: Atom do
require Macro
@doc """
Represents the atom as an Elixir term. The atoms false, true
and nil are simply quoted. Modules are properly represented
as modules using the dot notation.
Notice that in Elixir, all operators can be represented using
literal atoms (`:+`, `:-`, etc).
## Examples
inspect(:foo) #=> ":foo"
inspect(nil) #=> "nil"
inspect(Foo.Bar) #=> "Foo.Bar"
"""
def inspect(false), do: "false"
def inspect(true), do: "true"
def inspect(nil), do: "nil"
def inspect(:""), do: ":\"\""
def inspect(Elixir), do: "Elixir"
def inspect(atom) do
binary = atom_to_binary(atom)
cond do
valid_atom_identifier?(binary) ->
":" <> binary
valid_ref_identifier?(binary) ->
"Elixir-" <> rest = binary
bc <<r>> inbits rest, do: <<to_dot(r)>>
atom in Macro.binary_ops or atom in Macro.unary_ops ->
":" <> binary
true ->
":" <> Binary.escape(binary, ?")
end
end
# Atom aliases (Elixir-Foo-Bar-Baz) are printed with the dashes
# converted back to dots
defp to_dot(?-), do: ?.
defp to_dot(l), do: l
defp valid_ref_identifier?("Elixir" <> rest) do
valid_ref_piece?(rest)
end
defp valid_ref_identifier?(_), do: false
defp valid_ref_piece?(<<?-, h, t|:binary>>) when h in ?A..?Z do
valid_ref_piece? valid_identifier?(t)
end
defp valid_ref_piece?(<<>>), do: true
defp valid_ref_piece?(_), do: false
# Detect if the atom is a plain identifier (letters, digits, underscore,
# optionally ending in ? or !)
defp valid_atom_identifier?(<<h, t|:binary>>) when h in ?a..?z or h in ?A..?Z or h == ?_ do
case valid_identifier?(t) do
<<>> -> true
<<??>> -> true
<<?!>> -> true
_ -> false
end
end
defp valid_atom_identifier?(_), do: false
defp valid_identifier?(<<h, t|:binary>>)
when h in ?a..?z
when h in ?A..?Z
when h in ?0..?9
when h == ?_ do
valid_identifier? t
end
defp valid_identifier?(other), do: other
end
defimpl Binary.Inspect, for: BitString do
@doc %B"""
Represents the string as itself escaping
all necessary characters.
## Examples
inspect("bar") #=> "bar"
inspect("f\"oo") #=> "f\"oo"
"""
def inspect(thing) when is_binary(thing) do
if Binary.printable?(thing) do
Binary.escape(thing, ?")
else
as_bitstring(thing)
end
end
def inspect(thing) do
as_bitstring(thing)
end
## Helpers
defp as_bitstring(thing) do
erlang = Erlang.io_lib.format('~p', [thing])
list_to_binary List.reverse(replace(erlang, []))
end
defp replace([?:|t], acc), do: replace(t, [?||acc])
defp replace([h|t], acc) when is_list(h), do: replace(t, replace(h, acc))
defp replace([h|t], acc), do: replace(t, [h|acc])
defp replace([], acc), do: acc
end
defimpl Binary.Inspect, for: List do
@doc %B"""
Represents a list checking if it can be printed or not.
If so, a single-quoted representation is returned,
otherwise the brackets syntax is used.
Inspecting a list is conservative as it does not try
to guess how the list is encoded. That said, `'josé'`
will likely be inspected as `[106,111,115,195,169]`
because we can't know if it is encoded in utf-8
or iso-8859-1, which is common in Erlang libraries.
## Examples
inspect('bar') #=> 'bar'
inspect([0|'bar']) #=> "[0,98,97,114]"
inspect([:foo,:bar]) #=> "[:foo, :bar]"
"""
def inspect([]), do: "[]"
def inspect(thing) do
if printable?(thing) do
Binary.escape(list_to_binary(thing), ?')
else
container_join(thing, "[", "]")
end
end
## Helpers
def container_join([h], acc, last) do
acc <> Binary.Inspect.inspect(h) <> last
end
def container_join([h|t], acc, last) when is_list(t) do
acc = acc <> Binary.Inspect.inspect(h) <> ","
container_join(t, acc, last)
end
def container_join([h|t], acc, last) do
acc <> Binary.Inspect.inspect(h) <> "|" <> Binary.Inspect.inspect(t) <> last
end
def container_join([], acc, last) do
acc <> last
end
## printable?
defp printable?([c|cs]) when is_integer(c) and c in 32..126 do
printable?(cs)
end
defp printable?([c|cs]) when c in [?\n, ?\r, ?\t, ?\v, ?\b, ?\f, ?\e] do
printable?(cs)
end
defp printable?([]), do: true
defp printable?(_), do: false
end
defimpl Binary.Inspect, for: Tuple do
@doc """
Inspect tuples. If the tuple represents a record,
it shows it nicely formatted using the access syntax.
## Examples
inspect({1,2,3}) #=> "{1,2,3}"
inspect(ArgumentError.new) #=> ArgumentError[message: "argument error"]
"""
def inspect({}), do: "{}"
def inspect(exception) when is_exception(exception) do
[name,_|tail] = tuple_to_list(exception)
[_|fields] = lc { field, _ } inlist name.__record__(:fields), do: field
Binary.Inspect.Atom.inspect(name) <> records_join(fields, tail, "[", "]")
end
def inspect(thing) do
list = tuple_to_list(thing)
[name|tail] = list
if is_record?(name) do
fields = lc { field, _ } inlist name.__record__(:fields), do: field
if length(fields) != size(thing) - 1 do
Binary.Inspect.List.container_join(list, "{", "}")
else
Binary.Inspect.Atom.inspect(name) <> records_join(fields, tail, "[", "]")
end
else
Binary.Inspect.List.container_join(list, "{", "}")
end
end
## Helpers
defp is_record?(name) do
is_atom(name) and match?("Elixir-" <> _, atom_to_binary(name, :utf8)) and
function_exported?(name, :__record__, 1)
end
defp records_join([f], [v], acc, last) do
acc <> atom_to_binary(f, :utf8) <> ": " <> Binary.Inspect.inspect(v) <> last
end
defp records_join([fh|ft], [vh|vt], acc, last) do
acc = acc <> atom_to_binary(fh, :utf8) <> ": " <> Binary.Inspect.inspect(vh) <> ", "
records_join(ft, vt, acc, last)
end
defp records_join([], [], acc, last) do
acc <> last
end
end
defimpl Binary.Inspect, for: Number do
@doc """
Represents the number as a binary.
## Examples
inspect(1) #=> "1"
"""
def inspect(thing) when is_integer(thing) do
list_to_binary integer_to_list(thing)
end
def inspect(thing) do
list_to_binary float_to_list(thing)
end
end
defimpl Binary.Inspect, for: Regex do
@doc %B"""
Represents the Regex using the `%r""` syntax.
## Examples
inspect(%r/foo/m) #=> "%r\"foo\"m"
"""
def inspect(thing) do
"%r" <> Binary.Inspect.inspect(Regex.source(thing)) <> Regex.opts(thing)
end
end
defimpl Binary.Inspect, for: Any do
@doc """
For all other terms not implemented, we use the default
Erlang representation.
## Examples
inspect Process.self #=> "<0.35.0>"
"""
def inspect(thing) do
iolist_to_binary Erlang.io_lib.format('~p', [thing])
end
end
|
lib/elixir/lib/binary/inspect.ex
| 0.767646
| 0.565539
|
inspect.ex
|
starcoder
|
defmodule ExWire.Handler do
@moduledoc """
Defines a behavior for all message handlers of RLPx messages.
Message handlers tell us how we should respond to a given incoming transmission,
after it has been decoded.
"""
alias ExWire.Message
alias ExWire.Crypto
require Logger
@handlers %{
0x01 => ExWire.Handler.Ping,
0x02 => ExWire.Handler.Pong,
0x03 => ExWire.Handler.FindNeighbours,
0x04 => ExWire.Handler.Neighbours
}
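# A minimal handler sketch (a hypothetical module, shown only to
# illustrate the behaviour; the real handlers are the modules mapped
# above):
#
#     defmodule ExWire.Handler.Noop do
#       @behaviour ExWire.Handler
#
#       def handle(%ExWire.Handler.Params{} = _params, _discovery) do
#         :no_response
#       end
#     end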
defmodule Params do
@moduledoc "Struct to store parameters from an incoming message"
defstruct remote_host: nil,
signature: nil,
recovery_id: nil,
hash: nil,
type: nil,
data: nil,
timestamp: nil,
node_id: nil
@type t :: %__MODULE__{
remote_host: ExWire.Struct.Endpoint.t(),
signature: Crypto.signature(),
recovery_id: Crypto.recovery_id(),
hash: Crypto.hash(),
type: integer(),
data: binary(),
timestamp: integer(),
node_id: ExWire.node_id()
}
end
@type handler_response :: :not_implemented | :no_response | {:respond, Message.t()}
# `dispatch/3` invokes handlers with both the params and the discovery
# process, so the callback takes two arguments.
@callback handle(Params.t(), identifier() | nil) :: handler_response
@doc """
Decides which module to route the given message to,
or returns `:not_implemented` if we have not implemented
a handler for the message type.
## Examples
iex> ExWire.Handler.dispatch(0x01, %ExWire.Handler.Params{
...> remote_host: %ExWire.Struct.Endpoint{ip: {1, 2, 3, 4}, udp_port: 55},
...> signature: 2,
...> recovery_id: 3,
...> hash: <<5>>,
...> data: [1, [<<1,2,3,4>>, <<>>, <<5>>], [<<5,6,7,8>>, <<6>>, <<>>], 4] |> ExRLP.encode(),
...> timestamp: 123,
...> }, nil)
{:respond, %ExWire.Message.Pong{
hash: <<5>>,
timestamp: 123,
to: %ExWire.Struct.Endpoint{
ip: {1, 2, 3, 4},
tcp_port: 5,
udp_port: nil
}
}}
iex> ExWire.Handler.dispatch(0x99, %ExWire.Handler.Params{}, nil)
:not_implemented
# TODO: Add a `no_response` test case
"""
@spec dispatch(integer(), Params.t(), identifier() | nil) :: handler_response
def dispatch(type, params, discovery) do
case @handlers[type] do
nil ->
_ = Logger.warn("Message code `#{inspect(type, base: :hex)}` not implemented")
:not_implemented
mod when is_atom(mod) ->
apply(mod, :handle, [params, discovery])
end
end
end
|
apps/ex_wire/lib/ex_wire/handler.ex
| 0.757794
| 0.406597
|
handler.ex
|
starcoder
|
defmodule Nosedrum.Storage.ETS do
@moduledoc """
An implementation of the `Nosedrum.Storage` behaviour based on ETS tables.
This module needs to be configured as part of your supervision tree as it
spins up a `GenServer` which owns the command table. If you want to obtain
the table ID of the internal ETS table, send a call with the message `:tid`.
"""
@behaviour Nosedrum.Storage
@default_table :nosedrum_commands
@default_table_options [{:read_concurrency, true}, :ordered_set, :public, :named_table]
@doc false
use GenServer
@spec put_nested_command(map(), [String.t()], module()) :: Nosedrum.Storage.command_group()
defp put_nested_command(acc, [name], command) do
if function_exported?(command, :aliases, 0) do
Enum.reduce(command.aliases(), acc, &Map.put(&2, &1, command))
else
acc
end
|> Map.put(name, command)
end
defp put_nested_command(acc, [name | path], command),
do: Map.put(acc, name, put_nested_command(Map.get(acc, name, %{}), path, command))
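# A nesting sketch (`MyBot.Cogs.TagAdd` is a hypothetical module):
#
#     add_command(["tags", "add"], MyBot.Cogs.TagAdd)
#
# stores %{"add" => MyBot.Cogs.TagAdd} under the "tags" key, so that
# lookup_command("tags") returns the nested map rather than a module.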
@impl true
def add_command(path, command, table_ref \\ @default_table)
def add_command([name], command, table_ref) do
if function_exported?(command, :aliases, 0) do
Enum.each(command.aliases, &:ets.insert(table_ref, {&1, command}))
end
:ets.insert(table_ref, {name, command})
:ok
end
def add_command([name | path], command, table_ref) do
case lookup_command(name, table_ref) do
maybe_map when is_map(maybe_map) or is_nil(maybe_map) ->
cog = put_nested_command(maybe_map || %{}, path, command)
:ets.insert(table_ref, {name, cog})
:ok
module when not is_map(module) ->
{:error, "command `#{name}` is a top-level command, cannot add subcommand at `#{path}`"}
end
end
@spec is_empty_cog?({String.t(), map() | module()}) :: boolean()
defp is_empty_cog?({_key, module}) when is_atom(module), do: false
# An empty map is vacuously empty, and a nested cog is empty only if all
# of its entries are. (A bare `%{}` pattern would match *any* map, so the
# emptiness check recurses instead.)
defp is_empty_cog?({_key, cog}), do: Enum.all?(cog, &is_empty_cog?/1)
@impl true
def remove_command(path, table_ref \\ @default_table)
def remove_command([name], table_ref) do
command = lookup_command(name, table_ref)
# Alias entries are stored as {alias, command} objects keyed by the
# alias, so they are deleted by the alias key alone.
if is_atom(command) and function_exported?(command, :aliases, 0) do
Enum.each(command.aliases(), &:ets.delete(table_ref, &1))
end
:ets.delete(table_ref, name)
:ok
end
def remove_command([name | path], table_ref) do
case lookup_command(name, table_ref) do
nil ->
:ok
module when not is_map(module) ->
{:error,
"command `#{name}` is a top-level command, cannot remove subcommand at `#{path}`"}
map ->
{dropped_cog, updated_cog} = pop_in(map, path)
updated_cog =
if is_atom(dropped_cog) and function_exported?(dropped_cog, :aliases, 0) do
Map.drop(updated_cog, dropped_cog.aliases())
else
updated_cog
end
case Enum.reject(updated_cog, &is_empty_cog?/1) do
[] ->
:ets.delete(table_ref, name)
:ok
entries ->
mapped = Map.new(entries)
:ets.insert(table_ref, {name, mapped})
:ok
end
end
end
@impl true
def lookup_command(name, table_ref \\ @default_table) do
case :ets.lookup(table_ref, name) do
[] ->
nil
[{_name, command}] ->
command
end
end
@impl true
def all_commands(table_ref \\ @default_table) do
table_ref
|> :ets.tab2list()
|> Enum.reduce(%{}, fn {name, cog}, acc -> Map.put(acc, name, cog) end)
end
@doc """
Initialize the ETS command storage.
By default, the table used for storing commands is a named table with
the name `#{@default_table}`. The table reference is stored internally
as the state of this process, the public-facing API functions default
to using the table name to access the module.
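## Example
A minimal sketch (a child spec under a supervisor; the registered
process name is an assumption):
children = [
{Nosedrum.Storage.ETS, name: Nosedrum.Storage.ETS}
]
Supervisor.start_link(children, strategy: :one_for_one)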
"""
@spec start_link(atom() | nil, List.t(), Keyword.t()) :: GenServer.on_start()
def start_link(
table_name \\ @default_table,
table_options \\ @default_table_options,
gen_options
) do
GenServer.start_link(__MODULE__, {table_name, table_options}, gen_options)
end
@impl true
@doc false
def init({table_name, table_options}) do
tid = :ets.new(table_name, table_options)
{:ok, tid}
end
@impl true
def handle_call(:tid, _, tid) do
{:reply, tid, tid}
end
end
|
lib/nosedrum/storage/ets.ex
| 0.76856
| 0.434701
|
ets.ex
|
starcoder
|
defmodule BsonJson do
@moduledoc """
Converts a Bson document into a JSON document.
"""
@doc """
Returns a JSON representation of the first BSON document in the given
binary (any trailing bytes are returned as the second tuple element),
transcoding the following element types:
* int32 -> number
* int64 -> number (capped at js maximum)
* float -> number
* string -> string (utf8)
* document -> object
* array document -> array
* objectId -> 24 character length hexadecimal string
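## Example
A minimal sketch, hand-encoding the 22-byte BSON document
`{"hello": "world"}` (int32 length, 0x02 string element, cstring name,
string value, terminator):
BsonJson.stringify(<<22, 0, 0, 0, 0x02, "hello", 0, 6, 0, 0, 0, "world", 0, 0>>)
#=> {"{\"hello\":\"world\"}", ""}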
"""
def stringify(bson) do
case document(bson) do
{acc, rest} -> {acc|>List.flatten|>IO.iodata_to_binary, rest}
end
end
defp int32(<<i::size(32)-signed-little, rest::binary>>), do: {to_string(i), rest}
defp int64(<<i::size(64)-signed-little, rest::binary>>), do: {to_string(i), rest}
defp float(<<0, 0, 0, 0, 0, 0, 248, 127, rest::binary>>), do: {"null", rest} #nan
defp float(<<0, 0, 0, 0, 0, 0, 248, 255, rest::binary>>), do: {"null", rest} #nan
defp float(<<0, 0, 0, 0, 0, 0, 240, 127, rest::binary>>), do: {"9007199254740992", rest} #+inf
defp float(<<0, 0, 0, 0, 0, 0, 240, 255, rest::binary>>), do: {"-9007199254740992", rest} #-inf
defp float(<<f::size(64)-float-little, rest::binary>>), do: {to_string(f), rest}
defp string(<<l::size(32)-signed-little, rest::binary>>) do
bitsize = (l-1)*8
<<string::size(bitsize), 0, rest::binary>> = rest
{ [?", <<string::size(bitsize)>>, ?"],
rest }
end
defp objectid(<<oid::96, rest::binary>>) do
{<<?">> <> (for << <<b::size(4)>> <- <<oid::size(96)>> >>, into: <<>> do
<<Integer.to_string(b,16)::binary>>
end |> String.downcase) <> <<?">>, rest}
end
defp document(<<l::size(32)-signed-little, rest::binary>>) do
bitsize = (l-5)*8
<<bsondoc::size(bitsize), 0, rest::binary>> = rest
{ document(<<bsondoc::size(bitsize)>>, '', '{'), rest}
end
defp document("", _, acc), do: Enum.reverse([?}|acc])
defp document(<<head, rest::binary>>, prefix, acc) do
{el_name, rest} = peek_cstring(rest, [])
{el_value, rest} = element(head, rest)
document(rest, ?,, [el_value, ?:, ?", el_name, ?", prefix | acc])
end
defp array(<<l::size(32)-signed-little, rest::binary>>) do
bitsize = (l-5)*8
<<bsondoc::size(bitsize), 0, rest::binary>> = rest
{ array(<<bsondoc::size(bitsize)>>, '', [?[]), rest}
end
defp array("", _, acc), do: Enum.reverse([?]|acc])
defp array(<<head, rest::binary>>, prefix, acc) do
{_, rest} = peek_cstring(rest, [])
{el_value, rest} = element(head, rest)
array(rest, ?,, [el_value, prefix | acc])
end
defp element(head, bson) do
case head do
0x01 -> float(bson)
0x02 -> string(bson)
0x03 -> document(bson)
0x04 -> array(bson)
0x07 -> objectid(bson)
0x10 -> int32(bson)
0x12 -> int64(bson)
end
end
defp peek_cstring(<<0, rest::binary>>, acc), do: {acc|>Enum.reverse|>IO.iodata_to_binary, rest}
defp peek_cstring(<<c, rest::binary>>, acc), do: peek_cstring(rest, [c|acc])
defp peek_cstring("", _acc), do: raise "bson corrupted: expecting cstring end mark"
end
|
hello_elixir/deps/bson/lib/bsonjson.ex
| 0.712332
| 0.542621
|
bsonjson.ex
|
starcoder
|