# lib/int_set.ex
defmodule IntSet do
use Bitwise
@moduledoc """
Efficiently store and index a set of non-negative integers.
A set can be constructed using `IntSet.new/0`:
iex> IntSet.new
#IntSet<[]>
An `IntSet` obeys the same set semantics as `MapSet`, and provides
constant-time operations for insertion, deletion, and membership checking.
Use `Enum.member?/2` to check for membership.
iex> IntSet.new(3) |> Enum.member?(3)
true
Sets also implement `Collectable`, so they can collect values in any context
that a list can:
iex> Enum.into([1, 2, 3], IntSet.new())
#IntSet<[1, 2, 3]>
The `inspect/1` implementation for `IntSet` sorts the members, which makes
it way easier to write doctests:
iex> IntSet.new([3, 1, 2])
#IntSet<[1, 2, 3]>
Working with applications that use bitstrings becomes way easier,
because `IntSet.new/1` accepts a bitstring,
and `IntSet.bitstring/2` can return one.
iex> IntSet.new(5) |> IntSet.bitstring()
<<0::1, 0::1, 0::1, 0::1, 0::1, 1::1>>
iex> IntSet.new(<<0::1, 0::1, 0::1, 0::1, 0::1, 1::1>>)
#IntSet<[5]>
## Performance
An `IntSet` is significantly faster than Elixir's `MapSet` at set operations (union, intersection, difference, equality),
but slower at everything else.
The case for memory usage is similar:
better than `MapSet` for set operations,
worse for everything else.
Figures are operations per second (higher is better).

| Op           | MapSet | IntSet  | Comparison    |
|--------------|--------|---------|---------------|
| new          | 4.8K   | 2.46K   | 1.95x slower  |
| member?      | 6.78M  | 2.93M   | 2.31x slower  |
| put          | 4.19M  | 1.15M   | 3.66x slower  |
| union        | 156.4K | 944.31K | 6.04x faster  |
| difference   | 48.09K | 891.27K | 18.53x faster |
| intersection | 14.03K | 905.70K | 64.54x faster |
| equal?       | 0.26M  | 2.41M   | 9.25x faster  |
There is a benchmark checked into the project repo
at `perf/performance_test.exs`.
You can run it with `mix run` to see some results for yourself.
## Serialization
With the use of `IntSet.bitstring/2`, and `IntSet.new/1`,
you can serialize this collection very efficiently.
Remember to pass the `byte_align: true` option into `IntSet.bitstring/2` when you do this;
most encoding schemes expect byte-aligned data.
iex> IntSet.new([4, 8, 15, 16, 23, 42]) |> IntSet.bitstring(byte_align: true) |> Base.encode16()
"088181000020"
iex> Base.decode16!("088181000020") |> IntSet.new()
#IntSet<[4, 8, 15, 16, 23, 42]>
"""
@moduledoc since: "1.0.0"
defstruct s: <<>>
@opaque t :: %__MODULE__{s: bitstring}
defguardp is_index(i)
when is_integer(i) and i >= 0
defguardp can_contain(s, i)
when is_index(i) and is_bitstring(s) and bit_size(s) > i
@doc """
Create an empty int set.
## Examples
iex> IntSet.new
#IntSet<[]>
"""
@doc since: "1.0.0"
@spec new :: t
def new do
%IntSet{}
end
@doc """
Create an int set with some starting value.
## Examples
You can create a set with a single starting value.
iex> IntSet.new(0)
#IntSet<[0]>
You can also provide an enumerable of integers to start with.
iex> IntSet.new([1, 2, 3])
#IntSet<[1, 2, 3]>
Lastly, you can initialize the set with a bit string.
Each bit in the string maps to a single integer,
with the very first bit of the string representing the number zero.
iex> IntSet.new(<<1 :: 1>>)
#IntSet<[0]>
iex> IntSet.new(<<0b1000_1000>>)
#IntSet<[0, 4]>
iex> IntSet.new(<<0 :: 1>>)
#IntSet<[]>
"""
@doc since: "1.0.0"
@spec new(non_neg_integer | Enum.t() | bitstring) :: t
def new(members)
def new(bitstring) when is_bitstring(bitstring) do
%IntSet{s: bitstring}
end
def new(int) when is_index(int) do
new([int])
end
def new(enum) do
list = enum |> Enum.sort() |> Enum.uniq()
%IntSet{s: seqput(<<>>, list)}
end
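# Builds the bitfield from an ascending list of distinct indices:
# pad with 0-bits up to each index, then append a single 1-bit for it.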
defp seqput(bits, []) when is_bitstring(bits) do
bits
end
defp seqput(bits, [next | rest])
when is_bitstring(bits) and is_integer(next) and bit_size(bits) <= next do
pad_bits = next - bit_size(bits)
new_bits = <<bits::bitstring, 0::size(pad_bits), 1::1>>
seqput(new_bits, rest)
end
@doc """
Returns a set of size n with all members not in the given IntSet.
You can visualize this operation as calling `IntSet.difference/2`
with the first argument being a full IntSet of size n.
## Examples
iex> IntSet.new(0) |> IntSet.inverse(1)
#IntSet<[]>
iex> IntSet.new(0) |> IntSet.inverse(8)
#IntSet<[1, 2, 3, 4, 5, 6, 7]>
iex> IntSet.new() |> IntSet.inverse(3)
#IntSet<[0, 1, 2]>
iex> IntSet.new() |> IntSet.inverse(9)
#IntSet<[0, 1, 2, 3, 4, 5, 6, 7, 8]>
"""
@doc since: "1.4.0"
@spec inverse(t, non_neg_integer) :: t
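# Pads the bitfield out to whole bytes, flips every bit of the integer view
# with bnot/1, then keeps only the first n bits so padding never enters the set.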
def inverse(%IntSet{s: a}, n) do
bytes = ceil(n / 8)
padded_bits = bytes * 8
waste_bits = padded_bits - n
<<a::unsigned-big-integer-size(padded_bits)>> = right_pad(a, bytes)
<<a::unsigned-big-integer-size(n), _rest::bits-size(waste_bits)>> =
<<bnot(a)::size(padded_bits)>>
%IntSet{s: <<a::unsigned-big-integer-size(n)>>}
end
@doc """
Create a new set that contains all of the elements of both x and y.
## Examples
iex> a = IntSet.new(7)
iex> b = IntSet.new(4)
iex> IntSet.union(a, b)
#IntSet<[4, 7]>
"""
@doc since: "1.0.0"
@spec union(t, t) :: t
def union(x, y)
def union(%IntSet{s: a}, %IntSet{s: b}) do
%IntSet{s: bitwise_bits(&bor/2, a, b)}
end
@doc """
Returns a set that is `int_set1` without the members of `int_set2`.
## Examples
iex> IntSet.difference(IntSet.new([1, 2]), IntSet.new([2, 3, 4]))
#IntSet<[1]>
"""
@doc since: "1.2.0"
@spec difference(t, t) :: t
def difference(int_set1, int_set2)
def difference(%IntSet{s: a}, %IntSet{s: b}) do
%IntSet{s: bitwise_bits(&bdiff/2, a, b)}
end
defp bdiff(a, b) when is_number(a) and is_number(b) do
band(a, bnot(b))
end
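# Applies a bitwise function across two bitfields: both are padded to a common
# whole-byte width, read as big-endian integers, combined with `fun`, and
# re-encoded at that width.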
defp bitwise_bits(fun, <<>>, <<>>) do
<<fun.(0, 0)::8>>
end
defp bitwise_bits(fun, a, b) do
max_bytes = max(byte_size(a), byte_size(b))
max_bits = max_bytes * 8
<<abin::big-integer-size(max_bits)>> = right_pad(a, max_bytes)
<<bbin::big-integer-size(max_bits)>> = right_pad(b, max_bytes)
<<fun.(abin, bbin)::size(max_bits)>>
end
defp right_pad(bin, size_bytes)
when is_bitstring(bin) and is_integer(size_bytes) and size_bytes >= 0 do
target_bit_size = size_bytes * 8
pad_size = target_bit_size - bit_size(bin)
if pad_size > 0 do
<<bin::bitstring, 0::size(pad_size)>>
else
bin
end
end
@doc """
Find all elements that are in both `int_set1` and `int_set2`.
## Examples
iex> IntSet.intersection(IntSet.new([1, 2]), IntSet.new([2, 3, 4]))
#IntSet<[2]>
iex> IntSet.intersection(IntSet.new([1, 2]), IntSet.new([3, 4]))
#IntSet<[]>
"""
@doc since: "1.3.0"
@spec intersection(t, t) :: t
def intersection(int_set1, int_set2)
def intersection(%IntSet{s: <<>>}, %IntSet{s: _}), do: IntSet.new()
def intersection(%IntSet{s: _}, %IntSet{s: <<>>}), do: IntSet.new()
def intersection(%IntSet{s: a}, %IntSet{s: b}) do
%IntSet{s: bitwise_bits(&band/2, a, b)}
end
@doc """
Checks if `int_set` and `int_set2` have no members in common.
## Examples
iex> IntSet.disjoint?(IntSet.new([1, 2]), IntSet.new([3, 4]))
true
iex> IntSet.disjoint?(IntSet.new([1, 2]), IntSet.new([2, 3]))
false
"""
@doc since: "1.2.0"
@spec disjoint?(t, t) :: boolean
def disjoint?(int_set1, int_set2)
def disjoint?(%IntSet{s: <<>>}, %IntSet{s: _}), do: true
def disjoint?(%IntSet{s: _}, %IntSet{s: <<>>}), do: true
def disjoint?(%IntSet{s: a}, %IntSet{s: b}) do
bitwise = bitwise_bits(&band/2, a, b)
len = bit_size(bitwise)
bitwise == <<0::size(len)>>
end
@doc """
Add a value to the int set.
## Examples
iex> set = IntSet.new()
#IntSet<[]>
iex> IntSet.put(set, 0)
#IntSet<[0]>
"""
@doc since: "1.0.0"
@spec put(t, non_neg_integer) :: t
def put(s, x)
def put(%IntSet{s: s} = set, x) when is_index(x) and is_bitstring(s) do
set_bit(set, x, 1)
end
@doc """
Remove a number from the int set.
## Examples
iex> set = IntSet.new(5)
#IntSet<[5]>
iex> IntSet.delete(set, 5)
#IntSet<[]>
"""
@doc since: "1.0.0"
@spec delete(t, non_neg_integer) :: t
def delete(set, x)
def delete(%IntSet{s: s} = set, x)
when is_index(x) and is_bitstring(s) and not can_contain(s, x) do
set
end
def delete(%IntSet{s: s} = set, x) when can_contain(s, x) do
set_bit(set, x, 0)
end
@spec set_bit(t, non_neg_integer, 0 | 1) :: t
defp set_bit(%IntSet{} = set, i, x) when x in 0..1 do
%IntSet{s: s} = ensure_capacity_for(set, i)
<<pre::size(i), _::1, post::bitstring>> = s
%IntSet{s: <<pre::size(i), x::1, post::bitstring>>}
end
@spec ensure_capacity_for(t, non_neg_integer) :: t
defp ensure_capacity_for(s, x)
defp ensure_capacity_for(%IntSet{s: s} = set, x) when can_contain(s, x) do
set
end
defp ensure_capacity_for(%IntSet{s: s}, x) when is_index(x) and bit_size(s) <= x do
total_bits_needed = x + 1
bits_to_add = total_bits_needed - bit_size(s)
%IntSet{s: <<s::bitstring, 0::size(bits_to_add)>>}
end
@doc """
Checks if two sets are equal.
## Examples
iex> IntSet.equal?(IntSet.new([1, 2]), IntSet.new([2, 1, 1]))
true
iex> IntSet.equal?(IntSet.new([1, 2]), IntSet.new([3, 4]))
false
"""
@doc since: "1.3.0"
@spec equal?(t, t) :: boolean
def equal?(int_set1, int_set2)
def equal?(%IntSet{s: a}, %IntSet{s: b}) do
equal_inner(a, b)
end
# The choice of powers-of-two chunk sizes was arbitrary.
# The choice to stop at 16 bytes was not:
# performance testing showed throughput maxing out at 16-byte chunks, with
# larger chunks getting slower, and stopping at 16 bytes rather than 8 cut
# memory usage to roughly a quarter.
# Caveat: this is probably only true for my machine (eight 64-bit cores).
defp equal_inner(
<<a::binary-size(16), arest::bitstring>>,
<<b::binary-size(16), brest::bitstring>>
)
when a == b,
do: equal_inner(arest, brest)
defp equal_inner(
<<a::binary-size(8), arest::bitstring>>,
<<b::binary-size(8), brest::bitstring>>
)
when a == b,
do: equal_inner(arest, brest)
defp equal_inner(
<<a::binary-size(4), arest::bitstring>>,
<<b::binary-size(4), brest::bitstring>>
)
when a == b,
do: equal_inner(arest, brest)
defp equal_inner(
<<a::binary-size(2), arest::bitstring>>,
<<b::binary-size(2), brest::bitstring>>
)
when a == b,
do: equal_inner(arest, brest)
defp equal_inner(<<a, arest::bitstring>>, <<b, brest::bitstring>>) when a == b,
do: equal_inner(arest, brest)
defp equal_inner(<<a::size(1), arest::bitstring>>, <<b::size(1), brest::bitstring>>)
when a == b,
do: equal_inner(arest, brest)
defp equal_inner(<<0::size(1), rest::bitstring>>, <<>>), do: equal_inner(rest, <<>>)
defp equal_inner(<<>>, <<0::size(1), rest::bitstring>>), do: equal_inner(rest, <<>>)
defp equal_inner(<<a::size(1)>>, <<b::size(1)>>) when a == b, do: true
defp equal_inner(<<>>, <<>>), do: true
defp equal_inner(_, _), do: false
@doc """
Get a bitstring representing the members of a set.
## Examples
iex> IntSet.new(0) |> IntSet.bitstring()
<<1::1>>
iex> IntSet.new(5) |> IntSet.bitstring()
<<0::1, 0::1, 0::1, 0::1, 0::1, 1::1>>
iex> IntSet.new() |> IntSet.bitstring()
<<>>
You can also provide a `:byte_align` option,
which pads the end of the binary with zeros up to the next whole-byte boundary.
By default this option is `false`.
iex> IntSet.new(5) |> IntSet.bitstring(byte_align: true)
<<0::1, 0::1, 0::1, 0::1, 0::1, 1::1, 0::1, 0::1>>
"""
@doc since: "1.1.0"
@spec bitstring(t, keyword) :: bitstring
def bitstring(%IntSet{s: s}, opts \\ []) do
if Keyword.get(opts, :byte_align, false) do
byte_align(s)
else
s
end
end
defp byte_align(bin) do
# Integer.mod(-size, 8) is 0 for an already byte-aligned bitstring,
# so no padding byte is added in that case.
bits_to_add = Integer.mod(-bit_size(bin), 8)
<<bin::bitstring, 0::size(bits_to_add)>>
end
defimpl Inspect do
import Inspect.Algebra
def inspect(s, opts) do
int_list = Enum.into(s, []) |> Enum.sort()
concat(["#IntSet<", to_doc(int_list, %{opts | charlists: :as_lists}), ">"])
end
end
defimpl Collectable do
def into(original) do
collector_fun = fn
list, {:cont, elem} -> [elem | list]
list, :done -> IntSet.new(list) |> IntSet.union(original)
_, :halt -> :ok
end
{[], collector_fun}
end
end
defimpl Enumerable do
def count(_) do
{:error, __MODULE__}
end
defguard is_index(i)
when is_integer(i) and i >= 0
def member?(%IntSet{}, x) when is_integer(x) and x < 0, do: {:ok, false}
def member?(%IntSet{s: s}, x) when is_index(x) and bit_size(s) <= x, do: {:ok, false}
def member?(%IntSet{s: <<0::1, _rst::bitstring>>}, 0), do: {:ok, false}
def member?(%IntSet{s: <<1::1, _rst::bitstring>>}, 0), do: {:ok, true}
def member?(%IntSet{s: s}, x)
when is_index(x) and bit_size(s) > x do
<<_::size(x), i::1, _::bitstring>> = s
{:ok, i == 1}
end
def member?(%IntSet{}, _), do: {:error, __MODULE__}
def slice(_) do
{:error, __MODULE__}
end
def reduce(_, {:halt, acc}, _fun) do
{:halted, acc}
end
def reduce(set, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(set, &1, fun)}
end
def reduce(%IntSet{s: <<>>}, {:cont, acc}, _fun) do
{:done, acc}
end
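# Peels off the highest-index bit each step: if it is set, its index is fed to
# the reducer before recursing on the remaining prefix, so members are visited
# in descending order.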
def reduce(%IntSet{s: s}, {:cont, acc}, fun) do
last_i = bit_size(s) - 1
before_last_size = last_i
<<h::bitstring-size(before_last_size), last_flag::1>> = s
rest = IntSet.new(h)
if last_flag == 1 do
reduce(rest, fun.(last_i, acc), fun)
else
reduce(rest, {:cont, acc}, fun)
end
end
end
end
# lib/zap/entry.ex
defmodule Zap.Entry do
@moduledoc false
use Bitwise
@type header :: %{
size: non_neg_integer(),
name: String.t(),
nsize: non_neg_integer()
}
@type entity :: %{
crc: pos_integer(),
size: non_neg_integer(),
usize: non_neg_integer(),
csize: non_neg_integer()
}
@type t :: %__MODULE__{
binary: iodata(),
entity: entity(),
header: header(),
size: non_neg_integer()
}
defstruct [:binary, :header, :entity, size: 0]
@spec new(name :: String.t(), data :: binary()) :: t()
def new(name, data) do
{hframe, header} = encode_header(name)
{eframe, entity} = encode_entity(data)
binary = IO.iodata_to_binary([hframe, data, eframe])
%__MODULE__{
binary: binary,
header: header,
entity: entity,
size: byte_size(binary)
}
end
@spec consume(entry :: t(), bytes :: :all | pos_integer()) :: {t(), binary()}
def consume(%__MODULE__{size: 0} = entry, _bytes) do
{entry, ""}
end
def consume(%__MODULE__{} = entry, :all) do
{%{entry | binary: "", size: 0}, entry.binary}
end
def consume(%__MODULE__{size: size} = entry, bytes) when bytes >= size do
{%{entry | binary: "", size: 0}, entry.binary}
end
def consume(%__MODULE__{binary: binary, size: size} = entry, bytes) do
take = binary_part(binary, 0, bytes)
keep = binary_part(binary, bytes, size - bytes)
{%{entry | binary: keep, size: byte_size(keep)}, take}
end
defp encode_header(name) when is_binary(name) do
nsize = byte_size(name)
mtime = NaiveDateTime.from_erl!(:calendar.local_time())
frame = <<
# local file header signature
0x04034B50::little-size(32),
# version needed to extract
20::little-size(16),
# general purpose bit flag (bit 3: data descriptor, bit 11: utf8 name)
(0x0008 ||| 0x0800)::little-size(16),
# compression method (always 0, we aren't compressing currently)
0::little-size(16),
# last mod time
dos_time(mtime)::little-size(16),
# last mod date
dos_date(mtime)::little-size(16),
# crc-32
0::little-size(32),
# compressed size
0::little-size(32),
# uncompressed size
0::little-size(32),
# file name length
nsize::little-size(16),
# extra field length
0::little-size(16),
# file name
name::binary
>>
{frame, %{size: byte_size(frame), name: name, nsize: nsize}}
end
defp encode_entity(data) when is_binary(data) do
crc = :erlang.crc32(data)
size = byte_size(data)
frame = <<
# data descriptor signature
0x08074B50::little-size(32),
# crc-32 for the entity
crc::little-size(32),
# compressed size, just the size since we aren't compressing
size::little-size(32),
# uncompressed size
size::little-size(32)
>>
{frame, %{crc: crc, size: size + byte_size(frame), usize: size, csize: size}}
end
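# MS-DOS timestamp packing: time is seconds/2 in bits 0-4, minutes in bits
# 5-10, and hours in bits 11-15; date is day in bits 0-4, month in bits 5-8,
# and years since 1980 in bits 9-15.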
defp dos_time(time) do
round(time.second / 2 + (time.minute <<< 5) + (time.hour <<< 11))
end
defp dos_date(time) do
round(time.day + (time.month <<< 5) + ((time.year - 1980) <<< 9))
end
end
# lib/do_it/option.ex
defmodule DoIt.Option do
@moduledoc false
import DoIt.Helper, only: [validate_list_type: 2]
@option_types [:boolean, :count, :integer, :float, :string]
@type t :: %__MODULE__{
name: atom,
type: atom,
description: String.t(),
alias: atom,
default: String.t() | integer | float | boolean,
keep: boolean,
allowed_values: list
}
@enforce_keys [:name, :type, :description]
defstruct [:name, :type, :description, :alias, :default, :keep, :allowed_values]
def validate_definition(%DoIt.Option{} = option) do
option
|> validate_definition_name
|> validate_definition_type
|> validate_definition_description
|> validate_definition_alias
|> validate_definition_keep
|> validate_definition_allowed_values
|> validate_definition_default
end
def validate_definition_name(%DoIt.Option{name: nil}),
do: raise(DoIt.OptionDefinitionError, message: "name is required for option definition")
def validate_definition_name(%DoIt.Option{name: name} = option) when is_atom(name), do: option
def validate_definition_name(%DoIt.Option{name: _}),
do: raise(DoIt.OptionDefinitionError, message: "name must be an atom")
def validate_definition_type(%DoIt.Option{type: nil}),
do: raise(DoIt.OptionDefinitionError, message: "type is required for option definition")
def validate_definition_type(%DoIt.Option{type: type} = option) when type in @option_types,
do: option
def validate_definition_type(%DoIt.Option{type: type}),
do:
raise(DoIt.OptionDefinitionError,
message:
"unrecognized option type '#{type}', allowed types are #{
@option_types
|> Enum.map(&Atom.to_string/1)
|> Enum.join(", ")
}"
)
def validate_definition_description(%DoIt.Option{description: nil}),
do:
raise(DoIt.OptionDefinitionError, message: "description is required for option definition")
def validate_definition_description(%DoIt.Option{description: description} = option)
when is_binary(description),
do: option
def validate_definition_description(%DoIt.Option{description: _}),
do: raise(DoIt.OptionDefinitionError, message: "description must be a string")
def validate_definition_alias(%DoIt.Option{alias: nil} = option), do: option
def validate_definition_alias(%DoIt.Option{alias: alias} = option) when is_atom(alias),
do: option
def validate_definition_alias(%DoIt.Option{alias: _}),
do: raise(DoIt.OptionDefinitionError, message: "alias must be an atom")
def validate_definition_keep(%DoIt.Option{keep: nil} = option), do: option
def validate_definition_keep(%DoIt.Option{type: :count, keep: _}),
do: raise(DoIt.OptionDefinitionError, message: "keep cannot be used with type count")
def validate_definition_keep(%DoIt.Option{keep: keep} = option) when is_boolean(keep),
do: option
def validate_definition_keep(%DoIt.Option{keep: _}),
do: raise(DoIt.OptionDefinitionError, message: "keep must be a boolean")
def validate_definition_allowed_values(%DoIt.Option{allowed_values: nil} = option), do: option
def validate_definition_allowed_values(%DoIt.Option{type: type, allowed_values: _})
when type in [:boolean, :count],
do:
raise(DoIt.OptionDefinitionError,
message: "allowed_values cannot be used with types boolean and count"
)
def validate_definition_allowed_values(
%DoIt.Option{type: type, allowed_values: allowed_values} = option
)
when is_list(allowed_values) do
case validate_list_type(allowed_values, type) do
true ->
option
_ ->
raise DoIt.OptionDefinitionError,
message: "all values in allowed_values must be of type #{Atom.to_string(type)}"
end
end
def validate_definition_allowed_values(%DoIt.Option{allowed_values: _}),
do: raise(DoIt.OptionDefinitionError, message: "allowed_values must be a list")
def validate_definition_default(%DoIt.Option{default: nil} = option), do: option
def validate_definition_default(
%DoIt.Option{type: :string, default: default, allowed_values: nil} = option
)
when is_binary(default),
do: option
def validate_definition_default(
%DoIt.Option{type: :integer, default: default, allowed_values: nil} = option
)
when is_integer(default),
do: option
def validate_definition_default(
%DoIt.Option{type: :float, default: default, allowed_values: nil} = option
)
when is_float(default),
do: option
def validate_definition_default(
%DoIt.Option{type: :boolean, default: default, allowed_values: nil} = option
)
when is_boolean(default),
do: option
def validate_definition_default(
%DoIt.Option{type: :count, default: default, allowed_values: nil} = option
)
when is_integer(default),
do: option
def validate_definition_default(%DoIt.Option{type: type, default: _, allowed_values: nil}),
do:
raise(DoIt.OptionDefinitionError,
message: "default value must be of type #{Atom.to_string(type)}"
)
def validate_definition_default(
%DoIt.Option{default: default, allowed_values: allowed_values} = option
) do
case default in allowed_values do
true ->
option
_ ->
raise DoIt.OptionDefinitionError,
message: "default value must be included in allowed_values"
end
end
def parse_input(options, parsed) do
{
:ok,
options
|> default(parsed)
|> group()
}
end
def default(options, parsed) do
options
|> Enum.filter(&default_filter/1)
|> Enum.reduce(parsed, &default_map/2)
end
def default_filter(%DoIt.Option{default: nil}), do: false
def default_filter(%DoIt.Option{}), do: true
def default_map(%DoIt.Option{name: name, default: default}, parsed) do
case List.keyfind(parsed, name, 0) do
nil -> parsed ++ [{name, default}]
_ -> parsed
end
end
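# Collapses repeated options into lists, preserving the order of first
# appearance, e.g. group(a: 1, b: 2, a: 3) returns [a: [1, 3], b: 2].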
def group(parsed) do
Enum.reduce(parsed, [], fn {key, value}, acc ->
case List.keyfind(acc, key, 0) do
nil -> acc ++ [{key, value}]
{_, found} when is_list(found) -> List.keyreplace(acc, key, 0, {key, found ++ [value]})
{_, found} -> List.keyreplace(acc, key, 0, {key, [found] ++ [value]})
end
end)
end
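# Validates every parsed {key, value} against its option definition; returns
# {:ok, values} when all pass, otherwise {:error, messages} with every failure.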
def validate_input([], _), do: {:ok, []}
def validate_input(options, parsed) do
case parsed
|> Enum.map(fn
{key, value} ->
option = Enum.find(options, fn %DoIt.Option{name: name} -> name == key end)
{option, value}
|> validate_input_value()
|> validate_input_allowed_values()
end)
|> List.flatten()
|> Enum.map(fn {%DoIt.Option{name: name}, value} -> {name, value} end)
|> Enum.split_with(fn
{_, {:error, _}} -> false
_ -> true
end) do
{valid_options, []} ->
{:ok, valid_options}
{_, invalid_options} ->
{
:error,
Enum.map(invalid_options, fn {_, {:error, message}} -> message end)
}
end
end
def validate_input_value({_, {:error, _}} = error), do: error
def validate_input_value({%DoIt.Option{} = option, values}) when is_list(values) do
validate_input_value({option, values}, [])
end
def validate_input_value({%DoIt.Option{type: :integer} = option, value}) when is_integer(value),
do: {option, value}
def validate_input_value({%DoIt.Option{name: name, type: :integer} = option, value}) do
{option, String.to_integer(value)}
rescue
ArgumentError ->
{option, {:error, "invalid integer value '#{value}' for option --#{Atom.to_string(name)}"}}
end
def validate_input_value({%DoIt.Option{type: :float} = option, value}) when is_float(value),
do: {option, value}
def validate_input_value({%DoIt.Option{name: name, type: :float} = option, value}) do
{option, String.to_float(value)}
rescue
ArgumentError ->
{option, {:error, "invalid float value '#{value}' for option --#{Atom.to_string(name)}"}}
end
def validate_input_value({%DoIt.Option{} = option, value}) do
{option, value}
end
def validate_input_value({%DoIt.Option{} = option, [value | values]}, acc) do
case validate_input_value({option, value}) do
{%DoIt.Option{}, {:error, _}} = error ->
error
{%DoIt.Option{}, val} ->
validate_input_value({option, values}, acc ++ [val])
end
end
def validate_input_value({%DoIt.Option{} = option, []}, acc) do
{option, acc}
end
def validate_input_allowed_values({_, {:error, _}} = error), do: error
def validate_input_allowed_values({%DoIt.Option{allowed_values: nil} = option, value}) do
{option, value}
end
def validate_input_allowed_values({%DoIt.Option{} = option, values}) when is_list(values) do
validate_input_allowed_values({option, values}, [])
end
def validate_input_allowed_values(
{%DoIt.Option{name: name, allowed_values: allowed_values} = option, value}
) do
case Enum.find(allowed_values, fn allowed -> value == allowed end) do
nil ->
{option, {:error, "value '#{value}' isn't allowed for option --#{Atom.to_string(name)}"}}
_ ->
{option, value}
end
end
def validate_input_allowed_values({%DoIt.Option{} = option, [value | values]}, acc) do
case validate_input_allowed_values({option, value}) do
{%DoIt.Option{}, {:error, _}} = error ->
error
{%DoIt.Option{}, val} ->
validate_input_allowed_values({option, values}, acc ++ [val])
end
end
def validate_input_allowed_values({%DoIt.Option{} = option, []}, acc) do
{option, acc}
end
end
# lib/dangers_detector.ex
defmodule ExcellentMigrations.DangersDetector do
@moduledoc """
This module finds potentially dangerous or destructive database operations in a given
migration AST.
"""
alias ExcellentMigrations.{
AstParser,
ConfigCommentsParser,
DangersFilter
}
@type ast :: list | tuple | atom | String.t()
@type danger_type ::
:column_added_with_default
| :column_removed
| :column_renamed
| :column_type_changed
| :index_not_concurrently
| :many_columns_index
| :not_null_added
| :operation_delete
| :operation_insert
| :operation_update
| :raw_sql_executed
| :table_dropped
| :table_renamed
@type line :: integer
@doc """
Traverses `ast` and finds potentially dangerous database operations. Returns keyword list
containing danger types and lines where they were detected.
## Parameters
* `ast` is a structure that represents AST of database migration.
It can be obtained e.g. via `Code.string_to_quoted!/1`.
## Examples
iex> source_code = \"""
...> alter table("dumplings") do
...> remove(:taste, :string)
...> remove(:stuffing, :string)
...> end
...> \"""
iex> ast = Code.string_to_quoted!(source_code)
{:alter, [line: 1],
 [
   {:table, [line: 1], ["dumplings"]},
   [
     do: {:__block__, [],
      [
        {:remove, [line: 2], [:taste, :string]},
        {:remove, [line: 3], [:stuffing, :string]}
      ]}
   ]
 ]}
iex> ExcellentMigrations.DangersDetector.detect_dangers(ast, source_code)
[column_removed: 2, column_removed: 3]
"""
@spec detect_dangers(ast, String.t()) :: [{danger_type, line}]
def detect_dangers(ast, source_code) do
parsed_dangers = AstParser.parse(ast)
parsed_safety_assured = ConfigCommentsParser.parse(source_code)
skipped_types = Application.get_env(:excellent_migrations, :skip_checks, [])
DangersFilter.filter_dangers(parsed_dangers, parsed_safety_assured, skipped_types)
end
end
# lib/convert/map.ex
defimpl Timex.Convertable, for: Map do
alias Timex.Convertable
def to_gregorian(map), do: try_convert(map, &Convertable.to_gregorian/1)
def to_julian(map), do: try_convert(map, &Convertable.to_julian/1)
def to_gregorian_seconds(map), do: try_convert(map, &Convertable.to_gregorian_seconds/1)
def to_erlang_datetime(map), do: try_convert(map, &Convertable.to_erlang_datetime/1)
def to_date(map), do: try_convert(map, &Convertable.to_date/1)
def to_datetime(map), do: try_convert(map, &Convertable.to_datetime/1)
def to_unix(map), do: try_convert(map, &Convertable.to_unix/1)
def to_timestamp(map), do: try_convert(map, &Convertable.to_timestamp/1)
defp try_convert(%{"year" => _, "month" => _, "day" => _} = map, fun) do
case convert_keys(map) do
{:error, _} = err ->
err
datetime_map when is_map(datetime_map) ->
year = Map.get(datetime_map, :year)
month = Map.get(datetime_map, :month)
day = Map.get(datetime_map, :day)
case Map.get(datetime_map, :hour) do
nil ->
case Convertable.to_date({year, month, day}) do
{:error, _} = err -> err
date -> fun.(date)
end
hour ->
minute = Map.get(datetime_map, :minute, 0)
second = Map.get(datetime_map, :second, 0)
ms = Map.get(datetime_map, :millisecond, 0)
tz = Map.get(datetime_map, :timezone, "UTC")
case Timex.datetime({{year, month, day}, {hour, minute, second, ms}}, tz) do
{:error, _} = err -> err
datetime -> fun.(datetime)
end
end
end
end
defp try_convert(_, _), do: {:error, :invalid_date}
@allowed_keys_atom [
:year, :month, :day,
:hour, :minute, :min, :mins, :second, :sec, :secs,
:milliseconds, :millisecond, :ms,
:timezone, :tz
]
@allowed_keys Enum.concat(@allowed_keys_atom, Enum.map(@allowed_keys_atom, &Atom.to_string/1))
@valid_keys_map %{
:min => :minute,
:mins => :minute,
:secs => :second,
:sec => :second,
:milliseconds => :millisecond,
:ms => :millisecond,
:tz => :timezone
}
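# Normalizes a datetime-ish map: atom and string keys from @allowed_keys are
# accepted, aliases are canonicalized via @valid_keys_map, and string values
# (other than the timezone) are parsed into integers.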
def convert_keys(map) when is_map(map) do
Enum.reduce(map, %{}, fn
{_, _}, {:error, _} = err -> err
{k, v}, acc when k in @allowed_keys and is_atom(k) and is_integer(v) ->
case Map.get(@valid_keys_map, k) do
nil -> Map.put(acc, k, v)
vk -> Map.put(acc, vk, v)
end
{k, v}, acc when k in @allowed_keys and is_integer(v) ->
ak = String.to_atom(k)
case Map.get(@valid_keys_map, ak) do
nil -> Map.put(acc, ak, v)
vk -> Map.put(acc, vk, v)
end
# Timezone values stay as strings; accept both atom and string keys.
{k, v}, acc when k in [:tz, "tz"] ->
Map.put(acc, :timezone, v)
{k, v}, acc when k in [:timezone, "timezone"] ->
Map.put(acc, :timezone, v)
{k, v}, acc when k in @allowed_keys and is_atom(k) ->
case Integer.parse(v) do
{n, _} ->
case Map.get(@valid_keys_map, k) do
nil -> Map.put(acc, k, n)
vk -> Map.put(acc, vk, n)
end
:error ->
{:error, {:expected_integer, for: k, got: v}}
end
{k, v}, acc when k in @allowed_keys ->
case Integer.parse(v) do
{n, _} ->
ak = String.to_atom(k)
case Map.get(@valid_keys_map, ak) do
nil -> Map.put(acc, ak, n)
vk -> Map.put(acc, vk, n)
end
:error ->
{:error, {:expected_integer, for: k, got: v}}
end
{_, _}, acc -> acc
end)
end
end
# lib/date/date.ex
defmodule Timex.Date do
@moduledoc """
This module represents all functions specific to creating/manipulating/comparing Dates (year/month/day)
"""
defstruct calendar: :gregorian, day: 1, month: 1, year: 0
alias __MODULE__
alias Timex.DateTime
alias Timex.TimezoneInfo
alias Timex.Helpers
use Timex.Constants
import Timex.Macros
@type t :: %__MODULE__{}
@doc """
Returns today's date as a Date struct. If given a timezone, returns whatever "today" is in that timezone
"""
@spec today() :: Date.t | {:error, term}
@spec today(Types.valid_timezone) :: Date.t | {:error, term}
def today, do: now()
def today(%TimezoneInfo{} = tz), do: now(tz)
def today(tz) when is_binary(tz), do: now(tz)
def today(tz) when tz in [:utc, :local], do: now(tz)
def today(_), do: {:error, :invalid_timezone}
@doc """
Returns today's date as a Date struct. If given a timezone, returns whatever "today" is in that timezone
"""
@spec now() :: Date.t | {:error, term}
@spec now(Types.valid_timezone) :: Date.t | {:error, term}
def now, do: from_erl(Helpers.calendar_universal_time())
def now(tz) when is_binary(tz), do: from(DateTime.now(tz))
def now(%TimezoneInfo{} = tz), do: from(DateTime.now(tz))
def now(tz) when tz in [:utc, :local], do: from(DateTime.now(tz))
def now(:days), do: to_days(now())
def now(_), do: {:error, :invalid_timezone}
@doc """
Returns a Date representing the first day of year zero
"""
@spec zero() :: Date.t
def zero, do: from_erl({0, 1, 1})
@doc """
Returns a Date representing the date of the UNIX epoch
"""
@spec epoch() :: Date.t
@spec epoch(:seconds) :: non_neg_integer
def epoch, do: from_erl({1970, 1, 1})
def epoch(:seconds), do: to_seconds(from_erl({1970, 1, 1}), :zero)
def epoch(:secs) do
IO.write :stderr, "warning: :secs is a deprecated unit name, use :seconds instead\n"
epoch(:seconds)
end
@doc """
Converts from a date/time value to a Date struct representing that date
"""
@spec from(Types.valid_datetime | Types.dtz | Types.phoenix_datetime_select_params) :: Date.t | {:error, term}
# From Timex types
def from(%Date{} = date), do: date
def from(%DateTime{year: y, month: m, day: d}), do: %Date{year: y, month: m, day: d}
# From Erlang/Ecto datetime tuples
def from({y,m,d} = date) when is_date(y,m,d),
do: from_erl(date)
def from({{y,m,d} = date, {h,mm,s}}) when is_datetime(y,m,d,h,mm,s),
do: from_erl(date)
def from({{y,m,d} = date, {h,mm,s,ms}}) when is_datetime(y,m,d,h,mm,s,ms),
do: from_erl(date)
# Phoenix datetime select value
def from(%{"year" => _, "month" => _, "day" => _} = dt) do
validated = Enum.reduce(dt, %{}, fn
_, :error -> :error
{key, value}, acc ->
case Integer.parse(value) do
{v, _} -> Map.put(acc, key, v)
:error -> :error
end
end)
case validated do
%{"year" => y, "month" => m, "day" => d} ->
from({{y,m,d},{0,0,0}})
:error ->
{:error, :invalid}
end
end
def from(_), do: {:error, :invalid_date}
@doc """
WARNING: This is here to ease the migration to 2.x, but is deprecated.
Converts a value of the provided type to a Date struct, relative to the reference date (:epoch or :zero)
"""
def from(value, type, ref \\ :epoch)
defdeprecated from(ts, :timestamp, ref), "use Date.from_timestamp/1 instead",
do: from_timestamp(ts, ref)
defdeprecated from(n, :us, ref), "use Date.from_microseconds/1 instead",
do: from_microseconds(n, ref)
defdeprecated from(n, :msecs, ref), "use Date.from_milliseconds/1 instead",
do: from_milliseconds(n, ref)
defdeprecated from(n, :secs, ref), "use Date.from_seconds/1 instead",
do: from_seconds(n, ref)
defdeprecated from(n, :days, ref), "use Date.from_days/1 instead",
do: from_days(n, ref)
@doc """
Like from/1, but more explicit about its inputs (Erlang date/datetime tuples only).
"""
def from_erl({y,m,d}) when is_date(y,m,d) do
case :calendar.valid_date({y,m,d}) do
true -> %Date{year: y, month: m, day: d}
false -> {:error, :invalid_date}
end
end
def from_erl({{y,m,d}, {_,_,_}}) when is_date(y,m,d), do: from_erl({y,m,d})
def from_erl({{y,m,d}, {_,_,_,_}}) when is_date(y,m,d), do: from_erl({y,m,d})
def from_erl(_), do: {:error, :invalid_date}
@doc """
Given an Erlang timestamp, converts it to a Date struct representing the date of that timestamp
"""
@spec from_timestamp(Types.timestamp, :epoch | :zero) :: Date.t | {:error, term}
def from_timestamp(timestamp, ref \\ :epoch)
def from_timestamp({mega,sec,micro} = timestamp, ref)
when is_date_timestamp(mega,sec,micro) and ref in [:epoch, :zero]
do
case ok!(DateTime.from_timestamp(timestamp, ref)) do
{:error, _} = err -> err
{:ok, datetime} -> from(datetime)
end
end
def from_timestamp(_, _), do: {:error, :badarg}
@doc """
Given an integer value representing days since the reference date (:epoch or :zero), returns
a Date struct representing that date
"""
@spec from_days(non_neg_integer, :epoch | :zero) :: Date.t | {:error, term}
def from_days(n, ref \\ :epoch)
def from_days(n, ref) when is_positive_number(n) and ref in [:epoch, :zero] do
case ok!(DateTime.from_days(trunc(n), ref)) do
{:error, _} = err -> err
{:ok, datetime} -> from(datetime)
end
end
def from_days(_, _), do: {:error, :badarg}
@doc """
Given an integer value representing seconds since the reference date (:epoch or :zero), returns
a Date struct representing that date
"""
@spec from_seconds(non_neg_integer, :epoch | :zero) :: Date.t | {:error, term}
def from_seconds(n, ref \\ :epoch)
def from_seconds(n, ref) when is_positive_number(n) and ref in [:epoch, :zero] do
case ok!(DateTime.from_seconds(trunc(n), ref)) do
{:error, _} = err -> err
{:ok, datetime} -> from(datetime)
end
end
def from_seconds(_, _), do: {:error, :badarg}
@doc """
Given an integer value representing milliseconds since the reference date (:epoch or :zero), returns
a Date struct representing that date
"""
@spec from_milliseconds(non_neg_integer, :epoch | :zero) :: Date.t | {:error, term}
def from_milliseconds(n, ref \\ :epoch)
def from_milliseconds(n, ref) when is_positive_number(n) and ref in [:epoch, :zero] do
case ok!(DateTime.from_milliseconds(trunc(n), ref)) do
{:error, _} = err -> err
{:ok, datetime} -> from(datetime)
end
end
def from_milliseconds(_, _), do: {:error, :badarg}
@doc """
Given an integer value representing microseconds since the reference date (:epoch or :zero), returns
a Date struct representing that date
"""
@spec from_microseconds(non_neg_integer, :epoch | :zero) :: Date.t | {:error, term}
def from_microseconds(n, ref \\ :epoch)
def from_microseconds(n, ref) when is_positive_number(n) and ref in [:epoch, :zero] do
case ok!(DateTime.from_microseconds(trunc(n), ref)) do
{:error, _} = err -> err
{:ok, datetime} -> from(datetime)
end
end
def from_microseconds(_, _), do: {:error, :badarg}
@doc """
Convert a date to a timestamp value consumable by the Time module.
See also `diff/2` if you want to specify an arbitrary reference date.
## Examples
iex> #{__MODULE__}.epoch |> #{__MODULE__}.to_timestamp
{0,0,0}
"""
@spec to_timestamp(Date.t) :: Types.timestamp | {:error, term}
@spec to_timestamp(Date.t, :epoch | :zero) :: Types.timestamp | {:error, term}
def to_timestamp(date, ref \\ :epoch)
def to_timestamp(%Date{} = date, ref) when ref in [:epoch, :zero] do
case ok!(to_datetime(date)) do
{:error, _} = err -> err
{:ok, datetime} -> DateTime.to_timestamp(datetime, ref)
end
end
def to_timestamp(_, _), do: {:error, :badarg}
defdelegate to_secs(date), to: __MODULE__, as: :to_seconds
defdelegate to_secs(date, ref), to: __MODULE__, as: :to_seconds
@doc """
Convert a date to an integer number of seconds since Epoch or year 0.
See also `Timex.diff/3` if you want to specify an arbitrary reference date.
## Examples
iex> Timex.date({1999, 1, 2}) |> #{__MODULE__}.to_seconds
915235200
"""
@spec to_seconds(Date.t) :: integer | {:error, term}
@spec to_seconds(Date.t, :epoch | :zero) :: integer | {:error, term}
def to_seconds(date, ref \\ :epoch)
def to_seconds(%Date{} = date, ref) when ref in [:epoch, :zero] do
case ok!(to_datetime(date)) do
{:error, _} = err -> err
{:ok, datetime} -> DateTime.to_seconds(datetime, ref)
end
end
def to_seconds(_, _), do: {:error, :badarg}
@doc """
Convert the date to an integer number of days since Epoch or year 0.
See also `Timex.diff/3` if you want to specify an arbitrary reference date.
## Examples
iex> Timex.date({1970, 1, 15}) |> #{__MODULE__}.to_days
14
"""
@spec to_days(Date.t) :: integer | {:error, term}
@spec to_days(Date.t, :epoch | :zero) :: integer | {:error, term}
def to_days(date, ref \\ :epoch)
def to_days(date, ref) when ref in [:epoch, :zero] do
case ok!(to_datetime(date)) do
{:error, _} = err -> err
{:ok, datetime} -> DateTime.to_days(datetime, ref)
end
end
def to_days(_, _), do: {:error, :badarg}
@doc """
Converts a Date to a DateTime in UTC
"""
@spec to_datetime(Date.t) :: DateTime.t | {:error, term}
def to_datetime(%DateTime{} = dt), do: dt
def to_datetime(%Date{:year => y, :month => m, :day => d}) do
%DateTime{:year => y, :month => m, :day => d, :timezone => %TimezoneInfo{}}
end
def to_datetime(_), do: {:error, :badarg}
@doc """
See docs for Timex.set/2 for details.
"""
@spec set(Date.t, list({atom(), term})) :: Date.t | {:error, term}
def set(%Date{} = date, options) do
validate? = case options |> List.keyfind(:validate, 0, true) do
{:validate, bool} -> bool
_ -> true
end
Enum.reduce(options, date, fn
_option, {:error, _} = err ->
err
option, %Date{} = result ->
case option do
{:validate, _} -> result
{:datetime, {{y, m, d}, {_, _, _}}} ->
if validate? do
%{result |
:year => Timex.normalize(:year, y),
:month => Timex.normalize(:month, m),
:day => Timex.normalize(:day, {y,m,d}),
}
else
%{result | :year => y, :month => m, :day => d}
end
{:date, {y, m, d}} ->
if validate? do
{yn,mn,dn} = Timex.normalize(:date, {y,m,d})
%{result | :year => yn, :month => mn, :day => dn}
else
%{result | :year => y, :month => m, :day => d}
end
{:day, d} ->
if validate? do
%{result | :day => Timex.normalize(:day, {result.year, result.month, d})}
else
%{result | :day => d}
end
{name, val} when name in [:year, :month] ->
if validate? do
Map.put(result, name, Timex.normalize(name, val))
else
Map.put(result, name, val)
end
{:ms, _} ->
IO.write :stderr, "warning: using :ms with shift is deprecated, use :millisecond instead"
result
{name, _} when name in [:time, :timezone, :hour, :minute, :second] ->
result
{option_name, _} ->
{:error, {:invalid_option, option_name}}
end
end)
end
@doc """
See docs for `Timex.compare/3`
"""
defdelegate compare(a, b), to: Timex.Comparable
defdelegate compare(a, b, granularity), to: Timex.Comparable
@doc """
See docs for `Timex.diff/3`
"""
defdelegate diff(a, b), to: Timex.Comparable
defdelegate diff(a, b, granularity), to: Timex.Comparable
@doc """
Shifts the given Date based on the provided options.
See Timex.shift/2 for more information.
"""
@spec shift(Date.t, list({atom(), term})) :: Date.t | {:error, term}
def shift(%Date{} = date, [{_, 0}]), do: date
def shift(%Date{} = date, [timestamp: {0,0,0}]), do: date
def shift(%Date{} = date, options) do
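# A Date carries no time, so sub-day shifts are dropped: hour/minute/second/
# millisecond amounts must span at least a full 24 hours to be applied, while
# day/week/month/year amounts always pass through.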
allowed_options = Enum.filter(options, fn
{:hours, value} when value >= 24 or value <= -24 -> true
{:hours, _} -> false
{:mins, value} when value >= 24*60 or value <= -24*60 ->
IO.write :stderr, "warning: :mins is a deprecated unit name, use :minutes instead"
true
{:mins, _} ->
IO.write :stderr, "warning: :mins is a deprecated unit name, use :minutes instead"
false
{:minutes, value} when value >= 24*60 or value <= -24*60 -> true
{:minutes, _} -> false
{:secs, value} when value >= 24*60*60 or value <= -24*60*60 ->
IO.write :stderr, "warning: :secs is a deprecated unit name, use :seconds instead"
true
{:secs, _} ->
IO.write :stderr, "warning: :secs is a deprecated unit name, use :seconds instead"
false
{:seconds, value} when value >= 24*60*60 or value <= -24*60*60 -> true
{:seconds, _} -> false
{:msecs, value} when value >= 24*60*60*1000 or value <= -24*60*60*1000 ->
IO.write :stderr, "warning: :msecs is a deprecated unit name, use :milliseconds instead"
true
{:msecs, _} ->
IO.write :stderr, "warning: :msecs is a deprecated unit name, use :milliseconds instead"
false
{:milliseconds, value} when value >= 24*60*60*1000 or value <= -24*60*60*1000 -> true
{:milliseconds, _} -> false
{_type, _value} -> true
end)
case DateTime.shift(to_datetime(date), allowed_options) do
{:error, _} = err -> err
datetime -> from(datetime)
end
end
def shift(_, _), do: {:error, :badarg}
end
# lib/advent_of_code/day_14.ex
defmodule AdventOfCode.Day14 do
@typep pair :: {byte(), byte()}
@typep pairs :: %{optional(pair()) => integer()}
@typep rule :: {pair(), {pair(), pair()}}
@typep rules :: %{optional(pair()) => {pair(), pair()}}
@spec part1([binary()]) :: integer()
@spec part2([binary()]) :: integer()
def part1(args), do: parse_args(args) |> run_for(10) |> frequency_difference()
def part2(args), do: parse_args(args) |> run_for(40) |> frequency_difference()
@spec run_for({pairs(), rules()}, integer()) :: pairs()
defp run_for({pairs, _}, 0), do: pairs
defp run_for({pairs, rules}, count) when count > 0 do
new_pairs = expand_pairs(pairs, rules)
run_for({new_pairs, rules}, count - 1)
end
@spec expand_pairs(pairs(), rules()) :: pairs()
defp expand_pairs(pairs, rules) do
Enum.reduce(pairs, Map.new(), fn {pair, count}, map ->
{pair_a, pair_b} = Map.get(rules, pair)
map |> inc_map_count(pair_a, count) |> inc_map_count(pair_b, count)
end)
end
@spec frequency_difference(pairs()) :: integer()
defp frequency_difference(pairs) do
Enum.reduce(pairs, Map.new(), fn {{_, pair_b}, count}, map ->
# Add only one half of the pair to avoid double-counting characters.
# Counting pair_b works here; counting pair_a instead always comes up 1 short.
inc_map_count(map, pair_b, count)
end)
|> Enum.map(&elem(&1, 1))
|> Enum.min_max()
|> Kernel.then(fn {min, max} -> max - min end)
end
@spec inc_map_count(pairs(), pair(), integer()) :: pairs()
defp inc_map_count(map, key, count), do: Map.update(map, key, count, &(&1 + count))
# The heavy lifting is done here, the template and rules are compiled
# into frequencies and the two new pairs directly given by an existing pair
@spec parse_args([binary()]) :: {pairs(), rules()}
defp parse_args(args) do
{[template], [_ | rules]} = Enum.split(args, 1)
pairs = compile_template(template)
rules = Enum.map(rules, &parse_rule/1) |> Map.new()
{pairs, rules}
end
@spec compile_template(binary()) :: pairs()
defp compile_template(template) do
String.to_charlist(template)
|> Enum.chunk_every(2, 1, :discard)
|> Enum.map(&List.to_tuple/1)
|> Enum.frequencies()
|> Map.new()
end
@spec parse_rule(binary()) :: rule()
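# A rule line has the shape "AB -> C": two pair bytes, the literal " -> "
# (4 bytes), then the byte to insert between them.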
defp parse_rule(<<left, right, _::binary-size(4), into>>),
do: {{left, right}, {{left, into}, {into, right}}}
end
# lib/hologram/template/embedded_expression_parser.ex
defmodule Hologram.Template.EmbeddedExpressionParser do
alias Hologram.Compiler.{Context, Transformer}
alias Hologram.Compiler.Parser, as: CompilerParser
alias Hologram.Template.{TokenHTMLEncoder, Tokenizer}
alias Hologram.Template.VDOM.{Expression, TextNode}
@doc """
Splits a string which may contain embedded expressions into a list of expression nodes and text nodes.
"""
def parse(str, %Context{} = context) do
acc = %{
nodes: [],
num_open_braces: 0,
prev_tokens: [],
token_buffer: []
}
Tokenizer.tokenize(str)
|> assemble_nodes(:text, acc, context)
|> Map.get(:nodes)
end
# status is one of: :text, :expression
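# A simple state machine: `{` switches from :text to :expression, nested braces
# are tracked in num_open_braces, and the matching top-level `}` flushes the
# buffered tokens as an Expression node before returning to :text.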
defp assemble_nodes(tokens, status, acc, context)
defp assemble_nodes([], :text, acc, _) do
maybe_add_text_node(acc)
end
# DEFER: implement this case (raise an error)
# defp assemble_nodes([], :expression, acc, _)
defp assemble_nodes([{:symbol, :"{"} = token | rest], :text, acc, context) do
acc =
acc
|> maybe_add_text_node()
|> increment_num_open_braces()
|> add_prev_token(token)
assemble_nodes(rest, :expression, acc, context)
end
defp assemble_nodes([{:symbol, :"{"} = token | rest], :expression, acc, context) do
acc
|> increment_num_open_braces()
|> assemble_node_part(token, rest, :expression, context)
end
defp assemble_nodes(
[{:symbol, :"}"} = token | rest],
:expression,
%{num_open_braces: 1} = acc,
context
) do
acc =
acc
|> maybe_add_expression_part(context)
|> decrement_num_open_braces()
|> add_prev_token(token)
assemble_nodes(rest, :text, acc, context)
end
defp assemble_nodes([{:symbol, :"}"} = token | rest], :expression, acc, context) do
acc
|> decrement_num_open_braces()
|> assemble_node_part(token, rest, :expression, context)
end
defp assemble_nodes([token | rest], :text, acc, context) do
assemble_node_part(acc, token, rest, :text, context)
end
defp assemble_nodes([token | rest], :expression, acc, context) do
assemble_node_part(acc, token, rest, :expression, context)
end
defp add_prev_token(acc, token) do
%{acc | prev_tokens: acc.prev_tokens ++ [token]}
end
defp assemble_node_part(acc, token, rest, type, context) do
acc = acc |> buffer_token(token) |> add_prev_token(token)
assemble_nodes(rest, type, acc, context)
end
defp buffer_token(acc, token) do
%{acc | token_buffer: acc.token_buffer ++ [token]}
end
defp decrement_num_open_braces(acc) do
%{acc | num_open_braces: acc.num_open_braces - 1}
end
defp flush_token_buffer(acc) do
tokens = acc.token_buffer
acc = %{acc | token_buffer: []}
{tokens, acc}
end
defp get_ir(code, context) do
CompilerParser.parse!("{#{code}}")
|> Transformer.transform(context)
end
defp increment_num_open_braces(acc) do
%{acc | num_open_braces: acc.num_open_braces + 1}
end
defp maybe_add_expression_part(acc, context) do
{tokens, acc} = flush_token_buffer(acc)
if Enum.any?(tokens) do
code = TokenHTMLEncoder.encode(tokens)
node = %Expression{ir: get_ir(code, context)}
%{acc | nodes: acc.nodes ++ [node]}
else
acc
end
end
defp maybe_add_text_node(acc) do
{tokens, acc} = flush_token_buffer(acc)
if Enum.any?(tokens) do
node = %TextNode{content: TokenHTMLEncoder.encode(tokens)}
%{acc | nodes: acc.nodes ++ [node]}
else
acc
end
end
end
# exercises/practice/simple-cipher/.meta/example.ex
defmodule SimpleCipher do
@alphabet "abcdefghijklmnopqrstuvwxyz" |> String.graphemes()
@alphabet_size @alphabet |> length
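# Unrolls a lookup clause for every (letter, key letter) combination at compile
# time: each key letter rotates the alphabet, so encode_char/2 and decode_char/2
# become direct pattern matches with no runtime shifting.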
for key_char <- @alphabet do
shifted_alphabet =
Stream.cycle(@alphabet)
|> Stream.drop_while(&(&1 != key_char))
|> Enum.take(@alphabet_size)
for {plain, cipher} <- Enum.zip(@alphabet, shifted_alphabet) do
defp encode_char(unquote(plain), unquote(key_char)), do: unquote(cipher)
defp decode_char(unquote(cipher), unquote(key_char)), do: unquote(plain)
end
end
defp encode_char(plain, _), do: plain
defp decode_char(cipher, _), do: cipher
defp encode_char({plain, key}), do: encode_char(plain, key)
defp decode_char({cipher, key}), do: decode_char(cipher, key)
@doc """
Given a `plaintext` and `key`, encode each character of the `plaintext` by
shifting it by the corresponding letter in the alphabet shifted by the number
of letters represented by the `key` character, repeating the `key` if it is
shorter than the `plaintext`.
For example, for the letter 'd', the alphabet is rotated to become:
defghijklmnopqrstuvwxyzabc
You would encode the `plaintext` by taking the current letter and mapping it
to the letter in the same position in this rotated alphabet.
abcdefghijklmnopqrstuvwxyz
defghijklmnopqrstuvwxyzabc
"a" becomes "d", "t" becomes "w", etc...
Each letter in the `plaintext` will be encoded with the alphabet of the `key`
character in the same position. If the `key` is shorter than the `plaintext`,
repeat the `key`.
Example:
plaintext = "testing"
key = "abc"
The key should repeat to become the same length as the text, becoming
"abcabca". If the key is longer than the text, only use as many letters of it
as are necessary.
"""
def encode(plaintext, key) do
convert_keystream(plaintext, key, &encode_char/1)
end
@doc """
Given a `ciphertext` and `key`, decode each character of the `ciphertext` by
finding the corresponding letter in the alphabet shifted by the number of
letters represented by the `key` character, repeating the `key` if it is
shorter than the `ciphertext`.
The same rules for key length and shifted alphabets apply as in `encode/2`,
but you will go the opposite way, so "d" becomes "a", "w" becomes "t",
etc..., depending on how much you shift the alphabet.
"""
def decode(ciphertext, key) do
convert_keystream(ciphertext, key, &decode_char/1)
end
defp convert_keystream(text, key, converter) do
keystream = key |> String.graphemes() |> Stream.cycle()
text
|> String.graphemes()
|> Enum.zip(keystream)
|> Enum.map_join(converter)
end
@doc """
Generate a random key of a given length. It should contain lowercase letters only.
"""
def generate_key(length) do
for <<byte <- :crypto.strong_rand_bytes(length)>> do
delta = rem(byte, ?z - ?a + 1)
<<?a + delta>>
end
|> Enum.join()
end
end
# lib/component/builder.ex
defmodule Component.Builder do
@moduledoc """
Conveniences for building components.
This module can be `use`-d into a module in order to build a component
pipeline:
defmodule MyApp do
use Component.Builder
component Component.Logger, some: :option
component :a_component_function
component SomethingElse
end
Multiple components can be defined with the `component/2` macro, forming a
pipeline. The components in the pipeline will be executed in the order
they've been added through the `component/2` macro.
## Component behaviour
Internally, `Component.Builder` implements the `Component` behaviour, which
means both the `call/2` and `respond/2` functions are defined.
In the example above,
calling `MyApp.call/2` will in turn call the following functions in order:
- `Component.Logger.call/2`
- `a_component_function/2`
- `SomethingElse.call/2`
- `SomethingElse.respond/2`
- `Component.Logger.respond/2`
- `MyApp.respond/2`
`respond/2` can be overridden if there is something you'd like to do with the
final conn object before returning it. This should be avoided in favor of a
component module which implements the `respond/2` function.
"""
@type component :: module
@doc false
defmacro __using__(opts) do
quote do
use Component
@component_builder_opts unquote(opts)
def call(conn, opts), do: component_builder_call(conn, opts)
defoverridable [call: 2]
import Component.Builder, only: [component: 1, component: 2]
Module.register_attribute(__MODULE__, :components, accumulate: true)
@before_compile Component.Builder
end
end
@doc false
defmacro __before_compile__(env) do
components = Module.get_attribute(env.module, :components)
builder_opts = Module.get_attribute(env.module, :component_builder_opts)
{conn, body} = Component.Builder.compile(env, components, builder_opts)
quote do
defp component_builder_call(unquote(conn), _opts), do: unquote(body)
end
end
@doc """
A macro that stores a new component. `opts` will be passed unchanged to the
component's `call/2` and `respond/2` functions.
"""
defmacro component(component, opts \\ [])
defmacro component(component, opts) do
quote do
@components {unquote(component), unquote(opts), true}
end
end
@doc """
Compiles a component pipeline.
Each element of the component pipeline (according to the type signature of
this function) has the form:
{component_name, options, guards}
Note that this function expects a reversed pipeline (with the last component that
has to be called coming first in the pipeline).
The function returns a tuple with the first element being a quoted reference
to the connection and the second element being the compiled quoted pipeline.
## Examples
Component.Builder.compile(env, [
{Component.Logger, [], true}, # no guards
{GuardedComponent, [], quote(do: a when is_binary(a))}
], [])
"""
@spec compile(
Macro.Env.t,
[{component, Component.opts, Macro.t}],
Keyword.t
) :: {Macro.t, Macro.t}
def compile(env, pipeline, builder_opts) do
conn = quote do: conn
pipeline = Enum.map(pipeline, &init_component(&1))
compiled_pipeline = Enum.reduce(
Enum.reverse(pipeline),
conn,
"e_component_calls(&1, &2, env, builder_opts)
)
compiled_pipeline = Enum.reduce(
pipeline,
compiled_pipeline,
"e_component_responds(&1, &2, env, builder_opts)
)
{conn, compiled_pipeline}
end
# private
defp compile_guards(call, true) do
call
end
defp compile_guards(call, guards) do
quote do
case true do
true when unquote(guards) -> unquote(call)
true -> conn
end
end
end
# Initializes the options of a component at compile time.
defp init_component({component, opts, guards}) do
case Atom.to_charlist(component) do
~c"Elixir." ++ _ ->
init_module_component(component, opts, guards)
_ ->
init_fun_component(component, opts, guards)
end
end
defp init_fun_component(component, opts, guards) do
{:function, component, opts, guards}
end
defp init_module_component(component, opts, guards) do
initialized_opts = component.init(opts)
Enum.each([:call, :respond], fn(fun) ->
if !function_exported?(component, fun, 2) do
raise ArgumentError,
message: "#{inspect component} component must implement #{fun}/2"
end
end)
{:module, component, initialized_opts, guards}
end
defp quote_component_call(:function, component, opts) do
quote do: unquote(component)(conn, unquote(Macro.escape(opts)))
end
defp quote_component_call(:module, component, opts) do
quote do: unquote(component).call(conn, unquote(Macro.escape(opts)))
end
defp quote_component_calls(
{component_type, component, opts, guards},
acc,
_env,
_builder_opts
) do
call = quote_component_call(component_type, component, opts)
wrap_existing_ast(call, guards, acc)
end
defp quote_component_responds(
{:function, _component, _opts, _guards},
acc,
_env,
_builder_opts
) do
acc
end
defp quote_component_responds(
{:module, component, opts, guards},
acc,
_env,
_builder_opts
) do
call =
quote do
unquote(component).respond(conn, unquote(Macro.escape(opts)))
end
wrap_existing_ast(call, guards, acc)
end
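# Splices the pipeline built so far into the quoted expression in place of its
# first argument (the `conn` placeholder), so each step wraps the previous one.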
defp wrap_existing_ast(call, guards, acc) do
{fun, meta, [_arg1, opts]} = quote do: unquote(compile_guards(call, guards))
{fun, [generated: true] ++ meta, [acc, opts]}
end
end
# lib/markright/collectors/ogp_twitter.ex
defmodule Markright.Collectors.OgpTwitter do
@moduledoc ~S"""
Collector that basically builds the Open Graph Protocol and Twitter Card.
Typical usage:
```elixir
defmodule Sample do
use Markright.Collector, collectors: Markright.Collectors.OgpTwitter
def on_ast(%Markright.Continuation{ast: {tag, _, _}} = cont), do: tag
end
```
## Examples
```xml
<meta name="twitter:card" content="summary" />
<meta property="og:type" content="object" />
<meta name="twitter:image:src" property="og:image" content="https://avatars2..." />
<meta name="twitter:site" property="og:site_name" content="@github" />
<meta name="twitter:title" property="og:title" content="TITLE" />
<meta name="twitter:description" property="og:description" content="DESCRIPTION" />
```
"""
@behaviour Markright.Collector
def on_ast(%Markright.Continuation{ast: ast} = _cont, acc) do
case ast do
{:img, %{src: href}, _} -> Keyword.put_new(acc, :image, href)
{:figure, %{}, [{:img, %{src: href}, _} | _]} -> Keyword.put_new(acc, :image, href)
{:p, _, text} -> Keyword.put_new(acc, :description, text)
{:h1, _, text} -> Keyword.put_new(acc, :title, text)
{:h2, _, text} -> Keyword.put_new(acc, :title2, text)
_ -> acc
end
end
def afterwards(acc, opts) do
title = acc[:title] || acc[:title2]
description = acc[:description]
image = acc[:image]
[
{:meta, %{name: "twitter:card", content: "summary"}, nil},
{:meta, %{property: "og:type", content: "object"}, nil},
{:meta, %{name: "twitter:image:src", property: "og:image", content: image}, nil},
{:meta, %{name: "twitter:site", property: "og:site_name", content: opts[:site] || "★★★"},
nil},
{:meta, %{name: "twitter:title", property: "og:title", content: title}, nil},
{:meta, %{name: "twitter:description", property: "og:description", content: description},
nil}
]
end
end
|
lib/markright/collectors/ogp_twitter.ex
| 0.86609
| 0.792946
|
ogp_twitter.ex
|
starcoder
|
defmodule Zaryn.Crypto.Ed25519 do
@moduledoc false
alias __MODULE__.LibSodiumPort
@doc """
Generate an Ed25519 key pair
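Seeds shorter than 32 bytes are first hashed with SHA-256, and longer seeds
are truncated to their first 32 bytes. A sketch (key bytes omitted, since
they are deterministic for a given seed):
    {pub, priv} = Zaryn.Crypto.Ed25519.generate_keypair(:crypto.strong_rand_bytes(32))
    {byte_size(pub), byte_size(priv)}
    # => {32, 32}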
"""
@spec generate_keypair(binary()) :: {binary(), binary()}
def generate_keypair(seed) when is_binary(seed) and byte_size(seed) < 32 do
seed = :crypto.hash(:sha256, seed)
do_generate_keypair(seed)
end
def generate_keypair(seed) when is_binary(seed) and byte_size(seed) > 32 do
do_generate_keypair(:binary.part(seed, 0, 32))
end
def generate_keypair(seed) when is_binary(seed) do
do_generate_keypair(seed)
end
defp do_generate_keypair(seed) do
:crypto.generate_key(
:eddsa,
:ed25519,
seed
)
end
@doc """
Convert an Ed25519 public key into an X25519 public key
"""
@spec convert_to_x25519_public_key(binary()) :: binary()
def convert_to_x25519_public_key(ed25519_public_key) do
{:ok, x25519_pub} = LibSodiumPort.convert_public_key_to_x25519(ed25519_public_key)
x25519_pub
end
@doc """
Convert an Ed25519 secret key into an X25519 secret key
"""
@spec convert_to_x25519_private_key(binary()) :: binary()
def convert_to_x25519_private_key(ed25519_private_key) do
{pub, pv} = generate_keypair(ed25519_private_key)
extended_secret_key = <<pv::binary, pub::binary>>
{:ok, x25519_pv} = LibSodiumPort.convert_secret_key_to_x25519(extended_secret_key)
x25519_pv
end
@doc """
Sign a message with the given Ed25519 private key
"""
@spec sign(binary(), iodata()) :: binary()
def sign(_key = <<private_key::binary-32>>, data) when is_binary(data) or is_list(data) do
:crypto.sign(:eddsa, :sha512, data, [private_key, :ed25519])
end
@doc """
Verify that a signature over the given data was produced by the given Ed25519 public key
"""
@spec verify?(binary(), binary(), binary()) :: boolean()
def verify?(<<public_key::binary-32>>, data, sig)
when (is_binary(data) or is_list(data)) and is_binary(sig) do
:crypto.verify(:eddsa, :sha512, data, sig, [public_key, :ed25519])
end
end
|
lib/zaryn/crypto/ed25519.ex
| 0.728652
| 0.411879
|
ed25519.ex
|
starcoder
|
defmodule APIacAuthMTLS do
@behaviour Plug
@behaviour APIac.Authenticator
@moduledoc """
An `APIac.Authenticator` plug implementing **section 2** of
OAuth 2.0 Mutual-TLS Client Authentication and Certificate-Bound Access Tokens
([RFC8705](https://tools.ietf.org/html/rfc8705))
Using this scheme, authentication relies on two elements:
- TLS client certificate authentication
- the `client_id` parameter of the `application/x-www-form-urlencoded` body
TLS client certificate authentication may be performed using one of two methods:
- authentication with a certificate issued by a Certificate Authority (CA) which is called
[PKI Mutual-TLS Method](https://tools.ietf.org/html/rfc8705#section-2.1).
In this case, one of the following certificate attribute is checked against
this attribute registered for the `client_id`:
- Distinguished name
- SAN DNS
- SAN URI
- SAN IP address
- SAN email
- authentication with a self-signed, self-issued certificate which is called
[Self-Signed Certificate Mutual-TLS Method](https://tools.ietf.org/html/rfc8705#section-2.2).
In this case, the certificate is checked against the **subject public key info**
of the registered certificates of the `client_id`
## Plug options
- `:allowed_methods`: one of `:pki`, `:selfsigned` or `:both`. No default value,
mandatory option
- `:pki_callback`: a
`(String.t -> String.t | {tls_client_auth_subject_value(), String.t()} | nil)`
function that takes the `client_id` as a parameter and returns its DN as a `String.t()` or
`{tls_client_auth_subject_value(), String.t()}` or `nil` if no DN is registered for
that client. When no `t:tls_client_auth_subject_value/0` is specified, defaults to
`:tls_client_auth_subject_dn`
- `:selfsigned_callback`: a `(String.t -> binary() | [binary()] | nil)`
function that takes the `client_id` as a parameter and returns the certificate
or the list of the certificate for `the client_id`, or `nil` if no certificate
is registered for that client. Certificates can be returned in DER-encoded format, or
native OTP certificate structure
- `:cert_data_origin`: origin of the peer cert data. Can be set to:
- `:native`: the peer certificate data is retrieved from the connection. Only works when
this plug is used at the TLS termination endpoint. This is the *default value*
- `{:header_param, "Header-Name"}`: the peer certificate data, and more specifically the
parameter upon which the decision is to be made, is retrieved from an HTTP header. When
using this feature, **make sure** that this header is filtered by an upstream system
(reverse-proxy...) so that malicious users cannot inject the value themselves. For instance,
the configuration could be set to: `{:header_param, "SSL_CLIENT_DN"}`. If there are several
values for the parameter (for instance several `dNSName`), they must be sent in
separate headers. Not compatible with self-signed certificate authentication
- `:header_cert`: the whole certificate is forwarded in the `"Client-Cert"` HTTP header
as a Base64 encoded value of the certificate's DER serialization, in conformance with
[Client-Cert HTTP Header: Conveying Client Certificate Information from TLS Terminating Reverse Proxies to Origin Server Applications (draft-bdc-something-something-certificate-01)](https://tools.ietf.org/html/draft-bdc-something-something-certificate-01)
- `{:header_cert, "Header-Name"}`: the whole certificate is forwarded in the
"Header-Name" HTTP header as a Base64 encoded value of the certificate's DER serialization
- `{:header_cert_pem, "Header-Name"}`: the whole certificate is forwarded in the
"Header-Name" as a PEM-encoded string and retrieved by this plug
- `:set_error_response`: function called when authentication failed. Defaults to
`APIacAuthMTLS.send_error_response/3`
- `:error_response_verbosity`: one of `:debug`, `:normal` or `:minimal`.
Defaults to `:normal`
## Example
```elixir
plug APIacAuthMTLS, allowed_methods: :both,
selfsigned_callback: &selfsigned_certs/1,
pki_callback: &get_dn/1
# further
defp selfsigned_certs(client_id) do
:ets.lookup_element(:clients, client_id, 5)
end
defp get_dn("client-1") do
"/C=US/ST=ARI/L=Chicago/O=Agora/CN=API access certificate"
end
defp get_dn(_), do: nil
```
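When this plug runs behind a TLS-terminating reverse proxy, the certificate
origin can be switched to a header instead (the header name below is
illustrative and must match the proxy's configuration):
```elixir
plug APIacAuthMTLS, allowed_methods: :pki,
pki_callback: &get_dn/1,
cert_data_origin: {:header_cert_pem, "X-SSL-Client-Cert"}
```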
## Configuring TLS for `#{inspect(__MODULE__)}` authentication
Plugs can authenticate requests on elements contained in the HTTP request.
Mutual TLS authentication, however, occurs on the TLS layer and the authentication
context is only then passed to the plug (`peer_data`).
Usually, when using TLS, only the server is authenticated by the client. But client
authentication by the server can also be activated on an TLS-enabled server:
in this case, both the server and the clients authenticate to each other.
Client authentication can either be optional or mandatory.
When a TLS-enabled server authenticates a client, it checks the client's certificate
against its list of known certificate authorities (CA). CAs are trusted root certificates.
The list of CAs can be changed through configuration.
Note that by default, the Erlang TLS stack does not accept self-signed certificates.
All TLS options are documented in the
[Erlang SSL module documentation](http://erlang.org/doc/man/ssl.html).
### Enabling TLS client authentication
This table summarizes which options are to be activated *on the server*:
| Use-case | TLS options |
|-------------------------------------|---------------------------------------------------------|
| No client authentication (default) | (no specific option to set) |
| Optional client authentication | `verify: :verify_peer` |
| Mandatory client authentication | `verify: :verify_peer`<br>`fail_if_no_peer_cert: true` |
### Example with `plug_cowboy`
To enable optional TLS client authentication:
```elixir
Plug.Cowboy.https(MyPlug, [],
port: 8443,
keyfile: "priv/ssl/key.pem",
certfile: "priv/ssl/cer.pem",
verify: :verify_peer)
```
To enable mandatory TLS client authentication:
```elixir
Plug.Cowboy.https(MyPlug, [],
port: 8443,
keyfile: "priv/ssl/key.pem",
certfile: "priv/ssl/cer.pem",
verify: :verify_peer,
fail_if_no_peer_cert: true)
```
### Allowing TLS connection of clients with self-signed certificates
By default, Erlang's TLS stack rejects self-signed client certificates. To allow it,
use the `verify_fun` TLS parameter with the following function:
```elixir
defp verify_fun_selfsigned_cert(_, {:bad_cert, :selfsigned_peer}, user_state),
do: {:valid, user_state}
defp verify_fun_selfsigned_cert(_, {:bad_cert, _} = reason, _),
do: {:fail, reason}
defp verify_fun_selfsigned_cert(_, {:extension, _}, user_state),
do: {:unknown, user_state}
defp verify_fun_selfsigned_cert(_, :valid, user_state),
do: {:valid, user_state}
defp verify_fun_selfsigned_cert(_, :valid_peer, user_state),
do: {:valid, user_state}
```
Example with `plug_cowboy`:
```elixir
Plug.Cowboy.https(MyPlug, [],
port: 8443,
keyfile: "priv/ssl/key.pem",
certfile: "priv/ssl/cer.pem",
verify: :verify_peer,
verify_fun: {&verify_fun_selfsigned_cert/3, []})
```
## Security considerations
In addition to the security considerations listed in the RFC, consider that:
- Before TLS1.3, client authentication may leak information
([further information](https://blog.funkthat.com/2018/10/tls-client-authentication-leaks-user.html))
- Any CA can sign for any DN (as for any other certificate attribute). Though this is
a well-known security limitation of the X509 infrastructure, issuing certificates with
rogue DNs may be harder to detect (because less monitored)
## Other considerations
When activating TLS client authentication, be aware that some browser user
interfaces may prompt the user, in an unpredictable manner, for certificate
selection. You may want to consider starting a TLS-authentication-enabled
endpoint on another port (i.e. one port for web browsing, another one for
API access).
"""
@type tls_client_auth_subject_value ::
:tls_client_auth_subject_dn
| :tls_client_auth_san_dns
| :tls_client_auth_san_uri
| :tls_client_auth_san_ip
| :tls_client_auth_san_email
@impl Plug
def init(opts) do
if is_nil(opts[:allowed_methods]), do: raise(":allowed_methods mandatory option not set")
case {opts[:allowed_methods], opts[:pki_callback], opts[:selfsigned_callback]} do
{method, pki_callback, _}
when method in [:pki, :both] and not is_function(pki_callback, 1) ->
raise "Missing :pki_callback option"
{method, _, selfsigned_callback}
when method in [:selfsigned, :both] and not is_function(selfsigned_callback, 1) ->
raise "Missing :selfsigned_callback option"
_ ->
:ok
end
opts
|> Enum.into(%{})
|> Map.put_new(:cert_data_origin, :native)
|> Map.put_new(:set_error_response, &APIacAuthMTLS.send_error_response/3)
|> Map.put_new(:error_response_verbosity, :normal)
end
@impl Plug
@spec call(Plug.Conn.t(), Plug.opts()) :: Plug.Conn.t()
def call(conn, %{} = opts) do
if APIac.authenticated?(conn) do
conn
else
do_call(conn, opts)
end
end
defp do_call(conn, opts) do
with {:ok, conn, credentials} <- extract_credentials(conn, opts),
{:ok, conn} <- validate_credentials(conn, credentials, opts) do
conn
else
{:error, conn, %APIac.Authenticator.Unauthorized{} = error} ->
opts[:set_error_response].(conn, error, opts)
end
end
@doc """
`APIac.Authenticator` credential extractor callback
The returned credentials are a 2-tuple where:
- the first element is the `client_id`
- the second element is the parsed certificate (an OTP certificate record), or
the list of header values when the certificate data comes from an HTTP header
"""
@impl APIac.Authenticator
def extract_credentials(conn, opts) do
with {:ok, conn, client_id} <- get_client_id(conn),
{:ok, creds} <- do_extract_credentials(conn, opts) do
{:ok, conn, {client_id, creds}}
else
{:error, conn, reason} ->
{:error, conn,
%APIac.Authenticator.Unauthorized{authenticator: __MODULE__, reason: reason}}
end
end
defp get_client_id(%Plug.Conn{body_params: %Plug.Conn.Unfetched{}} = conn) do
plug_parser_opts =
Plug.Parsers.init(
parsers: [:urlencoded],
pass: ["application/x-www-form-urlencoded"]
)
conn
|> Plug.Parsers.call(plug_parser_opts)
|> get_client_id()
rescue
Plug.Parsers.UnsupportedMediaTypeError ->
{:error, conn, :unsupported_media_type}
end
defp get_client_id(conn) do
client_id = conn.body_params["client_id"]
cond do
client_id == nil ->
{:error, conn, :credentials_not_found}
OAuth2Utils.valid_client_id_param?(client_id) == false ->
{:error, conn, :invalid_client_id}
true ->
{:ok, conn, client_id}
end
end
defp do_extract_credentials(conn, %{cert_data_origin: :native}) do
case Plug.Conn.get_peer_data(conn)[:ssl_cert] do
nil ->
{:error, conn, :no_client_cert_authentication}
raw_tls_cert ->
{:ok, X509.Certificate.from_der!(raw_tls_cert)}
end
end
defp do_extract_credentials(conn, %{cert_data_origin: {:header_param, header_name}}) do
case Plug.Conn.get_req_header(conn, header_name) do
[] ->
{:error, conn, :no_header_value}
headers ->
{:ok, headers}
end
end
defp do_extract_credentials(conn, %{cert_data_origin: :header_cert}) do
do_extract_credentials(conn, %{cert_data_origin: {:header_cert, "Client-Cert"}})
end
defp do_extract_credentials(conn, %{cert_data_origin: {:header_cert, header_name}}) do
case Plug.Conn.get_req_header(conn, String.downcase(header_name)) do
[] ->
{:error, conn, :no_cert_header_value}
[b64_der_cert] ->
with {:ok, der_cert} <- Base.decode64(b64_der_cert),
{:ok, cert} <- X509.Certificate.from_der(der_cert)
do
{:ok, cert}
else
:error ->
{:error, conn, :invalid_b64_encoding_der_cert}
{:error, _} ->
{:error, conn, :invalid_der_cert}
end
[_ | _] ->
{:error, conn, :multiple_certs_in_header}
end
end
defp do_extract_credentials(conn, %{cert_data_origin: {:header_cert_pem, header_name}}) do
case Plug.Conn.get_req_header(conn, String.downcase(header_name)) do
[] ->
{:error, conn, :no_cert_header_value}
[pem_cert] ->
case X509.Certificate.from_pem(pem_cert) do
{:ok, cert} ->
{:ok, cert}
{:error, _} ->
{:error, conn, :invalid_pem_cert}
end
[_ | _] ->
{:error, conn, :multiple_certs_in_header}
end
end
@doc """
`APIac.Authenticator` credential validator callback
The certificate in the credentials must be an OTP certificate record (as returned by
`X509.Certificate.from_der/1`) or, for header-based origins, a list of header values
"""
@impl APIac.Authenticator
def validate_credentials(conn, {client_id, {:OTPCertificate, _, _, _} = cert}, opts) do
# technically a root CA certificate is also self-signed, however it
# 1- is absolutely unlikely it would be used for that
# 2- wouldn't have the same public key info, so couldn't impersonate
# another client
# TODO: confirm these assumptions
if :public_key.pkix_is_self_signed(cert) and
opts[:allowed_methods] in [:selfsigned, :both] do
validate_self_signed_cert(conn, client_id, cert, opts)
else
if opts[:allowed_methods] in [:pki, :both] do
validate_pki_cert(conn, client_id, cert, opts)
else
{:error, conn,
%APIac.Authenticator.Unauthorized{
authenticator: __MODULE__,
reason: :no_method_provided
}}
end
end
end
def validate_credentials(_conn, {_client_id, header_values}, %{allowed_methods: :selfsigned})
when is_list(header_values)
do
raise ~s({:header_param, "Header-Name"} is unsupported for self-signed certificates)
end
def validate_credentials(conn, {client_id, header_values}, opts) when is_list(header_values) do
case opts[:pki_callback].(client_id) do
{_tls_client_auth_subject_value_parameter, expected} ->
expected in header_values
expected when is_binary(expected) ->
expected in header_values
nil ->
false
end
|> if do
conn =
conn
|> Plug.Conn.put_private(:apiac_authenticator, __MODULE__)
|> Plug.Conn.put_private(:apiac_client, client_id)
{:ok, conn}
else
{:error, conn,
%APIac.Authenticator.Unauthorized{authenticator: __MODULE__, reason: :pki_no_match}}
end
end
defp validate_self_signed_cert(conn, client_id, cert, opts) do
peer_cert_subject_public_key = X509.Certificate.public_key(cert)
registered_certs =
case opts[:selfsigned_callback].(client_id) do
nil ->
[]
[_ | _] = certs ->
certs
cert ->
[cert]
end
public_key_info_match =
Enum.any?(
registered_certs,
fn registered_cert ->
case registered_cert do
{:OTPCertificate, _, _, _} ->
registered_cert
der_cert when is_binary(der_cert) ->
X509.Certificate.from_der!(der_cert)
end
|> X509.Certificate.public_key() == peer_cert_subject_public_key
end
)
if public_key_info_match do
conn =
conn
|> Plug.Conn.put_private(:apiac_authenticator, __MODULE__)
|> Plug.Conn.put_private(:apiac_client, client_id)
{:ok, conn}
else
{:error, conn,
%APIac.Authenticator.Unauthorized{
authenticator: __MODULE__,
reason: :selfsigned_no_cert_match
}}
end
rescue
_ -> {:error, conn, %APIac.Authenticator.Unauthorized{
authenticator: __MODULE__,
reason: :unknown
}}
end
defp validate_pki_cert(conn, client_id, cert, opts) do
case opts[:pki_callback].(client_id) do
{tls_client_auth_subject_value_parameter, parameter_value} ->
do_validate_pki_cert(tls_client_auth_subject_value_parameter, cert, parameter_value)
parameter_value when is_binary(parameter_value) ->
do_validate_pki_cert(:tls_client_auth_subject_dn, cert, parameter_value)
_ ->
false
end
|> if do
conn =
conn
|> Plug.Conn.put_private(:apiac_authenticator, __MODULE__)
|> Plug.Conn.put_private(:apiac_client, client_id)
{:ok, conn}
else
{:error, conn,
%APIac.Authenticator.Unauthorized{authenticator: __MODULE__, reason: :pki_no_match}}
end
end
defp do_validate_pki_cert(:tls_client_auth_subject_dn, cert, dn) do
# FIXME: is comparing the string serialization of these DNs OK from a
# security point of view? Or shall we compare the raw DNs?
# See further https://tools.ietf.org/html/rfc5280#section-7.1
cert
|> X509.Certificate.subject()
|> X509.RDNSequence.to_string() == dn
end
defp do_validate_pki_cert(tls_client_auth_subject_value_parameter, cert, param_value) do
san_key =
case tls_client_auth_subject_value_parameter do
:tls_client_auth_san_dns -> :dNSName
:tls_client_auth_san_uri -> :uniformResourceIdentifier
:tls_client_auth_san_ip -> :iPAddress
:tls_client_auth_san_email -> :rfc822Name
end
cert
|> X509.Certificate.extension(:subject_alt_name)
|> case do
nil ->
false
{:Extension, {2, 5, 29, 17}, false, values} ->
values
|> Enum.filter(fn {k, _v} -> k == san_key end)
|> Enum.any?(fn {_k, v} -> param_value == List.to_string(v) end)
end
end
@doc """
Implementation of the `APIac.Authenticator` callback
## Verbosity
The following elements in the HTTP response are set depending on the value
of the `:error_response_verbosity` option:
| Error response verbosity | HTTP Status | Headers | Body |
|:-------------------------:|--------------------|---------|---------------------------------------------------------|
| `:debug` | Unauthorized (401) | | `APIac.Authenticator.Unauthorized` exception's message |
| `:normal` | Unauthorized (401) | | |
| `:minimal` | Unauthorized (401) | | |
"""
@impl APIac.Authenticator
def send_error_response(conn, _error, %{:error_response_verbosity => error_response_verbosity})
when error_response_verbosity in [:normal, :minimal] do
conn
|> Plug.Conn.send_resp(:unauthorized, "")
|> Plug.Conn.halt()
end
def send_error_response(conn, error, %{:error_response_verbosity => :debug}) do
conn
|> Plug.Conn.send_resp(:unauthorized, Exception.message(error))
|> Plug.Conn.halt()
end
@doc """
Saves failure in a `Plug.Conn.t()`'s private field and returns the `conn`
See the `APIac.AuthFailureResponseData` module for more information.
"""
@spec save_authentication_failure_response(Plug.Conn.t(),
%APIac.Authenticator.Unauthorized{},
any()) :: Plug.Conn.t()
def save_authentication_failure_response(conn, error, opts) do
failure_response_data =
%APIac.AuthFailureResponseData{
module: __MODULE__,
reason: error.reason,
www_authenticate_header: nil,
status_code: :unauthorized,
body:
if opts[:error_response_verbosity] in [:normal, :minimal] do
nil
else
Exception.message(error)
end
}
APIac.AuthFailureResponseData.put(conn, failure_response_data)
end
end
|
lib/apiac_auth_mtls.ex
| 0.925894
| 0.805594
|
apiac_auth_mtls.ex
|
starcoder
|
defmodule Crux.Gateway.Registry.Generator do
@moduledoc false
# Generates via_ and lookup_ functions for different types of processes.
# Yes, I am overdoing it.
defmacro __using__(_opts) do
quote do
require unquote(__MODULE__)
import unquote(__MODULE__)
@spec _lookup(atom(), term()) :: pid() | :error
def _lookup(registry, key) do
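# Registry may briefly return an entry for a process that has just died,
# so double-check liveness before handing the pid out.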
case Registry.lookup(registry, key) do
[{pid, _value}] when is_pid(pid) ->
if Process.alive?(pid) do
pid
else
:error
end
[] ->
:error
end
end
end
end
@doc ~S"""
Defines two functions:
- `via_` to register a process using this name
- `lookup_` to lookup a process using this name
### Examples
```elixir
registered_name :identify
# results in...
def via_identify(registry), do: {:via, Elixir.Registry, {registry, {:identify}}}
def lookup_identify(registry), do: _lookup(registry, {:identify})
registered_name :connection, [:shard_id, :shard_count]
# results in...
def via_connection(registry, shard_id, shard_count) do
{:via, Elixir.Registry, {registry, {:connection, shard_id, shard_count}}}
end
def lookup_connection(registry, shard_id, shard_count) do
_lookup(registry, {:connection, shard_id, shard_count})
end
```
"""
defmacro registered_name(name, args \\ []) do
args = Enum.map(args, &Macro.var(&1, nil))
quote do
@doc """
Return a :via tuple to register a #{unquote(name)} process.
"""
def unquote(String.to_atom("via_" <> to_string(name)))(registry, unquote_splicing(args)) do
key = {unquote(name), unquote_splicing(args)}
{:via, Elixir.Registry, {registry, key}}
end
@doc """
Lookup a #{unquote(name)} process.
"""
def unquote(String.to_atom("lookup_" <> to_string(name)))(registry, unquote_splicing(args)) do
key = {unquote(name), unquote_splicing(args)}
_lookup(registry, key)
end
end
end
end
|
lib/gateway/registry/generator.ex
| 0.818737
| 0.668366
|
generator.ex
|
starcoder
|
defmodule GrovePi.Board do
@moduledoc """
Low-level interface for sending raw requests and receiving responses from a
GrovePi hat. It is started automatically with GrovePi; normally you would use
one of the other GrovePi modules to interact with a connected sensor, light, or actuator.
To check that your GrovePi hardware is working, try this:
```elixir
iex> GrovePi.Board.firmware_version()
"1.2.2"
```
"""
use GenServer
@i2c Application.get_env(:grovepi, :i2c, Circuits.I2C)
@i2c_retry_count 2
defstruct address: nil, i2c_bus: nil
## Client API
@spec start_link(byte, atom, keyword) :: {:ok, pid} | {:error, any}
def start_link(address, prefix, opts \\ []) when is_integer(address) do
opts = Keyword.put_new(opts, :name, i2c_name(prefix))
state = %__MODULE__{address: address}
GenServer.start_link(__MODULE__, state, opts)
end
def i2c_name(prefix) do
String.to_atom("#{prefix}.#{__MODULE__}")
end
@doc """
Get the version of firmware running on the GrovePi's microcontroller.
"""
@spec firmware_version(atom) :: binary | {:error, term}
def firmware_version(prefix \\ Default) do
with :ok <- send_request(prefix, <<8, 0, 0, 0>>),
<<_, major, minor, patch>> <- get_response(prefix, 4),
do: "#{major}.#{minor}.#{patch}"
end
@doc """
Send a request to the GrovePi. This is not normally called directly
except when interacting with an unsupported sensor.
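A sketch of a raw request (the `<<command, pin, arg1, arg2>>` layout and the
command byte used here are assumptions about the GrovePi firmware, and the
board process must already be running):
```elixir
:ok = GrovePi.Board.send_request(Default, <<40, 7, 0, 0>>)
```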
"""
@spec send_request(GenServer.server(), binary) :: :ok | {:error, term}
def send_request(prefix, message) when byte_size(message) == 4 do
GenServer.call(i2c_name(prefix), {:write, message})
end
def send_request(message) do
send_request(Default, message)
end
@doc """
Get a response to a previously sent request to the GrovePi. This is
not normally called directly.
"""
@spec get_response(atom, integer) :: binary | {:error, term}
def get_response(prefix, bytes_to_read) do
GenServer.call(i2c_name(prefix), {:read, bytes_to_read})
end
@spec get_response(integer) :: binary | {:error, term}
def get_response(bytes_to_read) do
get_response(Default, bytes_to_read)
end
@doc """
Write directly to a device on the I2C bus. This is used for sensors
that are not controlled by the GrovePi's microcontroller.
"""
def i2c_write_device(address, message) do
GenServer.call(i2c_name(Default), {:write_device, address, message})
end
#### test helper functions
def add_responses(board, messages) do
GenServer.call(board, {:add_responses, messages})
end
def add_response(board, message) do
add_responses(board, [message])
end
def get_last_write(board) do
GenServer.call(board, {:get_last_write})
end
def get_last_write_data(board) do
GenServer.call(board, {:get_last_write_data})
end
def get_all_writes(board) do
GenServer.call(board, {:get_all_writes})
end
def get_all_data(board) do
GenServer.call(board, {:get_all_data})
end
def reset(board) do
GenServer.call(board, :reset)
end
## Server Callbacks
@impl true
def init(state) do
{:ok, state, {:continue, :open_i2c}}
end
@impl true
def handle_continue(:open_i2c, state) do
{:ok, ref} = @i2c.open("i2c-1")
{:noreply, %{state | i2c_bus: ref}}
end
@impl true
def handle_call({:write, message}, _from, state) do
reply = @i2c.write(state.i2c_bus, state.address, message, retries: @i2c_retry_count)
{:reply, reply, state}
end
@impl true
def handle_call({:write_device, address, message}, _from, state) do
reply = @i2c.write(state.i2c_bus, address, message, retries: @i2c_retry_count)
{:reply, reply, state}
end
@impl true
def handle_call({:read, bytes_to_read}, _from, state) do
reply =
case(@i2c.read(state.i2c_bus, state.address, bytes_to_read, retries: @i2c_retry_count)) do
{:ok, response} -> response
{:error, error} -> {:error, error}
end
{:reply, reply, state}
end
### test helper callbacks
def handle_call({:get_last_write}, _from, state) do
{:reply, @i2c.get_last_write(state.i2c_bus), state}
end
def handle_call({:get_last_write_data}, _from, state) do
{:reply, @i2c.get_last_write_data(state.i2c_bus), state}
end
def handle_call({:get_all_writes}, _from, state) do
{:reply, @i2c.get_all_writes(state.i2c_bus), state}
end
def handle_call({:get_all_data}, _from, state) do
{:reply, @i2c.get_all_data(state.i2c_bus), state}
end
def handle_call({:add_responses, responses}, _from, state) do
{:reply, @i2c.add_responses(state.i2c_bus, responses), state}
end
def handle_call(:reset, _from, state) do
{:reply, @i2c.reset(state.i2c_bus), state}
end
end
|
lib/grovepi/board.ex
| 0.824885
| 0.693719
|
board.ex
|
starcoder
|
defmodule Membrane.VideoCutter do
@moduledoc """
Membrane element that cuts raw video.
The element expects each frame to be received in a separate buffer, so the parser
(`Membrane.Element.RawVideo.Parser`) may be required in a pipeline before
this element (e.g. when input is read from `Membrane.File.Source`).
To use this element, specify the desired intervals in the `intervals` options -
`VideoCutter` will "filter" out all frames with timestamps outside of them.
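A sketch of the element's options, keeping only the first five seconds of the
stream and shifting the kept frames' timestamps by one second:
```elixir
%Membrane.VideoCutter{
intervals: [{0, Membrane.Time.seconds(5)}],
offset: Membrane.Time.second()
}
```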
"""
use Membrane.Filter
alias Membrane.Buffer
alias Membrane.Caps.Video.Raw
def_options intervals: [
spec: [{Membrane.Time.t(), Membrane.Time.t() | :infinity}],
default: [{0, :infinity}],
description: """
List of intervals of timestamps. The buffer is forwarded when its timestamp belongs to any of the given intervals.
The start of the interval is inclusive and the end is exclusive.
For example, to cut out the part of the video from 190 ms up to 1530 ms,
the `intervals` should be set to `[{0, Membrane.Time.milliseconds(190)}, {Membrane.Time.milliseconds(1530), :infinity}]`.
"""
],
offset: [
spec: Membrane.Time.t(),
default: 0,
description: """
Offset applied to all cut frames' presentation timestamp (PTS) values. It allows logically shifting the video to express its real starting point.
For example, if there are two streams and the second one begins two seconds after the first one,
the video cutter that processes the second stream should apply a 2-second offset. The offset is applied after the cutting phase.
"""
]
def_output_pad :output,
caps: {Raw, aligned: true}
def_input_pad :input,
caps: {Raw, aligned: true},
demand_unit: :buffers
@impl true
def handle_init(opts) do
{:ok, opts}
end
@impl true
def handle_demand(:output, size, :buffers, _ctx, state) do
{{:ok, demand: {:input, size}}, state}
end
@impl true
def handle_prepared_to_playing(_ctx, state) do
{:ok, state}
end
@impl true
def handle_process(_pad, %Buffer{pts: nil}, _ctx, _state),
do: raise("Cannot cut stream without pts")
@impl true
def handle_process(:input, buffer, _ctx, state) do
actions =
if within_any_interval?(buffer.pts, state.intervals),
do: [buffer: {:output, [apply_offset(buffer, state.offset)]}],
else: [redemand: :output]
{{:ok, actions}, state}
end
@impl true
def handle_end_of_stream(:input, _ctx, state) do
{{:ok, end_of_stream: :output, notify: {:end_of_stream, :input}}, state}
end
defp apply_offset(%Buffer{pts: pts} = buffer, offset) do
%Buffer{buffer | pts: pts + offset}
end
defp within_any_interval?(timestamp, intervals) do
Enum.any?(intervals, fn
{:infinity, _any} ->
false
{start, :infinity} ->
timestamp >= start
{start, stop} ->
timestamp >= start and timestamp < stop
end)
end
end
|
lib/video_cutter.ex
| 0.908161
| 0.57943
|
video_cutter.ex
|
starcoder
|
defmodule Day3 do
defmodule Extent do
defstruct start_x: 0, start_y: 0, min_x: 0, min_y: 0, max_x: 0, max_y: 0
end
defmodule Point do
defstruct x: 0, y: 0
end
@no_intersection 9_999_999_999_999_999
@type path() :: [Extent.t()]
@spec points_in(Day3.Extent.t()) :: [Day3.Point.t()]
defp points_in(extent) do
cond do
extent.max_x > extent.min_x ->
for x <- extent.min_x..extent.max_x, do: %Point{x: x, y: extent.min_y}
extent.max_y > extent.min_y ->
for y <- extent.min_y..extent.max_y, do: %Point{x: extent.min_x, y: y}
true ->
[%Point{x: extent.min_x, y: extent.min_y}]
end
end
@spec segment(Day3.Point.t(), String.t()) :: {Day3.Point.t(), Day3.Extent.t()}
def segment(start, seg) do
[dir | num] = to_charlist(seg)
count = String.to_integer(to_string(num))
finish =
case dir do
?U -> %Point{x: start.x, y: start.y + count}
?D -> %Point{x: start.x, y: start.y - count}
?L -> %Point{x: start.x - count, y: start.y}
?R -> %Point{x: start.x + count, y: start.y}
end
{finish,
%Extent{
start_x: start.x,
start_y: start.y,
min_x: min(start.x, finish.x),
min_y: min(start.y, finish.y),
max_x: max(start.x, finish.x),
max_y: max(start.y, finish.y)
}}
end
@spec segments([String.t()], Day3.Point.t()) :: [Day3.Extent.t()]
def segments(wire, start) do
case wire do
[] ->
[]
[seg1 | rest] ->
{end_pt, extent} = segment(start, seg1)
[extent | segments(rest, end_pt)]
end
end
@spec crossings(Extent.t(), Extent.t()) :: [Point.t()]
def crossings(extent1, extent2) do
if extent1.min_x <= extent2.max_x && extent2.min_x <= extent1.max_x &&
extent1.min_y <= extent2.max_y && extent2.min_y <= extent1.max_y do
overlap = %Extent{
min_x: max(extent1.min_x, extent2.min_x),
min_y: max(extent1.min_y, extent2.min_y),
max_x: min(extent1.max_x, extent2.max_x),
max_y: min(extent1.max_y, extent2.max_y)
}
points_in(overlap)
else
[]
end
end
@doc """
List of points where a particular extent intersects the path
"""
@spec intersects(Day3.Extent.t(), path()) :: [Point.t()]
def intersects(ext, path) do
List.flatten(for ext2 <- path, do: crossings(ext, ext2))
end
@doc """
List of points where one path intersects another
"""
@spec intersections(path(), path()) :: [Point.t()]
def intersections(path_1, path_2) do
List.flatten(for ext1 <- path_1, do: intersects(ext1, path_2))
end
@doc """
Turns a wire definition, e.g. ["R5", "U13", "L10"], into a path (list of extents)
"""
@spec path(String.t()) :: path()
def path(wire) do
segments(String.split(wire, ","), %Point{})
end
@spec paths_and_crossings(String.t(), String.t()) :: {path(), path(), [Day3.Point.t()]}
defp paths_and_crossings(wire_1, wire_2) do
path_1 = path(wire_1)
path_2 = path(wire_2)
{path_1, path_2, intersections(path_1, path_2)}
end
@spec closest([Day3.Point.t()], (Day3.Point.t() -> integer)) :: integer
defp closest(crossings, measure) do
Enum.reduce(crossings, @no_intersection, fn p, acc ->
case p do
%Point{x: 0, y: 0} -> acc
_ -> min(measure.(p), acc)
end
end)
end
@doc """
Finds the crossings between two wires and calculates the minimal Manhattan distance from the origin to a crossing.
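For example, with the wires from the Advent of Code 2019 day 3 puzzle statement:
    iex> Day3.manhattan_distance("R8,U5,L5,D3", "U7,R6,D4,L4")
    6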
"""
@spec manhattan_distance(String.t(), String.t()) :: integer
def manhattan_distance(wire_1, wire_2) do
{_, _, crossings} = paths_and_crossings(wire_1, wire_2)
closest(crossings, fn p -> abs(p.x) + abs(p.y) end)
end
@spec is_vertical(Day3.Extent.t()) :: boolean
defp is_vertical(extent), do: extent.min_x == extent.max_x
@spec is_on(Day3.Point.t(), Day3.Extent.t()) :: boolean
defp is_on(point, extent) do
if is_vertical(extent) do
point.x == extent.min_x and point.y >= extent.min_y and point.y <= extent.max_y
else
point.y == extent.min_y and point.x >= extent.min_x and point.x <= extent.max_x
end
end
@spec steps_to(Day3.Point.t(), Day3.Extent.t()) :: integer
defp steps_to(point, extent) do
if is_vertical(extent) do
abs(point.y - extent.start_y)
else
abs(point.x - extent.start_x)
end
end
@spec length_of(Day3.Extent.t()) :: integer
defp length_of(extent) do
if is_vertical(extent) do
abs(extent.max_y - extent.min_y)
else
abs(extent.max_x - extent.min_x)
end
end
@spec distance_along(Day3.Point.t(), [Day3.Extent.t()]) :: integer
defp distance_along(point, path) do
case path do
[] ->
@no_intersection
[first_extent | rest] ->
cond do
is_on(point, first_extent) -> steps_to(point, first_extent)
true -> length_of(first_extent) + distance_along(point, rest)
end
end
end
@doc """
Finds the crossings between two wires and calculates the minimal path distance
along each wire to a crossing
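For example, with the wires from the Advent of Code 2019 day 3 puzzle statement:
    iex> Day3.timing_distance("R8,U5,L5,D3", "U7,R6,D4,L4")
    30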
"""
@spec timing_distance(String.t(), String.t()) :: integer()
def timing_distance(wire_1, wire_2) do
{path_1, path_2, crossings} = paths_and_crossings(wire_1, wire_2)
timing_measure = fn p ->
distance_along(p, path_1) + distance_along(p, path_2)
end
closest(crossings, timing_measure)
end
@spec read_lines(String.t()) :: {String.t(), String.t()}
defp read_lines(file_name) do
[line_1, line_2] =
File.read!(file_name)
|> String.trim()
|> String.split("\n")
{line_1, line_2}
end
@spec part1(String.t()) :: integer()
def part1(file_name) do
{line_1, line_2} = read_lines(file_name)
manhattan_distance(line_1, line_2)
end
@spec part2(String.t()) :: integer()
def part2(file_name) do
{line_1, line_2} = read_lines(file_name)
timing_distance(line_1, line_2)
end
end
|
lib/day3.ex
| 0.798933
| 0.700271
|
day3.ex
|
starcoder
|
defmodule Confx do
@moduledoc "README.md"
|> File.read!()
|> String.split("<!-- MDOC !-->")
|> Enum.fetch!(1)
@doc """
Returns the configuration specified in the given file
## Options
The accepted options are:
* `defaults`: values to fall back on for any keys missing from the configuration file
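## Example
A sketch, assuming a readable `config/app.yaml` file exists:
    {:ok, config} = Confx.load("config/app.yaml", defaults: [port: 4000])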
"""
@spec load(term(), Keyword.t()) ::
{:ok, map()}
| {:error, :file_not_found}
| {:error, :file_format_not_found}
| {:error, atom()}
def load(path, opts \\ []) do
defaults = opts[:defaults] || []
with {:ok, format} <- file_format(path),
{:ok, content} <- read_file(path),
{:ok, config} <- parse_file(content, format) do
defaults = keyword_to_map(defaults)
conf =
config
|> keys_to_atom()
|> then(&merge(defaults, &1))
{:ok, conf}
else
{:error, %{}} ->
{:error, :parsing_error}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Same as `load/2` but returns the config map directly, or raises an exception if
an error is returned
"""
@spec load!(term(), Keyword.t()) :: map()
def load!(path, opts \\ []) do
case load(path, opts) do
{:ok, config} ->
config
{:error, :file_format_not_found} ->
raise Confx.FileFormatNotFoundError, path
{:error, :file_not_found} ->
raise Confx.FileNotFoundError, path
{:error, reason} ->
# other errors (e.g. :parsing_error) have no dedicated exception
raise "could not load #{inspect(path)}: #{inspect(reason)}"
end
end
defp parse_file(content, format) do
case format do
:json ->
Jason.decode(content)
:yaml ->
YamlElixir.read_from_string(content)
end
end
defp file_format(path) do
case Path.extname(path) do
".json" ->
{:ok, :json}
ext when ext in [".yml", ".yaml"] ->
{:ok, :yaml}
_ ->
{:error, :file_format_not_found}
end
end
defp read_file(path) do
case File.read(path) do
{:ok, content} ->
{:ok, content}
{:error, _} ->
{:error, :file_not_found}
end
end
defp merge(left, right) do
Map.merge(left, right, &merge_resolve/3)
end
defp merge_resolve(_key, %{} = left, %{} = right), do: merge(left, right)
defp merge_resolve(_key, _left, right), do: right
defp keys_to_atom(str_key_map) when is_map(str_key_map) do
for {key, val} <- str_key_map, into: %{}, do: {String.to_atom(key), keys_to_atom(val)}
end
defp keys_to_atom(value), do: value
defp keyword_to_map([]), do: %{}
defp keyword_to_map(keyword_list) when is_list(keyword_list) do
for {key, val} <- keyword_list, into: %{}, do: {key, keyword_to_map(val)}
end
defp keyword_to_map(val), do: val
end
|
lib/confx.ex
| 0.795181
| 0.410047
|
confx.ex
|
starcoder
|
defmodule Pipe do
@moduledoc """
A [conduit](http://hackage.haskell.org/package/conduit)-like pipe system for Elixir.
See the [README](README.html) for high level documentation.
"""
use Monad
@typedoc "A pipe which hasn't started running yet"
@type t :: Source.t | Conduit.t | Sink.t
@typedoc "The result of stepping a pipe"
@type step :: NeedInput.t | HaveOutput.t | Done.t
@typedoc "A source or conduit, helper type for pipe functions"
@type sourceish :: Source.t | Conduit.t
# The step field may be strict or lazy depending on what works best.
# Unnecessary laziness costs performance but too little laziness can cause
# results to be computed before they are needed.
defrecord Source, step: nil do
@moduledoc """
A source pipe that hasn't yet started.
The `step` field should contain a step or a nullary function that returns a
step (depending on whether you want the step to be computed immediately or
when needed).
"""
record_type step: Pipe.step | (() -> Pipe.step)
end
defrecord Conduit, step: nil do
@moduledoc """
A conduit pipe that hasn't yet started.
The `step` field should contain a step or a nullary function that returns a
step (depending on whether you want the step to be computed immediately or
when needed).
"""
record_type step: Pipe.step | (() -> Pipe.step)
end
defrecord Sink, step: nil do
@moduledoc """
A sink pipe that hasn't yet started.
The `step` field should contain a step or a nullary function that returns a
step (depending on whether you want the step to be computed immediately or
when needed).
"""
record_type step: Pipe.step | (() -> Pipe.step)
end
defrecord NeedInput, on_value: nil, on_done: nil do
@moduledoc """
A pipe that needs more input.
The `on_value` field should contain a function that given an element of
input returns a new pipe.
The `on_done` field should contain a function that given the result of the
upstream pipe returns a new pipe.
"""
record_type on_value: (any -> Pipe.t), on_done: (any -> Pipe.t)
end
defrecord HaveOutput, value: nil, next: nil do
@moduledoc """
A pipe that has output.
The `value` field should contain a single piece of output.
The `next` field should contain a nullary function which when evaluated
returns a new pipe.
"""
record_type value: any, next: (() -> Pipe.t)
end
defrecord Done, result: nil, leftovers: [] do
@moduledoc """
A pipe that is done.
The `result` field should contain the result of the pipe.
The `leftovers` field should contain any unused input items.
"""
record_type result: any, leftovers: [any]
end
defrecord RegisterCleanup, func: nil, next: nil do
@moduledoc """
A pipe that wants to register a cleanup function.
Cleanup functions get run when the complete pipe has finished running.
The `func` field should contain a nullary function which should be safe
to call multiple times.
The `next` field should contain a nullary function which when evaluated
returns a new pipe.
"""
record_type func: (() -> none), next: (() -> Pipe.t)
end
defexception Invalid, message: nil do
@moduledoc """
Indicates that a pipe is invalid when run.
Usually this is because the user didn't properly compose pipes.
"""
end
## Helper macros
# Force the computation of a nullary function or otherwise return the value.
defmacrop force(p) do
quote location: :keep do
if is_function(unquote(p)), do: unquote(p).(), else: unquote(p)
end
end
## Connecting and running pipes (horizontal composition)
@doc """
Allow the use of a list for connecting pipes.
This simply reduces the list using `connect/2`.
Note that connecting a source to a sink runs a pipe.
See `connect/2` for more information.
## Examples
iex> Pipe.connect [Pipe.yield(1), Pipe.await]
[1]
"""
def connect(pipes) when is_list(pipes), do: Enum.reduce(pipes, &connect(&2, &1))
@doc """
Connect two pipes.
See `connect/1` for connecting a list of pipes.
Connecting a source to a conduit results in a conduit.
Connecting a conduit to a conduit results in a conduit.
Connecting a conduit to a sink results in a sink.
Connecting a source to a sink runs the pipe.
Connecting `nil` to anything results in the second argument being returned.
Any other combination results in `Pipe.Invalid` being raised.
"""
@spec connect(Source.t, Conduit.t) :: Source.t
@spec connect(Conduit.t, Conduit.t) :: Conduit.t
@spec connect(Conduit.t, Sink.t) :: Sink.t
@spec connect(Source.t, Sink.t) :: any()
@spec connect(nil, t) :: t
def connect(a, b)
def connect(Source[step: as], Conduit[step: bs]) do
Source[step: fn -> step(force(as), force(bs)) end]
end
def connect(Conduit[step: as], Conduit[step: bs]) do
Conduit[step: fn -> step(force(as), force(bs)) end]
end
def connect(Conduit[step: as], Sink[step: bs]) do
Sink[step: fn -> step(force(as), force(bs)) end]
end
def connect(Source[step: as], Sink[step: bs]) do
run(step(force(as), force(bs)))
end
def connect(nil, b) do
b
end
def connect(a, b) do
raise Invalid, message: "Invalid connect: #{inspect a} -> #{inspect b}"
end
# Run a fully composed pipe.
@spec run(step) :: any()
defp run(NeedInput[on_done: od]) do
run(od.(nil))
end
defp run(HaveOutput[]) do
raise Invalid, message: "Fully composed pipes shouldn't provide output"
end
defp run(Done[result: r]) do
r
end
defp run(RegisterCleanup[func: f, next: n]) do
# If 3 resources are registered the run function will be on the stack 3
# times; registering very many resources would therefore grow the stack.
# However, I doubt this will be a problem in practice.
try do
run(n.())
after
f.()
end
end
# Perform a step or as much steps as possible.
@spec step(step, step) :: step | Done.t
defp step(NeedInput[on_value: ov, on_done: od], b) do
NeedInput[on_value: &step(ov.(&1), b), on_done: &step(od.(&1), b)]
end
defp step(HaveOutput[value: v, next: n], NeedInput[on_value: ov]) do
# Ensure the downstream gets stepped first. Not sure if it's needed but it
# shouldn't hurt performance.
new_b = ov.(v)
step(n.(), new_b)
end
defp step(a = Done[result: r], NeedInput[on_done: od]) do
step(a, od.(r))
end
defp step(a, HaveOutput[value: v, next: n]) do
HaveOutput[value: v, next: fn -> step(a, n.()) end]
end
defp step(_, Done[result: r]) do
Done[result: r]
end
defp step(RegisterCleanup[func: f, next: n], b) do
RegisterCleanup[func: f, next: fn -> step(n.(), b) end]
end
defp step(a, RegisterCleanup[func: f, next: n]) do
RegisterCleanup[func: f, next: fn -> step(a, n.()) end]
end
defp step(Source[step: s], b), do: step(force(s), b)
defp step(Conduit[step: s], b), do: step(force(s), b)
defp step(Sink[step: s], b), do: step(force(s), b)
## The monadic interface (vertical composition)
@doc """
Return a value inside a pipe.
Note that you can't do `connect(return(1), await())` because return/1 doesn't
return a source, conduit or sink but a pipe step. If you run into this problem
use `done/1` instead.
"""
@spec return(any()) :: Done.t
def return(x) do
Done[result: x]
end
# Not really a monadic interface part but it fits best here.
@doc """
Return a result and some leftovers.
Note that you can't do `connect(return_leftovers(1, []), await())` because
return_leftovers/2 doesn't return a source, conduit or sink but a pipe step.
If you run into this problem use `done/2` instead.
"""
@spec return_leftovers(any, [any]) :: Done.t
def return_leftovers(x, l) do
Done[result: x, leftovers: l]
end
@doc """
Create a new pipe that first "runs" the passed pipe `p` and passes the result
of that pipe to `f` (which should return a pipe).
"""
@spec bind(t | step, (any -> t | step)) :: step
def bind(p, f)
def bind(Source[step: s], f), do: bind(force(s), f)
def bind(Conduit[step: s], f), do: bind(force(s), f)
def bind(Sink[step: s], f), do: bind(force(s), f)
def bind(NeedInput[on_value: ov, on_done: od], f) do
NeedInput[on_value: &(ov.(&1) |> bind(f)), on_done: &(od.(&1) |> bind(f))]
end
def bind(HaveOutput[value: v, next: n], f) do
HaveOutput[value: v, next: fn -> bind(n.(), f) end]
end
def bind(p = Done[], nil) do
# A nil step can easily result from an if without an else case. Gracefully
# handle it by considering it to mean return.
bind(p, &(return(&1)))
end
def bind(nil, f) do
# An expression like if ... do ... end should be taken to have an else
# clause that returns nil.
bind(return(nil), f)
end
def bind(Done[result: r, leftovers: l], f) do
x = f.(r)
# It's quite possible, even normal, that we get not a step but a pipe which
# hasn't started running, which basically means a lazily evaluated pipe
# step. Force the step, otherwise the whole system won't work.
s = case x do
Source[step: s] -> force(s)
Conduit[step: s] -> force(s)
Sink[step: s] -> force(s)
_ -> x
end
if l == [], do: s, else: with_leftovers(s, l)
end
def bind(RegisterCleanup[func: cf, next: n], f) do
RegisterCleanup[func: cf, next: fn -> bind(n.(), f) end]
end
@doc """
Run the step with some leftovers present.
"""
@spec with_leftovers(step, [any]) :: step
def with_leftovers(s, []) do
s
end
def with_leftovers(NeedInput[on_value: ov], [h|t]) do
with_leftovers(ov.(h), t)
end
def with_leftovers(HaveOutput[value: v, next: n], l) do
HaveOutput[value: v, next: fn -> with_leftovers(n, l) end]
end
def with_leftovers(Done[result: r, leftovers: l1], l2) do
Done[result: r, leftovers: l1 ++ l2]
end
def with_leftovers(RegisterCleanup[func: f, next: n], l) do
RegisterCleanup[func: f, next: fn -> with_leftovers(n.(), l) end]
end
@doc """
A do-notation macro for a source. Creates a strict source.
"""
defmacro source(opts) do
quote do
Source[step: Pipe.m(do: unquote(opts[:do]))]
end
end
@doc """
A do-notation macro for a source. Creates a lazy source.
"""
defmacro lazy_source(opts) do
quote do
Source[step: fn -> Pipe.m(do: unquote(opts[:do])) end]
end
end
@doc """
A do-notation macro for a conduit. Creates a strict conduit.
"""
defmacro conduit(opts), do: do_conduit(opts)
@doc """
A do-notation macro for a conduit. Creates a lazy conduit.
"""
defmacro lazy_conduit(opts), do: do_lazy_conduit(opts)
defp do_conduit(opts) do
quote do
Conduit[step: Pipe.m(do: unquote(opts[:do]))]
end
end
defp do_lazy_conduit(opts) do
quote do
Conduit[step: fn -> Pipe.m(do: unquote(opts[:do])) end]
end
end
@doc """
A do-notation macro for a sink. Creates a strict sink.
"""
defmacro sink(opts), do: do_sink(opts)
@doc """
A do-notation macro for a sink. Creates a lazy sink.
"""
defmacro lazy_sink(opts), do: do_lazy_sink(opts)
defp do_sink(opts) do
quote do
Sink[step: Pipe.m(do: unquote(opts[:do]))]
end
end
defp do_lazy_sink(opts) do
quote do
Sink[step: fn -> Pipe.m(do: unquote(opts[:do])) end]
end
end
## Primitive pipes
@doc """
Wait for a value to be provided by upstream.
If a value is provided return it wrapped in a single element list, otherwise
return an empty list.
"""
@spec await() :: Sink.t
def await() do
Sink[step: NeedInput[on_value: &Done[result: [&1]],
on_done: fn _ -> Done[result: []] end]]
end
@doc """
Wait for a value or result to be provided by upstream.
Returns either `{ :value, value }` (if a value is provided) or `{ :result,
result }` (if the upstream is done).
"""
@spec await_result() :: Sink.t
def await_result() do
Sink[step: NeedInput[on_value: &Done[result: { :value, &1 }],
on_done: &Done[result: { :result, &1 }]]]
end
@doc """
Yield a new output value.
"""
@spec yield(any) :: Source.t
def yield(v) do
Source[step: HaveOutput[value: v, next: fn -> Done[result: nil] end]]
end
@doc """
Return a value as a valid pipe and optionally pass along some leftover input
values.
Use this instead of `return/1` if you're going to immediately use the pipe in
horizontal composition.
Only return input values as leftovers, otherwise weird things might happen.
"""
@spec done(any) :: Source.t
@spec done(any, [any]) :: Source.t
def done(v, l // []) do
Source[step: return_leftovers(v, l)]
end
@doc """
Register a cleanup function to be called when the complete pipe has finished
running.
The cleanup function should be safe to call multiple times.
This is a good way to prevent resource leaks.
Note that `register_cleanup/1` returns a step and thus can't be used directly
in `connect/2` (not that you'd ever want to).
"""
@spec register_cleanup((() -> none)) :: step
def register_cleanup(f) do
RegisterCleanup[func: f, next: fn -> Done[result: nil] end]
end
## Misc
@doc """
Zip two sources.
Yields `{a, b}` where `a` is a value from the first source and `b` is a value
from the second source.
If both of the sources are done the result value will be `{ result_of_a,
result_of_b }`. If only one of the sources is done a similar tuple will be
returned but with :not_done instead of the result value of the other source.
## Examples
iex> Pipe.connect [
...> Pipe.zip_sources(Pipe.yield(1), Pipe.yield(2)),
...> Pipe.List.consume
...> ]
[{1, 2}]
iex> Pipe.connect [
...> Pipe.zip_sources(Pipe.done(:a), Pipe.done(:b)),
...> Pipe.List.skip_all
...> ]
{ :a, :b }
iex> Pipe.connect [
...> Pipe.zip_sources(Pipe.done(:a), Pipe.yield(2)),
...> Pipe.List.skip_all
...> ]
{ :a, :not_done }
"""
@spec zip_sources(Source.t, Source.t) :: Source.t
def zip_sources(Source[step: a], Source[step: b]),
do: Source[step: do_zip_sources(force(a), force(b))]
defp do_zip_sources(Source[step: a], b), do: do_zip_sources(force(a), b)
defp do_zip_sources(a, Source[step: b]), do: do_zip_sources(a, force(b))
defp do_zip_sources(NeedInput[on_done: od], b), do: do_zip_sources(od.(nil), b)
defp do_zip_sources(a, NeedInput[on_done: od]), do: do_zip_sources(a, od.(nil))
defp do_zip_sources(RegisterCleanup[func: f, next: n], b) do
RegisterCleanup[func: f, next: fn -> do_zip_sources(n.(), b) end]
end
defp do_zip_sources(a, RegisterCleanup[func: f, next: n]) do
RegisterCleanup[func: f, next: fn -> do_zip_sources(a, n.()) end]
end
defp do_zip_sources(HaveOutput[value: va, next: na], HaveOutput[value: vb, next: nb]) do
HaveOutput[value: { va, vb }, next: fn -> do_zip_sources(na.(), nb.()) end]
end
defp do_zip_sources(Done[result: ra], Done[result: rb]) do
Done[result: { ra, rb }]
end
defp do_zip_sources(Done[result: r], HaveOutput[]) do
Done[result: { r, :not_done }]
end
defp do_zip_sources(HaveOutput[], Done[result: r]) do
Done[result: { :not_done, r }]
end
end
|
lib/pipe.ex
| 0.890139
| 0.631253
|
pipe.ex
|
starcoder
|
defmodule Cryptopunk do
@moduledoc """
Hierarchical deterministic wallet. It has the following features:
- Generate mnemonic
- Generate seed from mnemonic
- Generate master keys from seed
- Derive private and public keys from the master key
- Various utility functions to work with derivation paths, keys, and crypto addresses
"""
alias Cryptopunk.Derivation
alias Cryptopunk.Derivation.Path
alias Cryptopunk.Key
alias Cryptopunk.Mnemonic
alias Cryptopunk.Seed
@doc """
Generate a mnemonic with the given number of words (24 by default).
See https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki
Examples:
iex> mnemonic1 = Cryptopunk.create_mnemonic()
iex> mnemonic2 = Cryptopunk.create_mnemonic()
iex> mnemonic1 |> String.split(" ") |> Enum.count()
24
iex> mnemonic1 != mnemonic2
true
"""
@spec create_mnemonic(non_neg_integer()) :: String.t() | no_return
def create_mnemonic(word_number \\ 24), do: Mnemonic.create(word_number)
@doc """
Generate mnemonic from entropy.
Examples:
iex> bytes = <<6, 197, 169, 93, 98, 210, 82, 216, 148, 177, 1, 251, 142, 15, 154, 85, 140, 0, 13, 202, 234, 160, 129, 218>>
iex> Cryptopunk.create_mnemonic_from_entropy(bytes)
"almost coil firm shield cement hobby fan cage wine idea track prison scale alone close favorite limb still"
"""
@spec create_mnemonic_from_entropy(binary()) :: String.t() | no_return
def create_mnemonic_from_entropy(entropy), do: Mnemonic.create_from_entropy(entropy)
@doc """
Generate seed from mnemonic.
See https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki#from-mnemonic-to-seed
Examples:
iex> mnemonic = "almost coil firm shield cement hobby fan cage wine idea track prison scale alone close favorite limb still"
iex> Cryptopunk.create_seed(mnemonic)
<<180, 208, 65, 58, 208, 96, 16, 14, 214, 63, 190, 54, 77, 169, 17, 207, 191, 239, 227, 252, 200, 195, 135, 251, 68, 70, 169, 124, 100, 147, 143, 61, 26, 196, 128, 18, 245, 89, 94, 32, 11, 35, 71, 132, 156, 123, 140, 123, 114, 55, 72, 40, 57, 245, 153, 249, 124, 98, 130, 203, 108, 168, 109, 144>>
"""
@spec create_seed(String.t(), binary()) :: binary() | no_return()
def create_seed(mnemonic, password \\ ""), do: Seed.create(mnemonic, password)
@doc """
Generate master private key from seed
See https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#master-key-generation
Examples:
iex> seed = <<98, 235, 236, 246, 19, 205, 197, 254, 187, 41, 62, 19, 189, 20, 24, 73, 206, 187, 198, 83, 160, 138, 77, 155, 195, 97, 140, 111, 133, 102, 241, 26, 176, 95, 206, 198, 71, 251, 118, 115, 134, 215, 226, 194, 62, 106, 255, 94, 15, 142, 227, 186, 152, 88, 218, 220, 184, 63, 242, 30, 162, 59, 32, 229>>
iex> Cryptopunk.master_key_from_seed(seed)
%Cryptopunk.Key{
chain_code:
<<153, 249, 145, 92, 65, 77, 50, 249, 120, 90, 178, 30, 41, 27, 73, 128, 74, 201,
91, 250, 143, 238, 129, 247, 115, 87, 161, 107, 123, 63, 84, 243>>,
key:
<<50, 8, 92, 222, 223, 155, 132, 50, 53, 227, 114, 79, 88, 11, 248, 24, 239, 76,
236, 39, 195, 198, 112, 133, 224, 41, 65, 138, 91, 47, 111, 43>>,
type: :private,
depth: 0,
parent_fingerprint: <<0, 0, 0, 0>>,
index: 0
}
"""
@spec master_key_from_seed(binary()) :: Key.t()
def master_key_from_seed(seed), do: Key.master_key(seed)
@doc """
Serialize extended key
See https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#Serialization_format
Examples:
iex> seed = <<98, 235, 236, 246, 19, 205, 197, 254, 187, 41, 62, 19, 189, 20, 24, 73, 206, 187, 198, 83, 160, 138, 77, 155, 195, 97, 140, 111, 133, 102, 241, 26, 176, 95, 206, 198, 71, 251, 118, 115, 134, 215, 226, 194, 62, 106, 255, 94, 15, 142, 227, 186, 152, 88, 218, 220, 184, 63, 242, 30, 162, 59, 32, 229>>
iex> seed |> Cryptopunk.master_key_from_seed() |> Cryptopunk.serialize_key(<<4, 136, 173, 228>>)
"<KEY>"
"""
@spec serialize_key(Key.t(), binary()) :: String.t()
def serialize_key(key, version), do: Key.serialize(key, version)
@doc """
Deserialize extended key
Examples:
iex> Cryptopunk.deserialize_key("<KEY>")
%Cryptopunk.Key{chain_code: <<153, 249, 145, 92, 65, 77, 50, 249, 120, 90, 178, 30, 41, 27, 73, 128, 74, 201, 91, 250, 143, 238, 129, 247, 115, 87, 161, 107, 123, 63, 84, 243>>, depth: 0, index: 0, key: <<50, 8, 92, 222, 223, 155, 132, 50, 53, 227, 114, 79, 88, 11, 248, 24, 239, 76, 236, 39, 195, 198, 112, 133, 224, 41, 65, 138, 91, 47, 111, 43>>, parent_fingerprint: <<0, 0, 0, 0>>, type: :private}
"""
@spec deserialize_key(binary()) :: Key.t()
def deserialize_key(key), do: Key.deserialize(key)
@doc """
Derive a key using a derivation path
https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki
Examples:
iex> seed = <<98, 235, 236, 246, 19, 205, 197, 254, 187, 41, 62, 19, 189, 20, 24, 73, 206, 187, 198, 83, 160, 138, 77, 155, 195, 97, 140, 111, 133, 102, 241, 26, 176, 95, 206, 198, 71, 251, 118, 115, 134, 215, 226, 194, 62, 106, 255, 94, 15, 142, 227, 186, 152, 88, 218, 220, 184, 63, 242, 30, 162, 59, 32, 229>>
iex> {:ok, path} = Cryptopunk.parse_path("m / 44' / 0' / 0' / 0 / 0")
iex> seed |> Cryptopunk.master_key_from_seed() |> Cryptopunk.derive_key(path)
%Cryptopunk.Key{chain_code: <<166, 125, 2, 213, 77, 88, 124, 145, 241, 251, 83, 163, 21, 11, 20, 34, 158, 157, 179, 147, 162, 212, 148, 89, 28, 92, 68, 126, 215, 79, 147, 159>>, depth: 5, index: 0, key: <<214, 231, 94, 203, 167, 219, 125, 43, 251, 91, 147, 51, 32, 146, 186, 215, 58, 45, 104, 58, 119, 114, 121, 238, 155, 215, 239, 189, 37, 236, 27, 70>>, parent_fingerprint: <<205, 94, 166, 92>>, type: :private}
"""
@spec derive_key(Key.t(), Path.t() | Path.raw_path()) :: Key.t()
def derive_key(key, path), do: Derivation.derive(key, path)
@doc """
Parse derivation path
See https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki
Examples:
iex> Cryptopunk.parse_path("m / 44' / 0' / 0' / 0 / 0")
{:ok, %Cryptopunk.Derivation.Path{account: 0, address_index: 0, change: 0, coin_type: 0, purpose: 44, type: :private}}
"""
@spec parse_path(String.t()) :: {:error, any()} | {:ok, Path.t()}
def parse_path(path), do: Path.parse(path)
end
|
lib/cryptopunk.ex
| 0.908529
| 0.568655
|
cryptopunk.ex
|
starcoder
|
defmodule Waffle.Definition.Storage do
@moduledoc ~S"""
Uploader configuration.
Add `use Waffle.Definition` inside your module to use it as uploader.
## Storage directory
By default, the storage directory is `uploads`. But, it can be customized
in two ways.
### By setting up configuration
Customize storage directory via configuration option `:storage_dir`.
config :waffle,
storage_dir: "my/dir"
### By overriding the relevant functions in definition modules
Every definition module has a default `storage_dir/2` which is overridable.
For example, a common pattern for user avatars is to store each user's
uploaded images in a separate subdirectory based on primary key:
def storage_dir(version, {file, scope}) do
"uploads/users/avatars/#{scope.id}"
end
> **Note**: If you are "attaching" a file to a record on creation (e.g., while inserting the record at the same time), then you cannot use the model's `id` as a path component. You must either (1) use a different storage path format, such as UUIDs, or (2) attach and update the model after an id has been given. [Read more about how to integrate it with Ecto](https://hexdocs.pm/waffle_ecto/filepath-with-id.html#content)
> **Note**: The storage directory is used for both local filestorage (as the relative or absolute directory), and S3 storage, as the path name (not including the bucket).
## Asynchronous File Uploading
If you specify multiple versions in your definition module, each
version is processed and stored concurrently as independent Tasks.
To prevent overconsumption of system resources, each Task is
given a specified timeout to wait, after which the process will
fail. By default, the timeout is `15_000` milliseconds.
If you wish to change the time allocated to version transformation
and storage, you can add a configuration option:
config :waffle,
  version_timeout: 15_000 # milliseconds
To disable asynchronous processing, add `@async false` to your
definition module.
## Storage of files
Waffle currently supports:
* `Waffle.Storage.Local`
* `Waffle.Storage.S3`
Override the `__storage` function in your definition module if you
want to use a different type of storage for a particular uploader.
## File Validation
While storing files on S3 eliminates some malicious attack vectors,
it is strongly encouraged to validate the extensions of uploaded
files as well.
Waffle delegates validation to a `validate/1` function with a tuple
of the file and scope. As an example, in order to validate that an
uploaded file conforms to popular image formats, you can use:
defmodule Avatar do
use Waffle.Definition
@extension_whitelist ~w(.jpg .jpeg .gif .png)
def validate({file, _}) do
file_extension = file.file_name |> Path.extname() |> String.downcase()
case Enum.member?(@extension_whitelist, file_extension) do
true -> :ok
false -> {:error, "invalid file type"}
end
end
end
Validation will be considered successful if the function returns `true` or `:ok`.
A customized error message can be returned in the form of `{:error, message}`.
Any other return value will result in `{:error, :invalid_file}` when passed
through to `Avatar.store`.
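For example, given the `Avatar` definition above, the results look roughly
like this (a sketch; exact return values depend on your storage
configuration):

    Avatar.store("photo.png")  #=> {:ok, "photo.png"}
    Avatar.store("virus.exe")  #=> {:error, "invalid file type"}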
## Passing custom headers when downloading from remote path
By default, when downloading files from remote path request headers are empty,
but if you wish to provide your own, you can override the `remote_file_headers/1`
function in your definition module. For example:
defmodule Avatar do
use Waffle.Definition
def remote_file_headers(%URI{host: "elixir-lang.org"}) do
credentials = Application.get_env(:my_app, :avatar_credentials)
token = Base.encode64(credentials[:username] <> ":" <> credentials[:password])
[{"Authorization", "Basic #{token}"}]
end
end
This code authenticates the request only for a specific domain; otherwise,
empty request headers are sent.
"""
defmacro __using__(_) do
quote do
@acl :private
@async true
def bucket, do: Application.fetch_env!(:waffle, :bucket)
def bucket({_file, _scope}), do: bucket()
def asset_host, do: Application.get_env(:waffle, :asset_host)
def filename(_, {file, _}), do: Path.basename(file.file_name, Path.extname(file.file_name))
def storage_dir_prefix, do: Application.get_env(:waffle, :storage_dir_prefix, "")
def storage_dir(_, _), do: Application.get_env(:waffle, :storage_dir, "uploads")
def validate(_), do: true
def default_url(version, _), do: default_url(version)
def default_url(_), do: nil
def __storage, do: Application.get_env(:waffle, :storage, Waffle.Storage.S3)
defoverridable storage_dir_prefix: 0,
storage_dir: 2,
filename: 2,
validate: 1,
default_url: 1,
default_url: 2,
__storage: 0,
bucket: 0,
bucket: 1,
asset_host: 0
@before_compile Waffle.Definition.Storage
end
end
defmacro __before_compile__(_env) do
quote do
def acl(_, _), do: @acl
def s3_object_headers(_, _), do: []
def async, do: @async
def remote_file_headers(_), do: []
end
end
end
# source file: lib/waffle/definition/storage.ex
defmodule Geo do
@moduledoc """
A collection of GIS functions. Handles conversions to and from WKT, WKB, and GeoJSON for the following geometries:
* Point
* PointZ
* LineString
* LineStringZ
* Polygon
* PolygonZ
* MultiPoint
* MultiPointZ
* MultiLineString
* MultiLineStringZ
* MultiPolygon
* MultiPolygonZ
* GeometryCollection
## Examples
* Encode and decode WKT and EWKT
```elixir
iex(1)> point = Geo.WKT.decode("POINT(30 -90)")
%Geo.Point{ coordinates: {30, -90}, srid: nil}
iex(2)> Geo.WKT.encode(point)
"POINT(30 -90)"
iex(3)> point = Geo.WKT.decode("SRID=4326;POINT(30 -90)")
%Geo.Point{coordinates: {30, -90}, srid: 4326}
```
* Encode and decode WKB and EWKB
```elixir
iex(1)> point = Geo.WKB.decode("0101000000000000000000F03F000000000000F03F")
%Geo.Point{ coordinates: {1.0, 1.0}, srid: nil }
iex(2)> Geo.WKB.encode(point)
"00000000013FF00000000000003FF0000000000000"
iex(3)> point = Geo.WKB.decode("0101000020E61000009EFB613A637B4240CF2C0950D3735EC0")
%Geo.Point{ coordinates: {36.9639657, -121.8097725}, srid: 4326 }
iex(4)> Geo.WKB.encode(point)
"0020000001000010E640427B633A61FB9EC05E73D350092CCF"
```
* Encode and decode GeoJSON
Geo only encodes and decodes maps shaped as GeoJSON. JSON encoding and decoding must
be done before and after.
```elixir
#Examples using Poison as the JSON parser
iex(1)> Geo.JSON.encode(point)
%{ "type" => "Point", "coordinates" => [100.0, 0.0] }
iex(2)> point = Poison.decode!("{ \"type\": \"Point\", \"coordinates\": [100.0, 0.0] }") |> Geo.JSON.decode
%Geo.Point{ coordinates: {100.0, 0.0}, srid: nil }
iex(3)> Geo.JSON.encode(point) |> Poison.encode!
"{\"type\":\"Point\",\"coordinates\":[100.0,0.0]}"
```
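* Convert geometries to strings

Unless disabled via the `:impl_to_string` config (see the `String.Chars`
implementation below), geometries can be converted with `to_string/1`,
which returns their WKT representation:

```elixir
iex(1)> to_string(%Geo.Point{coordinates: {30, -90}})
"POINT(30 -90)"
```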
"""
@type geometry ::
Geo.Point.t()
| Geo.PointZ.t()
| Geo.PointM.t()
| Geo.PointZM.t()
| Geo.LineString.t()
| Geo.LineStringZ.t()
| Geo.Polygon.t()
| Geo.PolygonZ.t()
| Geo.MultiPoint.t()
| Geo.MultiPointZ.t()
| Geo.MultiLineString.t()
| Geo.MultiLineStringZ.t()
| Geo.MultiPolygon.t()
| Geo.MultiPolygonZ.t()
| Geo.GeometryCollection.t()
@typedoc """
Endianess (byte-order) of the WKB/EWKB representation.
* `:ndr` - little-endian
* `:xdr` - big-endian
"""
@type endian :: :ndr | :xdr
if Application.get_env(:geo, :impl_to_string, true) do
defimpl String.Chars,
for: [
Geo.Point,
Geo.PointZ,
Geo.PointM,
Geo.PointZM,
Geo.LineString,
Geo.LineStringZ,
Geo.Polygon,
Geo.PolygonZ,
Geo.MultiPoint,
Geo.MultiPointZ,
Geo.MultiLineString,
Geo.MultiLineStringZ,
Geo.MultiPolygon,
Geo.MultiPolygonZ,
Geo.GeometryCollection
] do
def to_string(geo) do
Geo.WKT.encode!(geo)
end
end
end
end
# source file: lib/geo.ex
defmodule AWS.Ivs do
@moduledoc """
**Introduction**
The Amazon Interactive Video Service (IVS) API is REST compatible, using a
standard HTTP API and an [AWS SNS](http://aws.amazon.com/sns) event stream
for responses. JSON is used for both requests and responses, including
errors.
The API is an AWS regional service, currently in these regions: us-west-2,
us-east-1, and eu-west-1.
**All API request parameters and URLs are case sensitive.**

For a summary of notable documentation changes in each release, see
[Document History](https://docs.aws.amazon.com/ivs/latest/userguide/doc-history.html).
**Service Endpoints**
The following are the Amazon IVS service endpoints (all HTTPS):

  * US West (Oregon): Region `us-west-2`, Endpoint `ivs.us-west-2.amazonaws.com`
  * US East (Virginia): Region `us-east-1`, Endpoint `ivs.us-east-1.amazonaws.com`
  * EU West (Dublin): Region `eu-west-1`, Endpoint `ivs.eu-west-1.amazonaws.com`

**Allowed Header Values**

  * `Accept`: application/json
  * `Accept-Encoding`: gzip, deflate
  * `Content-Type`: application/json

**Resources**
The following resources contain information about your IVS live stream (see
[Getting Started with Amazon IVS](https://docs.aws.amazon.com/ivs/latest/userguide/GSIVS.html)):
  * Channel: stores configuration data related to your live stream. You
    first create a channel and then use the channel's stream key to start
    your live stream. See the `Channel` endpoints for more information.
  * Stream key: an identifier assigned by Amazon IVS when you create a
    channel, which is then used to authorize streaming. See the `StreamKey`
    endpoints for more information. **Treat the stream key like a secret,
    since it allows anyone to stream to the channel.**

**Tagging**
A *tag* is a metadata label that you assign to an AWS resource. A tag
comprises a *key* and a *value*, both set by you. For example, you might
set a tag as `topic:nature` to label a particular video category. See
[Tagging AWS Resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html)
for more information, including restrictions that apply to tags.
Tags can help you identify and organize your AWS resources. For example,
you can use the same tag for different resources to indicate that they are
related. You can also use tags to manage access (see
[Access Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_tags.html)).
The Amazon IVS API has these tag-related endpoints: `TagResource`,
`UntagResource`, and `ListTagsForResource`. The following resources support
tagging: Channels and Stream Keys.
**API Endpoints**

`Channel`:

  * `CreateChannel`: creates a new channel and an associated stream key to
    start streaming.
  * `GetChannel`: gets the channel configuration for the specified channel
    ARN (Amazon Resource Name).
  * `BatchGetChannel`: performs `GetChannel` on multiple ARNs simultaneously.
  * `ListChannels`: gets summary information about all channels in your
    account, in the AWS region where the API request is processed. This list
    can be filtered to match a specified string.
  * `UpdateChannel`: updates a channel's configuration. This does not affect
    an ongoing stream of this channel. You must stop and restart the stream
    for the changes to take effect.
  * `DeleteChannel`: deletes the specified channel.

`StreamKey`:

  * `CreateStreamKey`: creates a stream key, used to initiate a stream, for
    the specified channel ARN.
  * `GetStreamKey`: gets stream key information for the specified ARN.
  * `BatchGetStreamKey`: performs `GetStreamKey` on multiple ARNs
    simultaneously.
  * `ListStreamKeys`: gets summary information about stream keys for the
    specified channel.
  * `DeleteStreamKey`: deletes the stream key for the specified ARN, so it
    can no longer be used to stream.

`Stream`:

  * `GetStream`: gets information about the active (live) stream on a
    specified channel.
  * `ListStreams`: gets summary information about live streams in your
    account, in the AWS region where the API request is processed.
  * `StopStream`: disconnects the incoming RTMPS stream for the specified
    channel. Can be used in conjunction with `DeleteStreamKey` to prevent
    further streaming to a channel.
  * `PutMetadata`: inserts metadata into an RTMPS stream for the specified
    channel. A maximum of 5 requests per second per channel is allowed, each
    with a maximum 1KB payload.

[AWS Tags](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html):

  * `TagResource`: adds or updates tags for the AWS resource with the
    specified ARN.
  * `UntagResource`: removes tags from the resource with the specified ARN.
  * `ListTagsForResource`: gets information about AWS tags for the specified
    ARN.
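**Usage Sketch**

A minimal, illustrative call (the client map below is an assumption
inferred from this module's private `request/8` helper, not an official
client specification):

    client = %{
      access_key_id: "AKIA...",
      secret_access_key: "...",
      region: "us-west-2",
      endpoint: "amazonaws.com",
      proto: "https",
      port: 443,
      service: nil # overwritten with "ivs" on each request
    }
    {:ok, channel, _http_response} =
      AWS.Ivs.create_channel(client, %{"name" => "my-channel"})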
"""
@doc """
Performs `GetChannel` on multiple ARNs simultaneously.
"""
def batch_get_channel(client, input, options \\ []) do
path_ = "/BatchGetChannel"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Performs `GetStreamKey` on multiple ARNs simultaneously.
"""
def batch_get_stream_key(client, input, options \\ []) do
path_ = "/BatchGetStreamKey"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new channel and an associated stream key to start streaming.
"""
def create_channel(client, input, options \\ []) do
path_ = "/CreateChannel"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a stream key, used to initiate a stream, for the specified channel
ARN.
Note that `CreateChannel` creates a stream key. If you subsequently use
CreateStreamKey on the same channel, it will fail because a stream key
already exists and there is a limit of 1 stream key per channel. To reset
the stream key on a channel, use `DeleteStreamKey` and then
CreateStreamKey.
"""
def create_stream_key(client, input, options \\ []) do
path_ = "/CreateStreamKey"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the specified channel and its associated stream keys.
"""
def delete_channel(client, input, options \\ []) do
path_ = "/DeleteChannel"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the stream key for the specified ARN, so it can no longer be used
to stream.
"""
def delete_stream_key(client, input, options \\ []) do
path_ = "/DeleteStreamKey"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets the channel configuration for the specified channel ARN. See also
`BatchGetChannel`.
"""
def get_channel(client, input, options \\ []) do
path_ = "/GetChannel"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets information about the active (live) stream on a specified channel.
"""
def get_stream(client, input, options \\ []) do
path_ = "/GetStream"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets stream-key information for a specified ARN.
"""
def get_stream_key(client, input, options \\ []) do
path_ = "/GetStreamKey"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets summary information about all channels in your account, in the AWS
region where the API request is processed. This list can be filtered to
match a specified string.
"""
def list_channels(client, input, options \\ []) do
path_ = "/ListChannels"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets summary information about stream keys for the specified channel.
"""
def list_stream_keys(client, input, options \\ []) do
path_ = "/ListStreamKeys"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets summary information about live streams in your account, in the AWS
region where the API request is processed.
"""
def list_streams(client, input, options \\ []) do
path_ = "/ListStreams"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets information about AWS tags for the specified ARN.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Inserts metadata into an RTMPS stream for the specified channel. A maximum
of 5 requests per second per channel is allowed, each with a maximum 1KB
payload.
"""
def put_metadata(client, input, options \\ []) do
path_ = "/PutMetadata"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Disconnects the incoming RTMPS stream for the specified channel. Can be
used in conjunction with `DeleteStreamKey` to prevent further streaming to
a channel.
Note: many streaming client-software libraries automatically reconnect a
dropped RTMPS session, so to stop the stream permanently, you may want to
first revoke the `streamKey` attached to the channel.
"""
def stop_stream(client, input, options \\ []) do
path_ = "/StopStream"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Adds or updates tags for the AWS resource with the specified ARN.
"""
def tag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Removes tags from the resource with the specified ARN.
"""
def untag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
{query_, input} =
[
{"tagKeys", "tagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Updates a channel's configuration. This does not affect an ongoing stream
of this channel. You must stop and restart the stream for the changes to
take effect.
"""
def update_channel(client, input, options \\ []) do
path_ = "/UpdateChannel"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@spec request(AWS.Client.t(), atom(), binary(), list(), list(), map() | nil, list(), pos_integer() | nil) ::
        {:ok, Poison.Parser.t(), HTTPoison.Response.t()}
        | {:ok, HTTPoison.Response.t()}
        | {:error, Poison.Parser.t()}
        | {:error, HTTPoison.Error.t()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "ivs"}
host = build_host("ivs", client)
url = host
|> build_url(path, client)
|> add_query(query)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode_payload(input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(method, url, payload, headers, options, success_status_code)
end
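  # With no explicit success status code, 200/202/204 responses count as
  # success; an empty 200 body is returned as-is instead of being parsed.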
defp perform_request(method, url, payload, headers, options, nil) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, response}
{:ok, %HTTPoison.Response{status_code: status_code, body: body} = response}
when status_code == 200 or status_code == 202 or status_code == 204 ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp perform_request(method, url, payload, headers, options, success_status_code) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
{:ok, %{}, response}
{:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, []) do
url
end
defp add_query(url, query) do
querystring = AWS.Util.encode_query(query)
"#{url}?#{querystring}"
end
defp encode_payload(input) do
if input != nil, do: Poison.Encoder.encode(input, %{}), else: ""
end
end
# source file: lib/aws/ivs.ex
defmodule Poison.EncodeError do
defexception value: nil, message: nil
def message(%{value: value, message: nil}) do
"unable to encode value: #{inspect value}"
end
def message(%{message: message}) do
message
end
end
defmodule Poison.Encode do
defmacro __using__(_) do
quote do
defp encode_name(value) do
cond do
is_binary(value) ->
value
is_atom(value) ->
Atom.to_string(value)
true ->
raise Poison.EncodeError, value: value,
message: "expected string or atom key, got: #{inspect value}"
end
end
end
end
end
defmodule Poison.Pretty do
defmacro __using__(_) do
quote do
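      # Helpers injected into encoder impls: read the :pretty, :indent and
      # :offset options, and build indentation whitespace with spaces/1.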
@default_indent 2
@default_offset 0
@compile {:inline, pretty: 1, indent: 1, offset: 1, offset: 2, spaces: 1}
defp pretty(options) do
!!Keyword.get(options, :pretty)
end
defp indent(options) do
Keyword.get(options, :indent, @default_indent)
end
defp offset(options) do
Keyword.get(options, :offset, @default_offset)
end
defp offset(options, value) do
Keyword.put(options, :offset, value)
end
defp spaces(count) do
:binary.copy(" ", count)
end
end
end
end
defprotocol Poison.Encoder do
@fallback_to_any true
def encode(value, options)
end
defimpl Poison.Encoder, for: Atom do
def encode(nil, _), do: "null"
def encode(true, _), do: "true"
def encode(false, _), do: "false"
def encode(atom, options) do
Poison.Encoder.BitString.encode(Atom.to_string(atom), options)
end
end
defimpl Poison.Encoder, for: BitString do
use Bitwise
def encode("", _), do: "\"\""
def encode(string, options) do
[?", escape(string, options[:escape]), ?"]
end
defp escape("", _), do: []
for {char, seq} <- Enum.zip('"\\\n\t\r\f\b', '"\\ntrfb') do
defp escape(<<unquote(char)>> <> rest, mode) do
[unquote("\\" <> <<seq>>) | escape(rest, mode)]
end
end
# http://en.wikipedia.org/wiki/Unicode_control_characters
defp escape(<<char>> <> rest, mode) when char <= 0x1F or char == 0x7F do
[seq(char) | escape(rest, mode)]
end
defp escape(<<char :: utf8>> <> rest, mode) when char in 0x80..0x9F do
[seq(char) | escape(rest, mode)]
end
defp escape(<<char :: utf8>> <> rest, :unicode) when char in 0xA0..0xFFFF do
[seq(char) | escape(rest, :unicode)]
end
# http://en.wikipedia.org/wiki/UTF-16#Example_UTF-16_encoding_procedure
# http://unicodebook.readthedocs.org/unicode_encodings.html#utf-16-surrogate-pairs
defp escape(<<char :: utf8>> <> rest, :unicode) when char > 0xFFFF do
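    # Split the supplementary-plane codepoint into a UTF-16 surrogate pair:
    # the high surrogate carries the top 10 bits, the low one the bottom 10.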
code = char - 0x10000
[seq(0xD800 ||| (code >>> 10)),
seq(0xDC00 ||| (code &&& 0x3FF))
| escape(rest, :unicode)]
end
defp escape(<<char :: utf8>> <> rest, :javascript) when char in [0x2028, 0x2029] do
[seq(char) | escape(rest, :javascript)]
end
defp escape(string, mode) do
size = chunk_size(string, mode, 0)
<<chunk :: binary-size(size), rest :: binary>> = string
[chunk | escape(rest, mode)]
end
defp chunk_size(<<char>> <> _, _mode, acc) when char <= 0x1F or char in '"\\' do
acc
end
defp chunk_size(<<char>> <> rest, mode, acc) when char < 0x80 do
chunk_size(rest, mode, acc + 1)
end
defp chunk_size(<<_ :: utf8>> <> _, :unicode, acc) do
acc
end
defp chunk_size(<<char :: utf8>> <> _, :javascript, acc) when char in [0x2028, 0x2029] do
acc
end
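  # A multi-byte codepoint that needs no escaping: add its UTF-8 encoded
  # byte length (2-4 bytes) so the safe chunk can be sliced out in one copy.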
defp chunk_size(<<codepoint :: utf8>> <> rest, mode, acc) do
size = cond do
codepoint < 0x800 -> 2
codepoint < 0x10000 -> 3
true -> 4
end
chunk_size(rest, mode, acc + size)
end
defp chunk_size(<<char>>, _, _) do
raise Poison.EncodeError, value: <<char>>
end
defp chunk_size("", _, acc), do: acc
@compile {:inline, seq: 1}
defp seq(char) do
case Integer.to_char_list(char, 16) do
s when length(s) < 2 -> ["\\u000" | s]
s when length(s) < 3 -> ["\\u00" | s]
s when length(s) < 4 -> ["\\u0" | s]
s -> ["\\u" | s]
end
end
end
defimpl Poison.Encoder, for: Integer do
def encode(integer, _options) do
Integer.to_string(integer)
end
end
defimpl Poison.Encoder, for: Float do
def encode(float, _options) do
:io_lib_format.fwrite_g(float)
end
end
defimpl Poison.Encoder, for: Map do
alias Poison.Encoder
@compile :inline_list_funcs
use Poison.Pretty
use Poison.Encode
# TODO: Remove once we require Elixir 1.1+
defmacro __deriving__(module, struct, options) do
Poison.Encoder.Any.deriving(module, struct, options)
end
def encode(map, _) when map_size(map) < 1, do: "{}"
def encode(map, options) do
encode(map, pretty(options), options)
end
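  # Pretty mode: each pair is emitted with a leading ",\n" separator, and
  # tl/1 drops the extra separator in front of the first pair.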
def encode(map, true, options) do
indent = indent(options)
offset = offset(options) + indent
options = offset(options, offset)
fun = &[",\n", spaces(offset), Encoder.BitString.encode(encode_name(&1), options), ": ",
Encoder.encode(:maps.get(&1, map), options) | &2]
["{\n", tl(:lists.foldl(fun, [], :maps.keys(map))), ?\n, spaces(offset - indent), ?}]
end
def encode(map, _, options) do
fun = &[?,, Encoder.BitString.encode(encode_name(&1), options), ?:,
Encoder.encode(:maps.get(&1, map), options) | &2]
[?{, tl(:lists.foldl(fun, [], :maps.keys(map))), ?}]
end
end
defimpl Poison.Encoder, for: List do
alias Poison.Encoder
use Poison.Pretty
@compile :inline_list_funcs
def encode([], _), do: "[]"
def encode(list, options) do
encode(list, pretty(options), options)
end
def encode(list, false, options) do
fun = &[?,, Encoder.encode(&1, options) | &2]
[?[, tl(:lists.foldr(fun, [], list)), ?]]
end
def encode(list, true, options) do
indent = indent(options)
offset = offset(options) + indent
options = offset(options, offset)
fun = &[",\n", spaces(offset), Encoder.encode(&1, options) | &2]
["[\n", tl(:lists.foldr(fun, [], list)), ?\n, spaces(offset - indent), ?]]
end
end
defimpl Poison.Encoder, for: [Range, Stream, MapSet, HashSet] do
use Poison.Pretty
def encode(collection, options) do
encode(collection, pretty(options), options)
end
def encode(collection, false, options) do
fun = &[?,, Poison.Encoder.encode(&1, options)]
case Enum.flat_map(collection, fun) do
[] -> "[]"
[_ | tail] -> [?[, tail, ?]]
end
end
def encode(collection, true, options) do
indent = indent(options)
offset = offset(options) + indent
options = offset(options, offset)
fun = &[",\n", spaces(offset), Poison.Encoder.encode(&1, options)]
case Enum.flat_map(collection, fun) do
[] -> "[]"
[_ | tail] -> ["[\n", tail, ?\n, spaces(offset - indent), ?]]
end
end
end
defimpl Poison.Encoder, for: HashDict do
alias Poison.Encoder
use Poison.Pretty
use Poison.Encode
def encode(dict, options) do
if HashDict.size(dict) < 1 do
"{}"
else
encode(dict, pretty(options), options)
end
end
def encode(dict, false, options) do
fun = fn {key, value} ->
[?,, Encoder.BitString.encode(encode_name(key), options), ?:,
Encoder.encode(value, options)]
end
[?{, tl(Enum.flat_map(dict, fun)), ?}]
end
def encode(dict, true, options) do
indent = indent(options)
offset = offset(options) + indent
options = offset(options, offset)
fun = fn {key, value} ->
[",\n", spaces(offset), Encoder.BitString.encode(encode_name(key), options), ": ",
Encoder.encode(value, options)]
end
["{\n", tl(Enum.flat_map(dict, fun)), ?\n, spaces(offset - indent), ?}]
end
end
if Version.match?(System.version, ">=1.3.0-rc.1") do
defimpl Poison.Encoder, for: [Date, Time, NaiveDateTime, DateTime] do
def encode(value, options) do
Poison.Encoder.BitString.encode(@for.to_iso8601(value), options)
end
end
end
defimpl Poison.Encoder, for: Any do
defmacro __deriving__(module, struct, options) do
deriving(module, struct, options)
end
def deriving(module, _struct, options) do
only = options[:only]
except = options[:except]
extractor = cond do
only ->
quote(do: Map.take(struct, unquote(only)))
except ->
except = [:__struct__ | except]
quote(do: Map.drop(struct, unquote(except)))
true ->
quote(do: :maps.remove(:__struct__, struct))
end
quote do
defimpl Poison.Encoder, for: unquote(module) do
def encode(struct, options) do
Poison.Encoder.Map.encode(unquote(extractor), options)
end
end
end
end
def encode(%{__struct__: _} = struct, options) do
Poison.Encoder.Map.encode(Map.from_struct(struct), options)
end
def encode(value, _options) do
raise Poison.EncodeError, value: value
end
end
# source file: deps/poison/lib/poison/encoder.ex
defmodule Absinthe.Type.BuiltIns.Scalars do
use Absinthe.Schema.Notation
@moduledoc false
scalar :integer, name: "Int" do
description """
The `Int` scalar type represents non-fractional signed whole numeric values.
Int can represent values between `-(2^53 - 1)` and `2^53 - 1`, since it is
represented in JSON as a double-precision floating point number, as
specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
"""
serialize & &1
parse parse_with([Absinthe.Blueprint.Input.Integer], &parse_int/1)
end
scalar :float do
description """
The `Float` scalar type represents signed double-precision fractional
values as specified by
[IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
"""
serialize & &1
parse parse_with(
[Absinthe.Blueprint.Input.Integer, Absinthe.Blueprint.Input.Float],
&parse_float/1
)
end
scalar :string do
description """
The `String` scalar type represents textual data, represented as UTF-8
character sequences. The String type is most often used by GraphQL to
represent free-form human-readable text.
"""
serialize &String.Chars.to_string/1
parse parse_with([Absinthe.Blueprint.Input.String], &parse_string/1)
end
scalar :id, name: "ID" do
description """
The `ID` scalar type represents a unique identifier, often used to
refetch an object or as key for a cache. The ID type appears in a JSON
response as a String; however, it is not intended to be human-readable.
When expected as an input type, any string (such as `"4"`) or integer
(such as `4`) input value will be accepted as an ID.
"""
serialize &to_string/1
parse parse_with(
[Absinthe.Blueprint.Input.Integer, Absinthe.Blueprint.Input.String],
&parse_id/1
)
end
scalar :boolean do
description """
The `Boolean` scalar type represents `true` or `false`.
"""
serialize & &1
parse parse_with([Absinthe.Blueprint.Input.Boolean], &parse_boolean/1)
end
# Integers are only safe when between -(2^53 - 1) and 2^53 - 1 due to being
# encoded in JavaScript and represented in JSON as double-precision floating
# point numbers, as specified by IEEE 754.
@max_int 9_007_199_254_740_991
@min_int -9_007_199_254_740_991
@spec parse_int(any) :: {:ok, integer} | :error
defp parse_int(value) when is_integer(value) and value >= @min_int and value <= @max_int do
{:ok, value}
end
defp parse_int(_) do
:error
end
@spec parse_float(any) :: {:ok, float} | :error
defp parse_float(value) when is_float(value) do
{:ok, value}
end
defp parse_float(value) when is_integer(value) do
{:ok, value * 1.0}
end
defp parse_float(_) do
:error
end
@spec parse_string(any) :: {:ok, binary} | :error
defp parse_string(value) when is_binary(value) do
{:ok, value}
end
defp parse_string(_) do
:error
end
@spec parse_id(any) :: {:ok, binary} | :error
defp parse_id(value) when is_binary(value) do
{:ok, value}
end
defp parse_id(value) when is_integer(value) do
{:ok, Integer.to_string(value)}
end
defp parse_id(_) do
:error
end
@spec parse_boolean(any) :: {:ok, boolean} | :error
defp parse_boolean(value) when is_boolean(value) do
{:ok, value}
end
defp parse_boolean(_) do
:error
end
# Parse, supporting pulling values out of blueprint Input nodes
defp parse_with(node_types, coercion) do
fn
%{__struct__: str, value: value} ->
if Enum.member?(node_types, str) do
coercion.(value)
else
:error
end
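      # Explicit null literals coerce to {:ok, nil} rather than :error.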
%Absinthe.Blueprint.Input.Null{} ->
{:ok, nil}
other ->
coercion.(other)
end
end
end
# source file: lib/absinthe/type/built_ins/scalars.ex
defmodule GenServer.Behaviour do
@moduledoc """
This module is a convenience for defining GenServer callbacks in Elixir.
A server is responsible for reacting to messages received from a client.
A GenServer is an OTP behaviour that encapsulates common server
functionalities.
## Example
Below is an example of a GenServer that pushes and pops items
onto a stack:
defmodule MyServer do
use GenServer.Behaviour
# Callbacks
def handle_call(:pop, _from, [h|t]) do
{ :reply, h, t }
end
def handle_call(request, from, config) do
# Call the default implementation from GenServer.Behaviour
super(request, from, config)
end
def handle_cast({ :push, item }, config) do
{ :noreply, [item|config] }
end
def handle_cast(request, config) do
super(request, config)
end
end
{ :ok, pid } = :gen_server.start_link(MyServer, [:hello], [])
:gen_server.call(pid, :pop)
#=> :hello
:gen_server.cast(pid, { :push, :world })
#=> :ok
:gen_server.call(pid, :pop)
#=> :world
Notice we never call the server callbacks directly; they are called
by OTP whenever we interact with the server. **cast** messages are
asynchronous while **call** ones are synchronous. For a
GenServer, there are 8 different values a callback such as
`handle_call` or `handle_cast` can return:
{ :reply, reply, new_state }
{ :reply, reply, new_state, timeout }
{ :reply, reply, new_state, :hibernate }
{ :noreply, new_state }
{ :noreply, new_state, timeout }
{ :noreply, new_state, :hibernate }
{ :stop, reason, new_state }
{ :stop, reason, reply, new_state }
There are 6 callbacks required to be implemented in a GenServer. The
`GenServer.Behaviour` module defines all of them automatically, but
allows us to customize the ones we need. The required callbacks are:
* `init(args)` - invoked when the server is started;
* `handle_call(msg, from, state)` - invoked to handle call messages;
* `handle_cast(msg, state)` - invoked to handle cast messages;
* `handle_info(msg, state)` - handle all other messages which are
normally received by processes;
* `terminate(reason, state)` - called when the server is about to
terminate, useful for cleaning up;
* `code_change(old_vsn, state, extra)` - called when the application
code is being upgraded live (hot code swap);
Starting and sending messages to the GenServer is done
via Erlang's `:gen_server` module. For more information,
please refer to the following:
* http://www.erlang.org/doc/man/gen_server.html
* http://www.erlang.org/doc/design_principles/gen_server_concepts.html
* http://learnyousomeerlang.com/clients-and-servers
"""
@doc false
defmacro __using__(_) do
quote location: :keep do
@behaviour :gen_server
@doc false
def init(args) do
{ :ok, args }
end
@doc false
def handle_call(_request, _from, state) do
{ :noreply, state }
end
@doc false
def handle_info(_msg, state) do
{ :noreply, state }
end
@doc false
def handle_cast(_msg, state) do
{ :noreply, state }
end
@doc false
def terminate(_reason, _state) do
:ok
end
@doc false
def code_change(_old, state, _extra) do
{ :ok, state }
end
defoverridable [init: 1, handle_call: 3, handle_info: 2,
handle_cast: 2, terminate: 2, code_change: 3]
end
end
end
# source file: lib/elixir/lib/gen_server/behaviour.ex
defmodule Kernel.Typespec do
@moduledoc """
Provides macros and functions for working with typespecs.
Elixir comes with a notation for declaring types and specifications. Elixir is
dynamically typed, as such typespecs are never used by the compiler to
optimize or modify code. Still, using typespecs is useful as documentation and
tools such as [Dialyzer](http://www.erlang.org/doc/man/dialyzer.html) can
analyze the code with typespecs to find bugs.
The attributes `@type`, `@opaque`, `@typep`, `@spec` and `@callback` available
in modules are handled by the equivalent macros defined by this module. See
sub-sections "Defining a type" and "Defining a specification" below.
## Types and their syntax
The type syntax provided by Elixir is fairly similar to the one in
[Erlang](http://www.erlang.org/doc/reference_manual/typespec.html).
Most of the built-in types provided in Erlang (for example, `pid()`) are
expressed the same way: `pid()` or simply `pid`. Parametrized types are also
supported (`list(integer)`) and so are remote types (`Enum.t`).
Integers and atom literals are allowed as types (e.g. `1`, `:atom` or
`false`). All other types are built from unions of predefined types. Certain
shorthands are allowed, such as `[...]`, `<<>>` and `{...}`.
### Predefined types
Type :: any # the top type, the set of all terms
| none # the bottom type, contains no terms
| pid
| port
| reference
| Atom
| Bitstring
| float
| Fun
| Integer
| List
| Tuple
| Union
| UserDefined # Described in section "Defining a type"
Atom :: atom
| ElixirAtom # `:foo`, `:bar`, ...
Bitstring :: <<>>
| << _ :: M >> # M is a positive integer
| << _ :: _ * N >> # N is a positive integer
| << _ :: M, _ :: _ * N >>
Fun :: (... -> any) # any function
| (... -> Type) # any arity, returning Type
| (() -> Type)
| (TList -> Type)
Integer :: integer
| ElixirInteger # ..., -1, 0, 1, ... 42 ...
| ElixirInteger..ElixirInteger # an integer range
List :: list(Type) # proper list ([]-terminated)
| improper_list(Type1, Type2) # Type1=contents, Type2=termination
| maybe_improper_list(Type1, Type2) # Type1 and Type2 as above
| nonempty_list(Type) # proper non-empty list
| [] # empty list
| [Type] # shorthand for list(Type)
| [Type, ...] # shorthand for nonempty_list(Type)
Tuple :: tuple # a tuple of any size
| {} # empty tuple
| { TList }
TList :: Type
| Type, TList
Union :: Type1 | Type2
### Bit strings
Bit string with a base size of 3:
<< _ :: 3 >>
Bit string with a unit size of 8:
<< _ :: _ * 8 >>
### Anonymous functions
Any anonymous function:
((...) -> any)
(... -> any)
Anonymous function with arity of zero:
(() -> type)
Anonymous function with some arity:
((type, type) -> type)
(type, type -> type)
## Built-in types
Built-in type | Defined as
:-------------------- | :---------
`term` | `any`
`binary` | `<< _ :: _ * 8 >>`
`bitstring` | `<< _ :: _ * 1 >>`
`boolean` | `false` | `true`
`byte` | `0..255`
`char` | `0..0xffff`
`number` | `integer` | `float`
`list` | `[any]`
`maybe_improper_list` | `maybe_improper_list(any, any)`
`nonempty_list` | `nonempty_list(any)`
`iodata` | `iolist` | `binary`
`iolist` | `maybe_improper_list(byte` | `binary` | `iolist, binary` | `[])`
`module` | `atom`
`mfa` | `{ atom, atom, arity }`
`arity` | `0..255`
`node` | `atom`
`timeout` | `:infinity` | `non_neg_integer`
`no_return` | `none`
`fun` | `(... -> any)`
Some built-in types cannot be expressed with valid syntax according to the
language defined above.
Built-in type | Can be interpreted as
:---------------- | :--------------------
`non_neg_integer` | `0..`
`pos_integer` | `1..`
`neg_integer` | `..-1`
Types defined in other modules are referred to as "remote types"; they are
referenced as `Module.type_name` (e.g. `Enum.t` or `String.t`).
## Defining a type
@type type_name :: type
@typep type_name :: type
@opaque type_name :: type
A type defined with `@typep` is private. An opaque type, defined with
`@opaque`, is a type where the internal structure of the type will not be
visible, but the type is still public.
Types can be parametrised by defining variables as parameters; these
variables can then be used to define the type.
@type dict(key, value) :: [{ key, value }]
Types can also be defined for records, see `defrecord/3`.
## Defining a specification
@spec function_name(type1, type2) :: return_type
@callback function_name(type1, type2) :: return_type
Callbacks are used to define the callbacks functions of behaviours (see
`Behaviour`).
Guards can be used to restrict type variables given as arguments to the
function.
@spec function(arg) :: [arg] when arg: atom
Type variables with no restriction can also be defined.
@spec function(arg) :: [arg] when arg: var
Specifications can be overloaded just like ordinary functions.
@spec function(integer) :: atom
@spec function(atom) :: integer
## Notes
Elixir discourages the use of type `string` as it might be confused with
binaries which are referred to as "strings" in Elixir (as opposed to character
lists). In order to use the type that is called `string` in Erlang, one has to
use the `char_list` type which is a synonym for `string`. If you use `string`,
you'll get a warning from the compiler.
If you want to refer to the "string" type (the one operated on by functions in
the `String` module), use `String.t` type instead.
"""
@doc """
Defines a type.
This macro is responsible for handling the attribute `@type`.
## Examples
@type my_type :: atom
"""
defmacro deftype(type) do
quote do
Kernel.Typespec.deftype(:type, unquote(Macro.escape(type, unquote: true)), __ENV__)
end
end
@doc """
Defines an opaque type.
This macro is responsible for handling the attribute `@opaque`.
## Examples
@opaque my_type :: atom
"""
defmacro defopaque(type) do
quote do
Kernel.Typespec.deftype(:opaque, unquote(Macro.escape(type, unquote: true)), __ENV__)
end
end
@doc """
Defines a private type.
This macro is responsible for handling the attribute `@typep`.
## Examples
@typep my_type :: atom
"""
defmacro deftypep(type) do
quote do
Kernel.Typespec.deftype(:typep, unquote(Macro.escape(type, unquote: true)), __ENV__)
end
end
@doc """
Defines a spec.
This macro is responsible for handling the attribute `@spec`.
## Examples
@spec add(number, number) :: number
"""
defmacro defspec(spec) do
quote do
Kernel.Typespec.defspec(:spec, unquote(Macro.escape(spec, unquote: true)), __ENV__)
end
end
@doc """
Defines a callback.
This macro is responsible for handling the attribute `@callback`.
## Examples
@callback add(number, number) :: number
"""
defmacro defcallback(spec) do
quote do
Kernel.Typespec.defspec(:callback, unquote(Macro.escape(spec, unquote: true)), __ENV__)
end
end
## Helpers
@doc """
Defines a `type`, `typep` or `opaque` by receiving Erlang's typespec.
"""
def define_type(caller, kind, { name, _, vars } = type) when kind in [:type, :typep, :opaque] do
{ kind, export } =
case kind do
:type -> { :type, true }
:typep -> { :type, false }
:opaque -> { :opaque, true }
end
module = caller.module
arity = length(vars)
Module.compile_typespec module, kind, type
if export do
Module.compile_typespec(module, :export_type, [{ name, arity }])
end
define_doc(caller, kind, name, arity, export)
type
end
defp define_doc(caller, kind, name, arity, export) do
module = caller.module
doc = Module.get_attribute(module, :typedoc)
if doc do
if export do
Module.add_doc(module, caller.line, kind, { name, arity }, doc)
else
:elixir_errors.warn "#{caller.file}:#{caller.line}: type #{name}/#{arity} is private, " <>
"@typedoc's are always discarded for private types\n"
end
end
Module.delete_attribute(module, :typedoc)
end
@doc """
Defines a `spec` by receiving Erlang's typespec.
"""
def define_spec(module, tuple, definition) do
Module.compile_typespec module, :spec, { tuple, definition }
end
@doc """
Defines a `callback` by receiving Erlang's typespec.
"""
def define_callback(module, tuple, definition) do
Module.compile_typespec module, :callback, { tuple, definition }
end
@doc """
Returns `true` if the current module defines a given type
(private, opaque or not). This function is only available
for modules being compiled.
"""
def defines_type?(module, name, arity) do
finder = &match?({ ^name, _, vars } when length(vars) == arity, &1)
Enum.any?(Module.get_attribute(module, :type), finder) or
Enum.any?(Module.get_attribute(module, :opaque), finder)
end
@doc """
Returns `true` if the current module defines a given spec.
This function is only available for modules being compiled.
"""
def defines_spec?(module, name, arity) do
tuple = { name, arity }
Enum.any?(Module.get_attribute(module, :spec), &match?(^tuple, &1))
end
@doc """
Returns `true` if the current module defines a callback.
This function is only available for modules being compiled.
"""
def defines_callback?(module, name, arity) do
tuple = { name, arity }
Enum.any?(Module.get_attribute(module, :callback), &match?(^tuple, &1))
end
@doc """
Converts a spec clause back to Elixir AST.
"""
def spec_to_ast(name, { :type, line, :fun, [{:type, _, :product, args}, result] }) do
meta = [line: line]
body = { name, meta, Enum.map(args, &typespec_to_ast/1) }
vars = args ++ [result]
|> Enum.flat_map(&collect_vars/1)
|> Enum.uniq
|> Enum.map(&{ &1, { :var, meta, nil } })
result = if vars == [] do
typespec_to_ast(result)
else
{ :when, meta, [typespec_to_ast(result), vars] }
end
{ :::, meta, [body, result] }
end
def spec_to_ast(name, { :type, line, :fun, [] }) do
{ :::, [line: line], [{ name, [line: line], [] }, quote(do: term)] }
end
def spec_to_ast(name, { :type, line, :bounded_fun, [{ :type, _, :fun, [{ :type, _, :product, args }, result] }, constraints] }) do
guards =
lc {:type, _, :constraint, [{:atom, _, :is_subtype}, [{ :var, _, var }, type]]} inlist constraints do
{ var, typespec_to_ast(type) }
end
meta = [line: line]
vars = args ++ [result]
|> Enum.flat_map(&collect_vars/1)
|> Enum.uniq
|> Kernel.--(Keyword.keys(guards))
|> Enum.map(&{ &1, { :var, meta, nil } })
args = lc arg inlist args, do: typespec_to_ast(arg)
{ :::, meta, [
{ name, [line: line], args },
{ :when, meta, [typespec_to_ast(result), guards ++ vars] }
] }
end
@doc """
Converts a type clause back to Elixir AST.
"""
def type_to_ast({ { :record, record }, fields, args }) when is_atom(record) do
fields = lc field inlist fields, do: typespec_to_ast(field)
args = lc arg inlist args, do: typespec_to_ast(arg)
type = { :{}, [], [record|fields] }
quote do: unquote(record)(unquote_splicing(args)) :: unquote(type)
end
def type_to_ast({ name, type, args }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
quote do: unquote(name)(unquote_splicing(args)) :: unquote(typespec_to_ast(type))
end
@doc """
Returns all type docs available from the module's beam code.
The result is returned as a list of tuples where the first element is the pair of type
name and arity and the second element is the documentation.
The module must have a corresponding beam file which can be
located by the runtime system.
"""
@spec beam_typedocs(module | binary) :: [tuple] | nil
def beam_typedocs(module) when is_atom(module) or is_binary(module) do
case abstract_code(module) do
{ :ok, abstract_code } ->
type_docs = lc { :attribute, _, :typedoc, tup } inlist abstract_code, do: tup
List.flatten(type_docs)
_ ->
nil
end
end
@doc """
Returns all types available from the module's beam code.
The result is returned as a list of tuples where the first
element is the type (`:typep`, `:type` and `:opaque`).
The module must have a corresponding beam file which can be
located by the runtime system.
"""
@spec beam_types(module | binary) :: [tuple] | nil
def beam_types(module) when is_atom(module) or is_binary(module) do
case abstract_code(module) do
{ :ok, abstract_code } ->
exported_types = lc { :attribute, _, :export_type, types } inlist abstract_code, do: types
exported_types = List.flatten(exported_types)
lc { :attribute, _, kind, { name, _, args } = type } inlist abstract_code, kind in [:opaque, :type] do
cond do
kind == :opaque -> { :opaque, type }
{ name, length(args) } in exported_types -> { :type, type }
true -> { :typep, type }
end
end
_ ->
nil
end
end
@doc """
Returns all specs available from the module's beam code.
The result is returned as a list of tuples where the first
element is spec name and arity and the second is the spec.
The module must have a corresponding beam file which can be
located by the runtime system.
"""
@spec beam_specs(module | binary) :: [tuple] | nil
def beam_specs(module) when is_atom(module) or is_binary(module) do
from_abstract_code(module, :spec)
end
@doc """
Returns all callbacks available from the module's beam code.
The result is returned as a list of tuples where the first
element is spec name and arity and the second is the spec.
The module must have a corresponding beam file
which can be located by the runtime system.
"""
@spec beam_callbacks(module | binary) :: [tuple] | nil
def beam_callbacks(module) when is_atom(module) or is_binary(module) do
from_abstract_code(module, :callback)
end
defp from_abstract_code(module, kind) do
case abstract_code(module) do
{ :ok, abstract_code } ->
lc { :attribute, _, abs_kind, value } inlist abstract_code, kind == abs_kind, do: value
:error ->
nil
end
end
defp abstract_code(module) do
case :beam_lib.chunks(abstract_code_beam(module), [:abstract_code]) do
{:ok, { _, [{ :abstract_code, { _raw_abstract_v1, abstract_code } }] } } ->
{ :ok, abstract_code }
_ ->
:error
end
end
defp abstract_code_beam(module) when is_atom(module) do
case :code.get_object_code(module) do
{ ^module, beam, _filename } -> beam
:error -> module
end
end
defp abstract_code_beam(binary) when is_binary(binary) do
binary
end
## Macro callbacks
@doc false
def deftype(kind, { :::, _, [{ name, _, args }, definition] }, caller) when is_atom(name) and name != ::: do
args =
if is_atom(args) do
[]
else
lc(arg inlist args, do: variable(arg))
end
vars = lc { :var, _, var } inlist args, do: var
spec = typespec(definition, vars, caller)
vars = lc { :var, _, _ } = var inlist args, do: var
type = { name, spec, vars }
define_type(caller, kind, type)
end
def deftype(_kind, other, caller) do
type_spec = Macro.to_string(other)
compile_error caller, "invalid type specification: #{type_spec}"
end
@doc false
def defspec(type, { :::, meta, [{ name, _, args }, return_and_guard] }, caller) when is_atom(name) and name != ::: do
if is_atom(args), do: args = []
{ return, guard } = split_return_and_guard(return_and_guard)
unless Keyword.keyword?(guard) do
guard = Macro.to_string(guard)
compile_error caller, "expected keywords as guard in function type specification, got: #{guard}"
end
vars = Keyword.keys(guard)
constraints = guard_to_constraints(guard, vars, meta, caller)
spec = { :type, line(meta), :fun, fn_args(meta, args, return, vars, caller) }
if constraints != [] do
spec = { :type, line(meta), :bounded_fun, [spec, constraints] }
end
code = { { name, Kernel.length(args) }, spec }
Module.compile_typespec(caller.module, type, code)
code
end
def defspec(_type, other, caller) do
spec = Macro.to_string(other)
compile_error caller, "invalid function type specification: #{spec}"
end
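  # A spec's return type may be a union whose rightmost member carries the
  # `when` guard; walk down the right-hand side of each `|` to extract it.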
defp split_return_and_guard({ :when, _, [return, guard] }) do
{ return, guard }
end
defp split_return_and_guard({ :|, meta, [left, right] }) do
{ return, guard } = split_return_and_guard(right)
{ { :|, meta, [left, return] }, guard }
end
defp split_return_and_guard(other) do
{ other, [] }
end
defp guard_to_constraints(guard, vars, meta, caller) do
line = line(meta)
Enum.reduce(guard, [], fn
{ _name, { :var, _, context } }, acc when is_atom(context) ->
acc
{ name, type }, acc ->
constraint = [{ :atom, line, :is_subtype }, [{:var, line, name}, typespec(type, vars, caller)]]
type = { :type, line, :constraint, constraint }
[type|acc]
end) |> Enum.reverse
end
## To AST conversion
defp collect_vars({ :ann_type, _line, args }) when is_list(args) do
[]
end
defp collect_vars({ :type, _line, _kind, args }) when is_list(args) do
Enum.flat_map(args, &collect_vars/1)
end
defp collect_vars({ :remote_type, _line, args }) when is_list(args) do
Enum.flat_map(args, &collect_vars/1)
end
defp collect_vars({ :typed_record_field, _line, type }) do
collect_vars(type)
end
defp collect_vars({ :paren_type, _line, [type] }) do
collect_vars(type)
end
defp collect_vars({ :var, _line, var }) do
[erl_to_ex_var(var)]
end
defp collect_vars(_) do
[]
end
defp typespec_to_ast({ :type, line, :tuple, :any }) do
{ :tuple, [line: line], [] }
end
defp typespec_to_ast({ :type, line, :tuple, args }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
{ :{}, [line: line], args }
end
defp typespec_to_ast({ :type, _line, :list, [{ :type, _, :union, unions } = arg] }) do
case unpack_typespec_kw(unions, []) do
{ :ok, ast } -> ast
:error -> [typespec_to_ast(arg)]
end
end
defp typespec_to_ast({ :type, _line, :list, args }) do
lc arg inlist args, do: typespec_to_ast(arg)
end
defp typespec_to_ast({ :type, line, :binary, [arg1, arg2] }) do
[arg1, arg2] = lc arg inlist [arg1, arg2], do: typespec_to_ast(arg)
cond do
arg2 == 0 ->
quote line: line, do: <<_ :: unquote(arg1)>>
arg1 == 0 ->
quote line: line, do: <<_ :: _ * unquote(arg2)>>
true ->
quote line: line, do: <<_ :: unquote(arg1) * unquote(arg2)>>
end
end
defp typespec_to_ast({ :type, line, :union, args }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
Enum.reduce Enum.reverse(args), fn(arg, expr) -> { :|, [line: line], [arg, expr] } end
end
defp typespec_to_ast({ :type, line, :fun, [{:type, _, :product, args}, result] }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
[{ :->, [line: line], [args, typespec_to_ast(result)] }]
end
defp typespec_to_ast({ :type, line, :fun, [args, result] }) do
[{ :->, [line: line], [[typespec_to_ast(args)], typespec_to_ast(result)] }]
end
defp typespec_to_ast({ :type, line, :fun, [] }) do
typespec_to_ast({ :type, line, :fun, [{:type, line, :any}, {:type, line, :any, []} ] })
end
defp typespec_to_ast({ :type, line, :range, [left, right] }) do
{ :"..", [line: line], [typespec_to_ast(left), typespec_to_ast(right)] }
end
defp typespec_to_ast({ :type, line, name, args }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
{ name, [line: line], args }
end
defp typespec_to_ast({ :var, line, var }) do
{ erl_to_ex_var(var), line, nil }
end
defp typespec_to_ast({ :op, line, op, arg }) do
{ op, [line: line], [typespec_to_ast(arg)] }
end
# Special shortcut(s)
defp typespec_to_ast({ :remote_type, line, [{:atom, _, :elixir}, {:atom, _, :char_list}, []] }) do
typespec_to_ast({:type, line, :char_list, []})
end
defp typespec_to_ast({ :remote_type, line, [{:atom, _, :elixir}, {:atom, _, :as_boolean}, [arg]] }) do
typespec_to_ast({:type, line, :as_boolean, [arg]})
end
defp typespec_to_ast({ :remote_type, line, [mod, name, args] }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
dot = { :., [line: line], [typespec_to_ast(mod), typespec_to_ast(name)] }
{ dot, [line: line], args }
end
defp typespec_to_ast({ :ann_type, line, [var, type] }) do
{ :::, [line: line], [typespec_to_ast(var), typespec_to_ast(type)] }
end
defp typespec_to_ast({ :typed_record_field,
{ :record_field, line, { :atom, line1, name } },
type }) do
typespec_to_ast({ :ann_type, line, [{ :var, line1, name }, type] })
end
defp typespec_to_ast({ :type, _, :any }) do
quote do: ...
end
defp typespec_to_ast({ :paren_type, _, [type] }) do
typespec_to_ast(type)
end
defp typespec_to_ast({ t, _line, atom }) when is_atom(t) do
atom
end
defp typespec_to_ast(other), do: other
defp erl_to_ex_var(var) do
case atom_to_binary(var) do
<<"_", c :: [binary, size(1)], rest :: binary>> ->
binary_to_atom("_#{String.downcase(c)}#{rest}")
<<c :: [binary, size(1)], rest :: binary>> ->
binary_to_atom("#{String.downcase(c)}#{rest}")
end
end
## From AST conversion
defp line(meta) do
case :lists.keyfind(:line, 1, meta) do
{ :line, line } -> line
false -> 0
end
end
# Handle unions
defp typespec({ :|, meta, [_, _] } = exprs, vars, caller) do
exprs = collect_union(exprs)
union = lc e inlist exprs, do: typespec(e, vars, caller)
{ :type, line(meta), :union, union }
end
# Handle binaries
defp typespec({:<<>>, meta, []}, _, _) do
{:type, line(meta), :binary, [{:integer, line(meta), 0}, {:integer, line(meta), 0}]}
end
defp typespec({:<<>>, meta, [{:::, _, [{:_, meta1, atom}, {:*, _, [{:_, meta2, atom}, unit]}]}]}, _, _) when is_atom(atom) do
{:type, line(meta), :binary, [{:integer, line(meta1), 0}, {:integer, line(meta2), unit}]}
end
defp typespec({:<<>>, meta, [{:::, meta1, [{:_, meta2, atom}, base]}]}, _, _) when is_atom(atom) do
{:type, line(meta), :binary, [{:integer, line(meta1), base}, {:integer, line(meta2), 0}]}
end
# Handle ranges
defp typespec({:.., meta, args}, vars, caller) do
typespec({:range, meta, args}, vars, caller)
end
# Handle special forms
defp typespec({:__MODULE__, _, atom}, vars, caller) when is_atom(atom) do
typespec(caller.module, vars, caller)
end
defp typespec({:__aliases__, _, _} = alias, vars, caller) do
atom = Macro.expand alias, caller
typespec(atom, vars, caller)
end
# Handle funs
defp typespec([{:->, meta, [arguments, return]}], vars, caller) when is_list(arguments) do
args = fn_args(meta, arguments, return, vars, caller)
{ :type, line(meta), :fun, args }
end
# Handle type operator
defp typespec({:::, meta, [var, expr] }, vars, caller) do
left = typespec(var, [elem(var, 0)|vars], caller)
right = typespec(expr, vars, caller)
{ :ann_type, line(meta), [left, right] }
end
# Handle unary ops
defp typespec({op, meta, [integer]}, _, _) when op in [:+, :-] and is_integer(integer) do
{ :op, line(meta), op, {:integer, line(meta), integer} }
end
# Handle access macro
defp typespec({{:., meta, [Kernel, :access]}, meta1, [target, args]}, vars, caller) do
access = {{:., meta, [Kernel, :access]}, meta1,
[target, args ++ [_: { :any, [], [] }]]}
typespec(Macro.expand(access, caller), vars, caller)
end
# Handle remote calls
defp typespec({{:., meta, [remote, name]}, _, args} = orig, vars, caller) do
remote = Macro.expand remote, caller
unless is_atom(remote) do
compile_error(caller, "invalid remote in typespec: #{Macro.to_string(orig)}")
end
remote_type({typespec(remote, vars, caller), meta, typespec(name, vars, caller), args}, vars, caller)
end
# Handle tuples
defp typespec({:tuple, meta, args}, _vars, _caller) when args == [] or is_atom(args) do
{ :type, line(meta), :tuple, :any }
end
defp typespec({:{}, meta, t}, vars, caller) when is_list(t) do
args = lc e inlist t, do: typespec(e, vars, caller)
{ :type, line(meta), :tuple, args }
end
defp typespec({ left, right }, vars, caller) do
typespec({ :{}, [], [left, right] }, vars, caller)
end
# Handle blocks
defp typespec({:__block__, _meta, [arg]}, vars, caller) do
typespec(arg, vars, caller)
end
# Handle variables or local calls
defp typespec({name, meta, atom}, vars, caller) when is_atom(atom) do
if name in vars do
{ :var, line(meta), name }
else
typespec({name, meta, []}, vars, caller)
end
end
# Handle local calls
defp typespec({:string, meta, arguments}, vars, caller) do
:elixir_errors.warn "warning: string() type use is discouraged. For character lists, use " <>
"char_list() type, for strings, String.t()\n#{Exception.format_stacktrace(caller.stacktrace)}"
arguments = lc arg inlist arguments, do: typespec(arg, vars, caller)
{ :type, line(meta), :string, arguments }
end
defp typespec({:char_list, _meta, arguments}, vars, caller) do
typespec((quote do: :elixir.char_list(unquote_splicing(arguments))), vars, caller)
end
defp typespec({:as_boolean, _meta, arguments}, vars, caller) do
typespec((quote do: :elixir.as_boolean(unquote_splicing(arguments))), vars, caller)
end
defp typespec({name, meta, arguments}, vars, caller) do
arguments = lc arg inlist arguments, do: typespec(arg, vars, caller)
{ :type, line(meta), name, arguments }
end
# Handle literals
defp typespec(atom, _, _) when is_atom(atom) do
{ :atom, 0, atom }
end
defp typespec(integer, _, _) when is_integer(integer) do
{ :integer, 0, integer }
end
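  # Handle lists: [] is the empty list type, [t] is list(t), [t, ...] is
  # nonempty_list(t), and a keyword list such as [foo: t1, bar: t2]
  # becomes list({:foo, t1} | {:bar, t2}).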
defp typespec([], vars, caller) do
typespec({ nil, [], [] }, vars, caller)
end
defp typespec([spec], vars, caller) do
typespec({ :list, [], [spec] }, vars, caller)
end
defp typespec([spec, {:"...", _, quoted}], vars, caller) when is_atom(quoted) do
typespec({ :nonempty_list, [], [spec] }, vars, caller)
end
defp typespec(list, vars, caller) do
[h|t] = Enum.reverse(list)
union = Enum.reduce(t, validate_kw(h, list, caller), fn(x, acc) ->
{ :|, [], [validate_kw(x, list, caller), acc] }
end)
typespec({ :list, [], [union] }, vars, caller)
end
## Helpers
defp compile_error(caller, desc) do
raise CompileError, file: caller.file, line: caller.line, description: desc
end
defp remote_type({remote, meta, name, arguments}, vars, caller) do
arguments = lc arg inlist arguments, do: typespec(arg, vars, caller)
{ :remote_type, line(meta), [ remote, name, arguments ] }
end
defp collect_union({ :|, _, [a, b] }), do: [a|collect_union(b)]
defp collect_union(v), do: [v]
defp validate_kw({ key, _ } = t, _, _caller) when is_atom(key), do: t
defp validate_kw(_, original, caller) do
compile_error(caller, "unexpected list in typespec: #{Macro.to_string original}")
end
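  # Build the argument list for a fun type; `(... -> any)` collapses to
  # { :type, line, :fun, [] }, the type of any function.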
defp fn_args(meta, args, return, vars, caller) do
case [fn_args(meta, args, vars, caller), typespec(return, vars, caller)] do
[{:type, _, :any}, {:type, _, :any, []}] -> []
x -> x
end
end
defp fn_args(meta, [{:"...", _, _}], _vars, _caller) do
{ :type, line(meta), :any }
end
defp fn_args(meta, args, vars, caller) do
args = lc arg inlist args, do: typespec(arg, vars, caller)
{ :type, line(meta), :product, args }
end
defp variable({name, meta, _}) do
{:var, line(meta), name}
end
defp unpack_typespec_kw([{ :type, _, :tuple, [{ :atom, _, atom }, type] }|t], acc) do
unpack_typespec_kw(t, [{atom, typespec_to_ast(type)}|acc])
end
defp unpack_typespec_kw([], acc) do
{ :ok, :lists.reverse(acc) }
end
defp unpack_typespec_kw(_, _acc) do
:error
end
end
# Source file: lib/elixir/lib/kernel/typespec.ex
defmodule Rajska.ObjectScopeAuthorization do
@moduledoc """
Absinthe Phase to perform object scoping.
Authorizes all Absinthe's [objects](https://hexdocs.pm/absinthe/Absinthe.Schema.Notation.html#object/3) requested in a query by checking the value of the field defined in each object meta `scope`.
## Usage
[Create your Authorization module and add it and ObjectScopeAuthorization to your Absinthe Pipeline](https://hexdocs.pm/rajska/Rajska.html#module-usage). Then set the scope of an object:
```elixir
object :user do
meta :scope, User # Same as meta :scope, {User, :id}
field :id, :integer
field :email, :string
field :name, :string
field :company, :company
end
object :company do
meta :scope, {Company, :user_id}
field :id, :integer
field :user_id, :integer
field :name, :string
field :wallet, :wallet
end
object :wallet do
meta :scope, Wallet
field :total, :integer
end
```
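Wiring the phase into your pipeline might look like the sketch below. It is
typically defined in your router and is based on Absinthe's pipeline API;
treat it as an illustration rather than the only valid setup:
```elixir
def pipeline(config, pipeline_opts) do
  config.schema_mod
  |> Absinthe.Pipeline.for_document(pipeline_opts)
  |> Absinthe.Pipeline.insert_after(
    Absinthe.Phase.Document.Execution.Resolution,
    Rajska.ObjectScopeAuthorization
  )
end
```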
To define custom rules for the scoping, use `c:Rajska.Authorization.has_user_access?/3`. For example:
```elixir
defmodule Authorization do
use Rajska,
roles: [:user, :admin]
@impl true
def has_user_access?(%{role: :admin}, User, _id), do: true
def has_user_access?(%{id: user_id}, User, id) when user_id === id, do: true
def has_user_access?(_current_user, User, _id), do: false
end
```
Keep in mind that the `field_value` provided to `has_user_access?/3` can be `nil`. This case can be handled as you wish.
For example, to not raise any authorization errors and just return `nil`:
```elixir
defmodule Authorization do
use Rajska,
roles: [:user, :admin]
@impl true
def has_user_access?(_user, _, nil), do: true
def has_user_access?(%{role: :admin}, User, _id), do: true
def has_user_access?(%{id: user_id}, User, id) when user_id === id, do: true
def has_user_access?(_current_user, User, _id), do: false
end
```
"""
alias Absinthe.{Blueprint, Phase, Type}
alias Rajska.Introspection
use Absinthe.Phase
@spec run(Blueprint.t() | Phase.Error.t(), Keyword.t()) :: {:ok, map}
def run(%Blueprint{execution: execution} = bp, _options \\ []) do
{:ok, %{bp | execution: process(execution)}}
end
defp process(%{validation_errors: [], result: result} = execution), do: %{execution | result: result(result, execution.context)}
defp process(execution), do: execution
# Introspection
defp result(%{emitter: %{schema_node: %{identifier: identifier}}} = result, _context)
when identifier in [:query_type, nil] do
result
end
# Root
defp result(%{fields: fields, emitter: %{schema_node: %{identifier: identifier}}} = result, context)
when identifier in [:query, :mutation, :subscription] do
%{result | fields: walk_result(fields, context)}
end
# Object
defp result(%{fields: fields, emitter: %{schema_node: schema_node} = emitter} = result, context) do
type = Introspection.get_object_type(schema_node.type)
scope = Type.meta(type, :scope)
case is_authorized?(scope, result.root_value, context, type) do
true -> %{result | fields: walk_result(fields, context)}
false -> Map.put(result, :errors, [error(emitter)])
end
end
# List
defp result(%{values: values} = result, context) do
%{result | values: walk_result(values, context)}
end
  # Leaves
defp result(result, _context), do: result
defp walk_result(fields, context, new_fields \\ [])
defp walk_result([], _context, new_fields), do: Enum.reverse(new_fields)
defp walk_result([field | fields], context, new_fields) do
new_fields = [result(field, context) | new_fields]
walk_result(fields, context, new_fields)
end
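  # Scope resolution: a meta scope of {Struct, field} checks the value of
  # `field` on the resolved object, a bare Struct defaults to the :id field,
  # a scope of `false` skips authorization for that object, and a missing
  # scope raises.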
defp is_authorized?(nil, _values, _context, object), do: raise "No meta scope defined for object #{inspect object.identifier}"
defp is_authorized?(false, _values, _context, _object), do: true
defp is_authorized?({scoped_struct, field}, values, context, _object) do
scoped_field_value = Map.get(values, field)
Rajska.apply_auth_mod(context, :has_context_access?, [context, scoped_struct, scoped_field_value])
end
defp is_authorized?(scoped_struct, values, context, _object) do
scoped_field_value = Map.get(values, :id)
Rajska.apply_auth_mod(context, :has_context_access?, [context, scoped_struct, scoped_field_value])
end
defp error(%{source_location: location, schema_node: %{type: type}}) do
%Phase.Error{
phase: __MODULE__,
message: "Not authorized to access object #{Introspection.get_object_type(type).identifier}",
locations: [location]
}
end
end
# Source file: lib/middlewares/object_scope_authorization.ex
defmodule AWS.StorageGateway do
@moduledoc """
AWS Storage Gateway Service
AWS Storage Gateway is the service that connects an on-premises software
appliance with cloud-based storage to provide seamless and secure
integration between an organization's on-premises IT environment and the
AWS storage infrastructure. The service enables you to securely upload data
to the AWS cloud for cost effective backup and rapid disaster recovery.
Use the following links to get started using the *AWS Storage Gateway
Service API Reference*:
<ul> <li> [AWS Storage Gateway Required Request
Headers](https://docs.aws.amazon.com/storagegateway/latest/userguide/AWSStorageGatewayAPI.html#AWSStorageGatewayHTTPRequestsHeaders):
Describes the required headers that you must send with every POST request
to AWS Storage Gateway.
</li> <li> [Signing
Requests](https://docs.aws.amazon.com/storagegateway/latest/userguide/AWSStorageGatewayAPI.html#AWSStorageGatewaySigningRequests):
AWS Storage Gateway requires that you authenticate every request you send;
  this topic describes how to sign such a request.
</li> <li> [Error
Responses](https://docs.aws.amazon.com/storagegateway/latest/userguide/AWSStorageGatewayAPI.html#APIErrorResponses):
Provides reference information about AWS Storage Gateway errors.
</li> <li> [Operations in AWS Storage
Gateway](https://docs.aws.amazon.com/storagegateway/latest/APIReference/API_Operations.html):
Contains detailed descriptions of all AWS Storage Gateway operations, their
request parameters, response elements, possible errors, and examples of
requests and responses.
</li> <li> [AWS Storage Gateway Regions and
Endpoints:](http://docs.aws.amazon.com/general/latest/gr/rande.html#sg_region)
Provides a list of each AWS region and endpoints available for use with AWS
Storage Gateway.
</li> </ul> <note> AWS Storage Gateway resource IDs are in uppercase. When
you use these resource IDs with the Amazon EC2 API, EC2 expects resource
IDs in lowercase. You must change your resource ID to lowercase to use it
with the EC2 API. For example, in Storage Gateway the ID for a volume might
be `vol-AA22BB012345DAF670`. When you use this ID with the EC2 API, you
must change it to `vol-aa22bb012345daf670`. Otherwise, the EC2 API might
not behave as expected.
</note> <important> IDs for Storage Gateway volumes and Amazon EBS
snapshots created from gateway volumes are changing to a longer format.
Starting in December 2016, all new volumes and snapshots will be created
with a 17-character string. Starting in April 2016, you will be able to use
these longer IDs so you can test your systems with the new format. For more
information, see [Longer EC2 and EBS Resource
IDs](https://aws.amazon.com/ec2/faqs/#longer-ids).
For example, a volume Amazon Resource Name (ARN) with the longer volume ID
format looks like the following:
`arn:aws:storagegateway:us-west-2:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABBCCDDEEFFG`.
A snapshot ID with the longer ID format looks like the following:
`snap-78e226633445566ee`.
For more information, see [Announcement: Heads-up – Longer AWS Storage
Gateway volume and snapshot IDs coming in
2016](https://forums.aws.amazon.com/ann.jspa?annID=3557).
</important>
"""
@doc """
Activates the gateway you previously deployed on your host. In the
activation process, you specify information such as the region you want to
  use for storing snapshots or tapes, the time zone for scheduled snapshots,
the gateway snapshot schedule window, an activation key, and a name for
your gateway. The activation process also associates your gateway with your
account; for more information, see `UpdateGatewayInformation`.
<note> You must turn on the gateway VM before you can activate your
gateway.
</note>
"""
def activate_gateway(client, input, options \\ []) do
request(client, "ActivateGateway", input, options)
end
@doc """
Configures one or more gateway local disks as cache for a gateway. This
operation is only supported in the cached volume, tape and file gateway
  types (see [Storage Gateway
Concepts](https://docs.aws.amazon.com/storagegateway/latest/userguide/StorageGatewayConcepts.html)).
In the request, you specify the gateway Amazon Resource Name (ARN) to which
you want to add cache, and one or more disk IDs that you want to configure
as cache.
"""
def add_cache(client, input, options \\ []) do
request(client, "AddCache", input, options)
end
@doc """
Adds one or more tags to the specified resource. You use tags to add
metadata to resources, which you can use to categorize these resources. For
example, you can categorize resources by purpose, owner, environment, or
team. Each tag consists of a key and a value, which you define. You can add
tags to the following AWS Storage Gateway resources:
<ul> <li> Storage gateways of all types
</li> <li> Storage volumes
</li> <li> Virtual tapes
</li> <li> NFS and SMB file shares
</li> </ul> You can create a maximum of 50 tags for each resource. Virtual
tapes and storage volumes that are recovered to a new gateway maintain
their tags.
"""
def add_tags_to_resource(client, input, options \\ []) do
request(client, "AddTagsToResource", input, options)
end
@doc """
Configures one or more gateway local disks as upload buffer for a specified
gateway. This operation is supported for the stored volume, cached volume
and tape gateway types.
In the request, you specify the gateway Amazon Resource Name (ARN) to which
you want to add upload buffer, and one or more disk IDs that you want to
configure as upload buffer.
"""
def add_upload_buffer(client, input, options \\ []) do
request(client, "AddUploadBuffer", input, options)
end
@doc """
Configures one or more gateway local disks as working storage for a
gateway. This operation is only supported in the stored volume gateway
type. This operation is deprecated in cached volume API version 20120630.
Use `AddUploadBuffer` instead.
<note> Working storage is also referred to as upload buffer. You can also
use the `AddUploadBuffer` operation to add upload buffer to a stored volume
gateway.
</note> In the request, you specify the gateway Amazon Resource Name (ARN)
to which you want to add working storage, and one or more disk IDs that you
want to configure as working storage.
"""
def add_working_storage(client, input, options \\ []) do
request(client, "AddWorkingStorage", input, options)
end
@doc """
Connects a volume to an iSCSI connection and then attaches the volume to
the specified gateway. Detaching and attaching a volume enables you to
recover your data from one gateway to a different gateway without creating
a snapshot. It also makes it easier to move your volumes from an
on-premises gateway to a gateway hosted on an Amazon EC2 instance.
"""
def attach_volume(client, input, options \\ []) do
request(client, "AttachVolume", input, options)
end
@doc """
Cancels archiving of a virtual tape to the virtual tape shelf (VTS) after
the archiving process is initiated. This operation is only supported in the
tape gateway type.
"""
def cancel_archival(client, input, options \\ []) do
request(client, "CancelArchival", input, options)
end
@doc """
Cancels retrieval of a virtual tape from the virtual tape shelf (VTS) to a
gateway after the retrieval process is initiated. The virtual tape is
returned to the VTS. This operation is only supported in the tape gateway
type.
"""
def cancel_retrieval(client, input, options \\ []) do
request(client, "CancelRetrieval", input, options)
end
@doc """
Creates a cached volume on a specified cached volume gateway. This
operation is only supported in the cached volume gateway type.
<note> Cache storage must be allocated to the gateway before you can create
a cached volume. Use the `AddCache` operation to add cache storage to a
gateway.
</note> In the request, you must specify the gateway, size of the volume in
bytes, the iSCSI target name, an IP address on which to expose the target,
and a unique client token. In response, the gateway creates the volume and
returns information about it. This information includes the volume Amazon
Resource Name (ARN), its size, and the iSCSI target ARN that initiators can
use to connect to the volume target.
Optionally, you can provide the ARN for an existing volume as the
`SourceVolumeARN` for this cached volume, which creates an exact copy of
the existing volume’s latest recovery point. The `VolumeSizeInBytes` value
must be equal to or larger than the size of the copied volume, in bytes.
"""
def create_cached_iscsi_volume(client, input, options \\ []) do
request(client, "CreateCachediSCSIVolume", input, options)
end
@doc """
Creates a Network File System (NFS) file share on an existing file gateway.
In Storage Gateway, a file share is a file system mount point backed by
  Amazon S3 cloud storage. Storage Gateway exposes file shares using an NFS
interface. This operation is only supported for file gateways.
<important> File gateway requires AWS Security Token Service (AWS STS) to
  be activated to enable you to create a file share. Make sure AWS STS is
activated in the region you are creating your file gateway in. If AWS STS
is not activated in the region, activate it. For information about how to
activate AWS STS, see Activating and Deactivating AWS STS in an AWS Region
in the AWS Identity and Access Management User Guide.
File gateway does not support creating hard or symbolic links on a file
share.
</important>
"""
def create_nfs_file_share(client, input, options \\ []) do
request(client, "CreateNFSFileShare", input, options)
end
@doc """
Creates a Server Message Block (SMB) file share on an existing file
gateway. In Storage Gateway, a file share is a file system mount point
  backed by Amazon S3 cloud storage. Storage Gateway exposes file shares using
  an SMB interface. This operation is only supported for file gateways.
<important> File gateways require AWS Security Token Service (AWS STS) to
be activated to enable you to create a file share. Make sure that AWS STS
is activated in the AWS Region you are creating your file gateway in. If
AWS STS is not activated in this AWS Region, activate it. For information
about how to activate AWS STS, see [Activating and Deactivating AWS STS in
an AWS
Region](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html)
in the *AWS Identity and Access Management User Guide.*
File gateways don't support creating hard or symbolic links on a file
share.
</important>
"""
def create_s_m_b_file_share(client, input, options \\ []) do
request(client, "CreateSMBFileShare", input, options)
end
@doc """
Initiates a snapshot of a volume.
AWS Storage Gateway provides the ability to back up point-in-time snapshots
of your data to Amazon Simple Storage (S3) for durable off-site recovery,
as well as import the data to an Amazon Elastic Block Store (EBS) volume in
Amazon Elastic Compute Cloud (EC2). You can take snapshots of your gateway
  volume on a scheduled or ad hoc basis. This API enables you to take an
  ad hoc snapshot. For more information, see [Editing a Snapshot
Schedule](https://docs.aws.amazon.com/storagegateway/latest/userguide/managing-volumes.html#SchedulingSnapshot).
  In the CreateSnapshot request you identify the volume by providing its
  Amazon Resource Name (ARN). You must also provide a description for the
  snapshot. When AWS Storage Gateway takes the snapshot of the specified
  volume, the snapshot and description appear in the AWS Storage Gateway
  console. In response, AWS Storage Gateway returns you a snapshot ID. You
  can use this snapshot ID to check the snapshot progress or later use it
  when you want to create a volume from a snapshot. This operation is only
  supported in stored and cached volume gateway types.
<note> To list or delete a snapshot, you must use the Amazon EC2 API. For
more information, see DescribeSnapshots or DeleteSnapshot in the [EC2 API
reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_Operations.html).
</note> <important> Volume and snapshot IDs are changing to a longer length
ID format. For more information, see the important note on the
[Welcome](https://docs.aws.amazon.com/storagegateway/latest/APIReference/Welcome.html)
page.
</important>
"""
def create_snapshot(client, input, options \\ []) do
request(client, "CreateSnapshot", input, options)
end
@doc """
Initiates a snapshot of a gateway from a volume recovery point. This
operation is only supported in the cached volume gateway type.
A volume recovery point is a point in time at which all data of the volume
is consistent and from which you can create a snapshot. To get a list of
volume recovery point for cached volume gateway, use
`ListVolumeRecoveryPoints`.
In the `CreateSnapshotFromVolumeRecoveryPoint` request, you identify the
volume by providing its Amazon Resource Name (ARN). You must also provide a
description for the snapshot. When the gateway takes a snapshot of the
specified volume, the snapshot and its description appear in the AWS
Storage Gateway console. In response, the gateway returns you a snapshot
ID. You can use this snapshot ID to check the snapshot progress or later
use it when you want to create a volume from a snapshot.
  <note> To list or delete a snapshot, you must use the Amazon EC2 API. For
  more information, see DescribeSnapshots or DeleteSnapshot in the *Amazon
  Elastic Compute Cloud API Reference*.
</note>
"""
def create_snapshot_from_volume_recovery_point(client, input, options \\ []) do
request(client, "CreateSnapshotFromVolumeRecoveryPoint", input, options)
end
@doc """
Creates a volume on a specified gateway. This operation is only supported
in the stored volume gateway type.
The size of the volume to create is inferred from the disk size. You can
choose to preserve existing data on the disk, create volume from an
existing snapshot, or create an empty volume. If you choose to create an
empty gateway volume, then any existing data on the disk is erased.
In the request you must specify the gateway and the disk information on
which you are creating the volume. In response, the gateway creates the
volume and returns volume information such as the volume Amazon Resource
Name (ARN), its size, and the iSCSI target ARN that initiators can use to
connect to the volume target.
"""
def create_stored_iscsi_volume(client, input, options \\ []) do
request(client, "CreateStorediSCSIVolume", input, options)
end
@doc """
Creates a virtual tape by using your own barcode. You write data to the
  virtual tape and then archive the tape. A barcode is unique and cannot be
  reused if it has already been used on a tape. This applies to barcodes
used on deleted tapes. This operation is only supported in the tape gateway
type.
<note> Cache storage must be allocated to the gateway before you can create
a virtual tape. Use the `AddCache` operation to add cache storage to a
gateway.
</note>
"""
def create_tape_with_barcode(client, input, options \\ []) do
request(client, "CreateTapeWithBarcode", input, options)
end
@doc """
Creates one or more virtual tapes. You write data to the virtual tapes and
then archive the tapes. This operation is only supported in the tape
gateway type.
<note> Cache storage must be allocated to the gateway before you can create
virtual tapes. Use the `AddCache` operation to add cache storage to a
gateway.
</note>
"""
def create_tapes(client, input, options \\ []) do
request(client, "CreateTapes", input, options)
end
@doc """
Deletes the bandwidth rate limits of a gateway. You can delete either the
upload and download bandwidth rate limit, or you can delete both. If you
delete only one of the limits, the other limit remains unchanged. To
specify which gateway to work with, use the Amazon Resource Name (ARN) of
the gateway in your request.
"""
def delete_bandwidth_rate_limit(client, input, options \\ []) do
request(client, "DeleteBandwidthRateLimit", input, options)
end
@doc """
Deletes Challenge-Handshake Authentication Protocol (CHAP) credentials for
a specified iSCSI target and initiator pair.
"""
def delete_chap_credentials(client, input, options \\ []) do
request(client, "DeleteChapCredentials", input, options)
end
@doc """
Deletes a file share from a file gateway. This operation is only supported
for file gateways.
"""
def delete_file_share(client, input, options \\ []) do
request(client, "DeleteFileShare", input, options)
end
@doc """
Deletes a gateway. To specify which gateway to delete, use the Amazon
Resource Name (ARN) of the gateway in your request. The operation deletes
the gateway; however, it does not delete the gateway virtual machine (VM)
from your host computer.
After you delete a gateway, you cannot reactivate it. Completed snapshots
of the gateway volumes are not deleted upon deleting the gateway, however,
pending snapshots will not complete. After you delete a gateway, your next
step is to remove it from your environment.
<important> You no longer pay software charges after the gateway is
deleted; however, your existing Amazon EBS snapshots persist and you will
continue to be billed for these snapshots. You can choose to remove all
remaining Amazon EBS snapshots by canceling your Amazon EC2 subscription.
If you prefer not to cancel your Amazon EC2 subscription, you can delete
your snapshots using the Amazon EC2 console. For more information, see the
[ AWS Storage Gateway Detail Page](http://aws.amazon.com/storagegateway).
</important>
"""
def delete_gateway(client, input, options \\ []) do
request(client, "DeleteGateway", input, options)
end
@doc """
Deletes a snapshot of a volume.
You can take snapshots of your gateway volumes on a scheduled or ad hoc
basis. This API action enables you to delete a snapshot schedule for a
volume. For more information, see [Working with
Snapshots](https://docs.aws.amazon.com/storagegateway/latest/userguide/WorkingWithSnapshots.html).
In the `DeleteSnapshotSchedule` request, you identify the volume by
providing its Amazon Resource Name (ARN). This operation is only supported
in stored and cached volume gateway types.
  <note> To list or delete a snapshot, you must use the Amazon EC2 API. For
  more information, see DescribeSnapshots or DeleteSnapshot in the *Amazon
  Elastic Compute Cloud API Reference*.
</note>
"""
def delete_snapshot_schedule(client, input, options \\ []) do
request(client, "DeleteSnapshotSchedule", input, options)
end
@doc """
Deletes the specified virtual tape. This operation is only supported in the
tape gateway type.
"""
def delete_tape(client, input, options \\ []) do
request(client, "DeleteTape", input, options)
end
@doc """
Deletes the specified virtual tape from the virtual tape shelf (VTS). This
operation is only supported in the tape gateway type.
"""
def delete_tape_archive(client, input, options \\ []) do
request(client, "DeleteTapeArchive", input, options)
end
@doc """
Deletes the specified storage volume that you previously created using the
`CreateCachediSCSIVolume` or `CreateStorediSCSIVolume` API. This operation
is only supported in the cached volume and stored volume types. For stored
volume gateways, the local disk that was configured as the storage volume
is not deleted. You can reuse the local disk to create another storage
volume.
Before you delete a volume, make sure there are no iSCSI connections to the
volume you are deleting. You should also make sure there is no snapshot in
progress. You can use the Amazon Elastic Compute Cloud (Amazon EC2) API to
query snapshots on the volume you are deleting and check the snapshot
status. For more information, go to
[DescribeSnapshots](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeSnapshots.html)
in the *Amazon Elastic Compute Cloud API Reference*.
In the request, you must provide the Amazon Resource Name (ARN) of the
storage volume you want to delete.
"""
def delete_volume(client, input, options \\ []) do
request(client, "DeleteVolume", input, options)
end
@doc """
Returns the bandwidth rate limits of a gateway. By default, these limits
are not set, which means no bandwidth rate limiting is in effect.
  This operation returns a value for a bandwidth rate limit only if the
limit is set. If no limits are set for the gateway, then this operation
returns only the gateway ARN in the response body. To specify which gateway
to describe, use the Amazon Resource Name (ARN) of the gateway in your
request.
"""
def describe_bandwidth_rate_limit(client, input, options \\ []) do
request(client, "DescribeBandwidthRateLimit", input, options)
end
@doc """
Returns information about the cache of a gateway. This operation is only
supported in the cached volume, tape and file gateway types.
The response includes disk IDs that are configured as cache, and it
includes the amount of cache allocated and used.
"""
def describe_cache(client, input, options \\ []) do
request(client, "DescribeCache", input, options)
end
@doc """
Returns a description of the gateway volumes specified in the request. This
  operation is only supported in the cached volume gateway type.
The list of gateway volumes in the request must be from one gateway. In the
response Amazon Storage Gateway returns volume information sorted by volume
Amazon Resource Name (ARN).
"""
def describe_cached_iscsi_volumes(client, input, options \\ []) do
request(client, "DescribeCachediSCSIVolumes", input, options)
end
@doc """
Returns an array of Challenge-Handshake Authentication Protocol (CHAP)
credentials information for a specified iSCSI target, one for each
target-initiator pair.
"""
def describe_chap_credentials(client, input, options \\ []) do
request(client, "DescribeChapCredentials", input, options)
end
@doc """
Returns metadata about a gateway such as its name, network interfaces,
configured time zone, and the state (whether the gateway is running or
not). To specify which gateway to describe, use the Amazon Resource Name
(ARN) of the gateway in your request.
"""
def describe_gateway_information(client, input, options \\ []) do
request(client, "DescribeGatewayInformation", input, options)
end
@doc """
Returns your gateway's weekly maintenance start time including the day and
time of the week. Note that values are in terms of the gateway's time zone.
"""
def describe_maintenance_start_time(client, input, options \\ []) do
request(client, "DescribeMaintenanceStartTime", input, options)
end
@doc """
Gets a description for one or more Network File System (NFS) file shares
from a file gateway. This operation is only supported for file gateways.
"""
def describe_nfs_file_shares(client, input, options \\ []) do
request(client, "DescribeNFSFileShares", input, options)
end
@doc """
Gets a description for one or more Server Message Block (SMB) file shares
from a file gateway. This operation is only supported for file gateways.
"""
def describe_s_m_b_file_shares(client, input, options \\ []) do
request(client, "DescribeSMBFileShares", input, options)
end
@doc """
Gets a description of a Server Message Block (SMB) file share settings from
a file gateway. This operation is only supported for file gateways.
"""
def describe_s_m_b_settings(client, input, options \\ []) do
request(client, "DescribeSMBSettings", input, options)
end
@doc """
Describes the snapshot schedule for the specified gateway volume. The
snapshot schedule information includes intervals at which snapshots are
automatically initiated on the volume. This operation is only supported in
the cached volume and stored volume types.
"""
def describe_snapshot_schedule(client, input, options \\ []) do
request(client, "DescribeSnapshotSchedule", input, options)
end
@doc """
Returns the description of the gateway volumes specified in the request.
The list of gateway volumes in the request must be from one gateway. In the
response Amazon Storage Gateway returns volume information sorted by volume
ARNs. This operation is only supported in stored volume gateway type.
"""
def describe_stored_iscsi_volumes(client, input, options \\ []) do
request(client, "DescribeStorediSCSIVolumes", input, options)
end
@doc """
Returns a description of specified virtual tapes in the virtual tape shelf
(VTS). This operation is only supported in the tape gateway type.
If a specific `TapeARN` is not specified, AWS Storage Gateway returns a
description of all virtual tapes found in the VTS associated with your
account.
"""
def describe_tape_archives(client, input, options \\ []) do
request(client, "DescribeTapeArchives", input, options)
end
@doc """
Returns a list of virtual tape recovery points that are available for the
specified tape gateway.
A recovery point is a point-in-time view of a virtual tape at which all the
data on the virtual tape is consistent. If your gateway crashes, virtual
tapes that have recovery points can be recovered to a new gateway. This
operation is only supported in the tape gateway type.
"""
def describe_tape_recovery_points(client, input, options \\ []) do
request(client, "DescribeTapeRecoveryPoints", input, options)
end
@doc """
Returns a description of the specified Amazon Resource Name (ARN) of
virtual tapes. If a `TapeARN` is not specified, returns a description of
all virtual tapes associated with the specified gateway. This operation is
only supported in the tape gateway type.
"""
def describe_tapes(client, input, options \\ []) do
request(client, "DescribeTapes", input, options)
end
@doc """
Returns information about the upload buffer of a gateway. This operation is
supported for the stored volume, cached volume and tape gateway types.
The response includes disk IDs that are configured as upload buffer space,
and it includes the amount of upload buffer space allocated and used.
"""
def describe_upload_buffer(client, input, options \\ []) do
request(client, "DescribeUploadBuffer", input, options)
end
@doc """
Returns a description of virtual tape library (VTL) devices for the
specified tape gateway. In the response, AWS Storage Gateway returns VTL
device information.
This operation is only supported in the tape gateway type.
"""
def describe_vtl_devices(client, input, options \\ []) do
request(client, "DescribeVTLDevices", input, options)
end
@doc """
Returns information about the working storage of a gateway. This operation
  is only supported in the stored volume gateway type. This operation is
deprecated in cached volumes API version (20120630). Use
DescribeUploadBuffer instead.
<note> Working storage is also referred to as upload buffer. You can also
use the DescribeUploadBuffer operation to add upload buffer to a stored
volume gateway.
</note> The response includes disk IDs that are configured as working
storage, and it includes the amount of working storage allocated and used.
"""
def describe_working_storage(client, input, options \\ []) do
request(client, "DescribeWorkingStorage", input, options)
end
@doc """
Disconnects a volume from an iSCSI connection and then detaches the volume
from the specified gateway. Detaching and attaching a volume enables you to
recover your data from one gateway to a different gateway without creating
a snapshot. It also makes it easier to move your volumes from an
on-premises gateway to a gateway hosted on an Amazon EC2 instance.
"""
def detach_volume(client, input, options \\ []) do
request(client, "DetachVolume", input, options)
end
@doc """
Disables a tape gateway when the gateway is no longer functioning. For
example, if your gateway VM is damaged, you can disable the gateway so you
can recover virtual tapes.
Use this operation for a tape gateway that is not reachable or not
functioning. This operation is only supported in the tape gateway type.
<important> Once a gateway is disabled it cannot be enabled.
</important>
"""
def disable_gateway(client, input, options \\ []) do
request(client, "DisableGateway", input, options)
end
@doc """
Adds a file gateway to an Active Directory domain. This operation is only
supported for file gateways that support the SMB file protocol.
"""
def join_domain(client, input, options \\ []) do
request(client, "JoinDomain", input, options)
end
@doc """
Gets a list of the file shares for a specific file gateway, or the list of
file shares that belong to the calling user account. This operation is only
supported for file gateways.
"""
def list_file_shares(client, input, options \\ []) do
request(client, "ListFileShares", input, options)
end
@doc """
Lists gateways owned by an AWS account in a region specified in the
request. The returned list is ordered by gateway Amazon Resource Name
(ARN).
By default, the operation returns a maximum of 100 gateways. This operation
supports pagination that allows you to optionally reduce the number of
gateways returned in a response.
If you have more gateways than are returned in a response (that is, the
response returns only a truncated list of your gateways), the response
contains a marker that you can specify in your next request to fetch the
next page of gateways.
"""
def list_gateways(client, input, options \\ []) do
request(client, "ListGateways", input, options)
end
@doc """
Returns a list of the gateway's local disks. To specify which gateway to
describe, you use the Amazon Resource Name (ARN) of the gateway in the body
of the request.
The request returns a list of all disks, specifying which are configured as
  working storage, cache storage, or stored volume, or are not configured at all.
The response includes a `DiskStatus` field. This field can have a value of
present (the disk is available to use), missing (the disk is no longer
connected to the gateway), or mismatch (the disk node is occupied by a disk
that has incorrect metadata or the disk content is corrupted).
"""
def list_local_disks(client, input, options \\ []) do
request(client, "ListLocalDisks", input, options)
end
@doc """
Lists the tags that have been added to the specified resource. This
operation is only supported in the cached volume, stored volume and tape
  gateway types.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Lists virtual tapes in your virtual tape library (VTL) and your virtual
tape shelf (VTS). You specify the tapes to list by specifying one or more
tape Amazon Resource Names (ARNs). If you don't specify a tape ARN, the
operation lists all virtual tapes in both your VTL and VTS.
This operation supports pagination. By default, the operation returns a
  maximum of 100 tapes. You can optionally specify the `Limit`
parameter in the body to limit the number of tapes in the response. If the
number of tapes returned in the response is truncated, the response
includes a `Marker` element that you can use in your subsequent request to
retrieve the next set of tapes. This operation is only supported in the
tape gateway type.
"""
def list_tapes(client, input, options \\ []) do
request(client, "ListTapes", input, options)
end
@doc """
Lists iSCSI initiators that are connected to a volume. You can use this
operation to determine whether a volume is being used or not. This
operation is only supported in the cached volume and stored volume gateway
types.
"""
def list_volume_initiators(client, input, options \\ []) do
request(client, "ListVolumeInitiators", input, options)
end
@doc """
Lists the recovery points for a specified gateway. This operation is only
supported in the cached volume gateway type.
Each cache volume has one recovery point. A volume recovery point is a
point in time at which all data of the volume is consistent and from which
you can create a snapshot or clone a new cached volume from a source
volume. To create a snapshot from a volume recovery point use the
`CreateSnapshotFromVolumeRecoveryPoint` operation.
"""
def list_volume_recovery_points(client, input, options \\ []) do
request(client, "ListVolumeRecoveryPoints", input, options)
end
@doc """
Lists the iSCSI stored volumes of a gateway. Results are sorted by volume
ARN. The response includes only the volume ARNs. If you want additional
volume information, use the `DescribeStorediSCSIVolumes` or the
`DescribeCachediSCSIVolumes` API.
The operation supports pagination. By default, the operation returns a
  maximum of 100 volumes. You can optionally specify the `Limit` field
in the body to limit the number of volumes in the response. If the number
of volumes returned in the response is truncated, the response includes a
Marker field. You can use this Marker value in your subsequent request to
retrieve the next set of volumes. This operation is only supported in the
cached volume and stored volume gateway types.
"""
def list_volumes(client, input, options \\ []) do
request(client, "ListVolumes", input, options)
end
@doc """
Sends you notification through CloudWatch Events when all files written to
your NFS file share have been uploaded to Amazon S3.
AWS Storage Gateway can send a notification through Amazon CloudWatch
Events when all files written to your file share up to that point in time
have been uploaded to Amazon S3. These files include files written to the
NFS file share up to the time that you make a request for notification.
When the upload is done, Storage Gateway sends you notification through an
Amazon CloudWatch Event. You can configure CloudWatch Events to send the
  notification through event targets such as an Amazon SNS topic or an AWS
  Lambda function. This operation is only supported for file gateways.
For more information, see Getting File Upload Notification in the Storage
Gateway User Guide
(https://docs.aws.amazon.com/storagegateway/latest/userguide/monitoring-file-gateway.html#get-upload-notification).
"""
def notify_when_uploaded(client, input, options \\ []) do
request(client, "NotifyWhenUploaded", input, options)
end
@doc """
Refreshes the cache for the specified file share. This operation finds
objects in the Amazon S3 bucket that were added, removed or replaced since
the gateway last listed the bucket's contents and cached the results. This
operation is only supported in the file gateway type. You can subscribe to
be notified through an Amazon CloudWatch event when your RefreshCache
operation completes. For more information, see [Getting Notified About File
Operations](https://docs.aws.amazon.com/storagegateway/latest/userguide/monitoring-file-gateway.html#get-notification).
  When this API is called, it only initiates the refresh operation. When the
  API call completes and returns a success code, it doesn't necessarily mean
  that the file refresh has completed. You should use the refresh-complete
  notification to determine that the operation has completed before you
  check for new files on the gateway file share.
"""
def refresh_cache(client, input, options \\ []) do
request(client, "RefreshCache", input, options)
end
@doc """
Removes one or more tags from the specified resource. This operation is
only supported in the cached volume, stored volume and tape gateway types.
"""
def remove_tags_from_resource(client, input, options \\ []) do
request(client, "RemoveTagsFromResource", input, options)
end
@doc """
  Resets all cache disks that have encountered an error and makes the disks
  available for reconfiguration as cache storage. If your cache disk
  encounters an error, the gateway prevents read and write operations on
virtual tapes in the gateway. For example, an error can occur when a disk
is corrupted or removed from the gateway. When a cache is reset, the
gateway loses its cache storage. At this point you can reconfigure the
disks as cache disks. This operation is only supported in the cached volume
and tape types.
<important> If the cache disk you are resetting contains data that has not
been uploaded to Amazon S3 yet, that data can be lost. After you reset
cache disks, there will be no configured cache disks left in the gateway,
so you must configure at least one new cache disk for your gateway to
function properly.
</important>
"""
def reset_cache(client, input, options \\ []) do
request(client, "ResetCache", input, options)
end
@doc """
Retrieves an archived virtual tape from the virtual tape shelf (VTS) to a
tape gateway. Virtual tapes archived in the VTS are not associated with any
gateway. However after a tape is retrieved, it is associated with a
  gateway, even though it is also listed in the VTS, that is, the archive. This
operation is only supported in the tape gateway type.
Once a tape is successfully retrieved to a gateway, it cannot be retrieved
again to another gateway. You must archive the tape again before you can
retrieve it to another gateway. This operation is only supported in the
tape gateway type.
"""
def retrieve_tape_archive(client, input, options \\ []) do
request(client, "RetrieveTapeArchive", input, options)
end
@doc """
Retrieves the recovery point for the specified virtual tape. This operation
is only supported in the tape gateway type.
A recovery point is a point in time view of a virtual tape at which all the
data on the tape is consistent. If your gateway crashes, virtual tapes that
have recovery points can be recovered to a new gateway.
<note> The virtual tape can be retrieved to only one gateway. The retrieved
tape is read-only. The virtual tape can be retrieved to only a tape
gateway. There is no charge for retrieving recovery points.
</note>
"""
def retrieve_tape_recovery_point(client, input, options \\ []) do
request(client, "RetrieveTapeRecoveryPoint", input, options)
end
@doc """
Sets the password for your VM local console. When you log in to the local
console for the first time, you log in to the VM with the default
credentials. We recommend that you set a new password. You don't need to
know the default password to set a new password.
"""
def set_local_console_password(client, input, options \\ []) do
request(client, "SetLocalConsolePassword", input, options)
end
@doc """
Sets the password for the guest user `smbguest`. The `smbguest` user is the
user when the authentication method for the file share is set to
`GuestAccess`.
"""
def set_s_m_b_guest_password(client, input, options \\ []) do
request(client, "SetSMBGuestPassword", input, options)
end
@doc """
Shuts down a gateway. To specify which gateway to shut down, use the Amazon
Resource Name (ARN) of the gateway in the body of your request.
The operation shuts down the gateway service component running in the
gateway's virtual machine (VM) and not the host VM.
<note> If you want to shut down the VM, it is recommended that you first
shut down the gateway component in the VM to avoid unpredictable
conditions.
  </note> After the gateway is shut down, you cannot call any other API except
`StartGateway`, `DescribeGatewayInformation`, and `ListGateways`. For more
information, see `ActivateGateway`. Your applications cannot read from or
write to the gateway's storage volumes, and there are no snapshots taken.
<note> When you make a shutdown request, you will get a `200 OK` success
response immediately. However, it might take some time for the gateway to
shut down. You can call the `DescribeGatewayInformation` API to check the
status. For more information, see `ActivateGateway`.
  </note> If you do not intend to use the gateway again, you must delete the
gateway (using `DeleteGateway`) to no longer pay software charges
associated with the gateway.
"""
def shutdown_gateway(client, input, options \\ []) do
request(client, "ShutdownGateway", input, options)
end
@doc """
Starts a gateway that you previously shut down (see `ShutdownGateway`).
After the gateway starts, you can then make other API calls, your
applications can read from or write to the gateway's storage volumes and
you will be able to take snapshot backups.
<note> When you make a request, you will get a 200 OK success response
immediately. However, it might take some time for the gateway to be ready.
You should call `DescribeGatewayInformation` and check the status before
making any additional API calls. For more information, see
`ActivateGateway`.
</note> To specify which gateway to start, use the Amazon Resource Name
(ARN) of the gateway in your request.
"""
def start_gateway(client, input, options \\ []) do
request(client, "StartGateway", input, options)
end
@doc """
Updates the bandwidth rate limits of a gateway. You can update both the
upload and download bandwidth rate limit or specify only one of the two. If
you don't set a bandwidth rate limit, the existing rate limit remains.
By default, a gateway's bandwidth rate limits are not set. If you don't set
any limit, the gateway does not have any limitations on its bandwidth usage
and could potentially use the maximum available bandwidth.
To specify which gateway to update, use the Amazon Resource Name (ARN) of
the gateway in your request.
"""
def update_bandwidth_rate_limit(client, input, options \\ []) do
request(client, "UpdateBandwidthRateLimit", input, options)
end
@doc """
Updates the Challenge-Handshake Authentication Protocol (CHAP) credentials
for a specified iSCSI target. By default, a gateway does not have CHAP
enabled; however, for added security, you might use it.
<important> When you update CHAP credentials, all existing connections on
the target are closed and initiators must reconnect with the new
credentials.
</important>
"""
def update_chap_credentials(client, input, options \\ []) do
request(client, "UpdateChapCredentials", input, options)
end
@doc """
Updates a gateway's metadata, which includes the gateway's name and time
zone. To specify which gateway to update, use the Amazon Resource Name
(ARN) of the gateway in your request.
<note> For Gateways activated after September 2, 2015, the gateway's ARN
contains the gateway ID rather than the gateway name. However, changing the
name of the gateway has no effect on the gateway's ARN.
</note>
"""
def update_gateway_information(client, input, options \\ []) do
request(client, "UpdateGatewayInformation", input, options)
end
@doc """
Updates the gateway virtual machine (VM) software. The request immediately
triggers the software update.
<note> When you make this request, you get a `200 OK` success response
immediately. However, it might take some time for the update to complete.
You can call `DescribeGatewayInformation` to verify the gateway is in the
`STATE_RUNNING` state.
</note> <important> A software update forces a system restart of your
gateway. You can minimize the chance of any disruption to your applications
by increasing your iSCSI Initiators' timeouts. For more information about
increasing iSCSI Initiator timeouts for Windows and Linux, see [Customizing
Your Windows iSCSI
Settings](https://docs.aws.amazon.com/storagegateway/latest/userguide/ConfiguringiSCSIClientInitiatorWindowsClient.html#CustomizeWindowsiSCSISettings)
and [Customizing Your Linux iSCSI
Settings](https://docs.aws.amazon.com/storagegateway/latest/userguide/ConfiguringiSCSIClientInitiatorRedHatClient.html#CustomizeLinuxiSCSISettings),
respectively.
</important>
"""
def update_gateway_software_now(client, input, options \\ []) do
request(client, "UpdateGatewaySoftwareNow", input, options)
end
@doc """
Updates a gateway's weekly maintenance start time information, including
day and time of the week. The maintenance time is the time in your
gateway's time zone.
"""
def update_maintenance_start_time(client, input, options \\ []) do
request(client, "UpdateMaintenanceStartTime", input, options)
end
@doc """
Updates a Network File System (NFS) file share. This operation is only
supported in the file gateway type.
<note> To leave a file share field unchanged, set the corresponding input
field to null.
  </note> Updates the following file share settings:
<ul> <li> Default storage class for your S3 bucket
</li> <li> Metadata defaults for your S3 bucket
</li> <li> Allowed NFS clients for your file share
</li> <li> Squash settings
</li> <li> Write status of your file share
  </li> </ul> <note> This operation is only supported in file gateways.
  </note>
"""
def update_nfs_file_share(client, input, options \\ []) do
request(client, "UpdateNFSFileShare", input, options)
end
@doc """
Updates a Server Message Block (SMB) file share.
<note> To leave a file share field unchanged, set the corresponding input
field to null. This operation is only supported for file gateways.
</note> <important> File gateways require AWS Security Token Service (AWS
STS) to be activated to enable you to create a file share. Make sure that
AWS STS is activated in the AWS Region you are creating your file gateway
in. If AWS STS is not activated in this AWS Region, activate it. For
information about how to activate AWS STS, see [Activating and Deactivating
AWS STS in an AWS
Region](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html)
in the *AWS Identity and Access Management User Guide.*
File gateways don't support creating hard or symbolic links on a file
share.
</important>
"""
def update_s_m_b_file_share(client, input, options \\ []) do
request(client, "UpdateSMBFileShare", input, options)
end
@doc """
Updates a snapshot schedule configured for a gateway volume. This operation
is only supported in the cached volume and stored volume gateway types.
The default snapshot schedule for volume is once every 24 hours, starting
at the creation time of the volume. You can use this API to change the
snapshot schedule configured for the volume.
In the request you must identify the gateway volume whose snapshot schedule
you want to update, and the schedule information, including when you want
the snapshot to begin on a day and the frequency (in hours) of snapshots.
"""
def update_snapshot_schedule(client, input, options \\ []) do
request(client, "UpdateSnapshotSchedule", input, options)
end
@doc """
Updates the type of medium changer in a tape gateway. When you activate a
tape gateway, you select a medium changer type for the tape gateway. This
operation enables you to select a different type of medium changer after a
tape gateway is activated. This operation is only supported in the tape
gateway type.
"""
def update_vtl_device_type(client, input, options \\ []) do
request(client, "UpdateVTLDeviceType", input, options)
end
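  # All of the operations above funnel through request/4 below: it signs a
  # JSON POST with Signature V4, targets
  # "StorageGateway_20130630.<Action>", and decodes the JSON response body
  # with Poison.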
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "storagegateway"}
host = get_host("storagegateway", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "StorageGateway_20130630.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
# Source file: lib/aws/storage_gateway.ex
defmodule AWS.WorkMail do
@moduledoc """
Amazon WorkMail is a secure, managed business email and calendaring service with
support for existing desktop and mobile email clients.
You can access your email, contacts, and calendars using Microsoft Outlook, your
browser, or other native iOS and Android email applications. You can integrate
WorkMail with your existing corporate directory and control both the keys that
encrypt your data and the location in which your data is stored.
The WorkMail API is designed for the following scenarios:
* Listing and describing organizations
* Managing users
* Managing groups
* Managing resources
All WorkMail API operations are Amazon-authenticated and certificate-signed.
They not only require the use of the AWS SDK, but also allow for the exclusive
use of AWS Identity and Access Management users and roles to help facilitate
access, trust, and permission policies. By creating a role and allowing an IAM
user to access the WorkMail site, the IAM user gains full administrative
visibility into the entire WorkMail organization (or as set in the IAM policy).
This includes, but is not limited to, the ability to create, update, and delete
users, groups, and resources. This allows developers to perform the scenarios
listed above, as well as give users the ability to grant access on a selective
basis using the IAM model.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-10-01",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "workmail",
global?: false,
protocol: "json",
service_id: "WorkMail",
signature_version: "v4",
signing_name: "workmail",
target_prefix: "WorkMailService"
}
end
@doc """
Adds a member (user or group) to the resource's set of delegates.
"""
def associate_delegate_to_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateDelegateToResource", input, options)
end
@doc """
Adds a member (user or group) to the group's set.
"""
def associate_member_to_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateMemberToGroup", input, options)
end
@doc """
Cancels a mailbox export job.
If the mailbox export job is near completion, it might not be possible to cancel
it.
"""
def cancel_mailbox_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CancelMailboxExportJob", input, options)
end
@doc """
Adds an alias to the set of a given member (user or group) of Amazon WorkMail.
"""
def create_alias(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateAlias", input, options)
end
@doc """
Creates a group that can be used in Amazon WorkMail by calling the
`RegisterToWorkMail` operation.
"""
def create_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateGroup", input, options)
end
@doc """
Creates a new Amazon WorkMail organization.
Optionally, you can choose to associate an existing AWS Directory Service
directory with your organization. If an AWS Directory Service directory ID is
specified, the organization alias must match the directory alias. If you choose
not to associate an existing directory with your organization, then we create a
new Amazon WorkMail directory for you. For more information, see [Adding an organization](https://docs.aws.amazon.com/workmail/latest/adminguide/add_new_organization.html)
in the *Amazon WorkMail Administrator Guide*.
You can associate multiple email domains with an organization, then set your
default email domain from the Amazon WorkMail console. You can also associate a
domain that is managed in an Amazon Route 53 public hosted zone. For more
information, see [Adding a domain](https://docs.aws.amazon.com/workmail/latest/adminguide/add_domain.html)
and [Choosing the default domain](https://docs.aws.amazon.com/workmail/latest/adminguide/default_domain.html)
in the *Amazon WorkMail Administrator Guide*.
Optionally, you can use a customer managed master key from AWS Key Management
Service (AWS KMS) to encrypt email for your organization. If you don't associate
an AWS KMS key, Amazon WorkMail creates a default AWS managed master key for
you.
"""
def create_organization(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateOrganization", input, options)
end
@doc """
Creates a new Amazon WorkMail resource.
"""
def create_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateResource", input, options)
end
@doc """
Creates a user who can be used in Amazon WorkMail by calling the
`RegisterToWorkMail` operation.
"""
def create_user(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateUser", input, options)
end
@doc """
Deletes an access control rule for the specified WorkMail organization.
"""
def delete_access_control_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAccessControlRule", input, options)
end
@doc """
Removes one or more specified aliases from a set of aliases for a given user.
"""
def delete_alias(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAlias", input, options)
end
@doc """
Deletes a group from Amazon WorkMail.
"""
def delete_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteGroup", input, options)
end
@doc """
Deletes permissions granted to a member (user or group).
"""
def delete_mailbox_permissions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteMailboxPermissions", input, options)
end
@doc """
Deletes an Amazon WorkMail organization and all underlying AWS resources managed
by Amazon WorkMail as part of the organization.
You can choose whether to delete the associated directory. For more information,
see [Removing an organization](https://docs.aws.amazon.com/workmail/latest/adminguide/remove_organization.html)
in the *Amazon WorkMail Administrator Guide*.
"""
def delete_organization(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteOrganization", input, options)
end
@doc """
Deletes the specified resource.
"""
def delete_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteResource", input, options)
end
@doc """
Deletes the specified retention policy from the specified organization.
"""
def delete_retention_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteRetentionPolicy", input, options)
end
@doc """
Deletes a user from Amazon WorkMail and all subsequent systems.
Before you can delete a user, the user state must be `DISABLED`. Use the
`DescribeUser` action to confirm the user state.
Deleting a user is permanent and cannot be undone. WorkMail archives user
mailboxes for 30 days before they are permanently removed.
"""
def delete_user(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteUser", input, options)
end
@doc """
Marks a user, group, or resource as no longer used in Amazon WorkMail.
This action disassociates the mailbox and schedules it for clean-up. WorkMail
keeps mailboxes for 30 days before they are permanently removed. The
functionality in the console is *Disable*.
"""
def deregister_from_work_mail(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeregisterFromWorkMail", input, options)
end
@doc """
Returns the data available for the group.
"""
def describe_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeGroup", input, options)
end
@doc """
Describes the current status of a mailbox export job.
"""
def describe_mailbox_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeMailboxExportJob", input, options)
end
@doc """
Provides more information regarding a given organization based on its
identifier.
"""
def describe_organization(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeOrganization", input, options)
end
@doc """
Returns the data available for the resource.
"""
def describe_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeResource", input, options)
end
@doc """
Provides information regarding the user.
"""
def describe_user(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeUser", input, options)
end
@doc """
Removes a member from the resource's set of delegates.
"""
def disassociate_delegate_from_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisassociateDelegateFromResource", input, options)
end
@doc """
Removes a member from a group.
"""
def disassociate_member_from_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisassociateMemberFromGroup", input, options)
end
@doc """
Gets the effects of an organization's access control rules as they apply to a
specified IPv4 address, access protocol action, or user ID.
"""
def get_access_control_effect(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAccessControlEffect", input, options)
end
@doc """
Gets the default retention policy details for the specified organization.
"""
def get_default_retention_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetDefaultRetentionPolicy", input, options)
end
@doc """
Requests a user's mailbox details for a specified organization and user.
"""
def get_mailbox_details(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetMailboxDetails", input, options)
end
@doc """
Lists the access control rules for the specified organization.
"""
def list_access_control_rules(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListAccessControlRules", input, options)
end
@doc """
Creates a paginated call to list the aliases associated with a given entity.
"""
def list_aliases(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListAliases", input, options)
end
@doc """
Returns an overview of the members of a group.
Users and groups can be members of a group.
"""
def list_group_members(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListGroupMembers", input, options)
end
@doc """
Returns summaries of the organization's groups.
"""
def list_groups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListGroups", input, options)
end
@doc """
Lists the mailbox export jobs started for the specified organization within the
last seven days.
"""
def list_mailbox_export_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListMailboxExportJobs", input, options)
end
@doc """
Lists the mailbox permissions associated with a user, group, or resource
mailbox.
"""
def list_mailbox_permissions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListMailboxPermissions", input, options)
end
@doc """
Returns summaries of the customer's organizations.
"""
def list_organizations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListOrganizations", input, options)
end
@doc """
Lists the delegates associated with a resource.
Users and groups can be resource delegates and answer requests on behalf of the
resource.
"""
def list_resource_delegates(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListResourceDelegates", input, options)
end
@doc """
Returns summaries of the organization's resources.
"""
def list_resources(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListResources", input, options)
end
@doc """
Lists the tags applied to an Amazon WorkMail organization resource.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Returns summaries of the organization's users.
"""
def list_users(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListUsers", input, options)
end
@doc """
Adds a new access control rule for the specified organization.
The rule allows or denies access to the organization for the specified IPv4
addresses, access protocol actions, and user IDs. Adding a new rule with the
same name as an existing rule replaces the older rule.
"""
def put_access_control_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutAccessControlRule", input, options)
end
@doc """
Sets permissions for a user, group, or resource.
This replaces any pre-existing permissions.
"""
def put_mailbox_permissions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutMailboxPermissions", input, options)
end
@doc """
Puts a retention policy to the specified organization.
"""
def put_retention_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutRetentionPolicy", input, options)
end
@doc """
Registers an existing and disabled user, group, or resource for Amazon WorkMail
use by associating a mailbox and calendaring capabilities.
It performs no change if the user, group, or resource is enabled and fails if
the user, group, or resource is deleted. This operation results in the
accumulation of costs. For more information, see
[Pricing](https://aws.amazon.com/workmail/pricing). The equivalent console
functionality for this operation is *Enable*.
Users can either be created by calling the `CreateUser` API operation or they
can be synchronized from your directory. For more information, see
`DeregisterFromWorkMail`.
"""
def register_to_work_mail(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RegisterToWorkMail", input, options)
end
@doc """
Allows the administrator to reset the password for a user.
"""
def reset_password(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ResetPassword", input, options)
end
@doc """
Starts a mailbox export job to export MIME-format email messages and calendar
items from the specified mailbox to the specified Amazon Simple Storage Service
(Amazon S3) bucket.
For more information, see [Exporting mailbox content](https://docs.aws.amazon.com/workmail/latest/adminguide/mail-export.html)
in the *Amazon WorkMail Administrator Guide*.
"""
def start_mailbox_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartMailboxExportJob", input, options)
end
@doc """
Applies the specified tags to the specified Amazon WorkMail organization
resource.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Untags the specified tags from the specified Amazon WorkMail organization
resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Updates a user's current mailbox quota for a specified organization and user.
"""
def update_mailbox_quota(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateMailboxQuota", input, options)
end
@doc """
Updates the primary email for a user, group, or resource.
The current email is moved into the list of aliases (or swapped between an
existing alias and the current primary email), and the email provided in the
input is promoted as the primary.
"""
def update_primary_email_address(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdatePrimaryEmailAddress", input, options)
end
@doc """
Updates data for the resource.
To have the latest information, it must be preceded by a `DescribeResource`
call. The dataset in the request should be the one expected when performing
another `DescribeResource` call.
"""
def update_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateResource", input, options)
end
end
# Source file: lib/aws/generated/work_mail.ex
defmodule PollutionData do
@moduledoc """
Loading data from file to the pollution server using Enum.
"""
@doc """
Gets lines from CSV file as list.
Returns list.
"""
def import_lines_from_CSV do
File.read!("pollution.csv")
|> String.split("\r\n")
end
@doc """
Makes map containing information about 1 measurement from given `line`.
Returns map with three items: `:datetime`, `:location`, `:pollution_level`.
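## Examples
Assuming the source CSV stores dates as `DD-MM-YYYY`:
iex> PollutionData.parse_line("03-05-2017,12:00:00,20.06,49.986,120")
%{datetime: {{2017, 5, 3}, {12, 0, 0}}, location: {20.06, 49.986}, pollution_level: 120}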
"""
def parse_line(line) do
[date_str, time_str, x_str, y_str, value_str] = String.split(line, ",")
date = date_str
|> String.split("-")
|> Enum.reverse()
|> Enum.map(&Integer.parse/1)
|> Enum.map(&(elem(&1, 0)))
|> List.to_tuple()
time = time_str
|> String.split(":")
|> Enum.map(&Integer.parse/1)
|> Enum.map(&(elem(&1, 0)))
|> List.to_tuple()
datetime = {date, time}
location = [x_str, y_str]
|> Enum.map(&Float.parse/1)
|> Enum.map(&(elem(&1, 0)))
|> List.to_tuple()
pollution_level = elem(Integer.parse(value_str), 0)
%{:datetime => datetime, :location => location, :pollution_level => pollution_level}
end
@doc """
Gets unique stations from `measurements` (list of maps).
Returns list of unique locations of stations.
"""
def identify_stations(measurements) do
stations = measurements
|> Enum.map(fn measurement -> measurement.location end)
|> Enum.uniq()
stations
end
@doc """
Makes name of station from given `station_location`.
Returns string which is a name of the station.
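## Examples
iex> PollutionData.generate_station_name({20.06, 49.986})
"station_20.06_49.986"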
"""
def generate_station_name(station_location) do
"station_#{elem(station_location, 0)}_#{elem(station_location, 1)}"
end
@doc """
Adds given `stations` (list of tuples representing locations) to the pollution server.
"""
def add_stations(stations) do
add_station_fn = fn station -> :pollution_gen_server.addStation(generate_station_name(station), station) end
Enum.each(stations, add_station_fn)
end
@doc """
Adds given `measurements` (list of maps) to the pollution server.
"""
def add_measurements(measurements) do
add_measurement_fn = fn measurement ->
:pollution_gen_server.addValue(measurement.location, measurement.datetime, "PM10", measurement.pollution_level)
end
Enum.each(measurements, add_measurement_fn)
end
@doc """
Main function which gets list of lines from the file and saves all measurements to the pollution server.
Function prints time needed to load stations, load measurements, get station mean and get daily mean.
"""
def add_measurements_from_file do
measurements = import_lines_from_CSV()
|> Enum.map(&parse_line/1)
stations = identify_stations(measurements)
:pollution_sup.start_link()
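# :timer.tc/1 returns {elapsed_microseconds, result}; taking element 0 and
# dividing by 1_000_000 converts each measurement to seconds.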
add_stations_time = fn -> add_stations(stations) end
|> :timer.tc
|> elem(0)
|> Kernel./(1_000_000)
add_measurements_time = fn -> add_measurements(measurements) end
|> :timer.tc
|> elem(0)
|> Kernel./(1_000_000)
example_station = {20.06, 49.986}
example_day = {2017, 5, 3}
get_station_mean_time = fn -> :pollution_gen_server.getStationMean(example_station, "PM10") end
|> :timer.tc
|> elem(0)
|> Kernel./(1_000_000)
get_daily_mean_time = fn -> :pollution_gen_server.getDailyMean(example_day, "PM10") end
|> :timer.tc
|> elem(0)
|> Kernel./(1_000_000)
:timer.sleep(200)
IO.puts "Time of adding stations: #{add_stations_time}"
IO.puts "Time of adding measurements: #{add_measurements_time}"
IO.puts "Time of getting station mean: #{get_station_mean_time}"
IO.puts "Time of getting daily mean: #{get_daily_mean_time}"
end
end
# Source file: src/pollution_data.ex
defmodule RRulex.Parser do
alias RRulex.Util
@moduledoc """
Parses an RRULE from the [iCalendar RFC 2445](https://www.ietf.org/rfc/rfc2445.txt) spec into an `%RRulex{}` struct.
"""
@frequencies %{
"SECONDLY" => :secondly,
"MINUTELY" => :minutely,
"HOURLY" => :hourly,
"DAILY" => :daily,
"WEEKLY" => :weekly,
"MONTHLY" => :monthly,
"YEARLY" => :yearly
}
@days %{
"SU" => :sunday,
"MO" => :monday,
"TU" => :tuesday,
"WE" => :wednesday,
"TH" => :thursday,
"FR" => :friday,
"SA" => :saturday
}
@months [
:january,
:february,
:march,
:april,
:may,
:june,
:july,
:august,
:september,
:october,
:november,
:december
]
@string_to_atoms %{
"FREQ" => :frequency,
"COUNT" => :count,
"UNTIL" => :until,
"INTERVAL" => :interval,
"BYSECOND" => :by_second,
"BYMINUTE" => :by_minute,
"BYHOUR" => :by_hour,
"BYMONTHDAY" => :by_month_day,
"BYYEARDAY" => :by_year_day,
"BYWEEKNO" => :by_week_number,
"BYSETPOS" => :by_set_pos,
"BYDAY" => :by_day,
"BYMONTH" => :by_month,
"WKST" => :week_start
}
@doc """
Parses an RFC 2445 RRULE string into a usable struct.
## Examples
iex> RRulex.parse("FREQ=DAILY;COUNT=5")
%RRulex{
:frequency => :daily,
:count => 5
}
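Combining several parts works the same way:
iex> RRulex.parse("FREQ=WEEKLY;BYDAY=MO,WE,FR")
%RRulex{
:frequency => :weekly,
:by_day => [:monday, :wednesday, :friday]
}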
"""
def parse(rrule) do
rrule
|> String.split(";")
|> Enum.reduce(%RRulex{}, fn rule, hash ->
[key, value] = rule |> String.split("=")
atomized_key = parse_attr_key(key)
hash |> Map.put(atomized_key, parse_attr_value(atomized_key, value))
end)
end
defp parse_value_as_list(value, map_fn) do
value
|> String.split(",")
|> Enum.map(map_fn)
end
defp parse_attr_key(key) do
case Map.fetch(@string_to_atoms, key) do
{:ok, atom} ->
atom
:error ->
raise ArgumentError, message: "'#{key}' is not a valid attribute"
end
end
defp parse_attr_value(:frequency, value) do
case Map.fetch(@frequencies, value) do
{:ok, freq} -> freq
:error -> raise ArgumentError, message: "'#{value}' is not a valid frequency"
end
end
defp parse_attr_value(:until, value) do
out = Util.to_date(value, "Etc/UTC")
case out do
{:ok, date} -> date
_ -> raise ArgumentError, message: "'#{value}' is not a valid date"
end
end
defp parse_attr_value(:count, value) do
value = String.to_integer(value)
case value >= 1 do
true -> value
false -> raise ArgumentError, message: "'COUNT' must be at least 1"
end
end
defp parse_attr_value(:interval, value) do
value = String.to_integer(value)
case value >= 1 do
true -> value
false -> raise ArgumentError, message: "'INTERVAL' must be at least 1"
end
end
defp parse_attr_value(:by_second, value) do
value =
value
|> parse_value_as_list(&String.to_integer(&1))
validation =
value
|> Enum.map(&(&1 >= 0 && &1 <= 59))
case false in validation do
false -> value
true -> raise ArgumentError, message: "'BYSECOND' must be between 0 and 59"
end
end
defp parse_attr_value(:by_minute, value) do
value =
value
|> parse_value_as_list(&String.to_integer(&1))
validation =
value
|> Enum.map(&(&1 >= 0 && &1 <= 59))
case false in validation do
false -> value
true -> raise ArgumentError, message: "'BYMINUTE' must be between 0 and 59"
end
end
defp parse_attr_value(:by_hour, value) do
value =
value
|> parse_value_as_list(&String.to_integer(&1))
validation =
value
|> Enum.map(&(&1 >= 0 && &1 <= 23))
case false in validation do
false -> value
true -> raise ArgumentError, message: "'BYHOUR' must be between 0 and 23"
end
end
defp parse_attr_value(:by_month_day, value) do
value =
value
|> parse_value_as_list(&String.to_integer(&1))
validation =
value
|> Enum.map(&((&1 >= 1 && &1 <= 31) || (&1 >= -31 && &1 <= -1)))
case false in validation do
false -> value
true -> raise ArgumentError, message: "'BYMONTHDAY' must be between 1 and 31 or between -31 and -1"
end
end
defp parse_attr_value(:by_year_day, value) do
value =
value
|> parse_value_as_list(&String.to_integer(&1))
validation =
value
|> Enum.map(&((&1 >= 1 && &1 <= 366) || (&1 >= -366 && &1 <= -1)))
case false in validation do
false -> value
true -> raise ArgumentError, message: "'BYYEARDAY' must be between 1 and 366 or between -366 and -1"
end
end
defp parse_attr_value(:by_week_number, value) do
value =
value
|> parse_value_as_list(&String.to_integer(&1))
validation =
value
|> Enum.map(&((&1 >= 1 && &1 <= 53) || (&1 >= -53 && &1 <= -1)))
case false in validation do
false -> value
true -> raise ArgumentError, message: "'BYWEEKNO' must be between 1 and 53 or between -53 and -1"
end
end
defp parse_attr_value(:by_set_pos, value) do
value =
value
|> parse_value_as_list(&String.to_integer(&1))
validation =
value
|> Enum.map(&((&1 >= 1 && &1 <= 366) || (&1 >= -366 && &1 <= -1)))
case false in validation do
false ->
value
true ->
raise ArgumentError,
message: "'BYSETPOS' must be between 1 and 366 or between -366 and -1 if it is set"
end
end
defp parse_attr_value(:by_day, value) do
# Upcase the values
value =
value
|> parse_value_as_list(&String.upcase(&1))
# Check to see if they're in the list of days
validation =
value
|> Enum.map(&(&1 in Map.keys(@days)))
# If they all are, map each abbreviation to its day atom and use those as the value.
case false in validation do
false -> Enum.map(value, &Map.fetch!(@days, &1))
true -> raise ArgumentError, message: "'BYDAY' must have a valid day"
end
end
defp parse_attr_value(:week_start, value) do
value = String.upcase(value)
case Map.fetch(@days, value) do
{:ok, day} -> day
_ -> raise ArgumentError, message: "'WKST' must have a valid day"
end
end
defp parse_attr_value(:by_month, value) do
value =
value
|> parse_value_as_list(&String.to_integer(&1))
validation =
value
|> Enum.map(&(&1 >= 1 && &1 <= 12))
case false in validation do
false -> Enum.map(value, &Enum.at(@months, &1 - 1))
true -> raise ArgumentError, message: "'BYMONTH' must be between 1 and 12 if it is set"
end
end
defp parse_attr_value(key, _) do
raise ArgumentError, message: "'#{key}' is not a valid attribute"
end
end
# Source file: lib/rrulex/parser.ex
defmodule DataQuacker.Schema do
@moduledoc ~S"""
Defines macros for creating data schemas
which represents a mapping from the source to the desired output.
> Note: To use the macros you have to put `use DataQuacker.Schema` in the desired module.
A schema can be defined to represent the structure of an arbitrarily nested map or list of maps.
This is done with the `schema/2`, `row/2` and `field/3` macros.
Additionally, there are two special macros: `validate/1` and `transform/1`.
Lastly, the `source/1` and `virtual_source/1` macros are used
to define the data which should be inserted in a particular field.
These allow for validation and transformation to be performed
on a specific subset of the output data.
> Note: the `row/2` and `field/3` macros represent the *output* structure,
while the `source/1` and `virtual_source/1` macros reference the input data.
Since both the input and the output can be said to have rows,
the term "source row" is used in the documentation to denote a row in the input data.
The term "row" is used to denote a row in the output.
All of the structure-defining macros take a block as their last argument
which can be thought of as their "body". The `schema/2` and `field/2` macros
also take a name as their first argument, and `row/2` and `field/3`
take a keyword list of options as their first and second argument respectively.
More information can be found in the documentation for the specific macros.
## Examples
> Note: A fully working implementation of these examples can be found in the tests inside the "examples" directory.
Suppose we have a table of students in the form of a CSV file, which looks like this:
| First name | Last name | Age | Favourite subject |
|:----------:|:---------:|:---:|:-----------------:|
| John | Smith | 19 | Maths |
| Adam | Johnson | 18 | Physics |
| Quackers | the Duck | 1 | Programming |
Also suppose our desired output is a list of `{:ok, map}` tuples with the following structure:
```elixir
{:ok, %{
first_name: "...",
last_name: "...",
age: "...",
favourite_subject: "..."
}}
```
The mapping from the table to the list of maps can be represented as follows:
```elixir
defmodule StudentsSchema do
use DataQuacker.Schema
schema :students do
field :first_name do
source("first name")
end
field :last_name do
source("last name")
end
field :age do
source("age")
end
field :favourite_subject do
source("favourite subject")
end
end
end
```
This looks great (I hope!), but realistically we would like age to be an Integer,
and favourite subject to be somehow validated. This can be achieved by modifying the previous schema, like this:
```elixir
defmodule StudentsSchema do
use DataQuacker.Schema
schema :students do
field :first_name do
source("first name")
end
field :last_name do
source("last name")
end
field :age do
transform(fn age ->
case Integer.parse(age) do
{age_int, _} -> {:ok, age_int}
:error -> {:error, "Invalid value #{age} given"}
end
end)
source("age")
end
field :favourite_subject do
validate(fn subj -> subj in ["Maths", "Physics", "Programming"] end)
source("favourite subject")
end
end
end
```
Now our result will be a list of maps, like:
```elixir
[
# ...
{:ok, %{
age: 123,
# ...
}}
# ...
]
```
> Note: To see how to use such schema to parse a CSV file, please see the example in the documentation for the `DataQuacker` module.
However if, for example, an invalid age is given,
the entire row where the error occurred will result in the following tuple:
`{:error, "Invalid value blabla given"}`
Great, but what if we have the "First name" and "Last name" columns in our CSV files,
but only a `:full_name` field in our database? No problem; fields can be arbitrarily nested.
It's just a small tweak:
```elixir
defmodule StudentsSchema do
use DataQuacker.Schema
schema :students do
field :full_name do
transform(fn %{first_name: first_name, last_name: last_name} ->
{:ok, "#{first_name} #{last_name}"}
end)
field :first_name do
source("first name")
end
field :last_name do
source("last name")
end
end
# ...
end
end
```
Now our output is:
```elixir
{:ok, [
#...
{:ok, %{
full_name: "<NAME>",
# ...
}}
#...
]}
```
To illustrate some more functionality, let's take a look at another example.
We will start with a very simple CSV source file
which will gradually become more and more complex,
and so will our rules for parsing it.
| Apartment/flat size (in m^2) | Price per 1 month |
|:----------------------------:|:-----------------:|
| 40 | 1000 |
| 50 | 1100 |
```elixir
defmodule PricingSchema do
use DataQuacker.Schema
schema :pricing do
field :size do
transform(fn size ->
case Integer.parse(size) do
{size_int, _} -> {:ok, size_int}
:error -> {:error, "Invalid value #{size} given"}
end
end)
source("Apartment/flat size (in m^2)")
end
field :price do
transform(fn price ->
case Integer.parse(price) do
{price_int, _} -> {:ok, price_int}
:error -> {:error, "Invalid value #{price} given"}
end
end)
source("Price per 1 month")
end
end
end
```
The above results in:
```elixir
[
{:ok, %{size: 50, price: 1100}},
{:ok, %{size: 40, price: 1000}}
]
```
> Note: The rows in the result are in the reverse order compared to the source rows. This is because for large lists reversing may be an expensive operation, which is often redundant, for example if the result is supposed to be inserted in a database.
This schema could work, but there are some problems with it.
It's not fun to copy and paste the string-to-integer parsing function
over and over again. That's why we'll create a regular function
and pass a reference to it in both places.
```elixir
defmodule PricingSchema do
use DataQuacker.Schema
schema :pricing do
field :size do
transform(&PricingSchema.parse_int/1)
# ...
end
field :price do
transform(&PricingSchema.parse_int/1)
# ...
end
end
def parse_int(str) do
case Integer.parse(str) do
{int, _} -> {:ok, int}
:error -> {:error, "Invalid value #{str} given"}
end
end
end
```
> Note: the reference to the function must be written out in full (including the module name),
because it will be executed in a different context.
This is better, but still not ideal for two reasons.
First of all, we source our data based on simple string matching. While this will still work
if the casing in the headers changes, it will not if "Price per 1 month" changes to "Price *for* 1 month",
or "Apartment/flat size (in m^2)" to "Apartment *or* flat size (in m^2)".
Since most likely we do not have control over the source, these can change unexpectedly.
Second of all, our error messages are quite vague since they do not specify the offending source row and field.
To tackle the first one we can change our `source/1` macros to be either strings, regexes,
lists of strings or custom functions. The details of each approach are specified
in the docs for the `source/1` macro, but for now we will just use a list of strings.
`source("Apartment/flat size (in m^2)")` -> `source(["apartment", "size"])`
`source("Apartment/flat size (in m^2)")` -> `source(["price", "1"])`
The above mean "match a header which contains apartment and size"
and "match a header which contains apartment and 1".
> Note: The order of the headers is inconsequential.
As for the second issue, transform can actually be given a one- or two-argument function.
If it is given a one-argument function, the argument at execution will be the value of the field
or row. If it is given a two-argument function, the second argument will be a `%Context{}` struct,
which contains the following fields: `:metadata`, `:support_data`, `:source_row`.
Support data is an arbitrary value of any type that can be passed in at parse time.
It can be used to, for example, validate something against a database without having to fetch the data
for each row. More on that in the documentation of the `DataQuacker` module. For now, however, we only need `metadata` and `source_row`. The first is a
`{type, name_or_index}` tuple, where the type is `:field` or `:row`,
and the second element is the field's name or, for a row, its index.
The second is simply the index of the source row being processed.
> Note: the term "source row" is used here to denote a row in the input file. The term row
is used to denote a row of output.
We can therefore change our `parse_int/1` function into
```elixir
def parse_int(str, %{metadata: metadata, source_row: source_row}) do
case Integer.parse(str) do
{int, _} -> {:ok, int}
:error -> {:error, "Error processing #{elem(metadata, 0)} #{elem(metadata, 1)} in row #{source_row}; '#{str}' given"}
end
end
```
An example error will look like this: `{:error, "Error processing field price in row 2; 'oops' given"}`
The next case we will be dealing with here is again a "small change" to the source file.
| Apartment/flat size (in m^2) | Price per 1 month | Price per 3 months |
|:----------------------------:|:-----------------:|--------------------|
| 40 | 1000 | 2800 |
| 50 | 1100 | 3000 |
| 60 | | 3600 |
Now each source row contains two different prices for different lease periods.
Additionally, for the bigger apartments there may only be an option
to rent for three months.
We could create a schema to parse the data into rows like:
`%{size: 40, price_1: 1000, price_3: 2800}`,
but this is not ideal since we would have to deal with `nil` at `:price_1`,
and we probably want separate rows in the database for each lease duration,
as this will allow us to easily pull out the price for a specific size and lease duration
using SQL indexes.
A better structure therefore would look like this
```elixir
[
# ...
{:ok, %{size: 40, duration: 3, price: 2800}},
{:ok, %{size: 40, duration: 1, price: 1000}}
]
```
This is where the `row/2` macro comes in. It allows us to specify any number of output rows
for a single input row. Previously we did not use this macro at all,
since the lack of it implies there is exactly one output row per input row.
This is our new schema:
```elixir
defmodule PricingSchema do
use DataQuacker.Schema
schema :pricing do
row skip_if: (fn %{price: price} -> is_nil(price) end) do
field :size do
transform(&PricingSchema.parse_int/2)
source(["apartment", "size"])
end
field :duration do
virtual_source(1)
end
field :price do
transform(&PricingSchema.parse_int/2)
source(["price", "1"])
end
end
row do
field :size do
transform(&PricingSchema.parse_int/2)
source(["apartment", "size"])
end
field :duration do
virtual_source(3)
end
field :price do
transform(&PricingSchema.parse_int/2)
source(["price", "3"])
end
end
end
def parse_int("", _), do: {:ok, nil}
def parse_int(str, %{metadata: metadata, source_row: source_row}) do
case Integer.parse(str) do
{int, _} -> {:ok, int}
:error -> {:error, "Error processing #{elem(metadata, 0)} #{elem(metadata, 1)} in row #{source_row}; '#{str}' given"}
end
end
end
```
There are a few new interesting things going on here.
Firstly, as we can see, any column in the source can be inserted multiple times
within the schema. This is particularly useful if for a single input row
we want to have multiple output rows which share some of the fields.
Secondly, we added a new field `:duration` which instead of being sourced from the input data
is just a static value. We achieved it with the `virtual_source/1` macro
which either takes a value or a function returning a value to be injected into the field.
This is useful for us to be able to make the output structure as close to our database model as we can.
> Note: There is a special case in the `parse_int/2` function to return nil on empty input,
because `Integer.parse/2` will return an error given an empty string.
Lastly, we added a special option to the first output row, called `skip_if`.
The function we provided will be evaluated for each output row representing a one-month lease price,
and if it returns `true` the row will not appear in the actual result.
Using our latest schema and the CSV presented above, we get this result:
```elixir
{:ok, [
{:ok, %{duration: 3, price: 3600, size: 60}},
{:ok, %{duration: 3, price: 3000, size: 50}},
{:ok, %{duration: 1, price: 1100, size: 50}},
{:ok, %{duration: 3, price: 2800, size: 40}},
{:ok, %{duration: 1, price: 1000, size: 40}}
]}
```
The last case is about multiple transformations on the same field.
Our source file has changed again, so that it includes some non-integer prices.
We could just switch our usage of `Integer.parse/2` to `Decimal.parse/1`,
but there is a catch: `Decimal.parse/1` expects `.` (dot) to be the decimal separator,
and our source uses `,` (comma).
For this reason we will need to first replace the commas with periods, and then convert.
As the transformer we provide for the `:price` field is an arbitrary Elixir function,
we could do both of those operations at once.
That would work, but for schemas which have very complex transformation and validation rules,
the function could get bloated quickly.
The goal of this library is to avoid that complexity, and allow for easy understanding
of the custom rules. This is why it's recommended to split the transformers into multiple functions.
Let's create two functions: `parse_decimal/2` and `replace_commas/1`.
> Note: To follow this example you will have to install the `Decimal` library, which you can find at [hex.pm/packages/decimal](https://hex.pm/packages/decimal).
```elixir
def replace_commas(str) do
{:ok, String.replace(str, ",", ".")}
end
def parse_decimal("", _), do: {:ok, nil}
def parse_decimal(str, %{metadata: metadata, source_row: source_row}) do
case Decimal.parse(str) do
{decimal, ""} -> {:ok, decimal}
:error -> {:error, "Error processing #{elem(metadata, 0)} #{elem(metadata, 1)} in row #{source_row}; '#{str}' given"}
end
end
```
We can now change our `:price` fields to use these functions:
```elixir
# ...
field :price do
transform(&PricingSchema.replace_commas/1)
transform(&PricingSchema.parse_decimal/2)
source(["price", "1"])
end
# ...
field :price do
transform(&PricingSchema.replace_commas/1)
transform(&PricingSchema.parse_decimal/2)
source(["price", "3"])
end
# ...
```
> Note: Different transformers for the same field or row may take different numbers of arguments, depending on whether the context is needed in the particular function.
The final schema should look like this:
```elixir
defmodule PricingSchema do
use DataQuacker.Schema
schema :pricing do
row skip_if: (fn %{price: price} -> is_nil(price) end) do
field :size do
transform(&PricingSchema.parse_int/2)
source(["apartment", "size"])
end
field :duration do
virtual_source(1)
end
field :price do
transform(&PricingSchema.replace_commas/1)
transform(&PricingSchema.parse_decimal/2)
source(["price", "1"])
end
end
row do
field :size do
transform(&PricingSchema.parse_int/2)
source(["apartment", "size"])
end
field :duration do
virtual_source(3)
end
field :price do
transform(&PricingSchema.replace_commas/1)
transform(&PricingSchema.parse_decimal/2)
source(["price", "3"])
end
end
end
def parse_int("", _), do: {:ok, nil}
def parse_int(str, %{metadata: metadata, source_row: source_row}) do
case Integer.parse(str) do
{int, _} -> {:ok, int}
:error -> {:error, "Error processing #{elem(metadata, 0)} #{elem(metadata, 1)} in row #{source_row}; '#{str}' given"}
end
end
def replace_commas(str) do
{:ok, String.replace(str, ",", ".")}
end
def parse_decimal("", _), do: {:ok, nil}
def parse_decimal(str, %{metadata: metadata, source_row: source_row}) do
case Decimal.parse(str) do
{decimal, ""} -> {:ok, decimal}
:error -> {:error, "Error processing #{elem(metadata, 0)} #{elem(metadata, 1)} in row #{source_row}; '#{str}' given"}
end
end
end
```
"""
alias DataQuacker.Schema.State
alias DataQuacker.SchemaError
import DataQuacker.Schema.FunWrapper
@doc false
defmacro __using__(_opts) do
quote do
import unquote(__MODULE__)
@state State.new()
@schema_names []
end
end
@doc ~S"""
Defines a schema and a `schema_structure/1` function
which takes the schema name as the argument
and returns the schema in a form that can be passed to a parser.
Multiple schemas can be defined in a single module.
The result structure is a map with the following types:
```elixir
%{
__name__: atom(),
rows: list(),
matchers: list()
}
```
The macro takes in a name and a block with which the rows, fields, etc. can be defined.
The block must contain at least one row. Note, however, that if no row is explicitly specified,
but at least one field is, the schema is assumed to have exactly one row which contains all of the fields.
> Note: if one or many fields are present directly inside the schema, the row macro cannot be used explicitly.
The same is true the other way around - if at least one row is specified explicitly,
fields can only appear inside rows, not directly in the schema.
Unlike `row/2` and `field/3`, the `schema/2` macro cannot have validators or transformers.
If there is only one row, but it needs to define validators or transformers,
the schema must define this row explicitly.
"""
defmacro schema(name, do: block) when is_atom(name) do
quote do
if unquote(name) in @schema_names do
raise SchemaError, """
Invalid schema name.
There already exists a schema #{inspect(unquote(name))}
on this module.
"""
end
if not Enum.empty?(@state.cursor) do
raise SchemaError, """
Invalid schema position.
Schema can only appear as a top-level module macro
(cannot be nested in other schemas).
"""
end
@state State.new()
@state State.register(@state, :schema, {unquote(name), %{}})
unquote(block)
if Enum.empty?(@state.rows) do
raise SchemaError, """
Invalid schema usage.
Schema must have at least
one row or one field.
"""
end
if State.flagged?(@state, :has_loose_fields?) do
@state State.update(@state, :row, %{fields: @state.fields})
@state State.cursor_exit(@state)
end
@state State.cursor_exit(@state)
@state State.update(@state, :schema, %{
matchers: @state.matchers,
rows: @state.rows
})
def schema_structure(unquote(name)) do
@state.schema
end
end
end
defmacro schema(name, do: _block) do
quote do
raise SchemaError, """
Invalid schema name.
Must be an atom, #{inspect(unquote(name))} given.
"""
end
end
@doc ~S"""
Defines an output row.
Can only be used directly inside a schema, and only if the schema has no fields
directly inside it.
This macro takes in a keyword list of options, and a block within which the fields,
validators and transformers can be specified.
## Options
* `:skip_if` - a function of arity 1 or 2, which returns `true` or `false` given the value of the row and optionally the context; `true` means the row should be skipped from the output, `false` is a "noop"
> Note: The order of execution is always: transformers, then validators, then "skip_if".
"""
defmacro row(opts \\ [], do: block) do
quote do
if State.flagged?(@state, :has_loose_fields?) do
raise SchemaError, """
Invalid row usage.
Rows cannot appear in a schema
if the schema has loose fields
(fields appearing outside of any row).
"""
end
if not State.cursor_at?(@state, :schema) do
raise SchemaError, """
Invalid row position.
Rows can only appear directly
inside a schema.
"""
end
@state State.clear_fields(@state)
@state State.register(
@state,
:row,
{length(@state.rows), %{skip_if: skip_if_opt(unquote(opts))}}
)
unquote(block)
@state State.update(@state, :row, %{fields: @state.fields})
if @state.fields == %{} do
raise SchemaError, """
Invalid row usage.
Rows must have at least one subfield.
"""
end
@state State.cursor_exit(@state)
end
end
@doc ~S"""
Defines an output field.
Can be used inside a schema, a row or another field.
Can only be used directly inside a schema if the schema has no explicitly defined rows.
Can only be used inside another field if that field has no source.
This macro takes in a name, a keyword list of options, and a block within which the subfields or source,
and validators and transformers can be specified.
Can either specify exactly one source (virtual or regular) or subfields.
## Options
* `:skip_if` - a function of arity 1 or 2, which returns `true` or `false` given the value of the field and optionally the context; `true` means the field should be skipped from the output, `false` is a "noop"
> Note: The order of execution is always: transformers, then validators, then "skip_if"
"""
defmacro field(_name, _opts \\ [], _)
defmacro field(name, opts, do: block) when is_atom(name) do
quote do
if State.cursor_at?(@state, nil) do
raise SchemaError, """
Invalid field position.
Fields can only appear inside a schema,
rows or other fields.
"""
end
if State.cursor_at?(@state, :schema) and not Enum.empty?(@state.rows) do
raise SchemaError, """
Invalid field usage.
Fields cannot appear directly inside a schema
if the schema explicitly declares rows.
"""
end
if State.cursor_at?(@state, :schema) do
@state State.flag(@state, :has_loose_fields?, true)
@state State.register(@state, :row, {length(@state.rows), %{}})
end
if State.cursor_at?(@state, :field) and State.get(@state, :field).__type__ == :sourced do
raise SchemaError, """
Invalid field usage.
A field can either have subfields or a source,
but not both.
"""
end
if State.cursor_at?(@state, :row) and Map.has_key?(@state.fields, unquote(name)) do
raise SchemaError, """
Invalid field name.
There already exists a field of the same name
in this row.
"""
end
if State.cursor_at?(@state, :field) and
Map.has_key?(State.get(@state, :field).subfields, unquote(name)) do
raise SchemaError, """
Invalid field name.
There already exists a subfield of the same name
in this field.
"""
end
if State.cursor_at?(@state, :field) do
@state State.update(@state, :field, %{__type__: :wrapper})
end
@state State.register(
@state,
:field,
{unquote(name), %{skip_if: skip_if_opt(unquote(opts))}}
)
unquote(block)
if is_nil(State.get(@state, :field).__type__) do
raise SchemaError, """
Invalid field usage.
Fields must either have a source
or at least one subfield.
"""
end
@state State.cursor_exit(@state)
end
end
defmacro field(name, _opts, do: _block) do
quote do
raise SchemaError, """
Invalid field name.
Must be an atom, #{inspect(unquote(name))} given.
"""
end
end
@doc ~S"""
Defines a source mapping from the input.
Can only be used inside a field, and only if that field does not define any subfields
or any other source.
This macro takes in either a "needle" which can be string, a regex, a list of strings,
or a function of arity 1 or 2.
## Needle
* when is a string - the downcased header name for a particular column must contain the downcased string given as the needle for the column to match
* when is a regex - the header name for a particular column must match the needle for the column to match
* when is a list of strings - the downcased header name for a particular column must contain all of the downcased elements given as the needle for the column to match
* when is a function - given the header name for a particular column, and optionally the context, must return `true` for the column to match; the function must always return `true` or `false`
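## Examples
Each needle form, shown informally (illustrative only):
```elixir
source("price")                        # substring match, case-insensitive
source(~r/price per \d+ months?/i)     # header must match the regex
source(["price", "1"])                 # header must contain every fragment
source(fn header -> String.starts_with?(header, "Price") end)
```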
"""
defmacro source(needle) do
{unquoted_needle, _} = Code.eval_quoted(needle)
case unquoted_needle do
string when is_binary(string) ->
quote do
source(fn column_name ->
String.contains?(String.downcase(column_name), unquote(String.downcase(needle)))
end)
end
list when is_list(list) ->
quote do
source(fn column_name ->
column_name = String.downcase(column_name)
Enum.all?(
unquote(Enum.map(needle, &String.downcase(&1))),
&String.contains?(column_name, &1)
)
end)
end
%Regex{} ->
quote do
source(fn column_name ->
Regex.match?(unquote(needle), column_name)
end)
end
fun when is_function(fun) ->
quote do
if not State.cursor_at?(@state, :field) do
raise SchemaError, """
Invalid source position.
Sources can only appear inside fields.
"""
end
if State.get(@state, :field).__type__ == :sourced do
raise SchemaError, """
Invalid source usage.
Only one source per field is allowed.
"""
end
if State.get(@state, :field).__type__ == :wrapper do
raise SchemaError, """
Invalid source usage.
A field can either have subfields or a source,
but not both.
"""
end
@state State.register(@state, :matcher, wrap_fun(unquote(needle), 1..2))
@state State.update(@state, :field, %{__type__: :sourced, source: State.target(@state)})
end
_el ->
quote do
raise SchemaError, """
Invalid column source type.
Must be a string, a regex, a list of strings, or a function
which can be used to match a column name.
"""
end
end
end
@doc ~S"""
Defines a value to be injected to a particular field.
Can only be used inside a field, and only if that field does not define any subfields
or any other source.
This macro takes in either a literal value, or a function of arity 0 or 1.
## Value
* when is a function - optionally given the context, can return any value to be injected inside the field
* else - the value is injected inside the field "as is"
"""
defmacro virtual_source(value) do
{unquoted_value, _} = Code.eval_quoted(value)
case unquoted_value do
fun when is_function(fun) ->
quote do
if not State.cursor_at?(@state, :field) do
raise SchemaError, """
Invalid source position.
Sources can only appear inside fields.
"""
end
if State.get(@state, :field).__type__ == :sourced do
raise SchemaError, """
Invalid source usage.
Only one source per field is allowed.
"""
end
if State.get(@state, :field).__type__ == :wrapper do
raise SchemaError, """
Invalid source usage.
A field can either have subfields or a source,
but not both.
"""
end
@state State.update(@state, :field, %{
__type__: :sourced,
source: wrap_fun(unquote(value), 0..1)
})
end
_el ->
quote do
virtual_source(fn -> unquote(value) end)
end
end
end
@doc ~S"""
Defines a validator for a field or row.
Can only be used inside a field or row.
This macro takes in a function of arity 1 or 2, which will be applied to the value of the row or the field where the validator was defined. Multiple validators are allowed, and will be executed in the order in which they are defined.
> Note: To use validators on a row, the row must be defined explicitly. Implicit rows cannot have validators.
## Fun
* when is a function - given the field's or row's value and optionally the context, must return either `true`, `false`, `:ok`, `:error` or a tuple `{:error, any()}`, where `true` and `ok` are the success typing, and `false`, `:error` and `{:error, any()}` are the error typing; the entire output row will be an error row if any validation inside it or inside its fields fails
"""
defmacro validate(fun) do
quote do
validator = wrap_fun(unquote(fun), 1..2)
cond do
State.cursor_at?(@state, :row) ->
validators = @state |> State.get(:row) |> Map.get(:validators)
@state State.update(@state, :row, %{validators: validators ++ [validator]})
State.cursor_at?(@state, :field) ->
validators = @state |> State.get(:field) |> Map.get(:validators)
@state State.update(@state, :field, %{validators: validators ++ [validator]})
true ->
raise SchemaError, """
Incorrect validator position.
Validators can only appear
inside rows or fields.
"""
end
end
end
@doc ~S"""
Defines a data transformer for a field or row.
Can only be used inside a field or row.
This macro takes in a function of arity 1 or 2, which will be applied to the value of the row or the field where the transformer was defined. Multiple transformers are allowed, and will be executed in the order in which they are defined.
> Note: To use transformers on a row, the row must be defined explicitly. Implicit rows cannot have transformers.
## Fun
* when is a function - given the field's or row's value and optionally the context, must return either `{:ok, any()}`, `{:error, any()}` or `:error`, where `{:ok, any()}` is the success typing and `{:error, any()}`, and `:error` are the error typing; the second element of the success tuple is taken to be the new value of the row or field; the entire output row will be an error row if any validation inside it or inside its fields fails
"""
defmacro transform(fun) do
quote do
transformer = wrap_fun(unquote(fun), 1..2)
cond do
State.cursor_at?(@state, :row) ->
transformers = @state |> State.get(:row) |> Map.get(:transformers)
@state State.update(@state, :row, %{transformers: transformers ++ [transformer]})
State.cursor_at?(@state, :field) ->
transformers = @state |> State.get(:field) |> Map.get(:transformers)
@state State.update(@state, :field, %{transformers: transformers ++ [transformer]})
true ->
raise SchemaError, """
Incorrect transformer position.
Transformers can only appear
inside rows or fields.
"""
end
end
end
@doc false
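# Normalizes the :skip_if option at compile time: when present it must be a
# function, which is wrapped into a 1..2-arity FunWrapper closure; when absent
# it becomes nil; any other type raises a SchemaError.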
defmacro skip_if_opt(opts) do
{unquoted_opts, _} = Code.eval_quoted(opts)
case Keyword.fetch(unquoted_opts, :skip_if) do
{:ok, fun} when is_function(fun) ->
quote do
wrap_fun(unquote(Keyword.get(opts, :skip_if)), 1..2)
end
:error ->
quote do
nil
end
_el ->
quote do
raise SchemaError, """
Invalid skip_if type.
Must be a function
with arity 1 or 2.
"""
end
end
end
end
# Source file: lib/schema/schema.ex
defmodule Grapevine.Gossip.Rumor.State do
@moduledoc """
The rumor mongering message state
"""
defstruct [:state, :rounds, :removed_at]
@type t :: %__MODULE__{
state: atom(),
rounds: integer(),
removed_at: nil | integer()
}
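# `rounds` is how many gossip rounds the rumor may still be forwarded for;
# `removed_at` holds the millisecond timestamp set when the rumor is removed.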
@doc """
Returns a new state
## Examples
iex> State.new(2)
%State{rounds: 2, state: :infected}
"""
@spec new(integer()) :: t()
def new(rounds), do: %__MODULE__{rounds: rounds, state: :infected}
@doc """
Changes the state to removed
## Examples
iex> State.remove(State.new(0), 0)
%State{rounds: 0, state: :removed, removed_at: 0}
"""
@spec remove(t(), integer()) :: t()
def remove(value, removed_at \\ now()),
do: %__MODULE__{value | state: :removed, removed_at: removed_at}
@doc """
Check if the state is infected
## Examples
iex> State.infected?(%State{rounds: 0, state: :removed})
false
iex> State.infected?(State.new(1))
true
"""
@spec infected?(t()) :: boolean()
def infected?(%{state: :infected}), do: true
def infected?(_), do: false
@doc """
Checks if the state has expired
## Examples
iex> State.expired?(%State{rounds: 0, state: :removed, removed_at: 0}, 10)
true
iex> State.expired?(
...> %State{rounds: 0, state: :removed, removed_at: :os.system_time(:millisecond) - 100}, 200
...> )
false
iex> State.expired?(%State{rounds: 0, state: :removed, removed_at: nil}, 10)
false
"""
@spec expired?(t(), integer()) :: boolean()
def expired?(%{removed_at: nil}, _), do: false
def expired?(%{removed_at: removed_at}, threshold) do
now() - removed_at > threshold
end
@doc """
Decrements the number of state rounds
## Examples
iex> State.dec(State.new(2))
%State{rounds: 1, state: :infected}
iex> State.dec(State.new(0))
%State{rounds: 0, state: :infected}
"""
@spec dec(t()) :: t()
def dec(%{rounds: 0} = value), do: value
def dec(%{rounds: rounds} = value), do: %__MODULE__{value | rounds: rounds - 1}
defp now(), do: :os.system_time(:millisecond)
end
# Source file: lib/grapevine/gossip/rumor/state.ex
defmodule Geo.Turf.Measure do
@moduledoc """
A collection of measurement related tools
"""
import Geo.Turf.Helpers, only: [bbox: 1, flatten_coords: 1]
alias Geo.Turf.Math
@doc """
Takes a LineString and returns a Point at a specified distance along the line.
Note that this will approximate the location to the nearest coordinate point.
## Examples
iex> %Geo.LineString{coordinates: [{-23.621,64.769},{-23.629,64.766},{-23.638,64.766}]}
...> |> Geo.Turf.Measure.along(400, :meters)
%Geo.Point{coordinates: {-23.629,64.766}}
"""
def along(%Geo.LineString{coordinates: coords}, distance, unit \\ :kilometers)
when is_number(distance) do
walk_along(coords, distance, unit, 0)
end
defp walk_along([from, to| next], distance, unit, acc) when distance > acc do
travel = get_distance(from, to, unit)
walk_along([to| next], distance, unit, acc+travel)
end
defp walk_along([from| _next], distance, _unit, acc) when distance <= acc do
# TODO: overshot
%Geo.Point{coordinates: from}
end
defp walk_along([{x,y}], _distance, _unit, _acc), do: %Geo.Point{coordinates: {x,y}}
defp walk_along([], _distance, _unit, _acc), do: :error
@doc """
Takes a LineString and returns a Point at the middle of the line.
## Examples
iex> %Geo.LineString{coordinates: [{-23.621,64.769},{-23.629,64.766},{-23.638,64.766}]}
...> |> Geo.Turf.Measure.along_midpoint()
%Geo.Point{coordinates: {-23.629, 64.766}}
"""
def along_midpoint(%Geo.LineString{} = line) do
along(line, length_of(line) / 2)
end
@doc """
Find the center of a `Geo.geometry()` item and give us a `Geo.Point`
## Examples
iex> Geo.Turf.Measure.center(%Geo.Polygon{coordinates: [{0,0}, {0,10}, {10,10}, {10,0}]})
%Geo.Point{ coordinates: {5, 5} }
"""
def center(geometry) when is_map(geometry) do
{min_x, min_y, max_x, max_y} = bbox(geometry)
if is_integer(min_x) && is_integer(min_y) && is_integer(max_x) && is_integer(max_y) do
%Geo.Point{ coordinates: {
round((min_x + max_x) / 2),
round((min_y + max_y) / 2)
} }
else
%Geo.Point{ coordinates: {
(min_x + max_x) / 2,
(min_y + max_y) / 2
} }
end
end
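# The float branch above is taken whenever a bbox corner is a float, in which
# case the midpoint is not rounded (a hedged sketch, assuming `bbox/1` returns
# `{min_x, min_y, max_x, max_y}` as imported above, and mirroring the flat
# coordinate list used in the doctest):
#
#     iex> Geo.Turf.Measure.center(%Geo.Polygon{coordinates: [{0.0, 0.0}, {0.0, 5.0}, {5.0, 5.0}, {5.0, 0.0}]})
#     %Geo.Point{coordinates: {2.5, 2.5}}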
@doc """
Verifies that two points are close to each other. Defaults to 100 meters.
## Examples
iex> %Geo.Point{coordinates: {-22.653375, 64.844254}}
...> |> Geo.Turf.Measure.close_to(%Geo.Point{coordinates: {-22.654042, 64.843656}})
true
iex> %Geo.Point{coordinates: {-22.653375, 64.844254}}
...> |> Geo.Turf.Measure.close_to(%Geo.Point{coordinates: {-23.803020, 64.730435}}, 100, :kilometers)
true
"""
def close_to(point_a, point_b, maximum \\ 100, units \\ :meters) do
distance(point_a, point_b, units) < maximum
end
@doc """
Calculates the distance between two points in degrees, radians, miles, or kilometers.
This uses the [Haversine formula](http://en.wikipedia.org/wiki/Haversine_formula) to account for global curvature.
## Examples
iex> Geo.Turf.Measure.distance(
...> %Geo.Point{coordinates: {-75.343, 39.984}},
...> %Geo.Point{coordinates: {-75.534, 39.123}},
...> :kilometers)
97.13
"""
def distance(from, to, unit \\ :kilometers) do
get_distance(from, to, unit)
|> Math.rounded(2)
end
defp get_distance(from, to, unit)
defp get_distance(%Geo.Point{coordinates: a}, %Geo.Point{coordinates: b}, unit), do: distance(a, b, unit)
defp get_distance({x1, y1}, {x2, y2}, unit) do
d_lat = Math.degrees_to_radians(y2 - y1)
d_lon = Math.degrees_to_radians(x2 - x1)
lat1 = Math.degrees_to_radians(y1)
lat2 = Math.degrees_to_radians(y2)
a = :math.pow(:math.sin(d_lat / 2), 2) +
:math.pow(:math.sin(d_lon / 2), 2) * :math.cos(lat1) * :math.cos(lat2)
Math.radians_to_length(2 * :math.atan2(:math.sqrt(a), :math.sqrt(1 - a)), unit)
end
@doc """
Takes a `t:Geo.geometry()` and measures its length in the specified units.
## Examples
iex> %Geo.LineString{coordinates: [{-23.621,64.769},{-23.629,64.766},{-23.638,64.766}]}
...> |> Geo.Turf.Measure.length_of()
0.93
"""
def length_of(feature, unit \\ :kilometers) do
feature
|> flatten_coords()
|> walk_length(unit, 0)
|> Math.rounded(2)
end
defp walk_length([from, to| next], unit, acc) do
travel = get_distance(from, to, unit)
walk_length([to| next], unit, acc+travel)
end
defp walk_length([{_,_}], _unit, acc), do: acc
defp walk_length([], _unit, acc), do: acc
end
| lib/geo/turf/measure.ex | 0.81335 | 0.790934 | measure.ex | starcoder |
defmodule WeightedRoundRobin do
@moduledoc ~S"""
A local, decentralized and scalable weighted round-robin generator.
It allows developers to generate an evenly distributed sequence that honors a
predefined set of weights attributed to elements of any type. The `take/2`
function is guaranteed to be atomic and isolated.
Generators can have any number of pools, each under a different `pool_name`.
The `precision` indicates how many digits of precision you want in the
generator output (so 100 indicates a two-digit precision).
The application can have multiple instances of the generator, but in this
case every function needs to be prefixed with the generator name, indicated
as `wrr`.
Internally the pools are versioned using an ETS table for each version of the
pool created with `new_pool`. Accesses hit the newest version first and
migrate from the older versions to the newer one over time. When a new
generation is started, the older ones are eventually deleted by an internal GC.
"""
use GenServer
@type wrr :: atom
@type pool_name :: any
@type key_weights :: [{key, weight}]
@type key :: any
@type weight :: float
@type precision :: non_neg_integer
@type start_option :: {:name, generator_name :: atom}
@type option :: {:precision, precision}
@threshold_pos 2
@counter_pos 3
@dist_pos 4
@version_autoincr -1
@version_pos 2
@default_precision 100
@default_gc_interval :timer.minutes(1)
@default_gc_cleanup_min_timeout :timer.seconds(10)
@doc """
Create a new pool under the generator.
It is safe to reconfigure pools by calling `new_pool` with different
parameters, while `take` is being served at other processes.
Keys with weight equal to 0.0 will be filtered out.
"""
@spec new_pool(pool_name, key_weights) :: :ok
def new_pool(pool_name, key_weights) when is_list(key_weights),
do: new_pool(__MODULE__, pool_name, key_weights, [])
@spec new_pool(pool_name, key_weights, [option]) :: :ok
def new_pool(pool_name, key_weights, options) when is_list(key_weights) and is_list(options),
do: new_pool(__MODULE__, pool_name, key_weights, options)
@spec new_pool(wrr, pool_name, key_weights, [option]) :: :ok
def new_pool(wrr, pool_name, key_weights, options \\ [])
when is_atom(wrr) and is_list(key_weights) and is_list(options) do
key_weights = Enum.reject(key_weights, &(elem(&1, 1) == 0))
total = Enum.reduce(key_weights, 0, &(&2 + 1 / elem(&1, 1)))
precision = Keyword.get(options, :precision, @default_precision)
kw_cw = Enum.map(key_weights, fn {k, w} -> {k, w, 0} end)
weighted_dist =
0..(trunc(Float.round(total)) * precision)
|> Enum.reduce({kw_cw, []}, fn _, {kw_cw, acc} ->
# NGINX smooth weighted round-robin algorithm
# https://github.com/phusion/nginx/commit/27e94984486058d73157038f7950a0a36ecc6e35
kw_cw = Enum.sort_by(kw_cw, fn {_, _, cw} -> cw end, :desc)
sum_weights = Enum.reduce(tl(kw_cw), 0, fn {_, w, _}, acc -> acc + w end)
{k, w, cw} = hd(kw_cw)
tail = Enum.map(tl(kw_cw), fn {k, w, cw} -> {k, w, cw + w} end)
{[{k, w, cw - sum_weights} | tail], [k | acc]}
end)
|> elem(1)
|> Enum.reverse()
threshold = length(weighted_dist) - 1
version = :ets.update_counter(version_ets!(wrr), @version_autoincr, {@version_pos, 1})
object = List.to_tuple([{pool_name, version}, threshold, -1 | weighted_dist])
:ets.insert(key_ets!(wrr), object)
:ets.insert(version_ets!(wrr), {{pool_name, version}, :erlang.monotonic_time()})
:ok
end
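# For reference, a self-contained sketch of one pick of the smooth weighted
# round-robin used above (hedged: this mirrors the linked NGINX algorithm;
# `smooth_pick/1` is an illustrative helper, not part of this module). On
# each pick every key gains its configured weight, the key with the highest
# current weight wins, and the winner is reduced by the total weight:
#
#     def smooth_pick(kw_cw) do
#       total = kw_cw |> Enum.map(&elem(&1, 1)) |> Enum.sum()
#       kw_cw = Enum.map(kw_cw, fn {k, w, cw} -> {k, w, cw + w} end)
#       {k, w, cw} = Enum.max_by(kw_cw, fn {_, _, cw} -> cw end)
#       {k, List.keyreplace(kw_cw, k, 0, {k, w, cw - total})}
#     end
#
# For weights [a: 1, b: 2] this yields the repeating sequence b, a, b,
# matching the 1:2 ratio.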
@doc """
Delete a pool from the generator.
It is not safe to call this function while serving other processes using
`take` or concurrently with `new_pool` for the same pool.
"""
@spec delete_pool(wrr, pool_name) :: :ok
def delete_pool(wrr \\ __MODULE__, pool_name) do
for [version] <- :ets.match(version_ets!(wrr), {{pool_name, :"$1"}, :_}) do
:ets.delete(version_ets!(wrr), {pool_name, version})
:ets.delete(key_ets!(wrr), {pool_name, version})
end
:ok
end
@doc """
Resets the wrr state.
This drops all previously configured pools and resets version counter.
It is not safe to call this function while serving other processes using
`take` or concurrently with `new_pool` for the same pool.
"""
@spec reset(wrr) :: :ok
def reset(wrr \\ __MODULE__) do
:ets.delete_all_objects(version_ets!(wrr))
:ets.insert_new(version_ets!(wrr), {@version_autoincr, 0})
:ets.delete_all_objects(key_ets!(wrr))
:ok
end
@doc """
Take elements from the pool in a round-robin fashion.
## Examples
iex> :ok = WeightedRoundRobin.new_pool(:pool, [a: 0.1, b: 0.2, c: 1.0])
iex> dist = Enum.map(1..10_000, fn _ -> WeightedRoundRobin.take(:pool) end)
iex> %{a: 775, b: 1537, c: 7688} = Enum.frequencies(dist)
"""
@spec take(wrr, pool_name) :: key | {:error, :not_found}
def take(wrr \\ __MODULE__, pool_name) do
case :ets.select(version_ets!(wrr), [{{{pool_name, :"$1"}, :_}, [], [:"$1"]}]) do
[] ->
{:error, :not_found}
versions ->
version = Enum.max(versions)
try do
threshold = :ets.lookup_element(key_ets!(wrr), {pool_name, version}, @threshold_pos)
index =
:ets.update_counter(
key_ets!(wrr),
{pool_name, version},
{@counter_pos, 1, threshold, 0}
)
:ets.lookup_element(key_ets!(wrr), {pool_name, version}, @dist_pos + index)
catch
:error, :badarg -> take(wrr, pool_name)
end
end
end
@doc """
Executes the garbage collector.
Used when the automatic GC is disabled by passing `:gc_interval` as
`:infinity` to `start_link`.
"""
@spec gc(wrr, non_neg_integer) :: :ok
def gc(wrr \\ __MODULE__, gc_cleanup_min_timeout \\ @default_gc_cleanup_min_timeout) do
:ets.select(version_ets!(wrr), [{{{:"$1", :"$2"}, :"$3"}, [], [{{:"$1", :"$2", :"$3"}}]}])
|> Enum.group_by(fn {pool_name, _, _} -> pool_name end)
|> Enum.flat_map(fn {_, candidates} ->
[_ | candidates] = Enum.sort_by(candidates, fn {_, version, _} -> version end, :desc)
candidates
|> Enum.filter(fn {_, _, created_timestamp} ->
:erlang.monotonic_time() - created_timestamp >= gc_cleanup_min_timeout
end)
|> Enum.map(fn {pool_name, version, _} -> {pool_name, version} end)
end)
|> Enum.each(fn {pool_name, version} ->
:ets.delete(version_ets!(wrr), {pool_name, version})
:ets.delete(key_ets!(wrr), {pool_name, version})
end)
end
@doc """
Starts the weighted round-robin generator.
You typically don't need to start the weighted round-robin generator, one
is started automatically at application start, except if you explicitly
say to not start one at your config:
config :wrr, start: false
So, manually it can be started as:
WeightedRoundRobin.start_link(name: MyApp.WeightedRoundRobin)
In your supervisor tree, you would write:
Supervisor.start_link([
{WeightedRoundRobin, name: MyApp.WeightedRoundRobin}
], strategy: :one_for_one)
## Options
The weighted round-robin generator requires the following key:
* `:name` - the name of the generator and its tables
* `:gc_interval` - If it is set, an integer > 0 is expected defining the
interval time in milliseconds to garbage collection to run, deleting the
older versions. If this option is not set, garbage collection is executed
every 1 minute. If set to :infinity, garbage collection is never executed
automatically and `gc` will need to be executed explicitly.
* `:gc_cleanup_min_timeout` - An integer > 0 defining the min timeout in
milliseconds for triggering the next cleanup.
"""
@spec start_link([start_option]) :: {:ok, pid} | {:error, term}
def start_link(options) do
name =
case Keyword.fetch(options, :name) do
{:ok, name} when is_atom(name) ->
name
{:ok, other} ->
raise ArgumentError, "expected :name to be an atom, got: #{inspect(other)}"
:error ->
raise ArgumentError, "expected :name option to be present"
end
gc_interval = Keyword.get(options, :gc_interval, @default_gc_interval)
gc_cleanup_min_timeout = Keyword.get(options, :gc_cleanup_min_timeout, @default_gc_cleanup_min_timeout)
GenServer.start_link(__MODULE__, {name, gc_interval, gc_cleanup_min_timeout}, name: name)
end
@impl true
def init({wrr, gc_interval, gc_cleanup_min_timeout}) do
:ets.new(version_ets!(wrr), [
:named_table,
:public,
read_concurrency: true,
write_concurrency: true
])
:ets.insert(version_ets!(wrr), {@version_autoincr, 0})
:ets.new(key_ets!(wrr), [
:named_table,
:public,
read_concurrency: true,
write_concurrency: true
])
{:ok, {wrr, gc_interval, gc_cleanup_min_timeout}, gc_interval}
end
@impl true
def handle_info(:timeout, {wrr, gc_interval, gc_cleanup_min_timeout}) do
gc(wrr, gc_cleanup_min_timeout)
{:noreply, {wrr, gc_interval, gc_cleanup_min_timeout}, gc_interval}
end
@compile {:inline, version_ets!: 1, key_ets!: 1}
defp version_ets!(name), do: Module.concat([name, "Versions"])
defp key_ets!(name), do: Module.concat([name, "Keys"])
end
| lib/weighted_round_robin.ex | 0.924108 | 0.573977 | weighted_round_robin.ex | starcoder |
defmodule ExPBKDF2 do
@moduledoc """
Rust NIf for [Password-Based Key Derivation Function v2 (PBKDF2)](https://en.wikipedia.org/wiki/PBKDF2). It uses the [pbkdf2](https://github.com/RustCrypto/password-hashes/tree/master/pbkdf2) rust library.
"""
alias ExPBKDF2.Impl
@doc """
Generate a random salt
Examples
iex> salt = ExPBKDF2.generate_salt()
iex> byte_size(salt)
16
"""
@spec generate_salt() :: binary() | no_return()
def generate_salt() do
Impl.generate_salt()
end
@doc """
Hash the provided password
It accepts a map with optional parameters:
- `:salt` - if it's not set, it will be generated
- `:alg` - the hashing algorithm to be used. it can be `"sha512"` or `"sha256"`. Default value is `"sha512"`
- `:iterations` - the number of iterations. The default value is 4096
- `:length` - the length of the result. The default value is 64
Examples
iex> opts = %{salt: "salt", alg: "sha256", iterations: 4096, length: 32}
iex> ExPBKDF2.pbkdf2("password", opts)
<<197, 228, 120, 213, 146, 136, 200, 65, 170, 83, 13, 182, 132, 92, 76, 141, 150, 40, 147, 160, 1, 206, 78, 17, 164, 150, 56, 115, 170, 152, 19, 74>>
"""
@spec pbkdf2(binary(), map() | nil) :: binary() | no_return()
def pbkdf2(password, opts \\ %{}) do
salt = Map.get(opts, :salt, generate_salt())
alg = Map.get(opts, :alg, "sha512")
iterations = Map.get(opts, :iterations, 4096)
length = Map.get(opts, :length, 64)
Impl.calculate_pbkdf2(password, salt, alg, iterations, length)
end
@doc """
Verify the provided hash
It accepts three parameters:
- hash - hash generated with pbkdf2
- password - the plain-text password to check the hash against
- optional parameters map - it's the same as options for `pbkdf2/2`. If this parameter is not passed, it's assumed that hash is already in the PHC string format
Examples
iex> hash = <<197, 228, 120, 213, 146, 136, 200, 65, 170, 83, 13, 182, 132, 92, 76, 141, 150, 40, 147, 160, 1, 206, 78, 17, 164, 150, 56, 115, 170, 152, 19, 74>>
iex> opts = %{salt: "salt", alg: "sha256", iterations: 4096, length: 32}
iex> ExPBKDF2.verify(hash, "password", opts)
true
"""
@spec verify(binary(), binary(), map() | nil) :: boolean() | no_return()
def verify(hash, password, params \\ nil)
def verify(hash, password, %{salt: raw_salt, alg: alg, iterations: iterations, length: length}) do
salt = Base.encode64(raw_salt, padding: false)
hash = Base.encode64(hash, padding: false)
formatted_hash = "$pbkdf2-#{alg}$i=#{iterations},l=#{length}$#{salt}$#{hash}"
verify(formatted_hash, password)
end
def verify(formatted_hash, password, _), do: Impl.verify(formatted_hash, password)
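# A round-trip sketch (hedged: assumes the `Impl` NIF is loaded; values are
# illustrative). The same options map must be used for hashing and verifying:
#
#     opts = %{salt: ExPBKDF2.generate_salt(), alg: "sha512", iterations: 4096, length: 64}
#     hash = ExPBKDF2.pbkdf2("secret", opts)
#     true = ExPBKDF2.verify(hash, "secret", opts)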
end
| lib/ex_pbkdf2.ex | 0.86891 | 0.567427 | ex_pbkdf2.ex | starcoder |
defmodule Resx.Storer do
@moduledoc """
A storer is an interface to perform some side-effect with a resource.
While this is intended for things such as caching, or saving a resource at
some destination, it may also be used to produce other side-effects such
as logging, delivery/dispatch, etc.
A store by itself is not referenceable, due to this it is suggested that the
store should not modify the resource, as this is likely to lead to confusion
when obtaining and passing around references. The suggested way of implementing
a referenceable store, is to have the store implement the `Resx.Producer`
behaviour as well. An example of this is the default file handler `Resx.Producers.File`,
this is both a producer and a store.
"""
alias Resx.Resource
@doc """
Implement the behaviour to store a resource.
The `options` keyword allows for your implementation to expose some configurable
settings.
If the store was successful return `{ :ok, resource }`, where `resource` is the
stored resource. Otherwise return an appropriate error.
"""
@callback store(resource :: Resource.t, options :: keyword) :: { :ok, resource :: Resource.t } | Resx.error
@doc """
Optionally implement the behaviour to discard a resource. This should be used to
reverse the effects of a store. The default implementation does nothing and just
returns `:ok`.
The `options` keyword allows for your implementation to expose some configurable
settings.
If the resource was successfully discarded return `:ok`. Otherwise return an
appropriate error.
"""
@callback discard(reference :: Resx.ref, options :: keyword) :: :ok | Resx.error(Resx.resource_error | Resx.reference_error)
@doc false
defmacro __using__(_opts) do
quote do
@behaviour Resx.Storer
@impl Resx.Storer
def discard(_, _ \\ []), do: :ok
defoverridable [discard: 1, discard: 2]
end
end
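# A minimal sketch of an implementation (hedged: `MyApp.LogStore` is a
# hypothetical module, not part of this library). `use Resx.Storer` brings
# in the behaviour and the default no-op `discard/2`:
#
#     defmodule MyApp.LogStore do
#       use Resx.Storer
#
#       @impl Resx.Storer
#       def store(resource, opts) do
#         IO.inspect(resource, label: Keyword.get(opts, :label, "stored"))
#         {:ok, resource}
#       end
#     end
#
#     # Resx.Storer.save(resource, MyApp.LogStore, label: "cache")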
defmodule StoreError do
defexception [:message, :type, :reason, :storer, :resource, :options]
@impl Exception
def exception({ resource, storer, options, { type, reason } }) do
%StoreError{
message: "failed to store resource due to #{type} error: #{inspect reason}",
type: type,
reason: reason,
resource: resource,
storer: storer,
options: options
}
end
end
@doc """
Save a resource in the target store.
A `storer` must be a module that implements the `Resx.Storer`
behaviour.
"""
@spec save(Resource.t, module, keyword) :: { :ok, Resource.t } | Resx.error
def save(resource, storer, opts \\ []), do: storer.store(resource, opts)
@doc """
Save a resource in the target store.
Raises a `Resx.Storer.StoreError` if the resource couldn't be stored.
For more details see `save/3`.
"""
@spec save!(Resource.t, module, keyword) :: Resource.t | no_return
def save!(resource, storer, opts \\ []) do
case save(resource, storer, opts) do
{ :ok, resource } -> resource
{ :error, error } -> raise StoreError, { resource, storer, opts, error }
end
end
end
| lib/resx/storer.ex | 0.887835 | 0.414928 | storer.ex | starcoder |
defmodule ROS.Node.Spec do
@moduledoc """
A set of functions for declaring ROS abstractions for your Supervisor setup.
Add ROS abstractions to your `lib/my_project/application.ex` like so:
iex> import ROS.Node.Spec
iex> children = [
...> node(:"/mynode", [
...> publisher(:mypub, "chatter", "std_msgs/String"),
...> subscriber("other_chatter", "std_msgs/Int16", &IO.inspect/1),
...> service_proxy(:myproxy, "add_two_ints", "rospy_tutorials/AddTwoInts"),
...> service("add_two_ints", "rospy_tutorials/AddTwoInts", fn %RospyTutorials.AddTwoInts.Request{a: a, b: b} ->
...> %RospyTutorials.AddTwoInts.Response{sum: a + b}
...> end)
...> ])
...> ]
iex> {ok?, _pid} = Supervisor.start_link(children, strategy: :one_for_one)
iex> ok?
:ok
Note that you can also write any ROS types in their module form after you've
compiled them with `mix genmsg` or `mix gensrv`
iex> import ROS.Node.Spec
iex> publisher(:mypub, "chatter", StdMsgs.String)
{ROS.Publisher, %ROS.Publisher{name: :mypub, topic: "chatter", type: StdMsgs.String}}
iex> service_proxy(:myproxy, "add_two_ints", RospyTutorials.AddTwoInts)
{ROS.Service.Proxy, %ROS.Service.Proxy{name: :myproxy, service: "add_two_ints", type: RospyTutorials.AddTwoInts}}
"""
@typedoc """
An identifier for something that listens to messages received by a subscriber
or service. Follow the naming conventions of GenServer.
"""
@type listener() :: atom() | pid()
@doc """
Creates a child spec for a node.
A node is the ROS equivalent of a Supervisor. You should group your
publishers, subscribers, services, and service proxies as children of the
node. Nodes also startup hidden processes like a Slave API server and an
XML-RPC server for interacting with ROS master.
"""
@spec node(atom(), [tuple()]) :: {module(), %ROS.Node{}}
def node(name, children \\ []) do
{ROS.Node, %ROS.Node{children: children, name: name}}
end
@doc """
Creates a child spec for a publisher process.
## Parameters
- `name` an atom or reference to call the publisher. This will allow you to
call the publisher by name later when making a call to
`ROS.Publisher.publish/2`.
- `topic` the ROS topic to listen to
- `type` the msg type expected in that topic. Either string format
("std_msgs/Int16") or module format `StdMsgs.Int16` are accepted.
"""
@spec publisher(atom(), String.t(), String.t() | module()) ::
{module(), %ROS.Publisher{}}
def publisher(name, topic, type) do
{ROS.Publisher, %ROS.Publisher{name: name, topic: topic, type: type}}
end
@doc """
Creates a child spec for a subscriber process.
## Parameters
- `topic` the ROS topic to listen to
- `type` the msg type expected in that topic.
Either string format ("std_msgs/Int16") or module format `StdMsgs.Int16` are accepted.
The third parameter can either be a callback function, a pid or atom name,
or list of pids/atom names.
If it's a callback, that callback function will be executed every time the
subscriber receives a new message. This function call will be blocking, but
the GenServer that the subscriber is running on will buffer incoming
messages, so each callback call will be in order. This is meant to most
closely mirror the behavior of the Python and C++ client libraries.
If it's a pid or atom, the subscriber process will send a `cast` message
to that pid or atom using `GenServer.cast/2` containing
`{:subscription, :from_subscriber_proc_name, %<incoming-message-type>{}}`
(e.g. `{:subscription, :from_subscriber_proc_name, %StdMsgs.String{data: "hello world"}}`).
Due to the behavior of `cast`, messages should arrive in order.
If it's a list of pids or atoms, the subscriber process will send a `cast`
message as described above to each of the processes in the list. There is
no guarantee about the order in which each process is messaged.
## Examples
```
import ROS.Node.Spec
children = [node(:mynode, [
subscriber("chatter", "std_msgs/String", &IO.inspect/1),
subscriber("other_chatter", "std_msgs/String", self()),
subscriber("another_chatter", "std_msgs/String", MyModule.ChatterServer)
])]
Supervisor.start_link(children)
flush()
# => {:"$gen_cast", {:subscription, :mynode_other_chatter, %StdMsgs.String{data: "hello world"}}}
```
If you're attaching a GenServer to the subscriber, you'll need to provide
a handle for the subscription:
```
defmodule MyModule.ChatterServer do
use GenServer
def start_link(_otps), do: GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
def init(args), do: {:ok, args}
def handle_cast({:subscription, _from, %StdMsgs.String{data: data}}, state) do
# do something interesting with `data`
{:noreply, state}
end
end
```
The subscriber will send its process name (an atom) in the `cast` tuple. This
is so that you can differentiate between different subscribers of the same type
if you're listening to multiple subscriber processes. This is a small use case,
though. For common use, you should underscore the process name as shown above.
"""
@spec subscriber(
String.t(),
String.t() | module(),
(struct() -> any()) | listener() | [listener()]
) :: {module(), %ROS.Subscriber{}}
def subscriber(topic, type, callback) when is_function(callback) do
{ROS.Subscriber,
%ROS.Subscriber{topic: topic, type: type, callback: callback}}
end
def subscriber(topic, type, listener)
when is_pid(listener) or is_atom(listener) do
{ROS.Subscriber,
%ROS.Subscriber{topic: topic, type: type, listeners: [listener]}}
end
def subscriber(topic, type, listeners) when is_list(listeners) do
{ROS.Subscriber,
%ROS.Subscriber{topic: topic, type: type, listeners: listeners}}
end
@doc """
Creates a child spec for a service proxy process.
"""
@spec service_proxy(atom(), String.t(), String.t() | module()) ::
{module(), %ROS.Service.Proxy{}}
def service_proxy(name, service, type) do
{ROS.Service.Proxy,
%ROS.Service.Proxy{name: name, service: service, type: type}}
end
@doc """
Creates a child spec for a service process.
## Parameters
- `service` the ROS service name to listen to
- `type` the srv type expected in that topic.
Either string format ("std_srv/Bool") or module format `StdMsgs.Bool` are
accepted.
The third parameter can either be a callback function, a pid, or an atom name.
If it's a function, that function will be executed and the return value
will be sent as a service response. If it's a pid or atom, the request
will be forwarded to that process using `GenServer.call/2`. The reply value
from the `call` will be sent to the requestor as a service response.
There may only be one listener for services.
Each `call` sent to the listener will take the form of
`{:service, %<service-type>.Request{}}`. (e.g.
`{:service, %RospyTutorials.AddTwoInts.Request{a: 3, b: 4}}`).
"""
@spec service(
String.t(),
String.t() | module(),
(struct() -> any()) | listener() | [listener()]
) :: {module(), %ROS.Service{}}
def service(service, type, callback) when is_function(callback) do
{ROS.Service,
%ROS.Service{service: service, type: type, callback: callback}}
end
def service(service, type, listener)
when is_pid(listener) or is_atom(listener) do
{ROS.Service,
%ROS.Service{service: service, type: type, listener: listener}}
end
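# A sketch of a GenServer acting as a service listener (hedged:
# `MyModule.AddTwoIntsServer` is hypothetical). Per the docs above, the
# request arrives as a `{:service, %<type>.Request{}}` call, and the reply
# value becomes the service response:
#
#     defmodule MyModule.AddTwoIntsServer do
#       use GenServer
#
#       def start_link(_), do: GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
#       def init(state), do: {:ok, state}
#
#       def handle_call({:service, %RospyTutorials.AddTwoInts.Request{a: a, b: b}}, _from, state) do
#         {:reply, %RospyTutorials.AddTwoInts.Response{sum: a + b}, state}
#       end
#     end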
end
| lib/ros/node/spec.ex | 0.908933 | 0.758935 | spec.ex | starcoder |
defmodule ExUnit.ClusteredCase do
@moduledoc """
Helpers for defining clustered test cases.
Use this in place of `ExUnit.Case` when defining test modules where
you will be defining clustered tests. `#{__MODULE__}` extends
`ExUnit.Case` to provide additional helpers for those tests.
Since `#{__MODULE__}` is an extension of `ExUnit.Case`, it takes the
same options, and imports the same test helpers and callbacks. It adds
new helpers, `scenario/3`, `node_setup/1`, and `node_setup/2`, and aliases
the `#{__MODULE__.Cluster}` module for convenience.
## Examples
defmodule KVStoreTests do
# Use the module
use ExUnit.ClusteredCase
# Define a clustered scenario
scenario "given a healthy cluster", [cluster_size: 3] do
# Set up each node in the cluster prior to each test
node_setup do
{:ok, _} = Application.ensure_all_started(:kv_store)
end
# Define a test to run in this scenario
test "always pass" do
assert true
end
end
end
## Context
All tests receive a context as an argument, just like with `ExUnit.Case`, the
primary difference to be aware of is that the context contains a key, `:cluster`,
which is the pid of the cluster manager, and is used to invoke functions in the
`#{__MODULE__}.Cluster` module during tests.
defmodule KVStoreTests do
use ExUnit.ClusteredCase
scenario "given a healthy cluster", [cluster_size: 3] do
# You can use normal setup functions to setup context for the
# test, this is run once prior to each test
setup do
{:ok, foo: :bar}
end
# Like `setup`, but is run on all nodes prior to each test
node_setup do
{:ok, _} = Application.ensure_all_started(:kv_store)
end
test "cluster has three nodes", %{cluster: c} = context do
assert length(Cluster.members(c)) == 3
end
end
end
See the `ExUnit.Case` documentation for information on tags, filters, and more.
"""
@doc false
defmacro __using__(opts \\ []) do
quote do
@__clustered_case_scenario nil
use ExUnit.Case, unquote(opts)
alias unquote(__MODULE__).Cluster
import unquote(__MODULE__), only: [scenario: 3, node_setup: 1, node_setup: 2]
setup_all do
on_exit(fn ->
unquote(__MODULE__).Cluster.Supervisor.cleanup_clusters_for_test_module(__MODULE__)
end)
end
end
end
@doc """
Creates a new clustered test scenario.
Usage of this macro is similar to that of `ExUnit.Case.describe/2`,
but has some differences. While `describe/2` simply groups tests under a
common description, `scenario/3` both describes the group of tests, and
initializes a cluster which will be made available for each test in that scenario.
NOTE: It is important to be aware that each scenario is a distinct cluster,
and that all tests within a single scenario are running against the same
cluster. If tests within a scenario may conflict with one another - perhaps by
modifying shared state, or triggering crashes which may bring down shared
processes, etc., then you have a couple options:
- Disable async testing for the module
- Modify your tests to prevent conflict, e.g. writing to different keys
in a k/v store, rather than the same key
- Split the scenario into many, where the tests can run in isolation.
## Options
You can configure a scenario with the following options:
- `cluster_size: integer`, will create a cluster of the given size, this option is mutually
exclusive with `:nodes`, if the latter is used, this option will be ignored.
- `nodes: [[node_opt]]`, a list of node specifications to use when creating the cluster,
see `t:#{__MODULE__}.Node.node_opt/0` for specific options available. If used,
`:cluster_size` is ignored.
- `env: [{String.t, String.t}]`, will set the given key/values in the environment
when creating nodes. If you need different values for each node, you will need to use `:nodes`
- `erl_flags: [String.t]`, additional arguments to pass to `erl` when creating nodes, like `:env`,
if you need different args for each node, you will need to use `:nodes`
- `config: Keyword.t`, configuration overrides to apply to all nodes in the cluster
- `boot_timeout: integer`, the amount of time to allow for nodes to boot, in milliseconds
- `init_timeout: integer`, the amount of time to allow for nodes to be initialized, in milliseconds
## Examples
defmodule KVStoreTest do
use ExUnit.ClusteredCase
@scenario_opts [cluster_size: 3]
scenario "given a healthy cluster", @scenario_opts do
node_setup do
{:ok, _} = Application.ensure_all_started(:kv_store)
end
test "writes are replicated to all nodes", %{cluster: cluster} do
writer = Cluster.random_member(cluster)
key = self()
value = key
assert Cluster.call(writer, KVStore, :put, [key, value]) == :ok
results = Cluster.map(cluster, KVStore, :get, [key])
assert Enum.all?(results, fn val -> val == value end)
end
end
end
Since all scenarios are also describe blocks, you can run all the tests for a
scenario by its description:
mix test --only describe:"given a healthy cluster"
or by passing the exact line the scenario starts on:
mix test path/to/file:123
Like `describe/2`, you cannot nest `scenario/3`. Use the same technique
of named setups recommended in the `describe/2` documentation for composition.
"""
defmacro scenario(message, options, do: block) do
quote do
if @__clustered_case_scenario do
raise "cannot call scenario/2 inside another scenario. See the documentation " <>
"for scenario/2 on named setups and how to handle hierarchies"
end
message = unquote(message)
options = unquote(options)
@__clustered_case_scenario message
@__clustered_case_scenario_config options
try do
describe message do
setup context do
alias unquote(__MODULE__).Cluster.Supervisor, as: CS
# Start cluster if not started
{:ok, cluster} =
CS.init_cluster_for_scenario!(
__MODULE__,
@__clustered_case_scenario,
@__clustered_case_scenario_config
)
Map.put(context, :cluster, cluster)
end
unquote(block)
end
after
@__clustered_case_scenario nil
@__clustered_case_scenario_config nil
end
end
end
@doc """
Like `ExUnit.Callbacks.setup/1`, but is executed on every node in the cluster.
You can pass a block, a unary function as an atom, or a list of such atoms.
If you pass a unary function, it receives the test setup context, however unlike
`setup/1`, the value returned from this function does not modify the context. Use
`setup/1` or `setup/2` for that.
NOTE: This callback is invoked _on_ each node in the cluster for the given scenario.
## Examples
def start_apps(_context) do
{:ok, _} = Application.ensure_all_started(:kv_store)
:ok
end
scenario "given a healthy cluster", [cluster_size: 3] do
node_setup :start_apps
node_setup do
# This form is also acceptable
{:ok, _} = Application.ensure_all_started(:kv_store)
end
end
"""
defmacro node_setup(do: block) do
quote do
setup %{cluster: cluster} = context do
results =
unquote(__MODULE__).Cluster.map(cluster, fn ->
unquote(block)
end)
case results do
{:error, _} = err ->
exit(err)
_ ->
:ok
end
context
end
end
end
defmacro node_setup(callback) when is_atom(callback) do
quote do
setup %{cluster: cluster} = context do
results =
unquote(__MODULE__).Cluster.map(cluster, __MODULE__, unquote(callback), [context])
case results do
{:error, _} = err ->
exit(err)
_ ->
:ok
end
context
end
end
end
defmacro node_setup(callbacks) when is_list(callbacks) do
# Expand to one node_setup/1 call per callback at expansion time; doing this
# inside `quote bind_quoted: ...` would hand node_setup/1 an AST variable
# rather than an atom, matching no clause.
setups =
for cb <- callbacks do
unless is_atom(cb) do
raise ArgumentError, "expected list of callbacks as atoms, but got: #{inspect(callbacks)}"
end
quote do
node_setup(unquote(cb))
end
end
quote do
(unquote_splicing(setups))
end
end
@doc """
Same as `node_setup/1`, but receives the test setup context as a parameter.
## Examples
scenario "given a healthy cluster", [cluster_size: 3] do
node_setup _context do
# Do something on each node
end
end
"""
defmacro node_setup(var, do: block) do
quote do
setup %{cluster: cluster} = context do
result =
unquote(__MODULE__).Cluster.each(cluster, fn ->
case context do
unquote(var) ->
unquote(block)
end
end)
case result do
{:error, _} = err ->
exit(err)
_ ->
:ok
end
context
end
end
end
end
| lib/clustered_case.ex | 0.908168 | 0.832985 | clustered_case.ex | starcoder |
alias KidsChain.Util
require Record
defmodule KidsChain.DB do
@moduledoc """
A set of functions for working with user over Mnesia.
"""
Record.defrecord(:user, [:uid, :id, :inviter, :name, :content, :from, :to, :address, :at])
@timeout 5000
# Returns a list of user fields.
defmacrop fields(record) do
quote do
unquote(record) |> unquote(record) |> Keyword.keys()
end
end
@doc """
Start Mnesia and init it.
"""
def start do
:mnesia.create_schema([node()])
:mnesia.start()
:mnesia.create_table(:variable, disc_copies: [node()])
:mnesia.create_table(
:user,
attributes: fields(user),
disc_copies: [node()],
index: [:id]
)
:ok = :mnesia.wait_for_tables([:variable, :user], @timeout)
end
@doc """
Creates a user.
"""
def create(%{uid: uid, inviter: inviter} = params) do
id = id(uid)
inviter = if inviter != "0", do: id(inviter), else: 0
cond do
not is_nil(id) ->
{:error, :conflict}
is_nil(inviter) ->
{:error, :not_found}
true ->
id = :mnesia.dirty_update_counter(:variable, :id, 1)
at = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
u = params |> to_user() |> user(id: id, inviter: inviter, at: at)
:mnesia.sync_transaction(fn ->
:mnesia.write(u)
u
end)
end
end
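# A usage sketch (hedged: illustrative uids and ids). The genesis user passes
# inviter "0"; later users pass their inviter's uid:
#
#     {:atomic, _root} = create(%{uid: "alice", inviter: "0", name: "Alice"})
#     {:atomic, _user} = create(%{uid: "bob", inviter: "alice", name: "Bob"})
#     id("bob")  #=> bob's integer id, e.g. 2
#     info(2)    #=> %{uid: "bob", name: "Bob"}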
@doc """
Updates the user info.
"""
def update(%{uid: uid} = params) when is_binary(uid) do
:mnesia.transaction(fn ->
case :mnesia.read(:user, uid, :write) do
[u] ->
u = to_map(u) |> Map.merge(Util.trim(params)) |> to_user()
:mnesia.write(u)
u
_ ->
:mnesia.abort(:not_found)
end
end)
end
@doc """
Returns the `uid` of given `id`.
"""
def uid(id) when is_integer(id) do
case lookup(id) do
[user(uid: uid)] -> uid
_ -> nil
end
end
@doc """
Returns the `id` of given `uid`.
"""
def id(uid) when is_binary(uid) do
case lookup(uid) do
[user(id: id)] -> id
_ -> nil
end
end
@doc """
Returns the basic user info of given `id`.
"""
def info(id) when is_integer(id) do
case lookup(id) do
[user(uid: uid, name: name)] -> %{uid: uid, name: name}
_ -> nil
end
end
@doc """
Finds the user of given `uid`.
"""
def lookup(uid) when is_binary(uid) do
:mnesia.dirty_read(:user, uid)
end
@doc """
Finds the user of given `id`.
"""
def lookup(id) when is_integer(id) do
:mnesia.dirty_index_read(:user, id, :id)
end
@doc """
Finds the user of given `name`.
"""
def lookup_by_name(name) when is_binary(name) do
:mnesia.dirty_match_object(user(name: name, _: :_))
end
@doc """
Finds the children of given `id`.
"""
def children(id) when is_integer(id) do
:mnesia.dirty_match_object(user(inviter: id, _: :_))
end
@doc """
Returns the first user record when `uid` is nil, otherwise the record that follows the given `uid`, allowing the user table to be traversed step by step.
"""
def next(uid \\ nil) do
uid =
if is_nil(uid) do
:mnesia.dirty_first(:user)
else
:mnesia.dirty_next(:user, uid)
end
if is_binary(uid) do
lookup(uid)
else
[]
end
end
def users do
:mnesia.dirty_select(:user, [{user(id: :"$1", inviter: :"$2", _: :_), [], [{{:"$1", :"$2"}}]}])
end
@doc """
Returns the size of the users.
"""
def count do
:mnesia.table_info(:user, :size)
end
@doc """
Converts a map to a user.
"""
def to_user(term) when is_map(term) do
fields(user) |> Enum.into([:user], fn key -> term[key] end) |> List.to_tuple()
end
@doc """
Converts a user to a map.
"""
def to_map(u) do
u
|> user()
|> Map.new()
|> Util.trim()
end
end
| lib/kids_chain/db.ex | 0.843396 | 0.534673 | db.ex | starcoder |
defmodule Mix.Tasks.GitHooks.Run do
@shortdoc "Runs all the configured mix tasks for a given git hook."
@moduledoc """
Runs all the configured mix tasks for a given git hook.
Any [git hook](https://git-scm.com/docs/githooks) is supported.
## Examples
You can run any hook by running `mix git_hooks.run hook_name`. For example:
```elixir
mix git_hooks.run pre_commit
```
You can also all the hooks which are configured with `mix git_hooks.run all`.
"""
use Mix.Task
alias GitHooks.Config
alias GitHooks.Printer
@opaque git_hook_args :: list(String.t())
@typedoc """
Run options:
* `include_hook_args`: Whether the git hook args should be sent to the
command to be executed. In case of `true`, the args will be amended to the
command. Defaults to `false`.
"""
@type run_opts :: [
{:include_hook_args, boolean},
{:env, list({String.t(), binary})}
]
@doc """
Runs a task for a given git hook.
The task can be one of three different types:
* `{:cmd, "command arg1 arg2"}`: Runs a command.
* `{:file, "path_to_file"}`: Runs an executable file.
* `"command arg1 arg2"`: Runs a simple command, supports no options.
The first two options above can use a third element in the tuple, see
[here](`t:run_opts/0`) more info about the options.
"""
@impl true
@spec run(list(String.t())) :: :ok | no_return
def run([]), do: error_exit()
def run(args) do
{[git_hook_name], args} = Enum.split(args, 1)
git_hook_name
|> get_atom_from_arg()
|> check_is_valid_git_hook!()
|> Printer.info("Running hooks for ", append_first_arg: true)
|> Config.tasks()
|> run_tasks(args)
|> success_exit()
end
@spec run_tasks({atom, list(String.t())}, git_hook_args()) :: :ok
defp run_tasks({git_hook_type, tasks}, git_hook_args) do
Enum.each(tasks, &run_task(&1, git_hook_type, git_hook_args))
end
@spec run_task(String.t(), atom, git_hook_args()) :: :ok | no_return
@spec run_task({:file, String.t(), run_opts()}, atom, git_hook_args()) :: :ok | no_return
@spec run_task({:cmd, String.t(), run_opts()}, atom, git_hook_args()) :: :ok | no_return
defp run_task({:file, script_file}, git_hook_type, git_hook_args) do
run_task({:file, script_file, []}, git_hook_type, git_hook_args)
end
defp run_task({:file, script_file, opts}, git_hook_type, git_hook_args) do
env_vars = Keyword.get(opts, :env, [])
args =
if Keyword.get(opts, :include_hook_args, false) do
git_hook_args
else
[]
end
script_file
|> Path.absname()
|> System.cmd(
args,
into: Config.io_stream(git_hook_type),
env: env_vars
)
|> case do
{_result, 0} ->
Printer.success("`#{script_file}` was successful")
{result, _} ->
if !Config.verbose?(git_hook_type), do: IO.puts(result)
Printer.error("`#{script_file}` execution failed")
error_exit()
end
end
defp run_task({:cmd, command}, git_hook_type, git_hook_args) do
run_task({:cmd, command, []}, git_hook_type, git_hook_args)
end
defp run_task({:cmd, command, opts}, git_hook_type, git_hook_args) when is_list(opts) do
[command | args] = String.split(command, " ")
env_vars = Keyword.get(opts, :env, [])
command_args =
if Keyword.get(opts, :include_hook_args, false) do
Enum.concat(args, git_hook_args)
else
args
end
command
|> System.cmd(
command_args,
into: Config.io_stream(git_hook_type),
env: env_vars
)
|> case do
{_result, 0} ->
Printer.success("`#{command} #{Enum.join(command_args, " ")}` was successful")
{result, _} ->
if !Config.verbose?(git_hook_type), do: IO.puts(result)
Printer.error("#{Atom.to_string(git_hook_type)} failed on `#{command}`")
error_exit()
end
rescue
error ->
Printer.error("Error executing the command: #{inspect(error)}")
error_exit()
end
defp run_task(command, git_hook_type, git_hook_args) when is_binary(command) do
run_task({:cmd, command, []}, git_hook_type, git_hook_args)
end
defp run_task(task, git_hook_type, _git_hook_args),
do:
raise("""
Invalid task #{inspect(task)} for hook #{inspect(git_hook_type)}, only String, {:file, ""} or {:cmd, ""} are supported.
""")
@spec get_atom_from_arg(String.t()) :: atom | no_return
defp get_atom_from_arg(git_hook_type_arg) do
case git_hook_type_arg do
nil ->
Printer.error("You should provide a git hook type to run")
error_exit()
git_hook_type ->
git_hook_type
|> Recase.to_snake()
|> String.to_atom()
end
end
@spec check_is_valid_git_hook!(atom) :: atom | no_return
defp check_is_valid_git_hook!(git_hook_type) do
unless Enum.any?(Config.supported_hooks(), &(&1 == git_hook_type)) do
Printer.error("Invalid or unsupported hook `#{git_hook_type}`")
Printer.warn("Supported hooks are: #{inspect(Config.supported_hooks())}")
error_exit()
end
git_hook_type
end
@spec success_exit(any) :: :ok
defp success_exit(_), do: :ok
@spec error_exit(non_neg_integer) :: no_return
defp error_exit(error_code \\ 1), do: exit(error_code)
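# A configuration sketch for reference (hedged: see the git_hooks README for
# the authoritative option names). Tasks are read per hook type from the
# application environment; the tuple forms match run_task/3 above:
#
#     config :git_hooks,
#       hooks: [
#         pre_commit: [
#           tasks: [
#             {:cmd, "mix format --check-formatted", include_hook_args: false},
#             {:file, "./scripts/check.sh", env: [{"STRICT", "1"}]}
#           ]
#         ]
#       ]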
end
| lib/mix/tasks/git_hooks/run.ex | 0.83508 | 0.77373 | run.ex | starcoder |
defmodule Sanbase.Twitter.Store do
@moduledoc ~S"""
A module for storing and fetching twitter account data from a time series data store
"""
use Sanbase.Influxdb.Store
alias Sanbase.Influxdb.Measurement
alias Sanbase.Twitter.Store
def all_records_for_measurement(measurement_name, from, to, interval) do
select_from_to_query(measurement_name, from, to, interval)
|> get()
|> parse_twitter_data_series()
end
def all_records_for_measurement!(measurement_name, from, to, interval) do
case all_records_for_measurement(measurement_name, from, to, interval) do
{:ok, result} -> result
{:error, error} -> raise(error)
end
end
def last_record_for_measurement(measurement_name) do
select_last_record_query(measurement_name)
|> get()
|> parse_twitter_record()
end
def last_record_with_tag_value(measurement_name, tag_name, tag_value) do
~s/SELECT LAST(followers_count) FROM "#{measurement_name}"
WHERE #{tag_name} = '#{tag_value}'/
|> Store.execute()
|> parse_twitter_record()
end
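# A usage sketch (hedged: the measurement name, dates, and interval are
# illustrative; measurements presumably correspond to tracked accounts):
#
#     from = ~U[2018-01-01 00:00:00Z]
#     to = ~U[2018-01-07 00:00:00Z]
#     Store.all_records_for_measurement("some_account", from, to, "1d")
#     #=> {:ok, [{~U[2018-01-01 00:00:00Z], 1337}, ...]}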
defp select_from_to_query(measurement_name, from, to, interval) do
~s/SELECT time, LAST(followers_count)
FROM "#{measurement_name}"
WHERE time >= #{DateTime.to_unix(from, :nanosecond)}
AND time <= #{DateTime.to_unix(to, :nanosecond)}
GROUP BY time(#{interval}) fill(none)/
end
defp select_last_record_query(measurement_name) do
~s/SELECT LAST(followers_count) FROM "#{measurement_name}"/
end
defp parse_twitter_data_series(%{results: [%{error: error}]}), do: {:error, error}
defp parse_twitter_data_series(%{
results: [
%{
series: [
%{
values: twitter_data_series
}
]
}
]
}) do
result =
twitter_data_series
|> Enum.map(fn [iso8601_datetime, followers_count] ->
{:ok, datetime, _} = DateTime.from_iso8601(iso8601_datetime)
{datetime, followers_count}
end)
{:ok, result}
end
defp parse_twitter_data_series(_), do: {:ok, []}
defp parse_twitter_record(%{
results: [
%{
series: [
%{
values: [[iso8601_datetime, followers_count]]
}
]
}
]
}) do
{:ok, datetime, _} = DateTime.from_iso8601(iso8601_datetime)
{datetime, followers_count}
end
defp parse_twitter_record(_), do: nil
end
| lib/sanbase/twitter/store.ex | 0.776835 | 0.480418 | store.ex | starcoder |
defmodule Teiserver.Coordinator.CoordinatorLib do
alias Teiserver.Data.Types, as: T
@spec help(T.user(), boolean()) :: String.t()
def help(user, host) do
everybody = """
$whoami
Sends back information about who you are
$joinq
Adds you to the queue to join when a space opens up, you will be automatically added to the game as a player. If already a member it has no effect.
$leaveq
Removes you from the join queue.
$status
status info about the battle lobby
$help
displays this help text
$splitlobby
Causes a "vote" to start where other players can elect to join you in splitting the lobby, follow someone
of their choosing or remain in place. After 20 seconds you are moved to a new (empty) lobby and those that voted yes
or are following someone that voted yes are also moved to that lobby.
The following are available to the boss (and battle host)
$welcome-message <message>
Sets the welcome message sent to anybody joining the lobby
$gatekeeper (default | friends | friendsplay | clan)
sets the gatekeeper for this battle
> default: no limitations
> friends: allows only friends of existing members to join the lobby
> friendsplay: allows only friends of existing players to become players (but anybody can join to spectate)
> TODO: clan: allows only members of an existing clan to join the game (enable after one member from each clan is present)
"""
hosts = """
$lock (team | player | spectator | side)
Engages a lock on that mode, when engaged members are unable to change that attribute about themselves.
Hosts and the server are the only thing that will be able to change it. Moderators are typically exempt
from these restrictions.
Multiple locks can be engaged at the same time
> Team: Prevents a member from changing their teamally value (Team 1, 2 etc)
> Allyid: Prevents a member from changing their team value (also called playerid)
> Player: Prevents spectators becoming players
> Spectator: Prevents players becoming spectators
> Side: Prevents players changing their side (faction)
$unlock (team | player | spectator)
Disengages the lock on that mode
$specunready
specs all unready players, they are each sent a ring from the coordinator
$makeready <user>
sets a user to ready, when no user is specified all users are set to ready
any user set to ready is sent a ring from the coordinator
$settag <key> <value>
Sets a battletag of <key> to <value>
$speclock <user>
Locks the user into a spectator role. Can be reverted with the unban command.
$forceplay <user>
Forces the user into a player position
$timeout <user> <reason, default: You have been given a timeout on the naughty step>
Bans the user from the lobby for 1 game, will expire once the game is over
$lobbykick <user>
Kicks the user from the lobby
$lobbyban <user> <reason, default: None given>
Bans the user from the lobby but not the server, will refresh on !rehost
$lobbybanmult [<user>]
Bans all users separated by spaces (from the lobby, not the game)
If you want to add a reason, add a `!!` to the end of the player list, anything after that will be the reason
$unban <user>
Removes the user from the lobby banlist
$forcespec <user>
Moves the user to spectators and bans them from becoming a player
$forceplay <user>
Moves the user to players
"""
moderators = """
$whois <user>
Sends back information about the user specified
$check <user>
Performs a smurf check against the user mentioned and sends you the result
$pull <user>
Pulls a given user into the battle
$dosplit
Completes the lobby split now
$cancelsplit
Cancels the lobby split
$rename <new name>
Renames the lobby to the name given
$modwarn <user> <hours> <reason>
Warns the user for that many hours and creates a report for them
$modmute <user> <hours> <reason>
Mutes the user for that many hours and creates a report for them
$modban <user> <hours> <reason>
Bans the user for that many hours and creates a report for them
$reset
Resets the coordinator bot for this lobby to the default
"""
# $command - Coordinator command
# $%command - Don't echo command back to chat
[
everybody,
(if user.moderator or host, do: hosts, else: ""),
(if user.moderator, do: moderators, else: "")
]
|> Enum.join("\n")
end
@doc """
We call resolve_split to resolve the overall list of splits (y/n/follow). The issue
comes when we have multiple layers of people following each other. For this we recursively
call resolve_round.
When resolve_round finds circular references it drops them and they don't go anywhere.
"""
@spec resolve_split(map()) :: map()
def resolve_split(split) do
case resolve_round(split) do
{:complete, result} -> result
{:incomplete, result} -> resolve_split(result)
end
|> Enum.filter(fn {_k, v} -> v end)
|> Map.new
end
@spec resolve_round(map()) :: {:incomplete | :complete, map()}
defp resolve_round(split) do
players = Map.keys(split)
result = players
|> Enum.reduce({false, true, split}, fn (player_id, {changes, complete, acc}) ->
# First find out what their target is, possibly by looking
# at their target's target
new_target = case acc[player_id] do
true -> true
nil -> nil
target_id ->
case split[target_id] do
true -> true
nil -> nil
targets_target -> targets_target
end
end
new_split = Map.put(acc, player_id, new_target)
# Now, are we still on for completion?
is_complete = complete and (not is_integer(new_target))
if new_target == split[player_id] do
{changes, is_complete, new_split}
else
{true, is_complete, new_split}
end
end)
case result do
{false, true, split} -> {:complete, split}
{false, false, split} -> {:complete, remove_circular(split)}
{true, _, split} -> {:incomplete, split}
end
end
@spec remove_circular(map()) :: map()
defp remove_circular(split) do
split
|> Map.new(fn {k, v} ->
new_v = case v do
true -> true
_ -> nil
end
{k, new_v}
end)
end
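# A worked example (values chosen for illustration): `true` means "split",
# an integer means "follow that player", `nil` means "stay". Followers
# inherit their target's final decision, and circular follows are dropped:
#
#     resolve_split(%{1 => true, 2 => 1, 3 => 2, 4 => nil})
#     #=> %{1 => true, 2 => true, 3 => true}
#
#     resolve_split(%{1 => 2, 2 => 1})
#     #=> %{}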
end
| lib/teiserver/coordinator/coordinator_lib.ex | 0.523177 | 0.451871 | coordinator_lib.ex | starcoder |
defmodule VintageNetWiFi.WPA2 do
@moduledoc """
WPA2 preshared key calculations
WPA2 doesn't use passphrases directly, but instead hashes them with the
SSID and uses the result for the network key. The algorithm that runs
the hash takes some time so it's useful to compute the PSK from the
passphrase once rather than specifying it each time.
"""
@typedoc "A WPA2 preshared key"
@type psk :: <<_::512>>
@type invalid_passphrase_error :: :password_too_short | :password_too_long | :invalid_characters
@type invalid_ssid_error :: :ssid_too_short | :ssid_too_long
@doc """
Convert a WiFi WPA2 passphrase into a PSK
If a passphrase looks like a PSK, then it's assumed that it already is a PSK
and is passed through.
See IEEE Std 802.11i-2004 Appendix H.4 for the algorithm.
"""
@spec to_psk(String.t(), psk() | String.t()) ::
{:ok, psk()}
| {:error, invalid_ssid_error() | invalid_passphrase_error()}
def to_psk(ssid, psk) when byte_size(psk) == 64 do
with :ok <- validate_psk(psk),
:ok <- validate_ssid(ssid) do
{:ok, psk}
end
end
def to_psk(ssid, passphrase) when is_binary(passphrase) do
with :ok <- validate_passphrase(passphrase),
:ok <- validate_ssid(ssid) do
{:ok, compute_psk(ssid, passphrase)}
end
end
@doc """
Validate the length and characters of a passphrase
A valid passphrase is between 8 and 63 characters long, and
only contains ASCII characters (values between 32 and 126, inclusive).
"""
@spec validate_passphrase(String.t()) :: :ok | {:error, invalid_passphrase_error()}
def validate_passphrase(password) when byte_size(password) < 8 do
{:error, :password_too_short}
end
def validate_passphrase(password) when byte_size(password) >= 64 do
{:error, :password_too_long}
end
def validate_passphrase(password) do
all_ascii(password)
end
defp compute_psk(ssid, passphrase) do
result = f(ssid, passphrase, 4096, 1) <> f(ssid, passphrase, 4096, 2)
<<result256::256, _::binary>> = result
result256
|> Integer.to_string(16)
|> String.pad_leading(64, "0")
end
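# In other words, compute_psk/2 is PBKDF2-HMAC-SHA1(passphrase, ssid,
# 4096 iterations, 256-bit output) rendered as 64 uppercase hex characters.
# The IEEE Std 802.11i-2004 H.4 test vector can serve as a sanity check:
#
#     {:ok, psk} = VintageNetWiFi.WPA2.to_psk("IEEE", "password")
#     psk #=> "F42C6FC52DF0EBEF9EBB4B90B38A5F902E83FE1B135A70E23AED762E9710A12E"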
# F(P, S, c, i) = U1 xor U2 xor ... Uc
# U1 = PRF(P, S || Int(i))
# U2 = PRF(P, U1)
# Uc = PRF(P, Uc-1)
defp f(ssid, password, iterations, count) do
digest = <<ssid::binary, count::integer-32>>
digest1 = sha1_hmac(digest, password)
iterate(digest1, digest1, password, iterations - 1)
end
defp iterate(acc, _previous_digest, _password, 0) do
acc
end
defp iterate(acc, previous_digest, password, n) do
digest = sha1_hmac(previous_digest, password)
iterate(xor160(acc, digest), digest, password, n - 1)
end
defp xor160(<<a::160>>, <<b::160>>) do
<<:erlang.bxor(a, b)::160>>
end
if :erlang.system_info(:otp_release) == '21' do
# TODO: Remove when OTP 21 is no longer supported.
defp sha1_hmac(digest, password) do
:crypto.hmac(:sha, password, digest)
end
else
defp sha1_hmac(digest, password) do
:crypto.mac(:hmac, :sha, password, digest)
end
end
defp validate_psk(psk) when byte_size(psk) == 64 do
all_hex_digits(psk)
end
@doc """
Validate the length of the SSID
A valid SSID is between 1 and 32 characters long.
"""
@spec validate_ssid(String.t()) :: :ok | {:error, invalid_ssid_error()}
def validate_ssid(ssid) when byte_size(ssid) == 0, do: {:error, :ssid_too_short}
def validate_ssid(ssid) when byte_size(ssid) <= 32, do: :ok
def validate_ssid(ssid) when is_binary(ssid), do: {:error, :ssid_too_long}
defp all_ascii(<<c, rest::binary>>) when c >= 32 and c <= 126 do
all_ascii(rest)
end
defp all_ascii(<<>>), do: :ok
defp all_ascii(_other), do: {:error, :invalid_characters}
defp all_hex_digits(<<c, rest::binary>>)
when (c >= ?0 and c <= ?9) or (c >= ?a and c <= ?f) or (c >= ?A and c <= ?F) do
all_hex_digits(rest)
end
defp all_hex_digits(<<>>), do: :ok
defp all_hex_digits(_other), do: {:error, :invalid_characters}
end
| lib/vintage_net_wifi/wpa2.ex | 0.646683 | 0.55266 | wpa2.ex | starcoder |
defmodule Braintree.PaypalAccount do
@moduledoc """
Find, update and delete Paypal Accounts using PaymentMethod token
"""
use Braintree.Construction
alias Braintree.HTTP
alias Braintree.ErrorResponse, as: Error
@type t :: %__MODULE__{
billing_agreement_id: String.t(),
created_at: String.t(),
customer_id: String.t(),
email: String.t(),
image_url: String.t(),
payer_info: String.t(),
token: String.t(),
updated_at: String.t(),
default: boolean,
is_channel_initated: boolean,
subscriptions: [any]
}
defstruct billing_agreement_id: nil,
created_at: nil,
customer_id: nil,
default: false,
email: nil,
image_url: nil,
is_channel_initated: false,
payer_info: nil,
subscriptions: [],
token: nil,
updated_at: nil
@doc """
Find a paypal account record using `token` or return an error
response if the token is invalid.
## Example
{:ok, paypal_account} = Braintree.PaypalAccount.find(token)
"""
@spec find(String.t(), Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def find(token, opts \\ []) do
path = "payment_methods/paypal_account/" <> token
with {:ok, %{"paypal_account" => map}} <- HTTP.get(path, opts) do
{:ok, new(map)}
end
end
@doc """
Update a paypal account record using `token` or return an error
response if the token is invalid.
## Example
{:ok, paypal_account} = Braintree.PaypalAccount.update(
token,
%{options: %{make_default: true}
)
"""
@spec update(String.t(), map, Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def update(token, params, opts \\ []) do
path = "payment_methods/paypal_account/" <> token
with {:ok, %{"paypal_account" => map}} <- HTTP.put(path, %{paypal_account: params}, opts) do
{:ok, new(map)}
end
end
@doc """
Delete a paypal account record using `token` or return an error
response if the token is invalid.
## Example
{:ok, paypal_account} = Braintree.PaypalAccount.delete(token)
"""
@spec delete(String.t(), Keyword.t()) :: :ok | {:error, Error.t()}
def delete(token, opts \\ []) do
path = "payment_methods/paypal_account/" <> token
with {:ok, _payload} <- HTTP.delete(path, opts) do
:ok
end
end
end
| lib/paypal_account.ex | 0.890079 | 0.412264 | paypal_account.ex | starcoder |
defmodule TinkoffInvest.HistoricalData do
@moduledoc """
Module for getting historical data for a security.
Useful when testing your algo or just need some insight on past prices
"""
alias TinkoffInvest.Market
alias TinkoffInvest.Model.Candle
require Logger
@intervals [
"1min",
"2min",
"3min",
"5min",
"10min",
"15min",
"30min",
"hour",
"day",
"week",
"month"
]
@doc """
Get candle history for specified figi by from, to and interval
Example:
```
from = DateTime.utc_now() |> DateTime.add(-86400, :second)
to = DateTime.utc_now()
TinkoffInvest.HistoricalData.candles("SOMEFIGI", from, to, "1min")
```
Possible intervals:
#{Enum.join(@intervals, ", ")}
"""
@spec candles(String.t(), DateTime.t(), DateTime.t(), String.t()) :: list(Candle.t())
def candles(figi, from, to, interval) when interval in @intervals do
interval
|> shift_opts()
|> shift(from)
|> diff(to)
|> to_range(interval)
|> Enum.map(fn inc ->
from_shifted = shift([days: inc], from)
to_shifted = shift([days: inc + step(interval)], from)
do_candles(figi, from_shifted, to_shifted, interval)
end)
|> List.flatten()
end
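# Chunking sketch (hedged: illustrative figi and dates). A "1min" request
# spanning three days is split into three one-day windows, fetched
# separately and flattened into a single list of candles:
#
#     from = ~U[2021-01-01 00:00:00Z]
#     to = ~U[2021-01-04 00:00:00Z]
#     TinkoffInvest.HistoricalData.candles("SOMEFIGI", from, to, "1min")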
def shift_opts(interval) do
case interval do
"hour" -> [weeks: 1]
"day" -> [days: 365]
"week" -> [days: 728]
"month" -> [days: 365 * 10]
_ -> [days: 1]
end
end
def shift(opts, date) do
Timex.shift(date, opts)
end
defp diff(from, to) do
Timex.diff(from, to, :days)
end
defp to_range(count, interval) do
s = step(interval)
to = abs(count)
0
|> Range.new(to)
|> Enum.take_every(s)
end
defp step(interval) do
interval
|> case do
"hour" -> Timex.Duration.from_weeks(1)
"day" -> Timex.Duration.from_days(365)
"week" -> Timex.Duration.from_days(728)
"month" -> Timex.Duration.from_days(365 * 10)
_ -> Timex.Duration.from_days(1)
end
|> Timex.Duration.to_days(truncate: true)
end
defp do_candles(figi, from, to, interval) do
figi
|> Market.candles(from, to, interval)
|> case do
%{payload: x} when is_list(x) -> x
e -> raise "Api error: #{inspect(e)}"
end
end
end
| lib/tinkoff_invest/historical_data.ex | 0.875873 | 0.815857 | historical_data.ex | starcoder |
defmodule Timber.Events.CustomEvent do
@moduledoc ~S"""
The `CustomEvent` represents events that aren't covered elsewhere.
The defined structure of this data can be found in the log event JSON schema:
https://github.com/timberio/log-event-json-schema
Custom events can be used to structure information about events that are central
to your line of business like receiving credit card payments, saving a draft of a post,
or changing a user's password.
## Fields
* `type` - (atom, required) This is the type of your event. It should be something unique
and unchanging. It will be used to identify this event. Example: `:my_event`.
* `data` - (map, optional) A map of data. This can be anything that can be JSON encoded.
Example: `%{key: "value"}`.
## Special `data` fields
Timber treats these fields as special. We'll display them on the interface where relevant,
create graphs, etc.:
* `:time_ms` (float, optional) - Represents the execution time in fractional milliseconds.
Example: `45.6`
## Example
There are 2 ways to log custom events:
1. Log a map (simplest)
```elixir
event_data = %{customer_id: "xiaus1934", amount: 1900, currency: "USD"}
Logger.info("Payment rejected", event: %{payment_rejected: event_data})
```
The `:payment_rejected` key is the event `type`, and of course the value is the `data`.
2. Log a struct (advanced)
Defining structs for your events creates a contract around your data structure.
We recommend this approach when you have downstream consumers that will be affected
by data structure changes.
```elixir
defmodule PaymentRejectedEvent do
use Timber.Events.CustomEvent, type: :payment_rejected
@enforce_keys [:customer_id, :amount, :currency]
defstruct [:customer_id, :amount, :currency]
def message(%__MODULE__{customer_id: customer_id}) do
"Payment rejected for #{customer_id}"
end
end
event = %PaymentRejectedEvent{customer_id: "xiaus1934", amount: 1900, currency: "USD"}
message = PaymentRejectedEvent.message(event)
Logger.info(message, event: event)
```
"""
@type t :: %__MODULE__{
type: atom(),
data: map() | nil
}
defmacro __using__(opts) do
quote do
defimpl Timber.Eventable, for: __MODULE__ do
def to_event(event) do
type = Keyword.get(unquote(opts), :type, __MODULE__)
data = Map.from_struct(event)
%Timber.Events.CustomEvent{type: type, data: data}
end
end
end
end
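# Illustrative sketch (assumed expansion, not part of the original module):
# for a struct defined with `use Timber.Events.CustomEvent, type: :payment_rejected`,
# the macro above generates roughly:
#
#   defimpl Timber.Eventable, for: PaymentRejectedEvent do
#     def to_event(event) do
#       %Timber.Events.CustomEvent{type: :payment_rejected, data: Map.from_struct(event)}
#     end
#   end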
@enforce_keys [:type]
defstruct [
:data,
:type
]
end
|
lib/timber/events/custom_event.ex
| 0.914032
| 0.892843
|
custom_event.ex
|
starcoder
|
defmodule Pbuf.Tests.Everything do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, except: [:sfixed64, :sint64]}
defstruct [
choice: nil,
bool: false,
int32: 0,
int64: 0,
uint32: 0,
uint64: 0,
sint32: 0,
sint64: 0,
fixed32: 0,
fixed64: 0,
sfixed32: 0,
sfixed64: 0,
float: 0.0,
double: 0.0,
string: "",
bytes: <<>>,
struct: nil,
type: :EVERYTHING_TYPE_UNKNOWN,
corpus: :universal,
user: nil,
user_status: :USER_STATUS_UNKNOWN,
bools: [],
int32s: [],
int64s: [],
uint32s: [],
uint64s: [],
sint32s: [],
sint64s: [],
fixed32s: [],
sfixed32s: [],
fixed64s: [],
sfixed64s: [],
floats: [],
doubles: [],
strings: [],
bytess: [],
structs: [],
types: [],
corpuss: [],
map1: %{},
map2: %{},
map3: %{}
]
@type t :: %__MODULE__{
choice: map | {:choice_int32, integer} | {:choice_string, String.t},
bool: boolean,
int32: integer,
int64: integer,
uint32: non_neg_integer,
uint64: non_neg_integer,
sint32: integer,
sint64: integer,
fixed32: integer,
fixed64: integer,
sfixed32: integer,
sfixed64: integer,
float: number,
double: number,
string: String.t,
bytes: binary,
struct: Pbuf.Tests.Child.t,
type: Pbuf.Tests.EverythingType.t,
corpus: Pbuf.Tests.Everything.Corpus.t,
user: Pbuf.Tests.Sub.User.t,
user_status: Pbuf.Tests.Sub.UserStatus.t,
bools: [boolean],
int32s: [integer],
int64s: [integer],
uint32s: [non_neg_integer],
uint64s: [non_neg_integer],
sint32s: [integer],
sint64s: [integer],
fixed32s: [integer],
sfixed32s: [integer],
fixed64s: [integer],
sfixed64s: [integer],
floats: [number],
doubles: [number],
strings: [String.t],
bytess: [binary],
structs: [Pbuf.Tests.Child.t],
types: [Pbuf.Tests.EverythingType.t],
corpuss: [Pbuf.Tests.Everything.Corpus.t],
map1: %{optional(String.t) => any},
map2: %{optional(integer) => any},
map3: %{optional(non_neg_integer) => any}
}
defmodule Corpus do
@moduledoc false
@type t :: :universal | 0 | :web | 1 | :images | 2 | :local | 3 | :news | 4 | :products | 5 | :video | 6
@spec to_int(t | non_neg_integer) :: integer
def to_int(:images), do: 2
def to_int(2), do: 2
def to_int(:local), do: 3
def to_int(3), do: 3
def to_int(:news), do: 4
def to_int(4), do: 4
def to_int(:products), do: 5
def to_int(5), do: 5
def to_int(:universal), do: 0
def to_int(0), do: 0
def to_int(:video), do: 6
def to_int(6), do: 6
def to_int(:web), do: 1
def to_int(1), do: 1
def to_int(invalid) do
raise Pbuf.Encoder.Error,
type: __MODULE__,
value: invalid,
tag: nil,
message: "#{inspect(invalid)} is not a valid enum value for #{__MODULE__}"
end
@spec from_int(integer) :: t
def from_int(2), do: :images
def from_int(3), do: :local
def from_int(4), do: :news
def from_int(5), do: :products
def from_int(0), do: :universal
def from_int(6), do: :video
def from_int(1), do: :web
def from_int(_unknown), do: :invalid
end
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:bool, data.bool, <<8>>),
Encoder.field(:int32, data.int32, <<16>>),
Encoder.field(:int64, data.int64, <<24>>),
Encoder.field(:uint32, data.uint32, <<32>>),
Encoder.field(:uint64, data.uint64, <<40>>),
Encoder.field(:sint32, data.sint32, <<48>>),
Encoder.field(:sint64, data.sint64, <<56>>),
Encoder.field(:fixed32, data.fixed32, <<69>>),
Encoder.field(:fixed64, data.fixed64, <<73>>),
Encoder.field(:sfixed32, data.sfixed32, <<85>>),
Encoder.field(:sfixed64, data.sfixed64, <<89>>),
Encoder.field(:float, data.float, <<101>>),
Encoder.field(:double, data.double, <<105>>),
Encoder.field(:string, data.string, <<114>>),
Encoder.field(:bytes, data.bytes, <<122>>),
Encoder.field(:struct, data.struct, <<130, 1>>),
Encoder.enum_field(Pbuf.Tests.EverythingType, data.type, <<136, 1>>),
Encoder.enum_field(Pbuf.Tests.Everything.Corpus, data.corpus, <<144, 1>>),
Encoder.oneof_field(:choice_int32, data.choice, 0, fn v -> Encoder.field(:int32, v, <<152, 1>>) end),
Encoder.oneof_field(:choice_string, data.choice, 0, fn v -> Encoder.field(:string, v, <<162, 1>>) end),
Encoder.field(:struct, data.user, <<170, 1>>),
Encoder.enum_field(Pbuf.Tests.Sub.UserStatus, data.user_status, <<176, 1>>),
Encoder.repeated_field(:bool, data.bools, <<250, 1>>),
Encoder.repeated_field(:int32, data.int32s, <<130, 2>>),
Encoder.repeated_field(:int64, data.int64s, <<138, 2>>),
Encoder.repeated_field(:uint32, data.uint32s, <<146, 2>>),
Encoder.repeated_field(:uint64, data.uint64s, <<154, 2>>),
Encoder.repeated_field(:sint32, data.sint32s, <<162, 2>>),
Encoder.repeated_field(:sint64, data.sint64s, <<170, 2>>),
Encoder.repeated_field(:fixed32, data.fixed32s, <<178, 2>>),
Encoder.repeated_field(:sfixed32, data.sfixed32s, <<186, 2>>),
Encoder.repeated_field(:fixed64, data.fixed64s, <<194, 2>>),
Encoder.repeated_field(:sfixed64, data.sfixed64s, <<202, 2>>),
Encoder.repeated_field(:float, data.floats, <<210, 2>>),
Encoder.repeated_field(:double, data.doubles, <<218, 2>>),
Encoder.repeated_field(:string, data.strings, <<226, 2>>),
Encoder.repeated_field(:bytes, data.bytess, <<234, 2>>),
Encoder.repeated_field(:struct, data.structs, <<242, 2>>),
Encoder.repeated_enum_field(Pbuf.Tests.EverythingType, data.types, <<250, 2>>),
Encoder.repeated_enum_field(Pbuf.Tests.Everything.Corpus, data.corpuss, <<130, 3>>),
Encoder.map_field(<<10>>, :string, <<16>>, :int32, data.map1, <<234, 3>>),
Encoder.map_field(<<8>>, :int64, <<21>>, :float, data.map2, <<242, 3>>),
Encoder.map_field(<<8>>, :uint32, <<18>>, :struct, data.map3, <<250, 3>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:bool, :bool, acc, data)
end
def decode(acc, <<16, data::binary>>) do
Decoder.field(:int32, :int32, acc, data)
end
def decode(acc, <<24, data::binary>>) do
Decoder.field(:int64, :int64, acc, data)
end
def decode(acc, <<32, data::binary>>) do
Decoder.field(:uint32, :uint32, acc, data)
end
def decode(acc, <<40, data::binary>>) do
Decoder.field(:uint64, :uint64, acc, data)
end
def decode(acc, <<48, data::binary>>) do
Decoder.field(:sint32, :sint32, acc, data)
end
def decode(acc, <<56, data::binary>>) do
Decoder.field(:sint64, :sint64, acc, data)
end
def decode(acc, <<69, data::binary>>) do
Decoder.field(:fixed32, :fixed32, acc, data)
end
def decode(acc, <<73, data::binary>>) do
Decoder.field(:fixed64, :fixed64, acc, data)
end
def decode(acc, <<85, data::binary>>) do
Decoder.field(:sfixed32, :sfixed32, acc, data)
end
def decode(acc, <<89, data::binary>>) do
Decoder.field(:sfixed64, :sfixed64, acc, data)
end
def decode(acc, <<101, data::binary>>) do
Decoder.field(:float, :float, acc, data)
end
def decode(acc, <<105, data::binary>>) do
Decoder.field(:double, :double, acc, data)
end
def decode(acc, <<114, data::binary>>) do
Decoder.field(:string, :string, acc, data)
end
def decode(acc, <<122, data::binary>>) do
Decoder.field(:bytes, :bytes, acc, data)
end
def decode(acc, <<130, 1, data::binary>>) do
Decoder.struct_field(Pbuf.Tests.Child, :struct, acc, data)
end
def decode(acc, <<136, 1, data::binary>>) do
Decoder.enum_field(Pbuf.Tests.EverythingType, :type, acc, data)
end
def decode(acc, <<144, 1, data::binary>>) do
Decoder.enum_field(Pbuf.Tests.Everything.Corpus, :corpus, acc, data)
end
def decode(acc, <<152, 1, data::binary>>) do
Decoder.oneof_field(:choice, 0, Decoder.field(:int32, :choice_int32, acc, data), nil)
end
def decode(acc, <<162, 1, data::binary>>) do
Decoder.oneof_field(:choice, 0, Decoder.field(:string, :choice_string, acc, data), nil)
end
def decode(acc, <<170, 1, data::binary>>) do
Decoder.struct_field(Pbuf.Tests.Sub.User, :user, acc, data)
end
def decode(acc, <<176, 1, data::binary>>) do
Decoder.enum_field(Pbuf.Tests.Sub.UserStatus, :user_status, acc, data)
end
def decode(acc, <<250, 1, data::binary>>) do
Decoder.repeated_field(:bool, :bools, acc, data)
end
def decode(acc, <<130, 2, data::binary>>) do
Decoder.repeated_field(:int32, :int32s, acc, data)
end
def decode(acc, <<138, 2, data::binary>>) do
Decoder.repeated_field(:int64, :int64s, acc, data)
end
def decode(acc, <<146, 2, data::binary>>) do
Decoder.repeated_field(:uint32, :uint32s, acc, data)
end
def decode(acc, <<154, 2, data::binary>>) do
Decoder.repeated_field(:uint64, :uint64s, acc, data)
end
def decode(acc, <<162, 2, data::binary>>) do
Decoder.repeated_field(:sint32, :sint32s, acc, data)
end
def decode(acc, <<170, 2, data::binary>>) do
Decoder.repeated_field(:sint64, :sint64s, acc, data)
end
def decode(acc, <<178, 2, data::binary>>) do
Decoder.repeated_field(:fixed32, :fixed32s, acc, data)
end
def decode(acc, <<186, 2, data::binary>>) do
Decoder.repeated_field(:sfixed32, :sfixed32s, acc, data)
end
def decode(acc, <<194, 2, data::binary>>) do
Decoder.repeated_field(:fixed64, :fixed64s, acc, data)
end
def decode(acc, <<202, 2, data::binary>>) do
Decoder.repeated_field(:sfixed64, :sfixed64s, acc, data)
end
def decode(acc, <<210, 2, data::binary>>) do
Decoder.repeated_field(:float, :floats, acc, data)
end
def decode(acc, <<218, 2, data::binary>>) do
Decoder.repeated_field(:double, :doubles, acc, data)
end
def decode(acc, <<226, 2, data::binary>>) do
Decoder.field(:string, :strings, acc, data)
end
def decode(acc, <<234, 2, data::binary>>) do
Decoder.field(:bytes, :bytess, acc, data)
end
def decode(acc, <<242, 2, data::binary>>) do
Decoder.struct_field(Pbuf.Tests.Child, :structs, acc, data)
end
def decode(acc, <<250, 2, data::binary>>) do
Decoder.repeated_enum_field(Pbuf.Tests.EverythingType, :types, acc, data)
end
def decode(acc, <<130, 3, data::binary>>) do
Decoder.repeated_enum_field(Pbuf.Tests.Everything.Corpus, :corpuss, acc, data)
end
def decode(acc, <<234, 3, data::binary>>) do
post_map(:map1, 61, Decoder.map_field(10, :string, "", 16, :int32, 0, :map1, acc, data))
end
def decode(acc, <<242, 3, data::binary>>) do
post_map(:map2, 62, Decoder.map_field(8, :int64, 0, 21, :float, 0.0, :map2, acc, data))
end
def decode(acc, <<250, 3, data::binary>>) do
post_map(:map3, 63, Decoder.map_field(8, :uint32, 0, 18, Pbuf.Tests.Child, nil, :map3, acc, data))
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,61,62,63] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
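# Worked example of the prefix math above (illustrative): the two-byte varint
# <<218, 2>> decodes to 346, so tag = bsr(346, 3) = 43 (the doubles field)
# and type = band(346, 7) = 2 (length-delimited), matching the repeated
# double clause earlier in this module.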
defp post_map(name, tag, {:error, %{tag: nil, message: message}}) do
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__}.#{name} tag #{tag} " <> message
}
{:error, err}
end
# either valid data or a complete error (which would happen if our value
# was a struct and the error happened decoding it)
defp post_map(_name, _prefix, data), do: data
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:map3, {c, v}}, acc -> Map.update(acc, :map3, %{c => v}, fn m -> Map.put(m, c, v) end)
{:map2, {c, v}}, acc -> Map.update(acc, :map2, %{c => v}, fn m -> Map.put(m, c, v) end)
{:map1, {c, v}}, acc -> Map.update(acc, :map1, %{c => v}, fn m -> Map.put(m, c, v) end)
{:structs, v}, acc -> Map.update(acc, :structs, [v], fn e -> [v | e] end)
{:bytess, v}, acc -> Map.update(acc, :bytess, [v], fn e -> [v | e] end)
{:strings, v}, acc -> Map.update(acc, :strings, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :structs, Elixir.Enum.reverse(struct.structs))
struct = Map.put(struct, :bytess, Elixir.Enum.reverse(struct.bytess))
struct = Map.put(struct, :strings, Elixir.Enum.reverse(struct.strings))
struct
end
end
defmodule Pbuf.Tests.Child do
@moduledoc false
alias Pbuf.Decoder
defstruct [
id: 0,
name: "",
data1: <<>>,
data2: <<>>,
data3: <<>>
]
@type t :: %__MODULE__{
id: non_neg_integer,
name: String.t,
data1: binary,
data2: binary,
data3: binary
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:uint32, data.id, <<8>>),
Encoder.field(:string, data.name, <<18>>),
Encoder.field(:bytes, data.data1, <<26>>),
case data.data2 do
<<>> -> []
value -> Encoder.field(:bytes, Elixir.Jason.encode!(value), <<34>>)
end,
case data.data3 do
<<>> -> []
value -> Encoder.field(:bytes, Elixir.Jason.encode!(value), <<42>>)
end,
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:uint32, :id, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.field(:string, :name, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.field(:bytes, :data1, acc, data)
end
def decode(acc, <<34, data::binary>>) do
Decoder.field(:bytes, :data2, acc, data)
end
def decode(acc, <<42, data::binary>>) do
Decoder.field(:bytes, :data3, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,4,5] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:data3, v}, acc -> Map.put(acc, :data3, Elixir.Jason.decode!(v, [keys: :atoms]))
{:data2, v}, acc -> Map.put(acc, :data2, Elixir.Jason.decode!(v, []))
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Pbuf.Tests.EverythingType do
@moduledoc false
@type t :: :EVERYTHING_TYPE_UNKNOWN | 0 | :EVERYTHING_TYPE_SAND | 1 | :EVERYTHING_TYPE_SPICE | 2
@spec to_int(t | non_neg_integer) :: integer
def to_int(:EVERYTHING_TYPE_SAND), do: 1
def to_int(1), do: 1
def to_int(:EVERYTHING_TYPE_SPICE), do: 2
def to_int(2), do: 2
def to_int(:EVERYTHING_TYPE_UNKNOWN), do: 0
def to_int(0), do: 0
def to_int(invalid) do
raise Pbuf.Encoder.Error,
type: __MODULE__,
value: invalid,
tag: nil,
message: "#{inspect(invalid)} is not a valid enum value for #{__MODULE__}"
end
@spec from_int(integer) :: t
def from_int(1), do: :EVERYTHING_TYPE_SAND
def from_int(2), do: :EVERYTHING_TYPE_SPICE
def from_int(0), do: :EVERYTHING_TYPE_UNKNOWN
def from_int(_unknown), do: :invalid
end
|
test/schemas/generated/everything.pb.ex
| 0.665302
| 0.678548
|
everything.pb.ex
|
starcoder
|
defmodule Omise.Transaction do
@moduledoc ~S"""
Provides Transaction API interfaces.
<https://www.omise.co/transactions-api>
"""
use Omise.HTTPClient, endpoint: "transactions"
defstruct object: "transaction",
id: nil,
location: nil,
type: nil,
source: nil,
amount: nil,
currency: nil,
transferable: nil,
created: nil,
direction: nil,
key: nil,
origin: nil
@type t :: %__MODULE__{
object: String.t(),
id: String.t(),
location: String.t(),
type: String.t(),
source: String.t(),
amount: integer,
currency: String.t(),
transferable: String.t(),
created: String.t(),
direction: String.t(),
key: String.t(),
origin: String.t()
}
@doc ~S"""
List all transactions.
Returns `{:ok, transactions}` if the request is successful, `{:error, error}` otherwise.
## Query Parameters:
* `offset` - (optional, default: 0) The offset of the first record returned.
* `limit` - (optional, default: 20, maximum: 100) The maximum amount of records returned.
* `from` - (optional, default: 1970-01-01T00:00:00Z, format: ISO 8601) The UTC date and time limiting the beginning of returned records.
* `to` - (optional, default: current UTC Datetime, format: ISO 8601) The UTC date and time limiting the end of returned records.
## Examples
Omise.Transaction.list
Omise.Transaction.list(limit: 5)
"""
@spec list(Keyword.t(), Keyword.t()) :: {:ok, Omise.List.t()} | {:error, Omise.Error.t()}
def list(params \\ [], opts \\ []) do
opts = Keyword.merge(opts, as: %Omise.List{data: [%__MODULE__{}]})
get(@endpoint, params, opts)
end
@doc ~S"""
Retrieve a transaction.
Returns `{:ok, transaction}` if the request is successful, `{:error, error}` otherwise.
## Examples
Omise.Transaction.retrieve("trxn_test_51yg3xs2yggzsfbai3e")
"""
@spec retrieve(String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
def retrieve(id, opts \\ []) do
opts = Keyword.merge(opts, as: %__MODULE__{})
get("#{@endpoint}/#{id}", [], opts)
end
end
|
lib/omise/transaction.ex
| 0.898756
| 0.439928
|
transaction.ex
|
starcoder
|
defmodule Snitch.Data.Schema.TaxRate do
@moduledoc """
Models a TaxRate
"""
use Snitch.Data.Schema
alias Snitch.Data.Schema.{TaxCategory, Zone}
alias Snitch.Domain.Calculator
@type t :: %__MODULE__{}
schema "snitch_tax_rates" do
# associations
belongs_to(:tax_category, TaxCategory)
belongs_to(:zone, Zone)
field(:name, :string)
field(:value, :decimal)
field(:calculator, Ecto.Atom)
field(:deleted_at, :utc_datetime)
field(:included_in_price, :boolean, default: false)
timestamps()
end
@required_params ~w(name value tax_category_id calculator zone_id)a
@optional_params ~w(deleted_at)a
@create_params @required_params ++ @optional_params
@update_params @required_params ++ @optional_params
@doc """
Returns a changeset to create a new TaxRate.
"""
@spec create_changeset(t, map) :: Ecto.Changeset.t()
def create_changeset(%__MODULE__{} = tax_rate, params) do
tax_rate
|> cast(params, @create_params)
|> validate_required(@required_params)
|> common_changeset()
end
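# Illustrative usage (hypothetical values; the calculator must resolve to a
# module present in Snitch.Domain.Calculator.list/0 for the changeset to be
# valid):
#
#   %Snitch.Data.Schema.TaxRate{}
#   |> Snitch.Data.Schema.TaxRate.create_changeset(%{
#     name: "VAT 20%",
#     value: Decimal.new("20"),
#     tax_category_id: 1,
#     zone_id: 1,
#     calculator: DefaultCalculator
#   })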
@doc """
Returns a changeset to update a TaxRate.
"""
@spec update_changeset(t, map) :: Ecto.Changeset.t()
def update_changeset(%__MODULE__{} = tax_rate, params) do
tax_rate
|> cast(params, @update_params)
|> common_changeset()
end
defp common_changeset(changeset) do
changeset
|> validate_required(:name)
|> validate_number(:value, greater_than: 0)
|> modify_calculator_name()
|> verify_calculator()
|> unique_constraint(:name, name: :unique_name_per_zone)
end
defp modify_calculator_name(%Ecto.Changeset{valid?: true} = changeset) do
case fetch_change(changeset, :calculator) do
{:ok, calculator} ->
put_change(changeset, :calculator, Module.safe_concat(Elixir, calculator))
:error ->
changeset
end
end
defp modify_calculator_name(changeset), do: changeset
defp verify_calculator(%Ecto.Changeset{valid?: true} = changeset) do
calc_list = Calculator.list()
with {:ok, calculator} <- fetch_change(changeset, :calculator),
true <- Enum.member?(calc_list, calculator) do
changeset
else
:error ->
changeset
false ->
add_error(
changeset,
:calculator,
"invalid calculator",
additional: "not specified in calculator list"
)
end
end
defp verify_calculator(changeset), do: changeset
end
|
apps/snitch_core/lib/core/data/schema/tax_rate.ex
| 0.860355
| 0.410845
|
tax_rate.ex
|
starcoder
|
defmodule Eml do
@moduledoc """
Eml makes markup a first class citizen in Elixir. It provides a
flexible and modular toolkit for generating, parsing and
manipulating markup. Its main focus is html, but other markup
languages could be implemented as well.
To start off:
This piece of code
```elixir
use Eml.HTML
name = "Vincent"
age = 36
div class: "person" do
div do
span "name: "
span name
end
div do
span "age: "
span age
end
end |> Eml.compile
```
produces
```html
<div class='person'>
<div>
<span>name: </span>
<span>Vincent</span>
</div>
<div>
<span>age: </span>
<span>36</span>
</div>
</div>
```
The functions and macros in the `Eml` module cover most of
Eml's public API.
"""
alias Eml.Element
@default_elements Eml.HTML
@default_parser Eml.HTML.Parser
@type t :: Eml.Encoder.t | [Eml.Encoder.t] | [t]
@type node_primitive :: String.t | { :safe, String.t } | Macro.t | Eml.Element.t
@doc """
Define a template function that compiles eml to a string during compile time.
Eml uses the assigns extension from `EEx` for parameterizing templates. See
the `EEx` docs for more info about them. The function that the template macro
defines optionally accepts any Dict-compatible dictionary as an argument for
binding values to assigns.
### Example:
iex> defmodule MyTemplates1 do
...> use Eml
...> use Eml.HTML
...>
...> template example do
...> div id: "example" do
...> span @text
...> end
...> end
...> end
iex> MyTemplates1.example text: "Example text"
{:safe, "<div id='example'><span>Example text</span></div>"}
Eml templates provide two ways of executing logic at runtime: by providing
assigns handlers in the optional `funs` dictionary, or by calling
external functions at runtime with the `&` operator.
### Example:
iex> defmodule MyTemplates2 do
...> use Eml
...> use Eml.HTML
...>
...> template assigns_handler,
...> text: &String.upcase/1 do
...> div id: "example1" do
...> span @text
...> end
...> end
...>
...> template external_call do
...> body &assigns_handler(text: @example_text)
...> end
...> end
iex> MyTemplates2.assigns_handler text: "Example text"
{:safe, "<div id='example1'><span>EXAMPLE TEXT</span></div>"}
iex> MyTemplates2.external_call example_text: "Example text"
{:safe, "<body><div id='example1'><span>EXAMPLE TEXT</span></div></body>"}
Templates are composable, so they are allowed to call other templates. The
only catch is that it's not possible to pass an assign to another template
during precompilation. The reason for this is that the logic in a template is
executed the moment the template is called, so if you would pass an assign
during precompilation, the logic in a template would receive this assign
instead of its result, which is only available during runtime. This all means
that when you for example want to pass an assign to a nested template, the
template should be prefixed with the `&` operator, or in other words, executed
during runtime.
### Example
iex> defmodule T1 do
...> template templ1,
...> num: &(&1 + &1) do
...> div @num
...> end
...> end
iex> template templ2 do
...> h2 @title
...> templ1(num: @number) # THIS GENERATES A COMPILE TIME ERROR
...> &templ1(num: @number) # THIS IS OK
...> end
Note that because the body of a template is evaluated at compile time, it's
not possible to call other functions from the same module without using the
`&` operator.
Instead of defining a do block, you can also provide a path to a file with the
`:file` option.
### Example:
iex> File.write! "test.eml.exs", "div @number"
iex> defmodule MyTemplates3 do
...> use Eml
...> use Eml.HTML
...>
...> template from_file, file: "test.eml.exs"
...> end
iex> File.rm! "test.eml.exs"
iex> MyTemplates3.from_file number: 42
{:safe, "<div>42</div>"}
"""
defmacro template(name, funs \\ [], do_block) do
do_template(name, funs, do_block, __CALLER__, false)
end
@doc """
Define a private template.
Same as `template/3` except that it defines a private function.
"""
defmacro templatep(name, funs \\ [], do_block) do
do_template(name, funs, do_block, __CALLER__, true)
end
defp do_template(tag, funs, do_block, caller, private) do
{ tag, _, _ } = tag
def_call = if private, do: :defp, else: :def
template = Eml.Compiler.precompile(caller, do_block)
quote do
unquote(def_call)(unquote(tag)(var!(assigns))) do
_ = var!(assigns)
var!(funs) = unquote(funs)
_ = var!(funs)
unquote(template)
end
end
end
@doc """
Define a template as an anonymous function.
Same as `template/3`, except that it defines an anonymous function.
### Example
iex> t = template_fn names: fn names ->
...> for n <- names, do: li n
...> end do
...> ul @names
...> end
iex> t.(names: ~w(john james jesse))
{:safe, "<ul><li>john</li><li>james</li><li>jesse</li></ul>"}
"""
defmacro template_fn(funs \\ [], do_block) do
template = Eml.Compiler.precompile(__CALLER__, do_block)
quote do
fn var!(assigns) ->
_ = var!(assigns)
var!(funs) = unquote(funs)
_ = var!(funs)
unquote(template)
end
end
end
@doc """
Define a component element
Components in Eml are a special kind of element that inherit functionality
from templates. Like templates, everything within the do block gets
precompiled, except assigns and function calls prefixed with the `&`
operator. Defined attributes on a component can be accessed as assigns, just
like with templates. Content can be accessed via the the special assign
`__CONTENT__`. However, since the type of a component is `Eml.Element.t`,
they can be queried and transformed, just like normal Eml elements.
See `template/3` for more info about composability, assigns, runtime logic and
accepted options.
### Example
iex> use Eml
iex> use Eml.HTML
iex> defmodule ElTest do
...>
...> component my_list,
...> __CONTENT__: fn content ->
...> for item <- content do
...> li do
...> span "* "
...> span item
...> span " *"
...> end
...> end
...> end do
...> ul [class: @class], @__CONTENT__
...> end
...>
...> end
iex> import ElTest
iex> el = my_list class: "some-class" do
...> "Item 1"
...> "Item 2"
...> end
#my_list<%{class: "some-class"} ["Item 1", "Item 2"]>
iex> Eml.compile(el)
"<ul class='some-class'><li><span>* </span><span>Item 1</span><span> *</span></li><li><span>* </span><span>Item 2</span><span> *</span></li></ul>"
"""
defmacro component(tag, funs \\ [], do_block) do
do_template_element(tag, funs, do_block, __CALLER__, false)
end
@doc """
Define a fragment element
Fragments in Eml are a special kind of element that inherit functionality from
templates. Like templates, everything within the do block gets precompiled,
except assigns. Defined attributes on a fragment can be accessed as assigns,
just like with templates. Content can be accessed via the special assign
`__CONTENT__`. However, since the type of a fragment is `Eml.Element.t`, they
can be queried and transformed, just like normal Eml elements.
The difference between components and fragments is that fragments are without
any logic, so assign handlers or the `&` operator are not allowed in a
fragment definition.
The reason for their existence is easier composability and performance,
because unlike templates and components, it is allowed to pass assigns to
fragments during precompilation. This is possible because fragments don't
contain any logic.
### Example
iex> use Eml
nil
iex> use Eml.HTML
nil
iex> defmodule ElTest do
...>
...> fragment basic_page do
...> html do
...> head do
...> meta charset: "UTF-8"
...> title @title
...> end
...> body do
...> @__CONTENT__
...> end
...> end
...> end
...>
...> end
{:module, ElTest, ...}
iex> import ElTest
nil
iex> page = basic_page title: "Hello!" do
...> div "Hello World"
...> end
#basic_page<%{title: "Hello!"} [#div<"Hello World">]>
iex> Eml.compile page
"<!doctype html>\n<html><head><meta charset='UTF-8'/><title>Hello!!</title></head><body><div>Hello World</div></body></html>"
"""
defmacro fragment(tag, do_block) do
do_template_element(tag, nil, do_block, __CALLER__, true)
end
defp do_template_element(tag, funs, do_block, caller, fragment?) do
{ tag, _, _ } = tag
template = Eml.Compiler.precompile(caller, Keyword.merge(do_block, fragment: fragment?))
template_tag = (Atom.to_string(tag) <> "__template") |> String.to_atom()
template_type = if fragment?, do: :fragment, else: :component
funs = unless fragment? do
quote do
var!(funs) = unquote(funs)
_ = var!(funs)
end
end
quote do
@doc false
def unquote(template_tag)(var!(assigns)) do
_ = var!(assigns)
unquote(funs)
unquote(template)
end
defmacro unquote(tag)(content_or_attrs, maybe_content \\ nil) do
tag = unquote(tag)
template_tag = unquote(template_tag)
template_type = unquote(template_type)
in_match = Macro.Env.in_match?(__CALLER__)
{ attrs, content } = Eml.Element.Generator.extract_content(content_or_attrs, maybe_content, in_match)
if in_match do
quote do
%Eml.Element{tag: unquote(tag), attrs: unquote(attrs), content: unquote(content)}
end
else
quote do
%Eml.Element{tag: unquote(tag),
attrs: Enum.into(unquote(attrs), %{}),
content: List.wrap(unquote(content)),
template: &unquote(__MODULE__).unquote(template_tag)/1,
type: unquote(template_type)}
end
end
end
end
end
@doc """
Parses data and converts it to eml
How the data is interpreted depends on the `parser` argument.
The default value is `Eml.HTML.Parser', which means that
strings are parsed as html.
In case of error, raises an Eml.ParseError exception.
### Examples:
iex> Eml.parse("<body><h1 id='main-title'>The title</h1></body>")
[#body<[#h1<%{id: "main-title"} "The title">]>]
"""
@spec parse(String.t, Keyword.t) :: [t]
def parse(data, opts \\ [])
def parse(data, opts) when is_binary(data) do
parser = opts[:parser] || @default_parser
parser.parse(data, opts)
end
def parse(data, _) do
raise Eml.ParseError, message: "Bad argument: #{inspect data}"
end
@doc """
Compiles eml content with the specified markup compiler, which is html by default.
The accepted options are:
* `:compiler` - The compiler to use, by default `Eml.HTML.Compiler`
* `:quotes` - The type of quotes used for attribute values. Accepted values are `:single` (default) and `:double`.
* `:transform` - A function that receives every node just before it gets compiled. Same as using `transform/2`,
but more efficient, since it's getting called during the compile pass.
* `:escape` - Automatically escape strings, default is `true`.
In case of error, raises an Eml.CompileError exception.
### Examples:
iex> Eml.compile(body(h1([id: "main-title"], "A title")))
"<body><h1 id='main-title'>A title</h1></body>"
iex> Eml.compile(body(h1([id: "main-title"], "A title")), quotes: :double)
"<body><h1 id=\"main-title\">A title</h1></body>"
iex> Eml.compile(p "Tom & Jerry")
"<p>Tom & Jerry</p>"
"""
@spec compile(t, Dict.t) :: String.t
def compile(content, opts \\ [])
def compile({ :safe, string }, _opts) do
string
end
def compile(content, opts) do
case Eml.Compiler.compile(content, opts) do
{ :safe, string } ->
string
_ ->
raise Eml.CompileError, message: "Bad argument: #{inspect content}"
end
end
@doc """
Recursively transforms `eml` content.
It traverses all nodes of the provided eml tree. The provided transform
function will be evaluated for every node `transform/3` encounters. Parent
nodes will be transformed before their children. Child nodes of a parent will
be evaluated before moving to the next sibling.
When the provided function returns `nil`, the node will be removed from the
eml tree.
Note that because parent nodes are evaluated before their children, no
children will be evaluated if the parent is removed.
### Examples:
iex> e = div do
...> span [id: "inner1", class: "inner"], "hello "
...> span [id: "inner2", class: "inner"], "world"
...> end
#div<[#span<%{id: "inner1", class: "inner"} "hello ">,
#span<%{id: "inner2", class: "inner"} "world">]>
iex> Eml.transform(e, fn
...> span(_) -> "matched"
...> node -> node
...> end)
#div<["matched", "matched"]>
iex> transform(e, fn node ->
...> IO.inspect(node)
...> node
...> end)
#div<[#span<%{class: "inner", id: "inner1"} "hello ">,
#span<%{class: "inner", id: "inner2"} "world">]>
#span<%{class: "inner", id: "inner1"} "hello ">
"hello "
#span<%{class: "inner", id: "inner2"} "world">
"world"
#div<[#span<%{class: "inner", id: "inner1"} "hello ">,
#span<%{class: "inner", id: "inner2"} "world">]>
"""
@spec transform(t, (t -> t)) :: t | nil
def transform(nil, _fun) do
nil
end
def transform(eml, fun) when is_list(eml) do
for node <- eml, t = transform(node, fun), do: t
end
def transform(node, fun) do
case fun.(node) do
%Element{content: content} = node ->
%Element{node| content: transform(content, fun)}
node ->
node
end
end
@doc """
Recursively reduces a tree of nodes
### Example
iex> tree = div do
...> span 1
...> span 2
...> end
iex> list = [tree, tree]
iex> Eml.collect(list, [], fn node, acc ->
...> if is_integer(node) do
...> [node | acc]
...> else
...> acc
...> end
...> end)
[2, 1, 2, 1]
"""
@spec collect(t, term, (t, term -> term)) :: term
def collect(eml, acc \\ %{}, fun)
def collect(eml, acc, fun) when is_list(eml) do
Enum.reduce(eml, acc, &collect(&1, &2, fun))
end
def collect(%Element{} = eml, acc, fun) do
Enum.reduce(eml, acc, fun)
end
def collect(eml, acc, fun) do
fun.(eml, acc)
end
@doc """
Match on element tag, attributes, or content
Implemented as a macro.
### Examples:
iex> use Eml
iex> use Eml.HTML
iex> node = section [id: "my-section"], [div([id: "some_id"], "Some content"), div([id: "another_id"], "Other content")]
iex> Eml.match?(node, attrs: %{id: "my-section"})
true
iex> Eml.match?(node, tag: :div)
false
iex> Enum.filter(node, &Eml.match?(&1, tag: :div))
[#div<%{id: "some_id"} "Some content">, #div<%{id: "another_id"}
"Other content">]
iex> Eml.transform(node, fn node ->
...> if Eml.match?(node, content: "Other content") do
...> put_in(node.content, "New content")
...> else
...> node
...> end
...> end)
#section<%{id: "my-section"}
[#div<%{id: "some_id"} "Some content">, #div<%{id: "another_id"}
"New content">]>
"""
defmacro match?(node, opts \\ []) do
tag = opts[:tag] || quote do: _
attrs = opts[:attrs] || quote do: _
content = opts[:content] || quote do: _
quote do
case unquote(node) do
%Eml.Element{tag: unquote(tag), attrs: unquote(attrs), content: unquote(content)} ->
true
_ ->
false
end
end
end
@doc """
Extracts a value recursively from content
### Examples
iex> Eml.unpack [42]
42
iex> Eml.unpack 42
42
iex> Eml.unpack(div "hallo")
"hallo"
iex> Eml.unpack Eml.unpack(div(span("hallo")))
"hallo"
iex> Eml.unpack div(span(42))
42
iex> Eml.unpack div([span("Hallo"), span(" world")])
["Hallo", " world"]
"""
@spec unpack(t) :: t
def unpack(%Element{content: content}) do
unpack(content)
end
def unpack([node]) do
unpack(node)
end
def unpack(content) when is_list(content) do
for node <- content, do: unpack(node)
end
def unpack({ :safe, node }) do
node
end
def unpack(node) do
node
end
@doc """
Escape content
### Examples
iex> escape "Tom & Jerry"
"Tom &amp; Jerry"
iex> escape div span("Tom & Jerry")
#div<[#span<["Tom &amp; Jerry"]>]>
"""
@spec escape(t) :: t
defdelegate escape(eml), to: Eml.Compiler
@doc """
Unescape content
### Examples
iex> unescape "Tom &amp; Jerry"
"Tom & Jerry"
iex> unescape div span("Tom &amp; Jerry")
#div<[#span<["Tom & Jerry"]>]>
"""
@spec unescape(t) :: t
defdelegate unescape(eml), to: Eml.Parser
# use Eml
@doc """
Import macros from this module and alias `Eml.Element`.
Accepts the following options:
* `:compile` - Set compile options as a Keyword list for all templates,
components and fragments that are defined in the module where `use Eml` is
invoked. See `Eml.compile/2` for all available options.
* `:elements` - Which elements to import in the current scope. Accepts a
module, or list of modules and defaults to `Eml.HTML`. When you don't want
to import any elements, set to `nil` or `false`.
"""
defmacro __using__(opts) do
use_elements = if mods = Keyword.get(opts, :elements, @default_elements) do
for mod <- List.wrap(mods) do
quote do: use unquote(mod)
end
end
compile_opts = Keyword.get(opts, :compile, [])
if mod = __CALLER__.module do
Module.put_attribute(mod, :eml_compile, compile_opts)
end
quote do
unquote(use_elements)
import Eml, only: [
template: 2, template: 3,
templatep: 2, templatep: 3,
template_fn: 1, template_fn: 2,
component: 2, component: 3,
fragment: 2
]
end
end
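# Illustrative (assumed) usage of the options documented above:
#
#   defmodule MyApp.Templates do
#     use Eml, elements: Eml.HTML, compile: [quotes: :double]
#     # templates, components and fragments defined here pick up the compile
#     # options via the @eml_compile attribute set by the macro above
#   end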
end
|
lib/eml.ex
| 0.830353
| 0.785473
|
eml.ex
|
starcoder
|
defmodule ExSDP.ConnectionData do
@moduledoc """
This module represents the Connection Information.
The address can be represented by either:
- IPv4 address
- IPv6 address
- FQDN (Fully Qualified Domain Name)
In the case of IPv4 and IPv6 multicast addresses there can be more than one
parsed from single SDP field if it is described using slash notation.
Sessions using an IPv4 multicast connection address MUST also have
a time to live (TTL) value present in addition to the multicast
address.
For more details please see [RFC4566 Section 5.7](https://tools.ietf.org/html/rfc4566#section-5.7)
"""
use Bunch
use Bunch.Access
alias ExSDP.{Address, Utils}
@enforce_keys [:address]
defstruct @enforce_keys ++ [:address_count, :ttl, network_type: "IN"]
@type t :: %__MODULE__{
address: Address.t(),
address_count: pos_integer() | nil,
ttl: 0..255 | nil,
network_type: binary()
}
@type reason ::
:invalid_addrtype
| :invalid_address
| :ip6_cannot_have_ttl
| :invalid_ttl_or_address_count
@spec parse(binary()) :: {:ok, t()} | {:error, reason}
def parse(connection) do
with {:ok, [nettype, addrtype, connection_address]} <- Utils.split(connection, " ", 3),
{:ok, addrtype} <- Address.parse_addrtype(addrtype),
[address | ttl_with_address_count] <- String.split(connection_address, "/", parts: 2),
{:ok, address} <- Address.parse_address(address),
{:ok, ttl, address_count} <-
parse_ttl_with_address_count(ttl_with_address_count, addrtype) do
# check whether fqdn
address = if is_binary(address), do: {addrtype, address}, else: address
connection_data = %__MODULE__{
address: address,
address_count: address_count,
ttl: ttl,
network_type: nettype
}
{:ok, connection_data}
else
{:error, _reason} = error -> error
_ -> {:error, :invalid_address}
end
end
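# Illustrative examples (assumed result shapes) for the multicast cases
# described in the moduledoc:
#
#   parse("IN IP4 224.2.36.42/127")
#   #=> {:ok, %ExSDP.ConnectionData{address: {224, 2, 36, 42}, ttl: 127, ...}}
#
#   parse("IN IP6 FF15::101/3")
#   #=> {:ok, %ExSDP.ConnectionData{address_count: 3, ttl: nil, ...}}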
defp parse_ttl_with_address_count([], _addrtype), do: {:ok, nil, nil}
defp parse_ttl_with_address_count([""], _addrtype), do: {:error, :invalid_ttl_or_address_count}
defp parse_ttl_with_address_count([ttl_with_address_count], :IP4) do
with [ttl] <- String.split(ttl_with_address_count, "/"),
{:ok, ttl} when ttl in 0..255 <- Utils.parse_numeric_string(ttl) do
{:ok, ttl, nil}
else
[ttl, address_count] ->
with {:ok, ttl} <- Utils.parse_numeric_string(ttl),
{:ok, address_count} <- Utils.parse_numeric_string(address_count) do
{:ok, ttl, address_count}
else
_ -> {:error, :invalid_ttl_or_address_count}
end
_ ->
{:error, :invalid_ttl_or_address_count}
end
end
defp parse_ttl_with_address_count([ttl_with_address_count], :IP6) do
case String.split(ttl_with_address_count, "/") do
[address_count] ->
case Utils.parse_numeric_string(address_count) do
{:ok, address_count} -> {:ok, nil, address_count}
_ -> {:error, :invalid_ttl_or_address_count}
end
[_ttl, _address_count] ->
{:error, :ip6_cannot_have_ttl}
_ ->
{:error, :invalid_ttl_or_address_count}
end
end
end
defimpl String.Chars, for: ExSDP.ConnectionData do
alias ExSDP.{Address, ConnectionData}
def to_string(%ConnectionData{} = connection) do
"""
#{connection.network_type} \
#{Address.get_addrtype(connection.address)} \
#{Address.serialize_address(connection.address)}\
#{serialize_ttl_with_address_count(connection.ttl, connection.address_count)}\
"""
end
defp serialize_ttl_with_address_count(nil, nil), do: ""
defp serialize_ttl_with_address_count(ttl, nil), do: "/#{ttl}"
defp serialize_ttl_with_address_count(nil, address_count), do: "/#{address_count}"
defp serialize_ttl_with_address_count(ttl, address_count), do: "/#{ttl}/#{address_count}"
end
|
lib/ex_sdp/connection_data.ex
| 0.712932
| 0.475727
|
connection_data.ex
|
starcoder
|
defmodule Bible.Reader do
@moduledoc """
This module provides services related to what parts of the Bible
have been read over periods of time. For example, the following information can be obtained:
1 - Percent of Bible read during a specific period.
2 - List of readings during a specific time period.
3 - List of what has not been read during a period of time.
Information is read from a string or file where entries are:
dd-mmm-yyyy : citation
...
These are all then put into a binary that has the following format:
<<days>> :: 16 bit unsigned : Days since Jan 1, 2000
<<start book>> :: 8 bit unsigned : Starting book number
<<start chapter>> :: 8 bit unsigned : Starting chapter number
<<start verse>> :: 8 bit unsigned : Starting verse number
<<end book>> :: 8 bit unsigned : Ending book number
<<end chapter>> :: 8 bit unsigned : Ending chapter number
<<end verse>> :: 8 bit unsigned : Ending verse number
<<days since 1/1/2000>><<book>><<chap>><<verse>><<book>><<chap>><<verse>>
...
...
There is also a readings map, which is a bitmap of all verses in the
provided version. A bit set indicates the verse has been read.
Requires that the BibleServer is running.
"""
defstruct [
:date,
:ref
]
@doc """
This returns the number of verses in the provided reference range, along
with the number of those verses that have been read.
info is the info from the Version.
reference is the string reference such as "John 1-5"
readings is a list of the reading entries in the internal format
shown above.
"""
def reading_metrics(readings, reference, info) do
ref = Bible.References.exp_bible_reference(reference, info)
{start, stop} = Bible.Info.get_reference_range(info, ref)
length = stop - start + 1
s1 = start - 1
<<(<<_::bitstring-size(s1)>>), <<bin::bitstring-size(length)>>, (<<_::bitstring>>)>> =
readings
total = bit_size(bin)
read = for(<<bit::size(1) <- bin>>, do: bit) |> Enum.sum()
{total, read}
end
@doc """
Returns a binary list of readings between the dates specified. Requires that a readings binary be provided.
Readings is a list of the readings in the internal compressed format shown
above.
"""
def filter_by_date(readings, start_date, end_date) do
readings
|> Enum.filter(fn r -> Date.compare(r.date, end_date) != :gt end)
|> Enum.filter(fn r -> Date.compare(r.date, start_date) != :lt end)
end
@doc """
Returns a verse map given a set of readings and a version (info). The map has a bit for every verse in the Bible version. Each verse that has been read
according to the readings will be set.
"""
def to_verse_map(readings, info) do
x = Bible.Info.get_total_verse_count(info)
readings
|> Enum.reduce(<<0::size(x)>>, fn reading, acc -> add_reading(acc, reading, info) end)
end
def verse_count(verse_map) do
for(<<bit::size(1) <- verse_map>>, do: bit) |> Enum.sum()
end
def add_reading(readings, reading, info) do
# { a, b, c, d, e, f } = reading
{first_v, last_v} = Bible.Info.get_reference_range(info, reading.ref)
p1 = first_v - 1
p2 = last_v - first_v + 1
p3 = bit_size(readings) - last_v
<<s1::size(p1), _::size(p2), s3::size(p3)>> = readings
<<(<<s1::size(p1)>>)::bitstring, <<-1::size(p2)>>::bitstring, <<s3::size(p3)>>::bitstring>>
end
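# Worked example of the bit arithmetic above (illustrative): marking verses
# 3..5 as read in a 10-verse map. With first_v = 3 and last_v = 5 we get
# p1 = 2, p2 = 3 and p3 = 5, so
#
#   add_reading(<<0::size(10)>>, reading, info)
#   #=> <<0::size(2), 0b111::size(3), 0::size(5)>>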
# def load_bible_readings do
# File.read!(Application.fetch_env!(:bible, :bible_readings_file))
# |> load_readings_string
# end
@doc """
The reading list is an ordered list of all recorded bible readings. It is
sorted from earliest to latest. Each entry in the list is the Bible.Reader
structure.
"""
def load_bible_readings(filepath, info) do
File.read!(filepath)
|> load_readings_string(info)
end
def load_readings_string(readings, info) do
readings
# Get list of lines.
|> String.split("\n")
|> Enum.filter(&(String.length(&1) > 0))
# Have date and ref.
|> Enum.map(&String.split(&1, " : ", parts: 2))
# Add reference into entry.
|> Enum.map(&add_ref_days(&1, info))
|> List.flatten()
|> Enum.sort_by(&Date.to_erl(&1.date))
end
# Change to accept a multi-reference line after date.
# Need an entry in the map called Refs that is a list of maps for each
# individual ref.
defp add_ref_days([string_date, reading], info) do
string_date = String.trim(string_date)
{:ok, date} = Date.Temp.parse(string_date)
Bible.References.exp_bible_references(reading, info)
|> Enum.map(fn ref -> %Bible.Reader{date: date, ref: ref} end)
end
@doc """
Returns the following results:
{ % To Target Last 7 days,
% To Target Last 30 days,
% To Target Last 365 days,
[ Last 5 readings ] }
"""
def read_metrics(readings, info) do
# end_date = Timex.now
{:ok, end_date} = Date.Temp.now()
days_list = [1, 7, 30, 365]
target_attainment =
days_list
|> Enum.map(&to_date_range(&1, end_date))
|> Enum.map(fn {sdate, edate} -> filter_by_date(readings, sdate, edate) end)
# |> Enum.map(&(Bible.Reader.filter_by_date(readings,&1)))
|> Enum.map(&to_verse_map(&1, info))
|> Enum.map(&reading_metrics(&1, "Genesis - Revelation", info))
|> Enum.map(fn {total, read} -> read / total end)
|> Enum.zip(days_list)
|> Enum.map(fn {percent, days} -> {days, percent * 365 / days} end)
# latest = for(<< _days :: unsigned-integer-size(16),
# start_book_number :: unsigned-integer-size(8),
# start_chap :: unsigned-integer-size(8),
# start_vers :: unsigned-integer-size(8),
# end_book_number :: unsigned-integer-size(8),
# end_chap :: unsigned-integer-size(8),
# end_vers :: unsigned-integer-size(8) <- readings >>, do:
# { Bible.Info.get_book_name(info, start_book_number), start_chap, start_vers,
# Bible.Info.get_book_name(info, end_book_number), end_chap, end_vers })
latest =
readings
|> Enum.take(-5)
|> Enum.map(&Bible.References.reduce_reference(&1.ref, info))
|> Enum.reverse()
{target_attainment, latest}
end
defp to_date_range(days, end_date) do
# {Timex.shift(end_date, days: -days+1),end_date}
{Date.Temp.shift(end_date, days: -days + 1), end_date}
end
def t do
esv = Bible.Info.get_bible_info(Bible.Versions.ESV)
filepath = System.user_home() <> "/Projects/FileliF/Compendiums/Bible/Read List.txt"
readings = load_bible_readings(filepath, esv)
# IO.inspect {:readings, readings}
{:ok, start_date} = Date.new(2017, 11, 03)
{:ok, end_date} = Date.new(2017, 11, 03)
IO.inspect({start_date, end_date})
filter_by_date(readings, start_date, end_date)
end
end
|
lib/bible/reader.ex
| 0.672439
| 0.589864
|
reader.ex
|
starcoder
|
defmodule Lanyard.Gateway.Utility do
@spec normalize_atom(atom) :: String.t
def normalize_atom(atom) do
atom |> Atom.to_string |> String.downcase |> String.to_atom
end
@doc "Build a binary payload for discord communication"
@spec payload_build(number, map, number, String.t) :: binary
def payload_build(op, data, seq_num \\ nil, event_name \\ nil) do
load = %{"op" => op, "d" => data}
load
|> _update_payload(seq_num, "s", seq_num)
|> _update_payload(event_name, "t", event_name)
|> :erlang.term_to_binary
end
@doc "Build a json payload for discord communication"
@spec payload_build_json(number, map, number, String.t) :: binary
def payload_build_json(op, data, seq_num \\ nil, event_name \\ nil) do
load = %{"op" => op, "d" => data}
load
|> _update_payload(seq_num, "s", seq_num)
|> _update_payload(event_name, "t", event_name)
|> Poison.encode!
end
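# Illustrative example (key order in the encoded JSON may vary):
#
#   payload_build_json(1, %{"seq" => 42})
#   #=> ~s({"op":1,"d":{"seq":42}})
#
# When seq_num or event_name are given, the "s" and "t" keys are merged in
# by _update_payload/4 below.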
@doc "Decode binary payload received from discord into a map"
@spec payload_decode(list, {atom, binary}) :: map
def payload_decode(codes, {:binary, payload}) do
payload = :erlang.binary_to_term(payload)
%{op: opcode(codes, payload[:op] || payload["op"]), data: (payload[:d] || payload["d"]), seq_num: (payload[:s] || payload["s"]), event_name: (payload[:t] || payload["t"])}
end
@doc "Decode json payload received from discord into a map"
@spec payload_decode(list, {atom, binary}) :: map
def payload_decode(codes, {:text, payload}) do
payload = Poison.decode!(payload)
%{op: opcode(codes, payload[:op] || payload["op"]), data: (payload[:d] || payload["d"]), seq_num: (payload[:s] || payload["s"]), event_name: (payload[:t] || payload["t"])}
end
@doc "Get the integer value for an opcode using it's name"
@spec opcode(map, atom) :: integer
def opcode(codes, value) when is_atom(value) do
codes[value]
end
@doc "Get the atom value of and opcode using an integer value"
@spec opcode(map, integer) :: atom
def opcode(codes, value) when is_integer(value) do
{k, _value} = Enum.find codes, fn({_key, v}) -> v == value end
k
end
@doc "Generic function for getting the value from an agent process"
@spec agent_value(pid) :: any
def agent_value(agent) do
Agent.get(agent, fn a -> a end)
end
@doc "Generic function for updating the value of an agent process"
@spec agent_update(pid, any) :: nil
def agent_update(agent, n) do
if n != nil do
Agent.update(agent, fn _a -> n end)
end
end
# Makes it easy to just update and pipe a payload
defp _update_payload(load, var, key, value) do
if var do
Map.put(load, key, value)
else
load
end
end
end
|
lib/gateway/utility.ex
| 0.780077
| 0.419856
|
utility.ex
|
starcoder
|
defmodule Solution do
@moduledoc """
A Macro-based approach to working with ok/error tuples
This module exposes two main things:
1. guard-clause macros `is_ok/1`, `is_error/1` and `is_okerror/1` (as well as arity-2 variants of the same), to be used whenever you like.
2. `scase/2` and `swith/2`, replacements for `case` and `with`-statements respectively that allow you to pattern match on ok/error tuples more effectively, as well as bind to one or more of the values stored inside.
"""
@doc """
Matches when `x` is one of the following:
- `:ok`
- `{:ok, _}`
- `{:ok, _, _}`
- or a longer tuple where the first element is the atom `:ok` (`{:ok, ...}`)
iex> is_ok(:ok)
true
iex> is_ok({:ok, 42})
true
iex> is_ok({:ok, "I", "have", "many", "elements"})
true
iex> is_ok(:asdf)
false
iex> is_ok({:error, "failure"})
false
"""
defguard is_ok(x) when x == :ok or (is_tuple(x) and tuple_size(x) >= 1 and elem(x, 0) == :ok)
@doc """
Matches when `x` is an ok-tuple with at least `n_elems` elements.
"""
defguard is_ok(x, n_elems) when is_ok(x) and (n_elems == 0 or tuple_size(x) >= n_elems)
@doc """
Matches when `x` is one of the following:
- `:error`
- `{:error, _}`
- `{:error, _, _}`
- or a longer tuple where the first element is the atom `:error`. (`{:error, ...}`)
iex> is_error(:error)
true
iex> is_error({:error, 42})
true
iex> is_error({:error, "I", "have", "many", "elements"})
true
iex> is_error(:asdf)
false
iex> is_error({:ok, "success!"})
false
"""
defguard is_error(x)
when x == :error or (is_tuple(x) and tuple_size(x) >= 1 and elem(x, 0) == :error)
@doc """
Matches when `x` is an error-tuple with at least `n_elems` elements.
"""
defguard is_error(x, n_elems) when is_error(x) and (n_elems == 0 or tuple_size(x) >= n_elems)
@doc """
Matches when either `is_ok(x)` or `is_error(x)` matches.
iex> is_okerror({:ok, "Yay!"})
true
iex> is_okerror({:error, "Nay"})
true
iex> is_okerror(false)
false
iex> is_okerror({})
false
iex> is_okerror({:ok, "the", "quick", "brown", "fox"})
true
"""
defguard is_okerror(x) when is_ok(x) or is_error(x)
@doc """
Matches when `x` is an ok- or error-tuple with at least `n_elems` elements.
Warning: Will _not_ match plain `:ok` or `:error`!
"""
defguard is_okerror(x, n_elems) when is_ok(x, n_elems) or is_error(x, n_elems)
@doc """
Matches any ok datatype.
(See also `is_ok/1`)
Has to be used inside the LHS of a `scase` or `swith` statement.
"""
defmacro ok() do
guard_env = Map.put(__ENV__, :context, :guard)
{:when, [],
[
{:latest_solution_match____, [], nil},
{:is_ok, [context: Elixir, import: Solution], [{:latest_solution_match____, [], nil}]}
]}
|> Macro.prewalk(&Macro.expand(&1, guard_env))
end
# Expands to x when is_ok(x, min_length)
# Used internally by `expand_match`
defmacro __ok__(min_length) do
guard_env = Map.put(__ENV__, :context, :guard)
{:when, [],
[
{:latest_solution_match____, [], nil},
{:is_ok, [context: Elixir, import: Solution],
[{:latest_solution_match____, [], nil}, min_length]}
]}
|> Macro.prewalk(&Macro.expand(&1, guard_env))
end
@doc """
Matches `{:ok, res}` (as well as tuples with more elements). `res` is then bound.
(See also `is_ok/1`)
Has to be used inside the LHS of a `scase` or `swith` statement.
"""
defmacro ok(res) do
quote do
{:ok, unquote(res)}
end
end
@doc """
Matches any error datatype.
(See also `is_error/1`)
Has to be used inside the LHS of a `scase` or `swith` statement.
"""
defmacro error() do
guard_env = Map.put(__ENV__, :context, :guard)
{:when, [],
[
{:latest_solution_match____, [], nil},
{:is_error, [context: Elixir, import: Solution], [{:latest_solution_match____, [], nil}]}
]}
|> Macro.prewalk(&Macro.expand(&1, guard_env))
end
@doc """
Matches `{:error, res}` (as well as tuples with more elements). `res` is then bound.
(See also `is_error/1`)
Has to be used inside the LHS of a `scase` or `swith` statement.
"""
defmacro error(res) do
quote do
{:error, unquote(res)}
end
end
# Expands to x when is_error(x, min_length)
# Used internally by `expand_match`
defmacro __error__(min_length) do
guard_env = Map.put(__ENV__, :context, :guard)
{:when, [],
[
{:latest_solution_match____, [], nil},
{:is_error, [context: Elixir, import: Solution],
[{:latest_solution_match____, [], nil}, min_length]}
]}
|> Macro.prewalk(&Macro.expand(&1, guard_env))
end
@doc """
Matches any ok/error datatype.
Has to be used inside the LHS of a `scase` or `swith` statement.
"""
defmacro okerror() do
guard_env = Map.put(__ENV__, :context, :guard)
{:when, [],
[
{:latest_solution_match____, [], nil},
{:is_okerror, [context: Elixir, import: Solution], [{:latest_solution_match____, [], nil}]}
]}
|> Macro.prewalk(&Macro.expand(&1, guard_env))
end
@doc """
Matches any ok or error type. `tag` is then bound to `:ok` or `:error`.
(See also `is_okerror/1`)
Has to be used inside the LHS of a `scase` or `swith` statement.
"""
defmacro okerror(tag) do
quote do
{unquote(tag), _} when unquote(tag) in [:ok, :error]
end
end
@doc """
Matches `{:ok, res}`, `{:error, res}` (as well as tuples with more elements). `tag` and `res` are bound.
(See also `is_okerror/1`)
`tag` is bound to the value `:ok` or `:error` depending on the tuple.
`res` is bound to what the second element might be.
Has to be used inside the LHS of a `scase` or `swith` statement.
"""
defmacro okerror(tag, res) do
quote do
{unquote(tag), unquote(res)} when unquote(tag) in [:ok, :error]
end
end
# Expands to x when is_okerror(x, min_length)
# Used internally by `expand_match`
defmacro __okerror__(0) do
quote do
okerror()
end
end
defmacro __okerror__(min_length) do
guard_env = Map.put(__ENV__, :context, :guard)
{:when, [],
[
{:latest_solution_match____, [], nil},
{:is_okerror, [context: Elixir, import: Solution],
[{:latest_solution_match____, [], nil}, min_length]}
]}
|> Macro.prewalk(&Macro.expand(&1, guard_env))
end
@doc """
Works like a normal `case`-statement,
but will expand `ok()`, `error()` and `okerror()`macros to the left side of `->`.
iex> scase {:ok, 10} do
...> ok() -> "Yay!"
...> _ -> "Failure"
...> end
"Yay!"
You can also pass arguments to `ok()`, `error()` or `okerror()` which will then be bound and available
to be used inside the case expression:
iex> scase {:ok, "foo", 42} do
...> ok(res, extra) ->
...> "result: \#{res}, extra: \#{extra}"
...> _ ->
...> "Failure"
...> end
"result: foo, extra: 42"
Note that for `ok()` and `error()`, the first argument will match the first element after the `:ok` or `:error` tag.
On the other hand, for `okerror()`, the first argument will match the tag `:ok` or `:error`.
Note: It is not required to import Solution to use the macros inside `swith` without prefixing them.
"""
defmacro scase(input, conditions) do
guard_env = Map.put(__ENV__, :context, :guard)
{:case, [], [input, conditions]}
|> Macro.prewalk(fn node ->
case node do
{:->, meta, [[lhs], rhs]} ->
{lhs, rhs_list} = expand_match(lhs, [rhs])
lhs = Macro.expand(lhs, guard_env)
rhs = {:__block__, [], rhs_list}
node = {:->, meta, [[lhs], rhs]}
Macro.expand(node, guard_env)
other ->
Macro.expand(other, __CALLER__)
end
end)
end
defp expand_match({tag, meta, args}, rhs)
when tag in [:ok, :error, :okerror] and is_list(args) do
var = Macro.var(:latest_solution_match____, nil)
args_amount = args_amount(args)
prefixes =
args
|> Enum.with_index()
|> Enum.map(fn {arg, index} ->
full_index = index + elem_offset(tag)
quote do
unquote(arg) = elem(unquote(var), unquote(full_index))
end
end)
lhs = {:"__#{tag}__", meta, [max(args_amount + elem_offset(tag) - 1, 0)]}
{lhs, prefixes ++ rhs}
end
defp expand_match(other, rhs) do
{other, rhs}
end
defp args_amount(args) do
case Enum.count(args) do
0 ->
0
other ->
other + 1
end
end
defp elem_offset(:okerror), do: 0
defp elem_offset(_), do: 1
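# Illustrative sketch (assumed from the expand_match logic above): inside
# `scase`, a clause head
#
#   ok(res, extra) -> body
#
# is rewritten to roughly
#
#   x when is_ok(x, 3) ->
#     res = elem(x, 1)
#     extra = elem(x, 2)
#     body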
@doc """
Works like a normal `with`-statement,
but will expand `ok()`, `error()` and `okerror()` macros to the left side of `<-`.
iex> x = {:ok, 10}
iex> y = {:ok, 33}
iex> swith ok(res) <- x,
...> ok(res2) <- y do
...> "We have: \#{res} \#{res2}"
...> else
...> _ -> "Failure"
...> end
"We have: 10 33"
You can also pass arguments to `ok()`, `error()` or `okerror()` which will then be bound and available
to be used inside the rest of the `swith`-expression:
iex> x = {:ok, 10}
iex> y = {:error, 33}
iex> z = {:ok, %{a: 42}}
iex> swith ok(res) <- x,
...> error(res2) <- y,
...> okerror(tag, metamap) <- z,
...> %{a: val} = metamap do
...> "We have: \#{res} \#{res2} \#{tag} \#{val}"
...> else
...> _ -> "Failure"
...> end
"We have: 10 33 ok 42"
Note that for `ok()` and `error()`, the first argument will match the first element after the `:ok` or `:error` tag.
On the other hand, for `okerror()`, the first argument will match the tag `:ok` or `:error`.
Note: It is not required to import Solution to use the macros inside `swith` without prefixing them.
"""
defmacro swith(statements, conditions)
defmacro swith(statement, conditions) do
do_swith([statement], conditions, __CALLER__)
end
# Since `swith` is a normal macro bound to normal function rules,
# define it for all possible arities.
for arg_num <- 1..252 do
args = 0..arg_num |> Enum.map(fn num -> Macro.var(:"statement#{num}", __MODULE__) end)
@doc false
defmacro swith(unquote_splicing(args), conditions) do
do_swith(unquote(args), conditions, __CALLER__)
end
end
defp do_swith(statements, conditions, caller_env) do
guard_env = Map.put(__ENV__, :context, :guard)
statements =
statements
|> Enum.flat_map(fn node ->
case node do
{:<-, meta, [lhs, rhs]} ->
{lhs, extra_statements} = expand_match(lhs, [])
lhs = Macro.expand(lhs, guard_env)
node = {:<-, meta, [lhs, rhs]}
[node | extra_statements]
other ->
[Macro.expand(other, caller_env)]
end
end)
quote do
with(unquote_splicing(statements), unquote(conditions))
end
end
@doc """
Changes an `:ok` into an `:error`, an `{:ok, ...}` into an `{:error, ...}` and vice-versa.
iex> invert_okerror(:ok)
:error
iex> invert_okerror({:ok, 1, 2, 3})
{:error, 1, 2, 3}
iex> invert_okerror({:error, "failure"})
{:ok, "failure"}
iex> invert_okerror("improper datatype")
** (ArgumentError) argument error
"""
def invert_okerror(okerror) do
case okerror do
:ok ->
:error
:error ->
:ok
x when is_ok(x, 1) ->
x
|> Tuple.delete_at(0)
|> Tuple.insert_at(0, :error)
x when is_error(x, 1) ->
x
|> Tuple.delete_at(0)
|> Tuple.insert_at(0, :ok)
_ ->
raise ArgumentError
end
end
@doc """
Turns a nillable type (that can be either `nil` or a non-nil value) into an ok/error tuple.
Also handles Erlang's 'nil'-type equivalent: the atom `:undefined`.
iex> from_nillable(nil)
{:error, nil}
iex> from_nillable(42)
{:ok, 42}
iex> (%{a: "yes!"} |> Map.get(:a) |> from_nillable())
{:ok, "yes!"}
iex> (%{a: "yes!"} |> Map.get(:b) |> from_nillable())
{:error, nil}
iex> from_nillable(:undefined)
{:error, :undefined}
"""
def from_nillable(thing) do
case thing do
nil ->
{:error, nil}
:undefined ->
{:error, :undefined}
_ ->
{:ok, thing}
end
end
end
|
lib/solution.ex
| 0.869341
| 0.746486
|
solution.ex
|
starcoder
|
defmodule Cuckoo.Bucket do
@moduledoc """
This module implements a Bucket.
"""
@type t :: :array.array()
@doc """
Creates a new bucket with the given size `n`.
"""
@spec new(pos_integer) :: t
def new(n) do
:array.new([{:default, nil}, n, :fixed])
end
@doc """
Sets the entry `index` to `element`.
Returns the updated bucket.
"""
@spec set(t, non_neg_integer, pos_integer) :: t
def set(bucket, index, element) do
:array.set(index, element, bucket)
end
@doc """
Resets the entry `index` to the default value.
Returns the updated bucket.
"""
@spec reset(t, non_neg_integer) :: t
def reset(bucket, index) do
:array.reset(index, bucket)
end
@doc """
Returns the element at the specified `index`.
"""
@spec get(t, non_neg_integer) :: pos_integer
def get(bucket, index) do
:array.get(index, bucket)
end
@doc """
Checks if the `bucket` has any room left.
Returns `{:ok, index}` if it finds an empty entry in the bucket,
otherwise returns `{:error, :full}`.
"""
@spec has_room?(t) :: {:ok, non_neg_integer} | {:error, :full}
def has_room?(bucket) do
index = array_find(bucket, fn x -> x == nil end)
if index do
{:ok, index}
else
{:error, :full}
end
end
@doc """
Returns `true` if the bucket contains the `element`, otherwise returns `false`.
"""
@spec contains?(t, pos_integer) :: boolean
def contains?(bucket, element) do
case find(bucket, element) do
{:ok, _} -> true
{:error, :inexistent} -> false
end
end
@doc """
Tries to find the given `element` in the `bucket`.
Returns `{:ok, index}` if it finds it, otherwise returns `{:error, :inexistent}`.
"""
@spec find(t, pos_integer) :: {:ok, non_neg_integer} | {:error, :inexistent}
def find(bucket, element) do
index = array_find(bucket, fn x -> x == element end)
if index do
{:ok, index}
else
{:error, :inexistent}
end
end
@spec array_find(t, (any -> boolean)) :: nil | non_neg_integer
defp array_find(array, fun) do
size = :array.size(array)
_array_find(array, size, size, fun)
end
@spec _array_find(t, non_neg_integer, non_neg_integer, (any -> boolean)) ::
nil | non_neg_integer
defp _array_find(_, _, 0, _), do: nil
defp _array_find(array, size, left, fun) do
index = size - left
if fun.(:array.get(index, array)) do
index
else
_array_find(array, size, left - 1, fun)
end
end
end
|
lib/bucket/bucket.ex
| 0.90385
| 0.636424
|
bucket.ex
|
starcoder
|
defmodule StatBuffer do
@moduledoc """
Defines a stat buffer.
A stat buffer is an efficient way to maintain a local incrementable count with a
given key that can later be flushed to persistent storage. In fast moving systems,
this provides a scalable way to keep track of counts without putting heavy loads
on a database.
Creating a buffer is as easy as:
defmodule Buffer do
use StatBuffer
end
Once we have defined our buffer module, we must then implement the `handle_flush/2`
callback that allows us to perform an operation with a provided key and counter.
This could mean something like updating a counter in a database.
defmodule Buffer do
use StatBuffer
def handle_flush(key, counter) do
# write to the database...
# handle_flush MUST return an :ok atom
:ok
end
end
We must then add our buffer to our supervision tree.
children = [
Buffer
]
Each flush operation is handled with its own supervised `Task` process. By
default, a failed flush operation will retry about 3 times within 3 seconds.
## Usage
With our buffer started, we can now increment key counters. A key can be any
valid term.
Buffer.increment("mykey") # increments by 1
Buffer.increment("mykey", 10) # increments by 10
Key counts are maintained in an ETS table. All keys are scoped to the given
buffer module - so multiple buffers using the same keys will not cause issues.
With the default buffer we setup above, the "mykey" counter will be flushed
after 5 seconds. Assuming no new operations occur with our buffer, the process
will be placed into hibernation after 10 seconds. All of this is configurable
through the options below.
## Options
A stat buffer comes with a few configurable options. We can pass any of these
options along with the use macro.
use StatBuffer, interval: 60_000, jitter: 20_000
* `:interval` - the time in milliseconds between the first increment for a
given key and its next flush callback being invoked. Defaults to `5_000`.
* `:jitter` - a max time in milliseconds that will be added to `interval` to
ensure some randomness in each flush invocation. The time added would be
randomly selected between 0 and `jitter`. Defaults to `0`.
* `:timeout` - the time in milliseconds between the last operation on a
buffer, and the process being hibernated. Defaults to `10_000`.
* `:backoff` - the time in milliseconds between a `handle_flush/2` callback
failing, and the next attempt occurring. Defaults to `1_000`.
"""
@doc """
Starts the buffer process.
## Options
The options available are the same as those provided in the "Options" section.
"""
@callback start_link(options()) :: GenServer.on_start()
@doc """
Callback for flushing a key for the buffer.
When a buffer key hits its set time interval, this function will be called and
provided with the key as well its current counter.
This function is called within its own Task and is supervised. If the
callback does not return `:ok` - the task will fail and attempt a retry
with configurable backoff.
"""
@callback handle_flush(key :: any(), counter :: integer()) :: :ok
@doc """
Increments a given key in the buffer by the provided count.
Each key is scoped to the buffer module. So duplicate keys across different
buffer modules will not cause issues.
"""
@callback increment(key :: any(), count :: integer()) :: :ok | :error
@doc """
This callback has been deprecated - use `c:increment/2` instead.
"""
@callback async_increment(key :: any(), count :: integer()) :: :ok
@doc """
Asynchronously flushes a given key from the buffer.
"""
@callback flush(key :: any()) :: :ok
@doc """
Returns the current state of a key from the buffer.
"""
@callback count(key :: any()) :: integer() | nil
@type t :: module
@type option ::
{:interval, non_neg_integer()}
| {:jitter, non_neg_integer()}
| {:timeout, non_neg_integer()}
| {:backoff, non_neg_integer()}
@type options :: [option()]
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@behaviour StatBuffer
default_opts = [
interval: 5_000,
jitter: 0,
timeout: 10_000,
backoff: 1_000
]
@opts Keyword.merge(default_opts, opts)
@impl StatBuffer
def start_link(opts \\ []) do
opts = Keyword.merge(@opts, opts)
StatBuffer.Supervisor.start_link(__MODULE__, opts)
end
@doc false
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]}
}
end
@impl StatBuffer
def handle_flush(_key, _counter) do
:ok
end
@impl StatBuffer
def increment(key, count \\ 1) do
StatBuffer.Worker.increment(__MODULE__, key, count)
end
@impl StatBuffer
@deprecated "Use increment/2 instead"
def async_increment(key, count \\ 1) do
StatBuffer.Worker.async_increment(__MODULE__, key, count)
end
@impl StatBuffer
def flush(key) do
StatBuffer.Worker.flush(__MODULE__, key)
end
@impl StatBuffer
def count(key) do
StatBuffer.Worker.count(__MODULE__, key)
end
defoverridable handle_flush: 2
end
end
end
|
lib/stat_buffer.ex
| 0.901107
| 0.644288
|
stat_buffer.ex
|
starcoder
|
defmodule Data.Item do
@moduledoc """
Item Schema
"""
use Data.Schema
alias Data.Effect
alias __MODULE__
alias Data.Item.Instance
alias Data.ItemAspecting
alias Data.NPCItem
alias Data.ShopItem
alias Data.Stats
@type instance :: %Instance{}
@types ["basic", "weapon", "armor","resource", "potion", "book", "bauble"]
@doc """
Item Schema
"""
@valid_effects %{
"basic" => ["recover"],
"weapon" => ["damage", "damage/type", "stats"],
"armor" => ["stats", "damage/type"],
"resource" => ["stats"],
"book" => ["stats"],
"bauble" => ["stats","stats/boost"],
"potion" => ["stats", "stats/boost"]
}
@required_fields [
:level,
:name,
:description,
:type,
:tags,
:keywords,
:stats,
:effects,
:cost,
:user_text,
:usee_text,
:is_usable,
:amount,
:whitelist_effects
]
@optional_fields [
:usage_command
]
schema "items" do
field(:name, :string)
field(:description, :string)
field(:type, :string)
field(:tags, {:array, :string}, default: [])
field(:keywords, {:array, :string})
field(:stats, Data.Stats)
field(:effects, {:array, Data.Effect})
field(:cost, :integer, default: 0)
field(:level, :integer, default: 1)
field(:usage_command, :string)
field(:user_text, :string, default: "You use [name] on [target].")
field(:usee_text, :string, default: "[user] uses [name] on you.")
field(:is_usable, :boolean, default: false)
field(:amount, :integer, default: 1)
field(:whitelist_effects, {:array, :string}, default: [])
has_many(:item_aspectings, ItemAspecting)
has_many(:item_aspects, through: [:item_aspectings, :item_aspect])
has_many(:npc_items, NPCItem)
has_many(:shop_items, ShopItem)
timestamps()
end
defdelegate compile(item), to: Item.Compiled
@doc """
List out item types
"""
@spec types() :: [String.t()]
def types(), do: @types
@doc """
List out item fields
"""
@spec fields() :: [atom()]
def fields(), do: @required_fields ++ @optional_fields
@doc """
Provide a starting point for the web panel to edit new statistics
"""
@spec basic_stats(atom) :: map
def basic_stats(:armor) do
%{
slot: "",
armor: 0
}
end
def basic_stats(:basic), do: %{}
def basic_stats(:weapon), do: %{}
def basic_stats(:resource), do: %{}
def basic_stats(:potion), do: %{}
def basic_stats(:bauble), do: %{}
def basic_stats(:book), do: %{}
@doc """
Create an instance of an item
"""
@spec instantiate(t()) :: instance()
def instantiate(item) do
case item.is_usable do
true ->
%Instance{id: item.id, created_at: Timex.now(), amount: item.amount}
false ->
%Instance{id: item.id, created_at: Timex.now()}
end
end
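@doc """
Cast and validate item parameters.
Validates the required fields, the item type, its stats and effects, and the
whitelisted effect types.
"""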
def changeset(struct, params) do
struct
|> cast(params, @required_fields ++ @optional_fields)
|> ensure_keywords()
|> validate_required(@required_fields)
|> validate_inclusion(:type, @types)
|> validate_stats()
|> Effect.validate_effects()
|> validate_effects()
|> validate_whitelist()
end
defp ensure_keywords(changeset) do
case changeset do
%{changes: %{keywords: _keywords}} ->
changeset
%{data: %{keywords: keywords}} when keywords != nil ->
changeset
_ ->
put_change(changeset, :keywords, [])
end
end
@doc """
Validate item statistics
"""
@spec validate_stats(Ecto.Changeset.t()) :: Ecto.Changeset.t()
def validate_stats(changeset) do
case get_change(changeset, :stats) do
nil ->
changeset
stats ->
_validate_stats(changeset, stats)
end
end
defp _validate_stats(changeset, stats) do
type = get_field(changeset, :type)
case Stats.valid?(type, stats) do
true ->
changeset
false ->
add_error(changeset, :stats, "are invalid")
end
end
@doc """
Validate effects are for the proper item type
"""
@spec validate_effects(Ecto.Changeset.t()) :: Ecto.Changeset.t()
def validate_effects(changeset) do
case get_change(changeset, :effects) do
nil ->
changeset
effects ->
_validate_effects(changeset, effects)
end
end
defp _validate_effects(changeset, effects) do
type = get_field(changeset, :type)
case effects |> Enum.all?(&(&1.kind in @valid_effects[type])) do
true ->
changeset
false ->
add_error(changeset, :effects, "can only include damage or stats effects")
end
end
defp validate_whitelist(changeset) do
case get_field(changeset, :whitelist_effects) do
nil ->
changeset
whitelist_effects ->
_validate_whitelist(changeset, whitelist_effects)
end
end
defp _validate_whitelist(changeset, whitelist_effects) do
case Enum.all?(whitelist_effects, &(&1 in Effect.types())) do
true ->
changeset
false ->
add_error(changeset, :whitelist_effects, "must all be a real type")
end
end
end
|
lib/data/item.ex
| 0.768516
| 0.415254
|
item.ex
|
starcoder
|
defmodule Meilisearch.Search do
@moduledoc """
Collection of functions used to search for documents matching a given query.
[MeiliSearch Documentation - Search](https://docs.meilisearch.com/references/search.html)
"""
alias Meilisearch.HTTP
@doc """
Search for documents matching a specific query in the given index.
([ref.](https://docs.meilisearch.com/reference/api/search.html#search-in-an-index-with-post-route))
A `search_query` value of `nil` will send a placeholder query.
## Options
* `offset` Number of documents to skip. Defaults to `0`
* `limit` Maximum number of documents returned. Defaults to `20`
* `filter` Filter queries by an attribute value. Defaults to `nil`
* `facetsDistribution` Facets for which to retrieve the matching count. Defaults to `nil`
* `attributesToRetrieve` Attributes to display in the returned documents. Defaults to `["*"]`
* `attributesToCrop` Attributes whose values have to be cropped. Defaults to `nil`
* `cropLength` Length used to crop field values. Defaults to `200`
* `attributesToHighlight` Attributes whose values will contain highlighted matching terms. Defaults to `nil`
* `matches` Defines whether an object that contains information about the matches should be returned or not. Defaults to `false`
* `sort` Sort search results according to the attributes and sorting order (asc or desc) specified. Defaults to `nil`
## Examples
iex> Meilisearch.Search.search("movies", "where art thou")
{:ok, %{
"hits" => [
%{
"id" => 2,
"tagline" => "They have a plan but not a clue",
"title" => "O' Brother Where Art Thou"
}
],
"offset" => 0,
"limit" => 20,
"nbHits" => 1,
"exhaustiveNbHits" => false,
"processingTimeMs" => 17,
"query" => "where art thou"
}}
iex> Meilisearch.Search.search("movies", nil, filter: "id = 2")
{:ok, %{
"hits" => [
%{
"id" => 2,
"tagline" => "They have a plan but not a clue",
"title" => "O' Brother Where Art Thou"
}
],
"offset" => 0,
"limit" => 20,
"nbHits" => 1,
"exhaustiveNbHits" => false,
"processingTimeMs" => 17,
"query" => "where art thou"
}}
iex> Meilisearch.Search.search("movies", "nothing will match")
{:ok, %{
"hits" => [],
"offset" => 0,
"limit" => 20,
"nbHits" => 0,
"exhaustiveNbHits" => false,
"processingTimeMs" => 27,
"query" => "nothing will match"
}}
"""
@spec search(String.t(), String.t() | nil, Keyword.t()) :: HTTP.response()
def search(uid, search_query, opts \\ []) do
params =
case search_query do
nil -> opts
q -> [{:q, q} | opts]
end
HTTP.post_request("indexes/#{uid}/search", Enum.into(params, %{}))
end
end
|
lib/meilisearch/search.ex
| 0.844072
| 0.684771
|
search.ex
|
starcoder
|
defmodule DataConverter do
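@moduledoc """
Converts Ergast API race results, lap timings and pit stop data into a
per-driver lap chart structure: placing per lap, pit stop laps, and the lap
of any accident, mechanical failure or disqualification.
"""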
@accident_statuses ["Accident", "Collision", "Spun off"]
@mechanical_problem_statuses [
"Clutch", "Electrical", "Engine", "Gearbox",
"Hydraulics", "Transmission", "Suspension", "Brakes",
"Mechanical", "Tyre", "Puncture", "Wheel",
"Heat shield fire", "Oil leak", "Water leak",
"Wheel nut", "Rear wing", "Engine misfire",
"Vibrations", "Alternator", "Collision damage",
"Pneumatics", "Fuel system", "Technical", "Oil pressure",
"Drivetrain", "Turbo", "ERS", "Power Unit",
"Water pressure", "Fuel pressure", "Throttle",
"Steering", "Electronics", "Exhaust",
"Retired", "Withdrew", "Power loss"
]
def convert_data(race_results, laps, pit_stops, drivers) do
%{
lap_count: get_lap_count(race_results),
laps: get_laps(race_results, drivers, laps, pit_stops),
lapped: [],
safety: []
}
end
def get_laps(race_results, _drivers, laps, pit_stops) do
initialize_laps(race_results)
|> set_laps_placing(race_results, laps)
|> set_laps_pit_stops(pit_stops)
end
def initialize_laps(race_results) do
# Ergast marks the starting position of the drivers that did not participate in the race with a 0.
driver_count = length(race_results)
{ret, _} = Enum.map_reduce race_results, 0, fn(e, count_of_drivers_with_position_0_on_grid) ->
{placing, count_of_drivers_with_position_0_on_grid} = case parse_int(e["grid"]) do
0 ->
pos = driver_count - count_of_drivers_with_position_0_on_grid
{pos, count_of_drivers_with_position_0_on_grid + 1}
pos ->
{pos, count_of_drivers_with_position_0_on_grid}
end
accident = case Enum.find_index(@accident_statuses, &(&1 == e["status"])) do
nil -> nil
_ -> [parse_int(e["laps"])]
end
mechanical = case Enum.find_index(@mechanical_problem_statuses, &(&1 == e["status"])) do
nil -> nil
_ -> [parse_int(e["laps"])]
end
disqualified = case e["status"] do
"Disqualified" -> [parse_int(e["laps"])]
_ -> nil
end
{%{
driver: e["Driver"],
placing: [placing],
pitstops: [],
mechanical: mechanical,
accident: accident,
disqualified: disqualified
},
count_of_drivers_with_position_0_on_grid}
end
ret
end
def set_laps_placing(laps, race_results, ergast_laps) do
laps_done_by_winner = get_lap_count(race_results)
Enum.reduce ergast_laps, laps, fn(ergast_lap, laps)->
case parse_int(ergast_lap["number"]) <= laps_done_by_winner do
false -> laps
true -> laps |> update_laps_with_timing_info(ergast_lap)
end
end
end
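# Both helpers below use the same splice pattern: split the lap list at the
# entry for the relevant driver, append to that entry's list, then flatten
# the pieces back into a single list.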
def update_laps_with_timing_info(laps, ergast_lap_info) do
Enum.reduce ergast_lap_info["Timings"], laps, fn(ergast_lap_info, laps)->
{front, back} = Enum.split_while laps, fn(x) -> x.driver["driverId"] != ergast_lap_info["driverId"] end
[lap_to_manipulate | rest] = back
lap_to_manipulate = put_in(lap_to_manipulate, [:placing], lap_to_manipulate.placing ++ [parse_int(ergast_lap_info["position"])])
List.flatten([front, lap_to_manipulate, rest])
end
end
def set_laps_pit_stops(laps, pit_stops) do
Enum.reduce pit_stops, laps, fn(pit_stop_info, laps)->
{front, back} = Enum.split_while laps, fn(x) -> x.driver["driverId"] != pit_stop_info["driverId"] end
[lap_to_manipulate | rest] = back
lap_to_manipulate = put_in(lap_to_manipulate, [:pitstops], lap_to_manipulate.pitstops ++ [parse_int(pit_stop_info["lap"])])
List.flatten([front, lap_to_manipulate, rest])
end
end
def get_lap_count(race_results) do
parse_int(hd(race_results)["laps"])
end
# utils
defp parse_int(str) when is_binary(str) do
{ret, _} = Integer.parse(str)
ret
end
defp parse_int(str) when is_integer(str) do
str
end
end
|
lib/data_converter.ex
| 0.544559
| 0.48182
|
data_converter.ex
|
starcoder
|
defmodule ExMicrosoftAzureStorage.Storage.Utilities do
@moduledoc """
Utilities
"""
@doc """
Adds a value to a list, which is a value in a dictionary.
## Examples
iex> %{foo: nil} |> ExMicrosoftAzureStorage.Storage.Utilities.add_to(:foo, :a)
%{foo: [:a]}
iex> %{foo: [:a]} |> ExMicrosoftAzureStorage.Storage.Utilities.add_to(:foo, :b)
%{foo: [:b, :a]}
iex> %{foo: [:a]} |> ExMicrosoftAzureStorage.Storage.Utilities.add_to(:foo, :b) |> ExMicrosoftAzureStorage.Storage.Utilities.add_to(:foo, :c)
%{foo: [:c, :b, :a]}
"""
def add_to(v = %{}, key, value) when is_atom(key) and is_atom(value),
do:
v
|> Map.update(
key,
[value],
&case &1 do
nil -> [value]
a -> [value | a] |> Enum.uniq()
end
)
@doc """
Converts a list of atoms to a representative string, based on a mapping table.
## Examples
iex> [:read, :write] |> ExMicrosoftAzureStorage.Storage.Utilities.set_to_string(%{read: "r", write: "w"})
"rw"
iex> [:read, :write, :update] |> ExMicrosoftAzureStorage.Storage.Utilities.set_to_string(%{read: "r", write: "w", create: "c"})
"rw"
"""
def set_to_string(set, mapping) when is_list(set) and is_map(mapping),
do:
set
|> Enum.uniq()
|> Enum.map(&Map.get(mapping, &1))
|> Enum.filter(&(&1 != nil))
|> Enum.join("")
@doc """
Reverses a map
## Examples
iex> %{read: "r", write: "w"} |> ExMicrosoftAzureStorage.Storage.Utilities.reverse_map()
%{"r" => :read, "w" => :write}
iex> %{"r" => :read, "w" => :write} |> ExMicrosoftAzureStorage.Storage.Utilities.reverse_map()
%{write: "w", read: "r"}
iex> %{"r" => :read, "w" => :write} |> ExMicrosoftAzureStorage.Storage.Utilities.reverse_map()
%{read: "r", write: "w"}
"""
def reverse_map(mapping),
do: mapping |> Enum.to_list() |> Enum.map(fn {k, v} -> {v, k} end) |> Map.new()
@doc """
Converts a string with shortcuts back into a list of atoms.
## Examples
iex> "rw" |> ExMicrosoftAzureStorage.Storage.Utilities.string_to_set(%{read: "r", write: "w", create: "c"})
[:read, :write]
"""
def string_to_set(string, mapping) when is_binary(string) and is_map(mapping) do
reverse_mapping = mapping |> reverse_map()
string
|> String.graphemes()
|> Enum.uniq()
|> Enum.map(&Map.get(reverse_mapping, &1))
|> Enum.filter(&(&1 != nil))
|> Enum.to_list()
end
@doc """
Converts a string literal "true" or "false" into appropriate boolean.
All other values return `false`.
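## Examples
iex> ExMicrosoftAzureStorage.Storage.Utilities.to_bool("true")
true
iex> ExMicrosoftAzureStorage.Storage.Utilities.to_bool("something else")
false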
"""
def to_bool("true"), do: true
def to_bool("false"), do: false
def to_bool(_), do: false
end
|
lib/storage/utilities.ex
| 0.863161
| 0.501953
|
utilities.ex
|
starcoder
|
defmodule UrbitEx.API.DM do
alias UrbitEx.{API, Actions, Utils, Resource, GraphStore}
alias UrbitEx.API.{Graph}
@moduledoc """
Client API to interact with `dm-hook`, new implementation of direct messaging
Fetch, send, accept and decline DMs.
"""
@doc """
Fetch new DMs. Takes a Session struct, a target @p, and an optional options keyword list.
For details check the `api/gall/graph.ex` module, DMs use the same endpoints.
"""
def fetch_newest(session, target, opts \\ []) do
{resource, target_ud} = set_data(session, target)
Graph.fetch_siblings(session, resource, target_ud, :newest, opts)
end
def fetch_oldest(session, target, opts \\ []) do
{resource, target_ud} = set_data(session, target)
Graph.fetch_siblings(session, resource, target_ud, :oldest, opts)
end
def fetch_younger_than(session, target, index, opts \\ []) do
{resource, target_ud} = set_data(session, target)
Graph.fetch_siblings(session, resource, target_ud<>index, :older, opts)
end
def fetch_older_than(session, target, index, opts \\ []) do
{resource, target_ud} = set_data(session, target)
Graph.fetch_siblings(session, resource, target_ud<>index, :younger, opts)
end
def fetch_subset(session, target, start_index, end_index, opts \\ []) do
{resource, target_ud} = set_data(session, target)
Graph.fetch_subset(session, resource, target_ud<>start_index, target_ud<>end_index, opts)
end
def fetch_children(session, target, parent_index, start_index \\ "/", end_index \\ "/", opts \\ []) do
{resource, target_ud} = set_data(session, target)
Graph.fetch_children(session, resource, target_ud<>parent_index, target_ud<>start_index, target_ud<>end_index, opts)
end
def fetch_node(session, target, index, opts \\ []) do
{resource, target_ud} = set_data(session, target)
Graph.fetch_node(session, resource, target_ud<>index, opts)
end
defp set_data(session, target) do
p = Utils.add_tilde(target)
target_ud = API.evaluate(session, "`@ud`#{p}") |> String.replace(".", "")
resource = Resource.new(session.ship, "dm-inbox")
{resource, "/#{target_ud}"}
end
@doc """
Sends a direct message to a ship, starting the conversation if necessary. Takes an UrbitEx.Session struct, a channel, a target @p, the message text and an optional custom payload.
"""
def send(session, channel, target, text, custom \\ nil) do
target_num =
API.evaluate(session, "`@ud`#{Utils.add_tilde(target)}") |> String.replace(".", "")
json = GraphStore.send_dm(session.ship, target_num, text, custom)
body = Actions.poke(session.ship, "dm-hook", "graph-update-3", json)
API.wrap_put(session, channel, [body])
end
@doc """
Accepts a Direct Message request from a ship. Takes an UrbitEx.Session struct, a channel, and the @p whose request to accept.
"""
def accept(session, channel, target) do
json = %{accept: Utils.add_tilde(target)}
body = Actions.poke(session.ship, "dm-hook", "dm-hook-action", json)
API.wrap_put(session, channel, [body])
end
@doc """
Declines a Direct Message request from a ship. Takes an UrbitEx.Session struct, a channel, and the @p whose request to decline.
"""
def decline(session, channel, target) do
json = %{decline: Utils.add_tilde(target)}
body = Actions.poke(session.ship, "dm-hook", "dm-hook-action", json)
API.wrap_put(session, channel, [body])
end
end
|
lib/api/gall/dm.ex
| 0.707101
| 0.407805
|
dm.ex
|
starcoder
|
defmodule PEnum do
@moduledoc """
Parallel `Enum`. This library provides a set of functions similar to the
ones in the [Enum](https://hexdocs.pm/elixir/Enum.html) module except that
the function is executed on each element in parallel.
The behavior of each of the functions should be the same as the `Enum` varieties,
except that order of execution is not guaranteed.
Except where otherwise noted, the function names are identical to the ones in
`Enum` but with a `p` in front. For example, `PEnum.pmap` is a parallel version of
`Enum.map`.
"""
@type element :: any
@spec pchunk_by(Enumerable.t(), (element -> any)) :: [list]
def pchunk_by(enumerable, fun) do
enumerable
|> pmap(fn n -> {fun.(n), n} end)
|> Enum.chunk_by(fn {f, _} -> f end)
|> Enum.map(fn chunk ->
Enum.map(chunk, fn {_, v} -> v end)
end)
end
@spec pcount(Enumerable.t(), (element -> as_boolean(term))) :: non_neg_integer
def pcount(enumerable, fun) do
enumerable
|> acc_func(fun, &Enum.count/2)
end
@spec pdedup_by(Enumerable.t(), (element -> term)) :: list
def pdedup_by(enumerable, fun) do
enumerable
|> by_func(fun, &Enum.dedup_by/2)
end
@spec peach(Enumerable.t(), (element -> any)) :: :ok
def peach(enumerable, fun) do
enumerable
|> Enum.map(fn n -> Task.async(fn -> fun.(n) end) end)
|> Enum.each(fn t -> Task.await(t, :infinity) end)
:ok
end
@spec pflat_map(Enumerable.t(), (element -> Enumerable.t())) :: list
def pflat_map(enumerable, fun) do
enumerable
|> acc_func(fun, &Enum.flat_map/2)
end
@spec pfilter(Enumerable.t(), (element -> as_boolean(term))) :: list
def pfilter(enumerable, fun) do
enumerable
|> by_func(fun, &Enum.filter/2)
end
@spec pgroup_by(Enumerable.t(), (element -> any), (element -> any)) :: map
def pgroup_by(enumerable, key_fun, value_fun \\ fn x -> x end)
def pgroup_by(enumerable, key_fun, value_fun) do
enumerable
|> pmap(fn n -> {key_fun.(n), n} end)
|> Enum.group_by(fn {f, _} -> f end, fn {_, v} -> value_fun.(v) end)
end
@spec group_byp(Enumerable.t(), (element -> any), (element -> any)) :: map
def group_byp(enumerable, key_fun, value_fun \\ fn x -> x end)
def group_byp(enumerable, key_fun, value_fun) do
enumerable
|> Enum.group_by(key_fun)
|> pinto(%{}, fn {k, group} ->
{k, pmap(group, value_fun)}
end)
end
@spec pgroup_byp(Enumerable.t(), (element -> any), (element -> any)) :: map
def pgroup_byp(enumerable, key_fun, value_fun \\ fn x -> x end)
def pgroup_byp(enumerable, key_fun, value_fun) do
enumerable
|> pmap(fn n -> {key_fun.(n), n} end)
|> pmap(fn {f, v} -> {f, value_fun.(v)} end)
|> Enum.group_by(fn {f, _} -> f end, fn {_, v} -> v end)
end
@spec pinto(Enumerable.t(), Collectable.t(), (term -> term)) :: Collectable.t()
def pinto(enumerable, collectable, transform) do
enumerable
|> pmap(transform)
|> Enum.into(collectable)
end
@spec pmap(Enumerable.t(), (element -> any)) :: list
def pmap(enumerable, fun) do
enumerable
|> Enum.map(fn n -> Task.async(fn -> fun.(n) end) end)
|> Enum.map(fn t -> Task.await(t, :infinity) end)
end
@spec pmap_every(Enumerable.t(), non_neg_integer, (element -> any)) :: list
def pmap_every(enumerable, nth, fun) do
enumerable
|> Enum.map_every(nth, fn n -> Task.async(fn -> fun.(n) end) end)
|> Enum.map_every(nth, fn t -> Task.await(t, :infinity) end)
end
@spec pmap_join(Enumerable.t(), String.t(), (element -> String.Chars.t())) :: String.t()
def pmap_join(enumerable, joiner \\ "", mapper) do
enumerable
|> pmap(mapper)
|> Enum.join(joiner)
end
@spec pmax_by(Enumerable.t(), (element -> any), (() -> empty_result)) ::
element | empty_result | no_return
when empty_result: any
def pmax_by(enumerable, fun, empty_fallback \\ fn -> raise Enum.EmptyError end) do
enumerable
|> num_by_func(fun, empty_fallback, &Enum.max_by/3)
end
@spec pmin_by(Enumerable.t(), (element -> any), (() -> empty_result)) ::
element | empty_result | no_return
when empty_result: any
def pmin_by(enumerable, fun, empty_fallback \\ fn -> raise Enum.EmptyError end) do
enumerable
|> num_by_func(fun, empty_fallback, &Enum.min_by/3)
end
@spec pmin_max_by(Enumerable.t(), (element -> any), (() -> empty_result)) ::
{element, element} | empty_result | no_return
when empty_result: any
def pmin_max_by(enumerable, fun, empty_fallback \\ fn -> raise Enum.EmptyError end) do
enumerable
|> pmap(fn n -> {fun.(n), n} end)
|> Enum.min_max_by(fn {f, _} -> f end, fn -> :empty end)
|> case do
:empty -> empty_fallback.()
{{_, nv}, {_, xv}} -> {nv, xv}
end
end
@spec preject(Enumerable.t(), (element -> as_boolean(term))) :: list
def preject(enumerable, fun) do
enumerable
|> by_func(fun, &Enum.reject/2)
end
@spec puniq_by(Enumerable.t(), (element -> term)) :: list
def puniq_by(enumerable, fun) do
enumerable
|> by_func(fun, &Enum.uniq_by/2)
end
defp by_func(enumerable, fun, wrap_fun) do
enumerable
|> pmap(fn n -> {fun.(n), n} end)
|> wrap_fun.(fn {f, _} -> f end)
|> Enum.map(fn {_, v} -> v end)
end
defp acc_func(enumerable, fun, acc_fun) do
enumerable
|> pmap(fun)
|> acc_fun.(& &1)
end
defp num_by_func(enumerable, fun, empty_fallback, num_by_fun) do
enumerable
|> pmap(fn n -> {fun.(n), n} end)
|> num_by_fun.(fn {f, _} -> f end, fn -> :empty end)
|> case do
:empty -> empty_fallback.()
{_, v} -> v
end
end
end
|
lib/p_enum.ex
| 0.889876
| 0.655818
|
p_enum.ex
|
starcoder
|
defmodule Cloudinary.Transformation.Effect do
@moduledoc """
The effect parameter of transformations.
To apply an effect without options, pass an `t:atom/0` as a transformation's effect parameter. To
apply with options, use a `t:tuple/0` instead. For details of each option, see the corresponding
documentation for that type.
"""
import Cloudinary.Transformation.Color
@typedoc """
A number greater than or equal to 0.
"""
@type non_neg_number :: non_neg_integer | float
@typedoc """
Any type of an effect.
"""
@type t ::
accelerate
| adv_redeye
| anti_removal
| art
| assist_colorblind
| auto_brightness
| auto_color
| auto_contrast
| background_removal
| blackwhite
| blue
| blur
| blur_faces
| blur_region
| boomerang
| brightness
| brightness_hsb
| cartoonify
| colorize
| contrast
| cut_out
| deshake
| displace
| distort
| fade
| fill_light
| gamma
| gradient_fade
| grayscale
| green
| hue
| improve
| loop
| make_transparent
| multiply
| negate
| noise
| oil_paint
| opacity_threshold
| ordered_dither
| outline
| overlay
| pixelate
| pixelate_faces
| pixelate_region
| preview
| progressbar
| recolor
| red
| redeye
| replace_color
| reverse
| saturation
| screen
| sepia
| shadow
| sharpen
| shear
| simulate_colorblind
| style_transfer
| tint
| transition
| trim
| unsharp_mask
| vectorize
| vibrance
| viesus_correct
| vignette
| volume
@doc """
Converts an atom or a tuple representing an effect parameter to a URL string.
"""
@spec to_url_string(t) :: String.t()
@typedoc """
The video speeding up effect.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:accelerate)
"accelerates"
iex> #{__MODULE__}.to_url_string({:accelerate, 100})
"accelerate:100"
"""
@type accelerate :: :accelerate | {:accelerate, -50..100 | float}
def to_url_string(:accelerate), do: "accelerate"
def to_url_string({:accelerate, acceleration})
when acceleration <= 100 and acceleration >= -50 do
"accelerate:#{acceleration}"
end
@typedoc """
The removing red eyes filter.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/advanced_facial_attributes_detection_addon
## Example
iex> #{__MODULE__}.to_url_string(:adv_redeye)
"adv_redeye"
"""
@type adv_redeye :: :adv_redeye
def to_url_string(:adv_redeye), do: "adv_redeye"
@typedoc """
The overlay anti removal level.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:anti_removal)
"anti_removal"
iex> #{__MODULE__}.to_url_string({:anti_removal, 90})
"anti_removal:90"
"""
@type anti_removal :: :anti_removal | {:anti_removal, 1..100 | float}
def to_url_string(:anti_removal), do: "anti_removal"
def to_url_string({:anti_removal, level}) when level <= 100 and level >= 1 do
"anti_removal:#{level}"
end
@typedoc """
The artistic filter.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string({:art, :frost})
"art:frost"
"""
@type art ::
{:art,
:al_dente
| :athena
| :audrey
| :aurora
| :daguerre
| :eucalyptus
| :fes
| :frost
| :hairspray
| :hokusai
| :incognito
| :linen
| :peacock
| :primavera
| :quartz
| :red_rock
| :refresh
| :sizzle
| :sonnet
| :ukulele
| :zorro}
def to_url_string({:art, filter})
when filter in [
:al_dente,
:athena,
:audrey,
:aurora,
:daguerre,
:eucalyptus,
:fes,
:frost,
:hairspray,
:hokusai,
:incognito,
:linen,
:peacock,
:primavera,
:quartz,
:red_rock,
:refresh,
:sizzle,
:sonnet,
:ukulele,
:zorro
] do
"art:#{filter}"
end
@typedoc """
The color blind friendly filter.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:assist_colorblind)
"assist_colorblind"
iex> #{__MODULE__}.to_url_string({:assist_colorblind, 8})
"assist_colorblind:8"
iex> #{__MODULE__}.to_url_string({:assist_colorblind, :xray})
"assist_colorblind:xray"
"""
@type assist_colorblind :: :assist_colorblind | {:assist_colorblind, 1..100 | float | :xray}
def to_url_string(:assist_colorblind), do: "assist_colorblind"
def to_url_string({:assist_colorblind, strength})
when (strength <= 100 and strength >= 1) or strength == :xray do
"assist_colorblind:#{strength}"
end
@typedoc """
The automatic brightness adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:auto_brightness)
"auto_brightness"
iex> #{__MODULE__}.to_url_string({:auto_brightness, 70})
"auto_brightness:70"
"""
@type auto_brightness :: :auto_brightness | {:auto_brightness, 0..100 | float}
def to_url_string(:auto_brightness), do: "auto_brightness"
def to_url_string({:auto_brightness, amount}) when amount <= 100 and amount >= 0 do
"auto_brightness:#{amount}"
end
@typedoc """
The automatic color balance adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:auto_color)
"auto_color"
iex> #{__MODULE__}.to_url_string({:auto_color, 90})
"auto_color:90"
"""
@type auto_color :: :auto_color | {:auto_color, 0..100 | float}
def to_url_string(:auto_color), do: "auto_color"
def to_url_string({:auto_color, amount}) when amount <= 100 and amount >= 0 do
"auto_color:#{amount}"
end
@typedoc """
The automatic contrast adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:auto_contrast)
"auto_contrast"
iex> #{__MODULE__}.to_url_string({:auto_contrast, 90})
"auto_contrast:90"
"""
@type auto_contrast :: :auto_contrast | {:auto_contrast, 0..100 | float}
def to_url_string(:auto_contrast), do: "auto_contrast"
def to_url_string({:auto_contrast, amount}) when amount <= 100 and amount >= 0 do
"auto_contrast:#{amount}"
end
@typedoc """
The background removing filter.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:background_removal)
"bgremoval"
iex> #{__MODULE__}.to_url_string({:background_removal, :screen})
"bgremoval:screen"
iex> #{__MODULE__}.to_url_string({:background_removal, '8AF02B'})
"bgremoval:8AF02B"
iex> #{__MODULE__}.to_url_string({:background_removal, {'8af02b', :screen}})
"bgremoval:screen:8af02b"
"""
@type background_removal ::
:background_removal
| {:background_removal,
Cloudinary.Transformation.Color.t()
| :screen
| {Cloudinary.Transformation.Color.t(), :screen}}
def to_url_string(:background_removal), do: "bgremoval"
def to_url_string({:background_removal, :screen}), do: "bgremoval:screen"
def to_url_string({:background_removal, clr}) when is_rgb(clr) or is_rgba(clr) do
"bgremoval:#{clr}"
end
def to_url_string({:background_removal, {clr, :screen}}) when is_rgb(clr) or is_rgba(clr) do
"bgremoval:screen:#{clr}"
end
@typedoc """
The black/white filter.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:blackwhite)
"blackwhite"
"""
@type blackwhite :: :blackwhite
def to_url_string(:blackwhite), do: "blackwhite"
@typedoc """
The blue channel adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:blue)
"blue"
iex> #{__MODULE__}.to_url_string({:blue, 90})
"blue:90"
"""
@type blue :: :blue | {:blue, -100..100 | float}
def to_url_string(:blue), do: "blue"
def to_url_string({:blue, blue}) when blue <= 100 and blue >= -100, do: "blue:#{blue}"
@typedoc """
The blur effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:blur)
"blur"
iex> #{__MODULE__}.to_url_string({:blur, 300})
"blur:300"
"""
@type blur :: :blur | {:blur, 1..2000 | float}
def to_url_string(:blur), do: "blur"
def to_url_string({:blur, strength}) when strength <= 2000 and strength >= 1 do
"blur:#{strength}"
end
@typedoc """
The blur effect on faces.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:blur_faces)
"blur_faces"
iex> #{__MODULE__}.to_url_string({:blur_faces, 600})
"blur_faces:600"
"""
@type blur_faces :: :blur_faces | {:blur_faces, 1..2000 | float}
def to_url_string(:blur_faces), do: "blur_faces"
def to_url_string({:blur_faces, strength}) when strength <= 2000 and strength >= 1 do
"blur_faces:#{strength}"
end
@typedoc """
The blur effect on a specified region.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:blur_region)
"blur_region"
iex> #{__MODULE__}.to_url_string({:blur_region, 200})
"blur_region:200"
"""
@type blur_region :: :blur_region | {:blur_region, 1..2000 | float}
def to_url_string(:blur_region), do: "blur_region"
def to_url_string({:blur_region, strength}) when strength <= 2000 and strength >= 1 do
"blur_region:#{strength}"
end
@typedoc """
The boomerang playing conversion.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
* https://cloudinary.com/documentation/video_manipulation_and_delivery#create_a_boomerang_video_clip
## Example
iex> #{__MODULE__}.to_url_string(:boomerang)
"boomerang"
"""
@type boomerang :: :boomerang
def to_url_string(:boomerang), do: "boomerang"
@typedoc """
The brightness adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:brightness)
"brightness"
iex> #{__MODULE__}.to_url_string({:brightness, 60})
"brightness:60"
"""
@type brightness :: :brightness | {:brightness, -99..100 | float}
def to_url_string(:brightness), do: "brightness"
def to_url_string({:brightness, adjustment}) when adjustment <= 100 and adjustment >= -99 do
"brightness:#{adjustment}"
end
@typedoc """
The brightness adjustment in HSB.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:brightness_hsb)
"brightness_hsb"
iex> #{__MODULE__}.to_url_string({:brightness_hsb, 50})
"brightness_hsb:50"
"""
@type brightness_hsb :: :brightness_hsb | {:brightness_hsb, -99..100 | float}
def to_url_string(:brightness_hsb), do: "brightness_hsb"
def to_url_string({:brightness_hsb, adjustment}) when adjustment <= 100 and adjustment >= -99 do
"brightness_hsb:#{adjustment}"
end
@typedoc """
The cartoon effect.
Options:
* `:line_strength` - the thickness of lines.
* `:color_reduction` - the decrease level in the number of colors or `:blackwhite`.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:cartoonify)
"cartoonify"
iex> #{__MODULE__}.to_url_string({:cartoonify, line_strength: 40})
"cartoonify:40"
iex> #{__MODULE__}.to_url_string({:cartoonify, color_reduction: 55})
"cartoonify:50:55"
iex> #{__MODULE__}.to_url_string({:cartoonify, color_reduction: :blackwhite})
"cartoonify:50:bw"
iex> #{__MODULE__}.to_url_string({:cartoonify, line_strength: 20, color_reduction: 60})
"cartoonify:20:60"
iex> #{__MODULE__}.to_url_string({:cartoonify, line_strength: 30, color_reduction: :blackwhite})
"cartoonify:30:bw"
"""
@type cartoonify :: :cartoonify | {:cartoonify, keyword | map}
def to_url_string(:cartoonify), do: "cartoonify"
def to_url_string({:cartoonify, options}) when is_list(options) do
to_url_string({:cartoonify, Enum.into(options, %{})})
end
def to_url_string({:cartoonify, options}) when is_map(options) do
__MODULE__.Cartoonify.to_url_string(options)
end
@typedoc """
The colorize effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:colorize)
"colorize"
iex> #{__MODULE__}.to_url_string({:colorize, 80})
"colorize:80"
"""
@type colorize :: :colorize | {:colorize, 0..100 | float}
def to_url_string(:colorize), do: "colorize"
def to_url_string({:colorize, strength}) when strength <= 100 and strength >= 0 do
"colorize:#{strength}"
end
@typedoc """
The contrast adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:contrast)
"contrast"
iex> #{__MODULE__}.to_url_string({:contrast, 90})
"contrast:90"
"""
@type contrast :: :contrast | {:contrast, -100..100 | float}
def to_url_string(:contrast), do: "contrast"
def to_url_string({:contrast, adjustment}) when adjustment <= 100 and adjustment >= -100 do
"contrast:#{adjustment}"
end
@typedoc """
The trimming of transparent pixels of the overlay image.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:cut_out)
"cut_out"
"""
@type cut_out :: :cut_out
def to_url_string(:cut_out), do: "cut_out"
@typedoc """
The removal of small motion shifts.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:deshake)
"deshake"
iex> #{__MODULE__}.to_url_string({:deshake, 32})
"deshake:32"
"""
@type deshake :: :deshake | {:deshake, 16 | 32 | 48 | 64}
def to_url_string(:deshake), do: "deshake"
def to_url_string({:deshake, extent}) when extent in [16, 32, 48, 64], do: "deshake:#{extent}"
@typedoc """
The displacing by color channels.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:displace)
"displace"
"""
@type displace :: :displace
def to_url_string(:displace), do: "displace"
@typedoc """
The distortion effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string({:distort, {{5, 34}, {70, 10}, {70, 75}, {5, 55}}})
"distort:fc00:db20:35b:7399::5"
iex> #{__MODULE__}.to_url_string({:distort, {:arc, 180}})
"distort:arc:180"
"""
@type distort ::
{:distort,
{:arc, number}
| {{number, number}, {number, number}, {number, number}, {number, number}}}
def to_url_string({:distort, {:arc, angle}}) when is_number(angle), do: "distort:arc:#{angle}"
def to_url_string({:distort, {{x1, y1}, {x2, y2}, {x3, y3}, {x4, y4}}})
when is_number(x1) and is_number(y1) and is_number(x2) and is_number(y2) and
is_number(x3) and is_number(y3) and is_number(x4) and is_number(y4) do
"distort:#{x1}:#{y1}:#{x2}:#{y2}:#{x3}:#{y3}:#{x4}:#{y4}"
end
@typedoc """
The fading in/out.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:fade)
"fade"
iex> #{__MODULE__}.to_url_string({:fade, 2000})
"fade:2000"
"""
@type fade :: :fade | {:fade, non_neg_number}
def to_url_string(:fade), do: "fade"
def to_url_string({:fade, fade}) when is_number(fade) and fade >= 0, do: "fade:#{fade}"
@typedoc """
The fill light adjustment.
Options:
* `:amount` - a `t:number/0` between 0 to 100.
* `:bias` - a `t:number/0` between -100 to 100.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:fill_light)
"fill_light"
iex> #{__MODULE__}.to_url_string({:fill_light, amount: 40})
"fill_light:40"
iex> #{__MODULE__}.to_url_string({:fill_light, amount: 70, bias: 20})
"fill_light:70:20"
"""
@type fill_light :: :fill_light | {:fill_light, keyword | map}
def to_url_string(:fill_light), do: "fill_light"
def to_url_string({:fill_light, options}) when is_list(options) do
to_url_string({:fill_light, Enum.into(options, %{})})
end
def to_url_string({:fill_light, options}) when is_map(options) do
__MODULE__.FillLight.to_url_string(options)
end
@typedoc """
The gamma level adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:gamma)
"gamma"
iex> #{__MODULE__}.to_url_string({:gamma, 50})
"gamma:50"
"""
@type gamma :: :gamma | {:gamma, -50..150 | float}
def to_url_string(:gamma), do: "gamma"
def to_url_string({:gamma, level}) when level <= 150 and level >= -50, do: "gamma:#{level}"
@typedoc """
The gradient fade effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:gradient_fade)
"gradient_fade"
iex> #{__MODULE__}.to_url_string({:gradient_fade, 40})
"gradient_fade:40"
iex> #{__MODULE__}.to_url_string({:gradient_fade, :symmetric})
"gradient_fade:symmetric"
iex> #{__MODULE__}.to_url_string({:gradient_fade, {25, :symmetric}})
"gradient_fade:symmetric:25"
iex> #{__MODULE__}.to_url_string({:gradient_fade, {50, :symmetric_pad}})
"gradient_fade:symmetric_pad:50"
"""
@type gradient_fade ::
:gradient_fade
| {:gradient_fade,
0..100
| float
| :symmetric
| :symmetric_pad
| {0..100 | float, :symmetric | :symmetric_pad}}
def to_url_string(:gradient_fade), do: "gradient_fade"
def to_url_string({:gradient_fade, strength_or_mode})
when strength_or_mode <= 100 and strength_or_mode >= 0
when strength_or_mode in [:symmetric, :symmetric_pad] do
"gradient_fade:#{strength_or_mode}"
end
def to_url_string({:gradient_fade, {strength, mode}})
when strength <= 100 and strength >= 0 and mode in [:symmetric, :symmetric_pad] do
"gradient_fade:#{mode}:#{strength}"
end
@typedoc """
The effect that generates a gray-scaled image.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:grayscale)
"grayscale"
"""
@type grayscale :: :grayscale
def to_url_string(:grayscale), do: "grayscale"
@typedoc """
The green channel adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:green)
"green"
iex> #{__MODULE__}.to_url_string({:green, -30})
"green:-30"
"""
@type green :: :green | {:green, -100..100 | float}
def to_url_string(:green), do: "green"
def to_url_string({:green, green}) when green <= 100 and green >= -100, do: "green:#{green}"
@typedoc """
The hue adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:hue)
"hue"
iex> #{__MODULE__}.to_url_string({:hue, 40})
"hue:40"
"""
@type hue :: :hue | {:hue, -100..100 | float}
def to_url_string(:hue), do: "hue"
def to_url_string({:hue, hue}) when hue <= 100 and hue >= -100, do: "hue:#{hue}"
@typedoc """
The automatic image improvement.
Options:
* `:mode` - `:outdoor` or `:indoor`.
* `:blend` - `t:number/0` representing improvement amounts between 0 to 100.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:improve)
"improve"
iex> #{__MODULE__}.to_url_string({:improve, blend: 40})
"improve:40"
iex> #{__MODULE__}.to_url_string({:improve, mode: :indoor})
"improve:indoor"
iex> #{__MODULE__}.to_url_string({:improve, mode: :indoor, blend: 50})
"improve:indoor:50"
"""
@type improve :: :improve | {:improve, keyword | map}
def to_url_string(:improve), do: "improve"
def to_url_string({:improve, options}) when is_list(options) do
to_url_string({:improve, Enum.into(options, %{})})
end
def to_url_string({:improve, options}) when is_map(options) do
__MODULE__.Improve.to_url_string(options)
end
@typedoc """
The number of times to loop playback.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string({:loop, 2})
"loop:2"
"""
@type loop :: {:loop, non_neg_integer}
def to_url_string({:loop, loop}) when is_integer(loop) and loop >= 0, do: "loop:#{loop}"
@typedoc """
The effect that makes the background transparent.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:make_transparent)
"make_transparent"
iex> #{__MODULE__}.to_url_string({:make_transparent, 40})
"make_transparent:40"
"""
@type make_transparent :: :make_transparent | {:make_transparent, 0..100 | float}
def to_url_string(:make_transparent), do: "make_transparent"
def to_url_string({:make_transparent, tolerance}) when tolerance <= 100 and tolerance >= 0 do
"make_transparent:#{tolerance}"
end
@typedoc """
The multiply blending with the overlay image.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:multiply)
"multiply"
"""
@type multiply :: :multiply
def to_url_string(:multiply), do: "multiply"
@typedoc """
The color negation effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:negate)
"negate"
"""
@type negate :: :negate
def to_url_string(:negate), do: "negate"
@typedoc """
The noise effect.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:noise)
"noise"
iex> #{__MODULE__}.to_url_string({:noise, 10})
"noise:10"
"""
@type noise :: :noise | {:noise, 0..100 | float}
def to_url_string(:noise), do: "noise"
def to_url_string({:noise, level}) when level <= 100 and level >= 0, do: "noise:#{level}"
@typedoc """
The oil paint effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:oil_paint)
"oil_paint"
iex> #{__MODULE__}.to_url_string({:oil_paint, 40})
"oil_paint:40"
"""
@type oil_paint :: :oil_paint | {:oil_paint, 0..100 | float}
def to_url_string(:oil_paint), do: "oil_paint"
def to_url_string({:oil_paint, strength}) when strength <= 100 and strength >= 0 do
"oil_paint:#{strength}"
end
@typedoc """
The transparency level of the semi-transparent pixels.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:opacity_threshold)
"opacity_threshold"
iex> #{__MODULE__}.to_url_string({:opacity_threshold, 40})
"opacity_threshold:40"
"""
@type opacity_threshold :: :opacity_threshold | {:opacity_threshold, 1..100 | float}
def to_url_string(:opacity_threshold), do: "opacity_threshold"
def to_url_string({:opacity_threshold, level}) when level <= 100 and level >= 1 do
"opacity_threshold:#{level}"
end
@typedoc """
The dither effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:ordered_dither)
"ordered_dither"
iex> #{__MODULE__}.to_url_string({:ordered_dither, 3})
"ordered_dither:3"
"""
@type ordered_dither :: :ordered_dither | {:ordered_dither, 0..18}
def to_url_string(:ordered_dither), do: "ordered_dither"
def to_url_string({:ordered_dither, level}) when level in 0..18, do: "ordered_dither:#{level}"
@typedoc """
The outline effect.
Options:
* `:mode` - an `t:atom/0` representing how to apply the outline effect.
* `:width` - the thickness of the outline.
* `:blur` - the blur level of the outline.
## Official documentation
* https://cloudinary.com/documentation/image_transformations#outline_effects
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:outline)
"outline"
iex> #{__MODULE__}.to_url_string({:outline, width: 3})
"outline:8"
iex> #{__MODULE__}.to_url_string({:outline, width: 8, blur: 20})
"outline:8:20"
iex> #{__MODULE__}.to_url_string({:outline, mode: :inner})
"outline:inner"
iex> #{__MODULE__}.to_url_string({:outline, mode: :inner_fill, width: 7})
"outline:inner_fill:7"
iex> #{__MODULE__}.to_url_string({:outline, mode: :outer, width: 10, blur: 200})
"outline:outer:10:200"
"""
@type outline :: :outline | {:outline, keyword | map}
def to_url_string(:outline), do: "outline"
def to_url_string({:outline, options}) when is_list(options) do
to_url_string({:outline, Enum.into(options, %{})})
end
def to_url_string({:outline, options}) when is_map(options) do
__MODULE__.Outline.to_url_string(options)
end
@typedoc """
The overlay blending with the overlay image.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:overlay)
"overlay"
"""
@type overlay :: :overlay
def to_url_string(:overlay), do: "overlay"
@typedoc """
The pixelate effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:pixelate)
"pixelate"
iex> #{__MODULE__}.to_url_string({:pixelate, 3})
"pixelate:3"
"""
@type pixelate :: :pixelate | {:pixelate, 1..200 | float}
def to_url_string(:pixelate), do: "pixelate"
def to_url_string({:pixelate, size}) when size <= 200 and size >= 1 do
"pixelate:#{size}"
end
@typedoc """
The pixelate effect on faces.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:pixelate_faces)
"pixelate_faces"
iex> #{__MODULE__}.to_url_string({:pixelate_faces, 7})
"pixelate_faces:7"
"""
@type pixelate_faces :: :pixelate_faces | {:pixelate_faces, 1..200 | float}
def to_url_string(:pixelate_faces), do: "pixelate_faces"
def to_url_string({:pixelate_faces, size}) when size <= 200 and size >= 1 do
"pixelate_faces:#{size}"
end
@typedoc """
The pixelate effect on the specified region.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:pixelate_region)
"pixelate_region"
iex> #{__MODULE__}.to_url_string({:pixelate_region, 20})
"pixelate_region:20"
"""
@type pixelate_region :: :pixelate_region | {:pixelate_region, 1..200 | float}
def to_url_string(:pixelate_region), do: "pixelate_region"
def to_url_string({:pixelate_region, size}) when size <= 200 and size >= 1 do
"pixelate_region:#{size}"
end
@typedoc """
The video preview generation.
Options:
* `:duration` - the duration in seconds.
* `:max_segments` - the maximum number of segments.
* `:min_segment_duration` - the minimum duration for each segment in seconds.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:preview)
"preview"
iex> #{__MODULE__}.to_url_string({:preview, duration: 8})
"preview:duration_8"
iex> #{__MODULE__}.to_url_string({:preview, max_segments: 6})
"preview:max_seg_6"
iex> #{__MODULE__}.to_url_string({:preview, min_segment_duration: 2})
"preview:min_seg_dur_2"
iex> #{__MODULE__}.to_url_string({:preview, duration: 20, max_segments: 7})
"preview:duration_20:max_seg_7"
iex> #{__MODULE__}.to_url_string({:preview, duration: 12, min_segment_duration: 3})
"preview:duration_12:min_seg_dur_3"
iex> #{__MODULE__}.to_url_string({:preview, max_segments: 2, min_segment_duration: 2})
"preview:max_seg_2:min_seg_dur_2"
iex> #{__MODULE__}.to_url_string({:preview, duration: 12, max_segments: 3, min_segment_duration: 3})
"preview:duration_12:max_seg_3:min_seg_dur_3"
"""
@type preview :: :preview | {:preview, keyword | map}
def to_url_string(:preview), do: "preview"
def to_url_string({:preview, options}) when is_list(options) do
to_url_string({:preview, Enum.into(options, %{})})
end
def to_url_string({:preview, options}) when is_map(options) do
__MODULE__.Preview.to_url_string(options)
end
@typedoc """
The progress bar overlay for videos.
Options:
* `:type` - `:frame` or `:bar`.
* `:color` - a `t:String.t/0` as a color name or a `t:charlist/0` as an RGB hex triplet.
* `:width` - a positive number.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:progressbar)
"progressbar"
iex> #{__MODULE__}.to_url_string({:progressbar, type: :frame})
"progressbar:frame"
iex> #{__MODULE__}.to_url_string({:progressbar, color: 'E8F7D4'})
"progressbar:color_E8F7D4"
iex> #{__MODULE__}.to_url_string({:progressbar, width: 12})
"progressbar:width_12"
iex> #{__MODULE__}.to_url_string({:progressbar, type: :frame, color: "green"})
"progressbar:frame:green"
iex> #{__MODULE__}.to_url_string({:progressbar, type: :frame, width: 4})
"progressbar:type_frame:width_4"
iex> #{__MODULE__}.to_url_string({:progressbar, color: 'abaf40', width: 6})
"progressbar:color_abaf40:width_6"
iex> #{__MODULE__}.to_url_string({:progressbar, type: :frame, color: "blue", width: 8})
"progressbar:frame:blue:8"
"""
@type progressbar :: :progressbar | {:progressbar, keyword | map}
def to_url_string(:progressbar), do: "progressbar"
def to_url_string({:progressbar, options}) when is_list(options) do
to_url_string({:progressbar, Enum.into(options, %{})})
end
def to_url_string({:progressbar, options}) when is_map(options) do
__MODULE__.Progressbar.to_url_string(options)
end
@typedoc """
The color replacement with an RGB(A) matrix.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string({:recolor, {{0.3, 0.7, 0.1}, {0.3, 0.6, 0.1}, {0.2, 0.5, 0.1}}})
"recolor:0.3:0.7:0.1:0.3:0.6:0.1:0.2:0.5:0.1"
iex> #{__MODULE__}.to_url_string({:recolor, {{0.3, 0.7, 0.1, 0.4}, {0.3, 0.6, 0.1, 0.2}, {0.2, 0.5, 0.1, 0.6}, {0.8, 0.7, 0.4, 0.3}}})
"recolor:0.3:0.7:0.1:0.4:0.3:0.6:0.1:0.2:0.2:0.5:0.1:0.6:0.8:0.7:0.4:0.3"
"""
@type recolor ::
{:recolor,
{{float | 0 | 1, float | 0 | 1, float | 0 | 1},
{float | 0 | 1, float | 0 | 1, float | 0 | 1},
{float | 0 | 1, float | 0 | 1, float | 0 | 1}}
| {{float | 0 | 1, float | 0 | 1, float | 0 | 1, float | 0 | 1},
{float | 0 | 1, float | 0 | 1, float | 0 | 1, float | 0 | 1},
{float | 0 | 1, float | 0 | 1, float | 0 | 1, float | 0 | 1},
{float | 0 | 1, float | 0 | 1, float | 0 | 1, float | 0 | 1}}}
def to_url_string({:recolor, {{a, b, c}, {d, e, f}, {g, h, i}}})
when a <= 1 and a >= 0 and b <= 1 and b >= 0 and c <= 1 and c >= 0 and
d <= 1 and d >= 0 and e <= 1 and e >= 0 and f <= 1 and f >= 0 and
g <= 1 and g >= 0 and h <= 1 and h >= 0 and i <= 1 and i >= 0 do
"recolor:#{a}:#{b}:#{c}:#{d}:#{e}:#{f}:#{g}:#{h}:#{i}"
end
def to_url_string({:recolor, {{a, b, c, d}, {e, f, g, h}, {i, j, k, l}, {m, n, o, p}}})
when a <= 1 and a >= 0 and b <= 1 and b >= 0 and c <= 1 and c >= 0 and d <= 1 and d >= 0 and
e <= 1 and e >= 0 and f <= 1 and f >= 0 and g <= 1 and g >= 0 and h <= 1 and h >= 0 and
i <= 1 and i >= 0 and j <= 1 and j >= 0 and k <= 1 and k >= 0 and l <= 1 and l >= 0 and
m <= 1 and m >= 0 and n <= 1 and n >= 0 and o <= 1 and o >= 0 and p <= 1 and p >= 0 do
"recolor:#{a}:#{b}:#{c}:#{d}:#{e}:#{f}:#{g}:#{h}:#{i}:#{j}:#{k}:#{l}:#{m}:#{n}:#{o}:#{p}"
end
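# Informal note: each row of the recolor matrix presumably defines one output
# channel as a weighted sum of the input channels, so the first 3x3 doctest
# above computes R' = 0.3*R + 0.7*G + 0.1*B; the 4x4 form extends this to the
# alpha channel.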
@typedoc """
The red channel adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string({:red, 50})
"red:50"
"""
@type red :: :red | {:red, -100..100 | float}
def to_url_string(:red), do: "red"
def to_url_string({:red, adjustment}) when adjustment <= 100 and adjustment >= -100 do
"red:#{adjustment}"
end
@typedoc """
The red-eye removal.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:redeye)
"redeye"
"""
@type redeye :: :redeye
def to_url_string(:redeye), do: "redeye"
@typedoc """
The color replacement.
Options:
* `:to_color` - the target output color.
* `:tolerance` - the tolerance threshold from the input color.
* `:from_color` - the base input color.
## Official documentation
* https://cloudinary.com/documentation/image_transformations#color_effects
## Example
iex> #{__MODULE__}.to_url_string({:replace_color, to_color: 'af2b4c'})
"replace_color:af2b4c"
iex> #{__MODULE__}.to_url_string({:replace_color, to_color: "saddlebrown", tolerance: 30})
"replace_color:saddlebrown:30"
iex> #{__MODULE__}.to_url_string({:replace_color, to_color: '2F4F4F', tolerance: 20})
"replace_color:2f4f4f:20"
iex> #{__MODULE__}.to_url_string({:replace_color, to_color: "silver", tolerance: 60, from_color: '89B8ED'})
"replace_color:silver:60:89b8ed"
iex> #{__MODULE__}.to_url_string({:replace_color, to_color: "gray", tolerance: 60, from_color: "blue"})
"replace_color:gray:60:blue"
"""
@type replace_color :: {:replace_color, keyword | map}
def to_url_string({:replace_color, options}) when is_list(options) do
to_url_string({:replace_color, Enum.into(options, %{})})
end
def to_url_string({:replace_color, options}) when is_map(options) do
__MODULE__.ReplaceColor.to_url_string(options)
end
@typedoc """
The reverse video playback.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:reverse)
"reverse"
"""
@type reverse :: :reverse
def to_url_string(:reverse), do: "reverse"
@typedoc """
The color saturation adjustment.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:saturation)
"saturation"
iex> #{__MODULE__}.to_url_string({:saturation, 70})
"saturation:70"
"""
@type saturation :: :saturation | {:saturation, -100..100}
def to_url_string(:saturation), do: "saturation"
def to_url_string({:saturation, adjustment}) when adjustment <= 100 and adjustment >= -100 do
"saturation:#{adjustment}"
end
@typedoc """
The screen blending with the overlay image.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:screen)
"screen"
"""
@type screen :: :screen
def to_url_string(:screen), do: "screen"
@typedoc """
The sepia effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:sepia)
"sepia"
iex> #{__MODULE__}.to_url_string({:sepia, 50})
"sepia:50"
"""
@type sepia :: :sepia | {:sepia, 1..100 | float}
def to_url_string(:sepia), do: "sepia"
def to_url_string({:sepia, strength}) when strength <= 100 and strength >= 1 do
"sepia:#{strength}"
end
@typedoc """
The shadow effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/image_transformations#image_shape_changes_and_distortion_effects
## Example
iex> #{__MODULE__}.to_url_string(:shadow)
"shadow"
iex> #{__MODULE__}.to_url_string({:shadow, 50})
"shadow:50"
"""
@type shadow :: :shadow | {:shadow, 0..100 | float}
def to_url_string(:shadow), do: "shadow"
def to_url_string({:shadow, strength}) when strength <= 100 and strength >= 0 do
"shadow:#{strength}"
end
@typedoc """
The sharpening filter.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:sharpen)
"sharpen"
iex> #{__MODULE__}.to_url_string({:sharpen, 400})
"sharpen:400"
"""
@type sharpen :: :sharpen | {:sharpen, 1..2000 | float}
def to_url_string(:sharpen), do: "sharpen"
def to_url_string({:sharpen, strength}) when strength <= 2000 and strength >= 1 do
"sharpen:#{strength}"
end
@typedoc """
The skewing effect.
Options:
* `:x` - skewing degrees on the x-axis.
* `:y` - skewing degrees on the y-axis.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string({:shear, x: 20})
"shear:20:0"
"""
@type shear :: {:shear, keyword | map}
def to_url_string({:shear, options}) when is_list(options) do
to_url_string({:shear, Enum.into(options, %{})})
end
def to_url_string({:shear, options}) when is_map(options) do
__MODULE__.Shear.to_url_string(options)
end
@typedoc """
The color blindness simulation.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:simulate_colorblind)
"simulate_colorblind"
iex> #{__MODULE__}.to_url_string({:simulate_colorblind, :tritanomaly})
"simulate_colorblind:tritanomaly"
"""
@type simulate_colorblind ::
:simulate_colorblind
| {:simulate_colorblind,
:deuteranopia
| :protanopia
| :tritanopia
| :tritanomaly
| :deuteranomaly
| :cone_monochromacy
| :rod_monochromacy}
def to_url_string(:simulate_colorblind), do: "simulate_colorblind"
def to_url_string({:simulate_colorblind, mode})
when mode in [
:deuteranopia,
:protanopia,
:tritanopia,
:tritanomaly,
:deuteranomaly,
:cone_monochromacy,
:rod_monochromacy
] do
"simulate_colorblind:#{mode}"
end
@typedoc """
The style transfer effect.
Options:
* `:preserve_color` - A `t:as_boolean/1` indicating whether to retain the original colors.
* `:style_strength` - A `t:number/0` between 0 and 100.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/neural_artwork_style_transfer_addon
## Example
iex> #{__MODULE__}.to_url_string(:style_transfer)
"style_transfer"
iex> #{__MODULE__}.to_url_string({:style_transfer, style_strength: 60})
"style_transfer:60"
iex> #{__MODULE__}.to_url_string({:style_transfer, preserve_color: true, style_strength: 40})
"style_transfer:preserve_color:40"
"""
@type style_transfer :: :style_transfer | {:style_transfer, keyword | map}
def to_url_string(:style_transfer), do: "style_transfer"
def to_url_string({:style_transfer, options}) when is_list(options) do
to_url_string({:style_transfer, Enum.into(options, %{})})
end
def to_url_string({:style_transfer, options}) when is_map(options) do
__MODULE__.StyleTransfer.to_url_string(options)
end
@typedoc """
The tint effect.
Options:
* `:amount` - A `t:number/0` between 0 and 100.
* `:color` - A `t:String.t/0` as a color name, a `t:charlist/0` as an RGB hex triplet, or a
`t:tuple/0` with a color and a `t:number/0` to adjust positions of the blend. It can also be a
`t:list/0` of colors or a `t:list/0` of color and position `t:tuple/0`s.
* `:equalize` - A `t:as_boolean/1` indicating whether to equalize colors before tinting.
## Official documentation
* https://cloudinary.com/documentation/image_transformations#tint_effects
## Example
iex> #{__MODULE__}.to_url_string(:tint)
"tint"
iex> #{__MODULE__}.to_url_string({:tint, amount: 80})
"tint:80"
iex> #{__MODULE__}.to_url_string({:tint, amount: 80, color: ["blue", "green", '47DA8B']})
"tint:80:blue:green:rgb:47DA8B"
iex> #{__MODULE__}.to_url_string({:tint, amount: 50, color: [{'6F71EA', 40}, {"yellow", 35}]})
"tint:50:rgb:6F71EA:40p:yellow:35p"
iex> #{__MODULE__}.to_url_string({:tint, equalize: true, amount: 80, color: "green"})
"tint:equalize:80:green"
iex> #{__MODULE__}.to_url_string({:tint, equalize: false, amount: 80, color: {"green", 60}})
"tint:80:green:60p"
"""
@type tint :: :tint | {:tint, keyword | map}
def to_url_string(:tint), do: "tint"
def to_url_string({:tint, options}) when is_list(options) do
to_url_string({:tint, Enum.into(options, %{})})
end
def to_url_string({:tint, options}) when is_map(options) do
__MODULE__.Tint.to_url_string(options)
end
@typedoc """
The transition effect.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:transition)
"transition"
"""
@type transition :: :transition
def to_url_string(:transition), do: "transition"
@typedoc """
The edge trimming effect.
Options:
* `:color_similarity` - A `t:number/0` between 0 and 100 as the tolerance level.
* `:color_override` - A `t:String.t/0` as a color name or a `t:charlist/0` as an RGB(A) hex
triplet.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:trim)
"trim"
iex> #{__MODULE__}.to_url_string({:trim, color_similarity: 30, color_override: "white"})
"trim:30:white"
iex> #{__MODULE__}.to_url_string({:trim, color_similarity: 30, color_override: 'E8D9AA'})
"trim:30:rgb:E8D9AA"
iex> #{__MODULE__}.to_url_string({:trim, color_similarity: 40})
"trim:40"
iex> #{__MODULE__}.to_url_string({:trim, color_override: 'A674B3D0'})
"trim:10:rgb:A674B3D0"
"""
@type trim :: :trim | {:trim, keyword | map}
def to_url_string(:trim), do: "trim"
def to_url_string({:trim, options}) when is_list(options) do
to_url_string({:trim, Enum.into(options, %{})})
end
def to_url_string({:trim, options}) when is_map(options) do
__MODULE__.Trim.to_url_string(options)
end
@typedoc """
The unsharp mask filter.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:unsharp_mask)
"unsharp_mask"
iex> #{__MODULE__}.to_url_string({:unsharp_mask, 200})
"unsharp_mask:200"
"""
@type unsharp_mask :: :unsharp_mask | {:unsharp_mask, 1..2000 | float}
def to_url_string(:unsharp_mask), do: "unsharp_mask"
def to_url_string({:unsharp_mask, strength}) when strength <= 2000 and strength >= 1 do
"unsharp_mask:#{strength}"
end
@typedoc """
The vectorization.
Options:
* `:colors` - An `t:integer/0` representing the number of colors, between 2 and 30.
* `:detail` - A `t:float/0` between 0.0 and 1.0 as a percentage, or an `t:integer/0` between 0
and 1000 as an absolute number of pixels.
* `:despeckle` - A `t:float/0` between 0.0 and 1.0 as a percentage, or an `t:integer/0` between
0 and 100 as an absolute number of pixels.
* `:paths` - A `t:number/0` representing the bezier curve optimization level, between 0 and 100.
* `:corners` - A `t:number/0` representing the corner threshold, between 0 and 100.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:vectorize)
"vectorize"
iex> #{__MODULE__}.to_url_string({:vectorize, colors: 3})
"vectorize:3"
iex> #{__MODULE__}.to_url_string({:vectorize, colors: 3, detail: 0.5})
"vectorize:3:0.5"
iex> #{__MODULE__}.to_url_string({:vectorize, colors: 3, detail: 550, despeckle: 60})
"vectorize:3:550:60"
iex> #{__MODULE__}.to_url_string({:vectorize, colors: 3, detail: 550, despeckle: 0.4, paths: 20})
"vectorize:3:550:0.4:20"
iex> #{__MODULE__}.to_url_string({:vectorize, colors: 3, detail: 550, despeckle: 0.4, paths: 20, corners: 40})
"vectorize:3:550:0.4:20:40"
iex> #{__MODULE__}.to_url_string({:vectorize, detail: 550, despeckle: 0.3, paths: 20, corners: 40})
"vectorize:detail:550:despeckle:0.3:paths:20:corners:40"
iex> #{__MODULE__}.to_url_string({:vectorize, colors: 6, detail: 0.5, despeckle: 30, corners: 40})
"vectorize:colors:6:detail:0.5:despeckle:30:corners:40"
"""
@type vectorize :: :vectorize | {:vectorize, keyword | map}
def to_url_string(:vectorize), do: "vectorize"
def to_url_string({:vectorize, options}) when is_list(options) do
to_url_string({:vectorize, Enum.into(options, %{})})
end
def to_url_string({:vectorize, options}) when is_map(options) do
__MODULE__.Vectorize.to_url_string(options)
end
@typedoc """
The vibrance filter.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
## Example
iex> #{__MODULE__}.to_url_string(:vibrance)
"vibrance"
iex> #{__MODULE__}.to_url_string({:vibrance, 70})
"vibrance:70"
"""
@type vibrance :: :vibrance | {:vibrance, -100..100 | float}
def to_url_string(:vibrance), do: "vibrance"
def to_url_string({:vibrance, strength}) when strength <= 100 and strength >= -100 do
"vibrance:#{strength}"
end
@typedoc """
The automatic image enhancement.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/viesus_automatic_image_enhancement_addon
## Example
iex> #{__MODULE__}.to_url_string(:viesus_correct)
"viesus_correct"
"""
@type viesus_correct :: :viesus_correct
def to_url_string(:viesus_correct), do: "viesus_correct"
@typedoc """
The vignette effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(:vignette)
"vignette"
iex> #{__MODULE__}.to_url_string({:vignette, 30})
"vignette:30"
"""
@type vignette :: :vignette | {:vignette, 0..100 | float}
def to_url_string(:vignette), do: "vignette"
def to_url_string({:vignette, strength}) when strength <= 100 and strength >= 0 do
"vignette:#{strength}"
end
@typedoc """
The volume adjustment.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string({:volume, 70})
"volume:70"
iex> #{__MODULE__}.to_url_string({:volume, {-10, :decibels}})
"volume:-10dB"
iex> #{__MODULE__}.to_url_string({:volume, :mute})
"volume:mute"
"""
@type volume :: {:volume, -100..400 | float | {number, :decibels} | :mute}
def to_url_string({:volume, :mute}), do: "volume:mute"
def to_url_string({:volume, percents}) when percents <= 400 and percents >= -100 do
"volume:#{percents}"
end
def to_url_string({:volume, {db, :decibels}}) when is_integer(db) or is_float(db) do
"volume:#{db}dB"
end
end
# File: lib/cloudinary/transformation/effect.ex
defmodule Logi.Channel do
@moduledoc """
Log Message Channels.
A channel (logically) receives log messages from loggers and delivers the messages to installed sinks.
## Examples
```elixir
# CREATE CHANNEL
iex> :ok = Logi.Channel.create :sample_log
iex> Logi.Channel.which_channels
[:sample_log, :logi_default_log] # 'logi_default_log' is created automatically when 'logi' application was started
# INSTALL SINK
iex> write_fun = fn (_, format, data) -> :io.format("[my_sink] " <> format <> "\\n", data) end
iex> sink = Logi.BuiltIn.Sink.Fun.new :sample_sink, write_fun
iex> {:ok, _} = Logi.Channel.install_sink :sample_log, sink, :info # Installs `sink` with `:info` level
iex> Logi.Channel.which_sinks :sample_log
[:sample_sink]
# OUTPUT LOG MESSAGE
iex> require Logi
iex> Logi.debug "hello world", [], [logger: :sample_log]
# The message is not emitted (the severity is too low).
iex> Logi.info "hello world", [], [logger: :sample_log]
#OUTPUT# [my_sink] hello world
iex> Logi.alert "hello world", [], [logger: :sample_log]
#OUTPUT# [my_sink] hello world
iex> Logi.info "hello world" # If `logger` option is omitted, the default channel will be used
# The message is not emitted (no sinks are installed to the default channel).
```
"""
@typedoc "The identifier of a channel"
@type id :: atom
@typedoc """
Options for `install_sink_opt/3`.
## if_exists
- The conflict handling policy (a usage sketch follows the type below).
- If a sink with the same identifier already exists,
- `:error`: the function returns an error `{:error, {:already_installed, existing_sink}}`.
- `:ignore`: the new sink is ignored. Then the function returns `{:ok, existing_sink}`.
- `:supersede`: the new sink supersedes it. Then the function returns `{:ok, old_sink}`.
- Default: `:supersede`
"""
@type install_sink_options :: [
{:if_exists, :error | :ignore | :supersede}
]
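# Illustrative sketch (not a doctest) of the three `:if_exists` policies when a
# sink sharing an identifier with `sink` is already installed on the default
# channel; `sink` and `sink2` are assumed to share an identifier:
#
#     {:ok, :undefined} = Logi.Channel.install_sink_opt(sink, :info, [])
#     {:error, {:already_installed, _}} = Logi.Channel.install_sink_opt(sink2, :info, if_exists: :error)
#     {:ok, _existing} = Logi.Channel.install_sink_opt(sink2, :info, if_exists: :ignore)
#     {:ok, _old} = Logi.Channel.install_sink_opt(sink2, :info, if_exists: :supersede)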
@typedoc "The information of an installed sink."
@type installed_sink :: %{
:sink => Logi.Sink.sink,
:condition => Logi.Condition.condition,
:sink_sup => Logi.SinkProc.sink_sup,
:writer => Logi.SinkWriter.writer | :undefined
}
@doc """
The default channel.
This channel is created automatically when the `logi_ex` application is started.
NOTE: The default channel ID is the same as the default logger ID (`Logi.default_logger/0`).
"""
@spec default_channel :: id
def default_channel do
:logi_channel.default_channel
end
@doc """
Creates a new channel.
If the channel already exists, nothing happens.
If there exists a process or an ETS table with the same name as `channel`, the function crashes.
"""
@spec create(id) :: :ok
def create(channel) do
:logi_channel.create channel
end
@doc """
Deletes a channel.
If the channel does not exist, it is silently ignored.
"""
@spec delete(id) :: :ok
def delete(channel) do
:logi_channel.delete channel
end
@doc "Returns a list of all existing channels."
@spec which_channels :: [id]
def which_channels do
:logi_channel.which_channels
end
@doc "Equivalent to `Logi.Channel.install_sink Logi.Channel.default_channel, sink, condition`."
@spec install_sink(Logi.Sink.sink, Logi.Condition.condition) :: {:ok, old} | {:error, reason} when
old: :undefined | installed_sink,
reason: {:cannot_start, any}
def install_sink(sink, condition) do
:logi_channel.install_sink sink, condition
end
@doc "Equivalent to `Logi.Channel.install_sink_opt channel, sink, condition, []`."
@spec install_sink(id, Logi.Sink.sink, Logi.Condition.condition) :: {:ok, old} | {:error, reason} when
old: :undefined | installed_sink,
reason: {:cannot_start, any}
def install_sink(channel, sink, condition) do
:logi_channel.install_sink channel, sink, condition
end
@doc "Equivalent to `Logi.Channel.install_sink_opt Logi.Channel.default_channel, sink, condition, options`."
@spec install_sink_opt(Logi.Sink.sink, Logi.Condition.condition, install_sink_options) :: {:ok, old} | {:error, reason} when
old: :undefined | installed_sink,
reason: {:cannot_start, any} | {:already_installed, installed_sink}
def install_sink_opt(sink, condition, options) do
:logi_channel.install_sink_opt sink, condition, options
end
@doc """
Installs `sink`.
If failed to start a sink process specified by `logi_sink:get_spec(sink)`,
the function returns `{:cannot_start, failure_reason}`.
If there does not exist a sink which has the same identifier with a new one,
the function returns `{:ok, :undefined}`.
Otherwise the result value depends on the value of the `:if_exists` option
(see the description of `t:install_sink_options/0` for details).
"""
@spec install_sink_opt(id, Logi.Sink.sink, Logi.Condition.condition, install_sink_options) :: {:ok, old} | {:error, reason} when
old: :undefined | installed_sink,
reason: {:cannot_start, any} | {:already_installed, installed_sink}
def install_sink_opt(channel, sink, condition, options) do
:logi_channel.install_sink_opt channel, sink, condition, options
end
@doc "Equivalent to `Logi.Channel.uninstall_sink Logi.Channel.default_channel, sink_id`."
@spec uninstall_sink(Logi.Sink.id) :: {:ok, installed_sink} | :error
def uninstall_sink(sink_id) do
:logi_channel.uninstall_sink sink_id
end
@doc """
Uninstalls the sink which has the identifier `sink_id` from `channel`.
The function returns `{:ok, sink}` if the specified sink exists in the channel, `:error` otherwise.
"""
@spec uninstall_sink(id, Logi.Sink.id) :: {:ok, installed_sink} | :error
def uninstall_sink(channel, sink_id) do
:logi_channel.uninstall_sink channel, sink_id
end
@doc "Equivalent to `Logi.Channel.set_sink_condition Logi.Channel.default_channel, sink_id, condition`."
@spec set_sink_condition(Logi.Sink.id, Logi.Condition.condition) :: {:ok, old} | :error when
old: Logi.Condition.condition
def set_sink_condition(sink_id, condition) do
:logi_channel.set_sink_condition sink_id, condition
end
@doc """
Sets the applicable condition of the `sink_id`.
The function returns `{:ok, old}` if the specified sink exists in the channel, `:error` otherwise.
"""
@spec set_sink_condition(id, Logi.Sink.id, Logi.Condition.condition) :: {:ok, old} | :error when
old: Logi.Condition.condition
def set_sink_condition(channel, sink_id, condition) do
:logi_channel.set_sink_condition channel, sink_id, condition
end
@doc "Equivalent to `Logi.Channel.find_sink Logi.Channel.id, sink_id`."
@spec find_sink(Logi.Sink.id) :: {:ok, installed_sink} | :error
def find_sink(sink_id) do
:logi_channel.find_sink sink_id
end
@doc """
Searches for `sink_id` in `channel`.
The function returns `{:ok, sink}`, or `:error` if `sink_id` is not present.
"""
@spec find_sink(id, Logi.Sink.id) :: {:ok, installed_sink} | :error
def find_sink(channel_id, sink_id) do
:logi_channel.find_sink channel_id, sink_id
end
@doc "Returns a list of installed sinks."
@spec which_sinks(id) :: [Logi.Sink.id]
def which_sinks(channel \\ Logi.Channel.default_channel) do
:logi_channel.which_sinks channel
end
@doc "Equivalent to `Logi.Channel.whereis_sink_proc Logi.Channel.default_channel, path`."
@spec whereis_sink_proc([Logi.Sink.id]) :: pid | :undefined
def whereis_sink_proc(path) do
:logi_channel.whereis_sink_proc path
end
@doc "Returns the pid associated with `path`."
@spec whereis_sink_proc(id, [Logi.Sink.id]) :: pid | :undefined
def whereis_sink_proc(channel, path) do
:logi_channel.whereis_sink_proc channel, path
end
end
# File: lib/logi/channel.ex
defmodule V3Api.Stream do
@moduledoc """
A GenStage for connecting to the V3Api's Server-Sent Event Stream
capability. Receives events from the API and parses their data.
Subscribers receive events as `%V3Api.Stream.Event{}` structs, which
include the event name and the data as a `%JsonApi{}` struct.
Required options:
`:path` (e.g. "/vehicles")
`:name` -- name of module
`:subscribe_to` -- pid or name of a ServerSentEventStage
for the V3Api.Stream to subscribe to. This should be
started as part of a supervision tree.
Other options are made available for tests, and can include:
- :name (name of the GenStage process)
- :base_url
- :api_key
"""
use GenStage
alias ServerSentEventStage, as: SSES
alias V3Api.Headers
defmodule Event do
@moduledoc """
Struct representing a parsed V3Api server-sent event.
"""
defstruct data: nil, event: :unknown
@type event :: :reset | :add | :update | :remove
@type t :: %__MODULE__{
event: event | :unknown,
data: nil | JsonApi.t() | {:error, any}
}
end
@spec start_link(Keyword.t()) :: {:ok, pid}
def start_link(opts) do
name = Keyword.fetch!(opts, :name)
GenStage.start_link(__MODULE__, opts, name: name)
end
@doc """
Builds an option list for a ServerSentEventStage
which a V3Api.Stream will subscribe to.
Each app's ServerSentEventStage should be started
inside the application's supervision tree.
"""
@spec build_options(Keyword.t()) :: Keyword.t()
def build_options(opts) do
default_options()
|> Keyword.merge(opts)
|> set_url()
|> set_headers()
end
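# Usage sketch (module names are hypothetical): the options built here feed a
# ServerSentEventStage that this stage then subscribes to, e.g. in a
# supervision tree:
#
#     sses_opts = V3Api.Stream.build_options(path: "/vehicles", name: MyApp.SSES)
#     children = [
#       {ServerSentEventStage, sses_opts},
#       {V3Api.Stream, name: MyApp.V3ApiStream, path: "/vehicles", subscribe_to: MyApp.SSES}
#     ]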
@spec default_options :: Keyword.t()
defp default_options do
[
base_url: config(:base_url),
api_key: config(:api_key)
]
end
@spec config(atom) :: any
defp config(key), do: Util.config(:v3_api, key)
@spec set_url(Keyword.t()) :: Keyword.t()
defp set_url(opts) do
path = Keyword.fetch!(opts, :path)
base_url = Keyword.fetch!(opts, :base_url)
Keyword.put(opts, :url, Path.join(base_url, path))
end
@spec set_headers(Keyword.t()) :: Keyword.t()
defp set_headers(opts) do
headers =
opts
|> Keyword.fetch!(:api_key)
|> Headers.build(use_cache?: false)
Keyword.put(opts, :headers, headers)
end
def init(opts) do
producer = Keyword.fetch!(opts, :subscribe_to)
{:producer_consumer, %{}, subscribe_to: [producer]}
end
def handle_events(events, _from, state) do
{:noreply, Enum.map(events, &parse_event/1), state}
end
@spec parse_event(SSES.Event.t()) :: Event.t()
defp parse_event(%SSES.Event{data: data, event: event}) do
%Event{
data: JsonApi.parse(data),
event: event(event)
}
end
@spec event(String.t()) :: Event.event()
for atom <- ~w(reset add update remove)a do
str = Atom.to_string(atom)
defp event(unquote(str)), do: unquote(atom)
end
end
# File: apps/v3_api/lib/stream.ex
NimbleCSV.define(CsvParser, [])
defmodule Reports.Easymile.Incidents do
def run(params \\ []) do
data = query(params)
|> Stream.map(fn row -> for {key, val} <- row, into: %{}, do: {String.to_atom(key), val} end)
|> Stream.map(fn row -> Map.update!(row, :last_seen, &date_parse/1) end)
|> Enum.into([])
IO.inspect("Got data")
incidents =
data
|> Enum.reduce({[], [], nil}, fn current, {incidents, context, last} ->
if last && current.reason != last.reason && current.id == last.id do
incident =
current
|> Map.put(:reason_pre, last.reason)
|> Map.put(:context, context |> Enum.take(10))
|> Map.delete(:speed)
{[incident | incidents], [], current}
else
{incidents, [current | context], current}
end
end)
|> elem(0)
|> Enum.dedup()
|> Enum.reject(fn i -> i.reason == "" end)
IO.inspect("Got incidents")
incidents_with_context =
incidents
|> Enum.map(fn incident -> get_context(incident) end)
IO.inspect("Got incident contexts")
report =
incidents_with_context
|> Enum.map(fn incident -> Map.values(incident) |> Enum.join(",") end)
|> Enum.map(fn row -> row <> "\n" end)
headers =
incidents_with_context
|> Enum.take(1)
|> Enum.map(fn incident -> Map.keys(incident) |> Enum.join(",") end)
|> Enum.map(fn row -> row <> "\n" end)
File.write!("#{__MODULE__}.csv", headers ++ report)
end
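# Illustrative invocation (writes Elixir.Reports.Easymile.Incidents.csv to the
# current working directory):
#
#     Reports.Easymile.Incidents.run()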
defp get_context(incident) do
IO.puts("Getting context for incident at: #{incident.last_seen}")
incident_time = incident.last_seen
highest_speed_datum =
incident.context
|> Enum.filter(fn datum ->
time_since_incident = NaiveDateTime.diff(incident_time, datum.last_seen)
time_since_incident > 0 and time_since_incident <= 6
end)
|> Enum.sort(fn a, b -> a.speed > b.speed end)
|> List.first()
if highest_speed_datum do
%{
disengagement_time: incident.last_seen,
disengagement_mode: incident.mode,
disengagement_reason: incident.reason,
disengagement_prev_reason: incident.reason_pre,
id: incident.id,
lat: incident.lat,
lon: incident.lon,
initial_speed: highest_speed_datum.speed,
initial_mode: highest_speed_datum.mode,
initial_time: highest_speed_datum.last_seen
}
else
%{
disengagement_time: incident.last_seen,
disengagement_mode: incident.mode,
disengagement_reason: incident.reason,
disengagement_prev_reason: incident.reason_pre,
id: incident.id,
lat: incident.lat,
lon: incident.lon,
initial_speed: nil,
initial_mode: nil,
initial_time: nil
}
end
end
defp date_parse(date), do: DateTime.from_iso8601(date <> "Z") |> elem(1)
def download() do
data =
query()
|> Stream.map(fn row -> for {key, val} <- row, into: %{}, do: {String.to_atom(key), val} end)
|> Enum.into([])
download =
data
|> Enum.map(fn incident -> Map.values(incident) |> Enum.join(",") end)
|> Enum.map(fn row -> row <> "\n" end)
headers =
data
|> Enum.take(1)
|> Enum.map(fn incident -> Map.keys(incident) |> Enum.join(",") end)
|> Enum.map(fn row -> row <> "\n" end)
File.write!("#{__MODULE__}.download.csv", headers ++ download)
end
defp query(params \\ []) do
Application.get_env(:prestige, :session_opts)
|> Prestige.new_session()
|> Prestige.stream!(statement(params))
|> Stream.flat_map(&Prestige.Result.as_maps/1)
end
defp statement(params \\ []) do
# NOTE: `hours` is read for a time-window filter but is not yet applied to the
# query below, which currently returns all rows regardless of age.
_hours = Keyword.get(params, :hours, 24)
"SELECT attributes.disengagement_reason as reason, attributes.last_seen, attributes.lat, attributes.lon, attributes.speed, attributes.mode, id FROM easymile__linden_states order by id desc, attributes.last_seen asc"
end
defp pmap(collection, func) do
collection
|> Enum.map(&(Task.async(fn -> func.(&1) end)))
|> Enum.map(fn task -> Task.await(task, 15000) end)
end
end
# File: lib/easymile/incidents.ex
defmodule Earmark.Internal do
@moduledoc ~S"""
All public functions that are internal to Earmark, so that **only** external API
functions are public in `Earmark`
"""
alias Earmark.{Error, Message, Options, SysInterface, Transform}
import Message, only: [emit_messages: 2]
@doc ~S"""
A wrapper to extract the AST from a call to `EarmarkParser.as_ast` if a tuple `{:ok, result, []}` is returned,
raise errors otherwise
iex(1)> as_ast!(["Hello %% annotated"], annotations: "%%")
[{"p", [], ["Hello "], %{annotation: "%% annotated"}}]
iex(2)> as_ast!("===")
** (Earmark.Error) [{:warning, 1, "Unexpected line ==="}]
"""
def as_ast!(markdown, options \\ [])
def as_ast!(markdown, options) do
case EarmarkParser.as_ast(markdown, options) do
{:ok, result, _} -> result
{:error, _, messages} -> raise Earmark.Error, inspect(messages)
end
end
@doc false
def as_html(lines, options)
def as_html(lines, options) when is_list(options) do
case Options.make_options(options) do
{:ok, options1} -> as_html(lines, options1)
{:error, messages} -> {:error, "", messages}
end
end
def as_html(lines, options) do
{status, ast, messages} = Transform.postprocessed_ast(lines, options)
{status, Transform.transform(ast, options), messages}
end
def as_html!(lines, options \\ [])
def as_html!(lines, options) do
{_status, html, messages} = as_html(lines, options)
emit_messages(messages, options)
html
end
@doc ~S"""
A utility function that will be passed as a partial capture to `EEx.eval_file` by
providing a value for the `options` parameter
```elixir
EEx.eval(..., include: &include(&1, options))
```
thusly allowing
```eex
<%= include.(some file) %>
```
where `some file` can be a relative path starting with `"./"`
Here is an example using [these fixtures](https://github.com/pragdave/earmark/tree/master/test/fixtures)
iex(3)> include("./include/basic.md.eex", file: "test/fixtures/does_not_matter")
"# Headline Level 1\n"
And here is how it is used inside a template
iex(4)> options = [file: "test/fixtures/does_not_matter"]
...(4)> EEx.eval_string(~s{<%= include.("./include/basic.md.eex") %>}, include: &include(&1, options))
"# Headline Level 1\n"
"""
def include(filename, options \\ []) do
options_ =
options
|> Options.relative_filename(filename)
case Path.extname(filename) do
".eex" -> EEx.eval_file(options_.file, include: &include(&1, options_))
_ -> SysInterface.sys_interface.readlines(options_.file) |> Enum.to_list
end
end
@doc ~S"""
This is a convenience function that reads a file, or passes it to `EEx.eval_file` if its
name ends in `.eex`.
The returned string is then passed to `as_html!`. This is used in the escript and allows
for a simple inclusion mechanism; as a matter of fact, an `include` function is passed in.
"""
def from_file!(filename, options \\ [])
def from_file!(filename, options) do
filename
|> include(options)
|> as_html!()
end
@default_timeout_in_ms 5000
@doc false
def pmap(collection, func, timeout \\ @default_timeout_in_ms) do
collection
|> Enum.map(fn item -> Task.async(fn -> func.(item) end) end)
|> Task.yield_many(timeout)
|> Enum.map(&_join_pmap_results_or_raise(&1, timeout))
end
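# Illustrative use of `pmap/3` (values chosen arbitrarily): maps in parallel
# and raises if any task exceeds the 100ms timeout.
#
#     Earmark.Internal.pmap([1, 2, 3], &(&1 * 2), 100)
#     #=> [2, 4, 6]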
defp _join_pmap_results_or_raise(yield_tuples, timeout)
defp _join_pmap_results_or_raise({_task, {:ok, result}}, _timeout), do: result
defp _join_pmap_results_or_raise({task, {:error, reason}}, _timeout),
do: raise(Error, "#{inspect(task)} has died with reason #{inspect(reason)}")
defp _join_pmap_results_or_raise({task, nil}, timeout),
do:
raise(
Error,
"#{inspect(task)} has not responded within the set timeout of #{timeout}ms, consider increasing it"
)
end
# SPDX-License-Identifier: Apache-2.0
# File: lib/earmark/internal.ex
defmodule USGovData.Parsers.CandidateMaster do
defstruct([
:address1,
:address2,
:city,
:election_year,
:ici,
:id,
:name,
:office_district,
:office_state,
:party,
:pcc,
:state,
:status,
:type,
:zip_code
])
@type candidate_type :: :house | :senate | :president
@type t :: %__MODULE__{
address1: String.t(),
address2: String.t(),
city: String.t(),
election_year: non_neg_integer,
ici: String.t(),
id: String.t(),
name: String.t(),
office_district: String.t(),
office_state: String.t(),
party: String.t(),
pcc: String.t(),
state: String.t(),
status: String.t(),
type: candidate_type(),
zip_code: String.t()
}
@doc """
Parses a line from a candidate master FEC data file
"""
@spec parse_line(line :: String.t()) :: {:ok, __MODULE__.t()} | {:error, atom}
def parse_line(line) do
case :csv_parser.scan_and_parse(line) do
{:ok, fields} ->
fields = maybe_pad(fields)
case length(fields) do
15 ->
%__MODULE__{
type: Enum.at(fields, name2off(:type)),
id: Enum.at(fields, name2off(:id)),
name: Enum.at(fields, name2off(:name)),
party: Enum.at(fields, name2off(:party)),
election_year: Enum.at(fields, name2off(:election_year)),
office_state: Enum.at(fields, name2off(:office_state)),
office_district: Enum.at(fields, name2off(:office_district)),
ici: Enum.at(fields, name2off(:ici)),
status: Enum.at(fields, name2off(:status)),
pcc: Enum.at(fields, name2off(:pcc)),
address1: Enum.at(fields, name2off(:address1)),
address2: Enum.at(fields, name2off(:address2)),
city: Enum.at(fields, name2off(:city)),
state: Enum.at(fields, name2off(:state)),
zip_code: Enum.at(fields, name2off(:zip_code))
}
|> validate
_ ->
{:error, :bad_field_count}
end
error ->
error
end
end
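# Sketch of a successful result (field values are invented for illustration,
# not real FEC data):
#
#     {:ok, %USGovData.Parsers.CandidateMaster{type: :house, id: "H0XX00001",
#        name: "DOE, JANE", party: "DEM", election_year: 2020, ...}}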
defp name2off(:id), do: 0
defp name2off(:name), do: 1
defp name2off(:party), do: 2
defp name2off(:election_year), do: 3
defp name2off(:office_state), do: 4
defp name2off(:type), do: 5
defp name2off(:office_district), do: 6
defp name2off(:ici), do: 7
defp name2off(:status), do: 8
defp name2off(:pcc), do: 9
defp name2off(:address1), do: 10
defp name2off(:address2), do: 11
defp name2off(:city), do: 12
defp name2off(:state), do: 13
defp name2off(:zip_code), do: 14
defp validate(%__MODULE__{election_year: ey}) when not is_integer(ey) do
{:error, :bad_election_year}
end
defp validate(%__MODULE__{type: type}) when type not in ["H", "S", "P"] do
{:error, :bad_candidate_type}
end
defp validate(%__MODULE__{type: type, zip_code: zip_code} = r) do
updated =
case type do
"H" ->
:house
"S" ->
:senate
"P" ->
:president
end
{:ok, %{r | type: updated, zip_code: "#{zip_code}"}}
end
defp maybe_pad(fields) do
if length(fields) < 15 do
maybe_pad(fields ++ [nil])
else
fields
end
end
end
# File: lib/parsers/candidate_master.ex
defmodule Interpreter do
@moduledoc """
Interprets a Brainfark program.
To turn the output into text, pass the final state to
`CmdState.render_output/1` and it will convert it into a string.
"""
@doc """
Interpret a parsed Brainfark program.
"""
def interpret(program), do: interpret(program, "")
def interpret(program, input) do
do_interpret(%CmdState{
code: program |> ZipperList.from_list,
data: %ZipperList{cursor: 0},
input: String.codepoints(input),
output: []
})
end
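# Usage sketch (`Parser` is an assumed companion module producing the command
# list this interpreter expects):
#
#     "++[>+<-]"
#     |> Parser.parse()
#     |> Interpreter.interpret()
#     |> CmdState.render_output()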
# This happens when the code has finished executing, cursor will be nil
defp do_interpret(state = %CmdState{code: %ZipperList{cursor: nil}}), do: state
# If anything else is happening, then do_interpret will recurse
defp do_interpret(state = %CmdState{}) do
{action, state} = Command.command(state)
# IO.inspect action
# Act on the stack based on the command's control action
state = case action do
:continue -> continue(state)
:break -> find_loop_end(state)
:restart -> find_loop_begin(state)
end
do_interpret(state)
end
# Just go to the next command
defp continue(state = %CmdState{code: code}) do
%{state | code: ZipperList.right(code)}
end
# Traverses backwards to find the current loop's beginning
defp find_loop_begin(state = %CmdState{code: code}) do
code = code
|> ZipperList.reverse
|> do_find_loop(:loop_end, :loop_begin)
|> ZipperList.reverse
%{state | code: code}
end
# traverse forward to find the current loop's ending
defp find_loop_end(state = %CmdState{code: code}) do
code = do_find_loop(code, :loop_begin, :loop_end)
%{state | code: code}
end
# traverse to find the matching end token
defp do_find_loop(code, start_token, end_token) do
Enum.reduce_while(code, 0, fn(z, i) ->
case {z.cursor, i} do
{^start_token, _} -> {:cont, i + 1}
{^end_token, 1} -> {:halt, z}
{^end_token, _} -> {:cont, i - 1}
_ -> {:cont, i}
end
end)
end
end
# File: lib/brainfark/interpreter.ex
defmodule DateTimeParserTestMacros do
@moduledoc false
alias DateTimeParser
alias DateTimeParserTest.Recorder
def to_iso(%NaiveDateTime{} = datetime), do: NaiveDateTime.to_iso8601(datetime)
def to_iso(%DateTime{} = datetime), do: DateTime.to_iso8601(datetime)
def to_iso(%Date{} = date), do: Date.to_iso8601(date)
def to_iso(%Time{} = time), do: Time.to_iso8601(time)
def to_iso(string) when is_binary(string), do: string
defmacro test_parsing(string_timestamp, expected_result, opts \\ []) do
quote do
test_name =
if unquote(opts) == [] do
"parses timestamp #{unquote(string_timestamp)}"
else
"parses timestamp #{unquote(string_timestamp)} with opts #{inspect(unquote(opts))}"
end
test test_name do
assert {:ok, result} = DateTimeParser.parse(unquote(string_timestamp), unquote(opts))
case unquote(expected_result) do
%{} = expected ->
assert result == expected
expected when is_binary(expected) ->
assert to_iso(result) == expected
end
Recorder.add(unquote(string_timestamp), unquote(expected_result), "parse", unquote(opts))
end
end
end
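# Example use inside an ExUnit case (timestamp chosen arbitrarily):
#
#     import DateTimeParserTestMacros
#     test_parsing "2021-01-02", "2021-01-02"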
defmacro test_datetime_parsing(string_datetime, expected_result, opts \\ []) do
quote do
test_name =
if unquote(opts) == [] do
"parses datetime #{unquote(string_datetime)}"
else
"parses datetime #{unquote(string_datetime)} with opts #{inspect(unquote(opts))}"
end
test test_name do
assert {:ok, datetime} =
DateTimeParser.parse_datetime(unquote(string_datetime), unquote(opts))
case unquote(expected_result) do
%{} = expected ->
assert datetime == expected
expected when is_binary(expected) ->
assert to_iso(datetime) == expected
end
Recorder.add(
unquote(string_datetime),
unquote(expected_result),
"parse_datetime",
unquote(opts)
)
end
end
end
defmacro test_time_parsing(string_time, expected_result) do
quote do
test "parses time #{unquote(string_time)}" do
assert {:ok, time} = DateTimeParser.parse_time(unquote(string_time))
assert time == unquote(expected_result)
Recorder.add(unquote(string_time), unquote(expected_result), "parse_time", [])
end
end
end
defmacro test_date_parsing(string_date, expected_result, opts \\ []) do
quote do
test_name =
if unquote(opts) == [] do
"parses date #{unquote(string_date)}"
else
"parses date #{unquote(string_date)} with opts #{inspect(unquote(opts))}"
end
test test_name do
assert {:ok, date} = DateTimeParser.parse_date(unquote(string_date), unquote(opts))
assert date == unquote(expected_result)
Recorder.add(unquote(string_date), unquote(expected_result), "parse_date", unquote(opts))
end
end
end
defmacro test_error(string_timestamp, expected_message \\ nil, opts \\ []) do
quote do
test_name =
if unquote(opts) == [] do
"does not parse timestamp #{unquote(string_timestamp)}"
else
"does not parse timestamp #{unquote(string_timestamp)} with opts #{
inspect(unquote(opts))
}"
end
test test_name do
assert {:error, message} = DateTimeParser.parse(unquote(string_timestamp), unquote(opts))
if unquote(expected_message) do
assert message == unquote(expected_message)
end
Recorder.add(unquote(string_timestamp), message, "parse", unquote(opts))
end
end
end
defmacro test_datetime_error(string_timestamp, expected_message \\ nil, opts \\ []) do
quote do
test_name =
if unquote(opts) == [] do
"does not parse datetime #{unquote(string_timestamp)}"
else
"does not parse datetime #{unquote(string_timestamp)} with opts #{
inspect(unquote(opts))
}"
end
test test_name do
assert {:error, message} =
DateTimeParser.parse_datetime(unquote(string_timestamp), unquote(opts))
if unquote(expected_message) do
assert message == unquote(expected_message)
end
Recorder.add(unquote(string_timestamp), message, "parse_datetime", unquote(opts))
end
end
end
defmacro test_date_error(string_timestamp, expected_message \\ nil, opts \\ []) do
quote do
test_name =
if unquote(opts) == [] do
"does not parse date #{unquote(string_timestamp)}"
else
"does not parse date #{unquote(string_timestamp)} with opts #{inspect(unquote(opts))}"
end
test test_name do
assert {:error, message} =
DateTimeParser.parse_date(unquote(string_timestamp), unquote(opts))
if unquote(expected_message) do
assert message == unquote(expected_message)
end
Recorder.add(unquote(string_timestamp), message, "parse_date", unquote(opts))
end
end
end
defmacro test_time_error(string_time, expected_message) do
quote do
test "does not parse time #{unquote(string_time)}" do
assert {:error, message} = DateTimeParser.parse_time(unquote(string_time))
if unquote(expected_message) do
assert message == unquote(expected_message)
end
Recorder.add(unquote(string_time), unquote(expected_message), "parse_time", [])
end
end
end
end
# File: test/support/macros.ex
defmodule TreeStorage do
@moduledoc """
Manage a tree-like structure of named subtrees and leaves.
"""
@tree :tree
@leaf :leaf
def leaf(name, data), do: {@leaf, name, data}
def tree(name, tree), do: check_tree(tree) && {@tree, name, tree}
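# Illustrative example (names invented): build a tree, then locate a leaf by a
# condition on its data; `find/2` returns the path of names.
#
#     t = [TreeStorage.tree(:a, [TreeStorage.leaf(:b, 1)]), TreeStorage.leaf(:c, 2)]
#     TreeStorage.find(t, fn _name, data -> data == 1 end)
#     #=> [:a, :b]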
def find(tree, condition) when is_function(condition),
do: check_tree(tree) && _find(tree, condition)
defp _find([], _condition), do: nil
defp _find([h|t], condition),
do: _find(h, condition) || _find(t, condition)
defp _find({@leaf, name, leaf}, condition),
do: condition.(name, leaf) && [name]
defp _find({@tree, name, tree}, condition),
do: (path = _find(tree, condition)) && [name|path]
def get(tree, path) when is_list(path),
do: check_tree(tree) && _get(tree, path)
defp _get(tree, []), do: tree
defp _get([{_, name, tree}|t], [h_p|t_p]=path),
do: (name == h_p && _get(tree, t_p)) || _get(t, path)
def replace(tree, path, input) when is_list(path),
do: check_tree(tree) && _replace(tree, path, input)
defp _replace(_, [], input), do: input
defp _replace([{type, name, data}|t], [name|path], input),
do: [{type, name, _replace(data, path, input)}|t]
defp _replace([h|t], path, input), do: [h|_replace(t, path, input)]
def reduce(tree, leaf_fun, tree_fun, init)
when is_function(leaf_fun) and is_function(tree_fun),
do: check_tree(tree) && _reduce(tree, leaf_fun, tree_fun, init, init)
defp _reduce([] ,_, _, _, acc), do: acc
defp _reduce([{@tree, name, tree}|t], leaf_fun, tree_fun, init, acc),
do: _reduce(t, leaf_fun, tree_fun, init,
tree_fun.(acc, name, _reduce(tree, leaf_fun, tree_fun, init, init)))
defp _reduce([{@leaf, name, leaf}|t], leaf_fun, tree_fun, init, acc),
do: _reduce(t, leaf_fun, tree_fun, init, leaf_fun.(acc, name, leaf))
def check_tree([]), do: true
def check_tree([{@leaf, _, _}|t]), do: check_tree(t)
def check_tree([{@tree, _, tree}|t]),
do: check_tree(tree) && check_tree(t)
def check_tree(tree),
do: raise "Invalid tree structure: #{inspect tree}"
end
# File: lib/tree_storage.ex
defmodule Shmex do
@moduledoc """
This module allows using data placed in POSIX shared memory on POSIX
compliant systems.
Defines a struct representing the actual shared memory object. The struct
should not be modified, and should always be passed around as a whole - see
`t:#{inspect(__MODULE__)}.t/0`
"""
alias __MODULE__.Native
@typedoc """
Struct describing data kept in shared memory. Should not be modified
and should always be passed around as a whole
...including passing to the native code - there are functions in `:shmex_lib`
(a native library exported via Bundlex) that allow transforming the Elixir
struct into a C struct and then accessing the shared memory from the native code.
Shared memory should be available as long as the associated struct is not
garbage collected.
"""
@type t :: %__MODULE__{
name: binary(),
guard: reference(),
size: non_neg_integer(),
capacity: pos_integer()
}
@default_capacity 4096
defstruct name: nil, guard: nil, size: 0, capacity: @default_capacity
@doc """
Creates a new, empty shared memory area with the given capacity
"""
@spec empty(capacity :: pos_integer) :: t()
def empty(capacity \\ @default_capacity) do
{:ok, data} = create(capacity)
data
end
@doc """
Creates a new shared memory area filled with the existing data.
"""
@spec new(binary()) :: t()
def new(data) when is_binary(data) do
new(data, byte_size(data))
end
@doc """
Creates a new shared memory area initialized with `data` and sets its capacity.
The actual capacity is the greater of passed capacity and data size
"""
@spec new(data :: binary(), capacity :: pos_integer()) :: t()
def new(data, capacity) when capacity > 0 do
{:ok, shm} = create(capacity)
{:ok, shm} = Native.write(shm, data)
shm
end
@doc """
Sets the capacity of shared memory area.
If the capacity is smaller than the current size, data will be discarded and size modified
"""
@spec set_capacity(t(), pos_integer()) :: t()
def set_capacity(shm, capacity) do
{:ok, new_shm} = Native.set_capacity(shm, capacity)
new_shm
end
@doc """
Ensures that shared memory is not garbage collected at the point of executing
this function.
Useful when passing shared memory to other OS process, to prevent it
from being garbage collected until received and mapped by that process.
"""
@spec ensure_not_gc(t()) :: :ok
def ensure_not_gc(shm) do
:ok = Native.ensure_not_gc(shm)
end
@doc """
Returns shared memory contents as a binary.
"""
@spec to_binary(t()) :: binary()
def to_binary(shm) do
{:ok, binary} = shm |> Native.read()
binary
end
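# Round-trip sketch: write a binary into shared memory and read it back.
#
#     shm = Shmex.new("hello")
#     Shmex.to_binary(shm)
#     #=> "hello"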
defp create(capacity) do
shm_struct = %__MODULE__{capacity: capacity}
Native.allocate(shm_struct)
end
end
# File: lib/shmex.ex
defmodule AdventOfCode.Solutions.Day08 do
@moduledoc """
Solution for day 8 exercise.
### Exercise
https://adventofcode.com/2021/day/8
"""
require Logger
@entry_separator " | "
def calculate_segments(filename, mode) do
entries =
filename
|> File.read!()
|> parse_entries()
result = guess_entries(entries, mode)
IO.puts("Result guessing segments in #{mode} is #{result}")
end
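# Illustrative invocation (the input path is hypothetical):
#
#     AdventOfCode.Solutions.Day08.calculate_segments("priv/day08.txt", :full)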
defp parse_entries(file_content) do
file_content
|> String.replace("\r\n", "\n")
|> String.split("\n", trim: true)
|> Enum.map(&parse_entry/1)
end
defp parse_entry(entry_str) do
[patterns, signals] =
entry_str
|> String.split(@entry_separator)
|> Enum.map(&String.split(&1, " ", trim: true))
%{patterns: patterns, signals: signals}
end
defp guess_entries(entries, mode) do
Enum.reduce(entries, 0, fn entry, acc -> acc + guess_entry(entry, mode) end)
end
defp guess_entry(%{patterns: patterns, signals: signals}, :direct) do
recognisable_patterns = get_direct_patterns(patterns)
signals
|> Enum.map(&sort_signal/1)
|> Enum.map(&Map.get(recognisable_patterns, &1))
|> Enum.reject(&is_nil/1)
|> length()
end
defp guess_entry(%{patterns: patterns, signals: signals}, :full) do
recognisable_patterns =
patterns
|> get_direct_patterns()
|> get_indirect_patterns()
signals
|> Enum.map(&sort_signal/1)
|> Enum.map(&Map.get(recognisable_patterns, &1))
|> Enum.reject(&is_nil/1)
|> Enum.map(&to_string/1)
|> Enum.join()
|> String.to_integer()
end
defp get_direct_patterns(patterns) do
patterns
|> Enum.map(&sort_signal/1)
|> Enum.map(&direct_guess_pattern/1)
|> Enum.into(%{})
end
defp get_indirect_patterns(direct_patterns) do
all_patterns = Map.keys(direct_patterns)
# This order is specifically crafted because there are some guessing
# functions that need previous guesses.
direct_patterns
|> traverse_map()
|> guess_pattern(0, all_patterns)
|> guess_pattern(3, all_patterns)
|> guess_pattern(6, all_patterns)
|> guess_pattern(9, all_patterns)
|> guess_pattern(2, all_patterns)
|> guess_pattern(5, all_patterns)
|> traverse_map()
end
defp guess_pattern(patterns, 0, all_patterns) do
%{7 => pattern_7, 4 => pattern_4} = patterns
pattern_0 =
Enum.find(all_patterns, fn pattern ->
length(pattern) == 6 &&
length(pattern_4 -- pattern) == 1 &&
length(pattern_7 -- pattern) == 0
end)
Map.put(patterns, 0, pattern_0)
end
defp guess_pattern(patterns, 3, all_patterns) do
%{4 => pattern_4, 7 => pattern_7} = patterns
pattern_3 =
Enum.find(all_patterns, fn pattern ->
length(pattern) == 5 &&
length(pattern_4 -- pattern) == 1 &&
length(pattern_7 -- pattern) == 0
end)
Map.put(patterns, 3, pattern_3)
end
defp guess_pattern(patterns, 6, all_patterns) do
%{8 => pattern_8, 1 => pattern_1} = patterns
pattern_6 =
Enum.find(all_patterns, fn pattern ->
length(pattern) == 6 &&
length(pattern_8 -- pattern) == 1 &&
length(pattern_1 -- pattern) == 1
end)
Map.put(patterns, 6, pattern_6)
end
defp guess_pattern(patterns, 9, all_patterns) do
%{4 => pattern_4} = patterns
pattern_9 =
Enum.find(all_patterns, fn pattern ->
length(pattern) == 6 &&
length(pattern_4 -- pattern) == 0
end)
Map.put(patterns, 9, pattern_9)
end
defp guess_pattern(patterns, 2, all_patterns) do
%{9 => pattern_9, 1 => pattern_1} = patterns
pattern_2 =
Enum.find(all_patterns, fn pattern ->
length(pattern) == 5 &&
length(pattern_9 -- pattern) == 2 &&
length(pattern_1 -- pattern) == 1
end)
Map.put(patterns, 2, pattern_2)
end
defp guess_pattern(patterns, 5, all_patterns) do
%{6 => pattern_6, 4 => pattern_4} = patterns
pattern_5 =
Enum.find(all_patterns, fn pattern ->
length(pattern) == 5 &&
length(pattern_6 -- pattern) == 1 &&
length(pattern_4 -- pattern) == 1
end)
Map.put(patterns, 5, pattern_5)
end
defp guess_pattern(patterns, _, _), do: patterns
defp direct_guess_pattern(pattern) when length(pattern) == 2, do: {pattern, 1}
defp direct_guess_pattern(pattern) when length(pattern) == 4, do: {pattern, 4}
defp direct_guess_pattern(pattern) when length(pattern) == 3, do: {pattern, 7}
defp direct_guess_pattern(pattern) when length(pattern) == 7, do: {pattern, 8}
defp direct_guess_pattern(pattern), do: {pattern, nil}
defp sort_signal(signal) do
signal
|> String.graphemes()
|> Enum.sort()
end
defp traverse_map(map) do
map
|> Enum.reduce(%{}, fn
{_k, nil}, acc -> acc
{k, v}, acc -> Map.put(acc, v, k)
end)
|> Enum.into(%{})
end
end
# File: lib/advent_of_code/solutions/day08.ex
defmodule Advent.Sixteen.Thirteen.State do
alias Advent.Helpers.Utility, as: U
alias Advent.Sixteen.Thirteen.State
@favourite_number 1364
#@favourite_number 10
defstruct [:x, :y]
def to_list(state) do
[state.x, state.y]
end
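# A position is open when both coordinates are non-negative and the binary
# representation of x*x + 3*x + 2*x*y + y + y*y + @favourite_number contains an
# even number of 1 bits (the Advent of Code 2016 day 13 maze rule).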
def valid(state) do
x = state.x
y = state.y
ones =
(@favourite_number + x * x + 3 * x + 2 * x * y + y + y * y)
|> Integer.to_string(2)
|> to_charlist()
|> Enum.count(&(&1 == ?1))
x >= 0 and y >= 0 and rem(ones, 2) == 0
end
def successors(state) do
nesw = [%State{x: state.x - 1, y: state.y},
%State{x: state.x, y: state.y - 1},
%State{x: state.x + 1, y: state.y},
%State{x: state.x, y: state.y + 1}]
Enum.filter(nesw, &State.valid/1)
end
def canonicalise(state) do
state
end
end
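# Worked example for valid/1, using the commented-out example input 10: for
# %State{x: 1, y: 1}, 10 + 1*1 + 3*1 + 2*1*1 + 1 + 1*1 = 18 = 0b10010, which
# has two 1-bits; even parity means the cell is open ground.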
defmodule Advent.Sixteen.Thirteen do
alias Advent.Helpers.NodeCache, as: Cache
alias Advent.Sixteen.Thirteen.State
alias Advent.Helpers.Utility, as: U
use Timing
@victory_condition %State{x: 31, y: 39}
#@victory_condition %State{x: 7, y: 4}
@max_depth 50
# Constant heuristic: with no distance estimate the sort is a no-op and the
# search degenerates to breadth-first.
def distance(_state), do: 0
# Returns true if move1 is better than move2
def heuristic_sort(move1, move2) do
distance(move1) < distance(move2)
end
def flatten_states(e, acc) do
# Enum.reduce/3 takes the enumerable first, then the initial accumulator.
[Enum.reduce(e, [], fn [x], inner -> [x | inner] end) | acc]
end
defp get_openset([], acc), do: acc
defp get_openset(state, acc), do: get_openset(Cache.pop, [state|acc])
def search_tree_cardinality(initial, 0) do
Cache.open(initial, State.canonicalise(initial))
search(1, true)
end
# search/2 has multiple clauses and a default argument, so the default must
# be declared in a bare function head and the clauses kept together.
def search(depth, return_cardinality \\ false)
def search(initial, 0) do
Cache.open(initial, State.canonicalise(initial))
search(1)
end
def search(depth, return_cardinality) do
#U.i depth, "searching at depth"
#IO.gets "boop?"
#U.i Cache.openset, "current open set"
#U.i Cache.closed, "current closed set"
Enum.each(get_openset(Cache.pop, []), fn(open) ->
Enum.each(State.successors(open), fn(succ) ->
Cache.open(succ, State.canonicalise(succ))
end)
end)
#U.i length(Cache.openset), " cardinality of openset"
#U.i Cache.openset, "current open set"
#U.i Cache.closed, "current closed set"
if return_cardinality do
if depth == @max_depth do
Enum.count(Cache.closed)
else
search(depth+1, true)
end
else
if Enum.any?(Cache.openset, fn(state) ->
state == @victory_condition
end) do
depth
else
search(depth+1, return_cardinality)
end
end
end
def a do
initial_state = %State{x: 1, y: 1}
Cache.init(&heuristic_sort/2)
time do
search(initial_state, 0)
end
end
def b do
initial_state = %State{x: 1, y: 1}
Cache.init(&heuristic_sort/2)
time do
search_tree_cardinality(initial_state, 0)
end
end
end
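# Usage sketch (assumes the NodeCache process API used above): part a returns
# the BFS depth at which the open set first contains @victory_condition, and
# part b counts the closed set after searching to @max_depth (50) steps.
#
#     Advent.Sixteen.Thirteen.a()
#     Advent.Sixteen.Thirteen.b()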
|
lib/2016/13.ex
| 0.574156
| 0.584123
|
13.ex
|
starcoder
|
defmodule Regex do
@moduledoc ~S"""
Provides regular expressions for Elixir.
"""
defstruct re_pattern: nil, source: "", opts: "", re_version: ""
@type t :: %__MODULE__{re_pattern: term, source: binary, opts: binary}
defmodule CompileError do
defexception message: "regex could not be compiled"
end
@doc """
Compiles the regular expression.
"""
@spec compile(binary, binary | [term]) :: {:ok, t} | {:error, any}
def compile(source, options \\ "") when is_binary(source) do
compile(source, options, version())
end
defp compile(source, options, version) when is_binary(options) do
case translate_options(options, []) do
{:error, rest} ->
{:error, {:invalid_option, rest}}
translated_options ->
compile(source, translated_options, options, version)
end
end
defp compile(source, options, version) when is_list(options) do
compile(source, options, "", version)
end
defp compile(source, opts, doc_opts, version) do
case :re.compile(source, opts) do
{:ok, re_pattern} ->
{:ok, %Regex{re_pattern: re_pattern, re_version: version, source: source, opts: doc_opts}}
error ->
error
end
end
@doc """
Compiles the regular expression and raises `Regex.CompileError` in case of errors.
"""
@spec compile!(binary, binary | [term]) :: t
def compile!(source, options \\ "") when is_binary(source) do
case compile(source, options) do
{:ok, regex} -> regex
{:error, {reason, at}} -> raise Regex.CompileError, "#{reason} at position #{at}"
end
end
@doc """
Recompiles the existing regular expression if necessary.
"""
@doc since: "1.4.0"
@spec recompile(t) :: {:ok, t} | {:error, any}
def recompile(%Regex{} = regex) do
version = version()
case regex do
%{re_version: ^version} ->
{:ok, regex}
_ ->
%{source: source, opts: opts} = regex
compile(source, opts, version)
end
end
@doc """
Recompiles the existing regular expression and raises `Regex.CompileError` in case of errors.
"""
@doc since: "1.4.0"
@spec recompile!(t) :: t
def recompile!(regex) do
case recompile(regex) do
{:ok, regex} -> regex
{:error, {reason, at}} -> raise Regex.CompileError, "#{reason} at position #{at}"
end
end
@doc """
Returns the version of the underlying Regex engine.
"""
@doc since: "1.4.0"
@spec version :: term()
def version do
{:re.version(), :erlang.system_info(:endian)}
end
@doc """
Returns a boolean indicating whether there was a match or not.
"""
@spec match?(t, String.t()) :: boolean
def match?(%Regex{} = regex, string) when is_binary(string) do
safe_run(regex, string, [{:capture, :none}]) == :match
end
@doc """
Returns `true` if the given `term` is a regex.
Otherwise returns `false`.
"""
# TODO: deprecate permanently on Elixir v1.15
@doc deprecated: "Use Kernel.is_struct/2 or pattern match on %Regex{} instead"
def regex?(term)
def regex?(%Regex{}), do: true
def regex?(_), do: false
@doc """
Runs the regular expression against the given string until the first match.
It returns a list with all captures or `nil` if no match occurred.
"""
@spec run(t, binary, [term]) :: nil | [binary] | [{integer, integer}]
def run(regex, string, options \\ [])
def run(%Regex{} = regex, string, options) when is_binary(string) do
return = Keyword.get(options, :return, :binary)
captures = Keyword.get(options, :capture, :all)
offset = Keyword.get(options, :offset, 0)
case safe_run(regex, string, [{:capture, captures, return}, {:offset, offset}]) do
:nomatch -> nil
:match -> []
{:match, results} -> results
end
end
@doc """
Returns the given captures as a map or `nil` if no captures are found.
"""
@spec named_captures(t, String.t(), [term]) :: map | nil
def named_captures(regex, string, options \\ []) when is_binary(string) do
names = names(regex)
options = Keyword.put(options, :capture, names)
results = run(regex, string, options)
if results, do: Enum.zip(names, results) |> Enum.into(%{})
end
@doc """
Returns the underlying `re_pattern` in the regular expression.
"""
@spec re_pattern(t) :: term
def re_pattern(%Regex{re_pattern: compiled}) do
compiled
end
@doc """
Returns the regex source as a binary.
"""
@spec source(t) :: String.t()
def source(%Regex{source: source}) do
source
end
@doc """
Returns the regex options as a string.
"""
@spec opts(t) :: String.t()
def opts(%Regex{opts: opts}) do
opts
end
@doc """
Returns a list of names in the regex.
"""
@spec names(t) :: [String.t()]
def names(%Regex{re_pattern: compiled, re_version: version, source: source}) do
re_pattern =
case version() do
^version ->
compiled
_ ->
{:ok, recompiled} = :re.compile(source)
recompiled
end
{:namelist, names} = :re.inspect(re_pattern, :namelist)
names
end
@doc ~S"""
Same as `run/3`, but scans the target several times collecting all
matches of the regular expression.
"""
@spec scan(t, String.t(), [term]) :: [[String.t()]]
def scan(regex, string, options \\ [])
def scan(%Regex{} = regex, string, options) when is_binary(string) do
return = Keyword.get(options, :return, :binary)
captures = Keyword.get(options, :capture, :all)
offset = Keyword.get(options, :offset, 0)
options = [{:capture, captures, return}, :global, {:offset, offset}]
case safe_run(regex, string, options) do
:match -> []
:nomatch -> []
{:match, results} -> results
end
end
defp safe_run(
%Regex{re_pattern: compiled, source: source, re_version: version, opts: compile_opts},
string,
options
) do
case version() do
^version -> :re.run(string, compiled, options)
_ -> :re.run(string, source, translate_options(compile_opts, options))
end
end
@doc """
Splits the given target based on the given pattern and in the given number of
parts.
"""
@spec split(t, String.t(), [term]) :: [String.t()]
def split(regex, string, options \\ [])
def split(%Regex{}, "", opts) do
if Keyword.get(opts, :trim, false) do
[]
else
[""]
end
end
def split(%Regex{} = regex, string, opts)
when is_binary(string) and is_list(opts) do
on = Keyword.get(opts, :on, :first)
case safe_run(regex, string, [:global, capture: on]) do
{:match, matches} ->
index = parts_to_index(Keyword.get(opts, :parts, :infinity))
trim = Keyword.get(opts, :trim, false)
include_captures = Keyword.get(opts, :include_captures, false)
do_split(matches, string, 0, index, trim, include_captures)
:match ->
[string]
:nomatch ->
[string]
end
end
defp parts_to_index(:infinity), do: 0
defp parts_to_index(n) when is_integer(n) and n > 0, do: n
defp do_split(_, string, offset, _counter, true, _with_captures)
when byte_size(string) <= offset do
[]
end
defp do_split(_, string, offset, 1, _trim, _with_captures),
do: [binary_part(string, offset, byte_size(string) - offset)]
defp do_split([], string, offset, _counter, _trim, _with_captures),
do: [binary_part(string, offset, byte_size(string) - offset)]
defp do_split([[{pos, _} | h] | t], string, offset, counter, trim, with_captures)
when pos - offset < 0 do
do_split([h | t], string, offset, counter, trim, with_captures)
end
defp do_split([[] | t], string, offset, counter, trim, with_captures),
do: do_split(t, string, offset, counter, trim, with_captures)
defp do_split([[{pos, length} | h] | t], string, offset, counter, trim, true) do
new_offset = pos + length
keep = pos - offset
<<_::binary-size(offset), part::binary-size(keep), match::binary-size(length), _::binary>> =
string
if keep == 0 and trim do
[match | do_split([h | t], string, new_offset, counter - 1, trim, true)]
else
[part, match | do_split([h | t], string, new_offset, counter - 1, trim, true)]
end
end
defp do_split([[{pos, length} | h] | t], string, offset, counter, trim, false) do
new_offset = pos + length
keep = pos - offset
if keep == 0 and trim do
do_split([h | t], string, new_offset, counter, trim, false)
else
<<_::binary-size(offset), part::binary-size(keep), _::binary>> = string
[part | do_split([h | t], string, new_offset, counter - 1, trim, false)]
end
end
@doc ~S"""
Receives a regex, a binary and a replacement, returns a new
binary where all matches are replaced by the replacement.
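
## Examples

    iex> Regex.replace(~r/b/, "abc", "d")
    "adc"

    iex> Regex.replace(~r/b/, "abc", fn x -> "[#{x}]" end)
    "a[b]c"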
"""
@spec replace(t, String.t(), String.t() | (... -> String.t()), [term]) :: String.t()
def replace(%Regex{} = regex, string, replacement, options \\ [])
when is_binary(string) and is_list(options) do
opts = if Keyword.get(options, :global) != false, do: [:global], else: []
opts = [{:capture, :all, :index} | opts]
case safe_run(regex, string, opts) do
:nomatch ->
string
{:match, [mlist | t]} when is_list(mlist) ->
apply_list(string, precompile_replacement(replacement), [mlist | t])
|> IO.iodata_to_binary()
{:match, slist} ->
apply_list(string, precompile_replacement(replacement), [slist])
|> IO.iodata_to_binary()
end
end
defp precompile_replacement(replacement) when is_function(replacement) do
{:arity, arity} = Function.info(replacement, :arity)
{replacement, arity}
end
defp precompile_replacement(""), do: []
defp precompile_replacement(<<?\\, ?g, ?{, rest::binary>>) when byte_size(rest) > 0 do
{ns, <<?}, rest::binary>>} = pick_int(rest)
[List.to_integer(ns) | precompile_replacement(rest)]
end
defp precompile_replacement(<<?\\, ?\\, rest::binary>>) do
[<<?\\>> | precompile_replacement(rest)]
end
defp precompile_replacement(<<?\\, x, rest::binary>>) when x in ?0..?9 do
{ns, rest} = pick_int(rest)
[List.to_integer([x | ns]) | precompile_replacement(rest)]
end
defp precompile_replacement(<<x, rest::binary>>) do
case precompile_replacement(rest) do
[head | t] when is_binary(head) ->
[<<x, head::binary>> | t]
other ->
[<<x>> | other]
end
end
defp pick_int(<<x, rest::binary>>) when x in ?0..?9 do
{found, rest} = pick_int(rest)
{[x | found], rest}
end
defp pick_int(bin) do
{[], bin}
end
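# For instance, precompile_replacement/1 turns the replacement "\\1-\\g{2}"
# into [1, "-", 2]: group references become integers while literal runs stay
# binaries, so apply_replace/3 can interleave captures and text.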
defp apply_list(string, replacement, list) do
apply_list(string, string, 0, replacement, list)
end
defp apply_list(_, "", _, _, []) do
[]
end
defp apply_list(_, string, _, _, []) do
string
end
defp apply_list(whole, string, pos, replacement, [[{mpos, _} | _] | _] = list)
when mpos > pos do
length = mpos - pos
<<untouched::binary-size(length), rest::binary>> = string
[untouched | apply_list(whole, rest, mpos, replacement, list)]
end
defp apply_list(whole, string, pos, replacement, [[{pos, length} | _] = head | tail]) do
<<_::size(length)-binary, rest::binary>> = string
new_data = apply_replace(whole, replacement, head)
[new_data | apply_list(whole, rest, pos + length, replacement, tail)]
end
defp apply_replace(string, {fun, arity}, indexes) do
apply(fun, get_indexes(string, indexes, arity))
end
defp apply_replace(_, [bin], _) when is_binary(bin) do
bin
end
defp apply_replace(string, repl, indexes) do
indexes = List.to_tuple(indexes)
for part <- repl do
cond do
is_binary(part) ->
part
part >= tuple_size(indexes) ->
""
true ->
get_index(string, elem(indexes, part))
end
end
end
defp get_index(_string, {pos, _length}) when pos < 0 do
""
end
defp get_index(string, {pos, length}) do
<<_::size(pos)-binary, res::size(length)-binary, _::binary>> = string
res
end
defp get_indexes(_string, _, 0) do
[]
end
defp get_indexes(string, [], arity) do
["" | get_indexes(string, [], arity - 1)]
end
defp get_indexes(string, [h | t], arity) do
[get_index(string, h) | get_indexes(string, t, arity - 1)]
end
@doc ~S"""
Escapes a string to be literally matched in a regex.
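
## Examples

    iex> Regex.escape(".")
    "\\."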
"""
@spec escape(String.t()) :: String.t()
def escape(string) when is_binary(string) do
string
|> escape(_length = 0, string)
|> IO.iodata_to_binary()
end
@escapable '.^$*+?()[]{}|#-\\\t\n\v\f\r\s'
defp escape(<<char, rest::binary>>, length, original) when char in @escapable do
escape_char(rest, length, original, char)
end
defp escape(<<_, rest::binary>>, length, original) do
escape(rest, length + 1, original)
end
defp escape(<<>>, _length, original) do
original
end
defp escape_char(<<rest::binary>>, 0, _original, char) do
[?\\, char | escape(rest, 0, rest)]
end
defp escape_char(<<rest::binary>>, length, original, char) do
[binary_part(original, 0, length), ?\\, char | escape(rest, 0, rest)]
end
# Helpers
@doc false
# Unescape map function used by Macro.unescape_string.
def unescape_map(?f), do: ?\f
def unescape_map(?n), do: ?\n
def unescape_map(?r), do: ?\r
def unescape_map(?t), do: ?\t
def unescape_map(?v), do: ?\v
def unescape_map(?a), do: ?\a
def unescape_map(_), do: false
# Private Helpers
defp translate_options(<<?u, t::binary>>, acc), do: translate_options(t, [:unicode, :ucp | acc])
defp translate_options(<<?i, t::binary>>, acc), do: translate_options(t, [:caseless | acc])
defp translate_options(<<?x, t::binary>>, acc), do: translate_options(t, [:extended | acc])
defp translate_options(<<?f, t::binary>>, acc), do: translate_options(t, [:firstline | acc])
defp translate_options(<<?U, t::binary>>, acc), do: translate_options(t, [:ungreedy | acc])
defp translate_options(<<?s, t::binary>>, acc),
do: translate_options(t, [:dotall, {:newline, :anycrlf} | acc])
defp translate_options(<<?m, t::binary>>, acc), do: translate_options(t, [:multiline | acc])
defp translate_options(<<?r, t::binary>>, acc) do
IO.warn("the /r modifier in regular expressions is deprecated, please use /U instead")
translate_options(t, [:ungreedy | acc])
end
defp translate_options(<<>>, acc), do: acc
defp translate_options(rest, _acc), do: {:error, rest}
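# For example, translate_options("im", []) yields [:multiline, :caseless];
# the accumulator reverses the modifier order, which :re.compile/2 accepts.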
end
|
samples/Elixir/regex.ex
| 0.838746
| 0.436682
|
regex.ex
|
starcoder
|
defmodule Himamo.Matrix do
@moduledoc ~S"""
Defines a two- or three-dimensional matrix.
## Examples
iex> matrix = Himamo.Matrix.new({2, 3})
...> matrix = Himamo.Matrix.put(matrix, {1, 0}, 0.1)
...> Himamo.Matrix.get(matrix, {1, 0})
0.1
Implements the `Collectable` protocol.
## Examples
iex> matrix = [{{0, 1}, 0.1}] |> Enum.into(Himamo.Matrix.new({2, 2}))
...> Himamo.Matrix.get(matrix, {0, 1})
0.1
"""
defstruct [:map, :size]
@type entry :: term
@type index :: non_neg_integer
@type position :: {index, index} | {index, index, index}
@type t :: %__MODULE__{map: map, size: tuple}
@doc ~S"""
Creates a `Matrix`.
The `size` argument is a tuple that specifies the dimensions. For example,
`new({5, 3})` creates a 5×3 two-dimensional matrix and `new({7, 5, 4})`
creates a 7×5×4 three-dimensional matrix.
"""
@spec new(tuple) :: t
def new(size) do
%__MODULE__{map: Map.new, size: size}
end
@doc ~S"""
Returns entry at `position`.
`position` is a tuple of indices.
Raises `KeyError` when accessing a position that was not previously set.
"""
@spec get(t, position) :: entry
def get(%__MODULE__{map: map}, position) do
case Map.fetch(map, position) do
{:ok, entry} -> entry
:error -> raise(KeyError, key: position, term: map)
end
end
@doc ~S"""
Updates entry at `position`.
`position` is a tuple of indices.
Raises `ArgumentError` when updating a position that is out of bounds of the
matrix.
"""
@spec put(t, position, entry) :: t
def put(%__MODULE__{map: map, size: size}, position, entry) do
validate_position_within_size!(position, size)
new_map = Map.put(map, position, entry)
%__MODULE__{size: size, map: new_map}
end
defp validate_position_within_size!(position, size) do
case validate_position_within_size(position, size) do
:ok -> :ok
error -> raise(ArgumentError, error)
end
end
defp validate_position_within_size({x, y, z}, {width, height, depth}) do
cond do
x < 0 || x >= width ->
"x position out of bounds (got #{x}, expected 0..#{width-1})"
y < 0 || y >= height ->
"y position out of bounds (got #{y}, expected 0..#{height-1})"
z < 0 || z >= depth ->
"z position out of bounds (got #{z}, expected 0..#{depth-1})"
true -> :ok
end
end
defp validate_position_within_size({x, y}, {width, height}) do
validate_position_within_size({x, y, 0}, {width, height, 1})
end
end
defimpl Collectable, for: Himamo.Matrix do
alias Himamo.Matrix
def into(original) do
{
original,
fn
matrix, {:cont, {pos, entry}} -> Matrix.put(matrix, pos, entry)
matrix, :done -> matrix
_, :halt -> :ok
end
}
end
end
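# Usage sketch combining new/1, Collectable and get/2 (values are
# illustrative):
#
#     entries = for x <- 0..1, y <- 0..1, do: {{x, y}, x * 10 + y}
#     matrix = Enum.into(entries, Himamo.Matrix.new({2, 2}))
#     Himamo.Matrix.get(matrix, {1, 1})
#     #=> 11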
|
lib/himamo/matrix.ex
| 0.888369
| 0.814238
|
matrix.ex
|
starcoder
|
defmodule Cards do
@moduledoc """
Documentation for `Cards` module which is used for:
1. Creating a hand/deck of cards.
2. Shuffling a hand/deck cards.
3. Saving a hand/deck.
4. Loading a hand/deck.
5. Checking whether a hand/deck contains a specific card.
"""
defp create_deck_meh() do
suits = ["Spades", "Hearts", "Clubs", "Diamonds"]
values = ["Ace", "Two", "Three", "Four", "Five", "Six", "Seven", "Eight", "Nine", "Ten", "Jack", "Queen", "King"]
# Each `for` comprehension returns a list, so nesting one comprehension inside
# another produces nested lists. List.flatten/1 flattens them recursively;
# this is not the best solution (see create_deck/0 below).
cards = for suit <- suits do
for value <- values do
"#{value} of #{suit}"
end
end
List.flatten(cards)
end
@doc """
Creates a 52 card deck.
## Examples
iex> deck = Cards.create_deck()
iex> deck
["Ace of Spades", "Two of Spades", "Three of Spades", "Four of Spades",
"Five of Spades", "Six of Spades", "Seven of Spades", "Eight of Spades",
"Nine of Spades", "Ten of Spades", "Jack of Spades", "Queen of Spades",
"King of Spades", "Ace of Hearts", "Two of Hearts", "Three of Hearts",
"Four of Hearts", "Five of Hearts", "Six of Hearts", "Seven of Hearts",
"Eight of Hearts", "Nine of Hearts", "Ten of Hearts", "Jack of Hearts",
"Queen of Hearts", "King of Hearts", "Ace of Clubs", "Two of Clubs",
"Three of Clubs", "Four of Clubs", "Five of Clubs", "Six of Clubs",
"Seven of Clubs", "Eight of Clubs", "Nine of Clubs", "Ten of Clubs",
"Jack of Clubs", "Queen of Clubs", "King of Clubs", "Ace of Diamonds",
"Two of Diamonds", "Three of Diamonds", "Four of Diamonds", "Five of Diamonds",
"Six of Diamonds", "Seven of Diamonds", "Eight of Diamonds", "Nine of Diamonds",
"Ten of Diamonds", "Jack of Diamonds", "Queen of Diamonds", "King of Diamonds"]
"""
def create_deck() do
suits = ["Spades", "Hearts", "Clubs", "Diamonds"]
values = ["Ace", "Two", "Three", "Four", "Five", "Six", "Seven", "Eight", "Nine", "Ten", "Jack", "Queen", "King"]
# Multiple generators in one comprehension avoid the nesting; the rightmost
# generator (value) cycles fastest.
for suit <- suits, value <- values do
"#{value} of #{suit}"
end
end
@doc """
Shuffles a deck of cards.
"""
def shuffle(deck) do
# Data is immutable, so Enum.shuffle/1 returns a new list rather than
# mutating the argument.
# Function signatures are written as name/arity, e.g. shuffle/1.
# The last expression is returned implicitly; Elixir has no `return` keyword.
Enum.shuffle(deck)
end
@doc """
Checks if a card is contained within a deck.
## Examples
iex> deck = Cards.create_deck()
iex> contained = Cards.contains?(deck, "Ace of Spades")
iex> contained
true
"""
def contains?(deck, card) do
# Standard-library modules such as Enum are available without being imported
# or installed separately.
# Appending ? to functions that return booleans is an Elixir convention.
Enum.member?(deck, card)
end
@doc """
Deals a hand from the deck, where `count` is the number of cards in the hand.
## Examples
iex> deck = Cards.create_deck()
iex> {hand, _rest} = Cards.deal(deck, 2)
iex> hand
["Ace of Spades", "Two of Spades"]
"""
def deal(deck, count) do
# Returns a tuple
Enum.split(deck, count)
end
@doc """
Saves cards into a file with `filename` specified.
"""
def save(deck, filename) do
# File.write/2 takes a filename and chardata, so the deck is first serialized
# with Erlang's :erlang.term_to_binary/1.
chardata = :erlang.term_to_binary(deck)
# case pattern-matches on the return value of File.write/2
case File.write(filename, chardata) do
:ok -> "#{filename} written successfully."
{:error, _reason} -> "#{filename} not written."
end
end
@doc """
Loads cards stored in a file with `filename` specified.
"""
def load(filename) do
case File.read(filename) do
{:ok, binary} -> :erlang.binary_to_term binary
{:error, _reason} -> "#{filename} not loaded successfully."
end
end
@doc """
Creates a randomized hand containing `hand_size` cards.
"""
def create_hand(hand_size) do
# Using the |> operator replaces the following intermediate assignments:
# cards = Cards.create_deck()
# shuffled = Cards.shuffle(cards)
# hand = Cards.deal(shuffled, hand_size)
# Note how the previous output is piped to the next function as the first argument
Cards.create_deck
|> Cards.shuffle
|> Cards.deal(hand_size)
end
def hello() do
:world
end
end
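# Round-trip sketch (writes a file into the working directory):
#
#     deck = Cards.create_deck()
#     Cards.save(deck, "my_deck")
#     #=> "my_deck written successfully."
#     Cards.load("my_deck") == deck
#     #=> true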
|
cards/lib/cards.ex
| 0.895552
| 0.74637
|
cards.ex
|
starcoder
|
defmodule Kiq.Job do
@moduledoc """
Used to construct a Sidekiq compatible job.
The job complies with the [Sidekiq Job Format][1], and contains the following
fields:
* `jid` - A 12 byte random number as a 24 character hex encoded string
* `pid` — Process id of the worker running the job, defaults to the calling process
* `class` - The worker class which is responsible for executing the job
* `args` - The arguments which should be passed to the worker
* `queue` - The queue where a job should be enqueued, defaults to "default"
* `at` — A time at or after which a scheduled job should be performed, in Unix format
* `created_at` - When the job was created, in Unix format
* `enqueued_at` - When the job was enqueued, in Unix format
Retry & Failure Fields:
* `retry` - Tells the Kiq worker to retry the enqueued job
* `retry_count` - The number of times we've retried so far
* `failed_at` - The first time the job failed, in Unix format
* `retried_at` — The last time the job was retried, in Unix format
* `error_message` — The message from the last exception
* `error_class` — The exception module (or class, in Sidekiq terms)
* `backtrace` - The number of lines of error backtrace to store. Present only
for compatibility with Sidekiq; this field is ignored.
Expiration Fields:
* `expires_in` - How long to keep a job before expiring it and skipping
execution, in milliseconds
Unique Fields:
* `unique_for` - How long uniqueness will be enforced for a job, in
milliseconds
* `unique_until` - Allows controlling when a unique lock will be removed,
valid options are "start" and "success".
* `unlocks_at` - When the job will be unlocked, in milliseconds
* `unique_token` - The uniqueness token calculated from class, queue and args
[1]: https://github.com/mperham/sidekiq/wiki/Job-Format
"""
alias Kiq.{Encoder, Timestamp}
@type t :: %__MODULE__{
jid: binary(),
pid: pid(),
class: binary(),
args: list(any),
queue: binary(),
retry: boolean() | non_neg_integer(),
retry_count: non_neg_integer(),
dead: boolean(),
at: nil | Timestamp.t(),
created_at: Timestamp.t(),
enqueued_at: Timestamp.t(),
failed_at: Timestamp.t(),
retried_at: Timestamp.t(),
error_message: binary(),
error_class: binary(),
expires_in: pos_integer(),
expires_at: nil | Timestamp.t(),
unique_for: pos_integer(),
unique_until: binary(),
unique_token: binary(),
unlocks_at: nil | Timestamp.t()
}
@enforce_keys ~w(jid class)a
defstruct jid: nil,
pid: nil,
class: nil,
args: [],
queue: "default",
retry: true,
retry_count: 0,
dead: true,
at: nil,
created_at: nil,
enqueued_at: nil,
failed_at: nil,
retried_at: nil,
error_message: nil,
error_class: nil,
expires_in: nil,
expires_at: nil,
unique_for: nil,
unique_token: nil,
unique_until: nil,
unlocks_at: nil
@doc """
Build a new `Job` struct with all dynamic arguments populated.
iex> Kiq.Job.new(%{class: "Worker"}) |> Map.take([:class, :args, :queue])
%{class: "Worker", args: [], queue: "default"}
To fit more naturally with Elixir the `class` argument can be passed as `module`:
iex> Kiq.Job.new(module: "Worker").class
"Worker"
Only "start" and "success" are allowed as values for `unique_until`. Any
other value will be nullified:
iex> Kiq.Job.new(class: "A", unique_until: "start").unique_until
"start"
iex> Kiq.Job.new(class: "A", unique_until: :start).unique_until
"start"
iex> Kiq.Job.new(class: "A", unique_until: "whenever").unique_until
nil
"""
@spec new(args :: map() | Keyword.t()) :: t()
def new(%{class: class} = args) do
args =
args
|> Map.put(:class, to_string(class))
|> Map.put_new(:jid, random_jid())
|> Map.put_new(:created_at, Timestamp.unix_now())
|> coerce_unique_until()
struct!(__MODULE__, args)
end
def new(%{module: module} = args) do
args
|> Map.delete(:module)
|> Map.put(:class, module)
|> new()
end
def new(args) when is_list(args) do
args
|> Enum.into(%{})
|> new()
end
@doc """
Extract a fully qualified worker module from a job.
# Examples
iex> Kiq.Job.to_module(Kiq.Job.new(module: "Kiq.Job"))
Kiq.Job
"""
@spec to_module(job :: t()) :: module()
def to_module(%__MODULE__{class: class}) do
class
|> String.split(".")
|> Module.safe_concat()
end
@doc """
Convert a job into a map suitable for encoding.
For Sidekiq compatibility and encodeability some values are rejected.
Specifically, the `retry_count` value is dropped when it is 0.
"""
@spec to_map(job :: t()) :: map()
def to_map(%__MODULE__{} = job) do
job
|> Map.from_struct()
|> Map.drop([:pid])
|> Enum.reject(fn {_key, val} -> is_nil(val) end)
|> Enum.reject(fn {key, val} -> key == :retry_count and val == 0 end)
|> Enum.into(%{})
end
@doc """
Encode a job as JSON.
During the encoding process any keys with `nil` values are removed.
"""
@spec encode(job :: t()) :: binary() | {:error, Exception.t()}
def encode(%__MODULE__{} = job) do
job
|> to_map()
|> Encoder.encode()
end
@doc """
Decode an encoded job from JSON into a Job struct.
All job keys are atomized except for those within arguments.
# Example
iex> job = Kiq.Job.decode(~s({"class":"MyWorker","args":[1,2]}))
...> Map.take(job, [:class, :args])
%{class: "MyWorker", args: [1, 2]}
iex> job = Kiq.Job.decode(~s({"class":"MyWorker","args":{"a":1}}))
...> Map.get(job, :args)
%{"a" => 1}
"""
@spec decode(input :: binary()) :: t() | {:error, Exception.t()}
def decode(input) when is_binary(input) do
with {:ok, decoded} <- Jason.decode(input) do
decoded
|> Map.new(fn {key, val} -> {String.to_existing_atom(key), val} end)
|> new()
end
end
@doc false
@spec random_jid(size :: pos_integer()) :: binary()
def random_jid(size \\ 12) do
size
|> :crypto.strong_rand_bytes()
|> Base.encode16(case: :lower)
end
# Uniqueness & Expiration
@doc false
@spec apply_expiry(job :: t()) :: t()
def apply_expiry(%__MODULE__{expires_in: expires_in} = job) when is_integer(expires_in) do
%{job | expires_at: future_at(job.at, expires_in)}
end
def apply_expiry(job), do: job
@doc false
@spec apply_unique(job :: t()) :: t()
def apply_unique(%__MODULE__{unique_for: unique_for} = job) when is_integer(unique_for) do
%{job | unlocks_at: future_at(job.at, unique_for), unique_token: unique_token(job)}
end
def apply_unique(job), do: job
# Helpers
defp future_at(nil, increment), do: future_at(Timestamp.unix_now(), increment)
defp future_at(now, increment), do: now + increment / 1_000.0
defp unique_token(%__MODULE__{args: args, class: class, queue: queue}) do
[class, queue, args]
|> Enum.map(&inspect/1)
|> sha_hash()
|> Base.encode16(case: :lower)
end
defp coerce_unique_until(%{unique_until: :start} = map), do: %{map | unique_until: "start"}
defp coerce_unique_until(%{unique_until: "start"} = map), do: map
defp coerce_unique_until(%{unique_until: _} = map), do: %{map | unique_until: nil}
defp coerce_unique_until(map), do: map
defp sha_hash(value), do: :crypto.hash(:sha, value)
end
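# Sketch of the uniqueness helpers above (times are illustrative):
#
#     job =
#       Kiq.Job.new(class: "Worker", unique_for: 60_000)
#       |> Kiq.Job.apply_unique()
#
#     # job.unlocks_at is roughly Timestamp.unix_now() + 60.0 seconds and
#     # job.unique_token is a lowercase hex SHA1 over class, queue and args.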
|
lib/kiq/job.ex
| 0.886353
| 0.615319
|
job.ex
|
starcoder
|
defmodule Adapter.Compile do
@moduledoc false
@doc false
@spec generate(term, Adapter.Utility.behavior(), Adapter.Utility.config()) :: term
def generate(code, callbacks, config)
def generate(code, callbacks, config) do
%{default: default, error: error, log: log, random: random, validate: validate} = config
simple_callbacks = Enum.map(callbacks, fn {k, %{args: a}} -> {k, Enum.count(a)} end)
stubs =
if default do
generate_compiled_delegates(callbacks, default)
else
generate_stubs(callbacks, Adapter.Utility.generate_error(error, random))
end
Code.compile_quoted(
quote do
defmodule unquote(Module.concat(Adapter, config.adapter)) do
@moduledoc false
unquote(stubs)
end
end
)
quote do
unquote(code)
unquote(generate_compiled_delegates(callbacks, Module.concat(Adapter, config.adapter)))
@doc false
@spec __adapter__ :: module | nil
def __adapter__, do: unquote(nil)
@doc ~S"""
Configure a new adapter implementation.
## Example
```elixir
iex> configure(Fake)
:ok
```
"""
@spec configure(module) :: :ok
def configure(adapter) do
with false <- __adapter__() == adapter && :ok,
:ok <-
unquote(
Adapter.Utility.generate_validation(
validate,
callbacks,
Macro.var(:adapter, __MODULE__)
)
),
:ok <-
unquote(__MODULE__).recompile_module(
__MODULE__,
unquote(simple_callbacks),
adapter
) do
unquote(Adapter.Utility.generate_logger(log, Macro.var(:adapter, __MODULE__)))
:ok
end
end
end
end
@doc false
@spec recompile_module(module, [{atom, non_neg_integer()}], module) :: :ok
def recompile_module(module, callbacks, target) do
with {mod, data, file} <- :code.get_object_code(module),
{:ok, {^mod, chunks}} <- :beam_lib.chunks(data, [:abstract_code]),
{_, code} <- Keyword.get(chunks, :abstract_code) do
:code.purge(module)
:code.delete(module)
updated_code = replace_compiled_delegates(code, callbacks, target)
with {:ok, ^mod, bin} <- :compile.forms(updated_code) do
:code.load_binary(mod, file, bin)
:ok
end
else
_ ->
mod = Module.concat(Adapter, module)
:code.purge(mod)
:code.delete(mod)
Code.compile_quoted(
quote do
defmodule unquote(mod) do
@moduledoc false
unquote(regenerate_redirect(callbacks, target))
end
end
)
:ok
end
end
@spec replace_compiled_delegates(term, [{atom, non_neg_integer()}], module) :: term
defp replace_compiled_delegates(ast, callbacks, target) do
Enum.map(ast, fn
{:function, l, :__adapter__, 0, [{:clause, l, [], [], [{:atom, 0, _}]}]} ->
{:function, l, :__adapter__, 0, [{:clause, l, [], [], [{:atom, 0, target}]}]}
a = {:function, _, name, arity, [data]} ->
if {name, arity} in callbacks do
{:clause, l, vars, [],
[{:call, l, {:remote, l, {:atom, 0, _target}, {:atom, l, function}}, vars}]} = data
{:function, l, name, arity,
[
{:clause, l, vars, [],
[{:call, l, {:remote, l, {:atom, 0, target}, {:atom, l, function}}, vars}]}
]}
else
a
end
a ->
a
end)
end
@spec generate_compiled_delegates(Adapter.Utility.behavior(), module) :: term
defp generate_compiled_delegates(callbacks, target) do
Enum.reduce(callbacks, nil, fn {key, %{spec: s, doc: d, args: a}}, acc ->
vars = Enum.map(a, &Macro.var(&1, nil))
quote do
unquote(acc)
unquote(d)
unquote(s)
def unquote(key)(unquote_splicing(vars))
defdelegate unquote(key)(unquote_splicing(vars)), to: unquote(target)
end
end)
end
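# For a callback entry like {:foo, %{args: [:a, :b], spec: ..., doc: ...}}
# the reduce above effectively emits (a sketch):
#
#     def foo(a, b)
#     defdelegate foo(a, b), to: Target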
@spec generate_stubs(Adapter.Utility.behavior(), term) :: term
defp generate_stubs(callbacks, result) do
Enum.reduce(callbacks, nil, fn {key, %{spec: s, doc: d, args: a}}, acc ->
quote do
unquote(acc)
unquote(d)
unquote(s)
def unquote(key)(unquote_splicing(Enum.map(a, &Macro.var(&1, nil))))
def unquote(key)(unquote_splicing(Enum.map(a, &Macro.var(:"_#{&1}", nil)))),
do: unquote(result)
end
end)
end
@spec regenerate_redirect([{atom, non_neg_integer()}], module) :: term
defp regenerate_redirect(callbacks, target) do
Enum.reduce(callbacks, nil, fn {key, arity}, acc ->
vars = if arity > 0, do: Enum.map(1..arity, &Macro.var(:"arg#{&1}", nil)), else: []
quote do
unquote(acc)
defdelegate unquote(key)(unquote_splicing(vars)), to: unquote(target)
end
end)
end
end
|
lib/adapter/compile.ex
| 0.76947
| 0.501526
|
compile.ex
|
starcoder
|
defmodule ExDockerBuild.DockerfileParser do
@comment ~r/^\s*#/
@continuation ~r/^.*\\\s*$/
@instruction ~r/^\s*(\w+)\s+(.*)$/
@spec parse_file!(Path.t()) :: list(String.t()) | no_return()
def parse_file!(path) do
path
|> File.read!()
|> do_parse()
end
@spec parse_content!(String.t()) :: list(String.t()) | no_return()
def parse_content!(content), do: do_parse(content)
@spec do_parse(String.t()) :: list(String.t())
defp do_parse(content) do
{parsed_lines, _} =
content
|> String.split("\n")
|> Enum.reduce({[], false}, fn line, {acc, continuation?} ->
case parse_line(line, continuation?) do
nil ->
{acc, continuation?}
{:continue, _} = result ->
{join(result, acc), true}
{:end, _} = result ->
{join(result, acc), false}
end
end)
Enum.reverse(parsed_lines)
end
@spec parse_line(String.t(), boolean()) ::
nil
| {:continue, String.t() | {String.t(), String.t()}}
| {:end, String.t() | {String.t(), String.t()}}
defp parse_line(line, continuation?) do
line = String.trim(line)
cond do
line == "" || Regex.match?(@comment, line) ->
nil
# continuations are not instructions
continuation? ->
if Regex.match?(@continuation, line) do
# remove trailing continuation (\)
{:continue, String.slice(line, 0..-2)}
else
{:end, line}
end
true ->
# line: "RUN set -xe \\"
[command, value] = Regex.run(@instruction, line, capture: :all_but_first)
# ["RUN set -xe \\", "RUN", "set -xe \\"]
if Regex.match?(@continuation, line) do
# remove trailing continuation (\)
{:continue, {command, String.slice(value, 0..-2)}}
else
{:end, {command, value}}
end
end
end
@spec join(parsed_line, list()) :: list()
when parsed_line:
{:continue, String.t() | {String.t(), String.t()}}
| {:end, String.t() | {String.t(), String.t()}}
# first line - accumulator empty
defp join({:continue, _} = val, []) do
[val]
end
# a continuation of a previous continuation - need to join lines
defp join({:continue, val}, [{:continue, {prev_command, prev_value}} | rest]) do
[{:continue, {prev_command, prev_value <> " " <> val}} | rest]
end
# a new continuation - other continuation already finished
defp join({:continue, _} = val, acc) do
[val | acc]
end
# first line - single instruction
defp join({:end, val}, []) do
[val]
end
# the end of a continuation
defp join({:end, val}, [{:continue, {prev_command, prev_value}} | rest]) do
[{prev_command, prev_value <> " " <> val} | rest]
end
# single instruction
defp join({:end, val}, acc) do
[val | acc]
end
end
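# Parsing sketch. Given this Dockerfile content:
#
#     FROM elixir:1.9
#     RUN apt-get update &&\
#         apt-get install -y git
#
# parse_content!/1 folds the continuation into a single instruction:
#
#     [{"FROM", "elixir:1.9"}, {"RUN", "apt-get update && apt-get install -y git"}]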
|
lib/ex_docker_build/dockerfile_parser.ex
| 0.642881
| 0.433682
|
dockerfile_parser.ex
|
starcoder
|
defmodule SubscriptionsTransportWS.Socket do
@external_resource "README.md"
@moduledoc @external_resource
|> File.read!()
|> String.split("<!-- MDOC !-->")
|> Enum.fetch!(1)
require Logger
alias SubscriptionsTransportWS.OperationMessage
alias __MODULE__
@typedoc """
When using this module there are several options available
* `json_module` - defaults to Jason
* `schema` - refers to the Absinthe schema (required)
* `pipeline` - refers to the Absinthe pipeline to use, defaults to `{SubscriptionsTransportWS.Socket, :default_pipeline}`
* `keep_alive` - period in ms between keep-alive messages sent over the socket, defaults to 10000
* `ping_interval` - period in ms between pings sent to the client; the client should respond with a pong to keep the connection alive
## Example
```elixir
use SubscriptionsTransportWS.Socket, schema: App.GraphqlSchema, keep_alive: 1000
```
"""
defstruct [
:handler,
:pubsub_server,
:endpoint,
:json_module,
keep_alive: 10_000,
serializer: Phoenix.Socket.V2.JSONSerializer,
operations: %{},
assigns: %{},
ping_interval: 30_000
]
@type t :: %Socket{
assigns: map,
endpoint: atom,
handler: atom,
pubsub_server: atom,
serializer: atom,
json_module: atom,
operations: map,
keep_alive: integer | nil,
ping_interval: integer | nil
}
@type control() ::
:ping
| :pong
@type opcode() ::
:text
| :binary
| control()
@type message() :: binary()
@type frame() :: {opcode(), message()}
@initial_keep_alive_wait 1
@doc """
Receives the socket params and authenticates the connection.
## Socket params and assigns
Socket params are passed from the client and can
be used to verify and authenticate a user. After
verification, you can put default assigns into
the socket that will be set for all channels, ie
{:ok, assign(socket, :user_id, verified_user_id)}
To deny connection, return `:error`.
See `Phoenix.Token` documentation for examples in
performing token verification on connect.
"""
@callback connect(params :: map, Socket.t()) :: {:ok, Socket.t()} | :error
@callback connect(params :: map, Socket.t(), connect_info :: map) :: {:ok, Socket.t()} | :error
@doc """
Callback for the `connection_init` message.
The client sends this message once the plain WebSocket connection is
established, to start communicating with the server.
In the `subscriptions-transport-ws` protocol this is usually used to
set the user on the socket.
Should return `{:ok, socket}` on success, and `{:error, payload}` to deny.
Receives a map of `connection_params`, see
* connectionParams in [Apollo javascript client](https://github.com/apollographql/subscriptions-transport-ws/blob/06b8eb81ba2b6946af4faf0ae6369767b31a2cc9/src/client.ts#L62)
* connectingPayload in [Apollo iOS client](https://github.com/apollographql/apollo-ios/blob/ca023e5854b5b78529eafe9006c6ce1e3c2db539/docs/source/api/ApolloWebSocket/classes/WebSocketTransport.md)
or similar in other clients.
"""
@callback gql_connection_init(connection_params :: map, Socket.t()) ::
{:ok, Socket.t()} | {:error, any}
@callback handle_message(params :: term(), Socket.t()) ::
{:ok, Socket.t()}
| {:push, frame(), Socket.t()}
| {:stop, term(), Socket.t()}
@optional_callbacks connect: 2, connect: 3, handle_message: 2
defmacro __using__(opts) do
quote do
import SubscriptionsTransportWS.Socket
alias SubscriptionsTransportWS.Socket
@phoenix_socket_options unquote(opts)
@behaviour Phoenix.Socket.Transport
@behaviour SubscriptionsTransportWS.Socket
@doc false
@impl true
def child_spec(opts) do
Socket.__child_spec__(
__MODULE__,
opts,
@phoenix_socket_options
)
end
@doc false
@impl true
def connect(state),
do:
Socket.__connect__(
__MODULE__,
state,
@phoenix_socket_options
)
@doc false
@impl true
def init(socket), do: Socket.__init__(socket)
@doc false
@impl true
def handle_in(message, socket),
do: Socket.__in__(message, socket)
@doc false
@impl true
def handle_info(message, socket),
do: Socket.__info__(message, socket)
@impl true
def handle_control(message, socket),
do: Socket.__control__(message, socket)
@doc false
@impl true
def terminate(reason, socket),
do: Socket.__terminate__(reason, socket)
end
end
def __child_spec__(module, _opts, _socket_options) do
# Nothing to do here, so noop.
%{id: {__MODULE__, module}, start: {Task, :start_link, [fn -> :ok end]}, restart: :transient}
end
def __connect__(module, socket, socket_options) do
json_module = Keyword.get(socket_options, :json_module, Jason)
schema = Keyword.get(socket_options, :schema)
pipeline = Keyword.get(socket_options, :pipeline)
keep_alive = Keyword.get(socket_options, :keep_alive)
ping_interval = Keyword.get(socket_options, :ping_interval)
case user_connect(
module,
socket.endpoint,
socket.params,
socket.connect_info,
json_module,
keep_alive,
ping_interval
) do
{:ok, socket} ->
absinthe_config = Map.get(socket.assigns, :absinthe, %{})
opts =
absinthe_config
|> Map.get(:opts, [])
|> Keyword.update(:context, %{pubsub: socket.endpoint}, fn context ->
Map.put_new(context, :pubsub, socket.endpoint)
end)
absinthe_config =
put_in(absinthe_config[:opts], opts)
|> Map.update(:schema, schema, & &1)
absinthe_config =
Map.put(absinthe_config, :pipeline, pipeline || {__MODULE__, :default_pipeline})
socket = socket |> assign(:absinthe, absinthe_config)
{:ok, socket}
:error ->
:error
end
end
defp user_connect(
handler,
endpoint,
params,
connect_info,
json_module,
keep_alive,
ping_interval
) do
if pubsub_server = endpoint.config(:pubsub_server) do
socket = %SubscriptionsTransportWS.Socket{
handler: handler,
endpoint: endpoint,
pubsub_server: pubsub_server,
json_module: json_module,
keep_alive: keep_alive,
ping_interval: ping_interval
}
connect_result =
if function_exported?(handler, :connect, 3) do
handler.connect(params, socket, connect_info)
else
handler.connect(params, socket)
end
connect_result
else
Logger.error("""
The :pubsub_server was not configured for endpoint #{inspect(endpoint)}.
Make sure to start a PubSub process in your application supervision tree:
{Phoenix.PubSub, [name: YOURAPP.PubSub, adapter: Phoenix.PubSub.PG2]}
And then list it in your endpoint config:
pubsub_server: YOURAPP.PubSub
""")
:error
end
end
@doc """
Adds key-value pairs to socket assigns.
A single key-value pair may be passed, or a keyword list or map of
assigns may be provided, to be merged into the existing socket
assigns.
## Examples
iex> assign(socket, :name, "Elixir")
iex> assign(socket, name: "Elixir", logo: "💧")
"""
def assign(socket, key, value) do
assign(socket, [{key, value}])
end
def assign(socket, attrs) when is_map(attrs) or is_list(attrs) do
%{socket | assigns: Map.merge(socket.assigns, Map.new(attrs))}
end
@doc """
Sets the options for a given GraphQL document execution.
## Examples
iex> SubscriptionsTransportWS.Socket.put_options(socket, context: %{current_user: user})
%SubscriptionsTransportWS.Socket{}
"""
def put_options(socket, opts) do
absinthe_assigns =
socket.assigns
|> Map.get(:absinthe, %{})
absinthe_assigns =
absinthe_assigns
|> Map.put(:opts, Keyword.merge(Map.get(absinthe_assigns, :opts, []), opts))
assign(socket, :absinthe, absinthe_assigns)
end
@doc """
Adds key-value pairs into Absinthe context.
## Examples
iex> Socket.assign_context(socket, current_user: user)
%Socket{}
"""
def assign_context(%Socket{assigns: %{absinthe: absinthe}} = socket, context) do
context =
absinthe
|> Map.get(:opts, [])
|> Keyword.get(:context, %{})
|> Map.merge(Map.new(context))
put_options(socket, context: context)
end
def assign_context(socket, assigns) do
put_options(socket, context: Map.new(assigns))
end
@doc """
Same as `assign_context/2` except one key-value pair is assigned.
"""
def assign_context(socket, key, value) do
assign_context(socket, [{key, value}])
end
@doc false
def __init__(state) do
{:ok, state}
end
@doc false
def __in__({text, _opts}, socket) do
message = socket.json_module.decode!(text)
message = OperationMessage.from_map(message)
handle_message(socket, message)
end
def __control__({_, opcode: :ping}, socket), do: {:reply, :ok, {:pong, "pong"}, socket}
def __control__({_, opcode: :pong}, socket), do: {:ok, socket}
@doc false
def __info__(:keep_alive, socket) do
reply =
%OperationMessage{type: "ka"}
|> OperationMessage.as_json()
|> socket.json_module.encode!
Process.send_after(self(), :keep_alive, socket.keep_alive)
{:push, {:text, reply}, socket}
end
def __info__(:ping, socket) do
Process.send_after(self(), :ping, socket.ping_interval)
{:push, {:ping, "ping"}, socket}
end
def __info__({:socket_push, :text, message}, socket) do
message = socket.serializer.decode!(message, opcode: :text)
id = Map.get(socket.operations, message.topic)
reply =
%OperationMessage{type: "data", id: id, payload: %{data: message.payload["result"]["data"]}}
|> OperationMessage.as_json()
|> socket.json_module.encode!
{:push, {:text, reply}, socket}
end
def __info__(message, socket) do
if function_exported?(socket.handler, :handle_message, 2) do
socket.handler.handle_message(message, socket)
else
{:ok, socket}
end
end
@doc false
def __terminate__(_reason, _state) do
:ok
end
@doc """
Default pipeline to use for Absinthe graphql document execution
"""
def default_pipeline(schema, options) do
schema
|> Absinthe.Pipeline.for_document(options)
end
defp handle_message(socket, %{type: "connection_init"} = message) do
case socket.handler.gql_connection_init(message, socket) do
{:ok, socket} ->
if socket.keep_alive do
Process.send_after(self(), :keep_alive, @initial_keep_alive_wait)
end
if socket.ping_interval do
Process.send_after(self(), :ping, socket.ping_interval)
end
reply =
%OperationMessage{type: "connection_ack"}
|> OperationMessage.as_json()
|> socket.json_module.encode!
{:reply, :ok, {:text, reply}, socket}
{:error, payload} ->
reply =
%OperationMessage{type: "connection_error", payload: payload}
|> OperationMessage.as_json()
|> socket.json_module.encode!
{:reply, :ok, {:text, reply}, socket}
end
end
defp handle_message(socket, %{type: "stop", id: id}) do
doc_id =
Enum.find_value(socket.operations, fn {key, op_id} ->
if id == op_id, do: key
end)
reply =
%OperationMessage{type: "complete", id: id}
|> OperationMessage.as_json()
|> socket.json_module.encode!
case doc_id do
nil ->
{:reply, :ok, {:text, reply}, socket}
doc_id ->
pubsub =
socket.assigns
|> Map.get(:absinthe, %{})
|> Map.get(:opts, [])
|> Keyword.get(:context, %{})
|> Map.get(:pubsub, socket.endpoint)
Phoenix.PubSub.unsubscribe(socket.pubsub_server, doc_id)
Absinthe.Subscription.unsubscribe(pubsub, doc_id)
socket = %{socket | operations: Map.delete(socket.operations, doc_id)}
{:reply, :ok, {:text, reply}, socket}
end
end
defp handle_message(socket, %{type: "start", payload: payload, id: id}) do
config = socket.assigns[:absinthe]
case extract_variables(payload) do
variables when is_map(variables) ->
opts = Keyword.put(config.opts, :variables, variables)
query = Map.get(payload, "query", "")
Absinthe.Logger.log_run(:debug, {query, config.schema, [], opts})
{reply, socket} = run_doc(socket, query, config, opts, id)
Logger.debug(fn ->
"""
-- Absinthe Phoenix Reply --
#{inspect(reply)}
----------------------------
"""
end)
if reply != :noreply do
case reply do
{:ok, operation_message} ->
{:reply, :ok,
{:text, OperationMessage.as_json(operation_message) |> socket.json_module.encode!},
socket}
end
else
{:ok, socket}
end
_ ->
reply = %OperationMessage{
type: "error",
id: id,
payload: %{errors: "Could not parse variables"}
}
{:reply, :ok, {:text, OperationMessage.as_json(reply) |> socket.json_module.encode!},
socket}
end
end
defp handle_message(socket, %{type: "connection_terminate"}) do
Enum.each(socket.operations, fn {doc_id, _} ->
pubsub =
socket.assigns
|> Map.get(:absinthe, %{})
|> Map.get(:opts, [])
|> Keyword.get(:context, %{})
|> Map.get(:pubsub, socket.endpoint)
Phoenix.PubSub.unsubscribe(socket.pubsub_server, doc_id)
Absinthe.Subscription.unsubscribe(pubsub, doc_id)
end)
socket = %{socket | operations: %{}}
{:ok, socket}
end
defp extract_variables(payload) do
case Map.get(payload, "variables", %{}) do
nil -> %{}
map -> map
end
end
defp run_doc(socket, query, config, opts, id) do
case run(query, config[:schema], config[:pipeline], opts) do
{:ok, %{"subscribed" => topic}, context} ->
:ok =
Phoenix.PubSub.subscribe(
socket.pubsub_server,
topic,
metadata: {:fastlane, self(), socket.serializer, []}
)
socket = put_options(socket, context: context)
socket = %{socket | operations: Map.put(socket.operations, topic, id)}
{:noreply, socket}
{:ok, %{data: data}, context} ->
socket = put_options(socket, context: context)
reply = %OperationMessage{
type: "data",
id: id,
payload: %{data: data}
}
{{:ok, reply}, socket}
{:ok, %{errors: errors}, context} ->
socket = put_options(socket, context: context)
reply = %OperationMessage{
type: "data",
id: id,
payload: %{data: %{}, errors: errors}
}
{{:ok, reply}, socket}
{:error, error} ->
reply = %OperationMessage{
type: "error",
id: id,
payload: %{errors: error}
}
{{:ok, reply}, socket}
end
end
defp run(document, schema, pipeline, options) do
{module, fun} = pipeline
case Absinthe.Pipeline.run(document, apply(module, fun, [schema, options])) do
{:ok, %{result: result, execution: res}, _phases} ->
{:ok, result, res.context}
{:error, msg, _phases} ->
{:error, msg}
end
end
end
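# Message flow handled by this socket (subscriptions-transport-ws; JSON frames
# shown schematically):
#
#     client -> {"type": "connection_init", "payload": {...}}
#     server -> {"type": "connection_ack"}   then periodic {"type": "ka"}
#     client -> {"type": "start", "id": "1", "payload": {"query": ..., "variables": ...}}
#     server -> {"type": "data", "id": "1", "payload": {"data": ...}}   per result
#     client -> {"type": "stop", "id": "1"}
#     server -> {"type": "complete", "id": "1"}
#     client -> {"type": "connection_terminate"}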
|
lib/subscriptions_transport_ws/socket.ex
| 0.871146
| 0.500183
|
socket.ex
|
starcoder
|
defmodule Timex.Format.Time.Formatters.Default do
@moduledoc """
Handles formatting timestamp values as ISO 8601 durations as described below.
Durations are represented by the format P[n]Y[n]M[n]DT[n]H[n]M[n]S.
In this representation, the [n] is replaced by the value for each of the
date and time elements that follow the [n]. Leading zeros are not required,
but the maximum number of digits for each element should be agreed to by the
communicating parties. The capital letters P, Y, M, W, D, T, H, M, and S are
designators for each of the date and time elements and are not replaced.
- P is the duration designator (historically called "period") placed at the start of the duration representation.
- Y is the year designator that follows the value for the number of years.
- M is the month designator that follows the value for the number of months.
- D is the day designator that follows the value for the number of days.
- T is the time designator that precedes the time components of the representation.
- H is the hour designator that follows the value for the number of hours.
- M is the minute designator that follows the value for the number of minutes.
- S is the second designator that follows the value for the number of seconds.
"""
use Timex.Format.Time.Formatter
alias Timex.Translator
@minute 60
@hour @minute * 60
@day @hour * 24
@week @day * 7
@month @day * 30
@year @day * 365
@doc """
Return a human readable string representing the time interval.
## Examples
iex> {1435, 180354, 590264} |> #{__MODULE__}.format
"P45Y6M5DT21H12M34.590264S"
iex> {0, 65, 0} |> #{__MODULE__}.format
"PT1M5S"
"""
@spec format(Types.timestamp) :: String.t | {:error, term}
def format({_,_,_} = timestamp), do: lformat(timestamp, Translator.default_locale)
def format(_), do: {:error, :invalid_timestamp}
def lformat({_,_,_} = timestamp, _locale) do
timestamp
|> deconstruct
|> do_format
end
def lformat(_, _locale), do: {:error, :invalid_timestamp}
defp do_format(components), do: do_format(components, <<?P>>)
defp do_format([], str), do: str
defp do_format([{unit,_} = component|rest], str) do
cond do
unit in [:hours, :minutes, :seconds] && String.contains?(str, "T") ->
do_format(rest, format_component(component, str))
unit in [:hours, :minutes, :seconds] ->
do_format(rest, format_component(component, str <> "T"))
true ->
do_format(rest, format_component(component, str))
end
end
defp format_component({_, 0}, str), do: str
defp format_component({:years, y}, str), do: str <> "#{y}Y"
defp format_component({:months, m}, str), do: str <> "#{m}M"
defp format_component({:days, d}, str), do: str <> "#{d}D"
defp format_component({:hours, h}, str), do: str <> "#{h}H"
defp format_component({:minutes, m}, str), do: str <> "#{m}M"
defp format_component({:seconds, s}, str), do: str <> "#{s}S"
defp deconstruct({_, _, micro} = ts), do: deconstruct({ts |> Time.to_seconds |> trunc, micro}, [])
defp deconstruct({0, 0}, components), do: components |> Enum.reverse
defp deconstruct({seconds, us}, components) when seconds > 0 do
cond do
seconds >= @year -> deconstruct({rem(seconds, @year), us}, [{:years, div(seconds, @year)} | components])
seconds >= @month -> deconstruct({rem(seconds, @month), us}, [{:months, div(seconds, @month)} | components])
seconds >= @day -> deconstruct({rem(seconds, @day), us}, [{:days, div(seconds, @day)} | components])
seconds >= @hour -> deconstruct({rem(seconds, @hour), us}, [{:hours, div(seconds, @hour)} | components])
seconds >= @minute -> deconstruct({rem(seconds, @minute), us}, [{:minutes, div(seconds, @minute)} | components])
true -> get_fractional_seconds(seconds, us, components)
end
end
defp deconstruct({seconds, us}, components) do
get_fractional_seconds(seconds, us, components)
end
defp get_fractional_seconds(seconds, 0, components), do: deconstruct({0, 0}, [{:seconds, seconds} | components])
defp get_fractional_seconds(seconds, micro, components) when micro > 0 do
msecs = {0, 0, micro} |> Time.abs |> Time.to_milliseconds
cond do
msecs >= 1.0 -> deconstruct({0, 0}, [{:seconds, seconds + (msecs * :math.pow(10, -3))} | components])
true -> deconstruct({0, 0}, [{:seconds, seconds + (micro * :math.pow(10, -6))} | components])
end
end
end
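# Worked example: {0, 65, 0} is 65 seconds. deconstruct/1 splits it into
# [minutes: 1, seconds: 5], and do_format/2 inserts the "T" designator before
# the first time component, giving "PT1M5S" as in the doctest above.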
|
lib/format/time/formatters/default.ex
| 0.916568
| 0.738645
|
default.ex
|
starcoder
|
defmodule GameOfLife do
@moduledoc """
Game Of Life implementation in Elixir.
"""
@doc """
Runs the experiment sequentially.
"""
def run_sequentially do
run(&update_world_sequentially/1)
end
@doc """
Runs the experiment by updating every cell in a separate process.
"""
def run_with_spawn do
run(&update_world_with_spawn/1)
end
@doc """
Runs the experiment by updating every column in a separate process.
"""
def run_with_column_spawn do
run(&update_world_with_column_spawn/1)
end
@doc """
Runs the experiment.
"""
def run(update_function) do
{:ok, world} = GameOfLife.World.start_link(100)
parse_board(world)
start = Time.utc_now
for _ <- 1..1000 do
update_function.(world)
end
finish = Time.utc_now
IO.puts "Finished in #{Time.diff(finish, start, :millisecond)}"
save_board(world)
end
@doc """
Updates the cell defined by the coordinates `x` and `y` state in the `world`.
"""
def update_cell(world, world_state, x, y) do
new_state = GameOfLife.Cell.next_state(world_state, x, y)
GameOfLife.World.set(world, x, y, new_state)
end
@doc """
It calculates the next state of all cells of the world and it updates the
world sequentially.
"""
def update_world_sequentially(world) do
world_state = GameOfLife.World.state(world)
for x <- Map.keys(world_state),
y <- Map.keys(world_state[x]) do
update_cell(world, world_state, x, y)
end
end
@doc """
It calculates the next state of all cells of the world and it updates the
world by using a process for every cell.
"""
def update_world_with_spawn(world) do
world_state = GameOfLife.World.state(world)
cells_count = GameOfLife.World.cells_count(world)
parent = self()
for x <- Map.keys(world_state),
y <- Map.keys(world_state[x]) do
spawn fn ->
send(parent, {x, y, GameOfLife.Cell.next_state(world_state, x, y)})
end
end
for _ <- 1..cells_count do
receive do
{x, y, state} -> GameOfLife.World.set(world, x, y, state)
end
end
end
@doc """
It calculates the next state of all cells of the world and it updates the
world by using a process for every column.
"""
def update_world_with_column_spawn(world) do
world_state = GameOfLife.World.state(world)
columns_count = map_size(world_state)
parent = self()
for x <- Map.keys(world_state) do
spawn fn ->
for y <- Map.keys(world_state[x]) do
cell_state = GameOfLife.Cell.next_state(world_state, x, y)
GameOfLife.World.set(world, x, y, cell_state)
end
send(parent, "done")
end
end
for _ <- 1..columns_count do
receive do
"done" ->
end
end
end
defp parse_board(world) do
board = File.read!("board.csv")
GameOfLife.Board.parse(board, world)
end
defp save_board(world) do
board = GameOfLife.Board.generate(world)
File.write("result.csv", board)
end
end
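# Benchmark sketch (expects a board.csv in the working directory; the world is
# 100x100 and each run performs 1000 generations):
#
#     GameOfLife.run_sequentially()       # single process
#     GameOfLife.run_with_spawn()         # one process per cell
#     GameOfLife.run_with_column_spawn()  # one process per column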
|
gol_elixir/lib/game_of_life.ex
| 0.665411
| 0.771069
|
game_of_life.ex
|
starcoder
|
defmodule Rustler do
@moduledoc """
Provides compile-time configuration for a NIF module.
When used, Rustler expects the `:otp_app` as option.
The `:otp_app` should point to the OTP application that
the dynamic library can be loaded from.
For example:
defmodule MyNIF do
use Rustler, otp_app: :my_nif
end
This allows the module to be configured like so:
config :my_nif, MyNIF,
crate: :my_nif,
load_data: [1, 2, 3]
## Configuration options
* `:cargo` - Specify how to invoke the Rust compiler. Options are:
- `:system` (default) - use `cargo` from the system (must be in `$PATH`)
- `{:rustup, <version>}` - use `rustup` to specify which channel to use.
Available options include: `:stable`, `:beta`, `:nightly`, or a string
which specifies a specific version (i.e. `"1.39.0"`).
- `{:bin, "/path/to/binary"}` - provide a specific path to `cargo`.
* `:crate` - the name of the Rust crate, if different from your `otp_app`
value. If you have more than one crate in your project, you will need to
be explicit about which crate you intend to use.
* `:default_features` - a boolean to specify whether the crate's default features
should be used.
  * `:env` - Specify a list of environment variables to set when invoking the compiler.
* `:features` - a list of features to enable when compiling the crate.
* `:load_data` - Any valid term. This value is passed into the NIF when it is
loaded (default: `0`)
* `:load_from` - This option allows control over where the final artifact should be
loaded from at runtime. By default the compiled artifact is loaded from the
owning `:otp_app`'s `priv/native` directory. This option comes in handy in
combination with the `:skip_compilation?` option in order to load pre-compiled
artifacts. To override the default behaviour specify a tuple:
    `{:my_app, "priv/native/<artifact>"}`. Due to the way `:erlang.load_nif/2`
    works, the artifact should not include the file extension (e.g. `.so`, `.dll`).
* `:mode` - Specify which mode to compile the crate with. If you do not specify
    this option, a default will be provided based on `Mix.env()`:
- When `Mix.env()` is `:dev` or `:test`, the crate will be compiled in `:debug` mode.
- When `Mix.env()` is `:prod` or `:bench`, the crate will be compiled in `:release` mode.
* `:path` - By default, rustler expects the crate to be found in `native/<crate>` in the
root of the project. Use this option to override this.
  * `:skip_compilation?` - This option skips invoking the Rust compiler. Specify this option
in combination with `:load_from` to load a pre-compiled artifact.
* `:target` - Specify a compile [target] triple.
  * `:target_dir` - Override the compiler output directory.
Any of the above options can be passed directly into the `use` macro like so:
defmodule MyNIF do
use Rustler,
otp_app: :my_nif,
crate: :some_other_crate,
load_data: :something
end
[target]: https://forge.rust-lang.org/release/platform-support.html
"""
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
config = Rustler.Compiler.compile_crate(__MODULE__, opts)
for resource <- config.external_resources do
@external_resource resource
end
if config.lib do
@load_from config.load_from
@load_data config.load_data
@before_compile Rustler
end
end
end
defmacro __before_compile__(_env) do
quote do
@on_load :rustler_init
@doc false
def rustler_init do
# Remove any old modules that may be loaded so we don't get
# {:error, {:upgrade, 'Upgrade not supported by this NIF library.'}}
:code.purge(__MODULE__)
{otp_app, path} = @load_from
load_path =
otp_app
|> Application.app_dir(path)
|> to_charlist()
:erlang.load_nif(load_path, @load_data)
end
end
end
@doc false
def rustler_version, do: "0.22.0"
@doc """
Supported NIF API versions.
"""
def nif_versions,
do: [
'2.7',
'2.8',
'2.9',
'2.10',
'2.11',
'2.12',
'2.13',
'2.14',
'2.15',
'2.16'
]
end
# File: rustler_mix/lib/rustler.ex
defmodule ExAws.Utils do
@moduledoc false
def identity(x), do: x
def identity(x, _), do: x
def camelize_keys(opts) do
camelize_keys(opts, deep: false)
end
# This isn't tail recursive. However, given that the structures
# being worked upon are relatively shallow, this is ok.
def camelize_keys(opts = %{}, deep: deep) do
    opts |> Enum.reduce(%{}, fn {k, v}, map ->
if deep do
Map.put(map, camelize_key(k), camelize_keys(v, deep: true))
else
Map.put(map, camelize_key(k), v)
end
end)
end
def camelize_keys([%{} | _] = opts, deep: deep) do
Enum.map(opts, &camelize_keys(&1, deep: deep))
end
def camelize_keys(opts, depth) do
try do
opts
|> Map.new
|> camelize_keys(depth)
rescue
[Protocol.UndefinedError, ArgumentError, FunctionClauseError] -> opts
end
end
def camelize_key(key) when is_atom(key) do
key
|> Atom.to_string
|> camelize
end
def camelize_key(key) when is_binary(key) do
key |> camelize
end
def camelize(string)
def camelize(""), do: ""
def camelize(<<?_, t :: binary>>), do: camelize(t)
def camelize(<<h, t :: binary>>), do: <<to_upper_char(h)>> <> do_camelize(t)
defp do_camelize(<<?_, ?_, t :: binary>>),
do: do_camelize(<< ?_, t :: binary >>)
defp do_camelize(<<?_, h, t :: binary>>) when h in ?a..?z,
do: <<to_upper_char(h)>> <> do_camelize(t)
defp do_camelize(<<?_>>),
do: <<>>
defp do_camelize(<<?/, t :: binary>>),
do: <<?.>> <> camelize(t)
defp do_camelize(<<h, t :: binary>>),
do: <<h>> <> do_camelize(t)
defp do_camelize(<<>>),
do: <<>>
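  # Illustrative behaviour of camelize/1 (comments added for clarity, not part
  # of the original module):
  #
  #   camelize("user_name")    #=> "UserName"
  #   camelize("foo/bar_baz")  #=> "Foo.BarBaz"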
defp to_upper_char(char) when char in ?a..?z, do: char - 32
defp to_upper_char(char), do: char
def upcase(value) when is_atom(value) do
value
|> Atom.to_string
|> String.upcase
end
def upcase(value) when is_binary(value) do
String.upcase(value)
end
@seconds_0_to_1970 :calendar.datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}})
def iso_z_to_secs(<<date::binary-10, "T", time::binary-8, "Z">>) do
<<year::binary-4, "-", mon::binary-2, "-", day::binary-2>> = date
<<hour::binary-2, ":", min::binary-2, ":", sec::binary-2>> = time
year = year |> String.to_integer
mon = mon |> String.to_integer
day = day |> String.to_integer
hour = hour |> String.to_integer
min = min |> String.to_integer
sec = sec |> String.to_integer
# Seriously? Gregorian seconds but not epoch seconds?
greg_secs = :calendar.datetime_to_gregorian_seconds({{year, mon, day}, {hour, min, sec}})
greg_secs - @seconds_0_to_1970
end
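  # Illustrative example (not part of the original module):
  #
  #   iso_z_to_secs("2015-01-01T00:00:00Z") #=> 1420070400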
def now_in_seconds do
greg_secs = :os.timestamp
|> :calendar.now_to_universal_time
|> :calendar.datetime_to_gregorian_seconds
greg_secs - @seconds_0_to_1970
end
end
# File: lib/ex_aws/utils.ex
defmodule Elsa.Producer do
require Logger
@moduledoc """
Defines functions to write messages to topics based on either a list of endpoints or a named client.
All produce functions support the following options:
* An existing named client process to handle the request can be specified by the keyword option `connection:`.
* If no partition is supplied, the first (zero) partition is chosen.
* Value may be a single message or a list of messages.
* If a list of messages is supplied as the value, the key is defaulted to an empty string binary.
  * Partition can be specified by the keyword option `partition:` and an integer corresponding to a specific
    partition, or the keyword option `partitioner:` and one of the atoms `:md5` or `:random`. These atoms
    correspond to partitioner functions: `:random` uniformly selects a random partition from the total
    available topic partitions, while `:md5` assigns a partition based on an md5 hash of the message.
"""
@typedoc """
Elsa messages can take a number of different forms, including a single binary, a key/value tuple, a map
including `:key` and `:value` keys, or a list of iolists. Because Elsa supports both single messages and
lists of messages and because an iolist is indistinguishable from a list of other message types from the
perspective of the compiler, even single-message iolists must be wrapped in an additional list in order to
be produced. Internally, all messages are converted to a map before being encoded and produced.
"""
@type message :: {iodata(), iodata()} | binary() | %{key: iodata(), value: iodata()}
alias Elsa.Util
@doc """
Write the supplied message(s) to the desired topic/partition via an endpoint list and optional named client.
If no client is supplied, the default named client is chosen.
"""
@spec produce(
Elsa.endpoints() | Elsa.connection(),
Elsa.topic(),
message() | [message()] | [iolist()],
keyword()
) :: :ok | {:error, term} | {:error, String.t(), [Elsa.Message.elsa_message()]}
def produce(endpoints_or_connection, topic, messages, opts \\ [])
def produce(endpoints, topic, messages, opts) when is_list(endpoints) do
connection = Keyword.get_lazy(opts, :connection, &Elsa.default_client/0)
registry = Elsa.Supervisor.registry(connection)
case Process.whereis(registry) do
nil ->
ad_hoc_produce(endpoints, connection, topic, messages, opts)
_pid ->
produce(connection, topic, messages, opts)
end
:ok
end
def produce(connection, topic, messages, opts) when is_atom(connection) and is_list(messages) do
transformed_messages = Enum.map(messages, &transform_message/1)
do_produce_sync(connection, topic, transformed_messages, opts)
end
def produce(connection, topic, message, opts) when is_atom(connection) do
do_produce_sync(connection, topic, [transform_message(message)], opts)
end
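  # Illustrative usage sketch (the broker address and "events" topic are
  # assumptions, not part of this module):
  #
  #   Elsa.Producer.produce([localhost: 9092], "events", [{"key1", "value1"}], partition: 0)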
def ready?(connection) do
registry = Elsa.Supervisor.registry(connection)
via = Elsa.Supervisor.via_name(registry, :producer_process_manager)
Elsa.DynamicProcessManager.ready?(via)
end
defp ad_hoc_produce(endpoints, connection, topic, messages, opts) do
with {:ok, pid} <-
Elsa.Supervisor.start_link(endpoints: endpoints, connection: connection, producer: [topic: topic]) do
ready?(connection)
produce(connection, topic, messages, opts)
Process.unlink(pid)
Supervisor.stop(pid)
end
end
defp transform_message(%{key: _key, value: _value} = msg) do
msg
|> Map.update!(:key, &IO.iodata_to_binary/1)
|> Map.update!(:value, &IO.iodata_to_binary/1)
end
defp transform_message({key, value}), do: %{key: IO.iodata_to_binary(key), value: IO.iodata_to_binary(value)}
defp transform_message(message), do: %{key: "", value: IO.iodata_to_binary(message)}
defp do_produce_sync(connection, topic, messages, opts) do
Elsa.Util.with_registry(connection, fn registry ->
with {:ok, partitioner} <- get_partitioner(registry, topic, opts),
message_chunks <- create_message_chunks(partitioner, messages),
{:ok, _} <- produce_sync_while_successful(registry, topic, message_chunks) do
:ok
else
{:error, reason, messages_sent, failed_messages} -> failure_message(reason, messages_sent, failed_messages)
error_result -> error_result
end
end)
end
defp produce_sync_while_successful(registry, topic, message_chunks) do
Enum.reduce_while(message_chunks, {:ok, 0}, fn {partition, chunk}, {:ok, messages_sent} ->
Logger.debug(fn ->
"#{__MODULE__} Sending #{length(chunk)} messages to #{topic}:#{partition}"
end)
case brod_produce(registry, topic, partition, chunk) do
:ok ->
{:cont, {:ok, messages_sent + length(chunk)}}
{:error, reason} ->
failed_messages =
Enum.flat_map(message_chunks, fn {_partition, chunk} -> chunk end)
|> Enum.drop(messages_sent)
{:halt, {:error, reason, messages_sent, failed_messages}}
end
end)
end
defp create_message_chunks(partitioner, messages) do
messages
|> Enum.group_by(partitioner)
|> Enum.map(fn {partition, messages} -> {partition, Util.chunk_by_byte_size(messages)} end)
|> Enum.flat_map(fn {partition, chunks} -> Enum.map(chunks, fn chunk -> {partition, chunk} end) end)
end
defp failure_message(reason, messages_sent, failed_messages) do
reason_string =
"#{messages_sent} messages succeeded before elsa producer failed midway through due to #{inspect(reason)}"
{:error, reason_string, failed_messages}
end
defp get_partitioner(registry, topic, opts) do
Elsa.Util.with_client(registry, fn client ->
case Keyword.get(opts, :partition) do
nil ->
{:ok, partition_num} = :brod_client.get_partitions_count(client, topic)
partitioner = Keyword.get(opts, :partitioner, Elsa.Partitioner.Default) |> remap_deprecated()
{:ok, fn %{key: key} -> partitioner.partition(partition_num, key) end}
partition ->
{:ok, fn _msg -> partition end}
end
end)
end
@partitioners %{default: Elsa.Partitioner.Default, md5: Elsa.Partitioner.Md5, random: Elsa.Partitioner.Random}
defp remap_deprecated(key) when key in [:default, :md5, :random] do
mod = Map.get(@partitioners, key)
Logger.warn(fn -> ":#{key} partitioner is deprecated. Use #{mod} instead." end)
mod
end
defp remap_deprecated(key), do: key
defp brod_produce(registry, topic, partition, messages) do
producer = :"producer_#{topic}_#{partition}"
case Elsa.Registry.whereis_name({registry, producer}) do
:undefined -> {:error, "Elsa Producer for #{topic}:#{partition} not found"}
pid -> call_brod_producer(pid, messages)
end
end
defp call_brod_producer(pid, messages) do
with {:ok, call_ref} <- :brod_producer.produce(pid, "", messages),
{:ok, _partition} <- :brod_producer.sync_produce_request(call_ref, :infinity) do
:ok
end
end
end
# File: lib/elsa/producer.ex
defmodule Timex.Format.DateTime.Formatters.Default do
@moduledoc """
Date formatting language used by default by the formatting functions in Timex.
This is a novel formatting language introduced with `DateFormat`. Its main
advantage is simplicity and usage of mnemonics that are easy to memorize.
## Directive format
A directive is an optional _padding specifier_ followed by a _mnemonic_, both
enclosed in braces (`{` and `}`):
{<padding><mnemonic>}
Supported padding specifiers:
* `0` -- pads the number with zeros. Applicable to mnemonics that produce numerical result.
* `_` -- pads the number with spaces. Applicable to mnemonics that produce numerical result.
When padding specifier is omitted, numbers will not be padded.
## List of all directives
### Years and centuries
* `{YYYY}` - full year number (0..9999)
  * `{YY}` - the last two digits of the year number (0..99)
* `{C}` - century number (0..99)
* `{WYYYY}` - year number (4 digits) corresponding to the date's ISO week (0000..9999)
  * `{WYY}` - year number (2 digits) corresponding to the date's ISO week (00..99)
### Months
* `{M}` - month number (1..12)
* `{Mshort}` - abbreviated month name (Jan..Dec, no padding)
* `{Mfull}` - full month name (January..December, no padding)
### Days and weekdays
* `{D}` - day number (1..31)
* `{Dord}` - ordinal day of the year (1..366)
* `{WDmon}` - weekday, Monday first (0..6, no padding)
* `{WDsun}` - weekday, Sunday first (0..6, no padding)
* `{WDshort}` - abbreviated weekday name (Mon..Sun, no padding)
* `{WDfull}` - full weekday name (Monday..Sunday, no padding)
### Weeks
* `{Wiso}` - ISO week number (01..53)
* `{Wmon}` - week number of the year, Monday first (01..53)
* `{Wsun}` - week number of the year, Sunday first (01..53)
### Time
* `{h24}` - hour of the day (00..23)
* `{h12}` - hour of the day (1..12)
* `{m}` - minutes of the hour (00..59)
* `{s}` - seconds of the minute (00..60)
* `{ss}` - fractional second, based on precision of microseconds given (.xxx == ms, .xxxxxx == us)
* `{s-epoch}` - number of seconds since UNIX epoch
* `{am}` - lowercase am or pm (no padding)
* `{AM}` - uppercase AM or PM (no padding)
### Time zones
* `{Zname}` - time zone name, e.g. `UTC` (no padding)
* `{Zabbr}` - time zone abbreviation, e.g. `CST` (no padding)
* `{Z}` - time zone offset in the form `+0230` (no padding)
* `{Z:}` - time zone offset in the form `-07:30` (no padding)
* `{Z::}` - time zone offset in the form `-07:30:00` (no padding)
### Compound directives
These are shortcut directives corresponding to parts of the ISO 8601
specification. The benefit of using these over manually constructed ISO
formats is that these directives convert the date to UTC for you.
* `{ISO:Basic}` - `<date>T<time><offset>`. Full date and time
specification without separators.
* `{ISO:Basic:Z}` - `<date>T<time>Z`. Full date and time in UTC without
separators (e.g. `20070813T134801Z`)
* `{ISO:Extended}` - `<date>T<time><offset>`. Full date and time
specification with separators. (e.g. `2007-08-13T16:48:01 +03:00`)
* `{ISO:Extended:Z}` - `<date>T<time>Z`. Full date and time in UTC. (e.g. `2007-08-13T13:48:01Z`)
* `{ISOdate}` - `YYYY-MM-DD`. That is, 4-digit year number, followed by
2-digit month and day numbers (e.g. `2007-08-13`)
* `{ISOtime}` - `hh:mm:ss`. That is, 2-digit hour, minute, and second,
separated by colons (e.g. `13:04:05`). Midnight is 00 hours.
* `{ISOweek}` - `YYYY-Www`. That is, ISO week-based year, followed by ISO
week number (e.g. `2007-W09`)
* `{ISOweek-day}` - `YYYY-Www-D`. That is, an `{ISOweek}`, additionally
followed by weekday (e.g. `2007-W09-1`)
* `{ISOord}` - `YYYY-DDD`. That is, year number, followed by the ordinal
day number (e.g. `2007-113`)
* `{ASN1:UTCtime}` - `YYMMDD<time>Z`. Full 2-digit year date and time in UTC without
separators (e.g. `070813134801Z`)
* `{ASN1:GeneralizedTime}` - `YYYYMMDD<time>`. Full 4-digit year date and time in local timezone without
separators and with optional fractional seconds (e.g. `20070813134801.032`)
* `{ASN1:GeneralizedTime:Z}` - `YYYYMMDD<time>Z`. Full 4-digit year date and time in UTC without
separators and with optional fractional seconds (e.g. `20070813134801.032Z`)
* `{ASN1:GeneralizedTime:TZ}` - `YYYYMMDD<time><offset>`. Full 4-digit year date and time in UTC without
separators and with optional fractional seconds (e.g. `20070813134801.032-0500`)
These directives provide support for miscellaneous common formats:
* `{RFC822}` - e.g. `Mon, 05 Jun 14 23:20:59 UT`
* `{RFC822z}` - e.g. `Mon, 05 Jun 14 23:20:59 Z`
* `{RFC1123}` - e.g. `Tue, 05 Mar 2013 23:25:19 +0200`
* `{RFC1123z}` - e.g. `Tue, 05 Mar 2013 23:25:19 Z`
* `{RFC3339}` - e.g. `2013-03-05T23:25:19+02:00`
* `{RFC3339z}` - e.g. `2013-03-05T23:25:19Z`
* `{ANSIC}` - e.g. `Tue Mar 5 23:25:19 2013`
* `{UNIX}` - e.g. `Tue Mar 5 23:25:19 PST 2013`
* `{ASN1:UTCtime}` - e.g. `130305232519Z`
* `{ASN1:GeneralizedTime}` - e.g. `20130305232519.928`
* `{ASN1:GeneralizedTime:Z}` - e.g. `20130305232519.928Z`
* `{ASN1:GeneralizedTime:TZ}` - e.g. `20130305232519.928-0500`
* `{kitchen}` - e.g. `3:25PM`
"""
use Timex.Format.DateTime.Formatter
alias Timex.Format.FormatError
alias Timex.Parse.DateTime.Tokenizers.Default, as: Tokenizer
alias Timex.{Types, Translator}
@spec tokenize(String.t) :: {:ok, [Directive.t]} | {:error, term}
defdelegate tokenize(format_string), to: Tokenizer
def format!(date, format_string), do: lformat!(date, format_string, Translator.default_locale)
def format(date, format_string), do: lformat(date, format_string, Translator.default_locale)
def format(date, format_string, tokenizer), do: lformat(date, format_string, tokenizer, Translator.default_locale)
@spec lformat!(Types.calendar_types, String.t, String.t) :: String.t | no_return
def lformat!(date, format_string, locale) do
case lformat(date, format_string, locale) do
{:ok, result} -> result
{:error, reason} -> raise FormatError, message: reason
end
end
@spec lformat(Types.calendar_types, String.t, String.t) :: {:ok, String.t} | {:error, term}
def lformat(date, format_string, locale) do
case tokenize(format_string) do
{:ok, []} ->
{:error, "There were no formatting directives in the provided string."}
{:ok, dirs} when is_list(dirs) ->
do_format(locale, date, dirs, <<>>)
{:error, reason} -> {:error, {:format, reason}}
end
end
@doc """
If one wants to use the default formatting semantics with a different
tokenizer, this is the way.
"""
@spec lformat(Types.calendar_types, String.t, atom, String.t) :: {:ok, String.t} | {:error, term}
def lformat(date, format_string, tokenizer, locale) do
case tokenizer.tokenize(format_string) do
{:ok, []} ->
{:error, "There were no formatting directives in the provided string."}
{:ok, dirs} when is_list(dirs) ->
do_format(locale, date, dirs, <<>>)
{:error, reason} -> {:error, {:format, reason}}
end
end
defp do_format(_locale, _date, [], result), do: {:ok, result}
defp do_format(locale, date, [%Directive{type: :literal, value: char} | dirs], result) when is_binary(char) do
do_format(locale, date, dirs, <<result::binary, char::binary>>)
end
defp do_format(locale, date, [%Directive{type: type, modifiers: mods, flags: flags, width: width} | dirs], result) do
case format_token(locale, type, date, mods, flags, width) do
{:error, _} = err -> err
formatted -> do_format(locale, date, dirs, <<result::binary, formatted::binary>>)
end
end
end
# File: lib/format/datetime/formatters/default.ex
defmodule Legion.Identity.Telephony.PhoneNumber do
@moduledoc """
Represents a phone number entry of a user.
## Schema fields
- `:user_id`: The reference of the user that phone number belongs to.
- `:number`: The number of the phone.
- `:type`: The type of the phone, e.g. "home", "work".
- `:ignored?`: If `true`, the phone number will be hidden from transactional endpoints.
- `:safe?`: If `false`, the phone number will not be used in authentication processes.
- `:primed_at`: When the user marks the phone number entry as "primary", this attribute will be updated with a timestamp.
- `:inserted_at`: The timestamp user created the entry.
## Prioritization of entries
Phone number entries can be prioritized by a user, possibly for telephony-related transactional operations.
  A user may make a phone number primary at any moment.
  If the user has not made any phone number primary, the phone number will be picked according to the
  configuration options.
  However, no matter what, the rules below for determining the phone number still apply.
  For a phone number to be primary,
  - it must *not be ignored*,
  - it must have no descendants with a higher insertion timestamp,
  - it must have no descendants with a higher prioritization timestamp,
  - its type must be one of "home", "work", or "mobile".
## Rules for prioritization
Certain rules apply for the prioritization of the phone numbers.
A phone number entry can be prioritized if and only if,
- it is not marked as ignored,
- it is marked as safe.
"""
use Legion.Stereotype, :model
import Legion.Telephony.PhoneNumber, only: [is_valid_number?: 1]
alias Legion.Identity.Information.Registration, as: User
alias Legion.Identity.Telephony.PhoneType
alias Legion.Identity.Telephony.PhoneNumber.{
PrioritizationTrait,
SafetyTrait,
NeglectionTrait,
ValidSafetyTrait,
ValidPrioritizationTrait
}
@typedoc """
A positive integer uniquely identifying a phone number entity.
"""
@type id() :: pos_integer()
@typedoc """
Type of the phone number.
"""
@type phone_type() :: :home | :mobile | :work | :home_fax | :work_fax | :pager
schema "user_phone_numbers" do
belongs_to :user, User
field :number, :string
field :type, PhoneType
field :inserted_at, :naive_datetime_usec, read_after_writes: true
has_many :prioritization_traits, PrioritizationTrait
has_many :safety_traits, SafetyTrait
has_one :neglection_trait, NeglectionTrait
has_many :valid_safety_traits, ValidSafetyTrait
has_one :valid_prioritization_trait, ValidPrioritizationTrait
end
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:user_id, :number, :type])
|> validate_required([:user_id, :number, :type])
|> validate_phone_number()
|> foreign_key_constraint(:user_id)
|> unique_constraint(:user_id)
end
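  # Illustrative changeset usage (a sketch; the accepted number format depends on
  # `Legion.Telephony.PhoneNumber.is_valid_number?/1`, and the values are hypothetical):
  #
  #   changeset(%__MODULE__{}, %{user_id: 1, number: "+14155552671", type: :mobile})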
defp validate_phone_number(changeset) do
# Retrieve the phone number if changed and validate
if phone_number = get_change(changeset, :number) do
if is_valid_number?(phone_number) do
changeset
else
add_error(changeset, :phone_number, "is invalid")
end
else
changeset
end
end
end
# File: apps/legion/lib/identity/telephony/phone_number/phone_number.ex
defmodule Sentry do
use Application
import Supervisor.Spec
alias Sentry.{Event, Config}
require Logger
@moduledoc """
Provides the basic functionality to submit a `Sentry.Event` to the Sentry Service.
## Configuration
Add the following to your production config
config :sentry, dsn: "https://public:secret@app.getsentry.com/1",
included_environments: [:prod],
environment_name: :prod,
tags: %{
env: "production"
}
The `environment_name` and `included_environments` work together to determine
if and when Sentry should record exceptions. The `environment_name` is the
name of the current environment. In the example above, we have explicitly set
the environment to `:prod` which works well if you are inside an environment
specific configuration `config/prod.exs`.
An alternative is to use `Mix.env` in your general configuration file:
config :sentry, dsn: "https://public:secret@app.getsentry.com/1",
included_environments: [:prod],
environment_name: Mix.env
This will set the environment name to whatever the current Mix environment
atom is, but it will only send events if the current environment is `:prod`,
since that is the only entry in the `included_environments` key.
You can even rely on more custom determinations of the environment name. It's
  not uncommon for most applications to have a "staging" environment. In order
to handle this without adding an additional Mix environment, you can set an
environment variable that determines the release level.
config :sentry, dsn: "https://public:secret@app.getsentry.com/1",
included_environments: ~w(production staging),
environment_name: System.get_env("RELEASE_LEVEL") || "development"
In this example, we are getting the environment name from the `RELEASE_LEVEL`
environment variable. If that variable does not exist, we default to `"development"`.
Now, on our servers, we can set the environment variable appropriately. On
our local development machines, exceptions will never be sent, because the
default value is not in the list of `included_environments`.
## Filtering Exceptions
If you would like to prevent certain exceptions, the `:filter` configuration option
allows you to implement the `Sentry.EventFilter` behaviour. The first argument is the
exception to be sent, and the second is the source of the event. `Sentry.Plug`
will have a source of `:plug`, `Sentry.Logger` will have a source of `:logger`, and `Sentry.Phoenix.Endpoint` will have a source of `:endpoint`.
  If an exception does not come from any of those sources, the source will be `nil`
  unless the `:event_source` option is passed to `Sentry.capture_exception/2`.
A configuration like below will prevent sending `Phoenix.Router.NoRouteError` from `Sentry.Plug`, but
allows other exceptions to be sent.
# sentry_event_filter.ex
defmodule MyApp.SentryEventFilter do
@behaviour Sentry.EventFilter
def exclude_exception?(%Elixir.Phoenix.Router.NoRouteError{}, :plug), do: true
def exclude_exception?(_exception, _source), do: false
end
# config.exs
config :sentry, filter: MyApp.SentryEventFilter,
included_environments: ~w(production staging),
environment_name: System.get_env("RELEASE_LEVEL") || "development"
## Capturing Exceptions
Simply calling `capture_exception/2` will send the event. By default, the event is sent asynchronously and the result can be awaited upon. The `:result` option can be used to change this behavior. See `Sentry.Client.send_event/2` for more information.
{:ok, task} = Sentry.capture_exception(my_exception)
{:ok, event_id} = Task.await(task)
{:ok, another_event_id} = Sentry.capture_exception(other_exception, [event_source: :my_source, result: :sync])
### Options
* `:event_source` - The source passed as the first argument to `Sentry.EventFilter.exclude_exception?/2`
## Configuring The `Logger` Backend
See `Sentry.Logger`
"""
@type send_result :: Sentry.Client.send_event_result() | :excluded | :ignored
def start(_type, _opts) do
children = [
supervisor(Task.Supervisor, [[name: Sentry.TaskSupervisor]]),
:hackney_pool.child_spec(
Sentry.Client.hackney_pool_name(),
timeout: Config.hackney_timeout(),
max_connections: Config.max_hackney_connections()
)
]
validate_json_config!()
opts = [strategy: :one_for_one, name: Sentry.Supervisor]
Supervisor.start_link(children, opts)
end
@doc """
Parses and submits an exception to Sentry if current environment is in included_environments.
`opts` argument is passed as the second argument to `Sentry.send_event/2`.
"""
@spec capture_exception(Exception.t() | atom() | {atom(), atom()}, Keyword.t()) :: send_result
def capture_exception(exception, opts \\ []) do
filter_module = Config.filter()
{source, opts} = Keyword.pop(opts, :event_source)
if filter_module.exclude_exception?(exception, source) do
:excluded
else
exception
|> Event.transform_exception(opts)
|> send_event(opts)
end
end
@doc """
Reports a message to Sentry.
`opts` argument is passed as the second argument to `Sentry.send_event/2`.
"""
@spec capture_message(String.t(), Keyword.t()) :: send_result
def capture_message(message, opts \\ []) when is_binary(message) do
opts
|> Keyword.put(:message, message)
|> Event.create_event()
|> send_event(opts)
end
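  # Illustrative usage (a sketch; with the default async result, the returned
  # task can be awaited on for the event id):
  #
  #   {:ok, task} = Sentry.capture_message("something went wrong")
  #   {:ok, event_id} = Task.await(task)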
@doc """
Sends a `Sentry.Event`
`opts` argument is passed as the second argument to `send_event/2` of the configured `Sentry.HTTPClient`. See `Sentry.Client.send_event/2` for more information.
"""
@spec send_event(Event.t(), Keyword.t()) :: send_result
def send_event(event, opts \\ [])
def send_event(%Event{message: nil, exception: nil}, _opts) do
Logger.warn("Sentry: unable to parse exception")
:ignored
end
def send_event(%Event{} = event, opts) do
included_environments = Config.included_environments()
environment_name = Config.environment_name()
client = Config.client()
if environment_name in included_environments do
client.send_event(event, opts)
else
:ignored
end
end
defp validate_json_config!() do
case Config.json_library() do
nil ->
raise ArgumentError.exception("nil is not a valid :json_library configuration")
library ->
try do
with {:ok, %{}} <- library.decode("{}"),
{:ok, "{}"} <- library.encode(%{}) do
:ok
else
_ ->
raise ArgumentError.exception(
"configured :json_library #{inspect(library)} does not implement decode/1 and encode/1"
)
end
rescue
UndefinedFunctionError ->
reraise ArgumentError.exception("""
configured :json_library #{inspect(library)} is not available or does not implement decode/1 and encode/1.
Do you need to add #{inspect(library)} to your mix.exs?
"""),
__STACKTRACE__
end
end
end
end
# File: lib/sentry.ex
defmodule ExGremlin.Graph do
@moduledoc """
Functions for traversing and mutating the Graph.
Graph operations are stored in a queue which can be created with `g/0`.
Mosts functions return the queue so that they can be chained together
similar to how Gremlin queries work.
Example:
```
g.V(1).values("name")
```
Would translate to
```
g |> v(1) |> values("name")
```
Note: This module doesn't actually execute any queries, it just allows you to build one.
For query execution see `ExGremlin.Client.query/1`
originally made by Gremlex
"""
alias :queue, as: Queue
@type t :: {[], []}
@default_namespace "ExGremlin"
@default_namespace_property "namespace"
@doc """
Start of graph traversal. All graph operations are stored in a queue.
"""
@spec g :: ExGremlin.Graph.t()
def g, do: Queue.new()
@spec anonymous :: ExGremlin.Graph.t()
def anonymous do
enqueue(Queue.new(), "__", [])
end
@doc """
Appends an addV command to the traversal.
Returns a graph to allow chaining.
"""
@spec add_v(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def add_v(graph, id) do
enqueue(graph, "addV", [id])
end
@doc """
Appends an addE command to the traversal.
Returns a graph to allow chaining.
"""
@spec add_e(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def add_e(graph, edge) do
enqueue(graph, "addE", [edge])
end
@doc """
Appends an aggregate command to the traversal.
Returns a graph to allow chaining.
"""
@spec aggregate(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def aggregate(graph, aggregate) do
enqueue(graph, "aggregate", aggregate)
end
@spec barrier(ExGremlin.Graph.t(), non_neg_integer()) :: ExGremlin.Graph.t()
def barrier(graph, max_barrier_size) do
enqueue(graph, "barrier", max_barrier_size)
end
@spec barrier(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def barrier(graph) do
enqueue(graph, "barrier", [])
end
@doc """
Appends a coin command to the traversal. Takes in a graph and a probability
modifier as parameters.
Returns a graph to allow chaining.
"""
@spec coin(ExGremlin.Graph.t(), Float.t()) :: ExGremlin.Graph.t()
def coin(graph, probability) do
enqueue(graph, "coin", probability)
end
@spec has_label(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def has_label(graph, label) do
enqueue(graph, "hasLabel", [label])
end
@spec has(ExGremlin.Graph.t(), any(), any()) :: ExGremlin.Graph.t()
def has(graph, key, value) do
enqueue(graph, "has", [key, value])
end
@spec key(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def key(graph) do
enqueue(graph, "key", [])
end
@doc """
Appends property command to the traversal.
Returns a graph to allow chaining.
"""
@spec property(ExGremlin.Graph.t(), String.t(), any()) :: ExGremlin.Graph.t()
def property(graph, key, value) do
enqueue(graph, "property", [key, value])
end
@spec property(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def property(graph, key) do
enqueue(graph, "property", [key])
end
@spec property(ExGremlin.Graph.t(), atom(), String.t(), any()) :: ExGremlin.Graph.t()
def property(graph, :single, key, value) do
enqueue(graph, "property", [:single, key, value])
end
@spec property(ExGremlin.Graph.t(), atom(), String.t(), any()) :: ExGremlin.Graph.t()
def property(graph, :list, key, value) do
enqueue(graph, "property", [[:list, key, value]])
end
@spec property(ExGremlin.Graph.t(), atom(), String.t(), any()) :: ExGremlin.Graph.t()
def property(graph, :set, key, value) do
enqueue(graph, "property", [[:set, key, value]])
end
@spec property(ExGremlin.Graph.t(), atom(), String.t(), any()) :: ExGremlin.Graph.t()
def property(graph, :raw, key, value) do
enqueue(graph, "property", [[:raw, key, value]])
end
@doc """
Appends properties command to the traversal.
Returns a graph to allow chaining.
"""
@spec properties(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def properties(graph, key) do
enqueue(graph, "properties", [key])
end
@spec properties(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def properties(graph) do
enqueue(graph, "properties", [])
end
@doc """
Appends the store command to the traversal. Takes in a graph and the name of
the side effect key that will hold the aggregate.
Returns a graph to allow chaining.
"""
@spec store(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def store(graph, store) do
enqueue(graph, "store", store)
end
@spec cap(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def cap(graph, cap) do
enqueue(graph, "cap", cap)
end
@doc """
Appends valueMap command to the traversal.
Returns a graph to allow chaining.
"""
@spec value_map(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def value_map(graph) do
enqueue(graph, "valueMap", [])
end
@spec value_map(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def value_map(graph, value) when is_binary(value) do
enqueue(graph, "valueMap", [value])
end
@spec value_map(ExGremlin.Graph.t(), list(String.t())) :: ExGremlin.Graph.t()
def value_map(graph, values) when is_list(values) do
enqueue(graph, "valueMap", values)
end
@doc """
Appends values command to the traversal.
Returns a graph to allow chaining.
"""
@spec values(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def values(graph, key) do
enqueue(graph, "values", [key])
end
@doc """
Appends values the `V` command allowing you to select a vertex.
Returns a graph to allow chaining.
"""
@spec v(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def v({h, t} = graph) when is_list(h) and is_list(t) do
enqueue(graph, "V", [])
end
@spec v(number()) :: ExGremlin.Vertex.t()
def v(id) do
%ExGremlin.Vertex{id: id, label: ""}
end
@spec v(ExGremlin.Graph.t(), ExGremlin.Vertex.t()) :: ExGremlin.Graph.t()
def v(graph, %ExGremlin.Vertex{id: id}) do
enqueue(graph, "V", [id])
end
@doc """
Appends values the `V` command allowing you to select a vertex.
Returns a graph to allow chaining.
"""
@spec v(ExGremlin.Graph.t(), number()) :: ExGremlin.Graph.t()
def v(graph, id) when is_number(id) or is_binary(id) do
enqueue(graph, "V", [id])
end
@spec v(ExGremlin.Graph.t(), List.t() | String.t()) :: ExGremlin.Graph.t()
def v(graph, id) do
enqueue(graph, "V", id)
end
@spec in_e(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def in_e(graph) do
enqueue(graph, "inE", [])
end
@spec in_e(ExGremlin.Graph.t(), String.t() | List.t()) :: ExGremlin.Graph.t()
def in_e(graph, edges) do
enqueue(graph, "inE", edges)
end
@spec out_e(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def out_e(graph) do
enqueue(graph, "outE", [])
end
@spec out_e(ExGremlin.Graph.t(), String.t() | List.t()) :: ExGremlin.Graph.t()
def out_e(graph, edges) do
enqueue(graph, "outE", edges)
end
@spec out(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def out(graph) do
enqueue(graph, "out", [])
end
@spec out(ExGremlin.Graph.t(), String.t() | List.t()) :: ExGremlin.Graph.t()
def out(graph, labels) do
enqueue(graph, "out", labels)
end
@spec in_(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def in_(graph, edge) do
enqueue(graph, "in", [edge])
end
@spec in_(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def in_(graph) do
enqueue(graph, "in", [])
end
@spec or_(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def or_(graph) do
enqueue(graph, "or", [])
end
@spec and_(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def and_(graph) do
enqueue(graph, "and", [])
end
@spec in_v(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def in_v(graph) do
enqueue(graph, "inV", [])
end
@spec in_v(ExGremlin.Graph.t(), String.t() | List.t()) :: ExGremlin.Graph.t()
def in_v(graph, labels) do
enqueue(graph, "inV", labels)
end
@spec out_v(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def out_v(graph) do
enqueue(graph, "outV", [])
end
@spec out_v(ExGremlin.Graph.t(), String.t() | List.t()) :: ExGremlin.Graph.t()
def out_v(graph, labels) do
enqueue(graph, "outV", labels)
end
@spec both(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def both(graph) do
enqueue(graph, "both", [])
end
@spec both(ExGremlin.Graph.t(), List.t()) :: ExGremlin.Graph.t()
def both(graph, labels) when is_list(labels) do
enqueue(graph, "both", labels)
end
@spec both(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def both(graph, label) do
enqueue(graph, "both", [label])
end
@spec both_e(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def both_e(graph) do
enqueue(graph, "bothE", [])
end
@spec both_e(ExGremlin.Graph.t(), String.t() | List.t()) :: ExGremlin.Graph.t()
def both_e(graph, labels) do
enqueue(graph, "bothE", labels)
end
@spec both_v(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def both_v(graph) do
enqueue(graph, "bothV", [])
end
@spec both_v(ExGremlin.Graph.t(), List.t() | String.t()) :: ExGremlin.Graph.t()
def both_v(graph, labels) do
enqueue(graph, "bothV", labels)
end
@spec dedup(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def dedup(graph) do
enqueue(graph, "dedup", [])
end
@spec to(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def to(graph, target) do
enqueue(graph, "to", [target])
end
@spec has_next(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def has_next(graph) do
enqueue(graph, "hasNext", [])
end
@spec next(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def next(graph) do
enqueue(graph, "next", [])
end
@spec next(ExGremlin.Graph.t(), number()) :: ExGremlin.Graph.t()
def next(graph, numberOfResults) do
enqueue(graph, "next", [numberOfResults])
end
@spec try_next(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def try_next(graph) do
enqueue(graph, "tryNext", [])
end
@spec to_list(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def to_list(graph) do
enqueue(graph, "toList", [])
end
@spec to_set(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def to_set(graph) do
enqueue(graph, "toSet", [])
end
@spec to_bulk_set(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def to_bulk_set(graph) do
enqueue(graph, "toBulkSet", [])
end
@spec drop(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def drop(graph) do
enqueue(graph, "drop", [])
end
@spec iterate(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def iterate(graph) do
enqueue(graph, "iterate", [])
end
@spec sum(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def sum(graph) do
enqueue(graph, "sum", [])
end
@spec inject(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def inject(graph, target) do
enqueue(graph, "inject", [target])
end
@spec tail(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def tail(graph) do
enqueue(graph, "tail", [1])
end
@spec tail(ExGremlin.Graph.t(), non_neg_integer()) :: ExGremlin.Graph.t()
def tail(graph, size) do
enqueue(graph, "tail", [size])
end
@spec min(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def min(graph) do
enqueue(graph, "min", [])
end
@spec max(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def max(graph) do
enqueue(graph, "max", [])
end
@spec identity(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def identity(graph) do
enqueue(graph, "identity", [])
end
@spec constant(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def constant(graph, constant) do
enqueue(graph, "constant", constant)
end
@spec id(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def id(graph) do
enqueue(graph, "id", [])
end
@spec cyclic_path(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def cyclic_path(graph) do
enqueue(graph, "cyclicPath", [])
end
@spec count(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def count(graph) do
enqueue(graph, "count", [])
end
@spec group(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def group(graph) do
enqueue(graph, "group", [])
end
@spec group_count(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def group_count(graph) do
enqueue(graph, "groupCount", [])
end
@doc """
Appends groupCount command to the traversal. Takes in a graph and the name
of the key that will hold the aggregated grouping.
  Returns a graph to allow chaining.
"""
@spec group_count(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def group_count(graph, key) do
enqueue(graph, "groupCount", key)
end
defp enqueue(graph, op, args) when is_list(args) do
Queue.in({op, args}, graph)
end
defp enqueue(graph, op, args) do
Queue.in({op, [args]}, graph)
end
@doc """
Appends values the `E` command allowing you to select an edge.
Returns a graph to allow chaining.
"""
@spec e(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def e(graph) do
enqueue(graph, "E", [])
end
@spec e(ExGremlin.Graph.t(), ExGremlin.Edge.t()) :: ExGremlin.Graph.t()
def e(graph, %ExGremlin.Edge{id: id}) do
enqueue(graph, "E", [id])
end
@spec e(ExGremlin.Graph.t(), number | String.t()) :: ExGremlin.Graph.t()
def e(graph, id) when is_number(id) or is_binary(id) do
enqueue(graph, "E", [id])
end
@doc """
Adds a namespace as property
"""
@spec add_namespace(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def add_namespace(graph) do
add_namespace(graph, namespace())
end
@spec add_namespace(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def add_namespace(graph, ns) do
graph |> property(namespace_property(), ns)
end
@spec has_namespace(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def has_namespace(graph) do
has_namespace(graph, namespace())
end
@spec has_namespace(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def has_namespace(graph, ns) do
graph |> has(namespace_property(), ns)
end
@spec has_id(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def has_id(graph, id) do
enqueue(graph, "hasId", id)
end
@spec has_key(ExGremlin.Graph.t(), List.t() | String.t()) :: ExGremlin.Graph.t()
def has_key(graph, key) do
enqueue(graph, "hasKey", key)
end
@spec has_not(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def has_not(graph, key) do
enqueue(graph, "hasNot", key)
end
@spec coalesce(ExGremlin.Graph.t(), List.t() | String.t()) :: ExGremlin.Graph.t()
def coalesce(graph, traversals) do
enqueue(graph, "coalesce", traversals)
end
@spec fold(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def fold(graph) do
enqueue(graph, "fold", [])
end
@spec fold(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def fold(graph, traversal) do
enqueue(graph, "fold", [traversal])
end
@spec unfold(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def unfold(graph) do
enqueue(graph, "unfold", [])
end
@spec unfold(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def unfold(graph, traversal) do
enqueue(graph, "unfold", [traversal])
end
@spec as(ExGremlin.Graph.t(), List.t() | String.t()) :: ExGremlin.Graph.t()
def as(graph, name) do
enqueue(graph, "as", name)
end
@spec select(ExGremlin.Graph.t(), List.t() | String.t()) :: ExGremlin.Graph.t()
def select(graph, names) do
enqueue(graph, "select", names)
end
@spec by(ExGremlin.Graph.t(), List.t() | String.t()) :: ExGremlin.Graph.t()
def by(graph, value) do
enqueue(graph, "by", value)
end
@spec path(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def path(graph) do
enqueue(graph, "path", [])
end
@spec simple_path(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def simple_path(graph) do
enqueue(graph, "simplePath", [])
end
@spec from(ExGremlin.Graph.t(), String.t()) :: ExGremlin.Graph.t()
def from(graph, name) do
enqueue(graph, "from", [name])
end
@spec repeat(ExGremlin.Graph.t(), ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def repeat(graph, traversal) do
enqueue(graph, "repeat", [traversal])
end
@spec until(ExGremlin.Graph.t(), ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def until(graph, traversal) do
enqueue(graph, "until", [traversal])
end
@spec loops(ExGremlin.Graph.t()) :: ExGremlin.Graph.t()
def loops(graph) do
enqueue(graph, "loops", [])
end
@spec is(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def is(graph, value) do
enqueue(graph, "is", [value])
end
@spec eq(ExGremlin.Graph.t(), number()) :: ExGremlin.Graph.t()
def eq(graph, number) do
enqueue(graph, "eq", [number])
end
@spec where(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def where(graph, traversal) do
enqueue(graph, "where", [traversal])
end
@spec not_(ExGremlin.Graph.t(), any()) :: ExGremlin.Graph.t()
def not_(graph, traversal) do
enqueue(graph, "not", [traversal])
end
@doc """
Creates a `within` predicate that will match at least one of the values provided.
Takes in a range or a list as the values.
Examples:
```
g.V().has('age', within(1..18))
g.V().has('name', within(["some", "value"]))
```
"""
def within(%Range{} = range) do
enqueue(Queue.new(), "within", [range])
end
def within(values) do
enqueue(Queue.new(), "within", values)
end
@doc """
Creates a `without` predicate that will filter out values that match the values provided.
Takes in a range or a list as the values.
Examples:
```
g.V().has('age', without(18..30))
g.V().has('name', without(["any", "value"]))
```
"""
def without(%Range{} = range) do
enqueue(Queue.new(), "without", [range])
end
def without(values) do
enqueue(Queue.new(), "without", values)
end
  def eq(value) do
    enqueue(Queue.new(), "eq", value)
  end
  def neq(value) do
    enqueue(Queue.new(), "neq", value)
  end
  def gt(value) do
    enqueue(Queue.new(), "gt", value)
  end
  def gte(value) do
    enqueue(Queue.new(), "gte", value)
  end
  def lt(value) do
    enqueue(Queue.new(), "lt", value)
  end
  def lte(value) do
    enqueue(Queue.new(), "lte", value)
  end
@doc """
Compiles a graph into the Gremlin query.
"""
@spec encode(ExGremlin.Graph.t()) :: String.t()
def encode(graph) do
encode(graph, "g")
end
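  # Illustrative example, mirroring the moduledoc (a sketch):
  #
  #   g() |> v(1) |> values("name") |> encode()
  #   #=> "g.V(1).values('name')"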
defp encode({[], []}, acc), do: acc
defp encode(graph, acc) do
{{:value, {op, args}}, remainder} = :queue.out(graph)
args =
args
|> Enum.map(fn
nil ->
"none"
%ExGremlin.Vertex{id: id} when is_number(id) ->
"V(#{id})"
%ExGremlin.Vertex{id: id} when is_binary(id) ->
"V('#{id}')"
arg when is_number(arg) or is_atom(arg) ->
"#{arg}"
%Range{first: first, last: last} ->
"#{first}..#{last}"
        [:predicate_raw, predicate, pattern] ->
          predicate_raw(predicate, pattern)
        [:predicate, predicate, pattern] ->
          predicate(predicate, pattern)
        [:list, key, values] ->
          "'#{key}',#{list_string(values)}"
        [:set, key, values] ->
          "'#{key}',#{list_string(values)}"
        [:raw, key, value] ->
          "'#{key}',#{value}"
arg when is_tuple(arg) ->
case :queue.is_queue(arg) and :queue.get(arg) do
{"V", _} -> encode(arg, "g")
_ -> encode(arg, "")
end
str ->
"'#{escape(str)}'"
end)
|> Enum.join(", ")
construct_fn_call(acc, op, args, remainder)
end
@spec construct_fn_call(String.t(), String.t(), String.t(), ExGremlin.Graph.t()) :: String.t()
defp construct_fn_call("", "__", _, remainder), do: encode(remainder, "" <> "__")
defp construct_fn_call(_, "__", _, _), do: raise("Not a valid traversal")
defp construct_fn_call("", op, args, remainder), do: encode(remainder, "" <> "#{op}(#{args})")
defp construct_fn_call(acc, op, args, remainder),
do: encode(remainder, acc <> ".#{op}(#{args})")
# @spec escape(String.t()) :: String.t()
def escape(str) do
# We escape single quote if it is not already escaped by an odd number of backslashes
String.replace(str, ~r/((\A|[^\\])(\\\\)*)'/, "\\1\\'")
end
defp predicate(predicate, pattern) when is_bitstring(pattern) do
"#{predicate}('#{escape pattern}')"
end
defp predicate(predicate, pattern) do
"#{predicate}(#{pattern})"
end
defp predicate_raw(predicate, pattern) do
"#{predicate}(#{pattern})"
end
defp list_string(list) do
str = Enum.map(list, fn
el when is_bitstring(el) -> "'#{escape(el)}'"
el -> "#{el}"
end)
|> Enum.join(",")
"[#{str}]"
end
defp namespace_property do
Application.get_env(:ex_gremlin, :namespace, %{})
|> Map.get(:property, @default_namespace_property)
end
defp namespace do
Application.get_env(:ex_gremlin, :namespace, %{})
|> Map.get(:namespace, @default_namespace)
end
end
# File: lib/ex_gremlin/graph/graph.ex
if Code.ensure_loaded?(Plug) do
defmodule Uinta.Plug do
@moduledoc """
This plug combines the request and response logs into a single line. This
brings many benefits including:
- Removing the need to visually match up the request and response makes it
easier to read your logs and get a full picture of what has happened.
- Having a single line for both request and response halves the number of
request logs that your log aggregator will need to process and index, which
leads to saved costs
In addition to combining the log lines, it also gives you the ability to
output request logs in JSON format so that you can easily have your log
aggregator parse the fields. To do this, pass `json: true` in the options
when calling the plug.
You will also gain the ability to not log certain paths that are requested,
as long as those paths return a 200-level status code. This can be
particularly useful for things like not showing health checks in your logs
to cut down on noise. To do this, just pass `ignored_paths:
["/path_to_ignore"]` in the options.
Finally, GraphQL requests will replace `POST /graphql` with the GraphQL
operation type and name like `QUERY getUser` or `MUTATION createUser` if an
operation name is provided. This will give you more visibility into your
GraphQL requests without having to log out the entire request body or go
into debug mode. If desired, the GraphQL variables can be included in the
log line as well. The query can also be included if unnamed.
## Installation
Installation of the plug will depend on how your app currently logs requests.
Open `YourApp.Endpoint` and look for the following line:
```
plug Plug.Logger
```
If it exists in your endpoint, replace it with this (using the options you
want):
```
plug Uinta.Plug,
log: :info,
json: false,
include_variables: false,
ignored_paths: [],
filter_variables: []
```
If your endpoint didn't call `Plug.Logger`, add the above line above the line
that looks like this:
```
plug Plug.RequestId
```
Now you will also want to add the following anywhere in your main config file to
make sure that you aren't logging each request twice:
```
config :phoenix, logger: false
```
## Options
- `:log` - The log level at which this plug should log its request info.
Default is `:info`
- `:json` - Whether or not this plug should log in JSON format. Default is
`false`
- `:ignored_paths` - A list of paths that should not log requests. Default
is `[]`.
- `:include_variables` - Whether or not to include any GraphQL variables in
the log line when applicable. Default is `false`.
- `:filter_variables` - A list of variable names that should be filtered
out from the logs. By default `password`, `<PASSWORD>`,
`idToken`, and `refreshToken` will be filtered.
- `:include_unnamed_queries` - Whether or not to include the full query
body for queries with no name supplied
"""
require Logger
alias Plug.Conn
@behaviour Plug
@default_filter ~w(password <PASSWORD> idToken refreshToken)
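    # Matches the leading "query"/"mutation" keyword of a GraphQL document and
    # captures the operation name, e.g. "getUser" from "query getUser($id: ID!) {".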
@query_name_regex ~r/^(?:(?:query|mutation)\s+(\w+)(?:\(\s*\$\w+:\s+\[?\w+\]?!?(?:,?\s+\$\w+:\s+\[?\w+\]?!?)*\s*\))?\s*)?{/
@type format :: :json | :string
@type graphql_info :: %{type: String.t(), operation: String.t(), variables: String.t() | nil}
@type opts :: %{
level: Logger.level(),
format: format(),
include_unnamed_queries: boolean(),
include_variables: boolean(),
ignored_paths: list(String.t()),
filter_variables: list(String.t())
}
@impl Plug
def init(opts) do
format = if Keyword.get(opts, :json, false), do: :json, else: :string
%{
level: Keyword.get(opts, :log, :info),
format: format,
ignored_paths: Keyword.get(opts, :ignored_paths, []),
include_unnamed_queries: Keyword.get(opts, :include_unnamed_queries, false),
include_variables: Keyword.get(opts, :include_variables, false),
filter_variables: Keyword.get(opts, :filter_variables, @default_filter)
}
end
@impl Plug
def call(conn, opts) do
start = System.monotonic_time()
Conn.register_before_send(conn, fn conn ->
if conn.request_path not in opts.ignored_paths || conn.status >= 300 do
Logger.log(opts.level, fn ->
stop = System.monotonic_time()
diff = System.convert_time_unit(stop - start, :native, :microsecond)
graphql_info = graphql_info(conn, opts)
info = info(conn, graphql_info, diff, opts)
format_line(info, opts.format)
end)
end
conn
end)
end
@spec info(Plug.Conn.t(), graphql_info(), integer(), opts()) :: map()
defp info(conn, graphql_info, diff, opts) do
%{
connection_type: connection_type(conn),
method: method(conn, graphql_info),
path: path(conn, graphql_info),
query: query(graphql_info, opts),
status: Integer.to_string(conn.status),
timing: formatted_diff(diff),
variables: variables(graphql_info)
}
end
@spec format_line(map(), format()) :: iodata()
defp format_line(info, :json) do
info =
info
|> Map.delete(:connection_type)
|> Enum.filter(fn {_, value} -> !is_nil(value) end)
|> Enum.into(%{})
case Jason.encode(info) do
{:ok, encoded} -> encoded
_ -> inspect(info)
end
end
defp format_line(info, :string) do
log = [info.method, ?\s, info.path]
log = if is_nil(info.variables), do: log, else: [log, " with ", info.variables]
log = [log, " - ", info.connection_type, ?\s, info.status, " in ", info.timing]
if is_nil(info.query), do: log, else: [log, "\nQuery: ", info.query]
end
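    # Illustrative string-format output for a named GraphQL query (values are
    # hypothetical):
    #
    #   "QUERY getUser - Sent 200 in 42ms"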
@spec method(Plug.Conn.t(), graphql_info()) :: String.t()
defp method(_, %{type: type}), do: type
defp method(conn, _), do: conn.method
@spec path(Plug.Conn.t(), graphql_info()) :: String.t()
defp path(_, %{operation: operation}), do: operation
defp path(conn, _), do: conn.request_path
@spec query(graphql_info(), opts()) :: String.t() | nil
defp query(_, %{include_unnamed_queries: false}), do: nil
defp query(%{query: query}, _), do: query
defp query(_, _), do: nil
@spec variables(graphql_info() | nil) :: String.t() | nil
defp variables(%{variables: variables}), do: variables
defp variables(_), do: nil
@spec graphql_info(Plug.Conn.t(), opts()) :: graphql_info() | nil
defp graphql_info(%{method: "POST", params: params}, opts) do
type =
params["query"]
|> Kernel.||("")
|> String.trim()
|> query_type()
if is_nil(type) do
nil
else
%{type: type}
|> put_operation_name(params)
|> put_query(params["query"], opts)
|> put_variables(params["variables"], opts)
end
end
defp graphql_info(_, _), do: nil
@spec put_operation_name(map(), map()) :: map()
defp put_operation_name(info, params) do
operation = operation_name(params)
Map.put(info, :operation, operation)
end
@spec put_query(map(), String.t(), opts()) :: map()
defp put_query(%{operation: "unnamed"} = info, query, %{include_unnamed_queries: true}),
do: Map.put(info, :query, query)
defp put_query(info, _query, _opts), do: info
@spec put_variables(map(), any(), opts()) :: map()
defp put_variables(info, _variables, %{include_variables: false}), do: info
defp put_variables(info, variables, _) when not is_map(variables), do: info
defp put_variables(info, variables, opts) do
filtered = filter_variables(variables, opts.filter_variables)
case Jason.encode(filtered) do
{:ok, encoded} -> Map.put(info, :variables, encoded)
_ -> info
end
end
@spec filter_variables(map(), list(String.t())) :: map()
defp filter_variables(variables, to_filter) do
variables
|> Enum.map(&filter(&1, to_filter))
|> Enum.into(%{})
end
@spec filter({String.t(), term()}, list(String.t())) :: {String.t(), term()}
defp filter({key, value}, to_filter) do
if key in to_filter do
{key, "[FILTERED]"}
else
{key, value}
end
end
    @spec formatted_diff(integer()) :: String.t()
defp formatted_diff(diff) when diff > 1000 do
"#{diff |> div(1000) |> Integer.to_string()}ms"
end
defp formatted_diff(diff), do: "#{Integer.to_string(diff)}µs"
@spec connection_type(Plug.Conn.t()) :: String.t()
defp connection_type(%{state: :set_chunked}), do: "Chunked"
defp connection_type(_), do: "Sent"
    @spec operation_name(map()) :: String.t() | nil
defp operation_name(%{"operationName" => name}), do: name
defp operation_name(%{"query" => query}) do
case Regex.run(@query_name_regex, query, capture: :all_but_first) do
[query_name] -> query_name
_ -> "unnamed"
end
end
defp operation_name(_), do: "unnamed"
@spec query_type(term()) :: String.t() | nil
defp query_type("query" <> _), do: "QUERY"
defp query_type("mutation" <> _), do: "MUTATION"
defp query_type("{" <> _), do: "QUERY"
defp query_type(_), do: nil
end
end
# File: lib/uinta/plug.ex
defmodule OMG.Performance.ExtendedPerftest do
@moduledoc """
  This performance test allows sending out many transactions to a child chain instance of choice.
See `OMG.Performance` for configuration within the `iex` shell using `Performance.init()`
"""
use OMG.Utils.LoggerExt
alias OMG.TestHelper
alias OMG.Utxo
alias Support.Integration.DepositHelper
require Utxo
@make_deposit_timeout 600_000
@doc """
Runs test with `ntx_to_send` transactions for each of `spenders` provided.
  The spenders should be provided in the form given by `OMG.Performance.Generators.generate_users`, and must be
funded on the root chain. The test invocation will do the deposits on the child chain.
## Usage
Once you have your Ethereum node and a child chain running, from a configured `iex -S mix run --no-start` shell
```
use OMG.Performance
Performance.init()
spenders = Generators.generate_users(2)
Performance.ExtendedPerftest.start(100, spenders, destdir: destdir)
```
The results are going to be waiting for you in a file within `destdir` and will be logged.
Options:
- :destdir - directory where the results will be put, relative to `pwd`, defaults to `"."`
- :randomized - whether the non-change outputs of the txs sent out will be random or equal to sender (if `false`),
defaults to `true`
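
  For example, to write results to a custom directory with non-randomized outputs
  (values are illustrative):

  ```
  Performance.ExtendedPerftest.start(100, spenders, destdir: "/tmp/perftest", randomized: false)
  ```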
"""
@spec start(pos_integer(), list(TestHelper.entity()), keyword()) :: :ok
def start(ntx_to_send, spenders, opts \\ []) do
_ =
Logger.info(
"Number of spenders: #{inspect(length(spenders))}, number of tx to send per spender: #{inspect(ntx_to_send)}" <>
", #{inspect(length(spenders) * ntx_to_send)} txs in total"
)
defaults = [destdir: "."]
opts = Keyword.merge(defaults, opts)
utxos = create_deposits(spenders, ntx_to_send)
OMG.Performance.Runner.run(ntx_to_send, utxos, opts, false)
end
@spec create_deposits(list(TestHelper.entity()), pos_integer()) :: list()
defp create_deposits(spenders, ntx_to_send) do
Enum.map(make_deposits(ntx_to_send * 2, spenders), fn {:ok, owner, blknum, amount} ->
utxo_pos = Utxo.Position.encode(Utxo.position(blknum, 0, 0))
%{owner: owner, utxo_pos: utxo_pos, amount: amount}
end)
end
defp make_deposits(value, accounts) do
depositing_f = fn account ->
deposit_blknum = DepositHelper.deposit_to_child_chain(account.addr, value)
{:ok, account, deposit_blknum, value}
end
accounts
|> Task.async_stream(depositing_f, timeout: @make_deposit_timeout, max_concurrency: 10_000)
|> Enum.map(fn {:ok, result} -> result end)
end
end
|
apps/omg_performance/lib/omg_performance/extended_perftest.ex
| 0.931774
| 0.80784
|
extended_perftest.ex
|
starcoder
|
defmodule Membrane.MPEG.TS.Demuxer do
@moduledoc """
Demuxes MPEG TS stream.
  After transitioning into the playing state, this element waits for the
  [Program Association Table](https://en.wikipedia.org/wiki/MPEG_transport_stream#PAT) and
  [Program Map Table](https://en.wikipedia.org/wiki/MPEG_transport_stream#PMT).
  Upon successful parsing of those tables, it sends a message to the pipeline in the format
  `{:mpeg_ts_stream_info, configuration}`, where `configuration` contains the data read from the tables.
  The configuration sent by the element to the pipeline has the following shape:
```
%{
program_id => %Membrane.MPEG.TS.ProgramMapTable{
pcr_pid: 256,
program_info: [],
streams: %{
256 => %{stream_type: :H264, stream_type_id: 27},
257 => %{stream_type: :MPEG1_AUDIO, stream_type_id: 3}
}
}
}
```
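
  A pipeline can derive which output pads to link from this configuration. A minimal
  sketch of collecting the stream ids (values are illustrative):

  ```
  stream_ids =
    configuration
    |> Enum.flat_map(fn {_program_id, program_map_table} ->
      Map.keys(program_map_table.streams)
    end)
  #=> [256, 257] - one Pad.ref(:output, id) to link per stream id
  ```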
"""
use Membrane.Filter
alias __MODULE__.Parser
alias Membrane.Buffer
alias Membrane.MPEG.TS.Table
alias Membrane.MPEG.TS.{ProgramAssociationTable, ProgramMapTable}
@typedoc """
  This type represents the data structure that is sent by this element to the pipeline.
"""
@type configuration :: %{
ProgramAssociationTable.program_id_t() => ProgramMapTable.t()
}
@ts_packet_size 188
@pat 0
@pmt 2
defmodule State do
@moduledoc false
alias Membrane.MPEG.TS.Demuxer
defstruct data_queue: <<>>,
parser: %Parser.State{},
work_state: :waiting_pat,
configuration: %{}
@type work_state_t :: :waiting_pat | :waiting_pmt | :awaiting_linking | :working
@type t :: %__MODULE__{
data_queue: binary(),
parser: Parser.State.t(),
work_state: work_state_t(),
configuration: Demuxer.configuration()
}
end
def_output_pad :output,
availability: :on_request,
caps: :any
def_input_pad :input, caps: :any, demand_unit: :buffers
@impl true
def handle_demand(_pad, _size, _unit, _ctx, %State{work_state: work_state} = state)
when work_state in [:waiting_pat, :waiting_pmt, :awaiting_linking] do
{:ok, state}
end
def handle_demand(_pad, _size, unit, ctx, %State{work_state: :working} = state) do
standarized_new_demand = standarize_demand(ctx.incoming_demand, unit)
{{:ok, demand: {:input, &(&1 + standarized_new_demand)}}, state}
end
@impl true
def handle_init(_) do
{:ok, %State{}}
end
@impl true
def handle_other(:pads_ready, _ctx, %State{work_state: :working} = state),
do: {:ok, state}
@impl true
def handle_other(:pads_ready, ctx, %State{work_state: :awaiting_linking} = state) do
state = %State{state | work_state: :working}
{{:ok, consolidate_demands(ctx)}, state}
end
defp all_pads_added?(configuration, ctx) do
pad_names =
ctx.pads
|> Map.keys()
|> Enum.filter(&(Pad.name_by_ref(&1) == :output))
stream_ids =
configuration
|> Enum.flat_map(fn {_id, program_table} -> Map.keys(program_table.streams) end)
Enum.all?(
stream_ids,
&Enum.any?(pad_names, fn Pad.ref(:output, id) -> id == &1 end)
)
end
@impl true
def handle_prepared_to_playing(_ctx, state) do
{{:ok, demand: :input}, state}
end
@impl true
def handle_process(:input, buffer, _ctx, %State{work_state: work_state} = state)
when work_state in [:waiting_pmt, :waiting_pat] do
%{state | data_queue: state.data_queue <> buffer.payload}
|> handle_startup()
end
def handle_process(
:input,
buffer,
_ctx,
%State{work_state: :awaiting_linking, data_queue: q} = state
) do
state = %State{state | data_queue: q <> buffer.payload}
{:ok, state}
end
def handle_process(:input, buffer, ctx, %State{work_state: :working} = state) do
{payloads, data_queue, parser} =
Parser.parse_packets(state.data_queue <> buffer.payload, state.parser)
buffer_actions =
payloads
|> Enum.group_by(&Bunch.key/1, &Bunch.value/1)
# TODO What about ignoring streams
|> Enum.filter(fn {stream_pid, _} -> Pad.ref(:output, stream_pid) in Map.keys(ctx.pads) end)
|> Enum.map(fn {stream_pid, payloads} ->
buffers = Enum.map(payloads, fn payload -> %Buffer{payload: payload} end)
destination_pad = Pad.ref(:output, stream_pid)
{:buffer, {destination_pad, buffers}}
end)
actions = buffer_actions ++ redemand_all_output_pads(ctx)
state = %State{state | data_queue: data_queue, parser: parser}
{{:ok, actions}, state}
end
defp redemand_all_output_pads(ctx) do
out_pads =
ctx.pads
|> Map.keys()
|> Enum.filter(&(Pad.name_by_ref(&1) == :output))
[redemand: out_pads]
end
  # Pad added after receiving tables
@impl true
def handle_pad_added(Pad.ref(:output, _id), ctx, %State{work_state: :awaiting_linking} = state) do
if all_pads_added?(state.configuration, ctx) do
state = %State{state | work_state: :working}
{{:ok, consolidate_demands(ctx)}, state}
else
{:ok, state}
end
end
# Pad added during linking
@impl true
def handle_pad_added(_pad, _ctx, %State{work_state: work_state} = state)
when work_state in [:waiting_pat, :waiting_pmt] do
{:ok, state}
end
# TODO: remove when issue in core with handle pad added is resolved
# issue https://github.com/membraneframework/membrane-core/issues/258
@impl true
def handle_pad_added(_pad, _ctx, state) do
{:ok, state}
end
defp handle_startup(%State{data_queue: data_queue} = state)
when byte_size(data_queue) < @ts_packet_size do
{{:ok, demand: :input}, state}
end
defp handle_startup(state) do
case Parser.parse_single_packet(state.data_queue, state.parser) do
{{:ok, {_pid, table_data}}, {rest, parser_state}} ->
%State{state | parser: parser_state, data_queue: rest}
|> parse_table(table_data)
|> handle_parse_result()
{{:error, _reason}, {rest, parser_state}} ->
%State{state | parser: parser_state, data_queue: rest}
        |> handle_startup()
end
end
defp parse_table(state, table_data) do
case Membrane.MPEG.TS.Table.parse(table_data) do
{:ok, {header, data, _crc}} ->
handle_table(header, data, state)
{:error, _} = error ->
{error, state}
end
end
# Received PAT
defp handle_table(%Table{table_id: @pat}, data, %State{work_state: :waiting_pat} = state) do
parser = %{state.parser | known_tables: Map.values(data)}
state = %State{state | work_state: :waiting_pmt, parser: parser}
{:ok, state}
end
# Received one of the PMTs
defp handle_table(
%Table{table_id: @pmt} = table,
data,
%State{work_state: :waiting_pmt} = state
) do
configuration = Map.put(state.configuration, table.transport_stream_id, data)
state = %State{state | configuration: configuration}
if state.parser.known_tables == [] do
state = %State{state | work_state: :awaiting_linking, configuration: configuration}
{{:ok, notify: {:mpeg_ts_stream_info, configuration}}, state}
else
{:ok, state}
end
end
defp handle_table(_, _, state) do
{{:error, :wrong_table}, state}
end
# Demands another buffer if data_queue does not contain enough data
defp handle_parse_result({:ok, %State{work_state: ws, data_queue: data_queue} = state})
when ws in [:waiting_pat, :waiting_pmt] do
if data_queue |> byte_size() < @ts_packet_size do
{{:ok, demand: :input}, state}
else
handle_startup(state)
end
end
defp handle_parse_result({{:error, _reason}, state}), do: handle_startup(state)
defp handle_parse_result({{:ok, _actions}, _state} = result), do: result
defp consolidate_demands(ctx) do
demand_size =
ctx.pads
|> Bunch.KVEnum.filter_by_keys(&(Pad.name_by_ref(&1) == :output))
|> Enum.reduce(0, fn {_pad_ref, pad_data}, acc ->
acc + standarize_demand(pad_data.demand, pad_data.other_demand_unit)
end)
[demand: {:input, demand_size}]
end
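  # A demand in bytes is converted to whole 188-byte TS packets, e.g. (illustrative):
  #   standarize_demand(400, :bytes) #=> 3 (ceil(400 / 188))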
defp standarize_demand(size, :buffers), do: size
defp standarize_demand(size, :bytes) do
    (size / @ts_packet_size) |> ceil()
end
end
|
lib/membrane_element_mpegts/demuxer.ex
| 0.696887
| 0.827689
|
demuxer.ex
|
starcoder
|
defmodule ColognePhoneticEx do
@moduledoc """
**Cologne phonetics** (also Kölner Phonetik, Cologne process) is a phonetic
algorithm which assigns to words a sequence of digits, the phonetic code.
The aim of this procedure is that identical sounding words have the same code
assigned to them. The algorithm can be used to perform a similarity search
between words. For example, it is possible in a name list to find entries
like "Meier" under different spellings such as "Maier", "Mayer", or "Mayr".
  Cologne phonetics is related to the well-known Soundex phonetic
  algorithm, but is optimized to match the German language.
[de.wikipedia.org/wiki/Kölner_Phonetik](http://de.wikipedia.org/wiki/Kölner_Phonetik)
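
  For example, different spellings receive the same code (values follow from the
  mapping table below):

      iex> ColognePhoneticEx.as_cologne_phonetic("Meier")
      "67"
      iex> ColognePhoneticEx.as_cologne_phonetic("Mayer")
      "67"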
"""
require Logger
@cologne_phonetic_table %{
"a" => "0",
"ä" => "0",
"e" => "0",
"i" => "0",
"j" => "0",
"o" => "0",
"ö" => "0",
"u" => "0",
"ü" => "0",
"y" => "0",
"b" => "1",
"f" => "3",
"v" => "3",
"w" => "3",
"g" => "4",
"k" => "4",
"q" => "4",
"l" => "5",
"m" => "6",
"n" => "6",
"r" => "7",
"s" => "8",
"z" => "8"
}
@doc """
Calculates and returns the "Cologne Phonetic" (Kölner Phonetik) code for the given string.
It's the phonetic code for the German language.
## Examples
iex> ColognePhoneticEx.as_cologne_phonetic("Bühler")
"157"
"""
  @spec as_cologne_phonetic(String.t() | nil) :: String.t()
def as_cologne_phonetic(term) do
case term do
nil ->
""
"" ->
""
      term ->
        term_downcase = String.downcase(term)

        phonetic_string =
          each_char(0, String.graphemes(term_downcase), term_downcase, " ", "")

        # Drop the ignore markers, then drop all "0" codes except a leading one,
        # since the algorithm keeps an initial vowel code.
        trimmed = String.replace(phonetic_string, " ", "")

        case trimmed do
          "0" <> rest -> "0" <> String.replace(rest, "0", "")
          _ -> String.replace(trimmed, "0", "")
        end
end
defp each_char(_, [], _, _, current) do
current
end
  @spec each_char(non_neg_integer(), [String.t()], String.t(), String.t(), String.t()) :: String.t()
defp each_char(i, [head | tail], term, previous_code, current) do
previous = if i > 0 do
String.at(term, (i - 1))
else
" "
end
follower = if i < (String.length(term) - 1) do
String.at(term, (i + 1))
else
" "
end
code = if Map.has_key?(@cologne_phonetic_table, head) do
@cologne_phonetic_table[head]
else
case head do
"h" ->
" "
"p" ->
if follower == "h" do
"3"
else
"1"
end
"d" ->
if ((follower == "c") || (follower == "s") || (follower == "z") || (follower == "ß")) do
"8"
else
"2"
end
"t" ->
if ((follower == "c") || (follower == "s") || (follower == "z") || (follower == "ß")) do
"8"
else
"2"
end
"c" ->
if i == 1 do
if ((follower == "a") || (follower == "h") || (follower == "k") || (follower == "l")
|| (follower == "o") || (follower == "q") || (follower == "r") || (follower == "u") || (follower == "x")) do
"8"
else
"4"
end
else
if ((previous == "s") || (previous == "z") || (previous == "ß")) do
if ((follower == "a") || (follower == "h") || (follower == "k")
|| (follower == "o") || (follower == "q") || (follower == "u") || (follower == "x")) do
"8"
else
"4"
end
else
"8"
end
end
"x" ->
if ((previous == "c") || (previous == "k") || (previous == "q")) do
"8"
else
"48"
end
_ ->
" "
end
end
phonetic_string = if previous_code != code do
current <> to_string(code)
else
current
end
each_char(i + 1, tail, term, code, phonetic_string)
end
end
|
lib/cologne_phonetic_ex.ex
| 0.704262
| 0.527742
|
cologne_phonetic_ex.ex
|
starcoder
|
defmodule Mix.Tasks.AshPostgres.Migrate do
use Mix.Task
import AshPostgres.MixHelpers,
only: [migrations_path: 2, tenant_migrations_path: 2, tenants: 2]
@shortdoc "Runs the repository migrations for all repositories in the provided (or congigured) apis"
@aliases [
n: :step
]
@switches [
all: :boolean,
tenants: :boolean,
step: :integer,
to: :integer,
quiet: :boolean,
prefix: :string,
pool_size: :integer,
log_sql: :boolean,
strict_version_order: :boolean,
apis: :string,
no_compile: :boolean,
no_deps_check: :boolean,
migrations_path: :keep,
only_tenants: :string,
except_tenants: :string
]
@moduledoc """
Runs the pending migrations for the given repository.
Migrations are expected at "priv/YOUR_REPO/migrations" directory
of the current application (or `tenant_migrations` for multitenancy),
where "YOUR_REPO" is the last segment
in your repository name. For example, the repository `MyApp.Repo`
will use "priv/repo/migrations". The repository `Whatever.MyRepo`
will use "priv/my_repo/migrations".
This task runs all pending migrations by default. To migrate up to a
specific version number, supply `--to version_number`. To migrate a
specific number of times, use `--step n`.
This is only really useful if your api or apis only use a single repo.
If you have multiple repos and you want to run a single migration and/or
migrate/roll them back to different points, you will need to use the
ecto specific task, `mix ecto.migrate` and provide your repo name.
If a repository has not yet been started, one will be started outside
your application supervision tree and shutdown afterwards.
## Examples
mix ash_postgres.migrate
mix ash_postgres.migrate --apis MyApp.Api1,MyApp.Api2
mix ash_postgres.migrate -n 3
mix ash_postgres.migrate --step 3
mix ash_postgres.migrate --to 20080906120000
## Command line options
  * `--apis` - the apis whose repos should be migrated
* `--tenants` - Run the tenant migrations
* `--only-tenants` - in combo with `--tenants`, only runs migrations for the provided tenants, e.g `tenant1,tenant2,tenant3`
* `--except-tenants` - in combo with `--tenants`, does not run migrations for the provided tenants, e.g `tenant1,tenant2,tenant3`
* `--all` - run all pending migrations
* `--step`, `-n` - run n number of pending migrations
* `--to` - run all migrations up to and including version
* `--quiet` - do not log migration commands
* `--prefix` - the prefix to run migrations on. This is ignored if `--tenants` is provided.
* `--pool-size` - the pool size if the repository is started only for the task (defaults to 2)
* `--log-sql` - log the raw sql migrations are running
* `--strict-version-order` - abort when applying a migration with old timestamp
* `--no-compile` - does not compile applications before migrating
  * `--no-deps-check` - does not check dependencies before migrating
* `--migrations-path` - the path to load the migrations from, defaults to
`"priv/repo/migrations"`. This option may be given multiple times in which case the migrations
are loaded from all the given directories and sorted as if they were in the same one.
  Note: if you have migration paths, e.g. `a/` and `b/`, and run
  `mix ecto.migrate --migrations-path a/`, the latest migrations from `a/` will be run (even
  if `b/` contains the overall latest migrations).
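
  For example, to load migrations from several directories (paths are illustrative):

      mix ash_postgres.migrate --migrations-path priv/repo/migrations --migrations-path priv/repo/more_migrations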
"""
@impl true
def run(args) do
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
repos = AshPostgres.MixHelpers.repos(opts, args)
if repos == [] do
raise "must supply the --apis argument, or set `config :my_app, ash_apis: [...]` in config"
end
repo_args =
Enum.flat_map(repos, fn repo ->
["-r", to_string(repo)]
end)
rest_opts =
args
|> AshPostgres.MixHelpers.delete_arg("--apis")
|> AshPostgres.MixHelpers.delete_arg("--migrations-path")
|> AshPostgres.MixHelpers.delete_flag("--tenants")
|> AshPostgres.MixHelpers.delete_flag("--only-tenants")
|> AshPostgres.MixHelpers.delete_flag("--except-tenants")
if opts[:tenants] do
for repo <- repos do
Ecto.Migrator.with_repo(repo, fn repo ->
for tenant <- tenants(repo, opts) do
rest_opts = AshPostgres.MixHelpers.delete_arg(rest_opts, "--prefix")
Mix.Task.run(
"ecto.migrate",
repo_args ++
rest_opts ++
["--prefix", tenant, "--migrations-path", tenant_migrations_path(opts, repo)]
)
Mix.Task.reenable("ecto.migrate")
end
end)
end
else
for repo <- repos do
Mix.Task.run(
"ecto.migrate",
repo_args ++ rest_opts ++ ["--migrations-path", migrations_path(opts, repo)]
)
Mix.Task.reenable("ecto.migrate")
end
end
end
end
|
lib/mix/tasks/ash_postgres.migrate.ex
| 0.733833
| 0.400515
|
ash_postgres.migrate.ex
|
starcoder
|
defmodule Snitch.Data.Schema.Address do
@moduledoc """
Models an Address
  An `Address` must contain a reference to a `Snitch.Data.Schema.Country`, and a
  reference to a `Snitch.Data.Schema.State` only if the country has states. This
  means some addresses might not have a state.
"""
use Snitch.Data.Schema
alias Snitch.Data.Schema.{Country, State}
@type t :: %__MODULE__{}
schema "snitch_addresses" do
field(:first_name, :string)
field(:last_name, :string)
field(:address_line_1, :string)
field(:address_line_2, :string)
field(:city, :string)
field(:zip_code, :string)
field(:phone, :string)
field(:alternate_phone, :string)
belongs_to(:state, State, on_replace: :nilify)
belongs_to(:country, Country, on_replace: :nilify)
belongs_to(:user, User)
timestamps()
end
@required_fields ~w(first_name last_name address_line_1 city zip_code country_id user_id)a
@cast_fields ~w(phone alternate_phone state_id address_line_2)a ++ @required_fields
@doc """
Returns an `Address` changeset to create OR update `address`.
An address must be associated with a country, and if the country has
sub-divisions (aka states) according to ISO 3166-2, then the address must also
be associated with a state.
## Note
  You may only provide `:country_id` and `:state_id`; structs under `:country`
  and `:state` are ignored.
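
  ## Example

  A sketch of building a changeset (the ids are illustrative and must reference
  existing records):

      changeset(%Address{}, %{
        first_name: "Jane",
        last_name: "Doe",
        address_line_1: "10 Downing Street",
        city: "London",
        zip_code: "SW1A 2AA",
        country_id: 1,
        user_id: 1
      })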
"""
@spec changeset(t, map) :: Ecto.Changeset.t()
def changeset(%__MODULE__{} = address, params) do
address
|> cast(params, @cast_fields)
|> validate_required(@required_fields)
|> validate_length(:address_line_1, min: 10)
|> assoc_country_and_state()
end
defp assoc_country_and_state(
%{
valid?: true,
changes: %{
country_id: c_id,
state_id: s_id
}
} = changeset
) do
case Repo.get(Country, c_id) do
nil ->
add_error(changeset, :country_id, "does not exist", country_id: c_id)
%Country{} = country ->
changeset
|> put_assoc(:country, country)
|> assoc_state(country, s_id)
end
end
defp assoc_country_and_state(
%{
valid?: true,
changes: %{state_id: s_id},
data: %{country_id: c_id}
} = changeset
) do
case Repo.get(Country, c_id) do
nil ->
add_error(changeset, :country_id, "does not exist", country_id: c_id)
%Country{} = country ->
assoc_state(changeset, country, s_id)
end
end
defp assoc_country_and_state(
%{
valid?: true,
changes: %{country_id: c_id}
} = changeset
) do
case Repo.get(Country, c_id) do
nil ->
add_error(changeset, :country_id, "does not exist", country_id: c_id)
%Country{} = country ->
changeset
|> put_assoc(:country, country)
|> assoc_state(country, nil)
end
end
defp assoc_country_and_state(changeset), do: changeset
defp assoc_state(changeset, %Country{states_required: false}, _) do
put_change(changeset, :state_id, nil)
end
defp assoc_state(changeset, %{states_required: true} = country, s_id) when is_integer(s_id) do
case Repo.get(State, s_id) do
nil ->
add_error(changeset, :state_id, "does not exist", state_id: s_id)
%State{} = state ->
if state.country_id == country.id do
put_assoc(changeset, :state, state)
else
add_error(
changeset,
:state,
"state does not belong to country",
state_id: state.id,
country_id: country.id
)
end
end
end
defp assoc_state(changeset, %{states_required: true} = country, _) do
add_error(
changeset,
:state_id,
"state is explicitly required for this country",
country_id: country.id
)
end
end
|
apps/snitch_core/lib/core/data/schema/address.ex
| 0.874553
| 0.590573
|
address.ex
|
starcoder
|
defmodule ScrapyCloudEx.Endpoints.App.Jobs do
@moduledoc """
Wraps the [Jobs](https://doc.scrapinghub.com/api/jobs.html) endpoint.
The jobs API makes it easy to work with your spider’s jobs and lets you schedule,
stop, update and delete them.
"""
import ScrapyCloudEx.Endpoints.Guards
alias ScrapyCloudEx.Endpoints
alias ScrapyCloudEx.Endpoints.{App, Helpers}
alias ScrapyCloudEx.HttpAdapter.RequestConfig
@typedoc """
A function to encode job settings to JSON.
This function will be given the job settings provided to `run/5` so they can be encoded
into a JSON string.
"""
@type encoder_fun :: (term() -> {:ok, String.t()} | {:error, any()})
@base_url "https://app.scrapinghub.com/api"
@valid_states ~w(pending running finished deleted)
@doc """
Schedules a job for a given spider.
The following parameters are supported in the `params` argument:
* `:add_tag` - add the specified tag to the job. May be given multiple times.
* `:job_settings` - job settings to be proxied to the job. This value can be provided
as a string representation of a JSON object, or as an Elixir term. If a term is provided,
an accompanying encoding function (of type `t:encoder_fun/0`) must be provided with the
`:encoder` key within `opts`.
* `:priority` - job priority. Supports values in the `0..4` range (where `4` is highest
priority). Defaults to `2`.
* `:units` - Amount of [units](https://support.scrapinghub.com/support/solutions/articles/22000200408-what-is-a-scrapy-cloud-unit-) to use for the job. Supports values in the `1..6` range.
Any other parameter will be treated as a spider argument.
The `opts` value is documented [here](ScrapyCloudEx.Endpoints.html#module-options).
See docs [here](https://doc.scrapinghub.com/api/jobs.html#run-json).
## Example
```
settings = [job_settings: ~s({ "SETTING1": "value1", "SETTING2": "value2" })]
tags = [add_tag: "sometag", add_tag: "othertag"]
params = [priority: 3, units: 1, spiderarg1: "example"] ++ tags ++ settings
ScrapyCloudEx.Endpoints.App.Jobs.run("API_KEY", "123", "somespider", params)
# {:ok, %{"jobid" => "123/1/4", "status" => "ok"}}
```
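
  Job settings may also be given as a map together with an encoder in `opts`
  (a sketch, assuming the `jason` library is available):

  ```
  params = [job_settings: %{"SETTING1" => "value1"}]
  opts = [encoder: &Jason.encode/1]
  ScrapyCloudEx.Endpoints.App.Jobs.run("API_KEY", "123", "somespider", params, opts)
  ```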
"""
@spec run(String.t(), String.t() | integer, String.t(), Keyword.t(), Keyword.t()) ::
ScrapyCloudEx.result(map())
def run(api_key, project_id, spider_name, params \\ [], opts \\ [])
when is_api_key(api_key)
when is_id(project_id)
when is_binary(spider_name)
when is_list(params)
when is_list(opts) do
job_settings = params |> Keyword.get(:job_settings)
json_encoder = Keyword.get(opts, :encoder)
with {:ok, job_settings} <- format_job_settings(job_settings, json_encoder) do
body =
params
|> Keyword.put(:project, project_id)
|> Keyword.put(:spider, spider_name)
|> maybe_add_job_settings(job_settings)
RequestConfig.new()
|> RequestConfig.put(:api_key, api_key)
|> RequestConfig.put(:method, :post)
|> RequestConfig.put(:body, body)
|> RequestConfig.put(:opts, opts)
|> RequestConfig.put(:url, "#{@base_url}/run.json")
|> Helpers.make_request()
else
error -> {:error, error}
end
end
@doc """
Retrieves job information for a given project, spider, or specific job.
The following parameters are supported in the `params` argument:
* `:format` - the format to be used for returning results. Can be `:json` or `:jl`. Defaults to `:json`.
* `:pagination` - the pagination params: a keyword list with optional `:count` and `:offset` integer
values, where `:count` indicates the desired number of results per page and `:offset` the offset
to retrieve specific records.
* `:job` - the job id.
* `:spider` - the spider name.
* `:state` - return jobs with specified state. Supported values: `"pending"`, `"running"`,
`"finished"`, `"deleted"`.
* `:has_tag` - return jobs with specified tag. May be given multiple times, and will behave
as a logical `OR` operation among the values.
* `:lacks_tag` - return jobs that lack specified tag. May be given multiple times, and will
behave as a logical `AND` operation among the values.
The `opts` value is documented [here](ScrapyCloudEx.Endpoints.html#module-options).
See docs [here](https://doc.scrapinghub.com/api/jobs.html#jobs-list-json-jl).
## Examples
```
# Retrieve the latest 3 finished jobs for "somespider" spider
params = [spider: "somespider", state: "finished", count: 3]
ScrapyCloudEx.Endpoints.App.Jobs.list("API_KEY", "123", params)
# Retrieve all running jobs
ScrapyCloudEx.Endpoints.App.Jobs.list("API_KEY", "123", state: "running")
# Retrieve 10 jobs with the tag "consumed"
ScrapyCloudEx.Endpoints.App.Jobs.list("API_KEY", "123", has_tag: "consumed", pagination: [count: 10])
```
## Example return value
```
{:ok,
%{
"status" => "ok",
"count" => 2,
"total" => 2,
"jobs" => [
%{
"close_reason" => "cancelled",
"elapsed" => 124138,
"errors_count" => 0,
"id" => "123/1/3",
"items_scraped" => 620,
"logs" => 17,
"priority" => 2,
"responses_received" => 670,
"spider" => "somespider",
"spider_type" => "manual",
"started_time" => "2018-10-03T07:06:07",
"state" => "finished",
"tags" => ["foo"],
"updated_time" => "2018-10-03T07:07:42",
"version" => "5ef3139-master"
},
%{
"close_reason" => "cancelled",
"elapsed" => 483843779,
"errors_count" => 1,
"id" => "123/1/2",
"items_scraped" => 2783,
"logs" => 20,
"priority" => 3,
"responses_received" => 2888,
"spider" => "somespider",
"spider_args" => %{"spiderarg1" => "example"},
"spider_type" => "manual",
"started_time" => "2018-10-23T16:42:54",
"state" => "finished",
"tags" => ["bar", "foo"],
"updated_time" => "2018-10-23T16:45:54",
"version" => "5ef3139-master"
}
]
}
}
```
"""
@spec list(String.t(), String.t() | integer, Keyword.t(), Keyword.t()) ::
ScrapyCloudEx.result(map())
def list(api_key, project_id, params \\ [], opts \\ [])
when is_api_key(api_key)
when is_id(project_id)
when is_list(params)
when is_list(opts) do
with valid_params =
[:format, :job, :spider, :state, :has_tag, :lacks_tag, :pagination] ++
App.pagination_params(),
:ok <- Helpers.validate_params(params, valid_params),
true <- Keyword.get(params, :format) in [nil, :json, :jl],
format = Keyword.get(params, :format, :json),
:ok <- params |> Keyword.get(:state) |> validate_state() do
params =
params
|> Endpoints.scope_params(:pagination, [:count, :offset])
|> Endpoints.merge_scope(:pagination)
|> Keyword.delete(:format)
query = [{:project, project_id} | params] |> URI.encode_query()
RequestConfig.new()
|> RequestConfig.put(:api_key, api_key)
|> RequestConfig.put(:opts, opts)
|> RequestConfig.put(:url, "#{@base_url}/jobs/list.#{format}?#{query}")
|> Helpers.make_request()
else
false ->
error =
"expected format to be one of :json, :jl, but got `#{Keyword.get(params, :format)}`"
|> Helpers.invalid_param_error(:format)
{:error, error}
{:invalid_param, _} = error ->
{:error, error}
end
end
@doc """
Updates information about jobs.
The job ids in `job_or_jobs` must have at least 3 sections.
The following parameters are supported in the `params` argument:
  * `:add_tag` - add the specified tag to the job(s). May be given multiple times.
  * `:remove_tag` - remove the specified tag from the job(s). May be given multiple times.
The `opts` value is documented [here](ScrapyCloudEx.Endpoints.html#module-options).
See docs [here](https://doc.scrapinghub.com/api/jobs.html#jobs-update-json).
## Example
```
params = [add_tag: "foo", add_tag: "bar", remove_tag: "sometag"]
ScrapyCloudEx.Endpoints.App.Jobs.update("API_KEY", "123", ["123/1/1", "123/1/2"], params)
# {:ok, %{"count" => 2, "status" => "ok"}}
```
"""
@spec update(
String.t(),
String.t() | integer,
String.t() | [String.t()],
Keyword.t(),
Keyword.t()
) :: ScrapyCloudEx.result(map())
def update(api_key, project_id, job_or_jobs, params \\ [], opts \\ [])
when is_api_key(api_key)
when is_id(project_id)
when is_id(job_or_jobs) or is_list(job_or_jobs)
when is_list(params)
when is_list(opts) do
with :ok <- Helpers.validate_params(params, [:add_tag, :remove_tag]) do
request = prepare_basic_post_request(api_key, project_id, job_or_jobs, opts)
request
|> RequestConfig.put(:url, "#{@base_url}/jobs/update.json")
|> RequestConfig.put(:body, request.body ++ params)
|> Helpers.make_request()
else
{:invalid_param, _} = error -> {:error, error}
end
end
@doc """
Deletes one or more jobs.
The job ids in `job_or_jobs` must have at least 3 sections.
The `opts` value is documented [here](ScrapyCloudEx.Endpoints.html#module-options).
See docs [here](https://doc.scrapinghub.com/api/jobs.html#jobs-delete-json).
## Example
```
ScrapyCloudEx.Endpoints.App.Jobs.delete("API_KEY", "123", ["123/1/1", "123/1/2"])
# {:ok, %{"count" => 2, "status" => "ok"}}
```
"""
@spec delete(String.t(), String.t() | integer, String.t() | [String.t()], Keyword.t()) ::
ScrapyCloudEx.result(map())
def delete(api_key, project_id, job_or_jobs, opts \\ [])
when is_api_key(api_key)
when is_id(project_id)
when is_id(job_or_jobs) or is_list(job_or_jobs)
when is_list(opts) do
api_key
|> prepare_basic_post_request(project_id, job_or_jobs, opts)
|> RequestConfig.put(:url, "#{@base_url}/jobs/delete.json")
|> Helpers.make_request()
end
@doc """
Stops one or more running jobs.
The job ids in `job_or_jobs` must have at least 3 sections.
The `opts` value is documented [here](ScrapyCloudEx.Endpoints.html#module-options).
See docs [here](https://doc.scrapinghub.com/api/jobs.html#jobs-stop-json).
## Example
```
ScrapyCloudEx.Endpoints.App.Jobs.stop("API_KEY", "123", ["123/1/1", "123/1/2"])
# {:ok, %{"status" => "ok"}}
```
"""
@spec stop(String.t(), String.t() | integer, [String.t()], Keyword.t()) ::
ScrapyCloudEx.result(map())
def stop(api_key, project_id, job_or_jobs, opts \\ [])
when is_api_key(api_key)
when is_id(project_id)
when is_id(job_or_jobs) or is_list(job_or_jobs)
when is_list(opts) do
api_key
|> prepare_basic_post_request(project_id, job_or_jobs, opts)
|> RequestConfig.put(:url, "#{@base_url}/jobs/stop.json")
|> Helpers.make_request()
end
@spec prepare_basic_post_request(String.t(), String.t() | integer, [String.t()], Keyword.t()) ::
RequestConfig.t()
defp prepare_basic_post_request(api_key, project_id, job_or_jobs, opts) do
body =
job_or_jobs
|> format_jobs()
|> Keyword.put(:project, project_id)
RequestConfig.new()
|> RequestConfig.put(:api_key, api_key)
|> RequestConfig.put(:method, :post)
|> RequestConfig.put(:body, body)
|> RequestConfig.put(:opts, opts)
end
@spec format_jobs([String.t()]) :: Keyword.t()
defp format_jobs(job_or_jobs) do
job_or_jobs
|> List.wrap()
|> Enum.map(&{:job, &1})
end
@spec format_job_settings(any, encoder_fun | nil) :: ScrapyCloudEx.result(any)
defp format_job_settings(nil, _encoder), do: {:ok, []}
defp format_job_settings(settings, _encoder) when is_binary(settings), do: {:ok, settings}
defp format_job_settings(settings, encoder) when is_map(settings) and is_function(encoder),
do: encoder.(settings)
defp format_job_settings(_settings, _encoder) do
"expected job_settings to be a string-encoded JSON object or to have an encoder function provided"
|> Helpers.invalid_param_error(:job_settings)
end
@spec maybe_add_job_settings(Keyword.t(), any) :: Keyword.t()
defp maybe_add_job_settings(list, []), do: list
defp maybe_add_job_settings(list, settings) do
list |> Keyword.put(:job_settings, settings)
end
@spec validate_state(any) :: :ok | ScrapyCloudEx.tagged_error()
defp validate_state(nil), do: :ok
defp validate_state(state) when state in @valid_states, do: :ok
defp validate_state(state) do
"state '#{state}' not among valid states: #{@valid_states |> Enum.join(", ")}"
|> Helpers.invalid_param_error(:state)
end
end
|
lib/endpoints/app/jobs.ex
| 0.911224
| 0.782164
|
jobs.ex
|
starcoder
|
defmodule Quantum.DateLibrary do
@moduledoc """
  This behaviour offers date-library-independent integration of helper
  functions.
**This behaviour is considered internal. Breaking Changes can occur on every
release.**
Make sure your implementation passes `Quantum.DateLibraryTest`. Otherwise
unexpected behaviour can occur.
"""
@date_library Application.get_env(:quantum, :date_library, Quantum.DateLibrary.Timex)
@doc """
Convert `NaiveDateTime` in UTC to `NaiveDateTime` in given tz.
* Should raise an `InvalidTimezoneError` if the timezone is not valid.
"""
@callback utc_to_tz!(NaiveDateTime.t(), String.t()) :: NaiveDateTime.t() | no_return
@doc """
Convert `NaiveDateTime` in given tz to `NaiveDateTime` in UTC.
* Should raise an `InvalidDateTimeForTimezoneError` if the time is not valid.
* Should raise an `InvalidTimezoneError` if the timezone is not valid.
"""
@callback tz_to_utc!(NaiveDateTime.t(), String.t()) :: NaiveDateTime.t() | no_return
@doc """
Gives back the required application dependency to start, if any is needed.
"""
@callback dependency_application :: atom | nil
@doc """
  Converts a `NaiveDateTime` in the given timezone to UTC.
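
  For example (a sketch; exact results depend on the configured date library):

      to_utc!(~N[2019-01-01 12:00:00], "Europe/Zurich")
      #=> ~N[2019-01-01 11:00:00]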
"""
@spec to_utc!(NaiveDateTime.t(), :utc | binary) :: NaiveDateTime.t() | no_return
def to_utc!(date, :utc), do: date
def to_utc!(date, tz) when is_binary(tz), do: @date_library.tz_to_utc!(date, tz)
@doc """
Convert Date to TZ
"""
  @spec to_tz!(NaiveDateTime.t(), :utc | binary) :: NaiveDateTime.t() | no_return
def to_tz!(date, :utc), do: date
def to_tz!(date, tz) when is_binary(tz), do: @date_library.utc_to_tz!(date, tz)
defmodule InvalidDateTimeForTimezoneError do
@moduledoc """
    Raised when a time does not exist in a timezone. This happens, for example, when changing from DST to standard time.
"""
defexception message: "The requested time does not exist in the given timezone."
end
defmodule InvalidTimezoneError do
@moduledoc """
Raised when a timezone does not exist.
"""
defexception message: "The requested timezone is invalid."
end
end
|
lib/quantum/date_library.ex
| 0.927569
| 0.550789
|
date_library.ex
|
starcoder
|