hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e83d3e1518b9d81856f1736d80807714b4926713 | 8,279 | ex | Elixir | elixir/codes-from-books/little-elixir/cap8/blitzy/deps/timex/lib/interval/interval.ex | trxeste/wrk | 3e05e50ff621866f0361cc8494ce8f6bb4d97fae | [
"BSD-3-Clause"
] | 1 | 2017-10-16T03:00:50.000Z | 2017-10-16T03:00:50.000Z | elixir/codes-from-books/little-elixir/cap8/blitzy/deps/timex/lib/interval/interval.ex | trxeste/wrk | 3e05e50ff621866f0361cc8494ce8f6bb4d97fae | [
"BSD-3-Clause"
] | null | null | null | elixir/codes-from-books/little-elixir/cap8/blitzy/deps/timex/lib/interval/interval.ex | trxeste/wrk | 3e05e50ff621866f0361cc8494ce8f6bb4d97fae | [
"BSD-3-Clause"
] | null | null | null | defmodule Timex.Interval do
@moduledoc """
This module is used for creating and manipulating DateTime intervals.
"""
alias Timex.Duration
defmodule FormatError do
@moduledoc """
Thrown when an error occurs with formatting an Interval
"""
defexception message: "Unable to format interval!"
def exception([message: message]) do
%FormatError{message: message}
end
end
  # Both endpoints are mandatory; the remaining fields default to a
  # right-open interval stepped one day at a time.
  @enforce_keys [:from, :until]
  defstruct from: nil,
            until: nil,
            # when true, the `from` endpoint is excluded from the interval
            left_open: false,
            # when true, the `until` endpoint is excluded (the default)
            right_open: true,
            # shift keywords used when enumerating the interval
            step: [days: 1]
@doc """
Create a new Interval struct.
Note: By default intervals are right open.
Valid keywords:
- `from`: The date the interval starts at. Should be a DateTime.
- `until`: Either a DateTime, or a time shift that will be applied to the `from` date.
- `left_open`: Whether the interval is left open. See explanation below.
- `right_open`: Whether the interval is right open. See explanation below.
- `step`: The step to use when iterating the interval, defaults to `[days: 1]`
The terms`left_open` and `right_open` come from the mathematical concept of intervals, the following
excerpt from Wikipedia gives a good explanation of their meaning:
"An interval is said to be left-open if and only if it has no minimum
(an element that is smaller than all other elements); right-open if it has no maximum;
and open if it has both properties. The interval [0,1) = {x | 0 ≤ x < 1}, for example,
is left-closed and right-open. The empty set and the set of all reals are open intervals,
while the set of non-negative reals, for example, is a right-open but not left-open interval.
The open intervals coincide with the open sets of the real line in its standard topology."
Note: `until` shifts delegate to `Timex.shift`, so the options provided should match its valid options.
## Examples
iex> use Timex
...> Interval.new(from: ~D[2014-09-22], until: ~D[2014-09-29])
...> |> Interval.format!("%Y-%m-%d", :strftime)
"[2014-09-22, 2014-09-29)"
iex> use Timex
...> Interval.new(from: ~D[2014-09-22], until: [days: 7])
...> |> Interval.format!("%Y-%m-%d", :strftime)
"[2014-09-22, 2014-09-29)"
iex> use Timex
...> Interval.new(from: ~D[2014-09-22], until: [days: 7], left_open: true, right_open: false)
...> |> Interval.format!("%Y-%m-%d", :strftime)
"(2014-09-22, 2014-09-29]"
iex> use Timex
...> Interval.new(from: ~N[2014-09-22T15:30:00], until: [minutes: 20], right_open: false)
...> |> Interval.format!("%H:%M", :strftime)
"[15:30, 15:50]"
"""
def new(options \\ []) do
from = case Keyword.get(options, :from) do
nil -> Timex.Protocol.NaiveDateTime.now()
%NaiveDateTime{} = d -> d
d -> Timex.to_naive_datetime(d)
end
left_open = Keyword.get(options, :left_open, false)
right_open = Keyword.get(options, :right_open, true)
step = Keyword.get(options, :step, [days: 1])
until = case Keyword.get(options, :until, [days: 1]) do
{:error, _} = err -> err
x when is_list(x) -> Timex.shift(from, x)
%NaiveDateTime{} = d -> d
%DateTime{} = d -> Timex.to_naive_datetime(d)
%Date{} = d -> Timex.to_naive_datetime(d)
_ -> {:error, :invalid_until}
end
case until do
{:error, _} = err -> err
_ ->
%__MODULE__{from: from, until: until,
left_open: left_open, right_open: right_open,
step: step}
end
end
@doc """
Return the interval duration, given a unit.
When the unit is one of `:seconds`, `:minutes`, `:hours`, `:days`, `:weeks`, `:months`, `:years`, the result is an `integer`.
When the unit is `:duration`, the result is a `Duration` struct.
## Example
iex> use Timex
...> Interval.new(from: ~D[2014-09-22], until: [months: 5])
...> |> Interval.duration(:months)
5
iex> use Timex
...> Interval.new(from: ~N[2014-09-22T15:30:00], until: [minutes: 20])
...> |> Interval.duration(:duration)
Duration.from_minutes(20)
"""
def duration(%__MODULE__{from: from, until: until}, :duration) do
Timex.diff(until, from, :microseconds) |> Duration.from_microseconds
end
def duration(%__MODULE__{from: from, until: until}, unit) do
Timex.diff(until, from, unit)
end
@doc """
Change the step value for the provided interval.
The step should be a keyword list valid for use with `Timex.Date.shift`.
## Examples
iex> use Timex
...> Interval.new(from: ~D[2014-09-22], until: [days: 3], right_open: false)
...> |> Interval.with_step([days: 1]) |> Enum.map(&Timex.format!(&1, "%Y-%m-%d", :strftime))
["2014-09-22", "2014-09-23", "2014-09-24", "2014-09-25"]
iex> use Timex
...> Interval.new(from: ~D[2014-09-22], until: [days: 3], right_open: false)
...> |> Interval.with_step([days: 2]) |> Enum.map(&Timex.format!(&1, "%Y-%m-%d", :strftime))
["2014-09-22", "2014-09-24"]
iex> use Timex
...> Interval.new(from: ~D[2014-09-22], until: [days: 3], right_open: false)
...> |> Interval.with_step([days: 3]) |> Enum.map(&Timex.format!(&1, "%Y-%m-%d", :strftime))
["2014-09-22", "2014-09-25"]
"""
def with_step(%__MODULE__{} = interval, step) do
%__MODULE__{interval | :step => step}
end
@doc """
Formats the interval as a human readable string.
## Examples
iex> use Timex
...> Interval.new(from: ~D[2014-09-22], until: [days: 3])
...> |> Interval.format!("%Y-%m-%d %H:%M", :strftime)
"[2014-09-22 00:00, 2014-09-25 00:00)"
iex> use Timex
...> Interval.new(from: ~D[2014-09-22], until: [days: 3])
...> |> Interval.format!("%Y-%m-%d", :strftime)
"[2014-09-22, 2014-09-25)"
"""
def format(%__MODULE__{} = interval, format, formatter \\ nil) do
case Timex.format(interval.from, format, formatter) do
{:error, _} = err -> err
{:ok, from} ->
case Timex.format(interval.until, format, formatter) do
{:error, _} = err -> err
{:ok, until} ->
lopen = if interval.left_open, do: "(", else: "["
ropen = if interval.right_open, do: ")", else: "]"
{:ok, "#{lopen}#{from}, #{until}#{ropen}"}
end
end
end
@doc """
Same as `format/3`, but raises a `Timex.Interval.FormatError` on failure.
"""
def format!(%__MODULE__{} = interval, format, formatter \\ nil) do
case format(interval, format, formatter) do
{:ok, str} -> str
{:error, e} -> raise FormatError, message: "#{inspect e}"
end
end
defimpl Enumerable do
def reduce(interval, acc, fun) do
do_reduce({get_starting_date(interval), interval.until, interval.right_open, interval.step}, acc, fun)
end
def member?(%Timex.Interval{from: from, until: until}, value) do
# Just tests for set membership (date is within the provided (inclusive) range)
result = cond do
Timex.compare(value, from) < 1 -> false
Timex.compare(value, until) > 0 -> false
:else -> true
end
{:ok, result}
end
def count(_interval) do
{:error, __MODULE__}
end
defp do_reduce(_state, {:halt, acc}, _fun), do: {:halted, acc}
defp do_reduce( state, {:suspend, acc}, fun), do: {:suspended, acc, &do_reduce(state, &1, fun)}
defp do_reduce({current_date, end_date, right_open, keywords}, {:cont, acc}, fun) do
if has_recursion_ended?(current_date, end_date, right_open) do
{:done, acc}
else
next_date = Timex.shift(current_date, keywords)
do_reduce({next_date, end_date, right_open, keywords}, fun.(current_date, acc), fun)
end
end
defp get_starting_date(%Timex.Interval{from: from, step: step, left_open: true}), do: Timex.shift(from, step)
defp get_starting_date(%Timex.Interval{from: from}), do: from
defp has_recursion_ended?(current_date, end_date, true), do: Timex.compare(end_date, current_date) < 1
defp has_recursion_ended?(current_date, end_date, false), do: Timex.compare(end_date, current_date) < 0
end
end
| 35.995652 | 127 | 0.607561 |
e83d498544329e01bf7fec58cad1d32f88948f13 | 49,340 | ex | Elixir | lib/elixir/lib/stream.ex | irisTa56/elixir | 0a953d75fb2cbdc6f3d33040aa60738d85512a1f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/stream.ex | irisTa56/elixir | 0a953d75fb2cbdc6f3d33040aa60738d85512a1f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/stream.ex | irisTa56/elixir | 0a953d75fb2cbdc6f3d33040aa60738d85512a1f | [
"Apache-2.0"
] | null | null | null | defmodule Stream do
@moduledoc """
Functions for creating and composing streams.
Streams are composable, lazy enumerables (for an introduction on
enumerables, see the `Enum` module). Any enumerable that generates
elements one by one during enumeration is called a stream. For example,
Elixir's `Range` is a stream:
iex> range = 1..5
1..5
iex> Enum.map(range, &(&1 * 2))
[2, 4, 6, 8, 10]
In the example above, as we mapped over the range, the elements being
enumerated were created one by one, during enumeration. The `Stream`
module allows us to map the range, without triggering its enumeration:
iex> range = 1..3
iex> stream = Stream.map(range, &(&1 * 2))
iex> Enum.map(stream, &(&1 + 1))
[3, 5, 7]
Notice we started with a range and then we created a stream that is
meant to multiply each element in the range by 2. At this point, no
computation was done. Only when `Enum.map/2` is called we actually
enumerate over each element in the range, multiplying it by 2 and adding 1.
We say the functions in `Stream` are *lazy* and the functions in `Enum`
are *eager*.
Due to their laziness, streams are useful when working with large
(or even infinite) collections. When chaining many operations with `Enum`,
intermediate lists are created, while `Stream` creates a recipe of
computations that are executed at a later moment. Let's see another
example:
1..3
|> Enum.map(&IO.inspect(&1))
|> Enum.map(&(&1 * 2))
|> Enum.map(&IO.inspect(&1))
1
2
3
2
4
6
#=> [2, 4, 6]
Notice that we first printed each element in the list, then multiplied each
element by 2 and finally printed each new value. In this example, the list
was enumerated three times. Let's see an example with streams:
stream = 1..3
|> Stream.map(&IO.inspect(&1))
|> Stream.map(&(&1 * 2))
|> Stream.map(&IO.inspect(&1))
Enum.to_list(stream)
1
2
2
4
3
6
#=> [2, 4, 6]
Although the end result is the same, the order in which the elements were
printed changed! With streams, we print the first element and then print
its double. In this example, the list was enumerated just once!
That's what we meant when we said earlier that streams are composable,
lazy enumerables. Notice we could call `Stream.map/2` multiple times,
effectively composing the streams and keeping them lazy. The computations
are only performed when you call a function from the `Enum` module.
Like with `Enum`, the functions in this module work in linear time. This
means that, the time it takes to perform an operation grows at the same
rate as the length of the list. This is expected on operations such as
`Stream.map/2`. After all, if we want to traverse every element on a
stream, the longer the stream, the more elements we need to traverse,
and the longer it will take.
## Creating Streams
There are many functions in Elixir's standard library that return
streams, some examples are:
* `IO.stream/2` - streams input lines, one by one
* `URI.query_decoder/1` - decodes a query string, pair by pair
This module also provides many convenience functions for creating streams,
like `Stream.cycle/1`, `Stream.unfold/2`, `Stream.resource/3` and more.
Note the functions in this module are guaranteed to return enumerables.
Since enumerables can have different shapes (structs, anonymous functions,
and so on), the functions in this module may return any of those shapes
and this may change at any time. For example, a function that today
returns an anonymous function may return a struct in future releases.
"""
  @doc false
  # Internal representation of a composed lazy stream: `enum` is the source
  # enumerable, `funs` the composed reducer wrappers, `accs` their initial
  # accumulators, and `done` presumably a finalizer callback — set by the
  # `lazy/*` helpers defined elsewhere in this module.
  defstruct enum: nil, funs: [], accs: [], done: nil

  @type acc :: any
  @type element :: any

  @typedoc "Zero-based index."
  @type index :: non_neg_integer

  @type default :: any

  # Require Stream.Reducers and its callbacks
  require Stream.Reducers, as: R
  # Private macros encoding the reducer-composition convention used in this
  # module: composed accumulators have the shape [head, state | tail], where
  # each composed layer stores its own `state` between `head` and `tail`.

  # Continue enumeration without emitting anything for this element.
  # (A 2-tuple is already valid AST, so no `quote` is needed here.)
  defmacrop skip(acc) do
    {:cont, acc}
  end

  # Invoke the inner reducer `fun` with `entry` and the accumulator.
  defmacrop next(fun, entry, acc) do
    quote(do: unquote(fun).(unquote(entry), unquote(acc)))
  end

  # Build a composed accumulator [head, state | tail].
  defmacrop acc(head, state, tail) do
    quote(do: [unquote(head), unquote(state) | unquote(tail)])
  end

  # Call the inner reducer with [head | tail] (this layer's state removed),
  # then splice this layer's `state` back into the returned accumulator.
  defmacrop next_with_acc(fun, entry, head, state, tail) do
    quote do
      {reason, [head | tail]} = unquote(fun).(unquote(entry), [unquote(head) | unquote(tail)])
      {reason, [head, unquote(state) | tail]}
    end
  end
## Transformers
  # Deprecated shims kept for backwards compatibility: the old chunk/2,3,4
  # API delegates to chunk_every/4. A nil leftover historically meant
  # "discard the incomplete trailing chunk".
  @doc false
  @deprecated "Use Stream.chunk_every/2 instead"
  def chunk(enum, n), do: chunk(enum, n, n, nil)

  @doc false
  @deprecated "Use Stream.chunk_every/3 instead"
  def chunk(enum, n, step) do
    chunk_every(enum, n, step, nil)
  end

  @doc false
  @deprecated "Use Stream.chunk_every/4 instead"
  def chunk(enum, n, step, leftover)
      when is_integer(n) and n > 0 and is_integer(step) and step > 0 do
    chunk_every(enum, n, step, leftover || :discard)
  end
@doc """
Shortcut to `chunk_every(enum, count, count)`.
"""
@doc since: "1.5.0"
@spec chunk_every(Enumerable.t(), pos_integer) :: Enumerable.t()
def chunk_every(enum, count), do: chunk_every(enum, count, count, [])
@doc """
Streams the enumerable in chunks, containing `count` elements each,
where each new chunk starts `step` elements into the enumerable.
`step` is optional and, if not passed, defaults to `count`, i.e.
chunks do not overlap.
If the last chunk does not have `count` elements to fill the chunk,
elements are taken from `leftover` to fill in the chunk. If `leftover`
does not have enough elements to fill the chunk, then a partial chunk
is returned with less than `count` elements.
If `:discard` is given in `leftover`, the last chunk is discarded
unless it has exactly `count` elements.
## Examples
iex> Stream.chunk_every([1, 2, 3, 4, 5, 6], 2) |> Enum.to_list()
[[1, 2], [3, 4], [5, 6]]
iex> Stream.chunk_every([1, 2, 3, 4, 5, 6], 3, 2, :discard) |> Enum.to_list()
[[1, 2, 3], [3, 4, 5]]
iex> Stream.chunk_every([1, 2, 3, 4, 5, 6], 3, 2, [7]) |> Enum.to_list()
[[1, 2, 3], [3, 4, 5], [5, 6, 7]]
iex> Stream.chunk_every([1, 2, 3, 4, 5, 6], 3, 3, []) |> Enum.to_list()
[[1, 2, 3], [4, 5, 6]]
"""
@doc since: "1.5.0"
@spec chunk_every(Enumerable.t(), pos_integer, pos_integer, Enumerable.t() | :discard) ::
Enumerable.t()
  def chunk_every(enum, count, step, leftover \\ [])
      when is_integer(count) and count > 0 and is_integer(step) and step > 0 do
    # Delegates to the generic chunk reducer, built on chunk_while/4.
    R.chunk_every(&chunk_while/4, enum, count, step, leftover)
  end
@doc """
Chunks the `enum` by buffering elements for which `fun` returns the same value.
Elements are only emitted when `fun` returns a new value or the `enum` finishes.
## Examples
iex> stream = Stream.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1))
iex> Enum.to_list(stream)
[[1], [2, 2], [3], [4, 4, 6], [7, 7]]
"""
@spec chunk_by(Enumerable.t(), (element -> any)) :: Enumerable.t()
  def chunk_by(enum, fun) when is_function(fun, 1) do
    # Delegates to the generic chunk-by reducer, built on chunk_while/4.
    R.chunk_by(&chunk_while/4, enum, fun)
  end
@doc """
Chunks the `enum` with fine grained control when every chunk is emitted.
`chunk_fun` receives the current element and the accumulator and
must return `{:cont, element, acc}` to emit the given chunk and
continue with accumulator or `{:cont, acc}` to not emit any chunk
and continue with the return accumulator.
`after_fun` is invoked when iteration is done and must also return
`{:cont, element, acc}` or `{:cont, acc}`.
## Examples
iex> chunk_fun = fn element, acc ->
...> if rem(element, 2) == 0 do
...> {:cont, Enum.reverse([element | acc]), []}
...> else
...> {:cont, [element | acc]}
...> end
...> end
iex> after_fun = fn
...> [] -> {:cont, []}
...> acc -> {:cont, Enum.reverse(acc), []}
...> end
iex> stream = Stream.chunk_while(1..10, [], chunk_fun, after_fun)
iex> Enum.to_list(stream)
[[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
"""
@doc since: "1.5.0"
@spec chunk_while(
Enumerable.t(),
acc,
(element, acc -> {:cont, chunk, acc} | {:cont, acc} | {:halt, acc}),
(acc -> {:cont, chunk, acc} | {:cont, acc})
) :: Enumerable.t()
when chunk: any
  def chunk_while(enum, acc, chunk_fun, after_fun)
      when is_function(chunk_fun, 2) and is_function(after_fun, 1) do
    # The composed state is [acc | after_fun]: the user's accumulator plus the
    # finalizer, stored together so the finalizer can be swapped mid-stream
    # (see the halt case in chunk_while_fun/2 below).
    lazy(
      enum,
      [acc | after_fun],
      fn f1 -> chunk_while_fun(chunk_fun, f1) end,
      &after_chunk_while/2
    )
  end

  # Wraps the user's chunk callback into a composed reducer: emitted chunks
  # are forwarded to the inner reducer `fun`; plain {:cont, acc} just updates
  # the stored accumulator without emitting.
  defp chunk_while_fun(callback, fun) do
    fn entry, acc(head, [acc | after_fun], tail) ->
      case callback.(entry, acc) do
        {:cont, emit, acc} ->
          # If we emit an element and then we have to halt,
          # we need to disable the after_fun callback to
          # avoid emitting even more elements.
          case next(fun, emit, [head | tail]) do
            {:halt, [head | tail]} -> {:halt, acc(head, [acc | &{:cont, &1}], tail)}
            {command, [head | tail]} -> {command, acc(head, [acc | after_fun], tail)}
          end

        {:cont, acc} ->
          skip(acc(head, [acc | after_fun], tail))

        {:halt, acc} ->
          {:halt, acc(head, [acc | after_fun], tail)}
      end
    end
  end

  # Runs the stored finalizer once iteration is done, emitting its final
  # chunk (if any) through the inner reducer.
  defp after_chunk_while(acc(h, [acc | after_fun], t), f1) do
    case after_fun.(acc) do
      {:cont, emit, acc} -> next_with_acc(f1, emit, h, [acc | after_fun], t)
      {:cont, acc} -> {:cont, acc(h, [acc | after_fun], t)}
    end
  end
@doc """
Creates a stream that only emits elements if they are different from the last emitted element.
This function only ever needs to store the last emitted element.
Elements are compared using `===/2`.
## Examples
iex> Stream.dedup([1, 2, 3, 3, 2, 1]) |> Enum.to_list()
[1, 2, 3, 2, 1]
"""
@spec dedup(Enumerable.t()) :: Enumerable.t()
def dedup(enum) do
dedup_by(enum, fn x -> x end)
end
@doc """
Creates a stream that only emits elements if the result of calling `fun` on the element is
different from the (stored) result of calling `fun` on the last emitted element.
## Examples
iex> Stream.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end) |> Enum.to_list()
[{1, :x}, {2, :y}, {1, :x}]
"""
@spec dedup_by(Enumerable.t(), (element -> term)) :: Enumerable.t()
def dedup_by(enum, fun) when is_function(fun, 1) do
lazy(enum, nil, fn f1 -> R.dedup(fun, f1) end)
end
@doc """
Lazily drops the next `n` elements from the enumerable.
If a negative `n` is given, it will drop the last `n` elements from
the collection. Note that the mechanism by which this is implemented
will delay the emission of any element until `n` additional elements have
been emitted by the enum.
## Examples
iex> stream = Stream.drop(1..10, 5)
iex> Enum.to_list(stream)
[6, 7, 8, 9, 10]
iex> stream = Stream.drop(1..10, -5)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
"""
@spec drop(Enumerable.t(), integer) :: Enumerable.t()
  # Positive n: delegate to a reducer that discards the first n elements.
  def drop(enum, n) when is_integer(n) and n >= 0 do
    lazy(enum, n, fn f1 -> R.drop(f1) end)
  end

  # Negative n: drop the last n elements. State is {count, buf1, buf2}:
  # buf1 collects incoming elements (newest first); once it holds n elements
  # it is reversed into buf2, the emit queue. An element is only emitted once
  # n further elements have arrived after it, so the final n never escape.
  def drop(enum, n) when is_integer(n) and n < 0 do
    n = abs(n)

    lazy(enum, {0, [], []}, fn f1 ->
      fn
        # Fill phase: emit queue empty — just buffer the entry.
        entry, [h, {count, buf1, []} | t] ->
          do_drop(:cont, n, entry, h, count, buf1, [], t)

        # Drain phase: emit the oldest queued element, then buffer the entry.
        entry, [h, {count, buf1, [next | buf2]} | t] ->
          {reason, [h | t]} = f1.(next, [h | t])
          do_drop(reason, n, entry, h, count, buf1, buf2, t)
      end
    end)
  end

  # Pushes `entry` onto buf1; when buf1 reaches n elements it becomes the new
  # emit queue (reversed back into arrival order) and buffering restarts.
  defp do_drop(reason, n, entry, h, count, buf1, buf2, t) do
    buf1 = [entry | buf1]
    count = count + 1

    if count == n do
      {reason, [h, {0, [], :lists.reverse(buf1)} | t]}
    else
      {reason, [h, {count, buf1, buf2} | t]}
    end
  end
@doc """
Creates a stream that drops every `nth` element from the enumerable.
The first element is always dropped, unless `nth` is 0.
`nth` must be a non-negative integer.
## Examples
iex> stream = Stream.drop_every(1..10, 2)
iex> Enum.to_list(stream)
[2, 4, 6, 8, 10]
iex> stream = Stream.drop_every(1..1000, 1)
iex> Enum.to_list(stream)
[]
iex> stream = Stream.drop_every([1, 2, 3, 4, 5], 0)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
"""
@spec drop_every(Enumerable.t(), non_neg_integer) :: Enumerable.t()
def drop_every(enum, nth)
def drop_every(enum, 0), do: %Stream{enum: enum}
def drop_every([], _nth), do: %Stream{enum: []}
def drop_every(enum, nth) when is_integer(nth) and nth > 0 do
lazy(enum, nth, fn f1 -> R.drop_every(nth, f1) end)
end
@doc """
Lazily drops elements of the enumerable while the given
function returns a truthy value.
## Examples
iex> stream = Stream.drop_while(1..10, &(&1 <= 5))
iex> Enum.to_list(stream)
[6, 7, 8, 9, 10]
"""
@spec drop_while(Enumerable.t(), (element -> as_boolean(term))) :: Enumerable.t()
def drop_while(enum, fun) when is_function(fun, 1) do
lazy(enum, true, fn f1 -> R.drop_while(fun, f1) end)
end
@doc """
Executes the given function for each element.
Useful for adding side effects (like printing) to a stream.
## Examples
iex> stream = Stream.each([1, 2, 3], fn x -> send(self(), x) end)
iex> Enum.to_list(stream)
iex> receive do: (x when is_integer(x) -> x)
1
iex> receive do: (x when is_integer(x) -> x)
2
iex> receive do: (x when is_integer(x) -> x)
3
"""
@spec each(Enumerable.t(), (element -> term)) :: Enumerable.t()
def each(enum, fun) when is_function(fun, 1) do
lazy(enum, fn f1 ->
fn x, acc ->
fun.(x)
f1.(x, acc)
end
end)
end
@doc """
Maps the given `fun` over `enumerable` and flattens the result.
This function returns a new stream built by appending the result of invoking `fun`
on each element of `enumerable` together.
## Examples
iex> stream = Stream.flat_map([1, 2, 3], fn x -> [x, x * 2] end)
iex> Enum.to_list(stream)
[1, 2, 2, 4, 3, 6]
iex> stream = Stream.flat_map([1, 2, 3], fn x -> [[x]] end)
iex> Enum.to_list(stream)
[[1], [2], [3]]
"""
@spec flat_map(Enumerable.t(), (element -> Enumerable.t())) :: Enumerable.t()
def flat_map(enum, mapper) when is_function(mapper, 1) do
transform(enum, nil, fn val, nil -> {mapper.(val), nil} end)
end
@doc """
Creates a stream that filters elements according to
the given function on enumeration.
## Examples
iex> stream = Stream.filter([1, 2, 3], fn x -> rem(x, 2) == 0 end)
iex> Enum.to_list(stream)
[2]
"""
@spec filter(Enumerable.t(), (element -> as_boolean(term))) :: Enumerable.t()
def filter(enum, fun) when is_function(fun, 1) do
lazy(enum, fn f1 -> R.filter(fun, f1) end)
end
  @doc false
  @deprecated "Use Stream.filter/2 + Stream.map/2 instead"
  # Deprecated fused filter+map reducer, kept for backwards compatibility.
  def filter_map(enum, filter, mapper) do
    lazy(enum, fn f1 -> R.filter_map(filter, mapper, f1) end)
  end
@doc """
Creates a stream that emits a value after the given period `n`
in milliseconds.
The values emitted are an increasing counter starting at `0`.
This operation will block the caller by the given interval
every time a new element is streamed.
Do not use this function to generate a sequence of numbers.
If blocking the caller process is not necessary, use
`Stream.iterate(0, & &1 + 1)` instead.
## Examples
iex> Stream.interval(10) |> Enum.take(10)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
"""
@spec interval(non_neg_integer) :: Enumerable.t()
def interval(n) when is_integer(n) and n >= 0 do
unfold(0, fn count ->
Process.sleep(n)
{count, count + 1}
end)
end
@doc """
Injects the stream values into the given collectable as a side-effect.
This function is often used with `run/1` since any evaluation
is delayed until the stream is executed. See `run/1` for an example.
"""
@spec into(Enumerable.t(), Collectable.t(), (term -> term)) :: Enumerable.t()
  def into(enum, collectable, transform \\ fn x -> x end) when is_function(transform, 1) do
    &do_into(enum, collectable, transform, &1, &2)
  end

  # Opens the collectable and composes the caller's reducer with one that
  # also pushes each (transformed) element into the collectable.
  defp do_into(enum, collectable, transform, acc, fun) do
    {initial, into} = Collectable.into(collectable)

    composed = fn x, [acc | collectable] ->
      collectable = into.(collectable, {:cont, transform.(x)})
      {reason, acc} = fun.(x, acc)
      {reason, [acc | collectable]}
    end

    do_into(&Enumerable.reduce(enum, &1, composed), initial, into, acc)
  end

  # Drives the reduction, making sure the collectable is always closed:
  # with :halt if anything is thrown/raised, with :done on normal completion,
  # and kept open across suspensions so enumeration can resume.
  defp do_into(reduce, collectable, into, {command, acc}) do
    try do
      reduce.({command, [acc | collectable]})
    catch
      kind, reason ->
        into.(collectable, :halt)
        :erlang.raise(kind, reason, __STACKTRACE__)
    else
      {:suspended, [acc | collectable], continuation} ->
        {:suspended, acc, &do_into(continuation, collectable, into, &1)}

      {reason, [acc | collectable]} ->
        into.(collectable, :done)
        {reason, acc}
    end
  end
@doc """
Creates a stream that will apply the given function on
enumeration.
## Examples
iex> stream = Stream.map([1, 2, 3], fn x -> x * 2 end)
iex> Enum.to_list(stream)
[2, 4, 6]
"""
@spec map(Enumerable.t(), (element -> any)) :: Enumerable.t()
def map(enum, fun) when is_function(fun, 1) do
lazy(enum, fn f1 -> R.map(fun, f1) end)
end
@doc """
Creates a stream that will apply the given function on
every `nth` element from the enumerable.
The first element is always passed to the given function.
`nth` must be a non-negative integer.
## Examples
iex> stream = Stream.map_every(1..10, 2, fn x -> x * 2 end)
iex> Enum.to_list(stream)
[2, 2, 6, 4, 10, 6, 14, 8, 18, 10]
iex> stream = Stream.map_every([1, 2, 3, 4, 5], 1, fn x -> x * 2 end)
iex> Enum.to_list(stream)
[2, 4, 6, 8, 10]
iex> stream = Stream.map_every(1..5, 0, fn x -> x * 2 end)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
"""
@doc since: "1.4.0"
@spec map_every(Enumerable.t(), non_neg_integer, (element -> any)) :: Enumerable.t()
def map_every(enum, nth, fun) when is_integer(nth) and nth >= 0 and is_function(fun, 1) do
map_every_after_guards(enum, nth, fun)
end
defp map_every_after_guards(enum, 1, fun), do: map(enum, fun)
defp map_every_after_guards(enum, 0, _fun), do: %Stream{enum: enum}
defp map_every_after_guards([], _nth, _fun), do: %Stream{enum: []}
defp map_every_after_guards(enum, nth, fun) do
lazy(enum, nth, fn f1 -> R.map_every(nth, fun, f1) end)
end
@doc """
Creates a stream that will reject elements according to
the given function on enumeration.
## Examples
iex> stream = Stream.reject([1, 2, 3], fn x -> rem(x, 2) == 0 end)
iex> Enum.to_list(stream)
[1, 3]
"""
@spec reject(Enumerable.t(), (element -> as_boolean(term))) :: Enumerable.t()
def reject(enum, fun) when is_function(fun, 1) do
lazy(enum, fn f1 -> R.reject(fun, f1) end)
end
@doc """
Runs the given stream.
This is useful when a stream needs to be run, for side effects,
and there is no interest in its return result.
## Examples
Open up a file, replace all `#` by `%` and stream to another file
without loading the whole file in memory:
File.stream!("/path/to/file")
|> Stream.map(&String.replace(&1, "#", "%"))
|> Stream.into(File.stream!("/path/to/other/file"))
|> Stream.run()
No computation will be done until we call one of the `Enum` functions
or `run/1`.
"""
@spec run(Enumerable.t()) :: :ok
def run(stream) do
_ = Enumerable.reduce(stream, {:cont, nil}, fn _, _ -> {:cont, nil} end)
:ok
end
@doc """
Creates a stream that applies the given function to each
element, emits the result and uses the same result as the accumulator
for the next computation. Uses the first element in the enumerable
as the starting value.
## Examples
iex> stream = Stream.scan(1..5, &(&1 + &2))
iex> Enum.to_list(stream)
[1, 3, 6, 10, 15]
"""
@spec scan(Enumerable.t(), (element, acc -> any)) :: Enumerable.t()
def scan(enum, fun) when is_function(fun, 2) do
lazy(enum, :first, fn f1 -> R.scan2(fun, f1) end)
end
@doc """
Creates a stream that applies the given function to each
element, emits the result and uses the same result as the accumulator
for the next computation. Uses the given `acc` as the starting value.
## Examples
iex> stream = Stream.scan(1..5, 0, &(&1 + &2))
iex> Enum.to_list(stream)
[1, 3, 6, 10, 15]
"""
@spec scan(Enumerable.t(), acc, (element, acc -> any)) :: Enumerable.t()
def scan(enum, acc, fun) when is_function(fun, 2) do
lazy(enum, acc, fn f1 -> R.scan3(fun, f1) end)
end
@doc """
Lazily takes the next `count` elements from the enumerable and stops
enumeration.
If a negative `count` is given, the last `count` values will be taken.
For such, the collection is fully enumerated keeping up to `2 * count`
elements in memory. Once the end of the collection is reached,
the last `count` elements will be executed. Therefore, using
a negative `count` on an infinite collection will never return.
## Examples
iex> stream = Stream.take(1..100, 5)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
iex> stream = Stream.take(1..100, -5)
iex> Enum.to_list(stream)
[96, 97, 98, 99, 100]
iex> stream = Stream.cycle([1, 2, 3]) |> Stream.take(5)
iex> Enum.to_list(stream)
[1, 2, 3, 1, 2]
"""
@spec take(Enumerable.t(), integer) :: Enumerable.t()
def take(enum, count) when is_integer(count) do
take_after_guards(enum, count)
end
defp take_after_guards(_enum, 0), do: %Stream{enum: []}
defp take_after_guards([], _count), do: %Stream{enum: []}
defp take_after_guards(enum, count) when count > 0 do
lazy(enum, count, fn f1 -> R.take(f1) end)
end
defp take_after_guards(enum, count) when count < 0 do
&Enumerable.reduce(Enum.take(enum, count), &1, &2)
end
@doc """
Creates a stream that takes every `nth` element from the enumerable.
The first element is always included, unless `nth` is 0.
`nth` must be a non-negative integer.
## Examples
iex> stream = Stream.take_every(1..10, 2)
iex> Enum.to_list(stream)
[1, 3, 5, 7, 9]
iex> stream = Stream.take_every([1, 2, 3, 4, 5], 1)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
iex> stream = Stream.take_every(1..1000, 0)
iex> Enum.to_list(stream)
[]
"""
@spec take_every(Enumerable.t(), non_neg_integer) :: Enumerable.t()
def take_every(enum, nth) when is_integer(nth) and nth >= 0 do
take_every_after_guards(enum, nth)
end
defp take_every_after_guards(_enum, 0), do: %Stream{enum: []}
defp take_every_after_guards([], _nth), do: %Stream{enum: []}
defp take_every_after_guards(enum, nth) do
lazy(enum, nth, fn f1 -> R.take_every(nth, f1) end)
end
@doc """
Lazily takes elements of the enumerable while the given
function returns a truthy value.
## Examples
iex> stream = Stream.take_while(1..100, &(&1 <= 5))
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
"""
@spec take_while(Enumerable.t(), (element -> as_boolean(term))) :: Enumerable.t()
def take_while(enum, fun) when is_function(fun, 1) do
lazy(enum, fn f1 -> R.take_while(fun, f1) end)
end
@doc """
Creates a stream that emits a single value after `n` milliseconds.
The value emitted is `0`. This operation will block the caller by
the given time until the element is streamed.
## Examples
iex> Stream.timer(10) |> Enum.to_list()
[0]
"""
@spec timer(non_neg_integer) :: Enumerable.t()
def timer(n) when is_integer(n) and n >= 0 do
take(interval(n), 1)
end
@doc """
Transforms an existing stream.
It expects an accumulator and a function that receives each stream element
and an accumulator. It must return a tuple, where the first element is a new
stream (often a list) or the atom `:halt`, and the second element is the
accumulator to be used by the next element, if any, in both cases.
Note: this function is equivalent to `Enum.flat_map_reduce/3`, except this
function does not return the accumulator once the stream is processed.
## Examples
`Stream.transform/3` is useful as it can be used as the basis to implement
many of the functions defined in this module. For example, we can implement
`Stream.take(enum, n)` as follows:
iex> enum = 1001..9999
iex> n = 3
iex> stream = Stream.transform(enum, 0, fn i, acc ->
...> if acc < n, do: {[i], acc + 1}, else: {:halt, acc}
...> end)
iex> Enum.to_list(stream)
[1001, 1002, 1003]
"""
@spec transform(Enumerable.t(), acc, fun) :: Enumerable.t()
when fun: (element, acc -> {Enumerable.t(), acc} | {:halt, acc}),
acc: any
def transform(enum, acc, reducer) when is_function(reducer, 2) do
&do_transform(enum, fn -> acc end, reducer, &1, &2, nil)
end
@doc """
Transforms an existing stream with function-based start and finish.
The accumulator is only calculated when transformation starts. It also
allows an after function to be given which is invoked when the stream
halts or completes.
This function can be seen as a combination of `Stream.resource/3` with
`Stream.transform/3`.
"""
@spec transform(Enumerable.t(), (() -> acc), fun, (acc -> term)) :: Enumerable.t()
when fun: (element, acc -> {Enumerable.t(), acc} | {:halt, acc}),
acc: any
def transform(enum, start_fun, reducer, after_fun)
when is_function(start_fun, 0) and is_function(reducer, 2) and is_function(after_fun, 1) do
&do_transform(enum, start_fun, reducer, &1, &2, after_fun)
end
  # Entry point for the transform machinery:
  #   * `inner` reduces elements emitted by the user fun into the downstream
  #     fun, tagging halts so we can tell who stopped (see do_transform_each/3)
  #   * `step` suspends the outer enumerable after each element, so elements
  #     can be pulled one batch at a time
  #   * `next` is the (suspendable) reducer over the outer enumerable(s)
  # The four funs travel together in the `funs` tuple from here on.
  defp do_transform(enumerables, user_acc, user, inner_acc, fun, after_fun) do
    inner = &do_transform_each(&1, &2, fun)
    step = &do_transform_step(&1, &2)
    next = &Enumerable.reduce(enumerables, &1, step)
    funs = {user, fun, inner, after_fun}
    do_transform(user_acc.(), :cont, next, inner_acc, funs)
  end
  # Downstream halted: close the outer enumerable, run cleanup, stop.
  defp do_transform(user_acc, _next_op, next, {:halt, inner_acc}, funs) do
    {_, _, _, after_fun} = funs
    next.({:halt, []})
    do_after(after_fun, user_acc)
    {:halted, inner_acc}
  end
  # Downstream suspended: return a continuation carrying the same state.
  defp do_transform(user_acc, next_op, next, {:suspend, inner_acc}, funs) do
    {:suspended, inner_acc, &do_transform(user_acc, next_op, next, &1, funs)}
  end
  # The outer enumerable already finished (`:halt` op): just run cleanup.
  defp do_transform(user_acc, :halt, _next, {_, inner_acc}, funs) do
    {_, _, _, after_fun} = funs
    do_after(after_fun, user_acc)
    {:halted, inner_acc}
  end
  # Pull the next batch of outer elements. `step` collects them by
  # prepending, hence the :lists.reverse before handing them to the user.
  # Exceptions run the after_fun before being re-raised.
  defp do_transform(user_acc, :cont, next, inner_acc, funs) do
    {_, _, _, after_fun} = funs
    try do
      next.({:cont, []})
    catch
      kind, reason ->
        do_after(after_fun, user_acc)
        :erlang.raise(kind, reason, __STACKTRACE__)
    else
      {:suspended, vals, next} ->
        do_transform_user(:lists.reverse(vals), user_acc, :cont, next, inner_acc, funs)
      {_, vals} ->
        do_transform_user(:lists.reverse(vals), user_acc, :halt, next, inner_acc, funs)
    end
  end
  # Batch consumed: go pull more from the outer enumerable.
  defp do_transform_user([], user_acc, next_op, next, inner_acc, funs) do
    do_transform(user_acc, next_op, next, inner_acc, funs)
  end
  # Feed one outer element to the user fun and dispatch on its result:
  # nothing emitted, a list, an explicit :halt, or any other enumerable.
  defp do_transform_user([val | vals], user_acc, next_op, next, inner_acc, funs) do
    {user, fun, inner, after_fun} = funs
    try do
      user.(val, user_acc)
    catch
      kind, reason ->
        next.({:halt, []})
        do_after(after_fun, user_acc)
        :erlang.raise(kind, reason, __STACKTRACE__)
    else
      {[], user_acc} ->
        do_transform_user(vals, user_acc, next_op, next, inner_acc, funs)
      {list, user_acc} when is_list(list) ->
        reduce = &Enumerable.List.reduce(list, &1, fun)
        do_list_transform(vals, user_acc, next_op, next, inner_acc, reduce, funs)
      {:halt, user_acc} ->
        next.({:halt, []})
        do_after(after_fun, user_acc)
        {:halted, elem(inner_acc, 1)}
      {other, user_acc} ->
        reduce = &Enumerable.reduce(other, &1, inner)
        do_enum_transform(vals, user_acc, next_op, next, inner_acc, reduce, funs)
    end
  end
  # Reduce a list emitted by the user fun into the downstream fun.
  defp do_list_transform(vals, user_acc, next_op, next, inner_acc, reduce, funs) do
    {_, _, _, after_fun} = funs
    try do
      reduce.(inner_acc)
    catch
      kind, reason ->
        next.({:halt, []})
        do_after(after_fun, user_acc)
        :erlang.raise(kind, reason, __STACKTRACE__)
    else
      {:done, acc} ->
        do_transform_user(vals, user_acc, next_op, next, {:cont, acc}, funs)
      {:halted, acc} ->
        next.({:halt, []})
        do_after(after_fun, user_acc)
        {:halted, acc}
      {:suspended, acc, continuation} ->
        resume = &do_list_transform(vals, user_acc, next_op, next, &1, continuation, funs)
        {:suspended, acc, resume}
    end
  end
  # Reduce an arbitrary enumerable emitted by the user fun. The accumulator
  # is tagged :outer; do_transform_each/3 retags downstream halts as :inner
  # so we can tell "emitted enum stopped" from "consumer asked to stop".
  defp do_enum_transform(vals, user_acc, next_op, next, {op, inner_acc}, reduce, funs) do
    {_, _, _, after_fun} = funs
    try do
      reduce.({op, [:outer | inner_acc]})
    catch
      kind, reason ->
        next.({:halt, []})
        do_after(after_fun, user_acc)
        :erlang.raise(kind, reason, __STACKTRACE__)
    else
      # Only take into account outer halts when the op is not halt itself.
      # Otherwise, we were the ones wishing to halt, so we should just stop.
      {:halted, [:outer | acc]} when op != :halt ->
        do_transform_user(vals, user_acc, next_op, next, {:cont, acc}, funs)
      {:halted, [_ | acc]} ->
        next.({:halt, []})
        do_after(after_fun, user_acc)
        {:halted, acc}
      {:done, [_ | acc]} ->
        do_transform_user(vals, user_acc, next_op, next, {:cont, acc}, funs)
      {:suspended, [_ | acc], continuation} ->
        resume = &do_enum_transform(vals, user_acc, next_op, next, &1, continuation, funs)
        {:suspended, acc, resume}
    end
  end
  # Runs the user-provided cleanup fun, if one was given.
  defp do_after(nil, _user_acc), do: :ok
  defp do_after(fun, user_acc), do: fun.(user_acc)
  # Reducer wrapper that retags a downstream :halt as :inner (see above).
  defp do_transform_each(x, [:outer | acc], f) do
    case f.(x, acc) do
      {:halt, res} -> {:halt, [:inner | res]}
      {op, res} -> {op, [:outer | res]}
    end
  end
  # Step fun over the outer enumerable: collect the element and suspend,
  # so batches can be pulled on demand.
  defp do_transform_step(x, acc) do
    {:suspend, [x | acc]}
  end
@doc """
Creates a stream that only emits elements if they are unique.
Keep in mind that, in order to know if an element is unique
or not, this function needs to store all unique values emitted
by the stream. Therefore, if the stream is infinite, the number
of elements stored will grow infinitely, never being garbage-collected.
## Examples
iex> Stream.uniq([1, 2, 3, 3, 2, 1]) |> Enum.to_list()
[1, 2, 3]
"""
@spec uniq(Enumerable.t()) :: Enumerable.t()
def uniq(enum) do
uniq_by(enum, fn x -> x end)
end
@doc false
@deprecated "Use Stream.uniq_by/2 instead"
def uniq(enum, fun) do
uniq_by(enum, fun)
end
@doc """
Creates a stream that only emits elements if they are unique, by removing the
elements for which function `fun` returned duplicate elements.
The function `fun` maps every element to a term which is used to
determine if two elements are duplicates.
Keep in mind that, in order to know if an element is unique
or not, this function needs to store all unique values emitted
by the stream. Therefore, if the stream is infinite, the number
of elements stored will grow infinitely, never being garbage-collected.
## Example
iex> Stream.uniq_by([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) |> Enum.to_list()
[{1, :x}, {2, :y}]
iex> Stream.uniq_by([a: {:tea, 2}, b: {:tea, 2}, c: {:coffee, 1}], fn {_, y} -> y end) |> Enum.to_list()
[a: {:tea, 2}, c: {:coffee, 1}]
"""
@spec uniq_by(Enumerable.t(), (element -> term)) :: Enumerable.t()
def uniq_by(enum, fun) when is_function(fun, 1) do
lazy(enum, %{}, fn f1 -> R.uniq_by(fun, f1) end)
end
@doc """
Creates a stream where each element in the enumerable will
be wrapped in a tuple alongside its index.
If an `offset` is given, we will index from the given offset instead of from zero.
## Examples
iex> stream = Stream.with_index([1, 2, 3])
iex> Enum.to_list(stream)
[{1, 0}, {2, 1}, {3, 2}]
iex> stream = Stream.with_index([1, 2, 3], 3)
iex> Enum.to_list(stream)
[{1, 3}, {2, 4}, {3, 5}]
"""
@spec with_index(Enumerable.t(), integer) :: Enumerable.t()
def with_index(enum, offset \\ 0) when is_integer(offset) do
lazy(enum, offset, fn f1 -> R.with_index(f1) end)
end
## Combiners
@doc """
Creates a stream that enumerates each enumerable in an enumerable.
## Examples
iex> stream = Stream.concat([1..3, 4..6, 7..9])
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5, 6, 7, 8, 9]
"""
@spec concat(Enumerable.t()) :: Enumerable.t()
def concat(enumerables) do
flat_map(enumerables, & &1)
end
@doc """
Creates a stream that enumerates the first argument, followed by the second.
## Examples
iex> stream = Stream.concat(1..3, 4..6)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5, 6]
iex> stream1 = Stream.cycle([1, 2, 3])
iex> stream2 = Stream.cycle([4, 5, 6])
iex> stream = Stream.concat(stream1, stream2)
iex> Enum.take(stream, 6)
[1, 2, 3, 1, 2, 3]
"""
@spec concat(Enumerable.t(), Enumerable.t()) :: Enumerable.t()
def concat(first, second) do
flat_map([first, second], & &1)
end
@doc """
Zips two collections together, lazily.
The zipping finishes as soon as any enumerable completes.
## Examples
iex> concat = Stream.concat(1..3, 4..6)
iex> cycle = Stream.cycle([:a, :b, :c])
iex> Stream.zip(concat, cycle) |> Enum.to_list()
[{1, :a}, {2, :b}, {3, :c}, {4, :a}, {5, :b}, {6, :c}]
"""
@spec zip(Enumerable.t(), Enumerable.t()) :: Enumerable.t()
def zip(left, right), do: zip([left, right])
@doc """
Zips corresponding elements from a finite collection of enumerables
into one stream of tuples.
The zipping finishes as soon as any enumerable in the given collection completes.
## Examples
iex> concat = Stream.concat(1..3, 4..6)
iex> cycle = Stream.cycle(["foo", "bar", "baz"])
iex> Stream.zip([concat, [:a, :b, :c], cycle]) |> Enum.to_list()
[{1, :a, "foo"}, {2, :b, "bar"}, {3, :c, "baz"}]
"""
@doc since: "1.4.0"
@spec zip(enumerables) :: Enumerable.t() when enumerables: [Enumerable.t()] | Enumerable.t()
def zip(enumerables) do
&prepare_zip(enumerables, &1, &2)
end
defp prepare_zip(enumerables, acc, fun) do
step = &do_zip_step(&1, &2)
enum_funs =
Enum.map(enumerables, fn enum ->
{&Enumerable.reduce(enum, &1, step), [], :cont}
end)
do_zip(enum_funs, acc, fun)
end
  # This implementation of do_zip/3 works for any number of
  # streams to zip, even if right now zip/2 only zips two streams.
  #
  # Each entry in `zips` is `{reducer_fun, buffered_elements, :cont | :halt}`.
  # Downstream halted: close every zipped stream before stopping.
  defp do_zip(zips, {:halt, acc}, _fun) do
    do_zip_close(zips)
    {:halted, acc}
  end
  # Downstream suspended: hand back a continuation over the same zips.
  defp do_zip(zips, {:suspend, acc}, fun) do
    {:suspended, acc, &do_zip(zips, &1, fun)}
  end
  # Nothing left to zip.
  defp do_zip([], {:cont, acc}, _callback) do
    {:done, acc}
  end
  # Pull one element from every stream and emit the zipped tuple. If an
  # exception escapes, close all streams before re-raising.
  defp do_zip(zips, {:cont, acc}, callback) do
    try do
      do_zip_next_tuple(zips, acc, callback, [], [])
    catch
      kind, reason ->
        do_zip_close(zips)
        :erlang.raise(kind, reason, __STACKTRACE__)
    else
      {:next, buffer, acc} ->
        do_zip(buffer, acc, callback)
      {:done, _acc} = other ->
        other
    end
  end
  # do_zip_next_tuple/5 computes the next tuple formed by
  # the next element of each zipped stream.
  # A stream already exhausted (:halt op with an empty buffer) ends the
  # whole zip; the remaining streams get closed.
  defp do_zip_next_tuple([{_, [], :halt} | zips], acc, _callback, _yielded_elems, buffer) do
    do_zip_close(:lists.reverse(buffer, zips))
    {:done, acc}
  end
  # Buffer empty but the stream can still produce: resume its reducer.
  defp do_zip_next_tuple([{fun, [], :cont} | zips], acc, callback, yielded_elems, buffer) do
    case fun.({:cont, []}) do
      {:suspended, [elem | next_acc], fun} ->
        next_buffer = [{fun, next_acc, :cont} | buffer]
        do_zip_next_tuple(zips, acc, callback, [elem | yielded_elems], next_buffer)
      {_, [elem | next_acc]} ->
        next_buffer = [{fun, next_acc, :halt} | buffer]
        do_zip_next_tuple(zips, acc, callback, [elem | yielded_elems], next_buffer)
      {_, []} ->
        # The current zipped stream terminated, so we close all the streams
        # and return {:halted, acc} (which is returned as is by do_zip/3).
        do_zip_close(:lists.reverse(buffer, zips))
        {:done, acc}
    end
  end
  # An element is buffered: consume it without touching the reducer.
  defp do_zip_next_tuple([{fun, zip_acc, zip_op} | zips], acc, callback, yielded_elems, buffer) do
    [elem | rest] = zip_acc
    next_buffer = [{fun, rest, zip_op} | buffer]
    do_zip_next_tuple(zips, acc, callback, [elem | yielded_elems], next_buffer)
  end
  defp do_zip_next_tuple([] = _zips, acc, callback, yielded_elems, buffer) do
    # "yielded_elems" is a reversed list of results for the current iteration of
    # zipping: it needs to be reversed and converted to a tuple to have the next
    # tuple in the list resulting from zipping.
    zipped = List.to_tuple(:lists.reverse(yielded_elems))
    {:next, :lists.reverse(buffer), callback.(zipped, acc)}
  end
  # Tells every zipped stream's reducer to halt, releasing its resources.
  defp do_zip_close(zips) do
    :lists.foreach(fn {fun, _, _} -> fun.({:halt, []}) end, zips)
  end
  # Reducer step used inside each zipped stream: collect the element and
  # suspend immediately so elements are produced one at a time.
  defp do_zip_step(x, acc) do
    {:suspend, :lists.reverse([x | acc])}
  end
  ## Sources
  @doc """
  Creates a stream that cycles through the given enumerable,
  infinitely.

  ## Examples

      iex> stream = Stream.cycle([1, 2, 3])
      iex> Enum.take(stream, 5)
      [1, 2, 3, 1, 2]

  """
  @spec cycle(Enumerable.t()) :: Enumerable.t()
  def cycle(enumerable)
  # Cycling an empty list would never produce an element; fail fast.
  def cycle([]) do
    raise ArgumentError, "cannot cycle over empty enumerable"
  end
  # List fast path: unfold over {original_list, remaining_tail}, restarting
  # from the original list whenever the tail runs out.
  def cycle(enumerable) when is_list(enumerable) do
    unfold({enumerable, enumerable}, fn
      {source, [h | t]} -> {h, {source, t}}
      {source = [h | t], []} -> {h, {source, t}}
    end)
  end
  # General case: repeatedly reduce the enumerable; the first pass is
  # wrapped by check_cycle_first_element/1 to reject empty enumerables.
  def cycle(enumerable) do
    fn acc, fun ->
      inner = &do_cycle_each(&1, &2, fun)
      outer = &Enumerable.reduce(enumerable, &1, inner)
      reduce = check_cycle_first_element(outer)
      do_cycle(reduce, outer, acc)
    end
  end
  # Downstream halted: stop cycling.
  defp do_cycle(_reduce, _cycle, {:halt, acc}) do
    {:halted, acc}
  end
  # Downstream suspended: resume from the same position later.
  defp do_cycle(reduce, cycle, {:suspend, acc}) do
    {:suspended, acc, &do_cycle(reduce, cycle, &1)}
  end
  # Run one pass over the enumerable; when it finishes, start over with
  # `cycle` (the reducer positioned at the beginning). A {:stream_cycle, acc}
  # throw from do_cycle_each/3 signals the consumer halted mid-pass.
  defp do_cycle(reduce, cycle, acc) do
    try do
      reduce.(acc)
    catch
      {:stream_cycle, acc} ->
        {:halted, acc}
    else
      {state, acc} when state in [:done, :halted] ->
        do_cycle(cycle, cycle, {:cont, acc})
      {:suspended, acc, continuation} ->
        {:suspended, acc, &do_cycle(continuation, cycle, &1)}
    end
  end
  # Wraps the consumer fun, converting its :halt into a throw so the pass
  # above can unwind out of the Enumerable.reduce call.
  defp do_cycle_each(x, acc, f) do
    case f.(x, acc) do
      {:halt, h} -> throw({:stream_cycle, h})
      {_, _} = o -> o
    end
  end
  # Wraps only the first reduction: if it completes with an empty
  # accumulator, the enumerable produced nothing and cycling is rejected.
  defp check_cycle_first_element(reduce) do
    fn acc ->
      case reduce.(acc) do
        {state, []} when state in [:done, :halted] ->
          raise ArgumentError, "cannot cycle over empty enumerable"
        other ->
          other
      end
    end
  end
@doc """
Emits a sequence of values, starting with `start_value`. Successive
values are generated by calling `next_fun` on the previous value.
## Examples
iex> Stream.iterate(0, &(&1 + 1)) |> Enum.take(5)
[0, 1, 2, 3, 4]
"""
@spec iterate(element, (element -> element)) :: Enumerable.t()
def iterate(start_value, next_fun) when is_function(next_fun, 1) do
unfold({:ok, start_value}, fn
{:ok, value} ->
{value, {:next, value}}
{:next, value} ->
next = next_fun.(value)
{next, {:next, next}}
end)
end
@doc """
Returns a stream generated by calling `generator_fun` repeatedly.
## Examples
# Although not necessary, let's seed the random algorithm
iex> :rand.seed(:exsplus, {1, 2, 3})
iex> Stream.repeatedly(&:rand.uniform/0) |> Enum.take(3)
[0.40502929729990744, 0.45336720247823126, 0.04094511692041057]
"""
@spec repeatedly((() -> element)) :: Enumerable.t()
def repeatedly(generator_fun) when is_function(generator_fun, 0) do
&do_repeatedly(generator_fun, &1, &2)
end
defp do_repeatedly(generator_fun, {:suspend, acc}, fun) do
{:suspended, acc, &do_repeatedly(generator_fun, &1, fun)}
end
defp do_repeatedly(_generator_fun, {:halt, acc}, _fun) do
{:halted, acc}
end
defp do_repeatedly(generator_fun, {:cont, acc}, fun) do
do_repeatedly(generator_fun, fun.(generator_fun.(), acc), fun)
end
@doc """
Emits a sequence of values for the given resource.
Similar to `transform/3` but the initial accumulated value is
computed lazily via `start_fun` and executes an `after_fun` at
the end of enumeration (both in cases of success and failure).
Successive values are generated by calling `next_fun` with the
previous accumulator (the initial value being the result returned
by `start_fun`) and it must return a tuple containing a list
of elements to be emitted and the next accumulator. The enumeration
finishes if it returns `{:halt, acc}`.
As the name says, this function is useful to stream values from
resources.
## Examples
Stream.resource(
fn -> File.open!("sample") end,
fn file ->
case IO.read(file, :line) do
data when is_binary(data) -> {[data], file}
_ -> {:halt, file}
end
end,
fn file -> File.close(file) end
)
iex> Stream.resource(
...> fn ->
...> {:ok, pid} = StringIO.open("string")
...> pid
...> end,
...> fn pid ->
...> case IO.getn(pid, "", 1) do
...> :eof -> {:halt, pid}
...> char -> {[char], pid}
...> end
...> end,
...> fn pid -> StringIO.close(pid) end
...> ) |> Enum.to_list()
["s", "t", "r", "i", "n", "g"]
"""
@spec resource((() -> acc), (acc -> {[element], acc} | {:halt, acc}), (acc -> term)) ::
Enumerable.t()
def resource(start_fun, next_fun, after_fun)
when is_function(start_fun, 0) and is_function(next_fun, 1) and is_function(after_fun, 1) do
&do_resource(start_fun.(), next_fun, &1, &2, after_fun)
end
  # Downstream suspended: capture a continuation with the same resource acc.
  defp do_resource(next_acc, next_fun, {:suspend, acc}, fun, after_fun) do
    {:suspended, acc, &do_resource(next_acc, next_fun, &1, fun, after_fun)}
  end
  # Downstream halted: release the resource via after_fun.
  defp do_resource(next_acc, _next_fun, {:halt, acc}, _fun, after_fun) do
    after_fun.(next_acc)
    {:halted, acc}
  end
  # Ask the resource for the next batch. Exceptions run after_fun before
  # being re-raised so the resource is always released.
  defp do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun) do
    try do
      # Optimize the most common cases
      case next_fun.(next_acc) do
        {[], next_acc} -> {:opt, {:cont, acc}, next_acc}
        {[v], next_acc} -> {:opt, fun.(v, acc), next_acc}
        {_, _} = other -> other
      end
    catch
      kind, reason ->
        after_fun.(next_acc)
        :erlang.raise(kind, reason, __STACKTRACE__)
    else
      # Fast path: zero or one element, already folded into the consumer.
      {:opt, acc, next_acc} ->
        do_resource(next_acc, next_fun, acc, fun, after_fun)
      # The resource signalled the end of enumeration.
      {:halt, next_acc} ->
        do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun)
      # A list of emitted elements: reduce it directly.
      {list, next_acc} when is_list(list) ->
        reduce = &Enumerable.List.reduce(list, &1, fun)
        do_list_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun, reduce)
      # Any other enumerable: reduce it with :outer/:inner tagging (see
      # do_resource_each/3) to tell apart who requested a halt.
      {enum, next_acc} ->
        inner = &do_resource_each(&1, &2, fun)
        reduce = &Enumerable.reduce(enum, &1, inner)
        do_enum_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun, reduce)
    end
  end
  # Reduces a list batch; on :done pulls more from the resource, on halt
  # routes through the releasing {:halt, acc} clause above.
  defp do_list_resource(next_acc, next_fun, acc, fun, after_fun, reduce) do
    try do
      reduce.(acc)
    catch
      kind, reason ->
        after_fun.(next_acc)
        :erlang.raise(kind, reason, __STACKTRACE__)
    else
      {:done, acc} ->
        do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun)
      {:halted, acc} ->
        do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun)
      {:suspended, acc, c} ->
        {:suspended, acc, &do_list_resource(next_acc, next_fun, &1, fun, after_fun, c)}
    end
  end
  # Reduces an arbitrary enumerable batch. The accumulator is tagged
  # :outer; do_resource_each/3 retags consumer halts as :inner, so a halt
  # of the emitted enum ("outer") continues while a consumer halt stops.
  defp do_enum_resource(next_acc, next_fun, {op, acc}, fun, after_fun, reduce) do
    try do
      reduce.({op, [:outer | acc]})
    catch
      kind, reason ->
        after_fun.(next_acc)
        :erlang.raise(kind, reason, __STACKTRACE__)
    else
      {:halted, [:outer | acc]} ->
        do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun)
      {:halted, [:inner | acc]} ->
        do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun)
      {:done, [_ | acc]} ->
        do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun)
      {:suspended, [_ | acc], c} ->
        {:suspended, acc, &do_enum_resource(next_acc, next_fun, &1, fun, after_fun, c)}
    end
  end
  # Reducer wrapper that retags a consumer :halt as :inner (see above).
  defp do_resource_each(x, [:outer | acc], f) do
    case f.(x, acc) do
      {:halt, res} -> {:halt, [:inner | res]}
      {op, res} -> {op, [:outer | res]}
    end
  end
@doc """
Emits a sequence of values for the given accumulator.
Successive values are generated by calling `next_fun` with the previous
accumulator and it must return a tuple with the current value and next
accumulator. The enumeration finishes if it returns `nil`.
## Examples
iex> Stream.unfold(5, fn
...> 0 -> nil
...> n -> {n, n - 1}
...> end) |> Enum.to_list()
[5, 4, 3, 2, 1]
"""
@spec unfold(acc, (acc -> {element, acc} | nil)) :: Enumerable.t()
def unfold(next_acc, next_fun) when is_function(next_fun, 1) do
&do_unfold(next_acc, next_fun, &1, &2)
end
defp do_unfold(next_acc, next_fun, {:suspend, acc}, fun) do
{:suspended, acc, &do_unfold(next_acc, next_fun, &1, fun)}
end
defp do_unfold(_next_acc, _next_fun, {:halt, acc}, _fun) do
{:halted, acc}
end
defp do_unfold(next_acc, next_fun, {:cont, acc}, fun) do
case next_fun.(next_acc) do
nil -> {:done, acc}
{v, next_acc} -> do_unfold(next_acc, next_fun, fun.(v, acc), fun)
end
end
@doc """
Lazily intersperses `intersperse_element` between each element of the enumeration.
## Examples
iex> Stream.intersperse([1, 2, 3], 0) |> Enum.to_list()
[1, 0, 2, 0, 3]
iex> Stream.intersperse([1], 0) |> Enum.to_list()
[1]
iex> Stream.intersperse([], 0) |> Enum.to_list()
[]
"""
@doc since: "1.6.0"
@spec intersperse(Enumerable.t(), any) :: Enumerable.t()
def intersperse(enumerable, intersperse_element) do
Stream.transform(enumerable, false, fn
element, true -> {[intersperse_element, element], true}
element, false -> {[element], true}
end)
end
  ## Helpers
  # The `lazy` helpers compose a new reducer (plus optional accumulator and
  # done-callback) onto a stream without re-wrapping it: when the input is
  # already a %Stream{} with no `done` callback installed, the new fun/acc
  # are pushed onto its lists; otherwise a fresh %Stream{} is built.
  @compile {:inline, lazy: 2, lazy: 3, lazy: 4}
  defp lazy(%Stream{done: nil, funs: funs} = lazy, fun), do: %{lazy | funs: [fun | funs]}
  defp lazy(enum, fun), do: %Stream{enum: enum, funs: [fun]}
  # Variant carrying an accumulator alongside the composed fun.
  defp lazy(%Stream{done: nil, funs: funs, accs: accs} = lazy, acc, fun),
    do: %{lazy | funs: [fun | funs], accs: [acc | accs]}
  defp lazy(enum, acc, fun), do: %Stream{enum: enum, funs: [fun], accs: [acc]}
  # Variant additionally installing a `done` callback for end-of-stream.
  defp lazy(%Stream{done: nil, funs: funs, accs: accs} = lazy, acc, fun, done),
    do: %{lazy | funs: [fun | funs], accs: [acc | accs], done: done}
  defp lazy(enum, acc, fun, done), do: %Stream{enum: enum, funs: [fun], accs: [acc], done: done}
end
defimpl Enumerable, for: Stream do
  @compile :inline_list_funs
  # Counting, membership and slicing cannot be answered without running
  # the stream, so fall back to the default reduce-based algorithms.
  def count(_lazy), do: {:error, __MODULE__}
  def member?(_lazy, _value), do: {:error, __MODULE__}
  def slice(_lazy), do: {:error, __MODULE__}
  # Wrap the user accumulator in a one-element list so it can travel
  # on the same stack as the accumulators of the composed stream funs.
  def reduce(lazy, acc, fun) do
    do_reduce(lazy, acc, fn x, [acc] ->
      {reason, acc} = fun.(x, acc)
      {reason, [acc]}
    end)
  end
  # Fold the stored funs around the consumer fun to obtain the composed
  # reducer, then reduce the underlying enumerable with it.
  defp do_reduce(%Stream{enum: enum, funs: funs, accs: accs, done: done}, acc, fun) do
    composed = :lists.foldl(fn entry_fun, acc -> entry_fun.(acc) end, fun, funs)
    reduce = &Enumerable.reduce(enum, &1, composed)
    do_each(reduce, done && {done, fun}, :lists.reverse(accs), acc)
  end
  # Run the composed reducer over the accumulator stack; re-suspend with a
  # continuation when the consumer suspends, otherwise finish via do_done/2.
  defp do_each(reduce, done, accs, {command, acc}) do
    case reduce.({command, [acc | accs]}) do
      {:suspended, [acc | accs], continuation} ->
        {:suspended, acc, &do_each(continuation, done, accs, &1)}
      {:halted, accs} ->
        do_done({:halted, accs}, done)
      {:done, accs} ->
        do_done({:done, accs}, done)
    end
  end
  # No done-callback installed: just unwrap the user accumulator.
  defp do_done({reason, [acc | _]}, nil), do: {reason, acc}
  # With a done-callback, invoke it with the user accumulator and the
  # last stacked accumulator before unwrapping the final result.
  defp do_done({reason, [acc | t]}, {done, fun}) do
    [h | _] = Enum.reverse(t)
    case done.([acc, h], fun) do
      {:cont, [acc | _]} -> {reason, acc}
      {:halt, [acc | _]} -> {:halted, acc}
      {:suspend, [acc | _]} -> {:suspended, acc, &{:done, elem(&1, 1)}}
    end
  end
end
defimpl Inspect, for: Stream do
  import Inspect.Algebra

  # Renders a stream as #Stream<[enum: ..., funs: [...]]>. The funs are
  # stored innermost-first, so they are reversed for display.
  def inspect(%{enum: enum, funs: funs}, opts) do
    details = [enum: enum, funs: Enum.reverse(funs)]
    concat(["#Stream<", to_doc(details, opts), ">"])
  end
end
| 30.269939 | 110 | 0.615484 |
e83d81617941ca839e09ff15f62ff1f18433d87a | 675 | exs | Elixir | 2018/Day02/part1.exs | CarpeNoctem/AdventOfCode | 8dcfb735514fa474c8e90865664b35408ccead25 | [
"MIT"
] | null | null | null | 2018/Day02/part1.exs | CarpeNoctem/AdventOfCode | 8dcfb735514fa474c8e90865664b35408ccead25 | [
"MIT"
] | null | null | null | 2018/Day02/part1.exs | CarpeNoctem/AdventOfCode | 8dcfb735514fa474c8e90865664b35408ccead25 | [
"MIT"
] | null | null | null | defmodule Day2 do
defp get_checksum([], twos, threes) do
twos * threes
end
defp get_checksum([id | ids], twos, threes) do
stats = parse_id(String.split(id, ""), %{}) |> Map.values
twos = 2 in stats && twos + 1 || twos
threes = 3 in stats && threes + 1 || threes
get_checksum(ids, twos, threes)
end
def get_checksum(ids) do
get_checksum(ids, 0, 0)
end
defp parse_id([], stats) do
stats
end
defp parse_id([char | chars], stats) do
parse_id(chars, Map.update(stats, char, 1, &(&1 + 1)))
end
end
# Read the puzzle input (whitespace-separated box IDs) and print the checksum.
File.read!("input.txt") |> String.split |> Day2.get_checksum |> IO.puts
| 23.275862 | 71 | 0.568889 |
e83d901da180d2bf473a8cd083ade4e27d9e33b2 | 6,174 | exs | Elixir | test/c_backend_tests/c_client_tests/connection_test.exs | juljimm/opex62541 | c44c157213a8a3fb07283f6e697e6dd018693315 | [
"MIT"
] | null | null | null | test/c_backend_tests/c_client_tests/connection_test.exs | juljimm/opex62541 | c44c157213a8a3fb07283f6e697e6dd018693315 | [
"MIT"
] | null | null | null | test/c_backend_tests/c_client_tests/connection_test.exs | juljimm/opex62541 | c44c157213a8a3fb07283f6e697e6dd018693315 | [
"MIT"
] | null | null | null | defmodule CClientConnectionTest do
use ExUnit.Case
doctest Opex62541
setup do
executable = :code.priv_dir(:opex62541) ++ '/opc_ua_client'
port =
Port.open({:spawn_executable, executable}, [
{:args, []},
{:packet, 2},
:use_stdio,
:binary,
:exit_status
])
config = %{
requestedSessionTimeout: 12000,
secureChannelLifeTime: 6000,
timeout: 500
}
msg = {:set_client_config, config}
send(port, {self(), {:command, :erlang.term_to_binary(msg)}})
status =
receive do
{_, {:data, <<?r, response::binary>>}} ->
:erlang.binary_to_term(response)
x ->
IO.inspect(x)
:error
after
3000 ->
# Not sure how this can be recovered
exit(:port_timed_out)
end
%{port: port, status: status}
end
test "Connect client by url", state do
case state.status do
:ok ->
url = "opc.tcp://localhost:4840"
msg = {:connect_client_by_url, url}
send(state.port, {self(), {:command, :erlang.term_to_binary(msg)}})
c_response =
receive do
{_, {:data, <<?r, response::binary>>}} ->
:erlang.binary_to_term(response)
x ->
IO.inspect(x)
:error
after
3000 ->
# Not sure how this can be recovered
exit(:port_timed_out)
end
assert c_response == :ok
msg = {:get_client_state, nil}
send(state.port, {self(), {:command, :erlang.term_to_binary(msg)}})
c_response =
receive do
{_, {:data, <<?r, response::binary>>}} ->
:erlang.binary_to_term(response)
x ->
IO.inspect(x)
:error
after
1000 ->
# Not sure how this can be recovered
exit(:port_timed_out)
end
assert c_response == {:ok, 'Session'}
_ ->
raise("Configuration fail")
end
end
test "Connect client with no session", state do
case state.status do
:ok ->
url = "opc.tcp://localhost:4840"
n_chars = String.length(url)
msg = {:connect_client_no_session, {n_chars, url}}
send(state.port, {self(), {:command, :erlang.term_to_binary(msg)}})
c_response =
receive do
{_, {:data, <<?r, response::binary>>}} ->
:erlang.binary_to_term(response)
x ->
IO.inspect(x)
:error
after
3000 ->
# Not sure how this can be recovered
exit(:port_timed_out)
end
assert c_response == :ok
msg = {:get_client_state, nil}
send(state.port, {self(), {:command, :erlang.term_to_binary(msg)}})
c_response =
receive do
{_, {:data, <<?r, response::binary>>}} ->
:erlang.binary_to_term(response)
x ->
IO.inspect(x)
:error
after
1000 ->
# Not sure how this can be recovered
exit(:port_timed_out)
end
assert c_response == {:ok, 'Secure Channel'}
_ ->
raise("Configuration fail")
end
end
test "Connect client with username", state do
case state.status do
:ok ->
url = "opc.tcp://localhost:4840"
url_n_chars = String.length(url)
username = "opc.tcp://localhost:4840"
username_n_chars = String.length(url)
password = "Secret"
password_n_chars = String.length(password)
msg = {:connect_client_by_username, {url_n_chars, url, username_n_chars, username, password_n_chars, password}}
send(state.port, {self(), {:command, :erlang.term_to_binary(msg)}})
c_response =
receive do
{_, {:data, <<?r, response::binary>>}} ->
:erlang.binary_to_term(response)
x ->
IO.inspect(x)
:error
after
3000 ->
# Not sure how this can be recovered
exit(:port_timed_out)
end
# current server doesn't supports the user.
assert c_response == {:error, 2149515264}
_ ->
raise("Configuration fail")
end
end
test "Disconnect client", state do
case state.status do
:ok ->
# Connect
url = "opc.tcp://localhost:4840"
n_chars = String.length(url)
msg = {:connect_client_by_url, {n_chars, url}}
send(state.port, {self(), {:command, :erlang.term_to_binary(msg)}})
c_response =
receive do
{_, {:data, <<?r, response::binary>>}} ->
:erlang.binary_to_term(response)
x ->
IO.inspect(x)
:error
after
3000 ->
# Not sure how this can be recovered
exit(:port_timed_out)
end
assert c_response == :ok
# Disconnect
msg = {:disconnect_client, nil}
send(state.port, {self(), {:command, :erlang.term_to_binary(msg)}})
c_response =
receive do
{_, {:data, <<?r, response::binary>>}} ->
:erlang.binary_to_term(response)
x ->
IO.inspect(x)
:error
after
3000 ->
# Not sure how this can be recovered
exit(:port_timed_out)
end
assert c_response == :ok
msg = {:get_client_state, nil}
send(state.port, {self(), {:command, :erlang.term_to_binary(msg)}})
c_response =
receive do
{_, {:data, <<?r, response::binary>>}} ->
:erlang.binary_to_term(response)
x ->
IO.inspect(x)
:error
after
1000 ->
# Not sure how this can be recovered
exit(:port_timed_out)
end
assert c_response == {:ok, 'Disconnected'}
_ ->
raise("Configuration fail")
end
end
end
| 25.618257 | 119 | 0.50162 |
e83d96d16613c09149b849c6250e7e9f94281ca1 | 2,825 | ex | Elixir | lib/ex_algo/list/linked_list.ex | code-shoily/ex_algo | 7837c222fd2844a151b6b92038f94ea088bec0a2 | [
"MIT"
] | 21 | 2021-11-21T08:07:38.000Z | 2022-03-13T06:19:35.000Z | lib/ex_algo/list/linked_list.ex | code-shoily/ex_algo | 7837c222fd2844a151b6b92038f94ea088bec0a2 | [
"MIT"
] | 3 | 2021-11-26T22:54:09.000Z | 2022-03-06T21:16:12.000Z | lib/ex_algo/list/linked_list.ex | code-shoily/ex_algo | 7837c222fd2844a151b6b92038f94ea088bec0a2 | [
"MIT"
] | null | null | null | defmodule ExAlgo.List.LinkedList do
@moduledoc """
Implementation of a singly linked list.
"""
@type neg_index_error :: {:error, :negative_index}
@type empty_error :: {:error, :empty_list}
@type value_type :: any()
@type t :: %__MODULE__{container: [value_type()]}
defstruct container: []
@doc """
Creates an empty linked list.
## Example
iex> LinkedList.new
%LinkedList{container: []}
"""
@spec new :: t()
def new, do: %__MODULE__{container: []}
@doc """
Creates an empty linked list from a list
## Example
iex> LinkedList.from 1..3
%LinkedList{container: [1, 2, 3]}
"""
@spec from(Enumerable.t()) :: t()
def from(enumerable), do: %__MODULE__{container: Enum.to_list(enumerable)}
@doc """
Inserts a new element on the head of the list.
## Example
iex> list = LinkedList.from 1..3
iex> list |> LinkedList.insert(10)
%LinkedList{container: [10, 1, 2, 3]}
"""
@spec insert(t(), value_type()) :: t()
def insert(%__MODULE__{container: container}, element),
do: %__MODULE__{container: [element | container]}
@doc """
Removes the head.
## Example
iex> list = LinkedList.from 1..3
iex> list |> LinkedList.remove()
{1, %LinkedList{container: [2, 3]}}
iex> LinkedList.new() |> LinkedList.remove()
{:error, :empty_list}
"""
@spec remove(t()) :: {value_type(), t()} | empty_error()
def remove(%__MODULE__{container: []}), do: {:error, :empty_list}
def remove(%__MODULE__{container: [head | rest]}), do: {head, %__MODULE__{container: rest}}
@doc """
Returns the head of the linked list
## Example
iex> LinkedList.from(1..10) |> LinkedList.head()
1
iex> LinkedList.new |> LinkedList.head()
{:error, :empty_list}
"""
@spec head(t()) :: empty_error()
def head(%__MODULE__{container: [head | _]}), do: head
def head(_), do: {:error, :empty_list}
@doc """
Returns the next items of the linked list
## Example
iex> LinkedList.from(1..3) |> LinkedList.next()
%LinkedList{container: [2, 3]}
iex> LinkedList.new |> LinkedList.next()
{:error, :empty_list}
"""
@spec next(t()) :: t() | empty_error()
def next(%__MODULE__{container: [_ | next]}), do: %__MODULE__{container: next}
def next(_), do: {:error, :empty_list}
@doc """
Return the element at index. Index is 0 based and must be positive. Errors on empty list.
## Example
iex> LinkedList.from(0..10) |> LinkedList.at(3)
3
"""
@spec at(t(), value_type()) :: value_type() | empty_error() | neg_index_error()
def at(%__MODULE__{container: []}, _), do: {:error, :empty_list}
def at(_, index) when index < 0, do: {:error, :negative_index}
def at(list, 0), do: list |> head()
def at(list, index), do: list |> next() |> at(index - 1)
end
| 23.739496 | 93 | 0.613097 |
e83dc2f95a52d22e4ca9087f12fc082497e9edb7 | 75 | exs | Elixir | test/views/layout_view_test.exs | chasm/elm_pack | 5bbdc4df335376170b6d161bfc65f9fc7eca2648 | [
"MIT"
] | 1 | 2016-08-02T00:36:46.000Z | 2016-08-02T00:36:46.000Z | test/views/layout_view_test.exs | chasm/elm_pack | 5bbdc4df335376170b6d161bfc65f9fc7eca2648 | [
"MIT"
] | null | null | null | test/views/layout_view_test.exs | chasm/elm_pack | 5bbdc4df335376170b6d161bfc65f9fc7eca2648 | [
"MIT"
] | null | null | null | defmodule ElmPack.LayoutViewTest do
use ElmPack.ConnCase, async: true
end | 25 | 35 | 0.826667 |
e83dcc19fb4bfe51bd6757ff62852d740d15798d | 698 | ex | Elixir | lib/geolix/adapter/mmdb2/record/enterprise_subdivision.ex | coladarci/geolix | 0a0508db410732fa8a24cbcd28e44f89b1b30afa | [
"Apache-2.0"
] | null | null | null | lib/geolix/adapter/mmdb2/record/enterprise_subdivision.ex | coladarci/geolix | 0a0508db410732fa8a24cbcd28e44f89b1b30afa | [
"Apache-2.0"
] | null | null | null | lib/geolix/adapter/mmdb2/record/enterprise_subdivision.ex | coladarci/geolix | 0a0508db410732fa8a24cbcd28e44f89b1b30afa | [
"Apache-2.0"
] | null | null | null | defmodule Geolix.Adapter.MMDB2.Record.EnterpriseSubdivision do
@moduledoc """
Record for `subdivision` information (enterprise database).
"""
alias Geolix.Adapter.MMDB2.Model
alias Geolix.Adapter.MMDB2.Record
defstruct %Record.Subdivision{}
|> Map.keys()
|> List.delete(:__struct__)
|> List.flatten([:confidence])
@behaviour Model
def from(nil, _), do: nil
def from(data, locale) when is_list(data) do
data |> Enum.map(&from(&1, locale))
end
def from(data, nil), do: struct(__MODULE__, data)
def from(data, locale) do
result = from(data, nil)
result = Map.put(result, :name, result.names[locale])
result
end
end
| 22.516129 | 62 | 0.654728 |
e83e12dbfa850b4b19f338790d0623c481c14e79 | 83 | exs | Elixir | test/test_helper.exs | AquarHEAD/mashiro_no_asobiba | 84badd8410087b61807cd225226feec5646775bd | [
"Unlicense"
] | 1 | 2016-07-25T10:16:20.000Z | 2016-07-25T10:16:20.000Z | test/test_helper.exs | AquarHEAD/mashiro_no_asobiba | 84badd8410087b61807cd225226feec5646775bd | [
"Unlicense"
] | 2 | 2016-10-31T22:13:02.000Z | 2016-11-19T01:33:42.000Z | test/test_helper.exs | aquarhead/mashiro_no_asobiba | 84badd8410087b61807cd225226feec5646775bd | [
"Unlicense"
] | null | null | null | ExUnit.start()
defmodule TestA do
  # Stub module used as a :meck target in the test helper;
  # write/0 is a deliberate no-op marker.
  def write do
    :nothing
  end
end
:meck.new(TestA)
| 10.375 | 25 | 0.710843 |
e83e32c3fb32c9fa9b0cacc97e68e3e104547aae | 1,653 | ex | Elixir | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/disk_encryption_status.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/disk_encryption_status.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/disk_encryption_status.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SQLAdmin.V1beta4.Model.DiskEncryptionStatus do
  @moduledoc """
  Disk encryption status for an instance.

  ## Attributes

  *   `kind` (*type:* `String.t`, *default:* `nil`) - This is always <code>sql#diskEncryptionStatus</code>.
  *   `kmsKeyVersionName` (*type:* `String.t`, *default:* `nil`) - KMS key version used to encrypt the Cloud SQL instance resource
  """

  # ModelBase supplies the field/1 macro plus the decode/encode plumbing.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :kind => String.t(),
          :kmsKeyVersionName => String.t()
        }

  # JSON field registrations consumed by the ModelBase machinery.
  field(:kind)
  field(:kmsKeyVersionName)
end
defimpl Poison.Decoder, for: GoogleApi.SQLAdmin.V1beta4.Model.DiskEncryptionStatus do
  # Delegate JSON decoding to the generated model's decode/2.
  def decode(value, options) do
    GoogleApi.SQLAdmin.V1beta4.Model.DiskEncryptionStatus.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.SQLAdmin.V1beta4.Model.DiskEncryptionStatus do
  # Encoding is generic for all Gax models.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.06 | 130 | 0.733212 |
e83e7895711f9d7eb8866d3fb5c34c8c0677eb47 | 2,314 | ex | Elixir | clients/monitoring/lib/google_api/monitoring/v3/model/monitored_resource_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/monitoring/lib/google_api/monitoring/v3/model/monitored_resource_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/monitoring/lib/google_api/monitoring/v3/model/monitored_resource_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Monitoring.V3.Model.MonitoredResourceMetadata do
  @moduledoc """
  Auxiliary metadata for a MonitoredResource object. MonitoredResource objects contain the minimum set of information to uniquely identify a monitored resource instance. There is some other useful auxiliary metadata. Monitoring and Logging use an ingestion pipeline to extract metadata for cloud resources of all types, and store the metadata in this message.

  ## Attributes

  *   `systemLabels` (*type:* `map()`, *default:* `nil`) - Output only. Values for predefined system metadata labels. System labels are a kind of metadata extracted by Google, including "machine_image", "vpc", "subnet_id", "security_group", "name", etc. System label values can be only strings, Boolean values, or a list of strings. For example: { "name": "my-test-instance", "security_group": ["a", "b", "c"], "spot_instance": false }
  *   `userLabels` (*type:* `map()`, *default:* `nil`) - Output only. A map of user-defined metadata labels.
  """

  # ModelBase supplies the field/2 macro plus the decode/encode plumbing.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :systemLabels => map() | nil,
          :userLabels => map() | nil
        }

  # Both fields decode as free-form maps.
  field(:systemLabels, type: :map)
  field(:userLabels, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.Monitoring.V3.Model.MonitoredResourceMetadata do
  # Delegate JSON decoding to the generated model's decode/2.
  def decode(value, options) do
    GoogleApi.Monitoring.V3.Model.MonitoredResourceMetadata.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Monitoring.V3.Model.MonitoredResourceMetadata do
  # Encoding is generic for all Gax models.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 46.28 | 436 | 0.742005 |
e83ebe6a0401fc4152bc06ea5299a2f5156ed926 | 3,786 | ex | Elixir | apps/crawler/lib/crawler/task_manager.ex | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 3 | 2018-07-20T22:14:36.000Z | 2018-12-21T19:54:48.000Z | apps/crawler/lib/crawler/task_manager.ex | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 36 | 2018-09-15T21:46:54.000Z | 2020-03-28T16:10:18.000Z | apps/crawler/lib/crawler/task_manager.ex | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 2 | 2018-07-22T08:47:07.000Z | 2021-12-11T01:39:19.000Z | defmodule Crawler.TaskManager do
@moduledoc """
Module to limit the amount of concurrent running task.
If the task maximum is reached, the request will be queued.
"""
# struct to represent the inner gen_server state
defstruct [:max, :queue, :supervisor]
use GenServer
require Logger
alias Crawler
import Common.Utils, only: [debug: 1]
@type max() :: pos_integer() | :infinty
@typedoc """
start options for starting up a `TaskManager`
"""
@type start_options() ::
{:max, max()}
| {:task_supervisor, Supervisor.supervisor()}
| GenServer.options()
## api
@doc """
start an instance
`:max` and `:task_supervisor` are required:
- `:max` the maximum tasks running in parrallel
- `:task_manager` the `Task.Supervisor` to supervise the tasks.
`GenServer.options()` can be added, but are optional.
"""
@spec start_link(opts :: [start_options()]) :: GenServer.on_start()
def start_link(opts) do
{max, opts1} = Access.pop(opts, :max)
{supi, opts2} = Access.pop(opts1, :task_supervisor)
GenServer.start_link(__MODULE__, {max, supi}, opts2)
end
@doc """
execute `client.search(query)` in a rate limited way.
the search callback is defined in `Crawler.Client`.
"""
@spec search(
query :: Crawler.Client.args(),
manager :: GenServer.name(),
client :: module(),
timeout()
) :: Crawler.Client.result()
def search(query, manager, client, timeout \\ 5_000) do
GenServer.call(manager, {:search, {client, query}}, timeout)
end
# callbacks and internal stuff
@doc false
def init({max, supervisor}) do
start = %__MODULE__{
max: max,
queue: :queue.new(),
supervisor: supervisor
}
debug("#{__MODULE__} started in #{inspect(self())} inits with #{inspect(start)}")
{:ok, start}
end
# get the call to add on task
@doc false
def handle_call({:search, args}, client, state) do
# startup a new task if the max is not reached
# otherwise queue it
if max_reached?(state) do
debug("directly start task #{inspect(args)}")
:ok = start_task(state.supervisor, client, args)
{:noreply, state}
else
debug("request #{inspect(args)} will be queued")
q = :queue.in({client, args}, state.queue)
{:noreply, %__MODULE__{state | queue: q}}
end
end
# a task returned without error
@doc false
def handle_info({ref, :ok}, state) when is_reference(ref) do
debug("a Task (#{inspect(ref)}) ended successful")
{:noreply, state}
end
# a monitored process died (for whatever reason)
@doc false
def handle_info(down = {:DOWN, _ref, :process, _pid, _reason}, state) do
debug("got a down msg : #{inspect(down)}")
# add new task if aviable
queue = state.queue
case :queue.out(queue) do
{:empty, ^queue} ->
{:noreply, state}
{{:value, {pid, args}}, q} ->
if max_reached?(state) do
:ok = start_task(state.supervisor, pid, args)
{:noreply, %__MODULE__{state | queue: q}}
else
{:noreply, state}
end
end
end
defp max_reached?(state) do
state.max > count_tasks(state.supervisor)
end
defp count_tasks(supervisor) do
supervisor |> Task.Supervisor.children() |> length
end
defp start_task(
supervisor,
client = {client_pid, _id},
{provider_client, query}
)
when is_pid(client_pid) do
debug("current supervised children #{inspect(count_tasks(supervisor))}")
%Task{} =
Task.Supervisor.async_nolink(supervisor, fn ->
Process.link(client_pid)
result = provider_client.search(query)
GenServer.reply(client, result)
:ok
end)
:ok
end
end
| 26.475524 | 85 | 0.624142 |
e83ec99148948baf2ca178d287ed05876cd2c487 | 574 | exs | Elixir | mix.exs | Pragmatic-Elixir-Meetup/rpi_video | eb70bc19eb96ee2e617ae3a6477480e5c41cbe3a | [
"MIT"
] | 1 | 2019-12-12T09:00:18.000Z | 2019-12-12T09:00:18.000Z | mix.exs | silathdiir/rpi_video | eb70bc19eb96ee2e617ae3a6477480e5c41cbe3a | [
"MIT"
] | 2 | 2019-12-12T01:25:26.000Z | 2019-12-12T17:51:33.000Z | mix.exs | Pragmatic-Elixir-Meetup/rpi_video | eb70bc19eb96ee2e617ae3a6477480e5c41cbe3a | [
"MIT"
] | 1 | 2019-12-12T09:13:47.000Z | 2019-12-12T09:13:47.000Z | defmodule RpiVideo.MixProject do
use Mix.Project
def project do
[
app: :rpi_video,
version: "0.0.1",
elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 19.793103 | 87 | 0.578397 |
e83ec9f2951aab265ce644a43331101e8967c17c | 751 | exs | Elixir | test/tags/feature_filterer_test.exs | ejscunha/white-bread | 1c2eed1c98545beeb70b590426ce9026a8455e97 | [
"MIT"
] | 209 | 2015-03-03T14:14:28.000Z | 2020-10-26T03:23:48.000Z | test/tags/feature_filterer_test.exs | ejscunha/white-bread | 1c2eed1c98545beeb70b590426ce9026a8455e97 | [
"MIT"
] | 83 | 2015-03-23T11:46:51.000Z | 2020-11-04T09:47:06.000Z | test/tags/feature_filterer_test.exs | ejscunha/white-bread | 1c2eed1c98545beeb70b590426ce9026a8455e97 | [
"MIT"
] | 46 | 2015-06-12T17:37:21.000Z | 2020-10-30T09:52:45.000Z | defmodule WhiteBread.Tags.FeatureFiltererTest do
use ExUnit.Case
import WhiteBread.Tags.FeatureFilterer, only: [get_for_tags: 2]
alias Gherkin.Elements.Feature, as: Feature
alias Gherkin.Elements.Scenario, as: Scenario
test "Returns a feature if it has a matching tag" do
feature = %Feature{tags: ["matching"]}
assert get_for_tags([feature], ["matching"]) == [feature]
end
test "Returns a feature with filtered scenarios if any of them match" do
matching_scenario = %Scenario{tags: ["matching"]}
other_scenario = %Scenario{}
feature = %Feature{name: "mine", scenarios: [matching_scenario, other_scenario]}
assert get_for_tags([feature], ["matching"]) == [%{feature | scenarios: [matching_scenario]}]
end
end
| 37.55 | 97 | 0.723036 |
e83f1071a990eed3ee57b337a956672c34dfb616 | 781 | ex | Elixir | ros/ros_ui_station/test/support/channel_case.ex | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | [
"Apache-2.0"
] | 1 | 2019-07-01T18:47:28.000Z | 2019-07-01T18:47:28.000Z | ros/ros_ui_station/test/support/channel_case.ex | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | [
"Apache-2.0"
] | 4 | 2020-07-17T16:57:18.000Z | 2021-05-09T23:50:52.000Z | ros/ros_ui_station/test/support/channel_case.ex | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | [
"Apache-2.0"
] | null | null | null | defmodule Ros.StationWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint Ros.StationWeb.Endpoint
end
end
setup _tags do
:ok
end
end
| 24.40625 | 59 | 0.727273 |
e83f1cd76906f99446af18083f52648fd3072c7c | 6,475 | ex | Elixir | lib/hedwig/robot.ex | scrogson/hedwig | a453847d04a9d730952fc5b2f8239848f4285573 | [
"MIT"
] | 51 | 2015-01-15T10:47:58.000Z | 2020-10-21T19:55:23.000Z | lib/hedwig/robot.ex | scrogson/hedwig | a453847d04a9d730952fc5b2f8239848f4285573 | [
"MIT"
] | 13 | 2015-01-27T13:39:45.000Z | 2015-09-10T22:16:45.000Z | lib/hedwig/robot.ex | scrogson/hedwig | a453847d04a9d730952fc5b2f8239848f4285573 | [
"MIT"
] | 16 | 2015-04-02T10:24:16.000Z | 2022-01-04T10:34:40.000Z | defmodule Hedwig.Robot do
@moduledoc """
Defines a robot.
Robots receive messages from a chat source (XMPP, Slack, Console, etc), and
dispatch them to matching responders. See the documentation for
`Hedwig.Responder` for details on responders.
When used, the robot expects the `:otp_app` as option. The `:otp_app` should
point to an OTP application that has the robot configuration. For example,
the robot:
defmodule MyApp.Robot do
use Hedwig.Robot, otp_app: :my_app
end
Could be configured with:
config :my_app, MyApp.Robot,
adapter: Hedwig.Adapters.Console,
name: "alfred"
Most of the configuration that goes into the `config` is specific to the
adapter. Be sure to check the documentation for the adapter in use for all
of the available options.
## Robot configuration
* `adapter` - the adapter module name.
* `name` - the name the robot will respond to.
* `aka` - an alias the robot will respond to.
* `log_level` - the level to use when logging output.
* `responders` - a list of responders specified in the following format:
`{module, kwlist}`.
"""
defstruct adapter: nil,
aka: nil,
name: "",
opts: [],
pid: nil,
responders: []
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
use GenServer
require Logger
{otp_app, adapter, robot_config} =
Hedwig.Robot.Supervisor.parse_config(__MODULE__, opts)
@adapter adapter
@before_compile adapter
@config robot_config
@log_level robot_config[:log_level] || :debug
@otp_app otp_app
def start_link(opts \\ []) do
Hedwig.start_robot(__MODULE__, opts)
end
def stop(robot) do
Hedwig.stop_robot(robot)
end
def config(opts \\ []) do
Hedwig.Robot.Supervisor.config(__MODULE__, @otp_app, opts)
end
def log(msg) do
Logger.unquote(@log_level)(fn ->
"#{inspect msg}"
end, [])
end
def __adapter__, do: @adapter
def init({robot, opts}) do
opts = Keyword.merge(robot.config, opts)
{:ok, adapter} = @adapter.start_link(robot, opts)
{aka, opts} = Keyword.pop(opts, :aka)
{name, opts} = Keyword.pop(opts, :name)
responders = Keyword.get(opts, :responders, [])
unless responders == [] do
GenServer.cast(self, :install_responders)
end
state = %Hedwig.Robot{
adapter: adapter,
aka: aka,
name: name,
opts: opts,
pid: self()
}
{:ok, state}
end
def after_connect(state) do
Logger.warn """
#{inspect __MODULE__}.after_connect/1 default handler invoked.
"""
{:ok, state}
end
def handle_in(msg, state) do
Logger.warn """
#{inspect __MODULE__}.handle_in/2 default handler invoked.
"""
{:ok, state}
end
def handle_call(:after_connect, _from, state) do
{:ok, state} = __MODULE__.after_connect(state)
{:reply, :ok, state}
end
def handle_cast({:send, msg}, %{adapter: adapter} = state) do
@adapter.send(adapter, msg)
{:noreply, state}
end
def handle_cast({:reply, msg}, %{adapter: adapter} = state) do
@adapter.reply(adapter, msg)
{:noreply, state}
end
def handle_cast({:emote, msg}, %{adapter: adapter} = state) do
@adapter.emote(adapter, msg)
{:noreply, state}
end
def handle_cast({:register, name}, state) do
Hedwig.Registry.register(name)
{:noreply, state}
end
def handle_cast(%Hedwig.Message{} = msg, %{responders: responders} = state) do
Hedwig.Responder.run(%{msg | robot: %{state | responders: []}}, responders)
{:noreply, state}
end
def handle_cast({:handle_in, msg}, state) do
{:ok, state} = __MODULE__.handle_in(msg, state)
{:noreply, state}
end
def handle_cast(:install_responders, %{opts: opts} = state) do
responders =
Enum.reduce opts[:responders], [], fn {mod, opts}, acc ->
mod.install(state, opts) ++ acc
end
{:noreply, %{state | responders: responders}}
end
def handle_info(msg, state) do
{:noreply, state}
end
def terminate(_reason, _state) do
:ok
end
def code_change(_old, state, _extra) do
{:ok, state}
end
defoverridable [
{:after_connect, 1},
{:terminate, 2},
{:code_change, 3},
{:handle_in, 2},
{:handle_info, 2}
]
end
end
@doc false
def start_link(robot, opts) do
GenServer.start_link(robot, {robot, opts})
end
@doc """
Send a message via the robot.
"""
def send(pid, msg) do
GenServer.cast(pid, {:send, msg})
end
@doc """
Send a reply message via the robot.
"""
def reply(pid, msg) do
GenServer.cast(pid, {:reply, msg})
end
@doc """
Send an emote message via the robot.
"""
def emote(pid, msg) do
GenServer.cast(pid, {:emote, msg})
end
@doc """
Handles invoking installed responders with a `Hedwig.Message`.
This function should be called by an adapter when a message arrives. A message
will be sent to each installed responder.
"""
@spec handle_message(pid, Hedwig.Message.t) :: :ok
def handle_message(robot, %Hedwig.Message{} = msg) do
GenServer.cast(robot, msg)
end
@doc """
Invokes a user defined `handle_in/2` function, if defined.
This function should be called by an adapter when a message arrives but
should be handled by the user.
"""
@spec handle_in(pid, any) :: :ok
def handle_in(robot, msg) do
GenServer.cast(robot, {:handle_in, msg})
end
@doc """
Invokes a user defined `after_connect/1` function, if defined.
If the user has defined an `after_connect/1` in the robot module, it will be
called with the robot's state. It is expected that the function return
`{:ok, state}`.
"""
@spec after_connect(pid, integer) :: :ok
def after_connect(robot, timeout \\ 5000) do
GenServer.call(robot, :after_connect, timeout)
end
@doc """
Allows a robot to be registered by name.
"""
@spec register(pid, any) :: :ok
def register(robot, name) do
GenServer.cast(robot, {:register, name})
end
end
| 26.108871 | 84 | 0.60417 |
e83f2c755581d562ef6cf98118e54805d9939edf | 1,542 | ex | Elixir | lib/nostrum/struct/event/message_reaction_remove.ex | mckethanor/nostrum | 13a3927c872c1540266e9f1ba4bcad4182baa9bf | [
"MIT"
] | null | null | null | lib/nostrum/struct/event/message_reaction_remove.ex | mckethanor/nostrum | 13a3927c872c1540266e9f1ba4bcad4182baa9bf | [
"MIT"
] | null | null | null | lib/nostrum/struct/event/message_reaction_remove.ex | mckethanor/nostrum | 13a3927c872c1540266e9f1ba4bcad4182baa9bf | [
"MIT"
] | 1 | 2021-09-13T20:59:42.000Z | 2021-09-13T20:59:42.000Z | defmodule Nostrum.Struct.Event.MessageReactionRemove do
@moduledoc "Sent when a user removes a reaction from a message"
@moduledoc since: "0.5.0"
alias Nostrum.Struct.{Channel, Emoji, Guild, Message, User}
alias Nostrum.{Snowflake, Util}
defstruct [:user_id, :channel_id, :message_id, :guild_id, :emoji]
# XXX: is this correct?
@typedoc "Author of the reaction"
@type user_id :: User.id()
@typedoc "ID of the channel in which the reaction was created"
@type channel_id :: Channel.id()
@typedoc "ID of the message to which the reaction was attached"
@type message_id :: Message.id()
@typedoc "ID of the guild on which the message lives, if applicable"
@type guild_id :: Guild.id() | nil
@typedoc "Partial emoji object that was removed"
@type emoji :: Emoji.t() | nil
@typedoc "Event sent when a user removes a reaction from a message"
@type t :: %__MODULE__{
user_id: user_id,
channel_id: channel_id,
message_id: message_id,
guild_id: guild_id,
emoji: emoji
}
@doc false
def to_struct(map) do
new =
map
|> Map.new(fn {k, v} -> {Util.maybe_to_atom(k), v} end)
|> Map.update(:user_id, nil, &Util.cast(&1, Snowflake))
|> Map.update(:channel_id, nil, &Util.cast(&1, Snowflake))
|> Map.update(:message_id, nil, &Util.cast(&1, Snowflake))
|> Map.update(:guild_id, nil, &Util.cast(&1, Snowflake))
|> Map.update(:emoji, nil, &Util.cast(&1, {:struct, Emoji}))
struct(__MODULE__, new)
end
end
| 31.469388 | 70 | 0.649157 |
e83f57ee02af165828632889b61ac8af73902c54 | 1,562 | ex | Elixir | lib/arc_ecto_paperclip/interpolations.ex | gregpardo/arc_ecto_paperclip | 4b1af69597ed0dadf7de1efa3b920ddae8cd3b53 | [
"Apache-2.0"
] | 1 | 2019-07-10T16:00:08.000Z | 2019-07-10T16:00:08.000Z | lib/arc_ecto_paperclip/interpolations.ex | gregpardo/arc_ecto_paperclip | 4b1af69597ed0dadf7de1efa3b920ddae8cd3b53 | [
"Apache-2.0"
] | null | null | null | lib/arc_ecto_paperclip/interpolations.ex | gregpardo/arc_ecto_paperclip | 4b1af69597ed0dadf7de1efa3b920ddae8cd3b53 | [
"Apache-2.0"
] | null | null | null | defmodule Arc.Ecto.Paperclip.Interpolations do
@doc """
Paperclip default interpolation of class name... Example: Book -> books
"""
@spec class(struct, atom, atom) :: {String.t}
def class(scope, _attachment_name, _version \\ :original) do
scope.__struct__ |> Inflex.pluralize |> Macro.underscore |> String.split("/") |> List.last
end
@doc """
Paperclip default interpolation of attachment name... Example: :book -> books
"""
@spec attachment(struct, atom, atom) :: {String.t}
def attachment(_scope, attachment_name, _version \\ :original) do
Atom.to_string(attachment_name) |> Inflex.pluralize
end
@doc """
Paperclip default interpolation of id_partition. Calculated using the objects ID
"""
@spec id_partition(struct, atom, atom) :: {String.t}
def id_partition(scope, _attachment_name, _version \\ :original) do
String.pad_leading("#{scope.id}", 9, "0")
|> String.split(~r/\d{3}/, include_captures: true, trim: true)
|> Enum.take(3)
|> Enum.map(fn s -> String.slice(s, 0, 3) end) # Fixes uuid support
|> Enum.join("/")
end
@doc """
Paperclip default interpolation of style
"""
@spec style(struct, atom, atom) :: {String.t}
def style(_scope, _attachment_name, version \\ :original) do
Atom.to_string(version)
end
@doc """
Paperclip style filename of the attachment
"""
@spec filename(struct, atom, atom) :: {String.t}
def filename(scope, attachment_name, _version \\ :original) do
Map.get(scope, String.to_atom("#{attachment_name}_file_name"))
end
end
| 33.956522 | 94 | 0.673496 |
e83f94e04a7f03adfb2e6d443207705007a3ef91 | 45,971 | ex | Elixir | lib/elixir/lib/calendar/datetime.ex | IvanRublev/elixir | 1ce201aa1ebbfc1666c4e4bde64f706a89629d59 | [
"Apache-2.0"
] | 2 | 2020-06-02T18:00:28.000Z | 2021-12-10T03:21:42.000Z | lib/elixir/lib/calendar/datetime.ex | IvanRublev/elixir | 1ce201aa1ebbfc1666c4e4bde64f706a89629d59 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/calendar/datetime.ex | IvanRublev/elixir | 1ce201aa1ebbfc1666c4e4bde64f706a89629d59 | [
"Apache-2.0"
] | null | null | null | defmodule DateTime do
@moduledoc """
A datetime implementation with a time zone.
This datetime can be seen as an ephemeral snapshot
of a datetime at a given time zone. For such purposes,
it also includes both UTC and Standard offsets, as
well as the zone abbreviation field used exclusively
for formatting purposes.
Remember, comparisons in Elixir using `==/2`, `>/2`, `</2` and friends
are structural and based on the DateTime struct fields. For proper
comparison between datetimes, use the `compare/2` function.
Developers should avoid creating the `DateTime` struct directly
and instead rely on the functions provided by this module as
well as the ones in third-party calendar libraries.
## Time zone database
Many functions in this module require a time zone database.
By default, it uses the default time zone database returned by
`Calendar.get_time_zone_database/0`, which defaults to
`Calendar.UTCOnlyTimeZoneDatabase` which only handles "Etc/UTC"
datetimes and returns `{:error, :utc_only_time_zone_database}`
for any other time zone.
Other time zone databases can also be configured. For example,
two of the available options are:
* [`tz`](https://hexdocs.pm/tz/)
* [`tzdata`](https://hexdocs.pm/tzdata/)
To use them, first make sure it is added as a dependency in `mix.exs`.
It can then be configured either via configuration:
config :elixir, :time_zone_database, Tzdata.TimeZoneDatabase
or by calling `Calendar.put_time_zone_database/1`:
Calendar.put_time_zone_database(Tzdata.TimeZoneDatabase)
See the proper names in the library installation instructions.
"""
  # A DateTime is only valid with a full date, time and zone description;
  # microseconds and calendar have sensible defaults.
  @enforce_keys [:year, :month, :day, :hour, :minute, :second] ++
                  [:time_zone, :zone_abbr, :utc_offset, :std_offset]

  defstruct [
    :year,
    :month,
    :day,
    :hour,
    :minute,
    :second,
    :time_zone,
    :zone_abbr,
    :utc_offset,
    :std_offset,
    microsecond: {0, 0},
    calendar: Calendar.ISO
  ]

  @type t :: %__MODULE__{
          year: Calendar.year(),
          month: Calendar.month(),
          day: Calendar.day(),
          calendar: Calendar.calendar(),
          hour: Calendar.hour(),
          minute: Calendar.minute(),
          second: Calendar.second(),
          microsecond: Calendar.microsecond(),
          time_zone: Calendar.time_zone(),
          zone_abbr: Calendar.zone_abbr(),
          utc_offset: Calendar.utc_offset(),
          std_offset: Calendar.std_offset()
        }
@unix_days :calendar.date_to_gregorian_days({1970, 1, 1})
@doc """
Returns the current datetime in UTC.
## Examples
iex> datetime = DateTime.utc_now()
iex> datetime.time_zone
"Etc/UTC"
"""
@spec utc_now(Calendar.calendar()) :: t
def utc_now(calendar \\ Calendar.ISO) do
System.os_time() |> from_unix!(:native, calendar)
end
@doc """
Converts the given Unix time to `DateTime`.
The integer can be given in different unit
according to `System.convert_time_unit/3` and it will
be converted to microseconds internally. Up to
253402300799 seconds is supported.
Unix times are always in UTC and therefore the DateTime
will be returned in UTC.
## Examples
iex> {:ok, datetime} = DateTime.from_unix(1_464_096_368)
iex> datetime
~U[2016-05-24 13:26:08Z]
iex> {:ok, datetime} = DateTime.from_unix(1_432_560_368_868_569, :microsecond)
iex> datetime
~U[2015-05-25 13:26:08.868569Z]
iex> {:ok, datetime} = DateTime.from_unix(253_402_300_799)
iex> datetime
~U[9999-12-31 23:59:59Z]
iex> {:error, :invalid_unix_time} = DateTime.from_unix(253_402_300_800)
The unit can also be an integer as in `t:System.time_unit/0`:
iex> {:ok, datetime} = DateTime.from_unix(143_256_036_886_856, 1024)
iex> datetime
~U[6403-03-17 07:05:22.320312Z]
Negative Unix times are supported up to -377705116800 seconds:
iex> {:ok, datetime} = DateTime.from_unix(-377_705_116_800)
iex> datetime
~U[-9999-01-01 00:00:00Z]
iex> {:error, :invalid_unix_time} = DateTime.from_unix(-377_705_116_801)
"""
@spec from_unix(integer, :native | System.time_unit(), Calendar.calendar()) ::
{:ok, t} | {:error, atom}
def from_unix(integer, unit \\ :second, calendar \\ Calendar.ISO) when is_integer(integer) do
case Calendar.ISO.from_unix(integer, unit) do
{:ok, {year, month, day}, {hour, minute, second}, microsecond} ->
iso_datetime = %DateTime{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
std_offset: 0,
utc_offset: 0,
zone_abbr: "UTC",
time_zone: "Etc/UTC"
}
convert(iso_datetime, calendar)
{:error, _} = error ->
error
end
end
@doc """
Converts the given Unix time to `DateTime`.
The integer can be given in different unit
according to `System.convert_time_unit/3` and it will
be converted to microseconds internally.
Unix times are always in UTC and therefore the DateTime
will be returned in UTC.
## Examples
# An easy way to get the Unix epoch is passing 0 to this function
iex> DateTime.from_unix!(0)
~U[1970-01-01 00:00:00Z]
iex> DateTime.from_unix!(1_464_096_368)
~U[2016-05-24 13:26:08Z]
iex> DateTime.from_unix!(1_432_560_368_868_569, :microsecond)
~U[2015-05-25 13:26:08.868569Z]
iex> DateTime.from_unix!(143_256_036_886_856, 1024)
~U[6403-03-17 07:05:22.320312Z]
"""
@spec from_unix!(integer, :native | System.time_unit(), Calendar.calendar()) :: t
def from_unix!(integer, unit \\ :second, calendar \\ Calendar.ISO) do
case from_unix(integer, unit, calendar) do
{:ok, datetime} ->
datetime
{:error, :invalid_unix_time} ->
raise ArgumentError, "invalid Unix time #{integer}"
end
end
@doc """
Converts the given `NaiveDateTime` to `DateTime`.

It expects a time zone to put the `NaiveDateTime` in.
If the time zone is "Etc/UTC", it always succeeds. Otherwise,
the NaiveDateTime is checked against the time zone database
given as `time_zone_database`. See the "Time zone database"
section in the module documentation.

## Examples

    iex> DateTime.from_naive(~N[2016-05-24 13:26:08.003], "Etc/UTC")
    {:ok, ~U[2016-05-24 13:26:08.003Z]}

When the datetime is ambiguous - for instance during changing from summer
to winter time - the two possible valid datetimes are returned. First the one
that happens first, then the one that happens after.

    iex> {:ambiguous, first_dt, second_dt} = DateTime.from_naive(~N[2018-10-28 02:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
    iex> first_dt
    #DateTime<2018-10-28 02:30:00+02:00 CEST Europe/Copenhagen>
    iex> second_dt
    #DateTime<2018-10-28 02:30:00+01:00 CET Europe/Copenhagen>

When there is a gap in wall time - for instance in spring when the clocks are
turned forward - the latest valid datetime just before the gap and the first
valid datetime just after the gap.

    iex> {:gap, just_before, just_after} = DateTime.from_naive(~N[2019-03-31 02:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
    iex> just_before
    #DateTime<2019-03-31 01:59:59.999999+01:00 CET Europe/Copenhagen>
    iex> just_after
    #DateTime<2019-03-31 03:00:00+02:00 CEST Europe/Copenhagen>

Most of the time there is one, and just one, valid datetime for a certain
date and time in a certain time zone.

    iex> {:ok, datetime} = DateTime.from_naive(~N[2018-07-28 12:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
    iex> datetime
    #DateTime<2018-07-28 12:30:00+02:00 CEST Europe/Copenhagen>

This function accepts any map or struct that contains at least the same fields as a `NaiveDateTime`
struct. The most common example of that is a `DateTime`. In this case the information about the time
zone of that `DateTime` is completely ignored. This is the same principle as passing a `DateTime` to
`Date.to_iso8601/2`. `Date.to_iso8601/2` extracts only the date-specific fields (calendar, year,
month and day) of the given structure and ignores all others.

This way if you have a `DateTime` in one time zone, you can get the same wall time in another time zone.
For instance if you have 2018-08-24 10:00:00 in Copenhagen and want a `DateTime` for 2018-08-24 10:00:00
in UTC you can do:

    iex> cph_datetime = DateTime.from_naive!(~N[2018-08-24 10:00:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
    iex> {:ok, utc_datetime} = DateTime.from_naive(cph_datetime, "Etc/UTC", FakeTimeZoneDatabase)
    iex> utc_datetime
    ~U[2018-08-24 10:00:00Z]

If instead you want a `DateTime` for the same point time in a different time zone see the
`DateTime.shift_zone/3` function which would convert 2018-08-24 10:00:00 in Copenhagen
to 2018-08-24 08:00:00 in UTC.
"""
@doc since: "1.4.0"
@spec from_naive(
        Calendar.naive_datetime(),
        Calendar.time_zone(),
        Calendar.time_zone_database()
      ) ::
        {:ok, t}
        | {:ambiguous, t, t}
        | {:gap, t, t}
        | {:error,
           :incompatible_calendars | :time_zone_not_found | :utc_only_time_zone_database}
# Bodiless head declares the `time_zone_database` default once for all clauses.
def from_naive(
      naive_datetime,
      time_zone,
      time_zone_database \\ Calendar.get_time_zone_database()
    )

# "Etc/UTC" never consults the database: its period is fixed at zero offsets.
def from_naive(naive_datetime, "Etc/UTC", _) do
  utc_period = %{std_offset: 0, utc_offset: 0, zone_abbr: "UTC"}
  {:ok, from_naive_with_period(naive_datetime, "Etc/UTC", utc_period)}
end

def from_naive(%{calendar: Calendar.ISO} = naive_datetime, time_zone, time_zone_database) do
  case time_zone_database.time_zone_periods_from_wall_datetime(naive_datetime, time_zone) do
    {:ok, period} ->
      {:ok, from_naive_with_period(naive_datetime, time_zone, period)}

    # Overlap (e.g. DST "fall back"): return both candidates, earliest first.
    {:ambiguous, first_period, second_period} ->
      first_datetime = from_naive_with_period(naive_datetime, time_zone, first_period)
      second_datetime = from_naive_with_period(naive_datetime, time_zone, second_period)
      {:ambiguous, first_datetime, second_datetime}

    {:gap, {first_period, first_period_until_wall}, {second_period, second_period_from_wall}} ->
      # `until_wall` is not valid, but any time just before is.
      # So by subtracting a second and adding .999999 seconds
      # we get the last microsecond just before.
      before_naive =
        first_period_until_wall
        |> Map.put(:microsecond, {999_999, 6})
        |> NaiveDateTime.add(-1)

      after_naive = second_period_from_wall
      latest_datetime_before = from_naive_with_period(before_naive, time_zone, first_period)
      first_datetime_after = from_naive_with_period(after_naive, time_zone, second_period)
      {:gap, latest_datetime_before, first_datetime_after}

    {:error, _} = error ->
      error
  end
end

def from_naive(%{calendar: calendar} = naive_datetime, time_zone, time_zone_database)
    when calendar != Calendar.ISO do
  # For non-ISO calendars, convert to ISO, create ISO DateTime, and then
  # convert to original calendar
  iso_result =
    with {:ok, in_iso} <- NaiveDateTime.convert(naive_datetime, Calendar.ISO) do
      from_naive(in_iso, time_zone, time_zone_database)
    end

  # Every tagged result carries one or two datetimes; convert each one back
  # to the caller's calendar while preserving the tag.
  case iso_result do
    {:ok, dt} ->
      convert(dt, calendar)

    {:ambiguous, dt1, dt2} ->
      with {:ok, dt1converted} <- convert(dt1, calendar),
           {:ok, dt2converted} <- convert(dt2, calendar),
           do: {:ambiguous, dt1converted, dt2converted}

    {:gap, dt1, dt2} ->
      with {:ok, dt1converted} <- convert(dt1, calendar),
           {:ok, dt2converted} <- convert(dt2, calendar),
           do: {:gap, dt1converted, dt2converted}

    {:error, _} = error ->
      error
  end
end
# Builds a `%DateTime{}` by combining the wall-clock fields of the naive
# datetime with the offsets and abbreviation of the given time-zone period.
defp from_naive_with_period(naive_datetime, time_zone, period) do
  %{std_offset: std_offset, utc_offset: utc_offset, zone_abbr: zone_abbr} = period

  %{
    calendar: calendar,
    year: year,
    month: month,
    day: day,
    hour: hour,
    minute: minute,
    second: second,
    microsecond: microsecond
  } = naive_datetime

  %DateTime{
    time_zone: time_zone,
    zone_abbr: zone_abbr,
    utc_offset: utc_offset,
    std_offset: std_offset,
    calendar: calendar,
    year: year,
    month: month,
    day: day,
    hour: hour,
    minute: minute,
    second: second,
    microsecond: microsecond
  }
end
@doc """
Converts the given `NaiveDateTime` to `DateTime`, raising on failure.

Like `from_naive/3`, the naive datetime is placed in `time_zone`.
"Etc/UTC" always succeeds; any other zone is resolved against the
`time_zone_database` (see the "Time zone database" section in the module
documentation). Instead of returning tagged tuples, this variant raises an
`ArgumentError` when the wall time is ambiguous, falls into a gap, or the
zone cannot be resolved.

## Examples

    iex> DateTime.from_naive!(~N[2016-05-24 13:26:08.003], "Etc/UTC")
    ~U[2016-05-24 13:26:08.003Z]

    iex> DateTime.from_naive!(~N[2018-05-24 13:26:08.003], "Europe/Copenhagen", FakeTimeZoneDatabase)
    #DateTime<2018-05-24 13:26:08.003+02:00 CEST Europe/Copenhagen>

"""
@doc since: "1.4.0"
@spec from_naive!(
        NaiveDateTime.t(),
        Calendar.time_zone(),
        Calendar.time_zone_database()
      ) :: t
def from_naive!(naive_datetime, time_zone, tz_db \\ Calendar.get_time_zone_database()) do
  case from_naive(naive_datetime, time_zone, tz_db) do
    {:ok, datetime} ->
      datetime

    {:ambiguous, first, second} ->
      raise ArgumentError,
            "cannot convert #{inspect(naive_datetime)} to datetime because such " <>
              "instant is ambiguous in time zone #{time_zone} as there is an overlap " <>
              "between #{inspect(first)} and #{inspect(second)}"

    {:gap, first, second} ->
      raise ArgumentError,
            "cannot convert #{inspect(naive_datetime)} to datetime because such " <>
              "instant does not exist in time zone #{time_zone} as there is a gap " <>
              "between #{inspect(first)} and #{inspect(second)}"

    {:error, reason} ->
      raise ArgumentError,
            "cannot convert #{inspect(naive_datetime)} to datetime, reason: #{inspect(reason)}"
  end
end
@doc """
Changes the time zone of a `DateTime`.

Returns a `DateTime` for the same point in time, but instead at
the time zone provided. It assumes that `DateTime` is valid and
exists in the given time zone and calendar.

By default, it uses the default time zone database returned by
`Calendar.get_time_zone_database/0`, which defaults to
`Calendar.UTCOnlyTimeZoneDatabase` which only handles "Etc/UTC" datetimes.
Other time zone databases can be passed as argument or set globally.
See the "Time zone database" section in the module docs.

## Examples

    iex> {:ok, pacific_datetime} = DateTime.shift_zone(~U[2018-07-16 10:00:00Z], "America/Los_Angeles", FakeTimeZoneDatabase)
    iex> pacific_datetime
    #DateTime<2018-07-16 03:00:00-07:00 PDT America/Los_Angeles>

    iex> DateTime.shift_zone(~U[2018-07-16 10:00:00Z], "bad timezone", FakeTimeZoneDatabase)
    {:error, :time_zone_not_found}

"""
@doc since: "1.8.0"
@spec shift_zone(t, Calendar.time_zone(), Calendar.time_zone_database()) ::
        {:ok, t} | {:error, :time_zone_not_found | :utc_only_time_zone_database}
def shift_zone(datetime, time_zone, time_zone_database \\ Calendar.get_time_zone_database())

# Fast path: shifting to the zone the datetime is already in is a no-op.
def shift_zone(%{time_zone: time_zone} = datetime, time_zone, _) do
  {:ok, datetime}
end

def shift_zone(datetime, time_zone, time_zone_database) do
  %{
    std_offset: std_offset,
    utc_offset: utc_offset,
    calendar: calendar,
    microsecond: {_, precision}
  } = datetime

  # Convert the wall-clock reading to UTC iso-days (by applying the total
  # offset), then look up the target zone's period for that instant and
  # rebuild the struct in that zone.
  datetime
  |> to_iso_days()
  |> apply_tz_offset(utc_offset + std_offset)
  |> shift_zone_for_iso_days_utc(calendar, precision, time_zone, time_zone_database)
end
# Looks up the period for `time_zone` at the UTC instant `iso_days_utc` and
# rebuilds a `%DateTime{}` in that zone, restoring the caller's microsecond
# `precision`. Returns `{:ok, datetime}` or the database's `{:error, _}`.
defp shift_zone_for_iso_days_utc(iso_days_utc, calendar, precision, time_zone, time_zone_db) do
  case time_zone_db.time_zone_period_from_utc_iso_days(iso_days_utc, time_zone) do
    {:ok, %{std_offset: std_offset, utc_offset: utc_offset, zone_abbr: zone_abbr}} ->
      # Negative offset converts UTC back into the zone's wall time before
      # expanding the iso-days timestamp into date/time fields.
      {year, month, day, hour, minute, second, {microsecond_without_precision, _}} =
        iso_days_utc
        |> apply_tz_offset(-(utc_offset + std_offset))
        |> calendar.naive_datetime_from_iso_days()

      datetime = %DateTime{
        calendar: calendar,
        year: year,
        month: month,
        day: day,
        hour: hour,
        minute: minute,
        second: second,
        microsecond: {microsecond_without_precision, precision},
        std_offset: std_offset,
        utc_offset: utc_offset,
        zone_abbr: zone_abbr,
        time_zone: time_zone
      }

      {:ok, datetime}

    {:error, _} = error ->
      error
  end
end
@doc """
Changes the time zone of a `DateTime` or raises on errors.

See `shift_zone/3` for more information.

## Examples

    iex> DateTime.shift_zone!(~U[2018-07-16 10:00:00Z], "America/Los_Angeles", FakeTimeZoneDatabase)
    #DateTime<2018-07-16 03:00:00-07:00 PDT America/Los_Angeles>

    iex> DateTime.shift_zone!(~U[2018-07-16 10:00:00Z], "bad timezone", FakeTimeZoneDatabase)
    ** (ArgumentError) cannot shift ~U[2018-07-16 10:00:00Z] to "bad timezone" time zone, reason: :time_zone_not_found

"""
@doc since: "1.10.0"
@spec shift_zone!(t, Calendar.time_zone(), Calendar.time_zone_database()) :: t
def shift_zone!(datetime, time_zone, tz_db \\ Calendar.get_time_zone_database()) do
  case shift_zone(datetime, time_zone, tz_db) do
    {:ok, shifted} ->
      shifted

    {:error, reason} ->
      raise ArgumentError,
            "cannot shift #{inspect(datetime)} to #{inspect(time_zone)} time zone" <>
              ", reason: #{inspect(reason)}"
  end
end
@doc """
Returns the current datetime in the provided time zone.

By default, it uses the default time zone database returned by
`Calendar.get_time_zone_database/0`, which defaults to
`Calendar.UTCOnlyTimeZoneDatabase` which only handles "Etc/UTC" datetimes.
Other time zone databases can be passed as argument or set globally.
See the "Time zone database" section in the module docs.

## Examples

    iex> {:ok, datetime} = DateTime.now("Etc/UTC")
    iex> datetime.time_zone
    "Etc/UTC"

    iex> DateTime.now("Europe/Copenhagen")
    {:error, :utc_only_time_zone_database}

    iex> DateTime.now("bad timezone", FakeTimeZoneDatabase)
    {:error, :time_zone_not_found}

"""
@doc since: "1.8.0"
@spec now(Calendar.time_zone(), Calendar.time_zone_database()) ::
        {:ok, t} | {:error, :time_zone_not_found | :utc_only_time_zone_database}
def now(time_zone, time_zone_database \\ Calendar.get_time_zone_database())

# UTC needs no database lookup; the current UTC time is always available.
def now("Etc/UTC", _), do: {:ok, utc_now()}

# Any other zone: take the current UTC instant and shift it into `time_zone`.
def now(time_zone, time_zone_database), do: shift_zone(utc_now(), time_zone, time_zone_database)
@doc """
Returns the current datetime in the provided time zone or raises on errors.

See `now/2` for more information.

## Examples

    iex> datetime = DateTime.now!("Etc/UTC")
    iex> datetime.time_zone
    "Etc/UTC"

    iex> DateTime.now!("Europe/Copenhagen")
    ** (ArgumentError) cannot get current datetime in "Europe/Copenhagen" time zone, reason: :utc_only_time_zone_database

    iex> DateTime.now!("bad timezone", FakeTimeZoneDatabase)
    ** (ArgumentError) cannot get current datetime in "bad timezone" time zone, reason: :time_zone_not_found

"""
@doc since: "1.10.0"
@spec now!(Calendar.time_zone(), Calendar.time_zone_database()) :: t
def now!(time_zone, tz_db \\ Calendar.get_time_zone_database()) do
  case now(time_zone, tz_db) do
    {:ok, datetime} ->
      datetime

    {:error, reason} ->
      raise ArgumentError,
            "cannot get current datetime in #{inspect(time_zone)} time zone, reason: " <>
              inspect(reason)
  end
end
@doc """
Converts the given `datetime` to Unix time.

The `datetime` is expected to be using the ISO calendar
with a year greater than or equal to 0.

It will return the integer with the given unit,
according to `System.convert_time_unit/3`.

## Examples

    iex> 1_464_096_368 |> DateTime.from_unix!() |> DateTime.to_unix()
    1464096368

    iex> dt = %DateTime{calendar: Calendar.ISO, day: 20, hour: 18, microsecond: {273806, 6},
    ...>                minute: 58, month: 11, second: 19, time_zone: "America/Montevideo",
    ...>                utc_offset: -10800, std_offset: 3600, year: 2014, zone_abbr: "UYST"}
    iex> DateTime.to_unix(dt)
    1416517099

    iex> flamel = %DateTime{calendar: Calendar.ISO, day: 22, hour: 8, microsecond: {527771, 6},
    ...>                    minute: 2, month: 3, second: 25, std_offset: 0, time_zone: "Etc/UTC",
    ...>                    utc_offset: 0, year: 1418, zone_abbr: "UTC"}
    iex> DateTime.to_unix(flamel)
    -17412508655

"""
@spec to_unix(Calendar.datetime(), System.time_unit()) :: integer
def to_unix(datetime, unit \\ :second)

def to_unix(%{utc_offset: utc_offset, std_offset: std_offset} = datetime, unit) do
  {days, fraction} = to_iso_days(datetime)
  # `@unix_days` (a module attribute defined earlier in this file) re-bases
  # the iso-days count onto the Unix epoch (1970-01-01).
  unix_units = Calendar.ISO.iso_days_to_unit({days - @unix_days, fraction}, unit)
  # The wall-clock reading includes the zone offset; subtract it to get UTC.
  offset_units = System.convert_time_unit(utc_offset + std_offset, :second, unit)
  unix_units - offset_units
end
@doc """
Converts the given `datetime` into a `NaiveDateTime`.

Because `NaiveDateTime` does not hold time zone information,
any time zone related data will be lost during the conversion.

## Examples

    iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
    ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 1},
    ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
    iex> DateTime.to_naive(dt)
    ~N[2000-02-29 23:00:07.0]

"""
@spec to_naive(Calendar.datetime()) :: NaiveDateTime.t()
def to_naive(%{
      calendar: calendar,
      time_zone: _,
      year: year,
      month: month,
      day: day,
      hour: hour,
      minute: minute,
      second: second,
      microsecond: microsecond
    }) do
  # Zone-related data (offsets, abbreviation, zone name) is dropped on purpose.
  %NaiveDateTime{
    calendar: calendar,
    year: year,
    month: month,
    day: day,
    hour: hour,
    minute: minute,
    second: second,
    microsecond: microsecond
  }
end
@doc """
Converts a `DateTime` into a `Date`.

Because `Date` does not hold time nor time zone information,
data will be lost during the conversion.

## Examples

    iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
    ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
    iex> DateTime.to_date(dt)
    ~D[2000-02-29]

"""
@spec to_date(Calendar.datetime()) :: Date.t()
def to_date(%{
      calendar: calendar,
      year: year,
      month: month,
      day: day,
      hour: _,
      minute: _,
      second: _,
      microsecond: _,
      time_zone: _
    }) do
  # Only the date fields survive; time-of-day and zone data are discarded.
  %Date{calendar: calendar, year: year, month: month, day: day}
end
@doc """
Converts a `DateTime` into `Time`.

Because `Time` does not hold date nor time zone information,
data will be lost during the conversion.

## Examples

    iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
    ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 1},
    ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
    iex> DateTime.to_time(dt)
    ~T[23:00:07.0]

"""
@spec to_time(Calendar.datetime()) :: Time.t()
def to_time(%{
      calendar: calendar,
      hour: hour,
      minute: minute,
      second: second,
      microsecond: microsecond,
      year: _,
      month: _,
      day: _,
      time_zone: _
    }) do
  # Only the time-of-day fields survive; date and zone data are discarded.
  %Time{
    calendar: calendar,
    hour: hour,
    minute: minute,
    second: second,
    microsecond: microsecond
  }
end
@doc """
Converts the given datetime to
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601) format.

By default datetimes are rendered in the "extended" format, for human
readability; the "basic" format is available by passing `:basic`.

Only datetimes in the ISO calendar can be rendered directly; other calendars
are first converted via `convert!/2`, which raises when the conversion is
ambiguous.

WARNING: the ISO 8601 datetime format does not contain the time zone nor
its abbreviation, which means information is lost when converting to such
format.

### Examples

    iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "UTC",
    ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
    iex> DateTime.to_iso8601(dt)
    "2000-02-29T23:00:07Z"

    iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
    ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
    iex> DateTime.to_iso8601(dt, :basic)
    "20000229T230007-0400"

"""
@spec to_iso8601(Calendar.datetime(), :extended | :basic) :: String.t()
def to_iso8601(datetime, format \\ :extended)

def to_iso8601(%{calendar: Calendar.ISO} = datetime, format)
    when format in [:extended, :basic] do
  %{
    year: year,
    month: month,
    day: day,
    hour: hour,
    minute: minute,
    second: second,
    microsecond: microsecond,
    time_zone: time_zone,
    utc_offset: utc_offset,
    std_offset: std_offset
  } = datetime

  # Render the three sections separately, then join them with the "T"
  # date/time separator required by ISO 8601.
  date_string = Calendar.ISO.date_to_string(year, month, day, format)
  time_string = Calendar.ISO.time_to_string(hour, minute, second, microsecond, format)
  offset_string = Calendar.ISO.offset_to_string(utc_offset, std_offset, time_zone, format)

  date_string <> "T" <> time_string <> offset_string
end

# Non-ISO calendars: convert to ISO first (raising if ambiguous), then render.
def to_iso8601(%{calendar: _} = datetime, format) when format in [:extended, :basic] do
  datetime
  |> convert!(Calendar.ISO)
  |> to_iso8601(format)
end
@doc """
Parses the extended "Date and time of day" format described by
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

Since ISO 8601 does not include the proper time zone, the given
string will be converted to UTC and its offset in seconds will be
returned as part of this function. Therefore offset information
must be present in the string.

As specified in the standard, the separator "T" may be omitted if
desired as there is no ambiguity within this function.

The year parsed by this function is limited to four digits and,
while ISO 8601 allows datetimes to specify 24:00:00 as the zero
hour of the next day, this notation is not supported by Elixir.
Note leap seconds are not supported by the built-in Calendar.ISO.

## Examples

    iex> {:ok, datetime, 0} = DateTime.from_iso8601("2015-01-23T23:50:07Z")
    iex> datetime
    ~U[2015-01-23 23:50:07Z]

    iex> {:ok, datetime, 9000} = DateTime.from_iso8601("2015-01-23T23:50:07.123+02:30")
    iex> datetime
    ~U[2015-01-23 21:20:07.123Z]

    iex> DateTime.from_iso8601("2015-01-23T23:50:07")
    {:error, :missing_offset}

    iex> DateTime.from_iso8601("2015-01-23 23:50:61")
    {:error, :invalid_time}

    iex> DateTime.from_iso8601("2015-01-32 23:50:07")
    {:error, :invalid_date}

    iex> DateTime.from_iso8601("2015-01-23T23:50:07.123-00:00")
    {:error, :invalid_format}

"""
@doc since: "1.4.0"
@spec from_iso8601(String.t(), Calendar.calendar()) ::
        {:ok, t, Calendar.utc_offset()} | {:error, atom}
def from_iso8601(string, calendar \\ Calendar.ISO) do
  # Parse as UTC first, then convert into the requested calendar. Parse and
  # conversion errors both fall straight through the `with`.
  with {:ok, {year, month, day, hour, minute, second, microsecond}, offset} <-
         Calendar.ISO.parse_utc_datetime(string),
       utc_datetime = %DateTime{
         year: year,
         month: month,
         day: day,
         hour: hour,
         minute: minute,
         second: second,
         microsecond: microsecond,
         std_offset: 0,
         utc_offset: 0,
         zone_abbr: "UTC",
         time_zone: "Etc/UTC"
       },
       {:ok, converted} <- convert(utc_datetime, calendar) do
    {:ok, converted, offset}
  end
end
@doc """
Converts the given `datetime` to a string according to its calendar.

Rendering is delegated to the datetime's calendar module, so non-ISO
calendars may apply their own formatting rules.

### Examples

    iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
    ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
    iex> DateTime.to_string(dt)
    "2000-02-29 23:00:07+01:00 CET Europe/Warsaw"

    iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "UTC",
    ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
    iex> DateTime.to_string(dt)
    "2000-02-29 23:00:07Z"

"""
@spec to_string(Calendar.datetime()) :: String.t()
def to_string(%{calendar: calendar} = datetime) do
  %{
    time_zone: time_zone,
    zone_abbr: zone_abbr,
    utc_offset: utc_offset,
    std_offset: std_offset,
    year: year,
    month: month,
    day: day,
    hour: hour,
    minute: minute,
    second: second,
    microsecond: microsecond
  } = datetime

  calendar.datetime_to_string(
    year,
    month,
    day,
    hour,
    minute,
    second,
    microsecond,
    time_zone,
    zone_abbr,
    utc_offset,
    std_offset
  )
end
@doc """
Compares two datetime structs.

Returns `:gt` if the first datetime is later than the second
and `:lt` for vice versa. If the two datetimes are equal
`:eq` is returned.

Note that both UTC and Standard offsets will be taken into
account when comparison is done.

## Examples

    iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
    ...>                 hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                 utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
    iex> dt2 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
    ...>                 hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                 utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
    iex> DateTime.compare(dt1, dt2)
    :gt

"""
@doc since: "1.4.0"
@spec compare(Calendar.datetime(), Calendar.datetime()) :: :lt | :eq | :gt
def compare(
      %{utc_offset: utc_offset1, std_offset: std_offset1} = datetime1,
      %{utc_offset: utc_offset2, std_offset: std_offset2} = datetime2
    ) do
  # Normalize both sides to UTC iso-days before comparing.
  {days1, {parts1, ppd1}} =
    datetime1
    |> to_iso_days()
    |> apply_tz_offset(utc_offset1 + std_offset1)

  {days2, {parts2, ppd2}} =
    datetime2
    |> to_iso_days()
    |> apply_tz_offset(utc_offset2 + std_offset2)

  # Ensure fraction tuples have same denominator.
  # Cross-multiplying each `parts` by the other's `ppd` makes the
  # `{days, parts}` tuples directly comparable by Erlang term order.
  first = {days1, parts1 * ppd2}
  second = {days2, parts2 * ppd1}

  cond do
    first > second -> :gt
    first < second -> :lt
    true -> :eq
  end
end
@doc """
Subtracts `datetime2` from `datetime1`.

The answer can be returned in any `unit` available from `t:System.time_unit/0`.

Leap seconds are not taken into account.

This function returns the difference in seconds where seconds are measured
according to `Calendar.ISO`.

## Examples

    iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
    ...>                 hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                 utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
    iex> dt2 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
    ...>                 hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                 utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
    iex> DateTime.diff(dt1, dt2)
    18000

    iex> DateTime.diff(dt2, dt1)
    -18000

"""
@doc since: "1.5.0"
@spec diff(Calendar.datetime(), Calendar.datetime(), System.time_unit()) :: integer()
def diff(
      %{utc_offset: utc_offset1, std_offset: std_offset1} = datetime1,
      %{utc_offset: utc_offset2, std_offset: std_offset2} = datetime2,
      unit \\ :second
    ) do
  # Difference of the wall-clock readings, ignoring zone offsets.
  naive_diff =
    (datetime1 |> to_iso_days() |> Calendar.ISO.iso_days_to_unit(unit)) -
      (datetime2 |> to_iso_days() |> Calendar.ISO.iso_days_to_unit(unit))

  # Correct for the zone offsets so the result reflects the UTC instants.
  offset_diff = utc_offset2 + std_offset2 - (utc_offset1 + std_offset1)
  naive_diff + System.convert_time_unit(offset_diff, :second, unit)
end
@doc """
Adds a specified amount of time to a `DateTime`.

Accepts an `amount_to_add` in any `unit` available from `t:System.time_unit/0`.
Negative values will move backwards in time.

Takes changes such as summer time/DST into account. This means that adding time
can cause the wall time to "go backwards" during "fall back" during autumn.
Adding just a few seconds to a datetime just before "spring forward" can cause wall
time to increase by more than an hour.

Fractional second precision stays the same in a similar way to `NaiveDateTime.add/2`.

### Examples

    iex> dt = DateTime.from_naive!(~N[2018-11-15 10:00:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
    iex> dt |> DateTime.add(3600, :second, FakeTimeZoneDatabase)
    #DateTime<2018-11-15 11:00:00+01:00 CET Europe/Copenhagen>

    iex> DateTime.add(~U[2018-11-15 10:00:00Z], 3600, :second)
    ~U[2018-11-15 11:00:00Z]

When adding 3 seconds just before "spring forward" we go from 1:59:59 to 3:00:02

    iex> dt = DateTime.from_naive!(~N[2019-03-31 01:59:59.123], "Europe/Copenhagen", FakeTimeZoneDatabase)
    iex> dt |> DateTime.add(3, :second, FakeTimeZoneDatabase)
    #DateTime<2019-03-31 03:00:02.123+02:00 CEST Europe/Copenhagen>

"""
@doc since: "1.8.0"
@spec add(Calendar.datetime(), integer, System.time_unit(), Calendar.time_zone_database()) ::
        t()
def add(
      datetime,
      amount_to_add,
      unit \\ :second,
      time_zone_database \\ Calendar.get_time_zone_database()
    )
    when is_integer(amount_to_add) do
  %{
    utc_offset: utc_offset,
    std_offset: std_offset,
    calendar: calendar,
    microsecond: {_, precision}
  } = datetime

  # `ppd` is the number of `unit` parts per day (86400 seconds per day).
  ppd = System.convert_time_unit(86400, :second, unit)
  total_offset = System.convert_time_unit(utc_offset + std_offset, :second, unit)

  result =
    datetime
    |> to_iso_days()
    # Subtract total offset in order to get UTC and add the integer for the addition
    |> Calendar.ISO.add_day_fraction_to_iso_days(amount_to_add - total_offset, ppd)
    # Re-resolve the zone at the new UTC instant, so DST transitions apply.
    |> shift_zone_for_iso_days_utc(calendar, precision, datetime.time_zone, time_zone_database)

  case result do
    {:ok, result_datetime} ->
      result_datetime

    {:error, error} ->
      raise ArgumentError,
            "cannot add #{amount_to_add} #{unit} to #{inspect(datetime)} (with time zone " <>
              "database #{inspect(time_zone_database)}), reason: #{inspect(error)}"
  end
end
@doc """
Returns the given datetime with the microsecond field truncated to the given
precision (`:microsecond`, `:millisecond` or `:second`).

The given datetime is returned unchanged if it already has lower precision
than the given precision.

## Examples

    iex> dt = %DateTime{year: 2017, month: 11, day: 7, zone_abbr: "CET",
    ...>                hour: 11, minute: 45, second: 18, microsecond: {123456, 6},
    ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Paris"}
    iex> DateTime.truncate(dt, :millisecond)
    #DateTime<2017-11-07 11:45:18.123+01:00 CET Europe/Paris>

    iex> DateTime.truncate(dt, :second)
    #DateTime<2017-11-07 11:45:18+01:00 CET Europe/Paris>

"""
@doc since: "1.6.0"
@spec truncate(Calendar.datetime(), :microsecond | :millisecond | :second) :: t()
# Struct input: truncate the microsecond tuple in place.
def truncate(%DateTime{microsecond: microsecond} = datetime, precision) do
  truncated = Calendar.truncate(microsecond, precision)
  %{datetime | microsecond: truncated}
end

# Plain maps are first normalized into a `%DateTime{}` struct.
def truncate(%{} = datetime_map, precision) do
  datetime_map
  |> from_map()
  |> truncate(precision)
end
@doc """
Converts a given `datetime` from one calendar to another.

If it is not possible to convert unambiguously between the calendars
(see `Calendar.compatible_calendars?/2`), an `{:error, :incompatible_calendars}` tuple
is returned.

## Examples

Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:

    iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
    ...>                 hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                 utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
    iex> DateTime.convert(dt1, Calendar.Holocene)
    {:ok, %DateTime{calendar: Calendar.Holocene, day: 29, hour: 23,
                    microsecond: {0, 0}, minute: 0, month: 2, second: 7, std_offset: 0,
                    time_zone: "America/Manaus", utc_offset: -14400, year: 12000,
                    zone_abbr: "AMT"}}

"""
@doc since: "1.5.0"
# Already a struct in the target calendar: nothing to do.
@spec convert(Calendar.datetime(), Calendar.calendar()) ::
        {:ok, t} | {:error, :incompatible_calendars}
def convert(%DateTime{calendar: calendar} = datetime, calendar) do
  {:ok, datetime}
end

# Plain map already in the target calendar: just normalize into a struct.
def convert(%{calendar: calendar} = datetime, calendar) do
  {:ok, from_map(datetime)}
end

# Cross-calendar conversion: only safe when both calendars share the same
# day rollover moment; otherwise the conversion would be ambiguous.
def convert(%{calendar: dt_calendar, microsecond: {_, precision}} = datetime, calendar) do
  if Calendar.compatible_calendars?(dt_calendar, calendar) do
    result_datetime =
      datetime
      |> to_iso_days
      |> from_iso_days(datetime, calendar, precision)

    {:ok, result_datetime}
  else
    {:error, :incompatible_calendars}
  end
end
@doc """
Converts a given `datetime` from one calendar to another, raising on failure.

If it is not possible to convert unambiguously between the calendars
(see `Calendar.compatible_calendars?/2`), an `ArgumentError` is raised.

## Examples

Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:

    iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
    ...>                 hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                 utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
    iex> DateTime.convert!(dt1, Calendar.Holocene)
    %DateTime{calendar: Calendar.Holocene, day: 29, hour: 23,
              microsecond: {0, 0}, minute: 0, month: 2, second: 7, std_offset: 0,
              time_zone: "America/Manaus", utc_offset: -14400, year: 12000,
              zone_abbr: "AMT"}

"""
@doc since: "1.5.0"
@spec convert!(Calendar.datetime(), Calendar.calendar()) :: t
def convert!(datetime, calendar) do
  case convert(datetime, calendar) do
    {:ok, converted} ->
      converted

    {:error, :incompatible_calendars} ->
      raise ArgumentError,
            "cannot convert #{inspect(datetime)} to target calendar #{inspect(calendar)}, " <>
              "reason: #{inspect(datetime.calendar)} and #{inspect(calendar)} have different " <>
              "day rollover moments, making this conversion ambiguous"
  end
end
# Keep it multiline for proper function clause errors.
# Converts the wall-clock fields to the `{days, day_fraction}` iso-days
# representation used for all internal arithmetic. Note the zone offset is
# NOT applied here; callers subtract it themselves when they need UTC.
defp to_iso_days(%{
       calendar: calendar,
       year: year,
       month: month,
       day: day,
       hour: hour,
       minute: minute,
       second: second,
       microsecond: microsecond
     }) do
  calendar.naive_datetime_to_iso_days(year, month, day, hour, minute, second, microsecond)
end
# Inverse of `to_iso_days/1`: renders `iso_days` in the target `calendar`,
# reusing the zone information (zone name, abbreviation, offsets) of the
# source `datetime` and restoring the original microsecond `precision`.
defp from_iso_days(iso_days, datetime, calendar, precision) do
  %{time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset} =
    datetime

  {year, month, day, hour, minute, second, {microsecond, _}} =
    calendar.naive_datetime_from_iso_days(iso_days)

  %DateTime{
    calendar: calendar,
    year: year,
    month: month,
    day: day,
    hour: hour,
    minute: minute,
    second: second,
    microsecond: {microsecond, precision},
    time_zone: time_zone,
    zone_abbr: zone_abbr,
    utc_offset: utc_offset,
    std_offset: std_offset
  }
end
# Applying a zero offset is the identity; skip the fraction arithmetic.
defp apply_tz_offset(iso_days, 0) do
  iso_days
end

# Moves the iso-days timestamp backwards by `offset` seconds (86400 s/day),
# so applying a positive zone offset converts wall time to UTC.
defp apply_tz_offset(iso_days, offset) do
  Calendar.ISO.add_day_fraction_to_iso_days(iso_days, -offset, 86400)
end
# Rebuilds a proper `%DateTime{}` struct from any map that carries all of
# the datetime fields. Uses dot access so a missing field raises `KeyError`.
defp from_map(%{} = datetime_map) do
  %DateTime{
    year: datetime_map.year,
    month: datetime_map.month,
    day: datetime_map.day,
    hour: datetime_map.hour,
    minute: datetime_map.minute,
    second: datetime_map.second,
    microsecond: datetime_map.microsecond,
    time_zone: datetime_map.time_zone,
    zone_abbr: datetime_map.zone_abbr,
    utc_offset: datetime_map.utc_offset,
    std_offset: datetime_map.std_offset
  }
end
defimpl String.Chars do
  # Delegates rendering to the datetime's calendar module, mirroring
  # `DateTime.to_string/1`, so non-ISO calendars format themselves.
  def to_string(datetime) do
    %{
      calendar: calendar,
      year: year,
      month: month,
      day: day,
      hour: hour,
      minute: minute,
      second: second,
      microsecond: microsecond,
      time_zone: time_zone,
      zone_abbr: zone_abbr,
      utc_offset: utc_offset,
      std_offset: std_offset
    } = datetime

    calendar.datetime_to_string(
      year,
      month,
      day,
      hour,
      minute,
      second,
      microsecond,
      time_zone,
      zone_abbr,
      utc_offset,
      std_offset
    )
  end
end
defimpl Inspect do
  # Renders UTC ISO datetimes with the `~U[...]` sigil (which round-trips
  # through the parser); everything else uses the `#DateTime<...>` form.
  def inspect(datetime, _) do
    %{
      year: year,
      month: month,
      day: day,
      hour: hour,
      minute: minute,
      second: second,
      microsecond: microsecond,
      time_zone: time_zone,
      zone_abbr: zone_abbr,
      utc_offset: utc_offset,
      std_offset: std_offset,
      calendar: calendar
    } = datetime

    formatted =
      calendar.datetime_to_string(
        year,
        month,
        day,
        hour,
        minute,
        second,
        microsecond,
        time_zone,
        zone_abbr,
        utc_offset,
        std_offset
      )

    case datetime do
      # Only zero-offset "Etc/UTC" datetimes qualify for the sigil form.
      %{utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"} ->
        "~U[" <> formatted <> suffix(calendar) <> "]"

      _ ->
        "#DateTime<" <> formatted <> suffix(calendar) <> ">"
    end
  end

  # ISO is the default calendar, so it needs no suffix inside the output.
  defp suffix(Calendar.ISO), do: ""
  defp suffix(calendar), do: " " <> inspect(calendar)
end
end
| 33.75257 | 134 | 0.634704 |
e83fd2457e561e071f6dfbc17a8bf91c87d0e8d0 | 296 | exs | Elixir | api_gateway/test/controllers/ping_controller_test.exs | lcnascimento/pokerapp | d7928aebc867c49f01546611ed65a5befa3a6429 | [
"MIT"
] | 2 | 2020-04-20T16:31:01.000Z | 2020-04-20T16:45:03.000Z | api_gateway/test/controllers/ping_controller_test.exs | lcnascimento/pokerapp | d7928aebc867c49f01546611ed65a5befa3a6429 | [
"MIT"
] | null | null | null | api_gateway/test/controllers/ping_controller_test.exs | lcnascimento/pokerapp | d7928aebc867c49f01546611ed65a5befa3a6429 | [
"MIT"
] | null | null | null | defmodule ApiGateway.PingControllerTest do
use ApiGateway.ConnCase
test "index/2 should respond with 'pong' message", %{conn: conn} do
response =
conn
|> get(Routes.ping_path(conn, :index))
|> json_response(200)
assert response == %{"message" => "pong"}
end
end
| 22.769231 | 69 | 0.655405 |
e8404658c20b9315f303c38252faa690c0b26d34 | 140 | ex | Elixir | lib/club_backend_web/controllers/ping_controller.ex | ufosc/club-backend-ex | 87b55bc9fcccd9d458ee3277c3153f608f3a8a5e | [
"MIT"
] | null | null | null | lib/club_backend_web/controllers/ping_controller.ex | ufosc/club-backend-ex | 87b55bc9fcccd9d458ee3277c3153f608f3a8a5e | [
"MIT"
] | 6 | 2020-07-31T23:02:34.000Z | 2021-02-26T21:10:10.000Z | lib/club_backend_web/controllers/ping_controller.ex | ufosc/club-backend-ex | 87b55bc9fcccd9d458ee3277c3153f608f3a8a5e | [
"MIT"
] | 2 | 2020-08-23T23:04:00.000Z | 2020-10-03T03:18:11.000Z | defmodule ClubBackendWeb.PingController do
use ClubBackendWeb, :controller
def ping(conn, _) do
conn
|> text("pong")
end
end
| 15.555556 | 42 | 0.7 |
e84085409b16e5339b29cdc5e4f32b75916cebcf | 1,724 | ex | Elixir | lib/stranger_web/live/login_form_component.ex | Arp-G/stranger | 1c7c804b0ab4dbee4c77f1c5adf0d9d5a09aaf41 | [
"MIT"
] | 11 | 2021-01-27T17:21:00.000Z | 2021-12-07T13:02:59.000Z | lib/stranger_web/live/login_form_component.ex | Arp-G/stranger | 1c7c804b0ab4dbee4c77f1c5adf0d9d5a09aaf41 | [
"MIT"
] | null | null | null | lib/stranger_web/live/login_form_component.ex | Arp-G/stranger | 1c7c804b0ab4dbee4c77f1c5adf0d9d5a09aaf41 | [
"MIT"
] | null | null | null | defmodule StrangerWeb.LoginFormComponent do
use StrangerWeb, :live_component
use Phoenix.HTML
@impl true
def mount(socket) do
{:ok, assign(socket, %{email: "", password: ""})}
end
  # Renders the login form. Field edits are reported through phx-blur
  # events ("update_email"/"update_password") and submission through the
  # "sign_in" click; "jump_to_1" is handled by the parent LiveView.
  # NOTE(review): the markup looks unbalanced — the bare <div> after the
  # button is never closed and the surrounding .form-group divs stay
  # open — confirm the rendered HTML is intended.
  @impl true
  def render(assigns) do
    ~L"""
    <div class="form_heading"> Login </div>
    <section class="login-section">
      <div class="form-group">
        <label for="email">Email</label>
        <input id="email" name="email" type="email" phx-blur="update_email" phx-target="<%= @myself %>" class="form-control">
      </div>
      <div class="form-group">
        <label for="password">Password</label>
        <input id="password" name="password" type="password" phx-blur="update_password" phx-target="<%= @myself %>" class="form-control">
      </div>
      <div class="form-group">
        <button class="btn btn-success" phx-click="sign_in" phx-target="<%= @myself %>">Sign in</button>
      <div>
      <div class="form-group form-link">
        <a href="#" phx-click="jump_to_1">
          Don't have an Account? Sign Up here.
        </a>
      </div>
    </section>
    """
  end
@impl true
def handle_event("update_email", %{"value" => email}, socket) do
{:noreply, assign(socket, email: email)}
end
@impl true
def handle_event("update_password", %{"value" => password}, socket) do
{:noreply, assign(socket, password: password)}
end
@impl true
def handle_event("sign_in", _attrs, %{assigns: %{email: email, password: password}} = socket) do
{
:noreply,
redirect(socket,
to:
StrangerWeb.Router.Helpers.session_path(
socket,
:sign_in,
%{email: email, password: password}
)
)
}
end
end
| 27.365079 | 137 | 0.589327 |
e840892813a9c7f714299c857369a36587d59a66 | 13,902 | ex | Elixir | lib/ex_unit/lib/ex_unit/diff.ex | Joe-noh/elixir | 34bf464bc1a035b6015366463e04f1bf9d2065f3 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/diff.ex | Joe-noh/elixir | 34bf464bc1a035b6015366463e04f1bf9d2065f3 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/diff.ex | Joe-noh/elixir | 34bf464bc1a035b6015366463e04f1bf9d2065f3 | [
"Apache-2.0"
] | null | null | null | defmodule ExUnit.Diff do
@moduledoc false
  @doc """
  Returns an edit script representing the difference between `left` and `right`.
  Returns `nil` if they are not the same data type,
  or if the given data type is not supported.
  """
  def script(left, right)
  # Identical terms of any supported type diff to a single "equal" chunk.
  def script(term, term)
      when is_binary(term) or is_number(term)
      when is_map(term) or is_list(term) or is_tuple(term) do
    [eq: inspect(term)]
  end
  # Binaries
  # Only printable strings get a character-wise diff (wrapped in ?");
  # non-printable binaries fall through to the catch-all nil clause.
  def script(left, right) when is_binary(left) and is_binary(right) do
    if String.printable?(left) and String.printable?(right) do
      script_string(left, right, ?\")
    end
  end
  # Structs
  # Same struct module on both sides: diff the underlying maps and keep
  # the module name as the printed %Name{...} prefix.
  def script(%name{} = left, %name{} = right) do
    left = Map.from_struct(left)
    right = Map.from_struct(right)
    script_map(left, right, inspect(name))
  end
  # Maps
  # Plain maps only; comparing a struct against a plain map (or two
  # different structs) is unsupported, so return nil.
  def script(%{} = left, %{} = right) do
    if match?(%_{}, left) or match?(%_{}, right) do
      nil
    else
      script_map(left, right, "")
    end
  end
  # Char lists and lists
  def script(left, right) when is_list(left) and is_list(right) do
    if Inspect.List.printable?(left) and Inspect.List.printable?(right) do
      script_string(List.to_string(left), List.to_string(right), ?')
    else
      keywords? = Inspect.List.keyword?(left) and Inspect.List.keyword?(right)
      script_list_new(left, right, keywords?)
    end
  end
  # Numbers
  # Numbers are diffed on their textual representation.
  def script(left, right)
      when is_integer(left) and is_integer(right)
      when is_float(left) and is_float(right) do
    script_string(inspect(left), inspect(right))
  end
  # Tuples
  # Tuples are walked from the last index down to -1 (see script_tuple/3).
  def script(left, right)
      when is_tuple(left) and is_tuple(right) do
    left = {left, tuple_size(left) - 1}
    right = {right, tuple_size(right) - 1}
    script_tuple(left, right, [])
  end
  # Mixed or unsupported types cannot be diffed.
  def script(_left, _right), do: nil
  # Character-wise diff delimited by `token` (?" or ?'). Only worthwhile
  # when the strings are reasonably similar: the bag distance must be at
  # most 60% of the longer length, otherwise nil (caller prints del/ins).
  defp script_string(string1, string2, token) do
    length1 = String.length(string1)
    length2 = String.length(string2)
    if bag_distance(string1, string2) / max(length1, length2) <= 0.6 do
      string1 = Inspect.BitString.escape(string1, token)
      string2 = Inspect.BitString.escape(string2, token)
      [{:eq, <<token>>}, script_string(string1, string2), {:eq, <<token>>}]
    end
  end
  defp script_string(string1, string2) do
    String.myers_difference(string1, string2)
  end
  # Walks a list returning {proper?, length}; an improper tail counts as
  # one extra element.
  defp check_if_proper_and_get_length([_ | rest], length),
    do: check_if_proper_and_get_length(rest, length + 1)
  defp check_if_proper_and_get_length([], length),
    do: {true, length}
  defp check_if_proper_and_get_length(_other, length),
    do: {false, length + 1}
  # The algorithm is outlined in the
  # "String Matching with Metric Trees Using an Approximate Distance"
  # paper by Ilaria Bartolini, Paolo Ciaccia, and Marco Patella.
  defp bag_distance(string1, string2) do
    bag1 = string_to_bag(string1)
    bag2 = string_to_bag(string2)
    diff1 = bag_difference(bag1, bag2)
    diff2 = bag_difference(bag2, bag1)
    max(diff1, diff2)
  end
  # Builds a multiset of graphemes: %{grapheme => occurrence count}.
  defp string_to_bag(string) do
    string_to_bag(string, %{}, &(&1 + 1))
  end
  defp string_to_bag(string, bag, fun) do
    case String.next_grapheme(string) do
      {char, rest} ->
        bag = Map.update(bag, char, 1, fun)
        string_to_bag(rest, bag, fun)
      nil ->
        bag
    end
  end
  # Counts graphemes of bag1 not covered by occurrences in bag2.
  defp bag_difference(bag1, bag2) do
    Enum.reduce(bag1, 0, fn {char, count1}, sum ->
      case Map.fetch(bag2, char) do
        {:ok, count2} ->
          sum + max(count1 - count2, 0)
        :error ->
          sum + count1
      end
    end)
  end
  # Diffs two lists via the Myers search below when both are proper;
  # improper lists fall back to the element-by-element script_list/3.
  defp script_list_new(list1, list2, keywords?) do
    {proper1?, length1} = check_if_proper_and_get_length(list1, 0)
    {proper2?, length2} = check_if_proper_and_get_length(list2, 0)
    if proper1? and proper2? do
      initial_path = {0, 0, list1, list2, []}
      result =
        find_script(0, length1 + length2, [initial_path], keywords?)
        |> format_each_fragment([], keywords?)
      [{:eq, "["}, result, {:eq, "]"}]
    else
      script_list(list1, list2, [])
    end
  end
  # Joins the compacted edit fragments with ", " separators; the first
  # three clauses handle the one- and two-fragment tails so no leading
  # or trailing separator is produced.
  defp format_each_fragment([{:diff, script}], [], _keywords?),
    do: script
  defp format_each_fragment([{kind, elems}], [], keywords?),
    do: [format_fragment(kind, elems, keywords?)]
  defp format_each_fragment([_, _] = fragments, acc, keywords?) do
    result =
      case fragments do
        [diff: script1, diff: script2] ->
          [script1, {:eq, ", "}, script2]
        [{:diff, script}, {kind, elems}] ->
          [script, {kind, ", "}, format_fragment(kind, elems, keywords?)]
        [{kind, elems}, {:diff, script}] ->
          [format_fragment(kind, elems, keywords?), {kind, ", "}, script]
        [del: elems1, ins: elems2] ->
          [format_fragment(:del, elems1, keywords?), format_fragment(:ins, elems2, keywords?)]
        [{:eq, elems1}, {kind, elems2}] ->
          [format_fragment(:eq, elems1, keywords?), {kind, ", "}, format_fragment(kind, elems2, keywords?)]
        [{kind, elems1}, {:eq, elems2}] ->
          [format_fragment(kind, elems1, keywords?), {kind, ", "}, format_fragment(:eq, elems2, keywords?)]
      end
    Enum.reverse(acc, result)
  end
  defp format_each_fragment([{:diff, script} | rest], acc, keywords?) do
    format_each_fragment(rest, [{:eq, ", "}, script | acc], keywords?)
  end
  defp format_each_fragment([{kind, elems} | rest], acc, keywords?) do
    new_acc = [{kind, ", "}, format_fragment(kind, elems, keywords?) | acc]
    format_each_fragment(rest, new_acc, keywords?)
  end
  # Renders a run of elements of one kind (:eq/:ins/:del) as a single
  # comma-joined string; keyword pairs print as "key: value".
  defp format_fragment(kind, elems, keywords?) do
    formatter = fn
      {key, val} when keywords? ->
        format_key_value(key, val, true)
      elem ->
        inspect(elem)
    end
    {kind, Enum.map_join(elems, ", ", formatter)}
  end
defp find_script(envelope, max, _paths, _keywords?) when envelope > max do
nil
end
defp find_script(envelope, max, paths, keywords?) do
case each_diagonal(-envelope, envelope, paths, [], keywords?) do
{:done, edits} ->
compact_reverse(edits, [])
{:next, paths} -> find_script(envelope + 1, max, paths, keywords?)
end
end
defp compact_reverse([], acc),
do: acc
defp compact_reverse([{:diff, _} = fragment | rest], acc),
do: compact_reverse(rest, [fragment | acc])
defp compact_reverse([{kind, char} | rest], [{kind, chars} | acc]),
do: compact_reverse(rest, [{kind, [char | chars]} | acc])
defp compact_reverse([{kind, char} | rest], acc),
do: compact_reverse(rest, [{kind, [char]} | acc])
defp each_diagonal(diag, limit, _paths, next_paths, _keywords?) when diag > limit do
{:next, Enum.reverse(next_paths)}
end
defp each_diagonal(diag, limit, paths, next_paths, keywords?) do
{path, rest} = proceed_path(diag, limit, paths, keywords?)
with {:cont, path} <- follow_snake(path) do
each_diagonal(diag + 2, limit, rest, [path | next_paths], keywords?)
end
end
defp proceed_path(0, 0, [path], _keywords?), do: {path, []}
defp proceed_path(diag, limit, [path | _] = paths, keywords?) when diag == -limit do
{move_down(path, keywords?), paths}
end
defp proceed_path(diag, limit, [path], keywords?) when diag == limit do
{move_right(path, keywords?), []}
end
defp proceed_path(_diag, _limit, [path1, path2 | rest], keywords?) do
if elem(path1, 1) > elem(path2, 1) do
{move_right(path1, keywords?), [path2 | rest]}
else
{move_down(path2, keywords?), [path2 | rest]}
end
end
defp script_keyword_inner({key, val1}, {key, val2}, true),
do: [{:eq, format_key(key, true)}, script_inner(val1, val2)]
defp script_keyword_inner(_pair1, _pair2, true),
do: nil
defp script_keyword_inner(elem1, elem2, false),
do: script(elem1, elem2)
defp move_right({x, x, [elem1 | rest1] = list1, [elem2 | rest2], edits}, keywords?) do
if result = script_keyword_inner(elem1, elem2, keywords?) do
{x + 1, x + 1, rest1, rest2, [{:diff, result} | edits]}
else
{x + 1, x, list1, rest2, [{:ins, elem2} | edits]}
end
end
defp move_right({x, y, list1, [elem | rest], edits}, _keywords?) do
{x + 1, y, list1, rest, [{:ins, elem} | edits]}
end
defp move_right({x, y, list1, [], edits}, _keywords?) do
{x + 1, y, list1, [], edits}
end
defp move_down({x, x, [elem1 | rest1], [elem2 | rest2] = list2, edits}, keywords?) do
if result = script_keyword_inner(elem1, elem2, keywords?) do
{x + 1, x + 1, rest1, rest2, [{:diff, result} | edits]}
else
{x, x + 1, rest1, list2, [{:del, elem1} | edits]}
end
end
defp move_down({x, y, [elem | rest], list2, edits}, _keywords?) do
{x, y + 1, rest, list2, [{:del, elem} | edits]}
end
defp move_down({x, y, [], list2, edits}, _keywords?) do
{x, y + 1, [], list2, edits}
end
defp follow_snake({x, y, [elem | rest1], [elem | rest2], edits}) do
follow_snake({x + 1, y + 1, rest1, rest2, [{:eq, elem} | edits]})
end
defp follow_snake({_x, _y, [], [], edits}) do
{:done, edits}
end
defp follow_snake(path) do
{:cont, path}
end
  # Positional list diff used for improper lists: walks both lists in
  # lock-step accumulating per-element scripts, then strips the leading
  # separator and wraps the result in brackets.
  defp script_list([], [], acc) do
    [[_ | elem_diff] | rest] = Enum.reverse(acc)
    [{:eq, "["}, [elem_diff | rest], {:eq, "]"}]
  end
  defp script_list([], [elem | rest], acc) do
    elem_diff = [ins: inspect(elem)]
    script_list([], rest, [[ins: ", "] ++ elem_diff | acc])
  end
  defp script_list([elem | rest], [], acc) do
    elem_diff = [del: inspect(elem)]
    script_list(rest, [], [[del: ", "] ++ elem_diff | acc])
  end
  defp script_list([elem | rest1], [elem | rest2], acc) do
    elem_diff = [eq: inspect(elem)]
    script_list(rest1, rest2, [[eq: ", "] ++ elem_diff | acc])
  end
  defp script_list([elem1 | rest1], [elem2 | rest2], acc) do
    elem_diff = script_inner(elem1, elem2)
    script_list(rest1, rest2, [[eq: ", "] ++ elem_diff | acc])
  end
  # Improper tail on the left paired with a regular element on the
  # right: the " |" separator is deleted and replaced with ",".
  defp script_list(last, [elem | rest], acc) do
    joiner_diff = [del: " |", ins: ",", eq: " "]
    elem_diff = script_inner(last, elem)
    new_acc = [joiner_diff ++ elem_diff | acc]
    script_list([], rest, new_acc)
  end
  # Mirror image: the right side has the improper tail.
  defp script_list([elem | rest], last, acc) do
    joiner_diff = [del: ",", ins: " |", eq: " "]
    elem_diff = script_inner(elem, last)
    new_acc = [joiner_diff ++ elem_diff | acc]
    script_list(rest, [], new_acc)
  end
  # Both sides reduced to their (possibly empty) improper tails.
  defp script_list(last1, last2, acc) do
    elem_diff =
      cond do
        last1 == [] ->
          [ins: " | " <> inspect(last2)]
        last2 == [] ->
          [del: " | " <> inspect(last1)]
        true ->
          [eq: " | "] ++ script_inner(last1, last2)
      end
    script_list([], [], [elem_diff | acc])
  end
  # Tuple diff: indexes run from tuple_size - 1 down to -1; the longer
  # tuple's extra leading elements become pure del/ins chunks.
  defp script_tuple({_tuple1, -1}, {_tuple2, -1}, acc) do
    [[_ | elem_diff] | rest] = acc
    [{:eq, "{"}, [elem_diff | rest], {:eq, "}"}]
  end
  defp script_tuple({tuple1, index1}, {_, index2} = right, acc)
       when index1 > index2 do
    elem = elem(tuple1, index1)
    elem_diff = [del: ", ", del: inspect(elem)]
    script_tuple({tuple1, index1 - 1}, right, [elem_diff | acc])
  end
  defp script_tuple({_, index1} = left, {tuple2, index2}, acc)
       when index1 < index2 do
    elem = elem(tuple2, index2)
    elem_diff = [ins: ", ", ins: inspect(elem)]
    script_tuple(left, {tuple2, index2 - 1}, [elem_diff | acc])
  end
  defp script_tuple({tuple1, index}, {tuple2, index}, acc) do
    elem1 = elem(tuple1, index)
    elem2 = elem(tuple2, index)
    elem_diff = script_inner(elem1, elem2)
    script_tuple({tuple1, index - 1}, {tuple2, index - 1}, [[eq: ", "] ++ elem_diff | acc])
  end
  # Builds the diff of two (already de-structed) maps. Keys are split
  # into surplus (left only), altered (both, different values), missing
  # (right only) and same; each group is rendered in turn and the
  # leading ", " separator is stripped at the end.
  defp script_map(left, right, name) do
    {surplus, altered, missing, same} = map_difference(left, right)
    # Keyword-style "key: value" printing only when every group has
    # atom keys acceptable as keywords.
    keywords? =
      Inspect.List.keyword?(surplus) and
        Inspect.List.keyword?(altered) and
        Inspect.List.keyword?(missing) and
        Inspect.List.keyword?(same)
    result = Enum.reduce(missing, [], fn({key, val}, acc) ->
      map_pair = format_key_value(key, val, keywords?)
      [[ins: ", ", ins: map_pair] | acc]
    end)
    # When the maps share nothing, the first inserted pair must not
    # carry the separator either.
    result =
      if same == [] and altered == [] and missing != [] and surplus != [] do
        [[_ | elem_diff] | rest] = result
        [elem_diff | rest]
      else
        result
      end
    result = Enum.reduce(surplus, result, fn({key, val}, acc) ->
      map_pair = format_key_value(key, val, keywords?)
      [[del: ", ", del: map_pair] | acc]
    end)
    result = Enum.reduce(altered, result, fn({key, {val1, val2}}, acc) ->
      value_diff = script_inner(val1, val2)
      [[{:eq, ", "}, {:eq, format_key(key, keywords?)}, value_diff] | acc]
    end)
    result = Enum.reduce(same, result, fn({key, val}, acc) ->
      map_pair = format_key_value(key, val, keywords?)
      [[eq: ", ", eq: map_pair] | acc]
    end)
    [[_ | elem_diff] | rest] = result
    [{:eq, "%" <> name <> "{"}, [elem_diff | rest], {:eq, "}"}]
  end
  # Partitions the keys of both maps into the four groups used above.
  defp map_difference(map1, map2) do
    {surplus, altered, same} =
      Enum.reduce(map1, {[], [], []}, fn({key, val1}, {surplus, altered, same}) ->
        case Map.fetch(map2, key) do
          {:ok, ^val1} ->
            {surplus, altered, [{key, val1} | same]}
          {:ok, val2} ->
            {surplus, [{key, {val1, val2}} | altered], same}
          :error ->
            {[{key, val1} | surplus], altered, same}
        end
      end)
    missing = Enum.reduce(map2, [], fn({key, _} = pair, acc) ->
      if Map.has_key?(map1, key), do: acc, else: [pair | acc]
    end)
    {surplus, altered, missing, same}
  end
  # "key => " for arbitrary keys, "key: " for keyword-style atom keys.
  defp format_key(key, false) do
    inspect(key) <> " => "
  end
  defp format_key(key, true) do
    Atom.to_string(key) <> ": "
  end
  defp format_key_value(key, value, keyword?) do
    format_key(key, keyword?) <> inspect(value)
  end
  # Nested diff: equal terms print as :eq; terms that cannot be diffed
  # (script/2 returned nil) degrade to a plain del/ins pair.
  defp script_inner(term, term) do
    [eq: inspect(term)]
  end
  defp script_inner(left, right) do
    if result = script(left, right) do
      result
    else
      [del: inspect(left), ins: inspect(right)]
    end
  end
end
| 30.287582 | 107 | 0.603151 |
e8408ba9b437c9d88d935510a548b4f969255f93 | 305 | ex | Elixir | lib/Structs/Guild/emoji.ex | BenAlbin/alchemy | c31bce54e13a692acdba3c3b581ff49090da7604 | [
"MIT"
] | 1 | 2019-02-24T03:13:22.000Z | 2019-02-24T03:13:22.000Z | lib/Structs/Guild/emoji.ex | appositum/alchemy | 2e4c06e198fa8c824183782508610815395d7c0e | [
"MIT"
] | null | null | null | lib/Structs/Guild/emoji.ex | appositum/alchemy | 2e4c06e198fa8c824183782508610815395d7c0e | [
"MIT"
] | null | null | null | defmodule Alchemy.Guild.Emoji do
  @moduledoc false
  # Encoded as-is when the struct is serialized with Poison.
  @derive Poison.Encoder
  # Custom guild emoji fields — presumably mirroring Discord's emoji
  # object (roles restricting usage, managed/require_colons flags);
  # confirm against the API the struct is decoded from.
  defstruct [:id,
             :name,
             :roles,
             :require_colons,
             :managed]
defimpl String.Chars, for: __MODULE__ do
def to_string(emoji), do: "<:#{emoji.name}:#{emoji.id}>"
end
end
| 20.333333 | 60 | 0.577049 |
e840a7b5a5b932a6731f51c4f395fddfda4e3f00 | 160 | ex | Elixir | with-op1/w2.ex | leogtzr/elixir_code_snippets | 5c8c921dc165de8fc29bb14046386efa81ce7542 | [
"MIT"
] | null | null | null | with-op1/w2.ex | leogtzr/elixir_code_snippets | 5c8c921dc165de8fc29bb14046386efa81ce7542 | [
"MIT"
] | null | null | null | with-op1/w2.ex | leogtzr/elixir_code_snippets | 5c8c921dc165de8fc29bb14046386efa81ce7542 | [
"MIT"
] | null | null | null | vals = [1, 2, 3, 4, 5]
# Arithmetic mean of `vals`. The original abused `with` as a `let`
# block for two plain bindings; straightforward assignments express
# the same computation directly and print the same result.
count = Enum.count(vals)
sum = Enum.sum(vals)
mean = sum / count

IO.puts(inspect(mean))
| 12.307692 | 29 | 0.49375 |
e840bdc7eb51764d44ab603f2a7733ed35415694 | 276 | exs | Elixir | test/channels/routine_channel_test.exs | mcousillas6/BioMonitor | 312a903fe19751b6896aca9346340ea502397350 | [
"MIT"
] | null | null | null | test/channels/routine_channel_test.exs | mcousillas6/BioMonitor | 312a903fe19751b6896aca9346340ea502397350 | [
"MIT"
] | null | null | null | test/channels/routine_channel_test.exs | mcousillas6/BioMonitor | 312a903fe19751b6896aca9346340ea502397350 | [
"MIT"
] | null | null | null | defmodule BioMonitor.RoutineChannelTest do
use BioMonitor.ChannelCase
alias BioMonitor.RoutineChannel
setup do
{:ok, _, socket} =
socket("user_id", %{some: :assign})
|> subscribe_and_join(RoutineChannel, "routine")
{:ok, socket: socket}
end
end
| 19.714286 | 54 | 0.688406 |
e840c2aa4efcb2ef3e421f3810c002a2230dcf4a | 4,356 | ex | Elixir | priv/templates/coherence.install/controllers/coherence/password_controller.ex | harmon25/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | 1 | 2022-03-06T16:30:21.000Z | 2022-03-06T16:30:21.000Z | priv/templates/coherence.install/controllers/coherence/password_controller.ex | ysbaddaden/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | null | null | null | priv/templates/coherence.install/controllers/coherence/password_controller.ex | ysbaddaden/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | 2 | 2017-09-22T16:54:36.000Z | 2021-11-09T20:55:58.000Z | defmodule <%= base %>.Coherence.PasswordController do
@moduledoc """
Handle password recovery actions.
Controller that handles the recover password feature.
Actions:
* new - render the recover password form
* create - verify user's email address, generate a token, and send the email
* edit - render the reset password form
* update - verify password, password confirmation, and update the database
"""
use Coherence.Web, :controller
require Logger
use Timex
alias Coherence.ControllerHelpers, as: Helpers
alias Coherence.TrackableService
plug :layout_view
plug :redirect_logged_in when action in [:new, :create, :edit, :update]
@type schema :: Ecto.Schema.t
@type conn :: Plug.Conn.t
@type params :: Map.t
@doc """
Render the recover password form.
"""
@spec new(conn, params) :: conn
def new(conn, _params) do
user_schema = Config.user_schema
cs = Helpers.changeset :password, user_schema, user_schema.__struct__
conn
|> render(:new, [email: "", changeset: cs])
end
  @doc """
  Create the recovery token and send the email
  """
  @spec create(conn, params) :: conn
  def create(conn, %{"password" => password_params} = params) do
    user_schema = Config.user_schema
    email = password_params["email"]
    user = where(user_schema, [u], u.email == ^email)
    |> Config.repo.one
    case user do
      nil ->
        # NOTE(review): flashing "could not find" reveals whether an
        # email address is registered (account enumeration) — consider
        # always responding with the success message.
        changeset = Helpers.changeset :password, user_schema, user_schema.__struct__
        conn
        |> put_flash(:error, "Could not find that email address")
        |> render("new.html", changeset: changeset)
      user ->
        # Persist a fresh token plus its issue time (used by edit/2 to
        # enforce expiry), then email the reset link to the user.
        token = random_string 48
        url = router_helpers().password_url(conn, :edit, token)
        Logger.debug "reset email url: #{inspect url}"
        dt = Ecto.DateTime.utc
        cs = Helpers.changeset(:password, user_schema, user,
          %{reset_password_token: token, reset_password_sent_at: dt})
        Config.repo.update! cs
        send_user_email :password, user, url
        conn
        |> put_flash(:info, "Reset email sent. Check your email for a reset link.")
        |> redirect_to(:password_create, params)
    end
  end
  @doc """
  Render the password and password confirmation form.
  """
  @spec edit(conn, params) :: conn
  def edit(conn, params) do
    user_schema = Config.user_schema
    # The token from the emailed reset link arrives as the :id param.
    token = params["id"]
    user = where(user_schema, [u], u.reset_password_token == ^token)
    |> Config.repo.one
    case user do
      nil ->
        conn
        |> put_flash(:error, "Invalid reset token.")
        |> redirect(to: logged_out_url(conn))
      user ->
        if expired? user.reset_password_sent_at, days: Config.reset_token_expire_days do
          # Expired: clear the stored token so the stale link can never
          # be replayed, then bounce the user out.
          Helpers.changeset(:password, user_schema, user, clear_password_params())
          |> Config.repo.update
          conn
          |> put_flash(:error, "Password reset token expired.")
          |> redirect(to: logged_out_url(conn))
        else
          changeset = Helpers.changeset(:password, user_schema, user)
          conn
          |> render("edit.html", changeset: changeset)
        end
    end
  end
  @doc """
  Verify the passwords and update the database
  """
  @spec update(conn, params) :: conn
  def update(conn, %{"password" => password_params} = params) do
    user_schema = Config.user_schema
    repo = Config.repo
    token = password_params["reset_password_token"]
    user = where(user_schema, [u], u.reset_password_token == ^token)
    |> repo.one
    case user do
      nil ->
        conn
        |> put_flash(:error, "Invalid reset token")
        |> redirect(to: logged_out_url(conn))
      user ->
        # Nil out the token fields in the submitted params so a
        # successful update makes the token single-use.
        params = password_params
        |> clear_password_params
        cs = Helpers.changeset(:password, user_schema, user, params)
        case repo.update(cs) do
          {:ok, user} ->
            conn
            |> TrackableService.track_password_reset(user, user_schema.trackable_table?)
            |> put_flash(:info, "Password updated successfully.")
            |> redirect_to(:password_update, params)
          {:error, changeset} ->
            # Validation failed (e.g. confirmation mismatch): re-render
            # the form with the errors; the token stays valid.
            conn
            |> render("edit.html", changeset: changeset)
          end
        end
    end
  end
defp clear_password_params(params \\ %{}) do
params
|> Map.put("reset_password_token", nil)
|> Map.put("reset_password_sent_at", nil)
end
end
| 31.114286 | 88 | 0.637282 |
e840e9cf453fed48e58c8ff47757cfad728e787c | 5,111 | exs | Elixir | test/support/api_server_test.exs | TheRealReal/spandex_datadog | c3b98af7328fe966a1640b3444dfcf4dcbf9ee7d | [
"MIT"
] | null | null | null | test/support/api_server_test.exs | TheRealReal/spandex_datadog | c3b98af7328fe966a1640b3444dfcf4dcbf9ee7d | [
"MIT"
] | null | null | null | test/support/api_server_test.exs | TheRealReal/spandex_datadog | c3b98af7328fe966a1640b3444dfcf4dcbf9ee7d | [
"MIT"
] | null | null | null | defmodule SpandexDatadog.ApiServerTest do
use ExUnit.Case
import ExUnit.CaptureLog
alias Spandex.{
Span,
Trace
}
alias SpandexDatadog.ApiServer
defmodule TestOkApiServer do
def put(url, body, headers) do
send(self(), {:put_datadog_spans, body |> Msgpax.unpack!() |> hd(), url, headers})
{:ok, %HTTPoison.Response{status_code: 200}}
end
end
defmodule TestErrorApiServer do
def put(url, body, headers) do
send(self(), {:put_datadog_spans, body |> Msgpax.unpack!() |> hd(), url, headers})
{:error, %HTTPoison.Error{id: :foo, reason: :bar}}
end
end
  # Shared fixture: a two-span trace with fixed ids/timestamps (so the
  # expected wire payloads in the tests below can be literal), plus a
  # synchronous ApiServer state pointing at the OK fake client.
  setup_all do
    {:ok, agent_pid} = Agent.start_link(fn -> 0 end, name: :spandex_currently_send_count)
    trace_id = 4_743_028_846_331_200_905
    {:ok, span_1} =
      Span.new(
        id: 4_743_028_846_331_200_906,
        start: 1_527_752_052_216_478_000,
        service: :foo,
        env: "local",
        name: "foo",
        trace_id: trace_id,
        completion_time: 1_527_752_052_216_578_000,
        tags: [foo: "123", bar: 321, buz: :blitz, baz: {1, 2}, zyx: [xyz: {1, 2}]]
      )
    {:ok, span_2} =
      Span.new(
        id: 4_743_029_846_331_200_906,
        start: 1_527_752_052_216_578_001,
        completion_time: 1_527_752_052_316_578_001,
        service: :bar,
        env: "local",
        name: "bar",
        trace_id: trace_id
      )
    trace = %Trace{spans: [span_1, span_2]}
    {
      :ok,
      [
        trace: trace,
        url: "localhost:8126/v0.3/traces",
        state: %ApiServer.State{
          asynchronous_send?: false,
          host: "localhost",
          port: "8126",
          http: TestOkApiServer,
          verbose?: false,
          waiting_traces: [],
          batch_size: 1,
          agent_pid: agent_pid
        }
      ]
    }
  end
  describe "ApiServer.handle_call/3 - :send_trace" do
    # verbose?: false must stay completely silent; the fake client still
    # receives the formatted spans (tags flattened to string "meta").
    test "doesn't log anything when verbose?: false", %{trace: trace, state: state, url: url} do
      log =
        capture_log(fn ->
          ApiServer.handle_call({:send_trace, trace}, self(), state)
        end)
      assert log == ""
      formatted = [
        %{
          "duration" => 100_000,
          "error" => 0,
          "meta" => %{
            "env" => "local",
            "foo" => "123",
            "bar" => "321",
            "buz" => "blitz",
            "baz" => "{1, 2}",
            "zyx" => "[xyz: {1, 2}]"
          },
          "name" => "foo",
          "service" => "foo",
          "resource" => "foo",
          "span_id" => 4_743_028_846_331_200_906,
          "start" => 1_527_752_052_216_478_000,
          "trace_id" => 4_743_028_846_331_200_905,
          "metrics" => %{
            "_sampling_priority_v1" => 1
          }
        },
        %{
          "duration" => 100_000_000,
          "error" => 0,
          "meta" => %{"env" => "local"},
          "name" => "bar",
          "service" => "bar",
          "resource" => "bar",
          "span_id" => 4_743_029_846_331_200_906,
          "start" => 1_527_752_052_216_578_001,
          "trace_id" => 4_743_028_846_331_200_905,
          "metrics" => %{
            "_sampling_priority_v1" => 1
          }
        }
      ]
      assert_received {:put_datadog_spans, ^formatted, ^url, _}
    end
    # A transport error from the HTTP client must not crash the server:
    # the call still replies :ok and the failure is only logged.
    test "doesn't care about the response result", %{trace: trace, state: state, url: url} do
      state =
        state
        |> Map.put(:verbose?, true)
        |> Map.put(:http, TestErrorApiServer)
      # Verbose mode emits three log lines: processing, span dump, response.
      [processing, received_spans, response] =
        capture_log(fn ->
          {:reply, :ok, _} = ApiServer.handle_call({:send_trace, trace}, self(), state)
        end)
        |> String.split("\n")
        |> Enum.reject(fn s -> s == "" end)
      assert processing =~ ~r/Sending 1 traces, 2 spans/
      assert received_spans =~ ~r/Trace: \[%Spandex.Trace{/
      formatted = [
        %{
          "duration" => 100_000,
          "error" => 0,
          "meta" => %{
            "env" => "local",
            "foo" => "123",
            "bar" => "321",
            "buz" => "blitz",
            "baz" => "{1, 2}",
            "zyx" => "[xyz: {1, 2}]"
          },
          "name" => "foo",
          "service" => "foo",
          "resource" => "foo",
          "span_id" => 4_743_028_846_331_200_906,
          "start" => 1_527_752_052_216_478_000,
          "trace_id" => 4_743_028_846_331_200_905,
          "metrics" => %{
            "_sampling_priority_v1" => 1
          }
        },
        %{
          "duration" => 100_000_000,
          "error" => 0,
          "meta" => %{"env" => "local"},
          "name" => "bar",
          "service" => "bar",
          "resource" => "bar",
          "span_id" => 4_743_029_846_331_200_906,
          "start" => 1_527_752_052_216_578_001,
          "trace_id" => 4_743_028_846_331_200_905,
          "metrics" => %{
            "_sampling_priority_v1" => 1
          }
        }
      ]
      assert response =~ ~r/Trace response: {:error, %HTTPoison.Error{id: :foo, reason: :bar}}/
      assert_received {:put_datadog_spans, ^formatted, ^url, _}
    end
  end
end
| 27.627027 | 96 | 0.499902 |
e841014928493297c7dd5a8e93df503abf83a188 | 941 | exs | Elixir | implementations/elixir/ockam/ockam/test/ockam/router/message_test.exs | digikata/ockam | 015a834ce0d34921b634439d965f726d877fd076 | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam/test/ockam/router/message_test.exs | digikata/ockam | 015a834ce0d34921b634439d965f726d877fd076 | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam/test/ockam/router/message_test.exs | digikata/ockam | 015a834ce0d34921b634439d965f726d877fd076 | [
"Apache-2.0"
] | null | null | null | defmodule Ockam.Router.Message.Tests do
use ExUnit.Case, async: true
doctest Ockam.Router.Message
alias Ockam.Router.Message
describe "Ockam.Router.Message.Any" do
test "empty onward_route is default" do
assert [] === Message.onward_route(:test)
assert [] === Message.onward_route(100)
assert [] === Message.onward_route(%{})
assert [] === Message.onward_route("test")
assert [] === Message.onward_route([])
assert [] === Message.onward_route(100.0)
assert [] === Message.onward_route({100, 300})
end
test "onward_route key of map is used if it has a list value" do
assert [] === Message.onward_route(%{onward_route: []})
assert [1, 2, 3] === Message.onward_route(%{onward_route: [1, 2, 3]})
end
test "onward_route key of map is not used if it does not have a list value" do
assert [] === Message.onward_route(%{onward_route: 100})
end
end
end
| 34.851852 | 82 | 0.645058 |
e84110014b7fac08ce8d1d9bdb503edbad284a19 | 11,851 | exs | Elixir | lib/elixir/test/elixir/task/supervisor_test.exs | mertonium/elixir | 74e666156906974082f6b4d34dfbe6988d6465c0 | [
"Apache-2.0"
] | 1 | 2021-04-28T21:35:01.000Z | 2021-04-28T21:35:01.000Z | lib/elixir/test/elixir/task/supervisor_test.exs | mertonium/elixir | 74e666156906974082f6b4d34dfbe6988d6465c0 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/task/supervisor_test.exs | mertonium/elixir | 74e666156906974082f6b4d34dfbe6988d6465c0 | [
"Apache-2.0"
] | 1 | 2018-04-18T11:25:33.000Z | 2018-04-18T11:25:33.000Z | Code.require_file("../test_helper.exs", __DIR__)
# Exercises Task.Supervisor end to end: async/async_nolink/start_child with
# links, monitors and custom shutdown/restart options, await semantics, and
# the async_stream/async_stream_nolink pipelines.  Many tests depend on the
# exact order of process messages (:ready, {ref, result}, :DOWN), so the
# statement order inside each test is significant.
defmodule Task.SupervisorTest do
  use ExUnit.Case
  @moduletag :capture_log
  # Every test receives a freshly started Task.Supervisor as config[:supervisor].
  setup do
    {:ok, pid} = Task.Supervisor.start_link()
    {:ok, supervisor: pid}
  end
  # Helper run inside spawned tasks: announces readiness to the caller, blocks
  # until the caller sends `true`, then sends `atom` back.
  def wait_and_send(caller, atom) do
    send(caller, :ready)
    receive do: (true -> true)
    send(caller, atom)
  end
  # Helper for the async_stream tests: sleeps `number` ms and returns it, so
  # result ordering can be compared against input ordering.
  def sleep(number) do
    Process.sleep(number)
    number
  end
  test "can be supervised directly", config do
    modules = [{Task.Supervisor, name: config.test}]
    assert {:ok, _} = Supervisor.start_link(modules, strategy: :one_for_one)
    assert Process.whereis(config.test)
  end
  test "counts and returns children", config do
    assert Task.Supervisor.children(config[:supervisor]) == []
    assert Supervisor.count_children(config[:supervisor]) ==
             %{active: 0, specs: 0, supervisors: 0, workers: 0}
    assert DynamicSupervisor.count_children(config[:supervisor]) ==
             %{active: 0, specs: 0, supervisors: 0, workers: 0}
  end
  test "async/1", config do
    parent = self()
    fun = fn -> wait_and_send(parent, :done) end
    task = Task.Supervisor.async(config[:supervisor], fun)
    assert Task.Supervisor.children(config[:supervisor]) == [task.pid]
    # Assert the struct
    assert task.__struct__ == Task
    assert is_pid(task.pid)
    assert is_reference(task.ref)
    # Assert the link (async/1 links the task to the caller)
    {:links, links} = Process.info(self(), :links)
    assert task.pid in links
    receive do: (:ready -> :ok)
    # Assert the initial call
    {:name, fun_name} = Function.info(fun, :name)
    assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(task.pid)
    # Run the task
    send(task.pid, true)
    # Assert response and monitoring messages
    ref = task.ref
    assert_receive {^ref, :done}
    assert_receive {:DOWN, ^ref, _, _, :normal}
  end
  test "async/3", config do
    args = [self(), :done]
    task = Task.Supervisor.async(config[:supervisor], __MODULE__, :wait_and_send, args)
    assert Task.Supervisor.children(config[:supervisor]) == [task.pid]
    receive do: (:ready -> :ok)
    assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(task.pid)
    send(task.pid, true)
    assert task.__struct__ == Task
    assert Task.await(task) == :done
  end
  test "async/1 with custom shutdown", config do
    Process.flag(:trap_exit, true)
    parent = self()
    fun = fn -> wait_and_send(parent, :done) end
    # :brutal_kill means the task is killed (not shut down) with the supervisor.
    %{pid: pid} = Task.Supervisor.async(config[:supervisor], fun, shutdown: :brutal_kill)
    Process.exit(config[:supervisor], :shutdown)
    assert_receive {:DOWN, _, _, ^pid, :killed}
  end
  test "async_nolink/1", config do
    parent = self()
    fun = fn -> wait_and_send(parent, :done) end
    task = Task.Supervisor.async_nolink(config[:supervisor], fun)
    assert Task.Supervisor.children(config[:supervisor]) == [task.pid]
    # Assert the struct
    assert task.__struct__ == Task
    assert is_pid(task.pid)
    assert is_reference(task.ref)
    # Refute the link (async_nolink/1 only monitors, never links)
    {:links, links} = Process.info(self(), :links)
    refute task.pid in links
    receive do: (:ready -> :ok)
    # Assert the initial call
    {:name, fun_name} = Function.info(fun, :name)
    assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(task.pid)
    # Run the task
    send(task.pid, true)
    # Assert response and monitoring messages
    ref = task.ref
    assert_receive {^ref, :done}
    assert_receive {:DOWN, ^ref, _, _, :normal}
  end
  test "async_nolink/3", config do
    args = [self(), :done]
    task = Task.Supervisor.async_nolink(config[:supervisor], __MODULE__, :wait_and_send, args)
    assert Task.Supervisor.children(config[:supervisor]) == [task.pid]
    receive do: (:ready -> :ok)
    assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(task.pid)
    send(task.pid, true)
    assert task.__struct__ == Task
    assert Task.await(task) == :done
  end
  test "async_nolink/1 with custom shutdown", config do
    Process.flag(:trap_exit, true)
    parent = self()
    fun = fn -> wait_and_send(parent, :done) end
    %{pid: pid} = Task.Supervisor.async_nolink(config[:supervisor], fun, shutdown: :brutal_kill)
    Process.exit(config[:supervisor], :shutdown)
    assert_receive {:DOWN, _, _, ^pid, :killed}
  end
  test "start_child/1", config do
    parent = self()
    fun = fn -> wait_and_send(parent, :done) end
    {:ok, pid} = Task.Supervisor.start_child(config[:supervisor], fun)
    assert Task.Supervisor.children(config[:supervisor]) == [pid]
    # start_child never links the task to the caller.
    {:links, links} = Process.info(self(), :links)
    refute pid in links
    receive do: (:ready -> :ok)
    {:name, fun_name} = Function.info(fun, :name)
    assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(pid)
    send(pid, true)
    assert_receive :done
  end
  test "start_child/3", config do
    args = [self(), :done]
    {:ok, pid} =
      Task.Supervisor.start_child(config[:supervisor], __MODULE__, :wait_and_send, args)
    assert Task.Supervisor.children(config[:supervisor]) == [pid]
    {:links, links} = Process.info(self(), :links)
    refute pid in links
    receive do: (:ready -> :ok)
    assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(pid)
    send(pid, true)
    assert_receive :done
    # Argument validation: args must be a list and the function an atom.
    assert_raise FunctionClauseError, fn ->
      Task.Supervisor.start_child(config[:supervisor], __MODULE__, :wait_and_send, :illegal_arg)
    end
    assert_raise FunctionClauseError, fn ->
      args = [self(), :done]
      Task.Supervisor.start_child(config[:supervisor], __MODULE__, "wait_and_send", args)
    end
  end
  test "start_child/1 with custom shutdown", config do
    Process.flag(:trap_exit, true)
    parent = self()
    fun = fn -> wait_and_send(parent, :done) end
    {:ok, pid} = Task.Supervisor.start_child(config[:supervisor], fun, shutdown: :brutal_kill)
    Process.monitor(pid)
    Process.exit(config[:supervisor], :shutdown)
    assert_receive {:DOWN, _, _, ^pid, :killed}
  end
  test "start_child/1 with custom restart", config do
    parent = self()
    fun = fn -> wait_and_send(parent, :done) end
    {:ok, pid} = Task.Supervisor.start_child(config[:supervisor], fun, restart: :permanent)
    assert_receive :ready
    Process.monitor(pid)
    Process.exit(pid, :shutdown)
    assert_receive {:DOWN, _, _, ^pid, :shutdown}
    # A :permanent child is restarted, so a second :ready arrives.
    assert_receive :ready
  end
  test "terminate_child/2", config do
    args = [self(), :done]
    {:ok, pid} =
      Task.Supervisor.start_child(config[:supervisor], __MODULE__, :wait_and_send, args)
    assert Task.Supervisor.children(config[:supervisor]) == [pid]
    assert Task.Supervisor.terminate_child(config[:supervisor], pid) == :ok
    assert Task.Supervisor.children(config[:supervisor]) == []
    assert Task.Supervisor.terminate_child(config[:supervisor], pid) == {:error, :not_found}
  end
  describe "await/1" do
    # Throws, raises and exits in the task all surface as exits in the caller.
    test "exits on task throw", config do
      Process.flag(:trap_exit, true)
      task = Task.Supervisor.async(config[:supervisor], fn -> throw(:unknown) end)
      assert {{{:nocatch, :unknown}, _}, {Task, :await, [^task, 5000]}} =
               catch_exit(Task.await(task))
    end
    test "exits on task error", config do
      Process.flag(:trap_exit, true)
      task = Task.Supervisor.async(config[:supervisor], fn -> raise "oops" end)
      assert {{%RuntimeError{}, _}, {Task, :await, [^task, 5000]}} = catch_exit(Task.await(task))
    end
    test "exits on task exit", config do
      Process.flag(:trap_exit, true)
      task = Task.Supervisor.async(config[:supervisor], fn -> exit(:unknown) end)
      assert {:unknown, {Task, :await, [^task, 5000]}} = catch_exit(Task.await(task))
    end
  end
  describe "async_stream" do
    @opts []
    test "streams an enumerable with fun", %{supervisor: supervisor} do
      assert supervisor
             |> Task.Supervisor.async_stream(1..4, &sleep/1, @opts)
             |> Enum.to_list() == [ok: 1, ok: 2, ok: 3, ok: 4]
    end
    test "streams an enumerable with mfa", %{supervisor: supervisor} do
      assert supervisor
             |> Task.Supervisor.async_stream(1..4, __MODULE__, :sleep, [], @opts)
             |> Enum.to_list() == [ok: 1, ok: 2, ok: 3, ok: 4]
    end
    test "streams an enumerable without leaking tasks", %{supervisor: supervisor} do
      assert supervisor
             |> Task.Supervisor.async_stream(1..4, &sleep/1, @opts)
             |> Enum.to_list() == [ok: 1, ok: 2, ok: 3, ok: 4]
      # No stray task/monitor messages may remain in the mailbox.
      refute_received _
    end
    test "streams an enumerable with slowest first", %{supervisor: supervisor} do
      Process.flag(:trap_exit, true)
      # 4..1 makes the first element the slowest; results must keep input order.
      assert supervisor
             |> Task.Supervisor.async_stream(4..1, &sleep/1, @opts)
             |> Enum.to_list() == [ok: 4, ok: 3, ok: 2, ok: 1]
    end
    test "streams an enumerable with exits", %{supervisor: supervisor} do
      Process.flag(:trap_exit, true)
      assert supervisor
             |> Task.Supervisor.async_stream(1..4, &exit(Integer.to_string(&1)), @opts)
             |> Enum.to_list() == [exit: "1", exit: "2", exit: "3", exit: "4"]
    end
    test "shuts down unused tasks", %{supervisor: supervisor} do
      # Halting the stream early must terminate the still-running tasks.
      collection = [0, :infinity, :infinity, :infinity]
      assert supervisor
             |> Task.Supervisor.async_stream(collection, &sleep/1, @opts)
             |> Enum.take(1) == [ok: 0]
      assert Process.info(self(), :links) == {:links, [supervisor]}
    end
    test "shuts down unused tasks without leaking messages", %{supervisor: supervisor} do
      collection = [0, :infinity, :infinity, :infinity]
      assert supervisor
             |> Task.Supervisor.async_stream(collection, &sleep/1, @opts)
             |> Enum.take(1) == [ok: 0]
      refute_received _
    end
  end
  describe "async_stream_nolink" do
    @opts [max_concurrency: 4]
    test "streams an enumerable with fun", %{supervisor: supervisor} do
      assert supervisor
             |> Task.Supervisor.async_stream_nolink(1..4, &sleep/1, @opts)
             |> Enum.to_list() == [ok: 1, ok: 2, ok: 3, ok: 4]
    end
    test "streams an enumerable with mfa", %{supervisor: supervisor} do
      assert supervisor
             |> Task.Supervisor.async_stream_nolink(1..4, __MODULE__, :sleep, [], @opts)
             |> Enum.to_list() == [ok: 1, ok: 2, ok: 3, ok: 4]
    end
    test "streams an enumerable without leaking tasks", %{supervisor: supervisor} do
      assert supervisor
             |> Task.Supervisor.async_stream_nolink(1..4, &sleep/1, @opts)
             |> Enum.to_list() == [ok: 1, ok: 2, ok: 3, ok: 4]
      refute_received _
    end
    test "streams an enumerable with slowest first", %{supervisor: supervisor} do
      assert supervisor
             |> Task.Supervisor.async_stream_nolink(4..1, &sleep/1, @opts)
             |> Enum.to_list() == [ok: 4, ok: 3, ok: 2, ok: 1]
    end
    test "streams an enumerable with exits", %{supervisor: supervisor} do
      # No trap_exit needed here: nolink tasks are only monitored.
      assert supervisor
             |> Task.Supervisor.async_stream_nolink(1..4, &exit/1, @opts)
             |> Enum.to_list() == [exit: 1, exit: 2, exit: 3, exit: 4]
    end
    test "shuts down unused tasks", %{supervisor: supervisor} do
      collection = [0, :infinity, :infinity, :infinity]
      assert supervisor
             |> Task.Supervisor.async_stream_nolink(collection, &sleep/1, @opts)
             |> Enum.take(1) == [ok: 0]
      assert Process.info(self(), :links) == {:links, [supervisor]}
    end
    test "shuts down unused tasks without leaking messages", %{supervisor: supervisor} do
      collection = [0, :infinity, :infinity, :infinity]
      assert supervisor
             |> Task.Supervisor.async_stream_nolink(collection, &sleep/1, @opts)
             |> Enum.take(1) == [ok: 0]
      refute_received _
    end
  end
end
| 32.379781 | 97 | 0.636402 |
e84140b20db7116738901c2bd3d95ab05b318015 | 1,885 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1beta1/model/lat_lng.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/lat_lng.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/lat_lng.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Firestore.V1beta1.Model.LatLng do
  @moduledoc """
  An object representing a latitude/longitude pair. This is expressed as a pair
  of doubles representing degrees latitude and degrees longitude. Unless
  specified otherwise, this must conform to the
  <a href="http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf">WGS84
  standard</a>. Values must be within normalized ranges.
  ## Attributes
  *   `latitude` (*type:* `float()`, *default:* `nil`) - The latitude in degrees. It must be in the range [-90.0, +90.0].
  *   `longitude` (*type:* `float()`, *default:* `nil`) - The longitude in degrees. It must be in the range [-180.0, +180.0].
  """
  # Generated-model plumbing: presumably defines the struct, the `field/1`
  # macro and `decode/2` used by the Poison defimpls below — see ModelBase.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :latitude => float(),
          :longitude => float()
        }
  # JSON fields mapped onto the struct by ModelBase's `field/1` macro.
  field(:latitude)
  field(:longitude)
end
defimpl Poison.Decoder, for: GoogleApi.Firestore.V1beta1.Model.LatLng do
  # Delegate JSON decoding to the model's generated decode/2.
  def decode(value, options), do: GoogleApi.Firestore.V1beta1.Model.LatLng.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Firestore.V1beta1.Model.LatLng do
  # Encoding is uniform across generated models: defer to ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.907407 | 125 | 0.722016 |
e8414626c8d5630c15fb0bdc3dd3eb63b305e475 | 2,476 | exs | Elixir | test/redixcontrol_test.exs | beehive-spg/beehive-routing-buffer | a7748b1789e82718db5ab2f8c4e3620ce79c7f2e | [
"Apache-2.0"
] | null | null | null | test/redixcontrol_test.exs | beehive-spg/beehive-routing-buffer | a7748b1789e82718db5ab2f8c4e3620ce79c7f2e | [
"Apache-2.0"
] | null | null | null | test/redixcontrol_test.exs | beehive-spg/beehive-routing-buffer | a7748b1789e82718db5ab2f8c4e3620ce79c7f2e | [
"Apache-2.0"
] | null | null | null | ExUnit.start
# Integration tests for Buffer.Redixcontrol.
# NOTE Make sure the Redis Server is started before running these tests.
defmodule RedixcontrolTest do
  use ExUnit.Case, async: true
  doctest Buffer.Redixcontrol

  test "Worker creation and operability." do
    assert Buffer.Redixcontrol.query(["PING"]) == "PONG"
  end

  test "Redis operability. Insert \"fav_color: green\"" do
    key = "fav_color"
    value = "green"
    Buffer.Redixcontrol.set(key, value)
    result = Buffer.Redixcontrol.get(key)
    # Remove the key again so the test leaves no state behind.
    Buffer.Redixcontrol.query(["DEL", "#{key}"])
    assert result == value
  end

  test "Adding and removing arrivals" do
    time = "2017-01-01 12:00:00"
    drone = "512"
    location = "16"
    is_delivery = true
    id = Buffer.Redixcontrol.add_arrival(time, drone, location, is_delivery)
    resp = Buffer.Redixcontrol.query ["HGET", "arr_#{id}", "drone"]
    assert resp == drone
    Buffer.Redixcontrol.remove_arrival("arr_#{id}")
    assert Buffer.Redixcontrol.query(["HGET", "arr_#{id}", "drone"]) == nil
  end

  test "Adding and removing departures" do
    time = "2017-01-01 12:00:00"
    drone = "512"
    location = "16"
    is_delivery = true
    id = Buffer.Redixcontrol.add_departure(time, drone, location, is_delivery)
    resp = Buffer.Redixcontrol.query ["HGET", "dep_#{id}", "drone"]
    assert resp == drone
    Buffer.Redixcontrol.remove_departure("dep_#{id}")
    assert Buffer.Redixcontrol.query(["HGET", "dep_#{id}", "drone"]) == nil
  end

  test "Adding routes" do
    route = %{:is_delivery => true, :route => [%{:from => "10", :to => "11", :dep_time => "2017-01-01 10:00:00", :arr_time => "2017-01-01 10:10:00", :drone => "512"}, %{:from => "11", :to => "12", :dep_time => "2017-01-01 10:20:00", :arr_time => "2017-01-01 10:35:00", :drone => "26"}]}
    ids = Buffer.Redixcontrol.add_route(route)
    # The first hop's departure must reference the matching arrival id.
    assert Buffer.Redixcontrol.query(["HGET", "dep_#{Enum.at(Enum.at(ids, 0), 0)}", "arrival"]) == "arr_#{Enum.at(Enum.at(ids, 0), 1)}"
    cleanup(ids)
  end

  # Removes the departure/arrival pairs created by add_route/1.
  # `ids` is a list of two-element lists: [departure_id, arrival_id].
  # Replaces the previous duplicated recursive clauses; Enum.each also
  # handles an empty list gracefully instead of raising FunctionClauseError.
  defp cleanup(ids) do
    Enum.each(ids, fn id_pair ->
      Buffer.Redixcontrol.remove_departure("dep_#{Enum.at(id_pair, 0)}")
      Buffer.Redixcontrol.remove_arrival("arr_#{Enum.at(id_pair, 1)}")
    end)
  end
end
| 36.411765 | 290 | 0.601777 |
e8414630a54810a5c3caae536902fe91e59fce53 | 5,637 | exs | Elixir | test/captain_hook/webhook_endpoints/secrets/webhook_endpoint_secret_test.exs | annatel/captain_hook | e16a01107d11756d37d96d1e9092c17d9aa9260b | [
"MIT"
] | 4 | 2020-11-13T11:27:24.000Z | 2021-08-19T17:28:53.000Z | test/captain_hook/webhook_endpoints/secrets/webhook_endpoint_secret_test.exs | annatel/captain_hook | e16a01107d11756d37d96d1e9092c17d9aa9260b | [
"MIT"
] | null | null | null | test/captain_hook/webhook_endpoints/secrets/webhook_endpoint_secret_test.exs | annatel/captain_hook | e16a01107d11756d37d96d1e9092c17d9aa9260b | [
"MIT"
defmodule CaptainHook.WebhookEndpoints.Secrets.WebhookEndpointSecretTest do
  use ExUnit.Case, async: true
  use CaptainHook.DataCase

  alias CaptainHook.WebhookEndpoints.Secrets.WebhookEndpointSecret

  # Two fixed instants, one month apart, used as started_at/ended_at values.
  @datetime_1 DateTime.from_naive!(~N[2018-05-24 12:27:48], "Etc/UTC")
  @datetime_2 DateTime.from_naive!(~N[2018-06-24 12:27:48], "Etc/UTC")

  describe "create_changeset/2" do
    test "only permitted_keys are casted" do
      attrs = params_for(:webhook_endpoint_secret, webhook_endpoint_id: uuid())

      changeset =
        WebhookEndpointSecret.create_changeset(
          %WebhookEndpointSecret{},
          Map.merge(attrs, %{new_key: "value"})
        )

      casted_keys = Map.keys(changeset.changes)

      assert :webhook_endpoint_id in casted_keys
      assert :started_at in casted_keys
      assert :is_main in casted_keys
      refute :ended_at in casted_keys
      refute :new_key in casted_keys
    end

    test "when required params are missing, returns an invalid changeset" do
      changeset = WebhookEndpointSecret.create_changeset(%WebhookEndpointSecret{}, %{})

      refute changeset.valid?
      assert %{webhook_endpoint_id: ["can't be blank"]} = errors_on(changeset)
      assert %{started_at: ["can't be blank"]} = errors_on(changeset)
      assert %{is_main: ["can't be blank"]} = errors_on(changeset)
    end

    test "when params are valid, return a valid changeset" do
      endpoint = insert!(:webhook_endpoint)

      attrs =
        params_for(:webhook_endpoint_secret,
          webhook_endpoint_id: endpoint.id,
          started_at: @datetime_1
        )

      changeset = WebhookEndpointSecret.create_changeset(%WebhookEndpointSecret{}, attrs)

      assert changeset.valid?
      assert get_field(changeset, :webhook_endpoint_id) == attrs.webhook_endpoint_id
      assert get_field(changeset, :started_at) == @datetime_1
      assert get_field(changeset, :is_main) == attrs.is_main
      # A secret value is generated by the changeset itself.
      refute is_nil(get_field(changeset, :secret))
    end
  end

  describe "remove_changeset/2" do
    test "only permitted_keys are casted" do
      endpoint = insert!(:webhook_endpoint)
      secret = insert!(:webhook_endpoint_secret, webhook_endpoint_id: endpoint.id)

      attrs =
        params_for(:webhook_endpoint_secret, started_at: @datetime_1, ended_at: @datetime_2)
        |> Map.put(:is_main, false)

      changeset =
        WebhookEndpointSecret.remove_changeset(
          secret,
          Map.merge(attrs, %{new_key: "value"})
        )

      casted_keys = Map.keys(changeset.changes)

      refute :webhook_endpoint_id in casted_keys
      refute :secret in casted_keys
      refute :started_at in casted_keys
      assert :is_main in casted_keys
      assert :ended_at in casted_keys
      refute :new_key in casted_keys
    end

    test "when params are valid, return a valid changeset" do
      endpoint = insert!(:webhook_endpoint)

      secret =
        insert!(:webhook_endpoint_secret,
          webhook_endpoint_id: endpoint.id,
          started_at: @datetime_1
        )

      changeset = WebhookEndpointSecret.remove_changeset(secret, %{ended_at: @datetime_2})

      assert changeset.valid?
      assert get_field(changeset, :ended_at) == @datetime_2
    end

    test "when required params are missing, returns an invalid changeset" do
      endpoint = insert!(:webhook_endpoint)
      secret = insert!(:webhook_endpoint_secret, webhook_endpoint_id: endpoint.id)

      changeset = WebhookEndpointSecret.remove_changeset(secret, %{is_main: nil})

      refute changeset.valid?
      assert %{is_main: ["can't be blank"]} = errors_on(changeset)
      assert %{ended_at: errors} = errors_on(changeset)
      assert "can't be blank" in errors
    end

    test "when params are invalid, returns an invalid changeset" do
      endpoint = insert!(:webhook_endpoint)

      # started_at after the requested ended_at must be rejected.
      secret =
        insert!(:webhook_endpoint_secret,
          webhook_endpoint_id: endpoint.id,
          started_at: @datetime_2
        )

      changeset = WebhookEndpointSecret.remove_changeset(secret, %{ended_at: @datetime_1})

      refute changeset.valid?
      assert %{ended_at: ["should be after or equal to started_at"]} = errors_on(changeset)
    end

    test "when ended_at is after the max expiration time, returns a changeset error" do
      endpoint = insert!(:webhook_endpoint)

      secret =
        insert!(:webhook_endpoint_secret,
          webhook_endpoint_id: endpoint.id,
          started_at: utc_now()
        )

      # 7 days (plus slack) from now is the latest allowed expiration.
      max_expiration_time = utc_now() |> add(7 * 24 * 3600 + 100)
      over_max_expiration_time = max_expiration_time |> add(100)

      attrs = build(:webhook_endpoint_secret, ended_at: over_max_expiration_time) |> params_for()

      changeset = WebhookEndpointSecret.remove_changeset(secret, attrs)

      refute changeset.valid?

      assert %{ended_at: ["should be before or equal to #{max_expiration_time}"]} ==
               errors_on(changeset)
    end
  end
end
| 33.553571 | 98 | 0.697179 |
e84153004f2b7f1848eed5799fbe88d7a53ef962 | 4,532 | exs | Elixir | dpp/test/genserver_distributor_test.exs | XiaoxiaoZ/Distributed-path-planner | a7467e08e60248793dc55624497fd91bdb45ca14 | [
"MIT"
] | null | null | null | dpp/test/genserver_distributor_test.exs | XiaoxiaoZ/Distributed-path-planner | a7467e08e60248793dc55624497fd91bdb45ca14 | [
"MIT"
] | 6 | 2021-12-12T13:23:17.000Z | 2021-12-17T12:03:46.000Z | dpp/test/genserver_distributor_test.exs | XiaoxiaoZ/Distributed-path-planner | a7467e08e60248793dc55624497fd91bdb45ca14 | [
"MIT"
] | null | null | null | defmodule GenServerDistributorTest do
use ExUnit.Case
test "GenServer test distributor" do
points = [[0.0, 1.0, 0.0, -1.0, -0.5, 0.0, 0.0, -0.5, -1.0, 1.0, -0.5, 0.0],
[0.0, 2.0, 0.0, -2.0, -1.0, 0.0, 0.0, -1.0, -2.0, 2.0, -1.0, 0.0]]
indices = [[0, 1, 2, 0, 2, 3, 0, 3, 1],
[0, 1, 2, 0, 2, 3, 0, 3, 1]]
translates = [[2.0, 2.0, 0.2],
[0.0, 0.0, 0.0]]
rotates = [[0.0, 1.0, 0.0, 0.0],
[1.0, 0.0, 0.0, 0.0]]
[point1 | [point2]] = points
[indice1 | [indice2]] = indices
[translate1 | [translate2]] = translates
[rotate1 | [rotate2]] = rotates
margin = 0.001
obj_points = points
obj_indices = indices
obj_translates = translates
obj_rotates = rotates
{:ok, pid} =Robot.Links.Server.start_link([])
joints = %Robot.Joints{joint1: :math.pi/2, joint2: 0.0, joint3: 0.0, joint4: :math.pi/2, joint5: 0.0, joint6: 0.0}
robot_model = Robot.Links.Server.get_mesh_with_joints(pid, joints)
rob_points = robot_model.points
rob_indices = robot_model.indices
rob_translates = robot_model.translates
rob_rotates = robot_model.rotates
data_temp = Enum.with_index(obj_points, fn obj, index -> %{point1: Enum.at(obj_points,index), indice1: Enum.at(obj_indices,index), translate1: Enum.at(obj_translates,index), rotate1: Enum.at(obj_rotates,index), point2: Enum.at(rob_points,0), indice2: Enum.at(rob_indices,0), translate2: Enum.at(rob_translates,0), rotate2: Enum.at(rob_rotates,0), margin: margin} end)
data1 = [data_temp]
data_temp = Enum.with_index(obj_points, fn obj, index -> %{point1: Enum.at(obj_points,index), indice1: Enum.at(obj_indices,index), translate1: Enum.at(obj_translates,index), rotate1: Enum.at(obj_rotates,index), point2: Enum.at(rob_points,1), indice2: Enum.at(rob_indices,1), translate2: Enum.at(rob_translates,1), rotate2: Enum.at(rob_rotates,1), margin: margin} end)
data1 = [data_temp | data1]
data_temp = Enum.with_index(obj_points, fn obj, index -> %{point1: Enum.at(obj_points,index), indice1: Enum.at(obj_indices,index), translate1: Enum.at(obj_translates,index), rotate1: Enum.at(obj_rotates,index), point2: Enum.at(rob_points,2), indice2: Enum.at(rob_indices,2), translate2: Enum.at(rob_translates,2), rotate2: Enum.at(rob_rotates,2), margin: margin} end)
data1 = [data_temp | data1]
data_temp = Enum.with_index(obj_points, fn obj, index -> %{point1: Enum.at(obj_points,index), indice1: Enum.at(obj_indices,index), translate1: Enum.at(obj_translates,index), rotate1: Enum.at(obj_rotates,index), point2: Enum.at(rob_points,3), indice2: Enum.at(rob_indices,3), translate2: Enum.at(rob_translates,3), rotate2: Enum.at(rob_rotates,3), margin: margin} end)
data1 = [data_temp | data1]
data_temp = Enum.with_index(obj_points, fn obj, index -> %{point1: Enum.at(obj_points,index), indice1: Enum.at(obj_indices,index), translate1: Enum.at(obj_translates,index), rotate1: Enum.at(obj_rotates,index), point2: Enum.at(rob_points,4), indice2: Enum.at(rob_indices,4), translate2: Enum.at(rob_translates,4), rotate2: Enum.at(rob_rotates,4), margin: margin} end)
data1 = [data_temp | data1]
data_temp = Enum.with_index(obj_points, fn obj, index -> %{point1: Enum.at(obj_points,index), indice1: Enum.at(obj_indices,index), translate1: Enum.at(obj_translates,index), rotate1: Enum.at(obj_rotates,index), point2: Enum.at(rob_points,5), indice2: Enum.at(rob_indices,5), translate2: Enum.at(rob_translates,5), rotate2: Enum.at(rob_rotates,5), margin: margin} end)
data1 = [data_temp | data1]
data_temp = Enum.with_index(obj_points, fn obj, index -> %{point1: Enum.at(obj_points,index), indice1: Enum.at(obj_indices,index), translate1: Enum.at(obj_translates,index), rotate1: Enum.at(obj_rotates,index), point2: Enum.at(rob_points,6), indice2: Enum.at(rob_indices,6), translate2: Enum.at(rob_translates,6), rotate2: Enum.at(rob_rotates,6), margin: margin} end)
data1 = [data_temp | data1]
data1 = List.flatten(data1)
data = [1,2,3,4,5,6]
TaskManagerSupervisor.start_link(data1)
CollisionDetectorSupervisor.start_link(10)
:timer.sleep(50000)
TaskManager.add_task(data1)
:timer.sleep(3000)
TaskManager.remove_all_tasks()
:timer.sleep(100000)
IO.inspect TaskManager.get_result
end
end | 74.295082 | 375 | 0.661959 |
e841641b7ddef50125ec957726db748bd48ceac6 | 320 | ex | Elixir | lib/page_change_notifier_web/controllers/search_controller.ex | elitau/page_change_notifier | 55c02ef0a464040d98cf416c131e39e7a09df975 | [
"MIT"
] | null | null | null | lib/page_change_notifier_web/controllers/search_controller.ex | elitau/page_change_notifier | 55c02ef0a464040d98cf416c131e39e7a09df975 | [
"MIT"
] | null | null | null | lib/page_change_notifier_web/controllers/search_controller.ex | elitau/page_change_notifier | 55c02ef0a464040d98cf416c131e39e7a09df975 | [
"MIT"
] | null | null | null | defmodule PageChangeNotifierWeb.SearchController do
use PageChangeNotifierWeb, :controller
plug(PageChangeNotifierWeb.Plug.Authenticate)
def search(conn, %{"q" => query}) do
new_results = PageChangeNotifier.Search.run(query)
render(conn, "results.html", query: query, new_results: new_results)
end
end
| 29.090909 | 72 | 0.76875 |
e84197b232107032ca2a106304fe9e5566c0d328 | 74 | exs | Elixir | test/views/layout_view_test.exs | frbaroni/budget | c8bd78eff0607f98ffa49265d9485d0c85a28ede | [
"MIT"
] | null | null | null | test/views/layout_view_test.exs | frbaroni/budget | c8bd78eff0607f98ffa49265d9485d0c85a28ede | [
"MIT"
] | null | null | null | test/views/layout_view_test.exs | frbaroni/budget | c8bd78eff0607f98ffa49265d9485d0c85a28ede | [
"MIT"
defmodule Budget.LayoutViewTest do
  # Currently contains no tests; it only pulls in the shared ConnCase setup.
  use Budget.ConnCase, async: true
end
| 18.5 | 34 | 0.810811 |
e8419b54b31ea0a9bec0abb7244c19d9b41f0413 | 2,603 | exs | Elixir | test/lib/bamboo/sent_email_test.exs | njwest/bamboo | 024286443a4e1aae57cbaa87dbcafe62c9a5755a | [
"MIT"
] | null | null | null | test/lib/bamboo/sent_email_test.exs | njwest/bamboo | 024286443a4e1aae57cbaa87dbcafe62c9a5755a | [
"MIT"
] | null | null | null | test/lib/bamboo/sent_email_test.exs | njwest/bamboo | 024286443a4e1aae57cbaa87dbcafe62c9a5755a | [
"MIT"
] | 1 | 2018-08-02T12:36:21.000Z | 2018-08-02T12:36:21.000Z | defmodule Bamboo.SentEmailTest do
use ExUnit.Case
alias Bamboo.SentEmail
import Bamboo.Email
setup do
Bamboo.SentEmail.reset
:ok
end
test "get_id gets the emails id" do
email = new_email() |> put_private(:local_adapter_id, 1)
assert SentEmail.get_id(email) == 1
end
test "raises when trying to get id from something that isn't an email" do
assert_raise RuntimeError, ~r/expected a %Bamboo.Email{}/, fn ->
SentEmail.get_id("string")
end
end
test "raises helpful message if the id is not set" do
email = new_email()
assert_raise RuntimeError, ~r/no id was present/, fn ->
SentEmail.get_id(email)
end
end
test "gets an email by id" do
pushed_email = SentEmail.push(new_email(subject: "Something"))
email = pushed_email |> SentEmail.get_id |> SentEmail.get
assert %Bamboo.Email{subject: "Something"} = email
end
test "get is case-insensitive" do
pushed_email = SentEmail.push(new_email(subject: "Something"))
id = SentEmail.get_id(pushed_email)
assert pushed_email == id |> String.upcase |> SentEmail.get
assert pushed_email == id |> String.downcase |> SentEmail.get
end
test "returns nil when getting email with no matching id" do
assert SentEmail.get("non_existent_id") == nil
end
test "raises if there is no email with that id" do
assert_raise Bamboo.SentEmail.NoDeliveriesError, fn ->
SentEmail.get!("non_existent_id")
end
end
test "all/0 is empty if no emails have been sent" do
assert SentEmail.all == []
end
test "one/0 returns an email if there is one email in the mailbox" do
email = new_email(subject: "Something")
SentEmail.push(email)
assert %Bamboo.Email{subject: "Something"} = SentEmail.one
end
test "one/0 raises if there are no emails in the mailbox" do
assert_raise SentEmail.NoDeliveriesError, fn ->
SentEmail.one
end
end
test "one/0 raises if there are 2 or more emails in the mailbox" do
SentEmail.push(new_email())
SentEmail.push(new_email())
assert_raise SentEmail.DeliveriesError, fn ->
SentEmail.one
end
end
test "pushes emails and gives them an id" do
email = new_email(subject: "Something")
SentEmail.push(email)
assert [%{subject: "Something"}] = SentEmail.all
assert has_id?(SentEmail.one)
end
defp has_id?(email) do
email |> SentEmail.get_id |> String.length == 16
end
test "reset/0 removes all emails from the mailbox" do
SentEmail.push(new_email())
SentEmail.reset
assert SentEmail.all == []
end
end
| 24.327103 | 75 | 0.685747 |
e841a22cc731be8c342c07d41117fa42d4cd0110 | 682 | ex | Elixir | Tree/MultiwayTree/build.ex | hscspring/TheAlgorithms-Python | 5c2faea1d2d25a9a81a4786e053b0cc58ab46c6f | [
"MIT"
] | 10 | 2020-07-06T11:00:58.000Z | 2022-01-29T09:25:24.000Z | Tree/MultiwayTree/build.ex | hscspring/TheAlgorithms-Python | 5c2faea1d2d25a9a81a4786e053b0cc58ab46c6f | [
"MIT"
] | null | null | null | Tree/MultiwayTree/build.ex | hscspring/TheAlgorithms-Python | 5c2faea1d2d25a9a81a4786e053b0cc58ab46c6f | [
"MIT"
] | 3 | 2020-07-13T06:39:23.000Z | 2020-08-15T16:29:48.000Z | defmodule Tree do
def build_tree(list) do
list
|> Enum.reverse()
|> Enum.reduce(%{}, fn foo, map ->
foo = %{foo | children: Map.get(map, foo.location, [])}
Map.update(map, foo.parent, [foo], fn foos ->
[foo | foos]
end)
end)
|> Map.get(nil)
|> hd
end
end
# Demo fixture: a flat folder listing where each entry's `parent` references
# another entry's `location` (the root has `parent: nil`), and parents appear
# before their children.
items = [
  %{ location: "/", parent: nil, children: [] },
  %{ location: "/folder1", parent: "/", children: [] },
  %{ location: "/folder1/folder1-folder1", parent: "/folder1", children: [] },
  %{ location: "/folder2", parent: "/", children: [] },
  %{ location: "/folder2/folder2-folder1", parent: "/folder2", children: [] }
]
# Builds the nested tree and prints it (the `|> IO.inspect()` continuation
# follows on the next line of the file).
Tree.build_tree(items)
| 24.357143 | 78 | 0.552786 |
e841cf96007c3f2d1d91681421726a4383415d4e | 5,506 | ex | Elixir | lib/implied.ex | jalcine/microformats2-elixir | a7b1d0e6ac3e01dc4f0f9e96d57948dc53b127ad | [
"MIT"
] | null | null | null | lib/implied.ex | jalcine/microformats2-elixir | a7b1d0e6ac3e01dc4f0f9e96d57948dc53b127ad | [
"MIT"
] | null | null | null | lib/implied.ex | jalcine/microformats2-elixir | a7b1d0e6ac3e01dc4f0f9e96d57948dc53b127ad | [
"MIT"
] | null | null | null | defmodule Microformats2.Items.ImpliedProperties do
# Adds the implied name, photo and url properties (in that order) to the
# parsed microformats entry.
def parse(entry, root, url, doc) do
  entry
  |> implied_name_property(root)
  |> implied_photo_property(root)
  |> implied_url_property(root, url, doc)
end
# Implies the "url" property when the entry has none: first the root
# element's own href-like attribute, otherwise a single-child descent; the
# result is stripped, resolved against the document URL, and stored as a
# one-element list. Blank results leave the entry untouched.
defp implied_url_property(entry, root, doc_url, doc) do
  if entry[:properties][:url] != nil do
    entry
  else
    attrval = implied_url_attrval(root)

    url =
      if Microformats2.blank?(attrval) do
        implied_url_deep(root)
      else
        attrval
      end
      |> Microformats2.stripped_or_nil()

    if Microformats2.blank?(url) do
      entry
    else
      absolute = Microformats2.abs_uri(url, doc_url, doc)
      Map.put(entry, :properties, Map.put(entry[:properties], :url, [absolute]))
    end
  end
end
# Implies the "photo" property when the entry has none: the root element's
# own src/data attribute first, then a single-child descent. A blank result
# leaves the entry untouched; otherwise the stripped url is stored as a
# one-element list.
defp implied_photo_property(entry, root) do
  if entry[:properties][:photo] != nil do
    entry
  else
    attrval = implied_photo_attrval(root)

    photo =
      if Microformats2.blank?(attrval) do
        implied_photo_deep(root)
      else
        attrval
      end
      |> Microformats2.stripped_or_nil()

    if Microformats2.blank?(photo) do
      entry
    else
      Map.put(entry, :properties, Map.put(entry[:properties], :photo, [photo]))
    end
  end
end
# Implies the "name" property when missing: img/area use @alt, abbr uses
# @title, anything else first tries a single-child descent
# (implied_name_deep/1) and finally falls back to the element's text
# content. The value is whitespace-stripped (nil when blank).
defp implied_name_property(entry, root = {elem, _, _}) do
  if entry[:properties][:name] == nil do
    nam =
      cond do
        elem == "img" or elem == "area" ->
          Floki.attribute(root, "alt") |> List.first()

        elem == "abbr" ->
          Floki.attribute(root, "title") |> List.first()

        true ->
          val = implied_name_deep(root)

          if Microformats2.blank?(val) do
            Microformats2.Items.text_content(root)
          else
            val
          end
      end
      |> Microformats2.stripped_or_nil()

    Map.put(entry, :properties, Map.put(entry[:properties], :name, [nam]))
  else
    entry
  end
end
# Descends through single-element wrappers looking for an implied name:
# when the root has exactly one element child, try that child's alt/title
# attribute; when that is blank, descend one more level (again only when
# there is exactly one element child) and try again. Returns nil when the
# structure does not match — the bodiless `if` branches yield nil.
defp implied_name_deep({_, _, children}) do
  # Keep only element nodes; bare strings are text nodes.
  only_nodes =
    Enum.filter(children, fn
      el when is_bitstring(el) -> false
      _ -> true
    end)

  if Enum.count(only_nodes) == 1 do
    sec_node = List.first(only_nodes)
    {_, _, sec_node_children} = sec_node
    attrval = implied_name_attrval(sec_node)

    if Microformats2.blank?(attrval) do
      sec_only_nodes =
        Enum.filter(sec_node_children, fn
          el when is_bitstring(el) -> false
          _ -> true
        end)

      if Enum.count(sec_only_nodes) == 1 do
        third_node = sec_only_nodes |> List.first()
        implied_name_attrval(third_node)
      end
    else
      attrval
    end
  end
end
defp implied_name_attrval(node = {"img", _, _}) do
Floki.attribute(node, "alt") |> List.first()
end
defp implied_name_attrval(node = {"area", _, _}) do
Floki.attribute(node, "alt") |> List.first()
end
defp implied_name_attrval(node = {"abbr", _, _}) do
Floki.attribute(node, "title") |> List.first()
end
defp implied_name_attrval(_) do
nil
end
# Implied-photo lookup: prefer a single non-root img[src] child, then a
# single object[data] child; otherwise, when the root wraps exactly one
# element child, repeat the same lookup one level deeper. Returns the
# attribute value or nil.
defp implied_photo_deep(root) do
  imgs = direct_not_h_children_with_attr(root, "img", "src")
  objects = direct_not_h_children_with_attr(root, "object", "data")

  cond do
    Enum.count(imgs) == 1 ->
      List.first(imgs) |> Floki.attribute("src") |> List.first()

    Enum.count(objects) == 1 ->
      List.first(objects) |> Floki.attribute("data") |> List.first()

    true ->
      {_, _, children} = root

      # Element children only; bare strings are text nodes.
      only_nodes =
        Enum.filter(children, fn
          el when is_bitstring(el) -> false
          _ -> true
        end)

      if Enum.count(only_nodes) == 1 do
        # NOTE(review): this takes the head of `children`, not of
        # `only_nodes` — the two differ when the first child is a text
        # node. Confirm whether List.first(only_nodes) was intended.
        child = List.first(children)
        sec_imgs = direct_not_h_children_with_attr(child, "img", "src")
        sec_objs = direct_not_h_children_with_attr(child, "object", "data")

        cond do
          Enum.count(sec_imgs) == 1 ->
            List.first(sec_imgs) |> Floki.attribute("src") |> List.first()

          Enum.count(sec_objs) == 1 ->
            List.first(sec_objs) |> Floki.attribute("data") |> List.first()

          true ->
            nil
        end
      else
        nil
      end
  end
end
# Implied-url lookup among direct non-root children: a single a[href]
# wins, then a single area[href]; otherwise nil.
defp implied_url_deep(root) do
  anchors = direct_not_h_children_with_attr(root, "a", "href")
  areas = direct_not_h_children_with_attr(root, "area", "href")

  case {anchors, areas} do
    {[anchor], _} -> anchor |> Floki.attribute("href") |> List.first()
    {_, [area]} -> area |> Floki.attribute("href") |> List.first()
    _ -> nil
  end
end
defp implied_photo_attrval(node = {"img", _, _}) do
Floki.attribute(node, "src") |> List.first()
end
defp implied_photo_attrval(node = {"object", _, _}) do
Floki.attribute(node, "data") |> List.first()
end
defp implied_photo_attrval(_) do
nil
end
# Returns direct `children` elements named `name` that are not themselves
# microformats root elements and that carry a non-empty `attr` attribute.
# Text nodes and any other non-element children (e.g. Floki comment tuples,
# which previously raised FunctionClauseError) are ignored.
defp direct_not_h_children_with_attr({_, _, children}, name, attr) do
  Enum.filter(children, fn
    {^name, _, _} = el ->
      not Microformats2.is_rootlevel?(el) and Enum.count(Floki.attribute(el, attr)) > 0

    _ ->
      false
  end)
end
defp implied_url_attrval(node = {"a", _, _}) do
Floki.attribute(node, "href") |> List.first()
end
defp implied_url_attrval(node = {"area", _, _}) do
Floki.attribute(node, "href") |> List.first()
end
defp implied_url_attrval(_) do
nil
end
end
| 25.728972 | 114 | 0.581911 |
e841f668db8fd0986bc186b5e02010afb99f8684 | 748 | ex | Elixir | widget_market_phoenix/lib/widget_market_phoenix/web/gettext.ex | thegillis/from_rails_to_phoenix | fb230b787fd441e71e93dc8d82b3769eeaeddbf8 | [
"MIT"
] | null | null | null | widget_market_phoenix/lib/widget_market_phoenix/web/gettext.ex | thegillis/from_rails_to_phoenix | fb230b787fd441e71e93dc8d82b3769eeaeddbf8 | [
"MIT"
] | null | null | null | widget_market_phoenix/lib/widget_market_phoenix/web/gettext.ex | thegillis/from_rails_to_phoenix | fb230b787fd441e71e93dc8d82b3769eeaeddbf8 | [
"MIT"
] | null | null | null | defmodule WidgetMarketPhoenix.Web.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import WidgetMarketPhoenix.Web.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :widget_market_phoenix
end
| 29.92 | 72 | 0.695187 |
e8421d4916121d9afa9342cf489f306056c500d7 | 355 | ex | Elixir | apps/definition_extract/lib/extract/v1.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/definition_extract/lib/extract/v1.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/definition_extract/lib/extract/v1.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | defmodule Extract.V1 do
use Definition.Schema
@impl true
# Schema for version 1 of %Extract{}; helpers (version/id/required_string/
# spec/of_struct) presumably come from Definition.Schema — `steps` must be
# a non-nil list, `dictionary` a Dictionary.Impl struct.
def s do
  schema(%Extract{
    version: version(1),
    id: id(),
    dataset_id: required_string(),
    subset_id: required_string(),
    destination: required_string(),
    steps: spec(is_list() and not_nil?()),
    dictionary: of_struct(Dictionary.Impl)
  })
end
end
| 20.882353 | 44 | 0.633803 |
e84265311d015ad3e05afb929e073c3bcdac236e | 84 | ex | Elixir | elixir-primer/v01/ch12/hello2.ex | Atla0903/Study-Elixir | 155fbf30e508e513278425b89262a0a444e0936c | [
"Unlicense"
] | null | null | null | elixir-primer/v01/ch12/hello2.ex | Atla0903/Study-Elixir | 155fbf30e508e513278425b89262a0a444e0936c | [
"Unlicense"
] | null | null | null | elixir-primer/v01/ch12/hello2.ex | Atla0903/Study-Elixir | 155fbf30e508e513278425b89262a0a444e0936c | [
"Unlicense"
] | null | null | null | defmodule Hello2 do
# Prints a greeting for `name` to stdout.
def greet(name), do: IO.puts("Hello, #{name}!")
end
| 14 | 29 | 0.619048 |
e84266c00caf2503f3e65b8fe0c4738b377e860a | 325 | ex | Elixir | lib/ggity/scale/size_manual.ex | kianmeng/ggity | 75f0097464eae4086f8c70e4bea995d60571eba9 | [
"MIT"
] | 47 | 2020-06-21T15:23:54.000Z | 2022-03-13T01:24:19.000Z | lib/ggity/scale/size_manual.ex | kianmeng/ggity | 75f0097464eae4086f8c70e4bea995d60571eba9 | [
"MIT"
] | 3 | 2020-11-28T11:00:59.000Z | 2020-11-30T18:20:37.000Z | lib/ggity/scale/size_manual.ex | kianmeng/ggity | 75f0097464eae4086f8c70e4bea995d60571eba9 | [
"MIT"
] | 2 | 2020-11-28T10:40:10.000Z | 2021-05-28T06:44:47.000Z | defmodule GGity.Scale.Size.Manual do
@moduledoc false

alias GGity.Scale.Size

# Point size used when new/0 is called without an argument.
@default_size 4

@type t() :: %__MODULE__{}

# `transform` maps any data value to the rendered size.
defstruct transform: nil
@spec new(number()) :: Size.Manual.t()
# Builds a manual size scale whose transform ignores its input and always
# returns the configured size.
def new(size \\ @default_size) when is_number(size) do
  constant = fn _value -> size end
  struct(Size.Manual, transform: constant)
end
end
| 19.117647 | 56 | 0.686154 |
e842d6ff5347215d2029e5b8f107899061682da3 | 331 | ex | Elixir | test/support/belongs_to.ex | pedromlcosta/dictator | 1b81c6a7c340b0c8338fd69c8123827a97d0ce06 | [
"0BSD"
] | 78 | 2020-02-21T14:35:54.000Z | 2021-11-08T23:03:47.000Z | test/support/belongs_to.ex | pedromlcosta/dictator | 1b81c6a7c340b0c8338fd69c8123827a97d0ce06 | [
"0BSD"
] | 11 | 2020-04-23T07:17:00.000Z | 2020-12-17T14:46:56.000Z | test/support/belongs_to.ex | pedromlcosta/dictator | 1b81c6a7c340b0c8338fd69c8123827a97d0ce06 | [
"0BSD"
] | 4 | 2020-09-16T15:08:53.000Z | 2021-10-14T01:52:06.000Z | defmodule Dictator.Test.BelongsTo do
# Minimal fixture struct "owned" by a user through the :user_id field.
defmodule Struct do
  defstruct [:id, :user_id]
end

# Stub repo: echoes back a Struct whose id and user_id equal the looked-up id.
defmodule Repo do
  def get_by(Struct, id: id), do: %Struct{id: id, user_id: id}
end

# Policy under test, wired to the fixture struct and stub repo.
defmodule Policy do
  alias Dictator.Test.BelongsTo.{Repo, Struct}

  use Dictator.Policies.BelongsTo, for: Struct, repo: Repo
end
end
| 20.6875 | 64 | 0.700906 |
e842d913f9b4dcbfe1f3eec68b923da85f5148e4 | 880 | ex | Elixir | clients/spanner/lib/google_api/spanner/v1/metadata.ex | richiboi1977/elixir-google-api | c495bb3548090eb7a63d12f6fb145ec48aecdc0b | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/spanner/lib/google_api/spanner/v1/metadata.ex | richiboi1977/elixir-google-api | c495bb3548090eb7a63d12f6fb145ec48aecdc0b | [
"Apache-2.0"
] | null | null | null | clients/spanner/lib/google_api/spanner/v1/metadata.ex | richiboi1977/elixir-google-api | c495bb3548090eb7a63d12f6fb145ec48aecdc0b | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Spanner.V1 do
  @moduledoc """
  API client metadata for GoogleApi.Spanner.V1.
  """

  # Revision of the discovery document this client was generated from.
  @discovery_revision "20210723"

  def discovery_revision, do: @discovery_revision
end
| 32.592593 | 74 | 0.757955 |
e842ed00d1ba14ab457f606579cd904b4867c3d0 | 1,499 | ex | Elixir | lib/exq/support/config.ex | JamesFerguson/exq | 4457ea50ad4c8a6a7e127da5acafc130960822bb | [
"Apache-2.0"
] | null | null | null | lib/exq/support/config.ex | JamesFerguson/exq | 4457ea50ad4c8a6a7e127da5acafc130960822bb | [
"Apache-2.0"
] | null | null | null | lib/exq/support/config.ex | JamesFerguson/exq | 4457ea50ad4c8a6a7e127da5acafc130960822bb | [
"Apache-2.0"
] | null | null | null | defmodule Exq.Support.Config do
# Compile-time defaults for every supported :exq configuration key. Any of
# these can be overridden via the application environment, including the
# {:system, var} indirection resolved in get/2 below.
@default_config %{
  name: Exq,
  mode: :default,
  host: "127.0.0.1",
  port: 6379,
  database: 0,
  redis_options: [],
  namespace: "exq",
  queues: ["default"],
  json_library: Jason,
  heartbeat_enable: false,
  heartbeat_interval: 60_000,
  missed_heartbeats_allowed: 5,
  scheduler_enable: true,
  concurrency: 100,
  scheduler_poll_timeout: 200,
  poll_timeout: 100,
  genserver_timeout: 5000,
  shutdown_timeout: 5000,
  max_retries: 25,
  dead_max_jobs: 10_000,
  # 6 months
  dead_timeout_in_seconds: 180 * 24 * 60 * 60,
  stats_flush_interval: 1000,
  stats_batch_size: 2000,
  serializer: Exq.Serializers.JsonSerializer,
  node_identifier: Exq.NodeIdentifier.HostnameIdentifier,
  backoff: Exq.Backoff.SidekiqDefault,
  start_on_application: true,
  middleware: [
    Exq.Middleware.Stats,
    Exq.Middleware.Job,
    Exq.Middleware.Manager,
    Exq.Middleware.Logger
  ],
  queue_adapter: Exq.Adapters.Queue.Redis
}
# Reads `key` from the :exq app env, defaulting to the compile-time value.
def get(key), do: get(key, Map.get(@default_config, key))
# Reads `key` from the :exq app env, falling back to `fallback`.
# {:system, var} and {:system, var, default} values are resolved from OS
# environment variables at call time.
def get(key, fallback) do
  configured = Application.get_env(:exq, key, fallback)

  case configured do
    {:system, varname} -> System.get_env(varname)
    {:system, varname, default} -> System.get_env(varname) || default
    value -> value
  end
end
# Convenience readers for frequently used configuration entries.
def serializer, do: get(:serializer)

def node_identifier, do: get(:node_identifier)

def backoff, do: get(:backoff)
end
| 23.421875 | 71 | 0.665777 |
e8431fbb669a5963e23811f21bc2877054d377ca | 12,159 | ex | Elixir | lib/sanbase/intercom/intercom.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | lib/sanbase/intercom/intercom.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | lib/sanbase/intercom/intercom.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule Sanbase.Intercom do
@moduledoc """
Sync all users and user stats into intercom
"""
import Ecto.Query
require Sanbase.Utils.Config, as: Config
require Logger
alias Sanbase.Accounts.{User, Statistics}
alias Sanbase.Billing.{Subscription, Product}
alias Sanbase.Alert.UserTrigger
alias Sanbase.Clickhouse.ApiCallData
alias Sanbase.Intercom.UserAttributes
alias Sanbase.Accounts.EthAccount
alias Sanbase.Repo
@intercom_url "https://api.intercom.io/users"
@user_events_url "https://api.intercom.io/events?type=user"
@users_page_size 100
def sync_intercom_to_kafka do
if intercom_api_key() do
Logger.info("Start sync_intercom_to_kafka")
from(u in User, order_by: [asc: u.id], select: u.id)
|> Repo.all()
|> Enum.each(fn user_id ->
try do
attributes = get_user(user_id)
if attributes do
%{user_id: user_id, properties: attributes, inserted_at: Timex.now()}
|> UserAttributes.persist_kafka_sync()
end
rescue
e ->
Logger.error(
"Error sync_intercom_to_kafka for user: #{user_id}, error: #{inspect(e)}"
)
end
end)
Logger.info("Finish sync_intercom_to_kafka")
else
:ok
end
end
# Searches Intercom contacts by external_id (our user id) and returns the
# first matching contact map, or nil when nothing matched.
# NOTE(review): post! raises on transport failure, and a response body
# without a "data" key would make List.first/1 raise — callers wrap this in
# rescue (see sync_intercom_to_kafka), confirm that is the intended contract.
def get_user(user_id) do
  body =
    %{
      query: %{
        field: "external_id",
        operator: "=",
        value: user_id |> to_string()
      }
    }
    |> Jason.encode!()

  HTTPoison.post!(
    "https://api.intercom.io/contacts/search",
    body,
    intercom_headers() ++ [{"Intercom-Version", "2.0"}]
  )
  |> Map.get(:body)
  |> Jason.decode!()
  |> Map.get("data")
  |> List.first()
end
# Precomputes per-user statistics from several subsystems in one pass so
# fetch_stats_for_user/2 can do cheap map/list lookups for each user.
def all_users_stats do
  %{
    customer_payment_type_map: customer_payment_type_map(),
    triggers_map: Statistics.resource_user_count_map(Sanbase.Alert.UserTrigger),
    insights_map: Statistics.resource_user_count_map(Sanbase.Insight.Post),
    watchlists_map: Statistics.resource_user_count_map(Sanbase.UserList),
    screeners_map: Statistics.user_screeners_count_map(),
    users_used_api_list: ApiCallData.users_used_api(),
    users_used_sansheets_list: ApiCallData.users_used_sansheets(),
    api_calls_per_user_count: ApiCallData.api_calls_count_per_user(),
    user_active_subscriptions_map: Subscription.Stats.user_active_subscriptions_map(),
    users_with_monitored_watchlist:
      Sanbase.UserLists.Statistics.users_with_monitored_watchlist()
      |> Enum.map(fn {%{id: user_id}, count} -> {user_id, count} end)
      |> Enum.into(%{})
  }
end
# Streams all users page by page and pushes each user's stats payload to
# Intercom. Returns :ok without syncing when no API key is configured.
def sync_users do
  Logger.info("Start sync_users to Intercom")
  # Skip if api key not present in env. (Run only on production)
  all_users_stats = all_users_stats()

  if intercom_api_key() do
    1..user_pages()
    |> Stream.flat_map(fn page ->
      users_by_page(page, @users_page_size)
    end)
    |> fetch_and_send_stats(all_users_stats)

    Logger.info("Finish sync_users to Intercom")
  else
    :ok
  end
end
# Total number of user rows in the database.
defp all_users_count() do
  query = from(u in User, select: count(u.id))
  Repo.one(query)
end
# Fetches all Intercom events for a user, optionally restricted to those
# after the `since` unix timestamp.
def get_events_for_user(user_id, since \\ nil) do
  url = "#{@user_events_url}&user_id=#{user_id}"
  url = if since, do: url <> "&since=#{since}", else: url
  fetch_all_events(url)
end
# Intercom API key from config; nil when unset (dev/test environments).
def intercom_api_key, do: Config.get(:api_key)
# helpers
# Builds the full Intercom attribute payload for one user, combining the
# user record with the precomputed maps/lists from all_users_stats/0.
defp fetch_stats_for_user(
       %User{
         id: id,
         email: email,
         username: username,
         san_balance: san_balance,
         eth_accounts: eth_accounts,
         stripe_customer_id: stripe_customer_id,
         inserted_at: inserted_at
       } = user,
       %{
         triggers_map: triggers_map,
         insights_map: insights_map,
         watchlists_map: watchlists_map,
         screeners_map: screeners_map,
         users_used_api_list: users_used_api_list,
         users_used_sansheets_list: users_used_sansheets_list,
         api_calls_per_user_count: api_calls_per_user_count,
         users_with_monitored_watchlist: users_with_monitored_watchlist,
         customer_payment_type_map: customer_payment_type_map,
         user_active_subscriptions_map: user_active_subscriptions_map
       }
     ) do
  {sanbase_subscription_current_status, sanbase_trial_created_at} =
    fetch_sanbase_subscription_data(stripe_customer_id)

  # Truthy only when the user had a trial AND currently has an active sub.
  user_paid_after_trial =
    sanbase_trial_created_at && sanbase_subscription_current_status == "active"

  # "addr1=balance | addr2=balance" string; failed lookups render as 0.0.
  address_balance_map =
    eth_accounts
    |> Enum.map(fn eth_account ->
      case EthAccount.san_balance(eth_account) do
        :error -> "#{eth_account.address}=0.0"
        balance -> "#{eth_account.address}=#{Sanbase.Math.to_float(balance)}"
      end
    end)
    |> Enum.join(" | ")

  stats = %{
    user_id: id,
    email: email,
    name: username,
    signed_up_at: DateTime.from_naive!(inserted_at, "Etc/UTC") |> DateTime.to_unix(),
    custom_attributes:
      %{
        all_watchlists_count: Map.get(watchlists_map, id, 0),
        all_triggers_count: Map.get(triggers_map, id, 0),
        all_insights_count: Map.get(insights_map, id, 0),
        all_screeners_count: Map.get(screeners_map, id, 0),
        staked_san_tokens: Sanbase.Math.to_float(san_balance),
        address_balance_map: address_balance_map,
        sanbase_subscription_current_status: sanbase_subscription_current_status,
        sanbase_trial_created_at: sanbase_trial_created_at,
        user_paid_after_trial: user_paid_after_trial,
        user_paid_with: Map.get(customer_payment_type_map, stripe_customer_id, "not_paid"),
        weekly_digest:
          Sanbase.Accounts.UserSettings.settings_for(user).newsletter_subscription
          |> to_string(),
        used_sanapi: id in users_used_api_list,
        used_sansheets: id in users_used_sansheets_list,
        api_calls_count: Map.get(api_calls_per_user_count, id, 0),
        weekly_report_watchlist_count: Map.get(users_with_monitored_watchlist, id, 0),
        active_subscriptions: Map.get(user_active_subscriptions_map, id, "")
      }
      |> Map.merge(triggers_type_count(user))
  }

  # email must be dropped if nil so user still can be created in Intercom if doesn't exist
  stats = if email, do: stats, else: Map.delete(stats, :email)

  stats
end
# Counts the user's alert triggers per settings type, keyed as
# "trigger_<type>".
defp triggers_type_count(user) do
  user
  |> UserTrigger.triggers_for()
  |> Enum.group_by(&("trigger_" <> &1.trigger.settings.type))
  |> Map.new(fn {key, triggers} -> {key, length(triggers)} end)
end
# Returns {status, trial_start_iso8601} of the customer's most recently
# created subscription for the Sanbase product, or {nil, nil} when the
# customer id is nil, the Stripe lookup fails, or no matching subscription
# exists.
defp fetch_sanbase_subscription_data(nil) do
  {nil, nil}
end

defp fetch_sanbase_subscription_data(stripe_customer_id) do
  sanbase_product_stripe_id = Product.by_id(Product.product_sanbase()).stripe_id

  Stripe.Customer.retrieve(stripe_customer_id)
  |> case do
    {:ok, %{subscriptions: %{object: "list", data: data}}} when is_list(data) ->
      data
      |> Enum.filter(&(&1.plan.product == sanbase_product_stripe_id))
      |> Enum.max_by(& &1.created, fn -> nil end)
      |> case do
        nil -> {nil, nil}
        subscription -> {subscription.status, format_dt(subscription.trial_start)}
      end

    _ ->
      {nil, nil}
  end
end
# POSTs the stats payload to Intercom. On 200, Intercom's stored
# app_version attribute is merged back into the payload before it is
# persisted locally. Failures are only logged so the sync can continue.
defp send_user_stats_to_intercom(stats) do
  stats_json = Jason.encode!(stats)

  HTTPoison.post(@intercom_url, stats_json, intercom_headers())
  |> case do
    {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
      Logger.info("Stats sent: #{inspect(stats_json |> Jason.decode!())}}")
      stats = merge_intercom_attributes(stats, body)
      UserAttributes.save(%{user_id: stats.user_id, properties: stats})
      :ok

    {:ok, %HTTPoison.Response{} = response} ->
      Logger.error(
        "Error sending to intercom stats: #{inspect(stats_json |> Jason.decode!())}}. Response: #{inspect(response)}"
      )

    {:error, reason} ->
      Logger.error(
        "Error sending to intercom stats: #{inspect(stats_json |> Jason.decode!())}}. Reason: #{inspect(reason)}"
      )
  end
end
# Number of @users_page_size-sized pages needed to cover every user.
defp user_pages() do
  # Integer ceiling of all_users_count / @users_page_size.
  div(all_users_count() + @users_page_size - 1, @users_page_size)
end
# Loads one page of users (1-based), ordered by id, with eth_accounts
# preloaded for the balance report.
defp users_by_page(page, page_size) do
  skip = (page - 1) * page_size

  query =
    from(u in User,
      order_by: u.id,
      limit: ^page_size,
      offset: ^skip,
      preload: [:eth_accounts]
    )

  Repo.all(query)
end
# Lazily maps each user to its stats payload (re-raising on failure so a
# broken payload surfaces) and then sends each payload, logging — but not
# propagating — send errors.
defp fetch_and_send_stats(users, all_users_stats) do
  users
  |> Stream.map(fn user ->
    try do
      fetch_stats_for_user(user, all_users_stats)
    rescue
      e ->
        Logger.error(
          "Error sync_users to Intercom (fetch_stats_for_user) for user: #{user.id}, error: #{inspect(e)}"
        )

        reraise e, __STACKTRACE__
    end
  end)
  # NOTE(review): in this second stage `user` is actually the stats map
  # built above (it has :user_id, not :id) — `user.id` inside the rescue
  # message would itself raise; verify the intended key.
  |> Enum.each(fn user ->
    try do
      send_user_stats_to_intercom(user)
    rescue
      e ->
        Logger.error(
          "Error sync_users to Intercom (send_user_stats_to_intercom) for user: #{user.id}, error: #{inspect(e)}"
        )
    end
  end)
end
# Carries over the app_version custom attribute Intercom already stores for
# this contact so our payload does not wipe it out.
defp merge_intercom_attributes(stats, intercom_resp) do
  decoded = Jason.decode!(intercom_resp)

  if app_version = get_in(decoded, ["custom_attributes", "app_version"]) do
    put_in(stats, [:custom_attributes, :app_version], app_version)
  else
    stats
  end
end
# Follows Intercom's event pagination, accumulating "events" until the
# events list is empty, there is no "pages.next" cursor, or a request
# fails (in which case whatever was collected so far is returned).
defp fetch_all_events(url, all_events \\ []) do
  case fetch_events(url) do
    {:ok, %{"events" => []}} ->
      all_events

    {:ok, %{"events" => events, "pages" => %{"next" => next}}} ->
      fetch_all_events(next, all_events ++ events)

    {:ok, %{"events" => events}} ->
      all_events ++ events

    {:error, _} ->
      all_events
  end
end
# Single GET against the Intercom events API. Decodes the JSON body on 200;
# logs and returns {:error, _} for any other outcome.
defp fetch_events(url) do
  HTTPoison.get(url, intercom_headers())
  |> case do
    {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
      Jason.decode(body)

    {:ok, %HTTPoison.Response{} = response} ->
      Logger.error(
        "Error fetching intercom events for url: #{url}. Response: #{inspect(response)}"
      )

      {:error, response}

    {:error, reason} ->
      Logger.error("Error fetching intercom events for url: #{url}. Reason: #{inspect(reason)}")
      {:error, reason}
  end
end
# %{"cus_HQ1vCgehxitRJU" => "fiat" | "san", ...}
def customer_payment_type_map() do
do_list([], nil)
|> filter_only_payments()
|> classify_payments_by_type()
end
# Keeps only paid, non-zero invoices and drops repeated invoices per
# customer.
# NOTE(review): Enum.dedup_by/2 removes only *consecutive* duplicates; if
# the invoice list is not grouped by customer this keeps more than one
# invoice per customer — Enum.uniq_by/2 may have been intended. Confirm the
# ordering guarantee of the Stripe listing.
defp filter_only_payments(invoices) do
  invoices
  |> Enum.filter(&(&1.status == "paid" && &1.total > 0))
  |> Enum.dedup_by(fn %{customer: customer} -> customer end)
end
# Classifies each invoice's customer: "fiat" when no balance was applied,
# "san/crypto" when the whole total was covered by the customer balance.
# Invoices matching neither rule are skipped.
def classify_payments_by_type(invoices) do
  Enum.reduce(invoices, %{}, fn %{customer: customer} = invoice, acc ->
    cond do
      invoice.starting_balance == 0 ->
        Map.put(acc, customer, "fiat")

      invoice.total == abs(invoice.starting_balance) ->
        Map.put(acc, customer, "san/crypto")

      true ->
        acc
    end
  end)
end
# Pages through every Stripe invoice, 100 per request, following the
# `starting_after` cursor until an empty page comes back.
# Entry point: do_list([], nil). Previously an empty *first* page crashed
# with BadMapError (Map.get(nil, :id)); it now returns [].
def do_list([], nil) do
  case list_invoices(%{limit: 100}) do
    [] -> []
    list -> do_list(list, Enum.at(list, -1) |> Map.get(:id))
  end
end

def do_list(acc, next) do
  case list_invoices(%{limit: 100, starting_after: next}) do
    [] -> acc
    list -> do_list(acc ++ list, Enum.at(list, -1) |> Map.get(:id))
  end
end
# Fetches one page of Stripe invoices and trims each to the handful of
# fields the payment classification needs.
defp list_invoices(params) do
  params
  |> Stripe.Invoice.list()
  |> elem(1)
  |> Map.get(:data, [])
  |> Enum.map(&Map.take(&1, [:id, :customer, :total, :starting_balance, :status, :created]))
end
# Converts a unix timestamp to an ISO 8601 UTC string; passes nil through.
defp format_dt(unix_timestmap) when is_integer(unix_timestmap) do
  unix_timestmap
  |> DateTime.from_unix!()
  |> DateTime.to_iso8601()
end

defp format_dt(nil), do: nil
# JSON content headers plus bearer authorization for all Intercom calls.
defp intercom_headers() do
  [
    {"Content-Type", "application/json"},
    {"Accept", "application/json"},
    {"Authorization", "Bearer #{intercom_api_key()}"}
  ]
end
end
| 29.656098 | 119 | 0.632371 |
e84338cf183eb746a2128eeef10857b03eaab44e | 1,599 | ex | Elixir | web/web.ex | harmon25/phonix_react | 390d2663d1d796224588d94c5073b3bac600bb7f | [
"MIT"
] | null | null | null | web/web.ex | harmon25/phonix_react | 390d2663d1d796224588d94c5073b3bac600bb7f | [
"MIT"
] | null | null | null | web/web.ex | harmon25/phonix_react | 390d2663d1d796224588d94c5073b3bac600bb7f | [
"MIT"
] | null | null | null | defmodule PhoenixReact.Web do
@moduledoc """
A module that keeps using definitions for controllers,
views and so on.
This can be used in your application as:
use PhoenixReact.Web, :controller
use PhoenixReact.Web, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below.
"""
# Quoted setup injected into Ecto models: schema DSL, changeset helpers
# and the from/1,2 query macros.
def model do
  quote do
    use Ecto.Model
    import Ecto.Changeset
    import Ecto.Query, only: [from: 1, from: 2]
  end
end
# Quoted setup injected into controllers: repo alias, Ecto helpers and the
# router path helpers.
def controller do
  quote do
    use Phoenix.Controller
    alias PhoenixReact.Repo
    import Ecto.Model
    import Ecto.Query, only: [from: 1, from: 2]
    import PhoenixReact.Router.Helpers
  end
end
# Quoted setup injected into views: template root, controller conveniences,
# Phoenix.HTML and the router path helpers.
def view do
  quote do
    use Phoenix.View, root: "web/templates"

    # Import convenience functions from controllers
    import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]

    # Use all HTML functionality (forms, tags, etc)
    use Phoenix.HTML

    import PhoenixReact.Router.Helpers
  end
end
# Quoted setup injected into the router module.
def router do
  quote do
    use Phoenix.Router
  end
end
# Quoted setup injected into channels: repo alias plus Ecto helpers.
def channel do
  quote do
    use Phoenix.Channel
    alias PhoenixReact.Repo
    import Ecto.Model
    import Ecto.Query, only: [from: 1, from: 2]
  end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 20.766234 | 88 | 0.666667 |
e8433e80b4e22447a6a29f0a1df55f343b31ba51 | 41,496 | ex | Elixir | lib/ecto/query/builder.ex | elielhaouzi/ecto | 66900f472713a27a38dfe78a72af4400855d1dc0 | [
"Apache-2.0"
] | 1 | 2019-11-11T21:48:20.000Z | 2019-11-11T21:48:20.000Z | lib/ecto/query/builder.ex | elielhaouzi/ecto | 66900f472713a27a38dfe78a72af4400855d1dc0 | [
"Apache-2.0"
] | 3 | 2021-06-20T14:51:14.000Z | 2021-06-25T00:56:11.000Z | deps/ecto/lib/ecto/query/builder.ex | carlosviana/blog | 1dcf58c3ca40bc3a7105d75de6f51954eb44bca8 | [
"MIT"
] | null | null | null | defmodule Ecto.Query.Builder do
@moduledoc false
alias Ecto.Query
@comparisons [
is_nil: 1,
==: 2,
!=: 2,
<: 2,
>: 2,
<=: 2,
>=: 2
]
@dynamic_aggregates [
max: 1,
min: 1,
first_value: 1,
last_value: 1,
nth_value: 2,
lag: 3,
lead: 3,
lag: 2,
lead: 2,
lag: 1,
lead: 1
]
@static_aggregates [
count: {0, :integer},
count: {1, :integer},
count: {2, :integer},
avg: {1, :any},
sum: {1, :any},
row_number: {0, :integer},
rank: {0, :integer},
dense_rank: {0, :integer},
percent_rank: {0, :any},
cume_dist: {0, :any},
ntile: {1, :integer}
]
@typedoc """
Quoted types store primitive types and types in the format
{source, quoted}. The latter are handled directly in the planner,
never forwarded to Ecto.Type.
The Ecto.Type module concerns itself only with runtime types,
which include all primitive types and custom user types. Also
note custom user types do not show up during compilation time.
"""
@type quoted_type :: Ecto.Type.primitive | {non_neg_integer, atom | Macro.t}
@doc """
Smart escapes a query expression and extracts interpolated values in
a map.
Everything that is a query expression will be escaped, interpolated
expressions (`^foo`) will be moved to a map unescaped and replaced
with `^index` in the query where index is a number indexing into the
map.
"""
@spec escape(Macro.t, quoted_type | {:in, quoted_type} | {:out, quoted_type}, {list, term},
Keyword.t, Macro.Env.t | {Macro.Env.t, fun}) :: {Macro.t, {list, term}}
def escape(expr, type, params_acc, vars, env)
# var.x - where var is bound
def escape({{:., _, [callee, field]}, _, []}, _type, params_acc, vars, _env) when is_atom(field) do
{escape_field!(callee, field, vars), params_acc}
end
# field macro
def escape({:field, _, [callee, field]}, _type, params_acc, vars, _env) do
{escape_field!(callee, field, vars), params_acc}
end
# param interpolation
def escape({:^, _, [arg]}, type, {params, acc}, _vars, _env) do
expr = {:{}, [], [:^, [], [length(params)]]}
params = [{arg, type} | params]
{expr, {params, acc}}
end
# tagged types
def escape({:type, _, [{:^, _, [arg]}, type]}, _type, {params, acc}, vars, env) do
type = validate_type!(type, vars, env)
expr = {:{}, [], [:type, [], [{:{}, [], [:^, [], [length(params)]]}, type]]}
params = [{arg, type} | params]
{expr, {params, acc}}
end
def escape({:type, _, [{{:., _, [{var, _, context}, field]}, _, []} = expr, type]}, _type, params_acc, vars, env)
when is_atom(var) and is_atom(context) and is_atom(field) do
escape_with_type(expr, type, params_acc, vars, env)
end
def escape({:type, _, [{:field, _, [_ | _]} = expr, type]}, _type, params_acc, vars, env) do
escape_with_type(expr, type, params_acc, vars, env)
end
def escape({:type, _, [{math_op, _, [_, _]} = op_expr, type]}, _type, params_acc, vars, env)
when math_op in ~w(+ - * /)a do
escape_with_type(op_expr, type, params_acc, vars, env)
end
def escape({:type, _, [{fun, _, args} = expr, type]}, _type, params_acc, vars, env)
when is_list(args) and fun in ~w(fragment avg count max min sum over filter)a do
escape_with_type(expr, type, params_acc, vars, env)
end
def escape({:type, meta, [expr, type]}, given_type, params_acc, vars, env) do
case Macro.expand_once(expr, get_env(env)) do
^expr ->
error! """
the first argument of type/2 must be one of:
* interpolations, such as ^value
* fields, such as p.foo or field(p)
* fragments, such fragment("foo(?)", value)
* an arithmetic expression (+, -, *, /)
* an aggregation or window expression (avg, count, min, max, sum, over, filter)
Got: #{Macro.to_string(expr)}
"""
expanded ->
escape({:type, meta, [expanded, type]}, given_type, params_acc, vars, env)
end
end
# fragments
def escape({:fragment, _, [query]}, _type, params_acc, vars, env) when is_list(query) do
{escaped, params_acc} =
Enum.map_reduce(query, params_acc, &escape_fragment(&1, :any, &2, vars, env))
{{:{}, [], [:fragment, [], [escaped]]}, params_acc}
end
def escape({:fragment, _, [{:^, _, [var]} = _expr]}, _type, params_acc, _vars, _env) do
expr = quote do
Ecto.Query.Builder.fragment!(unquote(var))
end
{{:{}, [], [:fragment, [], [expr]]}, params_acc}
end
def escape({:fragment, _, [query | frags]}, _type, params_acc, vars, env) do
pieces = expand_and_split_fragment(query, env)
if length(pieces) != length(frags) + 1 do
error! "fragment(...) expects extra arguments in the same amount of question marks in string. " <>
"It received #{length(frags)} extra argument(s) but expected #{length(pieces) - 1}"
end
{frags, params_acc} = Enum.map_reduce(frags, params_acc, &escape(&1, :any, &2, vars, env))
{{:{}, [], [:fragment, [], merge_fragments(pieces, frags)]}, params_acc}
end
# interval
def escape({:from_now, meta, [count, interval]}, type, params_acc, vars, env) do
utc = quote do: ^DateTime.utc_now()
escape({:datetime_add, meta, [utc, count, interval]}, type, params_acc, vars, env)
end
def escape({:ago, meta, [count, interval]}, type, params_acc, vars, env) do
utc = quote do: ^DateTime.utc_now()
count =
case count do
{:^, meta, [value]} ->
negate = quote do: Ecto.Query.Builder.negate!(unquote(value))
{:^, meta, [negate]}
value ->
{:-, [], [value]}
end
escape({:datetime_add, meta, [utc, count, interval]}, type, params_acc, vars, env)
end
def escape({:datetime_add, _, [datetime, count, interval]} = expr, type, params_acc, vars, env) do
assert_type!(expr, type, {:param, :any_datetime})
{datetime, params_acc} = escape(datetime, {:param, :any_datetime}, params_acc, vars, env)
{count, interval, params_acc} = escape_interval(count, interval, params_acc, vars, env)
{{:{}, [], [:datetime_add, [], [datetime, count, interval]]}, params_acc}
end
def escape({:date_add, _, [date, count, interval]} = expr, type, params_acc, vars, env) do
assert_type!(expr, type, :date)
{date, params_acc} = escape(date, :date, params_acc, vars, env)
{count, interval, params_acc} = escape_interval(count, interval, params_acc, vars, env)
{{:{}, [], [:date_add, [], [date, count, interval]]}, params_acc}
end
# json
def escape({:json_extract_path, _, [field, path]} = expr, type, params_acc, vars, env) do
case field do
{{:., _, _}, _, _} ->
path = escape_json_path(path)
{field, params_acc} = escape(field, type, params_acc, vars, env)
{{:{}, [], [:json_extract_path, [], [field, path]]}, params_acc}
_ ->
error!("`#{Macro.to_string(expr)}` is not a valid query expression")
end
end
def escape({{:., meta, [Access, :get]}, _, [left, _]} = expr, type, params_acc, vars, env) do
case left do
{{:., _, _}, _, _} ->
{expr, path} = parse_access_get(expr, [])
escape({:json_extract_path, meta, [expr, path]}, type, params_acc, vars, env)
_ ->
error!("`#{Macro.to_string(expr)}` is not a valid query expression")
end
end
# sigils
# String and word-list sigils without modifiers are compile-time literals
# and are embedded directly.
def escape({name, _, [_, []]} = sigil, type, params_acc, vars, _env)
    when name in ~w(sigil_s sigil_S sigil_w sigil_W)a do
  {literal(sigil, type, vars), params_acc}
end

# lists
# A list made only of plain literals becomes one literal; otherwise each
# element is escaped individually, using the inner type when the expected
# type is an array.
def escape(list, type, params_acc, vars, env) when is_list(list) do
  if Enum.all?(list, &is_binary(&1) or is_number(&1) or is_boolean(&1)) do
    {literal(list, type, vars), params_acc}
  else
    fun =
      case type do
        {:array, inner_type} ->
          &escape(&1, inner_type, &2, vars, env)
        _ ->
          # In case we don't have an array nor a literal at compile-time,
          # such as p.links == [^value], we don't do any casting nor validation.
          # We may want to tackle this if the expression above is ever used.
          &escape(&1, :any, &2, vars, env)
      end
    Enum.map_reduce(list, params_acc, fun)
  end
end
# literals
# Binary syntax (`<<...>>`) is allowed only with literal integer/binary
# segments; anything dynamic must be interpolated with `^`.
def escape({:<<>>, _, args} = expr, type, params_acc, vars, _env) do
  valid? = Enum.all?(args, fn
    {:::, _, [left, _]} -> is_integer(left) or is_binary(left)
    left -> is_integer(left) or is_binary(left)
  end)
  unless valid? do
    error! "`#{Macro.to_string(expr)}` is not a valid query expression. " <>
           "Only literal binaries and strings are allowed, " <>
           "dynamic values need to be explicitly interpolated in queries with ^"
  end
  {literal(expr, type, vars), params_acc}
end

# A negated number literal is folded into the literal itself.
def escape({:-, _, [number]}, type, params_acc, vars, _env) when is_number(number),
  do: {literal(-number, type, vars), params_acc}
def escape(number, type, params_acc, vars, _env) when is_number(number),
  do: {literal(number, type, vars), params_acc}
def escape(binary, type, params_acc, vars, _env) when is_binary(binary),
  do: {literal(binary, type, vars), params_acc}
# nil is kept bare — it carries no type to tag.
def escape(nil, _type, params_acc, _vars, _env),
  do: {nil, params_acc}
def escape(atom, type, params_acc, vars, _env) when is_atom(atom),
  do: {literal(atom, type, vars), params_acc}

# negate any expression
def escape({:-, meta, arg}, type, params_acc, vars, env) do
  {escaped_arg, params_acc} = escape(arg, type, params_acc, vars, env)
  expr = {:{}, [], [:-, meta, escaped_arg]}
  {expr, params_acc}
end
# comparison operators
# Comparisons type-check as booleans. Each side is escaped with the type
# inferred from the *other* side, so `p.id == ^value` casts the pinned value
# to the field's type (and vice versa). Literal nil comparisons are
# rejected; interpolated parameters are wrapped with a runtime nil check
# (wrap_nil/2) so `== ^nil` also fails loudly.
def escape({comp_op, _, [left, right]} = expr, type, params_acc, vars, env)
    when comp_op in ~w(== != < > <= >=)a do
  assert_type!(expr, type, :boolean)
  if is_nil(left) or is_nil(right) do
    error! "comparison with nil is forbidden as it is unsafe. " <>
           "If you want to check if a value is nil, use is_nil/1 instead"
  end
  ltype = quoted_type(right, vars)
  rtype = quoted_type(left, vars)
  {left, params_acc} = escape(left, ltype, params_acc, vars, env)
  {right, params_acc} = escape(right, rtype, params_acc, vars, env)
  {params, acc} = params_acc
  {{:{}, [], [comp_op, [], [left, right]]},
   {params |> wrap_nil(left) |> wrap_nil(right), acc}}
end

# mathematical operators
# Both operands share the expected type of the whole expression.
def escape({math_op, _, [left, right]}, type, params_acc, vars, env)
    when math_op in ~w(+ - * /)a do
  {left, params_acc} = escape(left, type, params_acc, vars, env)
  {right, params_acc} = escape(right, type, params_acc, vars, env)
  {{:{}, [], [math_op, [], [left, right]]}, params_acc}
end

# in operator
# `x in [...]` (or in a word-list sigil): the left side takes the inner
# type of the right side's array type, and the right side the array of the
# left side's type.
def escape({:in, _, [left, right]} = expr, type, params_acc, vars, env)
    when is_list(right)
    when is_tuple(right) and elem(right, 0) in ~w(sigil_w sigil_W)a do
  assert_type!(expr, type, :boolean)
  {:array, ltype} = quoted_type(right, vars)
  rtype = {:array, quoted_type(left, vars)}
  {left, params_acc} = escape(left, ltype, params_acc, vars, env)
  {right, params_acc} = escape(right, rtype, params_acc, vars, env)
  {{:{}, [], [:in, [], [left, right]]}, params_acc}
end

# `x in other` (subquery, pinned value, etc.): the sides are typed with the
# `{:out, _}`/`{:in, _}` wrappers so adapters can expand them properly.
def escape({:in, _, [left, right]} = expr, type, params_acc, vars, env) do
  assert_type!(expr, type, :boolean)
  ltype = {:out, quoted_type(right, vars)}
  rtype = {:in, quoted_type(left, vars)}
  {left, params_acc} = escape(left, ltype, params_acc, vars, env)
  {right, params_acc} = escape_subquery(right, rtype, params_acc, vars, env)
  # Remove any type wrapper from the right side
  right =
    case right do
      {:{}, [], [:type, [], [right, _]]} -> right
      _ -> right
    end
  {{:{}, [], [:in, [], [left, right]]}, params_acc}
end
# `count(x, :distinct)`
def escape({:count, _, [arg, :distinct]}, type, params_acc, vars, env) do
  {arg, params_acc} = escape(arg, type, params_acc, vars, env)
  expr = {:{}, [], [:count, [], [arg, :distinct]]}
  {expr, params_acc}
end

# `filter(agg)` with no condition is just the aggregate itself.
def escape({:filter, _, [aggregate]}, type, params_acc, vars, env) do
  escape(aggregate, type, params_acc, vars, env)
end

# `filter(agg, condition)`: the condition is escaped as a boolean.
def escape({:filter, _, [aggregate, filter_expr]}, type, params_acc, vars, env) do
  {aggregate, params_acc} = escape(aggregate, type, params_acc, vars, env)
  {filter_expr, params_acc} = escape(filter_expr, :boolean, params_acc, vars, env)
  {{:{}, [], [:filter, [], [aggregate, filter_expr]]}, params_acc}
end

# `coalesce(left, right)`: both sides share the expected type.
def escape({:coalesce, _, [left, right]}, type, params_acc, vars, env) do
  {left, params_acc} = escape(left, type, params_acc, vars, env)
  {right, params_acc} = escape(right, type, params_acc, vars, env)
  {{:{}, [], [:coalesce, [], [left, right]]}, params_acc}
end

# `over(window_fun, window)`: the window function is validated against
# Ecto.Query.WindowAPI; the window may be a name or an inline definition.
def escape({:over, _, [{agg_name, _, agg_args} | over_args]}, type, params_acc, vars, env) do
  aggregate = {agg_name, [], agg_args || []}
  {aggregate, params_acc} = escape_window_function(aggregate, type, params_acc, vars, env)
  {window, params_acc} = escape_window_description(over_args, params_acc, vars, env)
  {{:{}, [], [:over, [], [aggregate, window]]}, params_acc}
end

# Subquery quantifiers: `all(sub)`, `any(sub)`, `exists(sub)`.
def escape({quantifier, meta, [subquery]}, type, params_acc, vars, env) when quantifier in [:all, :any, :exists] do
  {subquery, params_acc} = escape_subquery({:subquery, meta, [subquery]}, type, params_acc, vars, env)
  {{:{}, [], [quantifier, [], [subquery]]}, params_acc}
end
# `=` is almost always a typo for `==` inside queries, so reject it with
# a pointed message.
def escape({:=, _, _} = expr, _type, _params_acc, _vars, _env) do
  error! "`#{Macro.to_string(expr)}` is not a valid query expression. " <>
          "The match operator is not supported: `=`. " <>
          "Did you mean to use `==` instead?"
end

# Short-circuit operators are not supported; point users at and/or/not.
def escape({op, _, _}, _type, _params_acc, _vars, _env) when op in ~w(|| && !)a do
  error! "short-circuit operators are not supported: `#{op}`. " <>
         "Instead use boolean operators: `and`, `or`, and `not`"
end

# Tuple
# A two-element tuple literal is normalized to the general `{:{}, _, _}`
# tuple AST before escaping.
def escape({left, right}, type, params_acc, vars, env) do
  escape({:{}, [], [left, right]}, type, params_acc, vars, env)
end

# Tuple
# Tuples are only escapable against a tuple type of the same size; each
# element is escaped with its positional type. A size mismatch falls back
# to `:any`, which ends in the error clause below.
def escape({:{}, _, list}, {:tuple, types}, params_acc, vars, env) do
  if Enum.count(list) == Enum.count(types) do
    {list, params_acc} =
      list
      |> Enum.zip(types)
      |> Enum.map_reduce(params_acc, fn {expr, type}, params_acc ->
        escape(expr, type, params_acc, vars, env)
      end)
    expr = {:{}, [], [:{}, [], list]}
    {expr, params_acc}
  else
    escape({:{}, [], list}, :any, params_acc, vars, env)
  end
end

# Tuple
def escape({:{}, _, _}, _, _, _, _) do
  error! "Tuples can only be used in comparisons with literal tuples of the same size"
end
# Other functions - no type casting
# Known Ecto.Query.API calls get their argument/return types from
# `call_type/2`; unknown names fall back to macro expansion via
# `try_expansion/5`.
def escape({name, _, args} = expr, type, params_acc, vars, env) when is_atom(name) and is_list(args) do
  case call_type(name, length(args)) do
    {in_type, out_type} ->
      assert_type!(expr, type, out_type)
      escape_call(expr, in_type, params_acc, vars, env)
    nil ->
      try_expansion(expr, type, params_acc, vars, env)
  end
end

# Finally handle vars
def escape({var, _, context}, _type, params_acc, vars, _env) when is_atom(var) and is_atom(context) do
  {escape_var!(var, vars), params_acc}
end

# Raise nice error messages for fun calls.
def escape({fun, _, args} = other, _type, _params_acc, _vars, _env)
    when is_atom(fun) and is_list(args) do
  error! """
  `#{Macro.to_string(other)}` is not a valid query expression. \
  If you are trying to invoke a function that is not supported by Ecto, \
  you can use fragments:
  fragment("some_function(?, ?, ?)", m.some_field, 1)
  See Ecto.Query.API to learn more about the supported functions and \
  Ecto.Query.API.fragment/1 to learn more about fragments.
  """
end

# Raise nice error message for remote calls
def escape({{:., _, [mod, fun]}, _, args} = other, _type, _params_acc, _vars, _env)
    when is_atom(fun) do
  fun_arity = "#{fun}/#{length(args)}"
  error! """
  `#{Macro.to_string(other)}` is not a valid query expression. \
  If you want to invoke #{Macro.to_string(mod)}.#{fun_arity} in \
  a query, make sure that the module #{Macro.to_string(mod)} \
  is required and that #{fun_arity} is a macro
  """
end

# For everything else we raise
def escape(other, _type, _params_acc, _vars, _env) do
  error! "`#{Macro.to_string(other)}` is not a valid query expression"
end
# `type(expr, ^type)`: the type itself is interpolated, so it is used
# verbatim at runtime and the inner expression is escaped as `:any`.
defp escape_with_type(expr, {:^, _, [type]}, params_acc, vars, env) do
  {expr, params_acc} = escape(expr, :any, params_acc, vars, env)
  {{:{}, [], [:type, [], [expr, type]]}, params_acc}
end

# `type(expr, type)`: the literal type is validated at compile time and
# the expression escaped with it.
defp escape_with_type(expr, type, params_acc, vars, env) do
  type = validate_type!(type, vars, env)
  {expr, params_acc} = escape(expr, type, params_acc, vars, env)
  {{:{}, [], [:type, [], [expr, Macro.escape(type)]]}, params_acc}
end
# `subquery(expr)` in an expression position: build the subquery at
# runtime and store a `{:subquery, index}` placeholder both in the AST
# and in the params list, to be resolved later.
defp escape_subquery({:subquery, _, [expr]}, _, {params, subqueries}, _vars, _env) do
  subquery = quote(do: Ecto.Query.subquery(unquote(expr)))
  index = length(subqueries)
  expr = {:subquery, index} # used both in ast and in parameters, as a placeholder.
  {expr, {[expr | params], [subquery | subqueries]}}
end

# Anything that is not a subquery call is escaped normally.
defp escape_subquery(expr, type, params, vars, env) do
  escape(expr, type, params, vars, env)
end
# Wraps the interpolated parameter referenced by an escaped `^ix` node with
# a runtime `not_nil!/1` check (used by the comparison operators above).
# `params` is accumulated in reverse order, hence the
# `length(params) - ix - 1` index translation. Non-pin nodes are left alone.
defp wrap_nil(params, {:{}, _, [:^, _, [ix]]}), do: wrap_nil(params, length(params) - ix - 1, [])
defp wrap_nil(params, _other), do: params

# Walk to the target parameter, wrap its value, and rebuild the list.
defp wrap_nil([{val, type} | params], 0, acc) do
  val = quote do: Ecto.Query.Builder.not_nil!(unquote(val))
  Enum.reverse(acc, [{val, type} | params])
end
defp wrap_nil([pair | params], i, acc) do
  wrap_nil(params, i - 1, [pair | acc])
end
# Expands the fragment's first argument at compile time and splits it on
# `?` placeholders. Anything that does not expand to a binary is rejected:
# allowing an interpolated fragment string would open the door to SQL
# injection.
defp expand_and_split_fragment(query, env) do
  case Macro.expand(query, get_env(env)) do
    binary when is_binary(binary) ->
      split_fragment(binary, "")
    _ ->
      error! bad_fragment_message(Macro.to_string(query))
  end
end

defp bad_fragment_message(arg) do
  "to prevent SQL injection attacks, fragment(...) does not allow strings " <>
  "to be interpolated as the first argument via the `^` operator, got: `#{arg}`"
end
# Splits a fragment string on `?` placeholders, accumulating literal text
# in `acc`. A backslash-escaped `\?` produces a literal question mark
# instead of a placeholder. Returns the list of literal pieces; each
# placeholder sits between two consecutive pieces.
defp split_fragment(<<>>, acc) do
  [acc]
end

defp split_fragment(<<??, tail::binary>>, acc) do
  [acc | split_fragment(tail, "")]
end

defp split_fragment(<<?\\, ??, tail::binary>>, acc) do
  split_fragment(tail, acc <> <<??>>)
end

defp split_fragment(<<char::utf8, tail::binary>>, acc) do
  split_fragment(tail, acc <> <<char::utf8>>)
end
@doc "Returns fragment pieces, given a fragment string and arguments."
def fragment_pieces(frag, args) do
  # Split the template on `?` placeholders, then interleave the raw
  # pieces with the argument expressions.
  merge_fragments(split_fragment(frag, ""), args)
end
# over/1 with no window: empty description.
defp escape_window_description([], params_acc, _vars, _env),
  do: {[], params_acc}
# over/2 with a named window (defined via Ecto.Query.windows/3).
defp escape_window_description([window_name], params_acc, _vars, _env) when is_atom(window_name),
  do: {window_name, params_acc}
# over/2 with an inline window definition (keyword list). Interpolations
# at the root are rejected here because they cannot be merged back into
# the inline definition; a named window must be used instead.
defp escape_window_description([kw], params_acc, vars, env) do
  case Ecto.Query.Builder.Windows.escape(kw, params_acc, vars, env) do
    {runtime, [], params_acc} ->
      {runtime, params_acc}
    {_, [{key, _} | _], _} ->
      error! "windows definitions given to over/2 do not allow interpolations at the root of " <>
             "`#{key}`. Please use Ecto.Query.windows/3 to explicitly define a window instead"
  end
end
# Validates that the expression given to over/2 is a window function
# (or expands to one) before escaping it like any other expression.
defp escape_window_function(expr, type, params_acc, vars, env) do
  expr
  |> validate_window_function!(env)
  |> escape(type, params_acc, vars, env)
end

# Fragments are always allowed as window functions.
defp validate_window_function!({:fragment, _, _} = expr, _env), do: expr
# A bare call must exist in Ecto.Query.WindowAPI; otherwise try expanding
# it once (it may be a user macro) and validate the expansion. If the
# expansion is a no-op, the function is truly unknown and we raise.
defp validate_window_function!({agg, _, args} = expr, env)
     when is_atom(agg) and is_list(args) do
  if Code.ensure_loaded?(Ecto.Query.WindowAPI) and
       function_exported?(Ecto.Query.WindowAPI, agg, length(args)) do
    expr
  else
    case Macro.expand_once(expr, get_env(env)) do
      ^expr ->
        error! "unknown window function #{agg}/#{length(args)}. " <>
               "See Ecto.Query.WindowAPI for all available functions"
      expr ->
        validate_window_function!(expr, env)
    end
  end
end
defp validate_window_function!(expr, _), do: expr
# Escapes each argument of a known call with the given input type and
# rebuilds the call as escaped AST.
defp escape_call({name, _, args}, type, params_acc, vars, env) do
  {args, params_acc} = Enum.map_reduce(args, params_acc, &escape(&1, type, &2, vars, env))
  expr = {:{}, [], [name, [], args]}
  {expr, params_acc}
end
# Field access on a query binding: `x.field` becomes `&ix.field`.
defp escape_field!({var, _, context}, field, vars)
     when is_atom(var) and is_atom(context) do
  var = escape_var!(var, vars)
  field = quoted_field!(field)
  dot = {:{}, [], [:., [], [var, field]]}
  {:{}, [], [dot, [], []]}
end
# Field access on a named binding: `as(:name).field` or
# `parent_as(:name).field` (the latter inside subqueries).
defp escape_field!({kind, _, [atom]}, field, _vars)
     when kind in [:as, :parent_as] and is_atom(atom) do
  as = {:{}, [], [kind, [], [atom]]}
  field = quoted_field!(field)
  dot = {:{}, [], [:., [], [as, field]]}
  {:{}, [], [dot, [], []]}
end
# Anything else cannot own a field.
defp escape_field!(expr, field, _vars) do
  error!("""
  cannot fetch field `#{field}` from `#{Macro.to_string(expr)}`. Can only fetch fields from:
  * sources, such as `p` in `from p in Post`
  * named bindings, such as `as(:post)` in `from Post, as: :post`
  * parent named bindings, such as `parent_as(:post)` in a subquery
  """)
end
# Escapes the count of a datetime_add/date_add. A literal count fixes the
# type (float/integer); anything else (interpolation, expression) is
# treated as decimal. The interval is validated at compile time, or
# deferred to runtime for interpolations (quoted_interval!/1).
defp escape_interval(count, interval, params_acc, vars, env) do
  type =
    cond do
      is_float(count) -> :float
      is_integer(count) -> :integer
      true -> :decimal
    end
  {count, params_acc} = escape(count, type, params_acc, vars, env)
  {count, quoted_interval!(interval), params_acc}
end
# Escapes one entry of a keyword fragment. A value that is itself a
# keyword list recurses; other values are escaped as expressions.
# Keys must be atoms.
defp escape_fragment({key, [{_, _}|_] = exprs}, type, params_acc, vars, env) when is_atom(key) do
  {escaped, params_acc} = Enum.map_reduce(exprs, params_acc, &escape_fragment(&1, type, &2, vars, env))
  {{key, escaped}, params_acc}
end
defp escape_fragment({key, expr}, type, params_acc, vars, env) when is_atom(key) do
  {escaped, params_acc} = escape(expr, type, params_acc, vars, env)
  {{key, escaped}, params_acc}
end
# Reached only when `key` is not an atom.
defp escape_fragment({key, _expr}, _type, _params_acc, _vars, _env) do
  error! "fragment(...) with keywords accepts only atoms as keys, got `#{Macro.to_string(key)}`"
end
# Interleaves raw fragment pieces with their argument expressions.
# The pieces list always has exactly one more element than the args list,
# so the result both starts and ends with a `{:raw, _}` entry.
defp merge_fragments([piece | pieces], [arg | args]) do
  [{:raw, piece}, {:expr, arg} | merge_fragments(pieces, args)]
end

defp merge_fragments([piece], []) do
  [{:raw, piece}]
end
# Maps a function name/arity to its {input_type, output_type} contract.
# Aggregate and comparison clauses are generated at compile time from
# module attributes defined earlier in this module. Unknown calls return
# nil, which makes escape/5 attempt macro expansion instead.
for {agg, arity} <- @dynamic_aggregates do
  defp call_type(unquote(agg), unquote(arity)), do: {:any, :any}
end
for {agg, {arity, return}} <- @static_aggregates do
  defp call_type(unquote(agg), unquote(arity)), do: {:any, unquote(return)}
end
for {comp, arity} <- @comparisons do
  defp call_type(unquote(comp), unquote(arity)), do: {:any, :boolean}
end
defp call_type(:or, 2), do: {:boolean, :boolean}
defp call_type(:and, 2), do: {:boolean, :boolean}
defp call_type(:not, 1), do: {:boolean, :boolean}
defp call_type(:like, 2), do: {:string, :boolean}
defp call_type(:ilike, 2), do: {:string, :boolean}
defp call_type(_, _), do: nil
# Checks the expression's actual type against the expected one. Composite
# and custom (non-primitive) expected types are skipped, since they
# cannot be compared structurally at compile time.
defp assert_type!(expr, type, actual) do
  cond do
    not is_atom(type) and not Ecto.Type.primitive?(type) ->
      :ok
    Ecto.Type.match?(type, actual) ->
      :ok
    true ->
      error! "expression `#{Macro.to_string(expr)}` does not type check. " <>
             "It returns a value of type #{inspect actual} but a value of " <>
             "type #{inspect type} is expected"
  end
end
@doc """
Validates the type with the given vars.

Accepts composite types, interpolated types (validated at runtime),
module aliases (custom `Ecto.Type`s, expanded at compile time), plain
atoms, and `source.field` / `field(source, :field)` references (which
copy the type of a schema field). Anything else raises.
"""
def validate_type!({composite, type}, vars, env),
  do: {composite, validate_type!(type, vars, env)}
def validate_type!({:^, _, [type]}, _vars, _env),
  do: type
def validate_type!({:__aliases__, _, _} = type, _vars, env),
  do: Macro.expand(type, get_env(env))
def validate_type!(type, _vars, _env) when is_atom(type),
  do: type
def validate_type!({{:., _, [{var, _, context}, field]}, _, []}, vars, _env)
    when is_atom(var) and is_atom(context) and is_atom(field),
    do: {find_var!(var, vars), field}
def validate_type!({:field, _, [{var, _, context}, field]}, vars, _env)
    when is_atom(var) and is_atom(context) and is_atom(field),
    do: {find_var!(var, vars), field}
def validate_type!(type, _vars, _env) do
  error! "type/2 expects an alias, atom or source.field as second argument, got: `#{Macro.to_string(type)}`"
end
# Types whose literals are always wrapped in Ecto.Query.Tagged, so the
# adapter can tell them apart (e.g. binary vs. string).
@always_tagged [:binary]

# Wraps a literal value in Ecto.Query.Tagged when its inferred type needs
# tagging or differs from the expected type; leaves it bare otherwise.
defp literal(value, expected, vars),
  do: do_literal(value, expected, quoted_type(value, vars))

defp do_literal(value, _, current) when current in @always_tagged,
  do: {:%, [], [Ecto.Query.Tagged, {:%{}, [], [value: value, type: current]}]}
defp do_literal(value, :any, _current),
  do: value
# Inferred type matches the expected type: no tag needed.
defp do_literal(value, expected, expected),
  do: value
defp do_literal(value, expected, _current),
  do: {:%, [], [Ecto.Query.Tagged, {:%{}, [], [value: value, type: expected]}]}
@doc """
Escape the params entries list.

Parameters are accumulated by prepending during escaping, so the list is
reversed here to restore their original order.
"""
@spec escape_params(list()) :: list()
def escape_params(list) do
  Enum.reverse(list)
end
@doc """
Escapes a variable according to the given binds.

A escaped variable is represented internally as
`&0`, `&1` and so on.
"""
@spec escape_var!(atom, Keyword.t) :: Macro.t
def escape_var!(var, vars) do
  # find_var!/2 raises if the variable is not bound in the query.
  {:{}, [], [:&, [], [find_var!(var, vars)]]}
end
@doc """
Escapes a list of bindings as a list of atoms.

Only variables or `{:atom, value}` tuples are allowed in the `bindings` list,
otherwise an `Ecto.Query.CompileError` is raised.

## Examples

    iex> escape_binding(%Ecto.Query{}, quote(do: [x, y, z]), __ENV__)
    {%Ecto.Query{}, [x: 0, y: 1, z: 2]}

    iex> escape_binding(%Ecto.Query{}, quote(do: [{x, 0}, {z, 2}]), __ENV__)
    {%Ecto.Query{}, [x: 0, z: 2]}

    iex> escape_binding(%Ecto.Query{}, quote(do: [x, y, x]), __ENV__)
    ** (Ecto.Query.CompileError) variable `x` is bound twice

    iex> escape_binding(%Ecto.Query{}, quote(do: [a, b, :foo]), __ENV__)
    ** (Ecto.Query.CompileError) binding list should contain only variables or `{as, var}` tuples, got: :foo
"""
@spec escape_binding(Macro.t, list, Macro.Env.t) :: {Macro.t, Keyword.t}
def escape_binding(query, binding, _env) when is_list(binding) do
  vars = binding |> Enum.with_index |> Enum.map(&escape_bind/1)
  assert_no_duplicate_binding!(vars)
  # Positional binds come first; named `{as, var}` binds must be at the tail.
  # NOTE: this line previously contained a mojibake character (`¬`) from an
  # HTML-entity-corrupted `&not`; restored to a valid capture expression.
  {positional_vars, named_vars} = Enum.split_while(vars, &(not named_bind?(&1)))
  assert_named_binds_in_tail!(named_vars, binding)
  {query, positional_binds} = calculate_positional_binds(query, positional_vars)
  {query, named_binds} = calculate_named_binds(query, named_vars)
  {query, positional_binds ++ named_binds}
end
def escape_binding(_query, bind, _env) do
  error! "binding should be list of variables and `{as, var}` tuples " <>
         "at the end, got: #{Macro.to_string(bind)}"
end
# A bind tuple is either `{:pos, var, index}` or `{:named, var, name}`.
defp named_bind?({kind, _, _}), do: kind == :named

# Named binds may only appear at the end of the binding list, since they
# are resolved against the query's aliases rather than by position.
defp assert_named_binds_in_tail!(named_vars, binding) do
  if Enum.all?(named_vars, &named_bind?/1) do
    :ok
  else
    error! "named binds in the form of `{as, var}` tuples must be at the end " <>
           "of the binding list, got: #{Macro.to_string(binding)}"
  end
end
# Raises if any variable name other than `_` is bound more than once.
# `list -- Enum.uniq(list)` leaves exactly the repeated occurrences.
defp assert_no_duplicate_binding!(vars) do
  bound_vars = for {_, var, _} <- vars, var != :_, do: var
  case bound_vars -- Enum.uniq(bound_vars) do
    [] -> :ok
    [var | _] -> error! "variable `#{var}` is bound twice"
  end
end
# Computes `{var, index}` pairs for positional binds. When `...` is used,
# the binds after it are numbered from the *end* of the query's bindings,
# which is only known at runtime; the query is therefore wrapped in code
# that first computes `escape_count` via count_binds/1.
defp calculate_positional_binds(query, vars) do
  case Enum.split_while(vars, &elem(&1, 1) != :...) do
    {vars, []} ->
      vars = for {:pos, var, count} <- vars, do: {var, count}
      {query, vars}
    {vars, [_ | tail]} ->
      query =
        quote do
          query = Ecto.Queryable.to_query(unquote(query))
          escape_count = Ecto.Query.Builder.count_binds(query)
          query
        end
      tail =
        tail
        |> Enum.with_index(-length(tail))
        |> Enum.map(fn {{:pos, k, _}, count} -> {k, quote(do: escape_count + unquote(count))} end)
      vars = for {:pos, var, count} <- vars, do: {var, count}
      {query, vars ++ tail}
  end
end
# Computes `{var, index}` pairs for named binds. The index of a named
# bind is only known at runtime, so each entry resolves via
# `count_alias!/2` against the (runtime) query.
def calculate_named_binds(query, []), do: {query, []}
def calculate_named_binds(query, vars) do
  query =
    quote do
      query = Ecto.Queryable.to_query(unquote(query))
    end
  vars =
    for {:named, key, name} <- vars do
      {key,
       quote do
         Ecto.Query.Builder.count_alias!(query, unquote(name))
       end}
    end
  {query, vars}
end
@doc """
Count the alias for the given query.

Returns the bind index registered for the alias `name` in the query,
raising `Ecto.QueryError` when the alias is unknown.
"""
def count_alias!(%{aliases: aliases} = query, name) do
  case aliases do
    %{^name => ix} ->
      ix
    %{} ->
      raise Ecto.QueryError, message: "unknown bind name `#{inspect name}`", query: query
  end
end
# Normalizes one binding entry (paired with its list index) into
# `{:pos, var, index}` or `{:named, var, name}` tuples.
# First clause: `{var, index}` tuples that carry an explicit position.
defp escape_bind({{{var, _, context}, ix}, _}) when is_atom(var) and is_atom(context),
  do: {:pos, var, ix}
# Bare variable: position comes from its place in the binding list.
defp escape_bind({{var, _, context}, ix}) when is_atom(var) and is_atom(context),
  do: {:pos, var, ix}
# `{name, var}` with a literal atom name.
defp escape_bind({{name, {var, _, context}}, _ix}) when is_atom(name) and is_atom(var) and is_atom(context),
  do: {:named, var, name}
# `{^name, var}` with an interpolated name, resolved at runtime.
defp escape_bind({{{:^, _, [expr]}, {var, _, context}}, _ix}) when is_atom(var) and is_atom(context),
  do: {:named, var, expr}
defp escape_bind({bind, _ix}),
  do: error!("binding list should contain only variables or " <>
             "`{as, var}` tuples, got: #{Macro.to_string(bind)}")
# Expands the expression once as a macro and re-escapes the result. When
# expansion is a no-op the expression is truly unknown and we raise with
# guidance. The `{env, fun}` form lets other builders reuse this with
# their own escape function.
defp try_expansion(expr, type, params, vars, %Macro.Env{} = env) do
  try_expansion(expr, type, params, vars, {env, &escape/5})
end

defp try_expansion(expr, type, params, vars, {env, fun}) do
  case Macro.expand_once(expr, env) do
    ^expr ->
      error! """
      `#{Macro.to_string(expr)}` is not a valid query expression.
      * If you intended to call an Elixir function or introduce a value,
      you need to explicitly interpolate it with ^
      * If you intended to call a database function, please check the documentation
      for Ecto.Query.API to see the supported database expressions
      * If you intended to extend Ecto's query DSL, make sure that you have required
      the module or imported the relevant function. Note that you need macros to
      extend Ecto's querying capabilities
      """
    expanded ->
      fun.(expanded, type, params, vars, env)
  end
end
@doc """
Finds the index value for the given var in vars or raises.
"""
def find_var!(var, vars) do
  # Index 0 is truthy in Elixir (only nil/false are falsy), so `||` is safe.
  vars[var] || error! "unbound variable `#{var}` in query. If you are attempting to interpolate a value, use ^var"
end
@doc """
Checks if the field is an atom at compilation time or
delegate the check to runtime for interpolation.
"""
def quoted_field!({:^, _, [expr]}),
  do: quote(do: Ecto.Query.Builder.field!(unquote(expr)))
def quoted_field!(atom) when is_atom(atom),
  do: atom
def quoted_field!(other),
  do: error!("expected literal atom or interpolated value in field/2, got: `#{Macro.to_string(other)}`")
@doc """
Called by escaper at runtime to verify that value is an atom.
"""
def field!(atom) when is_atom(atom),
  do: atom
def field!(other),
  do: error!("expected atom in field/2, got: `#{inspect other}`")
# Escapes a compile-time JSON path: the path must be a literal list whose
# elements are literal strings, literal integers, or `^`-interpolated
# values (checked at runtime by json_path_element!/1).
defp escape_json_path(path) when is_list(path) do
  # NOTE: this line was corrupted by HTML-entity decoding
  # (`&quot;ed_json_path_element!/1` — i.e. `&quot` eaten out of
  # `&quoted_json_path_element!/1`); restored to the intended capture.
  Enum.map(path, &quoted_json_path_element!/1)
end

defp escape_json_path(other) do
  error!("expected JSON path to be compile-time list, got: `#{Macro.to_string(other)}`")
end

# Interpolated elements defer validation to runtime; literal strings and
# integers pass through; everything else raises.
defp quoted_json_path_element!({:^, _, [expr]}),
  do: quote(do: Ecto.Query.Builder.json_path_element!(unquote(expr)))
defp quoted_json_path_element!(binary) when is_binary(binary),
  do: binary
defp quoted_json_path_element!(integer) when is_integer(integer),
  do: integer
defp quoted_json_path_element!(other),
  do:
    error!(
      "expected JSON path to contain literal strings, literal integers, or interpolated values, got: " <>
        "`#{Macro.to_string(other)}`"
    )
@doc """
Called by escaper at runtime to verify that value is a string or an integer.
"""
def json_path_element!(binary) when is_binary(binary),
  do: binary
def json_path_element!(integer) when is_integer(integer),
  do: integer
def json_path_element!(other),
  do: error!("expected string or integer in json_extract_path/2, got: `#{inspect other}`")
@doc """
Called by escaper at runtime to verify that a value is not nil.

Returns the value unchanged when it is not nil; raises `ArgumentError`
otherwise.
"""
def not_nil!(value) when is_nil(value) do
  raise ArgumentError, "comparison with nil is forbidden as it is unsafe. " <>
                       "If you want to check if a value is nil, use is_nil/1 instead"
end

def not_nil!(value) do
  value
end
@doc """
Checks if the field is a valid interval at compilation time or
delegate the check to runtime for interpolation.
"""
def quoted_interval!({:^, _, [expr]}),
  do: quote(do: Ecto.Query.Builder.interval!(unquote(expr)))
def quoted_interval!(other),
  do: interval!(other)
@doc """
Called by escaper at runtime to verify keywords.

Raises `ArgumentError` when the interpolated value is not a keyword
list — interpolating a bare string here would allow SQL injection.
"""
def fragment!(kw) do
  if Keyword.keyword?(kw) do
    kw
  else
    raise ArgumentError, bad_fragment_message(inspect(kw))
  end
end
@doc """
Called by escaper at runtime to verify that value is a valid interval.
"""
# Intervals are kept as strings (not atoms) since they may come from
# interpolated user input.
@interval ~w(year month week day hour minute second millisecond microsecond)
def interval!(interval) when interval in @interval,
  do: interval
def interval!(other_string) when is_binary(other_string),
  do: error!("invalid interval: `#{inspect other_string}` (expected one of #{Enum.join(@interval, ", ")})")
def interval!(not_string),
  do: error!("invalid interval: `#{inspect not_string}` (expected a string)")
@doc """
Negates the given number.
"""
# TODO: Remove check when we depend on decimal v2.0
# Decimal.minus/1 was replaced by Decimal.negate/1 in decimal v2.0;
# pick whichever exists at compile time.
if Code.ensure_loaded?(Decimal) and function_exported?(Decimal, :negate, 1) do
  def negate!(%Decimal{} = decimal), do: Decimal.negate(decimal)
else
  def negate!(%Decimal{} = decimal), do: Decimal.minus(decimal)
end
def negate!(number) when is_number(number), do: -number
@doc """
Returns the type of an expression at build time.
"""
@spec quoted_type(Macro.t, Keyword.t) :: quoted_type
# Fields
def quoted_type({{:., _, [{var, _, context}, field]}, _, []}, vars)
    when is_atom(var) and is_atom(context) and is_atom(field),
    do: {find_var!(var, vars), field}
def quoted_type({:field, _, [{var, _, context}, field]}, vars)
    when is_atom(var) and is_atom(context) and is_atom(field),
    do: {find_var!(var, vars), field}
# Unquoting code here means the second argument of field will
# always be unquoted twice, one by the type checking and another
# in the query itself. We are assuming this is not an issue
# as the solution is somewhat complicated.
def quoted_type({:field, _, [{var, _, context}, {:^, _, [code]}]}, vars)
    when is_atom(var) and is_atom(context),
    do: {find_var!(var, vars), code}
# Interval
def quoted_type({:datetime_add, _, [_, _, _]}, _vars), do: :naive_datetime
def quoted_type({:date_add, _, [_, _, _]}, _vars), do: :date
# Tagged
def quoted_type({:<<>>, _, _}, _vars), do: :binary
def quoted_type({:type, _, [_, type]}, _vars), do: type
# Sigils
def quoted_type({sigil, _, [_, []]}, _vars) when sigil in ~w(sigil_s sigil_S)a, do: :string
def quoted_type({sigil, _, [_, []]}, _vars) when sigil in ~w(sigil_w sigil_W)a, do: {:array, :string}
# Lists
# A homogeneous list yields `{:array, type}`; otherwise `{:array, :any}`.
def quoted_type(list, vars) when is_list(list) do
  # NOTE: restored `&quoted_type(&1, vars)` — the capture had been
  # corrupted by HTML-entity decoding (`&quot` turned into `"`).
  case list |> Enum.map(&quoted_type(&1, vars)) |> Enum.uniq() do
    [type] -> {:array, type}
    _ -> {:array, :any}
  end
end
# Negative numbers
def quoted_type({:-, _, [number]}, _vars) when is_integer(number), do: :integer
def quoted_type({:-, _, [number]}, _vars) when is_float(number), do: :float
# Dynamic aggregates
# Generated clauses: the aggregate's type is the type of its first argument.
for {agg, arity} <- @dynamic_aggregates do
  args = 1..arity |> Enum.map(fn _ -> Macro.var(:_, __MODULE__) end) |> tl()
  def quoted_type({unquote(agg), _, [expr, unquote_splicing(args)]}, vars) do
    quoted_type(expr, vars)
  end
end
# Literals
def quoted_type(literal, _vars) when is_float(literal), do: :float
def quoted_type(literal, _vars) when is_binary(literal), do: :string
def quoted_type(literal, _vars) when is_boolean(literal), do: :boolean
def quoted_type(literal, _vars) when is_atom(literal) and not is_nil(literal), do: :atom
def quoted_type(literal, _vars) when is_integer(literal), do: :integer
# Tuples
def quoted_type({left, right}, vars), do: quoted_type({:{}, [], [left, right]}, vars)
# NOTE: same HTML-entity corruption fixed in the capture below.
def quoted_type({:{}, _, elems}, vars), do: {:tuple, Enum.map(elems, &quoted_type(&1, vars))}
# Known calls report their output type; anything else is :any.
def quoted_type({name, _, args}, _vars) when is_atom(name) and is_list(args) do
  case call_type(name, length(args)) do
    {_in, out} -> out
    nil -> :any
  end
end
def quoted_type(_, _vars), do: :any
# The env argument is either a bare `Macro.Env` or an `{env, escape_fun}`
# pair (see try_expansion/5); unwrap to the env in both cases.
defp get_env({macro_env, _escape_fun}) do
  macro_env
end

defp get_env(macro_env) do
  macro_env
end
@doc """
Raises a query building error.
"""
def error!(message) when is_binary(message) do
  # Trim this module's frames (and Enum helpers) off the stacktrace so the
  # error points at the user's query, not at builder internals.
  {:current_stacktrace, [_|t]} = Process.info(self(), :current_stacktrace)
  t = Enum.drop_while t, fn
    {mod, _, _, _} ->
      String.starts_with?(Atom.to_string(mod), ["Elixir.Ecto.Query.", "Elixir.Enum"])
    _ ->
      false
  end
  reraise Ecto.Query.CompileError, [message: message], t
end
@doc """
Counts the bindings in a query expression.

## Examples

    iex> count_binds(%Ecto.Query{joins: [1,2,3]})
    4

"""
@spec count_binds(Ecto.Query.t) :: non_neg_integer
def count_binds(%Query{joins: joins}) do
  # The `from` source always contributes one bind, plus one per join.
  1 + length(joins)
end
@doc """
Bump interpolations by the length of parameters.

Every `^ix` node (with a literal integer index) inside the expression is
shifted by the number of given parameters; all other nodes are untouched.
"""
def bump_interpolations(expr, []), do: expr

def bump_interpolations(expr, params) do
  offset = length(params)

  Macro.prewalk(expr, fn
    {:^, meta, [ix]} when is_integer(ix) -> {:^, meta, [offset + ix]}
    node -> node
  end)
end
@doc """
Applies a query at compilation time or at runtime.

This function is responsible for checking if a given query is an
`Ecto.Query` struct at compile time. If it is not it will act
accordingly.

If a query is available, it invokes the `apply` function in the
given `module`, otherwise, it delegates the call to runtime.

It is important to keep in mind the complexities introduced
by this function. In particular, a %Query{} is a mixture of escaped
and unescaped expressions which makes it impossible for this
function to properly escape or unescape it at compile/runtime.
For this reason, the apply function should be ready to handle
arguments in both escaped and unescaped form.

For example, take into account the `Builder.OrderBy`:

    select = %Ecto.Query.QueryExpr{expr: expr, file: env.file, line: env.line}
    Builder.apply_query(query, __MODULE__, [order_by], env)

`expr` is already an escaped expression and we must not escape
it again. However, it is wrapped in an Ecto.Query.QueryExpr,
which must be escaped! Furthermore, the `apply/2` function
in `Builder.OrderBy` very likely will inject the QueryExpr inside
Query, which again, is a mixture of escaped and unescaped expressions.

That said, you need to obey the following rules:

  1. In order to call this function, the arguments must be escapable
     values supported by the `escape/1` function below;

  2. The apply function may not manipulate the given arguments,
     with exception to the query.

In particular, when invoked at compilation time, all arguments
(except the query) will be escaped, so they can be injected into
the query properly, but they will be in their runtime form
when invoked at runtime.
"""
@spec apply_query(Macro.t, Macro.t, Macro.t, Macro.Env.t) :: Macro.t
def apply_query(query, module, args, env) do
  case Macro.expand(query, env) |> unescape_query() do
    # Compile-time %Query{}: apply immediately and re-escape the result.
    %Query{} = compiletime_query ->
      apply(module, :apply, [compiletime_query | args])
      |> escape_query()
    # Otherwise emit code that applies at runtime.
    runtime_query ->
      quote do
        # Unquote the query before `module.apply()` for any binding variable.
        query = unquote(runtime_query)
        unquote(module).apply(query, unquote_splicing(args))
      end
  end
end
# Unescapes an `Ecto.Query` struct.
# Turns the escaped (quoted) form of a %Query{} back into the actual
# struct so compile-time queries can be manipulated directly; any other
# AST is returned untouched.
@spec unescape_query(Macro.t) :: Query.t | Macro.t
defp unescape_query({:%, _, [Query, {:%{}, _, list}]}) do
  struct(Query, list)
end
# A plain map AST carrying `__struct__: Query` (as produced by
# escape_query/1 below) also unescapes to the struct.
defp unescape_query({:%{}, _, list} = ast) do
  if List.keyfind(list, :__struct__, 0) == {:__struct__, Query} do
    Map.new(list)
  else
    ast
  end
end
defp unescape_query(other) do
  other
end
# Escapes an `Ecto.Query` and associated structs.
# The struct is emitted as a plain map AST with `__struct__` included
# (via Map.to_list/1), which unescape_query/1 above can reverse.
@spec escape_query(Query.t) :: Macro.t
defp escape_query(%Query{} = query), do: {:%{}, [], Map.to_list(query)}
# Flattens a chain of `Access.get` calls (`p.meta["a"][0]`) into the
# innermost field access plus the accumulated path list, for conversion
# into `json_extract_path/2`.
defp parse_access_get({{:., _, [Access, :get]}, _, [left, right]}, acc) do
  parse_access_get(left, [right | acc])
end
# Base case: a `source.field` access terminates the chain.
defp parse_access_get({{:., _, [{var, _, context}, field]}, _, []} = expr, acc)
     when is_atom(var) and is_atom(context) and is_atom(field) do
  {expr, acc}
end
end
| 34.988196 | 117 | 0.627868 |
e84344c7c9bdbd435d589eb7a0ba4fa20056225d | 1,587 | ex | Elixir | lib/livebook_web/router.ex | benjreinhart/livebook | 0500ad5c6237167ce9769d8cc78fca360834f576 | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/router.ex | benjreinhart/livebook | 0500ad5c6237167ce9769d8cc78fca360834f576 | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/router.ex | benjreinhart/livebook | 0500ad5c6237167ce9769d8cc78fca360834f576 | [
"Apache-2.0"
] | null | null | null | defmodule LivebookWeb.Router do
use LivebookWeb, :router
import Phoenix.LiveDashboard.Router

# Standard browser stack: HTML responses only, session/flash handling,
# root layout, CSRF protection, and secure response headers.
pipeline :browser do
  plug :accepts, ["html"]
  plug :fetch_session
  plug :fetch_live_flash
  plug :put_root_layout, {LivebookWeb.LayoutView, :root}
  plug :protect_from_forgery
  plug :put_secure_browser_headers
end

# Authentication check and current-user assignment.
pipeline :auth do
  plug LivebookWeb.AuthPlug
  plug LivebookWeb.UserPlug
end

# Authenticated application routes: home, explore, notebook sessions,
# and the Phoenix LiveDashboard.
scope "/", LivebookWeb do
  pipe_through [:browser, :auth]
  live "/", HomeLive, :page
  live "/home/user-profile", HomeLive, :user
  live "/home/import/:tab", HomeLive, :import
  live "/home/sessions/:session_id/close", HomeLive, :close_session
  live "/explore", ExploreLive, :page
  live "/explore/user-profile", ExploreLive, :user
  live "/explore/notebooks/:slug", ExploreLive, :notebook
  live "/sessions/:id", SessionLive, :page
  live "/sessions/:id/user-profile", SessionLive, :user
  live "/sessions/:id/shortcuts", SessionLive, :shortcuts
  live "/sessions/:id/settings/runtime", SessionLive, :runtime_settings
  live "/sessions/:id/settings/file", SessionLive, :file_settings
  live "/sessions/:id/cell-settings/:cell_id", SessionLive, :cell_settings
  live "/sessions/:id/cell-upload/:cell_id", SessionLive, :cell_upload
  get "/sessions/:id/images/:image", SessionController, :show_image
  live_dashboard "/dashboard", metrics: LivebookWeb.Telemetry
end

# Login routes, reachable without authentication.
scope "/authenticate", LivebookWeb do
  pipe_through :browser
  get "/", AuthController, :index
  post "/", AuthController, :authenticate
end
end
| 33.0625 | 76 | 0.711405 |
e84346dc14e963973e5c8909774c61099a169549 | 667 | exs | Elixir | test/iodized/notification_test.exs | envato/iodized | 396a051262fbdfb051e696a5d39e76414dab3568 | [
"MIT"
] | 16 | 2015-01-13T20:56:40.000Z | 2021-07-16T08:05:36.000Z | test/iodized/notification_test.exs | envato/iodized | 396a051262fbdfb051e696a5d39e76414dab3568 | [
"MIT"
] | 5 | 2015-01-11T23:50:58.000Z | 2017-06-06T05:24:04.000Z | test/iodized/notification_test.exs | envato/iodized | 396a051262fbdfb051e696a5d39e76414dab3568 | [
"MIT"
] | 2 | 2015-04-06T11:16:10.000Z | 2016-08-01T05:28:03.000Z | defmodule Iodized.NotificationTest do
use ExUnit.Case, async: true
import Mock
  # Mocks HTTPoison (no real HTTP traffic) and verifies that notify_event/3
  # substitutes the message into each webhook's "{{message}}" URL template,
  # URL-encodes it, and issues a GET for the resulting URL.
  test_with_mock "call registered urls with message",
    HTTPoison,
    [start: &mock_start/0,
     get: &mock_get/1] do
    webhooks = [
      %Iodized.Webhook{id: 1, event_type: "created", url: "http://someurl.com/msg={{message}}"}
    ]
    Iodized.Notification.notify_event("created", "ice cream is the best", webhooks)
    assert called HTTPoison.start()
    # Spaces in the message must be percent-encoded in the final URL.
    assert called HTTPoison.get("http://someurl.com/msg=ice%20cream%20is%20the%20best")
  end
defp mock_start() do
nil
end
def mock_get(url) do
%HTTPoison.Response{status_code: 200, body: ""}
end
end
| 23 | 95 | 0.68066 |
e8435be969c57ade42bd09378fa5094cb4a98dbe | 203 | exs | Elixir | Chapter11/vocial-demo-chapter-11/priv/repo/migrations/20180114145743_add_oauth_data_to_users.exs | PacktPublishing/Phoenix-Web-Development | a071392abe2a459be1896580446b006126c393bf | [
"MIT"
] | 10 | 2018-05-13T14:53:05.000Z | 2021-11-08T13:10:44.000Z | Chapter11/vocial-demo-chapter-11/priv/repo/migrations/20180114145743_add_oauth_data_to_users.exs | PacktPublishing/Phoenix-Web-Development | a071392abe2a459be1896580446b006126c393bf | [
"MIT"
] | null | null | null | Chapter11/vocial-demo-chapter-11/priv/repo/migrations/20180114145743_add_oauth_data_to_users.exs | PacktPublishing/Phoenix-Web-Development | a071392abe2a459be1896580446b006126c393bf | [
"MIT"
] | 2 | 2019-04-23T10:54:33.000Z | 2019-04-27T15:47:07.000Z | defmodule Vocial.Repo.Migrations.AddOauthDataToUsers do
use Ecto.Migration
  # Adds OAuth linkage columns to users: the provider name and the
  # provider-side account id. Both are nullable so existing password-based
  # users are unaffected. `change` is reversible: rollback drops the columns.
  def change do
    alter table(:users) do
      add :oauth_provider, :string
      add :oauth_id, :string
    end
  end
end
| 18.454545 | 55 | 0.704433 |
e8435e04f11a69175ad5ee541d2eddfe73f68c1a | 5,498 | exs | Elixir | apps/ewallet/test/ewallet/web/inviter_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | 1 | 2018-12-07T06:21:21.000Z | 2018-12-07T06:21:21.000Z | apps/ewallet/test/ewallet/web/inviter_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/test/ewallet/web/inviter_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | null | null | null | defmodule EWallet.Web.InviterTest do
use EWallet.DBCase
use Bamboo.Test
alias EWallet.Web.{Inviter, MockInviteEmail, Preloader}
alias EWalletDB.{Account, Invite, Membership, User}
@user_redirect_url "http://localhost:4000/some_redirect_url?email={email}&token={token}"
@user_success_url "http://localhost:4000/some_success_url"
@admin_redirect_url "http://localhost:4000/invite?email={email}&token={token}"
  describe "invite_user/5" do
    # Happy path: inviting a fresh email yields {:ok, %Invite{}}.
    test "sends email and returns the invite if successful" do
      {:ok, _account} = :account |> params_for(parent: nil) |> Account.insert()
      {res, invite} =
        Inviter.invite_user(
          "test@example.com",
          "password",
          @user_redirect_url,
          @user_success_url,
          &MockInviteEmail.create/2
        )
      assert res == :ok
      assert %Invite{} = invite
    end
    # The newly invited user must be linked to the master account.
    test "links the user with master account" do
      {:ok, _account} = :account |> params_for(parent: nil) |> Account.insert()
      {:ok, invite} =
        Inviter.invite_user(
          "test@example.com",
          "password",
          @user_redirect_url,
          @user_success_url,
          &MockInviteEmail.create/2
        )
      {:ok, invite} = Preloader.preload_one(invite, :user)
      accounts = User.get_all_linked_accounts(invite.user.uuid)
      assert Enum.any?(accounts, fn account -> Account.master?(account) end)
    end
    # Re-inviting an unverified user re-sends the verification email for the
    # same user record instead of failing.
    test "resends the verification email if the user has not verified their email" do
      {:ok, _account} = :account |> params_for(parent: nil) |> Account.insert()
      invite = insert(:invite)
      {:ok, user} = :standalone_user |> params_for(invite: invite) |> User.insert()
      {res, invite} =
        Inviter.invite_user(
          user.email,
          "password",
          @user_redirect_url,
          @user_success_url,
          &MockInviteEmail.create/2
        )
      assert res == :ok
      assert %Invite{} = invite
      assert_delivered_email(MockInviteEmail.create(invite, @user_redirect_url))
      {:ok, invite} = Preloader.preload_one(invite, :user)
      # Same user as before — no duplicate account was created.
      assert invite.user.uuid == user.uuid
    end
    # Inviting an already-active user is rejected with a specific error atom.
    test "returns :user_already_active error if user is already active" do
      _user = insert(:user, %{email: "activeuser@example.com"})
      {res, error} =
        Inviter.invite_user(
          "activeuser@example.com",
          "password",
          @user_redirect_url,
          @user_success_url,
          &MockInviteEmail.create/2
        )
      assert res == :error
      assert error == :user_already_active
    end
  end
  describe "invite_admin/5" do
    # Happy path: creates the invite and delivers the invitation email.
    test "sends email and returns the invite if successful" do
      user = insert(:admin, %{email: "activeuser@example.com"})
      account = insert(:account)
      role = insert(:role)
      {res, invite} =
        Inviter.invite_admin(
          "test@example.com",
          account,
          role,
          @admin_redirect_url,
          user,
          &MockInviteEmail.create/2
        )
      assert res == :ok
      assert %Invite{} = invite
      assert_delivered_email(MockInviteEmail.create(invite, @admin_redirect_url))
    end
    # Repeated invitations to the same email each produce a fresh invite
    # (and a fresh email) rather than erroring.
    test "sends a new invite if this email has been invited before" do
      user = insert(:admin, %{email: "activeuser@example.com"})
      account = insert(:account)
      role = insert(:role)
      {:ok, invite1} =
        Inviter.invite_admin(
          "test@example.com",
          account,
          role,
          @admin_redirect_url,
          user,
          &MockInviteEmail.create/2
        )
      {:ok, invite2} =
        Inviter.invite_admin(
          "test@example.com",
          account,
          role,
          @admin_redirect_url,
          user,
          &MockInviteEmail.create/2
        )
      assert_delivered_email(MockInviteEmail.create(invite1, @admin_redirect_url))
      assert_delivered_email(MockInviteEmail.create(invite2, @admin_redirect_url))
    end
    # Inviting must also create a membership tying the invited user to the
    # given account and role.
    test "assigns the user to account and role" do
      user = insert(:admin, %{email: "activeuser@example.com"})
      account = insert(:account)
      role = insert(:role)
      {:ok, invite} =
        Inviter.invite_admin(
          "test@example.com",
          account,
          role,
          @admin_redirect_url,
          user,
          &MockInviteEmail.create/2
        )
      memberships = Membership.all_by_user(invite.user)
      assert Enum.any?(memberships, fn m ->
               m.account_uuid == account.uuid && m.role_uuid == role.uuid
             end)
    end
    # Active admins cannot be re-invited.
    test "returns :user_already_active error if user is already active" do
      # This should already be an active user
      user = insert(:admin, %{email: "activeuser@example.com"})
      account = insert(:account)
      role = insert(:role)
      {res, error} =
        Inviter.invite_admin(
          "activeuser@example.com",
          account,
          role,
          @admin_redirect_url,
          user,
          &MockInviteEmail.create/2
        )
      assert res == :error
      assert error == :user_already_active
    end
  end
  describe "send_email/3" do
    # Generates an invite for an existing admin and checks the email builder
    # function is invoked and the message actually delivered.
    test "creates and sends the invite email" do
      {:ok, user} = :admin |> params_for() |> User.insert()
      {:ok, invite} = Invite.generate(user)
      {res, _} = Inviter.send_email(invite, @admin_redirect_url, &MockInviteEmail.create/2)
      assert res == :ok
      assert_delivered_email(MockInviteEmail.create(invite, @admin_redirect_url))
    end
  end
end
| 29.089947 | 91 | 0.604402 |
e8435f289fa0b1233bb5f79cb7e96ad6fe85c642 | 514 | exs | Elixir | config/test.exs | bitriot/phoenix_base | 15ec83a9acf46202102f2b006d577972f5564b2f | [
"MIT"
] | null | null | null | config/test.exs | bitriot/phoenix_base | 15ec83a9acf46202102f2b006d577972f5564b2f | [
"MIT"
] | null | null | null | config/test.exs | bitriot/phoenix_base | 15ec83a9acf46202102f2b006d577972f5564b2f | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
# Port 4001 avoids clashing with the dev server on 4000.
config :phoenix_base, PhoenixBase.Endpoint,
  http: [port: 4001],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn

# Configure your database.
# The SQL Sandbox pool wraps each test in a transaction that is rolled back,
# keeping tests isolated from one another.
config :phoenix_base, PhoenixBase.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "postgres",
  database: "phoenix_base_test",
  hostname: "localhost",
  pool: Ecto.Adapters.SQL.Sandbox
| 25.7 | 56 | 0.745136 |
e84360e49ad8af4747c13d08f0babefd96d332d9 | 4,986 | ex | Elixir | lib/sentinel/ueberauthenticator.ex | lumenlunae/sentinel | 189d9b02aeeea942a41963b42ef8523ef192fd03 | [
"MIT"
] | 125 | 2016-01-29T11:46:20.000Z | 2021-06-08T09:25:38.000Z | lib/sentinel/ueberauthenticator.ex | lumenlunae/sentinel | 189d9b02aeeea942a41963b42ef8523ef192fd03 | [
"MIT"
] | 54 | 2016-02-18T01:11:58.000Z | 2017-10-19T11:25:26.000Z | lib/sentinel/ueberauthenticator.ex | lumenlunae/sentinel | 189d9b02aeeea942a41963b42ef8523ef192fd03 | [
"MIT"
] | 29 | 2016-02-20T12:59:16.000Z | 2018-04-11T14:29:41.000Z | defmodule Sentinel.Ueberauthenticator do
@moduledoc """
Common authentication logic using the ueberauth underlying layer
"""
alias Ueberauth.Auth
alias Sentinel.Authenticator
alias Sentinel.Changeset.Registrator
alias Sentinel.Changeset.Confirmator
alias Sentinel.Config
alias Sentinel.Ueberauth
@unknown_error {:error, [base: {"Unknown email or password", []}]}
def ueberauthenticate(auth = %Auth{provider: :identity, credentials: %Auth.Credentials{other: %{password: password}}}) when is_nil(password) or password == "" do
if invitable?() do
create_user_and_auth(auth)
else
{:error, [password: {"A password is required to login", []}]}
end
end
def ueberauthenticate(%Auth{provider: :identity, uid: email}) when is_nil(email) or email == "" do
{:error, [email: {"An email is required to login", []}]}
end
def ueberauthenticate(%Auth{provider: :identity, uid: uid, credentials: %Auth.Credentials{other: %{password: password, password_confirmation: password_confirmation}}}) when is_nil(password_confirmation) or password_confirmation == "" do
Config.user_model
|> Config.repo.get_by(email: String.downcase(uid))
|> find_auth_and_authenticate(password)
end
def ueberauthenticate(%Auth{provider: :identity, uid: uid, credentials: %Auth.Credentials{other: %{password: password, password_confirmation: password_confirmation}}} = auth) when password == password_confirmation do
user =
Config.user_model
|> Config.repo.get_by(email: String.downcase(uid))
if is_nil(user) do
create_user_and_auth(auth)
else
db_auth = Config.repo.get_by(Ueberauth, provider: "identity", user_id: user.id)
authenticate(user, db_auth, password)
end
end
def ueberauthenticate(%Auth{provider: :identity, uid: uid, credentials: %Auth.Credentials{other: %{password: password, password_confirmation: password}}}) do
Config.user_model
|> Config.repo.get_by(email: String.downcase(uid))
|> find_auth_and_authenticate(password)
end
def ueberauthenticate(%Auth{provider: :identity, uid: _uid, credentials: %Auth.Credentials{other: %{password: _password, password_confirmation: _password_confirmation}}}) do
{:error, [%{password: "Password must match password confirmation"}]}
end
def ueberauthenticate(%Auth{provider: :identity, uid: uid, credentials: %Auth.Credentials{other: %{password: password}}}) do
Config.user_model
|> Config.repo.get_by(email: String.downcase(uid))
|> find_auth_and_authenticate(password)
end
def ueberauthenticate(%Auth{uid: uid} = auth_params) do
auth =
Sentinel.Ueberauth
|> Config.repo.get_by(uid: uid)
|> Config.repo.preload([:user])
if is_nil(auth) do
user = Config.repo.get_by(Config.user_model, email: auth_params.info.email)
if is_nil(user) do
create_user_and_auth(auth_params)
else
updated_auth = auth_params |> Map.put(:provider, Atom.to_string(auth_params.provider))
auth_changeset =
%Sentinel.Ueberauth{uid: user.id, user_id: user.id}
|> Sentinel.Ueberauth.changeset(Map.from_struct(updated_auth))
case Config.repo.insert(auth_changeset) do
{:ok, _auth} -> {:ok, user}
{:error, error} -> {:error, error}
end
end
else
{:ok, auth.user}
end
end
defp find_auth_and_authenticate(user, password) do
if is_nil(user) do
@unknown_error
else
db_auth = Config.repo.get_by(Ueberauth, provider: "identity", user_id: user.id)
authenticate(user, db_auth, password)
end
end
defp authenticate(nil, _auth, _password) do
@unknown_error
end
defp authenticate(_user, nil, _password) do
@unknown_error
end
defp authenticate(user, auth, password) do
auth
|> Map.put(:user, user)
|> Authenticator.authenticate(password)
end
  # Registers a brand-new user together with its auth record inside a single
  # transaction; a failure of either insert rolls the whole thing back.
  # Returns {:ok, %{user: user, confirmation_token: token}} on success.
  defp create_user_and_auth(auth) do
    if Config.registerable?() do
      # Ueberauth gives the provider as an atom; persist it as a string.
      updated_auth = auth |> Map.put(:provider, Atom.to_string(auth.provider))
      Config.repo.transaction(fn ->
        # Build the user changeset plus the email-confirmation token.
        {confirmation_token, changeset} =
          updated_auth.info
          |> Map.from_struct
          |> Registrator.changeset(updated_auth.extra.raw_info)
          |> Confirmator.confirmation_needed_changeset
        user =
          case Config.repo.insert(changeset) do
            {:ok, user} -> user
            _ -> Config.repo.rollback(changeset.errors)
          end
        auth_changeset =
          %Sentinel.Ueberauth{uid: user.id, user_id: user.id}
          |> Sentinel.Ueberauth.changeset(Map.from_struct(updated_auth))
        case Config.repo.insert(auth_changeset) do
          {:ok, _auth} -> nil
          # NOTE(review): rolls back with the *user* changeset's errors, not
          # the failed auth changeset's — presumably intentional; confirm.
          _ -> Config.repo.rollback(changeset.errors)
        end
        # Value returned from the transaction fun, wrapped in {:ok, _}.
        %{user: user, confirmation_token: confirmation_token}
      end)
    else
      {:error, [base: {"New user registration is not permitted", []}]}
    end
  end
defp invitable? do
Config.invitable
end
end
| 36.130435 | 238 | 0.68231 |
e8439fe7b44b3f7443e029fe67244241a372d252 | 605 | ex | Elixir | lib/squarestore/product/products.ex | NinjaAnge/forksquare | ee9ea91e45e50b9f1ba4a8261ebdd99b7fe3333d | [
"MIT"
] | null | null | null | lib/squarestore/product/products.ex | NinjaAnge/forksquare | ee9ea91e45e50b9f1ba4a8261ebdd99b7fe3333d | [
"MIT"
] | null | null | null | lib/squarestore/product/products.ex | NinjaAnge/forksquare | ee9ea91e45e50b9f1ba4a8261ebdd99b7fe3333d | [
"MIT"
] | null | null | null | defmodule Squarestore.Product.Products do
use Ecto.Schema
import Ecto.Changeset
  # Product catalogue record. `image` stores arbitrary image metadata as a
  # map; `alternative_products`, `storage_status` and `supplier` are integer
  # codes (presumably references to other tables — TODO confirm with callers).
  schema "products" do
    field :alternative_products, :integer
    field :base_price, :integer
    field :image, :map
    field :storage_status, :integer
    field :name, :string
    field :pnr, :string
    field :summary, :string
    field :supplier, :integer
    timestamps()
  end
def changeset(products, attrs) do
products
|> cast(attrs, [:name, :image, :pnr, :summary, :base_price, :storage_status, :supplier, :alternative_products])
|> validate_required([:name, :summary, :base_price])
end
end
| 25.208333 | 115 | 0.689256 |
e843b5643483802c2ae9e7fba2c8d4d9e6757fd5 | 1,542 | ex | Elixir | clients/books/lib/google_api/books/v1/model/offers.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/books/lib/google_api/books/v1/model/offers.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/books/lib/google_api/books/v1/model/offers.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: auto-generated module (see header above) — keep edits to a minimum.
defmodule GoogleApi.Books.V1.Model.Offers do
  @moduledoc """
  ## Attributes

  * `items` (*type:* `list(GoogleApi.Books.V1.Model.OffersItems.t)`, *default:* `nil`) - A list of offers.
  * `kind` (*type:* `String.t`, *default:* `nil`) - Resource type.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :items => list(GoogleApi.Books.V1.Model.OffersItems.t()) | nil,
          :kind => String.t() | nil
        }

  # `field/1,3` comes from GoogleApi.Gax.ModelBase and wires up the
  # JSON (de)serialization for each attribute.
  field(:items, as: GoogleApi.Books.V1.Model.OffersItems, type: :list)
  field(:kind)
end
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.Offers do
  # Delegates to the generated decode/2 so nested model fields are rebuilt
  # into their struct types after JSON parsing.
  def decode(value, options) do
    GoogleApi.Books.V1.Model.Offers.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Books.V1.Model.Offers do
  # Generic model encoding provided by the Gax base module.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.84 | 108 | 0.708171 |
e843f9b97b171d4b0918a10a215b3a60db9e1c6e | 560 | exs | Elixir | mix.exs | imeraj/elixir_git | 27792b6aa9f8b14c946543cd81b253977d8686f8 | [
"MIT"
] | 22 | 2021-03-07T17:00:42.000Z | 2022-03-21T07:16:11.000Z | mix.exs | imeraj/elixir_git | 27792b6aa9f8b14c946543cd81b253977d8686f8 | [
"MIT"
] | null | null | null | mix.exs | imeraj/elixir_git | 27792b6aa9f8b14c946543cd81b253977d8686f8 | [
"MIT"
] | 2 | 2021-03-10T21:16:51.000Z | 2021-05-06T10:49:13.000Z | defmodule Egit.MixProject do
use Mix.Project
  # Mix project definition for the egit escript.
  def project do
    [
      app: :egit,
      version: "0.1.0",
      elixir: "~> 1.11",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      # Built as a standalone command-line executable (see escript/0).
      escript: escript()
    ]
  end
# Run "mix help compile.app" to learn about applications.
  # Run "mix help compile.app" to learn about applications.
  # :crypto is needed in addition to :logger (the app hashes git objects).
  def application do
    [
      extra_applications: [:logger, :crypto]
    ]
  end
# Run "mix help deps" to learn about dependencies.
  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:stream_hash, "~> 0.3.1"}
    ]
  end
defp escript do
[main_module: Egit.CLI]
end
end
| 16.969697 | 59 | 0.573214 |
e84415a472659a71bae094f05b04c0b2cef8ba76 | 1,265 | ex | Elixir | test/support/phoenix/endpoint.ex | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | 1 | 2021-06-25T10:36:01.000Z | 2021-06-25T10:36:01.000Z | test/support/phoenix/endpoint.ex | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | null | null | null | test/support/phoenix/endpoint.ex | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | 1 | 2020-07-13T01:11:17.000Z | 2020-07-13T01:11:17.000Z | defmodule Pow.Test.Phoenix.Endpoint do
  defmodule SessionPlugHelper do
    @moduledoc false
    # Thin wrapper around Pow.Plug.Session that lets each test override the
    # user module / users context at runtime via the :pow_test_config
    # conn private, falling back to the default mock context.
    alias Pow.Plug.Session
    def init(config), do: Session.init(config)
    def call(conn, config) do
      additional_config =
        case conn.private[:pow_test_config] do
          :username_user -> [user: Pow.Test.Ecto.Users.UsernameUser, users_context: Pow.Test.ContextMock.UsernameUser]
          nil -> [user: Pow.Test.Ecto.Users.User, users_context: Pow.Test.ContextMock]
          # Anything else is used verbatim as extra config.
          additional -> additional
        end
      Session.call(conn, Keyword.merge(config, additional_config))
    end
  end
  @moduledoc false
  use Phoenix.Endpoint, otp_app: :pow

  # Standard Phoenix endpoint plug chain, trimmed down for tests.
  plug Plug.RequestId
  plug Plug.Logger
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()
  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session,
    store: :cookie,
    key: "_binaryid_key",
    signing_salt: "secret"
  # Pow session plug (see SessionPlugHelper above) backed by in-test mocks
  # for caching, messages and routes.
  plug SessionPlugHelper,
    current_user_assigns_key: :current_user,
    session_key: "auth",
    cache_store_backend: Pow.Test.EtsCacheMock,
    messages_backend: Pow.Test.Phoenix.Messages,
    routes_backend: Pow.Test.Phoenix.Routes
  plug Pow.Test.Phoenix.Router
| 26.354167 | 118 | 0.6917 |
e844160d650713c3b492daad4b7c52d41251c84b | 135 | exs | Elixir | config/dev.exs | Amtelco-Software/ex_ari | 3280d830a7911d11bf30878a99b2534058a145bd | [
"MIT"
] | null | null | null | config/dev.exs | Amtelco-Software/ex_ari | 3280d830a7911d11bf30878a99b2534058a145bd | [
"MIT"
] | null | null | null | config/dev.exs | Amtelco-Software/ex_ari | 3280d830a7911d11bf30878a99b2534058a145bd | [
"MIT"
] | null | null | null | import Config
# Development ARI client registry: maps the :ari_test key to its Stasis
# application name ("test") and the module that handles its events.
config :ex_ari,
  clients: %{
    ari_test: %{
      name: "test",
      module: ARI.WebSocketTest.TestClient
    }
  }
| 13.5 | 42 | 0.585185 |
e844358786f8f148482c24e4078c826e5a614246 | 8,227 | exs | Elixir | apps/pipeline/test/integration/pipeline/writer/table_writer_test.exs | SmartColumbusOS/smartcitiesdata | c8553d34631c822b034945eebf396994bf1001ff | [
"Apache-2.0"
] | 1 | 2021-04-05T19:17:18.000Z | 2021-04-05T19:17:18.000Z | apps/pipeline/test/integration/pipeline/writer/table_writer_test.exs | SmartColumbusOS/smartcitiesdata | c8553d34631c822b034945eebf396994bf1001ff | [
"Apache-2.0"
] | 11 | 2020-01-07T15:43:42.000Z | 2020-12-22T15:23:25.000Z | apps/pipeline/test/integration/pipeline/writer/table_writer_test.exs | SmartColumbusOS/smartcitiesdata | c8553d34631c822b034945eebf396994bf1001ff | [
"Apache-2.0"
] | null | null | null | defmodule Pipeline.Writer.TableWriterTest do
use ExUnit.Case
use Divo
use Placebo
alias Pipeline.Writer.TableWriter
alias Pipeline.Writer.TableWriter.Compaction
alias Pipeline.Application
alias SmartCity.TestDataGenerator, as: TDG
import SmartCity.TestHelper, only: [eventually: 1]
  # Opens a fresh Prestige (Presto client) session for each test and makes
  # it available via the test context as `session`.
  setup do
    session =
      Application.prestige_opts()
      |> Prestige.new_session()
    [session: session]
  end
  describe "init/1" do
    # Verifies the Hive table is created with columns matching the dataset
    # schema, including nested list/map types.
    test "creates table with correct name and schema", %{session: session} do
      expected = [
        %{"Column" => "one", "Comment" => "", "Extra" => "", "Type" => "array(varchar)"},
        %{"Column" => "two", "Comment" => "", "Extra" => "", "Type" => "row(three decimal(18,3))"},
        %{"Column" => "four", "Comment" => "", "Extra" => "", "Type" => "array(row(five decimal(18,3)))"}
      ]
      schema = [
        %{name: "one", type: "list", itemType: "string"},
        %{name: "two", type: "map", subSchema: [%{name: "three", type: "decimal(18,3)"}]},
        %{name: "four", type: "list", itemType: "map", subSchema: [%{name: "five", type: "decimal(18,3)"}]}
      ]
      dataset = TDG.create_dataset(%{technical: %{systemName: "org_name_dataset_name", schema: schema}})
      TableWriter.init(table: dataset.technical.systemName, schema: dataset.technical.schema)
      # Table creation is asynchronous; poll until DESCRIBE reflects it.
      eventually(fn ->
        table = "describe hive.default.org_name_dataset_name"
        result =
          session
          |> Prestige.execute!(table)
          |> Prestige.Result.as_maps()
        assert result == expected
      end)
    end
    # A dotted table name is invalid SQL — init/1 must surface the error.
    test "handles prestige errors for invalid table names" do
      schema = [
        %{name: "one", type: "list", itemType: "string"},
        %{name: "two", type: "map", subSchema: [%{name: "three", type: "decimal(18,3)"}]},
        %{name: "four", type: "list", itemType: "map", subSchema: [%{name: "five", type: "integer"}]}
      ]
      dataset = TDG.create_dataset(%{technical: %{systemName: "this.is.invalid", schema: schema}})
      assert {:error, _} = TableWriter.init(table: dataset.technical.systemName, schema: dataset.technical.schema)
    end
    # Reserved words used as column names ("on") must be escaped, not rejected.
    test "escapes invalid column names", %{session: session} do
      expected = [%{"Column" => "on", "Comment" => "", "Extra" => "", "Type" => "boolean"}]
      schema = [%{name: "on", type: "boolean"}]
      dataset = TDG.create_dataset(%{technical: %{systemName: "foo", schema: schema}})
      TableWriter.init(table: dataset.technical.systemName, schema: dataset.technical.schema)
      eventually(fn ->
        table = "describe hive.default.foo"
        result =
          session
          |> Prestige.execute!(table)
          |> Prestige.Result.as_maps()
        assert result == expected
      end)
    end
  end
  describe "write/2" do
    # Flat records are inserted and readable back in insertion order.
    test "inserts records", %{session: session} do
      schema = [%{name: "one", type: "string"}, %{name: "two", type: "integer"}]
      dataset = TDG.create_dataset(%{technical: %{systemName: "foo__bar", schema: schema}})
      TableWriter.init(table: dataset.technical.systemName, schema: schema)
      datum1 = TDG.create_data(%{dataset_id: dataset.id, payload: %{"one" => "hello", "two" => 42}})
      datum2 = TDG.create_data(%{dataset_id: dataset.id, payload: %{"one" => "goodbye", "two" => 9001}})
      TableWriter.write([datum1, datum2], table: dataset.technical.systemName, schema: schema)
      eventually(fn ->
        query = "select * from foo__bar"
        result =
          session
          |> Prestige.query!(query)
          |> Prestige.Result.as_maps()
        assert result == [%{"one" => "hello", "two" => 42}, %{"one" => "goodbye", "two" => 9001}]
      end)
    end
    # Deeply nested lists/maps survive the round trip through Presto.
    # Note the decimal age comes back as the string "10".
    test "inserts heavily nested records", %{session: session} do
      schema = [
        %{name: "first_name", type: "string"},
        %{name: "age", type: "decimal"},
        %{name: "friend_names", type: "list", itemType: "string"},
        %{
          name: "friends",
          type: "list",
          itemType: "map",
          subSchema: [
            %{name: "first_name", type: "string"},
            %{name: "pet", type: "string"}
          ]
        },
        %{
          name: "spouse",
          type: "map",
          subSchema: [
            %{name: "first_name", type: "string"},
            %{name: "gender", type: "string"},
            %{
              name: "next_of_kin",
              type: "map",
              subSchema: [
                %{name: "first_name", type: "string"},
                %{name: "date_of_birth", type: "string"}
              ]
            }
          ]
        }
      ]
      payload = %{
        "first_name" => "Joe",
        "age" => 10,
        "friend_names" => ["bob", "sally"],
        "friends" => [
          %{"first_name" => "Bill", "pet" => "Bunco"},
          %{"first_name" => "Sally", "pet" => "Bosco"}
        ],
        "spouse" => %{
          "first_name" => "Susan",
          "gender" => "female",
          "next_of_kin" => %{
            "first_name" => "Joel",
            "date_of_birth" => "1941-07-12"
          }
        }
      }
      dataset = TDG.create_dataset(%{technical: %{systemName: "foo__baz", schema: schema}})
      TableWriter.init(table: dataset.technical.systemName, schema: schema)
      datum = TDG.create_data(dataset_id: dataset.id, payload: payload)
      expected = %{
        "age" => "10",
        "first_name" => "Joe",
        "friend_names" => ["bob", "sally"],
        "friends" => [%{"first_name" => "Bill", "pet" => "Bunco"}, %{"first_name" => "Sally", "pet" => "Bosco"}],
        "spouse" => %{
          "first_name" => "Susan",
          "gender" => "female",
          "next_of_kin" => %{"date_of_birth" => "1941-07-12", "first_name" => "Joel"}
        }
      }
      assert :ok = TableWriter.write([datum], table: dataset.technical.systemName, schema: schema)
      eventually(fn ->
        query = "select * from foo__baz"
        result =
          session
          |> Prestige.execute!(query)
          |> Prestige.Result.as_maps()
        assert result == [expected]
      end)
    end
  end
  describe "compact/1" do
    # Compaction rewrites the table files but must keep the row count intact.
    test "compacts a table without changing data", %{session: session} do
      sub = [%{name: "three", type: "boolean"}]
      schema = [%{name: "one", type: "list", itemType: "decimal"}, %{name: "two", type: "map", subSchema: sub}]
      dataset = TDG.create_dataset(%{technical: %{schema: schema, systemName: "a__b"}})
      TableWriter.init(table: dataset.technical.systemName, schema: schema)
      # Write 15 rows one at a time to create many small files worth compacting.
      Enum.each(1..15, fn n ->
        payload = %{"one" => [n], "two" => %{"three" => false}}
        datum = TDG.create_data(%{dataset_id: dataset.id, payload: payload})
        TableWriter.write([datum], table: dataset.technical.systemName, schema: schema)
      end)
      eventually(fn ->
        query = "select count(1) from #{dataset.technical.systemName}"
        result =
          session
          |> Prestige.query!(query)
        assert result.rows == [[15]]
      end)
      assert :ok == TableWriter.compact(table: dataset.technical.systemName)
      eventually(fn ->
        query = "select count(1) from #{dataset.technical.systemName}"
        result =
          session
          |> Prestige.query!(query)
        assert result.rows == [[15]]
      end)
    end
    # If the pre/post row-count measurement disagrees (stubbed here), the
    # compaction must abort with an error and leave the original data alone.
    test "fails without altering state if it was going to change data", %{session: session} do
      allow Compaction.measure(any(), any()), return: {6, 10}, meck_options: [:passthrough]
      schema = [%{name: "abc", type: "string"}]
      dataset = TDG.create_dataset(%{technical: %{schema: schema, systemName: "xyz"}})
      TableWriter.init(table: dataset.technical.systemName, schema: schema)
      Enum.each(1..15, fn n ->
        payload = %{"abc" => "#{n}"}
        datum = TDG.create_data(%{dataset_id: dataset.id, payload: payload})
        TableWriter.write([datum], table: "xyz", schema: schema)
      end)
      assert {:error, _} = TableWriter.compact(table: "xyz")
      eventually(fn ->
        query = "select count(1) from xyz"
        result =
          session
          |> Prestige.query!(query)
        assert result.rows == [[15]]
      end)
    end
  end
end
| 32.646825 | 114 | 0.547587 |
e8443a570b7695b5b6e9cc2607138cb797601833 | 128 | exs | Elixir | whoppex_runner/test/whoppex_runner_test.exs | benjaminplee/whoppex | 63cbe7f9f07079925c43a1e0e1896f043993e9e7 | [
"Apache-2.0"
] | 3 | 2017-08-14T13:53:34.000Z | 2021-07-03T12:46:41.000Z | whoppex_runner/test/whoppex_runner_test.exs | benjaminplee/whoppex | 63cbe7f9f07079925c43a1e0e1896f043993e9e7 | [
"Apache-2.0"
] | null | null | null | whoppex_runner/test/whoppex_runner_test.exs | benjaminplee/whoppex | 63cbe7f9f07079925c43a1e0e1896f043993e9e7 | [
"Apache-2.0"
] | null | null | null | defmodule WhoppexRunnerTest do
use ExUnit.Case
doctest WhoppexRunner
  # Placeholder sanity test generated by `mix new`.
  test "the truth" do
    assert 1 + 1 == 2
  end
end
| 14.222222 | 30 | 0.703125 |
e8443fd9c015b734d5f8afcadac5cd380498341c | 96 | exs | Elixir | packet_analyzer/test/packet_analyzer_web/views/layout_view_test.exs | niya1123/PacketAnalyzer | 65236372812ef9f593df460d054ee7ee487d1a74 | [
"MIT"
] | null | null | null | packet_analyzer/test/packet_analyzer_web/views/layout_view_test.exs | niya1123/PacketAnalyzer | 65236372812ef9f593df460d054ee7ee487d1a74 | [
"MIT"
] | 2 | 2021-03-10T02:15:32.000Z | 2021-05-10T21:48:57.000Z | packet_analyzer/test/packet_analyzer_web/views/layout_view_test.exs | niya1123/PacketAnalyzer | 65236372812ef9f593df460d054ee7ee487d1a74 | [
"MIT"
] | null | null | null | defmodule PacketAnalyzerWeb.LayoutViewTest do
use PacketAnalyzerWeb.ConnCase, async: true
end
| 24 | 45 | 0.854167 |
e844536d255d765cb16d889ae3eb71788016700e | 1,277 | ex | Elixir | backend/lib/comeonin_ecto_password.ex | ui-icts/aptamer-web | a28502c22a4e55ab1fbae8bbeaa6b11c9a477c06 | [
"MIT"
] | null | null | null | backend/lib/comeonin_ecto_password.ex | ui-icts/aptamer-web | a28502c22a4e55ab1fbae8bbeaa6b11c9a477c06 | [
"MIT"
] | 7 | 2019-02-08T18:28:49.000Z | 2022-02-12T06:44:59.000Z | backend/lib/comeonin_ecto_password.ex | ui-icts/aptamer-web | a28502c22a4e55ab1fbae8bbeaa6b11c9a477c06 | [
"MIT"
] | null | null | null | defmodule Comeonin.Ecto.Password do
@behaviour Ecto.Type
@moduledoc """
A custom Ecto type for storing encrypted passwords.
## Usage
On your schema, define secure fields with this type:
field :password, Comeonin.Ecto.Password
Then on your changeset just cast from plain-text params
cast(changeset, params, ~w(password), ~w())
After casting the password will already be encrypted,
and can be saved to your database string column.
To check for validity, do something like:
user = Repo.get_by User, email: "me@example.org"
Comeonin.Ecto.Type.valid?("plain_password", user.password)
See [Homepage](http://github.com/vic/comeonin_ecto_password)
"""
def type, do: :string
def cast(""), do: {:ok, ""}
def cast(value) when is_binary(value), do: {:ok, hash_password(value)}
def cast(_), do: :error
def load(x) when is_binary(x), do: {:ok, x}
def load(_), do: :error
def dump(x) when is_binary(x), do: {:ok, x}
def dump(_), do: :error
defp crypt, do: Application.get_env(:comeonin, Ecto.Password, Comeonin.Bcrypt)
  # Salts and hashes a plain-text password with the configured backend.
  defp hash_password(plain_password) do
    crypt().hashpwsalt(plain_password)
  end
def valid?(plain_password, hashed_password) do
crypt().checkpw(plain_password, hashed_password)
end
end
| 25.039216 | 80 | 0.700078 |
e84459c80cafa9454992d8ba29d8776beb3a56d5 | 3,324 | exs | Elixir | exercises/practice/binary-search-tree/test/binary_search_tree_test.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/binary-search-tree/test/binary_search_tree_test.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/binary-search-tree/test/binary_search_tree_test.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule BinarySearchTreeTest do
use ExUnit.Case
  # A freshly created node holds its datum and has no children.
  test "data is retained" do
    root = BinarySearchTree.new(4)
    assert root.data == 4
    assert root.left == nil
    assert root.right == nil
  end
  describe "insert data at proper node" do
    # BST invariant: values <= node go left, values > node go right.
    @tag :pending
    test "smaller number at left node" do
      root =
        BinarySearchTree.new(4)
        |> BinarySearchTree.insert(2)
      assert root.data == 4
      assert root.left.data == 2
      assert root.left.left == nil
      assert root.left.right == nil
      assert root.right == nil
    end
    # Duplicates are placed on the left subtree.
    @tag :pending
    test "same number at left node" do
      root =
        BinarySearchTree.new(4)
        |> BinarySearchTree.insert(4)
      assert root.data == 4
      assert root.left.data == 4
      assert root.left.left == nil
      assert root.left.right == nil
      assert root.right == nil
    end
    @tag :pending
    test "greater number at right node" do
      root =
        BinarySearchTree.new(4)
        |> BinarySearchTree.insert(5)
      assert root.data == 4
      assert root.left == nil
      assert root.right.data == 5
      assert root.right.left == nil
      assert root.right.right == nil
    end
  end
@tag :pending
test "can create complex tree" do
root =
BinarySearchTree.new(4)
|> BinarySearchTree.insert(2)
|> BinarySearchTree.insert(6)
|> BinarySearchTree.insert(1)
|> BinarySearchTree.insert(3)
|> BinarySearchTree.insert(5)
|> BinarySearchTree.insert(7)
assert root.data == 4
assert root.left.data == 2
assert root.left.left.data == 1
assert root.left.left.left == nil
assert root.left.left.right == nil
assert root.left.right.data == 3
assert root.left.right.left == nil
assert root.left.right.right == nil
assert root.right.data == 6
assert root.right.left.data == 5
assert root.right.left.left == nil
assert root.right.left.right == nil
assert root.right.right.data == 7
assert root.right.right.left == nil
assert root.right.right.right == nil
end
  describe "can sort data" do
    # in_order/1 must yield the values as a sorted list (in-order traversal).
    @tag :pending
    test "can sort single number" do
      root = BinarySearchTree.new(2)
      assert [2] == BinarySearchTree.in_order(root)
    end
    @tag :pending
    test "can sort if second number is smaller than first" do
      root =
        BinarySearchTree.new(2)
        |> BinarySearchTree.insert(1)
      assert [1, 2] == BinarySearchTree.in_order(root)
    end
    # Duplicates are preserved, not collapsed.
    @tag :pending
    test "can sort if second number is the same as the first" do
      root =
        BinarySearchTree.new(2)
        |> BinarySearchTree.insert(2)
      assert [2, 2] == BinarySearchTree.in_order(root)
    end
    @tag :pending
    test "can sort if second number is greater than the first" do
      root =
        BinarySearchTree.new(2)
        |> BinarySearchTree.insert(3)
      assert [2, 3] == BinarySearchTree.in_order(root)
    end
    # Insertion order differs from sorted order on purpose.
    @tag :pending
    test "can sort complex tree" do
      root =
        BinarySearchTree.new(2)
        |> BinarySearchTree.insert(1)
        |> BinarySearchTree.insert(3)
        |> BinarySearchTree.insert(6)
        |> BinarySearchTree.insert(7)
        |> BinarySearchTree.insert(5)
      assert [1, 2, 3, 5, 6, 7] == BinarySearchTree.in_order(root)
    end
  end
end
| 25.767442 | 66 | 0.619134 |
e8446d7db834a5b0daad95301841611c3332d802 | 1,021 | ex | Elixir | lib/nostrum/struct/embed/thumbnail.ex | phereford/nostrum | 3d273671f51d839eedac4d6e52ba9cf70720ac01 | [
"MIT"
] | 637 | 2017-03-07T11:25:35.000Z | 2022-03-31T13:37:51.000Z | lib/nostrum/struct/embed/thumbnail.ex | phereford/nostrum | 3d273671f51d839eedac4d6e52ba9cf70720ac01 | [
"MIT"
] | 372 | 2017-03-07T20:42:03.000Z | 2022-03-30T22:46:46.000Z | lib/nostrum/struct/embed/thumbnail.ex | phereford/nostrum | 3d273671f51d839eedac4d6e52ba9cf70720ac01 | [
"MIT"
] | 149 | 2017-03-07T12:11:58.000Z | 2022-03-19T22:11:51.000Z | defmodule Nostrum.Struct.Embed.Thumbnail do
@moduledoc """
Struct representing a Discord embed thumbnail.
"""
alias Nostrum.Util
alias Jason.{Encode, Encoder}
  defstruct [
    # Source URL of the thumbnail image.
    :url,
    # Proxied URL of the thumbnail (see the proxy_url type below).
    :proxy_url,
    # Height of the thumbnail.
    :height,
    # Width of the thumbnail.
    :width
  ]
defimpl Encoder do
def encode(thumbnail, options) do
thumbnail
|> Map.from_struct()
|> Enum.filter(fn {_, v} -> v != nil end)
|> Map.new()
|> Encode.map(options)
end
end
  @typedoc "Source URL of the thumbnail"
  @type url :: String.t() | nil

  @typedoc "Proxied URL of the thumbnail"
  @type proxy_url :: String.t() | nil

  @typedoc "Height of the thumbnail"
  @type height :: integer | nil

  @typedoc "Width of the thumbnail"
  @type width :: integer | nil

  @type t :: %__MODULE__{
          url: url,
          proxy_url: proxy_url,
          height: height,
          width: width
        }
@doc false
def to_struct(map) do
new = Map.new(map, fn {k, v} -> {Util.maybe_to_atom(k), v} end)
struct(__MODULE__, new)
end
end
| 19.634615 | 67 | 0.596474 |
e844985b94cb594d19c9583aeb5dab8993b73ef6 | 1,704 | ex | Elixir | bullion/lib/bullion_web.ex | ttymck/bullion | d15babe80d30f9775e45f2a143b88a66b539d318 | [
"MIT"
] | null | null | null | bullion/lib/bullion_web.ex | ttymck/bullion | d15babe80d30f9775e45f2a143b88a66b539d318 | [
"MIT"
] | 8 | 2021-03-10T20:53:42.000Z | 2021-07-30T06:52:16.000Z | bullion/lib/bullion_web.ex | ttymck/bullion | d15babe80d30f9775e45f2a143b88a66b539d318 | [
"MIT"
] | null | null | null | defmodule BullionWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use BullionWeb, :controller
use BullionWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
  # Imports/aliases injected into every controller (`use BullionWeb, :controller`).
  def controller do
    quote do
      use Phoenix.Controller, namespace: BullionWeb

      import Plug.Conn
      import BullionWeb.Gettext
      alias BullionWeb.Router.Helpers, as: Routes
      import BullionWeb.InputHelpers
    end
  end

  # Template/view helpers injected into every view (`use BullionWeb, :view`).
  def view do
    quote do
      use Phoenix.View,
        root: "lib/bullion_web/templates",
        namespace: BullionWeb

      # Import convenience functions from controllers
      import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]

      # Use all HTML functionality (forms, tags, etc)
      use Phoenix.HTML

      import BullionWeb.ErrorHelpers
      import BullionWeb.Gettext
      alias BullionWeb.Router.Helpers, as: Routes
      import BullionWeb.InputHelpers
    end
  end

  # Plumbing available inside the router module.
  def router do
    quote do
      use Phoenix.Router
      import Plug.Conn
      import Phoenix.Controller
    end
  end

  # Imports injected into channel modules.
  def channel do
    quote do
      use Phoenix.Channel
      import BullionWeb.Gettext
    end
  end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 23.666667 | 83 | 0.691901 |
e844ac49480fdbfab1f89a4495b430ab3eef2bde | 1,565 | ex | Elixir | lib/bitcoin/protocol/types/tx_input.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | 2 | 2019-08-12T04:53:57.000Z | 2019-09-03T03:47:33.000Z | lib/bitcoin/protocol/types/tx_input.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | null | null | null | lib/bitcoin/protocol/types/tx_input.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | null | null | null | defmodule Bitcoin.Protocol.Types.TxInput do
alias Bitcoin.Protocol.Types.VarString
alias Bitcoin.Protocol.Types.Outpoint
  # The previous output transaction reference, as an OutPoint structure
  defstruct previous_output: %Outpoint{},
            # Computational Script for confirming transaction authorization
            signature_script: <<>>,
            # Transaction version as defined by the sender. Intended for "replacement" of transactions when information is updated before inclusion into a block.
            sequence: 0

  @type t :: %__MODULE__{
          previous_output: Outpoint.t(),
          signature_script: binary,
          sequence: non_neg_integer
        }

  # defimpl Inspect, for: __MODULE__ do
  #   def inspect(data, _opts) do
  #     "%In{ ##{data.sequence} output: #{data.previous_output |> Kernel.inspect}, sig: #{data.signature_script |> Base.encode16} }"
  #   end
  # end

  # Consumes one serialized transaction input from the front of `payload`:
  # an outpoint, a var-string signature script, then a 32-bit little-endian
  # sequence number.  Returns {parsed_input, rest_of_payload}.
  @spec parse_stream(binary) :: {t, binary}
  def parse_stream(payload) do
    {outpoint, payload} = Outpoint.parse_stream(payload)
    {sig_script, payload} = VarString.parse_stream(payload)
    <<sequence::unsigned-little-integer-size(32), payload::binary>> = payload

    {%__MODULE__{
       previous_output: outpoint,
       signature_script: sig_script,
       sequence: sequence
     }, payload}
  end
@spec serialize(t) :: binary
def serialize(%__MODULE__{} = s) do
(s.previous_output |> Outpoint.serialize()) <>
(s.signature_script |> VarString.serialize()) <>
<<s.sequence::unsigned-little-integer-size(32)>>
end
end
| 35.568182 | 161 | 0.677955 |
e8451d3035ef3f1cdf7dcd08b5bfb05496add907 | 4,266 | ex | Elixir | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/test_result.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/test_result.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/test_result.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.FirebaseRules.V1.Model.TestResult do
@moduledoc """
Test result message containing the state of the test as well as a description and source position for test failures.
## Attributes
* `debugMessages` (*type:* `list(String.t)`, *default:* `nil`) - Debug messages related to test execution issues encountered during evaluation. Debug messages may be related to too many or too few invocations of function mocks or to runtime errors that occur during evaluation. For example: ```Unable to read variable [name: "resource"]```
* `errorPosition` (*type:* `GoogleApi.FirebaseRules.V1.Model.SourcePosition.t`, *default:* `nil`) - Position in the `Source` or `Ruleset` where the principle runtime error occurs. Evaluation of an expression may result in an error. Rules are deny by default, so a `DENY` expectation when an error is generated is valid. When there is a `DENY` with an error, the `SourcePosition` is returned. E.g. `error_position { line: 19 column: 37 }`
* `expressionReports` (*type:* `list(GoogleApi.FirebaseRules.V1.Model.ExpressionReport.t)`, *default:* `nil`) - The mapping from expression in the ruleset AST to the values they were evaluated to. Partially-nested to mirror AST structure. Note that this field is actually tracking expressions and not permission statements in contrast to the "visited_expressions" field above. Literal expressions are omitted.
* `functionCalls` (*type:* `list(GoogleApi.FirebaseRules.V1.Model.FunctionCall.t)`, *default:* `nil`) - The set of function calls made to service-defined methods. Function calls are included in the order in which they are encountered during evaluation, are provided for both mocked and unmocked functions, and included on the response regardless of the test `state`.
* `state` (*type:* `String.t`, *default:* `nil`) - State of the test.
* `visitedExpressions` (*type:* `list(GoogleApi.FirebaseRules.V1.Model.VisitedExpression.t)`, *default:* `nil`) - The set of visited permission expressions for a given test. This returns the positions and evaluation results of all visited permission expressions which were relevant to the test case, e.g. ``` match /path { allow read if: } ``` For a detailed report of the intermediate evaluation states, see the `expression_reports` field
"""
use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :debugMessages => list(String.t()),
          :errorPosition => GoogleApi.FirebaseRules.V1.Model.SourcePosition.t(),
          :expressionReports => list(GoogleApi.FirebaseRules.V1.Model.ExpressionReport.t()),
          :functionCalls => list(GoogleApi.FirebaseRules.V1.Model.FunctionCall.t()),
          :state => String.t(),
          :visitedExpressions => list(GoogleApi.FirebaseRules.V1.Model.VisitedExpression.t())
        }

  # Field declarations via GoogleApi.Gax.ModelBase's field/2 macro.
  # NOTE: this file is auto-generated (see header) — change the generator,
  # not this module.
  field(:debugMessages, type: :list)
  field(:errorPosition, as: GoogleApi.FirebaseRules.V1.Model.SourcePosition)
  field(:expressionReports, as: GoogleApi.FirebaseRules.V1.Model.ExpressionReport, type: :list)
  field(:functionCalls, as: GoogleApi.FirebaseRules.V1.Model.FunctionCall, type: :list)
  field(:state)
  field(:visitedExpressions, as: GoogleApi.FirebaseRules.V1.Model.VisitedExpression, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.FirebaseRules.V1.Model.TestResult do
  # Delegate Poison decoding to the model module's own decode/2.
  def decode(value, options), do: GoogleApi.FirebaseRules.V1.Model.TestResult.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.FirebaseRules.V1.Model.TestResult do
  # All Gax models share the generic ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 68.806452 | 443 | 0.751289 |
e8452f48e868e66bee367869ec5ef6b173cd967c | 334 | exs | Elixir | examples/swagger_demo/test/test_helper.exs | Whatnot-Inc/bureaucrat | d0634c6017dc68f8a23078cbc8c181a4b2d3e6db | [
"Unlicense"
] | 326 | 2015-08-19T10:05:07.000Z | 2022-03-28T08:49:33.000Z | examples/swagger_demo/test/test_helper.exs | Whatnot-Inc/bureaucrat | d0634c6017dc68f8a23078cbc8c181a4b2d3e6db | [
"Unlicense"
] | 64 | 2015-08-19T06:44:19.000Z | 2022-03-29T06:23:34.000Z | examples/swagger_demo/test/test_helper.exs | Whatnot-Inc/bureaucrat | d0634c6017dc68f8a23078cbc8c181a4b2d3e6db | [
"Unlicense"
] | 66 | 2016-01-08T20:40:40.000Z | 2022-03-03T02:15:15.000Z | Bureaucrat.start(
writer: Bureaucrat.SwaggerSlateMarkdownWriter,
default_path: "doc/source/index.html.md",
env_var: "DOC",
swagger: "priv/static/swagger.json" |> File.read!() |> Poison.decode!())
# Each test must explicitly check out its own sandboxed DB connection
# (no globally shared connection).
Ecto.Adapters.SQL.Sandbox.mode(SwaggerDemo.Repo, :manual)

# Register Bureaucrat's formatter alongside the default CLI formatter so the
# Bureaucrat instance configured above can capture documented requests.
ExUnit.start(formatters: [ExUnit.CLIFormatter, Bureaucrat.Formatter])
| 33.4 | 74 | 0.754491 |
e8453e8dea0b15a39e40fe68194d1e6219c45b62 | 275 | ex | Elixir | lib/erlixir_web/views/layout_view.ex | itsemilano/erlixir | 39fdcb86a9ccd55058682b3263d40efb9cbad11f | [
"MIT"
] | null | null | null | lib/erlixir_web/views/layout_view.ex | itsemilano/erlixir | 39fdcb86a9ccd55058682b3263d40efb9cbad11f | [
"MIT"
] | null | null | null | lib/erlixir_web/views/layout_view.ex | itsemilano/erlixir | 39fdcb86a9ccd55058682b3263d40efb9cbad11f | [
"MIT"
] | null | null | null | defmodule ErlixirWeb.LayoutView do
  # View module backing the shared layout templates.
  use ErlixirWeb, :view

  # Phoenix LiveDashboard is available only in development by default,
  # so we instruct Elixir to not warn if the dashboard route is missing.
  @compile {:no_warn_undefined, {Routes, :live_dashboard_path, 2}}
end
| 34.375 | 72 | 0.774545 |
e8454aa1512b44fba90bb923a0d6e0eb8e113913 | 967 | exs | Elixir | config/config.exs | neslinesli93/advent-of-code-2019-phoenix-react | d7523d0bda5581990c21e8113e5c78c4f0ff921a | [
"MIT"
] | 5 | 2019-12-02T08:50:54.000Z | 2021-03-31T22:54:20.000Z | config/config.exs | neslinesli93/advent-of-code-2019-phoenix-react | d7523d0bda5581990c21e8113e5c78c4f0ff921a | [
"MIT"
] | 1 | 2019-12-18T07:19:26.000Z | 2019-12-18T07:19:26.000Z | config/config.exs | neslinesli93/advent-of-code-2019-phoenix-react | d7523d0bda5581990c21e8113e5c78c4f0ff921a | [
"MIT"
] | 2 | 2019-12-16T07:52:16.000Z | 2019-12-17T17:49:05.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
# NOTE(review): `use Mix.Config` is deprecated in favor of `import Config`
# on Elixir >= 1.9 — confirm the target Elixir version before switching.
use Mix.Config

# Configures the endpoint
# NOTE(review): this secret_key_base is committed to source control; fine for
# dev/test, but production should load it from the environment instead.
config :advent2019, Advent2019Web.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "l1gQOFVqCk0ZY9KrqInNB9eVOqPm5XhmYyP8m3cNsN9/NQWLHB0h960X5Pw27K+G",
  render_errors: [view: Advent2019Web.ErrorView, accepts: ~w(json)],
  pubsub: [name: Advent2019.PubSub, adapter: Phoenix.PubSub.PG2]

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 34.535714 | 86 | 0.77456 |
e84565ad2a42eb7c34c28db0b67ed0314e192efc | 4,732 | exs | Elixir | test/crawlie/stats/server_test.exs | kianmeng/crawlie | 19883f17a208107927ba14d15312f5a908d5e8ea | [
"MIT"
] | 91 | 2016-12-29T12:31:14.000Z | 2021-09-25T23:09:34.000Z | test/crawlie/stats/server_test.exs | kianmeng/crawlie | 19883f17a208107927ba14d15312f5a908d5e8ea | [
"MIT"
] | 40 | 2016-12-14T00:55:52.000Z | 2022-01-29T08:46:03.000Z | test/crawlie/stats/server_test.exs | kianmeng/crawlie | 19883f17a208107927ba14d15312f5a908d5e8ea | [
"MIT"
] | 10 | 2017-04-06T11:18:10.000Z | 2021-10-30T00:04:09.000Z | defmodule Crawlie.Stats.ServerTest do
use ExUnit.Case
alias Crawlie.Stats.Server
alias Crawlie.Stats.Server.Data
alias Crawlie.Utils
alias Crawlie.Page
alias Crawlie.Response
  # A brand-new stats server should report an all-zero, :ready Data struct.
  test "get_stats on a fresh server" do
    ref = Server.start_new()

    assert %Data{} = data = Server.get_stats(ref)
    assert data.status == :ready
    assert Data.finished?(data) == false
    assert Data.elapsed_usec(data) == 0

    assert %Data{
             uris_visited: 0,
             uris_extracted: 0,
             depths_dist: %{},
             retry_count_dist: %{},
             bytes_received: 0,
             status_codes_dist: %{},
             content_types_dist: %{},
             failed_fetch_uris: MapSet.new(),
             uris_skipped: 0,
             failed_parse_uris: MapSet.new(),
             status: :ready,
             utimestamp_started: nil,
             utimestamp_finished: nil,
             usec_spent_fetching: 0,
           } == data
  end

  describe "Server.Data" do
    # finished?/1 is true only once status reaches :finished.
    test "finished?" do
      data = Data.new()
      assert data.status == :ready
      refute Data.finished?(data)

      data = Map.put(data, :status, :crawling)
      refute Data.finished?(data)

      data = Map.put(data, :status, :finished)
      assert Data.finished?(data)
    end

    # elapsed_usec/1: 0 before start, wall-clock delta while running,
    # fixed finish-start once finished.
    test "elapsed_usec" do
      # fresh
      data = Data.new()
      assert Data.elapsed_usec(data) == 0

      # running
      start = Utils.utimestamp() - 1000000
      data = Map.put(data, :utimestamp_started, start)
      then = Utils.utimestamp()
      elapsed = Data.elapsed_usec(data)
      now = Utils.utimestamp()
      # Bracket the measurement between two timestamps taken around the call.
      assert elapsed >= (then - start)
      assert elapsed <= (now - start)

      # finished
      finish = Utils.utimestamp()
      data = Map.put(data, :utimestamp_finished, finish)
      assert Data.elapsed_usec(data) == finish - start
    end
  end
  # Exercises the fetch_succeeded accumulator: byte totals, content-type /
  # depth / retry histograms, visit counter and fetch-time total.
  test "fetch_succeeded" do
    ref = Server.start_new()
    url = "https://foo.bar/"
    page = Page.new(url)
    response = Response.new(url, 200, [{"content-type", "foo"}], "body")
    duration = 666

    Server.fetch_succeeded(ref, page, response, duration)

    data = Server.get_stats(ref)
    assert data.bytes_received == 4
    assert data.content_types_dist == %{"foo" => 1}
    assert data.depths_dist == %{0 => 1}
    assert data.retry_count_dist == %{0 => 1}
    assert data.uris_visited == 1
    assert data.usec_spent_fetching == 666
    assert data.utimestamp_started != nil

    # page at a bigger depth
    page2 = Page.new("page2", 7)
    Server.fetch_succeeded(ref, page2, response, 100)

    data = Server.get_stats(ref)
    assert data.bytes_received == 8
    assert data.content_types_dist == %{"foo" => 2}
    assert data.depths_dist == %{0 => 1, 7 => 1}
    assert data.retry_count_dist == %{0 => 2}
    assert data.uris_visited == 2
    assert data.usec_spent_fetching == 766

    # retried page
    retried_page = page |> Map.put(:retries, 1)
    Server.fetch_succeeded(ref, retried_page, response, 200)

    data = Server.get_stats(ref)
    assert data.bytes_received == 12
    assert data.content_types_dist == %{"foo" => 3}
    assert data.depths_dist == %{0 => 2, 7 => 1}
    assert data.retry_count_dist == %{0 => 2, 1 => 1}
    assert data.uris_visited == 3
    assert data.usec_spent_fetching == 966
  end

  # Failed fetch URIs are collected in a set, so repeats don't grow it.
  test "fetch_failed" do
    ref = Server.start_new()
    url = "https://foo.bar/"
    page = Page.new(url)

    Server.fetch_failed(ref, page, 100)
    data = Server.get_stats(ref)
    assert data.failed_fetch_uris == MapSet.new([page.uri])

    # no room for any more failed fetches in the set
    Server.fetch_failed(ref, page, 1)
    data2 = Server.get_stats(ref)
    assert data2.failed_fetch_uris == data.failed_fetch_uris
  end

  # Same set semantics for parse failures.
  test "parse_failed" do
    ref = Server.start_new()
    url = "https://foo.bar/"
    page = Page.new(url)

    Server.parse_failed(ref, page, 100)
    data = Server.get_stats(ref)
    assert data.failed_parse_uris == MapSet.new([page.uri])

    # no room for any more failed parses in the set
    Server.parse_failed(ref, page, 1)
    data2 = Server.get_stats(ref)
    assert data2.failed_parse_uris == data.failed_parse_uris
  end

  test "page_skipped" do
    ref = Server.start_new()
    url = "https://foo.bar/"
    page = Page.new(url)

    Server.page_skipped(ref, page)
    data = Server.get_stats(ref)
    assert data.uris_skipped == 1
  end

  test "uris_extracted" do
    ref = Server.start_new()
    Server.uris_extracted(ref, 13)
    data = Server.get_stats(ref)
    assert data.uris_extracted == 13
  end

  # finished/1 flips the status and stamps the finish time.
  test "finished" do
    ref = Server.start_new()
    data = Server.get_stats(ref)
    refute Data.finished?(data)

    Server.finished(ref)
    data = Server.get_stats(ref)
    assert Data.finished?(data)
    assert is_integer(data.utimestamp_finished)
  end
end
| 26.58427 | 72 | 0.64497 |
e8458086e1c5104270fc060d9c933ac6df278847 | 4,714 | ex | Elixir | lib/mix/tasks/parse_to_json.ex | ryoung786/jeopardy | 5558fc49013c5a22e556a0040cbc116aa8f63912 | [
"MIT"
] | null | null | null | lib/mix/tasks/parse_to_json.ex | ryoung786/jeopardy | 5558fc49013c5a22e556a0040cbc116aa8f63912 | [
"MIT"
] | 32 | 2020-06-08T14:50:46.000Z | 2021-01-01T05:40:44.000Z | lib/mix/tasks/parse_to_json.ex | ryoung786/jeopardy | 5558fc49013c5a22e556a0040cbc116aa8f63912 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.ParseToJson do
  use Mix.Task

  @shortdoc "Parse jarchive html files and write to json files"

  # NOTE(review): hard-coded, machine-specific absolute path — consider making
  # this configurable (env var or task argument).
  @archive_path "/Users/ryany/dev/jeopardy-parser/j-archive"
  @out_path Path.join(:code.priv_dir(:jeopardy), "jarchive")

  # Task entry point: boots the application, then processes either the files
  # named by the given ids or every file in the archive directory.
  def run(args) do
    Mix.Task.run("app.start")
    get_files(args) |> process_files()
  end

  # With explicit ids, map each to its "<id>.html" filename; with no args,
  # list everything in the archive directory.
  def get_files([_ | _] = ids), do: Enum.map(ids, fn id -> "#{id}.html" end)

  def get_files([]) do
    {:ok, files} = File.ls(@archive_path)
    files
  end

  # Parses each file and writes "<index>.json" into priv/jarchive.
  # NOTE(review): output files are named by enumeration index, not by the
  # requested id — confirm that's intended when ids are passed explicitly.
  def process_files(files) do
    Enum.with_index(files)
    |> Enum.each(fn {file, i} ->
      # Lightweight progress report every 100 files.
      if rem(i, 100) == 0, do: Mix.shell().info("processed #{i} files")

      with {:ok, f} <- File.read(Path.join(@archive_path, file)) do
        game = parse(f, i)
        File.write(Path.join(@out_path, "#{i}.json"), Jason.encode!(game))
      else
        # Any read failure is reported as missing; the error reason is dropped.
        _ -> Mix.shell().error("Couldn't find #{file}")
      end
    end)
  end

  # Builds the complete game map for one J! Archive html document.
  def parse(f, id) do
    {:ok, html} = Floki.parse_document(f)
    final_jeopardy_clue = parse_final_jeopardy_clue(html)

    %{
      id: id,
      air_date: parse_air_date(html),
      episode_num: get_episode_num(html),
      title: Floki.find(html, "#game_title") |> Floki.text(),
      description: Floki.find(html, "#game_comments") |> Floki.text(),
      contestants: get_contestants(html),
      jeopardy_round_categories: categories_by_round(:jeopardy, html),
      double_jeopardy_round_categories: categories_by_round(:double_jeopardy, html),
      final_jeopardy_category: final_jeopardy_clue[:category],
      jeopardy: parse_round(Floki.find(html, "#jeopardy_round"), :jeopardy),
      double_jeopardy: parse_round(Floki.find(html, "#double_jeopardy_round"), :double_jeopardy),
      final_jeopardy: final_jeopardy_clue
    }
  end
defp categories_by_round(round, html) when round == :jeopardy,
do: categories_by_round("#jeopardy_round", html)
defp categories_by_round(round, html) when round == :double_jeopardy,
do: categories_by_round("#double_jeopardy_round", html)
defp categories_by_round(round, html) do
Floki.find(html, "#{round} .category_name") |> Enum.map(&Floki.text/1)
end
  # Parses every clue cell in one round's table.  `html` is the already-located
  # round element; `round` is :jeopardy or :double_jeopardy.
  def parse_round(html, round) do
    categories = Floki.find(html, ".category_name") |> Enum.map(&Floki.text/1)

    Floki.find(html, "td.clue")
    # [{clue, idx}, ...]
    |> Enum.with_index()
    |> Enum.map(fn {clue, i} ->
      parse_clue(clue, i, categories, round)
    end)
  end

  # Converts one clue cell into a map.  The board is assumed six clues wide:
  # rem(idx, 6) picks the category column, div(idx, 6) the row (which fixes
  # the dollar value).  Unrevealed clues get only value/round/category.
  def parse_clue(clue, idx, categories, round) do
    category = Enum.at(categories, rem(idx, 6))

    round_num =
      case round do
        :jeopardy -> 1
        :double_jeopardy -> 2
        # NOTE(review): final jeopardy yields value == 0 here — confirm intended.
        :final_jeopardy -> 0
      end

    value = 100 * round_num * (div(idx, 6) + 1)
    question = clue |> Floki.find(".clue_text") |> Floki.text()

    if question == "" do
      %{value: value, round: Atom.to_string(round), category: category}
    else
      # The correct response is embedded in the cell's onmouseover attribute.
      answer =
        clue
        |> Floki.attribute("div", "onmouseover")
        |> List.first()
        |> String.replace(~r/^.*correct_response">(.*)<\/em.*$/, "\\1")

      # A daily double is marked by its own value css class.
      is_daily_double = Floki.find(clue, ".clue_value_daily_double") |> Enum.count() > 0
      type = if is_daily_double, do: "daily_double", else: "standard"

      %{
        clue_text: question,
        answer_text: answer,
        value: value,
        round: Atom.to_string(round),
        type: type,
        category: category
      }
    end
  end

  # Final jeopardy lives in its own table; returns %{} when the page has none
  # (detected by an empty category name).
  def parse_final_jeopardy_clue(html) do
    clue = Floki.find(html, "table.final_round")
    question = Floki.find(clue, "td.clue_text") |> Floki.text()
    category = Floki.find(clue, "td.category_name") |> Floki.text()

    case category do
      "" ->
        %{}

      _ ->
        # As with regular clues, the answer hides in the onmouseover attribute.
        answer =
          clue
          |> Floki.attribute("div", "onmouseover")
          |> List.first()
          |> String.replace(~r/^.*em class.*correct_response.*">(.+)<\/em>.*$/, "\\1")

        %{
          clue_text: question,
          answer_text: answer,
          round: "final_jeopardy",
          type: "final_jeopardy",
          category: category
        }
    end
  end
def parse_air_date(html) do
case Floki.find(html, "title")
|> Floki.text()
|> String.replace(~r/^.*([0-9]{4}.[0-9]{2}.[0-9]{2}).*$/, "\\1")
|> Date.from_iso8601() do
{:ok, air_date} -> air_date
_ -> nil
end
end
def get_contestants(html),
do: Floki.find(html, "#contestants_table .contestants a") |> Enum.map(&Floki.text/1)
def get_episode_num(html) do
title = Floki.find(html, "#game_title") |> Floki.text()
case Regex.run(~r/#(\d+) -/, title) |> Enum.at(1) do
nil -> nil
ep_num_as_string -> String.to_integer(ep_num_as_string)
end
end
end
| 29.279503 | 97 | 0.606491 |
e8458c35c8a07d2ebd55b8654c8fb9635645900e | 1,295 | exs | Elixir | kousa/test/ad_hoc_user_test.exs | LeonardSSH/dogehouse | 584055ad407bc37fa35cdf36ebb271622e29d436 | [
"MIT"
] | 9 | 2021-03-17T03:56:18.000Z | 2021-09-24T22:45:14.000Z | kousa/test/ad_hoc_user_test.exs | ActuallyTomas/dogehouse | 8c3d2cd1d7e99e173f0658759467a391c4a90c4e | [
"MIT"
] | 12 | 2021-07-06T12:51:13.000Z | 2022-03-16T12:38:18.000Z | kousa/test/ad_hoc_user_test.exs | ActuallyTomas/dogehouse | 8c3d2cd1d7e99e173f0658759467a391c4a90c4e | [
"MIT"
] | 4 | 2021-07-15T20:33:50.000Z | 2022-03-27T12:46:47.000Z | defmodule KousaTest.AdHocUserTest do
use ExUnit.Case, async: true
use KousaTest.Support.EctoSandbox
@moduledoc """
ad-hoc test set to give coverage for all modules
that have 'alias Beef.Schemas.User', prior to refactoring.
"""
# TODO: recategorize into appropriate test cases over
# time.
alias Beef.Schemas.User
alias Beef.Schemas.Room
alias Beef.Repo
alias KousaTest.Support.Factory
  describe "Beef.Schemas.RoomBlock" do
    alias Beef.Schemas.RoomBlock

    # Inserts a RoomBlock row linking a user, room and moderator, then checks
    # that the row and its :user/:mod preloads round-trip through the Repo.
    test "you can add a room blocker into the roomblock table" do
      %{id: uid} = Factory.create(User)
      %{id: rid} = Factory.create(Room)
      %{id: mid} = Factory.create(User)

      assert {:ok, %RoomBlock{userId: ^uid, roomId: ^rid, modId: ^mid}} =
               %RoomBlock{}
               |> RoomBlock.insert_changeset(%{userId: uid, roomId: rid, modId: mid})
               |> Repo.insert()

      assert [roomblock] = Repo.all(RoomBlock)

      assert %RoomBlock{
               userId: ^uid,
               user: %User{id: ^uid},
               roomId: ^rid,
               # TODO: insert room assoc here.
               modId: ^mid,
               mod: %User{id: ^mid}
             } = Repo.preload(roomblock, [:user, :mod])
    end
  end

  # Placeholder — no TokenUtils coverage has been written yet.
  describe "Kousa.Utils.TokenUtils" do
  end
end
| 26.979167 | 85 | 0.601544 |
e845d8d0085f4bb2bdbfc19209c82ec34afcd510 | 1,665 | ex | Elixir | lib/handler/example.ex | ColaCheng/elixir_cowboy_example | 6c432ecaa72137af78899db15af0c66d4155fe92 | [
"MIT"
] | null | null | null | lib/handler/example.ex | ColaCheng/elixir_cowboy_example | 6c432ecaa72137af78899db15af0c66d4155fe92 | [
"MIT"
] | null | null | null | lib/handler/example.ex | ColaCheng/elixir_cowboy_example | 6c432ecaa72137af78899db15af0c66d4155fe92 | [
"MIT"
] | null | null | null | defmodule ElixirCowboyExample.Handler.Example do
alias ElixirCowboyExample.Handler.Utils, as: HUtils
  # Cowboy HTTP handler entry point: builds a request map, reads and decodes a
  # JSON body when one is present, dispatches on the HTTP method, and replies
  # with either a JSON payload or a bare status code.
  def init(req_in, opts) do
    request = %{
      method: :cowboy_req.method(req_in),
      query: :cowboy_req.parse_qs(req_in),
      data: %{}
    }

    # Only read/decode a body when the request carries one.  Track the most
    # recent req object (req_out vs req_in) so the final reply uses the right
    # stream state.
    {result, req_done} =
      case :cowboy_req.has_body(req_in) do
        true ->
          {:ok, body, req_out} = HUtils.read_body(req_in, <<>>)

          case HUtils.decode_body(body, :json) do
            {:ok, data} ->
              {process_request(Map.put(request, :data, data)), req_out}

            :error ->
              {:invalid_json, req_out}
          end

        false ->
          {process_request(request), req_in}
      end

    case make_response(result) do
      # Response with a JSON body.
      {code, response} ->
        {:ok,
         :cowboy_req.reply(
           code,
           %{<<"content-type">> => <<"application/json">>},
           :jiffy.encode(response),
           req_done
         ), opts}

      # Bare status code, no body.
      code ->
        {:ok, :cowboy_req.reply(code, req_done), opts}
    end
  end
defp process_request(%{method: method}) do
case method do
"GET" ->
{:ok, %{message: "Hello World!"}}
"POST" ->
:created
"PUT" ->
:updated
"DELETE" ->
:deleted
_ ->
:method_not_allowed
end
end
defp make_response({:ok, result}), do: {200, result}
defp make_response(:created), do: 201
defp make_response(:updated), do: 204
defp make_response(:deleted), do: 205
defp make_response(:invalid_json), do: {400, %{message: "Invalid JSON."}}
defp make_response(:method_not_allowed), do: {405, %{message: "Method not allowed."}}
end
| 24.130435 | 87 | 0.555556 |
e845f0a2b30fe2588d9a282cbc82ac0239ec2368 | 1,003 | ex | Elixir | test/support/channel_case.ex | gustavoarmoa/changelog.com | e898a9979a237ae66962714821ed8633a4966f37 | [
"MIT"
] | 2,599 | 2016-10-25T15:02:53.000Z | 2022-03-26T02:34:42.000Z | test/support/channel_case.ex | sdrees/changelog.com | 955cdcf93d74991062f19a03e34c9f083ade1705 | [
"MIT"
] | 253 | 2016-10-25T20:29:24.000Z | 2022-03-29T21:52:36.000Z | test/support/channel_case.ex | sdrees/changelog.com | 955cdcf93d74991062f19a03e34c9f083ade1705 | [
"MIT"
] | 298 | 2016-10-25T15:18:31.000Z | 2022-01-18T21:25:52.000Z | defmodule ChangelogWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
  # Injected into every test module that does `use ChangelogWeb.ChannelCase`.
  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest

      alias Changelog.Repo

      import Ecto
      import Ecto.Query, only: [from: 2]

      # The default endpoint for testing
      @endpoint ChangelogWeb.Endpoint
    end
  end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Changelog.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Changelog.Repo, {:shared, self()})
end
:ok
end
end
| 23.880952 | 71 | 0.703888 |
e84610c8f87d53e55fc813093391d4472d112092 | 2,341 | exs | Elixir | episode06/test/test/controllers/post_controller_test.exs | paulfioravanti/learn_phoenix | 3767f28b09bb5e740231dd261a0bfa8b3eea98d3 | [
"MIT"
] | null | null | null | episode06/test/test/controllers/post_controller_test.exs | paulfioravanti/learn_phoenix | 3767f28b09bb5e740231dd261a0bfa8b3eea98d3 | [
"MIT"
] | null | null | null | episode06/test/test/controllers/post_controller_test.exs | paulfioravanti/learn_phoenix | 3767f28b09bb5e740231dd261a0bfa8b3eea98d3 | [
"MIT"
] | null | null | null | defmodule Test.PostControllerTest do
use Test.ConnCase
alias Test.Post
  # NOTE(review): @valid_attrs and @invalid_attrs are both %{} (generator
  # scaffold) — the "invalid data" tests currently post the same payload as
  # the "valid data" ones.  Fill these in from the Post schema.
  @valid_attrs %{}
  @invalid_attrs %{}

  setup do
    # NOTE(review): conn/0 was deprecated in later Phoenix versions in favor
    # of build_conn/0 — confirm the Phoenix version before upgrading.
    conn = conn()
    {:ok, conn: conn}
  end

  test "lists all entries on index", %{conn: conn} do
    conn = get conn, post_path(conn, :index)
    assert html_response(conn, 200) =~ "Listing posts"
  end

  test "renders form for new resources", %{conn: conn} do
    conn = get conn, post_path(conn, :new)
    assert html_response(conn, 200) =~ "New post"
  end

  test "creates resource and redirects when data is valid", %{conn: conn} do
    conn = post conn, post_path(conn, :create), post: @valid_attrs
    assert redirected_to(conn) == post_path(conn, :index)
    assert Repo.get_by(Post, @valid_attrs)
  end

  test "does not create resource and renders errors when data is invalid", %{conn: conn} do
    conn = post conn, post_path(conn, :create), post: @invalid_attrs
    assert html_response(conn, 200) =~ "New post"
  end

  test "shows chosen resource", %{conn: conn} do
    post = Repo.insert! %Post{}
    conn = get conn, post_path(conn, :show, post)
    assert html_response(conn, 200) =~ "Show post"
  end

  # Missing records surface as Ecto.NoResultsError (rendered as 404 by Phoenix).
  test "renders page not found when id is nonexistent", %{conn: conn} do
    assert_raise Ecto.NoResultsError, fn ->
      get conn, post_path(conn, :show, -1)
    end
  end

  test "renders form for editing chosen resource", %{conn: conn} do
    post = Repo.insert! %Post{}
    conn = get conn, post_path(conn, :edit, post)
    assert html_response(conn, 200) =~ "Edit post"
  end

  test "updates chosen resource and redirects when data is valid", %{conn: conn} do
    post = Repo.insert! %Post{}
    conn = put conn, post_path(conn, :update, post), post: @valid_attrs
    assert redirected_to(conn) == post_path(conn, :show, post)
    assert Repo.get_by(Post, @valid_attrs)
  end

  test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
    post = Repo.insert! %Post{}
    conn = put conn, post_path(conn, :update, post), post: @invalid_attrs
    assert html_response(conn, 200) =~ "Edit post"
  end

  test "deletes chosen resource", %{conn: conn} do
    post = Repo.insert! %Post{}
    conn = delete conn, post_path(conn, :delete, post)
    assert redirected_to(conn) == post_path(conn, :index)
    refute Repo.get(Post, post.id)
  end
end
| 32.513889 | 98 | 0.670226 |
e846575417c6d77bfec8d2315facab3150af51aa | 2,357 | ex | Elixir | deps/sqlitex/lib/sqlitex/sql_builder.ex | lgandersen/jocker_dist | b5e676f8d9e60bbc8bc7a82ccd1e05389f2cd5b5 | [
"BSD-2-Clause"
] | null | null | null | deps/sqlitex/lib/sqlitex/sql_builder.ex | lgandersen/jocker_dist | b5e676f8d9e60bbc8bc7a82ccd1e05389f2cd5b5 | [
"BSD-2-Clause"
] | null | null | null | deps/sqlitex/lib/sqlitex/sql_builder.ex | lgandersen/jocker_dist | b5e676f8d9e60bbc8bc7a82ccd1e05389f2cd5b5 | [
"BSD-2-Clause"
] | null | null | null | defmodule Sqlitex.SqlBuilder do
@moduledoc """
This module contains functions for SQL creation. At the moment
it is only used for `CREATE TABLE` statements.
"""
# Returns an SQL CREATE TABLE statement as a string. `name` is the name of the
# table, and `table_opts` contains the table constraints (at the moment only
# PRIMARY KEY is supported). `cols` is expected to be a keyword list in the
# form of:
#
# column_name: :column_type, of
# column_name: {:column_type, [column_constraints]}
def create_table(name, table_opts, cols) do
tbl_options = get_opts_map(table_opts, &table_opt/1)
get_opt = &(Map.get(tbl_options, &1, nil))
"CREATE #{get_opt.(:temp)} TABLE \"#{name}\" (#{get_columns_block(cols)} #{get_opt.(:primary_key)})"
end
# Supported table options
defp table_opt(:temporary), do: {:temp, "TEMP"}
defp table_opt(:temp), do: {:temp, "TEMP"}
defp table_opt({:primary_key, cols}) when is_list(cols) do
{
:primary_key, ", PRIMARY KEY ("
# Also quote the columns in a PRIMARY KEY list
<> (cols |> Enum.map(&(~s("#{&1}"))) |> Enum.join(", "))
<> ")"
}
end
defp table_opt({:primary_key, col}) when is_atom(col) do
{:primary_key, ", PRIMARY KEY (\"" <> Atom.to_string(col) <> "\")"}
end
# Supported column options
defp column_opt(:primary_key), do: {:primary_key, "PRIMARY KEY"}
defp column_opt(:not_null), do: {:not_null, "NOT NULL"}
defp column_opt(:autoincrement), do: {:autoincrement, "AUTOINCREMENT"}
# Helper function that creates a map of option names
# and their string representations
defp get_opts_map(opts, opt) do
Enum.into(opts, %{}, &(opt.(&1)))
end
# Create the sql fragment for the column definitions from the
# passed keyword list
defp get_columns_block(cols) do
Enum.map_join(cols, ", ", fn(col) ->
case col do
# Column with name, type and constraint
{name, {type, constraints}} ->
col_options = get_opts_map(constraints, &column_opt/1)
get_opt = &(Map.get(col_options, &1, nil))
[~s("#{name}"), type, get_opt.(:primary_key), get_opt.(:not_null), get_opt.(:autoincrement)]
|> Enum.filter(&(&1))
|> Enum.join(" ")
# Column with name and type
{name, type} ->
~s("#{name}" #{type})
end
end)
end
end
| 35.179104 | 104 | 0.63216 |
e84688cb5b024275709219d0c70460b550423956 | 38,460 | ex | Elixir | lib/phoenix_live_view/channel.ex | khionu/phoenix_live_view | 4ada690fb1579eebf787bd7459233086b383bd7b | [
"MIT"
] | null | null | null | lib/phoenix_live_view/channel.ex | khionu/phoenix_live_view | 4ada690fb1579eebf787bd7459233086b383bd7b | [
"MIT"
] | 4 | 2021-03-04T13:00:52.000Z | 2021-03-12T12:42:09.000Z | deps/phoenix_live_view/lib/phoenix_live_view/channel.ex | adrianomota/blog | ef3b2d2ed54f038368ead8234d76c18983caa75b | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.Channel do
@moduledoc false
use GenServer, restart: :temporary
require Logger
alias Phoenix.LiveView.{Socket, Utils, Diff, Upload, UploadConfig, Route, Session, Lifecycle}
alias Phoenix.Socket.Message
@prefix :phoenix
@not_mounted_at_router :not_mounted_at_router
# Starts a channel process for a single LiveView. Idle processes hibernate
# after the endpoint-configured :hibernate_after (defaults to 15s).
def start_link({endpoint, from}) do
  hibernate_after = endpoint.config(:live_view)[:hibernate_after] || 15000
  opts = [hibernate_after: hibernate_after]
  GenServer.start_link(__MODULE__, from, opts)
end

# Asynchronously sends new assigns to the live component identified by
# `module` + `id` inside the LiveView process `pid` (defaults to self()).
def send_update(pid \\ self(), module, id, assigns) do
  send(pid, {@prefix, :send_update, {module, id, assigns}})
end

# Same as send_update/4, but delivered after `time_in_milliseconds`.
def send_update_after(pid \\ self(), module, id, assigns, time_in_milliseconds)
    when is_integer(time_in_milliseconds) do
  Process.send_after(
    pid,
    {@prefix, :send_update, {module, id, assigns}},
    time_in_milliseconds
  )
end

# Synchronous no-op round-trip, useful to ensure all previously sent
# messages have been processed by the channel.
def ping(pid) do
  GenServer.call(pid, {@prefix, :ping}, :infinity)
end

# Registers the calling upload process for an upload entry with the LiveView
# channel `pid`. `cid` targets a live component (nil for the root view).
def register_upload(pid, {upload_config_ref, entry_ref} = _ref, cid) do
  info = %{channel_pid: self(), ref: upload_config_ref, entry_ref: entry_ref, cid: cid}
  GenServer.call(pid, {@prefix, :register_entry_upload, info})
end

# Fetches the upload config registered under `name`, scoped to `cid`.
def fetch_upload_config(pid, name, cid) do
  GenServer.call(pid, {@prefix, :fetch_upload_config, name, cid})
end

# Asks the owning channel (self()) to drop the given entry refs from an
# upload config. Fire-and-forget; handled in handle_info/2.
def drop_upload_entries(%UploadConfig{} = conf, entry_refs) do
  info = %{ref: conf.ref, entry_refs: entry_refs, cid: conf.cid}
  send(self(), {@prefix, :drop_upload_entries, info})
end
@impl true
# The channel process starts before the client join completes; monitor the
# joiner so we shut down if it dies before the join message arrives.
def init({pid, _ref}) do
  {:ok, Process.monitor(pid)}
end

@impl true
# First message after init: the actual channel join. Demonitor the joiner
# and run the full LiveView mount.
def handle_info({Phoenix.Channel, auth_payload, from, phx_socket}, ref) do
  Process.demonitor(ref)
  mount(auth_payload, from, phx_socket)
rescue
  # Normalize exceptions for better client debugging
  e -> reraise(e, __STACKTRACE__)
end

# The pre-join monitor fired: the joiner died before joining.
def handle_info({:DOWN, ref, _, _, _reason}, ref) do
  {:stop, {:shutdown, :closed}, ref}
end

# The websocket transport went down: close the view.
def handle_info(
      {:DOWN, _, _, transport_pid, _reason},
      %{socket: %{transport_pid: transport_pid}} = state
    ) do
  {:stop, {:shutdown, :closed}, state}
end

# The parent LiveView exited: a child view cannot outlive its parent.
def handle_info({:DOWN, _, _, parent, reason}, %{socket: %{parent_pid: parent}} = state) do
  send(state.socket.transport_pid, {:socket_close, self(), reason})
  {:stop, {:shutdown, :parent_exited}, state}
end
# A monitored process exited. If it is a tracked upload channel pid, a
# normal/closed exit unregisters the entry while an abnormal exit crashes
# the view; unknown pids fall through to the view's own handle_info/2.
def handle_info({:DOWN, _, :process, pid, reason} = msg, %{socket: socket} = state) do
  case Map.fetch(state.upload_pids, pid) do
    {:ok, {ref, entry_ref, cid}} ->
      if reason in [:normal, {:shutdown, :closed}] do
        new_state =
          state
          |> drop_upload_pid(pid)
          |> unregister_upload(ref, entry_ref, cid)

        {:noreply, new_state}
      else
        {:stop, {:shutdown, {:channel_upload_exit, reason}}, state}
      end

    :error ->
      msg
      |> view_handle_info(socket)
      |> handle_result({:handle_info, 2, nil}, state)
  end
end
# Client leaves the channel: ack and stop.
def handle_info(%Message{topic: topic, event: "phx_leave"} = msg, %{topic: topic} = state) do
  send(state.socket.transport_pid, {:socket_close, self(), {:shutdown, :left}})
  reply(state, msg.ref, :ok, %{})
  {:stop, {:shutdown, :left}, state}
end

# Client-initiated live patch. Internal routes (same LiveView) re-run
# handle_params/3; external URLs instruct the client to fully redirect.
def handle_info(%Message{topic: topic, event: "live_patch"} = msg, %{topic: topic} = state) do
  %{socket: socket} = state
  %{view: view} = socket
  %{"url" => url} = msg.payload

  case Route.live_link_info!(socket, view, url) do
    {:internal, %Route{params: params, action: action}} ->
      socket = socket |> assign_action(action) |> Utils.clear_flash()

      socket
      |> Utils.call_handle_params!(view, params, url)
      |> handle_result({:handle_params, 3, msg.ref}, state)

    {:external, _uri} ->
      {:noreply, reply(state, msg.ref, :ok, %{link_redirect: true})}
  end
end
# Client announces components about to be removed from the DOM; mark them
# so diffing stops until "cids_destroyed" confirms the deletion.
def handle_info(
      %Message{topic: topic, event: "cids_will_destroy"} = msg,
      %{topic: topic} = state
    ) do
  %{"cids" => cids} = msg.payload

  new_components =
    Enum.reduce(cids, state.components, fn cid, acc ->
      Diff.mark_for_deletion_component(cid, acc)
    end)

  {:noreply, reply(%{state | components: new_components}, msg.ref, :ok, %{})}
end

# Upload progress report from the client. Updates the entry's progress and,
# when the upload config declares a progress callback, invokes it with the
# updated entry.
def handle_info(%Message{topic: topic, event: "progress"} = msg, %{topic: topic} = state) do
  cid = msg.payload["cid"]

  new_state =
    write_socket(state, cid, msg.ref, fn socket, _ ->
      %{"ref" => ref, "entry_ref" => entry_ref, "progress" => progress} = msg.payload
      new_socket = Upload.update_progress(socket, ref, entry_ref, progress)
      upload_conf = Upload.get_upload_by_ref!(new_socket, ref)
      entry = UploadConfig.get_entry_by_ref(upload_conf, entry_ref)

      # Only run the callback when the entry still exists and a
      # progress_event was configured.
      if event = entry && upload_conf.progress_event do
        {:noreply, new_socket} = event.(upload_conf.name, entry, new_socket)
        {new_socket, {:ok, {msg.ref, %{}}, state}}
      else
        {new_socket, {:ok, {msg.ref, %{}}, state}}
      end
    end)

  {:noreply, new_state}
end
# Upload preflight: the client asks permission for a set of entries.
# Registers the upload under its cid and replies with a preflight response.
def handle_info(%Message{topic: topic, event: "allow_upload"} = msg, %{topic: topic} = state) do
  %{"ref" => upload_ref, "entries" => entries} = payload = msg.payload
  cid = payload["cid"]

  new_state =
    write_socket(state, cid, msg.ref, fn socket, _ ->
      socket = Upload.register_cid(socket, upload_ref, cid)
      conf = Upload.get_upload_by_ref!(socket, upload_ref)
      ensure_unique_upload_name!(state, conf)

      {ok_or_error, reply, %Socket{} = new_socket} =
        with {:ok, new_socket} <- Upload.put_entries(socket, conf, entries, cid) do
          Upload.generate_preflight_response(new_socket, conf.name, cid)
        end

      # Only start tracking the upload name when preflight succeeded.
      new_upload_names =
        case ok_or_error do
          :ok -> Map.put(state.upload_names, conf.name, {upload_ref, cid})
          _ -> state.upload_names
        end

      {new_socket, {:ok, {msg.ref, reply}, %{state | upload_names: new_upload_names}}}
    end)

  {:noreply, new_state}
end

# Client confirms components were removed from the DOM; delete them
# server-side and report the cids actually deleted.
def handle_info(
      %Message{topic: topic, event: "cids_destroyed"} = msg,
      %{topic: topic} = state
    ) do
  %{"cids" => cids} = msg.payload
  {deleted_cids, new_state} = delete_components(state, cids)
  {:noreply, reply(new_state, msg.ref, :ok, %{cids: deleted_cids})}
end

# A user event (click, form submit, etc). Routed to a live component when a
# cid is present, otherwise to the root view's handle_event/3.
def handle_info(%Message{topic: topic, event: "event"} = msg, %{topic: topic} = state) do
  %{"value" => raw_val, "event" => event, "type" => type} = msg.payload
  val = decode_event_type(type, raw_val)

  if cid = msg.payload["cid"] do
    component_handle_event(state, cid, event, val, msg.ref, msg.payload)
  else
    new_state = %{state | socket: maybe_update_uploads(state.socket, msg.payload)}

    new_state.socket
    |> view_handle_event(event, val)
    |> handle_result({:handle_event, 3, msg.ref}, new_state)
  end
end
# Internal request (see drop_upload_entries/2) to drop entries by ref.
def handle_info({@prefix, :drop_upload_entries, info}, state) do
  %{ref: ref, cid: cid, entry_refs: entry_refs} = info

  new_state =
    write_socket(state, cid, nil, fn socket, _ ->
      upload_config = Upload.get_upload_by_ref!(socket, ref)
      {Upload.drop_upload_entries(socket, upload_config, entry_refs), {:ok, nil, state}}
    end)

  {:noreply, new_state}
end

# Internal send_update/4 delivery: patch the component's assigns and push
# the resulting diff. A missing-but-loaded component is only logged at
# debug level (it may have been legitimately removed meanwhile); an
# unavailable module is a hard error.
def handle_info({@prefix, :send_update, update}, state) do
  case Diff.update_component(state.socket, state.components, update) do
    {diff, new_components} ->
      {:noreply, push_diff(%{state | components: new_components}, diff, nil)}

    :noop ->
      {module, id, _} = update

      if function_exported?(module, :__info__, 1) do
        # Only a warning, because there can be race conditions where a component is removed before a `send_update` happens.
        Logger.debug(
          "send_update failed because component #{inspect(module)} with ID #{inspect(id)} does not exist or it has been removed"
        )
      else
        raise ArgumentError, "send_update failed (module #{inspect(module)} is not available)"
      end

      {:noreply, state}
  end
end

# Redirect/patch command forwarded from a child LiveView (see
# handle_redirect/5): executed by the root view.
def handle_info({@prefix, :redirect, command, flash}, state) do
  handle_redirect(state, command, flash, nil)
end

# Anything else goes to the user's handle_info/2 callback.
def handle_info(msg, %{socket: socket} = state) do
  msg
  |> view_handle_info(socket)
  |> handle_result({:handle_info, 2, nil}, state)
end
@impl true
# Synchronous no-op used by ping/1 to flush the mailbox.
def handle_call({@prefix, :ping}, _from, state) do
  {:reply, :ok, state}
end

# Looks up an upload config by name in the root view or a component.
def handle_call({@prefix, :fetch_upload_config, name, cid}, _from, state) do
  read_socket(state, cid, fn socket, _ ->
    result =
      with {:ok, uploads} <- Map.fetch(socket.assigns, :uploads),
           {:ok, conf} <- Map.fetch(uploads, name),
           do: {:ok, conf}

    {:reply, result, state}
  end)
end

# A child LiveView is mounting and asks for the assigns it may inherit
# through assign_new.
def handle_call({@prefix, :child_mount, _child_pid, assign_new}, _from, state) do
  assigns = Map.take(state.socket.assigns, assign_new)
  {:reply, {:ok, assigns}, state}
end

# An upload channel registers itself for a specific entry; the reply is
# sent from within register_entry_upload/3.
def handle_call({@prefix, :register_entry_upload, info}, from, state) do
  {:noreply, register_entry_upload(state, from, info)}
end

# User-defined calls are forwarded to the view's handle_call/3; any
# resulting socket changes are rendered before replying.
def handle_call(msg, from, %{socket: socket} = state) do
  case socket.view.handle_call(msg, from, socket) do
    {:reply, reply, %Socket{} = new_socket} ->
      case handle_changed(state, new_socket, nil) do
        {:noreply, new_state} -> {:reply, reply, new_state}
        {:stop, reason, new_state} -> {:stop, reason, reply, new_state}
      end

    other ->
      handle_result(other, {:handle_call, 3, nil}, state)
  end
end

@impl true
# User-defined casts are forwarded to the view's handle_cast/2.
def handle_cast(msg, %{socket: socket} = state) do
  msg
  |> socket.view.handle_cast(socket)
  |> handle_result({:handle_cast, 2, nil}, state)
end
@impl true
# Invokes the view's optional terminate/2 callback once mounted.
def terminate(reason, %{socket: socket}) do
  %{view: view} = socket

  if function_exported?(view, :terminate, 2) do
    view.terminate(reason, socket)
  else
    :ok
  end
end

# Termination before the mount completed (state holds no socket yet).
def terminate(_reason, _state) do
  :ok
end

@impl true
# Delegates hot-code upgrades to the view's optional code_change/3.
def code_change(old, %{socket: socket} = state, extra) do
  %{view: view} = socket

  if function_exported?(view, :code_change, 3) do
    view.code_change(old, socket, extra)
  else
    {:ok, state}
  end
end
# Built-in "lv:clear-flash" event: clears one key or the whole flash.
defp view_handle_event(%Socket{} = socket, "lv:clear-flash", val) do
  case val do
    %{"key" => key} -> {:noreply, Utils.clear_flash(socket, key)}
    _ -> {:noreply, Utils.clear_flash(socket)}
  end
end

# Any other reserved "lv:" event is unknown and raises.
defp view_handle_event(%Socket{}, "lv:" <> _ = bad_event, _val) do
  raise ArgumentError, """
  received unknown LiveView event #{inspect(bad_event)}.
  The following LiveView events are supported: lv:clear-flash.
  """
end

# Runs the lifecycle handle_event hooks and then, unless halted, the view's
# handle_event/3 — all wrapped in a telemetry span.
defp view_handle_event(%Socket{} = socket, event, val) do
  :telemetry.span(
    [:phoenix, :live_view, :handle_event],
    %{socket: socket, event: event, params: val},
    fn ->
      case Lifecycle.handle_event(event, val, socket) do
        {:halt, %Socket{} = socket} ->
          {{:noreply, socket}, %{socket: socket, event: event, params: val}}

        {:cont, %Socket{} = socket} ->
          case socket.view.handle_event(event, val, socket) do
            {:noreply, %Socket{} = socket} ->
              {{:noreply, socket}, %{socket: socket, event: event, params: val}}

            {:reply, reply, %Socket{} = socket} ->
              {{:reply, reply, socket}, %{socket: socket, event: event, params: val}}

            other ->
              raise_bad_callback_response!(other, socket.view, :handle_event, 3)
          end
      end
    end
  )
end

# Runs lifecycle handle_info hooks before the view's handle_info/2.
defp view_handle_info(msg, %{view: view} = socket) do
  case Lifecycle.handle_info(msg, socket) do
    {:halt, %Socket{} = socket} -> {:noreply, socket}
    {:cont, %Socket{} = socket} -> view.handle_info(msg, socket)
  end
end
# After mount, decides whether handle_params/3 must run: it is skipped when
# mount already redirected or no handle_params hook/callback exists, and it
# raises when handle_params is expected but the view was not mounted at the
# router.
defp maybe_call_mount_handle_params(%{socket: socket} = state, router, url, params) do
  %{view: view, redirected: mount_redirect} = socket
  lifecycle = Lifecycle.stage_info(socket, view, :handle_params, 3)

  cond do
    mount_redirect ->
      mount_handle_params_result({:noreply, socket}, state, :mount)

    not lifecycle.any? ->
      {:diff, diff, new_state} = render_diff(state, socket, true)
      {:ok, diff, :mount, new_state}

    socket.root_pid != self() or is_nil(router) ->
      # Let the callback fail for the usual reasons
      Route.live_link_info!(%{socket | router: nil}, view, url)

    params == @not_mounted_at_router ->
      raise "cannot invoke handle_params/3 for #{inspect(view)} because #{inspect(view)}" <>
              " was not mounted at the router with the live/3 macro under URL #{inspect(url)}"

    true ->
      socket
      |> Utils.call_handle_params!(view, lifecycle.exported?, params, url)
      |> mount_handle_params_result(state, :mount)
  end
end

# Interprets the socket after mount/handle_params: returns either the
# rendered diff or one of the redirect flavors. Live patches loop back
# through handle_params/3 with the patched params.
defp mount_handle_params_result({:noreply, %Socket{} = new_socket}, state, redir) do
  new_state = %{state | socket: new_socket}

  case maybe_diff(new_state, true) do
    {:diff, diff, new_state} ->
      {:ok, diff, redir, new_state}

    {:redirect, %{to: _to} = opts} ->
      {:redirect, copy_flash(new_state, Utils.get_flash(new_socket), opts), new_state}

    {:redirect, %{external: url}} ->
      {:redirect, copy_flash(new_state, Utils.get_flash(new_socket), %{to: url}), new_state}

    {:live, :redirect, %{to: _to} = opts} ->
      {:live_redirect, copy_flash(new_state, Utils.get_flash(new_socket), opts), new_state}

    {:live, {params, action}, %{to: to} = opts} ->
      %{socket: new_socket} = new_state = drop_redirect(new_state)
      uri = build_uri(new_state, to)

      new_socket
      |> assign_action(action)
      |> Utils.call_handle_params!(new_socket.view, params, uri)
      |> mount_handle_params_result(new_state, {:live_patch, opts})
  end
end
# Normalizes a callback result into a channel reply, validating its shape.
# Only handle_event/3 may return {:reply, map, socket}.
defp handle_result(
       {:reply, %{} = reply, %Socket{} = new_socket},
       {:handle_event, 3, ref},
       state
     ) do
  handle_changed(state, Utils.put_reply(new_socket, reply), ref)
end

defp handle_result({:noreply, %Socket{} = new_socket}, {_from, _arity, ref}, state) do
  handle_changed(state, new_socket, ref)
end

defp handle_result(result, {name, arity, _ref}, state) do
  raise_bad_callback_response!(result, state.socket.view, name, arity)
end

# Raises a descriptive error for an invalid callback return value, listing
# the shapes accepted by that particular callback.
defp raise_bad_callback_response!(result, view, :handle_call, 3) do
  raise ArgumentError, """
  invalid noreply from #{inspect(view)}.handle_call/3 callback.
  Expected one of:
      {:noreply, %Socket{}}
      {:reply, map, %Socket}
  Got: #{inspect(result)}
  """
end

defp raise_bad_callback_response!(result, view, :handle_event, arity) do
  raise ArgumentError, """
  invalid return from #{inspect(view)}.handle_event/#{arity} callback.
  Expected one of:
      {:noreply, %Socket{}}
      {:reply, map, %Socket{}}
  Got: #{inspect(result)}
  """
end

defp raise_bad_callback_response!(result, view, name, arity) do
  raise ArgumentError, """
  invalid noreply from #{inspect(view)}.#{name}/#{arity} callback.
  Expected one of:
      {:noreply, %Socket{}}
  Got: #{inspect(result)}
  """
end
# Dispatches a client event to a live component by cid, then pushes the
# resulting diff or follows any redirect the component requested.
defp component_handle_event(state, cid, event, val, ref, payload) do
  %{socket: socket, components: components} = state

  result =
    Diff.write_component(socket, cid, components, fn component_socket, component ->
      component_socket
      |> maybe_update_uploads(payload)
      |> inner_component_handle_event(component, event, val)
    end)

  # Due to race conditions, the browser can send a request for a
  # component ID that no longer exists. So we need to check for
  # the :error case accordingly.
  case result do
    {diff, new_components, {redirected, flash}} ->
      new_state = %{state | components: new_components}

      if redirected do
        handle_redirect(new_state, redirected, flash, nil, {diff, ref})
      else
        {:noreply, push_diff(new_state, diff, ref)}
      end

    :error ->
      {:noreply, push_noop(state, ref)}
  end
end

# Removes a completed entry from its upload config and stops tracking the
# upload name.
defp unregister_upload(state, ref, entry_ref, cid) do
  write_socket(state, cid, nil, fn socket, _ ->
    conf = Upload.get_upload_by_ref!(socket, ref)
    new_state = drop_upload_name(state, conf.name)
    {Upload.unregister_completed_entry_upload(socket, conf, entry_ref), {:ok, nil, new_state}}
  end)
end

# Monitors an upload channel pid and records which entry it serves.
defp put_upload_pid(state, pid, ref, entry_ref, cid) when is_pid(pid) do
  Process.monitor(pid)
  %{state | upload_pids: Map.put(state.upload_pids, pid, {ref, entry_ref, cid})}
end

# Forgets a tracked upload channel pid.
defp drop_upload_pid(state, pid) when is_pid(pid) do
  %{state | upload_pids: Map.delete(state.upload_pids, pid)}
end

# Forgets an upload name registration.
defp drop_upload_name(state, name) do
  {_, new_state} = pop_in(state.upload_names[name])
  new_state
end
# Component-scoped "lv:clear-flash". Returns {socket, {redirected, flash}}
# as expected by component_handle_event/6 (no redirect, empty flash).
defp inner_component_handle_event(component_socket, _component, "lv:clear-flash", val) do
  component_socket =
    case val do
      %{"key" => key} -> Utils.clear_flash(component_socket, key)
      _ -> Utils.clear_flash(component_socket)
    end

  {component_socket, {nil, %{}}}
end

# Any other reserved "lv:" event is unknown and raises.
defp inner_component_handle_event(_component_socket, _component, "lv:" <> _ = bad_event, _val) do
  raise ArgumentError, """
  received unknown LiveView event #{inspect(bad_event)}.
  The following LiveView events are supported: lv:clear-flash.
  """
end

# Runs the component's handle_event/3 inside a telemetry span, validating
# the return shape and capturing any redirect plus the component's flash.
defp inner_component_handle_event(component_socket, component, event, val) do
  :telemetry.span(
    [:phoenix, :live_component, :handle_event],
    %{socket: component_socket, component: component, event: event, params: val},
    fn ->
      component_socket =
        %Socket{redirected: redirected, assigns: assigns} =
        case component.handle_event(event, val, component_socket) do
          {:noreply, component_socket} ->
            component_socket

          {:reply, %{} = reply, component_socket} ->
            Utils.put_reply(component_socket, reply)

          other ->
            raise ArgumentError, """
            invalid return from #{inspect(component)}.handle_event/3 callback.
            Expected one of:
                {:noreply, %Socket{}}
                {:reply, map, %Socket}
            Got: #{inspect(other)}
            """
        end

      {
        {component_socket, {redirected, assigns.flash}},
        %{socket: component_socket, component: component, event: event, params: val}
      }
    end
  )
end
# Decodes a raw client event value. Form events arrive URL-encoded;
# everything else is passed through untouched.
defp decode_event_type("form", url_encoded) do
  url_encoded
  |> Plug.Conn.Query.decode()
  |> decode_merge_target()
end

defp decode_event_type(_type, value), do: value

# Normalizes the "_target" field of a form payload into a key-path list.
defp decode_merge_target(%{"_target" => target} = params) when is_list(target), do: params

defp decode_merge_target(%{"_target" => target} = params) when is_binary(target) do
  path =
    target
    |> Plug.Conn.Query.decode()
    |> gather_keys([])
    |> Enum.reverse()

  Map.put(params, "_target", path)
end

defp decode_merge_target(%{} = params), do: params

# Walks a decoded nested query structure, accumulating the key path in
# reverse order; non-map leaves terminate the walk.
defp gather_keys(%{} = map, acc) do
  case Map.to_list(map) do
    [{key, val} | _] -> gather_keys(val, [key | acc])
    [] -> acc
  end
end

defp gather_keys([%{} = map], acc), do: gather_keys(map, acc)
defp gather_keys(_other, acc), do: acc
# Renders the socket after a user callback: pushes a diff when nothing
# redirected, otherwise hands off to handle_redirect/5.
defp handle_changed(state, %Socket{} = new_socket, ref, pending_live_patch \\ nil) do
  new_state = %{state | socket: new_socket}

  case maybe_diff(new_state, false) do
    {:diff, diff, new_state} ->
      {:noreply,
       new_state
       |> push_live_patch(pending_live_patch)
       |> push_diff(diff, ref)}

    result ->
      handle_redirect(new_state, result, Utils.changed_flash(new_socket), ref)
  end
end

# Flushes a diff that was waiting on a client ack ref, if any.
defp maybe_push_pending_diff_ack(state, nil), do: state
defp maybe_push_pending_diff_ack(state, {diff, ref}), do: push_diff(state, diff, ref)

# Executes a redirect/patch command: pushes the appropriate message to the
# client and, for non-patch redirects, stops the channel. Patches issued by
# a non-root LiveView are forwarded to the root pid instead.
defp handle_redirect(new_state, result, flash, ref, pending_diff_ack \\ nil) do
  %{socket: new_socket} = new_state
  root_pid = new_socket.root_pid

  case result do
    {:redirect, %{external: to} = opts} ->
      # Normalize an external redirect into a plain :to redirect.
      opts =
        copy_flash(new_state, flash, opts)
        |> Map.delete(:external)
        |> Map.put(:to, to)

      new_state
      |> push_redirect(opts, ref)
      |> stop_shutdown_redirect(:redirect, opts)

    {:redirect, %{to: _to} = opts} ->
      opts = copy_flash(new_state, flash, opts)

      new_state
      |> push_redirect(opts, ref)
      |> stop_shutdown_redirect(:redirect, opts)

    {:live, :redirect, %{to: _to} = opts} ->
      opts = copy_flash(new_state, flash, opts)

      new_state
      |> push_live_redirect(opts, ref, pending_diff_ack)
      |> stop_shutdown_redirect(:live_redirect, opts)

    {:live, {params, action}, %{to: _to, kind: _kind} = opts} when root_pid == self() ->
      new_state
      |> drop_redirect()
      |> maybe_push_pending_diff_ack(pending_diff_ack)
      |> Map.update!(:socket, &Utils.replace_flash(&1, flash))
      |> sync_handle_params_with_live_redirect(params, action, opts, ref)

    {:live, {_params, _action}, %{to: _to, kind: _kind}} = patch ->
      # A child cannot patch the URL itself: forward to the root view.
      send(new_socket.root_pid, {@prefix, :redirect, patch, flash})
      {:diff, diff, new_state} = render_diff(new_state, new_socket, false)

      {:noreply,
       new_state
       |> drop_redirect()
       |> maybe_push_pending_diff_ack(pending_diff_ack)
       |> push_diff(diff, ref)}
  end
end

# Closes the transport and stops the channel after a terminal redirect.
defp stop_shutdown_redirect(state, kind, opts) do
  send(state.socket.transport_pid, {:socket_close, self(), {kind, opts}})
  {:stop, {:shutdown, {kind, opts}}, state}
end
# Clears any pending redirect command from the socket.
defp drop_redirect(state) do
  put_in(state.socket.redirected, nil)
end

# Runs handle_params/3 for a live patch on the current (root) LiveView and
# renders the result.
defp sync_handle_params_with_live_redirect(state, params, action, %{to: to} = opts, ref) do
  %{socket: socket} = state

  {:noreply, %Socket{} = new_socket} =
    socket
    |> assign_action(action)
    |> Utils.call_handle_params!(socket.view, params, build_uri(state, to))

  handle_changed(state, new_socket, ref, opts)
end

# Push helpers: each either pushes an out-of-band message or replies to a
# pending client ref, depending on whether a ref is present.
defp push_live_patch(state, nil), do: state
defp push_live_patch(state, opts), do: push(state, "live_patch", opts)

defp push_redirect(state, opts, nil = _ref) do
  push(state, "redirect", opts)
end

defp push_redirect(state, opts, ref) do
  reply(state, ref, :ok, %{redirect: opts})
end

defp push_live_redirect(state, opts, nil = _ref, {_diff, ack_ref}) do
  reply(state, ack_ref, :ok, %{live_redirect: opts})
end

defp push_live_redirect(state, opts, nil = _ref, _pending_diff_ack) do
  push(state, "live_redirect", opts)
end

defp push_live_redirect(state, opts, ref, _pending_diff_ack) do
  reply(state, ref, :ok, %{live_redirect: opts})
end

defp push_noop(state, nil = _ref), do: state
defp push_noop(state, ref), do: reply(state, ref, :ok, %{})

# Empty diffs are acked without payload; otherwise push or reply with the diff.
defp push_diff(state, diff, ref) when diff == %{}, do: push_noop(state, ref)
defp push_diff(state, diff, nil = _ref), do: push(state, "diff", diff)
defp push_diff(state, diff, ref), do: reply(state, ref, :ok, %{diff: diff})

# Signs the flash into redirect opts so it survives across mounts.
defp copy_flash(_state, flash, opts) when flash == %{},
  do: opts

defp copy_flash(state, flash, opts),
  do: Map.put(opts, :flash, Utils.sign_flash(state.socket.endpoint, flash))

# Returns the pending redirect command, if any, otherwise renders a diff.
defp maybe_diff(%{socket: socket} = state, force?) do
  socket.redirected || render_diff(state, socket, force?)
end
# Renders the view (when changed or forced), diffs against the previous
# render, and clears change-tracking on the socket.
defp render_diff(state, socket, force?) do
  {socket, diff, components} =
    if force? or Utils.changed?(socket) do
      rendered = Utils.to_rendered(socket, socket.view)
      Diff.render(socket, rendered, state.components)
    else
      {socket, %{}, state.components}
    end

  diff = Diff.render_private(socket, diff)
  {:diff, diff, %{state | socket: Utils.clear_changed(socket), components: components}}
end

# Replies to a client ref; the tuple form carries extra payload to merge in.
defp reply(state, {ref, extra}, status, payload) do
  reply(state, ref, status, Map.merge(payload, extra))
end

defp reply(state, ref, status, payload) when is_binary(ref) do
  reply_ref = {state.socket.transport_pid, state.serializer, state.topic, ref, state.join_ref}
  Phoenix.Channel.reply(reply_ref, {status, payload})
  state
end

# Pushes an out-of-band message to the client over the transport.
defp push(state, event, payload) do
  message = %Message{topic: state.topic, event: event, payload: payload, join_ref: state.join_ref}
  send(state.socket.transport_pid, state.serializer.encode!(message))
  state
end
## Mount

# Entry point for the channel join: requires a signed session token, then
# verifies and authorizes it before the full mount. All failures reply
# {:error, %{reason: ...}} and stop the process.
defp mount(%{"session" => session_token} = params, from, phx_socket) do
  %Phoenix.Socket{endpoint: endpoint, topic: topic} = phx_socket

  case Session.verify_session(endpoint, topic, session_token, params["static"]) do
    {:ok, %Session{} = verified} ->
      %Phoenix.Socket{private: %{connect_info: connect_info}} = phx_socket

      case connect_info do
        %{session: nil} ->
          # A nil session at connect time almost always means an
          # endpoint/socket misconfiguration; explain how to fix it.
          Logger.debug("""
          LiveView session was misconfigured or the user token is outdated.
          1) Ensure your session configuration in your endpoint is in a module attribute:
              @session_options [
                ...
              ]
          2) Change the `plug Plug.Session` to use said attribute:
              plug Plug.Session, @session_options
          3) Also pass the `@session_options` to your LiveView socket:
              socket "/live", Phoenix.LiveView.Socket,
                websocket: [connect_info: [session: @session_options]]
          4) Ensure the `protect_from_forgery` plug is in your router pipeline:
              plug :protect_from_forgery
          5) Define the CSRF meta tag inside the `<head>` tag in your layout:
              <%= csrf_meta_tag() %>
          6) Pass it forward in your app.js:
              let csrfToken = document.querySelector("meta[name='csrf-token']").getAttribute("content");
              let liveSocket = new LiveSocket("/live", Socket, {params: {_csrf_token: csrfToken}});
          """)

          GenServer.reply(from, {:error, %{reason: "stale"}})
          {:stop, :shutdown, :no_state}

        %{} ->
          case authorize_session(verified, endpoint, params) do
            {:ok, %Session{} = new_verified, route, url} ->
              verified_mount(new_verified, route, url, params, from, phx_socket, connect_info)

            {:error, :unauthorized} ->
              GenServer.reply(from, {:error, %{reason: "unauthorized"}})
              {:stop, :shutdown, :no_state}

            {:error, _reason} ->
              GenServer.reply(from, {:error, %{reason: "stale"}})
              {:stop, :shutdown, :no_state}
          end
      end

    {:error, _reason} ->
      GenServer.reply(from, {:error, %{reason: "stale"}})
      {:stop, :shutdown, :no_state}
  end
end

# No session token provided: refuse the join as stale.
defp mount(%{}, from, phx_socket) do
  Logger.error("Mounting #{phx_socket.topic} failed because no session was provided")
  GenServer.reply(from, {:error, %{reason: "stale"}})
  {:stop, :shutdown, :no_session}
end

# Picks the flash for this mount: an explicit flash token (live redirects)
# wins; otherwise the disconnected-render flash, but only on first mount.
defp verify_flash(endpoint, %Session{} = verified, flash_token, connect_params) do
  # verified_flash is fetched from the disconnected render.
  # params["flash"] is sent on live redirects and therefore has higher priority.
  cond do
    flash_token -> Utils.verify_flash(endpoint, flash_token)
    connect_params["_mounts"] == 0 && verified.flash -> verified.flash
    true -> %{}
  end
end
# Full mount after session + route authorization succeeded: configures the
# socket, runs the view's mount/3 and handle_params/3 pipeline, and replies
# to the join with the initial rendered diff (or a redirect).
defp verified_mount(%Session{} = verified, route, url, params, from, phx_socket, connect_info) do
  %Session{
    id: id,
    view: view,
    root_view: root_view,
    parent_pid: parent,
    root_pid: root_pid,
    session: verified_user_session,
    assign_new: assign_new,
    router: router
  } = verified

  # Make sure the view is loaded. Otherwise if the first request
  # ever is a LiveView connection, the view won't be loaded and
  # the mount/handle_params callbacks won't be invoked as they
  # are optional, leading to errors.
  config = view.__live__()
  live_session_on_mount = load_live_session_on_mount(route)
  lifecycle = lifecycle(config, live_session_on_mount)

  %Phoenix.Socket{
    endpoint: endpoint,
    transport_pid: transport_pid
  } = phx_socket

  # Optional parameter handling
  connect_params = params["params"]

  # Optional verified parts
  flash = verify_flash(endpoint, verified, params["flash"], connect_params)
  socket_session = connect_info[:session] || %{}

  Process.monitor(transport_pid)
  load_csrf_token(endpoint, socket_session)

  # Propagate $callers so spawned tasks can be traced back to the caller.
  case params do
    %{"caller" => {pid, _}} when is_pid(pid) -> Process.put(:"$callers", [pid])
    _ -> Process.put(:"$callers", [transport_pid])
  end

  socket = %Socket{
    endpoint: endpoint,
    view: view,
    transport_pid: transport_pid,
    parent_pid: parent,
    root_pid: root_pid || self(),
    id: id,
    router: router
  }

  # Router-mounted views get their route data; others are marked as
  # not mounted at the router.
  {params, host_uri, action} =
    case route do
      %Route{} = route ->
        {route.params, route.uri, route.action}

      nil ->
        {@not_mounted_at_router, @not_mounted_at_router, nil}
    end

  merged_session = Map.merge(socket_session, verified_user_session)

  case mount_private(parent, root_view, assign_new, connect_params, connect_info, lifecycle) do
    {:ok, mount_priv} ->
      socket = Utils.configure_socket(socket, mount_priv, action, flash, host_uri)

      socket
      |> Utils.maybe_call_live_view_mount!(view, params, merged_session)
      |> build_state(phx_socket)
      |> maybe_call_mount_handle_params(router, url, params)
      |> reply_mount(from, verified, route)

    {:error, :noproc} ->
      # The parent died while we were mounting; the client must re-join.
      GenServer.reply(from, {:error, %{reason: "stale"}})
      {:stop, :shutdown, :no_state}
  end
end

# Restores CSRF state from the session so CSRF checks work over the socket.
defp load_csrf_token(endpoint, socket_session) do
  if token = socket_session["_csrf_token"] do
    state = Plug.CSRFProtection.dump_state_from_session(token)
    secret_key_base = endpoint.config(:secret_key_base)
    Plug.CSRFProtection.load_state(secret_key_base, state)
  end
end

# on_mount hooks declared by the route's live_session, if any.
defp load_live_session_on_mount(%Route{live_session: %{extra: %{on_mount: hooks}}}), do: hooks
defp load_live_session_on_mount(_), do: []

# Prepends live_session on_mount hooks to the view's own mount lifecycle.
defp lifecycle(%{lifecycle: lifecycle}, []), do: lifecycle

defp lifecycle(%{lifecycle: lifecycle}, on_mount) do
  %{lifecycle | mount: on_mount ++ lifecycle.mount}
end
# Builds the socket's private mount data. The nil-parent clause is the root
# LiveView; the other clause first syncs assign_new values from the parent.
defp mount_private(nil, root_view, assign_new, connect_params, connect_info, lifecycle) do
  {:ok,
   %{
     connect_params: connect_params,
     connect_info: connect_info,
     assign_new: {%{}, assign_new},
     lifecycle: lifecycle,
     root_view: root_view,
     __changed__: %{}
   }}
end

defp mount_private(parent, root_view, assign_new, connect_params, connect_info, lifecycle) do
  case sync_with_parent(parent, assign_new) do
    {:ok, parent_assigns} ->
      # Child live views always ignore the layout on `:use`.
      {:ok,
       %{
         connect_params: connect_params,
         connect_info: connect_info,
         assign_new: {parent_assigns, assign_new},
         phoenix_live_layout: false,
         lifecycle: lifecycle,
         root_view: root_view,
         __changed__: %{}
       }}

    {:error, :noproc} ->
      {:error, :noproc}
  end
end

# Fetches inheritable assigns from the parent LiveView; returns
# {:error, :noproc} when the parent has already exited.
defp sync_with_parent(parent, assign_new) do
  _ref = Process.monitor(parent)

  try do
    GenServer.call(parent, {@prefix, :child_mount, self(), assign_new})
  catch
    :exit, {:noproc, _} -> {:error, :noproc}
  end
end
# Adds the container tag/attrs to the mount reply when this mount came from
# a redirect, so the client can rebuild the container element.
defp put_container(%Session{} = session, %Route{} = route, %{} = diff) do
  if container = session.redirected? && Route.container(route) do
    {tag, attrs} = container
    Map.put(diff, :container, [tag, Enum.into(attrs, %{})])
  else
    diff
  end
end

defp put_container(%Session{}, nil = _route, %{} = diff), do: diff

# Replies to the join with the mount result: the rendered tree, an optional
# immediate live patch, or a redirect error (which also stops the channel).
defp reply_mount(result, from, %Session{} = session, route) do
  case result do
    {:ok, diff, :mount, new_state} ->
      reply = put_container(session, route, %{rendered: diff})
      GenServer.reply(from, {:ok, reply})
      {:noreply, post_verified_mount(new_state)}

    {:ok, diff, {:live_patch, opts}, new_state} ->
      reply = put_container(session, route, %{rendered: diff, live_patch: opts})
      GenServer.reply(from, {:ok, reply})
      {:noreply, post_verified_mount(new_state)}

    {:live_redirect, opts, new_state} ->
      GenServer.reply(from, {:error, %{live_redirect: opts}})
      {:stop, :shutdown, new_state}

    {:redirect, opts, new_state} ->
      GenServer.reply(from, {:error, %{redirect: opts}})
      {:stop, :shutdown, new_state}
  end
end

# Initial GenServer state for a mounted LiveView.
defp build_state(%Socket{} = lv_socket, %Phoenix.Socket{} = phx_socket) do
  %{
    join_ref: phx_socket.join_ref,
    serializer: phx_socket.serializer,
    socket: lv_socket,
    topic: phx_socket.topic,
    components: Diff.new_components(),
    upload_names: %{},
    upload_pids: %{}
  }
end

# Expands a root-relative path against the socket's host URI.
defp build_uri(%{socket: socket}, "/" <> _ = to) do
  URI.to_string(%{socket.host_uri | path: to})
end

# Prunes mount-only private data once the mount reply has been sent.
defp post_verified_mount(%{socket: socket} = state) do
  %{state | socket: Utils.post_mount_prune(socket)}
end

# Stores the current router action under the :live_action assign.
defp assign_action(socket, action) do
  Phoenix.LiveView.assign(socket, :live_action, action)
end
defp maybe_update_uploads(%Socket{} = socket, %{"uploads" => uploads} = payload) do
cid = payload["cid"]
Enum.reduce(uploads, socket, fn {ref, entries}, acc ->
upload_conf = Upload.get_upload_by_ref!(acc, ref)
case Upload.put_entries(acc, upload_conf, entries, cid) do
{:ok, new_socket} -> new_socket
{:error, _error_resp, %Socket{} = new_socket} -> new_socket
end
end)
end
defp maybe_update_uploads(%Socket{} = socket, %{} = _payload), do: socket
# Registers an upload-channel pid for a single entry and replies to the
# joining upload channel. On success the pid is tracked in the state for later
# cleanup; on failure the socket is left unchanged (write_socket/4 treats
# `:error` as "do not commit socket changes").
defp register_entry_upload(state, from, info) do
  %{channel_pid: pid, ref: ref, entry_ref: entry_ref, cid: cid} = info

  write_socket(state, cid, nil, fn socket, _ ->
    conf = Upload.get_upload_by_ref!(socket, ref)

    case Upload.register_entry_upload(socket, conf, pid, entry_ref) do
      {:ok, new_socket, entry} ->
        reply = %{max_file_size: entry.client_size, chunk_timeout: conf.chunk_timeout}
        GenServer.reply(from, {:ok, reply})
        new_state = put_upload_pid(state, pid, ref, entry_ref, cid)
        {new_socket, {:ok, nil, new_state}}

      {:error, reason} ->
        GenServer.reply(from, {:error, reason})
        {socket, :error}
    end
  end)
end
# Runs `func` against the root socket (cid == nil) or against the socket of
# the component identified by `cid`, without writing anything back.
defp read_socket(state, nil = _cid, func) do
  func.(state.socket, nil)
end

defp read_socket(state, cid, func) do
  %{socket: socket, components: components} = state
  Diff.read_component(socket, cid, components, func)
end
# Runs `fun` against the root socket (cid == nil) or a component socket and
# pushes the resulting diff to the client. The callback must return
# `{socket, {:ok, ref_reply, new_state}}` to commit, or `{socket, :error}`.
# If :error is returned, the socket must not change,
# otherwise we need to call push_diff on all cases.
defp write_socket(state, nil, ref, fun) do
  {new_socket, return} = fun.(state.socket, nil)

  case return do
    {:ok, ref_reply, new_state} ->
      # Assertive match: anything other than {:noreply, _} here is a bug.
      {:noreply, new_state} = handle_changed(new_state, new_socket, ref_reply)
      new_state

    :error ->
      # Acknowledge `ref` without applying any changes.
      push_noop(state, ref)
  end
end

defp write_socket(state, cid, ref, fun) do
  %{socket: socket, components: components} = state

  {diff, new_components, return} =
    case Diff.write_component(socket, cid, components, fun) do
      {_diff, _new_components, _return} = triplet -> triplet
      # Unknown component: fall through with an empty diff and no changes.
      :error -> {%{}, components, :error}
    end

  case return do
    {:ok, ref_reply, new_state} ->
      new_state = %{new_state | components: new_components}
      push_diff(new_state, diff, ref_reply)

    :error ->
      push_noop(state, ref)
  end
end
# Deletes the given component ids (plus children removed along with them),
# cancelling live uploads owned by deleted components and dropping their
# upload names from the state. Returns {all_deleted_cids, new_state}.
defp delete_components(state, cids) do
  # cids that currently own an allowed upload.
  upload_cids = Enum.into(state.upload_names, MapSet.new(), fn {_name, {_ref, cid}} -> cid end)

  Enum.flat_map_reduce(cids, state, fn cid, acc ->
    {deleted_cids, new_components} = Diff.delete_component(cid, acc.components)

    # Cancel uploads for every deleted component that owned one.
    canceled_confs =
      deleted_cids
      |> Enum.filter(fn deleted_cid -> deleted_cid in upload_cids end)
      |> Enum.flat_map(fn deleted_cid ->
        read_socket(acc, deleted_cid, fn c_socket, _ ->
          {_new_c_socket, canceled_confs} = Upload.maybe_cancel_uploads(c_socket)
          canceled_confs
        end)
      end)

    new_state =
      Enum.reduce(canceled_confs, acc, fn conf, acc -> drop_upload_name(acc, conf.name) end)

    {deleted_cids, %{new_state | components: new_components}}
  end)
end
# Raises when an upload name is already allowed by a *different* component.
# The same {ref, cid} pair re-registering, or an unseen name, is fine.
defp ensure_unique_upload_name!(state, conf) do
  upload_ref = conf.ref
  cid = conf.cid

  case Map.fetch(state.upload_names, conf.name) do
    # Same upload re-registering in the same component.
    {:ok, {^upload_ref, ^cid}} ->
      :ok

    # Name not taken yet.
    :error ->
      :ok

    {:ok, {_existing_ref, existing_cid}} ->
      raise RuntimeError, """
      existing upload for #{conf.name} already allowed in another component (#{existing_cid})
      If you want to allow simultaneous uploads across different components, pass a
      unique upload name to allow_upload/3
      """
  end
end
# Authorizes the session for this join, returning
# {:ok, session, route | nil, url | nil} or {:error, :unauthorized}.
#
# A "redirect" param means the client arrived via a redirect from another
# LiveView: the URL must resolve to a known live route and the session must
# be authorized for it. A "url" param is only resolved to a route for the
# main (root) LiveView; other joins carry no route/url information.
defp authorize_session(%Session{} = session, endpoint, %{"redirect" => url}) do
  if redir_route = session_route(session, endpoint, url) do
    case Session.authorize_root_redirect(session, redir_route) do
      {:ok, %Session{} = new_session} -> {:ok, new_session, redir_route, url}
      {:error, :unauthorized} = err -> err
    end
  else
    {:error, :unauthorized}
  end
end

defp authorize_session(%Session{} = session, endpoint, %{"url" => url}) do
  if Session.main?(session) do
    {:ok, session, session_route(session, endpoint, url), url}
  else
    {:ok, session, _route = nil, _url = nil}
  end
end

# Neither "redirect" nor "url" present: authorized with no route info.
defp authorize_session(%Session{} = session, _endpoint, %{} = _params) do
  {:ok, session, _route = nil, _url = nil}
end
# Resolves a URL to its internal live route in this router, or nil when the
# URL does not map to one.
defp session_route(%Session{} = session, endpoint, url) do
  case Route.live_link_info(endpoint, session.router, url) do
    {:internal, %Route{} = route} -> route
    _ -> nil
  end
end
end
| 32.238055 | 130 | 0.631097 |
e846f93d791e3747cdc60a2870f6b1d655353d45 | 1,268 | ex | Elixir | lib/blue_heron/hci/commands/controller_and_baseband/write_local_name.ex | kevinansfield/blue_heron | 8339e6747e135030f7d1e67801391f03f2558e0d | [
"Apache-2.0"
] | 45 | 2020-10-17T13:34:15.000Z | 2022-03-08T09:40:43.000Z | lib/blue_heron/hci/commands/controller_and_baseband/write_local_name.ex | kevinansfield/blue_heron | 8339e6747e135030f7d1e67801391f03f2558e0d | [
"Apache-2.0"
] | 20 | 2020-10-15T15:05:54.000Z | 2022-03-27T15:54:36.000Z | lib/blue_heron/hci/commands/controller_and_baseband/write_local_name.ex | kevinansfield/blue_heron | 8339e6747e135030f7d1e67801391f03f2558e0d | [
"Apache-2.0"
] | 11 | 2020-10-23T17:18:57.000Z | 2022-03-15T20:01:49.000Z | defmodule BlueHeron.HCI.Command.ControllerAndBaseband.WriteLocalName do
use BlueHeron.HCI.Command.ControllerAndBaseband, ocf: 0x0013
@moduledoc """
The HCI_Write_Local_Name command provides the ability to modify the user- friendly name for the BR/EDR Controller.
* OGF: `#{inspect(@ogf, base: :hex)}`
* OCF: `#{inspect(@ocf, base: :hex)}`
* Opcode: `#{inspect(@opcode)}`
Bluetooth Spec v5.2, Vol 4, Part E, section 7.3.11
## Command Parameters
* `name` - A UTF-8 encoded User-Friendly Descriptive Name for the device. Up-to 248 bytes
## Return Parameters
* `:status` - see `BlueHeron.ErrorCode`
"""
defparameters name: "Bluetooth"
defimpl BlueHeron.HCI.Serializable do
  # Serializes the command as: opcode, fixed length byte (248), then the
  # name right-padded with NUL bytes to exactly 248 bytes.
  #
  # BUGFIX: the previous padding,
  # `for _i <- 1..(248 - byte_size(name)), into: name, do: <<0>>`,
  # is wrong at the boundary: for a name of exactly 248 bytes the range is
  # `1..0`, which enumerates *descending* over two elements and appended two
  # stray NUL bytes (a 250-byte field). `:binary.copy/2` with a count of 0
  # correctly yields <<>>, and the guard rejects over-long names up front
  # instead of producing a malformed packet.
  def serialize(%{opcode: opcode, name: name}) when byte_size(name) <= 248 do
    padded = name <> :binary.copy(<<0>>, 248 - byte_size(name))
    <<opcode::binary, 248, padded::binary>>
  end
end
# Decodes a serialized command back into the struct, trimming the NUL padding.
@impl BlueHeron.HCI.Command
def deserialize(<<@opcode::binary, 248, padded::binary>>) do
  new(name: String.trim(padded, <<0>>))
end
# Return parameters carry a single status byte.
@impl BlueHeron.HCI.Command
def deserialize_return_parameters(<<status>>) do
  %{status: status}
end
# Converts the (possibly symbolic) status back to its wire code.
@impl true
def serialize_return_parameters(%{status: status}) do
  <<BlueHeron.ErrorCode.to_code!(status)>>
end
end
| 28.818182 | 116 | 0.685331 |
e84706f76d6e975df24bf23916ba431c03f1cacc | 833 | exs | Elixir | test/magnemite/repo_test.exs | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | test/magnemite/repo_test.exs | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | test/magnemite/repo_test.exs | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | defmodule Magnemite.RepoTest do
use ExUnit.Case, async: true
alias Magnemite.Repo
describe "handle_operation_result/1" do
  test "returns the successful output when given an :ok tuple" do
    output = {:ok, %{field: "value"}}

    # Pinned match: the exact same tuple must be passed through untouched.
    assert ^output = Repo.handle_operation_result(output)
  end

  test "returns transformed changeset errors when given an :error tuple with a changeset" do
    # Minimal hand-built changeset: only :types and :errors are needed for
    # error traversal.
    changeset = %Ecto.Changeset{
      types: [{:field1, :integer}, {:field2, :string}],
      errors: [
        field1: {"can't be blank", []},
        field2: {"is invalid", []}
      ]
    }

    assert {:error, :changeset,
            %{
              field1: ["can't be blank"],
              field2: ["is invalid"]
            }} = Repo.handle_operation_result({:error, changeset})
  end
end
end
| 27.766667 | 94 | 0.581032 |
e84725073d9bcf181a9ba861822df7e59bc61305 | 266 | exs | Elixir | config/test.exs | licaonfee/phoenix_gitlab_monitor | 1b7dd437018d42a2b7b9a1643e6767a48f312eee | [
"MIT"
] | 12 | 2018-11-04T03:39:34.000Z | 2020-04-29T19:30:58.000Z | config/test.exs | licaonfee/phoenix_gitlab_monitor | 1b7dd437018d42a2b7b9a1643e6767a48f312eee | [
"MIT"
] | 16 | 2018-11-07T01:05:01.000Z | 2021-05-07T21:32:07.000Z | config/test.exs | licaonfee/phoenix_gitlab_monitor | 1b7dd437018d42a2b7b9a1643e6767a48f312eee | [
"MIT"
] | 3 | 2019-08-27T20:29:00.000Z | 2020-05-25T20:36:12.000Z | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :monitor, MonitorWeb.Endpoint,
  http: [port: 4001],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn
| 24.181818 | 56 | 0.736842 |
e847381cbdc5ac308af85c8ae754ea856c5e36e9 | 251 | ex | Elixir | lib/dornach.ex | ream88/jw-dornach | 0a25deae13fafd832421bde21bc21035a128ac7d | [
"MIT"
] | null | null | null | lib/dornach.ex | ream88/jw-dornach | 0a25deae13fafd832421bde21bc21035a128ac7d | [
"MIT"
] | 4 | 2020-01-28T10:12:25.000Z | 2021-05-10T23:05:12.000Z | lib/dornach.ex | ream88/jw-dornach | 0a25deae13fafd832421bde21bc21035a128ac7d | [
"MIT"
] | null | null | null | defmodule Dornach do
@moduledoc """
Dornach keeps the contexts that define your domain
and business logic.

Contexts are also responsible for managing your data, regardless
of whether it comes from the database, an external API or others.
"""
end
| 25.1 | 66 | 0.752988 |
e8473cb3dcc3914978395456e1425c384d11ba64 | 506 | exs | Elixir | year_2020/test/day_13_test.exs | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | year_2020/test/day_13_test.exs | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | year_2020/test/day_13_test.exs | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | defmodule Day13Test do
use ExUnit.Case, async: true
test "it computes time to the next shuttle" do
  assert Day13.part_one(InputTestFile) == 295
end

test "it computes the right timestamp" do
  assert Day13.part_two(InputTestFile) == 1_068_781
end

# Continued-fraction coefficients produced by the Euclidean algorithm.
test "it computes factors used by GCD" do
  assert Day13.gcd(15, 26) == [1, 1, 2, 1]
end

test "it computes the modular inverse" do
  assert Day13.modular_inverse(11, 13) == 6
  assert Day13.modular_inverse(12, 19) == 8
end
end
| 24.095238 | 53 | 0.693676 |
e84758712cb3a1bbb39575d3d3c931341a920d2d | 1,226 | ex | Elixir | lib/text_based_fps/command_helper.ex | guisehn/elixir-text-based-fps | 59a815da337309297f8b42ef3481277dd4d9b371 | [
"MIT"
] | 1 | 2022-03-02T12:18:07.000Z | 2022-03-02T12:18:07.000Z | lib/text_based_fps/command_helper.ex | guisehn/elixir-text-based-fps | 59a815da337309297f8b42ef3481277dd4d9b371 | [
"MIT"
] | 12 | 2021-05-31T21:41:09.000Z | 2021-07-30T03:18:09.000Z | lib/text_based_fps/command_helper.ex | guisehn/elixir-text-based-fps | 59a815da337309297f8b42ef3481277dd4d9b371 | [
"MIT"
] | null | null | null | defmodule TextBasedFPS.CommandHelper do
import TextBasedFPS.Text, only: [highlight: 1]
alias TextBasedFPS.{Room, ServerState}
# Ensures the player is in a room *and* alive, returning the room on
# success or a user-facing error message otherwise.
@spec require_alive_player(ServerState.t(), Player.t()) ::
        {:ok, Room.t()} | {:error, ServerState.t(), String.t()}
def require_alive_player(state, player) do
  with {:ok, room} <- require_room(state, player) do
    room_player = Room.get_player(room, player.key)
    require_alive_player(state, player, room, room_player)
  end
end
# A dead player has no coordinates.
defp require_alive_player(state, _player, _room, %{coordinates: nil}) do
  {:error, state, "You're dead. Type #{highlight("respawn")} to return to the game."}
end

defp require_alive_player(_state, _player, room, _room_player) do
  {:ok, room}
end
# Ensures the player has joined a room, returning that room or a
# user-facing error message telling them how to join one.
@spec require_room(ServerState.t(), Player.t()) ::
        {:ok, Room.t()} | {:error, ServerState.t(), String.t()}
def require_room(state, player) do
  if player.room do
    room = ServerState.get_room(state, player.room)
    {:ok, room}
  else
    {:error, state, room_required_message()}
  end
end
# Error message shown when a room-only command is used outside a room.
defp room_required_message() do
  "You need to be in a room to use this command. Type #{highlight("join-room <room_name>")} to join a room."
end
end
| 32.263158 | 110 | 0.668842 |
e84766d811e9449ef43b480f85ddcb399b7b7634 | 801 | ex | Elixir | lib/nomad_client/model/server_members.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | 8 | 2021-09-04T21:22:53.000Z | 2022-02-22T22:48:38.000Z | lib/nomad_client/model/server_members.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | null | null | null | lib/nomad_client/model/server_members.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule NomadClient.Model.ServerMembers do
  @moduledoc """
  Auto-generated model struct for Nomad server membership data: the server's
  identity fields plus its known agent members. Capitalized field atoms
  mirror the upstream API's JSON key casing.
  """

  @derive [Poison.Encoder]
  defstruct [
    :ServerName,
    :ServerRegion,
    :ServerDc,
    :Members
  ]

  @type t :: %__MODULE__{
          :ServerName => String.t() | nil,
          :ServerRegion => String.t() | nil,
          :ServerDc => String.t() | nil,
          :Members => [NomadClient.Model.AgentMember.t()] | nil
        }
end
# Decodes the nested :Members entries into AgentMember structs after the
# outer struct has been deserialized by Poison.
defimpl Poison.Decoder, for: NomadClient.Model.ServerMembers do
  import NomadClient.Deserializer

  def decode(value, options) do
    value
    |> deserialize(:Members, :list, NomadClient.Model.AgentMember, options)
  end
end
| 23.558824 | 91 | 0.654182 |
e847742193b1f0c2efb42f771f17274cc98095f9 | 1,193 | ex | Elixir | lib/pco_api/actions/create.ex | geolessel/pco-api-elixir | 6f4d9f86247a1f6370a512166e9fed3a216302e5 | [
"MIT"
] | 4 | 2016-05-13T20:04:32.000Z | 2020-12-03T11:25:01.000Z | lib/pco_api/actions/create.ex | geolessel/pco-api-elixir | 6f4d9f86247a1f6370a512166e9fed3a216302e5 | [
"MIT"
] | 23 | 2016-05-14T01:34:02.000Z | 2016-08-16T22:34:40.000Z | lib/pco_api/actions/create.ex | geolessel/pco-api-elixir | 6f4d9f86247a1f6370a512166e9fed3a216302e5 | [
"MIT"
] | 4 | 2016-05-27T23:40:39.000Z | 2019-10-06T17:36:40.000Z | defmodule PcoApi.Actions.Create do
@doc """
Injects `post/1`, `create/2` and the private `do_create/1` response handler
into the using module, POSTing records or raw JSON to the PCO API.
"""
defmacro __using__(_opts) do
  quote do
    import PcoApi.Actions.Create
    import PcoApi.Record

    # Convenience: POST with an empty body.
    def post(url) when is_binary(url), do: create("", url)

    # Serialize a record to JSON, then create it at `url`.
    def create(%PcoApi.Record{attributes: _, type: _} = record, url) when is_binary(url) do
      # TODO: Error handling for when the record isn't created
      record |> PcoApi.Record.to_json |> create(url)
    end

    def create(json, url) when is_binary(json) do
      url
      |> post(json, [], hackney: [basic_auth: {PcoApi.key, PcoApi.secret}])
      |> do_create
    end

    defp do_create({:ok, %HTTPoison.Response{status_code: code, body: %{"data" => data}}}) when (code in 200..299) do
      data |> to_record
    end

    defp do_create({:ok, %HTTPoison.Response{status_code: code, body: body}}) when (code in 200..299), do: body

    # BUGFIX: these messages previously interpolated `IO.inspect(...)`.
    # IO.inspect/1 returns the term itself, and interpolating a map/struct
    # invokes String.Chars, raising Protocol.UndefinedError and masking the
    # real error — while also printing to stdout as a side effect.
    # `inspect/1` builds the intended string representation.
    defp do_create({:ok, %HTTPoison.Response{body: body}}), do: raise "PcoApi ok, but not ok: #{inspect(body)}"
    defp do_create({:error, err}), do: raise "PcoApi error: #{inspect(err)}"
  end
end
# Defines a `type/0` function in the caller returning the endpoint's record
# type.
defmacro record_type(type) do
  quote do
    def unquote(:type)(), do: unquote(type)
  end
end
end
| 35.088235 | 119 | 0.631182 |
e847957edbef9cb0c532502bd38a5a93a7869d50 | 1,253 | ex | Elixir | web/views/error_helpers.ex | gedzubo/pagila_phoenix_api | c72063672a1b09c8d9bcc2814b03a4863a09a7b4 | [
"MIT"
] | null | null | null | web/views/error_helpers.ex | gedzubo/pagila_phoenix_api | c72063672a1b09c8d9bcc2814b03a4863a09a7b4 | [
"MIT"
] | null | null | null | web/views/error_helpers.ex | gedzubo/pagila_phoenix_api | c72063672a1b09c8d9bcc2814b03a4863a09a7b4 | [
"MIT"
] | null | null | null | defmodule PagilaPhoenixApi.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
  # Returns nil (rendering nothing) when the field has no error.
  if error = form.errors[field] do
    content_tag :span, translate_error(error), class: "help-block"
  end
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
  # Because error messages were defined within Ecto, we must
  # call the Gettext module passing our Gettext backend. We
  # also use the "errors" domain as translations are placed
  # in the errors.po file.
  # Ecto will pass the :count keyword if the error message is
  # meant to be pluralized.
  # On your own code and templates, depending on whether you
  # need the message to be pluralized or not, this could be
  # written simply as:
  #
  #     dngettext "errors", "1 file", "%{count} files", count
  #     dgettext "errors", "is invalid"
  #
  if count = opts[:count] do
    Gettext.dngettext(PagilaPhoenixApi.Gettext, "errors", msg, msg, count, opts)
  else
    Gettext.dgettext(PagilaPhoenixApi.Gettext, "errors", msg, opts)
  end
end
end
| 30.560976 | 82 | 0.674381 |
e847eb7af073f4a7e26eb28b58574ebc82f30591 | 1,830 | ex | Elixir | apps/omg_child_chain/lib/omg_child_chain/fees/feed_adapter.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | 1 | 2020-10-06T03:07:47.000Z | 2020-10-06T03:07:47.000Z | apps/omg_child_chain/lib/omg_child_chain/fees/feed_adapter.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | 9 | 2020-09-16T15:31:17.000Z | 2021-03-17T07:12:35.000Z | apps/omg_child_chain/lib/omg_child_chain/fees/feed_adapter.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | 1 | 2020-09-30T17:17:27.000Z | 2020-09-30T17:17:27.000Z | # Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.ChildChain.Fees.FeedAdapter do
  @moduledoc """
  Adapter pulls actual fees prices from fee feed.
  """

  @behaviour OMG.ChildChain.Fees.Adapter

  alias OMG.ChildChain.Fees.FeeUpdater
  alias OMG.ChildChain.HttpRPC.Client

  use OMG.Utils.LoggerExt

  @doc """
  Pulls the fee specification from fees feed. Feed updates fee prices based on Ethereum's gas price.

  Returns `{:ok, new_specs, updated_at}` when the fetched specs should replace
  the stored ones, `:ok` when no update is needed, or the fetch/update error.
  """
  @impl true
  def get_fee_specs(opts, actual_fee_specs, updated_at) do
    fee_feed_url = Keyword.fetch!(opts, :fee_feed_url)

    with {:ok, fee_specs_from_feed} <- Client.all_fees(fee_feed_url),
         {:ok, {new_updated_at, new_fee_specs}} <-
           can_update(opts, actual_fee_specs, fee_specs_from_feed, updated_at) do
      {:ok, new_fee_specs, new_updated_at}
    else
      :no_changes -> :ok
      error -> error
    end
  end

  # Delegates the "should the fetched specs replace the stored ones?"
  # decision to FeeUpdater, using the configured change tolerance and
  # minimum update interval (converted from minutes to seconds).
  defp can_update(opts, stored_specs, fetched_specs, updated_at) do
    tolerance_percent = Keyword.fetch!(opts, :fee_change_tolerance_percent)
    update_interval_minutes = Keyword.fetch!(opts, :stored_fee_update_interval_minutes)

    FeeUpdater.can_update(
      {updated_at, stored_specs},
      {:os.system_time(:second), fetched_specs},
      tolerance_percent,
      update_interval_minutes * 60
    )
  end
end
| 33.888889 | 100 | 0.732787 |
e847ec33e27b5d6fec2fb205dd89581cfa2225ef | 4,328 | ex | Elixir | lib/coherence/controllers/confirmation_controller.ex | dipth/coherence | 64ad450c5d8d020172875c4bf52f2bbdaa6e59de | [
"MIT"
] | null | null | null | lib/coherence/controllers/confirmation_controller.ex | dipth/coherence | 64ad450c5d8d020172875c4bf52f2bbdaa6e59de | [
"MIT"
] | null | null | null | lib/coherence/controllers/confirmation_controller.ex | dipth/coherence | 64ad450c5d8d020172875c4bf52f2bbdaa6e59de | [
"MIT"
] | null | null | null | defmodule Coherence.ConfirmationController do
@moduledoc """
Handle confirmation actions.
A single action, `edit`, is required for the confirmation module.
"""
use CoherenceWeb, :controller
use Timex
alias Coherence.{ConfirmableService, Messages}
alias Coherence.Schemas
require Logger
plug Coherence.ValidateOption, :confirmable
plug :layout_view, view: Coherence.ConfirmationView, caller: __MODULE__
plug :redirect_logged_in when action in [:new]
@doc """
Handle resending a confirmation email.

Request the user's email, reset the confirmation token and resend the email.
"""
@spec new(Plug.Conn.t, Map.t) :: Plug.Conn.t
def new(conn, _params) do
  user_schema = Config.user_schema
  # Empty changeset so the form renders blank.
  cs = Controller.changeset :confirmation, user_schema, user_schema.__struct__
  conn
  |> render(:new, [email: "", changeset: cs])
end
@doc """
Create a new confirmation token and resend the email.
"""
@spec create(Plug.Conn.t, Map.t) :: Plug.Conn.t
def create(conn, %{"confirmation" => password_params} = params) do
  user_schema = Config.user_schema
  email = password_params["email"]
  user = Schemas.get_user_by_email email

  changeset = Controller.changeset :confirmation, user_schema, user_schema.__struct__

  case user do
    nil ->
      # Unknown email address: report the error without sending anything.
      conn
      |> respond_with(
        :confirmation_create_error,
        %{
          changeset: changeset,
          error: Messages.backend().could_not_find_that_email_address()
        }
      )

    user ->
      # An already-confirmed account is only re-sent a confirmation when a
      # pending email change (unconfirmed_email) awaits confirmation.
      if user_schema.confirmed?(user) && !(Config.get(:confirm_email_updates) && user.unconfirmed_email) do
        conn
        |> respond_with(
          :confirmation_create_error,
          %{
            changeset: changeset,
            email: "",
            error: Messages.backend().account_already_confirmed()
          }
        )
      else
        conn
        |> send_confirmation(user, user_schema)
        |> respond_with(:confirmation_create_success, %{params: params})
      end
  end
end
@doc """
Handle the user's click on the confirm link in the confirmation email.

Validate that the confirmation token has not expired and sets `confirmation_sent_at`
field to nil, marking the user as confirmed.
"""
@spec edit(Plug.Conn.t, Map.t) :: Plug.Conn.t
def edit(conn, params) do
  user_schema = Config.user_schema
  token = params["id"]

  user = Schemas.get_by_user confirmation_token: token

  case user do
    nil ->
      # NOTE(review): this changeset is built but unused in this branch.
      changeset = Controller.changeset :confirmation, user_schema, user_schema.__struct__
      conn
      |> respond_with(
        :confirmation_update_invalid,
        %{
          params: params,
          error: Messages.backend().invalid_confirmation_token()
        }
      )

    user ->
      if ConfirmableService.expired? user do
        conn
        |> respond_with(
          :confirmation_update_expired,
          %{
            params: params,
            error: Messages.backend().confirmation_token_expired()
          }
        )
      else
        # When email-change confirmation is enabled, confirming promotes the
        # pending unconfirmed_email to the primary email address.
        attrs = case Config.get(:confirm_email_updates) do
          true ->
            %{
              email: user.unconfirmed_email,
              unconfirmed_email: nil
            }
          _ ->
            %{}
        end

        # Clear the token and stamp the confirmation time.
        changeset = Ecto.Changeset.change(user, Map.merge(attrs, %{
          confirmation_token: nil,
          confirmed_at: NaiveDateTime.utc_now(),
        }))

        case Config.repo.update(changeset) do
          {:ok, user} ->
            # Log the user in after a successful confirmation.
            Config.auth_module
            |> apply(Config.update_login, [conn, user, [id_key: Config.schema_key]])
            |> respond_with(
              :confirmation_update_success,
              %{
                params: params,
                info: Messages.backend().user_account_confirmed_successfully()
              }
            )

          {:error, _changeset} ->
            conn
            |> respond_with(
              :confirmation_update_error,
              %{
                params: params,
                error: Messages.backend().problem_confirming_user_account()
              }
            )
        end
      end
  end
end
end
| 29.643836 | 109 | 0.580176 |
e84810b1cc74652c5d42b47f03040e385e21f4e1 | 948 | ex | Elixir | api/lib/api.ex | acoustep/ember-phoenix-slug-example | 871e667e8cf84605f3bb1b4d2bb5a7dca914a614 | [
"MIT"
] | 1 | 2016-05-08T07:52:50.000Z | 2016-05-08T07:52:50.000Z | api/lib/api.ex | acoustep/ember-phoenix-slug-example | 871e667e8cf84605f3bb1b4d2bb5a7dca914a614 | [
"MIT"
] | null | null | null | api/lib/api.ex | acoustep/ember-phoenix-slug-example | 871e667e8cf84605f3bb1b4d2bb5a7dca914a614 | [
"MIT"
] | null | null | null | defmodule Api do
use Application
# OTP application callback: starts the Phoenix endpoint and Ecto repo
# under a one_for_one supervisor.
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
  import Supervisor.Spec, warn: false

  children = [
    # Start the endpoint when the application starts
    supervisor(Api.Endpoint, []),
    # Start the Ecto repository
    worker(Api.Repo, []),
    # Here you could define other workers and supervisors as children
    # worker(Api.Worker, [arg1, arg2, arg3]),
  ]

  # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
  # for other strategies and supported options
  opts = [strategy: :one_for_one, name: Api.Supervisor]
  Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
  Api.Endpoint.config_change(changed, removed)
  :ok
end
end
| 30.580645 | 71 | 0.702532 |
e8485debacc9336202e2116c687c64c4b7d5640d | 757 | ex | Elixir | apps/dead_letter/lib/dead_letter/supervisor.ex | PillarTechnology/smartcitiesdata | 9420a26820e38267513cd1bfa82c7f5583222bb1 | [
"Apache-2.0"
] | null | null | null | apps/dead_letter/lib/dead_letter/supervisor.ex | PillarTechnology/smartcitiesdata | 9420a26820e38267513cd1bfa82c7f5583222bb1 | [
"Apache-2.0"
] | null | null | null | apps/dead_letter/lib/dead_letter/supervisor.ex | PillarTechnology/smartcitiesdata | 9420a26820e38267513cd1bfa82c7f5583222bb1 | [
"Apache-2.0"
] | null | null | null | defmodule DeadLetter.Supervisor do
@moduledoc """
DeadLetter application supervisor. Orchestrates and monitors
the server and driver processes.
"""
use Supervisor
@doc """
Start a DeadLetter supervisor and link it to the current process
"""
def start_link(opts) do
  # Registered under the module name, so only one instance runs per node.
  Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
end
@doc """
Initialize the DeadLetter supervisor with all the necessary
configurations for starting its children.
"""
def init(opts) do
  # :driver is required; it carries the driver module, its init args, and
  # the config map handed to DeadLetter.Server.
  config = Keyword.fetch!(opts, :driver) |> Enum.into(%{})

  children =
    [
      {config.module, config.init_args},
      {DeadLetter.Server, config}
    ]
    |> List.flatten()

  Supervisor.init(children, strategy: :one_for_one)
end
end
| 23.65625 | 66 | 0.681638 |
e8488ac04633f350325710ecb325cf5531491486 | 882 | ex | Elixir | lib/combine/parser_state.ex | kianmeng/combine | f3a16b56efab388abe2608188d519291549b7eb5 | [
"MIT"
] | 199 | 2015-07-27T11:42:38.000Z | 2022-01-16T13:42:32.000Z | lib/combine/parser_state.ex | kianmeng/combine | f3a16b56efab388abe2608188d519291549b7eb5 | [
"MIT"
] | 45 | 2015-07-27T00:16:26.000Z | 2018-03-02T23:27:04.000Z | lib/combine/parser_state.ex | kianmeng/combine | f3a16b56efab388abe2608188d519291549b7eb5 | [
"MIT"
] | 24 | 2015-07-26T23:52:14.000Z | 2021-09-12T01:35:14.000Z | defmodule Combine.ParserState do
@moduledoc """
Defines a struct representing the state of the parser.

The struct has following fields:

- `input` - the unparsed part of the input
- `column` - column position of the next character (zero based)
- `line` - current line position
- `results` - list of outputs produced by so far, in the reverse order
- `labels` - parser labels collected so far (presumably for error reporting — TODO confirm)
- `status` - `:ok` if the grammar rules are satisfied, `:error` otherwise
- `error` - an error message if a grammar rule wasn't satisfied
"""

@type t :: %__MODULE__{
        input: any,
        column: non_neg_integer,
        line: pos_integer,
        results: [any],
        labels: [any],
        status: :ok | :error,
        error: any
      }

defstruct input: <<>>,
          column: 0,
          line: 1,
          results: [],
          labels: [],
          # flips to :error when a grammar rule fails
          status: :ok,
          error: nil
end
| 26.727273 | 77 | 0.591837 |
e848adaaad566c03e70a7b2e69bb91b54fd43c90 | 30,330 | exs | Elixir | lib/elixir/test/elixir/stream_test.exs | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/stream_test.exs | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/stream_test.exs | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule StreamTest do
use ExUnit.Case, async: true
# Collectable target backed by the process dictionary, used to observe the
# :cont/:done/:halt collector protocol events in Stream tests.
defmodule PDict do
  defstruct []

  defimpl Collectable do
    def into(struct) do
      {struct,
       fn
         _, {:cont, x} -> Process.put(:stream_cont, [x|Process.get(:stream_cont)])
         _, :done -> Process.put(:stream_done, true)
         _, :halt -> Process.put(:stream_halt, true)
       end}
    end
  end
end
# Basic protocol coverage: a stream implements Enumerable (reduce, member?,
# count) and composes lazily with further Stream transformations.
test "streams as enumerables" do
  stream = Stream.map([1, 2, 3], &(&1 * 2))

  # Reduce
  assert Enum.map(stream, &(&1 + 1)) == [3, 5, 7]

  # Member
  assert Enum.member?(stream, 4)
  refute Enum.member?(stream, 1)

  # Count
  assert Enum.count(stream) == 3
end

test "streams are composable" do
  stream = Stream.map([1, 2, 3], &(&1 * 2))
  assert is_lazy(stream)

  stream = Stream.map(stream, &(&1 + 1))
  assert is_lazy(stream)

  assert Enum.to_list(stream) == [3, 5, 7]
end
# chunk semantics: step defaults to count; a leftover enumerable pads the
# final partial chunk; [] keeps partial chunks; no leftover discards them.
test "chunk/2, chunk/3 and chunk/4" do
  assert Stream.chunk([1, 2, 3, 4, 5], 2) |> Enum.to_list ==
         [[1, 2], [3, 4]]
  assert Stream.chunk([1, 2, 3, 4, 5], 2, 2, [6]) |> Enum.to_list ==
         [[1, 2], [3, 4], [5, 6]]
  assert Stream.chunk([1, 2, 3, 4, 5, 6], 3, 2) |> Enum.to_list ==
         [[1, 2, 3], [3, 4, 5]]
  assert Stream.chunk([1, 2, 3, 4, 5, 6], 2, 3) |> Enum.to_list ==
         [[1, 2], [4, 5]]
  assert Stream.chunk([1, 2, 3, 4, 5, 6], 3, 2, []) |> Enum.to_list ==
         [[1, 2, 3], [3, 4, 5], [5, 6]]
  assert Stream.chunk([1, 2, 3, 4, 5, 6], 3, 3, []) |> Enum.to_list ==
         [[1, 2, 3], [4, 5, 6]]
  assert Stream.chunk([1, 2, 3, 4, 5], 4, 4, 6..10) |> Enum.to_list ==
         [[1, 2, 3, 4], [5, 6, 7, 8]]
end

test "chunk/4 is zippable" do
  stream = Stream.chunk([1, 2, 3, 4, 5, 6], 3, 2, [])
  list = Enum.to_list(stream)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
end

# Halting mid-stream must still flush (and pad) the trailing chunk.
test "chunk/4 is haltable" do
  assert 1..10 |> Stream.take(6) |> Stream.chunk(4, 4, [7, 8]) |> Enum.to_list ==
         [[1, 2, 3, 4], [5, 6, 7, 8]]
  assert 1..10 |> Stream.take(6) |> Stream.chunk(4, 4, [7, 8]) |> Stream.take(3) |> Enum.to_list ==
         [[1, 2, 3, 4], [5, 6, 7, 8]]
  assert 1..10 |> Stream.take(6) |> Stream.chunk(4, 4, [7, 8]) |> Stream.take(2) |> Enum.to_list ==
         [[1, 2, 3, 4], [5, 6, 7, 8]]
  assert 1..10 |> Stream.take(6) |> Stream.chunk(4, 4, [7, 8]) |> Stream.take(1) |> Enum.to_list ==
         [[1, 2, 3, 4]]
  assert 1..6 |> Stream.take(6) |> Stream.chunk(4, 4, [7, 8]) |> Enum.to_list ==
         [[1, 2, 3, 4], [5, 6, 7, 8]]
end
# chunk_by groups consecutive elements for which the function returns the
# same value.
test "chunk_by/2" do
  stream = Stream.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1))

  assert is_lazy(stream)
  assert Enum.to_list(stream) ==
         [[1], [2, 2], [3], [4, 4, 6], [7, 7]]
  assert stream |> Stream.take(3) |> Enum.to_list ==
         [[1], [2, 2], [3]]
  assert 1..10 |> Stream.chunk(2) |> Enum.take(2) ==
         [[1, 2], [3, 4]]
end

test "chunk_by/2 is zippable" do
  stream = Stream.chunk_by([1, 2, 2, 3], &(rem(&1, 2) == 1))
  list = Enum.to_list(stream)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
end
# concat over mixed enumerables (ranges, lists, infinite streams).
test "concat/1" do
  stream = Stream.concat([1..3, [], [4, 5, 6], [], 7..9])
  assert is_function(stream)

  assert Enum.to_list(stream) == [1, 2, 3, 4, 5, 6, 7, 8, 9]
  assert Enum.take(stream, 5) == [1, 2, 3, 4, 5]

  stream = Stream.concat([1..3, [4, 5, 6], Stream.cycle(7..100)])
  assert is_function(stream)

  assert Enum.take(stream, 13) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
end

test "concat/2" do
  stream = Stream.concat(1..3, 4..6)
  assert is_function(stream)
  assert Stream.cycle(stream) |> Enum.take(16) == [1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 6, 1, 2, 3, 4]

  stream = Stream.concat(1..3, [])
  assert is_function(stream)
  assert Stream.cycle(stream) |> Enum.take(5) == [1, 2, 3, 1, 2]

  stream = Stream.concat(1..6, Stream.cycle(7..9))
  assert is_function(stream)
  assert Stream.drop(stream, 3) |> Enum.take(13) == [4, 5, 6, 7, 8, 9, 7, 8, 9, 7, 8, 9, 7]

  stream = Stream.concat(Stream.cycle(1..3), Stream.cycle(4..6))
  assert is_function(stream)
  assert Enum.take(stream, 13) == [1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1]
end

# Wrapped lazy enumerations must keep their own halt semantics when
# concatenated.
test "concat/2 does not intercept wrapped lazy enumeration" do
  # concat returns a lazy enumeration that does not halt
  assert Stream.concat([[0], Stream.map([1, 2, 3], & &1), [4]])
         |> Stream.take_while(fn x -> x <= 4 end)
         |> Enum.to_list == [0, 1, 2, 3, 4]

  # concat returns a lazy enumeration that does halts
  assert Stream.concat([[0], Stream.take_while(1..6, &(&1 <= 3)), [4]])
         |> Stream.take_while(fn x -> x <= 4 end)
         |> Enum.to_list == [0, 1, 2, 3, 4]
end
test "cycle/1" do
  stream = Stream.cycle([1, 2, 3])
  assert is_function(stream)

  assert Stream.cycle([1, 2, 3]) |> Stream.take(5) |> Enum.to_list == [1, 2, 3, 1, 2]
  assert Enum.take(stream, 5) == [1, 2, 3, 1, 2]
end

test "cycle/1 is zippable" do
  stream = Stream.cycle([1, 2, 3])
  assert Enum.zip(1..6, [1, 2, 3, 1, 2, 3]) == Enum.zip(1..6, stream)
end

# Cycling a finite *stream* (not just a list) must also work.
test "cycle/1 with inner stream" do
  assert [1, 2, 3] |> Stream.take(2) |> Stream.cycle |> Enum.take(4) ==
         [1, 2, 1, 2]
end

test "dedup/1 is lazy" do
  assert is_lazy Stream.dedup([1, 2, 3])
end

# dedup collapses *consecutive* duplicates only; strict comparison keeps
# 1 and 1.0 distinct.
test "dedup/1" do
  assert Stream.dedup([1, 1, 2, 1, 1, 2, 1]) |> Enum.to_list == [1, 2, 1, 2, 1]
  assert Stream.dedup([2, 1, 1, 2, 1]) |> Enum.to_list == [2, 1, 2, 1]
  assert Stream.dedup([1, 2, 3, 4]) |> Enum.to_list == [1, 2, 3, 4]
  assert Stream.dedup([1, 1.0, 2.0, 2]) |> Enum.to_list == [1, 1.0, 2.0, 2]
  assert Stream.dedup([]) |> Enum.to_list == []
  assert Stream.dedup([nil, nil, true, {:value, true}]) |> Enum.to_list
         == [nil, true, {:value, true}]
  assert Stream.dedup([nil]) |> Enum.to_list == [nil]
end
test "dedup_by/2" do
  assert Stream.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end) |> Enum.to_list
         == [{1, :x}, {2, :y}, {1, :x}]
end

test "drop/2" do
  stream = Stream.drop(1..10, 5)
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [6, 7, 8, 9, 10]

  assert Enum.to_list(Stream.drop(1..5, 0)) == [1, 2, 3, 4, 5]
  assert Enum.to_list(Stream.drop(1..3, 5)) == []

  nats = Stream.iterate(1, &(&1 + 1))
  assert Stream.drop(nats, 2) |> Enum.take(5) == [3, 4, 5, 6, 7]
end

# A negative count drops from the *end* of the enumerable.
test "drop/2 with negative count" do
  stream = Stream.drop(1..10, -5)
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [1, 2, 3, 4, 5]

  stream = Stream.drop(1..10, -5)
  list = Enum.to_list(stream)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
end

# With a negative count, element N may only be emitted once `count` further
# elements have arrived (driven here by messages through inbox_stream).
test "drop/2 with negative count stream entries" do
  par = self
  pid = spawn_link fn ->
    Enum.each Stream.drop(&inbox_stream/2, -3),
              fn x -> send par, {:stream, x} end
  end

  send pid, {:stream, 1}
  send pid, {:stream, 2}
  send pid, {:stream, 3}
  refute_receive {:stream, 1}

  send pid, {:stream, 4}
  assert_receive {:stream, 1}

  send pid, {:stream, 5}
  assert_receive {:stream, 2}
  refute_receive {:stream, 3}
end

test "drop_while/2" do
  stream = Stream.drop_while(1..10, &(&1 <= 5))
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [6, 7, 8, 9, 10]

  assert Enum.to_list(Stream.drop_while(1..5, &(&1 <= 0))) == [1, 2, 3, 4, 5]
  assert Enum.to_list(Stream.drop_while(1..3, &(&1 <= 5))) == []

  nats = Stream.iterate(1, &(&1 + 1))
  assert Stream.drop_while(nats, &(&1 <= 5)) |> Enum.take(5) == [6, 7, 8, 9, 10]
end
# --- Stream.each/2: side effects per element, values pass through unchanged ---
test "each/2" do
  # Side effects are tracked in the process dictionary; the list is built by
  # prepending, so it ends up reversed relative to enumeration order.
  Process.put(:stream_each, [])
  stream = Stream.each([1, 2, 3], fn x ->
    Process.put(:stream_each, [x|Process.get(:stream_each)])
  end)
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [1, 2, 3]
  assert Process.get(:stream_each) == [3, 2, 1]
end

# --- Stream.filter/2 ---
test "filter/2" do
  stream = Stream.filter([1, 2, 3], fn(x) -> rem(x, 2) == 0 end)
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [2]
  nats = Stream.iterate(1, &(&1 + 1))
  assert Stream.filter(nats, &(rem(&1, 2) == 0)) |> Enum.take(5) == [2, 4, 6, 8, 10]
end

# --- Stream.filter_map/3: filter then map in one lazy pass ---
test "filter_map/3" do
  stream = Stream.filter_map([1, 2, 3], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2))
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [4]
  nats = Stream.iterate(1, &(&1 + 1))
  assert Stream.filter_map(nats, &(rem(&1, 2) == 0), &(&1 * 2))
         |> Enum.take(5) == [4, 8, 12, 16, 20]
end
# --- Stream.flat_map/2 ---
test "flat_map/2" do
  stream = Stream.flat_map([1, 2, 3], &[&1, &1 * 2])
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [1, 2, 2, 4, 3, 6]
  nats = Stream.iterate(1, &(&1 + 1))
  assert Stream.flat_map(nats, &[&1, &1 * 2]) |> Enum.take(6) == [1, 2, 2, 4, 3, 6]
end

test "flat_map/2 does not intercept wrapped lazy enumeration" do
  # flat_map returns a lazy enumeration that does not halt
  assert [1, 2, 3, -1, -2]
         |> Stream.flat_map(fn x -> Stream.map([x, x+1], & &1) end)
         |> Stream.take_while(fn x -> x >= 0 end)
         |> Enum.to_list == [1, 2, 2, 3, 3, 4]
  # flat_map returns a lazy enumeration that does halt
  assert [1, 2, 3, -1, -2]
         |> Stream.flat_map(fn x -> Stream.take_while([x, x+1, x+2], &(&1 <= x + 1)) end)
         |> Stream.take_while(fn x -> x >= 0 end)
         |> Enum.to_list == [1, 2, 2, 3, 3, 4]
  # flat_map returns a lazy enumeration that does halt, wrapped in an enumerable
  assert [1, 2, 3, -1, -2]
         |> Stream.flat_map(fn x -> Stream.concat([x], Stream.take_while([x+1, x+2], &(&1 <= x + 1))) end)
         |> Stream.take_while(fn x -> x >= 0 end)
         |> Enum.to_list == [1, 2, 2, 3, 3, 4]
end

test "flat_map/2 is zippable" do
  stream = [1, 2, 3, -1, -2]
           |> Stream.flat_map(fn x -> Stream.map([x, x+1], & &1) end)
           |> Stream.take_while(fn x -> x >= 0 end)
  list = Enum.to_list(stream)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
end

test "flat_map/2 does not leave inner stream suspended" do
  # The resource's after-fun flips :stream_flat_map; it must run even though
  # the inner (infinite) resource stream is abandoned after 3 elements.
  stream = Stream.flat_map [1, 2, 3],
    fn i ->
      Stream.resource(fn -> i end,
                      fn acc -> {[acc], acc + 1} end,
                      fn _ -> Process.put(:stream_flat_map, true) end)
    end
  Process.put(:stream_flat_map, false)
  assert stream |> Enum.take(3) == [1, 2, 3]
  assert Process.get(:stream_flat_map)
end

test "flat_map/2 does not leave outer stream suspended" do
  # Same as above, but the resource is the *outer* stream being flat-mapped.
  stream = Stream.resource(fn -> 1 end,
                           fn acc -> {[acc], acc + 1} end,
                           fn _ -> Process.put(:stream_flat_map, true) end)
  stream = Stream.flat_map(stream, fn i -> [i, i + 1, i + 2] end)
  Process.put(:stream_flat_map, false)
  assert stream |> Enum.take(3) == [1, 2, 3]
  assert Process.get(:stream_flat_map)
end

test "flat_map/2 closes on error" do
  # Cleanup must also run when the mapper throws mid-enumeration.
  stream = Stream.resource(fn -> 1 end,
                           fn acc -> {[acc], acc + 1} end,
                           fn _ -> Process.put(:stream_flat_map, true) end)
  stream = Stream.flat_map(stream, fn _ -> throw(:error) end)
  Process.put(:stream_flat_map, false)
  assert catch_throw(Enum.to_list(stream)) == :error
  assert Process.get(:stream_flat_map)
end

test "flat_map/2 with inner flat_map/2" do
  stream = Stream.flat_map(1..5, fn x ->
    Stream.flat_map([x], fn x ->
      x .. x * x
    end) |> Stream.map(& &1 * 1)
  end)
  assert Enum.take(stream, 5) == [1, 2, 3, 4, 3]
end

test "flat_map/2 properly halts both inner and outer stream when inner stream is halted" do
  # Fixes a bug that, when the inner stream was done,
  # sending it a halt would cause it to return the
  # inner stream was halted, forcing flat_map to get
  # the next value from the outer stream, evaluate it,
  # get another inner stream, just to halt it.
  assert [1, 2] # 2 should never be used
         |> Stream.flat_map(fn 1 -> Stream.repeatedly(fn -> 1 end) end)
         |> Stream.flat_map(fn 1 -> Stream.repeatedly(fn -> 1 end) end)
         |> Enum.take(1) == [1]
end
# --- Stream.interval/1: emits 0, 1, 2, ... every `n` milliseconds ---
test "interval/1" do
  stream = Stream.interval(10)
  now = :os.timestamp
  assert Enum.take(stream, 5) == [0, 1, 2, 3, 4]
  # Five ticks at 10ms apiece must take at least 50_000 microseconds.
  assert :timer.now_diff(:os.timestamp, now) > 50000
end

# --- Stream.into/2,3 + Stream.run/1 ---
# %PDict{} is a test collectable defined elsewhere in this file; judging by
# the keys asserted below, it records collected items under :stream_cont
# (prepended, hence reversed) and flags :stream_done / :stream_halt on
# normal completion vs. abnormal termination.
test "into/2 and run/1" do
  Process.put(:stream_cont, [])
  Process.put(:stream_done, false)
  Process.put(:stream_halt, false)
  stream = Stream.into([1, 2, 3], %PDict{})
  assert is_lazy(stream)
  assert Stream.run(stream) == :ok
  assert Process.get(:stream_cont) == [3, 2, 1]
  assert Process.get(:stream_done)
  refute Process.get(:stream_halt)
  # When the source raises, the collectable must be told to halt.
  stream = Stream.into(fn _, _ -> raise "error" end, %PDict{})
  catch_error(Stream.run(stream))
  assert Process.get(:stream_halt)
end

test "into/3" do
  # The third argument transforms items before collection; the stream itself
  # still yields the untransformed elements.
  Process.put(:stream_cont, [])
  Process.put(:stream_done, false)
  Process.put(:stream_halt, false)
  stream = Stream.into([1, 2, 3], %PDict{}, fn x -> x*2 end)
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [1, 2, 3]
  assert Process.get(:stream_cont) == [6, 4, 2]
  assert Process.get(:stream_done)
  refute Process.get(:stream_halt)
end

test "into/2 with halting" do
  # Early termination by the consumer counts as :done, not :halt.
  Process.put(:stream_cont, [])
  Process.put(:stream_done, false)
  Process.put(:stream_halt, false)
  stream = Stream.into([1, 2, 3], %PDict{})
  assert is_lazy(stream)
  assert Enum.take(stream, 1) == [1]
  assert Process.get(:stream_cont) == [1]
  assert Process.get(:stream_done)
  refute Process.get(:stream_halt)
end
# --- Stream.transform/3: map + accumulate in one pass ---
test "transform/3" do
  # Each step emits [element, accumulator] and folds the element into the acc.
  stream = Stream.transform([1, 2, 3], 0, &{[&1, &2], &1 + &2})
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [1, 0, 2, 1, 3, 3]
  nats = Stream.iterate(1, &(&1 + 1))
  assert Stream.transform(nats, 0, &{[&1, &2], &1 + &2}) |> Enum.take(6) == [1, 0, 2, 1, 3, 3]
end

test "transform/3 with halt" do
  # Returning {:halt, acc} from the reducer stops enumeration and must still
  # trigger the resource's cleanup fun.
  stream = Stream.resource(fn -> 1 end,
                           fn acc -> {[acc], acc + 1} end,
                           fn _ -> Process.put(:stream_transform, true) end)
  stream = Stream.transform(stream, 0, fn i, acc ->
    if acc < 3, do: {[i], acc + 1}, else: {:halt, acc}
  end)
  Process.put(:stream_transform, false)
  assert Enum.to_list(stream) == [1, 2, 3]
  assert Process.get(:stream_transform)
end

# --- Stream.iterate/2 ---
test "iterate/2" do
  stream = Stream.iterate(0, &(&1+2))
  assert Enum.take(stream, 5) == [0, 2, 4, 6, 8]
  stream = Stream.iterate(5, &(&1+2))
  assert Enum.take(stream, 5) == [5, 7, 9, 11, 13]
  # Only calculate values if needed: &raise/1 would blow up on the second
  # element, so taking just the seed proves laziness.
  stream = Stream.iterate("HELLO", &raise/1)
  assert Enum.take(stream, 1) == ["HELLO"]
end
# --- Stream.map/2 ---
test "map/2" do
  stream = Stream.map([1, 2, 3], &(&1 * 2))
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [2, 4, 6]
  nats = Stream.iterate(1, &(&1 + 1))
  assert Stream.map(nats, &(&1 * 2)) |> Enum.take(5) == [2, 4, 6, 8, 10]
  # Composed maps fuse: both functions are applied per element, lazily.
  assert Stream.map(nats, &(&1 - 2)) |> Stream.map(&(&1 * 2)) |> Enum.take(3) == [-2, 0, 2]
end

# --- Stream.reject/2: the complement of filter/2 ---
test "reject/2" do
  stream = Stream.reject([1, 2, 3], fn(x) -> rem(x, 2) == 0 end)
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [1, 3]
  nats = Stream.iterate(1, &(&1 + 1))
  assert Stream.reject(nats, &(rem(&1, 2) == 0)) |> Enum.take(5) == [1, 3, 5, 7, 9]
end

# --- Stream.repeatedly/1: call a zero-arity fun forever ---
test "repeatedly/1" do
  stream = Stream.repeatedly(fn -> 1 end)
  assert Enum.take(stream, 5) == [1, 1, 1, 1, 1]
  # The fun is re-invoked per element (two uniform draws should differ).
  stream = Stream.repeatedly(&:random.uniform/0)
  [r1, r2] = Enum.take(stream, 2)
  assert r1 != r2
end
# --- Stream.resource/3 ---
# Each test flags :stream_resource in the after-fun to prove the resource is
# released on every termination path: consumer error, halt, and exhaustion.
test "resource/3 closes on errors" do
  stream = Stream.resource(fn -> 1 end,
                           fn acc -> {[acc], acc + 1} end,
                           fn _ -> Process.put(:stream_resource, true) end)
  Process.put(:stream_resource, false)
  stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end)
  assert catch_throw(Enum.to_list(stream)) == :error
  assert Process.get(:stream_resource)
end

test "resource/3 is zippable" do
  stream = Stream.resource(fn -> 1 end,
                           fn 10 -> {:halt, 10}
                              acc -> {[acc], acc + 1}
                           end,
                           fn _ -> Process.put(:stream_resource, true) end)
  list = Enum.to_list(stream)
  Process.put(:stream_resource, false)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
  assert Process.get(:stream_resource)
end

# The next three variants emit several elements per step as a plain list.
test "resource/3 halts with inner list" do
  stream = Stream.resource(fn -> 1 end,
                           fn acc -> {[acc, acc+1, acc+2], acc + 1} end,
                           fn _ -> Process.put(:stream_resource, true) end)
  Process.put(:stream_resource, false)
  # take(5) stops mid-way through the second emitted list.
  assert Enum.take(stream, 5) == [1, 2, 3, 2, 3]
  assert Process.get(:stream_resource)
end

test "resource/3 closes on errors with inner list" do
  stream = Stream.resource(fn -> 1 end,
                           fn acc -> {[acc, acc+1, acc+2], acc + 1} end,
                           fn _ -> Process.put(:stream_resource, true) end)
  Process.put(:stream_resource, false)
  stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end)
  assert catch_throw(Enum.to_list(stream)) == :error
  assert Process.get(:stream_resource)
end

test "resource/3 is zippable with inner list" do
  stream = Stream.resource(fn -> 1 end,
                           fn 10 -> {:halt, 10}
                              acc -> {[acc, acc+1, acc+2], acc + 1}
                           end,
                           fn _ -> Process.put(:stream_resource, true) end)
  list = Enum.to_list(stream)
  Process.put(:stream_resource, false)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
  assert Process.get(:stream_resource)
end

# The next three variants emit an arbitrary enumerable (a Range) per step.
test "resource/3 halts with inner enum" do
  stream = Stream.resource(fn -> 1 end,
                           fn acc -> {acc..acc+2, acc + 1} end,
                           fn _ -> Process.put(:stream_resource, true) end)
  Process.put(:stream_resource, false)
  assert Enum.take(stream, 5) == [1, 2, 3, 2, 3]
  assert Process.get(:stream_resource)
end

test "resource/3 closes on errors with inner enum" do
  stream = Stream.resource(fn -> 1 end,
                           fn acc -> {acc..acc+2, acc + 1} end,
                           fn _ -> Process.put(:stream_resource, true) end)
  Process.put(:stream_resource, false)
  stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end)
  assert catch_throw(Enum.to_list(stream)) == :error
  assert Process.get(:stream_resource)
end

test "resource/3 is zippable with inner enum" do
  stream = Stream.resource(fn -> 1 end,
                           fn 10 -> {:halt, 10}
                              acc -> {acc..acc+2, acc + 1}
                           end,
                           fn _ -> Process.put(:stream_resource, true) end)
  list = Enum.to_list(stream)
  Process.put(:stream_resource, false)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
  assert Process.get(:stream_resource)
end
# --- Stream.transform/4: transform/3 plus start-fun and after-fun ---
# The after-fun pattern-matches on the accumulator it expects at termination,
# so a wrong final acc fails the test with a FunctionClauseError.
test "transform/4" do
  stream = Stream.transform(1..10, fn -> 0 end,
                            fn x, acc -> {[x, x + acc], x} end,
                            fn 10 -> Process.put(:stream_transform, true) end)
  Process.put(:stream_transform, false)
  assert Enum.to_list(stream) ==
         [1, 1, 2, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19]
  assert Process.get(:stream_transform)
end

test "transform/4 closes on errors" do
  # The throw happens at x = 3 (3 + 2 > 2), so the last committed acc is 2.
  stream = Stream.transform(1..10, fn -> 0 end,
                            fn x, acc -> {[x + acc], x} end,
                            fn 2 -> Process.put(:stream_transform, true) end)
  Process.put(:stream_transform, false)
  stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end)
  assert catch_throw(Enum.to_list(stream)) == :error
  assert Process.get(:stream_transform)
end

test "transform/4 is zippable" do
  stream = Stream.transform(1..20, fn -> 0 end,
                            fn 10, acc -> {:halt, acc}
                               x, acc -> {[x + acc], x}
                            end,
                            fn 9 -> Process.put(:stream_transform, true) end)
  list = Enum.to_list(stream)
  Process.put(:stream_transform, false)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
  assert Process.get(:stream_transform)
end

# Variants emitting several elements per step as a plain list.
test "transform/4 halts with inner list" do
  stream = Stream.transform(1..10, fn -> :acc end,
                            fn x, acc -> {[x, x+1, x+2], acc} end,
                            fn :acc -> Process.put(:stream_transform, true) end)
  Process.put(:stream_transform, false)
  assert Enum.take(stream, 5) == [1, 2, 3, 2, 3]
  assert Process.get(:stream_transform)
end

test "transform/4 closes on errors with inner list" do
  stream = Stream.transform(1..10, fn -> :acc end,
                            fn x, acc -> {[x, x+1, x+2], acc} end,
                            fn :acc -> Process.put(:stream_transform, true) end)
  Process.put(:stream_transform, false)
  stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end)
  assert catch_throw(Enum.to_list(stream)) == :error
  assert Process.get(:stream_transform)
end

test "transform/4 is zippable with inner list" do
  stream = Stream.transform(1..20, fn -> :inner end,
                            fn 10, acc -> {:halt, acc}
                               x, acc -> {[x, x+1, x+2], acc}
                            end,
                            fn :inner -> Process.put(:stream_transform, true) end)
  list = Enum.to_list(stream)
  Process.put(:stream_transform, false)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
  assert Process.get(:stream_transform)
end

test "transform/4 halts with inner enum" do
  # Emits a Range per step, exercising the inner-enumerable path.
  stream = Stream.transform(1..10, fn -> :acc end,
                            fn x, acc -> {x..x+2, acc} end,
                            fn :acc -> Process.put(:stream_transform, true) end)
  Process.put(:stream_transform, false)
  assert Enum.take(stream, 5) == [1, 2, 3, 2, 3]
  assert Process.get(:stream_transform)
end
test "transform/4 closes on errors with inner enum" do
  # Emit the Range itself — not `{[x..x+2], acc}` (a one-element list holding
  # a range) — so the inner-*enum* path is actually exercised, consistent with
  # the "halts with inner enum" test above and the resource/3 inner-enum tests.
  stream = Stream.transform(1..10, fn -> :acc end,
                            fn x, acc -> {x..x+2, acc} end,
                            fn :acc -> Process.put(:stream_transform, true) end)
  Process.put(:stream_transform, false)
  # The consumer throws once a value exceeds 2; cleanup must still run.
  stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end)
  assert catch_throw(Enum.to_list(stream)) == :error
  assert Process.get(:stream_transform)
end
test "transform/4 is zippable with inner enum" do
  # Emit the Range itself — not `{[x..x+2], acc}` (a one-element list holding
  # a range) — so zipping exercises the inner-*enum* path, matching the
  # sibling inner-enum tests.
  stream = Stream.transform(1..20, fn -> :inner end,
                            fn 10, acc -> {:halt, acc}
                               x, acc -> {x..x+2, acc}
                            end,
                            fn :inner -> Process.put(:stream_transform, true) end)
  list = Enum.to_list(stream)
  Process.put(:stream_transform, false)
  # Zipping the stream with itself runs two interleaved (suspended)
  # enumerations; both must produce the same elements and clean up.
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
  assert Process.get(:stream_transform)
end
# --- Stream.scan/2,3: running fold that emits every intermediate acc ---
test "scan/2" do
  stream = Stream.scan(1..5, &(&1 + &2))
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [1, 3, 6, 10, 15]
  assert Stream.scan([], &(&1 + &2)) |> Enum.to_list == []
end

test "scan/3" do
  stream = Stream.scan(1..5, 0, &(&1 + &2))
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [1, 3, 6, 10, 15]
  assert Stream.scan([], 0, &(&1 + &2)) |> Enum.to_list == []
end

# --- Stream.take/2 ---
test "take/2" do
  stream = Stream.take(1..1000, 5)
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [1, 2, 3, 4, 5]
  assert Enum.to_list(Stream.take(1..1000, 0)) == []
  assert Enum.to_list(Stream.take([], 5)) == []
  assert Enum.to_list(Stream.take(1..3, 5)) == [1, 2, 3]
  nats = Stream.iterate(1, &(&1 + 1))
  assert Enum.to_list(Stream.take(nats, 5)) == [1, 2, 3, 4, 5]
  # take composed after drop.
  stream = Stream.drop(1..100, 5)
  assert Stream.take(stream, 5) |> Enum.to_list == [6, 7, 8, 9, 10]
  # Over-dropping after take yields nothing; splitting it is still well-defined.
  stream = 1..5 |> Stream.take(10) |> Stream.drop(15)
  assert {[], []} = Enum.split(stream, 5)
  stream = 1..20 |> Stream.take(10 + 5) |> Stream.drop(4)
  assert Enum.to_list(stream) == [5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
end

test "take/2 with negative count" do
  # A negative count takes the *last* n elements, which forces enumerating the
  # whole source (verified via the each/2 side-effect trace).
  Process.put(:stream_each, [])
  stream = Stream.take(1..100, -5)
  assert is_lazy(stream)
  stream = Stream.each(stream, &Process.put(:stream_each, [&1|Process.get(:stream_each)]))
  assert Enum.to_list(stream) == [96, 97, 98, 99, 100]
  assert Process.get(:stream_each) == [100, 99, 98, 97, 96]
end

test "take/2 is zippable" do
  stream = Stream.take(1..1000, 5)
  list = Enum.to_list(stream)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
end

# --- Stream.take_every/2: every nth element, starting with the first ---
test "take_every/2" do
  assert 1..10
         |> Stream.take_every(2)
         |> Enum.to_list == [1, 3, 5, 7, 9]
  assert 1..10
         |> Stream.drop(2)
         |> Stream.take_every(2)
         |> Stream.drop(1)
         |> Enum.to_list == [5, 7, 9]
  # A step of 0 yields nothing rather than looping forever.
  assert 1..10
         |> Stream.take_every(0)
         |> Enum.to_list == []
  assert []
         |> Stream.take_every(10)
         |> Enum.to_list == []
end

test "take_every/2 without non-negative integer" do
  # Negative or non-integer steps are rejected by guard clauses.
  assert_raise FunctionClauseError, fn ->
    Stream.take_every(1..10, -1)
  end
  assert_raise FunctionClauseError, fn ->
    Stream.take_every(1..10, 3.33)
  end
end
# --- Stream.take_while/2 ---
test "take_while/2" do
  stream = Stream.take_while(1..1000, &(&1 <= 5))
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [1, 2, 3, 4, 5]
  assert Enum.to_list(Stream.take_while(1..1000, &(&1 <= 0))) == []
  assert Enum.to_list(Stream.take_while(1..3, &(&1 <= 5))) == [1, 2, 3]
  nats = Stream.iterate(1, &(&1 + 1))
  assert Enum.to_list(Stream.take_while(nats, &(&1 <= 5))) == [1, 2, 3, 4, 5]
  stream = Stream.drop(1..100, 5)
  assert Stream.take_while(stream, &(&1 < 11)) |> Enum.to_list == [6, 7, 8, 9, 10]
end

# --- Stream.timer/1: emits a single 0 after `n` milliseconds ---
test "timer/1" do
  stream = Stream.timer(10)
  now = :os.timestamp
  assert Enum.to_list(stream) == [0]
  # The elapsed time is measured in microseconds.
  assert :timer.now_diff(:os.timestamp, now) > 10000
end

# --- Stream.unfold/2: generate until the fun returns nil ---
test "unfold/2" do
  stream = Stream.unfold(10, fn x -> if x > 0, do: {x, x-1}, else: nil end)
  assert Enum.take(stream, 5) == [10, 9, 8, 7, 6]
  stream = Stream.unfold(5, fn x -> if x > 0, do: {x, x-1}, else: nil end)
  assert Enum.to_list(stream) == [5, 4, 3, 2, 1]
end

test "unfold/2 only calculates values if needed" do
  # The generator would throw on the second call, so take(1) proves laziness.
  stream = Stream.unfold(1, fn x -> if x > 0, do: {x, x-1}, else: throw(:boom) end)
  assert Enum.take(stream, 1) == [1]
  stream = Stream.unfold(5, fn x -> if x > 0, do: {x, x-1}, else: nil end)
  assert Enum.to_list(Stream.take(stream, 2)) == [5, 4]
end

test "unfold/2 is zippable" do
  stream = Stream.unfold(10, fn x -> if x > 0, do: {x, x-1}, else: nil end)
  list = Enum.to_list(stream)
  assert Enum.zip(list, list) == Enum.zip(stream, stream)
end

# --- Stream.uniq/1 (and the 2-arity key-fun form) ---
test "uniq/1" do
  assert Stream.uniq([1, 2, 3, 2, 1]) |> Enum.to_list ==
         [1, 2, 3]
  # Uniqueness by key fun — NOTE(review): later Elixir versions expose this
  # as uniq_by/2; verify against the Stream version under test.
  assert Stream.uniq([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) |> Enum.to_list ==
         [{1, :x}, {2, :y}]
end
# --- Stream.zip/2 ---
test "zip/2" do
  # Zipping a finite stream with an infinite one stops at the shorter side.
  concat = Stream.concat(1..3, 4..6)
  cycle = Stream.cycle([:a, :b, :c])
  assert Stream.zip(concat, cycle) |> Enum.to_list ==
         [{1, :a}, {2, :b}, {3, :c}, {4, :a}, {5, :b}, {6, :c}]
end

test "zip/2 does not leave streams suspended" do
  # Whichever side the resource sits on, its cleanup fun must run when the
  # other side is exhausted.
  stream = Stream.resource(fn -> 1 end,
                           fn acc -> {[acc], acc + 1} end,
                           fn _ -> Process.put(:stream_zip, true) end)
  Process.put(:stream_zip, false)
  assert Stream.zip([:a, :b, :c], stream) |> Enum.to_list == [a: 1, b: 2, c: 3]
  assert Process.get(:stream_zip)
  Process.put(:stream_zip, false)
  assert Stream.zip(stream, [:a, :b, :c]) |> Enum.to_list == [{1, :a}, {2, :b}, {3, :c}]
  assert Process.get(:stream_zip)
end

test "zip/2 does not leave streams suspended on halt" do
  # Early termination by the consumer (take/2) must also release the resource.
  stream = Stream.resource(fn -> 1 end,
                           fn acc -> {[acc], acc + 1} end,
                           fn _ -> Process.put(:stream_zip, :done) end)
  assert Stream.zip([:a, :b, :c, :d, :e], stream) |> Enum.take(3) ==
         [a: 1, b: 2, c: 3]
  assert Process.get(:stream_zip) == :done
end

test "zip/2 closes on inner error" do
  # %PDict{} flags :stream_done when the collectable is finalized.
  stream = Stream.into([1, 2, 3], %PDict{})
  stream = Stream.zip(stream, Stream.map([:a, :b, :c], fn _ -> throw(:error) end))
  Process.put(:stream_done, false)
  assert catch_throw(Enum.to_list(stream)) == :error
  assert Process.get(:stream_done)
end

test "zip/2 closes on outer error" do
  stream = Stream.into([1, 2, 3], %PDict{})
           |> Stream.zip([:a, :b, :c])
           |> Stream.map(fn _ -> throw(:error) end)
  Process.put(:stream_done, false)
  assert catch_throw(Enum.to_list(stream)) == :error
  assert Process.get(:stream_done)
end

# --- Stream.with_index/2: pairs each element with its 0-based index ---
test "with_index/2" do
  stream = Stream.with_index([1, 2, 3])
  assert is_lazy(stream)
  assert Enum.to_list(stream) == [{1, 0}, {2, 1}, {3, 2}]
  nats = Stream.iterate(1, &(&1 + 1))
  assert Stream.with_index(nats) |> Enum.take(3) == [{1, 0}, {2, 1}, {3, 2}]
end
# Returns true when `stream` is lazy: either a %Stream{} struct or a
# two-arity reducer function (the composed-stream representation).
defp is_lazy(stream) do
  match?(%Stream{}, stream) or is_function(stream, 2)
end

# A hand-rolled reducer driven by the process mailbox, used as an enumerable
# (`&inbox_stream/2`) to simulate a live, unbounded source: each :cont step
# blocks in `receive` until a {:stream, item} message arrives, then feeds the
# item to the reducer fun. Suspend and halt follow the Enumerable reducer
# contract ({:suspended, acc, cont} / {:halted, acc}).
defp inbox_stream({:suspend, acc}, f) do
  {:suspended, acc, &inbox_stream(&1, f)}
end

defp inbox_stream({:halt, acc}, _f) do
  {:halted, acc}
end

defp inbox_stream({:cont, acc}, f) do
  receive do
    {:stream, item} ->
      inbox_stream(f.(item, acc), f)
  end
end
end
| 34.544419 | 108 | 0.546818 |
e848baeff9da8d53e24127835806fe560364e22e | 446 | exs | Elixir | priv/repo/migrations/20180916231415_create_answers.exs | the-mikedavis/mole | 73d884b5dca4e5371b1b399d7e65c0f4a0229851 | [
"BSD-3-Clause"
] | 1 | 2020-07-15T14:39:10.000Z | 2020-07-15T14:39:10.000Z | priv/repo/migrations/20180916231415_create_answers.exs | the-mikedavis/mole | 73d884b5dca4e5371b1b399d7e65c0f4a0229851 | [
"BSD-3-Clause"
] | 59 | 2018-11-05T23:09:10.000Z | 2020-07-11T20:44:14.000Z | priv/repo/migrations/20180916231415_create_answers.exs | the-mikedavis/mole | 73d884b5dca4e5371b1b399d7e65c0f4a0229851 | [
"BSD-3-Clause"
defmodule Mole.Repo.Migrations.CreateAnswers do
  use Ecto.Migration

  # Creates the `answers` table. Each row records a user's answer for an
  # image: whether it was correct and how long it took, with indexes on both
  # foreign keys to speed up per-user and per-image lookups.
  def change do
    create table(:answers) do
      add(:correct, :boolean, default: false, null: false)
      add(:user_id, references(:users, on_delete: :nothing))
      add(:image_id, references(:images, on_delete: :nothing))
      add(:time_spent, :integer)

      timestamps()
    end

    create(index(:answers, [:user_id]))
    create(index(:answers, [:image_id]))
  end
end
| 24.777778 | 61 | 0.674888 |
e848be335027144f336a4e55cef83ea75a041ebc | 840 | ex | Elixir | lib/changelog/transcripts/parser.ex | wojtekmach/changelog.com | d4a8a7703c5f07a3da63bffd770f4642488cf8fd | [
"MIT"
] | 1 | 2020-05-20T16:58:17.000Z | 2020-05-20T16:58:17.000Z | lib/changelog/transcripts/parser.ex | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | null | null | null | lib/changelog/transcripts/parser.ex | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
defmodule Changelog.Transcripts.Parser do
  @moduledoc """
  Splits raw transcript text into per-speaker sections.
  """

  # A speaker marker looks like `**Name:**`; the lazy capture grabs the name.
  @speaker_regex ~r{\*\*(.*?):\*\*}

  @doc """
  Parses `string` into a list of maps, one per speaker section:

      %{"title" => name, "person_id" => id_or_nil, "body" => text}

  `person_id` is looked up by name among `participants` (anything with `name`
  and `id` fields); unknown markers get the title `"Unknown"`. A `nil` string
  is treated as empty and yields `[]`.
  """
  def parse_text(string, participants \\ [])

  def parse_text(nil, participants), do: parse_text("", participants)

  def parse_text(string, participants) do
    sections =
      @speaker_regex
      |> Regex.split(string, include_captures: true, trim: true)
      |> Enum.chunk_every(2)

    Enum.map(sections, fn chunk ->
      # Each chunk is a [marker, body] pair produced by the split above.
      [marker, body] = chunk
      name = speaker_name(marker)

      %{
        "title" => name,
        "person_id" => participant_id(participants, name),
        "body" => String.trim(body)
      }
    end)
  end

  # Extracts the captured speaker name from a `**Name:**` marker,
  # falling back to "Unknown" when the marker does not match.
  defp speaker_name(marker) do
    case Regex.run(@speaker_regex, marker) do
      [_, name] -> name
      nil -> "Unknown"
    end
  end

  # Finds the id of the participant whose name matches, or nil.
  defp participant_id(participants, name) do
    Enum.find_value(participants, fn p -> if p.name == name, do: p.id end)
  end
end
| 31.111111 | 100 | 0.640476 |
e848bfae9bf9254533d11466d911754243476f4d | 580 | exs | Elixir | mix.exs | yudistrange/diffrential | 251b3e5fdb61bedcd4160ce1f0e207adb5485644 | [
"MIT"
] | null | null | null | mix.exs | yudistrange/diffrential | 251b3e5fdb61bedcd4160ce1f0e207adb5485644 | [
"MIT"
] | 3 | 2021-07-26T17:53:32.000Z | 2021-07-26T18:05:56.000Z | mix.exs | yudistrange/diffrential | 251b3e5fdb61bedcd4160ce1f0e207adb5485644 | [
"MIT"
defmodule Diffrential.MixProject do
  use Mix.Project

  # Project manifest consumed by Mix; see `mix help compile.app`.
  def project do
    [
      app: :diffrential,
      version: "0.1.0",
      elixir: "~> 1.11",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # OTP application configuration: only the logger beyond the defaults.
  def application do
    [extra_applications: [:logger]]
  end

  # Third-party dependencies — none declared yet.
  defp deps do
    []
  end
end
| 20 | 87 | 0.582759 |
e848c0e0ffa778d85b47906e2c1c694bd3549870 | 2,517 | exs | Elixir | tests/dummy/test/autox/plugs/broadcast_session_plug_test.exs | foxnewsnetwork/autox | 66ea3f0f7ba8b3f9e910984a2ed3cdf0ef5ef29a | [
"MIT"
] | null | null | null | tests/dummy/test/autox/plugs/broadcast_session_plug_test.exs | foxnewsnetwork/autox | 66ea3f0f7ba8b3f9e910984a2ed3cdf0ef5ef29a | [
"MIT"
] | 20 | 2016-04-05T06:28:58.000Z | 2016-05-12T15:45:37.000Z | tests/dummy/test/autox/plugs/broadcast_session_plug_test.exs | foxnewsnetwork/autox | 66ea3f0f7ba8b3f9e910984a2ed3cdf0ef5ef29a | [
"MIT"
] | null | null | null | defmodule Dummy.BroadcastSessionPlugTest do
use Dummy.ConnChanCase
alias Autox.SessionUtils
alias Dummy.UserSocket
import Dummy.SeedSupport

# Logs a user in over HTTP (creating a session tied to an owner), connects a
# channel socket for the same user, and exposes the "users:<id>" topic so the
# tests below can subscribe and assert on broadcasts.
# NOTE(review): build_user/build_owner/session_attributes come from
# Dummy.SeedSupport (not visible here) — verify their shapes against that module.
setup do
  user = build_user
  owner = build_owner
  relationships = %{
    "owner" => %{
      "data" => %{
        "id" => owner.id,
        "type" => "owners"
      }
    }
  }
  conn = conn()
  |> post("/api/sessions", %{
    "data" => %{
      "type" => "sessions",
      "attributes" => session_attributes,
      "relationships" => relationships
    }
  })
  {:ok, socket} = UserSocket |> connect(%{"user_id" => user.id})
  topic = "users:#{user.id}"
  {:ok, socket: socket, user: user, conn: conn, topic: topic}
end

# Creating a resource over the JSON API should push an "update" broadcast on
# the user's topic carrying the serialized record.
test "creating tacos should trigger broadcast", %{conn: conn, topic: topic} do
  assert conn |> SessionUtils.logged_in?
  @endpoint.subscribe(self(), topic)
  path = conn |> taco_path(:create)
  data = %{"type" => "tacos", "attributes" => taco_attributes}
  conn
  |> post(path, %{"data" => data})
  |> json_response(201)
  |> assert
  assert_broadcast "update", %{data: data, meta: _, links: _}
  assert %{id: _, links: _, type: :tacos, attributes: attributes, relationships: relationships} = data
  assert %{
    name: "al pastor",
    calories: 9000
  } = attributes
  assert %{
    shops: %{
      links: _
    }
  } = relationships
end

# Deleting a resource should push a "destroy" broadcast with the same payload
# shape (so clients can remove it locally).
test "deleting tacos should trigger broadcast", %{conn: conn, topic: topic} do
  assert conn |> SessionUtils.logged_in?
  @endpoint.subscribe(self(), topic)
  taco = build_taco
  path = conn |> taco_path(:delete, taco.id)
  conn
  |> delete(path, %{})
  |> response(204)
  |> assert
  assert_broadcast "destroy", %{data: data, meta: _, links: _}
  assert %{id: _, links: _, type: :tacos, attributes: attributes, relationships: relationships} = data
  assert %{
    name: "al pastor",
    calories: 9000
  } = attributes
  assert %{
    shops: %{
      links: _
    }
  } = relationships
end

# Relationship creation should broadcast a lightweight "refresh" for the
# parent record rather than a full payload.
test "creating taco relationships should not fuck it all up", %{conn: conn, topic: topic} do
  @endpoint.subscribe self, topic
  shop = build_shop
  taco = build_taco
  path = conn |> taco_shop_relationship_path(:create, taco.id)
  data = %{"type" => "shops", "id" => shop.id}
  conn
  |> post(path, %{"data" => data})
  |> response(204)
  |> assert
  taco_id = taco.id
  assert_broadcast "refresh", %{id: ^taco_id, type: :tacos}
end
end
e848cea7528c497e7c38aa5198ea65f3b3c40a82 | 2,361 | ex | Elixir | lib/exqlite/sqlite3_nif.ex | LostKobrakai/exqlite | ab64f59d2f88155a19027d18f63ed7ad07b8d2f4 | [
"MIT"
] | null | null | null | lib/exqlite/sqlite3_nif.ex | LostKobrakai/exqlite | ab64f59d2f88155a19027d18f63ed7ad07b8d2f4 | [
"MIT"
] | null | null | null | lib/exqlite/sqlite3_nif.ex | LostKobrakai/exqlite | ab64f59d2f88155a19027d18f63ed7ad07b8d2f4 | [
"MIT"
defmodule Exqlite.Sqlite3NIF do
  @moduledoc """
  This is the module where all of the NIF entry points reside. Calling this directly
  should be avoided unless you are aware of what you are doing.
  """

  @on_load :load_nifs

  @type db() :: reference()
  @type statement() :: reference()
  # Fixed: the original spelled this `:atom` (the literal atom) rather than
  # the `atom()` type. Errors are an atom code or anything String.Chars-able.
  @type reason() :: atom() | String.Chars.t()

  # Loads the compiled `sqlite3_nif` shared library from the application's
  # priv directory. Runs automatically via @on_load; the charlist literal is
  # required by the Erlang :filename/:erlang APIs. Every stub below raises
  # :not_loaded until the NIF replaces it.
  def load_nifs() do
    path = :filename.join(:code.priv_dir(:exqlite), 'sqlite3_nif')
    :erlang.load_nif(path, 0)
  end

  @spec open(String.Chars.t()) :: {:ok, db()} | {:error, reason()}
  def open(_path), do: :erlang.nif_error(:not_loaded)

  @spec close(db()) :: :ok | {:error, reason()}
  def close(_conn), do: :erlang.nif_error(:not_loaded)

  @spec execute(db(), String.Chars.t()) :: :ok | {:error, reason()}
  def execute(_conn, _sql), do: :erlang.nif_error(:not_loaded)

  @spec changes(db()) :: {:ok, integer()}
  def changes(_conn), do: :erlang.nif_error(:not_loaded)

  @spec prepare(db(), String.Chars.t()) :: {:ok, statement()} | {:error, reason()}
  def prepare(_conn, _sql), do: :erlang.nif_error(:not_loaded)

  # Specs below originally used `[]`, which in typespecs means *the empty
  # list* only; `list()` is the intended "any list" type.
  @spec bind(db(), statement(), list()) ::
          :ok | {:error, reason()} | {:error, {atom(), any()}}
  def bind(_conn, _statement, _args), do: :erlang.nif_error(:not_loaded)

  @spec step(db(), statement()) :: :done | :busy | {:row, list()}
  def step(_conn, _statement), do: :erlang.nif_error(:not_loaded)

  @spec multi_step(db(), statement(), integer()) ::
          :busy | {:rows, [list()]} | {:done, [list()]}
  def multi_step(_conn, _statement, _chunk_size), do: :erlang.nif_error(:not_loaded)

  @spec columns(db(), statement()) :: {:ok, list()} | {:error, reason()}
  def columns(_conn, _statement), do: :erlang.nif_error(:not_loaded)

  @spec last_insert_rowid(db()) :: {:ok, integer()}
  def last_insert_rowid(_conn), do: :erlang.nif_error(:not_loaded)

  @spec transaction_status(db()) :: {:ok, :idle | :transaction}
  def transaction_status(_conn), do: :erlang.nif_error(:not_loaded)

  @spec serialize(db(), String.Chars.t()) :: {:ok, binary()} | {:error, reason()}
  def serialize(_conn, _database), do: :erlang.nif_error(:not_loaded)

  @spec deserialize(db(), String.Chars.t(), binary()) :: :ok | {:error, reason()}
  def deserialize(_conn, _database, _serialized), do: :erlang.nif_error(:not_loaded)

  # TODO: add statement inspection tooling https://sqlite.org/c3ref/expanded_sql.html
end
| 38.704918 | 85 | 0.647607 |
e848e553d4ec798cdefe730e5b65c367ad103f78 | 2,919 | ex | Elixir | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/revision.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/revision.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/revision.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.CloudRun.V1alpha1.Model.Revision do
  @moduledoc """
  Revision is an immutable snapshot of code and configuration. A revision
  references a container image. Revisions are created by updates to a
  Configuration.
  Cloud Run does not currently support referencing a build that is responsible
  for materializing the container image from source.
  See also:
  https://github.com/knative/serving/blob/master/docs/spec/overview.md#revision

  ## Attributes

  *   `apiVersion` (*type:* `String.t`, *default:* `nil`) - The API version for this call such as "serving.knative.dev/v1alpha1".
  *   `kind` (*type:* `String.t`, *default:* `nil`) - The kind of this resource, in this case "Revision".
  *   `metadata` (*type:* `GoogleApi.CloudRun.V1alpha1.Model.ObjectMeta.t`, *default:* `nil`) - Metadata associated with this Revision, including name, namespace, labels,
      and annotations.
  *   `spec` (*type:* `GoogleApi.CloudRun.V1alpha1.Model.RevisionSpec.t`, *default:* `nil`) - Spec holds the desired state of the Revision (from the client).
  *   `status` (*type:* `GoogleApi.CloudRun.V1alpha1.Model.RevisionStatus.t`, *default:* `nil`) - Status communicates the observed state of the Revision (from the
      controller).
  """

  # Auto-generated model (see the file header): ModelBase supplies the struct
  # definition plus the field/2 macro and decode/encode helpers.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :apiVersion => String.t(),
          :kind => String.t(),
          :metadata => GoogleApi.CloudRun.V1alpha1.Model.ObjectMeta.t(),
          :spec => GoogleApi.CloudRun.V1alpha1.Model.RevisionSpec.t(),
          :status => GoogleApi.CloudRun.V1alpha1.Model.RevisionStatus.t()
        }

  # Nested model fields declare their decoder via `as:`.
  field(:apiVersion)
  field(:kind)
  field(:metadata, as: GoogleApi.CloudRun.V1alpha1.Model.ObjectMeta)
  field(:spec, as: GoogleApi.CloudRun.V1alpha1.Model.RevisionSpec)
  field(:status, as: GoogleApi.CloudRun.V1alpha1.Model.RevisionStatus)
end
defimpl Poison.Decoder, for: GoogleApi.CloudRun.V1alpha1.Model.Revision do
  # JSON decoding delegates to the generated model's own decode/2.
  def decode(value, options),
    do: GoogleApi.CloudRun.V1alpha1.Model.Revision.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.CloudRun.V1alpha1.Model.Revision do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.304348 | 170 | 0.729017 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.