hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1ca4d09feddb1157a318baa67ceabfd600cf605b | 2,421 | exs | Elixir | integration_test/selenium/capabilities_test.exs | JonRowe/wallaby | 00a7b4efdfecb374e4827fa106503e689a4dd967 | [
"MIT"
] | null | null | null | integration_test/selenium/capabilities_test.exs | JonRowe/wallaby | 00a7b4efdfecb374e4827fa106503e689a4dd967 | [
"MIT"
] | null | null | null | integration_test/selenium/capabilities_test.exs | JonRowe/wallaby | 00a7b4efdfecb374e4827fa106503e689a4dd967 | [
"MIT"
] | null | null | null | defmodule Wallaby.Integration.CapabilitiesTest do
use ExUnit.Case
use Wallaby.DSL
alias Wallaby.Integration.SessionCase
alias Wallaby.Experimental.Selenium.WebdriverClient
setup do
on_exit(fn ->
Application.delete_env(:wallaby, :selenium)
end)
end
describe "capabilities" do
test "reads default capabilities" do
expected_capabilities = %{
javascriptEnabled: true,
browserName: "firefox",
"moz:firefoxOptions": %{
args: ["-headless"]
}
}
create_session_fn = fn url, capabilities ->
assert capabilities == expected_capabilities
WebdriverClient.create_session(url, capabilities)
end
{:ok, session} = SessionCase.start_test_session(create_session_fn: create_session_fn)
session
|> visit("page_1.html")
|> assert_has(Query.text("Page 1"))
assert :ok = Wallaby.end_session(session)
end
test "reads capabilities from application config" do
expected_capabilities = %{
browserName: "firefox",
"moz:firefoxOptions": %{
args: ["-headless"]
}
}
Application.put_env(:wallaby, :selenium, capabilities: expected_capabilities)
create_session_fn = fn url, capabilities ->
assert capabilities == expected_capabilities
WebdriverClient.create_session(url, capabilities)
end
{:ok, session} = SessionCase.start_test_session(create_session_fn: create_session_fn)
session
|> visit("page_1.html")
|> assert_has(Query.text("Page 1"))
assert :ok = Wallaby.end_session(session)
end
test "reads capabilities from opts when also using application config" do
Application.put_env(:wallaby, :selenium, capabilities: %{})
expected_capabilities = %{
browserName: "firefox",
"moz:firefoxOptions": %{
args: ["-headless"]
}
}
create_session_fn = fn url, capabilities ->
assert capabilities == expected_capabilities
WebdriverClient.create_session(url, capabilities)
end
{:ok, session} =
SessionCase.start_test_session(
capabilities: expected_capabilities,
create_session_fn: create_session_fn
)
session
|> visit("page_1.html")
|> assert_has(Query.text("Page 1"))
assert :ok = Wallaby.end_session(session)
end
end
end
| 26.032258 | 91 | 0.646014 |
1ca4dc379381f9ebe3981ba89843e26cdc976b6d | 663 | ex | Elixir | apps/rig/lib/rig/event_stream/kinesis_to_filter.ex | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | null | null | null | apps/rig/lib/rig/event_stream/kinesis_to_filter.ex | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | 132 | 2018-11-26T14:00:54.000Z | 2022-03-11T04:17:54.000Z | apps/rig/lib/rig/event_stream/kinesis_to_filter.ex | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | 1 | 2020-07-17T05:17:32.000Z | 2020-07-17T05:17:32.000Z | defmodule Rig.EventStream.KinesisToFilter do
@moduledoc """
Consumes events and forwards them to the event filter by event type.
"""
alias Rig.EventFilter
alias RigCloudEvents.CloudEvent
require Logger
# ---
def validate(conf), do: {:ok, conf}
# ---
def kinesis_handler(message) do
case CloudEvent.parse(message) do
{:ok, %CloudEvent{} = cloud_event} ->
Logger.debug(fn -> inspect(cloud_event.parsed) end)
EventFilter.forward_event(cloud_event)
:ok
{:error, :parse_error} ->
{:error, :non_cloud_events_not_supported, message}
end
rescue
err -> {:error, err, message}
end
end
| 20.71875 | 70 | 0.656109 |
1ca4fc8eb66caf74f5a843f55ad739795a519fb9 | 926 | ex | Elixir | apps/discovery_streams/test/support/conn_case.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 26 | 2019-09-20T23:54:45.000Z | 2020-08-20T14:23:32.000Z | apps/discovery_streams/test/support/conn_case.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 757 | 2019-08-15T18:15:07.000Z | 2020-09-18T20:55:31.000Z | apps/discovery_streams/test/support/conn_case.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 9 | 2019-11-12T16:43:46.000Z | 2020-03-25T16:23:16.000Z | defmodule DiscoveryStreamsWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import DiscoveryStreamsWeb.Router.Helpers
# The default endpoint for testing
@endpoint DiscoveryStreamsWeb.Endpoint
end
end
setup _tags do
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 27.235294 | 58 | 0.736501 |
1ca50550f0cf04710c26a325fd4292d48c4c5d08 | 30,927 | ex | Elixir | lib/elixir/lib/list.ex | clambodile/elixir | 0a058028c2f674b0776dd6484f74c80a94673146 | [
"Apache-2.0"
] | 1 | 2021-07-11T16:52:47.000Z | 2021-07-11T16:52:47.000Z | lib/elixir/lib/list.ex | clambodile/elixir | 0a058028c2f674b0776dd6484f74c80a94673146 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/list.ex | clambodile/elixir | 0a058028c2f674b0776dd6484f74c80a94673146 | [
"Apache-2.0"
] | null | null | null | defmodule List do
@moduledoc """
Functions that work on (linked) lists.
Many of the functions provided for lists, which implement
the `Enumerable` protocol, are found in the `Enum` module.
Additionally, the following functions and operators for lists are
found in `Kernel`:
* `++/2`
* `--/2`
* `hd/1`
* `tl/1`
* `in/2`
* `length/1`
Lists in Elixir are specified between square brackets:
iex> [1, "two", 3, :four]
[1, "two", 3, :four]
Two lists can be concatenated and subtracted using the
`Kernel.++/2` and `Kernel.--/2` operators:
iex> [1, 2, 3] ++ [4, 5, 6]
[1, 2, 3, 4, 5, 6]
iex> [1, true, 2, false, 3, true] -- [true, false]
[1, 2, 3, true]
Lists in Elixir are effectively linked lists, which means
they are internally represented in pairs containing the
head and the tail of a list:
iex> [head | tail] = [1, 2, 3]
iex> head
1
iex> tail
[2, 3]
Similarly, we could write the list `[1, 2, 3]` using only
such pairs (called cons cells):
iex> [1 | [2 | [3 | []]]]
[1, 2, 3]
Some lists, called improper lists, do not have an empty list as
the second element in the last cons cell:
iex> [1 | [2 | [3 | 4]]]
[1, 2, 3 | 4]
Although improper lists are generally avoided, they are used in some
special circumstances like iodata and chardata entities (see the `IO` module).
Due to their cons cell based representation, prepending an element
to a list is always fast (constant time), while appending becomes
slower as the list grows in size (linear time):
iex> list = [1, 2, 3]
iex> [0 | list] # fast
[0, 1, 2, 3]
iex> list ++ [4] # slow
[1, 2, 3, 4]
Additionally, getting a list's length and accessing it by index are
linear time operations. Negative indexes are also supported but
they imply the list will be iterated twice, once to calculate the
proper index and another time to perform the operation.
## Charlists
If a list is made of non-negative integers, it can also be called
a charlist. Elixir uses single quotes to define charlists:
iex> 'héllo'
[104, 233, 108, 108, 111]
In particular, charlists may be printed back in single
quotes if they contain only ASCII-printable codepoints:
iex> 'abc'
'abc'
The rationale behind this behaviour is to better support
Erlang libraries which may return text as charlists
instead of Elixir strings. One example of such functions
is `Application.loaded_applications/0`:
Application.loaded_applications()
#=> [
#=> {:stdlib, 'ERTS CXC 138 10', '2.6'},
#=> {:compiler, 'ERTS CXC 138 10', '6.0.1'},
#=> {:elixir, 'elixir', '1.0.0'},
#=> {:kernel, 'ERTS CXC 138 10', '4.1'},
#=> {:logger, 'logger', '1.0.0'}
#=> ]
A list can be checked if it is made of printable ASCII
codepoints with `ascii_printable?/2`.
"""
@compile :inline_list_funcs
@doc """
Deletes the given `item` from the `list`. Returns a new list without
the item.
If the `item` occurs more than once in the `list`, just
the first occurrence is removed.
## Examples
iex> List.delete([:a, :b, :c], :a)
[:b, :c]
iex> List.delete([:a, :b, :b, :c], :b)
[:a, :b, :c]
"""
@spec delete(list, any) :: list
def delete(list, item)
def delete([item | list], item), do: list
def delete([other | list], item), do: [other | delete(list, item)]
def delete([], _item), do: []
@doc """
Duplicates the given element `n` times in a list.
## Examples
iex> List.duplicate("hello", 3)
["hello", "hello", "hello"]
iex> List.duplicate([1, 2], 2)
[[1, 2], [1, 2]]
"""
@spec duplicate(elem, non_neg_integer) :: [elem] when elem: var
def duplicate(elem, n) do
:lists.duplicate(n, elem)
end
@doc """
Flattens the given `list` of nested lists.
## Examples
iex> List.flatten([1, [[2], 3]])
[1, 2, 3]
"""
@spec flatten(deep_list) :: list when deep_list: [any | deep_list]
def flatten(list) do
:lists.flatten(list)
end
@doc """
Flattens the given `list` of nested lists.
The list `tail` will be added at the end of
the flattened list.
## Examples
iex> List.flatten([1, [[2], 3]], [4, 5])
[1, 2, 3, 4, 5]
"""
@spec flatten(deep_list, [elem]) :: [elem] when elem: var, deep_list: [elem | deep_list]
def flatten(list, tail) do
:lists.flatten(list, tail)
end
@doc """
Folds (reduces) the given list from the left with
a function. Requires an accumulator.
## Examples
iex> List.foldl([5, 5], 10, fn x, acc -> x + acc end)
20
iex> List.foldl([1, 2, 3, 4], 0, fn x, acc -> x - acc end)
2
"""
@spec foldl([elem], acc, (elem, acc -> acc)) :: acc when elem: var, acc: var
def foldl(list, acc, fun) when is_list(list) and is_function(fun) do
:lists.foldl(fun, acc, list)
end
@doc """
Folds (reduces) the given list from the right with
a function. Requires an accumulator.
## Examples
iex> List.foldr([1, 2, 3, 4], 0, fn x, acc -> x - acc end)
-2
"""
@spec foldr([elem], acc, (elem, acc -> acc)) :: acc when elem: var, acc: var
def foldr(list, acc, fun) when is_list(list) and is_function(fun) do
:lists.foldr(fun, acc, list)
end
@doc """
Returns the first element in `list` or `nil` if `list` is empty.
## Examples
iex> List.first([])
nil
iex> List.first([1])
1
iex> List.first([1, 2, 3])
1
"""
@spec first([elem]) :: nil | elem when elem: var
def first([]), do: nil
def first([head | _]), do: head
@doc """
Returns the last element in `list` or `nil` if `list` is empty.
## Examples
iex> List.last([])
nil
iex> List.last([1])
1
iex> List.last([1, 2, 3])
3
"""
@spec last([elem]) :: nil | elem when elem: var
def last([]), do: nil
def last([head]), do: head
def last([_ | tail]), do: last(tail)
@doc """
Receives a list of tuples and returns the first tuple
where the item at `position` in the tuple matches the
given `key`.
## Examples
iex> List.keyfind([a: 1, b: 2], :a, 0)
{:a, 1}
iex> List.keyfind([a: 1, b: 2], 2, 1)
{:b, 2}
iex> List.keyfind([a: 1, b: 2], :c, 0)
nil
"""
@spec keyfind([tuple], any, non_neg_integer, any) :: any
def keyfind(list, key, position, default \\ nil) do
:lists.keyfind(key, position + 1, list) || default
end
@doc """
Receives a list of tuples and returns `true` if there is
a tuple where the item at `position` in the tuple matches
the given `key`.
## Examples
iex> List.keymember?([a: 1, b: 2], :a, 0)
true
iex> List.keymember?([a: 1, b: 2], 2, 1)
true
iex> List.keymember?([a: 1, b: 2], :c, 0)
false
"""
@spec keymember?([tuple], any, non_neg_integer) :: boolean
def keymember?(list, key, position) do
:lists.keymember(key, position + 1, list)
end
@doc """
Receives a list of tuples and if the identified item by `key` at `position`
exists, it is replaced with `new_tuple`.
## Examples
iex> List.keyreplace([a: 1, b: 2], :a, 0, {:a, 3})
[a: 3, b: 2]
iex> List.keyreplace([a: 1, b: 2], :a, 1, {:a, 3})
[a: 1, b: 2]
"""
@spec keyreplace([tuple], any, non_neg_integer, tuple) :: [tuple]
def keyreplace(list, key, position, new_tuple) do
:lists.keyreplace(key, position + 1, list, new_tuple)
end
@doc """
Receives a list of tuples and sorts the items
at `position` of the tuples. The sort is stable.
## Examples
iex> List.keysort([a: 5, b: 1, c: 3], 1)
[b: 1, c: 3, a: 5]
iex> List.keysort([a: 5, c: 1, b: 3], 0)
[a: 5, b: 3, c: 1]
"""
@spec keysort([tuple], non_neg_integer) :: [tuple]
def keysort(list, position) do
:lists.keysort(position + 1, list)
end
@doc """
Receives a `list` of tuples and replaces the item
identified by `key` at `position` with `new_tuple`.
If the item does not exist, it is added to the end of the `list`.
## Examples
iex> List.keystore([a: 1, b: 2], :a, 0, {:a, 3})
[a: 3, b: 2]
iex> List.keystore([a: 1, b: 2], :c, 0, {:c, 3})
[a: 1, b: 2, c: 3]
"""
@spec keystore([tuple], any, non_neg_integer, tuple) :: [tuple, ...]
def keystore(list, key, position, new_tuple) do
:lists.keystore(key, position + 1, list, new_tuple)
end
@doc """
Receives a `list` of tuples and deletes the first tuple
where the item at `position` matches the
given `key`. Returns the new list.
## Examples
iex> List.keydelete([a: 1, b: 2], :a, 0)
[b: 2]
iex> List.keydelete([a: 1, b: 2], 2, 1)
[a: 1]
iex> List.keydelete([a: 1, b: 2], :c, 0)
[a: 1, b: 2]
"""
@spec keydelete([tuple], any, non_neg_integer) :: [tuple]
def keydelete(list, key, position) do
:lists.keydelete(key, position + 1, list)
end
@doc """
Receives a `list` of tuples and returns the first tuple
where the element at `position` in the tuple matches the
given `key`, as well as the `list` without found tuple.
If such a tuple is not found, `nil` will be returned.
## Examples
iex> List.keytake([a: 1, b: 2], :a, 0)
{{:a, 1}, [b: 2]}
iex> List.keytake([a: 1, b: 2], 2, 1)
{{:b, 2}, [a: 1]}
iex> List.keytake([a: 1, b: 2], :c, 0)
nil
"""
@spec keytake([tuple], any, non_neg_integer) :: {tuple, [tuple]} | nil
def keytake(list, key, position) do
case :lists.keytake(key, position + 1, list) do
{:value, item, list} -> {item, list}
false -> nil
end
end
@doc """
Wraps `term` in a list if this is not list.
If `term` is already a list, it returns the list.
If `term` is `nil`, it returns an empty list.
## Examples
iex> List.wrap("hello")
["hello"]
iex> List.wrap([1, 2, 3])
[1, 2, 3]
iex> List.wrap(nil)
[]
"""
@spec wrap(nil) :: []
@spec wrap(list) :: list when list: maybe_improper_list()
@spec wrap(term) :: nonempty_list(term) when term: any()
def wrap(term)
def wrap(list) when is_list(list) do
list
end
def wrap(nil) do
[]
end
def wrap(other) do
[other]
end
@doc """
Zips corresponding elements from each list in `list_of_lists`.
The zipping finishes as soon as any list terminates.
## Examples
iex> List.zip([[1, 2], [3, 4], [5, 6]])
[{1, 3, 5}, {2, 4, 6}]
iex> List.zip([[1, 2], [3], [5, 6]])
[{1, 3, 5}]
"""
@spec zip([list]) :: [tuple]
def zip([]), do: []
def zip(list_of_lists) when is_list(list_of_lists) do
do_zip(list_of_lists, [])
end
@doc ~S"""
Checks if `list` is a charlist made only of printable ASCII characters.
Takes an optional `limit` as a second argument. `ascii_printable?/2` only
checks the printability of the list up to the `limit`.
A printable charlist in Elixir contains only the printable characters in the
standard seven-bit ASCII character encoding, which are characters ranging from
32 to 126 in decimal notation, plus the following control characters:
* `?\a` - Bell
* `?\b` - Backspace
* `?\t` - Horizontal tab
* `?\n` - Line feed
* `?\v` - Vertical tab
* `?\f` - Form feed
* `?\r` - Carriage return
* `?\e` - Escape
For more information read the [Character groups](https://en.wikipedia.org/wiki/ASCII#Character_groups)
section in the Wikipedia article of the [ASCII](https://en.wikipedia.org/wiki/ASCII) standard.
## Examples
iex> List.ascii_printable?('abc')
true
iex> List.ascii_printable?('abc' ++ [0])
false
iex> List.ascii_printable?('abc' ++ [0], 2)
true
Improper lists are not printable, even if made only of ASCII characters:
iex> List.ascii_printable?('abc' ++ ?d)
false
"""
@doc since: "1.6.0"
@spec ascii_printable?(list, limit) :: boolean
when limit: :infinity | non_neg_integer
def ascii_printable?(list, limit \\ :infinity)
when is_list(list) and (limit == :infinity or (is_integer(limit) and limit >= 0)) do
ascii_printable_guarded?(list, limit)
end
defp ascii_printable_guarded?(_, 0) do
true
end
defp ascii_printable_guarded?([char | rest], counter)
when is_integer(char) and char >= 32 and char <= 126 do
ascii_printable_guarded?(rest, decrement(counter))
end
defp ascii_printable_guarded?([?\n | rest], counter) do
ascii_printable_guarded?(rest, decrement(counter))
end
defp ascii_printable_guarded?([?\r | rest], counter) do
ascii_printable_guarded?(rest, decrement(counter))
end
defp ascii_printable_guarded?([?\t | rest], counter) do
ascii_printable_guarded?(rest, decrement(counter))
end
defp ascii_printable_guarded?([?\v | rest], counter) do
ascii_printable_guarded?(rest, decrement(counter))
end
defp ascii_printable_guarded?([?\b | rest], counter) do
ascii_printable_guarded?(rest, decrement(counter))
end
defp ascii_printable_guarded?([?\f | rest], counter) do
ascii_printable_guarded?(rest, decrement(counter))
end
defp ascii_printable_guarded?([?\e | rest], counter) do
ascii_printable_guarded?(rest, decrement(counter))
end
defp ascii_printable_guarded?([?\a | rest], counter) do
ascii_printable_guarded?(rest, decrement(counter))
end
defp ascii_printable_guarded?([], _counter), do: true
defp ascii_printable_guarded?(_, _counter), do: false
@compile {:inline, decrement: 1}
defp decrement(:infinity), do: :infinity
defp decrement(counter), do: counter - 1
@doc """
Returns `true` if `list` is an improper list. Otherwise returns `false`.
## Examples
iex> List.improper?([1, 2 | 3])
true
iex> List.improper?([1, 2, 3])
false
"""
@doc since: "1.8.0"
@spec improper?(maybe_improper_list) :: boolean
def improper?(list) when is_list(list) and length(list) >= 0, do: false
def improper?(list) when is_list(list), do: true
@doc """
Returns a list with `value` inserted at the specified `index`.
Note that `index` is capped at the list length. Negative indices
indicate an offset from the end of the `list`.
## Examples
iex> List.insert_at([1, 2, 3, 4], 2, 0)
[1, 2, 0, 3, 4]
iex> List.insert_at([1, 2, 3], 10, 0)
[1, 2, 3, 0]
iex> List.insert_at([1, 2, 3], -1, 0)
[1, 2, 3, 0]
iex> List.insert_at([1, 2, 3], -10, 0)
[0, 1, 2, 3]
"""
@spec insert_at(list, integer, any) :: list
def insert_at(list, index, value) when is_list(list) and is_integer(index) do
case index do
-1 ->
list ++ [value]
_ when index < 0 ->
case length(list) + index + 1 do
index when index < 0 -> [value | list]
index -> do_insert_at(list, index, value)
end
_ ->
do_insert_at(list, index, value)
end
end
@doc """
Returns a list with a replaced value at the specified `index`.
Negative indices indicate an offset from the end of the `list`.
If `index` is out of bounds, the original `list` is returned.
## Examples
iex> List.replace_at([1, 2, 3], 0, 0)
[0, 2, 3]
iex> List.replace_at([1, 2, 3], 10, 0)
[1, 2, 3]
iex> List.replace_at([1, 2, 3], -1, 0)
[1, 2, 0]
iex> List.replace_at([1, 2, 3], -10, 0)
[1, 2, 3]
"""
@spec replace_at(list, integer, any) :: list
def replace_at(list, index, value) when is_list(list) and is_integer(index) do
if index < 0 do
case length(list) + index do
index when index < 0 -> list
index -> do_replace_at(list, index, value)
end
else
do_replace_at(list, index, value)
end
end
@doc """
Returns a list with an updated value at the specified `index`.
Negative indices indicate an offset from the end of the `list`.
If `index` is out of bounds, the original `list` is returned.
## Examples
iex> List.update_at([1, 2, 3], 0, &(&1 + 10))
[11, 2, 3]
iex> List.update_at([1, 2, 3], 10, &(&1 + 10))
[1, 2, 3]
iex> List.update_at([1, 2, 3], -1, &(&1 + 10))
[1, 2, 13]
iex> List.update_at([1, 2, 3], -10, &(&1 + 10))
[1, 2, 3]
"""
@spec update_at([elem], integer, (elem -> any)) :: list when elem: var
def update_at(list, index, fun) when is_list(list) and is_function(fun) and is_integer(index) do
if index < 0 do
case length(list) + index do
index when index < 0 -> list
index -> do_update_at(list, index, fun)
end
else
do_update_at(list, index, fun)
end
end
@doc """
Produces a new list by removing the value at the specified `index`.
Negative indices indicate an offset from the end of the `list`.
If `index` is out of bounds, the original `list` is returned.
## Examples
iex> List.delete_at([1, 2, 3], 0)
[2, 3]
iex> List.delete_at([1, 2, 3], 10)
[1, 2, 3]
iex> List.delete_at([1, 2, 3], -1)
[1, 2]
"""
@spec delete_at(list, integer) :: list
def delete_at(list, index) when is_integer(index) do
elem(pop_at(list, index), 1)
end
@doc """
Returns and removes the value at the specified `index` in the `list`.
Negative indices indicate an offset from the end of the `list`.
If `index` is out of bounds, the original `list` is returned.
## Examples
iex> List.pop_at([1, 2, 3], 0)
{1, [2, 3]}
iex> List.pop_at([1, 2, 3], 5)
{nil, [1, 2, 3]}
iex> List.pop_at([1, 2, 3], 5, 10)
{10, [1, 2, 3]}
iex> List.pop_at([1, 2, 3], -1)
{3, [1, 2]}
"""
@doc since: "1.4.0"
@spec pop_at(list, integer, any) :: {any, list}
def pop_at(list, index, default \\ nil) when is_integer(index) do
if index < 0 do
do_pop_at(list, length(list) + index, default, [])
else
do_pop_at(list, index, default, [])
end
end
@doc """
Returns `true` if `list` starts with the given `prefix` list; otherwise returns `false`.
If `prefix` is an empty list, it returns `true`.
### Examples
iex> List.starts_with?([1, 2, 3], [1, 2])
true
iex> List.starts_with?([1, 2], [1, 2, 3])
false
iex> List.starts_with?([:alpha], [])
true
iex> List.starts_with?([], [:alpha])
false
"""
@doc since: "1.5.0"
@spec starts_with?(list, list) :: boolean
@spec starts_with?(list, []) :: true
@spec starts_with?([], nonempty_list) :: false
def starts_with?(list, prefix)
def starts_with?([head | tail], [head | prefix_tail]), do: starts_with?(tail, prefix_tail)
def starts_with?(list, []) when is_list(list), do: true
def starts_with?(list, [_ | _]) when is_list(list), do: false
@doc """
Converts a charlist to an atom.
Elixir supports conversions from charlists which contains any Unicode
codepoint.
Inlined by the compiler.
## Examples
iex> List.to_atom('Elixir')
:Elixir
iex> List.to_atom('🌢 Elixir')
:"🌢 Elixir"
"""
@spec to_atom(charlist) :: atom
def to_atom(charlist) do
:erlang.list_to_atom(charlist)
end
@doc """
Converts a charlist to an existing atom. Raises an `ArgumentError`
if the atom does not exist.
Elixir supports conversions from charlists which contains any Unicode
codepoint.
Inlined by the compiler.
## Examples
iex> _ = :my_atom
iex> List.to_existing_atom('my_atom')
:my_atom
iex> _ = :"🌢 Elixir"
iex> List.to_existing_atom('🌢 Elixir')
:"🌢 Elixir"
iex> List.to_existing_atom('this_atom_will_never_exist')
** (ArgumentError) argument error
"""
@spec to_existing_atom(charlist) :: atom
def to_existing_atom(charlist) do
:erlang.list_to_existing_atom(charlist)
end
@doc """
Returns the float whose text representation is `charlist`.
Inlined by the compiler.
## Examples
iex> List.to_float('2.2017764e+0')
2.2017764
"""
@spec to_float(charlist) :: float
def to_float(charlist) do
:erlang.list_to_float(charlist)
end
@doc """
Returns an integer whose text representation is `charlist`.
Inlined by the compiler.
## Examples
iex> List.to_integer('123')
123
"""
@spec to_integer(charlist) :: integer
def to_integer(charlist) do
:erlang.list_to_integer(charlist)
end
@doc """
Returns an integer whose text representation is `charlist` in base `base`.
Inlined by the compiler.
## Examples
iex> List.to_integer('3FF', 16)
1023
"""
@spec to_integer(charlist, 2..36) :: integer
def to_integer(charlist, base) do
:erlang.list_to_integer(charlist, base)
end
@doc """
Converts a list to a tuple.
Inlined by the compiler.
## Examples
iex> List.to_tuple([:share, [:elixir, 163]])
{:share, [:elixir, 163]}
"""
@spec to_tuple(list) :: tuple
def to_tuple(list) do
:erlang.list_to_tuple(list)
end
@doc """
Converts a list of integers representing codepoints, lists or
strings into a string.
Notice that this function expects a list of integers representing
UTF-8 codepoints. If you have a list of bytes, you must instead use
the [`:binary` module](http://www.erlang.org/doc/man/binary.html).
## Examples
iex> List.to_string([0x00E6, 0x00DF])
"æß"
iex> List.to_string([0x0061, "bc"])
"abc"
iex> List.to_string([0x0064, "ee", ['p']])
"deep"
"""
@spec to_string(:unicode.charlist()) :: String.t()
def to_string(list) when is_list(list) do
try do
:unicode.characters_to_binary(list)
rescue
ArgumentError ->
raise ArgumentError, """
cannot convert the given list to a string.
To be converted to a string, a list must contain only:
* strings
* integers representing Unicode codepoints
* or a list containing one of these three elements
Please check the given list or call inspect/1 to get the list representation, got:
#{inspect(list)}
"""
else
result when is_binary(result) ->
result
{:error, encoded, rest} ->
raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :invalid
{:incomplete, encoded, rest} ->
raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :incomplete
end
end
@doc """
Converts a list of integers representing codepoints, lists or
strings into a charlist.
Notice that this function expects a list of integers representing
UTF-8 codepoints. If you have a list of bytes, you must instead use
the [`:binary` module](http://www.erlang.org/doc/man/binary.html).
## Examples
iex> List.to_charlist([0x00E6, 0x00DF])
'æß'
iex> List.to_charlist([0x0061, "bc"])
'abc'
iex> List.to_charlist([0x0064, "ee", ['p']])
'deep'
"""
@doc since: "1.8.0"
@spec to_charlist(:unicode.charlist()) :: charlist()
def to_charlist(list) when is_list(list) do
try do
:unicode.characters_to_list(list)
rescue
ArgumentError ->
raise ArgumentError, """
cannot convert the given list to a charlist.
To be converted to a charlist, a list must contain only:
* strings
* integers representing Unicode codepoints
* or a list containing one of these three elements
Please check the given list or call inspect/1 to get the list representation, got:
#{inspect(list)}
"""
else
result when is_list(result) ->
result
{:error, encoded, rest} ->
raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :invalid
{:incomplete, encoded, rest} ->
raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :incomplete
end
end
@doc """
Returns a keyword list that represents an *edit script*.
The algorithm is outlined in the
"An O(ND) Difference Algorithm and Its Variations" paper by E. Myers.
An *edit script* is a keyword list. Each key describes the "editing action" to
take in order to bring `list1` closer to being equal to `list2`; a key can be
`:eq`, `:ins`, or `:del`. Each value is a sublist of either `list1` or `list2`
that should be inserted (if the corresponding key `:ins`), deleted (if the
corresponding key is `:del`), or left alone (if the corresponding key is
`:eq`) in `list1` in order to be closer to `list2`.
See `myers_difference/3` if you want to handle nesting in the diff scripts.
## Examples
iex> List.myers_difference([1, 4, 2, 3], [1, 2, 3, 4])
[eq: [1], del: [4], eq: [2, 3], ins: [4]]
"""
@doc since: "1.4.0"
@spec myers_difference(list, list) :: [{:eq | :ins | :del, list}]
def myers_difference(list1, list2) when is_list(list1) and is_list(list2) do
myers_difference_with_diff_script(list1, list2, nil)
end
@doc """
Returns a keyword list that represents an *edit script* with nested diffs.
This is an extension of `myers_difference/2` where a `diff_script` function
can be given in case it is desired to compute nested differences. The function
may return a list with the inner edit script or `nil` in case there is no
such script. The returned inner edit script will be under the `:diff` key.
## Examples
iex> List.myers_difference(["a", "db", "c"], ["a", "bc"], &String.myers_difference/2)
[eq: ["a"], diff: [del: "d", eq: "b", ins: "c"], del: ["c"]]
"""
@doc since: "1.8.0"
@spec myers_difference(list, list, (term, term -> script | nil)) :: script
when script: [{:eq | :ins | :del | :diff, list}]
def myers_difference(list1, list2, diff_script)
when is_list(list1) and is_list(list2) and is_function(diff_script) do
myers_difference_with_diff_script(list1, list2, diff_script)
end
# Shared entry point for both public arities: seeds the search with a single
# path at depth 0 and the maximum number of edit steps possible.
defp myers_difference_with_diff_script(list1, list2, diff_script) do
  initial_path = {0, list1, list2, []}
  max_depth = length(list1) + length(list2)
  find_script(0, max_depth, [initial_path], diff_script)
end
# Widens the search envelope one step at a time until a full path through
# both lists is found, then compacts the collected edits into the script.
defp find_script(envelope, max, paths, diff_script) do
  case each_diagonal(-envelope, envelope, paths, [], diff_script) do
    {:done, edits} ->
      compact_reverse(edits, [])

    {:next, widened_paths} ->
      find_script(envelope + 1, max, widened_paths, diff_script)
  end
end
# Compacts a reversed list of single-element edit operations into the final
# edit script, merging consecutive operations of the same kind into one
# keyword entry. The incoming `edits` list is in reverse order, so consing
# onto `acc` restores the original order. Clause order is significant.

# All edits consumed: the accumulator is the finished script.
defp compact_reverse([], acc), do: acc

# `:diff` fragments carry a nested script; they are kept as-is, never merged.
defp compact_reverse([{:diff, _} = fragment | rest], acc) do
  compact_reverse(rest, [fragment | acc])
end

# Same kind as the most recent entry in `acc`: prepend the element to it.
defp compact_reverse([{kind, elem} | rest], [{kind, result} | acc]) do
  compact_reverse(rest, [{kind, [elem | result]} | acc])
end

# When the accumulator's three newest entries are `:eq`, `:ins`, `:eq` and
# the same `elem` was both kept and inserted, fold the two `:eq` chunks
# together so the script stays in its minimal form.
defp compact_reverse(rest, [{:eq, elem}, {:ins, elem}, {:eq, other} | acc]) do
  compact_reverse(rest, [{:ins, elem}, {:eq, elem ++ other} | acc])
end

# First element of a new kind: start a fresh entry.
defp compact_reverse([{kind, elem} | rest], acc) do
  compact_reverse(rest, [{kind, [elem]} | acc])
end
# Advances one stored path for every diagonal between `-limit` and `limit`
# (diagonals step by 2). Stops with `{:done, edits}` as soon as a path has
# consumed both lists, otherwise collects the advanced paths for the next,
# wider envelope.

# Walked past the last diagonal: paths were collected in reverse order.
defp each_diagonal(diag, limit, _paths, next_paths, _diff_script) when diag > limit do
  {:next, :lists.reverse(next_paths)}
end

defp each_diagonal(diag, limit, paths, next_paths, diff_script) do
  # Advance the best candidate path for this diagonal...
  {path, rest} = proceed_path(diag, limit, paths, diff_script)

  # ...then follow the run ("snake") of equal elements as far as it goes.
  case follow_snake(path) do
    {:cont, path} -> each_diagonal(diag + 2, limit, rest, [path | next_paths], diff_script)
    {:done, edits} -> {:done, edits}
  end
end
# Selects which stored path advances along the current diagonal and returns
# `{advanced_path, remaining_paths}`. At the envelope boundaries only one
# kind of move is possible; in between, the path that has progressed further
# (greater depth, stored in element 0 of the path tuple) is advanced.
# Clause order is significant.

# Very first step: a single seed path and nothing else to keep.
defp proceed_path(0, 0, [path], _diff_script), do: {path, []}

# Lower edge of the envelope: only a downward move is available; the whole
# path list is retained for the following diagonals.
defp proceed_path(diag, limit, [path | _] = paths, diff_script) when diag == -limit do
  {move_down(path, diff_script), paths}
end

# Upper edge of the envelope: only a rightward move is available.
defp proceed_path(diag, limit, [path], diff_script) when diag == limit do
  {move_right(path, diff_script), []}
end

# Interior diagonal: greedily advance whichever of the two neighbouring
# paths is further along.
defp proceed_path(_diag, _limit, [path1, path2 | rest], diff_script) do
  if elem(path1, 0) > elem(path2, 0) do
    {move_right(path1, diff_script), [path2 | rest]}
  else
    {move_down(path2, diff_script), [path2 | rest]}
  end
end
# Moves right in the edit graph: normally consumes the head of the second
# list as an insertion. When a `diff_script` is given and both lists are
# non-empty, the two heads are first compared with it; a non-nil result is
# recorded as a nested `:diff` fragment, consuming both heads instead.
defp move_right({y, [elem1 | rest1] = list1, [elem2 | rest2], edits}, diff_script)
     when diff_script != nil do
  if diff = diff_script.(elem1, elem2) do
    {y + 1, rest1, rest2, [{:diff, diff} | edits]}
  else
    # No nested diff: plain insertion of `elem2`; `list1` is left untouched.
    {y, list1, rest2, [{:ins, elem2} | edits]}
  end
end

# No diff script (or one of the lists is empty): record an insertion of the
# next element of the second list.
defp move_right({y, list1, [elem | rest], edits}, _diff_script) do
  {y, list1, rest, [{:ins, elem} | edits]}
end

# Second list exhausted: nothing to insert, the path is returned unchanged.
defp move_right({y, list1, [], edits}, _diff_script) do
  {y, list1, [], edits}
end
# Moves down in the edit graph: normally consumes the head of the first
# list as a deletion, incrementing the depth `y`. When a `diff_script` is
# given and both lists are non-empty, a non-nil nested diff of the two
# heads is recorded as a `:diff` fragment instead, consuming both heads.
defp move_down({y, [elem1 | rest1], [elem2 | rest2] = list2, edits}, diff_script)
     when diff_script != nil do
  if diff = diff_script.(elem1, elem2) do
    {y + 1, rest1, rest2, [{:diff, diff} | edits]}
  else
    # No nested diff: plain deletion of `elem1`; `list2` is left untouched.
    {y + 1, rest1, list2, [{:del, elem1} | edits]}
  end
end

# No diff script: record a deletion of the next element of the first list.
defp move_down({y, [elem | rest], list2, edits}, _diff_script) do
  {y + 1, rest, list2, [{:del, elem} | edits]}
end

# First list exhausted: still advance the depth without recording an edit.
defp move_down({y, [], list2, edits}, _diff_script) do
  {y + 1, [], list2, edits}
end
# Follows a "snake": while both lists start with the same element, consume
# it as an `:eq` edit and advance the depth. Returns `{:done, edits}` once
# both lists are exhausted, or `{:cont, path}` when they diverge.
defp follow_snake({depth, [same | tail1], [same | tail2], edits}) do
  follow_snake({depth + 1, tail1, tail2, [{:eq, same} | edits]})
end

defp follow_snake({_depth, [], [], edits}) do
  {:done, edits}
end

defp follow_snake(diverged_path) do
  {:cont, diverged_path}
end
## Helpers
# replace_at
# Walked past the end of the list: out-of-range indexes leave it unchanged.
defp do_replace_at([], _index, _value), do: []

# Index reached: swap the current element for `value`.
defp do_replace_at([_old | rest], 0, value), do: [value | rest]

# Keep walking, counting the index down.
defp do_replace_at([head | tail], index, value) do
  [head | do_replace_at(tail, index - 1, value)]
end
# insert_at
# Walked past the end of the list: clamp by appending the value at the tail.
defp do_insert_at([], _index, value), do: [value]

# Index reached: splice the value in front of the remaining elements.
defp do_insert_at(list, 0, value), do: [value | list]

# Keep walking, counting the index down.
defp do_insert_at([head | tail], index, value) do
  [head | do_insert_at(tail, index - 1, value)]
end
# update_at
# Index reached: replace the element with the result of applying `fun`.
defp do_update_at([value | list], 0, fun), do: [fun.(value) | list]

# Keep walking, counting the index down.
defp do_update_at([head | tail], index, fun) do
  [head | do_update_at(tail, index - 1, fun)]
end

# Out-of-range index: the list is returned unchanged.
defp do_update_at([], _index, _fun), do: []
# pop_at
# Walked past the end: nothing to pop, return the default and rebuild the
# list from the visited prefix.
defp do_pop_at([], _index, default, acc), do: {default, :lists.reverse(acc)}

# Index reached: pop the head; `:lists.reverse/2` stitches the visited
# prefix back onto the tail in a single pass.
defp do_pop_at([head | tail], 0, _default, acc), do: {head, :lists.reverse(acc, tail)}

# Keep walking, remembering the visited elements in reverse order.
defp do_pop_at([head | tail], index, default, acc) do
  do_pop_at(tail, index - 1, default, [head | acc])
end
# zip
# Zips a list of enumerables one "column" at a time: each round peels one
# head off every list via `:lists.mapfoldl/3`, builds a tuple from the
# heads, and recurses on the tails until any list runs out.
defp do_zip(enumerables, acc) do
  step = fn enumerable, heads -> do_zip_each(to_list(enumerable), heads) end

  case :lists.mapfoldl(step, [], enumerables) do
    {_tails, nil} ->
      # At least one list was exhausted this round: the zip is complete.
      :lists.reverse(acc)

    {tails, heads} ->
      do_zip(tails, [to_tuple(:lists.reverse(heads)) | acc])
  end
end
# Once the accumulator is `nil`, an earlier list in this round was already
# exhausted; keep propagating `nil`. Clause order matters.
defp do_zip_each(_list, nil), do: {nil, nil}

# Take the next head; keep the tail for the following round.
defp do_zip_each([head | tail], heads), do: {tail, [head | heads]}

# An exhausted list poisons the fold with `nil`, ending the zip.
defp do_zip_each([], _heads), do: {nil, nil}
# Normalizes a zip element: tuples are converted to lists, lists pass through.
defp to_list(tuple) when is_tuple(tuple), do: Tuple.to_list(tuple)
defp to_list(list) when is_list(list), do: list
end
| 25.601821 | 104 | 0.610534 |
1ca51cdd61c77d18de7d567d1ce65cb5c25e3357 | 100 | ex | Elixir | lib/app_twitter/apptwitter_tweet.ex | adrielradicchi/app_twitter | b2af84f39ee7b9d4910343b677b03a39ee93b06b | [
"MIT"
] | 1 | 2019-03-23T21:36:39.000Z | 2019-03-23T21:36:39.000Z | lib/app_twitter/apptwitter_tweet.ex | adrielradicchi/app_twitter | b2af84f39ee7b9d4910343b677b03a39ee93b06b | [
"MIT"
] | null | null | null | lib/app_twitter/apptwitter_tweet.ex | adrielradicchi/app_twitter | b2af84f39ee7b9d4910343b677b03a39ee93b06b | [
"MIT"
] | null | null | null | defmodule AppTwitter.Tweet do
def send(message) do
ExTwitter.update(message)
end
end | 20 | 33 | 0.7 |
1ca52067030097ce61aec19e88bdc028bde56e92 | 1,785 | exs | Elixir | clients/data_fusion/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/data_fusion/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/data_fusion/mix.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataFusion.Mixfile do
  @moduledoc false

  use Mix.Project

  @version "0.20.0"

  # Mix project definition for the generated Cloud Data Fusion API client.
  # Note: zero-arity calls now use explicit parentheses (`Mix.env()`), as
  # enforced by `mix format`.
  def project() do
    [
      app: :google_api_data_fusion,
      version: @version,
      elixir: "~> 1.6",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/data_fusion"
    ]
  end

  # OTP application callback: no supervision tree, only runtime deps.
  def application() do
    [extra_applications: [:logger]]
  end

  defp deps() do
    [
      {:google_gax, "~> 0.4"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # Package description shown on Hex.
  defp description() do
    """
    Cloud Data Fusion API client library.
    """
  end

  # Hex package metadata.
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/data_fusion",
        "Homepage" => "https://cloud.google.com/data-fusion/docs"
      }
    ]
  end
end
| 26.641791 | 102 | 0.65042 |
1ca55da3c45d400f2e8375e710834b2beaee6eb9 | 4,979 | ex | Elixir | lib/docusign/model/envelope_template_result.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 4 | 2020-12-21T12:50:13.000Z | 2022-01-12T16:50:43.000Z | lib/docusign/model/envelope_template_result.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 12 | 2018-09-18T15:26:34.000Z | 2019-09-28T15:29:39.000Z | lib/docusign/model/envelope_template_result.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 15 | 2020-04-29T21:50:16.000Z | 2022-02-11T18:01:51.000Z | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule DocuSign.Model.EnvelopeTemplateResult do
  @moduledoc """
  Auto-generated struct for the DocuSign `envelopeTemplateResult` payload.

  Field names mirror the JSON attributes of the DocuSign eSignature REST
  API one-to-one (hence the camelCase atoms); scalar values are kept as the
  strings the API returns. Decoding of the nested model fields is handled
  by the `Poison.Decoder` implementation for this module.
  """

  @derive [Poison.Encoder]
  defstruct [
    :allowMarkup,
    :allowReassign,
    :allowViewHistory,
    :asynchronous,
    :attachmentsUri,
    :authoritativeCopy,
    :authoritativeCopyDefault,
    :autoNavigation,
    :brandId,
    :brandLock,
    :certificateUri,
    :completedDateTime,
    :created,
    :createdDateTime,
    :customFields,
    :customFieldsUri,
    :declinedDateTime,
    :deletedDateTime,
    :deliveredDateTime,
    :description,
    :documents,
    :documentsCombinedUri,
    :documentsUri,
    :emailBlurb,
    :emailSettings,
    :emailSubject,
    :enableWetSign,
    :enforceSignerVisibility,
    :envelopeId,
    :envelopeIdStamping,
    :envelopeUri,
    :folderId,
    :folderName,
    :folderUri,
    :initialSentDateTime,
    :is21CFRPart11,
    :isSignatureProviderEnvelope,
    :lastModified,
    :lastModifiedDateTime,
    :lockInformation,
    :messageLock,
    :name,
    :notification,
    :notificationUri,
    :owner,
    :pageCount,
    :parentFolderUri,
    :password,
    :purgeState,
    :recipients,
    :recipientsLock,
    :recipientsUri,
    :sentDateTime,
    :shared,
    :signerCanSignOnMobile,
    :signingLocation,
    :status,
    :statusChangedDateTime,
    :templateId,
    :templatesUri,
    :transactionId,
    :uri,
    :useDisclosure,
    :voidedDateTime,
    :voidedReason
  ]

  # NOTE(review): the bare aliases below (e.g. `AccountCustomFields`) are
  # neither fully qualified nor `.t()` types, so this spec denotes atoms
  # rather than the structs actually stored in those fields — an artifact
  # of the swagger generator; confirm before relying on it with Dialyzer.
  @type t :: %__MODULE__{
          :allowMarkup => String.t(),
          :allowReassign => String.t(),
          :allowViewHistory => String.t(),
          :asynchronous => String.t(),
          :attachmentsUri => String.t(),
          :authoritativeCopy => String.t(),
          :authoritativeCopyDefault => String.t(),
          :autoNavigation => String.t(),
          :brandId => String.t(),
          :brandLock => String.t(),
          :certificateUri => String.t(),
          :completedDateTime => String.t(),
          :created => String.t(),
          :createdDateTime => String.t(),
          :customFields => AccountCustomFields,
          :customFieldsUri => String.t(),
          :declinedDateTime => String.t(),
          :deletedDateTime => String.t(),
          :deliveredDateTime => String.t(),
          :description => String.t(),
          :documents => [Document],
          :documentsCombinedUri => String.t(),
          :documentsUri => String.t(),
          :emailBlurb => String.t(),
          :emailSettings => EnvelopeEmailSettings,
          :emailSubject => String.t(),
          :enableWetSign => String.t(),
          :enforceSignerVisibility => String.t(),
          :envelopeId => String.t(),
          :envelopeIdStamping => String.t(),
          :envelopeUri => String.t(),
          :folderId => String.t(),
          :folderName => String.t(),
          :folderUri => String.t(),
          :initialSentDateTime => String.t(),
          :is21CFRPart11 => String.t(),
          :isSignatureProviderEnvelope => String.t(),
          :lastModified => String.t(),
          :lastModifiedDateTime => String.t(),
          :lockInformation => EnvelopeLocks,
          :messageLock => String.t(),
          :name => String.t(),
          :notification => Notification,
          :notificationUri => String.t(),
          :owner => UserInfo,
          :pageCount => integer(),
          :parentFolderUri => String.t(),
          :password => String.t(),
          :purgeState => String.t(),
          :recipients => EnvelopeRecipients,
          :recipientsLock => String.t(),
          :recipientsUri => String.t(),
          :sentDateTime => String.t(),
          :shared => String.t(),
          :signerCanSignOnMobile => String.t(),
          :signingLocation => String.t(),
          :status => String.t(),
          :statusChangedDateTime => String.t(),
          :templateId => String.t(),
          :templatesUri => String.t(),
          :transactionId => String.t(),
          :uri => String.t(),
          :useDisclosure => String.t(),
          :voidedDateTime => String.t(),
          :voidedReason => String.t()
        }
end
defimpl Poison.Decoder, for: DocuSign.Model.EnvelopeTemplateResult do
  import DocuSign.Deserializer

  # Recursively decodes the nested model fields into their own structs; all
  # other (scalar) fields are left exactly as Poison parsed them.
  def decode(value, options) do
    value
    |> deserialize(:customFields, :struct, DocuSign.Model.AccountCustomFields, options)
    |> deserialize(:documents, :list, DocuSign.Model.Document, options)
    |> deserialize(:emailSettings, :struct, DocuSign.Model.EnvelopeEmailSettings, options)
    |> deserialize(:lockInformation, :struct, DocuSign.Model.EnvelopeLocks, options)
    |> deserialize(:notification, :struct, DocuSign.Model.Notification, options)
    |> deserialize(:owner, :struct, DocuSign.Model.UserInfo, options)
    |> deserialize(:recipients, :struct, DocuSign.Model.EnvelopeRecipients, options)
  end
end
| 30.734568 | 90 | 0.596907 |
1ca56ae32f3900e36de22eaf049a1c20b69cdc07 | 3,331 | ex | Elixir | clients/source_repo/lib/google_api/source_repo/v1/model/operation.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/source_repo/lib/google_api/source_repo/v1/model/operation.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/source_repo/lib/google_api/source_repo/v1/model/operation.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.SourceRepo.V1.Model.Operation do
  @moduledoc """
  This resource represents a long-running operation that is the result of a network API call.

  ## Attributes

  - done (boolean()): If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available. Defaults to: `null`.
  - error (Status): The error result of the operation in case of failure or cancellation. Defaults to: `null`.
  - metadata (%{optional(String.t) => String.t}): Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any. Defaults to: `null`.
  - name (String.t): The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should have the format of `operations/some/unique/name`. Defaults to: `null`.
  - response (%{optional(String.t) => String.t}): The normal response of the operation in case of success. If the original method returns no data on success, such as `Delete`, the response is `google.protobuf.Empty`. If the original method is standard `Get`/`Create`/`Update`, the response should be the resource. For other methods, the response should have the type `XxxResponse`, where `Xxx` is the original method name. For example, if the original method name is `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :done => any(),
          :error => GoogleApi.SourceRepo.V1.Model.Status.t(),
          :metadata => map(),
          :name => any(),
          :response => map()
        }

  # Field declarations for GoogleApi.Gax.ModelBase decoding: `:error` is a
  # nested Status model; `:metadata` and `:response` are free-form maps.
  field(:done)
  field(:error, as: GoogleApi.SourceRepo.V1.Model.Status)
  field(:metadata, type: :map)
  field(:name)
  field(:response, type: :map)
end
# Delegates JSON decoding to the field metadata declared in the model.
defimpl Poison.Decoder, for: GoogleApi.SourceRepo.V1.Model.Operation do
  def decode(value, options) do
    GoogleApi.SourceRepo.V1.Model.Operation.decode(value, options)
  end
end

# Encoding is handled generically by Gax's ModelBase.
defimpl Poison.Encoder, for: GoogleApi.SourceRepo.V1.Model.Operation do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
1ca56d4938096c1a7fda39490703a8ae93a3c035 | 6,239 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/cell_data.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/cell_data.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/sheets/lib/google_api/sheets/v4/model/cell_data.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.CellData do
@moduledoc """
Data about a specific cell.
## Attributes
* `dataSourceFormula` (*type:* `GoogleApi.Sheets.V4.Model.DataSourceFormula.t`, *default:* `nil`) - Output only. Information about a data source formula on the cell. The field is set if user_entered_value is a formula referencing some DATA_SOURCE sheet, e.g `=SUM(DataSheet!Column)`.
* `dataSourceTable` (*type:* `GoogleApi.Sheets.V4.Model.DataSourceTable.t`, *default:* `nil`) - A data source table anchored at this cell. The size of data source table itself is computed dynamically based on its configuration. Only the first cell of the data source table contains the data source table definition. The other cells will contain the display values of the data source table result in their effective_value fields.
* `dataValidation` (*type:* `GoogleApi.Sheets.V4.Model.DataValidationRule.t`, *default:* `nil`) - A data validation rule on the cell, if any. When writing, the new data validation rule will overwrite any prior rule.
* `effectiveFormat` (*type:* `GoogleApi.Sheets.V4.Model.CellFormat.t`, *default:* `nil`) - The effective format being used by the cell. This includes the results of applying any conditional formatting and, if the cell contains a formula, the computed number format. If the effective format is the default format, effective format will not be written. This field is read-only.
* `effectiveValue` (*type:* `GoogleApi.Sheets.V4.Model.ExtendedValue.t`, *default:* `nil`) - The effective value of the cell. For cells with formulas, this is the calculated value. For cells with literals, this is the same as the user_entered_value. This field is read-only.
* `formattedValue` (*type:* `String.t`, *default:* `nil`) - The formatted value of the cell. This is the value as it's shown to the user. This field is read-only.
* `hyperlink` (*type:* `String.t`, *default:* `nil`) - A hyperlink this cell points to, if any. If the cell contains multiple hyperlinks, this field will be empty. This field is read-only. To set it, use a `=HYPERLINK` formula in the userEnteredValue.formulaValue field.
* `note` (*type:* `String.t`, *default:* `nil`) - Any note on the cell.
* `pivotTable` (*type:* `GoogleApi.Sheets.V4.Model.PivotTable.t`, *default:* `nil`) - A pivot table anchored at this cell. The size of pivot table itself is computed dynamically based on its data, grouping, filters, values, etc. Only the top-left cell of the pivot table contains the pivot table definition. The other cells will contain the calculated values of the results of the pivot in their effective_value fields.
* `textFormatRuns` (*type:* `list(GoogleApi.Sheets.V4.Model.TextFormatRun.t)`, *default:* `nil`) - Runs of rich text applied to subsections of the cell. Runs are only valid on user entered strings, not formulas, bools, or numbers. Properties of a run start at a specific index in the text and continue until the next run. Runs will inherit the properties of the cell unless explicitly changed. When writing, the new runs will overwrite any prior runs. When writing a new user_entered_value, previous runs are erased.
* `userEnteredFormat` (*type:* `GoogleApi.Sheets.V4.Model.CellFormat.t`, *default:* `nil`) - The format the user entered for the cell. When writing, the new format will be merged with the existing format.
* `userEnteredValue` (*type:* `GoogleApi.Sheets.V4.Model.ExtendedValue.t`, *default:* `nil`) - The value the user entered in the cell. e.g, `1234`, `'Hello'`, or `=NOW()` Note: Dates, Times and DateTimes are represented as doubles in serial number format.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:dataSourceFormula => GoogleApi.Sheets.V4.Model.DataSourceFormula.t(),
:dataSourceTable => GoogleApi.Sheets.V4.Model.DataSourceTable.t(),
:dataValidation => GoogleApi.Sheets.V4.Model.DataValidationRule.t(),
:effectiveFormat => GoogleApi.Sheets.V4.Model.CellFormat.t(),
:effectiveValue => GoogleApi.Sheets.V4.Model.ExtendedValue.t(),
:formattedValue => String.t(),
:hyperlink => String.t(),
:note => String.t(),
:pivotTable => GoogleApi.Sheets.V4.Model.PivotTable.t(),
:textFormatRuns => list(GoogleApi.Sheets.V4.Model.TextFormatRun.t()),
:userEnteredFormat => GoogleApi.Sheets.V4.Model.CellFormat.t(),
:userEnteredValue => GoogleApi.Sheets.V4.Model.ExtendedValue.t()
}
field(:dataSourceFormula, as: GoogleApi.Sheets.V4.Model.DataSourceFormula)
field(:dataSourceTable, as: GoogleApi.Sheets.V4.Model.DataSourceTable)
field(:dataValidation, as: GoogleApi.Sheets.V4.Model.DataValidationRule)
field(:effectiveFormat, as: GoogleApi.Sheets.V4.Model.CellFormat)
field(:effectiveValue, as: GoogleApi.Sheets.V4.Model.ExtendedValue)
field(:formattedValue)
field(:hyperlink)
field(:note)
field(:pivotTable, as: GoogleApi.Sheets.V4.Model.PivotTable)
field(:textFormatRuns, as: GoogleApi.Sheets.V4.Model.TextFormatRun, type: :list)
field(:userEnteredFormat, as: GoogleApi.Sheets.V4.Model.CellFormat)
field(:userEnteredValue, as: GoogleApi.Sheets.V4.Model.ExtendedValue)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.CellData do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.CellData.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.CellData do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 77.9875 | 520 | 0.739702 |
1ca59b122471e54faaa0fae25ff1a20714e3fad9 | 965 | exs | Elixir | mix.exs | piersadrian/hal | 190e3fb1cd0d187ac3ef485d1bf9e2623296ae16 | [
"MIT"
] | null | null | null | mix.exs | piersadrian/hal | 190e3fb1cd0d187ac3ef485d1bf9e2623296ae16 | [
"MIT"
] | null | null | null | mix.exs | piersadrian/hal | 190e3fb1cd0d187ac3ef485d1bf9e2623296ae16 | [
"MIT"
] | null | null | null | defmodule HAL.Mixfile do
use Mix.Project
def project do
[
app: :hal,
version: "0.0.1",
elixir: "~> 1.4",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
start_permanent: Mix.env == :prod,
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {HAL.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.3.0"},
{:phoenix_pubsub, "~> 1.0"},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"},
{:faker, "~> 0.9"}
]
end
end
| 22.44186 | 56 | 0.57513 |
1ca59c368f4014edaa8469ac8a9ebcc98a431729 | 787 | exs | Elixir | test/bender/command_test.exs | DylanGriffith/bender | 336fb92c3ed80bb8e330abfa7628df7ccbc04c84 | [
"MIT"
] | 27 | 2015-10-29T21:43:12.000Z | 2021-08-14T06:33:13.000Z | test/bender/command_test.exs | lytedev/bender | 1033f2e2a2bb3b45a6782342e070e5e593212c2c | [
"MIT"
] | 2 | 2016-01-04T20:39:42.000Z | 2019-05-22T05:18:10.000Z | test/bender/command_test.exs | lytedev/bender | 1033f2e2a2bb3b45a6782342e070e5e593212c2c | [
"MIT"
] | 3 | 2017-10-17T10:33:53.000Z | 2018-11-19T15:07:50.000Z | defmodule MyCommand do
use Bender.Command
def handle_event({{:command, "my_command", _m}, _conf}, parent) do
send parent, :MY_COMMAND
{:ok, parent}
end
end
defmodule Bender.CommandTest do
  use ExUnit.Case

  # NOTE(review): GenEvent is deprecated since Elixir 1.5; migrating to
  # :gen_event / GenStage is out of scope for this change.
  # Bare `self` is written as `self()` to avoid the "expanding to self()"
  # compiler warning introduced in Elixir 1.4.

  test "it pattern matches a command" do
    {:ok, manager} = GenEvent.start_link()
    GenEvent.add_handler(manager, MyCommand, self())
    GenEvent.notify(manager, {{:command, "my_command", "hello world"}, {}})
    # The handler messages the test process back when the command matches.
    assert_receive :MY_COMMAND, 100
  end

  test "does not crash for commands that don't match" do
    {:ok, manager} = GenEvent.start_link()
    GenEvent.add_handler(manager, MyCommand, self())
    :ok = GenEvent.sync_notify(manager, {{:command, "unknown_command", "hello world"}, {}})
    # A non-matching event must not crash (and thus remove) the handler.
    assert GenEvent.which_handlers(manager) == [MyCommand]
  end
end
1ca59feb5c07c918340c5f27585d1efc24ff885e | 2,178 | ex | Elixir | lib/exchange_api_web/controllers/api/order_controller.ex | realyarilabs/exchange_api | c7dd9af9356277a022b164675cc1622359af8a76 | [
"Apache-2.0"
] | 3 | 2020-08-10T10:09:26.000Z | 2020-08-28T08:41:36.000Z | lib/exchange_api_web/controllers/api/order_controller.ex | realyarilabs/exchange_api | c7dd9af9356277a022b164675cc1622359af8a76 | [
"Apache-2.0"
] | 30 | 2020-08-17T10:38:24.000Z | 2022-02-28T07:06:42.000Z | lib/exchange_api_web/controllers/api/order_controller.ex | realyarilabs/exchange_api | c7dd9af9356277a022b164675cc1622359af8a76 | [
"Apache-2.0"
] | 1 | 2020-09-17T13:08:47.000Z | 2020-09-17T13:08:47.000Z | defmodule ExchangeApiWeb.Api.OrderController do
use ExchangeApiWeb, :controller
alias ExchangeApiWeb.Ticker
action_fallback ExchangeApiWeb.Api.FallbackController
def index_open(conn, %{"ticker" => ticker}) do
with {:ok, tick} <- Ticker.get_ticker(ticker), {:ok, open} <- Exchange.open_orders(tick) do
json(conn, %{data: open})
end
end
def count_buy_side(conn, %{"ticker" => ticker}) do
with {:ok, tick} <- Ticker.get_ticker(ticker),
{:ok, total_buy_orders} <- Exchange.total_buy_orders(tick) do
json(conn, %{data: total_buy_orders})
end
end
def count_sell_side(conn, %{"ticker" => ticker}) do
with {:ok, tick} <- Ticker.get_ticker(ticker),
{:ok, total_sell_orders} <- Exchange.total_sell_orders(tick) do
json(conn, %{data: total_sell_orders})
end
end
def spread(conn, %{"ticker" => ticker}) do
with {:ok, tick} <- Ticker.get_ticker(ticker),
{:ok, spread} <- json_encode_money(Exchange.spread(tick)) do
json(conn, %{data: spread})
end
end
def highest_bid_price(conn, %{"ticker" => ticker}) do
with {:ok, tick} <- Ticker.get_ticker(ticker),
{:ok, bid_price} <- json_encode_money(Exchange.highest_bid_price(tick)) do
json(conn, %{data: bid_price})
end
end
def highest_bid_volume(conn, %{"ticker" => ticker}) do
with {:ok, tick} <- Ticker.get_ticker(ticker),
{:ok, bid_volume} <- Exchange.highest_bid_volume(tick) do
json(conn, %{data: bid_volume})
end
end
def lowest_ask_price(conn, %{"ticker" => ticker}) do
with {:ok, tick} <- Ticker.get_ticker(ticker),
{:ok, ask_price} <- json_encode_money(Exchange.lowest_ask_price(tick)) do
json(conn, %{data: ask_price})
end
end
def highest_ask_volume(conn, %{"ticker" => ticker}) do
with {:ok, tick} <- Ticker.get_ticker(ticker),
{:ok, ask_volume} <- Exchange.highest_ask_volume(tick) do
json(conn, %{data: ask_volume})
end
end
# ----- PRIVATE ----- #
defp json_encode_money(money) do
{status, %Money{amount: amount, currency: currency}} = money
{status, %{amount: amount, currency: currency}}
end
end
| 31.565217 | 95 | 0.645546 |
1ca5c54ed1f685e3a31a134f36e230ec4774bad4 | 4,477 | exs | Elixir | .credo.exs | rodrigues/credo | b4e08477a2141d5537d8fc9c5cc08ebf93a5ee23 | [
"MIT"
] | 70 | 2016-07-11T20:28:15.000Z | 2020-05-03T09:22:31.000Z | .credo.exs | rbeene/elixir-remote-monitor | a1cf0f89aad21e4d7ec547761d88e450cc7c9188 | [
"MIT"
] | 3 | 2016-10-31T15:12:29.000Z | 2017-01-28T13:57:01.000Z | .credo.exs | rbeene/elixir-remote-monitor | a1cf0f89aad21e4d7ec547761d88e450cc7c9188 | [
"MIT"
] | 2 | 2017-03-03T15:55:09.000Z | 2020-04-23T17:42:12.000Z | # This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
# Top-level Credo configuration map, read by `mix credo`.
%{
  #
  # You can have as many configs as you like in the `configs:` field.
  configs: [
    %{
      #
      # Run any config using `mix credo -C <name>`. If no config name is given
      # "default" is used.
      name: "default",
      #
      # these are the files included in the analysis
      files: %{
        #
        # you can give explicit globs or simply directories
        # in the latter case `**/*.{ex,exs}` will be used
        included: ["lib/", "src/", "web/", "apps/"],
        excluded: [~r"/_build/", ~r"/deps/"]
      },
      #
      # If you create your own checks, you must specify the source files for
      # them here, so they can be loaded by Credo before running the analysis.
      requires: [],
      #
      # Credo automatically checks for updates, like e.g. Hex does.
      # You can disable this behaviour below:
      check_for_updates: true,
      #
      # You can customize the parameters of any check by adding a second element
      # to the tuple.
      #
      # To disable a check put `false` as second element:
      #
      #     {Credo.Check.Design.DuplicatedCode, false}
      #
      checks: [
        {Credo.Check.Consistency.ExceptionNames},
        {Credo.Check.Consistency.LineEndings},
        {Credo.Check.Consistency.SpaceAroundOperators},
        {Credo.Check.Consistency.SpaceInParentheses},
        {Credo.Check.Consistency.TabsOrSpaces},

        # For some checks, like AliasUsage, you can only customize the priority
        # Priority values are: `low, normal, high, higher`
        {Credo.Check.Design.AliasUsage, priority: :low},

        # For others you can set parameters
        # If you don't want the `setup` and `test` macro calls in ExUnit tests
        # or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
        # set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
        {Credo.Check.Design.DuplicatedCode, excluded_macros: []},

        # You can also customize the exit_status of each check.
        # If you don't want TODO comments to cause `mix credo` to fail, just
        # set this value to 0 (zero).
        {Credo.Check.Design.TagTODO, exit_status: 2},
        {Credo.Check.Design.TagFIXME},
        {Credo.Check.Readability.FunctionNames},
        {Credo.Check.Readability.LargeNumbers},
        {Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 80},
        {Credo.Check.Readability.ModuleAttributeNames},
        {Credo.Check.Readability.ModuleDoc},
        {Credo.Check.Readability.ModuleNames},
        {Credo.Check.Readability.ParenthesesInCondition},
        {Credo.Check.Readability.PredicateFunctionNames},
        {Credo.Check.Readability.TrailingBlankLine},
        {Credo.Check.Readability.TrailingWhiteSpace},
        {Credo.Check.Readability.VariableNames},
        {Credo.Check.Refactor.ABCSize},
        # {Credo.Check.Refactor.CaseTrivialMatches}, # deprecated in 0.4.0
        {Credo.Check.Refactor.CondStatements},
        {Credo.Check.Refactor.FunctionArity},
        {Credo.Check.Refactor.MatchInCondition},
        {Credo.Check.Refactor.PipeChainStart},
        {Credo.Check.Refactor.CyclomaticComplexity},
        {Credo.Check.Refactor.NegatedConditionsInUnless},
        {Credo.Check.Refactor.NegatedConditionsWithElse},
        {Credo.Check.Refactor.Nesting},
        {Credo.Check.Refactor.UnlessWithElse},
        {Credo.Check.Warning.IExPry},
        {Credo.Check.Warning.IoInspect},
        {Credo.Check.Warning.NameRedeclarationByAssignment},
        {Credo.Check.Warning.NameRedeclarationByCase},
        {Credo.Check.Warning.NameRedeclarationByDef},
        {Credo.Check.Warning.NameRedeclarationByFn},
        {Credo.Check.Warning.OperationOnSameValues},
        {Credo.Check.Warning.BoolOperationOnSameValues},
        {Credo.Check.Warning.UnusedEnumOperation},
        {Credo.Check.Warning.UnusedKeywordOperation},
        {Credo.Check.Warning.UnusedListOperation},
        {Credo.Check.Warning.UnusedStringOperation},
        {Credo.Check.Warning.UnusedTupleOperation},
        {Credo.Check.Warning.OperationWithConstantResult},

        # Custom checks can be created using `mix credo.gen.check`.
        #
      ]
    }
  ]
}
| 40.7 | 80 | 0.657136 |
1ca5cd95c09db05458407c0c9995477f25e18553 | 1,103 | exs | Elixir | test/day_03_toboggan_trajectory_test.exs | scmx/advent-of-code-2020-elixir | f60159e1a8827c6e033f5f4b2d47ea7edd49db5f | [
"MIT"
] | 1 | 2021-01-07T07:00:16.000Z | 2021-01-07T07:00:16.000Z | test/day_03_toboggan_trajectory_test.exs | scmx/advent-of-code-2020-elixir | f60159e1a8827c6e033f5f4b2d47ea7edd49db5f | [
"MIT"
] | null | null | null | test/day_03_toboggan_trajectory_test.exs | scmx/advent-of-code-2020-elixir | f60159e1a8827c6e033f5f4b2d47ea7edd49db5f | [
"MIT"
] | null | null | null | defmodule Adventofcode.Day03TobogganTrajectoryTest do
use Adventofcode.FancyCase
import Adventofcode.Day03TobogganTrajectory
alias Adventofcode.Day03TobogganTrajectory.Grid
@example_input """
..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#
"""
  # Covers parsing the example grid and a single right-3/down-1 traversal.
  # NOTE(review): parse/1, traverse/2 and test_with_puzzle_input come from
  # the imported puzzle module / project test helpers.
  describe "part_1/1" do
    test "parse map into list, width, height" do
      # parse/1 returns a %Grid{} holding the raw rows as charlists and the
      # measured dimensions (11x11 for the example input).
      assert %Grid{
               list: ['..##.......', '#...#...#..', '.#....#..#.' | _],
               size: %{x: 11, y: 11}
             } = @example_input |> parse()
    end
    test "slope right 3, down 1" do
      grid = @example_input |> parse()
      # The example map yields 7 trees for slope {3, 1}.
      assert %Grid{result: 7} = grid |> traverse(%{x: 3, y: 1})
    end
    test_with_puzzle_input do
      assert 191 = puzzle_input() |> part_1()
    end
  end
  # part_2/1 multiplies together the tree counts of the listed slopes.
  describe "part_2/1" do
    test "trees encountered on each of the listed slopes" do
      # 336 is the documented example product for the five slopes.
      assert 336 = @example_input |> part_2()
    end
    test_with_puzzle_input do
      assert 1_478_615_040 = puzzle_input() |> part_2()
    end
  end
end
| 22.06 | 71 | 0.514959 |
1ca5e449d0a646c65bb057707b65d4febed67a3c | 170 | ex | Elixir | priv/templates/brando.install/lib/application_name/repo.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 4 | 2020-10-30T08:40:38.000Z | 2022-01-07T22:21:37.000Z | priv/templates/brando.install/lib/application_name/repo.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 1,162 | 2020-07-05T11:20:15.000Z | 2022-03-31T06:01:49.000Z | priv/templates/brando.install/lib/application_name/repo.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | null | null | null | defmodule <%= application_module %>.Repo do
use Ecto.Repo,
otp_app: :<%= application_name %>,
adapter: Ecto.Adapters.Postgres
use Brando.SoftDelete.Repo
end
| 21.25 | 43 | 0.705882 |
1ca60efd72ebdd0e9d3d1d1f84bd48605fb96312 | 3,365 | ex | Elixir | farmbot_core/lib/farmbot_core/asset_workers/regimen_instance_worker.ex | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | 1 | 2019-08-06T11:51:48.000Z | 2019-08-06T11:51:48.000Z | farmbot_core/lib/farmbot_core/asset_workers/regimen_instance_worker.ex | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | null | null | null | farmbot_core/lib/farmbot_core/asset_workers/regimen_instance_worker.ex | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | null | null | null | defimpl FarmbotCore.AssetWorker, for: FarmbotCore.Asset.RegimenInstance do
@moduledoc """
An instance of a running Regimen. Asset.Regimen is the blueprint by which a
Regimen "instance" is created.
"""
use GenServer
require Logger
require FarmbotCore.Logger
alias FarmbotCeleryScript.AST
alias FarmbotCore.Asset
alias FarmbotCore.Asset.{RegimenInstance, FarmEvent, Sequence, Regimen}
  # Associations that must be loaded before this worker starts.
  @impl FarmbotCore.AssetWorker
  def preload(%RegimenInstance{}), do: [:farm_event, :regimen, :executions]
  # This worker does not track asset changes.
  @impl FarmbotCore.AssetWorker
  def tracks_changes?(%RegimenInstance{}), do: false
  # Starts an (unnamed) GenServer for a single regimen instance.
  @impl FarmbotCore.AssetWorker
  def start_link(regimen_instance, args) do
    GenServer.start_link(__MODULE__, [regimen_instance, args])
  end
  @impl GenServer
  # Refuses to start unless regimen and farm_event are preloaded structs;
  # on success, defers the actual scheduling work by messaging itself.
  def init([regimen_instance, _args]) do
    Logger.warn "RegimenInstance #{inspect(regimen_instance)} initializing"
    with %Regimen{} <- regimen_instance.regimen,
         %FarmEvent{} <- regimen_instance.farm_event do
      send self(), :schedule
      {:ok, %{regimen_instance: regimen_instance}}
    else
      _ -> {:stop, "Regimen instance not preloaded."}
    end
  end
  @impl GenServer
  # Expands every regimen item into {scheduled_at, sequence}, drops items
  # that already have a recorded execution, and schedules the remainder.
  def handle_info(:schedule, state) do
    regimen_instance = state.regimen_instance
    # load the sequence and calculate the scheduled_at time
    Enum.map(regimen_instance.regimen.regimen_items, fn(%{time_offset: offset, sequence_id: sequence_id}) ->
      # time_offset is milliseconds relative to the instance epoch
      scheduled_at = DateTime.add(regimen_instance.epoch, offset, :millisecond)
      # raising here crashes the worker so the supervisor can retry later
      sequence = Asset.get_sequence(sequence_id) || raise("sequence #{sequence_id} is not synced")
      %{scheduled_at: scheduled_at, sequence: sequence}
    end)
    # get rid of any item that has already been scheduled/executed
    |> Enum.reject(fn(%{scheduled_at: scheduled_at}) ->
      Asset.get_regimen_instance_execution(regimen_instance, scheduled_at)
    end)
    |> Enum.each(fn(%{scheduled_at: at, sequence: sequence}) ->
      schedule_sequence(regimen_instance, sequence, at)
    end)
    {:noreply, state}
  end
  # Persists the outcome reported by FarmbotCeleryScript for one scheduled
  # execution. NOTE(review): `reason` is stored as-is in the string-typed
  # status field — presumably already a string; confirm upstream shape.
  def handle_info({FarmbotCeleryScript, {:scheduled_execution, scheduled_at, executed_at, result}}, state) do
    status = case result do
      :ok -> "ok"
      {:error, reason} -> reason
    end
    _ = Asset.add_execution_to_regimen_instance!(state.regimen_instance, %{
      scheduled_at: scheduled_at,
      executed_at: executed_at,
      status: status
    })
    {:noreply, state}
  end
# TODO(RickCarlino) This function essentially copy/pastes a regimen body into
# the `locals` of a sequence, which works but is not-so-clean. Refactor later
# when we have a better idea of the problem.
@doc false
  def schedule_sequence(%RegimenInstance{} = regimen_instance, %Sequence{} = sequence, at) do
    # FarmEvent is the furthest outside of the scope
    farm_event_params = AST.decode(regimen_instance.farm_event.body)
    # Regimen is the second scope
    regimen_params = AST.decode(regimen_instance.regimen.body)
    # there may be many sequence scopes from here downward
    celery_ast = AST.decode(sequence)
    # Splice the regimen and farm-event parameter nodes into the sequence's
    # `locals` body so inner CeleryScript nodes can resolve them.
    celery_ast = %{
      celery_ast
      | args: %{
          celery_ast.args
          | locals: %{
              celery_ast.args.locals | body: celery_ast.args.locals.body ++ regimen_params ++ farm_event_params}
        }
    }
    FarmbotCeleryScript.schedule(celery_ast, at, sequence)
  end
end
| 35.797872 | 110 | 0.709955 |
1ca618f802caf692595a4fe4942733fe9cc7c883 | 888 | ex | Elixir | lib/astarte_housekeeping/config.ex | rbino/astarte_housekeeping | d582175f9499ffd3eaefe3a69fcfceba8b2089b9 | [
"Apache-2.0"
] | null | null | null | lib/astarte_housekeeping/config.ex | rbino/astarte_housekeeping | d582175f9499ffd3eaefe3a69fcfceba8b2089b9 | [
"Apache-2.0"
] | null | null | null | lib/astarte_housekeeping/config.ex | rbino/astarte_housekeeping | d582175f9499ffd3eaefe3a69fcfceba8b2089b9 | [
"Apache-2.0"
] | null | null | null | #
# This file is part of Astarte.
#
# Copyright 2018 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.Housekeeping.Config do
  # OTP application whose environment holds the housekeeping settings.
  @app :astarte_housekeeping

  @doc """
  Returns the replication factor for the astarte keyspace, defaults to 1
  """
  def astarte_keyspace_replication_factor do
    @app
    |> Application.get_env(:astarte_keyspace_replication_factor, 1)
  end
end
| 32.888889 | 87 | 0.766892 |
1ca6330a1408e370e4a31022f7466777db9519d3 | 6,823 | exs | Elixir | lib/ex_unit/test/ex_unit/capture_io_test.exs | joearms/elixir | 9a0f8107bd8bbd089acb96fe0041d61a05e88a9b | [
"Apache-2.0"
] | 4 | 2016-04-05T05:51:36.000Z | 2019-10-31T06:46:35.000Z | lib/ex_unit/test/ex_unit/capture_io_test.exs | joearms/elixir | 9a0f8107bd8bbd089acb96fe0041d61a05e88a9b | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/capture_io_test.exs | joearms/elixir | 9a0f8107bd8bbd089acb96fe0041d61a05e88a9b | [
"Apache-2.0"
] | 5 | 2015-02-01T06:01:19.000Z | 2019-08-29T09:02:35.000Z | Code.require_file "../test_helper.exs", __DIR__
defmodule ExUnit.CaptureIOTest.Value do
  # Fixture whose binary/0 yields a fixed one-character string; used to
  # exercise the {:put_chars, encoding, module, function, args} IO request.
  def binary do
    "a"
  end
end
alias ExUnit.CaptureIOTest.Value
defmodule ExUnit.CaptureIOTest.GetUntil do
  # Test helper implementing an :io get_until continuation that collects
  # characters up to (and including) a stop character.
  # NOTE(review): pre-1.0 Elixir syntax (`//` default argument,
  # `pid <- msg` send, parenless `self`); keep as-is.
  # End of input: report completion with :eof and no leftover characters.
  def until_new_line(_, :eof, _) do
    { :done, :eof, [] }
  end
  def until_new_line(this_far, chars, stop_char) do
    case Enum.split_while(chars, fn(c) -> c != stop_char end) do
      { l, [] } ->
        { :more, this_far ++ l }
      # `stop_char` rebinds (not pinned) here; split_while already
      # guarantees the list head equals the stop character.
      { l, [stop_char|rest] } ->
        { :done, this_far ++ l ++ [stop_char], rest }
    end
  end
  # Requests one full line from `device` (defaults to the group leader) by
  # issuing a raw get_until io_request driven by until_new_line/3.
  def get_line(device // Process.group_leader) do
    device <- { :io_request, self, device, { :get_until, :unicode, "", __MODULE__, :until_new_line, [?\n] } }
    receive do
      { :io_reply, _, data } -> data
    end
  end
end
alias ExUnit.CaptureIOTest.GetUntil
defmodule ExUnit.CaptureIOTest do
  use ExUnit.Case, async: true
  doctest ExUnit.CaptureIO, import: true
  import ExUnit.CaptureIO
  # Exercises ExUnit.CaptureIO against each request class of the Erlang I/O
  # protocol: put_chars, get_chars, get_line, get_until, setopts/getopts,
  # columns/rows, batched :requests, and unknown requests.
  # NOTE(review): written for a pre-1.0 Elixir dialect (spaced tuple
  # literals, `pid <- msg` sends, parenless `self`); do not modernize here.
  # capture_io/1 returns nil when the captured function writes nothing.
  test :capture_io_with_nothing do
    assert capture_io(fn ->
    end) == nil
  end
  # Output is accumulated across multiple calls, across processes spawned
  # by the captured function, and via the MFA form of put_chars.
  test :capture_io_with_put_chars do
    assert capture_io(fn ->
      :io.put_chars("")
    end) == ""
    assert capture_io(fn ->
      :io.put_chars("a")
      :io.put_chars("b")
    end) == "ab"
    assert capture_io(fn ->
      send_and_receive_io({ :put_chars, :unicode, Value, :binary, [] })
    end) == "a"
    assert capture_io(fn ->
      :io.put_chars("josé")
    end) == "josé"
    assert capture_io(fn ->
      spawn(fn -> :io.put_chars("a") end)
    end) == "a"
    assert capture_io(fn ->
      assert :io.put_chars("a") == :ok
    end)
  end
  test :capture_io_with_put_chars_to_stderr do
    assert capture_io(:stderr, fn ->
      :io.put_chars(:standard_error, "a")
    end) == "a"
  end
  # Input given as the first argument feeds reads; default input is :eof.
  # Multi-byte graphemes (あいう) count as single characters to get_chars.
  test :capture_io_with_get_chars do
    assert capture_io(fn ->
      :io.get_chars(">", 3)
    end) == nil
    capture_io(fn ->
      assert :io.get_chars(">", 3) == :eof
    end)
    capture_io("", fn ->
      assert :io.get_chars(">", 3) == :eof
    end)
    capture_io("abc\ndef", fn ->
      assert :io.get_chars(">", 3) == "abc"
      assert :io.get_chars(">", 5) == "\ndef"
      assert :io.get_chars(">", 7) == :eof
    end)
    capture_io("あいう", fn ->
      assert :io.get_chars(">", 2) == "あい"
      assert :io.get_chars(">", 1) == "う"
      assert :io.get_chars(">", 1) == :eof
    end)
  end
  # get_line consumes up to and including the newline; a trailing chunk
  # without a newline is returned as-is, then :eof.
  test :capture_io_with_get_line do
    assert capture_io(fn ->
      :io.get_line ">"
    end) == nil
    capture_io(fn ->
      assert :io.get_line(">") == :eof
    end)
    capture_io("", fn ->
      assert :io.get_line(">") == :eof
    end)
    capture_io("\n", fn ->
      assert :io.get_line(">") == "\n"
      assert :io.get_line(">") == :eof
    end)
    capture_io("a", fn ->
      assert :io.get_line(">") == "a"
      assert :io.get_line(">") == :eof
    end)
    capture_io("a\n", fn ->
      assert :io.get_line(">") == "a\n"
      assert :io.get_line(">") == :eof
    end)
    capture_io("a\nb", fn ->
      assert :io.get_line(">") == "a\n"
      assert :io.get_line(">") == "b"
      assert :io.get_line(">") == :eof
    end)
    capture_io("あい\nう", fn ->
      assert :io.get_line(">") == "あい\n"
      assert :io.get_line(">") == "う"
      assert :io.get_line(">") == :eof
    end)
  end
  # get_until is driven both through :io.scan_erl_form/1 (Erlang's form
  # tokenizer) and through the custom GetUntil helper defined above.
  test :capture_io_with_get_until do
    assert capture_io(fn ->
      assert :io.scan_erl_form('>')
    end) == nil
    capture_io(fn ->
      assert :io.scan_erl_form('>') == { :eof, 1 }
    end)
    capture_io("1", fn ->
      assert :io.scan_erl_form('>') == { :ok, [{ :integer, 1, 1 }], 1 }
      assert :io.scan_erl_form('>') == { :eof, 1 }
    end)
    capture_io("1\n.", fn ->
      assert :io.scan_erl_form('>') == { :ok, [{ :integer, 1, 1 }, { :dot, 2 }], 2 }
      assert :io.scan_erl_form('>') == { :eof, 1 }
    end)
    capture_io("1.\n.", fn ->
      assert :io.scan_erl_form('>') == { :ok, [{ :integer, 1, 1 }, { :dot, 1 }], 2 }
      assert :io.scan_erl_form('>') == { :ok, [dot: 1], 1}
      assert :io.scan_erl_form('>') == { :eof, 1 }
    end)
    capture_io("\"a", fn ->
      assert :io.scan_erl_form('>') == { :error, { 1, :erl_scan, { :string, 34, 'a' } }, 1 }
      assert :io.scan_erl_form('>') == { :eof, 1 }
    end)
    capture_io("\"a\n\"", fn ->
      assert :io.scan_erl_form('>') == { :ok, [{ :string, 1, 'a\n' }], 2 }
      assert :io.scan_erl_form('>') == { :eof, 1 }
    end)
    capture_io(":erl. mof*,,l", fn ->
      assert :io.scan_erl_form('>') == { :ok, [{ :":", 1 }, { :atom, 1, :erl }, { :dot, 1 }], 1 }
      assert :io.scan_erl_form('>') == { :ok, [{ :atom, 1, :mof }, { :*, 1 }, { :"," , 1 }, { :",", 1 }, { :atom, 1, :l }], 1 }
      assert :io.scan_erl_form('>') == { :eof, 1 }
    end)
    capture_io("a\nb\nc", fn ->
      assert GetUntil.get_line == 'a\n'
      assert GetUntil.get_line == 'b\n'
      assert GetUntil.get_line == :eof
    end)
  end
  # Option requests: setopts succeeds; getopts/columns/rows report
  # :enotsup since the capture device has no real terminal.
  test :capture_io_with_setopts do
    assert capture_io(fn ->
      :io.setopts({ :encoding, :latin1 })
    end) == nil
    capture_io(fn ->
      assert :io.setopts({ :encoding, :latin1 }) == :ok
    end)
  end
  test :capture_io_with_getopts do
    assert capture_io(fn ->
      :io.getopts
    end) == nil
    capture_io(fn ->
      assert :io.getopts == { :error, :enotsup }
    end)
  end
  test :capture_io_with_columns do
    assert capture_io(fn ->
      :io.columns
    end) == nil
    capture_io(fn ->
      assert :io.columns == { :error, :enotsup }
    end)
  end
  test :capture_io_with_rows do
    assert capture_io(fn ->
      :io.rows
    end) == nil
    capture_io(fn ->
      assert :io.rows == { :error, :enotsup }
    end)
  end
  # A batched :requests message emits all writes and replies :ok once.
  test :capture_io_with_multiple_io_requests do
    assert capture_io(fn ->
      send_and_receive_io({ :requests, [{ :put_chars, :unicode, "a" },
                                        { :put_chars, :unicode, "b" }]})
    end) == "ab"
    capture_io(fn ->
      assert send_and_receive_io({ :requests, [{ :put_chars, :unicode, "a" },
                                               { :put_chars, :unicode, "b" }]}) == :ok
    end)
  end
  # NOTE(review): "caputure" typo in the test name is preserved on purpose.
  test :caputure_io_with_unknown_io_request do
    assert capture_io(fn ->
      send_and_receive_io(:unknown)
    end) == nil
    capture_io(fn ->
      assert send_and_receive_io(:unknown) == { :error, :request }
    end)
  end
  # The original group leader must be restored even when the captured
  # function raises inside capture_io.
  test :capture_io_with_inside_assert do
    group_leader = :erlang.group_leader
    try do
      capture_io(fn ->
        assert false
      end)
    rescue
      error in [ExUnit.AssertionError] ->
        "Expected false to be true" = error.message
    end
    # Ensure no leakage on failures
    assert group_leader == :erlang.group_leader
  end
  # Issues a raw io_request to the current group leader and waits for the
  # matching io_reply (old `pid <- msg` send syntax).
  defp send_and_receive_io(req) do
    :erlang.group_leader <- { :io_request, self, self, req }
    s = self
    receive do
      { :io_reply, ^s, res} -> res
    end
  end
end
| 24.455197 | 127 | 0.543456 |
1ca634a904c4f351267c274d73e027a6a8d7e436 | 94 | exs | Elixir | test/kitten_grapqhl_web/views/layout_view_test.exs | floriank/react_relay_kitten_uploader | 1b8988bdf9fe01a263dc59097652ddb2c50520f6 | [
"MIT"
] | 3 | 2020-02-25T16:49:06.000Z | 2020-04-24T03:14:11.000Z | test/kitten_grapqhl_web/views/layout_view_test.exs | floriank/react_relay_kitten_uploader | 1b8988bdf9fe01a263dc59097652ddb2c50520f6 | [
"MIT"
] | 3 | 2021-03-09T03:17:50.000Z | 2021-09-01T04:37:12.000Z | test/kitten_grapqhl_web/views/layout_view_test.exs | floriank/react_relay_kitten_uploader | 1b8988bdf9fe01a263dc59097652ddb2c50520f6 | [
"MIT"
] | 2 | 2019-05-06T08:27:54.000Z | 2021-10-05T20:33:30.000Z | defmodule KittenGraphqlWeb.LayoutViewTest do
use KittenGraphqlWeb.ConnCase, async: true
end
| 23.5 | 44 | 0.851064 |
1ca63b659479f7a02e7503caa1cea47c57a98387 | 1,151 | exs | Elixir | ex_cubic_ingestion/test/ex_cubic_ingestion/workers/archive_test.exs | mbta/data_platform | 3fa66cb74134b2baa5234e908e147bf393c13926 | [
"MIT"
] | 1 | 2022-01-30T21:02:48.000Z | 2022-01-30T21:02:48.000Z | ex_cubic_ingestion/test/ex_cubic_ingestion/workers/archive_test.exs | mbta/data_platform | 3fa66cb74134b2baa5234e908e147bf393c13926 | [
"MIT"
] | 21 | 2022-01-25T16:35:50.000Z | 2022-03-31T19:42:52.000Z | ex_cubic_ingestion/test/ex_cubic_ingestion/workers/archive_test.exs | mbta/data_platform | 3fa66cb74134b2baa5234e908e147bf393c13926 | [
"MIT"
] | 1 | 2022-02-02T14:34:17.000Z | 2022-02-02T14:34:17.000Z | defmodule ExCubicIngestion.Workers.ArchiveTest do
use ExCubicIngestion.DataCase, async: true
use Oban.Testing, repo: ExCubicIngestion.Repo
import ExCubicIngestion.TestFixtures, only: [setup_tables_loads: 1]
alias ExCubicIngestion.Schema.CubicLoad
alias ExCubicIngestion.Workers.Archive
require MockExAws
setup :setup_tables_loads
  describe "perform/1" do
    # Runs the Oban worker against a mocked ExAws client and expects the
    # load record to transition to the "archived" status.
    test "run job without error", %{
      dmap_load: dmap_load
    } do
      assert :ok ==
               perform_job(Archive, %{
                 load_rec_id: dmap_load.id,
                 lib_ex_aws: "MockExAws"
               })
      assert "archived" == CubicLoad.get!(dmap_load.id).status
    end
  end
  describe "construct_destination_key/1" do
    test "getting destination key for generic load", %{
      dmap_load: dmap_load
    } do
      # Generic (non-ODS) loads keep their original S3 key unchanged.
      assert dmap_load.s3_key == Archive.construct_destination_key(dmap_load)
    end
    test "getting destination key for ODS load", %{
      ods_load: ods_load
    } do
      # ODS/Qlik loads are rewritten under a snapshot=<timestamp> prefix.
      assert "cubic/ods_qlik/SAMPLE/snapshot=20220101T204950Z/LOAD1.csv" ==
               Archive.construct_destination_key(ods_load)
    end
  end
end
| 26.767442 | 77 | 0.67854 |
1ca64b4a758a2325e793fce0499bc8e8118b4c7e | 805 | ex | Elixir | lib/brazilian_utils/cep.ex | alephao/brazilian-utils-elixir | 37574b9a8fa6f9601e3fbfe4411ef5954a96806c | [
"MIT"
] | 1 | 2021-04-27T19:16:06.000Z | 2021-04-27T19:16:06.000Z | lib/brazilian_utils/cep.ex | alephao/brazilian-utils-elixir | 37574b9a8fa6f9601e3fbfe4411ef5954a96806c | [
"MIT"
] | null | null | null | lib/brazilian_utils/cep.ex | alephao/brazilian-utils-elixir | 37574b9a8fa6f9601e3fbfe4411ef5954a96806c | [
"MIT"
] | null | null | null | defmodule BrazilianUtils.Cep do
@moduledoc false
alias BrazilianUtils.Helper
@spec is_valid?(cep :: String.t() | integer()) :: boolean()
def is_valid?(cep)
def is_valid?(cep) when is_binary(cep) do
cep
|> Helper.only_numbers()
|> is_valid_cep_length?
end
def is_valid?(cep) when is_integer(cep) do
Integer.to_string(cep)
|> is_valid?
end
@spec is_valid_cep_length?(cep :: String.t()) :: boolean()
defp is_valid_cep_length?(cep) when is_binary(cep) do
String.length(cep) == 8
end
@spec format(String.t()) :: String.t()
def format(cep) when is_binary(cep) do
{lhs, rhs} =
cep
|> Helper.only_numbers()
|> String.split_at(5)
if rhs == "" do
lhs
else
(lhs <> "-" <> rhs) |> String.slice(0..8)
end
end
end
| 20.641026 | 61 | 0.616149 |
1ca672fb5a262f3c64e2314adb72635936456ac1 | 1,610 | exs | Elixir | tests/dummy/test/controllers/appointment_controller_test.exs | foxnewsnetwork/autox | 66ea3f0f7ba8b3f9e910984a2ed3cdf0ef5ef29a | [
"MIT"
] | null | null | null | tests/dummy/test/controllers/appointment_controller_test.exs | foxnewsnetwork/autox | 66ea3f0f7ba8b3f9e910984a2ed3cdf0ef5ef29a | [
"MIT"
] | 20 | 2016-04-05T06:28:58.000Z | 2016-05-12T15:45:37.000Z | tests/dummy/test/controllers/appointment_controller_test.exs | foxnewsnetwork/autox | 66ea3f0f7ba8b3f9e910984a2ed3cdf0ef5ef29a | [
"MIT"
] | null | null | null | defmodule Dummy.AppointmentControllerTest do
use Dummy.ConnCase
import Dummy.SeedSupport
alias Dummy.Repo
  # Seeds one appointment plus an import and an export batch attached to
  # it, all merged into the test context.
  # NOTE(review): conn/0 and the build_* helpers presumably come from
  # SeedSupport / ConnCase — confirm in those modules.
  setup do
    conn = conn()
    appointment = build_appointment
    import_batch = build_import_batch(appointment)
    export_batch = build_export_batch(appointment)
    {:ok, conn: conn, appointment: appointment, import_batch: import_batch, export_batch: export_batch}
  end
  test "it should show appointment", %{conn: conn, appointment: appointment} do
    path = conn |> appointment_path(:show, appointment.id)
    assert path == "/api/appointments/#{appointment.id}"
    # JSON:API-style payload: a "data" object plus a "meta" object.
    assert %{"data" => data, "meta" => meta} = conn
    |> get(path, %{})
    |> json_response(200)
    assert data["type"] == "appointments"
    assert data["id"] == appointment.id
  end
  test "it should index appointments", %{conn: conn, appointment: %{id: id}} do
    path = conn |> appointment_path(:index)
    assert %{"data" => data, "meta" => meta} = conn |> get(path, %{}) |> json_response(200)
    # Only the single seeded appointment should be listed.
    assert meta["count"] == 1
    assert [%{"id" => ^id, "type" => "appointments"}] = data
  end
  test "it should handle polymorphic relationships correctly", %{conn: conn, appointment: %{id: id}, import_batch: batch} do
    path = conn |> appointment_import_batch_relationship_path(:index, id)
    assert path == "/api/appointments/#{id}/import-batches"
    assert %{"data" => data, "meta" => meta} = conn
    |> get(path, %{})
    |> json_response(200)
    # The polymorphic import batch is serialized under type "batches".
    assert meta["count"] == 1
    assert [batch_json] = data
    assert batch_json["id"] == batch.id
    assert batch_json["type"] == "batches"
  end
end | 35 | 124 | 0.655901 |
1ca67381828f3f9991418b263d11604046117449 | 304 | ex | Elixir | apps/packet/lib/packet/unsubscribe.ex | tchupp/SteveMQ | a1e84fd5ab9c44094e31a9dee67fe970b3cee163 | [
"BSD-3-Clause"
] | 8 | 2019-12-20T15:44:08.000Z | 2021-01-25T18:30:25.000Z | apps/packet/lib/packet/unsubscribe.ex | tchupp/SteveMQ | a1e84fd5ab9c44094e31a9dee67fe970b3cee163 | [
"BSD-3-Clause"
] | 1 | 2019-11-08T17:20:00.000Z | 2019-11-08T17:20:00.000Z | apps/packet/lib/packet/unsubscribe.ex | tchupp/SteveMQ | a1e84fd5ab9c44094e31a9dee67fe970b3cee163 | [
"BSD-3-Clause"
] | null | null | null | defmodule Packet.Unsubscribe do
use Bitwise
require Logger
  @opaque decode_result :: {:unsubscribe, String.t()}
  @spec decode(<<_::8>>, binary()) :: decode_result
  # Decodes an UNSUBSCRIBE fixed header (packet type 10, flags 0b0010).
  # The packet body in `_msg` is ignored and a placeholder reason string is
  # returned — TODO(review): confirm the intended final payload.
  def decode(<<10::4, 2::4>>, _msg) do
    Logger.info("RECEIVED A UNSUBSCRIBE")
    {:unsubscribe, "unsubscribe reasons"}
  end
end
| 23.384615 | 53 | 0.671053 |
1ca675444eab7d2542edbc5efc8c8f04fa456107 | 1,008 | ex | Elixir | lib/blogger/endpoint.ex | joeletizia/blogger | 64b7b5665cdd75cbf24f5cfd938faf5135eb914a | [
"MIT"
] | null | null | null | lib/blogger/endpoint.ex | joeletizia/blogger | 64b7b5665cdd75cbf24f5cfd938faf5135eb914a | [
"MIT"
] | null | null | null | lib/blogger/endpoint.ex | joeletizia/blogger | 64b7b5665cdd75cbf24f5cfd938faf5135eb914a | [
"MIT"
] | null | null | null | defmodule Blogger.Endpoint do
use Phoenix.Endpoint, otp_app: :blogger
  # Channel transport endpoint (see Blogger.UserSocket).
  socket "/socket", Blogger.UserSocket
  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phoenix.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/", from: :blogger, gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)
  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end
  # Request-id tagging and request logging for every request.
  plug Plug.RequestId
  plug Plug.Logger
  # Body parsing; JSON bodies are decoded with Poison.
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Poison
  plug Plug.MethodOverride
  plug Plug.Head
  # Cookie-based session store.
  # NOTE(review): signing_salt is committed to source — acceptable for a
  # demo app; rotate/configure via env for production.
  plug Plug.Session,
    store: :cookie,
    key: "_blogger_key",
    signing_salt: "WI6z44xr"
  # Everything else is handed to the router.
  plug Blogger.Router
end
| 25.2 | 69 | 0.709325 |
1ca67e872be85dfa4b0413a3fd1ed80685bcbf91 | 295 | ex | Elixir | lib/types/author.ex | imeraj/elixir_git | 27792b6aa9f8b14c946543cd81b253977d8686f8 | [
"MIT"
] | 22 | 2021-03-07T17:00:42.000Z | 2022-03-21T07:16:11.000Z | lib/types/author.ex | imeraj/elixir_git | 27792b6aa9f8b14c946543cd81b253977d8686f8 | [
"MIT"
] | null | null | null | lib/types/author.ex | imeraj/elixir_git | 27792b6aa9f8b14c946543cd81b253977d8686f8 | [
"MIT"
] | 2 | 2021-03-10T21:16:51.000Z | 2021-05-06T10:49:13.000Z | defmodule Egit.Types.Author do
  @moduledoc """
  Commit author identity (name, email and authoring time), part of an
  Elixir implementation of the Git version control system.
  """
  # All fields default to nil; `time` is expected to be a DateTime, since
  # to_s/1 converts it with DateTime.to_unix/1.
  defstruct name: nil, email: nil, time: nil
def to_s(author) do
timestamp = "#{DateTime.to_unix(author.time)}"
"#{author.name} <#{author.email}> " <> timestamp
end
end
| 21.071429 | 56 | 0.671186 |
1ca685866cb749d19ec66b24056d6431c60b7ecb | 6,576 | ex | Elixir | lib/mllp/packet_framer.ex | angrycandy/elixir-mllp | f3b5988c0d9f2fefaca16890b13f8c08db2ce382 | [
"Apache-2.0"
] | 22 | 2018-10-08T18:37:23.000Z | 2022-02-22T13:53:38.000Z | lib/mllp/packet_framer.ex | angrycandy/elixir-mllp | f3b5988c0d9f2fefaca16890b13f8c08db2ce382 | [
"Apache-2.0"
] | 27 | 2020-03-03T16:29:22.000Z | 2022-03-16T20:09:59.000Z | lib/mllp/packet_framer.ex | angrycandy/elixir-mllp | f3b5988c0d9f2fefaca16890b13f8c08db2ce382 | [
"Apache-2.0"
] | 10 | 2019-03-29T04:19:59.000Z | 2021-12-13T17:39:08.000Z | defmodule MLLP.PacketFramer do
  # Implementations receive one raw TCP packet plus the current framing
  # state and return the (possibly updated) state after dispatching any
  # complete messages contained in it.
  @callback handle_packet(packet :: String.t(), state :: MLLP.FramingContext.t()) ::
              {:ok, MLLP.FramingContext.t()}
  # Injects handle_packet/2 clauses into the using module: one clause set
  # per configured {start_of_block, end_of_block, type} frame type (MLLP's
  # <VT> ... <FS><CR> framing is always appended), plus a fallback that
  # discards bytes arriving outside any frame.
  defmacro __using__(opts) do
    alias MLLP.FramingContext
    {opt_frame_types, _} =
      opts
      |> Keyword.get(:frame_types, [])
      # It is said that using this function in a macro is bad,
      # but I can't figure out another way to make it work.
      |> Code.eval_quoted()
    # ^K - VT (Vertical Tab)
    # NOTE(review): 0x1C is actually FS (file separator); the VT (0x0B)
    # note above belongs with mllp_start_of_block below.
    file_sep = <<0x1C>>
    carriage_return = <<0x0D>>
    mllp_start_of_block = <<0x0B>>
    mllp_end_of_block = file_sep <> carriage_return
    frame_types =
      opt_frame_types
      |> Enum.concat([
        {mllp_start_of_block, mllp_end_of_block, :mllp}
      ])
    quote do
      @behaviour MLLP.PacketFramer
      require Logger
      @doc false
      @spec handle_packet(packet :: String.t(), state :: MLLP.FramingContext.t()) ::
              {:ok, MLLP.FramingContext.t()}
      unquote do
        frame_types
        |> Enum.map(fn {start_of_block, end_of_block, message_type} ->
          quote do
            # Packet opens a new frame (no message in progress): either
            # buffer it (no terminator yet) or dispatch the complete
            # message and recurse on the remainder.
            def handle_packet(
                  unquote(start_of_block) <> rest_of_packet,
                  %FramingContext{current_message_type: nil} = state
                ) do
              message_type_value = unquote(message_type)
              case String.split(rest_of_packet, unquote(end_of_block), parts: 2) do
                # start but no end found
                [receiver_buffer] ->
                  {:ok,
                   %{
                     state
                     | receiver_buffer: receiver_buffer,
                       current_message_type: message_type_value
                   }}
                # start and end found
                [message, receiver_buffer] ->
                  message_type_atom = get_message_type(message_type_value, message)
                  {:ok, new_state} =
                    state.dispatcher_module.dispatch(message_type_atom, message, %{
                      state
                      | # save leftovers to prepend to next packet
                        receiver_buffer: receiver_buffer,
                        current_message_type: nil
                    })
                  if receiver_buffer == "" do
                    # done with this packet
                    {:ok, new_state}
                  else
                    # the leftovers might have another message to dispatch,
                    # so treat them like a separate packet
                    handle_packet(receiver_buffer, new_state)
                  end
              end
            end
            # A lone <CR> while mid-frame: the two-byte terminator may be
            # split across packets, so finish the frame when the buffer
            # already ends in the first terminator byte.
            def handle_packet(
                  unquote(carriage_return),
                  %FramingContext{current_message_type: unquote(message_type)} = state
                ) do
              message_type_value = unquote(message_type)
              check = byte_size(state.receiver_buffer) - 1
              case state.receiver_buffer do
                <<message::binary-size(check), unquote(file_sep)>> ->
                  message_type_atom = get_message_type(message_type_value, message)
                  {:ok, new_state} =
                    state.dispatcher_module.dispatch(
                      message_type_atom,
                      message,
                      %{
                        state
                        | # save leftovers to prepend to next packet
                          receiver_buffer: "",
                          current_message_type: nil
                      }
                    )
                  {:ok, new_state}
                _ ->
                  {:ok,
                   %{
                     state
                     | receiver_buffer: state.receiver_buffer <> unquote(carriage_return),
                       current_message_type: message_type_value
                   }}
              end
            end
            # Continuation of an in-progress frame of this type.
            def handle_packet(
                  packet,
                  %FramingContext{current_message_type: unquote(message_type)} = state
                ) do
              case String.split(packet, unquote(end_of_block), parts: 2) do
                # no end found
                [new_receiver_buffer] ->
                  {
                    :ok,
                    %{state | receiver_buffer: state.receiver_buffer <> new_receiver_buffer}
                  }
                # end found
                [end_of_message, new_receiver_buffer] ->
                  message = state.receiver_buffer <> end_of_message
                  message_type_value = unquote(message_type)
                  message_type_atom = get_message_type(message_type_value, message)
                  {:ok, new_state} =
                    state.dispatcher_module.dispatch(message_type_atom, message, %{
                      state
                      | # save leftovers to prepend to next packet
                        receiver_buffer: new_receiver_buffer,
                        current_message_type: nil
                    })
                  if new_receiver_buffer == "" do
                    # done with this packet
                    {:ok, new_state}
                  else
                    # the leftovers might have another message to dispatch,
                    # so treat them like a separate packet
                    handle_packet(new_receiver_buffer, new_state)
                  end
              end
            end
          end
        end)
      end
      # TODO: Handle start_of_blocks of other types as well
      def handle_packet(
            unexpected_packet,
            state
          ) do
        to_chunk = unexpected_packet <> state.receiver_buffer
        case String.split(to_chunk, unquote(mllp_start_of_block), parts: 2) do
          [unframed] ->
            handle_unframed(unframed)
            {:ok, %{state | receiver_buffer: ""}}
          [unframed, next_buffer] ->
            handle_unframed(unframed)
            handle_packet(unquote(mllp_start_of_block) <> next_buffer, %{
              state
              | receiver_buffer: ""
            })
        end
      end
      # Hook for out-of-band bytes; overridable by the using module.
      def handle_unframed(unframed) do
        Logger.error("The DefaultPacketFramer is discarding unexpected data: #{unframed}")
      end
      defoverridable handle_unframed: 1
      @doc false
      # Refines the frame type by content: MLLP payloads starting with an
      # HL7 MSH segment are tagged :mllp_hl7, otherwise :mllp_unknown.
      @spec get_message_type(message_type :: atom(), message :: String.t()) :: atom()
      def get_message_type(:mllp, "MSH" <> _rest_of_message), do: :mllp_hl7
      def get_message_type(:mllp, _message), do: :mllp_unknown
    end
  end
end
| 34.610526 | 92 | 0.509428 |
1ca6ae08061a232ff5b50b7f51166452db494fbe | 1,649 | ex | Elixir | lib/kollybistes_web.ex | miketineo/kollybistes | 7c3504f7f4a46afdf2e560d240ec6644d0d39c7c | [
"MIT"
] | null | null | null | lib/kollybistes_web.ex | miketineo/kollybistes | 7c3504f7f4a46afdf2e560d240ec6644d0d39c7c | [
"MIT"
] | null | null | null | lib/kollybistes_web.ex | miketineo/kollybistes | 7c3504f7f4a46afdf2e560d240ec6644d0d39c7c | [
"MIT"
] | null | null | null | defmodule KollybistesWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use KollybistesWeb, :controller
use KollybistesWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
  # Quoted imports/uses injected into every controller module.
  def controller do
    quote do
      use Phoenix.Controller, namespace: KollybistesWeb
      import Plug.Conn
      import KollybistesWeb.Router.Helpers
      import KollybistesWeb.Gettext
    end
  end
  # Quoted imports/uses injected into every view module.
  def view do
    quote do
      use Phoenix.View, root: "lib/kollybistes_web/templates",
                        namespace: KollybistesWeb
      # Import convenience functions from controllers
      import Phoenix.Controller, only: [get_flash: 2, view_module: 1]
      # Use all HTML functionality (forms, tags, etc)
      use Phoenix.HTML
      import KollybistesWeb.Router.Helpers
      import KollybistesWeb.ErrorHelpers
      import KollybistesWeb.Gettext
    end
  end
  # Quoted setup for the router module.
  def router do
    quote do
      use Phoenix.Router
      import Plug.Conn
      import Phoenix.Controller
    end
  end
  # Quoted setup for channel modules.
  def channel do
    quote do
      use Phoenix.Channel
      import KollybistesWeb.Gettext
    end
  end
  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end
| 24.25 | 69 | 0.695573 |
1ca6d4721da2cd7dd036cc4b88c87afce4a3830f | 648 | ex | Elixir | lib/cableclub_web/channels/pokemon/gen1/socket.ex | CableClub/cable-club-core | 70c67c7a105dea83f2c1a1e2ee75a1ee97713bfb | [
"Apache-2.0"
] | null | null | null | lib/cableclub_web/channels/pokemon/gen1/socket.ex | CableClub/cable-club-core | 70c67c7a105dea83f2c1a1e2ee75a1ee97713bfb | [
"Apache-2.0"
] | null | null | null | lib/cableclub_web/channels/pokemon/gen1/socket.ex | CableClub/cable-club-core | 70c67c7a105dea83f2c1a1e2ee75a1ee97713bfb | [
"Apache-2.0"
] | null | null | null | defmodule CableClubWeb.Pokemon.Gen1.Socket do
use Phoenix.Socket
channel "v1", CableClubWeb.Pokemon.Gen1.Channel
alias CableClub.Accounts
  @impl true
  # Authenticates the socket: the client supplies a base64url-encoded
  # session token; on success the resolved user is stored in the assigns.
  # A decode failure (:error) or {:error, _} from check_token/1 falls
  # through the `with` unchanged, rejecting the connection.
  def connect(%{"token" => token}, socket, _connect_info) do
    with {:ok, token} <- Base.url_decode64(token),
         {:ok, user} <- check_token(token) do
      {:ok,
       socket
       |> assign(:user, user)}
    end
  end
  @impl true
  # Per-user socket id, enabling targeted disconnect broadcasts.
  def id(socket), do: "pokemon.gen1.socket.#{socket.assigns.user.id}"
def check_token(token) do
case Accounts.get_user_by_session_token(token) do
nil -> {:error, %{reason: "unauthorized"}}
user -> {:ok, user}
end
end
end
| 24 | 69 | 0.643519 |
1ca6eeae2112f1ce3e7164c627dbc3d49183217c | 3,102 | ex | Elixir | clients/gmail/lib/google_api/gmail/v1/model/message_part.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/gmail/lib/google_api/gmail/v1/model/message_part.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/gmail/lib/google_api/gmail/v1/model/message_part.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Gmail.V1.Model.MessagePart do
  @moduledoc """
  A single MIME message part.

  ## Attributes

  * `body` (*type:* `GoogleApi.Gmail.V1.Model.MessagePartBody.t`, *default:* `nil`) - The message part body for this part, which may be empty for
    container MIME message parts.
  * `filename` (*type:* `String.t`, *default:* `nil`) - The filename of the attachment. Only present if this message part
    represents an attachment.
  * `headers` (*type:* `list(GoogleApi.Gmail.V1.Model.MessagePartHeader.t)`, *default:* `nil`) - List of headers on this message part. For the top-level message part,
    representing the entire message payload, it will contain the standard
    RFC 2822 email headers such as <code>To</code>, <code>From</code>, and
    <code>Subject</code>.
  * `mimeType` (*type:* `String.t`, *default:* `nil`) - The MIME type of the message part.
  * `partId` (*type:* `String.t`, *default:* `nil`) - The immutable ID of the message part.
  * `parts` (*type:* `list(GoogleApi.Gmail.V1.Model.MessagePart.t)`, *default:* `nil`) - The child MIME message parts of this part. This only applies to container
    MIME message parts, for example <code>multipart/*</code>. For non-
    container MIME message part types, such as <code>text/plain</code>, this
    field is empty. For more information, see
    <a href="http://www.ietf.org/rfc/rfc1521.txt">RFC 1521</a>.
  """

  # Generated model: ModelBase supplies the struct, decode/2 and the field/1,2
  # macro used below to register per-attribute (de)serialization metadata.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :body => GoogleApi.Gmail.V1.Model.MessagePartBody.t(),
          :filename => String.t(),
          :headers => list(GoogleApi.Gmail.V1.Model.MessagePartHeader.t()),
          :mimeType => String.t(),
          :partId => String.t(),
          :parts => list(GoogleApi.Gmail.V1.Model.MessagePart.t())
        }

  field(:body, as: GoogleApi.Gmail.V1.Model.MessagePartBody)
  field(:filename)
  field(:headers, as: GoogleApi.Gmail.V1.Model.MessagePartHeader, type: :list)
  field(:mimeType)
  field(:partId)
  # Recursive field: a container part nests further MessagePart children.
  field(:parts, as: GoogleApi.Gmail.V1.Model.MessagePart, type: :list)
end
# Routes Poison decoding through the generated model's decode/2 so nested
# fields are rebuilt into their model structs.
defimpl Poison.Decoder, for: GoogleApi.Gmail.V1.Model.MessagePart do
  def decode(value, options) do
    GoogleApi.Gmail.V1.Model.MessagePart.decode(value, options)
  end
end
# Encoding is generic across generated models, so it delegates to ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Gmail.V1.Model.MessagePart do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 43.690141 | 168 | 0.69697 |
1ca708abe3eafd1607bab095f60fc843dcf815e5 | 491 | ex | Elixir | lib/mappers_web/views/error_view.ex | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
] | 32 | 2021-04-22T01:55:31.000Z | 2022-02-25T13:17:21.000Z | lib/mappers_web/views/error_view.ex | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
] | 58 | 2021-06-04T18:42:59.000Z | 2022-03-31T07:17:01.000Z | lib/mappers_web/views/error_view.ex | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
defmodule MappersWeb.ErrorView do
  use MappersWeb, :view

  # To customize the rendering of a particular status code for a given
  # format, add a matching render/2 clause here, for example:
  #
  #     def render("500.html", _assigns) do
  #       "Internal Server Error"
  #     end

  # Fallback used by Phoenix when no error template exists: the response
  # body is derived from the template name ("404.html" -> "Not Found").
  def template_not_found(template, _assigns),
    do: Phoenix.Controller.status_message_from_template(template)
end
| 28.882353 | 61 | 0.735234 |
1ca73c99a5f3a7cd7ecdff285e66c139dc1f859c | 188 | ex | Elixir | lib/onvif/device.ex | cogini/onvif | ec687d70895bf22ad7b802d7964142e3719787b6 | [
"Apache-2.0"
] | 3 | 2018-09-26T07:33:22.000Z | 2020-01-14T18:34:44.000Z | lib/onvif/device.ex | cogini/onvif | ec687d70895bf22ad7b802d7964142e3719787b6 | [
"Apache-2.0"
] | null | null | null | lib/onvif/device.ex | cogini/onvif | ec687d70895bf22ad7b802d7964142e3719787b6 | [
"Apache-2.0"
defmodule Onvif.Device do
  @moduledoc "Device connection info"

  # Connection settings for an ONVIF device: the host to reach and the
  # transport method, defaulting to an empty host over plain HTTP.
  defstruct host: "", method: :http

  @type t :: %__MODULE__{
          host: binary,
          method: atom
        }
end
| 12.533333 | 37 | 0.595745 |
1ca754a2e38fa2ae90e7e123790863675cd3b8a3 | 2,195 | ex | Elixir | lib/shitty_linq_ex.ex | chr1sto/shitty_linq_ex | 4ba4e102b3e8820889ed945da0750f97e14c92fd | [
"Unlicense"
] | null | null | null | lib/shitty_linq_ex.ex | chr1sto/shitty_linq_ex | 4ba4e102b3e8820889ed945da0750f97e14c92fd | [
"Unlicense"
] | null | null | null | lib/shitty_linq_ex.ex | chr1sto/shitty_linq_ex | 4ba4e102b3e8820889ed945da0750f97e14c92fd | [
"Unlicense"
defmodule ShittyLinqEx do
  @moduledoc """
  Documentation for `ShittyLinqEx`.
  """

  @doc """
  Inverts the order of the elements in a sequence.

  ## Parameters

  - `list`: A sequence of values to reverse.

  ## Returns

  A sequence whose elements correspond to those of the input sequence in reverse order.

  ## Examples

    iex> import ShittyLinqEx, only: [reverse: 1]
    iex> reverse(["A", "B", "C"])
    ["C", "B", "A"]

    iex> import ShittyLinqEx, only: [reverse: 1]
    iex> reverse([42, "orange", ":atom"])
    [":atom", "orange", 42]
  """
  @spec reverse(list) :: list
  def reverse(list) when is_list(list), do: reverse(list, [])

  # Tail-recursive worker: prepend each head onto the accumulator, so the
  # accumulator ends up holding the elements in reverse order.
  def reverse([], reversed), do: reversed
  def reverse([first | rest], reversed), do: reverse(rest, [first | reversed])

  @doc """
  Filters a sequence of values based on a predicate.

  Where `source` is an enumerable to filter.
  Where `predicate` is a function to test each element for a condition.

  Returns an enumerable that contains elements from the input sequence that satisfy the condition.

  ## Examples

    iex> import ShittyLinqEx, only: [where: 2]
    iex> where(
    ...>   ["apple", "passionfruit", "banana", "mango", "orange", "blueberry", "grape", "strawberry"],
    ...>   fn fruit -> String.length(fruit) < 6 end)
    ["apple", "mango", "grape"]

    iex> import ShittyLinqEx, only: [where: 2]
    iex> where(
    ...>   [0, 30, 20, 15, 90, 85, 40, 75],
    ...>   fn number, index -> number <= index * 10 end)
    [0, 20, 15, 40]
  """
  def where(source, predicate) when is_list(source) and is_function(predicate, 1) do
    filter_list(source, predicate)
  end

  def where(source, predicate) when is_list(source) and is_function(predicate, 2) do
    filter_list(source, predicate, 0)
  end

  # Arity-1 predicate: keep an element only when the predicate returns
  # exactly `true` (truthy non-true values are rejected, matching the
  # original `case … true ->` behavior).
  defp filter_list([], _predicate), do: []

  defp filter_list([first | rest], predicate) do
    if predicate.(first) == true do
      [first | filter_list(rest, predicate)]
    else
      filter_list(rest, predicate)
    end
  end

  # Arity-2 predicate: same filtering, but the predicate also receives the
  # zero-based index of the element.
  defp filter_list([], _predicate, _index), do: []

  defp filter_list([first | rest], predicate, index) do
    if predicate.(first, index) == true do
      [first | filter_list(rest, predicate, index + 1)]
    else
      filter_list(rest, predicate, index + 1)
    end
  end
end
| 25.523256 | 101 | 0.61959 |
1ca755bff275c9ab1ed8eab437c596af836e887d | 4,426 | exs | Elixir | test/finnish_pic_test.exs | fbergr/finnish-pic | 994016557005c4fad28759031b902e1ca171c459 | [
"MIT"
] | 1 | 2020-04-24T05:35:53.000Z | 2020-04-24T05:35:53.000Z | test/finnish_pic_test.exs | fbergr/finnish-pic | 994016557005c4fad28759031b902e1ca171c459 | [
"MIT"
] | null | null | null | test/finnish_pic_test.exs | fbergr/finnish-pic | 994016557005c4fad28759031b902e1ca171c459 | [
"MIT"
# Test suite for FinnishPic.validate/1, which parses Finnish personal
# identity codes (PIC, "henkilötunnus") of the form DDMMYYCZZZQ:
# date, century sign (+, -, A), individual number, control character.
defmodule FinnishPicTest do
  use ExUnit.Case

  # Known published test-person PICs — presumably from the Finnish DVV
  # test data set; TODO confirm the source.
  test "Anna Suomalainen’s valid PIC (known test person)" do
    {:ok, result} = FinnishPic.validate("131052-308T")
    assert result.birth_date == ~D[1952-10-13]
    assert result.gender == :female
    assert result.temporary_id == false
  end

  test "Siiri Suomalainen’s valid PIC (known test person)" do
    {:ok, result} = FinnishPic.validate("240147-632T")
    assert result.birth_date == ~D[1947-01-24]
    assert result.gender == :female
    assert result.temporary_id == false
  end

  test "Matti Matkailija's valid PIC (known test person)" do
    {:ok, result} = FinnishPic.validate("010150-1130")
    assert result.birth_date == ~D[1950-01-01]
    assert result.gender == :male
    assert result.temporary_id == false
  end

  # Century sign "+" marks a birth date in the 1800s.
  test "valid PICs born in the 1800s" do
    {:ok, result} = FinnishPic.validate("291110+948R")
    assert result.birth_date == ~D[1810-11-29]
    assert result.gender == :female
    assert result.temporary_id == true

    {:ok, result} = FinnishPic.validate("030690+917K")
    assert result.birth_date == ~D[1890-06-03]
    assert result.gender == :male
    assert result.temporary_id == true
  end

  # Century sign "-" marks a birth date in the 1900s.
  test "valid PICs born in the 1900s" do
    {:ok, result} = FinnishPic.validate("010181-900C")
    assert result.birth_date == ~D[1981-01-01]
    assert result.gender == :female
    assert result.temporary_id == true

    {:ok, result} = FinnishPic.validate("311299-9872")
    assert result.birth_date == ~D[1999-12-31]
    assert result.gender == :male
    assert result.temporary_id == true
  end

  # Century sign "A" marks a birth date in the 2000s.
  test "valid PICs born in the 2000s" do
    {:ok, result} = FinnishPic.validate("270201A964C")
    assert result.birth_date == ~D[2001-02-27]
    assert result.gender == :female
    assert result.temporary_id == true

    {:ok, result} = FinnishPic.validate("111014A9458")
    assert result.birth_date == ~D[2014-10-11]
    assert result.gender == :male
    assert result.temporary_id == true
  end

  # Leap-day handling: 2000 is a leap year.
  test "valid PIC with February 29th" do
    {:ok, result} = FinnishPic.validate("290200A935F")
    assert result.birth_date == ~D[2000-02-29]
    assert result.gender == :male
    assert result.temporary_id == true
  end

  test "valid PIC with smallest possible individual number 002" do
    {:ok, result} = FinnishPic.validate("010101+002S")
    assert result.birth_date == ~D[1801-01-01]
    assert result.gender == :female
    assert result.temporary_id == false
  end

  test "too short" do
    expect_to_fail("", :too_short)
    expect_to_fail("123456-123", :too_short)
  end

  test "too long" do
    expect_to_fail("123456-12345", :too_long)
  end

  # Surrounding whitespace is not trimmed — the input must be exact.
  test "extra spaces before or after" do
    expect_to_fail(" 010203-308T", :too_long)
    expect_to_fail("010203-308T ", :too_long)
  end

  test "invalid date format" do
    expect_to_fail("ABCDEF-123T", :invalid_format)
    expect_to_fail("01CDEF-123T", :invalid_format)
    expect_to_fail("0102EF-123T", :invalid_format)
  end

  test "invalid date" do
    expect_to_fail("001052-123T", :invalid_date)
    expect_to_fail("010052-123T", :invalid_date)
    # non-leap year
    expect_to_fail("290297-123T", :invalid_date)
    # leap year
    expect_to_fail("300296-123T", :invalid_date)
  end

  # Only "+", "-" and "A" are accepted as century signs.
  test "invalid century" do
    expect_to_fail("131052a123T", :invalid_format)
    expect_to_fail("131052B123T", :invalid_format)
    expect_to_fail("010203*123T", :invalid_format)
  end

  # Individual numbers 000 and 001 are reserved / not assignable.
  test "invalid individual number" do
    expect_to_fail("010203-000T", :invalid_individual_number)
    expect_to_fail("010203-001T", :invalid_individual_number)
  end

  test "invalid control character" do
    expect_to_fail("010203-123G", :invalid_format)
    expect_to_fail("010203-123Z", :invalid_format)
    expect_to_fail("010203-123-", :invalid_format)
    expect_to_fail("010203-123!", :invalid_format)
  end

  test "invalid format" do
    expect_to_fail("2019-1-123A", :invalid_format)
    expect_to_fail("-04-21-123A", :invalid_format)
    expect_to_fail("010203-1.2A", :invalid_format)
    expect_to_fail("010203-.12A", :invalid_format)
  end

  # Non-binary inputs (boolean, charlist) must not match any clause.
  test "invalid type" do
    assert_raise FunctionClauseError, fn ->
      FinnishPic.validate(true)
    end

    assert_raise FunctionClauseError, fn ->
      FinnishPic.validate(~c{131052-308T})
    end
  end

  # Helper: asserts that validation of `pic` fails with `expected` reason.
  def expect_to_fail(pic, expected) do
    {:error, reason} = FinnishPic.validate(pic)
    assert reason == expected
  end
end
| 30.315068 | 66 | 0.692725 |
1ca767b069fb1bc8d3f52975e3b4f6ea2b96b2e2 | 884 | ex | Elixir | test/support/conn_case.ex | ejpcmac/phoenix_elm | cc43a2253610728fbf2db9bd87b5341e82a27e5e | [
"BSD-3-Clause"
] | null | null | null | test/support/conn_case.ex | ejpcmac/phoenix_elm | cc43a2253610728fbf2db9bd87b5341e82a27e5e | [
"BSD-3-Clause"
] | null | null | null | test/support/conn_case.ex | ejpcmac/phoenix_elm | cc43a2253610728fbf2db9bd87b5341e82a27e5e | [
"BSD-3-Clause"
defmodule PhoenixElmWeb.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.

  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common datastructures and query the data layer.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  # Code injected into every test module that does `use PhoenixElmWeb.ConnCase`.
  using do
    quote do
      # Import conveniences for testing with connections
      use Phoenix.ConnTest
      import PhoenixElmWeb.Router.Helpers

      # The default endpoint for testing
      @endpoint PhoenixElmWeb.Endpoint
    end
  end

  # Provides a fresh test connection to every test via the :conn context key.
  setup _tags do
    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end
| 25.257143 | 58 | 0.7319 |
1ca76b788be6b79f278d4c2bb58377613946fd8c | 1,841 | exs | Elixir | test/arkecosystem/client/blocks_test.exs | ArkEcosystem/ARK-Elixir-Client | a62c0f63c3d490b2d8734384ae354444cc279a9a | [
"MIT"
] | 2 | 2018-07-13T23:05:08.000Z | 2019-02-06T10:27:08.000Z | test/arkecosystem/client/blocks_test.exs | ArkEcosystem/ARK-Elixir-Client | a62c0f63c3d490b2d8734384ae354444cc279a9a | [
"MIT"
] | 59 | 2018-06-11T07:59:59.000Z | 2019-11-17T23:30:19.000Z | test/arkecosystem/client/blocks_test.exs | ArkEcosystem/ARK-Elixir-Client | a62c0f63c3d490b2d8734384ae354444cc279a9a | [
"MIT"
# Tests the Blocks API wrapper against a mocked Tesla HTTP client; no real
# network calls are made.
defmodule ArkEcosystem.Client.API.BlocksTest do
  use ExUnit.Case

  import ArkEcosystem.Client.API.Blocks
  import Tesla.Mock

  # Client fixture pointing at a local devnet-style endpoint.
  @client ArkEcosystem.Client.new(%{
    host: "http://127.0.0.1:4003/api",
    nethash: "578e820911f24e039733b45e4882b73e301f813a0d2c31330dafda84534ffa23",
    version: "1.1.1"
  })

  setup do
    # Stub each endpoint the tests below exercise with a canned JSON payload.
    mock(fn
      %{method: :get, url: "http://127.0.0.1:4003/api/blocks/dummyId"} ->
        json(%{"success" => true, "data" => %{id: "dummyId"}})

      %{method: :get, url: "http://127.0.0.1:4003/api/blocks"} ->
        json(%{"success" => true, "data" => [%{id: "dummyId"}]})

      %{method: :get, url: "http://127.0.0.1:4003/api/blocks/dummyId/transactions"} ->
        json(%{"success" => true, "data" => [%{id: "dummyTransactionId"}]})

      %{method: :post, url: "http://127.0.0.1:4003/api/blocks/search"} ->
        json(%{"success" => true, "data" => [%{id: "dummySearch"}]})
    end)

    :ok
  end

  test "call ArkEcosystem.Client.API.Blocks.list" do
    assert {:ok, response} = list(@client)
    assert Enum.at(response["data"], 0)["id"] == "dummyId"
    assert response["success"] == true
  end

  test "call ArkEcosystem.Client.API.Blocks.show" do
    assert {:ok, response} = show(@client, "dummyId")
    assert response["data"]["id"] == "dummyId"
    assert response["success"] == true
  end

  test "call ArkEcosystem.Client.API.Blocks.transactions" do
    assert {:ok, response} = transactions(@client, "dummyId")
    assert Enum.at(response["data"], 0)["id"] == "dummyTransactionId"
    assert response["success"] == true
  end

  test "call ArkEcosystem.Client.API.Blocks.search" do
    assert {:ok, response} = search(@client, %{q: "searchQuery"})
    assert Enum.at(response["data"], 0)["id"] == "dummySearch"
    assert response["success"] == true
  end
end
| 34.092593 | 88 | 0.611624 |
1ca791740418c3e730ad388a8c461b9e9624ec21 | 1,920 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/operations_scoped_list.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/compute/lib/google_api/compute/v1/model/operations_scoped_list.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/operations_scoped_list.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.OperationsScopedList do
  @moduledoc """

  ## Attributes

  * `operations` (*type:* `list(GoogleApi.Compute.V1.Model.Operation.t)`, *default:* `nil`) - [Output Only] A list of operations contained in this scope.
  * `warning` (*type:* `GoogleApi.Compute.V1.Model.OperationsScopedListWarning.t`, *default:* `nil`) - [Output Only] Informational warning which replaces the list of operations when the list is empty.
  """

  # Generated model: ModelBase supplies the struct, decode/2 and the field
  # macro that registers (de)serialization metadata per attribute.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :operations => list(GoogleApi.Compute.V1.Model.Operation.t()),
          :warning => GoogleApi.Compute.V1.Model.OperationsScopedListWarning.t()
        }

  field(:operations, as: GoogleApi.Compute.V1.Model.Operation, type: :list)
  field(:warning, as: GoogleApi.Compute.V1.Model.OperationsScopedListWarning)
end
# Routes Poison decoding through the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.OperationsScopedList do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.OperationsScopedList.decode(value, options)
  end
end
# Encoding is generic across generated models, so it delegates to ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.OperationsScopedList do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.4 | 202 | 0.746354 |
1ca7a7ba3605be82b45c7aca0ffd289d3084240e | 1,129 | exs | Elixir | config/config.exs | hvnsweeting/evalixator | b1b58eafcf2be7300061de6249e61f19058322db | [
"BSD-3-Clause"
] | null | null | null | config/config.exs | hvnsweeting/evalixator | b1b58eafcf2be7300061de6249e61f19058322db | [
"BSD-3-Clause"
] | null | null | null | config/config.exs | hvnsweeting/evalixator | b1b58eafcf2be7300061de6249e61f19058322db | [
"BSD-3-Clause"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE(review): `use Mix.Config` is deprecated in newer Elixir releases in
# favor of `import Config` — confirm the project's Elixir version before
# switching.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure your application as:
#
#     config :evalixatir, key: :value
#
# and access this configuration in your application as:
#
#     Application.get_env(:evalixatir, :key)
#
# You can also configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
1ca7cd428f13417f7c8e35b60bea9b9ad0d949ea | 54,292 | exs | Elixir | test/ecto/query/planner_test.exs | joerichsen/ecto | 13e22d1f2a18f5698d5b0db5112f3936cab86ae8 | [
"Apache-2.0"
] | null | null | null | test/ecto/query/planner_test.exs | joerichsen/ecto | 13e22d1f2a18f5698d5b0db5112f3936cab86ae8 | [
"Apache-2.0"
] | null | null | null | test/ecto/query/planner_test.exs | joerichsen/ecto | 13e22d1f2a18f5698d5b0db5112f3936cab86ae8 | [
"Apache-2.0"
] | null | null | null | Code.require_file "../../../integration_test/support/types.exs", __DIR__
defmodule Ecto.Query.PlannerTest do
use ExUnit.Case, async: true
import Ecto.Query
alias Ecto.Query.Planner
alias Ecto.Query.JoinExpr
  # Fixture schema for the planner tests; exercises virtual fields,
  # :binary_id casting, filtered (`where:`) associations and a
  # has_many :through association.
  defmodule Comment do
    use Ecto.Schema

    schema "comments" do
      field :text, :string
      # Virtual field: never read from or written to the database.
      field :temp, :boolean, virtual: true
      field :posted, :naive_datetime
      field :uuid, :binary_id
      field :crazy_comment, :string
      belongs_to :post, Ecto.Query.PlannerTest.Post
      # Association filtered by a fixed value on the parent row.
      belongs_to :crazy_post, Ecto.Query.PlannerTest.Post,
        where: [title: "crazypost"]
      # Reuses the :crazy_post_id FK column, so the field is not redefined.
      belongs_to :crazy_post_with_list, Ecto.Query.PlannerTest.Post,
        where: [title: {:in, ["crazypost1", "crazypost2"]}],
        foreign_key: :crazy_post_id,
        define_field: false
      has_many :post_comments, through: [:post, :comments]
      has_many :comment_posts, Ecto.Query.PlannerTest.CommentPost
    end
  end
  # Join schema between comments and posts; carries filtered belongs_to
  # associations used by the "custom queries" join tests.
  defmodule CommentPost do
    use Ecto.Schema

    schema "comment_posts" do
      belongs_to :comment, Comment
      belongs_to :post, Post
      # Filtered association: only comments whose text is NULL.
      belongs_to :special_comment, Comment, where: [text: nil]
      # Filtered association using a SQL fragment condition.
      belongs_to :special_long_comment, Comment, where: [text: {:fragment, "LEN(?) > 100"}]
      field :deleted, :boolean
    end

    # Returns a dynamic/2 filter on the :deleted flag.
    def inactive() do
      dynamic([row], row.deleted)
    end
  end
  # Embedded schema nested inside PostMeta (see embeds_one below).
  defmodule Author do
    use Ecto.Schema

    embedded_schema do
      field :name, :string
    end
  end
  # Embedded schema used by Post's embeds_one/embeds_many fixtures;
  # itself embeds an Author, exercising nested embeds.
  defmodule PostMeta do
    use Ecto.Schema

    embedded_schema do
      field :slug, :string
      embeds_one :author, Author
    end
  end
  # Main fixture schema. Exercises a custom primary-key type, a schema
  # prefix, a custom column source, Ecto.Enum, embeds, and many_to_many
  # associations with `where:`/`join_where:` filters.
  defmodule Post do
    use Ecto.Schema

    # Custom primary-key type; @schema_prefix shows up in planned sources
    # and cache keys in the tests below.
    @primary_key {:id, CustomPermalink, []}
    @schema_prefix "my_prefix"
    schema "posts" do
      # Maps to DB column :post_title rather than :title.
      field :title, :string, source: :post_title
      field :text, :string
      field :code, :binary
      field :posted, :naive_datetime
      field :visits, :integer
      field :links, {:array, CustomPermalink}
      field :prefs, {:map, :string}
      # Excluded from default SELECTs.
      field :payload, :map, load_in_query: false
      field :status, Ecto.Enum, values: [:draft, :published, :deleted]
      embeds_one :meta, PostMeta
      embeds_many :metas, PostMeta
      has_many :comments, Ecto.Query.PlannerTest.Comment
      has_many :extra_comments, Ecto.Query.PlannerTest.Comment
      # Filtered association: only comments with non-NULL text.
      has_many :special_comments, Ecto.Query.PlannerTest.Comment, where: [text: {:not, nil}]
      many_to_many :crazy_comments, Comment, join_through: CommentPost, where: [text: "crazycomment"]
      # Filters both the destination rows and the join rows.
      many_to_many :crazy_comments_with_list, Comment, join_through: CommentPost, where: [text: {:in, ["crazycomment1", "crazycomment2"]}], join_where: [deleted: true]
      # join_through as a bare table name (no schema).
      many_to_many :crazy_comments_without_schema, Comment, join_through: "comment_posts", join_where: [deleted: true]
    end
  end
defp plan(query, operation \\ :all) do
Planner.plan(query, operation, Ecto.TestAdapter)
end
defp normalize(query, operation \\ :all) do
normalize_with_params(query, operation) |> elem(0)
end
defp normalize_with_params(query, operation \\ :all) do
{query, params, _key} = plan(query, operation)
{query, select} =
query
|> Planner.ensure_select(operation == :all)
|> Planner.normalize(operation, Ecto.TestAdapter, 0)
{query, params, select}
end
defp select_fields(fields, ix) do
for field <- fields do
{{:., [], [{:&, [], [ix]}, field]}, [], []}
end
end
  # Interpolated values from every clause kind must be collected into a
  # single params list, in clause-traversal order.
  test "plan: merges all parameters" do
    union = from p in Post, select: {p.title, ^"union"}
    subquery = from Comment, where: [text: ^"subquery"]

    query =
      from p in Post,
        select: {p.title, ^"select"},
        join: c in subquery(subquery),
        on: c.text == ^"join",
        left_join: d in assoc(p, :comments),
        union_all: ^union,
        windows: [foo: [partition_by: fragment("?", ^"windows")]],
        where: p.title == ^"where",
        group_by: p.title == ^"group_by",
        having: p.title == ^"having",
        order_by: [asc: fragment("?", ^"order_by")],
        limit: ^0,
        offset: ^1

    {_query, params, _key} = plan(query)

    assert params ==
             ["select", "subquery", "join", "where", "group_by", "having", "windows"] ++
               ["union", "order_by", 0, 1]
  end

  # An empty %Ecto.Query{} has no from and must be rejected.
  test "plan: checks from" do
    assert_raise Ecto.QueryError, ~r"query must have a from expression", fn ->
      plan(%Ecto.Query{})
    end
  end

  # Interpolated values are cast to the field type; incompatible values
  # raise a CastError whose message points at the offending expression.
  test "plan: casts values" do
    {_query, params, _key} = plan(Post |> where([p], p.id == ^"1"))
    assert params == [1]

    exception = assert_raise Ecto.Query.CastError, fn ->
      plan(Post |> where([p], p.title == ^1))
    end

    assert Exception.message(exception) =~ "value `1` in `where` cannot be cast to type :string"
    assert Exception.message(exception) =~ "where: p0.title == ^1"
  end

  # A raw query on the right of `in` must be rejected with a hint to use
  # subquery/1.
  test "plan: Ecto.Query struct as right-side value of in operator" do
    query = from(Post)

    exception = assert_raise Ecto.QueryError, fn ->
      plan(Post |> where([p], p.id in ^query))
    end

    assert Exception.message(exception) =~ "an Ecto.Query struct is not supported as right-side value of `in` operator"
    assert Exception.message(exception) =~ "Did you mean to write `expr in subquery(query)` instead?"
  end

  # Dynamics and keyword lists are only valid in specific interpolation
  # positions; elsewhere the planner raises a descriptive error.
  test "plan: raises readable error on dynamic expressions/keyword lists" do
    dynamic = dynamic([p], p.id == ^"1")
    {_query, params, _key} = plan(Post |> where([p], ^dynamic))
    assert params == [1]

    assert_raise Ecto.QueryError, ~r/dynamic expressions can only be interpolated/, fn ->
      plan(Post |> where([p], p.title == ^dynamic))
    end

    assert_raise Ecto.QueryError, ~r/keyword lists are only allowed/, fn ->
      plan(Post |> where([p], p.title == ^[foo: 1]))
    end
  end

  # CustomPermalink ("1-hello-world") is cast and dumped to its integer id.
  test "plan: casts and dumps custom types" do
    permalink = "1-hello-world"
    {_query, params, _key} = plan(Post |> where([p], p.id == ^permalink))
    assert params == [1]
  end
  # :binary_id strings are dumped to raw 16-byte binaries; malformed UUIDs
  # raise a CastError at dump time.
  test "plan: casts and dumps binary ids" do
    uuid = "00010203-0405-4607-8809-0a0b0c0d0e0f"
    {_query, params, _key} = plan(Comment |> where([c], c.uuid == ^uuid))
    assert params == [<<0, 1, 2, 3, 4, 5, 70, 7, 136, 9, 10, 11, 12, 13, 14, 15>>]

    assert_raise Ecto.Query.CastError,
                 ~r/`"00010203-0405-4607-8809"` cannot be dumped to type :binary_id/, fn ->
      uuid = "00010203-0405-4607-8809"
      plan(Comment |> where([c], c.uuid in ^[uuid]) && plan(Comment |> where([c], c.uuid == ^uuid)))
    end
  end

  # `^value in p.array_field` casts against the array's element type;
  # `in` against a non-array field is a type error.
  test "plan: casts and dumps custom types in left side of in-expressions" do
    permalink = "1-hello-world"
    {_query, params, _key} = plan(Post |> where([p], ^permalink in p.links))
    assert params == [1]

    message = ~r"value `\"1-hello-world\"` in `where` expected to be part of an array but matched type is :string"
    assert_raise Ecto.Query.CastError, message, fn ->
      plan(Post |> where([p], ^permalink in p.text))
    end
  end

  # Elements of an interpolated (or literal) right-side list are cast and
  # dumped individually against the field type.
  test "plan: casts and dumps custom types in right side of in-expressions" do
    datetime = ~N[2015-01-07 21:18:13.0]
    {_query, params, _key} = plan(Comment |> where([c], c.posted in ^[datetime]))
    assert params == [~N[2015-01-07 21:18:13]]

    permalink = "1-hello-world"
    {_query, params, _key} = plan(Post |> where([p], p.id in ^[permalink]))
    assert params == [1]

    datetime = ~N[2015-01-07 21:18:13.0]
    {_query, params, _key} = plan(Comment |> where([c], c.posted in [^datetime]))
    assert params == [~N[2015-01-07 21:18:13]]

    permalink = "1-hello-world"
    {_query, params, _key} = plan(Post |> where([p], p.id in [^permalink]))
    assert params == [1]

    {_query, params, _key} = plan(Post |> where([p], p.code in [^"abcd"]))
    assert params == ["abcd"]

    {_query, params, _key} = plan(Post |> where([p], p.code in ^["abcd"]))
    assert params == ["abcd"]
  end

  # update_all set-values are cast like any other interpolation; a literal
  # nil produces no parameter at all.
  test "plan: casts values on update_all" do
    {_query, params, _key} = plan(Post |> update([p], set: [id: ^"1"]), :update_all)
    assert params == [1]

    {_query, params, _key} = plan(Post |> update([p], set: [title: ^nil]), :update_all)
    assert params == [nil]

    {_query, params, _key} = plan(Post |> update([p], set: [title: nil]), :update_all)
    assert params == []
  end

  # Join sources: bare table name, schema module, and {table, schema}.
  test "plan: joins" do
    query = from(p in Post, join: c in "comments") |> plan |> elem(0)
    assert hd(query.joins).source == {"comments", nil}

    query = from(p in Post, join: c in Comment) |> plan |> elem(0)
    assert hd(query.joins).source == {"comments", Comment}

    query = from(p in Post, join: c in {"post_comments", Comment}) |> plan |> elem(0)
    assert hd(query.joins).source == {"post_comments", Comment}
  end
  # assoc/2 joins expand into the owner/related key comparison; a custom
  # `on:` is AND-ed onto the generated condition.
  test "plan: joins associations" do
    query = from(p in Post, join: assoc(p, :comments)) |> plan |> elem(0)
    assert %JoinExpr{on: on, source: source, assoc: nil, qual: :inner} = hd(query.joins)
    assert source == {"comments", Comment}
    assert Macro.to_string(on.expr) == "&1.post_id() == &0.id()"

    query = from(p in Post, left_join: assoc(p, :comments)) |> plan |> elem(0)
    assert %JoinExpr{on: on, source: source, assoc: nil, qual: :left} = hd(query.joins)
    assert source == {"comments", Comment}
    assert Macro.to_string(on.expr) == "&1.post_id() == &0.id()"

    query = from(p in Post, left_join: c in assoc(p, :comments), on: p.title == c.text) |> plan |> elem(0)
    assert %JoinExpr{on: on, source: source, assoc: nil, qual: :left} = hd(query.joins)
    assert source == {"comments", Comment}
    assert Macro.to_string(on.expr) == "&1.post_id() == &0.id() and &0.title() == &1.text()"
  end

  # has_many :through expands into extra joins; the :ix values show how
  # sources are re-indexed relative to declaration order.
  test "plan: nested joins associations" do
    query = from(c in Comment, left_join: assoc(c, :post_comments)) |> plan |> elem(0)
    assert {{"comments", _, _}, {"comments", _, _}, {"posts", _, _}} = query.sources
    assert [join1, join2] = query.joins
    assert Enum.map(query.joins, & &1.ix) == [2, 1]
    assert Macro.to_string(join1.on.expr) == "&2.id() == &0.post_id()"
    assert Macro.to_string(join2.on.expr) == "&1.post_id() == &2.id()"

    query = from(p in Comment, left_join: assoc(p, :post),
                               left_join: assoc(p, :post_comments)) |> plan |> elem(0)
    assert {{"comments", _, _}, {"posts", _, _}, {"comments", _, _}, {"posts", _, _}} = query.sources
    assert [join1, join2, join3] = query.joins
    assert Enum.map(query.joins, & &1.ix) == [1, 3, 2]
    assert Macro.to_string(join1.on.expr) == "&1.id() == &0.post_id()"
    assert Macro.to_string(join2.on.expr) == "&3.id() == &0.post_id()"
    assert Macro.to_string(join3.on.expr) == "&2.post_id() == &3.id()"

    query = from(p in Comment, left_join: assoc(p, :post_comments),
                               left_join: assoc(p, :post)) |> plan |> elem(0)
    assert {{"comments", _, _}, {"comments", _, _}, {"posts", _, _}, {"posts", _, _}} = query.sources
    assert [join1, join2, join3] = query.joins
    assert Enum.map(query.joins, & &1.ix) == [3, 1, 2]
    assert Macro.to_string(join1.on.expr) == "&3.id() == &0.post_id()"
    assert Macro.to_string(join2.on.expr) == "&1.post_id() == &3.id()"
    assert Macro.to_string(join3.on.expr) == "&2.id() == &0.post_id()"
  end

  # An association's `where:` filter is merged into the join's ON clause.
  test "plan: joins associations with custom queries" do
    query = from(p in Post, left_join: assoc(p, :special_comments)) |> plan |> elem(0)
    assert {{"posts", _, _}, {"comments", _, _}} = query.sources
    assert [join] = query.joins
    assert join.ix == 1
    assert Macro.to_string(join.on.expr) =~
             ~r"&1.post_id\(\) == &0.id\(\) and not[\s\(]is_nil\(&1.text\(\)\)\)?"
  end

  # Filters survive through chained association joins, including keyword
  # and fragment-based conditions.
  test "plan: nested joins associations with custom queries" do
    query = from(p in Post,
      join: c1 in assoc(p, :special_comments),
      join: p2 in assoc(c1, :post),
      join: cp in assoc(c1, :comment_posts),
      join: c2 in assoc(cp, :special_comment),
      join: c3 in assoc(cp, :special_long_comment))
      |> plan
      |> elem(0)

    assert [join1, join2, join3, join4, join5] = query.joins
    assert {{"posts", _, _}, {"comments", _, _}, {"posts", _, _},
            {"comment_posts", _, _}, {"comments", _, _}, {"comments", _, _}} = query.sources
    assert Macro.to_string(join1.on.expr) =~
             ~r"&1.post_id\(\) == &0.id\(\) and not[\s\(]is_nil\(&1.text\(\)\)\)?"
    assert Macro.to_string(join2.on.expr) == "&2.id() == &1.post_id()"
    assert Macro.to_string(join3.on.expr) == "&3.comment_id() == &1.id()"
    assert Macro.to_string(join4.on.expr) == "&4.id() == &3.special_comment_id() and is_nil(&4.text())"
    assert Macro.to_string(join5.on.expr) ==
             "&5.id() == &3.special_long_comment_id() and fragment({:raw, \"LEN(\"}, {:expr, &5.text()}, {:raw, \") > 100\"})"
  end

  # assoc/2 needs schema reflection, so a bare table source is an error.
  test "plan: cannot associate without schema" do
    query = from(p in "posts", join: assoc(p, :comments))
    message = ~r"cannot perform association join on \"posts\" because it does not have a schema"

    assert_raise Ecto.QueryError, message, fn ->
      plan(query)
    end
  end

  # The field given to assoc/2 must actually be an association.
  test "plan: requires an association field" do
    query = from(p in Post, join: assoc(p, :title))

    assert_raise Ecto.QueryError, ~r"could not find association `title`", fn ->
      plan(query)
    end
  end
  # datetime_add arguments keep their concrete datetime type; strings are
  # cast to NaiveDateTime.
  test "plan: handles specific param type-casting" do
    value = NaiveDateTime.utc_now()
    {_, params, _} = from(p in Post, where: p.posted > datetime_add(^value, 1, "second")) |> plan()
    assert params == [value]

    value = DateTime.utc_now()
    {_, params, _} = from(p in Post, where: p.posted > datetime_add(^value, 1, "second")) |> plan()
    assert params == [value]

    value = ~N[2010-04-17 14:00:00]
    {_, params, _} =
      from(p in Post, where: p.posted > datetime_add(^"2010-04-17 14:00:00", 1, "second")) |> plan()
    assert params == [value]
  end

  # The cache key includes every query clause plus per-source prefixes;
  # a query-level prefix overrides the schema prefix everywhere except
  # sources that set their own.
  test "plan: generates a cache key" do
    {_query, _params, key} = plan(from(Post, []))
    assert key == [:all, {"posts", Post, 71478254, "my_prefix"}]

    query =
      from(
        p in Post,
        prefix: "hello",
        select: 1,
        lock: "foo",
        where: is_nil(nil),
        or_where: is_nil(nil),
        join: c in Comment,
        prefix: "world",
        preload: :comments
      )

    {_query, _params, key} = plan(%{query | prefix: "foo"})
    assert key == [:all,
                   {:lock, "foo"},
                   {:prefix, "foo"},
                   {:where, [{:and, {:is_nil, [], [nil]}}, {:or, {:is_nil, [], [nil]}}]},
                   {:join, [{:inner, {"comments", Comment, 38292156, "world"}, true}]},
                   {"posts", Post, 71478254, "hello"},
                   {:select, 1}]
  end

  # The test adapter declares interpolated `in` lists uncacheable.
  test "plan: generates a cache key for in based on the adapter" do
    query = from(p in Post, where: p.id in ^[1, 2, 3])
    {_query, _params, key} = Planner.plan(query, :all, Ecto.TestAdapter)
    assert key == :nocache
  end

  # :nocache is contagious: a combination containing an uncacheable side
  # is itself uncacheable.
  test "plan: combination with uncacheable queries are uncacheable" do
    query1 =
      Post
      |> where([p], p.id in ^[1, 2, 3])
      |> select([p], p.id)

    query2 =
      Post
      |> where([p], p.id in [1, 2])
      |> select([p], p.id)
      |> distinct(true)

    {_, _, key} = query1 |> union_all(^query2) |> Planner.plan(:all, Ecto.TestAdapter)
    assert key == :nocache
  end
# Prefix resolution precedence, as exercised below: an explicit from/join
# prefix ("local") wins over a schema-declared prefix (Post's "my_prefix"),
# which in turn wins over the query-level prefix ("global"); with nothing
# else set, sources fall back to the query prefix (or nil).
test "plan: normalizes prefixes" do
# No schema prefix in from
{query, _, _} = from(Comment, select: 1) |> plan()
assert query.sources == {{"comments", Comment, nil}}
{query, _, _} = from(Comment, select: 1) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}}
{query, _, _} = from(Comment, prefix: "local", select: 1) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "local"}}
# Schema prefix in from
{query, _, _} = from(Post, select: 1) |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}}
{query, _, _} = from(Post, select: 1) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}}
{query, _, _} = from(Post, prefix: "local", select: 1) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "local"}}
# Schema prefix in join
{query, _, _} = from(c in Comment, join: Post) |> plan()
assert query.sources == {{"comments", Comment, nil}, {"posts", Post, "my_prefix"}}
{query, _, _} = from(c in Comment, join: Post) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"posts", Post, "my_prefix"}}
{query, _, _} = from(c in Comment, join: Post, prefix: "local") |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"posts", Post, "local"}}
# Schema prefix in query join
{query, _, _} = from(p in Post, join: ^from(c in Comment)) |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, nil}}
{query, _, _} = from(p in Post, join: ^from(c in Comment)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "global"}}
{query, _, _} = from(p in Post, join: ^from(c in Comment), prefix: "local") |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "local"}}
# No schema prefix in assoc join
{query, _, _} = from(c in Comment, join: assoc(c, :comment_posts)) |> plan()
assert query.sources == {{"comments", Comment, nil}, {"comment_posts", CommentPost, nil}}
{query, _, _} = from(c in Comment, join: assoc(c, :comment_posts)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"comment_posts", CommentPost, "global"}}
{query, _, _} = from(c in Comment, join: assoc(c, :comment_posts), prefix: "local") |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"comment_posts", CommentPost, "local"}}
# Schema prefix in assoc join
{query, _, _} = from(c in Comment, join: assoc(c, :post)) |> plan()
assert query.sources == {{"comments", Comment, nil}, {"posts", Post, "my_prefix"}}
{query, _, _} = from(c in Comment, join: assoc(c, :post)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"posts", Post, "my_prefix"}}
{query, _, _} = from(c in Comment, join: assoc(c, :post), prefix: "local") |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"posts", Post, "local"}}
# Schema prefix for assoc many-to-many joins
# (the join prefix applies to both the hidden join-through source and the target)
{query, _, _} = from(c in Post, join: assoc(c, :crazy_comments)) |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, nil}, {"comment_posts", CommentPost, nil}}
{query, _, _} = from(c in Post, join: assoc(c, :crazy_comments)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "global"}, {"comment_posts", CommentPost, "global"}}
{query, _, _} = from(c in Post, join: assoc(c, :crazy_comments), prefix: "local") |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "local"}, {"comment_posts", CommentPost, "local"}}
# Schema prefix for assoc many-to-many joins (when join_through is a table name)
{query, _, _} = from(c in Post, join: assoc(c, :crazy_comments_without_schema)) |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, nil}, {"comment_posts", nil, nil}}
{query, _, _} = from(c in Post, join: assoc(c, :crazy_comments_without_schema)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "global"}, {"comment_posts", nil, "global"}}
{query, _, _} = from(c in Post, join: assoc(c, :crazy_comments_without_schema), prefix: "local") |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "local"}, {"comment_posts", nil, "local"}}
# Schema prefix for assoc has through
{query, _, _} = from(c in Comment, join: assoc(c, :post_comments)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"comments", Comment, "global"}, {"posts", Ecto.Query.PlannerTest.Post, "my_prefix"}}
{query, _, _} = from(c in Comment, join: assoc(c, :post_comments), prefix: "local") |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"comments", Comment, "local"}, {"posts", Ecto.Query.PlannerTest.Post, "local"}}
end
# Combinations (union/union_all/...) are planned recursively: the member
# query is itself planned (gets a select, normalized sources) and its cache
# entry is embedded in the outer key — or the whole key becomes :nocache.
test "plan: combination queries" do
{%{combinations: [{_, query}]}, _, cache} = from(c in Comment, union: ^from(c in Comment)) |> plan()
assert query.sources == {{"comments", Comment, nil}}
assert %Ecto.Query.SelectExpr{expr: {:&, [], [0]}} = query.select
assert [:all, {:union, _}, _] = cache
# An uncacheable member (interpolated `in` list) taints the outer cache key.
{%{combinations: [{_, query}]}, _, cache} = from(c in Comment, union: ^from(c in Comment, where: c in ^[1, 2, 3])) |> plan()
assert query.sources == {{"comments", Comment, nil}}
assert %Ecto.Query.SelectExpr{expr: {:&, [], [0]}} = query.select
assert :nocache = cache
end
# The query-level prefix of the outer query propagates into combination
# members at every nesting level, unless the member carries its own
# query-level prefix; a `prefix:` on the outer `from` does NOT propagate.
test "plan: normalizes prefixes for combinations" do
# No schema prefix in from
assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Comment, union: ^from(Comment)) |> plan()
assert query.sources == {{"comments", Comment, nil}}
assert union_query.sources == {{"comments", Comment, nil}}
assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Comment, union: ^from(Comment)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}}
assert union_query.sources == {{"comments", Comment, "global"}}
assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Comment, prefix: "local", union: ^from(Comment)) |> plan()
assert query.sources == {{"comments", Comment, "local"}}
assert union_query.sources == {{"comments", Comment, nil}}
assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Comment, prefix: "local", union: ^from(Comment)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "local"}}
assert union_query.sources == {{"comments", Comment, "global"}}
# A prefix set on the union query itself beats the outer query prefix.
assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Comment, prefix: "local", union: ^(from(Comment) |> Map.put(:prefix, "union"))) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "local"}}
assert union_query.sources == {{"comments", Comment, "union"}}
# With schema prefix
# (the schema-declared "my_prefix" always wins over the query prefix)
assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Post, union: ^from(p in Post)) |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}}
assert union_query.sources == {{"posts", Post, "my_prefix"}}
assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Post, union: ^from(Post)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}}
assert union_query.sources == {{"posts", Post, "my_prefix"}}
assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Post, prefix: "local", union: ^from(Post)) |> plan()
assert query.sources == {{"posts", Post, "local"}}
assert union_query.sources == {{"posts", Post, "my_prefix"}}
assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Post, prefix: "local", union: ^from(Post)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "local"}}
assert union_query.sources == {{"posts", Post, "my_prefix"}}
# Deep-nested unions
# (the outer query prefix reaches unions-inside-unions as well)
assert {%{combinations: [{_, upper_level_union_query}]} = query, _, _} = from(Comment, union: ^from(Comment, union: ^from(Comment))) |> plan()
assert %{combinations: [{_, deeper_level_union_query}]} = upper_level_union_query
assert query.sources == {{"comments", Comment, nil}}
assert upper_level_union_query.sources == {{"comments", Comment, nil}}
assert deeper_level_union_query.sources == {{"comments", Comment, nil}}
assert {%{combinations: [{_, upper_level_union_query}]} = query, _, _} = from(Comment, union: ^from(Comment, union: ^from(Comment))) |> Map.put(:prefix, "global") |> plan()
assert %{combinations: [{_, deeper_level_union_query}]} = upper_level_union_query
assert query.sources == {{"comments", Comment, "global"}}
assert upper_level_union_query.sources == {{"comments", Comment, "global"}}
assert deeper_level_union_query.sources == {{"comments", Comment, "global"}}
assert {%{combinations: [{_, upper_level_union_query}]} = query, _, _} = from(Comment, prefix: "local", union: ^from(Comment, union: ^from(Comment))) |> plan()
assert %{combinations: [{_, deeper_level_union_query}]} = upper_level_union_query
assert query.sources == {{"comments", Comment, "local"}}
assert upper_level_union_query.sources == {{"comments", Comment, nil}}
assert deeper_level_union_query.sources == {{"comments", Comment, nil}}
assert {%{combinations: [{_, upper_level_union_query}]} = query, _, _} = from(Comment, prefix: "local", union: ^from(Comment, union: ^from(Comment))) |> Map.put(:prefix, "global") |> plan()
assert %{combinations: [{_, deeper_level_union_query}]} = upper_level_union_query
assert query.sources == {{"comments", Comment, "local"}}
assert upper_level_union_query.sources == {{"comments", Comment, "global"}}
assert deeper_level_union_query.sources == {{"comments", Comment, "global"}}
end
# CTE handling during planning: CTE queries are planned alongside the main
# query, contribute their own cache entries ({:recursive_cte, _, _} or
# {:non_recursive_cte, _, _}), and keep their own prefixes.
describe "plan: CTEs" do
# An interpolated `in` list inside a CTE makes the whole query uncacheable.
test "with uncacheable queries are uncacheable" do
{_, _, cache} =
Comment
|> with_cte("cte", as: ^from(c in Comment, where: c.id in ^[1, 2, 3]))
|> plan()
assert cache == :nocache
end
test "on all" do
# A query CTE is planned like a subquery; its prefix ("another") is kept
# and reflected both in its sources and in the embedded cache entry.
{%{with_ctes: with_expr}, _, cache} =
Comment
|> with_cte("cte", as: ^put_query_prefix(Comment, "another"))
|> plan()
%{queries: [{"cte", query}]} = with_expr
assert query.sources == {{"comments", Comment, "another"}}
assert %Ecto.Query.SelectExpr{expr: {:&, [], [0]}} = query.select
assert [
:all,
{"comments", Comment, _, nil},
{:non_recursive_cte, "cte",
[:all, {:prefix, "another"}, {"comments", Comment, _, nil}, {:select, {:&, _, [0]}}]}
] = cache
# An uncacheable CTE query forces :nocache for the whole plan.
{%{with_ctes: with_expr}, _, cache} =
Comment
|> with_cte("cte", as: ^(from(c in Comment, where: c in ^[1, 2, 3])))
|> plan()
%{queries: [{"cte", query}]} = with_expr
assert query.sources == {{"comments", Comment, nil}}
assert %Ecto.Query.SelectExpr{expr: {:&, [], [0]}} = query.select
assert :nocache = cache
# A fragment CTE is kept as a raw expression; with recursive_ctes(true)
# it is cached as a {:recursive_cte, name, expr} entry.
{%{with_ctes: with_expr}, _, cache} =
Comment
|> recursive_ctes(true)
|> with_cte("cte", as: fragment("SELECT * FROM comments WHERE id = ?", ^123))
|> plan()
%{queries: [{"cte", query_expr}]} = with_expr
expr = {:fragment, [], [raw: "SELECT * FROM comments WHERE id = ", expr: {:^, [], [0]}, raw: ""]}
assert expr == query_expr.expr
assert [:all, {"comments", Comment, _, nil}, {:recursive_cte, "cte", ^expr}] = cache
end
test "on update_all" do
recent_comments =
from(c in Comment,
order_by: [desc: c.posted],
limit: ^500,
select: [:id]
)
|> put_query_prefix("another")
# CTE params (the ^500 limit) come before the update params ("text") in
# the flattened parameter list.
{%{with_ctes: with_expr}, [500, "text"], cache} =
Comment
|> with_cte("recent_comments", as: ^recent_comments)
|> join(:inner, [c], r in "recent_comments", on: c.id == r.id)
|> update(set: [text: ^"text"])
|> select([c, r], c)
|> plan(:update_all)
%{queries: [{"recent_comments", cte}]} = with_expr
assert {{"comments", Comment, "another"}} = cte.sources
assert %{expr: {:^, [], [0]}, params: [{500, :integer}]} = cte.limit
assert [:update_all, _, _, _, _, {:non_recursive_cte, "recent_comments", cte_cache}] = cache
assert [
:all,
{:prefix, "another"},
{:take, %{0 => {:any, [:id]}}},
{:limit, {:^, [], [0]}},
{:order_by, [[desc: _]]},
{"comments", Comment, _, nil},
{:select, {:&, [], [0]}}
] = cte_cache
end
test "on delete_all" do
recent_comments =
from(c in Comment,
order_by: [desc: c.posted],
limit: ^500,
select: [:id]
)
|> put_query_prefix("another")
{%{with_ctes: with_expr}, [500, "text"], cache} =
Comment
|> with_cte("recent_comments", as: ^recent_comments)
|> join(:inner, [c], r in "recent_comments", on: c.id == r.id and c.text == ^"text")
|> select([c, r], c)
|> plan(:delete_all)
%{queries: [{"recent_comments", cte}]} = with_expr
assert {{"comments", Comment, "another"}} = cte.sources
assert %{expr: {:^, [], [0]}, params: [{500, :integer}]} = cte.limit
assert [:delete_all, _, _, _, {:non_recursive_cte, "recent_comments", cte_cache}] = cache
assert [
:all,
{:prefix, "another"},
{:take, %{0 => {:any, [:id]}}},
{:limit, {:^, [], [0]}},
{:order_by, [[desc: _]]},
{"comments", Comment, _, nil},
{:select, {:&, [], [0]}}
] = cte_cache
end
# The outer query-level prefix propagates into CTE queries unless the CTE
# query carries its own query-level prefix.
test "prefixes" do
{%{with_ctes: with_expr} = query, _, _} = Comment |> with_cte("cte", as: ^from(c in Comment)) |> plan()
%{queries: [{"cte", cte_query}]} = with_expr
assert query.sources == {{"comments", Comment, nil}}
assert cte_query.sources == {{"comments", Comment, nil}}
{%{with_ctes: with_expr} = query, _, _} = Comment |> with_cte("cte", as: ^from(c in Comment)) |> Map.put(:prefix, "global") |> plan()
%{queries: [{"cte", cte_query}]} = with_expr
assert query.sources == {{"comments", Comment, "global"}}
assert cte_query.sources == {{"comments", Comment, "global"}}
{%{with_ctes: with_expr} = query, _, _} = Comment |> with_cte("cte", as: ^(from(c in Comment) |> Map.put(:prefix, "cte"))) |> Map.put(:prefix, "global") |> plan()
%{queries: [{"cte", cte_query}]} = with_expr
assert query.sources == {{"comments", Comment, "global"}}
assert cte_query.sources == {{"comments", Comment, "cte"}}
end
end
# Literals compared against a field must dump to the field's type: both an
# integer and a charlist against Comment's :text column raise QueryError.
test "normalize: validates literal types" do
  assert_raise Ecto.QueryError, fn ->
    normalize(where(Comment, [c], c.text == 123))
  end

  assert_raise Ecto.QueryError, fn ->
    normalize(where(Comment, [c], c.text == '123'))
  end
end
# Atom literals are dumped according to the field type: Ecto.Enum fields
# accept known values (a literal yields no param; an interpolated value is
# dumped to its string form), while unknown values fail at dump/cast time.
test "normalize: casts atom values" do
{_query, params, _key} = normalize_with_params(Post |> where([p], p.status == :draft))
assert params == []
{_query, params, _key} = normalize_with_params(Post |> where([p], p.status == ^:published))
assert params == ["published"]
# Atoms against a plain :string field are rejected outright.
assert_raise Ecto.QueryError, ~r/value `:atoms_are_not_strings` cannot be dumped to type :string/, fn ->
normalize(Post |> where([p], p.title == :atoms_are_not_strings))
end
# Literal enum value not in the enum -> dump error (QueryError).
assert_raise Ecto.QueryError, ~r/value `:unknown_status` cannot be dumped to type \{:parameterized, Ecto.Enum/, fn ->
normalize(Post |> where([p], p.status == :unknown_status))
end
# Interpolated enum value not in the enum -> cast error (CastError).
assert_raise Ecto.Query.CastError, ~r/value `:pinned` in `where` cannot be cast to type {:parameterized, Ecto.Enum/, fn ->
normalize(Post |> where([p], p.status == ^:pinned))
end
end
# type/2 wraps the interpolated value in an Ecto.Query.Tagged struct and
# casts the param to the target type ("1" becomes integer 1).
test "normalize: tagged types" do
{query, params, _select} = from(Post, []) |> select([p], type(^"1", :integer))
|> normalize_with_params
assert query.select.expr ==
%Ecto.Query.Tagged{type: :integer, value: {:^, [], [0]}, tag: :integer}
assert params == [1]
# Custom Ecto.Type: the tag keeps the custom module, the type is its
# underlying primitive (:id).
{query, params, _select} = from(Post, []) |> select([p], type(^"1", CustomPermalink))
|> normalize_with_params
assert query.select.expr ==
%Ecto.Query.Tagged{type: :id, value: {:^, [], [0]}, tag: CustomPermalink}
assert params == [1]
# type/2 may also reference a field; the field's type (:integer) is used.
{query, params, _select} = from(Post, []) |> select([p], type(^"1", p.visits))
|> normalize_with_params
assert query.select.expr ==
%Ecto.Query.Tagged{type: :integer, value: {:^, [], [0]}, tag: :integer}
assert params == [1]
assert_raise Ecto.Query.CastError, ~r/value `"1"` in `select` cannot be cast to type Ecto.UUID/, fn ->
from(Post, []) |> select([p], type(^"1", Ecto.UUID)) |> normalize
end
end
# `as(:name)` is resolved at normalize time to the positional binding (&0);
# referencing a name that was never registered raises.
test "normalize: late bindings with as" do
query = from(Post, as: :posts, where: as(:posts).code == ^123) |> normalize()
assert Macro.to_string(hd(query.wheres).expr) == "&0.code() == ^0"
assert_raise Ecto.QueryError, ~r/could not find named binding `as\(:posts\)`/, fn ->
from(Post, where: as(:posts).code == ^123) |> normalize()
end
end
# `parent_as(:name)` lets a subquery reference a binding of its parent; it
# is only valid inside subqueries and only for names the parent defines.
test "normalize: late parent bindings with as" do
child = from(c in Comment, where: parent_as(:posts).posted == c.posted)
query = from(Post, as: :posts, join: c in subquery(child)) |> normalize()
assert Macro.to_string(hd(hd(query.joins).source.query.wheres).expr) == "parent_as(&0).posted() == &0.posted()"
# Missing parent binding surfaces as a SubQueryError (raised while
# normalizing the nested query).
assert_raise Ecto.SubQueryError, ~r/could not find named binding `parent_as\(:posts\)`/, fn ->
from(Post, join: c in subquery(child)) |> normalize()
end
# Using parent_as outside a subquery is a QueryError.
assert_raise Ecto.QueryError, ~r/`parent_as\(:posts\)` can only be used in subqueries/, fn ->
from(Post, where: parent_as(:posts).code == ^123) |> normalize()
end
end
# Associations declared with `where:` filters contribute those filters (and
# their params) to the join/where conditions generated for assoc joins and
# for Ecto.assoc/2 queries.
test "normalize: assoc join with wheres that have regular filters" do
# Mixing both has_many and many_to_many
{_query, params, _select} =
from(post in Post,
join: comment in assoc(post, :crazy_comments),
join: post in assoc(comment, :crazy_post)) |> normalize_with_params()
assert params == ["crazycomment", "crazypost"]
end
test "normalize: has_many assoc join with wheres" do
# The association's `where: [post_title: in-list]` filter becomes part of
# the join's ON clause (params "crazypost1"/"crazypost2").
{query, params, _select} =
from(comment in Comment, join: post in assoc(comment, :crazy_post_with_list)) |> normalize_with_params()
assert inspect(query) =~ "join: p1 in Ecto.Query.PlannerTest.Post, on: p1.id == c0.crazy_post_id and p1.post_title in ^..."
assert params == ["crazypost1", "crazypost2"]
# Ecto.assoc/2 puts the same filter in the WHERE clause, after the owner key.
{query, params, _} =
Ecto.assoc(%Comment{crazy_post_id: 1}, :crazy_post_with_list)
|> normalize_with_params()
assert inspect(query) =~ "where: p0.id == ^... and p0.post_title in ^..."
assert params == [1, "crazypost1", "crazypost2"]
end
test "normalize: many_to_many assoc join with schema and wheres" do
# Filters from both the target schema (`text in list`) and the
# join_through schema (`deleted == true`) are applied.
{query, params, _select} =
from(post in Post, join: comment in assoc(post, :crazy_comments_with_list)) |> normalize_with_params()
assert inspect(query) =~ "join: c1 in Ecto.Query.PlannerTest.Comment, on: c2.comment_id == c1.id and c1.text in ^... and c2.deleted == ^..."
assert params == ["crazycomment1", "crazycomment2", true]
{query, params, _} =
Ecto.assoc(%Post{id: 1}, :crazy_comments_with_list)
|> normalize_with_params()
assert inspect(query) =~ "join: c2 in Ecto.Query.PlannerTest.CommentPost, on: c2.post_id == p1.id and c2.deleted == ^..."
assert inspect(query) =~ "where: c2.comment_id == c0.id and c0.text in ^..."
assert params == [1, true, "crazycomment1", "crazycomment2"]
end
test "normalize: many_to_many assoc join without schema and wheres" do
# join_through as a plain table name: the join_through filter still
# applies, referencing the table source directly.
{query, params, _select} =
from(post in Post, join: comment in assoc(post, :crazy_comments_without_schema)) |> normalize_with_params()
assert inspect(query) =~ "join: c1 in Ecto.Query.PlannerTest.Comment, on: c2.comment_id == c1.id and c2.deleted == ^..."
assert params == [true]
{query, params, _} =
Ecto.assoc(%Post{id: 1}, :crazy_comments_without_schema)
|> normalize_with_params()
assert inspect(query) =~ "join: c2 in \"comment_posts\", on: c2.post_id == p1.id and c2.deleted == ^..."
assert inspect(query) =~ "where: c2.comment_id == c0.id"
assert params == [1, true]
end
# A literal that cannot be dumped to the field's type (string against a
# datetime column) raises during normalization.
test "normalize: dumps in query expressions" do
assert_raise Ecto.QueryError, ~r"cannot be dumped", fn ->
normalize(from p in Post, where: p.posted == "2014-04-17 00:00:00")
end
end
# Field references are validated against the schema: unknown fields and
# virtual fields both raise with a descriptive message.
test "normalize: validate fields" do
message = ~r"field `unknown` in `select` does not exist in schema Ecto.Query.PlannerTest.Comment"
assert_raise Ecto.QueryError, message, fn ->
query = from(Comment, []) |> select([c], c.unknown)
normalize(query)
end
message = ~r"field `temp` in `select` is a virtual field in schema Ecto.Query.PlannerTest.Comment"
assert_raise Ecto.QueryError, message, fn ->
query = from(Comment, []) |> select([c], c.temp)
normalize(query)
end
end
# The right side of `in` must dump to {:array, field_type}; integers against
# a :string field fail.
test "normalize: validate fields in left side of in expressions" do
query = from(Post, []) |> where([p], p.id in [1, 2, 3])
normalize(query)
message = ~r"value `\[1, 2, 3\]` cannot be dumped to type \{:array, :string\}"
assert_raise Ecto.QueryError, message, fn ->
query = from(Comment, []) |> where([c], c.text in [1, 2, 3])
normalize(query)
end
end
# json_extract_path (the p.field["key"] / p.field[index] syntax) validates
# paths against embeds: embedded schemas check keys recursively, :map
# fields accept any key, schemaless sources skip validation entirely.
test "normalize: validate fields in json_extract_path/2" do
query = from(Post, []) |> select([p], p.meta["slug"])
normalize(query)
query = from(Post, []) |> select([p], p.meta["author"])
normalize(query)
query = from(Post, []) |> select([p], p.meta["author"]["name"])
normalize(query)
# embeds_many is indexed by integer before descending into the embed.
query = from(Post, []) |> select([p], p.metas[0]["slug"])
normalize(query)
# Plain :map fields accept arbitrary keys.
query = from(Post, []) |> select([p], p.payload["unknown_field"])
normalize(query)
query = from(Post, []) |> select([p], p.prefs["unknown_field"])
normalize(query)
# Schemaless sources ("posts") perform no path validation.
query = from(p in "posts") |> select([p], p.meta["slug"])
normalize(query)
query = from(p in "posts") |> select([p], p.meta["unknown_field"])
normalize(query)
query = from(p in "posts") |> select([p], p.meta["author"]["unknown_field"])
normalize(query)
query = from(p in "posts") |> select([p], p.metas["not_index"])
normalize(query)
query = from(p in "posts") |> select([p], p.metas["not_index"]["unknown_field"])
normalize(query)
# With a schema, invalid paths raise: non-map field, unknown embed keys,
# integer keys on embeds_one, and string keys on embeds_many.
assert_raise RuntimeError, "expected field `title` to be an embed or a map, got: `:string`", fn ->
query = from(Post, []) |> select([p], p.title["foo"])
normalize(query)
end
assert_raise RuntimeError, "field `unknown_field` does not exist in Ecto.Query.PlannerTest.PostMeta", fn ->
query = from(Post, []) |> select([p], p.meta["unknown_field"])
normalize(query)
end
assert_raise RuntimeError, "field `0` does not exist in Ecto.Query.PlannerTest.PostMeta", fn ->
query = from(Post, []) |> select([p], p.meta[0])
normalize(query)
end
assert_raise RuntimeError, "field `unknown_field` does not exist in Ecto.Query.PlannerTest.Author", fn ->
query = from(Post, []) |> select([p], p.meta["author"]["unknown_field"])
normalize(query)
end
assert_raise RuntimeError, "cannot use `not_index` to refer to an item in `embeds_many`", fn ->
query = from(Post, []) |> select([p], p.metas["not_index"])
normalize(query)
end
end
# Interpolated `in ^list` params are flattened into the parameter list, and
# the expression is rewritten to `^(start, length)` markers so subsequent
# param indexes stay correct.
test "normalize: flattens and expands right side of in expressions" do
{query, params, _select} = where(Post, [p], p.id in [1, 2, 3]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in [1, 2, 3]"
assert params == []
# Individually pinned elements become plain positional params.
{query, params, _select} = where(Post, [p], p.id in [^1, 2, ^3]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in [^0, 2, ^1]"
assert params == [1, 3]
{query, params, _select} = where(Post, [p], p.id in ^[]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in ^(0, 0)"
assert params == []
{query, params, _select} = where(Post, [p], p.id in ^[1, 2, 3]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in ^(0, 3)"
assert params == [1, 2, 3]
# Params after an expanded list are shifted: "bar" lands at index 4
# because the list occupies indexes 1..3.
{query, params, _select} = where(Post, [p], p.title == ^"foo" and p.id in ^[1, 2, 3] and
p.title == ^"bar") |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) ==
"&0.post_title() == ^0 and &0.id() in ^(1, 3) and &0.post_title() == ^4"
assert params == ["foo", 1, 2, 3, "bar"]
end
# Empty order_by/group_by expressions (literal or interpolated) are dropped.
test "normalize: reject empty order by and group by" do
query = order_by(Post, [], []) |> normalize()
assert query.order_bys == []
query = order_by(Post, [], ^[]) |> normalize()
assert query.order_bys == []
query = group_by(Post, [], []) |> normalize()
assert query.group_bys == []
end
# Normalization of CTE queries: each CTE gets its own select fields,
# type metadata, and correctly offset parameter indexes.
describe "normalize: CTEs" do
test "single-level" do
# Schemaless CTE: select fields come from the map literal.
%{with_ctes: with_expr} =
Comment
|> with_cte("cte", as: ^from(c in "comments", select: %{id: c.id, text: c.text}))
|> normalize()
%{queries: [{"cte", query}]} = with_expr
assert query.sources == {{"comments", nil, nil}}
assert {:%{}, [], [id: _, text: _]} = query.select.expr
assert [id: {{:., _, [{:&, _, [0]}, :id]}, _, []},
text: {{:., [{:type, _} | _], [{:&, _, [0]}, :text]}, _, []}] = query.select.fields
# Schema-based CTE: the whole-source select expands to all schema fields.
%{with_ctes: with_expr} =
Comment
|> with_cte("cte", as: ^(from(c in Comment, where: c in ^[1, 2, 3])))
|> normalize()
%{queries: [{"cte", query}]} = with_expr
assert query.sources == {{"comments", Comment, nil}}
assert {:&, [], [0]} = query.select.expr
assert [{:id, {{:., _, [{:&, _, [0]}, :id]}, _, []}},
{:text, {{:., _, [{:&, _, [0]}, :text]}, _, []}},
_ | _] = query.select.fields
end
test "multi-level with select" do
sensors =
"sensors"
|> where(id: ^"id")
|> select([s], map(s, [:number]))
# There was a bug where the parameter in select would be reverted
# to ^0, this test aims to guarantee it remains ^1
agg_values =
"values"
|> with_cte("sensors_cte", as: ^sensors)
|> join(:inner, [v], s in "sensors_cte")
|> select([v, s], %{bucket: ^123 + v.number})
query =
"agg_values"
|> with_cte("agg_values", as: ^agg_values)
|> select([agg_v], agg_v.bucket)
query = normalize(query)
[{"agg_values", query}] = query.with_ctes.queries
assert Macro.to_string(query.select.fields) == "[bucket: ^1 + &0.number()]"
end
test "with field select" do
# A single-field select inside the CTE normalizes to a keyed field list.
query =
"parent"
|> with_cte("cte", as: ^from(r in "cte", select: r.child))
|> select([e], [:parent])
|> normalize()
[{"cte", query}] = query.with_ctes.queries
assert Macro.to_string(query.select.fields) == "[child: &0.child()]"
end
end
# Selecting a whole source expands to every schema field; selecting a field
# that is already covered by a whole-source select appends only the extra
# field expression; literals ("Post") are not materialized as fields.
test "normalize: select" do
query = from(Post, []) |> normalize()
assert query.select.expr ==
{:&, [], [0]}
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0)
query = from(Post, []) |> select([p], {p, p.title, "Post"}) |> normalize()
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0) ++
[{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
# Field order in the tuple does not change the field list layout: the
# whole-source expansion always comes first.
query = from(Post, []) |> select([p], {p.title, p, "Post"}) |> normalize()
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0) ++
[{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
# Preloaded join bindings are appended after the root source's fields.
query =
from(Post, [])
|> join(:inner, [_], c in Comment)
|> preload([_, c], comments: c)
|> select([p, _], {p.title, p})
|> normalize()
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0) ++
select_fields([:id, :text, :posted, :uuid, :crazy_comment, :post_id, :crazy_post_id], 1) ++
[{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
end
# Union members must produce the same select shape as the outer query,
# including literal values.
test "normalize: select with unions" do
union_query = from(Post, []) |> select([p], %{title: p.title, category: "Post"})
query = from(Post, []) |> select([p], %{title: p.title, category: "Post"}) |> union(^union_query) |> normalize()
union_query = query.combinations |> List.first() |> elem(1)
assert "Post" in query.select.fields
assert query.select.fields == union_query.select.fields
end
# Literal values in union selects are kept per-member (false in the outer
# query, true in the union member) rather than unified.
test "normalize: select with unions and virtual literal" do
union_query = from(Post, []) |> select([p], %{title: p.title, temp: true})
query = from(Post, []) |> select([p], %{title: p.title, temp: false}) |> union(^union_query) |> normalize()
union_query = query.combinations |> List.first() |> elem(1)
assert false in query.select.fields
assert true in union_query.select.fields
end
# Schemaless sources cannot infer fields, so an explicit select is required.
test "normalize: select on schemaless" do
assert_raise Ecto.QueryError, ~r"need to explicitly pass a :select clause in query", fn ->
from("posts", []) |> normalize()
end
end
# struct/2 restricts the selected fields of a schema source; field names are
# translated through the schema's source mapping (:title -> :post_title).
test "normalize: select with struct/2" do
assert_raise Ecto.QueryError, ~r"struct/2 in select expects a source with a schema", fn ->
"posts" |> select([p], struct(p, [:id, :title])) |> normalize()
end
query = Post |> select([p], struct(p, [:id, :title])) |> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields == select_fields([:id, :post_title], 0)
query = Post |> select([p], {struct(p, [:id, :title]), p.title}) |> normalize()
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
[{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], {p, struct(c, [:id, :text])})
|> normalize()
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0) ++
select_fields([:id, :text], 1)
end
# struct/2 nested keyword lists restrict fields of preloaded associations:
# `comments: [:id, :text]` limits the joined binding's fields too.
test "normalize: select with struct/2 on assoc" do
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], struct(p, [:id, :title, comments: [:id, :text]]))
|> preload([p, c], comments: c)
|> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
select_fields([:id, :text], 1)
# Deeper nesting (post: :id) and multiple preloads append further field
# groups for the corresponding bindings.
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], struct(p, [:id, :title, comments: [:id, :text, post: :id], extra_comments: :id]))
|> preload([p, c], comments: {c, post: p}, extra_comments: c)
|> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
select_fields([:id, :text], 1) ++
select_fields([:id], 0) ++
select_fields([:id], 1)
end
# map/2 mirrors struct/2 field restriction but returns plain maps; the
# generated field lists are identical to the struct/2 cases above.
test "normalize: select with map/2" do
query = Post |> select([p], map(p, [:id, :title])) |> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields == select_fields([:id, :post_title], 0)
query = Post |> select([p], {map(p, [:id, :title]), p.title}) |> normalize()
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
[{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], {p, map(c, [:id, :text])})
|> normalize()
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0) ++
select_fields([:id, :text], 1)
end
# Nested keyword lists in map/2 restrict preloaded association fields,
# same as struct/2 on assoc.
test "normalize: select with map/2 on assoc" do
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], map(p, [:id, :title, comments: [:id, :text]]))
|> preload([p, c], comments: c)
|> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
select_fields([:id, :text], 1)
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], map(p, [:id, :title, comments: [:id, :text, post: :id], extra_comments: :id]))
|> preload([p, c], comments: {c, post: p}, extra_comments: c)
|> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
select_fields([:id, :text], 1) ++
select_fields([:id], 0) ++
select_fields([:id], 1)
end
# over/2 must reference a window declared via windows/2 (here :w, not :v).
test "normalize: windows" do
assert_raise Ecto.QueryError, ~r"unknown window :v given to over/2", fn ->
Comment
|> windows([c], w: [partition_by: c.id])
|> select([c], count(c.id) |> over(:v))
|> normalize()
end
end
# Preloading requires the root source in the select and bindings that the
# query actually defines.
test "normalize: preload errors" do
message = ~r"the binding used in `from` must be selected in `select` when using `preload`"
assert_raise Ecto.QueryError, message, fn ->
Post |> preload(:hello) |> select([p], p.title) |> normalize
end
message = ~r"invalid query has specified more bindings than"
assert_raise Ecto.QueryError, message, fn ->
Post |> preload([p, c], comments: c) |> normalize
end
end
# Multiple preload expressions for the same association are merged into a
# single assoc tree, mapping assoc names to {binding_index, children}.
test "normalize: preload assoc merges" do
{_, _, select} =
from(p in Post)
|> join(:inner, [p], c in assoc(p, :comments))
|> join(:inner, [_, c], cp in assoc(c, :comment_posts))
|> join(:inner, [_, c], ip in assoc(c, :post))
|> preload([_, c, cp, _], comments: {c, comment_posts: cp})
|> preload([_, c, _, ip], comments: {c, post: ip})
|> normalize_with_params()
assert select.assocs == [comments: {1, [comment_posts: {2, []}, post: {3, []}]}]
end
# Preload keys must be real associations, and join-bound preloads are
# restricted to inner/left/lateral joins.
test "normalize: preload assoc errors" do
message = ~r"field `Ecto.Query.PlannerTest.Post.not_field` in preload is not an association"
assert_raise Ecto.QueryError, message, fn ->
query = from(p in Post, join: c in assoc(p, :comments), preload: [not_field: c])
normalize(query)
end
message = ~r"requires an inner, left or lateral join, got right join"
assert_raise Ecto.QueryError, message, fn ->
query = from(p in Post, right_join: c in assoc(p, :comments), preload: [comments: c])
normalize(query)
end
end
# Fragment joins have no schema, so they cannot be used as preload sources.
test "normalize: fragments do not support preloads" do
query = from p in Post, join: c in fragment("..."), preload: [comments: c]
assert_raise Ecto.QueryError, ~r/can only preload sources with a schema/, fn ->
normalize(query)
end
end
# Each operation restricts which query expressions are allowed; the cases
# below pin the exact error messages for forbidden combinations.
test "normalize: all does not allow updates" do
message = ~r"`all` does not allow `update` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, update: [set: [name: "foo"]]) |> normalize(:all)
end
end
test "normalize: update all only allow filters and checks updates" do
# At least one update field is required...
message = ~r"`update_all` requires at least one field to be updated"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, select: p, update: []) |> normalize(:update_all)
end
# ...duplicate update fields are rejected...
message = ~r"duplicate field `title` for `update_all`"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, select: p, update: [set: [title: "foo", title: "bar"]])
|> normalize(:update_all)
end
# ...and only CTE/where/join expressions are permitted (no order_by etc.).
message = ~r"`update_all` allows only `with_cte`, `where` and `join` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, order_by: p.title, update: [set: [title: "foo"]]) |> normalize(:update_all)
end
end
test "normalize: delete all only allow filters and forbids updates" do
message = ~r"`delete_all` does not allow `update` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, update: [set: [name: "foo"]]) |> normalize(:delete_all)
end
message = ~r"`delete_all` allows only `with_cte`, `where` and `join` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, order_by: p.title) |> normalize(:delete_all)
end
end
# Subqueries used in boolean positions (in/any/all/exists) are planned and
# embedded as %Ecto.SubQuery{} structs during normalization.
describe "normalize: subqueries in boolean expressions" do
test "replaces {:subquery, index} with an Ecto.SubQuery struct" do
subquery = from(p in Post, select: p.visits)
%{wheres: [where]} =
from(p in Post, where: p.visits in subquery(subquery))
|> normalize()
assert {:in, _, [_, %Ecto.SubQuery{}] } = where.expr
%{wheres: [where]} =
from(p in Post, where: p.visits >= all(subquery))
|> normalize()
assert {:>=, _, [_, {:all, _, [%Ecto.SubQuery{}] }]} = where.expr
%{wheres: [where]} =
from(p in Post, where: exists(subquery))
|> normalize()
assert {:exists, _, [%Ecto.SubQuery{}]} = where.expr
end
# Comparison subqueries (in/any/all) must select exactly one field.
test "raises a runtime error if more than 1 field is selected" do
s = from(p in Post, select: [p.visits, p.id])
assert_raise Ecto.QueryError, fn ->
from(p in Post, where: p.id in subquery(s))
|> normalize()
end
assert_raise Ecto.QueryError, fn ->
from(p in Post, where: p.id > any(s))
|> normalize()
end
assert_raise Ecto.QueryError, fn ->
from(p in Post, where: p.id > all(s))
|> normalize()
end
end
end
end
| 41.068079 | 193 | 0.589461 |
1ca7dc2531a508afbc12896a647eae66a22f5444 | 662 | ex | Elixir | web/router.ex | Andorbal/phoenix_react | 0291d612ca710f80bd5ec87f052649f32c9e5847 | [
"MIT"
] | null | null | null | web/router.ex | Andorbal/phoenix_react | 0291d612ca710f80bd5ec87f052649f32c9e5847 | [
"MIT"
] | null | null | null | web/router.ex | Andorbal/phoenix_react | 0291d612ca710f80bd5ec87f052649f32c9e5847 | [
"MIT"
] | null | null | null | defmodule PhoenixReact.Router do
use PhoenixReact.Web, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", PhoenixReact do
pipe_through :browser # Use the default browser stack
get "/", PageController, :index
get "/app/*path", PageController, :app
end
# Other scopes may use custom stacks.
scope "/api", PhoenixReact do
pipe_through :api
get "/repos", RepoController, :index
post "/repos/create", RepoController, :create
end
end
| 21.354839 | 57 | 0.685801 |
1ca7e59565192e970950a7f3a8f84cfe4c2c8c8b | 2,417 | ex | Elixir | clients/redis/lib/google_api/redis/v1/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/redis/lib/google_api/redis/v1/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/redis/lib/google_api/redis/v1/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Redis.V1.Model.Location do
@moduledoc """
A resource that represents Google Cloud Platform location.
## Attributes
* `displayName` (*type:* `String.t`, *default:* `nil`) - The friendly name for this location, typically a nearby city name. For example, "Tokyo".
* `labels` (*type:* `map()`, *default:* `nil`) - Cross-service attributes for the location. For example {"cloud.googleapis.com/region": "us-east1"}
* `locationId` (*type:* `String.t`, *default:* `nil`) - Resource ID for the region. For example: "us-east1".
* `metadata` (*type:* `map()`, *default:* `nil`) - Output only. The set of available zones in the location. The map is keyed by the lowercase ID of each zone, as defined by Compute Engine. These keys can be specified in `location_id` or `alternative_location_id` fields when creating a Redis instance.
* `name` (*type:* `String.t`, *default:* `nil`) - Full resource name for the region. For example: "projects/example-project/locations/us-east1".
"""
# ModelBase supplies the struct definition plus encode/decode helpers driven
# by the field/2 declarations below.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:displayName => String.t() | nil,
:labels => map() | nil,
:locationId => String.t() | nil,
:metadata => map() | nil,
:name => String.t() | nil
}
# One field/2 per JSON attribute; :map marks nested-object fields.
field(:displayName)
field(:labels, type: :map)
field(:locationId)
field(:metadata, type: :map)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.Redis.V1.Model.Location do
  # JSON decoding is delegated to the generated model's own decode helper.
  def decode(value, options), do: GoogleApi.Redis.V1.Model.Location.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Redis.V1.Model.Location do
  # Encoding is shared across all generated models via GoogleApi.Gax.ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 40.966102 | 305 | 0.69549 |
1ca82fa6a351eacd5ad28d37d2bd4e0ae65c56fc | 21,305 | ex | Elixir | lib/gettext/compiler.ex | jshmrtn/gettext | eb3411ffb09368977f18bd20ff50867220fbdee0 | [
"Apache-2.0"
] | null | null | null | lib/gettext/compiler.ex | jshmrtn/gettext | eb3411ffb09368977f18bd20ff50867220fbdee0 | [
"Apache-2.0"
] | null | null | null | lib/gettext/compiler.ex | jshmrtn/gettext | eb3411ffb09368977f18bd20ff50867220fbdee0 | [
"Apache-2.0"
] | null | null | null | defmodule Gettext.Compiler do
@moduledoc false
alias Gettext.{
PO,
PO.Translation,
PO.PluralTranslation
}
require Logger
@default_priv "priv/gettext"
@default_domain "default"
@po_wildcard "*/LC_MESSAGES/*.po"
# Runs at the end of the backend module's compilation: reads the backend's
# :gettext_opts, discovers the known PO files/locales, and injects the
# introspection functions, the translation macros and the compiled
# lgettext/lngettext clauses into the backend.
@doc false
defmacro __before_compile__(env) do
opts = Module.get_attribute(env.module, :gettext_opts)
otp_app = Keyword.fetch!(opts, :otp_app)
priv = Keyword.get(opts, :priv, @default_priv)
translations_dir = Application.app_dir(otp_app, priv)
external_file = String.replace(Path.join(".compile", priv), "/", "_")
known_po_files = known_po_files(translations_dir, opts)
known_locales = Enum.map(known_po_files, & &1[:locale]) |> Enum.uniq()
# When no compile-time default locale is given, the lookup is deferred to
# runtime via the :gettext application env.
default_locale =
opts[:default_locale] || quote(do: Application.fetch_env!(:gettext, :default_locale))
default_domain = opts[:default_domain] || @default_domain
interpolation = opts[:interpolation] || Gettext.Interpolation.Default
quote do
@behaviour Gettext.Backend
# Info about the Gettext backend.
@doc false
def __gettext__(:priv), do: unquote(priv)
def __gettext__(:otp_app), do: unquote(otp_app)
def __gettext__(:known_locales), do: unquote(known_locales)
def __gettext__(:default_locale), do: unquote(default_locale)
def __gettext__(:default_domain), do: unquote(default_domain)
def __gettext__(:interpolation), do: unquote(interpolation)
# The manifest lives in the root of the priv
# directory that contains .po/.pot files.
@external_resource unquote(Application.app_dir(otp_app, external_file))
if Gettext.Extractor.extracting?() do
Gettext.ExtractorAgent.add_backend(__MODULE__)
end
unquote(macros())
# These are the two functions we generated inside the backend.
def lgettext(locale, domain, msgctxt \\ nil, msgid, bindings)
def lngettext(locale, domain, msgctxt \\ nil, msgid, msgid_plural, n, bindings)
unquote(compile_po_files(env, known_po_files, opts))
# Catch-all clauses.
def lgettext(locale, domain, msgctxt, msgid, bindings),
do: handle_missing_translation(locale, domain, msgid, bindings)
def lngettext(locale, domain, msgctxt, msgid, msgid_plural, n, bindings),
do: handle_missing_plural_translation(locale, domain, msgid, msgid_plural, n, bindings)
end
end
# Returns the quoted definitions of all user-facing translation macros
# (gettext/dgettext/pgettext/ngettext and friends plus the *_noop variants),
# which __before_compile__/1 injects into the backend module. `unquote: false`
# keeps the inner unquotes literal so they expand in the backend, not here.
defp macros() do
quote unquote: false do
# Base *_noop macro: expands its arguments to compile-time binaries,
# records the message for extraction, and returns the msgid unchanged.
defmacro dpgettext_noop(domain, msgctxt, msgid) do
domain = Gettext.Compiler.expand_to_binary(domain, "domain", __MODULE__, __CALLER__)
msgid = Gettext.Compiler.expand_to_binary(msgid, "msgid", __MODULE__, __CALLER__)
msgctxt = Gettext.Compiler.expand_to_binary(msgctxt, "msgctxt", __MODULE__, __CALLER__)
if Gettext.Extractor.extracting?() do
Gettext.Extractor.extract(
__CALLER__,
__MODULE__,
domain,
msgctxt,
msgid,
Gettext.Compiler.get_and_flush_extracted_comments()
)
end
msgid
end
# The remaining *_noop macros delegate to dpgettext_noop, filling in a nil
# context and/or the backend's default domain.
defmacro dgettext_noop(domain, msgid) do
quote do
unquote(__MODULE__).dpgettext_noop(unquote(domain), nil, unquote(msgid))
end
end
defmacro gettext_noop(msgid) do
domain = __gettext__(:default_domain)
quote do
unquote(__MODULE__).dpgettext_noop(unquote(domain), nil, unquote(msgid))
end
end
defmacro pgettext_noop(msgid, context) do
domain = __gettext__(:default_domain)
quote do
unquote(__MODULE__).dpgettext_noop(unquote(domain), unquote(context), unquote(msgid))
end
end
# Plural *_noop base: same as dpgettext_noop but for {msgid, msgid_plural}.
defmacro dpngettext_noop(domain, msgctxt, msgid, msgid_plural) do
domain = Gettext.Compiler.expand_to_binary(domain, "domain", __MODULE__, __CALLER__)
msgid = Gettext.Compiler.expand_to_binary(msgid, "msgid", __MODULE__, __CALLER__)
msgctxt = Gettext.Compiler.expand_to_binary(msgctxt, "msgctxt", __MODULE__, __CALLER__)
msgid_plural =
Gettext.Compiler.expand_to_binary(msgid_plural, "msgid_plural", __MODULE__, __CALLER__)
if Gettext.Extractor.extracting?() do
Gettext.Extractor.extract(
__CALLER__,
__MODULE__,
domain,
msgctxt,
{msgid, msgid_plural},
Gettext.Compiler.get_and_flush_extracted_comments()
)
end
{msgid, msgid_plural}
end
defmacro dngettext_noop(domain, msgid, msgid_plural) do
quote do
unquote(__MODULE__).dpngettext_noop(
unquote(domain),
nil,
unquote(msgid),
unquote(msgid_plural)
)
end
end
defmacro pngettext_noop(msgctxt, msgid, msgid_plural) do
domain = __gettext__(:default_domain)
quote do
unquote(__MODULE__).dpngettext_noop(
unquote(domain),
unquote(msgctxt),
unquote(msgid),
unquote(msgid_plural)
)
end
end
defmacro ngettext_noop(msgid, msgid_plural) do
domain = __gettext__(:default_domain)
quote do
unquote(__MODULE__).dpngettext_noop(
unquote(domain),
nil,
unquote(msgid),
unquote(msgid_plural)
)
end
end
# Translating macros: run the *_noop variant (for extraction) and then call
# into Gettext for the actual runtime lookup/interpolation.
defmacro dpgettext(domain, msgctxt, msgid, bindings \\ Macro.escape(%{})) do
quote do
msgid =
unquote(__MODULE__).dpgettext_noop(unquote(domain), unquote(msgctxt), unquote(msgid))
Gettext.dpgettext(
unquote(__MODULE__),
unquote(domain),
unquote(msgctxt),
msgid,
unquote(bindings)
)
end
end
defmacro dgettext(domain, msgid, bindings \\ Macro.escape(%{})) do
quote do
unquote(__MODULE__).dpgettext(unquote(domain), nil, unquote(msgid), unquote(bindings))
end
end
defmacro pgettext(msgctxt, msgid, bindings \\ Macro.escape(%{})) do
domain = __gettext__(:default_domain)
quote do
unquote(__MODULE__).dpgettext(
unquote(domain),
unquote(msgctxt),
unquote(msgid),
unquote(bindings)
)
end
end
defmacro gettext(msgid, bindings \\ Macro.escape(%{})) do
domain = __gettext__(:default_domain)
quote do
unquote(__MODULE__).dpgettext(unquote(domain), nil, unquote(msgid), unquote(bindings))
end
end
# Plural translating macros, mirroring the singular ones above.
defmacro dpngettext(domain, msgctxt, msgid, msgid_plural, n, bindings \\ Macro.escape(%{})) do
quote do
{msgid, msgid_plural} =
unquote(__MODULE__).dpngettext_noop(
unquote(domain),
unquote(msgctxt),
unquote(msgid),
unquote(msgid_plural)
)
Gettext.dpngettext(
unquote(__MODULE__),
unquote(domain),
unquote(msgctxt),
msgid,
msgid_plural,
unquote(n),
unquote(bindings)
)
end
end
defmacro dngettext(domain, msgid, msgid_plural, n, bindings \\ Macro.escape(%{})) do
quote do
unquote(__MODULE__).dpngettext(
unquote(domain),
nil,
unquote(msgid),
unquote(msgid_plural),
unquote(n),
unquote(bindings)
)
end
end
defmacro ngettext(msgid, msgid_plural, n, bindings \\ Macro.escape(%{})) do
domain = __gettext__(:default_domain)
quote do
unquote(__MODULE__).dpngettext(
unquote(domain),
nil,
unquote(msgid),
unquote(msgid_plural),
unquote(n),
unquote(bindings)
)
end
end
defmacro pngettext(msgctxt, msgid, msgid_plural, n, bindings \\ Macro.escape(%{})) do
domain = __gettext__(:default_domain)
quote do
unquote(__MODULE__).dpngettext(
unquote(domain),
unquote(msgctxt),
unquote(msgid),
unquote(msgid_plural),
unquote(n),
unquote(bindings)
)
end
end
# Records a translator comment (as a "#. " extracted comment) for the next
# extracted message; expands to :ok at the call site.
defmacro gettext_comment(comment) do
comment = Gettext.Compiler.expand_to_binary(comment, "comment", __MODULE__, __CALLER__)
Gettext.Compiler.append_extracted_comment(comment)
:ok
end
end
end
@doc """
Expands the given `msgid` in the given `env`, raising if it doesn't expand to
a binary.
"""
@spec expand_to_binary(binary, binary, module, Macro.Env.t()) :: binary | no_return
def expand_to_binary(term, what, gettext_module, env)
when what in ~w(domain msgctxt msgid msgid_plural comment) do
# Shared error for anything that does not expand to a compile-time string:
# dynamic values defeat extraction, so we point users at Gettext's functions.
raiser = fn term ->
raise ArgumentError, """
Gettext macros expect translation keys (msgid and msgid_plural),
domains, and comments to expand to strings at compile-time, but the given #{what}
doesn't. This is what the macro received:
#{inspect(term)}
Dynamic translations should be avoided as they limit Gettext's
ability to extract translations from your source code. If you are
sure you need dynamic lookup, you can use the functions in the Gettext
module:
string = "hello world"
Gettext.gettext(#{inspect(gettext_module)}, string)
"""
end
# We support nil too in order to fall back to a nil context and always use the *p
# variants of the Gettext macros.
case Macro.expand(term, env) do
term when is_binary(term) or is_nil(term) ->
term
# A literal string concatenation/interpolation AST whose pieces are all
# binaries can be joined into one compile-time string.
{:<<>>, _, pieces} = term ->
if Enum.all?(pieces, &is_binary/1), do: Enum.join(pieces), else: raiser.(term)
other ->
raiser.(other)
end
end
@doc """
Prepends the given comment, formatted as a PO extracted comment (`"#. "`),
onto the list kept under `:gettext_comments` in the process dictionary.

Always returns `:ok`.
"""
@spec append_extracted_comment(binary) :: :ok
def append_extracted_comment(comment) do
  updated = ["#. " <> comment | Process.get(:gettext_comments, [])]
  Process.put(:gettext_comments, updated)
  :ok
end
@doc """
Removes the accumulated extracted comments from the process dictionary and
returns them in the order they were appended (they are stored reversed).
"""
@spec get_and_flush_extracted_comments() :: [binary]
def get_and_flush_extracted_comments() do
  case Process.delete(:gettext_comments) do
    nil -> []
    comments -> Enum.reverse(comments)
  end
end
@doc """
Logs an error via `Logger.error/1` when `domain` contains a slash.

Slashes in domains are not supported, but this deliberately does not raise:
the contract of the Gettext functions/macros for unknown domains/translations
is to return the original msgid, and raising here would break that contract.
Always returns `:ok`.
"""
@spec warn_if_domain_contains_slashes(binary) :: :ok
def warn_if_domain_contains_slashes(domain) do
  if domain =~ "/" do
    _ = Logger.error(fn -> ["Slashes in domains are not supported: ", inspect(domain)] end)
  end

  :ok
end
# Compiles all the `.po` files in the given directory (`dir`) into `lgettext/4`
# and `lngettext/6` function clauses, either unified into the backend or split
# into per-locale/per-domain modules (see the :split_module_by option).
#
# Fix: the deprecation warning message read "deprecatead".
defp compile_po_files(env, known_po_files, opts) do
  # Plural-forms module: backend opts win over the global :gettext app env,
  # defaulting to Gettext.Plural.
  plural_mod =
    Keyword.get(opts, :plural_forms) ||
      Application.get_env(:gettext, :plural_forms, Gettext.Plural)

  # :one_module_per_locale is the deprecated spelling of
  # split_module_by: [:locale]; keep honoring it but warn.
  opts =
    if opts[:one_module_per_locale] do
      IO.warn(
        ":one_module_per_locale is deprecated, please use split_module_by: [:locale] instead"
      )

      Keyword.put_new(opts, :split_module_by, [:locale])
    else
      opts
    end

  case List.wrap(opts[:split_module_by]) do
    [] ->
      # No splitting: every PO file's clauses go straight into the backend.
      Enum.map(
        known_po_files,
        &compile_unified_po_file(env, &1, plural_mod, opts[:interpolation])
      )

    split ->
      # Group PO files by the split module they belong to, then compile each
      # group serially or (default) in parallel via the parallel compiler.
      grouped = Enum.group_by(known_po_files, &split_module_name(env, &1, split))

      case Keyword.get(opts, :split_module_compilation, :parallel) do
        :serial ->
          Enum.map(grouped, fn {module, files} ->
            compile_split_po_files(env, module, files, plural_mod, opts[:interpolation])
          end)

        :parallel ->
          grouped
          |> Enum.map(fn {module, files} ->
            Kernel.ParallelCompiler.async(fn ->
              compile_split_po_files(env, module, files, plural_mod, opts[:interpolation])
            end)
          end)
          |> Enum.map(fn task ->
            Task.await(task, :infinity)
          end)
      end
  end
end
# Builds the name of the module a locale/domain's translation clauses are
# compiled into, e.g. MyBackend.T_en_default. The dynamic atom creation is
# safe here: it happens at compile time for a bounded set of locales/domains.
defp split_module_name(env, po_file, split) do
  locale_suffix = if :locale in split, do: "_" <> po_file.locale, else: ""
  domain_suffix = if :domain in split, do: "_" <> po_file.domain, else: ""

  String.to_atom("#{env.module}.T" <> locale_suffix <> domain_suffix)
end
# Compiles one PO file directly into the backend: its clauses become private
# functions, and public lgettext/lngettext clauses for this locale/domain
# dispatch to them.
defp compile_unified_po_file(env, po_file, plural_mod, interpolation_module) do
{locale, domain, singular_fun, plural_fun, quoted} =
compile_po_file(:defp, po_file, env, plural_mod, interpolation_module)
quote do
unquote(quoted)
def lgettext(unquote(locale), unquote(domain), msgctxt, msgid, bindings) do
unquote(singular_fun)(msgctxt, msgid, bindings)
end
def lngettext(unquote(locale), unquote(domain), msgctxt, msgid, msgid_plural, n, bindings) do
unquote(plural_fun)(msgctxt, msgid, msgid_plural, n, bindings)
end
end
end
# Compiles every PO file that belongs to one split module: the translation
# clauses are created in `module`, while the backend itself only receives
# dispatching clauses that forward to it.
defp compile_split_po_files(env, module, files, plural_mod, interpolation_module) do
  {dispatch_quoted, translations_quoted} =
    Enum.reduce(files, {[], []}, fn po_file, acc ->
      compile_split_po_file(env, module, plural_mod, po_file, interpolation_module, acc)
    end)

  create_split_module(env, module, translations_quoted)
  dispatch_quoted
end
# Compiles one PO file for the split-module layout: the actual translation
# clauses (public, since they are called cross-module) are accumulated for the
# split module, and dispatching lgettext/lngettext clauses are accumulated for
# the backend.
defp compile_split_po_file(env, module, plural_mod, po_file, interpolation_module, {acc1, acc2}) do
{locale, domain, singular_fun, plural_fun, split_module_quoted} =
compile_po_file(:def, po_file, env, plural_mod, interpolation_module)
current_module_quoted =
quote do
def lgettext(unquote(locale), unquote(domain), msgctxt, msgid, bindings) do
unquote(module).unquote(singular_fun)(msgctxt, msgid, bindings)
end
def lngettext(unquote(locale), unquote(domain), msgctxt, msgid, msgid_plural, n, bindings) do
unquote(module).unquote(plural_fun)(msgctxt, msgid, msgid_plural, n, bindings)
end
end
{[current_module_quoted | acc1], [split_module_quoted | acc2]}
end
# Defines the module that holds a split set of translation clauses; prepending
# `@moduledoc false` keeps the generated module out of the docs.
defp create_split_module(env, module, translations) do
  body = block([quote(do: @moduledoc(false)) | translations])
  Module.create(module, body, env)
  :ok
end
# Compiles a .po file into a list of lgettext/5 (for translations) and
# lngettext/7 (for plural translations) clauses.
# `kind` is :def or :defp depending on whether the clauses live in a split
# module (cross-module calls) or in the backend itself. Trailing catch-all
# clauses delegate missing translations back to the backend's handlers.
defp compile_po_file(kind, po_file, env, plural_mod, interpolation_module) do
%{locale: locale, domain: domain, path: path} = po_file
%PO{translations: translations, file: file} = PO.parse_file!(path)
# Per-locale/domain function names, e.g. :en_default_lgettext.
singular_fun = :"#{locale}_#{domain}_lgettext"
plural_fun = :"#{locale}_#{domain}_lngettext"
mapper =
&compile_translation(
kind,
locale,
&1,
singular_fun,
plural_fun,
file,
plural_mod,
interpolation_module
)
translations = block(Enum.map(translations, mapper))
quoted =
quote do
unquote(translations)
Kernel.unquote(kind)(unquote(singular_fun)(msgctxt, msgid, bindings)) do
unquote(env.module).handle_missing_translation(
unquote(locale),
unquote(domain),
msgid,
bindings
)
end
Kernel.unquote(kind)(unquote(plural_fun)(msgctxt, msgid, msgid_plural, n, bindings)) do
unquote(env.module).handle_missing_plural_translation(
unquote(locale),
unquote(domain),
msgid,
msgid_plural,
n,
bindings
)
end
end
{locale, domain, singular_fun, plural_fun, quoted}
end
# Extracts {locale, domain} from a canonical "<locale>/LC_MESSAGES/<domain>.po"
# path; matching on the reversed segments asserts the canonical layout.
defp locale_and_domain_from_path(path) do
  [file, "LC_MESSAGES", locale | _rest] =
    path
    |> Path.split()
    |> Enum.reverse()

  {locale, Path.rootname(file, ".po")}
end
# Compiles a singular translation into one lgettext clause matching its exact
# msgctxt/msgid; the msgstr is pre-compiled by the interpolation module.
defp compile_translation(
kind,
_locale,
%Translation{} = t,
singular_fun,
_plural_fun,
_file,
_plural_mod,
interpolation_module
) do
msgid = IO.iodata_to_binary(t.msgid)
msgstr = IO.iodata_to_binary(t.msgstr)
# msgctxt is optional; keep nil when absent so the nil-context clause matches.
msgctxt = t.msgctxt && IO.iodata_to_binary(t.msgctxt)
case msgstr do
# Only actually generate this function clause if the msgstr is not empty.
# If it is empty, it will trigger the missing translation case.
"" ->
nil
_ ->
quote do
Kernel.unquote(kind)(
unquote(singular_fun)(unquote(msgctxt), unquote(msgid), bindings)
) do
require unquote(interpolation_module)
unquote(interpolation_module).compile_interpolate(
:translation,
unquote(msgstr),
bindings
)
end
end
end
end
# Compiles a plural translation into one lngettext clause: at runtime the
# plural form is computed via plural_mod.plural/2 and dispatched over one
# case clause per msgstr form, with :count injected into the bindings.
defp compile_translation(
kind,
locale,
%PluralTranslation{} = t,
_singular_fun,
plural_fun,
file,
plural_mod,
interpolation_module
) do
warn_if_missing_plural_forms(locale, plural_mod, t, file)
msgid = IO.iodata_to_binary(t.msgid)
msgid_plural = IO.iodata_to_binary(t.msgid_plural)
msgstr = Enum.map(t.msgstr, fn {form, str} -> {form, IO.iodata_to_binary(str)} end)
msgctxt = t.msgctxt && IO.iodata_to_binary(t.msgctxt)
# If any of the msgstrs is empty, then we skip the generation of this
# function clause. The reason we do this is the same as for the
# `%Translation{}` clause.
unless Enum.any?(msgstr, &match?({_form, ""}, &1)) do
# We use flat_map here because clauses can only be defined in blocks,
# so when quoted they are a list.
clauses =
Enum.flat_map(msgstr, fn {form, str} ->
quote do
unquote(form) ->
require unquote(interpolation_module)
unquote(interpolation_module).compile_interpolate(
:plural_translation,
unquote(str),
var!(bindings)
)
end
end)
# A plural form the PO file does not cover raises a descriptive error
# pointing at the offending file/line.
error_clause =
quote do
form ->
raise Gettext.PluralFormError,
form: form,
locale: unquote(locale),
file: unquote(file),
line: unquote(t.po_source_line)
end
quote generated: true do
Kernel.unquote(kind)(
unquote(plural_fun)(
unquote(msgctxt),
unquote(msgid),
unquote(msgid_plural),
n,
bindings
)
) do
plural_form = unquote(plural_mod).plural(unquote(locale), n)
var!(bindings) = Map.put(bindings, :count, n)
case plural_form, do: unquote(clauses ++ error_clause)
end
end
end
end
# Logs an error for every plural form required by `locale` (according to
# `plural_mod.nplurals/1`) that the translation provides no msgstr for.
defp warn_if_missing_plural_forms(locale, plural_mod, translation, file) do
  last_form = plural_mod.nplurals(locale) - 1

  for form <- 0..last_form, not Map.has_key?(translation.msgstr, form) do
    _ =
      Logger.error([
        "#{file}:#{translation.po_source_line}: translation is missing plural form ",
        Integer.to_string(form),
        " which is required by the locale ",
        inspect(locale)
      ])
  end

  :ok
end
# Wraps a list of quoted expressions into a single quoted `__block__` node.
defp block(contents) when is_list(contents), do: {:__block__, [], contents}
# Returns all the PO files in `dir` that follow the canonical
# "<locale>/LC_MESSAGES/<domain>.po" layout (see @po_wildcard).
defp po_files_in_dir(dir) do
  Path.wildcard(Path.join(dir, @po_wildcard))
end
# Lists the PO files under `translations_dir` together with their locale and
# domain, restricted to opts[:allowed_locales] when configured. A missing
# directory yields no files; any other filesystem error is raised.
defp known_po_files(translations_dir, opts) do
  case File.ls(translations_dir) do
    {:error, :enoent} ->
      []

    {:error, reason} ->
      raise File.Error, reason: reason, action: "list directory", path: translations_dir

    {:ok, _entries} ->
      po_files =
        for path <- po_files_in_dir(translations_dir) do
          {locale, domain} = locale_and_domain_from_path(path)
          %{locale: locale, path: path, domain: domain}
        end

      maybe_restrict_locales(po_files, opts[:allowed_locales])
  end
end
# With no restriction configured every PO file is kept; otherwise only files
# whose locale appears in the allowed list (atoms or strings) survive.
defp maybe_restrict_locales(po_files, nil), do: po_files

defp maybe_restrict_locales(po_files, allowed_locales) when is_list(allowed_locales) do
  allowed = MapSet.new(allowed_locales, &to_string/1)
  Enum.filter(po_files, fn po_file -> MapSet.member?(allowed, po_file[:locale]) end)
end
end
| 30.876812 | 101 | 0.618165 |
1ca86e68495a565d286fd2b8927459cf64f3cfad | 4,569 | exs | Elixir | test/glimesh/chat_parser_test.exs | mja00/glimesh.tv | 1f543bfdf68de269d0984041ed8394b78fb2158d | [
"MIT"
] | null | null | null | test/glimesh/chat_parser_test.exs | mja00/glimesh.tv | 1f543bfdf68de269d0984041ed8394b78fb2158d | [
"MIT"
] | null | null | null | test/glimesh/chat_parser_test.exs | mja00/glimesh.tv | 1f543bfdf68de269d0984041ed8394b78fb2158d | [
"MIT"
] | null | null | null | defmodule Glimesh.ChatParserTest do
use Glimesh.DataCase
import Glimesh.AccountsFixtures
alias Glimesh.Chat.ChatMessage
# The expected {:safe, iodata} values below spell out Phoenix.HTML iodata:
# 60/62 are "<"/">", 32 is a space, 61 is "=", 34 is a double quote.
describe "chat parser" do
test "parses a simple chat message" do
parsed = Glimesh.Chat.Parser.parse("Hello world")
assert parsed == ["Hello", "world"]
end
test "parses all the things" do
# Emote token, plain word and URL all in one message.
parsed = Glimesh.Chat.Parser.parse(":glimwow: hello https://example.com/")
assert Glimesh.Chat.Parser.to_raw_html(parsed) ==
"<img alt=\":glimwow:\" draggable=\"false\" height=\"32px\" src=\"/emotes/svg/glimwow.svg\" width=\"32px\"> hello <a href=\"https://example.com/\" rel=\"ugc\" target=\"_blank\">https://example.com/</a>"
end
test "parses an average glimoji chat message" do
# An emote next to text renders at the small 32px size.
parsed = Glimesh.Chat.Parser.parse("Hello :glimwow:")
assert parsed == [
"Hello",
{:safe,
[
60,
"img",
[
[32, "alt", 61, 34, ":glimwow:", 34],
[32, "draggable", 61, 34, "false", 34],
[32, "height", 61, 34, "32px", 34],
[32, "src", 61, 34, "/emotes/svg/glimwow.svg", 34],
[32, "width", 61, 34, "32px", 34]
],
62
]}
]
assert Glimesh.Chat.Parser.to_raw_html(parsed) ==
"Hello <img alt=\":glimwow:\" draggable=\"false\" height=\"32px\" src=\"/emotes/svg/glimwow.svg\" width=\"32px\">"
end
test "parses an an animated emoji when the config allows" do
# With allow_animated_glimjois the GIF variant is used instead of the SVG.
# (Typo "an an" in the test name and "glimjois" in the option key mirror the
# production code and cannot be changed here without touching behavior.)
parsed =
Glimesh.Chat.Parser.parse("Hello :glimfury:", %Glimesh.Chat.Parser.Config{
allow_animated_glimjois: true
})
assert Glimesh.Chat.Parser.to_raw_html(parsed) ==
"Hello <img alt=\":glimfury:\" draggable=\"false\" height=\"32px\" src=\"/emotes/gif/glimfury.gif\" width=\"32px\">"
end
test "DOES NOT parse a glimoji chat message with no spaces" do
# Emote tokens embedded inside a word must not be replaced.
parsed = Glimesh.Chat.Parser.parse("Hello:glimwow:world")
refute Glimesh.Chat.Parser.to_raw_html(parsed) ==
"Hello<img alt=\":glimwow:\" draggable=\"false\" height=\"32px\" src=\"/emotes/svg/glimwow.svg\" width=\"32px\">world"
end
test "parses a large glimoji" do
# A message that is only an emote renders at the large 128px size.
parsed = Glimesh.Chat.Parser.parse(":glimwow:")
assert parsed == [
{:safe,
[
60,
"img",
[
[32, "alt", 61, 34, ":glimwow:", 34],
[32, "draggable", 61, 34, "false", 34],
[32, "height", 61, 34, "128px", 34],
[32, "src", 61, 34, "/emotes/svg/glimwow.svg", 34],
[32, "width", 61, 34, "128px", 34]
],
62
]}
]
end
test "parses a link" do
# Links get rel="ugc" and open in a new tab.
parsed = Glimesh.Chat.Parser.parse("https://example.com/")
assert Glimesh.Chat.Parser.to_raw_html(parsed) ==
"<a href=\"https://example.com/\" rel=\"ugc\" target=\"_blank\">https://example.com/</a>"
end
test "ignores a link when config disabled" do
parsed =
Glimesh.Chat.Parser.parse("https://example.com/", %Glimesh.Chat.Parser.Config{
allow_links: false
})
assert Glimesh.Chat.Parser.to_raw_html(parsed) == "https://example.com/"
end
test "ignores a glimoji when config disabled" do
parsed =
Glimesh.Chat.Parser.parse(":glimwow:", %Glimesh.Chat.Parser.Config{
allow_glimojis: false
})
assert Glimesh.Chat.Parser.to_raw_html(parsed) == ":glimwow:"
end
# Provides `user` in the test context for the two injection tests below.
setup do
%{
user: user_fixture()
}
end
test "html cannot be injected", %{user: user} do
message = %ChatMessage{
message: "<h2>Hello world</h2>",
user: user
}
parsed = Glimesh.Chat.Parser.parse_and_render(message)
# NOTE(review): the expectation is the raw markup, i.e. the markup passes
# through unescaped here — escaping presumably happens at render time via
# the safe tuple; confirm this matches the parser's safety model.
assert Phoenix.HTML.safe_to_string(parsed) == "<h2>Hello world</h2>"
end
test "html cannot be injected with a functional parser", %{user: user} do
message = %ChatMessage{
message: "<h2>Hello :glimwow: world</h2>",
user: user
}
parsed = Glimesh.Chat.Parser.parse_and_render(message)
assert Phoenix.HTML.safe_to_string(parsed) ==
"<h2>Hello <img alt=\":glimwow:\" draggable=\"false\" height=\"32px\" src=\"/emotes/svg/glimwow.svg\" width=\"32px\"> world</h2>"
end
end
end
| 33.595588 | 217 | 0.539724 |
1ca87a09b230a3979adceae67f87974b9832e094 | 7,333 | ex | Elixir | lib/gorpo/announce/unit.ex | dgvncsz0f/exul | ce989851d8237f38a422c16122ca0affa17f9a3b | [
"BSD-2-Clause"
] | 3 | 2017-01-06T03:28:00.000Z | 2022-01-04T01:10:13.000Z | lib/gorpo/announce/unit.ex | dgvncsz0f/exul | ce989851d8237f38a422c16122ca0affa17f9a3b | [
"BSD-2-Clause"
] | 2 | 2017-11-28T17:13:33.000Z | 2017-11-28T19:54:28.000Z | lib/gorpo/announce/unit.ex | dgvncsz0f/exul | ce989851d8237f38a422c16122ca0affa17f9a3b | [
"BSD-2-Clause"
] | 1 | 2017-11-28T14:20:33.000Z | 2017-11-28T14:20:33.000Z | # Copyright (c) 2016, Diego Vinicius e Souza All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
defmodule Gorpo.Announce.Unit do
@moduledoc """
Registers a service on Consul and periodically updates its health
status. Normally, you shouldn't need to use this module
directly. Use Gorpo.Announce instead. The next example uses a dummy
driver which does nothing. You should use `Gorpo.Drivers.HTTPC` for
a real case.
iex> driver = Gorpo.Drivers.Echo.success([status: 200])
iex> consul = %Gorpo.Consul{endpoint: "http://localhost:8500", driver: driver}
iex> service = %Gorpo.Service{id: "foobar", name: "foobar", check: %Gorpo.Check{}}
iex> {:ok, pid} = Gorpo.Announce.Unit.start_link(service: service, consul: consul)
iex> Gorpo.Announce.Unit.stat(pid)
[service: :ok, heartbeat: :ok]
Notice that a service without a check ignores the heartbeat:
iex> driver = Gorpo.Drivers.Echo.success([status: 200])
iex> consul = %Gorpo.Consul{endpoint: "http://localhost:8500", driver: driver}
iex> service = %Gorpo.Service{id: "foobar", name: "foobar"}
iex> {:ok, pid} = Gorpo.Announce.Unit.start_link(service: service, consul: consul)
iex> Gorpo.Announce.Unit.stat(pid)
[service: :ok, heartbeat: :error]
"""
use GenServer
require Logger
# tick:  base interval between heartbeats; wait: the *next* interval, which
# grows exponentially on errors and resets to `tick` on success.
# status: map with :service / :ok and :heartbeat / :ok entries once each
# operation has succeeded.
defstruct [:service, :consul, :wait, :tick, :timer, :status]
@type start_options :: [
service: Gorpo.Service.t,
consul: Gorpo.Consul.t
]
@typep state :: %__MODULE__{
service: Gorpo.Service.t,
consul: Gorpo.Consul.t,
wait: pos_integer,
tick: pos_integer,
timer: :timer.tref | nil,
status: map
}
@spec stat(pid) :: [service: :ok | :error, heartbeat: :ok | :error]
@doc """
Returns a keyword list with the status of the service registration and
heartbeat.
"""
def stat(pid),
do: GenServer.call(pid, :stat)
@spec start_link(start_options) :: {:ok, pid}
@doc """
Starts this process.
Expects a keyword which describes the service to register and the Consul
configuration.
"""
def start_link(state),
do: GenServer.start_link(__MODULE__, state)
@doc """
Will register the service and perform the first health check update
synchronously. An error registering the service or updating the
check status will not impede the process initialization.
Keep in mind that this may take a while as it will wait for both the
service registration and check update responses, which may take
arbitrarily long depending on the consul backend in use.
"""
def init(params) do
service = params[:service]
tick = tickof(service)
state = %__MODULE__{
service: service,
consul: params[:consul],
tick: tick,
wait: tick,
status: %{}
}
# The first tick runs synchronously inside init; the assertive match
# crashes startup if handle_info ever stops returning {:noreply, state}.
{:noreply, state} = handle_info(:tick, state)
Logger.info("#{__MODULE__} register #{service.name}.#{service.id}: #{state.status[:service]}")
{:ok, state}
end
@spec terminate(term, state) :: :ok | :error
@doc """
Deregister the service on Consul. Returns `:ok` on success or `:error`
otherwise.
"""
def terminate(_reason, state) do
# Stop the pending tick before deregistering.
if state.timer do
Process.cancel_timer(state.timer)
end
service = state.service
{status, _} = Gorpo.Consul.service_deregister(state.consul, service.id)
Logger.info("#{__MODULE__} deregister #{service.name}.#{service.id}: #{status}")
status
end
# Performs one registration/heartbeat round and reschedules itself:
# on success the next tick fires after the base interval; on error the wait
# doubles (capped at 300_000 ms) and the cached status is cleared so the
# next round re-registers the service.
@doc false
def handle_info(:tick, state) do
if state.timer do
Process.cancel_timer(state.timer)
end
service = state.service
# Remember whether the service was already registered so we only log the
# recovery ("ok") transition, not every successful tick.
status = Map.get(state.status, :service, :error)
name = "#{service.name}.#{service.id}"
case process_tick(state) do
{:ok, state} ->
unless status == :ok do
Logger.debug "#{__MODULE__} #{name}: ok"
end
timer = Process.send_after(self(), :tick, state.wait)
state = %{state| timer: timer, wait: state.tick}
{:noreply, state}
{:error, reason, state} ->
Logger.warn "#{__MODULE__} #{name}: #{inspect reason} [backoff: #{state.wait}]"
timer = Process.send_after(self(), :tick, state.wait)
state = %{
state|
timer: timer,
wait: min(state.wait * 2, 300_000),
status: %{}
}
{:noreply, state}
end
end
# Replies with the current registration/heartbeat status, defaulting each
# to :error when the corresponding step has not succeeded yet.
@doc false
def handle_call(:stat, _, state) do
reply = [
service: Map.get(state.status, :service, :error),
heartbeat: Map.get(state.status, :heartbeat, :error)
]
{:reply, reply, state}
end
# Registers the service first if not yet registered, then sends a heartbeat.
@spec process_tick(state) :: {:ok, state} | {:error, {:heartbeat | :service, term}, state}
defp process_tick(state) do
case Map.fetch(state.status, :service) do
{:ok, :ok} ->
do_heartbeat(state)
:error ->
with {:ok, state} <- do_service(state) do
do_heartbeat(state)
end
end
end
# Registers the service with Consul and caches the success under :service.
@spec do_service(state) :: {:ok, state} | {:error, {:service, term}, state}
defp do_service(state) do
case Gorpo.Consul.service_register(state.consul, state.service) do
{:ok, _} ->
{:ok, %{state| status: Map.put(state.status, :service, :ok)}}
error ->
{:error, {:service, error}, state}
end
end
# Pushes a passing check status to Consul; a service without a check is a
# no-op success (its reported heartbeat status stays :error).
@spec do_heartbeat(state) :: {:ok, state} | {:error, {:heartbeat, term}, state}
defp do_heartbeat(state) do
if state.service.check do
status = Gorpo.Status.passing
case Gorpo.Consul.check_update(state.consul, state.service, status) do
{:ok, _} ->
{:ok, %{state| status: Map.put(state.status, :heartbeat, :ok)}}
error ->
{:error, {:heartbeat, error}, state}
end
else
{:ok, state}
end
end
# Derives the tick interval (ms) from the check's TTL string ("2h"/"5m"/
# "30s", or a bare number taken as ms): a fifth of the TTL, floored at 50 ms.
# Without a check the unit still re-registers every 5 minutes.
@spec tickof(Gorpo.Service.t) :: pos_integer
defp tickof(service) do
if service.check do
ms = case Integer.parse(service.check.ttl) do
{n, "h"} -> n * 1000 * 60 * 60
{n, "m"} -> n * 1000 * 60
{n, "s"} -> n * 1000
{n, ""} -> n
end
ms
|> div(5)
|> max(50)
else
5 * 1000 * 60
end
end
end
| 30.810924 | 98 | 0.646257 |
1ca8918845e8a647c4a8d6582f4da6b79fac5d1f | 448 | exs | Elixir | Chapter07/apps/elixir_drip/priv/repo/migrations/20180112095312_create_media.exs | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 28 | 2018-08-09T05:05:29.000Z | 2022-03-14T06:59:07.000Z | Chapter07/apps/elixir_drip/priv/repo/migrations/20180112095312_create_media.exs | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 1 | 2019-02-11T09:11:33.000Z | 2019-05-06T06:40:19.000Z | Chapter07/apps/elixir_drip/priv/repo/migrations/20180112095312_create_media.exs | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 8 | 2018-08-09T14:53:02.000Z | 2020-12-14T19:31:21.000Z | defmodule ElixirDrip.Repo.Migrations.Storage.Media do
use Ecto.Migration
def change do
create table(:storage_media, primary_key: false) do
add :id, :string, primary_key: true, size: 27
add :file_name, :string
add :full_path, :string
add :metadata, :map
add :encryption_key, :string
add :storage_key, :string
add :uploaded_at, :utc_datetime
timestamps(type: :utc_datetime)
end
end
end
| 24.888889 | 55 | 0.674107 |
1ca8921e55c9ea4ab6ab93966eec504908022345 | 2,730 | ex | Elixir | lib/engine/parser/outlet.ex | vacarsu/snap_framework | 2dd3a6c77a01e85b8cd5c9566e27bf3fe4eb433e | [
"MIT"
] | 5 | 2021-06-23T17:46:08.000Z | 2022-02-23T21:12:06.000Z | lib/engine/parser/outlet.ex | vacarsu/snap_framework | 2dd3a6c77a01e85b8cd5c9566e27bf3fe4eb433e | [
"MIT"
] | null | null | null | lib/engine/parser/outlet.ex | vacarsu/snap_framework | 2dd3a6c77a01e85b8cd5c9566e27bf3fe4eb433e | [
"MIT"
] | 1 | 2021-08-02T13:33:17.000Z | 2021-08-02T13:33:17.000Z | defmodule SnapFramework.Parser.Outlet do
require Logger
@moduledoc false
def run(ast, assigns) do
ast
|> parse(assigns)
end
  # -----------------------------------------------
  # render the list of slot component passed to the
  # outlet component if it matches the slot_name
  # -----------------------------------------------
  # Expands a named outlet node into a quoted keyword list describing the
  # component bound to that slot. Slot entries are {module, data, opts} tuples;
  # per-slot opts (cmp_opts) take precedence over the outlet's own opts.
  # A missing or nil-module slot expands to a quoted nil.
  def parse({:outlet, meta, [slot_name, opts]}, assigns) do
    # graph_val = Macro.var(:graph_val, SnapFramework.Engine)
    slot = assigns[:state][:data][:slots][slot_name] || nil
    case slot do
      {nil, _, _} ->
        quote do
          nil
        end
      {cmp, data, nil} ->
        # `line:` preserves the template's line number for compile diagnostics.
        quote line: meta[:line] || 0 do
          # unquote(graph_val) =
          [
            type: :component,
            module: unquote(cmp),
            data: unquote(data),
            opts: unquote(opts)
          ]
        end
      {cmp, data, cmp_opts} ->
        quote line: meta[:line] || 0 do
          # unquote(graph_val) =
          [
            type: :component,
            module: unquote(cmp),
            data: unquote(data),
            opts: unquote(cmp_opts)
          ]
        end
      _ ->
        quote do
          nil
        end
    end
  end
  # -----------------------------------------------
  # render the slot component for unnamed outlet
  # used typically to render a list of components
  # -----------------------------------------------
  # Expands an unnamed outlet into a quoted list of component descriptors, one
  # per slot, accumulated in slot order via List.insert_at at the tail.
  # NOTE(review): `slots` may be nil here (the `|| nil` is a no-op), and
  # Enum.reduce/3 over nil would raise — presumably the engine guarantees at
  # least an empty slot list; confirm against the caller.
  # NOTE(review): each slot is assumed to be a {:slot, {module, data, opts}}
  # tuple — any other shape crashes the reducer's function-clause match.
  def parse({:outlet, _meta, [opts]}, assigns) do
    # graph_val = Macro.var(:graph_val, SnapFramework.Engine)
    slots = assigns[:state][:data][:slots] || nil
    Enum.reduce(slots, [], fn {:slot, slot}, acc ->
      case slot do
        {nil, _, _} ->
          quote do
            unquote(acc)
          end
        {cmp, data, nil} ->
          quote do
            # var!(cmp) = cmp
            # unquote(cmp)(unquote(acc), unquote(data), unquote(opts))
            List.insert_at(
              unquote(acc),
              length(unquote(acc)),
              type: :component,
              module: unquote(cmp),
              data: unquote(data),
              opts: unquote(opts)
            )
          end
        {cmp, data, cmp_opts} ->
          quote do
            # var!(cmp) = cmp
            # unquote(cmp)(unquote(acc), unquote(data), Vector2.add(unquote(opts), unquote(cmp_opts)))
            List.insert_at(
              unquote(acc),
              length(unquote(acc)),
              type: :component,
              module: unquote(cmp),
              data: unquote(data),
              opts: unquote(cmp_opts)
            )
          end
        _ ->
          quote do
            unquote(acc)
          end
      end
    end)
  end
def parse(ast, _assigns), do: ast
end
| 25.514019 | 102 | 0.456044 |
1ca89911313e55035026379b18d76d1323906fe8 | 46 | exs | Elixir | test/figlet_test.exs | mjolnir-mud/figlet | 022a492c79b6928bc827763c57c91ce4b82dbb05 | [
"Apache-2.0"
] | 2 | 2021-06-24T13:16:16.000Z | 2021-12-30T08:53:48.000Z | test/figlet_test.exs | mjolnir-mud/figlet | 022a492c79b6928bc827763c57c91ce4b82dbb05 | [
"Apache-2.0"
] | 3 | 2021-06-24T13:16:42.000Z | 2022-03-23T14:25:18.000Z | test/figlet_test.exs | mjolnir-mud/figlet | 022a492c79b6928bc827763c57c91ce4b82dbb05 | [
"Apache-2.0"
] | 1 | 2021-07-15T11:23:49.000Z | 2021-07-15T11:23:49.000Z | defmodule FigletTest do
  # Placeholder test module: pulls in ExUnit's DSL but defines no cases yet.
  use ExUnit.Case
end
| 11.5 | 23 | 0.804348 |
1ca89f22a77eaef430c1a7e53e026eb4271ca104 | 54 | exs | Elixir | test/grpc_prometheus_test.exs | tony612/elixir-grpc-prometheus | c5bb19463e5292accdb2582a096b4ee5abe99686 | [
"MIT"
] | 1 | 2018-05-23T01:50:55.000Z | 2018-05-23T01:50:55.000Z | test/grpc_prometheus_test.exs | tony612/elixir-grpc-prometheus | c5bb19463e5292accdb2582a096b4ee5abe99686 | [
"MIT"
] | null | null | null | test/grpc_prometheus_test.exs | tony612/elixir-grpc-prometheus | c5bb19463e5292accdb2582a096b4ee5abe99686 | [
"MIT"
] | null | null | null | defmodule GRPCPrometheusTest do
  # Placeholder test module: pulls in ExUnit's DSL but defines no cases yet.
  use ExUnit.Case
end
| 13.5 | 31 | 0.833333 |
1ca91a5beb83dddb74fd197cc1609b863da87162 | 10,857 | ex | Elixir | test/support/fake_servers/wild_apricot.ex | dhadka/website | e67c23d7052b4ef00a1af52b0b9ebc952d34776e | [
"Apache-2.0"
] | null | null | null | test/support/fake_servers/wild_apricot.ex | dhadka/website | e67c23d7052b4ef00a1af52b0b9ebc952d34776e | [
"Apache-2.0"
] | null | null | null | test/support/fake_servers/wild_apricot.ex | dhadka/website | e67c23d7052b4ef00a1af52b0b9ebc952d34776e | [
"Apache-2.0"
] | null | null | null | defmodule Erlef.Test.WildApricot do
  @moduledoc false
  # n.b., strings such as "account_id", "admin" and "basic_member" are used in place of integer ids
  # in this model.
  # The following are base profiles for different personas. Said base profiles are merged
  # with default profile data. See test/support/models/wild_apricot/data.ex for more detail.
  # n.b., all profiles except the basic member profile have erlef_app_id(s) (UUID(s)). This
  # is for testing the mechanism in the app that is responsible for populating this field.
  # Otherwise, testing other features would be a bit of pain since we don't persist data in
  # ets between runtime(s)
  #
  # Persona: site administrator — the only profile with IsAccountAdministrator
  # and a "Group participation" field granting the "Website Admin" group.
  @base_admin_profile %{
    "DisplayName" => "Admin",
    "Email" => "admin@foo.bar",
    "FirstName" => "Admin",
    "Id" => "admin",
    "LastName" => "Admin",
    "MembershipLevel" => %{
      "Id" => "1234567",
      "Name" => "Admin Admin",
      "Url" => "https://api.wildapricot.org/v2.2/accounts/010101/MembershipLevels/1234567"
    },
    "IsAccountAdministrator" => true,
    "FieldValues" => [
      %{
        "FieldName" => "Group participation",
        "SystemCode" => "Groups",
        "Value" => [
          %{"Id" => "654321", "Label" => "Website Admin"}
        ]
      },
      %{
        "FieldName" => "erlef_app_id",
        "SystemCode" => "custom-12523894",
        "Value" => "55d3a339-a768-4410-839a-bd7e29616450"
      }
    ]
  }
  # Persona: basic member — intentionally has no "FieldValues" (no erlef_app_id),
  # see the module-level note above.
  @base_basic_profile %{
    "DisplayName" => "Basic Member",
    "Email" => "basic_member@foo.bar",
    "FirstName" => "Basic",
    "LastName" => "Member",
    "Id" => "basic_member",
    "MembershipLevel" => %{
      "Id" => "1234567",
      "Name" => "Basic Membership",
      "Url" => "https://api.wildapricot.org/v2.2/accounts/010101/MembershipLevels/1234567"
    }
  }
  # Persona: working group chair — a basic member with an erlef_app_id.
  @base_wg_chair_profile %{
    "DisplayName" => "Working Group Chair",
    "Email" => "wg_chair@foo.bar",
    "FirstName" => "Working Group",
    "LastName" => "Chair",
    "Id" => "wg_chair",
    "MembershipLevel" => %{
      "Id" => "1234567",
      "Name" => "Basic Membership",
      "Url" => "https://api.wildapricot.org/v2.2/accounts/010101/MembershipLevels/1234567"
    },
    "FieldValues" => [
      %{
        "FieldName" => "erlef_app_id",
        "SystemCode" => "custom-12523894",
        "Value" => "4ad3c890-856f-4fd9-859c-ed0ef8e05a54"
      }
    ]
  }
  # Persona: annual supporting member.
  @base_annual_profile %{
    "DisplayName" => "Annual Supporting Member",
    "Email" => "annual_supporting_member@foo.bar",
    "FirstName" => "Annual",
    "Id" => "annual_member",
    "LastName" => "Supporting Member",
    "MembershipLevel" => %{
      "Id" => "1234567",
      "Name" => "Annual Supporting Membership",
      "Url" => "https://api.wildapricot.org/v2.2/accounts/010101/MembershipLevels/1234567"
    },
    "FieldValues" => [
      %{
        "FieldName" => "erlef_app_id",
        "SystemCode" => "custom-12523894",
        "Value" => "874e3414-ebf2-4ed5-b0fb-eb704b18cd05"
      }
    ]
  }
  # NOTE(review): this attribute is rebound ~20 lines below; this first
  # "Lifetime Supporting Member" value is therefore dead data and is never
  # seen by @stub_data_map. Confirm which of the two definitions is intended.
  @base_lifetime_profile %{
    "DisplayName" => "Lifetime Supporting Member",
    "Email" => "life_supporting_member@foo.bar",
    "FirstName" => "Lifetime",
    "Id" => "lifetime_member",
    "LastName" => "Supporting Member",
    "MembershipLevel" => %{
      "Id" => "1234567",
      "Name" => "Lifetime Supporting Membership",
      "Url" => "https://api.wildapricot.org/v2.2/accounts/010101/MembershipLevels/1234567"
    },
    "FieldValues" => [
      %{
        "FieldName" => "erlef_app_id",
        "SystemCode" => "custom-12523894",
        "Value" => "494df2a5-1c4d-4544-a9b9-a12fb2e49b8d"
      }
    ]
  }
  # NOTE(review): this REDEFINES @base_lifetime_profile declared earlier in the
  # module — module attributes rebind, so only this value reaches
  # @stub_data_map. Its membership level name is "Fellow", which duplicates the
  # fellow profile below; confirm whether that is intentional.
  @base_lifetime_profile %{
    "DisplayName" => "Lifetime Member",
    "Email" => "lifetime_member@foo.bar",
    "FirstName" => "Lifetime",
    "Id" => "lifetime_member",
    "LastName" => "Member",
    "MembershipLevel" => %{
      "Id" => "1234567",
      "Name" => "Fellow",
      "Url" => "https://api.wildapricot.org/v2.2/accounts/010101/MembershipLevels/1234567"
    },
    "FieldValues" => [
      %{
        "FieldName" => "erlef_app_id",
        "SystemCode" => "custom-12523894",
        "Value" => "06339432-fa6d-4f82-890d-5c72493ae476"
      }
    ]
  }
  # Persona: fellow.
  @base_fellow_profile %{
    "DisplayName" => "Fellow Member",
    "Email" => "fellow@foo.bar",
    "FirstName" => "Fellow",
    "Id" => "fellow_member",
    "LastName" => "Member",
    "MembershipLevel" => %{
      "Id" => "1234567",
      "Name" => "Fellow",
      "Url" => "https://api.wildapricot.org/v2.2/accounts/010101/MembershipLevels/1234567"
    },
    "FieldValues" => [
      %{
        "FieldName" => "erlef_app_id",
        "SystemCode" => "custom-12523894",
        "Value" => "5d5e213f-476f-496d-841d-d93af03b6ed7"
      }
    ]
  }
  # Static data for each type of member used in the return from the ../me endpoint
  # (also seeded into the ETS table by start/1, keyed by these string ids).
  @stub_data_map %{
    "admin" => Erlef.Test.WildApricot.Data.contact_data(@base_admin_profile),
    "wg_chair" => Erlef.Test.WildApricot.Data.contact_data(@base_wg_chair_profile),
    "basic_member" => Erlef.Test.WildApricot.Data.contact_data(@base_basic_profile),
    "annual_member" => Erlef.Test.WildApricot.Data.contact_data(@base_annual_profile),
    "lifetime_member" => Erlef.Test.WildApricot.Data.contact_data(@base_lifetime_profile),
    "fellow_member" => Erlef.Test.WildApricot.Data.contact_data(@base_fellow_profile)
  }
  # Plug.Router pipeline: match the route, parse urlencoded/JSON bodies, dispatch.
  use Plug.Router
  plug(:match)
  plug Plug.Parsers, parsers: [:urlencoded, :json], json_decoder: Jason
  plug(:dispatch)
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :worker,
restart: :permanent,
shutdown: 500
}
end
def init(options), do: options
  # Supervisor entry point; the options are ignored, start/1 uses defaults.
  def start_link(_opts) do
    start()
  end
  # Creates the public named ETS table, seeds it with one row per stub persona,
  # and boots a Cowboy HTTP listener for this router on port 9999.
  def start(ref \\ __MODULE__) do
    _tid = :ets.new(__MODULE__, [:named_table, :public, {:write_concurrency, true}])
    Enum.each(@stub_data_map, fn {k, v} -> true = :ets.insert(__MODULE__, {k, v}) end)
    Plug.Cowboy.http(__MODULE__, [], ref: ref, port: 9999)
  end
  # Shuts down the Cowboy listener started by start/1.
  def stop(ref \\ __MODULE__) do
    Plug.Cowboy.shutdown(ref)
  end
  # OAuth token endpoint. gen_auth_token/1 derives a token from the grant
  # parameters; the literal code "invalid" simulates a rejected authorization
  # code (HTTP 500 with Wild Apricot's invalid_grant error body), any other
  # value is echoed back as a bearer token with an "rt_"-prefixed refresh token.
  post "/auth/token" do
    case gen_auth_token(conn) do
      "invalid" ->
        data = %{
          "error" => "invalid_grant",
          "error_description" => "Authorization code was not found.",
          "error_uri" => nil
        }
        conn
        |> put_resp_content_type("application/json")
        |> send_resp(500, Jason.encode!(data))
      code ->
        data = %{
          "Permissions" => [
            %{
              "AccountId" => "account_id",
              "AvailableScopes" => ["contacts_me", "account_view"],
              "SecurityProfileId" => "42"
            }
          ],
          "access_token" => code,
          "expires_in" => 1_800,
          "refresh_token" => "rt_#{code}",
          "token_type" => "Bearer"
        }
        conn
        |> put_resp_content_type("application/json")
        |> send_resp(200, Jason.encode!(data))
    end
  end
  # Token-expiry stub: always succeeds with an empty JSON object.
  get "/auth/expiretoken" do
    conn
    |> put_resp_content_type("application/json")
    |> send_resp(200, Jason.encode!(%{}))
  end
  # Refresh-token-deletion stub: always succeeds with an empty JSON object.
  get "/auth/deleterefreshtoken" do
    conn
    |> put_resp_content_type("application/json")
    |> send_resp(200, Jason.encode!(%{}))
  end
  # Client-credentials token stub for the API root: returns a fixed access
  # token with a broad read-only scope set and no refresh token.
  post "/v2.2" do
    data = %{
      "Permissions" => [
        %{
          "AccountId" => "account_id",
          "AvailableScopes" => [
            "contacts_view",
            "finances_view",
            "emails_view",
            "events_view",
            "event_registrations_view",
            "account_view",
            "membership_levels_view"
          ],
          "SecurityProfileId" => "42"
        }
      ],
      "access_token" => "3dOo4f-KEg3D9t3nBDzSjbyjCTo-",
      "expires_in" => 1_800,
      "refresh_token" => "not_requested",
      "token_type" => "Bearer"
    }
    conn
    |> put_resp_content_type("application/json")
    |> send_resp(200, Jason.encode!(data))
  end
  # "Who am I" endpoint: the bearer token doubles as the persona key into
  # @stub_data_map (e.g. "admin", "basic_member"). An unknown token yields
  # nil lookups and therefore null Id/DisplayName in the JSON.
  get "/v2.2/accounts/:aid/contacts/me" do
    token = get_auth_token(conn)
    me_data = %{
      "Id" => @stub_data_map[token]["Id"],
      "DisplayName" => @stub_data_map[token]["DisplayName"]
    }
    conn
    |> put_resp_content_type("application/json")
    |> send_resp(200, Jason.encode!(me_data))
  end
  # Contact search: supports exactly one filter shape,
  # `$filter=erlef_app_id eq <uuid>` with a 36-char UUID; any other query
  # is answered with a bare 500 (the real API is not modelled further).
  get "/v2.2/accounts/:aid/contacts" do
    case conn.params do
      %{"$filter" => <<"erlef_app_id eq ", uuid::binary-size(36)>>} ->
        contacts = lookup_by_uuid(uuid)
        conn
        |> put_resp_content_type("application/json")
        |> send_resp(200, Jason.encode!(%{"Contacts" => contacts}))
      _ ->
        send_resp(conn, 500, "")
    end
  end
  # Fetch a single contact by persona id from the ETS table; 404 when absent.
  get "/v2.2/accounts/:aid/contacts/:id" do
    case :ets.lookup(__MODULE__, id) do
      [{_id, contact}] ->
        conn
        |> put_resp_content_type("application/json")
        |> send_resp(200, Jason.encode!(contact))
      [] ->
        send_resp(conn, 404, "")
    end
  end
  # Update a contact: merges the request params over the stored contact (see
  # update_contact/3), persists the result in ETS and echoes it back; 404 when
  # the contact id is unknown.
  put "/v2.2/accounts/:aid/contacts/:id" do
    case :ets.lookup(__MODULE__, id) do
      [{_id, contact}] ->
        contact = update_contact(id, contact, conn.params)
        conn
        |> put_resp_content_type("application/json")
        |> send_resp(200, Jason.encode!(contact))
      [] ->
        send_resp(conn, 404, "")
    end
  end
  # Applies a partial update: top-level keys are shallow-merged (params win),
  # while "FieldValues" gets field-level merge semantics — incoming entries
  # replace stored entries with the same "FieldName", untouched entries are
  # preserved. The merged contact is written back to ETS and returned.
  defp update_contact(id, contact, params) do
    new = Map.merge(contact, params)
    new =
      case Map.has_key?(params, "FieldValues") do
        true ->
          specified_fields = Enum.map(params["FieldValues"], & &1["FieldName"])
          # Keep only stored field values whose name is NOT being replaced.
          without = Enum.reject(contact["FieldValues"], &(&1["FieldName"] in specified_fields))
          Map.put(new, "FieldValues", params["FieldValues"] ++ without)
        false ->
          new
      end
    true = :ets.insert(__MODULE__, {id, new})
    new
  end
  # Finds the (at most one) contact whose erlef_app_id field equals `uuid`.
  # Iterates the persona keys but reads the CURRENT contact from ETS, so
  # updates made through the PUT route are visible to searches.
  # NOTE(review): if a stored contact has no "FieldValues" key, `fv` is nil and
  # Enum.find/2 inside find_uuid_in_fv/2 would raise — presumably
  # Data.contact_data/1 always supplies the key; confirm.
  defp lookup_by_uuid(uuid) do
    res =
      Enum.find(@stub_data_map, fn {k, _v} ->
        [{_, contact}] = :ets.lookup(__MODULE__, k)
        fv = contact["FieldValues"]
        case find_uuid_in_fv(fv, uuid) do
          %{"Value" => ^uuid} ->
            true
          _ ->
            false
        end
      end)
    # The API shape is a list of contacts: one match or empty.
    case res do
      {_k, contact} -> [contact]
      _ -> []
    end
  end
defp find_uuid_in_fv(fv, uuid) do
Enum.find(fv, fn f -> f["FieldName"] == "erlef_app_id" and f["Value"] == uuid end)
end
def gen_auth_token(conn) do
case conn.params["grant_type"] do
"authorization_code" ->
conn.params["code"]
"refresh_token" ->
<<"rt_", rt_code::binary>> = conn.params["refresh_token"]
rt_code
"client_credentials" ->
"client_token"
end
end
def get_auth_token(conn) do
conn
|> Plug.Conn.get_req_header("authorization")
|> parse_bearer()
end
def parse_bearer([<<"Bearer ", token::binary>>]), do: token
def parse_bearer(_), do: {:error, :invalid_token}
end
| 27.838462 | 99 | 0.576863 |
1ca92d468fd661f003705395d9a8b01bb499bfa5 | 2,161 | ex | Elixir | lib/zen_monitor/metrics.ex | LaudateCorpus1/zen_monitor | a01589e2df68ad643974b79a05fc3212587eca53 | [
"MIT"
] | 84 | 2020-04-29T12:15:29.000Z | 2022-02-15T09:52:37.000Z | lib/zen_monitor/metrics.ex | LaudateCorpus1/zen_monitor | a01589e2df68ad643974b79a05fc3212587eca53 | [
"MIT"
] | null | null | null | lib/zen_monitor/metrics.ex | LaudateCorpus1/zen_monitor | a01589e2df68ad643974b79a05fc3212587eca53 | [
"MIT"
] | 7 | 2020-10-10T05:30:14.000Z | 2022-03-17T09:48:54.000Z | defmodule ZenMonitor.Metrics do
@moduledoc """
Metrics helper for monitoring the ZenMonitor system.
"""
alias Instruments.Probe
  @doc """
  Registers various probes for the ZenMonitor System.
  - ERTS message_queue_len for the `ZenMonitor.Local` and `ZenMonitor.Proxy` processes.
  - Internal Batch Queue length for `ZenMonitor.Local` (dispatches to be delivered)
  - ETS table size for References (number of monitors)
  - ETS table size for Subscribers (number of monitored local processes * interested remotes)
  """
  @spec register() :: :ok
  def register do
    # Mailbox depth of the local monitor process.
    Probe.define!(
      "zen_monitor.local.message_queue_len",
      :gauge,
      mfa: {__MODULE__, :message_queue_len, [ZenMonitor.Local]}
    )
    # Mailbox depth of the proxy process.
    Probe.define!(
      "zen_monitor.proxy.message_queue_len",
      :gauge,
      mfa: {__MODULE__, :message_queue_len, [ZenMonitor.Proxy]}
    )
    # Pending dispatches awaiting delivery.
    Probe.define!(
      "zen_monitor.local.batch_length",
      :gauge,
      mfa: {ZenMonitor.Local, :batch_length, []}
    )
    # Row count of the references table (active monitors).
    Probe.define!(
      "zen_monitor.local.ets.references.size",
      :gauge,
      mfa: {__MODULE__, :table_size, [ZenMonitor.Local.Tables.references()]}
    )
    # Row count of the subscribers table.
    Probe.define!(
      "zen_monitor.proxy.ets.subscribers.size",
      :gauge,
      mfa: {__MODULE__, :table_size, [ZenMonitor.Proxy.Tables.subscribers()]}
    )
    :ok
  end
@doc """
Given a pid or a registered name, this will return the message_queue_len as reported by
`Process.info/2`
"""
@spec message_queue_len(target :: nil | pid() | atom()) :: nil | integer()
def message_queue_len(nil), do: nil
def message_queue_len(target) when is_pid(target) do
case Process.info(target, :message_queue_len) do
{:message_queue_len, len} -> len
_ -> nil
end
end
def message_queue_len(target) when is_atom(target) do
target
|> Process.whereis()
|> message_queue_len()
end
@doc """
Given a table identifier, returns the size as reported by `:ets.info/2`
"""
@spec table_size(:ets.tid()) :: nil | integer()
def table_size(tid) do
case :ets.info(tid, :size) do
:undefined -> nil
size -> size
end
end
end
| 26.353659 | 95 | 0.657103 |
1ca93ff4191cfe163a7824fb14736af27adb25d7 | 427 | ex | Elixir | year_2019/lib/day_07/signal_maximizer.ex | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | year_2019/lib/day_07/signal_maximizer.ex | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | year_2019/lib/day_07/signal_maximizer.ex | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | defmodule Day07.SignalMaximizer do
  # Reads the day-7 Intcode program from the puzzle input, builds the machine,
  # and returns the best amplifier signal over all phase orderings.
  def run(amplifier, phases) do
    InputFile.contents_of(7, :read)
    |> Intcode.build
    |> max_signal_for(amplifier, phases)
  end
def max_signal_for(machine, amplifier, phases) do
phases
|> permute
|> Enum.map(&(amplifier.signal_for(machine, &1)))
|> Enum.max
end
def permute([]), do: [[]]
def permute(l), do: for h <- l, t <- permute(l -- [h]), do: [h|t]
end
| 23.722222 | 67 | 0.627635 |
1ca9461e9a375c6baa09bb5fe687e91e2c605da9 | 102 | ex | Elixir | phoenix2/lib/phoenix2/endpoint.ex | ronnyhartenstein/benchmarking-helloworld-http | ecb0b3d800a2d9c426ce4dd0d0b198a109e812d5 | [
"MIT"
] | null | null | null | phoenix2/lib/phoenix2/endpoint.ex | ronnyhartenstein/benchmarking-helloworld-http | ecb0b3d800a2d9c426ce4dd0d0b198a109e812d5 | [
"MIT"
] | null | null | null | phoenix2/lib/phoenix2/endpoint.ex | ronnyhartenstein/benchmarking-helloworld-http | ecb0b3d800a2d9c426ce4dd0d0b198a109e812d5 | [
"MIT"
] | null | null | null | defmodule Phoenix2.Endpoint do
  # Minimal Phoenix endpoint: configuration is read from the :phoenix2 OTP app
  # and all requests go straight to the router (no asset/logging plugs).
  use Phoenix.Endpoint, otp_app: :phoenix2
  plug Phoenix2.Router
end
| 17 | 42 | 0.794118 |
1ca94ca9d2d8a66f939c5a60c7280865f3e13c0d | 96 | exs | Elixir | examples/new_relic_sandbox_umbrella/apps/new_relic_sandbox_web/test/new_relic_sandbox_web/views/page_view_test.exs | surgeventures/new_relic_integration | 5417f15f7dd17022ee927e0cdd4fca32529ed278 | [
"MIT"
] | null | null | null | examples/new_relic_sandbox_umbrella/apps/new_relic_sandbox_web/test/new_relic_sandbox_web/views/page_view_test.exs | surgeventures/new_relic_integration | 5417f15f7dd17022ee927e0cdd4fca32529ed278 | [
"MIT"
] | 2 | 2019-09-09T08:26:44.000Z | 2019-11-05T04:31:37.000Z | examples/new_relic_sandbox_umbrella/apps/new_relic_sandbox_web/test/new_relic_sandbox_web/views/page_view_test.exs | surgeventures/new_relic_integration | 5417f15f7dd17022ee927e0cdd4fca32529ed278 | [
"MIT"
] | 1 | 2019-09-06T09:27:27.000Z | 2019-09-06T09:27:27.000Z | defmodule NewRelicSandboxWeb.PageViewTest do
  # Placeholder view test: pulls in the app's ConnCase (async-safe) but
  # defines no cases yet.
  use NewRelicSandboxWeb.ConnCase, async: true
end
| 24 | 46 | 0.854167 |
1ca955429e5a106b10b7f853f061465bd42eb1ba | 1,073 | ex | Elixir | lib/ash/notifier/notifier.ex | kingshalaby1/ash | 1e23199bec1729d66d3fc64de18856525f638549 | [
"MIT"
] | null | null | null | lib/ash/notifier/notifier.ex | kingshalaby1/ash | 1e23199bec1729d66d3fc64de18856525f638549 | [
"MIT"
] | null | null | null | lib/ash/notifier/notifier.ex | kingshalaby1/ash | 1e23199bec1729d66d3fc64de18856525f638549 | [
"MIT"
] | null | null | null | defmodule Ash.Notifier do
@moduledoc """
A notifier is an extension that receives various events
"""
@callback notify(Ash.notification()) :: :ok
  # `use Ash.Notifier` marks the caller as implementing this behaviour,
  # committing it to the notify/1 callback.
  defmacro __using__(_) do
    quote do
      @behaviour Ash.Notifier
    end
  end
  @doc """
  Sends any notifications that can be sent, and returns the rest.
  A notification can only be sent if you are not currently in a transaction
  for the resource in question.
  """
  @spec notify(list(Ash.notification()) | Ash.notification()) :: list(Ash.notification())
  def notify(resource_notifications) do
    # Group by resource, then partition: groups whose resource is mid-transaction
    # stay unsent; the rest are dispatched below.
    {unsent, to_send} =
      resource_notifications
      |> List.wrap()
      |> Enum.group_by(& &1.resource)
      |> Enum.split_with(fn {resource, _} ->
        Ash.DataLayer.in_transaction?(resource)
      end)
    # Fan out: every notifier configured on the resource receives every
    # notification in that resource's group (side effects only).
    for {resource, notifications} <- to_send do
      for notifier <- Ash.Resource.notifiers(resource) do
        for notification <- notifications do
          notifier.notify(notification)
        end
      end
    end
    # `unsent` is still {resource, notifications} pairs — flatten back to a
    # plain notification list for the caller to retry later.
    unsent
    |> Enum.map(&elem(&1, 1))
    |> List.flatten()
  end
end
| 25.547619 | 89 | 0.649581 |
1ca95b678e913c5392c827165ce7e10964add7e7 | 1,763 | ex | Elixir | debian/manpage.1.ex | santeri/pydiststore | 227074a152e9e6a86abf22c3d4b2e8d43ec2f659 | [
"MIT"
] | 1 | 2016-05-09T05:37:17.000Z | 2016-05-09T05:37:17.000Z | debian/manpage.1.ex | santeri/pydiststore | 227074a152e9e6a86abf22c3d4b2e8d43ec2f659 | [
"MIT"
] | null | null | null | debian/manpage.1.ex | santeri/pydiststore | 227074a152e9e6a86abf22c3d4b2e8d43ec2f659 | [
"MIT"
] | null | null | null | .\" Hey, EMACS: -*- nroff -*-
.\" First parameter, NAME, should be all caps
.\" Second parameter, SECTION, should be 1-8, maybe w/ subsection
.\" other parameters are allowed: see man(7), man(1)
.TH PYDISTSTORE SECTION "March 8, 2009"
.\" Please adjust this date whenever revising the manpage.
.\"
.\" Some roff macros, for reference:
.\" .nh disable hyphenation
.\" .hy enable hyphenation
.\" .ad l left justify
.\" .ad b justify to both left and right margins
.\" .nf disable filling
.\" .fi enable filling
.\" .br insert line break
.\" .sp <n> insert n+1 empty lines
.\" for manpage-specific macros, see man(7)
.SH NAME
pydiststore \- program to do something
.SH SYNOPSIS
.B pydiststore
.RI [ options ] " files" ...
.br
.B bar
.RI [ options ] " files" ...
.SH DESCRIPTION
This manual page documents briefly the
.B pydiststore
and
.B bar
commands.
.PP
.\" TeX users may be more comfortable with the \fB<whatever>\fP and
.\" \fI<whatever>\fP escape sequences to invoke bold face and italics,
.\" respectively.
\fBpydiststore\fP is a program that...
.SH OPTIONS
These programs follow the usual GNU command line syntax, with long
options starting with two dashes (`-').
A summary of options is included below.
For a complete description, see the Info files.
.TP
.B \-h, \-\-help
Show summary of options.
.TP
.B \-v, \-\-version
Show version of program.
.SH SEE ALSO
.BR bar (1),
.BR baz (1).
.br
The programs are documented fully by
.IR "The Rise and Fall of a Fooish Bar" ,
available via the Info system.
.SH AUTHOR
pydiststore was written by <upstream author>.
.PP
This manual page was written by gray <santeri@santeri.se>,
for the Debian project (but may be used by others).
| 29.383333 | 70 | 0.67612 |
1ca95fce61847806be55b4720165b0a000fb44af | 438 | exs | Elixir | test/legato/report_test.exs | m0tivus/legato-ex | a59d5bb924774e089102f6f3e7745907b8871b49 | [
"MIT"
] | 15 | 2016-11-26T23:12:47.000Z | 2022-03-15T11:49:23.000Z | test/legato/report_test.exs | m0tivus/legato-ex | a59d5bb924774e089102f6f3e7745907b8871b49 | [
"MIT"
] | 2 | 2017-04-13T12:48:43.000Z | 2017-06-28T22:07:47.000Z | test/legato/report_test.exs | m0tivus/legato-ex | a59d5bb924774e089102f6f3e7745907b8871b49 | [
"MIT"
] | 3 | 2017-10-25T09:29:30.000Z | 2021-03-18T01:34:19.000Z | defmodule Legato.ReportTest do
use ExUnit.Case
doctest Legato.Report
  # Minimal report struct used as the target type for Legato.Report.as/2;
  # both fields default to 0 so partial result maps fill in cleanly.
  defmodule ExitReport do
    defstruct exits: 0, pageviews: 0
  end
  # Maps whose keys are missing should fall back to the struct defaults.
  test "applying struct to request results" do
    reports = [%{exits: 18, pageviews: 90}, %{pageviews: 10}] |> Legato.Report.as(ExitReport)
    expected = [
      %ExitReport{exits: 18, pageviews: 90},
      %ExitReport{exits: 0, pageviews: 10}
    ]
    assert reports == expected
  end
| 23.052632 | 93 | 0.671233 |
1ca96d9a2277c347241cdec4f011d5f5ff9b50a8 | 479 | exs | Elixir | example_app/test/models/user_test.exs | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 750 | 2015-01-18T23:00:36.000Z | 2021-03-24T22:11:09.000Z | example_app/test/models/user_test.exs | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 130 | 2015-01-19T12:39:42.000Z | 2021-09-28T22:40:52.000Z | example_app/test/models/user_test.exs | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 151 | 2015-01-19T09:24:44.000Z | 2020-09-21T13:52:46.000Z | defmodule ExampleApp.UserTest do
use ExampleApp.ModelCase
alias ExampleApp.User
  # Fixture attribute sets for the changeset tests below.
  @valid_attrs %{email: "some content", encrypted_password: "some content", name: "some content"}
  @invalid_attrs %{}
  test "changeset with valid attributes" do
    changeset = User.changeset(%User{}, @valid_attrs)
    assert changeset.valid?
  end
  # An empty attrs map must fail validation (required fields missing).
  test "changeset with invalid attributes" do
    changeset = User.changeset(%User{}, @invalid_attrs)
    refute changeset.valid?
  end
end
| 25.210526 | 97 | 0.728601 |
1ca9eb951d045e0b0f1e70dbb3838fca732799f4 | 249 | exs | Elixir | elixir/euler1.exs | lnds/Euler | 818146053da99088e96c2bf70d2bcd78c0959628 | [
"FTL"
] | null | null | null | elixir/euler1.exs | lnds/Euler | 818146053da99088e96c2bf70d2bcd78c0959628 | [
"FTL"
] | null | null | null | elixir/euler1.exs | lnds/Euler | 818146053da99088e96c2bf70d2bcd78c0959628 | [
"FTL"
] | null | null | null | defmodule Euler1 do
  # Project Euler #1: sum of multiples of 3 or 5 below @target.
  @target 1000
  @a 3
  @b 5
def sum(n, m) do
Enum.filter(1..n-1, &(rem(&1, m)==0)) |> Enum.sum()
end
def result(), do: sum(@target, @a) + sum(@target, @b) - sum(@target, @a * @b)
end
IO.puts("#{Euler1.result()}")
| 17.785714 | 79 | 0.534137 |
1caa08446d2ad29f2f9883f4f6ff1c095220a6de | 1,736 | exs | Elixir | test/lib/code_corps_web/controllers/stripe_connect_events_controller_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 275 | 2015-06-23T00:20:51.000Z | 2021-08-19T16:17:37.000Z | test/lib/code_corps_web/controllers/stripe_connect_events_controller_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 1,304 | 2015-06-26T02:11:54.000Z | 2019-12-12T21:08:00.000Z | test/lib/code_corps_web/controllers/stripe_connect_events_controller_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 140 | 2016-01-01T18:19:47.000Z | 2020-11-22T06:24:47.000Z | defmodule CodeCorpsWeb.StripeConnectEventsControllerTest do
use CodeCorpsWeb.ConnCase
alias CodeCorps.StripeEvent
  # Build a JSON API connection against the "api." subdomain for every test.
  setup do
    conn =
      %{build_conn() | host: "api."}
      |> put_req_header("accept", "application/json")
      |> put_req_header("content-type", "application/json")
    {:ok, conn: conn}
  end
  # Matching livemode/env: the event is accepted and persisted.
  test "responds with 200 when the event will be processed", %{conn: conn} do
    event = %{"id" => "evt_123", "livemode" => false}
    path = conn |> stripe_connect_events_path(:create)
    assert conn |> post(path, event) |> response(200)
    assert StripeEvent |> Repo.aggregate(:count, :id) == 1
  end
  # TODO: The following two can be merged into one and actual environment matching behavior
  # can be added to the EnvironmentFilter test module
  # Livemode event against a non-prod environment must be rejected unpersisted.
  test "returns 400, does nothing if event is livemode and env is not :prod", %{conn: conn} do
    Application.put_env(:code_corps, :stripe_env, :other)
    event = %{"id" => "evt_123", "livemode" => true}
    path = conn |> stripe_connect_events_path(:create)
    assert conn |> post(path, event) |> response(400)
    assert StripeEvent |> Repo.aggregate(:count, :id) == 0
    # put env back to original state
    Application.put_env(:code_corps, :stripe_env, :test)
  end
  # Test-mode event against a prod environment must also be rejected.
  test "returns 400, does nothing if event is not livemode and env is :prod", %{conn: conn} do
    Application.put_env(:code_corps, :stripe_env, :prod)
    event = %{"id" => "evt_123", "livemode" => false}
    path = conn |> stripe_connect_events_path(:create)
    assert conn |> post(path, event) |> response(400)
    assert StripeEvent |> Repo.aggregate(:count, :id) == 0
    # put env back to original state
    Application.put_env(:code_corps, :stripe_env, :test)
  end
end
| 34.039216 | 94 | 0.675115 |
1caa3ee03da86a725208e15d36c352b215537c42 | 23,456 | exs | Elixir | integration/oracle/cases/preload.exs | MikeAlbertFleetSolutions/oracle_ecto | 63f510a5cd97c0b8a2f92786d0dfc8af688fb596 | [
"Apache-2.0"
] | null | null | null | integration/oracle/cases/preload.exs | MikeAlbertFleetSolutions/oracle_ecto | 63f510a5cd97c0b8a2f92786d0dfc8af688fb596 | [
"Apache-2.0"
] | 2 | 2019-05-21T15:57:37.000Z | 2019-05-21T16:00:59.000Z | integration/oracle/cases/preload.exs | MikeAlbertFleetSolutions/oracle_ecto | 63f510a5cd97c0b8a2f92786d0dfc8af688fb596 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Integration.PreloadTest do
use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true)
@moduletag :integration
alias Ecto.Integration.TestRepo
import Ecto.Query
alias Ecto.Integration.Post
alias Ecto.Integration.Comment
alias Ecto.Integration.Permalink
alias Ecto.Integration.User
  # Preloading :comments on a list of posts must attach each post's own
  # comments and leave posts without comments with an empty list.
  test "preload has_many" do
    p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
    p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
    p3 = TestRepo.insert!(%Post{id: 3, title: "3"})
    # We use the same text to expose bugs in preload sorting
    %Comment{id: cid1} = TestRepo.insert!(%Comment{id: 1, text: "1", post_id: p1.id})
    %Comment{id: cid3} = TestRepo.insert!(%Comment{id: 2, text: "2", post_id: p2.id})
    %Comment{id: cid2} = TestRepo.insert!(%Comment{id: 3, text: "2", post_id: p1.id})
    %Comment{id: cid4} = TestRepo.insert!(%Comment{id: 4, text: "3", post_id: p2.id})
    assert %Ecto.Association.NotLoaded{} = p1.comments
    [p3, p1, p2] = TestRepo.preload([p3, p1, p2], :comments)
    assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = p1.comments |> sort_by_id
    assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = p2.comments |> sort_by_id
    assert [] = p3.comments
  end
  # Preloading :permalink must set the struct where one exists and nil where
  # none points at the post.
  test "preload has_one" do
    p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
    p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
    p3 = TestRepo.insert!(%Post{id: 3, title: "3"})
    %Permalink{id: pid1} = TestRepo.insert!(%Permalink{id: 1, url: "1", post_id: p1.id})
    %Permalink{}         = TestRepo.insert!(%Permalink{id: 2, url: "2", post_id: nil})
    %Permalink{id: pid3} = TestRepo.insert!(%Permalink{id: 3, url: "3", post_id: p3.id})
    assert %Ecto.Association.NotLoaded{} = p1.permalink
    assert %Ecto.Association.NotLoaded{} = p2.permalink
    [p3, p1, p2] = TestRepo.preload([p3, p1, p2], :permalink)
    assert %Permalink{id: ^pid1} = p1.permalink
    refute p2.permalink
    assert %Permalink{id: ^pid3} = p3.permalink
  end
  # Preloading the parent side: permalinks with a post_id get their post,
  # a nil foreign key yields a nil association.
  test "preload belongs_to" do
    %Post{id: pid1} = TestRepo.insert!(%Post{id: 1, title: "1"})
    TestRepo.insert!(%Post{id: 2, title: "2"})
    %Post{id: pid3} = TestRepo.insert!(%Post{id: 3, title: "3"})
    pl1 = TestRepo.insert!(%Permalink{id: 1, url: "1", post_id: pid1})
    pl2 = TestRepo.insert!(%Permalink{id: 2, url: "2", post_id: nil})
    pl3 = TestRepo.insert!(%Permalink{id: 3, url: "3", post_id: pid3})
    assert %Ecto.Association.NotLoaded{} = pl1.post
    [pl3, pl1, pl2] = TestRepo.preload([pl3, pl1, pl2], :post)
    assert %Post{id: ^pid1} = pl1.post
    refute pl2.post
    assert %Post{id: ^pid3} = pl3.post
  end
  # Several comments pointing at the same post must each receive that shared
  # parent when :post is preloaded.
  test "preload belongs_to with shared assocs" do
    %Post{id: pid1} = TestRepo.insert!(%Post{id: 1, title: "1"})
    %Post{id: pid2} = TestRepo.insert!(%Post{id: 2, title: "2"})
    c1 = TestRepo.insert!(%Comment{id: 1, text: "1", post_id: pid1})
    c2 = TestRepo.insert!(%Comment{id: 2, text: "2", post_id: pid1})
    c3 = TestRepo.insert!(%Comment{id: 3, text: "3", post_id: pid2})
    [c3, c1, c2] = TestRepo.preload([c3, c1, c2], :post)
    assert %Post{id: ^pid1} = c1.post
    assert %Post{id: ^pid1} = c2.post
    assert %Post{id: ^pid2} = c3.post
  end
# test "preload many_to_many" do
# p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
# p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
# p3 = TestRepo.insert!(%Post{id: 3, title: "3"})
#
# # We use the same name to expose bugs in preload sorting
# %User{id: uid1} = TestRepo.insert!(%User{id: 1, name: "1"})
# %User{id: uid3} = TestRepo.insert!(%User{id: 2, name: "2"})
# %User{id: uid2} = TestRepo.insert!(%User{id: 3, name: "2"})
# %User{id: uid4} = TestRepo.insert!(%User{id: 4, name: "3"})
#
# TestRepo.insert_all "posts_users", [[id: 1, post_id: p1.id, user_id: uid1],
# [id: 2, post_id: p1.id, user_id: uid2],
# [id: 3, post_id: p2.id, user_id: uid3],
# [id: 4, post_id: p2.id, user_id: uid4],
# [id: 5, post_id: p3.id, user_id: uid1],
# [id: 6, post_id: p3.id, user_id: uid4]]
#
# assert %Ecto.Association.NotLoaded{} = p1.users
#
# [p1, p2, p3] = TestRepo.preload([p1, p2, p3], :users)
# assert [%User{id: ^uid1}, %User{id: ^uid2}] = p1.users |> sort_by_id
# assert [%User{id: ^uid3}, %User{id: ^uid4}] = p2.users |> sort_by_id
# assert [%User{id: ^uid1}, %User{id: ^uid4}] = p3.users |> sort_by_id
# end
  # has_many :through — preloading :comments_authors must also populate the
  # intermediate :comments (with their :author) along the way.
  test "preload has_many through" do
    %Post{id: pid1} = p1 = TestRepo.insert!(%Post{id: 1})
    %Post{id: pid2} = p2 = TestRepo.insert!(%Post{id: 2})
    %User{id: uid1} = TestRepo.insert!(%User{id: 1, name: "foo"})
    %User{id: uid2} = TestRepo.insert!(%User{id: 2, name: "bar"})
    %Comment{} = TestRepo.insert!(%Comment{id: 1, post_id: pid1, author_id: uid1})
    %Comment{} = TestRepo.insert!(%Comment{id: 2, post_id: pid1, author_id: uid1})
    %Comment{} = TestRepo.insert!(%Comment{id: 3, post_id: pid1, author_id: uid2})
    %Comment{} = TestRepo.insert!(%Comment{id: 4, post_id: pid2, author_id: uid2})
    [p1, p2] = TestRepo.preload([p1, p2], :comments_authors)
    # Through was preloaded
    [u1, u2] = p1.comments_authors |> sort_by_id
    assert u1.id == uid1
    assert u2.id == uid2
    [u2] = p2.comments_authors
    assert u2.id == uid2
    # But we also preloaded everything along the way
    assert [c1, c2, c3] = p1.comments |> sort_by_id
    assert c1.author.id == uid1
    assert c2.author.id == uid1
    assert c3.author.id == uid2
    assert [c4] = p2.comments
    assert c4.author.id == uid2
  end
  # has_one :through — each comment gets its post's permalink both via the
  # shortcut field and the fully preloaded intermediate :post.
  test "preload has_one through" do
    %Post{id: pid1} = TestRepo.insert!(%Post{id: 1})
    %Post{id: pid2} = TestRepo.insert!(%Post{id: 2})
    %Permalink{id: lid1} = TestRepo.insert!(%Permalink{id: 1, post_id: pid1})
    %Permalink{id: lid2} = TestRepo.insert!(%Permalink{id: 2, post_id: pid2})
    %Comment{} = c1 = TestRepo.insert!(%Comment{id: 1, post_id: pid1})
    %Comment{} = c2 = TestRepo.insert!(%Comment{id: 2, post_id: pid1})
    %Comment{} = c3 = TestRepo.insert!(%Comment{id: 3, post_id: pid2})
    [c1, c2, c3] = TestRepo.preload([c1, c2, c3], :post_permalink)
    # Through was preloaded
    assert c1.post.id == pid1
    assert c1.post.permalink.id == lid1
    assert c1.post_permalink.id == lid1
    assert c2.post.id == pid1
    assert c2.post.permalink.id == lid1
    assert c2.post_permalink.id == lid1
    assert c3.post.id == pid2
    assert c3.post.permalink.id == lid2
    assert c3.post_permalink.id == lid2
  end
  # Two-level :through (permalink -> post -> comments -> authors) also loads
  # every intermediate association.
  test "preload has_many through-through" do
    %Post{id: pid1} = TestRepo.insert!(%Post{id: 1})
    %Post{id: pid2} = TestRepo.insert!(%Post{id: 2})
    %Permalink{} = l1 = TestRepo.insert!(%Permalink{id: 1, post_id: pid1})
    %Permalink{} = l2 = TestRepo.insert!(%Permalink{id: 2, post_id: pid2})
    %User{id: uid1} = TestRepo.insert!(%User{id: 1, name: "foo"})
    %User{id: uid2} = TestRepo.insert!(%User{id: 2, name: "bar"})
    %Comment{} = TestRepo.insert!(%Comment{id: 1, post_id: pid1, author_id: uid1})
    %Comment{} = TestRepo.insert!(%Comment{id: 2, post_id: pid1, author_id: uid1})
    %Comment{} = TestRepo.insert!(%Comment{id: 3, post_id: pid1, author_id: uid2})
    %Comment{} = TestRepo.insert!(%Comment{id: 4, post_id: pid2, author_id: uid2})
    # With assoc query
    [l1, l2] = TestRepo.preload([l1, l2], :post_comments_authors)
    # Through was preloaded
    [u1, u2] = l1.post_comments_authors |> sort_by_id
    assert u1.id == uid1
    assert u2.id == uid2
    [u2] = l2.post_comments_authors
    assert u2.id == uid2
    # But we also preloaded everything along the way
    assert l1.post.id == pid1
    assert l1.post.comments != []
    assert l2.post.id == pid2
    assert l2.post.comments != []
  end
# test "preload has_many through many_to_many" do
# %Post{} = p1 = TestRepo.insert!(%Post{id: 1})
# %Post{} = p2 = TestRepo.insert!(%Post{id: 2})
#
# %User{id: uid1} = TestRepo.insert!(%User{id: 1, name: "foo"})
# %User{id: uid2} = TestRepo.insert!(%User{id: 2, name: "bar"})
#
# TestRepo.insert_all "posts_users", [[id: 1, post_id: p1.id, user_id: uid1],
# [id: 2, post_id: p1.id, user_id: uid2],
# [id: 3, post_id: p2.id, user_id: uid2]]
#
# %Comment{id: cid1} = TestRepo.insert!(%Comment{id: 1, author_id: uid1})
# %Comment{id: cid2} = TestRepo.insert!(%Comment{id: 2, author_id: uid1})
# %Comment{id: cid3} = TestRepo.insert!(%Comment{id: 3, author_id: uid2})
# %Comment{id: cid4} = TestRepo.insert!(%Comment{id: 4, author_id: uid2})
#
# [p1, p2] = TestRepo.preload([p1, p2], :users_comments)
#
# # Through was preloaded
# [c1, c2, c3, c4] = p1.users_comments |> sort_by_id
# assert c1.id == cid1
# assert c2.id == cid2
# assert c3.id == cid3
# assert c4.id == cid4
#
# [c3, c4] = p2.users_comments |> sort_by_id
# assert c3.id == cid3
# assert c4.id == cid4
#
# # But we also preloaded everything along the way
# assert [u1, u2] = p1.users |> sort_by_id
# assert u1.id == uid1
# assert u2.id == uid2
#
# assert [u2] = p2.users
# assert u2.id == uid2
# end
## Empties
  # Preloading an empty list is a no-op regardless of the association name.
  test "preload empty" do
    assert TestRepo.preload([], :anything_goes) == []
  end
  # A has_many with no rows loads as an empty list, not NotLoaded.
  test "preload has_many with no associated entries" do
    p = TestRepo.insert!(%Post{id: 1, title: "1"})
    p = TestRepo.preload(p, :comments)
    assert p.title == "1"
    assert p.comments == []
  end
  # A has_one with no row loads as nil.
  test "preload has_one with no associated entries" do
    p = TestRepo.insert!(%Post{id: 1, title: "1"})
    p = TestRepo.preload(p, :permalink)
    assert p.title == "1"
    assert p.permalink == nil
  end
  # A belongs_to with a nil foreign key loads as nil.
  test "preload belongs_to with no associated entry" do
    c = TestRepo.insert!(%Comment{id: 1, text: "1"})
    c = TestRepo.preload(c, :post)
    assert c.text == "1"
    assert c.post == nil
  end
  # A many_to_many with no join rows loads as an empty list.
  test "preload many_to_many with no associated entries" do
    p = TestRepo.insert!(%Post{id: 1, title: "1"})
    p = TestRepo.preload(p, :users)
    assert p.title == "1"
    assert p.users == []
  end
## With queries
  # A preload given as a function receives the parent ids and must return all
  # candidate rows; Ecto then distributes them onto the right parents.
  test "preload with function" do
    p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
    p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
    p3 = TestRepo.insert!(%Post{id: 3, title: "3"})
    # We use the same text to expose bugs in preload sorting
    %Comment{id: cid1} = TestRepo.insert!(%Comment{id: 1, text: "1", post_id: p1.id})
    %Comment{id: cid3} = TestRepo.insert!(%Comment{id: 2, text: "2", post_id: p2.id})
    %Comment{id: cid2} = TestRepo.insert!(%Comment{id: 3, text: "2", post_id: p1.id})
    %Comment{id: cid4} = TestRepo.insert!(%Comment{id: 4, text: "3", post_id: p2.id})
    assert [pe3, pe1, pe2] = TestRepo.preload([p3, p1, p2],
      comments: fn _ -> TestRepo.all(Comment) end)
    assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = pe1.comments
    assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = pe2.comments
    assert [] = pe3.comments
  end
  # A preload given as a query can filter, select a subset of fields, reorder
  # the children, and itself carry a nested preload.
  test "preload with query" do
    p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
    p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
    p3 = TestRepo.insert!(%Post{id: 3, title: "3"})
    # We use the same text to expose bugs in preload sorting
    %Comment{id: cid1} = TestRepo.insert!(%Comment{id: 1, text: "1", post_id: p1.id})
    %Comment{id: cid3} = TestRepo.insert!(%Comment{id: 2, text: "2", post_id: p2.id})
    %Comment{id: cid2} = TestRepo.insert!(%Comment{id: 3, text: "2", post_id: p1.id})
    %Comment{id: cid4} = TestRepo.insert!(%Comment{id: 4, text: "3", post_id: p2.id})
    assert %Ecto.Association.NotLoaded{} = p1.comments
    # With empty query
    assert [pe3, pe1, pe2] = TestRepo.preload([p3, p1, p2],
      comments: from(c in Comment, where: false))
    assert [] = pe1.comments
    assert [] = pe2.comments
    assert [] = pe3.comments
    # With custom select
    assert [pe3, pe1, pe2] = TestRepo.preload([p3, p1, p2],
      comments: from(c in Comment, select: c.id, order_by: c.id))
    assert [^cid1, ^cid2] = pe1.comments
    assert [^cid3, ^cid4] = pe2.comments
    assert [] = pe3.comments
    # With custom ordered query
    assert [pe3, pe1, pe2] = TestRepo.preload([p3, p1, p2],
      comments: from(c in Comment, order_by: [desc: c.text]))
    assert [%Comment{id: ^cid2}, %Comment{id: ^cid1}] = pe1.comments
    assert [%Comment{id: ^cid4}, %Comment{id: ^cid3}] = pe2.comments
    assert [] = pe3.comments
    # With custom ordered query with preload
    assert [pe3, pe1, pe2] = TestRepo.preload([p3, p1, p2],
      comments: {from(c in Comment, order_by: [desc: c.text]), :post})
    assert [%Comment{id: ^cid2} = c2, %Comment{id: ^cid1} = c1] = pe1.comments
    assert [%Comment{id: ^cid4} = c4, %Comment{id: ^cid3} = c3] = pe2.comments
    assert [] = pe3.comments
    assert c1.post.title == "1"
    assert c2.post.title == "1"
    assert c3.post.title == "2"
    assert c4.post.title == "2"
  end
  # :through preloads also accept a query; a custom select must keep the `id`
  # key (a bare id raises), and ordering is dictated by the intermediate rows.
  test "preload through with query" do
    %Post{id: pid1} = p1 = TestRepo.insert!(%Post{id: 1})
    u1 = TestRepo.insert!(%User{id: 1, name: "foo"})
    u2 = TestRepo.insert!(%User{id: 2, name: "bar"})
    u3 = TestRepo.insert!(%User{id: 3, name: "baz"})
    u4 = TestRepo.insert!(%User{id: 4, name: "norf"})
    %Comment{} = TestRepo.insert!(%Comment{id: 1, post_id: pid1, author_id: u1.id})
    %Comment{} = TestRepo.insert!(%Comment{id: 2, post_id: pid1, author_id: u1.id})
    %Comment{} = TestRepo.insert!(%Comment{id: 3, post_id: pid1, author_id: u2.id})
    %Comment{} = TestRepo.insert!(%Comment{id: 4, post_id: pid1, author_id: u3.id})
    %Comment{} = TestRepo.insert!(%Comment{id: 5, post_id: pid1, author_id: u4.id})
    np1 = TestRepo.preload(p1, comments_authors: from(u in User, where: u.name == "foo"))
    assert np1.comments_authors == [u1]
    assert_raise ArgumentError, ~r/Ecto expected a map\/struct with the key `id` but got: \d+/, fn ->
      TestRepo.preload(p1, comments_authors: from(u in User, order_by: u.name, select: u.id))
    end
    # The subpreload order does not matter because the result is dictated by comments
    np1 = TestRepo.preload(p1, comments_authors: from(u in User, order_by: u.name, select: %{id: u.id}))
    assert np1.comments_authors ==
           [%{id: u1.id}, %{id: u2.id}, %{id: u3.id}, %{id: u4.id}]
  end
## With take
  # `select` with a field list restricts both parent and preloaded child
  # fields; unselected fields come back as nil.
  test "preload with take" do
    p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
    p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
    _p = TestRepo.insert!(%Post{id: 3, title: "3"})
    %Comment{id: cid1} = TestRepo.insert!(%Comment{id: 1, text: "1", post_id: p1.id})
    %Comment{id: cid3} = TestRepo.insert!(%Comment{id: 2, text: "2", post_id: p2.id})
    %Comment{id: cid2} = TestRepo.insert!(%Comment{id: 3, text: "2", post_id: p1.id})
    %Comment{id: cid4} = TestRepo.insert!(%Comment{id: 4, text: "3", post_id: p2.id})
    assert %Ecto.Association.NotLoaded{} = p1.comments
    posts = TestRepo.all(from Post, preload: [:comments], select: [:id, comments: [:id, :post_id]])
    [p1, p2, p3] = sort_by_id(posts)
    assert p1.title == nil
    assert p2.title == nil
    assert p3.title == nil
    assert [%{id: ^cid1, text: nil}, %{id: ^cid2, text: nil}] = sort_by_id(p1.comments)
    assert [%{id: ^cid3, text: nil}, %{id: ^cid4, text: nil}] = sort_by_id(p2.comments)
    assert [] = sort_by_id(p3.comments)
  end
  # Same field restriction applied to a :through preload.
  test "preload through with take" do
    %Post{id: pid1} = TestRepo.insert!(%Post{id: 1})
    %User{id: uid1} = TestRepo.insert!(%User{id: 1, name: "foo"})
    %User{id: uid2} = TestRepo.insert!(%User{id: 2, name: "bar"})
    %Comment{} = TestRepo.insert!(%Comment{id: 1, post_id: pid1, author_id: uid1})
    %Comment{} = TestRepo.insert!(%Comment{id: 2, post_id: pid1, author_id: uid1})
    %Comment{} = TestRepo.insert!(%Comment{id: 3, post_id: pid1, author_id: uid2})
    [p1] = TestRepo.all from Post, preload: [:comments_authors], select: [:id, comments_authors: :id]
    [%{id: ^uid1, name: nil}, %{id: ^uid2, name: nil}] = p1.comments_authors |> sort_by_id
  end
## Nested
  # Several associations can be preloaded in one call.
  test "preload many assocs" do
    p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
    p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
    assert [p2, p1] = TestRepo.preload([p2, p1], [:comments, :users])
    assert p1.comments == []
    assert p2.comments == []
    assert p1.users == []
    assert p2.users == []
  end
  # Nested keyword preloads (comments and each comment's post).
  test "preload nested" do
    p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
    p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
    TestRepo.insert!(%Comment{id: 1, text: "1", post_id: p1.id})
    TestRepo.insert!(%Comment{id: 2, text: "2", post_id: p1.id})
    TestRepo.insert!(%Comment{id: 3, text: "3", post_id: p2.id})
    TestRepo.insert!(%Comment{id: 4, text: "4", post_id: p2.id})
    assert [p2, p1] = TestRepo.preload([p2, p1], [comments: :post])
    assert [c1, c2] = p1.comments
    assert [c3, c4] = p2.comments
    assert p1.id == c1.post.id
    assert p1.id == c2.post.id
    assert p2.id == c3.post.id
    assert p2.id == c4.post.id
  end
  # The preload query may itself carry a preload and an ordering.
  test "preload nested via custom query" do
    p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
    p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
    TestRepo.insert!(%Comment{id: 1, text: "1", post_id: p1.id})
    TestRepo.insert!(%Comment{id: 2, text: "2", post_id: p1.id})
    TestRepo.insert!(%Comment{id: 3, text: "3", post_id: p2.id})
    TestRepo.insert!(%Comment{id: 4, text: "4", post_id: p2.id})
    query = from(c in Comment, preload: :post, order_by: [desc: c.text])
    assert [p2, p1] = TestRepo.preload([p2, p1], comments: query)
    assert [c2, c1] = p1.comments
    assert [c4, c3] = p2.comments
    assert p1.id == c1.post.id
    assert p1.id == c2.post.id
    assert p2.id == c3.post.id
    assert p2.id == c4.post.id
  end
## Others
  @tag :invalid_prefix
  # Preloads run under the struct's stored prefix; an invalid prefix must error.
  test "preload custom prefix from schema" do
    p = TestRepo.insert!(%Post{id: 1, title: "1"})
    p = Ecto.put_meta(p, prefix: "this_surely_does_not_exist")
    # This preload should fail because it points to a prefix that does not exist
    assert catch_error(TestRepo.preload(p, [:comments]))
  end
  @tag :invalid_prefix
  # The :prefix option overrides the struct's prefix and must error the same way.
  test "preload custom prefix from options" do
    p = TestRepo.insert!(%Post{id: 1, title: "1"})
    # This preload should fail because it points to a prefix that does not exist
    assert catch_error(TestRepo.preload(p, [:comments], prefix: "this_surely_does_not_exist"))
  end
# test "preload with binary_id" do
# c = TestRepo.insert!(%Custom{bid: 1})
# u = TestRepo.insert!(%User{id: 1, custom_id: c.bid})
#
# u = TestRepo.preload(u, :custom)
# assert u.custom.bid == c.bid
# end
  # When an association is already set in the struct but its foreign key is
  # nil, preload must keep the in-memory value instead of querying.
  test "preload skips with association set but without id" do
    c1 = TestRepo.insert!(%Comment{id: 1, text: "1"})
    u1 = TestRepo.insert!(%User{id: 1, name: "name"})
    p1 = TestRepo.insert!(%Post{id: 1, title: "title"})
    c1 = %{c1 | author: u1, author_id: nil, post: p1, post_id: nil}
    c1 = TestRepo.preload(c1, [:author, :post])
    assert c1.author == u1
    assert c1.post == p1
  end
  # Already-loaded has_one/belongs_to associations are not re-fetched unless
  # force: true is given, which reloads from the database.
  test "preload skips already loaded for cardinality one" do
    %Post{id: pid} = TestRepo.insert!(%Post{id: 1, title: "1"})
    c1 = %Comment{id: cid1} = TestRepo.insert!(%Comment{id: 1, text: "1", post_id: pid})
    c2 = %Comment{id: _cid} = TestRepo.insert!(%Comment{id: 2, text: "2", post_id: nil})
    [c1, c2] = TestRepo.preload([c1, c2], :post)
    assert %Post{id: ^pid} = c1.post
    assert c2.post == nil
    [c1, c2] = TestRepo.preload([c1, c2], post: :comments)
    assert [%Comment{id: ^cid1}] = c1.post.comments
    TestRepo.update_all Post, set: [title: "0"]
    TestRepo.update_all Comment, set: [post_id: pid]
    # Preloading once again shouldn't change the result
    [c1, c2] = TestRepo.preload([c1, c2], :post)
    assert %Post{id: ^pid, title: "1", comments: [_|_]} = c1.post
    assert c2.post == nil
    [c1, c2] = TestRepo.preload([c1, %{c2 | post_id: pid}], :post, force: true)
    assert %Post{id: ^pid, title: "0", comments: %Ecto.Association.NotLoaded{}} = c1.post
    assert %Post{id: ^pid, title: "0", comments: %Ecto.Association.NotLoaded{}} = c2.post
  end
  # Same skip/force semantics for has_many associations.
  test "preload skips already loaded for cardinality many" do
    p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
    p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
    %Comment{id: cid1} = TestRepo.insert!(%Comment{id: 1, text: "1", post_id: p1.id})
    %Comment{id: cid2} = TestRepo.insert!(%Comment{id: 2, text: "2", post_id: p2.id})
    [p1, p2] = TestRepo.preload([p1, p2], :comments)
    assert [%Comment{id: ^cid1}] = p1.comments
    assert [%Comment{id: ^cid2}] = p2.comments
    [p1, p2] = TestRepo.preload([p1, p2], comments: :post)
    assert hd(p1.comments).post.id == p1.id
    assert hd(p2.comments).post.id == p2.id
    TestRepo.update_all Comment, set: [text: "0"]
    # Preloading once again shouldn't change the result
    [p1, p2] = TestRepo.preload([p1, p2], :comments)
    assert [%Comment{id: ^cid1, text: "1", post: %Post{}}] = p1.comments
    assert [%Comment{id: ^cid2, text: "2", post: %Post{}}] = p2.comments
    [p1, p2] = TestRepo.preload([p1, p2], :comments, force: true)
    assert [%Comment{id: ^cid1, text: "0", post: %Ecto.Association.NotLoaded{}}] = p1.comments
    assert [%Comment{id: ^cid2, text: "0", post: %Ecto.Association.NotLoaded{}}] = p2.comments
  end
  # Preloads declared inside the query itself: literal, interpolated query,
  # and interpolated atom list, including through a tuple select.
  test "preload keyword query" do
    p1 = TestRepo.insert!(%Post{id: 1, title: "1"})
    p2 = TestRepo.insert!(%Post{id: 2, title: "2"})
    TestRepo.insert!(%Post{id: 3, title: "3"})
    %Comment{id: cid1} = TestRepo.insert!(%Comment{id: 1, text: "1", post_id: p1.id})
    %Comment{id: cid2} = TestRepo.insert!(%Comment{id: 2, text: "2", post_id: p1.id})
    %Comment{id: cid3} = TestRepo.insert!(%Comment{id: 3, text: "3", post_id: p2.id})
    %Comment{id: cid4} = TestRepo.insert!(%Comment{id: 4, text: "4", post_id: p2.id})
    # Regular query
    query = from(p in Post, preload: [:comments], select: p)
    assert [p1, p2, p3] = TestRepo.all(query) |> sort_by_id
    assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = p1.comments |> sort_by_id
    assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = p2.comments |> sort_by_id
    assert [] = p3.comments
    # Query with interpolated preload query
    query = from(p in Post, preload: [comments: ^from(c in Comment, where: false)], select: p)
    assert [p1, p2, p3] = TestRepo.all(query)
    assert [] = p1.comments
    assert [] = p2.comments
    assert [] = p3.comments
    # Now let's use an interpolated preload too
    comments = [:comments]
    query = from(p in Post, preload: ^comments, select: {0, [p], 1, 2})
    posts = TestRepo.all(query)
    [p1, p2, p3] = Enum.map(posts, fn {0, [p], 1, 2} -> p end) |> sort_by_id
    assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = p1.comments |> sort_by_id
    assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = p2.comments |> sort_by_id
    assert [] = p3.comments
  end
defp sort_by_id(values) do
Enum.sort_by(values, &(&1.id))
end
end
| 39.755932 | 110 | 0.605943 |
1caa584bdde8104c95337a14818d0f862a41f331 | 2,288 | exs | Elixir | apps/rest_api/test/controllers/fallback_test.exs | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | null | null | null | apps/rest_api/test/controllers/fallback_test.exs | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | null | null | null | apps/rest_api/test/controllers/fallback_test.exs | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | null | null | null | defmodule RestAPI.Controllers.FallbackTest do
use RestAPI.ConnCase, async: true
alias ResourceManager.Identities.Commands.Inputs.CreateUser
alias RestAPI.Controllers.Fallback
  # {:error, :invalid_params} is rendered as a 400 payload.
  test "handles bad request responses", %{conn: conn} do
    assert %{
             "detail" => "The given parameters are invalid",
             "error" => "bad_request",
             "status" => 400
           } ==
             conn
             |> Fallback.call({:error, :invalid_params})
             |> json_response(400)
  end
  # {:error, :unauthorized} is rendered as a 401 payload.
  test "handles unathorized responses", %{conn: conn} do
    assert %{
             "detail" => "Not authorized to perform such action",
             "error" => "unauthorized",
             "status" => 401
           } ==
             conn
             |> Fallback.call({:error, :unauthorized})
             |> json_response(401)
  end
  # {:error, :unauthenticated} is rendered as a 403 payload.
  test "handles anauthenticated responses", %{conn: conn} do
    assert %{
             "detail" => "Not authenticated so cannot due such action",
             "error" => "unauthenticated",
             "status" => 403
           } ==
             conn
             |> Fallback.call({:error, :unauthenticated})
             |> json_response(403)
  end
test "handles not found responses", %{conn: conn} do
assert %{
"detail" => "Endpoint not found",
"error" => "not_found",
"status" => 404
} ==
conn
|> Fallback.call({:error, :not_found})
|> json_response(404)
end
  # An invalid changeset is rendered as a 400 payload carrying the
  # field-level validation errors under "response".
  test "handles changeset responses", %{conn: conn} do
    assert %{
             "detail" => "The given params failed in validation",
             "status" => 400,
             "error" => "bad_request",
             "response" => %{
               "username" => ["can't be blank"]
             }
           } ==
             conn
             |> Fallback.call(CreateUser.cast_and_apply(%{}))
             |> json_response(400)
  end
test "handles unknow responses", %{conn: conn} do
assert %{
"detail" => "Internal Server Error",
"error" => "internal_server_error",
"status" => 500
} ==
conn
|> Fallback.call({:error, :internal_server_error})
|> json_response(500)
end
end
| 30.105263 | 71 | 0.503059 |
1caa6a89244426ae22e62bd6baf33054bfb12ecc | 3,749 | ex | Elixir | lib/mongo_request.ex | LINKHA/elixir-mongo | f2e567675e2d9bdace0e6893e1e9bd424980ba75 | [
"MIT"
] | null | null | null | lib/mongo_request.ex | LINKHA/elixir-mongo | f2e567675e2d9bdace0e6893e1e9bd424980ba75 | [
"MIT"
] | null | null | null | lib/mongo_request.ex | LINKHA/elixir-mongo | f2e567675e2d9bdace0e6893e1e9bd424980ba75 | [
"MIT"
] | null | null | null | defmodule Mongo.Request do
@moduledoc """
Defines, encodes and sends MongoDB operations to the server
"""
  # requestID: client-assigned message id echoed back in the server reply;
  # payload: iodata body of the wire message.
  defstruct [
    requestID: nil,
    payload: nil]
  # Precomputed message headers: 32-bit little-endian opcode, followed (where
  # the legacy wire protocol requires it) by the reserved ZERO int32.
  @update      <<0xd1, 0x07, 0, 0, 0::32>> # 2001  update document
  @insert      <<0xd2, 0x07, 0, 0, 0::32>> # 2002  insert new document
  @get_more    <<0xd5, 0x07, 0, 0, 0::32>> # 2005  Get more data from a query. See Cursors
  @delete      <<0xd6, 0x07, 0, 0, 0::32>> # 2006  Delete documents
  @kill_cursor <<0xd7, 0x07, 0, 0, 0::32>> # 2007  Tell database client is done with a cursor
  @query       <<0xd4, 0x07, 0, 0>>        # 2004  query a collection
  @query_opts  <<0b00000100::8>>           # default query options, equvalent to `cursor.set_opts(slaveok: true)`
  @doc """
  Builds a query message (OP_QUERY).

  * collection: collection
  * selector: selection criteria (Map or nil)
  * projector: fields (Map or nil)
  """
  def query(find) do
    # With query modifiers present, the selector must be nested under `$query`.
    selector = if find.mods == %{}, do: find.selector, else: Map.put(find.mods, :'$query', find.selector)
    [
      # Fold the cursor options into the default flags byte; the remaining
      # 24 bits of the flags int32 are zero.
      @query, (Enum.reduce(find.opts, @query_opts, &queryopt_red/2)), <<0::24>>,
      find.collection.db.name, 46, find.collection.name,
      # Name terminator (0), then numberToSkip and numberToReturn int32s.
      <<0, find.skip::32-little-signed, find.batchSize::32-little-signed>>,
      Bson.encode(selector),
      Bson.encode(find.projector)
    ]
  end
  @doc """
  Builds a database command message composed of the command tag and its arguments.

  Commands are issued as a query against the special `$cmd` collection with
  skip 0 and batchSize -1 (a single reply document).
  """
  def cmd(dbname, cmd, cmd_args \\ %{}) do
    [
      @query, @query_opts, <<0::24>>, # [slaveok: true]
      dbname, <<".$cmd", 0, 0::32, 255, 255, 255, 255>>, # skip(0), batchSize(-1)
      document(cmd, cmd_args)
    ]
  end
@doc """
Builds an insert command message
"""
def insert(collection, docs) do
[
@insert, collection.db.name, 46, collection.name, <<0::8>>,
Enum.map(docs, fn(doc) -> Bson.encode(doc) end)
]
end
  @doc """
  Builds an update command message (OP_UPDATE).

  `upsert` and `multi` are booleans mapped onto the flags int32.
  """
  def update(collection, selector, update, upsert, multi) do
    [
      @update,
      collection.db.name, 46, collection.name,
      # Name terminator (0::8), then the little-endian flags int32: the low
      # byte carries bit 0 = Upsert and bit 1 = MultiUpdate.
      <<0::8, 0::6, (bit(multi))::1, (bit(upsert))::1, 0::24>>,
      document(selector), document(update)
    ]
  end
  # transforms `true` and `false` to bits
  defp bit(false), do: 0
  defp bit(true), do: 1
@doc """
Builds a delete command message
"""
def delete(collection, selector, justOne) do
[
@delete,
collection.db.name, 46, collection.name,
<<0, 0::7, (bit(justOne))::1, 0::24>>,
document(selector)
]
end
  @doc """
  Builds a kill_cursor command message (OP_KILL_CURSORS).

  Tells the server the client is done with a single cursor.
  """
  def kill_cursor(cursorid) do
    [
      @kill_cursor,
      # numberOfCursorIDs (1), followed by the 64-bit cursor id.
      <<1::32-little-signed, cursorid::64-little-signed>>
    ]
  end
@doc """
Builds a get_more command message
"""
def get_more(collection, batchsize, cursorid) do
[
@get_more,
collection.db.name, 46, collection.name,
<<0, batchsize::32-little-signed, cursorid::64-little-signed>>,
]
end
# transform a document into bson
defp document(command), do: Bson.encode(command)
defp document(command, command_args) do
Bson.encode(Enum.to_list(command) ++ Enum.to_list(command_args))
end
  use Bitwise
  # Operates one option
  # Sets the option's flag bit when the value is true, clears it when false,
  # and leaves the accumulator untouched for anything else.
  defp queryopt_red({opt, true}, bits), do: bits ||| queryopt(opt)
  defp queryopt_red({opt, false}, bits), do: bits &&& ~~~queryopt(opt)
  defp queryopt_red(_, bits), do: bits
  # Identifies the bit that is switched by an option when it is set to `true`
  defp queryopt(:awaitdata),       do: 0b00100000
  defp queryopt(:nocursortimeout), do: 0b00010000
  defp queryopt(:slaveok),         do: 0b00000100
  defp queryopt(:tailablecursor),  do: 0b00000010
  defp queryopt(_),                do: 0b00000000
end
| 29.753968 | 106 | 0.61323 |
1caa74f574c34c25e8b72d2916af497a88ce3b12 | 3,741 | exs | Elixir | test/cforum_web/controllers/admin/user_controller_test.exs | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | [
"MIT"
] | null | null | null | test/cforum_web/controllers/admin/user_controller_test.exs | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | [
"MIT"
] | null | null | null | test/cforum_web/controllers/admin/user_controller_test.exs | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | [
"MIT"
] | null | null | null | defmodule CforumWeb.Admin.UserControllerTest do
use CforumWeb.ConnCase
alias Cforum.Users.User
describe "index" do
setup [:setup_login]
test "lists all users", %{conn: conn} do
conn = get(conn, Path.admin_user_path(conn, :index))
assert html_response(conn, 200) =~ gettext("administrate users")
end
end
describe "new user" do
setup [:setup_login]
test "renders form", %{conn: conn} do
conn = get(conn, Path.admin_user_path(conn, :new))
assert html_response(conn, 200) =~ gettext("new user")
end
end
  describe "create user" do
    setup [:setup_login]
    # Valid attributes create the user and redirect to its edit page.
    test "redirects to show when data is valid", %{conn: conn} do
      params = params_for(:user)
      conn = post(conn, Path.admin_user_path(conn, :create), user: params)
      assert %{id: id} = cf_redirected_params(conn)
      assert redirected_to(conn) == Path.admin_user_path(conn, :edit, %User{user_id: id})
      conn = get(conn, Path.admin_user_path(conn, :edit, %User{user_id: id}))
      assert html_response(conn, 200) =~ gettext("edit user „%{name}“", name: params[:username])
    end
    # Invalid attributes re-render the new-user form.
    test "renders errors when data is invalid", %{conn: conn} do
      conn = post(conn, Path.admin_user_path(conn, :create), user: %{username: nil})
      assert html_response(conn, 200) =~ gettext("new user")
    end
  end
  describe "edit user" do
    setup [:setup_login, :create_user]
    # The edit form shows the user's name.
    test "renders form for editing chosen user", %{conn: conn, user: user} do
      conn = get(conn, Path.admin_user_path(conn, :edit, user))
      assert html_response(conn, 200) =~ gettext("edit user „%{name}“", name: user.username)
    end
  end
  describe "update user" do
    setup [:setup_login, :create_user]
    # A valid update redirects back to the edit page showing the new name.
    test "redirects when data is valid", %{conn: conn, user: user} do
      conn = put(conn, Path.admin_user_path(conn, :update, user), user: %{username: "Rebellion"})
      assert redirected_to(conn) == Path.admin_user_path(conn, :edit, user)
      conn = get(conn, Path.admin_user_path(conn, :edit, user))
      assert html_response(conn, 200) =~ "Rebellion"
    end
    # Invalid attributes re-render the edit form.
    test "renders errors when data is invalid", %{conn: conn, user: user} do
      conn = put(conn, Path.admin_user_path(conn, :update, user), user: %{username: nil})
      assert html_response(conn, 200) =~ gettext("edit user „%{name}“", name: user.username)
    end
  end
  describe "delete user" do
    setup [:setup_login, :create_user]
    # Deleting enqueues a background job (drained synchronously here) and the
    # user's edit page subsequently 404s.
    test "deletes chosen user", %{conn: conn, user: user} do
      conn = delete(conn, Path.admin_user_path(conn, :delete, user))
      assert %{success: 1, failure: 0} == Oban.drain_queue(:background)
      assert redirected_to(conn) == Path.admin_user_path(conn, :index)
      assert_error_sent(404, fn ->
        get(conn, Path.admin_user_path(conn, :edit, user))
      end)
    end
  end
  describe "access rights" do
    # Anonymous visitors get a 403.
    test "anonymous isn't allowed to access", %{conn: conn} do
      assert_error_sent(403, fn -> get(conn, Path.admin_user_path(conn, :index)) end)
    end
    # Regular (non-admin) users get a 403 as well.
    test "non-admin user isn't allowed to access", %{conn: conn} do
      user = insert(:user)
      conn = login(conn, user)
      assert_error_sent(403, fn -> get(conn, Path.admin_user_path(conn, :index)) end)
    end
    # Admins see the user administration page.
    test "admin is allowed", %{conn: conn} do
      user = insert(:user, admin: true)
      conn =
        conn
        |> login(user)
        |> get(Path.admin_user_path(conn, :index))
      assert html_response(conn, 200) =~ gettext("administrate users")
    end
  end
defp create_user(_) do
user = insert(:user)
{:ok, user: user}
end
defp setup_login(%{conn: conn}) do
user = build(:user) |> as_admin |> insert
{:ok, user: user, conn: login(conn, user)}
end
end
| 31.974359 | 97 | 0.645817 |
1caa779ad195719fb78623cc64bd5abe1bade6ca | 5,107 | exs | Elixir | test/process_test.exs | clszzyh/flexflow | 93df67c41278c2ca89c680dd8badb7a1f9f86cdf | [
"MIT"
] | 2 | 2021-02-05T02:07:17.000Z | 2021-07-31T22:38:54.000Z | test/process_test.exs | clszzyh/flexflow | 93df67c41278c2ca89c680dd8badb7a1f9f86cdf | [
"MIT"
] | 79 | 2021-01-14T02:39:36.000Z | 2022-03-31T02:06:27.000Z | test/process_test.exs | clszzyh/flexflow | 93df67c41278c2ca89c680dd8badb7a1f9f86cdf | [
"MIT"
] | null | null | null | defmodule ProcessTest do
use ExUnit.Case, async: true
doctest Flexflow.EventDispatcher
doctest Flexflow.ProcessRegistry
@moduletag capture_log: true
@moduletag :process
# No suite-wide setup is needed; the empty keyword list leaves the test
# context unchanged.
setup_all do
  []
end
# Seeding a history entry keyed by this test's name must succeed, and the
# recorded history for that key must then be non-empty.
test "history" do
  {fun_name, _arity} = __ENV__.function
  name = to_string(fun_name)
  assert Flexflow.History.put({P1, name}, :process_init) == :ok
  assert [_ | _] = Flexflow.history({P1, name})
end
# test "start_child" do
# name = to_string(elem(__ENV__.function, 0))
# {:ok, _pid} = Flexflow.start({P1, name})
# {:ok, child_pid} = Flexflow.start_child({P1, name}, {P1, "child"})
# {:error, {:exist, child_pid2}} = Flexflow.start_child({P1, name}, {P1, "child"})
# assert child_pid == child_pid2
# :ok = Flexflow.stop({P1, "child"})
# {:ok, _child_pid} = Flexflow.start_child({P1, name}, {P1, "child2"})
# process = Flexflow.state({P1, name})
# child_process = Flexflow.state({P1, "child2"})
# assert process.childs == [{P1, "child2"}, {P1, "child"}]
# assert child_process.parent == {P1, name}
# assert process.request_id == child_process.request_id
# :ok = Flexflow.stop({P1, "child2"})
# :ok = Flexflow.stop({P1, name})
# end
# test "Flexflow.TaskSupervisor" do
# pid = Flexflow.TaskSupervisor |> Process.whereis()
# assert is_pid(pid)
# end
# test "Flexflow.ProcessParentManager" do
# pid = Flexflow.ProcessParentManager |> Process.whereis()
# assert is_pid(pid)
# assert [_ | _] = Flexflow.ProcessParentManager.children()
# end
# test "Flexflow.ProcessManager" do
# pid = Flexflow.ProcessManager.pid(P1)
# assert is_pid(pid)
# pids = for %{pid: pid} <- Flexflow.ProcessParentManager.children(), do: pid
# assert pid in pids
# end
# test "kill" do
# assert {:ok, {P1, "kill"}} = Flexflow.History.ensure_new({P1, "kill"})
# {:ok, pid} = Flexflow.start({P1, "kill"})
# assert {:error, :already_exists} = Flexflow.History.ensure_new({P1, "kill"})
# true = Process.exit(pid, :kill)
# assert Process.info(pid) == nil
# {:error, :already_exists} = Flexflow.start({P1, "kill"})
# {:ok, pid2} = Flexflow.start({P1, "kill2"})
# true = Process.exit(pid2, :normal)
# refute Process.info(pid2) == nil
# {:ok, srv} = Flexflow.ProcessManager.server_pid(P1)
# assert is_pid(srv)
# :ok = Flexflow.stop({P1, "kill2"})
# assert Process.info(pid2) == nil
# end
# test "process p1" do
# name = to_string(elem(__ENV__.function, 0))
# {:ok, pid} = Flexflow.start({P1, name})
# {:exist, pid2} = Flexflow.server({P1, name})
# pid3 = Flexflow.pid({P1, name})
# assert pid == pid2
# assert pid == pid3
# assert [_ | _] = Flexflow.history({P1, name})
# assert Flexflow.ProcessManager.children(P1) == [
# %Flexflow.ProcessManager{pid: pid, id: name, name: :p1_new, state: :a}
# ]
# server_pid = Flexflow.ProcessStatem.pid({P1, name})
# assert server_pid == pid
# process = Flexflow.state({P1, name})
# assert process.id == name
# assert process.state == :waiting
# assert process.states[{N1, :n1}].state == :completed
# assert process.states[{N2, :n2}].state == :initial
# assert process.events[{T1, :t1_n1}].state == :initial
# Process.sleep(60)
# process = Flexflow.state({P1, name})
# assert process.states[{N3, :n3}].state == :initial
# end
# test "p2 slow ok" do
# name = to_string(elem(__ENV__.function, 0))
# {:ok, _pid} = Flexflow.start({P2, name}, %{slow: :ok, sleep: 50})
# Process.sleep(60)
# process = Flexflow.state({P2, name})
# assert process.states[{P2.Slow, :slow}].state == :initial
# assert process.states[{P2.Slow, :slow}].context.state == :ok
# end
# test "p2 slow other" do
# name = to_string(elem(__ENV__.function, 0))
# {:ok, _pid} = Flexflow.start({P2, name}, %{slow: :other, sleep: 50})
# Process.sleep(60)
# process = Flexflow.state({P2, name})
# assert process.states[{P2.Slow, :slow}].state == :initial
# assert process.states[{P2.Slow, :slow}].context.state == :ok
# assert process.states[{P2.Slow, :slow}].context.result == :other
# end
# test "p2 slow error" do
# name = to_string(elem(__ENV__.function, 0))
# {:ok, _pid} = Flexflow.start({P2, name}, %{slow: :error, sleep: 50})
# Process.sleep(60)
# process = Flexflow.state({P2, name})
# assert process.states[{P2.Slow, :slow}].state == :error
# assert process.states[{P2.Slow, :slow}].context.state == :error
# assert process.states[{P2.Slow, :slow}].context.result == :custom_error
# end
# test "p2 slow raise" do
# name = to_string(elem(__ENV__.function, 0))
# {:ok, _pid} = Flexflow.start({P2, name}, %{slow: :raise, sleep: 50})
# Process.sleep(60)
# process = Flexflow.state({P2, name})
# assert process.states[{P2.Slow, :slow}].state == :error
# assert process.states[{P2.Slow, :slow}].context.state == :error
# assert {%RuntimeError{message: "fooo"}, [_ | _]} =
# process.states[{P2.Slow, :slow}].context.result
# end
end
| 37.82963 | 86 | 0.615626 |
1caa8740955961cfb4e29593192eb5a300b8cbf0 | 4,538 | ex | Elixir | lib/changelog_web/controllers/admin/person_controller.ex | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | 1 | 2021-03-14T21:12:49.000Z | 2021-03-14T21:12:49.000Z | lib/changelog_web/controllers/admin/person_controller.ex | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | null | null | null | lib/changelog_web/controllers/admin/person_controller.ex | PsOverflow/changelog.com | 53f4ecfc39b021c6b8cfcc0fa11f29aff8038a7f | [
"MIT"
] | 1 | 2018-10-03T20:55:52.000Z | 2018-10-03T20:55:52.000Z | defmodule ChangelogWeb.Admin.PersonController do
use ChangelogWeb, :controller
alias Changelog.{Mailer, Episode, NewsItem, Person, Slack}
alias ChangelogWeb.Email
plug :assign_person when action in [:edit, :update, :delete, :slack]
plug Authorize, [Policies.Person, :person]
plug :scrub_params, "person" when action in [:create, :update]
# Paginated listing of people, optionally narrowed by the "filter" query
# parameter (admin/host/editor); any other value lists everyone.
def index(conn, params) do
  filter = Map.get(params, "filter", "all")

  scope =
    case filter do
      "admin" -> Person.admins()
      "host" -> Person.hosts()
      "editor" -> Person.editors()
      _other -> Person
    end

  page =
    scope
    |> Person.newest_first()
    |> Repo.paginate(params)

  conn
  |> assign(:people, page.entries)
  |> assign(:filter, filter)
  |> assign(:page, page)
  |> render(:index)
end
# Admin detail page for one person: their published guest episodes plus the
# news items they are attached to, split into published and declined sets.
def show(conn, %{"id" => id}) do
  person = Repo.get!(Person, id)

  # Episodes the person appeared on as a guest, newest first.
  episodes =
    assoc(person, :guest_episodes)
    |> Episode.published()
    |> Episode.newest_first()
    |> Episode.preload_all()
    |> Repo.all()

  # News items tied to this person that were published on the site.
  published =
    NewsItem
    |> NewsItem.with_person(person)
    |> NewsItem.published()
    |> NewsItem.newest_first()
    |> NewsItem.preload_all()
    |> Repo.all()

  # News items tied to this person that were declined.
  declined =
    NewsItem
    |> NewsItem.with_person(person)
    |> NewsItem.declined()
    |> NewsItem.newest_first()
    |> NewsItem.preload_all()
    |> Repo.all()

  conn
  |> assign(:person, person)
  |> assign(:episodes, episodes)
  |> assign(:published, published)
  |> assign(:declined, declined)
  |> render(:show)
end
# Renders the blank form for creating a person.
def new(conn, _params) do
  render(conn, :new, changeset: Person.admin_insert_changeset(%Person{}))
end
# Creates a person from admin-submitted params. On success also applies the
# file (avatar) changeset, optionally sends a welcome email (driven by
# params["welcome"]), then follows the redirect_next convention.
def create(conn, params = %{"person" => person_params}) do
  changeset = Person.admin_insert_changeset(%Person{}, person_params)

  case Repo.insert(changeset) do
    {:ok, person} ->
      # NOTE(review): the result of this update is ignored — a failed file
      # changeset would go unnoticed. Consider handling the error tuple.
      Repo.update(Person.file_changeset(person, person_params))
      handle_welcome_email(person, params)

      conn
      |> put_flash(:result, "success")
      |> redirect_next(params, admin_person_path(conn, :edit, person))

    {:error, changeset} ->
      conn
      |> put_flash(:result, "failure")
      |> render(:new, changeset: changeset)
  end
end
# Renders the edit form for the person loaded by the assign_person plug.
def edit(conn = %{assigns: %{person: person}}, _params) do
  render(conn, :edit, person: person, changeset: Person.admin_update_changeset(person))
end
# Applies admin-submitted changes to the person loaded by the plug.
def update(conn = %{assigns: %{person: person}}, params = %{"person" => person_params}) do
  person
  |> Person.admin_update_changeset(person_params)
  |> Repo.update()
  |> case do
    {:ok, _person} ->
      conn
      |> put_flash(:result, "success")
      |> redirect_next(params, admin_person_path(conn, :index))

    {:error, changeset} ->
      conn
      |> put_flash(:result, "failure")
      |> render(:edit, person: person, changeset: changeset)
  end
end
# Hard-deletes the person loaded by the plug (raises on failure), then
# returns to the index.
def delete(conn = %{assigns: %{person: person}}, _params) do
  Repo.delete!(person)

  conn
  |> put_flash(:result, "success")
  |> redirect(to: admin_person_path(conn, :index))
end
# Invites the person to Slack. A fresh invite and an "already_in_team"
# response both count as success and mark the person's slack id pending.
def slack(conn = %{assigns: %{person: person}}, params) do
  flash =
    case Slack.Client.invite(person.email) do
      %{"ok" => true} -> note_slack_success(person)
      %{"ok" => false, "error" => "already_in_team"} -> note_slack_success(person)
      _other -> "failure"
    end

  conn
  |> put_flash(:result, flash)
  |> redirect_next(params, admin_person_path(conn, :index))
end

# Shared success path: record the pending slack id and return the flash value.
defp note_slack_success(person) do
  set_slack_id_to_pending(person)
  "success"
end
# Plug: loads the person referenced by the :id path param into conn.assigns.
defp assign_person(conn = %{params: %{"id" => id}}, _opts) do
  assign(conn, :person, Repo.get!(Person, id))
end
# No-op when the person already has a slack id; otherwise records "pending".
defp set_slack_id_to_pending(person = %{slack_id: id}) when not is_nil(id), do: person

defp set_slack_id_to_pending(person) do
  # Raises a MatchError if the update fails — intentional in this admin flow.
  {:ok, person} = Repo.update(Person.slack_changes(person, "pending"))
  person
end
# Dispatches the requested welcome email based on params["welcome"];
# any other (or missing) value is a no-op returning false.
defp handle_welcome_email(person, %{"welcome" => "generic"}), do: handle_generic_welcome_email(person)
defp handle_welcome_email(person, %{"welcome" => "guest"}), do: handle_guest_welcome_email(person)
defp handle_welcome_email(_person, _params), do: false
# Refreshes the person's auth token, then delivers the community welcome
# email via deliver_later (background delivery).
defp handle_generic_welcome_email(person) do
  person
  |> Person.refresh_auth_token()
  |> Email.community_welcome()
  |> Mailer.deliver_later()
end
# Refreshes the person's auth token, then delivers the guest welcome email
# via deliver_later (background delivery).
defp handle_guest_welcome_email(person) do
  person
  |> Person.refresh_auth_token()
  |> Email.guest_welcome()
  |> Mailer.deliver_later()
end
end
| 28.3625 | 92 | 0.633098 |
1caaa7dfd4c557992106d9898b013c937876e9d6 | 1,885 | ex | Elixir | lib/nautilus/adapters/cluster_adapter/cluster_manager.ex | CarloHFR/NautilusGateway | 26211948c5f9127e6662a90e41df5b43b2408372 | [
"MIT"
] | null | null | null | lib/nautilus/adapters/cluster_adapter/cluster_manager.ex | CarloHFR/NautilusGateway | 26211948c5f9127e6662a90e41df5b43b2408372 | [
"MIT"
] | null | null | null | lib/nautilus/adapters/cluster_adapter/cluster_manager.ex | CarloHFR/NautilusGateway | 26211948c5f9127e6662a90e41df5b43b2408372 | [
"MIT"
] | null | null | null | defmodule Nautilus.Adapters.Cluster.ClusterManager do
@moduledoc """
This module is responsible for managing all connections with remote gateways
"""
use GenServer
@get_hostname Application.get_env(:nautilus, :GetHostname)
@cluster_client Application.get_env(:nautilus, :ClusterClient)
@key_value_adapter Application.get_env(:nautilus, :KeyValueBucketInterface)
@doc """
Starts the cluster manager, registered under the `:ClusterManager` name.
"""
def start_link(_) do
  # Bug fix: this used `GenServer.start/3`, which does NOT link the new
  # process to the caller. A function named `start_link` must link, otherwise
  # a supervisor starting it cannot detect or restart it on crashes.
  GenServer.start_link(__MODULE__, %{socket: nil}, name: :ClusterManager)
end
# On boot: when joining an existing network (:new_network == false), connect
# to every configured remote gateway; otherwise start with the given state.
def init(state) do
  case Application.get_env(:nautilus, :new_network) do
    false ->
      remote_gateways = Application.get_env(:nautilus, :remote_gateways)
      # Each configured entry is assumed to be an {ip, port} tuple — TODO confirm.
      Enum.each(remote_gateways, fn gateway -> connect_to_gateway(elem(gateway, 0), elem(gateway, 1)) end)
      {:ok, state}

    _ ->
      {:ok, state}
  end
end
@doc """
Parses a "[ip:port, ip:port, ...]" gateway list string and connects to every
entry that is neither this node nor an already-known gateway.
"""
def prepare_gateway_list(gateway_list) do
  {_, this_gateway} = @get_hostname.get_hostname()
  {_, known_gateways} = @key_value_adapter.get_gateway_list()

  gateway_list
  |> String.replace("[", "")
  |> String.replace("]", "")
  |> String.split(", ")
  |> Enum.reject(fn gateway -> gateway == this_gateway or gateway in known_gateways end)
  |> Enum.each(&connect_to_listed_gateway/1)
end

# Splits an "ip:port" string and opens a connection to that gateway.
defp connect_to_listed_gateway(gateway) do
  address_parts = String.split(gateway, ":")
  {_, gateway_ip} = address_parts |> Enum.at(0) |> String.to_charlist() |> :inet.parse_address()
  gateway_port = address_parts |> Enum.at(1) |> String.to_integer()
  connect_to_gateway(gateway_ip, gateway_port)
end
# Spawns a client process that connects to the gateway at ip/port in
# discovery mode; always reports {:ok, :connected} without waiting.
# NOTE(review): spawning a `start_link`-named function via `spawn/3` leaves
# the process unlinked and unsupervised — verify this is intentional.
def connect_to_gateway(ip, port) do
  _pid = spawn(@cluster_client, :start_link, [%{:ip => ip, :port => port, :discovery => true}])
  {:ok, :connected}
end
end
| 33.660714 | 117 | 0.606366 |
1caab54edac5345aa3983f01902c55a84ad41847 | 2,070 | exs | Elixir | config/config.exs | akdilsiz/elixir-parasut | 755feb4d36b0fd1c0c4ba21eda976892c4da9b8b | [
"Apache-2.0"
] | null | null | null | config/config.exs | akdilsiz/elixir-parasut | 755feb4d36b0fd1c0c4ba21eda976892c4da9b8b | [
"Apache-2.0"
] | 1 | 2019-09-03T10:52:27.000Z | 2019-09-03T11:23:34.000Z | config/config.exs | akdilsiz/elixir-parasut | 755feb4d36b0fd1c0c4ba21eda976892c4da9b8b | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# Parasut API client settings.
#   redis / ets    - presumably alternative storage/cache backends, both
#                    disabled here — TODO confirm against the client docs
#   redirect_url   - OAuth2 out-of-band redirect URI
#   merchant_no    - Parasut merchant/company number
#   auth           - credentials left blank; expected to be supplied per
#                    environment (see import_config below)
config :parasut, Parasut,
  redis: false,
  ets: false,
  redirect_url: "urn:ietf:wg:oauth:2.0:oob",
  merchant_no: 158,
  auth: %{
    username: "",
    password: ""
  }
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :parasut, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:parasut, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
import_config "#{Mix.env()}.exs"
| 35.689655 | 74 | 0.752657 |
1caabb87f119a76f403c8308fe08f17dce1b2091 | 54 | exs | Elixir | test/test_helper.exs | Ta-To/mix_test_observer | 0ca27bbf3ee5c547374fa29770e40f43341d4364 | [
"Apache-2.0"
] | 5 | 2019-11-15T10:44:36.000Z | 2021-05-15T21:14:49.000Z | test/test_helper.exs | software-mansion-labs/elixir-ibm-speech-to-text | 2d1dec2f429071bb30a0568af8fec24787b8cd57 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | software-mansion-labs/elixir-ibm-speech-to-text | 2d1dec2f429071bb30a0568af8fec24787b8cd57 | [
"Apache-2.0"
] | null | null | null | ExUnit.configure(exclude: [:external])
# Boot the ExUnit runner (tests tagged :external are excluded by the
# configure call above).
ExUnit.start()
| 18 | 38 | 0.759259 |
1caad37b6b65a6983e12e99a4a376aa5b11a09ea | 3,083 | ex | Elixir | clients/api_gateway/lib/google_api/api_gateway/v1beta/model/apigateway_audit_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/api_gateway/lib/google_api/api_gateway/v1beta/model/apigateway_audit_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/api_gateway/lib/google_api/api_gateway/v1beta/model/apigateway_audit_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.APIGateway.V1beta.Model.ApigatewayAuditConfig do
  @moduledoc """
  Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { "audit_configs": [ { "service": "allServices", "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:aliya@example.com" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts jose@example.com from DATA_READ logging, and aliya@example.com from DATA_WRITE logging.

  ## Attributes

  * `auditLogConfigs` (*type:* `list(GoogleApi.APIGateway.V1beta.Model.ApigatewayAuditLogConfig.t)`, *default:* `nil`) - The configuration for logging of each type of permission.
  * `service` (*type:* `String.t`, *default:* `nil`) - Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special value that covers all services.
  """

  # Auto-generated model (see the file header) — do not hand-edit logic here.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :auditLogConfigs =>
            list(GoogleApi.APIGateway.V1beta.Model.ApigatewayAuditLogConfig.t()) | nil,
          :service => String.t() | nil
        }

  # field/2-3 macros come from GoogleApi.Gax.ModelBase and declare how each
  # JSON attribute is (de)serialized.
  field(:auditLogConfigs,
    as: GoogleApi.APIGateway.V1beta.Model.ApigatewayAuditLogConfig,
    type: :list
  )

  field(:service)
end
# Poison decode hook: delegates to the model module's decode/2 (provided by
# `use GoogleApi.Gax.ModelBase`).
defimpl Poison.Decoder, for: GoogleApi.APIGateway.V1beta.Model.ApigatewayAuditConfig do
  def decode(value, options) do
    GoogleApi.APIGateway.V1beta.Model.ApigatewayAuditConfig.decode(value, options)
  end
end
# Poison encode hook: delegates to the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.APIGateway.V1beta.Model.ApigatewayAuditConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 56.054545 | 1,106 | 0.747973 |
1caae680f4375fc520c8ae63dca14b390e0e3d58 | 1,846 | ex | Elixir | lib/dobar_web/controllers/admin/place_image_controller.ex | ashkan18/dobar | 37381af2a56b2456cfe2a0a358169fd2764cd3f0 | [
"MIT"
] | null | null | null | lib/dobar_web/controllers/admin/place_image_controller.ex | ashkan18/dobar | 37381af2a56b2456cfe2a0a358169fd2764cd3f0 | [
"MIT"
] | 14 | 2019-09-02T18:00:07.000Z | 2021-09-02T00:49:54.000Z | lib/dobar_web/controllers/admin/place_image_controller.ex | ashkan18/dobar | 37381af2a56b2456cfe2a0a358169fd2764cd3f0 | [
"MIT"
] | 1 | 2016-07-27T14:40:55.000Z | 2016-07-27T14:40:55.000Z | defmodule DobarWeb.Admin.PlaceImageController do
use DobarWeb, :controller
alias Dobar.{Repo, PlaceImageUploader, Places, Places.PlaceImage}
# Lists all images attached to the given place.
def index(conn, %{"place_id" => place_id}) do
  place = place_id |> Places.get_place!() |> Repo.preload(:images)
  render(conn, "index.html", place_images: place.images, place_id: place_id)
end
# Renders the upload form for a new image on the given place.
def new(conn, %{"place_id" => place_id}) do
  changeset = %PlaceImage{place_id: place_id} |> Places.change_place_image()
  render(conn, "new.html", changeset: changeset, place_id: place_id)
end
# Stores the uploaded file, then records a PlaceImage row pointing at the
# generated URLs, credited to the currently logged-in user.
def create(conn, %{"place_id" => place_id, "place_image" => %{"image_file" => image_file}}) do
  # get_place! raises on an unknown id, so it need not live inside `with`
  # (the original used a bare `place <- ...` clause that could never fail).
  place = Places.get_place!(place_id)

  with {:ok, file} <- PlaceImageUploader.store({image_file, place}),
       urls = PlaceImageUploader.urls({file, place}),
       {:ok, place_image} <-
         Places.create_place_image(%{
           urls: urls,
           place_id: place.id,
           uploader_id: Guardian.Plug.current_resource(conn).id
         }) do
    conn
    |> put_flash(:info, "Place image created successfully.")
    |> redirect(to: Routes.admin_place_place_image_path(conn, :index, place_image.place_id))
  else
    {:error, %Ecto.Changeset{} = changeset} ->
      render(conn, "new.html", changeset: changeset, place_id: place_id)

    {:error, _reason} ->
      # Bug fix: this branch previously returned `nil`, which is not a valid
      # Plug.Conn and crashed the request. Re-render the upload form instead.
      changeset = Places.change_place_image(%PlaceImage{place_id: place_id})

      conn
      |> put_flash(:error, "Could not store the uploaded image.")
      |> render("new.html", changeset: changeset, place_id: place_id)
  end
end
# Shows a single place image (raises 404 via get_place_image! on a bad id).
def show(conn, %{"id" => id}) do
  render(conn, "show.html", place_image: Places.get_place_image!(id))
end
# Deletes an image (asserting success), then returns to its place's index.
def delete(conn, %{"id" => id}) do
  place_image = Places.get_place_image!(id)
  {:ok, _deleted} = Places.delete_place_image(place_image)

  conn
  |> put_flash(:info, "Place image deleted successfully.")
  |> redirect(to: Routes.admin_place_place_image_path(conn, :index, place_image.place_id))
end
end
| 35.5 | 96 | 0.656013 |
1caaf502e40912a4138c4cea31f11b3d2a76f074 | 6,830 | exs | Elixir | lib/mix/test/mix/tasks/compile.app_test.exs | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/mix/test/mix/tasks/compile.app_test.exs | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/compile.app_test.exs | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | null | null | null | Code.require_file("../../test_helper.exs", __DIR__)
defmodule Mix.Tasks.Compile.AppTest do
use MixTest.Case
# Fixture project: customizes version, a UTF-8 description, :maxT and both
# application lists, to verify they flow into the generated .app file.
defmodule CustomProject do
  def project do
    [
      app: :custom_project,
      version: "0.2.0",
      description: "Some UTF-8 description (uma descrição em UTF-8)"
    ]
  end

  def application do
    [maxT: :infinity, applications: [:example_app], extra_applications: [:logger]]
  end
end
defmodule CustomDeps do
def project do
[app: :custom_deps, version: "0.2.0", deps: deps()]
end
def application do
[extra_applications: [:logger], included_applications: [:ok9]]
end
def deps do
[
{:ok1, path: "../ok"},
{:ok2, path: "../ok", only: :prod},
{:ok3, path: "../ok", only: :dev},
{:ok4, path: "../ok", runtime: true},
{:ok5, path: "../ok", runtime: false},
{:ok6, path: "../ok", optional: true},
{:ok7, path: "../ok", optional: false},
{:ok8, path: "../ok", app: false},
{:ok9, path: "../ok"}
]
end
end
# Fixture project whose application/0 reads the process dictionary, letting
# each validation test feed it a different malformed configuration.
defmodule InvalidProject do
  def project do
    [app: :invalid_project, version: "0.3.0"]
  end

  def application do
    Process.get(:application)
  end
end
# Fixture project with a non-SemVer version ("0.3") used to trigger the
# version validation error.
defmodule InvalidVsnProject do
  def project do
    [app: :invalid_vsn_project, version: "0.3"]
  end
end
test "generates .app file when changes happen" do
Mix.Project.push(MixTest.Case.Sample)
in_fixture("no_mixfile", fn ->
Mix.Tasks.Compile.Elixir.run([])
assert Mix.Tasks.Compile.App.run([]) == :ok
properties = parse_resource_file(:sample)
assert properties[:vsn] == '0.1.0'
assert properties[:modules] == [A, B]
assert properties[:applications] == [:kernel, :stdlib, :elixir]
assert Mix.Tasks.Compile.App.run([]) == :noop
end)
end
test "uses custom application settings" do
Mix.Project.push(CustomProject)
in_fixture("no_mixfile", fn ->
Mix.Tasks.Compile.Elixir.run([])
Mix.Tasks.Compile.App.run([])
properties = parse_resource_file(:custom_project)
assert properties[:vsn] == '0.2.0'
assert properties[:maxT] == :infinity
assert properties[:applications] == [:kernel, :stdlib, :elixir, :logger, :example_app]
assert properties[:description] == 'Some UTF-8 description (uma descrição em UTF-8)'
refute Keyword.has_key?(properties, :extra_applications)
end)
end
test "automatically infers applications" do
Mix.Project.push(CustomDeps)
in_fixture("no_mixfile", fn ->
Mix.Tasks.Compile.Elixir.run([])
Mix.Tasks.Compile.App.run([])
properties = parse_resource_file(:custom_deps)
assert properties[:applications] ==
[:kernel, :stdlib, :elixir, :logger, :ok1, :ok3, :ok4, :ok6, :ok7]
end)
end
test "application properties validation" do
Mix.Project.push(InvalidProject)
in_fixture("no_mixfile", fn ->
Process.put(:application, [:not_a_keyword, applications: []])
message = "Application configuration returned from application/0 should be a keyword list"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
Process.put(:application, modules: :invalid)
message = "Application modules (:modules) should be a list of atoms, got: :invalid"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
Process.put(:application, maxT: :invalid)
message = "Application maximum time (:maxT) is not an integer or :infinity, got: :invalid"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
Process.put(:application, registered: ["invalid"])
message =
"Application registered processes (:registered) should be a list of atoms, got: [\"invalid\"]"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
Process.put(:application, extra_applications: ["invalid"])
message =
"Application extra applications (:extra_applications) should be a list of atoms, got: [\"invalid\"]"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
Process.put(:application, included_applications: ["invalid"])
message =
"Application included applications (:included_applications) should be a list of atoms, got: [\"invalid\"]"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
Process.put(:application, applications: ["invalid"])
message =
"Application applications (:applications) should be a list of atoms, got: [\"invalid\"]"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
Process.put(:application, applications: nil)
message = "Application applications (:applications) should be a list of atoms, got: nil"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
Process.put(:application, env: [:invalid])
message = "Application environment (:env) should be a keyword list, got: [:invalid]"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
Process.put(:application, mod: {Mod})
message =
"Application callback module (:mod) should be either [] or {module, start_args}, got: {Mod}"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
Process.put(:application, start_phases: [:invalid])
message =
"Application start phases (:start_phases) should be a keyword list, got: [:invalid]"
assert_raise Mix.Error, message, fn ->
Mix.Tasks.Compile.App.run([])
end
end)
end
test ".app contains description and registered (as required by systools)" do
Mix.Project.push(MixTest.Case.Sample)
in_fixture("no_mixfile", fn ->
Mix.Tasks.Compile.Elixir.run([])
assert Mix.Tasks.Compile.App.run([]) == :ok
properties = parse_resource_file(:sample)
assert properties[:registered] == []
assert properties[:description] == 'sample'
assert properties[:applications] == [:kernel, :stdlib, :elixir]
assert Mix.Tasks.Compile.App.run([]) == :noop
end)
end
# :version must be full SemVer; "0.3" is rejected with a descriptive error.
test "raise on invalid version" do
  Mix.Project.push(InvalidVsnProject)

  in_fixture("no_mixfile", fn ->
    message = "Expected :version to be a SemVer version, got: \"0.3\""

    assert_raise Mix.Error, message, fn ->
      Mix.Tasks.Compile.App.run([])
    end
  end)
end
# Reads the compiled .app resource file for `app` and returns its property
# keyword list (vsn, modules, applications, ...).
defp parse_resource_file(app) do
  {:ok, [{:application, ^app, properties}]} =
    :file.consult("_build/dev/lib/#{app}/ebin/#{app}.app")

  properties
end
end
| 29.06383 | 114 | 0.623719 |
1caaf9391fc9a2bf0cce9f03d34e528a01cae6a1 | 3,111 | ex | Elixir | lib/elixir_core/testing/partial_object_check/value_constraint.ex | noizu/Context | bd3486071a5c36f3c66087ae2488cfb7e26cdf81 | [
"MIT"
] | null | null | null | lib/elixir_core/testing/partial_object_check/value_constraint.ex | noizu/Context | bd3486071a5c36f3c66087ae2488cfb7e26cdf81 | [
"MIT"
] | null | null | null | lib/elixir_core/testing/partial_object_check/value_constraint.ex | noizu/Context | bd3486071a5c36f3c66087ae2488cfb7e26cdf81 | [
"MIT"
] | 2 | 2018-03-05T11:20:31.000Z | 2021-12-01T12:22:22.000Z | #-------------------------------------------------------------------------------
# Author: Keith Brings
# Copyright (C) 2018 Noizu Labs, Inc. All rights reserved.
#-------------------------------------------------------------------------------
defmodule Noizu.ElixirCore.PartialObjectCheck.ValueConstraint do
@type t :: %__MODULE__{
assert: :met | :unmet | :pending | :not_applicable,
constraint: nil | {:value, any} | list | fun,
}
defstruct [
assert: :pending,
constraint: nil
]
@doc """
Evaluates one constraint against the system-under-test value `sut`.

Returns `{assert_state, updated_constraint}` where `assert_state` is one of
`:met | :unmet | :pending | :not_applicable`.
"""
def perform_check(constraint, sut) do
  case constraint do
    # No constraint configured: nothing to assert.
    nil -> {:not_applicable, constraint}

    # Nested partial-object check: recurse and propagate its assert state
    # (a nil result counts as :unmet).
    v = %Noizu.ElixirCore.PartialObjectCheck{} ->
      uv = Noizu.ElixirCore.PartialObjectCheck.check(v, sut)
      {(uv && uv.assert || :unmet), uv}

    # Loose equality (==).
    {:value, v} ->
      {(v == sut && :met || :unmet), constraint}

    # Strict equality (===), e.g. distinguishes 1 from 1.0.
    {:exact, v} ->
      {(v === sut && :met || :unmet), constraint}

    # Predicate function: true -> :met, false/nil -> :unmet; any other return
    # value is passed through as the assert state itself.
    v when is_function(v, 1) ->
      c = case v.(sut) do
        true -> :met
        false -> :unmet
        nil -> :unmet
        v -> v
      end
      {c, constraint}

    # List of constraints: all are evaluated; :unmet dominates, then :pending,
    # then :met; the updated constraints are collected in order.
    v when is_list(v) ->
      Enum.reduce(v, {:not_applicable, []}, fn(c, {c_acc, v_acc}) ->
        {a, c} = perform_check(c, sut)
        ua = cond do
          c_acc == :unmet || a == :unmet -> :unmet
          c_acc == :pending || a == :pending -> :pending
          c_acc == :met || a == :met -> :met
          true -> c_acc
        end
        {ua, v_acc ++ [c]}
      end)

    # Unrecognized constraint shapes fail closed.
    _ ->
      {:unmet, constraint}
  end
end
# Checking a nil constraint struct is a no-op.
def check(nil, _sut), do: nil

# Runs the embedded constraint against `sut` and records the outcome on the
# struct: both the assert state and the possibly-updated constraint.
def check(%__MODULE__{} = this, sut) do
  {a, c} = perform_check(this.constraint, sut)
  %__MODULE__{this| assert: a, constraint: c}
end
end
if Application.get_env(:noizu_scaffolding, :inspect_partial_object, true) do
#-----------------------------------------------------------------------------
# Inspect Protocol
#-----------------------------------------------------------------------------
defimpl Inspect, for: Noizu.ElixirCore.PartialObjectCheck.ValueConstraint do
import Inspect.Algebra
@dont_expand MapSet.new([:met, :pending, :not_applicable])
# Renders the struct for Inspect: the full module name (and pretty newlines)
# only when the inspect limit is generous; below limit 100, a struct whose
# assert state is in @dont_expand collapses to just %{assert: ...}.
def inspect(entity, opts) do
  {seperator, end_seperator} = cond do
    opts.pretty && (opts.limit == :infinity || opts.limit > 200) -> {"#Noizu.ElixirCore.PartialObjectCheck.ValueConstraint<\n", "\n>"}
    opts.pretty -> {"#ValueConstraint<\n", "\n>"}
    (opts.limit == :infinity || opts.limit > 200) -> {"#Noizu.ElixirCore.PartialObjectCheck.ValueConstraint<", ">"}
    true -> {"#ValueConstraint<", ">"}
  end
  obj = cond do
    opts.limit == :infinity -> entity |> Map.from_struct()
    opts.limit > 100 -> entity |> Map.from_struct()
    true ->
      cond do
        MapSet.member?(@dont_expand, entity.assert) -> %{assert: entity.assert}
        true -> entity |> Map.from_struct()
      end
  end
  concat(["#{seperator}", to_doc(obj, opts), "#{end_seperator}"])
end # end inspect/2
end # end defimpl
end | 34.186813 | 138 | 0.497589 |
1cab0a35b36f70abd93160740bcc81a0ea08129a | 1,273 | ex | Elixir | clients/notebooks/lib/google_api/notebooks/v1/model/reset_instance_request.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/notebooks/lib/google_api/notebooks/v1/model/reset_instance_request.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/notebooks/lib/google_api/notebooks/v1/model/reset_instance_request.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Notebooks.V1.Model.ResetInstanceRequest do
  @moduledoc """
  Request for resetting a notebook instance

  ## Attributes

  """

  # Auto-generated model (see the file header) — do not hand-edit logic here.
  use GoogleApi.Gax.ModelBase

  # Empty request body: the reset call carries no fields.
  @type t :: %__MODULE__{}
end
# Poison decode hook: delegates to the model module's decode/2 (provided by
# `use GoogleApi.Gax.ModelBase`).
defimpl Poison.Decoder, for: GoogleApi.Notebooks.V1.Model.ResetInstanceRequest do
  def decode(value, options) do
    GoogleApi.Notebooks.V1.Model.ResetInstanceRequest.decode(value, options)
  end
end
# Poison encode hook: delegates to the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.Notebooks.V1.Model.ResetInstanceRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.309524 | 81 | 0.765122 |
1cab0ba0240ce6b516028b06d4fa38a6dd53db1f | 386 | ex | Elixir | lib/preview/package/supervisor.ex | leandrocp/preview | 9dd3a9bae4385dc4935e76f63328f70b9d78fe4d | [
"Apache-2.0"
] | 26 | 2021-01-25T20:30:46.000Z | 2021-12-16T08:42:35.000Z | lib/preview/package/supervisor.ex | leandrocp/preview | 9dd3a9bae4385dc4935e76f63328f70b9d78fe4d | [
"Apache-2.0"
] | 17 | 2021-01-25T18:45:43.000Z | 2021-07-23T15:15:41.000Z | lib/preview/package/supervisor.ex | leandrocp/preview | 9dd3a9bae4385dc4935e76f63328f70b9d78fe4d | [
"Apache-2.0"
] | 4 | 2021-01-25T21:32:28.000Z | 2021-07-07T12:36:19.000Z | defmodule Preview.Package.Supervisor do
use Supervisor
# Boots the supervisor process. The caller's options are ignored; the
# child list is assembled in init/1.
def start_link(_opts), do: Supervisor.start_link(__MODULE__, [], [])
# Supervises the package store and the configurable updater; with
# :one_for_one each child is restarted independently on failure.
@impl true
def init(_opts) do
  store_spec = {Preview.Package.Store, []}
  updater_spec = {updater_module(), []}

  Supervisor.init([store_spec, updater_spec], strategy: :one_for_one)
end
# Reads the updater implementation module from the :preview application
# environment at runtime — presumably so an alternative implementation can
# be configured (e.g. for tests); confirm against the config files.
defp updater_module do
  Application.get_env(:preview, :package_updater_impl)
end
end
| 24.125 | 79 | 0.715026 |
1cab1ee657d60dd2238396d9caee9795fd1487f5 | 652 | ex | Elixir | lib/mix/tasks/up.ex | kianmeng/version_tasks | 9b07113ce5155bcadae47c49a0ecaec22b1d7c68 | [
"MIT"
] | 18 | 2017-07-09T23:47:38.000Z | 2021-09-30T09:24:37.000Z | lib/mix/tasks/up.ex | kianmeng/version_tasks | 9b07113ce5155bcadae47c49a0ecaec22b1d7c68 | [
"MIT"
] | 1 | 2022-01-29T08:48:10.000Z | 2022-01-29T08:48:10.000Z | lib/mix/tasks/up.ex | kianmeng/version_tasks | 9b07113ce5155bcadae47c49a0ecaec22b1d7c68 | [
"MIT"
] | 2 | 2019-09-24T11:09:28.000Z | 2021-11-02T00:40:05.000Z | defmodule Mix.Tasks.Version.Up do
use Mix.Task
alias Mix.Tasks.Version
@shortdoc "Inc to next version, and commit changes to git"
# Bumps the version according to `mode`, then — only when the version read
# back from disk matches the requested bump — stages mix.exs and README.md
# and commits them tagged "v<version>". Always returns the version that is
# actually on disk.
def run(mode) do
  requested = Version.Inc.run(mode)
  written = Version.Current.calc(mode)

  if requested == written do
    IO.puts("Committing updates to git")
    repo = Git.new(".")
    commit_message = "v#{written}"
    {:ok, _} = Git.add(repo, ["mix.exs", "README.md"])
    {:ok, output} = Git.commit(repo, ["-m", commit_message])
    IO.puts(output)
  else
    IO.puts("Unable to update version, stopping task. Sorry we couldn't automate better :-(")
  end

  written
end
end
| 27.166667 | 95 | 0.647239 |
1cab25f047124012bef5ea7f57cc9a6a698a095b | 3,166 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/report_criteria.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/report_criteria.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/report_criteria.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Model.ReportCriteria do
  @moduledoc """
  The report criteria for a report of type "STANDARD".

  ## Attributes

  *   `activities` (*type:* `GoogleApi.DFAReporting.V33.Model.Activities.t`, *default:* `nil`) - Activity group.
  *   `customRichMediaEvents` (*type:* `GoogleApi.DFAReporting.V33.Model.CustomRichMediaEvents.t`, *default:* `nil`) - Custom Rich Media Events group.
  *   `dateRange` (*type:* `GoogleApi.DFAReporting.V33.Model.DateRange.t`, *default:* `nil`) - The date range for which this report should be run.
  *   `dimensionFilters` (*type:* `list(GoogleApi.DFAReporting.V33.Model.DimensionValue.t)`, *default:* `nil`) - The list of filters on which dimensions are filtered. Filters for different dimensions are ANDed, filters for the same dimension are grouped together and ORed.
  *   `dimensions` (*type:* `list(GoogleApi.DFAReporting.V33.Model.SortedDimension.t)`, *default:* `nil`) - The list of standard dimensions the report should include.
  *   `metricNames` (*type:* `list(String.t)`, *default:* `nil`) - The list of names of metrics the report should include.
  """

  # Struct definition plus JSON (de)serialization helpers come from Gax.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :activities => GoogleApi.DFAReporting.V33.Model.Activities.t(),
          :customRichMediaEvents => GoogleApi.DFAReporting.V33.Model.CustomRichMediaEvents.t(),
          :dateRange => GoogleApi.DFAReporting.V33.Model.DateRange.t(),
          :dimensionFilters => list(GoogleApi.DFAReporting.V33.Model.DimensionValue.t()),
          :dimensions => list(GoogleApi.DFAReporting.V33.Model.SortedDimension.t()),
          :metricNames => list(String.t())
        }

  # field/2,3 declarations drive (de)serialization; `type: :list` marks
  # repeated fields, `as:` names the nested model module.
  field(:activities, as: GoogleApi.DFAReporting.V33.Model.Activities)
  field(:customRichMediaEvents, as: GoogleApi.DFAReporting.V33.Model.CustomRichMediaEvents)
  field(:dateRange, as: GoogleApi.DFAReporting.V33.Model.DateRange)
  field(:dimensionFilters, as: GoogleApi.DFAReporting.V33.Model.DimensionValue, type: :list)
  field(:dimensions, as: GoogleApi.DFAReporting.V33.Model.SortedDimension, type: :list)
  field(:metricNames, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.ReportCriteria do
  # JSON decoding is delegated to the generated model's own decode/2.
  def decode(value, options),
    do: GoogleApi.DFAReporting.V33.Model.ReportCriteria.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.ReportCriteria do
  # Encoding is shared across all generated models via the Gax base module.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 51.064516 | 272 | 0.740998 |
1cab26a2f35cfd0f9d4f22f346ffd37f3041cc5b | 4,149 | ex | Elixir | lib/manga_providers/mangahost.ex | yashin5/mangaex | 2303c49a62527f617f17620931db3371f2b7f4bd | [
"MIT"
] | 2 | 2021-04-22T18:22:48.000Z | 2021-06-10T10:23:08.000Z | lib/manga_providers/mangahost.ex | yashin5/mangaex | 2303c49a62527f617f17620931db3371f2b7f4bd | [
"MIT"
] | null | null | null | lib/manga_providers/mangahost.ex | yashin5/mangaex | 2303c49a62527f617f17620931db3371f2b7f4bd | [
"MIT"
] | null | null | null | defmodule MangaEx.MangaProviders.Mangahost do
@moduledoc """
This module is responsible to find mangas, get chapters,
get pages and download chapter.
"""
use Tesla
alias MangaEx.Actions.Download
alias MangaEx.Actions.Find
alias MangaEx.MangaProviders.ProvidersBehaviour
alias MangaEx.Utils.ParserUtils
alias MangaEx.Util.DownloadUtils
require Logger
plug(Tesla.Middleware.Headers, [
{"User-Agent", "Mozilla/5.0 (X11; Linux x86_64; rv:76.0) Gecko/20100101 Firefox/76.0"}
])
plug(Tesla.Middleware.JSON)
@behaviour ProvidersBehaviour
@latest_url "mangahost4"
@mangahost_url "https://" <> @latest_url <> ".com/"
@find_url "find/"
# Thin wrapper over the shared download action. The trailing [] is an extra
# argument passed through unchanged — confirm its semantics against
# Download.download_pages/5.
@impl true
def download_pages(pages_url, manga_name, chapter, sleep),
  do: Download.download_pages(pages_url, manga_name, chapter, sleep, [])
# Searches the provider for `manga_name` and normalizes the results into
# {name, url} entries (errors are passed through) via get_name_and_url/1.
@impl true
def find_mangas(manga_name) do
  # The site expects a lowercased, plus-separated query string.
  query = manga_name |> String.downcase() |> String.replace(" ", "+")
  url = DownloadUtils.generate_find_url(@mangahost_url, @find_url, query)

  manga_name
  |> Find.find_mangas(url)
  |> get_name_and_url()
end
# Fetches the manga page at `manga_url` and extracts its chapter list.
# Retries up to 10 times, one second apart, on non-2xx responses or
# failed requests.
@impl true
def get_chapters(_, attempt \\ 0)

def get_chapters(manga_url, attempt) when attempt <= 10 do
  case get(manga_url) do
    {:ok, %{body: body, status: status}} when status in 200..299 ->
      body
      |> get_chapters_url(manga_url, attempt)

    _response ->
      # Transient failure: back off for a second, then try again.
      :timer.sleep(:timer.seconds(1))
      get_chapters(manga_url, attempt + 1)
  end
end

# Retry budget exhausted: log the failure and return :ok.
# NOTE(review): `@impl true` is set again on this non-first clause of
# get_chapters/2 — normally @impl is set once, before the first
# definition; confirm intended. Also note this clause returns :ok rather
# than an error tuple like the scrape helpers do.
@impl true
def get_chapters(manga_url, _) do
  Logger.error("Error getting #{manga_url}")
  :ok
end
# Fetches the chapter page at `chapter_url` and extracts its image URLs.
# Retries up to 10 times, one second apart, on non-2xx responses or
# failed requests.
@impl true
def get_pages(_, _, attempt \\ 0)

def get_pages(chapter_url, manga_name, attempt) when attempt <= 10 do
  case get(chapter_url) do
    {:ok, %{body: body, status: status}} when status in 200..299 ->
      do_get_pages(body, manga_name, chapter_url, attempt)

    _response ->
      # Transient failure: back off for a second, then try again.
      :timer.sleep(:timer.seconds(1))
      get_pages(chapter_url, manga_name, attempt + 1)
  end
end

# Retry budget exhausted: log the failure and return :ok.
def get_pages(chapter_url, manga_name, _) do
  Logger.error("Error getting #{manga_name} in #{chapter_url}")
  :ok
end
# Parses the chapter HTML and returns `{encoded_src, index}` tuples, one
# per <img> inside ".image-content". An empty scrape retries get_pages/3
# until the attempt budget runs out, after which it errors.
defp do_get_pages(body, manga_name, chapter_url, attempt) do
  # Make sure the per-manga download directory exists up front.
  DownloadUtils.verify_path_and_mkdir(manga_name)

  {_, document} = Floki.parse_document(body)

  pages =
    document
    |> Floki.find(".image-content")
    |> Floki.find("img")
    |> Enum.map(fn img ->
      img |> Floki.attribute("src") |> List.first() |> URI.encode()
    end)
    |> Enum.with_index()

  cond do
    pages == [] and attempt < 10 ->
      # Possibly a transient empty render: wait a second and retry.
      :timer.sleep(:timer.seconds(1))
      get_pages(chapter_url, manga_name, attempt + 1)

    pages == [] ->
      {:error, :pages_not_found}

    true ->
      pages
  end
end
# Extracts `{title, href}` pairs from the search-result HTML and hands
# them to the shared Find normalizer.
defp get_name_and_url(<<body::bitstring>>) do
  {_, document} = Floki.parse_document(body)

  document
  |> Floki.find(".entry-title")
  |> Floki.find("a")
  |> Enum.map(fn anchor ->
    title = anchor |> Floki.attribute("title") |> List.last()
    href = anchor |> Floki.attribute("href") |> List.last()
    {title, href}
  end)
  |> Find.handle_get_name_and_url()
end

# Non-binary input (e.g. an error tuple from Find) is passed through as-is.
defp get_name_and_url(error), do: error
# Scrapes the chapter anchors out of the manga page HTML and returns the
# parsed chapter list. An empty scrape triggers another get_chapters/2
# attempt; once the retry budget is spent it returns
# {:error, :manga_not_found}.
defp get_chapters_url(body, manga_url, attempt) do
  body
  |> Floki.parse_document()
  |> elem(1)
  |> Floki.find(".chapters")
  |> Floki.find(".tags")
  |> Floki.find("a")
  |> Enum.map(fn element ->
    chapter_url =
      element
      |> Floki.attribute("href")
      |> List.last()
      |> URI.encode()

    # The chapter number is the last path segment of the chapter URL.
    chapter_number =
      chapter_url
      |> String.split("/")
      |> List.last()

    {chapter_url, chapter_number}
  end)
  |> case do
    chapters when chapters == [] and attempt < 10 ->
      get_chapters(manga_url, attempt + 1)

    # Fix: this guard was `attempt > 10`, which let an empty list at
    # attempt == 10 slip through to generate_chapter_lists/1 instead of
    # erroring — inconsistent with do_get_pages/4, which errors as soon
    # as its retry budget is exhausted.
    chapters when chapters == [] and attempt >= 10 ->
      {:error, :manga_not_found}

    chapters ->
      ParserUtils.generate_chapter_lists(chapters)
  end
end
# Joins a manga URL and a chapter segment with a "/" separator.
def generate_chapter_url(manga_url, chapter) do
  "#{manga_url}/#{chapter}"
end
end
| 23.982659 | 90 | 0.620872 |
1cab514bb7953c6282ff4df51ace48b6c6c7d547 | 3,617 | exs | Elixir | mix.exs | patrickbiermann/pow | ebc2ac7d6e15961dac4be38091ff75dae0d26554 | [
"MIT"
] | null | null | null | mix.exs | patrickbiermann/pow | ebc2ac7d6e15961dac4be38091ff75dae0d26554 | [
"MIT"
] | null | null | null | mix.exs | patrickbiermann/pow | ebc2ac7d6e15961dac4be38091ff75dae0d26554 | [
"MIT"
] | null | null | null | defmodule Pow.MixProject do
use Mix.Project
@version "1.0.24"
# Mix project configuration: package identity, compiler set-up, and the
# hooks into deps/0, package/0 and docs/0 below.
def project do
  [
    app: :pow,
    version: @version,
    elixir: "~> 1.7",
    # Test-only support files are compiled in addition to lib/ (see
    # elixirc_paths/1).
    elixirc_paths: elixirc_paths(Mix.env()),
    start_permanent: Mix.env() == :prod,
    compilers: [:phoenix] ++ Mix.compilers(),
    deps: deps(),
    # :mnesia is referenced conditionally, so exclude it from xref checks.
    xref: [exclude: [:mnesia]],

    # Hex
    description: "Robust user authentication solution",
    package: package(),

    # Docs
    name: "Pow",
    docs: docs()
  ]
end
# OTP application configuration: Pow.Application is the supervision-tree
# entry point.
def application do
  [
    extra_applications: [:logger],
    mod: {Pow.Application, []}
  ]
end
# Dependency list. Plug is optional for library consumers; everything
# scoped with `only:` is a dev/test tool or test database driver.
defp deps do
  [
    {:ecto, "~> 2.2 or ~> 3.0"},
    {:phoenix, ">= 1.3.0 and < 1.6.0"},
    {:phoenix_html, ">= 2.0.0 and <= 3.0.0"},
    {:plug, ">= 1.5.0 and < 2.0.0", optional: true},

    {:phoenix_ecto, "~> 4.2", only: [:dev, :test]},
    {:credo, "~> 1.5", only: [:dev, :test]},
    # Credo requires jason to exist also in :dev
    {:jason, "~> 1.2", only: [:dev, :test]},

    {:ex_doc, "~> 0.23", only: :dev},

    {:ecto_sql, "~> 3.5", only: [:test]},
    {:plug_cowboy, "~> 2.4", only: [:test]},
    {:postgrex, "~> 0.15", only: [:test]}
  ]
end
# Compilation roots per Mix environment: the :test env additionally
# compiles the shared test helpers under test/support.
defp elixirc_paths(:test), do: elixirc_paths(:default) ++ ["test/support"]
defp elixirc_paths(_), do: ["lib"]
# Hex package metadata: maintainers, license, repo link and the file set
# published to Hex.
defp package do
  [
    maintainers: ["Dan Shultzer"],
    licenses: ["MIT"],
    links: %{github: "https://github.com/danschultzer/pow"},
    files: ~w(lib LICENSE mix.exs README.md)
  ]
end
# ExDoc configuration: landing page, extra guide pages, and how modules
# and extras are grouped in the generated sidebar. The order of `extras`
# determines their order in the docs navigation.
defp docs do
  [
    markdown_processor: ExDoc.Pow.Markdown,
    source_ref: "v#{@version}",
    main: "README",
    canonical: "http://hexdocs.pm/pow",
    source_url: "https://github.com/danschultzer/pow",
    logo: "assets/logo.svg",
    assets: "assets",
    extras: [
      "README.md": [filename: "README"],
      "CONTRIBUTING.md": [filename: "CONTRIBUTING"],
      "CHANGELOG.md": [filename: "CHANGELOG"],
      "guides/why_pow.md": [],
      "guides/production_checklist.md": [],
      "guides/security_practices.md": [],
      "guides/coherence_migration.md": [],
      "guides/configuring_mailer.md": [],
      "guides/user_roles.md": [],
      "guides/lock_users.md": [],
      "guides/custom_controllers.md": [],
      "guides/disable_registration.md": [],
      "guides/redis_cache_store_backend.md": [],
      "guides/umbrella_project.md": [],
      "guides/multitenancy.md": [],
      "guides/sync_user.md": [],
      "guides/api.md": [],
      # Extension READMEs are published under pow_* filenames.
      "lib/extensions/email_confirmation/README.md": [filename: "pow_email_confirmation"],
      "lib/extensions/invitation/README.md": [filename: "pow_invitation"],
      "lib/extensions/persistent_session/README.md": [filename: "pow_persistent_session"],
      "lib/extensions/reset_password/README.md": [filename: "pow_reset_password"]
    ],
    # Sidebar module groups, matched by module-name regex.
    groups_for_modules: [
      Plug: ~r/^Pow.Plug/,
      Ecto: ~r/^Pow.Ecto/,
      Phoenix: ~r/^Pow.Phoenix/,
      "Plug extension": ~r/^Pow.Extension.Plug/,
      "Ecto extension": ~r/^Pow.Extension.Ecto/,
      "Phoenix extension": ~r/^Pow.Extension.Phoenix/,
      "Store handling": ~r/^Pow.Store/,
      "Mix helpers": ~r/^Mix.Pow/,
      "PowEmailConfirmation": ~r/^PowEmailConfirmation/,
      "PowPersistentSession": ~r/^PowPersistentSession/,
      "PowResetPassword": ~r/^PowResetPassword/,
      "PowInvitation": ~r/^PowInvitation/
    ],
    groups_for_extras: [
      Extensions: Path.wildcard("lib/extensions/*/README.md"),
      Guides: Path.wildcard("guides/*.md")
    ]
  ]
end
end
| 30.652542 | 92 | 0.560133 |
1cab6120851a70fb5f628fdcba561ba8f3ef8363 | 158 | ex | Elixir | testData/org/elixir_lang/parser_definition/at_bracket_operation_parsing_test_case/UnknownBaseWholeNumber.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/at_bracket_operation_parsing_test_case/UnknownBaseWholeNumber.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/at_bracket_operation_parsing_test_case/UnknownBaseWholeNumber.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | @0z1[key: value]
@0z1 [key: value]
@0z1[()]
@0z1 [()]
@0z1[matched_expression]
@0z1 [matched_expression]
@0z1[matched_expression,]
@0z1 [matched_expression,]
| 17.555556 | 26 | 0.708861 |
1cab7868e4f5b4cdde7a9f89929f9f1510edfffc | 18,017 | exs | Elixir | lib/elixir/test/elixir/kernel_test.exs | tsloughter/elixir | 44a9f505c14c58878010cb07349802f99ca225ac | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel_test.exs | tsloughter/elixir | 44a9f505c14c58878010cb07349802f99ca225ac | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel_test.exs | tsloughter/elixir | 44a9f505c14c58878010cb07349802f99ca225ac | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
# Test suite for Kernel's functions, macros and special forms. Many asserts
# pin exact compiler/runtime behavior (including legacy `()`-as-nil syntax),
# so the expressions below are preserved verbatim.
defmodule KernelTest do
  use ExUnit.Case, async: true

  doctest Kernel

  test "=~/2" do
    # Left side may be a string matched against a regex or a plain pattern.
    assert ("abcd" =~ ~r/c(d)/) == true
    assert ("abcd" =~ ~r/e/) == false

    assert ("abcd" =~ ~R/c(d)/) == true
    assert ("abcd" =~ ~R/e/) == false

    string = "^ab+cd*$"
    assert (string =~ "ab+") == true
    assert (string =~ "bb") == false

    assert ("abcd" =~ ~r//) == true
    assert ("abcd" =~ ~R//) == true
    assert ("abcd" =~ "") == true

    assert ("" =~ ~r//) == true
    assert ("" =~ ~R//) == true
    assert ("" =~ "") == true

    assert ("" =~ "abcd") == false
    assert ("" =~ ~r/abcd/) == false
    assert ("" =~ ~R/abcd/) == false

    # Non-binary left sides and non-binary/non-regex right sides raise.
    assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn ->
      1234 =~ "hello"
    end

    assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn ->
      1234 =~ ~r"hello"
    end

    assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn ->
      1234 =~ ~R"hello"
    end

    assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn ->
      ~r"hello" =~ "hello"
    end

    assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn ->
      ~r"hello" =~ ~r"hello"
    end

    assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn ->
      :abcd =~ ~r//
    end

    assert_raise FunctionClauseError, "no function clause matching in Kernel.=~/2", fn ->
      :abcd =~ ""
    end

    assert_raise FunctionClauseError, "no function clause matching in Regex.match?/2", fn ->
      "abcd" =~ nil
    end

    assert_raise FunctionClauseError, "no function clause matching in Regex.match?/2", fn ->
      "abcd" =~ :abcd
    end
  end

  test "^" do
    x = List.first([1])

    # The pin matches the OLD binding of x while {x, _} rebinds it, so
    # {2, ^x} against {2, 2} must fail.
    assert_raise MatchError, fn ->
      {x, ^x} = {2, 2}
      x
    end
  end

  test "match?/2" do
    assert match?(_, List.first(1)) == true
    # match?/2 must not leak bindings into the caller's scope.
    assert binding() == []

    a = List.first([0])
    assert match?(b when b > a, 1) == true
    assert binding() == [a: 0]

    assert match?(b when b > a, -1) == false
    assert binding() == [a: 0]
  end

  test "in/2" do
    assert 2 in [1, 2, 3]
    assert 2 in 1..3

    refute 4 in [1, 2, 3]
    refute 4 in 1..3

    list = [1, 2, 3]
    assert 2 in list
    refute 4 in list
  end

  # Fixture functions exercising in/2 inside guards, with both literal
  # and module-attribute right-hand sides.
  @at_list [4, 5]
  @at_range 6..8
  def fun_in(x) when x in [0], do: :list
  def fun_in(x) when x in 1..3, do: :range
  def fun_in(x) when x in @at_list, do: :at_list
  def fun_in(x) when x in @at_range, do: :at_range
  def fun_in(_), do: :none

  test "in/2 in function guard" do
    assert fun_in(0) == :list
    assert fun_in(1) == :range
    assert fun_in(2) == :range
    assert fun_in(3) == :range
    assert fun_in(5) == :at_list
    assert fun_in(6) == :at_range
    assert fun_in(7) == :at_range
    assert fun_in(8) == :at_range

    # Floats never match range membership in guards.
    assert fun_in(0.0) == :none
    assert fun_in(1.0) == :none
    assert fun_in(2.0) == :none
    assert fun_in(3.0) == :none
    assert fun_in(6.0) == :none
    assert fun_in(7.0) == :none
    assert fun_in(8.0) == :none
  end

  # Range boundaries supplied as runtime variables inside a guard.
  def fun_in(x, y, z) when x in y..z, do: true
  def fun_in(_x, _y, _z), do: false

  test "in/2 in dynamic function guard" do
    assert fun_in(1, 1, 3)
    assert fun_in(2, 1, 3)
    assert fun_in(3, 1, 3)

    assert fun_in(1, 3, 1)
    assert fun_in(2, 3, 1)
    assert fun_in(3, 3, 1)

    refute fun_in(0, 1, 3)
    refute fun_in(4, 1, 3)
    refute fun_in(0, 3, 1)
    refute fun_in(4, 3, 1)

    refute fun_in(2, 1.0, 3)
    refute fun_in(2, 1, 3.0)
    refute fun_in(2.0, 1, 3)
  end

  # Expands to a case whose clause guard uses in/2, so the test below
  # exercises in/2 inside case guards specifically.
  defmacrop case_in(x, y) do
    quote do
      case 0 do
        _ when unquote(x) in unquote(y) -> true
        _ -> false
      end
    end
  end

  test "in/2 in case guard" do
    assert case_in(1, [1, 2, 3]) == true
    assert case_in(1, 1..3) == true
    assert case_in(2, 1..3) == true
    assert case_in(3, 1..3) == true
    assert case_in(-3, -1..-3) == true
  end

  test "in/2 in module body" do
    defmodule In do
      @foo [:a, :b]
      true = :a in @foo
    end
  end

  @bitstring <<"foo", 16::4>>

  test "bitstring attribute" do
    assert @bitstring == <<"foo", 16::4>>
  end

  test "paren as nil" do
    # Asserts that empty parens `()` evaluate to nil wherever a value is
    # expected (legacy syntax).
    assert is_nil(()) == true
    assert (_ = (); ();) == nil
    assert [ 1, (), 3 ] == [1, nil, 3 ]
    assert [do: ()] == [do: nil]
    assert {1, (), 3} == {1, nil, 3}
    assert (Kernel.&& nil, ()) == nil
    assert (Kernel.&& nil, ()) == nil
    assert (() && ()) == nil
    assert (if(() && ()) do
      :ok
    else
      :error
    end) == :error
  end

  test "__info__(:macros)" do
    assert {:in, 2} in Kernel.__info__(:macros)
  end

  test "__info__(:functions)" do
    refute {:__info__, 1} in Kernel.__info__(:functions)
  end

  test "__info__(others)" do
    assert Kernel.__info__(:module) == Kernel
    assert is_list Kernel.__info__(:compile)
    assert is_list Kernel.__info__(:attributes)
    assert is_list Kernel.__info__(:exports)
  end

  # Fixtures: one exported function, one private one.
  def exported?, do: not_exported?
  defp not_exported?, do: true

  test "function_exported?/3" do
    assert function_exported?(__MODULE__, :exported?, 0)
    refute function_exported?(__MODULE__, :not_exported?, 0)
  end

  test "macro_exported?/3" do
    assert macro_exported?(Kernel, :in, 2) == true
    assert macro_exported?(Kernel, :def, 1) == true
    assert macro_exported?(Kernel, :def, 2) == true
    assert macro_exported?(Kernel, :def, 3) == false
    assert macro_exported?(Kernel, :no_such_macro, 2) == false
  end

  test "apply/3 and apply/2" do
    assert apply(Enum, :reverse, [[1|[2, 3]]]) == [3, 2, 1]
    assert apply(fn x -> x * 2 end, [2]) == 4
  end

  test "binding/0 and binding/1" do
    x = 1
    assert binding() == [x: 1]

    x = 2
    assert binding() == [x: 2]

    y = 3
    assert binding() == [x: 2, y: 3]

    # Bindings in another context (:foo) are only visible via binding/1.
    var!(x, :foo) = 4
    assert binding() == [x: 2, y: 3]
    assert binding(:foo) == [x: 4]
  end

  test "binding/0 doesn't warn on underscored vars" do
    _x = 1
    assert binding() == [_x: 1]
  end

  # Fixture structs for the struct/2 and struct!/2 tests below.
  defmodule User do
    assert is_map defstruct name: "john"
  end

  defmodule UserTuple do
    def __struct__({ UserTuple, :ok }) do
      %User{}
    end
  end

  test "struct/1 and struct/2" do
    assert struct(User) == %User{name: "john"}

    user = struct(User, name: "meg")
    assert user == %User{name: "meg"}

    # struct/2 silently ignores unknown keys and the __struct__ key.
    assert struct(user, unknown: "key") == user
    assert struct(user, %{name: "john"}) == %User{name: "john"}
    assert struct(user, name: "other", __struct__: Post) == %User{name: "other"}
  end

  test "struct!/1 and struct!/2" do
    assert struct!(User) == %User{name: "john"}

    user = struct!(User, name: "meg")
    assert user == %User{name: "meg"}

    # Unlike struct/2, unknown keys raise.
    assert_raise KeyError, fn ->
      struct!(user, unknown: "key")
    end

    assert struct!(user, %{name: "john"}) == %User{name: "john"}
    assert struct!(user, name: "other", __struct__: Post) == %User{name: "other"}
  end

  defdelegate my_flatten(list), to: List, as: :flatten

  # Delegate names can be built at compile time via unquote.
  dynamic = :dynamic_flatten
  defdelegate unquote(dynamic)(list), to: List, as: :flatten

  test "defdelegate/2" do
    assert my_flatten([[1]]) == [1]
  end

  test "defdelegate/2 with unquote" do
    assert dynamic_flatten([[1]]) == [1]
  end

  test "defdelegate/2 raises with non-variable arguments" do
    msg = "defdelegate/2 only accepts function parameters, got: 1"

    assert_raise ArgumentError, msg, fn -> Code.eval_string("""
      defmodule IntDelegate do
        defdelegate foo(1), to: List
      end
      """, [], __ENV__)
    end

    assert_raise ArgumentError, msg, fn -> Code.eval_string("""
      defmodule IntOptionDelegate do
        defdelegate foo(1 \\\\ 1), to: List
      end
      """, [], __ENV__)
    end
  end

  defdelegate my_reverse(list \\ []), to: :lists, as: :reverse
  defdelegate my_get(map \\ %{}, key, default \\ ""), to: Map, as: :get

  test "defdelegate/2 accepts variable with optional arguments" do
    assert my_reverse() == []
    assert my_reverse([1, 2, 3]) == [3, 2, 1]

    assert my_get("foo") == ""
    assert my_get(%{}, "foo") == ""
    assert my_get(%{"foo" => "bar"}, "foo") == "bar"
    assert my_get(%{}, "foo", "not_found") == "not_found"
  end

  test "get_in/2" do
    users = %{"john" => %{age: 27}, "meg" => %{age: 23}}
    assert get_in(users, ["john", :age]) == 27
    assert get_in(users, ["dave", :age]) == nil
    assert get_in(nil, ["john", :age]) == nil

    # by_index/1 (defined below) is a function-based accessor.
    map = %{"fruits" => ["banana", "apple", "orange"]}
    assert get_in(map, ["fruits", by_index(0)]) == "banana"
    assert get_in(map, ["fruits", by_index(3)]) == nil
    assert get_in(map, ["unknown", by_index(3)]) == :oops

    assert_raise FunctionClauseError, fn ->
      get_in(users, [])
    end
  end

  test "put_in/3" do
    users = %{"john" => %{age: 27}, "meg" => %{age: 23}}

    assert put_in(users, ["john", :age], 28) ==
           %{"john" => %{age: 28}, "meg" => %{age: 23}}

    assert_raise FunctionClauseError, fn ->
      put_in(users, [], %{})
    end

    assert_raise ArgumentError, "could not put/update key \"john\" on a nil value", fn ->
      put_in(nil, ["john", :age], 28)
    end
  end

  test "put_in/2" do
    users = %{"john" => %{age: 27}, "meg" => %{age: 23}}

    assert put_in(users["john"][:age], 28) ==
           %{"john" => %{age: 28}, "meg" => %{age: 23}}

    assert put_in(users["john"].age, 28) ==
           %{"john" => %{age: 28}, "meg" => %{age: 23}}

    # Dot access on nil raises BadMapError; a missing struct key raises
    # KeyError.
    assert_raise BadMapError, fn ->
      put_in(users["dave"].age, 19)
    end

    assert_raise KeyError, fn ->
      put_in(users["meg"].unknown, "value")
    end
  end

  test "update_in/3" do
    users = %{"john" => %{age: 27}, "meg" => %{age: 23}}

    assert update_in(users, ["john", :age], &(&1 + 1)) ==
           %{"john" => %{age: 28}, "meg" => %{age: 23}}

    assert_raise FunctionClauseError, fn ->
      update_in(users, [], fn _ -> %{} end)
    end

    assert_raise ArgumentError, "could not put/update key \"john\" on a nil value", fn ->
      update_in(nil, ["john", :age], fn _ -> %{} end)
    end
  end

  test "update_in/2" do
    users = %{"john" => %{age: 27}, "meg" => %{age: 23}}

    assert update_in(users["john"][:age], &(&1 + 1)) ==
           %{"john" => %{age: 28}, "meg" => %{age: 23}}

    assert update_in(users["john"].age, &(&1 + 1)) ==
           %{"john" => %{age: 28}, "meg" => %{age: 23}}

    assert_raise BadMapError, fn ->
      update_in(users["dave"].age, &(&1 + 1))
    end

    # NOTE(review): this clause calls put_in although the surrounding test
    # targets update_in/2 — confirm whether update_in was intended.
    assert_raise KeyError, fn ->
      put_in(users["meg"].unknown, &(&1 + 1))
    end
  end

  test "get_and_update_in/3" do
    users = %{"john" => %{age: 27}, "meg" => %{age: 23}}

    assert get_and_update_in(users, ["john", :age], &{&1, &1 + 1}) ==
           {27, %{"john" => %{age: 28}, "meg" => %{age: 23}}}

    map = %{"fruits" => ["banana", "apple", "orange"]}

    assert get_and_update_in(map, ["fruits", by_index(0)], &{&1, String.reverse(&1)}) ==
           {"banana", %{"fruits" => ["ananab", "apple", "orange"]}}

    assert get_and_update_in(map, ["fruits", by_index(3)], &{&1, &1}) ==
           {nil, %{"fruits" => ["banana", "apple", "orange"]}}

    assert get_and_update_in(map, ["unknown", by_index(3)], &{&1, []}) ==
           {:oops, %{"fruits" => ["banana", "apple", "orange"], "unknown" => []}}

    assert_raise FunctionClauseError, fn ->
      update_in(users, [], fn _ -> %{} end)
    end
  end

  test "get_and_update_in/2" do
    users = %{"john" => %{age: 27}, "meg" => %{age: 23}}

    assert get_and_update_in(users["john"].age, &{&1, &1 + 1}) ==
           {27, %{"john" => %{age: 28}, "meg" => %{age: 23}}}

    assert_raise ArgumentError, "could not put/update key \"john\" on a nil value", fn ->
      get_and_update_in(nil["john"][:age], fn nil -> {:ok, 28} end)
    end

    assert_raise BadMapError, fn ->
      get_and_update_in(users["dave"].age, &{&1, &1 + 1})
    end

    assert_raise KeyError, fn ->
      get_and_update_in(users["meg"].unknown, &{&1, &1 + 1})
    end
  end

  test "paths" do
    map = empty_map()

    # Paths may start with a variable, a local call, or a remote call.
    assert put_in(map[:foo], "bar") == %{foo: "bar"}
    assert put_in(empty_map()[:foo], "bar") == %{foo: "bar"}
    assert put_in(KernelTest.empty_map()[:foo], "bar") == %{foo: "bar"}
    assert put_in(__MODULE__.empty_map()[:foo], "bar") == %{foo: "bar"}

    assert_raise ArgumentError, ~r"access at least one element,", fn ->
      Code.eval_quoted(quote(do: put_in(map, "bar")), [])
    end

    assert_raise ArgumentError, ~r"must start with a variable, local or remote call", fn ->
      Code.eval_quoted(quote(do: put_in(map.foo(1, 2)[:bar], "baz")), [])
    end
  end

  def empty_map, do: %{}

  # Access-protocol style accessor fixture: reads/updates the list element
  # at `index`, and yields :oops when the container is nil.
  def by_index(index) do
    fn
      _, nil, next ->
        next.(:oops)

      :get, data, next ->
        next.(Enum.at(data, index))

      :get_and_update, data, next ->
        {get, update} = next.(Enum.at(data, index))
        {get, List.replace_at(data, index, update)}
    end
  end

  test "calling if with invalid keys" do
    error_message = "invalid or duplicate keys for if, only \"do\" " <>
                    "and an optional \"else\" are permitted"

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("if true, foo: 7")
    end

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("if true, do: 6, boo: 7")
    end

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("if true, do: 7, do: 6")
    end

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("if true, do: 8, else: 7, else: 6")
    end

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("if true, else: 6")
    end

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("if true, []")
    end
  end

  test "calling unless with invalid keys" do
    error_message = "invalid or duplicate keys for unless, only \"do\" " <>
                    "and an optional \"else\" are permitted"

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("unless true, foo: 7")
    end

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("unless true, do: 6, boo: 7")
    end

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("unless true, do: 7, do: 6")
    end

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("unless true, do: 8, else: 7, else: 6")
    end

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("unless true, else: 6")
    end

    assert_raise ArgumentError, error_message, fn ->
      Code.eval_string("unless true, []")
    end
  end

  # Tests for the |> operator.
  defmodule PipelineOp do
    use ExUnit.Case, async: true

    test "simple" do
      assert [1, [2], 3] |> List.flatten == [1, 2, 3]
    end

    test "nested pipelines" do
      assert [1, [2], 3] |> List.flatten |> Enum.map(&(&1 * 2)) == [2, 4, 6]
    end

    test "local call" do
      assert [1, [2], 3] |> List.flatten |> local == [2, 4, 6]
    end

    test "pipeline with capture" do
      assert Enum.map([1, 2, 3], &(&1 |> twice |> twice)) == [4, 8, 12]
    end

    test "anonymous functions" do
      assert 1 |> (&(&1*2)).() == 2
      assert [1] |> (&hd(&1)).() == 1
    end

    defp twice(a), do: a * 2

    defp local(list) do
      Enum.map(list, &(&1 * 2))
    end
  end

  # Tests for destructure/2 (match with padding/truncation semantics).
  defmodule Destructure do
    use ExUnit.Case, async: true

    test "less args" do
      destructure [x, y, z], [1, 2, 3, 4, 5]
      assert x == 1
      assert y == 2
      assert z == 3
    end

    test "more args" do
      destructure [a, b, c, d, e], [1, 2, 3]
      assert a == 1
      assert b == 2
      assert c == 3
      assert d == nil
      assert e == nil
    end

    test "equal args" do
      destructure [a, b, c], [1, 2, 3]
      assert a == 1
      assert b == 2
      assert c == 3
    end

    test "no values" do
      destructure [a, b, c], []
      assert a == nil
      assert b == nil
      assert c == nil
    end

    test "works as match" do
      destructure [1, b, _], [1, 2, 3]
      assert b == 2
    end

    test "nil values" do
      destructure [a, b, c], a_nil
      assert a == nil
      assert b == nil
      assert c == nil
    end

    test "invalid match" do
      a = List.first([3])

      assert_raise MatchError, fn ->
        destructure [^a, _b, _c], a_list
      end
    end

    defp a_list, do: [1, 2, 3]
    defp a_nil, do: nil
  end

  # Tests for use/2, including multi-alias `use Mod.{A, B}` expansion.
  defmodule UseMacro do
    use ExUnit.Case, async: true

    import ExUnit.CaptureIO

    # Sample __using__ implementations that print so the expansion order
    # can be observed via capture_io.
    defmodule SampleA do
      defmacro __using__(opts) do
        prefix = Keyword.get(opts, :prefix, "")
        IO.puts(prefix <> "A")
      end
    end

    defmodule SampleB do
      defmacro __using__(_) do
        IO.puts("B")
      end
    end

    test "invalid argument is literal" do
      message = "invalid arguments for use, expected a compile time atom or alias, got: 42"

      assert_raise ArgumentError, message, fn ->
        Code.eval_string("use 42")
      end
    end

    test "invalid argument is variable" do
      message = "invalid arguments for use, expected a compile time atom or alias, got: variable"

      assert_raise ArgumentError, message, fn ->
        Code.eval_string("use variable")
      end
    end

    test "multi-call" do
      assert capture_io(fn ->
        Code.eval_string("use UseMacro.{SampleA, SampleB,}", [], __ENV__)
      end) == "A\nB\n"
    end

    test "multi-call with options" do
      assert capture_io(fn ->
        Code.eval_string(~S|use UseMacro.{SampleA}, prefix: "-"|, [], __ENV__)
      end) == "-A\n"
    end

    test "multi-call with unquote" do
      assert capture_io(fn ->
        Code.eval_string("""
        defmodule TestMod do
          def main() do
            use UseMacro.{SampleB, unquote(:SampleA)}
          end
        end
        """, [], __ENV__)
      end) == "B\nA\n"
    after
      # Clean up the module compiled inside the test.
      :code.purge(UseMacro.TestMod)
      :code.delete(UseMacro.TestMod)
    end
  end
end
| 26.30219 | 97 | 0.561636 |
1cab953847b014b74617eeb254cd6cd758d1cb36 | 4,455 | exs | Elixir | test/xpeg_test.exs | zevv/xpeg | 5a83293d51f46a616aa1df694b9b020a5e43026e | [
"MIT"
] | 13 | 2021-12-29T16:29:33.000Z | 2022-01-22T15:24:23.000Z | test/xpeg_test.exs | zevv/xpeg | 5a83293d51f46a616aa1df694b9b020a5e43026e | [
"MIT"
] | null | null | null | test/xpeg_test.exs | zevv/xpeg | 5a83293d51f46a616aa1df694b9b020a5e43026e | [
"MIT"
] | null | null | null | defmodule XpegTest do
  use ExUnit.Case
  doctest Xpeg
  import Xpeg
  # Matches pattern `p` against string `s` and asserts both the result
  # tag (:ok / :error) and the captures produced by the match.
  def run(p, s, exp_result \\ :ok, exp_captures \\ []) do
    r = match(p, s)
    assert(r.result == exp_result)
    assert(r.captures == exp_captures)
  end
  # An integer pattern N consumes exactly N input characters.
  test "any" do
    run(patt(0 * "a"), "a")
    run(patt(1), "a")
    run(patt(2), "a", :error)
    run(patt(2), "aa")
  end
  # Literal strings/charlists match themselves, position by position;
  # any mismatching position fails the whole literal.
  test "chr" do
    run(patt("a"), "a")
    run(patt("a"), "b", :error)
    run(patt("abc"), "abc")
    run(patt('abc'), "abc")
    run(patt("abc"), "-bcd", :error)
    run(patt("abc"), "a-cd", :error)
    run(patt("abc"), "ab-d", :error)
    run(patt("abc"), "abc-", :ok)
  end
  # Character sets in {...}: single chars, unions, ranges, and mixes.
  test "set" do
    run(patt({'a'}), "a")
    run(patt({'b'}), "a", :error)
    run(patt({'a', 'b'}), "a")
    run(patt({'a', 'b'}), "b")
    run(patt({'a', 'b'}), "c", :error)
    run(patt({'a', 'b', 'c'}), "a")
    run(patt({'a', 'b', 'c'}), "b")
    run(patt({'a', 'b', 'c'}), "c")
    run(patt({'a', 'b', 'c'}), "d", :error)
    run(patt({'a'..'c'}), "a")
    run(patt({'a'..'c'}), "b")
    run(patt({'a'..'c'}), "c")
    run(patt({'a'..'c'}), "d", :error)
    run(patt({'a'..'c', 'd'}), "a")
    run(patt({'a'..'c', 'd'}), "b")
    run(patt({'a'..'c', 'd'}), "c")
    run(patt({'a'..'c', 'd'}), "d")
    run(patt({'a', 'b'..'d'}), "a")
    run(patt({'a', 'b'..'d'}), "b")
    run(patt({'a', 'b'..'d'}), "c")
    run(patt({'a', 'b'..'d'}), "d")
    run(patt({'a', 'b'..'c', 'd'}), "a")
    run(patt({'a', 'b'..'c', 'd'}), "b")
    run(patt({'a', 'b'..'c', 'd'}), "c")
    run(patt({'a', 'b'..'c', 'd'}), "d")
    run(patt({'a'..'c', 'e'..'g'}), "a")
    run(patt({'a'..'c', 'e'..'g'}), "b")
    run(patt({'a'..'c', 'e'..'g'}), "c")
    run(patt({'a'..'c', 'e'..'g'}), "d", :error)
    run(patt({'a'..'c', 'e'..'g'}), "e")
    run(patt({'a'..'c', 'e'..'g'}), "f")
    run(patt({'a'..'c', 'e'..'g'}), "g")
  end
  # opt(p) matches p zero or one time — but not two.
  test "zero-or-one" do
    run(patt("a" * opt("b") * "c"), "ac")
    run(patt("a" * opt("b") * "c"), "abc")
    run(patt("a" * opt("b") * "c"), "abbc", :error)
  end
  # star(p) matches p any number of times, including zero.
  test "zero-or-more" do
    run(patt(star('a')), "aaaa")
    run(patt(star('a') * 'b'), "aaaab")
    run(patt(star('a') * 'b'), "bbbbb")
    run(patt(star('a') * 'b'), "caaab", :error)
  end
  # +p requires at least one occurrence of p.
  test "one-or-more" do
    run(patt(+'a' * 'b'), "aaaab")
    run(patt(+'a' * 'b'), "ab")
    run(patt(+'a' * 'b'), "b", :error)
  end
  # !p succeeds only when p does NOT match at the current position.
  test "not-predicate" do
    run(patt('a' * !'b'), "ac")
    run(patt('a' * !'b'), "ab", :error)
  end
  # &p is a look-ahead: it must match but consumes no input, hence the
  # match_len of 0 asserted below.
  test "and-predicate" do
    run(patt(&"abc"), "abc")
    run(patt(&"abc"), "abd", :error)
    p = patt(&"abc")
    r = match(p, "abc")
    assert r.match_len == 0
  end
  # p[n] matches n repetitions; unconsumed trailing input may remain.
  test "[n]: count" do
    run(patt(1[3]), "aaaa")
    run(patt(1[4]), "aaaa")
    run(patt(1[5]), "aaaa", :error)
  end
  # p[m..n] matches between m and n repetitions; the trailing !1 anchors
  # end-of-input so "too many" repetitions fail.
  test "[m..n]: count" do
    run(patt('a'[2..4] * !1), "", :error)
    run(patt('a'[2..4] * !1), "a", :error)
    run(patt('a'[2..4] * !1), "aa")
    run(patt('a'[2..4] * !1), "aaa")
    run(patt('a'[2..4] * !1), "aaaa")
    run(patt('a'[2..4] * !1), "aaaaa", :error)
    run(patt('a'[0..1] * !1), "")
    run(patt('a'[0..1] * !1), "a")
    run(patt('a'[0..1] * !1), "aa", :error)
  end
  # a | b tries alternatives left to right; grouping must not change that.
  test "|: ordered choice" do
    run(patt("ab" | "cd"), "ab")
    run(patt("ab" | "cd"), "cd")
    run(patt("ab" | "cd"), "ef", :error)
    run(patt(("ab" | "cd") | "ef"), "ab")
    run(patt(("ab" | "cd") | "ef"), "cd")
    run(patt(("ab" | "cd") | "ef"), "ef")
    run(patt("ab" | "cd" | "ef"), "ab")
    run(patt("ab" | "cd" | "ef"), "cd")
    run(patt("ab" | "cd" | "ef"), "ef")
  end
  # a - b matches a only where b does not match.
  test "-: difference" do
    run(patt("abcd" - "abcdef"), "abcdefgh", :error)
    run(patt("abcd" - "abcdf"), "abcdefgh")
    run(patt({'a','b','c'} - {'a'}), "a", :error)
  end
  # Combinations of choice, sequence and sets.
  test "Misc combos" do
    run(patt('a' | 'b' * 'c'), "a")
    run(patt('a' | 'b' * 'c' | 'd' * 'e' * 'f'), "a")
    run(patt('a' | 'b' * 'c' | 'd' * 'e' * 'f'), "bc")
    run(patt('a' | 'b' * 'c' | 'd' * 'e' * 'f'), "def")
    run(patt({'a','b'} * 'c' | {'a','b'} * 'e'), "ac")
    run(patt({'a','b'} * 'c' | {'a','b'} * 'e'), "ae")
  end
  # Named grammars via peg/2, including one rule referring to another.
  test "grammars" do
    p = peg One do
      One <- "1"
    end
    assert(match(p, "1").result == :ok)
    p = peg One do
      One <- Two
      Two <- "2"
    end
    assert(match(p, "2").result == :ok)
  end
  # Regression test (named after a peephole-optimizer bug): both the
  # literal alternative and the rule-call alternative must keep matching.
  test "peephole bug" do
    p = peg :flop do
      :flop <- "3" | (:two)
      :two <- "2"
    end
    assert(match(p, "3").result == :ok)
    assert(match(p, "2").result == :ok)
  end
end
| 27 | 57 | 0.410774 |
1cabd6128239c3359208bb00e3f0b78ce35ac2f0 | 9,311 | exs | Elixir | test/course_planner_web/controllers/coordinator_controller_test.exs | digitalnatives/course_planner | 27b1c8067edc262685e9c4dcbfcf82633bc8b8dc | [
"MIT"
] | 38 | 2017-04-11T13:37:38.000Z | 2021-05-22T19:35:36.000Z | test/course_planner_web/controllers/coordinator_controller_test.exs | digitalnatives/course_planner | 27b1c8067edc262685e9c4dcbfcf82633bc8b8dc | [
"MIT"
] | 226 | 2017-04-07T13:14:14.000Z | 2018-03-08T16:50:11.000Z | test/course_planner_web/controllers/coordinator_controller_test.exs | digitalnatives/course_planner | 27b1c8067edc262685e9c4dcbfcf82633bc8b8dc | [
"MIT"
] | 7 | 2017-08-30T23:58:13.000Z | 2021-03-28T11:50:45.000Z | defmodule CoursePlanner.CoordinatorControllerTest do
use CoursePlannerWeb.ConnCase
alias CoursePlanner.Repo
alias CoursePlanner.Accounts.User
import CoursePlanner.Factory
setup do
{:ok, conn: login_as(:coordinator)}
end
defp login_as(user_type) do
user_type
|> insert()
|> guardian_login_html()
end
test "lists all entries on index", %{conn: conn} do
conn = get conn, coordinator_path(conn, :index)
assert html_response(conn, 200) =~ "Coordinators"
end
test "shows chosen resource", %{conn: conn} do
coordinator = insert(:coordinator)
conn = get conn, coordinator_path(conn, :show, coordinator)
assert html_response(conn, 200) =~ "#{coordinator.name} #{coordinator.family_name}"
end
test "lists all entries on index for supervisor" do
conn = login_as(:supervisor)
conn = get conn, coordinator_path(conn, :index)
assert html_response(conn, 200) =~ "Coordinators"
end
test "shows chosen resource for supervisor" do
conn = login_as(:supervisor)
coordinator = insert(:coordinator)
conn = get conn, coordinator_path(conn, :show, coordinator)
assert html_response(conn, 200) =~ "#{coordinator.name} #{coordinator.family_name}"
end
test "renders page not found when id is nonexistent", %{conn: conn} do
conn = get conn, coordinator_path(conn, :show, -1)
assert html_response(conn, 404)
end
test "renders form for editing chosen resource", %{conn: conn} do
coordinator = insert(:coordinator, %{name: "Foo", family_name: "Bar"})
conn = get conn, coordinator_path(conn, :edit, coordinator)
assert html_response(conn, 200) =~ "Foo Bar"
end
test "renders page not found for editing inexistent resource", %{conn: conn} do
conn = get conn, coordinator_path(conn, :edit, -1)
assert html_response(conn, 404)
end
test "does not updates if the resource does not exist", %{conn: conn} do
conn = put conn, coordinator_path(conn, :update, -1), %{"user" => %{"email" => "foo@bar.com"}}
assert html_response(conn, 404)
end
test "updates chosen resource and redirects when data is valid", %{conn: conn} do
coordinator = insert(:coordinator, %{})
conn = put conn, coordinator_path(conn, :update, coordinator), %{"user" => %{"email" => "foo@bar.com"}}
assert redirected_to(conn) == coordinator_path(conn, :show, coordinator)
assert Repo.get_by(User, email: "foo@bar.com")
end
test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
coordinator = insert(:coordinator, %{name: "Foo", family_name: "Bar"})
conn = put conn, coordinator_path(conn, :update, coordinator), %{"user" => %{"email" => "not email"}}
assert html_response(conn, 200) =~ "Foo Bar"
end
test "deletes chosen resource", %{conn: conn} do
coordinator = insert(:coordinator)
conn = delete conn, coordinator_path(conn, :delete, coordinator)
assert redirected_to(conn) == coordinator_path(conn, :index)
refute Repo.get(User, coordinator.id)
end
test "fails when doing a selfdeletion", %{conn: conn} do
current_logged_in_coordinator = conn.assigns.current_user
conn = delete conn, coordinator_path(conn, :delete, current_logged_in_coordinator)
assert redirected_to(conn) == coordinator_path(conn, :index)
assert get_flash(conn, "error") == "Coordinator cannot delete herself."
assert Repo.get(User, current_logged_in_coordinator.id)
end
test "does not delete chosen resource when does not exist", %{conn: conn} do
conn = delete conn, coordinator_path(conn, :delete, "-1")
assert redirected_to(conn) == coordinator_path(conn, :index)
assert get_flash(conn, "error") == "Coordinator was not found."
end
test "renders form for new resources", %{conn: conn} do
conn = get conn, coordinator_path(conn, :new)
assert html_response(conn, 200) =~ "New coordinator"
end
test "does not shows chosen resource for non coordinator user", %{conn: _conn} do
student_conn = login_as(:student)
teacher_conn = login_as(:teacher)
volunteer_conn = login_as(:volunteer)
coordinator = insert(:coordinator)
conn = get student_conn, coordinator_path(student_conn, :show, coordinator)
assert html_response(conn, 403)
conn = get teacher_conn, coordinator_path(teacher_conn, :show, coordinator)
assert html_response(conn, 403)
conn = get volunteer_conn, coordinator_path(volunteer_conn, :show, coordinator)
assert html_response(conn, 403)
end
test "does not list entries on index for non coordinator user", %{conn: _conn} do
student_conn = login_as(:student)
teacher_conn = login_as(:teacher)
volunteer_conn = login_as(:volunteer)
conn = get student_conn, coordinator_path(student_conn, :index)
assert html_response(conn, 403)
conn = get teacher_conn, coordinator_path(teacher_conn, :index)
assert html_response(conn, 403)
conn = get volunteer_conn, coordinator_path(volunteer_conn, :index)
assert html_response(conn, 403)
end
test "does not renders form for editing chosen resource for non coordinator user", %{conn: _conn} do
student_conn = login_as(:student)
teacher_conn = login_as(:teacher)
volunteer_conn = login_as(:volunteer)
supervisor_conn = login_as(:supervisor)
coordinator = insert(:coordinator)
conn = get student_conn, coordinator_path(student_conn, :edit, coordinator)
assert html_response(conn, 403)
conn = get teacher_conn, coordinator_path(teacher_conn, :edit, coordinator)
assert html_response(conn, 403)
conn = get volunteer_conn, coordinator_path(volunteer_conn, :edit, coordinator)
assert html_response(conn, 403)
conn = get supervisor_conn, coordinator_path(supervisor_conn, :edit, coordinator)
assert html_response(conn, 403)
end
test "does not delete a chosen resource for non coordinator user", %{conn: _conn} do
student_conn = login_as(:student)
teacher_conn = login_as(:teacher)
volunteer_conn = login_as(:volunteer)
supervisor_conn = login_as(:supervisor)
coordinator = insert(:coordinator)
conn = delete student_conn, coordinator_path(student_conn, :delete, coordinator.id)
assert html_response(conn, 403)
conn = delete teacher_conn, coordinator_path(teacher_conn, :delete, coordinator.id)
assert html_response(conn, 403)
conn = delete volunteer_conn, coordinator_path(volunteer_conn, :delete, coordinator.id)
assert html_response(conn, 403)
conn = delete supervisor_conn, coordinator_path(supervisor_conn, :delete, coordinator.id)
assert html_response(conn, 403)
end
test "does not render form for new coordinator for non coordinator user", %{conn: _conn} do
student_conn = login_as(:student)
teacher_conn = login_as(:teacher)
volunteer_conn = login_as(:volunteer)
supervisor_conn = login_as(:supervisor)
conn = get student_conn, coordinator_path(student_conn, :new)
assert html_response(conn, 403)
conn = get teacher_conn, coordinator_path(teacher_conn, :new)
assert html_response(conn, 403)
conn = get volunteer_conn, coordinator_path(volunteer_conn, :new)
assert html_response(conn, 403)
conn = get supervisor_conn, coordinator_path(supervisor_conn, :new)
assert html_response(conn, 403)
end
test "does not create coordinator for coordinator user when data is invalid", %{conn: conn} do
conn = post conn, coordinator_path(conn, :create), %{"user" => %{"email" => ""}}
assert html_response(conn, 200) =~ "Something went wrong."
end
test "create coordinator for coordinator user", %{conn: conn} do
conn = post conn, coordinator_path(conn, :create), %{"user" => %{"email" => "foo@bar.com"}}
assert redirected_to(conn) == coordinator_path(conn, :index)
assert get_flash(conn, "info") == "Coordinator created and notified by."
end
test "does not create coordinator for non coordinator user", %{conn: _conn} do
student_conn = login_as(:student)
teacher_conn = login_as(:teacher)
volunteer_conn = login_as(:volunteer)
coordinator = insert(:coordinator)
conn = post student_conn, coordinator_path(student_conn, :create), %{"user" => coordinator}
assert html_response(conn, 403)
conn = post teacher_conn, coordinator_path(teacher_conn, :create), %{"user" => coordinator}
assert html_response(conn, 403)
conn = post volunteer_conn, coordinator_path(volunteer_conn, :create), %{"user" => coordinator}
assert html_response(conn, 403)
end
test "does not update chosen coordinator for non coordinator user", %{conn: _conn} do
student_conn = login_as(:student)
teacher_conn = login_as(:teacher)
volunteer_conn = login_as(:volunteer)
coordinator = insert(:coordinator, %{})
conn = put student_conn, coordinator_path(student_conn, :update, coordinator), %{"user" => %{"email" => "foo@bar.com"}}
assert html_response(conn, 403)
conn = put teacher_conn, coordinator_path(teacher_conn, :update, coordinator), %{"user" => %{"email" => "foo@bar.com"}}
assert html_response(conn, 403)
conn = put volunteer_conn, coordinator_path(volunteer_conn, :update, coordinator), %{"user" => %{"email" => "foo@bar.com"}}
assert html_response(conn, 403)
end
end
| 38.795833 | 127 | 0.710557 |
1cabe06f13285aed19404c3f8274f1df7d1cc007 | 3,274 | ex | Elixir | lib/diode_client/ticket.ex | diodechain/diode_client_ex | 0aec3aa7a2e3448cccfc255b4d4e8d2cbf475c7f | [
"Apache-2.0"
] | null | null | null | lib/diode_client/ticket.ex | diodechain/diode_client_ex | 0aec3aa7a2e3448cccfc255b4d4e8d2cbf475c7f | [
"Apache-2.0"
] | null | null | null | lib/diode_client/ticket.ex | diodechain/diode_client_ex | 0aec3aa7a2e3448cccfc255b4d4e8d2cbf475c7f | [
"Apache-2.0"
] | null | null | null | defmodule DiodeClient.Ticket do
  alias DiodeClient.{Wallet, Secp256k1, Hash, ABI}
  require Record
  # A ticket is a record carrying connection/byte counters plus two
  # signatures: the device signs first, then the server countersigns.
  Record.defrecord(:ticket,
    server_id: nil,
    block_number: nil,
    block_hash: nil,
    fleet_contract: nil,
    total_connections: nil,
    total_bytes: nil,
    local_address: nil,
    device_signature: nil,
    server_signature: nil
  )
  @type ticket ::
          record(:ticket,
            server_id: binary(),
            block_number: integer(),
            block_hash: binary(),
            fleet_contract: binary(),
            total_connections: integer(),
            total_bytes: integer(),
            local_address: binary(),
            device_signature: Secp256k1.signature(),
            server_signature: Secp256k1.signature() | nil
          )
  # Tickets are keyed by the address of the device that signed them.
  def key(tck = ticket()) do
    device_address(tck)
  end
  # Recovers the signer's public key from the device signature over the
  # keccak hash of the device blob, and converts it to a wallet address.
  def device_address(tck = ticket()) do
    Secp256k1.recover!(
      device_signature(tck),
      device_blob(tck),
      :kec
    )
    |> Wallet.from_pubkey()
    |> Wallet.address!()
  end
  # Signs the device blob with the given private key (keccak digest).
  def device_sign(tck = ticket(), private) do
    ticket(tck, device_signature: Secp256k1.sign(private, device_blob(tck), :kec))
  end
  # Countersigns the server blob (device blob + device signature).
  def server_sign(tck = ticket(), private) do
    ticket(tck, server_signature: Secp256k1.sign(private, server_blob(tck), :kec))
  end
  @doc """
  Format for putting into a transaction with "SubmitTicketRaw"
  """
  def raw(tck = ticket()) do
    # The device signature is split into its (recovery, r, s) components
    # as expected by the on-chain verifier.
    [rec, r, s] = Secp256k1.bitcoin_to_rlp(device_signature(tck))
    [
      block_number(tck),
      fleet_contract(tck),
      server_id(tck),
      total_connections(tck),
      total_bytes(tck),
      Hash.sha3_256(local_address(tck)),
      r,
      s,
      rec
    ]
  end
  # Builds the byte blob the device signs. Field order and bytes32
  # encoding must mirror the Solidity code quoted below exactly.
  def device_blob(tck = ticket()) do
    # From DiodeRegistry.sol:
    # bytes32[] memory message = new bytes32[](6);
    # message[0] = blockhash(blockHeight);
    # message[1] = bytes32(fleetContract);
    # message[2] = bytes32(nodeAddress);
    # message[3] = bytes32(totalConnections);
    # message[4] = bytes32(totalBytes);
    # message[5] = localAddress;
    [
      block_hash(tck),
      fleet_contract(tck),
      server_id(tck),
      total_connections(tck),
      total_bytes(tck),
      Hash.sha3_256(local_address(tck))
    ]
    |> Enum.map(&ABI.encode("bytes32", &1))
    |> :erlang.iolist_to_binary()
  end
  # The server signs over the device blob concatenated with the device's
  # signature, binding its countersignature to the device's.
  def server_blob(tck = ticket()) do
    [device_blob(tck), device_signature(tck)]
    |> :erlang.iolist_to_binary()
  end
  # def epoch(ticket), do: block(ticket) |> Block.epoch()
  # Plain field accessors over the record.
  def server_id(ticket(server_id: id)), do: id
  def block_number(ticket(block_number: block)), do: block
  def block_hash(ticket(block_hash: hash)), do: hash
  def device_signature(ticket(device_signature: signature)), do: signature
  def server_signature(ticket(server_signature: signature)), do: signature
  def fleet_contract(ticket(fleet_contract: fc)), do: fc
  def total_connections(ticket(total_connections: tc)), do: tc
  def total_bytes(ticket(total_bytes: tb)), do: tb
  def local_address(ticket(local_address: la)), do: la
  # When local_address is a 21-byte tagged value, the leading byte picks
  # the preference order: 0 puts the embedded address first, 1 puts the
  # ticket's server first; otherwise only the server id is returned.
  # NOTE(review): the exact protocol meaning of the 0/1 tag is inferred
  # from this pattern match only — confirm against the Diode protocol docs.
  def preferred_server_ids(ticket(server_id: id, local_address: la)) do
    case la do
      <<0, addr::binary-size(20)>> -> [addr, id]
      <<1, addr::binary-size(20)>> -> [id, addr]
      _ -> [id]
    end
  end
end
| 27.982906 | 82 | 0.63653 |
1cabe1b9c5654655c8461bd0152778a70b400292 | 2,841 | ex | Elixir | lib/erlef/integrations/app_key.ex | joaquinalcerro/website | 52dc89c70cd0b42127ab233a4c0d10f626d2b698 | [
"Apache-2.0"
] | 71 | 2019-07-02T18:06:15.000Z | 2022-03-09T15:30:08.000Z | lib/erlef/integrations/app_key.ex | joaquinalcerro/website | 52dc89c70cd0b42127ab233a4c0d10f626d2b698 | [
"Apache-2.0"
] | 157 | 2019-07-02T01:21:16.000Z | 2022-03-30T16:08:12.000Z | lib/erlef/integrations/app_key.ex | joaquinalcerro/website | 52dc89c70cd0b42127ab233a4c0d10f626d2b698 | [
"Apache-2.0"
] | 45 | 2019-07-04T05:51:11.000Z | 2022-02-27T11:56:02.000Z | defmodule Erlef.Integrations.AppKey do
  @moduledoc false
  # Ecto schema for an app's API credentials (webhook or read-only API
  # keys), including revocation metadata and last-use tracking.
  use Erlef.Schema
  alias Erlef.Integrations.App
  schema "app_keys" do
    field(:name, :string)
    field(:type, Ecto.Enum, values: [:webhook, :api_read_only])
    # Virtual: generated at creation time and never persisted.
    field(:app_secret, :string, virtual: true)
    field(:key_id, :string)
    field(:secret, :string)
    field(:revoked_at, :utc_datetime)
    field(:revoked_by, Ecto.UUID)
    # Embedded snapshot of the most recent use of this key.
    embeds_one :last_used, Useage, on_replace: :delete do
      field(:used_at, :utc_datetime_usec)
      field(:user_agent, :string)
      field(:ip, :string)
    end
    field(:created_by, Ecto.UUID)
    belongs_to(:app, App)
    timestamps()
  end
  @doc false
  def changeset(key, attrs) do
    key
    |> cast(attrs, [:type, :created_by, :name])
    |> validate_required([:type, :created_by, :name])
  end
  @doc false
  # Creation variant: also generates key material. Note :name is cast but
  # not required here; see the NOTE on unique_name/1 below.
  def create_changeset(key, attrs) do
    key
    |> cast(attrs, [:type, :created_by, :name])
    |> add_keys()
    |> validate_required([:type, :created_by])
    |> prepare_changes(&unique_name/1)
  end
  # Replaces the last_used embed with a fresh usage snapshot.
  def update_last_use(key, params) do
    key
    |> change()
    |> put_embed(:last_used, struct(AppKey.Useage, params))
  end
  # Marks the key revoked now, recording who revoked it.
  def revoke(key, %{updated_by: updated_by}) do
    key
    |> change()
    |> cast(%{revoked_at: DateTime.utc_now()}, [:revoked_at])
    |> put_change(:revoked_by, updated_by)
  end
  # Builds a creation changeset for a key belonging to `app`.
  def build(app, params) do
    build_assoc(app, :keys)
    |> associate_app(app)
    |> create_changeset(params)
  end
  def revoked?(%AppKey{} = key) do
    not is_nil(key.revoked_at)
  end
  # Populates the generated credential fields on the changeset.
  defp add_keys(changeset) do
    {app_secret, id, token} = gen_key()
    changeset
    |> put_change(:app_secret, app_secret)
    |> put_change(:key_id, id)
    |> put_change(:secret, token)
  end
  # Generates {app_secret, key_id, secret}: a random 32-byte hex secret,
  # then an HMAC-SHA3-256 of it under the application-wide secret whose
  # 64 hex chars are split into a 32-char key id and 32-char token.
  defp gen_key() do
    app_secret =
      :crypto.strong_rand_bytes(32)
      |> Base.encode16(case: :lower)
    our_secret = Application.get_env(:erlef, :secret)
    <<id::binary-size(32), token::binary-size(32)>> =
      :crypto.mac(:hmac, :sha3_256, our_secret, app_secret)
      |> Base.encode16(case: :lower)
    {app_secret, id, token}
  end
  # Runs inside the Repo transaction (prepare_changes) to de-duplicate
  # the key name by appending "-2", "-3", ... as needed.
  # NOTE(review): `{:ok, name} =` raises MatchError when :name is absent
  # from the changes — confirm callers always supply a name, or handle
  # the :error case. Also, the query below collects key names across ALL
  # apps, making names globally unique — verify that is intended.
  defp unique_name(changeset) do
    {:ok, name} = fetch_change(changeset, :name)
    names =
      from(
        a in App,
        left_join: k in assoc(a, :keys),
        select: k.name
      )
      |> changeset.repo.all
      |> Enum.into(MapSet.new())
    name = if MapSet.member?(names, name), do: find_unique_name(name, names), else: name
    put_change(changeset, :name, name)
  end
  # Finds the first "name-N" (N >= 2) not already taken.
  defp find_unique_name(name, names, counter \\ 2) do
    name_counter = "#{name}-#{counter}"
    if MapSet.member?(names, name_counter) do
      find_unique_name(name, names, counter + 1)
    else
      name_counter
    end
  end
  defp associate_app(nil, _app), do: nil
  defp associate_app(%AppKey{} = key, %App{} = app), do: %{key | app: app}
end
| 23.479339 | 88 | 0.632876 |
1cac39ff32f170ed02a609526ff0a5a0d239558d | 745 | ex | Elixir | lib/app.ex | hewsut/ex_unit_clustered_case | 7762c0a0cce1c78703a1955ce9bc89c0ff9d2f88 | [
"Apache-2.0"
] | 48 | 2018-07-20T20:19:23.000Z | 2021-11-10T09:22:52.000Z | lib/app.ex | hewsut/ex_unit_clustered_case | 7762c0a0cce1c78703a1955ce9bc89c0ff9d2f88 | [
"Apache-2.0"
] | 9 | 2018-07-20T19:02:07.000Z | 2019-07-29T05:23:38.000Z | lib/app.ex | hewsut/ex_unit_clustered_case | 7762c0a0cce1c78703a1955ce9bc89c0ff9d2f88 | [
"Apache-2.0"
] | 7 | 2018-07-30T02:13:59.000Z | 2019-12-11T15:28:47.000Z | defmodule ExUnit.ClusteredCase.App do
@moduledoc false
use Application
def start(_type, _args) do
children =
if is_clustered_node?() do
# Do not start boot server/children if running as a cluster node
[]
else
# We depend on the boot server, so start it if not started yet
unless Process.whereis(:boot_server) do
{:ok, _} = :erl_boot_server.start_link([{127, 0, 0, 1}])
end
[
{ExUnit.ClusteredCase.Node.Ports, []},
{ExUnit.ClusteredCase.Cluster.Supervisor, []}
]
end
Supervisor.start_link(children, strategy: :one_for_one)
end
defp is_clustered_node? do
Atom.to_string(node()) =~ "ex_unit_clustered_node_"
end
end
| 25.689655 | 72 | 0.628188 |
1cac432d56eac2d0322526329c84c518736c0934 | 1,642 | ex | Elixir | apps/fz_http/lib/fz_http_web/live/setting_live/default_live.ex | athulspeaks/firezone | 98f4709c4d2c03f1ade5167494547ef240a09aed | [
"Apache-2.0"
] | null | null | null | apps/fz_http/lib/fz_http_web/live/setting_live/default_live.ex | athulspeaks/firezone | 98f4709c4d2c03f1ade5167494547ef240a09aed | [
"Apache-2.0"
] | null | null | null | apps/fz_http/lib/fz_http_web/live/setting_live/default_live.ex | athulspeaks/firezone | 98f4709c4d2c03f1ade5167494547ef240a09aed | [
"Apache-2.0"
] | null | null | null | defmodule FzHttpWeb.SettingLive.Default do
  @moduledoc """
  Manages the defaults view.
  """
  use FzHttpWeb, :live_view
  alias FzHttp.{ConnectivityChecks, Settings}
  # Help text per setting key; contains raw HTML (<code> tags), so it is
  # presumably rendered unescaped in the template — keep content trusted.
  @help_texts %{
    allowed_ips: """
    Configures the default AllowedIPs setting for devices.
    AllowedIPs determines which destination IPs get routed through
    Firezone. Specify a comma-separated list of IPs or CIDRs here to achieve split tunneling, or use
    <code>0.0.0.0/0, ::/0</code> to route all device traffic through this Firezone server.
    """,
    dns_servers: """
    Comma-separated list of DNS servers to use for devices.
    Leaving this blank will omit the <code>DNS</code> section in
    generated device configs.
    """,
    endpoint: """
    IPv4 or IPv6 address that devices will be configured to connect
    to. Defaults to this server's public IP if not set.
    """
  }
  @impl Phoenix.LiveView
  def mount(params, session, socket) do
    {:ok,
     socket
     |> assign_defaults(params, session, &load_data/2)}
  end
  # Placeholder shown for the endpoint field when unset.
  defp endpoint_placeholder do
    ConnectivityChecks.endpoint()
  end
  # Maps each setting under the "default." prefix to its changeset,
  # keyed by setting key.
  defp load_changesets do
    Settings.to_list("default.")
    |> Map.new(fn setting -> {setting.key, Settings.change_setting(setting)} end)
  end
  # Admin-only view: non-admins are redirected via not_authorized/1.
  defp load_data(_params, socket) do
    user = socket.assigns.current_user
    if user.role == :admin do
      socket
      |> assign(:changesets, load_changesets())
      |> assign(:help_texts, @help_texts)
      |> assign(:endpoint_placeholder, endpoint_placeholder())
      |> assign(:page_title, "Default Settings")
    else
      not_authorized(socket)
    end
  end
end
| 28.807018 | 102 | 0.680268 |
1cac4ad06578b98f12297e346575c5eb8a3ea50b | 2,283 | ex | Elixir | clients/assured_workloads/lib/google_api/assured_workloads/v1beta1/model/google_cloud_assuredworkloads_v1beta1_create_workload_operation_metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/assured_workloads/lib/google_api/assured_workloads/v1beta1/model/google_cloud_assuredworkloads_v1beta1_create_workload_operation_metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/assured_workloads/lib/google_api/assured_workloads/v1beta1/model/google_cloud_assuredworkloads_v1beta1_create_workload_operation_metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: generated file (see header above) — change the generator, not this
# module, or edits will be lost on regeneration.
defmodule GoogleApi.AssuredWorkloads.V1beta1.Model.GoogleCloudAssuredworkloadsV1beta1CreateWorkloadOperationMetadata do
  @moduledoc """
  Operation metadata to give request details of CreateWorkload.
  ## Attributes
  * `complianceRegime` (*type:* `String.t`, *default:* `nil`) - Optional. Compliance controls that should be applied to the resources managed by the workload.
  * `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Optional. Time when the operation was created.
  * `displayName` (*type:* `String.t`, *default:* `nil`) - Optional. The display name of the workload.
  * `parent` (*type:* `String.t`, *default:* `nil`) - Optional. The parent of the workload.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :complianceRegime => String.t(),
          :createTime => DateTime.t(),
          :displayName => String.t(),
          :parent => String.t()
        }
  # field/2 comes from GoogleApi.Gax.ModelBase; `as: DateTime` marks
  # createTime for DateTime handling per ModelBase conventions.
  field(:complianceRegime)
  field(:createTime, as: DateTime)
  field(:displayName)
  field(:parent)
end
# Generated Poison integration: delegates decoding to the model module's
# decode/2 (supplied by GoogleApi.Gax.ModelBase via `use`).
defimpl Poison.Decoder,
  for:
    GoogleApi.AssuredWorkloads.V1beta1.Model.GoogleCloudAssuredworkloadsV1beta1CreateWorkloadOperationMetadata do
  def decode(value, options) do
    GoogleApi.AssuredWorkloads.V1beta1.Model.GoogleCloudAssuredworkloadsV1beta1CreateWorkloadOperationMetadata.decode(
      value,
      options
    )
  end
end
# Generated Poison integration: encoding is handled generically by
# GoogleApi.Gax.ModelBase.encode/2.
defimpl Poison.Encoder,
  for:
    GoogleApi.AssuredWorkloads.V1beta1.Model.GoogleCloudAssuredworkloadsV1beta1CreateWorkloadOperationMetadata do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.238095 | 160 | 0.734122 |
1cac7a166415b09ffd71e0ae9a9d640b444e9849 | 890 | ex | Elixir | apps/snitch_core/lib/core/data/model/stock/stock_transfer.ex | saurabharch/avia | 74a82a95cf8bfe8143d1fce8136a3bb7ffc9467c | [
"MIT"
] | 1 | 2018-12-01T18:13:55.000Z | 2018-12-01T18:13:55.000Z | apps/snitch_core/lib/core/data/model/stock/stock_transfer.ex | saurabharch/avia | 74a82a95cf8bfe8143d1fce8136a3bb7ffc9467c | [
"MIT"
] | null | null | null | apps/snitch_core/lib/core/data/model/stock/stock_transfer.ex | saurabharch/avia | 74a82a95cf8bfe8143d1fce8136a3bb7ffc9467c | [
"MIT"
] | null | null | null | defmodule Snitch.Data.Model.StockTransfer do
  @moduledoc """
  StockTransfer CRUD API
  """
  use Snitch.Data.Model
  alias Snitch.Data.Schema.StockTransfer, as: StockTransferSchema
  @spec create(String.t(), String.t(), non_neg_integer, non_neg_integer) ::
          {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
  @doc """
  Inserts a stock transfer identified by `reference`/`number`, moving
  stock from location `source_id` to location `destination_id`.
  """
  def create(reference, number, source_id, destination_id) do
    QH.create(
      StockTransferSchema,
      %{
        reference: reference,
        number: number,
        source_id: source_id,
        destination_id: destination_id
      },
      Repo
    )
  end
  # NOTE(review): the spec claims a struct is always returned, but a
  # lookup can presumably miss — confirm QH.get's contract (nil on miss?)
  # and widen the spec if so.
  @spec get(non_neg_integer | map) :: StockTransferSchema.t()
  @doc """
  Fetches a single stock transfer by id or by a map of query fields.
  """
  def get(query_fields), do: QH.get(StockTransferSchema, query_fields, Repo)
  @doc """
  Fetches all the transfers from DB.
  """
  @spec get_all :: list(StockTransferSchema.t())
  def get_all, do: Repo.all(StockTransferSchema)
end
| 26.969697 | 76 | 0.67191 |
1cac7c29dec73b5a2b7a0977672c1f36ef20f972 | 1,278 | exs | Elixir | mix.exs | Cantido/liberator_phoenix | e6279e8712d39822272c69b3d81a9037d11862e8 | [
"MIT"
] | null | null | null | mix.exs | Cantido/liberator_phoenix | e6279e8712d39822272c69b3d81a9037d11862e8 | [
"MIT"
] | null | null | null | mix.exs | Cantido/liberator_phoenix | e6279e8712d39822272c69b3d81a9037d11862e8 | [
"MIT"
] | null | null | null | # SPDX-FileCopyrightText: 2021 Rosa Richter
#
# SPDX-License-Identifier: MIT
defmodule Liberator.Phoenix.MixProject do
  use Mix.Project

  # Mix project definition for the liberator_phoenix Hex package.
  def project do
    [
      app: :liberator_phoenix,
      description: "Phoenix integration for Liberator",
      package: package(),
      version: "0.1.0",
      elixir: "~> 1.11",
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # OTP application configuration; see "mix help compile.app".
  def application, do: [extra_applications: [:logger]]

  # Test builds additionally compile the helpers under test/support.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_env), do: ["lib"]

  # Hex package metadata.
  defp package do
    [
      maintainers: ["Rosa Richter"],
      licenses: ["MIT"],
      links: %{
        "GitHub" => "https://github.com/Cantido/liberator_phoenix",
        "sourcehut" => "https://git.sr.ht/~cosmicrose/liberator_phoenix"
      }
    ]
  end

  # Dependencies; see "mix help deps".
  defp deps do
    [
      {:credo, "~> 1.5", only: [:dev, :test], runtime: false},
      {:liberator, "~> 2.0.0"},
      {:mime, "~> 2.0.0"},
      {:nimble_options, "~> 0.3.0"},
      {:phoenix_view, "~> 1.0"},
      {:ex_doc, "~> 0.25", only: :dev, runtime: false}
    ]
  end
end
| 23.666667 | 72 | 0.578247 |
1cac87522c6c5342d34a02ff4c8981676653165f | 61 | exs | Elixir | uderzo_example/config/config.exs | cdegroot/uderzo_poncho | af2f416cfae4dbcd41e3e88a86dfeb0fd423ed75 | [
"Apache-2.0"
] | 46 | 2018-05-08T12:06:18.000Z | 2022-03-23T18:30:13.000Z | uderzo_example/config/config.exs | cdegroot/uderzo_poncho | af2f416cfae4dbcd41e3e88a86dfeb0fd423ed75 | [
"Apache-2.0"
] | 13 | 2018-05-08T11:42:00.000Z | 2018-09-17T22:41:55.000Z | uderzo_example/config/config.exs | cdegroot/uderzo_poncho | af2f416cfae4dbcd41e3e88a86dfeb0fd423ed75 | [
"Apache-2.0"
] | 4 | 2018-06-06T05:04:00.000Z | 2018-07-13T01:23:31.000Z | use Mix.Config
# NOTE(review): Mix.Config is deprecated in favor of `import Config`
# (Elixir >= 1.9) — confirm the project's minimum supported Elixir
# version before switching.
# Tells Clixir which OTP application it is generating code for.
config :clixir, application: :uderzo_example
| 15.25 | 44 | 0.803279 |
1caca54f02af5526278042155a7209e983ec7b62 | 829 | ex | Elixir | lib/ex_twilio/resources/proxy/participant.ex | lesserhatch/ex_twilio | 4869bc0f55213c5663ad3bb775ad7dc924277181 | [
"MIT"
] | null | null | null | lib/ex_twilio/resources/proxy/participant.ex | lesserhatch/ex_twilio | 4869bc0f55213c5663ad3bb775ad7dc924277181 | [
"MIT"
] | 1 | 2021-02-19T04:34:52.000Z | 2021-03-29T19:14:14.000Z | lib/ex_twilio/resources/proxy/participant.ex | workpathco/ex_twilio | fe6948ee2c78084dade683c0b81c33f47461589f | [
"MIT"
] | null | null | null | defmodule ExTwilio.Proxy.Participant do
  @moduledoc """
  Represents a Participant attached to a Session.
  - [Twilio docs](https://www.twilio.com/docs/proxy/api/participant)
  """
  # Mirrors the REST representation; every field defaults to nil until
  # populated from an API response.
  defstruct sid: nil,
            account_sid: nil,
            session_sid: nil,
            service_sid: nil,
            friendly_name: nil,
            identifier: nil,
            proxy_identifier: nil,
            proxy_identifier_sid: nil,
            date_deleted: nil,
            date_created: nil,
            date_updated: nil,
            url: nil,
            links: nil
  use ExTwilio.Resource, import: [:stream, :all, :find, :create, :delete]
  # Parent resources consumed by ExTwilio.Resource (see `use` above) —
  # presumably to build the nested Services/{sid}/Sessions/{sid} URL
  # path; confirm against ExTwilio's resource docs.
  def parents,
    do: [
      %ExTwilio.Parent{module: ExTwilio.Proxy.Service, key: :service},
      %ExTwilio.Parent{module: ExTwilio.Proxy.SessionResource, key: :session}
    ]
end
| 27.633333 | 77 | 0.595899 |
1cace951e3e13e916cabcd489a1da25240891125 | 4,508 | ex | Elixir | lib/avrora/client.ex | juanperi/avrora | c5a7ccd1643764a20d9336344785fd952ebacb89 | [
"MIT"
] | null | null | null | lib/avrora/client.ex | juanperi/avrora | c5a7ccd1643764a20d9336344785fd952ebacb89 | [
"MIT"
] | null | null | null | lib/avrora/client.ex | juanperi/avrora | c5a7ccd1643764a20d9336344785fd952ebacb89 | [
"MIT"
] | null | null | null | defmodule Avrora.Client do
@moduledoc """
Generates client module with isolated memory storage.
## Examples
defmodule MyClient do
use Avrora.Client,
schemas_path: Path.expand("./priv/schemas"),
registry_url: "https://registry.io"
end
It will expose `Avrora.Encoder` module functions and make `MyClient` module
identical to `Avrora` module, but isolated from it.
To start using `MyClient` follow the [Start cache process](README.md#start-cache-process),
add it to your supervision tree
children = [
MyClient
]
Supervisor.start_link(children, strategy: :one_for_one)
or start the process manually
{:ok, pid} = MyClient.start_link()
"""
  # Relative paths (under lib/avrora) of the source files whose contents
  # are read, rewritten via personalize/2 and re-emitted inside the
  # generated client module.
  @modules ~w(
    encoder
    resolver
    avro_schema_store
    avro_decoder_options
    schema/encoder
    codec/plain
    codec/schema_registry
    codec/object_container_file
    storage/file
    storage/memory
    storage/registry
    utils/registrar
  )
  # Module suffixes whose `alias Avrora.X` lines are re-targeted at the
  # generated client's namespace; any other alias is left untouched.
  @aliases ~w(
    Codec
    Config
    Resolver
    Schema.Encoder
    AvroDecoderOptions
    Codec.Plain
    Codec.SchemaRegistry
    Codec.ObjectContainerFile
    Storage.Registry
    Storage.File
  )
defp personalize(definition, module: module) do
definition = Regex.replace(~r/defmodule Avrora\./, definition, "defmodule ")
~r/alias Avrora\.([\w\.]+)(, as: [\w\.]+)?/
|> Regex.scan(definition)
|> Enum.reject(fn [_, modl | _] -> !Enum.member?(@aliases, modl) end)
|> Enum.reduce(definition, fn [alis, modl | as], defn ->
Regex.replace(~r/#{alis}(?=[[:cntrl:]])/, defn, "alias #{module}.#{modl}#{as}")
end)
end
defp generate!(definition, file: file) do
case Code.string_to_quoted(definition, file: file) do
{:ok, quoted} ->
quoted
{:error, {line, error, token}} ->
raise "error #{error} on line #{line} caused by #{inspect(token)}"
end
end
defmacro __using__(opts) do
module = __CALLER__.module |> Module.split() |> Enum.join(".")
modules =
@modules
|> Enum.map(fn name ->
file = Path.expand("./#{name}.ex", __DIR__)
file
|> File.read!()
|> personalize(module: module)
|> generate!(file: file)
end)
config =
quote do
defmodule Config do
@moduledoc false
@opts unquote(opts)
import Keyword, only: [get: 3]
def schemas_path do
path = get(@opts, :schemas_path, "./priv/schemas")
otp_app = get(@opts, :otp_app, nil)
if is_nil(otp_app), do: Path.expand(path), else: Application.app_dir(otp_app, path)
end
def registry_url, do: get(@opts, :registry_url, nil)
def registry_auth, do: get(@opts, :registry_auth, nil)
def registry_schemas_autoreg, do: get(@opts, :registry_schemas_autoreg, true)
def convert_null_values, do: get(@opts, :convert_null_values, true)
def convert_map_to_proplist, do: get(@opts, :convert_map_to_proplist, false)
def names_cache_ttl, do: get(@opts, :names_cache_ttl, :infinity)
def file_storage, do: :"Elixir.#{unquote(module)}.Storage.File"
def memory_storage, do: :"Elixir.#{unquote(module)}.Storage.Memory"
def registry_storage, do: :"Elixir.#{unquote(module)}.Storage.Registry"
def http_client, do: Avrora.HTTPClient
def ets_lib, do: :"Elixir.#{unquote(module)}.AvroSchemaStore"
def self, do: __MODULE__
end
end
quote location: :keep do
unquote(modules)
unquote(config)
use Supervisor
defdelegate decode(payload), to: :"Elixir.#{unquote(module)}.Encoder"
defdelegate encode(payload, opts), to: :"Elixir.#{unquote(module)}.Encoder"
defdelegate decode(payload, opts), to: :"Elixir.#{unquote(module)}.Encoder"
defdelegate decode_plain(payload, opts), to: :"Elixir.#{unquote(module)}.Encoder"
defdelegate encode_plain(payload, opts), to: :"Elixir.#{unquote(module)}.Encoder"
defdelegate extract_schema(payload), to: :"Elixir.#{unquote(module)}.Encoder"
def start_link(opts \\ []), do: Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
@impl true
def init(_state \\ []) do
children = [
:"Elixir.#{unquote(module)}.AvroSchemaStore",
:"Elixir.#{unquote(module)}.Storage.Memory"
]
Supervisor.init(children, strategy: :one_for_all)
end
end
end
end
| 30.053333 | 95 | 0.624224 |
1cacfe4bc3a9f018838ce0537de79783d08549e9 | 2,267 | exs | Elixir | example/mix.exs | mnishiguchi/circuits_cdev | 26aa3186a172456573a837f78649dc2c4ae1d4bb | [
"Apache-2.0"
] | null | null | null | example/mix.exs | mnishiguchi/circuits_cdev | 26aa3186a172456573a837f78649dc2c4ae1d4bb | [
"Apache-2.0"
] | null | null | null | example/mix.exs | mnishiguchi/circuits_cdev | 26aa3186a172456573a837f78649dc2c4ae1d4bb | [
"Apache-2.0"
] | null | null | null | defmodule Example.MixProject do
use Mix.Project
@app :example
@version "0.1.0"
@all_targets [:rpi, :rpi0, :rpi2, :rpi3, :rpi3a, :rpi4, :bbb, :osd32mp1, :x86_64]
def project do
[
app: @app,
version: @version,
elixir: "~> 1.9",
archives: [nerves_bootstrap: "~> 1.10"],
start_permanent: Mix.env() == :prod,
build_embedded: true,
deps: deps(),
releases: [{@app, release()}],
preferred_cli_target: [run: :host, test: :host]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
mod: {Example.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# Dependencies for all targets
{:nerves, "~> 1.7.0", runtime: false},
{:shoehorn, "~> 0.7.0"},
{:ring_logger, "~> 0.8.1"},
{:toolshed, "~> 0.2.13"},
# Dependencies for all targets except :host
{:nerves_runtime, "~> 0.11.3", targets: @all_targets},
{:nerves_pack, "~> 0.4.0", targets: @all_targets},
{:circuits_cdev, path: "../", targets: @all_targets},
# Dependencies for specific targets
{:nerves_system_rpi, "~> 1.15", runtime: false, targets: :rpi},
{:nerves_system_rpi0, "~> 1.15", runtime: false, targets: :rpi0},
{:nerves_system_rpi2, "~> 1.15", runtime: false, targets: :rpi2},
{:nerves_system_rpi3, "~> 1.15", runtime: false, targets: :rpi3},
{:nerves_system_rpi3a, "~> 1.15", runtime: false, targets: :rpi3a},
{:nerves_system_rpi4, "~> 1.15", runtime: false, targets: :rpi4},
{:nerves_system_bbb, "~> 2.10", runtime: false, targets: :bbb},
{:nerves_system_osd32mp1, "~> 0.6", runtime: false, targets: :osd32mp1},
{:nerves_system_x86_64, "~> 1.15.0", runtime: false, targets: :x86_64}
]
end
def release do
[
overwrite: true,
# Erlang distribution is not started automatically.
# See https://hexdocs.pm/nerves_pack/readme.html#erlang-distribution
cookie: "#{@app}_cookie",
include_erts: &Nerves.Release.erts/0,
steps: [&Nerves.Release.init/1, :assemble],
strip_beams: Mix.env() == :prod or [keep: ["Docs"]]
]
end
end
| 32.855072 | 83 | 0.597265 |
1cad00d12912a6f1548b9bdca23579f7402639a6 | 466 | ex | Elixir | lib/pundit/exceptions.ex | nonrational/pundit-elixir | 5ca084c5457fa4f97ad1fd7ab9b2024b9900d805 | [
"MIT"
] | 24 | 2019-03-24T21:04:09.000Z | 2022-01-03T02:59:58.000Z | lib/pundit/exceptions.ex | nonrational/pundit-elixir | 5ca084c5457fa4f97ad1fd7ab9b2024b9900d805 | [
"MIT"
] | 1 | 2021-03-24T13:27:54.000Z | 2021-03-24T13:27:54.000Z | lib/pundit/exceptions.ex | nonrational/pundit-elixir | 5ca084c5457fa4f97ad1fd7ab9b2024b9900d805 | [
"MIT"
] | 4 | 2019-10-14T19:59:28.000Z | 2021-03-24T12:30:06.000Z | defmodule Pundit.NotDefinedError do
@moduledoc """
Exception raised when a module doesn't implement a necessary access function.
"""
defexception message: "The function you are trying to call is not defined."
end
defmodule Pundit.NotAuthorizedError do
@moduledoc """
Exception raised when a user attempts to perform an action they're not authorized to perform.
"""
defexception message: "The user is not authorized to perform the given action."
end
| 33.285714 | 95 | 0.766094 |
1cad00f995f443256f990deeff39508fda40c85f | 504 | exs | Elixir | config/test.exs | Adnatull/donatebox | 47e07e7831c223265a465425520313da5370f149 | [
"Apache-2.0"
] | null | null | null | config/test.exs | Adnatull/donatebox | 47e07e7831c223265a465425520313da5370f149 | [
"Apache-2.0"
] | null | null | null | config/test.exs | Adnatull/donatebox | 47e07e7831c223265a465425520313da5370f149 | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :donatebox, DonateboxWeb.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :donatebox, Donatebox.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "donatebox_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
| 25.2 | 56 | 0.740079 |
1cad2837932c9705743c301c021d3c3aac0cb117 | 1,559 | ex | Elixir | lib/ex_admin/sidebar.ex | fanduel/ex_admin | 05806a718859a0e155d3447c3ffde8a536fd676a | [
"MIT"
] | 1 | 2017-03-23T00:50:59.000Z | 2017-03-23T00:50:59.000Z | lib/ex_admin/sidebar.ex | fanduel/ex_admin | 05806a718859a0e155d3447c3ffde8a536fd676a | [
"MIT"
] | null | null | null | lib/ex_admin/sidebar.ex | fanduel/ex_admin | 05806a718859a0e155d3447c3ffde8a536fd676a | [
"MIT"
] | null | null | null | defmodule ExAdmin.Sidebar do
@moduledoc false
require Logger
require Ecto.Query
use Xain
def sidebars_visible?(_conn, %{sidebars: []}), do: false
def sidebars_visible?(conn, %{sidebars: sidebars}) do
Enum.reduce(sidebars, false, fn {_, opts, _}, acc ->
acc || visible?(conn, opts)
end)
end
def sidebar_view(_conn, %{sidebars: []}, _), do: ""
def sidebar_view(conn, %{sidebars: sidebars}, resource) do
for sidebar <- sidebars do
_sidebar_view(conn, sidebar, resource)
end
end
defp _sidebar_view(conn, {name, opts, {mod, fun}}, resource) do
if visible?(conn, opts) do
ExAdmin.Theme.Helpers.theme_module(conn, Layout).sidebar_view(
conn,
{name, opts, {mod, fun}},
resource
)
else
""
end
end
def visible?(conn, opts) do
Phoenix.Controller.action_name(conn)
|> _visible?(Enum.into(opts, %{}))
end
def _visible?(action, %{only: only}) when is_atom(only) do
if action == only, do: true, else: false
end
def _visible?(action, %{only: only}) when is_list(only) do
if action in only, do: true, else: false
end
def _visible?(action, %{except: except}) when is_atom(except) do
if action == except, do: false, else: true
end
def _visible?(action, %{except: except}) when is_list(except) do
if action in except, do: false, else: true
end
def _visible?(_, _), do: true
def get_actions(item, opts) do
case opts[item] || [] do
atom when is_atom(atom) -> [atom]
other -> other
end
end
end
| 23.984615 | 68 | 0.629891 |
1cad7a1af46566b70b22c052f3ce04699b26cfd5 | 1,662 | exs | Elixir | priv/repo/seeds.exs | palindrom615/firestorm | 0690493c9dcae5c04c63c5321532a7db923e5be7 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | palindrom615/firestorm | 0690493c9dcae5c04c63c5321532a7db923e5be7 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | palindrom615/firestorm | 0690493c9dcae5c04c63c5321532a7db923e5be7 | [
"MIT"
] | 1 | 2020-03-20T12:58:37.000Z | 2020-03-20T12:58:37.000Z | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# FirestormWeb.Repo.insert!(%FirestormWeb.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
alias FirestormWeb.Forums
IO.puts "~~~~~ INITIALIZING FIRESTORM SEEDS ~~~~~"
IO.puts "-- CREATING ROLES --"
{:ok, admin_role} =
Forums.create_role("admin")
IO.puts "-- CREATING USERS --"
IO.puts "---- Admin von Hornclaw ----"
{:ok, admin} =
Forums.login_or_register_from_identity(%{
username: "admin",
password: "password"
})
{:ok, _} = Forums.add_role(admin, admin_role)
IO.puts "---- Bob Vladbob ----"
{:ok, bob} =
Forums.login_or_register_from_identity(%{
username: "bob",
password: "password"
})
IO.puts "---- Alice McStinkerton ----"
{:ok, alice} =
Forums.login_or_register_from_identity(%{
username: "alice",
password: "password"
})
IO.puts "-- CREATING CATEGORIES --"
IO.puts "---- Elixir ----"
{:ok, elixir} =
Forums.create_category(%{title: "Elixir"})
IO.puts "---- Elm ----"
{:ok, elm} =
Forums.create_category(%{title: "Elm"})
IO.puts "-- CREATING THREADS --"
IO.puts "---- OTP is cool ----"
{:ok, otp_is_cool} =
Forums.create_thread(elixir, bob, %{title: "OTP is cool", body: "Don't you think?"})
{:ok, elm_mdl} =
Forums.create_thread(elm, alice, %{title: "elm-mdl", body: "What's the story on this library these days?"})
IO.puts "-- CREATING POSTS --"
{:ok, otp_post} = Forums.create_post(otp_is_cool, alice, %{body: "I really do"})
| 27.245902 | 109 | 0.649819 |
1cad7e58d7c2fcd2a293fadc82019c6eebea019f | 751 | ex | Elixir | lib/brando/content/var/color.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 1 | 2020-04-26T09:53:02.000Z | 2020-04-26T09:53:02.000Z | lib/brando/content/var/color.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 198 | 2019-08-20T16:16:07.000Z | 2020-07-03T15:42:07.000Z | lib/brando/content/var/color.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | null | null | null | defmodule Brando.Content.Var.Color do
use Brando.Blueprint,
application: "Brando",
domain: "Content",
schema: "VarColor",
singular: "var_color",
plural: "var_colors",
gettext_module: Brando.Gettext
data_layer :embedded
@primary_key false
identifier "{{ entry.label }}"
attributes do
attribute :type, :string, required: true
attribute :label, :string, required: true
attribute :key, :string, required: true
attribute :value, :string
attribute :important, :boolean, default: false
attribute :placeholder, :string
attribute :instructions, :string
attribute :picker, :boolean, default: true
attribute :opacity, :boolean, default: false
attribute :palette_id, :integer
end
end
| 26.821429 | 50 | 0.696405 |
1cadd17cb6dfc4d05ab24ce67418c75e8b91267a | 918 | exs | Elixir | config/config.exs | ramonh/rumbl | c22ac66fc8c32bc75f4982ea88f6874c874fab4d | [
"MIT"
] | null | null | null | config/config.exs | ramonh/rumbl | c22ac66fc8c32bc75f4982ea88f6874c874fab4d | [
"MIT"
] | null | null | null | config/config.exs | ramonh/rumbl | c22ac66fc8c32bc75f4982ea88f6874c874fab4d | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :rumbl,
ecto_repos: [Rumbl.Repo]
# Configures the endpoint
config :rumbl, Rumbl.Endpoint,
url: [host: "localhost"],
secret_key_base: "yt47ws4DN2Iau0Nv1KQS+tMOKaVOz7mndGvaVQPQOOF4UuRFrR8x9nvTkyg9zl3T",
render_errors: [view: Rumbl.ErrorView, accepts: ~w(html json)],
pubsub: [name: Rumbl.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 32.785714 | 86 | 0.759259 |
1cade8b7a53a2d1ffe60520150549af2bac6fb2d | 349 | exs | Elixir | priv/repo/seeds.exs | RatioPBC/epi-viaduct-nys | 99fb637785ea207aee5449fa01fa59dd18ec8bf2 | [
"MIT"
] | 2 | 2021-06-22T21:01:49.000Z | 2021-11-04T18:36:48.000Z | priv/repo/seeds.exs | RatioPBC/epi-viaduct-nys | 99fb637785ea207aee5449fa01fa59dd18ec8bf2 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | RatioPBC/epi-viaduct-nys | 99fb637785ea207aee5449fa01fa59dd18ec8bf2 | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# NYSETL.Repo.insert!(%NYSETL.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.083333 | 61 | 0.704871 |
1cae22a5848974d8e3f800526a0a0067b60452c7 | 6,428 | ex | Elixir | lib/open_scad.ex | joedevivo/open_scad | c8981396b088e795a4269d1ac8a66ef7a1ecc744 | [
"MIT"
] | 16 | 2018-02-12T22:06:00.000Z | 2022-01-04T05:07:35.000Z | lib/open_scad.ex | joedevivo/open_scad | c8981396b088e795a4269d1ac8a66ef7a1ecc744 | [
"MIT"
] | 2 | 2019-12-07T15:54:10.000Z | 2019-12-07T19:21:14.000Z | lib/open_scad.ex | joedevivo/open_scad | c8981396b088e795a4269d1ac8a66ef7a1ecc744 | [
"MIT"
] | null | null | null | defmodule OpenSCAD do
@moduledoc """
'use OpenSCAD' in your module to have access to the entire OpenSCAD language
See the OpenSCAD [Documentation](http://www.openscad.org/documentation.html)
for more details of actual usage.
This [CheatSheet](http://www.openscad.org/cheatsheet/index.html) also comes in
handy
"""
defmodule Language do
# Works through all the implementations of Renderable and creates a
# cooresponding function for its type
defmacro __before_compile__(_env) do
# TODO: Automate this generation
impls = [
OpenSCAD.Projection,
OpenSCAD.Polygon,
OpenSCAD.Square,
OpenSCAD.Translate,
OpenSCAD.Sphere,
OpenSCAD.Minkowski,
OpenSCAD.LinearExtrude,
OpenSCAD.Import,
OpenSCAD.Hull,
OpenSCAD.RotateExtrude,
OpenSCAD.Color,
OpenSCAD.Rotate,
OpenSCAD.Resize,
OpenSCAD.Scale,
OpenSCAD.Cube,
OpenSCAD.Circle,
OpenSCAD.Union,
List,
BitString,
OpenSCAD.Intersection,
OpenSCAD.Cylinder,
OpenSCAD.Text,
OpenSCAD.Offset,
OpenSCAD.Difference,
OpenSCAD.Mirror,
OpenSCAD.Polyhedron
]
scad_functions =
for i <- impls do
name = OpenSCAD.Action.scad_name(i) |> String.to_atom()
case OpenSCAD.Renderable.type(%{__struct__: i}) do
# If this impl is an object, create a function that only takes
# parameters.
:object ->
quote do
def unquote(name)(kwlist \\ []) do
struct(unquote(i), kwlist)
end
end
# If it's a transformation, create a function that can have child
# objects piped into the first argument
:transformation ->
# special handling for difference, just feels nicer to me
if name == :difference do
quote do
def difference(children), do: struct(unquote(i), children: children)
def difference(thing, things_to_remove) when is_list(things_to_remove) do
struct(unquote(i), children: [thing | things_to_remove])
end
def difference(thing, things_to_remove) do
struct(unquote(i), children: [thing, things_to_remove])
end
end
else
quote do
def unquote(name)(children, kwlist \\ [])
# allows us to render a transformation with no children. This has
# no effect, but OpenSCAD is fine with it, so must we be.
def unquote(name)(nil, kwlist) do
unquote(name)([], kwlist)
end
def unquote(name)(obj, kwlist) do
struct(unquote(i), [children: obj] ++ kwlist)
end
end
end
_ ->
# This covers our implementation for BitString, :list, and Any for which we don't
# need any additional functions.
nil
end
end
|> Enum.filter(&(not is_nil(&1)))
scad_functions
end
end
@before_compile OpenSCAD.Language
defmacro __using__(_opts) do
quote do
# , only: [write: 2, to_scad: 1]
import OpenSCAD, except: [import: 1]
import OpenSCAD.Action, only: [disable: 1, show_only: 1, debug: 1, transparent: 1]
@before_compile OpenSCAD
end
end
# Works through all the implementations of Renderable and creates a
# cooresponding function for its type
defmacro __before_compile__(_env) do
quote do
# once you `use OpenSCAD`, you're declaring yourself a Model
def is_open_scad_model?, do: true
end
end
@doc """
Renders an OpenSCAD.Renderable to a string with a trailing newline.
"""
@spec to_scad(any) :: String.t()
def to_scad(renderable) do
OpenSCAD.Renderable.to_scad(renderable) <> "\n"
end
@doc """
Writes an OpenSCAD.Renderable out to a file
"""
@spec write(any, Path.t()) :: :ok | {:error, atom}
def write(renderable, filename) do
scad = to_scad(renderable)
File.write(filename, scad)
end
@doc """
slice is something unavailable in openscad.
It's intention is to take a 3D model and output a set of SVGs that will be
individual layers that can be cut with a laser or CNC machine.
You can play around with these settings to distort the model, but if you want
it to be accurate, set `layer` to the thickness of your material.
Also, it shift the model z -layer mm for each step, and will create the svg
from all points at z=0 for that step. This means that it starts at z=0, and
anything below z=0 will not be accounted for at all. Also, it will only go as
high as `height`, so if you create a `cube(v: [100, 100, 100], center:true)`,
half of it will be below the z axis and never get rendered. It will have 50mm
above the z-axis, but if you set `height` to `25`, you'll lose the topp half
of that. Conversley, if you set `height` to `100`, you'll end up with half
your SVGs being empty.
- height: total height in mm
- layer: height per layer
- name: file_prefix
"""
# TODO: After each SVG is created, read it and if it's empty abort the
# process, since if you're stacking layers and a layer is empty, you can't
# stack any higher.
@spec slice(any(), Keyword.t()) :: :ok
def slice(renderable, kwargs) do
layer_count = floor(kwargs[:height] / kwargs[:layer])
layer_digits =
layer_count
|> Integer.to_string()
|> String.length()
_ = File.mkdir_p(kwargs[:name])
_ = write(renderable, Path.join(kwargs[:name], "model.scad"))
Range.new(0, layer_count)
|> Enum.each(fn l ->
filename = String.pad_leading(Integer.to_string(l), layer_digits, "0")
scad_file = Path.join(kwargs[:name], Enum.join([filename, ".scad"]))
svg_file = Path.join(kwargs[:name], Enum.join([filename, ".svg"]))
_ =
renderable
|> translate(v: [0, 0, -(kwargs[:layer] * l)])
|> projection(cut: true)
|> write(scad_file)
{output, rc} = System.cmd("openscad", ["-o", svg_file, scad_file])
if rc != 0 do
IO.puts(output)
end
:ok
end)
:ok
end
end
| 31.665025 | 95 | 0.60112 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.