hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e84900800b76b1f2ae0a87dfeb02da60364ab86c | 154 | ex | Elixir | apps/crawler/lib/crawler/bethesda.ex | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 3 | 2018-07-20T22:14:36.000Z | 2018-12-21T19:54:48.000Z | apps/crawler/lib/crawler/bethesda.ex | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 36 | 2018-09-15T21:46:54.000Z | 2020-03-28T16:10:18.000Z | apps/crawler/lib/crawler/bethesda.ex | gregredhead/belethor | 255925396b18ba4a6950f386abf8a9e17a7e5e7c | [
"Apache-2.0"
] | 2 | 2018-07-22T08:47:07.000Z | 2021-12-11T01:39:19.000Z | defmodule Crawler.Bethesda do
@moduledoc "implements `Crawler.Provider` for [Bethesda](https://bethesda.net/en/mods/skyrim)"
use Crawler.Provider
end
| 30.8 | 96 | 0.779221 |
e849322c5836e3368b4fc3b2dd83a6b29d73a75a | 12,564 | ex | Elixir | lib/phoenix_live_view/upload.ex | feliperenan/phoenix_live_view | af65bb51fe12ea88e7c66808d2b1118e1c491ddd | [
"MIT"
] | 1 | 2020-09-09T09:10:01.000Z | 2020-09-09T09:10:01.000Z | lib/phoenix_live_view/upload.ex | feliperenan/phoenix_live_view | af65bb51fe12ea88e7c66808d2b1118e1c491ddd | [
"MIT"
] | null | null | null | lib/phoenix_live_view/upload.ex | feliperenan/phoenix_live_view | af65bb51fe12ea88e7c66808d2b1118e1c491ddd | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.Upload do
# Operations integrating Phoenix.LiveView.Socket with UploadConfig.
@moduledoc false
alias Phoenix.LiveView.{Socket, Utils, UploadConfig, UploadEntry}
@refs_to_names :__phoenix_refs_to_names__
@doc """
Allows an upload.
"""
def allow_upload(%Socket{} = socket, name, opts) when is_atom(name) and is_list(opts) do
case uploaded_entries(socket, name) do
{[], []} ->
:ok
{_, _} ->
raise ArgumentError, """
cannot allow_upload on an existing upload with active entries.
Use cancel_upload and/or consume_upload to handle the active entries before allowing a new upload.
"""
end
ref = Utils.random_id()
uploads = socket.assigns[:uploads] || %{}
upload_config = UploadConfig.build(name, ref, opts)
new_uploads =
uploads
|> Map.put(name, upload_config)
|> Map.update(@refs_to_names, %{ref => name}, fn refs -> Map.put(refs, ref, name) end)
Utils.assign(socket, :uploads, new_uploads)
end
@doc """
Disallows a previously allowed upload.
"""
def disallow_upload(%Socket{} = socket, name) when is_atom(name) do
case uploaded_entries(socket, name) do
{[], []} ->
uploads = socket.assigns[:uploads] || %{}
upload_config =
uploads
|> Map.fetch!(name)
|> UploadConfig.disallow()
new_refs =
Enum.reduce(uploads[@refs_to_names], uploads[@refs_to_names], fn
{ref, ^name}, acc -> Map.delete(acc, ref)
{_ref, _name}, acc -> acc
end)
new_uploads =
uploads
|> Map.put(name, upload_config)
|> Map.update!(@refs_to_names, fn _ -> new_refs end)
Utils.assign(socket, :uploads, new_uploads)
{_completed, _inprogress} ->
raise RuntimeError, "unable to disallow_upload for an upload with active entries"
end
end
@doc """
Cancels an upload entry.
"""
def cancel_upload(socket, name, entry_ref) do
upload_config = Map.fetch!(socket.assigns[:uploads] || %{}, name)
%UploadEntry{} = entry = UploadConfig.get_entry_by_ref(upload_config, entry_ref)
upload_config
|> UploadConfig.cancel_entry(entry)
|> update_uploads(socket)
end
@doc """
Cancels all uploads that exist.
Returns the new socket with the cancelled upload configs.
"""
def maybe_cancel_uploads(socket) do
uploads = socket.assigns[:uploads] || %{}
uploads
|> Map.delete(@refs_to_names)
|> Enum.reduce({socket, []}, fn {name, conf}, {socket_acc, conf_acc} ->
new_socket =
Enum.reduce(conf.entries, socket_acc, fn entry, inner_acc ->
cancel_upload(inner_acc, name, entry.ref)
end)
{new_socket, [conf | conf_acc]}
end)
end
@doc """
Updates the entry metadata.
"""
def update_upload_entry_meta(%Socket{} = socket, upload_conf_name, %UploadEntry{} = entry, meta) do
socket.assigns.uploads
|> Map.fetch!(upload_conf_name)
|> UploadConfig.update_entry_meta(entry.ref, meta)
|> update_uploads(socket)
end
@doc """
Updates the entry progress.
Progress is either an integer percently between 0 and 100, or a map
with an `"error"` key containing the information for a failed upload
while in progress on the client.
"""
def update_progress(%Socket{} = socket, config_ref, entry_ref, progress)
when is_integer(progress) and progress >= 0 and progress <= 100 do
socket
|> get_upload_by_ref!(config_ref)
|> UploadConfig.update_progress(entry_ref, progress)
|> update_uploads(socket)
end
def update_progress(%Socket{} = socket, config_ref, entry_ref, %{"error" => reason})
when is_binary(reason) do
conf = get_upload_by_ref!(socket, config_ref)
put_upload_error(socket, conf.name, entry_ref, :external_client_failure)
end
@doc """
Puts the entries into the `%UploadConfig{}`.
"""
def put_entries(%Socket{} = socket, %UploadConfig{} = conf, entries, cid) do
case UploadConfig.put_entries(%UploadConfig{conf | cid: cid}, entries) do
{:ok, new_config} ->
{:ok, update_uploads(new_config, socket)}
{:error, new_config} ->
errors_resp = Enum.map(new_config.errors, fn {ref, msg} -> [ref, msg] end)
{:error, %{ref: conf.ref, error: errors_resp}, update_uploads(new_config, socket)}
end
end
@doc """
Unregisters a completed entry from an `Phoenix.LiveView.UploadChannel` process.
"""
def unregister_completed_entry_upload(%Socket{} = socket, %UploadConfig{} = conf, entry_ref) do
conf
|> UploadConfig.unregister_completed_entry(entry_ref)
|> update_uploads(socket)
end
@doc """
Registers a new entry upload for an `Phoenix.LiveView.UploadChannel` process.
"""
def register_entry_upload(%Socket{} = socket, %UploadConfig{} = conf, pid, entry_ref)
when is_pid(pid) do
case UploadConfig.register_entry_upload(conf, pid, entry_ref) do
{:ok, new_config} ->
entry = UploadConfig.get_entry_by_ref(new_config, entry_ref)
{:ok, update_uploads(new_config, socket), entry}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Populates the errors for a given entry.
"""
def put_upload_error(%Socket{} = socket, conf_name, entry_ref, reason) do
conf = Map.fetch!(socket.assigns.uploads, conf_name)
conf
|> UploadConfig.put_error(entry_ref, reason)
|> update_uploads(socket)
end
@doc """
Retrieves the `%UploadConfig{}` from the socket for the provided ref or raises.
"""
def get_upload_by_ref!(%Socket{} = socket, config_ref) do
uploads = socket.assigns[:uploads] || raise(ArgumentError, no_upload_allowed_message(socket))
name = Map.fetch!(uploads[@refs_to_names], config_ref)
Map.fetch!(uploads, name)
end
defp no_upload_allowed_message(socket) do
"no uploads have been allowed on " <>
if(socket.assigns[:myself], do: "component running inside ", else: "") <>
"LiveView named #{inspect(socket.view)}"
end
@doc """
Returns the `%UploadConfig{}` from the socket for the `Phoenix.LiveView.UploadChannel` pid.
"""
def get_upload_by_pid(socket, pid) when is_pid(pid) do
Enum.find_value(socket.assigns[:uploads] || %{}, fn
{@refs_to_names, _} -> false
{_name, %UploadConfig{} = conf} -> UploadConfig.get_entry_by_pid(conf, pid) && conf
end)
end
@doc """
Returns the completed and in progress entries for the upload.
"""
def uploaded_entries(%Socket{} = socket, name) do
entries =
case Map.fetch(socket.assigns[:uploads] || %{}, name) do
{:ok, conf} -> conf.entries
:error -> []
end
Enum.reduce(entries, {[], []}, fn entry, {done, in_progress} ->
if entry.done? do
{[entry | done], in_progress}
else
{done, [entry | in_progress]}
end
end)
end
@doc """
Consumes the uploaded entries or raises if entries are still in progress.
"""
def consume_uploaded_entries(%Socket{} = socket, name, func) when is_function(func, 2) do
conf =
socket.assigns[:uploads][name] ||
raise ArgumentError, "no upload allowed for #{inspect(name)}"
case uploaded_entries(socket, name) do
{[_ | _] = done_entries, []} ->
consume_entries(conf, done_entries, func)
{_, [_ | _]} ->
raise ArgumentError, "cannot consume uploaded files when entries are still in progress"
{[], []} ->
[]
end
end
@doc """
Consumes an individual entry or raises if it is still in progress.
"""
def consume_uploaded_entry(%Socket{} = socket, %UploadEntry{} = entry, func)
when is_function(func, 1) do
unless entry.done?,
do: raise(ArgumentError, "cannot consume uploaded files when entries are still in progress")
conf = Map.fetch!(socket.assigns[:uploads], entry.upload_config)
[result] = consume_entries(conf, [entry], func)
result
end
@doc """
Drops all entries from the upload.
"""
def drop_upload_entries(%Socket{} = socket, %UploadConfig{} = conf, entry_refs) do
conf.entries
|> Enum.filter(fn entry -> entry.ref in entry_refs end)
|> Enum.reduce(conf, fn entry, acc -> UploadConfig.drop_entry(acc, entry) end)
|> update_uploads(socket)
end
defp update_uploads(%UploadConfig{} = new_conf, %Socket{} = socket) do
new_uploads = Map.update!(socket.assigns.uploads, new_conf.name, fn _ -> new_conf end)
Utils.assign(socket, :uploads, new_uploads)
end
defp consume_entries(%UploadConfig{} = conf, entries, func)
when is_list(entries) and is_function(func) do
if conf.external do
results =
entries
|> Enum.map(fn entry ->
meta = Map.fetch!(conf.entry_refs_to_metas, entry.ref)
result =
cond do
is_function(func, 1) -> func.(meta)
is_function(func, 2) -> func.(meta, entry)
end
case result do
{:ok, return} ->
{entry.ref, return}
{:postpone, return} ->
{:postpone, return}
return ->
IO.warn("""
consuming uploads requires a return signature matching:
{:ok, value} | {:postpone, value}
got:
#{inspect(return)}
""")
{entry.ref, return}
end
end)
consumed_refs =
Enum.flat_map(results, fn
{:postpone, _result} -> []
{ref, _result} -> [ref]
end)
Phoenix.LiveView.Channel.drop_upload_entries(conf, consumed_refs)
Enum.map(results, fn {_ref, result} -> result end)
else
entries
|> Enum.map(fn entry -> {entry, UploadConfig.entry_pid(conf, entry)} end)
|> Enum.filter(fn {_entry, pid} -> is_pid(pid) end)
|> Enum.map(fn {entry, pid} -> Phoenix.LiveView.UploadChannel.consume(pid, entry, func) end)
end
end
@doc """
Generates a preflight response by calling the `:external` function.
"""
def generate_preflight_response(%Socket{} = socket, name, cid) do
%UploadConfig{} = conf = Map.fetch!(socket.assigns.uploads, name)
client_meta = %{
max_file_size: conf.max_file_size,
max_entries: conf.max_entries,
chunk_size: conf.chunk_size
}
{new_socket, new_conf, new_entries} = mark_preflighted(socket, conf)
case new_conf.external do
false ->
channel_preflight(new_socket, new_conf, new_entries, cid, client_meta)
func when is_function(func) ->
external_preflight(new_socket, new_conf, new_entries, client_meta)
end
end
defp mark_preflighted(socket, conf) do
{new_conf, new_entries} = UploadConfig.mark_preflighted(conf)
new_socket = update_uploads(new_conf, socket)
{new_socket, new_conf, new_entries}
end
defp channel_preflight(
%Socket{} = socket,
%UploadConfig{} = conf,
entries,
cid,
%{} = client_config_meta
) do
reply_entries =
for entry <- entries, into: %{} do
token =
Phoenix.LiveView.Static.sign_token(socket.endpoint, %{
pid: self(),
ref: {conf.ref, entry.ref},
cid: cid
})
{entry.ref, token}
end
{:ok, %{ref: conf.ref, config: client_config_meta, entries: reply_entries}, socket}
end
defp external_preflight(%Socket{} = socket, %UploadConfig{} = conf, entries, client_config_meta) do
reply_entries =
Enum.reduce_while(entries, {:ok, %{}, socket}, fn entry, {:ok, metas, acc} ->
case conf.external.(entry, acc) do
{:ok, %{} = meta, new_socket} ->
new_socket = update_upload_entry_meta(new_socket, conf.name, entry, meta)
{:cont, {:ok, Map.put(metas, entry.ref, meta), new_socket}}
{:error, %{} = meta, new_socket} ->
{:halt, {:error, {entry.ref, meta}, new_socket}}
end
end)
case reply_entries do
{:ok, entry_metas, new_socket} ->
{:ok, %{ref: conf.ref, config: client_config_meta, entries: entry_metas}, new_socket}
{:error, {entry_ref, meta_reason}, new_socket} ->
new_socket = put_upload_error(new_socket, conf.name, entry_ref, meta_reason)
{:error, %{ref: conf.ref, error: [[entry_ref, meta_reason]]}, new_socket}
end
end
def register_cid(%Socket{} = socket, ref, cid) do
socket
|> get_upload_by_ref!(ref)
|> UploadConfig.register_cid(cid)
|> update_uploads(socket)
end
end
| 30.794118 | 106 | 0.63085 |
e8494536be094d92e9aab12f1bf1308e6f9aedd6 | 8,207 | exs | Elixir | test/sparql/algebra/group_graph_pattern_test.exs | pukkamustard/sparql-ex | 4f8907ddbd15215c9b6c40edac19362c92e34d91 | [
"MIT"
] | null | null | null | test/sparql/algebra/group_graph_pattern_test.exs | pukkamustard/sparql-ex | 4f8907ddbd15215c9b6c40edac19362c92e34d91 | [
"MIT"
] | null | null | null | test/sparql/algebra/group_graph_pattern_test.exs | pukkamustard/sparql-ex | 4f8907ddbd15215c9b6c40edac19362c92e34d91 | [
"MIT"
] | null | null | null | defmodule SPARQL.Algebra.GroupGraphPatternTest do
use SPARQL.Test.Case
import SPARQL.Language.Decoder, only: [decode: 1]
test "a filter splitting a bgp into a group" do
query = """
SELECT ?s
WHERE {
?s ?p ?o
FILTER(?o = 1)
?s2 ?p2 ?o2
}
"""
n1 = RDF.integer(1)
assert {:ok, %SPARQL.Query{
expr: %SPARQL.Algebra.Project{
vars: ~w[s],
expr: %SPARQL.Algebra.Filter{
filters: [
%SPARQL.Algebra.FunctionCall.Builtin{
name: :=,
arguments: ["o", ^n1]
}
],
expr: %SPARQL.Algebra.Join{
expr1: %SPARQL.Algebra.BGP{triples: [{"s", "p", "o"}]},
expr2: %SPARQL.Algebra.BGP{triples: [{"s2", "p2", "o2"}]}
}
}
}
}} = decode(query)
end
test "nested graph pattern without filter" do
[
"""
SELECT ?s
WHERE {
{?s ?p ?o}
?s2 ?p2 ?o2 .
}
""",
"""
SELECT ?s
WHERE {
?s ?p ?o .
{?s2 ?p2 ?o2}
}
""",
"""
SELECT ?s
WHERE {
{?s ?p ?o}
{?s2 ?p2 ?o2}
}
"""
]
|> Enum.each(fn query ->
assert {:ok, %SPARQL.Query{
expr: %SPARQL.Algebra.Project{
vars: ~w[s],
expr: %SPARQL.Algebra.Join{
expr1: %SPARQL.Algebra.BGP{triples: [{"s", "p", "o"}]},
expr2: %SPARQL.Algebra.BGP{triples: [{"s2", "p2", "o2"}]}
}
}
}} = decode(query)
end)
end
test "nested graph pattern with filter" do
n1 = RDF.integer(1)
[
"""
SELECT ?s
WHERE {
{
?s ?p ?o
FILTER(?o = 1)
}
?s2 ?p2 ?o2
}
""",
"""
SELECT ?s
WHERE {
{
FILTER(?o = 1)
?s ?p ?o
}
?s2 ?p2 ?o2
}
""",
]
|> Enum.each(fn query ->
assert {:ok, %SPARQL.Query{
expr: %SPARQL.Algebra.Project{
vars: ~w[s],
expr: %SPARQL.Algebra.Join{
expr1: %SPARQL.Algebra.Filter{
filters: [
%SPARQL.Algebra.FunctionCall.Builtin{
name: :=,
arguments: ["o", ^n1]
}
],
expr: %SPARQL.Algebra.BGP{triples: [{"s", "p", "o"}]}
},
expr2: %SPARQL.Algebra.BGP{triples: [{"s2", "p2", "o2"}]}
}
}
}} = decode(query)
end)
[
"""
SELECT ?s
WHERE {
?s ?p ?o .
{
?s2 ?p2 ?o2 .
FILTER(?o2 = 1)
}
}
""",
"""
SELECT ?s
WHERE {
?s ?p ?o .
{
FILTER(?o2 = 1)
?s2 ?p2 ?o2 .
}
}
"""
]
|> Enum.each(fn query ->
assert {:ok, %SPARQL.Query{
expr: %SPARQL.Algebra.Project{
vars: ~w[s],
expr: %SPARQL.Algebra.Join{
expr1: %SPARQL.Algebra.BGP{triples: [{"s", "p", "o"}]},
expr2: %SPARQL.Algebra.Filter{
filters: [
%SPARQL.Algebra.FunctionCall.Builtin{
name: :=,
arguments: ["o2", ^n1]
}
],
expr: %SPARQL.Algebra.BGP{triples: [{"s2", "p2", "o2"}]}
},
}
}
}} = decode(query)
end)
end
test "nested group graph patterns" do
n1 = RDF.integer(1)
[
"""
SELECT ?s
WHERE {
{
?s ?p ?o
FILTER(?o2 = 1)
?s2 ?p2 ?o2 .
}
{
?s3 ?p3 ?o3 .
}
}
""",
"""
SELECT ?s
WHERE {
{{
?s ?p ?o
FILTER(?o2 = 1)
?s2 ?p2 ?o2 .
}}
{
?s3 ?p3 ?o3 .
}
}
"""
]
|> Enum.each(fn query ->
assert {:ok, %SPARQL.Query{
expr: %SPARQL.Algebra.Project{
vars: ~w[s],
expr: %SPARQL.Algebra.Join{
expr1: %SPARQL.Algebra.Filter{
filters: [
%SPARQL.Algebra.FunctionCall.Builtin{
name: :=,
arguments: ["o2", ^n1]
}
],
expr: %SPARQL.Algebra.Join{
expr1: %SPARQL.Algebra.BGP{triples: [{"s", "p", "o"}]},
expr2: %SPARQL.Algebra.BGP{triples: [{"s2", "p2", "o2"}]}
}
},
expr2: %SPARQL.Algebra.BGP{triples: [{"s3", "p3", "o3"}]},
}
}
}} = decode(query)
end)
query = """
SELECT ?s
WHERE {
?s ?p ?o
{
?s2 ?p2 ?o2 .
FILTER(?o2 = 1)
?s3 ?p3 ?o3 .
}
}
"""
assert {:ok, %SPARQL.Query{
expr: %SPARQL.Algebra.Project{
vars: ~w[s],
expr: %SPARQL.Algebra.Join{
expr1: %SPARQL.Algebra.BGP{triples: [{"s", "p", "o"}]},
expr2: %SPARQL.Algebra.Filter{
filters: [
%SPARQL.Algebra.FunctionCall.Builtin{
name: :=,
arguments: ["o2", ^n1]
}
],
expr: %SPARQL.Algebra.Join{
expr1: %SPARQL.Algebra.BGP{triples: [{"s2", "p2", "o2"}]},
expr2: %SPARQL.Algebra.BGP{triples: [{"s3", "p3", "o3"}]}
}
}
}
}
}} = decode(query)
end
test "simplification" do
query = """
SELECT ?s
WHERE {
?s ?p ?o .
{}
?s2 ?p2 ?o2 .
}
"""
assert {:ok, %SPARQL.Query{
expr: %SPARQL.Algebra.Project{
vars: ~w[s],
expr: %SPARQL.Algebra.Join{
expr1: %SPARQL.Algebra.BGP{triples: [{"s", "p", "o"}]},
expr2: %SPARQL.Algebra.BGP{triples: [{"s2", "p2", "o2"}]}
}
}
}} = decode(query)
n1 = RDF.integer(1)
[
"""
SELECT ?s
WHERE {
?s ?p ?o .
{}
?s2 ?p2 ?o2 .
FILTER(?o = 1)
}
""",
"""
SELECT ?s
WHERE {
?s ?p ?o .
{}
FILTER(?o = 1)
?s2 ?p2 ?o2 .
}
""",
"""
SELECT ?s
WHERE {
?s ?p ?o .
FILTER(?o = 1)
{}
?s2 ?p2 ?o2 .
}
""",
"""
SELECT ?s
WHERE {
FILTER(?o = 1)
?s ?p ?o .
{}
?s2 ?p2 ?o2 .
}
"""
]
|> Enum.each(fn query ->
assert {:ok, %SPARQL.Query{
expr: %SPARQL.Algebra.Project{
vars: ~w[s],
expr: %SPARQL.Algebra.Filter{
filters: [
%SPARQL.Algebra.FunctionCall.Builtin{
name: :=,
arguments: ["o", ^n1]
}
],
expr: %SPARQL.Algebra.Join{
expr1: %SPARQL.Algebra.BGP{triples: [{"s", "p", "o"}]},
expr2: %SPARQL.Algebra.BGP{triples: [{"s2", "p2", "o2"}]}
}
}
}
}} = decode(query)
end)
end
end
| 25.174847 | 83 | 0.321433 |
e8494fa1e1149483451f8ec7759110c8690514cc | 1,955 | ex | Elixir | clients/cloud_search/lib/google_api/cloud_search/v1/model/schema.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_search/lib/google_api/cloud_search/v1/model/schema.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_search/lib/google_api/cloud_search/v1/model/schema.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudSearch.V1.Model.Schema do
@moduledoc """
The schema definition for a data source.
## Attributes
* `objectDefinitions` (*type:* `list(GoogleApi.CloudSearch.V1.Model.ObjectDefinition.t)`, *default:* `nil`) - The list of top-level objects for the data source. The maximum number of elements is 10.
* `operationIds` (*type:* `list(String.t)`, *default:* `nil`) - IDs of the Long Running Operations (LROs) currently running for this schema. After modifying the schema, wait for operations to complete before indexing additional content.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:objectDefinitions => list(GoogleApi.CloudSearch.V1.Model.ObjectDefinition.t()) | nil,
:operationIds => list(String.t()) | nil
}
field(:objectDefinitions, as: GoogleApi.CloudSearch.V1.Model.ObjectDefinition, type: :list)
field(:operationIds, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.CloudSearch.V1.Model.Schema do
def decode(value, options) do
GoogleApi.CloudSearch.V1.Model.Schema.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudSearch.V1.Model.Schema do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.1 | 240 | 0.739642 |
e849653813d25f2ea37f555b2922370864afa7ac | 380 | ex | Elixir | lib/queerlink_web/views/error_view.ex | Queertoo/Queerlink | 0a7726460cda63fc4ab342a2fe1d1155caa3d6d4 | [
"MIT"
] | 38 | 2015-11-07T23:54:26.000Z | 2021-04-09T04:14:25.000Z | lib/queerlink_web/views/error_view.ex | Queertoo/Queerlink | 0a7726460cda63fc4ab342a2fe1d1155caa3d6d4 | [
"MIT"
] | 2 | 2015-11-23T15:00:34.000Z | 2015-11-26T09:59:26.000Z | lib/queerlink_web/views/error_view.ex | Queertoo/Queerlink | 0a7726460cda63fc4ab342a2fe1d1155caa3d6d4 | [
"MIT"
] | 6 | 2015-11-26T00:25:22.000Z | 2020-03-04T22:13:59.000Z | defmodule QueerlinkWeb.ErrorView do
use QueerlinkWeb, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Internal server error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 21.111111 | 47 | 0.705263 |
e8499d4100201047b12803891a43d6324f722e6b | 11,552 | ex | Elixir | web_finngen_r4/lib/risteys_web/views/phenocode_view.ex | vincent-octo/risteys | 5bb1e70b78988770048b91b42fad025faf98d84a | [
"MIT"
] | null | null | null | web_finngen_r4/lib/risteys_web/views/phenocode_view.ex | vincent-octo/risteys | 5bb1e70b78988770048b91b42fad025faf98d84a | [
"MIT"
] | null | null | null | web_finngen_r4/lib/risteys_web/views/phenocode_view.ex | vincent-octo/risteys | 5bb1e70b78988770048b91b42fad025faf98d84a | [
"MIT"
] | null | null | null | defmodule RisteysWeb.PhenocodeView do
use RisteysWeb, :view
require Integer
def render("assocs.json", %{phenocode: phenocode, assocs: assocs}) do
%{
"plot" => data_assocs_plot(phenocode, assocs),
"table" => data_assocs_table(phenocode.id, assocs)
}
end
defp table_data_sources(data_sources) do
# Merge HD registry ICDs
hd_icd10s = render_icds("ICD-10: ", data_sources.hd_icd10s, true)
hd_icd9s = render_icds("ICD-9: ", data_sources.hd_icd9s, false)
hd_icd8s =
if not is_nil(data_sources.hd_icd8s) do
"ICD-8: " <> data_sources.hd_icd8s
else
""
end
hd = [hd_icd10s, hd_icd9s, hd_icd8s]
hd = Enum.reject(hd, fn val -> val == "" end)
hd = Enum.intersperse(hd, ", ")
# Merge COD registry ICDs
cod_icd10s = render_icds("ICD-10: ", data_sources.cod_icd10s, true)
cod_icd9s = render_icds("ICD-9: ", data_sources.cod_icd9s, false)
cod_icd8s =
if not is_nil(data_sources.cod_icd8s) do
"ICD-8: " <> data_sources.cod_icd8s
else
""
end
cod = [cod_icd10s, cod_icd9s, cod_icd8s]
cod = Enum.reject(cod, fn val -> val == "" end)
cod = Enum.intersperse(cod, ", ")
kela_icd10s = render_icds("ICD-10: ", data_sources.kela_icd10s, true)
# Link to included phenocodes
include =
if not is_nil(data_sources.include) do
data_sources.include
|> String.split("|")
|> Enum.map(fn name -> content_tag(:a, name, href: name) end)
|> Enum.intersperse(", ")
end
# Build the whole table
kela_abbr = abbr("KELA", "Finnish Social Insurance Institution")
table = [
{"Hospital Discharge registry", hd},
{"Hospital Discharge registry: exclude ICD-10", data_sources.hd_icd10s_excl},
{"Hospital Discharge registry: exclude ICD-9", data_sources.hd_icd9s_excl},
{"Hospital Discharge registry: exclude ICD-8", data_sources.hd_icd8s_excl},
{"Hospital Discharge registry: only main entry used", data_sources.hd_mainonly},
{"Cause of Death registry", cod},
{"Cause of Death registry: exclude ICD-10", data_sources.cod_icd10s_excl},
{"Cause of Death registry: exclude ICD-9", data_sources.cod_icd9s_excl},
{"Cause of Death registry: exclude ICD-8", data_sources.cod_icd8s_excl},
{"Cause of Death registry: only main entry used", data_sources.cod_mainonly},
{"Outpatient visit: ICD and other codes ", data_sources.outpat_icd},
{"Operations: NOMESCO codes", data_sources.oper_nom},
{"Operations: FINNISH HOSPITAL LEAGUE codes", data_sources.oper_hl},
{"Operations: HEART PATIENT codes V1", data_sources.oper_hp1},
{"Operations: HEART PATIENT codes V2", data_sources.oper_hp2},
{[kela_abbr | " reimboursements codes"], data_sources.kela_reimb},
{[kela_abbr | " reimbursements"], kela_icd10s},
{"Medicine purchases: ATC; other reg. data required", data_sources.kela_atc_needother},
{"Medicine purchases: ATC codes", data_sources.kela_atc},
{"Cancer reg: TOPOGRAPHY codes", data_sources.canc_topo},
{"Cancer reg: MORPHOLOGY codes", data_sources.canc_morph},
{"Sex specific endpoint", data_sources.sex},
{"Pre-conditions required", data_sources.pre_conditions},
{"Conditions required", data_sources.conditions},
{"Include", include},
{"Level in the ICD-hierarchy", data_sources.level},
{"First defined in version", data_sources.version}
]
# Discard table rows with no values
Enum.reject(table, fn {_name, values} -> values in ["", nil, []] end)
end
defp render_icds(_prefix, nil, _url), do: ""
defp render_icds(_prefix, [], _url), do: ""
defp render_icds(prefix, icds, url?) do
icds =
icds
|> Enum.map(fn icd ->
text = abbr(icd.code, icd.description)
if url? do
icd10_url(text, icd.code)
else
text
end
end)
|> Enum.intersperse("/")
[prefix | icds]
end
defp table_ontology(ontology) do
display = %{
"DOID" => %{
display: "DOID",
url: fn doid ->
link = "https://www.ebi.ac.uk/ols/search?q=" <> doid <> "&ontology=doid"
ahref(doid, link)
end
},
"EFO" => %{
display: "GWAS catalog",
url: fn efo ->
link = "https://www.ebi.ac.uk/gwas/efotraits/EFO_" <> efo
ahref(efo, link)
end
},
"MESH" => %{
display: "MESH",
url: fn mesh ->
link = "https://meshb.nlm.nih.gov/record/ui?ui=" <> mesh
ahref(mesh, link)
end
},
"SNOMED" => %{
display: "SNOMED CT",
url: fn snomed ->
link =
"https://browser.ihtsdotools.org/?perspective=full&conceptId1=" <>
snomed <> "&edition=en-edition"
ahref(snomed, link)
end
}
}
table =
for {source, values} <- ontology, into: %{} do
values =
Enum.map(values, fn id ->
fun =
display
|> Map.fetch!(source)
|> Map.fetch!(:url)
fun.(id)
end)
source =
display
|> Map.fetch!(source)
|> Map.fetch!(:display)
values = Enum.intersperse(values, ", ")
{source, values}
end
Enum.reject(table, fn {_name, values} -> values == [] end)
end
defp distrib_values(distrib) do
if is_nil(distrib) do
[]
else
for [bin, val] <- distrib do
val = if is_nil(val), do: "NaN", else: val
[bin, val]
end
end
end
defp abbr(text, title) do
content_tag(:abbr, text, [{:data, [title: title]}])
end
defp icd10_url(text, icd) do
# ICD browser uses X12.3 instead of X1234
short = String.slice(icd, 0..3)
{prefix, suffix} = String.split_at(short, 3)
icd = prefix <> "." <> suffix
ahref(text, "https://icd.who.int/browse10/2016/en#/#{icd}")
end
defp ahref(text, link) do
content_tag(:a, text,
href: link,
rel: "external nofollow noopener noreferrer",
target: "_blank"
)
end
defp round(number, precision) do
case number do
"-" -> "-"
_ -> Float.round(number, precision)
end
end
defp percentage(number) do
case number do
"-" ->
"-"
nil ->
"-"
_ ->
number * 100
end
end
defp pvalue_str(pvalue) do
# Print the given pvalue using scientific notation, display
# "<1e-100" if very low.
cond do
is_nil(pvalue) ->
"-"
pvalue < 1.0e-100 ->
"<1e-100"
true ->
# See http://erlang.org/doc/man/io.html#format-2
:io_lib.format("~.2. e", [pvalue]) |> to_string()
end
end
defp data_assocs_plot(phenocode, assocs) do
Enum.map(assocs, fn assoc ->
# Find direction given phenocode of interest
{other_pheno_name, other_pheno_longname, other_pheno_category, direction} =
if phenocode.name == assoc.prior_name do
{assoc.outcome_name, assoc.outcome_longname, assoc.outcome_category, "after"}
else
{assoc.prior_name, assoc.prior_longname, assoc.prior_category, "before"}
end
%{
"name" => other_pheno_name,
"longname" => other_pheno_longname,
"category" => other_pheno_category,
"direction" => direction,
"hr" => round(assoc.hr, 2),
"ci_min" => round(assoc.ci_min, 2),
"ci_max" => round(assoc.ci_max, 2),
"pvalue_str" => pvalue_str(assoc.pvalue),
"pvalue_num" => assoc.pvalue,
"nindivs" => assoc.nindivs
}
end)
end
defp data_assocs_table(pheno_id, assocs) do
# Takes the associations from the database and transform them to
# values for the assocation table, such that each table row has
# "before" and "after" associations with the given pheno_id.
no_stats = %{
"hr" => nil,
"ci_min" => nil,
"ci_max" => nil,
"pvalue" => nil,
"nindivs" => nil,
"lagged_hr_cut_year" => nil
}
rows =
Enum.reduce(assocs, %{}, fn assoc, acc ->
to_record(acc, assoc, pheno_id)
end)
Enum.map(rows, fn {other_id, lag_data} ->
no_lag_before =
case get_in(lag_data, [0, "before"]) do
nil ->
no_stats
stats ->
stats
end
no_lag_after =
case get_in(lag_data, [0, "after"]) do
nil ->
no_stats
stats ->
stats
end
lag_1y_before =
case get_in(lag_data, [1, "before"]) do
nil ->
no_stats
stats ->
stats
end
lag_1y_after =
case get_in(lag_data, [1, "after"]) do
nil ->
no_stats
stats ->
stats
end
lag_5y_before =
case get_in(lag_data, [5, "before"]) do
nil ->
no_stats
stats ->
stats
end
lag_5y_after =
case get_in(lag_data, [5, "after"]) do
nil ->
no_stats
stats ->
stats
end
lag_15y_before =
case get_in(lag_data, [15, "before"]) do
nil ->
no_stats
stats ->
stats
end
lag_15y_after =
case get_in(lag_data, [15, "after"]) do
nil ->
no_stats
stats ->
stats
end
%{
"id" => other_id,
"name" => lag_data["name"],
"longname" => lag_data["longname"],
"all" => %{
"before" => no_lag_before,
"after" => no_lag_after
},
"lagged_1y" => %{
"before" => lag_1y_before,
"after" => lag_1y_after
},
"lagged_5y" => %{
"before" => lag_5y_before,
"after" => lag_5y_after
},
"lagged_15y" => %{
"before" => lag_15y_before,
"after" => lag_15y_after
}
}
end)
end
defp to_record(res, assoc, pheno_id) do
# Takes an association and transform it to a suitable value for a
# row in the association table.
[dir, other_pheno] =
if pheno_id == assoc.prior_id do
[
"after",
%{
id: assoc.outcome_id,
name: assoc.outcome_name,
longname: assoc.outcome_longname
}
]
else
[
"before",
%{
id: assoc.prior_id,
name: assoc.prior_name,
longname: assoc.prior_longname
}
]
end
lag = assoc.lagged_hr_cut_year
new_stats = %{
"hr" => round(assoc.hr, 2),
"ci_min" => round(assoc.ci_min, 2),
"ci_max" => round(assoc.ci_max, 2),
"pvalue" => assoc.pvalue,
"pvalue_str" => pvalue_str(assoc.pvalue),
"nindivs" => assoc.nindivs
}
# Create pheno mapping if not existing
res =
if is_nil(Map.get(res, other_pheno.id)) do
Map.put(res, other_pheno.id, %{})
else
res
end
# Create inner lag mapping if not existing
res =
if is_nil(get_in(res, [other_pheno.id, lag])) do
put_in(res, [other_pheno.id, lag], %{})
else
res
end
res
|> put_in([other_pheno.id, lag, dir], new_stats)
|> put_in([other_pheno.id, "name"], other_pheno.name)
|> put_in([other_pheno.id, "longname"], other_pheno.longname)
end
end
| 26.495413 | 93 | 0.554449 |
e849a93fa01b6a659490433798e16db9919fa50f | 2,111 | exs | Elixir | test/netstrings_test.exs | mwmiller/netstrings_ex | a36095a24614dbfcb723712582482931a6d855bc | [
"MIT"
] | 1 | 2016-02-07T18:40:52.000Z | 2016-02-07T18:40:52.000Z | test/netstrings_test.exs | mwmiller/ex_netstrings | a36095a24614dbfcb723712582482931a6d855bc | [
"MIT"
] | null | null | null | test/netstrings_test.exs | mwmiller/ex_netstrings | a36095a24614dbfcb723712582482931a6d855bc | [
"MIT"
] | null | null | null | defmodule NetstringsTest do
use ExUnit.Case
import Netstrings
doctest Netstrings
# Reference cases taken from D. J. Bernstein's netstrings specification.
test "djb examples" do
  assert encode("") == "0:,", "Encode empty string."
  assert decode("0:,") == {[""], ""}, "Decode empty string."
  assert encode("hello world!") == "12:hello world!,", "Encode hello world!"
  assert decode("12:hello world!,") == {["hello world!"], ""}, "Decode hello world!"

  assert decode("012:hello world!,") == {["hello world!"], ""},
         "Spec violation: lax acceptance of leading 0 in decode"
end

# encode/1 only accepts binaries; anything else yields an error tuple.
test "encode" do
  assert encode(0) == {:error, "Can only encode binaries"}, "Can only encode binaries"
  assert encode("√2") == "4:√2,", "UTF-8 string including number"
end

# encode!/1 raises instead of returning an error tuple.
test "encode!" do
  assert_raise RuntimeError, "Can only encode binaries", fn -> encode!(0) end
  assert encode!("√2") == "4:√2,", "UTF-8 string including number"
end

# decode/1 returns {decoded_strings, undecoded_remainder} and stops at the
# first malformed or incomplete netstring, leaving the rest untouched.
test "decode" do
  assert decode(0) == {:error, "Can only decode binaries"}, "Can only decode binaries"
  assert decode("0:,0:,") == {["", ""], ""}, "Pair of empty strings."
  assert decode("4:√3,") == {["√3"], ""}, "UTF-8 string including number"
  assert decode("4:say,,") == {["say,"], ""}, "Including a comma"
  assert decode("4:say:,") == {["say:"], ""}, "Including a colon"
  assert decode("3:say:,") == {[], "3:say:,"}, "Improper netstring left undecoded"

  assert decode("2:hi,5:there,3:") == {["hi", "there"], "3:"},
         "Incomplete netstring is left as remainder"

  assert decode("2:hi,4:there,3") == {["hi"], "4:there,3"}, "Stop as soon as improper is hit"
  assert decode("2:hi,:") == {["hi"], ":"}, "Remaining colon is untouched"
end

# decode!/1 raises on non-binary input but otherwise mirrors decode/1.
test "decode!" do
  assert_raise RuntimeError, "Can only decode binaries", fn -> decode!(0) end
  assert decode!("0:,0:,") == {["", ""], ""}, "Pair of empty strings."

  assert decode!("2:hi,5:there,3:") == {["hi", "there"], "3:"},
         "Incomplete netstring is left as remainder"
end

# Encoding then decoding must return the original payload unchanged.
test "exceptional round trip" do
  assert encode!("Scheiße") |> decode! == {["Scheiße"], ""}, "Garbage in/garbage out"
end
end
| 39.830189 | 95 | 0.586926 |
e849b636fb18c3752cc855033ec87b922fe73671 | 23,336 | ex | Elixir | clients/android_publisher/lib/google_api/android_publisher/v2/api/purchases.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/android_publisher/lib/google_api/android_publisher/v2/api/purchases.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/android_publisher/lib/google_api/android_publisher/v2/api/purchases.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidPublisher.V2.Api.Purchases do
@moduledoc """
API calls for all endpoints tagged `Purchases`.
"""
alias GoogleApi.AndroidPublisher.V2.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Checks the purchase and consumption status of an inapp item.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidPublisher.V2.Connection.t`) - Connection to server
* `package_name` (*type:* `String.t`) - The package name of the application the inapp product was sold in (for example, 'com.some.thing').
* `product_id` (*type:* `String.t`) - The inapp product SKU (for example, 'com.some.thing.inapp1').
* `token` (*type:* `String.t`) - The token provided to the user's device when the inapp product was purchased.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AndroidPublisher.V2.Model.ProductPurchase{}}` on success
* `{:error, info}` on failure
"""
@spec androidpublisher_purchases_products_get(
        Tesla.Env.client(),
        String.t(),
        String.t(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.AndroidPublisher.V2.Model.ProductPurchase.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def androidpublisher_purchases_products_get(
      connection,
      package_name,
      product_id,
      token,
      optional_params \\ [],
      opts \\ []
    ) do
  # Supported optional parameters and the request slot each one maps to.
  optional_params_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query
  }

  # GET /purchases/products/{productId}/tokens/{token}; path params are
  # percent-encoded to keep the URL well-formed.
  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url(
      "/androidpublisher/v2/applications/{packageName}/purchases/products/{productId}/tokens/{token}",
      %{
        "packageName" => URI.encode(package_name, &URI.char_unreserved?/1),
        "productId" => URI.encode(product_id, &URI.char_unreserved?/1),
        "token" => URI.encode(token, &URI.char_unreserved?/1)
      }
    )
    |> Request.add_optional_params(optional_params_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the request and decode the body into a ProductPurchase struct.
  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [struct: %GoogleApi.AndroidPublisher.V2.Model.ProductPurchase{}])
end
@doc """
Cancels a user's subscription purchase. The subscription remains valid until its expiration time.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidPublisher.V2.Connection.t`) - Connection to server
* `package_name` (*type:* `String.t`) - The package name of the application for which this subscription was purchased (for example, 'com.some.thing').
* `subscription_id` (*type:* `String.t`) - The purchased subscription ID (for example, 'monthly001').
* `token` (*type:* `String.t`) - The token provided to the user's device when the subscription was purchased.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %{}}` on success
* `{:error, info}` on failure
"""
@spec androidpublisher_purchases_subscriptions_cancel(
        Tesla.Env.client(),
        String.t(),
        String.t(),
        String.t(),
        keyword(),
        keyword()
      ) :: {:ok, nil} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()}
def androidpublisher_purchases_subscriptions_cancel(
      connection,
      package_name,
      subscription_id,
      token,
      optional_params \\ [],
      opts \\ []
    ) do
  # Supported optional parameters and the request slot each one maps to.
  optional_params_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query
  }

  # POST .../subscriptions/{subscriptionId}/tokens/{token}:cancel
  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url(
      "/androidpublisher/v2/applications/{packageName}/purchases/subscriptions/{subscriptionId}/tokens/{token}:cancel",
      %{
        "packageName" => URI.encode(package_name, &URI.char_unreserved?/1),
        "subscriptionId" => URI.encode(subscription_id, &URI.char_unreserved?/1),
        "token" => URI.encode(token, &URI.char_unreserved?/1)
      }
    )
    |> Request.add_optional_params(optional_params_config, optional_params)
    |> Request.library_version(@library_version)

  # The endpoint returns an empty body, so decoding is skipped.
  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [decode: false])
end
@doc """
Defers a user's subscription purchase until a specified future expiration time.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidPublisher.V2.Connection.t`) - Connection to server
* `package_name` (*type:* `String.t`) - The package name of the application for which this subscription was purchased (for example, 'com.some.thing').
* `subscription_id` (*type:* `String.t`) - The purchased subscription ID (for example, 'monthly001').
* `token` (*type:* `String.t`) - The token provided to the user's device when the subscription was purchased.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:body` (*type:* `GoogleApi.AndroidPublisher.V2.Model.SubscriptionPurchasesDeferRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AndroidPublisher.V2.Model.SubscriptionPurchasesDeferResponse{}}` on success
* `{:error, info}` on failure
"""
@spec androidpublisher_purchases_subscriptions_defer(
        Tesla.Env.client(),
        String.t(),
        String.t(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.AndroidPublisher.V2.Model.SubscriptionPurchasesDeferResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def androidpublisher_purchases_subscriptions_defer(
      connection,
      package_name,
      subscription_id,
      token,
      optional_params \\ [],
      opts \\ []
    ) do
  # Supported optional parameters; :body carries the defer request payload.
  optional_params_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query,
    :body => :body
  }

  # POST .../subscriptions/{subscriptionId}/tokens/{token}:defer
  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url(
      "/androidpublisher/v2/applications/{packageName}/purchases/subscriptions/{subscriptionId}/tokens/{token}:defer",
      %{
        "packageName" => URI.encode(package_name, &URI.char_unreserved?/1),
        "subscriptionId" => URI.encode(subscription_id, &URI.char_unreserved?/1),
        "token" => URI.encode(token, &URI.char_unreserved?/1)
      }
    )
    |> Request.add_optional_params(optional_params_config, optional_params)
    |> Request.library_version(@library_version)

  # Decode the body into a SubscriptionPurchasesDeferResponse struct.
  connection
  |> Connection.execute(request)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.AndroidPublisher.V2.Model.SubscriptionPurchasesDeferResponse{}]
  )
end
@doc """
Checks whether a user's subscription purchase is valid and returns its expiry time.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidPublisher.V2.Connection.t`) - Connection to server
* `package_name` (*type:* `String.t`) - The package name of the application for which this subscription was purchased (for example, 'com.some.thing').
* `subscription_id` (*type:* `String.t`) - The purchased subscription ID (for example, 'monthly001').
* `token` (*type:* `String.t`) - The token provided to the user's device when the subscription was purchased.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AndroidPublisher.V2.Model.SubscriptionPurchase{}}` on success
* `{:error, info}` on failure
"""
@spec androidpublisher_purchases_subscriptions_get(
        Tesla.Env.client(),
        String.t(),
        String.t(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.AndroidPublisher.V2.Model.SubscriptionPurchase.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def androidpublisher_purchases_subscriptions_get(
      connection,
      package_name,
      subscription_id,
      token,
      optional_params \\ [],
      opts \\ []
    ) do
  # Supported optional parameters and the request slot each one maps to.
  optional_params_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query
  }

  # GET .../subscriptions/{subscriptionId}/tokens/{token}
  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url(
      "/androidpublisher/v2/applications/{packageName}/purchases/subscriptions/{subscriptionId}/tokens/{token}",
      %{
        "packageName" => URI.encode(package_name, &URI.char_unreserved?/1),
        "subscriptionId" => URI.encode(subscription_id, &URI.char_unreserved?/1),
        "token" => URI.encode(token, &URI.char_unreserved?/1)
      }
    )
    |> Request.add_optional_params(optional_params_config, optional_params)
    |> Request.library_version(@library_version)

  # Decode the body into a SubscriptionPurchase struct.
  connection
  |> Connection.execute(request)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.AndroidPublisher.V2.Model.SubscriptionPurchase{}]
  )
end
@doc """
Refunds a user's subscription purchase, but the subscription remains valid until its expiration time and it will continue to recur.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidPublisher.V2.Connection.t`) - Connection to server
* `package_name` (*type:* `String.t`) - The package name of the application for which this subscription was purchased (for example, 'com.some.thing').
* `subscription_id` (*type:* `String.t`) - The purchased subscription ID (for example, 'monthly001').
* `token` (*type:* `String.t`) - The token provided to the user's device when the subscription was purchased.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %{}}` on success
* `{:error, info}` on failure
"""
@spec androidpublisher_purchases_subscriptions_refund(
        Tesla.Env.client(),
        String.t(),
        String.t(),
        String.t(),
        keyword(),
        keyword()
      ) :: {:ok, nil} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()}
def androidpublisher_purchases_subscriptions_refund(
      connection,
      package_name,
      subscription_id,
      token,
      optional_params \\ [],
      opts \\ []
    ) do
  # Supported optional parameters and the request slot each one maps to.
  optional_params_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query
  }

  # POST .../subscriptions/{subscriptionId}/tokens/{token}:refund
  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url(
      "/androidpublisher/v2/applications/{packageName}/purchases/subscriptions/{subscriptionId}/tokens/{token}:refund",
      %{
        "packageName" => URI.encode(package_name, &URI.char_unreserved?/1),
        "subscriptionId" => URI.encode(subscription_id, &URI.char_unreserved?/1),
        "token" => URI.encode(token, &URI.char_unreserved?/1)
      }
    )
    |> Request.add_optional_params(optional_params_config, optional_params)
    |> Request.library_version(@library_version)

  # The endpoint returns an empty body, so decoding is skipped.
  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [decode: false])
end
@doc """
Refunds and immediately revokes a user's subscription purchase. Access to the subscription will be terminated immediately and it will stop recurring.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidPublisher.V2.Connection.t`) - Connection to server
* `package_name` (*type:* `String.t`) - The package name of the application for which this subscription was purchased (for example, 'com.some.thing').
* `subscription_id` (*type:* `String.t`) - The purchased subscription ID (for example, 'monthly001').
* `token` (*type:* `String.t`) - The token provided to the user's device when the subscription was purchased.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %{}}` on success
* `{:error, info}` on failure
"""
@spec androidpublisher_purchases_subscriptions_revoke(
        Tesla.Env.client(),
        String.t(),
        String.t(),
        String.t(),
        keyword(),
        keyword()
      ) :: {:ok, nil} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()}
def androidpublisher_purchases_subscriptions_revoke(
      connection,
      package_name,
      subscription_id,
      token,
      optional_params \\ [],
      opts \\ []
    ) do
  # Supported optional parameters and the request slot each one maps to.
  optional_params_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query
  }

  # POST .../subscriptions/{subscriptionId}/tokens/{token}:revoke
  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url(
      "/androidpublisher/v2/applications/{packageName}/purchases/subscriptions/{subscriptionId}/tokens/{token}:revoke",
      %{
        "packageName" => URI.encode(package_name, &URI.char_unreserved?/1),
        "subscriptionId" => URI.encode(subscription_id, &URI.char_unreserved?/1),
        "token" => URI.encode(token, &URI.char_unreserved?/1)
      }
    )
    |> Request.add_optional_params(optional_params_config, optional_params)
    |> Request.library_version(@library_version)

  # The endpoint returns an empty body, so decoding is skipped.
  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [decode: false])
end
@doc """
Lists the purchases that were canceled, refunded or charged-back.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidPublisher.V2.Connection.t`) - Connection to server
* `package_name` (*type:* `String.t`) - The package name of the application for which voided purchases need to be returned (for example, 'com.some.thing').
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:endTime` (*type:* `String.t`) - The time, in milliseconds since the Epoch, of the newest voided purchase that you want to see in the response. The value of this parameter cannot be greater than the current time and is ignored if a pagination token is set. Default value is current time. Note: This filter is applied on the time at which the record is seen as voided by our systems and not the actual voided time returned in the response.
* `:maxResults` (*type:* `integer()`) -
* `:startIndex` (*type:* `integer()`) -
* `:startTime` (*type:* `String.t`) - The time, in milliseconds since the Epoch, of the oldest voided purchase that you want to see in the response. The value of this parameter cannot be older than 30 days and is ignored if a pagination token is set. Default value is current time minus 30 days. Note: This filter is applied on the time at which the record is seen as voided by our systems and not the actual voided time returned in the response.
* `:token` (*type:* `String.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AndroidPublisher.V2.Model.VoidedPurchasesListResponse{}}` on success
* `{:error, info}` on failure
"""
@spec androidpublisher_purchases_voidedpurchases_list(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.AndroidPublisher.V2.Model.VoidedPurchasesListResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def androidpublisher_purchases_voidedpurchases_list(
      connection,
      package_name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Supported optional parameters; includes the time-window and paging
  # filters (:endTime, :startTime, :maxResults, :startIndex, :token).
  optional_params_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query,
    :endTime => :query,
    :maxResults => :query,
    :startIndex => :query,
    :startTime => :query,
    :token => :query
  }

  # GET /applications/{packageName}/purchases/voidedpurchases
  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url(
      "/androidpublisher/v2/applications/{packageName}/purchases/voidedpurchases",
      %{
        "packageName" => URI.encode(package_name, &URI.char_unreserved?/1)
      }
    )
    |> Request.add_optional_params(optional_params_config, optional_params)
    |> Request.library_version(@library_version)

  # Decode the body into a VoidedPurchasesListResponse struct.
  connection
  |> Connection.execute(request)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.AndroidPublisher.V2.Model.VoidedPurchasesListResponse{}]
  )
end
end
| 43.782364 | 454 | 0.631385 |
e849bceba03a09e4a0a4d9c79d0789233d828a1e | 203 | ex | Elixir | lib/ex_klaviyo.ex | techgaun/ex_klaviyo | 85a71407a1264ed9dd9af959af09000c3f76cc31 | [
"Apache-2.0"
] | 1 | 2021-06-15T04:52:31.000Z | 2021-06-15T04:52:31.000Z | lib/ex_klaviyo.ex | techgaun/ex_klaviyo | 85a71407a1264ed9dd9af959af09000c3f76cc31 | [
"Apache-2.0"
] | null | null | null | lib/ex_klaviyo.ex | techgaun/ex_klaviyo | 85a71407a1264ed9dd9af959af09000c3f76cc31 | [
"Apache-2.0"
] | null | null | null | defmodule ExKlaviyo do
@moduledoc """
Documentation for ExKlaviyo.
"""
@doc """
Returns the atom `:world`.

A placeholder used to verify the project is wired up correctly.

## Examples

    iex> ExKlaviyo.hello
    :world

"""
def hello, do: :world
end
| 10.684211 | 30 | 0.581281 |
e849c67c1738ab2ae1852bb6ff7c827fcce214c2 | 436 | ex | Elixir | zmq/lib/zmq.ex | h00dy/elixir-zmq-dummy-client | c69b28c82631af9d7568beee22ed9183ad2f52c4 | [
"Apache-2.0"
] | null | null | null | zmq/lib/zmq.ex | h00dy/elixir-zmq-dummy-client | c69b28c82631af9d7568beee22ed9183ad2f52c4 | [
"Apache-2.0"
] | null | null | null | zmq/lib/zmq.ex | h00dy/elixir-zmq-dummy-client | c69b28c82631af9d7568beee22ed9183ad2f52c4 | [
"Apache-2.0"
] | null | null | null | defmodule Zmq do
@moduledoc """
Documentation for Zmq
"""
require Logger
@doc """
Receive loop: blocks on `socket`, decodes each frame as JSON, logs the
message's "uuid" field, then recurses to wait for the next frame.

Crashes (by design) when the receive fails or the payload is not valid
JSON, leaving recovery to the caller or supervisor.
"""
def listen(socket) do
  {:ok, frame} = :chumak.recv(socket)
  message = Poison.decode!(frame)
  Logger.info(message["uuid"])
  listen(socket)
end
@doc """
Opens a `:pull` socket and connects it over TCP to 127.0.0.1 on the port
given by the `PORT` environment variable. Returns the socket.

Crashes if `PORT` is unset or not an integer.
"""
def connect do
  {:ok, socket} = :chumak.socket(:pull)
  port = String.to_integer(System.get_env("PORT"))
  :chumak.connect(socket, :tcp, '127.0.0.1', port)
  socket
end
end
| 19.818182 | 54 | 0.623853 |
e849e7bcc4ac189817ac72ec89fb998db5fedce5 | 131 | ex | Elixir | lib/kamleague_web/views/post_view.ex | mzavoloka/kamleague | ba29263ed54cac5c67b537c4b7d1dbc522215341 | [
"MIT"
] | null | null | null | lib/kamleague_web/views/post_view.ex | mzavoloka/kamleague | ba29263ed54cac5c67b537c4b7d1dbc522215341 | [
"MIT"
] | 2 | 2021-11-04T21:05:24.000Z | 2021-11-04T21:51:48.000Z | lib/kamleague_web/views/post_view.ex | mzavoloka/kamleague | ba29263ed54cac5c67b537c4b7d1dbc522215341 | [
"MIT"
] | 1 | 2021-11-04T18:40:26.000Z | 2021-11-04T18:40:26.000Z | defmodule KamleagueWeb.PostView do
use KamleagueWeb, :view
@doc "Converts a Markdown `body` into HTML, raising on invalid input."
def markdown(body), do: Earmark.as_html!(body)
end
| 14.555556 | 34 | 0.694656 |
e849ffe6be169091b36f7e1015a3487dc6d1de4f | 373 | ex | Elixir | lib/brando/revisions/schema.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 4 | 2020-10-30T08:40:38.000Z | 2022-01-07T22:21:37.000Z | lib/brando/revisions/schema.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 1,162 | 2020-07-05T11:20:15.000Z | 2022-03-31T06:01:49.000Z | lib/brando/revisions/schema.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | null | null | null | defmodule Brando.Revisions.Schema do
@moduledoc """
Adds revisions support to schema
## Usage
use Brando.Revisions.Schema
Now, when using `Brando.Query`'s `mutation :create` and `mutation :update` macros,
revisions will be created automatically for you.
"""
defmacro __using__(_opts) do
  # Inject a marker function so other code can detect, at runtime, that
  # this schema participates in revisioning.
  quote do
    def __revisioned__, do: true
  end
end
end
| 19.631579 | 84 | 0.69437 |
e84a3d5643260b26ffefb4e2acd5f11c765b9354 | 2,168 | ex | Elixir | clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/deserializer.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/deserializer.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/deserializer.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AndroidDeviceProvisioning.V1.Deserializer do
@moduledoc """
Helper functions for deserializing responses into models
"""
@doc """
Update the provided model with a deserialization of a nested value
"""
@spec deserialize(struct(), :atom, :atom, struct(), keyword()) :: struct()
# With no module to decode into, a :list field is left untouched.
def deserialize(model, _field, :list, nil, _options), do: model

def deserialize(model, field, :list, mod, options) do
  model
  |> Map.update!(field, &Poison.Decode.decode(&1, Keyword.merge(options, as: [struct(mod)])))
end

def deserialize(model, field, :struct, mod, options) do
  model
  |> Map.update!(field, &Poison.Decode.decode(&1, Keyword.merge(options, as: struct(mod))))
end

# With no module to decode into, a :map field is left untouched.
def deserialize(model, _field, :map, nil, _options), do: model

def deserialize(model, field, :map, mod, options) do
  model
  |> Map.update!(
    field,
    &Map.new(&1, fn {key, val} ->
      {key, Poison.Decode.decode(val, Keyword.merge(options, as: struct(mod)))}
    end)
  )
end

def deserialize(model, field, :date, _, _options) do
  # DateTime.from_iso8601/1 returns {:ok, datetime, utc_offset} — a 3-tuple.
  # The previous {:ok, datetime} 2-tuple pattern could never match, so date
  # fields were silently left as ISO 8601 strings. Also guard against a
  # missing/non-binary value, which previously raised in from_iso8601/1;
  # on any failure the model is returned unchanged.
  with value when is_binary(value) <- Map.get(model, field),
       {:ok, datetime, _utc_offset} <- DateTime.from_iso8601(value) do
    Map.put(model, field, datetime)
  else
    _ -> model
  end
end
# Drops every nil-valued field from the struct `model`, then encodes the
# remaining fields with Poison using the given encoder `options`.
def serialize_non_nil(model, options) do
  fields = for {key, value} <- Map.from_struct(model), value != nil, into: %{}, do: {key, value}
  Poison.Encoder.encode(fields, options)
end
end
| 36.133333 | 139 | 0.690498 |
e84a5a7e2a6da2602e10ae08bece30e784f4b7b2 | 1,002 | exs | Elixir | server/config/test.exs | felixwolter/idai-field | 146ab8dbdedb23035a4ba19eac95f02a1fa2329f | [
"Apache-2.0"
] | null | null | null | server/config/test.exs | felixwolter/idai-field | 146ab8dbdedb23035a4ba19eac95f02a1fa2329f | [
"Apache-2.0"
] | null | null | null | server/config/test.exs | felixwolter/idai-field | 146ab8dbdedb23035a4ba19eac95f02a1fa2329f | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# Only in tests, remove the complexity from the password hashing algorithm.
# (This statement was previously duplicated verbatim; one declaration is enough.)
config :bcrypt_elixir, :log_rounds, 1

# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :idai_field_server, IdaiFieldServer.Repo,
  username: "postgres",
  password: "abcdef",
  database: "idai_field_server_test#{System.get_env("MIX_TEST_PARTITION")}",
  # Inside the container network the database is reachable as the `postgres` host.
  hostname: (if System.get_env("IN_CONTAINER") == "true" do "postgres" else "localhost" end),
  pool: Ecto.Adapters.SQL.Sandbox

# We don't run a server during test. If one is required,
# you can enable the server option below.
config :idai_field_server, IdaiFieldServerWeb.Endpoint,
  http: [port: 4002],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn
| 34.551724 | 93 | 0.767465 |
e84a6c51537d30caba87cdb940b898235846b990 | 3,439 | exs | Elixir | apps/ewallet/test/ewallet/permissions/bouncer/targets/wallet_target_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet/test/ewallet/permissions/bouncer/targets/wallet_target_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet/test/ewallet/permissions/bouncer/targets/wallet_target_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Tests for `EWallet.Bouncer.WalletTarget`: ownership, target types, and which
# accounts have rights over account-owned vs. user-owned wallets.
# Requires the DB sandbox (`EWallet.DBCase`) and `EWalletDB.Factory` inserts.
defmodule EWallet.Bouncer.WalletTargetTest do
  use EWallet.DBCase, async: true
  import EWalletDB.Factory
  alias EWalletDB.AccountUser
  alias EWallet.Bouncer.{WalletTarget, DispatchConfig}
  alias ActivityLogger.System
  alias Utils.Helpers.UUID

  describe "get_owner_uuids/1" do
    test "returns the list of UUIDs owning the account's wallet" do
      account = insert(:account)
      wallet = insert(:wallet, account: account, user: nil)
      res = WalletTarget.get_owner_uuids(wallet)
      assert res == [account.uuid]
    end

    test "returns the list of UUIDs owning the user's wallet" do
      user = insert(:user)
      wallet = insert(:wallet, user: user, account: nil)
      res = WalletTarget.get_owner_uuids(wallet)
      assert res == [user.uuid]
    end
  end

  describe "get_target_types/0" do
    test "returns a list of types" do
      assert WalletTarget.get_target_types() == [:account_wallets, :end_user_wallets]
    end
  end

  describe "get_target_type/1" do
    test "returns the type of the given wallet when it's an account wallet" do
      account = insert(:account)
      wallet = insert(:wallet, account: account, user: nil)
      assert WalletTarget.get_target_type(wallet) == :account_wallets
    end

    test "returns the type of the given wallet when it's a user wallet" do
      user = insert(:user)
      wallet = insert(:wallet, user: user, account: nil)
      assert WalletTarget.get_target_type(wallet) == :end_user_wallets
    end
  end

  describe "get_target_accounts/2" do
    test "returns the list of accounts having rights on the account wallet" do
      account = insert(:account)
      wallet = insert(:wallet, account: account, user: nil)
      # Unlinked account must not appear in the result.
      account_unlinked = insert(:account)

      target_accounts_uuids =
        wallet |> WalletTarget.get_target_accounts(DispatchConfig) |> UUID.get_uuids()

      assert length(target_accounts_uuids) == 1
      assert Enum.member?(target_accounts_uuids, account.uuid)
      refute Enum.member?(target_accounts_uuids, account_unlinked.uuid)
    end

    test "returns the list of accounts having rights on the user wallet" do
      user = insert(:user)
      account_1 = insert(:account)
      account_2 = insert(:account)
      account_unlinked = insert(:account)
      # Only accounts explicitly linked to the user grant rights on its wallet.
      {:ok, _} = AccountUser.link(account_1.uuid, user.uuid, %System{})
      {:ok, _} = AccountUser.link(account_2.uuid, user.uuid, %System{})
      wallet = insert(:wallet, user: user, account: nil)

      target_accounts_uuids =
        wallet |> WalletTarget.get_target_accounts(DispatchConfig) |> UUID.get_uuids()

      assert length(target_accounts_uuids) == 2
      assert Enum.member?(target_accounts_uuids, account_1.uuid)
      assert Enum.member?(target_accounts_uuids, account_2.uuid)
      refute Enum.member?(target_accounts_uuids, account_unlinked.uuid)
    end
  end
end
| 36.978495 | 86 | 0.711835 |
e84aa68bbfb565b587c16e120419911c6af9f566 | 1,169 | exs | Elixir | test/integration/sign_up_test.exs | drdean/jelly | 44ca6d90e0c7ce62ccd458795f54dac4d10e8cfc | [
"MIT"
] | null | null | null | test/integration/sign_up_test.exs | drdean/jelly | 44ca6d90e0c7ce62ccd458795f54dac4d10e8cfc | [
"MIT"
] | null | null | null | test/integration/sign_up_test.exs | drdean/jelly | 44ca6d90e0c7ce62ccd458795f54dac4d10e8cfc | [
"MIT"
] | null | null | null | defmodule JellyBoard.SignUpTest do
use JellyBoard.IntegrationCase
  # Smoke test: the sign-up page renders with its form.
  # NOTE(review): the expected title says "Phoenix Trello" although the app is
  # JellyBoard — likely a leftover from the project this was forked from; verify.
  @tag :integration
  test "GET /sign_up" do
    navigate_to "/sign_up"
    assert page_title == "Sign up | Phoenix Trello"
    assert element_displayed?({:id, "sign_up_form"})
  end
@tag :integration
test "Siginig up with correct data" do
navigate_to "/sign_up"
assert element_displayed?({:id, "sign_up_form"})
sign_up_form = find_element(:id, "sign_up_form")
sign_up_form
|> find_within_element(:id, "user_first_name")
|> fill_field("John")
sign_up_form
|> find_within_element(:id, "user_last_name")
|> fill_field("Doe")
sign_up_form
|> find_within_element(:id, "user_email")
|> fill_field("john@doe.com")
sign_up_form
|> find_within_element(:id, "user_password")
|> fill_field("12345678")
sign_up_form
|> find_within_element(:id, "user_password_confirmation")
|> fill_field("12345678")
sign_up_form
|> find_within_element(:css, "button")
|> click
assert element_displayed?({:id, "authentication_container"})
assert page_source =~ "John Doe"
assert page_source =~ "My boards"
end
end
| 23.38 | 64 | 0.679213 |
e84aa7c01733c935b3b09478728939ebac240ad2 | 15,997 | ex | Elixir | lib/elixir/lib/kernel/cli.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/elixir/lib/kernel/cli.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/elixir/lib/kernel/cli.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | defmodule Kernel.CLI do
  @moduledoc false
  @compile {:no_warn_undefined, [Logger, IEx]}

  # Initial CLI configuration: accumulated commands (in reverse order),
  # compiler output dir/patterns/options, code-path additions (-pa/-pz),
  # and accumulated parse errors.
  @blank_config %{
    commands: [],
    output: ".",
    compile: [],
    no_halt: false,
    compiler_options: [],
    errors: [],
    pa: [],
    pz: [],
    verbose_compile: false,
    profile: nil
  }

  # Options that must appear alone on the command line.
  @standalone_opts ["-h", "--help", "--short-version"]

  @doc """
  This is the API invoked by Elixir boot process.
  """
  def main(argv) do
    argv = for arg <- argv, do: IO.chardata_to_string(arg)

    {config, argv} = parse_argv(argv)
    # Remaining (non-option) arguments become System.argv for the script.
    System.argv(argv)
    System.no_halt(config.no_halt)

    fun = fn _ ->
      errors = process_commands(config)

      if errors != [] do
        Enum.each(errors, &IO.puts(:stderr, &1))
        System.halt(1)
      end
    end

    run(fun)
  end

  @doc """
  Runs the given function by catching any failure
  and printing them to stdout. `at_exit` hooks are
  also invoked before exiting.

  This function is used by Elixir's CLI and also
  by escripts generated by Elixir.
  """
  def run(fun) do
    {ok_or_shutdown, status} = exec_fun(fun, {:ok, 0})

    if ok_or_shutdown == :shutdown or not System.no_halt() do
      {_, status} = at_exit({ok_or_shutdown, status})

      # Ensure Logger messages are flushed before halting
      case :erlang.whereis(Logger) do
        pid when is_pid(pid) -> Logger.flush()
        _ -> :ok
      end

      System.halt(status)
    end
  end

  @doc """
  Parses the CLI arguments. Made public for testing.
  """
  def parse_argv(argv) do
    parse_argv(argv, @blank_config)
  end

  @doc """
  Process CLI commands. Made public for testing.
  """
  def process_commands(config) do
    # Commands were accumulated by prepending, so reverse to execute in
    # the order they were given on the command line.
    results = Enum.map(Enum.reverse(config.commands), &process_command(&1, config))
    errors = for {:error, msg} <- results, do: msg
    Enum.reverse(config.errors, errors)
  end

  @doc """
  Shared helper for error formatting on CLI tools.
  """
  def format_error(kind, reason, stacktrace) do
    {blamed, stacktrace} = Exception.blame(kind, reason, stacktrace)

    iodata =
      case blamed do
        # FunctionClauseError gets an extra indented "blame" section showing
        # which clause arguments matched (+) or failed (-).
        %FunctionClauseError{} ->
          formatted = Exception.format_banner(kind, reason, stacktrace)
          padded_blame = pad(FunctionClauseError.blame(blamed, &inspect/1, &blame_match/1))
          [formatted, padded_blame]

        _ ->
          Exception.format_banner(kind, blamed, stacktrace)
      end

    [iodata, ?\n, Exception.format_stacktrace(prune_stacktrace(stacktrace))]
  end

  @doc """
  Function invoked across nodes for `--rpc-eval`.
  """
  def rpc_eval(expr) do
    wrapper(fn -> Code.eval_string(expr) end)
  catch
    kind, reason -> {kind, reason, __STACKTRACE__}
  end

  ## Helpers

  # Runs registered at_exit hooks; hooks may register further hooks, so loop
  # until a round registers none.
  defp at_exit(res) do
    hooks = :elixir_config.get_and_put(:at_exit, [])
    res = Enum.reduce(hooks, res, &exec_fun/2)
    if hooks == [], do: res, else: at_exit(res)
  end

  # Runs `fun` in a monitored process so crashes and exits can be translated
  # into a `{:shutdown, status}` result without killing the caller.
  defp exec_fun(fun, res) when is_function(fun, 1) and is_tuple(res) do
    parent = self()

    {pid, ref} =
      spawn_monitor(fn ->
        try do
          fun.(elem(res, 1))
        catch
          # exit({:shutdown, n}) carries an explicit status code.
          :exit, {:shutdown, int} when is_integer(int) ->
            send(parent, {self(), {:shutdown, int}})
            exit({:shutdown, int})

          # Clean exits map to status 0.
          :exit, reason
          when reason == :normal
          when reason == :shutdown
          when tuple_size(reason) == 2 and elem(reason, 0) == :shutdown ->
            send(parent, {self(), {:shutdown, 0}})
            exit(reason)

          # Anything else: print the error and report status 1.
          kind, reason ->
            print_error(kind, reason, __STACKTRACE__)
            send(parent, {self(), {:shutdown, 1}})
            exit(to_exit(kind, reason, __STACKTRACE__))
        else
          _ ->
            send(parent, {self(), res})
        end
      end)

    receive do
      {^pid, res} ->
        :erlang.demonitor(ref, [:flush])
        res

      # The process died before reporting (e.g. killed) — treat as failure.
      {:DOWN, ^ref, _, _, other} ->
        print_error({:EXIT, pid}, other, [])
        {:shutdown, 1}
    end
  end

  # Re-raises via exit with the shape Erlang expects for each error class.
  defp to_exit(:throw, reason, stack), do: {{:nocatch, reason}, stack}
  defp to_exit(:error, reason, stack), do: {reason, stack}
  defp to_exit(:exit, reason, _stack), do: reason
defp shared_option?(list, config, callback) do
case parse_shared(list, config) do
{[h | hs], _} when h == hd(list) ->
new_config = %{config | errors: ["#{h} : Unknown option" | config.errors]}
callback.(hs, new_config)
{new_list, new_config} ->
callback.(new_list, new_config)
end
end
  ## Error handling

  # Formats and writes an error banner + pruned stacktrace to stderr.
  defp print_error(kind, reason, stacktrace) do
    IO.write(:stderr, format_error(kind, reason, stacktrace))
  end

  # Renders one FunctionClauseError blame node: matched args in normal color
  # (or "+…+" without ANSI), failing args in red (or "-…-").
  defp blame_match(%{match?: true, node: node}), do: blame_ansi(:normal, "+", node)
  defp blame_match(%{match?: false, node: node}), do: blame_ansi(:red, "-", node)

  defp blame_ansi(color, no_ansi, node) do
    if IO.ANSI.enabled?() do
      [color | Macro.to_string(node)]
      |> IO.ANSI.format(true)
      |> IO.iodata_to_binary()
    else
      # Fallback marker characters when ANSI colors are unavailable.
      no_ansi <> Macro.to_string(node) <> no_ansi
    end
  end

  # Indents every line of a multi-line string by one level.
  defp pad(string) do
    " " <> String.replace(string, "\n", "\n ")
  end
@elixir_internals [:elixir, :elixir_aliases, :elixir_expand, :elixir_compiler, :elixir_module] ++
[:elixir_clauses, :elixir_lexical, :elixir_def, :elixir_map, :elixir_locals] ++
[:elixir_erl, :elixir_erl_clauses, :elixir_erl_compiler, :elixir_erl_pass] ++
[Kernel.ErrorHandler, Module.ParallelChecker]
defp prune_stacktrace([{mod, _, _, _} | t]) when mod in @elixir_internals do
prune_stacktrace(t)
end
defp prune_stacktrace([{__MODULE__, :wrapper, 1, _} | _]) do
[]
end
defp prune_stacktrace([h | t]) do
[h | prune_stacktrace(t)]
end
defp prune_stacktrace([]) do
[]
end
  # Parse shared options

  # Standalone options cannot appear alongside anything else: report and halt.
  defp halt_standalone(opt) do
    IO.puts(:stderr, "#{opt} : Standalone options can't be combined with other options")
    System.halt(1)
  end

  defp parse_shared([opt | _], _config) when opt in @standalone_opts do
    halt_standalone(opt)
  end

  # -v/--version prints version info; IEx is mentioned only when it started.
  defp parse_shared([opt | t], _config) when opt in ["-v", "--version"] do
    if function_exported?(IEx, :started?, 0) and IEx.started?() do
      IO.puts("IEx " <> System.build_info()[:build])
    else
      IO.puts(:erlang.system_info(:system_version))
      IO.puts("Elixir " <> System.build_info()[:build])
    end

    # --version is also standalone: anything after it is an error.
    if t != [] do
      halt_standalone(opt)
    else
      System.halt(0)
    end
  end

  # -pa/-pz: expand the wildcard and add each path to the front/back of the
  # code path immediately, also recording it in the config.
  defp parse_shared(["-pa", h | t], config) do
    paths = expand_code_path(h)
    Enum.each(paths, &:code.add_patha/1)
    parse_shared(t, %{config | pa: config.pa ++ paths})
  end

  defp parse_shared(["-pz", h | t], config) do
    paths = expand_code_path(h)
    Enum.each(paths, &:code.add_pathz/1)
    parse_shared(t, %{config | pz: config.pz ++ paths})
  end

  defp parse_shared(["--app", h | t], config) do
    parse_shared(t, %{config | commands: [{:app, h} | config.commands]})
  end

  defp parse_shared(["--no-halt" | t], config) do
    parse_shared(t, %{config | no_halt: true})
  end

  defp parse_shared(["-e", h | t], config) do
    parse_shared(t, %{config | commands: [{:eval, h} | config.commands]})
  end

  defp parse_shared(["--eval", h | t], config) do
    parse_shared(t, %{config | commands: [{:eval, h} | config.commands]})
  end

  defp parse_shared(["--rpc-eval", node, h | t], config) do
    node = append_hostname(node)
    parse_shared(t, %{config | commands: [{:rpc_eval, node, h} | config.commands]})
  end

  defp parse_shared(["-r", h | t], config) do
    parse_shared(t, %{config | commands: [{:require, h} | config.commands]})
  end

  defp parse_shared(["-pr", h | t], config) do
    parse_shared(t, %{config | commands: [{:parallel_require, h} | config.commands]})
  end

  # Not a shared option: return unchanged so the caller can detect no progress.
  defp parse_shared(list, config) do
    {list, config}
  end

  # Appends this node's @hostname part when the target node name has none.
  defp append_hostname(node) do
    case :string.find(node, "@") do
      :nomatch -> node <> :string.find(Atom.to_string(node()), "@")
      _ -> node
    end
  end

  # Expands a -pa/-pz argument; a wildcard with no matches falls back to the
  # literal (expanded) path. :code expects charlists.
  defp expand_code_path(path) do
    path = Path.expand(path)

    case Path.wildcard(path) do
      [] -> [to_charlist(path)]
      list -> Enum.map(list, &to_charlist/1)
    end
  end
  # Process init options

  # "--" ends option parsing; the rest is the script's argv.
  defp parse_argv(["--" | t], config) do
    {config, t}
  end

  # "+elixirc"/"+iex" switch to the mode-specific option parsers.
  defp parse_argv(["+elixirc" | t], config) do
    parse_compiler(t, config)
  end

  defp parse_argv(["+iex" | t], config) do
    parse_iex(t, config)
  end

  defp parse_argv(["-S", h | t], config) do
    {%{config | commands: [{:script, h} | config.commands]}, t}
  end

  defp parse_argv([h | t] = list, config) do
    case h do
      "-" <> _ ->
        shared_option?(list, config, &parse_argv(&1, &2))

      _ ->
        # After an --eval command, the first bare argument starts the
        # script's argv; otherwise it is the file to run.
        if List.keymember?(config.commands, :eval, 0) do
          {config, list}
        else
          {%{config | commands: [{:file, h} | config.commands]}, t}
        end
    end
  end

  defp parse_argv([], config) do
    {config, []}
  end

  # Parse compiler options

  defp parse_compiler(["--" | t], config) do
    {config, t}
  end

  defp parse_compiler(["-o", h | t], config) do
    parse_compiler(t, %{config | output: h})
  end

  defp parse_compiler(["--no-docs" | t], config) do
    parse_compiler(t, %{config | compiler_options: [{:docs, false} | config.compiler_options]})
  end

  defp parse_compiler(["--no-debug-info" | t], config) do
    compiler_options = [{:debug_info, false} | config.compiler_options]
    parse_compiler(t, %{config | compiler_options: compiler_options})
  end

  defp parse_compiler(["--ignore-module-conflict" | t], config) do
    compiler_options = [{:ignore_module_conflict, true} | config.compiler_options]
    parse_compiler(t, %{config | compiler_options: compiler_options})
  end

  defp parse_compiler(["--warnings-as-errors" | t], config) do
    compiler_options = [{:warnings_as_errors, true} | config.compiler_options]
    parse_compiler(t, %{config | compiler_options: compiler_options})
  end

  defp parse_compiler(["--verbose" | t], config) do
    parse_compiler(t, %{config | verbose_compile: true})
  end

  # Private compiler options

  defp parse_compiler(["--profile", "time" | t], config) do
    parse_compiler(t, %{config | profile: :time})
  end

  defp parse_compiler([h | t] = list, config) do
    case h do
      "-" <> _ ->
        shared_option?(list, config, &parse_compiler(&1, &2))

      _ ->
        # A directory argument compiles all .ex files under it recursively.
        pattern = if File.dir?(h), do: "#{h}/**/*.ex", else: h
        parse_compiler(t, %{config | compile: [pattern | config.compile]})
    end
  end

  # End of args: register a single :compile command with all patterns.
  defp parse_compiler([], config) do
    {%{config | commands: [{:compile, config.compile} | config.commands]}, []}
  end

  # Parse IEx options

  defp parse_iex(["--" | t], config) do
    {config, t}
  end

  # This clause is here so that Kernel.CLI does not
  # error out with "unknown option"
  defp parse_iex(["--dot-iex", _ | t], config) do
    parse_iex(t, config)
  end

  # --remsh takes a value; both are handled by IEx itself, skip them here.
  defp parse_iex([opt, _ | t], config) when opt in ["--remsh"] do
    parse_iex(t, config)
  end

  defp parse_iex(["-S", h | t], config) do
    {%{config | commands: [{:script, h} | config.commands]}, t}
  end

  defp parse_iex([h | t] = list, config) do
    case h do
      "-" <> _ -> shared_option?(list, config, &parse_iex(&1, &2))
      _ -> {%{config | commands: [{:file, h} | config.commands]}, t}
    end
  end

  defp parse_iex([], config) do
    {config, []}
  end
  # Process commands

  # Each clause maps one parsed command tuple to :ok or {:error, message}.

  defp process_command({:cookie, h}, _config) do
    if Node.alive?() do
      wrapper(fn -> Node.set_cookie(String.to_atom(h)) end)
    else
      {:error, "--cookie : Cannot set cookie if the node is not alive (set --name or --sname)"}
    end
  end

  defp process_command({:eval, expr}, _config) when is_binary(expr) do
    wrapper(fn -> Code.eval_string(expr, []) end)
  end

  # Evaluates the expression on a remote node via :rpc, re-raising any
  # exception the remote evaluation produced.
  defp process_command({:rpc_eval, node, expr}, _config) when is_binary(expr) do
    case :rpc.call(String.to_atom(node), __MODULE__, :rpc_eval, [expr]) do
      :ok -> :ok
      {:badrpc, {:EXIT, exit}} -> Process.exit(self(), exit)
      {:badrpc, reason} -> {:error, "--rpc-eval : RPC failed with reason #{inspect(reason)}"}
      {kind, error, stack} -> :erlang.raise(kind, error, stack)
    end
  end

  defp process_command({:app, app}, _config) when is_binary(app) do
    case Application.ensure_all_started(String.to_atom(app)) do
      {:error, {app, reason}} ->
        msg = "--app : Could not start application #{app}: " <> Application.format_error(reason)
        {:error, msg}

      {:ok, _} ->
        :ok
    end
  end

  defp process_command({:script, file}, _config) when is_binary(file) do
    if exec = find_elixir_executable(file) do
      wrapper(fn -> Code.require_file(exec) end)
    else
      {:error, "-S : Could not find executable #{file}"}
    end
  end

  defp process_command({:file, file}, _config) when is_binary(file) do
    if File.regular?(file) do
      wrapper(fn -> Code.require_file(file) end)
    else
      {:error, "No file named #{file}"}
    end
  end

  defp process_command({:require, pattern}, _config) when is_binary(pattern) do
    files = filter_patterns(pattern)

    if files != [] do
      wrapper(fn -> Enum.map(files, &Code.require_file(&1)) end)
    else
      {:error, "-r : No files matched pattern #{pattern}"}
    end
  end

  defp process_command({:parallel_require, pattern}, _config) when is_binary(pattern) do
    files = filter_patterns(pattern)

    if files != [] do
      wrapper(fn ->
        case Kernel.ParallelCompiler.require(files) do
          {:ok, _, _} -> :ok
          {:error, _, _} -> exit({:shutdown, 1})
        end
      end)
    else
      {:error, "-pr : No files matched pattern #{pattern}"}
    end
  end

  defp process_command({:compile, patterns}, config) do
    # If ensuring the dir returns an error no files will be found.
    _ = :filelib.ensure_dir(:filename.join(config.output, "."))

    case filter_multiple_patterns(patterns) do
      {:ok, []} ->
        {:error, "No files matched provided patterns"}

      {:ok, files} ->
        wrapper(fn ->
          Code.compiler_options(config.compiler_options)

          # --verbose prints every compiled file; otherwise only warn about
          # files that take unusually long.
          verbose_opts =
            if config.verbose_compile do
              [each_file: &IO.puts("Compiling #{Path.relative_to_cwd(&1)}")]
            else
              [
                each_long_compilation:
                  &IO.puts("Compiling #{Path.relative_to_cwd(&1)} (it's taking more than 10s)")
              ]
            end

          profile_opts =
            if config.profile do
              [profile: config.profile]
            else
              []
            end

          opts = verbose_opts ++ profile_opts

          case Kernel.ParallelCompiler.compile_to_path(files, config.output, opts) do
            {:ok, _, _} -> :ok
            {:error, _, _} -> exit({:shutdown, 1})
          end
        end)

      {:missing, missing} ->
        {:error, "No files matched pattern(s) #{Enum.join(missing, ",")}"}
    end
  end
defp filter_patterns(pattern) do
pattern
|> Path.expand()
|> Path.wildcard()
|> :lists.usort()
|> Enum.filter(&File.regular?/1)
end
defp filter_multiple_patterns(patterns) do
{files, missing} =
Enum.reduce(patterns, {[], []}, fn pattern, {files, missing} ->
case filter_patterns(pattern) do
[] -> {files, [pattern | missing]}
match -> {match ++ files, missing}
end
end)
case missing do
[] -> {:ok, :lists.usort(files)}
_ -> {:missing, :lists.usort(missing)}
end
end
  # Runs a side-effecting function, discarding its result and normalizing
  # the command outcome to :ok (errors propagate as exceptions/exits).
  defp wrapper(fun) do
    _ = fun.()
    :ok
  end

  # Locates an executable on PATH for `-S`; returns nil when not found.
  defp find_elixir_executable(file) do
    if exec = System.find_executable(file) do
      # If we are on Windows, the executable is going to be
      # a .bat file that must be in the same directory as
      # the actual Elixir executable.
      case :os.type() do
        {:win32, _} ->
          base = Path.rootname(exec)
          if File.regular?(base), do: base, else: exec

        _ ->
          exec
      end
    end
  end
end
| 27.205782 | 101 | 0.598987 |
e84ab9d903483a2989f4f740b8b212c27384da68 | 1,499 | ex | Elixir | test/helpers/fixture.ex | jonasrichard/iris | eb4547ced7f7ff9305a4edfa1c32e8d45fa2aa00 | [
"Apache-2.0"
] | 1 | 2017-03-31T09:26:21.000Z | 2017-03-31T09:26:21.000Z | test/helpers/fixture.ex | jonasrichard/iris | eb4547ced7f7ff9305a4edfa1c32e8d45fa2aa00 | [
"Apache-2.0"
] | 1 | 2017-05-03T06:30:09.000Z | 2017-05-03T06:30:09.000Z | test/helpers/fixture.ex | jonasrichard/iris | eb4547ced7f7ff9305a4edfa1c32e8d45fa2aa00 | [
"Apache-2.0"
] | null | null | null | defmodule Iris.Fixture do
  # Builds an `Iris.Model.Message` fixture with a fixed body.
  # Overridable via `opts`: :id, :sender_id, :channel_id, :created_ts.
  # :created_ts is an Erlang timestamp tuple and is rendered through
  # now_to_utc/1; defaults come from id/0, num_id/0 and :os.timestamp/0.
  def message(opts \\ []) do
    id = Keyword.get(opts, :id, id())
    sender_id = Keyword.get(opts, :sender_id, num_id())
    channel_id = Keyword.get(opts, :channel_id, num_id())
    ts = Keyword.get(opts, :created_ts, :os.timestamp()) |> now_to_utc()

    %Iris.Model.Message{
      id: id,
      sender_id: sender_id,
      channel_id: channel_id,
      created_ts: ts,
      body: "A new message"
    }
  end
def now_to_utc() do
now_to_utc(:erlang.timestamp())
end
def now_to_utc(now) do
{{year, month, day}, {hour, minute, second}} = :calendar.now_to_datetime(now)
[
pad(year),
"-",
pad(month),
"-",
pad(day),
"T",
pad(hour),
":",
pad(minute),
":",
pad(second)
]
|> Enum.join()
end
defp pad(i) when i > 9 do
Integer.to_string(i)
end
defp pad(i) do
Integer.to_string(i) |> String.pad_leading(2, "0")
end
def id() do
1..3
|> Enum.map(fn _ ->
:rand.uniform(100_000)
|> Integer.to_string()
|> String.pad_leading(5, "0")
end)
|> Enum.join("-")
end
def num_id() do
:rand.uniform(100_00) |> Integer.to_string()
end
def retry(fun) do
retry(fun, 30)
end
def retry(fun, 0) do
raise "Timeout calling evaluating: #{inspect(fun)}"
end
def retry(fun, num) do
case fun.() do
nil ->
Process.sleep(1000)
retry(fun, num - 1)
result ->
result
end
end
end
| 18.506173 | 81 | 0.547698 |
e84ad615906b8cb572ae5dc5eecb481e2e37a38e | 37,988 | ex | Elixir | lib/surface/compiler.ex | yusufyildirim/surface | 1cacafa275ec7fefa862af7fd86794a400652e9d | [
"MIT"
] | null | null | null | lib/surface/compiler.ex | yusufyildirim/surface | 1cacafa275ec7fefa862af7fd86794a400652e9d | [
"MIT"
] | null | null | null | lib/surface/compiler.ex | yusufyildirim/surface | 1cacafa275ec7fefa862af7fd86794a400652e9d | [
"MIT"
] | null | null | null | defmodule Surface.Compiler do
  @moduledoc """
  Defines a behaviour that must be implemented by all HTML/Surface node translators.
  This module also contains the main logic to translate Surface code.
  """

  alias Surface.Compiler.Parser
  alias Surface.IOHelper
  alias Surface.AST
  alias Surface.Compiler.Helpers

  # Component types that carry LiveView state and therefore require a single
  # HTML root element (see validate_stateful_component/2).
  @stateful_component_types [
    Surface.LiveComponent
  ]

  # Directive handlers applied to plain HTML tags.
  @tag_directive_handlers [
    Surface.Directive.TagAttrs,
    Surface.Directive.Events,
    Surface.Directive.Show,
    Surface.Directive.Hook,
    Surface.Directive.If,
    Surface.Directive.For,
    Surface.Directive.Values,
    Surface.Directive.Debug
  ]

  # Directive handlers applied to components.
  @component_directive_handlers [
    Surface.Directive.Let,
    Surface.Directive.ComponentProps,
    Surface.Directive.If,
    Surface.Directive.For,
    Surface.Directive.Debug
  ]

  # Directive handlers applied to macro components (#MacroName).
  @meta_component_directive_handlers [
    Surface.Directive.If,
    Surface.Directive.For,
    Surface.Directive.Debug
  ]

  @template_directive_handlers [Surface.Directive.Let]

  @slot_directive_handlers [
    Surface.Directive.SlotArgs,
    Surface.Directive.If,
    Surface.Directive.For
  ]

  @valid_slot_props ["name", "index"]

  @directive_prefixes [":", "s-"]

  # HTML void elements: may not contain children.
  @void_elements [
    "area",
    "base",
    "br",
    "col",
    "command",
    "embed",
    "hr",
    "img",
    "input",
    "keygen",
    "link",
    "meta",
    "param",
    "source",
    "track",
    "wbr"
  ]

  # Compile-time context threaded through the whole translation pass.
  defmodule CompileMeta do
    defstruct [:line, :file, :caller, :checks, :variables]

    @type t :: %__MODULE__{
            line: non_neg_integer(),
            file: binary(),
            caller: Macro.Env.t(),
            variables: keyword(),
            checks: Keyword.t(boolean())
          }
  end

  @doc """
  This function compiles a string into the Surface AST. This is used by ~F and Surface.Renderer to parse and compile templates.
  A special note for line: This is considered the line number for the first line in the string. If the first line of the
  string is also the first line of the file, then this should be 1. If this is being called within a macro (say to process a heredoc
  passed to ~F), this should be __CALLER__.line + 1.
  """
  @spec compile(binary, non_neg_integer(), Macro.Env.t(), binary(), Keyword.t()) :: [
          Surface.AST.t()
        ]
  def compile(string, line, caller, file \\ "nofile", opts \\ []) do
    compile_meta = %CompileMeta{
      line: line,
      file: file,
      caller: caller,
      checks: opts[:checks] || [],
      variables: opts[:variables]
    }

    string
    |> Parser.parse!(
      file: file,
      line: line,
      caller: caller,
      checks: opts[:checks] || [],
      warnings: opts[:warnings] || [],
      column: Keyword.get(opts, :column, 1),
      indentation: Keyword.get(opts, :indentation, 0)
    )
    |> to_ast(compile_meta)
    |> validate_component_structure(compile_meta, caller.module)
  end
  # Translates the Surface AST into the LiveView/EEx struct form.
  def to_live_struct(nodes, opts \\ []) do
    Surface.Compiler.EExEngine.translate(nodes, opts)
  end

  # Applies stateful-component structure checks; always returns the AST
  # unchanged (checks only emit warnings).
  def validate_component_structure(ast, meta, module) do
    if is_stateful_component(module) do
      validate_stateful_component(ast, meta)
    end

    ast
  end

  defp is_stateful_component(module) do
    cond do
      function_exported?(module, :component_type, 0) ->
        module.component_type() in @stateful_component_types

      # The module may still be compiling (e.g. the component's own file);
      # read the attribute directly in that case.
      Module.open?(module) ->
        # If the template is compiled directly in a test module, get_attribute might fail,
        # breaking some of the tests once in a while.
        try do
          Module.get_attribute(module, :component_type) in @stateful_component_types
        rescue
          _e in ArgumentError -> false
        end

      true ->
        false
    end
  end

  # Only the render/1 template of a stateful component is checked: it must
  # have exactly one root tag/component.
  defp validate_stateful_component(ast, %CompileMeta{caller: %{function: {:render, _}}} = compile_meta) do
    num_tags =
      ast
      |> Enum.filter(fn
        %AST.Tag{} ->
          true

        %AST.VoidTag{} ->
          true

        # A LiveComponent at the root of another LiveComponent is allowed
        # structurally but warned against.
        %AST.Component{type: Surface.LiveComponent, meta: meta} ->
          warn_live_component_as_root_node_of_another_live_component(meta, compile_meta.caller)
          true

        %AST.Component{} ->
          true

        _ ->
          false
      end)
      |> Enum.count()

    cond do
      num_tags == 0 ->
        IOHelper.warn(
          "stateful live components must have a HTML root element",
          compile_meta.caller,
          compile_meta.file,
          compile_meta.line
        )

      num_tags > 1 ->
        IOHelper.warn(
          "stateful live components must have a single HTML root element",
          compile_meta.caller,
          compile_meta.file,
          compile_meta.line
        )

      true ->
        :noop
    end
  end

  defp validate_stateful_component(_ast, %CompileMeta{}), do: nil

  defp warn_live_component_as_root_node_of_another_live_component(meta, caller) do
    IOHelper.warn(
      """
      cannot have a LiveComponent as root node of another LiveComponent.
      Hint: You can wrap the root `#{meta.node_alias}` node in another element. Example:
        def render(assigns) do
          ~F"\""
          <div>
            <#{meta.node_alias} ... >
              ...
            </#{meta.node_alias}>
          </div>
          "\""
        end
      """,
      caller,
      meta.file,
      meta.line
    )
  end
  # Converts parsed nodes into AST nodes, applying directives to each result.
  # Conversion errors are turned into warnings + %AST.Error{} placeholders so
  # compilation of the rest of the template can proceed.
  def to_ast(nodes, compile_meta) do
    for node <- List.wrap(nodes),
        result = convert_node_to_ast(node_type(node), node, compile_meta),
        result != :ignore do
      case result do
        {:ok, ast} ->
          process_directives(ast)

        {:error, {message, line}, meta} ->
          IOHelper.warn(message, compile_meta.caller, meta.file, line)
          %AST.Error{message: message, meta: meta}

        {:error, {message, details, line}, meta} ->
          details = if details, do: "\n\n" <> details, else: ""
          IOHelper.warn(message <> details, compile_meta.caller, meta.file, line)
          %AST.Error{message: message, meta: meta}
      end
    end
  end

  # Classifies a parsed node; clause order matters (e.g. "#..." macro
  # components must come after the "#template"/"#slot" special cases).

  # Slots
  defp node_type({"#template", _, _, _}), do: :template
  defp node_type({"#slot", _, _, _}), do: :slot
  defp node_type({":" <> _, _, _, _}), do: :template
  defp node_type({"slot", _, _, _}), do: :slot

  # Conditional blocks
  defp node_type({:block, "if", _, _, _}), do: :if_elseif_else
  defp node_type({:block, "elseif", _, _, _}), do: :if_elseif_else
  defp node_type({:block, "else", _, _, _}), do: :else
  defp node_type({:block, "unless", _, _, _}), do: :unless

  # For
  defp node_type({:block, "for", _, _, _}), do: :for_else

  # case/match
  defp node_type({:block, "case", _, _, _}), do: :block
  defp node_type({:block, "match", _, _, _}), do: :sub_block
  defp node_type({:block, :default, _, _, _}), do: :sub_block

  defp node_type({:ast, _, _}), do: :ast
  defp node_type({"#" <> _, _, _, _}), do: :macro_component
  # Capitalized element names are components.
  defp node_type({<<first, _::binary>>, _, _, _}) when first in ?A..?Z, do: :component
  defp node_type({name, _, _, _}) when name in @void_elements, do: :void_tag
  defp node_type({_, _, _, _}), do: :tag
  defp node_type({:expr, _, _}), do: :interpolation
  defp node_type({:comment, _, _}), do: :comment
  defp node_type(_), do: :text
defp process_directives(%{directives: directives} = node) when is_list(directives) do
directives
|> Enum.filter(fn %AST.Directive{module: mod} -> function_exported?(mod, :process, 2) end)
|> Enum.reduce(node, fn %AST.Directive{module: mod} = directive, node ->
mod.process(directive, node)
end)
end
defp process_directives(node), do: node
  # Private comments ({!-- --}) are dropped; public ones become literals.
  defp convert_node_to_ast(:comment, {_, _comment, %{visibility: :private}}, _), do: :ignore

  defp convert_node_to_ast(:comment, {_, comment, %{visibility: :public}}, _),
    do: {:ok, %AST.Literal{value: comment}}

  defp convert_node_to_ast(:text, text, _),
    do: {:ok, %AST.Literal{value: text}}

  # {expr} interpolation: parse the expression, run assign checks, and record
  # whether it is a compile-time constant.
  defp convert_node_to_ast(:interpolation, {_, text, node_meta}, compile_meta) do
    meta = Helpers.to_meta(node_meta, compile_meta)
    expr = Helpers.expression_to_quoted!(text, meta)

    Helpers.perform_assigns_checks(expr, compile_meta)

    {:ok,
     %AST.Interpolation{
       original: text,
       value: expr,
       meta: meta,
       constant?: Macro.quoted_literal?(expr)
     }}
  end

  defp convert_node_to_ast(:ast, {_, variable, expr_meta}, compile_meta) do
    meta = Helpers.to_meta(expr_meta, compile_meta)
    ast = unquote_variable!(variable, compile_meta, meta)
    {:ok, ast}
  end

  # {#else} branch content becomes a plain container.
  defp convert_node_to_ast(:else, {:block, _name, _expr, children, node_meta}, compile_meta) do
    meta = Helpers.to_meta(node_meta, compile_meta)
    {:ok, %AST.Container{children: to_ast(children, compile_meta), meta: meta, directives: []}}
  end

  defp convert_node_to_ast(:if_elseif_else, {:block, _name, attributes, children, node_meta}, compile_meta) do
    meta = Helpers.to_meta(node_meta, compile_meta)
    default = AST.AttributeExpr.new(false, "", node_meta)
    condition = attribute_value_as_ast(attributes, :root, default, compile_meta)

    # Split children into the if-branch and the else-branch; a trailing
    # {#elseif} is rewrapped so it recursively builds a nested if.
    [if_children, else_children] =
      case children do
        [{:block, :default, [], default, _}, {:block, "else", _, _, _} = else_block] ->
          [default, [else_block]]

        [{:block, :default, [], default, _}, {:block, "elseif", a, c, m} | rest] ->
          [default, [{:block, "elseif", a, [{:block, :default, [], c, %{}} | rest], m}]]

        [{:block, :default, [], default, _}] ->
          [default, []]

        children ->
          [children, []]
      end

    {:ok,
     %AST.If{
       condition: condition,
       children: to_ast(if_children, compile_meta),
       else: to_ast(else_children, compile_meta),
       meta: meta
     }}
  end

  # Empty default sub-block produces nothing.
  defp convert_node_to_ast(:sub_block, {:block, :default, _attrs, [], _meta}, _compile_meta) do
    :ignore
  end

  defp convert_node_to_ast(:sub_block, {:block, name, attrs, children, meta}, compile_meta) do
    {:ok,
     %AST.SubBlock{
       name: name,
       expression: quoted_block_expression(attrs),
       children: to_ast(children, compile_meta),
       meta: Helpers.to_meta(meta, compile_meta)
     }}
  end

  # {#case} without any {#match} is a hard compile error.
  defp convert_node_to_ast(:block, {:block, "case", _, _, %{has_sub_blocks?: false} = node_meta}, compile_meta) do
    meta = Helpers.to_meta(node_meta, compile_meta)
    message = "no {#match} sub-block defined. A {#case} block must include at least one {#match ...} sub-block."
    IOHelper.compile_error(message, meta.file, meta.line)
  end

  defp convert_node_to_ast(:block, {:block, name, attrs, children, meta}, compile_meta) do
    {:ok,
     %AST.Block{
       name: name,
       expression: quoted_block_expression(attrs),
       sub_blocks: to_ast(children, compile_meta),
       meta: Helpers.to_meta(meta, compile_meta)
     }}
  end

  # {#unless} is an AST.If with empty children and the body in `else`.
  defp convert_node_to_ast(:unless, {:block, _name, attributes, children, node_meta}, compile_meta) do
    meta = Helpers.to_meta(node_meta, compile_meta)
    default = AST.AttributeExpr.new(false, "", meta)
    condition = attribute_value_as_ast(attributes, :root, default, compile_meta)

    {:ok,
     %AST.If{
       condition: condition,
       children: [],
       else: to_ast(children, compile_meta),
       meta: meta
     }}
  end

  defp convert_node_to_ast(:for_else, {:block, _name, attributes, children, node_meta}, compile_meta) do
    meta = Helpers.to_meta(node_meta, compile_meta)
    default = AST.AttributeExpr.new(false, "", meta)
    generator = attribute_value_as_ast(attributes, :root, :generator, default, compile_meta)

    [for_children, else_children] =
      case children do
        [{:block, :default, [], default, _}, {:block, "else", _, _, _} = else_block] ->
          [default, [else_block]]

        children ->
          [children, []]
      end

    for_ast = %AST.For{
      generator: generator,
      children: to_ast(for_children, compile_meta),
      else: to_ast(else_children, compile_meta),
      meta: meta
    }

    if else_children == [] do
      {:ok, for_ast}
    else
      # A {#for}...{#else} needs a wrapping if on "enumerable is non-empty".
      # This only works for a single simple `x <- expr` generator.
      [else_ast | _] = to_ast(else_children, compile_meta)

      value =
        case generator.value do
          [{:<-, _, [_, value]}] -> value
          _ -> raise_complex_generator(else_ast.meta)
        end

      condition_expr =
        quote do
          unquote(value) != []
        end

      condition = AST.AttributeExpr.new(condition_expr, "", meta)

      {:ok,
       %AST.If{
         condition: condition,
         children: [for_ast],
         else: [else_ast],
         meta: meta
       }}
    end
  end
  # <#template>/<:name> nodes: the slot name comes from the tag name or the
  # template attributes.
  defp convert_node_to_ast(:template, {name, attributes, children, node_meta}, compile_meta) do
    meta = Helpers.to_meta(node_meta, compile_meta)

    with {:ok, directives, attributes} <-
           collect_directives(@template_directive_handlers, attributes, meta),
         slot <- get_slot_name(name, attributes) do
      {:ok,
       %AST.Template{
         name: slot,
         children: to_ast(children, compile_meta),
         directives: directives,
         let: [],
         meta: meta
       }}
    else
      _ -> {:error, {"failed to parse template", meta.line}, meta}
    end
  end

  # <#slot>/<slot> nodes: the referenced slot must be declared on the caller
  # module; otherwise a missing-slot error is raised.
  defp convert_node_to_ast(:slot, {_, attributes, children, node_meta}, compile_meta) do
    meta = Helpers.to_meta(node_meta, compile_meta)

    defined_slots =
      meta.caller.module
      |> Surface.API.get_slots()

    # TODO: Validate attributes with custom messages
    name = attribute_value(attributes, "name", :default)
    # <slot> without a name attribute uses the short syntax for :default.
    short_slot_syntax? = not has_attribute?(attributes, "name")
    index = attribute_value_as_ast(attributes, "index", %Surface.AST.Literal{value: 0}, compile_meta)

    with {:ok, directives, attrs} <-
           collect_directives(@slot_directive_handlers, attributes, meta),
         slot <- Enum.find(defined_slots, fn slot -> slot.name == name end),
         slot when not is_nil(slot) <- slot do
      maybe_warn_required_slot_with_default_value(slot, children, short_slot_syntax?, meta)

      validate_slot_attrs!(attrs)

      {:ok,
       %AST.Slot{
         name: name,
         index: index,
         directives: directives,
         default: to_ast(children, compile_meta),
         args: [],
         meta: meta
       }}
    else
      _ ->
        raise_missing_slot_error!(
          meta.caller.module,
          name,
          meta,
          defined_slots,
          short_slot_syntax?
        )
    end
  end
  # Regular HTML tags: collect tag directives, process attributes, convert
  # children, and validate child structure.
  defp convert_node_to_ast(:tag, {name, attributes, children, node_meta}, compile_meta) do
    meta = Helpers.to_meta(node_meta, compile_meta)

    with {:ok, directives, attributes} <-
           collect_directives(@tag_directive_handlers, attributes, meta),
         attributes <- process_attributes(nil, attributes, meta, compile_meta),
         children <- to_ast(children, compile_meta),
         :ok <- validate_tag_children(children) do
      {:ok,
       %AST.Tag{
         element: name,
         attributes: attributes,
         directives: directives,
         children: children,
         meta: meta
       }}
    else
      {:error, message} ->
        message = "cannot render <#{name}> (#{message})"
        {:error, message}

      _ ->
        {:error, {"cannot render <#{name}>", meta.line}, meta}
    end
  end

  defp convert_node_to_ast(:void_tag, {name, attributes, children, node_meta}, compile_meta) do
    meta = Helpers.to_meta(node_meta, compile_meta)

    with {:ok, directives, attributes} <-
           collect_directives(@tag_directive_handlers, attributes, meta),
         attributes <- process_attributes(nil, attributes, meta, compile_meta),
         # a void element containing content is an error
         [] <- to_ast(children, compile_meta) do
      {:ok,
       %AST.VoidTag{
         element: name,
         attributes: attributes,
         directives: directives,
         meta: meta
       }}
    else
      {:error, message} ->
        message = "cannot render <#{name}> (#{message})"
        {:error, message}

      _ ->
        {:error, {"cannot render <#{name}>", meta.line}, meta}
    end
  end

  defp convert_node_to_ast(:component, {name, attributes, children, node_meta}, compile_meta) do
    # TODO: validate live views vs live components ?
    meta = Helpers.to_meta(node_meta, compile_meta)
    mod = Helpers.actual_component_module!(name, meta.caller)
    meta = Map.merge(meta, %{module: mod, node_alias: name})

    with :ok <- Helpers.validate_component_module(mod, name),
         true <- function_exported?(mod, :component_type, 0),
         component_type <- mod.component_type(),
         # This is a little bit hacky. :let will only be extracted for the default
         # template if `mod` doesn't export __slot_name__ (i.e. if it isn't a slotable component)
         # we pass in and modify the attributes so that non-slotable components are not
         # processed by the :let directive
         {:ok, templates, attributes} <-
           collect_templates(mod, attributes, children, meta),
         :ok <- validate_templates(mod, templates, meta),
         {:ok, directives, attributes} <-
           collect_directives(@component_directive_handlers, attributes, meta),
         attributes <- process_attributes(mod, attributes, meta, compile_meta),
         :ok <- validate_properties(mod, attributes, directives, meta) do
      # Slotable components (those exporting __slot_name__/0) get a dedicated
      # AST node that records the slot they fill.
      result =
        if component_slotable?(mod) do
          %AST.SlotableComponent{
            module: mod,
            slot: mod.__slot_name__(),
            type: component_type,
            let: [],
            props: attributes,
            directives: directives,
            templates: templates,
            meta: meta
          }
        else
          %AST.Component{
            module: mod,
            type: component_type,
            props: attributes,
            directives: directives,
            templates: templates,
            meta: meta
          }
        end

      {:ok, maybe_call_transform(result)}
    else
      {:error, message, details} ->
        {:error, {"cannot render <#{name}> (#{message})", details, meta.line}, meta}

      {:error, message} ->
        {:error, {"cannot render <#{name}> (#{message})", meta.line}, meta}

      _ ->
        {:error, {"cannot render <#{name}>", meta.line}, meta}
    end
  end
# Converts a `<#MacroComponent>` node. The macro module must export `expand/3`,
# which receives the processed attributes, the raw children as a string, and the
# meta, and returns the expanded AST. An `AST.Expr` invoking `__compile_dep__/0`
# is prepended so the template is recompiled when the macro module changes.
defp convert_node_to_ast(:macro_component, {"#" <> name, attributes, children, node_meta}, compile_meta) do
  meta = Helpers.to_meta(node_meta, compile_meta)
  mod = Helpers.actual_component_module!(name, meta.caller)
  meta = Map.merge(meta, %{module: mod, node_alias: name})
  with :ok <- Helpers.validate_component_module(mod, name),
       meta <- Map.merge(meta, %{module: mod, node_alias: name}),
       true <- function_exported?(mod, :expand, 3),
       {:ok, directives, attributes} <-
         collect_directives(@meta_component_directive_handlers, attributes, meta),
       attributes <- process_attributes(mod, attributes, meta, compile_meta),
       :ok <- validate_properties(mod, attributes, directives, meta) do
    # Quoted `require mod; mod.__compile_dep__()` establishing the compile-time
    # dependency on the macro module.
    compile_dep_expr = %AST.Expr{
      value:
        quote generated: true, line: meta.line do
          require(unquote(mod)).__compile_dep__()
        end,
      meta: meta
    }
    expanded_children = mod.expand(attributes, List.to_string(children), meta)
    children_with_dep = [compile_dep_expr | List.wrap(expanded_children)]
    {:ok, %AST.Container{children: children_with_dep, directives: directives, meta: meta}}
  else
    # `false` comes from the `function_exported?(mod, :expand, 3)` check above.
    false ->
      {:error, {"cannot render <#{name}> (MacroComponents must export an expand/3 function)", meta.line}, meta}
    {:error, message, details} ->
      {:error, {"cannot render <#{name}> (#{message})", details, meta.line}, meta}
    {:error, message} ->
      {:error, {"cannot render <#{name}> (#{message})", meta.line}, meta}
    _ ->
      {:error, {"cannot render <#{name}>", meta.line}, meta}
  end
end
# Gives the node's component module a chance to post-process its own AST node.
# When `mod` exports `transform/1`, its return value replaces the node;
# otherwise the node is returned untouched.
defp maybe_call_transform(%{module: mod} = node) do
  case function_exported?(mod, :transform, 1) do
    true -> mod.transform(node)
    false -> node
  end
end
# Looks up `attr_name` in a list of `{name, value, meta}` attribute tuples and
# returns its value converted to an atom, or `default` when the attribute is
# absent.
# NOTE(review): `String.to_atom/1` creates atoms dynamically — presumably safe
# here because the values come from compile-time templates; confirm.
defp attribute_value(attributes, attr_name, default) do
  Enum.find_value(attributes, default, fn
    {^attr_name, value, _meta} -> String.to_atom(value)
    _other -> nil
  end)
end
# Returns `true` when any `{name, value, meta}` tuple in `attributes` carries
# the given name; the empty list short-circuits to `false`.
defp has_attribute?([], _attr_name), do: false

defp has_attribute?(attributes, attr_name) do
  Enum.any?(attributes, fn
    {^attr_name, _value, _meta} -> true
    _other -> false
  end)
end
# Finds `attr_name` in the attribute list and converts its value to an AST node
# of the given `type` (default `:integer`): `{:attribute_expr, ...}` values
# become an `AST.AttributeExpr`, plain literals go through the type handler.
# Returns `default` when the attribute is absent.
defp attribute_value_as_ast(attributes, attr_name, type \\ :integer, default, meta) do
  Enum.find_value(attributes, default, fn
    {^attr_name, {:attribute_expr, value, expr_meta}, _attr_meta} ->
      expr_meta = Helpers.to_meta(expr_meta, meta)
      expr = Surface.TypeHandler.expr_to_quoted!(value, attr_name, type, expr_meta)
      AST.AttributeExpr.new(expr, value, expr_meta)
    {^attr_name, value, attr_meta} ->
      attr_meta = Helpers.to_meta(attr_meta, meta)
      Surface.TypeHandler.literal_to_ast_node!(type, attr_name, value, attr_meta)
    _ ->
      nil
  end)
end
# Returns the quoted form of a block's single root expression (the `expr` in a
# construct like `{#if expr}`), or `nil` when the block carries no attributes.
defp quoted_block_expression([{:root, {:attribute_expr, value, expr_meta}, _attr_meta}]) do
  Helpers.expression_to_quoted!(value, expr_meta)
end
defp quoted_block_expression([]) do
  nil
end
# Resolves which slot a template node targets: `<#template slot="...">` reads
# its `slot` attribute (defaulting to `:default`), while the `<:name>` shorthand
# derives the slot name directly from the tag name.
defp get_slot_name("#template", attributes) do
  attribute_value(attributes, "slot", :default)
end

defp get_slot_name(":" <> name, _attributes) do
  String.to_atom(name)
end
defp component_slotable?(mod), do: function_exported?(mod, :__slot_name__, 0)
# Entry point: turns a node's raw attribute list into `AST.Attribute` structs.
# `mod` is the component module, or `nil` for plain HTML tags. Delegates to the
# accumulator variant below.
defp process_attributes(_module, [], _meta, _compile_meta), do: []
defp process_attributes(mod, attrs, meta, compile_meta),
  do: process_attributes(mod, attrs, meta, compile_meta, [])
# Base case of the accumulator variant: `acc` holds `{name, node}` pairs in
# reverse order of processing, so keep only the nodes and restore source order.
defp process_attributes(_module, [], _meta, _compile_meta, acc) do
  acc
  |> Keyword.values()
  |> Enum.reverse()
end
# A `:root` attribute (`<Comp {expr}>`) is rewritten to target the prop the
# module declared with `root: true`, then reprocessed by name. If no such prop
# exists, warn and drop the attribute.
defp process_attributes(mod, [{:root, value, attr_meta} | attrs], meta, compile_meta, acc) do
  with true <- function_exported?(mod, :__props__, 0),
       prop when not is_nil(prop) <- Enum.find(mod.__props__(), & &1.opts[:root]) do
    name = Atom.to_string(prop.name)
    process_attributes(mod, [{name, value, attr_meta} | attrs], meta, compile_meta, acc)
  else
    _ ->
      message = """
      no root property defined for component <#{meta.node_alias}>
      Hint: you can declare a root property using option `root: true`
      """
      IOHelper.warn(message, meta.caller, attr_meta.file, attr_meta.line)
      process_attributes(mod, attrs, meta, compile_meta, acc)
  end
end
# Converts one `{name, value, meta}` attribute into an `AST.Attribute` node,
# warning about duplicates (unless the prop is declared with `accumulate: true`)
# before pushing the node onto the accumulator keyed by the attribute name.
defp process_attributes(mod, [{name, value, attr_meta} | attrs], meta, compile_meta, acc) do
  name = String.to_atom(name)
  attr_meta = Helpers.to_meta(attr_meta, meta)
  {type, type_opts} = Surface.TypeHandler.attribute_type_and_opts(mod, name, attr_meta)
  # "Duplicated" means a node for this name is already in the accumulator.
  # Props and plain HTML attributes get different warning texts.
  duplicated_attr? = Keyword.has_key?(acc, name)
  duplicated_prop? = mod && (!Keyword.get(type_opts, :accumulate, false) and duplicated_attr?)
  duplicated_html_attr? = !mod && duplicated_attr?
  root_prop? = Keyword.get(type_opts, :root, false)
  cond do
    duplicated_prop? && root_prop? ->
      message = """
      the prop `#{name}` has been passed multiple times. Considering only the last value.
      Hint: Either specify the `#{name}` via the root property (`<#{meta.node_alias} { ... }>`) or \
      explicitly via the #{name} property (`<#{meta.node_alias} #{name}="...">`), but not both.
      """
      IOHelper.warn(message, meta.caller, attr_meta.file, attr_meta.line)
    duplicated_prop? && not root_prop? ->
      message = """
      the prop `#{name}` has been passed multiple times. Considering only the last value.
      Hint: Either remove all redundant definitions or set option `accumulate` to `true`:
      ```
      prop #{name}, :#{type}, accumulate: true
      ```
      This way the values will be accumulated in a list.
      """
      IOHelper.warn(message, meta.caller, attr_meta.file, attr_meta.line)
    duplicated_html_attr? ->
      message = """
      the attribute `#{name}` has been passed multiple times on line #{meta.line}. \
      Considering only the last value.
      Hint: remove all redundant definitions
      """
      IOHelper.warn(message, meta.caller, attr_meta.file, attr_meta.line)
    true ->
      nil
  end
  # Even duplicated attributes are accumulated: later entries shadow earlier
  # ones only in the warning semantics, not here.
  node = %AST.Attribute{
    type: type,
    type_opts: type_opts,
    name: name,
    value: attr_value(name, type, value, attr_meta, compile_meta),
    meta: attr_meta
  }
  process_attributes(mod, attrs, meta, compile_meta, [{name, node} | acc])
end
# Builds the AST value for an attribute:
#   * `{:attribute_expr, ...}` — a `{...}` expression, converted via the type handler;
#   * `{:ast, ...}` — a `^var` tagged expression inside `quote_surface`, unquoted;
#   * anything else — a literal, converted via the type handler.
defp attr_value(name, type, {:attribute_expr, value, expr_meta}, attr_meta, _compile_meta) do
  expr_meta = Helpers.to_meta(expr_meta, attr_meta)
  expr = Surface.TypeHandler.expr_to_quoted!(value, name, type, expr_meta)
  AST.AttributeExpr.new(expr, value, expr_meta)
end
defp attr_value(_name, _type, {:ast, variable, expr_meta}, _attr_meta, compile_meta) do
  meta = Helpers.to_meta(expr_meta, compile_meta)
  unquote_variable!(variable, compile_meta, meta)
end
defp attr_value(name, type, value, meta, _compile_meta) do
  Surface.TypeHandler.literal_to_ast_node!(type, name, value, meta)
end
# HTML tags must not contain template nodes — those are only meaningful inside
# components. Walks the child list and returns `:ok` or `{:error, message}`
# for the first `AST.Template` found.
defp validate_tag_children([]), do: :ok
defp validate_tag_children([%AST.Template{name: name} | _]) do
  {:error, "templates are only allowed as children elements of components, but found template for #{name}"}
end
defp validate_tag_children([_ | nodes]), do: validate_tag_children(nodes)
# Groups a component's children into slot templates keyed by slot name.
# Explicit `AST.Template`/`AST.SlotableComponent` children are grouped by their
# declared slot; any remaining non-blank default children are wrapped in a
# synthesized default `AST.Template`. Returns `{:ok, templates_map, attributes}`
# where `attributes` may have had template directives (e.g. `:let`) removed.
defp collect_templates(mod, attributes, nodes, meta) do
  # Don't extract the template directives if this module is slotable
  {:ok, directives, attributes} =
    if component_slotable?(mod) do
      {:ok, [], attributes}
    else
      collect_directives(@template_directive_handlers, attributes, meta)
    end
  templates =
    nodes
    |> to_ast(meta)
    |> Enum.group_by(fn
      %AST.Template{name: name} -> name
      %AST.SlotableComponent{slot: name} -> name
      _ -> :default
    end)
  # Default-slot children may be a mix of explicit `<#template>` nodes and
  # loose content; only the loose content needs wrapping.
  {already_wrapped, default_children} =
    templates
    |> Map.get(:default, [])
    |> Enum.split_with(fn
      %AST.Template{} -> true
      _ -> false
    end)
  if Enum.all?(default_children, &Helpers.is_blank_or_empty/1) do
    {:ok, Map.put(templates, :default, already_wrapped), attributes}
  else
    wrapped =
      process_directives(%AST.Template{
        name: :default,
        children: default_children,
        directives: directives,
        let: [],
        meta: meta
      })
    {:ok, Map.put(templates, :default, [wrapped | already_wrapped]), attributes}
  end
end
# Normalizes directive prefixes on every attribute name (rewriting any name
# starting with one of `@directive_prefixes` to its `:`-prefixed form), then
# extracts the directives recognized by `handlers`.
defp collect_directives(handlers, attributes, meta) do
  attributes =
    for attr <- attributes,
        attr_name = elem(attr, 0),
        normalized_name = normalize_directive_prefix(attr_name) do
      put_elem(attr, 0, normalized_name)
    end
  do_collect_directives(handlers, attributes, meta)
end
# One clause is generated at compile time per prefix in `@directive_prefixes`,
# rewriting `<prefix><name>` to `":<name>"`. Any other name passes through.
for prefix <- @directive_prefixes do
  defp normalize_directive_prefix(unquote(prefix) <> name), do: ":#{name}"
end
defp normalize_directive_prefix(name), do: name
# Recursively splits `attributes` into extracted directives and remaining
# attributes. An attribute that yields at least one directive is consumed;
# otherwise it is kept. Directives are sorted by their handler's position in
# `handlers`, so handler order defines directive application order.
defp do_collect_directives(handlers, attributes, meta)
defp do_collect_directives(_, [], _), do: {:ok, [], []}
defp do_collect_directives(handlers, [attr | attributes], meta) do
  {:ok, dirs, attrs} = do_collect_directives(handlers, attributes, meta)
  attr = extract_modifiers(attr)
  directives =
    handlers
    |> Enum.map(fn handler -> handler.extract(attr, meta) end)
    |> List.flatten()
  attributes =
    if Enum.empty?(directives) do
      [attr | attrs]
    else
      attrs
    end
  directives =
    Enum.sort_by(directives ++ dirs, fn %{module: mod} ->
      Enum.find_index(handlers, fn handler -> handler == mod end)
    end)
  {:ok, directives, attributes}
end
# Splits dot-separated modifier suffixes off a directive name, e.g.
# `{":on-click.prevent", v, %{}}` becomes `{":on-click", v, %{modifiers: ["prevent"]}}`.
# A name without a dot keeps any modifiers already present in `meta`
# (defaulting to `[]`). Non-directive attributes (no leading ":") pass through.
defp extract_modifiers({":" <> _rest = full_name, value, meta}) do
  [base | mods] = String.split(full_name, ".")

  modifiers =
    if mods == [] do
      Map.get(meta, :modifiers, [])
    else
      mods
    end

  {base, value, Map.put(meta, :modifiers, modifiers)}
end

defp extract_modifiers(other), do: other
# Warns about required props that were not passed to the component. Skipped
# when a `:props` directive is present (props may then be injected dynamically)
# or when the module defines no props. Always returns `:ok` — this only warns,
# it never fails compilation.
defp validate_properties(module, props, directives, meta) do
  has_directive_props? = Enum.any?(directives, &match?(%AST.Directive{name: :props}, &1))
  if not has_directive_props? and function_exported?(module, :__props__, 0) do
    existing_props_names = Enum.map(props, & &1.name)
    required_props_names = module.__required_props_names__()
    missing_props_names = required_props_names -- existing_props_names
    for prop_name <- missing_props_names do
      message = "Missing required property \"#{prop_name}\" for component <#{meta.node_alias}>"
      # A missing `:id` on a stateful component gets an extra hint, since
      # LiveComponents require it implicitly.
      message =
        if prop_name == :id and is_stateful_component(module) do
          message <>
            """
            \n\nHint: Components using `Surface.LiveComponent` automatically define a required `id` prop to make them stateful.
            If you meant to create a stateless component, you can switch to `use Surface.Component`.
            """
        else
          message
        end
      IOHelper.warn(message, meta.caller, meta.file, meta.line)
    end
  end
  :ok
end
# Validates the slot templates passed to component `mod`:
#   * compile error if templates are given but `mod` declares no slots;
#   * warning for required slots left empty or missing;
#   * compile error when a template targets a slot `mod` doesn't declare;
#   * compile error when a template's `:let` bindings name arguments the slot
#     doesn't define.
# Returns `:ok` when everything passes.
defp validate_templates(mod, templates, meta) do
  names = Map.keys(templates)
  if !function_exported?(mod, :__slots__, 0) and not Enum.empty?(names) do
    message = """
    parent component `#{inspect(mod)}` does not define any slots. \
    Found the following templates: #{inspect(names)}
    """
    IOHelper.compile_error(message, meta.file, meta.line)
  end
  for name <- mod.__required_slots_names__(),
      !Map.has_key?(templates, name) or
        Enum.all?(Map.get(templates, name, []), &Helpers.is_blank_or_empty/1) do
    message = "missing required slot \"#{name}\" for component <#{meta.node_alias}>"
    IOHelper.warn(message, meta.caller, meta.file, meta.line)
  end
  for {slot_name, template_instances} <- templates,
      mod.__get_slot__(slot_name) == nil,
      not component_slotable?(mod),
      template <- template_instances do
    raise_missing_parent_slot_error!(mod, slot_name, template.meta, meta)
  end
  for slot_name <- Map.keys(templates),
      template <- Map.get(templates, slot_name) do
    slot = mod.__get_slot__(slot_name)
    args = Keyword.keys(template.let)
    # Prefer the :let directive's location for error reporting; fall back to
    # the component's meta.
    arg_meta =
      Enum.find_value(template.directives, meta, fn directive ->
        if directive.module == Surface.Directive.Let do
          directive.meta
        end
      end)
    case slot do
      %{opts: opts} ->
        non_generator_args = Enum.map(opts[:args] || [], &Map.get(&1, :name))
        undefined_keys = args -- non_generator_args
        if not Enum.empty?(undefined_keys) do
          [arg | _] = undefined_keys
          message = """
          undefined argument `#{inspect(arg)}` for slot `#{slot_name}` in `#{inspect(mod)}`.
          Available arguments: #{inspect(non_generator_args)}.
          Hint: You can define a new slot argument using the `args` option: \
          `slot #{slot_name}, args: [..., #{inspect(arg)}]`
          """
          IOHelper.compile_error(message, arg_meta.file, arg_meta.line)
        end
      _ ->
        :ok
    end
  end
  :ok
end
# `<#slot />` (short syntax) was used but `module` declares no matching slot:
# raise a compile error suggesting `slot default`.
defp raise_missing_slot_error!(module, slot_name, meta, _defined_slots, true = _short_syntax?) do
  message = """
  no slot `#{slot_name}` defined in the component `#{inspect(module)}`
  Please declare the default slot using `slot default` in order to use the `<#slot />` notation.
  """
  IOHelper.compile_error(message, meta.file, meta.line)
end
# `<#slot name="...">` references an undeclared slot: raise a compile error,
# suggesting similarly named slots and listing the declared ones.
defp raise_missing_slot_error!(module, slot_name, meta, defined_slots, false = _short_syntax?) do
  defined_slot_names = Enum.map(defined_slots, & &1.name)
  similar_slot_message = similar_slot_message(slot_name, defined_slot_names)
  existing_slots_message = existing_slots_message(defined_slot_names)
  message = """
  no slot `#{slot_name}` defined in the component `#{inspect(module)}`\
  #{similar_slot_message}\
  #{existing_slots_message}\
  Hint: You can define slots using the `slot` macro.\
  For instance: `slot #{slot_name}`\
  """
  IOHelper.compile_error(message, meta.file, meta.line)
end
# Raises a compile error when a template or slotable component targets a slot
# its parent component `mod` does not declare. The message suggests similarly
# named slots and lists the parent's declared slots.
# Fix: the slotable-component message read "component <X> as the `:slot` option
# set" — corrected to "has the `:slot` option set".
defp raise_missing_parent_slot_error!(mod, slot_name, template_meta, parent_meta) do
  parent_slots = mod.__slots__() |> Enum.map(& &1.name)
  similar_slot_message = similar_slot_message(slot_name, parent_slots)
  existing_slots_message = existing_slots_message(parent_slots)
  # Slotable components get a message pointing at their `:slot` option;
  # plain templates get the generic "no slot defined" message.
  header_message =
    if component_slotable?(template_meta.module) do
      """
      The slotable component <#{inspect(template_meta.module)}> has the `:slot` option set to \
      `#{slot_name}`.
      That slot name is not declared in parent component <#{parent_meta.node_alias}>.
      Please declare the slot in the parent component or rename the value in the `:slot` option.\
      """
    else
      """
      no slot "#{slot_name}" defined in parent component <#{parent_meta.node_alias}>\
      """
    end
  message = """
  #{header_message}\
  #{similar_slot_message}\
  #{existing_slots_message}
  """
  IOHelper.compile_error(message, template_meta.file, template_meta.line)
end
# Raises a compile error when `{#else}` is combined with a `{#for}` whose
# expression has multiple generators or filters, which is unsupported.
defp raise_complex_generator(meta) do
  message = """
  using `{#else}` is only supported when the expression in `{#for}` has a single generator and no filters.
  Example:
    {#for i <- [1, 2, 3]}
      ...
    {#else}
      ...
    {/for}
  """
  IOHelper.compile_error(message, meta.file, meta.line)
end
# Returns a "\n\nDid you mean ...?" suggestion when `slot_name` scores above
# `:threshold` (default 0.8) against one of the known names per
# `Helpers.did_you_mean/2`, or "" when nothing is close enough.
defp similar_slot_message(slot_name, list_of_slot_names, opts \\ []) do
  threshold = opts[:threshold] || 0.8
  case Helpers.did_you_mean(slot_name, list_of_slot_names) do
    {similar, score} when score > threshold ->
      "\n\nDid you mean #{inspect(to_string(similar))}?"
    _ ->
      ""
  end
end
# Formats the "Available slot(s): ..." hint appended to slot-related errors.
# An empty slot list yields an empty string so nothing is appended.
defp existing_slots_message([]), do: ""

defp existing_slots_message(existing_slots) do
  slot_names = Enum.map(existing_slots, &to_string/1)
  "\n\nAvailable #{Helpers.list_to_string("slot:", "slots:", slot_names)}"
end
# Warns when a `<#slot>` declared `required: true` also has fallback content —
# the fallback can never render for a required slot. The no-children clause
# matches first, so slots without fallback content are never checked.
defp maybe_warn_required_slot_with_default_value(_, [], _, _), do: nil
defp maybe_warn_required_slot_with_default_value(slot, _, short_syntax?, meta) do
  if Keyword.get(slot.opts, :required, false) do
    # Reproduce the user's syntax in the hint: short syntax has no name attr.
    slot_name_tag = if short_syntax?, do: "", else: " name=\"#{slot.name}\""
    message = """
    setting the fallback content on a required slot has no effect.
    Hint: Either keep the fallback content and remove the `required: true`:
      slot #{slot.name}
      ...
      <#slot#{slot_name_tag}>Fallback content</#slot>
    or keep the slot as required and remove the fallback content:
      slot #{slot.name}, required: true`
      ...
      <#slot#{slot_name_tag} />
    but not both.
    """
    IOHelper.warn(message, meta.caller, meta.file, meta.line)
  end
end
# Resolves a `^var` tagged expression inside `quote_surface` and wraps the
# variable's value as an AST node: binaries/booleans/integers become literals,
# a single-element list is unwrapped, other lists are wrapped in a container,
# and anything else is assumed to already be an AST node.
defp unquote_variable!(variable, compile_meta, expr_meta) do
  validate_inside_quote_surface!(compile_meta, expr_meta)
  validate_variable!(variable, expr_meta)
  case fetch_variable_value!(variable, compile_meta, expr_meta) do
    value when is_binary(value) or is_boolean(value) or is_integer(value) ->
      %Surface.AST.Literal{value: value}
    [value] ->
      value
    value when is_list(value) ->
      %AST.Container{children: value, meta: expr_meta}
    ast ->
      ast
  end
end
# `{^var}` is only valid inside `quote_surface`, where `compile_meta.variables`
# is populated; raise a compile error otherwise.
defp validate_inside_quote_surface!(compile_meta, expr_meta) do
  if !compile_meta.variables do
    message = "cannot use tagged expression {^var} outside `quote_surface`"
    IOHelper.compile_error(message, expr_meta.file, expr_meta.line)
  end
end
# The token after `^` must look like a plain Elixir variable name
# (lowercase letter, then letters/digits/underscores); otherwise raise.
defp validate_variable!(variable, expr_meta) do
  if !Regex.match?(~r/^[a-z][a-zA-Z_\d]*$/, variable) do
    message = """
    cannot unquote `#{variable}`.
    The expression to be unquoted must be written as `^var`, where `var` is an existing variable.
    """
    IOHelper.compile_error(message, expr_meta.file, expr_meta.line)
  end
end
# Looks up the `^var` value in `compile_meta.variables`. On a miss, raises a
# compile error that suggests a similarly named variable (score > 0.8) and
# lists the variables that are in scope.
defp fetch_variable_value!(variable, compile_meta, expr_meta) do
  case Keyword.fetch(compile_meta.variables, String.to_atom(variable)) do
    :error ->
      defined_variables = compile_meta.variables |> Keyword.keys() |> Enum.map(&to_string/1)
      similar_variable_message =
        case Helpers.did_you_mean(variable, defined_variables) do
          {similar, score} when score > 0.8 ->
            "\n\nDid you mean #{inspect(to_string(similar))}?"
          _ ->
            ""
        end
      available_variables =
        Helpers.list_to_string(
          "\n\nAvailable variable:",
          "\n\nAvailable variables:",
          defined_variables
        )
      message = """
      undefined variable "#{variable}".#{similar_variable_message}#{available_variables}
      """
      IOHelper.compile_error(message, expr_meta.file, expr_meta.line)
    {:ok, value} ->
      value
  end
end
# Validates every attribute on a `<#slot>` element, raising on the first one
# not allowed by `validate_slot_attr!/1`. Returns `:ok`.
defp validate_slot_attrs!(attrs) do
  for attr <- attrs, do: validate_slot_attr!(attr)
  :ok
end
# Accepts attributes whitelisted in `@valid_slot_props`; anything else raises a
# compile error naming the offending attribute or directive (names starting
# with ":" are reported as directives).
defp validate_slot_attr!({name, _, _meta}) when name in @valid_slot_props do
  :ok
end
defp validate_slot_attr!({name, _, %{file: file, line: line}}) do
  type =
    case name do
      ":" <> _ -> "directive"
      _ -> "attribute"
    end
  message = """
  invalid #{type} `#{name}` for <#slot>.
  Slots only accept `name`, `index`, `:args`, `:if` and `:for`.
  """
  IOHelper.compile_error(message, file, line)
end
end
| 30.960065 | 132 | 0.628435 |
e84afa4f1b352bae225ea939cb335842098eb656 | 939 | ex | Elixir | lib/tables/offset.ex | bengtson/fonts | b41daac7e010ec95eb62f2f68c5987429d771e33 | [
"MIT"
] | null | null | null | lib/tables/offset.ex | bengtson/fonts | b41daac7e010ec95eb62f2f68c5987429d771e33 | [
"MIT"
] | null | null | null | lib/tables/offset.ex | bengtson/fonts | b41daac7e010ec95eb62f2f68c5987429d771e33 | [
"MIT"
] | null | null | null | defmodule Fonts.Tables.Offset do
def get_offset_table(state) do
%{"Binary" => binary, "Parser" => parser, "Font" => font} = state
%{"Cursor" => cursor} = parser
%{"Tables" => tables} = font
<<
snft_version::binary-size(4),
table_count::unsigned-integer-size(16),
search_range::unsigned-integer-size(16),
entry_selector::unsigned-integer-size(16),
range_shift::unsigned-integer-size(16),
_rest::binary
>> = binary
parser = Map.merge(parser, %{"Cursor" => cursor + 12})
table = %{
"offset" => %{
"Offset Version" => snft_version,
"Table Count" => table_count,
"Search Range" => search_range,
"Entry Selector" => entry_selector,
"Range Shift" => range_shift
}
}
tables = Map.merge(tables, table)
font = put_in(font, ["Tables"], tables)
%{"Binary" => binary, "Parser" => parser, "Font" => font}
end
end
| 28.454545 | 69 | 0.5836 |
e84b09cc6a16038523cf544c56e01bf00e4ebcae | 6,756 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_cx_v3_response_message.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_cx_v3_response_message.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_cx_v3_response_message.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessage do
  @moduledoc """
  Represents a response message that can be returned by a conversational agent. Response messages are also used for output audio synthesis. The approach is as follows: * If at least one OutputAudioText response is present, then all OutputAudioText responses are linearly concatenated, and the result is used for output audio synthesis. * If the OutputAudioText responses are a mixture of text and SSML, then the concatenated result is treated as SSML; otherwise, the result is treated as either text or SSML as appropriate. The agent designer should ideally use either text or SSML consistently throughout the bot design. * Otherwise, all Text responses are linearly concatenated, and the result is used for output audio synthesis. This approach allows for more sophisticated user experience scenarios, where the text displayed to the user may differ from what is heard.
  ## Attributes
  * `conversationSuccess` (*type:* `GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageConversationSuccess.t`, *default:* `nil`) - Indicates that the conversation succeeded.
  * `endInteraction` (*type:* `GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageEndInteraction.t`, *default:* `nil`) - Output only. A signal that indicates the interaction with the Dialogflow agent has ended. This message is generated by Dialogflow only when the conversation reaches `END_SESSION` page. It is not supposed to be defined by the user. It's guaranteed that there is at most one such message in each response.
  * `liveAgentHandoff` (*type:* `GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageLiveAgentHandoff.t`, *default:* `nil`) - Hands off conversation to a human agent.
  * `mixedAudio` (*type:* `GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageMixedAudio.t`, *default:* `nil`) - Output only. An audio response message composed of both the synthesized Dialogflow agent responses and responses defined via play_audio. This message is generated by Dialogflow only and not supposed to be defined by the user.
  * `outputAudioText` (*type:* `GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageOutputAudioText.t`, *default:* `nil`) - A text or ssml response that is preferentially used for TTS output audio synthesis, as described in the comment on the ResponseMessage message.
  * `payload` (*type:* `map()`, *default:* `nil`) - Returns a response containing a custom, platform-specific payload.
  * `playAudio` (*type:* `GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessagePlayAudio.t`, *default:* `nil`) - Signal that the client should play an audio clip hosted at a client-specific URI. Dialogflow uses this to construct mixed_audio. However, Dialogflow itself does not try to read or process the URI in any way.
  * `telephonyTransferCall` (*type:* `GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageTelephonyTransferCall.t`, *default:* `nil`) - A signal that the client should transfer the phone call connected to this agent to a third-party endpoint.
  * `text` (*type:* `GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageText.t`, *default:* `nil`) - Returns a text response.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :conversationSuccess =>
            GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageConversationSuccess.t()
            | nil,
          :endInteraction =>
            GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageEndInteraction.t()
            | nil,
          :liveAgentHandoff =>
            GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageLiveAgentHandoff.t()
            | nil,
          :mixedAudio =>
            GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageMixedAudio.t()
            | nil,
          :outputAudioText =>
            GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageOutputAudioText.t()
            | nil,
          :payload => map() | nil,
          :playAudio =>
            GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessagePlayAudio.t()
            | nil,
          :telephonyTransferCall =>
            GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageTelephonyTransferCall.t()
            | nil,
          :text =>
            GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageText.t() | nil
        }
  # Each `field/2` call below maps one JSON attribute of the API payload onto
  # the struct; the `:as` option names the nested model used for decoding.
  field(:conversationSuccess,
    as: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageConversationSuccess
  )
  field(:endInteraction,
    as: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageEndInteraction
  )
  field(:liveAgentHandoff,
    as: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageLiveAgentHandoff
  )
  field(:mixedAudio,
    as: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageMixedAudio
  )
  field(:outputAudioText,
    as: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageOutputAudioText
  )
  field(:payload, type: :map)
  field(:playAudio,
    as: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessagePlayAudio
  )
  field(:telephonyTransferCall,
    as:
      GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageTelephonyTransferCall
  )
  field(:text, as: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessageText)
end
# Delegates JSON decoding of this model to the generated `decode/2`.
defimpl Poison.Decoder,
  for: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessage do
  def decode(value, options) do
    GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessage.decode(value, options)
  end
end
# Delegates JSON encoding of this model to the shared Gax base encoder.
defimpl Poison.Encoder,
  for: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3ResponseMessage do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 60.864865 | 870 | 0.767466 |
e84b3f262604946c746472dc49c16c0dad21dc4e | 75 | exs | Elixir | test/test_helper.exs | ruyjfs/example-elixir | 9737205e4a548f2118f32ff45dd4f2d67a00c54a | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | ruyjfs/example-elixir | 9737205e4a548f2118f32ff45dd4f2d67a00c54a | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | ruyjfs/example-elixir | 9737205e4a548f2118f32ff45dd4f2d67a00c54a | [
"Apache-2.0"
] | null | null | null | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(ExampleElixir.Repo, :manual)
| 25 | 59 | 0.8 |
e84b584d8d56b3fc33f4bca680c8a5663f2a8d0d | 126 | ex | Elixir | debian/menu.ex | takeokunn/roswell | 9b5c03ba921581176c3c87277fac83e19a011f5d | [
"MIT"
] | 1,572 | 2016-01-04T05:27:12.000Z | 2022-03-27T08:13:52.000Z | debian/menu.ex | DavidAlphaFox/roswell | 079cec15b0e7001b70969ca2e63f0950f2dda7bd | [
"MIT"
] | 400 | 2015-12-31T14:03:23.000Z | 2022-03-17T08:10:58.000Z | debian/menu.ex | DavidAlphaFox/roswell | 079cec15b0e7001b70969ca2e63f0950f2dda7bd | [
"MIT"
] | 140 | 2016-01-17T18:04:28.000Z | 2022-03-06T08:54:25.000Z | ?package(roswell):needs="X11|text|vc|wm" section="Applications/see-menu-manual"\
title="roswell" command="/usr/bin/roswell"
| 42 | 80 | 0.746032 |
e84b5b1966df16f190b4d43b2b95f3670f6a155f | 4,988 | ex | Elixir | lib/make_word_bot/workers/process_game.ex | nulleof/make-word-bot | bf6ed80975f0d89697cdd5e1019d73c47dd335d4 | [
"MIT"
] | null | null | null | lib/make_word_bot/workers/process_game.ex | nulleof/make-word-bot | bf6ed80975f0d89697cdd5e1019d73c47dd335d4 | [
"MIT"
] | 5 | 2019-02-10T14:22:39.000Z | 2019-02-11T16:29:02.000Z | lib/make_word_bot/workers/process_game.ex | nulleof/make-word-bot | bf6ed80975f0d89697cdd5e1019d73c47dd335d4 | [
"MIT"
] | null | null | null | defmodule MakeWordBot.ProcessGame do
require Logger
alias MakeWordBot.Repo
def send_message(chat_id, message, message_reply_id \\ nil) do
MakeWordBot.start_async(fn ->
MakeWordBot.ProcessMessage.send_message(chat_id, message, message_reply_id)
end)
end
def is_joke_time(word) do
Regex.match?(~r/иста$/ui, word)
|| Regex.match?(~r/ет$/ui, word)
end
def send_joke_message(chat_id, initial_message, message_id) do
answer = cond do
Regex.match?(~r/иста$/ui, initial_message) -> "Отсоси у тракториста"
Regex.match?(~r/ет$/ui, initial_message) -> "Пидора ответ"
end
send_message(chat_id, answer, message_id)
end
def perform_joke(chat_id, text, message_id) do
cond do
is_joke_time(text) -> send_joke_message(chat_id, text, message_id)
true -> :not_a_joke
end
end
import Ecto.Query
def gen_main_word() do
word = Repo.all(from w in MakeWordBot.Word)
|> Enum.filter(fn word -> String.length(word.word) >= MakeWordBot.min_word_size() end)
|> Enum.random()
word.word
end
def word_exists_in_db?(word) do
query = from w in MakeWordBot.Word,
where: w.word == ^word
count_entries = Repo.all(query)
|> Enum.count()
count_entries > 0
end
def start_link(chat_id) do
# start linked (or not? It doesn't matter at all) timer here
Logger.debug("New game started with pid: #{inspect self()}")
Process.send_after(self(), {:end_game}, MakeWordBot.game_length())
# search here new word
word = gen_main_word()
message = "Начата новая игра! Слово игры *#{String.upcase(word)}* "
send_message(chat_id, message)
# start main game loop
game_loop(%{
chat_id: chat_id,
word: word,
answers: %{},
score: %{},
})
end
def update_score(score, user_name, user_id, amount) do
user_score = Map.get(score, user_id)
if (user_score == nil) do
Map.put(score, user_id, %{
score: amount,
name: user_name,
})
else
old_score = user_score.score
user_score = Map.put(user_score, :score, old_score + amount)
Map.put(score, user_id, user_score)
end
end
import MakeWordBot.WordChecker, only: [word_consist_of?: 2]
def process_answer(message_id, answer, from, state) do
answer_prepared = answer
|> String.downcase()
|> String.trim()
game_word = state.word
case Map.get(state.answers, answer_prepared) do
true ->
# do nothing, word was already answered
state
nil ->
cond do
game_word == answer_prepared ->
message = "Молодец! Возьми с полки пирожок, +0. *#{String.upcase(state.word)}*"
send_message(state.chat_id, message, message_id)
state
word_consist_of?(game_word, answer_prepared) && word_exists_in_db?(answer_prepared) ->
# add to list of known entries
answers = Map.put(state.answers, answer_prepared, true)
user_name = "#{from["first_name"]} #{from["last_name"]}"
user_id = from["id"]
# update score for user
score_for_word = MakeWordBot.WordScore.score(answer_prepared)
score = update_score(state.score, user_name, user_id, score_for_word)
state = Map.put(state, :score, score)
state = Map.put(state, :answers, answers)
# send hooray message
message = "+#{score_for_word} #{user_name}. *#{String.upcase(state.word)}*"
send_message(state.chat_id, message, message_id)
state
true ->
state
end
_ -> state
end
end
def score_message(chat_id, score) do
message = "Счет:\n"
Enum.reduce(score, message, fn ({k, v}, acc) ->
acc <> "#{v.score} | #{v.name}\n"
end)
end
def game_loop(state) do
receive do
{:end_game} ->
# kills itself, tell about results
Logger.info("Game was finished")
message = "*Игра закончена!*\n" <> score_message(state.chat_id, state.score)
send_message(state.chat_id, message)
{:answer, message_id, text, from} ->
Logger.debug("PONG!")
# send a joke first
perform_joke(state.chat_id, text, message_id)
state = process_answer(message_id, text, from, state)
game_loop(state)
{:get_word} ->
Logger.debug("Request current word")
message = "Текущее слово *#{String.upcase(state.word)}*"
send_message(state.chat_id, message)
game_loop(state)
{:score} ->
Logger.debug("Requested current score")
message = score_message(state.chat_id, state.score)
send_message(state.chat_id, message)
game_loop(state)
end
end
end | 28.340909 | 96 | 0.596832 |
e84b5e876e9eba23c85a97c7181e34ecd6319d4a | 1,894 | ex | Elixir | clients/service_management/lib/google_api/service_management/v1/model/monitoring_destination.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/service_management/lib/google_api/service_management/v1/model/monitoring_destination.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/service_management/lib/google_api/service_management/v1/model/monitoring_destination.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.ServiceManagement.V1.Model.MonitoringDestination do
@moduledoc """
Configuration of a specific monitoring destination (the producer project
or the consumer project).
## Attributes
* `metrics` (*type:* `list(String.t)`, *default:* `nil`) - Types of the metrics to report to this monitoring destination.
Each type must be defined in Service.metrics section.
* `monitoredResource` (*type:* `String.t`, *default:* `nil`) - The monitored resource type. The type must be defined in
Service.monitored_resources section.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:metrics => list(String.t()),
:monitoredResource => String.t()
}
field(:metrics, type: :list)
field(:monitoredResource)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceManagement.V1.Model.MonitoringDestination do
def decode(value, options) do
GoogleApi.ServiceManagement.V1.Model.MonitoringDestination.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ServiceManagement.V1.Model.MonitoringDestination do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.735849 | 125 | 0.74076 |
e84b6833810fa6906d3ce1dede8eb6f2b3139594 | 95 | ex | Elixir | lib/playwright/api_request_context.ex | dbrody/playwright-elixir | 48611c08dbdb8e36aa4dd8aa2d97a4014b753815 | [
"MIT"
] | 30 | 2021-06-01T16:59:35.000Z | 2022-03-25T16:56:19.000Z | lib/playwright/api_request_context.ex | dbrody/playwright-elixir | 48611c08dbdb8e36aa4dd8aa2d97a4014b753815 | [
"MIT"
] | 35 | 2021-06-10T17:05:31.000Z | 2022-02-11T22:30:36.000Z | lib/playwright/api_request_context.ex | dbrody/playwright-elixir | 48611c08dbdb8e36aa4dd8aa2d97a4014b753815 | [
"MIT"
] | 4 | 2021-08-13T20:38:18.000Z | 2022-01-31T04:32:35.000Z | defmodule Playwright.APIRequestContext do
@moduledoc false
use Playwright.ChannelOwner
end
| 19 | 41 | 0.842105 |
e84b8733b03c327956db47a13cbf14b0acf1bc17 | 1,138 | ex | Elixir | lib/stomp_client/rabbitmq/persisted_work_queue.ex | eteubert/stomp_client | 57bcd4d824a64f068b9f19916c6659a716f7866a | [
"Apache-2.0"
] | 2 | 2018-12-18T11:58:19.000Z | 2019-02-13T19:32:25.000Z | lib/stomp_client/rabbitmq/persisted_work_queue.ex | eteubert/stomp_client | 57bcd4d824a64f068b9f19916c6659a716f7866a | [
"Apache-2.0"
] | 7 | 2016-11-03T07:37:21.000Z | 2017-11-14T06:37:12.000Z | lib/stomp_client/rabbitmq/persisted_work_queue.ex | eteubert/stomp_client | 57bcd4d824a64f068b9f19916c6659a716f7866a | [
"Apache-2.0"
] | 12 | 2016-10-27T12:39:06.000Z | 2020-12-01T05:45:44.000Z | defmodule StompClient.RabbitMQ.PersistedWorkQueue do
import Kernel, except: [send: 2]
@default_prefetch_count 1
  @doc """
  Subscribes `pid` to `topic` using the default prefetch count of 1.
  """
  def subscribe(pid, topic) do
    subscribe(pid, topic, prefetch_count: @default_prefetch_count)
  end
  @doc """
  Subscribes `pid` to `topic` as a durable, client-acked work queue.
  Note the third argument must be exactly `[prefetch_count: n]`; other
  option lists will not match this clause.
  """
  def subscribe(pid, topic, prefetch_count: prefetch_count) do
    # The queue name is namespaced under "/queue/"; the subscription id
    # is a stable hash of that name, so re-subscribing to the same topic
    # reuses the same id.
    topic2 = create_topic(topic)
    sub_id = :erlang.phash2(topic2)
    opts = [
      id: sub_id,
      durable: true,
      "auto-delete": false,
      "prefetch-count": prefetch_count,
      # Every message must be acknowledged individually (see ack/2, nack/2).
      ack: "client-individual"
    ]
    StompClient.subscribe(pid, topic2, opts)
  end
def send(pid, topic, payload, opts \\ []) do
rabbit_opts = [persistent: true]
Keyword.merge(opts, rabbit_opts, fn _k, _v1, v2 -> v2 end)
StompClient.send(pid, create_topic(topic), payload, opts)
end
  @doc """
  Acknowledges a received message; expects the raw message map whose
  `"ack"` entry carries the ack id assigned by the broker.
  """
  def ack(pid, %{"ack" => message_id} = _message) do
    StompClient.ack(pid, message_id)
  end
def nack(pid, %{"ack" => message_id} = _message) do
StompClient.ack(pid, message_id)
end
# Private functions
defp create_topic(<<?/, topic::binary>>) do
"/queue/" <> topic
end
defp create_topic(topic) do
"/queue/" <> topic
end
end
| 23.708333 | 66 | 0.662566 |
e84b907814375f9320af18c75c725c51b35a61b7 | 279 | exs | Elixir | priv/repo/migrations/20180206015504_create_neighbors.exs | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | 1 | 2021-01-13T15:46:49.000Z | 2021-01-13T15:46:49.000Z | priv/repo/migrations/20180206015504_create_neighbors.exs | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | 1 | 2021-01-13T16:42:24.000Z | 2021-01-13T16:42:24.000Z | priv/repo/migrations/20180206015504_create_neighbors.exs | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | null | null | null | defmodule Litelist.Repo.Migrations.CreateNeighbors do
use Ecto.Migration
  # Creates the `neighbors` table holding login credentials:
  # a username plus the hashed ("encrypted") password.
  def change do
    create table(:neighbors) do
      add :username, :string
      add :encrypted_password, :string
      timestamps()
    end
    # Usernames must be unique across all neighbors.
    create unique_index(:neighbors, [:username])
  end
end
| 19.928571 | 53 | 0.698925 |
e84bc0c567f257a3d8a66f60b6c3d00d645cc960 | 2,428 | exs | Elixir | apps/definition/test/definition_test.exs | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | apps/definition/test/definition_test.exs | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | apps/definition/test/definition_test.exs | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | defmodule DefinitionTest do
use ExUnit.Case
  # Test fixture: a Definition-backed struct with two schema versions.
  # V1 requires `bar` to be a binary; V2 requires it to be an integer.
  defmodule Foo do
    use Definition, schema: Foo.V2
    defstruct [:version, :bar, :baz]
    # Hook invoked by Definition on new/1; upcases `baz` when present.
    def on_new(foo) do
      new_baz =
        case foo.baz do
          nil -> nil
          x -> String.upcase(x)
        end
      %{foo | baz: new_baz}
      |> Ok.ok()
    end
    # Migrates a version-1 struct (string `bar`) to version 2 (integer `bar`).
    # Note: `baz` is not carried over by this migration.
    def migrate(%__MODULE__{version: 1} = old) do
      struct(__MODULE__, %{version: 2, bar: String.to_integer(old.bar)})
      |> Ok.ok()
    end
    # Schema for version 1: version must equal 1, `bar` must be a binary.
    defmodule V1 do
      use Definition.Schema
      def s do
        schema(%Foo{version: spec(fn v -> v == 1 end), bar: spec(is_binary())})
      end
    end
    # Schema for version 2: version must equal 2, `bar` must be an integer.
    defmodule V2 do
      use Definition.Schema
      def s do
        schema(%Foo{version: spec(fn v -> v == 2 end), bar: spec(is_integer())})
      end
    end
  end
describe "__using__/1" do
test "makes new/1 available to create struct" do
input = %{version: 2, bar: 9001}
assert {:ok, %Foo{}} = Foo.new(input)
end
test "makes migrate/1 overridable to migrate schema versions" do
input = %{version: 1, bar: "42"}
assert {:ok, %Foo{version: 2, bar: 42}} = Foo.new(input)
end
test "makes schema/0 available to get current version schema" do
assert Foo.schema() == Foo.V2.s()
end
end
describe "new/1" do
test "handles input with string keys" do
input = %{"version" => 2, "bar" => 33}
assert {:ok, %Foo{version: 2, bar: 33}} = Foo.new(input)
end
test "accepts a Keyword list input" do
assert {:ok, %Foo{bar: 42}} = Foo.new(version: 2, bar: 42)
end
test "calls on_new to allow custom transformation" do
input = %{"version" => 2, "bar" => 34, "baz" => "mike"}
assert {:ok, %Foo{baz: "MIKE"}} = Foo.new(input)
end
test "returns exception for other list input" do
assert {:error, %Foo.InputError{} = ex} = Foo.new([:foo])
assert ex.message == [:foo]
end
end
describe "from_json/1" do
test "turns JSON into new struct" do
input = ~s/{"version": 2, "bar": 9001}/
assert {:ok, %Foo{bar: 9001}} = Foo.from_json(input)
end
test "returns error tuple for invalid JSON" do
assert {:error, %Jason.DecodeError{}} = Foo.from_json("{a, b}")
end
test "returns exception for invalid new/1 input" do
input = ~s/[{"version": 2, "bar": 0}]/
assert {:error, %Foo.InputError{}} = Foo.from_json(input)
end
end
end
| 25.829787 | 80 | 0.576606 |
e84bdb9816c386add2d364ab01f1bf5e9c292db0 | 94 | exs | Elixir | ch2/script.exs | TheEndIsNear/ElixirInAction | 9b18ebd4845723935e03ec2a0c3cf869aa955541 | [
"MIT"
] | null | null | null | ch2/script.exs | TheEndIsNear/ElixirInAction | 9b18ebd4845723935e03ec2a0c3cf869aa955541 | [
"MIT"
] | null | null | null | ch2/script.exs | TheEndIsNear/ElixirInAction | 9b18ebd4845723935e03ec2a0c3cf869aa955541 | [
"MIT"
] | null | null | null | defmodule MyModule do
def run do
IO.puts("Called MyModule.run")
end
end
MyModule.run
| 11.75 | 34 | 0.712766 |
e84bf2ee3f34b35324ac9dba9d19a80d6c6270a7 | 92 | exs | Elixir | .iex.exs | alex-min/ex_platform | 73dea336ee939d8155b4b5953146351af30edd66 | [
"MIT"
] | 129 | 2021-05-30T10:10:59.000Z | 2022-03-27T14:42:48.000Z | .iex.exs | alex-min/ex_platform | 73dea336ee939d8155b4b5953146351af30edd66 | [
"MIT"
] | 48 | 2021-05-30T21:34:03.000Z | 2022-02-21T14:20:04.000Z | .iex.exs | alex-min/ex_platform | 73dea336ee939d8155b4b5953146351af30edd66 | [
"MIT"
] | 12 | 2021-05-30T22:03:49.000Z | 2022-02-08T19:59:16.000Z | alias ExPlatform.Repo
alias ExPlatform.Accounts.User
alias Ecto.Changeset
import Ecto.Query
| 18.4 | 30 | 0.858696 |
e84c2de040bcdee338ee7c4880f61dc6f6aa6ae8 | 363 | exs | Elixir | samples/sin_benchmarks.exs | zacky1972/simd_sample | 3f50e698ae3c577e751ee7a5513086b7df7fca49 | [
"Apache-2.0"
] | 1 | 2021-09-03T07:56:54.000Z | 2021-09-03T07:56:54.000Z | samples/sin_benchmarks.exs | zacky1972/simd_sample | 3f50e698ae3c577e751ee7a5513086b7df7fca49 | [
"Apache-2.0"
] | null | null | null | samples/sin_benchmarks.exs | zacky1972/simd_sample | 3f50e698ae3c577e751ee7a5513086b7df7fca49 | [
"Apache-2.0"
] | null | null | null |
input_f32 = Nx.iota({0x1000000}, type: {:f, 32})
# input_f16 = Nx.iota({0x1000000}, type: {:f, 16})
Benchee.run(
%{
"sin32_1" => fn -> SimdSample.Sin.sin32_1(input_f32) end,
"sin32_0" => fn -> SimdSample.Sin.sin32(input_f32) end,
"nx_32" => fn -> Nx.sin(input_f32) end,
# "nx_16" => fn -> Nx.sin(input_f16) end
}
) \
|> then(fn _ -> :ok end)
| 25.928571 | 61 | 0.584022 |
e84c357aea9c186e6c356e7c56ff79f9c63900bb | 231 | exs | Elixir | priv/repo/migrations/20170530201724_create_entity.exs | ExpressApp/ecto_crdt_types | cf18557cf888b3d50a44640997507cff6caf2b93 | [
"MIT"
] | 8 | 2018-09-20T13:05:16.000Z | 2021-09-22T08:40:40.000Z | priv/repo/migrations/20170530201724_create_entity.exs | ExpressApp/ecto_crdt_types | cf18557cf888b3d50a44640997507cff6caf2b93 | [
"MIT"
] | null | null | null | priv/repo/migrations/20170530201724_create_entity.exs | ExpressApp/ecto_crdt_types | cf18557cf888b3d50a44640997507cff6caf2b93 | [
"MIT"
] | null | null | null | defmodule TestApp.Repo.Migrations.CreateEntity do
use Ecto.Migration
  # Creates the `entities` table exercising plain, array and binary
  # (serialized CRDT payload) column types.
  def change do
    create table(:entities) do
      add :counter, :integer
      add :test, {:array, :string}
      add :test_crdt, :binary
    end
  end
end
| 19.25 | 49 | 0.666667 |
e84c3ab15dc3db73eb7704faccd3555f573db039 | 719 | ex | Elixir | lib/earmark/helpers/string_helpers.ex | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | lib/earmark/helpers/string_helpers.ex | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | lib/earmark/helpers/string_helpers.ex | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | 1 | 2020-03-31T19:53:15.000Z | 2020-03-31T19:53:15.000Z | defmodule Earmark.Helpers.StringHelpers do
@moduledoc false
@doc """
Remove the leading part of a string
"""
def behead(str, ignore) when is_integer(ignore) do
String.slice(str, ignore..-1)
end
def behead(str, leading_string) do
behead(str, String.length(leading_string))
end
@doc """
Remove leading spaces up to size
"""
def behead_indent(str, size) do
String.replace(str, ~r<\A\s{0,#{size}}>, "")
end
@doc """
Returns a tuple with the prefix and the beheaded string
iex> behead_tuple("prefixpostfix", "prefix")
{"prefix", "postfix"}
"""
def behead_tuple(str, lead) do
{lead, behead(str, lead)}
end
end
# SPDX-License-Identifier: Apache-2.0
| 20.542857 | 59 | 0.650904 |
e84c70b4bdd5888080b4dbccf5e45c9fad8c6b81 | 470 | ex | Elixir | apps/customer/lib/customer/web/commands/user.ex | JaiMali/job_search-1 | 5fe1afcd80aa5d55b92befed2780cd6721837c88 | [
"MIT"
] | 102 | 2017-05-21T18:24:04.000Z | 2022-03-10T12:53:20.000Z | apps/customer/lib/customer/web/commands/user.ex | JaiMali/job_search-1 | 5fe1afcd80aa5d55b92befed2780cd6721837c88 | [
"MIT"
] | 2 | 2017-05-21T01:53:30.000Z | 2017-12-01T00:27:06.000Z | apps/customer/lib/customer/web/commands/user.ex | JaiMali/job_search-1 | 5fe1afcd80aa5d55b92befed2780cd6721837c88 | [
"MIT"
] | 18 | 2017-05-22T09:51:36.000Z | 2021-09-24T00:57:01.000Z | defmodule Customer.Web.Command.User do
use Customer.Command, model: Customer.Web.User
alias Customer.Web.User
  # Adds a :user step to the given Ecto.Multi: reuses the user matching
  # the auth email when one exists, otherwise inserts a new one built
  # from the auth info.
  def get_or_insert_by(multi, %{info: auth_info}) do
    case Repo.get_by(User, email: auth_info.email) do
      nil -> insert_by(multi, auth_info)
      # Wrap the existing user so the Multi step still yields {:ok, user}.
      user -> Multi.run(multi, :user, fn _ -> {:ok, user} end)
    end
  end
  # Inserts a new user from the auth info under the :user step.
  defp insert_by(multi, auth_info) do
    Multi.insert(multi, :user, User.registration_changeset(%User{}, auth_info))
  end
end | 27.647059 | 79 | 0.697872 |
e84cac87a1f5d2694aa571acb718fd2281a82aa5 | 98 | ex | Elixir | test/support/dummy_repo.ex | dbernheisel/ecto_psql_extras | 537a328753cfb1d5d3a80f1865d77ddfa3930e19 | [
"MIT"
] | 278 | 2020-08-04T12:38:29.000Z | 2022-03-25T15:14:28.000Z | test/support/dummy_repo.ex | dbernheisel/ecto_psql_extras | 537a328753cfb1d5d3a80f1865d77ddfa3930e19 | [
"MIT"
] | 21 | 2020-09-27T14:38:21.000Z | 2021-11-08T22:44:46.000Z | test/support/dummy_repo.ex | dbernheisel/ecto_psql_extras | 537a328753cfb1d5d3a80f1865d77ddfa3930e19 | [
"MIT"
] | 15 | 2020-09-28T18:08:25.000Z | 2022-03-22T11:26:21.000Z | defmodule Dummy.Repo do
use Ecto.Repo, otp_app: :dummy_app, adapter: Ecto.Adapters.Postgres
end
| 24.5 | 69 | 0.785714 |
e84cb9ec49a181a5dde735b5a884a383b1c33f16 | 202 | exs | Elixir | apps/ewallet_db/priv/repo/migrations/20190516034014_add_created_backup_code_at_to_user.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet_db/priv/repo/migrations/20190516034014_add_created_backup_code_at_to_user.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet_db/priv/repo/migrations/20190516034014_add_created_backup_code_at_to_user.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | defmodule EWalletDB.Repo.Migrations.AddCreatedBackupCodeAtToUser do
use Ecto.Migration
  # Adds the timestamp recording when a user last generated backup codes.
  def change do
    alter table(:user) do
      add :backup_codes_created_at, :naive_datetime
    end
  end
end
| 20.2 | 67 | 0.762376 |
e84cd37338c09baa8b8a218d133a02d1a051f39b | 1,778 | exs | Elixir | apps/web/mix.exs | renovate-tests/ex | 2e02d5a1e59122c13a0ebd2321f5cb291160aaa8 | [
"MIT"
] | null | null | null | apps/web/mix.exs | renovate-tests/ex | 2e02d5a1e59122c13a0ebd2321f5cb291160aaa8 | [
"MIT"
] | null | null | null | apps/web/mix.exs | renovate-tests/ex | 2e02d5a1e59122c13a0ebd2321f5cb291160aaa8 | [
"MIT"
] | null | null | null | defmodule Web.Mixfile do
use Mix.Project
  # Umbrella child project configuration; build artifacts, config, deps
  # and the lockfile are shared at the umbrella root (../..).
  def project do
    [
      app: :web,
      version: "0.0.1",
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.4",
      elixirc_paths: elixirc_paths(Mix.env),
      compilers: [:phoenix, :gettext] ++ Mix.compilers,
      start_permanent: Mix.env == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Web.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
  # Phoenix 1.3 web stack with Ecto 3/Postgres; all versions pinned loosely.
  defp deps do
    [
      {:phoenix, "~> 1.3.4"},
      {:phoenix_pubsub, "~> 1.0"},
      {:phoenix_ecto, "~> 4.0"},
      {:postgrex, ">= 0.0.0"},
      {:phoenix_html, "~> 2.10"},
      # Code reloading in the browser, development only.
      {:phoenix_live_reload, "~> 1.0", only: :dev},
      {:plug_cowboy, "~> 1.0"},
      {:ecto_sql, "~> 3.0"},
      {:gettext, "~> 0.11"},
      {:cowboy, "~> 1.0"}
    ]
  end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 26.537313 | 79 | 0.566367 |
e84cdea863ce2dcc1c0e64fdbdbd1713b09328e5 | 7,593 | ex | Elixir | lib/ecto/adapters/sql/worker.ex | thiagoarrais/ecto | 93d7595beb7f407775c4918d19cad370b0bcd498 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/sql/worker.ex | thiagoarrais/ecto | 93d7595beb7f407775c4918d19cad370b0bcd498 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/sql/worker.ex | thiagoarrais/ecto | 93d7595beb7f407775c4918d19cad370b0bcd498 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Adapters.SQL.Worker do
@moduledoc false
use GenServer
  # Client API: starts a worker (linked) for connection `module` and its
  # connection options `args`.
  def start_link({module, args}) do
    GenServer.start_link(__MODULE__, {module, args})
  end
  # Same as start_link/1, but without linking to the caller.
  def start({module, args}) do
    GenServer.start(__MODULE__, {module, args})
  end
  # Links the calling process to the worker (handled by the :link_me
  # callback); used to tie a transaction owner to this worker.
  def link_me(worker, timeout) do
    GenServer.call(worker, :link_me, timeout)
  end
  # Reverses link_me/2 for the calling process.
  def unlink_me(worker, timeout) do
    GenServer.call(worker, :unlink_me, timeout)
  end
  # Runs `sql` with `params` on the worker's connection, returning the
  # raw result or raising the error returned by the adapter.
  def query!(worker, sql, params, opts) do
    case GenServer.call(worker, :query, opts[:timeout]) do
      {:ok, {module, conn}} ->
        # The worker only hands over the connection; the query itself
        # executes in the calling process.
        case module.query(conn, sql, params, opts) do
          {:ok, res} -> res
          {:error, err} -> raise err
        end
      {:error, err} ->
        raise err
    end
  end
  # Transaction control helpers; each raises if the worker replies with
  # an error (see call!/3).
  def begin!(worker, opts) do
    call!(worker, {:begin, opts}, opts)
  end
  def commit!(worker, opts) do
    call!(worker, {:commit, opts}, opts)
  end
  def rollback!(worker, opts) do
    call!(worker, {:rollback, opts}, opts)
  end
  # Sandbox helpers used by test transactions.
  def begin_test_transaction!(worker, opts) do
    call!(worker, {:begin_test_transaction, opts}, opts)
  end
  def restart_test_transaction!(worker, opts) do
    call!(worker, {:restart_test_transaction, opts}, opts)
  end
  def rollback_test_transaction!(worker, opts) do
    call!(worker, {:rollback_test_transaction, opts}, opts)
  end
defp call!(worker, command, opts) do
case GenServer.call(worker, command, opts[:timeout]) do
:ok -> :ok
{:error, err} -> raise err
end
end
## Callbacks
def init({module, params}) do
Process.flag(:trap_exit, true)
lazy? = Keyword.get(params, :lazy, true)
unless lazy? do
case module.connect(params) do
{:ok, conn} ->
conn = conn
_ ->
:ok
end
end
{:ok, %{conn: conn, params: params, link: nil,
transactions: 0, module: module, sandbox: false}}
end
# Those functions do not need a connection
  # Link/unlink the caller; only one linked owner at a time (link: nil /
  # link: pid patterns enforce the pairing).
  def handle_call(:link_me, {pid, _}, %{link: nil} = s) do
    Process.link(pid)
    {:reply, :ok, %{s | link: pid}}
  end
  def handle_call(:unlink_me, {pid, _}, %{link: pid} = s) do
    Process.unlink(pid)
    {:reply, :ok, %{s | link: nil}}
  end
  # Connection is disconnected, reconnect before continuing
  def handle_call(request, from, %{conn: nil, params: params, module: module} = s) do
    case module.connect(params) do
      {:ok, conn} ->
        # Re-establish the sandbox transaction (if any) and then retry
        # the original request against the fresh connection.
        case begin_sandbox(%{s | conn: conn}) do
          {:ok, s} -> handle_call(request, from, s)
          {:error, err} -> {:reply, {:error, err}, s}
        end
      {:error, err} ->
        {:reply, {:error, err}, s}
    end
  end
  # Hands the raw connection to the caller; the query runs client-side.
  def handle_call(:query, _from, %{conn: conn, module: module} = s) do
    {:reply, {:ok, {module, conn}}, s}
  end
def handle_call({:begin, opts}, _from, s) do
%{conn: conn, transactions: trans, module: module} = s
sql =
if trans == 0 do
module.begin_transaction
else
module.savepoint "ecto_#{trans}"
end
case module.query(conn, sql, [], opts) do
{:ok, _} ->
{:reply, :ok, %{s | transactions: trans + 1}}
{:error, _} = err ->
GenServer.reply(err)
wipe_state(s)
end
end
def handle_call({:commit, opts}, _from, %{transactions: trans} = s) when trans >= 1 do
%{conn: conn, module: module} = s
reply =
case trans do
1 -> module.query(conn, module.commit, [], opts)
_ -> {:ok, {[], 0}}
end
case reply do
{:ok, _} ->
{:reply, :ok, %{s | transactions: trans - 1}}
{:error, _} = err ->
GenServer.reply(err)
wipe_state(s)
end
end
def handle_call({:rollback, opts}, _from, %{transactions: trans} = s) when trans >= 1 do
%{conn: conn, module: module} = s
sql =
case trans do
1 -> module.rollback
_ -> module.rollback_to_savepoint "ecto_#{trans-1}"
end
case module.query(conn, sql, [], opts) do
{:ok, _} ->
{:reply, :ok, %{s | transactions: trans - 1}}
{:error, _} = err ->
GenServer.reply(err)
wipe_state(s)
end
end
  # Sandbox already active: begin_test_transaction is a no-op.
  def handle_call({:begin_test_transaction, _opts}, _from, %{sandbox: true} = s) do
    {:reply, :ok, s}
  end
  # Only allowed outside of a user transaction (transactions must be 0).
  def handle_call({:begin_test_transaction, _opts}, _from, %{transactions: 0} = s) do
    case begin_sandbox(%{s | sandbox: true}) do
      {:ok, s} -> {:reply, :ok, s}
      {:error, err} -> {:reply, {:error, err}, s}
    end
  end
  # Without a sandbox there is nothing to restart.
  def handle_call({:restart_test_transaction, _opts}, _from, %{sandbox: false} = s) do
    {:reply, :ok, s}
  end
def handle_call({:restart_test_transaction, opts}, _from, %{transactions: 1} = s) do
%{conn: conn, module: module} = s
case module.query(conn, module.rollback_to_savepoint("ecto_sandbox"), [], opts) do
{:ok, _} ->
{:reply, :ok, s}
{:error, _} = err ->
GenServer.reply(err)
wipe_state(s)
end
end
def handle_call({:rollback_test_transaction, _opts}, _from, %{sandbox: false} = s) do
{:reply, :ok, s}
end
def handle_call({:rollback_test_transaction, opts}, _from, %{transactions: 1} = s) do
%{conn: conn, module: module} = s
case module.query(conn, module.rollback, [], opts) do
{:ok, _} ->
{:reply, :ok, %{s | transactions: 0, sandbox: false}}
{:error, _} = err ->
GenServer.reply(err)
wipe_state(s)
end
end
  # The connection crashed, notify all linked process.
  def handle_info({:EXIT, conn, _reason}, %{conn: conn} = s) do
    # The connection process is already dead; drop it before wiping so
    # wipe_state/1 does not try to disconnect a dead connection.
    wipe_state(%{s | conn: nil})
  end
  # If a linked process crashed, assume stale connection and close it.
  def handle_info({:EXIT, link, _reason}, %{link: link} = s) do
    wipe_state(s)
  end
  # Ignore any other message.
  def handle_info(_info, s) do
    {:noreply, s}
  end
  # Close the connection (when open) on worker shutdown.
  def terminate(_reason, %{conn: conn, module: module}) do
    conn && module.disconnect(conn)
  end
## Helpers
  # No sandbox requested: nothing to do.
  defp begin_sandbox(%{sandbox: false} = s), do: {:ok, s}
  # Opens the sandbox transaction plus the "ecto_sandbox" savepoint that
  # restart_test_transaction later rolls back to.
  defp begin_sandbox(%{sandbox: true} = s) do
    %{conn: conn, module: module} = s
    opts = [timeout: :infinity]
    case module.query(conn, module.begin_transaction, [], opts) do
      {:ok, _} ->
        case module.query(conn, module.savepoint("ecto_sandbox"), [], opts) do
          # The sandbox transaction counts as level 1.
          {:ok, _} -> {:ok, %{s | transactions: 1}}
          {:error, _} = err -> err
        end
      {:error, _} = err ->
        err
    end
  end
# Imagine the following scenario:
#
# 1. PID starts a transaction
# 2. PID sends a query
# 3. The connection crashes (and we receive an EXIT message)
#
# If 2 and 3 happen at the same, there is no guarantee which
# one will be handled first. That's why we can't simply kill
# the linked processes and start a new connection as we may
# have left-over messages in the inbox.
#
# So this is what we do:
#
# 1. We disconnect from the database
# 2. We kill the linked processes (transaction owner)
# 3. We remove all calls from that process
#
# Because this worker only accept calls and it is controlled by
# the pool, the expectation is that the number of messages to
# be removed will always be maximum 1.
  # See the comment block above: disconnect, kill the linked transaction
  # owner, then drain its pending calls so none run against a fresh
  # connection.
  defp wipe_state(%{conn: conn, module: module, link: link} = s) do
    conn && module.disconnect(conn)
    if link do
      Process.unlink(link)
      Process.exit(link, {:ecto, :no_connection})
      clear_calls(link)
    end
    {:noreply, %{s | conn: nil, link: nil, transactions: 0}}
  end
  # Removes any in-flight GenServer call from the killed owner process;
  # per the comment above, at most one such message is expected.
  defp clear_calls(link) do
    receive do
      {:"$gen_call", {^link, _}, _} -> clear_calls(link)
    after
      0 -> :ok
    end
  end
end
| 26.642105 | 90 | 0.590939 |
e84cef48af0ce035e8e99be918c01279c249be9f | 1,154 | ex | Elixir | lib/radiator/directory/audio.ex | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | lib/radiator/directory/audio.ex | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | lib/radiator/directory/audio.ex | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | defmodule Radiator.Directory.Audio do
@moduledoc """
Audio Meta Object.
An Audio contains all data required to generate a web player: file references
and audio metadata.
An Audio belongs to one or many episodes, or stand on its own in a network.
"""
use Ecto.Schema
import Ecto.Changeset
import Arc.Ecto.Changeset
import Ecto.Query, warn: false
alias Radiator.Media
alias Radiator.Directory.{Episode, Network}
alias Radiator.AudioMeta.Chapter
schema "audios" do
field :title, :string
field :duration, :string
field :published_at, :utc_datetime
field :image, Media.AudioImage.Type
has_many :episodes, Episode
belongs_to :network, Network
has_many :audio_files, Media.AudioFile
has_many :chapters, Chapter
has_many :permissions, {"audios_perm", Radiator.Perm.Permission}, foreign_key: :subject_id
timestamps()
end
  @doc false
  # Casts the user-editable metadata fields and the image attachment;
  # associations are not handled by this changeset.
  def changeset(audio, attrs) do
    audio
    |> cast(attrs, [:title, :duration, :published_at])
    # allow_paths/allow_urls let the image be supplied as a local path or a URL.
    |> cast_attachments(attrs, [:image], allow_paths: true, allow_urls: true)
    # todo: validate it belongs to _something_ / not a zombie
  end
end
| 25.086957 | 94 | 0.717504 |
e84cf7d2298e8049f40d9e1467b2c58cec7cb166 | 36,310 | ex | Elixir | lib/ecto/association.ex | ohta-rh/ecto | d5f8bfdfcc6fcfb520c62bbd1dbdd8ee6f09de59 | [
"Apache-2.0"
] | null | null | null | lib/ecto/association.ex | ohta-rh/ecto | d5f8bfdfcc6fcfb520c62bbd1dbdd8ee6f09de59 | [
"Apache-2.0"
] | null | null | null | lib/ecto/association.ex | ohta-rh/ecto | d5f8bfdfcc6fcfb520c62bbd1dbdd8ee6f09de59 | [
"Apache-2.0"
] | null | null | null | import Ecto.Query, only: [from: 2, join: 4, distinct: 3]
defmodule Ecto.Association.NotLoaded do
  @moduledoc """
  Struct returned by one to one associations when they are not loaded.
  The fields are:
    * `__field__` - the association field in `owner`
    * `__owner__` - the schema that owns the association
    * `__cardinality__` - the cardinality of the association
  """
  defstruct [:__field__, :__owner__, :__cardinality__]
  # Render a compact placeholder instead of the raw struct so inspected
  # query results stay readable.
  defimpl Inspect do
    def inspect(not_loaded, _opts) do
      msg = "association #{inspect not_loaded.__field__} is not loaded"
      ~s(#Ecto.Association.NotLoaded<#{msg}>)
    end
  end
end
defmodule Ecto.Association do
@moduledoc false
@type t :: %{__struct__: atom,
on_cast: nil | fun,
cardinality: :one | :many,
relationship: :parent | :child,
owner: atom,
owner_key: atom,
field: atom,
unique: boolean}
alias Ecto.Query.{BooleanExpr, QueryExpr}
@doc """
Builds the association struct.
The struct must be defined in the module that implements the
callback and it must contain at least the following keys:
* `:cardinality` - tells if the association is one to one
or one/many to many
* `:field` - tells the field in the owner struct where the
association should be stored
* `:owner` - the owner module of the association
* `:owner_key` - the key in the owner with the association value
* `:relationship` - if the relationship to the specified schema is
of a `:child` or a `:parent`
"""
@callback struct(module, field :: atom, opts :: Keyword.t) :: t
@doc """
Builds a struct for the given association.
The struct to build from is given as argument in case default values
should be set in the struct.
Invoked by `Ecto.build_assoc/3`.
"""
@callback build(t, Ecto.Schema.t, %{atom => term} | [Keyword.t]) :: Ecto.Schema.t
@doc """
Returns an association join query.
This callback receives the association struct and it must return
a query that retrieves all associated entries using joins up to
the owner association.
For example, a `has_many :comments` inside a `Post` module would
return:
from c in Comment, join: p in Post, on: c.post_id == p.id
Note all the logic must be expressed inside joins, as fields like
`where` and `order_by` won't be used by the caller.
This callback is invoked when `join: assoc(p, :comments)` is used
inside queries.
"""
@callback joins_query(t) :: Ecto.Query.t
@doc """
Returns the association query on top of the given query.
If the query is `nil`, the association target must be used.
This callback receives the association struct and it must return
a query that retrieves all associated entries with the given
values for the owner key.
This callback is used by `Ecto.assoc/2` and when preloading.
"""
@callback assoc_query(t, Ecto.Query.t | nil, values :: [term]) :: Ecto.Query.t
@doc """
Returns information used by the preloader.
"""
@callback preload_info(t) ::
{:assoc, t, {integer, atom}} | {:through, t, [atom]}
@doc """
Performs the repository change on the association.
Receives the parent changeset, the current changesets
and the repository action options. Must return the
persisted struct (or nil) or the changeset error.
"""
@callback on_repo_change(t, parent :: Ecto.Changeset.t, changeset :: Ecto.Changeset.t, Keyword.t) ::
{:ok, Ecto.Schema.t | nil} | {:error, Ecto.Changeset.t}
@doc """
Retrieves the association from the given schema.
"""
def association_from_schema!(schema, assoc) do
schema.__schema__(:association, assoc) ||
raise ArgumentError, "schema #{inspect schema} does not have association #{inspect assoc}"
end
@doc """
Returns the association key for the given module with the given suffix.
## Examples
iex> Ecto.Association.association_key(Hello.World, :id)
:world_id
iex> Ecto.Association.association_key(Hello.HTTP, :id)
:http_id
iex> Ecto.Association.association_key(Hello.HTTPServer, :id)
:http_server_id
"""
def association_key(module, suffix) do
prefix = module |> Module.split |> List.last |> Macro.underscore
:"#{prefix}_#{suffix}"
end
@doc """
Build an association query through with starting the given reflection
and through the given associations.
"""
def assoc_query(refl, through, query, values)
def assoc_query(%{owner: owner, through: [h|t], field: field}, extra, query, values) do
refl = owner.__schema__(:association, h) ||
raise "unknown association `#{h}` for `#{inspect owner}` (used by through association `#{field}`)"
assoc_query refl, t ++ extra, query, values
end
def assoc_query(%module{} = refl, [], query, values) do
module.assoc_query(refl, query, values)
end
def assoc_query(refl, t, query, values) do
query = query || %Ecto.Query{from: {"join expression", nil}, prefix: refl.queryable.__schema__(:prefix)}
# Find the position for upcoming joins
position = length(query.joins) + 1
# The first association must become a join,
# so we convert its where (that comes from assoc_query)
# to a join expression.
#
# Note we are being restrictive on the format
# expected from assoc_query.
assoc_query = refl.__struct__.assoc_query(refl, nil, values)
joins = Ecto.Query.Planner.query_to_joins(assoc_query, position)
# Add the new join to the query and traverse the remaining
# joins that will start counting from the added join position.
query =
%{query | joins: query.joins ++ joins}
|> joins_query(t, position + length(joins) - 1)
|> Ecto.Query.Planner.prepare_sources(:adapter_wont_be_needed)
# Our source is going to be the last join after
# traversing them all.
{joins, [assoc]} = Enum.split(query.joins, -1)
# Update the mapping and start rewriting expressions
# to make the last join point to the new from source.
rewrite_ix = assoc.ix
[assoc | joins] = Enum.map([assoc | joins], &rewrite_join(&1, rewrite_ix))
%{query | wheres: [assoc_to_where(assoc) | query.wheres], joins: joins,
from: merge_from(query.from, assoc.source), sources: nil}
|> distinct([x], true)
end
defp assoc_to_where(%{on: %QueryExpr{} = on}) do
on
|> Map.put(:__struct__, BooleanExpr)
|> Map.put(:op, :and)
end
defp merge_from({"join expression", _}, assoc_source), do: assoc_source
defp merge_from(from, _assoc_source), do: from
# Rewrite all later joins
defp rewrite_join(%{on: on, ix: ix} = join, mapping) when ix >= mapping do
on = Ecto.Query.Planner.rewrite_sources(on, &rewrite_ix(mapping, &1))
%{join | on: on, ix: rewrite_ix(mapping, ix)}
end
# Previous joins are kept intact
defp rewrite_join(join, _mapping) do
join
end
defp rewrite_ix(mapping, ix) when ix > mapping, do: ix - 1
defp rewrite_ix(ix, ix), do: 0
defp rewrite_ix(_mapping, ix), do: ix
@doc """
Build a join query with the given `through` associations starting at `counter`.
"""
def joins_query(query, through, counter) do
Enum.reduce(through, {query, counter}, fn current, {acc, counter} ->
query = join(acc, :inner, [x: counter], assoc(x, ^current))
{query, counter + 1}
end) |> elem(0)
end
@doc """
Retrieves related module from queryable.
## Examples
    iex> Ecto.Association.related_from_query({"custom_source", Schema})
    Schema
    iex> Ecto.Association.related_from_query(Schema)
    Schema
    iex> Ecto.Association.related_from_query("wrong")
    ** (ArgumentError) association queryable must be a schema or {source, schema}, got: "wrong"
"""
def related_from_query({source, schema}) when is_binary(source) and is_atom(schema), do: schema
def related_from_query(atom) when is_atom(atom), do: atom

def related_from_query(queryable) do
  raise ArgumentError,
        "association queryable must be a schema or {source, schema}, got: #{inspect(queryable)}"
end
@doc """
Merges the source carried by the query into the given schema struct.
When the query does not carry a `{source, schema}` tuple, the struct is
returned unchanged.
"""
def merge_source(schema, query)

def merge_source(struct, {source, _schema}), do: Ecto.put_meta(struct, source: source)
def merge_source(struct, _query), do: struct
@doc false
# Copies the parent's source prefix (e.g. a database schema) onto the
# changeset's underlying data, so child operations run in the same prefix.
def update_parent_prefix(changeset, %{__meta__: %{source: {prefix, _}}}) do
  update_in changeset.data, &Ecto.put_meta(&1, prefix: prefix)
end

# Parents without a prefixed source leave the changeset untouched.
def update_parent_prefix(changeset, _parent) do
  changeset
end
@doc """
Performs the repository action in the related changeset,
returning `{:ok, data}` or `{:error, changes}`.
"""
def on_repo_change(%{data: struct}, [], _opts) do
  # Nothing to process: return the parent data untouched.
  {:ok, struct}
end

def on_repo_change(changeset, assocs, opts) do
  %{data: struct, changes: changes, action: action} = changeset

  # Thread {struct, changes, halt, valid?} through every association;
  # a failure flips valid? and may halt the remaining operations.
  acc0 = {struct, changes, false, true}

  {struct, changes, _halt, valid?} =
    Enum.reduce(assocs, acc0, fn {refl, value}, acc ->
      on_repo_change(refl, value, changeset, action, opts, acc)
    end)

  if valid?, do: {:ok, struct}, else: {:error, changes}
end
# cardinality :one, value nil: the association is being cleared. Unless a
# previous step halted, a currently-set child may need to be replaced.
defp on_repo_change(%{cardinality: :one, field: field} = meta, nil, parent_changeset,
                    _repo_action, opts, {parent, changes, halt, valid?}) do
  if not halt, do: maybe_replace_one!(meta, nil, parent, parent_changeset, opts)
  {Map.put(parent, field, nil), Map.put(changes, field, nil), halt, valid?}
end

# cardinality :one with a changeset: run the child operation, replacing any
# previously-set child on success; on error the accumulator becomes invalid.
defp on_repo_change(%{cardinality: :one, field: field, __struct__: mod} = meta,
                    %{action: action} = changeset, parent_changeset,
                    repo_action, opts, {parent, changes, halt, valid?}) do
  check_action!(meta, action, repo_action)
  case on_repo_change_unless_halted(halt, mod, meta, parent_changeset, changeset, opts) do
    {:ok, struct} ->
      maybe_replace_one!(meta, struct, parent, parent_changeset, opts)
      {Map.put(parent, field, struct), Map.put(changes, field, changeset), halt, valid?}
    {:error, error_changeset} ->
      # halted? decides whether later siblings still hit the repo.
      {parent, Map.put(changes, field, error_changeset),
       halted?(halt, changeset, error_changeset), false}
  end
end

# cardinality :many: process each child changeset in order, collecting the
# resulting structs (reversed at the end to restore original ordering).
defp on_repo_change(%{cardinality: :many, field: field, __struct__: mod} = meta,
                    changesets, parent_changeset, repo_action, opts,
                    {parent, changes, halt, all_valid?}) do
  {changesets, structs, halt, valid?} =
    Enum.reduce(changesets, {[], [], halt, true}, fn
      %{action: action} = changeset, {changesets, structs, halt, valid?} ->
        check_action!(meta, action, repo_action)
        case on_repo_change_unless_halted(halt, mod, meta, parent_changeset, changeset, opts) do
          # {:ok, nil} means the child was removed (e.g. deleted): keep the
          # changeset in the trail but contribute no struct.
          {:ok, nil} ->
            {[changeset|changesets], structs, halt, valid?}
          {:ok, struct} ->
            {[changeset|changesets], [struct | structs], halt, valid?}
          {:error, error_changeset} ->
            {[error_changeset|changesets], structs, halted?(halt, changeset, error_changeset), false}
        end
    end)
  if valid? do
    {Map.put(parent, field, Enum.reverse(structs)),
     Map.put(changes, field, Enum.reverse(changesets)),
     halt, all_valid?}
  else
    # On failure, only the changesets (carrying errors) are surfaced;
    # the parent data is left as-is.
    {parent,
     Map.put(changes, field, Enum.reverse(changesets)),
     halt, false}
  end
end
# A child marked for deletion cannot ride along on a parent insert; every
# other action/repo-action combination is allowed.
defp check_action!(%{related: schema}, :delete, :insert) do
  raise ArgumentError,
        "got action :delete in changeset for associated #{inspect schema} while inserting"
end

defp check_action!(_meta, _action, _repo_action), do: :ok
# Processing is halted once it already was, or when a changeset that was
# valid before the repo operation came back invalid from it.
defp halted?(halt, changeset, error_changeset) do
  halt == true or
    match?({%{valid?: true}, %{valid?: false}}, {changeset, error_changeset})
end
# Once a previous sibling has halted processing, the remaining changesets are
# returned as errors without ever touching the repository.
defp on_repo_change_unless_halted(halted?, mod, meta, parent, changeset, opts) do
  if halted? do
    {:error, changeset}
  else
    mod.on_repo_change(meta, parent, changeset, opts)
  end
end
# When the parent already holds a different child for this has_one field,
# dispatch a `:replace` changeset for the previous child (handled according
# to the association's :on_replace). A failed replace is a hard error.
defp maybe_replace_one!(%{field: field, __struct__: mod} = meta, current, parent,
                        parent_changeset, opts) do
  previous = Map.get(parent, field)
  # Only replace loaded, persisted children whose primary key differs from
  # the incoming child's (same key means it's an update, not a replace).
  if replaceable?(previous) and primary_key!(previous) != primary_key!(current) do
    changeset = %{Ecto.Changeset.change(previous) | action: :replace}
    case mod.on_repo_change(meta, parent_changeset, changeset, opts) do
      {:ok, nil} ->
        :ok
      {:error, changeset} ->
        raise Ecto.InvalidChangesetError,
          action: changeset.action, changeset: changeset
    end
  end
end

# Metadata shapes without field/__struct__ have nothing to replace.
defp maybe_replace_one!(_, _, _, _, _), do: :ok
# Only a previously loaded, persisted child can be replaced; missing,
# not-yet-loaded, or freshly built structs are skipped.
defp replaceable?(previous) do
  case previous do
    nil -> false
    %Ecto.Association.NotLoaded{} -> false
    %{__meta__: %{state: :built}} -> false
    _ -> true
  end
end
# `nil` has no primary key (empty list); everything else delegates to Ecto,
# which raises for structs without a primary key.
defp primary_key!(struct) do
  if is_nil(struct), do: [], else: Ecto.primary_key!(struct)
end
end
defmodule Ecto.Association.Has do
  @moduledoc """
  The association struct for `has_one` and `has_many` associations.
  Its fields are:
  * `cardinality` - The association cardinality
  * `field` - The name of the association field on the schema
  * `owner` - The schema where the association was defined
  * `related` - The schema that is associated
  * `owner_key` - The key on the `owner` schema used for the association
  * `related_key` - The key on the `related` schema used for the association
  * `queryable` - The real query to use for querying association
  * `on_delete` - The action taken on associations when schema is deleted
  * `on_replace` - The action taken on associations when schema is replaced
  * `defaults` - Default fields used when building the association
  * `relationship` - The relationship to the specified schema, default is `:child`
  """
  @behaviour Ecto.Association

  # Allowed values for the :on_delete / :on_replace options. has_one
  # additionally accepts :update on replace (there is a single child to update).
  @on_delete_opts [:nothing, :nilify_all, :delete_all]
  @on_replace_opts [:raise, :mark_as_invalid, :delete, :nilify]
  @has_one_on_replace_opts @on_replace_opts ++ [:update]

  defstruct [:cardinality, :field, :owner, :related, :owner_key, :related_key, :on_cast,
             :queryable, :on_delete, :on_replace, unique: true, defaults: [], relationship: :child]

  # Builds and validates the reflection struct for a has_one/has_many
  # definition. Raises ArgumentError on invalid or missing options.
  @doc false
  def struct(module, name, opts) do
    # The key on the owner side: the :references option, falling back to the
    # schema's primary key; having neither is an error.
    ref =
      cond do
        ref = opts[:references] ->
          ref
        primary_key = Module.get_attribute(module, :primary_key) ->
          elem(primary_key, 0)
        true ->
          raise ArgumentError, "need to set :references option for " <>
            "association #{inspect name} when schema has no primary key"
      end
    unless Module.get_attribute(module, :ecto_fields)[ref] do
      raise ArgumentError, "schema does not have the field #{inspect ref} used by " <>
        "association #{inspect name}, please set the :references option accordingly"
    end
    queryable = Keyword.fetch!(opts, :queryable)
    cardinality = Keyword.fetch!(opts, :cardinality)
    related = Ecto.Association.related_from_query(queryable)
    # :through associations are handled by Ecto.Association.HasThrough and
    # must not also pass a schema here.
    if opts[:through] do
      raise ArgumentError, "invalid association #{inspect name}. When using the :through " <>
        "option, the schema should not be passed as second argument"
    end
    on_delete = Keyword.get(opts, :on_delete, :nothing)
    unless on_delete in @on_delete_opts do
      raise ArgumentError, "invalid :on_delete option for #{inspect name}. " <>
        "The only valid options are: " <>
        Enum.map_join(@on_delete_opts, ", ", &"`#{inspect &1}`")
    end
    on_replace = Keyword.get(opts, :on_replace, :raise)
    on_replace_opts = if cardinality == :one, do: @has_one_on_replace_opts, else: @on_replace_opts
    unless on_replace in on_replace_opts do
      # NOTE(review): the message lists @on_replace_opts only, so :update is
      # omitted from the listed options even for has_one.
      raise ArgumentError, "invalid `:on_replace` option for #{inspect name}. " <>
        "The only valid options are: " <>
        Enum.map_join(@on_replace_opts, ", ", &"`#{inspect &1}`")
    end
    %__MODULE__{
      field: name,
      cardinality: cardinality,
      owner: module,
      related: related,
      owner_key: ref,
      related_key: opts[:foreign_key] || Ecto.Association.association_key(module, ref),
      queryable: queryable,
      on_delete: on_delete,
      on_replace: on_replace,
      defaults: opts[:defaults] || []
    }
  end

  # Builds a child struct, copying the owner's key value into the child's
  # related key so the two stay linked.
  @doc false
  def build(%{owner_key: owner_key, related_key: related_key} = refl, struct, attributes) do
    refl
    |> build()
    |> struct(attributes)
    |> Map.put(related_key, Map.get(struct, owner_key))
  end

  # Query joining owner and child on related_key == owner_key.
  @doc false
  def joins_query(%{queryable: queryable, related_key: related_key,
                    owner: owner, owner_key: owner_key}) do
    from o in owner,
      join: q in ^queryable,
      on: field(q, ^related_key) == field(o, ^owner_key)
  end

  # Single owner-key value: use equality for a tighter query.
  @doc false
  def assoc_query(%{queryable: queryable, related_key: related_key}, query, [value]) do
    from x in (query || queryable),
      where: field(x, ^related_key) == ^value
  end

  # Multiple owner-key values: use an IN clause.
  @doc false
  def assoc_query(%{queryable: queryable, related_key: related_key}, query, values) do
    from x in (query || queryable),
      where: field(x, ^related_key) in ^values
  end

  @doc false
  def preload_info(%{related_key: related_key} = refl) do
    # {0, related_key}: presumably take related_key from the first source of
    # the preload query — TODO confirm against Ecto.Repo.Preloader.
    {:assoc, refl, {0, related_key}}
  end

  # Replacing a child: translate the :replace action according to :on_replace
  # (:nilify -> update the FK to nil, :delete -> delete the row).
  # NOTE(review): no catch-all clause — other on_replace values are presumably
  # resolved before reaching this point; confirm with Ecto.Changeset.Relation.
  @doc false
  def on_repo_change(%{on_replace: on_replace} = refl, %{data: parent} = parent_changeset,
                     %{action: :replace} = changeset, opts) do
    changeset = case on_replace do
      :nilify -> %{changeset | action: :update}
      :delete -> %{changeset | action: :delete}
    end
    changeset = Ecto.Association.update_parent_prefix(changeset, parent)
    # data: nil -> the child no longer belongs to this parent, so the FK is
    # nilified by parent_key/2 below.
    case on_repo_change(refl, %{parent_changeset | data: nil}, changeset, opts) do
      {:ok, _} -> {:ok, nil}
      {:error, changeset} -> {:error, changeset}
    end
  end

  # Regular child operation: set the FK from the parent, run the repo action.
  def on_repo_change(assoc, parent_changeset, changeset, opts) do
    %{data: parent, repo: repo} = parent_changeset
    %{action: action, changes: changes} = changeset
    {key, value} = parent_key(assoc, parent)
    changeset = update_parent_key(changeset, action, key, value)
    changeset = Ecto.Association.update_parent_prefix(changeset, parent)
    case apply(repo, action, [changeset, opts]) do
      {:ok, _} = ok ->
        # A deleted child yields no struct.
        if action == :delete, do: {:ok, nil}, else: ok
      {:error, changeset} ->
        # Restore the caller's original change for the FK before reporting.
        original = Map.get(changes, key)
        {:error, put_in(changeset.changes[key], original)}
    end
  end

  # No FK bookkeeping needed when deleting the child.
  defp update_parent_key(changeset, :delete, _key, _value),
    do: changeset
  defp update_parent_key(changeset, _action, key, value),
    do: Ecto.Changeset.put_change(changeset, key, value)

  # nil owner (replace path): nilify the FK; otherwise copy the owner's key.
  defp parent_key(%{related_key: related_key}, nil) do
    {related_key, nil}
  end
  defp parent_key(%{owner_key: owner_key, related_key: related_key}, owner) do
    {related_key, Map.get(owner, owner_key)}
  end

  ## Relation callbacks
  @behaviour Ecto.Changeset.Relation

  # Builds a bare child struct with defaults and the queryable's source merged in.
  @doc false
  def build(%{related: related, queryable: queryable, defaults: defaults}) do
    related
    |> struct(defaults)
    |> Ecto.Association.merge_source(queryable)
  end

  ## On delete callbacks

  # :delete_all — removes every child row pointing at the parent.
  @doc false
  def delete_all(refl, parent, repo, opts) do
    if query = on_delete_query(refl, parent) do
      repo.delete_all query, opts
    end
  end

  # :nilify_all — clears the FK on every child row pointing at the parent.
  @doc false
  def nilify_all(%{related_key: related_key} = refl, parent, repo, opts) do
    if query = on_delete_query(refl, parent) do
      repo.update_all query, [set: [{related_key, nil}]], opts
    end
  end

  # Returns nil (no-op) when the parent has no key value to match against.
  defp on_delete_query(%{owner_key: owner_key, related_key: related_key,
                         queryable: queryable}, parent) do
    if value = Map.get(parent, owner_key) do
      from x in queryable, where: field(x, ^related_key) == ^value
    end
  end
end
defmodule Ecto.Association.HasThrough do
  @moduledoc """
  The association struct for `has_one` and `has_many` through associations.
  Its fields are:
  * `cardinality` - The association cardinality
  * `field` - The name of the association field on the schema
  * `owner` - The schema where the association was defined
  * `owner_key` - The key on the `owner` schema used for the association
  * `through` - The through associations
  * `relationship` - The relationship to the specified schema, default `:child`
  """
  @behaviour Ecto.Association

  defstruct [:cardinality, :field, :owner, :owner_key, :through, :on_cast,
             relationship: :child, unique: true]

  # Builds the reflection for a :through association. The first entry of the
  # :through list must be an association already defined on this schema.
  @doc false
  def struct(module, name, opts) do
    through = Keyword.fetch!(opts, :through)
    refl =
      case through do
        # Requires at least two entries; [h, _ | _] enforces that shape.
        [h,_|_] ->
          Module.get_attribute(module, :ecto_assocs)[h]
        _ ->
          raise ArgumentError, ":through expects a list with at least two entries: " <>
            "the association in the current module and one step through, got: #{inspect through}"
      end
    unless refl do
      raise ArgumentError, "schema does not have the association #{inspect hd(through)} " <>
        "used by association #{inspect name}, please ensure the association exists and " <>
        "is defined before the :through one"
    end
    %__MODULE__{
      field: name,
      cardinality: Keyword.fetch!(opts, :cardinality),
      through: through,
      owner: module,
      # The owner key is borrowed from the first association in the chain.
      owner_key: refl.owner_key,
    }
  end

  # Through associations cannot be built directly — build each step instead.
  @doc false
  def build(%{field: name}, %{__struct__: struct}, _attributes) do
    raise ArgumentError,
      "cannot build through association `#{inspect name}` for #{inspect struct}. " <>
      "Instead build the intermediate steps explicitly."
  end

  @doc false
  def preload_info(%{through: through} = refl) do
    {:through, refl, through}
  end

  # Through associations are read-only from the repository's point of view.
  def on_repo_change(%{field: name}, _, _, _) do
    raise ArgumentError,
      "cannot insert/update/delete through associations `#{inspect name}` via the repository. " <>
      "Instead build the intermediate steps explicitly."
  end

  # Delegates to the generic helpers that walk the `through` chain.
  @doc false
  def joins_query(%{owner: owner, through: through}) do
    Ecto.Association.joins_query(owner, through, 0)
  end

  @doc false
  def assoc_query(refl, query, values) do
    Ecto.Association.assoc_query(refl, [], query, values)
  end
end
defmodule Ecto.Association.BelongsTo do
  @moduledoc """
  The association struct for a `belongs_to` association.
  Its fields are:
  * `cardinality` - The association cardinality
  * `field` - The name of the association field on the schema
  * `owner` - The schema where the association was defined
  * `owner_key` - The key on the `owner` schema used for the association
  * `related` - The schema that is associated
  * `related_key` - The key on the `related` schema used for the association
  * `queryable` - The real query to use for querying association
  * `defaults` - Default fields used when building the association
  * `relationship` - The relationship to the specified schema, default `:parent`
  * `on_replace` - The action taken on associations when schema is replaced
  """
  @behaviour Ecto.Association

  @on_replace_opts [:raise, :mark_as_invalid, :delete, :nilify, :update]

  defstruct [:field, :owner, :related, :owner_key, :related_key, :queryable, :on_cast,
             :on_replace, defaults: [], cardinality: :one, relationship: :parent, unique: true]

  # Builds and validates the reflection struct for a belongs_to definition.
  @doc false
  def struct(module, name, opts) do
    # The key on the related (parent) schema: :references option or :id.
    ref = if ref = opts[:references], do: ref, else: :id
    queryable = Keyword.fetch!(opts, :queryable)
    related = Ecto.Association.related_from_query(queryable)
    unless is_atom(related) do
      raise ArgumentError, "association queryable must be a schema, got: #{inspect related}"
    end
    on_replace = Keyword.get(opts, :on_replace, :raise)
    unless on_replace in @on_replace_opts do
      raise ArgumentError, "invalid `:on_replace` option for #{inspect name}. " <>
        "The only valid options are: " <>
        Enum.map_join(@on_replace_opts, ", ", &"`#{inspect &1}`")
    end
    %__MODULE__{
      field: name,
      owner: module,
      related: related,
      # belongs_to holds the FK on the owner side, so :foreign_key is required.
      owner_key: Keyword.fetch!(opts, :foreign_key),
      related_key: ref,
      queryable: queryable,
      on_replace: on_replace,
      defaults: opts[:defaults] || []
    }
  end

  # No key copying here: the FK lives on the owner, not the built parent.
  @doc false
  def build(refl, _, attributes) do
    refl
    |> build()
    |> struct(attributes)
  end

  # Query joining owner and parent on related_key == owner_key.
  @doc false
  def joins_query(%{queryable: queryable, related_key: related_key,
                    owner: owner, owner_key: owner_key}) do
    from o in owner,
      join: q in ^queryable,
      on: field(q, ^related_key) == field(o, ^owner_key)
  end

  # Single FK value: equality; multiple values: IN clause.
  @doc false
  def assoc_query(%{queryable: queryable, related_key: related_key}, query, [value]) do
    from x in (query || queryable),
      where: field(x, ^related_key) == ^value
  end

  @doc false
  def assoc_query(%{queryable: queryable, related_key: related_key}, query, values) do
    from x in (query || queryable),
      where: field(x, ^related_key) in ^values
  end

  @doc false
  def preload_info(%{related_key: related_key} = refl) do
    {:assoc, refl, {0, related_key}}
  end

  # Replacing with :nilify is a no-op on the parent row — the owner's FK is
  # cleared elsewhere; nothing to do against the repository here.
  @doc false
  def on_repo_change(%{on_replace: :nilify}, _parent_changeset, %{action: :replace}, _opts) do
    {:ok, nil}
  end

  # Replacing with :delete re-dispatches as a plain delete.
  def on_repo_change(%{on_replace: :delete} = refl, parent_changeset,
                     %{action: :replace} = changeset, opts) do
    on_repo_change(refl, parent_changeset, %{changeset | action: :delete}, opts)
  end

  # Any other action: propagate the parent's prefix and run the repo action.
  def on_repo_change(_refl, %{data: parent, repo: repo}, %{action: action} = changeset, opts) do
    changeset = Ecto.Association.update_parent_prefix(changeset, parent)
    case apply(repo, action, [changeset, opts]) do
      {:ok, _} = ok ->
        if action == :delete, do: {:ok, nil}, else: ok
      {:error, changeset} ->
        {:error, changeset}
    end
  end

  ## Relation callbacks
  @behaviour Ecto.Changeset.Relation

  # Builds a bare parent struct with defaults and the queryable's source merged in.
  @doc false
  def build(%{related: related, queryable: queryable, defaults: defaults}) do
    related
    |> struct(defaults)
    |> Ecto.Association.merge_source(queryable)
  end
end
defmodule Ecto.Association.ManyToMany do
  @moduledoc """
  The association struct for `many_to_many` associations.
  Its fields are:
  * `cardinality` - The association cardinality
  * `field` - The name of the association field on the schema
  * `owner` - The schema where the association was defined
  * `related` - The schema that is associated
  * `owner_key` - The key on the `owner` schema used for the association
  * `queryable` - The real query to use for querying association
  * `on_delete` - The action taken on associations when schema is deleted
  * `on_replace` - The action taken on associations when schema is replaced
  * `defaults` - Default fields used when building the association
  * `relationship` - The relationship to the specified schema, default `:child`
  * `join_keys` - The keyword list with many to many join keys
  * `join_through` - Atom (representing a schema) or a string (representing a table)
  for many to many associations
  """
  @behaviour Ecto.Association

  @on_delete_opts [:nothing, :delete_all]
  @on_replace_opts [:raise, :mark_as_invalid, :delete]

  defstruct [:field, :owner, :related, :owner_key, :queryable, :on_delete,
             :on_replace, :join_keys, :join_through, :on_cast,
             defaults: [], relationship: :child, cardinality: :many, unique: false]

  # Builds and validates the reflection struct for a many_to_many definition.
  @doc false
  def struct(module, name, opts) do
    # :join_through is mandatory: a schema module or a table name.
    join_through = opts[:join_through]
    if join_through && (is_atom(join_through) or is_binary(join_through)) do
      :ok
    else
      raise ArgumentError,
        "many_to_many #{inspect name} associations require the :join_through option to be " <>
        "given and it must be an atom (representing a schema) or a string (representing a table)"
    end
    join_keys = opts[:join_keys]
    queryable = Keyword.fetch!(opts, :queryable)
    related = Ecto.Association.related_from_query(queryable)
    # :join_keys must be exactly two pairs: join->owner and join->related;
    # when absent, conventional "<schema>_id => :id" keys are derived.
    {owner_key, join_keys} =
      case join_keys do
        [{join_owner_key, owner_key}, {join_related_key, related_key}]
        when is_atom(join_owner_key) and is_atom(owner_key) and
             is_atom(join_related_key) and is_atom(related_key) ->
          {owner_key, join_keys}
        nil ->
          {:id, default_join_keys(module, related)}
        _ ->
          raise ArgumentError,
            "many_to_many #{inspect name} expect :join_keys to be a keyword list " <>
            "with two entries, the first being how the join table should reach " <>
            "the current schema and the second how the join table should reach " <>
            "the associated schema. For example: #{inspect default_join_keys(module, related)}"
      end
    unless Module.get_attribute(module, :ecto_fields)[owner_key] do
      raise ArgumentError, "schema does not have the field #{inspect owner_key} used by " <>
        "association #{inspect name}, please set the :join_keys option accordingly"
    end
    on_delete = Keyword.get(opts, :on_delete, :nothing)
    on_replace = Keyword.get(opts, :on_replace, :raise)
    unless on_delete in @on_delete_opts do
      raise ArgumentError, "invalid :on_delete option for #{inspect name}. " <>
        "The only valid options are: " <>
        Enum.map_join(@on_delete_opts, ", ", &"`#{inspect &1}`")
    end
    unless on_replace in @on_replace_opts do
      raise ArgumentError, "invalid `:on_replace` option for #{inspect name}. " <>
        "The only valid options are: " <>
        Enum.map_join(@on_replace_opts, ", ", &"`#{inspect &1}`")
    end
    %__MODULE__{
      field: name,
      cardinality: Keyword.fetch!(opts, :cardinality),
      owner: module,
      related: related,
      owner_key: owner_key,
      join_keys: join_keys,
      join_through: join_through,
      queryable: queryable,
      on_delete: on_delete,
      on_replace: on_replace,
      defaults: opts[:defaults] || [],
      unique: Keyword.get(opts, :unique, false)
    }
  end

  # Conventional join keys: e.g. [post_id: :id, tag_id: :id].
  defp default_join_keys(module, related) do
    [{Ecto.Association.association_key(module, :id), :id},
     {Ecto.Association.association_key(related, :id), :id}]
  end

  # Query joining owner -> join table -> related schema.
  @doc false
  def joins_query(%{queryable: queryable, owner: owner,
                    join_through: join_through, join_keys: join_keys}) do
    [{join_owner_key, owner_key}, {join_related_key, related_key}] = join_keys
    from o in owner,
      join: j in ^join_through, on: field(j, ^join_owner_key) == field(o, ^owner_key),
      join: q in ^queryable, on: field(j, ^join_related_key) == field(q, ^related_key)
  end

  # Two-arity variant falls back to the association's own queryable.
  @doc false
  def assoc_query(%{queryable: queryable} = refl, values) do
    assoc_query(refl, queryable, values)
  end

  @doc false
  def assoc_query(%{join_through: join_through, join_keys: join_keys,
                    queryable: queryable, owner: owner}, query, values) do
    [{join_owner_key, owner_key}, {join_related_key, related_key}] = join_keys
    # We need to go all the way using owner and query so
    # Ecto has all the information necessary to cast fields.
    # This also helps validate the associated schema exists all the way.
    from q in (query || queryable),
      join: o in ^owner, on: field(o, ^owner_key) in ^values,
      join: j in ^join_through, on: field(j, ^join_owner_key) == field(o, ^owner_key),
      where: field(j, ^join_related_key) == field(q, ^related_key)
  end

  # No FK copying: linkage lives in the join table, written on insert.
  @doc false
  def build(refl, _, attributes) do
    refl
    |> build()
    |> struct(attributes)
  end

  @doc false
  def preload_info(%{join_keys: [{_, owner_key}, {_, _}]} = refl) do
    # -2: presumably addresses the join source counting from the end of the
    # preload query's sources — TODO confirm against Ecto.Repo.Preloader.
    {:assoc, refl, {-2, owner_key}}
  end

  # Replacing with :delete re-dispatches as a delete of the join row.
  @doc false
  def on_repo_change(%{on_replace: :delete} = refl, parent_changeset,
                     %{action: :replace}  = changeset, opts) do
    on_repo_change(refl, parent_changeset, %{changeset | action: :delete}, opts)
  end

  # Deleting a child removes only the join-table row, never the related row.
  def on_repo_change(%{join_keys: join_keys, join_through: join_through},
                     %{repo: repo, data: owner}, %{action: :delete, data: related}, opts) do
    [{join_owner_key, owner_key}, {join_related_key, related_key}] = join_keys
    adapter = repo.__adapter__()
    # Dump key values through the adapter so comparisons use DB-level values.
    owner_value = dump! :delete, join_through, owner, owner_key, adapter
    related_value = dump! :delete, join_through, related, related_key, adapter
    query =
      from j in join_through,
        where: field(j, ^join_owner_key) == ^owner_value and
               field(j, ^join_related_key) == ^related_value
    # Run the join-table delete in the owner's prefix.
    {prefix, _} = owner.__meta__.source
    query = Map.put(query, :prefix, prefix)
    repo.delete_all query, opts
    {:ok, nil}
  end

  # Insert/update of the related row, plus a join-table row when the pair is
  # not already linked.
  def on_repo_change(%{field: field, join_through: join_through, join_keys: join_keys},
                     %{repo: repo, data: owner, constraints: constraints} = parent_changeset,
                     %{action: action} = changeset, opts) do
    changeset = Ecto.Association.update_parent_prefix(changeset, owner)
    case apply(repo, action, [changeset, opts]) do
      {:ok, related} ->
        [{join_owner_key, owner_key}, {join_related_key, related_key}] = join_keys
        if insert_join?(parent_changeset, changeset, field, related_key) do
          adapter = repo.__adapter__()
          owner_value = dump! :insert, join_through, owner, owner_key, adapter
          related_value = dump! :insert, join_through, related, related_key, adapter
          data = [{join_owner_key, owner_value}, {join_related_key, related_value}]
          case insert_join(repo, join_through, data, opts, constraints) do
            {:error, join_changeset} ->
              # Surface the join-table errors on the child changeset.
              {:error, %{changeset | errors: join_changeset.errors ++ changeset.errors,
                                     valid?: join_changeset.valid? and changeset.valid?}}
            _ ->
              {:ok, related}
          end
        else
          {:ok, related}
        end
      {:error, changeset} ->
        {:error, changeset}
    end
  end

  # A join row is needed when either side is freshly inserted, or when the
  # related row is not yet among the owner's currently loaded children.
  defp insert_join?(%{action: :insert}, _, _field, _related_key), do: true
  defp insert_join?(_, %{action: :insert}, _field, _related_key), do: true
  defp insert_join?(%{data: owner}, %{data: related}, field, related_key) do
    current_key = Map.fetch!(related, related_key)
    not Enum.any? Map.fetch!(owner, field), fn child ->
      Map.get(child, related_key) == current_key
    end
  end

  # Table name: raw insert_all; schema: go through a changeset so the
  # parent's constraints apply.
  defp insert_join(repo, join_through, data, opts, _constraints) when is_binary(join_through) do
    repo.insert_all join_through, [data], opts
  end
  defp insert_join(repo, join_through, data, opts, constraints) when is_atom(join_through) do
    struct(join_through, data)
    |> Ecto.Changeset.change
    |> Map.put(:constraints, constraints)
    |> repo.insert(opts)
  end

  # Keys used for join rows must be present; a nil key is a hard error.
  defp field!(op, struct, field) do
    Map.get(struct, field) || raise "could not #{op} join entry because `#{field}` is nil in #{inspect struct}"
  end

  # Table-backed join: dump the value through the adapter using the source
  # schema's type, since the join table itself has no schema.
  defp dump!(action, join_through, struct, field, adapter) when is_binary(join_through) do
    value = field!(action, struct, field)
    type = struct.__struct__.__schema__(:type, field)
    case Ecto.Type.adapter_dump(adapter, type, value) do
      {:ok, value} ->
        value
      :error ->
        raise Ecto.ChangeError,
          message: "value `#{inspect value}` for `#{inspect struct.__struct__}.#{field}` " <>
                   "in `#{action}` does not match type #{inspect type}"
    end
  end
  # Schema-backed join: the schema will cast; just fetch the raw value.
  defp dump!(action, join_through, struct, field, _) when is_atom(join_through) do
    field!(action, struct, field)
  end

  ## Relation callbacks
  @behaviour Ecto.Changeset.Relation

  # Builds a bare related struct with defaults and the queryable's source merged in.
  @doc false
  def build(%{related: related, queryable: queryable, defaults: defaults}) do
    related
    |> struct(defaults)
    |> Ecto.Association.merge_source(queryable)
  end

  ## On delete callbacks

  # :delete_all — removes every join-table row pointing at the parent.
  # `type/2` casts the runtime value with the owner key's schema type.
  @doc false
  def delete_all(%{join_through: join_through, join_keys: join_keys, owner: owner}, parent, repo, opts) do
    [{join_owner_key, owner_key}, {_, _}] = join_keys
    if value = Map.get(parent, owner_key) do
      owner_type = owner.__schema__(:type, owner_key)
      query = from j in join_through, where: field(j, ^join_owner_key) == type(^value, ^owner_type)
      repo.delete_all query, opts
    end
  end
end
| 34.913462 | 112 | 0.659212 |
e84cf7df3ca7e9633e6409559eace10e837076ac | 197 | exs | Elixir | priv/repo/migrations/20190118182122_add_is_urgent_to_posts.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 928 | 2018-04-03T16:18:11.000Z | 2019-09-09T17:59:55.000Z | priv/repo/migrations/20190118182122_add_is_urgent_to_posts.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 74 | 2018-04-03T00:46:50.000Z | 2019-03-10T18:57:27.000Z | priv/repo/migrations/20190118182122_add_is_urgent_to_posts.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
defmodule Level.Repo.Migrations.AddIsUrgentToPosts do
  use Ecto.Migration

  # Adds a non-nullable `is_urgent` flag to posts. Defaulting to `false`
  # keeps existing rows valid when the NOT NULL constraint is applied.
  def change do
    alter table(:posts) do
      add :is_urgent, :boolean, null: false, default: false
    end
  end
end
| 19.7 | 59 | 0.715736 |
e84d01cd307c75d3b7567fa479d69acb51396263 | 11,716 | exs | Elixir | exercises/concept/rpn-calculator-inspection/test/rpn_calculator_inspection_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | 1 | 2021-06-09T06:57:02.000Z | 2021-06-09T06:57:02.000Z | exercises/concept/rpn-calculator-inspection/test/rpn_calculator_inspection_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | 6 | 2022-03-04T13:05:25.000Z | 2022-03-30T18:36:49.000Z | exercises/concept/rpn-calculator-inspection/test/rpn_calculator_inspection_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
defmodule RPNCalculatorInspectionTest do
use ExUnit.Case, async: false
defmodule RPNCalculator do
def unsafe_division(input) do
[_, a, b] = Regex.run(~r/^(\d*) (\d*) \/$/, input)
String.to_integer(a) / String.to_integer(b)
end
end
  # Drains the test process inbox: keeps receiving until no message arrives
  # for 50ms, so leftovers from a previous test cannot leak into assertions.
  defp flush_messages() do
    receive do
      _ ->
        flush_messages()
    after
      50 ->
        nil
    end
  end
  # Runs once before the whole suite.
  setup_all do
    # turning off the logger to avoid error logs spamming the output
    # when the functions are expected to crash
    # (Logger.configure/1 returns :ok, which is a valid setup_all return)
    Logger.configure(level: :none)
  end
  # Runs before every test; returns an empty context map.
  setup do
    # just in case, we clear the test process inbox before each test
    flush_messages()
    %{}
  end
  describe "start_reliability_check" do
    @task_id 1
    # The function must return %{pid: pid, input: input}.
    test "returns a map with test data" do
      calculator = fn _ -> 0 end
      input = "1 2 +"
      result = RPNCalculatorInspection.start_reliability_check(calculator, input)
      assert is_map(result)
      assert is_pid(result.pid)
      assert result.input == input
    end
    @task_id 1
    # Trapping exits turns the linked process's exit into a message instead
    # of killing the test process, so we can assert on it.
    test "starts a linked process" do
      old_value = Process.flag(:trap_exit, true)
      calculator = fn _ -> :timer.sleep(50) end
      input = "1 2 +"
      %{pid: pid} = RPNCalculatorInspection.start_reliability_check(calculator, input)
      # The spawned process must be linked to the caller.
      assert pid in Keyword.get(Process.info(self()), :links)
      assert_receive {:EXIT, ^pid, :normal}
      Process.flag(:trap_exit, old_value)
    end
    @task_id 1
    # The calculator sends its input back to the test process, proving the
    # spawned process actually invoked it with the right argument.
    test "the process runs the calculator function with the given input" do
      caller_process_pid = self()
      calculator = fn input -> send(caller_process_pid, input) end
      input = "7 3 +"
      RPNCalculatorInspection.start_reliability_check(calculator, input)
      assert_receive ^input
    end
  end
  describe "await_reliability_check_result" do
    # Throughout this describe the test process plays both roles: it sends
    # itself the EXIT messages the function under test is expected to receive.
    @task_id 2
    test "adds `input` => :ok to the results after a normal exit" do
      caller_process_pid = self()
      test_data = %{pid: caller_process_pid, input: "2 3 +"}
      check_results_so_far = %{"2 0 /" => :error}
      expected_result = %{"2 0 /" => :error, "2 3 +" => :ok}
      send(caller_process_pid, {:EXIT, caller_process_pid, :normal})
      assert RPNCalculatorInspection.await_reliability_check_result(
               test_data,
               check_results_so_far
             ) ==
               expected_result
    end
    @task_id 2
    # Any reason other than :normal counts as a crash -> :error.
    test "adds `input` => :error to the results after an abnormal exit" do
      caller_process_pid = self()
      test_data = %{pid: caller_process_pid, input: "3 0 /"}
      check_results_so_far = %{"1 1 +" => :ok}
      expected_result = %{"1 1 +" => :ok, "3 0 /" => :error}
      send(caller_process_pid, {:EXIT, caller_process_pid, {%ArithmeticError{}, []}})
      assert RPNCalculatorInspection.await_reliability_check_result(
               test_data,
               check_results_so_far
             ) ==
               expected_result
    end
    @task_id 2
    # The receive must give up after 100ms; the late message is sent from a
    # Task after 200ms specifically so it misses the window.
    test "adds `input` => :timeout to the results if no message arrives in 100ms" do
      caller_process_pid = self()
      test_data = %{pid: caller_process_pid, input: "24 12 /"}
      check_results_so_far = %{"3 1 +" => :ok}
      expected_result = %{"3 1 +" => :ok, "24 12 /" => :timeout}
      task =
        Task.async(fn ->
          :timer.sleep(200)
          # this message should arrive too late
          send(caller_process_pid, {:EXIT, caller_process_pid, {%ArithmeticError{}, []}})
        end)
      assert RPNCalculatorInspection.await_reliability_check_result(
               test_data,
               check_results_so_far
             ) ==
               expected_result
      Task.await(task)
    end
    @task_id 2
    # The pid in the EXIT tuple must be pattern-matched: messages from other
    # processes are skipped, not consumed (message_queue_len stays 1).
    test "normal exit messages from processes whose pids don't match stay in the inbox" do
      caller_process_pid = self()
      other_process_pid = spawn(fn -> nil end)
      test_data = %{pid: caller_process_pid, input: "5 0 /"}
      check_results_so_far = %{"5 0 +" => :ok}
      expected_result = %{"5 0 +" => :ok, "5 0 /" => :error}
      send(caller_process_pid, {:EXIT, other_process_pid, :normal})
      send(caller_process_pid, {:EXIT, caller_process_pid, {%ArithmeticError{}, []}})
      assert RPNCalculatorInspection.await_reliability_check_result(
               test_data,
               check_results_so_far
             ) ==
               expected_result
      assert Keyword.get(Process.info(self()), :message_queue_len) == 1
    end
    @task_id 2
    test "abnormal exit messages from processes whose pids don't match stay in the inbox" do
      caller_process_pid = self()
      other_process_pid = spawn(fn -> nil end)
      test_data = %{pid: caller_process_pid, input: "2 2 +"}
      check_results_so_far = %{"0 0 /" => :error}
      expected_result = %{"0 0 /" => :error, "2 2 +" => :ok}
      send(caller_process_pid, {:EXIT, other_process_pid, {%ArithmeticError{}, []}})
      send(caller_process_pid, {:EXIT, caller_process_pid, :normal})
      assert RPNCalculatorInspection.await_reliability_check_result(
               test_data,
               check_results_so_far
             ) ==
               expected_result
      assert Keyword.get(Process.info(self()), :message_queue_len) == 1
    end
    @task_id 2
    # Non-EXIT messages must be ignored by the selective receive; the three
    # unrelated messages remain queued afterwards.
    test "any other messages stay in the inbox" do
      caller_process_pid = self()
      test_data = %{pid: caller_process_pid, input: "4 2 /"}
      check_results_so_far = %{"4 0 /" => :error}
      expected_result = %{"4 0 /" => :error, "4 2 /" => :ok}
      send(caller_process_pid, {:exit, caller_process_pid, {%ArithmeticError{}, []}})
      send(caller_process_pid, {:something_else, caller_process_pid, {%ArithmeticError{}, []}})
      send(caller_process_pid, :something_else)
      send(caller_process_pid, {:EXIT, caller_process_pid, :normal})
      assert RPNCalculatorInspection.await_reliability_check_result(
               test_data,
               check_results_so_far
             ) ==
               expected_result
      assert Keyword.get(Process.info(self()), :message_queue_len) == 3
    end
    @task_id 2
    # Awaiting results must not flip :trap_exit itself — verified with the
    # flag both off and on.
    test "doesn't change the trap_exit flag of the caller process" do
      caller_process_pid = self()
      Process.flag(:trap_exit, false)
      test_data = %{pid: caller_process_pid, input: "30 3 /"}
      check_results_so_far = %{}
      expected_result = %{"30 3 /" => :ok}
      send(caller_process_pid, {:EXIT, caller_process_pid, :normal})
      assert RPNCalculatorInspection.await_reliability_check_result(
               test_data,
               check_results_so_far
             ) ==
               expected_result
      assert Keyword.get(Process.info(self()), :trap_exit) == false
      Process.flag(:trap_exit, true)
      send(caller_process_pid, {:EXIT, caller_process_pid, :normal})
      assert RPNCalculatorInspection.await_reliability_check_result(
               test_data,
               check_results_so_far
             ) ==
               expected_result
      assert Keyword.get(Process.info(self()), :trap_exit) == true
    end
  end
describe "reliability_check" do
@task_id 3
test "returns an empty map when input list empty" do
inputs = []
calculator = &RPNCalculator.unsafe_division/1
outputs = %{}
assert RPNCalculatorInspection.reliability_check(calculator, inputs) == outputs
end
@task_id 3
test "returns a map with inputs as keys and :ok as values" do
inputs = ["4 2 /", "8 2 /", "6 3 /"]
calculator = &RPNCalculator.unsafe_division/1
outputs = %{
"4 2 /" => :ok,
"8 2 /" => :ok,
"6 3 /" => :ok
}
assert RPNCalculatorInspection.reliability_check(calculator, inputs) == outputs
end
@task_id 3
test "returns a map when input list has 1000 elements" do
inputs = Enum.map(1..1000, &"#{2 * &1} #{&1} /")
calculator = &RPNCalculator.unsafe_division/1
outputs = 1..1000 |> Enum.map(&{"#{2 * &1} #{&1} /", :ok}) |> Enum.into(%{})
assert RPNCalculatorInspection.reliability_check(calculator, inputs) == outputs
end
@task_id 3
test "returns a map when input list has 1000 elements and the calculator takes 50ms for each calculation" do
inputs = Enum.map(1..1000, &"#{2 * &1} #{&1} /")
calculator = fn input -> :timer.sleep(50) && RPNCalculator.unsafe_division(input) end
outputs = 1..1000 |> Enum.map(&{"#{2 * &1} #{&1} /", :ok}) |> Enum.into(%{})
assert RPNCalculatorInspection.reliability_check(calculator, inputs) == outputs
end
@task_id 3
test "returns :error values for inputs that cause the calculator to crash" do
inputs = ["3 0 /", "22 11 /", "4 0 /"]
calculator = &RPNCalculator.unsafe_division/1
outputs = %{
"3 0 /" => :error,
"22 11 /" => :ok,
"4 0 /" => :error
}
assert RPNCalculatorInspection.reliability_check(calculator, inputs) == outputs
end
@task_id 3
test "returns a map when input list has 1000 elements and all of them crash" do
inputs = Enum.map(1..1000, &"#{2 * &1} 0 /")
calculator = &RPNCalculator.unsafe_division/1
outputs = 1..1000 |> Enum.map(&{"#{2 * &1} 0 /", :error}) |> Enum.into(%{})
assert RPNCalculatorInspection.reliability_check(calculator, inputs) == outputs
end
@task_id 3
test "restores the original value of the trap_exit flag" do
inputs = ["3 0 /", "22 11 /", "4 0 /"]
calculator = &RPNCalculator.unsafe_division/1
outputs = %{
"3 0 /" => :error,
"22 11 /" => :ok,
"4 0 /" => :error
}
Process.flag(:trap_exit, false)
assert RPNCalculatorInspection.reliability_check(calculator, inputs) == outputs
assert Keyword.get(Process.info(self()), :trap_exit) == false
Process.flag(:trap_exit, true)
assert RPNCalculatorInspection.reliability_check(calculator, inputs) == outputs
assert Keyword.get(Process.info(self()), :trap_exit) == true
end
end
describe "correctness_check" do
@task_id 4
test "returns an empty list when input list empty" do
inputs = []
calculator = &RPNCalculator.unsafe_division/1
outputs = []
assert RPNCalculatorInspection.correctness_check(calculator, inputs) == outputs
end
@task_id 4
test "returns a list of results" do
inputs = ["3 2 /", "4 2 /", "5 2 /"]
calculator = &RPNCalculator.unsafe_division/1
outputs = [1.5, 2, 2.5]
assert RPNCalculatorInspection.correctness_check(calculator, inputs) == outputs
end
@task_id 4
test "returns a list of results when input list has 1000 elements" do
inputs = Enum.map(1..1000, &"100 #{&1} /")
calculator = &RPNCalculator.unsafe_division/1
outputs = Enum.map(1..1000, &(100 / &1))
assert RPNCalculatorInspection.correctness_check(calculator, inputs) == outputs
end
@task_id 4
test "returns a list of results when input list has 1000 elements and the calculator takes 50ms for each calculation" do
inputs = Enum.map(1..1000, &"100 #{&1} /")
calculator = fn input -> :timer.sleep(50) && RPNCalculator.unsafe_division(input) end
outputs = Enum.map(1..1000, &(100 / &1))
assert RPNCalculatorInspection.correctness_check(calculator, inputs) == outputs
end
@task_id 4
test "awaits a single task for 100ms" do
inputs = ["1 1 /1"]
calculator = fn _ -> :timer.sleep(500) end
Process.flag(:trap_exit, true)
pid = spawn_link(fn -> RPNCalculatorInspection.correctness_check(calculator, inputs) end)
assert_receive {:EXIT, ^pid, {:timeout, {Task, :await, [_task, 100]}}}, 150
Process.flag(:trap_exit, false)
end
end
end
| 33.378917 | 124 | 0.620775 |
e84d0d32f919e7079b6daa1680a7a4e3efb21ee5 | 1,043 | exs | Elixir | mix.exs | jwarlander/hedwig_hipchat | 375dee4eab81bdf7e6167552503bfe2ba331c93f | [
"MIT"
] | 7 | 2016-03-15T12:29:51.000Z | 2017-03-15T16:12:06.000Z | mix.exs | jwarlander/hedwig_hipchat | 375dee4eab81bdf7e6167552503bfe2ba331c93f | [
"MIT"
] | 6 | 2016-03-21T21:23:27.000Z | 2017-02-12T23:03:48.000Z | mix.exs | jwarlander/hedwig_hipchat | 375dee4eab81bdf7e6167552503bfe2ba331c93f | [
"MIT"
] | 6 | 2016-04-05T14:51:16.000Z | 2018-05-11T23:48:07.000Z | defmodule HedwigHipChat.Mixfile do
  use Mix.Project

  # Package version and canonical repository URL, reused by project/0 and package/0.
  @version "0.9.4"
  @source_url "https://github.com/jwarlander/hedwig_hipchat"
def project do
[app: :hedwig_hipchat,
name: "Hedwig HipChat",
source_url: @source_url,
version: @version,
elixir: "~> 1.1",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
package: package,
description: "A HipChat adapter for Hedwig",
deps: deps,
docs: [extras: ["README.md"], main: "readme"]]
end
def application do
[applications: [:logger, :hedwig, :romeo]]
end
  # Dependency list for Mix.
  # NOTE(review): `exml` is overridden to a git fork and `hedwig` tracks an
  # upstream default branch — unpinned git deps make builds non-reproducible;
  # consider pinning a tag or commit.
  defp deps do
    [{:exml, github: "paulgray/exml", override: true},
     {:hedwig, github: "hedwig-im/hedwig"},
     {:romeo, "~> 0.4"},
     {:earmark, "~> 0.1", only: :dev},
     {:ex_doc, "~> 0.11", only: :dev}]
  end
  # Hex package metadata: published files, maintainers, license, and links.
  defp package do
    [files: ["lib", "priv", "mix.exs", "README*", "readme*", "LICENSE*", "license*"],
     maintainers: ["Johan Wärlander"],
     licenses: ["MIT"],
     links: %{
       "GitHub" => @source_url
     }]
  end
end
| 24.833333 | 85 | 0.581975 |
e84d4e24814d3cf27f625e5c0fb4eb6a218389f6 | 8,895 | exs | Elixir | lib/eex/test/eex_test.exs | liveforeverx/elixir | cf3cf0bd5443b59206e5733602244bc3543f0a53 | [
"Apache-2.0"
] | null | null | null | lib/eex/test/eex_test.exs | liveforeverx/elixir | cf3cf0bd5443b59206e5733602244bc3543f0a53 | [
"Apache-2.0"
] | null | null | null | lib/eex/test/eex_test.exs | liveforeverx/elixir | cf3cf0bd5443b59206e5733602244bc3543f0a53 | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
require EEx
# Fixture module: EEx templates compiled into functions at module compile time.
#
# NOTE(review): EExTest's "defined from file do not affect backtrace" test
# asserts the absolute source line numbers of before_compile/after_compile/
# unknown. This module's layout is therefore line-sensitive, and whitespace
# appears to have been mangled in extraction — verify the exact blank-line
# layout against the upstream elixir-lang eex_test.exs before reformatting.
defmodule EExTest.Compiled do
  def before_compile do
    fill_in_stacktrace
    {__ENV__.line, hd(tl(System.stacktrace))}
  end

  EEx.function_from_string :def, :string_sample, "<%= a + b %>", [:a, :b]

  filename = Path.join(__DIR__, "fixtures/eex_template_with_bindings.eex")
  EEx.function_from_file :defp, :private_file_sample, filename, [:bar]

  filename = Path.join(__DIR__, "fixtures/eex_template_with_bindings.eex")
  EEx.function_from_file :def, :public_file_sample, filename, [:bar]

  # Exposes the privately generated template function through a public API.
  def file_sample(arg), do: private_file_sample(arg)

  def after_compile do
    fill_in_stacktrace
    {__ENV__.line, hd(tl(System.stacktrace))}
  end

  # @file overrides the filename recorded in the stacktrace entry below.
  @file "unknown"
  def unknown do
    fill_in_stacktrace
    {__ENV__.line, hd(tl(System.stacktrace))}
  end

  # Raises and catches an error so System.stacktrace/0 in the callers above
  # returns frames from this module.
  defp fill_in_stacktrace do
    try do
      :erlang.error "failed"
    catch
      :error, _ -> System.stacktrace
    end
  end
end
# Helper exposing a `defclause` macro (a thin wrapper around `def`).
defmodule Clause do
  # Expands to a plain `def` with the given head and body.
  defmacro defclause(expr, block) do
    quote do
      def unquote(expr), unquote(block)
    end
  end
end
# NOTE(review): leading whitespace and blank lines appear to have been stripped
# from this chunk in extraction. The heredoc templates and expected strings
# below are whitespace-sensitive; verify them against the upstream elixir-lang
# lib/eex/test/eex_test.exs before trusting exact expected output.
defmodule EExTest do
  use ExUnit.Case, async: true

  doctest EEx
  doctest EEx.Engine
  doctest EEx.SmartEngine

  test "evaluates simple string" do
    assert_eval "foo bar", "foo bar"
  end

  test "evaluates with embedded" do
    assert_eval "foo bar", "foo <%= :bar %>"
  end

  test "evaluates with embedded and the binding" do
    assert EEx.eval_string("foo <%= bar %>", [bar: 1]) == "foo 1"
  end

  test "evaluates with embedded do end" do
    assert_eval "foo bar", "foo <%= if true do %>bar<% end %>"
  end

  test "evaluates with embedded do end and eval the expression" do
    assert_eval "foo ", "foo <%= if false do %>bar<% end %>"
  end

  test "evaluates with embedded do end and nested print expression" do
    assert_eval "foo bar", "foo <%= if true do %><%= :bar %><% end %>"
  end

  test "evaluates with embedded do end and nested expressions" do
    assert_eval "foo bar baz", "foo <%= if true do %>bar <% Process.put(:eex_text, 1) %><%= :baz %><% end %>"
    assert Process.get(:eex_text) == 1
  end

  test "evaluates with embedded middle expression" do
    assert_eval "foo bar", "foo <%= if true do %>bar<% else %>baz<% end %>"
  end

  test "evaluates with embedded middle expression and eval the expression" do
    assert_eval "foo baz", "foo <%= if false do %>bar<% else %>baz<% end %>"
  end

  test "evaluates with nested start expression" do
    assert_eval "foo bar", "foo <%= if true do %><%= if true do %>bar<% end %><% end %>"
  end

  test "evaluates with nested middle expression" do
    assert_eval "foo baz", "foo <%= if true do %><%= if false do %>bar<% else %>baz<% end %><% end %>"
  end

  test "evaluates with defined variable" do
    assert_eval "foo 1", "foo <% bar = 1 %><%= bar %>"
  end

  test "evaluates with require code" do
    assert_eval "foo 1,2,3", "foo <% require Enum, as: E %><%= E.join [1, 2, 3], \",\" %>"
  end

  test "evaluates with end of token" do
    # A lone "%>" outside of a tag is plain text.
    assert_eval "foo bar %>", "foo bar %>"
  end

  test "raises a syntax error when the token is invalid" do
    assert_raise EEx.SyntaxError, "nofile:1: missing token '%>'", fn ->
      EEx.compile_string "foo <%= bar"
    end
  end

  test "raises a syntax error when end expression is found without a start expression" do
    assert_raise EEx.SyntaxError, "nofile:1: unexpected token ' end '", fn ->
      EEx.compile_string "foo <% end %>"
    end
  end

  test "raises a syntax error when start expression is found without an end expression" do
    assert_raise EEx.SyntaxError, "nofile:2: unexpected end of string, expected a closing '<% end %>'", fn ->
      EEx.compile_string "foo\n<% if true do %>"
    end
  end

  test "raises a syntax error when nested end expression is found without a start expression" do
    assert_raise EEx.SyntaxError, "nofile:1: unexpected token ' end '", fn ->
      EEx.compile_string "foo <% if true do %><% end %><% end %>"
    end
  end

  # __ENV__.line inside a template is relative to the template string itself.
  test "respects line numbers" do
    expected = """
    foo
    2
    """

    string = """
    foo
    <%= __ENV__.line %>
    """

    assert_eval expected, string
  end

  test "respects line numbers inside nested expressions" do
    expected = """
    foo
    3
    5
    """

    string = """
    foo
    <%= if true do %>
    <%= __ENV__.line %>
    <% end %>
    <%= __ENV__.line %>
    """

    assert_eval expected, string
  end

  test "respects line numbers inside start expression" do
    expected = """
    foo
    true
    5
    """

    string = """
    foo
    <%= if __ENV__.line == 2 do %>
    <%= true %>
    <% end %>
    <%= __ENV__.line %>
    """

    assert_eval expected, string
  end

  test "respects line numbers inside middle expression with ->" do
    expected = """
    foo
    true
    7
    """

    string = """
    foo
    <%= cond do %>
    <% false -> %> false
    <% __ENV__.line == 4 -> %>
    <%= true %>
    <% end %>
    <%= __ENV__.line %>
    """

    assert_eval expected, string
  end

  test "respects line number inside middle expressions with keywords" do
    expected = """
    foo
    5
    7
    """

    string = """
    foo
    <%= if false do %>
    <%= __ENV__.line %>
    <% else %>
    <%= __ENV__.line %>
    <% end %>
    <%= __ENV__.line %>
    """

    assert_eval expected, string
  end

  test "properly handle functions" do
    expected = """
    Number 1
    Number 2
    Number 3
    """

    string = """
    <%= Enum.map [1, 2, 3], fn x -> %>
    Number <%= x %>
    <% end %>
    """

    assert_eval expected, string
  end

  test "do not consider already finished functions" do
    expected = """
    foo
    true
    """

    string = """
    foo
    <%= cond do %>
    <% false -> %> false
    <% fn -> 1 end -> %>
    <%= true %>
    <% end %>
    """

    assert_eval expected, string
  end

  test "evaluates nested do expressions" do
    string = """
    <% y = ["a", "b", "c"] %>
    <%= cond do %>
    <% "a" in y -> %>
    Good
    <% true -> %>
    <% if true do %>true<% else %>false<% end %>
    Bad
    <% end %>
    """

    assert_eval "\n\n Good\n \n", string
  end

  test "for comprehensions" do
    string = """
    <%= for _name <- packages || [] do %>
    <% end %>
    <%= all || :done %>
    """

    assert_eval "\ndone\n", string, packages: nil, all: nil
  end

  test "unicode" do
    template = """
    • <%= "•" %> •
    <%= "Jößé Vâlìm" %> Jößé Vâlìm
    """

    result = EEx.eval_string(template)
    assert result == " • • •\n Jößé Vâlìm Jößé Vâlìm\n"
  end

  test "trim mode" do
    # trim: true strips whitespace around tags that sit alone on a line.
    string = "<%= 123 %> \n456\n  <%= 789 %>"
    expected = "123456\n789"
    assert_eval expected, string, [], trim: true
  end

  test "evaluates the source from a given file" do
    filename = Path.join(__DIR__, "fixtures/eex_template.eex")
    result = EEx.eval_file(filename)
    assert result == "foo bar.\n"
  end

  test "evaluates the source from a given file with bindings" do
    filename = Path.join(__DIR__, "fixtures/eex_template_with_bindings.eex")
    result = EEx.eval_file(filename, [bar: 1])
    assert result == "foo 1\n"
  end

  test "raises an Exception when there's an error with the given file" do
    assert_raise File.Error, "could not read file non-existent.eex: no such file or directory", fn ->
      filename = "non-existent.eex"
      EEx.compile_file(filename)
    end
  end

  test "sets external resource attribute" do
    assert EExTest.Compiled.__info__(:attributes)[:external_resource] ==
           [Path.join(__DIR__, "fixtures/eex_template_with_bindings.eex")]
  end

  test "defined from string" do
    assert EExTest.Compiled.string_sample(1, 2) == "3"
  end

  test "defined from file" do
    assert EExTest.Compiled.file_sample(1) == "foo 1\n"
    assert EExTest.Compiled.public_file_sample(1) == "foo 1\n"
  end

  # The asserted line numbers here are absolute lines of EExTest.Compiled.
  test "defined from file do not affect backtrace" do
    assert EExTest.Compiled.before_compile ==
           {8,
             {EExTest.Compiled,
              :before_compile,
              0,
              [file: to_char_list(Path.relative_to_cwd(__ENV__.file)), line: 7]
             }
           }

    assert EExTest.Compiled.after_compile ==
           {23,
             {EExTest.Compiled,
              :after_compile,
              0,
              [file: to_char_list(Path.relative_to_cwd(__ENV__.file)), line: 22]
             }
           }

    assert EExTest.Compiled.unknown ==
           {29,
             {EExTest.Compiled,
              :unknown,
              0,
              [file: 'unknown', line: 28]
             }
           }
  end

  # Minimal custom engine used to check that handle_body/1 is invoked.
  defmodule TestEngine do
    @behaviour EEx.Engine

    def handle_body(body) do
      {:wrapped, body}
    end

    def handle_text(buffer, text) do
      EEx.Engine.handle_text(buffer, text)
    end

    def handle_expr(buffer, mark, expr) do
      EEx.Engine.handle_expr(buffer, mark, expr)
    end
  end

  test "calls handle_body" do
    assert {:wrapped, "foo"} = EEx.eval_string("foo", [], engine: TestEngine)
  end

  # Evaluates `actual` with the plain EEx.Engine and compares to `expected`.
  defp assert_eval(expected, actual, binding \\ [], opts \\ []) do
    opts = Enum.into [file: __ENV__.file, engine: EEx.Engine], opts
    result = EEx.eval_string(actual, binding, opts)
    assert result == expected
  end
end
| 22.126866 | 109 | 0.617538 |
e84d8360603167ac61afbb6a61e2b8696788fbaf | 1,633 | ex | Elixir | clients/network_security/lib/google_api/network_security/v1/model/google_iam_v1_test_iam_permissions_response.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/network_security/lib/google_api/network_security/v1/model/google_iam_v1_test_iam_permissions_response.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/network_security/lib/google_api/network_security/v1/model/google_iam_v1_test_iam_permissions_response.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.NetworkSecurity.V1.Model.GoogleIamV1TestIamPermissionsResponse do
  @moduledoc """
  Response message for `TestIamPermissions` method.

  ## Attributes

  *   `permissions` (*type:* `list(String.t)`, *default:* `nil`) - A subset of `TestPermissionsRequest.permissions` that the caller is allowed.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :permissions => list(String.t()) | nil
        }

  # Auto-generated field definition (see the generator note in the file header).
  field(:permissions, type: :list)
end
# Delegates Poison decoding to the generated model's decode/2.
defimpl Poison.Decoder,
  for: GoogleApi.NetworkSecurity.V1.Model.GoogleIamV1TestIamPermissionsResponse do
  def decode(value, options) do
    GoogleApi.NetworkSecurity.V1.Model.GoogleIamV1TestIamPermissionsResponse.decode(
      value,
      options
    )
  end
end
# Delegates Poison encoding to the shared Gax model encoder.
defimpl Poison.Encoder,
  for: GoogleApi.NetworkSecurity.V1.Model.GoogleIamV1TestIamPermissionsResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
e84d9ae088695f1c8ab4ad849c220325dc931a6c | 5,635 | exs | Elixir | test/distance_test.exs | safwank/Statix | ca2abe476c91172d05488f8a7703545fc9563c6f | [
"MIT"
] | null | null | null | test/distance_test.exs | safwank/Statix | ca2abe476c91172d05488f8a7703545fc9563c6f | [
"MIT"
] | null | null | null | test/distance_test.exs | safwank/Statix | ca2abe476c91172d05488f8a7703545fc9563c6f | [
"MIT"
] | null | null | null | defmodule Numerix.DistanceTest do
use ExUnit.Case, async: true
use ExUnitProperties
import ListHelper
alias Numerix.{Correlation, Distance}
describe "mse/2" do
test "is correct for a specific example" do
vector1 = [12, 15, 20, 22, 24]
vector2 = [13, 17, 18, 20, 24]
assert Distance.mse(vector1, vector2) == 2.6
end
property "is 0 when the vectors are equal" do
check all(xs <- list_of(float(), min_length: 1)) do
assert Distance.mse(xs, xs) == 0
end
end
property "is not 0 when the vectors are different" do
check all(
length <- integer(1..50),
xs <- list_of(integer(), length: length),
ys <- list_of(integer(), length: length),
xs != ys
) do
assert Distance.mse(xs, ys) != 0
end
end
end
describe "rmse/2" do
test "is correct for a specific example" do
vector1 = [7, 10, 12, 10, 10, 8, 7, 8, 11, 13, 10, 8]
vector2 = [6, 10, 14, 16, 7, 5, 5, 13, 12, 13, 8, 5]
assert Distance.rmse(vector1, vector2) == 2.9154759474226504
end
property "is 0 when the vectors are equal" do
check all(xs <- list_of(float(), min_length: 1)) do
assert Distance.rmse(xs, xs) == 0
end
end
property "is not 0 when the vectors are different" do
check all(
length <- integer(1..50),
xs <- list_of(integer(), length: length),
ys <- list_of(integer(), length: length),
xs != ys
) do
assert Distance.rmse(xs, ys) != 0
end
end
end
describe "pearson/2" do
test "is nil when any vector is empty" do
refute Distance.pearson([], [1])
refute Distance.pearson([2], [])
refute Distance.pearson([], [])
end
property "is the inverse of its correlation" do
check all(
length <- integer(1..50),
xs <- list_of(integer(), length: length),
ys <- list_of(integer(), length: length),
xs != ys
) do
assert Distance.pearson(xs, ys) == 1.0 - Correlation.pearson(xs, ys)
end
end
property "is between 0 and 2" do
check all(
length <- integer(1..50),
xs <- list_of(integer(), length: length),
ys <- list_of(integer(), length: length),
xs != ys
) do
assert Distance.pearson(xs, ys) |> between?(0, 2)
end
end
end
describe "minkowski/2" do
test "is 0 when any vector is empty" do
assert Distance.minkowski([], [1]) == 0
assert Distance.minkowski([2], []) == 0
assert Distance.minkowski([], []) == 0
end
property "is 0 when the vectors are equal" do
check all(xs <- list_of(float(), min_length: 1)) do
assert Distance.minkowski(xs, xs) == 0
end
end
test "is correct for a specific dataset when using the default lambda" do
vector1 = [1, 3, 5, 6, 8, 9]
vector2 = [2, 5, 6, 6, 7, 7]
assert Distance.minkowski(vector1, vector2) == 2.6684016487219897
end
test "is correct for a specific dataset when using a different lambda" do
vector1 = [1, 3, 5, 6, 8, 9]
vector2 = [2, 5, 6, 6, 7, 7]
lambda = 5
assert Distance.minkowski(vector1, vector2, lambda) == 2.3185419629968713
end
end
describe "euclidean/2" do
test "is 0 when any vector is empty" do
assert Distance.euclidean([], [1]) == 0
assert Distance.euclidean([2], []) == 0
assert Distance.euclidean([], []) == 0
end
property "is 0 when the vectors are equal" do
check all(xs <- list_of(float(), min_length: 1)) do
assert Distance.euclidean(xs, xs) == 0
end
end
test "is correct for a specific dataset" do
vector1 = [1, 3, 5, 6, 8, 9, 6, 4, 3, 2]
vector2 = [2, 5, 6, 6, 7, 7, 5, 3, 1, 1]
assert Distance.euclidean(vector1, vector2) == 4.2426406871196605
end
end
describe "manhattan/2" do
test "is 0 when any vector is empty" do
assert Distance.manhattan([], [1]) == 0
assert Distance.manhattan([2], []) == 0
assert Distance.manhattan([], []) == 0
end
property "is 0 when the vectors are equal" do
check all(xs <- list_of(float(), min_length: 1)) do
assert Distance.manhattan(xs, xs) == 0
end
end
test "is correct for a specific dataset" do
vector1 = [1, 3, 5, 6, 8, 9, 6, 4, 3, 2]
vector2 = [2, 5, 6, 6, 7, 7, 5, 3, 1, 1]
assert Distance.manhattan(vector1, vector2) == 12
end
end
describe "jaccard/2" do
test "is 0 when both vectors are empty" do
assert Distance.jaccard([], []) == 0.0
end
test "is nil when any one vector is empty" do
refute Distance.jaccard([], [1])
refute Distance.jaccard([2], [])
end
test "is correct for specific examples" do
[
{[0, 0.5], [0.5, 1], 1.0},
{[4.5, 1], [4, 2], 1.0},
{[1, 1, 1], [1, 1, 1], 0},
{[2.5, 3.5, 3.0, 3.5, 2.5, 3.0], [3.0, 3.5, 1.5, 5.0, 3.5, 3.0], 0.6666666666666667},
{[1, 3, 5, 6, 8, 9, 6, 4, 3, 2], [2, 5, 6, 6, 7, 7, 5, 3, 1, 1], 0.9}
]
|> Enum.each(fn {vector1, vector2, distance} ->
assert Distance.jaccard(vector1, vector2) == distance
end)
end
property "is between 0 and 1" do
check all(
xs <- list_of(integer(0..255), min_length: 1),
ys <- list_of(integer(0..255), min_length: 1)
) do
assert Distance.jaccard(xs, ys) |> between?(0, 1)
end
end
end
end
| 29.196891 | 93 | 0.547116 |
e84db5471793f05c7a93bcb8c278dbfbbca4cafe | 260 | ex | Elixir | lib/link_previewer/helpers.ex | salamtime2016/link-preview | eccc640c4b6d0b6592b47fb2cd9ab1ed6e190784 | [
"MIT"
] | 4 | 2018-11-07T13:47:13.000Z | 2021-11-01T10:12:15.000Z | lib/link_previewer/helpers.ex | salamtime2016/link-preview | eccc640c4b6d0b6592b47fb2cd9ab1ed6e190784 | [
"MIT"
] | null | null | null | lib/link_previewer/helpers.ex | salamtime2016/link-preview | eccc640c4b6d0b6592b47fb2cd9ab1ed6e190784 | [
"MIT"
] | 2 | 2019-10-15T20:27:53.000Z | 2021-08-20T04:32:35.000Z | defmodule LinkPreviewer.Helpers do
@moduledoc false
@spec normalize_link(String.t(), String.t()) :: String.t() | nil
def normalize_link(_base_link, nil), do: nil
def normalize_link(base_link, link), do: base_link |> URI.merge(link) |> to_string()
end
| 32.5 | 86 | 0.719231 |
e84dbdced40d2a0a469ef9a74baa9b7fc6a55ab6 | 586 | ex | Elixir | lib/util/crypto.ex | Zensavona/naughtygram | b10cc3fc149ca83ca9a0b794927ce3cd87717493 | [
"MIT"
] | 14 | 2016-04-28T19:18:09.000Z | 2021-09-04T11:38:15.000Z | lib/util/crypto.ex | Zensavona/naughtygram | b10cc3fc149ca83ca9a0b794927ce3cd87717493 | [
"MIT"
] | 1 | 2016-02-19T08:57:52.000Z | 2016-02-19T10:00:08.000Z | lib/util/crypto.ex | Zensavona/naughtygram | b10cc3fc149ca83ca9a0b794927ce3cd87717493 | [
"MIT"
] | 4 | 2015-10-26T20:55:18.000Z | 2020-07-29T21:02:09.000Z | defmodule Naughtygram.Crypto do
@moduledoc """
Handles some very basic crypto functionality
"""
@key "b4a23f5e39b5929e0666ac5de94c89d1618a2916"
@doc """
Takes some data which is to be sent as the body, and returns a signed body
which is ready to send to the server.
"""
def signed_body(data) do
signature = sign(data)
data = URI.encode(data, &(URI.char_unreserved?/1))
"ig_sig_key_version=4&signed_body=#{signature}.#{data}" # &src=single&d=0
end
defp sign(data) do
:crypto.hmac(:sha256, @key, data) |> Base.encode16 |> String.downcase
end
end
| 27.904762 | 77 | 0.696246 |
e84dc6524a5f2fae4a5531d6c5b362e09f585eb2 | 1,765 | ex | Elixir | lib/remote_retro_web.ex | birkand/remote_retro | f7fef13aa8779cd4a8a9fd73d2fbbd5ae537634c | [
"MIT"
] | null | null | null | lib/remote_retro_web.ex | birkand/remote_retro | f7fef13aa8779cd4a8a9fd73d2fbbd5ae537634c | [
"MIT"
] | null | null | null | lib/remote_retro_web.ex | birkand/remote_retro | f7fef13aa8779cd4a8a9fd73d2fbbd5ae537634c | [
"MIT"
] | null | null | null | defmodule RemoteRetroWeb do
@moduledoc """
A module that keeps using definitions for controllers,
views and so on.
This can be used in your application as:
use RemoteRetroWeb, :controller
use RemoteRetroWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below.
"""
def model do
quote do
use Ecto.Schema
import Ecto
import Ecto.Changeset
import Ecto.Query
end
end
def controller do
quote do
use Phoenix.Controller, namespace: RemoteRetroWeb
alias RemoteRetro.Repo
import Ecto
import Ecto.Query
alias RemoteRetroWeb.Router.Helpers
alias RemoteRetroWeb.Gettext
end
end
def view do
quote do
use Phoenix.View, root: "lib/remote_retro_web/templates",
namespace: RemoteRetroWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import RemoteRetroWeb.Router.Helpers
import RemoteRetroWeb.ErrorHelpers
import RemoteRetroWeb.Gettext
end
end
def router do
quote do
use Phoenix.Router
end
end
def channel do
quote do
use Phoenix.Channel
alias RemoteRetro.Repo
import Ecto
import Ecto.Query
alias RemoteRetroWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 21.26506 | 88 | 0.67762 |
e84dcc22c90a4e76e280275452d05a273d7eae90 | 143 | exs | Elixir | test/plexy/logger/simple_redactor_test.exs | heroku/plexy | bfdd4a539c2c0d8f80101cca8061a945a2cd6159 | [
"MIT"
] | 153 | 2016-11-07T15:11:52.000Z | 2021-11-16T23:20:52.000Z | test/plexy/logger/simple_redactor_test.exs | heroku/plexy | bfdd4a539c2c0d8f80101cca8061a945a2cd6159 | [
"MIT"
] | 21 | 2016-11-07T14:55:09.000Z | 2022-03-30T18:31:31.000Z | test/plexy/logger/simple_redactor_test.exs | heroku/plexy | bfdd4a539c2c0d8f80101cca8061a945a2cd6159 | [
"MIT"
] | 5 | 2018-07-09T05:02:53.000Z | 2019-11-01T01:08:09.000Z | defmodule Plexy.Logger.SimpleRedactorTest do
use ExUnit.Case, async: true
alias Plexy.Logger.SimpleRedactor
doctest SimpleRedactor
end
| 17.875 | 44 | 0.811189 |
e84dd0d5fc75996f405ab79c368fba06ab2c87fb | 1,035 | ex | Elixir | apps/admin_api/lib/admin_api/global/controllers/status_controller.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/admin_api/lib/admin_api/global/controllers/status_controller.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/admin_api/lib/admin_api/global/controllers/status_controller.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule AdminAPI.StatusController do
  use AdminAPI, :controller

  # Health/status endpoint: reports the wallet version and the API versions
  # this server understands.
  def status(conn, _attrs) do
    payload = %{
      success: true,
      api_versions: api_versions(),
      ewallet_version: Application.get_env(:ewallet, :version)
    }

    json(conn, payload)
  end

  # Builds the list of supported API versions from application configuration.
  defp api_versions do
    for {media_type, config} <- Application.get_env(:admin_api, :api_versions) do
      %{name: config[:name], media_type: media_type}
    end
  end
end
e84dd74294b1326c811af0b3d91c7c0b8c9e5b3f | 330 | ex | Elixir | lib/hippo_game_live_web/controllers/page_controller.ex | manojsamanta/typing-race | 6bec298b3470145ba9ae6b23ee8c5e2a6be7328b | [
"MIT"
] | null | null | null | lib/hippo_game_live_web/controllers/page_controller.ex | manojsamanta/typing-race | 6bec298b3470145ba9ae6b23ee8c5e2a6be7328b | [
"MIT"
] | 2 | 2021-03-09T16:25:19.000Z | 2021-05-10T11:12:55.000Z | lib/hippo_game_live_web/controllers/page_controller.ex | manojsamanta/typing-race | 6bec298b3470145ba9ae6b23ee8c5e2a6be7328b | [
"MIT"
] | 1 | 2021-07-04T09:01:31.000Z | 2021-07-04T09:01:31.000Z | defmodule HippoGameLiveWeb.PageController do
use HippoGameLiveWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
def game(conn, _params) do
Phoenix.LiveView.Controller.live_render(
conn,
HippoGameLiveWeb.HippoGameLive,
session: %{cookies: conn.cookies}
)
end
end
| 20.625 | 44 | 0.706061 |
e84e163862d897637dfa49806039198d2b8d44f1 | 14,288 | exs | Elixir | test/binance_test.exs | alfaridi/binance.ex | 50acf31eae15b377d7d2e4fc8d0dfa74e5052433 | [
"MIT"
] | null | null | null | test/binance_test.exs | alfaridi/binance.ex | 50acf31eae15b377d7d2e4fc8d0dfa74e5052433 | [
"MIT"
] | null | null | null | test/binance_test.exs | alfaridi/binance.ex | 50acf31eae15b377d7d2e4fc8d0dfa74e5052433 | [
"MIT"
] | null | null | null | defmodule BinanceTest do
use ExUnit.Case
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
doctest Binance
  # Starts the HTTPoison application once before the suite runs; ExVCR then
  # replays the recorded HTTP interactions on top of it.
  setup_all do
    HTTPoison.start()
  end
test "ping returns an empty map" do
use_cassette "ping_ok" do
assert Binance.ping() == {:ok, %{}}
end
end
test "get_server_time success return an ok, time tuple" do
use_cassette "get_server_time_ok" do
assert Binance.get_server_time() == {:ok, 1_521_781_361_467}
end
end
  # Replays "get_exchange_info_ok" and checks the decoded %Binance.ExchangeInfo{}:
  # rate limits and exchange filters exactly as recorded, plus the full raw map
  # for the first symbol (ETHBTC) including its price/lot/notional filters.
  test "get_exchange_info success returns the trading rules and symbol information" do
    use_cassette "get_exchange_info_ok" do
      assert {:ok, %Binance.ExchangeInfo{} = info} = Binance.get_exchange_info()
      assert info.timezone == "UTC"
      assert info.server_time != nil
      assert info.rate_limits == [
               %{"interval" => "MINUTE", "limit" => 1200, "rateLimitType" => "REQUESTS"},
               %{"interval" => "SECOND", "limit" => 10, "rateLimitType" => "ORDERS"},
               %{"interval" => "DAY", "limit" => 100_000, "rateLimitType" => "ORDERS"}
             ]
      assert info.exchange_filters == []
      assert [symbol | _] = info.symbols
      # Symbols are kept as raw string-keyed maps (not structs).
      assert symbol == %{
               "baseAsset" => "ETH",
               "baseAssetPrecision" => 8,
               "filters" => [
                 %{
                   "filterType" => "PRICE_FILTER",
                   "maxPrice" => "100000.00000000",
                   "minPrice" => "0.00000100",
                   "tickSize" => "0.00000100"
                 },
                 %{
                   "filterType" => "LOT_SIZE",
                   "maxQty" => "100000.00000000",
                   "minQty" => "0.00100000",
                   "stepSize" => "0.00100000"
                 },
                 %{"filterType" => "MIN_NOTIONAL", "minNotional" => "0.00100000"}
               ],
               "icebergAllowed" => false,
               "orderTypes" => [
                 "LIMIT",
                 "LIMIT_MAKER",
                 "MARKET",
                 "STOP_LOSS_LIMIT",
                 "TAKE_PROFIT_LIMIT"
               ],
               "quoteAsset" => "BTC",
               "quotePrecision" => 8,
               "status" => "TRADING",
               "symbol" => "ETHBTC"
             }
    end
  end
test "get_all_prices returns a list of prices for every symbol" do
use_cassette "get_all_prices_ok" do
assert {:ok, symbol_prices} = Binance.get_all_prices()
assert [%Binance.SymbolPrice{price: "0.06137000", symbol: "ETHBTC"} | _tail] = symbol_prices
assert symbol_prices |> Enum.count() == 288
end
end
describe ".get_ticker" do
    # Pattern-matches a subset of the recorded %Binance.Ticker{} fields; other
    # struct fields are intentionally left unconstrained.
    test "returns a ticker struct with details for the given symbol" do
      use_cassette "get_ticker_ok" do
        assert {
                 :ok,
                 %Binance.Ticker{
                   ask_price: "0.01876000",
                   bid_price: "0.01875200",
                   close_time: 1_521_826_338_547,
                   count: 30612
                 }
               } = Binance.get_ticker("LTCBTC")
      end
    end
    # An unknown symbol surfaces Binance's error payload (-1121) unchanged.
    test "returns an error tuple when the symbol doesn't exist" do
      use_cassette "get_ticker_error" do
        assert Binance.get_ticker("IDONTEXIST") == {
                 :error,
                 %{"code" => -1121, "msg" => "Invalid symbol."}
               }
      end
    end
end
describe ".get_depth" do
    # Depth 5 request: the recorded order book carries exactly 5 ask and 5 bid
    # levels as [price, quantity, []] triples plus the last update id.
    test "returns the bids & asks up to the given depth" do
      use_cassette "get_depth_ok" do
        assert Binance.get_depth("BTCUSDT", 5) == {
                 :ok,
                 %Binance.OrderBook{
                   asks: [
                     ["8400.00000000", "2.04078100", []],
                     ["8405.35000000", "0.50354700", []],
                     ["8406.00000000", "0.32769800", []],
                     ["8406.33000000", "0.00239000", []],
                     ["8406.51000000", "0.03241000", []]
                   ],
                   bids: [
                     ["8393.00000000", "0.20453200", []],
                     ["8392.57000000", "0.02639000", []],
                     ["8392.00000000", "1.40893300", []],
                     ["8390.09000000", "0.07047100", []],
                     ["8388.72000000", "0.04577400", []]
                   ],
                   last_update_id: 113_634_395
                 }
               }
      end
    end
    # An unknown symbol surfaces Binance's error payload (-1121) unchanged.
    test "returns an error tuple when the symbol doesn't exist" do
      use_cassette "get_depth_error" do
        assert Binance.get_depth("IDONTEXIST", 1000) == {
                 :error,
                 %{"code" => -1121, "msg" => "Invalid symbol."}
               }
      end
    end
end
describe ".order_limit_buy" do
    # No explicit time-in-force argument: the recorded response carries
    # time_in_force "GTC" (good til cancel) and status "NEW".
    test "creates an order with a duration of good til cancel by default" do
      use_cassette "order_limit_buy_good_til_cancel_default_duration_success" do
        assert {:ok, %Binance.OrderResponse{} = response} =
                 Binance.order_limit_buy("LTCBTC", 0.1, 0.01)
        assert response.client_order_id == "9kITBshSwrClye1HJcLM3j"
        assert response.executed_qty == "0.00000000"
        assert response.order_id == 47_511_548
        assert response.orig_qty == "0.10000000"
        assert response.price == "0.01000000"
        assert response.side == "BUY"
        assert response.status == "NEW"
        assert response.symbol == "LTCBTC"
        assert response.time_in_force == "GTC"
        assert response.transact_time == 1_527_278_150_709
        assert response.type == "LIMIT"
      end
    end
    # FOK (fill or kill): the recorded order could not be filled immediately,
    # so the response status is "EXPIRED" with nothing executed.
    test "can create an order with a fill or kill duration" do
      use_cassette "order_limit_buy_fill_or_kill_success" do
        assert {:ok, %Binance.OrderResponse{} = response} =
                 Binance.order_limit_buy("LTCBTC", 0.1, 0.01, "FOK")
        assert response.client_order_id == "dY67P33S4IxPnJGx5EtuSf"
        assert response.executed_qty == "0.00000000"
        assert response.order_id == 47_527_179
        assert response.orig_qty == "0.10000000"
        assert response.price == "0.01000000"
        assert response.side == "BUY"
        assert response.status == "EXPIRED"
        assert response.symbol == "LTCBTC"
        assert response.time_in_force == "FOK"
        assert response.transact_time == 1_527_290_557_607
        assert response.type == "LIMIT"
      end
    end
test "can create an order with am immediate or cancel duration" do
use_cassette "order_limit_buy_immediate_or_cancel_success" do
assert {:ok, %Binance.OrderResponse{} = response} =
Binance.order_limit_buy("LTCBTC", 0.1, 0.01, "IOC")
assert response.client_order_id == "zyMyhtRENlvFHrl4CitDe0"
assert response.executed_qty == "0.00000000"
assert response.order_id == 47_528_830
assert response.orig_qty == "0.10000000"
assert response.price == "0.01000000"
assert response.side == "BUY"
assert response.status == "EXPIRED"
assert response.symbol == "LTCBTC"
assert response.time_in_force == "IOC"
assert response.transact_time == 1_527_291_300_912
assert response.type == "LIMIT"
end
end
    # Oversized order: the client wraps Binance error -2010 in a dedicated
    # %Binance.InsufficientBalanceError{} struct.
    test "returns an insufficient balance error tuple" do
      use_cassette "order_limit_buy_error_insufficient_balance" do
        assert {:error, reason} = Binance.order_limit_buy("LTCBTC", 10_000, 0.001, "FOK")
        assert reason == %Binance.InsufficientBalanceError{
                 reason: %{
                   code: -2010,
                   msg: "Account has insufficient balance for requested action."
                 }
               }
      end
    end
end
describe ".order_limit_sell" do
    # No explicit time-in-force argument: the recorded response carries
    # time_in_force "GTC" (good til cancel) and status "NEW".
    test "creates an order with a duration of good til cancel by default" do
      use_cassette "order_limit_sell_good_til_cancel_default_duration_success" do
        assert {:ok, %Binance.OrderResponse{} = response} =
                 Binance.order_limit_sell("BTCUSDT", 0.001, 50_000)
        assert response.client_order_id == "9UFMPloZsQ3eshCx66PVqD"
        assert response.executed_qty == "0.00000000"
        assert response.order_id == 108_212_133
        assert response.orig_qty == "0.00100000"
        assert response.price == "50000.00000000"
        assert response.side == "SELL"
        assert response.status == "NEW"
        assert response.symbol == "BTCUSDT"
        assert response.time_in_force == "GTC"
        assert response.transact_time == 1_527_279_796_770
        assert response.type == "LIMIT"
      end
    end
    # FOK (fill or kill): the recorded order could not be filled immediately,
    # so the response status is "EXPIRED" with nothing executed.
    test "can create an order with a fill or kill duration" do
      use_cassette "order_limit_sell_fill_or_kill_success" do
        assert {:ok, %Binance.OrderResponse{} = response} =
                 Binance.order_limit_sell("BTCUSDT", 0.001, 50_000, "FOK")
        assert response.client_order_id == "lKYECwEPSTPzurwx6emuN2"
        assert response.executed_qty == "0.00000000"
        assert response.order_id == 108_277_184
        assert response.orig_qty == "0.00100000"
        assert response.price == "50000.00000000"
        assert response.side == "SELL"
        assert response.status == "EXPIRED"
        assert response.symbol == "BTCUSDT"
        assert response.time_in_force == "FOK"
        assert response.transact_time == 1_527_290_985_305
        assert response.type == "LIMIT"
      end
    end
test "can create an order with am immediate or cancel duration" do
use_cassette "order_limit_sell_immediate_or_cancel_success" do
assert {:ok, %Binance.OrderResponse{} = response} =
Binance.order_limit_sell("BTCUSDT", 0.001, 50_000, "IOC")
assert response.client_order_id == "roSkLhwX9KCgYqr4yFPx1V"
assert response.executed_qty == "0.00000000"
assert response.order_id == 108_279_070
assert response.orig_qty == "0.00100000"
assert response.price == "50000.00000000"
assert response.side == "SELL"
assert response.status == "EXPIRED"
assert response.symbol == "BTCUSDT"
assert response.time_in_force == "IOC"
assert response.transact_time == 1_527_291_411_088
assert response.type == "LIMIT"
end
end
end
describe ".get_open_orders" do
    # Without a symbol argument the endpoint returns every open order; the
    # cassette holds exactly two, which are checked field by field.
    # (`cummulative_quote_qty` spelling follows the struct field as defined —
    # presumably mirroring the upstream API field name.)
    test "when called without symbol returns all open orders for all symbols" do
      use_cassette "get_open_orders_without_symbol_success" do
        assert {:ok, [%Binance.Order{} = order_1, %Binance.Order{} = order_2]} =
                 Binance.get_open_orders()
        # open order 1
        assert order_1.client_order_id == "web_6b6a916821d342fd861faa5139b229d2"
        assert order_1.cummulative_quote_qty == "0.00000000"
        assert order_1.executed_qty == "0.00000000"
        assert order_1.iceberg_qty == "0.00000000"
        assert order_1.is_working == true
        assert order_1.order_id == 148_740_811
        assert order_1.orig_qty == "177.00000000"
        assert order_1.price == "0.00050000"
        assert order_1.side == "SELL"
        assert order_1.status == "NEW"
        assert order_1.stop_price == "0.00000000"
        assert order_1.symbol == "XRPBTC"
        assert order_1.time == 1_556_710_572_734
        assert order_1.time_in_force == "GTC"
        assert order_1.type == "LIMIT"
        assert order_1.update_time == 1_556_710_572_734
        # open order 2
        assert order_2.client_order_id == "web_db04d8a507f14135a9a9d4467bc541a1"
        assert order_2.cummulative_quote_qty == "0.00000000"
        assert order_2.executed_qty == "0.00000000"
        assert order_2.iceberg_qty == "0.00000000"
        assert order_2.is_working == true
        assert order_2.order_id == 42_240_233
        assert order_2.orig_qty == "215.00000000"
        assert order_2.price == "0.00064200"
        assert order_2.side == "SELL"
        assert order_2.status == "NEW"
        assert order_2.stop_price == "0.00000000"
        assert order_2.symbol == "WABIBTC"
        assert order_2.time == 1_556_710_717_616
        assert order_2.time_in_force == "GTC"
        assert order_2.type == "LIMIT"
        assert order_2.update_time == 1_556_710_717_616
      end
    end
test "when called with symbol returns all open orders for that symbols(string)" do
use_cassette "get_open_orders_with_symbol_string_success" do
assert {:ok, [%Binance.Order{} = result]} = Binance.get_open_orders("WABIBTC")
assert result.client_order_id == "web_db04d8a507f14135a9a9d4467bc541a1"
assert result.cummulative_quote_qty == "0.00000000"
assert result.executed_qty == "0.00000000"
assert result.iceberg_qty == "0.00000000"
assert result.is_working == true
assert result.order_id == 42_240_233
assert result.orig_qty == "215.00000000"
assert result.price == "0.00064200"
assert result.side == "SELL"
assert result.status == "NEW"
assert result.stop_price == "0.00000000"
assert result.symbol == "WABIBTC"
assert result.time == 1_556_710_717_616
assert result.time_in_force == "GTC"
assert result.type == "LIMIT"
assert result.update_time == 1_556_710_717_616
end
end
test "when called with symbol returns all open orders for that symbols(TradePair struct)" do
use_cassette "get_open_orders_with_trade_pair_struct_string_success" do
assert {:ok, [%Binance.Order{} = result]} =
Binance.get_open_orders(%Binance.TradePair{:from => "WABI", :to => "BTC"})
assert result.client_order_id == "web_db04d8a507f14135a9a9d4467bc541a1"
assert result.cummulative_quote_qty == "0.00000000"
assert result.executed_qty == "0.00000000"
assert result.iceberg_qty == "0.00000000"
assert result.is_working == true
assert result.order_id == 42_240_233
assert result.orig_qty == "215.00000000"
assert result.price == "0.00064200"
assert result.side == "SELL"
assert result.status == "NEW"
assert result.stop_price == "0.00000000"
assert result.symbol == "WABIBTC"
assert result.time == 1_556_710_717_616
assert result.time_in_force == "GTC"
assert result.type == "LIMIT"
assert result.update_time == 1_556_710_717_616
end
end
end
end
| 39.145205 | 98 | 0.595745 |
e84e3e8b19605c361f2612b18074f7d79e24e037 | 2,279 | exs | Elixir | test/suite/draft6/enum_test.exs | starbelly/json_xema | 73ca23f9ce51d450d2f9cf0509ee6bb82f9a8c79 | [
"MIT"
] | 1 | 2019-06-21T10:55:33.000Z | 2019-06-21T10:55:33.000Z | test/suite/draft6/enum_test.exs | starbelly/json_xema | 73ca23f9ce51d450d2f9cf0509ee6bb82f9a8c79 | [
"MIT"
] | null | null | null | test/suite/draft6/enum_test.exs | starbelly/json_xema | 73ca23f9ce51d450d2f9cf0509ee6bb82f9a8c79 | [
"MIT"
] | null | null | null | defmodule Draft6.EnumTest do
use ExUnit.Case, async: true
import JsonXema, only: [valid?: 2]
  # Schema `{"enum": [1, 2, 3]}`: only those exact values validate.
  describe "simple enum validation" do
    # Builds the JsonXema schema once per test from the raw JSON string.
    setup do
      %{schema: ~s(
        {
          "enum": [
            1,
            2,
            3
          ]
        }
      ) |> Jason.decode!() |> JsonXema.new()}
    end

    test "one of the enum is valid", %{schema: schema} do
      data = 1
      assert valid?(schema, data)
    end

    test "something else is invalid", %{schema: schema} do
      data = 4
      refute valid?(schema, data)
    end
  end
  # Enum values of mixed types (number, string, array, boolean, object);
  # membership is checked by deep equality.
  describe "heterogeneous enum validation" do
    # Builds the JsonXema schema once per test from the raw JSON string.
    setup do
      %{schema: ~s(
        {
          "enum": [
            6,
            "foo",
            [],
            true,
            {
              "foo": 12
            }
          ]
        }
      ) |> Jason.decode!() |> JsonXema.new()}
    end

    test "one of the enum is valid", %{schema: schema} do
      data = []
      assert valid?(schema, data)
    end

    test "something else is invalid", %{schema: schema} do
      data = nil
      refute valid?(schema, data)
    end

    # %{"foo" => false} differs from the enum's %{"foo" => 12} member.
    test "objects are deep compared", %{schema: schema} do
      data = %{"foo" => false}
      refute valid?(schema, data)
    end
  end
  # Object schema with per-property enums; "bar" is required, "foo" optional.
  describe "enums in properties" do
    # Builds the JsonXema schema once per test from the raw JSON string.
    setup do
      %{schema: ~s(
        {
          "properties": {
            "bar": {
              "enum": [
                "bar"
              ]
            },
            "foo": {
              "enum": [
                "foo"
              ]
            }
          },
          "required": [
            "bar"
          ],
          "type": "object"
        }
      ) |> Jason.decode!() |> JsonXema.new()}
    end

    test "both properties are valid", %{schema: schema} do
      data = %{"bar" => "bar", "foo" => "foo"}
      assert valid?(schema, data)
    end

    test "missing optional property is valid", %{schema: schema} do
      data = %{"bar" => "bar"}
      assert valid?(schema, data)
    end

    test "missing required property is invalid", %{schema: schema} do
      data = %{"foo" => "foo"}
      refute valid?(schema, data)
    end

    test "missing all properties is invalid", %{schema: schema} do
      data = %{}
      refute valid?(schema, data)
    end
  end
end
| 21.101852 | 69 | 0.455024 |
e84e45c563ae60c5641581dbc663c70a9396585b | 1,924 | ex | Elixir | lib/cond_parser.ex | FabienHenon/cond_parser | 35d13c89decaa1f7ed500c2e8c9da5572da28d0f | [
"MIT"
] | null | null | null | lib/cond_parser.ex | FabienHenon/cond_parser | 35d13c89decaa1f7ed500c2e8c9da5572da28d0f | [
"MIT"
] | null | null | null | lib/cond_parser.ex | FabienHenon/cond_parser | 35d13c89decaa1f7ed500c2e8c9da5572da28d0f | [
"MIT"
] | null | null | null | defmodule CondParser do
@moduledoc """
`CondParser` reads conditions like
`(':foo' or ("toto" == true AND (12 != 13 OR false))) and true` and transforms them
either into an AST with `parse/1`, or into JSON with `to_json/1`.
Ast looks like this:
```
{:and_op,
{:or_op, ":foo",
{:and_op,
{:eq_op, "toto", true},
{:or_op,
{:not_eq_op, 12, 13},
false
}
}
},
true
}
```
Json looks like this:
```json
{
"left": {
"left": ":foo",
"op": "or",
"right": {
"left": {"left": "toto", "op": "eq", "right": true},
"op": "and",
"right": {
"left": {"left": 12, "op": "not_eq", "right": 13},
"op": "or",
"right": false
}
}
},
"op": "and",
"right": true
}
```
"""
@type ast() :: any()
@type json() :: map()
  @spec parse(binary) :: {:ok, ast()} | {:error, any()}
  # Tokenizes the condition string with the Erlang `:cond_lexer` module and
  # feeds the tokens to `:cond_parser` (presumably leex/yecc generated — not
  # visible here). Returns {:ok, ast} on success.
  def parse(str) do
    with {:ok, tokens, _end_line} <- str |> to_charlist() |> :cond_lexer.string(),
         {:ok, ast} <- :cond_parser.parse(tokens) do
      {:ok, ast}
    else
      # Any non-matching 3-tuple (e.g. a lexer failure carrying line info):
      # extract the middle element as the error reason.
      {_, reason, _} ->
        {:error, reason}

      # Parser failures already arrive as {:error, reason}.
      {:error, reason} ->
        {:error, reason}
    end
  end
@spec parse!(binary) :: ast
def parse!(str) do
case parse(str) do
{:ok, ast} -> ast
{:error, err} -> throw(err)
end
end
@spec to_json(binary) :: {:error, any()} | {:ok, json()}
def to_json(str) do
case parse(str) do
{:ok, ast} -> {:ok, JsonBuilder.build(ast)}
{:error, reason} -> {:error, reason}
end
end
@spec to_json!(binary) :: json()
def to_json!(str) do
case to_json(str) do
{:ok, json} -> json
{:error, err} -> throw(err)
end
end
@spec is_valid?(binary) :: boolean()
def is_valid?(str) do
case parse(str) do
{:ok, _ast} -> true
{:error, _err} -> false
end
end
end
| 20.041667 | 84 | 0.476091 |
e84e4af69e402b8cf86babf7b831b40f7333d667 | 37 | ex | Elixir | lib/nerves_hub_cli/device.ex | danielspofford/nerves_hub_cli | 3d12abe873e27448ba5c862a08f03d719cc40dc2 | [
"Apache-2.0"
] | 28 | 2018-08-15T02:17:26.000Z | 2021-11-25T11:14:24.000Z | lib/nerves_hub_cli/device.ex | danielspofford/nerves_hub_cli | 3d12abe873e27448ba5c862a08f03d719cc40dc2 | [
"Apache-2.0"
] | 81 | 2018-08-13T17:14:14.000Z | 2021-09-11T13:08:29.000Z | lib/nerves_hub_cli/device.ex | danielspofford/nerves_hub_cli | 3d12abe873e27448ba5c862a08f03d719cc40dc2 | [
"Apache-2.0"
] | 13 | 2018-09-14T12:47:26.000Z | 2022-02-03T07:28:41.000Z | defmodule NervesHubCLI.Device do
end
| 12.333333 | 32 | 0.864865 |
e84e5fd5d9f976a9f37091376725d1cc3365d831 | 744 | exs | Elixir | apps/imposc/test/dynamics/state_of_motion_test.exs | FelixDux/impact-oscillator | 4b93975a8f87129777f4e4fe5c2da77fbea41ecf | [
"MIT"
] | null | null | null | apps/imposc/test/dynamics/state_of_motion_test.exs | FelixDux/impact-oscillator | 4b93975a8f87129777f4e4fe5c2da77fbea41ecf | [
"MIT"
] | null | null | null | apps/imposc/test/dynamics/state_of_motion_test.exs | FelixDux/impact-oscillator | 4b93975a8f87129777f4e4fe5c2da77fbea41ecf | [
"MIT"
] | null | null | null | defmodule StateOfMotionTest do
use ExUnit.Case
alias StateOfMotion
@moduletag :capture_log
doctest StateOfMotion
  # Smoke test: the module compiles and is loaded (module_info/0 is
  # auto-generated for every compiled BEAM module and returns a keyword list).
  test "module exists" do
    assert is_list(StateOfMotion.module_info())
  end
test "ImpactPoint from state" do
omega = 2 * :math.pi()
for state <- [
%StateOfMotion{t: 37.5, v: 8.2},
%StateOfMotion{t: 18.6, v: -0.42},
%StateOfMotion{t: -37.5, v: 8.2}
],
do:
StateOfMotion.point_from_state(state, omega)
|> (fn point ->
assert(
state.t == point.t && state.v == point.v &&
abs(ForcingPhase.frac_part(abs(state.t)) - point.phi) < 0.000001
)
end).()
end
end
| 23.25 | 84 | 0.53629 |
e84e604c983bbc67417db044e8adc31a84dc274c | 1,802 | exs | Elixir | test/materia_career/tags/tags_test.exs | karabiner-inc/materia_career | cb378d80a7bbb53b5370771227954d1a850ba28a | [
"Apache-2.0"
] | null | null | null | test/materia_career/tags/tags_test.exs | karabiner-inc/materia_career | cb378d80a7bbb53b5370771227954d1a850ba28a | [
"Apache-2.0"
] | 23 | 2019-01-16T01:52:22.000Z | 2019-06-20T01:59:55.000Z | test/materia_career/tags/tags_test.exs | karabiner-inc/materia_career | cb378d80a7bbb53b5370771227954d1a850ba28a | [
"Apache-2.0"
] | null | null | null | defmodule MateriaCareer.TagsTest do
use MateriaCareer.DataCase
alias MateriaCareer.Tags
describe "tags" do
alias MateriaCareer.Tags.Tag
@valid_attrs %{label: "some label"}
@update_attrs %{label: "some updated label"}
@invalid_attrs %{label: nil}
def tag_fixture(attrs \\ %{}) do
{:ok, tag} =
attrs
|> Enum.into(@valid_attrs)
|> Tags.create_tag()
tag
end
    # list_tags/0 returns exactly the records present in the database.
    test "list_tags/0 returns all tags" do
      tag = tag_fixture()
      assert Tags.list_tags() == [tag]
    end
    # get_tag!/1 fetches a single record by primary key.
    test "get_tag!/1 returns the tag with given id" do
      tag = tag_fixture()
      assert Tags.get_tag!(tag.id) == tag
    end
    # Valid attrs insert successfully and the label is persisted as given.
    test "create_tag/1 with valid data creates a tag" do
      assert {:ok, %Tag{} = tag} = Tags.create_tag(@valid_attrs)
      assert tag.label == "some label"
    end
    # A nil label is rejected with an error changeset.
    test "create_tag/1 with invalid data returns error changeset" do
      assert {:error, %Ecto.Changeset{}} = Tags.create_tag(@invalid_attrs)
    end
    # Valid update attrs replace the label on the existing record.
    test "update_tag/2 with valid data updates the tag" do
      tag = tag_fixture()
      assert {:ok, tag} = Tags.update_tag(tag, @update_attrs)
      assert %Tag{} = tag
      assert tag.label == "some updated label"
    end
    # A failed update returns an error changeset and leaves the row untouched.
    test "update_tag/2 with invalid data returns error changeset" do
      tag = tag_fixture()
      assert {:error, %Ecto.Changeset{}} = Tags.update_tag(tag, @invalid_attrs)
      assert tag == Tags.get_tag!(tag.id)
    end
    # After deletion a lookup by id raises Ecto.NoResultsError.
    test "delete_tag/1 deletes the tag" do
      tag = tag_fixture()
      assert {:ok, %Tag{}} = Tags.delete_tag(tag)
      assert_raise Ecto.NoResultsError, fn -> Tags.get_tag!(tag.id) end
    end
    # change_tag/1 wraps an existing record in a changeset for forms.
    test "change_tag/1 returns a tag changeset" do
      tag = tag_fixture()
      assert %Ecto.Changeset{} = Tags.change_tag(tag)
    end
end
end
| 27.30303 | 79 | 0.633185 |
e84e772327c7ed32cdb2649f40f53bd26ccbddcc | 2,847 | exs | Elixir | config/runtime.exs | miguelcoba/saturn | c5b4386c32ea9c1862638e08707b2cf346a1092e | [
"MIT"
] | 9 | 2021-11-02T11:11:41.000Z | 2022-03-04T07:39:22.000Z | config/runtime.exs | miguelcoba/saturn | c5b4386c32ea9c1862638e08707b2cf346a1092e | [
"MIT"
] | null | null | null | config/runtime.exs | miguelcoba/saturn | c5b4386c32ea9c1862638e08707b2cf346a1092e | [
"MIT"
] | 5 | 2021-11-27T00:14:13.000Z | 2022-03-04T07:17:49.000Z | import Config
# config/runtime.exs is executed for all environments, including
# during releases. It is executed after compilation and before the
# system starts, so it is typically used to load production configuration
# and secrets from environment variables or elsewhere. Do not define
# any compile-time configuration in here, as it won't be applied.
# The block below contains prod specific runtime configuration.
# Production-only runtime configuration, evaluated when the release boots.
if config_env() == :prod do
  # Fail fast at boot when the database URL is not provided.
  database_url =
    System.get_env("DATABASE_URL") ||
      raise """
      environment variable DATABASE_URL is missing.
      For example: ecto://USER:PASS@HOST/DATABASE
      """

  config :saturn, Saturn.Repo,
    # ssl: true,
    # socket_options: [:inet6],
    url: database_url,
    # Connection pool size, overridable via POOL_SIZE (defaults to 10).
    pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")

  # The secret key base is used to sign/encrypt cookies and other secrets.
  # A default value is used in config/dev.exs and config/test.exs but you
  # want to use a different value for prod and you most likely don't want
  # to check this value into version control, so we use an environment
  # variable instead.
  secret_key_base =
    System.get_env("SECRET_KEY_BASE") ||
      raise """
      environment variable SECRET_KEY_BASE is missing.
      You can generate one by calling: mix phx.gen.secret
      """

  config :saturn, SaturnWeb.Endpoint,
    http: [
      # Enable IPv6 and bind on all interfaces.
      # Set it to {0, 0, 0, 0, 0, 0, 0, 1} for local network only access.
      # See the documentation on https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html
      # for details about using IPv6 vs IPv4 and loopback vs public addresses.
      ip: {0, 0, 0, 0, 0, 0, 0, 0},
      port: String.to_integer(System.get_env("PORT") || "4000")
    ],
    secret_key_base: secret_key_base

  # ## Using releases
  #
  # If you are doing OTP releases, you need to instruct Phoenix
  # to start each relevant endpoint:
  #
  config :saturn, SaturnWeb.Endpoint, server: true
  #
  # Then you can assemble a release by calling `mix release`.
  # See `mix help release` for more information.

  # ## Configuring the mailer
  #
  # In production you need to configure the mailer to use a different adapter.
  # Also, you may need to configure the Swoosh API client of your choice if you
  # are not using SMTP. Here is an example of the configuration:
  #
  #     config :saturn, Saturn.Mailer,
  #       adapter: Swoosh.Adapters.Mailgun,
  #       api_key: System.get_env("MAILGUN_API_KEY"),
  #       domain: System.get_env("MAILGUN_DOMAIN")
  #
  # For this example you need include a HTTP client required by Swoosh API client.
  # Swoosh supports Hackney and Finch out of the box:
  #
  #     config :swoosh, :api_client, Swoosh.ApiClient.Hackney
  #
  # See https://hexdocs.pm/swoosh/Swoosh.html#module-installation for details.
end
| 38.472973 | 82 | 0.696874 |
e84ec8983bac21e15c8a4758e43594fdc0880e1e | 611 | exs | Elixir | mix.exs | spawnproc/spawnproc | 5f6f34328cc065fca97ef069e3b00527addab51f | [
"0BSD"
] | 2 | 2017-02-26T21:22:16.000Z | 2017-02-28T11:39:59.000Z | mix.exs | enterprizing/fix | 5f6f34328cc065fca97ef069e3b00527addab51f | [
"0BSD"
] | null | null | null | mix.exs | enterprizing/fix | 5f6f34328cc065fca97ef069e3b00527addab51f | [
"0BSD"
] | 1 | 2021-01-10T23:47:18.000Z | 2021-01-10T23:47:18.000Z | defmodule FIX.Mixfile do
use Mix.Project
  # Mix project definition; package metadata comes from `package/0` and
  # dependencies from `deps/0`.
  def project() do
    [
      app: :fix,
      version: "4.6.0",
      description: "FIX Financial Information Exchange",
      package: package(),
      elixir: "~> 1.7",
      deps: deps()
    ]
  end
def package do
[
files: ~w(doc include src mix.exs LICENSE),
licenses: ["ISC"],
maintainers: ["Namdak Tonpa"],
name: :fix,
links: %{"GitHub" => "https://github.com/enterprizing/fix"}
]
end
def application() do
[mod: {:fix, []}]
end
def deps() do
[
{:ex_doc, "~> 0.11", only: :dev}
]
end
end
| 16.972222 | 65 | 0.523732 |
e84f3f5908bf7e346ce6554aabb0905e108a80a9 | 6,066 | exs | Elixir | .credo.exs | kianmeng/benchee_markdown | 701ac80e0d862888e17ccea29cb784ab1e943de5 | [
"MIT"
] | null | null | null | .credo.exs | kianmeng/benchee_markdown | 701ac80e0d862888e17ccea29cb784ab1e943de5 | [
"MIT"
] | null | null | null | .credo.exs | kianmeng/benchee_markdown | 701ac80e0d862888e17ccea29cb784ab1e943de5 | [
"MIT"
] | null | null | null | # This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
  #
  # You can have as many configs as you like in the `configs:` field.
  configs: [
    %{
      #
      # Run any exec using `mix credo -C <name>`. If no exec name is given
      # "default" is used.
      #
      name: "default",
      #
      # These are the files included in the analysis:
      files: %{
        #
        # You can give explicit globs or simply directories.
        # In the latter case `**/*.{ex,exs}` will be used.
        #
        included: ["lib/", "src/", "test/", "web/", "apps/"],
        # test/suite holds vendored/generated cases that are not linted here.
        excluded: [~r"/_build/", ~r"/deps/", ~r"test/suite"]
      },
      #
      # If you create your own checks, you must specify the source files for
      # them here, so they can be loaded by Credo before running the analysis.
      #
      requires: [],
      #
      # Strict mode is enabled for a traditional linting experience; set
      # `strict` to `false` below to relax it:
      #
      strict: true,
      #
      # If you want to use uncolored output by default, you can change `color`
      # to `false` below:
      #
      color: true,
      #
      # You can customize the parameters of any check by adding a second element
      # to the tuple.
      #
      # To disable a check put `false` as second element:
      #
      #     {Credo.Check.Design.DuplicatedCode, false}
      #
      checks: [
        #
        ## Consistency Checks
        #
        {Credo.Check.Consistency.ExceptionNames},
        {Credo.Check.Consistency.LineEndings},
        {Credo.Check.Consistency.ParameterPatternMatching},
        {Credo.Check.Consistency.SpaceAroundOperators},
        {Credo.Check.Consistency.SpaceInParentheses},
        {Credo.Check.Consistency.TabsOrSpaces},
        #
        ## Design Checks
        #
        # You can customize the priority of any check
        # Priority values are: `low, normal, high, higher`
        #
        {Credo.Check.Design.AliasUsage, priority: :low},
        # For some checks, you can also set other parameters
        #
        # If you don't want the `setup` and `test` macro calls in ExUnit tests
        # or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
        # set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
        #
        {Credo.Check.Design.DuplicatedCode, excluded_macros: []},
        # You can also customize the exit_status of each check.
        # If you don't want TODO comments to cause `mix credo` to fail, just
        # set this value to 0 (zero).
        #
        {Credo.Check.Design.TagTODO, exit_status: 2},
        {Credo.Check.Design.TagFIXME},
        #
        ## Readability Checks
        #
        {Credo.Check.Readability.FunctionNames},
        {Credo.Check.Readability.LargeNumbers},
        {Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 80},
        {Credo.Check.Readability.ModuleAttributeNames},
        {Credo.Check.Readability.ModuleDoc},
        {Credo.Check.Readability.ModuleNames},
        {Credo.Check.Readability.ParenthesesOnZeroArityDefs},
        {Credo.Check.Readability.ParenthesesInCondition},
        {Credo.Check.Readability.PredicateFunctionNames},
        {Credo.Check.Readability.PreferImplicitTry},
        {Credo.Check.Readability.RedundantBlankLines},
        {Credo.Check.Readability.StringSigils},
        {Credo.Check.Readability.TrailingBlankLine},
        {Credo.Check.Readability.TrailingWhiteSpace},
        {Credo.Check.Readability.VariableNames},
        {Credo.Check.Readability.Semicolons},
        {Credo.Check.Readability.SpaceAfterCommas},
        #
        ## Refactoring Opportunities
        #
        {Credo.Check.Refactor.DoubleBooleanNegation},
        {Credo.Check.Refactor.CondStatements},
        {Credo.Check.Refactor.CyclomaticComplexity, max_complexity: 12},
        {Credo.Check.Refactor.FunctionArity},
        {Credo.Check.Refactor.LongQuoteBlocks},
        {Credo.Check.Refactor.MatchInCondition},
        {Credo.Check.Refactor.NegatedConditionsInUnless},
        {Credo.Check.Refactor.NegatedConditionsWithElse},
        {Credo.Check.Refactor.Nesting, max_nesting: 3},
        {Credo.Check.Refactor.PipeChainStart,
         excluded_argument_types: [:atom, :binary, :fn, :keyword], excluded_functions: []},
        {Credo.Check.Refactor.UnlessWithElse},
        #
        ## Warnings
        #
        {Credo.Check.Warning.BoolOperationOnSameValues},
        {Credo.Check.Warning.ExpensiveEmptyEnumCheck},
        {Credo.Check.Warning.IExPry},
        {Credo.Check.Warning.IoInspect},
        {Credo.Check.Warning.LazyLogging},
        {Credo.Check.Warning.OperationOnSameValues},
        {Credo.Check.Warning.OperationWithConstantResult},
        {Credo.Check.Warning.UnusedEnumOperation},
        {Credo.Check.Warning.UnusedFileOperation},
        {Credo.Check.Warning.UnusedKeywordOperation},
        {Credo.Check.Warning.UnusedListOperation},
        {Credo.Check.Warning.UnusedPathOperation},
        {Credo.Check.Warning.UnusedRegexOperation},
        {Credo.Check.Warning.UnusedStringOperation},
        {Credo.Check.Warning.UnusedTupleOperation},
        {Credo.Check.Warning.RaiseInsideRescue},
        #
        # Controversial and experimental checks (opt-in, just remove `, false`)
        #
        {Credo.Check.Refactor.ABCSize, false},
        {Credo.Check.Refactor.AppendSingleItem, false},
        {Credo.Check.Refactor.VariableRebinding, false},
        {Credo.Check.Warning.MapGetUnsafePass, false},
        {Credo.Check.Consistency.MultiAliasImportRequireUse, false},
        #
        # Deprecated checks (these will be deleted after a grace period)
        #
        {Credo.Check.Readability.Specs, false}
        #
        # Custom checks can be created using `mix credo.gen.check`.
        #
      ]
    }
  ]
}
| 37.9125 | 91 | 0.633861 |
e84f4cc8f1f30b77a48f945875e8974c6bfd2bd9 | 688 | exs | Elixir | test/unit/hologram/compiler/module_def_aggregators/elixir_list_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 40 | 2022-01-19T20:27:36.000Z | 2022-03-31T18:17:41.000Z | test/unit/hologram/compiler/module_def_aggregators/elixir_list_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 42 | 2022-02-03T22:52:43.000Z | 2022-03-26T20:57:32.000Z | test/unit/hologram/compiler/module_def_aggregators/elixir_list_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 3 | 2022-02-10T04:00:37.000Z | 2022-03-08T22:07:45.000Z | defmodule Hologram.Compiler.ModuleDefAggregator.ElixirListTest do
use Hologram.Test.UnitCase, async: false
alias Hologram.Compiler.{ModuleDefAggregator, ModuleDefStore}
alias Hologram.Compiler.IR.{ModuleDefinition, ModuleType}
alias Hologram.Test.Fixtures.{PlaceholderModule1, PlaceholderModule2}
  # Starts a fresh ModuleDefStore before each test (the case runs with
  # async: false, presumably because this store is shared state).
  setup do
    ModuleDefStore.run()
    :ok
  end
  # Aggregating a list of ModuleType IR nodes must record a ModuleDefinition
  # for each referenced module in the ModuleDefStore.
  test "aggregate/1" do
    ir = [
      %ModuleType{module: PlaceholderModule1},
      %ModuleType{module: PlaceholderModule2}
    ]

    ModuleDefAggregator.aggregate(ir)

    assert %ModuleDefinition{} = ModuleDefStore.get!(PlaceholderModule1)
    assert %ModuleDefinition{} = ModuleDefStore.get!(PlaceholderModule2)
  end
end
| 27.52 | 72 | 0.758721 |
e84fc5f9bfa94e6c0a463ebf852bd3f7f8eaed31 | 6,179 | ex | Elixir | test/support/router.ex | Matsa59/phoenix_live_view | 20430b791e8e321b5d4ed10a8e1fa46321a7db85 | [
"MIT"
] | null | null | null | test/support/router.ex | Matsa59/phoenix_live_view | 20430b791e8e321b5d4ed10a8e1fa46321a7db85 | [
"MIT"
] | null | null | null | test/support/router.ex | Matsa59/phoenix_live_view | 20430b791e8e321b5d4ed10a8e1fa46321a7db85 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveViewTest.Router.ForwardedRouter do
use Phoenix.Router
import Phoenix.LiveView.Router
  # Minimal browser pipeline: only negotiates HTML responses.
  pipeline :browser do
    plug :accepts, ["html"]
  end
  # Catch-all route: any single path segment binds to `:params` and mounts
  # ForwardedLive (this router is presumably mounted via forward/2 in tests —
  # the module name suggests so).
  scope "/", Phoenix.LiveViewTest do
    pipe_through [:browser]

    live "/:params", ForwardedLive
  end
end
defmodule Phoenix.LiveViewTest.Router do
  # Router used exclusively by the LiveView test suite. Every route below
  # exists to exercise one feature: mounting, layouts, params, flash,
  # lifecycle hooks, live sessions, host-based routing and forwarding.
  use Phoenix.Router
  import Phoenix.LiveView.Router
  # Cookie-backed session (test-only signing salt) so session/flash
  # dependent tests have somewhere to store data.
  pipeline :setup_session do
    plug Plug.Session,
      store: :cookie,
      key: "_live_view_key",
      signing_salt: "/VEDsdfsffMnp5"
    plug :fetch_session
  end
  pipeline :browser do
    plug :setup_session
    plug :accepts, ["html"]
    plug :fetch_live_flash
  end
  # Deliberately broken root layout; used to assert that route-level
  # layout options win over the pipeline's layout (see "/bad_layout").
  pipeline :bad_layout do
    plug :put_root_layout, {UnknownView, :unknown_template}
  end
  scope "/", Phoenix.LiveViewTest do
    pipe_through [:browser]
    live "/thermo", ThermostatLive
    live "/thermo/:id", ThermostatLive
    live "/thermo-container", ThermostatLive, container: {:span, style: "thermo-flex<script>"}
    live "/", ThermostatLive, as: :live_root
    live "/clock", ClockLive
    live "/redir", RedirLive
    live "/elements", ElementsLive
    live "/inner_block", InnerLive
    live "/same-child", SameChildLive
    live "/root", RootLive
    live "/opts", OptsLive
    live "/time-zones", AppendLive
    live "/shuffle", ShuffleLive
    live "/components", WithComponentLive
    live "/multi-targets", WithMultipleTargets
    live "/assigns-not-in-socket", AssignsNotInSocketLive
    live "/errors", ErrorsLive
    # controller test
    get "/controller/:type", Controller, :incoming
    get "/widget", Controller, :widget
    get "/not_found", Controller, :not_found
    post "/not_found", Controller, :not_found
    # router test
    live "/router/thermo_defaults/:id", DashboardLive
    live "/router/thermo_session/:id", DashboardLive
    live "/router/thermo_container/:id", DashboardLive, container: {:span, style: "flex-grow"}
    live "/router/thermo_session/custom/:id", DashboardLive, as: :custom_live
    live "/router/foobarbaz", FooBarLive, :index
    live "/router/foobarbaz/index", FooBarLive.Index, :index
    live "/router/foobarbaz/show", FooBarLive.Index, :show
    live "/router/foobarbaz/nested/index", FooBarLive.Nested.Index, :index
    live "/router/foobarbaz/nested/show", FooBarLive.Nested.Index, :show
    live "/router/foobarbaz/custom", FooBarLive, :index, as: :custom_foo_bar
    live "/router/foobarbaz/with_live", Phoenix.LiveViewTest.Live.Nested.Module, :action
    live "/router/foobarbaz/nosuffix", NoSuffix, :index, as: :custom_route
    # integration layout
    live_session :styled_layout, root_layout: {Phoenix.LiveViewTest.LayoutView, "styled.html"} do
      live "/styled-elements", ElementsLive
    end
    live_session :app_layout, root_layout: {Phoenix.LiveViewTest.LayoutView, :app} do
      live "/layout", LayoutLive
    end
    scope "/" do
      pipe_through [:bad_layout]
      # The layout option needs to have higher precedence than bad layout
      live "/bad_layout", LayoutLive
      live_session :parent_layout, root_layout: false do
        live "/parent_layout", ParentLayoutLive
      end
    end
    # integration params
    live "/counter/:id", ParamCounterLive
    live "/action", ActionLive
    live "/action/index", ActionLive, :index
    live "/action/:id/edit", ActionLive, :edit
    # integration flash
    live "/flash-root", FlashLive
    live "/flash-child", FlashChildLive
    # integration events
    live "/events", EventsLive
    live "/events-in-mount", EventsInMountLive
    live "/events-in-component", EventsInComponentLive
    # integration components
    live "/component_in_live", ComponentInLive.Root
    live "/cids_destroyed", CidsDestroyedLive
    # integration lifecycle
    live "/lifecycle", HooksLive
    live "/lifecycle/bad-mount", HooksLive.BadMount
    live "/lifecycle/own-mount", HooksLive.OwnMount
    live "/lifecycle/halt-mount", HooksLive.HaltMount
    live "/lifecycle/redirect-cont-mount", HooksLive.RedirectMount, :cont
    live "/lifecycle/redirect-halt-mount", HooksLive.RedirectMount, :halt
    live "/lifecycle/components", HooksLive.WithComponent
    live "/lifecycle/handle-params-not-defined", HooksLive.HandleParamsNotDefined
    live "/lifecycle/handle-info-not-defined", HooksLive.HandleInfoNotDefined
    # integration connect
    live "/connect", ConnectLive
    # live_patch
    scope host: "app.example.com" do
      live "/with-host/full", HostLive, :full
      live "/with-host/path", HostLive, :path
    end
    # live_session
    live_session :test do
      live "/thermo-live-session", ThermostatLive
      live "/clock-live-session", ClockLive
    end
    live_session :admin, session: %{"admin" => true} do
      live "/thermo-live-session-admin", ThermostatLive
      live "/clock-live-session-admin", ClockLive
    end
    live_session :mfa, session: {__MODULE__, :session, [%{"inlined" => true}]} do
      live "/thermo-live-session-mfa", ThermostatLive
    end
    live_session :merged, session: %{"top-level" => true} do
      live "/thermo-live-session-merged", ThermostatLive
    end
    live_session :lifecycle, on_mount: Phoenix.LiveViewTest.HaltConnectedMount do
      live "/lifecycle/halt-connected-mount", HooksLive.Noop
    end
    live_session :mount_mod_arg, on_mount: {Phoenix.LiveViewTest.MountArgs, :inlined} do
      live "/lifecycle/mount-mod-arg", HooksLive.Noop
    end
    live_session :mount_mods,
      on_mount: [Phoenix.LiveViewTest.OnMount, Phoenix.LiveViewTest.OtherOnMount] do
      live "/lifecycle/mount-mods", HooksLive.Noop
    end
    live_session :mount_mod_args,
      on_mount: [
        {Phoenix.LiveViewTest.OnMount, :other},
        {Phoenix.LiveViewTest.OtherOnMount, :other}
      ] do
      live "/lifecycle/mount-mods-args", HooksLive.Noop
    end
    forward "/forwarded/path", Router.ForwardedRouter
  end
  scope "/", as: :user_defined_metadata, alias: Phoenix.LiveViewTest do
    live "/sessionless-thermo", ThermostatLive
    live "/thermo-with-metadata", ThermostatLive, metadata: %{route_name: "opts"}
  end
  # MFA session callback used by the :mfa live_session above; merges the
  # configured map with a marker proving the callback actually ran.
  def session(%Plug.Conn{}, extra), do: Map.merge(extra, %{"called" => true})
end
| 32.521053 | 97 | 0.698009 |
e8501a1fc40d8853b7f788836a31aab2bee8ad41 | 595 | exs | Elixir | fizzbuzz/test/fizzbuzz_test.exs | fernanduandrade/logic-studies | 49e274d9f503880691c38d4493152cec1b02dd29 | [
"MIT"
] | null | null | null | fizzbuzz/test/fizzbuzz_test.exs | fernanduandrade/logic-studies | 49e274d9f503880691c38d4493152cec1b02dd29 | [
"MIT"
] | null | null | null | fizzbuzz/test/fizzbuzz_test.exs | fernanduandrade/logic-studies | 49e274d9f503880691c38d4493152cec1b02dd29 | [
"MIT"
] | null | null | null | defmodule FizzbuzzTest do
use ExUnit.Case
describe "build/1" do
test "when a file is valid it returns the converted list" do
expectedResponse = {:ok,
[1, 2, :fizz, 4, :buzz, :fizz, 7, 8, :fizz, :buzz, 11, :fizz, 13, 14, :fizzbuzz, 16, 17, :buzz, :buzz, :fizzbuzz,
:fizzbuzz]}
assert Fizzbuzz.build("number.txt") == expectedResponse
end
test "when an file is invalid it returns an error" do
expectedResponse = {:error, "Error while trying to read the file: enoent"}
assert Fizzbuzz.build("invalid.txt") == expectedResponse
end
end
end
| 33.055556 | 119 | 0.648739 |
e85044101141f1fba9230af79e4b903b5173ecfa | 497 | ex | Elixir | lib/lucidboard_web/views/error_view.ex | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 86 | 2019-01-07T20:49:04.000Z | 2021-10-02T21:15:42.000Z | lib/lucidboard_web/views/error_view.ex | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 26 | 2019-03-27T12:06:52.000Z | 2020-09-20T05:21:09.000Z | lib/lucidboard_web/views/error_view.ex | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 19 | 2015-01-06T19:02:49.000Z | 2020-05-25T08:54:00.000Z | defmodule LucidboardWeb.ErrorView do
use LucidboardWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
  # Fallback for any status template without an explicit render clause:
  # derives the message from the template name ("404.html" -> "Not Found").
  def template_not_found(template, _assigns) do
    Phoenix.Controller.status_message_from_template(template)
  end
end
| 29.235294 | 61 | 0.738431 |
e85074216e0e399edda02438037980d2d24090db | 7,950 | ex | Elixir | lib/parse/zoneinfo/parser.ex | Adzz/timex | a9c97e2dd9cb5bed286d5a1d688f3eea8e73e6ea | [
"MIT"
] | null | null | null | lib/parse/zoneinfo/parser.ex | Adzz/timex | a9c97e2dd9cb5bed286d5a1d688f3eea8e73e6ea | [
"MIT"
] | null | null | null | lib/parse/zoneinfo/parser.ex | Adzz/timex | a9c97e2dd9cb5bed286d5a1d688f3eea8e73e6ea | [
"MIT"
] | null | null | null | defmodule Timex.Parse.ZoneInfo.Parser do
@moduledoc """
This module is responsible for parsing binary zoneinfo files,
such as those found in /usr/local/zoneinfo.
"""
# See http://linux.about.com/library/cmd/blcmdl5_tzfile.htm or
# https://github.com/eggert/tz/blob/master/tzfile.h for details on the tzfile format
  defmodule Zone do
    @moduledoc """
    Represents the data retrieved from a binary tzfile.
    For details on the tzfile format, see:
    http://www.cstdbill.com/tzdb/tzfile-format.html
    http://linux.about.com/library/cmd/blcmdl5_tzfile.htm
    https://github.com/eggert/tz/blob/master/tzfile.h
    """
    # Transition times; after parsing this holds %TransitionInfo{} structs
    defstruct transitions: [],
              # Zone abbreviations,
              abbreviations: [],
              # Leap second adjustments (%LeapSecond{} structs)
              leaps: [],
              # whether local transitions are standard or wall
              # (do_parse_flags/3 stores the raw list of 0/1 flags here)
              std_or_wall?: false,
              # whether local transitions are UTC or local
              # (do_parse_flags/3 stores the raw list of 0/1 flags here)
              utc_or_local?: false
  end
  defmodule Header do
    @moduledoc false
    # Six big-endian 4-8 byte integers
    # These mirror the counts in the tzfile header, in file order.
    # count of UTC/local indicators
    defstruct utc_count: 0,
              # count of standard/wall indicators
              wall_count: 0,
              # number of leap seconds
              leap_count: 0,
              # number of transition times
              transition_count: 0,
              # number of local time types (never zero)
              type_count: 0,
              # total number of characters of the zone abbreviations string
              abbrev_length: 0
  end
  defmodule TransitionInfo do
    @moduledoc false
    # total ISO 8601 offset (std + dst)
    defstruct gmt_offset: 0,
              # The time at which this transition starts
              starts_at: 0,
              # Is this transition in daylight savings time
              is_dst?: false,
              # The lookup index of the abbreviation
              abbrev_index: 0,
              # The zone abbreviation (placeholder; replaced during
              # do_parse_abbreviations/3 using abbrev_index)
              abbreviation: "N/A",
              # Whether transitions are standard or wall
              is_std?: true,
              # Whether transitions are UTC or local
              is_utc?: false
  end
  defmodule LeapSecond do
    @moduledoc false
    # The time at which this leap second occurs
    defstruct start: 0,
              # The count of leap seconds after this leap second
              remaining: 0
  end
  ##############
  # Macros defining common bitstring modifier combinations in zoneinfo files
  # bytes(n): a binary field of exactly n bytes.
  defmacrop bytes(size) do
    quote do: binary - size(unquote(size)) - unit(8)
  end
  # 32-bit big-endian signed integer (the tzfile numeric field type).
  defmacrop integer_32bit_be do
    quote do: big - size(4) - unit(8) - integer
  end
  # Single signed byte.
  defmacrop signed_char_be do
    quote do: big - size(1) - unit(8) - signed - integer
  end
  # Single unsigned byte.
  defmacrop unsigned_char_be do
    quote do: big - size(1) - unit(8) - unsigned - integer
  end
@doc """
Given a path to a zoneinfo file, or the binary data from a zoneinfo file,
parse the timezone information inside, and return it as a Zone struct.
"""
@spec parse(binary) :: {:ok, Zone.t()} | {:error, binary}
  # Binary payload: must begin with the "TZif" magic plus 16 reserved
  # bytes; the remainder is handed to the 24-byte header parser.
  def parse(<<?T, ?Z, ?i, ?f, _reserved::bytes(16), rest::binary>>) do
    do_parse_header(rest)
  end

  # Any other binary is treated as a filesystem path to a zoneinfo file;
  # its contents are read and re-dispatched through the clause above.
  def parse(path) when is_binary(path) do
    if path |> File.exists?() do
      path |> File.read!() |> parse
    else
      {:error, "No zoneinfo file at #{path}"}
    end
  end

  # Non-binary input cannot be a tzfile payload or a path.
  def parse(_) do
    {:error, "Invalid zoneinfo file header"}
  end
  # Parse the header information from the zoneinfo file.
  # The 24 header bytes hold six big-endian 32-bit counts, consumed in
  # file order; the rest of the binary is passed to the next stage.
  defp do_parse_header(<<header::bytes(24), rest::binary>>) do
    {utc_count, next} = parse_int(header)
    {wall_count, next} = parse_int(next)
    {leap_count, next} = parse_int(next)
    {tx_count, next} = parse_int(next)
    {type_count, next} = parse_int(next)
    {abbrev_length, _} = parse_int(next)
    header = %Header{
      utc_count: utc_count,
      wall_count: wall_count,
      leap_count: leap_count,
      transition_count: tx_count,
      type_count: type_count,
      abbrev_length: abbrev_length
    }
    do_parse_transition_times(rest, header)
  end
  # Parse the number of transition times in this zone.
  # Reads `transition_count` 32-bit timestamps; they are later zipped
  # with per-transition info in do_parse_transition_info/3.
  defp do_parse_transition_times(data, %Header{transition_count: tx_count} = header) do
    {times, rest} = parse_array(data, tx_count, &parse_int/1)
    do_parse_transition_info(rest, header, %Zone{transitions: times})
  end
  # Parse transition time info for this zone.
  # Layout: `transition_count` one-byte indices, then `type_count`
  # (gmt_offset, is_dst, abbrev_index) records. Each index selects the
  # record for the transition at the same position, and the raw
  # timestamps collected earlier become each record's :starts_at.
  defp do_parse_transition_info(
         data,
         %Header{transition_count: tx_count, type_count: type_count} = header,
         %Zone{transitions: transitions} = tzfile
       ) do
    {indices, rest} = parse_array(data, tx_count, &parse_uchar/1)
    {txinfos, rest} =
      parse_array(rest, type_count, fn data ->
        {gmt_offset, next} = parse_int(data)
        {is_dst?, next} = parse_char(next)
        {abbrev_index, next} = parse_uchar(next)
        info = %TransitionInfo{
          gmt_offset: gmt_offset,
          is_dst?: is_dst? == 1,
          abbrev_index: abbrev_index
        }
        {info, next}
      end)
    # Resolve each transition's type record and attach its start time.
    txs =
      indices
      |> Enum.map(&Enum.at(txinfos, &1))
      |> Enum.zip(transitions)
      |> Enum.map(fn {info, time} ->
        Map.put(info, :starts_at, time)
      end)
    do_parse_abbreviations(rest, header, %{tzfile | :transitions => txs})
  end
  # Parses zone abbreviations for this zone.
  # The abbreviation table is one NUL-separated character blob; each
  # transition's abbrev_index is a byte offset into it, and
  # take_while_gt(..., 0) reads characters up to the NUL terminator.
  defp do_parse_abbreviations(
         data,
         %Header{abbrev_length: len} = header,
         %Zone{transitions: transitions} = tzfile
       ) do
    {abbrevs, rest} = parse_array(data, len, &parse_char/1)
    txinfos =
      Enum.map(transitions, fn %TransitionInfo{abbrev_index: idx} = tx ->
        abbrev =
          abbrevs
          |> Enum.drop(idx)
          |> take_while_gt(0)
        # "#{abbrev}" turns the charlist into a binary abbreviation.
        %{tx | :abbreviation => "#{abbrev}"}
      end)
    do_parse_leap_seconds(rest, header, %{tzfile | :transitions => txinfos})
  end
  # Parses leap second information for this zone.
  # Each record is a pair of 32-bit integers: the time the leap second
  # occurs and the running count of leap seconds applied after it.
  defp do_parse_leap_seconds(data, %Header{leap_count: count} = header, tzfile) do
    {leaps, rest} =
      parse_array(data, count, fn data ->
        {start, next} = parse_int(data)
        {remaining, next} = parse_int(next)
        leap = %LeapSecond{
          start: start,
          remaining: remaining
        }
        {leap, next}
      end)
    do_parse_flags(rest, header, %{tzfile | :leaps => leaps})
  end
  # Parses the trailing flags in the zoneinfo binary.
  # NOTE(review): despite the boolean-looking field names, the whole
  # lists of 0/1 flag bytes are stored into :std_or_wall? and
  # :utc_or_local? here (one flag per local time type).
  defp do_parse_flags(data, %Header{utc_count: utc_count, wall_count: wall_count}, tzfile) do
    {is_std, rest} = parse_array(data, wall_count, &parse_char/1)
    {is_gmt, _} = parse_array(rest, utc_count, &parse_char/1)
    {:ok, %{tzfile | :std_or_wall? => is_std, :utc_or_local? => is_gmt}}
  end
################
# Parses an array of a primitive type, ex:
# parse_array(<<"test">>, 2, &parse_uchar/1) => [?t, ?e]
###
defp parse_array(data, 0, _parser), do: {[], data}
defp parse_array(data, count, parser) when is_binary(data) and is_function(parser) do
{results, rest} = do_parse_array(data, count, parser, [])
{results, rest}
end
defp do_parse_array(data, 0, _, acc), do: {Enum.reverse(acc), data}
defp do_parse_array(data, count, parser, acc) do
{item, next} = parser.(data)
do_parse_array(next, count - 1, parser, [item | acc])
end
  #################
  # Data Type Parsers
  # Each takes a binary and returns {decoded_value, remaining_binary}.
  defp parse_int(<<val::integer_32bit_be, rest::binary>>), do: {val, rest}
  defp parse_char(<<val::signed_char_be, rest::binary>>), do: {val, rest}
  defp parse_uchar(<<val::unsigned_char_be, rest::binary>>), do: {val, rest}
# Enum.take_while, but not so slow
defp take_while_gt(xs, match), do: take_while_gt(xs, match, [])
defp take_while_gt([], _, acc), do: Enum.reverse(acc)
defp take_while_gt([h | rest], match, acc) when h > match,
do: take_while_gt(rest, match, [h | acc])
defp take_while_gt(_, _, acc), do: Enum.reverse(acc)
end
| 31.422925 | 93 | 0.621761 |
e850c614caf508dfa022523f74047d28d292d757 | 9,509 | exs | Elixir | test/oban/testing_test.exs | polvalente/oban | 7267f8f37f8257b735d498d81cc1e43147cefc49 | [
"Apache-2.0"
] | null | null | null | test/oban/testing_test.exs | polvalente/oban | 7267f8f37f8257b735d498d81cc1e43147cefc49 | [
"Apache-2.0"
] | null | null | null | test/oban/testing_test.exs | polvalente/oban | 7267f8f37f8257b735d498d81cc1e43147cefc49 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.TestingTest do
use Oban.Case
use Oban.Testing, repo: Oban.Test.Repo
alias Oban.TelemetryHandler
@moduletag :integration
  # Defines perform/1 but deliberately does NOT `use Oban.Worker`, so it
  # fails the behaviour check exercised by perform_job/3.
  defmodule InvalidWorker do
    def perform(_), do: :ok
  end
  # Overrides new/2 to pack a {key, val} tuple into a map before calling
  # super, proving perform_job/3 routes non-map args through new/2.
  defmodule OverriddenWorker do
    use Oban.Worker
    @impl Worker
    def new({key, val}, opts) do
      super(%{key => val}, opts)
    end
    @impl Worker
    def perform(%{args: args}) do
      {:ok, args}
    end
  end
  # A second, unrelated behaviour with its own __using__ macro, used to
  # show that a module can implement it alongside Oban.Worker.
  defmodule MyApp.Worker do
    defmacro __using__(_opts) do
      quote do
        @behaviour unquote(__MODULE__)
      end
    end
    @callback process() :: :ok
  end
  # Implements both MyApp.Worker and Oban.Worker; perform_job/3 must still
  # accept it as a valid Oban worker.
  defmodule DoubleBehaviourWorker do
    use MyApp.Worker
    use Oban.Worker
    @impl Oban.Worker
    def perform(_job), do: :ok
    @impl MyApp.Worker
    def process, do: :ok
  end
  # Worker whose perform/1 returns deliberately invalid values (selected
  # by the "action" arg) so the return-value validation can be tested;
  # "bad_code" raises and "bad_timing" sleeps to exercise timeouts.
  defmodule MisbehavedWorker do
    use Oban.Worker
    @impl Oban.Worker
    def perform(%{args: %{"action" => "bad_atom"}}), do: :bad
    def perform(%{args: %{"action" => "bad_string"}}), do: "bad"
    def perform(%{args: %{"action" => "bad_error"}}), do: :error
    def perform(%{args: %{"action" => "bad_tuple"}}), do: {:ok, "bad", :bad}
    def perform(%{args: %{"action" => "bad_snooze"}}), do: {:snooze, true}
    def perform(%{args: %{"action" => "bad_code"}}), do: raise(RuntimeError, "bad")
    def perform(%{args: %{"action" => "bad_timing"}}), do: Process.sleep(10)
    # Timeout is driven by the job args so tests can pick it per-case.
    @impl Oban.Worker
    def timeout(%{args: %{"timeout" => timeout}}), do: timeout
    def timeout(_job), do: :infinity
  end
  # Echoes the job's attempt counter so tests can verify the :attempt
  # option passed to perform_job/3.
  defmodule AttemptDrivenWorker do
    use Oban.Worker
    @impl Oban.Worker
    def perform(%{attempt: attempt}) do
      {:ok, attempt}
    end
  end
  # Echoes the job's timestamps so tests can verify :attempted_at and
  # :scheduled_at options are retained by perform_job/3.
  defmodule TemporalWorker do
    use Oban.Worker
    @impl Oban.Worker
    def perform(%{attempted_at: attempted_at, scheduled_at: scheduled_at}) do
      {:ok, {attempted_at, scheduled_at}}
    end
  end
  # End-to-end coverage of Oban.Testing.perform_job/3: worker validation,
  # job construction, return-value validation, exceptions, timeouts,
  # attempt/timestamp defaults and telemetry emission.
  describe "perform_job/3" do
    test "verifying that the worker implements the Oban.Worker behaviour" do
      message = "worker to be a module that implements"
      assert_perform_error(BogusWorker, message)
      assert_perform_error(InvalidWorker, message)
      :ok = perform_job(DoubleBehaviourWorker, %{})
    end
    test "creating a valid job out of the args and options" do
      assert_perform_error(Worker, %{}, [max_attempts: -1], "args and opts to build a valid job")
      assert_perform_error(
        Worker,
        %{},
        [max_attempts: -1],
        "max_attempts: must be greater than 0"
      )
      assert_perform_error(Worker, %{}, [priority: -1], "priority: must be greater than -1")
    end
    test "passing non-map args through to an overridden new/2 function" do
      {:ok, %{"id" => 1}} = perform_job(OverriddenWorker, {:id, 1})
    end
    test "validating the return value of the worker's perform/1 function" do
      assert_perform_error(MisbehavedWorker, %{"action" => "bad_atom"}, ":bad")
      message = "Expected result to be one of"
      actions = ["bad_string", "bad_error", "bad_tuple", "bad_snooze"]
      for action <- actions do
        assert_perform_error(MisbehavedWorker, %{"action" => action}, message)
      end
    end
    test "returning the value of worker's perform/1 function" do
      assert :ok = perform_job(Worker, %{ref: 1, action: "OK"})
      assert :discard = perform_job(Worker, %{ref: 1, action: "DISCARD"})
      assert {:error, _} = perform_job(Worker, %{ref: 1, action: "ERROR"})
    end
    test "not rescuing unhandled exceptions" do
      assert_raise RuntimeError, fn ->
        perform_job(MisbehavedWorker, %{"action" => "bad_code"})
      end
      # Raises the same way when executed under a finite timeout.
      assert_raise RuntimeError, fn ->
        perform_job(MisbehavedWorker, %{"action" => "bad_code", "timeout" => 20})
      end
    end
    test "respecting a worker's timeout" do
      # Trap exits so the TimeoutError arrives as an :EXIT message
      # instead of killing the test process.
      Process.flag(:trap_exit, true)
      perform_job(MisbehavedWorker, %{"action" => "bad_timing", "timeout" => 1})
      assert_receive {:EXIT, _pid, %Oban.TimeoutError{}}
      perform_job(MisbehavedWorker, %{"action" => "bad_timing", "timeout" => 20})
      refute_receive {:EXIT, _pid, %Oban.TimeoutError{}}
    end
    test "defaulting the number of attempts to mimic real execution" do
      assert {:ok, 1} = perform_job(AttemptDrivenWorker, %{})
      assert {:ok, 2} = perform_job(AttemptDrivenWorker, %{}, attempt: 2)
    end
    test "retaining attempted_at or scheduled_at timestamps" do
      time = ~U[2020-02-20 00:00:00.000000Z]
      assert {:ok, {attempted, scheduled}} =
               perform_job(TemporalWorker, %{}, attempted_at: time, scheduled_at: time)
      assert attempted == time
      assert scheduled == time
    end
    test "emitting appropriate telemetry events" do
      TelemetryHandler.attach_events("perform-job-handler")
      assert :ok = perform_job(Worker, %{ref: 1, action: "OK"})
      assert_receive {:event, :stop, _, %{job: %{args: %{"ref" => 1}}, state: :success}}
      assert {:error, _} = perform_job(Worker, %{ref: 2, action: "ERROR"})
      assert_receive {:event, :exception, _, %{job: %{args: %{"ref" => 2}}, state: :failure}}
      assert_raise RuntimeError, fn ->
        perform_job(Worker, %{ref: 3, action: "FAIL"})
      end
      assert_receive {:event, :exception, _, %{job: %{args: %{"ref" => 3}}, state: :failure}}
    after
      :telemetry.detach("perform-job-handler")
    end
  end
  # all_enqueued/0,1 returns jobs newest-first and filters on the given
  # job fields (worker, queue, ...).
  describe "all_enqueued/0,1" do
    test "retrieving a filtered list of enqueued jobs" do
      insert!(%{id: 1, ref: "a"}, worker: Ping, queue: :alpha)
      insert!(%{id: 2, ref: "b"}, worker: Ping, queue: :alpha)
      insert!(%{id: 3, ref: "c"}, worker: Pong, queue: :gamma)
      # Most recently inserted Ping job comes first.
      assert [%{args: %{"id" => 2}} | _] = all_enqueued(worker: Ping)
      assert [%Job{}] = all_enqueued(worker: Pong, queue: :gamma)
      assert [%Job{}, %Job{}, %Job{}] = all_enqueued()
    end
  end
  # assert_enqueued/1,2 matches on job fields (worker and queue accept
  # modules or strings), supports timestamp deltas, optional polling via a
  # timeout argument, and a descriptive failure message.
  describe "assert_enqueued/1" do
    test "checking for jobs with matching properties" do
      insert!(%{id: 1}, worker: Ping, queue: :alpha)
      insert!(%{id: 2}, worker: Pong, queue: :gamma)
      insert!(%{message: "hello"}, worker: Pong, queue: :gamma)
      assert_enqueued worker: Ping
      assert_enqueued worker: Ping, queue: :alpha
      assert_enqueued worker: Ping, queue: :alpha, args: %{id: 1}
      assert_enqueued worker: Pong
      assert_enqueued worker: "Pong", queue: "gamma"
      assert_enqueued worker: "Pong", queue: "gamma", args: %{message: "hello"}
      assert_enqueued args: %{id: 1}
      assert_enqueued args: %{message: "hello"}
      assert_enqueued worker: Ping, prefix: "public"
    end
    test "checking for jobs with matching timestamps with delta" do
      insert!(%{}, worker: Ping, scheduled_at: seconds_from_now(60))
      assert_enqueued worker: Ping, scheduled_at: seconds_from_now(60)
    end
    test "checking for jobs allows to configure timestamp delta" do
      insert!(%{}, worker: Ping, scheduled_at: seconds_from_now(60))
      # 69s is within the explicit 10s delta of the 60s schedule.
      assert_enqueued worker: Ping, scheduled_at: {seconds_from_now(69), delta: 10}
    end
    test "asserting that jobs are now or will eventually be enqueued" do
      insert!(%{id: 1}, worker: Ping, queue: :alpha)
      # Second job only appears after 50ms; the 100ms timeout polls for it.
      Task.async(fn ->
        Process.sleep(50)
        insert!(%{id: 2}, worker: Pong, queue: :alpha)
      end)
      assert_enqueued [worker: Pong, args: %{id: 2}], 100
      assert_enqueued [worker: Ping, args: %{id: 1}], 100
    end
    test "printing a helpful error message" do
      insert!(%{dest: "some_node"}, worker: Ping)
      try do
        assert_enqueued worker: Ping, args: %{dest: "other_node"}
      rescue
        error in [ExUnit.AssertionError] ->
          expected = """
          Expected a job matching:
            %{args: %{dest: "other_node"}, worker: Ping}
          to be enqueued in the "public" schema. Instead found:
            [%{args: %{"dest" => "some_node"}, worker: "Ping"}]
          """
          assert error.message == expected
      end
    end
  end
  # refute_enqueued/1,2 mirrors assert_enqueued: non-available states
  # (completed/discarded) don't count, and a timeout argument keeps
  # refuting for the given number of milliseconds.
  describe "refute_enqueued/1" do
    test "refuting jobs with specific properties have been enqueued" do
      insert!(%{id: 1}, worker: Ping, queue: :alpha)
      insert!(%{id: 2}, worker: Pong, queue: :gamma)
      insert!(%{id: 3}, worker: Pong, queue: :gamma, state: "completed")
      insert!(%{id: 4}, worker: Pong, queue: :gamma, state: "discarded")
      insert!(%{message: "hello"}, worker: Pong, queue: :gamma)
      refute_enqueued worker: Pongo
      refute_enqueued worker: Ping, args: %{id: 2}
      refute_enqueued worker: Pong, args: %{id: 3}
      refute_enqueued worker: Pong, args: %{id: 4}
      refute_enqueued worker: Ping, queue: :gamma
      refute_enqueued worker: Pong, queue: :gamma, args: %{message: "helo"}
      refute_enqueued worker: Ping, prefix: "private"
    end
    test "refuting that jobs will eventually be enqueued" do
      # The job is inserted after 50ms; the 20ms window closes first.
      Task.async(fn ->
        Process.sleep(50)
        insert!(%{id: 1}, worker: Ping, queue: :alpha)
      end)
      refute_enqueued [worker: Ping, args: %{id: 1}], 20
    end
  end
  # Convenience clauses defaulting args/opts for the 4-arity helper below.
  defp assert_perform_error(worker, message) when is_binary(message) do
    assert_perform_error(worker, %{}, [], message)
  end
  defp assert_perform_error(worker, args, message) when is_binary(message) do
    assert_perform_error(worker, args, [], message)
  end
  # Runs perform_job/3 expecting it to raise an ExUnit.AssertionError
  # whose message contains `message`; fails if no error is raised.
  defp assert_perform_error(worker, args, opts, message) do
    perform_job(worker, args, opts)
    assert false, "This should not be reached"
  rescue
    error in [ExUnit.AssertionError] -> assert error.message =~ message
  end
end
| 31.279605 | 97 | 0.628668 |
e850d59ebcb6ac08ed4a9aa8ec797893837eac3e | 352 | exs | Elixir | mix.exs | faithandbrave/elixir_recursive_file | c9e840bf5716b7e40e1006c34409a57cdd507cbc | [
"MIT"
] | null | null | null | mix.exs | faithandbrave/elixir_recursive_file | c9e840bf5716b7e40e1006c34409a57cdd507cbc | [
"MIT"
] | null | null | null | mix.exs | faithandbrave/elixir_recursive_file | c9e840bf5716b7e40e1006c34409a57cdd507cbc | [
"MIT"
] | null | null | null | defmodule ElixirRecursiveFile.Mixfile do
use Mix.Project
  # Standard Mix project definition for the :elixir_recursive_file app.
  def project do
    [app: :elixir_recursive_file,
     version: "0.1.0",
     elixir: "~> 1.3",
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     deps: deps()]
  end
  # OTP application config. Uses the pre-Elixir-1.4 `:applications` key,
  # which REPLACES the inferred application list rather than extending it
  # (changing to :extra_applications would alter behavior).
  def application do
    [applications: [:logger]]
  end
  # No external dependencies.
  defp deps do
    []
  end
end
| 16.761905 | 40 | 0.616477 |
e851279bb5017b0ea95af083d5b0ff5f9a355224 | 1,475 | ex | Elixir | lib/robolia_web/views/error_helpers.ex | Robolia/robolia | cbd3ea24db460f213a3411f474d765536ae7871c | [
"MIT"
] | 1 | 2020-01-19T04:27:46.000Z | 2020-01-19T04:27:46.000Z | lib/robolia_web/views/error_helpers.ex | Robolia/robolia | cbd3ea24db460f213a3411f474d765536ae7871c | [
"MIT"
] | null | null | null | lib/robolia_web/views/error_helpers.ex | Robolia/robolia | cbd3ea24db460f213a3411f474d765536ae7871c | [
"MIT"
] | 1 | 2018-05-13T14:06:46.000Z | 2018-05-13T14:06:46.000Z | defmodule RoboliaWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error), class: "help-block")
end)
end
  @doc """
  Translates an error message using gettext.

  Accepts the `{msg, opts}` tuple produced by Ecto validations and
  returns the translated string, applying plural rules when `opts`
  carries a `:count`.
  """
  def translate_error({msg, opts}) do
    # When using gettext, we typically pass the strings we want
    # to translate as a static argument:
    #
    #     # Translate "is invalid" in the "errors" domain
    #     dgettext "errors", "is invalid"
    #
    #     # Translate the number of files with plural rules
    #     dngettext "errors", "1 file", "%{count} files", count
    #
    # Because the error messages we show in our forms and APIs
    # are defined inside Ecto, we need to translate them dynamically.
    # This requires us to call the Gettext module passing our gettext
    # backend as first argument.
    #
    # Note we use the "errors" domain, which means translations
    # should be written to the errors.po file. The :count option is
    # set by Ecto and indicates we should also apply plural rules.
    if count = opts[:count] do
      Gettext.dngettext(RoboliaWeb.Gettext, "errors", msg, msg, count, opts)
    else
      Gettext.dgettext(RoboliaWeb.Gettext, "errors", msg, opts)
    end
  end
end
| 32.777778 | 76 | 0.671186 |
e8512fae6919e76a374f0ef725f7315bb5c27f28 | 3,632 | exs | Elixir | project/queens/test/queen_attack_test.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | null | null | null | project/queens/test/queen_attack_test.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | 1 | 2018-06-19T18:59:41.000Z | 2018-06-19T18:59:41.000Z | project/queens/test/queen_attack_test.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | null | null | null | #if !System.get_env("EXERCISM_TEST_EXAMPLES") do
# Code.load_file("queen_attack.exs", __DIR__)
#end
#ExUnit.start()
#ExUnit.configure(exclude: :pending, trace: true)
defmodule QueenAttackTest do
  # Exercism-style tests for a Queens module: placement, board rendering
  # and attack detection. All but the first test are tagged :pending.
  use ExUnit.Case
  # NOTE(review): assumes Queens.init/0 returns a value ExUnit accepts as
  # a setup_all result (:ok, {:ok, ctx}, map or keyword) — TODO confirm.
  setup_all do
    IO.puts "===== setup_all ======="
    Queens.init() |> IO.inspect
  end
  # Tag is commented out, so this test always runs.
  # @tag :pending
  test "default positions" do
    queens = Queens.new()
    assert queens.white == {0, 3}
    assert queens.black == {7, 3}
  end
  @tag :pending
  test "specific placement" do
    queens = Queens.new({3, 7}, {6, 1})
    assert queens.white == {3, 7}
    assert queens.black == {6, 1}
  end
  @tag :pending
  test "cannot occupy same space" do
    assert_raise ArgumentError, fn ->
      Queens.new({2, 4}, {2, 4})
    end
  end
  @tag :pending
  test "string representation" do
    queens = Queens.new({2, 4}, {6, 6})
    board =
      String.trim("""
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ W _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ B _
      _ _ _ _ _ _ _ _
      """)
    assert Queens.to_string(queens) == board
  end
  @tag :pending
  test "another string representation" do
    queens = Queens.new({7, 1}, {0, 0})
    board =
      String.trim("""
      B _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ W _ _ _ _ _ _
      """)
    assert Queens.to_string(queens) == board
  end
  @tag :pending
  test "yet another string representation" do
    queens = Queens.new({4, 3}, {3, 4})
    board =
      String.trim("""
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ B _ _ _
      _ _ _ W _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      """)
    assert Queens.to_string(queens) == board
  end
  @tag :pending
  test "queen placed on the bottom right corner" do
    queens = Queens.new({4, 3}, {7, 7})
    board =
      String.trim("""
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ W _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ B
      """)
    assert Queens.to_string(queens) == board
  end
  @tag :pending
  test "queen placed on the edge of the board" do
    queens = Queens.new({4, 3}, {2, 7})
    board =
      String.trim("""
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ B
      _ _ _ _ _ _ _ _
      _ _ _ W _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      _ _ _ _ _ _ _ _
      """)
    assert Queens.to_string(queens) == board
  end
  @tag :pending
  test "cannot attack" do
    queens = Queens.new({2, 3}, {4, 7})
    refute Queens.can_attack?(queens)
  end
  @tag :pending
  test "can attack on same row" do
    queens = Queens.new({2, 4}, {2, 7})
    assert Queens.can_attack?(queens)
  end
  @tag :pending
  test "can attack on same column" do
    queens = Queens.new({5, 4}, {2, 4})
    assert Queens.can_attack?(queens)
  end
  @tag :pending
  test "can attack on diagonal" do
    queens = Queens.new({1, 1}, {6, 6})
    assert Queens.can_attack?(queens)
  end
  @tag :pending
  test "can attack on other diagonal" do
    queens = Queens.new({0, 6}, {1, 7})
    assert Queens.can_attack?(queens)
  end
  @tag :pending
  test "can attack on yet another diagonal" do
    queens = Queens.new({4, 1}, {6, 3})
    assert Queens.can_attack?(queens)
  end
  @tag :pending
  test "can attack on a diagonal slanted the other way" do
    queens = Queens.new({6, 1}, {1, 6})
    assert Queens.can_attack?(queens)
  end
end
| 20.99422 | 58 | 0.547357 |
e851417aea70c0eb9de6d00b6cbab2c72861d17d | 11,020 | ex | Elixir | priv/templates/coh.install/views/coherence/coherence_view_helpers.ex | pmarreck/coherence | aa0ef8403197dfd262863f4b0e592122a1a3e525 | [
"MIT"
] | 1,347 | 2016-07-04T23:20:10.000Z | 2022-02-10T20:10:48.000Z | priv/templates/coh.install/views/coherence/coherence_view_helpers.ex | pmarreck/coherence | aa0ef8403197dfd262863f4b0e592122a1a3e525 | [
"MIT"
] | 378 | 2016-07-06T16:30:28.000Z | 2021-09-16T13:34:05.000Z | priv/templates/coh.install/views/coherence/coherence_view_helpers.ex | pmarreck/coherence | aa0ef8403197dfd262863f4b0e592122a1a3e525 | [
"MIT"
] | 276 | 2016-07-06T20:26:17.000Z | 2021-12-06T19:32:41.000Z | defmodule <%= web_base %>.Coherence.ViewHelpers do
@moduledoc """
Helper functions for Coherence Views.
Use these convenience functions to generate coherence links in your application.
Each link supports a number of options to customize the returned markup.
For example, to generate links in your layout template use:
<%= web_base %>.Coherence.ViewHelps.coherence_links/3
This will create a profile and Sign Out link for logged users and a Sign In link
when the user is not logged in. It will also generate other links depending on
the installed Coherence options.
The link text uses gettext.
"""
use Phoenix.HTML
alias Coherence.Config
import <%= web_base %>.Gettext
  @type conn :: Plug.Conn.t()
  @type schema :: Ecto.Schema.t()
  # NOTE(review): attribute name is misspelled ("seperator"); left as-is
  # in case it is referenced by code outside this view — confirm before
  # renaming.
  @seperator {:safe, " | "}
  @helpers <%= web_base %>.Router.Helpers
  # English defaults interpolated into the @doc strings below; the
  # runtime (localized) text comes from the *_link_text/0 functions.
  @recover_link_text "Forgot your password?"
  @unlock_link_text "Send an unlock email"
  @register_link_text "Need An Account?"
  @confirm_link_text "Resend confirmation email"
  @signin_link_text "Sign In"
  @signout_link_text "Sign Out"
  # Each helper below resolves its text through the "coherence" gettext
  # domain so applications can localize the default link labels.
  @doc """
  Get the default text for the Forgot your password link.
  """
  def recover_link_text(), do: dgettext("coherence", "Forgot your password?")
  @doc """
  Get the default text for the Send an unlock email link.
  """
  def unlock_link_text(), do: dgettext("coherence", "Send an unlock email")
  @doc """
  Get the default text for the Need An Account? link.
  """
  def register_link_text(), do: dgettext("coherence", "Need An Account?")
  @doc """
  Get the default text for the Invite Someone link.
  """
  def invite_link_text(), do: dgettext("coherence", "Invite Someone")
  @doc """
  Get the default text for the Resend confirmation email link.
  """
  def confirm_link_text(), do: dgettext("coherence", "Resend confirmation email")
  @doc """
  Get the default text for the Sign In link.
  """
  def signin_link_text(), do: dgettext("coherence", "Sign In")
  @doc """
  Get the default text for the Sign Out link.
  """
  def signout_link_text(), do: dgettext("coherence", "Sign Out")
@doc """
Create coherence template links.
Generates links if the appropriate option is installed. This function
can be used to:
* create links for the new session page `:new_session`
* create links for your layout template `:layout`
Defaults are provided based on the options configured for Coherence.
However, the defaults can be overridden by passing the following options.
## Customize the links
### :new_session Options
* :recover - customize the recover link (#{@recover_link_text})
* :unlock - customize the unlock link (#{@unlock_link_text})
* :register - customize the register link (#{@register_link_text})
* :confirm - customize the confirm link (#{@confirm_link_text})
### :layout Options
* :list_tag - customize the list tag (:li)
* :signout_class - customize the class on the signout link ("navbar-form")
* :signin - customize the signin link text (#{@signin_link_text})
* :signout - customize the signout link text (#{@signout_link_text})
* :register - customize the register link text (#{@register_link_text})
### Disable links
If you set an option to false, the link will not be shown. For example, to
disable the register link on the layout, use the following in your layout template:
coherence_links(conn, :layout, register: false)
## Examples
coherence_links(conn, :new_session)
Generates: #{@recover_link_text} #{@unlock_link_text} #{@register_link_text} #{
@confirm_link_text
}
coherence_links(conn, :new_session, recover: "Password reset", register: false
Generates: #{@unlock_link_text}
coherence_links(conn, :layout) # when logged in
Generates: User's Name #{@signout_link_text}
coherence_links(conn, :layout) # when not logged in
Generates: #{@register_link_text} #{@signin_link_text}
"""
@spec coherence_links(conn, atom, keyword()) :: tuple
def coherence_links(conn, which, opts \\ [])
def coherence_links(conn, :new_session, opts) do
recover_link = Keyword.get(opts, :recover, recover_link_text())
unlock_link = Keyword.get(opts, :unlock, unlock_link_text())
register_link = Keyword.get(opts, :register, register_link_text())
confirm_link = Keyword.get(opts, :confirm, confirm_link_text())
user_schema = Coherence.Config.user_schema()
[
recover_link(conn, user_schema, recover_link),
unlock_link(conn, user_schema, unlock_link),
register_link(conn, user_schema, register_link),
confirmation_link(conn, user_schema, confirm_link)
]
|> List.flatten()
|> concat([])
end
def coherence_links(conn, :layout, opts) do
list_tag = Keyword.get(opts, :list_tag, :li)
signout_class = Keyword.get(opts, :signout_class, "navbar-form")
signin = Keyword.get(opts, :signin, signin_link_text())
signout = Keyword.get(opts, :signout, signout_link_text())
register = Keyword.get(opts, :register, register_link_text())
if Coherence.logged_in?(conn) do
current_user = Coherence.current_user(conn)
[
content_tag(list_tag, profile_link(current_user, conn)),
content_tag(list_tag, signout_link(conn, signout, signout_class))
]
else
signin_link =
content_tag(
list_tag,
link(signin, to: coherence_path(@helpers, :session_path, conn, :new))
)
if Config.has_option(:registerable) && register do
[
content_tag(
list_tag,
link(register, to: coherence_path(@helpers, :registration_path, conn, :new))
),
signin_link
]
else
signin_link
end
end
end
@doc """
Helper to avoid compile warnings when options are disabled.
"""
@spec coherence_path(module, atom, conn, atom) :: String.t()
def coherence_path(module, route_name, conn, action) do
apply(module, route_name, [conn, action])
end
def coherence_path(module, route_name, conn, action, opts) do
apply(module, route_name, [conn, action, opts])
end
defp concat([], acc), do: Enum.reverse(acc)
defp concat([h | t], []), do: concat(t, [h])
defp concat([h | t], acc), do: concat(t, [h, @seperator | acc])
@doc """
Generate the recover password link.
"""
@spec recover_link(conn, module, false | String.t()) :: [any] | []
def recover_link(_conn, _user_schema, false), do: []
def recover_link(conn, user_schema, text) do
if user_schema.recoverable?, do: [recover_link(conn, text)], else: []
end
@spec recover_link(conn, String.t()) :: tuple
def recover_link(conn, text \\ recover_link_text()),
do: link(text, to: coherence_path(@helpers, :password_path, conn, :new))
@doc """
Generate the new account registration link.
"""
@spec register_link(conn, module, false | String.t()) :: [any] | []
def register_link(_conn, _user_schema, false), do: []
def register_link(conn, user_schema, text) do
if user_schema.registerable?, do: [register_link(conn, text)], else: []
end
@spec register_link(conn, String.t()) :: tuple
def register_link(conn, text \\ register_link_text()),
do: link(text, to: coherence_path(@helpers, :registration_path, conn, :new))
@doc """
Generate the unlock account link.
"""
@spec unlock_link(conn, module, false | String.t()) :: [any] | []
def unlock_link(_conn, _user_schema, false), do: []
def unlock_link(conn, _user_schema, text) do
if conn.assigns[:locked], do: [unlock_link(conn, text)], else: []
end
@spec unlock_link(conn, String.t()) :: tuple
def unlock_link(conn, text \\ unlock_link_text()),
do: link(text, to: coherence_path(@helpers, :unlock_path, conn, :new))
@doc """
Generate the invitation link.
"""
@spec invitation_link(conn, String.t()) :: tuple
def invitation_link(conn, text \\ invite_link_text()) do
link(text, to: coherence_path(@helpers, :invitation_path, conn, :new))
end
@doc """
Generate the sign out link.
"""
@spec signout_link(conn, String.t(), String.t()) :: tuple
def signout_link(conn, text \\ signout_link_text(), signout_class \\ "") do
link(
text,
to: coherence_path(@helpers, :session_path, conn, :delete),
method: :delete,
class: signout_class
)
end
@doc """
Generate the resend confirmation link.
"""
@spec confirmation_link(conn, module, false | String.t()) :: [any] | []
def confirmation_link(_conn, _user_schema, false), do: []
def confirmation_link(conn, user_schema, text) do
if user_schema.confirmable?, do: [confirmation_link(conn, text)], else: []
end
@spec confirmation_link(conn, String.t()) :: tuple
def confirmation_link(conn, text \\ confirm_link_text()) do
link(text, to: coherence_path(@helpers, :confirmation_path, conn, :new))
end
@doc """
Generate the required label.
"""
@spec required_label(atom, String.t() | atom, keyword()) :: tuple
def required_label(f, name, opts \\ []) do
{label, opts} = Keyword.pop(opts, :label)
label = label || humanize(name)
label f, name, opts do
[
"#{label}\n",
content_tag(:abbr, "*", class: "required", title: dgettext("coherence", "required"))
]
end
end
@doc """
Helper to get the current user.
"""
@spec current_user(conn) :: schema
def current_user(conn) do
Coherence.current_user(conn)
end
@doc """
Helper to test if the user is currently logged in.
"""
@spec logged_in?(conn) :: boolean
def logged_in?(conn) do
Coherence.logged_in?(conn)
end
defp profile_link(current_user, conn) do
if Config.user_schema().registerable? do
link(current_user.name, to: coherence_path(@helpers, :registration_path, conn, :show))
else
current_user.name
end
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# Because error messages were defined within Ecto, we must
# call the Gettext module passing our Gettext backend. We
# also use the "errors" domain as translations are placed
# in the errors.po file.
# Ecto will pass the :count keyword if the error message is
# meant to be pluralized.
# On your own code and templates, depending on whether you
# need the message to be pluralized or not, this could be
# written simply as:
#
# dngettext "errors", "1 file", "%{count} files", count
# dgettext "errors", "is invalid"
#
if count = opts[:count] do
Gettext.dngettext(<%= web_base %>.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(<%= web_base %>.Gettext, "errors", msg, opts)
end
end
@doc """
Generates an error string from changeset errors.
"""
def error_string_from_changeset(changeset) do
Enum.map(changeset.errors, fn {k, v} ->
"#{Phoenix.Naming.humanize(k)} #{translate_error(v)}"
end)
|> Enum.join(". ")
end
end
| 31.849711 | 92 | 0.670054 |
e85171bb012aac43a5bfe2c94230d8c568fc1d26 | 702 | ex | Elixir | clients/hydra/elixir/lib/ory/model/reject_request.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/hydra/elixir/lib/ory/model/reject_request.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/hydra/elixir/lib/ory/model/reject_request.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule Ory.Model.RejectRequest do
  @moduledoc """
  """

  # Encoded to JSON via the derived Poison implementation.
  @derive [Poison.Encoder]

  # All fields are optional and default to nil.
  defstruct [:error, :error_debug, :error_description, :error_hint, :status_code]

  @type t :: %__MODULE__{
          error: String.t() | nil,
          error_debug: String.t() | nil,
          error_description: String.t() | nil,
          error_hint: String.t() | nil,
          status_code: integer() | nil
        }
end
defimpl Poison.Decoder, for: Ory.Model.RejectRequest do
  # No nested models need decoding: the struct is returned unchanged.
  def decode(decoded, _options), do: decoded
end
| 20.647059 | 91 | 0.636752 |
e8519005471d4ff9bc480abc10674034f5205693 | 2,004 | ex | Elixir | lib/koans/09_map_sets.ex | camisetags/elixir-koans | 7bb1655d0f290c0834276091efa5a28b416dd91a | [
"MIT"
] | 1 | 2020-11-23T17:01:37.000Z | 2020-11-23T17:01:37.000Z | lib/koans/09_map_sets.ex | camisetags/elixir-koans | 7bb1655d0f290c0834276091efa5a28b416dd91a | [
"MIT"
] | null | null | null | lib/koans/09_map_sets.ex | camisetags/elixir-koans | 7bb1655d0f290c0834276091efa5a28b416dd91a | [
"MIT"
defmodule MapSets do
  use Koans

  @intro "My name is Set, MapSet."

  # Shared fixture used by most of the koans below.
  @set MapSet.new([1, 2, 3, 4, 5])

  # NOTE: the `___` blanks are placeholders supplied by the Koans framework;
  # the student fills them in and the runner checks the assertions.
  koan "I am very similar to a list" do
    assert Enum.fetch(@set, 0) == {:ok, ___}
  end

  koan "However, I do not allow duplication" do
    new_set = MapSet.new([1, 1, 2, 3, 3, 3])
    assert MapSet.size(new_set) == ___
  end

  # Helper: true when converting the set to a list yields an already-sorted list.
  def sorted?(set) do
    list = MapSet.to_list(set)
    sorted = Enum.sort(list)
    list == sorted
  end

  koan "You cannot depend on my order" do
    new_set = MapSet.new(1..33)
    assert sorted?(new_set) == ___

    # Note: The number "33" is actually special here. Erlang uses a different
    # implementation for maps after 32 elements which does not maintain order.
    # http://stackoverflow.com/a/40408469
    # What do you think this answer to this assertion is?
    assert sorted?(@set) == ___
  end

  koan "Does this value exist in the map set?" do
    assert MapSet.member?(@set, 3) == ___
  end

  koan "I am merely another collection, but you can perform some operations on me" do
    # MapSet.new/2 applies the transform to every element while building the set.
    new_set = MapSet.new(@set, fn x -> 3 * x end)
    assert MapSet.member?(new_set, 15) == ___
    assert MapSet.member?(new_set, 1) == ___
  end

  koan "Add this value into a map set" do
    modified_set = MapSet.put(@set, 6)
    assert MapSet.member?(modified_set, 6) == ___
  end

  koan "Delete this value from the map set" do
    modified_set = MapSet.delete(@set, 1)
    assert MapSet.member?(modified_set, 1) == ___
  end

  koan "Are these maps twins?" do
    new_set = MapSet.new([1, 2, 3])
    assert MapSet.equal?(@set, new_set) == ___
  end

  koan "I want only the common values in both sets" do
    intersection_set = MapSet.intersection(@set, MapSet.new([5, 6, 7]))
    assert MapSet.member?(intersection_set, 5) == ___
  end

  koan "Unify my sets" do
    new_set = MapSet.union(@set, MapSet.new([1, 5, 6, 7]))
    assert MapSet.size(new_set) == ___
  end

  koan "I want my set in a list" do
    assert MapSet.to_list(@set) == ___
  end
end
| 24.740741 | 85 | 0.646707 |
e85197ff8ec6b343970c9b01d7e433745fe857cf | 1,957 | exs | Elixir | test/kazan/server_test.exs | vickerysec/obmarg_kazan | 27e9ab923dd2a3b89e030de918f4d7c9da316248 | [
"MIT"
] | null | null | null | test/kazan/server_test.exs | vickerysec/obmarg_kazan | 27e9ab923dd2a3b89e030de918f4d7c9da316248 | [
"MIT"
] | null | null | null | test/kazan/server_test.exs | vickerysec/obmarg_kazan | 27e9ab923dd2a3b89e030de918f4d7c9da316248 | [
"MIT"
defmodule Kazan.ServerTest do
  use ExUnit.Case

  alias Kazan.Server
  alias Kazan.Server.{CertificateAuth, TokenAuth}

  # Fixture kubeconfig shared by the from_kubeconfig tests.
  @kubeconfig "test/test_data/kubeconfig"

  describe "Server.from_kubeconfig" do
    test "loads default context" do
      server = Server.from_kubeconfig(@kubeconfig)

      assert server.url == "https://172.17.4.99:443"
      assert server.ca_cert
      assert server.auth
      assert server.auth.certificate
      assert server.auth.key
    end

    test "can load other context" do
      server = Server.from_kubeconfig(@kubeconfig, context: "other-context")

      assert server.url == "https://172.17.4.99:443"
      assert server.ca_cert
      refute server.auth
    end

    test "can load non default user" do
      server = Server.from_kubeconfig(@kubeconfig, user: "other-user")

      assert server.url == "https://172.17.4.99:443"
      assert server.ca_cert
      refute server.auth
    end
  end

  describe "Server.from_map" do
    test "converts the map into a server struct" do
      input = %{
        url: "http://example.com",
        ca_cert: "abcd",
        insecure_skip_tls_verify: false
      }

      expected = %Server{
        url: "http://example.com",
        ca_cert: "abcd",
        insecure_skip_tls_verify: false,
        auth: nil
      }

      assert Server.from_map(input) == expected
    end

    test "converts token auth" do
      server = Server.from_map(%{auth: %{token: "abcde"}})

      assert server.auth == %TokenAuth{token: "abcde"}
    end

    test "converts certificate auth" do
      server = Server.from_map(%{auth: %{certificate: "abcde", key: "fghij"}})

      assert server.auth == %CertificateAuth{certificate: "abcde", key: "fghij"}
    end

    test "raises on unknown auth" do
      assert_raise RuntimeError, fn ->
        Server.from_map(%{auth: %{something: "abcd"}})
      end
    end
  end
end
| 26.808219 | 80 | 0.607052 |
e8519ae3877ad62b6567d0ed9e09f28c9ac8b7eb | 577 | ex | Elixir | chapter20/my.ex | PaoloLaurenti/programming_elixir | b243097dafac7d95632ada1035e8ab62248bbff6 | [
"MIT"
] | null | null | null | chapter20/my.ex | PaoloLaurenti/programming_elixir | b243097dafac7d95632ada1035e8ab62248bbff6 | [
"MIT"
] | null | null | null | chapter20/my.ex | PaoloLaurenti/programming_elixir | b243097dafac7d95632ada1035e8ab62248bbff6 | [
"MIT"
defmodule My do
  @doc """
  A hand-rolled `unless/2` macro.

  Expands to a `case` that runs the `:do` block when `condition` evaluates
  to a falsy value (`nil` or `false`) and the `:else` block otherwise.
  """
  defmacro unless(condition, clauses) do
    on_falsy = clauses[:do]
    on_truthy = clauses[:else]

    quote do
      case unquote(condition) do
        falsy when falsy in [nil, false] -> unquote(on_falsy)
        _truthy -> unquote(on_truthy)
      end
    end
  end
end
defmodule Test do
  # Exercises My.unless at module compile time; the printed output must
  # match the original ("PRINT OK" both times, separated by a banner).
  require My

  My.unless 1 !== 2 do
    IO.puts("PRINT ERROR")
  else
    IO.puts("PRINT OK")
  end

  Enum.each(["", "==============", ""], &IO.puts/1)

  My.unless 1 === 2 do
    IO.puts("PRINT OK")
  else
    IO.puts("PRINT ERROR")
  end
end
| 16.027778 | 58 | 0.592721 |
e851e7b07301d445af8bd79e2e4b950f0d9fa661 | 416 | ex | Elixir | apps/tai/lib/tai/venue_adapters/gdax/maker_taker_fees.ex | ihorkatkov/tai | 09f9f15d2c385efe762ae138a8570f1e3fd41f26 | [
"MIT"
] | 1 | 2019-12-19T05:16:26.000Z | 2019-12-19T05:16:26.000Z | apps/tai/lib/tai/venue_adapters/gdax/maker_taker_fees.ex | ihorkatkov/tai | 09f9f15d2c385efe762ae138a8570f1e3fd41f26 | [
"MIT"
] | null | null | null | apps/tai/lib/tai/venue_adapters/gdax/maker_taker_fees.ex | ihorkatkov/tai | 09f9f15d2c385efe762ae138a8570f1e3fd41f26 | [
"MIT"
defmodule Tai.VenueAdapters.Gdax.MakerTakerFees do
  # TODO:
  # When an API endpoint for the user's fee rate is added it should be used
  # to obtain an accurate 30 day trailing fee rate. Until then assume the
  # highest taker fee rate of 0.30% (and a zero maker fee).
  def maker_taker_fees(_venue_id, _account_id, _credentials) do
    {:ok, {Decimal.new(0), Decimal.new("0.003")}}
  end
end
| 32 | 77 | 0.711538 |
e851fde11ff0bba212f06fd019ba0effc9e17900 | 418 | ex | Elixir | lib/rocketpay/accounts/deposit.ex | caiocichetti/nlw-04-rocketpay | b68c55841e6298757007136859d7b9a98702d127 | [
"MIT"
] | 80 | 2021-02-26T13:26:00.000Z | 2022-01-31T15:13:18.000Z | lib/rocketpay/accounts/deposit.ex | caiocichetti/nlw-04-rocketpay | b68c55841e6298757007136859d7b9a98702d127 | [
"MIT"
] | null | null | null | lib/rocketpay/accounts/deposit.ex | caiocichetti/nlw-04-rocketpay | b68c55841e6298757007136859d7b9a98702d127 | [
"MIT"
defmodule Rocketpay.Accounts.Deposit do
  # Builds the deposit Ecto.Multi and executes it in a single transaction.
  alias Rocketpay.Accounts.Operation
  alias Rocketpay.Repo

  def call(params), do: params |> Operation.call(:deposit) |> run_transaction()

  # Returns {:ok, account} on success, {:error, reason} when any step of the
  # multi fails.
  defp run_transaction(multi) do
    case Repo.transaction(multi) do
      {:ok, %{deposit: account}} ->
        {:ok, account}

      {:error, _failed_operation, reason, _changes_so_far} ->
        {:error, reason}
    end
  end
end
| 19.904762 | 47 | 0.643541 |
e85248f39fba178bdc2230b69cab3ac73760fffb | 4,095 | ex | Elixir | lib/elixir/lib/tuple.ex | TurtleAI/elixir | 2fb41ebef4d06315dd6c05ee00899572b27ee50a | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/tuple.ex | TurtleAI/elixir | 2fb41ebef4d06315dd6c05ee00899572b27ee50a | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/tuple.ex | TurtleAI/elixir | 2fb41ebef4d06315dd6c05ee00899572b27ee50a | [
"Apache-2.0"
defmodule Tuple do
  @moduledoc """
  Functions for working with tuples.

  Tuples are ordered collections that may hold elements of any type, including
  a mix of types, and are written with curly braces:

      iex> {}
      {}
      iex> {1, :two, "three"}
      {1, :two, "three"}

  Elements are stored contiguously in memory, so reading an element by index
  (`Kernel.elem/2`) and querying the size (`Kernel.tuple_size/1`) are
  constant-time operations:

      iex> tuple = {1, :two, "three"}
      iex> elem(tuple, 0)
      1
      iex> tuple_size(tuple)
      3

  The flip side of contiguous storage is that any update (for example via
  `Kernel.put_elem/3`) copies the entire tuple.

  Tuples are not meant to be used as a general "collection" type (note they do
  not implement the `Enumerable` protocol); they are best used as fixed-size
  containers for multiple elements. A common pattern is returning "enriched"
  values, such as `{:ok, value}` for success and `{:error, reason}` for
  failure, which is exactly what `File.read/1` does (e.g. `{:error, :enoent}`
  when the file does not exist).

  More functions for working with tuples can be found in `Kernel`
  (`Kernel.tuple_size/1`, `Kernel.elem/2`, `Kernel.put_elem/3`, and others).
  """

  @doc """
  Creates a new tuple of the given `size` with every position set to `data`.

  Inlined by the compiler.

  ## Examples

      iex> Tuple.duplicate(:hello, 3)
      {:hello, :hello, :hello}

  """
  @spec duplicate(term, non_neg_integer) :: tuple
  def duplicate(data, size), do: :erlang.make_tuple(size, data)

  @doc """
  Inserts `value` into `tuple` at the given zero-based `index`.

  Raises an `ArgumentError` if `index` is negative or greater than the
  length of `tuple`.

  Inlined by the compiler.

  ## Examples

      iex> tuple = {:bar, :baz}
      iex> Tuple.insert_at(tuple, 0, :foo)
      {:foo, :bar, :baz}
      iex> Tuple.insert_at(tuple, 2, :bong)
      {:bar, :baz, :bong}

  """
  @spec insert_at(tuple, non_neg_integer, term) :: tuple
  # Erlang's insert_element/3 is one-based, hence the index + 1.
  def insert_at(tuple, index, value), do: :erlang.insert_element(index + 1, tuple, value)

  @doc """
  Returns a new tuple with `value` appended as the last element of `tuple`.

  Inlined by the compiler.

  ## Examples

      iex> tuple = {:foo, :bar}
      iex> Tuple.append(tuple, :baz)
      {:foo, :bar, :baz}

  """
  @spec append(tuple, term) :: tuple
  def append(tuple, value), do: :erlang.append_element(tuple, value)

  @doc """
  Removes the element at the given zero-based `index` from `tuple`.

  Raises an `ArgumentError` if `index` is negative or greater than or equal
  to the length of `tuple`.

  Inlined by the compiler.

  ## Examples

      iex> tuple = {:foo, :bar, :baz}
      iex> Tuple.delete_at(tuple, 0)
      {:bar, :baz}

  """
  @spec delete_at(tuple, non_neg_integer) :: tuple
  # Erlang's delete_element/2 is one-based, hence the index + 1.
  def delete_at(tuple, index), do: :erlang.delete_element(index + 1, tuple)

  @doc """
  Converts `tuple` to a list containing all of its elements in order.

  Inlined by the compiler.

  ## Examples

      iex> tuple = {:foo, :bar, :baz}
      iex> Tuple.to_list(tuple)
      [:foo, :bar, :baz]

  """
  @spec to_list(tuple) :: list
  def to_list(tuple), do: :erlang.tuple_to_list(tuple)
end
| 27.119205 | 80 | 0.66105 |
e852540a68019f17761216512389355376238418 | 1,323 | exs | Elixir | test/wabanex_web/schema_test.exs | brunodantas/wabanex | 4d8e4258969203cf0adf5b6561fbd862085afed7 | [
"MIT"
] | null | null | null | test/wabanex_web/schema_test.exs | brunodantas/wabanex | 4d8e4258969203cf0adf5b6561fbd862085afed7 | [
"MIT"
] | null | null | null | test/wabanex_web/schema_test.exs | brunodantas/wabanex | 4d8e4258969203cf0adf5b6561fbd862085afed7 | [
"MIT"
defmodule WabanexWeb.SchemaTest do
  use WabanexWeb.ConnCase, async: true

  alias Wabanex.User
  alias Wabanex.Users.Create

  describe "users queries" do
    test "when a valid id is given, return the user", %{conn: conn} do
      params = %{email: "rafael@gmail.com", name: "rafael", password: "123456"}

      # Seed a user directly through the context so the query has data to hit.
      {:ok, %User{id: user_id}} = Create.call(params)

      query = """
      {
      getUser(id: "#{user_id}"){
      email
      name
      }
      }
      """

      response =
        conn
        |> post("/api/graphql", %{query: query})
        |> json_response(:ok)

      expected_response = %{
        "data" => %{
          "getUser" => %{
            "email" => "rafael@gmail.com",
            "name" => "rafael"
          }
        }
      }

      assert response == expected_response
    end
  end

  describe "users mutations" do
    # Fixed typo in the test description ("aall" -> "all").
    test "when all params are valid, create the user", %{conn: conn} do
      mutation = """
      mutation {
      createUser(input: {
      name:"teste",
      email:"teste@teste.com",
      password:"123333"}){
      id
      name
      }
      }
      """

      response =
        conn
        |> post("/api/graphql", %{query: mutation})
        |> json_response(:ok)

      # The mutation must echo back the created user's name and a generated id.
      assert %{"data" => %{"createUser" => %{"id" => _id, "name" => "teste"}}} = response
    end
  end
end
| 22.05 | 89 | 0.513228 |
e8525bcb6344d67580499bee959bd824e70bcc9d | 2,030 | ex | Elixir | lib/mix/tasks/brando.resave.entries.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 1 | 2020-04-26T09:53:02.000Z | 2020-04-26T09:53:02.000Z | lib/mix/tasks/brando.resave.entries.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 198 | 2019-08-20T16:16:07.000Z | 2020-07-03T15:42:07.000Z | lib/mix/tasks/brando.resave.entries.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
defmodule Mix.Tasks.Brando.Resave.Entries do
  use Mix.Task

  @shortdoc "Re-save all entries"

  @moduledoc """
  Re-save all entries

      mix brando.resave.entries

  Re-save entries for specific blueprint

      mix brando.resave.entries MyApp.Projects.Project
  """

  # Fixed spec: the task returns normally (previously declared `no_return`).
  @spec run([binary]) :: :ok
  def run([]) do
    boot()

    blueprints = Brando.Blueprint.list_blueprints() ++ [Brando.Pages.Page, Brando.Pages.Fragment]

    Enum.each(blueprints, &resave_entries/1)

    done()
  end

  def run([blueprint_binary]) do
    boot()

    blueprint_binary
    |> List.wrap()
    |> Module.concat()
    |> resave_entries()

    done()
  end

  # Shared start-up previously duplicated in both run/1 clauses: configure the
  # environment, boot the application and print the banner.
  defp boot do
    Application.put_env(:phoenix, :serve_endpoints, true)
    Application.put_env(:logger, :level, :error)
    Mix.Tasks.Run.run([])

    Mix.shell().info("""
    ------------------------------
    % Brando Resave Entries
    ------------------------------
    """)
  end

  defp done, do: Mix.shell().info([:green, "\n==> Done.\n"])

  # Re-saves every entry of the given blueprint, printing per-entry progress.
  defp resave_entries(blueprint) do
    context = blueprint.__modules__().context
    singular = blueprint.__naming__().singular
    plural = blueprint.__naming__().plural

    {:ok, entries} = apply(context, :"list_#{plural}", [%{order: "asc id"}])

    Mix.shell().info([:green, "\n==> Resaving #{singular} entries\n"])

    for entry <- entries do
      IO.write([
        "* [#{singular}:#{entry.id}] → #{blueprint.__identifier__(entry).title} ... "
      ])

      # `force: true` persists even though the changeset has no changes, so
      # save-time callbacks re-run for every entry.
      changeset = Ecto.Changeset.change(entry)

      case Brando.repo().update(changeset, force: true) do
        {:ok, _} ->
          IO.write([IO.ANSI.green(), "done!\n", IO.ANSI.reset()])

        {:error, _} ->
          IO.write([IO.ANSI.red(), "failed!\n", IO.ANSI.reset()])
      end
    end
  end
end
| 24.756098 | 97 | 0.578325 |
e852850aef778e009f840dfa1a5e0d4dcc73bb30 | 1,201 | exs | Elixir | lib/elixir/test/elixir/integer_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | 1 | 2015-02-23T00:01:48.000Z | 2015-02-23T00:01:48.000Z | lib/elixir/test/elixir/integer_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/integer_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
Code.require_file("test_helper.exs", __DIR__)

defmodule IntegerTest do
  use ExUnit.Case, async: true

  require Integer

  test :odd? do
    # Odd for any odd integer regardless of sign, even otherwise.
    for n <- [1, 3, -1, -3], do: assert Integer.is_odd(n)
    for n <- [0, 2, -2], do: refute Integer.is_odd(n)
  end

  test :even? do
    for n <- [0, 2, -2], do: assert Integer.is_even(n)
    for n <- [1, 3, -1, -3], do: refute Integer.is_even(n)
  end

  test :parse do
    # Valid numeric prefixes parse to {integer, rest-of-string}.
    parses = [
      {"12", {12, ""}},
      {"-12", {-12, ""}},
      {"123456789", {123456789, ""}},
      {"12.5", {12, ".5"}},
      {"7.5e-3", {7, ".5e-3"}},
      {"12x", {12, "x"}}
    ]

    for {input, expected} <- parses, do: assert Integer.parse(input) === expected

    # Doubled/mixed signs and non-numeric input are rejected.
    for bad <- ["++1", "--1", "+-1", "three"], do: assert Integer.parse(bad) === :error
  end
end
| 30.025 | 57 | 0.616986 |
e8529181cb61cd06af43ed199322a9fe28362933 | 51,847 | ex | Elixir | lib/codi/pattern/struct/struct.ex | ianrumford/plymio_codi | 1163b139df0ed344e63597e563637c091ffe1f5e | [
"MIT"
] | null | null | null | lib/codi/pattern/struct/struct.ex | ianrumford/plymio_codi | 1163b139df0ed344e63597e563637c091ffe1f5e | [
"MIT"
] | null | null | null | lib/codi/pattern/struct/struct.ex | ianrumford/plymio_codi | 1163b139df0ed344e63597e563637c091ffe1f5e | [
"MIT"
] | null | null | null | defmodule Plymio.Codi.Pattern.Struct do
@moduledoc ~S"""
The *struct* patterns create a range of transform functions for a module's struct.
See `Plymio.Codi` for an overview and documentation terms.
## Set and Unset Fields
These patterns use *the unset value*
(see `Plymio.Fontais.Guard.the_unset_value/0`) to decide whether a
field has a useful value. Fields can be unset by default by using
*the unset value* in the `Kernel.defstruct/1` e.g.
defstruct [x: Plymio.Fontais.Guard.the_unset_value()]
For example, a function built using the `struct_fetch` pattern will return
`{error, error}` if the target field's value is unset. Similary,
for `struct_get`, `{:ok, default}` will be returned if the field's
value is unset.
## Errors
The code generated by most *struct* patterns checks the first
argument is an instance of the target module's *struct* and returns
`{:error, error}` if not.
## Test Environment
The doctests use a helper (`codi_helper_struct_compile_module/1`) to
compile the generated function(s) in a dynamically created module,
returning `{:ok, {forms, test_mod}}`.
The `forms` are the generated code and another helper
(`Harnais.Helper.harnais_helper_format_forms!/2`) is used to
"textify" the code using the Elixir code formatter.
The `test_mod` is the dynamically created module and is used to call the generated function(s).
The default `Kernel.defstruct/1` for the doctests is shown here. Note the `:z` field is unset.
defstruct [x: 42, y: nil, z: Plymio.Fontais.Guard.the_unset_value()]
The first example for each pattern just shows the generated code with
subsequent examples performing the actual test(s).
## Common Codi Pattern Opts (cpo) Keys
As well as the top level common options, these keys are valid in any *struct* pattern's *cpo*:
| Key | Aliases |
| :--- | :--- |
| `:fun_name` | *:name, :function_name* |
| `:fun_field` | *:field, ::function_field, :key, :fun_key, :function_key* |
| `:fun_args` | *:args, :function_args* |
| `:fun_arity` | *:arity, :function_arity* |
| `:fun_doc` | *:doc, :function_doc* |
| `:typespec_spec_args` | *:spec_args* |
| `:typespec_spec_result` |*:spec_result, :result, :fun_result, :function_result* |
| `:since` | |
All *struct* patterns support the generation of `@doc`, `@spec` and
`@since` module attribute forms.
All *struct* patterns generate a `@doc` by default. (It can be disabled in
the usual way by specifying `doc: false` in the *cpo*.)
> In the doctests below the `@doc` forms are mostly disabled (i.e. `doc: nil`) for clarity.
## Pattern: *struct_get*
This pattern is a convenience to generate both *struct_get1* and *struct_get2* patterns.
The *cpo* must be configured for a *struct_get2* and will be "reduced" to suite a *struct_get1*.
## Pattern: *struct_get1*
The *struct_get1* pattern creates a function to get the value of a
struct's field and if the value is unset return a fixed default.
## Examples
This example generate a `get/1` function for the `:z` field. Note
the `:field` in the *cpo* is a `Keyword` with the fixed default for
the `:z` field (`"z get default"`)
iex> {:ok, {forms, _test_mod}} = [
...> struct_get1: [args: :t, name: :get_z, field: [z: "z get default"], doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(get_z(t))",
"",
"def(get_z(%__MODULE__{z: field_value})) do",
" field_value",
" |> case do",
" x when Plymio.Fontais.Guard.is_value_set(x) ->",
" {:ok, x}",
"",
" _ ->",
" {:ok, \"z get default\"}",
" end",
"end",
"",
"def(get_z(state)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
iez> {:ok, {_forms, test_mod}} = [
...> struct_get1: [args: :t, name: :get_z, field: [z: "z get default"], doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # z is unset by default so the get/1 default will be returned.
...> struct(test_mod) |> test_mod.get_z
{:ok, "z get default"}
## Pattern: *struct_get2*
The *struct_get2* pattern creates a function to get the value of a
struct's field and, if the value is unset, return the second argument.
## Examples
This example generate a `get/2` function for the `:z` field.
iex> {:ok, {forms, _test_mod}} = [
...> struct_get2: [args: [:t, :the_default_for_z], name: :get_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(get_z(t, the_default_for_z))",
"",
"def(get_z(%__MODULE__{z: field_value}, default)) do",
" field_value",
" |> case do",
" x when Plymio.Fontais.Guard.is_value_set(x) ->",
" {:ok, x}",
"",
" _ ->",
" {:ok, default}",
" end",
"end",
"",
"def(get_z(state, _default)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
iez> {:ok, {_forms, test_mod}} = [
...> struct_get2: [args: [:t, :the_default_for_z], name: :get_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # z is unset by default so the get/2 2nd argument will be returned.
...> struct(test_mod) |> test_mod.get_z("an explicit default")
{:ok, "an explicit default"}
## Pattern: *struct_fetch*
The *struct_fetch* pattern creates a function to fetch the value of a struct's field.
## Examples
This example generate a `fetch/1` function for the `:x` field.
iex> {:ok, {forms, _test_mod}} = [
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(fetch_x(t))",
"",
"def(fetch_x(%__MODULE__{x: field_value} = state)) do",
" field_value",
" |> case do",
" x when Plymio.Fontais.Guard.is_value_set(x) ->",
" {:ok, x}",
"",
" _ ->",
" Plymio.Codi.Error.new_error_result(m: \"struct field \#{:x} unset\", v: state)",
" end",
"end",
"",
"def(fetch_x(state)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
iex> {:ok, {_forms, test_mod}} = [
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> {:ok, 42} = struct(test_mod) |> test_mod.fetch_x
...> # setting x to the unset value causes the fetch to fail
...> {:error, error} = struct(test_mod, x: Plymio.Fontais.Guard.the_unset_value) |> test_mod.fetch_x
...> true = error |> Exception.message |> String.starts_with?("struct field x unset")
...> # the argument must be an instance of the module's struct
...> {:error, error} = :not_a_struct |> test_mod.fetch_x
...> error |> Exception.message
"struct invalid, got: :not_a_struct"
## Pattern: *struct_put*
The *struct_put* pattern creates a function to put a value for a struct's field.
## Examples
This example generates a `put/2` function for the `:x` field.
iex> {:ok, {forms, _test_mod}} = [
...> struct_put: [args: [:t, :value], name: :put_x, field: :x, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(put_x(t, value))",
"",
"def(put_x(%__MODULE__{x: _} = state, value)) do",
" {:ok, state |> struct!(x: value)}",
"end",
"",
"def(put_x(state, _value)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
iex> {:ok, {_forms, test_mod}} = [
...> struct_put: [args: [:t, :value], name: :put_x, field: :x, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # set the :x field's value to 123
...> {:ok, %test_mod{} = t1} = struct(test_mod) |> test_mod.put_x(123)
...> # use `fetch_x/1` to check
...> {:ok, 123} = t1 |> test_mod.fetch_x
...> # the argument must be an instance of the module's struct
...> {:error, error} = :not_a_struct |> test_mod.put_x(123)
...> error |> Exception.message
"struct invalid, got: :not_a_struct"
## Pattern: *struct_maybe_put*
The *struct_maybe_put* pattern create a function to put a value for
a struct's field *only* if the field's current value is unset.
## Examples
This code shows a `maybe_put/2` function for the `:z` field:
iex> {:ok, {forms, _test_mod}} = [
...> struct_maybe_put: [args: [:t, :value], name: :maybe_put_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(maybe_put_z(t, value))",
"",
"def(",
" maybe_put_z(%__MODULE__{z: field_value} = state, _value)",
" when Plymio.Fontais.Guard.is_value_set(field_value)",
") do",
" {:ok, state}",
"end",
"",
"def(",
" maybe_put_z(%__MODULE__{z: field_value} = state, value)",
" when Plymio.Fontais.Guard.is_value_unset(field_value)",
") do",
" value",
" |> Plymio.Fontais.Guard.is_value_unset()",
" |> case do",
" true ->",
" {:ok, state}",
"",
" _ ->",
" {:ok, state |> struct!(z: value)}",
" end",
"end",
"",
"def(maybe_put_z(state, _value)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
Here `maybe_put/2` and `fetch/1` functions are generated for two fields: the
`:x` field has `42` for its default, whereas the `:z` field is
unset.
iex> {:ok, {_forms, test_mod}} = [
...> struct_maybe_put: [args: [:t, :value], name: :maybe_put_x, field: :x, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> struct_maybe_put: [args: [:t, :value], name: :maybe_put_z, field: :z, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # by default the `:z` field is unset so `fetch_z/1` will fail
...> t1 = struct(test_mod)
...> {:error, _error} = t1 |> test_mod.fetch_z
...> # maybe_put/2 will update the field since it is unset
...> {:ok, %test_mod{} = t1} = t1 |> test_mod.maybe_put_z(123)
...> # use `fetch_z/1` to check
...> {:ok, 123} = t1 |> test_mod.fetch_z
...> # field `:x` has a default of `42` so maybe_put/2 will not update the field
...> {:ok, %test_mod{} = t1} = t1 |> test_mod.maybe_put_x("will be ignored")
...> # the `:x` field will still be `42`
...> t1 |> test_mod.fetch_x
{:ok, 42}
## Pattern: *struct_has?*
The *struct_has?* pattern creates a function that returns `true` if
the field's value is set, otherwise `false`.
> Note: if the first argument is not a valid struct, `false` is returned..
## Examples
This code shows `has?/1` function for the `:z` field:
iex> {:ok, {forms, _test_mod}} = [
...> struct_has?: [args: :t, name: :has_z?, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(has_z?(t))",
"",
"def(has_z?(%__MODULE__{z: field_value}) when Plymio.Fontais.Guard.is_value_set(field_value)) do",
" true",
"end",
"",
"def(has_z?(_state)) do",
" false",
"end"]
Here `has?/1` functions are generated for two fields: the
`:x` field has `42` for its default, whereas the `:z` field is
unset.
iex> {:ok, {_forms, test_mod}} = [
...> struct_has?: [args: :t, name: :has_x?, field: :x, doc: nil],
...> struct_has?: [args: :t, name: :has_z?, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> t1 = struct(test_mod)
...> false = t1 |> test_mod.has_z?
...> t1 |> test_mod.has_x?
true
## Pattern: *struct_update*
The *struct_update* pattern creates a function to call the module's `update/2` function.
The module's `update/2` function is a standard `Plymio` module state
function that works like a validated put. Apart from showing the generated code, it is not
documented or tested further here.
## Examples
This example generates an `update/2` function for the `:x` field.
iex> {:ok, {forms, _codi}} = [
...> struct_update: [args: [:t, :value], name: :update_x, field: :x, doc: nil],
...> ] |> CODI.produce_codi
...> forms |> harnais_helper_format_forms!
["def(update_x(t, value))",
"",
"def(update_x(%__MODULE__{x: _} = state, value)) do",
" {:ok, state |> update(x: value)}",
"end",
"",
"def(update_x(state, _value)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
## Pattern: *struct_set*
The *struct_set* pattern is a simple but versatile pattern for
creating a function that sets one or more fields in the *struct* to
specific values, defaulting to *the unset value*.
## Examples
This example generates an `set/2` function to set the `:x` field to value `123`.
Note the `:field` in the *cpo* is a `Keyword`.
iex> {:ok, {forms, _codi}} = [
...> struct_set: [args: :t, name: :set_x, field: [x: 123], doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(set_x(t))",
"",
"def(set_x(%__MODULE__{x: _} = state)) do",
" {:ok, state |> struct!(x: 123)}",
"end",
"",
"def(set_x(state)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
This example create a `set/1` function that sets the `:x` and `:z` fields.
iex> {:ok, {_forms, test_mod}} = [
...> struct_set: [args: :t, name: :set_xz, doc: nil,
...> field: [x: 123, z: "z is no longer unset"]],
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_y, field: :y, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # set the :x and :z fields
...> {:ok, %test_mod{} = t1} = struct(test_mod) |> test_mod.set_xz
...> # use fetch to check
...> {:ok, 123} = t1 |> test_mod.fetch_x
...> {:ok, nil} = t1 |> test_mod.fetch_y
...> t1 |> test_mod.fetch_z
{:ok, "z is no longer unset"}
This example create a `set/1` function that sets the `:x` and `:z`
fields to specific values, but unsets the `:y` field. Note the
`:field` in the *cpo* is not a `Keyword`: The value for `:y` is not given and
defaults to be the unset value.
iex> {:ok, {_forms, test_mod}} = [
...> struct_set: [args: :t, name: :set_xyz, doc: nil,
...> field: [{:x, 123}, :y, {:z, "z is no longer unset"}]],
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_y, field: :y, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # set all 3 fields
...> {:ok, %test_mod{} = t1} = struct(test_mod) |> test_mod.set_xyz
...> {:ok, 123} = t1 |> test_mod.fetch_x
...> {:ok, "z is no longer unset"} = t1 |> test_mod.fetch_z
...> # :y is now unset
...> {:error, error} = t1 |> test_mod.fetch_y
...> error |> Exception.message |> String.starts_with?("struct field y unset")
true
## Pattern: *struct_export*
The *struct_export* pattern creates
a function that exports one or more fields in the *struct* to
an *opts* (`Keyword`).
The export (*opts*) is sparse: only keys that are set are included.
Default values can be provided; if the value of the key in the *struct* is unset, the default is used.
## Examples
This example generates an `export/1` function for all three fields
in the test *struct*. Note, since no default export values were given
in the `:field` in the *cpo*, the defaults are the unset value and
the field will only appear in the export if the *struct* value is set.
iex> {:ok, {forms, _codi}} = [
...> struct_export: [args: :t, name: :export_all, field: [:x, :y, :z], doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(export_all(t))", "",
"def(export_all(%__MODULE__{x: field_value, y: field_value1, z: field_value2})) do",
" tuples =",
" [",
" x: :plymio_fontais_t3h1e4_u9n8s7e2t7_v1a8l3u8e,",
" y: :plymio_fontais_t3h1e4_u9n8s7e2t7_v1a8l3u8e,",
" z: :plymio_fontais_t3h1e4_u9n8s7e2t7_v1a8l3u8e",
" ] ++ [x: field_value, y: field_value1, z: field_value2]",
"",
" export =",
" tuples",
" |> Keyword.keys()",
" |> Stream.uniq()",
" |> Stream.map(fn k ->",
" tuples",
" |> Keyword.get_values(k)",
" |> Enum.filter(fn v -> v |> Plymio.Fontais.Guard.is_value_set() end)",
" |> case do",
" [] ->",
" {k, @plymio_fontais_the_unset_value}",
"",
" values ->",
" {k, values |> List.last()}",
" end",
" end)",
" |> Stream.filter(fn {_k, v} -> v |> Plymio.Fontais.Guard.is_value_set() end)",
" |> Keyword.new()",
"",
" {:ok, export}",
"end",
"",
"def(export_all(state)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
This example creates an `export/1` function that exports the `:x` and `:z`
fields. The `:z` field is unset by default so will not appear in the export unless set explicitly.
iex> {:ok, {_forms, test_mod}} = [
...> struct_export: [args: :t, name: :export_xz, doc: nil,
...> field: [:x, :z]],
...> struct_put: [args: [:t, :value], name: :put_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # the :z field is by default unset and will not be in the export
...> {:ok, [x: 42]} = struct(test_mod) |> test_mod.export_xz
...> # set z and export
...> {:ok, %test_mod{} = t1} = struct(test_mod) |> test_mod.put_z("z is now set")
...> t1 |> test_mod.export_xz
{:ok, [x: 42, z: "z is now set"]}
Another example but providing default values for each key in the export by
supplying a `Keyword` for `:field` in the *cpo*:.
iex> {:ok, {_forms, test_mod}} = [
...> struct_export: [args: :t, name: :export_xz, doc: nil,
...> field: [x: :x_default, z: :z_default]],
...> struct_put: [args: [:t, :value], name: :put_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # the :z field has a default export value
...> struct(test_mod) |> test_mod.export_xz
{:ok, [x: 42, z: :z_default]}
"""
alias Plymio.Codi, as: CODI
alias Plymio.Codi.Utility, as: CODIUTIL
alias Plymio.Codi.Utility.Depend, as: DEPEND
use Plymio.Fontais.Attribute
use Plymio.Codi.Attribute
import Plymio.Codi.Error,
only: [
new_error_result: 1
]
import Plymio.Fontais.Option,
only: [
opts_take_canonical_keys: 2,
opts_create_aliases_dict: 1
]
import Plymio.Codi.Utility,
only: [
cpo_resolve_fun_name: 1,
cpo_resolve_guard_fun_args: 2,
cpo_resolve_guard_field_match: 1,
cpo_resolve_guard_field_match: 2,
cpo_resolve_typespec_spec_args: 1
]
import Plymio.Codi.CPO
# Keys (with their aliases) accepted in a *struct_get* / *struct_get1* /
# *struct_get2* cpo. The individual alias attributes are defined in
# Plymio.Codi.Attribute (brought in via `use` above).
@pattern_struct_get_kvs_alias [
  @plymio_codi_key_alias_pattern,
  @plymio_codi_key_alias_status,
  @plymio_codi_key_alias_form,
  @plymio_codi_key_alias_since,
  # limited aliases
  {@plymio_codi_key_typespec_spec_args, [:spec_args]},
  @plymio_codi_key_alias_typespec_spec_result,
  @plymio_codi_key_alias_fun_name,
  @plymio_codi_key_alias_fun_doc,
  @plymio_codi_key_alias_fun_args,
  @plymio_codi_key_alias_fun_arity,
  @plymio_codi_key_alias_fun_key,
  @plymio_codi_key_alias_fun_default,
  @plymio_codi_key_alias_forms_edit
]

# The alias kvs are compiled once, at compile time, into a lookup dictionary.
@pattern_struct_get_dict_alias @pattern_struct_get_kvs_alias
                               |> opts_create_aliases_dict

@doc false
# Canonicalises the keys of a *struct_get* cpo using the alias dictionary
# above, or a caller-supplied dictionary. Returns {:ok, opts} / {:error, error}.
def cpo_pattern_struct_get_normalise(opts, dict \\ nil) do
  opts |> opts_take_canonical_keys(dict || @pattern_struct_get_dict_alias)
end
# Keys (with their aliases) accepted in a *struct_fetch* cpo.
# Note: no `fun_default` alias — fetch has no default value.
@pattern_struct_fetch_kvs_alias [
  @plymio_codi_key_alias_pattern,
  @plymio_codi_key_alias_status,
  @plymio_codi_key_alias_form,
  @plymio_codi_key_alias_since,
  # limited aliases
  {@plymio_codi_key_typespec_spec_args, [:spec_args]},
  @plymio_codi_key_alias_typespec_spec_result,
  @plymio_codi_key_alias_fun_name,
  @plymio_codi_key_alias_fun_doc,
  @plymio_codi_key_alias_fun_args,
  @plymio_codi_key_alias_fun_arity,
  @plymio_codi_key_alias_fun_key,
  @plymio_codi_key_alias_forms_edit
]

# Alias kvs compiled once into a lookup dictionary at compile time.
@pattern_struct_fetch_dict_alias @pattern_struct_fetch_kvs_alias
                                 |> opts_create_aliases_dict

@doc false
# Canonicalises the keys of a *struct_fetch* cpo using the alias dictionary
# above, or a caller-supplied dictionary.
def cpo_pattern_struct_fetch_normalise(opts, dict \\ nil) do
  opts |> opts_take_canonical_keys(dict || @pattern_struct_fetch_dict_alias)
end
# Keys (with their aliases) accepted in a *struct_put* cpo.
@pattern_struct_put_kvs_alias [
  @plymio_codi_key_alias_pattern,
  @plymio_codi_key_alias_status,
  @plymio_codi_key_alias_form,
  @plymio_codi_key_alias_since,
  # limited aliases
  {@plymio_codi_key_typespec_spec_args, [:spec_args]},
  @plymio_codi_key_alias_typespec_spec_result,
  @plymio_codi_key_alias_fun_name,
  @plymio_codi_key_alias_fun_doc,
  @plymio_codi_key_alias_fun_args,
  @plymio_codi_key_alias_fun_arity,
  @plymio_codi_key_alias_fun_key,
  @plymio_codi_key_alias_forms_edit
]

# Alias kvs compiled once into a lookup dictionary at compile time.
@pattern_struct_put_dict_alias @pattern_struct_put_kvs_alias
                               |> opts_create_aliases_dict

@doc false
# Canonicalises the keys of a *struct_put* cpo using the alias dictionary
# above, or a caller-supplied dictionary.
def cpo_pattern_struct_put_normalise(opts, dict \\ nil) do
  opts |> opts_take_canonical_keys(dict || @pattern_struct_put_dict_alias)
end
# Keys (with their aliases) accepted in a *struct_maybe_put* cpo.
@pattern_struct_maybe_put_kvs_alias [
  @plymio_codi_key_alias_pattern,
  @plymio_codi_key_alias_status,
  @plymio_codi_key_alias_form,
  @plymio_codi_key_alias_since,
  # limited aliases
  {@plymio_codi_key_typespec_spec_args, [:spec_args]},
  @plymio_codi_key_alias_typespec_spec_result,
  @plymio_codi_key_alias_fun_name,
  @plymio_codi_key_alias_fun_doc,
  @plymio_codi_key_alias_fun_args,
  @plymio_codi_key_alias_fun_arity,
  @plymio_codi_key_alias_fun_key,
  @plymio_codi_key_alias_forms_edit
]

# Alias kvs compiled once into a lookup dictionary at compile time.
@pattern_struct_maybe_put_dict_alias @pattern_struct_maybe_put_kvs_alias
                                     |> opts_create_aliases_dict

@doc false
# Canonicalises the keys of a *struct_maybe_put* cpo using the alias
# dictionary above, or a caller-supplied dictionary.
def cpo_pattern_struct_maybe_put_normalise(opts, dict \\ nil) do
  opts |> opts_take_canonical_keys(dict || @pattern_struct_maybe_put_dict_alias)
end
# Keys (with their aliases) accepted in a *struct_set* cpo.
# Includes `fun_default` — set fields default to the unset value.
@pattern_struct_set_kvs_alias [
  @plymio_codi_key_alias_pattern,
  @plymio_codi_key_alias_status,
  @plymio_codi_key_alias_form,
  @plymio_codi_key_alias_since,
  # limited aliases
  {@plymio_codi_key_typespec_spec_args, [:spec_args]},
  @plymio_codi_key_alias_typespec_spec_result,
  @plymio_codi_key_alias_fun_name,
  @plymio_codi_key_alias_fun_doc,
  @plymio_codi_key_alias_fun_args,
  @plymio_codi_key_alias_fun_arity,
  @plymio_codi_key_alias_fun_key,
  @plymio_codi_key_alias_fun_default,
  @plymio_codi_key_alias_forms_edit
]

# Alias kvs compiled once into a lookup dictionary at compile time.
@pattern_struct_set_dict_alias @pattern_struct_set_kvs_alias
                               |> opts_create_aliases_dict

@doc false
# Canonicalises the keys of a *struct_set* cpo using the alias dictionary
# above, or a caller-supplied dictionary.
def cpo_pattern_struct_set_normalise(opts, dict \\ nil) do
  opts |> opts_take_canonical_keys(dict || @pattern_struct_set_dict_alias)
end
# Keys (with their aliases) accepted in a *struct_has?* cpo.
@pattern_struct_has_kvs_alias [
  @plymio_codi_key_alias_pattern,
  @plymio_codi_key_alias_status,
  @plymio_codi_key_alias_form,
  @plymio_codi_key_alias_since,
  # limited aliases
  {@plymio_codi_key_typespec_spec_args, [:spec_args]},
  @plymio_codi_key_alias_typespec_spec_result,
  @plymio_codi_key_alias_fun_name,
  @plymio_codi_key_alias_fun_doc,
  @plymio_codi_key_alias_fun_args,
  @plymio_codi_key_alias_fun_arity,
  @plymio_codi_key_alias_fun_key,
  @plymio_codi_key_alias_fun_default,
  @plymio_codi_key_alias_forms_edit
]

# Alias kvs compiled once into a lookup dictionary at compile time.
@pattern_struct_has_dict_alias @pattern_struct_has_kvs_alias
                               |> opts_create_aliases_dict

@doc false
# Canonicalises the keys of a *struct_has?* cpo using the alias dictionary
# above, or a caller-supplied dictionary.
def cpo_pattern_struct_has_normalise(opts, dict \\ nil) do
  opts |> opts_take_canonical_keys(dict || @pattern_struct_has_dict_alias)
end
# Keys (with their aliases) accepted in a *struct_update* cpo.
@pattern_struct_update_kvs_alias [
  @plymio_codi_key_alias_pattern,
  @plymio_codi_key_alias_status,
  @plymio_codi_key_alias_form,
  @plymio_codi_key_alias_since,
  # limited aliases
  {@plymio_codi_key_typespec_spec_args, [:spec_args]},
  @plymio_codi_key_alias_typespec_spec_result,
  @plymio_codi_key_alias_fun_name,
  @plymio_codi_key_alias_fun_doc,
  @plymio_codi_key_alias_fun_args,
  @plymio_codi_key_alias_fun_arity,
  @plymio_codi_key_alias_fun_key,
  @plymio_codi_key_alias_forms_edit
]

# Alias kvs compiled once into a lookup dictionary at compile time.
@pattern_struct_update_dict_alias @pattern_struct_update_kvs_alias
                                  |> opts_create_aliases_dict

@doc false
# Canonicalises the keys of a *struct_update* cpo using the alias dictionary
# above, or a caller-supplied dictionary.
def cpo_pattern_struct_update_normalise(opts, dict \\ nil) do
  opts |> opts_take_canonical_keys(dict || @pattern_struct_update_dict_alias)
end
# Keys (with their aliases) accepted in a *struct_export* cpo.
@pattern_struct_export_kvs_alias [
  @plymio_codi_key_alias_pattern,
  @plymio_codi_key_alias_status,
  @plymio_codi_key_alias_form,
  @plymio_codi_key_alias_since,
  # limited aliases
  {@plymio_codi_key_typespec_spec_args, [:spec_args]},
  @plymio_codi_key_alias_typespec_spec_result,
  @plymio_codi_key_alias_fun_name,
  @plymio_codi_key_alias_fun_doc,
  @plymio_codi_key_alias_fun_args,
  @plymio_codi_key_alias_fun_arity,
  @plymio_codi_key_alias_fun_key,
  @plymio_codi_key_alias_forms_edit
]

# Alias kvs compiled once into a lookup dictionary at compile time.
@pattern_struct_export_dict_alias @pattern_struct_export_kvs_alias
                                  |> opts_create_aliases_dict

@doc false
# Canonicalises the keys of a *struct_export* cpo using the alias dictionary
# above, or a caller-supplied dictionary.
def cpo_pattern_struct_export_normalise(opts, dict \\ nil) do
  opts |> opts_take_canonical_keys(dict || @pattern_struct_export_dict_alias)
end
@doc false
# Multi-clause entry point: dispatches on the pattern atom. Each clause
# returns {:ok, {cpos, state}} or {:error, error}.
def express_pattern(codi, pattern, opts)

# expectation if the cpo is initially configured for get2
#
# A *struct_get* cpo is expanded into two child cpos: a *struct_get1*
# (arity 1, fixed default taken from the :field kv) and a *struct_get2*
# (arity 2, caller-supplied default). No forms are built here; the child
# cpos are expressed by the clauses below.
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_struct_get do
  with {:ok, cpo} <- cpo |> cpo_pattern_struct_get_normalise,
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(2),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct, :default]),
       {:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(2),
       {:ok, get2_fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
       # typespec if any
       {:ok, cpo} <-
         cpo
         |> cpo_maybe_transform(&cpo_has_typespec_spec_opts?/1, [
           {:cpo_maybe_put_typespec_spec_result, :result},
           {:cpo_maybe_put_typespec_spec_args, [[:struct, :any]]}
         ]),
       # derive the get1 cpo: same opts but arity 1 and only the first arg
       {:ok, cpo_get1} <- cpo |> cpo_put_pattern(@plymio_codi_pattern_struct_get1),
       {:ok, cpo_get1} <- cpo_get1 |> cpo_put_fun_args(get2_fun_args |> Enum.take(1)),
       {:ok, cpo_get1} <- cpo_get1 |> cpo_put_fun_arity(1),
       {:ok, cpo_get1} <-
         cpo_get1
         |> cpo_maybe_transform(
           &cpo_has_typespec_spec_opts?/1,
           # if a @spec was requested, trim its args to arity 1 as well
           fn cpo ->
             with {:ok, spec_args} <- cpo_get1 |> cpo_resolve_typespec_spec_args,
                  {:ok, cpo} <- cpo |> cpo_put_typespec_spec_args(spec_args |> Enum.take(1)),
                  # `true <- true` is a deliberate filler keeping the chain extensible
                  true <- true do
               {:ok, cpo}
             else
               {:error, %{__exception__: true}} = result -> result
             end
           end
         ),
       # derive the get2 cpo: keeps both args
       {:ok, cpo_get2} <- cpo |> cpo_put_pattern(@plymio_codi_pattern_struct_get2),
       {:ok, cpo_get2} <- cpo_get2 |> cpo_put_fun_args(get2_fun_args),
       true <- true do
    {:ok, {[cpo_get1, cpo_get2], state}}
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# *struct_get1*: builds a get/1 function for one struct field. The generated
# function returns {:ok, value} when the field is set, {:ok, fixed_default}
# when unset (default taken from the :field kv), and {:error, error} when the
# first argument is not an instance of the module's struct.
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_struct_get1 do
  with {:ok, cpo} <- cpo |> cpo_pattern_struct_get_normalise,
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(1),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_args(:struct),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
       {:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(1),
       {:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
       # fix: field_vars is not used by this clause — underscore-prefix it
       # to silence the compiler's unused-variable warning
       {:ok, {{field_name, field_value_var}, _field_vars, field_tuples, field_match}} <-
         cpo
         |> cpo_resolve_guard_field_match(ctrl),
       # NOTE: String.to_atom/1 here runs at codegen time on a known field
       # name, not on runtime input — no unbounded atom creation
       {:ok, cpo} <-
         cpo
         |> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_get" |> String.to_atom()),
       {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
       # save resolved for e.g. doc pattern
       {:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
       {:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
       true <- true do
    # the fixed default is the value half of the single {field, default} tuple
    fun_default = field_tuples |> hd |> elem(1)

    pattern_form =
      quote do
        def unquote(fun_name)(unquote_splicing(fun_args))

        def unquote(fun_name)(unquote(field_match)) do
          unquote(field_value_var)
          |> case do
            x when Plymio.Fontais.Guard.is_value_set(x) ->
              {:ok, x}

            _ ->
              {:ok, unquote(fun_default)}
          end
        end

        # catch-all: first argument was not this module's struct
        def unquote(fun_name)(state) do
          Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
        end
      end

    # optional companion forms: @doc, @since and @spec, driven by the cpo
    depend_args = [
      {&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
      {&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
      {&cpo_has_typespec_spec_opts?/1,
       [
         &DEPEND.cpo_transform_typespec_spec_depend/1,
         {:cpo_maybe_put_typespec_spec_result, :result},
         {:cpo_maybe_put_typespec_spec_args, :struct}
       ]}
    ]

    with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
         {:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
      # dependent (attribute) cpos must precede the function form itself
      cpos = depend_cpos ++ [cpo]
      {:ok, {cpos, state}}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# *struct_get2*: builds a get/2 function for one struct field. The generated
# function returns {:ok, value} when the field is set, {:ok, default} (the
# second argument) when unset, and {:error, error} when the first argument is
# not an instance of the module's struct.
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_struct_get2 do
  with {:ok, cpo} <- cpo |> cpo_pattern_struct_get_normalise,
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(2),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct, :default]),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
       {:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(2),
       {:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
       # fix: field_vars is not used by this clause — underscore-prefix it
       # to silence the compiler's unused-variable warning
       {:ok, {{field_name, field_value_var}, _field_vars, field_tuples, field_match}} <-
         cpo
         |> cpo_resolve_guard_field_match(ctrl),
       # NOTE: String.to_atom/1 runs at codegen time on a known field name
       {:ok, cpo} <-
         cpo
         |> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_get" |> String.to_atom()),
       {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
       # save resolved for e.g. doc pattern
       {:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
       {:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
       true <- true do
    pattern_form =
      quote do
        def unquote(fun_name)(unquote_splicing(fun_args))

        def unquote(fun_name)(unquote(field_match), default) do
          unquote(field_value_var)
          |> case do
            x when Plymio.Fontais.Guard.is_value_set(x) ->
              {:ok, x}

            _ ->
              {:ok, default}
          end
        end

        # catch-all: first argument was not this module's struct
        def unquote(fun_name)(state, _default) do
          Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
        end
      end

    # optional companion forms: @doc, @since and @spec, driven by the cpo
    depend_args = [
      {&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
      {&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
      {&cpo_has_typespec_spec_opts?/1,
       [
         &DEPEND.cpo_transform_typespec_spec_depend/1,
         {:cpo_maybe_put_typespec_spec_result, :result},
         {:cpo_maybe_put_typespec_spec_args, [[:struct, :any]]}
       ]}
    ]

    with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
         {:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
      cpos = depend_cpos ++ [cpo]
      {:ok, {cpos, state}}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# *struct_fetch*: builds a fetch/1 function for one struct field. The
# generated function returns {:ok, value} when the field is set,
# {:error, error} when unset, and {:error, error} when the first argument is
# not an instance of the module's struct.
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_struct_fetch do
  with {:ok, cpo} <- cpo |> cpo_pattern_struct_fetch_normalise,
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(1),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_args(:struct),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
       {:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(1),
       {:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
       # fix: field_vars is not used by this clause — underscore-prefix it
       # to silence the compiler's unused-variable warning
       {:ok, {{field_name, field_value_var}, _field_vars, field_tuples, field_match}} <-
         cpo
         |> cpo_resolve_guard_field_match(ctrl),
       # NOTE: String.to_atom/1 runs at codegen time on a known field name
       {:ok, cpo} <-
         cpo
         |> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_fetch" |> String.to_atom()),
       {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
       # save resolved for e.g. doc pattern
       {:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
       {:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
       true <- true do
    pattern_form =
      quote do
        def unquote(fun_name)(unquote_splicing(fun_args))

        def unquote(fun_name)(unquote(field_match) = state) do
          unquote(field_value_var)
          |> case do
            x when Plymio.Fontais.Guard.is_value_set(x) ->
              {:ok, x}

            # an unset field is an error for fetch (unlike get)
            _ ->
              Plymio.Codi.Error.new_error_result(
                m: "struct field #{unquote(field_name)} unset",
                v: state
              )
          end
        end

        # catch-all: first argument was not this module's struct
        def unquote(fun_name)(state) do
          Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
        end
      end

    # optional companion forms: @doc, @since and @spec, driven by the cpo
    depend_args = [
      {&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
      {&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
      {&cpo_has_typespec_spec_opts?/1,
       [
         &DEPEND.cpo_transform_typespec_spec_depend/1,
         {:cpo_maybe_put_typespec_spec_result, :result},
         {:cpo_maybe_put_typespec_spec_args, :struct}
       ]}
    ]

    with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
         {:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
      cpos = depend_cpos ++ [cpo]
      {:ok, {cpos, state}}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# *struct_put*: builds a put/2 function for one struct field. The generated
# function returns {:ok, updated_struct} with the field set to the second
# argument, or {:error, error} when the first argument is not an instance of
# the module's struct.
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_struct_put do
  with {:ok, cpo} <- cpo |> cpo_pattern_struct_put_normalise,
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(2),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct, :value]),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
       {:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(2),
       {:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
       # put does not read the current value, so the match var is anonymous
       {:ok, ctrl} <- ctrl |> ctrl_put_fun_build_value(&CODIUTIL.field_build_anon_var/1),
       # fix: neither field_value_var nor field_vars is used by this clause —
       # underscore-prefix both to silence unused-variable warnings
       {:ok, {{field_name, _field_value_var}, _field_vars, field_tuples, field_match}} <-
         cpo
         |> cpo_resolve_guard_field_match(ctrl),
       # NOTE: String.to_atom/1 runs at codegen time on a known field name
       {:ok, cpo} <-
         cpo
         |> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_put" |> String.to_atom()),
       {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
       # save resolved for e.g. doc pattern
       {:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
       {:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
       true <- true do
    pattern_form =
      quote do
        def unquote(fun_name)(unquote_splicing(fun_args))

        def unquote(fun_name)(unquote(field_match) = state, value) do
          # struct!/2 raises on unknown keys, preserving struct guarantees
          {:ok, state |> struct!([{unquote(field_name), value}])}
        end

        # catch-all: first argument was not this module's struct
        def unquote(fun_name)(state, _value) do
          Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
        end
      end

    # optional companion forms: @doc, @since and @spec, driven by the cpo
    depend_args = [
      {&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
      {&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
      {&cpo_has_typespec_spec_opts?/1,
       [
         &DEPEND.cpo_transform_typespec_spec_depend/1,
         {:cpo_maybe_put_typespec_spec_result, :struct_result},
         {:cpo_maybe_put_typespec_spec_args, [[:struct, :any]]}
       ]}
    ]

    with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
         {:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
      cpos = depend_cpos ++ [cpo]
      {:ok, {cpos, state}}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# Expresses the `:struct_maybe_put` pattern: generates
# `struct_<field>_maybe_put/2` clauses that put the value only when the
# field is currently unset (per `Plymio.Fontais.Guard`); an already-set
# field, or an unset new value, leaves the struct unchanged.
#
# Changes from the original: the unused `field_vars` binding is
# underscored, and zero-arity pipe targets gained parentheses.
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_struct_maybe_put do
  with {:ok, cpo} <- cpo |> cpo_pattern_struct_maybe_put_normalise(),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(2),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct, :value]),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
       {:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(2),
       {:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
       # the field's value var IS used below (in the generated guards);
       # only `field_vars` is unused and underscored
       {:ok, {{field_name, field_value_var}, _field_vars, field_tuples, field_match}} <-
         cpo
         |> cpo_resolve_guard_field_match(),
       {:ok, cpo} <-
         cpo
         |> cpo_maybe_put_fun_name(
           "struct_#{to_string(field_name)}_maybe_put"
           |> String.to_atom()
         ),
       {:ok, fun_name} <- cpo |> cpo_resolve_fun_name(),
       {:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
       # save resolved for e.g. doc pattern
       {:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
       {:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
       # no-op clause so every preceding clause can end with a comma
       true <- true do
    pattern_form =
      quote do
        def unquote(fun_name)(unquote_splicing(fun_args))

        # field already set => leave the struct untouched
        def unquote(fun_name)(unquote(field_match) = state, _value)
            when Plymio.Fontais.Guard.is_value_set(unquote(field_value_var)) do
          {:ok, state}
        end

        # field unset => put the value, unless the new value is itself unset
        def unquote(fun_name)(unquote(field_match) = state, value)
            when Plymio.Fontais.Guard.is_value_unset(unquote(field_value_var)) do
          value
          |> Plymio.Fontais.Guard.is_value_unset()
          |> case do
            true ->
              {:ok, state}

            _ ->
              {:ok, state |> struct!([{unquote(field_name), value}])}
          end
        end

        def unquote(fun_name)(state, _value) do
          Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
        end
      end

    depend_args = [
      {&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
      {&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
      {&cpo_has_typespec_spec_opts?/1,
       [
         &DEPEND.cpo_transform_typespec_spec_depend/1,
         {:cpo_maybe_put_typespec_spec_result, :struct_result},
         {:cpo_maybe_put_typespec_spec_args, [[:struct, :any]]}
       ]}
    ]

    with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
         {:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
      cpos = depend_cpos ++ [cpo]

      {:ok, {cpos, state}}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# Expresses the `:struct_has?` pattern: generates a `struct_<field>_has?/1`
# predicate that returns `true` when the struct's field holds a set value
# (per `Plymio.Fontais.Guard.is_value_set/1`) and `false` otherwise.
#
# Changes from the original: the unused `field_vars` binding is
# underscored, and zero-arity pipe targets gained parentheses.
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_struct_has? do
  with {:ok, cpo} <- cpo |> cpo_pattern_struct_has_normalise(),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(1),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_args(:struct),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
       {:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(1),
       {:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
       # the value var IS used in the generated guard; only `field_vars`
       # is unused and underscored
       {:ok, {{field_name, field_value_var}, _field_vars, field_tuples, field_match}} <-
         cpo
         |> cpo_resolve_guard_field_match(),
       {:ok, cpo} <-
         cpo
         |> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_has?" |> String.to_atom()),
       {:ok, fun_name} <- cpo |> cpo_resolve_fun_name(),
       {:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
       # save resolved for e.g. doc pattern
       {:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
       {:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
       # no-op clause so every preceding clause can end with a comma
       true <- true do
    pattern_form =
      quote do
        def unquote(fun_name)(unquote_splicing(fun_args))

        def unquote(fun_name)(unquote(field_match))
            when Plymio.Fontais.Guard.is_value_set(unquote(field_value_var)) do
          true
        end

        def unquote(fun_name)(_state) do
          false
        end
      end

    depend_args = [
      {&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
      {&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
      {&cpo_has_typespec_spec_opts?/1,
       [
         &DEPEND.cpo_transform_typespec_spec_depend/1,
         {:cpo_put_typespec_spec_result, :boolean},
         {:cpo_maybe_put_typespec_spec_args, :struct}
       ]}
    ]

    with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
         {:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
      cpos = depend_cpos ++ [cpo]

      {:ok, {cpos, state}}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# Expresses the `:struct_update` pattern: generates a
# `struct_<field>_update/2` function delegating to the target module's
# `update/2` with a one-entry keyword list for the field.
# NOTE(review): assumes the target module defines `update/2` — the
# generated code calls it unqualified; confirm against the codi docs.
#
# Changes from the original: unused bindings are underscored and
# zero-arity pipe targets gained parentheses.
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_struct_update do
  with {:ok, cpo} <- cpo |> cpo_pattern_struct_update_normalise(),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(2),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct, :value]),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
       {:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(2),
       {:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
       {:ok, ctrl} <- ctrl |> ctrl_put_fun_build_value(&CODIUTIL.field_build_anon_var/1),
       # only the field name, tuples and the struct match pattern are used;
       # the value var and vars list are underscored
       {:ok, {{field_name, _field_value_var}, _field_vars, field_tuples, field_match}} <-
         cpo
         |> cpo_resolve_guard_field_match(ctrl),
       {:ok, cpo} <-
         cpo
         |> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_update" |> String.to_atom()),
       {:ok, fun_name} <- cpo |> cpo_resolve_fun_name(),
       {:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
       # save resolved for e.g. doc pattern
       {:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
       {:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
       # no-op clause so every preceding clause can end with a comma
       true <- true do
    pattern_form =
      quote do
        def unquote(fun_name)(unquote_splicing(fun_args))

        def unquote(fun_name)(unquote(field_match) = state, value) do
          {:ok, state |> update([{unquote(field_name), value}])}
        end

        def unquote(fun_name)(state, _value) do
          Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
        end
      end

    depend_args = [
      {&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
      {&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
      {&cpo_has_typespec_spec_opts?/1,
       [
         &DEPEND.cpo_transform_typespec_spec_depend/1,
         {:cpo_maybe_put_typespec_spec_result, :struct_result},
         {:cpo_maybe_put_typespec_spec_args, [[:struct, :keyword]]}
       ]}
    ]

    with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
         {:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
      cpos = depend_cpos ++ [cpo]

      {:ok, {cpos, state}}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# Expresses the `:struct_set` pattern: generates a `struct_<field>_set/1`
# function that applies the resolved field tuples to the struct via
# `struct!/2`, returning `{:ok, struct}`, with an invalid-struct fallback.
#
# Changes from the original: unused bindings are underscored and
# zero-arity pipe targets gained parentheses.
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_struct_set do
  with {:ok, cpo} <- cpo |> cpo_pattern_struct_set_normalise(),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(1),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct]),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
       {:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(1),
       {:ok, ctrl} <- ctrl |> ctrl_put_fun_build_value(&CODIUTIL.field_build_anon_var/1),
       # only the field name, tuples and the struct match pattern are used;
       # the value var and vars list are underscored
       {:ok, {{field_name, _field_value_var}, _field_vars, field_tuples, field_match}} <-
         cpo
         |> cpo_resolve_guard_field_match(ctrl),
       {:ok, cpo} <-
         cpo
         |> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_set" |> String.to_atom()),
       {:ok, fun_name} <- cpo |> cpo_resolve_fun_name(),
       {:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
       # save resolved for e.g. doc pattern
       {:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
       {:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
       # no-op clause so every preceding clause can end with a comma
       true <- true do
    pattern_form =
      quote do
        def unquote(fun_name)(unquote_splicing(fun_args))

        def unquote(fun_name)(unquote(field_match) = state) do
          {:ok, state |> struct!(unquote(field_tuples))}
        end

        def unquote(fun_name)(state) do
          Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
        end
      end

    depend_args = [
      {&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
      {&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
      {&cpo_has_typespec_spec_opts?/1,
       [
         &DEPEND.cpo_transform_typespec_spec_depend/1,
         {:cpo_maybe_put_typespec_spec_result, :struct_result},
         {:cpo_maybe_put_typespec_spec_args, :struct}
       ]}
    ]

    with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
         {:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
      cpos = depend_cpos ++ [cpo]

      {:ok, {cpos, state}}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# Expresses the `:struct_export` pattern: generates a `struct_export/1`
# function returning `{:ok, keyword}` where the keyword holds, for each
# field, the last *set* value (unset values — per Plymio.Fontais.Guard —
# are dropped, making the export sparse).
def express_pattern(%CODI{} = state, pattern, cpo)
    when pattern == @plymio_codi_pattern_struct_export do
  with {:ok, cpo} <- cpo |> cpo_pattern_struct_export_normalise,
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(1),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_args(:struct),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
       {:ok, cpo} <- cpo |> cpo_maybe_put_fun_name("struct_export" |> String.to_atom()),
       {:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(1),
       # here all four resolved parts are used by the generated form
       {:ok, {_, field_vars, field_tuples, field_match}} <-
         cpo |> cpo_resolve_guard_field_match(ctrl),
       # saves resolved fields for e.g. doc pattern
       {:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
       {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
       {:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl) do
    pattern_form =
      quote do
        def unquote(fun_name)(unquote_splicing(fun_args))

        def unquote(fun_name)(unquote(field_match)) do
          # must maintain order
          tuples = unquote(field_tuples) ++ unquote(field_vars)

          export =
            tuples
            |> Keyword.keys()
            |> Stream.uniq()
            |> Stream.map(fn k ->
              tuples
              |> Keyword.get_values(k)
              # drop unset values
              |> Enum.filter(fn v -> v |> Plymio.Fontais.Guard.is_value_set() end)
              |> case do
                # no set values => mark to drop in next stage
                # NOTE(review): @plymio_fontais_the_unset_value is expected
                # to be an attribute of the module the code is generated
                # into — confirm it is set there
                [] ->
                  {k, @plymio_fontais_the_unset_value}

                # want last value
                values ->
                  {k, values |> List.last()}
              end
            end)
            # make the export sparse i.e. no unset values
            |> Stream.filter(fn {_k, v} -> v |> Plymio.Fontais.Guard.is_value_set() end)
            # take last tuple for each key
            |> Keyword.new()

          {:ok, export}
        end

        def unquote(fun_name)(state) do
          Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
        end
      end

    # sanity check: the generated form must be a valid quoted expression
    :ok = pattern_form |> Macro.validate()

    ## pattern_form |> Code.eval_quoted([], __ENV__)

    depend_args = [
      {&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
      {&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
      {&cpo_has_typespec_spec_opts?/1,
       [
         &DEPEND.cpo_transform_typespec_spec_depend/1,
         {:cpo_put_typespec_spec_result, :opts_result},
         {:cpo_maybe_put_typespec_spec_args, :struct}
       ]}
    ]

    with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
         {:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
      cpos = depend_cpos ++ [cpo]

      {:ok, {cpos, state}}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  else
    {:error, %{__exception__: true}} = result -> result
  end
end
# Catch-all clause: any pattern not handled by the clauses above is
# reported as invalid via an error result.
def express_pattern(_codi, pattern, opts) do
  message = "proxy pattern #{inspect(pattern)} invalid"

  new_error_result(m: message, v: opts)
end
end
| 37.927579 | 106 | 0.602311 |
e852a3b55b986d422a054c659dc0a612851cd941 | 8,763 | ex | Elixir | apps/neoscan/lib/neoscan/blocks/blocks.ex | cc1776/neo-scan | 49fc9256f5c7ed4e0a7cd43513b27ba5d9d4f287 | [
"MIT"
] | null | null | null | apps/neoscan/lib/neoscan/blocks/blocks.ex | cc1776/neo-scan | 49fc9256f5c7ed4e0a7cd43513b27ba5d9d4f287 | [
"MIT"
] | null | null | null | apps/neoscan/lib/neoscan/blocks/blocks.ex | cc1776/neo-scan | 49fc9256f5c7ed4e0a7cd43513b27ba5d9d4f287 | [
"MIT"
defmodule Neoscan.Blocks do
  @moduledoc """
  The boundary for the Blocks system.
  """

  import Ecto.Query, warn: true
  alias Neoscan.Repo
  alias Neoscan.Blocks.Block
  alias Neoscan.Transactions
  alias Neoscan.Transactions.Transaction
  alias NeoscanMonitor.Api
  alias Neoscan.Stats
  alias NeoscanSync.HttpCalls
  alias NeoscanSync.Blockchain

  @doc """
  Returns the list of blocks.

  ## Examples

      iex> list_blocks()
      [%Block{}, ...]

  """
  def list_blocks do
    Repo.all(Block)
  end

  @doc """
  Count total blocks in DB.

  ## Examples

      iex> count_blocks()
      50

  """
  def count_blocks do
    Repo.aggregate(Block, :count, :id)
  end

  @doc """
  Returns the list of blocks in the home page.

  ## Examples

      iex> home_blocks()
      [%Block{}, ...]

  """
  # Returns the 15 most recent blocks (by insertion id) as plain maps.
  # NOTE(review): this is the same query as `paginate_blocks/1` page 1 —
  # candidates for sharing a single query helper.
  def home_blocks do
    block_query =
      from(
        e in Block,
        # `index > -1` appears to act as an index hint / filter on the
        # block height column
        where: e.index > -1,
        order_by: [
          desc: e.id
        ],
        select: %{
          :index => e.index,
          :time => e.time,
          :tx_count => e.tx_count,
          :hash => e.hash,
          :size => e.size
        },
        limit: 15
      )

    Repo.all(block_query)
  end

  @doc """
  Returns the list of paginated blocks.

  ## Examples

      iex> paginate_blocks(page)
      [%Block{}, ...]

  """
  def paginate_blocks(pag) do
    block_query =
      from(
        e in Block,
        where: e.index > -1,
        order_by: [
          desc: e.id
        ],
        select: %{
          :index => e.index,
          :time => e.time,
          :tx_count => e.tx_count,
          :hash => e.hash,
          :size => e.size
        },
        limit: 15
      )

    Repo.paginate(block_query, page: pag, page_size: 15)
  end

  @doc """
  Gets a single block.

  Raises `Ecto.NoResultsError` if the Block does not exist.

  ## Examples

      iex> get_block!(123)
      %Block{}

      iex> get_block!(456)
      ** (Ecto.NoResultsError)

  """
  def get_block!(id), do: Repo.get!(Block, id)

  @doc """
  Gets a single block by its hash value.

  ## Examples

      iex> get_block_by_hash(123)
      %Block{}

      iex> get_block_by_hash(456)
      nil

  """
  def get_block_by_hash(hash) do
    query =
      from(
        e in Block,
        where: e.hash == ^hash,
        select: e
      )

    # hash is expected to be unique, so at most one row comes back
    Repo.all(query)
    |> List.first()
  end

  @doc """
  Gets a single block by its hash value for the blocks page.

  ## Examples

      iex> get_block_by_hash_for_view(hash)
      %Block{}

      iex> get_block_by_hash_for_view(hash)
      nil

  """
  # Like `get_block_by_hash/1` but preloads a trimmed-down view of the
  # block's transactions (type and txid only).
  def get_block_by_hash_for_view(hash) do
    trans_query =
      from(
        t in Transaction,
        select: %{
          type: t.type,
          txid: t.txid
        }
      )

    query =
      from(
        e in Block,
        where: e.hash == ^hash,
        preload: [
          transactions: ^trans_query
        ],
        select: e
      )

    Repo.all(query)
    |> List.first()
  end

  @doc """
  Gets a single block by its hash value for the blocks page, with paginated
  transactions.

  ## Examples

      iex> paginate_transactions(hash, page)
      %Block{}

      iex> paginate_transactions(hash, page)
      nil

  """
  # Returns `{block, transactions}`; pagination of the transactions is
  # delegated to the Transactions context.
  def paginate_transactions(hash, page) do
    query =
      from(
        e in Block,
        where: e.hash == ^hash,
        select: e
      )

    block =
      Repo.all(query)
      |> List.first()

    transactions = Transactions.paginate_transactions_for_block(block.id, page)

    {block, transactions}
  end

  @doc """
  Gets a single block by its height value.

  ## Examples

      iex> get_block_by_height(123)
      %Block{}

      iex> get_block_by_height(456)
      nil

  """
  def get_block_by_height(height) do
    query =
      from(
        e in Block,
        where: e.index == ^height,
        select: e
      )

    Repo.all(query)
    |> List.first()
  end

  @doc """
  Get the block time by its height value.

  ## Examples

      iex> get_block_time(123)
      12518982

      iex> get_block_time(456)
      nil

  """
  # Fetches the block time from the blockchain RPC rather than the DB.
  # NOTE(review): retries recursively on any failure — a persistent RPC
  # error makes this loop forever; consider a retry limit/backoff.
  def get_block_time(height) do
    url = HttpCalls.url(1)

    case Blockchain.get_block_by_height(url, height) do
      {:ok, block} ->
        Map.get(block, "time")

      _ ->
        get_block_time(height)
    end
  end

  @doc """
  Creates a block.

  ## Examples

      iex> create_block(%{field: value})
      {:ok, %Block{}}

      iex> create_block(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  # NOTE(review): uses `Repo.insert!/1`, so on invalid attrs this raises
  # rather than returning `{:error, changeset}` as the doc suggests.
  def create_block(attrs \\ %{}) do
    %Block{}
    |> Block.changeset(attrs)
    |> Repo.insert!()
    |> update_blocks_state
  end

  # Pushes the freshly inserted block into the monitor cache and the stats
  # table as a side effect, then returns the block unchanged.
  def update_blocks_state(block) do
    Api.add_block(block)
    Stats.add_block_to_table()
    block
  end

  @doc """
  Updates a block.

  ## Examples

      iex> update_block(block, %{field: new_value})
      {:ok, %Block{}}

      iex> update_block(block, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_block(%Block{} = block, attrs) do
    block
    |> Block.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Block.

  ## Examples

      iex> delete_block(block)
      {:ok, %Block{}}

      iex> delete_block(block)
      {:error, %Ecto.Changeset{}}

  """
  # NOTE(review): `Repo.delete!/1` raises on failure; the doc's
  # `{:error, changeset}` example does not match the bang call.
  def delete_block(%Block{:updated_at => _time} = block) do
    # Addresses.rollback_addresses(time) TODO
    Repo.delete!(block)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking block changes.

  ## Examples

      iex> change_block(block)
      %Ecto.Changeset{source: %Block{}}

  """
  def change_block(%Block{} = block) do
    Block.changeset(block, %{})
  end

  @doc """
  Returns the highest block in the database.

  ## Examples

      iex> get_highest_block_in_db()
      {:ok, %Block{}}

  """
  # Returns `{:ok, index}` — the highest height, or -1 for an empty table.
  def get_highest_block_in_db do
    query =
      from(
        e in Block,
        select: e.index,
        where: e.index > -1,
        # force postgres to use index
        order_by: [
          desc: e.index
        ],
        limit: 1
      )

    case Repo.all(query) do
      [index] ->
        {:ok, index}

      [] ->
        {:ok, -1}
    end
  end

  @doc """
  Get all blocks higher than `height`.

  ## Examples

      iex> get_higher_than(height)
      [%Block{}, ...]

  """
  def get_higher_than(index) do
    query =
      from(
        e in Block,
        where: e.index > ^index,
        select: e
      )

    Repo.all(query)
  end

  @doc """
  Delete all blocks in list.

  ## Examples

      iex> delete_blocks([%Block{}, ...])
      { :ok, "deleted"}

  """
  def delete_blocks([block | tail]), do: [delete_block(block) | delete_blocks(tail)]
  def delete_blocks([]), do: {:ok, "Deleted"}

  @doc """
  Delete all blocks higher than `height`.

  ## Examples

      iex> delete_higher_than(height)
      [%Block{}, ...]

  """
  def delete_higher_than(height) do
    get_higher_than(height)
    |> delete_blocks
  end

  # Computes the total of spent fees in the network between a height range.
  # Both heights arrive as strings; returns a short error string when
  # either fails to parse as an integer (see the two rescue clauses).
  def get_fees_in_range(height1, height2) do
    value1 = String.to_integer(height1)

    try do
      String.to_integer(height2)
    rescue
      ArgumentError ->
        "wrong input"
    else
      value2 ->
        # accept the heights in either order
        range = [value1, value2]
        max = Enum.max(range)
        min = Enum.min(range)

        query =
          from(
            b in Block,
            where: b.index >= ^min and b.index <= ^max,
            select: %{
              :total_sys_fee => b.total_sys_fee,
              :total_net_fee => b.total_net_fee
            }
          )

        Repo.all(query)
        |> Enum.reduce(%{:total_sys_fee => 0, :total_net_fee => 0}, fn %{
                                                                         :total_sys_fee => sys_fee,
                                                                         :total_net_fee => net_fee
                                                                       },
                                                                       acc ->
          %{
            :total_sys_fee => acc.total_sys_fee + sys_fee,
            :total_net_fee => acc.total_net_fee + net_fee
          }
        end)
    end
  rescue
    ArgumentError ->
      "wrong input string can't be parsed into integer"
  end

  # Sums the per-transaction "sys_fee" and "net_fee" strings of a raw block
  # map (unparseable fees count as 0) and merges the totals back into the
  # block under "total_sys_fee" / "total_net_fee".
  def compute_fees(block) do
    sys_fee =
      Enum.reduce(block["tx"], 0, fn tx, acc ->
        case Float.parse(tx["sys_fee"]) do
          {num, _st} ->
            acc + num

          :error ->
            acc + 0
        end
      end)

    net_fee =
      Enum.reduce(block["tx"], 0, fn tx, acc ->
        case Float.parse(tx["net_fee"]) do
          {num, _st} ->
            acc + num

          :error ->
            acc + 0
        end
      end)

    Map.merge(block, %{"total_sys_fee" => sys_fee, "total_net_fee" => net_fee})
  end
end
| 18.605096 | 99 | 0.524364 |
e852a53bad4b9d3769e7fdbde8abfab09f0fe219 | 2,050 | ex | Elixir | lib/nested_filter.ex | kianmeng/nested_filter | e391787ef3a5c35b2ea590a51c7237769b6d8e73 | [
"MIT"
] | 33 | 2017-04-10T04:38:57.000Z | 2021-10-05T21:18:25.000Z | lib/nested_filter.ex | kianmeng/nested_filter | e391787ef3a5c35b2ea590a51c7237769b6d8e73 | [
"MIT"
] | 97 | 2017-04-05T01:15:09.000Z | 2022-02-28T13:22:34.000Z | lib/nested_filter.ex | Obighbyd/nested_filter | e391787ef3a5c35b2ea590a51c7237769b6d8e73 | [
"MIT"
defmodule NestedFilter do
  @moduledoc """
  Recursive filtering of (nested) maps.

  Offers helpers to drop key/value pairs from arbitrarily nested maps by
  key or by value, and to keep only selected keys. Structs are left
  untouched, lists are traversed element by element, and any other term is
  returned as-is.
  """

  @type key :: any
  @type val :: any
  @type keys_to_select :: list
  @type predicate :: (key, val -> boolean)

  # Structs are treated as opaque values and returned unchanged.
  @spec drop_by(struct, predicate) :: struct
  def drop_by(%_{} = struct, _), do: struct

  @doc """
  Recursively removes every `{key, value}` pair for which `predicate`
  returns a truthy result. Values are cleaned depth-first before the
  predicate sees them.
  """
  @spec drop_by(map, predicate) :: map
  def drop_by(map, predicate) when is_map(map) do
    map
    |> Enum.flat_map(fn {key, val} ->
      cleaned = drop_by(val, predicate)

      if predicate.(key, cleaned), do: [], else: [{key, cleaned}]
    end)
    |> Map.new()
  end

  @spec drop_by(list, predicate) :: list
  def drop_by(list, predicate) when is_list(list) do
    Enum.map(list, fn element -> drop_by(element, predicate) end)
  end

  # Any non-map, non-list, non-struct term passes through untouched.
  def drop_by(other, _) do
    other
  end

  @doc """
  Take a (nested) map and filter out any keys whose value is in the
  `values_to_reject` list.
  """
  @spec drop_by_value(%{any => any}, [any]) :: %{any => any}
  def drop_by_value(map, values_to_reject) when is_map(map) do
    drop_by(map, fn _key, val -> Enum.member?(values_to_reject, val) end)
  end

  @doc """
  Take a (nested) map and filter out any values whose key is in the
  `keys_to_reject` list.
  """
  @spec drop_by_key(%{any => any}, [any]) :: %{any => any}
  def drop_by_key(map, keys_to_reject) when is_map(map) do
    drop_by(map, fn key, _val -> Enum.member?(keys_to_reject, key) end)
  end

  # Collects, depth-first, every selected key found anywhere in the nested
  # map; deeper occurrences are flattened into a single map.
  @spec take_by(map, keys_to_select) :: map
  def take_by(map, keys_to_select) when is_map(map) do
    nested =
      Enum.reduce(map, %{}, fn {_key, val}, collected ->
        Map.merge(collected, take_by(val, keys_to_select))
      end)

    Map.merge(nested, Map.take(map, keys_to_select))
  end

  def take_by(_other, _) do
    %{}
  end

  @doc """
  Take a (nested) map and keep any values whose key is in the
  `keys_to_select` list.
  """
  @spec take_by_key(%{any => any}, [any]) :: %{any => any}
  def take_by_key(map, keys_to_select) when is_map(map) do
    map
    |> take_by(keys_to_select)
    |> Map.merge(Map.take(map, keys_to_select))
  end
end
| 25.625 | 74 | 0.631707 |
e852d167aa129174ca6e9e250a3891e0a5b285ed | 301 | ex | Elixir | web/controllers/user_controller.ex | krondev/elm-elixir-starter | 3954fb3982e9e2330cfae197c7ecea34402cfcea | [
"MIT"
] | 73 | 2017-06-06T20:10:11.000Z | 2020-12-05T22:11:31.000Z | web/controllers/user_controller.ex | krondev/elm-elixir-starter | 3954fb3982e9e2330cfae197c7ecea34402cfcea | [
"MIT"
] | 8 | 2017-05-24T02:08:58.000Z | 2018-05-11T14:54:29.000Z | web/controllers/user_controller.ex | krondev/elm-elixir-starter | 3954fb3982e9e2330cfae197c7ecea34402cfcea | [
"MIT"
defmodule Elmelixirstarter.UserController do
  @moduledoc """
  Controller exposing the currently authenticated user.
  """

  use Elmelixirstarter.Web, :controller
  # Guardian's controller integration passes the resource and claims as
  # extra action arguments (see `me/4` below).
  use Guardian.Phoenix.Controller

  # Reject unauthenticated requests before any action runs.
  plug(Guardian.Plug.EnsureAuthenticated, handler: Elmelixirstarter.AuthErrorHandler)

  # Renders the authenticated user (injected by Guardian) as JSON.
  def me(conn, _params, user, _claims) do
    render(conn, "show.json", user: user)
  end
end
| 27.363636 | 85 | 0.784053 |
e852d2e7c0d503883ba1d4f695f8b81632163d65 | 4,915 | exs | Elixir | mix.exs | iautom8things/oban | 5f1dfc277c2933fdc0dada812dbbca31c6d55fa0 | [
"Apache-2.0"
] | null | null | null | mix.exs | iautom8things/oban | 5f1dfc277c2933fdc0dada812dbbca31c6d55fa0 | [
"Apache-2.0"
] | null | null | null | mix.exs | iautom8things/oban | 5f1dfc277c2933fdc0dada812dbbca31c6d55fa0 | [
"Apache-2.0"
defmodule Oban.MixProject do
  @moduledoc false

  use Mix.Project

  @version "2.3.4"

  def project do
    [
      app: :oban,
      version: @version,
      elixir: "~> 1.8",
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      aliases: aliases(),
      # custom mix tasks that should default to the :test environment
      preferred_cli_env: [
        bench: :test,
        ci: :test,
        "test.setup": :test
      ],

      # Hex
      package: package(),
      description: """
      Robust job processing, backed by modern PostgreSQL.
      """,

      # Dialyzer
      dialyzer: [
        plt_add_apps: [:ex_unit],
        plt_core_path: "_build/#{Mix.env()}",
        flags: [:error_handling, :race_conditions, :underspecs]
      ],

      # Docs
      name: "Oban",
      docs: [
        main: "Oban",
        source_ref: "v#{@version}",
        source_url: "https://github.com/sorentwo/oban",
        extra_section: "GUIDES",
        formatters: ["html"],
        # Pro/Web guides are only included when those sibling repos are
        # checked out locally (see pro_extras/0 and web_extras/0)
        extras: extras() ++ pro_extras() ++ web_extras(),
        groups_for_extras: groups_for_extras(),
        groups_for_modules: groups_for_modules()
      ]
    ]
  end

  def application do
    [
      mod: {Oban.Application, []},
      extra_applications: [:logger]
    ]
  end

  # Test builds also compile the support helpers.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_env), do: ["lib"]

  defp extras do
    [
      "CHANGELOG.md",
      "guides/installation.md",
      "guides/troubleshooting.md",
      "guides/writing_plugins.md",
      "guides/upgrading/v2.0.md",
      "guides/release_configuration.md",
      "guides/recipes/recursive-jobs.md",
      "guides/recipes/reliable-scheduling.md",
      "guides/recipes/reporting-progress.md",
      "guides/recipes/expected-failures.md",
      "guides/recipes/splitting-queues.md"
    ]
  end

  # Oban Pro docs, pulled in only when the private repo sits next to this
  # one on disk.
  defp pro_extras do
    if File.exists?("../oban_pro") do
      [
        "../oban_pro/guides/pro/overview.md": [filename: "pro_overview"],
        "../oban_pro/guides/pro/installation.md": [filename: "pro_installation"],
        "../oban_pro/guides/plugins/lifeline.md": [title: "Lifeline Plugin"],
        "../oban_pro/guides/plugins/dynamic_cron.md": [title: "Dynamic Cron Plugin"],
        "../oban_pro/guides/plugins/dynamic_pruner.md": [title: "Dynamic Pruner Plugin"],
        "../oban_pro/guides/plugins/reprioritizer.md": [title: "Reprioritizer Plugin"],
        "../oban_pro/guides/workers/batch.md": [title: "Batch Worker"],
        "../oban_pro/guides/workers/workflow.md": [title: "Workflow Worker"],
        "../oban_pro/CHANGELOG.md": [filename: "pro-changelog", title: "Changelog"]
      ]
    else
      []
    end
  end

  # Oban Web docs, same local-checkout convention as pro_extras/0.
  defp web_extras do
    if File.exists?("../oban_web") do
      [
        "../oban_web/guides/web/overview.md": [filename: "web_overview"],
        "../oban_web/guides/web/installation.md": [filename: "web_installation"],
        "../oban_web/guides/web/customizing.md": [filename: "web_customizing"],
        "../oban_web/guides/web/telemetry.md": [filename: "web_telemetry"],
        "../oban_web/guides/web/troubleshooting.md": [filename: "web_troubleshooting"],
        "../oban_web/CHANGELOG.md": [filename: "web-changelog", title: "Changelog"]
      ]
    else
      []
    end
  end

  defp groups_for_extras do
    [
      Guides: ~r{guides/[^\/]+\.md},
      Recipes: ~r{guides/recipes/.?},
      Extras: ~r{^CHANGELOG.md},
      "Upgrade Guides": ~r{guides/upgrading/.*},
      "Oban Pro": ~r{oban_pro/.?},
      "Oban Web": ~r{oban_web/.?}
    ]
  end

  defp groups_for_modules do
    [
      Plugins: [
        Oban.Plugins.Cron,
        Oban.Plugins.Pruner,
        Oban.Plugins.Stager
      ],
      Extending: [
        Oban.Config,
        Oban.Notifier,
        Oban.Registry,
        Oban.Repo
      ]
    ]
  end

  defp package do
    [
      maintainers: ["Parker Selbert"],
      licenses: ["Apache-2.0"],
      links: %{
        Website: "https://getoban.pro",
        Changelog: "https://github.com/sorentwo/oban/blob/master/CHANGELOG.md",
        GitHub: "https://github.com/sorentwo/oban",
        Sponsor: "https://getoban.pro"
      }
    ]
  end

  defp deps do
    [
      {:ecto_sql, ">= 3.4.3"},
      {:jason, "~> 1.1"},
      {:postgrex, "~> 0.14"},
      {:telemetry, "~> 0.4"},
      {:stream_data, "~> 0.4", only: [:test, :dev]},
      {:tzdata, "~> 1.0", only: [:test, :dev]},
      {:benchee, "~> 1.0", only: [:test, :dev], runtime: false},
      {:credo, "~> 1.4", only: [:test, :dev], runtime: false},
      {:dialyxir, "~> 1.0", only: [:test, :dev], runtime: false},
      {:ex_doc, "~> 0.20", only: [:test, :dev], runtime: false}
    ]
  end

  defp aliases do
    [
      bench: "run bench/bench_helper.exs",
      "test.setup": ["ecto.create", "ecto.migrate"],
      # "ci" mirrors the checks run in continuous integration
      ci: [
        "format --check-formatted",
        "deps.unlock -- check-unused",
        "credo --strict",
        "test --raise",
        "dialyzer"
      ]
    ]
  end
end
| 27.768362 | 89 | 0.560122 |
e852d8788a3ca4ca1c4bd6b2a7df9b580561a530 | 1,885 | exs | Elixir | test/cingi/when_plan_test.exs | Rhathe/Cing_I | fd4e6d6d73f9ae2115e40e51ad47135869cb2689 | [
"MIT"
] | 14 | 2017-09-21T19:45:12.000Z | 2020-10-12T04:06:11.000Z | test/cingi/when_plan_test.exs | Rhathe/Cing_I | fd4e6d6d73f9ae2115e40e51ad47135869cb2689 | [
"MIT"
] | null | null | null | test/cingi/when_plan_test.exs | Rhathe/Cing_I | fd4e6d6d73f9ae2115e40e51ad47135869cb2689 | [
"MIT"
defmodule CingiWhenTest do
  @moduledoc """
  Exercises the `when` conditions of mission plans: each test asserts
  which lines of the plan's output were (or were not) produced after
  running `test/mission_plans/when.yaml`.
  """

  use ExUnit.Case

  describe "when" do
    # Runs the mission plan once per test; the returned context is expected
    # to carry the collected output under `:output`.
    setup do
      Helper.run_mission_report("test/mission_plans/when.yaml")
    end

    test "runs correct amount of output", ctx do
      assert 10 = length(ctx.output)
    end

    test "things that should not run don't run", ctx do
      Enum.map ctx.output, &(assert not(&1 =~ "should not run"))
    end

    test "runs first few commands", ctx do
      assert ["first", "second"] = Enum.slice(ctx.output, 0, 2)
    end

    test "runs regardless, since fail_fast is false", ctx do
      assert "runs regardless" in ctx.output
    end

    test "runs correct output for exit code", ctx do
      assert "runs because of exit code 1" in ctx.output
      assert "should not run because not exit code 0" not in ctx.output
    end

    test "runs correct output for failure", ctx do
      assert "runs because of failure" in ctx.output
      assert "should not run because not success" not in ctx.output
    end

    test "runs correct output for output", ctx do
      assert "runs because of second in outputs" in ctx.output
      assert "should not run because of no first in outputs" not in ctx.output
    end

    test "runs correct output for multiple conditions", ctx do
      assert "runs because of second in outputs and exit code of 1" in ctx.output
      assert "should not run because although second in outputs, exit_code is not 2" not in ctx.output
    end

    test "runs correct output for parallel group", ctx do
      assert "runs because parallel group exited with 0" in ctx.output
      assert "should not run because parallel group was success" not in ctx.output
    end

    test "runs correct output meaning last submission does not make a nil exit code", ctx do
      assert "runs because exit code is not nil with last mission being skipped" in ctx.output
    end

    test "runs end mission because of false fail_fast", ctx do
      assert ["end"] = Enum.take(ctx.output, -1)
    end
  end
end
| 31.949153 | 99 | 0.722546 |
e852e23a625f08d0e39355ebbd09fa0437f483b7 | 3,473 | ex | Elixir | lib/oban/plugins/gossip.ex | ethangunderson/oban | d8d044b4bfd79713ca120be2cb2c70e26eaa5b5d | [
"Apache-2.0"
] | null | null | null | lib/oban/plugins/gossip.ex | ethangunderson/oban | d8d044b4bfd79713ca120be2cb2c70e26eaa5b5d | [
"Apache-2.0"
] | null | null | null | lib/oban/plugins/gossip.ex | ethangunderson/oban | d8d044b4bfd79713ca120be2cb2c70e26eaa5b5d | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Plugins.Gossip do
@moduledoc """
The Gossip plugin uses PubSub to periodically exchange queue state information between all
interested nodes. This allows Oban instances to broadcast state information regardless of which
engine they are using, and without storing anything in the database.
Gossip enables real-time updates across an entire cluster, and is essential to the operation of
UIs like Oban Web.
The Gossip plugin entirely replaced heartbeats and the legacy `oban_beats` table.
## Using the Plugin
The following example demonstrates using the plugin without any configuration, which will broadcast
the state of each local queue every 1 second:
config :my_app, Oban,
plugins: [Oban.Plugins.Gossip],
...
Override the default options to broadcast every 5 seconds:
config :my_app, Oban,
plugins: [{Oban.Plugins.Gossip, interval: :timer.seconds(5)}],
...
## Options
* `:interval` — the number of milliseconds between gossip broadcasts
## Instrumenting with Telemetry
The `Oban.Plugins.Gossip` plugin adds the following metadata to the `[:oban, :plugin, :stop]` event:
* `:gossip_count` - the number of queues that had activity broadcasted
"""
@behaviour Oban.Plugin
use GenServer
alias Oban.{Notifier, Plugin, Validation}
@type option :: Plugin.option() | {:interval, pos_integer()}
defmodule State do
@moduledoc false
defstruct [:conf, :name, :timer, interval: :timer.seconds(1)]
end
@impl Plugin
@spec start_link([option()]) :: GenServer.on_start()
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: opts[:name])
end
@impl Plugin
def validate(opts) do
Validation.validate(opts, fn
{:conf, _} -> :ok
{:name, _} -> :ok
{:interval, interval} -> Validation.validate_integer(:interval, interval)
option -> {:error, "unknown option provided: #{inspect(option)}"}
end)
end
@impl GenServer
def init(opts) do
Validation.validate!(opts, &validate/1)
Process.flag(:trap_exit, true)
state =
State
|> struct!(opts)
|> schedule_gossip()
{:ok, state}
end
@impl GenServer
def terminate(_reason, state) do
if is_reference(state.timer), do: Process.cancel_timer(state.timer)
:ok
end
@impl GenServer
def handle_info(:gossip, %State{} = state) do
  meta = %{conf: state.conf, plugin: __MODULE__}
  # Match spec selecting the pids registered in Oban.Registry under
  # {conf.name, {:producer, queue}} — i.e. this instance's queue producers.
  match = [{{{state.conf.name, {:producer, :_}}, :"$1", :_}, [], [:"$1"]}]

  :telemetry.span([:oban, :plugin], meta, fn ->
    checks =
      Oban.Registry
      |> Registry.select(match)
      |> Enum.map(&safe_check(&1, state))
      # Producers that died or timed out yield nil and are dropped.
      |> Enum.reject(&is_nil/1)
      |> Enum.map(&sanitize_name/1)

    # Only broadcast when at least one producer responded.
    if Enum.any?(checks), do: Notifier.notify(state.conf, :gossip, checks)

    # Exposed as :gossip_count on the [:oban, :plugin, :stop] telemetry event.
    {:ok, Map.put(meta, :gossip_count, length(checks))}
  end)

  {:noreply, schedule_gossip(state)}
end

# Ignore any other message (stray timers, monitor noise, etc.).
def handle_info(_message, state) do
  {:noreply, state}
end
# Scheduling
# Arms the next :gossip broadcast and stores the timer ref on the state.
defp schedule_gossip(state) do
  timer = Process.send_after(self(), :gossip, state.interval)
  %{state | timer: timer}
end
# Checking
# Issues a :check call to a producer without ever crashing the gossip loop:
# a dead pid returns nil (the `if` has no else), and an exit from the call
# (noproc, timeout after state.interval ms) is caught and mapped to nil.
defp safe_check(pid, state) do
  if Process.alive?(pid), do: GenServer.call(pid, :check, state.interval)
catch
  :exit, _ -> nil
end
# Ensure the check's :name is JSON/broadcast friendly: binaries pass
# through untouched, anything else is rendered with inspect/1.
defp sanitize_name(%{name: name} = check) do
  if is_binary(name), do: check, else: %{check | name: inspect(name)}
end
end
| 26.310606 | 102 | 0.664843 |
e85300c3294281065660a53f43aaef1a296573a9 | 894 | ex | Elixir | lib/xadmin/adminlog.ex | ntsai/xadmin | 82d8be63e69483ff66472481e66f9870face355b | [
"MIT"
] | 5 | 2016-08-30T01:23:50.000Z | 2021-09-22T14:39:00.000Z | lib/xadmin/adminlog.ex | ntsai/xadmin | 82d8be63e69483ff66472481e66f9870face355b | [
"MIT"
] | null | null | null | lib/xadmin/adminlog.ex | ntsai/xadmin | 82d8be63e69483ff66472481e66f9870face355b | [
"MIT"
] | 1 | 2021-12-10T11:10:55.000Z | 2021-12-10T11:10:55.000Z | defmodule XAdmin.Adminlog do
# `use XAdmin.Adminlog` injects the Adminlog alias and requires Logger so
# the logging macros below can be expanded at the call site.
defmacro __using__(_) do
  quote do
    alias XAdmin.Adminlog
    require Logger
  end
end
# Compile-time-gated debug logging: the Mix.env/config check runs when the
# caller is compiled, so outside :dev/:test (or with :xadmin, :logger unset
# or false) the macro expands to nothing and the call site is compiled out.
# NOTE(review): the config is read at compile time — changing :logger at
# runtime will not re-enable logging; confirm that is intended.
defmacro debug(message) do
  if Mix.env in [:dev, :test] and Application.get_env(:xadmin, :logger, false) do
    quote do
      Logger.debug unquote(message)
    end
  end
end
# Same compile-time gating as debug/1, expanding to Logger.info/1 when
# logging is enabled for this build, otherwise to nothing.
defmacro info(message) do
  enabled? = Mix.env() in [:dev, :test] and Application.get_env(:xadmin, :logger, false)

  if enabled? do
    quote do: Logger.info(unquote(message))
  end
end
# Compile-time-gated warning logging; expands to nothing outside :dev/:test
# or when the :xadmin, :logger config is false/unset.
defmacro warn(message) do
  if Mix.env in [:dev, :test] and Application.get_env(:xadmin, :logger, false) do
    quote do
      Logger.warn unquote(message)
    end
  end
end
# Compile-time-gated error logging; same gating rules as debug/info/warn.
defmacro error(message) do
  if Mix.env in [:dev, :test] and Application.get_env(:xadmin, :logger, false) do
    quote do
      Logger.error unquote(message)
    end
  end
end
end
| 22.923077 | 83 | 0.636465 |
e8530ab6c08ca33703072d0395abbf013d5292fa | 21,543 | ex | Elixir | lib/phoenix/test/conn_test.ex | Douvi/phoenix | 7832e45e056a48f9dd4bd8178d6c6b0569f19194 | [
"MIT"
] | null | null | null | lib/phoenix/test/conn_test.ex | Douvi/phoenix | 7832e45e056a48f9dd4bd8178d6c6b0569f19194 | [
"MIT"
] | null | null | null | lib/phoenix/test/conn_test.ex | Douvi/phoenix | 7832e45e056a48f9dd4bd8178d6c6b0569f19194 | [
"MIT"
] | null | null | null | defmodule Phoenix.ConnTest do
@moduledoc """
Conveniences for testing Phoenix endpoints and connection related helpers.
You likely want to use this module or make it part of your `ExUnit.CaseTemplate`.
Once used, this module automatically imports all functions defined here as
well as the functions in `Plug.Conn`.
## Endpoint testing
`Phoenix.ConnTest` typically works against endpoints. That's the preferred way
to test anything that your router dispatches to:
@endpoint MyAppWeb.Endpoint
test "says welcome on the home page" do
conn = get(build_conn(), "/")
assert conn.resp_body =~ "Welcome!"
end
test "logs in" do
conn = post(build_conn(), "/login", [username: "john", password: "doe"])
assert conn.resp_body =~ "Logged in!"
end
The `@endpoint` module attribute contains the endpoint under testing,
most commonly your application endpoint itself. If you are using the
MyApp.ConnCase generated by Phoenix, it is automatically set for you.
As in your router and controllers, the connection is the main abstraction
in testing. `build_conn()` returns a new connection and functions in this
module can be used to manipulate the connection before dispatching
to the endpoint.
For example, one could set the accepts header for json requests as
follows:
build_conn()
|> put_req_header("accept", "application/json")
|> get("/")
You can also create your own helpers, such as `json_conn()` that uses
`build_conn/0` and `put_req_header/3`, so you avoid repeating the connection
setup throughout your tests.
## Controller testing
The functions in this module can also be used for controller testing.
While endpoint testing is preferred over controller testing, especially
since the controller in Phoenix plays an integration role between your
domain and your views, unit testing controllers may be helpful in some
situations.
For such cases, you need to set the `@endpoint` attribute to your controller
and pass an atom representing the action to dispatch:
@endpoint MyAppWeb.HomeController
test "says welcome on the home page" do
conn = get(build_conn(), :index)
assert conn.resp_body =~ "Welcome!"
end
Keep in mind that, once the `@endpoint` variable is set, all tests after
setting it will be affected.
## Views testing
Under other circumstances, you may be testing a view or another layer that
requires a connection for processing. For such cases, a connection can be
created using the `conn/3` helper:
MyApp.UserView.render("hello.html", conn: build_conn(:get, "/"))
While `build_conn/0` returns a connection with no request information to it,
`build_conn/2` returns a connection with the given request information already
filled in.
## Recycling
Browsers implement a storage by using cookies. When a cookie is set in the
response, the browser stores it and sends it in the next request.
To emulate this behaviour, this module provides the idea of recycling.
The `recycle/1` function receives a connection and returns a new connection,
similar to the one returned by `build_conn/0` with all the response cookies
from the previous connection defined as request headers. This is useful when
testing multiple routes that require cookies or session to work.
Keep in mind Phoenix will automatically recycle the connection between
dispatches. This usually works out well most times, but it may discard
information if you are modifying the connection before the next dispatch:
# No recycling as the connection is fresh
conn = get(build_conn(), "/")
# The connection is recycled, creating a new one behind the scenes
conn = post(conn, "/login")
# We can also recycle manually in case we want custom headers
conn =
conn
|> recycle()
|> put_req_header("x-special", "nice")
# No recycling as we did it explicitly
conn = delete(conn, "/logout")
Recycling also recycles the "accept" and "authorization" headers,
as well as peer data information.
"""
@doc false
defmacro __using__(_) do
quote do
import Plug.Conn
import Phoenix.ConnTest
end
end
alias Plug.Conn
import ExUnit.Assertions, only: [flunk: 1]
@doc """
Creates a connection to be used in upcoming requests.
"""
@spec build_conn() :: Conn.t
# A fresh connection is simply a GET to "/" with no body.
def build_conn(), do: build_conn(:get, "/", nil)
@doc """
Deprecated version of `conn/0`. Use `build_conn/0` instead.
"""
@spec conn() :: Conn.t
def conn() do
IO.warn "using conn/0 to build a connection is deprecated. Use build_conn/0 instead"
build_conn()
end
@doc """
Creates a connection to be used in upcoming requests
with a preset method, path and body.
This is useful when a specific connection is required
for testing a plug or a particular function.
"""
@spec build_conn(atom | binary, binary, binary | list | map | nil) :: Conn.t
def build_conn(method, path, params_or_body \\ nil) do
  # Build a test conn via Plug's test adapter, then disable CSRF checks and
  # mark it recycled so the first dispatch won't try to recycle cookies.
  Plug.Adapters.Test.Conn.conn(%Conn{}, method, path, params_or_body)
  |> Conn.put_private(:plug_skip_csrf_protection, true)
  |> Conn.put_private(:phoenix_recycled, true)
end
@doc """
Deprecated version of `conn/3`. Use `build_conn/3` instead.
"""
@spec conn(atom | binary, binary, binary | list | map | nil) :: Conn.t
def conn(method, path, params_or_body \\ nil) do
IO.warn """
using conn/3 to build a connection is deprecated. Use build_conn/3 instead.
#{Exception.format_stacktrace}
"""
build_conn(method, path, params_or_body)
end
@http_methods [:get, :post, :put, :patch, :delete, :options, :connect, :trace, :head]
for method <- @http_methods do
@doc """
Dispatches to the current endpoint.
See `dispatch/5` for more information.
"""
defmacro unquote(method)(conn, path_or_action, params_or_body \\ nil) do
method = unquote(method)
quote do
Phoenix.ConnTest.dispatch(unquote(conn), @endpoint, unquote(method),
unquote(path_or_action), unquote(params_or_body))
end
end
end
@doc """
Dispatches the connection to the given endpoint.
When invoked via `get/3`, `post/3` and friends, the endpoint
is automatically retrieved from the `@endpoint` module
attribute, otherwise it must be given as an argument.
The connection will be configured with the given `method`,
`path_or_action` and `params_or_body`.
If `path_or_action` is a string, it is considered to be the
request path and stored as so in the connection. If an atom,
it is assumed to be an action and the connection is dispatched
to the given action.
## Parameters and body
This function, as well as `get/3`, `post/3` and friends, accepts the
request body or parameters as last argument:
get(build_conn(), "/", some: "param")
get(build_conn(), "/", "some=param&url=encoded")
The allowed values are:
* `nil` - meaning there is no body
* a binary - containing a request body. For such cases, `:headers`
must be given as option with a content-type
* a map or list - containing the parameters which will automatically
set the content-type to multipart. The map or list may contain
other lists or maps and all entries will be normalized to string
keys
* a struct - unlike other maps, a struct will be passed through as-is
without normalizing its entries
"""
def dispatch(conn, endpoint, method, path_or_action, params_or_body \\ nil)

def dispatch(%Plug.Conn{} = conn, endpoint, method, path_or_action, params_or_body) do
  if is_nil(endpoint) do
    raise "no @endpoint set in test case"
  end

  # A raw binary body needs an explicit content-type so Plug can parse it.
  if is_binary(params_or_body) and is_nil(List.keyfind(conn.req_headers, "content-type", 0)) do
    raise ArgumentError, "a content-type header is required when setting " <>
                         "a binary body in a test connection"
  end

  conn
  |> ensure_recycled()
  |> dispatch_endpoint(endpoint, method, path_or_action, params_or_body)
  # Mark as not recycled so the next dispatch recycles cookies first.
  |> Conn.put_private(:phoenix_recycled, false)
  |> from_set_to_sent()
end

def dispatch(conn, _endpoint, method, _path_or_action, _params_or_body) do
  raise ArgumentError, "expected first argument to #{method} to be a " <>
                       "%Plug.Conn{}, got #{inspect conn}"
end
defp dispatch_endpoint(conn, endpoint, method, path, params_or_body) when is_binary(path) do
conn
|> Plug.Adapters.Test.Conn.conn(method, path, params_or_body)
|> endpoint.call(endpoint.init([]))
end
defp dispatch_endpoint(conn, endpoint, method, action, params_or_body) when is_atom(action) do
conn
|> Plug.Adapters.Test.Conn.conn(method, "/", params_or_body)
|> endpoint.call(endpoint.init(action))
end
# Flush a response that was set but never sent, so callers always observe a
# sent connection; anything else passes through unchanged.
defp from_set_to_sent(conn) do
  case conn do
    %Conn{state: :set} -> Conn.send_resp(conn)
    _other -> conn
  end
end
@doc """
Puts a request cookie.
"""
@spec put_req_cookie(Conn.t, binary, binary) :: Conn.t
defdelegate put_req_cookie(conn, key, value), to: Plug.Test

@doc """
Deletes a request cookie.
"""
@spec delete_req_cookie(Conn.t, binary) :: Conn.t
defdelegate delete_req_cookie(conn, key), to: Plug.Test

@doc """
Fetches the flash storage.
"""
@spec fetch_flash(Conn.t) :: Conn.t
defdelegate fetch_flash(conn), to: Phoenix.Controller

@doc """
Gets the whole flash storage.
"""
# Spec fixed: get_flash/1 returns the flash map itself, not the conn.
@spec get_flash(Conn.t) :: map
defdelegate get_flash(conn), to: Phoenix.Controller

@doc """
Gets the given key from the flash storage.
"""
# Spec fixed: get_flash/2 returns the stored value (or nil), not the conn.
@spec get_flash(Conn.t, term) :: term
defdelegate get_flash(conn, key), to: Phoenix.Controller

@doc """
Puts the given value under key in the flash storage.
"""
@spec put_flash(Conn.t, term, term) :: Conn.t
defdelegate put_flash(conn, key, value), to: Phoenix.Controller

@doc """
Clears up the flash storage.
"""
@spec clear_flash(Conn.t) :: Conn.t
defdelegate clear_flash(conn), to: Phoenix.Controller
@doc """
Returns the content type as long as it matches the given format.
## Examples
# Assert we have an html response with utf-8 charset
assert response_content_type(conn, :html) =~ "charset=utf-8"
"""
@spec response_content_type(Conn.t, atom) :: String.t | no_return
def response_content_type(conn, format) when is_atom(format) do
case Conn.get_resp_header(conn, "content-type") do
[] ->
raise "no content-type was set, expected a #{format} response"
[h] ->
if response_content_type?(h, format) do
h
else
raise "expected content-type for #{format}, got: #{inspect h}"
end
[_|_] ->
raise "more than one content-type was set, expected a #{format} response"
end
end
defp response_content_type?(header, format) do
case parse_content_type(header) do
{part, subpart} ->
format = Atom.to_string(format)
format in MIME.extensions(part <> "/" <> subpart) or
format == subpart or String.ends_with?(subpart, "+" <> format)
_ ->
false
end
end
# Split a content-type header into {type, subtype}, or false when the
# header cannot be parsed.
defp parse_content_type(header) do
  case Plug.Conn.Utils.content_type(header) do
    {:ok, type, subtype, _params} -> {type, subtype}
    _unparsable -> false
  end
end
@doc """
Asserts the given status code and returns the response body
if one was set or sent.
## Examples
conn = get(build_conn(), "/")
assert response(conn, 200) =~ "hello world"
"""
@spec response(Conn.t, status :: integer | atom) :: binary | no_return
def response(%Conn{state: :unset}, _status) do
raise """
expected connection to have a response but no response was set/sent.
Please verify that you assign to "conn" after a request:
conn = get(conn, "/")
assert html_response(conn) =~ "Hello"
"""
end
def response(%Conn{status: status, resp_body: body}, given) do
given = Plug.Conn.Status.code(given)
if given == status do
body
else
raise "expected response with status #{given}, got: #{status}, with body:\n#{body}"
end
end
@doc """
Asserts the given status code, that we have an html response and
returns the response body if one was set or sent.
## Examples
assert html_response(conn, 200) =~ "<html>"
"""
@spec html_response(Conn.t, status :: integer | atom) :: String.t | no_return
def html_response(conn, status) do
body = response(conn, status)
_ = response_content_type(conn, :html)
body
end
@doc """
Asserts the given status code, that we have an text response and
returns the response body if one was set or sent.
## Examples
assert text_response(conn, 200) =~ "hello"
"""
@spec text_response(Conn.t, status :: integer | atom) :: String.t | no_return
def text_response(conn, status) do
body = response(conn, status)
_ = response_content_type(conn, :text)
body
end
@doc """
Asserts the given status code, that we have an json response and
returns the decoded JSON response if one was set or sent.
## Examples
body = json_response(conn, 200)
assert "can't be blank" in body["errors"]
"""
@spec json_response(Conn.t, status :: integer | atom) :: map | no_return
def json_response(conn, status) do
body = response(conn, status)
_ = response_content_type(conn, :json)
Phoenix.json_library().decode!(body)
end
@doc """
Returns the location header from the given redirect response.
Raises if the response does not match the redirect status code
(defaults to 302).
## Examples
assert redirected_to(conn) =~ "/foo/bar"
assert redirected_to(conn, 301) =~ "/foo/bar"
assert redirected_to(conn, :moved_permanently) =~ "/foo/bar"
"""
@spec redirected_to(Conn.t, status :: non_neg_integer) :: String.t
def redirected_to(conn, status \\ 302)
def redirected_to(%Conn{state: :unset}, _status) do
raise "expected connection to have redirected but no response was set/sent"
end
def redirected_to(conn, status) when is_atom(status) do
redirected_to(conn, Plug.Conn.Status.code(status))
end
def redirected_to(%Conn{status: status} = conn, status) do
location = Conn.get_resp_header(conn, "location") |> List.first
location || raise "no location header was set on redirected_to"
end
def redirected_to(conn, status) do
raise "expected redirection with status #{status}, got: #{conn.status}"
end
@doc """
Recycles the connection.
Recycling receives a connection and returns a new connection,
containing cookies and relevant information from the given one.
This emulates behaviour performed by browsers where cookies
returned in the response are available in following requests.
Note `recycle/1` is automatically invoked when dispatching
to the endpoint, unless the connection has already been
recycled.
"""
@spec recycle(Conn.t) :: Conn.t
def recycle(conn) do
build_conn()
|> Map.put(:host, conn.host)
|> Plug.Test.recycle_cookies(conn)
|> Plug.Test.put_peer_data(Plug.Conn.get_peer_data(conn))
|> copy_headers(conn.req_headers, ~w(accept authorization))
end
# Keep only the request headers whose key is in `allowed` and prepend them
# to the fresh connection's headers.
defp copy_headers(conn, headers, allowed) do
  kept = Enum.filter(headers, fn {key, _value} -> key in allowed end)
  %{conn | req_headers: kept ++ conn.req_headers}
end
@doc """
Ensures the connection is recycled if it wasn't already.
See `recycle/1` for more information.
"""
@spec ensure_recycled(Conn.t) :: Conn.t
# Already-recycled connections pass through untouched; anything else is
# recycled (cookies and whitelisted headers copied onto a fresh conn).
def ensure_recycled(conn) do
  if conn.private[:phoenix_recycled], do: conn, else: recycle(conn)
end
@doc """
Calls the Endpoint and Router pipelines.
Useful for unit testing Plugs where Endpoint and/or router pipeline
plugs are required for proper setup.
Note the use of `get("/")` following `bypass_through` in the examples below.
To execute the plug pipelines, you must issue a request against the router.
Most often, you can simply send a GET request against the root path, but you
may also specify a different method or path which your pipelines may operate
against.
## Examples
For example, imagine you are testing an authentication plug in
isolation, but you need to invoke the Endpoint plugs and router
pipelines to set up session and flash related dependencies.
One option is to invoke an existing route that uses the proper
pipelines. You can do so by passing the connection and the
router name to `bypass_through`:
conn =
conn
|> bypass_through(MyAppWeb.Router)
|> get("/some_url")
|> MyApp.RequireAuthentication.call([])
assert conn.halted
You can also specify which pipelines you want to run:
conn =
conn
|> bypass_through(MyAppWeb.Router, [:browser])
|> get("/")
|> MyApp.RequireAuthentication.call([])
assert conn.halted
Alternatively, you could only invoke the Endpoint's plugs:
conn =
conn
|> bypass_through()
|> get("/")
|> MyApp.RequireAuthentication.call([])
assert conn.halted
"""
@spec bypass_through(Conn.t) :: Conn.t
def bypass_through(conn) do
Plug.Conn.put_private(conn, :phoenix_bypass, :all)
end
@doc """
Calls the Endpoint and Router pipelines for the current route.
See `bypass_through/1`.
"""
@spec bypass_through(Conn.t, module) :: Conn.t
def bypass_through(conn, router) do
Plug.Conn.put_private(conn, :phoenix_bypass, {router, :current})
end
@doc """
Calls the Endpoint and and the given Router pipelines.
See `bypass_through/1`.
"""
@spec bypass_through(Conn.t, module, atom | list) :: Conn.t
def bypass_through(conn, router, pipelines) do
Plug.Conn.put_private(conn, :phoenix_bypass, {router, List.wrap(pipelines)})
end
@doc """
Returns the matched params from the URL the connection was redirected to.
Uses the provided `%Plug.Conn{}`s router matched in the previous request.
Raises if the response's location header is not set.
## Examples
assert redirected_to(conn) =~ "/posts/123"
assert %{id: "123"} = redirected_params(conn)
"""
@spec redirected_params(Conn.t) :: map
def redirected_params(%Plug.Conn{} = conn) do
  router = Phoenix.Controller.router_module(conn)
  # Parse the redirect location; host is nil for path-only redirects, in
  # which case the conn's own host is used for route matching below.
  %URI{path: path, host: host} = conn |> redirected_to() |> URI.parse()

  case Phoenix.Router.route_info(router, "GET", path, host || conn.host) do
    :error ->
      raise Phoenix.Router.NoRouteError, conn: conn, router: router

    %{path_params: path_params} ->
      # Keys come from the compiled router — a closed, known set — so the
      # String.to_atom call is bounded here.
      Enum.into(path_params, %{}, fn {key, val} -> {String.to_atom(key), val} end)
  end
end
@doc """
Asserts an error was wrapped and sent with the given status.
Useful for testing actions that you expect raise an error and have
the response wrapped in an HTTP status, with content usually rendered
by your MyApp.ErrorView.
The function accepts a status either as an integer HTTP status or
atom, such as `404` or `:not_found`. The list of allowed atoms is available
in `Plug.Conn.Status`. If an error is raised, a 3-tuple of the wrapped
response is returned matching the status, headers, and body of the response:
{404, [{"content-type", "text/html"} | _], "Page not found"}
## Examples
assert_error_sent :not_found, fn ->
get(build_conn(), "/users/not-found")
end
response = assert_error_sent 404, fn ->
get(build_conn(), "/users/not-found")
end
assert {404, [_h | _t], "Page not found"} = response
"""
@spec assert_error_sent(integer | atom, function) :: {integer, list, term}
def assert_error_sent(status_int_or_atom, func) do
expected_status = Plug.Conn.Status.code(status_int_or_atom)
discard_previously_sent()
result =
func
|> wrap_request()
|> receive_response(expected_status)
discard_previously_sent()
result
end
defp receive_response({:ok, conn}, expected_status) do
if conn.state == :sent do
flunk "expected error to be sent as #{expected_status} status, but response sent #{conn.status} without error"
else
flunk "expected error to be sent as #{expected_status} status, but no error happened"
end
end
defp receive_response({:error, {_kind, exception, stack}}, expected_status) do
receive do
{ref, {^expected_status, headers, body}} when is_reference(ref) ->
{expected_status, headers, body}
{ref, {sent_status, _headers, _body}} when is_reference(ref) ->
reraise ExUnit.AssertionError.exception("""
expected error to be sent as #{expected_status} status, but got #{sent_status} from:
#{Exception.format_banner(:error, exception)}
"""), stack
after 0 ->
reraise ExUnit.AssertionError.exception("""
expected error to be sent as #{expected_status} status, but got an error with no response from:
#{Exception.format_banner(:error, exception)}
"""), stack
end
end
defp discard_previously_sent() do
receive do
{ref, {_, _, _}} when is_reference(ref) -> discard_previously_sent()
{:plug_conn, :sent} -> discard_previously_sent()
after
0 -> :ok
end
end
# Runs the user function, capturing any throw/error/exit as
# {:error, {kind, reason, stacktrace}} for receive_response/2.
# Uses __STACKTRACE__ instead of the deprecated System.stacktrace/0
# (deprecated since Elixir 1.7); the function-level catch provides the
# implicit try scope __STACKTRACE__ requires.
defp wrap_request(func) do
  {:ok, func.()}
catch
  kind, error -> {:error, {kind, error, __STACKTRACE__}}
end
end
| 31.82127 | 116 | 0.679293 |
e853123e9b181a782f259eac38cd9cacb4d3d9e1 | 4,469 | exs | Elixir | config/releases.exs | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 4 | 2020-11-30T17:38:57.000Z | 2021-01-23T21:29:41.000Z | config/releases.exs | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 24 | 2020-11-30T17:32:48.000Z | 2021-02-22T06:25:22.000Z | config/releases.exs | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | null | null | null | import Config
rpc_url = System.get_env("ETHEREUM_RPC_URL")
vault_url = System.get_env("VAULT_URL")
# Parses the literal strings "true"/"false" from env vars; any other value
# (including nil for an unset variable) maps to nil.
to_boolean = fn value ->
  case value do
    "true" -> true
    "false" -> false
    _other -> nil
  end
end
# Fetches a required environment variable; throws the given term when the
# variable is unset so the release fails fast at boot.
mandatory = fn env_var, exception ->
  value = System.get_env(env_var)
  if is_nil(value), do: throw(exception), else: value
end
# Core chain-engine settings; values come from the deployment environment.
config :engine,
  # Number of Ethereum blocks to wait before considering events final.
  finality_margin: String.to_integer(System.get_env("FINALITY_MARGIN") || "10"),
  rpc_url: rpc_url,
  vault_url: vault_url,
  network: System.get_env("ETHEREUM_NETWORK"),
  tx_hash_contract: System.get_env("TX_HASH_CONTRACT"),
  authority_address: System.get_env("AUTHORITY_ADDRESS"),
  plasma_framework: System.get_env("CONTRACT_ADDRESS_PLASMA_FRAMEWORK"),
  # Intentionally nil here — presumably resolved at runtime elsewhere;
  # TODO confirm these are populated before use.
  erc20_vault: nil,
  eth_vault: nil,
  payment_exit_game: nil,
  ethereum_events_check_interval_ms: String.to_integer(System.get_env("ETHEREUM_EVENTS_CHECK_INTERVAL_MS") || "8000"),
  ethereum_stalled_sync_threshold_ms:
    String.to_integer(System.get_env("ETHEREUM_STALLED_SYNC_THRESHOLD_MS") || "20000"),
  # Boot fails loudly (throw) when the fee claimer address is absent.
  fee_claimer_address: mandatory.("FEE_CLAIMER_ADDRESS", "FEE_CLAIMER_ADDRESS has to be set!")
config :gas, Gas.Integration.Pulse, api_key: System.get_env("PULSE_API_KEY")
config :gas, Gas.Integration.Web3Api,
blockchain_id: System.get_env("WEB3API_BLOCKCHAIN_ID"),
api_key: System.get_env("WEB3API_API_KEY")
config :gas, Gas.Integration.Etherscan, api_key: System.get_env("ETHERSCAN_API_KEY")
config :engine, Engine.Repo,
backoff_type: :stop,
# Have at most `:pool_size` DB connections on standby and serving DB queries.
pool_size: String.to_integer(System.get_env("ENGINE_DB_POOL_SIZE") || "10"),
# Wait at most `:queue_target` for a connection. If all connections checked out during
# a `:queue_interval` takes more than `:queue_target`, then we double the `:queue_target`.
# If checking out connections take longer than the new target, a DBConnection.ConnectionError is raised.
# See: https://hexdocs.pm/db_connection/DBConnection.html#start_link/2-queue-config
queue_target: String.to_integer(System.get_env("ENGINE_DB_POOL_QUEUE_TARGET_MS") || "200"),
telemetry_prefix: [:engine, :repo],
queue_interval: String.to_integer(System.get_env("ENGINE_DB_POOL_QUEUE_INTERVAL_MS") || "2000"),
show_sensitive_data_on_connection_error: true,
url: System.get_env("DATABASE_URL")
config :engine, Engine.Fee,
fee_feed_url: System.get_env("FEE_FEED_URL", "http://localhost:4000/api/v1"),
fee_change_tolerance_percent: String.to_integer(System.get_env("FEE_CHANGE_TOLERANCE_PERCENT") || "25"),
stored_fee_update_interval_minutes: String.to_integer(System.get_env("STORED_FEE_UPDATE_INTERVAL_MINUTES") || "1")
config :ethereumex,
url: rpc_url,
http_options: [recv_timeout: 20_000]
config :sentry,
dsn: System.get_env("SENTRY_DSN"),
server_name: System.get_env("HOSTNAME"),
environment_name: System.get_env("APP_ENV"),
enable_source_code_context: true,
root_source_code_path: File.cwd!(),
included_environments: ["development", "production", "staging", "stress", "sandbox"],
tags: %{
eth_network: System.get_env("ETHEREUM_NETWORK"),
app_env: System.get_env("APP_ENV"),
hostname: System.get_env("HOSTNAME"),
application: "childchain"
}
statix_tags = [application: "childchain-v2", app_env: System.get_env("APP_ENV"), hostname: System.get_env("HOSTNAME")]
config :statix,
host: System.get_env("DD_HOSTNAME") || "datadog",
port: String.to_integer(System.get_env("DD_PORT") || "8125"),
tags: Enum.map(statix_tags, fn {key, value} -> "#{key}:#{value}" end)
config :spandex_datadog,
host: System.get_env("DD_HOSTNAME") || "datadog",
port: String.to_integer(System.get_env("DD_APM_PORT") || "8126"),
batch_size: String.to_integer(System.get_env("BATCH_SIZE") || "10"),
sync_threshold: String.to_integer(System.get_env("SYNC_THRESHOLD") || "100"),
http: HTTPoison
config :status, Status.Metric.Tracer,
service: :web,
adapter: SpandexDatadog.Adapter,
disabled?: to_boolean.(System.get_env("DD_DISABLED") || "true"),
type: :web,
env: System.get_env("APP_ENV") || ""
config :engine, Engine.Feefeed.Rules.Scheduler,
interval: String.to_integer(System.get_env("RULES_FETCH_INTERVAL") || "180")
config :api,
port: String.to_integer(System.get_env("PORT") || "9656")
config :ex_plasma,
eip_712_domain: %{
name: "OMG Network",
salt: "0xfad5c7f626d80f9256ef01929f3beb96e058b8b4b0e3fe52d84f054c0e2a7a83",
verifying_contract: System.get_env("CONTRACT_ADDRESS_PLASMA_FRAMEWORK"),
version: "2"
}
| 39.548673 | 118 | 0.748713 |
e8531cc378e8d8611a12a0d2260776d3eaaad9bc | 98 | ex | Elixir | lib/ex338.ex | axelclark/ex338 | 3fb3c260d93bda61f7636ee1a677770d2dc1b89a | [
"MIT"
] | 17 | 2016-12-22T06:39:26.000Z | 2021-01-20T13:51:13.000Z | lib/ex338.ex | axelclark/ex338 | 3fb3c260d93bda61f7636ee1a677770d2dc1b89a | [
"MIT"
] | 608 | 2016-08-06T18:57:58.000Z | 2022-03-01T02:48:17.000Z | lib/ex338.ex | axelclark/ex338 | 3fb3c260d93bda61f7636ee1a677770d2dc1b89a | [
"MIT"
] | 6 | 2017-11-21T22:35:45.000Z | 2022-01-11T21:37:40.000Z | defmodule Ex338 do
@moduledoc """
Ex338 keeps the contexts for the 338 application.
"""
end
| 16.333333 | 51 | 0.704082 |
e8538b0013cbd890a9bb4f8b1ac2ae958cf5bbec | 3,442 | ex | Elixir | lib/jeff/secure_channel.ex | smartrent/jeff | 1cd2e418207579c338cdbdcaa331da8e17585b28 | [
"Apache-2.0"
] | null | null | null | lib/jeff/secure_channel.ex | smartrent/jeff | 1cd2e418207579c338cdbdcaa331da8e17585b28 | [
"Apache-2.0"
] | null | null | null | lib/jeff/secure_channel.ex | smartrent/jeff | 1cd2e418207579c338cdbdcaa331da8e17585b28 | [
"Apache-2.0"
] | null | null | null | defmodule Jeff.SecureChannel do
@moduledoc false
use Bitwise
# Secure-channel session state.
defstruct [
  # session encryption key, derived in initialize/2
  :enc,
  # true once establish/2 has stored the reply MAC
  :established?,
  # true once session keys are derived and the client cryptogram verified
  :initialized?,
  # Secure Channel Base Key
  :scbk,
  # cryptogram sent back to prove the server knows the session keys
  :server_cryptogram,
  # server random challenge (8 bytes, see new/1)
  :server_rnd,
  # session MAC keys, derived in initialize/2
  :smac1,
  :smac2,
  # rolling MACs: rmac for replies, cmac for commands
  :rmac,
  :cmac,
  # whether the well-known default SCBK is in use
  :scbkd?
]
@scbk_default Base.decode16!("303132333435363738393A3B3C3D3E3F")
@padding_start 0x80
# Builds a fresh, unestablished secure-channel state.
#
# Options:
#   * :scbk       - Secure Channel Base Key; defaults to @scbk_default, and
#                   scbkd? records whether that default is in use
#   * :server_rnd - 8-byte server challenge; defaults to random bytes
#
# NOTE(review): :rand.bytes/1 is a non-cryptographic PRNG;
# :crypto.strong_rand_bytes/1 may be intended for the challenge — confirm.
def new(opts \\ []) do
  scbk = Keyword.get(opts, :scbk, @scbk_default)
  server_rnd = Keyword.get(opts, :server_rnd, :rand.bytes(8))

  %__MODULE__{
    scbk: scbk,
    server_rnd: server_rnd,
    initialized?: false,
    established?: false,
    scbkd?: scbk == @scbk_default
  }
end
# Derives the session keys from the SCBK and both random challenges, and
# verifies the client's cryptogram before producing the server's.
def initialize(
      %{scbk: scbk, server_rnd: server_rnd} = sc,
      %{cryptogram: client_cryptogram, cuid: _cuid, rnd: client_rnd}
    ) do
  enc = gen_enc(server_rnd, scbk)
  # verify client cryptogram
  # The pin (^) asserts the client's value matches our own computation;
  # a mismatch raises MatchError, aborting the handshake.
  ^client_cryptogram = gen_client_cryptogram(server_rnd, client_rnd, enc)
  smac1 = gen_smac1(server_rnd, scbk)
  smac2 = gen_smac2(server_rnd, scbk)
  server_cryptogram = gen_server_cryptogram(client_rnd, server_rnd, enc)

  %{
    sc
    | enc: enc,
      server_cryptogram: server_cryptogram,
      smac1: smac1,
      smac2: smac2,
      initialized?: true
  }
end
# Stores the initial reply MAC and flags the channel as established.
def establish(sc, rmac), do: %{sc | rmac: rmac, established?: true}
# Computes the rolling MAC over `data` and stores it on the proper side:
# commands chain from the last reply MAC (rmac) and update cmac, while
# replies chain from cmac and update rmac.
def calculate_mac(sc, data, command?) do
  if command? do
    %{sc | cmac: do_calculate_mac(sc, data, sc.rmac)}
  else
    %{sc | rmac: do_calculate_mac(sc, data, sc.cmac)}
  end
end
# CBC-style chained MAC: every full 16-byte block except the last is
# encrypted under smac1, chaining the ciphertext as the next IV.
def do_calculate_mac(sc, <<block::binary-size(16), rest::binary>> = data, iv)
    when byte_size(data) > 16 do
  key = sc.smac1
  iv = :crypto.crypto_one_time(:aes_128_cbc, key, iv, block, encrypt: true)
  do_calculate_mac(sc, rest, iv)
end

# Terminal block (<= 16 bytes): append the 0x80 padding marker, zero-pad to
# a full block, and encrypt under smac2 to produce the final MAC.
# NOTE(review): `padding_start` duplicates the @padding_start attribute; if
# the final block is exactly 16 bytes, `zeroes` becomes -1 and the binary
# construction would fail — confirm callers never pass multiples of 16.
def do_calculate_mac(sc, block, iv) do
  padding_start = <<0x80>>
  key = sc.smac2
  block = block <> padding_start
  zeroes = 16 - byte_size(block)
  block = block <> <<0::size(zeroes)-unit(8)>>
  :crypto.crypto_one_time(:aes_128_cbc, key, iv, block, encrypt: true)
end
# Encrypts payload data with the session ENC key. The IV is the bitwise
# complement of the current reply MAC (each byte inverted); a 0x80 marker
# is appended before zero-padding so decrypt/2 can locate the payload end.
def encrypt(sc, data) do
  key = sc.enc

  iv =
    sc.rmac
    |> :binary.bin_to_list()
    # Invert every byte, masking back to 8 bits.
    |> Enum.map(&(~~~&1 &&& 0xFF))
    |> :binary.list_to_bin()

  :crypto.crypto_one_time(:aes_128_cbc, key, iv, data <> <<@padding_start>>,
    encrypt: true,
    padding: :zero
  )
end
# Decrypts payload data with the session ENC key, using the inverted
# command MAC as IV, then strips the 0x80 padding marker and everything
# after it.
# NOTE(review): :binary.split/2 cuts at the FIRST 0x80 anywhere in the
# plaintext, not just at trailing padding — plaintext containing 0x80
# would be truncated; confirm this cannot occur for valid payloads.
def decrypt(sc, data) do
  key = sc.enc

  iv =
    sc.cmac
    |> :binary.bin_to_list()
    |> Enum.map(&(~~~&1 &&& 0xFF))
    |> :binary.list_to_bin()

  :crypto.crypto_one_time(:aes_128_cbc, key, iv, data, encrypt: false)
  |> :binary.split(<<@padding_start>>)
  |> hd()
end
# Session-key derivation: the two-byte tag selects which key is derived
# (0x0182 = ENC, 0x0101 = SMAC1, 0x0102 = SMAC2).
def gen_enc(server_rnd, scbk), do: gen_session_key(<<0x01, 0x82>>, server_rnd, scbk)
def gen_smac1(server_rnd, scbk), do: gen_session_key(<<0x01, 0x01>>, server_rnd, scbk)
def gen_smac2(server_rnd, scbk), do: gen_session_key(<<0x01, 0x02>>, server_rnd, scbk)
# Client proves session-key knowledge: E(enc, server_rnd || client_rnd).
def gen_client_cryptogram(server_rnd, client_rnd, enc), do: gen_key(server_rnd <> client_rnd, enc)

# Server proves session-key knowledge: E(enc, client_rnd || server_rnd).
def gen_server_cryptogram(client_rnd, server_rnd, enc), do: gen_key(client_rnd <> server_rnd, enc)
# Key-diversification data: two-byte tag, first 6 bytes of the server
# challenge, then 8 zero bytes to fill a 16-byte AES block, encrypted
# under the SCBK.
defp gen_session_key(pre, rnd, scbk) do
  data = pre <> :binary.part(rnd, 0, 6) <> <<0, 0, 0, 0, 0, 0, 0, 0>>
  gen_key(data, scbk)
end
# Single-block AES-128-ECB encryption — the primitive behind both the key
# derivation and the cryptogram computations above.
defp gen_key(data, key) do
  :crypto.crypto_one_time(:aes_128_ecb, key, data, true)
end
end
| 23.902778 | 88 | 0.623475 |
e853bbd85d118589eda71648720cd7f6649ec837 | 156 | exs | Elixir | train_shunting/test/train_shunting_test.exs | lucas-larsson/ID1019 | b21a79bfa7fbbaaba0b4db88ec8b44fc5e9f291c | [
"MIT"
] | null | null | null | train_shunting/test/train_shunting_test.exs | lucas-larsson/ID1019 | b21a79bfa7fbbaaba0b4db88ec8b44fc5e9f291c | [
"MIT"
] | null | null | null | train_shunting/test/train_shunting_test.exs | lucas-larsson/ID1019 | b21a79bfa7fbbaaba0b4db88ec8b44fc5e9f291c | [
"MIT"
] | null | null | null | defmodule TrainShuntingTest do
use ExUnit.Case
doctest TrainShunting
test "greets the world" do
assert TrainShunting.hello() == :world
end
end
| 17.333333 | 42 | 0.74359 |
e853e36a11bb605129929f2caf80d2384e6877b9 | 200 | ex | Elixir | lib/options_tracker_web/live/position_live/delete_modal_component.ex | mgwidmann/options_tracker | 5520f88a9a5873842a63a23d4bcc5da82a51feba | [
"MIT"
] | 12 | 2020-06-25T17:25:15.000Z | 2021-09-30T20:13:33.000Z | lib/options_tracker_web/live/position_live/delete_modal_component.ex | mgwidmann/options_tracker | 5520f88a9a5873842a63a23d4bcc5da82a51feba | [
"MIT"
] | 5 | 2020-08-05T03:12:31.000Z | 2021-07-15T04:59:03.000Z | lib/options_tracker_web/live/position_live/delete_modal_component.ex | mgwidmann/options_tracker | 5520f88a9a5873842a63a23d4bcc5da82a51feba | [
"MIT"
] | 2 | 2021-07-03T17:20:15.000Z | 2021-09-01T15:38:58.000Z | defmodule OptionsTrackerWeb.PositionLive.DeleteModalComponent do
use OptionsTrackerWeb, :live_component
@impl true
def update(assigns, socket) do
  # Copy every incoming assign onto the socket unchanged; this component
  # needs no extra state of its own.
  {:ok, assign(socket, assigns)}
end
end
| 22.222222 | 64 | 0.765 |
e853e7a9ccd2048eb753e026dd37e64dec0495f8 | 144 | exs | Elixir | apps/fc_tesla/test/fc_tesla_test.exs | andyl/ragged | 2baab0849e2dfc068652ecb2fe88a7c6fe5437d0 | [
"MIT"
] | null | null | null | apps/fc_tesla/test/fc_tesla_test.exs | andyl/ragged | 2baab0849e2dfc068652ecb2fe88a7c6fe5437d0 | [
"MIT"
] | 10 | 2021-02-08T00:01:41.000Z | 2021-05-27T12:54:28.000Z | apps/fc_tesla/test/fc_tesla_test.exs | andyl/ragged | 2baab0849e2dfc068652ecb2fe88a7c6fe5437d0 | [
"MIT"
] | null | null | null | defmodule FcTeslaTest do
use ExUnit.Case
doctest FcTesla

# NOTE(review): generator placeholder test left disabled; re-enable or replace
# once FcTesla exposes real behavior to cover.
# test "greets the world" do
#   assert FcTesla.hello() == :world
# end
end
| 16 | 38 | 0.680556 |
e853ef9cb3c078553c21191825dc18ad3aacb883 | 1,258 | exs | Elixir | mix.exs | stefanchrobot/con_cache | 9a497cbf1a1295310480598538ba50686b349ef8 | [
"MIT"
] | null | null | null | mix.exs | stefanchrobot/con_cache | 9a497cbf1a1295310480598538ba50686b349ef8 | [
"MIT"
] | null | null | null | mix.exs | stefanchrobot/con_cache | 9a497cbf1a1295310480598538ba50686b349ef8 | [
"MIT"
] | null | null | null | Code.ensure_loaded?(Hex) and Hex.start()
defmodule ConCache.Mixfile do
  use Mix.Project

  # Single source of truth for the release version; also interpolated into
  # the changelog link below.
  @version "0.13.1"

  def project do
    [
      app: :con_cache,
      version: @version,
      elixir: "~> 1.7",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      package: [
        maintainers: ["Saša Jurić"],
        licenses: ["MIT"],
        links: %{
          "Github" => "https://github.com/sasa1977/con_cache",
          "Docs" => "http://hexdocs.pm/con_cache",
          # The changelog anchor is the version with the dots stripped,
          # e.g. "v0131" for 0.13.1.
          "Changelog" =>
            "https://github.com/sasa1977/con_cache/blob/#{@version}/CHANGELOG.md#v#{
              String.replace(@version, ".", "")
            }"
        }
      ],
      description:
        "ETS based key-value storage with support for row-level isolated writes, TTL auto-purge, and modification callbacks.",
      docs: [
        extras: ["README.md"],
        main: "ConCache",
        source_url: "https://github.com/sasa1977/con_cache/",
        source_ref: @version
      ]
    ]
  end

  def application do
    # `:applications` hard-codes the full application list and is deprecated;
    # `:extra_applications` adds to the inferred list instead. Equivalent here
    # because this package has no runtime dependencies (both deps are :dev only).
    [extra_applications: [:logger], mod: {ConCache.Application, []}]
  end

  defp deps do
    [
      # Documentation generator and Dialyzer wrapper — dev-only tooling,
      # never shipped with the library.
      {:ex_doc, "~> 0.19.0", only: :dev},
      {:dialyxir, "~> 0.5.0", only: :dev}
    ]
  end
end
| 25.16 | 126 | 0.540541 |
e853f7680b8b4d242f0e38481dc6708ec330479b | 1,267 | ex | Elixir | lib/elixir_console_web/live/live_monitor.ex | wyeworks/elixir_console | f72147224131cb43ebea4a5929030928cdf155d0 | [
"MIT"
] | 60 | 2019-11-23T15:54:24.000Z | 2022-03-24T15:56:32.000Z | lib/elixir_console_web/live/live_monitor.ex | wyeworks/elixir_console | f72147224131cb43ebea4a5929030928cdf155d0 | [
"MIT"
] | 62 | 2019-11-11T00:44:38.000Z | 2022-01-10T13:12:48.000Z | lib/elixir_console_web/live/live_monitor.ex | wyeworks/elixir_console | f72147224131cb43ebea4a5929030928cdf155d0 | [
"MIT"
] | 5 | 2020-01-15T00:44:30.000Z | 2021-05-25T05:06:27.000Z | defmodule ElixirConsoleWeb.LiveMonitor do
@moduledoc """
This module monitors the created sandbox processes. Gives a way to dispose
those processes when they are not longer used.
The code is based on https://github.com/phoenixframework/phoenix_live_view/issues/123
"""
use GenServer
# Starts the singleton monitor process, registered under the module name.
# The start argument is ignored; state always begins empty (see init/1).
def start_link(_opts) do
  GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
end
# Registers `pid` so that `view_module.unmount(meta)` runs when the process
# exits. Synchronous call, so registration is guaranteed once this returns.
def monitor(pid, view_module, meta) do
  request = {:monitor, pid, view_module, meta}
  GenServer.call(__MODULE__, request)
end
# Replaces the stored {view_module, meta} entry for an already-monitored pid
# without creating another monitor reference.
def update_sandbox(pid, view_module, meta) do
  request = {:update_sandbox, pid, view_module, meta}
  GenServer.call(__MODULE__, request)
end
@impl true
def init(_init_arg) do
  # No views are tracked at startup; entries are added via :monitor calls.
  {:ok, %{views: %{}}}
end
@impl true
def handle_call({:monitor, pid, view_module, meta}, _from, %{views: views} = state) do
  # Watch the view process; its exit arrives later as a :DOWN message
  # handled in handle_info/2.
  Process.monitor(pid)
  {:reply, :ok, %{state | views: Map.put(views, pid, {view_module, meta})}}
end
# Same bookkeeping as the :monitor clause, minus the Process.monitor call —
# the pid is expected to be monitored already.
def handle_call({:update_sandbox, pid, view_module, meta}, _from, %{views: views} = state) do
  updated_views = Map.put(views, pid, {view_module, meta})
  {:reply, :ok, %{state | views: updated_views}}
end
@impl true
def handle_info({:DOWN, _ref, :process, pid, _reason}, state) do
  # NOTE(review): Map.pop returns {nil, views} for an unknown pid, which would
  # crash the match below; this assumes every :DOWN comes from a pid that was
  # registered via :monitor and not already removed — confirm callers can't
  # register the same pid twice.
  {{view_module, meta}, remaining_views} = Map.pop(state.views, pid)
  view_module.unmount(meta)
  {:noreply, %{state | views: remaining_views}}
end
end
| 30.166667 | 95 | 0.683504 |
e853fbaaa52d581c49d01323b6604d8ae87e52b1 | 1,835 | ex | Elixir | farmbot_os/lib/mix/tasks.farmbot/env.ex | Arti4ever/farmbot_os | a238c1d5ae78c08d1f5894cac41ed61035fb3266 | [
"MIT"
] | 1 | 2021-04-22T10:18:50.000Z | 2021-04-22T10:18:50.000Z | farmbot_os/lib/mix/tasks.farmbot/env.ex | Arti4ever/farmbot_os | a238c1d5ae78c08d1f5894cac41ed61035fb3266 | [
"MIT"
] | null | null | null | farmbot_os/lib/mix/tasks.farmbot/env.ex | Arti4ever/farmbot_os | a238c1d5ae78c08d1f5894cac41ed61035fb3266 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Farmbot.Env do
@moduledoc false
@doc false
def mix_config(key \\ nil) do
  # With no key (nil), return the full Mix project configuration; otherwise
  # just that entry (nil when absent).
  config = Mix.Project.config()
  if key, do: config[key], else: config
end
@doc false
def fw_file do
  # Unsigned firmware image, named after the OTP app, e.g. "farmbot_os.fw".
  Path.join(images_dir(), "#{mix_config(:app)}.fw")
end

@doc false
def signed_fw_file do
  # Signed counterpart of fw_file/0, with a "-signed" suffix.
  Path.join(images_dir(), "#{mix_config(:app)}-signed.fw")
end

@doc false
def images_dir do
  # Nerves writes built firmware images under _build/<env>/nerves/images.
  Path.join([mix_config(:build_path), env(), "nerves", "images"])
end
@doc false
def target, do: mix_config(:target)

# Thin pass-throughs to the project metadata exposed by FarmbotCore.
def version, do: FarmbotCore.Project.version()

def commit, do: FarmbotCore.Project.commit()

@doc false
def env, do: to_string(FarmbotCore.Project.env())
@doc false
def format_date_time(%{ctime: {{yr, m, day}, {hr, min, _sec}}}) do
  # Formats a ctime tuple (presumably from File.stat — confirm callers) as
  # "YYYY-MM-DD_HHMM". The original built a throwaway %DateTime{} only to
  # read the very same fields straight back out; interpolate directly
  # instead. Seconds are deliberately omitted from the output, matching the
  # previous format exactly.
  "#{yr}-#{pad(m)}-#{pad(day)}_#{pad(hr)}#{pad(min)}"
end
# Zero-pads single-digit calendar components to two characters.
defp pad(value) do
  case value do
    v when v < 10 -> "0#{v}"
    v -> "#{v}"
  end
end
@doc false
# Builds a Slack-markdown release announcement for a new firmware build.
# `time` is a preformatted timestamp string; `comment` is free-form text
# appended after the metadata lines (trimmed to drop stray whitespace).
def build_comment(time, comment) do
  """
  *New Farmbot Firmware!*
  > *_Env_*: `#{env()}`
  > *_Target_*: `#{target()}`
  > *_Version_*: `#{version()}`
  > *_Commit_*: `#{commit()}`
  > *_Time_*: `#{time}`

  #{commit_message()}

  #{String.trim(comment)}
  """
end
# Full message (subject + body) of the most recent git commit in the
# current working directory; exit status is ignored, as before.
defp commit_message do
  {output, _exit_status} = System.cmd("git", ["log", "-1", "--pretty=%B"])
  String.trim(output)
end
@doc false
def slack_token do
  # Fetches the Slack API token from the environment, aborting the Mix task
  # with a helpful error when unset. An empty string still counts as set,
  # matching the `||` truthiness of the original.
  case System.get_env("SLACK_TOKEN") do
    nil -> Mix.raise("No $SLACK_TOKEN environment variable.")
    token -> token
  end
end
end
| 19.315789 | 81 | 0.571117 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.