hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c57ab612066bf394ed2e47ad44bc8ceed689a0c | 31,521 | ex | Elixir | lib/iex/lib/iex/helpers.ex | milmazz/elixir | b62704e9e223ea00da75e2ad277e5468f728c5d3 | [
"Apache-2.0"
] | null | null | null | lib/iex/lib/iex/helpers.ex | milmazz/elixir | b62704e9e223ea00da75e2ad277e5468f728c5d3 | [
"Apache-2.0"
] | null | null | null | lib/iex/lib/iex/helpers.ex | milmazz/elixir | b62704e9e223ea00da75e2ad277e5468f728c5d3 | [
"Apache-2.0"
] | null | null | null | defmodule IEx.Helpers do
@moduledoc """
Welcome to Interactive Elixir. You are currently
seeing the documentation for the module `IEx.Helpers`
which provides many helpers to make Elixir's shell
more joyful to work with.
This message was triggered by invoking the helper `h()`,
usually referred to as `h/0` (since it expects 0 arguments).
You can use the `h/1` function to invoke the documentation
for any Elixir module or function:
iex> h(Enum)
iex> h(Enum.map)
iex> h(Enum.reverse/1)
You can also use the `i/1` function to introspect any value
you have in the shell:
iex> i("hello")
There are many other helpers available, here are some examples:
* `b/1` - prints callbacks info and docs for a given module
* `c/1` - compiles a file into the current directory
* `c/2` - compiles a file to the given path
* `cd/1` - changes the current directory
* `clear/0` - clears the screen
* `exports/1` - shows all exports (functions + macros) in a module
* `flush/0` - flushes all messages sent to the shell
* `h/0` - prints this help message
* `h/1` - prints help for the given module, function or macro
* `i/0` - prints information about the last value
* `i/1` - prints information about the given term
* `ls/0` - lists the contents of the current directory
* `ls/1` - lists the contents of the specified directory
* `open/1` - opens the source for the given module or function in your editor
* `pid/1` - creates a PID from a string
* `pid/3` - creates a PID with the 3 integer arguments passed
* `pwd/0` - prints the current working directory
* `r/1` - recompiles the given module's source file
* `recompile/0` - recompiles the current project
* `runtime_info/0` - prints runtime info (versions, memory usage, stats)
* `v/0` - retrieves the last value from the history
* `v/1` - retrieves the nth value from the history
Help for all of those functions can be consulted directly from
the command line using the `h/1` helper itself. Try:
iex> h(v/0)
To list all IEx helpers available, which is effectively all
exports (functions and macros) in the `IEx.Helpers` module:
iex> exports(IEx.Helpers)
This module also include helpers for debugging purposes, see
`IEx.break!/4` for more information.
To learn more about IEx as a whole, type `h(IEx)`.
"""
import IEx, only: [dont_display_result: 0]
@doc """
Recompiles the current Mix application.
This helper only works when IEx is started with a Mix
project, for example, `iex -S mix`. The application is
not restarted after compilation, which means any long
running process may crash as any changed module will be
temporarily removed and recompiled, without going through
the proper code changes callback.
If you want to reload a single module, consider using
`r(ModuleName)` instead.
This function is meant to be used for development and
debugging purposes. Do not depend on it in production code.
"""
def recompile do
if mix_started?() do
config = Mix.Project.config()
consolidation = Mix.Project.consolidation_path(config)
reenable_tasks(config)
# No longer allow consolidations to be accessed.
Code.delete_path(consolidation)
purge_protocols(consolidation)
{result, _} = Mix.Task.run("compile")
# Reenable consolidation and allow them to be loaded.
Code.prepend_path(consolidation)
purge_protocols(consolidation)
result
else
IO.puts(IEx.color(:eval_error, "Mix is not running. Please start IEx with: iex -S mix"))
:error
end
end
defp mix_started? do
List.keyfind(Application.started_applications(), :mix, 0) != nil
end
defp reenable_tasks(config) do
Mix.Task.reenable("compile")
Mix.Task.reenable("compile.all")
Mix.Task.reenable("compile.protocols")
compilers = config[:compilers] || Mix.compilers()
Enum.each(compilers, &Mix.Task.reenable("compile.#{&1}"))
end
defp purge_protocols(path) do
case File.ls(path) do
{:ok, beams} ->
Enum.each(beams, fn beam ->
module = beam |> Path.rootname() |> String.to_atom()
:code.purge(module)
:code.delete(module)
end)
{:error, _} ->
:ok
end
end
@doc """
Compiles the given files.
It expects a list of files to compile and an optional path to write
the compiled code to (defaults to the current directory). When compiling
one file, there is no need to wrap it in a list.
It returns the names of the compiled modules.
If you want to recompile an existing module, check `r/1` instead.
## Examples
iex> c(["foo.ex", "bar.ex"], "ebin")
[Foo, Bar]
iex> c("baz.ex")
[Baz]
"""
def c(files, path \\ :in_memory) when is_binary(path) or path == :in_memory do
files = List.wrap(files)
unless Enum.all?(files, &is_binary/1) do
raise ArgumentError, "expected a binary or a list of binaries as argument"
end
{found, not_found} = Enum.split_with(files, &File.exists?/1)
unless Enum.empty?(not_found) do
raise ArgumentError, "could not find files #{Enum.join(not_found, ", ")}"
end
{erls, exs} = Enum.split_with(found, &String.ends_with?(&1, ".erl"))
erl_modules =
Enum.map(erls, fn source ->
{module, binary} = compile_erlang(source)
if path != :in_memory do
base = source |> Path.basename() |> Path.rootname()
File.write!(Path.join(path, base <> ".beam"), binary)
end
module
end)
ex_modules =
case compile_elixir(exs, path) do
{:ok, modules, _} -> modules
{:error, _, _} -> raise CompileError
end
erl_modules ++ ex_modules
end
@doc """
Clears the console screen.
This function only works if ANSI escape codes are enabled
on the shell, which means this function is by default
unavailable on Windows machines.
"""
def clear() do
if IO.ANSI.enabled?() do
IO.write([IO.ANSI.home(), IO.ANSI.clear()])
else
IO.puts("Cannot clear the screen because ANSI escape codes are not enabled on this shell")
end
dont_display_result()
end
@doc """
Opens the current prying location.
This command only works inside a pry session started manually
via `IEx.pry/0` or a breakpoint set via `IEx.break!/4`. Calling
this function during a regular `IEx` session will print an error.
Keep in mind the `open/0` location may not exist when prying
precompiled source code, such as Elixir itself.
For more information and to open any module or function, see
`open/1`.
"""
def open() do
case Process.get(:iex_whereami) do
{file, line, _} ->
IEx.Introspection.open({file, line})
_ ->
IO.puts(IEx.color(:eval_error, "Pry session is not currently enabled"))
end
dont_display_result()
end
@doc """
Opens the given module, module/function/arity or `{file, line}`.
This function uses the `ELIXIR_EDITOR` environment variable
and falls back to `EDITOR` if the former is not available.
By default, it attempts to open the file and line using the
`file:line` notation. For example, if your editor is called
`subl`, it will open the file as:
subl path/to/file:line
Custom editors are supported by using the __FILE__ and __LINE__
notations. For example, vi/vim users can set `ELIXIR_EDITOR` to:
ELIXIR_EDITOR="vi +__LINE__ __FILE__"
and Elixir will properly interpolate values.
Since this function prints the result returned by the editor,
`ELIXIR_EDITOR` can be set "echo" if you prefer to display the
location rather than opening it.
Keep in mind the location may not exist when opening
precompiled source code, such as Elixir itself.
## Examples
iex> open MyApp
iex> open MyApp.fun/2
iex> open {"path/to/file", 1}
"""
defmacro open(term) do
quote do
IEx.Introspection.open(unquote(IEx.Introspection.decompose(term)))
end
end
@doc """
Prints the documentation for `IEx.Helpers`.
"""
def h() do
IEx.Introspection.h(IEx.Helpers)
end
@doc """
Prints the documentation for the given module
or for the given function/arity pair.
## Examples
iex> h(Enum)
It also accepts functions in the format `fun/arity`
and `module.fun/arity`, for example:
iex> h receive/1
iex> h Enum.all?/2
iex> h Enum.all?
"""
defmacro h(term) do
quote do
IEx.Introspection.h(unquote(IEx.Introspection.decompose(term)))
end
end
@doc """
Prints the documentation for the given callback function.
It also accepts single module argument to list
all available behaviour callbacks.
## Examples
iex> b(Mix.Task.run/1)
iex> b(Mix.Task.run)
iex> b(GenServer)
"""
defmacro b(term) do
quote do
IEx.Introspection.b(unquote(IEx.Introspection.decompose(term)))
end
end
@doc """
Prints the types for the given module or for the given function/arity pair.
## Examples
iex> t(Enum)
@type t() :: Enumerable.t()
@type acc() :: any()
@type element() :: any()
@type index() :: integer()
@type default() :: any()
iex> t(Enum.t/0)
@type t() :: Enumerable.t()
iex> t(Enum.t)
@type t() :: Enumerable.t()
"""
defmacro t(term) do
quote do
IEx.Introspection.t(unquote(IEx.Introspection.decompose(term)))
end
end
@doc """
Prints the specs for the given module or for the given function/arity pair.
## Examples
iex> s(Enum)
iex> s(Enum.all?)
iex> s(Enum.all?/2)
iex> s(is_atom)
iex> s(is_atom/1)
"""
defmacro s(term) do
quote do
IEx.Introspection.s(unquote(IEx.Introspection.decompose(term)))
end
end
@doc """
Returns the value of the `n`th expression in the history.
`n` can be a negative value: if it is, the corresponding expression value
relative to the current one is returned. For example, `v(-2)` returns the
value of the expression evaluated before the last evaluated expression. In
particular, `v(-1)` returns the result of the last evaluated expression and
`v()` does the same.
## Examples
iex(1)> "hello" <> " world"
"hello world"
iex(2)> 40 + 2
42
iex(3)> v(-2)
"hello world"
iex(4)> v(2)
42
iex(5)> v()
42
"""
def v(n \\ -1) do
IEx.History.nth(history(), n) |> elem(2)
end
@doc """
Recompiles and reloads the given `module`.
Please note that all the modules defined in the same
file as `module` are recompiled and reloaded.
This function is meant to be used for development and
debugging purposes. Do not depend on it in production code.
## In-memory reloading
When we reload the module in IEx, we recompile the module source
code, updating its contents in memory. The original `.beam` file
in disk, probably the one where the first definition of the module
came from, does not change at all.
Since typespecs and docs are loaded from the .beam file (they
are not loaded in memory with the module because there is no need
for them to be in memory), they are not reloaded when you reload
the module.
"""
def r(module) when is_atom(module) do
{:reloaded, module, do_r(module)}
end
defp do_r(module) do
unless Code.ensure_loaded?(module) do
raise ArgumentError, "could not load nor find module: #{inspect(module)}"
end
source = source(module)
cond do
source == nil ->
raise ArgumentError, "could not find source for module: #{inspect(module)}"
not File.exists?(source) ->
raise ArgumentError, "could not find source (#{source}) for module: #{inspect(module)}"
String.ends_with?(source, ".erl") ->
[compile_erlang(source) |> elem(0)]
true ->
Enum.map(Code.load_file(source), fn {name, _} -> name end)
end
end
@doc """
Loads the given module's BEAM code (and ensures any previous
old version was properly purged before).
This function is useful when you know the bytecode for module
has been updated in the filesystem and you want to tell the VM
to load it.
"""
def l(module) when is_atom(module) do
:code.purge(module)
:code.load_file(module)
end
@doc """
Prints information about the data type of any given term.
If no argument is given, the value of the previous expression
is used.
## Examples
iex> i(1..5)
Will print:
Term
1..5
Data type
Range
Description
This is a struct. Structs are maps with a __struct__ key.
Reference modules
Range, Map
"""
def i(term \\ v(-1)) do
implemented_protocols = [{"Implemented protocols", all_implemented_protocols_for_term(term)}]
info = [{"Term", inspect(term)}] ++ IEx.Info.info(term) ++ implemented_protocols
for {subject, info} <- info do
info = info |> to_string() |> String.trim() |> String.replace("\n", "\n ")
IO.puts(IEx.color(:eval_result, to_string(subject)))
IO.puts(IEx.color(:eval_info, " #{info}"))
end
dont_display_result()
end
# Given any "term", this function returns all the protocols in
# :code.get_path() implemented by the data structure of such term, in the form
# of a binary like "Protocol1, Protocol2, Protocol3".
defp all_implemented_protocols_for_term(term) do
:code.get_path()
|> Protocol.extract_protocols()
|> Enum.uniq()
|> Enum.reject(fn protocol -> is_nil(protocol.impl_for(term)) end)
|> Enum.map_join(", ", &inspect/1)
end
@runtime_info_topics [:system, :memory, :limits, :applications]
@doc """
Prints vm/runtime information such as versions, memory usage and statistics.
Additional topics are available via `runtime_info/1`.
"""
def runtime_info(), do: runtime_info([:system, :memory, :limits])
@doc """
Just like `runtime_info/0`, except accepts topic or a list of topics.
E.g. topic `:applications` will list the applications loaded.
"""
def runtime_info(topic) when is_atom(topic) and topic in @runtime_info_topics do
topic
|> List.wrap()
|> runtime_info
end
def runtime_info(topics) when is_list(topics) do
topics
|> Enum.uniq()
|> print_runtime_info
end
defp print_runtime_info(topics) do
Enum.each(topics, &print_runtime_info_topic/1)
IO.puts("")
print_topic_info(topics)
IO.puts("")
dont_display_result()
end
defp print_topic_info(topics) when is_list(topics) do
IO.write(pad_key("Showing topics"))
IO.puts(inspect(topics))
IO.write(pad_key("Additional topics"))
IO.puts(inspect(@runtime_info_topics -- topics))
IO.puts("")
IO.puts("To view a specific topic call runtime_info(topic)")
end
defp print_runtime_info_topic(:system) do
print_pane("System and architecture")
print_entry("Elixir version", System.version())
print_entry("OTP version", :erlang.system_info(:otp_release))
print_entry("ERTS version", :erlang.system_info(:version))
print_entry("Compiled for", :erlang.system_info(:system_architecture))
print_entry("Schedulers", :erlang.system_info(:schedulers))
print_entry("Schedulers online", :erlang.system_info(:schedulers_online))
end
defp print_runtime_info_topic(:memory) do
print_pane("Memory")
print_memory("Total", :total, :MB)
print_memory("Atoms", :atom)
print_memory("Binaries", :binary)
print_memory("Code", :code)
print_memory("ETS", :ets)
print_memory("Processes", :processes)
end
defp print_runtime_info_topic(:limits) do
print_pane("Statistics / limits")
print_uptime()
print_entry("Run queue", :erlang.statistics(:run_queue))
if :erlang.system_info(:otp_release) >= '20' do
print_percentage("Atoms", :atom_count, :atom_limit)
end
print_percentage("ETS", :ets_count, :ets_limit)
print_percentage("Ports", :port_count, :port_limit)
print_percentage("Processes", :process_count, :process_limit)
end
defp print_runtime_info_topic(:applications) do
print_pane("Loaded OTP Applications")
started = Application.started_applications()
loaded = Application.loaded_applications()
for {app, _, version} = entry <- Enum.sort(loaded) do
IO.write(pad_key(app))
IO.write(String.pad_trailing("#{version}", 20))
if entry in started do
IO.write("(started)")
end
IO.puts("")
end
:ok
end
defp print_pane(msg) do
IO.puts(IEx.color(:eval_result, ["\n## ", msg, " \n"]))
end
defp print_entry(_key, nil), do: :ok
defp print_entry(key, value), do: IO.puts("#{pad_key(key)}#{value}")
defp print_uptime() do
IO.write(pad_key("Uptime"))
:c.uptime()
end
defp print_percentage(key, min, max) do
min = get_stat(min)
max = get_stat(max)
percentage = trunc(min / max * 100)
IO.puts("#{pad_key(key)}#{min} / #{max} (#{percentage}% used)")
end
defp get_stat(:ets_count), do: length(:ets.all())
defp get_stat(other), do: :erlang.system_info(other)
defp print_memory(key, memory, unit \\ :kB) do
value =
memory
|> :erlang.memory()
|> div(memory_unit(unit))
|> round()
IO.puts("#{pad_key(key)}#{value} #{unit}")
end
defp memory_unit(:MB), do: 1024 * 1024
defp memory_unit(:kB), do: 1024
defp pad_key(key), do: String.pad_trailing("#{key}:", 20, " ")
@doc """
Flushes all messages sent to the shell and prints them out.
"""
def flush do
do_flush(IEx.inspect_opts())
end
defp do_flush(inspect_opts) do
receive do
msg ->
IO.inspect(msg, inspect_opts)
do_flush(inspect_opts)
after
0 -> :ok
end
end
defp source(module) do
source = module.module_info(:compile)[:source]
case source do
nil -> nil
source -> List.to_string(source)
end
end
@doc """
Prints the current working directory.
"""
def pwd do
IO.puts(IEx.color(:eval_info, System.cwd!()))
dont_display_result()
end
@doc """
Changes the current working directory to the given path.
"""
def cd(directory) when is_binary(directory) do
case File.cd(expand_home(directory)) do
:ok ->
pwd()
{:error, :enoent} ->
IO.puts(IEx.color(:eval_error, "No directory #{directory}"))
end
dont_display_result()
end
@doc """
Prints a list of all the functions and macros exported by the given module.
"""
def exports(module \\ Kernel) do
exports = IEx.Autocomplete.exports(module)
list =
Enum.map(exports, fn {name, arity} ->
Atom.to_string(name) <> "/" <> Integer.to_string(arity)
end)
print_table(list)
dont_display_result()
end
@doc """
Prints a list of the given directory's contents.
If `path` points to a file, prints its full path.
"""
def ls(path \\ ".") when is_binary(path) do
path = expand_home(path)
case File.ls(path) do
{:ok, items} ->
sorted_items = Enum.sort(items)
printer = fn item, width ->
format_item(Path.join(path, item), String.pad_trailing(item, width))
end
print_table(sorted_items, printer)
{:error, :enoent} ->
IO.puts(IEx.color(:eval_error, "No such file or directory #{path}"))
{:error, :enotdir} ->
IO.puts(IEx.color(:eval_info, Path.absname(path)))
end
dont_display_result()
end
defp expand_home(<<?~, rest::binary>>) do
System.user_home!() <> rest
end
defp expand_home(other), do: other
defp print_table(list, printer \\ &String.pad_trailing/2)
defp print_table([], _printer) do
:ok
end
defp print_table(list, printer) do
# print items in multiple columns (2 columns in the worst case)
lengths = Enum.map(list, &String.length(&1))
maxlen = maxlength(lengths)
offset = min(maxlen, 30) + 5
print_table(list, printer, offset)
end
defp print_table(list, printer, offset) do
Enum.reduce(list, 0, fn item, len ->
len =
if len >= 80 do
IO.puts("")
0
else
len
end
IO.write(printer.(item, offset))
len + offset
end)
IO.puts("")
end
defp maxlength(list) do
Enum.reduce(list, 0, &max(&1, &2))
end
defp format_item(path, representation) do
case File.stat(path) do
{:ok, %File.Stat{type: :device}} ->
IEx.color(:ls_device, representation)
{:ok, %File.Stat{type: :directory}} ->
IEx.color(:ls_directory, representation)
_ ->
representation
end
end
@doc """
Respawns the current shell by starting a new shell process.
"""
def respawn do
if whereis = IEx.Server.whereis() do
send(whereis, {:respawn, self()})
end
dont_display_result()
end
@doc """
Continues execution of the current process.
This is usually called by sessions started with `IEx.pry/0`
or `IEx.break!/4`. This allows the current to execute until
the next breakpoint, which will automatically yield control
back to IEx without requesting permission to pry.
If the running process terminates, a new IEx session is
started.
While the process executes, the user will no longer have
control of the shell. If you would rather start a new shell,
use `respawn/0` instead.
"""
def continue do
if whereis = IEx.Server.whereis() do
send(whereis, {:continue, self()})
end
dont_display_result()
end
@doc """
Macro-based shortcut for `IEx.break!/4`.
"""
defmacro break!(ast, stops \\ 1) do
quote do
require IEx
IEx.break!(unquote(ast), unquote(stops))
end
end
@doc """
Sets up a breakpoint in `module`, `function` and `arity`
with the given number of `stops`.
See `IEx.break!/4` for a complete description of breakpoints
in IEx.
"""
defdelegate break!(module, function, arity, stops \\ 1), to: IEx
@doc """
Prints all breakpoints to the terminal.
"""
def breaks do
breaks(IEx.Pry.breaks())
end
defp breaks([]) do
IO.puts(IEx.color(:eval_info, "No breakpoints set"))
dont_display_result()
end
defp breaks(breaks) do
entries =
for {id, module, {function, arity}, stops} <- breaks do
{
Integer.to_string(id),
Exception.format_mfa(module, function, arity),
Integer.to_string(stops)
}
end
entries = [{"ID", "Module.function/arity", "Pending stops"} | entries]
{id_max, mfa_max, stops_max} =
Enum.reduce(entries, {0, 0, 0}, fn {id, mfa, stops}, {id_max, mfa_max, stops_max} ->
{
max(byte_size(id), id_max),
max(byte_size(mfa), mfa_max),
max(byte_size(stops), stops_max)
}
end)
[header | entries] = entries
IO.puts("")
print_break(header, id_max, mfa_max)
IO.puts([
String.duplicate("-", id_max + 2),
?\s,
String.duplicate("-", mfa_max + 2),
?\s,
String.duplicate("-", stops_max + 2)
])
Enum.each(entries, &print_break(&1, id_max, mfa_max))
IO.puts("")
dont_display_result()
end
defp print_break({id, mfa, stops}, id_max, mfa_max) do
IO.puts([
?\s,
String.pad_trailing(id, id_max + 2),
?\s,
String.pad_trailing(mfa, mfa_max + 2),
?\s,
stops
])
end
@doc """
Sets the number of pending stops in the breakpoint
with the given id to zero.
Returns `:ok` if there is such breakpoint id. `:not_found`
otherwise.
Note the module remains "instrumented" on reset. If you would
like to effectively remove all breakpoints and instrumentation
code from a module, use `remove_breaks/1` instead.
"""
defdelegate reset_break(id), to: IEx.Pry
@doc """
Sets the number of pending stops in the given module,
function and arity to zero.
If the module is not instrumented or if the given function
does not have a breakpoint, it is a no-op and it returns
`:not_found`. Otherwise it returns `:ok`.
Note the module remains "instrumented" on reset. If you would
like to effectively remove all breakpoints and instrumentation
code from a module, use `remove_breaks/1` instead.
"""
defdelegate reset_break(module, function, arity), to: IEx.Pry
@doc """
Removes all breakpoints and instrumentation from `module`.
"""
defdelegate remove_breaks(module), to: IEx.Pry
@doc """
Removes all breakpoints and instrumentation from all modules.
"""
defdelegate remove_breaks(), to: IEx.Pry
@doc """
Prints the current location and stacktrace in a pry session.
It expects a `radius` which chooses how many lines before and after
the current line we should print. By default the `radius` is of two
lines:
Location: lib/iex/lib/iex/helpers.ex:79
77:
78: def recompile do
79: require IEx; IEx.pry
80: if mix_started?() do
81: config = Mix.Project.config
(IEx.Helpers) lib/iex/lib/iex/helpers.ex:78: IEx.Helpers.recompile/0
This command only works inside a pry session started manually
via `IEx.pry/0` or a breakpoint set via `IEx.break!/4`. Calling
this function during a regular `IEx` session will print an error.
Keep in mind the `whereami/1` location may not exist when prying
precompiled source code, such as Elixir itself.
"""
def whereami(radius \\ 2) do
case Process.get(:iex_whereami) do
{file, line, stacktrace} ->
msg = ["Location: ", Path.relative_to_cwd(file), ":", Integer.to_string(line)]
IO.puts(IEx.color(:eval_info, msg))
case IEx.Pry.whereami(file, line, radius) do
{:ok, lines} ->
IO.write([?\n, lines, ?\n])
:error ->
msg = "Could not extract source snippet. Location is not available."
IO.puts(IEx.color(:eval_error, msg))
end
case stacktrace do
nil -> :ok
stacktrace -> IO.write([Exception.format_stacktrace(stacktrace), ?\n])
end
_ ->
IO.puts(IEx.color(:eval_error, "Pry session is not currently enabled"))
end
dont_display_result()
end
@doc """
Similar to `import_file` but only imports the file it if it is available.
By default, `import_file/1` fails when the given file does not exist.
However, since `import_file/1` is expanded at compile-time, it's not
possible to conditionally import a file since the macro is always
expanded:
# This raises a File.Error if ~/.iex.exs doesn't exist.
if ("~/.iex.exs" |> Path.expand |> File.exists?) do
import_file "~/.iex.exs"
end
This macro addresses this issue by checking if the file exists or not
in behalf of the user.
"""
defmacro import_file_if_available(path) when is_binary(path) do
import_file_if_available(path, true)
end
defmacro import_file_if_available(_) do
raise ArgumentError, "import_file_if_available/1 expects a literal binary as its argument"
end
defp import_file_if_available(path, optional?) when is_binary(path) do
path = Path.expand(path)
if not optional? or File.exists?(path) do
path |> File.read!() |> Code.string_to_quoted!(file: path)
end
end
@doc """
Evaluates the contents of the file at `path` as if it were directly typed into
the shell.
`path` has to be a literal string. `path` is automatically expanded via
`Path.expand/1`.
## Examples
# ~/file.exs
value = 13
# in the shell
iex(1)> import_file "~/file.exs"
13
iex(2)> value
13
"""
defmacro import_file(path) when is_binary(path) do
import_file_if_available(path, false)
end
defmacro import_file(_) do
raise ArgumentError, "import_file/1 expects a literal binary as its argument"
end
@doc false
defmacro import_file(path, opts) when is_binary(path) and is_list(opts) do
IO.warn("import_file/2 is deprecated, please use import_file_if_available/1 instead")
import_file_if_available(path, Keyword.get(opts, :optional, false))
end
@doc """
Calls `import/2` with the given arguments, but only if the module is available.
This lets you put imports in `.iex.exs` files (including `~/.iex.exs`) without
getting compile errors if you open a console where the module is not available.
## Example
# In ~/.iex.exs
import_if_available Ecto.Query
"""
defmacro import_if_available(quoted_module, opts \\ []) do
module = Macro.expand(quoted_module, __CALLER__)
if Code.ensure_loaded?(module) do
quote do
import unquote(quoted_module), unquote(opts)
end
end
end
defp compile_elixir(exs, :in_memory), do: Kernel.ParallelCompiler.compile(exs)
defp compile_elixir(exs, path), do: Kernel.ParallelCompiler.compile_to_path(exs, path)
# Compiles and loads an Erlang source file, returns {module, binary}
defp compile_erlang(source) do
source = Path.relative_to_cwd(source) |> String.to_charlist()
case :compile.file(source, [:binary, :report]) do
{:ok, module, binary} ->
:code.purge(module)
{:module, module} = :code.load_binary(module, source, binary)
{module, binary}
_ ->
raise CompileError
end
end
defp history, do: Process.get(:iex_history)
@doc """
Creates a PID from `string`.
## Examples
iex> pid("0.21.32")
#PID<0.21.32>
"""
def pid(string) when is_binary(string) do
:erlang.list_to_pid('<#{string}>')
end
@doc """
Creates a PID with 3 non-negative integers passed as arguments
to the function.
## Examples
iex> pid(0, 21, 32)
#PID<0.21.32>
iex> pid(0, 64, 2048)
#PID<0.64.2048>
"""
def pid(x, y, z)
when is_integer(x) and x >= 0 and is_integer(y) and y >= 0 and is_integer(z) and z >= 0 do
:erlang.list_to_pid(
'<' ++
Integer.to_charlist(x) ++
'.' ++ Integer.to_charlist(y) ++ '.' ++ Integer.to_charlist(z) ++ '>'
)
end
@doc """
Deploys a given module's BEAM code to a list of nodes.
This function is useful for development and debugging when you have code that
has been compiled or updated locally that you want to run on other nodes.
The node list defaults to a list of all connected nodes.
Returns `{:error, :nofile}` if the object code (i.e. ".beam" file) for the module
could not be found locally.
## Examples
iex> nl(HelloWorld)
{:ok, [{:node1@easthost, :loaded, HelloWorld},
{:node1@westhost, :loaded, HelloWorld}]}
iex> nl(NoSuchModuleExists)
{:error, :nofile}
"""
def nl(nodes \\ Node.list(), module) when is_list(nodes) and is_atom(module) do
case :code.get_object_code(module) do
{^module, bin, beam_path} ->
results =
for node <- nodes do
case :rpc.call(node, :code, :load_binary, [module, beam_path, bin]) do
{:module, _} -> {node, :loaded, module}
{:badrpc, message} -> {node, :badrpc, message}
{:error, message} -> {node, :error, message}
unexpected -> {node, :error, unexpected}
end
end
{:ok, results}
_otherwise ->
{:error, :nofile}
end
end
end
| 27.314558 | 97 | 0.64484 |
1c57c5c9173da29fb1384eaf7b4a3c62276b6770 | 1,752 | ex | Elixir | apps/astarte_appengine_api/test/support/jwt_test_helper.ex | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 191 | 2018-03-30T13:23:08.000Z | 2022-03-02T12:05:32.000Z | apps/astarte_appengine_api/test/support/jwt_test_helper.ex | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 402 | 2018-03-30T13:37:00.000Z | 2022-03-31T16:47:10.000Z | apps/astarte_appengine_api/test/support/jwt_test_helper.ex | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 24 | 2018-03-30T13:29:48.000Z | 2022-02-28T11:10:26.000Z | #
# This file is part of Astarte.
#
# Copyright 2017 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Astarte.AppEngine.API.JWTTestHelper do
alias Astarte.AppEngine.API.Auth.User
alias Astarte.AppEngine.APIWeb.AuthGuardian
def public_key_pem do
Application.get_env(:astarte_appengine_api, :test_pub_key_pem)
end
def gen_jwt_token(authorization_paths) do
jwk =
Application.get_env(:astarte_appengine_api, :test_priv_key)
|> JOSE.JWK.from_map()
{:ok, jwt, _claims} =
%User{id: "testuser"}
|> AuthGuardian.encode_and_sign(
%{a_aea: authorization_paths},
secret: jwk,
allowed_algos: ["RS256"]
)
jwt
end
def gen_jwt_all_access_token do
gen_jwt_token([".*::.*"])
end
def gen_channels_jwt_token(authorization_paths) do
jwk =
Application.get_env(:astarte_appengine_api, :test_priv_key)
|> JOSE.JWK.from_map()
{:ok, jwt, _claims} =
%User{id: "testuser"}
|> AuthGuardian.encode_and_sign(
%{a_ch: authorization_paths},
secret: jwk,
allowed_algos: ["RS256"]
)
jwt
end
def gen_channels_jwt_all_access_token do
gen_channels_jwt_token(["JOIN::.*", "WATCH::.*"])
end
end
| 26.545455 | 74 | 0.694064 |
1c57d4312c445087cd352635fca68aa1a62e2f20 | 27,771 | ex | Elixir | lib/plug/debugger.ex | bruteforcecat/plug | 45165d978e59d18df8b8085e4e158997dcac19a3 | [
"Apache-2.0"
] | null | null | null | lib/plug/debugger.ex | bruteforcecat/plug | 45165d978e59d18df8b8085e4e158997dcac19a3 | [
"Apache-2.0"
] | null | null | null | lib/plug/debugger.ex | bruteforcecat/plug | 45165d978e59d18df8b8085e4e158997dcac19a3 | [
"Apache-2.0"
] | null | null | null | defmodule Plug.Debugger do
@moduledoc """
A module (**not a plug**) for debugging in development.
This module is commonly used within a `Plug.Builder` or a `Plug.Router`
and it wraps the `call/2` function.
Notice `Plug.Debugger` *does not* catch errors, as errors should still
propagate so that the Elixir process finishes with the proper reason.
This module does not perform any logging either, as all logging is done
by the web server handler.
**Note:** If this module is used with `Plug.ErrorHandler`, only one of
them will effectively handle errors. For this reason, it is recommended
that `Plug.Debugger` is used before `Plug.ErrorHandler` and only in
particular environments, like `:dev`.
## Examples
defmodule MyApp do
use Plug.Builder
if Mix.env == :dev do
use Plug.Debugger, otp_app: :my_app
end
plug :boom
def boom(conn, _) do
# Error raised here will be caught and displayed in a debug page
# complete with a stacktrace and other helpful info.
raise "oops"
end
end
## Options
* `:otp_app` - the OTP application that is using Plug. This option is used
to filter stacktraces that belong only to the given application.
* `:style` - custom styles (see below)
* `:banner` - the optional MFA (`{module, function, args}`) which receives
exception details and returns banner contents to appear at the top of
the page. May be any string, including markup.
## Custom styles
You may pass a `:style` option to customize the look of the HTML page.
use Plug.Debugger, style:
[primary: "#c0392b", logo: "data:image/png;base64,..."]
The following keys are available:
* `:primary` - primary color
* `:accent` - accent color
* `:logo` - logo URI, or `nil` to disable
The `:logo` is preferred to be a base64-encoded data URI so not to make any
external requests, though external URLs (eg, `https://...`) are supported.
## Custom Banners
You may pass an MFA (`{module, function, args}`) to be invoked when an
error is rendered which provides a custom banner at the top of the
debugger page. The function receives the following arguments, with the
passed `args` concatenated at the end:
[conn, status, kind, reason, stacktrace]
For example, the following `:banner` option:
use Plug.Debugger, banner: {MyModule, :debug_banner, []}
would invoke the function:
MyModule.debug_banner(conn, status, kind, reason, stacktrace)
## Links to the text editor
If a `PLUG_EDITOR` environment variable is set, `Plug.Debugger` will
use it to generate links to your text editor. The variable should be
set with `__FILE__` and `__LINE__` placeholders which will be correctly
replaced. For example (with the [TextMate](http://macromates.com) editor):
txmt://open/?url=file://__FILE__&line=__LINE__
Or, using Visual Studio Code:
vscode://file/__FILE__:__LINE__
"""
@already_sent {:plug_conn, :sent}
@logo "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAD0AAABgCAYAAACucnrAAAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAJOpJREFUeAHFnFmPHeeZ39/aztZ7c2myuTVJLRxZsaPQ1lhjj+Oxg2AymAFyFUyAAOMgQO7yHZKLXAbIR8l9rgaDGMlgnCCLEcuxNBQlk5K49t5nqar5/Z7q1kJRNCV2K8U+59Spepfn/+zvU+9hkf6/Hjerq3OvvT4qr/e3Z+9uflOklN/URE/MU1xZ+seXyzb//SyvXulPp/+J+7efaHNiX79x0IvpW6sr8+d/nKf2Rpu3/SxNUlWkwYkhfMrA3yTo7NLoxzdHVf69uk1rbZvqLDXjlBW9Jk+jp9B2YpfyExv5iYGvL/30X/d7vf84bYszKcsmWZbqaNI0bdtkQ86zJ7qc2NdvQtL51YWf/KsiK/9D27YfNu1k2mSfTlsmLDtrlLSg2xND+pmBP539MxeP8TS/OPeDf55n5b9vUzbXJGSal1nWtE3MkecgbZu6zfp8/8YkfaLqfWn0/T8ti96/a1N7qm5a1LntpTbLE16sYyzos1lbN+PVlC72jpHZzxzqxECfH73x3Twf/NvU5hspaw8wYYw39XBepWaMhOus5ryu14s8e219YW3umZQe480TAb0yvHa5Kub+TZ6l38NhHWDLLTLVcfWR8QDlntVtParT7DJOfLVtZovFpL90jLieOdRJgF5ayE//edEWf4xUp6g0oalu8Ni82kqwKZstty2A23a+TQWqnvfyvF17JqXHePO4QZfne2/8ozLr/wv0d4DzmrVYLD6qAXgNyDzP2g2Efg5mVJh22DYerGyL6uIx4nrmUMcJOlvsX9zoV/1/1mbZZWx22mbaLu6Zf3zWbTPJZ019nnsxb+ZhjsIBQy48k9JjvHlsoFfSyuJicfYnKS+/1ybst22aLjSF02oRs3NlZZ4tAvEz86r3Xi/Pn06vLhwjti8d6jOTf2mb57kxHA7XvlWk/p81bVrAWU3oBBZclmGYOIUNywM9+AIRuZ8hfeJzSDnPMjXh9OLiGmp/8sdxgM5Xh+unq3zwA2h/PTU4L8EFaLwYUsV3E5q5hFDhxAAe4LnLQ8gAhxN5ni82Tf6NqPhxgB712oWXUjv4EYo8QIpdTo0kPTKddxxZWwicWA34paK7CuJO2twZFBkx/Rs4Xgj0zZs3q7P9S2tFmvsDJHsDmBP0WDHrqfmY4aAFSmJCvCI0saBqcuR6GvF+JgXW7psCDl0F84lnZi8E+u1ffLBCiF0r8+Im9tsHkGBZQJhb+xHOG9D6bw5tN8/VhJU6QyviUBOUNp2y4uK59MaJJykvAPrM/GChXK7yuZfJOq6hprOU5cRjnZVa3IgQ1Eg4HJUS55WymrRzgYnJt2nmm0sQYjUdzuT9kddP9Pi6oPO1wehMWc8tkXndBNYcsiQRQcIAV61ByB9qfQQcx6bGowqhBTBiHbhFoMtkQrGYtbP7uHUzs87iTwj61wK9nDYWU1nOFXVxFvt8VREWOmo+wdapKxelWfCcxDkfOja+s9Ros1OkJqSkBSWjtDpLzSPU4oO6rAD91omWj74O6KJK+RlkVmZl9RK41jBeQq4GrLPSjkNUAkXHve4rgHNim5SKrO1lbX4lS/kpumyjAPcwjI+Q8fLFfnOiKelXBr2W1k5Vo3whjUckGeVrrKQq9RgkpGFEZfIrAKjCIWFQauTaLMANW6YlOnjMIMuucL3E/h/i5j9CvfdolRdF7xX4cmJe/KuC7qX5+bNKqhxkZ5HZBpQDIBwRDkqrBkonVeUeUrW9Ehc4pzQPB8cl1tdtM4RnB/i/j7hF6jrFsedn1wY/PDFpfyXQp9OVU6nOWexXiKfcANOKUhYSMTgAKk2uIeAAibbCBf4h+jgMXsC2DSbdzvg8Sw0JLzh9aBs8Iaoy6w3y7Pf4WnW9jvf9q4A
u87nBmRJicdN9lhPX8MA91LLuMpAQuDqMJwvECpZXZ8Nh7+iBE+IDSMgw99RSAk4DFGSNGM3am0s4gDplU5CvIe1Lxwu3G+25QZ9JG6fLJi3kVV73snw5L9IFBUYehT2TNfMiUOOXuAraQxVXrH5FiC6mWF929zSEOmtmOIF2AtZXqtS8jqVM6Admx2szSoj/4CRWXs8Luqyq8jw16zzNpLp3FpKXcWJ+a/FO+CVgIU4TUD6VcIBEdJx3B428rjjp1LgSkyOoOFfz4qdc3aDTDM7pGaZlmZaH82tvcLuL590wL/z+XIMh5bNZPnehZMUAeZA0fINV0beRDKADg1go/LQVxJp+Qhgs4aK8CFBRMIAreQaLZqDFSrpbU24d0GEBcz9H31tojsyEQ7EqW1nILuxv1x/cf2G0hwM8D+j+wnD9BsRW4CLWNL1eNfwpNnexaaf7oAAa+BAukuxhqmq2WP2HuP1OoMKbeY33irA+hRMN6aitx1zHMuiZp1UarTLQ++DltOExV9vLi/Zsf7r84W76ePc4gP9O9V4aXFkno15xstIyX15QJOhtoM6sHfLa5SKSRAPMuzkl6hqDgwuITRftqysAInnA8jdlIWb8xidmY+M47gv7Tkr8Rl5kf8Qlamjawazg7qn+cPST47Lv3wW6GJbDy02qYxk4I4CUWbWK2C5C3KHXzvFlAtdZkZSIGbEJGklbQTBgHTq2FmOlUAhqGQHTtOspjIMBhaCJ0dmYob7NY6AfMi+3Q0U0o/XRaOkfprT+wg/7ngl6Pl0/hV2dyxBfn9rGVPrr6gqdzgApq3NAkkb7hgR1YebgCJVLCokDRvDyE/kH/RQMLRLSnkqhUp7BkvD+aDjXEuEqm3L+XR4HvQX7DsfwYUH96vro9A05+iLHs0Bn89VgA0Mj+a8NqFY+iqrKr0AGsTWVRea6Xy9tkYDcEkoRCV+1Z4l1+JiC8+7ABGZZZR3cSlI7oR+80jzkjj5PjSkmePMJjP0+BvFdXZ9uEkaiJSxSLDm9wPGloFfT6gICvtKTHJYHCQUHNk8nyssxH6sjTRmQSKnAcxGyEX7YNQ3oJtDuhbdyGDUfW9WO0ZMWaWZ6CYBEacUmmIzaUiPxbBLMyNJbdP77RDKUICecjethOnWKtj70+1rHl4Iue+sbeVkuodUwnVQEyGXZW6FyeRHSqQMVc/WMG3mqCUDos+voBqKw1cg60XINOqSXw5QmGoCsW5x0Uj6sJyllPFmjqcTaU39henuAu4RB+R+2RfmtrIAZmEBvsNtbGa677v5ai5IvAb0xKMvsWl63OYCo78DzmYrcWyNKL+NsyJpSn0JeH//blATovIjVpQ4HoDlpDJ/6t07anUS8UtQuN2dYBuobxXAAmoFlTZnj9RE6Xs6OXMPhte3YeA3bfkTrV2HkZJCWcBvZgCqswJ8n7HbzH74/FfTaYGEdSZ7J8dYFoi5mqrfVoHwdzeRxjQ+nctxVMd/yjmsCHiukQ3WNSK2QOQDEp6cgoV+B7ipVVFxTUNdtwDXErDM03KHu9FbduaSTS+xcYEtOKn9c1vM3x2nTe7jPNDefzlleinDhtec5ngY66xflNYQ1YCmRMz0aDFRKHIC6yMtI7B+0FssdGJaW1PJnhYV9iO3wAhXnw4P3o0M77jQ4IT3B6vtwez7R5A0GRRhkDhkDN42BvmAQEkcDqibr/cUoe+UPpmmK+vdSv9c/zbPtyCOO5vldn59SdNhyJV1bpHRzCZeC9zH/MJMkpLal9oM6AQs6jDhFno+QwxB1mLWq+FTijtJLBRsHn5gHB/nZFDWe8U0n5gBaQgeeoOw6k0OpwwBXbz4eUgFgKhcZcZwVlBiyuZ8t5ufezLPZrO21+amFxjX+fMz2HG9fAD3oLa+TAS7VOc/MMSSIK0gE0WXX0WZm0BBH2F3BzWVWGZ0qFlh4LEKiAbSGh+uaI00rBHg0nF23UHGBhqNAYgWDzrgFq9EtIpSThKrDJICTvJj
RMjnfxy07klI2/Fm/XXlzOK2n0NY70794nvufqaV30z7t/UnQJeJ9hQlLBko5JKBwKCjRqMV78PAN0ShNwoqSVDTFCuZmGq0ngzAcVKiDwgUTDs0Pvszwfz4M4H60M1YhQySPV8YMQsIz8IWdh62rUVZlbGraqrQ1iXavyKt+Xg3ZwHP6zdq1eFHMr6T1deZ5EhOXPn98rsF8emOlKtNFLpo/AlZJczbLSjR9GeLZ+qSZabSNy6AaUMOmKedBh4RMHsyrJZR4Fb6LluHEGnYVtTznShMGACBSK0HmWI3S5Bqi14ujOeHRoUJGhJ3LbFrTlftwmHt7LNiKVPT/5dxs6U20clJWc6s4NmP4M4/Pgx4WV2dN4lGqsiAqassCz1OPBT2lIVdIOhj/KT7BK8vylHImEEvQhBiiNJCuwxPkOAMXOXWaMFptiFN6jAB/YhwZyQgAJ+53Usc/YOe0hIHmeDDEspIcN5lhLkjbRTp9tP1no2b1LYau53rlORzPM/evfDbGlcvluR/BaRIQRyTJjBBobGHo1LuOwF5DfmHA0Cpi/5g8G9BkEy+wT7hCA4s5VL6kvdEL5ChFMd3CpnezGVLVf2kd3KDBVEVwNC4flv/hhfySrQYDWedIfiacWUbs5oLXYQTxPi0QU7+Tt72drKzvzPKl4V79eJPbMPOLxyegT49urlUpe4uZKofXg6m9MQ1vrHpucGkD2gwzzOOch2c4eUTM1/qBekq6KmifTjItizDaQ9tj3sewoBtW8ZvKUCFhNPMRWjEvZ4AFk7xkFMblW8BnZOggK2A5CvPVMSjgH41RhxHu9ltNW06xtLvtdGE2TY93YtAn3j4BvVKefx1yXiMZsUjFKT6S4dRA4i3RqXwdMs8wVXCPc/U3JoVmMyce0zZbhOa9vCnn8G0sCnCGrlPw2imfPkbRWWDQRSna18jfXYORjBbSZAZ5yuj+03F4WdhSxQmVlrSPXtjKhtyX53iEphnQ9AYpBvE73dme3ttiGpj6+eNwejWrvIpG9gCMHYducq0HQ3km2dZUPbGTkHJ4T4DjVNR0Xjhz5CPfq/NwEX1POjNIopdLpKbeZ1cRFRKUWOXXyxMm+IaEzOR0WJwb+rRhywa+vK5dGxkO43mMoUvxWmdpnk8ZjTxdgKXJxZ+Vqf9Pz8+/fAW4Rxg/QR6SXk0vLQyq0R/CywWcCIShc1gxFUrEamGswY+Vr4Fg3pDBxIdi1qz91v0Bs9/k9SbSA0iJnSEE3EETUm536AiWuNpJDXrwvxCMpdM5lBjVCIk6B5egOCaDNh2BkQDvWuzFd4ZiAWS7GI9TEiiaqxttcb1oKzOeX03Szt4niDkJLvQHCyuoYKRyAHR6pF1jbHpvAjUNATvGuUKR7lbHEw2FIJGCFxPrjmoNVQuddnjEgKTqXRroC6wJMYUOOexYJzxjNcFlHBySpa+O0hUW58aETrNC+ngD5lBzVDQcTqSr+lkGRE72C/uPmhu3qz9dnbvwJ9Jlv6MjJD1XbdzAU38H4lG9aCE4tq7idaQR7Qf5KRby52EjKw9XRyEa5uKK57yY0fg2ZMkwo46IEMiVKR6yNH7ASCKludyRTcEqhTL2nhxB1ZmJXjqxTnbctheD28ATKAbBHleZju8INSxNB9n6RMTSFniliwcJnL86qs7+cmf64Z0YgDc5kPNA6RyCo4P+CP4iXeKNvpHqQFWUlsHydhcbIrzAaWmAdD9pHx9clRGdDLP8DBSxD9SL011uWAVBoHQrTD5C6iF52BkSwvJ9WB/rZVSzi8tMCBIXI6EB0EhUZEypZ+3ndQAjE7TCdFXtwLRkIucQmI0xhtVeqv6CzUufbNcSNKum8ozGgQT4Dljk+in4Kevpgk3ZlG3zdkcDY1AkCu9DqxAg0H0TI1anbyrxOwt4fYhqd7CKWu8F9U3O+jvchJLHdvBU1MhwVmQvjBvAYTrQDkEAKIALGHXX0/EdRzODaYzF8zDocBVm3qKJHdpAkCR
R+7z9/oXh5X/iPY+csmqffQ+nzNSLHFoB3aTyE/BNqvDe5nswJWvuww15TTgk/8LTxTmSo/snr+B6hrdvJjj/2XYoILAgwIwqgIfUa5yYSW5HtevLAALLWWZhz4n8kOe/MLLz7pRSBc536CNqkKCBGrcR3pX5USbW9/YPhsgEbul4cJl/fiq9ciFAz43myZtNPYXtKsPVA50PwTMe5zhd7AXJbPJtLwyJgJEbKFQj2E1nFAYWlFpYhBP8wrTPDTyhhQABde0kDMnLpCgBM78hyUwNSSpx9qUgdVPWTuqoO5JHGfnOesToDTJG7BgSy1mG1+JJwoGsGwpRSxd9KULkl+YHy3/MdypttcV7Nq/quohXSBRJ484OwYMXdca2ccjB8Wx2z47IzVU/ut4N3pZKUs4yF6rDDXS0HsHF08gEkDADCRgOlA4k8oSDiggBnOFcrKDqigVdRZtCIxwP5gRwtTYkgNrzxzUnA7QvFMjNAURKTQaxYv7wRTEHQdDkUeR/dDn9vRUcdLGM/NlxbyKJ3CiMQDy/slAqNgaVuYoaomfN0zYgtvkyR4vIdpAcrW2ve+PQA1JLQfLkyfl51HcbM93CW3QpKaKN1m27z+IJuVA4Y0DmBbK0MYJfmAxeqiZQL2YogB5MGjnzVf4wOKc9jA9zUintJxGeKkQ9HRcoQDDKtcmg/x1qccWiqZROh4GdWo0N75JTleoUqcbZIYtOkEhrdp/x5pwxzC9miPkQTNTC0a92yrhTlBRVyS5gbLswS6lKtwTziDaNzbUtPfJGiGBlx4p+irWCGtUJyzEdhn7kyTxAwIvpf2FsSV0Y1jL+ErzpySdXAdwi9sEA1MZ0VVD0ZZJs0E8FDxHYsuiITEL2Bn2oNemytg3hdag7XMBWROZ4WFsiu2raXTxxD7YynhyW95CNYhA1UPXZhHdBIqpsnkanoaVbG6MQjLTDpFNVHrmr7apL2DJuFIdFG7lsdYL7CtuXYY1+jktn1+3lKjOyWV6JMQgE0JpLkoulhkCiv/qFZrbfLlYH178HXzbkJh1pwy0jPmhIRuBA0D1kkJHKKWw4IguJgZSP2EhGJ8NFTMS7JzKYjgDpJpVRcywwCB/tPgLgqWXziOkoeqghNkIB4iy+KCyJjsIZJQxG13jimUPEKCTk0hUt5WXOKOv8ox1yie/hpxijw6KmeDMbqgTsyjXlpAvWxJuqpdR54Y2sYJMJ0BMAHVkMph4c1EV7j5H6sBDVond4XRwUU2KWhBPECNrO53G7TRdgY68o2DNWZfu4jxoEJhnUzTrP7ac+Dcw6qRn1d/yaF/IZ2Fgusqm0zRcQyTlkSjHQ4WlB5JBTEBlBFRzdzFIQHARUhL5sUMYzKgDCcGcTuA4QHPaH17IgLEQSOIO/XKclXqlOD2mOPSWWkpDatgdIY4ZZygOiPUHYT8YOP1G3Q+z0LDH4bRQT1cY6velzET6dyaAKOAyAzmg0SgcDOVw2tsU8Gku0yUYoIC5cs/LolMsTOviBksISWedtDNbgqt0JpliuLr0O+de9DkQsggK26i0u/jr6WaAapx0AVihoTRhum7dS7MsXUVNWYjy4Z2Rzw24anTKHFkJDrB/hk9Rk7Q6PcDYRvZMa2+EJnBYyQzswPfgRAD9HbItltG6ZaVeZZwEfx6Y9qIF8VHESPgGMWCKdsHqG6lweAyNJ2SI7GRcoQU3Bk8ecZCNqsIxrnMOLhinaMtRFAjq97j5Aqx9CtPqflOOQ2i0er60UFgWsp6W8YrAZ8NxxQLWT2IzrRm0OYP8Bk1zj5wsHs1Q/hC4YZfqfs4sYM8nyERPgQ/hVXpP1cUykOwIx+Iez8hFTRw+ZQIksjNp4B2UiMlhm8RZPjmszn9V7Mx4nEYMagnJNzGVAlSocD7cAzgBqZDBd1J8/6G3q69yckiN+jJgHeGs2Hsns4BJb9wu1QznAJkKYo5rr4mqQCrW46n3MhGoKcTbcj3mZ4oguwVg9Np3VLPSVngw
hID71s37FruCHVwWGMDRydDZu2THGo7uqRivS4CY9UglEgIWRYwtex6Wu48wMxfq2cImO46T+dSdwGAQZlZH2I9Q2mMVt70tI5MzMgxZSH+OgUFzpJfEa8wx6mUsuRR1Sm2jQFsYw2JJkIwXa4cO8phqGitFWqeIkSTlN9hgHTnlNicoxxBWnVD8Y1+/4QD8ZglBLFugmVDbM+JwIdAEyRAhYA13HsiAY4PKv46FxoTuYHFJLFhYZxT+ogwCoZP5oq8ZN6MTigjmN4Thq/IwLjSE0+1vqECATulYyBkbfw/5+KAOtnP6BRpdsES/adZ8daC4fAvejcyd05iwa64bu5c3B/H263qYtKoemMJZlInrwUn2QPKPK6ieOGPToGg2ZpXzAd4oCMQtvcpaNNFlmucb2hy8lJ9OVYppDgGyD9l43LQPFeSclzlmOGhoQGrzT+mLWwzoKTA6cTqrE4Yu3oVnVPlRvOyPBeitlkzvFbnonnR68cgYB+yOx2MQSe/UwD0hm/HCleFL0DSpphyj0WI7n8NqZGLihB8aBIQ2L7XTnaQaemrZKJohCk1U/vnqle0MDkLhjZ/tejMFohMYwCOxQUEQwL4hPdeSGDYgcBE4oYSTHxbXqxlUuxgd3zNZxYJPfbt9Bmz+Uau0Oaefv0+pjOhqylbqGzJ8xpDswxS9K3BHwBAweVMOnbaZ/zJREl3YHPzrDKIhnSEg2AJSmOhogdN+9RDBfwX9Sp2N6LwQIFneou4wN9sA8CELCMEBvzePDQ8rgRGduStr+cV1ZAICV5wMqzbe4xlK2eWjLdq7cWCB7v0r7x4zvYxl+4B1bG0AtD2Ph48ww0QTembkVyQ4k0divTiebAe4vafexW1nS0esJh15VPJ1+MDA0IaJoRPchAQTXWLKfLJwzjdUNvtkxetE6fICdWCfL7G5ixg4LjWlgCPx2X8v0DqHxfYabEZh77B/4X8GeXrVeDMr+DSSBQhRbYNEpOar17goA+v0OtEMygnR0TTrLj9YqBTznzkOQ38WzslUDJZNH8ZI6bS5a0zB4gvbEaHJU1R2CQhsmWYTFmpAjOp8XmBX6IMDqfoaDVDFDs0MQNEI9WItlzSNyl1t1OfvYCZgZRWVF0zZ/HaD3psV0qTr9EnxagjbN64BxH+JsHzFZjZX0WTDxvApWd/NLtXnZocT0jFzBoCB6l2Loh5C/DxXmzxTkjmRJm6BXlPyFDIMJXEU/1ClxZYQxiARLbLSJKSMz5ipaFZFe8WallRf1T7rkLFWV+l7dTv42lePfku7uyQkCrywnTNY7PE37eYBO6fFsZfgyP0JpL0AsMYWGmijqQ6x8RAHgARSRV8t3dySTzeBBnFAiJT8OT9v6Di7B1RSpd87jl9CKBVlJ26BfQuSY35mJrtoHB3PKCBmAdCk1KWZU2GuIKSTugKRpjoRioq0EHHwHxZO7bEb6zbQ6uBOmFWObEttB0Aqk+fDudPw3h6Ax4t6VosrKl+SbAjA4SA8qRLeWLLPZY1H0AIyPibPG9W48mmgWMJ7WzRbg2K3r4o0RqBzicql7838hID3H5S9mMJgYzeVYqIzzWIIMIqUYuPznL3x1CUlKSw+gR9LNIx9gb7HL5b1JObnVFAe3eE7/EZKdsAjBPdEUHXOx5EgIDjPRLOpfbU9//f8+Ab09GW2v9heukjl22ykgxincT2HRH9C0NTS2Y5b1W1xxK/I99OEhlaFdKMZ5ZR8TNKZImPAmg3HrsM2qCeIZMVQ/BoWhHopTgXHCG3+RMwXgAE0LlRzQ3mgeE6FuQ/ijVB6Mm3Tw63E2/lU/r3dJ21zZQJY+AAM+HMIagxlB6AtPaPZ2xz8/SI82rX0dHr9ElS+8zQahSzSjM+4OO+IBOiVIVgW4YvfzxOEEuu6G/5Em1ewUmvEsuK5ZGu9QsyzYBunj3goq+G0ZnyT36CBF/fYqe8ZRdWEqcI/QpFAAZrLaaWUE22x4OsmOopwdDK66kSIua5s
qKpqFD4EJph/kLLFcgsNRz6Rc5xIZC+Nrt+Ag40LzmnT3UXo3nnJ8BjSWnU/eXm16N9GdJUQ0FTD0QRsiR2IVXt9KYKeUcAUWxt4AmETLKeGHipXlteZgxv9+YN2Troa2EC3g36cacIPmQ4E5EvpDedE9seYL8FLyFDGeJWI732gb/KFoQeEgH6FuW4yxh2SshGIU5rk4RLKnKcJx66mG7i+2qYk1/B4QjtRvM67Le8PJp8fBwa2DUW+DvVPFdVQEOnTlKKSGB0CLVpDIdRQ2kKiR0EM+Q9t9hAS/jA62CJfKp3GZPsVMFvC0o30ECdTaQ2Un0kkTN3ApYQSMHTkLfMZyHQx8XuWbT6BSWsReWCo1bHh3yMI0N+RAOw5rC+HaYbrEqhbNxx/u/uq/cvOLoO1STla3e4PRdRjOj80iNw6DATNz4XqUmV/4gJbwi6g5SpaxTo7U3YY24hUI6BXM0oNaYdajHyAQ6+1GAcAE+YetoJPhEZJzxSIAZPLRGzz+hd95ex+AlJezBe4R12mpnJkPIUkLGkgHHL6pG9tg/mZ78pjffXXH5yTtpYN0d7zcx/SytEFoYy4JZs5DpyZUaVDzHddhYQIcjM3p3uPA6CGD1T/JgKogE5QZB9SgursMi3PL57lvJu/TC4lVzRGcK3lzMQb3GmoPw/3fcObINll+tISn7MDN4XoQmDnKS/7/I/IJhEwN3KeXrJuydkTd/MGdnXf/mplDypLwBdBefDxe+Wh1MDzLRGchQp8oBShkxwbLZPgWVZpoIxN0PDzk0V8AOa4EdjrIeERDH1AjCNSgh5Mhdd5j89UWtesh41AxIciAFUZSDwmwLCW45jgyvSwXmYpKWvJ3mXe9DvUI18dA8MhnBjx4Y0bMmN9m582IJ3v5/rj9y3H96GOJPDqeCjqlu01VnbvP/1dyFc65YsJWQ2vRMA0sTI4ZMa54oefd3g4g6bVorE/wnCZKEczaZhgueyVlTNbv5WP8NbuO2J3EXg9ESjt8W+z1j6EdTKZTo8vnItRm7W/ROhY1MSTzgkx56NC1JxcymjJiL/LJf3uw897/OAJ79PkloFPanby/u1it75R57yV2uVlPsoohnAAL18N2CBBkJiXLSXaGxq24rYARiuott8huuOxDJgiTEZoFOg0Z/N6QD5Z9ken1fUTHpuOuWgID6EMlsCKVxRtkLIgEraJoV6iGVU85TYfOF3Tkudp45+7j23/lDEdgjz6/FLQNNifvPVjsX3Mv5xW+Yj6qNWFRgwaSNDuTul+REnaZgbyA+4jVXbKaAMyAdIQGJT5yrGhPNRleAMUFA7wh6bHqQq6d8TNLHixyjXEp4lVu5vMB4xgWvMNw7ELgnw5LF4vasemBsdRvKA1vkj7+cHPzPzPcs7dU0fqpx+PxO3cWe9eYorlsvqMtqeqyVzfHcyelxyVs+lCfO5oACWf8x6FuI/UwQ69CpLGAe9qlysB9BLaLHVl0IKezgMv2y5TzZIQ2WfseFb37agjyNZOGjuCcHhvwjG/U50HC9nTvL6fTh2aMTz2eKemjHpuTdz5YGF4fIwGA838ORQFA5QVEp2ZqNpWHklJv+OuIbOG2VGcIDTah48EE5WJGIQfVnDh1tsA/ISJuNniVqi1O4Q+s/VLkmN0ioaXCSGvBBveBDzfoiHUwaNY+2Jkc/Je9vft3j2h/2udzgbbj1vjdO2zIeVBl/TXq7fM4ICuXyizMG80q+ti1+W53BWIO46wqHvqghuCayX4In1Ku3/Ge/5ShuqGO9pZY7w8w+oM6n75XZ+P/G0DjvxAhajAjjRhDBTCFcYdxcfujxzs/n04ffKmEjxjw3KDtsD352wf96txttgQvUh3m/yrRLYd2eVst54OfFqHt0OXqlASD0w4WxAIKdQE50jlkhFL3iRkpLFkHj7mq5SJVS3CgZE/aB9N09xck4fdIbncQOY97ZyQl5PxtzUOKeo//QIffNuS/ubf1/v9he8nn9otJ1NO
OrwTaAXYmt/ceT7J3lqtlV2+rwMDJqGa4JTDOZpP/zcntdjY9wD7x6HgbnZ1ajulhGrCAAMwF16ZsrCA4wB5+iMLPQ1YAT4g0Q5n9di9/+D8PpgcHJT/Lh3+k0Ps+IQF4bw8nz5o93xynvfc2dx7cZoIveGnpfdqhaL72cXbup2vzRfZdBPQy6o5nMfGd3d1rtv7q/t4vtKtyLX27v5/q/tnh/IigzHOp/OysnY18kOSvr8iwKpZk8yzORhhqWZij1/u/3qt/+5sDmIkN8BSmLfr9RbjF+hWHvuBPtur+1v29+Ycp3fL3mV/peCHQhzOVF/o/vFZWg9cR3nk0G7HUu/zg6r9/sLWJyv3icyrHlszFQf/U91kgmTf3eLw95IFwhe2ag27PJo9/eW9664P5eZx0M6r29tgRORiUsbt2sNIsVFvje/d23dYsWJ3YVz6OA/TRpOXa4M1LvWL+ZfYxrBOPFyhwPCTevptV7f16ws4EHBHL4fM8zHqJpciAsIUs0ZGs2eH/lLwzObj3zoP0gTt3yZ0jDSjm5thFTSt8+fjRo0fWxV3dfi2w9IvjOEEfjck+ix8s1MNsZZTa9TIrVvG8I3STh3mcART9PcC02cIx25o0k/vT8aN7m+m2v43W+AF8mo/7KEKkt9qq5Qu/H8txIqCfoIxCxUZ5Ol2oZmmXIFdQYr2LtO4osSMw0nHkVAV3JMmjzyeGfLGvfwc7xKMiheWKzQAAAABJRU5ErkJggg=="
@default_style %{
primary: "#4e2a8e",
accent: "#607080",
highlight: "#f0f4fa",
red_highlight: "#ffe5e5",
line_color: "#eee",
text_color: "#203040",
logo: @logo,
monospace_font: "menlo, consolas, monospace"
}
@salt "plug-debugger-actions"
import Plug.Conn
require Logger
@doc false
defmacro __using__(opts) do
  # Store the caller's options (:otp_app, :style, :banner) in a module
  # attribute and register a before_compile hook so call/2 can be wrapped
  # once the host module is fully defined.
  quote do
    @plug_debugger unquote(opts)
    @before_compile Plug.Debugger
  end
end
@doc false
defmacro __before_compile__(_) do
  # Redefines the host module's call/2: POSTs to the internal
  # "/__plug__/debugger/action" endpoint run a signed debugger action,
  # everything else is delegated to the original call/2 (super) with
  # error interception around it.
  quote location: :keep do
    defoverridable call: 2

    def call(conn, opts) do
      try do
        case conn do
          %Plug.Conn{path_info: ["__plug__", "debugger", "action"], method: "POST"} ->
            Plug.Debugger.run_action(conn)

          %Plug.Conn{} ->
            super(conn, opts)
        end
      rescue
        # Unwrap errors already wrapped by Plug so we report the original
        # kind/reason/stacktrace against the conn at the point of failure.
        e in Plug.Conn.WrapperError ->
          %{conn: conn, kind: kind, reason: reason, stack: stack} = e
          Plug.Debugger.__catch__(conn, kind, reason, stack, @plug_debugger)
      catch
        # Also intercept throws and exits, not just raised exceptions.
        kind, reason ->
          Plug.Debugger.__catch__(conn, kind, reason, __STACKTRACE__, @plug_debugger)
      end
    end
  end
end
@doc false
def __catch__(conn, kind, reason, stack, opts) do
  reason = Exception.normalize(kind, reason, stack)
  status = status(kind, reason)

  # The adapter signals a sent response by placing @already_sent in our
  # mailbox. If it is there, we cannot render the debug page anymore:
  # put the message back for other consumers, log, and re-raise.
  receive do
    @already_sent ->
      send(self(), @already_sent)
      log(status, kind, reason, stack)
      :erlang.raise(kind, reason, stack)
  after
    0 ->
      # No response sent yet: render the debug page, then still re-raise
      # so the process terminates with the proper reason.
      render(conn, status, kind, reason, stack, opts)
      log(status, kind, reason, stack)
      :erlang.raise(kind, reason, stack)
  end
end
# Statuses >= 500 are treated as errors and logged later by the server
# handler, so we only emit a debug-level log for everything below that.
defp log(status, kind, reason, stack) do
  if status < 500 do
    Logger.debug(Exception.format(kind, reason, stack))
  else
    :ok
  end
end
## Rendering

require EEx

# The HTML and Markdown debug pages are compiled from EEx templates into
# private template_html/1 and template_markdown/1 functions at compile
# time. Paths are relative to the project root.
html_template_path = "lib/plug/templates/debugger.html.eex"
EEx.function_from_file(:defp, :template_html, html_template_path, [:assigns])

markdown_template_path = "lib/plug/templates/debugger.md.eex"
EEx.function_from_file(:defp, :template_markdown, markdown_template_path, [:assigns])
# Made public with @doc false for testing.
#
# Renders the error page for the given status/kind/reason/stacktrace and
# sends the response. HTML clients get the full interactive debug page;
# everyone else gets a Markdown rendering of the formatted exception.
@doc false
def render(conn, status, kind, reason, stack, opts) do
  session = maybe_fetch_session(conn)
  params = maybe_fetch_query_params(conn)
  {title, message} = info(kind, reason)
  # User-supplied :style entries override the defaults.
  style = Enum.into(opts[:style] || [], @default_style)
  banner = banner(conn, status, kind, reason, stack, opts)

  if accepts_html?(get_req_header(conn, "accept")) do
    conn = put_resp_content_type(conn, "text/html")
    actions = encoded_actions_for_exception(reason, conn)
    last_path = actions_redirect_path(conn)

    assigns = [
      conn: conn,
      frames: frames(stack, opts),
      title: title,
      message: message,
      session: session,
      params: params,
      style: style,
      banner: banner,
      actions: actions,
      last_path: last_path
    ]

    send_resp(conn, status, template_html(assigns))
  else
    # Exception.blame/3 may enrich the reason with extra debug info
    # (e.g. clause matching) before formatting.
    {reason, stack} = Exception.blame(kind, reason, stack)
    conn = put_resp_content_type(conn, "text/markdown")

    assigns = [
      conn: conn,
      title: title,
      formatted: Exception.format(kind, reason, stack),
      session: session,
      params: params
    ]

    send_resp(conn, status, template_markdown(assigns))
  end
end
# Runs an action handler that was embedded in a previously rendered debug
# page. The handler MFA is signed with the conn's secret_key_base, so only
# handlers emitted by encoded_actions_for_exception/2 can be executed.
def run_action(%Plug.Conn{} = conn) do
  with %Plug.Conn{body_params: params} <- fetch_body_params(conn),
       {:ok, {module, function, args}} <-
         Plug.Crypto.verify(conn.secret_key_base, @salt, params["encoded_handler"]) do
    apply(module, function, args)

    # Redirect back to where the user came from (fallback to "/").
    conn
    |> Plug.Conn.put_resp_header("location", params["last_path"] || "/")
    |> send_resp(302, "")
    |> halt()
  else
    _ -> raise "could not run Plug.Debugger action"
  end
end
# Returns the exception's actions (from the Plug.Exception protocol) with
# each handler MFA signed using the conn's secret_key_base, so the debug
# page can POST them back safely. Returns [] when the protocol
# implementation has no actions/1 or no secret is available to sign with.
def encoded_actions_for_exception(exception, conn) do
  exception_implementation = Plug.Exception.impl_for(exception)

  implements_actions? =
    Code.ensure_loaded?(exception_implementation) &&
      function_exported?(exception_implementation, :actions, 1)

  # TODO: Remove implements_actions? in future Plug versions
  if implements_actions? && conn.secret_key_base do
    actions = Plug.Exception.actions(exception)

    Enum.map(actions, fn %{label: label, handler: handler} ->
      encoded_handler = Plug.Crypto.sign(conn.secret_key_base, @salt, handler)
      %{label: label, encoded_handler: encoded_handler}
    end)
  else
    []
  end
end
# Computes where to redirect after an action runs: GET requests go back to
# the same path, anything else goes back to the referer (or "/" when the
# header is absent).
def actions_redirect_path(%Plug.Conn{method: "GET"} = conn), do: conn.request_path

def actions_redirect_path(conn) do
  case get_req_header(conn, "referer") do
    [referer_url] -> referer_url
    [] -> "/"
  end
end
# True when the first Accept header mentions HTML (or a wildcard that
# covers it). An empty header list never accepts HTML.
defp accepts_html?([]), do: false

defp accepts_html?([first_header | _rest]) do
  Enum.any?(["*/*", "text/*", "text/html"], &String.contains?(first_header, &1))
end
# Fetches the session for display on the debug page, but only if a session
# fetcher was configured (i.e. Plug.Session ran on this conn); otherwise
# returns nil.
defp maybe_fetch_session(conn) do
  if conn.private[:plug_session_fetch] do
    # Bug fix: the second argument of fetch_session/2 is an options list;
    # the previous code passed `conn` there by mistake.
    conn |> fetch_session() |> get_session()
  end
end
# Fetches the query params for display, degrading gracefully: a malformed
# query string must not crash the error page itself, so on
# InvalidQueryError we fall back to params fetched earlier in the pipeline
# (or an empty map if they were never fetched).
defp maybe_fetch_query_params(conn) do
  fetch_query_params(conn).params
rescue
  Plug.Conn.InvalidQueryError ->
    case conn.params do
      %Plug.Conn.Unfetched{} -> %{}
      params -> params
    end
end
# Parser options are computed once at compile time; the internal action
# endpoint only accepts urlencoded form bodies.
@parsers_opts Plug.Parsers.init(parsers: [:urlencoded])

# Parses the request body so run_action/1 can read the signed handler.
defp fetch_body_params(conn), do: Plug.Parsers.call(conn, @parsers_opts)
# Maps the caught kind/reason to an HTTP status: exceptions consult the
# Plug.Exception protocol, throws and exits are always 500.
defp status(kind, reason) do
  case kind do
    :error -> Plug.Exception.status(reason)
    _ -> 500
  end
end
# Builds the {title, message} pair shown at the top of the debug page for
# each kind of caught value.
defp info(kind, payload) do
  case kind do
    :error -> {inspect(payload.__struct__), Exception.message(payload)}
    :throw -> {"unhandled throw", inspect(payload)}
    :exit -> {"unhandled exit", Exception.format_exit(payload)}
  end
end
# Converts the stacktrace into a list of frame assigns for the template,
# numbering frames from zero via the map_reduce accumulator.
defp frames(stacktrace, opts) do
  otp_app = opts[:otp_app]
  editor_url = System.get_env("PLUG_EDITOR")

  {frames, _next_index} =
    Enum.map_reduce(stacktrace, 0, fn entry, index ->
      each_frame(entry, index, otp_app, editor_url)
    end)

  frames
end
# Builds the assigns map for a single stacktrace frame; the second tuple
# element threads the incremented frame index back to Enum.map_reduce/3.
defp each_frame(entry, index, root, editor) do
  {module, info, location, app, fun, arity, args} = get_entry(entry)
  {file, line} = {to_string(location[:file] || "nofile"), location[:line]}

  # These enrichments are only attempted when a module is known.
  doc = module && get_doc(module, fun, arity, app)
  clauses = module && get_clauses(module, fun, args)
  source = get_source(module, file)
  context = get_context(root, app)
  snippet = get_snippet(source, line)

  {%{
     app: app,
     info: info,
     file: file,
     line: line,
     context: context,
     snippet: snippet,
     index: index,
     doc: doc,
     clauses: clauses,
     args: args,
     link: editor && get_editor(source, line, editor)
   }, index + 1}
end
# Normalizes a stacktrace entry into
# {module, formatted_info, location, app, fun, arity, args}.
# The first three clauses handle synthetic entries emitted by the Elixir
# compiler (:elixir_compiler_*).

# From :elixir_compiler_*
defp get_entry({module, :__MODULE__, 0, location}) do
  {module, inspect(module) <> " (module)", location, get_app(module), nil, nil, nil}
end

# From :elixir_compiler_*
defp get_entry({_module, :__MODULE__, 1, location}) do
  {nil, "(module)", location, nil, nil, nil, nil}
end

# From :elixir_compiler_*
defp get_entry({_module, :__FILE__, 1, location}) do
  {nil, "(file)", location, nil, nil, nil, nil}
end

# When the entry carries the actual argument list (instead of an arity),
# we keep the args around, enabling clause blaming later.
defp get_entry({module, fun, args, location}) when is_list(args) do
  arity = length(args)
  formatted_mfa = Exception.format_mfa(module, fun, arity)
  {module, formatted_mfa, location, get_app(module), fun, arity, args}
end

defp get_entry({module, fun, arity, location}) do
  {module, Exception.format_mfa(module, fun, arity), location, get_app(module), fun, arity, nil}
end

# Anonymous function frames carry no module.
defp get_entry({fun, arity, location}) do
  {nil, Exception.format_fa(fun, arity), location, nil, fun, arity, nil}
end
# Looks up the OTP application that owns the module; nil when unknown.
defp get_app(module) do
  module
  |> :application.get_application()
  |> case do
    {:ok, app} -> app
    :undefined -> nil
  end
end
# Builds a hexdocs.pm URL for the frame's function, or nil when the
# function has no (visible) docs or the app version cannot be determined.
defp get_doc(module, fun, arity, app) do
  with true <- has_docs?(module, fun, arity),
       {:ok, vsn} <- :application.get_key(app, :vsn) do
    # Strip any pre-release/build suffix, e.g. "1.0.0-dev" -> "1.0.0".
    vsn = vsn |> List.to_string() |> String.split("-") |> hd()
    fun = fun |> Atom.to_string() |> URI.encode()
    "https://hexdocs.pm/#{app}/#{vsn}/#{inspect(module)}.html##{fun}/#{arity}"
  else
    _ -> nil
  end
end
# TODO: Remove exported check once we depend on Elixir v1.7+
# Compile-time branch: use Code.fetch_docs/1 (Elixir >= 1.7) when it is
# available, otherwise fall back to the older Code.get_docs/2 format.
if Code.ensure_loaded?(Code) and function_exported?(Code, :fetch_docs, 1) do
  # True when the module's docs are visible and contain a non-hidden
  # function or macro entry for name/arity.
  def has_docs?(module, name, arity) do
    case Code.fetch_docs(module) do
      {:docs_v1, _, _, _, module_doc, _, docs} when module_doc != :hidden ->
        Enum.any?(docs, has_doc_matcher?(name, arity))

      _ ->
        false
    end
  end

  # Predicate over docs_v1 entries matching the given name/arity.
  defp has_doc_matcher?(name, arity) do
    &match?(
      {{kind, ^name, ^arity}, _, _, doc, _}
      when kind in [:function, :macro] and doc != :hidden,
      &1
    )
  end
else
  def has_docs?(module, fun, arity) do
    docs = Code.get_docs(module, :docs)
    not is_nil(docs) and List.keymember?(docs, {fun, arity}, 0)
  end
end
# Uses Exception.blame_mfa/3 to render up to 10 of the function's clauses,
# with sub-patterns colored by whether they matched the actual arguments
# (see clause_match/2). Returns {shown, total, rendered_clauses}, or nil
# when the args are unknown or blaming fails.
defp get_clauses(module, fun, args) do
  with true <- is_list(args),
       {:ok, kind, clauses} <- Exception.blame_mfa(module, fun, args) do
    top_10 =
      clauses
      |> Enum.take(10)
      |> Enum.map(fn {args, guards} ->
        # Re-attach guards as nested `when` nodes before rendering.
        code = Enum.reduce(guards, {fun, [], args}, &{:when, [], [&2, &1]})
        "#{kind} " <> Macro.to_string(code, &clause_match/2)
      end)

    {length(top_10), length(clauses), top_10}
  else
    _ -> nil
  end
end
# Macro.to_string/2 callback: wraps blame-annotated AST nodes in colored
# <i> tags depending on whether the sub-pattern matched; all other nodes
# are rendered as-is.
defp clause_match(%{match?: true, node: node}, _),
  do: ~s(<i class="green">#{h(Macro.to_string(node))}</i>)

defp clause_match(%{match?: false, node: node}, _),
  do: ~s(<i class="red">#{h(Macro.to_string(node))}</i>)

defp clause_match(_, string), do: string
# :app when the frame belongs to the configured :otp_app (and one is
# configured), :all otherwise.
defp get_context(root_app, frame_app) do
  if root_app != nil and root_app == frame_app, do: :app, else: :all
end
# Resolves the source file for a frame: prefer the reported file when it
# exists on disk, otherwise the compile-time source recorded in the beam,
# falling back to the reported file.
defp get_source(module, file) do
  if File.regular?(file) do
    file
  else
    beam_source =
      module && Code.ensure_loaded?(module) && module.module_info(:compile)[:source]

    if beam_source, do: to_string(beam_source), else: file
  end
end
# Expands the PLUG_EDITOR URL template, substituting the file path and
# line number, then HTML-escapes the result for the template.
defp get_editor(file, line, editor) do
  encoded_path = URI.encode(Path.expand(file))

  editor
  |> :binary.replace("__FILE__", encoded_path)
  |> :binary.replace("__LINE__", to_string(line))
  |> h()
end
# Number of context lines shown before and after the failing line.
@radius 5

# Builds the source snippet around `line` in `file` as a flat list of
# `{line_number, text, highlight?}` tuples (the failing line is the only
# highlighted one). Returns nil when the file is not readable or the line
# is unknown. Fix: the read-ahead previously hard-coded `5` instead of
# using @radius, so changing the radius would have truncated the snippet.
defp get_snippet(file, line) do
  if File.regular?(file) and is_integer(line) do
    to_discard = max(line - @radius - 1, 0)

    lines =
      file
      |> File.stream!()
      |> Stream.take(line + @radius)
      |> Stream.drop(to_discard)

    {lines_before, lines} = Enum.split(lines, line - to_discard - 1)
    lines_before = with_line_number(lines_before, to_discard + 1, false)
    {center, lines_after} = Enum.split(lines, 1)
    center = with_line_number(center, line, true)
    lines_after = with_line_number(lines_after, line + 1, false)
    lines_before ++ center ++ lines_after
  end
end
# Pairs each line of text with its absolute line number (starting at
# `initial`) and tags it with the highlight flag.
defp with_line_number(lines, initial, highlight) do
  for {text, offset} <- Enum.with_index(lines) do
    {initial + offset, text, highlight}
  end
end
# Invokes the optional :banner MFA with the error details (extra args are
# appended); nil when no banner is configured, ArgumentError when the
# option is not an MFA tuple.
defp banner(conn, status, kind, reason, stack, opts) do
  case Keyword.fetch(opts, :banner) do
    :error ->
      nil

    {:ok, {mod, func, extra_args}} ->
      apply(mod, func, [conn, status, kind, reason, stack | extra_args])

    {:ok, other} ->
      raise ArgumentError,
            "expected :banner to be an MFA ({module, func, args}), got: #{inspect(other)}"
  end
end
## Helpers

# Request method of the connection (used by the templates).
defp method(%Plug.Conn{} = conn), do: conn.method

# Full request URL of the connection (used by the templates).
defp url(%Plug.Conn{} = conn) do
  "#{conn.scheme}://#{conn.host}:#{conn.port}#{conn.request_path}"
end

# Coerces a value to a string and HTML-escapes it.
defp h(value) do
  Plug.HTML.html_escape(to_string(value))
end
end
| 54.992079 | 12,756 | 0.787872 |
1c57f442cca8278c995aa42307d6aa072f4e82bb | 13,622 | exs | Elixir | test/myxql/client_test.exs | adrianomitre/myxql | 96d098d4d6b2ccfc04a77e5da55094ea4e787c5e | [
"Apache-2.0"
] | null | null | null | test/myxql/client_test.exs | adrianomitre/myxql | 96d098d4d6b2ccfc04a77e5da55094ea4e787c5e | [
"Apache-2.0"
] | null | null | null | test/myxql/client_test.exs | adrianomitre/myxql | 96d098d4d6b2ccfc04a77e5da55094ea4e787c5e | [
"Apache-2.0"
] | null | null | null | defmodule MyXQL.ClientTest do
use ExUnit.Case, async: true
alias MyXQL.{Client, Protocol}
import MyXQL.Protocol.{Flags, Records}
@opts TestHelper.opts()
describe "connect" do
@tag public_key_exchange: true
test "default auth plugin (public key exchange)" do
opts = [username: "default_auth", password: "secret"] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
@tag ssl: true
test "default auth plugin (ssl)" do
opts = [username: "default_auth", password: "secret", ssl: true] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
@tag public_key_exchange: false, ssl: false
test "default auth plugin (no secure authentication)" do
opts = [username: "default_auth", password: "secret"] ++ @opts
case Client.connect(opts) do
# e.g. mysql_native_password doesn't require secure connection
{:ok, _} ->
:ok
# e.g. sha256_password does
{:error, err_packet(message: "Access denied" <> _)} ->
:ok
end
end
test "no password" do
opts = [username: "nopassword"] ++ @opts
assert {:ok, _} = Client.connect(opts)
opts = [username: "nopassword", password: ""] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
@tag ssl: true
test "no password (ssl)" do
opts = [username: "nopassword", ssl: true] ++ @opts
assert {:ok, _} = Client.connect(opts)
opts = [username: "nopassword", password: ""] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
# mysql_native_password
@tag mysql_native_password: true
test "mysql_native_password" do
opts = [username: "mysql_native", password: "secret"] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
@tag mysql_native_password: true
test "mysql_native_password (bad password)" do
opts = [username: "mysql_native", password: "bad"] ++ @opts
assert {:error, err_packet(message: "Access denied" <> _)} = Client.connect(opts)
end
@tag mysql_native_password: true, ssl: true
test "mysql_native_password (ssl)" do
opts = [username: "mysql_native", password: "secret", ssl: true] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
# sha256_password
@tag sha256_password: true, public_key_exchange: true
test "sha256_password" do
opts = [username: "sha256_password", password: "secret"] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
@tag sha256_password: true, ssl: true
test "sha256_password (ssl)" do
opts = [username: "sha256_password", password: "secret", ssl: true] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
@tag sha256_password: true, public_key_exchange: true
test "sha256_password (bad password)" do
opts = [username: "sha256_password", password: "bad"] ++ @opts
assert {:error, err_packet(message: "Access denied" <> _)} = Client.connect(opts)
end
@tag sha256_password: true, ssl: true
test "sha256_password (bad password) (ssl)" do
opts = [username: "sha256_password", password: "bad", ssl: true] ++ @opts
assert {:error, err_packet(message: "Access denied" <> _)} = Client.connect(opts)
end
@tag sha256_password: true, ssl: true
test "sha256_password (empty password) (ssl)" do
opts = [username: "sha256_empty", ssl: true] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
# caching_sha2_password
@tag caching_sha2_password: true, public_key_exchange: true
test "caching_sha2_password (public key exchange)" do
opts = [username: "caching_sha2_password", password: "secret"] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
@tag caching_sha2_password: true, ssl: true
test "caching_sha2_password (ssl)" do
opts = [username: "caching_sha2_password", password: "secret", ssl: true] ++ @opts
assert {:ok, _} = Client.connect(opts)
end
@tag caching_sha2_password: true
test "caching_sha2_password (bad password)" do
opts = [username: "caching_sha2_password", password: "bad"] ++ @opts
assert {:error, err_packet(message: "Access denied" <> _)} = Client.connect(opts)
end
@tag caching_sha2_password: true, ssl: true
test "caching_sha2_password (bad password) (ssl)" do
opts = [username: "caching_sha2_password", password: "bad", ssl: true] ++ @opts
assert {:error, err_packet(message: "Access denied" <> _)} = Client.connect(opts)
end
# Miscellaneous connection behaviour.
@tag ssl: false
test "client requires ssl but server does not support it" do
  # Requesting TLS from a server without TLS support must fail fast with a
  # dedicated error rather than attempting a plaintext handshake.
  assert {:error, :server_does_not_support_ssl} = Client.connect([ssl: true] ++ @opts)
end
test "default charset" do
  {:ok, conn} = Client.connect(@opts)

  # With no explicit :charset option the connection uses utf8mb4, so 4-byte
  # codepoints (e.g. emoji) round-trip intact.
  {:ok, resultset(rows: [[charset, collation]])} =
    Client.com_query(conn, "select @@character_set_connection, @@collation_connection")

  assert charset == "utf8mb4"
  assert collation =~ "utf8mb4_"
  assert {:ok, resultset(rows: [["hello 😃"]])} = Client.com_query(conn, "SELECT 'hello 😃'")
end
test "set charset" do
  # An explicit charset implies that charset's default collation.
  {:ok, conn} = Client.connect([charset: "latin1"] ++ @opts)

  {:ok, resultset(rows: [[charset, collation]])} =
    Client.com_query(conn, "select @@character_set_connection, @@collation_connection")

  assert {charset, collation} == {"latin1", "latin1_swedish_ci"}
end
test "set charset and collation" do
  # An explicit collation overrides the charset's default collation.
  {:ok, conn} = Client.connect([charset: "latin1", collation: "latin1_general_ci"] ++ @opts)

  {:ok, resultset(rows: [[charset, collation]])} =
    Client.com_query(conn, "select @@character_set_connection, @@collation_connection")

  assert {charset, collation} == {"latin1", "latin1_general_ci"}
end
end
describe "com_query/2" do
  setup :connect

  test "simple query", %{client: conn} do
    # A plain text-protocol query returns already-decoded row values.
    assert {:ok, resultset(rows: [[1024, 2048]])} =
             Client.com_query(conn, "SELECT 1024 as a, 2048 as b")
  end
end
describe "com_stmt_prepare/2 + com_stmt_execute/2" do
  setup :connect

  test "no results", %{client: conn} do
    {:ok, com_stmt_prepare_ok(statement_id: stmt_id)} =
      Client.com_stmt_prepare(conn, "select x from integers")

    {:ok,
     resultset(num_rows: 0, rows: rows, column_defs: column_defs, status_flags: status_flags)} =
      Client.com_stmt_execute(conn, stmt_id, [], :cursor_type_no_cursor)

    # The scratch table is empty; the server also reports that no index was
    # used for the scan.
    assert list_status_flags(status_flags) == [
             :server_status_autocommit,
             :server_status_no_index_used
           ]

    assert [column_def(name: "x")] = column_defs
    assert rows == []
  end

  test "no params", %{client: conn} do
    {:ok, com_stmt_prepare_ok(statement_id: stmt_id)} =
      Client.com_stmt_prepare(conn, "select 1024 as a, 2048 as b")

    {:ok,
     resultset(num_rows: 1, rows: rows, column_defs: column_defs, status_flags: status_flags)} =
      Client.com_stmt_execute(conn, stmt_id, [], :cursor_type_no_cursor)

    assert [column_def(name: "a"), column_def(name: "b")] = column_defs
    assert [[1024, 2048]] = rows
    assert list_status_flags(status_flags) == [:server_status_autocommit]
  end

  test "params", %{client: conn} do
    {:ok, com_stmt_prepare_ok(statement_id: stmt_id)} =
      Client.com_stmt_prepare(conn, "select ? as a, ? as b")

    # Bound parameters travel in the binary protocol and come back as columns.
    {:ok,
     resultset(num_rows: 1, rows: rows, column_defs: column_defs, status_flags: status_flags)} =
      Client.com_stmt_execute(conn, stmt_id, [1024, 2048], :cursor_type_no_cursor)

    assert [column_def(name: "a"), column_def(name: "b")] = column_defs
    assert [[1024, 2048]] = rows
    assert list_status_flags(status_flags) == [:server_status_autocommit]
  end

  test "encode large packets", %{client: conn} do
    # A 20 MB value exceeds the 16 MB wire packet limit, so the client must
    # split the COM_STMT_EXECUTE payload across packets.
    payload = String.duplicate("x", 20_000_000)

    {:ok, com_stmt_prepare_ok(statement_id: stmt_id)} =
      Client.com_stmt_prepare(conn, "select length(?)")

    {:ok, resultset(rows: rows)} =
      Client.com_stmt_execute(conn, stmt_id, [payload], :cursor_type_no_cursor)

    assert rows == [[20_000_000]]
  end
end
describe "com_stmt_prepare + com_stmt_execute + com_stmt_fetch" do
  setup :connect

  test "with no results", %{client: conn} do
    {:ok, com_stmt_prepare_ok(statement_id: stmt_id)} =
      Client.com_stmt_prepare(conn, "select * from integers")

    # Executing with a read-only cursor returns no rows yet - only an open
    # cursor, signalled via the status flags.
    {:ok,
     resultset(num_rows: 0, rows: [], column_defs: column_defs, status_flags: status_flags)} =
      Client.com_stmt_execute(conn, stmt_id, [], :cursor_type_read_only)

    assert :server_status_cursor_exists in list_status_flags(status_flags)

    # Fetching from an empty cursor closes it immediately.
    {:ok, resultset(num_rows: 0, rows: [], status_flags: status_flags)} =
      Client.com_stmt_fetch(conn, stmt_id, column_defs, 5)

    flags = list_status_flags(status_flags)
    refute :server_status_cursor_exists in flags
    assert :server_status_last_row_sent in flags
  end

  test "with simple query", %{client: conn} do
    values = Enum.map_join(1..4, ", ", &"(#{&1})")
    {:ok, ok_packet()} = Client.com_query(conn, "insert into integers values #{values}")

    {:ok, com_stmt_prepare_ok(statement_id: stmt_id)} =
      Client.com_stmt_prepare(conn, "select * from integers")

    {:ok,
     resultset(num_rows: 0, rows: [], column_defs: column_defs, status_flags: status_flags)} =
      Client.com_stmt_execute(conn, stmt_id, [], :cursor_type_read_only)

    assert :server_status_cursor_exists in list_status_flags(status_flags)

    # Rows arrive in fetch-sized chunks while the cursor stays open...
    {:ok, resultset(num_rows: 2, rows: [[1], [2]], status_flags: status_flags)} =
      Client.com_stmt_fetch(conn, stmt_id, column_defs, 2)

    assert :server_status_cursor_exists in list_status_flags(status_flags)

    {:ok, resultset(num_rows: 2, rows: [[3], [4]], status_flags: status_flags)} =
      Client.com_stmt_fetch(conn, stmt_id, column_defs, 2)

    assert :server_status_cursor_exists in list_status_flags(status_flags)

    # ...until an exhausting fetch closes the cursor...
    {:ok, resultset(num_rows: 0, rows: [], status_flags: status_flags)} =
      Client.com_stmt_fetch(conn, stmt_id, column_defs, 5)

    refute :server_status_cursor_exists in list_status_flags(status_flags)
    assert :server_status_last_row_sent in list_status_flags(status_flags)

    # ...after which further fetches are a protocol-level error.
    {:ok, err_packet(code: code)} = Client.com_stmt_fetch(conn, stmt_id, column_defs, 2)
    assert Protocol.error_code_to_name(code) == :ER_STMT_HAS_NO_OPEN_CURSOR
  end

  test "with stored procedure of single result", %{client: conn} do
    {:ok, com_stmt_prepare_ok(statement_id: stmt_id)} =
      Client.com_stmt_prepare(conn, "CALL single_procedure()")

    # A single-result procedure behaves like a plain statement: no cursor.
    {:ok, resultset(num_rows: 1, status_flags: status_flags)} =
      Client.com_stmt_execute(conn, stmt_id, [], :cursor_type_read_only)

    assert list_status_flags(status_flags) == [:server_status_autocommit]
  end

  test "with stored procedure of multiple results", %{client: conn} do
    {:ok, com_stmt_prepare_ok(statement_id: stmt_id)} =
      Client.com_stmt_prepare(conn, "CALL multi_procedure()")

    # Multiple resultsets are not supported by this client.
    assert {:error, :multiple_results} =
             Client.com_stmt_execute(conn, stmt_id, [], :cursor_type_read_only)
  end
end
describe "recv_packets/4" do
  test "simple" do
    # The fake server emits one packet: 3-byte little-endian payload length,
    # sequence id 0, then the payload "foo".
    %{port: port} =
      start_fake_server(fn %{accept_socket: sock} ->
        :gen_tcp.send(sock, <<3::24-little, 0, "foo">>)
      end)

    {:ok, conn} = Client.do_connect(Client.Config.new(port: port))

    # A decoder that halts on the first payload returns it unchanged.
    decode = fn payload, _next_data, :initial -> {:halt, payload} end
    assert Client.recv_packets(conn, decode, :initial) == {:ok, "foo"}
  end
end
describe "com_ping/2" do
  test "handles multiple packets" do
    # Two consecutive ERR packets (0xff header) in one TCP payload, as sent
    # by a proxy when the backend dies; the client must decode and surface
    # the first one instead of choking on the trailing bytes.
    err1 =
      <<255, 211, 7, 35, 72, 89, 48, 48, 48, 76, 111, 115, 116, 32, 99, 111, 110, 110, 101,
        99, 116, 105, 111, 110, 32, 116, 111, 32, 98, 97, 99, 107, 101, 110, 100, 32, 115,
        101, 114, 118, 101, 114, 46>>

    err2 =
      <<255, 135, 7, 35, 48, 56, 83, 48, 49, 67, 111, 110, 110, 101, 99, 116, 105, 111, 110,
        32, 107, 105, 108, 108, 101, 100, 32, 98, 121, 32, 77, 97, 120, 83, 99, 97, 108,
        101, 58, 32, 82, 111, 117, 116, 101, 114>>

    %{port: port} =
      start_fake_server(fn %{accept_socket: sock} ->
        :gen_tcp.send(sock, [
          <<byte_size(err1)::24-little>>,
          0,
          err1,
          <<byte_size(err2)::24-little>>,
          1,
          err2
        ])
      end)

    {:ok, conn} = Client.do_connect(Client.Config.new(port: port))

    assert {:ok, err_packet(message: "Lost connection to backend server.")} =
             Client.com_ping(conn, 100)
  end
end
# Shared setup: connects a client and creates a per-connection scratch table.
defp connect(_context) do
  {:ok, client} = Client.connect(@opts)

  # A temporary table is scoped to this connection and dropped with it.
  {:ok, ok_packet()} =
    Client.com_query(client, "create temporary table integers (x int)")

  {:ok, [client: client]}
end
# Starts a one-shot TCP server on an ephemeral port; `fun` receives both the
# accepted socket and the listen socket. Returns the server pid and port.
defp start_fake_server(fun) do
  {:ok, listen_socket} = :gen_tcp.listen(0, mode: :binary, active: false)
  {:ok, port} = :inet.port(listen_socket)

  server = fn ->
    {:ok, accept_socket} = :gen_tcp.accept(listen_socket)
    fun.(%{accept_socket: accept_socket, listen_socket: listen_socket})
  end

  # Linked so a crash inside `fun` surfaces in the (exit-trapping) test.
  {:ok, pid} = Task.start_link(server)
  %{pid: pid, port: port}
end
end
| 35.941953 | 98 | 0.642931 |
1c5808029948be505ecc47bb147c21caac2e900d | 3,747 | ex | Elixir | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/my_sql_replica_configuration.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/my_sql_replica_configuration.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/my_sql_replica_configuration.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SQLAdmin.V1beta4.Model.MySqlReplicaConfiguration do
  @moduledoc """
  Read-replica configuration specific to MySQL databases.

  Auto-generated model; field names deliberately mirror the camelCase JSON
  names of the API resource.

  ## Attributes

  *   `caCertificate` (*type:* `String.t`, *default:* `nil`) - PEM representation of the trusted CA's x509 certificate.
  *   `clientCertificate` (*type:* `String.t`, *default:* `nil`) - PEM representation of the replica's x509 certificate.
  *   `clientKey` (*type:* `String.t`, *default:* `nil`) - PEM representation of the replica's private key. The corresponsing public key is encoded in the client's certificate.
  *   `connectRetryInterval` (*type:* `integer()`, *default:* `nil`) - Seconds to wait between connect retries. MySQL's default is 60 seconds.
  *   `dumpFilePath` (*type:* `String.t`, *default:* `nil`) - Path to a SQL dump file in Google Cloud Storage from which the replica instance is to be created. The URI is in the form gs://bucketName/fileName. Compressed gzip files (.gz) are also supported. Dumps have the binlog co-ordinates from which replication begins. This can be accomplished by setting --master-data to 1 when using mysqldump.
  *   `kind` (*type:* `String.t`, *default:* `nil`) - This is always *sql#mysqlReplicaConfiguration*.
  *   `masterHeartbeatPeriod` (*type:* `String.t`, *default:* `nil`) - Interval in milliseconds between replication heartbeats.
  *   `password` (*type:* `String.t`, *default:* `nil`) - The password for the replication connection.
  *   `sslCipher` (*type:* `String.t`, *default:* `nil`) - A list of permissible ciphers to use for SSL encryption.
  *   `username` (*type:* `String.t`, *default:* `nil`) - The username for the replication connection.
  *   `verifyServerCertificate` (*type:* `boolean()`, *default:* `nil`) - Whether or not to check the primary instance's Common Name value in the certificate that it sends during the SSL handshake.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :caCertificate => String.t(),
          :clientCertificate => String.t(),
          :clientKey => String.t(),
          :connectRetryInterval => integer(),
          :dumpFilePath => String.t(),
          :kind => String.t(),
          :masterHeartbeatPeriod => String.t(),
          :password => String.t(),
          :sslCipher => String.t(),
          :username => String.t(),
          :verifyServerCertificate => boolean()
        }

  # One field/1 registration per attribute documented in the moduledoc above.
  field(:caCertificate)
  field(:clientCertificate)
  field(:clientKey)
  field(:connectRetryInterval)
  field(:dumpFilePath)
  field(:kind)
  field(:masterHeartbeatPeriod)
  field(:password)
  field(:sslCipher)
  field(:username)
  field(:verifyServerCertificate)
end
defimpl Poison.Decoder, for: GoogleApi.SQLAdmin.V1beta4.Model.MySqlReplicaConfiguration do
  # Delegates to the decode/2 generated on the model module itself.
  def decode(value, options),
    do: GoogleApi.SQLAdmin.V1beta4.Model.MySqlReplicaConfiguration.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.SQLAdmin.V1beta4.Model.MySqlReplicaConfiguration do
  # All Gax models share the generic ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 48.662338 | 399 | 0.705898 |
1c585822492e3991de31fd55a93503d773e78ee7 | 1,424 | ex | Elixir | Microsoft.Azure.Management.Database.PostgreSql/lib/microsoft/azure/management/database/postgre_sql/model/server_properties.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Database.PostgreSql/lib/microsoft/azure/management/database/postgre_sql/model/server_properties.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Database.PostgreSql/lib/microsoft/azure/management/database/postgre_sql/model/server_properties.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Database.PostgreSql.Model.ServerProperties do
  @moduledoc """
  The properties of a server.

  Swagger-generated model. The complex fields (`version`, `sslEnforcement`,
  `storageProfile`) are converted into their own model structs by the
  Poison.Decoder implementation that accompanies this module.
  """

  @derive [Poison.Encoder]

  # Quoted atoms keep the keys identical to the API's camelCase JSON names.
  defstruct [
    :"administratorLogin",
    :"version",
    :"sslEnforcement",
    :"userVisibleState",
    :"fullyQualifiedDomainName",
    :"earliestRestoreDate",
    :"storageProfile"
  ]

  # NOTE(review): the bare aliases below (ServerVersion, SslEnforcement,
  # StorageProfile) are not aliased in this module, so the spec points at
  # top-level names rather than the sibling Model.* modules - presumably they
  # should be fully qualified; confirm against the code generator.
  @type t :: %__MODULE__{
          :"administratorLogin" => String.t,
          :"version" => ServerVersion,
          :"sslEnforcement" => SslEnforcement,
          :"userVisibleState" => String.t,
          :"fullyQualifiedDomainName" => String.t,
          :"earliestRestoreDate" => DateTime.t,
          :"storageProfile" => StorageProfile
        }
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Database.PostgreSql.Model.ServerProperties do
  import Microsoft.Azure.Management.Database.PostgreSql.Deserializer

  # Post-processes the three complex fields into their model structs; scalar
  # fields need no further decoding.
  def decode(value, options) do
    [
      {:"version", Microsoft.Azure.Management.Database.PostgreSql.Model.ServerVersion},
      {:"sslEnforcement", Microsoft.Azure.Management.Database.PostgreSql.Model.SslEnforcement},
      {:"storageProfile", Microsoft.Azure.Management.Database.PostgreSql.Model.StorageProfile}
    ]
    |> Enum.reduce(value, fn {field, module}, acc ->
      deserialize(acc, field, :struct, module, options)
    end)
  end
end
| 33.904762 | 124 | 0.732444 |
1c58595b1d3942996cc581c41aa3f709e3a2ffab | 79 | exs | Elixir | default_value.exs | ariarijp/elixir-crash-course | f63502a2bb59e3a3e1ce6c62c595fbf75c0462c0 | [
"MIT"
] | 1 | 2016-09-22T03:47:46.000Z | 2016-09-22T03:47:46.000Z | default_value.exs | ariarijp/elixir-crash-course | f63502a2bb59e3a3e1ce6c62c595fbf75c0462c0 | [
"MIT"
] | null | null | null | default_value.exs | ariarijp/elixir-crash-course | f63502a2bb59e3a3e1ce6c62c595fbf75c0462c0 | [
"MIT"
defmodule DefaultValueModule do
  @moduledoc """
  Demonstrates default argument values in Elixir function definitions.
  """

  @doc """
  Multiplies `x` by `n`; `n` defaults to `2`, so `mul_by/1` doubles.

  ## Examples

      iex> DefaultValueModule.mul_by(3)
      6

      iex> DefaultValueModule.mul_by(3, 4)
      12

  """
  @spec mul_by(number(), number()) :: number()
  def mul_by(x, n \\ 2) do
    x * n
  end
end
| 13.166667 | 31 | 0.64557 |
1c585a23629309f5b63058e18f22e853d72dd277 | 2,822 | ex | Elixir | web/modules/sql/adapters/query_makers/influxdb.ex | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | [
"MIT"
] | null | null | null | web/modules/sql/adapters/query_makers/influxdb.ex | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | [
"MIT"
] | null | null | null | web/modules/sql/adapters/query_makers/influxdb.ex | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | [
"MIT"
] | null | null | null | require IEx
defmodule AfterGlow.Sql.Adapters.QueryMakers.InfluxDb do
  @moduledoc """
  Builds InfluxQL statements from the internal query-record representation.
  """
  use AfterGlow.Sql.Adapters.QueryMakers.Common

  # App-level duration unit names mapped to InfluxQL duration suffixes.
  @time_intervals %{
    "seconds" => "s",
    "minutes" => "m",
    "hours" => "h",
    "days" => "d",
    "weeks" => "w"
  }

  @doc """
  Renders `query_record` as an InfluxQL `select` statement.

  Bug fix: the previous implementation rebound `query` inside bare `if`
  blocks; in Elixir those rebindings are scoped to the `if` and discarded,
  so the WHERE/GROUP BY/ORDER BY/LIMIT/OFFSET clauses were silently dropped.
  Each clause is now threaded through the pipeline explicitly.
  """
  def sql(query_record) do
    options = options(query_record, :influxdb)

    "select #{options[:select] |> Enum.join(", ")} from #{query_record[:table]["readable_table_name"]}"
    |> append_clause("WHERE", options[:where], " and ")
    |> append_clause("GROUP BY", options[:group_by], " , ")
    |> append_clause("ORDER BY", options[:order_by], " , ")
    |> append_suffix("LIMIT", query_record[:limit])
    |> append_suffix("OFFSET", query_record[:offset])
  end

  # Appends "<keyword> <joined parts>" when there is at least one part.
  defp append_clause(query, _keyword, nil, _separator), do: query
  defp append_clause(query, _keyword, [], _separator), do: query

  defp append_clause(query, keyword, parts, separator),
    do: "#{query} #{keyword} #{Enum.join(parts, separator)}"

  # Appends "<keyword> <value>" when a value is present (LIMIT/OFFSET).
  defp append_suffix(query, _keyword, nil), do: query
  defp append_suffix(query, keyword, value), do: "#{query} #{keyword} #{value}"

  @doc """
  Collects the clause fragments (select/where/group/order) used by `sql/1`.
  """
  def options(query_record, adapter) do
    %{
      select: select_maker(query_record[:selects], []),
      group_by: group_bys_maker(query_record[:group_bys]),
      where: where_maker(query_record[:filters]) || [],
      order_by: order_bys_maker(query_record[:order_bys]),
      adapter: adapter,
      limit: nil
    }
  end

  # InfluxDB needs no extra columns pulled into the select for group/order.
  def find_columns_required_for_select(_group_by, _order_by), do: []

  @doc """
  Translates a relative-date filter into an InfluxQL `now()` expression,
  e.g. "3" / "ago" / "days" -> "now() - 3d". Months, quarters and years are
  approximated as 30, 90 and 365 days, since InfluxQL has no native
  month/quarter/year durations.
  """
  def parse_filter_date_obj_value(val, dtt, dur) do
    {val, duration} =
      case dur["value"] do
        "months" -> {String.to_integer(val) * 30, "d"}
        "quarters" -> {String.to_integer(val) * 90, "d"}
        "years" -> {String.to_integer(val) * 365, "d"}
        _ -> {val, @time_intervals[dur["value"]]}
      end

    op = if dtt["value"] == "ago", do: "-", else: "+"

    "now() #{op} #{val}#{duration}"
  end

  # Group-by casts to InfluxQL time() buckets; month/quarter/year use the same
  # day-based approximations as above. Trailing spaces are preserved for
  # byte-compatibility with the previous output.
  def cast_group_by(el, nil), do: el
  def cast_group_by(_el, "day"), do: "time(1d)"
  def cast_group_by(_el, "minutes"), do: "time(1m) "
  def cast_group_by(_el, "seconds"), do: "time(1s) "
  def cast_group_by(_el, "hour"), do: "time(1h) "
  def cast_group_by(_el, "week"), do: "time(1w) "
  def cast_group_by(_el, "month"), do: "time(30d) "
  def cast_group_by(_el, "quarter"), do: "time(90d) "
  def cast_group_by(_el, "year"), do: "time(365d) "

  # Raw select fragments pass through verbatim; "raw_data" selects all
  # columns; "count" selects count(*).
  def stringify_select(%{"raw" => true, "value" => value}, _columns_required), do: value
  def stringify_select(%{"name" => _name, "value" => "raw_data"}, _columns_required), do: "*"
  def stringify_select(%{"name" => _name, "value" => "count"}, _columns_required), do: "count(*)"
end
| 28.795918 | 96 | 0.575124 |
1c587af3d11e58e569c323bdd98e34981c7628b2 | 1,007 | exs | Elixir | apps/service_broadcast/test/broadcast/init_test.exs | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | apps/service_broadcast/test/broadcast/init_test.exs | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | apps/service_broadcast/test/broadcast/init_test.exs | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | defmodule Broadcast.InitTest do
use ExUnit.Case
use Placebo
@instance Broadcast.Application.instance()
setup do
Process.flag(:trap_exit, true)
:ok
end
test "will start all streams in store" do
allow(Broadcast.Stream.Supervisor.start_child(any()), return: {:ok, :pid})
loads = [
Load.new!(
id: "load1",
dataset_id: "ds1",
subset_id: "one",
source: Source.Fake.new!(),
destination: Channel.Topic.new!(name: "d1")
),
Load.new!(
id: "load2",
dataset_id: "ds2",
subset_id: "two",
source: Source.Fake.new!(),
destination: Channel.Topic.new!(name: "d2")
)
]
Brook.Test.with_event(@instance, fn ->
Enum.each(loads, &Broadcast.Stream.Store.persist/1)
end)
start_supervised(Broadcast.Init)
assert_called(Broadcast.Stream.Supervisor.start_child(Enum.at(loads, 0)))
assert_called(Broadcast.Stream.Supervisor.start_child(Enum.at(loads, 1)))
end
end
| 23.97619 | 78 | 0.620655 |
1c587bcaf13a1dfdfb14ba26f4de29024e1eb4bd | 328 | ex | Elixir | lib/mastani_server_web/middleware/see_me.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:03:54.000Z | 2019-05-07T15:03:54.000Z | lib/mastani_server_web/middleware/see_me.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | lib/mastani_server_web/middleware/see_me.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | # ---
# Absinthe.Middleware behaviour
# see https://hexdocs.pm/absinthe/Absinthe.Middleware.html#content
# ---
defmodule MastaniServerWeb.Middleware.SeeMe do
@behaviour Absinthe.Middleware
def call(resolution, _) do
# IO.inspect("see me")
# IO.inspect resolution.arguments, label: "see me"
resolution
end
end
| 23.428571 | 66 | 0.72561 |
1c588fc25d13ddd073b88baa9cc31738b9324041 | 2,628 | exs | Elixir | test/mix/tasks/ex_doc_groups_test.exs | randycoulman/boundary | 2fdea46e702400c152670262d5ca1f31edbd4fa1 | [
"MIT"
] | 490 | 2019-09-07T10:33:15.000Z | 2022-03-30T09:50:03.000Z | test/mix/tasks/ex_doc_groups_test.exs | randycoulman/boundary | 2fdea46e702400c152670262d5ca1f31edbd4fa1 | [
"MIT"
] | 35 | 2019-09-07T10:49:25.000Z | 2022-03-18T10:02:34.000Z | test/mix/tasks/ex_doc_groups_test.exs | randycoulman/boundary | 2fdea46e702400c152670262d5ca1f31edbd4fa1 | [
"MIT"
] | 16 | 2019-09-08T15:09:16.000Z | 2022-02-12T21:40:14.000Z | defmodule Mix.Tasks.Boundary.ExDocGroupsTest do
use ExUnit.Case, async: false
alias Boundary.TestProject
@module_setup """
defmodule Boundary1 do
use Boundary, deps: [], exports: []
defmodule Foo do
defmodule Bar do end
end
defmodule Bar do end
defmodule Baz do end
end
defmodule Boundary2 do
use Boundary, deps: [], exports: []
defmodule Foo do end
end
defmodule Boundary3 do
use Boundary, deps: [], exports: []
defmodule Foo do end
defmodule Bar do end
end
defmodule Boundary3.InnerBoundary do
use Boundary, deps: [], exports: []
defmodule Foo do end
end
defmodule Ignored do
use Boundary, check: [in: false, out: false]
end
"""
test "the mix task produces the correct output" do
Mix.shell(Mix.Shell.Process)
Logger.disable(self())
TestProject.in_project(fn project ->
File.write!(
Path.join([project.path, "lib", "source.ex"]),
@module_setup
)
assert TestProject.run_task("boundary.ex_doc_groups").output =~ "* creating boundary.exs"
end)
end
test "when evaled produces the correct data" do
Mix.shell(Mix.Shell.Process)
Logger.disable(self())
TestProject.in_project(fn project ->
File.write!(
Path.join([project.path, "lib", "source.ex"]),
@module_setup
)
TestProject.run_task("boundary.ex_doc_groups")
{groups, _} = Code.eval_file("boundary.exs")
assert [
Boundary1: [Boundary1, Boundary1.Bar, Boundary1.Baz, Boundary1.Foo, Boundary1.Foo.Bar],
Boundary2: [Boundary2, Boundary2.Foo],
Boundary3: [Boundary3, Boundary3.Bar, Boundary3.Foo],
"Boundary3.InnerBoundary": [Boundary3.InnerBoundary, Boundary3.InnerBoundary.Foo]
] = groups
end)
end
test "is formatted correctly" do
Mix.shell(Mix.Shell.Process)
Logger.disable(self())
TestProject.in_project(fn project ->
File.write!(
Path.join([project.path, "lib", "source.ex"]),
@module_setup
)
TestProject.run_task("boundary.ex_doc_groups")
assert """
# Generated by `mix boundary.ex_doc_groups`
[
Boundary1: [Boundary1, Boundary1.Bar, Boundary1.Baz, Boundary1.Foo, Boundary1.Foo.Bar],
Boundary2: [Boundary2, Boundary2.Foo],
Boundary3: [Boundary3, Boundary3.Bar, Boundary3.Foo],
"Boundary3.InnerBoundary": [Boundary3.InnerBoundary, Boundary3.InnerBoundary.Foo]
]
""" = File.read!("boundary.exs")
end)
end
end
| 26.019802 | 102 | 0.625571 |
1c5890a463ccadb05e7f8965c91f1feae6c90ad9 | 7,613 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Sheets.V4.Model.Response do
  @moduledoc """
  A single response from an update.

  Auto-generated model: exactly one of the fields below is expected to be
  populated, matching the kind of request that produced the reply.

  ## Attributes

  *   `addBanding` (*type:* `GoogleApi.Sheets.V4.Model.AddBandingResponse.t`, *default:* `nil`) - A reply from adding a banded range.
  *   `addChart` (*type:* `GoogleApi.Sheets.V4.Model.AddChartResponse.t`, *default:* `nil`) - A reply from adding a chart.
  *   `addDimensionGroup` (*type:* `GoogleApi.Sheets.V4.Model.AddDimensionGroupResponse.t`, *default:* `nil`) - A reply from adding a dimension group.
  *   `addFilterView` (*type:* `GoogleApi.Sheets.V4.Model.AddFilterViewResponse.t`, *default:* `nil`) - A reply from adding a filter view.
  *   `addNamedRange` (*type:* `GoogleApi.Sheets.V4.Model.AddNamedRangeResponse.t`, *default:* `nil`) - A reply from adding a named range.
  *   `addProtectedRange` (*type:* `GoogleApi.Sheets.V4.Model.AddProtectedRangeResponse.t`, *default:* `nil`) - A reply from adding a protected range.
  *   `addSheet` (*type:* `GoogleApi.Sheets.V4.Model.AddSheetResponse.t`, *default:* `nil`) - A reply from adding a sheet.
  *   `createDeveloperMetadata` (*type:* `GoogleApi.Sheets.V4.Model.CreateDeveloperMetadataResponse.t`, *default:* `nil`) - A reply from creating a developer metadata entry.
  *   `deleteConditionalFormatRule` (*type:* `GoogleApi.Sheets.V4.Model.DeleteConditionalFormatRuleResponse.t`, *default:* `nil`) - A reply from deleting a conditional format rule.
  *   `deleteDeveloperMetadata` (*type:* `GoogleApi.Sheets.V4.Model.DeleteDeveloperMetadataResponse.t`, *default:* `nil`) - A reply from deleting a developer metadata entry.
  *   `deleteDimensionGroup` (*type:* `GoogleApi.Sheets.V4.Model.DeleteDimensionGroupResponse.t`, *default:* `nil`) - A reply from deleting a dimension group.
  *   `deleteDuplicates` (*type:* `GoogleApi.Sheets.V4.Model.DeleteDuplicatesResponse.t`, *default:* `nil`) - A reply from removing rows containing duplicate values.
  *   `duplicateFilterView` (*type:* `GoogleApi.Sheets.V4.Model.DuplicateFilterViewResponse.t`, *default:* `nil`) - A reply from duplicating a filter view.
  *   `duplicateSheet` (*type:* `GoogleApi.Sheets.V4.Model.DuplicateSheetResponse.t`, *default:* `nil`) - A reply from duplicating a sheet.
  *   `findReplace` (*type:* `GoogleApi.Sheets.V4.Model.FindReplaceResponse.t`, *default:* `nil`) - A reply from doing a find/replace.
  *   `trimWhitespace` (*type:* `GoogleApi.Sheets.V4.Model.TrimWhitespaceResponse.t`, *default:* `nil`) - A reply from trimming whitespace.
  *   `updateConditionalFormatRule` (*type:* `GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse.t`, *default:* `nil`) - A reply from updating a conditional format rule.
  *   `updateDeveloperMetadata` (*type:* `GoogleApi.Sheets.V4.Model.UpdateDeveloperMetadataResponse.t`, *default:* `nil`) - A reply from updating a developer metadata entry.
  *   `updateEmbeddedObjectPosition` (*type:* `GoogleApi.Sheets.V4.Model.UpdateEmbeddedObjectPositionResponse.t`, *default:* `nil`) - A reply from updating an embedded object's position.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :addBanding => GoogleApi.Sheets.V4.Model.AddBandingResponse.t(),
          :addChart => GoogleApi.Sheets.V4.Model.AddChartResponse.t(),
          :addDimensionGroup => GoogleApi.Sheets.V4.Model.AddDimensionGroupResponse.t(),
          :addFilterView => GoogleApi.Sheets.V4.Model.AddFilterViewResponse.t(),
          :addNamedRange => GoogleApi.Sheets.V4.Model.AddNamedRangeResponse.t(),
          :addProtectedRange => GoogleApi.Sheets.V4.Model.AddProtectedRangeResponse.t(),
          :addSheet => GoogleApi.Sheets.V4.Model.AddSheetResponse.t(),
          :createDeveloperMetadata =>
            GoogleApi.Sheets.V4.Model.CreateDeveloperMetadataResponse.t(),
          :deleteConditionalFormatRule =>
            GoogleApi.Sheets.V4.Model.DeleteConditionalFormatRuleResponse.t(),
          :deleteDeveloperMetadata =>
            GoogleApi.Sheets.V4.Model.DeleteDeveloperMetadataResponse.t(),
          :deleteDimensionGroup => GoogleApi.Sheets.V4.Model.DeleteDimensionGroupResponse.t(),
          :deleteDuplicates => GoogleApi.Sheets.V4.Model.DeleteDuplicatesResponse.t(),
          :duplicateFilterView => GoogleApi.Sheets.V4.Model.DuplicateFilterViewResponse.t(),
          :duplicateSheet => GoogleApi.Sheets.V4.Model.DuplicateSheetResponse.t(),
          :findReplace => GoogleApi.Sheets.V4.Model.FindReplaceResponse.t(),
          :trimWhitespace => GoogleApi.Sheets.V4.Model.TrimWhitespaceResponse.t(),
          :updateConditionalFormatRule =>
            GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse.t(),
          :updateDeveloperMetadata =>
            GoogleApi.Sheets.V4.Model.UpdateDeveloperMetadataResponse.t(),
          :updateEmbeddedObjectPosition =>
            GoogleApi.Sheets.V4.Model.UpdateEmbeddedObjectPositionResponse.t()
        }

  # One field/2 registration per attribute documented in the moduledoc above.
  field(:addBanding, as: GoogleApi.Sheets.V4.Model.AddBandingResponse)
  field(:addChart, as: GoogleApi.Sheets.V4.Model.AddChartResponse)
  field(:addDimensionGroup, as: GoogleApi.Sheets.V4.Model.AddDimensionGroupResponse)
  field(:addFilterView, as: GoogleApi.Sheets.V4.Model.AddFilterViewResponse)
  field(:addNamedRange, as: GoogleApi.Sheets.V4.Model.AddNamedRangeResponse)
  field(:addProtectedRange, as: GoogleApi.Sheets.V4.Model.AddProtectedRangeResponse)
  field(:addSheet, as: GoogleApi.Sheets.V4.Model.AddSheetResponse)
  field(:createDeveloperMetadata, as: GoogleApi.Sheets.V4.Model.CreateDeveloperMetadataResponse)

  field(
    :deleteConditionalFormatRule,
    as: GoogleApi.Sheets.V4.Model.DeleteConditionalFormatRuleResponse
  )

  field(:deleteDeveloperMetadata, as: GoogleApi.Sheets.V4.Model.DeleteDeveloperMetadataResponse)
  field(:deleteDimensionGroup, as: GoogleApi.Sheets.V4.Model.DeleteDimensionGroupResponse)
  field(:deleteDuplicates, as: GoogleApi.Sheets.V4.Model.DeleteDuplicatesResponse)
  field(:duplicateFilterView, as: GoogleApi.Sheets.V4.Model.DuplicateFilterViewResponse)
  field(:duplicateSheet, as: GoogleApi.Sheets.V4.Model.DuplicateSheetResponse)
  field(:findReplace, as: GoogleApi.Sheets.V4.Model.FindReplaceResponse)
  field(:trimWhitespace, as: GoogleApi.Sheets.V4.Model.TrimWhitespaceResponse)

  field(
    :updateConditionalFormatRule,
    as: GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse
  )

  field(:updateDeveloperMetadata, as: GoogleApi.Sheets.V4.Model.UpdateDeveloperMetadataResponse)

  field(
    :updateEmbeddedObjectPosition,
    as: GoogleApi.Sheets.V4.Model.UpdateEmbeddedObjectPositionResponse
  )
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.Response do
  # Delegates to the decode/2 generated on the model module itself.
  def decode(value, options), do: GoogleApi.Sheets.V4.Model.Response.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.Response do
  # All Gax models share the generic ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 62.917355 | 186 | 0.746486 |
1c58c34c4dc8d4a57d236401e969bf3a69384248 | 762 | exs | Elixir | apps/tools_1/rel/config.exs | WhiteRookPL/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 14 | 2017-08-09T14:21:47.000Z | 2022-03-11T04:10:49.000Z | apps/tools_1/rel/config.exs | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | null | null | null | apps/tools_1/rel/config.exs | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 15 | 2017-09-05T15:43:53.000Z | 2020-04-13T16:20:18.000Z | use Mix.Releases.Config,
default_release: :default,
default_environment: Mix.env()
cookie_dev = :"DEV_COOKIE"
environment :dev do
set dev_mode: true
set include_erts: false
set cookie: cookie_dev
set overlay_vars: [ cookie: cookie_dev ]
set vm_args: "rel/vm.args"
end
cookie_prod = :"PROD_COOKIE"
environment :prod do
set include_erts: true
set include_src: false
set cookie: cookie_prod
set overlay_vars: [ cookie: cookie_prod ]
set vm_args: "rel/vm.args"
end
release :random_server do
set version: "1.0.0"
set applications: [
sasl: :permanent,
logger: :permanent,
random_server: :permanent,
runtime_tools: :permanent,
xprof: :permanent,
recon: :permanent,
eper: :permanent,
dbg: :permanent
]
end | 21.771429 | 43 | 0.707349 |
1c58d4a44edb14920ffb9c3245a5714bdcb981b7 | 1,764 | ex | Elixir | web/models/organization.ex | roryqueue/code-corps-api | f23007e13fed2d7264fd2e2e97b1497488fb54ba | [
"MIT"
] | null | null | null | web/models/organization.ex | roryqueue/code-corps-api | f23007e13fed2d7264fd2e2e97b1497488fb54ba | [
"MIT"
] | null | null | null | web/models/organization.ex | roryqueue/code-corps-api | f23007e13fed2d7264fd2e2e97b1497488fb54ba | [
"MIT"
] | null | null | null | defmodule CodeCorps.Organization do
@moduledoc """
Represents an organization on Code Corps, e.g. "Code Corps" itself.
"""
use Arc.Ecto.Schema
use CodeCorps.Web, :model
import CodeCorps.Base64ImageUploader
import CodeCorps.ModelHelpers
import CodeCorps.Validators.SlugValidator
alias CodeCorps.SluggedRoute
schema "organizations" do
field :base64_icon_data, :string, virtual: true
field :description, :string
field :icon, CodeCorps.OrganizationIcon.Type
field :name, :string
field :slug, :string
has_one :slugged_route, SluggedRoute
has_many :projects, CodeCorps.Project
has_many :organization_memberships, CodeCorps.OrganizationMembership
has_many :members, through: [:organization_memberships, :member]
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:name, :description, :slug, :base64_icon_data])
|> validate_required([:name])
|> upload_image(:base64_icon_data, :icon)
end
@doc """
Builds a changeset for creating an organization.
"""
def create_changeset(struct, params) do
struct
|> changeset(params)
|> generate_slug(:name, :slug)
|> validate_required([:slug])
|> validate_slug(:slug)
|> put_slugged_route()
end
def index_filters(query, params) do
query |> id_filter(params)
end
defp put_slugged_route(changeset) do
case changeset do
%Ecto.Changeset{valid?: true, changes: %{slug: slug}} ->
slugged_route_changeset = SluggedRoute.create_changeset(%SluggedRoute{}, %{slug: slug})
put_assoc(changeset, :slugged_route, slugged_route_changeset)
_ ->
changeset
end
end
end
| 26.727273 | 95 | 0.696712 |
1c58d9ed79331f1106c97245429eb19cbb6610aa | 702 | ex | Elixir | lib/browser_launcher.ex | kentaro/browser_launcher | 91b96974a24ba5ac100a3e264fff118fa09f7576 | [
"MIT"
] | 1 | 2020-12-23T22:25:04.000Z | 2020-12-23T22:25:04.000Z | lib/browser_launcher.ex | kentaro/browser_launcher | 91b96974a24ba5ac100a3e264fff118fa09f7576 | [
"MIT"
] | null | null | null | lib/browser_launcher.ex | kentaro/browser_launcher | 91b96974a24ba5ac100a3e264fff118fa09f7576 | [
"MIT"
defmodule BrowserLauncher do
  @moduledoc File.read!("./README.md")

  @doc """
  Opens URL with the default browser. URL can be either remote location or local file path.

  ## Examples

      iex> BrowserLauncher.open("https://elixir-lang.org/")
      {"", 0}

      iex> BrowserLauncher.open("file:///path/to/your.html")
      {"", 0}

  """
  def open(url) do
    run(cmd(os_type()), url)
  end

  # Launcher command for the detected platform; raises on anything unknown.
  def cmd({:unix, :darwin}), do: "open"
  def cmd({:unix, _}), do: "xdg-open"
  def cmd({:win32, _}), do: "start"
  def cmd(_), do: raise("unknown platform")

  # Invoke the launcher; returns `{output, exit_status}` from System.cmd/2.
  def run(cmd, url), do: System.cmd(cmd, [url])

  defp os_type, do: :os.type()
end
| 16.714286 | 91 | 0.578348 |
1c590d81ae7dcfdf6e2bb7cf8ec95b2721253f83 | 1,637 | ex | Elixir | lib/game/command/equipment.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | lib/game/command/equipment.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | lib/game/command/equipment.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | defmodule Game.Command.Equipment do
  @moduledoc """
  The "equipment" command
  """

  use Game.Command

  alias Game.Format.Items, as: FormatItems
  alias Game.Items

  # Register "equipment" (alias "eq"); parse: false means the manual
  # parse/1 clauses below are used instead of a generated parser.
  commands([{"equipment", ["eq"]}], parse: false)

  @impl Game.Command
  def help(:topic), do: "Equipment"
  def help(:short), do: "View your character's worn equipment"
  def help(:full) do
    """
    #{help(:short)}. Similar to inventory but
    will only display items worn and wielded.
    Example:
    [ ] > {command}equipment{/command}
    """
  end

  @impl true
  def parse(command, _context), do: parse(command)

  @impl Game.Command
  @doc """
  Parse the command into arguments

      iex> Game.Command.Equipment.parse("equipment")
      {}

      iex> Game.Command.Equipment.parse("eq")
      {}

      iex> Game.Command.Equipment.parse("equipment hi")
      {:error, :bad_parse, "equipment hi"}

      iex> Game.Command.Equipment.parse("unknown")
      {:error, :bad_parse, "unknown"}
  """
  def parse(command)
  def parse("eq"), do: {}
  def parse("equipment"), do: {}

  @impl Game.Command
  @doc """
  View your character's worn equipment
  """
  def run(command, state)

  def run({}, %{socket: socket, save: %{wearing: wearing, wielding: wielding}}) do
    # Resolve each worn item instance (keyed by slot) to its full item record.
    wearing =
      wearing
      |> Enum.reduce(%{}, fn {slot, instance}, wearing ->
        Map.put(wearing, slot, Items.item(instance))
      end)

    # Resolve each wielded item instance (keyed by hand) the same way.
    wielding =
      wielding
      |> Enum.reduce(%{}, fn {hand, instance}, wielding ->
        Map.put(wielding, hand, Items.item(instance))
      end)

    # Echo the formatted equipment listing back over the player's socket.
    socket |> @socket.echo(FormatItems.equipment(wearing, wielding))
    :ok
  end
end
| 22.736111 | 82 | 0.619426 |
1c591011d054ea958e00af4d85d379bb1e46385e | 2,717 | ex | Elixir | lib/mix/tasks/conform.archive.ex | aforward-oss/conform | 794ba6ae9554b59c787632a1f5fa7a66e5c5da6e | [
"MIT"
] | null | null | null | lib/mix/tasks/conform.archive.ex | aforward-oss/conform | 794ba6ae9554b59c787632a1f5fa7a66e5c5da6e | [
"MIT"
] | null | null | null | lib/mix/tasks/conform.archive.ex | aforward-oss/conform | 794ba6ae9554b59c787632a1f5fa7a66e5c5da6e | [
"MIT"
] | 1 | 2021-07-01T09:02:19.000Z | 2021-07-01T09:02:19.000Z | defmodule Mix.Tasks.Conform.Archive do
  @moduledoc """
  Create an archive with the app.schema.ez name near the application's schema.
  An archive contains dependencies which are noted in the schema.
  """

  # Builds `<name>.schema.ez` next to the schema at `schema_path`.
  # Returns {:ok, "", []} when the schema neither imports nor extends
  # anything, otherwise {:ok, zip_path, archived_entries}.
  def run([schema_path]) do
    Mix.Tasks.Loadpaths.run([])

    curr_path = File.cwd!
    schema_dir = Path.dirname(schema_path) |> Path.expand

    # Schemas living under a "files" directory belong to the current project;
    # anything else resolves _build relative to the schema's parent directory.
    build_dir = case String.split(schema_dir, "/") |> List.last do
      "files" -> "#{curr_path}/_build/#{Mix.env}/lib"
      _ -> "#{Path.dirname(schema_dir)}/_build/#{Mix.env}/lib"
    end

    raw_schema = File.read!(schema_path) |> Conform.Schema.parse!
    imports = Keyword.get(raw_schema, :import, [])
    extends = Keyword.get(raw_schema, :extends, [])

    case {imports, extends} do
      # Nothing referenced by the schema: no archive is produced.
      {[], []} -> {:ok, "", []}
      {_, _} ->
        # Make config dir in _build, move schema files there
        archiving = Enum.reduce(extends, [], fn app, acc ->
          # Locate the dependency's source directory (:path for path deps,
          # :dest for fetched deps), dropping deps without either.
          app_path = Mix.Dep.children
                     |> Enum.filter(fn %Mix.Dep{app: app_name} -> app_name == app end)
                     |> Enum.map(fn %Mix.Dep{opts: opts} ->
                       Keyword.get(opts, :path, Keyword.get(opts, :dest))
                     end)
                     |> Enum.filter(fn nil -> false; _ -> true end)
                     |> Enum.map(fn path -> Path.expand(path) end)

          # NOTE(review): `app_path` is always a list here, so the `nil`
          # clause looks unreachable and an empty list would raise a
          # CaseClauseError — confirm intended behavior for missing deps.
          case app_path do
            nil -> []
            [app_path] ->
              src_path = Path.join([app_path, "config", "#{app}.schema.exs"])
              if File.exists?(src_path) do
                dest_path = Path.join(["#{app}", "config", "#{app}.schema.exs"])
                File.mkdir_p!(Path.join(build_dir, Path.dirname(dest_path)))
                File.cp!(src_path, Path.join(build_dir, dest_path))
                # :zip expects charlists for archive member names.
                [String.to_char_list(dest_path) | acc]
              else
                # NOTE(review): returning [] here discards previously
                # accumulated entries — should this be `acc`? Confirm.
                []
              end
          end
        end)

        File.cd! build_dir

        # Add imported application BEAM files to archive
        archiving = Enum.reduce(imports, archiving, fn app, acc ->
          path = Path.join("#{app}", "ebin")
          files = path
                  |> File.ls!
                  |> Enum.map(fn filename -> Path.join(path, filename) end)
                  |> Enum.map(&String.to_char_list/1)
          files ++ acc
        end)

        # create archive
        [archive_name|_] = String.split(Path.basename(schema_path), ".")
        archive_path = Path.join(schema_dir, "#{archive_name}.schema.ez")
        {:ok, zip_path} = :zip.create('#{archive_path}', archiving)

        # Reset current directory
        File.cd! curr_path

        # Return the path to the archive and what was archived
        {:ok, zip_path, archiving}
    end
  end
end
| 41.166667 | 86 | 0.548031 |
1c591c7a2c0e624c4d6f198facf0378bd519f17c | 6,531 | ex | Elixir | lib/mix/lib/releases/config/providers/elixir.ex | TokiTori/distillery | 9158df6d17c8b70a201ade4ca0d3f243245ceebd | [
"MIT"
] | null | null | null | lib/mix/lib/releases/config/providers/elixir.ex | TokiTori/distillery | 9158df6d17c8b70a201ade4ca0d3f243245ceebd | [
"MIT"
] | null | null | null | lib/mix/lib/releases/config/providers/elixir.ex | TokiTori/distillery | 9158df6d17c8b70a201ade4ca0d3f243245ceebd | [
"MIT"
] | null | null | null | defmodule Mix.Releases.Config.Providers.Elixir do
  @moduledoc """
  Provides loading of Elixir's `config.exs` config file.
  """
  use Mix.Releases.Config.Provider

  alias Mix.Releases.Logger

  @impl Mix.Releases.Config.Provider
  # Read the Mix config at `path` and persist it into the application env.
  # A missing file is not an error: there is simply nothing to load.
  def init([path]) do
    if File.exists?(path) do
      path
      |> Mix.Config.read!()
      |> Mix.Config.persist()
    else
      :ok
    end
  end

  @impl Mix.Releases.Config.Provider
  # Look up a config value: the first path element names the application,
  # the rest is a key path walked with get_in/2 through that app's env.
  def get([app | rest]) do
    app
    |> Application.get_all_env()
    |> get_in(rest)
  end

  @doc """
  Given a path to a config file, this function will return the quoted AST of
  that config and all configs that it imports.
  """
  def read_quoted!(file) do
    if String.contains?(file, "*") do
      {quoted, _} = do_read_quoted_wildcard!(file, [])
      quoted
    else
      {quoted, _} = do_read_quoted!(file, [])
      quoted
    end
  end

  defp do_read_quoted_wildcard!(path, loaded_paths) do
    # This has a wildcard path, so we need to walk the list
    # of files, and strip the `use Mix.Config` from all but the first,
    # and merge all of the quoted contents of those files
    {final_quoted, new_loaded_paths} =
      path
      |> Path.wildcard()
      |> Enum.reduce({nil, loaded_paths}, fn
        # First matched file: keep its quoted body as the seed accumulator.
        f, {nil, loaded_paths} ->
          # Extract the quoted body of the top-level block for merging
          {{:__block__, _, quoted}, new_loaded_paths} = do_read_quoted!(f, loaded_paths)
          {quoted, new_loaded_paths}

        f, {quoted, loaded_paths} ->
          if f in loaded_paths do
            raise ArgumentError, message: "recursive load of #{f} detected"
          end

          # Again, extract the quoted body, strip the `use`, and concat to the
          # head of the merged quoted body
          {{:__block__, _, f_quoted}, new_loaded_paths} = do_read_quoted!(f, loaded_paths)

          f_quoted =
            f_quoted
            |> Enum.reject(fn
              {:use, _, [{:__aliases__, _, [:Mix, :Config]}]} -> true
              _ -> false
            end)

          {Enum.concat(quoted, f_quoted), new_loaded_paths}
      end)

    # In the final step, reverse the quoted body so that they are in the file
    # in the order they were traversed, and wrap them all in a block
    # NOTE(review): no Enum.reverse/1 actually happens here, despite the
    # comment above — the bodies are already concatenated in traversal order.
    {{:__block__, [], final_quoted}, new_loaded_paths}
  end

  # Read one file, parse it to quoted AST, and recursively merge any
  # import_config statements it contains. `loaded_paths` guards recursion.
  defp do_read_quoted!(file, loaded_paths) do
    try do
      file = Path.expand(file)

      if file in loaded_paths do
        raise ArgumentError, message: "recursive load of #{file} detected"
      end

      content = File.read!(file)
      quoted = Code.string_to_quoted!(content, file: file, line: 1)
      merged = merge_imports(quoted, [], file, [file | loaded_paths])

      {merged, loaded_paths}
    rescue
      # Wrap any failure (parse errors, missing files) in a LoadError so the
      # offending file is reported; LoadErrors pass through unchanged.
      e in [Mix.Config.LoadError] -> reraise(e, System.stacktrace())
      e -> reraise(Mix.Config.LoadError, [file: file, error: e], System.stacktrace())
    end
  end

  # Walk the top-level statements of a config file, inlining every
  # import_config and accumulating statements in reverse into `acc`.
  defp merge_imports({:__block__, _, block}, acc, file, loaded_paths) do
    merge_imports(block, acc, file, loaded_paths)
  end

  # A file whose body is a single expression: normalize to a one-element list.
  defp merge_imports(item, acc, file, loaded_paths) when is_tuple(item) do
    merge_imports([item], acc, file, loaded_paths)
  end

  # End of statements: re-reverse the accumulator and wrap in a block.
  defp merge_imports([], acc, _file, _loaded_paths) do
    {:__block__, [], Enum.reverse(acc)}
  end

  # import_config with a literal string path: read it, strip `use Mix.Config`,
  # and splice its statements into the accumulator in place of the import.
  defp merge_imports([{:import_config, _, [path]} | block], acc, file, loaded_paths)
       when is_binary(path) do
    path = Path.join(Path.dirname(file), Path.relative_to(path, file))

    {quoted, new_loaded_paths} =
      if String.contains?(path, "*") do
        {{:__block__, _, quoted}, new_loaded_paths} = do_read_quoted_wildcard!(path, loaded_paths)
        {quoted, new_loaded_paths}
      else
        {{:__block__, _, quoted}, new_loaded_paths} = do_read_quoted!(path, loaded_paths)
        {quoted, new_loaded_paths}
      end

    new_acc =
      quoted
      |> Enum.reject(fn
        {:use, _, [{:__aliases__, _, [:Mix, :Config]}]} -> true
        _ -> false
      end)
      |> Enum.reverse()
      |> Enum.concat(acc)

    merge_imports(block, new_acc, file, new_loaded_paths)
  end

  # import_config with a dynamic path expression: evaluate the expression in
  # the context of the statements seen so far, then inline as above.
  defp merge_imports([{:import_config, _, [path_expr]} | block], acc, file, loaded_paths) do
    case eval_path(acc, path_expr) do
      {:error, err} ->
        raise Mix.Config.LoadError, file: file, error: err

      path ->
        path = Path.join(Path.dirname(file), Path.relative_to(path, file))

        {quoted, new_loaded_paths} =
          if String.contains?(path, "*") do
            {{:__block__, _, quoted}, new_loaded_paths} =
              do_read_quoted_wildcard!(path, loaded_paths)

            {quoted, new_loaded_paths}
          else
            {{:__block__, _, quoted}, new_loaded_paths} = do_read_quoted!(path, loaded_paths)
            {quoted, new_loaded_paths}
          end

        new_acc =
          quoted
          |> Enum.reject(fn
            {:use, _, [{:__aliases__, _, [:Mix, :Config]}]} -> true
            _ -> false
          end)
          |> Enum.reverse()
          |> Enum.concat(acc)

        merge_imports(block, new_acc, file, new_loaded_paths)
    end
  end

  # :kernel settings in Mix config are silently ignored by releases, so warn
  # loudly (they must go into vm.args); the statement is still kept.
  defp merge_imports([{:config, env, [:kernel | _]} = other | rest], acc, file, loaded_paths) do
    line = Keyword.get(env, :line, "N/A")
    file_path = Path.relative_to_cwd(file)

    Logger.warn(
      "Found config setting for :kernel application in Mix config!\n" <>
        "    File: #{file_path}\n" <>
        "    Line: #{line}\n" <>
        "    Any :kernel config settings need to be placed in vm.args, or they will not take effect!"
    )

    merge_imports(rest, [other | acc], file, loaded_paths)
  end

  # Any other statement is accumulated unchanged (reversed; fixed at the end).
  defp merge_imports([other | rest], acc, file, loaded_paths) do
    merge_imports(rest, [other | acc], file, loaded_paths)
  end

  # Literal paths need no evaluation.
  defp eval_path(_acc, path) when is_binary(path) do
    path
  end

  # Evaluate a dynamic path expression against the config statements seen so
  # far (minus Mix.Config macros, which cannot run outside Mix.Config).
  defp eval_path(acc, expr) do
    # Rebuild script context without Mix.Config macros
    stripped = strip_config_macros(acc, [expr])
    quoted = {:__block__, [], stripped}

    try do
      {path, _bindings} = Code.eval_quoted(quoted)
      path
    rescue
      e ->
        {:error, e}
    end
  end

  # Drop `use Mix.Config`, `config`, and `import_config` statements, keeping
  # everything else (variable bindings etc.) for path evaluation.
  defp strip_config_macros([], acc), do: acc

  defp strip_config_macros([{:use, _, [{:__aliases__, _, [:Mix, :Config]}]} | rest], acc) do
    strip_config_macros(rest, acc)
  end

  defp strip_config_macros([{type, _, _} | rest], acc) when type in [:import_config, :config] do
    strip_config_macros(rest, acc)
  end

  defp strip_config_macros([expr | rest], acc) do
    strip_config_macros(rest, [expr | acc])
  end
end
| 30.096774 | 101 | 0.616751 |
1c59a154e620151da8bf6d845fb518160b8b5443 | 12,731 | ex | Elixir | installer/lib/phx_new/generator.ex | udanieli/phoenix | 40b79c8d227eadb2af90f19b26c297a87f3b69aa | [
"MIT"
] | null | null | null | installer/lib/phx_new/generator.ex | udanieli/phoenix | 40b79c8d227eadb2af90f19b26c297a87f3b69aa | [
"MIT"
] | null | null | null | installer/lib/phx_new/generator.ex | udanieli/phoenix | 40b79c8d227eadb2af90f19b26c297a87f3b69aa | [
"MIT"
] | null | null | null | defmodule Phx.New.Generator do
@moduledoc false
import Mix.Generator
alias Phx.New.{Project}
@phoenix Path.expand("../..", __DIR__)
@phoenix_version Version.parse!(Mix.Project.config()[:version])
@callback prepare_project(Project.t()) :: Project.t()
@callback generate(Project.t()) :: Project.t()
  # Sets up a generator module: registers the accumulating :templates
  # attribute and arranges for __before_compile__/1 to compile them.
  defmacro __using__(_env) do
    quote do
      @behaviour unquote(__MODULE__)
      import Mix.Generator
      import unquote(__MODULE__)

      Module.register_attribute(__MODULE__, :templates, accumulate: true)
      @before_compile unquote(__MODULE__)
    end
  end

  # For every registered template mapping, compile a render/3 clause:
  # EEx/config templates are compiled, anything else is embedded verbatim.
  # :keep entries are directories only and get no render clause.
  defmacro __before_compile__(env) do
    root = Path.expand("../../templates", __DIR__)

    templates_ast =
      for {name, mappings} <- Module.get_attribute(env.module, :templates) do
        for {format, source, _, _} <- mappings, format != :keep do
          path = Path.join(root, source)

          if format in [:config, :prod_config, :eex] do
            compiled = EEx.compile_file(path)

            quote do
              # Recompile the generator when the template file changes.
              @external_resource unquote(path)
              @file unquote(path)
              def render(unquote(name), unquote(source), var!(assigns))
                  when is_list(var!(assigns)),
                  do: unquote(compiled)
            end
          else
            quote do
              @external_resource unquote(path)
              def render(unquote(name), unquote(source), _assigns), do: unquote(File.read!(path))
            end
          end
        end
      end

    quote do
      unquote(templates_ast)
      def template_files(name), do: Keyword.fetch!(@templates, name)
    end
  end

  # Register a named template mapping on the caller module.
  defmacro template(name, mappings) do
    quote do
      @templates {unquote(name), unquote(mappings)}
    end
  end
  # Materialize the named template set from `mod` into the project tree,
  # dispatching on each mapping's format tag.
  def copy_from(%Project{} = project, mod, name) when is_atom(name) do
    mapping = mod.template_files(name)

    for {format, source, project_location, target_path} <- mapping do
      target = Project.join_path(project, project_location, target_path)

      case format do
        # Directory placeholder only.
        :keep ->
          File.mkdir_p!(target)

        :text ->
          create_file(target, mod.render(name, source, project.binding))

        # Rendered config is merged into an existing config file rather than
        # written as a new file.
        :config ->
          contents = mod.render(name, source, project.binding)
          config_inject(Path.dirname(target), Path.basename(target), contents)

        :prod_config ->
          contents = mod.render(name, source, project.binding)
          prod_only_config_inject(Path.dirname(target), Path.basename(target), contents)

        :eex ->
          contents = mod.render(name, source, project.binding)
          create_file(target, contents)
      end
    end
  end

  # Insert `to_inject` into `path/file` right after its `use Mix.Config` or
  # `import Config` header, creating a minimal file when it does not exist.
  def config_inject(path, file, to_inject) do
    file = Path.join(path, file)

    contents =
      case File.read(file) do
        {:ok, bin} -> bin
        {:error, _} -> "import Config\n"
      end

    with :error <- split_with_self(contents, "use Mix.Config"),
         :error <- split_with_self(contents, "import Config") do
      Mix.raise(~s[Could not find "use Mix.Config" or "import Config" in #{inspect(file)}])
    else
      [left, middle, right] ->
        write_formatted!(file, [left, middle, ?\n, to_inject, right])
    end
  end

  # Insert `to_inject` inside the `if config_env() == :prod do` block of
  # `path/file`, creating a skeleton runtime config when the file is absent.
  def prod_only_config_inject(path, file, to_inject) do
    file = Path.join(path, file)

    contents =
      case File.read(file) do
        {:ok, bin} ->
          bin

        {:error, _} ->
          """
          import Config

          if config_env() == :prod do
          end
          """
      end

    case split_with_self(contents, "if config_env() == :prod do") do
      [left, middle, right] ->
        write_formatted!(file, [left, middle, ?\n, to_inject, right])

      :error ->
        Mix.raise(~s[Could not find "if config_env() == :prod do" in #{inspect(file)}])
    end
  end
defp write_formatted!(file, contents) do
formatted = contents |> IO.iodata_to_binary() |> Code.format_string!()
File.write!(file, [formatted, ?\n])
end
def inject_umbrella_config_defaults(project) do
unless File.exists?(Project.join_path(project, :project, "config/dev.exs")) do
path = Project.join_path(project, :project, "config/config.exs")
extra =
Phx.New.Umbrella.render(:new, "phx_umbrella/config/extra_config.exs", project.binding)
File.write(path, [File.read!(path), extra])
end
end
defp split_with_self(contents, text) do
case :binary.split(contents, text) do
[left, right] -> [left, text, right]
[_] -> :error
end
end
def in_umbrella?(app_path) do
umbrella = Path.expand(Path.join([app_path, "..", ".."]))
mix_path = Path.join(umbrella, "mix.exs")
apps_path = Path.join(umbrella, "apps")
File.exists?(mix_path) && File.exists?(apps_path)
end
  # Compute the full template binding for a project from its CLI options and
  # store it on the %Project{} struct. All generator templates read from this.
  def put_binding(%Project{opts: opts} = project) do
    db = Keyword.get(opts, :database, "postgres")
    ecto = Keyword.get(opts, :ecto, true)
    html = Keyword.get(opts, :html, true)
    live = Keyword.get(opts, :live, false)
    dashboard = Keyword.get(opts, :dashboard, true)
    gettext = Keyword.get(opts, :gettext, true)
    assets = Keyword.get(opts, :assets, true)
    mailer = Keyword.get(opts, :mailer, true)
    dev = Keyword.get(opts, :dev, false)
    phoenix_path = phoenix_path(project, dev)

    # We lowercase the database name because according to the
    # SQL spec, they are case insensitive unless quoted, which
    # means creating a database like FoO is the same as foo in
    # some storages.
    {adapter_app, adapter_module, adapter_config} =
      get_ecto_adapter(db, String.downcase(project.app), project.app_mod)

    pubsub_server = get_pubsub_server(project.app_mod)

    # --binary-id flows into the adapter config unless already set there.
    adapter_config =
      case Keyword.fetch(opts, :binary_id) do
        {:ok, value} -> Keyword.put_new(adapter_config, :binary_id, value)
        :error -> adapter_config
      end

    version = @phoenix_version

    binding = [
      elixir_version: elixir_version(),
      app_name: project.app,
      app_module: inspect(project.app_mod),
      root_app_name: project.root_app,
      root_app_module: inspect(project.root_mod),
      lib_web_name: project.lib_web_name,
      web_app_name: project.web_app,
      endpoint_module: inspect(Module.concat(project.web_namespace, Endpoint)),
      web_namespace: inspect(project.web_namespace),
      phoenix_github_version_tag: "v#{version.major}.#{version.minor}",
      phoenix_dep: phoenix_dep(phoenix_path, version),
      pubsub_server: pubsub_server,
      secret_key_base: random_string(64),
      signing_salt: random_string(8),
      lv_signing_salt: random_string(8),
      in_umbrella: project.in_umbrella?,
      assets: assets,
      mailer: mailer,
      ecto: ecto,
      html: html,
      live: live,
      dashboard: dashboard,
      gettext: gettext,
      adapter_app: adapter_app,
      adapter_module: adapter_module,
      adapter_config: adapter_config,
      generators: nil_if_empty(project.generators ++ adapter_generators(adapter_config)),
      namespaced?: namespaced?(project)
    ]

    %Project{project | binding: binding}
  end

  # Version string of the currently running Elixir.
  defp elixir_version do
    System.version()
  end

  # True when the module name differs from the camelized app name, i.e. the
  # user overrode the module/app naming.
  defp namespaced?(project) do
    Macro.camelize(project.app) != inspect(project.app_mod)
  end
  # Inject database configuration for dev, test, and prod runtime into the
  # generated project's config files, using the adapter config from binding.
  def gen_ecto_config(%Project{project_path: project_path, binding: binding}) do
    adapter_config = binding[:adapter_config]

    config_inject(project_path, "config/dev.exs", """
    # Configure your database
    config :#{binding[:app_name]}, #{binding[:app_module]}.Repo#{kw_to_config(adapter_config[:dev])}
    """)

    config_inject(project_path, "config/test.exs", """
    # Configure your database
    #
    # The MIX_TEST_PARTITION environment variable can be used
    # to provide built-in test partitioning in CI environment.
    # Run `mix help test` for more information.
    config :#{binding[:app_name]}, #{binding[:app_module]}.Repo#{kw_to_config(adapter_config[:test])}
    """)

    prod_only_config_inject(project_path, "config/runtime.exs", """
    #{adapter_config[:prod_variables]}
    config :#{binding[:app_name]}, #{binding[:app_module]}.Repo,
    #{adapter_config[:prod_config]}
    """)
  end

  # PubSub server name: first segment of the app module, suffixed with PubSub
  # (e.g. MyApp.Web -> MyApp.PubSub).
  defp get_pubsub_server(module) do
    module
    |> Module.split()
    |> hd()
    |> Module.concat(PubSub)
  end

  # Map the --database option onto {adapter_app, adapter_module, config}.
  defp get_ecto_adapter("mssql", app, module) do
    {:tds, Ecto.Adapters.Tds, db_config(app, module, "sa", "some!Password")}
  end

  defp get_ecto_adapter("mysql", app, module) do
    {:myxql, Ecto.Adapters.MyXQL, db_config(app, module, "root", "")}
  end

  defp get_ecto_adapter("postgres", app, module) do
    {:postgrex, Ecto.Adapters.Postgres, db_config(app, module, "postgres", "postgres")}
  end

  defp get_ecto_adapter("sqlite3", app, module) do
    {:ecto_sqlite3, Ecto.Adapters.SQLite3, db_config(app, module)}
  end

  defp get_ecto_adapter(db, _app, _mod) do
    Mix.raise("Unknown database #{inspect(db)}")
  end

  # File-based (SQLite) config: {:literal, ...} values are spliced verbatim
  # into the generated config by kw_to_config/1 instead of being inspected.
  defp db_config(app, module) do
    [
      dev: [
        database: {:literal, ~s|Path.expand("../#{app}_dev.db", Path.dirname(__ENV__.file))|},
        pool_size: 5,
        show_sensitive_data_on_connection_error: true
      ],
      test: [
        database: {:literal, ~s|Path.expand("../#{app}_test.db", Path.dirname(__ENV__.file))|},
        pool_size: 5,
        pool: Ecto.Adapters.SQL.Sandbox
      ],
      test_setup_all: "Ecto.Adapters.SQL.Sandbox.mode(#{inspect(module)}.Repo, :manual)",
      test_setup: """
      pid = Ecto.Adapters.SQL.Sandbox.start_owner!(#{inspect(module)}.Repo, shared: not tags[:async])
      on_exit(fn -> Ecto.Adapters.SQL.Sandbox.stop_owner(pid) end)\
      """,
      prod_variables: """
      database_path =
        System.get_env("DATABASE_PATH") ||
          raise \"""
          environment variable DATABASE_PATH is missing.
          For example: /etc/#{app}/#{app}.db
          \"""
      """,
      prod_config: """
      database: database_path,
      pool_size: String.to_integer(System.get_env("POOL_SIZE") || "5")
      """
    ]
  end

  # Server-based (Postgres/MySQL/MSSQL) config with username/password.
  defp db_config(app, module, user, pass) do
    [
      dev: [
        username: user,
        password: pass,
        database: "#{app}_dev",
        hostname: "localhost",
        show_sensitive_data_on_connection_error: true,
        pool_size: 10
      ],
      test: [
        username: user,
        password: pass,
        database: {:literal, ~s|"#{app}_test\#{System.get_env("MIX_TEST_PARTITION")}"|},
        hostname: "localhost",
        pool: Ecto.Adapters.SQL.Sandbox,
        pool_size: 10,
      ],
      test_setup_all: "Ecto.Adapters.SQL.Sandbox.mode(#{inspect(module)}.Repo, :manual)",
      test_setup: """
      pid = Ecto.Adapters.SQL.Sandbox.start_owner!(#{inspect(module)}.Repo, shared: not tags[:async])
      on_exit(fn -> Ecto.Adapters.SQL.Sandbox.stop_owner(pid) end)\
      """,
      prod_variables: """
      database_url =
        System.get_env("DATABASE_URL") ||
          raise \"""
          environment variable DATABASE_URL is missing.
          For example: ecto://USER:PASS@HOST/DATABASE
          \"""
      """,
      prod_config: """
      # ssl: true,
      # socket_options: [:inet6],
      url: database_url,
      pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
      """
    ]
  end
  # Render a keyword list as config lines; {:literal, v} values are emitted
  # verbatim, everything else goes through inspect/1.
  defp kw_to_config(kw) do
    Enum.map(kw, fn
      {k, {:literal, v}} -> ",\n    #{k}: #{v}"
      {k, v} -> ",\n    #{k}: #{inspect(v)}"
    end)
  end

  # Generator settings derived from the adapter config (only non-nil values).
  defp adapter_generators(adapter_config) do
    adapter_config
    |> Keyword.take([:binary_id, :migration, :sample_binary_id])
    |> Enum.filter(fn {_, value} -> not is_nil(value) end)
  end

  defp nil_if_empty([]), do: nil
  defp nil_if_empty(other), do: other

  # With --dev, depend on the local Phoenix checkout via a relative path
  # (one ".." per directory between the project and the Phoenix root).
  defp phoenix_path(%Project{} = project, true) do
    absolute = Path.expand(project.project_path)
    relative = Path.relative_to(absolute, @phoenix)

    # Path.relative_to/2 returns its input unchanged when the path is not
    # under @phoenix — which means the project is outside the checkout.
    if absolute == relative do
      Mix.raise("--dev projects must be generated inside Phoenix directory")
    end

    project
    |> phoenix_path_prefix()
    |> Path.join(relative)
    |> Path.split()
    |> Enum.map(fn _ -> ".." end)
    |> Path.join()
  end

  defp phoenix_path(%Project{}, false) do
    "deps/phoenix"
  end

  # Umbrella apps sit three directories deeper than flat projects.
  defp phoenix_path_prefix(%Project{in_umbrella?: true}), do: "../../../"
  defp phoenix_path_prefix(%Project{in_umbrella?: false}), do: ".."

  # Rendered mix.exs dependency entry for Phoenix itself.
  defp phoenix_dep("deps/phoenix", %{pre: ["dev"]}),
    do: ~s[{:phoenix, github: "phoenixframework/phoenix", override: true}]

  defp phoenix_dep("deps/phoenix", version),
    do: ~s[{:phoenix, "~> #{version}"}]

  defp phoenix_dep(path, _version),
    do: ~s[{:phoenix, path: #{inspect(path)}, override: true}]

  # Cryptographically random, URL-safe-ish string of exactly `length` chars.
  defp random_string(length),
    do: :crypto.strong_rand_bytes(length) |> Base.encode64() |> binary_part(0, length)
end
| 31.05122 | 105 | 0.628544 |
1c59b12a230616a100f2dffd91983ab90c1865ee | 1,086 | exs | Elixir | mix.exs | ericmj/logster | 49487868ab9230c1649f559d02f9e1db5ce1d0bd | [
"MIT"
] | null | null | null | mix.exs | ericmj/logster | 49487868ab9230c1649f559d02f9e1db5ce1d0bd | [
"MIT"
] | null | null | null | mix.exs | ericmj/logster | 49487868ab9230c1649f559d02f9e1db5ce1d0bd | [
"MIT"
defmodule Logster.Mixfile do
  use Mix.Project

  def project do
    [
      app: :logster,
      version: "0.8.0",
      name: "Logster",
      description: "Easily parsable single-line plain text and JSON logger for Plug and Phoenix applications",
      package: package(),
      source_url: "https://github.com/navinpeiris/logster",
      homepage_url: "https://github.com/navinpeiris/logster",
      elixir: "~> 1.2",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      docs: [extras: ["README.md"]]
    ]
  end

  def application do
    [applications: [:logger]]
  end

  defp deps do
    [
      {:plug, "~> 1.0"},
      {:poison, "~> 1.5 or ~> 2.0 or ~> 3.0"},
      {:earmark, "~> 1.0", only: :dev},
      {:ex_doc, "~> 0.14", only: :dev},
      {:mix_test_watch, "~> 0.2", only: :dev},
      {:ex_unit_notifier, "~> 0.1", only: :test}
    ]
  end

  # Hex package metadata.
  defp package do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["Navin Peiris"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/navinpeiris/logster"}
    ]
  end
end
| 26.487805 | 109 | 0.578269 |
1c59f4a56c862bfd8a73100f9f09fb3d3929d577 | 2,745 | ex | Elixir | lib/consul/event.ex | am-kantox/consul-ex | b1c5e1f44f1afe4fd6bd55010832d17b8284b547 | [
"MIT"
] | null | null | null | lib/consul/event.ex | am-kantox/consul-ex | b1c5e1f44f1afe4fd6bd55010832d17b8284b547 | [
"MIT"
] | null | null | null | lib/consul/event.ex | am-kantox/consul-ex | b1c5e1f44f1afe4fd6bd55010832d17b8284b547 | [
"MIT"
] | null | null | null | #
# The MIT License (MIT)
#
# Copyright (c) 2014-2015 Undead Labs, LLC
#
defmodule Consul.Event do
  @moduledoc """
  Client for Consul's user-event endpoints: fire events, list them, and work
  with decoded `%Consul.Event{}` structs.
  """

  alias Consul.Endpoint
  use Consul.Endpoint, handler: Consul.Handler.Base

  defstruct id: nil,
            name: nil,
            payload: nil,
            node_filter: nil,
            service_filter: nil,
            tag_filter: nil,
            version: nil,
            l_time: nil

  # FIX: the type now describes the struct itself (it was a bare map) and
  # includes the previously missing :node_filter field.
  @type t :: %__MODULE__{
          id: binary,
          name: binary,
          payload: binary | nil,
          node_filter: binary,
          service_filter: binary,
          tag_filter: binary,
          version: integer,
          l_time: integer
        }

  # URL path segments for the event endpoints.
  @event "event"
  @fire "fire"
  @list "list"

  @doc """
  Fire a named event, optionally with a binary payload and request options.
  """
  @spec fire(binary, binary, Keyword.t()) :: Endpoint.response()
  def fire(name, payload \\ "", opts \\ []) when is_binary(payload) do
    build_url([@event, @fire, name], opts)
    |> req_put(payload)
  end

  @doc """
  Build a list of `Consul.Event` from the given `Consul.Response`.
  """
  @spec from_response(Consul.Response.t()) :: [t]
  def from_response(%{body: body}) when is_list(body) do
    Enum.map(body, &build_event/1)
  end

  @doc """
  Return the l_time of the most recent `Consul.Event` in the given list.
  """
  @spec last_time([t]) :: integer | nil
  def last_time([]), do: nil

  def last_time(events) do
    [%__MODULE__{l_time: time} | _] = sort(events)
    time
  end

  @doc """
  List events known to the agent.
  """
  @spec list(Keyword.t()) :: Endpoint.response()
  def list(opts \\ []) do
    build_url([@event, @list], opts)
    |> req_get()
  end

  @doc """
  Sort a list of `Consul.Event` by their `l_time` field, most recent first.
  """
  @spec sort([t]) :: [t]
  def sort(events) do
    Enum.sort(events, &(&1.l_time > &2.l_time))
  end

  #
  # Private
  #

  # Convert one raw JSON event map from Consul into a %Consul.Event{}.
  #
  # FIX: the two original clauses were identical except for how "Payload" was
  # handled (nil vs. base64-encoded); they are collapsed into one clause with
  # the decoding isolated in decode_payload/1.
  defp build_event(%{
         "ID" => id,
         "Name" => name,
         "Payload" => payload,
         "NodeFilter" => node_filter,
         "ServiceFilter" => service_filter,
         "TagFilter" => tag_filter,
         "Version" => version,
         "LTime" => l_time
       }) do
    %Consul.Event{
      id: id,
      name: name,
      payload: decode_payload(payload),
      node_filter: node_filter,
      service_filter: service_filter,
      tag_filter: tag_filter,
      version: version,
      l_time: l_time
    }
  end

  # Consul base64-encodes event payloads; an absent payload arrives as nil.
  defp decode_payload(nil), do: nil
  defp decode_payload(payload), do: :base64.decode(payload)
end
| 22.68595 | 72 | 0.554827 |
1c59fd8934c3a821eb100b1c2ba68ee8f51cdd53 | 2,080 | ex | Elixir | lib/couch_normalizer/scenario/field_methods.ex | Zatvobor/couch_normalizer | 1bbde7b7235220c7e2aed9924e3781f423175afd | [
"Apache-2.0"
] | 1 | 2015-02-23T14:25:10.000Z | 2015-02-23T14:25:10.000Z | lib/couch_normalizer/scenario/field_methods.ex | Zatvobor/couch_normalizer | 1bbde7b7235220c7e2aed9924e3781f423175afd | [
"Apache-2.0"
] | 2 | 2015-03-05T17:02:38.000Z | 2015-03-30T10:33:29.000Z | lib/couch_normalizer/scenario/field_methods.ex | Zatvobor/couch_normalizer | 1bbde7b7235220c7e2aed9924e3781f423175afd | [
"Apache-2.0"
] | null | null | null | defmodule CouchNormalizer.Scenario.FieldMethods do
@moduledoc """
This module provides convenience for getting/creating/removing/updating fields
from current or second dbs.
"""
  @doc false
  # Read `name` from the implicit `body` binding of the calling scenario.
  defmacro field(name) do
    quote do: field(var!(body), unquote(name))
  end

  @doc false
  # Read `name` from an explicit `body`; a :not_found body yields nil.
  defmacro field(body, name) do
    quote do
      { body, name } = { unquote(body), unquote(name) }
      case body do
        :not_found -> nil
        _ -> body[to_binary(name)]
      end
    end
  end

  @doc false
  # Delete `name` from the implicit `body`, rebinding `body` in the caller.
  defmacro remove_field(name) do
    quote do: var!(body) = remove_field(var!(body), unquote(name))
  end

  @doc false
  defmacro remove_field(body, name) do
    quote do
      { body, name } = { unquote(body), unquote(name) }
      HashDict.delete(body, to_binary(name))
    end
  end

  @doc false
  # Expands to one remove_field/1 call per name.
  # NOTE(review): `lc ... inlist` is pre-1.0 Elixir comprehension syntax,
  # as are HashDict/to_binary elsewhere in this module.
  defmacro remove_fields(names) do
    removers = lc name inlist names, do: quote(do: remove_field(unquote(name)))
    quote do: (unquote_splicing(removers))
  end

  @doc false
  # Move the value stored under `name` to `new_name` (no-op when absent).
  defmacro rename_field(name, new_name) do
    quote do
      { name, new_name } = { unquote(name), unquote(new_name) }
      update_field(name, new_name, field(var!(body), name))
    end
  end

  @doc false
  # Overwrite `name` only when it already exists in the body.
  defmacro update_field(name, value) do
    quote do
      { name, value } = { unquote(name), unquote(value) }
      if field(name), do: create_field(name, value)
    end
  end

  @doc false
  # Replace `name` with `new_name` carrying `value`, when `name` exists.
  defmacro update_field(name, new_name, value) do
    quote do
      { name, new_name, value } = { unquote(name), unquote(new_name), unquote(value) }
      if field(name) do
        remove_field(name)
        create_field(new_name, value)
      end
    end
  end

  @doc false
  # Put `value` under `name`, rebinding the implicit `body`.
  defmacro create_field(name, value) do
    quote do
      { name, value } = { unquote(name), unquote(value) }
      var!(body) = create_field(var!(body), name, value)
    end
  end

  @doc false
  defmacro create_field(body, name, value) do
    quote do
      { body, name, value } = { unquote(body), unquote(name), unquote(value)}
      HashDict.put(body, to_binary(name), value)
    end
  end
end | 24.470588 | 86 | 0.636538 |
1c5a80f4312c1e492c3b8b9da7ea3b53fb4dd468 | 1,272 | ex | Elixir | apps/ui/lib/ui/channels/user_socket.ex | solomonhawk/gifs-to-gifs-reborn | fbe829a73bba45dc015eb091e9016d48a422bd9d | [
"MIT"
] | 3 | 2020-05-01T17:14:23.000Z | 2021-05-15T16:22:43.000Z | apps/ui/lib/ui/channels/user_socket.ex | solomonhawk/gifs-to-gifs-reborn | fbe829a73bba45dc015eb091e9016d48a422bd9d | [
"MIT"
] | 13 | 2019-08-14T15:44:24.000Z | 2021-09-01T20:17:55.000Z | apps/ui/lib/ui/channels/user_socket.ex | solomonhawk/gifs-to-gifs-reborn | fbe829a73bba45dc015eb091e9016d48a422bd9d | [
"MIT"
defmodule Ui.UserSocket do
  use Phoenix.Socket

  ## Channels
  channel "games:*", Ui.GameChannel

  # Socket params come from the client and are used to authenticate the
  # connecting user. On success the verified player is placed into the socket
  # assigns for all channels; returning :error denies the connection.
  # See `Phoenix.Token` for details on token verification.
  def connect(%{"token" => token}, socket) do
    case Phoenix.Token.verify(socket, "secret salt", token, max_age: 86400) do
      {:ok, player} -> {:ok, assign(socket, :current_player, player)}
      {:error, _reason} -> :error
    end
  end

  # Connections without a token are rejected outright.
  def connect(_params, _socket), do: :error

  # Socket ids are topics identifying all sockets of a given user, e.g.
  # `def id(socket), do: "user_socket:#{socket.assigns.user_id}"`, which would
  # allow broadcasting a "disconnect" to terminate all of that user's sockets
  # and channels. Returning nil keeps this socket anonymous.
  def id(_socket), do: nil
end
| 30.285714 | 83 | 0.669025 |
1c5aaf71d69be8aa0547deb207390933c2e77784 | 3,181 | exs | Elixir | test/combinators_test.exs | qubbit/vortex | 2c73bd5ab1c5173db1e9e784e9d41ca787ee4613 | [
"MIT"
] | null | null | null | test/combinators_test.exs | qubbit/vortex | 2c73bd5ab1c5173db1e9e784e9d41ca787ee4613 | [
"MIT"
] | null | null | null | test/combinators_test.exs | qubbit/vortex | 2c73bd5ab1c5173db1e9e784e9d41ca787ee4613 | [
"MIT"
] | null | null | null | defmodule CombinatorTest do
  use ExUnit.Case
  import Combinators
  doctest Combinators

  # Each combinator returns a parser function applied to a `State`; a
  # truthy result is a successful parse, `nil` signals failure.

  describe "str" do
    test "recognizes simple strings" do
      input = State.new("hello world")
      hello = str("hello")
      world = str("world")
      assert hello.(input)
      assert nil == world.(input)
    end

    test "recognizes strings one after another" do
      input = State.new("hello world")
      hello = str("hello")
      world = str(" world")
      # a successful parse also yields the remaining state for chaining
      assert {_, new_state} = hello.(input)
      assert world.(new_state)
    end
  end

  describe "char" do
    test "recognizes character by pattern" do
      input = State.new("7+8")
      digit = char("0-9")
      something_else = char("a")
      assert digit.(input)
      assert nil == something_else.(input)
    end

    test "recognizes character by pattern one after another" do
      input = State.new("7+8")
      digit = char("0-9")
      plus = char("+")
      assert {_, new_state} = digit.(input)
      assert plus.(new_state)
    end
  end

  describe "seq" do
    test "recognizes sequence of parsers" do
      input = State.new("7+8")
      addition = seq([char("1-9"), str("+"), char("0-9")])
      assert addition.(input)
    end
  end

  describe "rep" do
    test "recognizes positive integer number of repetitions" do
      input = State.new("vor7ex")
      repetition = rep(char("a-z"), 1)
      assert repetition.(input)
    end

    test "fails when minimum number of repetitions are not satisfied" do
      input = State.new("vor7ex")
      # only "vor" matches a-z before the "7", so a minimum of 4 fails
      repetition = rep(char("a-z"), 4)
      assert nil == repetition.(input)
    end

    test "fails when starting string is empty and repetitions is not, with n > 0" do
      input = State.new("")
      repetition = rep(char(" "), 1)
      assert nil == repetition.(input)
    end

    test "recognizes zero repetitions" do
      input = State.new("")
      repetition = rep(char(" "), 0)
      assert repetition.(input)
    end

    test "recognizes one repetition" do
      input = State.new(" ")
      repetition = rep(char(" "), 1)
      assert repetition.(input)
    end
  end

  describe "alt" do
    test "recognizes alternatives" do
      input = State.new("2")
      alpha = char("a-z")
      digit = char("0-9")
      alts = alt([alpha, digit])
      assert alts.(input)
    end

    test "if non match returns nil" do
      input = State.new("hello world")
      alpha7 = char("a-g")
      digit = char("0-9")
      alts = alt([alpha7, digit])
      assert nil == alts.(input)
    end
  end

  describe "combination of fundamental combinators" do
    test "simple expression grammar" do
      # grammar: expression := addition | natural
      #          addition   := natural ws "+" ws natural
      ws = rep(str(" "), 0)
      natural_number = alt([str("0"), seq([char("1-9"), rep(char("0-9"), 0)])])
      addition = seq([natural_number, ws, str("+"), ws, natural_number])
      expression = alt([addition, natural_number])

      assert expression.(State.new("05"))
      assert expression.(State.new("50"))
      assert expression.(State.new("050"))
      assert expression.(State.new("5"))
      assert expression.(State.new("5+7"))
      assert expression.(State.new("5 + 7"))
      assert expression.(State.new("5 + 7"))
    end
  end
end
| 26.508333 | 84 | 0.593524 |
1c5ab49d8110a3049dcb2788ec1de5f4accf4412 | 578 | exs | Elixir | exercises/practice/word-count/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/word-count/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/word-count/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule WordCount.MixProject do
  use Mix.Project

  # Mix project definition for the word-count exercise.
  def project do
    [
      app: :word_count,
      version: "0.1.0",
      # elixir: "~> 1.8",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      # {:dep_from_hexpm, "~> 0.3.0"},
      # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
    ]
  end
end
| 19.931034 | 87 | 0.577855 |
1c5ad5274f8ab585f2fecd1c42b2212ac724a228 | 1,649 | exs | Elixir | apps/ecto_gen_error_reproduce_web/mix.exs | TheMaikXX/ecto_gen_error_reproduce_umbrella | 14af47d24b17a9478c4a67715995fa45fc9cc02a | [
"MIT"
] | null | null | null | apps/ecto_gen_error_reproduce_web/mix.exs | TheMaikXX/ecto_gen_error_reproduce_umbrella | 14af47d24b17a9478c4a67715995fa45fc9cc02a | [
"MIT"
] | null | null | null | apps/ecto_gen_error_reproduce_web/mix.exs | TheMaikXX/ecto_gen_error_reproduce_umbrella | 14af47d24b17a9478c4a67715995fa45fc9cc02a | [
"MIT"
] | null | null | null | defmodule EctoGenErrorReproduceWeb.MixProject do
  use Mix.Project

  def project do
    [
      app: :ecto_gen_error_reproduce_web,
      version: "0.1.0",
      # the ../../ paths share build artifacts across the umbrella apps
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.7",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {EctoGenErrorReproduceWeb.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.5.8"},
      {:phoenix_ecto, "~> 4.0"},
      {:phoenix_live_dashboard, "~> 0.4"},
      {:telemetry_metrics, "~> 0.4"},
      {:telemetry_poller, "~> 0.4"},
      {:gettext, "~> 0.11"},
      {:ecto_gen_error_reproduce, in_umbrella: true},
      {:jason, "~> 1.0"},
      {:plug_cowboy, "~> 2.0"}
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      setup: ["deps.get"],
      test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
    ]
  end
end
| 26.596774 | 67 | 0.59248 |
1c5b55c3e531b03484ae33fdc96e83fe56ca1c51 | 2,450 | ex | Elixir | lib/mix/tasks/g.ex | dominikknafelj/arc | f8850cafae821d0c00b6f6803c5196e7bbc1e332 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/g.ex | dominikknafelj/arc | f8850cafae821d0c00b6f6803c5196e7bbc1e332 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/g.ex | dominikknafelj/arc | f8850cafae821d0c00b6f6803c5196e7bbc1e332 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Arc do
defmodule G do
use Mix.Task
import Mix.Generator
import Mix.Utils, only: [camelize: 1, underscore: 1]
@shortdoc "For Arc definition generation code"
@moduledoc """
A task for generating arc uploader modules.
"""
def run([model_name]) do
app_name = Mix.Project.config[:app]
project_module_name = camelize(to_string(app_name))
generate_uploader_file(model_name, project_module_name)
end
def run(_) do
IO.puts "Incorrect syntax. Please try mix arc.g <model_name>"
end
defp generate_uploader_file(model_name, project_module_name) do
model_destination = Path.join(System.cwd(), "/web/uploaders/#{underscore(model_name)}.ex")
create_file model_destination, uploader_template(
model_name: model_name,
uploader_model_name: Module.concat(project_module_name, camelize(model_name))
)
end
embed_template :uploader, """
defmodule <%= inspect @uploader_model_name %> do
use Arc.Definition
# Include ecto support (requires package arc_ecto installed):
# use Arc.Ecto.Definition
@versions [:original]
# To add a thumbnail version:
# @versions [:original, :thumb]
# Whitelist file extensions:
# def validate({file, _}) do
# ~w(.jpg .jpeg .gif .png) |> Enum.member?(Path.extname(file.file_name))
# end
# Define a thumbnail transformation:
# def transform(:thumb, _) do
# {:convert, "-strip -thumbnail 250x250^ -gravity center -extent 250x250 -format png", :png}
# end
# Override the persisted filenames:
# def filename(version, _) do
# version
# end
# Override the storage directory:
# def storage_dir(version, {file, scope}) do
# "uploads/user/avatars/\#{scope.id}"
# end
# Provide a default URL if there hasn't been a file uploaded
# def default_url(version, scope) do
# "/images/avatars/default_\#{version}.png"
# end
# Specify custom headers for s3 objects
# Available options are [:cache_control, :content_disposition,
# :content_encoding, :content_length, :content_type,
# :expect, :expires, :storage_class, :website_redirect_location]
#
# def s3_object_headers(version, {file, scope}) do
# [content_type: Plug.MIME.path(file.file_name)]
# end
end
"""
end
end
| 30.246914 | 100 | 0.644898 |
1c5bb10a100956cb7d41b6ae3b68052c44ec0a44 | 92 | exs | Elixir | test/coophub/cache_warmer_test.exs | peth-yursick/coophub | 27ef7ffcdace4eb377535f8f1caa47490b45ef22 | [
"MIT"
] | 49 | 2019-12-11T16:40:52.000Z | 2022-01-14T12:44:14.000Z | test/coophub/cache_warmer_test.exs | peth-yursick/coophub | 27ef7ffcdace4eb377535f8f1caa47490b45ef22 | [
"MIT"
] | 47 | 2019-12-12T20:03:10.000Z | 2021-10-14T14:25:00.000Z | test/coophub/cache_warmer_test.exs | peth-yursick/coophub | 27ef7ffcdace4eb377535f8f1caa47490b45ef22 | [
"MIT"
] | 28 | 2019-12-11T17:15:03.000Z | 2021-09-17T09:19:44.000Z | defmodule Coophub.CacheWarmerTest do
  use Coophub.DataCase

  # Placeholder: no cache-warmer assertions have been written yet.
  describe "WIP!" do
  end
end
| 13.142857 | 36 | 0.76087 |
1c5beacbbfdae20baf0aa22102216d4ca012786f | 529 | ex | Elixir | lib/remote_retro_web/channels/presence_utils.ex | birkand/remote_retro | f7fef13aa8779cd4a8a9fd73d2fbbd5ae537634c | [
"MIT"
] | null | null | null | lib/remote_retro_web/channels/presence_utils.ex | birkand/remote_retro | f7fef13aa8779cd4a8a9fd73d2fbbd5ae537634c | [
"MIT"
] | null | null | null | lib/remote_retro_web/channels/presence_utils.ex | birkand/remote_retro | f7fef13aa8779cd4a8a9fd73d2fbbd5ae537634c | [
"MIT"
defmodule RemoteRetroWeb.PresenceUtils do
  @moduledoc """
  Helpers for retro user presence.
  """

  alias RemoteRetroWeb.Presence
  alias RemoteRetro.User
  alias Phoenix.Token

  # Verifies the socket's user token (valid for one day) and, on
  # success, tracks the user in Presence stamped with the current OS
  # system time. A stale or invalid token is only reported.
  def track_timestamped(%{assigns: assigns} = socket) do
    verification = Token.verify(socket, "user", assigns.user_token, max_age: 86_400)

    case verification do
      {:ok, user} ->
        stamped = %User{user | online_at: :os.system_time}
        Presence.track(socket, assigns.user_token, stamped)

      {:error, _} ->
        IO.puts "Stale or invalid token"
    end
  end
end
| 27.842105 | 77 | 0.674858 |
1c5bf15f04023e42bd9ea58e578f0bd4d418d4fa | 1,529 | exs | Elixir | apps/commuter_rail_boarding/test/trip_cache_test.exs | mbta/commuter_rail_boarding | 213eb4ac72e5c678b06f3298e98c36b9a9dbd1ff | [
"MIT"
] | 1 | 2022-01-30T20:53:07.000Z | 2022-01-30T20:53:07.000Z | apps/commuter_rail_boarding/test/trip_cache_test.exs | mbta/commuter_rail_boarding | 213eb4ac72e5c678b06f3298e98c36b9a9dbd1ff | [
"MIT"
] | 47 | 2021-05-05T10:31:05.000Z | 2022-03-30T22:18:14.000Z | apps/commuter_rail_boarding/test/trip_cache_test.exs | mbta/commuter_rail_boarding | 213eb4ac72e5c678b06f3298e98c36b9a9dbd1ff | [
"MIT"
defmodule TripCacheTest do
  @moduledoc false
  use ExUnit.Case
  import TripCache

  @route_id "CR-Worcester"
  # NOTE(review): currently unused by any test in this module
  @direction_id 1
  # need a roughly-current date in order to look it up in the API
  # (module attributes are evaluated at compile time, so this is the
  # compile-time clock — close enough for these lookups)
  @datetime DateTime.utc_now()

  describe "route_direction_id/1" do
    test "returns :error for an invalid trip" do
      assert :error = route_direction_id("made up trip")
    end
  end

  describe "trip_name_headsign/1" do
    test "returns :error for an invalid trip" do
      assert trip_name_headsign("") == :error
      assert trip_name_headsign("made up trip") == :error
    end
  end

  describe "route_trip_name_to_id/3" do
    test "returns an error if we can't match the name" do
      assert :error == route_trip_name_to_id(@route_id, "not a trip", @datetime)
    end

    test "correctly finds a trip ID based on the date passed in" do
      # find the next Saturday
      day_of_week = @datetime |> DateHelpers.service_date() |> Date.day_of_week()
      unix = DateTime.to_unix(@datetime)
      unix_saturday = unix + 86_400 * (6 - day_of_week)
      # underscore-prefixed: computed but never asserted against, which
      # previously raised an "unused variable" compiler warning
      _saturday = DateTime.from_unix!(unix_saturday)

      assert route_trip_name_to_id("CR-Worcester", "0000", @datetime) == :error
    end
  end

  describe "handle_info(:timeout)" do
    test "clears the table and reschedules for the next day" do
      timeout = :timer.seconds(DateHelpers.seconds_until_next_service_date())
      assert {:noreply, :state, ^timeout} = handle_info(:timeout, :state)
      assert :ets.info(TripCache.Table, :size) == 0
    end
  end
end
| 31.204082 | 81 | 0.690647 |
1c5c2239cfa40de7bdd132fcf67a95858635f3fb | 705 | ex | Elixir | lib/eltix_web/gettext.ex | evanbattaglia/eltix | 0e594b91d877c027d8f473abb3dcb2b617129976 | [
"MIT"
] | null | null | null | lib/eltix_web/gettext.ex | evanbattaglia/eltix | 0e594b91d877c027d8f473abb3dcb2b617129976 | [
"MIT"
] | null | null | null | lib/eltix_web/gettext.ex | evanbattaglia/eltix | 0e594b91d877c027d8f473abb3dcb2b617129976 | [
"MIT"
] | null | null | null | defmodule EltixWeb.Gettext do
  @moduledoc """
  A module providing Internationalization with a gettext-based API.

  By using [Gettext](https://hexdocs.pm/gettext),
  your module gains a set of macros for translations, for example:

      import EltixWeb.Gettext

      # Simple translation
      gettext("Here is the string to translate")

      # Plural translation
      ngettext("Here is the string to translate",
               "Here are the strings to translate",
               3)

      # Domain-based translation
      dgettext("errors", "Here is the error message to translate")

  See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
  """
  # gettext macros configured against the :eltix OTP app.
  use Gettext, otp_app: :eltix
end
| 28.2 | 72 | 0.675177 |
1c5c3c93622c45d422c303d411eef611368022ef | 27 | exs | Elixir | .dialyzer_ignore.exs | XiaoTeTech/instream | be469d2d8f94c9537a64b052b2dfd583c784d086 | [
"Apache-2.0"
] | 204 | 2015-06-04T06:38:57.000Z | 2022-01-30T08:56:33.000Z | .dialyzer_ignore.exs | XiaoTeTech/instream | be469d2d8f94c9537a64b052b2dfd583c784d086 | [
"Apache-2.0"
] | 71 | 2015-05-24T02:08:28.000Z | 2022-01-10T17:13:27.000Z | .dialyzer_ignore.exs | XiaoTeTech/instream | be469d2d8f94c9537a64b052b2dfd583c784d086 | [
"Apache-2.0"
] | 41 | 2015-09-05T21:24:04.000Z | 2021-11-10T02:52:48.000Z | [
  # presumably suppresses Dialyzer warnings for paths containing
  # "/nimble_csv/" (i.e. the nimble_csv dependency) — verify against the
  # project's dialyxir configuration
  ~r|.*/nimble_csv/.*|
]
| 6.75 | 22 | 0.407407 |
1c5c45e308e2e5c77089b6fcc06263f4ddbb331d | 506 | ex | Elixir | 2016/day13/lib/day13/cubicle.ex | matt-thomson/advent-of-code | feff903151284240a9d3f0c84cdfe52d8d11ef06 | [
"MIT"
] | null | null | null | 2016/day13/lib/day13/cubicle.ex | matt-thomson/advent-of-code | feff903151284240a9d3f0c84cdfe52d8d11ef06 | [
"MIT"
] | null | null | null | 2016/day13/lib/day13/cubicle.ex | matt-thomson/advent-of-code | feff903151284240a9d3f0c84cdfe52d8d11ef06 | [
"MIT"
defmodule Day13.Cubicle do
  @moduledoc """
  Functions for working with cubicles (Advent of Code 2016, day 13).
  """

  @doc """
  Returns the in-building orthogonal neighbours of `{x, y}`.

  Coordinates with a negative component lie outside the building and
  are filtered out.
  """
  def neighbours({x, y}) do
    [{x - 1, y}, {x + 1, y}, {x, y - 1}, {x, y + 1}]
    |> Enum.filter(&inside_building?/1)
  end

  @doc """
  Returns `true` when the cubicle at `{x, y}` is a wall for the given
  puzzle `input`.

  A cell is a wall when the binary representation of
  `x*x + 3*x + 2*x*y + y + y*y + input` contains an odd number of 1 bits.
  """
  def wall?({x, y}, input) do
    value = x * x + 3 * x + 2 * x * y + y + y * y + input

    # Popcount via Integer.digits/2; the previous implementation went
    # through Integer.to_string/2 and the deprecated (since removed)
    # Kernel.to_char_list/1.
    ones =
      value
      |> Integer.digits(2)
      |> Enum.count(&(&1 == 1))

    rem(ones, 2) == 1
  end

  # A coordinate is inside the building when both components are non-negative.
  defp inside_building?({x, y}), do: x >= 0 && y >= 0
end
| 21.083333 | 57 | 0.494071 |
1c5c71afe10597a2742096456e7775aa9022fd73 | 1,752 | ex | Elixir | lib/cloudflare_stream/tus/client/head.ex | reetou/cloudflare_stream_ex | eb8715d788a77e0def5d47a9ee336fd1374fe209 | [
"MIT"
] | null | null | null | lib/cloudflare_stream/tus/client/head.ex | reetou/cloudflare_stream_ex | eb8715d788a77e0def5d47a9ee336fd1374fe209 | [
"MIT"
] | null | null | null | lib/cloudflare_stream/tus/client/head.ex | reetou/cloudflare_stream_ex | eb8715d788a77e0def5d47a9ee336fd1374fe209 | [
"MIT"
defmodule CloudflareStream.TusClient.Head do
  @moduledoc false
  alias CloudflareStream.TusClient.Utils

  require Logger

  # Issues a HEAD request against a tus upload URL and normalises the
  # response into `{:ok, %{upload_offset: _, upload_length: _}}` or an
  # `{:error, reason}` tuple.
  def request(url, headers \\ [], opts \\ []) do
    response = HTTPoison.head(url, headers, Utils.httpoison_opts([], opts))
    parse(response)
  end

  # 200/204 responses carry the upload metadata in their headers.
  defp parse({:ok, %{status_code: status} = resp}) when status in [200, 204] do
    process(resp)
  end

  # These statuses mean the upload is unknown or gone.
  defp parse({:ok, %{status_code: status}}) when status in [403, 404, 410] do
    {:error, :not_found}
  end

  defp parse({:ok, resp}) do
    Logger.error("HEAD response not handled: #{inspect(resp)}")
    {:error, :generic}
  end

  defp parse({:error, err}) do
    Logger.error("HEAD request failed: #{inspect(err)}")
    {:error, :transport}
  end

  # A response without any headers cannot satisfy the preconditions.
  defp process(%{headers: []}), do: {:error, :preconditions}

  defp process(%{headers: headers}) do
    with {:ok, offset} <- get_upload_offset(headers),
         :ok <- ensure_no_cache(headers),
         {:ok, len} <- get_upload_len(headers) do
      {:ok, %{upload_offset: offset, upload_length: len}}
    else
      {:error, :no_offset} -> {:error, :preconditions}
      {:error, :wrong_cache} -> {:error, :preconditions}
    end
  end

  # "upload-length" is optional; absence maps to a nil length.
  defp get_upload_len(headers) do
    header = Utils.get_header(headers, "upload-length")
    if is_binary(header), do: {:ok, String.to_integer(header)}, else: {:ok, nil}
  end

  # "upload-offset" is mandatory for a usable HEAD response.
  defp get_upload_offset(headers) do
    header = Utils.get_header(headers, "upload-offset")
    if is_binary(header), do: {:ok, String.to_integer(header)}, else: {:error, :no_offset}
  end

  # The response must declare "cache-control: no-store".
  defp ensure_no_cache(headers) do
    if Utils.get_header(headers, "cache-control") == "no-store" do
      :ok
    else
      {:error, :wrong_cache}
    end
  end
end
| 25.028571 | 79 | 0.612443 |
1c5cabbf4d66b6d17cdd89220e280ea2e65c7787 | 754 | ex | Elixir | lib/beacon_web/live_helpers.ex | treble37/beacon | 12cab02ec8bb072582ffaab60d9f21a65588e392 | [
"MIT"
] | 98 | 2021-12-22T04:37:58.000Z | 2022-02-27T22:53:25.000Z | lib/beacon_web/live_helpers.ex | treble37/beacon | 12cab02ec8bb072582ffaab60d9f21a65588e392 | [
"MIT"
] | 5 | 2021-12-24T01:10:11.000Z | 2022-03-25T17:35:37.000Z | lib/beacon_web/live_helpers.ex | treble37/beacon | 12cab02ec8bb072582ffaab60d9f21a65588e392 | [
"MIT"
defmodule BeaconWeb.LiveHelpers do
  import Phoenix.LiveView.Helpers

  @doc """
  Renders a component inside the `BeaconWeb.PageManagement.ModalComponent` component.

  The rendered modal receives a `:return_to` option to properly update
  the URL when the modal is closed.

  ## Examples

      <%= live_modal BeaconWeb.PageManagement.PageLive.FormComponent,
        id: @page.id || :new,
        action: @live_action,
        page: @page,
        return_to: Routes.page_index_path(@socket, :index) %>
  """
  def live_modal(component, opts) do
    # :return_to is mandatory; fail loudly if the caller forgot it.
    return_to = Keyword.fetch!(opts, :return_to)

    live_component(
      BeaconWeb.PageManagement.ModalComponent,
      id: :modal,
      return_to: return_to,
      component: component,
      opts: opts
    )
  end
end
| 31.416667 | 85 | 0.709549 |
1c5cb1e482eb064834c76f907abf44df2ea0c7a0 | 804 | ex | Elixir | util/util.ex | mason-bially/aoc-2021 | 866ffc7e85f690c2ad9195da2304bfb515819023 | [
"MIT"
] | null | null | null | util/util.ex | mason-bially/aoc-2021 | 866ffc7e85f690c2ad9195da2304bfb515819023 | [
"MIT"
] | null | null | null | util/util.ex | mason-bially/aoc-2021 | 866ffc7e85f690c2ad9195da2304bfb515819023 | [
"MIT"
defmodule Util do
  # Chunk-step callback for `Stream.chunk_while/4`: a comma flushes the
  # token collected so far, any other grapheme is appended to it.
  def parse_comma_while(grapheme, token) do
    if grapheme == "," do
      {:cont, token, ""}
    else
      {:cont, token <> grapheme}
    end
  end

  # Chunk-finalise callback: emit a pending trailing token, if any.
  def parse_after_emit(""), do: {:cont, ""}
  def parse_after_emit(token), do: {:cont, token, ""}

  # Splits a stream of graphemes on commas, yielding the tokens between them.
  def stream_comma_seperated(enum) do
    Stream.chunk_while(enum, "", &parse_comma_while/2, &parse_after_emit/1)
  end

  # Taxicab distance between two `{x, y}` points.
  def manhatten_distance({x1, y1}, {x2, y2}) do
    abs(x1 - x2) + abs(y1 - y2)
  end

  # All orderings of `list`; `List.delete/2` removes the first occurrence
  # only, matching the original `list -- [elem]` behaviour on duplicates.
  def permutations([]), do: [[]]

  def permutations(list) do
    Enum.flat_map(list, fn head ->
      for tail <- permutations(List.delete(list, head)), do: [head | tail]
    end)
  end

  # Sends `{key, value}` to `dest`; returns the message, as `send/2` does.
  def result_send(value, dest, key) do
    send(dest, {key, value})
  end

  # Increments the tally for `index` in the accumulator map.
  def counter(index, acc) do
    Map.update(acc, index, 1, &(&1 + 1))
  end
end
| 19.142857 | 75 | 0.559701 |
1c5cbbf2e7d543ac84f24e078cd4c16fb87654f7 | 2,904 | ex | Elixir | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/schedule.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/schedule.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/schedule.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.StorageTransfer.V1.Model.Schedule do
  @moduledoc """
  Transfers can be scheduled to recur or to run just once.

  ## Attributes

  *   `scheduleEndDate` (*type:* `GoogleApi.StorageTransfer.V1.Model.Date.t`, *default:* `nil`) - The last day the recurring transfer will be run. If `scheduleEndDate`
      is the same as `scheduleStartDate`, the transfer will be executed only
      once.
  *   `scheduleStartDate` (*type:* `GoogleApi.StorageTransfer.V1.Model.Date.t`, *default:* `nil`) - Required. The first day the recurring transfer is scheduled to run. If
      `scheduleStartDate` is in the past, the transfer will run for the first
      time on the following day.
  *   `startTimeOfDay` (*type:* `GoogleApi.StorageTransfer.V1.Model.TimeOfDay.t`, *default:* `nil`) - The time in UTC at which the transfer will be scheduled to start in a day.
      Transfers may start later than this time. If not specified, recurring and
      one-time transfers that are scheduled to run today will run immediately;
      recurring transfers that are scheduled to run on a future date will start
      at approximately midnight UTC on that date. Note that when configuring a
      transfer with the Cloud Platform Console, the transfer's start time in a
      day is specified in your local timezone.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :scheduleEndDate => GoogleApi.StorageTransfer.V1.Model.Date.t(),
          :scheduleStartDate => GoogleApi.StorageTransfer.V1.Model.Date.t(),
          :startTimeOfDay => GoogleApi.StorageTransfer.V1.Model.TimeOfDay.t()
        }

  # Auto-generated field declarations; camelCase names mirror the API's
  # JSON attributes (do not edit by hand — regenerate instead).
  field(:scheduleEndDate, as: GoogleApi.StorageTransfer.V1.Model.Date)
  field(:scheduleStartDate, as: GoogleApi.StorageTransfer.V1.Model.Date)
  field(:startTimeOfDay, as: GoogleApi.StorageTransfer.V1.Model.TimeOfDay)
end
# Poison (de)serialisation delegates to the Gax model helpers.
defimpl Poison.Decoder, for: GoogleApi.StorageTransfer.V1.Model.Schedule do
  def decode(value, options) do
    GoogleApi.StorageTransfer.V1.Model.Schedule.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.StorageTransfer.V1.Model.Schedule do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 46.095238 | 176 | 0.742424 |
1c5ce463b5d79c198db47d3634f76c12a293b868 | 1,069 | exs | Elixir | config/dev.exs | hgaard/RethinkLogs-Elixir | 45fd72938bf7c953c76e867687fca0e9a8e05115 | [
"MIT"
] | null | null | null | config/dev.exs | hgaard/RethinkLogs-Elixir | 45fd72938bf7c953c76e867687fca0e9a8e05115 | [
"MIT"
] | null | null | null | config/dev.exs | hgaard/RethinkLogs-Elixir | 45fd72938bf7c953c76e867687fca0e9a8e05115 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :rethink_logs, RethinkLogs.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin"]]

# Watch static assets and templates for browser live-reloading.
config :rethink_logs, RethinkLogs.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{web/views/.*(ex)$},
      ~r{web/templates/.*(eex)$}
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace depth during development.
# Do not configure such in production as keeping
# and calculating stacktraces is usually expensive.
config :phoenix, :stacktrace_depth, 20
| 31.441176 | 74 | 0.710945 |
1c5d39b377a2b91fe5ebfbf4f6616d21a4dcffd2 | 1,119 | exs | Elixir | config/config.exs | styx/whenever.ex | c3d57d6323f3681b790599291a3009e2000782d9 | [
"MIT"
] | null | null | null | config/config.exs | styx/whenever.ex | c3d57d6323f3681b790599291a3009e2000782d9 | [
"MIT"
] | null | null | null | config/config.exs | styx/whenever.ex | c3d57d6323f3681b790599291a3009e2000782d9 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure your application as:
#
#     config :whenever, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:whenever, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
1c5d5b1f2bbec61e1003a2274b47d33ef6bed88d | 1,023 | ex | Elixir | apps/ewallet/lib/ewallet/policies/mint_policy.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet/lib/ewallet/policies/mint_policy.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet/lib/ewallet/policies/mint_policy.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.MintPolicy do
  @moduledoc """
  The authorization policy for mints.
  """
  alias EWallet.PolicyHelper
  alias EWallet.{Bouncer, Bouncer.Permission}
  alias EWalletDB.Mint

  # Creation is checked through the bouncer, with the prospective mint
  # attributes as the permission target.
  def authorize(:create, attrs, mint_attrs) do
    Bouncer.bounce(attrs, %Permission{action: :create, target: mint_attrs})
  end

  # Every other action delegates to the generic policy helper, passing
  # the :mints key and the Mint schema.
  def authorize(action, attrs, target) do
    PolicyHelper.authorize(action, attrs, :mints, Mint, target)
  end
end
| 33 | 75 | 0.751711 |
1c5d64117e6bb5953161dd58fc3f96fa4031f0d9 | 71 | exs | Elixir | postgis-ecto/gis_demo/test/test_helper.exs | wisq/slides | e0a1b9dd8187b82c8772b4e6ab20f8b069e1feb1 | [
"MIT"
] | null | null | null | postgis-ecto/gis_demo/test/test_helper.exs | wisq/slides | e0a1b9dd8187b82c8772b4e6ab20f8b069e1feb1 | [
"MIT"
] | null | null | null | postgis-ecto/gis_demo/test/test_helper.exs | wisq/slides | e0a1b9dd8187b82c8772b4e6ab20f8b069e1feb1 | [
"MIT"
] | null | null | null | ExUnit.start()
# :manual sandbox mode — each test must explicitly check out its own
# database connection before touching GisDemo.Repo.
Ecto.Adapters.SQL.Sandbox.mode(GisDemo.Repo, :manual)
| 14.2 | 53 | 0.760563 |
1c5d77a1d1cbfae51ef45b6369712f13284c49c5 | 156 | ex | Elixir | testData/org/elixir_lang/parser_definition/bracket_operation_parsing_test_case/AtomKeyword.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/bracket_operation_parsing_test_case/AtomKeyword.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/bracket_operation_parsing_test_case/AtomKeyword.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | nil[key: value]
nil [key: value]
true[()]
true [()]
false[matched_expression]
false [matched_expression]
nil[matched_expression,]
nil [matched_expression,]
| 17.333333 | 26 | 0.75641 |
1c5d7a4dba70a2cc0bbf9f28ff0c89f81afec926 | 451 | ex | Elixir | lib/documents_design_web/controllers/plugs/needs_first_user.ex | documents-org/documents.design-el | 6976254e175232afe5e913c29b04c13a86d2e9a2 | [
"MIT"
] | null | null | null | lib/documents_design_web/controllers/plugs/needs_first_user.ex | documents-org/documents.design-el | 6976254e175232afe5e913c29b04c13a86d2e9a2 | [
"MIT"
] | null | null | null | lib/documents_design_web/controllers/plugs/needs_first_user.ex | documents-org/documents.design-el | 6976254e175232afe5e913c29b04c13a86d2e9a2 | [
"MIT"
defmodule DocumentsDesignWeb.Plugs.NeedsFirstUser do
  import Plug.Conn
  import Phoenix.Controller

  @moduledoc """
  Guards access to the app, to force the registration of a first user.
  """

  def init(_params) do
  end

  # Pass the connection through when a user already exists; otherwise
  # halt the plug pipeline and send the visitor to the registration page.
  def call(conn, _params) do
    if DocumentsDesign.Accounts.has_user() do
      conn
    else
      conn
      |> halt()
      |> redirect(to: DocumentsDesignWeb.Router.Helpers.auth_path(conn, :register))
    end
  end
end
| 20.5 | 83 | 0.685144 |
1c5da49f22c5b8c2d89f71be66faba0c2ff6333a | 1,654 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/arima_order.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/big_query/lib/google_api/big_query/v2/model/arima_order.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/big_query/lib/google_api/big_query/v2/model/arima_order.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQuery.V2.Model.ArimaOrder do
  @moduledoc """
  Arima order, can be used for both non-seasonal and seasonal parts.

  ## Attributes

  *   `d` (*type:* `String.t`, *default:* `nil`) - Order of the differencing part.
  *   `p` (*type:* `String.t`, *default:* `nil`) - Order of the autoregressive part.
  *   `q` (*type:* `String.t`, *default:* `nil`) - Order of the moving-average part.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :d => String.t() | nil,
          :p => String.t() | nil,
          :q => String.t() | nil
        }

  # Auto-generated field declarations mirroring the API's JSON attributes
  # (do not edit by hand — regenerate instead).
  field(:d)
  field(:p)
  field(:q)
end
# Delegates Poison decoding to the generated model's `decode/2` so JSON maps
# are turned into `ArimaOrder` structs.
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.ArimaOrder do
  def decode(value, options) do
    GoogleApi.BigQuery.V2.Model.ArimaOrder.decode(value, options)
  end
end
# Delegates Poison encoding to the shared ModelBase encoder, which serializes
# the struct's defined fields to JSON.
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.ArimaOrder do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31.207547 | 84 | 0.692261 |
1c5dbad34af1f0c3382729a8f84ae50af69092a3 | 283 | exs | Elixir | priv/repo/migrations/20210429142928_create_users_table.exs | vbrazo/chat_elixir_api | f2595d123934014b3a7c1b3b63b987fa3df5b3ad | [
"MIT"
] | 2 | 2021-04-30T08:30:26.000Z | 2021-04-30T16:20:39.000Z | priv/repo/migrations/20210429142928_create_users_table.exs | vbrazo/chat_elixir_api | f2595d123934014b3a7c1b3b63b987fa3df5b3ad | [
"MIT"
] | null | null | null | priv/repo/migrations/20210429142928_create_users_table.exs | vbrazo/chat_elixir_api | f2595d123934014b3a7c1b3b63b987fa3df5b3ad | [
"MIT"
defmodule ChatApi.Repo.Migrations.CreateUsersTable do
  use Ecto.Migration

  # Creates the users table (email + password columns) and enforces email
  # uniqueness at the database level. Timestamps are remapped so the insert
  # column is named `created_at` instead of the Ecto default `inserted_at`.
  def change do
    create table(:users) do
      add :email, :string
      add :password, :string

      timestamps(inserted_at: :created_at)
    end

    create unique_index(:users, [:email])
  end
end
| 18.866667 | 53 | 0.678445 |
1c5dc15836d184dbd1b3c151ed7a9fd7c966b270 | 12,660 | exs | Elixir | test/mastani_server_web/query/cms/video_comment_test.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:03:54.000Z | 2019-05-07T15:03:54.000Z | test/mastani_server_web/query/cms/video_comment_test.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | test/mastani_server_web/query/cms/video_comment_test.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
# GraphQL query tests for video comments: participator dataloading, paging,
# sorting, like/dislike counts, viewer feedback and reply threading.
# NOTE(review): several test names contain the typo "commetns"; they are left
# unchanged here because test names are runtime strings (test identifiers).
defmodule MastaniServer.Test.Query.VideoComment do
  use MastaniServer.TestTools

  alias MastaniServer.CMS

  setup do
    {:ok, video} = db_insert(:video)
    {:ok, user} = db_insert(:user)
    {:ok, community} = db_insert(:community)

    guest_conn = simu_conn(:guest)
    user_conn = simu_conn(:user)

    {:ok, ~m(user_conn guest_conn video user community)a}
  end

  describe "[video dataloader comment]" do
    @query """
    query($filter: PagedVideosFilter) {
      pagedVideos(filter: $filter) {
        entries {
          id
          title
          commentsParticipators(filter: { first: 5 }) {
            id
            nickname
          }
          pagedCommentsParticipators {
            entries {
              id
            }
            totalCount
          }
          commentsCount
        }
        totalCount
      }
    }
    """
    test "can get comments participators of a video", ~m(user guest_conn)a do
      {:ok, user2} = db_insert(:user)
      {:ok, community} = db_insert(:community)
      {:ok, video} = CMS.create_content(community, :video, mock_attrs(:video), user)

      variables = %{thread: "VIDEO", filter: %{community: community.raw}}
      guest_conn |> query_result(@query, variables, "pagedVideos")

      body = "this is a test comment"

      assert {:ok, _comment} =
               CMS.create_comment(:video, video.id, %{community: community.raw, body: body}, user)

      assert {:ok, _comment} =
               CMS.create_comment(:video, video.id, %{community: community.raw, body: body}, user)

      assert {:ok, _comment} =
               CMS.create_comment(
                 :video,
                 video.id,
                 %{community: community.raw, body: body},
                 user2
               )

      variables = %{filter: %{community: community.raw}}
      results = guest_conn |> query_result(@query, variables, "pagedVideos")
      comments_count = results["entries"] |> List.first() |> Map.get("commentsCount")

      assert comments_count == 3
    end

    test "can get comments participators of a video with multi user", ~m(user guest_conn)a do
      body = "this is a test comment"
      {:ok, community} = db_insert(:community)
      {:ok, video1} = CMS.create_content(community, :video, mock_attrs(:video), user)
      {:ok, video2} = CMS.create_content(community, :video, mock_attrs(:video), user)

      {:ok, users_list} = db_insert_multi(:user, 10)
      {:ok, users_list2} = db_insert_multi(:user, 10)

      Enum.each(
        users_list,
        &CMS.create_comment(:video, video1.id, %{community: community.raw, body: body}, &1)
      )

      Enum.each(
        users_list2,
        &CMS.create_comment(:video, video2.id, %{community: community.raw, body: body}, &1)
      )

      variables = %{thread: "VIDEO", filter: %{community: community.raw}}
      results = guest_conn |> query_result(@query, variables, "pagedVideos")

      assert results["entries"] |> List.first() |> Map.get("commentsParticipators") |> length ==
               10

      assert results["entries"] |> List.last() |> Map.get("commentsParticipators") |> length == 10
    end

    test "can get paged commetns participators of a video", ~m(user guest_conn)a do
      body = "this is a test comment"
      {:ok, community} = db_insert(:community)
      {:ok, video} = CMS.create_content(community, :video, mock_attrs(:video), user)

      {:ok, users_list} = db_insert_multi(:user, 10)

      Enum.each(
        users_list,
        &CMS.create_comment(:video, video.id, %{community: community.raw, body: body}, &1)
      )

      variables = %{filter: %{community: community.raw}}
      results = guest_conn |> query_result(@query, variables, "pagedVideos")
      participators = results["entries"] |> List.first() |> Map.get("pagedCommentsParticipators")

      assert participators["totalCount"] == 10
    end
  end

  @query """
  query($id: ID!, $thread: CmsThread, $filter: PagedFilter!) {
    pagedCommentsParticipators(id: $id, thread: $thread, filter: $filter) {
      entries {
        id
        nickname
      }
      totalPages
      totalCount
      pageSize
      pageNumber
    }
  }
  """
  test "can get video's paged commetns participators", ~m(user guest_conn)a do
    body = "this is a test comment"
    {:ok, community} = db_insert(:community)
    {:ok, video} = CMS.create_content(community, :video, mock_attrs(:video), user)

    {:ok, users_list} = db_insert_multi(:user, 10)

    Enum.each(
      users_list,
      &CMS.create_comment(:video, video.id, %{community: community.raw, body: body}, &1)
    )

    variables = %{id: video.id, thread: "VIDEO", filter: %{page: 1, size: 20}}
    results = guest_conn |> query_result(@query, variables, "pagedCommentsParticipators")

    assert results |> is_valid_pagination?()
    assert results["totalCount"] == 10
  end

  # TODO: user can get specific user's replies :list_replies
  describe "[video comment]" do
    @query """
    query($filter: PagedVideosFilter) {
      pagedVideos(filter: $filter) {
        entries {
          id
          title
          commentsCount
        }
        totalCount
      }
    }
    """
    test "can get comments info in paged videos", ~m(user guest_conn)a do
      body = "this is a test comment"
      {:ok, community} = db_insert(:community)
      {:ok, video} = CMS.create_content(community, :video, mock_attrs(:video), user)

      {:ok, _comment} =
        CMS.create_comment(:video, video.id, %{community: community.raw, body: body}, user)

      variables = %{filter: %{community: community.raw}}
      results = guest_conn |> query_result(@query, variables, "pagedVideos")

      assert results["entries"] |> List.first() |> Map.get("commentsCount") == 1
    end

    @query """
    query($thread: CmsThread, $id: ID!, $filter: CommentsFilter!) {
      pagedComments(thread: $thread, id: $id, filter: $filter) {
        entries {
          id
          body
          likesCount
          dislikesCount
        }
        totalPages
        totalCount
        pageSize
        pageNumber
      }
    }
    """
    test "guest user can get a paged comment", ~m(guest_conn video user community)a do
      body = "test comment"

      Enum.reduce(1..30, [], fn _, acc ->
        {:ok, value} =
          CMS.create_comment(:video, video.id, %{community: community.raw, body: body}, user)

        acc ++ [value]
      end)

      variables = %{thread: "VIDEO", id: video.id, filter: %{page: 1, size: 10}}
      results = guest_conn |> query_result(@query, variables, "pagedComments")

      assert results |> is_valid_pagination?
      assert results["totalCount"] == 30
    end

    test "MOST_LIKES filter should work", ~m(guest_conn video user community)a do
      body = "test comment"

      comments =
        Enum.reduce(1..10, [], fn _, acc ->
          {:ok, value} =
            CMS.create_comment(:video, video.id, %{community: community.raw, body: body}, user)

          acc ++ [value]
        end)

      [comment_1, _comment_2, comment_3, _comment_last] = comments |> firstn_and_last(3)
      {:ok, [user_1, user_2, user_3, user_4, user_5]} = db_insert_multi(:user, 5)

      # comment_3 gets 5 likes (the most); comment_1 gets 4
      {:ok, _} = CMS.like_comment(:video_comment, comment_3.id, user_1)
      {:ok, _} = CMS.like_comment(:video_comment, comment_3.id, user_2)
      {:ok, _} = CMS.like_comment(:video_comment, comment_3.id, user_3)
      {:ok, _} = CMS.like_comment(:video_comment, comment_3.id, user_4)
      {:ok, _} = CMS.like_comment(:video_comment, comment_3.id, user_5)

      {:ok, _} = CMS.like_comment(:video_comment, comment_1.id, user_1)
      {:ok, _} = CMS.like_comment(:video_comment, comment_1.id, user_2)
      {:ok, _} = CMS.like_comment(:video_comment, comment_1.id, user_3)
      {:ok, _} = CMS.like_comment(:video_comment, comment_1.id, user_4)

      variables = %{
        thread: "VIDEO",
        id: video.id,
        filter: %{page: 1, size: 10, sort: "MOST_LIKES"}
      }

      results = guest_conn |> query_result(@query, variables, "pagedComments")
      entries = results["entries"]

      assert entries |> Enum.at(0) |> Map.get("id") == to_string(comment_3.id)
      assert entries |> Enum.at(0) |> Map.get("likesCount") == 5

      assert entries |> Enum.at(1) |> Map.get("id") == to_string(comment_1.id)
      assert entries |> Enum.at(1) |> Map.get("likesCount") == 4
    end

    test "MOST_DISLIKES filter should work", ~m(guest_conn video user community)a do
      body = "test comment"

      comments =
        Enum.reduce(1..10, [], fn _, acc ->
          {:ok, value} =
            CMS.create_comment(:video, video.id, %{community: community.raw, body: body}, user)

          acc ++ [value]
        end)

      [comment_1, _comment_2, comment_3, _comment_last] = comments |> firstn_and_last(3)
      {:ok, [user_1, user_2, user_3, user_4, user_5]} = db_insert_multi(:user, 5)

      # comment_3 is most disliked (5 dislikes); comment_1 gets 4
      {:ok, _} = CMS.dislike_comment(:video_comment, comment_3.id, user_1)
      {:ok, _} = CMS.dislike_comment(:video_comment, comment_3.id, user_2)
      {:ok, _} = CMS.dislike_comment(:video_comment, comment_3.id, user_3)
      {:ok, _} = CMS.dislike_comment(:video_comment, comment_3.id, user_4)
      {:ok, _} = CMS.dislike_comment(:video_comment, comment_3.id, user_5)

      {:ok, _} = CMS.dislike_comment(:video_comment, comment_1.id, user_1)
      {:ok, _} = CMS.dislike_comment(:video_comment, comment_1.id, user_2)
      {:ok, _} = CMS.dislike_comment(:video_comment, comment_1.id, user_3)
      {:ok, _} = CMS.dislike_comment(:video_comment, comment_1.id, user_4)

      variables = %{
        thread: "VIDEO",
        id: video.id,
        filter: %{page: 1, size: 10, sort: "MOST_DISLIKES"}
      }

      results = guest_conn |> query_result(@query, variables, "pagedComments")
      entries = results["entries"]

      assert entries |> Enum.at(0) |> Map.get("id") == to_string(comment_3.id)
      assert entries |> Enum.at(0) |> Map.get("dislikesCount") == 5

      assert entries |> Enum.at(1) |> Map.get("id") == to_string(comment_1.id)
      assert entries |> Enum.at(1) |> Map.get("dislikesCount") == 4
    end

    @query """
    query($thread: CmsThread, $id: ID!, $filter: CommentsFilter!) {
      pagedComments(thread: $thread, id: $id, filter: $filter) {
        entries {
          id
          viewerHasLiked
        }
      }
    }
    """
    test "login user can get hasLiked feedBack", ~m(user_conn video user community)a do
      body = "test comment"

      {:ok, comment} =
        CMS.create_comment(:video, video.id, %{community: community.raw, body: body}, user)

      {:ok, _like} = CMS.like_comment(:video_comment, comment.id, user)

      variables = %{thread: "VIDEO", id: video.id, filter: %{page: 1, size: 10}}
      results = user_conn |> query_result(@query, variables, "pagedComments")

      found =
        results["entries"] |> Enum.filter(&(&1["id"] == to_string(comment.id))) |> List.first()

      # user_conn is a different user than the liker, so it sees false
      assert found["viewerHasLiked"] == false

      own_like_conn = simu_conn(:user, user)
      results = own_like_conn |> query_result(@query, variables, "pagedComments")

      found =
        results["entries"] |> Enum.filter(&(&1["id"] == to_string(comment.id))) |> List.first()

      assert found["viewerHasLiked"] == true
    end

    @query """
    query($thread: CmsThread, $id: ID!, $filter: PagedFilter!) {
      pagedComments(thread: $thread, id: $id, filter: $filter) {
        entries {
          id
          body
          replyTo {
            id
            body
          }
          repliesCount
          replies {
            id
            body
          }
        }
      }
    }
    """
    test "guest user can get replies info", ~m(guest_conn video user community)a do
      body = "test comment"

      {:ok, comment} =
        CMS.create_comment(:video, video.id, %{community: community.raw, body: body}, user)

      {:ok, reply} = CMS.reply_comment(:video, comment.id, %{body: "reply body"}, user)

      variables = %{thread: "VIDEO", id: video.id, filter: %{page: 1, size: 10}}
      results = guest_conn |> query_result(@query, variables, "pagedComments")

      found =
        results["entries"] |> Enum.filter(&(&1["id"] == to_string(comment.id))) |> List.first()

      found_reply =
        results["entries"] |> Enum.filter(&(&1["id"] == to_string(reply.id))) |> List.first()

      assert found["repliesCount"] == 1
      assert found["replies"] |> Enum.any?(&(&1["id"] == to_string(reply.id)))
      assert found["replyTo"] == nil
      assert found_reply["replyTo"] |> Map.get("id") == to_string(comment.id)
    end
  end
end
| 33.315789 | 98 | 0.594708 |
1c5ddbe7cde4dcc5ac047905014edb397fe4be46 | 2,323 | ex | Elixir | lib/mailjex/contact.ex | dwarner/mailjex | d611f2e2d70b4e6567b22dd3ef19b792dcbf2d79 | [
"MIT"
] | 8 | 2017-10-10T15:51:33.000Z | 2021-08-18T01:06:49.000Z | lib/mailjex/contact.ex | dwarner/mailjex | d611f2e2d70b4e6567b22dd3ef19b792dcbf2d79 | [
"MIT"
] | 5 | 2018-05-23T15:03:15.000Z | 2019-11-17T16:30:18.000Z | lib/mailjex/contact.ex | dwarner/mailjex | d611f2e2d70b4e6567b22dd3ef19b792dcbf2d79 | [
"MIT"
] | 4 | 2018-11-16T15:45:22.000Z | 2019-11-16T16:19:48.000Z | defmodule Mailjex.Contact do
@moduledoc """
Exposes functions for managing contacts on MailJet.
"""
use GenServer
alias Mailjex.Api.Contact
@behaviour Mailjex.Behaviour.Contact
@doc false
def start_link do
initial_state = %{}
GenServer.start_link(__MODULE__, initial_state, [name: __MODULE__])
end
##########################
# Public API
##########################
@doc """
Allows you to create a contact on MailJet
## Examples
iex> body = %{
...> "Email": "contactemail@gmail.com",
...>}
iex> Mailjex.Contact.create(body)
"""
def create(body) do
GenServer.call(__MODULE__, {:create, body})
end
@doc """
Allows you to view a contact on MailJet
## Examples
iex> Mailjex.Contact.view("1234ID")
"""
def view(id) do
GenServer.call(__MODULE__, {:view, id})
end
@doc """
Allows you to list contacts on MailJet
## Examples
iex> Mailjex.Contact.list()
"""
def list do
GenServer.call(__MODULE__, {:list})
end
@doc """
Allows you to update a contact on MailJet
## Examples
iex> body = %{
...> "Email": "contactemail@gmail.com",
...>}
iex> Mailjex.Contact.update("1234ID", body)
"""
def update(id, body) do
GenServer.call(__MODULE__, {:update, id, body})
end
@doc """
Allows you to manage what contact lists
a given contact is on.
## Examples
iex> body = %{
...> "ContactsLists": [%{"ListID": 1684490, "Action": "addnoforce"}],
...>}
iex> Mailjex.Contact.manage_contacts_lists(12345, body)
"""
def manage_contacts_lists(id, body) do
GenServer.call(__MODULE__, {:manage_contacts_lists, id, body})
end
##########################
# GenServer Callbacks
##########################
def handle_call({:create, body}, _from, state) do
{:reply, Contact.create(body), state}
end
def handle_call({:view, id}, _from, state) do
{:reply, Contact.view(id), state}
end
def handle_call({:list}, _from, state) do
{:reply, Contact.list(), state}
end
def handle_call({:update, id, body}, _from, state) do
{:reply, Contact.update(id, body), state}
end
def handle_call({:manage_contacts_lists, id, body}, _from, state) do
{:reply, Contact.manage_contacts_lists(id, body), state}
end
end
| 21.509259 | 76 | 0.600517 |
1c5e1d15c1e62cf8c561188d30f8c53d20cbb968 | 709 | ex | Elixir | lib/code_corps/model/preview.ex | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 275 | 2015-06-23T00:20:51.000Z | 2021-08-19T16:17:37.000Z | lib/code_corps/model/preview.ex | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 1,304 | 2015-06-26T02:11:54.000Z | 2019-12-12T21:08:00.000Z | lib/code_corps/model/preview.ex | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 140 | 2016-01-01T18:19:47.000Z | 2020-11-22T06:24:47.000Z | defmodule CodeCorps.Preview do
@moduledoc """
Represents an category on Code Corps, e.g. "Society" and "Technology".
"""
use CodeCorps.Model
alias CodeCorps.Services.MarkdownRendererService
@type t :: %__MODULE__{}
schema "previews" do
field :body, :string
field :markdown, :string
belongs_to :user, CodeCorps.User
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:markdown, :user_id])
|> validate_required([:markdown, :user_id])
|> assoc_constraint(:user)
|> MarkdownRendererService.render_markdown_to_html(:markdown, :body)
end
end
| 22.870968 | 72 | 0.67701 |
1c5e2e6ccaf3faef05581b83c7e8474eb3d8cef3 | 1,441 | ex | Elixir | lib/live_view_examples/tabs/system.ex | zorbash/observer_live | f78af309a85783ac61d97ba9bb5ffd7a64ec9823 | [
"MIT"
] | 219 | 2019-04-02T02:51:59.000Z | 2021-11-10T22:14:17.000Z | lib/live_view_examples/tabs/system.ex | zorbash/observer_live | f78af309a85783ac61d97ba9bb5ffd7a64ec9823 | [
"MIT"
] | 1 | 2021-05-08T11:51:29.000Z | 2021-05-08T11:51:29.000Z | lib/live_view_examples/tabs/system.ex | zorbash/observer_live | f78af309a85783ac61d97ba9bb5ffd7a64ec9823 | [
"MIT"
defmodule LiveViewExamples.Tabs.System do
  @moduledoc """
  Collects system-tab stats: the shared home stats plus per-allocator
  average block sizes from `:recon_alloc`, pre-rendered as HTML rows.
  """

  import LiveViewExamples.Format

  alias LiveViewExamples.Tabs.Home

  # ERTS allocator names queried via :recon_alloc.average_block_sizes/1.
  @allocators [
    :binary_alloc,
    :driver_alloc,
    :eheap_alloc,
    :ets_alloc,
    :fix_alloc,
    :ll_alloc,
    :sl_alloc,
    :std_alloc,
    :temp_alloc
  ]

  # Merges fresh base/memory stats and the rendered allocator rows into the
  # :stats key of the given state map.
  def collect(%{stats: stats, settings: _settings} = state) do
    put_in(state[:stats], Map.merge(stats, Home.base_stats()))
    |> put_in([:stats, :mem_stats], Home.mem_stats())
    |> put_in([:stats, :allocators], rendered_allocators())
  end

  # Builds one raw HTML <tr> string per allocator.
  defp rendered_allocators do
    # This is due to an "Uncaught TypeError: Cannot create property 'dynamics' on string"
    # when trying to use a template
    for {alloc, stats} <- allocators() do
      """
      <tr>
        <td class="bold">#{alloc}</td>
        <td>#{stats[:current_multi]}</td>
        <td>#{stats[:max_multi]}</td>
        <td>#{stats[:current_single]}</td>
        <td>#{stats[:max_single]}</td>
      </tr>
      """
    end
  end

  # Returns %{allocator => %{current/max x single/multi block sizes}} with
  # values formatted as human-readable sizes.
  defp allocators do
    current = :recon_alloc.average_block_sizes(:current)
    max = :recon_alloc.average_block_sizes(:max)

    for alloc <- @allocators, into: %{} do
      {alloc, %{
        current_single: number_to_human_size(current[alloc][:sbcs]),
        current_multi: number_to_human_size(current[alloc][:mbcs]),
        max_single: number_to_human_size(max[alloc][:sbcs]),
        max_multi: number_to_human_size(max[alloc][:mbcs])
      }}
    end
  end
end
| 26.2 | 89 | 0.639139 |
1c5e34c6ec90eb369a1d869a70de51707e7fff06 | 321 | ex | Elixir | apps/api/web/controllers/sensors_controller.ex | elicopter/core | 7731dc7558dea39bd1c473ab9e512c9db9e1b2c9 | [
"MIT"
] | 39 | 2016-11-01T07:21:51.000Z | 2021-02-05T20:19:02.000Z | apps/api/web/controllers/sensors_controller.ex | elicopter/core | 7731dc7558dea39bd1c473ab9e512c9db9e1b2c9 | [
"MIT"
] | null | null | null | apps/api/web/controllers/sensors_controller.ex | elicopter/core | 7731dc7558dea39bd1c473ab9e512c9db9e1b2c9 | [
"MIT"
defmodule Api.SensorsController do
  use Api.Web, :controller

  # Collects a snapshot from every registered sensor process and renders the
  # list as JSON. A sensor that fails to answer crashes the request (the
  # {:ok, _} match is assertive on purpose).
  def index(conn, _params) do
    snapshots =
      for sensor <- Brain.Sensors.Supervisor.registered_sensors() do
        {:ok, snapshot} = GenServer.call(sensor, :snapshot)
        snapshot
      end

    render conn, "index.json", %{sensors: snapshots}
  end
end
| 26.75 | 84 | 0.685358 |
1c5e3db27f1801adbd0a6106bbc3fa7112b358dd | 1,772 | ex | Elixir | clients/translate/lib/google_api/translate/v2/model/detections_resource_inner.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/translate/lib/google_api/translate/v2/model/detections_resource_inner.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/translate/lib/google_api/translate/v2/model/detections_resource_inner.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Translate.V2.Model.DetectionsResourceInner do
  @moduledoc """
  ## Attributes

  - confidence (float()): The confidence of the detection result of this language. Defaults to: `null`.
  - isReliable (boolean()): Whether the language detection result is reliable. Defaults to: `null`.
  - language (String.t): The language we detected. Defaults to: `null`.
  """

  # Auto-generated model (see file header): `field/1` from ModelBase defines
  # the struct keys and the JSON (de)serialization mapping for each attribute.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :confidence => any(),
          :isReliable => any(),
          :language => any()
        }

  field(:confidence)
  field(:isReliable)
  field(:language)
end
# Delegates Poison decoding to the generated model's `decode/2` so JSON maps
# are turned into `DetectionsResourceInner` structs.
defimpl Poison.Decoder, for: GoogleApi.Translate.V2.Model.DetectionsResourceInner do
  def decode(value, options) do
    GoogleApi.Translate.V2.Model.DetectionsResourceInner.decode(value, options)
  end
end
# Delegates Poison encoding to the shared ModelBase encoder, which serializes
# the struct's defined fields to JSON.
defimpl Poison.Encoder, for: GoogleApi.Translate.V2.Model.DetectionsResourceInner do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.814815 | 113 | 0.733634 |
1c5e8a88b4daecf59e0159e5bbfcc88f59065df9 | 2,691 | exs | Elixir | priv/repo/migrations/20151211222543_add_delete_constrains.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 691 | 2017-03-08T09:15:45.000Z | 2022-03-23T22:04:47.000Z | priv/repo/migrations/20151211222543_add_delete_constrains.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 491 | 2017-03-07T12:58:42.000Z | 2022-03-29T23:32:54.000Z | priv/repo/migrations/20151211222543_add_delete_constrains.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 200 | 2017-03-12T23:03:39.000Z | 2022-03-05T17:55:52.000Z | defmodule Hexpm.Repo.Migrations.AddDeleteConstraints do
use Ecto.Migration
def up() do
execute("ALTER TABLE keys DROP CONSTRAINT IF EXISTS keys_user_id_fkey")
execute("ALTER TABLE package_owners DROP CONSTRAINT IF EXISTS package_owners_package_id_fkey")
execute("ALTER TABLE package_owners DROP CONSTRAINT IF EXISTS package_owners_owner_id_fkey")
execute("ALTER TABLE requirements DROP CONSTRAINT IF EXISTS requirements_release_id_fkey")
execute("ALTER TABLE downloads DROP CONSTRAINT IF EXISTS downloads_release_id_fkey")
execute("""
ALTER TABLE keys
ADD CONSTRAINT keys_user_id_fkey
FOREIGN KEY (user_id) REFERENCES users ON DELETE CASCADE
""")
execute("""
ALTER TABLE package_owners
ADD CONSTRAINT package_owners_package_id_fkey
FOREIGN KEY (package_id) REFERENCES packages ON DELETE CASCADE
""")
execute("""
ALTER TABLE package_owners
ADD CONSTRAINT package_owners_owner_id_fkey
FOREIGN KEY (owner_id) REFERENCES users ON DELETE CASCADE
""")
execute("""
ALTER TABLE requirements
ADD CONSTRAINT requirements_release_id_fkey
FOREIGN KEY (release_id) REFERENCES releases ON DELETE CASCADE
""")
execute("""
ALTER TABLE downloads
ADD CONSTRAINT downloads_release_id_fkey
FOREIGN KEY (release_id) REFERENCES releases ON DELETE CASCADE
""")
end
def down() do
execute("ALTER TABLE keys DROP CONSTRAINT keys_user_id_fkey")
execute("ALTER TABLE package_owners DROP CONSTRAINT package_owners_package_id_fkey")
execute("ALTER TABLE package_owners DROP CONSTRAINT package_owners_owner_id_fkey")
execute("ALTER TABLE requirements DROP CONSTRAINT requirements_release_id_fkey")
execute("ALTER TABLE downloads DROP CONSTRAINT downloads_release_id_fkey")
execute("""
ALTER TABLE keys
ADD CONSTRAINT keys_user_id_fkey
FOREIGN KEY (user_id) REFERENCES users
""")
execute("""
ALTER TABLE package_owners
ADD CONSTRAINT package_owners_package_id_fkey
FOREIGN KEY (package_id) REFERENCES packages
""")
execute("""
ALTER TABLE package_owners
ADD CONSTRAINT package_owners_owner_id_fkey
FOREIGN KEY (owner_id) REFERENCES users
""")
execute("""
ALTER TABLE requirements
ADD CONSTRAINT requirements_release_id_fkey
FOREIGN KEY (release_id) REFERENCES releases
""")
execute("""
ALTER TABLE downloads
ADD CONSTRAINT downloads_release_id_fkey
FOREIGN KEY (release_id) REFERENCES releases
""")
end
end
| 33.6375 | 98 | 0.705314 |
1c5ea203436805d706402e1542e95f5d211d9f5b | 1,239 | ex | Elixir | lib/placex/image_generator.ex | jnbt/placex | 8b031f04df092ed94ba660acbc8f3fcd18d2beb8 | [
"MIT"
] | 3 | 2016-01-18T16:20:57.000Z | 2017-08-02T18:04:19.000Z | lib/placex/image_generator.ex | jnbt/placex | 8b031f04df092ed94ba660acbc8f3fcd18d2beb8 | [
"MIT"
] | null | null | null | lib/placex/image_generator.ex | jnbt/placex | 8b031f04df092ed94ba660acbc8f3fcd18d2beb8 | [
"MIT"
] | null | null | null | defmodule Placex.ImageGenerator do
@background_color "E0E0E0"
@text_color "6D6D6D"
@font Path.join([__DIR__, "..", "..", "priv", "Lato-Regular.ttf"])
@resolution 72
@sampling_factor 1
def render(description) do
case call_convert(description) do
{data, 0} -> data
_ -> nil
end
end
defp call_convert(description) do
System.cmd "convert", args_for_convert(description)
end
defp args_for_convert(%{width: w, height: h, format: f}) do
[
"-density", "#{@resolution * @sampling_factor}", # sample up
"-size", "#{w*@sampling_factor}x#{h*@sampling_factor}", # corrected size
"canvas:##{@background_color}", # background color
"-fill", "##{@text_color}", # text color
"-font", "#{@font}", # font location
"-pointsize", "25", # font size
"-gravity", "center", # center text
"-annotate", "+0+0", "#{w}x#{h}", # render text
"-resample", "#{@resolution}", # sample down to reduce aliasing
"#{f}:-"
]
end
end
| 36.441176 | 95 | 0.489911 |
1c5ec16d20e48acde008b37c8ad807627845458a | 408 | ex | Elixir | lib/instagram_clone/notifications/notification.ex | elixirprogrammer/InstagramClonePETAL | 5710f89de32a95b0416a960a46d081e7ad4833ab | [
"MIT"
] | 69 | 2021-04-11T21:57:26.000Z | 2022-03-30T01:09:40.000Z | lib/instagram_clone/notifications/notification.ex | hminy572/InstagramClonePETAL | 577cdad0e17399e47ef9d3f8e789bd07e33012b9 | [
"MIT"
] | 1 | 2021-04-16T17:38:11.000Z | 2021-04-16T17:38:11.000Z | lib/instagram_clone/notifications/notification.ex | hminy572/InstagramClonePETAL | 577cdad0e17399e47ef9d3f8e789bd07e33012b9 | [
"MIT"
defmodule InstagramClone.Notifications.Notification do
  use Ecto.Schema

  # A notification ties two users together — :user and :actor (presumably the
  # recipient and the user who triggered it — TODO confirm against callers) —
  # and optionally references the comment and/or post involved.
  schema "notifications" do
    field :action, :string
    # Tracks whether the notification has been read; unread by default.
    field :read, :boolean, default: false
    belongs_to :user, InstagramClone.Accounts.User
    belongs_to :actor, InstagramClone.Accounts.User
    belongs_to :comment, InstagramClone.Comments.Comment
    belongs_to :post, InstagramClone.Posts.Post

    timestamps()
  end
end
| 25.5 | 56 | 0.757353 |
1c5ee3e3419177ac81906d0e762844681a7f9059 | 2,864 | exs | Elixir | apps/omg_watcher_rpc/test/omg_watcher_rpc/web/views/transaction_test.exs | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 177 | 2018-08-24T03:51:02.000Z | 2020-05-30T13:29:25.000Z | apps/omg_watcher_rpc/test/omg_watcher_rpc/web/views/transaction_test.exs | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 1,042 | 2018-08-25T00:52:39.000Z | 2020-06-01T05:15:17.000Z | apps/omg_watcher_rpc/test/omg_watcher_rpc/web/views/transaction_test.exs | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 47 | 2018-08-24T12:06:33.000Z | 2020-04-28T11:49:25.000Z | # Copyright 2019-2020 OMG Network Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Verifies that the Transaction view renders every input and output UTXO of a
# transaction (single and paginated forms) using DB fixtures.
defmodule OMG.WatcherRPC.Web.View.TransactionTest do
  use ExUnitFixtures
  use ExUnit.Case, async: false
  use OMG.WatcherInfo.Fixtures

  alias OMG.Utils.Paginator
  alias OMG.Watcher.Utxo
  alias OMG.WatcherInfo.DB
  alias OMG.WatcherRPC.Web.View

  require Utxo

  describe "render/2 with transaction.json" do
    @tag fixtures: [:initial_blocks]
    test "renders the transaction's inputs and outputs" do
      transaction =
        1000
        |> DB.Transaction.get_by_position(1)
        |> DB.Repo.preload([:inputs, :outputs])

      rendered = View.Transaction.render("transaction.json", %{response: transaction})

      # Asserts all transaction inputs get rendered
      assert Map.has_key?(rendered.data, :inputs)
      assert utxos_match_all?(rendered.data.inputs, transaction.inputs)

      # Asserts all transaction outputs get rendered
      assert Map.has_key?(rendered.data, :outputs)
      assert utxos_match_all?(rendered.data.outputs, transaction.outputs)
    end
  end

  describe "render/2 with transactions.json" do
    @tag fixtures: [:initial_blocks]
    test "renders the transactions' inputs and outputs" do
      tx_1 = DB.Transaction.get_by_position(1000, 0) |> DB.Repo.preload([:inputs, :outputs])
      tx_2 = DB.Transaction.get_by_position(1000, 1) |> DB.Repo.preload([:inputs, :outputs])

      paginator = %Paginator{
        data: [tx_1, tx_2],
        data_paging: %{
          limit: 10,
          page: 1
        }
      }

      rendered = View.Transaction.render("transactions.json", %{response: paginator})
      [rendered_1, rendered_2] = rendered.data

      assert utxos_match_all?(rendered_1.inputs, tx_1.inputs)
      assert utxos_match_all?(rendered_1.outputs, tx_1.outputs)
      assert utxos_match_all?(rendered_2.inputs, tx_2.inputs)
      assert utxos_match_all?(rendered_2.outputs, tx_2.outputs)
    end
  end

  # Length mismatch means the rendered set cannot match the originals.
  defp utxos_match_all?(renders, originals) when length(renders) != length(originals), do: false

  # True when every rendered utxo_pos corresponds to one of the original
  # utxos' encoded positions (set membership; duplicates are not detected).
  defp utxos_match_all?(renders, originals) do
    original_utxo_positions =
      Enum.map(originals, fn utxo ->
        Utxo.position(utxo.blknum, utxo.txindex, utxo.oindex) |> Utxo.Position.encode()
      end)

    Enum.all?(renders, fn rendered -> rendered.utxo_pos in original_utxo_positions end)
  end
end
| 34.926829 | 96 | 0.709846 |
1c5f13391f06fc7d4d57ab582e427650e5327ce7 | 466 | ex | Elixir | lib/bitpal_web/live/home_live.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 5 | 2021-05-04T21:28:00.000Z | 2021-12-01T11:19:48.000Z | lib/bitpal_web/live/home_live.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 71 | 2021-04-21T05:48:49.000Z | 2022-03-23T06:30:37.000Z | lib/bitpal_web/live/home_live.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 1 | 2021-04-25T10:35:41.000Z | 2021-04-25T10:35:41.000Z | defmodule BitPalWeb.HomeLive do
use BitPalWeb, :live_view
alias BitPal.Stores
on_mount(BitPalWeb.UserLiveAuth)
@impl true
def mount(_params, _session, socket) do
if socket.assigns[:stores] do
{:ok, socket}
else
stores = Stores.user_stores(socket.assigns.current_user)
{:ok, assign(socket, stores: stores)}
end
end
@impl true
def render(assigns) do
render(BitPalWeb.HomeView, "dashboard.html", assigns)
end
end
| 20.26087 | 62 | 0.695279 |
1c5f3925fe737b777fe7accccf281cf1e6497c3e | 2,352 | exs | Elixir | config/prod.exs | ashkan18/mestatus | c0d1cf156729fc2f7a8c822f1757efd663515ba5 | [
"MIT"
] | null | null | null | config/prod.exs | ashkan18/mestatus | c0d1cf156729fc2f7a8c822f1757efd663515ba5 | [
"MIT"
] | null | null | null | config/prod.exs | ashkan18/mestatus | c0d1cf156729fc2f7a8c822f1757efd663515ba5 | [
"MIT"
] | null | null | null | use Mix.Config
# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
# Endpoint configuration: the listen port and the signing secret are read from
# the environment when this config is evaluated; the public URL is fixed to the
# Heroku hostname.
config :mestatus, Mestatus.Endpoint,
  http: [port: System.get_env("PORT")],
  url: [host: "artsy-mestatus.herokuapp.com", port: 80],
  secret_key_base: System.get_env("SECRET_KEY_BASE")
# Do not print debug messages in production.
# Fix: the comment promised no debug output, but the level was set to :debug —
# :info is the conventional production level and matches the stated intent.
config :logger, level: :info
# Configure your database.
# The full connection string (user, password, host, database) comes from the
# DATABASE_URL environment variable; 20 pooled connections are kept.
config :mestatus, Mestatus.Repo,
  adapter: Ecto.Adapters.Postgres,
  url: System.get_env("DATABASE_URL"),
  pool_size: 20
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :mestatus, Mestatus.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :mestatus, Mestatus.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :mestatus, Mestatus.Endpoint, server: true
#
# You will also need to set the application root to `.` in order
# for the new static assets to be served after a hot upgrade:
#
# config :mestatus, Mestatus.Endpoint, root: "."
# Finally import the config/prod.secret.exs
# which should be versioned separately.
#import_config "prod.secret.exs"
| 32.666667 | 67 | 0.718537 |
1c5f54281e3eb5e6a3cb9930056ee8eae21f3d33 | 1,811 | exs | Elixir | mix.exs | erikmueller/ex_admin | af8f3e23a03148d8365cc3b71286f855fb919f8c | [
"MIT"
] | null | null | null | mix.exs | erikmueller/ex_admin | af8f3e23a03148d8365cc3b71286f855fb919f8c | [
"MIT"
] | null | null | null | mix.exs | erikmueller/ex_admin | af8f3e23a03148d8365cc3b71286f855fb919f8c | [
"MIT"
] | null | null | null | defmodule ExAdmin.Mixfile do
use Mix.Project
@version "0.8.2-dev"
# Mix project definition: package metadata, compiler chain and docs settings.
def project do
  [
    app: :ex_admin,
    version: @version,
    elixir: "~> 1.2",
    elixirc_paths: elixirc_paths(Mix.env),
    compilers: [:phoenix, :gettext] ++ Mix.compilers,
    build_embedded: Mix.env == :prod,
    start_permanent: Mix.env == :prod,
    name: "ExAdmin",
    docs: [extras: ["README.md"], main: "ExAdmin"],
    deps: deps(),
    package: package(),
    description: """
    An Elixir Phoenix Auto Administration Package.
    """
  ]
end
# OTP application entry: the runtime application list depends on the Mix env.
def application do
  [applications: applications(Mix.env)]
end

# Tests also boot :plug on top of the regular runtime applications.
defp applications(:test), do: [:plug | applications(:prod)]
defp applications(_), do: [:gettext, :phoenix, :ecto, :logger, :ex_queb, :xain]

# Test support modules are compiled only in the :test environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Dependency list; :postgrex/:floki are test-only, :ex_doc/:earmark dev-only.
defp deps do
  [
    {:decimal, "~> 1.0"},
    {:phoenix, ">= 1.1.6"},
    {:ecto, "~> 2.0", override: true},
    {:phoenix_ecto, "~> 3.0.0"},
    {:postgrex, ">= 0.9.0", only: :test},
    {:floki, "~> 0.8", only: :test},
    {:cowboy, "~> 1.0"},
    {:phoenix_html, "~> 2.5"},
    {:inflex, "~> 1.7"},
    {:scrivener_ecto, github: "drewolson/scrivener_ecto"},
    {:xain, "~> 0.6"},
    {:csvlixir, "~> 1.0.0"},
    {:exactor, "~> 2.2.0"},
    {:ex_doc, "~> 0.11", only: :dev},
    {:earmark, "~> 0.1", only: :dev},
    {:ex_queb, "~> 0.1"},
    {:gettext, "~> 0.11"}
  ]
end

# Hex package metadata: maintainers, license, and the files shipped on publish.
defp package do
  [ maintainers: ["Stephen Pallen", "Roman Smirnov"],
    licenses: ["MIT"],
    links: %{ "Github" => "https://github.com/smpallen99/ex_admin" },
    files: ~w(lib priv web README.md package.json mix.exs LICENSE brunch-config.js AdminLte-LICENSE)]
end
end
| 27.029851 | 103 | 0.54169 |
1c5f5face79121c788aa55f7557d0f9b5d57333d | 57 | ex | Elixir | broadway_test/message_failed.ex | kujua/blog-kujuahub-articles-code | 1a65b1a481cbf5269a3f8846a5ac353937d45a2d | [
"Apache-2.0"
] | 2 | 2019-05-25T13:25:33.000Z | 2019-05-29T15:36:17.000Z | broadway_test/message_failed.ex | kujua/blog-kujuahub-articles-code | 1a65b1a481cbf5269a3f8846a5ac353937d45a2d | [
"Apache-2.0"
] | null | null | null | broadway_test/message_failed.ex | kujua/blog-kujuahub-articles-code | 1a65b1a481cbf5269a3f8846a5ac353937d45a2d | [
"Apache-2.0"
] | null | null | null | Message.failed(updatedmessage, "image processing failed") | 57 | 57 | 0.842105 |
1c5f5fad02cebb43e8eec83c76b1d08d45f42bf0 | 581 | exs | Elixir | lib/mix/tasks/templates/new.exs | h1u2i3/ex_wechat | 0d5dc88f3f694cac9e46dd6529a1df203d60cdf1 | [
"MIT"
] | 23 | 2016-12-25T13:38:38.000Z | 2020-11-02T15:06:10.000Z | lib/mix/tasks/templates/new.exs | h1u2i3/ex_wechat | 0d5dc88f3f694cac9e46dd6529a1df203d60cdf1 | [
"MIT"
] | 15 | 2016-11-05T17:00:18.000Z | 2019-08-28T10:00:41.000Z | lib/mix/tasks/templates/new.exs | h1u2i3/ex_wechat | 0d5dc88f3f694cac9e46dd6529a1df203d60cdf1 | [
"MIT"
] | 8 | 2017-07-04T03:44:25.000Z | 2020-11-11T08:32:39.000Z | use Mix.Config
# Fixture data for rendering a WeChat "news" reply template: a message
# envelope (from/to/type/count) plus a list of article items, each with a
# title, description, picture URL and target link.
config :data,
  fromusername: "from",
  tousername: "to",
  msgtype: "news",
  articlecount: "2",
  articles: [
    item: [
      [
        title: "title",
        description: "description",
        pic_url: "http://pic_url",
        url: "http://baidu.com"
      ],
      [
        title: "title",
        description: "description",
        pic_url: "http://pic_url",
        url: "http://baidu.com"
      ],
      [
        title: "title",
        description: "description",
        pic_url: "http://pic_url",
        url: "http://baidu.com"
      ]
    ]
  ]
| 19.366667 | 35 | 0.483649 |
1c5f62038425a96304d5a836e7e7a0d38ef16b41 | 305 | ex | Elixir | lib/memcache/application.ex | thecodeboss/memcachex | d6de70a8b93524a71dc54f736fcb79b5377718fd | [
"MIT"
] | null | null | null | lib/memcache/application.ex | thecodeboss/memcachex | d6de70a8b93524a71dc54f736fcb79b5377718fd | [
"MIT"
] | null | null | null | lib/memcache/application.ex | thecodeboss/memcachex | d6de70a8b93524a71dc54f736fcb79b5377718fd | [
"MIT"
] | 2 | 2019-03-12T13:47:13.000Z | 2019-05-29T23:10:57.000Z | defmodule Memcache.Application do
@moduledoc false
use Application
# Application callback: boots the registry under a one-for-one supervisor.
def start(_type, _args) do
  import Supervisor.Spec

  children = [worker(Memcache.Registry, [])]

  Supervisor.start_link(children, strategy: :one_for_one, name: Memcache.Supervisor)
end
end
| 17.941176 | 62 | 0.691803 |
1c5f9f82f5e7ec32e90aa5b4dcb1296f972d1579 | 2,491 | ex | Elixir | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/authentication.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/authentication.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/authentication.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: generated code — kept verbatim; only comments added.
defmodule GoogleApi.ServiceConsumerManagement.V1.Model.Authentication do
  @moduledoc """
  `Authentication` defines the authentication configuration for an API. Example for an API targeted for external use: name: calendar.googleapis.com authentication: providers: - id: google_calendar_auth jwks_uri: https://www.googleapis.com/oauth2/v1/certs issuer: https://securetoken.google.com rules: - selector: "*" requirements: provider_id: google_calendar_auth

  ## Attributes

  * `providers` (*type:* `list(GoogleApi.ServiceConsumerManagement.V1.Model.AuthProvider.t)`, *default:* `nil`) - Defines a set of authentication providers that a service supports.
  * `rules` (*type:* `list(GoogleApi.ServiceConsumerManagement.V1.Model.AuthenticationRule.t)`, *default:* `nil`) - A list of authentication rules that apply to individual API methods. **NOTE:** All service configuration rules follow "last one wins" order.
  """

  use GoogleApi.Gax.ModelBase

  # Struct shape backing the `field/3` declarations below.
  @type t :: %__MODULE__{
          :providers => list(GoogleApi.ServiceConsumerManagement.V1.Model.AuthProvider.t()) | nil,
          :rules =>
            list(GoogleApi.ServiceConsumerManagement.V1.Model.AuthenticationRule.t()) | nil
        }

  field(:providers, as: GoogleApi.ServiceConsumerManagement.V1.Model.AuthProvider, type: :list)
  field(:rules, as: GoogleApi.ServiceConsumerManagement.V1.Model.AuthenticationRule, type: :list)
end
# Poison decode hook: defers to the model's generated decoder.
defimpl Poison.Decoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.Authentication do
  def decode(value, options) do
    GoogleApi.ServiceConsumerManagement.V1.Model.Authentication.decode(value, options)
  end
end
# Poison encode hook: defers to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.Authentication do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 48.843137 | 364 | 0.763549 |
1c5fc421cd2db9aed4b775b905f7df552bc510a4 | 332 | ex | Elixir | web/controllers/summoner_controller.ex | ktornwall/elixir_conf_2016_demo | 5a0adf832773714ba7743c049964623262ee7a13 | [
"MIT"
] | 27 | 2016-09-02T19:26:16.000Z | 2017-11-30T02:27:16.000Z | web/controllers/summoner_controller.ex | katzenbar/elixir_conf_2016_demo | 5a0adf832773714ba7743c049964623262ee7a13 | [
"MIT"
] | 1 | 2016-10-23T18:24:59.000Z | 2016-10-23T18:24:59.000Z | web/controllers/summoner_controller.ex | ktornwall/elixir_conf_2016_demo | 5a0adf832773714ba7743c049964623262ee7a13 | [
"MIT"
] | 4 | 2016-10-08T02:51:41.000Z | 2017-12-05T02:02:40.000Z | defmodule ExConf.SummonerController do
use ExConf.Web, :controller
# Looks up a summoner by name and renders it, or the 404 page when not found.
def index(conn, %{"summoner" => %{"name" => name}}) do
  found = ExConf.SummonerService.get_summoner(name)

  cond do
    found -> render(conn, "index.html", summoner: found)
    true -> render(conn, ExConf.ErrorView, "404.html")
  end
end
end
| 25.538462 | 56 | 0.674699 |
1c603a35a725bb17875fbcfdf0cd5dcb24a66d7e | 453 | ex | Elixir | lib/metrix.ex | samullen/metrix | 8c5335c63db811166c46da273485df7f19e27e51 | [
"Apache-2.0"
] | 1 | 2020-07-26T14:38:10.000Z | 2020-07-26T14:38:10.000Z | lib/metrix.ex | samullen/metrix | 8c5335c63db811166c46da273485df7f19e27e51 | [
"Apache-2.0"
] | null | null | null | lib/metrix.ex | samullen/metrix | 8c5335c63db811166c46da273485df7f19e27e51 | [
"Apache-2.0"
] | null | null | null | defmodule Metrix do
# `use Metrix` imports this module so the `measure` macro is available bare.
defmacro __using__(_opts) do
  quote do
    import Metrix
  end
end
# Times `block` and emits a telemetry event named `event`.
#
# `metadata` (optional, defaults to an escaped empty map) is merged with the
# block's result under the `:response` key. The measurement map carries
# `:duration` in microseconds (`:timer.tc/1`), and the block's own return
# value is passed through unchanged.
defmacro measure(event, metadata \\ Macro.escape(%{}), do: block) do
  quote do
    case :timer.tc(fn -> unquote(block) end) do
      {time, response} ->
        metadata = Map.put(unquote(metadata), :response, response)
        :telemetry.execute(unquote(event), %{duration: time}, metadata)
        response
    end
  end
end
end
| 21.571429 | 73 | 0.609272 |
1c605c2ea6e32f44eab007133baebf86099d8c56 | 1,439 | ex | Elixir | apps/omg_watcher_rpc/lib/web/views/deposit.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher_rpc/lib/web/views/deposit.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher_rpc/lib/web/views/deposit.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.WatcherRPC.Web.View.Deposit do
@moduledoc """
The deposit view for rendering JSON.
"""
alias OMG.Utils.HttpRPC.Response
alias OMG.Utils.Paginator
alias OMG.WatcherRPC.Web.Response, as: WatcherRPCResponse
use OMG.WatcherRPC.Web, :view
def render("deposits.json", %{response: %Paginator{data: ethevents, data_paging: data_paging}}) do
  # Serialize each eth event, page the collection, then stamp service info on top.
  serialized_page =
    ethevents
    |> Enum.map(&render_ethevent/1)
    |> Response.serialize_page(data_paging)

  WatcherRPCResponse.add_app_infos(serialized_page)
end
# Renders a single eth event: its tx outputs become utxo maps, and only the
# whitelisted keys survive into the payload.
defp render_ethevent(event) do
  rendered = Map.update!(event, :txoutputs, &render_txoutputs/1)

  Map.take(rendered, [
    :eth_height,
    :event_type,
    :log_index,
    :root_chain_txhash,
    :txoutputs,
    :inserted_at,
    :updated_at
  ])
end
# Maps each tx output through the shared utxo serializer.
defp render_txoutputs(outputs), do: Enum.map(outputs, &to_utxo/1)
end
| 28.215686 | 100 | 0.714385 |
1c6061cb413fc218d59e848454e8c048ff62950c | 2,825 | ex | Elixir | lib/1000/p1000.ex | penqen/yukicoder-elixir | 4f3e9e4694a14434cc3700280e9205226434733b | [
"MIT"
] | null | null | null | lib/1000/p1000.ex | penqen/yukicoder-elixir | 4f3e9e4694a14434cc3700280e9205226434733b | [
"MIT"
] | null | null | null | lib/1000/p1000.ex | penqen/yukicoder-elixir | 4f3e9e4694a14434cc3700280e9205226434733b | [
"MIT"
] | null | null | null | defmodule P1000 do
@moduledoc """
:timer.tc(&Main.main/0)
Query
A(x, y) : A_x + y
B(x, y) : B_i + A_i (x <= i <= y)
# Examples
iex> an = [9, 8, 1, 9, 6, 10, 8]
...> |> Enum.with_index() |> Enum.reduce(%{}, &(Map.put(&2, elem(&1, 1), elem(&1, 0))))
...> P1000.solve(7, 3, an, [["B", 2, 5], ["A", 7, 9], ["B", 4, 7]])
[0, 8, 1, 18, 12, 10, 17]
iex> {n, q} = {10, 10}
...> an = [1, 4, 1, 5, 9, 2, 6, 5, 3, 5]
...> |> Enum.with_index() |> Enum.reduce(%{}, &(Map.put(&2, elem(&1, 1), elem(&1, 0))))
...> queries = [
...> ["A", 2, 7],
...> ["A", 5, 9],
...> ["B", 1, 4],
...> ["B", 6, 10],
...> ["A", 10, 3],
...> ["B", 1, 2],
...> ["A", 8, 3],
...> ["B", 4, 9],
...> ["A", 6, 2],
...> ["B", 1, 10]
...> ]
...> P1000.solve(n, q, an, queries)
[3, 33, 2, 15, 36, 8, 18, 21, 9, 13]
"""
# Entry point: reads `n q`, then the n values of A, then q queries from stdin,
# and prints the resulting B values space-separated.
def main do
  [n, q] = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
  # A is stored as a map keyed by 0-based index.
  an = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.with_index() |> Enum.reduce(%{}, &(Map.put(&2, elem(&1, 1), String.to_integer(elem(&1, 0)))))
  queries = for _ <- 0..(q-1) do
    IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(fn
      # The first token is the query kind ("A" or "B"); the rest are integers.
      v when v in ["A", "B"] -> v
      v -> String.to_integer(v)
    end)
  end
  solve(n, q, an, queries) |> Enum.join(" ") |> IO.puts()
end
@doc """
Applies `queries` to `an` (a map of 0-based index => A value) and returns the
final B values as a list of length `n`.

Supported queries (positions are 1-based, as in the problem statement):

  * `["A", x, y]` — add `y` to `A_x`
  * `["B", x, y]` — add `A_i` to `B_i` for every `i` in `x..y`
"""
def solve(n, _q, an, queries) do
  # B starts as an all-zero map over the same 0-based keys as A.
  initial_bn = Map.new(0..(n - 1), &{&1, 0})

  {_an, bn} =
    Enum.reduce(queries, {an, initial_bn}, fn
      ["A", x, y], {an, bn} ->
        {Map.update!(an, x - 1, &(&1 + y)), bn}

      ["B", x, y], {an, bn} ->
        updated =
          Enum.reduce((x - 1)..(y - 1), bn, fn i, acc -> Map.update!(acc, i, &(&1 + an[i])) end)

        {an, updated}
    end)

  # Read B back out in index order (replaces the reversed reduce-and-prepend).
  Enum.map(0..(n - 1), &bn[&1])
end
end
"""
defmodule Main do
def main do
[n, q] = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
an = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.with_index() |> Enum.reduce(%{}, &(Map.put(&2, elem(&1, 1), String.to_integer(elem(&1, 0)))))
queries = for _ <- 0..(q-1) do
IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(fn
v when v in ["A", "B"] -> v
v -> String.to_integer(v)
end)
end
solve(n, q, an, queries) |> Enum.join(" ") |> IO.puts()
end
def solve(n, _q, an, queries) do
{_, bn} = Enum.reduce(queries, {an, Enum.reduce(0..(n-1), %{}, &(Map.put(&2, &1, 0)))}, fn
["A", x, y], {an, bn} -> {Map.put(an, x - 1, an[x - 1] + y), bn}
["B", x, y], {an, bn} -> {an, Enum.reduce((x - 1)..(y - 1), bn, &(Map.put(&2, &1, bn[&1] + an[&1])))}
end)
(n-1)..0 |> Enum.reduce([], fn i, acc -> [bn[i] | acc] end)
end
end
""" | 36.217949 | 163 | 0.431504 |
1c608e6c68454f0bb86116b2d9b023460b18cb9a | 661 | exs | Elixir | test/ethereumex/counter_test.exs | InoMurko/ethereumex | f8e18f8aa2d2f1719a67c69f11486621416ac324 | [
"MIT"
] | null | null | null | test/ethereumex/counter_test.exs | InoMurko/ethereumex | f8e18f8aa2d2f1719a67c69f11486621416ac324 | [
"MIT"
] | null | null | null | test/ethereumex/counter_test.exs | InoMurko/ethereumex | f8e18f8aa2d2f1719a67c69f11486621416ac324 | [
"MIT"
] | null | null | null | defmodule Ethereumex.CounterTest do
use ExUnit.Case
alias Ethereumex.Counter
test "incrementing twice returns correct number" do
  # `assert ... ==` (instead of a bare `=` match) reports a readable diff when
  # the counter misbehaves, rather than an opaque MatchError.
  assert Counter.increment(:test_1) == 1
  assert Counter.increment(:test_1) == 2
end

test "incrementing twice and updating with a count returns correct number" do
  assert Counter.increment(:test_2) == 1
  assert Counter.increment(:test_2) == 2
  assert Counter.increment(:test_2, 2) == 4
end

test "incrementing twice, updating with a count and incrementing again returns correct number" do
  assert Counter.increment(:test_3) == 1
  assert Counter.increment(:test_3) == 2
  assert Counter.increment(:test_3, 2) == 4
  assert Counter.increment(:test_3) == 5
end
end
| 28.73913 | 99 | 0.720121 |
1c60a5757fcf04a83c9e711525e8e897c1064f44 | 4,845 | ex | Elixir | lib/esi/request.ex | johnschultz/esi | 6bc69c39e21baa8655523e71755a65516b68e60c | [
"MIT"
] | null | null | null | lib/esi/request.ex | johnschultz/esi | 6bc69c39e21baa8655523e71755a65516b68e60c | [
"MIT"
] | null | null | null | lib/esi/request.ex | johnschultz/esi | 6bc69c39e21baa8655523e71755a65516b68e60c | [
"MIT"
] | null | null | null | defmodule ESI.Request do
@enforce_keys [
:verb,
:path
]
defstruct [
:verb,
:path,
opts_schema: %{},
opts: %{}
]
@type t :: %__MODULE__{
verb: :get | :post | :put | :delete,
path: String.t(),
opts_schema: %{atom => {:body | :query, :required | :optional}},
opts: %{atom => any}
}
@typedoc """
Additional request options.
You can provide any options that the API accepts, and/or these common options:
- `datasource` -- (DEFAULT: :tranquility) — The server name you would like data from
- `user_agent` -- Client identifier
"""
@type request_opts :: [request_opt]
@type request_opt ::
{:datasource, :tranquility | :singularity} | {:user_agent, String.t()} | {atom, any}
@doc """
Add query options to a request.

An empty option list leaves the request untouched; otherwise the new options
are merged over the ones already present.
"""
@spec options(req :: ESI.Request.t(), opts :: request_opts) :: ESI.Request.t()
def options(req, []), do: req

def options(req, opts) do
  merged_opts = Map.merge(req.opts, Map.new(opts))
  %{req | opts: merged_opts}
end
@base "https://esi.evetech.net/latest"
@doc """
Run a request.
"""
@spec run(t) :: {:ok, any} | {:error, any}
def run(request) do
  # `with` falls through on non-:ok, returning the validation error unchanged.
  with :ok <- validate(request) do
    do_run(request)
  end
end
@doc """
Validate that the request is ready.

Checks that every option the schema marks `:required` is present in the
request's options; reports the missing key(s) otherwise.
"""
@spec validate(request :: t) :: :ok | {:error, String.t()}
def validate(request) do
  missing_keys =
    Enum.reduce(request.opts_schema, [], fn
      {key, {_, :required}}, acc ->
        if Map.has_key?(request.opts, key), do: acc, else: [key | acc]

      _entry, acc ->
        acc
    end)

  case missing_keys do
    [] ->
      :ok

    [only] ->
      {:error, "missing option `#{inspect(only)}`"}

    many ->
      listed = many |> Enum.map(&"`#{inspect(&1)}`") |> Enum.join(", ")
      {:error, "missing options #{listed}"}
  end
end
# Issues the HTTP call via :hackney and normalizes the response into
# `{:ok, decoded}` / `{:error, reason_string}`.
defp do_run(request) do
  encoded_opts = encode_options(request)
  url = @base <> request.path <> encoded_opts.query

  hackney_opts = [
    :with_body,
    follow_redirect: true,
    # FIX: was misspelled `recv_timout`, so hackney silently ignored it and
    # used its default receive timeout instead.
    recv_timeout: 30_000
  ]

  case :hackney.request(request.verb, url, [], encoded_opts.body, hackney_opts) do
    {:ok, code, _headers, body} when code in 200..299 ->
      Jason.decode(body)

    {:ok, 404, _, body} ->
      case Jason.decode(body) do
        {:ok, %{"error" => eve_error}} -> {:error, eve_error}
        _ -> {:error, "HTTP 404"}
      end

    {:ok, code, _, body} ->
      # FIX: previously used Poison here (Jason everywhere else) with a hard
      # match that crashed on non-JSON error bodies; decode defensively instead.
      case Jason.decode(body) do
        {:ok, %{"error" => eve_error}} -> {:error, "HTTP #{code}: #{eve_error}"}
        _ -> {:error, "HTTP #{code}"}
      end

    {:error, :timeout} ->
      {:error, "timeout"}

    # Other transport failures previously raised CaseClauseError; surface them.
    {:error, reason} ->
      {:error, "request failed: #{inspect(reason)}"}
  end
end
@spec opts_by_location(request :: t) :: %{(:body | :query) => %{atom => any}}
def opts_by_location(request) do
  # Sort each option into the location its schema entry declares; options
  # without a matching schema entry are dropped.
  Enum.reduce(request.opts, %{body: %{}, query: %{}}, fn {key, value}, acc ->
    case request.opts_schema do
      %{^key => {location, _}} -> put_in(acc, [location, key], value)
      _ -> acc
    end
  end)
end
@spec encode_options(request :: t) :: %{(:body | :query) => String.t()}
def encode_options(request) do
  %{body: body_opts, query: query_opts} = opts_by_location(request)

  %{
    body: encode_options(:body, body_opts),
    query: encode_options(:query, query_opts)
  }
end

@spec encode_options(:body | :query, opts :: map) :: String.t()
# With no options there is nothing to encode for either location.
defp encode_options(_location, opts) when map_size(opts) == 0, do: ""
# In the body, only a single option is supported; just its value is encoded.
defp encode_options(:body, opts), do: opts |> Map.values() |> hd() |> Jason.encode!()
defp encode_options(:query, opts), do: "?" <> URI.encode_query(opts)
# Lazily pages through a paginated route: each pull of the stream fetches the
# next page until an empty page (or a non-list payload) ends the stream.
def stream!(%{opts_schema: %{page: _}} = request) do
  request_fun = fn page ->
    options(request, page: page)
    |> run
  end
  # Resume from an explicitly requested page, defaulting to the first.
  first_page = Map.get(request.opts, :page, 1)
  Stream.resource(
    fn -> {request_fun, first_page} end,
    fn
      :quit ->
        {:halt, nil}
      {fun, page} ->
        case fun.(page) do
          # An empty page means we ran off the end of the data.
          {:ok, []} ->
            {[], :quit}
          {:ok, data} when is_list(data) ->
            {data, {fun, page + 1}}
          # Non-list payloads are not paginated; emit once and stop.
          {:ok, data} ->
            {[data], :quit}
          {:error, err} ->
            raise err
        end
    end,
    & &1
  )
end
# Non-paginated fallback: emits the single response (wrapped in a list) and halts.
def stream!(request) do
  emit_once = fn
    :quit ->
      {:halt, nil}

    req ->
      case run(req) do
        {:ok, data} -> {List.wrap(data), :quit}
        {:error, err} -> raise err
      end
  end

  Stream.resource(fn -> request end, emit_once, & &1)
end
end
| 23.634146 | 94 | 0.518473 |
1c60b46c7f9ff2dabcef20dbccc8ddb116c9f898 | 3,306 | exs | Elixir | test/rdb_parser_test.exs | joshuawscott/rdb_parser | 388f0c0b05a29dfb1fbdfad8916cecb71d9897f6 | [
"MIT"
] | null | null | null | test/rdb_parser_test.exs | joshuawscott/rdb_parser | 388f0c0b05a29dfb1fbdfad8916cecb71d9897f6 | [
"MIT"
] | null | null | null | test/rdb_parser_test.exs | joshuawscott/rdb_parser | 388f0c0b05a29dfb1fbdfad8916cecb71d9897f6 | [
"MIT"
] | null | null | null | defmodule RdbParserTest do
use ExUnit.Case, async: false
import RdbParserTest.Support
# Each test gets a fresh Redix connection against an emptied Redis, with an
# initial RDB snapshot written so `parse_rdb/0` always has a file to read.
setup do
  {:ok, redis} = Redix.start_link()
  Redix.command(redis, ["flushall"])
  save(redis)
  %{
    redis: redis
  }
end

test "parsing a simple string", %{redis: redis} do
  Redix.command(redis, ["SET", "mykey", "myvalue"])
  save(redis)
  entries = parse_rdb()
  # Values parse to `{value, metadata}`; a plain string carries no metadata.
  assert %{"mykey" => {"myvalue", []}} = entries
end

test "parsing a string with an expire", %{redis: redis} do
  # earlier than expiration
  beginning = get_milliseconds() + 60_000
  Redix.command(redis, ["SET", "mykey", "myvalue", "EX", "60"])
  save(redis)
  entries = parse_rdb()
  # later than expiration
  ending = get_milliseconds() + 60_000
  # The parsed absolute expiry must fall between the two wall-clock bounds.
  %{"mykey" => {"myvalue", [expire_ms: expiration]}} = entries
  assert beginning <= expiration
  assert ending >= expiration
end

test "parsing a compressed string", %{redis: redis} do
  # long repeating strings are compressed in the dump
  key = String.duplicate("ab", 100)
  value = String.duplicate("ba", 100)
  Redix.command(redis, ["SET", key, value])
  save(redis)
  entries = parse_rdb()
  assert Map.has_key?(entries, key)
  assert {value, []} == entries[key]
end
# One compile-time generated test per integer, probing sign and size
# boundaries (around +/-255/256 and +/-65_535/65_536, plus small values)
# of the dump's integer encodings.
for integer <- [
      -65_537,
      -65_536,
      -65_535,
      -257,
      -256,
      -255,
      -14,
      -13,
      -12,
      -1,
      0,
      1,
      12,
      13,
      255,
      256,
      65_535,
      65_536
    ] do
  test "parsing integer #{integer}", %{redis: redis} do
    # `unquote` pins the compile-time value into this generated test body.
    int = unquote(integer)
    Redix.command(redis, ["SET", "mykey", int])
    save(redis)
    entries = parse_rdb()
    %{"mykey" => {got_int, []}} = entries
    assert int == got_int
  end
end
test "parsing a list", %{redis: redis} do
  original_list = ["AAAAAAAAAAA", "AAAAAAAAAA", "A"]
  add_key(redis, "mylist", original_list)
  save(redis)
  entries = parse_rdb()
  # Element order must be preserved exactly as inserted.
  %{"mylist" => {parsed_list, []}} = entries
  assert original_list == parsed_list
end

test "parsing a mix of keys", %{redis: redis} do
  # earlier than expiration
  beginning = get_milliseconds() + 60_000
  Redix.command(redis, ["set", "mykey", "myval"])
  Redix.command(redis, ["set", "myexpkey", "myexpval", "ex", "60"])
  Redix.command(redis, ["sadd", "myset", "one"])
  Redix.command(redis, ["sadd", "myset", "two"])
  save(redis)
  # later than expiration
  ending = get_milliseconds() + 60_000
  entries = parse_rdb()
  # One plain string, one string with expiry metadata, one set — all in one dump.
  %{
    "mykey" => {"myval", []},
    "myexpkey" => {"myexpval", [expire_ms: expire_ms]},
    "myset" => {myset, []}
  } = entries
  assert expire_ms >= beginning
  assert expire_ms <= ending
  assert MapSet.member?(myset, "one")
  assert MapSet.member?(myset, "two")
end

test "parsing many keys", %{redis: redis} do
  # Bulk sanity check over a large keyspace.
  Enum.each(1..10_000, fn n ->
    {:ok, "OK"} = Redix.command(redis, ["SET", "mykey#{n}", "myval#{n}"])
  end)
  {:ok, _} = Redix.command(redis, ["SAVE"])
  entries = parse_rdb()
  assert 10_000 == map_size(entries)
  Enum.each(1..10_000, fn n ->
    key = "mykey#{n}"
    expected = "myval#{n}"
    %{^key => {val, []}} = entries
    assert expected == val
  end)
end
end
| 22.8 | 75 | 0.565033 |
1c60d6ebbbe298cff5acfdd0ac01a4664338ba9c | 1,292 | ex | Elixir | apps/tai/lib/tai/commander/venues.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 276 | 2018-01-16T06:36:06.000Z | 2021-03-20T21:48:01.000Z | apps/tai/lib/tai/commander/venues.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 78 | 2020-10-12T06:21:43.000Z | 2022-03-28T09:02:00.000Z | apps/tai/lib/tai/commander/venues.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | 43 | 2018-06-09T09:54:51.000Z | 2021-03-07T07:35:17.000Z | defmodule Tai.Commander.Venues do
@type venue :: Tai.Venue.t()
@type store_id :: Tai.Venues.VenueStore.store_id()
@type store_id_opt :: {:store_id, store_id}
@type where_opt :: {:where, [{atom, term}]}
@type order_opt :: {:order, [atom]}
@type opt :: store_id_opt | where_opt | order_opt
@default_filters []
@default_order [:id]
@default_store_id Tai.Venues.VenueStore.default_store_id()
@spec get([opt]) :: [venue]
def get(options) do
  # Missing options fall back to the module-level defaults.
  venues =
    options
    |> Keyword.get(:store_id, @default_store_id)
    |> Tai.Venues.VenueStore.all()
    |> Enum.map(&to_instance/1)

  venues
  |> Enumerati.filter(Keyword.get(options, :where, @default_filters))
  |> Enumerati.order(Keyword.get(options, :order, @default_order))
end
# Projects a stored venue into a `Tai.Venues.Instance`, attaching its
# status at read time.
defp to_instance(venue) do
  struct!(Tai.Venues.Instance,
    id: venue.id,
    adapter: venue.adapter,
    channels: venue.channels,
    products: venue.products,
    accounts: venue.accounts,
    credentials: venue.credentials,
    quote_depth: venue.quote_depth,
    timeout: venue.timeout,
    start_on_boot: venue.start_on_boot,
    broadcast_change_set: venue.broadcast_change_set,
    opts: venue.opts,
    status: Tai.Venues.Status.status(venue)
  )
end
| 30.046512 | 65 | 0.678019 |
1c60dbe5bbb9c8091ddddcb5939a8f244d9d42eb | 5,523 | ex | Elixir | lib/tus_storage_s3.ex | gullitmiranda/tus-storage-s3 | 3c5f1a92ce3f0954673cab5acc566e948d13629b | [
"BSD-3-Clause"
] | null | null | null | lib/tus_storage_s3.ex | gullitmiranda/tus-storage-s3 | 3c5f1a92ce3f0954673cab5acc566e948d13629b | [
"BSD-3-Clause"
] | null | null | null | lib/tus_storage_s3.ex | gullitmiranda/tus-storage-s3 | 3c5f1a92ce3f0954673cab5acc566e948d13629b | [
"BSD-3-Clause"
] | null | null | null | defmodule Tus.Storage.S3 do
@moduledoc """
S3 (or compatible) storage backend for the [Tus server](https://hex.pm/packages/tus)
## Installation
The package can be installed by adding `tus_storage_s3` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:tus, "~> 0.1.1"},
{:tus_storage_s3, "~> 0.1.0"},
]
end
```
## Configuration
- `storage`: Set it as `Tus.Storage.S3`
- `s3_bucket`: The name of your bucket
- `s3_host`: Optional. "s3.amazonaws.com" by default
- `s3_virtual_host`: Optional. false by default
- `s3_prefix`: Optional. Prefix added to all files. Empty by default
- `s3_min_part_size`: The minimum size of a single part (except the last).
In Amazon S3 this is 5MB. For other, compatible services, you might want/need to
change this restriction.
In order to allow this backend to function properly, the user accessing the bucket must have at least the
following AWS IAM policy permissions for the bucket and all of its subresources:
```
s3:AbortMultipartUpload
s3:DeleteObject
s3:GetObject
s3:ListMultipartUploadParts
s3:PutObject
```
Furthermore, this uses the ExAWS package, so you'll need to add valid AWS keys to its config.
```elixir
config :ex_aws,
access_key_id: [{:system, "AWS_ACCESS_KEY_ID"}, :instance_role],
secret_access_key: [{:system, "AWS_SECRET_ACCESS_KEY"}, :instance_role]
```
This means it will try to resolve credentials in this order
- a. Look for the AWS standard AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables
- b. Resolve credentials with IAM
Consult the (ExAWS documentation)[https://hexdocs.pm/ex_aws/ExAws.html#module-aws-key-configuration] for more details.
"""
alias ExAws.S3
@default_host "s3.amazonaws.com"
@default_min_part_size 5 * 1024 * 1024
@doc """
Returns the storage key for `uid`: the configured prefix joined with the
(optionally sliced) uid path.
"""
def file_path(uid, config) do
  Path.join(base_path(config), slice_path(uid, config))
end

@doc """
Returns the public URL for `uid`, percent-encoded.
"""
def url(uid, config) do
  full_path = Path.join(host_url(config), file_path(uid, config))
  URI.encode(full_path)
end

@doc """
Returns the configured `:s3_prefix` stripped of leading/trailing slashes
(empty string when unset).
"""
def base_path(config) do
  Map.get(config, :s3_prefix, "") |> String.trim("/")
end

# With `slice_path: true`, fan files out under the uid's first three
# characters ("abcdef" -> "a/b/c/abcdef"); otherwise the uid is used as-is.
def slice_path(uid, %{slice_path: true} = _config) do
  leading_chars = uid |> String.split("") |> Enum.slice(1, 3)
  Path.join(leading_chars ++ [uid])
end

def slice_path(uid, _config), do: uid
# Storage methods
defp host(config), do: Map.get(config, :s3_host, @default_host)

defp host_url(config) do
  # `:s3_host` may be a literal URL or `{:system, "ENV_VAR"}` resolved at runtime.
  case Map.get(config, :s3_host, default_host(config)) do
    {:system, env_var} when is_binary(env_var) -> System.get_env(env_var)
    url -> url
  end
end

defp default_host(%{s3_bucket: s3_bucket} = config) do
  # Virtual-host style puts the bucket in the hostname; path style appends it.
  if Map.get(config, :s3_virtual_host, false) == true do
    "https://#{s3_bucket}.s3.amazonaws.com"
  else
    "https://s3.amazonaws.com/#{s3_bucket}"
  end
end

defp min_part_size(config), do: Map.get(config, :s3_min_part_size, @default_min_part_size)

# True when this chunk reaches (or passes) the end of the whole upload.
defp last_part?(file, part_size), do: file.offset + part_size >= file.size

# A part is rejected as too small when it is under the minimum size, does not
# finish the upload, and fewer than `min_size` bytes remain from this offset.
defp part_too_small?(file, config, part_size) do
  min_size = min_part_size(config)

  not last_part?(file, part_size) and part_size < min_size and
    file.offset + min_size > file.size
end
@doc """
Starts a [Multipart Upload](http://docs.aws.amazon.com/AmazonS3/latest/dev/uploadobjusingmpu.html)
and records its `upload_id` and storage path on the file.
"""
def create(file, config) do
  path = file_path(file.uid, config)
  upload_spec = %{bucket: config.s3_bucket, path: path, opts: [], upload_id: nil}

  case S3.Upload.initialize(upload_spec, host: host(config)) do
    {:ok, rs} -> %Tus.File{file | upload_id: rs.upload_id, path: path}
    err -> {:error, err}
  end
end

@doc """
Adds data to an already started [Multipart Upload](http://docs.aws.amazon.com/AmazonS3/latest/dev/uploadobjusingmpu.html)
(identified by `file.upload_id`).

Amazon requires every part except the last to be at least 5MB; undersized
parts are rejected with `:too_small`. That limit can be customized with the
`s3_min_part_size` config option.
"""
def append(file, config, body) do
  part_size = byte_size(body)

  case part_too_small?(file, config, part_size) do
    true -> :too_small
    false -> append_data(file, config, body, part_size)
  end
end
  # Uploads one part and records its `{part_id, etag}` pair, which S3 requires
  # when the multipart upload is completed.
  defp append_data(file, config, body, part_size) do
    # Part numbers are 1-based; derived from the offset assuming every prior
    # part was exactly `min_part_size` bytes.
    part_id = div(file.offset, min_part_size(config)) + 1
    config.s3_bucket
    |> S3.upload_part(file.path, file.upload_id, part_id, body, "Content-Length": part_size)
    |> ExAws.request(host: host(config))
    |> case do
      {:ok, %{headers: headers}} ->
        # Header names are matched case-insensitively since S3 may vary casing.
        {_, etag} = Enum.find(headers, fn {k, _v} -> String.downcase(k) == "etag" end)
        file = %Tus.File{file | parts: file.parts ++ [{part_id, etag}]}
        {:ok, file}
      error ->
        {:error, error}
    end
  end
@doc """
Finish a Multipart Upload
"""
def complete_upload(file, config) do
config.s3_bucket
|> ExAws.S3.complete_multipart_upload(file.path, file.upload_id, file.parts)
|> ExAws.request(host: host(config))
end
@doc """
Delete an uploaded object
"""
def delete(file, config) do
""
|> ExAws.S3.delete_object(file_path(file.uid, config))
|> ExAws.request(host: host(config))
end
end
| 26.81068 | 122 | 0.659967 |
1c612a6be9e7e8e04f666654a88ff2a6c875e06b | 1,128 | exs | Elixir | test/liblink/data/cluster/policies_test.exs | Xerpa/liblink | 7b983431c5b391bb8cf182edd9ca4937601eea35 | [
"Apache-2.0"
] | 3 | 2018-10-26T12:55:15.000Z | 2019-05-03T22:41:34.000Z | test/liblink/data/cluster/policies_test.exs | Xerpa/liblink | 7b983431c5b391bb8cf182edd9ca4937601eea35 | [
"Apache-2.0"
] | 4 | 2018-08-26T14:43:57.000Z | 2020-09-23T21:14:56.000Z | test/liblink/data/cluster/policies_test.exs | Xerpa/liblink | 7b983431c5b391bb8cf182edd9ca4937601eea35 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 (c) Xerpa
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Liblink.Data.Cluster.PoliciesTest do
  use ExUnit.Case, async: true

  alias Liblink.Data.Cluster.Policies

  describe "new" do
    test "default" do
      # With no arguments the struct carries the documented defaults, and an
      # empty option list is equivalent to passing nothing.
      expected = %Policies{retries: 3, send_timeout_in_ms: 1_000, recv_timeout_in_ms: 1_000}

      assert Policies.new!() == expected
      assert Policies.new!([]) == Policies.new!()
    end

    test "success" do
      policies = Policies.new!(retries: 0, send_timeout_in_ms: 1, recv_timeout_in_ms: 2)

      assert policies == %Policies{retries: 0, send_timeout_in_ms: 1, recv_timeout_in_ms: 2}
    end
  end
end
| 33.176471 | 91 | 0.712766 |
1c615c6e00839dcf226e737918d8dfb1f1237c37 | 145 | exs | Elixir | test/fixtures/multiple_tables_single_rows_single_columns.exs | bcardarella/elixir-fixtures | 2c49e0af3713bd912dc7bb639324da423d155299 | [
"MIT"
] | 139 | 2016-02-07T12:28:18.000Z | 2022-02-15T11:39:58.000Z | test/fixtures/multiple_tables_single_rows_single_columns.exs | bcardarella/elixir-fixtures | 2c49e0af3713bd912dc7bb639324da423d155299 | [
"MIT"
] | 17 | 2016-05-16T18:59:26.000Z | 2020-09-11T16:53:53.000Z | test/fixtures/multiple_tables_single_rows_single_columns.exs | bcardarella/elixir-fixtures | 2c49e0af3713bd912dc7bb639324da423d155299 | [
"MIT"
] | 10 | 2016-02-23T09:25:09.000Z | 2020-02-29T14:21:21.000Z | owners model: Owner, repo: Base do
brian do
name "Brian"
end
end
pets model: Pet, repo: Base do
boomer do
name "Boomer"
end
end
| 12.083333 | 34 | 0.648276 |
1c615f02e28728c12d92eae2744ba44308dc8702 | 71 | exs | Elixir | test/test_helper.exs | IE-bourgeois-union/ie_chat_bot | c0f155288170f558ff26bfc5cbd37f777c79a05b | [
"MIT"
] | null | null | null | test/test_helper.exs | IE-bourgeois-union/ie_chat_bot | c0f155288170f558ff26bfc5cbd37f777c79a05b | [
"MIT"
] | null | null | null | test/test_helper.exs | IE-bourgeois-union/ie_chat_bot | c0f155288170f558ff26bfc5cbd37f777c79a05b | [
"MIT"
] | null | null | null | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(IeChatBot.Repo, :manual)
| 23.666667 | 55 | 0.788732 |
1c61d586d8bcf81dec94fa4bdbf880833211ce07 | 184 | exs | Elixir | test/controllers/page_controller_test.exs | smoynes/get_a_job_ex | 13b17562f48561803ee4635342c345d28cffe27d | [
"MIT"
] | 1 | 2020-06-10T11:19:17.000Z | 2020-06-10T11:19:17.000Z | test/controllers/page_controller_test.exs | smoynes/get_a_job_ex | 13b17562f48561803ee4635342c345d28cffe27d | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | smoynes/get_a_job_ex | 13b17562f48561803ee4635342c345d28cffe27d | [
"MIT"
] | null | null | null | defmodule GetAJobEx.PageControllerTest do
use GetAJobEx.ConnCase
test "GET /" do
conn = get conn(), "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 20.444444 | 60 | 0.684783 |
1c61e7cb1e5fd520abfa0069149028f97372282a | 1,319 | exs | Elixir | test/serialization/json_serializer_test.exs | edwardzhou/commanded | f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d | [
"MIT"
] | 1,220 | 2017-10-31T10:56:40.000Z | 2022-03-31T17:40:19.000Z | test/serialization/json_serializer_test.exs | edwardzhou/commanded | f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d | [
"MIT"
] | 294 | 2017-11-03T10:33:41.000Z | 2022-03-24T08:36:42.000Z | test/serialization/json_serializer_test.exs | edwardzhou/commanded | f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d | [
"MIT"
] | 208 | 2017-11-03T10:56:47.000Z | 2022-03-14T05:49:38.000Z | defmodule Commanded.Serialization.JsonSerializerTest do
use ExUnit.Case
alias Commanded.Serialization.JsonSerializer
alias Commanded.ExampleDomain.BankAccount.Events.BankAccountOpened
@serialized_event_json "{\"account_number\":\"ACC123\",\"initial_balance\":1000}"
test "should serialize event to JSON" do
account_opened = %BankAccountOpened{account_number: "ACC123", initial_balance: 1_000}
assert JsonSerializer.serialize(account_opened) == @serialized_event_json
end
test "should deserialize event from JSON" do
account_opened = %BankAccountOpened{account_number: "ACC123", initial_balance: 1_000}
type = Atom.to_string(account_opened.__struct__)
assert JsonSerializer.deserialize(@serialized_event_json, type: type) == account_opened
end
  # Minimal event structs used only to exercise deserialization by
  # module name in the test below.
  defmodule NamedEvent do
    defstruct [:data]
  end
  defmodule AnotherNamedEvent do
    defstruct [:data]
  end
test "should deserialize to event type module name" do
assert %NamedEvent{data: "data"} ==
JsonSerializer.deserialize("{\"data\": \"data\"}",
type: "#{__MODULE__}.NamedEvent"
)
assert %AnotherNamedEvent{data: "data"} ==
JsonSerializer.deserialize("{\"data\": \"data\"}",
type: "#{__MODULE__}.AnotherNamedEvent"
)
end
end
| 31.404762 | 91 | 0.703563 |
1c61f189c8a9b6e031258d1df89be71bb2b181f1 | 603 | ex | Elixir | Microsoft.Azure.Management.Subscription/lib/microsoft/azure/management/subscription/model/tenant_id_description.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Subscription/lib/microsoft/azure/management/subscription/model/tenant_id_description.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Subscription/lib/microsoft/azure/management/subscription/model/tenant_id_description.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# Auto-generated model (swagger-codegen); field atoms mirror the API's JSON
# keys exactly, hence the quoted :"tenantId".
defmodule Microsoft.Azure.Management.Subscription.Model.TenantIdDescription do
  @moduledoc """
  Tenant Id information.
  """
  # Derives Poison encoding so the struct serializes straight back to JSON.
  @derive [Poison.Encoder]
  defstruct [
    :"id",
    :"tenantId"
  ]
  @type t :: %__MODULE__{
    :"id" => String.t,
    :"tenantId" => String.t
  }
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Subscription.Model.TenantIdDescription do
  # Identity decode: all fields are plain strings, so the struct produced by
  # Poison needs no post-processing.
  def decode(value, _options) do
    value
  end
end
| 21.535714 | 97 | 0.704809 |
1c621a5dff4f8125d43aedfd98c3c2f67be11dd3 | 16,915 | ex | Elixir | lib/livebook/runtime/erl_dist/runtime_server.ex | naritomo08/livebook | baccc964dbffe86974cc06a1ebc7ce2a7ca381df | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/erl_dist/runtime_server.ex | naritomo08/livebook | baccc964dbffe86974cc06a1ebc7ce2a7ca381df | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/erl_dist/runtime_server.ex | naritomo08/livebook | baccc964dbffe86974cc06a1ebc7ce2a7ca381df | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Runtime.ErlDist.RuntimeServer do
@moduledoc false
# A server process backing a specific runtime.
#
# This process handles `Livebook.Runtime` operations,
# like evaluation and completion. It spawns/terminates
# individual evaluators corresponding to evaluation
# containers as necessary.
#
# Every runtime server must have an owner process,
# to which the server lifetime is bound.
#
# For more specification see `Livebook.Runtime`.
use GenServer, restart: :temporary
require Logger
alias Livebook.Runtime.Evaluator
alias Livebook.Runtime
alias Livebook.Runtime.ErlDist
@await_owner_timeout 5_000
@memory_usage_interval 15_000
@doc """
Starts the manager.
Note: make sure to call `attach` within #{@await_owner_timeout}ms
or the runtime server assumes it's not needed and terminates.
## Options
* `:smart_cell_definitions_module` - the module to read smart
cell definitions from, it needs to export a `definitions/0`
function. Defaults to `Kino.SmartCell`
"""
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts)
end
@doc """
Sets the owner process.
The owner process is monitored and as soon as it terminates,
the server also terminates. All the evaluation results are
send directly to the owner.
## Options
See `Livebook.Runtime.connect/2` for the list of available
options.
"""
@spec attach(pid(), pid(), keyword()) :: :ok
def attach(pid, owner, opts \\ []) do
GenServer.cast(pid, {:attach, owner, opts})
end
@doc """
Evaluates the given code using an `Livebook.Runtime.Evaluator`
process belonging to the given container and instructs
it to send all the outputs to the owner process.
If no evaluator exists for the given container, a new
one is started.
See `Livebook.Runtime.Evaluator` for more details.
"""
@spec evaluate_code(pid(), String.t(), Runtime.locator(), Runtime.locator(), keyword()) :: :ok
def evaluate_code(pid, code, locator, base_locator, opts \\ []) do
GenServer.cast(pid, {:evaluate_code, code, locator, base_locator, opts})
end
@doc """
Removes the specified evaluation from the history.
See `Livebook.Runtime.Evaluator` for more details.
"""
@spec forget_evaluation(pid(), Runtime.locator()) :: :ok
def forget_evaluation(pid, locator) do
GenServer.cast(pid, {:forget_evaluation, locator})
end
@doc """
Terminates the `Livebook.Runtime.Evaluator` process that belongs
to the given container.
"""
@spec drop_container(pid(), Runtime.container_ref()) :: :ok
def drop_container(pid, container_ref) do
GenServer.cast(pid, {:drop_container, container_ref})
end
@doc """
Asynchronously sends an intellisense request to the server.
Completions are forwarded to `Livebook.Runtime.Evaluator` process
that belongs to the given container. If there's no evaluator,
there's also no binding and environment, so a generic
completion is handled by a temporary process.
See `Livebook.Runtime` for more details.
"""
@spec handle_intellisense(
pid(),
pid(),
reference(),
Runtime.intellisense_request(),
Runtime.locator()
) :: :ok
def handle_intellisense(pid, send_to, ref, request, base_locator) do
GenServer.cast(pid, {:handle_intellisense, send_to, ref, request, base_locator})
end
@doc """
Reads file at the given absolute path within the runtime
file system.
"""
@spec read_file(pid(), String.t()) :: {:ok, binary()} | {:error, String.t()}
def read_file(pid, path) do
{result_ref, task_pid} = GenServer.call(pid, {:read_file, path})
monitor_ref = Process.monitor(task_pid)
receive do
{:result, ^result_ref, result} ->
result
{:DOWN, ^monitor_ref, :process, _object, _reason} ->
{:error, "unexpected termination"}
end
end
@doc """
Starts a new smart cell.
"""
@spec start_smart_cell(
pid(),
String.t(),
Runtime.smart_cell_ref(),
Runtime.smart_cell_attrs(),
Runtime.locator()
) :: :ok
def start_smart_cell(pid, kind, ref, attrs, base_locator) do
GenServer.cast(pid, {:start_smart_cell, kind, ref, attrs, base_locator})
end
@doc """
Updates the locator with smart cell context.
"""
@spec set_smart_cell_base_locator(pid(), Runtime.smart_cell_ref(), Runtime.locator()) :: :ok
def set_smart_cell_base_locator(pid, ref, base_locator) do
GenServer.cast(pid, {:set_smart_cell_base_locator, ref, base_locator})
end
@doc """
Stops the given smart cell.
"""
@spec stop_smart_cell(pid(), String.t()) :: :ok
def stop_smart_cell(pid, ref) do
GenServer.cast(pid, {:stop_smart_cell, ref})
end
@doc """
Stops the manager.
This results in all Livebook-related modules being unloaded
from the runtime node.
"""
@spec stop(pid()) :: :ok
def stop(pid) do
GenServer.stop(pid)
end
  @impl true
  def init(opts) do
    # Self-destruct unless an owner attaches within @await_owner_timeout
    # (checked by the :check_owner handler below).
    Process.send_after(self(), :check_owner, @await_owner_timeout)
    schedule_memory_usage_report()
    # Long-lived collaborators: evaluator supervision, ad-hoc tasks and
    # tracking of evaluation-produced objects.
    {:ok, evaluator_supervisor} = ErlDist.EvaluatorSupervisor.start_link()
    {:ok, task_supervisor} = Task.Supervisor.start_link()
    {:ok, object_tracker} = Livebook.Runtime.Evaluator.ObjectTracker.start_link()
    {:ok,
     %{
       owner: nil,
       runtime_broadcast_to: nil,
       # container_ref => evaluator
       evaluators: %{},
       evaluator_supervisor: evaluator_supervisor,
       task_supervisor: task_supervisor,
       object_tracker: object_tracker,
       # Smart cell infrastructure is only started once an owner attaches.
       smart_cell_supervisor: nil,
       smart_cell_gl: nil,
       smart_cells: %{},
       smart_cell_definitions: [],
       smart_cell_definitions_module:
         Keyword.get(opts, :smart_cell_definitions_module, Kino.SmartCell),
       memory_timer_ref: nil
     }}
  end
  @impl true
  def handle_info(:check_owner, state) do
    # If no owner has been set within @await_owner_timeout
    # from the start, terminate the process.
    if state.owner do
      {:noreply, state}
    else
      {:stop, :no_owner, state}
    end
  end

  # The owner went down: the runtime server's lifetime is bound to it.
  def handle_info({:DOWN, _, :process, owner, _}, %{owner: owner} = state) do
    {:stop, :shutdown, state}
  end

  # Any other :DOWN may be a crashed evaluator or a scan-binding worker;
  # both helpers ignore messages that are not theirs.
  def handle_info({:DOWN, _, :process, _, _} = message, state) do
    {:noreply,
     state
     |> handle_down_evaluator(message)
     |> handle_down_scan_binding(message)}
  end

  def handle_info({:evaluation_finished, locator}, state) do
    {:noreply,
     state
     |> report_smart_cell_definitions()
     |> scan_binding_after_evaluation(locator)}
  end

  # Periodic memory report to the owner; reschedules itself.
  def handle_info(:memory_usage, state) do
    report_memory_usage(state)
    schedule_memory_usage_report()
    {:noreply, state}
  end

  def handle_info({:scan_binding_ack, ref}, state) do
    {:noreply, finish_scan_binding(ref, state)}
  end

  # Ignore anything else so stray messages don't crash the server.
  def handle_info(_message, state), do: {:noreply, state}
  # If the dead pid is one of our evaluators, notify the owner that its
  # container went down and drop the evaluator from state.
  defp handle_down_evaluator(state, {:DOWN, _, :process, pid, reason}) do
    state.evaluators
    |> Enum.find(fn {_container_ref, evaluator} -> evaluator.pid == pid end)
    |> case do
      {container_ref, _} ->
        message = Exception.format_exit(reason)
        send(state.owner, {:runtime_container_down, container_ref, message})
        %{state | evaluators: Map.delete(state.evaluators, container_ref)}
      nil ->
        state
    end
  end

  # If the monitor ref belongs to a smart cell's in-flight binding scan,
  # finalize that scan (possibly restarting a pending one).
  defp handle_down_scan_binding(state, {:DOWN, monitor_ref, :process, _, _}) do
    Enum.find_value(state.smart_cells, fn
      {ref, %{scan_binding_monitor_ref: ^monitor_ref}} -> ref
      _ -> nil
    end)
    |> case do
      nil -> state
      ref -> finish_scan_binding(ref, state)
    end
  end
  ## Server callbacks

  @impl true
  def handle_cast({:attach, owner, opts}, state) do
    if state.owner do
      raise "runtime owner has already been configured"
    end
    # Bind our lifetime to the owner and start the smart cell machinery,
    # whose IO is routed through a dedicated group leader.
    Process.monitor(owner)
    state = %{state | owner: owner, runtime_broadcast_to: opts[:runtime_broadcast_to]}
    state = report_smart_cell_definitions(state)
    report_memory_usage(state)
    {:ok, smart_cell_supervisor} = DynamicSupervisor.start_link(strategy: :one_for_one)
    {:ok, smart_cell_gl} = ErlDist.SmartCellGL.start_link(state.runtime_broadcast_to)
    Process.group_leader(smart_cell_supervisor, smart_cell_gl)
    {:noreply,
     %{state | smart_cell_supervisor: smart_cell_supervisor, smart_cell_gl: smart_cell_gl}}
  end

  def handle_cast(
        {:evaluate_code, code, {container_ref, evaluation_ref} = locator, base_locator, opts},
        state
      ) do
    state = ensure_evaluator(state, container_ref)
    base_evaluation_ref =
      case base_locator do
        {^container_ref, evaluation_ref} ->
          # Base lives in the same container: evaluate on top of it directly.
          evaluation_ref
        {parent_container_ref, evaluation_ref} ->
          # Base lives in another container: copy its context into this
          # evaluator, then evaluate with no base (nil).
          Evaluator.initialize_from(
            state.evaluators[container_ref],
            state.evaluators[parent_container_ref],
            evaluation_ref
          )
          nil
      end
    {smart_cell_ref, opts} = Keyword.pop(opts, :smart_cell_ref)
    smart_cell_info = smart_cell_ref && state.smart_cells[smart_cell_ref]
    myself = self()
    # After evaluation, optionally feed the result to the originating smart
    # cell, then notify ourselves so binding scans can run.
    opts =
      Keyword.put(opts, :on_finish, fn result ->
        with %{scan_eval_result: scan_eval_result} when scan_eval_result != nil <- smart_cell_info do
          try do
            smart_cell_info.scan_eval_result.(smart_cell_info.pid, result)
          rescue
            error -> Logger.error("scanning evaluation result raised an error: #{inspect(error)}")
          end
        end
        send(myself, {:evaluation_finished, locator})
      end)
    Evaluator.evaluate_code(
      state.evaluators[container_ref],
      code,
      evaluation_ref,
      base_evaluation_ref,
      opts
    )
    {:noreply, state}
  end

  def handle_cast({:forget_evaluation, {container_ref, evaluation_ref}}, state) do
    with {:ok, evaluator} <- Map.fetch(state.evaluators, container_ref) do
      Evaluator.forget_evaluation(evaluator, evaluation_ref)
    end
    {:noreply, state}
  end

  def handle_cast({:drop_container, container_ref}, state) do
    state = discard_evaluator(state, container_ref)
    {:noreply, state}
  end

  def handle_cast({:handle_intellisense, send_to, ref, request, base_locator}, state) do
    {container_ref, evaluation_ref} = base_locator
    evaluator = state.evaluators[container_ref]
    # Formatting requests (and requests with no evaluator) use an empty,
    # generic context instead of a real binding/env.
    intellisense_context =
      if evaluator == nil or elem(request, 0) in [:format] do
        Evaluator.intellisense_context()
      else
        Evaluator.intellisense_context(evaluator, evaluation_ref)
      end
    # Handled in a task so a slow request never blocks the server.
    Task.Supervisor.start_child(state.task_supervisor, fn ->
      response = Livebook.Intellisense.handle_request(request, intellisense_context)
      send(send_to, {:runtime_intellisense_response, ref, request, response})
    end)
    {:noreply, state}
  end

  def handle_cast({:start_smart_cell, kind, ref, attrs, base_locator}, state) do
    definition = Enum.find(state.smart_cell_definitions, &(&1.kind == kind))
    state =
      # Smart cell start_link is expected to return {:ok, pid, info}; any
      # other result leaves state untouched.
      case DynamicSupervisor.start_child(
             state.smart_cell_supervisor,
             {definition.module, %{ref: ref, attrs: attrs, target_pid: state.owner}}
           ) do
        {:ok, pid, info} ->
          %{
            js_view: js_view,
            source: source,
            scan_binding: scan_binding,
            scan_eval_result: scan_eval_result
          } = info
          send(
            state.owner,
            {:runtime_smart_cell_started, ref, %{js_view: js_view, source: source}}
          )
          info = %{
            pid: pid,
            scan_binding: scan_binding,
            base_locator: base_locator,
            scan_binding_pending: false,
            scan_binding_monitor_ref: nil,
            scan_eval_result: scan_eval_result
          }
          # Kick off an initial binding scan for the new cell.
          info = scan_binding_async(ref, info, state)
          put_in(state.smart_cells[ref], info)
        _ ->
          state
      end
    {:noreply, state}
  end

  def handle_cast({:set_smart_cell_base_locator, ref, base_locator}, state) do
    # Only rescan when the locator actually changed.
    state =
      update_in(state.smart_cells[ref], fn
        %{base_locator: ^base_locator} = info -> info
        info -> scan_binding_async(ref, %{info | base_locator: base_locator}, state)
      end)
    {:noreply, state}
  end

  def handle_cast({:stop_smart_cell, ref}, state) do
    {%{pid: pid}, state} = pop_in(state.smart_cells[ref])
    if pid do
      DynamicSupervisor.terminate_child(state.smart_cell_supervisor, pid)
    end
    {:noreply, state}
  end

  @impl true
  def handle_call({:read_file, path}, {from_pid, _}, state) do
    # Delegate reading to a separate task and let the caller
    # wait for the response
    result_ref = make_ref()
    {:ok, task_pid} =
      Task.Supervisor.start_child(state.task_supervisor, fn ->
        result =
          case File.read(path) do
            {:ok, content} -> {:ok, content}
            {:error, posix} -> {:error, posix |> :file.format_error() |> List.to_string()}
          end
        send(from_pid, {:result, result_ref, result})
      end)
    {:reply, {result_ref, task_pid}, state}
  end
  # Returns state with an evaluator present for the container, starting and
  # monitoring a new one on first use.
  defp ensure_evaluator(state, container_ref) do
    if Map.has_key?(state.evaluators, container_ref) do
      state
    else
      {:ok, evaluator} =
        ErlDist.EvaluatorSupervisor.start_evaluator(
          state.evaluator_supervisor,
          send_to: state.owner,
          runtime_broadcast_to: state.runtime_broadcast_to,
          object_tracker: state.object_tracker
        )
      Process.monitor(evaluator.pid)
      %{state | evaluators: Map.put(state.evaluators, container_ref, evaluator)}
    end
  end

  defp discard_evaluator(state, container_ref) do
    case Map.fetch(state.evaluators, container_ref) do
      {:ok, evaluator} ->
        ErlDist.EvaluatorSupervisor.terminate_evaluator(state.evaluator_supervisor, evaluator)
        %{state | evaluators: Map.delete(state.evaluators, container_ref)}
      :error ->
        state
    end
  end

  defp schedule_memory_usage_report() do
    Process.send_after(self(), :memory_usage, @memory_usage_interval)
  end

  # No-op until an owner is attached.
  defp report_memory_usage(%{owner: nil}), do: :ok
  defp report_memory_usage(state) do
    send(state.owner, {:runtime_memory_usage, Evaluator.memory()})
  end

  # Notifies the owner only when the set of definitions actually changed.
  defp report_smart_cell_definitions(state) do
    smart_cell_definitions = get_smart_cell_definitions(state.smart_cell_definitions_module)
    if smart_cell_definitions == state.smart_cell_definitions do
      state
    else
      defs = Enum.map(smart_cell_definitions, &Map.take(&1, [:kind, :name]))
      send(state.owner, {:runtime_smart_cell_definitions, defs})
      %{state | smart_cell_definitions: smart_cell_definitions}
    end
  end

  # The definitions module (e.g. Kino.SmartCell) may not be available in the
  # runtime; fall back to no definitions instead of crashing.
  defp get_smart_cell_definitions(module) do
    if Code.ensure_loaded?(module) and function_exported?(module, :definitions, 0) do
      module.definitions()
    else
      []
    end
  end

  # Cells without a scan_binding hook never scan.
  defp scan_binding_async(_ref, %{scan_binding: nil} = info, _state), do: info

  # We wait for the current scanning to finish, this way we avoid
  # race conditions and don't unnecessarily spam evaluators
  defp scan_binding_async(_ref, %{scan_binding_monitor_ref: ref} = info, _state) when ref != nil,
    do: %{info | scan_binding_pending: true}

  defp scan_binding_async(ref, info, state) do
    %{pid: pid, scan_binding: scan_binding} = info
    myself = self()
    # Wrapper that always acks back to the server, even if the cell's
    # scan function raises.
    scan_and_ack = fn binding, env ->
      try do
        scan_binding.(pid, binding, env)
      rescue
        error -> Logger.error("scanning binding raised an error: #{inspect(error)}")
      end
      send(myself, {:scan_binding_ack, ref})
    end
    {container_ref, evaluation_ref} = info.base_locator
    evaluator = state.evaluators[container_ref]
    # With an evaluator the scan runs against the real evaluation context;
    # otherwise a task supplies an empty binding/env.
    worker_pid =
      if evaluator do
        Evaluator.peek_context(evaluator, evaluation_ref, &scan_and_ack.(&1.binding, &1.env))
        evaluator.pid
      else
        {:ok, pid} =
          Task.Supervisor.start_child(state.task_supervisor, fn ->
            binding = []
            # TODO: Use Code.env_for_eval and eval_quoted_with_env on Elixir v1.14+
            env = :elixir.env_for_eval([])
            scan_and_ack.(binding, env)
          end)
        pid
      end
    # Monitor the worker so a crash also finishes the scan (see handle_info).
    monitor_ref = Process.monitor(worker_pid)
    %{info | scan_binding_pending: false, scan_binding_monitor_ref: monitor_ref}
  end

  # Clears the scan monitor and immediately restarts a scan that was queued
  # while the previous one was running.
  defp finish_scan_binding(ref, state) do
    update_in(state.smart_cells[ref], fn info ->
      Process.demonitor(info.scan_binding_monitor_ref, [:flush])
      info = %{info | scan_binding_monitor_ref: nil}
      if info.scan_binding_pending do
        scan_binding_async(ref, info, state)
      else
        info
      end
    end)
  end

  # Rescans every smart cell whose base locator matches the evaluation that
  # just finished.
  defp scan_binding_after_evaluation(state, locator) do
    update_in(state.smart_cells, fn smart_cells ->
      Map.map(smart_cells, fn
        {ref, %{base_locator: ^locator} = info} -> scan_binding_async(ref, info, state)
        {_, info} -> info
      end)
    end)
  end
end
| 29.315425 | 101 | 0.667041 |
1c624b6cf9c3cd3bb6fff60458780785e605497c | 2,510 | exs | Elixir | test/integration/player_can_exit_at_any_time_test.exs | lottetreg/minesweeper | 72f85eb86b792346263ab4997a83c88873e0a04b | [
"MIT"
] | null | null | null | test/integration/player_can_exit_at_any_time_test.exs | lottetreg/minesweeper | 72f85eb86b792346263ab4997a83c88873e0a04b | [
"MIT"
] | 8 | 2019-05-20T20:53:46.000Z | 2019-06-27T13:50:24.000Z | test/integration/player_can_exit_at_any_time_test.exs | lottetreg/minesweeper | 72f85eb86b792346263ab4997a83c88873e0a04b | [
"MIT"
] | null | null | null | defmodule PlayerCanExitAtAnyTimeTest do
use ExUnit.Case
import IntegrationTestHelper
test "the player can exit by entering 'exit' when asked for the number of bombs" do
new_game_state(number_of_bombs: "exit")
|> Game.start()
assert_received {
:write,
"Enter the number of mines to place on the board (1 to 99).\n"
}
end
test "the player can exit by entering 'exit' when asked for their first move" do
new_game_state(
number_of_bombs: "1",
moves: ["exit"]
)
|> Game.start()
assert_received {
:write,
"Enter the number of mines to place on the board (1 to 99).\n"
}
assert_received {
:write,
[
" A B C D E F G H I J\n",
[
"0 | | | | | | | | | | |\n",
"1 | | | | | | | | | | |\n",
"2 | | | | | | | | | | |\n",
"3 | | | | | | | | | | |\n",
"4 | | | | | | | | | | |\n",
"5 | | | | | | | | | | |\n",
"6 | | | | | | | | | | |\n",
"7 | | | | | | | | | | |\n",
"8 | | | | | | | | | | |\n",
"9 | | | | | | | | | | |\n"
]
]
}
end
test "the player can exit by entering 'exit' when asked for a subsequent move" do
new_game_state(
number_of_bombs: "1",
bomb_locations: [{0, 0}],
moves: ["0B", "exit"]
)
|> Game.start()
assert_received {
:write,
"Enter the number of mines to place on the board (1 to 99).\n"
}
assert_received {
:write,
[
" A B C D E F G H I J\n",
[
"0 | | | | | | | | | | |\n",
"1 | | | | | | | | | | |\n",
"2 | | | | | | | | | | |\n",
"3 | | | | | | | | | | |\n",
"4 | | | | | | | | | | |\n",
"5 | | | | | | | | | | |\n",
"6 | | | | | | | | | | |\n",
"7 | | | | | | | | | | |\n",
"8 | | | | | | | | | | |\n",
"9 | | | | | | | | | | |\n"
]
]
}
assert_received {
:write,
[
" A B C D E F G H I J\n",
[
"0 | |1| | | | | | | | |\n",
"1 | | | | | | | | | | |\n",
"2 | | | | | | | | | | |\n",
"3 | | | | | | | | | | |\n",
"4 | | | | | | | | | | |\n",
"5 | | | | | | | | | | |\n",
"6 | | | | | | | | | | |\n",
"7 | | | | | | | | | | |\n",
"8 | | | | | | | | | | |\n",
"9 | | | | | | | | | | |\n"
]
]
}
end
end
| 25.1 | 85 | 0.310757 |
1c624f8363b8bd4d418e6ce2b5f2e8bee2350396 | 459 | ex | Elixir | lib/tilex/notifications/notifiers_supervisor.ex | plicjo/tilex | f3d9cba7f2ca99c75622cd1a9992508614dd455f | [
"MIT"
] | 1 | 2018-11-20T15:23:59.000Z | 2018-11-20T15:23:59.000Z | lib/tilex/notifications/notifiers_supervisor.ex | plicjo/tilex | f3d9cba7f2ca99c75622cd1a9992508614dd455f | [
"MIT"
] | 14 | 2018-10-29T09:43:13.000Z | 2019-01-18T11:06:08.000Z | lib/tilex/notifications/notifiers_supervisor.ex | plicjo/tilex | f3d9cba7f2ca99c75622cd1a9992508614dd455f | [
"MIT"
] | 1 | 2020-02-24T18:21:26.000Z | 2020-02-24T18:21:26.000Z | defmodule Tilex.Notifications.NotifiersSupervisor do
@slack_notifier Application.get_env(:tilex, :slack_notifier)
@twitter_notifier Application.get_env(:tilex, :twitter_notifier)
use Supervisor
def start_link() do
Supervisor.start_link(__MODULE__, :ok, name: __MODULE__)
end
def init(_) do
Supervisor.init(children(), strategy: :one_for_one)
end
def children do
[
@slack_notifier,
@twitter_notifier
]
end
end
| 20.863636 | 66 | 0.727669 |
1c6288636a45b73057c5d32e265961597425862d | 10,005 | ex | Elixir | deps/phoenix_pubsub/lib/phoenix/pubsub.ex | matin360/TaksoWebApp | 4dd8fef625ecc2364fe1d6e18e73c96c59d15349 | [
"MIT"
] | 2 | 2020-05-06T14:51:56.000Z | 2020-05-06T14:52:21.000Z | deps/phoenix_pubsub/lib/phoenix/pubsub.ex | rwtrecs/rocketseat-nlw5-inmana | 8ce8bc32e0bdd005c423394bb163945747b557e2 | [
"MIT"
] | 4 | 2021-03-04T13:00:52.000Z | 2021-03-12T12:42:09.000Z | deps/phoenix_pubsub/lib/phoenix/pubsub.ex | adrianomota/blog | ef3b2d2ed54f038368ead8234d76c18983caa75b | [
"MIT"
] | 1 | 2020-05-16T22:44:14.000Z | 2020-05-16T22:44:14.000Z | defmodule Phoenix.PubSub do
@moduledoc """
Realtime Publisher/Subscriber service.
## Getting started
You start Phoenix.PubSub directly in your supervision
tree:
{Phoenix.PubSub, name: :my_pubsub}
You can now use the functions in this module to subscribe
and broadcast messages:
iex> alias Phoenix.PubSub
iex> PubSub.subscribe :my_pubsub, "user:123"
:ok
iex> Process.info(self(), :messages)
{:messages, []}
iex> PubSub.broadcast :my_pubsub, "user:123", {:user_update, %{id: 123, name: "Shane"}}
:ok
iex> Process.info(self(), :messages)
{:messages, [{:user_update, %{id: 123, name: "Shane"}}]}
## Adapters
Phoenix PubSub was designed to be flexible and support
multiple backends. There are two officially supported
backends:
* `Phoenix.PubSub.PG2` - the default adapter that ships
as part of Phoenix.PubSub. It uses Distributed Elixir,
directly exchanging notifications between servers
* `Phoenix.PubSub.Redis` - uses Redis to exchange
data between servers. It requires the
`:phoenix_pubsub_redis` dependency
See `Phoenix.PubSub.Adapter` to implement a custom adapter.
## Custom dispatching
Phoenix.PubSub allows developers to perform custom dispatching
by passing a `dispatcher` module which is responsible for local
message deliveries.
The dispatcher must be available on all nodes running the PubSub
system. The `dispatch/3` function of the given module will be
invoked with the subscriptions entries, the broadcaster identifier
(either a pid or `:none`), and the message to broadcast.
You may want to use the dispatcher to perform special delivery for
certain subscriptions. This can be done by passing the :metadata
option during subscriptions. For instance, Phoenix Channels use a
custom `value` to provide "fastlaning", allowing messages broadcast
to thousands or even millions of users to be encoded once and written
directly to sockets instead of being encoded per channel.
"""
@type node_name :: atom | binary
@type t :: atom
@type topic :: binary
@type message :: term
@type dispatcher :: module
defmodule BroadcastError do
defexception [:message]
def exception(msg) do
%BroadcastError{message: "broadcast failed with #{inspect(msg)}"}
end
end
@doc """
Returns a child specification for pubsub with the given `options`.
The `:name` is required as part of `options`. The remaining options
are described below.
## Options
* `:name` - the name of the pubsub to be started
* `:adapter` - the adapter to use (defauls to `Phoenix.PubSub.PG2`)
* `:pool_size` - number of pubsub partitions to launch
(defaults to one partition for every 4 cores)
"""
@spec child_spec(keyword) :: Supervisor.child_spec()
defdelegate child_spec(options), to: Phoenix.PubSub.Supervisor
@doc """
Subscribes the caller to the PubSub adapter's topic.
* `server` - The Pid registered name of the server
* `topic` - The topic to subscribe to, for example: `"users:123"`
* `opts` - The optional list of options. See below.
## Duplicate Subscriptions
Callers should only subscribe to a given topic a single time.
Duplicate subscriptions for a Pid/topic pair are allowed and
will cause duplicate events to be sent; however, when using
`Phoenix.PubSub.unsubscribe/2`, all duplicate subscriptions
will be dropped.
## Options
* `:metadata` - provides metadata to be attached to this
subscription. The metadata can be used by custom
dispatching mechanisms. See the "Custom dispatching"
section in the module documentation
"""
@spec subscribe(t, topic, keyword) :: :ok | {:error, term}
def subscribe(pubsub, topic, opts \\ [])
when is_atom(pubsub) and is_binary(topic) and is_list(opts) do
case Registry.register(pubsub, topic, opts[:metadata]) do
{:ok, _} -> :ok
{:error, _} = error -> error
end
end
@doc """
Unsubscribes the caller from the PubSub adapter's topic.
"""
@spec unsubscribe(t, topic) :: :ok
def unsubscribe(pubsub, topic) when is_atom(pubsub) and is_binary(topic) do
Registry.unregister(pubsub, topic)
end
@doc """
Broadcasts message on given topic across the whole cluster.
* `pubsub` - The name of the pubsub system
* `topic` - The topic to broadcast to, ie: `"users:123"`
* `message` - The payload of the broadcast
A custom dispatcher may also be given as a fourth, optional argument.
See the "Custom dispatching" section in the module documentation.
"""
@spec broadcast(t, topic, message, dispatcher) :: :ok | {:error, term}
def broadcast(pubsub, topic, message, dispatcher \\ __MODULE__)
when is_atom(pubsub) and is_binary(topic) and is_atom(dispatcher) do
{:ok, {adapter, name}} = Registry.meta(pubsub, :pubsub)
with :ok <- adapter.broadcast(name, topic, message, dispatcher) do
dispatch(pubsub, :none, topic, message, dispatcher)
end
end
@doc """
Broadcasts message on given topic from the given process across the whole cluster.

  * `pubsub` - The name of the pubsub system
  * `topic` - The topic to broadcast to, ie: `"users:123"`
  * `message` - The payload of the broadcast

The process identified by `from` is skipped during local delivery.

A custom dispatcher may also be given as a fourth, optional argument.
See the "Custom dispatching" section in the module documentation.
"""
@spec broadcast_from(t, pid, topic, message, dispatcher) :: :ok | {:error, term}
def broadcast_from(pubsub, from, topic, message, dispatcher \\ __MODULE__)
    when is_atom(pubsub) and is_pid(from) and is_binary(topic) and is_atom(dispatcher) do
  {:ok, {adapter, name}} = Registry.meta(pubsub, :pubsub)

  # Remote delivery via the adapter first; on success, dispatch locally
  # with `from` so the broadcasting process does not receive its own
  # message. Adapter errors are returned as-is.
  case adapter.broadcast(name, topic, message, dispatcher) do
    :ok -> dispatch(pubsub, from, topic, message, dispatcher)
    error -> error
  end
end
@doc """
Broadcasts message on given topic only for the current node.

  * `pubsub` - The name of the pubsub system
  * `topic` - The topic to broadcast to, ie: `"users:123"`
  * `message` - The payload of the broadcast

Unlike `broadcast/4`, the adapter is bypassed entirely: the message is
dispatched straight to local subscribers and never leaves this node.

A custom dispatcher may also be given as a fourth, optional argument.
See the "Custom dispatching" section in the module documentation.
"""
@spec local_broadcast(t, topic, message, dispatcher) :: :ok
def local_broadcast(pubsub, topic, message, dispatcher \\ __MODULE__)
    when is_atom(pubsub) and is_binary(topic) and is_atom(dispatcher) do
  # `:none` means "no sender to skip" — every local subscriber gets it.
  dispatch(pubsub, :none, topic, message, dispatcher)
end
@doc """
Broadcasts message on given topic from a given process only for the current node.

  * `pubsub` - The name of the pubsub system
  * `topic` - The topic to broadcast to, ie: `"users:123"`
  * `message` - The payload of the broadcast

Delivery is purely local (the adapter is bypassed) and the `from`
process is excluded from it.

A custom dispatcher may also be given as a fifth, optional argument.
See the "Custom dispatching" section in the module documentation.
"""
@spec local_broadcast_from(t, pid, topic, message, dispatcher) :: :ok
def local_broadcast_from(pubsub, from, topic, message, dispatcher \\ __MODULE__)
    when is_atom(pubsub) and is_pid(from) and is_binary(topic) and is_atom(dispatcher) do
  # Local-only dispatch; `from` is skipped by the dispatcher.
  dispatch(pubsub, from, topic, message, dispatcher)
end
@doc """
Broadcasts message on given topic to a given node.

  * `node_name` - The target node name
  * `pubsub` - The name of the pubsub system
  * `topic` - The topic to broadcast to, ie: `"users:123"`
  * `message` - The payload of the broadcast

**DO NOT** use this function if you wish to broadcast to the current
node, as it is always serialized, use `local_broadcast/4` instead.

A custom dispatcher may also be given as a fifth, optional argument.
See the "Custom dispatching" section in the module documentation.
"""
# Fix: the spec previously omitted the leading `node_name` argument,
# declaring arity 4 for a 5-arity function.
@spec direct_broadcast(node_name, t, topic, message, dispatcher) :: :ok | {:error, term}
def direct_broadcast(node_name, pubsub, topic, message, dispatcher \\ __MODULE__)
    when is_atom(pubsub) and is_binary(topic) and is_atom(dispatcher) do
  # Resolve the adapter registered for this PubSub server and hand the
  # whole broadcast to it, targeting `node_name` only.
  {:ok, {adapter, name}} = Registry.meta(pubsub, :pubsub)
  adapter.direct_broadcast(name, node_name, topic, message, dispatcher)
end
@doc """
Raising version of `broadcast/4`.
"""
@spec broadcast!(t, topic, message, dispatcher) :: :ok
def broadcast!(pubsub, topic, message, dispatcher \\ __MODULE__) do
case broadcast(pubsub, topic, message, dispatcher) do
:ok -> :ok
# Any adapter error is converted into a BroadcastError exception.
{:error, error} -> raise BroadcastError, "broadcast failed: #{inspect(error)}"
end
end
@doc """
Raising version of `broadcast_from/5`.
"""
@spec broadcast_from!(t, pid, topic, message, dispatcher) :: :ok
def broadcast_from!(pubsub, from, topic, message, dispatcher \\ __MODULE__) do
case broadcast_from(pubsub, from, topic, message, dispatcher) do
:ok -> :ok
# Any adapter error is converted into a BroadcastError exception.
{:error, error} -> raise BroadcastError, "broadcast failed: #{inspect(error)}"
end
end
@doc """
Raising version of `direct_broadcast/5`.
"""
@spec direct_broadcast!(node_name, t, topic, message, dispatcher) :: :ok
def direct_broadcast!(node_name, pubsub, topic, message, dispatcher \\ __MODULE__) do
case direct_broadcast(node_name, pubsub, topic, message, dispatcher) do
:ok -> :ok
# Any adapter error is converted into a BroadcastError exception.
{:error, error} -> raise BroadcastError, "broadcast failed: #{inspect(error)}"
end
end
@doc """
Returns the node name of the PubSub server.

The answer comes from the configured adapter, so its exact shape is
adapter-specific.
"""
@spec node_name(t) :: node_name
def node_name(pubsub) do
  {:ok, {adapter, adapter_name}} = Registry.meta(pubsub, :pubsub)
  adapter.node_name(adapter_name)
end
## Dispatch callback

# Default dispatcher implementation: deliver `message` to every
# subscribed `{pid, _value}` entry, except the broadcasting process
# itself. `from` is either a pid or `:none`; since no pid ever equals
# `:none`, one comparison covers both "broadcast to all" and
# "skip the sender".
@doc false
def dispatch(entries, from, message) do
  for {pid, _value} <- entries, pid != from do
    send(pid, message)
  end

  :ok
end

# Runs the `{dispatcher, :dispatch, [from, message]}` MFA against every
# entry registered under `topic` on the local node, then returns :ok.
defp dispatch(pubsub, from, topic, message, dispatcher) do
  Registry.dispatch(pubsub, topic, {dispatcher, :dispatch, [from, message]})
  :ok
end
end
| 34.5 | 93 | 0.692454 |
1c628ccfea668d880c72d1a617b0df4726f492fd | 1,691 | exs | Elixir | apps/omg_db/test/omg_db/release_tasks/set_key_value_db_test.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_db/test/omg_db/release_tasks/set_key_value_db_test.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_db/test/omg_db/release_tasks/set_key_value_db_test.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Tests for `OMG.DB.ReleaseTasks.SetKeyValueDB.load/2`, the release task
# that resolves the key-value DB path: from the `DB_PATH` environment
# variable when set, otherwise defaulting under `$HOME/.omg/data`. In
# both cases the release name is appended to the base path.
defmodule OMG.DB.ReleaseTasks.SetKeyValueDBTest do
# NOTE(review): these tests mutate the global DB_PATH env var while
# async: true; collisions with other async suites touching the same
# variable are possible.
use ExUnit.Case, async: true
import ExUnit.CaptureLog, only: [capture_log: 1]
alias OMG.DB.ReleaseTasks.SetKeyValueDB
# The application whose config keyword list is inspected in assertions.
@app :omg_db
setup do
# Logger must be running so the capture_log/1 wrappers below work.
_ = Application.ensure_all_started(:logger)
on_exit(fn ->
# Ensure DB_PATH set by one test never leaks into the next.
:ok = System.delete_env("DB_PATH")
end)
:ok
end
test "if environment variables get applied in the configuration" do
test_path = "/tmp/YOLO/"
release = :watcher_info
:ok = System.put_env("DB_PATH", test_path)
capture_log(fn ->
config = SetKeyValueDB.load([], release: release)
path = config |> Keyword.fetch!(@app) |> Keyword.fetch!(:path)
# The release name is appended verbatim to the configured directory.
assert path == test_path <> "#{release}"
end)
end
test "if default configuration is used when there's no environment variables" do
:ok = System.delete_env("DB_PATH")
capture_log(fn ->
config = SetKeyValueDB.load([], release: :watcher_info)
path = config |> Keyword.fetch!(@app) |> Keyword.fetch!(:path)
assert path == Path.join([System.get_env("HOME"), ".omg/data"]) <> "/watcher_info"
end)
end
end
| 30.745455 | 88 | 0.694855 |
1c629165eb6ba73d06d987079a71ec338b0f211c | 2,769 | exs | Elixir | test/ex_machina_test.exs | cnsa/ex_machina | 8a55155df735b2d0c143676f451a7a1ee191dc85 | [
"MIT"
] | null | null | null | test/ex_machina_test.exs | cnsa/ex_machina | 8a55155df735b2d0c143676f451a7a1ee191dc85 | [
"MIT"
] | null | null | null | test/ex_machina_test.exs | cnsa/ex_machina | 8a55155df735b2d0c143676f451a7a1ee191dc85 | [
"MIT"
] | null | null | null | defmodule ExMachinaTest do
# Exercises the public ExMachina factory API: `build/2`, `build_pair/2`,
# `build_list/3`, the `sequence` helpers, and the errors raised for
# unknown factories and for the removed `create*` functions.
use ExUnit.Case
# Minimal in-test factory module; `use ExMachina` generates the
# `build*`/`sequence` functions the tests below call.
defmodule Factory do
use ExMachina
def user_factory do
%{
id: 3,
name: "John Doe",
admin: false
}
end
def email_factory do
# `sequence/2` with a formatter fun: each build yields the next index.
%{
email: sequence(:email, &"me-#{&1}@foo.com")
}
end
def article_factory do
# `sequence/1` shortcut: appends an incrementing counter to the prefix.
%{
title: sequence("Post Title")
}
end
def struct_factory do
# Struct-backed factory; building it with unknown keys must raise.
%{
__struct__: Foo.Bar
}
end
end
test "sequence/2 sequences a value" do
assert "me-0@foo.com" == Factory.build(:email).email
assert "me-1@foo.com" == Factory.build(:email).email
end
test "sequence/1 shortcut for creating sequences" do
assert "Post Title0" == Factory.build(:article).title
assert "Post Title1" == Factory.build(:article).title
end
test "raises a helpful error if the factory is not defined" do
assert_raise ExMachina.UndefinedFactoryError, fn ->
Factory.build(:foo)
end
end
test "build/2 returns the matching factory" do
assert Factory.build(:user) == %{
id: 3,
name: "John Doe",
admin: false
}
end
# Overrides may be given as a keyword list or as a map; both merge over
# the factory defaults.
test "build/2 merges passed in options as keyword list" do
assert Factory.build(:user, admin: true) == %{
id: 3,
name: "John Doe",
admin: true
}
end
test "build/2 merges passed in options as a map" do
assert Factory.build(:user, %{admin: true}) == %{
id: 3,
name: "John Doe",
admin: true
}
end
test "build/2 raises if passing invalid keys to a struct factory" do
assert_raise KeyError, fn ->
Factory.build(:struct, doesnt_exist: true)
end
end
test "build_pair/2 builds 2 factories" do
records = Factory.build_pair(:user, admin: true)
expected_record = %{
id: 3,
name: "John Doe",
admin: true
}
assert records == [expected_record, expected_record]
end
test "build_list/3 builds the factory the passed in number of times" do
records = Factory.build_list(3, :user, admin: true)
expected_record = %{
id: 3,
name: "John Doe",
admin: true
}
assert records == [expected_record, expected_record, expected_record]
end
# The `create*` family was removed from ExMachina; the raised messages
# must point users at the replacement API.
test "raises helpful error when using old create functions" do
assert_raise RuntimeError, ~r/create\/1 has been removed/, fn ->
Factory.create(:user)
end
assert_raise RuntimeError, ~r/create\/2 has been removed/, fn ->
Factory.create(:user, admin: true)
end
assert_raise RuntimeError, ~r/create_pair\/2 has been removed/, fn ->
Factory.create_pair(:user, admin: true)
end
assert_raise RuntimeError, ~r/create_list\/3 has been removed/, fn ->
Factory.create_list(3, :user, admin: true)
end
end
end
| 23.075 | 73 | 0.620441 |
1c62e2d4f3e7e4d4fa707ecb3942a43c177e8a61 | 6,711 | exs | Elixir | lib/elixir/test/elixir/uri_test.exs | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/uri_test.exs | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/uri_test.exs | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
# Tests for Elixir's URI module, written against a pre-1.0 API:
# `URI.Info` records (with `Record[...]` access syntax), `HashDict`,
# charlists via single quotes, and `to_binary/1`.
defmodule URITest do
use ExUnit.Case, async: true
test :encode_with_binary do
# CR, LF, &, <, >, ", space, then "ゆんゆん" as raw UTF-8 bytes.
raw = <<13,10,38,60,62,34,32,227,130,134,227,130,147,227,130,134,227,130,147>>
expected = "%0D%0A%26%3C%3E%22+%E3%82%86%E3%82%93%E3%82%86%E3%82%93"
assert URI.encode(raw) == expected
end
# Query pairs may be atoms, binaries or charlists — all encode the same.
test :encode_query do
assert URI.encode_query([{:foo, :bar}, {:baz, :quux}]) == "foo=bar&baz=quux"
assert URI.encode_query([{"foo", "bar"}, {"baz", "quux"}]) == "foo=bar&baz=quux"
assert URI.encode_query([{'foo', 'bar'}, {'baz', 'quux'}]) == "foo=bar&baz=quux"
end
test :encode_query_mixed do
assert URI.encode_query([{"foo", :bar}]) == "foo=bar"
assert URI.encode_query([{"foo", 'bar'}]) == "foo=bar"
assert URI.encode_query([{:foo, "bar"}]) == "foo=bar"
assert URI.encode_query([{:foo, 'bar'}]) == "foo=bar"
end
test :decode_query do
assert HashDict.equal?(URI.decode_query("q=search%20query&cookie=ab%26cd&block%20buster="),
HashDict.new [{"block buster", ""}, {"cookie", "ab&cd"}, {"q", "search query"}])
assert HashDict.equal?(URI.decode_query(""), HashDict.new)
assert HashDict.equal?(URI.decode_query("something=weird%3Dhappening"), HashDict.new [{"something", "weird=happening"}])
assert URI.decode_query("", []) == []
# A key without "=" decodes to a nil value; only the first "=" splits.
assert HashDict.equal?(URI.decode_query("garbage"), HashDict.new [{"garbage", nil}])
assert HashDict.equal?(URI.decode_query("=value"), HashDict.new [{"", "value"}])
assert HashDict.equal?(URI.decode_query("something=weird=happening"), HashDict.new [{"something", "weird=happening"}])
end
test :decoder do
# query_decoder/1 returns an enumerable of decoded pairs in order.
decoder = URI.query_decoder("q=search%20query&cookie=ab%26cd&block%20buster=")
expected = [{"q", "search query"}, {"cookie", "ab&cd"}, {"block buster", ""}]
assert Enum.map(decoder, fn(x) -> x end) == expected
end
test :decode do
data_to_be_decoded = "%26%3C%3E%22+%E3%82%86%E3%82%93%E3%82%86%E3%82%93"
assert URI.decode(data_to_be_decoded) == "&<>\" ゆんゆん"
end
# Scheme parsers: each asserts the full URI.Info record, including the
# default ports (http 80, https 443, ftp 21, sftp 22, tftp 69, ldap 389).
test :parse_http do
assert URI.Info[scheme: "http", host: "foo.com", path: "/path/to/something",
query: "foo=bar&bar=foo", fragment: "fragment", port: 80,
authority: "foo.com", userinfo: nil] ==
URI.parse("http://foo.com/path/to/something?foo=bar&bar=foo#fragment")
end
test :parse_https do
assert URI.Info[scheme: "https", host: "foo.com", authority: "foo.com",
query: nil, fragment: nil, port: 443, path: nil, userinfo: nil] ==
URI.parse("https://foo.com")
end
test :parse_file do
assert URI.Info[scheme: "file", host: nil, path: "/foo/bar/baz", userinfo: nil,
query: nil, fragment: nil, port: nil, authority: nil] ==
URI.parse("file:///foo/bar/baz")
end
test :parse_ftp do
assert URI.Info[scheme: "ftp", host: "private.ftp-servers.example.com",
userinfo: "user001:secretpassword", authority: "user001:secretpassword@private.ftp-servers.example.com",
path: "/mydirectory/myfile.txt", query: nil, fragment: nil,
port: 21] ==
URI.parse("ftp://user001:secretpassword@private.ftp-servers.example.com/mydirectory/myfile.txt")
end
test :parse_sftp do
assert URI.Info[scheme: "sftp", host: "private.ftp-servers.example.com",
userinfo: "user001:secretpassword", authority: "user001:secretpassword@private.ftp-servers.example.com",
path: "/mydirectory/myfile.txt", query: nil, fragment: nil,
port: 22] ==
URI.parse("sftp://user001:secretpassword@private.ftp-servers.example.com/mydirectory/myfile.txt")
end
test :parse_tftp do
assert URI.Info[scheme: "tftp", host: "private.ftp-servers.example.com",
userinfo: "user001:secretpassword", authority: "user001:secretpassword@private.ftp-servers.example.com",
path: "/mydirectory/myfile.txt", query: nil, fragment: nil, port: 69] ==
URI.parse("tftp://user001:secretpassword@private.ftp-servers.example.com/mydirectory/myfile.txt")
end
test :parse_ldap do
assert URI.Info[scheme: "ldap", host: nil, authority: nil, userinfo: nil,
path: "/dc=example,dc=com", query: "?sub?(givenName=John)",
fragment: nil, port: 389] ==
URI.parse("ldap:///dc=example,dc=com??sub?(givenName=John)")
assert URI.Info[scheme: "ldap", host: "ldap.example.com", authority: "ldap.example.com",
userinfo: nil, path: "/cn=John%20Doe,dc=example,dc=com", fragment: nil,
port: 389, query: nil] ==
URI.parse("ldap://ldap.example.com/cn=John%20Doe,dc=example,dc=com")
end
test :parse_splits_authority do
assert URI.Info[scheme: "http", host: "foo.com", path: nil,
query: nil, fragment: nil, port: 4444,
authority: "foo:bar@foo.com:4444",
userinfo: "foo:bar"] ==
URI.parse("http://foo:bar@foo.com:4444")
assert URI.Info[scheme: "https", host: "foo.com", path: nil,
query: nil, fragment: nil, port: 443,
authority: "foo:bar@foo.com", userinfo: "foo:bar"] ==
URI.parse("https://foo:bar@foo.com")
assert URI.Info[scheme: "http", host: "foo.com", path: nil,
query: nil, fragment: nil, port: 4444,
authority: "foo.com:4444",
userinfo: nil] ==
URI.parse("http://foo.com:4444")
end
# Malformed input must still return a value rather than raise.
test :parse_bad_uris do
assert URI.parse("https:??@?F?@#>F//23/")
assert URI.parse("")
assert URI.parse(":https")
assert URI.parse("https")
end
# Scheme and host are case-insensitive and normalized to lowercase;
# userinfo keeps its original case.
test :downcase_properly do
assert URI.parse("hTtP://google.com").scheme == "http"
assert URI.parse("http://GoOgLe.CoM").host == "google.com"
assert URI.parse("http://LOL:wut@GoOgLe.CoM").authority == "LOL:wut@google.com"
end
# Round-tripping through to_binary/1 omits ports that match the
# scheme's default (e.g. 443 for https) but keeps explicit others.
test :to_binary do
assert to_binary(URI.parse("http://google.com")) == "http://google.com"
assert to_binary(URI.parse("http://google.com:443")) == "http://google.com:443"
assert to_binary(URI.parse("https://google.com:443")) == "https://google.com"
assert to_binary(URI.parse("http://lol:wut@google.com")) == "http://lol:wut@google.com"
assert to_binary(URI.parse("http://google.com/elixir")) == "http://google.com/elixir"
assert to_binary(URI.parse("http://google.com?q=lol")) == "http://google.com?q=lol"
assert to_binary(URI.parse("http://google.com?q=lol#omg")) == "http://google.com?q=lol#omg"
end
end
| 46.93007 | 124 | 0.605722 |
1c62ff2c5b7ce5835304f40484eac46f81325ed4 | 13,391 | ex | Elixir | lib/plug/crypto/message_encryptor.ex | wojtekmach/plug | e9afab6c10f173d55b13bd78c64526fb21495b39 | [
"Apache-2.0"
] | null | null | null | lib/plug/crypto/message_encryptor.ex | wojtekmach/plug | e9afab6c10f173d55b13bd78c64526fb21495b39 | [
"Apache-2.0"
] | null | null | null | lib/plug/crypto/message_encryptor.ex | wojtekmach/plug | e9afab6c10f173d55b13bd78c64526fb21495b39 | [
"Apache-2.0"
] | null | null | null | defmodule Plug.Crypto.MessageEncryptor do
@moduledoc ~S"""
`MessageEncryptor` is a simple way to encrypt values which get stored
somewhere you don't trust.
The encrypted key, initialization vector, cipher text, and cipher tag
are base64url encoded and returned to you.
This can be used in situations similar to the `MessageVerifier`, but where
you don't want users to be able to determine the value of the payload.
## Example
secret_key_base = "072d1e0157c008193fe48a670cce031faa4e..."
encrypted_cookie_salt = "encrypted cookie"
encrypted_signed_cookie_salt = "signed encrypted cookie"
secret = KeyGenerator.generate(secret_key_base, encrypted_cookie_salt)
sign_secret = KeyGenerator.generate(secret_key_base, encrypted_signed_cookie_salt)
data = "José"
encrypted = MessageEncryptor.encrypt(data, secret, sign_secret)
decrypted = MessageEncryptor.decrypt(encrypted, secret, sign_secret)
decrypted # => {:ok, "José"}
"""
# TODO: Remove deprecated API.
alias Plug.Crypto.MessageVerifier
@doc """
Encrypts a message using authenticated encryption.

The payload is sealed with AES128-GCM under a fresh, per-message random
content-encryption key; that key is itself wrapped with `secret` and
`sign_secret`. The result is a dot-separated, base64url-encoded token.
"""
def encrypt(message, secret, sign_secret)
    when is_binary(message) and is_binary(secret) and is_binary(sign_secret) do
  aes128_gcm_encrypt(message, secret, sign_secret)
end
@doc """
Decrypts a message using authenticated encryption.

Authentication is part of AES-GCM decryption itself: a token that fails
to parse or to authenticate yields `:error`, otherwise `{:ok, message}`
is returned. Decryption is never performed before verification.
"""
def decrypt(encrypted, secret, sign_secret)
    when is_binary(encrypted) and is_binary(secret) and is_binary(sign_secret) do
  aes128_gcm_decrypt(encrypted, secret, sign_secret)
end
# Encrypts and authenticates a message using AES128-GCM mode.
#
# A random 128-bit content encryption key (CEK) is generated for
# every message which is then encrypted with `aes_gcm_key_wrap/3`.
#
# Secrets longer than 256 bits are truncated to their first 32 bytes.
defp aes128_gcm_encrypt(plain_text, secret, sign_secret) when bit_size(secret) > 256 do
aes128_gcm_encrypt(plain_text, binary_part(secret, 0, 32), sign_secret)
end
defp aes128_gcm_encrypt(plain_text, secret, sign_secret)
when is_binary(plain_text) and bit_size(secret) in [128, 192, 256] and is_binary(sign_secret) do
# Fresh 128-bit CEK and 96-bit IV per message; "A128GCM" doubles as
# both the additional authenticated data (AAD) and the token header.
key = :crypto.strong_rand_bytes(16)
iv = :crypto.strong_rand_bytes(12)
aad = "A128GCM"
{cipher_text, cipher_tag} = :crypto.block_encrypt(:aes_gcm, key, iv, {aad, plain_text})
# The CEK never travels in the clear — it is wrapped under `secret`.
encrypted_key = aes_gcm_key_wrap(key, secret, sign_secret)
# Token layout: aad.encrypted_key.iv.cipher_text.cipher_tag (base64url).
encode_token(aad, encrypted_key, iv, cipher_text, cipher_tag)
end
# Verifies and decrypts a message using AES128-GCM mode.
#
# Decryption will never be performed prior to verification.
#
# The encrypted content encryption key (CEK) is decrypted
# with `aes_gcm_key_unwrap/3`.
defp aes128_gcm_decrypt(cipher_text, secret, sign_secret) when bit_size(secret) > 256 do
aes128_gcm_decrypt(cipher_text, binary_part(secret, 0, 32), sign_secret)
end
defp aes128_gcm_decrypt(cipher_text, secret, sign_secret)
when is_binary(cipher_text) and bit_size(secret) in [128, 192, 256] and is_binary(sign_secret) do
# Only well-formed "A128GCM" tokens with a 96-bit IV and a 128-bit tag
# are considered; anything else falls straight to :error.
case decode_token(cipher_text) do
{aad = "A128GCM", encrypted_key, iv, cipher_text, cipher_tag} when bit_size(iv) === 96 and bit_size(cipher_tag) === 128 ->
encrypted_key
|> aes_gcm_key_unwrap(secret, sign_secret)
|> case do
{:ok, key} ->
# GCM decryption authenticates the tag; a bad tag makes
# block_decrypt return a non-binary, caught below.
:crypto.block_decrypt(:aes_gcm, key, iv, {aad, cipher_text, cipher_tag})
_ ->
:error
end
|> case do
plain_text when is_binary(plain_text) ->
{:ok, plain_text}
_ ->
:error
end
_ ->
:error
end
end
# Wraps a decrypted content encryption key (CEK) with secret and
# sign_secret using AES GCM mode.
#
# See: https://tools.ietf.org/html/rfc7518#section-4.7
#
# The wrapped blob is laid out as:
#   cek-sized ciphertext ++ 16-byte GCM tag ++ 12-byte IV
# which is exactly the shape `aes_gcm_key_unwrap/3` pattern-matches on.
defp aes_gcm_key_wrap(cek, secret, sign_secret) when bit_size(secret) > 256 do
aes_gcm_key_wrap(cek, binary_part(secret, 0, 32), sign_secret)
end
defp aes_gcm_key_wrap(cek, secret, sign_secret)
when bit_size(cek) in [128, 192, 256] and bit_size(secret) in [128, 192, 256] and is_binary(sign_secret) do
# `sign_secret` is used as the AAD, binding the wrap to it.
iv = :crypto.strong_rand_bytes(12)
{cipher_text, cipher_tag} = :crypto.block_encrypt(:aes_gcm, secret, iv, {sign_secret, cek})
cipher_text <> cipher_tag <> iv
end
# Unwraps an encrypted content encryption key (CEK) with secret and
# sign_secret using AES GCM mode.
#
# See: https://tools.ietf.org/html/rfc7518#section-4.7
defp aes_gcm_key_unwrap(wrapped_cek, secret, sign_secret) when bit_size(secret) > 256 do
aes_gcm_key_unwrap(wrapped_cek, binary_part(secret, 0, 32), sign_secret)
end
defp aes_gcm_key_unwrap(wrapped_cek, secret, sign_secret)
when bit_size(secret) in [128, 192, 256] and is_binary(sign_secret) do
# One clause per valid CEK size (128/192/256 bits); the blob layout is
# ciphertext ++ 128-bit tag ++ 96-bit IV, mirroring aes_gcm_key_wrap/3.
wrapped_cek
|> case do
<<cipher_text :: 128-bitstring, cipher_tag :: 128-bitstring, iv :: 96-bitstring>> ->
:crypto.block_decrypt(:aes_gcm, secret, iv, {sign_secret, cipher_text, cipher_tag})
<<cipher_text :: 192-bitstring, cipher_tag :: 128-bitstring, iv :: 96-bitstring>> ->
:crypto.block_decrypt(:aes_gcm, secret, iv, {sign_secret, cipher_text, cipher_tag})
<<cipher_text :: 256-bitstring, cipher_tag :: 128-bitstring, iv :: 96-bitstring>> ->
:crypto.block_decrypt(:aes_gcm, secret, iv, {sign_secret, cipher_text, cipher_tag})
_ ->
:error
end
|> case do
# A failed authentication yields a non-binary from block_decrypt,
# which fails the bit_size guard and maps to :error.
cek when bit_size(cek) in [128, 192, 256] ->
{:ok, cek}
_ ->
:error
end
end
# Pads a message using the PKCS #7 cryptographic message syntax.
#
# The padding length is 16 minus the message length modulo 16, so it is
# always 1..16 and the padded message is a whole number of 16-byte
# blocks (an already-aligned message receives a full extra block).
#
# See: https://tools.ietf.org/html/rfc2315
# See: `pkcs7_unpad/1`
defp pkcs7_pad(message) do
  padding_size = 16 - rem(byte_size(message), 16)
  padding = :binary.copy(<<padding_size>>, padding_size)
  message <> padding
end
# Unpads a message using the PKCS #7 cryptographic message syntax.
#
# See: https://tools.ietf.org/html/rfc2315
# See: `pkcs7_pad/1`
#
# Returns `{:ok, unpadded}` for well-formed padding and `:error`
# otherwise. Fixes over the previous version:
#
#   * a trailing `0` byte is rejected (valid PKCS#7 padding is 1..16),
#     where it used to be accepted and the message returned unchanged;
#   * a claimed padding length larger than the message now returns
#     `:error` instead of raising ArgumentError from binary_part/3.
defp pkcs7_unpad(<<>>) do
  :error
end

defp pkcs7_unpad(message) do
  padding_size = :binary.last(message)
  message_size = byte_size(message)

  # `and` short-circuits, so binary_part/3 only runs once the claimed
  # padding is known to fit inside the message.
  valid_padding? =
    padding_size in 1..16 and padding_size <= message_size and
      binary_part(message, message_size, -padding_size) ==
        :binary.copy(<<padding_size>>, padding_size)

  if valid_padding? do
    {:ok, binary_part(message, 0, message_size - padding_size)}
  else
    :error
  end
end
# Builds the final token: the five binary segments are base64url
# encoded without padding and joined with "." separators, the exact
# inverse of `decode_token/1`.
defp encode_token(protected, encrypted_key, iv, cipher_text, cipher_tag) do
  segments = [protected, encrypted_key, iv, cipher_text, cipher_tag]
  Enum.map_join(segments, ".", &Base.url_encode64(&1, padding: false))
end
# Splits a dot-separated token into its five segments and base64url
# decodes each. Returns :error when the token has fewer than five
# segments or any segment is not valid unpadded base64url (a token with
# extra dots also fails, since "." is not a base64url character).
defp decode_token(token) do
with [protected, encrypted_key, iv, cipher_text, cipher_tag] <- String.split(token, ".", parts: 5),
{:ok, protected} <- Base.url_decode64(protected, padding: false),
{:ok, encrypted_key} <- Base.url_decode64(encrypted_key, padding: false),
{:ok, iv} <- Base.url_decode64(iv, padding: false),
{:ok, cipher_text} <- Base.url_decode64(cipher_text, padding: false),
{:ok, cipher_tag} <- Base.url_decode64(cipher_tag, padding: false) do
{protected, encrypted_key, iv, cipher_text, cipher_tag}
else
_ -> :error
end
end
## Deprecated API
@doc """
WARNING: This function is deprecated in favor of `encrypt/3`.
Encrypts and signs a message.
"""
def encrypt_and_sign(message, secret, sign_secret, cipher \\ nil)
when is_binary(message) and is_binary(secret) and is_binary(sign_secret) do
# Deprecation warning goes to stderr on every call.
IO.puts :stderr, "warning: `Plug.Crypto.MessageEncryptor.encrypt_and_sign/4` is deprecated," <>
"please use `decrypt/3` instead\n" <> Exception.format_stacktrace
case cipher do
# No cipher: delegate to the current AES128-GCM scheme.
nil ->
encrypt(message, secret, sign_secret)
# Dedicated legacy path: AES256-CBC with HMAC-SHA1 token signing.
:aes_cbc256 ->
aes256_cbc_hmac_sha1_encrypt(message, secret, sign_secret)
# Any other cipher atom: generic legacy encrypt-then-sign with a
# random 128-bit IV; the token is "base64(ct)--base64(iv)" signed
# by MessageVerifier.
_ ->
iv = :crypto.strong_rand_bytes(16)
message
|> pkcs7_pad()
|> encrypt_legacy(cipher, secret, iv)
|> Base.encode64()
|> Kernel.<>("--")
|> Kernel.<>(Base.encode64(iv))
|> MessageVerifier.sign(sign_secret)
end
end
@doc """
WARNING: This function is deprecated in favor of `decrypt/3`.
Decrypts and verifies a message.
We need to verify the message in order to avoid padding attacks.
Reference: http://www.limited-entropy.com/padding-oracle-attacks
"""
def verify_and_decrypt(encrypted, secret, sign_secret, cipher \\ nil)
when is_binary(encrypted) and is_binary(secret) and is_binary(sign_secret) do
# Deprecation warning goes to stderr on every call.
IO.puts :stderr, "warning: `Plug.Crypto.MessageEncryptor.verify_and_decrypt/4` is deprecated," <>
"please use `decrypt/3` instead\n" <> Exception.format_stacktrace
case cipher do
nil ->
# Heuristic format sniffing: current AES-GCM tokens are
# dot-separated; anything else is retried as legacy AES256-CBC.
if String.contains?(encrypted, ".") do
decrypt(encrypted, secret, sign_secret)
else
verify_and_decrypt(encrypted, secret, sign_secret, :aes_cbc256)
end
:aes_cbc256 ->
aes256_cbc_hmac_sha1_decrypt(encrypted, secret, sign_secret)
# Generic legacy path: verify the MessageVerifier signature first
# (guarding against padding-oracle attacks), then base64 decode the
# "ct--iv" payload, decrypt and strip PKCS#7 padding. Every failure
# collapses to :error.
_ ->
case MessageVerifier.verify(encrypted, sign_secret) do
{:ok, verified} ->
[encrypted, iv] = String.split(verified, "--")
case Base.decode64(encrypted) do
{:ok, encrypted} ->
case Base.decode64(iv) do
{:ok, iv} ->
encrypted |> decrypt_legacy(cipher, secret, iv) |> pkcs7_unpad
:error ->
:error
end
:error ->
:error
end
:error ->
:error
end
end
end
# Legacy (pre-AES-GCM) token layout:
#
#   url64( base64(cipher_text) <> "--" <> base64(iv) ) <> "##" <> url64(hmac)
#
# The HMAC-SHA1 signature is computed over the already-encoded payload.
# NOTE(review): :crypto.hmac/3 was removed in OTP 24; this legacy path
# would need :crypto.mac/4 on modern Erlang/OTP.
defp encode_legacy_token(sign_secret, iv, cipher_text) do
  payload = Base.url_encode64(Base.encode64(cipher_text) <> "--" <> Base.encode64(iv))
  signature = Base.url_encode64(:crypto.hmac(:sha, sign_secret, payload))
  payload <> "##" <> signature
end
# Parses and verifies a legacy token, returning a tuple in the same
# shape as `decode_token/1` ({tag, encrypted_key, iv, ct, mac}) with the
# "A256CBC-HS1" marker and an empty encrypted key, or :error.
defp decode_legacy_token(token, sign_secret) do
# Payload and signature are separated by "##"; if that separator is
# absent, fall back to splitting on "--" (an older token layout —
# TODO confirm which producers emitted it).
split =
token
|> String.split("##", parts: 2)
|> case do
[_, _] = both -> both
_ -> String.split(token, "--", parts: 2)
end
with [cipher_text, cipher_tag] when byte_size(cipher_text) > 0 and byte_size(cipher_tag) > 0 <- split,
{:ok, cipher_tag} <- Base.url_decode64(cipher_tag),
challenge = :crypto.hmac(:sha, sign_secret, cipher_text),
# Constant-time comparison; verification happens before any decode
# of the payload is trusted.
true <- Plug.Crypto.secure_compare(challenge, cipher_tag),
{:ok, cipher_text} <- Base.url_decode64(cipher_text),
[cipher_text, iv] <- String.split(cipher_text, "--", parts: 2),
{:ok, cipher_text} <- Base.decode64(cipher_text),
{:ok, iv} <- Base.decode64(iv) do
{"A256CBC-HS1", "", iv, cipher_text, cipher_tag}
else
_ -> :error
end
end
# Legacy block-cipher encryption for an arbitrary `cipher` atom.
# Secrets longer than 256 bits are truncated to their first 32 bytes
# before being used as the key.
defp encrypt_legacy(message, cipher, secret, iv) when bit_size(secret) > 256 do
  encrypt_legacy(message, cipher, binary_part(secret, 0, 32), iv)
end

defp encrypt_legacy(message, cipher, secret, iv) do
  :crypto.block_encrypt(cipher, secret, iv, message)
end
# Legacy block-cipher decryption, mirroring `encrypt_legacy/4`:
# over-long secrets are truncated to their first 32 bytes.
defp decrypt_legacy(encrypted, cipher, secret, iv) when bit_size(secret) > 256 do
  decrypt_legacy(encrypted, cipher, binary_part(secret, 0, 32), iv)
end

defp decrypt_legacy(encrypted, cipher, secret, iv) do
  :crypto.block_decrypt(cipher, secret, iv, encrypted)
end
# Encrypts and authenticates a message using AES128-CBC mode
# with HMAC-SHA-1 for the authentication code.
defp aes256_cbc_hmac_sha1_encrypt(plain_text, secret, sign_secret) when bit_size(secret) > 256 do
aes256_cbc_hmac_sha1_encrypt(plain_text, binary_part(secret, 0, 32), sign_secret)
end
# NOTE(review): the guard admits 128/192-bit secrets, but :aes_cbc256
# uses a 256-bit key, and the matching decrypt clause requires exactly
# 256 bits — confirm whether the wider guard is intentional.
defp aes256_cbc_hmac_sha1_encrypt(plain_text, secret, sign_secret)
when is_binary(plain_text) and bit_size(secret) in [128, 192, 256] and is_binary(sign_secret) do
# Random 128-bit IV per message; padding precedes CBC encryption, and
# the resulting blob is wrapped in the signed legacy token format.
iv = :crypto.strong_rand_bytes(16)
cipher_text = :crypto.block_encrypt(:aes_cbc256, secret, iv, pkcs7_pad(plain_text))
encode_legacy_token(sign_secret, iv, cipher_text)
end
# Verifies and decrypts a message using AES128-CBC mode
# with HMAC-SHA-1 for the authentication code.
#
# Decryption will never be performed prior to verification.
defp aes256_cbc_hmac_sha1_decrypt(cipher_text, secret, sign_secret) when bit_size(secret) > 256 do
aes256_cbc_hmac_sha1_decrypt(cipher_text, binary_part(secret, 0, 32), sign_secret)
end
defp aes256_cbc_hmac_sha1_decrypt(cipher_text, secret, sign_secret)
when is_binary(cipher_text) and bit_size(secret) === 256 and is_binary(sign_secret) do
# decode_legacy_token/2 already checked the HMAC (constant-time), so
# reaching the happy branch means the token is authentic.
case decode_legacy_token(cipher_text, sign_secret) do
{"A256CBC-HS1", _encrypted_key, iv, cipher_text, cipher_tag}
when bit_size(iv) === 128 and bit_size(cipher_tag) === 160 ->
key = secret
:crypto.block_decrypt(:aes_cbc256, key, iv, cipher_text)
|> case do
# A binary result is CBC plaintext; strip the PKCS#7 padding.
plain_text when is_binary(plain_text) ->
pkcs7_unpad(plain_text)
_ ->
:error
end
_ ->
:error
end
end
end
| 38.702312 | 128 | 0.666418 |
1c631481a4ccbd9e6d8960cc3530ad2a2a83b99c | 3,176 | ex | Elixir | apps/core/lib/core/cabinet/declaration_requests/declaration_requests.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/core/lib/core/cabinet/declaration_requests/declaration_requests.ex | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/core/lib/core/cabinet/declaration_requests/declaration_requests.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z | defmodule Core.Cabinet.DeclarationRequests do
@moduledoc false
import Core.API.Helpers.Connection, only: [get_consumer_id: 1]
import Ecto.Query
alias Core.Cabinet.DeclarationRequestsSearch
alias Core.DeclarationRequests.DeclarationRequest
alias Core.Persons
alias Scrivener.Page
@mithril_api Application.get_env(:core, :api_resolvers)[:mithril]
@status_expired DeclarationRequest.status(:expired)
@read_repo Application.get_env(:core, :repos)[:read_repo]
# Lists the declaration requests belonging to the authenticated cabinet
# user. The chain validates the Mithril user against the MPI person,
# rejects blocked users, validates the search params, and finally
# paginates. Any failing `with` clause falls through and is returned
# as-is (e.g. an error tuple or an invalid changeset).
def search(search_params, headers) do
user_id = get_consumer_id(headers)
with {:ok, %{"data" => user}} <- @mithril_api.get_user_by_id(user_id, headers),
{:ok, person} <- Persons.get_by_id(user["person_id"]),
:ok <- validate_user_person(user, person),
:ok <- check_user_blocked(user["is_blocked"]),
%Ecto.Changeset{valid?: true} <- DeclarationRequestsSearch.changeset(search_params),
%Page{} = paging <- get_person_declaration_requests(search_params, person.id) do
{:ok, paging}
end
end
# Fetches a single declaration request by id for the authenticated
# cabinet user. Performs the same user/person/blocked checks as
# `search/2`, then verifies the found request actually belongs to the
# person (via its MPI id) before returning it. A nil from the repo read
# falls through the `with` and is returned as-is.
def get_by_id(id, headers) do
user_id = get_consumer_id(headers)
with {:ok, %{"data" => user}} <- @mithril_api.get_user_by_id(user_id, headers),
{:ok, person} <- Persons.get_by_id(user["person_id"]),
:ok <- validate_user_person(user, person),
:ok <- check_user_blocked(user["is_blocked"]),
%DeclarationRequest{} = declaration_request <- @read_repo.get(DeclarationRequest, id),
:ok <- validate_person_id(declaration_request, person.id) do
{:ok, declaration_request}
end
end
# Ensures the Mithril user and the MPI person refer to the same
# individual: both the person id and the tax id must agree. A mismatch
# is reported as an access-denied error rather than revealing which
# field differed.
defp validate_user_person(user, person) do
  same_person? = user["person_id"] == person.id
  same_tax_id? = user["tax_id"] == person.tax_id

  if same_person? and same_tax_id? do
    :ok
  else
    {:error, {:access_denied, "Person not found"}}
  end
end
# Blocked users may not access cabinet declaration requests. Only exact
# booleans are accepted; anything else raises FunctionClauseError.
defp check_user_blocked(true), do: {:error, :access_denied}
defp check_user_blocked(false), do: :ok
# Expired requests are never listed: a search explicitly asking for the
# expired status short-circuits to an empty, well-formed Scrivener page
# without touching the database.
defp get_person_declaration_requests(%{"status" => @status_expired} = params, _) do
%Page{
entries: [],
page_number: 1,
page_size: Map.get(params, "page_size", 50),
total_entries: 0,
total_pages: 1
}
end
# Composes the query newest-first, applies the optional person/status/
# start-year filters, and paginates through the read replica.
defp get_person_declaration_requests(params, person_id) do
DeclarationRequest
|> order_by([dr], desc: :inserted_at)
|> filter_by_person_id(person_id)
|> filter_by_status(params)
|> filter_by_start_year(params)
|> @read_repo.paginate(params)
end
# Restricts the query to requests owned by the given person (MPI id).
# A non-binary person id leaves the query untouched.
defp filter_by_person_id(query, person_id) when is_binary(person_id) do
  where(query, [request], request.mpi_id == ^person_id)
end

defp filter_by_person_id(query, _person_id), do: query
# Filters by an explicit status when one is given as a binary.
defp filter_by_status(query, %{"status" => status}) when is_binary(status) do
  where(query, [request], request.status == ^status)
end

# Without an explicit status, expired requests are hidden by default.
defp filter_by_status(query, _params) do
  where(query, [request], request.status != ^@status_expired)
end
# Filters by the declaration's start year when provided as a binary;
# otherwise the query passes through unchanged.
defp filter_by_start_year(query, %{"start_year" => start_year}) when is_binary(start_year) do
  where(query, [request], request.data_start_date_year == ^start_year)
end

defp filter_by_start_year(query, _params), do: query
# Ensures the declaration request belongs to the given person: its MPI
# id must match exactly, otherwise access is forbidden.
defp validate_person_id(%DeclarationRequest{mpi_id: mpi_id}, mpi_id), do: :ok
defp validate_person_id(_declaration_request, _person_id), do: {:error, :forbidden}
end
| 33.083333 | 95 | 0.690806 |
1c63a0524cacac12ef6ac5db528b37378e3fb551 | 42,407 | ex | Elixir | lib/phoenix/endpoint.ex | Douvi/phoenix | 7832e45e056a48f9dd4bd8178d6c6b0569f19194 | [
"MIT"
] | null | null | null | lib/phoenix/endpoint.ex | Douvi/phoenix | 7832e45e056a48f9dd4bd8178d6c6b0569f19194 | [
"MIT"
] | null | null | null | lib/phoenix/endpoint.ex | Douvi/phoenix | 7832e45e056a48f9dd4bd8178d6c6b0569f19194 | [
"MIT"
] | null | null | null | defmodule Phoenix.Endpoint do
@moduledoc ~S"""
Defines a Phoenix endpoint.
The endpoint is the boundary where all requests to your
web application start. It is also the interface your
application provides to the underlying web servers.
Overall, an endpoint has three responsibilities:
* to provide a wrapper for starting and stopping the
endpoint as part of a supervision tree
* to define an initial plug pipeline for requests
to pass through
* to host web specific configuration for your
application
## Endpoints
An endpoint is simply a module defined with the help
of `Phoenix.Endpoint`. If you have used the `mix phx.new`
generator, an endpoint was automatically generated as
part of your application:
defmodule YourApp.Endpoint do
use Phoenix.Endpoint, otp_app: :your_app
# plug ...
# plug ...
plug YourApp.Router
end
Endpoints must be explicitly started as part of your application
supervision tree. Endpoints are added by default
to the supervision tree in generated applications. Endpoints can be
added to the supervision tree as follows:
children = [
YourApp.Endpoint
]
### Endpoint configuration
All endpoints are configured in your application environment.
For example:
config :your_app, YourApp.Endpoint,
secret_key_base: "kjoy3o1zeidquwy1398juxzldjlksahdk3"
Endpoint configuration is split into two categories. Compile-time
configuration means the configuration is read during compilation
and changing it at runtime has no effect. The compile-time
configuration is mostly related to error handling.
Runtime configuration, instead, is accessed during or
after your application is started and can be read through the
`c:config/2` function:
YourApp.Endpoint.config(:port)
YourApp.Endpoint.config(:some_config, :default_value)
### Dynamic configuration
For dynamically configuring the endpoint, such as loading data
from environment variables or configuration files, Phoenix invokes
the `init/2` callback on the endpoint, passing a `:supervisor`
atom as first argument and the endpoint configuration as second.
All of Phoenix's configuration, except the compile-time configuration
below, can be set dynamically from the `c:init/2` callback.
### Compile-time configuration
* `:code_reloader` - when `true`, enables code reloading functionality.
For code the list of code reloader configuration options see
`Phoenix.CodeReloader.reload!/1`
* `:debug_errors` - when `true`, uses `Plug.Debugger` functionality for
debugging failures in the application. Recommended to be set to `true`
only in development as it allows listing of the application source
code during debugging. Defaults to `false`
* `:render_errors` - responsible for rendering templates whenever there
is a failure in the application. For example, if the application crashes
with a 500 error during a HTML request, `render("500.html", assigns)`
will be called in the view given to `:render_errors`. Defaults to:
[view: MyApp.ErrorView, accepts: ~w(html), layout: false]
The default format is used when none is set in the connection
### Runtime configuration
* `:cache_static_manifest` - a path to a json manifest file that contains
static files and their digested version. This is typically set to
"priv/static/cache_manifest.json" which is the file automatically generated
by `mix phx.digest`
* `:check_origin` - configure transports to check `origin` header or not. May
be `false`, `true`, a list of hosts that are allowed, or a function provided as
MFA tuple. Hosts also support wildcards.
For example, using a list of hosts:
check_origin: ["//phoenixframework.org", "//*.example.com"]
or a custom MFA function:
check_origin: {MyAppWeb.Auth, :my_check_origin?, []}
The MFA is invoked with the request `%URI{}` as the first argument,
followed by arguments in the MFA list
Defaults to `true`.
* `:http` - the configuration for the HTTP server. Currently uses
Cowboy and accepts all options as defined by
[`Plug.Cowboy`](https://hexdocs.pm/plug_cowboy/). Defaults to `false`
* `:https` - the configuration for the HTTPS server. Currently uses
Cowboy and accepts all options as defined by
[`Plug.Cowboy`](https://hexdocs.pm/plug_cowboy/). Defaults to `false`
* `:force_ssl` - ensures no data is ever sent via HTTP, always redirecting
to HTTPS. It expects a list of options which are forwarded to `Plug.SSL`.
By default it sets the "strict-transport-security" header in HTTPS requests,
forcing browsers to always use HTTPS. If an unsafe request (HTTP) is sent,
it redirects to the HTTPS version using the `:host` specified in the `:url`
configuration. To dynamically redirect to the `host` of the current request,
set `:host` in the `:force_ssl` configuration to `nil`
* `:secret_key_base` - a secret key used as a base to generate secrets
for encrypting and signing data. For example, cookies and tokens
are signed by default, but they may also be encrypted if desired.
Defaults to `nil` as it must be set per application
* `:server` - when `true`, starts the web server when the endpoint
supervision tree starts. Defaults to `false`. The `mix phx.server`
task automatically sets this to `true`
* `:url` - configuration for generating URLs throughout the app.
Accepts the `:host`, `:scheme`, `:path` and `:port` options. All
keys except `:path` can be changed at runtime. Defaults to:
[host: "localhost", path: "/"]
The `:port` option requires either an integer, string, or
`{:system, "ENV_VAR"}`. When given a tuple like `{:system, "PORT"}`,
the port will be referenced from `System.get_env("PORT")` at runtime
as a workaround for releases where environment specific information
is loaded only at compile-time.
The `:host` option requires a string or `{:system, "ENV_VAR"}`. Similar
to `:port`, when given a tuple like `{:system, "HOST"}`, the host
will be referenced from `System.get_env("HOST")` at runtime.
The `:scheme` option accepts `"http"` and `"https"` values. Default value
is inferred from the top-level `:http` or `:https` option. It is useful
when hosting Phoenix behind a load balancer or reverse proxy and
terminating SSL there.
The `:path` option can be used to override root path. Useful when hosting
Phoenix behind a reverse proxy with URL rewrite rules
* `:static_url` - configuration for generating URLs for static files.
It will fallback to `url` if no option is provided. Accepts the same
options as `url`
* `:watchers` - a set of watchers to run alongside your server. It
expects a list of tuples containing the executable and its arguments.
Watchers are guaranteed to run in the application directory, but only
when the server is enabled. For example, the watcher below will run
the "watch" mode of the webpack build tool when the server starts.
You can configure it to whatever build tool or command you want:
[node: ["node_modules/webpack/bin/webpack.js", "--mode", "development",
"--watch-stdin"]]
The `:cd` option can be used on a watcher to override the folder from
which the watcher will run. By default this will be the project's root:
`File.cwd!()`
[node: ["node_modules/webpack/bin/webpack.js", "--mode", "development",
"--watch-stdin"], cd: "my_frontend"]
* `:live_reload` - configuration for the live reload option.
Configuration requires a `:patterns` option which should be a list of
file patterns to watch. When these files change, it will trigger a reload.
If you are using a tool like [pow](http://pow.cx) in development,
you may need to set the `:url` option appropriately.
live_reload: [
url: "ws://localhost:4000",
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
]
* `:pubsub` - configuration for this endpoint's pubsub adapter.
Configuration either requires a `:name` of the registered pubsub
server or a `:name` and `:adapter` pair. The pubsub name and adapter
are compile time configuration, while the remaining options are runtime.
The given adapter and name pair will be started as part of the supervision
tree. If no adapter is specified, the pubsub system will work by sending
events and subscribing to the given name. Defaults to:
[adapter: Phoenix.PubSub.PG2, name: MyApp.PubSub]
It also supports custom adapter configuration:
[name: :my_pubsub, adapter: Phoenix.PubSub.Redis,
host: "192.168.100.1"]
## Endpoint API
In the previous section, we have used the `c:config/2` function that is
automatically generated in your endpoint. Here's a list of all the functions
that are automatically defined in your endpoint:
* for handling paths and URLs: `c:struct_url/0`, `c:url/0`, `c:path/1`,
`c:static_url/0`,`c:static_path/1`, and `c:static_integrity/1`
* for handling channel subscriptions: `c:subscribe/2` and `c:unsubscribe/1`
* for broadcasting to channels: `c:broadcast/3`, `c:broadcast!/3`,
`c:broadcast_from/4`, and `c:broadcast_from!/4`
* for configuration: `c:start_link/0`, `c:config/2`, and `c:config_change/2`
* as required by the `Plug` behaviour: `c:Plug.init/1` and `c:Plug.call/2`
## Instrumentation
Phoenix uses the `:telemetry` library for instrumentation. The following events
are published by Phoenix with the following measurements and metadata:
* `[:phoenix, :endpoint, :start]` - dispatched by `Plug.Telemetry` in your
endpoint at the beginning of every request.
* Measurement: `%{time: System.monotonic_time}`
* Metadata: `%{conn: Plug.Conn.t}`
* `[:phoenix, :endpoint, :stop]` - dispatched by `Plug.Telemetry` in your
endpoint whenever the response is sent
* Measurement: `%{duration: native_time}`
* Metadata: `%{conn: Plug.Conn.t}`
* `[:phoenix, :router_dispatch, :start]` - dispatched by `Phoenix.Router`
before dispatching to a matched route
* Measurement: `%{time: System.monotonic_time}`
* Metadata: `%{conn: Plug.Conn.t, route: binary, plug: module, plug_opts: term, path_params: map, pipe_through: [atom]}`
* `[:phoenix, :router_dispatch, :stop]` - dispatched by `Phoenix.Router`
after successfully dispatching to a matched route
* Measurement: `%{duration: native_time}`
* Metadata: `%{conn: Plug.Conn.t, route: binary, plug: module, plug_opts: term, path_params: map, pipe_through: [atom]}`
* `[:phoenix, :error_rendered]` - dispatched at the end of an error view being rendered
* Measurement: `%{duration: native_time}`
* Metadata: `%{status: Plug.Conn.status, kind: Exception.kind, reason: term, stacktrace: Exception.stacktrace}`
* `[:phoenix, :socket_connected]` - dispatched at the end of a socket connection
* Measurement: `%{duration: native_time}`
* Metadata: `%{endpoint: atom, transport: atom, params: term, connect_info: map, vsn: binary, user_socket: atom, result: :ok | :error, serializer: atom}`
* `[:phoenix, :channel_joined]` - dispatched at the end of a channel join
* Measurement: `%{duration: native_time}`
* Metadata: `%{params: term, socket: Phoenix.Socket.t}`
* `[:phoenix, :channel_handled_in]` - dispatched at the end of a channel handle in
* Measurement: `%{duration: native_time}`
* Metadata: `%{event: binary, params: term, socket: Phoenix.Socket.t}`
"""
@type topic :: String.t
@type event :: String.t
@type msg :: map
require Logger
# Configuration
@doc """
Starts the endpoint supervision tree.
Starts endpoint's configuration cache and possibly the servers for
handling requests.
"""
@callback start_link() :: Supervisor.on_start
@doc """
Access the endpoint configuration given by key.
"""
@callback config(key :: atom, default :: term) :: term
@doc """
Reload the endpoint configuration on application upgrades.
"""
@callback config_change(changed :: term, removed :: term) :: term
@doc """
Initialize the endpoint configuration.
Invoked when the endpoint supervisor starts, allows dynamically
configuring the endpoint from system environment or other runtime sources.
"""
@callback init(:supervisor, config :: Keyword.t) :: {:ok, Keyword.t}
# Paths and URLs
@doc """
Generates the endpoint base URL, but as a `URI` struct.
"""
@callback struct_url() :: URI.t
@doc """
Generates the endpoint base URL without any path information.
"""
@callback url() :: String.t
@doc """
Generates the path information when routing to this endpoint.
"""
@callback path(path :: String.t) :: String.t
@doc """
Generates the static URL without any path information.
"""
@callback static_url() :: String.t
@doc """
Generates a route to a static file in `priv/static`.
"""
@callback static_path(path :: String.t) :: String.t
@doc """
Generates an integrity hash to a static file in `priv/static`.
"""
@callback static_integrity(path :: String.t) :: String.t | nil
@doc """
Generates a two item tuple containing the `static_path` and `static_integrity`.
"""
@callback static_lookup(path :: String.t) :: {String.t, String.t} | {String.t, nil}
# Channels
@doc """
Subscribes the caller to the given topic.
See `Phoenix.PubSub.subscribe/3` for options.
"""
@callback subscribe(topic, opts :: Keyword.t) :: :ok | {:error, term}
@doc """
Unsubscribes the caller from the given topic.
"""
@callback unsubscribe(topic) :: :ok | {:error, term}
@doc """
Broadcasts a `msg` as `event` in the given `topic`.
"""
@callback broadcast(topic, event, msg) :: :ok | {:error, term}
@doc """
Broadcasts a `msg` as `event` in the given `topic`.
Raises in case of failures.
"""
@callback broadcast!(topic, event, msg) :: :ok | no_return
@doc """
Broadcasts a `msg` from the given `from` as `event` in the given `topic`.
"""
@callback broadcast_from(from :: pid, topic, event, msg) :: :ok | {:error, term}
@doc """
Broadcasts a `msg` from the given `from` as `event` in the given `topic`.
Raises in case of failures.
"""
@callback broadcast_from!(from :: pid, topic, event, msg) :: :ok | no_return
@doc false
defmacro __using__(opts) do
  # Injected into every module that does `use Phoenix.Endpoint`. Each of
  # the four private helpers below contributes one slice of the generated
  # endpoint: config bootstrap, pubsub API, plug pipeline, and server API.
  quote do
    @behaviour Phoenix.Endpoint

    unquote(config(opts))
    unquote(pubsub())
    unquote(plug())
    unquote(server())
  end
end
# Returns the quoted config bootstrap injected by `__using__/1`.
#
# Requires the `:otp_app` option at compile time, loads the endpoint
# configuration, and exposes `config`/`code_reloading?` as hygienic
# variables (via var!) for the other `__using__` fragments. Also defines
# an overridable `init/2` so endpoints can adjust config at runtime.
defp config(opts) do
  quote do
    @otp_app unquote(opts)[:otp_app] || raise "endpoint expects :otp_app to be given"
    var!(config) = Phoenix.Endpoint.Supervisor.config(@otp_app, __MODULE__)
    var!(code_reloading?) = var!(config)[:code_reloader]

    # Avoid unused variable warnings
    _ = var!(code_reloading?)

    @doc false
    def init(_key, config) do
      {:ok, config}
    end

    defoverridable init: 2
  end
end
@doc false
def __pubsub_server__!(module) do
  # Fetches the endpoint's configured pubsub server, raising a descriptive
  # error when :pubsub was never configured for this endpoint.
  if server = module.__pubsub_server__() do
    server
  else
    # Fixed message: the original read "configured at," with a dangling
    # "at" (the endpoint module interpolation was missing) and the example
    # configured `MyApp.PubSub` — but :pubsub is an option of the endpoint
    # module's own config entry.
    raise ArgumentError, """
    no :pubsub server configured at #{inspect(module)}, please setup :pubsub in your config.

    By default this looks like:

        config :my_app, #{inspect(module)},
          ...,
          pubsub: [name: MyApp.PubSub,
                   adapter: Phoenix.PubSub.PG2]
    """
  end
end
# Returns the quoted pubsub API injected by `__using__/1`.
#
# Every generated function resolves the configured pubsub server lazily
# through `__pubsub_server__/0` (memoized in the endpoint's config cache)
# and then delegates to Phoenix.PubSub / Phoenix.Channel.Server.
defp pubsub() do
  quote do
    # Cached lookup of this endpoint's pubsub server name.
    def __pubsub_server__ do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_pubsub_server__,
        &Phoenix.Endpoint.Supervisor.pubsub_server/1)
    end

    # TODO v2: Remove pid version
    @doc false
    def subscribe(pid, topic) when is_pid(pid) and is_binary(topic) do
      IO.warn "#{__MODULE__}.subscribe/2 is deprecated, please use subscribe/1"
      Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), pid, topic, [])
    end

    def subscribe(pid, topic, opts) when is_pid(pid) and is_binary(topic) and is_list(opts) do
      Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), pid, topic, opts)
    end

    def subscribe(topic) when is_binary(topic) do
      Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, [])
    end

    def subscribe(topic, opts) when is_binary(topic) and is_list(opts) do
      Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, opts)
    end

    # TODO v2: Remove pid version
    @doc false
    def unsubscribe(pid, topic) do
      IO.warn "#{__MODULE__}.unsubscribe/2 is deprecated, please use unsubscribe/1"
      Phoenix.PubSub.unsubscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic)
    end

    def unsubscribe(topic) do
      Phoenix.PubSub.unsubscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic)
    end

    def broadcast_from(from, topic, event, msg) do
      Phoenix.Channel.Server.broadcast_from(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), from, topic, event, msg)
    end

    def broadcast_from!(from, topic, event, msg) do
      Phoenix.Channel.Server.broadcast_from!(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), from, topic, event, msg)
    end

    def broadcast(topic, event, msg) do
      Phoenix.Channel.Server.broadcast(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, event, msg)
    end

    def broadcast!(topic, event, msg) do
      Phoenix.Channel.Server.broadcast!(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, event, msg)
    end
  end
end
defp plug() do
quote location: :keep do
use Plug.Builder, init_mode: Phoenix.plug_init_mode()
import Phoenix.Endpoint
Module.register_attribute(__MODULE__, :phoenix_sockets, accumulate: true)
if force_ssl = Phoenix.Endpoint.__force_ssl__(__MODULE__, var!(config)) do
plug Plug.SSL, force_ssl
end
if var!(config)[:debug_errors] do
use Plug.Debugger,
otp_app: @otp_app,
banner: {Phoenix.Endpoint.RenderErrors, :__debugger_banner__, []},
style: [
primary: "#EB532D",
logo: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAJEAAABjCAYAAACbguIxAAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAHThJREFUeAHtPWlgVOW197vbLNkTFoFQlixAwpIVQZ8ooE+tRaBWdoK4VF5tfe2r1tb2ta611r6n9b1Xd4GETRGxIuJSoKACAlkIkD0hsiRoIHtmues7J3LpOJ2Z3Jm5yUxi5s+991vOOd+5Z777fWf7CGXA79Ct46ZGmyPnshw9WaX5qTSlJBCKjqU51aoohKVUivaIRqUUmlactEK3iCp1gablTztsnZ9kbK16w2P7wcKw5AAJhKqiBWlzIyIjVrKsnKtQ7HiiqiaGZQOC5Qm/JAkiUekqSha2X7/x2JP1FOXw1G6wLDw4oPvFl94+ZVmkib9HJnQuy7MRfUW+qoqSLMtHWi60PzB9Z+2BvsI7iEc/B3wK0d8Wjk8dHRX7B5hjbqBZU6R+sMa3VBWFUiSxqLmhdc303XVHjMcwCDFQDngUosO3JF0VPzz2eSKRLJrjPLbxhVARYYXDUCKlKAJFMV00yw731d6fOlWVKadT/mjSxsIb/ek32Lb3OPANAdl/c3La8CExmziGnUYYz2thd1JwhpBk5RDDyBccTuWgKNpqWxzCsdk76iuwbdXiyd/nIqO2ufcL9lmVBZvgcP5k4pYTrwcLa7B/cBy4LESVeVlvsxS9wN+ZR1Jkioi2B5M3nPiTJ1LqVuXaCcuaPdUZUSbJjg9T1hXfZASsQRiBcYDULJ/2OM1zDxOa0zf1eMFDROmcQ5Jeam7peE+iKOfQ+IjFHM//gqF7T4A0UhD3dflHkusHd3EaS/r0SupWZO+lCHWFwislio2Kpi30cKKQZEKYGEL7L1e4ZqFkRSWs/2upYEauSpKjpblldvaOmkPBwBns6z8HLn/O3Lsenjs+N2pU7G94hr6JpjnevT4cn0GQ1HZb29JBZWXfvh2vQuRCBg2z1W5i4q9zKQvfW1mmOrrsy6duPb4pfIkcWJTp+V4p4zcUzrY72h9SJCX8R88wVGSEdWPZkskrw5/YgUGhnpno8khLbk9dHBMZu4Wimctl4XqjKCrV4ehcmbH5xAZXGsuWTLpFdSpylyC1t3RIjQfLv2h6pInqdG0zeO8fB/wSIgR9clnGw1aL5Un/0ISmtSorVJe97cYpb1R8pFFQtSzzBc5iXoPPMqyhCKOqlEycKqW2gHL0vCqRvR1S146srRX7tD6DV98c8FuIEFxlXnYxz/EZvkGHR60kSUrjVy1TZu2qKdMoqr4j8wOWMXvVeOMsJqlyB0vkfRdPtz42aGbROOf5GpAQIai61Tlgiw1Ot+SZJONLFUUU5q49GlPvokequStzM0OZl/SEDWczmLIq2mwdv8rcVvVOT+2/jfV6FtYe+SJQ9CseK8KwEFUUu1flNLqSlvxa8VKH0/msa5mnezT/EJ6fGBubsL1qdfahVxOj4z21+zaXBTwTIdNq7siVGIYN/1X2pTcsCY6alILiFNcXfmxR+qrICMsrIGica7m3e0WWRFWyP+zNzOOt30AuD3gmQqbAwnRPf2IOy5uTa1dlfuxK87Q3T64/V9o0RhLFBtdyb/c0w3KMKeqZyhVZu721+baVByVELS3tv+pvDANT3vUVt019xpXuWYVfNKbkHx0liM7tuKjW8+NNpjk1q6af/9vkcYa5uejBG45tgvqc4YCq83I6WY7rM09Ho5jY1n5xiSfzCOqRLBbrWormh+rBBYt20emw/yht88lX9bQfiG2CmomQIYqifN4fGRMZGb1p46QRY9xpT9tSvnPc2sJhotjxgiLLTvd692dcS1ms0a9U5uW85173bXkOWohssrSjPzKLAfXEjNzEclfa86cOH4aRK1iWmn/iR0nrDpslQdiqqKLo2s7TPc9xt1Tm5bafXDL1fk/1A7ks6M/Z7mmJo8ZmjDpLs0H
LY0j4jAtqXA8hclzfjM+M/7ugCqUTNxxf7EIQe3LFlGdZYlrC89wQl3KPt7IoXJAVeqfU1b4lfXvlB66Ntt88OmnikJhFxEbH7zt+4el7qxouuNb3x/ughQgHXZU3vZPjmH63LtJemCRIx1IKjnRr4E8unHCTJTZ2l6jIdRPWH03S2mjX0vmp3zVbI+6jeeYqQjGxPf15upWVYFNBPytCE4jAU0WiKC2CxHz44aHa+++vaW7XYPfXqzFCtHz6Kc7MjO2vTEC6FcX5XtLaonl4j4JkjY/fJUO0UofofCBzc+lzWO7+++yWpMnDYyMXixQ7nefIBAjFjCZEtUA7FvTcDAM7PZUhqqLS4OyptqhELBEd4sa0LScK3GH152dDhKhmedZ+xmy6pj8zAmmXFfHl5LVH78X76vkTfsAOid+K9+h+2253/EKvj9IPR1LW5fEjEzY2N1x8uYGyIYxgfwe/m3JldBSXwUhsMmdhR6gmlVFE9UvJQVU7VMeJUBqMDRGiyhW563gTuypYRoVD/06b8NSUzYUPIy0YqcKazW9prr4oTJIsrE3eeOw/e5tWnOVi46z3WhjTXIUm42iKNnt1V4ZgCZjuHLIqldrt0p/1CrtRYzBEiMpXZDxiNll+ZxRRoYYjO2xPaIKCbsJxo4fsZxnGrNGFBl14bcVSl1yQ9mYJ2hAhvi74H35G+cjIOxWKzOYYZojesC13zIIk1rWdbV7SV94HhggR2p+io6LXuQ+mPz/bHfYn0zaW/AbH8MhQKnLZTbnlHM8muo+JyJIsqmoDuCaVU4rzI8Uhnjxc/OWh1fWtre5tXZ9xVzs0Ne5as4WZrlDMbI6iU2iOxfWUIT8VTHyCKP9u4qbixw0B6AOIIUKkLUR94OmXVXab49W0zcX3aMR3x+Yx/EKa9s02FCxYU4sQ8yIwtGSTZGJHGDRLWWSFtcLim4f9Gs+yva8XcQqdz00sOP4zbQy9cfXNDZ0YcdE3fHj8Ia/fbJ1wwrGZ6LTtSN1w7FaNtuOLJ/5rpDVig16ziNYvlFdvJh6jaOqfGkKjRq8DDmeyzqtbmX1Zs42utmgWcbZ2/QnSlTh0gAh5k8iImI29SYQhQoQ2SAr0aAP1h05paGg+sWhitx4JxzlxW+mDKesOW9DGJshSR6jHjv7i3mhAn6+qpZk7vdUHW27I5wxtTtdkjWkA9VrYOqih5lhQpFJVkbfbZaUyyuYUO62mRCvDzuNYMoMwvLUnZn6dvEJ6KzW/8Hb3tjUrJj8AMNaAFns85B4whK/uOLRnRQTHcVWqVwh3UHYIn6uivbZVkM7yFjbJyloywI63EN7EFML8Y82F4V7791XG9bTg13D4czVksOEuROiN2NLWNidne9Wn3phTtiLzVRPN3KknoQVkzGlz2OwPpb9R9pI7vP3ZY0YMGR/zM85ims8Q6jtGJbNAtQJYTqpE1bFpUsGJpwGvzyBAtAOOzorfBgEVV2s0uipTtTIjroYIUbcRNvuK0zQJP8d9zFrS0dl+nR6NLuqEYkYl7OY5NkoPc0X498s222OTtp1EXZHH3/GFk25gIyw3w7phGsXQYymVDCUU7MwYiqMU0s1/lIbudQUDzwqoDVFHrqgCTOunZUqusovC2+7xcx6ReSgsWzTlZ+ZIy39DbgUK0vE0jV9XOMxDs6CKDBGitWNjY6+ZlXKB4cLP3xomoYbk9V9b6fVyqvaOnHqa4cbobY8vxympG/YfPv97vVZ5nL2ThltGMhZyeUZRRIYRz9guXHui4Yxe3HradQedRidswU96/s7Po4wO1jREiHAgdXfmOAjhTHoG1Zdt0OV1Qn7R9/3FWbUyq4jjTZn+9MMYN0LJpwVZ3c112D5I+WvlW/707822WtCmvbP1vrQ3yv9iJC7DhKhq1ZVtHEtHG0mcEbCCUbZVrZy6jeMj/BZAjW70AiCM0qnI9JegYHTSKjFJolSTurl4IbQxxFSi4dJzxYRjsIcrSc0/MlNPe71tDNn
idyNTlLD0i6EJ/0+mCr3MSS0ovc3W2bYGdkPdGme9/bR2+HmnaT6G5dhUCBKZAnvw0QorVUE9uIb0/U9S7WtZosYYjZk1CiCjyhAc+M+2JaPgBwqHZugZgfbFfpd2YC/V5GW9D9v3G8C+5RfPcDsuU9RRsaP9UXcvx2DoCqRvU2PnywmJVuMmjktEGPY5q1s1rYCw1hWBDK43+2Am250H6mKN8CAcS1HmD1ZOeYol3DzwaExUVdbkyY4GubedlKie6pKo7fM2Fz5W7xK+3Ztj1QkbhejyYl5nH5/NDBOiikVpa0xRMS/4xBaiStQqo+O90egP35oyK9JqGqPS7GgTeDR2KOpFkypWY8SI0bjCGZ5hQoRKtsSpVzSEoxEWbVxoogjnF9GfaTNMiJAJvb1DU2UJwtxAXQfmFU+fEV8vwuG0PzppQ8kjvtqEYx266UrRXApR2RRCkUTw9rfAuToyHMDDKERtpmS5pNPpKMp9q/KvoaLfUCGqzMvYx3OWWUYORpLEM6oqvS122D+4UN1xsq7T1pGenpAWHRN5K01Mi/UGCOACNyn/iK6kDUbS7y8sNPJyZutqnqZmKoRO0JtoApSqqDKoVFXnxpT842gW6bOfoUJkpIcjWqVFxf5rsBM95YsbR34wYX6cNfJVhuN7jAdzCo59EwuKr/MFLxR1Y2HB/uGK3BdZTlmAKoFgacBgS0mit0zIP5wXLCw9/Q0VIkRYuypXhLM8/NoGeyLU2dVxlz9HLmC2D0zW4AmWa1lHe2fYZJZFc9Gs2eMLCKFvAm2/XzzDODb4qAk0kbp1TiohrAofejjiC/LPX9rFC6Iqs9QrEMFyH/Cg13RThgtR9cqsz1jedJXri/P3Xpac9cnri8b52w8t8RaT+S5f/XBddfb4V4mYCcRXu96uQ1rNPLPKH+FR0K6iSkWdorwZ/mR7Zrx7qtSFThoScMWOHh8XMzLBmsxwplQ+klkNm/mhXTbHbzGFjktbQ28NFyI8oWjoFcM+C4ZKm93+6/RNJb8PBEb58mmPms3W3/rqK4pyV2r+4ZAcvYWpkU1m8/+AgVf3Z0sGn20wnr696+CpuwPRd2F2t7vPtjf74kkwdYYLERKDeXvAmW54oIS12ZvnZGyq3Btof83Y6Ks/+Oc0J609muCrjZF16N8zNjPufYY3ZfkDV1aFwvrDzbdcf+LUl/7068u2fn2H9RLW0tV275CY+ICTZEp2VdSLy1O71E3F/1a1Ytoo9I/2VI9lsOuJr12dc3H/3pqk3vD2c8VbtjTzFRPP3uHPWhHdSzpsjgf9+Qx1H6URa8kgVjqNU7mhAk1FgXdSE22XWxy8cszW6jh51a6aYlfajLjvlZkICTuVl9NAcdyIQIhsbb240IhMrTV5OccZjpvsiwZURDrs7fNdc137ao8OeFFjLEnT363e76sdfkKuuibpaTPPrvDHu1EW5Xan0/mX9DeO/coXfK2uaOnUpVaWuZejSTZk843sSdkrgj88ZJeoUJ32Fye+WfaiBieYa68J0Wc3jM0Y+Z0RAUm9e7xXMAOsyZvexnCMTxeV7qNBKflyHL4vfHiw4BVD416jCRmnggZQkZWzhBJr4R/vlAlrg8wfQ3mangauiqP1enriwTaCSmpkwfG/6VtKn/eFX6srvy39Hi4y4vFglg2YxEsUxCcgwPEJDW4g114TIiSmdnXWDpo2fc9fwsCH+XzS2sKAZjF3XC+ljhxy/b+M/FLPC0UvyPY2W17WO2U9JfVkIe/jU6yVW6TSdKK/QYiqgnGNik0SmQrZ4dxbfKLp/5aXN37hTrunZ5wJvzNtxB50L/FU76kM13+gbH2v1WF/W7VLTSxnspis/JUmhr5NUdh40tn2YDAOdL0qRDggzB6m12dZYwDODAcPnR6rl7FaP29X1AJHRMW9663etRxxy7JwuLGpY7VrFn7XNu73JcsmzDbRlmsZmeSqHD2SAidprQ3ogOw0JbfQRL5oF0m5U1VONR/v2BP
IQrlsefoveM76e3/SPjud9rUTN5TcqdHj6YqCOffY2XOe6vSUXR6snsaBtMETrcdHJ1T4G0YD/9BPkjcWGWZCqcrLeA6yK/673jHIqKijSKHN1vakEeszvXi9tatcPmUTb45c6q3evRz/DA5H5z19kZC014UIB1e2NP1uTI7pPlCfz3Bu2UcHzg7V6/juE9alyupVmQfgONqZetq6tsHPgSyre5wdtpenbC//2LXOqHuczd75uPKIJyf6QOh2tLb/0FcUyt55YycOi7TOZNSvEwtA7s1aPRExnsbbJ0KEiDF3tCk24gFPRHgrc4py9cT8w7q//d7guJYHs2tEOKiohN1NOVGEUggCeOfcefuJG/d/ccoVh5573L3NzB0x3RJtXi6ppoWQ+OGLgp1FV7oLUc3KrEJ/dUvePBZQBRA7LOYRxkxfDUe0Rmt5l7rpxRxHRHGCD1+F0yH80Z8cR30mREho1fLM5zmz+Sd6mKy1sXd0/kfam8ef1Z6NuNbdkd2lJ+JVDy70nKSI0gX/505RZZqJIrdCfqEmVRWcsIPr1sMRlhcVSTXD+mg47OiGQXhZDFTEqpeOtMBt95Ej5ya4rwErV+Ye4Xk2Rw8dWhvB0bl5wsbjy7RnvKIVIT5h6HaGI7pjzmCTcRxCrVAx2qPNrU+FCAd0cknG73gL/wir8+A9zLNTfaopKZB/O+Lz9EMHulGTh532R/nnCY4RZbLorE3OL0p2hxWIW43qFP6Op2S6w8IASlOk5WmQdhqickeBX1KCnkhfUHjaGptar7x6Z+0Jd5iuz30uRIgc09hRJvMmjtMXp4YnTc9ZfySu3kBf5cJ5yTPihsR+FsrjtgSnc8+EDUVzXV8I3mNQABhQb3Yv9/UsCNLRCQVHcn210epwszM6KvYPNGHm96SewLCnpgutV898v/pzrb/7NSRChERgcsxfzs0uxIwb7kR5eobptXXD+0dHu68ZPLXVW4bTfNyQ+E96YqReeHrboSeB3SE+lr6l5FH3PoEEPHibgdxhuz/vuCExZdLIkZ/0pLBEA/AXxY1jvKkBQiZE2oDQ6s6x3C8hLovXyrxdMf6rtaVlTvaOmkPe2vhbjovN+MT4T/Xg9xe2p/b4+Spv/OrmeR+frXavDySBqt3peC1tQ/Hd7rD8edZjHkLtdlNz03Q395NuNCEXokuDZcvzsraxhPleT7OCih41qvP51PySn/rDKF9tUdkGQQYlerLl+4Ljq04QpQ74LP/Rm4mhekXGetZk0e2JCCcBdHXZ2+/ydMiNLzq81ek5khXTCNrsnfe7h2GHRIhqV2RtQAvzpPyi+a6DwgNbcrOHga+N+UZIreNzZsKMHJJof9jIxOIVKzP/buLN17rSFOw9mNQ6HYK4Ln3Dca+7UvgD/dXMmS6n9POJE5SgDqLscOedax+c0RhemSyLlB08IKsdsrTHwvHfx5wExbdm326NoZZPKChc4NoH74GOg0BHj8GeuHMTnI5nzjR0fFp/XuwIiRBholBzbNwuyBvU0FDUMMNTFoyy5RlP8DSzElKRj2YgXb37gC8/y87zTkFef7a0/dlATAmX4Vy6wQwaUdaYP8POLWB/qG4HREWt7pKEF71l49fwYio/PetCXJfIinKoqvHL1Z4+hRo8vKJ2Hs4huZ+wNLG3dz3DmLlUnufnj3vtIKlZlXMOPt0j8d61j3ZftXzaa6CQXY19tTJvV/DlVhw26bEeG3oDEGw5OtijzxEkXgJ7q7gudeMxj26t3ZrVmKj7TLTpOkJIErg6WLy5O6AbBbgAnmJU54Zgj9fEvD6syXQv6HrA1dR3yhxcKKu0bANdUBmRlY++OHHxRW+LUI1v5Usn/5znLY+DsFq0MvcrWvchQqoRkhZt37u75rf+eCeiioBWuWw4sySyenXOFpbmFquCUAG+2BPgEHfq+oKj1novu11MxD4kPvYFjqZzwPHqG0nYUS8G1mMbZD+pFBTnG3/7vPHFkAkRMszVlRU1wZC
t/jktd7Q7Q7Vn3JrTkdYZVsaUQdFyNOg8INQd5is4RoMGDZ9EMZLd2bbLqLUC5rBePCt9KYmOyIY1wTCwwIugFuBoRemQiFThlKgzpSebPsor/fIrjUYvVxr0NXMjovk8WeUWuh80iMm4OPj2SApzUaSEOiKp75e3XNi0cNeZWi/wfBZXrcypAKVmEoZJVa7M/oTlyFXdngzwOVRoqu1Ue/OV12+vw+QSPn/IbytvmiIR1gwa7YtfSV1H3fuFVIiQend3EVUWbaJEth74tPqnRnscfjhrzLjEkXF5LA/+PpSSAAkavoLPRNn59rbNs3fUV/jkZpCVOKOOiI170cTAQTLwg7nrNBw5dBoOFGnsghONlE7bodt21JTUe5kd/EWP6xueIZPApSYWTSegKQfNs/Q2CKmFZbkft7W1LfCVftAffCEXIiQW/imwM+Lhxf7jh2sAilZKhC7b6+67gX+06vkO/YnmZI/4JTHTi2mFHuXtW48KTYck/ldPM2HPGL22wI0CBhj2yQ/HnWyhTfhZ3Td55Ojq1s4u7XOIBwO+fvRUjVGH14SFECFXcfrleK77X+rOZZjjBULEGkhk+LkiObcVH2s94W5n0vog865Kj8lkIsyLzTR7DXgaJvnKagvCI6m0coHIdLtDFrf2ohBpJA64a9gIEXJW704FF3eEhu0roRzgCGbHvuA4bGJpxQzJNa16vBhReOwO4U96fZkRx+DPMwfCSoiQRNiClsIWdIpncg0qlWW5tu1CmvsC0SDo3zowl+Jtw2fc4H4wFQ2TvUmRCruTQQEyjsNhJ0Q4NLRsi6L9zzpcWQLiBCT9jUdvy4A6D3b6Jw6E3efMlcLi21IXREbFbnY9sM61Pph79EEWRNubX5W3/zTUcfnBjCMc+oa1EF1iEF+Tl1sEWuP03mAYqu7BqHsKZqdDHc7OHbZOpWrZrpryeoP0Nb1Bc7jB7A9C1M0z9Ig0W9iHIfzZp2E2WAbjDKVSYECRaYEBtbGsgm8Bo0CkDy3CQXcXVFUpkxSpvKK5OT9QbXKwNIZb/34jRJcYx4JNaDdP87NA9xNSXqJdC+wsLaD5PnDxq7anpu+sPRBSgkKIvL8JUTer0CMRDISvEZaZCKkLQ8i+r1Hj7KXIYm2LrevnocydGCpG9Esh0piFsVoRTMQTkAcUzivT0oNptaG5gvXkYMr64qCSfIWG8sCx9msh0oaNJ/bMmHLFU7BcgjPGSEJvzU5oaWcUOEtKwUOBARPtWUOCRuTGppYeoyQ0+vv7dUAIketLQNeFyLj4H0Es2NUwNyX6sxDH0GnI5iECU2yQ//AcIVKjSHO1YofzJMU4K+0XhJb2aKoN8VkddERUNDuUoUgyy/LZkBA9FRIjTwJfnTjNxbe1SViU+W7hVlf6BuL9gBMi95eEXpR8FD+NIfRkQaFHw0vvTkNM06pNoZmLquxophWqrl2mz3W22o7pTeLgjkd7xoxoIybHrDHxzI8hiDGq9VzzNdN31x3R6gfidcALkZEv7cDNyZmxUZbrBNXZ8Pmxzt095QlAAcazWXsK/jOSxlDAGhQiP7iOkaSWePOdRGZmghfBKAJZrWSacmBKOzgbsxFcaY/YHLZ39WZd8wN1WDcdFKIAX0/Zooz7OAv7EHgJjnYHAX5P7USRPty3t3qN5gjm3mYgPQ8KUZBvs2hB2tzouIh1kIE80R0UhiBDvNnatM3F97jXDaTnQSEy6G1WrMh43WSyrPYEDqMsxhcUTvJUNxDKBoXIwLdYsnTyimizeb2nJBGSIJxKKSgcbyC6sAE1KEQGvwp0gh86JOEouOh2qxJcwQuiUDIhvzDTtWwg3HtWuQ6EkYVoDJjw4PyZC9PRQOtOAs/xGRXLpv3Bvby/Pw8KUS+8was/ri+52NW+UJHAPuL2482mhzAixa24Xz8OClEvvT605jd3tS6ApKHfOGKCEIaaM3NkUS+hDQnYQSHqRbajIH1WeCZRFaVvhCujbqlmdc5LvYi
6T0EPLqz7iN14Wjdtivg1C0eha9Z/OB/x0P49lbf0d4XkoBD1kRBpaNChLiYhYY2JUufIrDpCEkkR5FrE3No9ZmnVYITb9f8BhSZnYemqCy4AAAAASUVORK5CYII="
]
end
# Compile after the debugger so we properly wrap it.
@before_compile Phoenix.Endpoint
@phoenix_render_errors var!(config)[:render_errors]
end
end
# Returns the quoted server/config API injected by `__using__/1`.
#
# Generates the supervision entry points (child_spec/1, start_link/1), the
# config accessors, and the URL/path helpers. The helpers read through
# `Phoenix.Config.cache/3`, so after the first call they are cheap ETS
# lookups.
defp server() do
  quote location: :keep, unquote: false do
    @doc """
    Returns the child specification to start the endpoint
    under a supervision tree.
    """
    def child_spec(opts) do
      %{
        id: __MODULE__,
        start: {__MODULE__, :start_link, [opts]},
        type: :supervisor
      }
    end

    @doc """
    Starts the endpoint supervision tree.
    """
    def start_link(_opts \\ []) do
      Phoenix.Endpoint.Supervisor.start_link(@otp_app, __MODULE__)
    end

    @doc """
    Returns the endpoint configuration for `key`.

    Returns `default` if the key does not exist.
    """
    def config(key, default \\ nil) do
      # The endpoint's configuration lives in an ETS table named after
      # the endpoint module itself.
      case :ets.lookup(__MODULE__, key) do
        [{^key, val}] -> val
        [] -> default
      end
    end

    @doc """
    Reloads the configuration given the application environment changes.
    """
    def config_change(changed, removed) do
      Phoenix.Endpoint.Supervisor.config_change(__MODULE__, changed, removed)
    end

    @doc """
    Generates the endpoint base URL without any path information.

    It uses the configuration under `:url` to generate such.
    """
    def url do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_url__,
        &Phoenix.Endpoint.Supervisor.url/1)
    end

    @doc """
    Generates the static URL without any path information.

    It uses the configuration under `:static_url` to generate
    such. It falls back to `:url` if `:static_url` is not set.
    """
    def static_url do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_static_url__,
        &Phoenix.Endpoint.Supervisor.static_url/1)
    end

    @doc """
    Generates the endpoint base URL but as a `URI` struct.

    It uses the configuration under `:url` to generate such.
    Useful for manipulating the URL data and passing it to
    URL helpers.
    """
    def struct_url do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_struct_url__,
        &Phoenix.Endpoint.Supervisor.struct_url/1)
    end

    @doc """
    Returns the host for the given endpoint.
    """
    def host do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_host__,
        &Phoenix.Endpoint.Supervisor.host/1)
    end

    @doc """
    Generates the path information when routing to this endpoint.
    """
    def path(path) do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_path__,
        &Phoenix.Endpoint.Supervisor.path/1) <> path
    end

    @doc """
    Generates the script name.
    """
    def script_name do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_script_name__,
        &Phoenix.Endpoint.Supervisor.script_name/1)
    end

    @doc """
    Generates a route to a static file in `priv/static`.
    """
    def static_path(path) do
      # The static prefix is cached once; the per-path digested name (and
      # its integrity hash) is cached per path by static_lookup/1.
      Phoenix.Config.cache(__MODULE__, :__phoenix_static__,
        &Phoenix.Endpoint.Supervisor.static_path/1) <>
        elem(static_lookup(path), 0)
    end

    @doc """
    Generates a base64-encoded cryptographic hash (sha512) to a static file
    in `priv/static`. Meant to be used for Subresource Integrity with CDNs.
    """
    def static_integrity(path) do
      elem(static_lookup(path), 1)
    end

    @doc """
    Returns a two item tuple with the first item being the `static_path`
    and the second item being the `static_integrity`.
    """
    def static_lookup(path) do
      Phoenix.Config.cache(__MODULE__, {:__phoenix_static__, path},
        &Phoenix.Endpoint.Supervisor.static_lookup(&1, path))
    end
  end
end
@doc false
def __force_ssl__(module, config) do
  # Builds the Plug.SSL options, defaulting the redirect :host to an MFA
  # that asks the endpoint for its configured host at runtime. Returns nil
  # when :force_ssl is not enabled in the endpoint config.
  force_ssl_opts = config[:force_ssl]

  if force_ssl_opts do
    Keyword.put_new(force_ssl_opts, :host, {module, :host, []})
  else
    nil
  end
end
@doc false
defmacro __before_compile__(%{module: module}) do
  sockets = Module.get_attribute(module, :phoenix_sockets)
  otp_app = Module.get_attribute(module, :otp_app)
  instrumentation = Phoenix.Endpoint.Instrument.definstrument(otp_app, module)

  # One do_handler/3 clause per socket transport path, so request dispatch
  # compiles down to pattern matching on the path segments.
  dispatches =
    for {path, socket, socket_opts} <- sockets,
        {path, type, conn_ast, socket, opts} <- socket_paths(module, path, socket, socket_opts) do
      quote do
        defp do_handler(unquote(path), conn, _opts) do
          {unquote(type), unquote(conn_ast), unquote(socket), unquote(Macro.escape(opts))}
        end
      end
    end

  quote do
    defoverridable [call: 2]

    # Inline render errors so we set the endpoint before calling it.
    def call(conn, opts) do
      conn = put_in conn.secret_key_base, config(:secret_key_base)
      conn = put_in conn.script_name, script_name()
      conn = Plug.Conn.put_private(conn, :phoenix_endpoint, __MODULE__)

      try do
        super(conn, opts)
      rescue
        e in Plug.Conn.WrapperError ->
          %{conn: conn, kind: kind, reason: reason, stack: stack} = e
          Phoenix.Endpoint.RenderErrors.__catch__(conn, kind, reason, stack, @phoenix_render_errors)
      catch
        kind, reason ->
          # __STACKTRACE__ replaces System.stacktrace/0, which is
          # deprecated since Elixir 1.7 (requires Elixir 1.7+); it is only
          # valid inside a rescue/catch clause like this one.
          Phoenix.Endpoint.RenderErrors.__catch__(conn, kind, reason, __STACKTRACE__, @phoenix_render_errors)
      end
    end

    @doc false
    def __sockets__, do: unquote(Macro.escape(sockets))

    @doc false
    def __handler__(%{path_info: path} = conn, opts), do: do_handler(path, conn, opts)

    unquote(instrumentation)
    unquote(dispatches)
    defp do_handler(_path, conn, opts), do: {:plug, conn, __MODULE__, opts}
  end
end
  # Expands one `socket/3` declaration into the transport entries
  # ({match_path, type, conn_ast, handler, config}) consumed by
  # __before_compile__. `websocket`/`longpoll` may each be `true`, `false`,
  # or a keyword list of transport options; disabled transports produce no
  # entry. Note the prepends mean longpoll (when enabled) ends up first.
  defp socket_paths(endpoint, path, socket, opts) do
    paths = []
    websocket = Keyword.get(opts, :websocket, true)
    longpoll = Keyword.get(opts, :longpoll, false)

    paths =
      if websocket do
        config = Phoenix.Socket.Transport.load_config(websocket, Phoenix.Transports.WebSocket)
        {conn_ast, match_path} = socket_path(path, config)
        [{match_path, :websocket, conn_ast, socket, config} | paths]
      else
        paths
      end

    paths =
      if longpoll do
        config = Phoenix.Socket.Transport.load_config(longpoll, Phoenix.Transports.LongPoll)
        # LongPoll is dispatched as a regular plug, so its init argument
        # carries the endpoint and socket alongside the transport config.
        plug_init = {endpoint, socket, config}
        {conn_ast, match_path} = socket_path(path, config)
        [{match_path, :plug, conn_ast, Phoenix.Transports.LongPoll, plug_init} | paths]
      else
        paths
      end

    paths
  end
  # Builds the router match pattern for a transport mount point plus the AST
  # that copies any bound path variables into the conn.
  defp socket_path(path, config) do
    end_path_fragment = Keyword.fetch!(config, :path)

    # Join the socket mount path with the transport path fragment (e.g.
    # "/websocket") and compile the result into a Plug match pattern,
    # collecting the path variables it binds.
    {vars, path} =
      String.split(path <> "/" <> end_path_fragment, "/", trim: true)
      |> Enum.join("/")
      |> Plug.Router.Utils.build_path_match()

    conn_ast =
      if vars == [] do
        quote do
          conn
        end
      else
        # Inject the bound variables into both params and path_params so they
        # are visible to the socket's connect callback.
        params_map = {:%{}, [], Plug.Router.Utils.build_path_params_match(vars)}
        quote do
          params = unquote(params_map)
          %{conn | path_params: params, params: params}
        end
      end

    {conn_ast, path}
  end
## API
@doc """
Defines a websocket/longpoll mount-point for a socket.
Note: for backwards compatibility purposes, the `:websocket`
and `:longpoll` options only have an effect if the socket
given as argument has no `transport` declarations in it.
## Options
* `:websocket` - controls the websocket configuration.
Defaults to `true`. May be false or a keyword list
of options. See "Shared configuration" and
"WebSocket configuration" for the whole list
* `:longpoll` - controls the longpoll configuration.
Defaults to `false`. May be true or a keyword list
of options. See "Shared configuration" and
"Longpoll configuration" for the whole list
* `:shutdown` - the maximum shutdown time of each channel
when the endpoint is shutting down. Applies only to
channel-based sockets
## Examples
socket "/ws", MyApp.UserSocket
socket "/ws/admin", MyApp.AdminUserSocket,
longpoll: true,
websocket: [compress: true]
## Path params
It is possible to include variables in the path, these will be
available in the `params` that are passed to the socket.
socket "/ws/:user_id", MyApp.UserSocket,
websocket: [path: "/project/:project_id"]
Note: This feature is not supported with the Cowboy 1 adapter.
## Shared configuration
The configuration below can be given to both `:websocket` and
`:longpoll` keys:
* `:path` - the path to use for the transport. Will default
to the transport name ("/websocket" or "/longpoll")
* `:serializer` - a list of serializers for messages. See
`Phoenix.Socket` for more information
* `:transport_log` - if the transport layer itself should log and,
if so, the level
* `:check_origin` - if we should check the origin of requests when the
origin header is present. It defaults to true and, in such cases,
it will check against the host value in `YourApp.Endpoint.config(:url)[:host]`.
It may be set to `false` (not recommended) or to a list of explicitly
allowed origins.
check_origin: ["https://example.com",
"//another.com:888", "//other.com"]
Note: To connect from a native app be sure to either have the native app
set an origin or allow any origin via `check_origin: false`
* `:code_reloader` - enable or disable the code reloader. Defaults to your
endpoint configuration
* `:connect_info` - a list of keys that represent data to be copied from
the transport to be made available in the user socket `connect/3` callback
The valid keys are:
* `:peer_data` - the result of `Plug.Conn.get_peer_data/1`
* `:headers` - all request headers
* `:uri` - a `%URI{}` with information from the conn
* `{:session, session_config}` - the session information from `Plug.Conn`.
The `session_config` is an exact copy of the arguments given to `Plug.Session`.
This requires the "_csrf_token" to be given as request parameter with
the value of `URI.encode_www_form(Plug.CSRFProtection.get_csrf_token())`
when connecting to the socket. Otherwise the session will be `nil`.
Arbitrary keywords may also appear following the above valid keys, which
is useful for passing custom connection information to the socket.
For example:
socket "/socket", AppWeb.UserSocket,
websocket: [
connect_info: [:peer_data, :headers, :uri, session: [store: :cookie]]
]
With arbitrary keywords:
socket "/socket", AppWeb.UserSocket,
websocket: [
connect_info: [:uri, custom_value: "abcdef"]
]
## Websocket configuration
The following configuration applies only to `:websocket`.
* `:timeout` - the timeout for keeping websocket connections
open after it last received data, defaults to 60_000ms
* `:max_frame_size` - the maximum allowed frame size in bytes.
Supported from Cowboy 2.3 onwards, defaults to "infinity"
    * `:compress` - whether to enable per message compression on
all data frames, defaults to false
## Longpoll configuration
The following configuration applies only to `:longpoll`:
* `:window_ms` - how long the client can wait for new messages
in its poll request
* `:pubsub_timeout_ms` - how long a request can wait for the
pubsub layer to respond
* `:crypto` - options for verifying and signing the token, accepted
by `Phoenix.Token`. By default tokens are valid for 2 weeks
"""
  defmacro socket(path, module, opts \\ []) do
    # Tear the alias apart and store only its root in the AST. This keeps
    # Elixir from tracking a compile-time dependency between the endpoint
    # and the socket module, so changing the socket does not force the
    # endpoint (and, with it, most of the project) to recompile.
    module = tear_alias(module)

    quote do
      @phoenix_sockets {unquote(path), unquote(module), unquote(opts)}
    end
  end
  @doc false
  # Instrumentation hook: captures the caller's location at compile time,
  # then at runtime resolves an endpoint out of the given endpoint / conn /
  # socket and forwards the event to it; when none is found the instrumented
  # fun is simply invoked.
  defmacro instrument(endpoint_or_conn_or_socket, event, runtime \\ Macro.escape(%{}), fun) do
    compile = Phoenix.Endpoint.Instrument.strip_caller(__CALLER__) |> Macro.escape()

    quote do
      case Phoenix.Endpoint.Instrument.extract_endpoint(unquote(endpoint_or_conn_or_socket)) do
        nil -> unquote(fun).()
        endpoint -> endpoint.instrument(unquote(event), unquote(compile), unquote(runtime), unquote(fun))
      end
    end
  end
@doc """
Checks if Endpoint's web server has been configured to start.
* `otp_app` - The OTP app running the endpoint, for example `:my_app`
* `endpoint` - The endpoint module, for example `MyApp.Endpoint`
## Examples
iex> Phoenix.Endpoint.server?(:my_app, MyApp.Endpoint)
true
"""
def server?(otp_app, endpoint) when is_atom(otp_app) and is_atom(endpoint) do
Phoenix.Endpoint.Supervisor.server?(otp_app, endpoint)
end
defp tear_alias({:__aliases__, meta, [h|t]}) do
alias = {:__aliases__, meta, [h]}
quote do
Module.concat([unquote(alias)|unquote(t)])
end
end
defp tear_alias(other), do: other
end
| 45.452304 | 10,138 | 0.733888 |
1c6424553d7465ca2814d818e4aa9fcbcbddde02 | 4,314 | exs | Elixir | test/oban/migrations_test.exs | polvalente/oban | 7267f8f37f8257b735d498d81cc1e43147cefc49 | [
"Apache-2.0"
] | null | null | null | test/oban/migrations_test.exs | polvalente/oban | 7267f8f37f8257b735d498d81cc1e43147cefc49 | [
"Apache-2.0"
] | null | null | null | test/oban/migrations_test.exs | polvalente/oban | 7267f8f37f8257b735d498d81cc1e43147cefc49 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.MigrationsTest do
  use Oban.Case, async: true

  import Oban.Migrations, only: [initial_version: 0, current_version: 0, migrated_version: 2]

  # Number of random up/down version pairs exercised by the
  # "arbitrary versions" test below.
  @arbitrary_checks 20

  # Migration whose target versions are injected through the application
  # env, so each test can choose how far up/down to migrate.
  defmodule StepMigration do
    use Ecto.Migration

    def up do
      Oban.Migrations.up(version: up_version(), prefix: "migrating")
    end

    def down do
      Oban.Migrations.down(version: down_version(), prefix: "migrating")
    end

    defp up_version do
      Application.get_env(:oban, :up_version)
    end

    # NOTE(review): public while up_version/0 is private — presumably an
    # unintentional asymmetry; both are only used inside this module.
    def down_version do
      Application.get_env(:oban, :down_version)
    end
  end

  # Migration that relies on Oban's defaults (migrate fully up/down).
  defmodule DefaultMigration do
    use Ecto.Migration

    def up do
      Oban.Migrations.up(prefix: "migrating")
    end

    def down do
      Oban.Migrations.down(prefix: "migrating")
    end
  end

  # Like DefaultMigration, but opts out of creating the "migrating" schema,
  # exercising the create_schema: false option.
  defmodule DefaultMigrationNoSchemaCreation do
    use Ecto.Migration

    def up do
      Oban.Migrations.up(prefix: "migrating", create_schema: false)
    end

    def down do
      Oban.Migrations.down(prefix: "migrating")
    end
  end

  # High base version so these test migrations sort after any real ones and
  # can be removed by version in clear_migrated/0.
  @base_version 20_300_000_000_000

  test "migrating up and down between specific versions" do
    # Walk up one Oban migration version at a time, checking the recorded
    # version after each step.
    for up <- initial_version()..current_version() do
      Application.put_env(:oban, :up_version, up)

      assert :ok = Ecto.Migrator.up(Repo, @base_version + up, StepMigration)
      assert migrated_version() == up
    end

    assert table_exists?("oban_jobs")
    assert table_exists?("oban_peers")
    assert migrated_version() == current_version()

    # Step back down: version 2 keeps the tables, version 1 drops them.
    Application.put_env(:oban, :down_version, 2)
    assert :ok = Ecto.Migrator.down(Repo, @base_version + 2, StepMigration)
    assert table_exists?("oban_jobs")
    assert migrated_version() == 1

    Application.put_env(:oban, :down_version, 1)
    assert :ok = Ecto.Migrator.down(Repo, @base_version + 1, StepMigration)
    refute table_exists?("oban_jobs")
    refute table_exists?("oban_peers")
  after
    clear_migrated()
  end

  test "migrating up and down between default versions" do
    assert :ok = Ecto.Migrator.up(Repo, @base_version, DefaultMigration)
    assert table_exists?("oban_jobs")
    assert migrated_version() == current_version()

    # Migrating once more to replicate multiple migrations that don't specify a version.
    assert :ok = Ecto.Migrator.up(Repo, @base_version + 1, DefaultMigration)

    assert :ok = Ecto.Migrator.down(Repo, @base_version + 1, DefaultMigration)
    refute table_exists?("oban_jobs")

    # Migrating once more to replicate multiple migrations that don't specify a version.
    assert :ok = Ecto.Migrator.down(Repo, @base_version, DefaultMigration)
  after
    clear_migrated()
  end

  test "migrating up and down between arbitrary versions" do
    # Pair each possible up-version with a strictly lower down-version and
    # sample @arbitrary_checks of them at random.
    ups = 2..current_version()
    dns = 1..(current_version() - 1)

    ups
    |> Enum.zip(dns)
    |> Enum.shuffle()
    |> Enum.take(@arbitrary_checks)
    |> Enum.each(fn {up, down} ->
      Application.put_env(:oban, :up_version, up)
      Application.put_env(:oban, :down_version, down)

      assert :ok = Ecto.Migrator.up(Repo, @base_version, StepMigration)
      assert :ok = Ecto.Migrator.down(Repo, @base_version, StepMigration)

      clear_migrated()
    end)
  end

  test "skipping schema creation when schema doesn't exist" do
    # With create_schema: false and no schema present, migrating must fail
    # and leave nothing behind.
    assert_raise Postgrex.Error, fn ->
      Ecto.Migrator.up(Repo, @base_version, DefaultMigrationNoSchemaCreation)
    end

    refute table_exists?("oban_jobs")
    assert migrated_version() == 0
  after
    clear_migrated()
  end

  test "skipping schema creation when schema does exist" do
    Repo.query!("CREATE SCHEMA IF NOT EXISTS migrating")

    assert :ok = Ecto.Migrator.up(Repo, @base_version, DefaultMigrationNoSchemaCreation)
    assert table_exists?("oban_jobs")
    assert migrated_version() == current_version()
  after
    clear_migrated()
  end

  # Convenience wrapper over the imported migrated_version/2 for the
  # "migrating" prefix used throughout this file.
  defp migrated_version do
    migrated_version(Repo, "migrating")
  end

  # True when `table` exists inside the "migrating" schema.
  defp table_exists?(table) do
    query = """
    SELECT EXISTS (
      SELECT 1
      FROM pg_tables
      WHERE schemaname = 'migrating'
      AND tablename = '#{table}'
    )
    """

    {:ok, %{rows: [[bool]]}} = Repo.query(query)

    bool
  end

  # Removes every trace of a test run: the recorded migration versions and
  # the whole "migrating" schema.
  defp clear_migrated do
    Repo.query("DELETE FROM schema_migrations WHERE version >= #{@base_version}")
    Repo.query("DROP SCHEMA IF EXISTS migrating CASCADE")
  end
end
| 26.145455 | 93 | 0.688688 |
1c6427821024f0e5ffafbeab03629c426cbc5454 | 3,925 | ex | Elixir | clients/dataproc/lib/google_api/dataproc/v1/model/instance_group_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/instance_group_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/instance_group_config.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dataproc.V1.Model.InstanceGroupConfig do
  @moduledoc """
  Optional. The config settings for Compute Engine resources in an instance group, such as a master or worker group.

  ## Attributes

  - accelerators ([AcceleratorConfig]): Optional. The Compute Engine accelerator configuration for these instances.Beta Feature: This feature is still under development. It may be changed before final release. Defaults to: `null`.
  - diskConfig (DiskConfig): Optional. Disk option config settings. Defaults to: `null`.
  - imageUri (String.t): Optional. The Compute Engine image resource used for cluster instances. It can be specified or may be inferred from SoftwareConfig.image_version. Defaults to: `null`.
  - instanceNames ([String.t]): Output only. The list of instance names. Cloud Dataproc derives the names from cluster_name, num_instances, and the instance group. Defaults to: `null`.
  - isPreemptible (boolean()): Optional. Specifies that this instance group contains preemptible instances. Defaults to: `null`.
  - machineTypeUri (String.t): Optional. The Compute Engine machine type used for cluster instances.A full URL, partial URI, or short name are valid. Examples: https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2 n1-standard-2Auto Zone Exception: If you are using the Cloud Dataproc Auto Zone Placement feature, you must use the short name of the machine type resource, for example, n1-standard-2. Defaults to: `null`.
  - managedGroupConfig (ManagedGroupConfig): Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups. Defaults to: `null`.
  - numInstances (integer()): Optional. The number of VM instances in the instance group. For master instance groups, must be set to 1. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  # NOTE: generated code (see the swagger-codegen header above) — several
  # scalar fields are typed `any()` because the generator does not narrow
  # them; the attribute docs in @moduledoc describe the effective types.
  @type t :: %__MODULE__{
          :accelerators => list(GoogleApi.Dataproc.V1.Model.AcceleratorConfig.t()),
          :diskConfig => GoogleApi.Dataproc.V1.Model.DiskConfig.t(),
          :imageUri => any(),
          :instanceNames => list(any()),
          :isPreemptible => any(),
          :machineTypeUri => any(),
          :managedGroupConfig => GoogleApi.Dataproc.V1.Model.ManagedGroupConfig.t(),
          :numInstances => any()
        }

  # Field declarations drive JSON encoding/decoding via GoogleApi.Gax.ModelBase.
  field(:accelerators, as: GoogleApi.Dataproc.V1.Model.AcceleratorConfig, type: :list)
  field(:diskConfig, as: GoogleApi.Dataproc.V1.Model.DiskConfig)
  field(:imageUri)
  field(:instanceNames, type: :list)
  field(:isPreemptible)
  field(:machineTypeUri)
  field(:managedGroupConfig, as: GoogleApi.Dataproc.V1.Model.ManagedGroupConfig)
  field(:numInstances)
end
defimpl Poison.Decoder, for: GoogleApi.Dataproc.V1.Model.InstanceGroupConfig do
  # Delegates to the generated decoder, which resolves nested model structs.
  def decode(value, options) do
    GoogleApi.Dataproc.V1.Model.InstanceGroupConfig.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Dataproc.V1.Model.InstanceGroupConfig do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 56.884058 | 535 | 0.752866 |
1c6437a4c651027ab6e5b5a992667bacc8a7a601 | 355 | exs | Elixir | priv/repo/seeds.exs | Api2sem2021/5-ADS2020-2-equipe6-api | 11484e0232c1edd7fc928aa68d5014f2e3a20d07 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | Api2sem2021/5-ADS2020-2-equipe6-api | 11484e0232c1edd7fc928aa68d5014f2e3a20d07 | [
"MIT"
] | 3 | 2020-09-20T22:59:35.000Z | 2020-09-20T23:00:47.000Z | priv/repo/seeds.exs | SEGURANCA-DA-INFORMACAO-LGPD/sakavault_api | 11484e0232c1edd7fc928aa68d5014f2e3a20d07 | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# SakaVault.Repo.insert!(%SakaVault.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.583333 | 61 | 0.709859 |
1c644086219bc9c54d368430354108da8b840691 | 317 | ex | Elixir | lib/rmq.ex | nmbrone/rmq | 1478c80f886116638bb0a7831fcd55f6570de18c | [
"MIT"
] | null | null | null | lib/rmq.ex | nmbrone/rmq | 1478c80f886116638bb0a7831fcd55f6570de18c | [
"MIT"
] | null | null | null | lib/rmq.ex | nmbrone/rmq | 1478c80f886116638bb0a7831fcd55f6570de18c | [
"MIT"
] | null | null | null | defmodule RMQ do
  @moduledoc """
  A set of tools for convenient work with RabbitMQ.

  Based on `AMQP` library.

  It includes:

    * `RMQ.Connection` - for holding a robust connection to the server.
    * `RMQ.Consumer` - for implementing consumers.
    * `RMQ.RPC` - for implementing RPC via RabbitMQ.

  """
end
| 22.642857 | 71 | 0.678233 |
1c647d703d1730efe69074e3244fd4401c73656f | 1,189 | exs | Elixir | exercism/elixir/nucleotide-count/nucleotide_count_test.exs | kfr2/elixir-experiments | b9aace74d5e352c505440f3a6e493dd714113fac | [
"MIT"
] | null | null | null | exercism/elixir/nucleotide-count/nucleotide_count_test.exs | kfr2/elixir-experiments | b9aace74d5e352c505440f3a6e493dd714113fac | [
"MIT"
] | null | null | null | exercism/elixir/nucleotide-count/nucleotide_count_test.exs | kfr2/elixir-experiments | b9aace74d5e352c505440f3a6e493dd714113fac | [
"MIT"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("nucleotide_count.exs", __DIR__)
end
ExUnit.start
# Skip tests tagged :pending unless explicitly included; `trace` prints each
# test name as it runs.
ExUnit.configure exclude: :pending, trace: true
defmodule NucleotideCountTest do
  use ExUnit.Case

  # DNA strands are given as charlists, so individual nucleotides are the
  # character codes ?A, ?T, ?C and ?G. Uncomment a `@tag :pending` line to
  # skip that test (the suite excludes :pending by default).

  # @tag :pending
  test "empty dna string has no adenine" do
    assert NucleotideCount.count('', ?A) == 0
  end

  # @tag :pending
  test "repetitive cytosine gets counted" do
    assert NucleotideCount.count('CCCCC', ?C) == 5
  end

  # @tag :pending
  test "counts only thymine" do
    assert NucleotideCount.count('GGGGGTAACCCGG', ?T) == 1
  end

  # @tag :pending
  test "empty dna string has no nucleotides" do
    expected = %{?A => 0, ?T => 0, ?C => 0, ?G => 0}
    assert NucleotideCount.histogram('') == expected
  end

  # @tag :pending
  test "repetitive sequence has only guanine" do
    expected = %{?A => 0, ?T => 0, ?C => 0, ?G => 8}
    assert NucleotideCount.histogram('GGGGGGGG') == expected
  end

  # @tag :pending
  test "counts all nucleotides" do
    s = 'AGCTTTTCATTCTGACTGCAACGGGCAATATGTCTCTGTGTGGATTAAAAAAAGAGTGTCTGATAGCAGC'
    expected = %{?A => 20, ?T => 21, ?C => 12, ?G => 17}
    assert NucleotideCount.histogram(s) == expected
  end
end
| 26.422222 | 80 | 0.666106 |
1c647fb793da7376e922af4b2fcbd0aacdcd52e3 | 1,774 | exs | Elixir | test/fixture/apps/example_app/rel/config.exs | tyrchen/overseer | e46dbd965e14d92361f223db1a6d6f44b20272b3 | [
"MIT"
] | 18 | 2018-03-01T05:59:27.000Z | 2021-06-15T13:12:50.000Z | test/fixture/apps/example_app/rel/config.exs | tyrchen/overseer | e46dbd965e14d92361f223db1a6d6f44b20272b3 | [
"MIT"
] | 2 | 2019-01-04T09:37:34.000Z | 2019-05-20T10:45:15.000Z | test/fixture/apps/example_app/rel/config.exs | tyrchen/overseer | e46dbd965e14d92361f223db1a6d6f44b20272b3 | [
"MIT"
] | 4 | 2019-01-04T14:21:50.000Z | 2020-12-19T05:05:22.000Z | # Import all plugins from `rel/plugins`
# They can then be used by adding `plugin MyPlugin` to
# either an environment, or release definition, where
# `MyPlugin` is the name of the plugin module.
Path.join(["rel", "plugins", "*.exs"])
|> Path.wildcard()
|> Enum.map(&Code.eval_file(&1))

use Mix.Releases.Config,
    # This sets the default release built by `mix release`
    default_release: :default,
    # This sets the default environment used by `mix release`
    default_environment: Mix.env()

# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/configuration.html

# You may define one or more environments in this file,
# an environment's settings will override those of a release
# when building in that environment, this combination of release
# and environment configuration is called a profile

# NOTE(review): the node cookies below are committed to version control.
# Anyone holding a cookie can attach to a running node with distribution
# enabled; rotate them and load them from the environment for anything
# beyond a test fixture.

environment :dev do
  # If you are running Phoenix, you should make sure that
  # server: true is set and the code reloader is disabled,
  # even in dev mode.
  # It is recommended that you build with MIX_ENV=prod and pass
  # the --env flag to Distillery explicitly if you want to use
  # dev mode.
  set dev_mode: true
  set include_erts: false
  set cookie: :"*S|BZYYtaj*)[h$*J/rG}KpMl41tVHI,hw&v;.dzFD1]nO}gx@lzLkr{vUms62:c"
end

environment :prod do
  set include_erts: false
  set include_src: false
  set cookie: :"pzG/$v(Xt4S{&j)r?MZsYsyksB?7<@8:%B_k^$&)qG@@Eg6j2~EiW]m0c*d`K,zw"
end

# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default

release :example_app do
  set version: current_version(:example_app)
  set applications: [
    :runtime_tools
  ]
end
| 33.471698 | 81 | 0.732244 |
1c64a695724023998f6bbf93c3a0a889b4c01a77 | 267 | exs | Elixir | config/test.exs | skunkwerks/wax_demo | e0c562ea168654d2475f70cba1ef29e74fc0dd24 | [
"Apache-2.0"
] | 5 | 2019-02-14T18:39:20.000Z | 2022-01-31T05:49:23.000Z | config/test.exs | skunkwerks/wax_demo | e0c562ea168654d2475f70cba1ef29e74fc0dd24 | [
"Apache-2.0"
] | 18 | 2019-03-17T14:21:37.000Z | 2022-03-25T20:55:56.000Z | config/test.exs | skunkwerks/wax_demo | e0c562ea168654d2475f70cba1ef29e74fc0dd24 | [
"Apache-2.0"
] | 4 | 2019-03-17T13:13:24.000Z | 2022-02-01T19:50:22.000Z | use Mix.Config
# NOTE(review): `use Mix.Config` (line above) is deprecated on recent Elixir
# versions in favor of `import Config` — confirm the project's minimum
# Elixir version before switching.

# We don't run a server during test. If one is required,
# you can enable the server option below.
config :wax_demo, WaxDemoWeb.Endpoint,
  http: [port: 4002],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn
| 24.272727 | 56 | 0.737828 |
1c64b98d0e23fa1992bd419477a215080540e155 | 822 | exs | Elixir | apps/ewallet_db/priv/repo/migrations/20180209132236_add_deleted_at_to_api_key_table.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet_db/priv/repo/migrations/20180209132236_add_deleted_at_to_api_key_table.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet_db/priv/repo/migrations/20180209132236_add_deleted_at_to_api_key_table.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletDB.Repo.Migrations.AddDeletedAtToApiKeyTable do
  use Ecto.Migration

  # Adds soft-delete support to API keys: a nullable `deleted_at` timestamp
  # plus an index so filtering on deletion state stays fast.
  def change do
    alter table(:api_key) do
      add :deleted_at, :naive_datetime_usec
    end

    create index(:api_key, [:deleted_at])
  end
end
| 31.615385 | 74 | 0.754258 |
1c64be8369ddadfdf659ae028183127dbb6afba4 | 304 | ex | Elixir | web/oauth/dropbox/utils.ex | taorg/rumbl | 7b212283f8dc2da56eddcba4281a6fff2cf4f378 | [
"Apache-2.0"
] | null | null | null | web/oauth/dropbox/utils.ex | taorg/rumbl | 7b212283f8dc2da56eddcba4281a6fff2cf4f378 | [
"Apache-2.0"
] | null | null | null | web/oauth/dropbox/utils.ex | taorg/rumbl | 7b212283f8dc2da56eddcba4281a6fff2cf4f378 | [
"Apache-2.0"
] | 2 | 2017-08-08T19:41:57.000Z | 2020-05-25T19:14:21.000Z | defmodule ElixirDropbox.Utils do
def to_struct(kind, attrs) do
struct = struct(kind)
Enum.reduce Map.to_list(struct), struct, fn {k, _}, acc ->
case Map.fetch(attrs, Atom.to_string(k)) do
{:ok, v} -> %{acc | k => v}
:error -> acc
end
end
end
end
| 25.333333 | 64 | 0.552632 |
1c64cf62e31b11be47a2a6d837d058e9ef032e00 | 1,475 | ex | Elixir | apps/artemis_api/lib/artemis_api/user_access.ex | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | [
"MIT"
] | 10 | 2019-07-05T19:59:20.000Z | 2021-05-23T07:36:11.000Z | apps/artemis_api/lib/artemis_api/user_access.ex | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | [
"MIT"
] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis_api/lib/artemis_api/user_access.ex | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | [
"MIT"
] | 4 | 2019-07-05T20:04:08.000Z | 2021-05-13T16:28:33.000Z | defmodule ArtemisApi.UserAccess do
@moduledoc """
A thin wrapper around `Artemis.UserAccess`.
Adds functions to validate request user.
Sources Supported:
- `context` from Abinsthe GraphQL
- `socket` from Phoenix Socket
"""
import Artemis.UserAccess
alias Artemis.Helpers.Feature
def authorize(request, permission, action) do
with {:ok, user} <- fetch_user(request),
true <- has?(user, permission) do
action.()
else
_ -> {:error, "Unauthorized User"}
end
end
def authorize_any(request, permissions, action) do
with {:ok, user} <- fetch_user(request),
true <- has_any?(user, permissions) do
action.()
else
_ -> {:error, "Unauthorized User"}
end
end
def authorize_all(request, permissions, action) do
with {:ok, user} <- fetch_user(request),
true <- has_all?(user, permissions) do
action.()
else
_ -> {:error, "Unauthorized User"}
end
end
def require_feature(_request, feature, action) do
case Feature.active?(feature) do
true -> action.()
false -> {:error, "Unauthorized Feature"}
end
end
# Helpers
def get_user(%{assigns: %{user: user}}), do: user
def get_user(%{context: %{user: user}}), do: user
def get_user(_), do: nil
defp fetch_user(%{assigns: %{user: user}}), do: {:ok, user}
defp fetch_user(%{context: %{user: user}}), do: {:ok, user}
defp fetch_user(_), do: {:error, "User not found"}
end
| 24.583333 | 61 | 0.629831 |
1c64d00ac261e5d24f6205f97b3b65abeca2efed | 443 | ex | Elixir | lib/blog/api/api.ex | nlundkvist/blog | 8353b0619e7d88285bb233990dcf8ec4959d6ed8 | [
"MIT"
] | null | null | null | lib/blog/api/api.ex | nlundkvist/blog | 8353b0619e7d88285bb233990dcf8ec4959d6ed8 | [
"MIT"
] | null | null | null | lib/blog/api/api.ex | nlundkvist/blog | 8353b0619e7d88285bb233990dcf8ec4959d6ed8 | [
"MIT"
] | null | null | null | defmodule Blog.Api do
def github_repo_commits(user, repo) when
is_binary(user) or is_atom(user) and
is_binary(repo) or is_atom(repo) do
case Tentacat.Commits.list(user, repo) do
result when is_list(result) and length(result) > 0 ->
{:ok, result}
{number, %{"message" => message}} when is_number(number) ->
{:error, Integer.to_string(number) <> " " <> message}
_error ->
{:error, "Unknown error"}
end
end
end | 29.533333 | 62 | 0.663657 |
1c64fe194ba09b1effb757e044b6dea019df67b0 | 123 | ex | Elixir | lib/google_fit/activity_type/tennis.ex | tsubery/google_fit | 7578b832c560b3b4a78059ac86af6e111812712e | [
"Apache-2.0"
] | 2 | 2017-02-01T13:51:26.000Z | 2019-04-12T11:37:25.000Z | lib/google_fit/activity_type/tennis.ex | tsubery/google_fit | 7578b832c560b3b4a78059ac86af6e111812712e | [
"Apache-2.0"
] | null | null | null | lib/google_fit/activity_type/tennis.ex | tsubery/google_fit | 7578b832c560b3b4a78059ac86af6e111812712e | [
"Apache-2.0"
] | null | null | null | defmodule GoogleFit.ActivityType.Tennis do
  @moduledoc false

  # Numeric Google Fit activity-type code for tennis; the value itself is
  # resolved from the lookup in `GoogleFit.ActivityType`, keyed by this module.
  def code, do: GoogleFit.ActivityType.code(__MODULE__)
end
| 20.5 | 55 | 0.804878 |
1c657f790ba010e9064dd1fa5029a0df1f00aef3 | 4,025 | ex | Elixir | lib/ex_dadata/address/geolocate_address/suggestion/data.ex | Elonsoft/ex_dadata | 71edca65f9d35fd2c9aa6553e87992b377500a1c | [
"MIT"
] | 1 | 2021-08-23T08:24:50.000Z | 2021-08-23T08:24:50.000Z | lib/ex_dadata/address/geolocate_address/suggestion/data.ex | Elonsoft/ex_dadata | 71edca65f9d35fd2c9aa6553e87992b377500a1c | [
"MIT"
] | null | null | null | lib/ex_dadata/address/geolocate_address/suggestion/data.ex | Elonsoft/ex_dadata | 71edca65f9d35fd2c9aa6553e87992b377500a1c | [
"MIT"
] | null | null | null | defmodule ExDadata.Address.GeolocateAddress.Suggestion.Data do
  @moduledoc false

  use Ecto.Schema

  alias Ecto.Changeset

  # Flat, denormalized address breakdown as returned by the DaData
  # "geolocate" endpoint: FIAS/KLADR identifiers plus human-readable parts
  # for every address level (region, area, city, settlement, street, house,
  # block, flat), followed by plan-dependent extras.
  @primary_key false
  embedded_schema do
    field :postal_code, :string
    field :country, :string
    field :country_iso_code, :string
    field :federal_district, :string
    field :region_fias_id, Ecto.UUID
    field :region_kladr_id, :string
    field :region_iso_code, :string
    field :region_with_type, :string
    field :region_type, :string
    field :region_type_full, :string
    field :region, :string
    field :area_fias_id, Ecto.UUID
    field :area_kladr_id, :string
    field :area_with_type, :string
    field :area_type, :string
    field :area_type_full, :string
    field :area, :string
    field :city_fias_id, Ecto.UUID
    field :city_kladr_id, :string
    field :city_with_type, :string
    field :city_type, :string
    field :city_type_full, :string
    field :city, :string
    field :city_district_fias_id, Ecto.UUID
    field :city_district_kladr_id, :string
    field :city_district_with_type, :string
    field :city_district_type, :string
    field :city_district_type_full, :string
    field :city_district, :string
    field :settlement_fias_id, Ecto.UUID
    field :settlement_kladr_id, :string
    field :settlement_with_type, :string
    field :settlement_type, :string
    field :settlement_type_full, :string
    field :settlement, :string
    field :street_fias_id, Ecto.UUID
    field :street_kladr_id, :string
    field :street_with_type, :string
    field :street_type, :string
    field :street_type_full, :string
    field :street, :string
    field :house_fias_id, Ecto.UUID
    field :house_kladr_id, :string
    field :house_type, :string
    field :house_type_full, :string
    field :house, :string
    field :block_type, :string
    field :block_type_full, :string
    field :block, :string
    field :flat_fias_id, Ecto.UUID
    field :flat_type, :string
    field :flat_type_full, :string
    field :flat, :string
    field :postal_box, :string
    field :fias_id, Ecto.UUID
    field :fias_level, :integer
    field :kladr_id, :string
    field :geoname_id, :string
    field :capital_marker, :string
    field :okato, :string
    field :oktmo, :string
    field :tax_office, :string
    field :tax_office_legal, :string
    field :history_values, {:array, :string}

    # Additional fields for all plans.
    field :geo_lat, :float
    field :geo_lon, :float
    field :qc_geo, :integer
    field :fias_code, :string
    field :fias_actuality_state, :integer
    field :city_area, :string

    # For "Extended" and "Max" plans.
    field :beltway_hit, :boolean
    field :beltway_distance, :integer

    # Only for "Max" plan.
    field :flat_area, :integer
    field :flat_price, :decimal
    field :timezone, :string
    field :square_meter_price, :decimal
    # Virtual: populated separately rather than cast from the payload.
    field :metro, :any, virtual: true
  end

  # Every castable field of the schema, flattened for `Changeset.cast/3`.
  @fields ~w(postal_code country country_iso_code federal_district region_fias_id region_kladr_id region_iso_code region_with_type region_type region_type_full region area_fias_id area_kladr_id area_with_type area_type area_type_full area city_fias_id city_kladr_id city_with_type city_type city_type_full city city_district_fias_id city_district_kladr_id city_district_with_type city_district_type city_district_type_full city_district settlement_fias_id settlement_kladr_id settlement_with_type settlement_type settlement_type_full settlement street_fias_id street_kladr_id street_with_type street_type street_type_full street house_fias_id house_kladr_id house_type house_type_full house block_type block_type_full block flat_fias_id flat_type flat_type_full flat postal_box fias_id fias_level kladr_id geoname_id capital_marker okato oktmo tax_office tax_office_legal history_values geo_lat geo_lon qc_geo fias_code fias_actuality_state city_area beltway_hit beltway_distance flat_area flat_price timezone square_meter_price metro)a

  @doc """
  Casts all known DaData suggestion fields from `attrs` into `data`.
  """
  def changeset(data, attrs) do
    Changeset.cast(data, attrs, @fields)
  end
end
| 35 | 1,036 | 0.755031 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.