| code (string, 114–1.05M chars) | path (string, 3–312 chars) | quality_prob (float64, 0.5–0.99) | learning_prob (float64, 0.2–1) | filename (string, 3–168 chars) | kind (1 class) |
|---|---|---|---|---|---|
defmodule Serum.Template.Compiler do
@moduledoc false
_moduledocp = "This module handles template loading and preprocessing."
alias Serum.Plugin
alias Serum.Result
alias Serum.Template
alias Serum.Template.Compiler.Include
@type templates() :: %{optional(binary()) => Template.t()}
@type options :: [
type: Template.template_type(),
includes: templates()
]
@default_options [type: :template, includes: %{}]
@inject """
<%
require Serum.Template.Helpers
import Serum.Template.Helpers
%>
"""
@doc """
Compiles a list of template files.
Code that requires and imports `Serum.Template.Helpers` is injected before
the input data.
The `files` parameter is a list of `Serum.File` structs representing loaded
template files. That is, for each item of this list, the value of `:in_data`
must not be `nil`.
The `options` parameter is a keyword list of additional options controlling
the behavior of this function. The available options are:
- `type`: Either `:template` or `:include`, defaults to `:template`.
- `includes`: A map where the key of each item is the name of the includable
template, and the value associated with the key is a `Serum.Template`
struct, which is an already compiled Serum template.
"""
@spec compile_files([Serum.File.t()], options()) :: Result.t(map())
def compile_files(files, options) do
options = Keyword.merge(@default_options, options)
result =
files
|> Task.async_stream(&compile_file(&1, options))
|> Enum.map(&elem(&1, 1))
|> Result.aggregate_values(:template_loader)
case result do
{:ok, list} -> {:ok, Map.new(list)}
{:error, _} = error -> error
end
end
@spec compile_file(Serum.File.t(), options()) :: Result.t({binary(), Template.t()})
defp compile_file(file, options) do
injected_file = %Serum.File{file | in_data: @inject <> file.in_data}
with {:ok, file2} <- Plugin.processing_template(injected_file),
{:ok, ast} <- compile_string(file2.in_data, options),
template = Template.new(ast, options[:type], file2.src),
name = Path.basename(file2.src, ".html.eex"),
{:ok, template2} <- Plugin.processed_template(template) do
{:ok, {name, template2}}
else
{:ct_error, msg, line} -> {:error, {msg, file.src, line}}
{:error, _} = plugin_error -> plugin_error
end
end
@doc """
Compiles the given EEx string.
"""
@spec compile_string(binary(), options()) ::
{:ok, Macro.t()}
| {:ct_error, binary(), integer()}
def compile_string(string, options) do
compiled = EEx.compile_string(string)
includes = options[:includes] || %{}
case options[:type] do
:include -> {:ok, compiled}
_ -> Include.expand(compiled, includes)
end
rescue
e in EEx.SyntaxError ->
{:ct_error, e.message, e.line}
e in [SyntaxError, TokenMissingError] ->
{:ct_error, e.description, e.line}
end
end
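# A minimal usage sketch (hypothetical file contents; the `include/1` helper and
# a running Serum plugin environment are assumptions): compile includable
# templates first, then pass them to the main templates via `:includes`.
#
#     includes_file = %Serum.File{src: "includes/nav.html.eex", in_data: "<nav></nav>"}
#     {:ok, includes} = Serum.Template.Compiler.compile_files([includes_file], type: :include)
#
#     page_file = %Serum.File{src: "templates/page.html.eex", in_data: ~S(<%= include("nav") %>)}
#     {:ok, templates} = Serum.Template.Compiler.compile_files([page_file], includes: includes)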
| lib/serum/template/compiler.ex | 0.762733 | 0.411318 | compiler.ex | starcoder |
defmodule ExFieldDoc do
@moduledoc """
Documenting `defstruct` fields inline.
## Usage
`ExFieldDoc` adds function documentation which includes fields and their corresponding
default values (if any). Fields are marked to be documented by appending `||` followed
by a string. See the example below and note that keywords cannot be used when using `||`.
```
defmodule Test do
use ExFieldDoc # note: imports `Kernel`, except `defstruct/1`
defstruct [
:undocumented, # This is an undocumented field
:documented || "Documented field with default `nil`",
{:with_default, 1} || "Documented field with default `1`",
does_not_work: 2 || "Does not work due to operator precedence"
]
end
```
"""
defmodule FieldDoc do
@moduledoc false
defstruct [:field, :default, :doc]
end
defmacro defstruct(fields) do
{fields, doc} = split(fields)
quote do
Module.add_doc(__MODULE__, __ENV__.line, :def, {:__struct__, 0}, [], unquote(doc))
Kernel.defstruct(unquote(fields))
end
end
defmacro __using__(_) do
m = __MODULE__
quote do
import Kernel, except: [defstruct: 1]
import unquote(m)
end
end
defp split(fields) do
{fields, field_docs} =
fields
|> Enum.map(&separate_doc_strings/1)
|> Enum.unzip()
{fields, to_doc(field_docs)}
end
defp separate_doc_strings({:||, _line, [inner, doc]}) do
{inner, %FieldDoc{field: field(inner), default: default(inner), doc: doc}}
end
defp separate_doc_strings(other), do: {other, nil}
defp field(field) when is_atom(field), do: field
defp field({field, _value}) when is_atom(field), do: field
defp default(field) when is_atom(field), do: nil
defp default({_field, value}), do: {:default, value}
defp to_doc(field_docs) do
docs =
for %FieldDoc{field: field, default: default, doc: doc} <- field_docs do
default_str =
case default do
nil ->
""
{:default, default} ->
"(default: #{inspect default})"
end
"* `#{field}`: #{doc}#{default_str}"
end |> Enum.join("\n")
"""
Fields documentation.
#{docs}
"""
end
end
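# A hedged sketch of the doc generated for the `Test` module above: `to_doc/1`
# keeps only `||`-annotated fields and renders one bullet per field, so the
# resulting doc for `%Test{}` would read roughly:
#
#     Fields documentation.
#     * `documented`: Documented field with default `nil`
#     * `with_default`: Documented field with default `1`(default: 1)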
| lib/ex_field_doc.ex | 0.839389 | 0.874828 | ex_field_doc.ex | starcoder |
defmodule McProtocol.NBT do
@moduledoc """
Module for reading and writing NBT (http://wiki.vg/NBT)
The optional argument on the read/write functions allows the root tag to be `nil`,
which encodes as an NBT end tag.
"""
@type tag_name :: binary | nil
@type integer_tag :: {:byte | :short | :int | :long, tag_name, integer}
@type float_tag :: {:float | :double, tag_name, float}
@type byte_array_tag :: {:byte_array, tag_name, binary}
@type string_tag :: {:string, tag_name, binary}
@type list_tag :: {:list, tag_name, [tag]}
@type compound_tag :: {:compound, tag_name, [tag]}
@type int_array_tag :: {:int_array, tag_name, [integer]}
@type tag :: integer_tag | float_tag | byte_array_tag | string_tag | list_tag |
compound_tag | int_array_tag
@type t :: compound_tag
@spec read(binary, boolean) :: {t, binary} | nil
def read(bin, optional \\ false), do: McProtocol.NBT.Read.read(bin, optional)
@spec read_gzip(binary, boolean) :: {t, binary} | nil
def read_gzip(bin, optional \\ false), do: McProtocol.NBT.Read.read_gzip(bin, optional)
@spec write(t | nil, boolean) :: binary
def write(struct, optional \\ false), do: McProtocol.NBT.Write.write(struct, optional)
defmodule Read do
@moduledoc false
def read_gzip(bin, optional \\ false) do
decomp = :zlib.gunzip(bin)
read(decomp, optional)
end
def read(bin, optional \\ false) do
{start_tag, bin} = read_tag_id(bin)
if optional and start_tag == :end do
nil
else
read_tag(:compound, bin)
end
end
defp read_tag_id(<<0::8, bin::binary>>), do: {:end, bin}
defp read_tag_id(<<1::8, bin::binary>>), do: {:byte, bin}
defp read_tag_id(<<2::8, bin::binary>>), do: {:short, bin}
defp read_tag_id(<<3::8, bin::binary>>), do: {:int, bin}
defp read_tag_id(<<4::8, bin::binary>>), do: {:long, bin}
defp read_tag_id(<<5::8, bin::binary>>), do: {:float, bin}
defp read_tag_id(<<6::8, bin::binary>>), do: {:double, bin}
defp read_tag_id(<<7::8, bin::binary>>), do: {:byte_array, bin}
defp read_tag_id(<<8::8, bin::binary>>), do: {:string, bin}
defp read_tag_id(<<9::8, bin::binary>>), do: {:list, bin}
defp read_tag_id(<<10::8, bin::binary>>), do: {:compound, bin}
defp read_tag_id(<<11::8, bin::binary>>), do: {:int_array, bin}
defp read_tag(tag, bin) do
{name, bin} = read_type(:string, bin)
{val, bin} = read_type(tag, bin)
{{tag, name, val}, bin}
end
defp read_type(:byte, <<val::signed-integer-1*8, bin::binary>>), do: {val, bin}
defp read_type(:short, <<val::signed-integer-2*8, bin::binary>>), do: {val, bin}
defp read_type(:int, <<val::signed-integer-4*8, bin::binary>>), do: {val, bin}
defp read_type(:long, <<val::signed-integer-8*8, bin::binary>>), do: {val, bin}
defp read_type(:float, <<val::signed-float-4*8, bin::binary>>), do: {val, bin}
defp read_type(:double, <<val::signed-float-8*8, bin::binary>>), do: {val, bin}
defp read_type(:byte_array, bin) do
<<length::signed-integer-4*8, data::binary-size(length), bin::binary>> = bin
{data, bin}
end
defp read_type(:string, bin) do
<<length::unsigned-integer-2*8, name::binary-size(length), bin::binary>> = bin
{to_string(name), bin}
end
defp read_type(:list, bin) do
{tag, bin} = read_tag_id(bin)
<<length::signed-integer-4*8, bin::binary>> = bin
read_list_item(bin, tag, length, [])
end
defp read_type(:compound, bin) do
{tag, bin} = read_tag_id(bin)
read_compound_item(bin, tag, [])
end
defp read_type(:int_array, bin) do
<<length::signed-integer-4*8, bin::binary>> = bin
read_int_array(bin, length, [])
end
defp read_list_item(bin, _, 0, results) do
{results, bin}
end
defp read_list_item(bin, tag, num, results) when is_integer(num) and num > 0 do
{val, bin} = read_type(tag, bin)
read_list_item(bin, tag, num-1, results ++ [{tag, nil, val}])
end
defp read_compound_item(bin, :end, results) do
{results, bin}
end
defp read_compound_item(bin, next_tag, results) do
{result, bin} = read_tag(next_tag, bin)
{tag, bin} = read_tag_id(bin)
read_compound_item(bin, tag, results ++ [result])
end
defp read_int_array(bin, 0, results) do
{results, bin}
end
defp read_int_array(<<val::signed-integer-4*8, bin::binary>>, num, results) when is_integer(num) and num > 0 do
read_int_array(bin, num-1, results ++ [val])
end
end
defmodule Write do
@moduledoc false
def write(struct, optional \\ false) do
if is_nil(struct) and optional do
write_tag_id(:end)
else
{:compound, name, value} = struct
IO.iodata_to_binary write_tag(:compound, name, value)
end
end
# Writes a single tag id
defp write_tag_id(:end), do: <<0::8>>
defp write_tag_id(:byte), do: <<1::8>>
defp write_tag_id(:short), do: <<2::8>>
defp write_tag_id(:int), do: <<3::8>>
defp write_tag_id(:long), do: <<4::8>>
defp write_tag_id(:float), do: <<5::8>>
defp write_tag_id(:double), do: <<6::8>>
defp write_tag_id(:byte_array), do: <<7::8>>
defp write_tag_id(:string), do: <<8::8>>
defp write_tag_id(:list), do: <<9::8>>
defp write_tag_id(:compound), do: <<10::8>>
defp write_tag_id(:int_array), do: <<11::8>>
# Writes a complete tag, including tag type, name and value
defp write_tag(tag, name, value) do
[write_tag_id(tag), write_type(:string, name), write_type(tag, value)]
end
# Writes a tag value of the supplied type
defp write_type(:byte, value) when is_integer(value), do: <<value::signed-integer-1*8>>
defp write_type(:short, value) when is_integer(value), do: <<value::signed-integer-2*8>>
defp write_type(:int, value) when is_integer(value), do: <<value::signed-integer-4*8>>
defp write_type(:long, value) when is_integer(value), do: <<value::signed-integer-8*8>>
defp write_type(:float, value) when is_float(value), do: <<value::signed-float-4*8>>
defp write_type(:double, value) when is_float(value), do: <<value::signed-float-8*8>>
defp write_type(:byte_array, value) when is_binary(value) do
[<<byte_size(value)::signed-integer-4*8>>, value]
end
defp write_type(:string, value) when is_binary(value) do
[<<byte_size(value)::unsigned-integer-2*8>>, value]
end
defp write_type(:list, values) when is_list(values) do
{bin, tag} = write_list_values(values)
[write_tag_id(tag), write_type(:int, length(values)), bin]
end
defp write_type(:compound, [{tag, name, value} | rest]) do
[write_tag(tag, name, value), write_type(:compound, rest)]
end
defp write_type(:compound, []) do
write_tag_id(:end)
end
defp write_type(:int_array, values) when is_list(values) do
[write_type(:int, length(values)), write_int_array_values(values)]
end
defp write_list_values(values) do
{tag, nil, _} = hd(values)
{write_list_values(tag, values), tag}
end
defp write_list_values(tag, values) do
Enum.map(values, fn({f_tag, nil, val}) ->
^tag = f_tag
write_type(tag, val)
end)
end
defp write_int_array_values(values) do
Enum.map(values, fn(value) -> write_type(:int, value) end)
end
end
end
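# A hedged round-trip sketch (hand-built compound tag; note that `read/2`, as
# written above, returns a `{tag, rest}` tuple):
#
#     tag = {:compound, "root", [{:int, "answer", 42}, {:string, "name", "nbt"}]}
#     bin = McProtocol.NBT.write(tag)
#     {^tag, ""} = McProtocol.NBT.read(bin)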
| lib/nbt.ex | 0.680454 | 0.40248 | nbt.ex | starcoder |
defmodule PowerAssert.Renderer do
@moduledoc false
alias PowerAssert.PositionAndValue
@doc """
Renders the test result.
"""
def render(code_ast, position_and_values, lhs_result \\ nil, rhs_result \\ nil)
def render(code_ast, [], lhs_result, rhs_result) do
Macro.to_string(code_ast) <> extra_information(lhs_result, rhs_result)
end
def render(code_ast, position_and_values, lhs_result, rhs_result) do
code_str = Macro.to_string(code_ast)
position_and_values =
Enum.sort(position_and_values, fn %PositionAndValue{position: x_pos},
%PositionAndValue{position: y_pos} ->
x_pos > y_pos
end)
%PositionAndValue{position: max_pos} =
Enum.max_by(position_and_values, fn %PositionAndValue{position: pos} -> pos end)
first_line = String.duplicate(" ", max_pos + 1) |> replace_with_bar(position_and_values)
lines = make_lines([], Enum.count(position_and_values), position_and_values, -1)
Enum.join([code_str, first_line] ++ lines, "\n") <> extra_information(lhs_result, rhs_result)
end
defp make_lines(lines, 0, _, _latest_pos) do
lines
end
defp make_lines(lines, times, position_and_values, latest_pos) do
[%PositionAndValue{position: pos, value: value} | t] = position_and_values
value = inspect(value)
value_len = String.length(value)
lines =
if latest_pos != -1 && latest_pos - (pos + value_len) > 0 do
[last_line | tail_lines] = Enum.reverse(lines)
{before_str, after_str} = String.split_at(last_line, pos)
{_removed_str, after_str} = String.split_at(after_str, value_len)
line = before_str <> value <> after_str
Enum.reverse([line | tail_lines])
else
line = String.duplicate(" ", pos + 1)
line = replace_with_bar(line, position_and_values)
line = String.replace(line, ~r/\|$/, value)
lines ++ [line]
end
make_lines(lines, times - 1, t, pos)
end
defp replace_with_bar(line, position_and_values) do
Enum.reduce(position_and_values, line, fn %PositionAndValue{position: pos}, line ->
{front, back} = String.split_at(line, pos + 1)
String.replace(front, ~r/ $/, "|") <> back
end)
end
defp extra_information(lhs_result, rhs_result)
when is_list(lhs_result) and is_list(rhs_result) do
[
"\n\nonly in lhs: " <> ((lhs_result -- rhs_result) |> inspect),
"only in rhs: " <> ((rhs_result -- lhs_result) |> inspect)
]
|> Enum.join("\n")
end
defp extra_information(lhs_result, rhs_result) when is_map(lhs_result) and is_map(rhs_result) do
lhs_result = Map.delete(lhs_result, :__struct__)
rhs_result = Map.delete(rhs_result, :__struct__)
in_left = Map.split(lhs_result, Map.keys(rhs_result)) |> elem(1)
in_right = Map.split(rhs_result, Map.keys(lhs_result)) |> elem(1)
str = "\n"
str =
if map_size(in_left) != 0 do
str <> "\nonly in lhs: " <> inspect(in_left)
else
str
end
str =
if map_size(in_right) != 0 do
str <> "\nonly in rhs: " <> inspect(in_right)
else
str
end
diff = collect_map_diff(lhs_result, rhs_result)
str =
case Enum.empty?(diff) do
true -> str
false -> str <> "\ndifference:\n" <> Enum.join(diff, "\n")
end
str
end
defp extra_information(lhs_result, rhs_result) do
if String.valid?(lhs_result) && String.valid?(rhs_result) do
extra_information_for_string(lhs_result, rhs_result)
else
""
end
end
defp extra_information_for_string(lhs_result, rhs_result) do
"\n\ndifference:" <> "\n" <> lhs_result <> "\n" <> rhs_result
end
defp collect_map_diff(map1, map2) do
Enum.reduce(map2, [], fn {k, v}, acc ->
case Map.fetch(map1, k) do
{:ok, ^v} ->
acc
{:ok, map1_value} ->
acc ++ ["key #{inspect(k)} => {#{inspect(map1_value)}, #{inspect(v)}}"]
_ ->
acc
end
end)
end
end
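# A hedged rendering sketch (positions are column indexes into the code string;
# values are what each sub-expression evaluated to):
#
#     ast = quote(do: x == y)
#     pvs = [
#       %PowerAssert.PositionAndValue{position: 0, value: 1},
#       %PowerAssert.PositionAndValue{position: 5, value: 2}
#     ]
#     IO.puts(PowerAssert.Renderer.render(ast, pvs))
#     # x == y
#     # |    |
#     # 1    2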
| lib/power_assert/renderer.ex | 0.755186 | 0.551936 | renderer.ex | starcoder |
defmodule Ninjaproxies.Proxy do
defstruct [:alive, :bandwidth, :cityName, :connectTotal, :cookie, :countryCode, :countryName, :craigslist, :created, :facebook, :get, :google, :id, :instagram, :ip, :modified, :paypal, :pinterest, :portNum, :post, :processing, :protocol, :referrer, :regionName, :speed, :twitter, :type, :uptime, :uptimeAverage, :whitelisted, :youtube]
end
defmodule Ninjaproxies.API do
@moduledoc """
Internal module to fetch data from the API
"""
@base "http://ninjaproxies.com/proxies/api/"
@options ~w(order portNum type protocol countryCode countryName regionName speed bandwidth uptime whitelisted referrer cookie get post google facebook paypal craigslist twitter youtube pinterest instagram alive limit key)a
@doc """
request some results from the HTTP API
"""
def request(options \\ %{}) do
api_key = Ninjaproxies.Config.get.api_key
options = options |> Map.put(:key, api_key) |> build_options
case HTTPoison.get(@base <> options, [], [hackney: [follow_redirect: true], timeout: 60000, recv_timeout: 60000]) do
{:ok, %{status_code: 200} = response} ->
Poison.decode!(response.body, keys: :atoms).data |> Enum.map(&parse_proxy/1)
{:ok, %{status_code: code, body: body}} ->
message = body |> Poison.decode! |> Map.get("data")
raise(Ninjaproxies.APIError, [code: code, message: message])
end
end
defp build_options(options) do
options = Enum.filter(options, fn({k, _}) -> Enum.member?(@options, k) end)
options |> Enum.reduce("?", fn(opt, acc) -> acc <> build_option(opt) <> "&" end)
end
defp build_option({key, values}) when is_list(values) do
opts = Enum.reduce(values, fn(v, acc) -> acc <> "+" <> v end)
to_string(key) <> "=" <> opts
end
defp build_option({key, value}) when is_binary(value) do
to_string(key) <> "=" <> value
end
defp build_option({key, value}) do
to_string(key) <> "=" <> to_string(value)
end
defp parse_proxy(proxy) do
proxy = Map.get(proxy, :Proxy)
struct(Ninjaproxies.Proxy, proxy)
end
end
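# A hedged usage sketch (option names come from @options above; the API key is
# read from `Ninjaproxies.Config`, so it must be configured beforehand):
#
#     Ninjaproxies.API.request(%{countryCode: "US", limit: 10})
#     #=> [%Ninjaproxies.Proxy{ip: "...", portNum: ..., ...}, ...]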
| lib/api.ex | 0.520496 | 0.435301 | api.ex | starcoder |
defmodule MeshxConsul.Proxy do
@moduledoc """
Manages sidecar service proxy binary command.
The service mesh data plane uses a system of connected proxies, managed by a control plane application, for service communication.
This module manages starting, stopping and restarting proxy binary commands.
Proxies are started automatically by `MeshxConsul.start/4` and stopped with `MeshxConsul.stop/1`.
The command running the proxy binary for a given service is passed as the service `template` argument to `MeshxConsul.start/4`.
The default proxy command starts the Consul Connect proxy with error log level and uses `/bin/sh` as the shell:
```elixir
[proxy: ["/bin/sh", "-c", "consul connect proxy -log-level err -sidecar-for {{id}}"]]
```
The command is rendered using the Mustache template system; check the `MeshxConsul` documentation for details.
An example proxy command starting [Envoy Proxy](https://www.envoyproxy.io/):
```elixir
[proxy: ["/bin/sh", "-c", "consul connect envoy -sidecar-for {{id}} -- -l error"]]
```
**Note:** Consul application version must be compatible with provided Envoy binary, see: [Consul documentation](https://www.consul.io/docs/connect/proxies/envoy#supported-versions).
The proxy binary command must be able to communicate with Consul: the agent instance address and an ACL token must be provided. Configuration may be supplied as shell environment variables defined by the `:cli_env` key in `config.exs`, or directly in the proxy command. Environment variables are preferred when passing secrets.
"""
alias MeshxConsul.Proxy.{Supervisor, Worker}
@doc """
Starts long-running proxy binary `cmd` for `service_id` service.
"""
@spec start(service_id :: atom() | String.t(), cmd :: [String.t()]) :: DynamicSupervisor.on_start_child() | {:ok, nil}
defdelegate start(service_id, cmd), to: Supervisor
@doc """
Stops `service_id` service proxy.
"""
@spec stop(service_id :: atom() | String.t()) :: :ok | {:error, :not_found}
defdelegate stop(service_id), to: Supervisor
@doc """
Restarts proxy binary for `service_id` service.
```elixir
iex(1)> MeshxConsul.start("service1")
{:ok, "service1-h11", {:tcp, {127, 0, 0, 1}, 1024}}
iex(2)> MeshxConsul.Proxy.restart("service1-h11")
:ok
```
"""
@spec restart(service_id :: atom() | String.t()) :: :ok
def restart(service_id), do: Worker.restart(Supervisor.id(service_id))
@doc """
Returns info about `service_id` service proxy worker.
On success, the result is a map with two keys:
* `:cmd` - the Mustache-rendered command that was used to start the proxy binary,
* `:restarts` - the number of proxy restarts due to proxy command failures.
Exits if `service_id` worker is not running.
```elixir
iex(1)> MeshxConsul.start("service1")
{:ok, "service1-h11", {:tcp, {127, 0, 0, 1}, 1024}}
iex(2)> MeshxConsul.Proxy.info("service1-h11")
%{
cmd: ["/bin/sh", "-c",
"consul connect proxy -log-level err -sidecar-for service1-h11"],
restarts: 0
}
```
"""
@spec info(service_id :: atom() | String.t()) :: %{cmd: String.t(), restarts: non_neg_integer()}
def info(service_id), do: Worker.info(Supervisor.id(service_id))
end
| lib/proxy/proxy.ex | 0.894502 | 0.844281 | proxy.ex | starcoder |
defmodule ComplexNumbers do
@moduledoc false
@typedoc """
In this module, complex numbers are represented as a tuple-pair containing the real and
imaginary parts.
For example, the real number `1` is `{1, 0}`, the imaginary number `i` is `{0, 1}` and
the complex number `4+3i` is `{4, 3}`.
"""
@type complex :: {float, float}
@doc """
Return the real part of a complex number
"""
@spec real(a :: complex) :: float
def real({r, _}), do: r
@doc """
Return the imaginary part of a complex number
"""
@spec imaginary(a :: complex) :: float
def imaginary({_, i}), do: i
@doc """
Multiply two complex numbers, or a real and a complex number
"""
@spec mul(a :: complex | float, b :: complex | float) :: complex
def mul({r1, i1}, {r2, i2}), do: {r1 * r2 - i1 * i2, i1 * r2 + r1 * i2}
def mul(a, {r2, i2}), do: {r2 * a, i2 * a}
def mul({r1, i1}, b), do: {r1 * b, i1 * b}
@doc """
Add two complex numbers, or a real and a complex number
"""
@spec add(a :: complex | float, b :: complex | float) :: complex
def add({r1, i1}, {r2, i2}), do: {r1 + r2, i1 + i2}
def add(a, {r2, i2}), do: add({a, 0.0}, {r2, i2})
def add({r1, i1}, b), do: add({r1, i1}, {b, 0.0})
@doc """
Subtract two complex numbers, or a real and a complex number
"""
@spec sub(a :: complex | float, b :: complex | float) :: complex
def sub({r1, i1}, {r2, i2}), do: {r1 - r2, i1 - i2}
def sub(a, {r2, i2}), do: sub({a, 0.0}, {r2, i2})
def sub({r1, i1}, b), do: sub({r1, i1}, {b, 0.0})
@doc """
Divide two complex numbers, or a real and a complex number
"""
@spec div(a :: complex | float, b :: complex | float) :: complex
def div({r1, i1}, {r2, i2}) do
r_d = (r1 * r2 + i1 * i2) / (r2 ** 2 + i2 ** 2)
i_d = (i1 * r2 - r1 * i2) / (r2 ** 2 + i2 ** 2)
{r_d, i_d}
end
def div(a, {r2, i2}), do: __MODULE__.div({a, 0.0}, {r2, i2})
def div({r1, i1}, b), do: __MODULE__.div({r1, i1}, {b, 0.0})
@doc """
Absolute value of a complex number
"""
@spec abs(a :: complex) :: float
def abs({r, i}) do
Kernel.abs(:math.sqrt(r ** 2 + i ** 2))
end
@doc """
Conjugate of a complex number
"""
@spec conjugate(a :: complex) :: complex
def conjugate({r, i}) do
{r, i * -1}
end
@doc """
Exponential of a complex number
"""
@spec exp(a :: complex) :: complex
def exp({r, i}) do
{:math.exp(r) * :math.cos(i), :math.exp(r) * :math.sin(i)}
end
end
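# A brief usage sketch: (4 + 3i) * (1 - 2i) = 10 - 5i and |3 + 4i| = 5.
#
#     ComplexNumbers.mul({4, 3}, {1, -2})  #=> {10, -5}
#     ComplexNumbers.abs({3, 4})           #=> 5.0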
| complex-numbers/lib/complex_numbers.ex | 0.925911 | 0.837021 | complex_numbers.ex | starcoder |
defmodule YipyipExAuth.Config do
@moduledoc """
Config struct. Keys `:session_ttl`, `:refresh_token_ttl` and `:session_store_module` have no defaults and are mandatory.
Setting `:session_ttl` to `nil` means sessions can live forever, as long as they are refreshed.
The token salts serve to separate one token from another; the real secret is the endpoint's secret key base.
Defaults:
```
%Config{
:refresh_token_ttl, # max age of a refresh token
:session_store_module, # an implementation of YipyipExAuth.SessionStore
:session_ttl, # max age of a session, nil is infinite
access_token_ttl: 1800, # max age of an access token
access_token_salt: "<PASSWORD>", # "namespace" of the access token
refresh_token_salt: "<PASSWORD>", # "namespace" of the refresh token
access_token_key_digest: :sha256, # hashing algorithm of the access token
refresh_token_key_digest: :sha512, # hashing algorithm of the refresh token
access_cookie_name: "_access_token_signature", # name of the access token's signature cookie
refresh_cookie_name: "_refresh_token_signature", # name of the refresh token's signature cookie
access_cookie_opts: [ # access cookie opts for Plug.Conn.put_resp_cookie/4
http_only: true,
extra: "SameSite=Strict",
secure: true
],
refresh_cookie_opts: [ # refresh cookie opts for Plug.Conn.put_resp_cookie/4
http_only: true,
extra: "SameSite=Strict",
secure: true
]
}
```
"""
@enforce_keys [:session_ttl, :refresh_token_ttl, :session_store_module]
defstruct [
:refresh_token_ttl,
:session_store_module,
:session_ttl,
access_token_ttl: 1800,
access_token_salt: "<PASSWORD>",
refresh_token_salt: "<PASSWORD>",
access_token_key_digest: :sha256,
refresh_token_key_digest: :sha512,
access_cookie_name: "_access_token_signature",
refresh_cookie_name: "_refresh_token_signature",
access_cookie_opts: [
http_only: true,
extra: "SameSite=Strict",
secure: true
],
refresh_cookie_opts: [
http_only: true,
extra: "SameSite=Strict",
secure: true
]
]
@type t :: %__MODULE__{
access_token_ttl: pos_integer(),
refresh_token_ttl: pos_integer(),
session_store_module: module(),
session_ttl: pos_integer() | nil,
access_token_salt: binary(),
refresh_token_salt: binary(),
access_token_key_digest: :sha256 | :sha384 | :sha512,
refresh_token_key_digest: :sha512 | :sha256 | :sha384,
access_cookie_name: binary(),
refresh_cookie_name: binary(),
access_cookie_opts: keyword(),
refresh_cookie_opts: keyword()
}
@doc """
Build config struct from enumerable (useful for passing in application environment).
Raises for missing mandatory keys and sets defaults for optional keys.
## Examples / doctests
iex> from_enum([])
** (ArgumentError) the following keys must also be given when building struct YipyipExAuth.Config: [:session_ttl, :refresh_token_ttl, :session_store_module]
iex> %YipyipExAuth.Config{} = from_enum([session_ttl: 30 * 24 * 60 * 60, refresh_token_ttl: 24 * 60 * 60, session_store_module: MyModule])
"""
@spec from_enum(Enum.t()) :: %__MODULE__{}
def from_enum(enum) do
struct!(__MODULE__, enum)
end
end
| lib/config.ex | 0.816845 | 0.591222 | config.ex | starcoder |
if Code.ensure_loaded?(Tesla) do
defmodule RssWatcher.HTTP.Tesla do
@moduledoc """
`Tesla` adapter for HTTP fetching. Used by default if no configuration is
provided.
## Installation
To use, add the following to your dependencies.
```
{:tesla, "~> 1.2.1"}
```
You may need to add additional dependencies based on your HTTP adapter
of choice (hackney, etc.).
"""
@moduledoc since: "0.1.0"
require Logger
@behaviour RssWatcher.HTTP
@spec get_feed(String.t(), Keyword.t()) ::
{:ok, String.t()}
| {:error, {:http_client_error, term}}
| {:error, {:not_xml, String.t()}}
| {:error, {:unsuccessful_request, term}}
@doc """
Fetch HTTP data using `Tesla`
Additional middleware and adapter configuration can be provided through
the `http_client_options` key in the `RssWatcher.Subscription` config.
## Options
- `:adapter` - The tesla HTTP adapter to use. Defaults to `:httpc`. Can be a `module` or a `{module, options}` tuple
- `:middleware` - The tesla middleware to use.
"""
@doc since: "0.1.0"
def get_feed(url, options \\ []) do
with {:ok, %Tesla.Env{status: status, headers: headers, body: body}}
when status == 200 <- Tesla.get(client(options), url),
{true, _content_type} <- is_xml(headers) do
{:ok, body}
else
{false, content_type} ->
{:error, {:not_xml, content_type}}
{:ok, response} ->
{:error, {:unsuccessful_request, response}}
{:error, reason} ->
{:error, {:http_client_error, reason}}
end
end
defp client(options) do
base_middleware = [
{Tesla.Middleware.Timeout, timeout: 10_000},
Tesla.Middleware.FollowRedirects,
{Tesla.Middleware.Retry, delay: 500, max_retries: 10},
{Tesla.Middleware.Headers, [{"user-agent", "Elixir/RssWatcher"}]}
]
middleware = Keyword.get(options, :middleware, [])
adapter = Keyword.get(options, :adapter, Tesla.Adapter.Httpc)
Tesla.client(base_middleware ++ middleware, adapter)
end
defp is_xml(headers) do
case Enum.find(headers, fn {header, _val} -> "content-type" == header end) do
{_, val} ->
# too many MIME types for xml
{String.contains?(val, "xml"), val}
nil ->
{false, ""}
end
end
end
end
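# A hedged usage sketch (the feed URL is illustrative; adapter and middleware
# overrides are optional, as described above):
#
#     RssWatcher.HTTP.Tesla.get_feed("https://example.com/feed.xml",
#       adapter: Tesla.Adapter.Hackney,
#       middleware: [{Tesla.Middleware.BasicAuth, username: "u", password: "p"}]
#     )
#     #=> {:ok, "<?xml ..."} or {:error, {:not_xml, "text/html"}}, etc.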
| lib/rss_watcher/http/adapter/tesla.ex | 0.768212 | 0.691888 | tesla.ex | starcoder |
defmodule BRAN do
alias BRAN.Banks.C6
alias BRAN.Banks.Itau
alias BRAN.Banks.Nubank
alias BRAN.Banks.Santander
@moduledoc """
Documentation for `BRAN`.
"""
@doc """
Validates a bank account.
Returns `{:ok, :valid}` or `{:error, reason}` after checking whether the combination of `bank_code`, `bank_branch`, `account_number` and `digit` is valid.
## Examples
iex> BRAN.validate("341", "2545", "02366", 1)
{:ok, :valid}
"""
@spec validate(String.t(), String.t(), String.t(), String.t() | integer()) ::
{:error,
:invalid_account_number_length
| :invalid_account_type
| :invalid_bank_branch_length
| :not_supported
| :not_valid}
| {:ok, :valid}
def validate(bank_code, bank_branch, account_number, digit) do
parsed_bank_branch = parse_to_integer_list(bank_branch)
parsed_account_number = parse_to_integer_list(account_number)
case bank_code do
"033" ->
Santander.validate(parsed_bank_branch, parsed_account_number, digit)
"341" ->
Itau.validate(parsed_bank_branch, parsed_account_number, digit)
"336" ->
C6.validate(parsed_bank_branch, parsed_account_number, digit)
"260" ->
Nubank.validate(parsed_bank_branch, parsed_account_number, digit)
_ ->
{:error, :not_supported}
end
end
@spec validate(binary, binary, binary) ::
{:error,
:invalid_account_number_length
| :invalid_account_type
| :invalid_bank_branch_length
| :not_supported
| :not_valid}
| {:ok, :valid}
def validate(bank_code, bank_branch, account_with_digit) do
{account, digit} = split_account_and_digit(account_with_digit)
validate(bank_code, bank_branch, account, digit)
end
defp split_account_and_digit(account_with_digit) do
{account, digit} =
account_with_digit
|> String.replace("-", "")
|> String.split_at(-1)
{account, digit}
end
defp parse_to_integer_list(numbers) do
numbers
|> String.codepoints()
|> Enum.map(&String.to_integer/1)
end
end
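# A hedged sketch of the `validate/3` convenience form, which splits off the
# trailing check digit itself (the digit is then passed on as a string, per the
# `String.t() | integer()` spec above):
#
#     BRAN.validate("341", "2545", "02366-1")
#     #=> {:ok, :valid}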
| lib/bran.ex | 0.869811 | 0.507873 | bran.ex | starcoder |
defmodule Cldr.Number.String do
@moduledoc false
@doc """
Returns a regex which matches all latin1 characters
"""
@latin1 ~r/([\x00-\x7F])/
def latin1 do
@latin1
end
@doc """
Returns a regex which matches all non-latin1 characters
"""
@not_latin1 ~r/([^\x00-\x7F])/
def not_latin1 do
@not_latin1
end
@doc """
Replaces characters with a string hex representation
"""
def hex_string(string) do
String.to_charlist(string)
|> Enum.map(&("\\x" <> Integer.to_string(&1)))
|> Enum.join()
end
@doc """
Pads a string (representing a number) with leading "0"'s to the
specified length.
## Options
* `number` is a string representation of a number
* `count` is the final length required of the string
"""
@spec pad_leading_zeros(String.t(), integer) :: String.t()
def pad_leading_zeros(number_string, count) when count <= 0 do
number_string
end
def pad_leading_zeros(number_string, count) do
:binary.copy("0", count - byte_size(number_string)) <> number_string
end
@doc """
Pads a string (representing a number) with trailing "0"'s to the
specified length.
## Options
* `number` is a string representation of a number
* `count` is the final length required of the string
"""
@spec pad_trailing_zeros(String.t(), integer) :: String.t()
def pad_trailing_zeros(number_string, count) when count <= 0 do
number_string
end
def pad_trailing_zeros(number_string, count) do
number_string <> :binary.copy("0", count - byte_size(number_string))
end
@doc """
Split a string up into fixed size chunks.
Returns a list of strings the size of `size` plus potentially
one more chunk at the end that is the remainder of the string
after chunking.
## Examples
iex> Cldr.Number.String.chunk_string("This is a string", 3)
["Thi", "s i", "s a", " st", "rin", "g"]
iex> Cldr.Number.String.chunk_string("1234", 4)
["1234"]
iex> Cldr.Number.String.chunk_string("1234", 3)
["123","4"]
iex> Cldr.Number.String.chunk_string("1234", 3, :reverse)
["1", "234"]
"""
@spec chunk_string(String.t(), integer, :forward | :reverse) :: [String.t()]
def chunk_string(string, size, direction \\ :forward)
def chunk_string(string, 0, _direction) do
[string]
end
def chunk_string("", _size, _) do
[""]
end
if Version.compare(System.version(), "1.6.0") in [:gt, :eq] do
def chunk_string(string, size, :forward) do
string
|> String.to_charlist()
|> Enum.chunk_every(size, size, [])
|> Enum.map(&List.to_string/1)
end
else
def chunk_string(string, size, :forward) do
string
|> String.to_charlist()
|> Enum.chunk(size, size, [])
|> Enum.map(&List.to_string/1)
end
end
def chunk_string(string, size, :reverse) do
len = String.length(string)
remainder = rem(len, size)
if remainder > 0 do
{head, last} = String.split_at(string, remainder)
[head] ++ do_chunk_string(last, size)
else
do_chunk_string(string, size)
end
end
defp do_chunk_string("", _size) do
[]
end
defp do_chunk_string(string, size) do
{chunk, rest} = String.split_at(string, size)
[chunk] ++ do_chunk_string(rest, size)
end
end
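# A brief sketch of the padding helpers above:
#
#     Cldr.Number.String.pad_leading_zeros("42", 5)   #=> "00042"
#     Cldr.Number.String.pad_trailing_zeros("42", 5)  #=> "42000"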
| lib/cldr/number/string.ex | 0.890922 | 0.590632 | string.ex | starcoder |
defmodule ArrowChoice do
@typedoc """
ArrowChoice dictionary
intuitive types, with `Either a b` encoded as `{:Left, a} | {:Right, b}`:
* `left`: arr a b -> arr (Either a c) (Either b c) # mandatory unless `multiplex` is given
* `right`: arr a b -> arr (Either c a) (Either c b) # default implementation provided, optional
* `multiplex`: (arr a b, arr a' b') -> arr (Either a a') (Either b b') # mandatory unless `left` is given
* `merge`: (arr a c, arr b c) -> arr (Either a b) c # default implementation provided, optional
"""
@type t :: %__MODULE__{
arrow: Arrow.t,
}
def __struct__, do: %{
__struct__: __MODULE__,
arrow: Arrow.__struct__(),
left: fn _ -> raise("ArrowChoice : missing definition for left") end,
right: fn _ -> raise("ArrowChoice : missing definition for right") end,
multiplex: fn _ -> raise("ArrowChoice : missing definition for multiplex") end,
merge: fn _ -> raise("ArrowChoice : missing definition for merge") end,
}
def __struct__(kv) do
required_keys = [
:arrow,
:left,
:right,
:multiplex,
:merge,
]
{map, keys} =
Enum.reduce(kv, {__struct__(), required_keys}, fn {key, val}, {map, keys} ->
{Map.replace!(map, key, val), List.delete(keys, key)}
end)
case keys do
[] ->
map
_ ->
raise ArgumentError,
"the following keys must also be given when building " <>
"struct #{inspect(__MODULE__)}: #{inspect(keys)}"
end
end
def define(base_dict) do
base_dict = Map.new(base_dict)
arrow = Map.fetch!(base_dict, :arrow)
{left, multiplex} = case base_dict do
%{left: left} ->
multiplex = Map.get(base_dict, :multiplex, fn arl, arr ->
c = arrow.category
mirror = arrow.arr.(fn
{:Left, l} -> {:Right, l}
{:Right, r} -> {:Left, r}
end)
left.(arl) |> c.>>>.(mirror) |> c.>>>.(left.(arr)) |> c.>>>.(mirror)
end)
{left, multiplex}
%{multiplex: multiplex} ->
left = Map.get(base_dict, :left, fn ar -> multiplex.(ar, arrow.category.id) end)
{left, multiplex}
_ ->
raise("ArrowChoice minimal definition require either `left` or `multiplex`")
end
right = Map.get(base_dict, :right, fn ar -> multiplex.(arrow.category.id, ar) end)
merge = Map.get(base_dict, :merge, fn arl, arr ->
c = arrow.category
untag = arrow.arr.(fn
{:Left , l} -> l
{:Right, r} -> r
end)
multiplex.(arl, arr) |> c.>>>.(untag)
end)
%__MODULE__{
arrow: arrow,
left: left,
right: right,
multiplex: multiplex,
merge: merge,
}
end
end
| typeclassopedia/lib/arrow_choice.ex | 0.855474 | 0.481271 | arrow_choice.ex | starcoder |
defmodule AVLTree do
@moduledoc """
Pure Elixir [AVL tree](https://en.wikipedia.org/wiki/AVL_tree) implementation.
This data structure is very similar to `MapSet`, but unlike the latter,
elements in the `AVLTree` are always sorted in ascending or descending order.
To sort items, `AVLTree` uses a comparison function that looks like:
`less(a, b) :: boolean`
This function returns `true` if element `a` must be placed strictly before element `b`, otherwise it returns false.
`AVLTree` can store duplicate elements.
It is important to understand that duplicate elements are not necessarily the same.
Values `a` and `b` are considered equal if they satisfy the following condition:
`less(a, b) == false and less(b, a) == false`, where `less(x, y)` is the comparison function.
For example, if the comparison function is `fn {a, _}, {b, _} -> a < b end`,
then the elements `{1, 10}` and `{1, 20}` are considered equal, although actually they aren't.
By default, the comparison function is `Kernel.</2`.
## Features
- custom comparison function;
- support for duplicate elements;
- `Collectable`, `Enumerable`, `Inspect` protocols;
- drawing the tree in the console :)
## Basic Usage
By default, inserted elements are sorted in ascending order:
```elixir
iex> tree = AVLTree.new()
#AVLTree<[]>
iex> tree = AVLTree.put(tree, 5)
iex> tree = AVLTree.put(tree, 2)
iex> tree = [1, 3, 6, 4] |> Enum.into(tree)
iex> tree
#AVLTree<[1, 2, 3, 4, 5, 6]>
```
You can specify ordering when creating a tree:
```elixir
iex> tree1 = AVLTree.new(:asc)
iex> tree2 = AVLTree.new(:desc)
iex> [4, 2, 1, 3] |> Enum.into(tree1)
#AVLTree<[1, 2, 3, 4]>
iex> [4, 2, 1, 3] |> Enum.into(tree2)
#AVLTree<[4, 3, 2, 1]>
```
Also you can use a custom comparison function.
Example of a tree with tuples as elements, ordered by the first field
```elixir
iex> tree = AVLTree.new(fn {a, _}, {b, _} -> a < b end)
iex> [{2, "A"}, {3, "B"}, {1, "C"}] |> Enum.into(tree)
#AVLTree<[{1, "C"}, {2, "A"}, {3, "B"}]>
```
Checks if the tree contains a value
```elixir
iex> tree = [5, 2, 1, 3] |> Enum.into(AVLTree.new())
iex> AVLTree.member?(tree, 2)
true
```
`AVLTree` fully supports `Enumerable` protocol
```elixir
iex> tree = [4, 2, 1, 3] |> Enum.into(AVLTree.new())
iex> Enum.to_list(tree)
[1, 2, 3, 4]
iex> Enum.sum(tree)
10
```
## Sorted list of dates
Let's create an ascending list of `DateTime` values.
```elixir
iex> tree = AVLTree.new(fn a, b -> DateTime.compare(a, b) == :lt end)
iex> [
...> ~U[2020-02-03 01:01:01Z],
...> ~U[2020-01-01 01:01:01Z],
...> ~U[2019-10-10 02:11:01Z],
...> ~U[2020-01-01 01:01:02Z]
...> ] |> Enum.into(tree)
#AVLTree<[~U[2019-10-10 02:11:01Z], ~U[2020-01-01 01:01:01Z], ~U[2020-01-01 01:01:02Z], ~U[2020-02-03 01:01:01Z]]>
```
## AVLTree as a map.
If you use a key-value pairs as elements, `AVLTree` can work as a map:
Create a tree
```elixir
tree = AVLTree.new(fn {a, _}, {b, _} -> a < b end)
```
Insert key-value pairs:
```elixir
tree =
tree
|> AVLTree.put({:a, "first value"})
|> AVLTree.put({:c, "third value"})
|> AVLTree.put({:b, "second value"})
```
or
```elixir
tree =
[a: "first value", c: "third value", b: "second value"]
|> Enum.into(tree)
```
Retrieve element by key. We can use anything as a value since comparison function cares only about keys.
```elixir
AVLTree.get(tree, {:b, nil}) # {:b, "second value"}
```
Delete element:
```elixir
AVLTree.delete(tree, {:b, nil}) # #AVLTree<[a: "first value", c: "third value"]>
```
Benefits? Elements are always ordered by keys. Custom comparison function.
## Performance
All inserts, removes and searches in general has complexity of `Ο(lg(n))`.
This implementation is about 4-5 times slower than `MapSet`.
To run benchmark use:
```shell
mix run bench/run.exs
```
"""
alias __MODULE__.Node
defstruct root: nil, size: 0, less: &Kernel.</2
@doc """
Creates a new tree with default ascending order.
```
iex> [3, 1, 4, 2] |> Enum.into(AVLTree.new())
#AVLTree<[1, 2, 3, 4]>
```
"""
@spec new() :: t()
def new() do
%__MODULE__{}
end
@doc """
Creates a new tree with the given `ordering` or comparison function.
```
iex> [3, 1, 4, 2] |> Enum.into(AVLTree.new(:asc))
#AVLTree<[1, 2, 3, 4]>
iex> [3, 1, 4, 2] |> Enum.into(AVLTree.new(:desc))
#AVLTree<[4, 3, 2, 1]>
iex> [3, 1, 4, 2] |> Enum.into(AVLTree.new(fn a, b -> a > b end))
#AVLTree<[4, 3, 2, 1]>
```
"""
@spec new(:asc | :desc | less()) :: t()
def new(ordering) when is_function(ordering) do
%__MODULE__{less: ordering}
end
def new(:asc) do
%__MODULE__{less: &Kernel.</2}
end
def new(:desc) do
%__MODULE__{less: &Kernel.>/2}
end
@doc """
Returns height of the tree.
```
iex> tree = [5, 9, 3, 8, 1, 6, 7] |> Enum.into(AVLTree.new())
#AVLTree<[1, 3, 5, 6, 7, 8, 9]>
iex> AVLTree.height(tree)
4
```
"""
@spec height(t()) :: integer()
def height(%__MODULE__{root: root}) do
Node.height(root)
end
@doc """
Returns the number of elements in the tree
```
iex> tree = [5, 9, 3, 8, 1, 6, 7] |> Enum.into(AVLTree.new())
#AVLTree<[1, 3, 5, 6, 7, 8, 9]>
iex> AVLTree.size(tree)
7
```
"""
@spec size(t()) :: integer()
def size(%__MODULE__{size: size}) do
size
end
@doc """
Retrieves an element equal to `value`.
If the tree contains more than one element equal to `value`, retrieves one of them. It is undefined which one.
Returns `default` if nothing is found.
```
iex> tree = AVLTree.new(fn {a, _}, {b, _} -> a < b end)
#AVLTree<[]>
iex> tree = [a: "A", c: "C", d: "D", b: "B"] |> Enum.into(tree)
#AVLTree<[a: "A", b: "B", c: "C", d: "D"]>
iex> AVLTree.get(tree, {:c, nil}, :error)
{:c, "C"}
iex> AVLTree.get(tree, {:e, nil}, :error)
:error
```
"""
@spec get(t(), value(), term()) :: value() | term()
def get(%__MODULE__{root: root, less: less}, value, default \\ nil) do
Node.get(root, value, default, less)
end
@doc """
Retrieves the first value in the tree.
Returns `default` if the tree is empty.
```
iex> tree = [3, 2, 4, 6] |> Enum.into(AVLTree.new())
#AVLTree<[2, 3, 4, 6]>
iex> AVLTree.get_first(tree)
2
```
"""
@spec get_first(t(), term()) :: value() | term()
def get_first(%__MODULE__{root: root}, default \\ nil) do
Node.get_first(root, default)
end
@doc """
Retrieves the last value in the tree.
Returns `default` if the tree is empty.
```
iex> tree = [3, 2, 4, 6] |> Enum.into(AVLTree.new())
#AVLTree<[2, 3, 4, 6]>
iex> AVLTree.get_last(tree)
6
```
"""
@spec get_last(t(), term()) :: value() | term()
def get_last(%__MODULE__{root: root}, default \\ nil) do
Node.get_last(root, default)
end
@doc """
Retrieves an element equal to `value`.
If the tree contains more than one element equal to `value`, retrieves the first of them
Returns `default` if nothing is found.
```
iex> tree = [b: 21, a: 1, b: 22, c: 3, b: 23] |> Enum.into(AVLTree.new(fn {a, _}, {b, _} -> a < b end))
#AVLTree<[a: 1, b: 21, b: 22, b: 23, c: 3]>
iex> AVLTree.get_lower(tree, {:b, nil})
{:b, 21}
```
"""
@spec get_lower(t(), value(), term()) :: value() | term()
def get_lower(%__MODULE__{root: root, less: less}, value, default \\ nil) do
Node.get_lower(root, value, default, less)
end
@doc """
Retrieves an element equal to `value`.
If the tree contains more than one element equal to `value`, retrieves the last of them
Returns `default` if nothing is found.
```
iex> tree = [b: 21, a: 1, b: 22, c: 3, b: 23] |> Enum.into(AVLTree.new(fn {a, _}, {b, _} -> a < b end))
#AVLTree<[a: 1, b: 21, b: 22, b: 23, c: 3]>
iex> AVLTree.get_upper(tree, {:b, nil})
{:b, 23}
```
"""
@spec get_upper(t(), value(), term()) :: value() | term()
def get_upper(%__MODULE__{root: root, less: less}, value, default \\ nil),
do: Node.get_upper(root, value, default, less)
@doc """
Checks if the tree contains an element equal to `value`.
```
iex> tree = [3, 2, 4, 6] |> Enum.into(AVLTree.new())
#AVLTree<[2, 3, 4, 6]>
iex> AVLTree.member?(tree, 4)
true
iex> AVLTree.member?(tree, 1)
false
```
"""
@spec member?(t(), term()) :: boolean()
def member?(%__MODULE__{root: root, less: less}, value), do: Node.member?(root, value, less)
@doc """
Puts the given `value` in the tree.
If the tree already contains elements equal to `value`, replaces one of them. It is undefined which one.
```
iex> tree = [b: 2, a: 1, c: 3] |> Enum.into(AVLTree.new(fn {a, _}, {b, _} -> a < b end))
#AVLTree<[a: 1, b: 2, c: 3]>
iex> AVLTree.put(tree, {:d, 4})
#AVLTree<[a: 1, b: 2, c: 3, d: 4]>
iex> AVLTree.put(tree, {:a, 11})
#AVLTree<[a: 11, b: 2, c: 3]>
```
"""
@spec put(t(), value()) :: t()
def put(%__MODULE__{root: root, size: size, less: less} = avl_tree, value) do
case Node.put(root, value, less) do
{:update, root} -> %{avl_tree | root: root}
root -> %{avl_tree | root: root, size: size + 1}
end
end
@doc """
Puts the given `value` in the tree.
If the tree already contains elements equal to `value`, inserts `value` before them.
```
iex> tree = [b: 21, a: 11, d: 41, c: 31] |> Enum.into(AVLTree.new(fn {a, _}, {b, _} -> a < b end))
#AVLTree<[a: 11, b: 21, c: 31, d: 41]>
iex> tree = AVLTree.put_lower(tree, {:a, 12})
#AVLTree<[a: 12, a: 11, b: 21, c: 31, d: 41]>
iex> tree = AVLTree.put_lower(tree, {:b, 22})
#AVLTree<[a: 12, a: 11, b: 22, b: 21, c: 31, d: 41]>
iex> AVLTree.put_lower(tree, {:d, 42})
#AVLTree<[a: 12, a: 11, b: 22, b: 21, c: 31, d: 42, d: 41]>
```
"""
@spec put_lower(t(), value()) :: t()
def put_lower(%__MODULE__{root: root, size: size, less: less} = avl_tree, value) do
%{avl_tree | root: Node.put_lower(root, value, less), size: size + 1}
end
@doc """
Puts the given `value` in the tree.
If the tree already contains elements equal to `value`, inserts `value` after them.
```
iex> tree = [b: 21, a: 11, d: 41, c: 31] |> Enum.into(AVLTree.new(fn {a, _}, {b, _} -> a < b end))
#AVLTree<[a: 11, b: 21, c: 31, d: 41]>
iex> tree = AVLTree.put_upper(tree, {:a, 12})
#AVLTree<[a: 11, a: 12, b: 21, c: 31, d: 41]>
iex> tree = AVLTree.put_upper(tree, {:b, 22})
#AVLTree<[a: 11, a: 12, b: 21, b: 22, c: 31, d: 41]>
iex> AVLTree.put_upper(tree, {:d, 42})
#AVLTree<[a: 11, a: 12, b: 21, b: 22, c: 31, d: 41, d: 42]>
```
`Enum.into/2` uses `put_upper/2`:
```
iex> [a: 11, c: 31, a: 12, b: 21, a: 13] |> Enum.into(AVLTree.new(fn {a, _}, {b, _} -> a < b end)) |> Enum.to_list()
[a: 11, a: 12, a: 13, b: 21, c: 31]
```
"""
@spec put_upper(t(), value()) :: t()
def put_upper(%__MODULE__{root: root, size: size, less: less} = avl_tree, value) do
%{avl_tree | root: Node.put_upper(root, value, less), size: size + 1}
end
@doc """
Deletes an element equal to the given `value`.
If the tree contains more than one element equal to `value`, deletes one of them. It is undefined which one.
If no element is found, returns the tree unchanged.
```
iex> tree = [3, 2, 1, 4] |> Enum.into(AVLTree.new())
#AVLTree<[1, 2, 3, 4]>
iex> AVLTree.delete(tree, 3)
#AVLTree<[1, 2, 4]>
iex> AVLTree.delete(tree, 5)
#AVLTree<[1, 2, 3, 4]>
```
"""
@spec delete(t(), value()) :: t()
def delete(%__MODULE__{root: root, size: size, less: less} = avl_tree, value) do
case Node.delete(root, value, less) do
{true, a} -> %{avl_tree | root: a, size: size - 1}
{false, _} -> avl_tree
end
end
@doc """
Deletes an element equal to the given `value`.
If the tree contains more than one element equal to `value`, deletes the first of them.
If no element is found, returns the tree unchanged.
```
iex> tree = [b: 21, a: 1, b: 22, c: 3, b: 23] |> Enum.into(AVLTree.new(fn {a, _}, {b, _} -> a < b end))
#AVLTree<[a: 1, b: 21, b: 22, b: 23, c: 3]>
iex> AVLTree.delete_lower(tree, {:b, nil})
#AVLTree<[a: 1, b: 22, b: 23, c: 3]>
```
"""
@spec delete_lower(t(), value()) :: t()
def delete_lower(%__MODULE__{root: root, size: size, less: less} = avl_tree, value) do
case Node.delete_lower(root, value, less) do
{true, a} -> %{avl_tree | root: a, size: size - 1}
{false, _} -> avl_tree
end
end
@doc """
Deletes an element equal to the given `value`.
If the tree contains more than one element equal to `value`, deletes the last of them.
If no element is found, returns the tree unchanged.
```
iex> tree = [b: 21, a: 1, b: 22, c: 3, b: 23] |> Enum.into(AVLTree.new(fn {a, _}, {b, _} -> a < b end))
#AVLTree<[a: 1, b: 21, b: 22, b: 23, c: 3]>
iex> AVLTree.delete_upper(tree, {:b, nil})
#AVLTree<[a: 1, b: 21, b: 22, c: 3]>
```
"""
@spec delete_upper(t(), value()) :: t()
def delete_upper(%__MODULE__{root: root, size: size, less: less} = avl_tree, value) do
case Node.delete_upper(root, value, less) do
{true, a} -> %{avl_tree | root: a, size: size - 1}
{false, _} -> avl_tree
end
end
@doc """
Displays the tree in human readable form.
```
iex> tree = 1..10 |> Enum.into(AVLTree.new())
#AVLTree<[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]>
iex> IO.puts AVLTree.view(tree)
```
```shell
4
┌─┴───┐
2 8
┌┴┐ ┌─┴─┐
1 3 6 9
┌┴┐ ┌┴─┐
5 7 10
```
"""
@spec view(t()) :: String.t()
def view(%__MODULE__{root: root}) do
Node.view(root)
end
defimpl Enumerable do
import AVLTree.Node, only: [iter_lower: 1, next: 1, value: 1]
def reduce(%AVLTree{root: root}, {:cont, acc}, fun) do
iter_lower(root) |> next() |> reduce({:cont, acc}, fun)
end
def reduce(iter, {state, acc}, fun) do
case state do
:halt ->
{:halted, acc}
:suspend ->
{:suspended, acc, &reduce(iter, &1, fun)}
:cont ->
case iter do
:none -> {:done, acc}
{e, iter} -> reduce(next(iter), fun.(value(e), acc), fun)
end
end
end
def member?(%AVLTree{} = tree, value) do
{:ok, AVLTree.member?(tree, value)}
end
def count(%AVLTree{size: size}) do
{:ok, size}
end
def slice(_) do
{:error, __MODULE__}
end
end
defimpl Collectable do
def into(original) do
{
original,
fn
tree, {:cont, value} -> AVLTree.put_upper(tree, value)
tree, :done -> tree
_, :halt -> :ok
end
}
end
end
@opaque t() :: %__MODULE__{}
@type value() :: term()
@type less() :: (value(), value() -> boolean())
end
defimpl Inspect, for: AVLTree do
def inspect(%AVLTree{} = tree, opts) do
cnt = tree |> Enum.take(opts.limit + 1) |> Enum.to_list() |> inspect
"#AVLTree<#{cnt}>"
end
end
| lib/avl_tree/tree.ex | 0.944944 | 0.937268 | tree.ex | starcoder |
defmodule ExDgraph.Mutation do
@moduledoc """
Provides the functions for the callbacks from the DBConnection behaviour.
"""
alias ExDgraph.{Exception, MutationStatement, Transform}
@doc false
def mutation(conn, statement) do
case mutation_commit(conn, statement) do
{:error, f} -> {:error, code: f.code, message: f.message}
r -> {:ok, r}
end
end
@doc false
def mutation!(conn, statement) do
case mutation(conn, statement) do
{:ok, r} ->
r
{:error, code: code, message: message} ->
raise Exception, code: code, message: message
end
end
@doc false
def set_map(conn, map) do
map_with_tmp_uids = insert_tmp_uids(map)
json = Poison.encode!(map_with_tmp_uids)
case set_map_commit(conn, json, map_with_tmp_uids) do
{:error, f} -> {:error, code: f.code, message: f.message}
r -> {:ok, r}
end
end
@doc false
def set_map!(conn, map) do
case set_map(conn, map) do
{:ok, r} ->
r
{:error, code: code, message: message} ->
raise Exception, code: code, message: message
end
end
@doc false
def set_struct(conn, struct) do
uids_and_schema_map = set_tmp_ids_and_schema(struct)
json = Poison.encode!(uids_and_schema_map)
case set_struct_commit(conn, json, uids_and_schema_map) do
{:error, f} -> {:error, code: f.code, message: f.message}
r -> {:ok, r}
end
end
@doc false
def set_struct!(conn, struct) do
case set_struct(conn, struct) do
{:ok, r} ->
r
{:error, code: code, message: message} ->
raise Exception, code: code, message: message
end
end
defp mutation_commit(conn, statement) do
exec = fn conn ->
q = %MutationStatement{statement: statement}
case DBConnection.execute(conn, q, %{}) do
{:ok, resp} -> Transform.transform_mutation(resp)
other -> other
end
end
# Response.transform(DBConnection.run(conn, exec, run_opts()))
DBConnection.run(conn, exec, run_opts())
end
defp set_map_commit(conn, json, map_with_tmp_uids) do
exec = fn conn ->
q = %MutationStatement{set_json: json}
case DBConnection.execute(conn, q, %{}) do
{:ok, resp} ->
parsed_response = Transform.transform_mutation(resp)
# Now exchange the tmp ids for the ones returned from the db
result_with_uids = replace_tmp_uids(map_with_tmp_uids, parsed_response.uids)
Map.put(parsed_response, :result, result_with_uids)
other ->
other
end
end
# Response.transform(DBConnection.run(conn, exec, run_opts()))
DBConnection.run(conn, exec, run_opts())
end
defp set_struct_commit(conn, json, struct_with_tmp_uids) do
exec = fn conn ->
q = %MutationStatement{set_json: json}
case DBConnection.execute(conn, q, %{}) do
{:ok, resp} ->
parsed_response = Transform.transform_mutation(resp)
# Now exchange the tmp ids for the ones returned from the db
result_with_uids = replace_tmp_struct_uids(struct_with_tmp_uids, parsed_response.uids)
Map.put(parsed_response, :result, result_with_uids)
other ->
other
end
end
DBConnection.run(conn, exec, run_opts())
end
defp insert_tmp_uids(map) when is_list(map), do: Enum.map(map, &insert_tmp_uids/1)
defp insert_tmp_uids(map) when is_map(map) do
map
|> Map.update(:uid, "_:#{UUID.uuid4()}", fn existing_uuid -> existing_uuid end)
|> Enum.reduce(%{}, fn {key, map_value}, a ->
Map.merge(a, %{key => insert_tmp_uids(map_value)})
end)
end
defp insert_tmp_uids(value), do: value
defp set_tmp_ids_and_schema(map) when is_list(map), do: Enum.map(map, &set_tmp_ids_and_schema/1)
defp set_tmp_ids_and_schema(%x{} = map) do
schema = x |> get_schema_name()
map
|> Map.from_struct()
|> Map.update(:uid, "_:#{UUID.uuid4()}", fn
nil -> "_:#{UUID.uuid4()}"
existing_uuid -> existing_uuid
end)
|> Enum.reduce(%{}, fn {key, map_value}, a ->
set_schema(schema, {key, map_value}, a, ExDgraph.config(:enforce_struct_schema))
end)
end
defp set_tmp_ids_and_schema(map) when is_map(map) do
map
|> Map.update(:uid, "_:#{UUID.uuid4()}", fn existing_uuid -> existing_uuid end)
|> Enum.reduce(%{}, fn {key, map_value}, a ->
Map.merge(a, %{key => set_tmp_ids_and_schema(map_value)})
end)
end
defp set_tmp_ids_and_schema(value), do: value
defp replace_tmp_uids(map, uids) when is_list(map),
do: Enum.map(map, &replace_tmp_uids(&1, uids))
defp replace_tmp_uids(map, uids) when is_map(map) do
map
|> Map.update(:uid, map[:uid], fn existing_uuid ->
case String.slice(existing_uuid, 0, 2) == "_:" do
true -> uids[String.replace_leading(existing_uuid, "_:", "")]
false -> existing_uuid
end
end)
|> Enum.reduce(%{}, fn {key, map_value}, a ->
Map.merge(a, %{key => replace_tmp_uids(map_value, uids)})
end)
end
defp replace_tmp_uids(value, _uids), do: value
defp replace_tmp_struct_uids(map, uids) when is_list(map),
do: Enum.map(map, &replace_tmp_struct_uids(&1, uids))
defp replace_tmp_struct_uids(map, uids) when is_map(map) do
map
|> Map.update(:uid, map[:uid], fn existing_uuid ->
case String.slice(existing_uuid, 0, 2) == "_:" do
true -> uids[String.replace_leading(existing_uuid, "_:", "")]
false -> existing_uuid
end
end)
|> Enum.reduce(%{}, fn {key, map_value}, a ->
# delete the schema prefix
key = key |> to_string() |> String.split(".") |> List.last() |> String.to_existing_atom()
Map.merge(a, %{key => replace_tmp_struct_uids(map_value, uids)})
end)
end
defp replace_tmp_struct_uids(value, _uids), do: value
defp get_schema_name(schema) do
schema |> to_string() |> String.split(".") |> List.last() |> String.downcase()
end
defp set_schema(_schema_name, {:uid, map_value}, result, _is_enforced_schema),
do: Map.merge(result, %{:uid => set_tmp_ids_and_schema(map_value)})
defp set_schema(schema_name, {key, map_value}, result, is_enforced_schema)
when is_enforced_schema == true,
do: Map.merge(result, %{"#{schema_name}.#{key}" => set_tmp_ids_and_schema(map_value)})
defp set_schema(_schema_name, {key, map_value}, result, _is_enforced_schema),
do: Map.merge(result, %{key => set_tmp_ids_and_schema(map_value)})
defp run_opts do
[pool: ExDgraph.config(:pool)]
end
end
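# A hedged usage sketch (connection setup omitted; the response shape is
# assumed from `set_map_commit/3` above, which merges a `:result` key carrying
# the input map with database-assigned uids):
#
#     {:ok, response} =
#       ExDgraph.Mutation.set_map(conn, %{name: "Alice", friends: [%{name: "Betty"}]})
#     response.result.uid  #=> uid assigned by Dgraph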
| lib/exdgraph/mutation.ex | 0.525612 | 0.479077 | mutation.ex | starcoder |
defmodule Crux.Structs.Channel do
@moduledoc """
Represents a Discord [Channel Object](https://discordapp.com/developers/docs/resources/channel#channel-object-channel-structure).
Overview of which properties can be present on each channel type:
| Property | Text (0) | DM (1) | Voice (2) | Group (3) | Category (4) | News (5) |
| :-------------------: | :------: | :------------------: | :-------: | :-------: | :----------: | :------: |
| application_id | no | no | no | yes | no | no |
| bitrate | no | no | yes | no | no | no |
| guild_id | yes | no | yes | no | yes | yes |
| icon | no | no | no | yes | no | no |
| id | yes | yes | yes | yes | yes | yes |
| last_message_id | yes | yes | no | yes | no | yes |
| last_pin_timestamp | yes | yes | no | yes | no | yes |
| name | yes | no | yes | yes | yes | yes |
| nsfw | yes | no | no | no | no | yes |
| owner_id | no | no | no | yes | no | no |
| parent_id | yes | no | yes | no | no | yes |
| permission_overwrites | yes | no | yes | no | yes | yes |
| position | yes | no | yes | no | yes | yes |
| rate_limit_per_user | yes | no | no | no | no | no |
| recipients | no | yes<br>(One Element) | no | yes | no | no |
| topic | yes | no | yes | no | yes | yes |
| type | `0` | `1` | `2` | `3` | `4` | `5` |
| user_limit | no | no | yes | no | no | no |
Differences compared to the Discord API object:
- `:recipients` is a MapSet of user ids
"""
@behaviour Crux.Structs
alias Crux.Structs
alias Crux.Structs.{Channel, Message, Overwrite, Snowflake, Util}
require Util
Util.modulesince("0.1.0")
defstruct(
application_id: nil,
bitrate: nil,
guild_id: nil,
icon: nil,
id: nil,
last_message_id: nil,
last_pin_timestamp: nil,
name: nil,
nsfw: nil,
owner_id: nil,
parent_id: nil,
permission_overwrites: %{},
position: nil,
rate_limit_per_user: nil,
recipients: %MapSet{},
topic: nil,
type: nil,
user_limit: nil
)
Util.typesince("0.1.0")
@type t :: %__MODULE__{
application_id: Snowflake.t(),
bitrate: integer(),
guild_id: Snowflake.t(),
icon: String.t(),
id: Snowflake.t(),
last_message_id: Snowflake.t(),
last_pin_timestamp: String.t(),
name: String.t(),
nsfw: boolean(),
owner_id: Snowflake.t(),
parent_id: Snowflake.t(),
permission_overwrites: %{optional(Snowflake.t()) => Overwrite.t()},
position: integer(),
rate_limit_per_user: integer(),
recipients: MapSet.t(Snowflake.t()),
topic: String.t(),
type: integer(),
user_limit: non_neg_integer()
}
@typedoc """
All available types that can be resolved into a channel id.
"""
Util.typesince("0.2.1")
@type id_resolvable() :: Message.t() | Channel.t() | Snowflake.t() | String.t()
@doc """
Resolves the id of a `t:Crux.Structs.Channel.t/0`.
> Automatically invoked by `Crux.Structs.resolve_id/2`.
```elixir
iex> %Crux.Structs.Message{channel_id: 222079895583457280}
...> |> Crux.Structs.Channel.resolve_id()
222079895583457280
iex> %Crux.Structs.Channel{id: 222079895583457280}
...> |> Crux.Structs.Channel.resolve_id()
222079895583457280
iex> 222079895583457280
...> |> Crux.Structs.Channel.resolve_id()
222079895583457280
iex> "222079895583457280"
...> |> Crux.Structs.Channel.resolve_id()
222079895583457280
```
"""
@spec resolve_id(id_resolvable()) :: Snowflake.t() | nil
Util.since("0.2.1")
def resolve_id(%Message{channel_id: channel_id}) do
resolve_id(channel_id)
end
def resolve_id(%Channel{id: id}) do
resolve_id(id)
end
def resolve_id(resolvable), do: Structs.resolve_id(resolvable)
@typedoc """
All available types that can be resolved into a channel position.
"""
Util.typesince("0.2.1")
@type position_resolvable() ::
Channel.t()
| %{channel: id_resolvable(), position: integer()}
| {id_resolvable(), integer()}
| %{id: id_resolvable(), position: integer()}
@doc """
Resolves a `t:position_resolvable/0` into a channel position.
## Examples
```elixir
iex> %Crux.Structs.Channel{id: 222079895583457280, position: 5}
...> |> Crux.Structs.Channel.resolve_position()
%{id: 222079895583457280, position: 5}
iex> {%Crux.Structs.Channel{id: 222079895583457280}, 5}
...> |> Crux.Structs.Channel.resolve_position()
%{id: 222079895583457280, position: 5}
iex> {222079895583457280, 5}
...> |> Crux.Structs.Channel.resolve_position()
%{id: 222079895583457280, position: 5}
iex> %{id: 222079895583457280, position: 5}
...> |> Crux.Structs.Channel.resolve_position()
%{id: 222079895583457280, position: 5}
iex> {nil, 5}
...> |> Crux.Structs.Channel.resolve_position()
nil
```
"""
Util.since("0.2.1")
@spec resolve_position(position_resolvable()) :: %{id: Snowflake.t(), position: integer()} | nil
def resolve_position(resolvable)
def resolve_position(%Channel{id: id, position: position}) do
validate_position(%{id: id, position: position})
end
def resolve_position(%{channel: resolvable, position: position}) do
validate_position(%{id: resolve_id(resolvable), position: position})
end
def resolve_position(%{id: resolvable, position: position}) do
validate_position(%{id: resolve_id(resolvable), position: position})
end
def resolve_position({resolvable, position}) do
validate_position(%{id: resolve_id(resolvable), position: position})
end
@spec validate_position(%{id: Snowflake.t(), position: integer()}) :: %{
id: Snowflake.t(),
position: integer()
}
@spec validate_position(%{id: nil, position: integer()}) :: nil
defp validate_position(%{id: nil, position: _}), do: nil
defp validate_position(%{id: _id, position: position} = entry)
when is_integer(position) do
entry
end
@doc """
Creates a `t:Crux.Structs.Channel.t/0` struct from raw data.
> Automatically invoked by `Crux.Structs.create/2`
"""
@spec create(data :: map()) :: t()
Util.since("0.1.0")
def create(data) do
channel =
data
|> Util.atomify()
|> Map.update!(:id, &Snowflake.to_snowflake/1)
|> Map.update(:guild_id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:owner_id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:last_message_id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:application_id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:parent_id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:permission_overwrites, %{}, &Util.raw_data_to_map(&1, Overwrite))
|> Map.update(:recipients, %MapSet{}, &MapSet.new(&1, Util.map_to_id()))
struct(__MODULE__, channel)
end
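# A hedged usage sketch of create/1; the field values below are made up for
# illustration:
#
#   %{"id" => "222079895583457280", "type" => 0, "name" => "general"}
#   |> Crux.Structs.Channel.create()
#   #=> %Crux.Structs.Channel{id: 222079895583457280, type: 0, name: "general", ...}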
@doc ~S"""
Converts a `t:Crux.Structs.Channel.t/0` into its discord mention format.
## Example
```elixir
iex> %Crux.Structs.Channel{id: 316880197314019329}
...> |> Crux.Structs.Channel.to_mention()
"<#316880197314019329>"
```
"""
@spec to_mention(channel :: Crux.Structs.Channel.t()) :: String.t()
Util.since("0.1.1")
def to_mention(%__MODULE__{id: id}), do: "<##{id}>"
defimpl String.Chars, for: Crux.Structs.Channel do
alias Crux.Structs.Channel
@spec to_string(Channel.t()) :: String.t()
def to_string(%Channel{} = data), do: Channel.to_mention(data)
end
end
|
lib/structs/channel.ex
| 0.880283
| 0.755862
|
channel.ex
|
starcoder
|
defmodule TypedStructLens do
@moduledoc """
TypedStructLens is a [TypedStruct](https://github.com/ejpcmac/typed_struct)
plugin for defining a [Lens](https://github.com/obrok/lens) on each field
without writing boilerplate code.
## Rationale
If you define your structs with TypedStruct and use Lens alongside, you may
end up defining lenses for your fields:
defmodule Person do
use TypedStruct
import Lens.Macros
typedstruct do
field :name, String.t(), enforce: true
field :age, non_neg_integer()
field :happy?, boolean(), default: true
field :phone, String.t()
end
deflens name, do: Lens.key(:name)
deflens age, do: Lens.key(:age)
deflens happy?, do: Lens.key(:happy?)
deflens phone, do: Lens.key(:phone)
end
But if you are using TypedStruct, it is also highly probable that you do not
like to write boilerplate code. TypedStructLens is here to write the `deflens`
for you:
defmodule Person do
use TypedStruct
typedstruct do
plugin TypedStructLens
field :name, String.t(), enforce: true
field :age, non_neg_integer()
field :happy?, boolean(), default: true
field :phone, String.t()
end
end
## Usage
### Setup
To use this plugin in your project, add this to your Mix dependencies:
{:typed_struct_lens, "~> #{Mix.Project.config()[:version]}"}
If you do not plan to compile modules using this TypedStruct plugin at
runtime, you can add `runtime: false` to the dependency tuple as it is only
used during compilation.
### General usage
To use this plugin in a typed struct, simply register it in the `typedstruct`
block:
defmodule MyStruct do
use TypedStruct
typedstruct do
# Just add this line to your struct.
plugin TypedStructLens
field :a_field, String.t()
field :other_field, atom()
end
@spec change(t()) :: t()
def change(data) do
# a_field/0 is generated by TypedStructLens.
lens = a_field()
put_in(data, [lens], "Changed")
end
end
### Options
You can generate private lenses:
defmodule MyStruct do
use TypedStruct
typedstruct do
# Define private lenses instead.
plugin TypedStructLens, lens: :private
field :a_field, String.t()
# You can still make it public for a given field.
field :other_field, atom(), lens: :public
end
end
Conversely, you can make only a given lens private:
defmodule MyStruct do
use TypedStruct
typedstruct do
# By default lenses are public.
plugin TypedStructLens
field :a_field, String.t()
# You can still make it private for a given field.
field :other_field, atom(), lens: :private
end
end
To avoid naming clashes, you can also prefix or postfix the generated function
names:
defmodule MyStruct do
use TypedStruct
typedstruct do
# Configure a prefix and postfix.
plugin TypedStructLens, prefix: :demo_, postfix: :_lens
field :a_field, String.t()
field :other_field, atom()
end
@spec change(t()) :: t()
def change(data) do
# demo_a_field_lens/0 is generated by TypedStructLens instead of
# a_field/0.
lens = demo_a_field_lens()
put_in(data, [lens], "Changed")
end
end
"""
use TypedStruct.Plugin
@impl true
@spec field(atom(), any(), keyword()) :: Macro.t()
def field(name, _type, opts) do
prefix = opts[:prefix]
postfix = opts[:postfix]
function_name = :"#{prefix}#{name}#{postfix}"
quote do
import Lens.Macros
if unquote(opts[:lens] == :private) do
deflensp unquote({function_name, [], []}), do: Lens.key(unquote(name))
else
deflens unquote({function_name, [], []}), do: Lens.key(unquote(name))
end
end
end
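# For illustration only: given `field :name, String.t()` with no prefix or
# postfix configured, the quoted code above roughly expands to
#
#   deflens name(), do: Lens.key(:name)
#
# (or `deflensp` when the `lens: :private` option is in effect).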
end
|
lib/typed_struct_lens.ex
| 0.793666
| 0.483161
|
typed_struct_lens.ex
|
starcoder
|
defmodule Blogit.Components.Supervisor do
@moduledoc """
Represents a `Supervisor`, which supervises the component worker
processes.
By default this Supervisor starts with no children.
`Blogit.Supervisor` starts a supervisor process implementing this module first
and after it a `Blogit.Server` worker process. When the `Blogit.Server` becomes
active and loads all of the source repository data into itself, it creates
the component specifications and adds them to the
`Blogit.Components.Supervisor` process.
This supervisor uses the `one_for_one` strategy for its workers as they are
not dependent on each other. They are dependent on the `Blogit.Server`
process, so if it dies, this supervisor is restarted and its child processes
are added by the newly restarted `Blogit.Server` process.
Every type of component will have a process for every language configured
for `Blogit` with its unique id. For example if `Blogit` is configured to
support `bg` and `en`, the `Blogit.Components.Posts` module will have two
worker processes, one with id `posts_bg` and one with `posts_en`. Every
language configured has its own set of data and processes, which are isolated
from the data and the processes of the other languages.
"""
use Supervisor
@doc """
Starts the `Blogit.Components.Supervisor` process.
The strategy of the `Blogit.Components.Supervisor` is `one_for_one` and it
starts with no children specifications. The specifications of the components
are added to it by the `Blogit.Server` worker process once it can accept
messages and has the data needed by the component processes as its state.
## Examples
iex> {:ok, pid} = Blogit.Components.Supervisor.start_link()
iex> is_pid(pid)
true
iex> {:ok, pid} = Blogit.Components.Supervisor.start_link()
iex> Process.alive?(pid)
true
iex> {:ok, pid} = Blogit.Components.Supervisor.start_link()
iex> Supervisor.count_children(pid)
%{active: 0, specs: 0, supervisors: 0, workers: 0}
iex> {:ok, pid} = Blogit.Components.Supervisor.start_link()
iex> elem(:sys.get_state(pid), 2) # strategy
:one_for_one
"""
@spec start_link() :: Supervisor.on_start()
def start_link do
Supervisor.start_link(__MODULE__, nil, name: __MODULE__)
end
def init(_), do: supervise([], strategy: :one_for_one)
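# A minimal sketch of how a caller such as `Blogit.Server` could add a
# component under this supervisor at runtime; the worker module, argument and
# id are hypothetical:
#
#   spec = Supervisor.Spec.worker(Blogit.Components.Posts, [:en], id: :posts_en)
#   Supervisor.start_child(Blogit.Components.Supervisor, spec)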
end
|
lib/blogit/components/supervisor.ex
| 0.71889
| 0.596903
|
supervisor.ex
|
starcoder
|
defmodule StepFlow.Step.Live do
@moduledoc """
The Live step context.
"""
alias StepFlow.Amqp.CommonEmitter
alias StepFlow.Jobs
alias StepFlow.Jobs.Status
alias StepFlow.LiveWorkers
alias StepFlow.Repo
alias StepFlow.Step.Launch
alias StepFlow.Step.LaunchParams
def create_job_live([source_path | _source_paths], launch_params) do
message = generate_message_live(source_path, launch_params)
message =
Map.put(
message,
:type,
"create"
)
message = filter_message(message)
params =
StepFlow.Map.get_by_key_or_atom(message, :parameters, []) ++
[%{id: "action", type: "string", value: "create"}]
message = StepFlow.Map.replace_by_atom(message, :parameters, params)
case CommonEmitter.publish_json(
"job_worker_manager",
LaunchParams.get_step_id(launch_params),
message
) do
:ok -> {:ok, "started"}
_ -> {:error, "unable to publish message"}
end
end
def update_job_live(job_id) do
job = Repo.preload(Jobs.get_job(job_id), [:status, :updates, :workflow])
workflow_jobs = Repo.preload(job.workflow, [:jobs]).jobs
steps = job.workflow.steps
start_next_job_live(workflow_jobs, steps)
end
defp start_next_job_live([], _steps), do: {:ok, "nothing to do"}
defp start_next_job_live([job | jobs], steps) do
job = Repo.preload(Jobs.get_job(job.id), [:status])
if job.status != [] do
case Status.get_last_status(job.status).state do
:ready_to_init -> update_live_worker(steps, job, "initializing")
:ready_to_start -> update_live_worker(steps, job, "starting")
:update -> update_live_worker(steps, job, "updating")
:stopped -> delete_live_worker(steps, job)
_ -> {:ok, "nothing to do"}
end
end
start_next_job_live(jobs, steps)
end
def stop_job(job) do
Jobs.get_message(job)
|> Map.put(:type, "stop_process")
|> publish_message(job.step_id)
end
defp update_live_worker(steps, job, status) do
case generate_message(steps, job) do
{:ok, message} ->
{:ok, _} = Status.set_job_status(job.id, status)
publish_message(message, job.step_id)
_ ->
{:ok, "nothing to do"}
end
end
defp delete_live_worker(steps, job) do
{_, message} = generate_message(steps, job)
message = filter_message(message)
params =
StepFlow.Map.get_by_key_or_atom(message, :parameters, []) ++
[%{id: "action", type: "string", value: "delete"}]
message = StepFlow.Map.replace_by_atom(message, :parameters, params)
case CommonEmitter.publish_json(
"job_worker_manager",
job.step_id,
message
) do
:ok -> {:ok, "deleted"}
_ -> {:error, "unable to publish message"}
end
end
def generate_message(steps, job) do
message = Jobs.get_message(job)
requirements = get_requirements(steps, job.step_id)
{result, message} =
if requirements != nil do
replace_ip_address(message, job.id, requirements)
else
live_worker = LiveWorkers.get_by(%{"job_id" => job.id})
if live_worker.creation_date == nil || live_worker.instance_id == "" do
{:error, message}
else
{:ok, message}
end
end
action =
job.status
|> Status.get_last_status()
|> Status.get_action()
{result, Map.put(message, :type, action)}
end
defp replace_ip_address(message, job_id, requirements) do
job = Repo.preload(Jobs.get_job(job_id), [:status, :updates, :workflow])
workflow = Repo.preload(job.workflow, [:jobs])
job_req =
Jobs.list_jobs(%{
"workflow_id" => workflow.id,
"step_id" => requirements |> List.first()
})
|> Map.get(:data)
|> List.first()
live_worker = LiveWorkers.get_by(%{"job_id" => job_req.id})
ips = live_worker.ips
port = live_worker.ports |> List.last()
created = live_worker.creation_date
if created != nil && ips != [] do
ip = ips |> List.first()
params =
StepFlow.Map.get_by_key_or_atom(message, :parameters, [])
|> Enum.map(fn param ->
case StepFlow.Map.get_by_key_or_atom(param, :id) do
"source_paths" ->
value = ["srt://#{ip}:#{port}"]
StepFlow.Map.replace_by_string(param, "value", value)
"source_path" ->
value = "srt://#{ip}:#{port}"
StepFlow.Map.replace_by_string(param, "value", value)
_ ->
param
end
end)
Jobs.update_job(job, %{parameters: params})
{:ok, StepFlow.Map.replace_by_atom(message, :parameters, params)}
else
{:error, message}
end
end
defp filter_message(message) do
Map.put(
message,
:parameters,
Enum.filter(message.parameters, fn x ->
Enum.member?(
["step_id", "namespace", "worker", "ports", "direct_messaging_queue_name"],
StepFlow.Map.get_by_key_or_atom(x, :id)
)
end)
)
end
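# A hedged sketch of filter_message/1: only parameters whose id is in the
# whitelist above survive (the parameter values are made up for illustration):
#
#   filter_message(%{parameters: [
#     %{id: "worker", type: "string", value: "live_worker"},
#     %{id: "source_path", type: "string", value: "srt://..."}
#   ]})
#   #=> %{parameters: [%{id: "worker", type: "string", value: "live_worker"}]}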
defp publish_message(message, step_id) do
case CommonEmitter.publish_json(
"direct_messaging_" <> get_direct_messaging_queue(message),
step_id,
message,
"direct_messaging",
headers: [{"instance_id", :longstr, get_instance_id(message)}]
) do
:ok ->
{:ok, "started"}
_ ->
{:error, "unable to publish message"}
end
end
defp get_direct_messaging_queue(message) do
StepFlow.Map.get_by_key_or_atom(message, :parameters)
|> Enum.filter(fn param ->
StepFlow.Map.get_by_key_or_atom(param, :id) == "direct_messaging_queue_name"
end)
|> List.first()
|> StepFlow.Map.get_by_key_or_atom(:value)
end
defp get_instance_id(message) do
job_id = StepFlow.Map.get_by_key_or_atom(message, :job_id)
LiveWorkers.get_by(%{"job_id" => job_id}).instance_id |> String.slice(0..11)
end
defp get_requirements(steps, step_id) do
case steps do
[] ->
nil
_ ->
Enum.filter(steps, fn step ->
StepFlow.Map.get_by_key_or_atom(step, :id) == step_id
end)
|> List.first()
|> StepFlow.Map.get_by_key_or_atom(:parent_ids)
end
end
def generate_message_live(
source_path,
launch_params
) do
parameters =
Launch.generate_job_parameters_one_for_one(
source_path,
launch_params
)
job_params = %{
name: LaunchParams.get_step_name(launch_params),
step_id: LaunchParams.get_step_id(launch_params),
is_live: true,
workflow_id: launch_params.workflow.id,
parameters: parameters
}
{:ok, job} = Jobs.create_job(job_params)
Jobs.get_message(job)
end
end
|
lib/step_flow/step/live.ex
| 0.521715
| 0.534916
|
live.ex
|
starcoder
|
defmodule Infusionsoft.Schemas.XML.Contact do
@moduledoc false
# Functions to translate between common names and XML api names for Contact.
alias Infusionsoft.Caches.ContactCustomFields
@common_names [
"Shipping Address Street 1",
"Shipping Address Street 2",
"Other Address Street 1",
"Other Address Street 2",
"Anniversary",
"Birthday",
"Billing Address City",
"Shipping Address City",
"Other Address City",
"Person Notes",
"Billing Address Country",
"Shipping Address Country",
"Other Address Country",
"Date Created",
"Email",
"Email 2",
"Email 3",
"Fax 1",
"Fax 1 Type",
"Fax 2",
"Fax 2 Type",
"<NAME>",
"Tags",
"Id",
"Job Title",
"<NAME>",
"Last Updated",
"<NAME>",
"Nickname",
"Phone 1",
"Phone 1 Ext",
"Phone 1 Type",
"Phone 2",
"Phone 2 Ext",
"Phone 2 Type",
"Phone 3",
"Phone 3 Ext",
"Phone 3 Type",
"Phone 4",
"Phone 4 Ext",
"Phone 4 Type",
"Phone 5",
"Phone 5 Ext",
"Phone 5 Type",
"Billing Address Postal Code",
"Shipping Address Postal Code",
"Other Address Postal Code",
"Spouse Name",
"Billing Address State",
"Shipping Address State",
"Other Address State",
"Billing Address Street 1",
"Billing Address Street 2",
"Suffix",
"Time Zone",
"Title",
"Website",
"Billing Address Zip Four",
"Shipping Address Zip Four",
"Other Address Zip Four",
"Person Type",
"Lead Source Id",
"Company"
]
@xml_names [
"Address2Street1",
"Address2Street2",
"Address3Street1",
"Address3Street2",
"Anniversary",
"Birthday",
"City",
"City2",
"City3",
"ContactNotes",
"Country",
"Country2",
"Country3",
"DateCreated",
"Email",
"EmailAddress2",
"EmailAddress3",
"Fax1",
"Fax1Type",
"Fax2",
"Fax2Type",
"FirstName",
"Groups",
"Id",
"JobTitle",
"LastName",
"LastUpdated",
"MiddleName",
"Nickname",
"Phone1",
"Phone1Ext",
"Phone1Type",
"Phone2",
"Phone2Ext",
"Phone2Type",
"Phone3",
"Phone3Ext",
"Phone3Type",
"Phone4",
"Phone4Ext",
"Phone4Type",
"Phone5",
"Phone5Ext",
"Phone5Type",
"PostalCode",
"PostalCode2",
"PostalCode3",
"SpouseName",
"State",
"State2",
"State3",
"StreetAddress1",
"StreetAddress2",
"Suffix",
"TimeZone",
"Title",
"Website",
"ZipFour1",
"ZipFour2",
"ZipFour3",
"ContactType",
"LeadSourceId",
"CompanyId"
]
# @xml_unique [
# "Assistant Name",
# "Assistant Phone",
# "Created By",
# "Language",
# "Last Updated By",
# "Password",
# "Username",
# "Lead Source",
# "Company"
# ]
@common_names_downcase Enum.map(@common_names, &String.downcase/1)
@common_to_xml_downcase Enum.zip(@common_names_downcase, @xml_names) |> Enum.into(%{})
@xml_to_common Enum.zip(@xml_names, @common_names) |> Enum.into(%{})
@doc "Takes a list of Common names and returns XML names or list of errors"
@spec to([String.t()], String.t(), nil | String.t()) :: {:ok, list()} | {:error, String.t()}
def to(names, token, app) when is_list(names) do
results = Enum.map(names, &get_name(&1, @common_to_xml_downcase, token, app, downcase: true))
if !Enum.any?(results, fn {status, _} -> status == :error end) do
{:ok, Enum.map(results, fn {_, name} -> name end)}
else
{:error,
results
|> Enum.filter(fn {status, _} -> status == :error end)
|> Enum.map(fn {_, message} -> message end)
|> Enum.join(", ")}
end
end
@doc "Takes a list of XML names and returns Common names or list of errors"
@spec from([String.t()], String.t(), nil | String.t()) :: {:ok, list()} | {:error, String.t()}
def from(names, token, app) when is_list(names) do
results = Enum.map(names, &get_name(&1, @xml_to_common, token, app))
if !Enum.any?(results, fn {status, _} -> status == :error end) do
{:ok, Enum.map(results, fn {_, name} -> name end)}
else
{:error,
results
|> Enum.filter(fn {status, _} -> status == :error end)
|> Enum.map(fn {_, message} -> message end)
|> Enum.join(", ")}
end
end
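# A hedged usage sketch; `token` and `app` are placeholders for real
# credentials:
#
#   to(["Job Title", "Email"], token, app)
#   #=> {:ok, ["JobTitle", "Email"]}
#
#   from(["JobTitle", "Email"], token, app)
#   #=> {:ok, ["Job Title", "Email"]}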
defp get_name(name, map, token, app, opts \\ []) do
# If our map has downcase keys we need to transform before accessing the map.
opts = Keyword.merge([downcase: false], opts)
value = if(Keyword.get(opts, :downcase), do: map[String.downcase(name)], else: map[name])
if value do
{:ok, value}
else
if String.first(name) == "_" do
get_custom_field_from(name, token, app)
else
get_custom_field_to(name, token, app)
end
end
end
defp get_custom_field_to(name, token, app) do
with {:ok, field} <- ContactCustomFields.lookup(name, token, app) do
{:ok, "_" <> field["Name"]}
end
end
defp get_custom_field_from(name, token, app) do
with {:ok, field} <- ContactCustomFields.lookup(name, token, app) do
{:ok, field["Label"]}
end
end
end
|
lib/infusionsoft/schemas/xml/contact.ex
| 0.620852
| 0.411584
|
contact.ex
|
starcoder
|
defmodule MarcoPolo.GenericParser do
@moduledoc false
# Provides facilities for parsing binary data with support for incomplete
# data.
# This module provides functions for parsing binary data through given
# *parsers* (which are just functions). What makes this module useful over
# manually parsing these data is its declarativeness (you just list what data
# you expect) as well as its support for incomplete data.
# Incomplete data means data that ends before it can be fully parsed. For
# example, an OrientDB long takes 8 bytes, so if you want to parse a long and
# the binary contains fewer than 8 bytes, then it's incomplete. Incomplete
# responses are not handled in this module (which just returns `:incomplete`
# when a response is incomplete), but on a higher level (the connection
# server, which caches the incomplete data until it receives new data, then
# tries to parse again).
@typedoc """
Type returned by the parsing functions in this module.
"""
@type ok_or_incomplete :: {term, binary} | :incomplete
@typedoc """
A basic parser is just a function that takes a binary and returns `{value,
rest}` or `:incomplete`; a parser can be a basic parser or a more complex
parser usually based on basic ones.
"""
@type parser :: (binary -> ok_or_incomplete)
@doc """
Parses `data` based on the given list of parsers. Returns `:incomplete` when
the data is not enough to satisfy all `parsers`, `{value, rest}` otherwise.
"""
@spec parse(binary, parser | [parser]) :: ok_or_incomplete
def parse(data, parsers)
# You could basically call the parser directly on the data, but using
# parse/2 makes sense because of nested parsing.
def parse(data, parser) when is_binary(data) and is_function(parser, 1) do
parser.(data)
end
def parse(data, parsers) when is_binary(data) and is_list(parsers) do
parse(data, parsers, [])
end
defp parse(data, [parser|t], acc) do
case parser.(data) do
{value, rest} -> parse(rest, t, [value|acc])
:incomplete -> :incomplete
end
end
defp parse(data, [], acc) do
{Enum.reverse(acc), data}
end
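# A minimal sketch of composing basic parsers; the one-byte parser below is
# illustrative, not OrientDB's actual wire format:
#
#   byte_parser = fn
#     <<value, rest::binary>> -> {value, rest}
#     _ -> :incomplete
#   end
#
#   parse(<<1, 2, 3>>, [byte_parser, byte_parser])
#   #=> {[1, 2], <<3>>}
#
#   parse(<<>>, [byte_parser])
#   #=> :incomplete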
@doc """
Returns a parser that parses arrays.
The returned parser will first parse the number of elements in the array from
the given binary using the `nelems_fn` parser; then, it will parse elements
using the `elem_parsers` parsers that number of times.
"""
@spec array_parser(parser, parser | [parser]) :: parser
def array_parser(nelems_fn, elem_parsers) when is_function(nelems_fn, 1) do
fn(data) ->
case nelems_fn.(data) do
:incomplete -> :incomplete
{nelems, rest} -> parse_array(rest, nelems, elem_parsers, [])
end
end
end
defp parse_array(data, 0, _parsers, acc) do
{Enum.reverse(acc), data}
end
defp parse_array(data, nelems, parsers, acc) do
case parse(data, parsers) do
{values, rest} -> parse_array(rest, nelems - 1, parsers, [values|acc])
:incomplete -> :incomplete
end
end
end
|
lib/marco_polo/generic_parser.ex
| 0.780579
| 0.778397
|
generic_parser.ex
|
starcoder
|
defmodule OMG.State.UtxoSet do
@moduledoc """
Handles all the operations done on the UTXOs held in the ledger.
Provides the requested UTXOs by a collection of input pointers.
Trades in transaction effects (new utxos, utxos to delete).
Translates the modifications to itself into DB updates, and is able to interpret the UTXO query result from DB.
Intended to handle any kind of UTXO _subset_ of the entire UTXO set, relying on the subset of UTXOs being selected
correctly.
"""
alias OMG.Crypto
alias OMG.Output
alias OMG.State.Transaction
alias OMG.Utxo
require Utxo
@type t() :: %{OMG.Utxo.Position.t() => Utxo.t()}
@type query_result_t() :: list({OMG.DB.utxo_pos_db_t(), OMG.Utxo.t()})
@spec init(query_result_t()) :: t()
def init(utxos_query_result) do
utxos_query_result
|> Enum.reject(&(&1 == :not_found))
|> Enum.into(%{}, fn {db_input_pointer, db_utxo} ->
{OMG.Utxo.Position.from_db_key(db_input_pointer), Utxo.from_db_value(db_utxo)}
end)
end
@doc """
Provides the outputs that are pointed by `inputs` provided
"""
@spec get_by_inputs(t(), list(OMG.Utxo.Position.t())) ::
{:ok, list(Output.t())} | {:error, :utxo_not_found}
def get_by_inputs(utxos, inputs) do
with {:ok, utxos_for_inputs} <- get_utxos_by_inputs(utxos, inputs),
do: {:ok, utxos_for_inputs |> Enum.reverse() |> Enum.map(fn %Utxo{output: output} -> output end)}
end
@doc """
Updates itself given a list of spent input pointers and a map of UTXOs created upon a transaction
"""
@spec apply_effects(t(), list(OMG.Utxo.Position.t()), t()) :: t()
def apply_effects(utxos, spent_input_pointers, new_utxos_map) do
utxos |> Map.merge(new_utxos_map) |> Map.drop(spent_input_pointers)
end
@doc """
Returns the DB updates required given a list of spent input pointers and a map of UTXOs created upon a transaction
"""
@spec db_updates(list(OMG.Utxo.Position.t()), t()) ::
list({:put, :utxo, {Utxo.Position.db_t(), Utxo.t()}} | {:delete, :utxo, Utxo.Position.db_t()})
def db_updates(spent_input_pointers, new_utxos_map) do
db_updates_new_utxos = new_utxos_map |> Enum.map(&utxo_to_db_put/1)
db_updates_spent_utxos = spent_input_pointers |> Enum.map(&utxo_to_db_delete/1)
Enum.concat(db_updates_new_utxos, db_updates_spent_utxos)
end
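# A hedged sketch of the shape db_updates/2 returns; the positions and UTXO
# below are placeholders:
#
#   db_updates([spent_pos], %{new_pos => new_utxo})
#   #=> [
#   #     {:put, :utxo, {db_key_for_new_pos, db_value_for_new_utxo}},
#   #     {:delete, :utxo, db_key_for_spent_pos}
#   #   ]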
@spec exists?(t(), OMG.Utxo.Position.t()) :: boolean()
def exists?(utxos, input_pointer),
do: Map.has_key?(utxos, input_pointer)
@doc """
Searches the UTXO set for a particular UTXO created with a `txhash` on `oindex` position.
Current implementation is **expensive**
"""
@spec find_matching_utxo(t(), Transaction.tx_hash(), non_neg_integer()) :: {OMG.Utxo.Position.t(), Utxo.t()}
def find_matching_utxo(utxos, requested_txhash, oindex) do
utxos
|> Stream.filter(&utxo_kv_created_by?(&1, requested_txhash))
|> Enum.find(&utxo_kv_has_oindex_equal?(&1, oindex))
end
@doc """
Streams the UTXO key-value pairs found to be owned by a particular address
"""
@spec filter_owned_by(t(), Crypto.address_t()) :: Enumerable.t()
def filter_owned_by(utxos, address) do
Stream.filter(utxos, fn utxo_kv -> utxo_kv_get_owner(utxo_kv) == address end)
end
@doc """
Takes any enumerable of UTXOs (for example an instance of `OMG.State.UtxoSet.t`) and produces a new enumerable
where the UTXO k-v pairs are zipped with the UTXO positions derived from the data held in the UTXO set
"""
@spec zip_with_positions(t() | Enumerable.t()) :: Enumerable.t()
def zip_with_positions(utxos) do
Stream.map(utxos, fn utxo_kv -> {utxo_kv, utxo_kv_get_position(utxo_kv)} end)
end
defp get_utxos_by_inputs(utxos, inputs) do
Enum.reduce_while(inputs, {:ok, []}, fn input, acc -> get_utxo(utxos, input, acc) end)
end
defp get_utxo(utxos, position, {:ok, acc}) do
case Map.get(utxos, position) do
nil -> {:halt, {:error, :utxo_not_found}}
found -> {:cont, {:ok, [found | acc]}}
end
end
defp utxo_to_db_put({input_pointer, utxo}),
do: {:put, :utxo, {Utxo.Position.to_input_db_key(input_pointer), Utxo.to_db_value(utxo)}}
defp utxo_to_db_delete(input_pointer),
do: {:delete, :utxo, Utxo.Position.to_input_db_key(input_pointer)}
# based on some key-value pair representing {input_pointer, utxo}, get its position from somewhere
defp utxo_kv_get_position(utxo_kv)
defp utxo_kv_get_position({Utxo.position(_, _, _) = utxo_pos, _utxo}), do: utxo_pos
defp utxo_kv_get_position({_non_utxo_pos_input_pointer, %{utxo_pos: Utxo.position(_, _, _) = utxo_pos}}), do: utxo_pos
# based on some key-value pair representing {input_pointer, utxo}, get its owner
defp utxo_kv_get_owner(utxo_kv)
defp utxo_kv_get_owner({_input_pointer, %Utxo{output: %{owner: owner}}}), do: owner
defp utxo_kv_get_owner({%{owner: owner}, _output_without_owner_specified}), do: owner
defp utxo_kv_created_by?({_input_pointer, %Utxo{creating_txhash: requested_txhash}}, requested_txhash), do: true
defp utxo_kv_created_by?({_input_pointer, %Utxo{}}, _), do: false
defp utxo_kv_has_oindex_equal?(utxo_kv, oindex) do
Utxo.position(_, _, utxo_kv_oindex) = utxo_kv_get_position(utxo_kv)
utxo_kv_oindex == oindex
end
end
|
apps/omg/lib/omg/state/utxo_set.ex
| 0.841761
| 0.410638
|
utxo_set.ex
|
starcoder
|
defmodule Ratatouille.Renderer.Element do
@moduledoc false
alias __MODULE__, as: Element
alias Ratatouille.Renderer.Element.{
Bar,
Canvas,
Chart,
Column,
Label,
Overlay,
Panel,
Row,
Sparkline,
Table,
Tree,
View,
Viewport
}
@type t :: %Element{tag: atom()}
@enforce_keys [:tag]
defstruct tag: nil, attributes: %{}, children: []
@content_tags [
:canvas,
:chart,
:label,
:panel,
:row,
:sparkline,
:table,
:tree,
:viewport
]
### Element Specs
@specs [
bar: [
description:
"Block-level element for creating title, status or menu bars",
renderer: Bar,
child_tags: [:label],
attributes: []
],
canvas: [
description: "A free-form canvas for drawing arbitrary shapes",
renderer: Canvas,
child_tags: [:canvas_cell],
attributes: [
height: {:required, "Integer representing the canvas height"},
width: {:required, "Integer representing the canvas width"}
]
],
canvas_cell: [
description: "A canvas cell which represents one square of the canvas",
child_tags: [],
attributes: [
x: {:required, "Integer representing the cell's column (zero-indexed)"},
y: {:required, "Integer representing the cell's row (zero-indexed)"},
color: {:optional, "Constant representing color to use for foreground"},
char: {:optional, "Single character to render within this cell"},
background:
{:optional, "Constant representing color to use for background"},
attributes:
{:optional, "Constant representing style attributes to apply"}
]
],
chart: [
description: "Element for plotting a series as a multi-line chart",
renderer: Chart,
child_tags: [],
attributes: [
series:
{:required, "List of float or integer values representing the series"},
type:
{:required,
"Type of chart to plot. Currently only `:line` is supported"},
height: {:optional, "Height of the chart in rows"}
]
],
column: [
description: "Container occupying a vertical segment of the grid",
renderer: Column,
child_tags: @content_tags,
attributes: [
size:
{:required,
"Number of units on the grid that the column should occupy"}
]
],
label: [
description: "Block-level element for displaying text",
renderer: Label,
child_tags: [:text],
attributes: [
content:
{:optional, "Binary containing the text content to be displayed"},
color: {:optional, "Constant representing color to use for foreground"},
background:
{:optional, "Constant representing color to use for background"},
attributes:
{:optional, "Constant representing style attributes to apply"},
wrap:
{:optional,
"Boolean indicating whether or not to wrap lines to fit available space"}
]
],
overlay: [
description: "Container overlaid on top of the view",
renderer: Overlay,
child_tags: @content_tags,
attributes: [
padding: {:optional, "Integer number of units of padding"}
]
],
panel: [
description:
"Container with a border and title used to demarcate content",
renderer: Panel,
child_tags: @content_tags,
attributes: [
color: {:optional, "Color of title"},
background: {:optional, "Background of title"},
attributes: {:optional, "Attributes for the title"},
padding:
{:optional,
"Integer providing inner padding to use when rendering child elements"},
height:
{:optional,
"Height of the table in rows or `:fill` to fill the parent container's box"},
title: {:optional, "Binary containing the title for the panel"}
]
],
row: [
description:
"Container used to define grid layouts with one or more columns",
renderer: Row,
child_tags: [:column],
attributes: []
],
sparkline: [
description: "Element for plotting a series in a single line",
renderer: Sparkline,
child_tags: [],
attributes: [
series:
{:required, "List of float or integer values representing the series"}
]
],
table: [
description: "Container for displaying data in rows and columns",
renderer: Table,
child_tags: [:table_row],
attributes: []
],
table_cell: [
description: "Element representing a table cell",
child_tags: [],
attributes: [
content:
{:required, "Binary containing the text content to be displayed"},
color: {:optional, "Constant representing color to use for foreground"},
background:
{:optional, "Constant representing color to use for background"},
attributes:
{:optional, "Constant representing style attributes to apply"}
]
],
table_row: [
description: "Container representing a row of the table",
child_tags: [:table_cell],
attributes: [
color: {:optional, "Constant representing color to use for foreground"},
background:
{:optional, "Constant representing color to use for background"},
attributes:
{:optional, "Constant representing style attributes to apply"}
]
],
text: [
description: "Inline element for displaying uniformly-styled text",
child_tags: [],
attributes: [
content:
{:required, "Binary containing the text content to be displayed"},
color: {:optional, "Constant representing color to use for foreground"},
background:
{:optional, "Constant representing color to use for background"},
attributes:
{:optional, "Constant representing style attributes to apply"}
]
],
tree: [
description: "Container for displaying data as a tree of nodes",
renderer: Tree,
child_tags: [:tree_node],
attributes: []
],
tree_node: [
description: "Container representing a tree node",
child_tags: [:tree_node],
attributes: [
content: {:required, "Binary label for the node"},
color: {:optional, "Constant representing color to use for foreground"},
background:
{:optional, "Constant representing color to use for background"},
attributes:
{:optional, "Constant representing style attributes to apply"}
]
],
view: [
description: "Top-level container",
renderer: View,
child_tags: [:overlay | @content_tags],
attributes: [
top_bar: {:optional, "A `:bar` element to occupy the view's first row"},
bottom_bar:
{:optional, "A `:bar` element to occupy the view's last row"}
]
],
viewport: [
description: "Container for offsetting content (e.g., for scrolling)",
renderer: Viewport,
child_tags: @content_tags,
attributes: [
offset_x:
{:optional,
"Integer representing the number of columns to offset the child content by. Defaults to 0."},
offset_y:
{:optional,
"Integer representing the number of rows to offset the child content by. Defaults to 0."}
]
]
]
def specs, do: @specs
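# For illustration only, a label element built by hand; in practice elements
# are usually constructed via the `Ratatouille.View` DSL:
#
#   %Ratatouille.Renderer.Element{
#     tag: :label,
#     attributes: %{content: "Hello"},
#     children: []
#   }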
end
|
lib/ratatouille/renderer/element.ex
| 0.872998
| 0.50415
|
element.ex
|
starcoder
|
defmodule ExPesa.Jenga.SendMoney.EFT do
@moduledoc """
This module enables you to Send Money To Other Banks Via Electronic Funds Transfer (EFT)
"""
import ExPesa.Jenga.JengaBase
alias ExPesa.Jenga.Signature
@doc """
Send Money To Other Banks Via Electronic Funds Transfer (EFT)
## Parameters
`attrs` - a map containing:
- `source` - a map containing; `countryCode`, `name` and `accountNumber`
- `destination` - a map containing; `type`, `countryCode`, `name`, `bankCode`, `branchCode`, and `accountNumber`
- `transfer` - a map containing; `type`, `amount`, `currencyCode`, `reference`, `date` and `description`
Read more about the parameter descriptions here: https://developer.jengaapi.io/reference#eft
## Example
iex> ExPesa.Jenga.SendMoney.EFT.request(%{ source: %{ countryCode: "KE", name: "<NAME>", accountNumber: "0770194201783" }, destination: %{ type: "bank", countryCode: "KE", name: "<NAME>", bankCode: "07", branchCode: "026", accountNumber: "7265810011" }, transfer: %{ type: "EFT", amount: 1000, currencyCode: "KES", reference: "692194625821", date: "2020-12-03", description: "some remarks here" } })
{:ok,
%{
"transactionId" => "1452854",
"status" => "SUCCESS"
}}
"""
@spec request(map()) :: {:error, any()} | {:ok, any()}
def request(
%{
source: %{
countryCode: _countryCode,
name: _name,
accountNumber: accountNumber
},
destination: %{
type: _type,
countryCode: _cc,
name: _n,
bankCode: bankCode,
branchCode: _wN,
accountNumber: accNo
},
transfer: %{
type: _t,
amount: amount,
currencyCode: _currencyCode,
reference: reference,
date: _date,
description: _description
}
} = requestBody
) do
message = "#{reference}#{accountNumber}#{accNo}#{amount}#{bankCode}"
make_request("/transaction/v2/remittance#eft", requestBody, [
{"signature", Signature.sign(message)}
])
end
def request(_), do: {:error, "Required Parameters missing, check your request body"}
end
|
lib/ex_pesa/Jenga/send_money/eft.ex
| 0.782912
| 0.516595
|
eft.ex
|
starcoder
|
defmodule LosslessJason.Codegen do
@moduledoc false
alias LosslessJason.{Encode, EncodeError}
def jump_table(ranges, default) do
ranges
|> ranges_to_orddict()
|> :array.from_orddict(default)
|> :array.to_orddict()
end
def jump_table(ranges, default, max) do
ranges
|> ranges_to_orddict()
|> :array.from_orddict(default)
|> resize(max)
|> :array.to_orddict()
end
defmacro bytecase(var, do: clauses) do
{ranges, default, literals} = clauses_to_ranges(clauses, [])
jump_table = jump_table(ranges, default)
quote do
case unquote(var) do
unquote(jump_table_to_clauses(jump_table, literals))
end
end
end
defmacro bytecase(var, max, do: clauses) do
{ranges, default, empty} = clauses_to_ranges(clauses, [])
jump_table = jump_table(ranges, default, max)
quote do
case unquote(var) do
unquote(jump_table_to_clauses(jump_table, empty))
end
end
end
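# A hedged sketch of how bytecase/2 is meant to be read: range clauses bind
# the matched byte and the rest of the binary, followed by a default clause
# and any literal clauses (the ranges and actions here are illustrative):
#
#   bytecase data do
#     byte in 0x30..0x39, rest -> parse_digit(byte, rest)
#     _other, rest -> skip(rest)
#     <<>> -> :done
#   end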
def build_kv_iodata(kv, encode_args) do
elements =
kv
|> Enum.map(&encode_pair(&1, encode_args))
|> Enum.intersperse(",")
collapse_static(List.flatten(["{", elements, "}"]))
end
defp clauses_to_ranges([{:->, _, [[{:in, _, [byte, range]}, rest], action]} | tail], acc) do
clauses_to_ranges(tail, [{range, {byte, rest, action}} | acc])
end
defp clauses_to_ranges([{:->, _, [[default, rest], action]} | tail], acc) do
{Enum.reverse(acc), {default, rest, action}, literal_clauses(tail)}
end
defp literal_clauses(clauses) do
Enum.map(clauses, fn {:->, _, [[literal], action]} ->
{literal, action}
end)
end
defp jump_table_to_clauses([{val, {{:_, _, _}, rest, action}} | tail], empty) do
quote do
<<unquote(val), unquote(rest)::bits>> ->
unquote(action)
end ++ jump_table_to_clauses(tail, empty)
end
defp jump_table_to_clauses([{val, {byte, rest, action}} | tail], empty) do
quote do
<<unquote(byte), unquote(rest)::bits>> when unquote(byte) === unquote(val) ->
unquote(action)
end ++ jump_table_to_clauses(tail, empty)
end
defp jump_table_to_clauses([], literals) do
Enum.flat_map(literals, fn {pattern, action} ->
quote do
unquote(pattern) ->
unquote(action)
end
end)
end
defp resize(array, size), do: :array.resize(size, array)
defp ranges_to_orddict(ranges) do
ranges
|> Enum.flat_map(fn
{int, value} when is_integer(int) ->
[{int, value}]
{enum, value} ->
Enum.map(enum, &{&1, value})
end)
|> :orddict.from_list()
end
defp encode_pair({key, value}, encode_args) do
key = IO.iodata_to_binary(Encode.key(key, &escape_key/3))
key = "\"" <> key <> "\":"
[key, quote(do: Encode.value(unquote(value), unquote_splicing(encode_args)))]
end
defp escape_key(binary, _original, _skip) do
check_safe_key!(binary)
binary
end
defp check_safe_key!(binary) do
for <<(<<byte>> <- binary)>> do
if byte > 0x7F or byte < 0x1F or byte in '"\\/' do
raise EncodeError,
"invalid byte #{inspect(byte, base: :hex)} in literal key: #{inspect(binary)}"
end
end
:ok
end
defp collapse_static([bin1, bin2 | rest]) when is_binary(bin1) and is_binary(bin2) do
collapse_static([bin1 <> bin2 | rest])
end
defp collapse_static([other | rest]) do
[other | collapse_static(rest)]
end
defp collapse_static([]) do
[]
end
end
|
lib/codegen.ex
| 0.702836
| 0.585486
|
codegen.ex
|
starcoder
|
defmodule Sentry.Sources do
alias Sentry.Config
@moduledoc """
This module is responsible for providing functionality that stores
the text of source files during compilation for displaying the
source code that caused an exception.
### Configuration
There is configuration required to set up this functionality. The options
include `:enable_source_code_context`, `:root_source_code_path`, `:context_lines`,
`:source_code_exclude_patterns`, and `:source_code_path_pattern`.
* `:enable_source_code_context` - when `true`, enables reporting source code
alongside exceptions.
* `:root_source_code_path` - The path from which to start recursively reading files from.
Should usually be set to `File.cwd!`.
* `:context_lines` - The number of lines of source code before and after the line that
caused the exception to be included. Defaults to `3`.
* `:source_code_exclude_patterns` - a list of Regex expressions used to exclude file paths that
should not be stored or referenced when reporting exceptions. Defaults to
`[~r"/_build/", ~r"/deps/", ~r"/priv/"]`.
* `:source_code_path_pattern` - a glob that is expanded to select files from the
`:root_source_code_path`. Defaults to `"**/*.ex"`.
An example configuration:
config :sentry,
dsn: "https://public:secret@app.getsentry.com/1",
enable_source_code_context: true,
root_source_code_path: File.cwd!,
context_lines: 5
### Source code storage
The file contents are saved when Sentry is compiled, which can cause some
complications. If a file is changed, and Sentry is not recompiled,
it will still report old source code.
The best way to ensure source code is up to date is to recompile Sentry
itself via `mix deps.compile sentry --force`. It's possible to create a Mix
Task alias in `mix.exs` to do this. The example below would allow one to
run `mix.sentry_recompile` which will force recompilation of Sentry so
it has the newest source and then compile the project:
defp aliases do
[sentry_recompile: ["deps.compile sentry --force", "compile"]]
end
This is important to note, especially when building for production. If your
build or deployment system caches prior builds, it may not recompile Sentry
and could cause issues with reported source code being out of date.
Due to Sentry reading the file system and defaulting to a recursive search
of directories, it is important to check your configuration and compilation
environment to avoid a folder recursion issue. Problems may be seen when
deploying to the root folder, so it is best to follow the practice of
compiling your application in its own folder. Modifying the
`source_code_path_pattern` configuration option from its default is also
an avenue to avoid compile problems.
"""
@type file_map :: %{pos_integer() => String.t()}
@type source_map :: %{String.t() => file_map}
def load_files do
root_path = Config.root_source_code_path()
path_pattern = Config.source_code_path_pattern()
exclude_patterns = Config.source_code_exclude_patterns()
Path.join(root_path, path_pattern)
|> Path.wildcard()
|> exclude_files(exclude_patterns)
|> Enum.reduce(%{}, fn path, acc ->
key = Path.relative_to(path, root_path)
value = source_to_lines(File.read!(path))
Map.put(acc, key, value)
end)
end
@doc """
Given the source code map, a filename and a line number, this method retrieves the source code context.
When reporting source code context to the Sentry API, it expects three separate values. They are the source code
for the specific line the error occurred on, the list of the source code for the lines preceding, and the
list of the source code for the lines following. The number of lines in the lists depends on what is
configured in `:context_lines`. The number configured is how many lines to get on each side of the line that
caused the error. If it is configured to be `3`, the method will attempt to get the 3 lines preceding, the
3 lines following, and the line that the error occurred on, for a possible maximum of 7 lines.
The three values are returned in a three element tuple as `{preceding_source_code_list, source_code_from_error_line, following_source_code_list}`.
"""
@spec get_source_context(source_map, String.t(), pos_integer()) ::
{[String.t()], String.t() | nil, [String.t()]}
def get_source_context(files, file_name, line_number) do
context_lines = Config.context_lines()
file = Map.get(files, file_name)
do_get_source_context(file, line_number, context_lines)
end
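# A hedged sketch of the return shape, assuming `:context_lines` is 1 and a
# placeholder three-line file:
#
#   get_source_context(files, "lib/example.ex", 2)
#   #=> {["line 1"], "line 2", ["line 3"]}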
defp do_get_source_context(nil, _, _), do: {[], nil, []}
defp do_get_source_context(file, line_number, context_lines) do
context_line_indices = 0..(2 * context_lines)
Enum.reduce(context_line_indices, {[], nil, []}, fn i, {pre_context, context, post_context} ->
context_line_number = line_number - context_lines + i
source = Map.get(file, context_line_number)
cond do
context_line_number == line_number && source ->
{pre_context, source, post_context}
context_line_number < line_number && source ->
{pre_context ++ [source], context, post_context}
context_line_number > line_number && source ->
{pre_context, context, post_context ++ [source]}
true ->
{pre_context, context, post_context}
end
end)
end
defp exclude_files(file_names, []), do: file_names
defp exclude_files(file_names, [exclude_pattern | rest]) do
Enum.reject(file_names, &String.match?(&1, exclude_pattern))
|> exclude_files(rest)
end
defp source_to_lines(source) do
String.replace_suffix(source, "\n", "")
|> String.split("\n")
|> Enum.with_index()
|> Enum.reduce(%{}, fn {line_string, line_number}, acc ->
Map.put(acc, line_number + 1, line_string)
end)
end
end
|
lib/sentry/sources.ex
| 0.804021
| 0.450782
|
sources.ex
|
starcoder
|
defmodule EventStore do
@moduledoc """
EventStore is CQRS event store implemented in Elixir.
It uses PostgreSQL (v9.5 or later) as the underlying storage engine.
The `EventStore` module provides the public API to read and write events to an
event stream, and subscribe to event notifications.
Please refer to the following guides to learn more:
- [Getting started](getting-started.html)
- [Usage](usage.html)
- [Subscriptions](subscriptions.html)
- [Running on a cluster of nodes](cluster.html)
- [Event serialization](event-serialization.html)
- [Upgrading an existing EventStore database](upgrades.html)
"""
@type expected_version :: :any_version | :no_stream | :stream_exists | non_neg_integer()
@type start_from :: :origin | :current | non_neg_integer()
alias EventStore.{Config, EventData, Registration, Subscriptions}
alias EventStore.Snapshots.{SnapshotData, Snapshotter}
alias EventStore.Subscriptions.Subscription
alias EventStore.Streams.Stream
@conn EventStore.Postgrex
@all_stream "$all"
@default_batch_size 1_000
@default_count 1_000
@default_timeout 15_000
@doc """
Append one or more events to a stream atomically.
- `stream_uuid` is used to uniquely identify a stream.
- `expected_version` is used for optimistic concurrency checks.
You can provide a non-negative integer to specify the expected stream
version. This is used to ensure you can only append to the stream if it is
at exactly that version.
You can also provide one of the following values to affect the concurrency
check behaviour:
- `:any_version` - No concurrency checking; allow any stream version
(including no stream).
- `:no_stream` - Ensure the stream does not exist.
- `:stream_exists` - Ensure the stream exists.
- `events` is a list of `%EventStore.EventData{}` structs.
- `timeout` an optional timeout for the database transaction, in
milliseconds. Defaults to #{@default_timeout}ms.
Returns `:ok` on success, or an `{:error, reason}` tagged tuple. The returned
error may be due to one of the following reasons:
- `{:error, :wrong_expected_version}` when the actual stream version differs
from the provided expected version.
- `{:error, :stream_exists}` when the stream exists, but expected version
was `:no_stream`.
- `{:error, :stream_does_not_exist}` when the stream does not exist, but
expected version was `:stream_exists`.
"""
@spec append_to_stream(String.t(), expected_version, list(EventData.t()), timeout() | nil) ::
:ok
| {:error, :cannot_append_to_all_stream}
| {:error, :stream_exists}
| {:error, :stream_does_not_exist}
| {:error, :wrong_expected_version}
| {:error, reason :: term}
def append_to_stream(stream_uuid, expected_version, events, timeout \\ @default_timeout)
def append_to_stream(@all_stream, _expected_version, _events, _timeout),
do: {:error, :cannot_append_to_all_stream}
def append_to_stream(stream_uuid, expected_version, events, timeout) do
Stream.append_to_stream(@conn, stream_uuid, expected_version, events, opts(timeout))
end
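# A minimal usage sketch; the stream identifier and event contents are
# placeholders:
#
#   event = %EventStore.EventData{
#     event_type: "Elixir.ExampleEvent",
#     data: %{key: "value"},
#     metadata: %{}
#   }
#
#   :ok = EventStore.append_to_stream("example-stream", :no_stream, [event])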
@doc """
Link one or more existing events to another stream.
Allows you to construct streams containing events already appended to any
other stream. This is more efficient than copying events between streams since
only a reference to the existing event is created.
- `stream_uuid` is used to uniquely identify the target stream.
- `expected_version` is used for optimistic concurrency checks.
You can provide a non-negative integer to specify the expected stream
version. This is used to ensure you can only append to the stream if it is
at exactly that version.
You can also provide one of the following values to affect the concurrency
check behaviour:
- `:any_version` - No concurrency checking; allow any stream version
(including no stream).
- `:no_stream` - Ensure the stream does not exist.
- `:stream_exists` - Ensure the stream exists.
- `events_or_event_ids` is a list of `%EventStore.EventData{}` structs or
event ids.
- `timeout` an optional timeout for the database transaction, in
milliseconds. Defaults to #{@default_timeout}ms.
Returns `:ok` on success, or an `{:error, reason}` tagged tuple. The returned
error may be due to one of the following reasons:
- `{:error, :wrong_expected_version}` when the actual stream version differs
from the provided expected version.
- `{:error, :stream_exists}` when the stream exists, but expected version
was `:no_stream`.
- `{:error, :stream_does_not_exist}` when the stream does not exist, but
expected version was `:stream_exists`.
"""
@spec link_to_stream(
String.t(),
expected_version,
list(EventStore.RecordedEvent.t()) | list(non_neg_integer),
timeout() | nil
) ::
:ok
| {:error, :cannot_append_to_all_stream}
| {:error, :stream_exists}
| {:error, :stream_does_not_exist}
| {:error, :wrong_expected_version}
| {:error, reason :: term}
def link_to_stream(
stream_uuid,
expected_version,
events_or_event_ids,
timeout \\ @default_timeout
)
def link_to_stream(@all_stream, _expected_version, _events_or_event_ids, _timeout),
do: {:error, :cannot_append_to_all_stream}
def link_to_stream(stream_uuid, expected_version, events_or_event_ids, timeout) do
Stream.link_to_stream(
@conn,
stream_uuid,
expected_version,
events_or_event_ids,
opts(timeout)
)
end
@doc """
Reads the requested number of events from the given stream, in the order in
which they were originally written.
- `stream_uuid` is used to uniquely identify a stream.
- `start_version` optionally, the version number of the first event to read.
Defaults to the beginning of the stream if not set.
- `count` optionally, the maximum number of events to read.
If not set it will be limited to returning #{@default_count} events from the stream.
- `timeout` an optional timeout for querying the database, in milliseconds.
Defaults to #{@default_timeout}ms.
"""
@spec read_stream_forward(String.t(), non_neg_integer, non_neg_integer, timeout() | nil) ::
{:ok, list(EventStore.RecordedEvent.t())}
| {:error, reason :: term}
def read_stream_forward(
stream_uuid,
start_version \\ 0,
count \\ @default_count,
timeout \\ @default_timeout
)
def read_stream_forward(stream_uuid, start_version, count, timeout) do
Stream.read_stream_forward(@conn, stream_uuid, start_version, count, opts(timeout))
end
@doc """
Streams events from the given stream, in the order in which they were
originally written.
- `start_version` optionally, the version number of the first event to read.
Defaults to the beginning of the stream if not set.
- `read_batch_size` optionally, the number of events to read at a time from storage.
Defaults to reading #{@default_batch_size} events per batch.
- `timeout` an optional timeout for querying the database (per batch), in
milliseconds. Defaults to #{@default_timeout}ms.
"""
@spec stream_forward(String.t(), non_neg_integer, non_neg_integer, timeout() | nil) ::
Enumerable.t() | {:error, reason :: term}
def stream_forward(
stream_uuid,
start_version \\ 0,
read_batch_size \\ @default_batch_size,
timeout \\ @default_timeout
)
def stream_forward(stream_uuid, start_version, read_batch_size, timeout) do
Stream.stream_forward(
@conn,
stream_uuid,
start_version,
read_batch_size,
opts(timeout)
)
end
@doc """
Reads the requested number of events from all streams, in the order in which
they were originally written.
- `start_event_number` optionally, the number of the first event to read.
Defaults to the beginning of the stream if not set.
- `count` optionally, the maximum number of events to read.
If not set it will be limited to returning #{@default_count} events from all streams.
- `timeout` an optional timeout for querying the database, in milliseconds.
Defaults to #{@default_timeout}ms.
"""
@spec read_all_streams_forward(non_neg_integer, non_neg_integer, timeout() | nil) ::
{:ok, list(EventStore.RecordedEvent.t())} | {:error, reason :: term}
def read_all_streams_forward(
start_event_number \\ 0,
count \\ @default_count,
timeout \\ @default_timeout
)
def read_all_streams_forward(start_event_number, count, timeout) do
Stream.read_stream_forward(
@conn,
@all_stream,
start_event_number,
count,
opts(timeout)
)
end
@doc """
Streams events from all streams, in the order in which they were originally
written.
- `start_event_number` optionally, the number of the first event to read.
Defaults to the beginning of the stream if not set.
- `read_batch_size` optionally, the number of events to read at a time from
storage. Defaults to reading #{@default_batch_size} events per batch.
- `timeout` an optional timeout for querying the database (per batch), in
milliseconds. Defaults to #{@default_timeout}ms.
"""
@spec stream_all_forward(non_neg_integer, non_neg_integer, timeout() | nil) :: Enumerable.t()
def stream_all_forward(
start_event_number \\ 0,
read_batch_size \\ @default_batch_size,
timeout \\ @default_timeout
)
def stream_all_forward(start_event_number, read_batch_size, timeout) do
Stream.stream_forward(
@conn,
@all_stream,
start_event_number,
read_batch_size,
opts(timeout)
)
end
@doc """
Create a transient subscription to a given stream.
- `stream_uuid` is the stream to subscribe to.
Use the `$all` identifier to subscribe to events from all streams.
- `opts` is an optional map providing additional subscription configuration:
- `selector` to define a function to filter each event, i.e. returns
only those elements for which fun returns a truthy value
- `mapper` to define a function to map each recorded event before sending
to the subscriber.
The calling process will be notified whenever new events are appended to
the given `stream_uuid`.
As the subscription is transient you do not need to acknowledge receipt of
each event. The subscriber process will miss any events if it is restarted
and resubscribes. If you need a persistent subscription with guaranteed
at-least-once event delivery and back-pressure you should use
`EventStore.subscribe_to_stream/4`.
## Notification message
Events will be sent to the subscriber, in batches, as `{:events, events}`
where events is a collection of `EventStore.RecordedEvent` structs.
## Example
{:ok, subscription} = EventStore.subscribe(stream_uuid)
# receive first batch of events
receive do
{:events, events} ->
IO.puts "Received events: " <> inspect(events)
end
"""
@spec subscribe(
String.t(),
selector: (EventStore.RecordedEvent.t() -> any()),
mapper: (EventStore.RecordedEvent.t() -> any())
) :: :ok | {:error, term}
def subscribe(stream_uuid, opts \\ [])
def subscribe(stream_uuid, opts), do: Registration.subscribe(stream_uuid, opts)
@doc """
Create a persistent subscription to a single stream.
The `subscriber` process will be notified of each batch of events appended to
the single stream identified by `stream_uuid`.
- `stream_uuid` is the stream to subscribe to.
Use the `$all` identifier to subscribe to events from all streams.
- `subscription_name` is used to uniquely identify the subscription.
- `subscriber` is a process that will be sent `{:events, events}`
notification messages.
- `opts` is an optional map providing additional subscription configuration:
- `start_from` is a pointer to the first event to receive.
It must be one of:
- `:origin` for all events from the start of the stream (default).
- `:current` for any new events appended to the stream after the
subscription has been created.
- any positive integer for a stream version to receive events after.
- `selector` to define a function to filter each event, i.e. returns
only those elements for which fun returns a truthy value.
- `mapper` to define a function to map each recorded event before sending
to the subscriber.
- `concurrency_limit` defines the maximum number of concurrent subscribers
allowed to connect to the subscription. By default only one subscriber
may connect. If too many subscribers attempt to connect to the
subscription an `{:error, :too_many_subscribers}` is returned.
- `buffer_size` limits how many in-flight events will be sent to the
subscriber process before acknowledgement of successful processing. This
limits the number of messages sent to the subscriber and stops their
message queue from getting filled with events. Defaults to one in-flight
event.
- `partition_by` is an optional function used to partition events to
subscribers. It can be used to guarantee processing order when multiple
subscribers have subscribed to a single subscription. The function is
passed a single argument (an `EventStore.RecordedEvent` struct) and must
return the partition key. As an example to guarantee events for a single
stream are processed serially, but different streams are processed
concurrently, you could use the `stream_uuid` as the partition key.
alias EventStore.RecordedEvent
by_stream = fn %RecordedEvent{stream_uuid: stream_uuid} -> stream_uuid end
{:ok, _subscription} =
EventStore.subscribe_to_stream(stream_uuid, "example", self(),
concurrency_limit: 10,
partition_by: by_stream
)
The subscription will resume from the last acknowledged event if it already
exists. It will ignore the `start_from` argument in this case.
Returns `{:ok, subscription}` when subscription succeeds.
## Notification messages
Subscribers will initially receive a `{:subscribed, subscription}` message
once the subscription has successfully subscribed.
After this message events will be sent to the subscriber, in batches, as
`{:events, events}` where events is a collection of `EventStore.RecordedEvent`
structs.
## Example
{:ok, subscription} = EventStore.subscribe_to_stream(stream_uuid, "example", self())
# wait for the subscription confirmation
receive do
{:subscribed, ^subscription} ->
IO.puts "Successfully subscribed to stream: " <> inspect(stream_uuid)
end
receive do
{:events, events} ->
IO.puts "Received events: " <> inspect(events)
# acknowledge receipt
EventStore.ack(subscription, events)
end
"""
@spec subscribe_to_stream(String.t(), String.t(), pid, keyword) ::
{:ok, subscription :: pid}
| {:error, :already_subscribed}
| {:error, :subscription_already_exists}
| {:error, :too_many_subscribers}
| {:error, reason :: term}
def subscribe_to_stream(stream_uuid, subscription_name, subscriber, opts \\ [])
def subscribe_to_stream(stream_uuid, subscription_name, subscriber, opts) do
with {start_from, opts} <- Keyword.pop(opts, :start_from, :origin),
{:ok, start_from} <- Stream.start_from(@conn, stream_uuid, start_from, opts()),
opts <- Keyword.put(opts, :start_from, start_from) do
Subscriptions.subscribe_to_stream(stream_uuid, subscription_name, subscriber, opts)
else
reply -> reply
end
end
@doc """
Create a persistent subscription to all streams.
The `subscriber` process will be notified of each batch of events appended to
any stream.
- `subscription_name` is used to uniquely identify the subscription.
- `subscriber` is a process that will be sent `{:events, events}`
notification messages.
  - `opts` is an optional keyword list providing additional subscription configuration:
- `start_from` is a pointer to the first event to receive.
It must be one of:
- `:origin` for all events from the start of the stream (default).
- `:current` for any new events appended to the stream after the
subscription has been created.
- any positive integer for an event id to receive events after that
exact event.
- `selector` to define a function to filter each event, i.e. returns
      only those elements for which fun returns a truthy value.
- `mapper` to define a function to map each recorded event before sending
to the subscriber.
- `concurrency_limit` defines the maximum number of concurrent subscribers
allowed to connect to the subscription. By default only one subscriber
may connect. If too many subscribers attempt to connect to the
subscription an `{:error, :too_many_subscribers}` is returned.
The subscription will resume from the last acknowledged event if it already
exists. It will ignore the `start_from` argument in this case.
Returns `{:ok, subscription}` when subscription succeeds.
## Example
{:ok, subscription} = EventStore.subscribe_to_all_streams("all_subscription", self())
# wait for the subscription confirmation
receive do
{:subscribed, ^subscription} ->
IO.puts "Successfully subscribed to all streams"
end
receive do
{:events, events} ->
IO.puts "Received events: " <> inspect(events)
# acknowledge receipt
EventStore.ack(subscription, events)
end
"""
@spec subscribe_to_all_streams(String.t(), pid, keyword) ::
{:ok, subscription :: pid}
| {:error, :already_subscribed}
| {:error, :subscription_already_exists}
| {:error, :too_many_subscribers}
| {:error, reason :: term}
def subscribe_to_all_streams(subscription_name, subscriber, opts \\ [])
def subscribe_to_all_streams(subscription_name, subscriber, opts) do
subscribe_to_stream(@all_stream, subscription_name, subscriber, opts)
end
@doc """
  Acknowledge receipt of the given events received from a single-stream or
  all-streams subscription.
Accepts a `RecordedEvent`, a list of `RecordedEvent`s, or the event number of
the recorded event to acknowledge.
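
  ## Example

  A sketch, assuming `subscription` and `events` come from an earlier
  `EventStore.subscribe_to_stream/4` subscription:

      receive do
        {:events, events} ->
          :ok = EventStore.ack(subscription, events)
      end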
"""
@spec ack(
pid,
EventStore.RecordedEvent.t() | list(EventStore.RecordedEvent.t()) | non_neg_integer()
) :: :ok | {:error, reason :: term}
def ack(subscription, ack) do
Subscription.ack(subscription, ack)
end
@doc """
Unsubscribe an existing subscriber from event notifications.
- `stream_uuid` is the stream to unsubscribe from.
- `subscription_name` is used to identify the existing subscription process
to stop.
Returns `:ok` on success.
"""
@spec unsubscribe_from_stream(String.t(), String.t()) :: :ok
def unsubscribe_from_stream(stream_uuid, subscription_name) do
Subscriptions.unsubscribe_from_stream(stream_uuid, subscription_name)
end
@doc """
Unsubscribe an existing subscriber from all event notifications.
- `subscription_name` is used to identify the existing subscription process
to stop.
Returns `:ok` on success.
"""
@spec unsubscribe_from_all_streams(String.t()) :: :ok
def unsubscribe_from_all_streams(subscription_name) do
Subscriptions.unsubscribe_from_stream(@all_stream, subscription_name)
end
@doc """
Delete an existing persistent subscription.
- `stream_uuid` is the stream the subscription is subscribed to.
- `subscription_name` is used to identify the existing subscription to
remove.
Returns `:ok` on success.
"""
@spec delete_subscription(String.t(), String.t()) :: :ok
def delete_subscription(stream_uuid, subscription_name) do
Subscriptions.delete_subscription(@conn, stream_uuid, subscription_name, opts())
end
@doc """
Delete an existing persistent subscription to all streams.
  - `subscription_name` is used to identify the existing subscription to
    remove.
Returns `:ok` on success.
"""
@spec delete_all_streams_subscription(String.t()) :: :ok
def delete_all_streams_subscription(subscription_name) do
EventStore.delete_subscription(@all_stream, subscription_name)
end
@doc """
Read a snapshot, if available, for a given source.
Returns `{:ok, %EventStore.Snapshots.SnapshotData{}}` on success, or
`{:error, :snapshot_not_found}` when unavailable.
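
  ## Example

  A sketch; the `:data` field access assumes the `SnapshotData` struct shape:

      case EventStore.read_snapshot(source_uuid) do
        {:ok, %EventStore.Snapshots.SnapshotData{data: data}} -> data
        {:error, :snapshot_not_found} -> nil
      end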
"""
@spec read_snapshot(String.t()) :: {:ok, SnapshotData.t()} | {:error, :snapshot_not_found}
def read_snapshot(source_uuid) do
Snapshotter.read_snapshot(@conn, source_uuid, Config.serializer(), opts())
end
@doc """
Record a snapshot of the data and metadata for a given source.
Returns `:ok` on success.
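
  ## Example

  A minimal sketch; the struct fields shown here are assumptions:

      snapshot = %EventStore.Snapshots.SnapshotData{
        source_uuid: "account-1234",
        source_version: 10,
        source_type: "Elixir.Account",
        data: %{balance: 100}
      }

      :ok = EventStore.record_snapshot(snapshot)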
"""
@spec record_snapshot(SnapshotData.t()) :: :ok | {:error, reason :: term}
def record_snapshot(%SnapshotData{} = snapshot) do
Snapshotter.record_snapshot(@conn, snapshot, Config.serializer(), opts())
end
@doc """
  Delete a previously recorded snapshot for a given source.
Returns `:ok` on success, or when the snapshot does not exist.
"""
@spec delete_snapshot(String.t()) :: :ok | {:error, reason :: term}
def delete_snapshot(source_uuid) do
Snapshotter.delete_snapshot(@conn, source_uuid, opts())
end
@doc """
Get the event store configuration for the environment.
"""
def configuration, do: EventStore.Config.get()
@default_opts [pool: DBConnection.Poolboy]
defp opts, do: @default_opts
defp opts(timeout) when is_integer(timeout) do
Keyword.put(@default_opts, :timeout, timeout)
end
defp opts(:infinity) do
Keyword.put(@default_opts, :timeout, :infinity)
end
end
|
lib/event_store.ex
| 0.893292
| 0.638737
|
event_store.ex
|
starcoder
|
defmodule Blueprint.Plot.Graph do
@moduledoc """
Convenient functions for building simple node dependency
graphs.
"""
@type graph_node :: any
@type meta :: any
@type connection :: { graph_node, graph_node } | { graph_node, graph_node, meta }
@type graph :: [connection]
@type labeler :: (graph_node -> String.t)
@type styler :: ({ :node, graph_node } | { :connection, connection } -> keyword())
@typep node_cache :: %{ optional(graph_node) => pos_integer }
@typep cluster_type :: :app | :mod
@spec add_node(node_cache, graph_node, labeler, styler) :: node_cache
    defp add_node(nodes, node, label, styler) do
        { node_id, _ } = Graphvix.Node.new(Keyword.merge([label: label.(node)], styler.({ :node, node })))
        Map.put_new(nodes, node, node_id)
    end
@spec add_module(%{ optional(graph_node) => [pos_integer] }, graph_node, pos_integer) :: %{ optional(graph_node) => [pos_integer] }
defp add_module(modules, mod, node_id) do
case modules do
%{ ^mod => nodes } -> %{ modules | mod => [node_id|nodes] }
_ -> Map.put_new(modules, mod, [node_id])
end
end
@spec define_clusters(node_cache, [cluster_type] | cluster_type | nil) :: node_cache
defp define_clusters(nodes, nil), do: nodes
    defp define_clusters(nodes, :mod) do
        Enum.reduce(nodes, %{}, fn
            { { mod, _, _ }, node_id }, modules -> add_module(modules, mod, node_id)
            { mod, node_id }, modules -> add_module(modules, mod, node_id)
        end)
        |> Enum.each(fn
            { _, [_] } -> nil
            { _, cluster_nodes } -> Graphvix.Cluster.new(cluster_nodes)
        end)

        nodes
    end
    defp define_clusters(nodes, :app) do
        Enum.reduce(nodes, %{}, fn
            { { mod, _, _ }, node_id }, modules -> add_module(modules, app_name(mod), node_id)
            { mod, node_id }, modules -> add_module(modules, app_name(mod), node_id)
        end)
        |> Enum.each(fn
            { _, [_] } -> nil
            { _, cluster_nodes } -> Graphvix.Cluster.new(cluster_nodes)
        end)

        nodes
    end
    defp define_clusters(nodes, []), do: nodes
    defp define_clusters(nodes, [h|t]) do
        define_clusters(nodes, h)
        define_clusters(nodes, t)
    end

    # Derives the application name (top-level namespace) for a module.
    @spec app_name(atom) :: String.t | atom
    defp app_name(mod) do
        case to_string(mod) do
            "Elixir." <> m ->
                [app|_] = String.split(m, ".")
                app
            _ -> mod
        end
    end
@spec define_nodes(node_cache, graph_node, graph_node, labeler, styler) :: node_cache
defp define_nodes(nodes, a, b, label, styler) do
case nodes do
%{ ^a => _, ^b => _ } -> nodes
%{ ^a => _ } -> add_node(nodes, b, label, styler)
%{ ^b => _ } -> add_node(nodes, a, label, styler)
_ ->
if(a != b, do: add_node(nodes, a, label, styler), else: nodes)
|> add_node(b, label, styler)
end
end
@doc """
Convert a node graph into a DOT graph.
    Options can be provided to change the resulting graph. These
    options are:

    * `:labeler` - A function of type `labeler`: the node is passed
      to the function, which is expected to return a string that will
      be used as the node's label on the graph.
    * `:styler` - A function of type `styler`: a node or connection
      is passed to the function, which is expected to return any
      styling changes to overwrite the defaults with.
    * `:group` - Clusters nodes by `:mod` (module) or `:app`
      (application); a list combining both may also be given.
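
    ## Example

    A minimal sketch (the module names are hypothetical):

        Blueprint.Plot.Graph.to_dot([{ Foo, Bar }, { Bar, Baz }],
            labeler: &inspect/1,
            styler: fn
                { :node, _ } -> [color: "blue"]
                _ -> [color: "gray"]
            end
        )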
"""
@spec to_dot(graph, keyword()) :: String.t
def to_dot(graph, opts \\ []) do
styler = Keyword.get(opts, :styler, fn _ -> [color: "black"] end)
label = Keyword.get(opts, :labeler, &Blueprint.Plot.Label.strip_namespace(Blueprint.Plot.Label.to_label(&1)))
:ok = Graphvix.Graph.new(__MODULE__)
nodes = Enum.reduce(graph, %{}, fn
connection = { a, b }, nodes ->
nodes = %{ ^a => node_a, ^b => node_b } = define_nodes(nodes, a, b, label, styler)
Graphvix.Edge.new(node_a, node_b, styler.({ :connection, connection }))
nodes
connection = { a, b, _ }, nodes ->
nodes = %{ ^a => node_a, ^b => node_b } = define_nodes(nodes, a, b, label, styler)
Graphvix.Edge.new(node_a, node_b, styler.({ :connection, connection }))
nodes
end)
define_clusters(nodes, opts[:group])
dot = Graphvix.Graph.write
Graphvix.Graph.clear
dot
end
@doc """
Write the DOT graph to a file.
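
    For example, rendering and saving a graph in one pipeline (assuming
    `graph` is a node graph):

        graph
        |> Blueprint.Plot.Graph.to_dot()
        |> Blueprint.Plot.Graph.save!("deps.dot")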
"""
@spec save!(String.t, String.t) :: :ok | no_return
def save!(dot, path \\ "graph.dot"), do: File.write!(path, dot)
end
|
lib/blueprint/plot/graph.ex
| 0.77518
| 0.463019
|
graph.ex
|
starcoder
|
defmodule Timex.Convert do
@moduledoc false
@doc """
Converts a map to a Date, NaiveDateTime or DateTime, depending on the amount
of date/time information in the map.
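
  For example, a map carrying only date keys converts to a `Date` (illustrative):

      Timex.Convert.convert_map(%{"year" => 2018, "month" => 1, "day" => 2})
      #=> ~D[2018-01-02]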
"""
  @spec convert_map(map) :: Date.t | DateTime.t | NaiveDateTime.t | {:error, term}
def convert_map(map) when is_map(map) do
case convert_keys(map) do
{:error, _} = err ->
err
datetime_map when is_map(datetime_map) ->
year = Map.get(datetime_map, :year)
month = Map.get(datetime_map, :month)
day = Map.get(datetime_map, :day)
cond do
not(is_nil(year)) and not(is_nil(month)) and not(is_nil(day)) ->
case Map.get(datetime_map, :hour) do
              nil ->
                {:ok, date} = Date.new(year, month, day)
                date
hour ->
minute = Map.get(datetime_map, :minute, 0)
second = Map.get(datetime_map, :second, 0)
us = Map.get(datetime_map, :microsecond, 0)
tz = Map.get(datetime_map, :time_zone, nil)
case tz do
s when is_binary(s) ->
Timex.DateTime.Helpers.construct({{year,month,day},{hour,minute,second,us}}, tz)
nil ->
{:ok, nd} = NaiveDateTime.new(year, month, day, hour, minute, second, us)
nd
end
end
:else ->
{:error, :insufficient_date_information}
end
end
end
  # Fallback clause for non-map input.
  def convert_map(_), do: {:error, :invalid_date}
@allowed_keys_atom [
:year, :month, :day,
:hour, :minute, :min, :mins, :second, :sec, :secs,
:milliseconds, :millisecond, :ms,
:microsecond
]
@allowed_keys Enum.concat(@allowed_keys_atom, Enum.map(@allowed_keys_atom, &Atom.to_string/1))
@valid_keys_map %{
:min => :minute,
:mins => :minute,
:secs => :second,
:sec => :second,
:milliseconds => :millisecond,
:ms => :millisecond,
:microsecond => :microsecond,
:tz => :time_zone,
:timezone => :time_zone,
:time_zone => :time_zone
}
def convert_keys(map) when is_map(map) do
Enum.reduce(map, %{}, fn
{_, _}, {:error, _} = err -> err
{k, v}, acc when k in [:microsecond, "microsecond"] ->
case v do
{us, pr} when is_integer(us) and pr >= 0 and pr <= 6 ->
Map.put(acc, :microsecond, {us, pr})
us when is_integer(us) ->
Map.put(acc, :microsecond, {us, 6})
_ -> acc
end
{k, v}, acc when k in [:milliseconds, "milliseconds", :ms, "ms", :millisecond, "millisecond"] ->
case v do
n when is_integer(n) ->
us = Timex.DateTime.Helpers.construct_microseconds(n*1_000)
Map.put(acc, :microsecond, us)
          _ ->
            {:error, {:expected_integer, for: k, got: v}}
end
{k, v}, acc when k in [:tz, "tz", :timezone, "timezone", :time_zone, "time_zone"] ->
case v do
s when is_binary(s) -> Map.put(acc, :time_zone, s)
%{"full_name" => s} -> Map.put(acc, :time_zone, s)
_ -> acc
end
{k, v}, acc when k in @allowed_keys and is_atom(k) and is_integer(v) ->
case Map.get(@valid_keys_map, k) do
nil -> Map.put(acc, k, v)
vk -> Map.put(acc, vk, v)
end
{k, v}, acc when k in @allowed_keys and is_integer(v) ->
ak = String.to_atom(k)
case Map.get(@valid_keys_map, ak) do
nil -> Map.put(acc, ak, v)
vk -> Map.put(acc, vk, v)
end
{k, v}, acc when k in @allowed_keys and is_atom(k) and is_binary(v) ->
case Integer.parse(v) do
{n, _} ->
case Map.get(@valid_keys_map, k) do
nil -> Map.put(acc, k, n)
vk -> Map.put(acc, vk, n)
end
:error ->
{:error, {:expected_integer, for: k, got: v}}
end
{k, v}, acc when k in @allowed_keys and is_binary(v) ->
case Integer.parse(v) do
{n, _} ->
ak = String.to_atom(k)
case Map.get(@valid_keys_map, ak) do
nil -> Map.put(acc, ak, n)
vk -> Map.put(acc, vk, n)
end
:error ->
{:error, {:expected_integer, for: k, got: v}}
end
{_, _}, acc -> acc
end)
end
end
|
deps/timex/lib/convert/convert.ex
| 0.756987
| 0.690357
|
convert.ex
|
starcoder
|
defmodule Kernel.ParallelCompiler do
@moduledoc """
A module responsible for compiling files in parallel.
"""
@doc """
Compiles the given files.
Those files are compiled in parallel and can automatically
detect dependencies between them. Once a dependency is found,
the current file stops being compiled until the dependency is
resolved.
If there is an error during compilation or if `warnings_as_errors`
is set to `true` and there is a warning, this function will fail
with an exception.
This function accepts the following options:
* `:each_file` - for each file compiled, invokes the callback passing the
file
* `:each_long_compilation` - for each file that takes more than a given
timeout (see the `:long_compilation_threshold` option) to compile, invoke
this callback passing the file as its argument
  * `:long_compilation_threshold` - the timeout (in seconds) after which the
    `:each_long_compilation` callback is invoked; defaults to `10`
* `:each_module` - for each module compiled, invokes the callback passing
the file, module and the module bytecode
* `:dest` - the destination directory for the BEAM files. When using `files/2`,
this information is only used to properly annotate the BEAM files before
they are loaded into memory. If you want a file to actually be written to
`dest`, use `files_to_path/3` instead.
Returns the modules generated by each compiled file.
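
  ## Example

  A sketch with hypothetical file paths:

      Kernel.ParallelCompiler.files(
        ["lib/foo.ex", "lib/bar.ex"],
        each_file: fn file -> IO.puts("compiled " <> file) end
      )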
"""
def files(files, options \\ [])
def files(files, options) when is_list(options) do
spawn_compilers(files, nil, options)
end
@doc """
Compiles the given files to the given path.
Read `files/2` for more information.
"""
def files_to_path(files, path, options \\ [])
def files_to_path(files, path, options) when is_binary(path) and is_list(options) do
spawn_compilers(files, path, options)
end
defp spawn_compilers(files, path, options) do
true = Code.ensure_loaded?(Kernel.ErrorHandler)
compiler_pid = self()
:elixir_code_server.cast({:reset_warnings, compiler_pid})
schedulers = max(:erlang.system_info(:schedulers_online), 2)
result = spawn_compilers(%{
entries: files,
original: files,
output: path,
options: options,
waiting: [],
queued: [],
schedulers: schedulers,
result: [],
})
    # In case --warnings-as-errors is enabled and there was a warning,
    # compilation status will be set to error.
case :elixir_code_server.call({:compilation_status, compiler_pid}) do
:ok ->
result
:error ->
IO.puts :stderr, "Compilation failed due to warnings while using the --warnings-as-errors option"
exit({:shutdown, 1})
end
end
# We already have n=schedulers currently running, don't spawn new ones
defp spawn_compilers(%{queued: queued, waiting: waiting, schedulers: schedulers} = state)
when length(queued) - length(waiting) >= schedulers do
wait_for_messages(state)
end
# Release waiting processes
defp spawn_compilers(%{entries: [{ref, found} | t], waiting: waiting} = state) do
waiting =
case List.keytake(waiting, ref, 2) do
{{_kind, pid, ^ref, _on, _defining}, waiting} ->
send pid, {ref, found}
waiting
nil ->
waiting
end
spawn_compilers(%{state | entries: t, waiting: waiting})
end
defp spawn_compilers(%{entries: [file | files], queued: queued, output: output, options: options} = state) do
parent = self()
{pid, ref} =
:erlang.spawn_monitor fn ->
# Set the elixir_compiler_pid used by our custom Kernel.ErrorHandler.
:erlang.put(:elixir_compiler_pid, parent)
:erlang.put(:elixir_compiler_file, file)
:erlang.process_flag(:error_handler, Kernel.ErrorHandler)
exit(try do
_ = if output do
:elixir_compiler.file_to_path(file, output)
else
:elixir_compiler.file(file, Keyword.get(options, :dest))
end
{:shutdown, file}
catch
kind, reason ->
{:failure, kind, reason, System.stacktrace}
end)
end
timeout = Keyword.get(options, :long_compilation_threshold, 10) * 1_000
timer_ref = Process.send_after(self(), {:timed_out, pid}, timeout)
new_queued = [{pid, ref, file, timer_ref} | queued]
spawn_compilers(%{state | entries: files, queued: new_queued})
end
# No more files, nothing waiting, queue is empty, we are done
defp spawn_compilers(%{entries: [], waiting: [], queued: [], result: result}) do
for {:module, mod} <- result, do: mod
end
# Queued x, waiting for x: POSSIBLE ERROR! Release processes so we get the failures
defp spawn_compilers(%{entries: [], waiting: waiting, queued: queued} = state) when length(waiting) == length(queued) do
entries = for {pid, _, _, _} <- queued,
entry = waiting_on_without_definition(waiting, pid),
{_, _, ref, on, _} = entry,
do: {on, {ref, :not_found}}
# Instead of releasing all files at once, we release them in groups
# based on the module they are waiting on. We pick the module being
    # depended on with fewer edges, as it is the most likely source of
# error (for example, someone made a typo). This may not always be
# true though: for example, if there is a macro injecting code into
# multiple modules and such code becomes faulty, now multiple modules
# are waiting on the same module required by the faulty code. However,
# since we need to pick something to be first, the one with fewer edges
# sounds like a sane choice.
entries =
entries
|> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
|> Enum.sort_by(&length(elem(&1, 1)))
|> Enum.find_value([], &elem(&1, 1))
case entries do
[] -> handle_deadlock(waiting, queued)
_ -> spawn_compilers(%{state | entries: entries})
end
end
# No more files, but queue and waiting are not full or do not match
defp spawn_compilers(%{entries: []} = state) do
wait_for_messages(state)
end
defp waiting_on_without_definition(waiting, pid) do
{_, ^pid, _, on, _} = entry = List.keyfind(waiting, pid, 1)
if Enum.any?(waiting, fn {_, _, _, _, defining} -> on in defining end) do
nil
else
entry
end
end
# Wait for messages from child processes
defp wait_for_messages(state) do
%{entries: entries, options: options, waiting: waiting, queued: queued, result: result} = state
receive do
{:struct_available, module} ->
available = for {:struct, _, ref, waiting_module, _defining} <- waiting,
module == waiting_module,
do: {ref, :found}
spawn_compilers(%{state | entries: available ++ entries, result: [{:struct, module} | result]})
{:module_available, child, ref, file, module, binary} ->
if callback = Keyword.get(options, :each_module) do
callback.(file, module, binary)
end
# Release the module loader which is waiting for an ack
send child, {ref, :ack}
available = for {:module, _, ref, waiting_module, _defining} <- waiting,
module == waiting_module,
do: {ref, :found}
cancel_waiting_timer(queued, child)
spawn_compilers(%{state | entries: available ++ entries, result: [{:module, module} | result]})
{:waiting, kind, child, ref, on, defining} ->
# Oops, we already got it, do not put it on waiting.
# OR
# We're waiting on ourselves, send :found so that we can crash with a better error
waiting =
if :lists.any(&match?({^kind, ^on}, &1), result) or on in defining do
send child, {ref, :found}
waiting
else
[{kind, child, ref, on, defining} | waiting]
end
spawn_compilers(%{state | waiting: waiting})
{:timed_out, child} ->
callback = Keyword.get(options, :each_long_compilation)
case List.keyfind(queued, child, 0) do
{^child, _, file, _} when not is_nil(callback) ->
callback.(file)
_ ->
:ok
end
spawn_compilers(state)
{:DOWN, _down_ref, :process, down_pid, {:shutdown, file}} ->
if callback = Keyword.get(options, :each_file) do
callback.(file)
end
cancel_waiting_timer(queued, down_pid)
# Sometimes we may have spurious entries in the waiting
# list because someone invoked try/rescue UndefinedFunctionError
new_entries = List.delete(entries, down_pid)
new_queued = List.keydelete(queued, down_pid, 0)
new_waiting = List.keydelete(waiting, down_pid, 1)
spawn_compilers(%{state | entries: new_entries, waiting: new_waiting, queued: new_queued})
{:DOWN, down_ref, :process, _down_pid, reason} ->
handle_failure(down_ref, reason, queued)
wait_for_messages(state)
end
end
defp handle_deadlock(waiting, queued) do
deadlock =
for {pid, _, file, _} <- queued do
{:current_stacktrace, stacktrace} = Process.info(pid, :current_stacktrace)
Process.exit(pid, :kill)
{_kind, ^pid, _, on, _} = List.keyfind(waiting, pid, 1)
error = CompileError.exception(description: "deadlocked waiting on module #{inspect on}",
file: nil, line: nil)
print_failure(file, {:failure, :error, error, stacktrace})
{file, on}
end
IO.puts """
Compilation failed because of a deadlock between files.
The following files depended on the following modules:
"""
max =
deadlock
|> Enum.map(& &1 |> elem(0) |> String.length)
|> Enum.max
for {file, mod} <- deadlock do
IO.puts [" ", String.pad_leading(file, max), " => " | inspect(mod)]
end
IO.puts ""
exit({:shutdown, 1})
end
defp handle_failure(ref, reason, queued) do
if file = find_failure(ref, queued) do
print_failure(file, reason)
for {pid, _, _, _} <- queued do
Process.exit(pid, :kill)
end
exit({:shutdown, 1})
end
end
defp find_failure(ref, queued) do
case List.keyfind(queued, ref, 1) do
{_child, ^ref, file, _timer_ref} -> file
_ -> nil
end
end
defp print_failure(_file, {:shutdown, _}) do
:ok
end
defp print_failure(file, {:failure, kind, reason, stacktrace}) do
IO.puts "\n== Compilation error on file #{Path.relative_to_cwd(file)} =="
IO.puts Exception.format(kind, reason, prune_stacktrace(stacktrace))
end
defp print_failure(file, reason) do
IO.puts "\n== Compilation error on file #{Path.relative_to_cwd(file)} =="
IO.puts Exception.format(:exit, reason, [])
end
@elixir_internals [:elixir, :elixir_exp, :elixir_compiler, :elixir_module, :elixir_clauses,
:elixir_translator, :elixir_expand, :elixir_lexical, :elixir_exp_clauses,
:elixir_def, :elixir_map, Kernel.ErrorHandler]
defp prune_stacktrace([{mod, _, _, _} | t]) when mod in @elixir_internals do
prune_stacktrace(t)
end
defp prune_stacktrace([h | t]) do
[h | prune_stacktrace(t)]
end
defp prune_stacktrace([]) do
[]
end
defp cancel_waiting_timer(queued, child_pid) do
case List.keyfind(queued, child_pid, 0) do
{^child_pid, _ref, _file, timer_ref} ->
Process.cancel_timer(timer_ref)
# Let's flush the message in case it arrived before we canceled the
# timeout.
receive do
{:timed_out, ^child_pid} -> :ok
after
0 -> :ok
end
nil ->
:ok
end
end
end
|
lib/elixir/lib/kernel/parallel_compiler.ex
| 0.794106
| 0.477189
|
parallel_compiler.ex
|
starcoder
|
defmodule FakeServer.Response do
@moduledoc """
Response structure and helpers.
FakeServer makes use of the `%FakeServer.Response{}` structure to define the responses that will be given by the server.
## Structure Fields
- `:status`: The status code of the response. It must be an integer.
  - `:body`: Optional. The response body. Can be a string or a map. If the body is a map, it will be encoded as JSON, so it must be serializable to valid JSON.
  - `:headers`: Optional. The response headers. Must be a map with string keys.
  You can use the `new/3` function to create a response. Since this function performs several validations, you should avoid creating the structure directly.
"""
@enforce_keys [:status]
defstruct [status: nil, body: "", headers: %{}]
@doc """
  Creates a new Response structure. Returns `{:ok, response}` on success or `{:error, reason}` when validation fails.
## Example
```elixir
iex> FakeServer.Response.new(200, %{name: "<NAME>", email: "<EMAIL>"}, %{"Content-Type" => "application/json"})
iex> FakeServer.Response.new(200, ~s<{"name":"<NAME>","email":"<EMAIL>"}>, %{"Content-Type" => "application/json"})
iex> FakeServer.Response.new(201, ~s<{"name":"<NAME>","email":"<EMAIL>"}>)
iex> FakeServer.Response.new(404)
```
"""
def new(status_code, body \\ "", headers \\ %{}) do
with response <- %__MODULE__{status: status_code, body: body, headers: headers},
:ok <- validate(response),
{:ok, response} <- ensure_body_format(response),
{:ok, response} <- ensure_headers_keys(response)
do
{:ok, response}
end
end
@doc """
Similar to `new/3`, but raises `FakeServer.Error` when validation fails.
"""
def new!(status_code, body \\ "", headers \\ %{}) do
case new(status_code, body, headers) do
{:ok, response} -> response
{:error, reason} -> raise FakeServer.Error, reason
end
end
@doc false
def validate({:ok, %__MODULE__{} = response}), do: validate(response)
def validate(%__MODULE__{body: body, status: status, headers: headers}) do
cond do
not is_map(headers) -> {:error, {headers, "response headers must be a map"}}
not (is_bitstring(body) or is_map(body)) -> {:error, {body, "body must be a map or a string"}}
not Enum.member?(allowed_status_codes(), status) -> {:error, {status, "invalid status code"}}
true -> :ok
end
end
def validate(response), do: {:error, {response, "invalid response type"}}
@doc """
  Creates a new response with status 200.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def ok(body \\ "", headers \\ %{}), do: new(200, body, headers)
@doc """
Creates a new response with status 200 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def ok!(body \\ "", headers \\ %{}), do: new!(200, body, headers)
@doc """
  Creates a new response with status 201.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def created(body \\ "", headers \\ %{}), do: new(201, body, headers)
@doc """
Creates a new response with status 201 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def created!(body \\ "", headers \\ %{}), do: new!(201, body, headers)
@doc """
  Creates a new response with status 202.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def accepted(body \\ "", headers \\ %{}), do: new(202, body, headers)
@doc """
Creates a new response with status 202 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def accepted!(body \\ "", headers \\ %{}), do: new!(202, body, headers)
@doc """
Creates a new response with status 203.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def non_authoritative_information(body \\ "", headers \\ %{}), do: new(203, body, headers)
@doc """
Creates a new response with status 203 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def non_authoritative_information!(body \\ "", headers \\ %{}), do: new!(203, body, headers)
@doc """
  Creates a new response with status 204.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def no_content(body \\ "", headers \\ %{}), do: new(204, body, headers)
@doc """
Creates a new response with status 204 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def no_content!(body \\ "", headers \\ %{}), do: new!(204, body, headers)
@doc """
  Creates a new response with status 205.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def reset_content(body \\ "", headers \\ %{}), do: new(205, body, headers)
@doc """
Creates a new response with status 205 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def reset_content!(body \\ "", headers \\ %{}), do: new!(205, body, headers)
@doc """
  Creates a new response with status 206.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def partial_content(body \\ "", headers \\ %{}), do: new(206, body, headers)
@doc """
Creates a new response with status 206 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def partial_content!(body \\ "", headers \\ %{}), do: new!(206, body, headers)
@doc """
  Returns a list with all available 4xx HTTP responses.
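
  For example, extracting just the status codes:

  ```elixir
  iex> FakeServer.Response.all_4xx() |> Enum.map(& &1.status)
  [400, 401, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 417, 418, 422, 423, 424, 426, 428, 429, 431]
  ```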
"""
def all_4xx do
[
bad_request!(),
unauthorized!(),
forbidden!(),
not_found!(),
method_not_allowed!(),
not_acceptable!(),
proxy_authentication_required!(),
request_timeout!(),
conflict!(),
gone!(),
length_required!(),
precondition_failed!(),
payload_too_large!(),
uri_too_long!(),
unsupported_media_type!(),
expectation_failed!(),
im_a_teapot!(),
unprocessable_entity!(),
locked!(),
failed_dependency!(),
upgrade_required!(),
precondition_required!(),
too_many_requests!(),
request_header_fields_too_large!()
]
end
@doc """
  Creates a new response with status 400.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def bad_request(body \\ "", headers \\ %{}), do: new(400, body, headers)
@doc """
Creates a new response with status 400 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def bad_request!(body \\ "", headers \\ %{}), do: new!(400, body, headers)
@doc """
  Creates a new response with status 401.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def unauthorized(body \\ "", headers \\ %{}), do: new(401, body, headers)
@doc """
Creates a new response with status 401 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def unauthorized!(body \\ "", headers \\ %{}), do: new!(401, body, headers)
@doc """
  Creates a new response with status 403.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def forbidden(body \\ "", headers \\ %{}), do: new(403, body, headers)
@doc """
Creates a new response with status 403 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def forbidden!(body \\ "", headers \\ %{}), do: new!(403, body, headers)
@doc """
  Creates a new response with status 404.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def not_found(body \\ "", headers \\ %{}), do: new(404, body, headers)
@doc """
Creates a new response with status 404 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def not_found!(body \\ "", headers \\ %{}), do: new!(404, body, headers)
@doc """
  Creates a new response with status 405.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def method_not_allowed(body \\ "", headers \\ %{}), do: new(405, body, headers)
@doc """
Creates a new response with status 405 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def method_not_allowed!(body \\ "", headers \\ %{}), do: new!(405, body, headers)
@doc """
  Creates a new response with status 406.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def not_acceptable(body \\ "", headers \\ %{}), do: new(406, body, headers)
@doc """
Creates a new response with status 406 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def not_acceptable!(body \\ "", headers \\ %{}), do: new!(406, body, headers)
@doc """
  Creates a new response with status 407.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def proxy_authentication_required(body \\ "", headers \\ %{}), do: new(407, body, headers)
@doc """
Creates a new response with status 407 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def proxy_authentication_required!(body \\ "", headers \\ %{}), do: new!(407, body, headers)
@doc """
  Creates a new response with status 408.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def request_timeout(body \\ "", headers \\ %{}), do: new(408, body, headers)
@doc """
Creates a new response with status 408 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def request_timeout!(body \\ "", headers \\ %{}), do: new!(408, body, headers)
@doc """
  Creates a new response with status 409.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def conflict(body \\ "", headers \\ %{}), do: new(409, body, headers)
@doc """
Creates a new response with status 409 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def conflict!(body \\ "", headers \\ %{}), do: new!(409, body, headers)
@doc """
  Creates a new response with status 410.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def gone(body \\ "", headers \\ %{}), do: new(410, body, headers)
@doc """
Creates a new response with status 410 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def gone!(body \\ "", headers \\ %{}), do: new!(410, body, headers)
@doc """
  Creates a new response with status 411.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def length_required(body \\ "", headers \\ %{}), do: new(411, body, headers)
@doc """
Creates a new response with status 411 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def length_required!(body \\ "", headers \\ %{}), do: new!(411, body, headers)
@doc """
  Creates a new response with status 412.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def precondition_failed(body \\ "", headers \\ %{}), do: new(412, body, headers)
@doc """
Creates a new response with status 412 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def precondition_failed!(body \\ "", headers \\ %{}), do: new!(412, body, headers)
@doc """
  Creates a new response with status 413.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def payload_too_large(body \\ "", headers \\ %{}), do: new(413, body, headers)
@doc """
Creates a new response with status 413 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def payload_too_large!(body \\ "", headers \\ %{}), do: new!(413, body, headers)
@doc """
  Creates a new response with status 414.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def uri_too_long(body \\ "", headers \\ %{}), do: new(414, body, headers)
@doc """
Creates a new response with status 414 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def uri_too_long!(body \\ "", headers \\ %{}), do: new!(414, body, headers)
@doc """
  Creates a new response with status 415.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def unsupported_media_type(body \\ "", headers \\ %{}), do: new(415, body, headers)
@doc """
Creates a new response with status 415 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def unsupported_media_type!(body \\ "", headers \\ %{}), do: new!(415, body, headers)
@doc """
  Creates a new response with status 417.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def expectation_failed(body \\ "", headers \\ %{}), do: new(417, body, headers)
@doc """
Creates a new response with status 417 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def expectation_failed!(body \\ "", headers \\ %{}), do: new!(417, body, headers)
@doc """
  Creates a new response with status 418.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def im_a_teapot(body \\ "", headers \\ %{}), do: new(418, body, headers)
@doc """
Creates a new response with status 418 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def im_a_teapot!(body \\ "", headers \\ %{}), do: new!(418, body, headers)
@doc """
  Creates a new response with status 422.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def unprocessable_entity(body \\ "", headers \\ %{}), do: new(422, body, headers)
@doc """
Creates a new response with status 422 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def unprocessable_entity!(body \\ "", headers \\ %{}), do: new!(422, body, headers)
@doc """
  Creates a new response with status 423.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def locked(body \\ "", headers \\ %{}), do: new(423, body, headers)
@doc """
Creates a new response with status 423 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def locked!(body \\ "", headers \\ %{}), do: new!(423, body, headers)
@doc """
  Creates a new response with status 424.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def failed_dependency(body \\ "", headers \\ %{}), do: new(424, body, headers)
@doc """
Creates a new response with status 424 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def failed_dependency!(body \\ "", headers \\ %{}), do: new!(424, body, headers)
@doc """
  Creates a new response with status 426.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def upgrade_required(body \\ "", headers \\ %{}), do: new(426, body, headers)
@doc """
Creates a new response with status 426 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def upgrade_required!(body \\ "", headers \\ %{}), do: new!(426, body, headers)
@doc """
  Creates a new response with status 428.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def precondition_required(body \\ "", headers \\ %{}), do: new(428, body, headers)
@doc """
Creates a new response with status 428 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def precondition_required!(body \\ "", headers \\ %{}), do: new!(428, body, headers)
@doc """
  Creates a new response with status 429.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def too_many_requests(body \\ "", headers \\ %{}), do: new(429, body, headers)
@doc """
Creates a new response with status 429 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def too_many_requests!(body \\ "", headers \\ %{}), do: new!(429, body, headers)
@doc """
  Creates a new response with status 431.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def request_header_fields_too_large(body \\ "", headers \\ %{}), do: new(431, body, headers)
@doc """
Creates a new response with status 431 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def request_header_fields_too_large!(body \\ "", headers \\ %{}), do: new!(431, body, headers)
@doc """
  Returns a list with all available 5xx HTTP responses.
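
  For example, extracting just the status codes:

  ```elixir
  iex> FakeServer.Response.all_5xx() |> Enum.map(& &1.status)
  [500, 501, 502, 503, 504, 505, 506, 507, 510, 511]
  ```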
"""
def all_5xx do
[
internal_server_error!(),
not_implemented!(),
bad_gateway!(),
service_unavailable!(),
gateway_timeout!(),
http_version_not_supported!(),
variant_also_negotiates!(),
insufficient_storage!(),
not_extended!(),
network_authentication_required!()
]
end
@doc """
  Creates a new response with status 500.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def internal_server_error(body \\ "", headers \\ %{}), do: new(500, body, headers)
@doc """
Creates a new response with status 500 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def internal_server_error!(body \\ "", headers \\ %{}), do: new!(500, body, headers)
@doc """
  Creates a new response with status 501.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def not_implemented(body \\ "", headers \\ %{}), do: new(501, body, headers)
@doc """
Creates a new response with status 501 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def not_implemented!(body \\ "", headers \\ %{}), do: new!(501, body, headers)
@doc """
  Creates a new response with status 502.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def bad_gateway(body \\ "", headers \\ %{}), do: new(502, body, headers)
@doc """
Creates a new response with status 502 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def bad_gateway!(body \\ "", headers \\ %{}), do: new!(502, body, headers)
@doc """
  Creates a new response with status 503.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def service_unavailable(body \\ "", headers \\ %{}), do: new(503, body, headers)
@doc """
Creates a new response with status 503 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def service_unavailable!(body \\ "", headers \\ %{}), do: new!(503, body, headers)
@doc """
  Creates a new response with status 504.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def gateway_timeout(body \\ "", headers \\ %{}), do: new(504, body, headers)
@doc """
Creates a new response with status 504 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def gateway_timeout!(body \\ "", headers \\ %{}), do: new!(504, body, headers)
@doc """
  Creates a new response with status 505.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def http_version_not_supported(body \\ "", headers \\ %{}), do: new(505, body, headers)
@doc """
Creates a new response with status 505 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def http_version_not_supported!(body \\ "", headers \\ %{}), do: new!(505, body, headers)
@doc """
  Creates a new response with status 506.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def variant_also_negotiates(body \\ "", headers \\ %{}), do: new(506, body, headers)
@doc """
Creates a new response with status 506 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def variant_also_negotiates!(body \\ "", headers \\ %{}), do: new!(506, body, headers)
@doc """
  Creates a new response with status 507.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def insufficient_storage(body \\ "", headers \\ %{}), do: new(507, body, headers)
@doc """
Creates a new response with status 507 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def insufficient_storage!(body \\ "", headers \\ %{}), do: new!(507, body, headers)
@doc """
  Creates a new response with status 510.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def not_extended(body \\ "", headers \\ %{}), do: new(510, body, headers)
@doc """
Creates a new response with status 510 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def not_extended!(body \\ "", headers \\ %{}), do: new!(510, body, headers)
@doc """
  Creates a new response with status 511.
  Returns an `{:ok, response}` tuple on success and `{:error, reason}` when validation fails.
"""
def network_authentication_required(body \\ "", headers \\ %{}), do: new(511, body, headers)
@doc """
Creates a new response with status 511 and returns it.
Raises `FakeServer.Error` if the validation fails.
"""
def network_authentication_required!(body \\ "", headers \\ %{}), do: new!(511, body, headers)
@doc """
FakeServer default response. Used when there are no responses left to reply.
```
iex> FakeServer.Response.default()
{:ok,
%FakeServer.Response{
body: "{\"message\": \"This is a default response from FakeServer\"}",
headers: %{},
status: 200
}
}
```
"""
def default, do: new(200, ~s<{"message": "This is a default response from FakeServer"}>)
@doc """
Similar to `default/0`.
"""
def default!, do: new!(200, ~s<{"message": "This is a default response from FakeServer"}>)
defp allowed_status_codes() do
[
100, 101, 102, 103, 200, 201, 202,
203, 204, 205, 206, 300, 301, 302,
303, 304, 305, 306, 307, 308, 400,
401, 403, 404, 405, 406, 407, 408,
409, 410, 411, 412, 413, 414, 415,
417, 418, 422, 423, 424, 426, 428,
429, 431, 500, 501, 502, 503, 504,
505, 506, 507, 510, 511
]
end
defp ensure_body_format(%__MODULE__{body: body} = response) when is_bitstring(body), do: {:ok, response}
defp ensure_body_format(%__MODULE__{body: body} = response) when is_map(body) do
case Poison.encode(body) do
{:ok, body} -> {:ok, %__MODULE__{response | body: body}}
{:error, _} -> {:error, {body, "could not turn body map into json"}}
end
end
defp ensure_headers_keys(%__MODULE__{headers: headers} = response) do
    valid? =
      headers
      |> Map.keys()
      |> Enum.all?(&is_bitstring/1)
if valid?, do: {:ok, response}, else: {:error, {headers, "all header keys must be strings"}}
end
end
|
lib/fake_server/response.ex
| 0.923407
| 0.71716
|
response.ex
|
starcoder
|
defmodule Microdata.Error do
@errors %{
document: [:no_items]
}
@moduledoc """
`Microdata.Error` provides a generic error struct implementing the `Exception` behaviour and containing three keys: `type`, `reason`, and `metadata`.
- `type` is an atom classifying the general context the error exists in, such as `:document`.
- `reason` is an atom classifying the general problem, such as `:no_items`.
- `metadata` is a map containing any additional information useful for debugging the error, such as `%{input: "..."}`.
### Microdata Errors:
#{
@errors
|> Enum.flat_map(fn {type, reasons} ->
Enum.map(reasons, fn reason ->
"- `%Microdata.Error{type: #{inspect(type)}, reason: #{inspect(reason)}}`"
end)
end)
|> Enum.join("\n")
}
"""
@enforce_keys [:type, :reason]
defexception type: nil, reason: nil, metadata: %{}
@type type :: atom
@type reason :: atom
@type metadata :: %{any => any}
@type t :: %__MODULE__{
type: type,
reason: reason,
metadata: metadata
}
@doc """
Lists a mapping of error types to reasons for all possible Microdata errors.
"""
@spec list_errors() :: %{type => [reason]}
def list_errors(), do: @errors
@doc """
Creates a new `%Microdata.Error{}`.
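
  ## Example

      Microdata.Error.new(:document, :no_items, %{input: "<html/>"})
      #=> %Microdata.Error{type: :document, reason: :no_items, metadata: %{input: "<html/>"}}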
"""
@spec new(type, reason, metadata) :: t
def new(type, reason, metadata \\ %{}) do
%__MODULE__{type: type, reason: reason, metadata: metadata}
end
# Exception callbacks
@impl true
def exception(%{type: type, reason: reason} = info) do
metadata = Map.get(info, :metadata, %{})
new(type, reason, metadata)
end
@impl true
def message(%__MODULE__{type: type, reason: reason, metadata: metadata}) do
IO.iodata_to_binary([
"\n",
"\n Type: #{inspect(type)}",
"\n",
"\n Reason: #{inspect(reason)}",
render_metadata(metadata)
])
end
# Helpers
defp render_metadata(%{} = metadata) do
rendered = for {k, v} <- metadata, do: "\n #{k}: #{inspect(v)}"
case rendered do
[] -> []
_ -> ["\n\n Metadata:" | rendered]
end
end
end
|
lib/microdata/error.ex
| 0.900096
| 0.590986
|
error.ex
|
starcoder
|
defmodule AWS.Cloud9 do
@moduledoc """
AWS Cloud9
AWS Cloud9 is a collection of tools that you can use to code, build, run,
test, debug, and release software in the cloud.
For more information about AWS Cloud9, see the [AWS Cloud9 User
Guide](https://docs.aws.amazon.com/cloud9/latest/user-guide).
AWS Cloud9 supports these operations:
<ul> <li> `CreateEnvironmentEC2`: Creates an AWS Cloud9 development
environment, launches an Amazon EC2 instance, and then connects from the
instance to the environment.
</li> <li> `CreateEnvironmentMembership`: Adds an environment member to an
environment.
</li> <li> `DeleteEnvironment`: Deletes an environment. If an Amazon EC2
instance is connected to the environment, also terminates the instance.
</li> <li> `DeleteEnvironmentMembership`: Deletes an environment member
from an environment.
</li> <li> `DescribeEnvironmentMemberships`: Gets information about
environment members for an environment.
</li> <li> `DescribeEnvironments`: Gets information about environments.
</li> <li> `DescribeEnvironmentStatus`: Gets status information for an
environment.
</li> <li> `ListEnvironments`: Gets a list of environment identifiers.
</li> <li> `ListTagsForResource`: Gets the tags for an environment.
</li> <li> `TagResource`: Adds tags to an environment.
</li> <li> `UntagResource`: Removes tags from an environment.
</li> <li> `UpdateEnvironment`: Changes the settings of an existing
environment.
</li> <li> `UpdateEnvironmentMembership`: Changes the settings of an
existing environment member for an environment.
</li> </ul>
"""
@doc """
Creates an AWS Cloud9 development environment, launches an Amazon Elastic
Compute Cloud (Amazon EC2) instance, and then connects from the instance to
the environment.
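
  ## Example

  A sketch; the client construction and the input keys shown are assumptions:

      client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

      {:ok, result, _http_response} =
        AWS.Cloud9.create_environment_e_c2(client, %{
          "name" => "my-environment",
          "instanceType" => "t2.micro"
        })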
"""
def create_environment_e_c2(client, input, options \\ []) do
request(client, "CreateEnvironmentEC2", input, options)
end
@doc """
Adds an environment member to an AWS Cloud9 development environment.
"""
def create_environment_membership(client, input, options \\ []) do
request(client, "CreateEnvironmentMembership", input, options)
end
@doc """
Deletes an AWS Cloud9 development environment. If an Amazon EC2 instance is
connected to the environment, also terminates the instance.
"""
def delete_environment(client, input, options \\ []) do
request(client, "DeleteEnvironment", input, options)
end
@doc """
Deletes an environment member from an AWS Cloud9 development environment.
"""
def delete_environment_membership(client, input, options \\ []) do
request(client, "DeleteEnvironmentMembership", input, options)
end
@doc """
Gets information about environment members for an AWS Cloud9 development
environment.
"""
def describe_environment_memberships(client, input, options \\ []) do
request(client, "DescribeEnvironmentMemberships", input, options)
end
@doc """
Gets status information for an AWS Cloud9 development environment.
"""
def describe_environment_status(client, input, options \\ []) do
request(client, "DescribeEnvironmentStatus", input, options)
end
@doc """
Gets information about AWS Cloud9 development environments.
"""
def describe_environments(client, input, options \\ []) do
request(client, "DescribeEnvironments", input, options)
end
@doc """
Gets a list of AWS Cloud9 development environment identifiers.
"""
def list_environments(client, input, options \\ []) do
request(client, "ListEnvironments", input, options)
end
@doc """
Gets a list of the tags associated with an AWS Cloud9 development
environment.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Adds tags to an AWS Cloud9 development environment.
<important> Tags that you add to an AWS Cloud9 environment by using this
method will NOT be automatically propagated to underlying resources.
</important>
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes tags from an AWS Cloud9 development environment.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Changes the settings of an existing AWS Cloud9 development environment.
"""
def update_environment(client, input, options \\ []) do
request(client, "UpdateEnvironment", input, options)
end
@doc """
Changes the settings of an existing environment member for an AWS Cloud9
development environment.
"""
def update_environment_membership(client, input, options \\ []) do
request(client, "UpdateEnvironmentMembership", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "cloud9"}
host = build_host("cloud9", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AWSCloud9WorkspaceManagementService.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/cloud9.ex
| 0.832305
| 0.626453
|
cloud9.ex
|
starcoder
|
defmodule Soap do
@moduledoc """
  The SOAP client for Elixir, based on `HTTPoison` (for sending requests) and `SweetXml` (for XML parsing).
  Soap contains 5 main modules:
  * `Soap.Wsdl` - Builds a map of WSDL components. Can parse a raw WSDL file
  from an external URL or a local path. WSDLs prepared by this module are
  used for sending requests.
  * `Soap.Request` - Provides functionality for building and calling requests.
  Contains the `Soap.Request.Headers` and `Soap.Request.Params` submodules for
  building headers and building the body with parameter validation,
  respectively. This module is a wrapper over HTTPoison; it sends requests and
  handles the responses.
  * `Soap.Response` - Handles SOAP responses. It provides functionality for
  parsing an XML body and transforming it into a convenient structure. A
  structure from this module is returned with the necessary data after a
  request is sent.
  * `Soap.Xsd` - Has the same functionality as the `Soap.Wsdl` module, but for
  XSD files. It allows parsing XSD files from external resources or a local
  path and converting them to a map.
  * `Soap.Type` - Provides functionality for finding and parsing complex types
  from a raw XSD file. It is used by the library to validate parameters when
  building the request body.
The `Soap` module can be used to parse WSDL files:
```elixir
iex> Soap.init_model("https://git.io/vNCWd", :url)
{:ok, %{
complex_types: [...],
endpoint: "...",
messages: [...],
namespaces: %{...},
operations: [...],
schema_attributes: %{...},
soap_version: "x.x",
validation_types: %{...}
}
}
```
And send requests:
```elixir
iex> Soap.call(wsdl, action, params)
{:ok, %Soap.Response{}}
```
It's very common to use Soap in order to wrap APIs.
See `call/5` for more details on how to issue requests to SOAP services.
"""
alias Soap.{Request, Response, Wsdl}
@doc """
Initializes a WSDL model with a map of data parsed from the file.
Returns `{:ok, wsdl}`.
## Parameters
- `path`: Path to the WSDL file.
- `type`: Atom that represents the type of path for the WSDL file. Can be `:file`
or `:url`. Default: `:file`.
- `endpoint`: Endpoint to be used for the request. Defaults to the endpoint
specified in the WSDL file. Useful for (e.g.) sending a request to a mock
server during testing.
- `opts`: any options for `HTTPoison.Request` and the following parsing options:
* `:soap_version` - Specifies SOAP version for parsing.
* `:allow_empty_soap_actions` - Allows SOAP operations with an empty
`soapAction` attribute. This may be required for APIs that do not set a
`soapAction` for each operation.
* `:skip_type_imports` - Prevents fetching external XSDs for importing types.
## Examples
iex> {:ok, wsdl} = Soap.init_model("https://git.io/vNCWd", :url)
{:ok, %{...}}
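Parsing options go in the third argument (the local path shown is illustrative):
iex> Soap.init_model("priv/service.wsdl", :file, soap_version: "1.2")
{:ok, %{...}}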
"""
@spec init_model(String.t(), :file | :url, list()) :: {:ok, map()}
def init_model(path, type \\ :file, opts \\ [])
def init_model(path, :file, opts), do: Wsdl.parse_from_file(path, opts)
def init_model(path, :url, opts), do: Wsdl.parse_from_url(path, opts)
@doc """
Send a request to the SOAP server based on the passed WSDL file, action and parameters.
Returns `{:ok, %Soap.Response{}}` if the request is successful, `{:error, reason}` otherwise.
## Parameters
- `wsdl`: Wsdl model from `Soap.init_model/2` function.
- `action`: Soap action to be called. Use `Soap.operations/1` to get a list of available actions
- `params`: Parameters to build the body of a SOAP request.
- `headers`: Custom request headers.
- `opts`: HTTPoison options.
## Examples
iex> Soap.call(wsdl, action, params)
{:ok, %Soap.Response{}}
"""
@spec call(wsdl :: map(), operation :: String.t(), params :: map(), headers :: any(), opts :: any()) :: any()
def call(wsdl, operation, params, headers \\ [], opts \\ []) do
wsdl
|> validate_operation(operation)
|> Request.call(operation, params, headers, opts)
|> handle_response
end
@doc """
Returns a list of available actions of the passed WSDL.
## Parameters
- `wsdl`: Wsdl model from `Soap.init_model/2` function.
## Examples
iex> {:ok, wsdl} = Soap.init_model("https://git.io/vNCWd", :url)
iex> Soap.operations(wsdl)
["SendMessage", "SendMessageMultipleRecipients"]
"""
@spec operations(map()) :: nonempty_list(String.t())
def operations(wsdl) do
wsdl.operations
end
defp handle_response(
{:ok, %HTTPoison.Response{body: body, headers: headers, request_url: request_url, status_code: status_code}}
) do
{:ok, %Response{body: body, headers: headers, request_url: request_url, status_code: status_code}}
end
defp handle_response({:error, %HTTPoison.Error{reason: reason}}) do
{:error, reason}
end
defp validate_operation(wsdl, operation) do
case valid_operation?(wsdl, operation) do
false -> raise OperationError, operation
true -> wsdl
end
end
defp valid_operation?(wsdl, operation) do
Enum.any?(wsdl[:operations], &(&1[:name] == operation))
end
end
|
lib/soap.ex
| 0.895947
| 0.837686
|
soap.ex
|
starcoder
|
defmodule Kazan.Codegen.Apis do
@moduledoc false
# Macros for generating API clients from OAI specs.
import Kazan.Swagger, only: [swagger_to_op_map: 1]
alias Kazan.Codegen.Apis.{ApiId, Operation, Parameter}
require EEx
@doc """
Generates API client modules for all the operations defined in an OAPI spec.
This reads the provided file at compile time and uses it to generate functions
and their corresponding models.
The modules will be defined by the tags for each of the operations, and the
functions will be named using a snake_case version of the operationId.
Currently the operationId has some tag-related data embedded in it, which we
remove for the sake of brevity.
"""
defmacro from_spec(spec_file) do
operations =
File.read!(spec_file)
|> Poison.decode!()
|> swagger_to_op_map
|> Map.values()
|> Enum.flat_map(&duplicate_on_tags/1)
# Create the atoms for all the names of our operations.
:ok =
operations
|> Enum.map(fn op_desc -> op_desc["tag"] end)
|> Enum.uniq()
|> Enum.map(&ApiId.from_oai_tag/1)
|> Enum.each(&api_module(&1, unsafe: true))
operations = Enum.map(operations, &Operation.from_oai_desc/1)
api_groups = Enum.group_by(operations, fn op -> op.api_id end)
module_forms =
for {api_id, functions} <- api_groups do
function_forms = Enum.map(functions, &function_form/1)
module_doc = module_doc(api_id)
module_name = api_module(api_id)
quote do
defmodule unquote(module_name) do
@moduledoc unquote(module_doc)
unquote_splicing(function_forms)
end
end
end
quote do
@external_resource unquote(spec_file)
unquote_splicing(module_forms)
defp operation_descs do
unquote(Macro.escape(operations))
end
end
end
@doc """
Builds an api module name from an ApiId.
"""
@spec api_module(ApiId.t(), Keyword.t()) :: atom
def api_module(api_id, opts \\ []) do
components =
if api_id.version != nil do
[Kazan.Apis, api_id.group, api_id.version]
else
[Kazan.Apis, api_id.group]
end
if Keyword.get(opts, :unsafe, false) do
Module.concat(components)
else
try do
Module.safe_concat(components)
rescue
ArgumentError ->
nil
end
end
end
@doc """
Builds a function name from the operationId of an OAI operation.
We take the operation tag in here too, because the Kube OAI operations use IDs
like listCoreV1ConfigMapForAllNamespaces where the operation is on the core_v1
API. We don't want to have such large function names, so we try to strip the
API name out.
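For example, `function_name("listCoreV1ConfigMapForAllNamespaces", "core_v1")`
returns `:list_config_map_for_all_namespaces` (provided that atom already
exists, or `unsafe: true` is passed in `opts`).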
"""
@spec function_name(String.t(), String.t() | atom) :: atom
def function_name(operation_id, tag, opts \\ []) when is_binary(tag) do
string_name =
operation_id
|> String.replace(Macro.camelize(tag), "")
|> Macro.underscore()
if Keyword.get(opts, :unsafe, false) do
String.to_atom(string_name)
else
try do
String.to_existing_atom(string_name)
rescue
ArgumentError ->
nil
end
end
end
# Swagger tags are a list. There _appears_ to only be one tag per operation,
# but there could be more. We handle that by duplicating on tags.
# Once this function is finished, we will have a bunch of operations with a
# single tag.
@spec duplicate_on_tags(Map.t()) :: [Map.t()]
defp duplicate_on_tags(operation) do
for tag <- operation["tags"] do
operation |> Map.put("tag", tag) |> Map.delete("tags")
end
end
# Builds the quoted function form for an operation function.
@spec function_form(Operation.t()) :: term
defp function_form(operation) do
param_groups =
Enum.group_by(operation.parameters, fn param -> param.type end)
is_required = fn param -> param.required end
query_params = Map.get(param_groups, :query, [])
path_params =
param_groups |> Map.get(:path, []) |> sort_path_params(operation.path)
# The main arguments our function will take:
argument_params =
Map.get(param_groups, :body, []) ++
path_params ++ Enum.filter(query_params, is_required)
optional_params = Enum.reject(query_params, is_required)
arguments = argument_forms(argument_params, optional_params)
docs =
function_docs(
operation.operation_id,
operation.description,
argument_params,
optional_params,
operation.response_schema
)
param_unpacking =
if Enum.empty?(argument_params) do
quote do
%{}
end
else
argument_map_pairs =
for arg <- argument_params do
{arg.var_name, Macro.var(arg.var_name, __MODULE__)}
end
quote location: :keep do
%{unquote_splicing(argument_map_pairs)}
end
end
option_merging =
cond do
Enum.empty?(optional_params) ->
quote do
params
end
Enum.empty?(argument_params) ->
quote location: :keep do
Enum.into(options, %{})
end
:otherwise ->
quote location: :keep do
Map.merge(Enum.into(options, %{}), params)
end
end
transform_map =
for parameter <- operation.parameters, into: %{} do
{parameter.var_name, parameter.field_name}
end
bang_function_name =
String.to_atom(Atom.to_string(operation.function_name) <> "!")
argument_forms_in_call =
argument_call_forms(
argument_params,
optional_params
)
quote location: :keep do
@doc unquote(docs)
def unquote(operation.function_name)(unquote_splicing(arguments)) do
params = unquote(param_unpacking)
params = unquote(option_merging)
Kazan.Request.create(
unquote(operation.operation_id),
Kazan.Codegen.Apis.transform_request_parameters(
unquote(Macro.escape(transform_map)),
params
)
)
end
@doc unquote(docs)
def unquote(bang_function_name)(unquote_splicing(arguments)) do
rv =
unquote(operation.function_name)(
unquote_splicing(argument_forms_in_call)
)
case rv do
{:ok, result} ->
result
{:err, reason} ->
raise Kazan.BuildRequestError,
reason: reason,
operation: unquote(operation.function_name)
end
end
end
end
# Transforms a map of function arguments into a map of request parameters.
def transform_request_parameters(parameter_descs, parameters) do
for {k, v} <- parameters, into: %{} do
{parameter_descs[k], v}
end
end
# List of argument forms to go in function argument lists.
@spec argument_forms([Map.t()], [Map.t()]) :: [term]
defp argument_forms(argument_params, []) do
for param <- argument_params do
Macro.var(param.var_name, __MODULE__)
end
end
defp argument_forms(argument_params, _optional_params) do
argument_forms(argument_params, []) ++
[{:\\, [], [Macro.var(:options, __MODULE__), []]}]
end
# List of argument forms to go in the call to the base function from the bang function.
@spec argument_call_forms([Map.t()], [Map.t()]) :: [term]
defp argument_call_forms(argument_params, []) do
for param <- argument_params do
Macro.var(param.var_name, __MODULE__)
end
end
defp argument_call_forms(argument_params, _optional_params) do
argument_forms(argument_params, []) ++ [Macro.var(:options, __MODULE__)]
end
# The Kube API specs provide path parameters in an unintuitive order,
# so we sort them by the order they appear in the request path here.
@spec sort_path_params([Parameter.t()], String.t()) :: [Parameter.t()]
defp sort_path_params(parameters, path) do
Enum.sort(parameters, fn param1, param2 ->
loc1 = str_index("{#{param1.field_name}}", path)
loc2 = str_index("{#{param2.field_name}}", path)
loc1 <= loc2
end)
end
# I can't believe I'm having to implement this myself :/
defp str_index(needle, haystack) do
case String.split(haystack, needle, parts: 2) do
[left, _] -> String.length(left)
[_] -> nil
end
end
EEx.function_from_string(
:defp,
:function_docs,
"""
<%= if description do description end %>
OpenAPI Operation ID: `<%= operation_id %>`
<%= unless Enum.empty?(parameters) do %>
### Parameters
<%= for param <- parameters do %>
* `<%= param.var_name %>` - <%= param.description %><%= if param.schema do %>See `<%= doc_ref(param.schema) %>`. <% end %> <% end %>
<% end %>
<%= unless Enum.empty?(options) do %>
### Options
<%= for option <- options do %>
* `<%= option.var_name %>` - <%= option.description %>
<% end %>
<% end %>
<%= if response_schema do %>
### Response
See `<%= doc_ref(response_schema) %>`
<% end %>
""",
[:operation_id, :description, :parameters, :options, :response_schema]
)
defp module_doc(api_id) do
case api_id do
%{group: group, version: nil} ->
"""
Module for the #{group} API group.
This module contains functions that can be used to query the available
versions of the #{group} API in a k8s server. Each of these functions
will output a `Kazan.Request` suitable for passing to `Kazan.run`.
The submodules of this module provide implementations of each of those
versions.
"""
%{group: group, version: version} ->
"""
Contains functions for #{version} of the #{group} API group.
Each of these functions will output a `Kazan.Request` suitable for passing
to `Kazan.run`.
This module also contains struct submodules that can be sent & received
from this version of the #{group} API.
"""
end
end
# Strips the `Elixir.` prefix from an atom for use in documentation.
# Atoms will not be linked if they include the Elixir. prefix.
defp doc_ref(str) do
str |> Atom.to_string() |> String.replace(~r/^Elixir./, "")
end
end
|
lib/kazan/codegen/apis.ex
| 0.691081
| 0.40589
|
apis.ex
|
starcoder
|
defmodule StaffNotes.Accounts.Team do
@moduledoc """
A team is a collection of users within an organization that have the same permission level.
## Permission Level
There are three permission levels:
* `:owner`
* All permissions of `:write`
* Can invite users to the organization
* Can remove users from the organization
* If there is only one owner, they can delete the organization
* Can create or delete teams
* Can rename teams
* Can add or remove users from teams
* Can view list of users within all teams
* `:write`
* All permissions of `:read`
* Can create members
* Can create identities
* Can merge members
* Can create notes
* Can edit notes they have authored
* `:read`
* Can view notes
* Can view members
* Can view identities
* Can view list of teams within the organization
* Can view list of users within the organization
* Can view list of users within teams to which the user belongs
There can be multiple teams with the same permission level so that an organization can group
users however it wishes.
## Original
The original team is special:
* There can be only one original team
* It cannot stop being the original team
* It is created with `owner` permission level
* It cannot be deleted
* It cannot have its permission level changed
* If the original team contains one member, they are not allowed to leave the team
* If the original team contains one member, they are not allowed to leave the organization
This is done so that there will always be at least one organization member that is capable of
administrating the organization.
"""
use Ecto.Schema
import Ecto.Changeset
alias StaffNotes.Accounts
alias StaffNotes.Accounts.Organization
alias StaffNotes.Accounts.PermissionLevel
alias StaffNotes.Accounts.Team
alias StaffNotes.Accounts.User
alias StaffNotes.Ecto.Slug
@type t :: %__MODULE__{}
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
schema "teams" do
field(:name, Slug)
field(:permission, PermissionLevel)
field(:original, :boolean)
belongs_to(:organization, Organization)
many_to_many(:users, User, join_through: "teams_users", on_delete: :delete_all)
timestamps()
end
@doc false
def original_team_attrs, do: %{name: "owners", permission: :owner, original: true}
@doc """
Generates an `Ecto.Changeset` that is applicable to all database operations.
A more specialized changeset will implement further business rules and should be preferred.
"""
@spec changeset(t, %{}) :: Ecto.Changeset.t()
def changeset(%Team{} = team, attrs) do
team
|> cast(attrs, [:name, :permission, :original, :organization_id])
|> validate_required([:name, :permission, :original, :organization_id])
|> foreign_key_constraint(:organization_id)
end
@doc """
Creates an `Ecto.Changeset` suitable for creating a new team.
## Business rules
* Prevents creating an original team if one already exists in the organization
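## Examples
A typical call, assuming `org` is a persisted `Organization`:
%{name: "moderators", permission: :write, original: false}
|> Team.create_team_changeset(org)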
"""
@spec create_team_changeset(%{}, Organization.t()) :: Ecto.Changeset.t()
def create_team_changeset(attrs \\ %{}, %Organization{} = org) do
%Team{}
|> Map.put(:organization_id, org.id)
|> changeset(attrs)
|> validate_not_creating_second_original_team()
end
@doc """
Creates an `Ecto.Changeset` suitable for deleting a team.
## Business rules
* Prevents deleting an original team
"""
@spec delete_team_changeset(t) :: Ecto.Changeset.t()
def delete_team_changeset(%Team{} = team) do
team
|> changeset(%{})
|> validate_not_deleting_original_team()
end
@doc """
Creates an `Ecto.Changeset` suitable for updating a team.
## Business rules
* Prevents changing the value of the original field
* Prevents changing of an original team's permission level
"""
@spec update_team_changeset(t, %{}) :: Ecto.Changeset.t()
def update_team_changeset(%Team{} = team, attrs \\ %{}) do
team
|> changeset(attrs)
|> validate_original_field_unchanged()
|> validate_original_permission()
|> unique_constraint(:name, name: :teams_organization_id_name_index)
end
defimpl Phoenix.Param do
def to_param(%{name: name}) do
"#{name}"
end
end
defp is_original_team?(changeset) do
org_id = get_field(changeset, :organization_id)
original = Accounts.original_team(org_id)
if original do
team_id = get_field(changeset, :id)
team_id == original.id
else
false
end
end
defp validate_not_creating_second_original_team(changeset) do
original = get_field(changeset, :original)
org_id = get_field(changeset, :organization_id)
if original && Accounts.original_team(org_id) do
add_error(
changeset,
:original,
"An original team already exists in this organization"
)
else
changeset
end
end
defp validate_not_deleting_original_team(changeset) do
if is_original_team?(changeset) do
add_error(changeset, :original, "Cannot delete the original team")
else
changeset
end
end
defp validate_original_field_unchanged(changeset) do
change = get_change(changeset, :original)
do_validate_original_field_unchanged(changeset, is_original_team?(changeset), change)
end
defp do_validate_original_field_unchanged(changeset, true, true), do: changeset
defp do_validate_original_field_unchanged(changeset, false, false), do: changeset
defp do_validate_original_field_unchanged(changeset, _, nil), do: changeset
defp do_validate_original_field_unchanged(changeset, _, _) do
add_error(changeset, :original, "A team's original field cannot be changed")
end
defp validate_original_permission(changeset) do
do_validate_original_permission(changeset, is_original_team?(changeset))
end
defp do_validate_original_permission(changeset, true) do
case get_field(changeset, :permission) do
:owner ->
changeset
_ ->
add_error(
changeset,
:permission,
"Cannot change the permission level of the original team"
)
end
end
defp do_validate_original_permission(changeset, false), do: changeset
end
|
lib/staff_notes/accounts/team.ex
| 0.778902
| 0.475484
|
team.ex
|
starcoder
|
defmodule MeshxNode.Default do
@moduledoc """
Default registration parameters for the "node service" and upstream node connections with the service mesh adapter.
"""
@doc """
Returns the service `params` required by `c:Meshx.ServiceMesh.start/4` as its first argument.
When a node is made distributed using `Node.start/3` or `:net_kernel.start/1`, a special downstream "node service" is registered with the `:mesh_adapter` service mesh adapter using the `c:Meshx.ServiceMesh.start/4` callback. The `service_params/2` return value is passed as the first argument to this callback.
Starting a node with `Node.start(:mynode@myhost)` registers the "node service" with `params` set to `{"mynode@myhost", "mynode@myhost"}`.
The function can be overridden with the `:service_params` config option.
"""
@spec service_params(name :: atom(), host :: nonempty_charlist()) :: {node :: String.t(), node :: String.t()}
def service_params(name, host) do
node = "#{name}@#{host}"
{node, node}
end
@doc """
Returns the upstream `params` required by `c:Meshx.ServiceMesh.connect/3` as its first argument.
When a connection to another node is requested via `Node.connect/1` or `:net_kernel.connect_node/1`, `MeshxNode` asks the service mesh adapter to prepare a mesh upstream endpoint associated with the other node's "node service" by running `c:Meshx.ServiceMesh.connect/3`. The `upstream_params/1` return value is passed as the first argument to this callback.
The function can be overridden with the `:upstream_params` config option.
"""
@spec upstream_params(node :: atom()) :: node :: atom()
def upstream_params(node), do: node
@doc """
Returns the sidecar proxy service name used to register upstream connections to other nodes' "node services".
The return value is passed as the third argument to `c:Meshx.ServiceMesh.connect/3`.
Can be overridden with the `:upstream_proxy` config option.
"""
@spec upstream_proxy(node :: atom(), my_node :: atom()) :: {my_node :: atom(), my_node :: atom()}
def upstream_proxy(_node, my_node), do: {my_node, my_node}
end
|
lib/default.ex
| 0.904716
| 0.55929
|
default.ex
|
starcoder
|
defmodule Expt.Renderer do
alias Expt.{Renderer, Camera, Scene, Ray, Material, Intersection, Const}
use Expt.Vector
def render_seq(%Scene{} = scene) do
%Scene{
samples: samples,
supersamples: supersamples,
camera: %Camera{
width: width,
height: height,
position: pos,
screen_x: scr_x,
screen_y: scr_y,
screen_center: scr_c
}
} = scene
for y <- 0..(height-1) do
render_line(scene, width, height, supersamples, samples, y, scr_c, scr_x, scr_y, pos)
end
end
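# Parallel variant of render_seq/1: each scanline is rendered in its own
# process, and the image is reassembled in order by matching on the line
# index in `receive`.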
def render(%Scene{} = scene) do
%Scene{
samples: samples,
supersamples: supersamples,
camera: %Camera{
width: width,
height: height,
position: pos,
screen_x: scr_x,
screen_y: scr_y,
screen_center: scr_c
}
} = scene
scene_renderer = self()
for y <- 0..(height-1) do
spawn fn ->
rendered_line = render_line(scene, width, height, supersamples, samples, y, scr_c, scr_x, scr_y, pos)
send scene_renderer, {(height - y - 1), rendered_line}
end
end
for y <- 0..(height-1) do
receive do
{^y, line} -> line
end
end
|> List.flatten
end
def render_line(scene, w, h, ss, s, y, scr_c, scr_x, scr_y, pos) do
for x <- 0..(w-1) do
for sy <- 0..(ss-1),
sx <- 0..(ss-1),
_ <- 0..(s-1) do
rate = 1.0 / ss
r1 = sx * rate + rate / 2.0
r2 = sy * rate + rate / 2.0
scr_p = scr_c +
scr_x * ((r1 + x) / w - 0.5) +
scr_y * ((r2 + y) / h - 0.5)
ray = Ray.create(pos, normalize(scr_p - pos))
Renderer.radiance(scene, ray, Const.white, true, 0) / s / (ss*ss)
end
|> Enum.reduce(Const.black, fn(radiance, acc) -> acc + radiance end)
end
end
def radiance(scene, %Ray{} = ray, weight, is_direct, depth) do
case Scene.intersect(scene, ray) do
{:ng, _} -> Const.black
{:ok, %Intersection{} = intersection} ->
{:ok, %{material: %Material{} = mtl}} = Enum.fetch(scene.objects, intersection.id)
o_n = orienting_normal(ray.dir, intersection.normal)
direct_light(is_direct, mtl.emission, weight) +
case russian_roulette(mtl.color, depth) do
{:ng, _} -> Const.black
{:ok, rr_prob} ->
case mtl.type do
"Diffuse" ->
diffuse(scene, o_n, intersection, mtl, rr_prob, weight, depth) +
next_event_estimation(scene, intersection, mtl.color, weight, o_n)
"Specular" ->
specular(scene, ray.dir, intersection, mtl, rr_prob, weight, depth)
"Refraction" ->
refraction(scene, o_n, ray.dir, intersection, mtl, rr_prob, weight, depth)
end
end
end
end
def direct_light(is_direct, emission, weight) do
if is_direct, do: weight * emission, else: Const.black
end
def orienting_normal(d, n) do
if (dot(n, d) < 0.0), do: n, else: -1.0*n
end
def russian_roulette(color, depth) do
rr_prob = (color |> Tuple.to_list |> Enum.max)
if depth > Const.max_depth do
if :rand.uniform >= rr_prob do
{:ng, nil}
else
{:ok, rr_prob}
end
else
{:ok, 1.0}
end
end
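# Next event estimation: sample a point on a light source directly and add its
# contribution when the shadow ray reaches that light unoccluded. Hits on a
# light itself return black to avoid double counting with direct_light/3.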
def next_event_estimation(scene, intersection, color, weight, orienting_n) do
if Enum.member?(scene.light_id, intersection.id) do
Const.black
else
{light_pos, light_pdf, light_id} = Scene.sample_light_surface(scene)
light_dir = light_pos - intersection.position
dist_sq = dot(light_dir, light_dir)
nlight_dir = normalize(light_dir)
shadow_ray = Ray.create(intersection.position, nlight_dir)
case Scene.intersect(scene, shadow_ray) do
{:ok, %Intersection{
normal: light_n,
id: ^light_id
}} ->
{:ok, light} = Enum.fetch(scene.objects, light_id)
dot1 = dot(orienting_n, nlight_dir) |> abs
dot2 = dot(light_n, nlight_dir * -1.0) |> abs
g = dot1 * dot2 / dist_sq
weight * light.material.emission * (color / :math.pi) * g / light_pdf
_ -> Const.black
end
end
end
def diffuse(scene, o_n, intersection, mtl, rr_prob, weight, depth) do
new_ray = cos_weighted_sample(intersection, get_onb(o_n))
new_weight = weight * mtl.color / rr_prob
Renderer.radiance(scene, new_ray, new_weight, false, depth+1)
end
def specular(scene, dir, intersection, mtl, rr_prob, weight, depth) do
reflec = get_reflect(intersection, dir)
new_weight = weight * mtl.color / rr_prob
Renderer.radiance(scene, reflec, new_weight, true, depth+1)
end
def refraction(scene, o_n, dir, intersection, mtl, rr_prob, weight, depth) do
reflec = get_reflect(intersection, dir)
into = dot(intersection.normal, o_n) > 0.0
# Snell's law
nc = 1.0
nt = mtl.ior
nnt = if into, do: nc / nt, else: nt / nc
ddn = dot(dir, o_n)
cos2t = 1.0 - nnt*nnt * (1.0 - ddn*ddn)
if cos2t < 0.0 do
new_weight = weight * mtl.color / rr_prob
Renderer.radiance(scene, reflec, new_weight, true, depth+1)
else
refrac = get_refract(intersection, dir, nnt, into, ddn, cos2t)
# Schlick's Fresnel approximation
al = nt - nc
be = nt + nc
r0 = (al*al) / (be*be)
th = 1.0 - (if into, do: -ddn, else: dot(refrac.dir, o_n * -1.0))
re = r0 + (1.0 - r0) * :math.pow(th, 5.0)
nnt2 = :math.pow((if into, do: nc / nt, else: nt / nc), 2.0)
tr = (1.0 - re) * nnt2
prob = 0.25 + 0.5 * re
if :rand.uniform < prob do
new_weight = weight * mtl.color * re / prob / rr_prob
Renderer.radiance(scene, reflec, new_weight, true, depth+1)
else
new_weight = weight * mtl.color * tr / (1.0-prob) / rr_prob
Renderer.radiance(scene, refrac, new_weight, true, depth+1)
end
end
end
def get_onb(normal) do
w = normal
u =
if abs(elem(w,0)) > Const.eps do
{0.0, 1.0, 0.0}
else
{1.0, 0.0, 0.0}
end
|> cross(w)
|> normalize
v = w |> cross(u)
%{w: w, u: u, v: v}
end
def cos_weighted_sample(intersection, onb) do
r1 = 2 * :math.pi * :rand.uniform
r2 = :rand.uniform
r2s = :math.sqrt(r2)
Ray.create(
intersection.position,
normalize(
onb.u * :math.cos(r1) * r2s +
onb.v * :math.sin(r1) * r2s +
onb.w * :math.sqrt(1.0 - r2)))
end
def get_reflect(intersection, dir) do
Ray.create(
intersection.position,
dir - intersection.normal * 2.0 * dot(intersection.normal, dir))
end
def get_refract(intersection, dir, nnt, into, ddn, cos2t) do
Ray.create(
intersection.position,
normalize(
dir * nnt -
intersection.normal * (if into, do: 1.0, else: -1.0) *
(ddn * nnt + :math.sqrt(cos2t))
))
end
end
|
lib/Expt/Renderer.ex
| 0.647352
| 0.465448
|
Renderer.ex
|
starcoder
|
defmodule Topo.Cleaner do
@moduledoc false
import Topo.Util
@type geometry ::
{number, number}
| %{type: String.t(), coordinates: list}
| %Geo.Point{}
| %Geo.MultiPoint{}
| %Geo.LineString{}
| %Geo.MultiLineString{}
| %Geo.Polygon{}
| %Geo.MultiPolygon{}
@spec clean(geometry) :: geometry
def clean({x, y}), do: %Geo.Point{coordinates: {x, y}}
def clean(%Geo.LineString{coordinates: a}) do
%Geo.LineString{coordinates: do_clean_line(a)}
end
def clean(%Geo.Polygon{} = a) do
%Geo.Polygon{coordinates: Enum.map(a.coordinates, &do_clean_ring/1)}
end
def clean(%Geo.MultiLineString{} = a) do
%Geo.MultiLineString{coordinates: Enum.map(a.coordinates, &do_clean_line/1)}
end
def clean(%Geo.MultiPolygon{} = a) do
%Geo.MultiPolygon{
coordinates:
Enum.map(a.coordinates, fn poly ->
Enum.map(poly, &do_clean_ring/1)
end)
}
end
def clean(%{type: "Point", coordinates: coords}), do: clean(%Geo.Point{coordinates: coords})
def clean(%{type: "MultiPoint", coordinates: coords}),
do: clean(%Geo.MultiPoint{coordinates: coords})
def clean(%{type: "LineString", coordinates: coords}),
do: clean(%Geo.LineString{coordinates: coords})
def clean(%{type: "MultiLineString", coordinates: coords}),
do: clean(%Geo.MultiLineString{coordinates: coords})
def clean(%{type: "Polygon", coordinates: coords}), do: clean(%Geo.Polygon{coordinates: coords})
def clean(%{type: "MultiPolygon", coordinates: coords}),
do: clean(%Geo.MultiPolygon{coordinates: coords})
def clean(a), do: a
defp do_clean_line(line) do
if List.first(line) == List.last(line) do
do_clean_ring(line)
else
line |> Enum.dedup() |> assert_no_collinear
end
end
defp do_clean_ring(ring) do
ring
|> Enum.dedup()
|> assert_closed
|> assert_direction
|> assert_no_collinear
|> assert_no_collinear_over_closure
end
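# Proportional to the signed area of the ring: a fan triangulation from the
# first vertex whose cross products are summed (the shoelace formula, up to a
# constant factor). Positive for counter-clockwise rings.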
defp area([a, b, c, d | rest]) do
cross(a, b, c) + area([a, c, d | rest])
end
defp area(_), do: 0
defp assert_closed([a | rest]) do
case List.last(rest) do
^a -> [a | rest]
_ -> [a | rest] ++ [a]
end
end
defp assert_direction(ring) do
ring
|> area
|> reverse_if_negative_area(ring)
end
defp reverse_if_negative_area(area, ring) when area >= 0, do: ring
defp reverse_if_negative_area(_, ring), do: Enum.reverse(ring)
defp assert_no_collinear_over_closure(ring) when length(ring) < 4, do: ring
defp assert_no_collinear_over_closure([b, c | rest]) do
a = Enum.at(rest, -2)
if collinear?(a, c, b) && between?(a, c, b) do
[c | Enum.drop(rest, -1)] ++ [c]
else
[b, c | rest]
end
end
end
|
lib/topo/cleaner.ex
| 0.794225
| 0.498901
|
cleaner.ex
|
starcoder
|
defmodule AtomTweaksWeb do
@moduledoc """
A module that keeps using definitions for controllers, views and so on.
This can be used in your application as:
```
use AtomTweaksWeb, :controller
use AtomTweaksWeb, :view
```
The definitions below will be executed for every view, controller, etc., so keep them short and
clean, focused on imports, uses and aliases.
Do NOT define functions inside the quoted expressions below.
"""
@doc """
Imports the common code for controllers.
"""
def controller do
quote do
use Phoenix.Controller, namespace: AtomTweaksWeb
alias AtomTweaks.Repo
alias AtomTweaksWeb.Router.Helpers, as: Routes
import Ecto
import Ecto.Query
import AtomTweaksWeb.ApiHelpers
import AtomTweaksWeb.ControllerHelpers
import AtomTweaksWeb.Gettext
import AtomTweaksWeb.PlugHelpers
end
end
@doc """
Imports the common code for views.
"""
def view do
quote do
use Phoenix.View,
root: "lib/atom_tweaks_web/templates",
namespace: AtomTweaksWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
alias AtomTweaksWeb.Router.Helpers, as: Routes
# Built-in view helpers
import AtomTweaksWeb.ErrorHelpers
import AtomTweaksWeb.Gettext
# Project-specific view helpers
import PhoenixOcticons
import AtomTweaksWeb.FormHelpers
import AtomTweaksWeb.LinkHelpers
import AtomTweaksWeb.PrimerHelpers
import AtomTweaksWeb.OcticonHelpers
import AtomTweaksWeb.RenderHelpers
import AtomTweaksWeb.TimeHelpers
end
end
@doc """
Imports the common code for routers.
"""
def router do
quote do
use Phoenix.Router
import AtomTweaksWeb.PlugHelpers
end
end
@doc """
Imports the common code for channels.
"""
def channel do
quote do
use Phoenix.Channel
alias AtomTweaks.Repo
import Ecto
import Ecto.Query
import AtomTweaksWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
|
lib/atom_tweaks_web.ex
| 0.712932
| 0.604165
|
atom_tweaks_web.ex
|
starcoder
|
defmodule AWS.Athena do
@moduledoc """
Amazon Athena is an interactive query service that lets you use standard SQL to
analyze data directly in Amazon S3.
You can point Athena at your data in Amazon S3 and run ad-hoc queries and get
results in seconds. Athena is serverless, so there is no infrastructure to set
up or manage. You pay only for the queries you run. Athena scales
automatically—executing queries in parallel—so results are fast, even with large
datasets and complex queries. For more information, see [What is Amazon Athena](http://docs.aws.amazon.com/athena/latest/ug/what-is.html) in the *Amazon
Athena User Guide*.
If you connect to Athena using the JDBC driver, use version 1.1.0 of the driver
or later with the Amazon Athena API. Earlier version drivers do not support the
API. For more information and to download the driver, see [Accessing Amazon Athena with
JDBC](https://docs.aws.amazon.com/athena/latest/ug/connect-with-jdbc.html).
For code samples using the AWS SDK for Java, see [Examples and Code Samples](https://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in the
*Amazon Athena User Guide*.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-05-18",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "athena",
global?: false,
protocol: "json",
service_id: "Athena",
signature_version: "v4",
signing_name: "athena",
target_prefix: "AmazonAthena"
}
end
@doc """
Returns the details of a single named query or a list of up to 50 queries, which
you provide as an array of query ID strings.
Requires you to have access to the workgroup in which the queries were saved.
Use `ListNamedQueriesInput` to get the list of named query IDs in the specified
workgroup. If information could not be retrieved for a submitted query ID,
information about the query ID submitted is listed under
`UnprocessedNamedQueryId`. Named queries differ from executed queries. Use
`BatchGetQueryExecutionInput` to get details about each unique query execution,
and `ListQueryExecutionsInput` to get a list of query execution IDs.
"""
def batch_get_named_query(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "BatchGetNamedQuery", input, options)
end
@doc """
Returns the details of a single query execution or a list of up to 50 query
executions, which you provide as an array of query execution ID strings.
Requires you to have access to the workgroup in which the queries ran. To get a
list of query execution IDs, use `ListQueryExecutionsInput$WorkGroup`. Query
executions differ from named (saved) queries. Use `BatchGetNamedQueryInput` to
get details about named queries.
"""
def batch_get_query_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "BatchGetQueryExecution", input, options)
end
@doc """
Creates (registers) a data catalog with the specified name and properties.
Catalogs created are visible to all users of the same AWS account.
"""
def create_data_catalog(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDataCatalog", input, options)
end
@doc """
Creates a named query in the specified workgroup.
Requires that you have access to the workgroup.
For code samples using the AWS SDK for Java, see [Examples and Code Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in the
*Amazon Athena User Guide*.
"""
def create_named_query(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateNamedQuery", input, options)
end
@doc """
Creates a prepared statement for use with SQL queries in Athena.
"""
def create_prepared_statement(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePreparedStatement", input, options)
end
@doc """
Creates a workgroup with the specified name.
"""
def create_work_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateWorkGroup", input, options)
end
@doc """
Deletes a data catalog.
"""
def delete_data_catalog(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDataCatalog", input, options)
end
@doc """
Deletes the named query if you have access to the workgroup in which the query
was saved.
For code samples using the AWS SDK for Java, see [Examples and Code Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in the
*Amazon Athena User Guide*.
"""
def delete_named_query(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteNamedQuery", input, options)
end
@doc """
Deletes the prepared statement with the specified name from the specified
workgroup.
"""
def delete_prepared_statement(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePreparedStatement", input, options)
end
@doc """
Deletes the workgroup with the specified name.
The primary workgroup cannot be deleted.
"""
def delete_work_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteWorkGroup", input, options)
end
@doc """
Returns the specified data catalog.
"""
def get_data_catalog(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetDataCatalog", input, options)
end
@doc """
Returns a database object for the specified database and data catalog.
"""
def get_database(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetDatabase", input, options)
end
@doc """
Returns information about a single query.
Requires that you have access to the workgroup in which the query was saved.
"""
def get_named_query(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetNamedQuery", input, options)
end
@doc """
Retrieves the prepared statement with the specified name from the specified
workgroup.
"""
def get_prepared_statement(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetPreparedStatement", input, options)
end
@doc """
Returns information about a single execution of a query if you have access to
the workgroup in which the query ran.
Each time a query executes, information about the query execution is saved with
a unique ID.
"""
def get_query_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetQueryExecution", input, options)
end
@doc """
Streams the results of a single query execution specified by `QueryExecutionId`
from the Athena query results location in Amazon S3.
For more information, see [Query Results](https://docs.aws.amazon.com/athena/latest/ug/querying.html) in the
*Amazon Athena User Guide*. This request does not execute the query but returns
results. Use `StartQueryExecution` to run a query.
To stream query results successfully, the IAM principal with permission to call
`GetQueryResults` also must have permissions to the Amazon S3 `GetObject` action
for the Athena query results location.
IAM principals with permission to the Amazon S3 `GetObject` action for the query
results location are able to retrieve query results from Amazon S3 even if
permission to the `GetQueryResults` action is denied. To restrict user or role
access, ensure that Amazon S3 permissions to the Athena query location are
denied.
"""
def get_query_results(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetQueryResults", input, options)
end
@doc """
Returns table metadata for the specified catalog, database, and table.
"""
def get_table_metadata(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetTableMetadata", input, options)
end
@doc """
Returns information about the workgroup with the specified name.
"""
def get_work_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetWorkGroup", input, options)
end
@doc """
Lists the data catalogs in the current AWS account.
"""
def list_data_catalogs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDataCatalogs", input, options)
end
@doc """
Lists the databases in the specified data catalog.
"""
def list_databases(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatabases", input, options)
end
@doc """
Returns a list of engine versions that are available to choose from, including
the Auto option.
"""
def list_engine_versions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListEngineVersions", input, options)
end
@doc """
Provides a list of available query IDs only for queries saved in the specified
workgroup.
Requires that you have access to the specified workgroup. If a workgroup is not
specified, lists the saved queries for the primary workgroup.
For code samples using the AWS SDK for Java, see [Examples and Code Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in the
*Amazon Athena User Guide*.
"""
def list_named_queries(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListNamedQueries", input, options)
end
@doc """
Lists the prepared statements in the specified workgroup.
"""
def list_prepared_statements(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPreparedStatements", input, options)
end
@doc """
Provides a list of available query execution IDs for the queries in the
specified workgroup.
If a workgroup is not specified, returns a list of query execution IDs for the
primary workgroup. Requires you to have access to the workgroup in which the
queries ran.
For code samples using the AWS SDK for Java, see [Examples and Code Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in the
*Amazon Athena User Guide*.
"""
def list_query_executions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListQueryExecutions", input, options)
end
@doc """
Lists the metadata for the tables in the specified data catalog database.
"""
def list_table_metadata(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTableMetadata", input, options)
end
@doc """
Lists the tags associated with an Athena workgroup or data catalog resource.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Lists available workgroups for the account.
"""
def list_work_groups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListWorkGroups", input, options)
end
@doc """
Runs the SQL query statements contained in the `Query`.
Requires you to have access to the workgroup in which the query ran. Running
queries against an external catalog requires `GetDataCatalog` permission to the
catalog. For code samples using the AWS SDK for Java, see [Examples and Code Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in the
*Amazon Athena User Guide*.
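## Example
An illustrative call, where `client` is an `AWS.Client` struct and the input
keys follow the Athena `StartQueryExecution` API:
AWS.Athena.start_query_execution(client, %{
  "QueryString" => "SELECT 1",
  "WorkGroup" => "primary"
})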
"""
def start_query_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartQueryExecution", input, options)
end
@doc """
Stops a query execution.
Requires you to have access to the workgroup in which the query ran.
For code samples using the AWS SDK for Java, see [Examples and Code Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in the
*Amazon Athena User Guide*.
"""
def stop_query_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopQueryExecution", input, options)
end
@doc """
Adds one or more tags to an Athena resource.
A tag is a label that you assign to a resource. In Athena, a resource can be a
workgroup or data catalog. Each tag consists of a key and an optional value,
both of which you define. For example, you can use tags to categorize Athena
workgroups or data catalogs by purpose, owner, or environment. Use a consistent
set of tag keys to make it easier to search and filter workgroups or data
catalogs in your account. For best practices, see [Tagging Best Practices](https://aws.amazon.com/answers/account-management/aws-tagging-strategies/).
Tag keys can be from 1 to 128 UTF-8 Unicode characters, and tag values can be
from 0 to 256 UTF-8 Unicode characters. Tags can use letters and numbers
representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys
and values are case-sensitive. Tag keys must be unique per resource. If you
specify more than one tag, separate them by commas.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Removes one or more tags from a data catalog or workgroup resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Updates the data catalog that has the specified name.
"""
def update_data_catalog(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateDataCatalog", input, options)
end
@doc """
Updates a prepared statement.
"""
def update_prepared_statement(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdatePreparedStatement", input, options)
end
@doc """
Updates the workgroup with the specified name.
The workgroup's name cannot be changed.
"""
def update_work_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateWorkGroup", input, options)
end
end
|
lib/aws/generated/athena.ex
| 0.916316
| 0.595816
|
athena.ex
|
starcoder
|
defmodule Hound.Metadata do
@moduledoc """
Metadata allows passing and extracting custom data through the user agent string.
This can be useful if you need to identify sessions.
The keys and values must be serializable using `:erlang.term_to_binary/1`.
## Examples
You can start a session using metadata by doing the following:
Hound.start_session(metadata: %{pid: self()})
If you need to retrieve the metadata, you simply need to use
`Hound.Metadata.extract/1` on the user agent string. Supposing you are using Plug:
user_agent = conn |> get_req_header("user-agent") |> List.first
metadata = Hound.Metadata.extract(user_agent)
assert %{pid: pid} = metadata
# you can use your pid here
"""
@metadata_prefix "BeamMetadata"
@extract_regexp ~r{#{@metadata_prefix} \((.*?)\)}
@doc """
Appends the metadata to the `user_agent` string.
"""
@spec append(String.t(), nil | map | String.t()) :: String.t()
def append(user_agent, nil), do: user_agent
def append(user_agent, metadata) when is_map(metadata) or is_list(metadata) do
append(user_agent, format(metadata))
end
def append(user_agent, metadata) when is_binary(metadata) do
"#{user_agent}/#{metadata}"
end
@doc """
Formats metadata into a valid user agent fragment that can be
appended to the browser user agent string.
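## Examples
The Base64 payload varies with the encoded term, so it is elided here:
Hound.Metadata.format(%{pid: self()})
#=> "BeamMetadata (...)"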
"""
@spec format(map | Keyword.t()) :: String.t()
def format(metadata) do
encoded = {:v1, metadata} |> :erlang.term_to_binary() |> Base.url_encode64()
"#{@metadata_prefix} (#{encoded})"
end
@doc """
Extracts and parses the metadata contained in a user agent string.
If the user agent does not contain any metadata, an empty map is returned.
"""
@spec extract(String.t()) :: map
def extract(str) do
ua_last_part = str |> String.split("/") |> List.last()
case Regex.run(@extract_regexp, ua_last_part) do
[_, metadata] -> parse(metadata)
_ -> %{}
end
end
defp parse(encoded_metadata) do
encoded_metadata
|> Base.url_decode64!()
|> :erlang.binary_to_term()
|> case do
{:v1, metadata} -> metadata
_ -> raise Hound.InvalidMetadataError, value: encoded_metadata
end
end
end
|
lib/hound/metadata.ex
| 0.82011
| 0.551393
|
metadata.ex
|
starcoder
|
defmodule DTask.TUI.Views.MainView do
@moduledoc """
## [mode: :table_only]
```
*==============================================================================*
| <Top Bar> |
*==============================================================================*
| |
| <Extra Bar> [Optional] |
| |
*==============================================================================* ─────
| +-|<table>|----------------------------------------------------------------+ | ↑
| | | | │
| | | | │
| | | | │
| | | | │
| | | | │
| | | | │
| | | | │ main
| | | | │
| | | | │
| | | | │
| | | | │
| | | | │
| | | | │
| +--------------------------------------------------------------------------+ | ↓
*==============================================================================* ─────
| <Bottom Bar> |
*==============================================================================*
```
## [mode: {:split_horizontal, ratio}]
```
*==============================================================================*
| <Top Bar> |
*==============================================================================*
| |
| <Extra Bar> [Optional] |
| |
*==============================================================================* ─────
| +-|<table>|----------------------------------------------------------------+ | ↑
| | | | │
| | | | │
| | | | │
| | | | │
| | | | │
| | | | │
| | | | │ main
| +--------------------------------------------------------------------------+ | │
| +-|<details>|--------------------------------------------------------------+ | │
| | | | │
| | | | │
| | | | │
| | | | │
| +--------------------------------------------------------------------------+ | ↓
*==============================================================================* ─────
| <Bottom Bar> |
*==============================================================================*
```
## [mode: {:split_vertical, ratio}]
```
*==============================================================================*
| <Top Bar> |
*==============================================================================*
| |
| <Extra Bar> [Optional] |
| |
*==============================================================================* ─────
| +-|<table>|-------------------------------------+ +-|<details>|------------+ | ↑
| | | | | | │
| +-----------------------------------------------+ | | | │
| | | | | | │
| +-----------------------------------------------+ | | | │
| | | | | | │
| +-----------------------------------------------+ | | | │
| | | | | | │ main
| +-----------------------------------------------+ | | | │
| | | | | | │
| +-----------------------------------------------+ | | | │
| | | | | | │
| +-----------------------------------------------+ | | | │
| | | | | | │
| +-----------------------------------------------+ +------------------------+ | ↓
*==============================================================================* ─────
| <Bottom Bar> |
*==============================================================================*
```
"""
alias DTask.TUI
alias DTask.TUI.Views
import Ratatouille.View
@behaviour DTask.TUI.Render
@grid_size 12
@render_top_bar Views.TopBar
@height_top_bar 1
@render_bottom_bar Views.TabsBar
@height_bottom_bar 1
@render_extra Views.HelpPanel
@height_extra &Views.HelpPanel.height/1
@const_table_height 5
@impl true
@spec render(TUI.state) :: Element.t()
def render(state) do
view_opts = [
top_bar: @render_top_bar.render(state),
bottom_bar: @render_bottom_bar.render(state)
]
extra = if state.ui.show_help,
do: [
row do
column(size: @grid_size) do
@render_extra.render(state)
end
end
],
else: []
main = case {state.ui.layout, state.ui.tab.render_side} do
{layout, render_side} when layout == :table_only or is_nil(render_side) -> [
row do
column(size: @grid_size) do
state.ui.tab.render_main.render(state)
end
end
]
{{:split_horizontal, _}, render_side} -> [
row do
column(size: @grid_size) do
state.ui.tab.render_main.render(state)
end
end,
row do
column(size: @grid_size) do
render_side.render(state)
end
end
]
{{:split_vertical, {size_left, size_right}}, render_side} -> [
row do
column size: size_left do
state.ui.tab.render_main.render(state)
end
column size: size_right do
render_side.render(state)
end
end
]
end
overlays = for overlay <- state.ui.overlays do
padding = if is_function(overlay.padding, 1),
do: overlay.padding.(state),
else: overlay.padding
overlay(padding: padding) do
overlay.render.render(state)
end
end
view(view_opts, extra ++ main ++ Enum.reverse(overlays))
end
@spec const_height(TUI.state) :: non_neg_integer
def const_height(state) do
height0 = @height_top_bar + @height_bottom_bar
if state.ui.show_help, do: height0 + @height_extra.(state), else: height0
end
@spec main_height(TUI.state) :: pos_integer
def main_height(state) do
state.ui.window.height - const_height(state)
end
@spec table_height(TUI.state) :: pos_integer
def table_height(state) do
table_rows(state) + @const_table_height
end
@spec table_height_and_rows(TUI.state) :: {pos_integer, pos_integer}
def table_height_and_rows(state) do
rows = table_rows(state)
{rows + @const_table_height, rows}
end
# TODO
@spec details_height(TUI.state) :: pos_integer
def details_height(state) do
case state.ui.layout do
{:split_horizontal, ratio} -> round(main_height(state) * (1 - ratio))
_ -> main_height(state)
end
end
@spec table_rows(TUI.state) :: pos_integer
def table_rows(state) do
case state.ui.layout do
{:split_horizontal, ratio} -> max(round(main_height(state) * ratio), 1)
_ -> main_height(state) - @const_table_height
end
end
@spec main_width(TUI.state) :: pos_integer
def main_width(state), do: state.ui.window.width
@spec table_width(TUI.state) :: pos_integer
def table_width(state), do: do_width(state, 0)
@spec details_width(TUI.state) :: pos_integer
def details_width(state), do: do_width(state, 1)
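# Converts a grid span (out of @grid_size columns) into a character width for
# the current window; index 0 selects the table pane, index 1 the details pane.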
defp do_width(state, index) do
case state.ui.layout do
{:split_vertical, g_width} -> round(state.ui.window.width * (elem(g_width, index) / @grid_size))
_ -> state.ui.window.width
end
end
defmodule TableCursor do
use Views.Stateful.Cursor
@spec max_y(TUI.state) :: non_neg_integer
def max_y(state) do
data = state.data[state.ui.tab.data_key]
if data, do: Enum.count(data), else: 0
end
@spec max_y_view(TUI.state) :: non_neg_integer
defdelegate max_y_view(state), to: Views.MainView, as: :table_rows
end
end
|
apps/dtask_tui/lib/dtask/tui/views/main_view.ex
| 0.784443
| 0.790247
|
main_view.ex
|
starcoder
|
defmodule Bundlex.Project do
@bundlex_file_name "bundlex.exs"
@moduledoc """
Behaviour that should be implemented by each project using Bundlex in the
`#{@bundlex_file_name}` file.
"""
use Bunch
alias Bunch.KVList
alias Bundlex.Helper.MixHelper
alias __MODULE__.Store
@src_dir_name "c_src"
@type native_name_t :: atom
@typedoc """
Type describing configuration of a native.
It's a keyword list containing the following keys:
* `sources` - C files to be compiled (at least one must be provided),
* `includes` - Paths to look for header files (empty list by default).
* `lib_dirs` - Paths to look for libraries (empty list by default).
* `libs` - Names of libraries to link (empty list by default).
* `pkg_configs` - Names of libraries for which the appropriate flags will be
obtained using pkg-config (empty list by default).
* `deps` - Dependencies in the form of `{app, lib_name}`, where `app`
is the application name of the dependency, and `lib_name` is the name of lib
specified in bundlex project of this dependency.
* `src_base` - Native files should reside in `project_root/c_src/<src_base>`
(application name by default).
* `compiler_flags` - Custom flags for compiler.
* `linker_flags` - Custom flags for linker.
"""
@type native_config_t :: [
sources: [String.t()],
includes: [String.t()],
lib_dirs: [String.t()],
libs: [String.t()],
pkg_configs: [String.t()],
deps: [{Application.app(), native_name_t | [native_name_t]}],
src_base: String.t(),
compiler_flags: [String.t()],
linker_flags: [String.t()]
]
@typedoc """
Type describing project configuration.
It's a keyword list, where nifs, cnodes and libs can be specified. Libs are
native packages that are compiled as static libraries and linked to natives
that have them specified in `deps` field of their configuration.
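An illustrative `bundlex.exs` exporting a single NIF (names and paths are
examples only):
defmodule MyApp.BundlexProject do
  use Bundlex.Project

  def project do
    [nifs: [my_nif: [sources: ["my_nif.c"]]]]
  end
end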
"""
@type config_t :: KVList.t(:nifs | :cnodes | :libs, KVList.t(native_name_t, native_config_t))
@doc """
Callback returning project configuration.
"""
@callback project() :: config_t
defmacro __using__(_args) do
quote do
@behaviour unquote(__MODULE__)
def bundlex_project?, do: true
def src_path, do: __DIR__ |> Path.join(unquote(@src_dir_name))
end
end
@typedoc """
Struct representing bundlex project.
Contains the following fields:
- `:config` - project configuration
- `:src_path` - path to the native sources
- `:module` - bundlex project module
- `:app` - application that exports project
"""
@type t :: %__MODULE__{config: config_t, src_path: String.t(), module: module, app: atom}
@enforce_keys [:config, :src_path, :module, :app]
defstruct @enforce_keys
@doc """
Determines if `module` is a bundlex project module.
"""
@spec project_module?(module) :: boolean
def project_module?(module) do
function_exported?(module, :bundlex_project?, 0) and module.bundlex_project?()
end
@doc """
Returns the project struct of given application.
If the module has not been loaded yet, it is loaded from
`project_dir/#{@bundlex_file_name}` file.
"""
@spec get(application :: atom) ::
{:ok, t}
| {:error,
:invalid_project_specification
| {:no_bundlex_project_in_file, path :: binary()}
| :unknown_application}
def get(application \\ MixHelper.get_app!()) do
project = Store.get_project(application)
if project do
{:ok, project}
else
with {:ok, module} <- load(application),
project = %__MODULE__{
config: module.project(),
src_path: module.src_path(),
module: module,
app: application
},
true <- Keyword.keyword?(project.config) or {:error, :invalid_project_specification} do
Store.store_project(application, project)
{:ok, project}
end
end
end
@spec load(application :: atom) ::
{:ok, module}
| {:error, {:no_bundlex_project_in_file, path :: binary()} | :unknown_application}
defp load(application) do
with {:ok, dir} <- MixHelper.get_project_dir(application) do
bundlex_file_path = dir |> Path.join(@bundlex_file_name)
modules = Code.require_file(bundlex_file_path) |> Keyword.keys()
modules
|> Enum.find(&project_module?/1)
|> Bunch.error_if_nil({:no_bundlex_project_in_file, bundlex_file_path})
end
end
end
|
lib/bundlex/project.ex
| 0.820146
| 0.410845
|
project.ex
|
starcoder
|
defmodule Bigtable.RowFilter do
alias Bigtable.RowFilter.ColumnRange
alias Google.Bigtable.V2.{ReadRowsRequest, RowFilter, TimestampRange}
@moduledoc """
Provides functions for creating `Google.Bigtable.V2.RowFilter` structs and applying them to a `Google.Bigtable.V2.ReadRowsRequest` or `Google.Bigtable.V2.RowFilter.Chain`.
"""
@doc """
Adds a `Google.Bigtable.V2.RowFilter` chain to a `Google.Bigtable.V2.ReadRowsRequest` given a list of `Google.Bigtable.V2.RowFilter`.
## Examples
iex> filters = [Bigtable.RowFilter.cells_per_column(2), Bigtable.RowFilter.row_key_regex("^Test#\\w+")]
iex> request = Bigtable.ReadRows.build("table") |> Bigtable.RowFilter.chain(filters)
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:chain,
%Google.Bigtable.V2.RowFilter.Chain{
filters: [
%Google.Bigtable.V2.RowFilter{
filter: {:cells_per_column_limit_filter, 2}
},
%Google.Bigtable.V2.RowFilter{
filter: {:row_key_regex_filter, "^Test#\\w+"}
}
]
}}
}
"""
@spec chain(ReadRowsRequest.t(), [RowFilter.t()]) :: ReadRowsRequest.t()
def chain(%ReadRowsRequest{} = request, filters) when is_list(filters) do
{:chain, RowFilter.Chain.new(filters: filters)}
|> build_filter()
|> apply_filter(request)
end
@doc """
Adds a cells per column `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.cells_per_column(2)
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:cells_per_column_limit_filter, 2}
}
"""
@spec cells_per_column(ReadRowsRequest.t(), integer()) :: ReadRowsRequest.t()
def cells_per_column(%ReadRowsRequest{} = request, limit) when is_integer(limit) do
filter = cells_per_column(limit)
filter
|> apply_filter(request)
end
@doc """
Creates a cells per column `Google.Bigtable.V2.RowFilter`.
## Examples
iex> Bigtable.RowFilter.cells_per_column(2)
%Google.Bigtable.V2.RowFilter{
filter: {:cells_per_column_limit_filter, 2}
}
"""
@spec cells_per_column(integer()) :: RowFilter.t()
def cells_per_column(limit) when is_integer(limit) do
{:cells_per_column_limit_filter, limit}
|> build_filter()
end
@doc """
Adds a cells per row `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.cells_per_row(2)
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:cells_per_row_limit_filter, 2}
}
"""
@spec cells_per_row(ReadRowsRequest.t(), integer()) :: ReadRowsRequest.t()
def cells_per_row(%ReadRowsRequest{} = request, limit) when is_integer(limit) do
filter = cells_per_row(limit)
filter
|> apply_filter(request)
end
@doc """
Creates a cells per row `Google.Bigtable.V2.RowFilter`.
## Examples
iex> Bigtable.RowFilter.cells_per_row(2)
%Google.Bigtable.V2.RowFilter{
filter: {:cells_per_row_limit_filter, 2}
}
"""
@spec cells_per_row(integer()) :: RowFilter.t()
def cells_per_row(limit) when is_integer(limit) do
{:cells_per_row_limit_filter, limit}
|> build_filter()
end
@doc """
Adds a cells per row offset `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.cells_per_row_offset(2)
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:cells_per_row_offset_filter, 2}
}
"""
@spec cells_per_row_offset(ReadRowsRequest.t(), integer()) :: ReadRowsRequest.t()
def cells_per_row_offset(%ReadRowsRequest{} = request, offset) when is_integer(offset) do
filter = cells_per_row_offset(offset)
filter
|> apply_filter(request)
end
@doc """
Creates a cells per row offset `Google.Bigtable.V2.RowFilter`.
## Examples
iex> Bigtable.RowFilter.cells_per_row_offset(2)
%Google.Bigtable.V2.RowFilter{
filter: {:cells_per_row_offset_filter, 2}
}
"""
@spec cells_per_row_offset(integer()) :: RowFilter.t()
def cells_per_row_offset(offset) when is_integer(offset) do
{:cells_per_row_offset_filter, offset}
|> build_filter()
end
@doc """
Adds a row key regex `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.row_key_regex("^Test#\\w+")
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:row_key_regex_filter, "^Test#\\w+"}
}
"""
@spec row_key_regex(ReadRowsRequest.t(), binary()) :: ReadRowsRequest.t()
def row_key_regex(%ReadRowsRequest{} = request, regex) do
filter = row_key_regex(regex)
filter
|> apply_filter(request)
end
@doc """
Creates a row key regex `Google.Bigtable.V2.RowFilter`.
## Examples
iex> Bigtable.RowFilter.row_key_regex("^Test#\\w+")
%Google.Bigtable.V2.RowFilter{
filter: {:row_key_regex_filter, "^Test#\\w+"}
}
"""
@spec row_key_regex(binary()) :: RowFilter.t()
def row_key_regex(regex) do
{:row_key_regex_filter, regex}
|> build_filter()
end
@doc """
Adds a value regex `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.value_regex("^test$")
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:value_regex_filter, "^test$"}
}
"""
@spec value_regex(ReadRowsRequest.t(), binary()) :: ReadRowsRequest.t()
def value_regex(%ReadRowsRequest{} = request, regex) do
filter = value_regex(regex)
filter
|> apply_filter(request)
end
@doc """
Creates a value regex `Google.Bigtable.V2.RowFilter`.
## Examples
iex> Bigtable.RowFilter.value_regex("^test$")
%Google.Bigtable.V2.RowFilter{
filter: {:value_regex_filter, "^test$"}
}
"""
@spec value_regex(binary()) :: RowFilter.t()
def value_regex(regex) do
{:value_regex_filter, regex}
|> build_filter()
end
@doc """
Adds a family name regex `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.family_name_regex("^testFamily$")
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:family_name_regex_filter, "^testFamily$"}
}
"""
@spec family_name_regex(ReadRowsRequest.t(), binary()) :: ReadRowsRequest.t()
def family_name_regex(%ReadRowsRequest{} = request, regex) do
filter = family_name_regex(regex)
filter
|> apply_filter(request)
end
@doc """
Creates a family name regex `Google.Bigtable.V2.RowFilter`.
## Examples
iex> Bigtable.RowFilter.family_name_regex("^testFamily$")
%Google.Bigtable.V2.RowFilter{
filter: {:family_name_regex_filter, "^testFamily$"}
}
"""
@spec family_name_regex(binary()) :: RowFilter.t()
def family_name_regex(regex) do
{:family_name_regex_filter, regex}
|> build_filter()
end
@doc """
Adds a column qualifier regex `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.column_qualifier_regex("^testColumn$")
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:column_qualifier_regex_filter, "^testColumn$"}
}
"""
@spec column_qualifier_regex(ReadRowsRequest.t(), binary()) :: ReadRowsRequest.t()
def column_qualifier_regex(%ReadRowsRequest{} = request, regex) do
filter = column_qualifier_regex(regex)
filter
|> apply_filter(request)
end
@doc """
Creates a column qualifier regex `Google.Bigtable.V2.RowFilter`.
## Examples
iex> Bigtable.RowFilter.column_qualifier_regex("^testColumn$")
%Google.Bigtable.V2.RowFilter{
filter: {:column_qualifier_regex_filter, "^testColumn$"}
}
"""
@spec column_qualifier_regex(binary()) :: RowFilter.t()
def column_qualifier_regex(regex) do
{:column_qualifier_regex_filter, regex}
|> build_filter()
end
@doc """
Adds a column range `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
Column range should be provided in the format `{start, end}` or `{start, end, inclusive}`.
Defaults to inclusive start and end column qualifiers.
## Examples
iex> range = {"column2", "column4"}
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.column_range("family", range)
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {
:column_range_filter,
%Google.Bigtable.V2.ColumnRange{
end_qualifier: {:end_qualifier_closed, "column4"},
family_name: "family",
start_qualifier: {:start_qualifier_closed, "column2"}
}
}
}
"""
@spec column_range(
Google.Bigtable.V2.ReadRowsRequest.t(),
binary(),
{binary(), binary()} | {binary(), binary(), boolean()}
) :: ReadRowsRequest.t()
def column_range(
%ReadRowsRequest{} = request,
family_name,
range
) do
filter = column_range(family_name, range)
filter
|> apply_filter(request)
end
@doc """
Creates a column range `Google.Bigtable.V2.RowFilter`.
Column range should be provided in the format `{start, end}` or `{start, end, inclusive}`.
Defaults to inclusive start and end column qualifiers.
## Examples
iex> range = {"column2", "column4"}
iex> Bigtable.RowFilter.column_range("family", range)
%Google.Bigtable.V2.RowFilter{
filter: {
:column_range_filter,
%Google.Bigtable.V2.ColumnRange{
end_qualifier: {:end_qualifier_closed, "column4"},
family_name: "family",
start_qualifier: {:start_qualifier_closed, "column2"}
}
}
}
"""
@spec column_range(binary(), {binary(), binary(), boolean()} | {binary(), binary()}) ::
RowFilter.t()
def column_range(family_name, range) do
range = ColumnRange.create_range(family_name, range)
{:column_range_filter, range}
|> build_filter()
end
@doc """
Adds a timestamp range `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
`start_timestamp`: Inclusive lower bound. If left empty, interpreted as 0.
`end_timestamp`: Exclusive upper bound. If left empty, interpreted as infinity.
## Examples
iex> range = [start_timestamp: 1000, end_timestamp: 2000]
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.timestamp_range(range)
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {
:timestamp_range_filter,
%Google.Bigtable.V2.TimestampRange{
end_timestamp_micros: 2000,
start_timestamp_micros: 1000
}
}
}
"""
@spec timestamp_range(ReadRowsRequest.t(), Keyword.t()) :: ReadRowsRequest.t()
def timestamp_range(%ReadRowsRequest{} = request, timestamps) do
filter = timestamp_range(timestamps)
filter
|> apply_filter(request)
end
@doc """
Creates a timestamp range `Google.Bigtable.V2.RowFilter`.
`start_timestamp`: Inclusive lower bound. If left empty, interpreted as 0.
`end_timestamp`: Exclusive upper bound. If left empty, interpreted as infinity.
## Examples
iex> range = [start_timestamp: 1000, end_timestamp: 2000]
iex> Bigtable.RowFilter.timestamp_range(range)
%Google.Bigtable.V2.RowFilter{
filter: {
:timestamp_range_filter,
%Google.Bigtable.V2.TimestampRange{
end_timestamp_micros: 2000,
start_timestamp_micros: 1000
}
}
}
"""
@spec timestamp_range(Keyword.t()) :: RowFilter.t()
def timestamp_range(timestamps) do
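# Note: 0 is the proto's "unset" sentinel here; as documented above, an unset
# start is read as 0 and an unset end is read as infinity by Bigtable.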
range =
TimestampRange.new(
start_timestamp_micros: Keyword.get(timestamps, :start_timestamp, 0),
end_timestamp_micros: Keyword.get(timestamps, :end_timestamp, 0)
)
{:timestamp_range_filter, range}
|> build_filter()
end
@doc """
Adds a pass all `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.pass_all()
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:pass_all_filter, true}
}
"""
@spec pass_all(ReadRowsRequest.t()) :: ReadRowsRequest.t()
def pass_all(%ReadRowsRequest{} = request) do
filter = pass_all()
filter
|> apply_filter(request)
end
@doc """
Creates a pass all `Google.Bigtable.V2.RowFilter`.
Matches all cells, regardless of input. Functionally equivalent to leaving filter unset, but included for completeness.
## Examples
iex> Bigtable.RowFilter.pass_all()
%Google.Bigtable.V2.RowFilter{
filter: {:pass_all_filter, true}
}
"""
@spec pass_all() :: RowFilter.t()
def pass_all() do
{:pass_all_filter, true}
|> build_filter()
end
@doc """
Adds a block all `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.block_all()
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:block_all_filter, true}
}
"""
@spec block_all(ReadRowsRequest.t()) :: ReadRowsRequest.t()
def block_all(%ReadRowsRequest{} = request) do
filter = block_all()
filter
|> apply_filter(request)
end
@doc """
Creates a block all `Google.Bigtable.V2.RowFilter`.
Does not match any cells, regardless of input. Useful for temporarily disabling just part of a filter.
## Examples
iex> Bigtable.RowFilter.block_all()
%Google.Bigtable.V2.RowFilter{
filter: {:block_all_filter, true}
}
"""
@spec block_all() :: RowFilter.t()
def block_all() do
{:block_all_filter, true}
|> build_filter()
end
@doc """
Adds a strip value transformer `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.strip_value_transformer()
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:strip_value_transformer, true}
}
"""
@spec strip_value_transformer(ReadRowsRequest.t()) :: ReadRowsRequest.t()
def strip_value_transformer(%ReadRowsRequest{} = request) do
filter = strip_value_transformer()
filter
|> apply_filter(request)
end
@doc """
Creates a strip value transformer `Google.Bigtable.V2.RowFilter`.
## Examples
iex> Bigtable.RowFilter.strip_value_transformer()
%Google.Bigtable.V2.RowFilter{
filter: {:strip_value_transformer, true}
}
"""
@spec strip_value_transformer() :: RowFilter.t()
def strip_value_transformer() do
{:strip_value_transformer, true}
|> build_filter()
end
@doc """
Adds an apply label transformer `Google.Bigtable.V2.RowFilter` to a `Google.Bigtable.V2.ReadRowsRequest`.
## Examples
iex> request = Bigtable.ReadRows.build() |> Bigtable.RowFilter.apply_label_transformer("label")
iex> with %Google.Bigtable.V2.ReadRowsRequest{} <- request, do: request.filter
%Google.Bigtable.V2.RowFilter{
filter: {:apply_label_transformer, "label"}
}
"""
@spec apply_label_transformer(ReadRowsRequest.t(), binary()) :: ReadRowsRequest.t()
def apply_label_transformer(%ReadRowsRequest{} = request, label) do
filter = apply_label_transformer(label)
filter
|> apply_filter(request)
end
@doc """
Creates an apply label transformer `Google.Bigtable.V2.RowFilter`.
## Examples
iex> Bigtable.RowFilter.apply_label_transformer("label")
%Google.Bigtable.V2.RowFilter{
filter: {:apply_label_transformer, "label"}
}
"""
@spec apply_label_transformer(binary()) :: RowFilter.t()
def apply_label_transformer(label) do
{:apply_label_transformer, label}
|> build_filter()
end
# Creates a Bigtable.V2.RowFilter given a type and value
@spec build_filter({atom(), any()}) :: RowFilter.t()
defp build_filter({type, value}) when is_atom(type) do
RowFilter.new(filter: {type, value})
end
@spec apply_filter(RowFilter.t(), ReadRowsRequest.t()) :: ReadRowsRequest.t()
defp apply_filter(%RowFilter{} = filter, %ReadRowsRequest{} = request) do
%{request | filter: filter}
end
end
|
lib/row_filter/row_filter.ex
| 0.917349
| 0.700608
|
row_filter.ex
|
starcoder
|
defmodule Aoc2019Day16 do
# https://adventofcode.com/2019/day/16
def as_list(text) do
text
|> String.trim()
|> String.to_charlist()
|> Enum.map(fn d -> d - ?0 end)
|> Enum.with_index(1)
end
@doc """
This was originally used for part 1; it is now kept for reference only after the refactor.
"""
def raw_pattern(base_pattern, nth) do
Stream.flat_map(base_pattern, fn x -> Stream.cycle([x]) |> Enum.take(nth) end)
|> Stream.cycle()
end
def doone(numbers) do
numbers_with_index = Enum.with_index(numbers)
Enum.map(numbers_with_index, fn {{_, nth}, local_index} ->
calculate(numbers, nth, local_index)
end)
end
def ones_digit(number) do
rem(abs(number), 10)
end
def apply_phases(text, 0) do
text
|> Enum.map(fn {n, _} -> n end)
|> Enum.map(&Integer.to_string/1)
|> Enum.join()
end
def apply_phases(text, phases) do
output = doone(text)
apply_phases(output, phases - 1)
end
@doc """
Instead of materializing the 0 1 0 -1 stream and zipping it with the numbers,
this calculates the result directly from the repeating 0 1 0 -1 pattern.
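For example (a sketch; `as_list/1` builds the indexed digit list):
```elixir
digits = Aoc2019Day16.as_list("12345")
Aoc2019Day16.calculate(digits, 1)
#=> {3, 1}   # 1*1 + 2*0 + 3*(-1) + 4*0 + 5*1 = 3
```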
"""
def calculate(numbers, nth, drop \\ 0) do
dropped = numbers |> Enum.drop(drop)
r =
dropped
|> Enum.reduce(0, fn x, acc ->
{n, idx} = x
idx = idx + 1
remainder = rem(idx, nth * 4)
v =
case trunc(:math.ceil(remainder / nth)) do
4 -> -n
1 -> 0
2 -> n
3 -> 0
0 -> -n
end
acc + v
end)
# |> IO.inspect #|> ones_digit
|> ones_digit
{r, nth}
end
@doc """
Part 2 calculates in a totally different way. The input is huge, so using the
method from part 1 would never work (too slow).
This method is based on several observations:
- in row nth, the pattern has n consecutive 1s
- the result for row nth depends only on columns nth and beyond, because everything before them is multiplied by 0
- based on the part 2 examples and input, the offset is big enough to guarantee that all remaining numbers still fall in the multiply-by-1 region, e.g. an offset of 5_000_000 with 500_000 remaining numbers (i.e. a total length of 5_500_000)
- since we only care about the last digit, there is no need to compute the huge sum; it is enough to carry the last digit, adding 10 along the way to avoid negative values
"""
def generate_new_number(numbers) do
sum_base = ones_digit(Enum.sum(numbers))
numbers
|> Enum.reduce({[], sum_base, 0}, fn n, {ls, cumsum, last} ->
{[ones_digit(10 + cumsum - last) | ls], 10 + cumsum - last, n}
end)
|> elem(0)
|> Enum.reverse()
end
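# For example (a sketch): generate_new_number([1, 2, 3]) returns [6, 5, 3],
# the suffix sums of [1, 2, 3] taken mod 10, which is exactly one FFT phase
# in the all-ones region.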
def loop(text, 0) do
text
|> Enum.map(&Integer.to_string/1)
|> Enum.join()
end
def loop(text, phases) do
output = generate_new_number(text)
loop(output, phases - 1)
end
def solve1(text, phases \\ 100) do
input = as_list(text)
apply_phases(input, phases) |> String.slice(0, 8)
end
def solve2(input, phases \\ 100) do
offset = String.to_integer(String.slice(input, 0, 7))
inp = input |> String.trim()
inp =
inp
|> String.to_charlist()
|> Enum.map(fn d -> d - ?0 end)
|> Stream.cycle()
|> Enum.take(10000 * String.length(inp))
|> Enum.drop(offset)
Aoc2019Day16.loop(inp, phases) |> String.slice(0, 8)
end
end
|
lib/aoc2019_day16.ex
| 0.782663
| 0.429968
|
aoc2019_day16.ex
|
starcoder
|
defmodule JPMarc.DataField do
@moduledoc """
Tools for working with JPMARC DataFields
"""
alias JPMarc.SubField
@fs "\x1e" # Field separator
@typedoc """
Type that represents `JPMarc.DataField` struct.
This is constructed with `:tag` as String, `:ind1` as String, `:ind2` as String and `:subfields` as List of `JPMarc.SubField.t`
"""
@type t :: %__MODULE__{tag: String.t, ind1: String.t, ind2: String.t, subfields: [SubField.t]}
@derive [Poison.Encoder]
defstruct tag: "", ind1: " ", ind2: " ", subfields: []
@doc """
Returns a list of SubFields with `code` in `field`, or `[]` when it doesn't exist.
`code` is either of :all, code as String or List of code.
Default is `:all`.
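For example (a sketch; assumes a `%JPMarc.SubField{}` with `:code` and `:value` keys, as used throughout this module):
```elixir
field = %JPMarc.DataField{tag: "245", subfields: [
  %JPMarc.SubField{code: "a", value: "Title"}
]}
JPMarc.DataField.subfields(field, "a")
#=> [%JPMarc.SubField{code: "a", value: "Title"}]
```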
"""
@spec subfields(t, (atom|String.t|[String.t]))::[SubField.t]
def subfields(field, code \\ :all) do
cond do
code == :all ->
field.subfields
is_list(code) ->
field.subfields |> Enum.filter(&Enum.member?(code, &1.code))
is_binary(code) ->
field.subfields |> Enum.filter(&(&1.code == code))
true -> []
end
end
@doc """
Returns a Subfield value with `code` in `field`, or `""` when it doesn't exist.
`code` is either of :all, code as String or List of code.
Default is `:all`.
"""
@spec subfield_value(t, (atom|String.t|[String.t]), String.t)::String.t
def subfield_value(field, code \\ :all, joiner \\ " ") do
subfields(field, code) |> Enum.map(&("#{&1.value}")) |> Enum.join(joiner)
end
@doc """
Return the MARC Format of the data field
"""
@spec to_marc(t)::String.t
def to_marc(field) do
subfields = field.subfields |> Enum.map(&SubField.to_marc/1) |> Enum.join
field.ind1 <> field.ind2 <> subfields <> @fs
end
@doc """
Return a tuple representing its xml element
"""
@spec to_xml(t)::tuple
def to_xml(df) do
subfields = df.subfields |> Enum.map(&SubField.to_xml/1)
{:datafield, %{tag: df.tag}, subfields}
end
@doc """
Return a text representing of the field
"""
@spec to_text(t)::String.t
def to_text(df) do
subfields = df.subfields |> Enum.map(&SubField.to_text/1)
"#{df.tag} #{df.ind1}#{df.ind2} #{Enum.join(subfields, " ")}"
end
defimpl Poison.Encoder, for: JPMarc.DataField do
def encode(df, _options) do
subfields = df.subfields |> Enum.map(&Poison.encode!/1) |> Enum.join(",")
"{\"#{df.tag}\":{\"ind1\":\"#{df.ind1}\",\"ind2\":\"#{df.ind2}\",\"subfields\":[#{subfields}]}}"
end
end
defimpl Inspect do
def inspect(%JPMarc.DataField{tag: tag, ind1: ind1, ind2: ind2, subfields: subfields}, _opts) do
"#{tag} #{ind1}#{ind2} #{Enum.join(subfields, " ")}"
end
end
defimpl String.Chars, for: JPMarc.DataField do
def to_string(%JPMarc.DataField{tag: tag, ind1: ind1, ind2: ind2, subfields: subfields}) do
"#{tag} #{ind1}#{ind2} #{Enum.join(subfields, " ")}"
end
end
end
|
lib/jpmarc/data_field.ex
| 0.844265
| 0.563048
|
data_field.ex
|
starcoder
|
defmodule GitDiff do
@moduledoc """
A simple implementation for taking the output from 'git diff' and transforming it into Elixir structs.
## Installation
The package can be installed by adding `git_diff` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:git_diff, "~> 0.5.0"}
]
end
```
## Example
Output:
```
[
%GitDiff.Patch{
chunks: [
%GitDiff.Chunk{
from_num_lines: "42",
from_start_line: "42",
header: "@@ -481,23 +483,24 @@ class Cursor extends Model {",
context: "class Cursor extends Model {", # will be "" if there is no context
lines: [
%GitDiff.Line{
from_line_number: 481,
text: " {",
to_line_number: 483,
type: :context # will be one of :context, :add, :remove
},
...
],
to_num_lines: "42",
to_start_line: "42"
}
],
from: "src/cursor.js",
headers: %{"index" => {"10bdef8", "181eeb9", "100644"}},
to: "src/cursor.js"},
]
```
The above output is heavily truncated for illustration, but it should give enough of an idea of what to expect. The
code, while naive, is less than 100 lines of actual code and all takes place in the GitDiff module. Emulate the tests
in an interactive shell for quick viewing of the output.
## Benchmarks
Haven't done much benchmarking, but up to around a 5k (I just stopped trying there) line diff the performance was
linear and took a whopping 35ms per call on the test VM. For a more reasonably sized ~150 line diff it clocked in at
around 340 microseconds.
"""
alias GitDiff.Patch
alias GitDiff.Chunk
alias GitDiff.Line
@doc """
Parse the output from a 'git diff' command.
Returns `{:ok, [%GitDiff.Patch{}]}` for success, `{:error, :unrecognized_format}` otherwise. See `GitDiff.Patch`.
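For example (a sketch; assumes `git` is on the `PATH` and the working tree has changes):
```elixir
{output, 0} = System.cmd("git", ["diff"])
{:ok, patches} = GitDiff.parse_patch(output)
```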
"""
@spec parse_patch(String.t) :: {:ok, [%GitDiff.Patch{}]} | {:error, :unrecognized_format}
def parse_patch(git_diff) do
try do
parsed_diff =
git_diff
|> String.splitter("\n")
|> split_diffs()
|> process_diffs()
|> Enum.to_list()
if Enum.all?(parsed_diff, fn(%Patch{} = _patch) -> true; (_) -> false end) do
{:ok, parsed_diff}
else
{:error, :unrecognized_format}
end
rescue
_ -> {:error, :unrecognized_format}
end
end
defp process_diffs(diffs) do
Stream.map(diffs, fn(diff) ->
[headers | chunks] = split_diff(diff) |> Enum.to_list()
patch = process_diff_headers(headers)
chunks =
Enum.map(chunks, fn(lines) ->
process_chunk(%{from_line_number: nil, to_line_number: nil}, %Chunk{}, lines)
end)
%{patch | chunks: chunks}
end)
end
defp process_chunk(_, chunk, []) do
%{chunk | lines: Enum.reverse(chunk.lines)}
end
defp process_chunk(context, chunk, ["" |lines]), do: process_chunk(context, chunk, lines)
defp process_chunk(context, chunk, [line |lines]) do
{context, chunk} =
case line do
"@@" <> text ->
results = Regex.named_captures(~r/ -(?<from_start_line>[0-9]+)(,(?<from_num_lines>[0-9]+))? \+(?<to_start_line>[0-9]+)(,(?<to_num_lines>[0-9]+))? @@( (?<context>.+))?/, text)
{
%{context | from_line_number: String.to_integer(results["from_start_line"]), to_line_number: String.to_integer(results["to_start_line"])},
%{chunk | from_num_lines: results["from_num_lines"],
from_start_line: results["from_start_line"],
to_num_lines: results["to_num_lines"],
to_start_line: results["to_start_line"],
context: results["context"],
header: "@@" <> text
}}
" " <> _ = text ->
line =
%Line{
text: text,
type: :context,
to_line_number: context.to_line_number,
from_line_number: context.from_line_number
}
{
%{context | to_line_number: context.to_line_number + 1, from_line_number: context.from_line_number + 1},
%{chunk | lines: [line | chunk.lines]}
}
"+" <> _ = text ->
line =
%Line{
text: text,
type: :add,
to_line_number: context.to_line_number
}
{
%{context | to_line_number: context.to_line_number + 1},
%{chunk | lines: [line | chunk.lines]}
}
"-" <> _ = text ->
line =
%Line{
text: text,
type: :remove,
from_line_number: context.from_line_number
}
{
%{context | from_line_number: context.from_line_number + 1},
%{chunk | lines: [line | chunk.lines]}
}
"\\" <> _ = text ->
line =
%Line{
text: text,
type: :context,
}
{
context,
%{chunk | lines: [line | chunk.lines]}
}
end
process_chunk(context, chunk, lines)
end
defp process_diff_headers([header | headers]) do
[_ | [diff_type | _]] = String.split(header, " ")
if diff_type !== "--git" do
raise "Invalid diff type"
else
process_diff_headers(%Patch{}, headers)
end
end
defp process_diff_headers(patch, []), do: patch
defp process_diff_headers(patch, [header | headers]) do
patch =
case header do
"old mode " <> mode -> %{patch | headers: Map.put(patch.headers, "old mode", mode)}
"new mode " <> mode -> %{patch | headers: Map.put(patch.headers, "new mode", mode)}
"deleted file mode " <> mode -> %{patch | headers: Map.put(patch.headers, "deleted file mode", mode)}
"new file mode " <> mode -> %{patch | headers: Map.put(patch.headers, "new file mode", mode)}
"copy from mode " <> mode -> %{patch | headers: Map.put(patch.headers, "copy from mode", mode)}
"copy to mode " <> mode -> %{patch | headers: Map.put(patch.headers, "copy to mode", mode)}
"rename from " <> filepath -> %{patch | headers: Map.put(patch.headers, "rename from", filepath)}
"rename from mode " <> mode -> %{patch | headers: Map.put(patch.headers, "rename from mode", mode)}
"rename to " <> filepath -> %{patch | headers: Map.put(patch.headers, "rename to", filepath)}
"rename to mode " <> mode -> %{patch | headers: Map.put(patch.headers, "rename to mode", mode)}
"similarity index " <> number -> %{patch | headers: Map.put(patch.headers, "similarity index", number)}
"dissimilarity index " <> number -> %{patch | headers: Map.put(patch.headers, "dissimilarity index", number)}
"index " <> rest ->
results = Regex.named_captures(~r/(?<first_hash>.+?)\.\.(?<second_hash>.+?) (?<mode>.+)/, rest)
%{patch | headers: Map.put(patch.headers, "index", {results["first_hash"], results["second_hash"], results["mode"]})}
"--- a/" <> from -> %{patch | from: from}
"--- /dev/null" -> %{patch | from: nil}
"+++ b/" <> to -> %{patch | to: to}
"+++ /dev/null" -> %{patch | to: nil}
end
process_diff_headers(patch, headers)
end
defp split_diff(diff) do
chunk_fun =
fn line, lines ->
if String.starts_with?(line, "@@") do
{:cont, Enum.reverse(lines), [line]}
else
{:cont, [line | lines]}
end
end
after_fun =
fn
[] -> {:cont, []}
lines -> {:cont, Enum.reverse(lines), []}
end
Stream.chunk_while(diff, [], chunk_fun, after_fun)
end
defp split_diffs(split_diff) do
chunk_fun =
fn line, lines ->
if String.starts_with?(line, "diff") and lines != [] do
{:cont, Enum.reverse(lines), [line]}
else
{:cont, [line | lines]}
end
end
after_fun =
fn
[] -> {:cont, []}
lines -> {:cont, Enum.reverse(lines), []}
end
Stream.chunk_while(split_diff, [], chunk_fun, after_fun)
end
end
|
lib/git_diff.ex
| 0.836388
| 0.769015
|
git_diff.ex
|
starcoder
|
defmodule Parse.VehiclePositionsJson do
@moduledoc """
Parses an enhanced Vehicle Position JSON file into a list of `%Model.Vehicle{}` structs.
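The expected input shape is roughly (a sketch inferred from `parse_entity/1`, with most keys omitted):
```elixir
%{
  "entity" => [
    %{
      "vehicle" => %{
        "position" => %{"latitude" => 42.35, "longitude" => -71.06},
        "trip" => %{"trip_id" => "t1", "route_id" => "r1"},
        "vehicle" => %{"id" => "y1234", "label" => "1234"}
      }
    }
  ]
}
```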
"""
alias Model.Vehicle
def parse(body) do
body
|> Jason.decode!(strings: :copy)
|> Map.get("entity")
|> Enum.flat_map(&parse_entity/1)
end
def parse_entity(
%{
"vehicle" => %{
"position" => position,
"trip" => trip,
"vehicle" => vehicle
}
} = entity
) do
data = Map.get(entity, "vehicle")
[
%Vehicle{
id: Map.get(vehicle, "id"),
trip_id: Map.get(trip, "trip_id"),
route_id: Map.get(trip, "route_id"),
direction_id: Map.get(trip, "direction_id"),
stop_id: Map.get(data, "stop_id"),
label: Map.get(vehicle, "label"),
latitude: Map.get(position, "latitude"),
longitude: Map.get(position, "longitude"),
bearing: Map.get(position, "bearing"),
speed: Map.get(position, "speed"),
current_status: parse_status(Map.get(data, "current_status")),
current_stop_sequence: Map.get(data, "current_stop_sequence"),
updated_at: unix_to_local(Map.get(data, "timestamp")),
consist: parse_consist(Map.get(vehicle, "consist")),
occupancy_status: parse_occupancy_status(Map.get(data, "occupancy_status"))
}
]
end
def parse_entity(%{}) do
[]
end
defp parse_consist([_ | _] = consist) do
Enum.map(consist, fn %{"label" => car_label} -> car_label end)
end
defp parse_consist([]), do: nil
defp parse_consist(nil), do: nil
defp parse_status(nil) do
:in_transit_to
end
defp parse_status("IN_TRANSIT_TO") do
:in_transit_to
end
defp parse_status("INCOMING_AT") do
:incoming_at
end
defp parse_status("STOPPED_AT") do
:stopped_at
end
defp parse_occupancy_status(nil), do: nil
defp parse_occupancy_status("EMPTY"), do: :empty
defp parse_occupancy_status("MANY_SEATS_AVAILABLE"), do: :many_seats_available
defp parse_occupancy_status("FEW_SEATS_AVAILABLE"), do: :few_seats_available
defp parse_occupancy_status("STANDING_ROOM_ONLY"), do: :standing_room_only
defp parse_occupancy_status("CRUSHED_STANDING_ROOM_ONLY"), do: :crushed_standing_room_only
defp parse_occupancy_status("FULL"), do: :full
defp parse_occupancy_status("NOT_ACCEPTING_PASSENGERS"), do: :not_accepting_passengers
defp unix_to_local(timestamp) when is_integer(timestamp) do
Parse.Timezone.unix_to_local(timestamp)
end
defp unix_to_local(nil) do
DateTime.utc_now()
end
end
|
apps/parse/lib/parse/vehicle_positions_json.ex
| 0.767559
| 0.459622
|
vehicle_positions_json.ex
|
starcoder
|
defprotocol Access do
@moduledoc """
Dictionary-like access to data structures via the `foo[bar]` syntax.
This module also empowers `Kernel`s nested update functions
`Kernel.get_in/2`, `Kernel.put_in/3`, `Kernel.update_in/3` and
`Kernel.get_and_update_in/3`.
## Deprecated
Currently, the Access protocol is deprecated as there are performance
concerns in the current implementation. Since Elixir v1.1, instead of
using a protocol, `foo[bar]` will dispatch directly to the `Dict`
module. Therefore, while `foo[bar]` will continue to work, extension
of the syntax should be done via a custom `Dict` implementation.
## Examples
Out of the box, Access works with all built-in dictionaries: `Keyword`,
`Map` and `HashDict`:
iex> keywords = [a: 1, b: 2]
iex> keywords[:a]
1
iex> map = %{a: 1, b: 2}
iex> map[:a]
1
iex> star_ratings = %{1.0 => "★", 1.5 => "★☆", 2.0 => "★★"}
iex> star_ratings[1.5]
"★☆"
Furthermore, Access transparently ignores `nil` values:
iex> keywords = [a: 1, b: 2]
iex> keywords[:c][:unknown]
nil
The key comparison must be implemented using the `===` operator.
"""
@doc """
Gets the container's value for the given key.
"""
@spec get(t, term) :: term
def get(container, key)
@doc """
Gets and updates the container's value for the given key, in a single pass.
The argument function `fun` must receive the value for the given `key` (or
`nil` if the key doesn't exist in `container`). It must return a tuple
containing the `get` value and the new value to be stored in the `container`.
This function returns a two-element tuple.
The first element is the `get` value, as returned by `fun`.
The second element is the container, updated with the value returned by `fun`.
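For example, using the `Map` implementation:
iex> Access.get_and_update(%{a: 1}, :a, fn current -> {current, current + 1} end)
{1, %{a: 2}}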
"""
@spec get_and_update(t, term, (term -> {get, term})) :: {get, t} when get: var
def get_and_update(container, key, fun)
end
defimpl Access, for: List do
def get(dict, key) when is_atom(key) do
case :lists.keyfind(key, 1, dict) do
{^key, value} -> value
false -> nil
end
end
def get(_dict, key) do
raise ArgumentError,
"the access protocol for lists expect the key to be an atom, got: #{inspect key}"
end
def get_and_update(dict, key, fun) when is_atom(key) do
get_and_update(dict, [], key, fun)
end
defp get_and_update([{key, value}|t], acc, key, fun) do
{get, update} = fun.(value)
{get, :lists.reverse(acc, [{key, update}|t])}
end
defp get_and_update([h|t], acc, key, fun) do
get_and_update(t, [h|acc], key, fun)
end
defp get_and_update([], acc, key, fun) do
{get, update} = fun.(nil)
{get, [{key, update}|:lists.reverse(acc)]}
end
end
defimpl Access, for: Map do
def get(map, key) do
case :maps.find(key, map) do
{:ok, value} -> value
:error -> nil
end
end
def get_and_update(map, key, fun) do
value =
case :maps.find(key, map) do
{:ok, value} -> value
:error -> nil
end
{get, update} = fun.(value)
{get, :maps.put(key, update, map)}
end
def get!(%{} = map, key) do
case :maps.find(key, map) do
{:ok, value} -> value
:error -> raise KeyError, key: key, term: map
end
end
def get!(other, key) do
raise ArgumentError,
"could not get key #{inspect key}. Expected map/struct, got: #{inspect other}"
end
def get_and_update!(%{} = map, key, fun) do
case :maps.find(key, map) do
{:ok, value} ->
{get, update} = fun.(value)
{get, :maps.put(key, update, map)}
:error ->
raise KeyError, key: key, term: map
end
end
def get_and_update!(other, key, _fun) do
raise ArgumentError,
"could not put/update key #{inspect key}. Expected map/struct, got: #{inspect other}"
end
end
defimpl Access, for: Atom do
def get(nil, _) do
nil
end
def get(atom, _) do
undefined(atom)
end
def get_and_update(nil, key, _fun) do
raise ArgumentError,
"could not put/update key #{inspect key} on a nil value"
end
def get_and_update(atom, _key, _fun) do
undefined(atom)
end
defp undefined(atom) do
raise Protocol.UndefinedError,
protocol: @protocol,
value: atom,
description: "only the nil atom is supported"
end
end
|
lib/elixir/lib/access.ex
| 0.919032
| 0.715747
|
access.ex
|
starcoder
|
defmodule Exonerate.Pointer do
@moduledoc false
# JSONPointer implementation. Internally, it's managed as a
# list of strings, with the head of the list being the outermost
# leaf in the JSON structure, and the end of the list being the
# root.
@type t :: [String.t]
alias Exonerate.Type
@spec to_fun(path :: t, keyword) :: atom
@doc """
creates a function call for a specific JSONPointer.
options:
- context: prepends a context to the uri
```elixir
iex> alias Exonerate.Pointer
iex> Pointer.to_fun(["foo", "bar"])
:"/bar/foo"
iex> Pointer.to_fun(["foo~bar", "baz"])
:"/baz/foo~0bar"
iex> Pointer.to_fun(["€", "currency"])
:"/currency/%E2%82%AC"
iex> Pointer.to_fun([], authority: "foo")
:"foo#/"
```
"""
def to_fun(path, opts \\ []) do
path
|> to_uri(opts)
|> String.to_atom
end
@spec from_uri(String.t) :: t
@doc """
converts a uri to a JSONPointer
```elixir
iex> alias Exonerate.Pointer
iex> Pointer.from_uri("/") # the root-only case
[]
iex> Pointer.from_uri("/bar/foo")
["foo", "bar"]
iex> Pointer.from_uri("/baz/foo~0bar")
["foo~bar", "baz"]
iex> Pointer.from_uri("/currency/%E2%82%AC")
["€", "currency"]
```
"""
def from_uri("/" <> rest) do
rest
|> URI.decode()
|> String.split("/")
|> Enum.map(&deescape/1)
|> Enum.reverse
|> case do
[""] -> []
pointer -> pointer
end
end
@spec to_uri(t, keyword) :: String.t
@doc """
creates a JSONPointer to its URI equivalent.
options
- context: prepends a context to the uri.
```elixir
iex> alias Exonerate.Pointer
iex> Pointer.to_uri(["foo", "bar"])
"/bar/foo"
iex> Pointer.to_uri(["foo~bar", "baz"])
"/baz/foo~0bar"
iex> Pointer.to_uri(["€", "currency"])
"/currency/%E2%82%AC"
iex> Pointer.to_uri([], authority: "foo")
"foo#/"
```
"""
def to_uri(path, opts \\ []) do
str = path
|> Enum.reverse
|> Enum.map(&escape/1)
|> Enum.map(&URI.encode/1)
|> Enum.join("/")
lead = List.wrap(if opts[:authority] do
[opts[:authority], "#"]
end)
IO.iodata_to_binary([lead, "/", str])
end
@spec eval(pointer :: t, data :: Type.json) :: Type.json
@doc """
evaluates a JSONPointer given some json data
```elixir
iex> alias Exonerate.Pointer
iex> Pointer.eval([], true)
true
iex> Pointer.eval(["foo~bar"], %{"foo~bar" => "baz"})
"baz"
iex> Pointer.eval(["1", "€"], %{"€" => ["quux", "ren"]})
"ren"
```
"""
def eval([], data), do: data
def eval([_ | _], data) when not (is_list(data) or is_map(data)) do
raise ArgumentError, message: "#{Type.name data} cannot take a path"
end
def eval(pointer, data) do
pointer
|> Enum.reverse
|> eval_rev(data)
end
@spec eval_rev([String.t], Type.json) :: Type.json
defp eval_rev([], data), do: data
defp eval_rev([index | rest], data) when is_list(data) do
eval_rev(rest, Enum.at(data, String.to_integer(index)))
end
defp eval_rev([key | rest], data) when is_map(data) do
eval_rev(rest, Map.fetch!(data, key))
end
@spec deescape(String.t) :: String.t
defp deescape(string) do
string
|> String.replace("~1", "/")
|> String.replace("~0", "~")
end
@spec escape(String.t) :: String.t
defp escape(string) do
string
|> String.replace("~", "~0")
|> String.replace("/", "~1")
end
end
|
lib/exonerate/pointer.ex
| 0.767429
| 0.802362
|
pointer.ex
|
starcoder
|
defmodule K8s.Conn.Auth.Exec do
@moduledoc """
Cluster authentication for kube configs using an `exec` section.
Useful for Kubernetes clusters running on AWS which use IAM authentication (e.g. the `aws-iam-authenticator` binary).
An applicable kube config may look something like this:
```
# ...
users:
- name: staging-user
user:
exec:
# API version to use when decoding the ExecCredentials resource. Required.
apiVersion: client.authentication.k8s.io/v1alpha1
# Command to execute. Required.
command: aws-iam-authenticator
# Arguments to pass when executing the plugin. Optional.
args:
- token
- -i
- staging
# Environment variables to set when executing the plugin. Optional.
env:
- name: "FOO"
value: "bar"
```
"""
@behaviour K8s.Conn.Auth
alias __MODULE__
alias K8s.Conn.Error
defstruct [:command, :env, :args]
@type t :: %__MODULE__{
command: String.t(),
env: %{name: String.t(), value: String.t()},
args: list(String.t())
}
@impl true
@spec create(map() | any, String.t() | any) :: {:ok, t} | {:error, Error.t()} | :skip
def create(%{"exec" => %{"command" => command} = config}, _) do
# Optional:
args = Map.get(config, "args", [])
env = Map.get(config, "env", [])
{:ok,
%__MODULE__{
command: command,
env: format_env(env),
args: args
}}
end
def create(_, _), do: :skip
@spec format_env(nil | list | map) :: map | {binary, binary}
defp format_env(nil), do: %{}
defp format_env(env) when is_list(env), do: Enum.into(env, %{}, &format_env/1)
defp format_env(%{"name" => key, "value" => value}), do: {key, value}
defimpl K8s.Conn.RequestOptions, for: __MODULE__ do
@doc "Generates HTTP Authorization options for auth-provider authentication"
@spec generate(Exec.t()) :: K8s.Conn.RequestOptions.generate_t()
def generate(%Exec{} = provider) do
with {:ok, token} <- Exec.generate_token(provider) do
{
:ok,
%K8s.Conn.RequestOptions{
headers: [{"Authorization", "Bearer #{token}"}],
ssl_options: []
}
}
end
end
end
@doc """
"Generate" a token using the `exec` config in kube config.
"""
@spec generate_token(t) ::
{:ok, binary} | {:error, Jason.DecodeError.t() | Error.t()}
def generate_token(config) do
with {cmd_response, 0} <- System.cmd(config.command, config.args, env: config.env),
{:ok, data} <- Jason.decode(cmd_response),
{:ok, token} when not is_nil(token) <- parse_cmd_response(data) do
{:ok, token}
else
{cmd_response, err_code} when is_binary(cmd_response) and is_integer(err_code) ->
msg = "#{__MODULE__} failed: #{cmd_response}"
{:error, %Error{message: msg}}
error ->
error
end
end
@spec parse_cmd_response(map) :: {:ok, binary} | {:error, Error.t()}
defp parse_cmd_response(%{"kind" => "ExecCredential", "status" => %{"token" => token}}),
do: {:ok, token}
defp parse_cmd_response(_) do
msg = "#{__MODULE__} failed: Unsupported ExecCredential"
{:error, %Error{message: msg}}
end
end
|
lib/k8s/conn/auth/exec.ex
| 0.913472
| 0.700242
|
exec.ex
|
starcoder
|
require IEx
defmodule Chopsticks.Engine do
@moduledoc """
Functions for playing the numbers hand game.
Rules:
Each player starts with 1 of 5 possible fingers up on each hand.
On each turn, one player gives the number of up fingers on one hand to one of the other player's hands.
If a hand has exactly 5 fingers up, all are knocked down.
If a hand plus the given fingers is more than 5, the new number is (old + added) - 5.
If a player has an even number of fingers on one hand, and no fingers on the other hand,
they may use their turn to split their fingers evenly.
The goal is to knock both the other player's hands to 0.
"""
@player %{left: 1, right: 1}
@players %{1 => @player, 2 => @player}
@doc """
Take a single turn, returning the new game state.
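For example (a sketch), player 1 touches the opponent's left hand with its left hand:
```elixir
state = Chopsticks.Engine.starting_state(20)
{:ok, state} = Chopsticks.Engine.turn(state, {:touch, {:left, :left}})
state.players
#=> %{1 => %{left: 1, right: 1}, 2 => %{left: 2, right: 1}}
```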
"""
def turn(%{players: players, next_player: player_number} = game_state, move) do
case update_players(player_number, players, move) do
{:ok, players} -> update_ok_game_state(game_state, players)
{:quit, players, winner} -> update_quit_game_state(game_state, players, winner)
{:error, players, code} -> update_error_game_state(game_state, players, code)
end
end
def starting_state(turns, dumb \\ false) do
%{turns_left: turns, next_player: 1, players: @players, dumb: dumb}
end
@doc """
Play a game of Chopsticks, passing in the number of turns and a function to get the move.
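For example, a bot that always touches with its first non-empty hand (a sketch):
```elixir
get_move = fn player_number, players ->
  me = players[player_number]
  opponent = players[rem(player_number, 2) + 1]
  my_hand = if me.left > 0, do: :left, else: :right
  their_hand = if opponent.left > 0, do: :left, else: :right
  {:touch, {my_hand, their_hand}}
end
Chopsticks.Engine.play(20, get_move: get_move)
#=> 0 (draw), 1 or 2 (the winning player)
```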
"""
def play(turns, callbacks) do
get_move = callbacks[:get_move]
get_move_2 = callbacks[:get_move_2] || callbacks[:get_move]
display_error = callbacks[:display_error] || fn _ -> nil end
play_turn(
starting_state(turns),
get_move,
get_move_2,
display_error
)
end
# A recursive turn.
defp play_turn(
%{next_player: player_number, players: players} = game_state,
get_move,
get_move_2,
display_error
) do
move = get_move.(player_number, players)
case turn(game_state, move) do
{:ok, game_state} ->
play_turn(game_state, get_move_2, get_move, display_error)
{:error, %{error_code: error_code} = game_state} ->
# For errors, display the error, then re-run the turn.
display_error.(error_code)
play_turn(game_state, get_move, get_move_2, display_error)
{:done, %{winner: winner}} ->
# For a finished game, just return.
winner
end
end
def update_players(player_number, players, {type, move}) do
player = players[player_number]
opponent_number = next_player_number(player_number)
opponent = players[opponent_number]
result =
case type do
:quit ->
# When a player quits, the other player wins.
{:quit}
:touch ->
touch_turn(player, opponent, move)
:split ->
split_turn(player, opponent)
unknown_type ->
IO.puts("unknown move type: #{unknown_type}")
{:error, :unknown_move_type}
end
case result do
{:error, code} ->
{:error, players, code}
{:ok, player, opponent} ->
updated_players =
%{}
|> Map.put(player_number, player)
|> Map.put(opponent_number, opponent)
{:ok, updated_players}
{:quit} ->
{:quit, players, opponent_number}
end
end
def touch_turn(player, opponent, {player_direction, opponent_direction}) do
case validate_touch(player, opponent, player_direction, opponent_direction) do
{:error, code} ->
{:error, code}
{:ok} ->
{
:ok,
player,
add_to_hand(opponent, opponent_direction, player[player_direction])
}
end
end
def split_turn(player, opponent) do
case validate_split(player) do
{:error, code} ->
{:error, code}
{:ok} ->
{:ok, split(player), opponent}
end
end
def validate_touch(player, opponent, player_direction, opponent_direction) do
cond do
player[player_direction] === 0 -> {:error, :empty_player_hand}
opponent[opponent_direction] === 0 -> {:error, :empty_opponent_hand}
true -> {:ok}
end
end
def validate_split(player) do
cond do
!(empty_hand?(player.left) || empty_hand?(player.right)) -> {:error, :no_empty_hand}
!(splitable_hand?(player.left) || splitable_hand?(player.right)) -> {:error, :no_even_hand}
true -> {:ok}
end
end
def empty_hand?(0), do: true
def empty_hand?(_), do: false
def splitable_hand?(0), do: false
def splitable_hand?(hand) do
rem(hand, 2) === 0
end
def split(player) do
{_, hand} = Enum.find(player, fn {_, hand} -> splitable_hand?(hand) end)
%{left: split_hand(hand), right: split_hand(hand)}
end
def split_hand(hand), do: round(hand / 2)
def next_player_number(1), do: 2
def next_player_number(2), do: 1
def add_to_hand(player, direction, add_count) do
{_, player} = Map.get_and_update!(player, direction, fn
hand_count ->
{hand_count, add_fingers(hand_count, add_count)}
end)
player
end
def add_fingers(hand_count, add_count) do
new_count = hand_count + add_count
cond do
new_count > 5 -> new_count - 5
new_count === 5 -> 0
true -> new_count
end
end
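# For example: add_fingers(3, 4) returns 2 (7 is more than 5, so 7 - 5), and
# add_fingers(2, 3) returns 0 (exactly 5 knocks all fingers down).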
def check_for_win(players) do
cond do
lost?(players[1]) -> 2
lost?(players[2]) -> 1
true -> 0
end
end
def lost?(player) do
player.left === 0 && player.right === 0
end
defp update_ok_game_state(game_state, updated_players) do
# If there was a move, check if it finished the game.
case check_for_win(updated_players) do
0 -> update_continue_game_state(game_state, updated_players)
winner -> update_win_game_state(game_state, updated_players, winner)
end
end
defp update_continue_game_state(%{turns_left: turns_left, next_player: player_number, dumb: dumb}, players) do
if turns_left === 1 do
{:done, %{players: players,
winner: 0,
turns_left: 0,
dumb: dumb}}
else
{:ok, %{players: players,
turns_left: turns_left - 1,
next_player: next_player_number(player_number),
dumb: dumb}}
end
end
defp update_win_game_state(%{turns_left: turns_left, dumb: dumb}, players, winner) do
{:done, %{players: players,
winner: winner,
turns_left: turns_left - 1,
dumb: dumb}}
end
defp update_quit_game_state(%{turns_left: turns_left, dumb: dumb}, players, winner) do
{:done, %{players: players,
winner: winner,
turns_left: turns_left,
dumb: dumb}}
end
defp update_error_game_state(%{players: players, turns_left: turns_left, next_player: player_number, dumb: dumb}, players, code) do
{:error, %{players: players,
error_code: code,
next_player: player_number,
turns_left: turns_left,
dumb: dumb}}
end
end
|
lib/chopsticks/engine.ex
| 0.757077
| 0.583233
|
engine.ex
|
starcoder
|
defmodule ElixirALE.SPI do
use GenServer
@moduledoc """
This module enables Elixir programs to interact with hardware that's connected
via an SPI bus.
"""
defmodule State do
@moduledoc false
defstruct port: nil, devname: nil
end
@type spi_option ::
{:mode, 0..3} |
{:bits_per_word, 0..16} | # 0 is interpreted as 8-bits
{:speed_hz, pos_integer} |
{:delay_us, non_neg_integer}
# Public API
@doc """
Return a list of available SPI bus device names. If nothing is returned,
it's possible that the kernel driver for that SPI bus is not enabled or the
kernel's device tree is not configured. On Raspbian, run `raspi-config` and
look in the advanced options.
```
iex> ElixirALE.SPI.device_names
["spidev0.0", "spidev0.1"]
```
"""
@spec device_names() :: [binary]
def device_names() do
Path.wildcard("/dev/spidev*")
|> Enum.map(fn(p) -> String.replace_prefix(p, "/dev/", "") end)
end
@doc """
Start and link a SPI GenServer.
SPI bus options include:
* `mode`: This specifies the clock polarity and phase to use. (0)
* `bits_per_word`: bits per word on the bus (8)
* `speed_hz`: bus speed (1000000)
* `delay_us`: delay between transactions (10)
Parameters:
* `devname` is the Linux device name for the bus (e.g., "spidev0.0")
* `spi_opts` is a keyword list to configure the bus
* `opts` are any options to pass to GenServer.start_link
"""
@spec start_link(binary, [spi_option], [term]) :: {:ok, pid}
def start_link(devname, spi_opts \\ [], opts \\ []) do
GenServer.start_link(__MODULE__, {devname, spi_opts}, opts)
end
@doc """
Stop the GenServer and release the SPI resources.
"""
@spec release(pid) :: :ok
def release(pid) do
GenServer.cast pid, :release
end
@doc """
Perform a SPI transfer. The `data` should be a binary containing the bytes to
send. Since SPI transfers simultaneously send and receive, the return value
will be a binary of the same length.
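For example (a sketch; the bus name and bytes are hypothetical):
```elixir
{:ok, pid} = ElixirALE.SPI.start_link("spidev0.0")
response = ElixirALE.SPI.transfer(pid, <<0x9F, 0x00, 0x00>>)
byte_size(response)
#=> 3
```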
"""
@spec transfer(pid, binary) :: binary | {:error, term}
def transfer(pid, data) do
GenServer.call pid, {:transfer, data}
end
# gen_server callbacks
def init({devname, spi_opts}) do
mode = Keyword.get(spi_opts, :mode, 0)
bits_per_word = Keyword.get(spi_opts, :bits_per_word, 8)
speed_hz = Keyword.get(spi_opts, :speed_hz, 1_000_000)
delay_us = Keyword.get(spi_opts, :delay_us, 10)
executable = :code.priv_dir(:elixir_ale) ++ '/ale'
port = Port.open({:spawn_executable, executable},
[{:args, ["spi",
"/dev/#{devname}",
Integer.to_string(mode),
Integer.to_string(bits_per_word),
Integer.to_string(speed_hz),
Integer.to_string(delay_us)]},
{:packet, 2},
:use_stdio,
:binary,
:exit_status])
state = %State{port: port, devname: devname}
{:ok, state}
end
def handle_call({:transfer, data}, _from, state) do
{:ok, response} = call_port(state, :transfer, data)
{:reply, response, state}
end
def handle_cast(:release, state) do
{:stop, :normal, state}
end
# Private helper functions
defp call_port(state, command, arguments) do
msg = {command, arguments}
send state.port, {self(), {:command, :erlang.term_to_binary(msg)}}
receive do
{_, {:data, response}} ->
{:ok, :erlang.binary_to_term(response)}
_ -> :error
end
end
end
|
lib/elixir_ale/spi.ex
| 0.836555
| 0.771284
|
spi.ex
|
starcoder
|
defmodule Exchange.Validations do
@moduledoc """
Validations for Data Structures for the Exchange
"""
@doc """
Function that validates the parameters of an order taking into account the type of the `Exchange.Order`.
Different validations are made:
- price is positive
- side is one of `[:buy, :sell]`
- size is positive
- exp_time is a date in the future
## Parameters
- order_params: Map that represents the parameters of an `Exchange.Order`.
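For example (a sketch; the field values are hypothetical):
```elixir
Exchange.Validations.cast_order(%{
  type: :limit,
  side: :buy,
  size: 100,
  price: 4_000,
  ticker: :AUXLND,
  trader_id: "alchemist1",
  order_id: "9c6f4d4a"
})
#=> {:ok, %Exchange.Order{...}}
```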
"""
@spec cast_order(map) ::
{:ok, Exchange.Order.order()} | {:error, String.t()}
def cast_order(%{type: type} = order_params) when type == :limit or type == :stop_loss do
validate(order_params)
end
def cast_order(%{type: type} = order_params)
when type == :market or type == :marketable_limit do
order_params
|> Map.put(:price, 0)
|> validate
end
defp validate(%{price: p, side: s, size: z} = order_params) do
with {:ok, price} <- validate(:price, p),
{:ok, side} <- validate(:side, s),
{:ok, size} <- validate(:positive_num, z),
{:ok, time} <- validate(:exp_time, order_params[:exp_time]) do
{:ok,
%Exchange.Order{
order_id: order_params[:order_id],
trader_id: order_params[:trader_id],
side: side,
size: size,
initial_size: size,
price: price,
type: order_params[:type],
exp_time: time,
ticker: order_params[:ticker]
}}
else
err -> err
end
end
defp validate(:price, %Money{} = price) do
if Money.positive?(price) and price.currency == :GBP do
{:ok, price.amount}
else
{:error, "Price must be a positive amount in GBP"}
end
end
defp validate(:price, price) when is_number(price) and price >= 0 do
{:ok, price}
end
defp validate(:price, _price), do: {:error, "price must be a positive number"}
defp validate(:side, side) do
if Enum.member?([:buy, :sell], side) do
{:ok, side}
else
{:error, "Order Side accepted values are: - :buy or :sell"}
end
end
defp validate(:positive_num, num) when is_number(num) and num > 0 do
{:ok, num}
end
defp validate(:positive_num, _num), do: {:error, "size must be a positive number"}
defp validate(:exp_time, %DateTime{} = time) do
now = DateTime.utc_now()
case DateTime.compare(time, now) do
:gt -> {:ok, DateTime.to_unix(time, :millisecond)}
_ -> {:error, "exp_time must be a DateTime in the future"}
end
end
defp validate(:exp_time, nil), do: {:ok, nil}
defp validate(:exp_time, _), do: {:error, "exp_time must be a valid DateTime"}
end
|
lib/exchange/validations.ex
| 0.85067
| 0.70724
|
validations.ex
|
starcoder
|
defmodule ExTwilio.Notify.Notification do
@moduledoc """
Represents a Notification resource in the Twilio Notify.
- [Twilio docs](https://www.twilio.com/docs/notify/api/notifications)
- body (optional for all except Alexa) Indicates the notification body text.
Translates to `data.twi_body` for FCM and GCM, `aps.alert.body` for APNS,
`Body` for SMS and Facebook Messenger and `request.message.data` for Alexa.
For SMS either this, `body`, or the `media_url` attribute of the `Sms`
parameter is required. For Facebook Messenger either this parameter or the
body attribute in the `FacebookMessenger` parameter is required.
- priority Two priorities defined: `low` and `high` (default). `low`
optimizes the client app's battery consumption, and notifications may be
delivered with unspecified delay. This is the same as Normal priority for FCM
and GCM or priority 5 for APNS. `high` sends the notification immediately,
and can wake up a sleeping device. This is the same as High priority for FCM
and GCM or priority 10 for APNS. This feature is not supported by SMS and
Facebook Messenger and will be ignored for deliveries via those channels.
- ttl This parameter specifies how long (in seconds) the notification is
valid. Delivery should be attempted if the device is offline. The maximum
time to live supported is 4 weeks. The value zero means that the notification
delivery is attempted immediately once but not stored for future delivery.
The default value is 4 weeks. This feature is not supported by SMS and
Facebook Messenger and will be ignored for deliveries via those channels.
- title (optional for all except Alexa) Indicates the notification title.
This field is not visible on iOS phones and tablets but it is on Apple Watch
and Android devices. Translates to `data.twi_title` for FCM and GCM,
`aps.alert.title` for APNS and `displayInfo.content[0].title`,
`displayInfo.content[].toast.primaryText` of `request.message` for Alexa. It
is not supported for SMS and Facebook Messenger and will be omitted from
deliveries via those channels.
- sound Indicates a sound to be played. Translates to `data.twi_sound` for
FCM and GCM and `aps.sound` for APNS. This parameter is not supported by SMS
and Facebook Messenger and is omitted from deliveries via those channels.
- action Specifies the actions to be displayed for the notification.
Translates to `data.twi_action` for GCM and `aps.category` for APNS. This
parameter is not supported by SMS and Facebook Messenger and is omitted from
deliveries via those channels.
- data This parameter specifies the custom key-value pairs of the
notification's payload. Translates to `data` dictionary in FCM and GCM
payload. FCM and GCM [reserves certain keys](https://firebase.google.com/docs/cloud-messaging/http-server-ref)
that cannot be used for those channels. For APNS, attributes of `Data` will be
inserted into the APNS payload as custom properties outside of the `aps`
dictionary. For Alexa they are added to `request.message.data`. For all
channels, the `twi_` prefix is reserved for Twilio for future use. Requests
including custom data with keys starting with `twi_` will be rejected as 400
Bad request and no delivery will be attempted. This parameter is not
supported by SMS and Facebook Messenger and is omitted from deliveries via
those channels.
- apn APNS specific payload that overrides corresponding attributes in a
generic payload for Bindings with the apn BindingType. This value is mapped
to the Payload item, therefore the `aps` key has to be used to change standard
attributes. Adds custom key-value pairs to the root of the dictionary. Refer
to [APNS documentation](https://developer.apple.com/library/content/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html) for more
details. The `twi_` key prefix for custom key-value pairs is reserved for
Twilio for future use. Custom data with keys starting with `twi_` is not
allowed.
- gcm GCM specific payload that overrides corresponding attributes in generic
payload for Bindings with gcm BindingType. This value is mapped to the root
json dictionary. Refer to [GCM documentation](https://developers.google.com/cloud-messaging/http-server-ref)
for more details. Target parameters `to`, `registration_ids`, and
`notification_key` are not allowed. The `twi_` key prefix for custom key-value
pairs is reserved for Twilio for future use. Custom data with keys starting
with `twi_` is not allowed. FCM and GCM
[reserves certain keys](https://firebase.google.com/docs/cloud-messaging/http-server-ref)
that cannot be used for those channels.
- sms SMS specific payload that overrides corresponding attributes in generic
payload for Bindings with sms BindingType. Each attribute in this JSON
object is mapped to the corresponding form parameter of the Twilio
[Message](https://www.twilio.com/docs/api/rest/sending-messages) resource.
The following parameters of the Message resource are supported in snake case
format: `body`, `media_urls`, `status_callback`, and `max_price`. The
`status_callback` parameter overrides the corresponding parameter in the
messaging service if configured. The `media_urls` expects a JSON array.
- facebook_messenger Messenger specific payload that overrides corresponding
attributes in generic payload for Bindings with facebook-messenger
BindingType. This value is mapped to the root json dictionary of Facebook's
[Send API request](https://developers.facebook.com/docs/messenger-platform/send-api-reference).
Overriding the `recipient` parameter is not allowed.
- fcm FCM specific payload that overrides corresponding attributes in generic
payload for Bindings with fcm BindingType. This value is mapped to the root
json dictionary. Refer to [FCM documentation](https://firebase.google.com/docs/cloud-messaging/http-server-ref#downstream)
for more details. Target parameters `to`, `registration_ids`, `condition`,
and `notification_key` are not allowed. The `twi_` key prefix for custom
key-value pairs is reserved for Twilio for future use. Custom data with keys
starting with `twi_` is not allowed. FCM and GCM
[reserve certain keys](https://firebase.google.com/docs/cloud-messaging/http-server-ref)
that cannot be used for those channels.
- segment The segment
- alexa The alexa
- to_binding The destination address in a JSON object (see attributes below).
Multiple ToBinding parameters can be included but the total size of the
request entity should not exceed 1MB. This is typically sufficient for
10,000 phone numbers.
- identity Delivery will be attempted only to Bindings with an Identity in
this list. Maximum 20 items allowed in this list.
- tag Delivery will be attempted only to Bindings that have all of the Tags
in this list. Maximum 5 items allowed in this list. The implicit tag "all" is
available to notify all Bindings in a Service instance. Similarly the
implicit tags "apn", "fcm", "gcm", "sms" and "facebook-messenger" are
available to notify all Bindings of the given type.
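## Example

A minimal creation sketch. The service SID and field values are hypothetical;
`create/2` is imported from `ExTwilio.Resource` below, and the `service`
option matches the parent declared in `parents/0`:

```elixir
ExTwilio.Notify.Notification.create(
  [identity: "user-0001", body: "Hello from Twilio Notify"],
  service: "ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
)
```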
"""
defstruct sid: nil,
account_sid: nil,
service_sid: nil,
date_created: nil,
identities: nil,
tags: nil,
tag: nil,
segments: nil,
priority: nil,
ttl: nil,
title: nil,
body: nil,
sound: nil,
action: nil,
data: nil,
apn: nil,
gcm: nil,
fcm: nil,
sms: nil,
facebook_messenger: nil,
alexa: nil,
to_binding: nil,
identity: nil
use ExTwilio.Resource,
import: [
:create
]
def parents,
do: [
%ExTwilio.Parent{module: ExTwilio.Notify.Service, key: :service}
]
end
|
lib/ex_twilio/resources/notify/notification.ex
| 0.763043
| 0.545346
|
notification.ex
|
starcoder
|
defmodule Rummage.Phoenix.SortView do
@moduledoc """
Sort View Module for Rummage. This has view helpers that can generate rummagable links and forms.
Usage:
```elixir
defmodule MyApp.ProductView do
use MyApp.Web, :view
use Rummage.Phoenix.View, only: [:sort]
end
```
OR
```elixir
defmodule MyApp.ProductView do
use MyApp.Web, :view
use Rummage.Phoenix.View
end
```
"""
use Rummage.Phoenix.ThemeAdapter
@doc """
Helper functions for sorting.
Provides the `sort_link` helper for creating sort links in an `html.eex`
file when using `Phoenix`.
Usage:
Just add the following code in the index template. Make sure that you're passing
rummage from the controller. Please look at the
[README](https://github.com/Excipients/rummage_phoenix) for more details
```elixir
<%= sort_link @conn, @rummage, field, "Name" %>
<%= sort_link @conn, {@rummage, :rummage_key}, field, "Name" %>
```
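
Options such as `:asc_text`/`:desc_text` (or `:asc_icon`/`:desc_icon`) override
the default "↑"/"↓" indicators, while `:helpers` and `:struct` are used to
build the index path. A sketch with hypothetical module and struct names:

```elixir
<%= sort_link @conn, @rummage, :name, "Name",
      asc_text: "▲", desc_text: "▼",
      helpers: MyApp.Router.Helpers, struct: "product" %>
```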
"""
def sort_link(conn, {rummage, rummage_key}, field, name, opts) do
opts = opts
|> Keyword.merge([rummage_key: rummage_key])
sort_link(conn, rummage, field, name, opts)
end
def sort_link(conn, rummage, field, name, opts) do
sort_params = rummage.sort
asc_icon = Keyword.get(opts, :asc_icon)
asc_text = Keyword.get(opts, :asc_text, "↑")
desc_icon = Keyword.get(opts, :desc_icon)
desc_text = Keyword.get(opts, :desc_text, "↓")
rummage_key = Keyword.get(opts, :rummage_key, :rummage)
# Drop pagination unless we're showing the entire result set
rummage_params = if rummage.params.paginate && rummage.params.paginate.per_page == -1 do
rummage.params
else
rummage.params
|> Map.drop([:paginate])
end
if sort_params.name == Atom.to_string(field) do
case sort_params.order do
"asc" ->
rummage_params = rummage_params
|> Map.put(:sort, %{name: field, order: "desc"})
url = index_path(opts, [conn, :index, %{rummage_key => rummage_params}])
sort_text_or_image(url, [img: desc_icon, text: desc_text], name)
"desc" ->
rummage_params = rummage_params
|> Map.put(:sort, %{name: field, order: "asc"})
url = index_path(opts, [conn, :index, %{rummage_key => rummage_params}])
sort_text_or_image(url, [img: asc_icon, text: asc_text], name)
end
else
rummage_params = rummage_params
|> Map.put(:sort, %{name: field, order: "asc"})
url = index_path(opts, [conn, :index, %{rummage_key => rummage_params}])
sort_text_or_image(url, [], name)
end
end
defp index_path(opts, params) do
helpers = opts[:helpers]
path_function_name = String.to_atom("#{opts[:struct]}_path")
apply(helpers, path_function_name, params)
end
end
|
lib/rummage_phoenix/hooks/views/sort_view.ex
| 0.687945
| 0.757481
|
sort_view.ex
|
starcoder
|
defmodule Attempt do
@moduledoc """
"""
alias Attempt.{Bucket, Retry}
@doc """
Implements a block form of `Attempt.run/2`.
## Examples
iex> require Attempt
...> Attempt.execute tries: 3 do
...> IO.puts "Welcome to Attempt"
...> end
Welcome to Attempt
:ok
"""
defmacro execute(options, block) do
if match?({:fn, _, _}, options) do
quote do
Attempt.run(unquote(options), unquote(block))
end
else
block = block[:do]
quote do
Attempt.run(fn -> unquote(block) end, unquote(options))
end
end
end
defmacro execute(options) do
{block, options} = Keyword.pop(options, :do)
if Enum.empty?(options) do
quote do
Attempt.run(fn -> unquote(block) end, [])
end
else
quote do
Attempt.run(fn -> unquote(block) end, unquote(options))
end
end
end
@doc """
Run a function in the context of a retry budget.
A retry budget has several components:
* a `token bucket` which provides retry throttling for any retries
* a `retry policy` which determines whether to return, retry or reraise
* a `backoff` strategy which determines the retry backoff strategy
* a maximum number of allowable `tries` that are performed in an
effort to generate a non-error return
The given function will be executed until a successful return is delivered
or the maximum number of tries is exceeded or if no token could be claimed.
## Arguments
* `fun` is an anonymous function or function reference to be executed.
* `options` is a keyword list of options to configure the retry budget
## Options
* `:tries` is the number of times the function will be executed if an error
is returned from the function
* `:token_bucket` is the token bucket used to throttle the execution rate.
Currently only one token bucket is implemented. See `Attempt.Bucket.Token`
* `:retry_policy` is a module that implements the `Attempt.Retry` behaviour
to classify the return value from the `fun` as either `:return`, `:retry` or
`reraise`. The default `retry_policy` is `Attempt.Retry.Policy.Default`.
* `:backoff` is a module that implements the `Attempt.Retry.Backoff`
behaviour which is used to determine the backoff strategy for retries.
## Default options
If not supplied the default options are:
* `:tries` is `1`
* `:token_bucket` is `Attempt.Bucket.Infinite.new(@default_bucket_name)`
* `:retry_policy` is `Attempt.Retry.Policy.Default`
* `:backoff` is `Attempt.Retry.Backoff.Exponential`
## Retry policy actions
In order to ascertain whether a function should be retried each return value
needs to be classified. The classification is the responsibility of the
`:retry_policy` module. Three classifications are available:
* `:return` means that the return value of the function is considered
a success and is returned to the caller
* `:retry` means that a failure return was detected but that the failure
is considered transient and is therefore eligible to be retried
* `:reraise` means that an exception was detected and the exception is not
considered transient. Therefore the exception should be re-raised.
See also `Attempt.Retry.Exception` which defines a protocol for determining
the classification of exceptions and `Attempt.Retry.Policy.Default` which
implements the default classifier.
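
A custom policy is a module exporting `action/1` (a minimal sketch; the
`@behaviour` name is taken from the description above):

defmodule MyApp.RetryOnErrorTuple do
@behaviour Attempt.Retry

# Retry bare :error atoms and {:error, _} tuples; return anything else.
def action(:error), do: :retry
def action({:error, _reason}), do: :retry
def action(_other), do: :return
end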
## Examples
iex#> Attempt.run fn -> "Hello World" end
"Hello World"
iex#> Attempt.run fn -> IO.puts "Reraise Failure!"; div(1,0) end, tries: 3
Reraise Failure!
** (ArithmeticError) bad argument in arithmetic expression
:erlang.div(1, 0)
(attempt) lib/attempt.ex:119: Attempt.execute_function/1
(attempt) lib/attempt.ex:98: Attempt.execute/6
iex#> Attempt.run fn -> IO.puts "Try 3 times"; :error end, tries: 3
Try 3 times
Try 3 times
Try 3 times
:error
# Create a bucket that adds a new token only every 10 seconds
iex#> {:ok, bucket} = Attempt.Bucket.Token.new :test, fill_rate: 10_000
iex#> Attempt.run fn ->
IO.puts "Try 11 times and we'll timeout claiming a token"
:error
end, tries: 11, token_bucket: bucket
Try 11 times and we'll timeout claiming a token
Try 11 times and we'll timeout claiming a token
Try 11 times and we'll timeout claiming a token
Try 11 times and we'll timeout claiming a token
Try 11 times and we'll timeout claiming a token
Try 11 times and we'll timeout claiming a token
Try 11 times and we'll timeout claiming a token
Try 11 times and we'll timeout claiming a token
Try 11 times and we'll timeout claiming a token
Try 11 times and we'll timeout claiming a token
{:error, {:timeout, {GenServer, :call, [:test, :claim_token, 5000]}}}
"""
@spec run(function(), Keyword.t() | Retry.Budget.t()) :: any()
def run(fun, options \\ [])
def run(fun, options) when is_list(options) do
options =
default_options()
|> Keyword.merge(options)
|> Enum.into(%{})
|> Map.put(:current_try, 1)
|> maybe_start_default_bucket
run(fun, struct(Retry.Budget, options))
end
def run(
fun,
%Retry.Budget{
retry_policy: retry_policy,
token_bucket: token_bucket,
tries: max_tries,
current_try: current_try
} = budget
) do
with {:ok, budget} <- backoff(budget),
{:ok, _remaining_tokens} <- Bucket.claim_token(token_bucket, budget),
result = execute_function(fun) do
case retry_policy.action(result) do
:return ->
result
:retry ->
if current_try >= max_tries do
result
else
run(fun, %Retry.Budget{budget | current_try: current_try + 1})
end
:reraise ->
{exception, stacktrace} = result
Kernel.reraise(exception, stacktrace)
end
end
end
defp execute_function(fun) do
try do
fun.()
rescue
e ->
{e, System.stacktrace()}
end
end
defp backoff(budget) do
delay = budget.backoff_strategy.delay(budget)
if delay > 0, do: Process.sleep(delay)
{:ok, %Retry.Budget{budget | last_sleep: delay}}
end
@default_bucket_name Attempt.Bucket.Token.Default
@default_tries 1
defp default_options do
[
tries: @default_tries,
token_bucket: nil,
retry_policy: Retry.Policy.Default
]
end
@default_bucket_type Bucket.Infinite
defp maybe_start_default_bucket(%{token_bucket: nil} = options) do
case @default_bucket_type.new(@default_bucket_name) do
{:ok, bucket} ->
%{options | token_bucket: bucket}
{:error, {Attempt.TokenBucket.AlreadyStartedError, _}, bucket} ->
%{options | token_bucket: bucket}
end
end
defp maybe_start_default_bucket(options) do
options
end
end
|
lib/attempt.ex
| 0.902995
| 0.62144
|
attempt.ex
|
starcoder
|
defmodule CardBinEx do
@moduledoc """
Module to discover the CreditCard brand based in the first digits.
### Valid CreditCard brands
| brand | alias |
|------------------|-------------|
| Visa | `visa` |
| Elo | `elo` |
| JCB | `jcb` |
| Diners | `diners` |
| Discover | `discover` |
| MasterCard | `master` |
| Hipercard | `hipercard` |
| American Express | `amex` |
"""
import CardBinEx.Gettext
alias CardBinEx.{
Brand,
Error,
Validate
}
defmodule Error do
@moduledoc """
Exception raised when the credit card BIN is invalid
"""
defexception message: "invalid card"
end
@doc """
Get the credit card brand based on the first digits.
## Examples
iex> CardBinEx.brand_from_number("4716892")
{:ok, "visa"}
iex> CardBinEx.brand_from_number("9716892")
{:error, :card_brand, "9716892"}
"""
def brand_from_number(number) do
number
|> match()
|> format()
|> respond(number)
end
@doc """
Get the credit card brand based on the first digits and return only the brand.
## Examples
iex> CardBinEx.brand_from_number!("4716892")
"visa"
iex> try do
...> CardBinEx.brand_from_number!("9716892")
...> rescue
...> e in CardBinEx.Error -> IO.puts(e.message)
...> end
invalid card number
"""
def brand_from_number!(number) do
case brand_from_number(number) do
{:ok, card_brand} ->
card_brand
{:error, :card_bin, card_number} ->
raise Error, message: dgettext("card", "invalid card number", number: card_number)
end
end
defp respond(brand, number) when is_nil(brand) do
{:error, :card_bin, number}
end
defp respond(brand, _number) do
{:ok, brand}
end
defp match(number) do
Enum.find(Brand.options(), fn brand ->
brand
|> Brand.data()
|> Validate.number(number)
end)
end
defp format(brand) when is_nil(brand), do: nil
defp format(brand) do
Atom.to_string(brand)
end
end
|
lib/card_bin.ex
| 0.712132
| 0.509276
|
card_bin.ex
|
starcoder
|
defmodule Elrondex.Transaction do
alias Elrondex.{Transaction, Account, REST}
@sign_fields [
:nonce,
:value,
:receiver,
:sender,
:gasPrice,
:gasLimit,
:data,
:chainID,
:version
]
@number_sign_fields [:nonce, :gasPrice, :gasLimit, :version]
defstruct network: nil,
account: nil,
# Require signature fields in signature order
# nonce signature field 1/9
nonce: nil,
# value signature field 2/9
# TODO Do we store integer or string value ?
value: nil,
# receiver signature field 3/9
receiver: nil,
# sender signature field 4/9
sender: nil,
# gasPrice signature field 5/9, loaded from Network.erd_min_gas_price
gasPrice: nil,
# gasLimit signature field 6/9, loaded from Network.erd_min_gas_limit
gasLimit: nil,
# data signature field 7/9
data: nil,
# chainID signature field 8/9, loaded from Network.erd_chain_id
chainID: nil,
# version signature field 9/9, loaded from Network.erd_min_transaction_version
version: nil,
# Computed signature based on 1-9 signature fields
signature: nil
@doc """
Creates a new transaction.
## Arguments
* `account` - the account that signs the transaction, called 'sender'.
* `receiver` - the receiver of the native EGLD amount sent by the sender.
* `value` - the native EGLD amount sent from sender to receiver.
* `data` - the data associated with the transaction.
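
## Example

A sketch; the receiver address is hypothetical and the value is 1 EGLD
expressed in its smallest denomination:

tr = Elrondex.Transaction.transaction(account, "erd1...receiver", 1_000_000_000_000_000_000)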
"""
def transaction(%Account{} = account, receiver, value, data \\ nil) do
%Transaction{
account: account,
sender: account.address,
receiver: receiver,
value: value,
data: data
}
end
def to_signed_map(%Transaction{} = tr) do
# Encode data as base64
tr =
case tr.data do
nil -> tr
_ -> %{tr | data: Base.encode64(tr.data)}
end
# Ensure value is returned as string
tr =
case tr.value do
int_value when is_integer(int_value) -> %{tr | value: Integer.to_string(int_value)}
_ -> tr
end
Enum.reduce([:signature | @sign_fields], %{}, fn f, acc -> Map.put(acc, f, Map.get(tr, f)) end)
end
@doc """
Signs a transaction.
## Arguments
* `tr` - the transaction to be signed
## Examples
iex> Elrondex.Test.Bob.transfer_1_egld_to_alice()
...> |> Elrondex.Transaction.sign()
...> |> Map.get(:signature)
"89c2d0de0612b99ba51235801b3e6488d9fb5e1b33c7d858afd0517df9258056a5d07b573a211ccd4c99f4f130ef6dcfdccd30079feb53c9d5775970b97fc802"
"""
def sign(%Transaction{} = tr) do
signature =
tr
|> data_to_sign()
|> Account.sign(tr.account)
|> Base.encode16(case: :lower)
%{tr | signature: signature}
end
@doc """
Verifies the signature of a transaction.
## Arguments
* `tr` - the signed transaction
* `account` - the account that signs the transaction
## Examples
iex> Elrondex.Test.Bob.transfer_1_egld_to_alice()
...> |> Elrondex.Transaction.sign()
...> |> Elrondex.Transaction.sign_verify()
true
"""
def sign_verify(%Transaction{} = tr, %Account{} = account) do
data_to_sign(tr)
|> Account.sign_verify(tr.signature, account)
end
def sign_verify(%Transaction{} = tr) do
sign_verify(tr, Account.from_address(tr.sender))
end
@doc """
Prepares a transaction to be done on certain network.
## Arguments
* `tr` - the transaction details
* `network` - the network used for that transaction
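
## Example

A sketch of the typical pipeline, assuming `network` holds the `erd_*`
attributes referenced below:

tr
|> Elrondex.Transaction.prepare(network)
|> Elrondex.Transaction.sign()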
"""
def prepare(%Transaction{} = tr, network, nonce \\ nil) do
with {:ok, tr} <- prepare_network(tr, network),
{:ok, tr} <- prepare_nonce(tr, nonce),
{:ok, tr} <- prepare_gas_limit(tr, tr.network) do
tr
else
{:error, reason} -> {:error, reason}
end
end
def prepare_network(%Transaction{} = tr, network) do
{:ok,
%{
tr
| network: network,
gasPrice: network.erd_min_gas_price,
# Is calculated by prepare_gas_limit
# gasLimit: network.erd_min_gas_limit,
chainID: network.erd_chain_id,
version: network.erd_min_transaction_version
}}
end
@doc """
Prepares the nonce of a transaction.
## Arguments
* `tr` - the transaction details
* `nonce` - the nonce (integer)
"""
def prepare_nonce(%Transaction{} = tr, nonce) when is_integer(nonce) do
{:ok, %{tr | nonce: nonce}}
end
def prepare_nonce(%Transaction{} = tr, nonce) when is_nil(nonce) do
case REST.get_address_nonce(tr.network, tr.sender) do
{:ok, sender_nonce} -> {:ok, %{tr | nonce: sender_nonce}}
{:error, reason} -> {:error, reason}
end
end
def prepare_nonce(%Transaction{} = _tr, nonce) do
{:error, {:invalid_nonce, nonce}}
end
# We calculate gasLimit only when it has not already been set (gasLimit: nil)
@doc """
Calculates the gas limit for certain transaction.
## Arguments
* `tr` - the transaction details
* `network` - the network used for that transaction
"""
def prepare_gas_limit(%Transaction{gasLimit: nil} = tr, network) do
# TODO calculate gasLimit
tr =
case tr.data do
nil ->
%{tr | gasLimit: network.erd_min_gas_limit}
data ->
gas_limit = network.erd_min_gas_limit + byte_size(data) * network.erd_gas_per_data_byte
%{tr | gasLimit: gas_limit}
end
{:ok, tr}
end
def prepare_gas_limit(%Transaction{} = tr, _network) do
{:ok, tr}
end
def sign_fields, do: @sign_fields
def sign_field_type(field) when field in @number_sign_fields, do: :number
def sign_field_type(_), do: :string
# TODO, is not used
def is_required_sign_field(:data), do: false
def is_required_sign_field(field) when field in @sign_fields, do: true
def is_required_sign_field(_), do: false
@doc """
Returns raw data to sign in JSON format.
## Arguments
* `tr` - the transaction
## Examples
iex> Elrondex.Test.Bob.transfer_1_egld_to_alice()
...> |> Elrondex.Transaction.data_to_sign()
"{\"nonce\":1,
\"value\":\"1000000000000000000\",
\"receiver\":\"erd18n5zgmet82jvqag9n8pcvzdzlgqr3jhqxld2z6nwxzekh4cwt6ps87zfmu\",
\"sender\":\"erd1edmdkecu95u6aj9ehd0lf3d97qw85k86pkqqdu5029zcydslg7qs3tdc59\",
\"gasPrice\":10
,\"gasLimit\":100
,\"chainID\":\"T\"}"
"""
def data_to_sign(tr) do
json_to_sign =
@sign_fields
|> Enum.map(fn field -> prepare_sign_field(tr, field) end)
|> Enum.filter(fn value -> value != nil end)
|> Enum.join(",")
"{#{json_to_sign}}"
end
def prepare_sign_field(tr, field) do
value = Map.get(tr, field)
case {field, sign_field_type(field)} do
{:data, _} -> prepare_sign_field_base64(field, value)
{_, :string} -> prepare_sign_field_string(field, value)
{_, :number} -> prepare_sign_field_number(field, value)
end
end
def prepare_sign_field_base64(field, value) do
case value do
nil -> nil
"" -> nil
data -> prepare_sign_field_string(field, Base.encode64(data))
end
end
def prepare_sign_field_string(field, value) do
case value do
nil -> nil
"" -> nil
data -> "\"#{field}\":\"#{data}\""
end
end
def prepare_sign_field_number(field, value) do
case value do
nil -> nil
"" -> nil
data when is_integer(data) -> "\"#{field}\":#{data}"
_ -> nil
end
end
end
|
lib/elrondex/transaction.ex
| 0.699357
| 0.579817
|
transaction.ex
|
starcoder
|
defmodule ElasticsearchElixirBulkProcessor.Helpers.Events do
alias ElasticsearchElixirBulkProcessor.Items.{Create, Index, Update, Delete}
@doc ~S"""
Return the summed byte size of the given items' payloads
## Examples
iex> ElasticsearchElixirBulkProcessor.Helpers.Events.byte_sum([%ElasticsearchElixirBulkProcessor.Items.Index{index: "test", source: %{"test" => "test"}}])
43
iex> ElasticsearchElixirBulkProcessor.Helpers.Events.byte_sum([
...> %ElasticsearchElixirBulkProcessor.Items.Index{index: "test", source: %{"test" => "test"}},
...> %ElasticsearchElixirBulkProcessor.Items.Index{index: "test", source: %{"test" => "test"}}
...> ])
86
iex> ElasticsearchElixirBulkProcessor.Helpers.Events.byte_sum([])
0
"""
def byte_sum([]),
do: 0
def byte_sum(item_list) when is_list(item_list),
do:
Stream.map(item_list, fn %struct{} = item
when struct in [Create, Index, Update, Delete] ->
struct.to_payload(item) |> byte_size
end)
|> Enum.sum()
@doc ~S"""
Split a list of items into a first chunk of at least the given byte size and the rest of the list.
## Examples
iex> alias ElasticsearchElixirBulkProcessor.Items.Index
...> [
...> %Index{index: "test", source: %{"test" => "test"}},
...> %Index{index: "test", source: %{"test" => "test"}},
...> %Index{index: "test", source: %{"test" => "test"}}
...> ]
...> |> ElasticsearchElixirBulkProcessor.Helpers.Events.split_first_bytes(43)
{
[%Index{index: "test", source: %{"test" => "test"}}],
[%Index{index: "test", source: %{"test" => "test"}}, %Index{index: "test", source: %{"test" => "test"}}]
}
iex> alias ElasticsearchElixirBulkProcessor.Items.Index
...> [
...> %Index{index: "test", source: %{"test" => "test"}},
...> %Index{index: "test", source: %{"test" => "test"}},
...> %Index{index: "test", source: %{"test" => "test"}}
...> ]
...> |> ElasticsearchElixirBulkProcessor.Helpers.Events.split_first_bytes(43 * 2)
{
[%Index{index: "test", source: %{"test" => "test"}}, %Index{index: "test", source: %{"test" => "test"}}],
[%Index{index: "test", source: %{"test" => "test"}}]
}
iex> alias ElasticsearchElixirBulkProcessor.Items.Index
...> [
...> %Index{index: "test", source: %{"test" => "test"}},
...> %Index{index: "test", source: %{"test" => "test"}},
...> %Index{index: "test", source: %{"test" => "test"}}
...> ]
...> |> ElasticsearchElixirBulkProcessor.Helpers.Events.split_first_bytes(0)
{
[],
[%Index{index: "test", source: %{"test" => "test"}}, %Index{index: "test", source: %{"test" => "test"}}, %Index{index: "test", source: %{"test" => "test"}}]
}
"""
def split_first_bytes(list, first_byte_size) do
list
|> Enum.reduce(
{[], []},
fn element, acc -> build_up_first_chunk_elements(element, acc, first_byte_size) end
)
end
defp build_up_first_chunk_elements(element = %struct{}, {first, rest}, first_byte_size)
when struct in [Create, Index, Update, Delete] do
if first |> byte_sum >= first_byte_size do
{first, rest ++ [element]}
else
{first ++ [element], rest}
end
end
@doc ~S"""
Split a list of items into chunks, starting a new chunk once the current one reaches the given byte size.
## Examples
iex> alias ElasticsearchElixirBulkProcessor.Items.Index
...> [
...> %Index{index: "test", source: %{"test" => "test"}},
...> %Index{index: "test", source: %{"test" => "test"}}
...> ]
...> |> ElasticsearchElixirBulkProcessor.Helpers.Events.chunk_bytes(10)
[[%Index{index: "test", source: %{"test" => "test"}}], [%Index{index: "test", source: %{"test" => "test"}}]]
"""
def chunk_bytes(list, chunk_byte_size) do
list
|> Enum.reduce(
[[]],
fn element, acc -> build_up_chunk_elements(element, acc, chunk_byte_size) end
)
|> Enum.reverse()
end
defp build_up_chunk_elements(
element = %struct{},
[head | tail],
chunk_byte_size
)
when is_list(head) and struct in [Create, Index, Update, Delete] do
current_byte_size = byte_sum(head)
if current_byte_size >= chunk_byte_size do
[[element] | [head | tail]]
else
[head ++ [element] | tail]
end
end
end
|
lib/elasticsearch_elixir_bulk_processor/helpers/events.ex
| 0.720073
| 0.636325
|
events.ex
|
starcoder
|
defmodule JaResource.Show do
import Plug.Conn
@moduledoc """
Defines a behaviour for displaying a resource and the function to execute it.
It relies on (and uses):
* JaResource.Record
* JaResource.Serializable
When used JaResource.Show defines the `show/2` action suitable for handling
json-api requests.
To customize the behaviour of the show action the following callbacks can be implemented:
* handle_show/2
* render_index/2
* JaResource.Record.record/2
* JaResource.Record.records/1
"""
@doc """
Returns the model to be represented by this resource.
Default implementation is the result of the JaResource.Record.record/2
callback.
`handle_show/2` can return nil to send a 404, a conn with any response/body,
or a record to be serialized.
Example custom implementation:
def handle_show(conn, id) do
Repo.get_by(Post, slug: id)
end
In most cases JaResource.Record.record/2 and JaResource.Record.records/1 are
the better customization hooks.
"""
@callback handle_show(Plug.Conn.t(), JaResource.id()) :: Plug.Conn.t() | JaResource.record()
@doc """
Returns a `Plug.Conn` in response to successful show.
Default implementation renders the view.
"""
@callback render_show(Plug.Conn.t(), JaResource.record()) :: Plug.Conn.t()
defmacro __using__(_) do
quote do
use JaResource.Record
use JaResource.Serializable
@behaviour JaResource.Show
def handle_show(conn, id), do: record(conn, id)
def render_show(conn, model) do
conn
|> Phoenix.Controller.render(:show, data: model)
end
defoverridable handle_show: 2, render_show: 2
end
end
@doc """
Execute the show action on a given module implementing Show behaviour and conn.
"""
def call(controller, conn) do
conn
|> controller.handle_show(conn.params["id"])
|> JaResource.Show.respond(conn, controller)
end
@doc false
def respond(%Plug.Conn{} = conn, _old_conn, _controller), do: conn
def respond(nil, conn, _controller) do
conn
|> put_status(:not_found)
|> Phoenix.Controller.render(:errors,
data: %{status: 404, title: "Not Found", detail: "The resource was not found"}
)
end
def respond(model, conn, controller), do: controller.render_show(conn, model)
end
|
lib/ja_resource/show.ex
| 0.818918
| 0.444565
|
show.ex
|
starcoder
|
defmodule Cldr.Calendar do
@moduledoc """
Calendar support functions for formatting dates, times and datetimes.
`Cldr` defines formats for several calendars, the names of which
are returned by `Cldr.Calendar.known_calendars/0`.
Currently this implementation only supports the `:gregorian`
calendar which aligns with the proleptic Gregorian calendar
defined by Elixir, `Calendar.ISO`.
This module will be extracted in the future to become part of
a separate calendrical module.
"""
alias Cldr.LanguageTag
alias Cldr.Calendar.Conversion
alias Cldr.Calendar.ISOWeek
alias Cldr.Locale
require Cldr
defdelegate known_calendars, to: Cldr
@doc """
Returns the default CLDR calendar name.
Note this is not the same as the default calendar
`Calendar.ISO` supported by Elixir.
## Example
iex> Cldr.Calendar.default_calendar
:gregorian
"""
@default_calendar :gregorian
def default_calendar do
@default_calendar
end
# Default territory is "World"
@default_territory Cldr.default_territory()
@doc """
Returns the CLDR data that defines the structure
of a week in different locales.
## Example
Cldr.Calendar.week_info
%{first_day: %{IN: "sun", SM: "mon", MN: "mon", MZ: "sun", CR: "mon", AT: "mon",
LA: "sun", EE: "mon", NL: "mon", PT: "mon", PH: "sun", BG: "mon", LT: "mon",
ES: "mon", OM: "sat", SY: "sat", US: "sun", EC: "mon", SG: "sun", DM: "sun",
AR: "sun", MK: "mon", YE: "sun", KW: "sat", GB: "mon",
"GB-alt-variant": "sun", AD: "mon", UZ: "mon", KG: "mon", CZ: "mon",
FI: "mon", RO: "mon", TR: "mon", AI: "mon", MM: "sun", AS: "sun", BS: "sun",
IT: "mon", MX: "sun", BR: "sun", ID: "sun", NZ: "sun", GP: "mon", BE: "mon",
CO: "sun", GR: "mon", NP: "sun", ME: "mon", MO: "sun", ...},
min_days: %{SM: 4, SJ: 4, AT: 4, EE: 4, NL: 4, PT: 4, BG: 4, LT: 4, ES: 4,
US: 1, GI: 4, GB: 4, AD: 4, CZ: 4, FI: 4, IT: 4, GP: 4, JE: 4, BE: 4, GR: 4,
"001": 1, VI: 1, RE: 4, SE: 4, GU: 1, IS: 4, AN: 4, IM: 4, GG: 4, CH: 4,
FO: 4, UM: 1, SK: 4, AX: 4, LU: 4, FR: 4, IE: 4, HU: 4, FJ: 4, MC: 4, GF: 4,
NO: 4, DK: 4, DE: 4, LI: 4, PL: 4, VA: 4, MQ: 4}, weekend_end: nil,
weekend_start: nil}
"""
@week_info Cldr.Config.week_info()
def week_info do
@week_info
end
@doc """
Returns the first day of a week for a locale as an ordinal number
in the range one to seven with one representing Monday and seven
representing Sunday.
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/1`
## Example
iex> Cldr.Calendar.first_day_of_week "en"
7
iex> Cldr.Calendar.first_day_of_week "en-GB"
1
"""
def first_day_of_week(locale) do
with {:ok, locale} <- Cldr.validate_locale(locale) do
(get_in(week_info(), [:first_day, territory_from_locale(locale)]) ||
get_in(week_info(), [:first_day, @default_territory]))
|> day_ordinal
else
{:error, reason} -> {:error, reason}
end
end
@doc """
Returns the minimum days required in a week for it
to be considered week one of a year.
* `territory` is any territory code returned by `Cldr.known_territories/0`
## Examples
iex> Cldr.Calendar.minumim_days_in_week_one :US
1
iex> Cldr.Calendar.minumim_days_in_week_one :FR
4
"""
def minumim_days_in_week_one(territory \\ @default_territory) do
with {:ok, territory} <- Cldr.validate_territory(territory) do
get_in(week_info(), [:min_days, territory])
else
{:error, reason} -> {:error, reason}
end
end
@doc """
Returns the calendar type and calendar era definitions
for the calendars in CLDR.
## Example
Cldr.Calendar.calendars
%{buddhist: %{calendar_system: "solar", eras: [{0, %{start: -198326}}]},
chinese: %{calendar_system: "lunisolar", eras: [{0, %{start: -963144}}]},
coptic: %{calendar_system: "other",
eras: [{0, %{end: 103604}}, {1, %{start: 103605}}]},
dangi: %{calendar_system: "lunisolar", eras: [{0, %{start: -852110}}]},
ethiopic: %{calendar_system: "other",
eras: [{0, %{end: 2797}}, {1, %{start: 2798}}]},
ethiopic_amete_alem: %{eras: [{0, %{end: -2006036}}]},
gregorian: %{calendar_system: "solar",
eras: [{0, %{end: 0}}, {1, %{start: 1}}]}, ...
"""
@calendar_info Cldr.Config.calendar_info()
def calendars do
@calendar_info
end
@doc """
Returns the era number for a given date and calendar
* `date` is a `Date` or any struct with the fields `:year`,
`:month`, `:day` and `:calendar`
* `calendar` is any calendar returned by `Cldr.Calendar.known_calendars/0`
## Example
iex> Cldr.Calendar.era_number_from_date ~D[2017-09-03], :gregorian
1
iex> Cldr.Calendar.era_number_from_date ~D[0000-09-03], :gregorian
0
iex> Cldr.Calendar.era_number_from_date ~D[1700-09-03], :japanese
208
"""
def era_number_from_date(date, calendar \\ Cldr.Calendar.default_calendar()) do
date
|> Conversion.to_iso_days()
|> era_from_iso_days(calendar)
end
@doc """
Returns the era number for a given rata die.
The era number is an index into Cldr list of
eras for a given calendar which is primarily
for the use of `Cldr.Date.to_string/2` when
processing the format symbol `G`. For further
information see `Cldr.DateTime.Formatter.era/4`.
"""
def era_from_iso_days(iso_days, calendar)
for {calendar, content} <- @calendar_info do
Enum.each(content[:eras], fn
{era, %{start: start, end: finish}} ->
def era_from_iso_days(iso_days, unquote(calendar))
when iso_days in unquote(start)..unquote(finish),
do: unquote(era)
{era, %{start: start}} ->
def era_from_iso_days(iso_days, unquote(calendar))
when iso_days >= unquote(start),
do: unquote(era)
{era, %{end: finish}} ->
def era_from_iso_days(iso_days, unquote(calendar))
when iso_days <= unquote(finish),
do: unquote(era)
end)
end
@doc """
Returns a date struct for a given iso days
"""
def date_from_iso_days(days, calendar) do
{year, month, day, _, _, _, _} = calendar.naive_datetime_from_iso_days(days)
%{year: year, month: month, day: day, calendar: calendar}
end
@doc """
Returns iso days for a given date
"""
def iso_days_from_date(%{year: _, month: _, day: _, calendar: _} = date) do
date
|> naive_datetime_from_date
|> iso_days_from_datetime
end
@doc """
Converts a date to a naive datetime
"""
def naive_datetime_from_date(%{year: year, month: month, day: day, calendar: calendar}) do
{:ok, naive_datetime} = NaiveDateTime.new(year, month, day, 0, 0, 0, {0, 6}, calendar)
naive_datetime
end
@doc """
Converts a datetime to iso days
"""
def iso_days_from_datetime(%NaiveDateTime{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
calendar: calendar
}) do
calendar.naive_datetime_to_iso_days(year, month, day, hour, minute, second, microsecond)
end
def iso_days_from_datetime(%DateTime{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
calendar: calendar,
zone_abbr: "UTC",
time_zone: "Etc/UTC"
}) do
calendar.naive_datetime_to_iso_days(year, month, day, hour, minute, second, microsecond)
end
@doc """
Returns the ordinal day of the year for a given
date.
* `date` is a `Date` or any other struct that contains the
keys `:year`, `:month`, `:day` and `:calendar`
## Example
iex> Cldr.Calendar.day_of_year ~D[2017-01-01]
1
iex> Cldr.Calendar.day_of_year ~D[2017-09-03]
246
iex> Cldr.Calendar.day_of_year ~D[2017-12-31]
365
"""
@spec day_of_year(Date.t()) :: 1..366
def day_of_year(%{year: year, month: _month, day: _day, calendar: calendar} = date) do
{days, _fraction} = iso_days_from_date(date)
{new_year, _fraction} =
iso_days_from_date(%{year: year, month: 1, day: 1, calendar: calendar})
days - new_year + 1
end
@doc """
Returns the day of the week for a date where
the first day is Monday and the result is in
the range `1` (for Monday) to `7` (for Sunday)
* `date` is a `Date` or any other struct that contains the
keys `:year`, `:month`, `:day` and `:calendar`
## Examples
iex> Cldr.Calendar.day_of_week ~D[2017-09-03]
7
iex> Cldr.Calendar.day_of_week ~D[2017-09-01]
5
"""
@spec day_of_week(Date.t()) :: 1..7
def day_of_week(%{year: year, month: month, day: day, calendar: calendar}) do
calendar.day_of_week(year, month, day)
end
@doc """
Returns the date that is the first day of the `n`th week of
the year that contains the supplied `date`.
* `date` is a `Date` or any other struct that contains the
keys `:year`, `:month`, `:day` and `:calendar`
* `n` is the week number
*NOTE* The first week is defined according to the week
definition of the ISO Week calendar.
## Example
iex> Cldr.Calendar.nth_week_of_year ~D[2017-01-04], 1
%{calendar: Calendar.ISO, day: 2, month: 1, year: 2017}
"""
def nth_week_of_year(%{year: _year, calendar: Calendar.ISO} = date, n) do
date
|> ISOWeek.first_day_of_year()
|> add(7 * (n - 1))
end
def nth_week_of_year(%{year: _year, calendar: calendar} = date, n) do
date
|> calendar.first_day_of_year
|> add(7 * (n - 1))
end
def nth_week_of_year(year, n, Calendar.ISO) do
year
|> ISOWeek.first_day_of_year()
|> add(7 * (n - 1))
end
@doc """
Returns the date of the previous day to the
provided date.
## Example
iex> Cldr.Calendar.previous_day %{calendar: Calendar.ISO, day: 2, month: 1, year: 2017}
%{calendar: Calendar.ISO, day: 1, month: 1, year: 2017}
iex> Cldr.Calendar.previous_day %{calendar: Calendar.ISO, day: 1, month: 3, year: 2017}
%{calendar: Calendar.ISO, day: 28, month: 2, year: 2017}
iex> Cldr.Calendar.previous_day %{calendar: Calendar.ISO, day: 1, month: 3, year: 2016}
%{calendar: Calendar.ISO, day: 29, month: 2, year: 2016}
"""
def previous_day(%{calendar: _calendar} = date) do
add(date, -1)
end
@doc """
Returns the date of the next day to the
provided date.
## Examples
iex> Cldr.Calendar.next_day %{calendar: Calendar.ISO, day: 2, month: 1, year: 2017}
%{calendar: Calendar.ISO, day: 3, month: 1, year: 2017}
iex> Cldr.Calendar.next_day %{calendar: Calendar.ISO, day: 28, month: 2, year: 2017}
%{calendar: Calendar.ISO, day: 1, month: 3, year: 2017}
iex> Cldr.Calendar.next_day %{calendar: Calendar.ISO, day: 28, month: 2, year: 2016}
%{calendar: Calendar.ISO, day: 29, month: 2, year: 2016}
"""
def next_day(%{calendar: _calendar} = date) do
add(date, 1)
end
@doc """
Returns the date `n` days after the provided
date.
## Example
iex> Cldr.Calendar.add %{calendar: Calendar.ISO, day: 1, month: 3, year: 2017}, 3
%{calendar: Calendar.ISO, day: 4, month: 3, year: 2017}
"""
def add(%{calendar: calendar} = date, n) do
{days, fraction} = iso_days_from_date(date)
date_from_iso_days({days + n, fraction}, calendar)
end
@doc """
Returns the date `n` days before the provided
date.
## Example
iex> Cldr.Calendar.sub %{calendar: Calendar.ISO, day: 4, month: 3, year: 2017}, 3
%{calendar: Calendar.ISO, day: 1, month: 3, year: 2017}
"""
def sub(%{calendar: _calendar} = date, n) do
add(date, n * -1)
end
defp territory_from_locale(locale) do
try do
String.to_existing_atom(locale.territory)
catch
_, _ -> @default_territory
end
end
# erlang/elixir standard is that Monday -> 1
def day_key(1), do: :mon
def day_key(2), do: :tue
def day_key(3), do: :wed
def day_key(4), do: :thu
def day_key(5), do: :fri
def day_key(6), do: :sat
def day_key(7), do: :sun
def day_ordinal("mon"), do: 1
def day_ordinal("tue"), do: 2
def day_ordinal("wed"), do: 3
def day_ordinal("thu"), do: 4
def day_ordinal("fri"), do: 5
def day_ordinal("sat"), do: 6
def day_ordinal("sun"), do: 7
def day_ordinal(_), do: nil
@doc """
Returns the first day of the month.
*Note* that whilst this is trivial for an ISO/Gregorian calendar it may
well be quite different for other types of calendars
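## Example
The result follows directly from the `Map.put(:day, 1)` in the implementation:
iex> Cldr.Calendar.first_day_of_month %{calendar: Calendar.ISO, day: 15, month: 3, year: 2017}
%{calendar: Calendar.ISO, day: 1, month: 3, year: 2017}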
"""
def first_day_of_month(%{year: _year, month: _month, calendar: Calendar.ISO} = date) do
date
|> Map.put(:day, 1)
end
@doc """
Returns a `Date.Range` with the first date as the
first day of the year and the last day as the last
day of the year
## Example
"""
def year(%{calendar: Calendar.ISO} = date) do
%Date.Range{first: ISOWeek.first_day_of_year(date), last: ISOWeek.last_day_of_year(date)}
end
def year(%{calendar: calendar} = date) do
%Date.Range{first: calendar.first_day_of_year(date), last: calendar.last_day_of_year(date)}
end
@doc false
def iso_days_to_float({days, {numerator, denominator}}) do
days + numerator / denominator
end
@doc false
def calendar_error(calendar_name) do
{Cldr.UnknownCalendarError, "The calendar #{inspect(calendar_name)} is not known."}
end
@doc false
def era(_locale, calendar, backend), do: backend.era(calendar, backend)
@doc false
def period(_locale, calendar, backend), do: backend.period(calendar, backend)
@doc false
def quarter(_locale, calendar, backend), do: backend.quarter(calendar, backend)
@doc false
def month(_locale, calendar, backend), do: backend.month(calendar, backend)
@doc false
def day(_locale, calendar, backend), do: backend.day(calendar, backend)
end
|
lib/cldr/calendar.ex
| 0.913903
| 0.625753
|
calendar.ex
|
starcoder
|
defmodule RDF.NQuads.Encoder do
@moduledoc """
An encoder for N-Quads serializations of RDF.ex data structures.
As for all encoders of `RDF.Serialization.Format`s, you normally won't use these
functions directly, but via one of the `write_` functions on the `RDF.NQuads`
format module or the generic `RDF.Serialization` module.
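
## Example

A direct use of `statement/1` (the IRIs are hypothetical example values):

RDF.NQuads.Encoder.statement({RDF.iri("http://example.com/S"),
RDF.iri("http://example.com/p"), RDF.literal("o"), RDF.iri("http://example.com/G")})
# => "<http://example.com/S> <http://example.com/p> \"o\" <http://example.com/G> .\n"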
"""
use RDF.Serialization.Encoder
alias RDF.Statement
@impl RDF.Serialization.Encoder
@spec encode(RDF.Data.t(), keyword) :: {:ok, String.t()} | {:error, any}
def encode(data, _opts \\ []) do
{:ok,
data
|> Enum.reduce([], &[statement(&1) | &2])
|> Enum.reverse()
|> Enum.join()}
end
@impl RDF.Serialization.Encoder
@spec stream(RDF.Data.t(), keyword) :: Enumerable.t()
def stream(data, opts \\ []) do
case Keyword.get(opts, :mode, :string) do
:string -> Stream.map(data, &statement(&1))
:iodata -> Stream.map(data, &iolist_statement(&1))
invalid -> raise "Invalid stream mode: #{invalid}"
end
end
@spec statement(Statement.t()) :: String.t()
def statement(statement)
def statement({subject, predicate, object, nil}) do
statement({subject, predicate, object})
end
def statement({subject, predicate, object, graph}) do
"#{term(subject)} #{term(predicate)} #{term(object)} #{term(graph)} .\n"
end
def statement({subject, predicate, object}) do
"#{term(subject)} #{term(predicate)} #{term(object)} .\n"
end
defdelegate term(value), to: RDF.NTriples.Encoder
@spec iolist_statement(Statement.t()) :: iolist
def iolist_statement(statement)
def iolist_statement({subject, predicate, object, nil}) do
iolist_statement({subject, predicate, object})
end
def iolist_statement({subject, predicate, object, graph}) do
[
iolist_term(subject),
" ",
iolist_term(predicate),
" ",
iolist_term(object),
" ",
iolist_term(graph),
" .\n"
]
end
def iolist_statement({subject, predicate, object}) do
[iolist_term(subject), " ", iolist_term(predicate), " ", iolist_term(object), " .\n"]
end
defdelegate iolist_term(value), to: RDF.NTriples.Encoder
end
|
lib/rdf/serializations/nquads_encoder.ex
| 0.831691
| 0.541106
|
nquads_encoder.ex
|
starcoder
|
defmodule Financeiro do
@moduledoc """
Entry point of the system.
Through this module the user chooses which operations to perform.
"""
@doc """
Starts the financial system.
"""
def main([]) do
alfa(usr_padrao(), entrada("Sistema Financeiro\nDigite 1 para entrar ou 2 para criar um cadastro: "))
end
@doc """
Creates the sample users.
Each one receives a list with the currencies of the ISO 4217 standard.
"""
def usr_padrao do
[maria: Moeda.novo(), stone: Moeda.novo(), john: Moeda.novo()]
end
@doc """
Guides the user to create an account or sign in with an existing one.
## Parameter
- usuarios: List with the user names and their respective amounts of money.
"""
def alfa(usuarios, escolha) do
case escolha do
"1" ->
usuario = acessar(usuarios, escolha, entrada("Digite seu nome de usuário: "))
alternativas(usuarios, usuario)
"2" ->
usuario = Cadastro.cria_usuario(usuarios)
usuarios = Cadastro.add_conta(usuarios, usuario)
Financeiro.alternativas(usuarios, usuario)
_ ->
IO.puts "Digite apenas 1 ou 2"
alfa(usuarios, escolha)
end
end
@doc """
Gets user input and trims whitespace.
## Parameter
- msg: String with the message for `IO.gets/1`
"""
def entrada(msg) do
obter = IO.gets msg
String.trim(obter)
end
@doc """
Accesses the user's account.
"""
def acessar(usuarios, escolha, usuario) do
usuario = string_atom(usuario)
if Consulta.usuario?(usuarios, usuario) == :error do
IO.puts "Usuário não existe."
alfa(usuarios, escolha)
else
usuario
end
end
@doc """
Trims whitespace and converts a string to an atom.
## Examples
iex> Financeiro.string_atom("Ana_Roberta")
:Ana_Roberta
iex> Financeiro.string_atom(" Lucas ")
:Lucas
"""
def string_atom(usuario) do
usuario = String.trim(usuario)
String.to_atom(usuario)
end
@doc """
Options the user can choose from.
"""
def alternativas(usuarios, usuario) do
opcao = entrada("Digite 1 para verificar saldo, 2 para realizar depósito, 3 para realizar transferência ou 4 para realizar câmbio de moedas: ")
case opcao do
"1" ->
Consulta.verifica_saldo(usuarios, usuario)
alternativas(usuarios, usuario)
"2" ->
usuarios = Transacao.deposito(usuarios, usuario, Transacao.cedula(usuarios, usuario), Transacao.valor())
alternativas(usuarios, usuario)
"3" ->
usuarios = Transacao.transferencia(usuarios, usuario)
alternativas(usuarios, usuario)
"4" ->
usuarios = Cambio.cambio_moeda(usuarios, usuario)
alternativas(usuarios, usuario)
_ ->
IO.puts "Digite apenas 1, 2, 3 ou 4"
alternativas(usuarios, usuario)
end
end
end
|
lib/financeiro.ex
| 0.557966
| 0.614886
|
financeiro.ex
|
starcoder
|
defmodule Ofex.Helpers do
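@moduledoc """
Helpers for coercing OFX string values into floats and dates and for
normalizing parsed attribute lists into maps.
"""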
@spec convert_to_positive_float(number() | String.t) :: float()
def convert_to_positive_float(num) when is_float(num) do
num |> Float.to_string() |> String.replace("-", "") |> string_to_float
end
def convert_to_positive_float(num) when is_integer(num) do
num |> Integer.to_string() |> String.replace("-", "") |> string_to_float
end
def convert_to_positive_float(num) when is_bitstring(num) do
num |> String.replace("-", "") |> string_to_float
end
def convert_to_positive_float(_), do: nil
@spec create_attribute_map(keyword(String.t) | map()) :: map()
def create_attribute_map(attribute_list) when is_list(attribute_list) do
Map.new(attribute_list, &format_attribute_value/1)
end
def create_attribute_map(attribute_map) when is_map(attribute_map) do
create_attribute_map(Map.to_list(attribute_map))
end
@spec format_attribute_value({atom(), String.t}) :: {atom(), String.t | Date.t | number()}
defp format_attribute_value({attr, ""}), do: {attr, nil}
defp format_attribute_value({:amount, amount_str}), do: {:amount, string_to_float(amount_str)}
defp format_attribute_value({:positive_amount, amount_str}) do
{:positive_amount, convert_to_positive_float(amount_str)}
end
defp format_attribute_value({:balance, balance_str}) do
{:balance, string_to_float(balance_str)}
end
defp format_attribute_value({:balance_date, date_str}) do
{:balance_date, string_to_date(date_str)}
end
defp format_attribute_value({:positive_balance, balance_str}) do
{:positive_balance, convert_to_positive_float(balance_str)}
end
defp format_attribute_value({:posted_date, date_str}) do
{:posted_date, string_to_date(date_str)}
end
defp format_attribute_value({:transactions_end_date, date_str}) do
{:transactions_end_date, string_to_date(date_str)}
end
defp format_attribute_value({:transactions_start_date, date_str}) do
{:transactions_start_date, string_to_date(date_str)}
end
defp format_attribute_value(attribute_tuple), do: attribute_tuple
@spec string_to_date(String.t) :: Date.t | nil
def string_to_date(date_str) when byte_size(date_str) == 8 do
string_to_date(date_str, "%Y%m%d")
end
# An explicit fallback clause is used here: a default argument on
# string_to_date/2 would conflict with the string_to_date/1 clauses above.
def string_to_date(date_str), do: string_to_date(date_str, "%Y%m%d%H%M%S")
def string_to_date(date_str, strf_pattern)
def string_to_date(date_str, strf_pattern) when byte_size(date_str) > 0 do
[cleansed_date_str] = Regex.run(~r/^[[:digit:]]{0,14}/, date_str, capture: :first)
case Timex.parse(cleansed_date_str, strf_pattern, :strftime) do
{:ok, naive_date} -> NaiveDateTime.to_date(naive_date)
{:error, _reason} -> nil
end
end
def string_to_date(_date_str, _strf_pattern), do: nil
@spec string_to_float(String.t) :: float()
def string_to_float(nil), do: nil
def string_to_float(""), do: nil
def string_to_float(str) do
[float_string] = Regex.run(~r/-{0,1}[\d,]+\.?\d*/, str)
{float, _} = float_string |> String.replace(",", "") |> Float.parse()
float
end
end
|
lib/helpers.ex
| 0.772874
| 0.540378
|
helpers.ex
|
starcoder
|
defmodule BonnyPlug.WebhookHandler do
@moduledoc """
This module dispatches the admission webhook requests to the handlers. You can `use` this module in your webhook
handler to connect it to the Plug.
## Options
When `use`-ing this module, you have to tell it about the resource you want to act upon:
### Custom Resource Definition
* `crd` - If you have a CRD YAML file, just pass the path to the file as option `crd`. The `WebhookHandler` will extract the required values from the file.
### Explicit Resource Specification
The `WebhookHandler` needs to know the following values from the resource you want to act upon:
* `group` - The group of the resource, e.g. `"apps"`
* `plural` - The plural name of the resource, e.g. `"deployments"`
* `api_versions` - A list of versions of the resource, e.g. `["v1beta1", "v1"]`
## Functions to be implemented in your Webhook Handler
Your webhook handler should implement at least one of the two functions `validating_webhook/1` and
`mutating_webhook/1`. These are going to be called by this module depending on whether the incoming request is of
type `:validating_webhook` or `:mutating_webhook` according to the `BonnyPlug.WebhookPlug` configuration.
## Examples
```
defmodule FooAdmissionWebhookHandler do
use BonnyPlug.WebhookHandler, crd: "manifest/src/crds/foo.crd.yaml"
@impl true
def validating_webhook(admission_review) do
check_immutable(admission_review, ["spec", "someField"])
end
@impl true
def mutating_webhook(admission_review) do
allow(admission_review)
end
end
```
```
defmodule BarAdmissionWebhookHandler do
use BonnyPlug.WebhookHandler,
group: "my.operator.com",
resource: "barresources",
api_versions: ["v1"]
@impl true
def validating_webhook(admission_review) do
check_immutable(admission_review, ["spec", "someField"])
end
@impl true
def mutating_webhook(admission_review) do
deny(admission_review)
end
end
```
"""
require Logger
alias BonnyPlug.{AdmissionReview, WebhookPlug}
@callback process(AdmissionReview.t(), WebhookPlug.webhook_type()) :: AdmissionReview.t()
@callback mutating_webhook(AdmissionReview.t()) :: AdmissionReview.t()
@callback validating_webhook(AdmissionReview.t()) :: AdmissionReview.t()
@optional_callbacks mutating_webhook: 1, validating_webhook: 1
@type webhook_type :: :mutating_webhook | :validating_webhook
defmacro __using__(opts) do
[group: group, plural: plural, api_versions: api_versions] = case opts do
[crd: crd] -> read_crd(crd)
[group: _, plural: _, api_versions: _] = bindings -> bindings
_ -> raise(ArgumentError, "Wrong usage of BonnyPlug.WebhookHandler. You have to pass either `crd: \"path-to-crd.yaml\"` or all three of `group`, `plural` and `api_versions` when using BonnyPlug.WebhookHandler")
end
quote bind_quoted: [group: group, plural: plural, api_versions: api_versions] do
import BonnyPlug.AdmissionReview.Request
@behaviour BonnyPlug.WebhookHandler
@group group
@plural plural
@api_versions api_versions
@impl true
@spec process(AdmissionReview.t(), WebhookPlug.webhook_type()) :: AdmissionReview.t()
def process(
%AdmissionReview{request: %{"resource" => %{"group" => @group, "version" => version, "resource" => @plural}}} = admission_review,
webhook_type
) when webhook_type in [:validating_webhook, :mutating_webhook] and version in @api_versions do
if function_exported?(__MODULE__, webhook_type, 1) do
Kernel.apply(__MODULE__, webhook_type, [admission_review])
else
admission_review
end
end
def process(admission_review, _), do: admission_review
end
end
defp read_crd(path_to_crd) do
crd = case YamlElixir.read_from_file(path_to_crd) do
{:ok, crd} -> crd
{:error, %YamlElixir.FileNotFoundError{message: error}} ->
raise(ArgumentError, "Wrong usage of BonnyPlug.WebhookHandler. The CRD you passed was not found: " <> error)
{:error, %YamlElixir.ParsingError{message: error}} ->
raise(ArgumentError, "Wrong usage of BonnyPlug.WebhookHandler. The CRD YAML file you passed could not be parsed: " <> error)
end
api_versions = derive_api_versions(crd)
[
group: get_in(crd, ~w(spec group)),
plural: get_in(crd, ~w(spec names plural)),
api_versions: api_versions,
]
end
defp derive_api_versions(%{"spec" => %{"versions" => versions}}) do
versions
|> Enum.filter(&(&1["served"] == true))
|> Enum.map(&Map.fetch!(&1, "name"))
end
defp derive_api_versions(%{"spec" => %{"version" => version}}), do: [version]
defp derive_api_versions(_), do: raise(ArgumentError, "CRD version not supported. Currently only CRD versions v1 and v1beta1 are supported.")
end
|
lib/bonny_plug/webhook_handler.ex
| 0.866217
| 0.688442
|
webhook_handler.ex
|
starcoder
|
defmodule LevelWeb.Schema.InputObjects do
@moduledoc false
use Absinthe.Schema.Notation
@desc "The field and direction to sort users."
input_object :user_order do
@desc "The field by which to sort."
field :field, non_null(:user_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "The field and direction to sort users."
input_object :space_order do
@desc "The field by which to sort."
field :field, non_null(:space_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "The field and direction to sort space users."
input_object :space_user_order do
@desc "The field by which to sort."
field :field, non_null(:space_user_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "The field and direction to sort groups."
input_object :group_order do
@desc "The field by which to sort."
field :field, non_null(:group_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "The field and direction to sort posts."
input_object :post_order do
@desc "The field by which to sort."
field :field, non_null(:post_order_field), default_value: :posted_at
@desc "The sort direction."
field :direction, non_null(:order_direction), default_value: :desc
end
@desc "The field and direction to sort replies."
input_object :reply_order do
@desc "The field by which to sort."
field :field, non_null(:reply_order_field)
@desc "The sort direction."
field :direction, non_null(:order_direction)
end
@desc "Filtering criteria for post connections."
input_object :post_filters do
@desc """
Filter by whether the post is being followed by the user. A user is considered
to be "following" a post if they are explicitly subscribed to it, or if the
post was created in a group that the user belongs to.
"""
field :following_state, :following_state_filter, default_value: :all
@desc """
Filter by the different inbox states.
"""
field :inbox_state, :inbox_state_filter, default_value: :all
@desc """
Filter by the different post states.
"""
field :state, :post_state_filter, default_value: :all
end
end
|
lib/level_web/schema/input_objects.ex
| 0.775477
| 0.436562
|
input_objects.ex
|
starcoder
|
defmodule Bitcoin.Protocol.Messages.GetHeaders do
@moduledoc """
Return a headers packet containing the headers of blocks starting right after the last known hash in the block
locator object, up to hash_stop or 2000 blocks, whichever comes first. To receive the next block headers, one needs
to issue getheaders again with a new block locator object. The getheaders command is used by thin clients to
quickly download the block chain where the contents of the transactions would be irrelevant (because they are not
ours). Keep in mind that some clients may provide headers of blocks which are invalid if the block locator object
contains a hash on the invalid branch.
For the block locator object in this packet, the same rules apply as for the getblocks packet.
https://en.bitcoin.it/wiki/Protocol_specification#getheaders
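
## Example

A round-trip sketch (the version and the single zeroed locator hash are
example values; `serialize_items/1` and `collect_items/2` come from
`import Bitcoin.Protocol`):

msg = %Bitcoin.Protocol.Messages.GetHeaders{version: 70015, block_locator_hashes: [<<0::size(256)>>]}
msg |> Bitcoin.Protocol.Messages.GetHeaders.serialize() |> Bitcoin.Protocol.Messages.GetHeaders.parse() == msg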
"""
import Bitcoin.Protocol
# the protocol version
defstruct version: 0,
# block locator object; newest back to genesis block (dense to start, but then sparse)
block_locator_hashes: [],
# hash of the last desired block; set to zero to get as many headers as possible (up to 2000)
hash_stop:
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0>>
@type t :: %__MODULE__{
version: non_neg_integer,
block_locator_hashes: list(Bitcoin.Block.t_hash()),
hash_stop: Bitcoin.Block.t_hash()
}
@spec parse(binary) :: t
def parse(data) do
<<version::unsigned-little-integer-size(32), payload::binary>> = data
{block_locator_hashes, payload} = payload |> collect_items(:hash)
<<hash_stop::bytes-size(32)>> = payload
%__MODULE__{
version: version,
block_locator_hashes: block_locator_hashes,
hash_stop: hash_stop
}
end
@spec serialize(t) :: binary
def serialize(%__MODULE__{} = s) do
<<s.version::unsigned-little-integer-size(32)>> <>
(s.block_locator_hashes |> serialize_items) <>
<<s.hash_stop::bytes-size(32)>>
end
end
|
lib/bitcoin/protocol/messages/get_headers.ex
| 0.827689
| 0.508178
|
get_headers.ex
|
starcoder
|
defmodule DataBase.Schemas.AccountTransfer do
@moduledoc """
The centralized agreement for exchanging assets between two
distinct accounts.
A transfer between accounts is performed by creating an outbound
movement for the *sender* account and an inbound movement for the
*recipient* account.
This module provides a function to register those two
`t:DataBase.Schemas.AccountMovement.t/0` as being part of a formal
transfer.
See `register/2`.
Expresses information over the `account_transfers` database table.
"""
use Ecto.Schema
alias DataBase.Schemas.AccountMovement, as: Movement
alias DataBase.Repos.AmethystRepo, as: Repo
alias DataBase.Schemas.Account
@typedoc """
A `DataBase.Schemas.AccountTransfer` struct.
"""
@type t :: %__MODULE__{}
@typedoc """
A standard Ecto response to `DataBase.Schemas.AccountTransfer` data
insertion.
"""
@type response_t :: {:ok, t()} | {:error, any()}
schema "account_transfers" do
field :amount, :decimal
field :transfer_at, :utc_datetime
belongs_to(:sender_account, Account)
belongs_to(:recipient_account, Account)
belongs_to(:sender_movement, Movement)
belongs_to(:recipient_movement, Movement)
timestamps()
end
@doc """
Builds and registers a `t:t/0`.
Given the inbound and outbound
`t:DataBase.Schemas.AccountMovement.t/0`, for the *sender* and
*recipient* `t:DataBase.Schemas.Account.t/0`, it builds a
respective `t:t/0` and saves it.
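
## Example

A sketch, assuming `outgoing` and `incoming` are already-inserted
`DataBase.Schemas.AccountMovement` structs:

{:ok, transfer} = DataBase.Schemas.AccountTransfer.register(outgoing, incoming)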
"""
@spec register(Movement.t, Movement.t) :: response_t()
def register(%Movement{} = outgoing, %Movement{} = incoming) do
Repo.insert(build(outgoing, incoming))
end
@spec build(Movement.t, Movement.t) :: t()
defp build(%Movement{} = outgoing, %Movement{} = incoming) do
%__MODULE__{
sender_account_id: outgoing.account_id,
sender_movement_id: outgoing.id,
recipient_account_id: incoming.account_id,
recipient_movement_id: incoming.id,
transfer_at: outgoing.move_at,
amount: outgoing.amount
}
end
end
|
apps/database/lib/database/schemas/account_transfer.ex
| 0.866895
| 0.628379
|
account_transfer.ex
|
starcoder
|
defmodule Issues.CLI do
@default_count 4
@moduledoc """
Handle the command line parsing and the dispatch to
the various functions that end up generating a
table of the last _n_ issues in a github project
"""
def run(argv) do
argv
|> parse_args
|> process
end
@doc """
`argv` can be -h or --help, which returns :help.
Otherwise it is a github user name, project name, and (optionally)
the number of entries to format.
Return a tuple of `{ user, project, count }`, or `:help` if help was given.
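For example:

    parse_args(["-h"])
    #=> :help

    parse_args(["elixir", "elixir", "5"])
    #=> {"elixir", "elixir", 5}

    parse_args(["elixir", "elixir"])
    #=> {"elixir", "elixir", 4}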
"""
def parse_args(argv) do
parse = OptionParser.parse(argv, switches: [ help: :boolean],
aliases: [ h: :help ])
case parse do
{ [ help: true ], _, _ } -> :help
{ _, [ user, project, count ], _ } -> { user, project, String.to_integer(count) }
{ _, [ user, project ], _ } -> { user, project, @default_count }
_ -> :help
end
end
def process(:help) do
IO.puts """
usage: issues <user> <project> [ count | #{@default_count} ]
"""
System.halt(0)
end
def process({user, project, count}) do
Issues.GithubIssues.fetch(user, project)
|> decode_response
|> convert_to_list_of_hashdicts
|> sort_into_ascending_order
|> Enum.take(count)
|> print_table_for_columns(["number","created_at","title"])
end
def decode_response({:ok, body}), do: body
def decode_response({:error, error}) do
{_, message} = List.keyfind(error, "message", 0)
IO.puts "Error fetching from Github: #{message}"
System.halt(2)
end
def convert_to_list_of_hashdicts(list) do
list
|> Enum.map(&Enum.into(&1, HashDict.new))
end
def sort_into_ascending_order(list_of_issues) do
Enum.sort list_of_issues,
fn i1, i2 -> i1["created_at"] <= i2["created_at"] end
end
def print_table_for_columns(issues, columns) do
rows = process_table_rows(issues, columns)
columns_max_size = get_columns_max_size(rows, columns)
print_table_headers(columns, columns_max_size)
print_table_separator(columns, columns_max_size)
print_table_body(rows, columns_max_size)
issues
end
def process_table_rows(issues, columns) do
issues
|> Enum.map(&process_table_row(&1, columns))
end
def process_table_row(issue, columns) do
columns
|> Enum.map(&issue[&1])
end
def print_table_headers(columns, columns_max_size) do
header_columns = get_header_columns(columns)
Enum.zip(header_columns, columns_max_size)
|> Enum.map_join(" | ",
     &String.pad_trailing(
       to_string(get_tuple_key(&1)),
       get_tuple_value(&1)
     ))
|> IO.puts
end
def print_table_separator(columns, columns_max_size) do
Enum.zip(columns, columns_max_size)
|> Enum.map_join("-+-",
     &String.pad_trailing("", get_tuple_value(&1), "-"))
|> IO.puts
end
def print_table_body(rows, columns_max_size) do
rows
|> Enum.each(&print_table_row(&1, columns_max_size))
end
def print_table_row(values, columns_max_size) do
Enum.zip(values, columns_max_size)
|> Enum.map_join(" | ",
     &String.pad_trailing(to_string(get_tuple_key(&1)),
       get_tuple_value(&1)))
|> IO.puts
end
def get_columns_max_size(rows, columns) do
total_rows = Enum.into(rows, [columns])
get_iter_counters(columns)
|> Enum.map(&(get_column_max_size(total_rows, &1))) # rows
end
def get_column_max_size(rows, column_number) do
rows
|> get_column_values(column_number)
|> get_longest_value
|> get_cell_size
end
def get_column_values(rows, column_number) do
rows
|> Enum.map(&Enum.at(&1, column_number))
end
def get_longest_value(list) do
list
|> Enum.max_by(&String.length(to_string(&1)))
end
def get_cell_size(value) do
value
|> to_string
|> String.length
end
def get_iter_counters(list) do
total_num_elements = length list
Stream.iterate(0, &(&1+1))
|> Enum.take(total_num_elements)
end
def get_header_columns(list) do
case list do
["number" | tail ] ->
[ "#" | get_header_columns(tail) ]
[ head | tail ]->
[ head | get_header_columns(tail) ]
[] -> []
end
end
def get_tuple_key(tuple) do
tuple
|> Tuple.to_list
|> Enum.at(0)
end
def get_tuple_value(tuple) do
tuple
|> Tuple.to_list
|> Enum.at(1)
end
end
|
chapter-13/organizing-a-project-4/issues/lib/issues/cli.ex
| 0.631708
| 0.401248
|
cli.ex
|
starcoder
|
defmodule Scenic.Toggle.Components do
@moduledoc """
A set of helper functions for working with toggle components.
"""
alias Scenic.Graph
alias Scenic.Primitive
alias Scenic.Component.Toggle
@doc """
Add toggle to a Scenic graph.
You must pass the initial state, `on?`. Pass `true` if the toggle is on, pass `false` if not.
### Styles
Toggles honor the following styles. The `:light` and `:dark` styles look nice. The other bundled themes...not so much. You can also [supply your own theme](Scenic.Toggle.Components.html#toggle/3-theme).
* `:hidden` - If `false`, the toggle is rendered. If `true`, it is skipped. The default
is `false`.
* `:theme` - The color set used to draw. See below. The default is `:dark`.
### Additional Styles
Toggles also honor the following additional styles.
* `:border_width` - the border width. Defaults to `2`.
* `:padding` - the space between the border and the thumb. Defaults to `2`
* `:thumb_radius` - the radius of the thumb. This determines the size of the entire toggle. Defaults to `10`.
## Theme
To pass in a custom theme, supply a map with at least the following entries:
* `:border` - the color of the border around the toggle
* `:background` - the color of the track when the toggle is `off`.
* `:text` - the color of the thumb.
* `:thumb` - the color of the track when the toggle is `on`.
Optionally, you can supply the following entries:
* `:thumb_pressed` - the color of the thumb when pressed. Defaults to `:gainsboro`.
### Examples
The following example creates a toggle.
graph
|> toggle(true, translate: {20, 20})
The next example makes a larger toggle.
graph
|> toggle(true, translate: {20, 20}, thumb_radius: 14)
"""
@spec toggle(Graph.t() | Primitive.t(), any, Keyword.t() | nil) :: Graph.t()
def toggle(graph, data, options \\ [])
def toggle(%Graph{} = g, data, options) do
add_to_graph(g, Toggle, data, options)
end
def toggle(%Primitive{module: SceneRef} = p, data, options) do
modify(p, Toggle, data, options)
end
# ============================================================================
# internal utilities
@spec add_to_graph(Graph.t(), Toggle, any, Keyword.t()) :: Graph.t()
defp add_to_graph(%Graph{} = g, mod, data, options) do
mod.verify!(data)
mod.add_to_graph(g, data, options)
end
@spec modify(Primitive.t(), Toggle, any, Keyword.t()) :: Primitive.t()
defp modify(%Primitive{module: SceneRef} = p, mod, data, options) do
mod.verify!(data)
Primitive.put(p, {mod, data}, options)
end
end
|
lib/components.ex
| 0.904268
| 0.736401
|
components.ex
|
starcoder
|
defmodule WechatPay.Native do
@moduledoc """
The **Native** payment method.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=6_1)
## Example
Set up a client:
```elixir
{:ok, client} = WechatPay.Client.new(
app_id: "the-app_id",
mch_id: "the-mch-id",
api_key: "the-api_key",
ssl: [
ca_cert: File.read!("fixture/certs/rootca.pem"),
cert: File.read!("fixture/certs/apiclient_cert.pem"),
key: File.read!("fixture/certs/apiclient_key.pem")
]
)
```
Place an order:
```elixir
WechatPay.Native.place_order(client, %{
body: "Plan 1",
out_trade_no: "12345",
fee_type: "CNY",
total_fee: "600",
spbill_create_ip: Void.Utils.get_system_ip(),
notify_url: "http://example.com/",
trade_type: "Native",
product_id: "12345"
})
```
"""
alias WechatPay.Client
alias WechatPay.API
alias WechatPay.API.HTTPClient
alias WechatPay.Utils.Signature
@doc """
Place an order.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=9_1)
"""
@spec place_order(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate place_order(client, attrs, options \\ []), to: API
@doc """
Query the order.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=9_2)
"""
@spec query_order(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate query_order(client, attrs, options \\ []), to: API
@doc """
Close the order.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=9_3)
"""
@spec close_order(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate close_order(client, attrs, options \\ []), to: API
@doc """
Request to refund.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=9_4)
"""
@spec refund(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate refund(client, attrs, options \\ []), to: API
@doc """
Query the refund.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=9_5)
"""
@spec query_refund(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate query_refund(client, attrs, options \\ []), to: API
@doc """
Download bill.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=9_6)
"""
@spec download_bill(Client.t(), map, keyword) ::
{:ok, String.t()} | {:error, HTTPoison.Error.t()}
defdelegate download_bill(client, attrs, options \\ []), to: API
@doc """
Download fund flow.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=9_18&index=7)
"""
@spec download_fund_flow(Client.t(), map, keyword) ::
{:ok, String.t()} | {:error, HTTPoison.Error.t()}
defdelegate download_fund_flow(client, attrs, options \\ []), to: API
@doc """
Report.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=9_8)
"""
@spec report(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate report(client, attrs, options \\ []), to: API
@doc """
Query comments in a batch.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_17&index=11)
"""
@spec batch_query_comments(Client.t(), map, keyword) ::
{:ok, String.t()} | {:error, HTTPoison.Error.t()}
defdelegate batch_query_comments(client, attrs, options \\ []), to: API
@doc """
Shorten the URL to reduce the QR image size.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=9_9)
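For example (a sketch, reusing a `client` built as in the module doc; the URL
is an illustrative placeholder):

    {:ok, data} = WechatPay.Native.shorten_url(client, "weixin://wxpay/bizpayurl?pr=placeholder")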
"""
@spec shorten_url(Client.t(), String.t(), keyword) ::
{:ok, String.t()} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
def shorten_url(client, url, options \\ []) do
with {:ok, data} <-
HTTPClient.post(client, "tools/shorturl", %{long_url: URI.encode(url)}, options),
:ok <- Signature.verify(data, client.api_key, client.sign_type) do
{:ok, data}
end
end
end
|
lib/wechat_pay/payment_methods/native.ex
| 0.828973
| 0.639511
|
native.ex
|
starcoder
|
defmodule Para do
@moduledoc """
Para is an Elixir library that provides a structured and
declarative way to parse and validate parameters.
Para uses Ecto under the hood and therefore inherits most of
its utilities such as changeset and built-in validators.
## Usage
Let's imagine that you have a controller named `Web.UserController` and
want to validate the parameters for its `:create` and `:update` actions.
First, let's define your parameters schema.
defmodule Web.UserPara do
use Para
validator :create do
required :name, :string
required :age, :integer
required :email, :string
optional :phone, :string
end
validator :update do
required :name, :string
required :age, :integer
required :email, :string
optional :phone, :string
end
end
This will generate two `validate/2` functions for your module
with action `name` and `params` as arguments.
defmodule Web.UserController do
use Web, :controller
alias Web.UserPara
def create(conn, params) do
with {:ok, data} <- UserPara.validate(:create, params) do
# ...
end
end
def update(conn, params) do
with {:ok, data} <- UserPara.validate(:update, params) do
# ...
end
end
end
The `validate/2` function will return either an `{:ok, map}` or `{:error, changeset}`
tuple.
## Inline validators
An inline validator is a convenient way to validate your fields. This is
especially useful when you need to perform some basic validation
using `Ecto.Changeset`'s built-in validators.
defmodule UserPara do
use Para
validator :update do
required :name, :string, validator: {:validate_length, [min: 3, max: 100]}
end
end
You can also use custom inline validators by supplying the function name
as an atom. Similar to most Ecto's built-in validators, the function will
receive `changeset`, `key`, and `opts` as the arguments.
defmodule UserPara do
use Para
validator :update do
required :age, :string, validator: :validate_age
required :gender, :string, validator: {:validate_gender, [allow: :non_binary]}
end
def validate_age(changeset, key, opts) do
# ...
end
end
## Callback validator
Sometimes, you might want to use custom validators or need to perform
additional data manipulations. For this, you can use the `callback/1` macro.
The `callback/1` macro will always be the last function to be called
after the validator has parsed and validated the parameters.
defmodule Web.UserPara do
use Para
validator :create do
required :name, :string
required :age, :integer
required :email, :string
optional :phone, :string
callback :create_validators
end
def create_validators(changeset, params) do
changeset
|> format_email(params)
|> format_phone(params)
|> validate_age()
end
def format_email(changeset, params) do
# ...
end
def format_phone(changeset, params) do
# ...
end
def validate_age(changeset) do
# ...
end
end
"""
@type t :: {:ok, map()} | {:error, Ecto.Changeset.t()}
@type data :: %{atom => term}
@type spec :: %{
data: map,
types: map,
embeds: map,
permitted: list,
required: list,
validators: map
}
@doc """
Parse and validate parameters for a given action.
The function will cast all the returned map keys into atoms except
for embedded maps and lists.
## Examples
defmodule OrderPara do
use Para
validator :create do
required :title
required :data, {:array, :map}
end
end
# Validate action with parameters
OrderPara.validate(:create, %{
"title" => "test"
"data" => [%{"color" => "black", "material" => "cotton"}]
})
#=> {:ok, %{
title: "test"
data: [%{"color" => "black", "material" => "cotton"}]
}}
"""
@callback validate(atom, map) :: {:ok, data} | {:error, Ecto.Changeset.t()}
@doc """
Returns basic spec.
This is useful when you need to build a custom changeset,
or when you just need the basic structure of your schema.
Also see: `Ecto.Changeset.change/2`
## Examples
defmodule OrderPara do
use Para
validator :create do
required :title
required :data, {:array, :map}
end
end
def changeset(:new, params) do
spec = __MODULE__.spec(:create, params)
Ecto.Changeset.change({spec.data, spec.types})
end
"""
@callback spec(atom, map) :: spec()
@doc false
defmacro __using__(_) do
quote do
@behaviour Para
import Para,
only: [
validator: 2,
required: 1,
required: 2,
required: 3,
optional: 1,
optional: 2,
optional: 3,
callback: 1,
embeds_one: 2,
embeds_many: 2
]
end
end
@doc """
Define a validator schema with an action name and field definitions.
This will generate a new function called `validate/2` with the action `name`
and `params` as the arguments.
iex> defmodule UserPara do
...> use Para
...>
...> validator :create do
...> required :name
...> end
...> end
...>
...> UserPara.validate(:create, %{"name" => "<NAME>"})
{:ok, %{name: "<NAME>"}}
"""
defmacro validator(name, do: block) do
fields =
case block do
{:__block__, _, fields} -> fields
block -> [block]
end
quote do
def validate(unquote(name), params) do
Para.validate(__MODULE__, unquote(fields), params)
end
def spec(unquote(name), params) do
Para.build_spec(unquote(fields), params)
end
end
end
@doc """
Define a custom callback function that will be called to perform any
additional manipulation to the changeset or parameters.
The callback function must accept two arguments namely `changeset` and
`params` and return an `Ecto.Changeset` struct.
## Examples
# Define callback function to be called
validator :create do
callback :validate_price
end
def validate_price(changeset, params) do
#...
end
"""
defmacro callback(name) do
quote do
{:callback, unquote(name)}
end
end
@doc """
Define a required field.
## Options
* `:default` - Assign a default value if not set by the input parameters
* `:validator` - Define either one of the built-in Ecto.Changeset's validators
or use your own custom inline validator. Refer: [Custom inline validator](#required/3-custom-inline-validator)
* `:droppable` - Drop the field when the key doesn't exist in the parameters. This
is useful when you need to perform a partial update by leaving out certain fields;
see the sketch below.
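For example (a sketch; `:avatar` is a hypothetical field), a droppable field
is simply left out of the validated result when its key is absent from the
input parameters:

    validator :update do
      optional :avatar, :string, droppable: true
    end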
## Custom inline validator
You can define your own validator as such:
def validate_country(changeset, field) do
# ...
end
Then use it as an inline validator for your field
validator :create do
required :country, :string, [validator: :validate_country]
end
You can also supply options with your custom inline validator
validator :create do
required :country, :string, [validator: {:validate_country, region: :asia}]
end
"""
defmacro required(name, type \\ :string, opts \\ []) do
quote do
{:required, unquote(name), unquote(type), unquote(opts)}
end
end
@doc """
Define an optional field.
Similar to `required/3`, it also accepts the same Options
"""
defmacro optional(name, type \\ :string, opts \\ []) do
quote do
{:optional, unquote(name), unquote(type), unquote(opts)}
end
end
@doc """
Define an embedded map field
It accepts similar schema definition like `validator/2`.
## Examples
defmodule ParentPara do
use Para
validator :create do
embeds_one :child do
optional :name, :string
optional :age, :integer
end
end
end
"""
defmacro embeds_one(name, do: block) do
fields =
case block do
{:__block__, _, fields} -> fields
block -> [block]
end
quote do
{:embed_one, unquote(name), unquote(fields)}
end
end
@doc """
Define an embedded array of maps field.
It accepts similar schema definition like `validator/2`.
## Examples
defmodule OrderPara do
use Para
validator :create do
embeds_many :items do
required :title
required :price, :float
end
end
end
"""
defmacro embeds_many(name, do: block) do
fields =
case block do
{:__block__, _, fields} -> fields
block -> [block]
end
quote do
{:embed_many, unquote(name), unquote(fields)}
end
end
@doc false
def validate(module, fields, params) do
case changeset = do_validate(module, fields, params) do
%{valid?: true} -> {:ok, apply_changes(changeset)}
_ -> {:error, changeset}
end
end
@doc false
def do_validate(module, fields, params) do
spec = build_spec(fields, params)
callback =
Enum.find_value(fields, fn
{:callback, name} -> name
_ -> nil
end)
{spec.data, spec.types}
|> Ecto.Changeset.cast(params, spec.permitted)
|> Ecto.Changeset.validate_required(spec.required)
|> validate_embeds(module, spec)
|> apply_inline_validators(module, spec.validators)
|> apply_callback(module, callback, params)
end
@doc false
def build_spec(fields, params) do
default = %{data: %{}, types: %{}, embeds: %{}, permitted: [], required: [], validators: %{}}
fields
|> discard_droppable_fields(params)
|> Enum.reduce(default, fn
{:embed_one, name, block}, acc ->
acc
|> put_in([:data, name], nil)
|> put_in([:embeds, name], {:embed_one, block})
|> put_in([:types, name], {:map, :string})
{:embed_many, name, block}, acc ->
acc
|> put_in([:data, name], nil)
|> put_in([:embeds, name], {:embed_many, block})
|> put_in([:types, name], {:map, :string})
{requirement, name, type, opts}, acc ->
acc
|> put_in([:data, name], opts[:default])
|> put_in([:types, name], type)
|> assign_permitted_fields(name)
|> assign_required_fields(requirement, name)
|> assign_inline_validators(name, opts)
_, acc ->
acc
end)
end
@doc false
def discard_droppable_fields(fields, params) do
Enum.filter(fields, fn
# optional/required fields
{_, name, _, opts} ->
with true <- opts[:droppable],
false <- Map.has_key?(params, Atom.to_string(name)) do
false
else
_ -> true
end
# embed fields
{_, name, opts} ->
with true <- opts[:droppable],
false <- Map.has_key?(params, Atom.to_string(name)) do
false
else
_ -> true
end
any ->
any
end)
end
@doc false
def assign_permitted_fields(spec, name) do
put_in(spec, [:permitted], spec.permitted ++ [name])
end
@doc false
def assign_required_fields(spec, :required, name) do
put_in(spec, [:required], spec.required ++ [name])
end
def assign_required_fields(spec, _, _), do: spec
@doc false
def assign_inline_validators(spec, name, opts) do
if validator = opts[:validator] do
put_in(spec, [:validators, name], validator)
else
spec
end
end
@doc false
def validate_embeds(changeset, module, %{embeds: embeds}) do
Enum.reduce(embeds, changeset, fn {name, embed}, acc ->
validate_embed(acc, module, name, embed, changeset.params)
end)
end
def validate_embeds(changeset, _, _), do: changeset
@doc false
def validate_embed(changeset, module, name, {:embed_one, block}, params) do
params = Map.get(params, Atom.to_string(name))
case do_validate(module, block, params) do
%{valid?: true} = valid_changeset ->
Ecto.Changeset.put_change(changeset, name, valid_changeset)
invalid_changeset ->
Ecto.Changeset.put_change(%{changeset | valid?: false}, name, invalid_changeset)
end
end
def validate_embed(changeset, module, name, {:embed_many, block}, params) do
params = Map.get(params, Atom.to_string(name))
if is_list(params) do
Enum.reduce(params, changeset, fn embedded_params, acc ->
embedded_changesets = Ecto.Changeset.get_change(acc, name, [])
case do_validate(module, block, embedded_params) do
%{valid?: true} = valid_changeset ->
Ecto.Changeset.put_change(
acc,
name,
embedded_changesets ++ [valid_changeset]
)
invalid_changeset ->
Ecto.Changeset.put_change(
%{acc | valid?: false},
name,
embedded_changesets ++ [invalid_changeset]
)
end
end)
else
changeset
end
end
@doc false
def apply_inline_validators(changeset, module, validators) do
Enum.reduce(validators, changeset, fn {key, validator}, acc ->
apply_inline_validator(acc, module, key, validator)
end)
end
@doc false
def apply_inline_validator(changeset, module, key, validator) do
case validator do
{function, data_or_opts} ->
do_apply_inline_validator(module, function, [changeset, key] ++ [data_or_opts])
{function, data, opts} ->
do_apply_inline_validator(module, function, [changeset, key] ++ [data, opts])
function when is_atom(function) ->
do_apply_inline_validator(module, function, [changeset, key, []])
_ ->
changeset
end
end
@doc false
def do_apply_inline_validator(module, function, params) do
arity = length(params)
if function_exported?(Ecto.Changeset, function, arity) do
apply(Ecto.Changeset, function, params)
else
apply(module, function, params)
end
end
@doc false
def apply_callback(changeset, _, nil, _), do: changeset
def apply_callback(changeset, module, callback, params) do
apply(module, callback, [changeset, params])
end
@doc false
def apply_changes(%{changes: changes, data: data}) do
Enum.reduce(changes, data, fn
{key, list}, acc when is_list(list) ->
Map.put(acc, key, apply_changes(list))
{key, %Ecto.Changeset{} = changeset}, acc ->
Map.put(acc, key, apply_changes(changeset))
{key, value}, acc ->
Map.put(acc, key, value)
end)
end
def apply_changes(list) when is_list(list) do
Enum.map(list, &apply_changes/1)
end
def apply_changes(any), do: any
end
|
lib/para.ex
| 0.876079
| 0.610976
|
para.ex
|
starcoder
|
defmodule Solar do
@moduledoc """
A library that provides information about the sun and in particular; events.
This first version handles sunrise and sunset.
All calls to `Solar` library are through this module.
"""
@doc """
Provides sunrise or sunset times for a provided location and date.
The algorithms/math used are from:
https://github.com/mikereedell/sunrisesunsetlib-java
Code has been completely rewritten to take advantage of the Elixir language.
The event function takes a minimum of two parameters: the event of interest,
which can be either `:rise` or `:set`, and a `{latitude, longitude}` tuple. Additionally,
a list of options can be provided as follows:
* `date:` allows a value of either `:today` or an Elixir date. The default
if this option is not provided is the current day.
* `zenith:` can be set to define the sunrise or sunset. See the `Zeniths`
module for a set of standard zeniths that are used. The default if a
zenith is not provided is `:official` most commonly used for sunrise and
sunset.
* `timezone:` can be provided and should be a standard timezone identifier
such as "America/Chicago". If the option is not provided, the system
timezone is used.
The following, without any options and run on December 25:
iex> Solar.event(:rise, {39.1371, -88.65})
{:ok, ~T[07:12:26]}
iex> Solar.event(:set, {39.1371, -88.65})
{:ok, ~T[16:38:01]}
The coordinates are for Lake Sara, IL where sunrise on this day will be at 7:12:26AM and sunset will be at 4:38:01PM.
The function returns the following:
{:ok, Time}
{:error, message}
"""
def event(event, location, opts \\ []) do
Solar.Events.event event, location, opts
end
@doc """
Generates the sunrise and sunset times, as well as the daylight duration. The following tuple is returned:
{:ok, sunrise, sunset, daylight}
This takes the same parameters as `Solar.event/3`, except for the event parameter.
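For example, with the Lake Sara coordinates used above:

    {:ok, sunrise, sunset, daylight} = Solar.day_info({39.1371, -88.65})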
"""
def day_info(location, opts \\ []) do
{ :ok, rise } = Solar.Events.event :rise, location, opts
{ :ok, set } = Solar.Events.event :set, location, opts
{ :ok, rise, set, Solar.Events.daylight(rise, set) }
end
end
|
lib/solar.ex
| 0.889993
| 0.752877
|
solar.ex
|
starcoder
|
defmodule StepFlow.Map do
@moduledoc """
Extend Map with some additional functions.
"""
@doc """
Get a key matching on an atom or a string.
Default value can be specified.
## Examples
iex> StepFlow.Map.get_by_key_or_atom(%{key: "value"}, :key)
"value"
iex> StepFlow.Map.get_by_key_or_atom(%{"key" => "value"}, :key)
"value"
iex> StepFlow.Map.get_by_key_or_atom(%{key: "value"}, "key")
"value"
iex> StepFlow.Map.get_by_key_or_atom(%{"key" => "value"}, "key")
"value"
"""
def get_by_key_or_atom(dict, atom, default \\ nil)
def get_by_key_or_atom(dict, atom, default) when is_atom(atom) do
Map.get_lazy(dict, atom, fn -> Map.get(dict, Atom.to_string(atom), default) end)
end
def get_by_key_or_atom(dict, string, default) when is_bitstring(string) do
Map.get_lazy(dict, string, fn -> Map.get(dict, String.to_atom(string), default) end)
end
def get_by_key_or_atom(_, _, _) do
raise "Got unsupported key type instead of expected Atom or String."
end
@doc """
Replace an item in a map, with atom or string keys.
## Examples
iex> StepFlow.Map.replace_by_atom(%{key: "value"}, :key, "replaced_value")
%{key: "replaced_value"}
iex> StepFlow.Map.replace_by_atom(%{"key" => "value"}, :key, "replaced_value")
%{key: "replaced_value"}
iex> StepFlow.Map.replace_by_atom(%{"key" => "value"}, "key", "replaced_value")
%{key: "replaced_value"}
"""
def replace_by_atom(dict, atom, value) when is_atom(atom) do
dict
|> Map.delete(Atom.to_string(atom))
|> Map.delete(atom)
|> Map.put(atom, value)
end
def replace_by_atom(dict, string, value) when is_bitstring(string) do
dict
|> Map.delete(String.to_atom(string))
|> Map.delete(string)
|> Map.put(String.to_atom(string), value)
end
def replace_by_atom(_dict, _atom, _value) do
raise "Got unsupported key type instead of expected Atom or String."
end
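@doc """
Replace an item in a map, keeping the key as a string.

## Examples

    iex> StepFlow.Map.replace_by_string(%{"key" => "value"}, "key", "replaced_value")
    %{"key" => "replaced_value"}

    iex> StepFlow.Map.replace_by_string(%{key: "value"}, "key", "replaced_value")
    %{"key" => "replaced_value"}
"""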
def replace_by_string(dict, string, value) when is_bitstring(string) do
dict
|> Map.delete(String.to_atom(string))
|> Map.delete(string)
|> Map.put(string, value)
end
def replace_by_string(_dict, _string, _value) do
raise "Got unsupported 2nd argument type, expected String."
end
end
|
lib/step_flow/map_tool.ex
| 0.872924
| 0.683631
|
map_tool.ex
|
starcoder
|
defmodule Ptolemy.Loader do
@moduledoc """
`Ptolemy.Loader` implements a highly opinionated Application Configuration solution.
Instead of having compile-time configuration and secrets, or simple system environment variables
on application startup, this module provides infrastructure on loading configuration
from anywhere, with the bonus support of dynamic configurations.
# Basics
Tell `Loader` what and where your configuration values should go. This is done in `config.exs`:
```elixir
alias Ptolemy.Providers.SystemEnv
config :ptolemy, loader: [
env: [
{{:app_name, :secret_key}, {SystemEnv, "PATH"}},
# ...
]
]
```
> The above configuration will result in the system environment variable `PATH` being set to
> your application's `:secret_key` value. It can be retrieved at any time afterwards with
> `Application.get_env(:app_name, :secret_key)`
To start your application with the loader, simply add it as the *first process* under your
application supervision tree.
```elixir
# add to your child process list in application.ex or other top-level supervising process
children = [
Ptolemy.Loader,
# ...
]
```
This will populate your application's key/value store for
all following processes. It is important to note that the one caveat to loading configuration
this way is that the `Loader` will block the startup of the remainder of the supervision tree
until initial values have been loaded into the application. This will most likely lead to
slightly longer startup times, depending on the providers used. Any subsequent updates the providers
notify the loader of are handled concurrently.
# Nested Configurations
Nested configurations are also supported by `Ptolemy.Loader`. To achieve a configuration equivalent to:
```elixir
config :app_name, top_key: [
first_nest: %{
target_key: "hello!"
}
]
```
The loader configuration would be similar to:
```elixir
config :app_name, top_key: [
first_nest: %{
target_key: "dummy_value"
}
]
alias Ptolemy.Providers.SystemEnv
config :ptolemy, loader: [
env: [
{{:app_name, [:top_key, :first_nest, :target_key]}, {SystemEnv, "TARGET_VAR"}}
]
]
```
The loader can only populate a configuration value without a stub when the value is stored as the top-level value.
Once the first value stored in a configuration is a structure, the loader cannot infer what structure
the value is expected to be stored in. The dummy value is included in the stub to be explicit, but only the surrounding
structure is required. For example, the configuration below will also work:
```elixir
config :app_name, top_key: [
first_nest: %{}
]
```
The loader will make no assumptions on the structure of configurations. It will raise an error on initialization
if the structure can not be updated to ensure configuration is always as intended after loader was initialized.
# Built-In Providers
Providers that ship with Ptolemy include:
- `Ptolemy.Providers.SystemEnv` - Loads system environment variables
# Performance Considerations
The best practices implied by the purpose of `Ptolemy.Loader` is that `Application.get_env/2`
should be called repeatedly at runtime whenever configuration dependent code is executed. This raises the question
of performance impacts on that dependent code from constantly calling a lookup function. As explored
in [this article](https://engineering.tripping.com/blazing-fast-elixir-configuration-475aca10011d),
you may incur small costs on massively frequent invocations and/or large return values, however at the
time of writing these docs, it is felt that this is an acceptable price to pay. Should a performance
bottleneck ever arise, support for the application environment will still not be replaced, in order to preserve
integration with third-party libraries.
"""
use GenServer
@typedoc """
The target configuration to be updated by a provider.
Targets are mapped to be later retrieved from `Application.get_env/2`.
"""
@type config_target :: {atom, atom | list(atom)}
@typedoc """
The specification to query a provider.
"""
@type provider_spec :: {module, any}
@load_callback_name :load
@doc """
Starts the Loader process.
While still functioning as a typical `start_link/1` helper, this implementation also contains blocking business
logic to ensure subsequent processes can retrieve populated application state values.
"""
def start_link(config \\ Application.get_env(:ptolemy, :loader)) do
case GenServer.start_link(__MODULE__, config) do
{:ok, pid} = result ->
GenServer.call(pid, :startup, 16000)
result
result ->
result
end
end
@doc """
Initializes the process's state.
This process is a special case where the state will already be built in the same process as the supervisor to
intentionally delay other processes from starting when loading configuration.
"""
@impl true
def init(args) do
{:ok, args}
end
@doc """
Retrieves the configuration of the loader.
"""
@spec config(pid) :: keyword
def config(pid) do
GenServer.call(pid, :config)
end
@doc """
Invokes a provider with a query and sets the result to the mapped application environment target.
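Note that `load/2` runs inside the loader process: `self()` is passed to the
provider's load callback so the provider can schedule refresh notifications.
A sketch, with a hypothetical provider module `MyProvider`:

    Ptolemy.Loader.load({:my_app, :secret_key}, {MyProvider, "query-arg"})
    Application.get_env(:my_app, :secret_key)
    #=> the value returned by MyProvider.load/2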
"""
@spec load(config_target, provider_spec) :: :ok
def load(config_target, provider_spec)
def load({app, [env_key]}, provider_spec) when is_atom(env_key),
do: load({app, env_key}, provider_spec)
def load({app, env_key}, {provider, provider_arg}) when is_atom(env_key) do
Application.put_env(
app,
env_key,
apply(provider, @load_callback_name, [self(), provider_arg])
)
end
def load({app, [top_key | nested_keys] = keys}, {provider, provider_arg})
when is_list(keys) do
case Application.get_env(app, top_key) do
nil ->
raise(
"No configuration structure to update! Please provide dummy configurations for all loaded configurations."
)
config ->
Application.put_env(
app,
top_key,
update_in(config, nested_keys, fn _ ->
apply(provider, @load_callback_name, [self(), provider_arg])
end)
)
end
end
####### impl
@impl true
def handle_call(:startup, _from, config) do
started_providers =
config
|> Keyword.get(:env, [])
|> Enum.reduce([], fn
{target, {provider, _} = provider_spec}, started ->
unless provider in started do
apply(provider, :init, [self()])
end
load(target, provider_spec)
[provider | started]
end)
|> Enum.uniq()
{:reply, :ok, config |> Keyword.put(:started, started_providers)}
end
@impl true
def handle_call(:config, _from, config) do
{:reply, config, config}
end
@impl true
def handle_info({:expired, {module, module_args}}, config) do
config
|> Keyword.get(:env)
|> Enum.find(fn
{_, {^module, ^module_args}} ->
true
_ ->
false
end)
|> case do
{target, provider} ->
load(target, provider)
nil ->
# TODO: Maybe log that a token expired with no env target?
nil
end
{:noreply, config}
end
end
|
lib/loader.ex
| 0.848737
| 0.836755
|
loader.ex
|
starcoder
|
defmodule PassiveSupport.Math do
@moduledoc """
Mathematical functions.
I'm not great at math.
"""
@doc """
Returns 1 if the number is positive, -1 if negative, and 0 if 0
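## Examples

    iex> sign(42)
    1
    iex> sign(-7.5)
    -1
    iex> sign(0)
    0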
"""
@spec sign(number) :: -1 | 0 | 1
def sign(number) when number == 0, do: 0
def sign(number) when is_number(number),
  do: trunc(number / abs(number))
defdelegate acos(x), to: :math
defdelegate acosh(x), to: :math
defdelegate asin(x), to: :math
defdelegate asinh(x), to: :math
defdelegate atan(x), to: :math
defdelegate atan2(x, y), to: :math
defdelegate atanh(x), to: :math
defdelegate cos(x), to: :math
defdelegate cosh(x), to: :math
defdelegate erf(x), to: :math
defdelegate erfc(x), to: :math
defdelegate exp(x), to: :math
defdelegate fmod(x, y), to: :math
defdelegate log(x), to: :math
defdelegate log10(x), to: :math
defdelegate log2(x), to: :math
defdelegate pi, to: :math
defdelegate sin(x), to: :math
defdelegate sinh(x), to: :math
defdelegate sqrt(x), to: :math
defdelegate tan(x), to: :math
defdelegate tanh(x), to: :math
defdelegate floor(x), to: Kernel
defdelegate ceil(x), to: Kernel
defdelegate fact(x), to: PassiveSupport.Integer, as: :factorial
@doc """
Raises `base` to the power of `exponent`.
Since `:math.pow/2` always does floating point arithmetic, whereas
`PassiveSupport.Integer.exponential/2` will do arbitrary precision arithmetic
if the `exponent` is a positive integer, `Math.pow/2` checks the types of
`base` and `exponent` and delegates out to the appropriate function based on
what it finds.
## Examples
iex> pow(2, 5)
32
iex> pow(2, 5.0)
32.0
iex> pow(2, 200)
1606938044258990275541962092341162602522202993782792835301376
iex> pow(2.0, 2000)
** (ArithmeticError)
"""
@spec pow(number, number) :: number
def pow(base, exponent) when is_integer(base) and is_integer(exponent),
do: PassiveSupport.Integer.exponential(base, exponent)
def pow(base, exponent), do: :math.pow(base, exponent)
end
|
lib/passive_support/ext/math.ex
| 0.827863
| 0.749615
|
math.ex
|
starcoder
|
defmodule OMG.API.Monitor do
@moduledoc """
This module is a custom-implemented supervisor that monitors all its children
and restarts them based on raised alarms. This means that while Geth alarms are raised,
it waits before restarting them.
When an EXIT is received, we check for a raised alarm related to Ethereum client synchronisation or connection
problems and react accordingly.
Children that need Ethereum client connectivity are OMG.API.EthereumEventListener,
OMG.API.BlockQueue.Server and OMG.API.RootChainCoordinator. For these children, we make
additional checks when they exit. If an alarm of type :ethereum_client_connection is raised, we postpone
the restart until the alarm is cleared. Other children are restarted immediately.
"""
use GenServer
require Logger
alias OMG.API.Alert.Alarm
# needs to be less then checks from RootChainCoordinator
@default_interval 300
@type t :: %__MODULE__{
pid: pid(),
spec: {module(), term()} | map(),
tref: :timer.tref() | nil
}
defstruct pid: nil, spec: nil, tref: nil
def start_link(children_specs) do
GenServer.start_link(__MODULE__, children_specs, name: __MODULE__)
end
def init(children_specs) do
Process.flag(:trap_exit, true)
children = Enum.map(children_specs, &start_child(&1))
{:ok, children}
end
def handle_info({:delayed_restart, child}, state) do
# child still holds the old pid
from = child.pid
with false <- is_raised?(),
{%__MODULE__{pid: ^from, tref: tref} = child, other_children} <- find_child_from_dead_pid(from, state) do
{:ok, :cancel} = :timer.cancel(tref)
new_child = start_child(child.spec)
{:noreply, [new_child | other_children]}
else
_ ->
# alarm is still raised, or the child was already cleared from state in a previous timer
{:noreply, state}
end
end
# We got an exit signal from a linked child, so we have to act as a supervisor now and decide what to do.
# We try to find the child via its old pid that we kept in the state, and retrieve its exit reason and the
# specification for restarting the child.
def handle_info({:EXIT, from, _reason}, state) do
{%__MODULE__{pid: ^from} = child, other_children} = find_child_from_dead_pid(from, state)
new_child = restart_or_delay(child)
{:noreply, [new_child | other_children]}
end
# We try to find the child spec from the pid that was started.
# The child will be updated, so we also return the new child list without that child.
@spec find_child_from_dead_pid(pid(), list(t)) :: {t, list(t)} | {nil, list(t)}
defp find_child_from_dead_pid(pid, state) do
item = Enum.find(state, &(&1.pid == pid))
{item, state -- [item]}
end
### Figure out if the client is unavailable. If it is, we'll postpone the
### restart until the alarm clears. Other processes can be restarted immediately.
defp restart_or_delay(child) do
case is_raised?() do
true ->
{:ok, tref} = :timer.send_interval(@default_interval, {:delayed_restart, child})
%__MODULE__{child | tref: tref}
_ ->
start_child(child.spec)
end
end
defp start_child({child_module, args} = spec) do
{:ok, pid} = child_module.start_link(args)
%__MODULE__{pid: pid, spec: spec}
end
defp start_child(%{id: _name, start: {child_module, function, args}} = spec) do
{:ok, pid} = apply(child_module, function, args)
%__MODULE__{pid: pid, spec: spec}
end
defp is_raised?() do
alarms = Alarm.all()
alarms
|> Enum.find(fn x -> match?(%{id: :ethereum_client_connection}, x) end)
|> is_map()
end
end
|
apps/omg_api/lib/monitor.ex
| 0.608361
| 0.433082
|
monitor.ex
|
starcoder
|
defmodule AWS.Cloudsearchdomain do
@moduledoc """
You use the AmazonCloudSearch2013 API to upload documents to a search domain and
search those documents.
The endpoints for submitting `UploadDocuments`, `Search`, and `Suggest` requests
are domain-specific. To get the endpoints for your domain, use the Amazon
CloudSearch configuration service `DescribeDomains` action. The domain endpoints
are also displayed on the domain dashboard in the Amazon CloudSearch console.
You submit suggest requests to the search endpoint.
For more information, see the [Amazon CloudSearch Developer Guide](http://docs.aws.amazon.com/cloudsearch/latest/developerguide).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2013-01-01",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "cloudsearchdomain",
global?: false,
protocol: "rest-json",
service_id: nil,
signature_version: "v4",
signing_name: "cloudsearch",
target_prefix: nil
}
end
@doc """
Retrieves a list of documents that match the specified search criteria.
How you specify the search criteria depends on which query parser you use.
Amazon CloudSearch supports four query parsers:
* `simple`: search all `text` and `text-array` fields for the
specified string. Search for phrases, individual terms, and prefixes.
* `structured`: search specific fields, construct compound queries
using Boolean operators, and use advanced features such as term boosting and
proximity searching.
* `lucene`: specify search criteria using the Apache Lucene query
parser syntax.
* `dismax`: specify search criteria using the simplified subset of
the Apache Lucene query parser syntax defined by the DisMax query parser.
For more information, see [Searching Your Data](http://docs.aws.amazon.com/cloudsearch/latest/developerguide/searching.html)
in the *Amazon CloudSearch Developer Guide*.
The endpoint for submitting `Search` requests is domain-specific. You submit
search requests to a domain's search endpoint. To get the search endpoint for
your domain, use the Amazon CloudSearch configuration service `DescribeDomains`
action. A domain's endpoints are also displayed on the domain dashboard in the
Amazon CloudSearch console.
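For example (a sketch; `client` is an `AWS.Client` configured with your
credentials, and the leading `nil`s skip the optional positional arguments
before `query`):

    {:ok, result, _http_response} =
      AWS.Cloudsearchdomain.search(client, nil, nil, nil, nil, nil, nil, "star wars")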
"""
def search(
%Client{} = client,
cursor \\ nil,
expr \\ nil,
facet \\ nil,
filter_query \\ nil,
highlight \\ nil,
partial \\ nil,
query,
query_options \\ nil,
query_parser \\ nil,
return \\ nil,
size \\ nil,
sort \\ nil,
start \\ nil,
stats \\ nil,
options \\ []
) do
url_path = "/2013-01-01/search?format=sdk&pretty=true"
headers = []
query_params = []
query_params =
if !is_nil(stats) do
[{"stats", stats} | query_params]
else
query_params
end
query_params =
if !is_nil(start) do
[{"start", start} | query_params]
else
query_params
end
query_params =
if !is_nil(sort) do
[{"sort", sort} | query_params]
else
query_params
end
query_params =
if !is_nil(size) do
[{"size", size} | query_params]
else
query_params
end
query_params =
if !is_nil(return) do
[{"return", return} | query_params]
else
query_params
end
query_params =
if !is_nil(query_parser) do
[{"q.parser", query_parser} | query_params]
else
query_params
end
query_params =
if !is_nil(query_options) do
[{"q.options", query_options} | query_params]
else
query_params
end
query_params =
if !is_nil(query) do
[{"q", query} | query_params]
else
query_params
end
query_params =
if !is_nil(partial) do
[{"partial", partial} | query_params]
else
query_params
end
query_params =
if !is_nil(highlight) do
[{"highlight", highlight} | query_params]
else
query_params
end
query_params =
if !is_nil(filter_query) do
[{"fq", filter_query} | query_params]
else
query_params
end
query_params =
if !is_nil(facet) do
[{"facet", facet} | query_params]
else
query_params
end
query_params =
if !is_nil(expr) do
[{"expr", expr} | query_params]
else
query_params
end
query_params =
if !is_nil(cursor) do
[{"cursor", cursor} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Retrieves autocomplete suggestions for a partial query string.
You can use suggestions to display likely matches before users finish typing.
In Amazon CloudSearch, suggestions are based on the contents of a
particular text field. When you request suggestions, Amazon CloudSearch finds
all of the documents whose values in the suggester field start with the
specified query string. The beginning of the field must match the query string
to be considered a match.
For more information about configuring suggesters and retrieving suggestions,
see [Getting Suggestions](http://docs.aws.amazon.com/cloudsearch/latest/developerguide/getting-suggestions.html)
in the *Amazon CloudSearch Developer Guide*.
The endpoint for submitting `Suggest` requests is domain-specific. You submit
suggest requests to a domain's search endpoint. To get the search endpoint for
your domain, use the Amazon CloudSearch configuration service `DescribeDomains`
action. A domain's endpoints are also displayed on the domain dashboard in the
Amazon CloudSearch console.
"""
def suggest(%Client{} = client, query, size \\ nil, suggester, options \\ []) do
url_path = "/2013-01-01/suggest?format=sdk&pretty=true"
headers = []
query_params = []
query_params =
if !is_nil(suggester) do
[{"suggester", suggester} | query_params]
else
query_params
end
query_params =
if !is_nil(size) do
[{"size", size} | query_params]
else
query_params
end
query_params =
if !is_nil(query) do
[{"q", query} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Posts a batch of documents to a search domain for indexing.
A document batch is a collection of add and delete operations that represent the
documents you want to add, update, or delete from your domain. Batches can be
described in either JSON or XML. Each item that you want Amazon CloudSearch to
return as a search result (such as a product) is represented as a document.
Every document has a unique ID and one or more fields that contain the data that
you want to search and return in results. Individual documents cannot contain
more than 1 MB of data. The entire batch cannot exceed 5 MB. To get the best
possible upload performance, group add and delete operations in batches that are
close to the 5 MB limit. Submitting a large volume of single-document batches can
overload a domain's document service.
The endpoint for submitting `UploadDocuments` requests is domain-specific. To
get the document endpoint for your domain, use the Amazon CloudSearch
configuration service `DescribeDomains` action. A domain's endpoints are also
displayed on the domain dashboard in the Amazon CloudSearch console.
For more information about formatting your data for Amazon CloudSearch, see
[Preparing Your Data](http://docs.aws.amazon.com/cloudsearch/latest/developerguide/preparing-data.html)
in the *Amazon CloudSearch Developer Guide*. For more information about
uploading data for indexing, see [Uploading Data](http://docs.aws.amazon.com/cloudsearch/latest/developerguide/uploading-data.html)
in the *Amazon CloudSearch Developer Guide*.
"""
def upload_documents(%Client{} = client, input, options \\ []) do
url_path = "/2013-01-01/documents/batch?format=sdk"
{headers, input} =
[
{"contentType", "Content-Type"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
end
|
lib/aws/generated/cloudsearchdomain.ex
| 0.887205
| 0.457924
|
cloudsearchdomain.ex
|
starcoder
|
defmodule RDF.Dataset do
@moduledoc """
A set of `RDF.Graph`s.
It may have multiple named graphs and at most one unnamed ("default") graph.
`RDF.Dataset` implements:
- Elixir's `Access` behaviour
- Elixir's `Enumerable` protocol
- Elixir's `Inspect` protocol
- the `RDF.Data` protocol
"""
@behaviour Access
alias RDF.{Description, Graph, IRI, Statement}
import RDF.Statement
@type graph_name :: IRI.t | nil
@type t :: %__MODULE__{
name: graph_name,
graphs: %{graph_name => Graph.t}
}
@type input :: Graph.input | t
@type update_graph_fun :: (Graph.t -> {Graph.t, input} | :pop)
defstruct name: nil, graphs: %{}
@doc """
Creates an empty unnamed `RDF.Dataset`.
"""
@spec new :: t
def new, do: %RDF.Dataset{}
@doc """
Creates an `RDF.Dataset`.
If a keyword list is given an empty dataset is created.
Otherwise an unnamed dataset initialized with the given data is created.
See `new/2` for available arguments and the different ways to provide data.
## Examples
RDF.Dataset.new({EX.S, EX.p, EX.O})
RDF.Dataset.new(name: EX.DatasetName)
"""
@spec new(input | [input] | keyword) :: t
def new(data_or_options)
def new(data_or_options)
when is_list(data_or_options) and length(data_or_options) != 0 do
if Keyword.keyword?(data_or_options) do
new([], data_or_options)
else
new(data_or_options, [])
end
end
def new(data), do: new(data, [])
@doc """
Creates an `RDF.Dataset` initialized with data.
The initial RDF triples can be provided
- as a single statement tuple
- an `RDF.Description`
- an `RDF.Graph`
- an `RDF.Dataset`
- or a list with any combination of the former
Available options:
- `name`: the name of the dataset to be created
"""
@spec new(input | [input], keyword) :: t
def new(data, options)
def new(%RDF.Dataset{} = dataset, options) do
%RDF.Dataset{dataset | name: options |> Keyword.get(:name) |> coerce_graph_name()}
end
def new(data, options) do
%RDF.Dataset{}
|> new(options)
|> add(data)
end
@doc """
Adds triples and quads to a `RDF.Dataset`.
The optional third `graph_context` argument allows setting a different
destination graph to which the statements are added, ignoring the graph context
of given quads or the name of given graphs.
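## Examples

    RDF.Dataset.new()
    |> RDF.Dataset.add({EX.S, EX.p, EX.O1})           # added to the default graph
    |> RDF.Dataset.add({EX.S, EX.p, EX.O2}, EX.Graph) # forced into the graph named EX.Graph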
"""
@spec add(t, input | [input], Statement.coercible_graph_name | boolean | nil) :: t
def add(dataset, statements, graph_context \\ false)
def add(dataset, statements, graph_context) when is_list(statements) do
with graph_context = graph_context && coerce_graph_name(graph_context) do
Enum.reduce statements, dataset, fn (statement, dataset) ->
add(dataset, statement, graph_context)
end
end
end
def add(dataset, {subject, predicate, objects}, false),
do: add(dataset, {subject, predicate, objects, nil})
def add(dataset, {subject, predicate, objects}, graph_context),
do: add(dataset, {subject, predicate, objects, graph_context})
def add(%RDF.Dataset{name: name, graphs: graphs},
{subject, predicate, objects, graph_context}, false) do
with graph_context = coerce_graph_name(graph_context) do
updated_graphs =
Map.update(graphs, graph_context,
Graph.new({subject, predicate, objects}, name: graph_context),
fn graph -> Graph.add(graph, {subject, predicate, objects}) end)
%RDF.Dataset{name: name, graphs: updated_graphs}
end
end
def add(%RDF.Dataset{} = dataset, {subject, predicate, objects, _}, graph_context),
do: add(dataset, {subject, predicate, objects, graph_context}, false)
def add(%RDF.Dataset{} = dataset, %Description{} = description, false),
do: add(dataset, description, nil)
def add(%RDF.Dataset{name: name, graphs: graphs},
%Description{} = description, graph_context) do
with graph_context = coerce_graph_name(graph_context) do
updated_graph =
Map.get(graphs, graph_context, Graph.new(name: graph_context))
|> Graph.add(description)
%RDF.Dataset{
name: name,
graphs: Map.put(graphs, graph_context, updated_graph)
}
end
end
def add(%RDF.Dataset{name: name, graphs: graphs}, %Graph{} = graph, false) do
%RDF.Dataset{name: name,
graphs:
Map.update(graphs, graph.name, graph, fn current ->
Graph.add(current, graph)
end)
}
end
def add(%RDF.Dataset{} = dataset, %Graph{} = graph, graph_context),
do: add(dataset, %Graph{graph | name: coerce_graph_name(graph_context)}, false)
def add(%RDF.Dataset{} = dataset, %RDF.Dataset{} = other_dataset, graph_context) do
with graph_context = graph_context && coerce_graph_name(graph_context) do
Enum.reduce graphs(other_dataset), dataset, fn (graph, dataset) ->
add(dataset, graph, graph_context)
end
end
end
@doc """
Adds statements to a `RDF.Dataset` and overwrites all existing statements with the same subjects and predicates in the specified graph context.
## Examples
iex> dataset = RDF.Dataset.new({EX.S, EX.P1, EX.O1})
...> RDF.Dataset.put(dataset, {EX.S, EX.P1, EX.O2})
RDF.Dataset.new({EX.S, EX.P1, EX.O2})
iex> RDF.Dataset.put(dataset, {EX.S, EX.P2, EX.O2})
RDF.Dataset.new([{EX.S, EX.P1, EX.O1}, {EX.S, EX.P2, EX.O2}])
iex> RDF.Dataset.new([{EX.S1, EX.P1, EX.O1}, {EX.S2, EX.P2, EX.O2}]) |>
...> RDF.Dataset.put([{EX.S1, EX.P2, EX.O3}, {EX.S2, EX.P2, EX.O3}])
RDF.Dataset.new([{EX.S1, EX.P1, EX.O1}, {EX.S1, EX.P2, EX.O3}, {EX.S2, EX.P2, EX.O3}])
"""
@spec put(t, input | [input], Statement.coercible_graph_name | boolean | nil) :: t
def put(dataset, statements, graph_context \\ false)
def put(%RDF.Dataset{} = dataset, {subject, predicate, objects}, false),
do: put(dataset, {subject, predicate, objects, nil})
def put(%RDF.Dataset{} = dataset, {subject, predicate, objects}, graph_context),
do: put(dataset, {subject, predicate, objects, graph_context})
def put(%RDF.Dataset{name: name, graphs: graphs},
{subject, predicate, objects, graph_context}, false) do
with graph_context = coerce_graph_name(graph_context) do
new_graph =
case graphs[graph_context] do
graph = %Graph{} ->
Graph.put(graph, {subject, predicate, objects})
nil ->
Graph.new({subject, predicate, objects}, name: graph_context)
end
%RDF.Dataset{name: name,
graphs: Map.put(graphs, graph_context, new_graph)}
end
end
def put(%RDF.Dataset{} = dataset, {subject, predicate, objects, _}, graph_context),
do: put(dataset, {subject, predicate, objects, graph_context}, false)
def put(%RDF.Dataset{} = dataset, statements, false) when is_list(statements) do
do_put dataset, Enum.group_by(statements,
fn
{s, _, _} -> {s, nil}
{s, _, _, nil} -> {s, nil}
{s, _, _, c} -> {s, coerce_graph_name(c)}
end,
fn
{_, p, o, _} -> {p, o}
{_, p, o} -> {p, o}
end)
end
def put(%RDF.Dataset{} = dataset, statements, graph_context) when is_list(statements) do
with graph_context = coerce_graph_name(graph_context) do
do_put dataset, Enum.group_by(statements,
fn
{s, _, _, _} -> {s, graph_context}
{s, _, _} -> {s, graph_context}
end,
fn
{_, p, o, _} -> {p, o}
{_, p, o} -> {p, o}
end)
end
end
def put(%RDF.Dataset{} = dataset, %Description{} = description, false),
do: put(dataset, description, nil)
def put(%RDF.Dataset{name: name, graphs: graphs},
%Description{} = description, graph_context) do
with graph_context = coerce_graph_name(graph_context) do
updated_graph =
Map.get(graphs, graph_context, Graph.new(name: graph_context))
|> Graph.put(description)
%RDF.Dataset{
name: name,
graphs: Map.put(graphs, graph_context, updated_graph)
}
end
end
def put(%RDF.Dataset{name: name, graphs: graphs}, %Graph{} = graph, false) do
%RDF.Dataset{name: name,
graphs:
Map.update(graphs, graph.name, graph, fn current ->
Graph.put(current, graph)
end)
}
end
def put(%RDF.Dataset{} = dataset, %Graph{} = graph, graph_context),
do: put(dataset, %Graph{graph | name: coerce_graph_name(graph_context)}, false)
def put(%RDF.Dataset{} = dataset, %RDF.Dataset{} = other_dataset, graph_context) do
with graph_context = graph_context && coerce_graph_name(graph_context) do
Enum.reduce graphs(other_dataset), dataset, fn (graph, dataset) ->
put(dataset, graph, graph_context)
end
end
end
defp do_put(%RDF.Dataset{} = dataset, statements) when is_map(statements) do
Enum.reduce statements, dataset,
fn ({subject_with_context, predications}, dataset) ->
do_put(dataset, subject_with_context, predications)
end
end
defp do_put(%RDF.Dataset{name: name, graphs: graphs},
{subject, graph_context}, predications)
when is_list(predications) do
with graph_context = coerce_graph_name(graph_context) do
graph = Map.get(graphs, graph_context, Graph.new(name: graph_context))
new_graphs = graphs
|> Map.put(graph_context, Graph.put(graph, subject, predications))
%RDF.Dataset{name: name, graphs: new_graphs}
end
end
@doc """
Deletes statements from a `RDF.Dataset`.
The optional third `graph_context` argument allows setting a different
destination graph from which the statements are deleted, ignoring the graph
context of given quads or the name of given graphs.
Note: When the statements to be deleted are given as another `RDF.Dataset`,
its name does not have to match the name of the dataset from which the
statements are deleted. If you want to delete statements only when the dataset
names match, you can use `RDF.Data.delete/2`.
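## Examples

    dataset = RDF.Dataset.new({EX.S, EX.p, EX.O, EX.Graph})
    RDF.Dataset.delete(dataset, {EX.S, EX.p, EX.O}, EX.Graph)
    #=> RDF.Dataset.new()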
"""
@spec delete(t, input | [input], Statement.coercible_graph_name | boolean | nil) :: t
def delete(dataset, statements, graph_context \\ false)
def delete(%RDF.Dataset{} = dataset, statements, graph_context) when is_list(statements) do
with graph_context = graph_context && coerce_graph_name(graph_context) do
Enum.reduce statements, dataset, fn (statement, dataset) ->
delete(dataset, statement, graph_context)
end
end
end
def delete(%RDF.Dataset{} = dataset, {_, _, _} = statement, false),
do: do_delete(dataset, nil, statement)
def delete(%RDF.Dataset{} = dataset, {_, _, _} = statement, graph_context),
do: do_delete(dataset, graph_context, statement)
def delete(%RDF.Dataset{} = dataset, {subject, predicate, objects, graph_context}, false),
do: do_delete(dataset, graph_context, {subject, predicate, objects})
def delete(%RDF.Dataset{} = dataset, {subject, predicate, objects, _}, graph_context),
do: do_delete(dataset, graph_context, {subject, predicate, objects})
def delete(%RDF.Dataset{} = dataset, %Description{} = description, false),
do: do_delete(dataset, nil, description)
def delete(%RDF.Dataset{} = dataset, %Description{} = description, graph_context),
do: do_delete(dataset, graph_context, description)
def delete(%RDF.Dataset{} = dataset, %RDF.Graph{name: name} = graph, false),
do: do_delete(dataset, name, graph)
def delete(%RDF.Dataset{} = dataset, %RDF.Graph{} = graph, graph_context),
do: do_delete(dataset, graph_context, graph)
def delete(%RDF.Dataset{} = dataset, %RDF.Dataset{graphs: graphs}, graph_context) do
Enum.reduce graphs, dataset, fn ({_, graph}, dataset) ->
delete(dataset, graph, graph_context)
end
end
defp do_delete(%RDF.Dataset{name: name, graphs: graphs} = dataset,
graph_context, statements) do
with graph_context = coerce_graph_name(graph_context),
graph when not is_nil(graph) <- graphs[graph_context],
new_graph = Graph.delete(graph, statements)
do
%RDF.Dataset{name: name,
graphs:
if Enum.empty?(new_graph) do
Map.delete(graphs, graph_context)
else
Map.put(graphs, graph_context, new_graph)
end
}
else
nil -> dataset
end
end
@doc """
Deletes the given graph.
"""
@spec delete_graph(t, Statement.graph_name | [Statement.graph_name] | nil) :: t
def delete_graph(graph, graph_names)
def delete_graph(%RDF.Dataset{} = dataset, graph_names) when is_list(graph_names) do
Enum.reduce graph_names, dataset, fn (graph_name, dataset) ->
delete_graph(dataset, graph_name)
end
end
def delete_graph(%RDF.Dataset{name: name, graphs: graphs}, graph_name) do
with graph_name = coerce_graph_name(graph_name) do
%RDF.Dataset{name: name, graphs: Map.delete(graphs, graph_name)}
end
end
@doc """
Deletes the default graph.
"""
@spec delete_default_graph(t) :: t
def delete_default_graph(%RDF.Dataset{} = graph),
do: delete_graph(graph, nil)
@doc """
Fetches the `RDF.Graph` with the given name.
When a graph with the given name cannot be found, `:error` is returned.
## Examples
iex> dataset = RDF.Dataset.new([{EX.S1, EX.P1, EX.O1, EX.Graph}, {EX.S2, EX.P2, EX.O2}])
...> RDF.Dataset.fetch(dataset, EX.Graph)
{:ok, RDF.Graph.new({EX.S1, EX.P1, EX.O1}, name: EX.Graph)}
iex> RDF.Dataset.fetch(dataset, nil)
{:ok, RDF.Graph.new({EX.S2, EX.P2, EX.O2})}
iex> RDF.Dataset.fetch(dataset, EX.Foo)
:error
"""
@impl Access
@spec fetch(t, Statement.graph_name | nil) :: {:ok, Graph.t} | :error
def fetch(%RDF.Dataset{graphs: graphs}, graph_name) do
Access.fetch(graphs, coerce_graph_name(graph_name))
end
@doc """
Fetches the `RDF.Graph` with the given name.
When a graph with the given name cannot be found, the optionally
given default value or `nil` is returned.
## Examples
iex> dataset = RDF.Dataset.new([{EX.S1, EX.P1, EX.O1, EX.Graph}, {EX.S2, EX.P2, EX.O2}])
...> RDF.Dataset.get(dataset, EX.Graph)
RDF.Graph.new({EX.S1, EX.P1, EX.O1}, name: EX.Graph)
iex> RDF.Dataset.get(dataset, nil)
RDF.Graph.new({EX.S2, EX.P2, EX.O2})
iex> RDF.Dataset.get(dataset, EX.Foo)
nil
iex> RDF.Dataset.get(dataset, EX.Foo, :bar)
:bar
"""
@spec get(t, Statement.graph_name | nil, Graph.t | nil) :: Graph.t | nil
def get(%RDF.Dataset{} = dataset, graph_name, default \\ nil) do
case fetch(dataset, graph_name) do
{:ok, value} -> value
:error -> default
end
end
@doc """
The graph with the given name.
"""
@spec graph(t, Statement.graph_name | nil) :: Graph.t
def graph(%RDF.Dataset{graphs: graphs}, graph_name),
do: Map.get(graphs, coerce_graph_name(graph_name))
@doc """
The default graph of a `RDF.Dataset`.
"""
@spec default_graph(t) :: Graph.t
def default_graph(%RDF.Dataset{graphs: graphs}),
do: Map.get(graphs, nil, Graph.new)
@doc """
The set of all graphs.
"""
@spec graphs(t) :: [Graph.t]
def graphs(%RDF.Dataset{graphs: graphs}), do: Map.values(graphs)
@doc """
Gets and updates the graph with the given name, in a single pass.
Invokes the passed function on the `RDF.Graph` with the given name;
this function should return either `{graph_to_return, new_graph}` or `:pop`.
If the passed function returns `{graph_to_return, new_graph}`, the
return value of `get_and_update` is `{graph_to_return, new_dataset}` where
`new_dataset` is the input `Dataset` updated with `new_graph` for
the given name.
If the passed function returns `:pop` the graph with the given name is
removed and a `{removed_graph, new_dataset}` tuple gets returned.
## Examples
iex> dataset = RDF.Dataset.new({EX.S, EX.P, EX.O, EX.Graph})
...> RDF.Dataset.get_and_update(dataset, EX.Graph, fn current_graph ->
...> {current_graph, {EX.S, EX.P, EX.NEW}}
...> end)
{RDF.Graph.new({EX.S, EX.P, EX.O}, name: EX.Graph), RDF.Dataset.new({EX.S, EX.P, EX.NEW, EX.Graph})}
"""
@impl Access
@spec get_and_update(t, Statement.graph_name | nil, update_graph_fun) :: {Graph.t, input}
def get_and_update(%RDF.Dataset{} = dataset, graph_name, fun) do
with graph_context = coerce_graph_name(graph_name) do
case fun.(get(dataset, graph_context)) do
{old_graph, new_graph} ->
{old_graph, put(dataset, new_graph, graph_context)}
:pop ->
pop(dataset, graph_context)
other ->
raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}"
end
end
end
@doc """
Pops an arbitrary statement from a `RDF.Dataset`.
"""
@spec pop(t) :: {Statement.t | nil, t}
def pop(dataset)
def pop(%RDF.Dataset{graphs: graphs} = dataset)
when graphs == %{}, do: {nil, dataset}
def pop(%RDF.Dataset{name: name, graphs: graphs}) do
# TODO: Find a faster way ...
[{graph_name, graph}] = Enum.take(graphs, 1)
{{s, p, o}, popped_graph} = Graph.pop(graph)
popped = if Enum.empty?(popped_graph),
do: graphs |> Map.delete(graph_name),
else: graphs |> Map.put(graph_name, popped_graph)
{{s, p, o, graph_name}, %RDF.Dataset{name: name, graphs: popped}}
end
@doc """
Pops the graph with the given name.
When a graph with the given name cannot be found, `nil` is returned in its
place.
## Examples
iex> dataset = RDF.Dataset.new([
...> {EX.S1, EX.P1, EX.O1, EX.Graph},
...> {EX.S2, EX.P2, EX.O2}])
...> RDF.Dataset.pop(dataset, EX.Graph)
{RDF.Graph.new({EX.S1, EX.P1, EX.O1}, name: EX.Graph), RDF.Dataset.new({EX.S2, EX.P2, EX.O2})}
iex> RDF.Dataset.pop(dataset, EX.Foo)
{nil, dataset}
"""
@impl Access
@spec pop(t, Statement.coercible_graph_name) :: {Statement.t | nil, t}
def pop(%RDF.Dataset{name: name, graphs: graphs} = dataset, graph_name) do
case Access.pop(graphs, coerce_graph_name(graph_name)) do
{nil, _} ->
{nil, dataset}
{graph, new_graphs} ->
{graph, %RDF.Dataset{name: name, graphs: new_graphs}}
end
end
@doc """
The number of statements within a `RDF.Dataset`.
## Examples
iex> RDF.Dataset.new([
...> {EX.S1, EX.p1, EX.O1, EX.Graph},
...> {EX.S2, EX.p2, EX.O2},
...> {EX.S1, EX.p2, EX.O3}]) |>
...> RDF.Dataset.statement_count
3
"""
@spec statement_count(t) :: non_neg_integer
def statement_count(%RDF.Dataset{graphs: graphs}) do
Enum.reduce graphs, 0, fn ({_, graph}, count) ->
count + Graph.triple_count(graph)
end
end
@doc """
The set of all subjects used in the statements within all graphs of a `RDF.Dataset`.
## Examples
iex> RDF.Dataset.new([
...> {EX.S1, EX.p1, EX.O1, EX.Graph},
...> {EX.S2, EX.p2, EX.O2},
...> {EX.S1, EX.p2, EX.O3}]) |>
...> RDF.Dataset.subjects
MapSet.new([RDF.iri(EX.S1), RDF.iri(EX.S2)])
"""
def subjects(%RDF.Dataset{graphs: graphs}) do
Enum.reduce graphs, MapSet.new, fn ({_, graph}, subjects) ->
MapSet.union(subjects, Graph.subjects(graph))
end
end
@doc """
The set of all properties used in the predicates within all graphs of a `RDF.Dataset`.
## Examples
iex> RDF.Dataset.new([
...> {EX.S1, EX.p1, EX.O1, EX.Graph},
...> {EX.S2, EX.p2, EX.O2},
...> {EX.S1, EX.p2, EX.O3}]) |>
...> RDF.Dataset.predicates
MapSet.new([EX.p1, EX.p2])
"""
def predicates(%RDF.Dataset{graphs: graphs}) do
Enum.reduce graphs, MapSet.new, fn ({_, graph}, predicates) ->
MapSet.union(predicates, Graph.predicates(graph))
end
end
@doc """
The set of all resources used in the objects within a `RDF.Dataset`.
Note: This function only collects IRIs and BlankNodes, not Literals.
## Examples
iex> RDF.Dataset.new([
...> {EX.S1, EX.p1, EX.O1, EX.Graph},
...> {EX.S2, EX.p2, EX.O2, EX.Graph},
...> {EX.S3, EX.p1, EX.O2},
...> {EX.S4, EX.p2, RDF.bnode(:bnode)},
...> {EX.S5, EX.p3, "foo"}
...> ]) |> RDF.Dataset.objects
MapSet.new([RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode)])
"""
def objects(%RDF.Dataset{graphs: graphs}) do
Enum.reduce graphs, MapSet.new, fn ({_, graph}, objects) ->
MapSet.union(objects, Graph.objects(graph))
end
end
@doc """
The set of all resources used within a `RDF.Dataset`.
## Examples
iex> RDF.Dataset.new([
...> {EX.S1, EX.p1, EX.O1, EX.Graph},
...> {EX.S2, EX.p1, EX.O2, EX.Graph},
...> {EX.S2, EX.p2, RDF.bnode(:bnode)},
...> {EX.S3, EX.p1, "foo"}
...> ]) |> RDF.Dataset.resources
MapSet.new([RDF.iri(EX.S1), RDF.iri(EX.S2), RDF.iri(EX.S3),
RDF.iri(EX.O1), RDF.iri(EX.O2), RDF.bnode(:bnode), EX.p1, EX.p2])
"""
def resources(%RDF.Dataset{graphs: graphs}) do
Enum.reduce graphs, MapSet.new, fn ({_, graph}, resources) ->
MapSet.union(resources, Graph.resources(graph))
end
end
@doc """
All statements within all graphs of a `RDF.Dataset`.
## Examples
iex> RDF.Dataset.new([
...> {EX.S1, EX.p1, EX.O1, EX.Graph},
...> {EX.S2, EX.p2, EX.O2},
...> {EX.S1, EX.p2, EX.O3}]) |>
...> RDF.Dataset.statements
[{RDF.iri(EX.S1), RDF.iri(EX.p1), RDF.iri(EX.O1), RDF.iri(EX.Graph)},
{RDF.iri(EX.S1), RDF.iri(EX.p2), RDF.iri(EX.O3)},
{RDF.iri(EX.S2), RDF.iri(EX.p2), RDF.iri(EX.O2)}]
"""
@spec statements(t) :: [Statement.t]
def statements(%RDF.Dataset{graphs: graphs}) do
Enum.reduce graphs, [], fn ({_, graph}, all_statements) ->
statements = Graph.triples(graph)
if graph.name do
Enum.map statements, fn {s, p, o} -> {s, p, o, graph.name} end
else
statements
end ++ all_statements
end
end
@doc """
Returns whether a given statement is in a `RDF.Dataset`.
## Examples
iex> dataset = RDF.Dataset.new([
...> {EX.S1, EX.p1, EX.O1, EX.Graph},
...> {EX.S2, EX.p2, EX.O2},
...> {EX.S1, EX.p2, EX.O3}])
...> RDF.Dataset.include?(dataset, {EX.S1, EX.p1, EX.O1, EX.Graph})
true
"""
@spec include?(t, Statement.t, Statement.coercible_graph_name | nil) :: boolean
def include?(dataset, statement, graph_context \\ nil)
def include?(%RDF.Dataset{graphs: graphs}, triple = {_, _, _}, graph_context) do
with graph_context = coerce_graph_name(graph_context) do
if graph = graphs[graph_context] do
Graph.include?(graph, triple)
else
false
end
end
end
def include?(%RDF.Dataset{} = dataset, {subject, predicate, object, graph_context}, _),
do: include?(dataset, {subject, predicate, object}, graph_context)
@doc """
Checks if a graph of a `RDF.Dataset` contains statements about the given resource.
## Examples
iex> RDF.Dataset.new([{EX.S1, EX.p1, EX.O1}]) |> RDF.Dataset.describes?(EX.S1)
true
iex> RDF.Dataset.new([{EX.S1, EX.p1, EX.O1}]) |> RDF.Dataset.describes?(EX.S2)
false
"""
@spec describes?(t, Statement.coercible_subject, Statement.coercible_graph_name | nil) :: boolean
def describes?(%RDF.Dataset{graphs: graphs}, subject, graph_context \\ nil) do
with graph_context = coerce_graph_name(graph_context) do
if graph = graphs[graph_context] do
Graph.describes?(graph, subject)
else
false
end
end
end
@doc """
Returns the names of all graphs of a `RDF.Dataset` containing statements about the given subject.
## Examples
iex> dataset = RDF.Dataset.new([
...> {EX.S1, EX.p, EX.O},
...> {EX.S2, EX.p, EX.O},
...> {EX.S1, EX.p, EX.O, EX.Graph1},
...> {EX.S2, EX.p, EX.O, EX.Graph2}])
...> RDF.Dataset.who_describes(dataset, EX.S1)
[nil, RDF.iri(EX.Graph1)]
"""
@spec who_describes(t, Statement.coercible_subject) :: [Statement.graph_name | nil]
def who_describes(%RDF.Dataset{graphs: graphs}, subject) do
with subject = coerce_subject(subject) do
graphs
|> Map.values
|> Stream.filter(&Graph.describes?(&1, subject))
|> Enum.map(&(&1.name))
end
end
@doc """
Returns a nested map of the native Elixir values of a `RDF.Dataset`.
The optional second argument allows specifying a custom mapping with a function
which will receive a tuple `{statement_position, rdf_term}` where
`statement_position` is one of the atoms `:subject`, `:predicate`, `:object`,
or `:graph_name`, while `rdf_term` is the RDF term to be mapped.
## Examples
iex> [
...> {~I<http://example.com/S>, ~I<http://example.com/p>, ~L"Foo", ~I<http://example.com/Graph>},
...> {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.XSD.integer(42)}
...> ]
...> |> RDF.Dataset.new()
...> |> RDF.Dataset.values()
%{
"http://example.com/Graph" => %{
"http://example.com/S" => %{"http://example.com/p" => ["Foo"]}
},
nil => %{
"http://example.com/S" => %{"http://example.com/p" => [42]}
}
}
iex> [
...> {~I<http://example.com/S>, ~I<http://example.com/p>, ~L"Foo", ~I<http://example.com/Graph>},
...> {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.XSD.integer(42)}
...> ]
...> |> RDF.Dataset.new()
...> |> RDF.Dataset.values(fn
...> {:graph_name, graph_name} ->
...> graph_name
...> {:predicate, predicate} ->
...> predicate
...> |> to_string()
...> |> String.split("/")
...> |> List.last()
...> |> String.to_atom()
...> {_, term} ->
...> RDF.Term.value(term)
...> end)
%{
~I<http://example.com/Graph> => %{
"http://example.com/S" => %{p: ["Foo"]}
},
nil => %{
"http://example.com/S" => %{p: [42]}
}
}
"""
@spec values(t, Statement.term_mapping) :: map
def values(dataset, mapping \\ &RDF.Statement.default_term_mapping/1)
def values(%RDF.Dataset{graphs: graphs}, mapping) do
Map.new graphs, fn {graph_name, graph} ->
{mapping.({:graph_name, graph_name}), Graph.values(graph, mapping)}
end
end
@doc """
Checks if two `RDF.Dataset`s are equal.
Two `RDF.Dataset`s are considered to be equal if they contain the same triples
and have the same name.
"""
@spec equal?(t | any, t | any) :: boolean
def equal?(dataset1, dataset2)
def equal?(%RDF.Dataset{} = dataset1, %RDF.Dataset{} = dataset2) do
clear_metadata(dataset1) == clear_metadata(dataset2)
end
def equal?(_, _), do: false
defp clear_metadata(%RDF.Dataset{graphs: graphs} = dataset) do
%RDF.Dataset{dataset |
graphs:
Map.new(graphs, fn {name, graph} ->
{name, RDF.Graph.clear_metadata(graph)}
end)
}
end
defimpl Enumerable do
def member?(dataset, statement), do: {:ok, RDF.Dataset.include?(dataset, statement)}
def count(dataset), do: {:ok, RDF.Dataset.statement_count(dataset)}
def slice(_dataset), do: {:error, __MODULE__}
def reduce(%RDF.Dataset{graphs: graphs}, {:cont, acc}, _fun)
when map_size(graphs) == 0, do: {:done, acc}
def reduce(%RDF.Dataset{} = dataset, {:cont, acc}, fun) do
{statement, rest} = RDF.Dataset.pop(dataset)
reduce(rest, fun.(statement, acc), fun)
end
def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(dataset = %RDF.Dataset{}, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(dataset, &1, fun)}
end
end
defimpl Collectable do
def into(original) do
collector_fun = fn
dataset, {:cont, list} when is_list(list)
-> RDF.Dataset.add(dataset, List.to_tuple(list))
dataset, {:cont, elem} -> RDF.Dataset.add(dataset, elem)
dataset, :done -> dataset
_dataset, :halt -> :ok
end
{original, collector_fun}
end
end
end
|
lib/rdf/dataset.ex
| 0.875175
| 0.701638
|
dataset.ex
|
starcoder
|
defmodule Yggdrasil.GraphQL do
@moduledoc """
This project is a GraphQL adapter for `Yggdrasil` publisher/subscriber.
## Small example
Let's say we want to have the following GraphQL `subscription`:
```graphql
subscription {
events(channel: "my_channel") {
content
}
}
```
And we have a process in Elixir that, using `Yggdrasil`, generates the
following event:
```
Yggdrasil.publish(
%Yggdrasil.Channel{name: "my_channel"},
%{content: "some message"}
)
```
Using [Absinthe](https://github.com/absinthe-graphql/absinthe), our Schema
would look like this:
```elixir
defmodule MyAppWeb.Schema do
use Absinthe.Schema
object :message do
field :content, :string
end
query do
end
subscription do
field :events, :message do
arg :channel, non_null(:string)
config fn args, %{context: %{pubsub: endpoint}} ->
channel = %Yggdrasil.Channel{name: args.channel}
Yggdrasil.GraphQL.subscribe(endpoint, :events, channel)
end
end
end
end
```
## Phoenix setup
> This is an extract from
> [this guide](https://hexdocs.pm/absinthe/subscriptions.html) modified
> slightly to fit this example.
1. Add the `Absinthe` libraries:
```elixir
{:absinthe, "~> 1.4"},
{:absinthe_phoenix, "~> 1.4"},
```
2. Add the `Phoenix.PubSub` configuration for your endpoint:
```elixir
config :my_app, MyAppWeb.Endpoint,
# ... other config
pubsub: [
name: MyApp.PubSub,
adapter: Phoenix.PubSub.PG2
]
```
3. In your application supervisor, add a line **after** your existing endpoint
supervision line:
```elixir
[
# other children ...
supervisor(MyAppWeb.Endpoint, []), # this line should already exist.
supervisor(Absinthe.Subscription, [MyAppWeb.Endpoint]), # add this line
# other children ...
]
```
Where `MyAppWeb.Endpoint` is the name of your application’s phoenix endpoint.
4. In your `MyAppWeb.Endpoint` module add:
```
use Absinthe.Phoenix.Endpoint
```
5. In your socket add:
- **Phoenix 1.3**
```elixir
use Absinthe.Phoenix.Socket, schema: MyAppWeb.Schema
```
- **Phoenix 1.2**
```
use Absinthe.Phoenix.Socket
def connect(_params, socket) do
socket = Absinthe.Phoenix.Socket.put_schema(socket, MyAppWeb.Schema)
{:ok, socket}
end
```
And that should be enough to have a working subscription setup.
## GraphQL adapter
The GraphQL adapter has the following rules:
* The `adapter` name is identified by the atom `:graphql`.
* The channel `name` must be a tuple with the `endpoint` name,
the subscription `field` and an `Yggdrasil` channel to any of the available
adapters.
* The `transformer` must encode to a map. It is recommended to leave the
encoding and decoding to the underlying adapter. Defaults to `:default`
transformer.
* The `backend` is, and always should be, `:graphql`.
The function `Yggdrasil.GraphQL.subscribe/3` is in charge of creating the
channel (as sketched below) and generating the topic for `Absinthe`.
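For illustration, this is roughly the channel that `subscribe/3` builds
internally (a sketch; `MyAppWeb.Endpoint` and the `:events` field are the
names assumed from the example above):
```elixir
%Yggdrasil.Channel{
  name: {MyAppWeb.Endpoint, :events, %Yggdrasil.Channel{name: "my_channel"}},
  adapter: :graphql
}
```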
## Installation
Using this GraphQL adapter with `Yggdrasil` is a matter of adding the available
hex package to your `mix.exs` file, e.g.:
```elixir
def deps do
[{:yggdrasil_graphql, "~> 0.1"}]
end
```
"""
alias Yggdrasil.Channel
alias Yggdrasil.Registry
@doc """
Subscribes to `channel` using the `endpoint` for message distribution to
subscribers of a `field`.
"""
@spec subscribe(module(), atom(), Channel.t())
:: {:ok, Keyword.t()} | {:error, term()}
def subscribe(endpoint, field, channel)
def subscribe(endpoint, field, %Channel{} = source) do
channel = %Channel{
name: {endpoint, field, source},
adapter: :graphql
}
with :ok <- Yggdrasil.subscribe(channel) do
{:ok, topic: gen_topic(channel)}
end
end
@doc """
Generates a `topic` from a `channel`.
"""
@spec gen_topic(Channel.t()) :: binary()
def gen_topic(channel)
def gen_topic(%Channel{} = channel) do
channel
|> Registry.get_full_channel()
|> :erlang.phash2()
|> to_string()
end
end
|
lib/yggdrasil/graphql.ex
| 0.908318
| 0.946399
|
graphql.ex
|
starcoder
|
defmodule Glimesh.FileValidation do
@moduledoc """
Reads a file and matches against common magic bytes to determine the type of the file
"""
require Logger
import SweetXml
@doc """
Helper function to quickly match against known types.
## Examples
iex> validate(png_file, [:png, :jpg])
true
iex> validate(svg_file, [:png])
false
"""
def validate(%Waffle.File{path: path}, allowed \\ []) do
case get_file_type(path) do
{:ok, type} ->
type in allowed
_ ->
false
end
end
@doc """
Dumb check for file extensions.
Should not be relied upon, since files (especially temporary ones) may not
have extensions.
"""
def validate_extension(%Waffle.File{file_name: file_name}, allowed \\ []) do
Enum.member?(allowed, file_name |> Path.extname() |> String.downcase())
end
@doc """
Validate that the file is an SVG as best as we can.
SVG's are XML files, and they should contain an svg element with a svg namespace.
https://www.w3.org/TR/SVG2/struct.html#Namespace states that only one svg
namespace exists, so we check for that using an XML parser.
"""
def validate_svg(%Waffle.File{path: path}) do
data = File.read!(path)
xml = SweetXml.parse(data)
case SweetXml.xpath(xml, ~x"/*/namespace::*[name()='']") do
{:xmlNsNode, _, _, _, :"http://www.w3.org/2000/svg"} ->
true
_ ->
Logger.info("Unexpected SVG namespace contents")
false
end
end
@doc """
Validate the size of the file before processing is smaller than specified.
"""
def validate_size(%Waffle.File{path: path}, max_size_in_bytes) do
case File.stat(path) do
{:ok, %{size: size}} ->
size <= max_size_in_bytes
{:error, _reason} ->
false
end
end
@doc """
Validates an image against a set of options, which can be combined in any way.
Current options:
* `:shape` -> `:square`
* `:min_width` -> int
* `:max_width` -> int
* `:min_height` -> int
* `:max_height` -> int
"""
def validate_image(%Waffle.File{path: path}, options) do
case ExImageInfo.info(File.read!(path)) do
nil ->
false
info ->
Enum.all?(options, fn {option, value} ->
  perform_image_validation(info, option, value)
end)
end
end
defp perform_image_validation({_, width, height, _}, :shape, :square) do
width == height
end
defp perform_image_validation(_, :shape, shape) do
raise "Shape validator not found for #{shape}."
end
defp perform_image_validation({_, width, _, _}, :min_width, value) do
width >= value
end
defp perform_image_validation({_, width, _, _}, :max_width, value) do
width <= value
end
defp perform_image_validation({_, _, height, _}, :min_height, value) do
height >= value
end
defp perform_image_validation({_, _, height, _}, :max_height, value) do
height <= value
end
@doc """
Matches a file against known magic bytes.
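For example (a sketch, assuming `path` points to a PNG file on disk):
iex> get_file_type(path)
{:ok, :png}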
"""
def get_file_type(path) when is_binary(path) do
case File.read(path) do
{:ok, <<255, 216, 255, _rest::binary>>} ->
# FF D8 FF
{:ok, :jpg}
{:ok, <<137, 80, 78, 71, 13, 10, 26, 10, _rest::binary>>} ->
# 89 50 4E 47 0D 0A 1A 0A
{:ok, :png}
{:ok, <<71, 73, 70, 56, 55, 97, _rest::binary>>} ->
# 47 49 46 38 37 61
{:ok, :gif}
{:ok, <<71, 73, 70, 56, 57, 97, _rest::binary>>} ->
# 47 49 46 38 39 61
{:ok, :gif}
{:ok, _} ->
{:ok, :unknown}
{:error, reason} ->
{:error, List.to_string(:file.format_error(reason))}
end
end
end
|
lib/glimesh_web/uploaders/file_validation.ex
| 0.786295
| 0.540439
|
file_validation.ex
|
starcoder
|
defmodule ExLTTB.SampleUtils do
@moduledoc """
Utility functions to perform common operations on generic samples
"""
@doc """
Returns the average sample of a list of samples.
## Arguments
* `samples`: a list of samples. These can have any representation, as long as access functions are provided (see Options)
* `opts`: a keyword list of options
## Options
* `sample_to_x_fun`: a function that takes as argument a sample and returns its x coordinate. Defaults to `sample[:x]`
* `sample_to_y_fun`: a function that takes as argument a sample and returns its y coordinate. Defaults to `sample[:y]`
* `xy_to_sample_fun`: a function that takes as argument `x` and `y` and returns a sample with these coordinates. Defaults to `%{x: x, y: y}`
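For example, with the default map representation:
iex> ExLTTB.SampleUtils.average_sample([%{x: 0, y: 0}, %{x: 2, y: 4}])
%{x: 1.0, y: 2.0}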
"""
def average_sample(samples, opts \\ []) when is_list(samples) do
{x_sum, y_sum} =
Enum.reduce(samples, {0, 0}, fn sample, {x_sum, y_sum} ->
{x_sum + get_x(sample, opts), y_sum + get_y(sample, opts)}
end)
len = length(samples) / 1.0
xy_to_sample(x_sum / len, y_sum / len, opts)
end
@doc """
Returns the area of the triangle defined by `s1`, `s2` and `s3`.
## Arguments
* `s1`, `s2`, `s3`: the vertices of the triangle. These can have any representation, as long as access functions are provided (see Options)
* `opts`: a keyword list of options
## Options
* `sample_to_x_fun`: a function that takes as argument a sample and returns its x coordinate. Defaults to `sample[:x]`
* `sample_to_y_fun`: a function that takes as argument a sample and returns its y coordinate. Defaults to `sample[:y]`
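For example, with the default map representation (a right triangle with legs of length 2):
iex> ExLTTB.SampleUtils.triangle_area(%{x: 0, y: 0}, %{x: 2, y: 0}, %{x: 0, y: 2}, [])
2.0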
"""
def triangle_area(s1, s2, s3, opts) do
x1 = get_x(s1, opts)
y1 = get_y(s1, opts)
x2 = get_x(s2, opts)
y2 = get_y(s2, opts)
x3 = get_x(s3, opts)
y3 = get_y(s3, opts)
abs((x1 - x3) * (y2 - y1) - (x1 - x2) * (y3 - y1)) / 2
end
defp get_x(sample, opts) do
sample_to_x_fun = Keyword.get(opts, :sample_to_x_fun, fn sample -> sample[:x] end)
sample_to_x_fun.(sample)
end
defp get_y(sample, opts) do
sample_to_y_fun = Keyword.get(opts, :sample_to_y_fun, fn sample -> sample[:y] end)
sample_to_y_fun.(sample)
end
defp xy_to_sample(x, y, opts) do
xy_to_sample_fun = Keyword.get(opts, :xy_to_sample_fun, fn x, y -> %{x: x, y: y} end)
xy_to_sample_fun.(x, y)
end
end
|
lib/ex_lttb/sample_utils.ex
| 0.923282
| 0.933915
|
sample_utils.ex
|
starcoder
|
defmodule Raft.LogStore do
@moduledoc """
The `LogStore` module provides a behaviour and a high-level API for interacting
with the underlying database engine. All values are encoded and decoded using
`:erlang.term_to_binary` and `:erlang.binary_to_term` before they're stored.
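A minimal sketch of typical usage (assuming the adapter configured below and
that `Raft.Log.Entry` structs carry an `:index` field):
    {:ok, db} = Raft.LogStore.open("/tmp/raft_log")
    {:ok, 1} = Raft.LogStore.store_entries(db, [%Raft.Log.Entry{index: 1}])
    {:ok, entry} = Raft.LogStore.get_entry(db, 1)
    :ok = Raft.LogStore.close(db)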
"""
alias Raft.Log.{
Entry,
Metadata,
}
@typep path :: String.t
@typep db :: term()
@typep index :: non_neg_integer()
@typep key :: String.t
@typep encoded :: String.t
@type metadata :: Metadata.t
@callback open(path()) :: {:ok, db()} | {:error, any()}
@callback store_entry(db(), key(), encoded()) :: :ok | {:error, any()}
@callback get_entry(db(), key()) :: {:ok, encoded()} | {:error, :not_found}
@callback last_entry(db()) :: {:ok, encoded()} | {:ok, :empty}
@callback store_metadata(db(), encoded()) :: :ok | {:error, any()}
@callback get_metadata(db()) :: {:ok, encoded()} | {:error, :not_found}
@callback delete_range(db(), [key()]) :: :ok | {:error, any()}
@callback close(db()) :: :ok | {:error, any()}
@callback destroy(db()) :: :ok | {:error, any()}
@doc """
Opens a new or existing database at the given path.
"""
@spec open(path()) :: {:ok, db()} | {:error, any()}
def open(path) do
adapter().open(path)
end
@doc """
Closes the connection to the database
"""
@spec close(db()) :: :ok
def close(db) do
adapter().close(db)
end
@doc """
Stores logs in the log store and returns the last index.
"""
@spec store_entries(db(), [Entry.t]) :: {:ok, index()}
def store_entries(db, entries) when is_list(entries) do
last_index = Enum.reduce entries, nil, fn entry, _ ->
index = encode_index(entry.index)
encoded = encode(entry)
:ok = adapter().store_entry(db, index, encoded)
index
end
{:ok, decode_index(last_index)}
end
@doc """
Gets all metadata from the store.
"""
@spec get_metadata(db()) :: metadata()
def get_metadata(db) do
case adapter().get_metadata(db) do
{:ok, metadata} ->
decode(metadata)
{:error, :not_found} ->
%Metadata{term: 0, voted_for: :none}
end
end
@doc """
Stores the metadata.
"""
@spec store_metadata(db(), metadata()) :: :ok
def store_metadata(db, meta) do
adapter().store_metadata(db, encode(meta))
end
@doc """
Gets a log at a specific index.
"""
@spec get_entry(db(), index()) :: {:ok, Entry.t} | {:error, :not_found}
def get_entry(db, index) do
case adapter().get_entry(db, encode_index(index)) do
{:ok, value} ->
{:ok, decode(value)}
{:error, :not_found} ->
{:error, :not_found}
end
end
@doc """
Retrieves the last entry from the log. Returns `:empty` if the log is empty.
"""
@spec last_entry(db()) :: {:ok, Entry.t} | {:ok, :empty}
def last_entry(db) do
case adapter().last_entry(db) do
{:ok, :empty} ->
{:ok, :empty}
{:ok, value} ->
{:ok, decode(value)}
end
end
@doc """
Retrieves the last index that's been saved to stable storage.
If the database is empty then 0 is returned.
"""
@spec last_index(db()) :: index()
def last_index(db) do
case last_entry(db) do
{:ok, %{index: index}} ->
index
{:ok, :empty} ->
0
end
end
@doc """
Deletes a range of logs, inclusive of both endpoints.
"""
def delete_range(db, range) do
range = Enum.map(range, &encode_index/1)
adapter().delete_range(db, range)
end
@doc """
Gets all logs from the starting index to the end index, inclusive.
"""
def slice(db, range) do
range
|> Enum.map(&encode_index/1)
|> Enum.map(& adapter().get_entry(db, &1))
|> Enum.map(fn {:ok, value} -> value end)
|> Enum.map(&decode/1)
end
@doc """
Used to destroy the data on disk. This is used for testing and development
and is dangerous to run in production.
"""
def destroy(db) do
adapter().destroy(db)
end
@doc """
Determines if anything has been written to the log yet.
"""
@spec has_data?(db()) :: boolean()
def has_data?(db) do
last_index(db) > 0
end
@doc """
Returns metadata and all the persisted logs. This is used for debugging and
testing purposes only and should not be used in production.
"""
def dump_data(db) do
%{term: term, voted_for: voted_for} = get_metadata(db)
last_index = last_index(db)
logs = slice(db, 0..last_index)
%{
term: term,
voted_for: voted_for,
logs: logs
}
end
defp decode(value) when is_binary(value) do
:erlang.binary_to_term(value)
end
defp encode(value) do
:erlang.term_to_binary(value)
end
defp encode_index(index) when is_integer(index) do
<< index :: size(64) >>
end
defp decode_index(index) when is_binary(index) do
:binary.decode_unsigned(index)
end
defp adapter, do: Application.get_env(:raft, :log_store, Raft.LogStore.RocksDB)
end
|
lib/raft/log_store.ex
| 0.84729
| 0.515254
|
log_store.ex
|
starcoder
|
defmodule NYSETL.Engines.E1.Cache do
@moduledoc """
Wrapper for Cachex, providing a large number of checksum caches under a supervision
tree. While Cachex provides non-blocking reads and writes for caches, transactions
for a specific cache block other transactions via a single GenServer message queue.
This supervision tree improves the concurrency of transactions by providing a cache ring
of N caches, using `:erlang.phash2/2` to deterministically assign each checksum to a
member of the ring.
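For example, with a ring of 24 caches, `:erlang.phash2/2` deterministically
maps a checksum to a slot (a sketch):
    :erlang.phash2(checksum, 24)
    #=> an integer in 0..23, always the same for a given checksum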
"""
use Supervisor
defdelegate get(cache, key), to: Cachex
## Cache functions
def clear() do
cache_count_range() |> Enum.each(fn x -> Cachex.clear(:"checksum_#{x}") end)
:ok
end
def dump(path) do
cache_count_range()
|> Enum.each(fn x ->
Cachex.dump(:"checksum_#{x}", Path.join(path, "checksum_#{x}"))
end)
end
def load(path) do
cache_count_range()
|> Enum.each(fn x ->
Cachex.load(:"checksum_#{x}", Path.join(path, "checksum_#{x}"))
end)
end
def get(checksum) do
checksum
|> cache_for()
|> get(checksum)
end
def put!(cache, checksum, value) do
cache
|> Cachex.put!(checksum, value)
end
def put!(checksum, value) do
checksum
|> cache_for()
|> put!(checksum, value)
end
def transaction(checksum, fun) when is_function(fun) do
cache_for(checksum)
|> Cachex.transaction!([checksum], fun)
end
## Supervisor
def start_link(init_arg) do
Supervisor.start_link(__MODULE__, init_arg, name: __MODULE__)
end
## Callbacks
@impl true
def init(_init_arg) do
children =
cache_count_range()
|> Enum.map(fn cache ->
Supervisor.child_spec({Cachex, :"checksum_#{cache}"}, id: {Cachex, :"checksum_#{cache}"})
end)
Supervisor.init(children, strategy: :one_for_one)
end
## Private
defp cache_count(), do: System.schedulers() * 3
defp cache_count_range(), do: 0..(cache_count() - 1)
defp cache_for(checksum), do: :"checksum_#{:erlang.phash2(checksum, cache_count())}"
end
|
lib/nys_etl/engines/e1/cache.ex
| 0.832713
| 0.585101
|
cache.ex
|
starcoder
|
defmodule State.Trip.Added do
@moduledoc """
State for added trips. They aren't matched to GTFS trip IDs, so we
maintain them separately, based on the predictions we see.
"""
use State.Server,
indices: [:id, :route_id],
recordable: Model.Trip,
hibernate: false
alias Model.{Prediction, Trip}
@impl GenServer
def init(state) do
subscribe({:new_state, State.Prediction})
super(state)
end
@impl Events.Server
def handle_event(_, _, _, state) do
handle_new_state(&build_state/0)
{:noreply, state}
end
@spec build_state :: Enumerable.t()
defp build_state do
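# Drop predictions missing a schedule relationship, trip, or stop; keep only
# trips unknown to the static GTFS data; then reduce to the last-stop
# prediction per trip and build Trip structs from those.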
State.Prediction.all()
|> Stream.reject(&is_nil(&1.schedule_relationship))
|> Stream.reject(&is_nil(&1.trip_id))
|> Stream.reject(&is_nil(&1.stop_id))
|> Stream.filter(&(State.Trip.by_id(&1.trip_id) == []))
|> Enum.reduce(%{}, &last_stop_prediction/2)
|> Stream.flat_map(&prediction_to_trip/1)
end
@spec last_stop_prediction(Prediction.t(), acc) :: acc
when acc: %{optional(Trip.id()) => Prediction.t()}
defp last_stop_prediction(prediction, acc) do
# remember the last prediction for the given trip
Map.update(acc, prediction.trip_id, prediction, fn old ->
if old.stop_sequence > prediction.stop_sequence do
old
else
prediction
end
end)
end
@spec prediction_to_trip({Trip.id(), Prediction.t()}) :: [Trip.t()]
defp prediction_to_trip({trip_id, prediction}) do
route = State.Route.by_id(prediction.route_id)
stop =
case State.Stop.by_id(prediction.stop_id) do
%{parent_station: nil} = stop -> stop
%{parent_station: id} -> State.Stop.by_id(id)
_other -> nil
end
last_stop_id =
[prediction.route_id]
|> State.Shape.select_routes(prediction.direction_id)
|> Enum.filter(&(&1.route_id == prediction.route_id))
|> Enum.find_value(&last_stop_on_shape(&1, prediction, stop))
stop =
if is_nil(last_stop_id) or last_stop_id == stop.id do
stop
else
State.Stop.by_id(last_stop_id)
end
if stop == nil do
[]
else
[
%Trip{
id: trip_id,
route_id: prediction.route_id,
route_pattern_id: prediction.route_pattern_id,
direction_id: prediction.direction_id,
route_type: if(route, do: route.type),
wheelchair_accessible: 0,
headsign: stop.name,
name: "",
bikes_allowed: 0
}
]
end
end
defp last_stop_on_shape(%{priority: p} = shape, prediction, stop) when p >= 0 do
shape_stops =
State.StopsOnRoute.by_route_id(
prediction.route_id,
direction_id: prediction.direction_id,
shape_ids: [shape.id],
include_alternates?: false
)
if Enum.any?(shape_stops, &(&1 == stop.id)) do
List.last(shape_stops)
end
end
defp last_stop_on_shape(_, _, _) do
nil
end
end
|
apps/state/lib/state/trip/added.ex
| 0.749362
| 0.469581
|
added.ex
|
starcoder
|
defmodule Square.Checkout do
@doc """
Links a `checkoutId` to a `checkout_page_url` that customers will
be directed to in order to provide their payment information using a
payment processing workflow hosted on connect.squareup.com.
```elixir
def create_checkout(client, location_id, body)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `location_id` | `String` | Template, Required | The ID of the business location to associate the checkout with. |
| `body` | [`Create Checkout Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/create-checkout-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |
### Response Type
[`Create Checkout Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/create-checkout-response.md)
### Example Usage
iex> location_id = "location_id4"
iex> body = %{
idempotency_key: "86ae1696-b1e3-4328-af6d-f1e04d947ad6",
order: %{
order: %{
location_id: "location_id",
reference_id: "reference_id",
customer_id: "customer_id",
line_items: [
  %{
    name: "Printed T Shirt",
    quantity: "2",
    applied_taxes: [%{ tax_uid: "38ze1696-z1e3-5628-af6d-f1e04d947fg3" }],
    applied_discounts: [%{
      discount_uid: "56ae1696-z1e3-9328-af6d-f1e04d947gd4"
    }],
    base_price_money: %{
      amount: 1500,
      currency: "USD"
    }
  },
  %{
    name: "<NAME>",
    quantity: "1",
    base_price_money: %{
      amount: 2500,
      currency: "USD"
    }
  },
  %{
    name: "<NAME>",
    quantity: "3",
    base_price_money: %{
      amount: 3500,
      currency: "USD"
    }
  }
],
taxes: [%{
uid: "38ze1696-z1e3-5628-af6d-f1e04d947fg3",
type: "INCLUSIVE",
percentage: "7.75",
scope: "LINE_ITEM"
}],
discounts: [%{
uid: "56ae1696-z1e3-9328-af6d-f1e04d947gd4",
type: "FIXED_AMOUNT",
amount_money: %{
amount: 100,
currency: "USD"
},
scope: "LINE_ITEM"
}]
},
idempotency_key: "<KEY>",
},
ask_for_shipping_address: true,
merchant_support_email: "<EMAIL>",
pre_populate_buyer_email: "<EMAIL>",
pre_populate_shipping_address: %{
address_line_1: "1455 Market St.",
address_line_2: "Suite 600",
locality: "San Francisco",
administrative_district_level_1: "CA",
postal_code: "94103",
country: "US",
first_name: "Jane",
last_name: "Doe"
},
redirect_url: "https://merchant.website.com/order-confirm",
additional_recipients: [
%{
location_id: "057P5VYJ4A5X1",
description: "Application fees"
}
]
}
iex> Square.client |> Square.Checkout.create_checkout(location_id, body)
"""
@spec create_checkout(Tesla.Client.t(), binary, map) ::
{:error, any} | {:ok, Tesla.Env.t()}
def create_checkout(client, location_id, body \\ %{}),
do: Tesla.post(client, "locations/#{location_id}/checkouts", body)
end
|
lib/api/checkout_api.ex
| 0.881602
| 0.775775
|
checkout_api.ex
|
starcoder
|
defmodule LightBridge.Workflow do
@moduledoc """
Converts the workflow defined in the json into a Graph which can be used by LightBridge.Instance
A workflow is defined using an acyclic graph. The nodes of the graph are the components and the
links between them define the connections.
#### Node
A node has an `id`, a `type`, and a `component`.
`id` should be unique for each node.
A node can be of three types: "in", "normal", "out"
#### Component
A component consists of `inports`, `outports`, `code`
`inports` is a list of atoms, which will be provided as the binding to the workflow for execution
`outports` is a list of atoms from the binding which will be used to construct the response object
`code` is the DSL which is defined in `Composer.DSL`
#### Edges
An edge has a `source_node`, `source_port`, `target_node`, and `target_port`.
`source_node` is the ID of the node from which the connection is originating
`source_port` is the port of the node from which the connection is originating
`target_node` is the ID of the node to which the connection is going
`target_port` is the port of the node to which the connection is going
### Sample Workflow JSON
```elixir
{
"nodes": [
{
"id": 0, "type": "in", "component": {
"inports": [ "a", "b" ],
"outports": [ "c" ],
"code": {
"type": "=", "arguments": [
{ "type": "var", "arguments": [ { "type": "atom", "arguments": [ "c" ] } ] },
{
"type": "+", "arguments": [
{ "type": "var", "arguments": [ { "type": "atom", "arguments": [ "a" ] } ] },
{ "type": "var", "arguments": [ { "type": "atom", "arguments": [ "b" ] } ] }
]
}
]
}
}
},
{
"id": 1, "type": "out", "component": {
"inports": [ "a", "b" ],
"outports": [ "c" ],
"code": {
"type": "=", "arguments": [
{ "type": "var", "arguments": [ { "type": "atom", "arguments": [ "c" ] } ] },
{
"type": "+", "arguments": [
{ "type": "var", "arguments": [ { "type": "atom", "arguments": [ "a" ] } ] },
{ "type": "var", "arguments": [ { "type": "atom", "arguments": [ "b" ] } ] }
]
}
]
}
}
}
],
"edges": [
{ "source_node": 0, "target_node": 1, "source_port": "c", "target_port": "a" },
{ "source_node": 0, "target_node": 1, "source_port": "c", "target_port": "b" }
]
}
```
"""
alias Composer.DSL
alias Composer.AST
@doc """
Parses the JSON and generates the graph.
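A sketch of typical usage, assuming `workflow_json` holds a JSON string like
the sample in the moduledoc:
    graph = LightBridge.Workflow.convert(workflow_json)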
"""
def convert(json) do
json
|> Poison.decode!
|> do_convert
end
@doc """
Generates the graph from the nodes and edges.
It expects the input as `%{ "nodes" => nodes, "edges" => edges }`
"""
def do_convert(%{ "nodes" => nodes, "edges" => edges }) do
Graph.new(type: :directed)
|> add_nodes(nodes)
|> add_edges(edges)
end
defp add_nodes(graph, nodes) do
Enum.reduce(nodes, graph, fn(%{ "id" => id, "component" => component, "type" => type }, graph) ->
%{ "inports" => inports, "outports" => outports, "code" => code } = component
Graph.add_vertex(graph, id, label: %{
code: compile_code(code),
type: String.to_atom(type),
inports: Enum.map(inports, &String.to_atom/1),
outports: Enum.map(outports, &String.to_atom/1),
})
end)
end
defp add_edges(graph, edges) do
Enum.reduce(edges, graph, fn(edge, graph) ->
%{
"source_node" => source_node,
"source_port" => source_port,
"target_node" => target_node,
"target_port" => target_port,
} = edge
Graph.add_edge(graph, source_node, target_node, label: %{
from_port: String.to_atom(source_port), to_port: String.to_atom(target_port),
})
end)
end
defp compile_code(code) do
code
|> DSL.do_convert
|> AST.do_convert
end
end
|
apps/light_bridge/lib/workflow.ex
| 0.853699
| 0.944125
|
workflow.ex
|
starcoder
|
defmodule ExOrient.QueryBuilder do
@moduledoc """
Logic for building query strings to be used in DB commands. This is used in
`ExOrient.DB` function calls.
"""
alias MarcoPolo.RID
@doc """
Return a DB class name for a given module name or string.
iex> ExOrient.QueryBuilder.class_name(Models.Person)
"Person"
iex> ExOrient.QueryBuilder.class_name("User")
"User"
"""
def class_name(%RID{cluster_id: c, position: p}), do: "##{c}:#{p}"
def class_name(module) when is_binary(module), do: module
def class_name(module) when is_atom(module) do
module
|> Atom.to_string()
|> String.split(".")
|> Enum.fetch!(-1)
end
@doc """
Wrap a given `str` in parentheses.
iex> ExOrient.QueryBuilder.wrap_in_parens("hello")
"(hello)"
"""
def wrap_in_parens(str), do: "(#{str})"
@doc """
Wrap a given `str` in square brackets.
iex> ExOrient.QueryBuilder.wrap_in_square_brackets("test")
"[test]"
"""
def wrap_in_square_brackets(str), do: "[#{str}]"
@doc """
Convert a single item to a list
iex> ExOrient.QueryBuilder.to_list([:a, :b])
[:a, :b]
iex> ExOrient.QueryBuilder.to_list(:a)
[:a]
"""
def to_list(list) when is_list(list), do: list
def to_list(single), do: [single]
@doc """
Put some parameters into a query string. We try not to use this if we don't
have to.
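For example:
iex> ExOrient.QueryBuilder.combine_params("SELECT FROM Person WHERE name = :name", %{"name" => "Paul"})
"SELECT FROM Person WHERE name = Paul"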
"""
def combine_params(query, params) do
params
|> Enum.to_list()
|> Enum.reduce(query, fn({key, value}, acc) ->
String.replace(acc, ":#{key}", value)
end)
end
@doc """
Add a let block to a query for a given `map`.
iex> ExOrient.QueryBuilder.append_let(%{"$n" => :name}, "SELECT FROM Test", %{})
{"SELECT FROM Test LET $n = name", %{}}
"""
def append_let(nil, query, params), do: {query, params}
def append_let(map, query, params) do
block = map
|> Map.to_list()
|> Enum.map(fn
({var, {sub_query, sub_params}}) -> "#{var} = (#{combine_params(sub_query, sub_params)})"
({var, field}) -> "#{var} = #{field}"
end)
|> Enum.join(", ")
{query <> " LET #{block}", params}
end
@doc """
Append a where clause based on a map, keyword list, 3-elem tuple, or list of
3-elem tuples. Maps, keyword lists, single tuples are all converted to lists
of 3-elem tuples and passed down the line. You can also specify a logical
operator, such as `:or`. `:and` is used by default if you have multiple fields
in your where clause. If you're doing anything more complicated, you can also
pass a string to use directly as the WHERE clause, although this is not
preferred.
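For example, passing a raw string clause:
iex> ExOrient.QueryBuilder.append_where("name = 'Paul'", "SELECT FROM Person", %{})
{"SELECT FROM Person WHERE name = 'Paul'", %{}}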
"""
def append_where(clause_or_map, logical \\ :and, query, params)
def append_where(nil, _logical, query, params) do
{query, params}
end
def append_where(clause, _logical, query, params) when is_binary(clause) do
{query <> " WHERE #{clause}", params}
end
def append_where(map, logical, query, params) when is_map(map) do
map
|> Map.to_list()
|> Enum.map(fn({key, value}) -> {to_string(key), "=", value} end)
|> append_where(logical, query, params)
end
def append_where(clause, logical, query, params) when is_tuple(clause) do
append_where([clause], logical, query, params)
end
# For a keyword list: [name: "Paul", name: "Bob"]
def append_where(clauses = [{_key, _val} | _rest], logical, query, params) do
clauses
|> Enum.map(fn({key, value}) -> {to_string(key), "=", value} end)
|> append_where(logical, query, params)
end
# For the finally built list of clauses: [{"name", "=", "Paul"}, {"name", "=", "Bob"}]
def append_where(clauses = [{_key, _op, _val} | _rest], logical, query, params) do
# First, we build up a tuple like this:
# {"name = :where_name_0", "where_name_0", "Paul"}
clauses_keys_vals =
clauses
|> Enum.with_index(round(:rand.uniform() * 1000))
|> Enum.map(fn({{key, oper, value}, index}) ->
case key do
"@" <> rec_attr -> {"#{key} #{oper} :where_#{rec_attr}_#{index}", "where_#{rec_attr}_#{index}", value}
"$" <> var -> {"#{key} #{oper} :where_#{var}_#{index}", "where_#{var}_#{index}", value}
_ ->
var = key |> to_string() |> String.split(".") |> Enum.at(0) # Support prop.toLowerCase()
{"#{key} #{oper} :where_#{var}_#{index}", "where_#{var}_#{index}", value}
end
end)
# Pull out the map of keys => values
# %{"where_name_0" => "Paul"}
map =
clauses_keys_vals
|> Enum.map(fn({_clause, key, val}) -> {key, val} end)
|> Enum.into(%{})
|> Map.merge(params)
# Join all the clauses together
# "name = :where_name_0 AND age = :where_age_1"
clause =
clauses_keys_vals
|> Enum.map(fn({clause, _key, _val}) -> clause end)
|> Enum.join(" #{logical |> to_string() |> String.upcase()} ")
{query <> " WHERE #{clause}", map}
end
@doc """
Add a group by clause to a given `query`. `field` can be a string or atom.
iex> ExOrient.QueryBuilder.append_group_by(:name, "SELECT FROM Test", %{})
{"SELECT FROM Test GROUP BY name", %{}}
"""
def append_group_by(nil, query, params), do: {query, params}
def append_group_by(field, query, params) do
{query <> " GROUP BY #{field}", params}
end
@doc """
Append an order by clause to a given `query`.
iex> ExOrient.QueryBuilder.append_order_by(:name, "SELECT FROM Test", %{})
{"SELECT FROM Test ORDER BY name ASC", %{}}
"""
def append_order_by(fields, order \\ "ASC", query, params)
def append_order_by(nil, _order, query, params), do: {query, params}
def append_order_by(field, order, query, params) when is_atom(field), do: append_order_by([field], order, query, params)
def append_order_by(field, order, query, params) when is_binary(field), do: append_order_by([field], order, query, params)
def append_order_by(fields, order, query, params) do
fields =
fields
|> Enum.map(&to_string/1)
|> Enum.join(", ")
order =
order
|> to_string()
|> String.upcase()
{query <> " ORDER BY #{fields} #{order}", params}
end
@doc """
Append an unwind statement to a query
iex> ExOrient.QueryBuilder.append_unwind(:friend, "SELECT FROM People", %{})
{"SELECT FROM People UNWIND friend", %{}}
"""
def append_unwind(nil, query, params), do: {query, params}
def append_unwind(field, query, params) do
{query <> " UNWIND #{field}", params}
end
@doc """
Append a skip statement
iex> ExOrient.QueryBuilder.append_skip(20, "SELECT FROM Test", %{})
{"SELECT FROM Test SKIP 20", %{}}
"""
def append_skip(nil, query, params), do: {query, params}
def append_skip(number, query, params) do
{query <> " SKIP #{number}", params}
end
@doc """
Append a limit statement
iex> ExOrient.QueryBuilder.append_limit(10, "SELECT FROM Test", %{})
{"SELECT FROM Test LIMIT 10", %{}}
"""
def append_limit(nil, query, params), do: {query, params}
def append_limit(number, query, params) do
{query <> " LIMIT #{number}", params}
end
@doc """
Append fetch plan
iex> ExOrient.QueryBuilder.append_fetchplan("*:-1", "SELECT FROM Test", %{})
{"SELECT FROM Test FETCHPLAN *:-1", %{}}
"""
def append_fetchplan(nil, query, params), do: {query, params}
def append_fetchplan(plan, query, params) do
{query <> " FETCHPLAN #{plan}", params}
end
@doc """
Append timeout
iex> ExOrient.QueryBuilder.append_timeout(5000, "SELECT FROM Test", %{})
{"SELECT FROM Test TIMEOUT 5000", %{}}
iex> ExOrient.QueryBuilder.append_timeout({5000, :return}, "SELECT FROM Test", %{})
{"SELECT FROM Test TIMEOUT 5000 RETURN", %{}}
iex> ExOrient.QueryBuilder.append_timeout({5000, :exception}, "SELECT FROM Test", %{})
{"SELECT FROM Test TIMEOUT 5000 EXCEPTION", %{}}
"""
def append_timeout(nil, query, params), do: {query, params}
def append_timeout({millis, :return}, query, params), do: {query <> " TIMEOUT #{millis} RETURN", params}
def append_timeout({millis, :exception}, query, params), do: {query <> " TIMEOUT #{millis} EXCEPTION", params}
def append_timeout(millis, query, params), do: {query <> " TIMEOUT #{millis}", params}
@doc """
Append a lock statement
iex> ExOrient.QueryBuilder.append_lock(:default, "SELECT FROM Test", %{})
{"SELECT FROM Test LOCK DEFAULT", %{}}
iex> ExOrient.QueryBuilder.append_lock(:record, "SELECT FROM Test", %{})
{"SELECT FROM Test LOCK RECORD", %{}}
"""
def append_lock(nil, query, params), do: {query, params}
def append_lock(:default, query, params), do: {query <> " LOCK DEFAULT", params}
def append_lock(:record, query, params), do: {query <> " LOCK RECORD", params}
@doc """
Append a parallel statement
iex> ExOrient.QueryBuilder.append_parallel(true, "SELECT FROM Test", %{})
{"SELECT FROM Test PARALLEL", %{}}
"""
def append_parallel(nil, query, params), do: {query, params}
def append_parallel(true, query, params), do: {query <> " PARALLEL", params}
def append_parallel(false, query, params), do: {query, params}
@doc """
Append a nocache statement
iex> ExOrient.QueryBuilder.append_nocache(true, "SELECT FROM Test", %{})
{"SELECT FROM Test NOCACHE", %{}}
"""
def append_nocache(nil, query, params), do: {query, params}
def append_nocache(true, query, params), do: {query <> " NOCACHE", params}
def append_nocache(false, query, params), do: {query, params}
@doc """
Append a values statement
iex> ExOrient.QueryBuilder.append_values({[:name, :type], ["Elixir", "Awesome"]}, "INSERT INTO Test", %{})
{"INSERT INTO Test (name, type) VALUES (:values_name, :values_type)", %{"values_name" => "Elixir", "values_type" => "Awesome"}}
"""
def append_values(nil, query, params), do: {query, params}
def append_values({fields, values}, query, params) do
built_fields =
fields
|> Enum.map(&to_string/1)
|> Enum.join(", ")
|> wrap_in_parens()
placeholders =
fields
|> Enum.map(&to_string/1)
|> Enum.map(fn(field) -> ":values_#{field}" end)
|> Enum.join(", ")
|> wrap_in_parens()
map =
fields
|> Enum.map(fn(field) -> "values_#{field}" end)
|> Enum.zip(values)
|> Enum.into(%{})
|> Map.merge(params)
{query <> " #{built_fields} VALUES #{placeholders}", map}
end
@type_tags [:binary, :long, :short, :int, :float, :double]
@doc """
Append a set statement
iex> ExOrient.QueryBuilder.append_set([key: "val"], "INSERT INTO Test", %{})
{"INSERT INTO Test SET key = :set_key", %{"set_key" => "val"}}
"""
def append_set(nil, query, params), do: {query, params}
def append_set(kv, query, params) do
sets =
kv
|> Enum.map(fn
({key, {op, _}}) when op in @type_tags -> "#{key} = :set_#{key}"
({key, {q, p}}) -> "#{key} = (#{combine_params(q, p)})"
({key, _val}) -> "#{key} = :set_#{key}"
end)
|> Enum.join(", ")
map =
kv
|> Enum.map(fn({key, val}) -> {"set_#{key}", val} end)
|> Enum.into(params)
{query <> " SET #{sets}", map}
end
@doc """
Append a content statement
iex> ExOrient.QueryBuilder.append_content(%{key: "val"}, "INSERT INTO Test", %{})
{~s/INSERT INTO Test CONTENT {"key":"val"}/, %{}}
"""
def append_content(nil, query, params), do: {query, params}
def append_content(map, query, params) do
json = Poison.encode!(map)
{query <> " CONTENT #{json}", params}
end
@doc """
Append a return statement
iex> ExOrient.QueryBuilder.append_return("@rid", "INSERT INTO Test", %{})
{"INSERT INTO Test RETURN @rid", %{}}
"""
def append_return(nil, query, params), do: {query, params}
def append_return(sql, query, params) do
{query <> " RETURN #{sql}", params}
end
@doc """
Append a from statement
iex> ExOrient.QueryBuilder.append_from("#10:0", "CREATE EDGE Watched", %{})
{"CREATE EDGE Watched FROM #10:0", %{}}
iex> ExOrient.QueryBuilder.append_from("(SELECT FROM account)", "CREATE EDGE Watched", %{})
{"CREATE EDGE Watched FROM (SELECT FROM account)", %{}}
"""
def append_from(nil, query, params), do: {query, params}
def append_from({sub_query, sub_params}, query, params) do
{query <> " FROM (#{sub_query})", Map.merge(sub_params, params)}
end
def append_from(rid = %RID{}, query, params) do
append_from(class_name(rid), query, params)
end
def append_from(rid = "#" <> _, query, params) do
{query <> " FROM #{rid}", params}
end
def append_from(class, query, params) do
{query <> " FROM #{class_name(class)}", params}
end
@doc """
Append an increment statement
iex> ExOrient.QueryBuilder.append_increment([number: 5], "UPDATE Counter", %{})
{"UPDATE Counter INCREMENT number = :increment_number", %{"increment_number" => 5}}
"""
def append_increment(nil, query, params), do: {query, params}
def append_increment(kv, query, params) do
fields =
kv
|> Enum.map(fn({field, _amt}) -> "#{field} = :increment_#{field}" end)
|> Enum.join(", ")
params =
kv
|> Enum.map(fn({key, val}) -> {"increment_#{key}", val} end)
|> Enum.into(params)
{query <> " INCREMENT #{fields}", params}
end
@doc """
Append an add statement
iex> ExOrient.QueryBuilder.append_add([something: "#9:0"], "UPDATE Person", %{})
{"UPDATE Person ADD something = :add_something", %{"add_something" => "#9:0"}}
"""
def append_add(nil, query, params), do: {query, params}
def append_add(kv, query, params) do
fields =
kv
|> Enum.map(fn({field, _val}) -> "#{field} = :add_#{field}" end)
|> Enum.join(", ")
params =
kv
|> Enum.map(fn({key, val}) -> {"add_#{key}", val} end)
|> Enum.into(params)
{query <> " ADD #{fields}", params}
end
@doc """
Append a remove statement
iex> ExOrient.QueryBuilder.append_remove(:name, "UPDATE ProgrammingLanguage", %{})
{"UPDATE ProgrammingLanguage REMOVE name", %{}}
iex> ExOrient.QueryBuilder.append_remove([meta: "type"], "UPDATE ProgrammingLanguage", %{})
{"UPDATE ProgrammingLanguage REMOVE meta = :remove_meta", %{"remove_meta" => "type"}}
"""
def append_remove(nil, query, params), do: {query, params}
def append_remove(field, query, params) when is_atom(field), do: append_remove([field], query, params)
def append_remove(field, query, params) when is_binary(field), do: append_remove([field], query, params)
def append_remove(list, query, params) when is_list(list) do
fields =
list
|> Enum.map(fn
{field, _val} -> "#{field} = :remove_#{field}"
field -> to_string(field)
end)
|> Enum.join(", ")
case hd(list) do
{_, _} ->
params =
list
|> Enum.map(fn({key, val}) -> {"remove_#{key}", val} end)
|> Enum.into(params)
{query <> " REMOVE #{fields}", params}
_ ->
{query <> " REMOVE #{fields}", params}
end
end
@doc """
Append a put statement
iex> ExOrient.QueryBuilder.append_put([addresses: {"CLE", "#12:0"}], "UPDATE Person", %{})
{"UPDATE Person PUT addresses = :put_addresses_key, :put_addresses_val", %{"put_addresses_key" => "CLE", "put_addresses_val" => "#12:0"}}
"""
def append_put(nil, query, params), do: {query, params}
def append_put(list, query, params) do
fields =
list
|> Enum.map(fn({field, _}) -> "#{field} = :put_#{field}_key, :put_#{field}_val" end)
|> Enum.join(", ")
params =
list
|> Enum.flat_map(fn({field, {key, val}}) -> [{"put_#{field}_key", key}, {"put_#{field}_val", val}] end)
|> Enum.into(params)
{query <> " PUT #{fields}", params}
end
@doc """
Append a merge statement
iex> ExOrient.QueryBuilder.append_merge(%{key: "val"}, "UPDATE Person", %{})
{~s/UPDATE Person MERGE {"key":"val"}/, %{}}
"""
def append_merge(nil, query, params), do: {query, params}
def append_merge(map, query, params) do
json = Poison.encode!(map)
{query <> " MERGE #{json}", params}
end
@doc """
Append an upsert statement
iex> ExOrient.QueryBuilder.append_upsert(true, "UPDATE Person", %{})
{"UPDATE Person UPSERT", %{}}
"""
def append_upsert(nil, query, params), do: {query, params}
def append_upsert(true, query, params), do: {query <> " UPSERT", params}
def append_upsert(false, query, params), do: {query, params}
@doc """
Append a cluster statement
iex> ExOrient.QueryBuilder.append_cluster("Name", "CREATE VERTEX V1", %{})
{"CREATE VERTEX V1 CLUSTER Name", %{}}
"""
def append_cluster(nil, query, params), do: {query, params}
def append_cluster(cluster, query, params), do: {query <> " CLUSTER #{cluster}", params}
@doc """
Append a from statement
iex> ExOrient.QueryBuilder.append_to("#10:0", "CREATE EDGE Watched", %{})
{"CREATE EDGE Watched TO #10:0", %{}}
iex> ExOrient.QueryBuilder.append_to("SELECT FROM account", "CREATE EDGE Watched", %{})
{"CREATE EDGE Watched TO (SELECT FROM account)", %{}}
"""
def append_to(nil, query, params), do: {query, params}
def append_to({sub_query, sub_params}, query, params) do
{query <> " TO (#{sub_query})", Map.merge(sub_params, params)}
end
def append_to(rid = %RID{}, query, params) do
append_to(class_name(rid), query, params)
end
def append_to(rid = "#" <> _, query, params) do
{query <> " TO #{rid}", params}
end
def append_to(subquery, query, params) do
{query <> " TO (#{subquery})", params}
end
@doc """
Append retry statement
iex> ExOrient.QueryBuilder.append_retry(10, "CREATE EDGE Test", %{})
{"CREATE EDGE Test RETRY 10", %{}}
"""
def append_retry(nil, query, params), do: {query, params}
def append_retry(num, query, params), do: {query <> " RETRY #{num}", params}
@doc """
Append wait statement
iex> ExOrient.QueryBuilder.append_wait(100, "CREATE EDGE Test", %{})
{"CREATE EDGE Test WAIT 100", %{}}
"""
def append_wait(nil, query, params), do: {query, params}
def append_wait(millis, query, params), do: {query <> " WAIT #{millis}", params}
@doc """
Append batch statement
iex> ExOrient.QueryBuilder.append_batch(200, "CREATE EDGE Test", %{})
{"CREATE EDGE Test BATCH 200", %{}}
"""
def append_batch(nil, query, params), do: {query, params}
def append_batch(num, query, params), do: {query <> " BATCH #{num}", params}
@doc """
Append vertex statement
iex> ExOrient.QueryBuilder.append_vertex("#10:0", "DELETE", %{})
{"DELETE VERTEX #10:0", %{}}
"""
def append_vertex(nil, query, params), do: {query, params}
def append_vertex(class, query, params), do: {query <> " VERTEX #{class_name(class)}", params}
@doc """
Append edge statement
iex> ExOrient.QueryBuilder.append_edge("#10:0", "DELETE", %{})
{"DELETE EDGE #10:0", %{}}
"""
def append_edge(nil, query, params), do: {query, params}
def append_edge(class, query, params), do: {query <> " EDGE #{class_name(class)}", params}
@doc """
Append extends statement
iex> ExOrient.QueryBuilder.append_extends("E", "CREATE CLASS Person", %{})
{"CREATE CLASS Person EXTENDS E", %{}}
"""
def append_extends(nil, query, params), do: {query, params}
def append_extends(class, query, params), do: {query <> " EXTENDS #{class_name(class)}", params}
@doc """
Append abstract statement
iex> ExOrient.QueryBuilder.append_abstract(true, "CREATE CLASS Person", %{})
{"CREATE CLASS Person ABSTRACT", %{}}
"""
def append_abstract(nil, query, params), do: {query, params}
def append_abstract(true, query, params), do: {query <> " ABSTRACT", params}
def append_abstract(false, query, params), do: {query, params}
@doc """
Append an on statement
iex> ExOrient.QueryBuilder.append_on("Movie (thumbs)", "CREATE INDEX thumbsAuthor", %{})
{"CREATE INDEX thumbsAuthor ON Movie (thumbs)", %{}}
"""
def append_on(nil, query, params), do: {query, params}
def append_on(on, query, params), do: {query <> " ON #{on}", params}
@doc """
Append a type
iex> ExOrient.QueryBuilder.append_type(:string, "CREATE INDEX Test", %{})
{"CREATE INDEX Test STRING", %{}}
"""
def append_type(nil, query, params), do: {query, params}
def append_type(type, query, params) do
type = type |> to_string() |> String.upcase()
{query <> " #{type}", params}
end
@doc """
Append metadata
iex> ExOrient.QueryBuilder.append_metadata(%{ignoreNullValues: false}, "CREATE INDEX Test", %{})
{~S/CREATE INDEX Test METADATA {"ignoreNullValues":false}/, %{}}
"""
def append_metadata(nil, query, params), do: {query, params}
def append_metadata(map, query, params) do
json = Poison.encode!(map)
{query <> " METADATA #{json}", params}
end
@doc """
Append unsafe
iex> ExOrient.QueryBuilder.append_unsafe(true, "TRUNCATE CLASS Person", %{})
{"TRUNCATE CLASS Person UNSAFE", %{}}
"""
def append_unsafe(nil, query, params), do: {query, params}
def append_unsafe(true, query, params), do: {query <> " UNSAFE", params}
def append_unsafe(false, query, params), do: {query, params}
@doc """
Append polymorphic
iex> ExOrient.QueryBuilder.append_polymorphic(true, "TRUNCATE CLASS Person", %{})
{"TRUNCATE CLASS Person POLYMORPHIC", %{}}
"""
def append_polymorphic(nil, query, params), do: {query, params}
def append_polymorphic(true, query, params), do: {query <> " POLYMORPHIC", params}
def append_polymorphic(false, query, params), do: {query, params}
@doc """
Append force
iex> ExOrient.QueryBuilder.append_force(true, "DROP PROPERTY Person.name", %{})
{"DROP PROPERTY Person.name FORCE", %{}}
"""
def append_force(nil, query, params), do: {query, params}
def append_force(true, query, params), do: {query <> " FORCE", params}
def append_force(false, query, params), do: {query, params}
end
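
# A minimal usage sketch (not part of the original module): each append_*
# helper threads a {query, params} pair through and is a no-op when given
# nil, so optional clauses can be applied unconditionally.
alias ExOrient.QueryBuilder

{query, params} = QueryBuilder.append_merge(%{name: "Jane"}, "UPDATE Person", %{})
{query, params} = QueryBuilder.append_upsert(true, query, params)
{query, params} = QueryBuilder.append_retry(3, query, params)
# query == ~s/UPDATE Person MERGE {"name":"Jane"} UPSERT RETRY 3/
# params == %{}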
|
lib/ex_orient/query_builder.ex
| 0.817647
| 0.41941
|
query_builder.ex
|
starcoder
|
defmodule Timex.Time do
@moduledoc """
This module provides a friendly API for working with Erlang
timestamps, i.e. `{megasecs, secs, microsecs}`. In addition,
it provides an easy way to wrap the measurement of function
execution time (via `measure`).
"""
@type units :: :usecs | :msecs | :secs | :mins | :hours | :days | :weeks | :hms
@type quantity :: float
@usecs_in_sec 1_000_000
@usecs_in_msec 1_000
@msecs_in_sec 1_000
@secs_in_min 60
@secs_in_hour @secs_in_min * 60
@secs_in_day @secs_in_hour * 24
@secs_in_week @secs_in_day * 7
@million 1_000_000
@doc """
Converts a timestamp to its value in microseconds
"""
@spec to_usecs(Date.timestamp) :: quantity
def to_usecs({mega, sec, micro}), do: (mega * @million + sec) * @million + micro
@doc """
Converts a timestamp to its value in milliseconds
"""
@spec to_msecs(Date.timestamp) :: quantity
def to_msecs({_, _, _} = ts), do: to_usecs(ts) / @usecs_in_msec
@doc """
Converts a timestamp to its value in seconds
"""
@spec to_secs(Date.timestamp) :: quantity
def to_secs({_, _, _} = ts), do: to_usecs(ts) / @usecs_in_sec
@doc """
Converts a timestamp to its value in minutes
"""
@spec to_mins(Date.timestamp) :: quantity
def to_mins(timestamp), do: to_secs(timestamp) / @secs_in_min
@doc """
Converts a timestamp to its value in hours
"""
@spec to_hours(Date.timestamp) :: quantity
def to_hours(timestamp), do: to_secs(timestamp) / @secs_in_hour
@doc """
Converts a timestamp to its value in days
"""
@spec to_days(Date.timestamp) :: quantity
def to_days(timestamp), do: to_secs(timestamp) / @secs_in_day
@doc """
Converts a timestamp to its value in weeks
"""
@spec to_weeks(Date.timestamp) :: quantity
def to_weeks(timestamp), do: to_secs(timestamp) / @secs_in_week
Enum.each [usecs: 1 / @usecs_in_sec,
msecs: 1 / @msecs_in_sec,
secs: 1,
mins: @secs_in_min,
hours: @secs_in_hour,
days: @secs_in_day,
weeks: @secs_in_week], fn {type, coef} ->
@spec to_usecs(quantity, unquote(type)) :: quantity
def to_usecs(value, unquote(type)), do: do_round(value * unquote(coef) * @usecs_in_sec)
@spec to_msecs(quantity, unquote(type)) :: quantity
def to_msecs(value, unquote(type)), do: do_round(value * unquote(coef) * @msecs_in_sec)
@spec to_secs(quantity, unquote(type)) :: quantity
def to_secs(value, unquote(type)), do: do_round(value * unquote(coef))
@spec to_mins(quantity, unquote(type)) :: quantity
def to_mins(value, unquote(type)), do: do_round(value * unquote(coef) / @secs_in_min)
@spec to_hours(quantity, unquote(type)) :: quantity
def to_hours(value, unquote(type)), do: do_round(value * unquote(coef) / @secs_in_hour)
@spec to_days(quantity, unquote(type)) :: quantity
def to_days(value, unquote(type)), do: do_round(value * unquote(coef) / @secs_in_day)
@spec to_weeks(quantity, unquote(type)) :: quantity
def to_weeks(value, unquote(type)), do: do_round(value * unquote(coef) / @secs_in_week)
end
Enum.each [:to_usecs, :to_msecs, :to_secs, :to_mins, :to_hours, :to_days, :to_weeks], fn name ->
@spec unquote(name)({quantity, quantity, quantity}, :hms) :: quantity
def unquote(name)({hours, minutes, seconds}, :hms), do: unquote(name)(hours * @secs_in_hour + minutes * @secs_in_min + seconds, :secs)
end
@doc """
Converts an hour between 0..24 to {1..12, :am/:pm}
## Examples
iex> Timex.Time.to_12hour_clock(23)
{11, :pm}
"""
def to_12hour_clock(hour) when hour in 0..24 do
case hour do
hour when hour in [0, 24] -> {12, :am}
hour when hour < 12 -> {hour, :am}
hour when hour === 12 -> {12, :pm}
hour when hour > 12 -> {hour - 12, :pm}
end
end
@doc """
Converts an hour between 1..12 in either am or pm, to value between 0..24
## Examples
iex> Timex.Time.to_24hour_clock(7, :pm)
19
"""
def to_24hour_clock(hour, am_or_pm) when hour in 1..12 and am_or_pm in [:am, :pm] do
case am_or_pm do
:am when hour === 12 -> 0
:am -> hour
:pm when hour === 12 -> hour
:pm -> hour + 12
end
end
@doc """
Converts the given input value and unit to an Erlang timestamp.
## Example
iex> Timex.Time.from(1500, :secs)
{0, 1500, 0}
"""
@spec from(integer | Date.time, units) :: Date.timestamp
def from(value, :usecs) do
value = round(value)
{ sec, micro } = mdivmod(value)
{ mega, sec } = mdivmod(sec)
{ mega, sec, micro }
end
def from(value, :msecs), do: from(value * @usecs_in_msec, :usecs)
def from(value, :secs), do: from(value * @usecs_in_sec, :usecs)
def from(value, :mins), do: from(value * @secs_in_min, :secs)
def from(value, :hours), do: from(value * @secs_in_hour, :secs)
def from(value, :days), do: from(value * @secs_in_day, :secs)
def from(value, :weeks), do: from(value * @secs_in_week, :secs)
def from(value, :hms), do: from(to_secs(value, :hms), :secs)
Enum.each [:usecs, :msecs, :secs, :mins, :hours, :days, :weeks, :hms], fn type ->
def to_timestamp(value, unquote(type)), do: from(value, unquote(type))
end
def add({mega1,sec1,micro1}, {mega2,sec2,micro2}) do
normalize { mega1+mega2, sec1+sec2, micro1+micro2 }
end
def sub({mega1,sec1,micro1}, {mega2,sec2,micro2}) do
normalize { mega1-mega2, sec1-sec2, micro1-micro2 }
end
def scale({mega, secs, micro}, coef) do
normalize { mega*coef, secs*coef, micro*coef }
end
def invert({mega, sec, micro}) do
{ -mega, -sec, -micro }
end
def abs(timestamp = {mega, sec, micro}) do
  # bind the result of cond directly; variables assigned inside cond
  # branches do not leak into the enclosing scope
  value =
    cond do
      mega != 0 -> mega
      sec != 0 -> sec
      true -> micro
    end
  if value < 0 do
    invert(timestamp)
  else
    timestamp
  end
end
@doc """
Return a timestamp representing a time lapse of length 0.
Time.convert(Time.zero, :secs)
#=> 0
Can be useful for operations on collections of timestamps. For instance,
Enum.reduce timestamps, Time.zero, &Time.add/2
"""
def zero, do: {0, 0, 0}
@doc """
Convert timestamp in the form { megasecs, seconds, microsecs } to the
specified time units.
Supported units: microseconds (:usecs), milliseconds (:msecs), seconds (:secs),
minutes (:mins), hours (:hours), days (:days), or weeks (:weeks).
"""
def convert(timestamp, type \\ :timestamp)
def convert(timestamp, :timestamp), do: timestamp
def convert(timestamp, :usecs), do: to_usecs(timestamp)
def convert(timestamp, :msecs), do: to_msecs(timestamp)
def convert(timestamp, :secs), do: to_secs(timestamp)
def convert(timestamp, :mins), do: to_mins(timestamp)
def convert(timestamp, :hours), do: to_hours(timestamp)
def convert(timestamp, :days), do: to_days(timestamp)
def convert(timestamp, :weeks), do: to_weeks(timestamp)
@doc """
Return time interval since the first day of year 0 to Epoch.
"""
def epoch(type \\ :timestamp)
def epoch(:timestamp) do
seconds = :calendar.datetime_to_gregorian_seconds({ {1970,1,1}, {0,0,0} })
{ mega, sec } = mdivmod(seconds)
{ mega, sec, 0 }
end
def epoch(type), do: convert(epoch(), type)
@doc """
Time interval since Epoch.
The argument is an atom indicating the type of time units to return (see
convert/2 for supported values).
When the argument is omitted, the return value's format is { megasecs, seconds, microsecs }.
"""
def now(type \\ :timestamp)
case Timex.Utils.get_otp_release do
ver when ver >= 18 ->
def now(:timestamp), do: :erlang.system_time(:micro_seconds) |> from(:usecs)
def now(:usecs), do: :erlang.system_time(:micro_seconds)
def now(:msecs), do: :erlang.system_time(:milli_seconds)
def now(:secs), do: :erlang.system_time(:seconds)
def now(type), do: now(:timestamp) |> convert(type)
_ ->
def now(:timestamp), do: :os.timestamp
def now(type), do: :os.timestamp |> convert(type)
end
@doc """
Time interval between timestamp and now. If timestamp is after now in time, the
return value will be negative. Timestamp must be in format { megasecs, seconds,
microseconds }.
The second argument is an atom indicating the type of time units to return:
microseconds (:usecs), milliseconds (:msecs), seconds (:secs), minutes (:mins),
or hours (:hours).
When the second argument is omitted, the return value's format is { megasecs,
seconds, microsecs }.
"""
def elapsed(timestamp, type \\ :timestamp)
def elapsed(timestamp = {_,_,_}, type) do
elapsed(timestamp, now(), type)
end
def elapsed(timestamp = {_,_,_}, reference_time = {_,_,_}, type) do
diff(reference_time, timestamp) |> convert(type)
end
@doc """
Time interval between two timestamps. If the first timestamp comes before the
second one in time, the return value will be negative. Timestamp must be in format
{ megasecs, seconds, microseconds }.
The third argument is an atom indicating the type of time units to return:
microseconds (:usecs), milliseconds (:msecs), seconds (:secs), minutes (:mins),
or hours (:hours).
When the third argument is omitted, the return value's format is { megasecs,
seconds, microsecs }.
"""
def diff(t1, t2, type \\ :timestamp)
def diff({mega1,secs1,micro1}, {mega2,secs2,micro2}, :timestamp) do
# TODO: normalize the result
{mega1 - mega2, secs1 - secs2, micro1 - micro2}
end
def diff(t1 = {_,_,_}, t2 = {_,_,_}, type) do
convert(diff(t1, t2), type)
end
@doc """
Evaluates fun() and measures the elapsed time.
Returns {timestamp, result}, timestamp is the usual `{ megasecs, seconds, microsecs }`.
## Example
iex> {_timestamp, result} = Time.measure(fn -> 2 * 2 end)
...> result == 4
true
"""
@spec measure((() -> any)) :: { Date.timestamp, any }
def measure(fun), do: do_measure(fun)
@doc """
Evaluates apply(fun, args). Otherwise works like measure/1
"""
@spec measure(fun, [any]) :: { Date.timestamp, any }
def measure(fun, args), do: do_measure(fun, args)
@doc """
Evaluates apply(module, fun, args). Otherwise works like measure/1
"""
@spec measure(module, atom, [any]) :: { Date.timestamp, any }
def measure(module, fun, args), do: do_measure(module, fun, args)
case Timex.Utils.get_otp_release do
ver when ver >= 18 ->
defp do_measure(m, f \\ nil, a \\ []) do
start_time = :erlang.monotonic_time(:micro_seconds)
result = cond do
is_function(m) && f == nil -> apply(m, [])
is_function(m) && is_list(f) -> apply(m, f)
is_atom(m) && is_atom(f) && is_list(a) -> apply(m, f, a)
true -> {:error, "Invalid arguments for do_measure!"}
end
end_time = :erlang.monotonic_time(:micro_seconds)
{(end_time - start_time) |> to_timestamp(:usecs), result}
end
_ ->
defp do_measure(m, f \\ nil, a \\ []) do
{time, result} = cond do
is_function(m) && f == nil -> :timer.tc(m)
is_function(m) && is_list(f) -> :timer.tc(m, f)
is_atom(m) && is_atom(f) && is_list(a) -> :timer.tc(m, f, a)
true -> {:error, "Invalid arguments for do_measure!"}
end
{to_timestamp(time, :usecs), result}
end
end
defp normalize({mega, sec, micro}) do
  # TODO: check for negative values
  # rebind via the if result; matches inside an if body do not leak out
  {sec, micro} = if micro >= @million, do: mdivmod(sec, micro), else: {sec, micro}
  {mega, sec} = if sec >= @million, do: mdivmod(mega, sec), else: {mega, sec}
  {mega, sec, micro}
end
defp divmod(a, b) do
{ div(a, b), rem(a, b) }
end
defp divmod(initial, a, b) do
{ initial + div(a, b), rem(a, b) }
end
defp mdivmod(a) do
divmod(a, 1_000_000)
end
defp mdivmod(initial, a) do
divmod(initial, a, 1_000_000)
end
defp do_round(value) when is_integer(value), do: value
defp do_round(value) when is_float(value), do: Float.round(value, 6)
end
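
# A minimal usage sketch (not part of the original module) exercising the
# conversion, arithmetic, and measurement helpers above.
alias Timex.Time

one_hour = Time.from(1, :hours)                         # {0, 3600, 0}
ninety_mins = Time.add(one_hour, Time.from(30, :mins))  # {0, 5400, 0}
Time.convert(ninety_mins, :mins)                        # => 90.0

{elapsed, result} = Time.measure(fn -> Enum.sum(1..1_000) end)
# result == 500_500; elapsed is an Erlang-style {megasecs, secs, microsecs} tuple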
|
lib/time/time.ex
| 0.891575
| 0.660669
|
time.ex
|
starcoder
|
defmodule Md0.ManualMacroScanner do
use Md0.Scanner.ManualMacro
def scan_document(doc) do
doc
|> String.split(~r{\r\n?|\n})
|> Enum.zip(Stream.iterate(1, &(&1 + 1)))
|> Enum.flat_map(&scan_line/1)
end
state :start do
empty :halt # allows input to end here and emits all tokens scanned so far
on " ", :indent
on "*", :li
on "`", :back
anything :any
end
state :any do
empty :halt, emit: :any # and returns implicitly
on " ", :ws, emit: :any # " " is collected **after** emission
on "*", :star, emit: :any
on "`", :back, emit: :any
anything :any # Maybe a good idea to add the `anything current_state` transition as the default transition for a state
end
state :back do
empty :halt, emit: :back
on " ", :ws, emit: :back
on "`", :back
on "*", :star, emit: :back
anything :any, emit: :back
end
state :indent do
empty :halt, emit: :indent
on " ", :indent
on "*", :li, emit: :indent
on "`", :back, emit: :indent
anything :any, emit: :indent
end
# Looking at the three above states the following syntax seems like a big gain
# e.g.
# state :back do
# with_default emit: :back do
# empty :halt
# on " ", :ws
# on "*", :star
# anything :any
# end
# consume "`" # same as on "`" <current state>
# end
state :li do
empty :halt, emit: :star
on " ", :rest, emit: :li, collect: :before # this means the " " is part of the emitted token and not the next one
on "*", :star
on "`", :back, emit: :star
anything :any, emit: :star
end
state :rest do
empty :halt
on " ", :ws
on "*", :star
on "`", :back
anything :any
end
state :star do
empty :halt, emit: :star
on " ", :ws, emit: :star
on "`", :back, emit: :star
on "*", :star
anything :any, emit: :star
end
state :ws do
empty :halt, emit: :ws
on " ", :ws
on "`", :back, emit: :ws
on "*", :li, emit: :ws
anything :any, emit: :ws
end
end
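
# A hedged usage sketch (not part of the original module). scan_line/1 is
# generated by Md0.Scanner.ManualMacro (not shown here), so the exact token
# shape is an assumption; the state/emit declarations above suggest one
# token per emitted run of characters.
tokens = Md0.ManualMacroScanner.scan_document("* item one\n  two `words`")
# tokens is a flat list of the tokens emitted across both input lines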
|
lib/md0/manual_macro_scanner.ex
| 0.503662
| 0.403684
|
manual_macro_scanner.ex
|
starcoder
|
defmodule ImageClassifier do
@moduledoc """
Image classification using Tensorflow.
"""
@doc """
Returns the most probable label along with an accuracy for a given image.
## Examples
iex> ImageClassifier.label(File.read!("file/t54wjgedk1kd3d8s.jpg"))
{0.49980872869491577, "tvs"}
"""
def label(image) do
label(
image,
app_file("retrained_graph.pb"),
app_file("retrained_labels.txt")
)
end
@doc """
Returns the most probable label along with an accuracy for a given image.
## Example
iex(1)> image = File.read!("tv.jpeg")
<<255, 216, 255, 224, 0, 16, 74, 70, 73, 70, 0, ...>>
iex(2)> {:ok, graph} = Tensorflex.read_graph("retrained_graph.pb")
{:ok,
%Tensorflex.Graph{
def: #Reference<0.1322660680.104464391.77632>,
name: "retrained_graph.pb"
}}
iex(3)> labels = ImageClassifier.read_labels("retrained_labels.txt")
["headphones", "hi fi audio speakers", "tools", "tv audio accessories", "tvs"]
iex(4)> ImageClassifier.label(image, graph, labels)
{0.9993681311607361, "tvs"}
"""
def label(image, graph_path, labels) when is_binary(graph_path) do
{:ok, graph} = Tensorflex.read_graph(graph_path)
label(image, graph, labels)
end
def label(image, graph, labels_path) when is_binary(labels_path) do
labels = read_labels(labels_path)
label(image, graph, labels)
end
def label(image, graph, labels) do
image
|> classify_image(graph, labels)
|> find_label(labels)
end
@doc """
Read all labels separated by a new line from a given file.
## Examples
iex> ImageClassifier.read_labels("dir/retrained_labels.txt")
["headphones", "hi fi audio speakers", "tools", "tv audio accessories", "tvs"]
"""
def read_labels(path) do
path
|> File.read!()
|> String.split("\n", trim: true)
end
def classify_image(image, graph, labels) do
{:ok, decoded, properties} = Jaypeg.decode(image)
in_width = properties[:width]
in_height = properties[:height]
channels = properties[:channels]
height = width = 224
{:ok, resized} =
ImgUtils.resize(decoded, in_width, in_height, channels, width, height)
{:ok, input_tensor} =
Tensorflex.binary_to_matrix(resized, width, height * channels)
|> Tensorflex.divide_matrix_by_scalar(255)
|> Tensorflex.matrix_to_float32_tensor({1, width, height, channels})
{:ok, output_tensor} =
Tensorflex.create_matrix(1, 2, [[length(labels), 1]])
|> Tensorflex.float32_tensor_alloc()
Tensorflex.run_session(
graph,
input_tensor,
output_tensor,
"Placeholder",
"final_result"
)
end
defp find_label(probes, labels) do
List.flatten(probes)
|> Enum.zip(labels)
|> Enum.max()
end
defp app_file(name) do
Application.app_dir(:image_classifier, ["priv", name])
end
end
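
# A hedged sketch (not part of the original module): when labelling many
# images, read the graph and labels once and reuse them rather than calling
# the single-argument convenience clause, which reloads both on every call.
# The file and directory names here are placeholders.
{:ok, graph} = Tensorflex.read_graph("retrained_graph.pb")
labels = ImageClassifier.read_labels("retrained_labels.txt")

"images/*.jpg"
|> Path.wildcard()
|> Enum.map(fn path -> {path, ImageClassifier.label(File.read!(path), graph, labels)} end)
# => [{"images/tv.jpg", {0.999..., "tvs"}}, ...]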
|
lib/image_classifier.ex
| 0.8951
| 0.572872
|
image_classifier.ex
|
starcoder
|
defmodule Auctoritas.DataStorage.RefreshTokenData do
alias Auctoritas.DataStorage.RefreshTokenData
@typedoc "Token expiration in seconds"
@type expiration() :: non_neg_integer()
@typedoc "When was token inserted (UNIX Epoch time)"
@type inserted_at() :: non_neg_integer()
@typedoc "When was token updated (UNIX Epoch time)"
@type updated_at() :: non_neg_integer()
@typedoc "Authentication token"
@type token() :: String.t()
@type metadata() :: %{
inserted_at: inserted_at(),
updated_at: updated_at(),
expires_in: expiration()
}
@derive Jason.Encoder
@enforce_keys [:auth_data, :token, :metadata]
defstruct [:auth_data, :token, :metadata]
@typedoc """
Data struct with data and metadata maps
* data is data associated when inserting token into data_storage
* metadata contains inserted_at, updated_at, expires_in time
inserted when using `get_token_data` function from data_storage
"""
@type t :: %__MODULE__{
auth_data: map(),
token: token(),
metadata: metadata()
}
@spec new(refresh_token_data_map :: map()) :: %__MODULE__{}
def new(refresh_token_data_map) when is_map(refresh_token_data_map) do
struct(__MODULE__, refresh_token_data_map)
end
@spec new(auth_data :: map(), token :: token(), expiration :: expiration()) :: %__MODULE__{}
def new(auth_data, token, expiration)
when is_map(auth_data) and is_number(expiration) and is_bitstring(token) do
new(%{auth_data: auth_data, token: token, metadata: initial_metadata(expiration)})
end
@spec get_auth_data(data :: %__MODULE__{}) :: map()
def get_auth_data(%__MODULE__{} = data) do
data.auth_data
end
@spec get_metadata(data :: %__MODULE__{}) :: map()
def get_metadata(%__MODULE__{} = data) do
data.metadata
end
@spec get_token(data :: %__MODULE__{}) :: token()
def get_token(%__MODULE__{} = data) do
data.token
end
@spec update_auth_data(data :: %__MODULE__{}, new_auth_data :: map()) :: %__MODULE__{}
def update_auth_data(%__MODULE__{} = data, new_auth_data) when is_map(new_auth_data) do
data
|> update_metadata(%{
updated_at: System.system_time(:second)
})
|> Map.put(:auth_data, Map.merge(data.auth_data, new_auth_data))
end
@spec update_metadata(data :: %__MODULE__{}, new_metadata :: map()) :: %__MODULE__{}
def update_metadata(%__MODULE__{} = data, new_metadata) when is_map(new_metadata) do
Map.put(data, :metadata, Map.merge(data.metadata, new_metadata))
end
@spec add_expiration(data :: %__MODULE__{}, expiration :: expiration()) :: %__MODULE__{}
def add_expiration(%__MODULE__{} = data, expiration) when is_number(expiration) do
data
|> update_metadata(%{expires_in: expiration})
end
@spec initial_metadata(expiration :: expiration()) :: metadata()
def initial_metadata(expiration) do
%{
inserted_at: System.system_time(:second),
updated_at: System.system_time(:second),
expires_in: expiration
}
end
def encode(%__MODULE__{} = data) do
Jason.encode(data)
end
def decode(data_json) when is_bitstring(data_json) do
case Jason.decode(data_json, keys: :atoms) do
{:ok, data_map} ->
{:ok, RefreshTokenData.new(data_map)}
{:error, error} ->
{:error, error}
end
end
end
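
# A minimal round-trip sketch (not part of the original module): build a
# refresh-token record, update it, and serialize/deserialize it with Jason.
alias Auctoritas.DataStorage.RefreshTokenData

data = RefreshTokenData.new(%{user_id: 42}, "token-abc", 3_600)
data = RefreshTokenData.update_auth_data(data, %{role: "admin"})

{:ok, json} = RefreshTokenData.encode(data)
{:ok, decoded} = RefreshTokenData.decode(json)
RefreshTokenData.get_auth_data(decoded)
# => %{user_id: 42, role: "admin"}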
|
lib/auctoritas/data_storage/refresh_token_data.ex
| 0.830972
| 0.406273
|
refresh_token_data.ex
|
starcoder
|
defmodule ExTectonicdb.Connection do
@moduledoc """
Handles connection to the database socket
`tdb-server` uses the first byte in the reply to denote success/failure, so `:gen_tcp` needs to connect with `packet: :raw`.
Incoming message format: 1 byte for success/failure, 8 bytes big-endian (64 bit) for length n, and n bytes for body
Outgoing message format: 4 bytes big-endian (32 bit) for length n, and n bytes for body
"""
require Logger
use GenServer
defmodule State do
@type config :: ExTectonicdb.Config.t()
@type socket :: :gen_tcp.socket()
@type t :: %State{
socket: socket,
config: config,
queue: :queue.queue()
}
@enforce_keys ~w[config queue]a
defstruct ~w[config socket queue]a
end
def send_message(pid, message) do
GenServer.call(pid, {:message, message})
end
def start_link(args \\ []) do
state = %State{
config: Keyword.get(args, :config, %ExTectonicdb.Config{}),
queue: :queue.new()
}
opts = Keyword.take(args, [:name])
GenServer.start_link(__MODULE__, state, opts)
end
def init(state) do
{:ok, state, {:continue, :connect}}
end
def handle_continue(:connect, %{config: config} = state) do
:ok = Logger.info("Connecting to #{:inet.ntoa(config.host)}:#{config.port}")
case :gen_tcp.connect(config.host, config.port, packet: :raw, active: true) do
{:ok, socket} ->
{:noreply, %{state | socket: socket}}
{:error, reason} ->
disconnect(state, reason)
end
end
def handle_info({:tcp, socket, [success_bit | data]}, %{socket: socket} = state) do
{{:value, from}, new_queue} = :queue.out(state.queue)
msg = from_packet(data)
if success_bit == 1 do
GenServer.reply(from, {:ok, msg})
else
GenServer.reply(from, {:error, msg})
end
{:noreply, %{state | queue: new_queue}}
end
def handle_info({:tcp_closed, _}, state), do: {:stop, :normal, state}
def handle_info({:tcp_error, _}, state), do: {:stop, :normal, state}
def handle_call({:message, message}, from, %{socket: socket, queue: queue} = state) do
# format message to binary and send over tcp
packet = to_packet(message)
:ok = :gen_tcp.send(socket, packet)
# queue client for later reply
q = :queue.in(from, queue)
state = %{state | queue: q}
{:noreply, state}
end
def disconnect(state, reason) do
:ok = Logger.info("Disconnected: #{reason}")
{:stop, :normal, state}
end
# length prefixes are big-endian: 32 bits outgoing, 64 bits (2 * 32) incoming
@length_bits 32

defp to_packet(msg) do
  size = byte_size(msg)
  :binary.bin_to_list(<<size::@length_bits, msg::binary>>)
end

defp from_packet(packet) do
  <<_size::@length_bits*2, msg::binary>> = :binary.list_to_bin(packet)
  msg
end
end
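
# A hedged usage sketch (not part of the original module), assuming a
# tdb-server is reachable at the host/port in %ExTectonicdb.Config{}; the
# "INFO" payload is purely illustrative.
{:ok, conn} = ExTectonicdb.Connection.start_link()

case ExTectonicdb.Connection.send_message(conn, "INFO") do
  {:ok, reply} -> IO.puts("server replied: #{reply}")
  {:error, reply} -> IO.puts("server rejected the request: #{reply}")
end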
|
lib/ex_tectonicdb/connection.ex
| 0.739799
| 0.401776
|
connection.ex
|
starcoder
|
defmodule Day09 do
def get_low_points(grid) do
Range.new(0, tuple_size(grid[:data]) - 1)
|> Enum.map(&Grid.position_to_point(&1, grid))
|> Enum.map(fn p -> {p, Grid.get(p, grid), Grid.min_adjacent(p, grid)} end)
|> Enum.filter(fn {_, n, adj} -> n < adj end)
|> Enum.map(fn {p, _, _} -> p end)
end
def part_1(contents) do
grid = Grid.new(contents)
grid
|> get_low_points()
|> Enum.map(fn p -> 1 + Grid.get(p, grid) end)
|> Enum.sum()
end
def just_keep_peaks(grid) do
data =
grid[:data]
|> Tuple.to_list()
|> Enum.map(fn x ->
cond do
x == 9 -> :peak
true -> :unknown
end
end)
%{grid | data: List.to_tuple(data)}
end
def set_low_points(grid) do
get_low_points(grid)
|> Enum.with_index()
|> Enum.reduce(just_keep_peaks(grid), fn {p, index}, acc ->
Grid.put(p, acc, index)
end)
end
def spread_basin(p, grid) do
value = Grid.get(p, grid)
cond do
value == :peak ->
grid
value == :unknown ->
grid
true ->
Grid.get_adjacent(p, grid)
|> Enum.reduce(grid, fn p, acc ->
if Grid.get(p, acc) == :unknown, do: Grid.put(p, acc, value), else: acc
end)
end
end
def is_done?(grid) do
grid[:data] |> Tuple.to_list() |> Enum.filter(fn x -> x == :unknown end) |> Enum.empty?()
end
def find_all_basins(grid) do
grid = grid |> Grid.get_all_points() |> Enum.reduce(grid, &spread_basin(&1, &2))
if is_done?(grid), do: grid, else: find_all_basins(grid)
end
def part_2(contents) do
Grid.new(contents)
|> set_low_points()
|> find_all_basins()
|> Map.get(:data)
|> Tuple.to_list()
|> Enum.reduce(%{}, fn x, acc -> Map.update(acc, x, 1, &(&1 + 1)) end)
|> Enum.filter(fn {k, _} -> k != :peak end)
|> Enum.map(fn {_, v} -> v end)
|> Enum.sort(:desc)
|> Enum.take(3)
|> Enum.reduce(1, &(&1 * &2))
end
def main do
{:ok, contents} = File.read("data/day09.txt")
IO.inspect(contents |> part_1(), label: "part 1")
IO.inspect(contents |> part_2(), label: "part 2")
end
end
|
aoc21/lib/day09.ex
| 0.660063
| 0.536434
|
day09.ex
|
starcoder
|
defmodule Vessel.Mapper do
@moduledoc """
This module contains the implementation of the Mapper behaviour for Vessel.
A Mapper uses a Vessel Pipe in order to receive input split by lines and pass
them through to the mapping function. The key for the Mapper is just a binary
representation of the nth record - i.e. the first record will be "1", the next
"2", etc. They're binary in order to conform to typical Hadoop standards, and
it also provides us an easy migration in case we need to move away from numbers
in future.
You can store state by using `Vessel.put_private/3` and returning the Vessel
context at any point in the lifecycle. You can use `Vessel.get_private/3` or
matching in order to retrieve values - but do not modify any other root fields
inside the Vessel context as this is where job state is tracked. If you do not
return a Vessel context, it will ignore the return value and remain unchanged.
Values written from inside the Mapper will be converted to binary output which
means that you will have to re-parse them from inside the Reducer. This is due
to Hadoop Streaming passing everything via stdio and so there's no way to keep
the typing consistent at this time.
"""
@doc """
Invoked prior to any values being read from the stream.
This allows for setup and initialization within your Mapper. This is where you
should start any dependencies, or construct any variables. If you need to store
your variables for later, you should make use of `Vessel.put_private/3` and
make sure that you return the modified context.
If you don't return a valid context, the mapping phase will execute with the
default context (so always ensure you're explicitly returning it just to be
safe).
"""
@callback setup(ctx :: Vessel.t) :: Vessel.t | any
@doc """
Invoked once for every input segment (usually a line of text).
The first argument is the key, and the second value is your text input. The
type of both will be a binary, with the key being a binary counter.
The final argument is the Vessel context. This is passed through when calling
functions like `Vessel.write/3` in order to write values to the Job context.
This context is purely an application-level construct for Vessel to work with,
it does not represent the Hadoop Job Context (as there's no way to do so in
Hadoop Streaming).
If you wish to write any values, you must do so calling `Vessel.write/3`, which
writes your value to the intermediate stream. You can write as many as you
wish within one call to `map/3`, in case your logic needs to generate multiple
records.
The return value of this function is ignored unless it is a Vessel context
which has been modified using `Vessel.put_private/3`, in which case it is kept
to be used as the context going forward.
"""
@callback map(key :: binary, value :: binary, ctx :: Vessel.t) :: Vessel.t | any
@doc """
Invoked after all values have been read from the stream.
Basically the counterpart to the `setup/1` callback, in order to allow you to
clean up any temporary files you may have written, or close any connections,
etc.
The returned context here will be the final context, but it's highly unlikely
you'll need to modify the context at this point.
"""
@callback cleanup(ctx :: Vessel.t) :: Vessel.t | any
@doc false
defmacro __using__(_) do
quote location: :keep do
# inherit piping
use Vessel.Pipe
# inherit Mapper behaviour
@behaviour Vessel.Mapper
@doc false
def map(key, value, ctx) do
Vessel.write(ctx, { key, value })
end
@doc false
def handle_start(ctx) do
input = Vessel.get_conf(ctx, "stream.map.input.field.separator", "\t")
output = Vessel.get_conf(ctx, "stream.map.output.field.separator", "\t")
ctx
|> Vessel.put_meta(:separators, { input, output })
|> super
end
@doc false
def handle_line(line, %{ meta: %{ count: count } } = ctx) do
trimmed = String.trim_trailing(line, "\n")
new_ctx =
count
|> to_string
|> map(trimmed, ctx)
|> handle_return(ctx)
super(line, new_ctx)
end
@doc false
def handle_end(ctx) do
super(ctx)
end
# We allow overriding map (obviously)
defoverridable [ map: 3 ]
end
end
end
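
# A hedged sketch (not part of the original module) of a word-count style
# mapper built on this behaviour. It follows the two-argument Vessel.write
# form used by the default map/3 implementation above, and assumes write
# returns the updated context (as that default implementation suggests) so
# the written state is carried through the reduce.
defmodule WordCount.Mapper do
  use Vessel.Mapper

  def map(_key, value, ctx) do
    value
    |> String.split()
    |> Enum.reduce(ctx, fn word, acc -> Vessel.write(acc, {word, 1}) end)
  end
end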
|
lib/vessel/mapper.ex
| 0.853989
| 0.698702
|
mapper.ex
|
starcoder
|
defmodule Phoenix.LiveView.Helpers do
@moduledoc """
A collection of helpers to be imported into your views.
"""
alias Phoenix.LiveView.{Component, Socket, Static}
@doc false
def live_patch(opts) when is_list(opts) do
live_link("patch", Keyword.fetch!(opts, :do), Keyword.delete(opts, :do))
end
@doc """
Generates a link that will patch the current LiveView.
When navigating to the current LiveView,
`c:Phoenix.LiveView.handle_params/3` is
immediately invoked to handle the change of params and URL state.
Then the new state is pushed to the client, without reloading the
whole page while also maintaining the current scroll position.
For live redirects to another LiveView, use `live_redirect/2`.
## Options
* `:to` - the required path to link to.
* `:replace` - the flag to replace the current history or push a new state.
Defaults `false`.
All other options are forwarded to the anchor tag.
## Examples
<%= live_patch "home", to: Routes.page_path(@socket, :index) %>
<%= live_patch "next", to: Routes.live_path(@socket, MyLive, @page + 1) %>
<%= live_patch to: Routes.live_path(@socket, MyLive, dir: :asc), replace: false do %>
Sort By Price
<% end %>
"""
def live_patch(text, opts)
def live_patch(%Socket{}, _) do
raise """
you are invoking live_patch/2 with a socket but a socket is not expected.
If you want to live_patch/2 inside a LiveView, use push_patch/2 instead.
If you are inside a template, make sure the first argument is a string.
"""
end
def live_patch(opts, do: block) when is_list(opts) do
live_link("patch", block, opts)
end
def live_patch(text, opts) when is_list(opts) do
live_link("patch", text, opts)
end
@doc false
def live_redirect(opts) when is_list(opts) do
live_link("redirect", Keyword.fetch!(opts, :do), Keyword.delete(opts, :do))
end
@doc """
Generates a link that will redirect to a new LiveView.
The current LiveView will be shut down and a new one will be mounted
in its place, without reloading the whole page. This can
also be used to remount the same LiveView, in case you want to start
fresh. If you want to navigate to the same LiveView without remounting
it, use `live_patch/2` instead.
## Options
* `:to` - the required path to link to.
* `:replace` - the flag to replace the current history or push a new state.
Defaults `false`.
All other options are forwarded to the anchor tag.
## Examples
<%= live_redirect "home", to: Routes.page_path(@socket, :index) %>
<%= live_redirect "next", to: Routes.live_path(@socket, MyLive, @page + 1) %>
<%= live_redirect to: Routes.live_path(@socket, MyLive, dir: :asc), replace: false do %>
Sort By Price
<% end %>
"""
def live_redirect(text, opts)
def live_redirect(%Socket{}, _) do
raise """
you are invoking live_redirect/2 with a socket but a socket is not expected.
If you want to live_redirect/2 inside a LiveView, use push_redirect/2 instead.
If you are inside a template, make sure the first argument is a string.
"""
end
def live_redirect(opts, do: block) when is_list(opts) do
live_link("redirect", block, opts)
end
def live_redirect(text, opts) when is_list(opts) do
live_link("redirect", text, opts)
end
defp live_link(type, block_or_text, opts) do
uri = Keyword.fetch!(opts, :to)
replace = Keyword.get(opts, :replace, false)
kind = if replace, do: "replace", else: "push"
data = [phx_link: type, phx_link_state: kind]
opts =
opts
|> Keyword.update(:data, data, &Keyword.merge(&1, data))
|> Keyword.put(:href, uri)
Phoenix.HTML.Tag.content_tag(:a, Keyword.delete(opts, :to), do: block_or_text)
end
@doc """
Renders a LiveView within an originating plug request or
within a parent LiveView.
## Options
* `:session` - the map of extra session data to be serialized
and sent to the client. Note that all session data currently in
the connection is automatically available in LiveViews. You
can use this option to provide extra data. Also note that the keys
in the session are strings keys, as a reminder that data has
to be serialized first.
* `:container` - an optional tuple for the HTML tag and DOM
attributes to be used for the LiveView container. For example:
`{:li, style: "color: blue;"}`. By default it uses the module
definition container. See the "Containers" section below for more
information.
* `:id` - both the DOM ID and the ID to uniquely identify a LiveView.
An `:id` is automatically generated when rendering root LiveViews
but it is a required option when rendering a child LiveView.
* `:router` - an optional router that enables this LiveView to
perform live navigation. Only a single LiveView in a page may
have the `:router` set. LiveViews defined at the router with
the `live` macro automatically have the `:router` option set.
## Examples
# within eex template
<%= live_render(@conn, MyApp.ThermostatLive) %>
# within leex template
<%= live_render(@socket, MyApp.ThermostatLive, id: "thermostat") %>
## Containers
When a `LiveView` is rendered, its contents are wrapped in a container.
By default, said container is a `div` tag with a handful of `LiveView`
specific attributes.
The container can be customized in different ways:
* You can change the default `container` on `use Phoenix.LiveView`:
use Phoenix.LiveView, container: {:tr, id: "foo-bar"}
* You can override the container tag and pass extra attributes when
calling `live_render` (as well as on your `live` call in your router):
live_render socket, MyLiveView, container: {:tr, class: "highlight"}
"""
def live_render(conn_or_socket, view, opts \\ [])
def live_render(%Plug.Conn{} = conn, view, opts) do
case Static.render(conn, view, opts) do
{:ok, content, _assigns} ->
content
{:stop, _} ->
raise RuntimeError, "cannot redirect from a child LiveView"
end
end
def live_render(%Socket{} = parent, view, opts) do
Static.nested_render(parent, view, opts)
end
@doc """
Renders a `Phoenix.LiveComponent` within a parent LiveView.
While `LiveView`s can be nested, each LiveView starts its
own process. A LiveComponent provides similar functionality
to LiveView, except they run in the same process as the
`LiveView`, with its own encapsulated state.
LiveComponent comes in two shapes, stateful and stateless.
See `Phoenix.LiveComponent` for more information.
## Examples
All of the `assigns` given are forwarded directly to the
`live_component`:
<%= live_component(MyApp.WeatherComponent, id: "thermostat", city: "Kraków") %>
Note the `:id` won't necessarily be used as the DOM ID.
That's up to the component. However, note that the `:id` has
a special meaning: whenever an `:id` is given, the component
becomes stateful. Otherwise, `:id` is always set to `nil`.
"""
defmacro live_component(component, assigns \\ [], do_block \\ []) do
if match?({:@, _, [{:socket, _, _}]}, component) or match?({:socket, _, _}, component) do
IO.warn(
"passing the @socket to live_component is no longer necessary, " <>
"please remove the socket argument",
Macro.Env.stacktrace(__CALLER__)
)
end
{inner_block, do_block, assigns} =
case {do_block, assigns} do
{[do: do_block], _} -> {rewrite_do(do_block, __CALLER__), [], assigns}
{_, [do: do_block]} -> {rewrite_do(do_block, __CALLER__), [], []}
{_, _} -> {nil, do_block, assigns}
end
if match?({:__aliases__, _, _}, component) or is_atom(component) or is_list(assigns) or is_map(assigns) do
quote do
Phoenix.LiveView.Helpers.__live_component__(
unquote(component).__live__(),
unquote(assigns),
unquote(inner_block)
)
end
else
quote do
case unquote(component) do
%Phoenix.LiveView.Socket{} ->
Phoenix.LiveView.Helpers.__live_component__(
unquote(assigns).__live__(),
unquote(do_block),
unquote(inner_block)
)
component ->
Phoenix.LiveView.Helpers.__live_component__(
component.__live__(),
unquote(assigns),
unquote(inner_block)
)
end
end
end
end
defmacro live_component(socket, component, assigns, do_block) do
IO.warn(
"passing the @socket to live_component is no longer necessary, " <>
"please remove the socket argument",
Macro.Env.stacktrace(__CALLER__)
)
{inner_block, assigns} =
case {do_block, assigns} do
{[do: do_block], _} -> {rewrite_do(do_block, __CALLER__), assigns}
{_, [do: do_block]} -> {rewrite_do(do_block, __CALLER__), []}
{_, _} -> {nil, assigns}
end
quote do
# Fixes unused variable compilation warning
_ = unquote(socket)
Phoenix.LiveView.Helpers.__live_component__(
unquote(component).__live__(),
unquote(assigns),
unquote(inner_block)
)
end
end
@doc """
Renders a component defined by the given function.
It takes two optional arguments, the assigns to pass to the given function
and a do-block - which will be converted into a `@inner_block` assign (see
`render_block/2` for more information).
The given function must expect one argument, which are the `assigns` as a
map.
All of the `assigns` given are forwarded directly to the function as
the first only argument.
## Examples
The function can be either local:
<%= component(&weather_component/1, city: "Kraków") %>
Or remote:
<%= component(&MyApp.Weather.component/1, city: "Kraków") %>
"""
defmacro component(func, assigns \\ [], do_block \\ []) do
{inner_block, assigns} =
case {do_block, assigns} do
{[do: do_block], _} -> {rewrite_do(do_block, __CALLER__), assigns}
{_, [do: do_block]} -> {rewrite_do(do_block, __CALLER__), []}
{_, _} -> {nil, assigns}
end
quote do
Phoenix.LiveView.Helpers.__component__(
unquote(func),
unquote(assigns),
unquote(inner_block)
)
end
end
defp rewrite_do([{:->, meta, _} | _] = do_block, _caller) do
inner_fun = {:fn, meta, do_block}
quote do
fn parent_changed, arg ->
var!(assigns) = unquote(__MODULE__).__render_inner_fun__(var!(assigns), parent_changed)
_ = var!(assigns)
unquote(inner_fun).(arg)
end
end
end
defp rewrite_do(do_block, caller) do
unless Macro.Env.has_var?(caller, {:assigns, nil}) and
Macro.Env.has_var?(caller, {:changed, Phoenix.LiveView.Engine}) do
raise ArgumentError, """
cannot use live_component do/end blocks because we could not find existing assigns.
Please pass a `->` clause to do/end instead, for example:
live_component GridComponent, entries: @entries do
new_assigns -> "New entry: " <> new_assigns[:entry]
end
"""
end
# TODO: deprecate implicit assigns (i.e. do/end without -> should not get any assign)
quote do
fn changed, extra_assigns ->
var!(assigns) =
unquote(__MODULE__).__render_inner_do__(var!(assigns), changed, extra_assigns)
unquote(do_block)
end
end
end
@doc false
def __render_inner_fun__(assigns, parent_changed) do
if is_nil(parent_changed) or parent_changed[:inner_block] == true do
assigns
else
Map.put(assigns, :__changed__, %{})
end
end
@doc false
def __render_inner_do__(assigns, parent_changed, extra_assigns) do
# If the parent is tracking changes or the inner content changed,
# we will keep the current __changed__ values
changed =
if is_nil(parent_changed) or parent_changed[:inner_block] == true do
Map.get(assigns, :__changed__)
else
%{}
end
assigns = Enum.into(extra_assigns, assigns)
changed =
changed &&
for {key, _} <- extra_assigns,
key != :socket,
into: changed,
do: {key, true}
Map.put(assigns, :__changed__, changed)
end
@doc false
def __live_component__(%{kind: :component, module: component}, assigns, inner)
when is_list(assigns) or is_map(assigns) do
assigns = assigns |> Map.new() |> Map.put_new(:id, nil)
assigns = if inner, do: Map.put(assigns, :inner_block, inner), else: assigns
id = assigns[:id]
# TODO: Deprecate stateless live component
if is_nil(id) and
(function_exported?(component, :handle_event, 3) or
function_exported?(component, :preload, 1)) do
raise "a component #{inspect(component)} that has implemented handle_event/3 or preload/1 " <>
"requires an :id assign to be given"
end
%Component{id: id, assigns: assigns, component: component}
end
def __live_component__(%{kind: kind, module: module}, assigns)
when is_list(assigns) or is_map(assigns) do
raise "expected #{inspect(module)} to be a component, but it is a #{kind}"
end
@doc false
def __component__(func, assigns, inner)
when is_function(func, 1) and (is_list(assigns) or is_map(assigns)) do
assigns = Map.new(assigns)
assigns = if inner, do: Map.put(assigns, :inner_block, inner), else: assigns
func.(assigns)
end
def __component__(func, assigns, _) when is_list(assigns) or is_map(assigns) do
raise ArgumentError, """
component/3 expected an anonymous function with 1-arity, got: #{inspect(func)}
Please call component with a 1-arity function, for example:
<%= component &func/1 %>
def func(assigns) do
~L\"""
Hello
\"""
end
"""
end
@doc """
Renders the `@inner_block` assign of a component with the given `argument`.
<%= render_block(@inner_block, value: @value) %>
"""
defmacro render_block(inner_block, argument \\ []) do
quote do
unquote(inner_block).(var!(changed, Phoenix.LiveView.Engine), unquote(argument))
end
end
@doc """
Returns the flash message from the LiveView flash assign.
## Examples
<p class="alert alert-info"><%= live_flash(@flash, :info) %></p>
<p class="alert alert-danger"><%= live_flash(@flash, :error) %></p>
"""
def live_flash(%_struct{} = other, _key) do
raise ArgumentError, "live_flash/2 expects a @flash assign, got: #{inspect(other)}"
end
def live_flash(%{} = flash, key), do: Map.get(flash, to_string(key))
@doc """
Provides `~L` sigil with HTML safe Live EEx syntax inside source files.
iex> ~L"\""
...> Hello <%= "world" %>
...> "\""
{:safe, ["Hello ", "world", "\\n"]}
"""
defmacro sigil_L({:<<>>, meta, [expr]}, []) do
options = [
engine: Phoenix.LiveView.Engine,
file: __CALLER__.file,
line: __CALLER__.line + 1,
indentation: meta[:indentation] || 0
]
EEx.compile_string(expr, options)
end
@doc """
Provides `~H` sigil with HTML safe Live `HEEx` syntax inside source files.
> Note: `HEEx` requires Elixir >= `1.12.0` in order to provide accurate file:line:column information
> in error messages. Earlier Elixir versions will work but will show inaccurate error messages.
`HEEx` is a HTML-aware and component-friendly extension of `EEx` that provides:
* Built-in handling of HTML attributes
* An HTML-like notation for injecting function components
* Compile-time validation of the structure of the template
## Example
def render(assigns) do
~H"\""
<div title="My div" class={@class}>
<MyApp.Weather.render city="Kraków"/>
</div>
"\""
end
## Syntax extensions
Although `HEEx` may be considered an extension of `EEx`, templates written in `EEx` may not
be fully compatible with `HEEx`. The same goes the other way around. Whenever copying/pasting
code from one format to the other, make sure you update it accordingly.
The main difference comes when defining attributes and function components.
### Defining attributes
`EEx` handles templates as plain text so you're free to interpolate elixir code anywhere in your
template. `HEEx`, on the other hand, parses the code, validating its structure, including
HTML/component nodes and attributes. In order to perform validation, code interpolation
using `<%= ... %>` and `<% ... %>` are restricted to the body (inner content) of the HTML/component
nodes and it cannot be applied within tags.
For instance, the following syntax is invalid:
<div class="<%= @class %>">
...
</div>
Instead do:
<div class={@class}>
...
</div>
For multiple dynamic attributes, you can use the same notation but without
assigning the expression to any specific attribute.
<div {@dynamic_attrs}>
...
</div>
The expression inside `{ ... }` must be either a keyword list or a map containing
the key-value pairs representing the dynamic attributes.
### Defining function components
Function components are stateless components implemented as pure functions. They can be either
local (same module) or remote (external module).
`HEEx` allows invoking these function components directly in the template using an HTML-like
notation. For example, a remote function:
<MyApp.Weather.render city="Kraków"/>
A local function can be invoked with a leading dot:
<.component city="Kraków"/>
Function components can also receive their inner content as
the `@inner_block` assign to be rendered with `render_block/2`:
<MyApp.Weather.render city="Kraków">
Some content to be assigned to @inner_block
</MyApp.Weather.render>
"""
defmacro sigil_H({:<<>>, meta, [expr]}, []) do
options = [
engine: Phoenix.LiveView.HTMLEngine,
file: __CALLER__.file,
line: __CALLER__.line + 1,
indentation: meta[:indentation] || 0
]
EEx.compile_string(expr, options)
end
@doc """
Returns the entry errors for an upload.
The following errors may be returned:
* `:too_many_files` - The number of selected files exceeds the `:max_entries` constraint
## Examples
def error_to_string(:too_many_files), do: "You have selected too many files"
<%= for err <- upload_errors(@uploads.avatar) do %>
<div class="alert alert-danger">
<%= error_to_string(err) %>
</div>
<% end %>
"""
def upload_errors(%Phoenix.LiveView.UploadConfig{} = conf) do
for {ref, error} <- conf.errors, ref == conf.ref, do: error
end
@doc """
Returns the entry errors for an upload.
The following errors may be returned:
* `:too_large` - The entry exceeds the `:max_file_size` constraint
* `:not_accepted` - The entry does not match the `:accept` MIME types
## Examples
def error_to_string(:too_large), do: "Too large"
def error_to_string(:not_accepted), do: "You have selected an unacceptable file type"
<%= for entry <- @uploads.avatar.entries do %>
<%= for err <- upload_errors(@uploads.avatar, entry) do %>
<div class="alert alert-danger">
<%= error_to_string(err) %>
</div>
<% end %>
<% end %>
"""
def upload_errors(
%Phoenix.LiveView.UploadConfig{} = conf,
%Phoenix.LiveView.UploadEntry{} = entry
) do
for {ref, error} <- conf.errors, ref == entry.ref, do: error
end
@doc """
Generates an image preview on the client for a selected file.
## Examples
<%= for entry <- @uploads.avatar.entries do %>
<%= live_img_preview entry, width: 75 %>
<% end %>
"""
def live_img_preview(%Phoenix.LiveView.UploadEntry{ref: ref} = entry, opts \\ []) do
opts =
Keyword.merge(opts,
id: "phx-preview-#{ref}",
data_phx_upload_ref: entry.upload_ref,
data_phx_entry_ref: ref,
data_phx_hook: "Phoenix.LiveImgPreview",
data_phx_update: "ignore"
)
Phoenix.HTML.Tag.content_tag(:img, "", opts)
end
@doc """
Builds a file input tag for a LiveView upload.
Options may be passed through to the tag builder for custom attributes.
## Drag and Drop
Drag and drop is supported by annotating the droppable container with a `phx-drop-target`
attribute pointing to the DOM ID of the file input. By default, the file input ID is the
upload `ref`, so the following markup is all that is required for drag and drop support:
<div class="container" phx-drop-target="<%= @uploads.avatar.ref %>">
...
<%= live_file_input @uploads.avatar %>
</div>
## Examples
<%= live_file_input @uploads.avatar %>
"""
def live_file_input(%Phoenix.LiveView.UploadConfig{} = conf, opts \\ []) do
if opts[:id], do: raise(ArgumentError, "the :id cannot be overridden on a live_file_input")
opts =
if conf.max_entries > 1 do
Keyword.put(opts, :multiple, true)
else
opts
end
preflighted_entries = for entry <- conf.entries, entry.preflighted?, do: entry
done_entries = for entry <- conf.entries, entry.done?, do: entry
valid? = Enum.any?(conf.entries) && Enum.empty?(conf.errors)
Phoenix.HTML.Tag.content_tag(
:input,
"",
Keyword.merge(opts,
type: "file",
id: conf.ref,
name: conf.name,
accept: if(conf.accept != :any, do: conf.accept),
phx_hook: "Phoenix.LiveFileUpload",
data_phx_update: "ignore",
data_phx_upload_ref: conf.ref,
data_phx_active_refs: Enum.map_join(conf.entries, ",", & &1.ref),
data_phx_done_refs: Enum.map_join(done_entries, ",", & &1.ref),
data_phx_preflighted_refs: Enum.map_join(preflighted_entries, ",", & &1.ref),
data_phx_auto_upload: valid? && conf.auto_upload?
)
)
end
@doc """
Renders a title tag with automatic prefix/suffix on `@page_title` updates.
## Examples
<%= live_title_tag assigns[:page_title] || "Welcome", prefix: "MyApp – " %>
<%= live_title_tag assigns[:page_title] || "Welcome", suffix: " – MyApp" %>
"""
def live_title_tag(title, opts \\ []) do
title_tag(title, opts[:prefix], opts[:suffix], opts)
end
defp title_tag(title, nil = _prefix, "" <> suffix, _opts) do
Phoenix.HTML.Tag.content_tag(:title, title <> suffix, data: [suffix: suffix])
end
defp title_tag(title, "" <> prefix, nil = _suffix, _opts) do
Phoenix.HTML.Tag.content_tag(:title, prefix <> title, data: [prefix: prefix])
end
defp title_tag(title, "" <> pre, "" <> post, _opts) do
Phoenix.HTML.Tag.content_tag(:title, pre <> title <> post, data: [prefix: pre, suffix: post])
end
defp title_tag(title, _prefix = nil, _postfix = nil, []) do
Phoenix.HTML.Tag.content_tag(:title, title)
end
defp title_tag(_title, _prefix = nil, _suffix = nil, opts) do
raise ArgumentError,
"live_title_tag/2 expects a :prefix and/or :suffix option, got: #{inspect(opts)}"
end
end
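
# A hedged sketch (not part of the library) of a stateless function
# component for the HEEx notation documented above; the module, function,
# and assign names are placeholders.
defmodule MyAppWeb.Badge do
  import Phoenix.LiveView.Helpers

  def badge(assigns) do
    ~H"""
    <span class={@class}><%= @label %></span>
    """
  end
end

# Rendered from another HEEx template as:
#   <MyAppWeb.Badge.badge class="ok" label="Active" />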
|
lib/phoenix_live_view/helpers.ex
| 0.897011
| 0.541348
|
helpers.ex
|
starcoder
|
defmodule Grouper.GroupLeader do
@moduledoc """
This implements a group leader capable of IO and storing group metadata.
A `Grouper.GroupLeader` forwards IO upstream, just like any other
group_leader. In addition, it also allocates a ETS table for this group and
registers it in a global ETS table.
Requests for configuration and naming functions (mediated by the
`Grouper.Data` module) store information in this ETS table (among other
places).
"""
use GenServer
@init_opts [:leader, :commandeer, :parent]
defstruct [:self, :group_leader, :commandeered, :ets_table_id]
@typedoc "state for Grouper.Leader"
@type t() :: %__MODULE__{
self: pid(),
group_leader: pid(),
commandeered: [pid()],
# we used named tables, so this can't be a tid (i.e. ref)
ets_table_id: atom()
}
@typedoc "options to start function"
@type options() :: [option()]
@typedoc "options to init function"
@type init_options() :: [init_option()]
@typedoc "keyword options for start function"
@type option() :: GenServer.option() | init_option()
@typedoc "keyword options for init function"
@type init_option() ::
{:commandeer, boolean() | pid() | [pid()]}
| {:leader, pid()}
| {:parent, pid()}
@typedoc "valid info terms"
@type infos() :: io_request()
@typedoc "IO request info term"
@type io_request() :: {:io_request, pid(), any(), any()}
# === API ===
@doc """
Starts a Grouper.Leader process linked to the current process.
This is typically used in a supervision tree.
In addition to the normal options accepted by GenServer.start_link/3, this
also accepts the following options:
* `commandeer` - during initialization, sets itself as group_leader for
this process or list of processes instead of the calling process
* `leader` - during initialization, sets this process as the group_leader
to which it forwards IO requests
* `parent` - this specifies the parent process, useful for testing and when
not being started in a normal supervision tree
"""
@spec start_link(options()) :: GenServer.on_start()
def start_link(opts \\ []) when is_list(opts) do
{init_opts, start_link_opts} = Keyword.split(opts, @init_opts)
GenServer.start_link(__MODULE__, init_opts, start_link_opts)
end
@doc """
gets state data for a given group leader
"""
@spec get_group_data(pid()) :: {:ok, t()} | {:error, :not_found}
def get_group_data(glpid) do
case :ets.lookup(:grouper_global_tab, {:group, glpid}) do
[{_key, state}] ->
{:ok, state}
[] ->
{:error, :not_found}
end
end
@doc """
gets original group leader for this process
"""
@spec get_group_leader(pid) :: pid()
def get_group_leader(glpid) do
GenServer.call(glpid, :get_group_leader)
end
@doc """
stops a group leader process
"""
@spec stop(pid(), reason :: atom()) :: :ok
def stop(glpid, reason \\ :normal) do
GenServer.stop(glpid, reason)
end
# === GenServer Callbacks ===
@doc false
@impl true
@spec init(init_options()) :: {:ok, t()} | {:stop, :no_parent}
def init(opts) when is_list(opts) do
my_group_leader = Keyword.get_lazy(opts, :leader, &Process.group_leader/0)
Process.flag(:trap_exit, true)
commandeered =
case Keyword.get(opts, :commandeer, false) do
pid when is_pid(pid) ->
Process.group_leader(pid, self())
[pid]
pids when is_list(pids) ->
for pid <- pids do
Process.group_leader(pid, self())
pid
end
true ->
parent = get_parent(opts)
true = Process.group_leader(parent, self())
[parent]
false ->
[]
end
uid = :erlang.unique_integer([:positive])
tid = :ets.new(:"grouper_group_#{uid}_tab", [:set, :public, :named_table])
state = %__MODULE__{
self: self(),
group_leader: my_group_leader,
commandeered: commandeered,
ets_table_id: tid
}
true = :ets.insert(:grouper_global_tab, {{:group, self()}, state})
{:ok, state}
catch
# get_parent/1 exits with :no_parent; convert the exit into a :stop tuple
:exit, reason ->
{:stop, reason}
end
@doc false
@impl true
def handle_call(:get_group_leader, _from, %__MODULE__{} = state) do
{:reply, state.group_leader, state}
end
@doc false
@impl true
@spec handle_info(infos(), t()) :: {:noreply, t()}
def handle_info({:io_request, _from, _reply_as, _request} = io_request, %__MODULE__{} = state) do
send(state.group_leader, io_request)
{:noreply, state}
end
@doc false
@impl true
def terminate(_reason, %__MODULE__{} = state) do
# restore commandeered processes' group leaders
for pid <- state.commandeered do
try do
Process.group_leader(pid, state.group_leader)
rescue
ArgumentError ->
# happens when one of the pids has already exited
:ok
end
end
# delete ETS table
# the entry was registered under this group leader's own pid in init/1
true = :ets.delete(:grouper_global_tab, {:group, state.self})
:ignored
end
# TODO: re-parent processes to application master during shutdown if
# feasible, not sure on efficiency of walking entire process space
# === helper functions ===
defp get_parent(opts) do
ancestors = Process.get(:"$ancestors")
parent = Keyword.get(opts, :parent)
case {parent, ancestors} do
{parent, _} when is_pid(parent) ->
parent
{_, nil} ->
exit(:no_parent)
{_, []} ->
exit(:no_parent)
{_, [parent | _]} when is_pid(parent) ->
parent
end
end
end
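
# A hedged usage sketch (not part of the original module); it assumes the
# :grouper_global_tab ETS table has already been created by the enclosing
# application, since init/1 registers group metadata there.
{:ok, gl} = Grouper.GroupLeader.start_link(commandeer: self())
IO.puts("this output is forwarded through the new group leader")

{:ok, %Grouper.GroupLeader{ets_table_id: tid}} = Grouper.GroupLeader.get_group_data(gl)
true = :ets.insert(tid, {:example_key, :example_value})

:ok = Grouper.GroupLeader.stop(gl) # restores the commandeered group leader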
|
lib/grouper/group_leader.ex
| 0.750827
| 0.436862
|
group_leader.ex
|
starcoder
|
defmodule Screens.V2.Template do
@moduledoc false
import Screens.V2.Template.Guards
@typedoc """
A paging_index is used in combination with a slot_id to
uniquely identify a paged region on the screen.
"""
@type paging_index :: non_neg_integer()
@typedoc """
A non_paged_slot_id represents a defined, non-paged region on the screen.
e.g. :header, :main_content, :flex_zone
"""
@type non_paged_slot_id :: atom()
@typedoc """
A paged_slot_id uniquely identifies a paged region on the screen.
e.g. {0, :medium_left}, {1, :large}
"""
@type paged_slot_id :: {paging_index(), non_paged_slot_id()}
@typedoc """
A slot_id represents a defined region on the screen.
e.g. :header, {0, :medium_left}
"""
@type slot_id :: non_paged_slot_id() | paged_slot_id()
@typedoc """
A layout_type names a way of filling a defined region on the screen.
In the API, this is the value of `type`.
On the frontend, this corresponds to the React Component which will be used.
e.g. :normal, :takeover, :two_medium
"""
@type layout_type :: atom()
@type non_paged_template ::
non_paged_slot_id()
| {non_paged_slot_id(), %{layout_type() => list(non_paged_template())}}
@type paged_template ::
paged_slot_id() | {paged_slot_id(), %{layout_type() => list(paged_template())}}
@typedoc """
A template represents all possible ways to fill a region on the screen.
e.g. a Bus Shelter Screen Flex Zone could have the template:
{:flex_zone,
%{
one_large: [:large],
two_medium: [:medium_left, :medium_right],
one_medium_two_small: [:medium_left, :small_upper_right, :small_lower_right]
}}
"""
@type template ::
non_paged_slot_id()
| paged_template()
| {non_paged_slot_id(), %{layout_type() => list(template())}}
@type non_paged_layout ::
non_paged_slot_id() | {non_paged_slot_id(), {layout_type(), list(non_paged_layout())}}
@typedoc """
A layout represents one possible way to resolve a template.
e.g. a layout for the above flex zone could be:
{:flex_zone, {:two_medium, [:medium_left, :medium_right]}}
"""
@type layout :: slot_id() | {slot_id(), {layout_type(), list(layout())}}
@spec slot_combinations(template()) :: nonempty_list({nonempty_list(slot_id()), layout()})
def slot_combinations(template) do
template
|> layout_combinations()
|> Enum.map(fn layout -> {flatten_layout(layout), layout} end)
end
@spec layout_combinations(template()) :: nonempty_list(layout())
def layout_combinations(template)
when is_slot_id(template) do
[template]
end
def layout_combinations({slot_id, layout_map}) do
Enum.flat_map(layout_map, fn {layout_type, template_list} ->
template_list
|> Enum.map(&layout_combinations/1)
|> product()
|> Enum.map(fn list -> {slot_id, {layout_type, list}} end)
end)
end
@spec product(list(list(layout()))) :: list(layout())
defp product(list_of_lists) do
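# Cartesian product: every way of choosing one resolved layout per child
# template, preserving the original left-to-right slot order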
Enum.reduce(list_of_lists, [[]], fn list, acc ->
for l <- list, a <- acc do
a ++ [l]
end
end)
end
@spec flatten_layout(layout()) :: list(slot_id())
defp flatten_layout(layout) when is_slot_id(layout) do
[layout]
end
defp flatten_layout({_, {_, layout_list}}) do
Enum.flat_map(layout_list, &flatten_layout/1)
end
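# Recursively replaces each slot in a resolved layout with the widget
# instance selected for it, tagging paged slots with page_index/num_pages.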
@spec position_widget_instances(layout(), map(), map()) :: map() | nil
def position_widget_instances(layout, selected_widget_map, _paging_metadata)
when is_atom(layout) do
Map.get(selected_widget_map, layout)
end
def position_widget_instances(
{_slot_id, {layout_type, layout_list}},
selected_widget_map,
paging_metadata
) do
layout_list
|> Enum.map(fn layout ->
slot_id = get_slot_id(layout)
widget_data =
layout
|> position_widget_instances(selected_widget_map, paging_metadata)
|> put_paging_metadata(slot_id, paging_metadata)
{slot_id, widget_data}
end)
|> Enum.into(%{type: layout_type})
end
defp put_paging_metadata(positioned_widget, slot_id, paging_metadata) do
case Map.get(paging_metadata, slot_id) do
{page_index, num_pages} ->
Map.merge(positioned_widget, %{page_index: page_index, num_pages: num_pages})
_ ->
positioned_widget
end
end
@spec get_slot_id(layout()) :: slot_id()
def get_slot_id(layout) when is_slot_id(layout), do: layout
def get_slot_id({slot_id, _}) when is_slot_id(slot_id), do: slot_id
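@spec slots_match?(layout(), layout()) :: boolean()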
def slots_match?(s1, s2) do
unpage(s1) == unpage(s2)
end
@spec get_page(layout()) :: paging_index()
def get_page(slot_id) when is_paged_slot_id(slot_id), do: elem(slot_id, 0)
def get_page({slot_id, _}) when is_paged_slot_id(slot_id), do: elem(slot_id, 0)
@spec unpage(layout()) :: non_paged_slot_id()
def unpage(slot_id) when is_paged_slot_id(slot_id), do: elem(slot_id, 1)
def unpage({slot_id, _}) when is_paged_slot_id(slot_id), do: elem(slot_id, 1)
def unpage(slot_id) when is_non_paged_slot_id(slot_id), do: slot_id
@doc """
Used for sorting. Non-paged slots precede all paged slots but are otherwise
considered equal to one another, so the original layout order is preserved as much as possible.
Paged slots are ordered by page index only, for the same reason.
"""
def slot_precedes_or_equal?(s1, s2)
when is_paged_slot_id(s1) and is_non_paged_slot_id(s2) do
false
end
def slot_precedes_or_equal?({page1, _} = s1, {page2, _} = s2)
when is_paged_slot_id(s1) and is_paged_slot_id(s2) do
page1 <= page2
end
def slot_precedes_or_equal?(_, _), do: true
end
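`slot_combinations/1` and `layout_combinations/1` enumerate every way a template can resolve, taking the Cartesian product of the resolutions for each child slot. A small worked example (not from the source) using a trimmed version of the flex-zone template from the `@typedoc` above; the result ordering reflects map iteration order and should be treated as illustrative:

```elixir
template =
  {:flex_zone,
   %{
     one_large: [:large],
     two_medium: [:medium_left, :medium_right]
   }}

Screens.V2.Template.slot_combinations(template)
#=> [
#=>   {[:large], {:flex_zone, {:one_large, [:large]}}},
#=>   {[:medium_left, :medium_right],
#=>    {:flex_zone, {:two_medium, [:medium_left, :medium_right]}}}
#=> ]
```

Each element pairs the flattened list of occupied slot ids with the full layout tree that produced it.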
|
lib/screens/v2/template.ex
| 0.852429
| 0.445891
|
template.ex
|
starcoder
|
defmodule ExForce.OAuth do
@moduledoc """
Handles OAuth2 authentication flows for Salesforce.
## Grant Types
- `authorization_code`: [Understanding the Web Server OAuth Authentication Flow](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/intro_understanding_web_server_oauth_flow.htm)
- `password`: [Understanding the Username-Password OAuth Authentication Flow](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/intro_understanding_username_password_oauth_flow.htm)
- `token`: [Understanding the User-Agent OAuth Authentication Flow](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/intro_understanding_user_agent_oauth_flow.htm)
- `refresh_token`: [Understanding the OAuth Refresh Token Process](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/intro_understanding_refresh_token_oauth.htm)
"""
alias ExForce.OAuthResponse
import ExForce.Client, only: [request: 2]
@type username :: String.t()
@type password :: String.t()
@type code :: String.t()
@type redirect_uri :: String.t()
@default_user_agent "ex_force"
@doc """
Returns a Tesla client for use with the OAuth functions.
### Options
- `:headers` - request headers to send; defaults to `[{"user-agent", "ex_force"}]`
"""
def build_client(url, opts \\ [headers: [{"user-agent", @default_user_agent}]]) do
Tesla.client([
{Tesla.Middleware.BaseUrl, url},
{Tesla.Middleware.Compression, format: "gzip"},
Tesla.Middleware.FormUrlencoded,
{Tesla.Middleware.DecodeJson, engine: Jason},
{Tesla.Middleware.Headers, Keyword.get(opts, :headers, [])}
])
end
@doc """
Builds the authorization URL from the endpoint and the given query parameters.
### `authorization_code`
```elixir
ExForce.OAuth.authorize_url(
"https://login.salesforce.com",
response_type: "code",
client_id: "client-id",
redirect_uri: "https://example.com/callback"
)
```
### `token`
```elixir
ExForce.OAuth.authorize_url(
"https://login.salesforce.com",
response_type: "token",
client_id: "client-id",
redirect_uri: "https://example.com/callback"
)
```
"""
@spec authorize_url(String.t(), Enum.t()) :: String.t()
def authorize_url(endpoint, enum) do
endpoint <> "/services/oauth2/authorize?" <> URI.encode_query(enum)
end
@doc """
Fetches an `ExForce.OAuthResponse` struct by making a request to the token endpoint.
### `authorization_code`
```elixir
ExForce.OAuth.get_token(
"https://login.salesforce.com",
grant_type: "authorization_code",
code: "code",
redirect_uri: "https://example.com/callback",
client_id: "client_id",
client_secret: "client_secret"
)
```
### `password`
```elixir
ExForce.OAuth.get_token(
"https://login.salesforce.com",
grant_type: "password",
client_id: "client_id",
client_secret: "client_secret",
username: "username",
password: "password"
)
```
### `refresh_token`
```elixir
ExForce.OAuth.get_token(
"https://login.salesforce.com",
grant_type: "refresh_token",
client_id: "client_id",
client_secret: "client_secret",
refresh_token: "refresh_token"
)
```
"""
@spec get_token(ExForce.Client.t() | String.t(), list) ::
{:ok, OAuthResponse.t()} | {:error, :invalid_signature | term}
def get_token(url, payload) when is_binary(url), do: url |> build_client() |> get_token(payload)
def get_token(client, payload) do
client_secret = Keyword.fetch!(payload, :client_secret)
case request(client, method: :post, url: "/services/oauth2/token", body: payload) do
{:ok,
%Tesla.Env{
status: 200,
body:
map = %{
"token_type" => token_type,
"instance_url" => instance_url,
"id" => id,
"signature" => signature,
"issued_at" => issued_at,
"access_token" => access_token
}
}} ->
verify_signature(
%OAuthResponse{
token_type: token_type,
instance_url: instance_url,
id: id,
issued_at: issued_at |> String.to_integer() |> DateTime.from_unix!(:millisecond),
signature: signature,
access_token: access_token,
refresh_token: Map.get(map, "refresh_token"),
scope: Map.get(map, "scope")
},
client_secret
)
{:ok, %Tesla.Env{body: body}} ->
{:error, body}
{:error, _} = other ->
other
end
end
defp verify_signature(
%OAuthResponse{id: id, issued_at: issued_at, signature: signature} = resp,
client_secret
) do
if signature == calculate_signature(id, issued_at, client_secret) do
{:ok, resp}
else
{:error, :invalid_signature}
end
end
defp calculate_signature(id, issued_at, client_secret) do
issued_at_raw =
issued_at
|> DateTime.to_unix(:millisecond)
|> Integer.to_string()
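# note: :crypto.hmac/3 was removed in OTP 24; on current OTP this call is
# :crypto.mac(:hmac, :sha256, client_secret, id <> issued_at_raw)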
:sha256
|> :crypto.hmac(client_secret, id <> issued_at_raw)
|> Base.encode64()
end
end
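`verify_signature/2` recomputes the signature Salesforce attaches to a token response: HMAC-SHA256 over `id <> issued_at` (issued at in milliseconds, rendered as a string), keyed with the consumer secret and Base64-encoded. A standalone sketch with made-up values; it uses `:crypto.mac/4`, the OTP 24+ replacement for the `:crypto.hmac/3` call above:

```elixir
id = "https://login.salesforce.com/id/00Dxx.../005xx..."
issued_at = "1520000000000"
client_secret = "consumer-secret"

expected =
  :crypto.mac(:hmac, :sha256, client_secret, id <> issued_at)
  |> Base.encode64()

# A response is trusted only when the signature it carries matches `expected`;
# otherwise get_token/2 returns {:error, :invalid_signature}.
```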
|
lib/ex_force/oauth.ex
| 0.885792
| 0.724383
|
oauth.ex
|
starcoder
|