defmodule Exq.Middleware.Pipeline do
@moduledoc """
Pipeline is the structure passed as the argument to the functions of any module implementing
the `Exq.Middleware.Behaviour` behaviour. Each such function must return the (possibly
updated) pipeline, which is then handed to the next middleware in the configured chain.
Pipeline contains the following options:
* `assigns` - map that contains shared data across the whole job lifecycle
* `worker_pid` - process id of `Exq.Worker.Server`
* `event` - name of current middleware function, possible values are: `before_work`,
`after_processed_work` and `after_failed_work`
* `halted` - flag indicating whether pipeline was halted, defaults to `false`
* `terminated` - flag indicating whether the worker and pipeline were terminated, defaults
to `false`. If set to `true`, the job will not be dispatched and all `after_*_work/1`
callbacks will be skipped. For each specific middleware:
- Exq.Middleware.Job: Will NOT remove the backup from job queue
- Exq.Middleware.Logger: Will NOT record job as done or failed with timestamp
- Exq.Middleware.Manager: Will NOT update worker counter
- Exq.Middleware.Stats: Will NOT remove job from processes queue
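A minimal custom middleware sketch (the module name is hypothetical):

defmodule MyApp.Middleware.Timer do
  @behaviour Exq.Middleware.Behaviour
  alias Exq.Middleware.Pipeline

  def before_work(pipeline),
    do: Pipeline.assign(pipeline, :started_at, System.monotonic_time(:millisecond))

  def after_processed_work(pipeline), do: pipeline
  def after_failed_work(pipeline), do: pipeline
end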
"""
defstruct assigns: %{},
halted: false,
terminated: false,
worker_pid: nil,
event: nil
alias Exq.Middleware.Pipeline
@doc """
Puts the `key` with value equal to `value` into `assigns` map
"""
def assign(%Pipeline{assigns: assigns} = pipeline, key, value) when is_atom(key) do
%{pipeline | assigns: Map.put(assigns, key, value)}
end
@doc """
Sets `halted` to true
"""
def halt(%Pipeline{} = pipeline) do
%{pipeline | halted: true}
end
@doc """
Sets `terminated` to true
"""
def terminate(%Pipeline{} = pipeline) do
%{pipeline | terminated: true}
end
@doc """
Puts a state of `Exq.Worker.Server` into `assigns` map
"""
def assign_worker_state(pipeline, worker_state) do
pipeline
|> assign(:host, worker_state.host)
|> assign(:namespace, worker_state.namespace)
|> assign(:queue, worker_state.queue)
|> assign(:manager, worker_state.manager)
|> assign(:stats, worker_state.stats)
|> assign(:job_serialized, worker_state.job_serialized)
end
@doc """
Implements the middleware chain: sequentially calls the function named by `pipeline.event` in each module of the chain.
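For example:

Pipeline.chain(pipeline, [Exq.Middleware.Logger, Exq.Middleware.Stats, Exq.Middleware.Job])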
"""
def chain(pipeline, []) do
pipeline
end
def chain(%Pipeline{halted: true} = pipeline, _modules) do
pipeline
end
def chain(%Pipeline{terminated: true} = pipeline, _modules) do
pipeline
end
def chain(pipeline, [module | modules]) do
chain(apply(module, pipeline.event, [pipeline]), modules)
end
end
# source: lib/exq/middleware/pipeline.ex
defmodule Harald.HCI.ControllerAndBaseband do
alias Harald.HCI
@moduledoc """
HCI commands for working with the controller and baseband.
> The Controller & Baseband Commands provide access and control to various capabilities of the
> Bluetooth hardware. These parameters provide control of BR/EDR Controllers and of the
> capabilities of the Link Manager and Baseband in the BR/EDR Controller, the PAL in an AMP
> Controller, and the Link Layer in an LE Controller. The Host can use these commands to modify
> the behavior of the local Controller.
Bluetooth Spec v5
"""
@ogf 0x03
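# HCI opcodes appear to pack the OGF into the upper 6 bits and the OCF into the
# lower 10 bits ((ogf <<< 10) ||| ocf), serialized little-endian. For example,
# Read_Local_Name: (0x03 <<< 10) ||| 0x0014 = 0x0C14, encoded as <<0x14, 0x0C>>.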
@doc """
> The Read_Local_Name command provides the ability to read the stored user-friendly name for
> the BR/EDR Controller. See Section 6.23.
iex> read_local_name()
<<20, 12, 0>>
"""
@spec read_local_name :: HCI.command()
def read_local_name, do: @ogf |> HCI.opcode(0x0014) |> HCI.command()
@doc """
Reset the baseband
iex> reset()
<<0x03, 0x0C, 0x0>>
"""
@spec reset :: HCI.command()
def reset(), do: @ogf |> HCI.opcode(0x03) |> HCI.command()
@doc """
> The Set_Event_Mask command is used to control which events are generated
> by the HCI for the Host. If the bit in the Event_Mask is set to a one,
> then the event associated with that bit will be enabled. For an LE Controller,
> the “LE Meta Event” bit in the Event_Mask shall enable or disable all LE
> events in the LE Meta Event
iex> set_event_mask(<<0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F>>)
<<0x1, 0xC, 0x8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F>>
"""
@spec set_event_mask(<<_::64>>) :: HCI.command()
def set_event_mask(mask) when byte_size(mask) == 0x8,
do: @ogf |> HCI.opcode(0x0001) |> HCI.command(mask)
@doc """
> This command enables Simple Pairing mode in the BR/EDR Controller.
> When Simple Pairing Mode is set to 'enabled' the Link Manager shall
> respond to an LMP_io_capability_req PDU with an LMP_io_capability_res
> PDU and continue with the subsequent pairing procedure.
> When Simple Pairing mode is set to 'disabled', the Link Manager
> shall reject an IO capability request. A Host shall not set the Simple
> Pairing Mode to ‘disabled.’
iex> write_simple_pairing_mode(true)
<<0x56, 0x0C, 0x01, 0x01>>
iex> write_simple_pairing_mode(false)
<<0x56, 0x0C, 0x01, 0x00>>
"""
@spec write_simple_pairing_mode(boolean) :: HCI.command()
def write_simple_pairing_mode(enabled?),
do: @ogf |> HCI.opcode(0x0056) |> HCI.command([enabled?])
@doc """
> This command writes the value for the Page_Timeout configuration parameter.
> The Page_Timeout configuration parameter defines the maximum time the local
> Link Manager shall wait for a baseband page response from the remote device
> at a locally initiated connection attempt. If this time expires and the
> remote device has not responded to the page at baseband level,
> the connection attempt will be considered to have failed.
iex> write_page_timeout(0x60)
<<0x18, 0x0C, 0x02, 0x00, 0x60>>
"""
@spec write_page_timeout(timeout :: 0..65535) :: HCI.command()
def write_page_timeout(timeout) when timeout <= 65535,
do: @ogf |> HCI.opcode(0x0018) |> HCI.command(<<timeout::16>>)
@doc """
> This command writes the value for the Class_of_Device parameter.
iex> write_class_of_device(0x0C027A)
<<0x24, 0x0C, 0x03, 0x0C, 0x02, 0x7A>>
"""
@spec write_class_of_device(class :: 0..16_777_215) :: HCI.command()
def write_class_of_device(class) when class <= 16_777_215,
do: @ogf |> HCI.opcode(0x0024) |> HCI.command(<<class::24>>)
@doc """
> The Write_Local_Name command provides the ability to modify the
> user-friendly name for the BR/EDR Controller.
iex> write_local_name("some friendly name")
<<0x13, 0xC, 0xF8, 0x73, 0x6F, 0x6D, 0x65, 0x20, 0x66, 0x72, 0x69, 0x65, 0x6E,
0x64, 0x6C, 0x79, 0x20, 0x6E, 0x61, 0x6D, 0x65, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0>>
"""
@spec write_local_name(name :: binary()) :: HCI.command()
def write_local_name(name) when byte_size(name) <= 248 do
remaining = 248 - byte_size(name)
padding = :binary.copy(<<0x00>>, remaining)
@ogf |> HCI.opcode(0x0013) |> HCI.command(name <> padding)
end
@doc """
> The Write_Extended_Inquiry_Response command writes the extended inquiry
> response to be sent during the extended inquiry response procedure.
> The FEC_Required command parameter states if FEC encoding is required.
> The extended inquiry response data is not preserved over a reset.
> The initial value of the inquiry response data is all zero octets.
> The controller shall not interpret the extended inquiry response data.
iex> write_extended_inquiry_response(false, <<0x1A, 0x9, 0x42, 0x54, 0x73, 0x74, 0x61, 0x63, 0x6B, 0x20, 0x45,
...> 0x20, 0x38, 0x3A, 0x34, 0x45, 0x3A, 0x30, 0x36, 0x3A, 0x38, 0x31, 0x3A, 0x41, 0x34, 0x3A,
...> 0x35, 0x30, 0x20>>)
<<0x52, 0xC, 0xF1, 0x0, 0x1A, 0x9, 0x42, 0x54, 0x73, 0x74, 0x61, 0x63, 0x6B,
0x20, 0x45, 0x20, 0x38, 0x3A, 0x34, 0x45, 0x3A, 0x30, 0x36, 0x3A, 0x38, 0x31,
0x3A, 0x41, 0x34, 0x3A, 0x35, 0x30, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0>>
"""
@spec write_extended_inquiry_response(boolean(), binary()) :: HCI.command()
def write_extended_inquiry_response(fec_required?, response) when byte_size(response) <= 240 do
fec_required = HCI.to_bin(fec_required?)
remaining = 240 - byte_size(response)
padding = :binary.copy(<<0x00>>, remaining)
# Write_Extended_Inquiry_Response uses OCF 0x0052 (0x0013 is Write_Local_Name)
@ogf |> HCI.opcode(0x0052) |> HCI.command(fec_required <> response <> padding)
end
@doc """
> This command writes the Inquiry_Mode configuration parameter of the local
> BR/EDR Controller.
iex> write_inquiry_mode(0x00)
<<0x45, 0x0C, 0x01, 0x00>>
"""
@spec write_inquiry_mode(mode :: 0x00 | 0x01 | 0x02) :: HCI.command()
def write_inquiry_mode(mode) when mode <= 0x02,
do: @ogf |> HCI.opcode(0x0045) |> HCI.command(<<mode::8>>)
@doc """
> This command writes the Secure_Connections_Host_Support parameter in
> the BR/EDR Controller.
iex> write_secure_connections_host_support(true)
<<0x7A, 0x0C, 0x01, 0x01>>
iex> write_secure_connections_host_support(false)
<<0x7A, 0x0C, 0x01, 0x00>>
"""
@spec write_secure_connections_host_support(boolean()) :: HCI.command()
def write_secure_connections_host_support(support?),
do: @ogf |> HCI.opcode(0x007A) |> HCI.command([support?])
end
# source: lib/harald/hci/controller_and_baseband.ex
defmodule Identicon do
@moduledoc """
An Identicon is a unique image deterministically generated from a given `input`;
the same input always yields the same image. The file is saved as "`input`.png".
"""
@compile if Mix.env() == :test, do: :export_all
alias Identicon.Image
@doc """
Creates an Identicon in the project directory
## Examples
iex> Identicon.create_identicon("monkey_island")
:ok
iex> Identicon.create_identicon(:booze)
** (FunctionClauseError) no function clause matching in Identicon.create_identicon/1
"""
@spec create_identicon(String.t()) :: :ok
def create_identicon(input) when is_binary(input) do
input
|> hash_input
|> store_hash
|> pick_color
|> build_grid
|> filter_odd_squares
|> build_pixel_map
|> draw_image
|> save_image(input)
end
@spec hash_input(String.t(), atom) :: list
defp hash_input(input, hash_type \\ :md5) when is_atom(hash_type) do
hash_type
|> :crypto.hash(input)
|> :binary.bin_to_list()
end
@spec store_hash(list) :: Image.t()
defp store_hash(hex) do
%Image{hex: hex}
end
@spec pick_color(Image.t()) :: Image.t()
defp pick_color(%Image{hex: [r, g, b | _tail]} = image) do
%Image{image | color: {r, g, b}}
end
@spec build_grid(Image.t()) :: Image.t()
defp build_grid(%Image{hex: hex} = image) do
grid =
hex
# 16 MD5 bytes -> five 3-byte rows; the leftover byte is discarded
|> Enum.chunk_every(3, 3, :discard)
|> Enum.map(&mirror_row/1)
|> List.flatten()
|> Enum.with_index()
%Image{image | grid: grid}
end
@spec mirror_row([integer]) :: [integer]
defp mirror_row(row) do
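# e.g. [145, 46, 200] becomes [145, 46, 200, 46, 145], a symmetric 5-wide row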
[first, second | _tail] = row
row ++ [second, first]
end
@spec filter_odd_squares(Image.t()) :: Image.t()
defp filter_odd_squares(%Image{grid: grid} = image) do
grid =
Enum.filter(grid, fn {value, _index} ->
rem(value, 2) == 0
end)
%Image{image | grid: grid}
end
@spec build_pixel_map(Image.t()) :: Image.t()
defp build_pixel_map(%Image{grid: grid} = image) do
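# Each mirrored row is 5 squares wide and each square is 50x50 px, so index N
# maps to column rem(N, 5) and row div(N, 5) on the 250x250 canvas.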
pixel_map =
Enum.map(grid, fn {_value, index} ->
horizontal = rem(index, 5) * 50
vertical = div(index, 5) * 50
top_left = {horizontal, vertical}
bottom_right = {horizontal + 50, vertical + 50}
{top_left, bottom_right}
end)
%Image{image | pixel_map: pixel_map}
end
@spec draw_image(Image.t()) :: :egd.egd_image()
defp draw_image(%Image{color: color, pixel_map: pixel_map}) do
image = :egd.create(250, 250)
fill = :egd.color(color)
Enum.each(pixel_map, fn {start, stop} ->
:egd.filledRectangle(image, start, stop, fill)
end)
:egd.render(image)
end
@spec save_image(:egd.egd_image(), String.t()) :: :ok
defp save_image(image, input) do
File.write!("#{input}.png", image)
end
end
# source: identicon/lib/identicon.ex
defmodule Dlex.Node do
@moduledoc """
Simple high level API for accessing graphs
## Usage
defmodule Shared do
use Dlex.Node
shared do
field :id, :string, index: ["term"]
field :name, :string, index: ["term"]
end
end
defmodule User do
use Dlex.Node, depends_on: Shared
schema "user" do
field :id, :auto
field :name, :auto
end
end
defmodule User do
use Dlex.Node
schema "user" do
field :id, :auto, depends_on: Shared
field :name, :string, index: ["term"]
field :age, :integer
field :cache, :any, virtual: true
field :owns, :uid
end
end
Dgraph types:
* `:boolean`
* `:datetime`
* `:float`
* `:geo`
* `:integer`
* `:password`
* `:string`
* `:uid`
* `:auto` - special type, which can be used for `depends_on`
## Reflection
Any schema module will generate the `__schema__` function that can be
used for runtime introspection of the schema:
* `__schema__(:source)` - Returns the source as given to `schema/2`;
* `__schema__(:fields)` - Returns a list of all non-virtual field names;
* `__schema__(:alter)` - Returns a generated alter schema
* `__schema__(:field, field)` - Returns the name of field in database for field in a struct and
vice versa;
* `__schema__(:type, field)` - Returns the type of the given non-virtual field;
Additionally it generates `Ecto` compatible `__changeset__` for using with `Ecto.Changeset`.
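For example, given the `User` schema above (a sketch; values are illustrative):

iex> User.__schema__(:source)
"user"
iex> User.__schema__(:fields)
[:id, :name, :age, :owns]
iex> User.__schema__(:type, :age)
:integer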
"""
alias Dlex.Field
defmacro __using__(opts) do
depends_on = Keyword.get(opts, :depends_on, nil)
quote do
@depends_on unquote(depends_on)
import Dlex.Node, only: [shared: 1, schema: 2]
end
end
defmacro schema(name, block) do
prepare = prepare_block(name, block)
postprocess = postprocess()
quote do
unquote(prepare)
unquote(postprocess)
end
end
defmacro shared(block) do
prepare = prepare_block(nil, block)
postprocess = postprocess()
quote do
@depends_on __MODULE__
unquote(prepare)
unquote(postprocess)
end
end
defp prepare_block(name, block) do
quote do
@name unquote(name)
Module.register_attribute(__MODULE__, :fields, accumulate: true)
Module.register_attribute(__MODULE__, :fields_struct, accumulate: true)
Module.register_attribute(__MODULE__, :fields_data, accumulate: true)
Module.register_attribute(__MODULE__, :depends_on_modules, accumulate: true)
import Dlex.Node
unquote(block)
end
end
defp postprocess() do
quote unquote: false do
defstruct [:uid | @fields_struct]
fields = Enum.reverse(@fields)
source = @name
alter = Dlex.Node.__schema_alter___(__MODULE__, source)
def __schema__(:source), do: unquote(source)
def __schema__(:fields), do: unquote(fields)
def __schema__(:alter), do: unquote(Macro.escape(alter))
def __schema__(:depends_on), do: unquote(Dlex.Node.__depends_on_modules__(__MODULE__))
for %Dlex.Field{name: name, type: type} <- @fields_data do
def __schema__(:type, unquote(name)), do: unquote(type)
end
def __schema__(:type, _), do: nil
for %Dlex.Field{name: name, db_name: db_name, type: type} <- @fields_data do
def __schema__(:field, unquote(name)), do: unquote(db_name)
def __schema__(:field, unquote(db_name)), do: {unquote(name), unquote(type)}
end
def __schema__(:field, _), do: nil
changeset = Dlex.Node.__gen_changeset__(@fields_data)
def __changeset__(), do: unquote(Macro.escape(changeset))
end
end
@doc false
def __schema_alter___(module, source) do
preds =
module
|> Module.get_attribute(:fields_data)
|> Enum.flat_map(&List.wrap(&1.alter))
|> Enum.reverse()
type_fields =
module
|> Module.get_attribute(:fields_data)
|> Enum.map(fn fdata ->
%{
"name" => fdata.db_name,
"type" => db_type(fdata.type)
}
end)
type = %{"name" => source, "fields" => type_fields}
%{
"types" => List.wrap(type),
"schema" => preds
}
end
@doc false
def __depends_on_modules__(module) do
depends_on_module = module |> Module.get_attribute(:depends_on) |> List.wrap()
:lists.usort(depends_on_module ++ Module.get_attribute(module, :depends_on_modules))
end
@doc false
def __gen_changeset__(fields) do
for %Dlex.Field{name: name, type: type} <- fields, into: %{}, do: {name, ecto_type(type)}
end
defp ecto_type(:datetime), do: :utc_datetime
defp ecto_type(type), do: type
defmacro field(name, type, opts \\ []) do
quote do
Dlex.Node.__field__(__MODULE__, unquote(name), unquote(type), unquote(opts), @depends_on)
end
end
@doc false
def __field__(module, name, type, opts, depends_on) do
schema_name = Module.get_attribute(module, :name)
Module.put_attribute(module, :fields_struct, {name, opts[:default]})
unless opts[:virtual] do
Module.put_attribute(module, :fields, name)
{db_name, type, alter} = db_field(name, type, opts, schema_name, module, depends_on)
field = %Field{name: name, type: type, db_name: db_name, alter: alter, opts: opts}
Module.put_attribute(module, :fields_data, field)
end
end
defp db_field(name, type, opts, schema_name, module, depends_on) do
if depends_on = opts[:depends_on] || depends_on do
put_attribute_if_not_exists(module, :depends_on_modules, depends_on)
with {:error, error} <- Code.ensure_compiled(depends_on),
do: raise("Module `#{depends_on}` not available, error: #{error}")
field_name = Atom.to_string(name)
if module == depends_on do
{field_name, type, alter_field(field_name, type, opts)}
else
{field_name, depends_on.__schema__(:type, name), nil}
end
else
field_name = "#{schema_name}.#{name}"
{field_name, type, alter_field(field_name, type, opts)}
end
end
defp put_attribute_if_not_exists(module, key, value) do
unless module |> Module.get_attribute(key) |> Enum.member?(value),
do: Module.put_attribute(module, key, value)
end
defp alter_field(field_name, type, opts) do
basic_alter = %{
"predicate" => field_name,
"type" => db_type(type)
}
opts |> Enum.flat_map(&gen_opt(&1, type)) |> Enum.into(basic_alter)
end
@types_mapping [
boolean: "bool",
datetime: "datetime",
float: "float",
geo: "geo",
integer: "int",
password: "password",
string: "string",
uid: "[uid]"
]
for {type, dgraph_type} <- @types_mapping do
defp primitive_type(unquote(type)), do: unquote(dgraph_type)
end
@primitive_types Keyword.keys(@types_mapping)
def primitive_type?(type), do: type in @primitive_types
defp db_type(type) do
if primitive_type?(type), do: primitive_type(type), else: primitive_type(type.type)
end
@ignore_keys [:default, :depends_on]
defp gen_opt({key, _value}, _type) when key in @ignore_keys, do: []
defp gen_opt({:index, true}, type), do: [{"index", true}, {"tokenizer", [db_type(type)]}]
defp gen_opt({:index, tokenizers}, :string) when is_list(tokenizers),
do: [{"index", true}, {"tokenizer", tokenizers}]
defp gen_opt({key, value}, _type), do: [{Atom.to_string(key), value}]
end
# source: lib/dlex/node.ex
defmodule X509.Certificate.Template do
@moduledoc """
Certificate templates.
"""
import X509.Certificate.Extension
defstruct serial: {:random, 8}, validity: 365, hash: :sha256, extensions: []
@type t :: %__MODULE__{
serial: pos_integer() | {:random, pos_integer()},
validity: pos_integer() | X509.Certificate.Validity.t(),
hash: atom(),
extensions: [{atom(), X509.Certificate.Extension.t() | boolean()}]
}
@type named_template :: :root_ca | :ca | :server
@doc """
Returns a template, optionally customized with user-provided validity, hash
and extensions options.
The base template can be selected from a list of built-in named templates,
or as a custom template. The following named templates are supported:
* `:root_ca` - intended for a self-signed root CA.
The default path length constraint is set to 1, meaning the root CA can
be used to issue intermediate CAs, and those CAs can only sign end
certificates. The value can be overridden by passing a custom value
for the `:basic_constraints` extension.
The default validity is 25 years.
* `:ca` - intended for intermediate CA certificates.
The default path length constraint is set to 0, meaning the CA can only
sign end certificates. The value can be overridden by passing a custom
value for the `:basic_constraints` extension (assuming the issuing CA
allows it).
The Extended Key Usage extension is set to TLS server & client. Many
(but not all) TLS implementations will interpret this as a constraint
on the type of certificates the CA is allowed to issue. This constraint
can be removed by setting `:ext_key_usage` to `false`, or by overriding
the value to set the desired constraints.
The default validity is 10 years.
* `:server` - intended for end-certificates.
The Extended Key Usage extension is set to TLS server & client. For other
types of end-certificates, set the `:ext_key_usage` extension to the
desired value. It may be necessary to update the `:key_usage` value as
well.
The default validity is 1 year, plus a 30 day grace period.
All of the above templates generate a random 8 byte (64 bit) serial number,
which can be overridden through the `:serial` option (see below).
The `extensions` attribute of a template is a keyword list of extension
name/value pairs, where the value should typically be an
`X509.Certificate.Extension` record. The `subject_key_identifier` and
`authority_key_identifier` extensions may simply be set to `true`: the
actual values will be calculated during the certificate signing process.
## Options:
* `:hash` - the hash algorithm to use when signing the certificate
* `:serial` - the serial number of the certificate (an integer >0) or
`{:random, n}` to generate an n-byte random serial number
* `:validity` - override the validity period; can be specified as the
number of days (integer) or a `X509.Certificate.Validity` value
* `:extensions` - a keyword list of extensions to be merged into the
template's defaults; set an extension value to `false` to exclude that
extension from the certificate
## Examples:
iex> X509.Certificate.Template.new(:root_ca,
...> hash: :sha512, serial: 1,
...> extensions: [authority_key_identifier: false]
...> )
%X509.Certificate.Template{
extensions: [
basic_constraints: {:Extension, {2, 5, 29, 19}, true,
{:BasicConstraints, true, 1}},
key_usage: {:Extension, {2, 5, 29, 15}, true,
[:digitalSignature, :keyCertSign, :cRLSign]},
subject_key_identifier: true,
authority_key_identifier: false
],
hash: :sha512,
serial: 1,
validity: 9131
}
iex> X509.Certificate.Template.new(:server, extensions: [
...> ext_key_usage: X509.Certificate.Extension.ext_key_usage([:codeSigning])
...> ])
%X509.Certificate.Template{
extensions: [
basic_constraints: {:Extension, {2, 5, 29, 19}, false,
{:BasicConstraints, false, :asn1_NOVALUE}},
key_usage: {:Extension, {2, 5, 29, 15}, true,
[:digitalSignature, :keyEncipherment]},
subject_key_identifier: true,
authority_key_identifier: true,
ext_key_usage: {:Extension, {2, 5, 29, 37}, false,
[{1, 3, 6, 1, 5, 5, 7, 3, 3}]}
],
hash: :sha256,
serial: {:random, 8},
validity: 395
}
"""
@spec new(named_template() | t(), Keyword.t()) :: t()
def new(template, opts \\ [])
def new(:root_ca, opts) do
%__MODULE__{
# 25 years
validity: round(25 * 365.2425),
hash: :sha256,
extensions: [
basic_constraints: basic_constraints(true, 1),
key_usage: key_usage([:digitalSignature, :keyCertSign, :cRLSign]),
subject_key_identifier: true,
authority_key_identifier: true
]
}
|> new(opts)
end
def new(:ca, opts) do
%__MODULE__{
# 10 years
validity: round(10 * 365.2425),
hash: :sha256,
extensions: [
basic_constraints: basic_constraints(true, 0),
key_usage: key_usage([:digitalSignature, :keyCertSign, :cRLSign]),
ext_key_usage: ext_key_usage([:serverAuth, :clientAuth]),
subject_key_identifier: true,
authority_key_identifier: true
]
}
|> new(opts)
end
def new(:server, opts) do
%__MODULE__{
# 1 year, plus a 30 days grace period
validity: 365 + 30,
hash: :sha256,
extensions: [
basic_constraints: basic_constraints(false),
key_usage: key_usage([:digitalSignature, :keyEncipherment]),
ext_key_usage: ext_key_usage([:serverAuth, :clientAuth]),
subject_key_identifier: true,
authority_key_identifier: true
]
}
|> new(opts)
end
def new(template, opts) do
override =
opts
|> Keyword.take([:hash, :serial, :validity])
|> Enum.into(%{})
extensions =
template.extensions
|> Keyword.merge(Keyword.get(opts, :extensions, []))
template
|> Map.merge(override)
|> Map.put(:extensions, extensions)
end
end
# source: lib/x509/certificate/template.ex
defmodule Xandra.Frame do
@moduledoc false
defstruct [
:kind,
:body,
stream_id: 0,
compressor: nil,
tracing: false,
warning: false,
atom_keys?: false
]
use Bitwise
alias Xandra.Protocol
@type kind ::
:startup
| :options
| :query
| :prepare
| :execute
| :register
| :batch
| :auth_response
| :error
| :ready
| :authenticate
| :supported
| :result
| :event
| :auth_success
@type t(kind) :: %__MODULE__{kind: kind}
@type t :: t(kind)
@request_versions %{
Protocol.V3 => 0x03,
Protocol.V4 => 0x04
}
@request_opcodes %{
:startup => 0x01,
:options => 0x05,
:query => 0x07,
:prepare => 0x09,
:execute => 0x0A,
:register => 0x0B,
:batch => 0x0D,
:auth_response => 0x0F
}
@response_versions %{
Protocol.V3 => 0x83,
Protocol.V4 => 0x84
}
@response_opcodes %{
0x00 => :error,
0x02 => :ready,
0x03 => :authenticate,
0x06 => :supported,
0x08 => :result,
0x0C => :event,
0x10 => :auth_success
}
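# A native protocol v3/v4 frame starts with a fixed 9-byte header:
# <<version, flags, stream_id::16, opcode, body_length::32>>
# followed by `body_length` bytes of (possibly compressed) body.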
@spec new(kind, keyword) :: t(kind) when kind: var
def new(kind, options \\ []) do
%__MODULE__{
kind: kind,
compressor: Keyword.get(options, :compressor),
tracing: Keyword.get(options, :tracing, false)
}
end
@spec header_length() :: 9
def header_length(), do: 9
@spec body_length(binary) :: non_neg_integer
def body_length(<<_::5-bytes, length::32>>) do
length
end
@spec encode(t(kind), module) :: iodata
def encode(%__MODULE__{} = frame, protocol_module) when is_atom(protocol_module) do
%{
compressor: compressor,
tracing: tracing?,
kind: kind,
stream_id: stream_id,
body: body
} = frame
body = maybe_compress_body(compressor, body)
[
Map.fetch!(@request_versions, protocol_module),
encode_flags(compressor, tracing?),
<<stream_id::16>>,
Map.fetch!(@request_opcodes, kind),
<<IO.iodata_length(body)::32>>,
body
]
end
@spec decode(binary, binary, module, nil | module) :: t(kind)
def decode(header, body \\ <<>>, protocol_module, compressor \\ nil)
when is_binary(body) and is_atom(compressor) do
<<response_version, flags, _stream_id::16, opcode, _::32>> = header
# For now, raise if the response version doesn't match the requested protocol
# because we don't know how to deal with the mismatch.
assert_response_version_matches_request(response_version, protocol_module)
compression? = flag_set?(flags, _compression = 0x01)
tracing? = flag_set?(flags, _tracing = 0x02)
warning? = flag_set?(flags, _warning? = 0x08)
kind = Map.fetch!(@response_opcodes, opcode)
body = maybe_decompress_body(compression?, compressor, body)
%__MODULE__{
kind: kind,
body: body,
tracing: tracing?,
warning: warning?,
compressor: compressor
}
end
defp assert_response_version_matches_request(response_version, protocol_module) do
case Map.fetch!(@response_versions, protocol_module) do
^response_version ->
:ok
other ->
raise "response version #{inspect(other, base: :hex)} doesn't match the " <>
"requested protocol (#{inspect(protocol_module)})"
end
end
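# Header flag bits: 0x01 marks a compressed body, 0x02 requests tracing, and
# (in responses) 0x08 signals a warning; see the flag_set?/2 checks in decode/4.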
defp encode_flags(_compressor = nil, _tracing? = false), do: 0x00
defp encode_flags(_compressor = nil, _tracing? = true), do: 0x02
defp encode_flags(_compressor = _, _tracing? = false), do: 0x01
defp encode_flags(_compressor = _, _tracing? = true), do: 0x03
defp flag_set?(flags, flag) do
(flags &&& flag) == flag
end
defp maybe_compress_body(_compressor = nil, body), do: body
defp maybe_compress_body(compressor, body), do: compressor.compress(body)
defp maybe_decompress_body(_compression? = true, _compressor = nil, _body) do
raise "received frame was flagged as compressed, but there's no module to decompress"
end
defp maybe_decompress_body(_compression? = true, compressor, body) do
compressor.decompress(body)
end
defp maybe_decompress_body(_compression? = false, _compressor, body) do
body
end
end
# source: lib/xandra/frame.ex
defmodule Gherkin do
@moduledoc """
See `Gherkin.parse/1` for primary usage.
"""
@doc """
Primary helper function for parsing files or streams through `Gherkin`. To use
simply call this function passing in the full text of the file or a file stream.
Example:
%Gherkin.Elements.Feature{scenarios: scenarios} = File.read!("test/features/coffee.feature") |> Gherkin.parse()
# Do something with `scenarios`
# Also supports file streams for larger files (must read by lines, bytes not supported)
%Gherkin.Elements.Feature{scenarios: scenarios} = File.stream!("test/features/coffee.feature") |> Gherkin.parse()
"""
def parse(string_or_stream) do
Gherkin.Parser.parse_feature(string_or_stream)
end
def parse_file(file_name) do
file_name
|> File.read!()
|> Gherkin.Parser.parse_feature(file_name)
end
defmodule Elements do
@moduledoc false
defmodule Feature do
@moduledoc """
Representation of an entire feature. Contains scenarios which are the primary focus of the feature.
"""
defstruct name: "",
description: "",
tags: [],
role: nil,
background_steps: [],
scenarios: [],
line: 0,
file: nil
end
defmodule Scenario do
@moduledoc """
Represents a single scenario within a feature. Contains steps which are the primary focus of the scenario.
"""
defstruct name: "",
tags: [],
steps: [],
line: 0
end
defmodule ScenarioOutline do
@moduledoc """
Represents an outline of a single scenario.
"""
defstruct name: "",
tags: [],
steps: [],
examples: [],
line: 0
end
defmodule Steps do
@moduledoc false
defmodule Given, do: defstruct text: "", table_data: [], doc_string: "", line: 0
defmodule When, do: defstruct text: "", table_data: [], doc_string: "", line: 0
defmodule Then, do: defstruct text: "", table_data: [], doc_string: "", line: 0
defmodule And, do: defstruct text: "", table_data: [], doc_string: "", line: 0
defmodule But, do: defstruct text: "", table_data: [], doc_string: "", line: 0
end
end
@doc """
Changes a `Gherkin.Elements.ScenarioOutline` into multiple `Gherkin.Elements.Scenario`s
so that they may be executed in the same manner.
Given an outline, it's easy to run all scenarios:
outline = %Gherkin.Elements.ScenarioOutline{}
Gherkin.scenarios_for(outline) |> Enum.each(&run_scenario/1)
"""
def scenarios_for(%Elements.ScenarioOutline{name: name, tags: tags, steps: steps, examples: examples, line: line}) do
examples
|> Enum.with_index()
|> Enum.map(fn({example, index}) ->
%Elements.Scenario{
name: name <> " (Example #{index + 1})",
tags: tags,
line: line,
steps: Enum.map(steps, fn(step)->
%{step | text: Enum.reduce(example, step.text, fn({k,v}, t)->
String.replace(t, ~r/<#{k}>/, v)
end)}
end)
}
end)
end
@doc """
Given a `Gherkin.Element.Feature`, changes all `Gherkin.Elements.ScenarioOutline`s
into `Gherkin.Elements.Scenario`s as a flattened list of scenarios.
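For example:

feature = Gherkin.parse_file("test/features/coffee.feature")
%Gherkin.Elements.Feature{scenarios: scenarios} = Gherkin.flatten(feature)
# `scenarios` now holds only `Gherkin.Elements.Scenario` structs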
"""
def flatten(feature = %Gherkin.Elements.Feature{scenarios: scenarios}) do
%{feature | scenarios: scenarios |> Enum.map(fn
scenario = %Gherkin.Elements.Scenario{} -> scenario # Nothing to do
outline = %Gherkin.Elements.ScenarioOutline{} -> scenarios_for(outline)
end) |> List.flatten()}
end
end
# source: deps/gherkin/lib/gherkin/gherkin.ex
defmodule Brook.Event do
@moduledoc """
The `Brook.Event` struct is the basic unit of message written to
and read from the event stream. It encodes the type of event (for
application event handlers to pattern match on), the author (source application),
the creation timestamp of the message, the actual data of the message,
and a boolean detailing if the message was forwarded within the Brook
Server process group.
The data component of the message is an arbitrary Elixir term but is typically
a map or struct.
"""
require Logger
@type data :: term()
@type driver :: term()
@type t :: %__MODULE__{
type: String.t(),
author: Brook.author(),
create_ts: pos_integer(),
data: data(),
forwarded: boolean()
}
@enforce_keys [:type, :author, :data, :create_ts]
defstruct type: nil,
author: nil,
create_ts: nil,
data: nil,
forwarded: false
def new(%{} = data) do
struct!(__MODULE__, Map.put_new(data, :create_ts, now()))
end
def new(args) do
args
|> Map.new()
|> new()
end
@doc """
Takes a `Brook.Event` struct and a function and updates the data value of the struct
based on the outcome of applying the function to the incoming data value. Merges the resulting
data value back into the struct.
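For example:

event = Brook.Event.new(%{type: "count:updated", author: :my_app, data: %{count: 1}})
Brook.Event.update_data(event, fn data -> %{data | count: data.count + 1} end)
# => %Brook.Event{data: %{count: 2}, ...}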
"""
@spec update_data(Brook.Event.t(), (data() -> data())) :: Brook.Event.t()
def update_data(%Brook.Event{data: data} = event, function) when is_function(function, 1) do
%Brook.Event{event | data: function.(data)}
end
@doc """
Send a message to the `Brook.Server` synchronously, passing the term to be encoded into a
`Brook.Event` struct, the authoring application, and the type of event. The event type must
implement the `String.Chars` protocol.
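For example (the instance and author names are illustrative):

Brook.Event.send(:my_instance, "user:created", :my_app, %{id: 1})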
"""
@spec send(Brook.instance(), Brook.event_type(), Brook.author(), Brook.event()) :: :ok | {:error, Brook.reason()}
def send(instance, type, author, event) do
send(instance, type, author, event, Brook.Config.driver(instance))
end
@spec send(Brook.instance(), Brook.event_type(), Brook.author(), Brook.event(), driver()) ::
:ok | {:error, Brook.reason()}
def send(instance, type, author, event, driver) do
brook_event =
Brook.Event.new(
type: type,
author: author,
data: event
)
case Brook.serialize(brook_event) do
{:ok, serialized_event} ->
:ok = apply(driver.module, :send_event, [instance, type, serialized_event])
{:error, reason} = error ->
Logger.error(
"Unable to send event: type(#{type}), author(#{author}), event(#{inspect(event)}), error reason: #{
inspect(reason)
}"
)
error
end
end
@doc """
Process a `Brook.Event` struct via synchronous call to the `Brook.Server`
"""
@spec process(Brook.instance(), Brook.Event.t() | term()) :: :ok | {:error, Brook.reason()}
def process(instance, event) do
registry = Brook.Config.registry(instance)
timeout = Brook.Config.event_processing_timeout(instance)
GenServer.call({:via, Registry, {registry, Brook.Server}}, {:process, event}, timeout)
end
defp now(), do: DateTime.utc_now() |> DateTime.to_unix(:millisecond)
end
# source: lib/brook/event.ex
defmodule Phoenix.HTML.Link do
@moduledoc """
Conveniences for working with links and URLs in HTML.
"""
import Phoenix.HTML.Tag
@doc """
Generates a link to the given URL.
## Examples
link("hello", to: "/world")
#=> <a href="/world">hello</a>
link("hello", to: URI.parse("https://elixir-lang.org"))
#=> <a href="https://elixir-lang.org">hello</a>
link("<hello>", to: "/world")
#=> <a href="/world"><hello></a>
link("<hello>", to: "/world", class: "btn")
#=> <a class="btn" href="/world"><hello></a>
link("delete", to: "/the_world", data: [confirm: "Really?"])
#=> <a data-confirm="Really?" href="/the_world">delete</a>
# If you supply a method other than `:get`:
link("delete", to: "/everything", method: :delete)
#=> <a href="/everything" data-csrf="csrf_token" data-method="delete" data-to="/everything">delete</a>
# You can use a `do ... end` block too:
link to: "/hello" do
"world"
end
## Options
* `:to` - the page to link to. This option is required
* `:method` - the method to use with the link. In case the
method is not `:get`, the link is generated inside the form
which sets the proper information. In order to submit the
form, JavaScript must be enabled
* `:csrf_token` - a custom token to use for links with a method
other than `:get`.
All other options are forwarded to the underlying `<a>` tag.
## JavaScript dependency
In order to support links where `:method` is not `:get` or use the above
data attributes, `Phoenix.HTML` relies on JavaScript. You can load
`priv/static/phoenix_html.js` into your build tool.
### Data attributes
Data attributes are added as a keyword list passed to the `data` key.
The following data attributes are supported:
* `data-confirm` - shows a confirmation prompt before
generating and submitting the form when `:method`
is not `:get`.
### Overriding the default confirm behaviour
`phoenix_html.js` triggers a custom event `phoenix.link.click` on the
clicked DOM element when a click happens. This allows you to intercept the
event on its way bubbling up to `window` and do your own custom logic to
enhance or replace how the `data-confirm` attribute is handled.
You could for example replace the browser's `confirm()` behavior with a
custom JavaScript implementation:
```javascript
// listen on document.body, so it's executed before the default of
// phoenix_html, which is listening on the window object
document.body.addEventListener('phoenix.link.click', function (e) {
// Prevent default implementation
e.stopPropagation();
// Introduce alternative implementation
var message = e.target.getAttribute("data-confirm");
if(!message){ return true; }
vex.dialog.confirm({
message: message,
callback: function (value) {
if (value == false) { e.preventDefault(); }
}
})
}, false);
```
Or you could attach your own custom behavior.
```javascript
window.addEventListener('phoenix.link.click', function (e) {
// Introduce custom behaviour
var message = e.target.getAttribute("data-prompt");
var answer = e.target.getAttribute("data-prompt-answer");
if(message && answer && (answer != window.prompt(message))) {
e.preventDefault();
}
}, false);
```
The latter could also be bound to any `click` event, but this way you can be
sure your custom code is only executed when the code of `phoenix_html.js` is run.
## CSRF Protection
By default, CSRF tokens are generated through `Plug.CSRFProtection`.
"""
@valid_uri_schemes [
"http:",
"https:",
"ftp:",
"ftps:",
"mailto:",
"news:",
"irc:",
"gopher:",
"nntp:",
"feed:",
"telnet:",
"mms:",
"rtsp:",
"svn:",
"tel:",
"fax:",
"xmpp:"
]
def link(text, opts)
def link(opts, do: contents) when is_list(opts) do
link(contents, opts)
end
def link(_text, opts) when not is_list(opts) do
raise ArgumentError, "link/2 requires a keyword list as second argument"
end
def link(text, opts) do
{to, opts} = pop_required_option!(opts, :to, "expected non-nil value for :to in link/2")
to = valid_destination!(to, "link/2")
{method, opts} = Keyword.pop(opts, :method, :get)
if method == :get do
opts = skip_csrf(opts)
content_tag(:a, text, [href: to] ++ opts)
else
{csrf_data, opts} = csrf_data(to, opts)
opts = Keyword.put_new(opts, :rel, "nofollow")
content_tag(:a, text, [data: csrf_data ++ [method: method, to: to], href: to] ++ opts)
end
end
@doc """
Generates a button tag that uses the Javascript function handleClick()
(see phoenix_html.js) to submit the form data.
Useful to ensure that links that change data are not triggered by
search engines and other spidering software.
## Examples
button("hello", to: "/world")
#=> <button class="button" data-csrf="csrf_token" data-method="post" data-to="/world">hello</button>
button("hello", to: "/world", method: :get, class: "btn")
#=> <button class="btn" data-method="get" data-to="/world">hello</button>
## Options
* `:to` - the page to link to. This option is required
* `:method` - the method to use with the button. Defaults to :post.
All other options are forwarded to the underlying button input.
When the `:method` is set to `:get` and the `:to` URL contains query
parameters the generated form element will strip the parameters in accordance
with the [W3C](https://www.w3.org/TR/html401/interact/forms.html#h-17.13.3.4)
form specification.
## Data attributes
Data attributes are added as a keyword list passed to the
`data` key. The following data attributes are supported:
* `data-confirm` - shows a confirmation prompt before generating and
submitting the form.
"""
def button(opts, do: contents) do
button(contents, opts)
end
def button(text, opts) do
{to, opts} = pop_required_option!(opts, :to, "option :to is required in button/2")
{method, opts} = Keyword.pop(opts, :method, :post)
to = valid_destination!(to, "button/2")
if method == :get do
opts = skip_csrf(opts)
content_tag(:button, text, [data: [method: method, to: to]] ++ opts)
else
{csrf_data, opts} = csrf_data(to, opts)
content_tag(:button, text, [data: csrf_data ++ [method: method, to: to]] ++ opts)
end
end
defp skip_csrf(opts) do
Keyword.delete(opts, :csrf_token)
end
defp csrf_data(to, opts) do
case Keyword.pop(opts, :csrf_token, true) do
{csrf, opts} when is_binary(csrf) ->
{[csrf: csrf], opts}
{true, opts} ->
{[csrf: Phoenix.HTML.Tag.csrf_token_value(to)], opts}
{false, opts} ->
{[], opts}
end
end
defp pop_required_option!(opts, key, error_message) do
{value, opts} = Keyword.pop(opts, key)
unless value do
raise ArgumentError, error_message
end
{value, opts}
end
defp valid_destination!(%URI{} = uri, context) do
valid_destination!(URI.to_string(uri), context)
end
defp valid_destination!({:safe, to}, context) do
{:safe, valid_string_destination!(IO.iodata_to_binary(to), context)}
end
defp valid_destination!({other, to}, _context) when is_atom(other) do
[Atom.to_string(other), ?:, to]
end
defp valid_destination!(to, context) do
valid_string_destination!(IO.iodata_to_binary(to), context)
end
for scheme <- @valid_uri_schemes do
defp valid_string_destination!(unquote(scheme) <> _ = string, _context), do: string
end
defp valid_string_destination!(to, context) do
if not match?("/" <> _, to) and String.contains?(to, ":") do
raise ArgumentError, """
unsupported scheme given to #{context}. In case you want to link to an
unknown or unsafe scheme, such as javascript, use a tuple: {:javascript, rest}
"""
else
to
end
end
end
# source: lib/phoenix_html/link.ex
defmodule Raxx.View do
@moduledoc ~S"""
Generate views from `.eex` template files.
Using this module will add the functions `html` and `render` to a module.
To create layouts that can be reused across multiple pages check out `Raxx.View.Layout`.
## Example
# greet.html.eex
<p>Hello, <%= name %></p>
# layout.html.eex
<h1>Greetings</h1>
<%= __content__ %>
# greet.ex
defmodule Greet do
use Raxx.View,
arguments: [:name],
layout: "layout.html.eex"
end
# iex -S mix
Greet.html("Alice")
# => "<h1>Greetings</h1>\n<p>Hello, Alice</p>"
Raxx.response(:ok)
|> Greet.render("Bob")
# => %Raxx.Response{
# status: 200,
# headers: [{"content-type", "text/html"}],
# body: "<h1>Greetings</h1>\n<p>Hello, Bob</p>"
# }
## Options
- **arguments:** A list of atoms for variables used in the template.
This will be the argument list for the html function.
The render function takes one additional argument to this list,
a response struct.
- **template (optional):** The eex file containing a main content template.
If not given the template file will be generated from the file of the calling module.
i.e. `path/to/file.ex` -> `path/to/file.html.eex`
- **layout (optional):** An eex file containing a layout template.
This template can use all the same variables as the main template.
In addition it must include the content using `<%= __content__ %>`
## Safety
### [XSS (Cross Site Scripting) Prevention](https://www.owasp.org/index.php/XSS_(Cross_Site_Scripting)_Prevention_Cheat_Sheet#RULE_.231_-_HTML_Escape_Before_Inserting_Untrusted_Data_into_HTML_Element_Content)
All content interpolated into a view is escaped.
iex> Greet.html("<script>")
# => "<h1>Greetings</h1>\n<p>Hello, <script></p>"
Values in the template can be marked as secure using the `EExHTML.raw/1` function.
*raw is automatically imported to the template scope*.
# greet.html.eex
<p>Hello, <%= raw name %></p>
### JavaScript
> Including untrusted data inside any other JavaScript context is quite dangerous, as it is extremely easy to switch into an execution context with characters including (but not limited to) semi-colon, equals, space, plus, and many more, so use with caution.
[XSS Prevention Cheat Sheet](https://www.owasp.org/index.php/XSS_(Cross_Site_Scripting)_Prevention_Cheat_Sheet#RULE_.233_-_JavaScript_Escape_Before_Inserting_Untrusted_Data_into_JavaScript_Data_Values)
**DONT DO THIS**
```eex
<script type="text/javascript">
console.log('Hello, ' + <%= name %>)
</script>
```
Use `javascript_variables/1` for injecting variables into any JavaScript environment.
"""
defmacro __using__(options) do
{options, []} = Module.eval_quoted(__CALLER__, options)
{arguments, options} = Keyword.pop_first(options, :arguments, [])
{page_template, options} =
Keyword.pop_first(options, :template, Raxx.View.template_for(__CALLER__.file))
page_template = Path.expand(page_template, Path.dirname(__CALLER__.file))
{layout_template, remaining_options} = Keyword.pop_first(options, :layout)
if remaining_options != [] do
keys =
Keyword.keys(remaining_options)
|> Enum.map(&inspect/1)
|> Enum.join(", ")
raise ArgumentError, "Unexpected options for #{inspect(unquote(__MODULE__))}: [#{keys}]"
end
layout_template =
if layout_template do
Path.expand(layout_template, Path.dirname(__CALLER__.file))
end
arguments = Enum.map(arguments, fn a when is_atom(a) -> {a, [line: 1], nil} end)
compiled_page = EEx.compile_file(page_template, engine: EExHTML.Engine)
# This step would not be necessary if the compiler could return a wrapped value.
safe_compiled_page =
quote do
EExHTML.raw(unquote(compiled_page))
end
compiled_layout =
if layout_template do
EEx.compile_file(layout_template, engine: EExHTML.Engine)
else
{:__content__, [], nil}
end
{compiled, has_page?} =
Macro.prewalk(compiled_layout, false, fn
{:__content__, _opts, nil}, _acc ->
{safe_compiled_page, true}
ast, acc ->
{ast, acc}
end)
if !has_page? do
raise ArgumentError, "Layout missing content, add `<%= __content__ %>` to template"
end
quote do
import EExHTML
import unquote(__MODULE__), only: [partial: 2, partial: 3]
if unquote(layout_template) do
@external_resource unquote(layout_template)
@file unquote(layout_template)
end
@external_resource unquote(page_template)
@file unquote(page_template)
def render(request, unquote_splicing(arguments)) do
request
|> Raxx.set_header("content-type", "text/html")
|> Raxx.set_body(html(unquote_splicing(arguments)).data)
end
def html(unquote_splicing(arguments)) do
# NOTE from eex_html >= 0.2.0 the content will already be wrapped as safe.
EExHTML.raw(unquote(compiled))
end
end
end
@doc """
Generate template partials from eex templates.
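For example, assuming a `nav.html.eex` template (hypothetical) in the same directory
as the calling module:

partial(:nav, [:title])
# defines `nav/1`, rendering nav.html.eex with `title` in scope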
"""
defmacro partial(name, arguments, options \\ []) do
{private, options} = Keyword.pop(options, :private, false)
type = if private, do: :defp, else: :def
file = Keyword.get(options, :template, "#{name}.html.eex")
file = Path.expand(file, Path.dirname(__CALLER__.file))
{_, options} = Keyword.pop(options, :engine, false)
options = options ++ [engine: EExHTML.Engine]
quote do
require EEx
EEx.function_from_file(
unquote(type),
unquote(name),
unquote(file),
unquote(arguments),
unquote(options)
)
end
end
@doc false
def template_for(file) do
case String.split(file, ~r/\.ex(s)?$/) do
[path_and_name, ""] ->
path_and_name <> ".html.eex"
_ ->
raise "#{__MODULE__} needs to be used from a `.ex` or `.exs` file"
end
end
end
# source: extensions/raxx_view/lib/raxx/view.ex
defmodule AbsintheErrorPayload.ValidationMessageTypes do
@moduledoc """
This contains absinthe objects used in mutation responses.
To use, import into your Absinthe.Schema files with
```
import_types AbsintheErrorPayload.ValidationMessageTypes
```
## Objects
`:validation_option` holds a key value pair. These values are substitutions to be applied to a validation message template
```elixir
object :validation_option do
field :key, non_null(:string), description: "..."
field :value, non_null(:string), description: "..."
end
```
`:validation_message` contains all fields included in a `AbsintheErrorPayload.ValidationMessage` for maximum flexibility.
This is possibly more information than you wish to supply - in that case, rather than importing this Module,
you can create your own objects and use them. For example, if you only want to supply interpolated messages,
the `:template` and `:options` fields are unnecessary.
```elixir
object :validation_message, description: "..." do
field :field, :string, description: "..."
field :message, :string, description: "..."
field :code, non_null(:string), description: "..."
field :template, :string, description: "..."
field :options, list_of(:validation_option), description: "..."
end
```
Actual descriptions have been omitted for brevity - check the github repo to see them.
"""
use Absinthe.Schema.Notation
object(:validation_option) do
@desc "The name of a variable to be substituted in a validation message template"
field(:key, non_null(:string))
@desc "The value of a variable to be substituted in a validation message template"
field(:value, non_null(:string))
end
@desc """
Validation messages are returned when mutation input does not meet the requirements.
While client-side validation is highly recommended to provide the best User Experience,
All inputs will always be validated server-side.
Some examples of validations are:
* Username must be at least 10 characters
* Email field does not contain an email address
* Birth Date is required
While GraphQL has support for required values, mutation data fields are always
set to optional in our API. This allows 'required field' messages
to be returned in the same manner as other validations. The only exceptions
are id fields, which may be required to perform updates or deletes.
"""
object :validation_message do
@desc """
The input field that the error applies to. The field can be used to
identify which field the error message should be displayed next to in the
presentation layer.
If there are multiple errors to display for a field, multiple validation
messages will be in the result.
This field may be null in cases where an error cannot be applied to a specific field.
"""
field(:field, :string)
@desc """
A friendly error message, appropriate for display to the end user.
The message is interpolated to include the appropriate variables.
Example: `Username must be at least 10 characters`
This message may change without notice, so we do not recommend you match against the text.
Instead, use the *code* field for matching.
"""
field(:message, :string)
@desc "A unique error code for the type of validation used."
field(:code, non_null(:string))
@desc """
A template used to generate the error message, with placeholders for option substitution.
Example: `Username must be at least {count} characters`
This message may change without notice, so we do not recommend you match against the text.
Instead, use the *code* field for matching.
"""
field(:template, :string)
@desc "A list of substitutions to be applied to a validation message template"
field(:options, list_of(:validation_option))
end
end
# source: lib/absinthe_error_payload/validation_message_types.ex
defmodule RateTheDub.Anime do
@moduledoc """
The Anime context.
"""
import Ecto.Query, warn: false
alias RateTheDub.Repo
alias RateTheDub.Anime.AnimeSeries
alias RateTheDub.DubVotes.Vote
@limit 5
@doc """
Returns the list of anime.
## Examples
iex> list_anime()
[%AnimeSeries{}, ...]
"""
def list_anime_series do
Repo.all(AnimeSeries)
end
@doc """
Gets a single anime series.
Raises `Ecto.NoResultsError` if the Anime series does not exist.
## Examples
iex> get_anime_series!(123)
%AnimeSeries{}
iex> get_anime_series!(456)
** (Ecto.NoResultsError)
"""
def get_anime_series!(id), do: Repo.get_by!(AnimeSeries, mal_id: id)
@doc """
Gets a single anime series.
## Examples
iex> get_anime_series(123)
%AnimeSeries{}
iex> get_anime_series(456)
nil
"""
def get_anime_series(id), do: Repo.get_by(AnimeSeries, mal_id: id)
@doc """
Gets a single anime series if it exists or downloads and creates it from Jikan
using the `RateTheDub.Jikan` module.
## Examples
iex> get_or_create_anime_series!(10)
%AnimeSeries{}
iex> get_or_create_anime_series!(-1)
** (Ecto.NoResultsError)
"""
def get_or_create_anime_series!(id) do
case Repo.get_by(AnimeSeries, mal_id: id) do
%AnimeSeries{} = series ->
series
|> Repo.preload(:characters)
nil ->
insert_anime_series_from_jikan(id)
|> Repo.preload(:characters)
_ ->
raise Ecto.NoResultsError, queryable: AnimeSeries
end
end
defp insert_anime_series_from_jikan(id) do
{series, characters, actors, relations} = RateTheDub.Jikan.get_series_everything!(id)
{:ok, series} =
Repo.transaction(fn ->
# Save the series so it can be returned later
# Has to come first due to FK constraints
series = Repo.insert!(series, on_conflict: :nothing)
characters
|> Stream.each(&Repo.insert!(&1, on_conflict: :nothing))
# Insert relation to current series
|> Enum.map(&[anime_id: id, character_id: &1.mal_id])
|> then(&Repo.insert_all("anime_characters", &1, on_conflict: :nothing))
actors
|> Enum.each(&Repo.insert!(&1, on_conflict: :nothing))
Repo.insert_all("character_actors", relations, on_conflict: :nothing)
series
end)
series
end
@doc """
Gets all the featured series in all languages with no limits.
"""
def get_featured() do
AnimeSeries
|> select([a], %{mal_id: a.mal_id, featured_in: a.featured_in})
|> where([a], not is_nil(a.featured_in))
|> Repo.all()
end
@doc """
Gets the top 5 series that are featured in this language.
## Examples
iex> featured_by_lang("en")
[%AnimeSeries{}, ...]
"""
def get_featured_for(lang) do
AnimeSeries
|> where(featured_in: ^lang)
|> limit(@limit)
|> Repo.all()
|> Enum.map(&[&1, RateTheDub.DubVotes.count_votes_for(&1.mal_id, lang)])
end
@doc """
Gets all the top 5 trending series in all languages.
"""
def get_trending() do
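# Votes from the last 30 days, grouped per (series, language) pair. Ordering
# by language keeps each language's rows adjacent so chunk_by/2 below can
# group them, after which we take the top @limit entries per language.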
Vote
|> where([v], v.inserted_at > ^month_ago())
|> select([v], [v.mal_id, v.language, count(v)])
|> group_by([:mal_id, :language])
|> order_by([v], asc: v.language, desc: count(v))
|> Repo.all()
|> Stream.chunk_by(fn [_, lang, _] -> lang end)
|> Enum.flat_map(&Enum.take(&1, @limit))
end
@doc """
Gets the top 5 trending series in this language.
"""
def get_trending_for(lang) do
Vote
|> where(language: ^lang)
|> where([v], v.inserted_at > ^month_ago())
|> select([v], [v.mal_id, count(v)])
|> order_by([v], desc: count(v))
|> group_by(:mal_id)
|> limit(@limit)
|> Repo.all()
|> Enum.map(fn [id, count] -> [RateTheDub.Anime.get_anime_series!(id), count] end)
end
defp month_ago() do
NaiveDateTime.local_now()
|> Date.add(-30)
|> NaiveDateTime.new!(~T[00:00:00])
end
@doc """
Gets the top 5 rated series in all languages and returns them as an array of
rows.
## Examples
iex> get_top_rated()
[[1, "en" 10], [10, "es", 20], ...]
"""
def get_top_rated() do
Vote
|> select([v], [v.mal_id, v.language, count(v)])
|> group_by([:mal_id, :language])
|> order_by(desc: :language)
|> Repo.all()
|> Stream.chunk_by(fn [_, l, _] -> l end)
|> Enum.flat_map(fn lis ->
lis |> Enum.sort_by(&List.last/1) |> Enum.reverse() |> Enum.take(@limit)
end)
end
@doc """
Gets the top 5 series with the most votes for the given language in descending
order.
"""
def get_top_rated_for(lang) do
Vote
|> select([v], [v.mal_id, count(v)])
|> where(language: ^lang)
|> group_by(:mal_id)
|> order_by([v], desc: count(v))
|> limit(@limit)
|> Repo.all()
|> Enum.map(fn [id, count] -> [RateTheDub.Anime.get_anime_series!(id), count] end)
end
@doc """
Creates an anime series.
## Examples
iex> create_anime_series(%{field: value})
{:ok, %AnimeSeries{}}
iex> create_anime_series(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_anime_series(attrs \\ %{}) do
%AnimeSeries{}
|> AnimeSeries.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates an anime series.
## Examples
iex> update_anime_series(anime_series, %{field: new_value})
{:ok, %AnimeSeries{}}
iex> update_anime_series(anime_series, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_anime_series(%AnimeSeries{} = anime_series, attrs) do
anime_series
|> AnimeSeries.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes an anime series.
## Examples
iex> delete_anime_series(anime_series)
{:ok, %AnimeSeries{}}
iex> delete_anime_series(anime_series)
{:error, %Ecto.Changeset{}}
"""
def delete_anime_series(%AnimeSeries{} = anime_series) do
Repo.delete(anime_series)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking anime series changes.
## Examples
iex> change_anime_series(anime_series)
%Ecto.Changeset{data: %AnimeSeries{}}
"""
def change_anime_series(%AnimeSeries{} = anime_series, attrs \\ %{}) do
AnimeSeries.changeset(anime_series, attrs)
end
def character_actor_pairs_for_lang(%AnimeSeries{} = series, lang) do
series
|> Repo.preload(:characters)
|> Map.get(:characters)
|> Stream.map(&{&1, RateTheDub.VoiceActors.actor_for_character_with_lang(&1.mal_id, lang)})
|> Enum.filter(fn {c, a} -> c && a end)
end
end
|
lib/ratethedub/anime.ex
| 0.856242
| 0.498596
|
anime.ex
|
starcoder
|
defmodule ExUnit.ClusteredCase.Cluster do
@moduledoc """
This module is responsible for managing the setup and lifecycle of a single cluster.
"""
use GenServer
require Logger
alias ExUnit.ClusteredCaseError
alias ExUnit.ClusteredCase.Utils
alias __MODULE__.{Partition, PartitionChange}
@type node_spec :: ExUnit.ClusteredCase.Node.node_opts()
@type callback :: {module, atom, [term]} | (() -> term)
@type cluster_opts :: [cluster_opt]
@type cluster_opt ::
{:nodes, [node_spec]}
| {:cluster_size, pos_integer}
| {:partitions, pos_integer | [pos_integer] | [[atom]]}
| {:env, [{String.t(), String.t()}]}
| {:erl_flags, [String.t()]}
| {:config, Keyword.t()}
| {:boot_timeout, pos_integer}
| {:init_timeout, pos_integer}
| {:post_start_functions, [callback]}
| {:stdout, atom | pid}
| {:capture_log, boolean}
defstruct [
:parent,
:pids,
:nodes,
:partitions,
:partition_spec,
:cluster_size,
:env,
:erl_flags,
:config,
:boot_timeout,
:init_timeout,
:post_start_functions
]
@doc """
Starts a new cluster with the given specification
"""
@spec start(cluster_opts) :: {:ok, pid} | {:error, term}
def start(opts), do: start_link(opts, [])
@doc """
Stops a running cluster. Expects the pid of the cluster manager process.
"""
@spec stop(pid) :: :ok
def stop(pid), do: GenServer.call(pid, :terminate, :infinity)
@doc false
@spec reset(pid) :: :ok
def reset(pid), do: GenServer.call(pid, :reset, :infinity)
@doc """
Stops a running node in a cluster. Expects the cluster manager pid and the node to stop.
"""
@spec stop_node(pid, node) :: :ok
def stop_node(pid, node), do: GenServer.call(pid, {:stop_node, node}, :infinity)
@doc """
Kills a running node in a cluster. Expects the cluster manager pid and the node to kill.
"""
@spec kill_node(pid, node) :: :ok
def kill_node(pid, node), do: GenServer.call(pid, {:kill_node, node}, :infinity)
@doc """
Get the captured log for a specific node in the cluster
"""
@spec log(node) :: {:ok, binary}
defdelegate log(node), to: ExUnit.ClusteredCase.Node
@doc """
Retrieve a list of nodes in the given cluster
"""
@spec members(pid) :: [node]
def members(pid), do: GenServer.call(pid, :members, :infinity)
@doc """
Retrieve the name of a random node in the given cluster
"""
@spec random_member(pid) :: node
def random_member(pid) do
Enum.random(members(pid))
end
@doc """
Retrieve the partitions this cluster is composed of
"""
@spec partitions(pid) :: [[node]]
def partitions(pid), do: GenServer.call(pid, :partitions, :infinity)
@doc """
Partition the cluster based on the provided specification.
You can specify partitions in one of the following ways:
- As an integer representing the number of partitions
- As a list of integers representing the number of nodes in each partition
- As a list of lists, where each sub-list contains the nodes in that partition
If your partitioning specification cannot be complied with, an error is returned
## Examples
test "partition by number of partitions", %{cluster: c} do
Cluster.partition(c, 2)
end
test "partition by number of nodes per partition", %{cluster: c} do
Cluster.partition(c, [2, 2])
end
test "partition by list of nodes in each partition", %{cluster: c} do
Cluster.partition(c, [[:a, :b], [:c, :d]])
end
"""
@spec partition(pid, Partition.opts()) :: :ok | {:error, term}
def partition(pid, n) when is_list(n) do
cond do
Enum.all?(n, fn i -> is_integer(i) and i > 0 end) ->
do_partition(pid, n)
Enum.all?(n, fn p -> is_list(p) and Enum.all?(p, fn x -> is_binary(x) or is_atom(x) end) end) ->
do_partition(pid, n)
:else ->
{:error, :invalid_partition_spec}
end
end
def partition(pid, n) when is_integer(n) and n > 0 do
do_partition(pid, n)
end
defp do_partition(pid, spec) do
GenServer.call(pid, {:partition, spec}, :infinity)
end
@doc """
Repartitions the cluster based on the provided specification.
See `partition/2` for specification details.
Repartitioning performs the minimal set of changes required to
converge on the partitioning scheme in an attempt to minimize the
amount of churn. That said, some churn is expected, so bear that in
mind when writing tests with partitioning events involved.
"""
@spec repartition(pid, Partition.opts()) :: :ok | {:error, term}
def repartition(pid, n), do: partition(pid, n)
@doc """
Heals all partitions in the cluster.
"""
@spec heal(pid) :: :ok
def heal(pid) do
GenServer.call(pid, :heal, :infinity)
end
@doc """
Invoke a function on a specific member of the cluster
"""
@spec call(node, callback) :: term | {:error, term}
defdelegate call(node, callback), to: ExUnit.ClusteredCase.Node
@doc """
Invoke a function on a specific member of the cluster
"""
@spec call(node, module, atom, [term]) :: term | {:error, term}
defdelegate call(node, m, f, a), to: ExUnit.ClusteredCase.Node
@doc """
Applies a function on all nodes in the cluster.
"""
@spec each(pid, callback) :: :ok | {:error, term}
def each(pid, callback) when is_function(callback, 0) do
do_each(pid, callback)
end
@doc """
Applies a function on all nodes in the cluster.
"""
@spec each(pid, module, atom, [term]) :: :ok | {:error, term}
def each(pid, m, f, a) when is_atom(m) and is_atom(f) and is_list(a) do
do_each(pid, {m, f, a})
end
defp do_each(pid, callback) do
do_call(pid, callback, collect: false)
end
@doc """
Maps a function across all nodes in the cluster.
Returns a list of results, where each element is the result from one node.
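
## Example

A minimal sketch, assuming `cluster` is the pid of a running cluster:

    node_names = Cluster.map(cluster, fn -> Node.self() end)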
"""
@spec map(pid, callback) :: [term] | {:error, term}
def map(pid, fun) when is_function(fun, 0) do
do_map(pid, fun)
end
@doc """
Maps a function across all nodes in the cluster.
Returns a list of results, where each element is the result from one node.
"""
@spec map(pid, module, atom, [term]) :: [term] | {:error, term}
def map(pid, m, f, a) when is_atom(m) and is_atom(f) and is_list(a) do
do_map(pid, {m, f, a})
end
defp do_map(pid, callback) do
[results] = do_call(pid, callback)
results
end
# Function for running functions against nodes in the cluster
# Provides options for tweaking the behavior of such calls
defp do_call(pid, fun, opts \\ [])
defp do_call(pid, {m, f, a} = mfa, opts) when is_atom(m) and is_atom(f) and is_list(a) do
do_call(pid, [mfa], opts)
end
defp do_call(pid, fun, opts) when is_function(fun, 0) do
do_call(pid, [fun], opts)
end
defp do_call(pid, funs, opts) when is_list(funs) do
unless Enum.all?(funs, &valid_callback?/1) do
raise ArgumentError, "expected list of valid callback functions, got: #{inspect(funs)}"
end
nodes = members(pid)
parallel? = Keyword.get(opts, :parallel, true)
collect? = Keyword.get(opts, :collect, true)
if parallel? do
async_call_all(nodes, funs, collect?)
else
sync_call_all(nodes, funs, collect?)
end
catch
:throw, err ->
err
end
defp valid_callback?({m, f, a}) when is_atom(m) and is_atom(f) and is_list(a), do: true
defp valid_callback?(fun) when is_function(fun, 0), do: true
defp valid_callback?(_), do: false
## Server Implementation
@doc false
def child_spec([_config, opts] = args) do
%{
id: Keyword.get(opts, :name, __MODULE__),
type: :worker,
start: {__MODULE__, :start_link, args}
}
end
@doc false
def start_link(config, opts \\ []) do
case Keyword.get(opts, :name) do
nil ->
GenServer.start_link(__MODULE__, [config, self()])
name ->
GenServer.start_link(__MODULE__, [config, self()], name: name)
end
end
@doc false
def init([opts, parent]) do
Process.flag(:trap_exit, true)
cluster_size = Keyword.get(opts, :cluster_size)
custom_nodes = Keyword.get(opts, :nodes)
nodes =
cond do
is_nil(cluster_size) and is_nil(custom_nodes) ->
raise ClusteredCaseError,
"you must provide either :cluster_size or :nodes when starting a cluster"
is_nil(custom_nodes) ->
generate_nodes(cluster_size, opts)
:else ->
decorate_nodes(custom_nodes, opts)
end
case init_nodes(nodes) do
{:stop, _} = err ->
err
{:ok, results} ->
state = to_cluster_state(parent, nodes, opts, results)
case state.partition_spec do
{:error, _} = err ->
{:stop, err}
partition_spec ->
change = Partition.partition(nodenames(state), nil, partition_spec)
PartitionChange.execute!(change)
{:ok, %{state | :partitions => change.partitions}}
end
end
end
defp init_nodes(nodes) do
cluster_start_timeout = get_cluster_start_timeout(nodes)
results =
nodes
|> Enum.map(&start_node_async/1)
|> Enum.map(&await_node_start(&1, cluster_start_timeout))
|> Enum.map(&link_node_manager/1)
if Enum.any?(results, &startup_failed?/1) do
terminate_started(results)
{:stop, {:cluster_start, failed_nodes(results)}}
else
{:ok, results}
end
end
defp partition_cluster(%{partition_spec: spec} = state, new_spec) do
change = Partition.partition(nodenames(state), spec, new_spec)
PartitionChange.execute!(change)
%{state | :partitions => change.partitions, :partition_spec => new_spec}
end
defp heal_cluster(state) do
nodes = nodenames(state)
Enum.each(nodes, &ExUnit.ClusteredCase.Node.connect(&1, nodes -- [&1]))
%{state | :partitions => nil, :partition_spec => nil}
end
def handle_call(:partitions, _from, %{partitions: partitions} = state) do
{:reply, partitions || [nodenames(state)], state}
end
def handle_call({:partition, opts}, _from, state) do
spec = Partition.new(nodenames(state), opts)
{:reply, :ok, partition_cluster(state, spec)}
end
def handle_call(:heal, _from, state) do
{:reply, :ok, heal_cluster(state)}
end
def handle_call(:members, _from, state) do
{:reply, nodenames(state), state}
end
def handle_call(:reset, from, %{pids: pidmap, nodes: nodes} = state) do
# Find killed/stopped nodes and restart them
dead_nodes = for {node, nil} <- pidmap, do: node
dead =
nodes
|> Enum.filter(fn n -> Enum.member?(dead_nodes, n[:name]) end)
case init_nodes(dead) do
{:stop, reason} ->
GenServer.reply(from, {:error, reason})
{:stop, reason, state}
{:ok, results} ->
started =
for {name, {:ok, pid}} <- results, into: %{} do
{name, pid}
end
{:reply, :ok, %{state | pids: Map.merge(pidmap, started)}}
end
end
def handle_call(:terminate, from, state) do
Enum.each(nodepids(state), &ExUnit.ClusteredCase.Node.stop/1)
GenServer.reply(from, :ok)
{:stop, :shutdown, state}
end
def handle_call({:stop_node, node}, _from, %{pids: pidmap} = state) do
{node_pid, pidmap} = Map.get_and_update!(pidmap, node, fn pid -> {pid, nil} end)
ExUnit.ClusteredCase.Node.stop(node_pid)
{:reply, :ok, %{state | pids: pidmap}}
end
def handle_call({:kill_node, node}, _from, %{pids: pidmap} = state) do
{node_pid, pidmap} = Map.get_and_update!(pidmap, node, fn pid -> {pid, nil} end)
ExUnit.ClusteredCase.Node.kill(node_pid)
{:reply, :ok, %{state | pids: pidmap}}
end
def handle_info({:EXIT, parent, reason}, %{parent: parent} = state) do
{:stop, reason, state}
end
def handle_info({:EXIT, _task, :normal}, state) do
{:noreply, state}
end
def handle_info({:EXIT, task, reason}, state) do
Logger.warn("Task #{inspect(task)} failed with reason: #{inspect(reason)}")
{:noreply, state}
end
## Private
defp generate_nodes(cluster_size, opts) when is_integer(cluster_size) do
nodes = for _ <- 1..cluster_size, do: [name: Utils.generate_name()]
decorate_nodes(nodes, opts)
end
defp decorate_nodes(nodes, opts) do
for n <- nodes do
name = Keyword.get(n, :name)
global_env = Keyword.get(opts, :env, [])
global_flags = Keyword.get(opts, :erl_flags, [])
global_config = Keyword.get(opts, :config, [])
global_psf = Keyword.get(opts, :post_start_functions, [])
decorated_node = [
name: Utils.nodename(name),
env: Keyword.merge(global_env, Keyword.get(n, :env, [])),
erl_flags: Keyword.merge(global_flags, Keyword.get(n, :erl_flags, [])),
config: Mix.Config.merge(global_config, Keyword.get(n, :config, [])),
boot_timeout: Keyword.get(n, :boot_timeout, Keyword.get(opts, :boot_timeout)),
init_timeout: Keyword.get(n, :init_timeout, Keyword.get(opts, :init_timeout)),
post_start_functions: global_psf ++ Keyword.get(n, :post_start_functions, []),
stdout: Keyword.get(n, :stdout, Keyword.get(opts, :stdout, false)),
capture_log: Keyword.get(n, :capture_log, Keyword.get(opts, :capture_log, false))
]
# Strip out any nil or empty options
Enum.reduce(decorated_node, decorated_node, fn
{key, nil}, acc ->
Keyword.delete(acc, key)
{key, []}, acc ->
Keyword.delete(acc, key)
{_key, _val}, acc ->
acc
end)
end
end
defp nodenames(%{pids: pidmap}) do
for {name, pid} <- pidmap, pid != nil, do: name
end
defp nodepids(%{pids: pids}) do
for {_name, pid} <- pids, pid != nil, do: pid
end
defp sync_call_all(nodes, funs, collect?),
do: sync_call_all(nodes, funs, collect?, [])
defp sync_call_all(_nodes, [], _collect?, acc), do: Enum.reverse(acc)
defp sync_call_all(nodes, [fun | funs], collect?, acc) do
# Run function on each node sequentially
if collect? do
results =
nodes
|> Enum.map(&ExUnit.ClusteredCase.Node.call(&1, fun, collect: collect?))
sync_call_all(nodes, funs, collect?, [results | acc])
else
for n <- nodes do
case ExUnit.ClusteredCase.Node.call(n, fun, collect: collect?) do
{:error, reason} ->
throw(reason)
_ ->
:ok
end
end
sync_call_all(nodes, funs, collect?, acc)
end
end
defp async_call_all(nodes, funs, collect?),
do: async_call_all(nodes, funs, collect?, [])
defp async_call_all(_nodes, [], _collect?, acc), do: Enum.reverse(acc)
defp async_call_all(nodes, [fun | funs], collect?, acc) do
# Invoke function on all nodes
results =
nodes
|> Enum.map(
&Task.async(fn -> ExUnit.ClusteredCase.Node.call(&1, fun, collect: collect?) end)
)
|> await_all(collect: collect?)
# Move on to next function
async_call_all(nodes, funs, collect?, [results | acc])
end
defp await_all(tasks, opts),
do: await_all(tasks, Keyword.get(opts, :collect, true), [])
defp await_all([], true, acc), do: Enum.reverse(acc)
defp await_all([], false, acc), do: acc
defp await_all([t | tasks] = retry_tasks, collect?, acc) do
case Task.yield(t) do
{:ok, result} when collect? ->
await_all(tasks, collect?, [result | acc])
{:ok, {:error, reason}} ->
throw(reason)
{:ok, _} ->
await_all(tasks, collect?, :ok)
nil ->
await_all(retry_tasks, collect?, acc)
end
end
defp to_cluster_state(parent, nodes, opts, results) do
pidmap =
for {name, {:ok, pid}} <- results, into: %{} do
{name, pid}
end
nodelist = for {name, _} <- results, do: name
%__MODULE__{
parent: parent,
pids: pidmap,
nodes: nodes,
partition_spec: Partition.new(nodelist, Keyword.get(opts, :partitions)),
cluster_size: Keyword.get(opts, :cluster_size),
env: Keyword.get(opts, :env),
erl_flags: Keyword.get(opts, :erl_flags),
config: Keyword.get(opts, :config),
boot_timeout: Keyword.get(opts, :boot_timeout),
init_timeout: Keyword.get(opts, :init_timeout),
post_start_functions: Keyword.get(opts, :post_start_functions)
}
end
defp start_node_async(node_opts) do
name = Keyword.fetch!(node_opts, :name)
{name, Task.async(fn -> ExUnit.ClusteredCase.Node.start_nolink(node_opts) end)}
end
defp await_node_start({nodename, task}, cluster_start_timeout) do
{nodename, Task.await(task, cluster_start_timeout)}
end
defp link_node_manager({_nodename, {:ok, pid}} = result) do
Process.link(pid)
result
end
defp link_node_manager({_nodename, _err} = result), do: result
defp startup_failed?({_nodename, {:ok, _}}), do: false
defp startup_failed?({_nodename, _err}), do: true
defp terminate_started([]), do: :ok
defp terminate_started([{_nodename, {:ok, pid}} | rest]) do
ExUnit.ClusteredCase.Node.stop(pid)
terminate_started(rest)
end
defp terminate_started([{_nodename, _err} | rest]) do
terminate_started(rest)
end
defp failed_nodes(results) do
Enum.reject(results, fn
{_nodename, {:ok, _}} -> true
_ -> false
end)
end
defp get_cluster_start_timeout(nodes) when is_list(nodes) do
get_cluster_start_timeout(nodes, 10_000)
end
defp get_cluster_start_timeout([], timeout), do: timeout
defp get_cluster_start_timeout([node_opts | rest], timeout) do
boot = Keyword.get(node_opts, :boot_timeout, 2_000)
init = Keyword.get(node_opts, :init_timeout, 10_000)
total = boot + init
get_cluster_start_timeout(rest, max(total, timeout))
end
end
|
lib/cluster.ex
| 0.871242
| 0.531088
|
cluster.ex
|
starcoder
|
defmodule BitPal.Backend.Flowee.Connection do
@moduledoc """
This is the low-level API to flowee.
Allows connecting to a Flowee server and communicating with it. This module
handles the packet-based API with Flowee, and handles serialization and
deserialization of the binary format. The binary format essentially consists
of a list of key-value tuples, where keys are integers with pre-defined
meaning, and values are of any of a few supported data types (integers, strings
binaries or floats).
The responsibilities of this module ends where these tuples are to be interpreted
as a higher-level data structure, with meanings specific to some part of Flowee.
This is instead handled by the Protocol module.
Call connect/3 to create a connection, then send/2 and recv/1 to send and receive
messages.
Messages are described by the RawMsg struct, and to distinguish between binaries
and strings, binaries are wrapped in the Binary struct (since strings are binaries
in Elixir).
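
## Example

A minimal sketch of a ping round-trip, assuming a Flowee node is reachable on
the default host and port and answers a ping with a pong:

    conn = Connection.connect(BitPal.TCPClient)
    Connection.send(conn, %Connection.RawMsg{ping: true})
    %Connection.RawMsg{pong: true} = Connection.recv(conn)
    Connection.close(conn)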
"""
use Bitwise
alias BitPal.Backend.Flowee.Connection
require Logger
# Struct for the connection itself. Create using "connect".
defstruct client: nil, data: nil
defmodule Binary do
@moduledoc """
Binary value used to distinguish strings from binary values in Elixir.
"""
defstruct data: <<>>
def to_string(binary) do
  # Structs don't implement the Access behaviour, so use dot syntax
  binary.data
end
def to_binary(data) do
%Binary{data: data}
end
end
defmodule RawMsg do
@moduledoc """
Raw message to/from Flowee. Holds a deserialized message, including header and data.
"""
# Note: seq_start and last are only used internally.
defstruct service: nil,
message: nil,
ping: false,
pong: false,
seq_start: nil,
last: nil,
data: []
end
# Tags used in the header
@header_end 0
@header_service_id 1
@header_message_id 2
@header_sequence_start 3
@header_last_in_sequence 4
@header_ping 5
@header_pong 6
@doc """
Connect to a Flowee host using the supplied tcp_client (default is BitPal.TCPClient for plain TCP connections).
Returns a connection object that can be passed to send and recv in here.
"""
def connect(tcp_client, host \\ {127, 0, 0, 1}, port \\ 1235) do
# Would be nice if we could get a packet in little endian mode. Now, we need to handle that ourselves...
{:ok, c} = tcp_client.connect(host, port, [:binary, {:packet, 0}, {:active, false}])
%Connection{client: tcp_client, data: c}
end
@doc """
Close the connection.
"""
def close(connection) do
connection.client.close(connection.data)
end
@doc """
Send a raw message.
"""
def send(connection, msg) do
# IO.puts("sent: #{inspect(serialize(msg), limit: :infinity)}")
send_packet(connection, serialize(msg))
end
@doc """
Receive a high-level message. We will parse the header here since we need to merge long messages, etc.
Returns a RawMsg with the appropriate fields set.
"""
def recv(connection) do
data = recv_packet(connection)
# IO.puts("received: #{inspect(data, limit: :infinity)}")
{header, rem} = parse_header(data)
recv(connection, header, rem)
end
# Internal helper for receiving messages.
defp recv(connection, header, data) do
if header.last == false do
# More data... Ignore the next header mostly.
{new_header, more_data} = parse_header(recv_packet(connection))
# Note: It might be important to check the header here since there might be other messages
# that are interleaved with chained messages. The docs do not state if this is a
# possibility, but from a quick glance at the code, I don't think so.
recv(connection, %{header | last: new_header.last}, data <> more_data)
else
# Last packet! Either header.last == true or header.last == nil
%{header | data: deserialize(data)}
end
end
# Send a message (a binary)
defp send_packet(connection, message) do
size = byte_size(message) + 2
size_msg = <<rem(size, 256), div(size, 256)>>
connection.client.send(connection.data, size_msg <> message)
end
# Receive a packet.
defp recv_packet(connection) do
case connection.client.recv(connection.data, 2) do
{:ok, <<size_low, size_high>>} ->
size = size_high * 256 + size_low
{:ok, data} = connection.client.recv(connection.data, size - 2)
data
{:error, msg} ->
msg
end
end
# Low-level serialization/deserialization.
# Constants for the protocol.
@tag_positive 0
@tag_negative 1
@tag_string 2
@tag_byte_array 3
@tag_true 4
@tag_false 5
@tag_double 6
defp serialize(key, val) when is_integer(val) and val >= 0 do
encode_token_header(key, @tag_positive) <> encode_int(val)
end
defp serialize(key, val) when is_integer(val) and val < 0 do
  # Negative integers get the negative tag and are encoded by magnitude
  encode_token_header(key, @tag_negative) <> encode_int(-val)
end
defp serialize(key, val) when is_binary(val) do
encode_token_header(key, @tag_string) <> encode_int(byte_size(val)) <> val
end
defp serialize(key, %Binary{data: data}) do
encode_token_header(key, @tag_byte_array) <> encode_int(byte_size(data)) <> data
end
defp serialize(key, val) when val == true do
encode_token_header(key, @tag_true)
end
defp serialize(key, val) when val == false do
encode_token_header(key, @tag_false)
end
defp serialize(key, val) when is_float(val) do
# Should be exactly 8 bytes, little endian "native double"
encode_token_header(key, @tag_double) <> <<val::little-float>>
end
# Serialize a sequence of {key, val} tuples.
defp serialize_data(message) do
case message do
[{key, val} | rest] -> serialize(key, val) <> serialize_data(rest)
[] -> <<>>
end
end
# Serialize a header into tuples (including the data)
defp msg_to_tuples(%RawMsg{service: svc, message: msg, ping: ping, pong: pong, data: data}) do
# End of header
result = [{@header_end, true} | data]
# Ping/pong?
result = if pong, do: [{@header_pong, true} | result], else: result
result = if ping, do: [{@header_ping, true} | result], else: result
# Message id?
result = if msg != nil, do: [{@header_message_id, msg} | result], else: result
# Service id?
result = if svc != nil, do: [{@header_service_id, svc} | result], else: result
result
end
# Serialize an entire RawMsg
defp serialize(rawMsg) do
serialize_data(msg_to_tuples(rawMsg))
end
defp encode_token_header(key, type) do
if key < 31 do
<<key <<< 3 ||| type>>
else
# This case is unclear in the spec...
<<31 <<< 3 ||| type>> <> encode_int(key)
end
end
def encode_int(value) do
encode_int1(value, false)
end
defp encode_int1(value, mark) do
here = (value &&& 0x7F) ||| if mark, do: 0x80, else: 0x00
if value < 0x80 do
<<here>>
else
prev = encode_int1((value >>> 7) - 1, true)
prev <> <<here>>
end
end
# Parse only the data part of the header. Returns { header, remaining data }
defp parse_header(data) do
parse_header(data, %RawMsg{})
end
defp parse_header(data, header) do
case decode_tuple(data) do
{remaining, {@header_end, _}} ->
# Done!
{header, remaining}
{remaining, {@header_service_id, svc}} ->
parse_header(remaining, %{header | service: svc})
{remaining, {@header_message_id, msg}} ->
parse_header(remaining, %{header | message: msg})
{remaining, {@header_sequence_start, s}} ->
parse_header(remaining, %{header | seq_start: s})
{remaining, {@header_last_in_sequence, l}} ->
parse_header(remaining, %{header | last: l})
{remaining, {@header_ping, p}} ->
parse_header(remaining, %{header | ping: p})
{remaining, {@header_pong, p}} ->
parse_header(remaining, %{header | pong: p})
end
end
defp deserialize(data) do
if byte_size(data) > 0 do
{rem, tuple} = decode_tuple(data)
[tuple | deserialize(rem)]
else
[]
end
end
# Decode a single tuple. Returns { remaining, { key, data } }
defp decode_tuple(data) do
{data, key, tag} = decode_token_header(data)
cond do
tag == @tag_positive ->
{rem, val} = decode_int(data)
{rem, {key, val}}
tag == @tag_negative ->
{rem, val} = decode_int(data)
{rem, {key, -val}}
tag == @tag_string ->
{rem, len} = decode_int(data)
<<str::binary-size(len), rest::binary>> = rem
{rest, {key, str}}
tag == @tag_byte_array ->
{rem, len} = decode_int(data)
<<str::binary-size(len), rest::binary>> = rem
{rest, {key, %Binary{data: str}}}
tag == @tag_true ->
{data, {key, true}}
tag == @tag_false ->
{data, {key, false}}
tag == @tag_double ->
<<v::little-float, rest::binary>> = data
{rest, {key, v}}
true ->
Logger.warn("""
Unmatched tag in Flowee connection
tag: #{inspect(tag)}
data: #{inspect(data)}
""")
end
end
# Returns { remaining data, key, tag }
defp decode_token_header(message) do
<<first, rest::binary>> = message
key = (first &&& 0xF8) >>> 3
tag = first &&& 0x7
if key == 31 do
{rest, key} = decode_int(rest)
{rest, key, tag}
else
{rest, key, tag}
end
end
# Decode an integer value. Returns { remaining data, value }
def decode_int(message) do
decode_int1(message, -1)
end
defp decode_int1(message, prev_val) do
<<first, rest::binary>> = message
value = (prev_val + 1) <<< 7 ||| (first &&& 0x7F)
if first >= 0x80 do
decode_int1(rest, value)
else
{rest, value}
end
end
end
|
lib/bitpal/backends/flowee/connection.ex
| 0.844794
| 0.632616
|
connection.ex
|
starcoder
|
defmodule Benchee.Conversion.Format do
@moduledoc """
Functions for formatting values and their unit labels. Different domains
handle this task differently, for example durations and counts.
See `Benchee.Conversion.Count` and `Benchee.Conversion.Duration` for examples.
"""
alias Benchee.Conversion.Unit
@doc """
Formats a number as a string, with a unit label. See `Benchee.Conversion.Count`
and `Benchee.Conversion.Duration` for examples
"""
@callback format(number) :: String.t()
# Generic formatting functions
@doc """
Formats a unit value with specified label and separator
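
For example:

    iex> Benchee.Conversion.Format.format(1.0, "KB", " ")
    "1 KB"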
"""
def format(count, label, separator) do
separator = separator(label, separator)
"#{number_format(count)}#{separator}#{label}"
end
defp number_format(count) do
count
|> :erlang.float_to_list(decimals: float_precision(count))
|> to_string
end
@doc """
Formats a unit value in the domain described by `module`. The module should
provide a `units/0` function that returns a Map like
%{ :unit_name => %Benchee.Conversion.Unit{ ... } }
Additionally, `module` may specify a `separator/0` function, which provides a
custom separator string that will appear between the value and label in the
formatted output. If no `separator/0` function exists, the default separator
(a single space) will be used.
iex> Benchee.Conversion.Format.format({1.0, :kilobyte}, Benchee.Conversion.Memory)
"1 KB"
"""
def format({count, unit = %Unit{}}) do
format(count, label(unit), separator())
end
def format({count, unit = %Unit{}}, _module) do
format({count, unit})
end
def format({count, unit_atom}, module) do
format({count, module.unit_for(unit_atom)})
end
def format(number, module) do
number
|> module.scale()
|> format
end
@default_separator " "
# should we need it again, a custom separator could be returned
# per module here
defp separator do
@default_separator
end
# Returns the separator, or an empty string if there isn't a label
defp separator(label, _separator) when label == "" or label == nil, do: ""
defp separator(_label, separator), do: separator
# Fetches the label for the given unit
defp label(%Unit{label: label}) do
label
end
defp float_precision(float) when trunc(float) == float, do: 0
defp float_precision(float) when float < 0.01, do: 5
defp float_precision(float) when float < 0.1, do: 4
defp float_precision(float) when float < 0.2, do: 3
defp float_precision(_float), do: 2
end
|
lib/benchee/conversion/format.ex
| 0.905943
| 0.776453
|
format.ex
|
starcoder
|
defmodule Controller.Server do
use GenServer.Behaviour
@doc """
A record for storing the state of the controller
"""
defrecord ControllerState,
  # the module that defines map() and reduce()
  module: nil,
  # the keys to mapper_input_tbl
  mapper_input_keys: [],
  # the keys to reducer_input_tbl
  reducer_input_keys: [],
  # the total number of mappers
  num_mappers: nil,
  # the number of mappers that have finished
  num_mappers_done: 0,
  # the total number of reducers
  num_reducers: nil,
  # the number of reducers that have finished
  num_reducers_done: 0
@doc """
Convenience function to start the controller gen_server
"""
def start_link(path) do
:gen_server.start_link({:local, :controller}, __MODULE__, path, [])
end
@doc """
Init function.
"""
def init(_path) do
num_mappers = MapReduce.Config.num_mappers
num_reducers = MapReduce.Config.num_reducers
{:ok, ControllerState.new(num_mappers: num_mappers,
num_reducers: num_reducers)}
end
@doc """
Handle {:start, module_path, input_path}.
`module_path` is the path to an .ex or .erl file which defines a
module with at least two functions: `map(key, value)` and
`reduce(key, values)`. You may optionally define a function
`combine(key_value_pairs)`, which will be used to combine all
key-value pairs emitted by mappers and combine them into a
list of {key, values}.
`input_path` is the path to a directory which contains a set of
input files. All files under this directory will eventually be
read
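
A minimal sketch of such a module (a hypothetical word-count job):

    defmodule WordCount do
      def map(_key, value) do
        Enum.map(String.split(value), fn(word) -> {word, 1} end)
      end

      def reduce(_key, values) do
        length(values)
      end
    end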
"""
def handle_cast({:start, module_path, input_path}, state) do
# Load module and get a list of input files
[{module, _}] = Code.load_file(module_path)
{:ok, filenames} = :file.list_dir(input_path)
filenames = lc fname inlist filenames do
# absolute path to the input file
# TODO: what if input_path doesn't end with '/'?
input_path <> fname
end
state = state.module(module)
state = state.mapper_input_keys(filenames)
# Start the mapper supervision tree
{:ok, supervisor_pid} = Mapper.Supervisor.start_link(length(filenames))
true = :erlang.register(:mapper_supervisor, supervisor_pid)
{:noreply, state}
end
def handle_cast({:mapper_ready, pid}, state) do
if state.mapper_input_keys == [] do
{:noreply, state}
else
[fname | rest] = state.mapper_input_keys
:gen_server.cast(pid, {:key, fname})
{:noreply, state.mapper_input_keys(rest)}
end
end
def handle_cast(:mapper_done, state) do
  new_state = state.update_num_mappers_done(&1 + 1)
  new_state =
    if new_state.num_mappers_done == new_state.num_mappers do
      # All map jobs are done, now we do:
      # 1. shut down the mapper supervision tree
      # 2. combine results returned by mappers
      # 3. split the combined results and store them in ets
      # 4. start the reducer supervision tree
      true = :erlang.exit(:mapper_supervisor, :shutdown)
      new_state = new_state.reducer_input_keys(combine_and_split(new_state.num_reducers))
      {:ok, supervisor_pid} = Reducer.Supervisor.start_link(new_state.num_reducers)
      true = :erlang.register(:reducer_supervisor, supervisor_pid)
      new_state
    else
      new_state
    end
  {:noreply, new_state}
end
def handle_cast({:reducer_ready, pid}, state) do
if state.reducer_input_keys == [] do
{:noreply, state}
else
[k | rest] = state.reducer_input_keys
:gen_server.cast(pid, {:key, k})
{:noreply, state.reducer_input_keys(rest)}
end
end
def handle_cast(:reducer_done, state) do
new_state = state.update_num_reducers_done(&1 + 1)
if new_state.num_reducers_done == new_state.num_reducers do
# All reduce jobs are done, now we do:
# 1. shut down the reducer supervision tree
true = :erlang.exit(:reducer_supervisor, :shutdown)
end
{:noreply, new_state}
end
def combine_and_split(num_reducers) do
lst = List.flatten(:ets.lookup(:mapper_table, :output))
dict = HashDict.new
List.foldl(lst, dict, fn({key, value}, acc) ->
HashDict.update(acc, key, [value], [value | &1])
end)
combined_lst = HashDict.to_list(dict)
# Split the combined results into the number of reducers
split_to_multiple(combined_lst, num_reducers)
end
defp split_to_multiple(lst, n) when n == 0 or n == 1 do
:ets.insert(:reducer_input_tbl, {n, lst})
[n]
end
defp split_to_multiple(lst, n) do
len = round(length(lst) / n)
{sub_lst, lst} = Enum.split(lst, len)
:ets.insert(:reducer_input_tbl, {n, sub_lst})
[n | split_to_multiple(lst, n-1)]
end
end
|
lib/controller/controller_server.ex
| 0.592902
| 0.523359
|
controller_server.ex
|
starcoder
|
if Code.ensure_loaded?(Plug) do
defmodule Zachaeus.Plug do
@moduledoc """
Provides functions and a behaviour for dealing with Zachaeus in a Plug environment.
You can use the following functions to build plugs with your own behaviour.
To fulfill the behaviour, the `build_response` callback needs to be implemented within your custom plug.
The usual functions you would use in your plug are:
### `fetch_license(conn)`
Try to get a signed license passed from the HTTP authorization request header.
When an error occurs, the error is forwarded, in order to be handled within the `build_response` function.
```elixir
{conn, {:ok, "lzcAxWfls4hDHs8fHwJu53AWsxX08KYpxGUwq4qsc..."}} = Zachaeus.Plug.fetch_license(conn)
```
### `verify_license({conn, signed_license})`
Verifies a signed license with the `public_key` stored in your configuration environment.
When an error occurs, the error is forwarded, in order to be handled within the `build_response` function.
```elixir
{conn, {:ok, %License{}}} = Zachaeus.Plug.verify_license({conn, {:ok, "lzcAxWfls4hDHs8fHwJu53AWsxX08KYpxGUwq4qsc..."}})
```
### `validate_license({conn, license})`
Validates an already verified license whether it is still valid.
When an error occurs, the error is forwarded, in order to be handled within the `build_response` function.
```elixir
{conn, {:ok, %License{...}}} = Zachaeus.Plug.validate_license({conn, {:ok, %License{...}}})
```
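
### Putting it together

A minimal sketch of a custom plug built from these functions (`MyApp.LicensePlug`
is a hypothetical module name):

```elixir
defmodule MyApp.LicensePlug do
  use Zachaeus.Plug

  def init(opts), do: opts

  def call(conn, _opts) do
    conn
    |> fetch_license()
    |> verify_license()
    |> validate_license()
    |> build_response()
  end

  def build_response({conn, {:ok, %License{}}}), do: conn

  def build_response({conn, {:error, %Error{message: message}}}) do
    conn
    |> send_resp(401, message)
    |> halt()
  end
end
```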
"""
alias Zachaeus.{License, Error}
import Plug.Conn
## -- PLUG MACRO
defmacro __using__(_opts) do
quote do
alias Zachaeus.{License, Error}
import Zachaeus.Plug
import Plug.Conn
@behaviour Plug
@behaviour Zachaeus.Plug
end
end
## -- PLUG BEHAVIOUR
@doc """
Respond whether the license is still valid or has already expired.
This callback is meant to implement your own logic, e.g. rendering a template, returning some JSON or just plain text.
## Example
conn = Zachaeus.Plug.build_response({conn, {:ok, %License{...}}})
"""
@callback build_response({Plug.Conn.t(), {:ok, License.t()} | {:error, Error.t()}}) :: Plug.Conn.t()
## -- PLUG FUNCTIONS
@doc """
Fetches a signed license which is passed via the `Authorization` HTTP request header as a Bearer Token.
When no valid signed license is found, the function returns a corresponding error.
## HTTP header example
Authorization: Bearer lzcAxWfls4hDHs8fHwJu53AWsxX08KYpxGUwq4qsc...
## Example
{conn, {:ok, "lzcAxWfls4hDHs8fHwJu53AWsxX08KYpxGUwq4qsc..."}} = Zachaeus.Plug.fetch_license(conn)
"""
@spec fetch_license(Plug.Conn.t()) :: {Plug.Conn.t(), {:ok, License.signed()} | {:error, Error.t()}}
def fetch_license(conn) do
case get_req_header(conn, "authorization") do
["Bearer " <> signed_license | _] when is_binary(signed_license) and byte_size(signed_license) > 0 ->
{conn, {:ok, signed_license}}
_license_not_found_in_request ->
{conn, {:error, %Error{code: :extraction_failed, message: "Unable to extract license from the HTTP Authorization request header"}}}
end
end
@doc """
Verifies that a signed license is valid and has not been tampered with.
When no signed license could be retrieved by the `fetch_license` function, it forwards this error.
## Example
{conn, {:ok, %License{}}} = Zachaeus.Plug.verify_license({conn, {:ok, "lzcAxWfls4hDHs8fHwJu53AWsxX08KYpxGUwq4qsc..."}})
"""
@spec verify_license({Plug.Conn.t(), {:ok, License.signed()} | {:error, Error.t()}}) :: {Plug.Conn.t(), {:ok, License.t()} | {:error, Error.t()}}
def verify_license({conn, {:ok, signed_license}}) when is_binary(signed_license) and byte_size(signed_license) > 0 do
case Zachaeus.verify(signed_license) do
{:ok, %License{identifier: identifier, plan: plan}} = result ->
conn =
conn
|> put_private(:zachaeus_identifier, identifier)
|> put_private(:zachaeus_plan, plan)
{conn, result}
{:error, %Error{}} = error ->
{conn, error}
_unknown_error ->
{conn, {:error, %Error{code: :verification_failed, message: "Unable to verify the license due to an unknown error"}}}
end
end
def verify_license({conn, {:error, %Error{}} = error}),
do: {conn, error}
def verify_license({conn, _invalid_signed_license_or_error}),
do: {conn, {:error, %Error{code: :verification_failed, message: "Unable to verify the license due to an invalid type"}}}
@doc """
Validates a license, checking that it has not expired.
When the license could not be verified by `verify_license` it forwards this error.
## Example
{conn, {:ok, %License{...}}} = Zachaeus.Plug.validate_license({conn, {:ok, %License{...}}})
"""
@spec validate_license({Plug.Conn.t(), {:ok, License.t()} | {:error, Error.t()}}) :: {Plug.Conn.t(), {:ok, License.t()} | {:error, Error.t()}}
def validate_license({conn, {:ok, %License{} = license} = result}) do
case License.validate(license) do
{:ok, remaining_seconds} ->
conn = conn
|> put_private(:zachaeus_remaining_seconds, remaining_seconds)
{conn, result}
{:error, %Error{}} = error ->
{conn, error}
_unknown_error ->
{conn, {:error, %Error{code: :validation_failed, message: "Unable to validate license due to an unknown error"}}}
end
end
def validate_license({conn, {:error, %Error{}} = error}),
do: {conn, error}
def validate_license({conn, _invalid_license_or_error}),
do: {conn, {:error, %Error{code: :validation_failed, message: "Unable to validate license due to an invalid type"}}}
## -- PLUG INFORMATION FUNCTIONS
@doc """
Get the identifier assigned with the license.
## Example
"user_1" = zachaeus_identifier(conn)
"""
@spec zachaeus_identifier(Plug.Conn.t()) :: String.t() | nil
def zachaeus_identifier(conn), do: conn.private[:zachaeus_identifier]
@doc """
Get the plan assigned with the license.
## Example
"standard_plan" = zachaeus_plan(conn)
"""
@spec zachaeus_plan(Plug.Conn.t()) :: String.t() | nil
def zachaeus_plan(conn), do: conn.private[:zachaeus_plan]
@doc """
Get the remaining seconds of the license.
## Example
17436373 = zachaeus_remaining_seconds(conn)
"""
@spec zachaeus_remaining_seconds(Plug.Conn.t()) :: integer() | nil
def zachaeus_remaining_seconds(conn), do: conn.private[:zachaeus_remaining_seconds]
end
end
|
lib/zachaeus/plug.ex
| 0.835953
| 0.80837
|
plug.ex
|
starcoder
|
defmodule DockerBuild.Plugins do
@moduledoc """
A behaviour for a plugin system allowing functionality to be extended when building the docker image.
By implementing the optional callbacks the docker file can be changed at various points.
All callbacks are optional.
## Creating a plugin
1. Create module e.g. `MyProject.MyPlugin`
2. Add `use DockerBuild.Plugins`
3. Implement the required callbacks to modify the docker file or `.dockerignore`
4. To fetch a plugin config value with the plugin callback use `plugin_config(context, key)` where
`context` is either the `config` or `df` parameter passed to the callback.
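
For example, a minimal sketch of a plugin (`MyProject.MyPlugin` is a
hypothetical name) that only adds entries to `.dockerignore`:

    defmodule MyProject.MyPlugin do
      use DockerBuild.Plugins

      # Exclude docs from the docker build context (illustrative path)
      def extra_dockerignore(_config), do: ["/docs"]
    end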
"""
@typedoc "The dockerfile"
@type df() :: %DockerBuild.Dockerfile{}
@typedoc "The dockerfile config"
@type config() :: %DockerBuild.Config{}
@doc """
Invoked when creating the .dockerignore file.
A list of additional lines can be returned, which are added to the file.
"""
@callback extra_dockerignore(config) :: [String.t()]
@doc """
Invoked to discover the command to use for compiling assets.
Only one plugin should be used that implements this function.
"""
@callback assets_compile_command(config) :: [String.t()]
@doc "Invoked before copying assets into the docker image"
@callback before_assets_copy(df) :: df
@doc "Invoked before getting mix dependencies"
@callback before_deps_get(df) :: df
@optional_callbacks extra_dockerignore: 1,
assets_compile_command: 1,
before_assets_copy: 1,
before_deps_get: 1
defmacro __using__(_opts) do
quote do
import DockerBuild.Dockerfile
alias DockerBuild.Config
@behaviour DockerBuild.Plugins
@doc false
def extra_dockerignore(_config), do: []
@doc false
def assets_compile_command(_config), do: nil
@doc false
def before_deps_get(df), do: df
@doc false
def before_assets_copy(df), do: df
@doc false
def plugin_config(context, key), do: Config.plugin_config(context, __MODULE__, key)
defoverridable DockerBuild.Plugins
end
end
end
|
lib/docker_build/plugins.ex
| 0.835148
| 0.402833
|
plugins.ex
|
starcoder
|
defmodule Resin do
@moduledoc """
Introduce a configurable delay to all requests in a plug pipeline.
Usage
use Resin
This will introduce a delay to all incoming requests. This delay defaults to
3000 ms, but can be configured by the `enterpriseyness` option, like so:
use Resin, enterpriseyness: 4711
It can also be configured with a range of integers, to allow some
variation in how long the delay will be:
use Resin, enterpriseyness: 1_000 .. 3_000
Another option is to configure it with a pattern of delays. This is
done by configuring the `enterpriseyness` option with an array.
use Resin, enterpriseyness: [ 0, 0, 0, 1_000 ]
Resin will cycle through this array as it processes requests, so the
result of the above example is that every 4th request will have an
added delay of 1000 ms.
This can also be done with an array of ranges, or an array that
combines integers and ranges, like so:
use Resin, enterpriseyness: [ 0, 100 .. 200, 1_000, 300 .. 400 ]
When running with `MIX_ENV=prod`, Resin will do nothing, but instead just edit
itself out of your AST. See docs on `Resin.__using__/1` for more info on that.
"""
@behaviour Plug
@default_options [enterpriseyness: 3_000]
def init(options \\ []) do
forecast_level =
@default_options
|> Keyword.merge(options)
|> Keyword.get(:enterpriseyness)
|> List.wrap
{:ok, agent} = PerformanceForecast.init(forecast_level)
[forecast: agent]
end
def call(conn, options) do
agent = Keyword.get(options, :forecast)
agent
|> PerformanceForecast.pop()
|> enterpriseyness()
|> :timer.sleep()
conn
end
defp enterpriseyness(min .. max) when max < min,
do: enterpriseyness(max .. min)
# Inclusive of both endpoints; also avoids :rand.uniform(0) when min == max
defp enterpriseyness(min .. max),
  do: min + :rand.uniform(max - min + 1) - 1
defp enterpriseyness(level),
do: level
@doc """
Pour some resin in your plug pipeline, by `use`ing this module.
use Resin
Resin will insert itself into your pipeline, unless you compiled your project
with `MIX_ENV=prod`, and add a configurable delay to every request.
The default delay is set to 3000 ms.
If you want a shorter (or longer (really?)) delay, you can use the
`enterpriseyness` option to set the configured delay in milliseconds, like so:
use Resin, enterpriseyness: 4711
When compiling with `MIX_ENV=prod`, this macro will do nothing. You only want
to display the enterpriseyness during your demo, right?
"""
defmacro __using__(opts \\ []) do
unless Mix.env == :prod do
quote bind_quoted: [opts: opts] do
plug Resin, opts
end
end
end
end
|
lib/resin.ex
| 0.86267
| 0.455804
|
resin.ex
|
starcoder
|
defmodule Valkyrie do
@moduledoc """
Main business logic for Valkyrie.
Validating and transforming the payload to conform to the provided dataset schema
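
## Example

A minimal sketch, assuming string field names in the schema (the dataset
construction is illustrative):

    schema = [%{name: "id", type: "integer"}, %{name: "name", type: "string"}]
    dataset = %SmartCity.Dataset{technical: %{schema: schema}}
    Valkyrie.standardize_data(dataset, %{"id" => "1", "name" => " Bob "})
    #=> {:ok, %{"id" => 1, "name" => "Bob"}}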
"""
alias SmartCity.Dataset
def instance_name(), do: :valkyrie_brook
@type reason :: %{String.t() => term()}
@spec standardize_data(Dataset.t(), map()) :: {:ok, map()} | {:error, reason()}
def standardize_data(%Dataset{technical: %{schema: schema}}, payload) do
%{data: data, errors: errors} = standardize_schema(schema, payload)
case Enum.empty?(errors) do
true -> {:ok, data}
false -> {:error, errors}
end
end
defp standardize_schema(schema, payload) do
schema
|> Enum.reduce(%{data: %{}, errors: %{}}, fn %{name: name} = field, acc ->
case standardize(field, payload[name]) do
{:ok, value} -> %{acc | data: Map.put(acc.data, name, value)}
{:error, reason} -> %{acc | errors: Map.put(acc.errors, name, reason)}
end
end)
end
defp standardize(_field, nil), do: {:ok, nil}
defp standardize(%{type: "string"}, value) do
{:ok, value |> to_string() |> String.trim()}
rescue
Protocol.UndefinedError -> {:error, :invalid_string}
end
defp standardize(_, ""), do: {:ok, nil}
defp standardize(%{type: type}, value) when type in ["integer", "long"] and is_integer(value), do: {:ok, value}
defp standardize(%{type: type}, value) when type in ["integer", "long"] do
case Integer.parse(value) do
{parsed_value, ""} -> {:ok, parsed_value}
_ -> {:error, :"invalid_#{type}"}
end
end
defp standardize(%{type: "boolean"}, value) when is_boolean(value), do: {:ok, value}
defp standardize(%{type: "boolean"}, value) do
case value do
"true" -> {:ok, true}
"false" -> {:ok, false}
_ -> {:error, :invalid_boolean}
end
end
defp standardize(%{type: type}, value) when type in ["float", "double"] and (is_integer(value) or is_float(value)),
do: {:ok, value / 1}
defp standardize(%{type: type}, value) when type in ["float", "double"] do
case Float.parse(value) do
{parsed_value, ""} -> {:ok, parsed_value}
_ -> {:error, :"invalid_#{type}"}
end
end
defp standardize(%{type: type, format: format}, value) when type in ["date", "timestamp"] do
case Timex.parse(value, format) do
{:ok, parsed_value} -> {:ok, parsed_value}
{:error, reason} -> {:error, {:"invalid_#{type}", reason}}
end
end
defp standardize(%{type: "json"}, value) do
case Jason.encode(value) do
{:ok, result} -> {:ok, result}
_ -> {:error, :invalid_json}
end
end
defp standardize(%{type: "map"}, value) when not is_map(value), do: {:error, :invalid_map}
defp standardize(%{type: "map", subSchema: sub_schema}, value) do
%{data: data, errors: errors} = standardize_schema(sub_schema, value)
case Enum.empty?(errors) do
true -> {:ok, data}
false -> {:error, errors}
end
end
defp standardize(%{type: "list"}, value) when not is_list(value), do: {:error, :invalid_list}
defp standardize(%{type: "list"} = field, value) do
case standardize_list(field, value) do
{:ok, reversed_list} -> {:ok, Enum.reverse(reversed_list)}
{:error, reason} -> {:error, {:invalid_list, reason}}
end
end
defp standardize(_, _) do
{:error, :invalid_type}
end
defp standardize_list(%{itemType: item_type} = field, value) do
value
|> Enum.with_index()
|> Enum.reduce_while({:ok, []}, fn {item, index}, {:ok, acc} ->
case standardize(%{type: item_type, subSchema: field[:subSchema]}, item) do
{:ok, new_value} -> {:cont, {:ok, [new_value | acc]}}
{:error, reason} -> {:halt, {:error, "#{inspect(reason)} at index #{index}"}}
end
end)
end
end
|
apps/valkyrie/lib/valkyrie.ex
| 0.776792
| 0.570361
|
valkyrie.ex
|
starcoder
|
defmodule Commands.StrCommands do
alias Interp.Functions
alias Commands.ListCommands
alias Commands.GeneralCommands
require Interp.Functions
use Memoize
@doc """
Replace at the given index. Replaces the element found in a at index c and replaces it with b.
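
For example:

    replace_at("abc", "x", 1)
    #=> "axc"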
"""
def replace_at(a, b, c) when Functions.is_iterable(a) and Functions.is_iterable(b) and Functions.is_iterable(c) do
Enum.reduce(Enum.to_list(Stream.zip([b, c])), a,
fn ({new, index}, acc) -> acc |> Stream.with_index |> Stream.map(fn {element, curr_index} -> if curr_index == index do new else element end end)
end)
end
def replace_at(a, b, c) when Functions.is_iterable(a) and Functions.is_iterable(b), do: a |> Stream.with_index |> Stream.map(fn {element, curr_index} -> if curr_index == c do b else element end end)
def replace_at(a, b, c) when Functions.is_iterable(a) and Functions.is_iterable(c), do: Enum.reduce(c, a, fn (index, acc) -> replace_at(acc, b, index) end)
def replace_at(a, b, c) when Functions.is_iterable(a), do: a |> Stream.with_index |> Stream.map(fn {element, curr_index} -> if GeneralCommands.equals(curr_index, c) do b else element end end)
def replace_at(a, b, c), do: String.graphemes(to_string(a)) |> replace_at(b, c) |> Enum.join("")
@doc """
Infinite replacement method. When the first element ('a') is an iterable, it maps the replace_infinite method over each
element of 'a'. An alternative non-vectorizing version (although not infinitely replaced) is the transliteration method.
## Parameters
- a: The value in which the replacements will happen.
- b: The from value(s) for the replacement pair(s).
- c: The to value(s) for the replacement pair(s).
## Returns
Returns the element 'a' with each replacement pair infinitely replaced.
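
## Example

    replace_infinite("aaab", "ab", "b")
    #=> "b"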
"""
def replace_infinite(a, b, c) when Functions.is_single?(a) and Functions.is_single?(b) and Functions.is_single?(c) do
a = to_string(a)
b = to_string(b)
c = to_string(c)
replace_infinite(String.replace(a, b, c), b, c, a)
end
def replace_infinite(a, b, c) when Functions.is_single?(a) and Functions.is_iterable(b) and Functions.is_iterable(c), do: Enum.reduce(Stream.zip(b, c), a, fn ({from, to}, acc) -> replace_infinite(acc, from, to) end)
def replace_infinite(a, b, c) when Functions.is_single?(a) and Functions.is_iterable(b) and Functions.is_single?(c) do (case Enum.reduce(b, a, fn (from, acc) -> replace_infinite(acc, from, c) end) do; ^a -> a; x -> replace_infinite(x, b, c) end) end
def replace_infinite(a, b, c) when Functions.is_single?(a) and Functions.is_single?(b) and Functions.is_iterable(c), do: Enum.reduce(c, a, fn (to, acc) -> replace_infinite(acc, b, to) end)
def replace_infinite(a, b, c) when Functions.is_iterable(a), do: a |> Stream.map(fn x -> replace_infinite(x, b, c) end)
defp replace_infinite(a, b, c, acc) do (case String.replace(a, b, c) do; ^acc -> acc; x -> replace_infinite(x, b, c, a) end) end
def replace_all(a, b, c) when Functions.is_single?(a) and Functions.is_single?(b) and Functions.is_single?(c) do
a = to_string(a)
b = to_string(b)
c = to_string(c)
String.replace(a, b, c)
end
def replace_all(a, b, c) when Functions.is_single?(a) and Functions.is_iterable(b) and Functions.is_iterable(c), do: Enum.reduce(Stream.zip(b, c), a, fn ({from, to}, acc) -> replace_all(acc, from, to) end)
def replace_all(a, b, c) when Functions.is_single?(a) and Functions.is_iterable(b) and Functions.is_single?(c), do: Enum.reduce(b, a, fn (from, acc) -> replace_all(acc, from, c) end)
def replace_all(a, b, c) when Functions.is_single?(a) and Functions.is_single?(b) and Functions.is_iterable(c), do: Enum.reduce(c, a, fn (to, acc) -> replace_all(acc, b, to) end)
def replace_all(a, b, c) when Functions.is_iterable(a), do: a |> Stream.map(fn x -> replace_all(x, b, c) end)
def replace_first(a, b, c) when Functions.is_iterable(a) and Functions.is_single?(b) and Functions.is_single?(c) do
case a |> ListCommands.index_in(b) do
-1 -> a
index -> a |> replace_at(c, index)
end
end
def replace_first(a, b, c) when Functions.is_single?(a) and Functions.is_single?(b) and Functions.is_single?(c) do
case String.split(to_string(a), to_string(b)) do
[left, right | remaining] -> left <> to_string(c) <> Enum.join([right | remaining], to_string(b))
_ -> a
end
end
def replace_first(a, b, c) when Functions.is_iterable(b) and Functions.is_iterable(c), do: Enum.reduce(Stream.zip(b, c), a, fn ({from, to}, acc) -> replace_first(acc, from, to) end)
def replace_first(a, b, c) when Functions.is_iterable(b) and Functions.is_single?(c), do: Enum.reduce(b, a, fn (from, acc) -> replace_first(acc, from, c) end)
def replace_first(a, b, c) when Functions.is_single?(b) and Functions.is_iterable(c), do: Enum.reduce(c, a, fn (to, acc) -> replace_first(acc, b, to) end)
@doc """
Computes the Levenshtein distance between two lists of characters using the following recursive formula:
lev([], b) = length(b)
lev(a, []) = length(a)
lev(a, b) = min(lev(a - 1, b) + 1, lev(a, b - 1) + 1, lev(a - 1, b - 1) + (a[0] == b[0]))
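
For example, using lists of graphemes:

    levenshtein_distance(String.graphemes("kitten"), String.graphemes("sitting"))
    #=> 3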
"""
defmemo levenshtein_distance([], b), do: length(b)
defmemo levenshtein_distance(a, []), do: length(a)
defmemo levenshtein_distance([a | as], [b | bs]) do
min(levenshtein_distance(as, [b | bs]) + 1, min(levenshtein_distance([a | as], bs) + 1, levenshtein_distance(as, bs) + (if GeneralCommands.equals(a, b) do 0 else 1 end)))
end
def squarify(list) do
list = Enum.to_list(list)
max_length = list |> Enum.map(fn x -> String.length(to_string(x)) end) |> Enum.max
list |> Enum.map(fn x -> to_string(x) <> String.duplicate(" ", max_length - String.length(to_string(x))) end)
end
def align_center(list, focus) do
list = Enum.to_list(list)
max_length = list |> Enum.map(fn x -> String.length(to_string(x)) end) |> Enum.max
result = case focus do
:left -> list |> Enum.map(fn x -> String.duplicate(" ", round(Float.floor((max_length - String.length(to_string(x))) / 2))) <> to_string(x) end)
:right -> list |> Enum.map(fn x -> String.duplicate(" ", round(Float.ceil((max_length - String.length(to_string(x))) / 2))) <> to_string(x) end)
end
result |> Enum.join("\n")
end
def overlap(left, right) when not Functions.is_iterable(left), do: overlap(String.graphemes(to_string(left)), right)
def overlap(left, right) when not Functions.is_iterable(right), do: overlap(left, String.graphemes(to_string(right)))
def overlap(left, right), do: overlap(Enum.to_list(left), Enum.to_list(right), "")
defp overlap([], [], acc), do: acc
defp overlap([], right_remaining, acc), do: acc <> Enum.join(right_remaining, "")
defp overlap([head | left_remaining], [head | right_remaining], acc), do: overlap(left_remaining, right_remaining, acc <> head)
defp overlap([_ | left_remaining], right_remaining, acc), do: overlap(left_remaining, right_remaining, acc <> " ")
def title_case(string), do: title_case(string, "")
defp title_case("", parsed), do: parsed
defp title_case(string, parsed) do
cond do
Regex.match?(~r/^[a-zA-Z]/, string) ->
matches = Regex.named_captures(~r/^(?<string>[a-zA-Z]+)(?<remaining>.*)/s, string)
title_case(matches["remaining"], parsed <> String.capitalize(matches["string"]))
true ->
matches = Regex.named_captures(~r/^(?<string>[^a-zA-Z]+)(?<remaining>.*)/s, string)
title_case(matches["remaining"], parsed <> matches["string"])
end
end
def switch_case(string), do: switch_case(String.graphemes(string), []) |> Enum.join("")
defp switch_case([], parsed), do: parsed |> Enum.reverse
defp switch_case([char | remaining], parsed) do
cond do
Regex.match?(~r/^[a-z]$/, char) -> switch_case(remaining, [String.upcase(char) | parsed])
Regex.match?(~r/^[A-Z]$/, char) -> switch_case(remaining, [String.downcase(char) | parsed])
true -> switch_case(remaining, [char | parsed])
end
end
def sentence_case(string), do: sentence_case(string, "")
defp sentence_case("", parsed), do: parsed
defp sentence_case(string, parsed) do
cond do
Regex.match?(~r/^[a-zA-Z]/, string) ->
matches = Regex.named_captures(~r/^(?<string>[a-zA-Z].+?)(?<remaining>(\.|!|\?|$).*)/s, string)
sentence_case(matches["remaining"], parsed <> String.capitalize(String.slice(matches["string"], 0..0)) <> String.slice(matches["string"], 1..-1))
true ->
matches = Regex.named_captures(~r/^(?<string>.)(?<remaining>.*)/s, string)
sentence_case(matches["remaining"], parsed <> matches["string"])
end
end
def keep_letters(string) when is_bitstring(string), do: keep_letters(String.graphemes(string)) |> Enum.join("")
def keep_letters(list) do
list |> Stream.filter(fn x -> Regex.match?(~r/^[A-Za-z]+$/, to_string(x)) end)
end
def keep_digits(string) when is_bitstring(string), do: keep_digits(String.graphemes(string)) |> Enum.join("")
def keep_digits(list) do
list |> Stream.filter(fn x -> Regex.match?(~r/^[0-9]+$/, to_string(x)) end)
end
def keep_chars(string, chars) when is_bitstring(string) and is_bitstring(chars), do: keep_chars(String.graphemes(string), String.graphemes(chars)) |> Enum.join("")
def keep_chars(string, chars) when is_bitstring(string), do: keep_chars(String.graphemes(string), chars) |> Enum.join("")
def keep_chars(list, chars) when is_bitstring(chars) do
list |> Stream.filter(fn x -> GeneralCommands.equals(x, chars) end)
end
def keep_chars(list, chars) do
list |> Stream.filter(fn x -> ListCommands.contains(chars, x) end)
end
def to_codepoints(value) when Functions.is_iterable(value), do: value |> Stream.map(
fn x ->
if not Functions.is_iterable(x) and String.length(to_string(x)) == 1 do
hd(to_codepoints(to_string(x)))
else
to_codepoints(to_string(x))
end
end)
def to_codepoints(value), do: String.to_charlist(to_string(value))
@doc """
Transliterates the given string with the given transliteration set. For example, transliterating "abcd" with "bdg" → "qrs" would
transliterate the following in the initial string:
"b" → "q"
"d" → "r"
"g" → "s"
The first match in the transliteration set is the transliteration that is executed. Therefore "abcd" results in "aqcr" after transliteration.
## Parameters
- string/list: The string or list that needs to be transliterated.
- from_chars: The from characters either as a single element or as a list.
- to_chars: The characters to which the initial characters will be mapped to, either as a single element or a list.
## Returns
The transliterated string or list depending on the initial type of the first parameter.
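
## Example

    transliterate("abcd", "bdg", "qrs")
    #=> "aqcr"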
"""
def transliterate(string, from_chars, to_chars) when Functions.is_single?(string), do: Enum.join(transliterate(String.graphemes(to_string(string)), from_chars, to_chars), "")
def transliterate(list, from_chars, to_chars) when Functions.is_single?(from_chars), do: transliterate(list, String.graphemes(to_string(from_chars)), to_chars)
def transliterate(list, from_chars, to_chars) when Functions.is_single?(to_chars), do: transliterate(list, from_chars, String.graphemes(to_string(to_chars)))
def transliterate(list, from_chars, to_chars) do
transliteration_pairs = Stream.zip(from_chars, to_chars)
list |> Stream.map(fn x ->
case ListCommands.first_where(transliteration_pairs, fn {a, _} -> GeneralCommands.equals(a, x) end) do
nil -> x
{_, b} -> b
end
end)
end
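# Example (module name assumed from the file name):
#
#   StrCommands.transliterate("abcd", "bdg", "qrs")
#   # => "aqcr"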
def vertical_mirror(string) when is_bitstring(string), do: Enum.join(vertical_mirror(String.split(string, "\n")), "\n")
def vertical_mirror(list) do
list ++ (list |> Enum.to_list |> Enum.reverse |> Enum.map(fn x -> x |> transliterate("\\/", "/\\") end))
end
def mirror(list) when Functions.is_iterable(list) do
list |> Stream.map(fn x -> if Functions.is_iterable(x) do x ++ (x |> Enum.to_list |> Enum.reverse |> transliterate("<>{}()[]\\/", "><}{)(][/\\")) else mirror(x) end end)
end
def mirror(string) do
string = to_string(string)
cond do
String.contains?(string, "\n") -> Enum.join(mirror(String.split(string, "\n")), "\n")
true -> string <> (string |> String.reverse |> transliterate("<>{}()[]\\/", "><}{)(][/\\"))
end
end
def intersected_mirror(list) when Functions.is_iterable(list) do
list |> Stream.map(fn x -> if Functions.is_iterable(x) do x ++ (x |> Enum.to_list |> Enum.drop(1) |> Enum.reverse |> transliterate("<>{}()[]\\/", "><}{)(][/\\")) else intersected_mirror(x) end end)
end
def intersected_mirror(string) do
string = to_string(string)
cond do
String.contains?(string, "\n") -> Enum.join(intersected_mirror(String.split(string, "\n")), "\n")
true -> string <> (string |> String.reverse |> String.slice(1..-1) |> transliterate("<>{}()[]\\/", "><}{)(][/\\"))
end
end
def vertical_intersected_mirror(list) when Functions.is_iterable(list) do
list ++ (list |> Enum.reverse |> Enum.drop(1) |> Enum.map(fn x -> x |> transliterate("/\\", "\\/") end)) |> Enum.join("\n")
end
def vertical_intersected_mirror(string), do: vertical_intersected_mirror(String.split(to_string(string), "\n"))
def leftpad_with(list, length, pad_char) when Functions.is_iterable(list), do: list |> Stream.map(fn x -> leftpad_with(x, length, pad_char) end)
def leftpad_with(string, length, pad_char) when is_bitstring(string), do: String.duplicate(pad_char, max(length - String.length(string), 0)) <> string
def leftpad_with(value, length, pad_char), do: leftpad_with(Functions.to_non_number(value), length, pad_char)
def run_length_encode(string) when not Functions.is_iterable(string), do: run_length_encode(Functions.to_list(string))
def run_length_encode(list) do
chars = list |> ListCommands.deduplicate
lengths = list |> ListCommands.group_equal |> Stream.map(fn x -> length(Enum.to_list(x)) end)
{chars, lengths}
end
def run_length_decode(elements, lengths) do
Stream.zip(elements, lengths) |> Stream.flat_map(fn {element, len} -> List.duplicate(element, Functions.to_number(len)) end) |> Functions.as_stream
end
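# Round-trip sketch (module name assumed from the file name, and assuming
# ListCommands.deduplicate collapses consecutive duplicates; both halves of the
# result are streams, hence the Enum calls):
#
#   {chars, lengths} = StrCommands.run_length_encode("aaabbc")
#   # Enum.to_list(chars)   => ["a", "b", "c"]
#   # Enum.to_list(lengths) => [3, 2, 1]
#   StrCommands.run_length_decode(chars, lengths) |> Enum.join("")
#   # => "aaabbc"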
defp exchange_capitalization(left, [], acc), do: acc <> Enum.join(left, "")
defp exchange_capitalization([], _, acc), do: acc
defp exchange_capitalization([a | as], [b | bs], acc) do
cond do
Regex.match?(~r/^[A-Z]/, b) -> exchange_capitalization(as, bs, acc <> String.upcase(a))
Regex.match?(~r/^[a-z]/, b) -> exchange_capitalization(as, bs, acc <> String.downcase(a))
true -> exchange_capitalization(as, bs, acc <> a)
end
end
def exchange_capitalization(left, right), do: exchange_capitalization(Functions.to_list(left), Functions.to_list(right), "")
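# Example: capitalization is copied position-by-position from the second
# argument onto the first, and leftover characters keep their case:
#
#   exchange_capitalization("hello", "AbC")
#   # => "HeLlo"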
end
|
lib/commands/str_commands.ex
| 0.767341
| 0.796174
|
str_commands.ex
|
starcoder
|
defmodule Meilisearch.Indexes do
@moduledoc """
Collection of functions used to manage indexes.
[MeiliSearch Documentation - Indexes](https://docs.meilisearch.com/references/indexes.html)
"""
alias Meilisearch.HTTP
@doc """
List all indexes
## Example
iex> Meilisearch.Indexes.list()
{:ok, [
%{
"createdAt" => "2020-05-23T06:20:18.394281328Z",
"name" => "meilisearch_test",
"primaryKey" => nil,
"uid" => "meilisearch_test",
"updatedAt" => "2020-05-23T06:20:18.394292399Z"
}
]}
"""
@spec list :: HTTP.response()
def list do
HTTP.get_request("indexes")
end
@doc """
Get information about an index
## Example
iex> Meilisearch.Indexes.get("meilisearch_test")
{:ok,
%{
"createdAt" => "2020-05-23T06:20:18.394281328Z",
"name" => "meilisearch_test",
"primaryKey" => nil,
"uid" => "meilisearch_test",
"updatedAt" => "2020-05-23T06:20:18.394292399Z"
}
}
"""
@spec get(String.t()) :: HTTP.response()
def get(uid) do
HTTP.get_request("indexes/#{uid}")
end
@doc """
Create an index
`primary_key` can be passed as an option.
## Examples
iex> Meilisearch.Indexes.create("meilisearch_test")
{:ok,
%{
"createdAt" => "2020-05-23T06:20:18.394281328Z",
"name" => "meilisearch_test",
"primaryKey" => nil,
"uid" => "meilisearch_test",
"updatedAt" => "2020-05-23T06:20:18.394292399Z"
}
}
iex> Meilisearch.Indexes.create("meilisearch_test", primary_key: "key_name")
{:ok,
%{
"createdAt" => "2020-05-23T06:20:18.394281328Z",
"name" => "meilisearch_test",
"primaryKey" => "key_name",
"uid" => "meilisearch_test",
"updatedAt" => "2020-05-23T06:20:18.394292399Z"
}
}
"""
@spec create(String.t(), Keyword.t()) :: HTTP.response()
def create(uid, opts \\ []) do
body = %{
uid: uid,
primaryKey: Keyword.get(opts, :primary_key)
}
HTTP.post_request("indexes", body)
end
@doc """
Update an index with a new primary key. This will fail if the primary key has already been set.
`primary_key` option is required.
## Examples
iex> Meilisearch.Indexes.update("meilisearch_test", primary_key: "new_key")
{:ok,
%{
"primaryKey" => "new_primary_key",
"createdAt" => "2020-05-25T04:30:10.681720067Z",
"name" => "meilisearch_test",
"uid" => "meilisearch_test",
"updatedAt" => "2020-05-25T04:30:10.685540577Z"
}
}
"""
@spec update(String.t(), primary_key: String.t()) :: HTTP.response()
def update(uid, opts \\ []) do
with {:ok, primary_key} <- Keyword.fetch(opts, :primary_key),
body <- %{primaryKey: primary_key} do
HTTP.put_request("indexes/#{uid}", body)
else
_ -> {:error, "primary_key is required"}
end
end
@doc """
Delete an index
## Examples
iex> Meilisearch.Indexes.delete("meilisearch_test")
{:ok, nil}
iex> Meilisearch.Indexes.delete("nonexistent_index")
{:error, 404, "Index nonexistent_index not found"}
"""
@spec delete(String.t()) :: HTTP.response()
def delete(uid) do
HTTP.delete_request("indexes/#{uid}")
end
@doc """
Check if index exists
## Examples
iex> Meilisearch.Indexes.exists?("meilisearch_test")
{:ok, true}
iex> Meilisearch.Indexes.exists?("nonexistent_index")
{:ok, false}
"""
@spec exists?(String.t()) :: {:ok, true | false} | {:error, String.t()}
def exists?(uid) do
case get(uid) do
{:ok, _} -> {:ok, true}
{:error, 404, _} -> {:ok, false}
_ -> {:error, "Unknown error has occurred"}
end
end
end
|
lib/meilisearch/indexes.ex
| 0.747063
| 0.411436
|
indexes.ex
|
starcoder
|
defmodule OpenSCAD.Watcher do
@moduledoc """
Giles forever!
This is the only child spec for the OpenSCAD application. It watches
`./models` by default, but you can configure that to whatever you want like
so:
```elixir
config :open_scad, :watcher_path, "./slayers"
```
"""
use GenServer, restart: :permanent
require Logger
defstruct watcher_pid: nil
def start_link([]) do
GenServer.start_link(__MODULE__, [])
end
def init(_args) do
Process.flag(:trap_exit, true)
_ = Logger.info("Running OpenSCAD Watcher")
use_correct_mac_listener()
# TODO: Rethink the hardcoding of `./models`
{pwd, 0} = System.cmd("pwd", [])
path =
Path.join(
String.trim(pwd),
Application.get_env(:open_scad, :watcher_path, "./models")
)
{:ok, watcher_pid} = FileSystem.start_link(dirs: [path])
FileSystem.subscribe(watcher_pid)
{:ok, %__MODULE__{watcher_pid: watcher_pid}}
end
# Compiles a file that's been changed
defp compile(:stop), do: :stop
defp compile(path) do
{:ok, script} = File.read(path)
case string_to_quoted(String.to_charlist(script), 0, path, []) do
# `e` can be binary() or {binary(), binary()}
{:error, {line, e, _token}} ->
_ = Logger.error("#{path} compilation error")
_ = Logger.error(" #{line}: #{inspect(e)}")
:stop
_ ->
_ = Logger.info("Compiling #{path}")
try do
modules = Code.compile_file(path)
_ = Logger.info("Done compiling")
modules
rescue
e ->
_ = Logger.error("Error Compiling #{path}")
_ = Logger.error(inspect(e))
:stop
end
end
end
defp maybe_run(:stop), do: :stop
defp maybe_run(modules) do
for {mod, _} <- modules do
if Kernel.function_exported?(mod, :is_open_scad_model?, 0) do
try do
mod.main
rescue
e ->
_ = Logger.error("Error running #{mod}")
_ = Logger.error(inspect(e))
end
end
end
end
def handle_info(
{:file_event, watcher_pid, {path, _events}} = f,
%__MODULE__{:watcher_pid => watcher_pid} = state
) do
_ = Logger.info("file event: #{inspect(f)}")
_ =
path
|> maybe_path()
|> compile()
|> maybe_run()
{:noreply, state}
end
def handle_info(
{:file_event, watcher_pid, :stop},
%__MODULE__{:watcher_pid => watcher_pid} = state
) do
{:noreply, state}
end
def handle_info({:EXIT, from, reason}, state) do
_ = Logger.info("Exit from #{inspect(from)} : #{inspect(reason)}")
{:noreply, state}
end
def handle_info(msg, state) do
_ = Logger.error("Unexpected message: #{inspect(msg)}")
{:noreply, state}
end
defp string_to_quoted(string, start_line, file, opts) do
case :elixir.string_to_tokens(string, start_line, file, opts) do
{:ok, tokens} ->
:elixir.tokens_to_quoted(tokens, file, opts)
error ->
error
end
end
defp maybe_path(p) do
case Path.extname(p) do
".ex" -> p
".exs" -> p
_ -> :stop
end
end
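# The file-event pipeline above, sketched end to end (path is illustrative):
#
#   "models/cube.exs"
#   |> maybe_path()  # keeps .ex/.exs files, anything else becomes :stop
#   |> compile()     # Code.compile_file/1; :stop on parse or compile errors
#   |> maybe_run()   # calls mod.main for modules exporting is_open_scad_model?/0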
defp use_correct_mac_listener() do
case :escript.script_name() do
[] ->
# Not an escript
:ok
_ ->
# An escript
executable_override = Path.absname("mac_listener")
if File.exists?(executable_override) do
file_system =
Application.get_env(:file_system, :fs_mac, [])
|> Keyword.put(:executable_file, executable_override)
|> IO.inspect()
Application.put_env(:file_system, :fs_mac, file_system)
end
end
end
end
|
lib/watcher.ex
| 0.505859
| 0.559892
|
watcher.ex
|
starcoder
|
defmodule Canvas.Resources.Assignments do
@moduledoc """
Provides functions to interact with the
[assignment endpoints](https://canvas.instructure.com/doc/api/assignments).
"""
alias Canvas.{Client, Listing, Response}
alias Canvas.Resources.Assignment
def delete_an_assignment() do
end
@doc """
Lists the assignments in a course.
See:
- https://canvas.instructure.com/doc/api/assignments#method.assignments_api.index
## Examples:
client = %Canvas.Client{access_token: "a<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Assignments.list_assignments(client, 101)
{:ok, response} = Canvas.Resources.Assignments.list_assignments(client, 101, per_page: 50, page: 4)
"""
@spec list_assignments(Client.t(), String.t() | integer, Keyword.t()) ::
{:ok | :error, Response.t()}
def list_assignments(client, course_id, options \\ []) do
url = Client.versioned("/courses/#{course_id}/assignments")
Listing.get(client, url, options)
|> Response.parse([%Assignment{}])
end
@doc """
List all assignments in a course, automatically paginating if necessary.
This function will automatically page through all pages, returning all assignments.
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, assignments} = Canvas.Resources.Assignments.all_assignments(client, 101)
"""
@spec all_assignments(Client.t(), String.t() | integer, Keyword.t()) ::
{:ok, list(%Assignment{})} | {:error, Response.t()}
def all_assignments(client, course_id, options \\ []) do
Listing.get_all(__MODULE__, :list_assignments, [client, course_id, options])
end
def list_assignments_for_assignment_group() do
end
def list_assignments_for_user() do
end
@doc """
Returns the assignment with the given id.
See:
https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.show
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Assignments.get_a_single_assignment(client, 101, 1234)
{:ok, response} = Canvas.Resources.Assignments.get_a_single_assignment(client, 101, 1234, per_page: 50, page: 4)
"""
@spec get_a_single_assignment(
Client.t(),
String.t() | integer,
String.t() | integer,
Keyword.t()
) ::
{:ok | :error, Response.t()}
def get_a_single_assignment(client, course_id, id, options \\ []) do
url = Client.versioned("/courses/#{course_id}/assignments/#{id}")
Listing.get(client, url, options)
|> Response.parse(%Assignment{})
end
def create_an_assignment() do
end
def edit_an_assignment() do
end
end
|
lib/canvas/resources/assignments.ex
| 0.824991
| 0.486454
|
assignments.ex
|
starcoder
|
defmodule Tz.TimeZoneDatabase do
@moduledoc false
@behaviour Calendar.TimeZoneDatabase
alias Tz.PeriodsProvider
@compile {:inline, period_to_map: 1}
@impl true
def time_zone_period_from_utc_iso_days(_, "Etc/UTC"),
do: {:ok, %{utc_offset: 0, std_offset: 0, zone_abbr: "UTC"}}
def time_zone_period_from_utc_iso_days(iso_days, time_zone) do
with {:ok, periods} <- PeriodsProvider.periods(time_zone) do
iso_days_to_gregorian_seconds(iso_days)
|> find_period_for_secs(periods, :utc)
end
end
@impl true
def time_zone_periods_from_wall_datetime(_, "Etc/UTC"),
do: {:ok, %{utc_offset: 0, std_offset: 0, zone_abbr: "UTC"}}
def time_zone_periods_from_wall_datetime(naive_datetime, time_zone) do
with {:ok, periods} <- PeriodsProvider.periods(time_zone) do
naive_datetime_to_gregorian_seconds(naive_datetime)
|> find_period_for_secs(periods, :wall)
end
end
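# Usage sketch: this module is meant to be passed to the standard DateTime
# functions as the time zone database (zone and timestamp are illustrative):
#
#   DateTime.from_naive(~N[2021-03-14 02:30:00], "America/New_York", Tz.TimeZoneDatabase)
#   # => {:gap, _just_before, _just_after} during a spring-forward transition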
defp find_period_for_secs(secs, periods, time_modifier) do
case do_find_period_for_secs(secs, periods, time_modifier) do
{:max, utc_offset, rules_and_template} ->
periods = generate_dynamic_periods(secs, utc_offset, rules_and_template)
do_find_period_for_secs(secs, periods, time_modifier)
result -> result
end
end
defp do_find_period_for_secs(secs, periods, :utc) do
case Enum.find(periods, fn {from, _, _, _} -> secs >= from end) do
{_, period, _, nil} ->
{:ok, period_to_map(period)}
{_, {utc_off, _, _}, _, rules_and_template} ->
{:max, utc_off, rules_and_template}
end
end
defp do_find_period_for_secs(secs, periods, :wall), do: find_period_for_wall_secs(secs, periods)
# receives wall gregorian seconds (also referred to as the 'given timestamp' in the comments below)
# and the list of transitions
defp find_period_for_wall_secs(_, [{0, period, _, _}]), do: {:ok, period_to_map(period)}
defp find_period_for_wall_secs(secs, [{utc_secs, period = {utc_off, std_off, _}, prev_period = {prev_utc_off, prev_std_off, _}, rules_and_template} | tail]) do
# utc_secs + utc_off + std_off = wall gregorian seconds
if secs < utc_secs + utc_off + std_off do
# the given timestamp occurs in a gap if it occurs between
# the utc timestamp + the previous offset and
# the utc timestamp + the offset (= this transition's wall time)
if secs >= utc_secs + prev_utc_off + prev_std_off do
{:gap,
{period_to_map(prev_period), gregorian_seconds_to_naive_datetime(utc_secs + prev_utc_off + prev_std_off)},
{period_to_map(period), gregorian_seconds_to_naive_datetime(utc_secs + utc_off + std_off)}}
else
# the given timestamp occurs before this transition and there is no gap with the previous period,
# so continue iterating
find_period_for_wall_secs(secs, tail)
end
else
# the given timestamp occurs during two periods if it occurs between
# the utc timestamp + the offset (= this transition's wall time) and
# the utc timestamp + the previous offset
if secs < utc_secs + prev_utc_off + prev_std_off do
{:ambiguous, period_to_map(prev_period), period_to_map(period)}
else
# the given timestamp occurs after this transition's wall time, and there is no gap nor overlap
case rules_and_template do
nil ->
{:ok, period_to_map(period)}
_ ->
{:max, utc_off, rules_and_template}
end
end
end
end
defp period_to_map({utc_off, std_off, abbr}) do
%{
utc_offset: utc_off,
std_offset: std_off,
zone_abbr: abbr
}
end
defp generate_dynamic_periods(secs, utc_offset, {rule_name, format_time_zone_abbr}) do
%{year: year} = gregorian_seconds_to_naive_datetime(secs)
[rule1, rule2] = Tz.OngoingChangingRulesProvider.rules(rule_name)
rule_records = Tz.IanaFileParser.denormalized_rule_data([
Tz.IanaFileParser.change_rule_year(rule2, year - 1),
Tz.IanaFileParser.change_rule_year(rule2, year),
Tz.IanaFileParser.change_rule_year(rule1, year)
])
zone_line = %{
from: :min,
to: :max,
rules: rule_name,
format_time_zone_abbr: format_time_zone_abbr,
std_offset_from_utc_time: utc_offset
}
Tz.PeriodsBuilder.build_periods([zone_line], rule_records, :dynamic_far_future)
|> Tz.PeriodsBuilder.periods_to_tuples_and_reverse()
end
defp iso_days_to_gregorian_seconds({days, {parts_in_day, 86_400_000_000}}) do
div(days * 86_400_000_000 + parts_in_day, 1_000_000)
end
defp naive_datetime_to_gregorian_seconds(datetime) do
NaiveDateTime.to_erl(datetime)
|> :calendar.datetime_to_gregorian_seconds()
end
defp gregorian_seconds_to_naive_datetime(seconds) do
:calendar.gregorian_seconds_to_datetime(seconds)
|> NaiveDateTime.from_erl!()
end
end
|
lib/time_zone_database.ex
| 0.834609
| 0.511961
|
time_zone_database.ex
|
starcoder
|
defmodule Contex.SVG do
@moduledoc """
Convenience functions for generating SVG output
"""
def text(x, y, content, opts \\ []) do
attrs = opts_to_attrs(opts)
[
"<text ",
~s|x="#{x}" y="#{y}"|,
attrs,
">",
clean(content),
"</text>"
]
end
def text(content, opts \\ []) do
attrs = opts_to_attrs(opts)
[
"<text ",
attrs,
">",
clean(content),
"</text>"
]
end
def title(content, opts \\ []) do
attrs = opts_to_attrs(opts)
[
"<title ",
attrs,
">",
clean(content),
"</title>"
]
end
def rect({_x1, _x2} = x_extents, {_y1, _y2} = y_extents, inner_content, opts \\ []) do
width = width(x_extents)
height = width(y_extents)
y = min(y_extents)
x = min(x_extents)
attrs = opts_to_attrs(opts)
[
"<rect ",
~s|x="#{x}" y="#{y}" width="#{width}" height="#{height}"|,
attrs,
">",
inner_content,
"</rect>"
]
end
def circle(x, y, radius, opts \\ []) do
attrs = opts_to_attrs(opts)
[
"<circle ",
~s|cx="#{x}" cy="#{y}" r="#{radius}"|,
attrs,
"></circle>"
]
end
def line(points, smoothed, opts \\ []) do
attrs = opts_to_attrs(opts)
path = path(points, smoothed)
[
"<path d=\"",
path,
"\"",
attrs,
"></path>"
]
end
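# Sketch of composing these helpers (values illustrative; the functions return
# iodata, so render with IO.chardata_to_string/1):
#
#   Contex.SVG.rect({0, 100}, {0, 50}, "", class: "bar", fill: "cc0000")
#   |> IO.chardata_to_string()
#   # => something like ~s|<rect x="0" y="0" width="100" height="50" style="fill:#cc0000;" class="bar"></rect>|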
defp path([], _), do: ""
defp path(points, false) do
Enum.reduce(points, :first, fn {x, y}, acc ->
coord = ~s|#{x} #{y}|
case acc do
:first -> ["M ", coord]
_ -> [acc, " L " | coord]
end
end)
end
defp path(points, true) do
# Use Catmull-Rom curve - see http://schepers.cc/getting-to-the-point
# First point stays as-is. Subsequent points are draw using SVG cubic-spline
# where control points are calculated as follows:
# - Take the immediately prior data point, the data point itself and the next two into
# an array of 4 points. Where this isn't possible (first & last) duplicate
# Apply Cardinal Spline to Cubic Bezier conversion matrix (this is with tension = 0.0)
# 0 1 0 0
# -1/6 1 1/6 0
# 0 1/6 1 -1/6
# 0 0 1 0
# First control point is second result, second control point is third result, end point is last result
initial_window = {nil, nil, nil, nil}
{_, window, last_p, result} =
Enum.reduce(points, {:first, initial_window, nil, ""}, fn p,
{step, window, last_p, result} ->
case step do
:first ->
{:second, {p, p, p, p}, p, []}
:second ->
{:rest, bump_window(window, p), p, ["M ", coord(last_p)]}
:rest ->
window = bump_window(window, p)
{cp1, cp2} = cardinal_spline_control_points(window)
{:rest, window, p, [result, " C " | [coord(cp1), coord(cp2), coord(last_p)]]}
end
end)
window = bump_window(window, last_p)
{cp1, cp2} = cardinal_spline_control_points(window)
[result, " C " | [coord(cp1), coord(cp2), coord(last_p)]]
end
defp bump_window({_p1, p2, p3, p4}, new_p), do: {p2, p3, p4, new_p}
@spline_tension 0.3
@factor (1.0 - @spline_tension) / 6.0
defp cardinal_spline_control_points({{x1, y1}, {x2, y2}, {x3, y3}, {x4, y4}}) do
cp1 = {x2 + @factor * (x3 - x1), y2 + @factor * (y3 - y1)}
cp2 = {x3 + @factor * (x2 - x4), y3 + @factor * (y2 - y4)}
{cp1, cp2}
end
defp coord({x, y}) do
x = if is_float(x), do: :erlang.float_to_binary(x, decimals: 2), else: x
y = if is_float(y), do: :erlang.float_to_binary(y, decimals: 2), else: y
~s| #{x} #{y}|
end
def opts_to_attrs(opts), do: opts_to_attrs(opts, [])
defp opts_to_attrs([{_, nil} | t], attrs), do: opts_to_attrs(t, attrs)
defp opts_to_attrs([{_, ""} | t], attrs), do: opts_to_attrs(t, attrs)
defp opts_to_attrs([{:phx_click, val} | t], attrs),
do: opts_to_attrs(t, [[" phx-click=\"", val, "\""] | attrs])
defp opts_to_attrs([{:phx_target, val} | t], attrs),
do: opts_to_attrs(t, [[" phx-target=\"", val, "\""] | attrs])
defp opts_to_attrs([{:series, val} | t], attrs),
do: opts_to_attrs(t, [[" phx-value-series=\"", "#{clean(val)}", "\""] | attrs])
defp opts_to_attrs([{:category, val} | t], attrs),
do: opts_to_attrs(t, [[" phx-value-category=\"", "#{clean(val)}", "\""] | attrs])
defp opts_to_attrs([{:value, val} | t], attrs),
do: opts_to_attrs(t, [[" phx-value-value=\"", "#{clean(val)}", "\""] | attrs])
defp opts_to_attrs([{:id, val} | t], attrs),
do: opts_to_attrs(t, [[" phx-value-id=\"", "#{val}", "\""] | attrs])
defp opts_to_attrs([{:task, val} | t], attrs),
do: opts_to_attrs(t, [[" phx-value-task=\"", "#{clean(val)}", "\""] | attrs])
# TODO: This is going to break down with more complex styles
defp opts_to_attrs([{:fill, val} | t], attrs),
do: opts_to_attrs(t, [[" style=\"fill:#", val, ";\""] | attrs])
defp opts_to_attrs([{:transparent, true} | t], attrs),
do: opts_to_attrs(t, [[" fill=\"transparent\""] | attrs])
defp opts_to_attrs([{:stroke, val} | t], attrs),
do: opts_to_attrs(t, [[" stroke=\"#", val, "\""] | attrs])
defp opts_to_attrs([{:stroke_width, val} | t], attrs),
do: opts_to_attrs(t, [[" stroke-width=\"", val, "\""] | attrs])
defp opts_to_attrs([{:stroke_linejoin, val} | t], attrs),
do: opts_to_attrs(t, [[" stroke-linejoin=\"", val, "\""] | attrs])
defp opts_to_attrs([{:opacity, val} | t], attrs),
do: opts_to_attrs(t, [[" fill-opacity=\"", val, "\""] | attrs])
defp opts_to_attrs([{:class, val} | t], attrs),
do: opts_to_attrs(t, [[" class=\"", val, "\""] | attrs])
defp opts_to_attrs([{:transform, val} | t], attrs),
do: opts_to_attrs(t, [[" transform=\"", val, "\""] | attrs])
defp opts_to_attrs([{:text_anchor, val} | t], attrs),
do: opts_to_attrs(t, [[" text-anchor=\"", val, "\""] | attrs])
defp opts_to_attrs([{:dominant_baseline, val} | t], attrs),
do: opts_to_attrs(t, [[" dominant-baseline=\"", val, "\""] | attrs])
defp opts_to_attrs([{:alignment_baseline, val} | t], attrs),
do: opts_to_attrs(t, [[" alignment-baseline=\"", val, "\""] | attrs])
defp opts_to_attrs([{:marker_start, val} | t], attrs),
do: opts_to_attrs(t, [[" marker-start=\"", val, "\""] | attrs])
defp opts_to_attrs([{:marker_mid, val} | t], attrs),
do: opts_to_attrs(t, [[" marker-mid=\"", val, "\""] | attrs])
defp opts_to_attrs([{:marker_end, val} | t], attrs),
do: opts_to_attrs(t, [[" marker-end=\"", val, "\""] | attrs])
defp opts_to_attrs([{key, val} | t], attrs) when is_atom(key),
do: opts_to_attrs(t, [[" ", Atom.to_string(key), "=\"", clean(val), "\""] | attrs])
defp opts_to_attrs([{key, val} | t], attrs) when is_binary(key),
do: opts_to_attrs(t, [[" ", key, "=\"", clean(val), "\""] | attrs])
defp opts_to_attrs([], attrs), do: attrs
defp width({a, b}), do: abs(a - b)
defp min({a, b}), do: min(a, b)
defp clean(s), do: Contex.SVG.Sanitize.basic_sanitize(s)
end
|
lib/chart/svg.ex
| 0.63023
| 0.500061
|
svg.ex
|
starcoder
|
defmodule SimpleMarkdown.Renderer.HTML.Utilities do
@moduledoc """
Convenient functions for working with HTML.
"""
@type ast :: { tag :: String.Chars.t, attrs :: [{ String.Chars.t, String.Chars.t }], ast } | [ast] | String.t
@type version :: { major :: non_neg_integer, minor :: non_neg_integer }
@type format(type) :: { type, version }
@type formats :: format(:html) | format(:xhtml)
@type tag_list :: [atom | String.t]
@type chardata_list :: [{ String.t, String.t }]
@spaces [?\s, ?\t, ?\n, ?\f, ?\r]
@quotes [?", ?']
@terminators [?>, ?/]
defmodule UnencodableAttributeError do
defexception [:value]
@impl Exception
def exception(value) do
%__MODULE__{
value: value
}
end
@impl Exception
def message(%{ value: value }), do: "Attribute value must either be encoded or have a format compatible with unquoted, single-quoted, or double-quoted attribute-value syntax: #{inspect value}"
end
@doc """
Convert the HTML AST to HTML.
The conversion behaviour can be modified by setting the `opts` parameter with
any of the following:
* `:format` - To control the HTML format. This takes one of the valid `t:formats/0`.
By default this is set to generate HTML5 code (`{ :html, { 5, 0 } }`).
* `:void_elements` - To customise which elements are void elements (do not
contain content). This takes a `t:tag_list/0`. By default this is set to the list
of tags returned by `void_elements/0`.
* `:raw_text_elements` - To customise which elements are raw text elements (do not
encode their content nor contain nested nodes). This takes a `t:tag_list/0`. By default
this is set to the list of tags returned by `raw_text_elements/0`.
* `:include_chardata` - To control whether nodes that match `:chardata` should be
included in the HTML or not. By default this is set to false.
* `:chardata` - To customise which elements are considered to be character data (special
cases that do not encode their content nor contain nested nodes). This takes a
`t:chardata_list/0`. By default this is set to the list of opening/closing tags returned
by `chardata/0`.
* `:encode_attributes` - To control whether attribute values are encoded, expects a
`boolean` value. By default this is set to `false`.
Example
-------
iex> SimpleMarkdown.Renderer.HTML.Utilities.ast_to_html({ :p, [], "hello" }) |> IO.chardata_to_string
"<p>hello</p>"
iex> SimpleMarkdown.Renderer.HTML.Utilities.ast_to_html({ "!--", [], "hello" }, include_chardata: true) |> IO.chardata_to_string
"<!--hello-->"
"""
@spec ast_to_html(ast, keyword) :: IO.chardata
def ast_to_html(ast, opts \\ []) do
ast_to_html(ast, opts[:format] || { :html, { 5, 0 } }, make_set(opts[:void_elements] || void_elements()), make_set(opts[:raw_text_elements] || raw_text_elements()), false, Map.new(opts[:chardata] || chardata()), opts[:include_chardata] || false, opts[:encode_attributes] || false)
end
@spec ast_to_html(ast, formats, MapSet.t, MapSet.t, boolean, %{ optional(String.t) => String.t }, boolean, boolean) :: IO.chardata
defp ast_to_html({ tag, attrs, nodes }, format, void_elements, raw_text_elements, is_raw_text, chardata, include_chardata, encode_attrs) do
tag_s = to_string(tag)
case chardata[tag_s] do
nil ->
{ { is_void, is_raw_text }, tag } = if is_raw_text do
case MapSet.member?(void_elements, tag) do
true -> { { true, true }, tag_s }
result -> { if(is_binary(tag), do: { result, true }, else: { MapSet.member?(void_elements, tag_s), true }), tag_s }
end
else
case { MapSet.member?(void_elements, tag), MapSet.member?(raw_text_elements, tag) } do
{ true, true } -> { { true, true }, tag_s }
result -> { if(is_binary(tag), do: result, else: { MapSet.member?(void_elements, tag_s), MapSet.member?(raw_text_elements, tag_s) }), tag_s }
end
end
html_element(tag, attrs, nodes, format, is_void, void_elements, raw_text_elements, is_raw_text, chardata, include_chardata, encode_attrs)
suffix ->
if include_chardata do
["<", tag_s, ast_to_html(nodes, format, void_elements, raw_text_elements, true, chardata, include_chardata, encode_attrs), suffix, ">"]
else
""
end
end
end
defp ast_to_html(list, format, void_elements, raw_text_elements, is_raw_text, chardata, include_chardata, encode_attrs) when is_list(list), do: Enum.map(list, &ast_to_html(&1, format, void_elements, raw_text_elements, is_raw_text, chardata, include_chardata, encode_attrs))
defp ast_to_html(string, _, _, _, false, _, _, _), do: HtmlEntities.encode(string)
defp ast_to_html(string, _, _, _, true, _, _, _), do: string
defp html_element(tag, attrs, [], { :html, { vsn, _ } }, true, _, _, _, _, _, encode_attrs) when vsn >= 5 do
[
"<",
tag,
html_attribute(attrs, encode_attrs),
">"
]
end
defp html_element(tag, attrs, [], { :xhtml, _ }, true, _, _, _, _, _, encode_attrs) do
[
"<",
tag,
html_attribute(attrs, encode_attrs),
" />"
]
end
defp html_element(tag, attrs, nodes, format, _, void_elements, raw_text_elements, is_raw_text, chardata, include_chardata, encode_attrs) do
[
"<",
tag,
html_attribute(attrs, encode_attrs),
">",
ast_to_html(nodes, format, void_elements, raw_text_elements, is_raw_text, chardata, include_chardata, encode_attrs),
"</",
tag,
">"
]
end
defp html_attribute(attrs, encode_attrs) do
Enum.map(attrs, fn
{ key, "" } -> [" ", to_string(key)]
{ key, value } -> [" ", to_string(key), "=", to_string(value) |> encode_attr_value(encode_attrs)]
end)
end
defp encode_attr_value(value, true), do: ["\"", HtmlEntities.encode(value), "\""]
defp encode_attr_value(value, false) do
cond do
not String.contains?(value, "\"") -> ["\"", value, "\""]
not String.contains?(value, "'") -> ["'", value, "'"]
true -> raise UnencodableAttributeError, value
end
end
defp make_set(tags) do
Enum.reduce(tags, MapSet.new(), fn
e, acc when is_binary(e) -> MapSet.put(acc, e)
e, acc -> MapSet.put(acc, e) |> MapSet.put(to_string(e))
end)
end
@doc """
A list of [void elements](https://html.spec.whatwg.org/multipage/syntax.html#void-elements).
"""
@spec void_elements() :: tag_list
def void_elements() do
[
:area,
:base,
:br,
:col,
:embed,
:hr,
:img,
:input,
:keygen, # obsolete
:link,
:meta,
:param,
:source,
:track,
:wbr
]
end
@doc """
A list of [raw text elements](https://html.spec.whatwg.org/multipage/syntax.html#raw-text-elements).
"""
@spec raw_text_elements() :: tag_list
def raw_text_elements() do
[
:script,
:style
]
end
@doc """
A list of any special nodes that will be treated as raw character data.
Currently this includes comments, character data, DTD (document type definitions),
PI (processing instructions).
Examples of currently supported nodes and how they're represented in
the AST:
{ "!--", [], " comment " } \#<!-- comment -->
{ "![CDATA[", [], "foo" } \#<![CDATA[foo]]>
{ "!DOCTYPE", [], " html" } \#<!DOCTYPE html>
{ "?", [], "xml version=\"1.0\" encoding=\"UTF-8\" " } \#<?xml version="1.0" encoding="UTF-8" ?>
"""
def chardata() do
[
{ "!--", "--" },
{ "![CDATA[", "]]" },
{ "!DOCTYPE", "" },
{ "?", "?" }
]
end
@doc """
Convert the HTML to HTML AST.
The parsing behaviour can be modified by setting the `opts` parameter with
any of the following:
* `:void_elements` - To customise which elements are void elements (do not
contain content). This takes a `t:tag_list/0`. By default this is set to the list
of tags returned by `void_elements/0`.
* `:raw_text_elements` - To customise which elements are raw text elements (do not
encode their content nor contain nested nodes). This takes a `t:tag_list/0`. By default
this is set to the list of tags returned by `raw_text_elements/0`.
* `:include_chardata` - To control whether nodes that match `:chardata` should be
included in the AST or not, expects a `boolean` value. By default this is set to `false`.
* `:chardata` - To customise which elements are considered to be character data (special
cases that do not encode their content nor contain nested nodes). This takes a
`t:chardata_list/0`. By default this is set to the list of opening/closing tags returned
by `chardata/0`.
* `:decode_attributes` - To control whether attribute values are decoded, expects a
`boolean` value. By default this is set to `false`.
Example
-------
iex> SimpleMarkdown.Renderer.HTML.Utilities.html_to_ast("<p>hello</p>")
{ "p", [], "hello" }
iex> SimpleMarkdown.Renderer.HTML.Utilities.html_to_ast("<!--hello-->", include_chardata: true)
{ "!--", [], "hello" }
"""
@spec html_to_ast(IO.chardata, keyword) :: ast
def html_to_ast(html, opts \\ []) do
{ nodes, _ } = to_ast_nodes(IO.chardata_to_string(html), make_set(opts[:void_elements] || void_elements()), make_set(opts[:raw_text_elements] || raw_text_elements()), { opts[:chardata] || chardata(), opts[:include_chardata] || false }, opts[:decode_attributes] || false)
nodes
end
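# Round-trip sketch using the two public functions above:
#
#   "<ul><li>one</li><li>two</li></ul>"
#   |> SimpleMarkdown.Renderer.HTML.Utilities.html_to_ast()
#   |> SimpleMarkdown.Renderer.HTML.Utilities.ast_to_html()
#   |> IO.chardata_to_string()
#   # => the original markup back (modulo attribute quoting)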
defp to_ast_nodes(html, void_elements, raw_text_elements, chardata, decode_attrs, raw_text_tag \\ nil, nodes \\ [], body \\ "")
defp to_ast_nodes("", _, _, _, _, nil, nodes, body), do: { merge_nodes(HtmlEntities.decode(body), nodes) |> compact_nodes, "" }
defp to_ast_nodes("", _, _, _, _, _, nodes, body), do: { merge_nodes(body, nodes) |> compact_nodes, "" }
defp to_ast_nodes("</" <> html, _, _, _, _, nil, nodes, body), do: { merge_nodes(HtmlEntities.decode(body), nodes) |> compact_nodes, till_closing_bracket(html) }
defp to_ast_nodes("</" <> html, void_elements, raw_text_elements, chardata, decode_attrs, raw_text_tag, nodes, body) do
if Regex.match?(~r/^#{raw_text_tag}\W*>/, html) do
{ merge_nodes(body, nodes) |> compact_nodes, till_closing_bracket(html) }
else
to_ast_nodes(html, void_elements, raw_text_elements, chardata, decode_attrs, raw_text_tag, nodes, body <> "</")
end
end
defp to_ast_nodes("<" <> html, void_elements, raw_text_elements, chardata, decode_attrs, nil, nodes, body) do
to_ast_nodes(html, void_elements, raw_text_elements, chardata, decode_attrs, nil, nodes, body, chardata)
end
defp to_ast_nodes(<<c :: utf8, html :: binary>>, void_elements, raw_text_elements, chardata, decode_attrs, raw_text_tag, nodes, body), do: to_ast_nodes(html, void_elements, raw_text_elements, chardata, decode_attrs, raw_text_tag, nodes, <<body :: binary, c :: utf8>>)
defp to_ast_nodes(html, void_elements, raw_text_elements, chardata, decode_attrs, raw_text_tag, nodes, body, { [{ open, close }|matches], include }) do
if String.starts_with?(html, open) do
size = byte_size(open)
<<_ :: binary-size(size), html :: binary>> = html
{ element, html } = to_ast_chardata(String.split(html, close <> ">", parts: 2), open, include)
to_ast_nodes(html, void_elements, raw_text_elements, chardata, decode_attrs, nil, merge_nodes(element, HtmlEntities.decode(body), nodes))
else
to_ast_nodes(html, void_elements, raw_text_elements, chardata, decode_attrs, raw_text_tag, nodes, body, { matches, include })
end
end
defp to_ast_nodes(html, void_elements, raw_text_elements, chardata, decode_attrs, _, nodes, body, { [], _ }) do
{ element, html } = to_ast_element(html, void_elements, raw_text_elements, chardata, decode_attrs)
to_ast_nodes(html, void_elements, raw_text_elements, chardata, decode_attrs, nil, merge_nodes(element, HtmlEntities.decode(body), nodes))
end
defp to_ast_chardata([content], open, true), do: { { open, [], content }, "" }
defp to_ast_chardata([content, html], open, true), do: { { open, [], content }, html }
defp to_ast_chardata([_], _, false), do: { "", "" }
defp to_ast_chardata([_, html], _, false), do: { "", html }
defp compact_nodes([node]), do: node
defp compact_nodes(nodes), do: nodes |> Enum.reverse
defp merge_nodes("", list), do: list
defp merge_nodes(a, list), do: [a|list]
defp merge_nodes("", "", list), do: list
defp merge_nodes(a, "", list), do: [a|list]
defp merge_nodes("", b, list), do: [b|list]
defp merge_nodes(a, b, list), do: [a, b|list]
defp till_closing_bracket(">" <> html), do: html
defp till_closing_bracket(<<_ :: utf8, html :: binary>>), do: till_closing_bracket(html)
defp to_ast_element(html, void_elements, raw_text_elements, chardata, decode_attrs, tag \\ "", attrs \\ [])
defp to_ast_element(<<c :: utf8, html :: binary>>, void_elements, raw_text_elements, chardata, decode_attrs, "", _) when c in @spaces, do: to_ast_element(html, void_elements, raw_text_elements, chardata, decode_attrs, "")
defp to_ast_element(<<c :: utf8, html :: binary>>, void_elements, raw_text_elements, chardata, decode_attrs, tag, _) when c in @spaces do
{ attrs, html } = to_ast_attributes(html)
to_ast_element(html, void_elements, raw_text_elements, chardata, decode_attrs, tag, if(decode_attrs, do: Enum.map(attrs, fn { k, v } -> { k, HtmlEntities.decode(v) } end), else: attrs))
end
defp to_ast_element("/>" <> html, _, _, _, _, tag, attrs), do: { { tag, attrs, [] }, html }
defp to_ast_element(">" <> html, void_elements, raw_text_elements, chardata, decode_attrs, tag, attrs) do
{ nodes, html } = if MapSet.member?(void_elements, tag) do
{ [], html }
else
to_ast_nodes(html, void_elements, raw_text_elements, chardata, decode_attrs, if(MapSet.member?(raw_text_elements, tag), do: tag))
end
{ { tag, attrs, nodes }, html }
end
defp to_ast_element(<<c :: utf8, html :: binary>>, void_elements, raw_text_elements, chardata, decode_attrs, tag, attrs), do: to_ast_element(html, void_elements, raw_text_elements, chardata, decode_attrs, <<tag :: binary, c :: utf8>>, attrs)
defp to_ast_element(_, _, _, _, _, _, _), do: { [], "" }
defp to_ast_attributes(html, type \\ :key, quoted \\ nil, attrs \\ [{ "", "" }])
defp to_ast_attributes("=" <> html, type, nil, attrs) when type in [:key, :key_s], do: to_ast_attributes(html, :value, nil, attrs)
defp to_ast_attributes(html = <<c :: utf8, _ :: binary>>, _, nil, [{ "", "" }|attrs]) when c in @terminators, do: { Enum.reverse(attrs), html }
defp to_ast_attributes(html = <<c :: utf8, _ :: binary>>, _, nil, attrs) when c in @terminators, do: { Enum.reverse(attrs), html }
defp to_ast_attributes(<<c :: utf8, html :: binary>>, :key, nil, attrs = [{ "", "" }|_]) when c in @spaces, do: to_ast_attributes(html, :key, nil, attrs)
defp to_ast_attributes(<<c :: utf8, html :: binary>>, :key, nil, attrs) when c in @spaces, do: to_ast_attributes(html, :key_s, nil, attrs)
defp to_ast_attributes(<<c :: utf8, html :: binary>>, :value, nil, attrs = [{ _, value }|_]) when byte_size(value) > 0 and c in @spaces, do: to_ast_attributes(html, :key, nil, [{ "", "" }|attrs])
defp to_ast_attributes(<<c :: utf8, html :: binary>>, :value, nil, attrs) when c in @spaces, do: to_ast_attributes(html, :value, nil, attrs)
defp to_ast_attributes(<<c :: utf8, html :: binary>>, :value, nil, attrs) when c in @quotes, do: to_ast_attributes(html, :value, c, attrs)
defp to_ast_attributes(<<c :: utf8, html :: binary>>, :value, c, attrs), do: to_ast_attributes(html, :value, nil, attrs)
defp to_ast_attributes(<<c :: utf8, html :: binary>>, :key, quoted, [{ key, value }|attrs]), do: to_ast_attributes(html, :key, quoted, [{ <<key :: binary, c :: utf8>>, value }|attrs])
defp to_ast_attributes(<<c :: utf8, html :: binary>>, :value, quoted, [{ key, value }|attrs]), do: to_ast_attributes(html, :value, quoted, [{ key, <<value :: binary, c :: utf8>> }|attrs])
defp to_ast_attributes(html, :key_s, _, attrs), do: to_ast_attributes(html, :key, nil, [{ "", "" }|attrs])
defp to_ast_attributes(_, _, _, _), do: { [], "" }
end
|
lib/simple_markdown/Renderer/html/utilities.ex
| 0.895251
| 0.490114
|
utilities.ex
|
starcoder
|
defmodule Platformsh.Config do
@moduledoc """
Reads Platform.sh configuration from environment variables.
See: https://docs.platform.sh/development/variables.html
The following are 'magic' properties that may exist in the configuration. Because this module reads plain
environment variables, a property that is not set on the current environment simply resolves to `nil`, so check for `nil` before relying on a value.
Attributes:
(The following properties are available at build time and run time.)
project (string):
The project ID.
application_name (string):
The name of the application, as defined in its configuration.
tree_id (string):
An ID identifying the application tree before it was built: a unique hash is generated based on the contents
of the application's files in the repository.
app_dir (string):
The absolute path to the application.
project_entropy (string):
A random string generated for each project, useful for generating hash keys.
(The following properties are only available at runtime.)
branch (string):
The Git branch name.
environment (string):
The environment ID (usually the Git branch plus a hash).
document_root (string):
The absolute path to the web root of the application.
smtp_host (string):
The hostname of the Platform.sh default SMTP server (an empty string if emails are disabled on the
environment.
port (string):
The TCP port number the application should listen to for incoming requests.
socket (string):
The Unix socket the application should listen to for incoming requests.
See: Platform.sh Environment Variables
https://docs.platform.sh/development/variables.html
"""
@doc """
Decodes a Platform.sh environment variable.
Args:
variable (string):
Base64-encoded JSON (the content of an environment variable).
Returns:
An dict (if representing a JSON object), or a scalar type.
Raises:
JSON decoding error.
"""
def decode(variable) do
if variable != nil do
Poison.decode!(Base.decode64!(variable))
else
nil
end
end
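# Sketch: the argument is the raw (base64-encoded JSON) value of a Platform.sh
# environment variable such as PLATFORM_RELATIONSHIPS (payload illustrative):
#
#   ~s({"database": []}) |> Base.encode64() |> Platformsh.Config.decode()
#   # => %{"database" => []}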
@doc """
value/1 Reads unprefixed environment variable, taking the prefix into account.
Args:
item (string):
The variable to read.
"""
def value(item) do
directVariablesRuntime = %{
port: "PORT",
socket: "SOCKET"
}
System.get_env(directVariablesRuntime[item])
end
@doc """
value/2 Reads an environment variable, taking the prefix into account.
Args:
item (string):
The variable to read.
prefix (string):
The Environment variable prefix
"""
def value(item, env_prefix) do
# Local index of the variables that can be accessed as direct properties
# (runtime only). The key is the property that will be read. The value is the environment variables, minus
# prefix, that contains the value to look up.
directVariables = %{
project: "PROJECT",
app_dir: "APP_DIR",
application_name: "APPLICATION_NAME",
tree_id: "TREE_ID",
project_entropy: "PROJECT_ENTROPY"
}
directVariablesRuntime = %{
branch: "BRANCH",
environment: "ENVIRONMENT",
document_root: "DOCUMENT_ROOT",
smtp_host: "SMTP_HOST",
# Assumed mapping: Platform.sh exposes the PLATFORM_MODE runtime variable; without
# this entry, value(:mode, ...) used by environment/0 always fell through to nil.
mode: "MODE"
}
inDirectVariablesRuntime = %{
routes: "ROUTES",
relationships: "RELATIONSHIPS",
application: "APPLICATION",
variables: "VARIABLES"
}
cond do
Map.has_key?(directVariables, item) ->
System.get_env("#{env_prefix}#{directVariables[item]}")
Map.has_key?(directVariablesRuntime, item) ->
System.get_env("#{env_prefix}#{directVariablesRuntime[item]}")
Map.has_key?(inDirectVariablesRuntime, item) ->
decode(System.get_env("#{env_prefix}#{inDirectVariablesRuntime[item]}"))
true ->
nil
end
end
@doc """
Local index of the variables that can be accessed as direct properties (build and
runtime). The key is the property that will be read. The value is the environment variables, minus prefix,
that contains the value to look up.
"""
def environment() do
env_prefix = "PLATFORM_"
%{
# Local index of the variables that can be accessed at build-time
project: value(:project, env_prefix),
app_dir: value(:app_dir, env_prefix),
application_name: value(:application_name, env_prefix),
tree_id: value(:tree_id, env_prefix),
project_entropy: value(:project_entropy, env_prefix),
mode: value(:mode, env_prefix),
# Local index of the variables that can be accessed as direct properties
# (runtime only).
branch: value(:branch, env_prefix),
environment: value(:environment, env_prefix),
document_root: value(:document_root, env_prefix),
smtp_host: value(:smtp_host, env_prefix),
# Local index of variables available at runtime that have no prefix.
port: value(:port),
socket: value(:socket),
# Local index of variables available at runtime that need decoding
routes: value(:routes, env_prefix),
relationships: value(:relationships, env_prefix),
application: value(:application, env_prefix),
variables: value(:variables, env_prefix)
}
end
@doc """
Checks whether the code is running on a platform with valid environment variables.
Returns:
bool:
True if configuration can be used, False otherwise.
"""
def is_valid_platform?() do
environment()[:application_name] != nil
end
@doc """
Checks whether the code is running in a build environment.
Returns:
bool: True if running in build environment, False otherwise.
"""
def in_build?() do
is_valid_platform?() and environment()[:environment] == nil
end
@doc """
Checks whether the code is running in a runtime environment.
Returns:
bool: True if in a runtime environment, False otherwise.
"""
def in_runtime?() do
is_valid_platform?() and environment()[:environment] != nil
end
@doc """
Retrieves the credentials for accessing a relationship.
Args:
relationship (string):
The relationship name as defined in .platform.app.yaml
for the moment it returns the first in the index of clustered services
Returns:
The credentials dict for the service pointed to by the relationship.
"""
def credentials(relationship) do
[config | _tail] = environment()[:relationships][relationship]
config
end
@doc """
Retrieves the unfiltered credentials for accessing a relationship.
Returns:
The credentials dict for the service pointed to by the relationship.
"""
def credentials() do
environment()[:relationships]
end
@doc """
variables/1 Returns a variable from the VARIABLES dict.
Note:
Variables prefixed with `env`: can be accessed as normal environment variables. This method will return
such a variable by the name with the prefix still included. Generally it's better to access those variables
directly.
Args:
name (string):
The name of the variable to retrieve.
default (mixed):
The default value to return if the variable is not defined. Defaults to nil.
Returns:
The value of the variable, or nil. This may be a string or a dict.
"""
def variables(name) do
if Map.has_key?(environment()[:variables], name) do
environment()[:variables][name]
else
nil
end
end
@doc """
variables/0 Returns the full variables dict.
If you're looking for a specific variable, the variable() method is a more robust option.
This method is for classes where you want to scan the whole variables list looking for a pattern.
It's valid for there to be no variables defined at all, so there's no guard for missing values.
Returns:
The full variables dict.
"""
def variables() do
environment()[:variables]
end
@doc """
routes/0 Return the routes definition.
Returns:
The routes dict.
Raises:
RuntimeError:
If the routes are not accessible due to being in the wrong environment.
"""
def routes() do
environment()[:routes]
end
@doc """
routes/1 Get route definition by route ID.
Args:
route_id (string):
The ID of the route to load.
Returns:
The route definition. The generated URL of the route is added as a 'url' key.
Raises:
KeyError:
If there is no route by that ID, an exception is thrown.
"""
def routes(route_id) do
environment()[:routes][route_id]
end
@doc """
Returns the application definition dict.
This is, approximately, the .platform.app.yaml file as a nested dict. However, it also has other information
added by Platform.sh as part of the build and deploy process.
Returns:
The application definition dict.
"""
def application() do
environment()[:application]
end
@doc """
Determines if the current environment is a Platform.sh Dedicated Enterprise environment.
Returns:
bool:
True on an Enterprise environment, False otherwise.
"""
def on_dedicated_enterprise?() do
is_valid_platform?() and environment()[:mode] == "enterprise"
end
@doc """
Determines if the current environment is a production environment.
Note:
There may be a few edge cases where this is not entirely correct on Enterprise, if the production branch is
not named `production`. In that case you'll need to use your own logic.
Returns:
bool:
True if the environment is a production environment, False otherwise. It will also return False if not
running on Platform.sh or in the build phase.
"""
def on_production?() do
prod_branch = if on_dedicated_enterprise?(), do: "production", else: "master"
environment()[:branch] == prod_branch
end
@doc """
Determines if any routes are defined.
Returns:
bool:
True if the relationship is defined, False otherwise.
"""
def has_routes?() do
environment()[:routes] != nil
end
@doc """
Determines if any relationships are defined, and thus credentials are available.
Returns:
bool:
True if the relationship is defined, False otherwise.
"""
def has_relationships() do
environment()[:relationships] != nil
end
@doc """
Determines if a relationship is defined, and thus has credentials available.
Args:
relationship (string):
The name of the relationship to check.
Returns:
bool:
True if the relationship is defined, False otherwise.
"""
def has_relationship(relationship) do
Map.has_key?(environment()[:relationships], relationship)
end
@doc """
Returns the just the names of relationships
Returns:
a list with relationship names
"""
def relationships() do
Map.keys(environment()[:relationships])
end
@doc """
Formats a dsn for use with ecto
Returns:
a string in the format of a dsn url for ecto
"""
def ecto_dsn_formatter(config) do
username = config["username"]
password = config["password"]
hostname = config["host"]
path = config["path"]
port = config["port"]
"ecto://#{username}:#{password}@#{hostname}:#{port}/#{path}"
end
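# Worked example (map shape follows credentials/1 above; values illustrative):
#
#   Platformsh.Config.ecto_dsn_formatter(%{
#     "username" => "user", "password" => "secret",
#     "host" => "db.internal", "port" => 5432, "path" => "main"
#   })
#   # => "ecto://user:secret@db.internal:5432/main"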
@doc """
Guesses a relational database for ecto
Returns:
a string in the format of a dsn url for ecto or nil if none found,
this is guesswork, so we don't want to crash when no value is found
"""
def guess_relational_database() do
if in_runtime?() do
cred =
Enum.find(Platformsh.Config.credentials(), fn {_rel, cred} ->
[config | _tail] = cred
String.contains?(config["scheme"], ["mysql", "pgsql"])
end)
{_rel, [config | _tail]} = cred
config
end
end
@doc """
Gets primary route
Returns:
a url of the primary route
"""
def primary_route() do
if in_runtime?() do
route =
Enum.find(Platformsh.Config.routes(), fn {_route, conf} -> conf["primary"] == true end)
if route != nil do
List.first(Tuple.to_list(route))
else
# We got nothing pick the top route.
List.first(Map.keys(Platformsh.Config.routes()))
end
end
end
@doc """
Magic configurations from process ENV may be configurable, so process them as a list of configurable items
"""
def config(l) when is_list(l) do
Enum.reduce(l, [], &Config.Reader.merge(&2, config(&1)))
end
@doc """
Load all magical config elements
"""
def config(:all) do
config([:ecto, :environment])
end
@doc """
Default ecto repository to configure is `Repo` module
"""
def config(:ecto) do
[repo | _tail] = Application.get_all_env(Mix.Project.config()[:app])[:ecto_repos]
config({:ecto, repo})
end
@doc """
Actual configuration of Ecto
"""
def config({:ecto, repo}) do
conf = Platformsh.Config.guess_relational_database()
url = URI.parse(Platformsh.Config.primary_route())
[
"#{Mix.Project.config()[:app]}": [
"#{repo}": [
username: conf["username"],
password: conf["password"],
hostname: conf["host"],
database: conf["path"],
port: conf["port"]
]
],
url: [host: url.host, path: url.path, scheme: url.scheme]
]
end
@doc """
Load everything we have into the environment
"""
def config(:environment) do
["#{Mix.Project.config()[:app]}": [env: Platformsh.Config.environment()]]
end
end
defmodule Platformsh.ConfigProvider do
@behaviour Config.Provider
require Logger
@moduledoc """
if app is started as a release, add this config provider
"""
@doc """
To derive PlatformSh config at bootup of a release, add this config provider to release
"""
def init(magics) do
magics
end
@doc """
Loads Platform.sh config explicitly.
"""
def load(config, magics) do
if Platformsh.Config.is_valid_platform?() do
Config.Reader.merge(config, Platformsh.Config.config(magics || :all))
else
Logger.warn("Trying to load Platform.sh Config but environment not detected")
config
end
end
end
defmodule Mix.Tasks.Platformsh.Config do
use Mix.Task
require Logger
@moduledoc """
Add mix tasks
If app is started with mix, add this task beforehand
"""
@doc """
To derive Platform.sh config if the app is launched with mix, add this task before app.start.
"""
def run(_) do
if Platformsh.Config.is_valid_platform?() do
config = Platformsh.Config.config(Mix.Project.config()[:platformsh_config] || :all)
Application.put_all_env(config)
else
Logger.warn("Trying to load Platform.sh Config but environment not detected")
end
:ok
end
end
defmodule Mix.Tasks.Platformsh.Run do
use Mix.Task
require Logger
@moduledoc """
hard alias to config+run for convenience
"""
@doc """
Platform.sh config+run task
"""
def run(args) do
if Platformsh.Config.is_valid_platform?() do
Mix.Task.run("platformsh.config", args)
else
Logger.warn("Trying to run Platform.sh Mix task but environment not detected")
end
Mix.Task.run("run", args)
end
end
defmodule Mix.Tasks.Platformsh.Compile do
use Mix.Task
require Logger
@moduledoc """
hard alias to config+compile for convenience
"""
@doc """
Platform.sh config+compile task
"""
def run(args) do
if Platformsh.Config.is_valid_platform?() do
Mix.Task.run("platformsh.config", args)
else
Logger.warn("Trying to run Platform.sh Mix task but environment not detected")
end
Mix.Task.run("compile", args)
end
end
defmodule Mix.Tasks.Compile.PlatformshConf do
require Logger
@moduledoc """
mix compiler adding config to compile for convenience
"""
@doc """
Platform.sh config for compile task
"""
def run(_) do
if Platformsh.Config.is_valid_platform?() do
Mix.Task.run("platformsh.config", [])
else
Logger.warn("Trying to run Platform.sh Mix task but environment not detected")
end
:ok
end
end
|
lib/platformshconfig.ex
| 0.865594
| 0.459986
|
platformshconfig.ex
|
starcoder
|
defmodule Granulix.Generator.Lfo do
alias Granulix.Math, as: GM
@moduledoc """
**Low Frequency Oscillator**
The Lfo module returns a stream of floats() between -1.0 and 1.0.
To be used as amplitude/frequency modulating input into audio rate streams.
Example: create a 4 Hz frequency modulator between 420 and 460 Hz and use
it as input for a sine oscillator:
alias Granulix.Generator.Lfo
alias Granulix.Generator.Oscillator
ctx = Granulix.Ctx.new()
fm = Lfo.triangle(4) |> Lfo.nma(40, 420)
# You can have a stream as modulating frequency input for osc
sinosc = SC.Plugin.stream(Oscillator.sin(fm))
You can also use the Stream module zip function to insert LFOs,
here moving sound between left and right channel every second:
panmove = Lfo.triangle(1.0) |> Stream.map(fn val -> val * 0.9 end)
sinosc
|> Stream.zip(panmove)
|> Stream.map(fn {x, y} -> Granulix.Util.pan(x, y) end)
Actually the pan function can take a stream directly and so:
sinosc |> Granulix.Util.pan(panmove)
"""
@spec sin(frequency :: number()) :: Enumerable.t()
def sin(freq) do
ctx = Granulix.Ctx.get()
step = GM.twopi() * freq * ctx.period_size / ctx.rate
Stream.unfold(
0,
fn acc ->
next = acc + step
next =
cond do
next > GM.twopi() -> next - GM.twopi()
true -> next
end
{:math.sin(acc), next}
end
)
end
@spec saw(frequency :: number()) :: Enumerable.t()
def saw(freq) do
ctx = Granulix.Ctx.get()
step = 2 * freq * ctx.period_size / ctx.rate
Stream.unfold(
0,
fn acc ->
val = 1.0 - acc
next1 = acc + step
next2 =
cond do
next1 > 2.0 -> next1 - 2.0
true -> next1
end
{val, next2}
end
)
end
@spec triangle(frequency :: number()) :: Enumerable.t()
def triangle(freq) do
ctx = Granulix.Ctx.get()
step = 4 * freq * ctx.period_size / ctx.rate
Stream.unfold(
0,
fn acc ->
val =
cond do
acc < 2.0 -> acc - 1.0
true -> 3.0 - acc
end
next1 = acc + step
next2 =
cond do
next1 > 4.0 -> next1 - 4.0
true -> next1
end
{val, next2}
end
)
end
@spec square(frequency :: number(), duty :: float()) :: Enumerable.t()
def square(freq, duty \\ 0.5) do
ctx = Granulix.Ctx.get()
step = freq * ctx.period_size / ctx.rate
Stream.unfold(
0,
fn acc ->
val =
cond do
acc < duty -> 1.0
true -> -1.0
end
next1 = acc + step
next2 =
cond do
next1 > 1.0 -> next1 - 1.0
true -> next1
end
{val, next2}
end
)
end
@doc """
Normalize, Multiply, Add
Move from -1, 1 range to 0, 1 and then multiply and add offset.
"""
@spec nma(frames :: Enumerable.t, mul :: float, bottomlevel :: float) :: Enumerable.t
def nma(frames, mul, bottomlevel) do
x = 0.5 * mul
Stream.map(frames, &(&1 * x + bottomlevel + x))
end
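# Worked example: with mul = 40 and bottomlevel = 420, x = 20, so an input of
# -1.0 maps to -1.0 * 20 + 420 + 20 = 420.0 and +1.0 maps to 460.0, matching
# the 420..460 Hz sweep in the module doc.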
end
|
lib/granulix/generator/lfo.ex
| 0.873323
| 0.674848
|
lfo.ex
|
starcoder
|
defmodule Money.Subscription.Plan do
@moduledoc """
Defines a standard subscription plan data structure.
"""
@typedoc "A plan interval type."
@type interval :: :day | :week | :month | :year
@typedoc "An integer interval count for a plan."
@type interval_count :: non_neg_integer
@typedoc "A Subscription Plan"
@type t :: %__MODULE__{
price: Money.t() | nil,
interval: interval,
interval_count: interval_count
}
@doc """
Defines the structure of a subscription plan.
"""
defstruct price: nil,
interval: nil,
interval_count: nil
@interval [:day, :week, :month, :year]
@doc """
Returns `{:ok, Money.Subscription.Plan.t}` or an `{:error, reason}`
tuple.
## Arguments
* `:price` is any `Money.t`
* `:interval` is the period of the plan. The valid intervals are
`:day`, `:week`, `:month` or `:year`.
* `:interval_count` is an integer count of the number of `:interval`s
of the plan. The default is `1`
## Returns
A `Money.Subscription.Plan.t`
## Examples
iex> Money.Subscription.Plan.new Money.new(:USD, 100), :month, 1
{:ok,
%Money.Subscription.Plan{
interval: :month,
interval_count: 1,
price: Money.new(:USD, 100)
}}
iex> Money.Subscription.Plan.new Money.new(:USD, 100), :month
{:ok,
%Money.Subscription.Plan{
interval: :month,
interval_count: 1,
price: Money.new(:USD, 100)
}}
iex> Money.Subscription.Plan.new Money.new(:USD, 100), :day, 30
{:ok,
%Money.Subscription.Plan{
interval: :day,
interval_count: 30,
price: Money.new(:USD, 100)
}}
iex> Money.Subscription.Plan.new 23, :day, 30
{:error, {Money.Invalid, "Invalid subscription plan definition"}}
"""
@spec new(Money.t(), interval(), interval_count()) ::
{:ok, t()} | {:error, {module(), String.t()}}
def new(price, interval, interval_count \\ 1)
def new(%Money{} = price, interval, interval_count)
when interval in @interval and is_integer(interval_count) do
{:ok, %__MODULE__{price: price, interval: interval, interval_count: interval_count}}
end
def new(_price, _interval, _interval_count) do
{:error, {Money.Invalid, "Invalid subscription plan definition"}}
end
@doc """
Returns a `Money.Subscription.Plan.t` or raises an
exception.
Takes the same arguments as `Money.Subscription.Plan.new/3`.
## Example
iex> Money.Subscription.Plan.new! Money.new(:USD, 100), :day, 30
%Money.Subscription.Plan{
interval: :day,
interval_count: 30,
price: Money.new(:USD, 100)
}
"""
@spec new!(Money.t(), interval(), interval_count()) :: t() | no_return()
def new!(price, interval, interval_count \\ 1)
def new!(price, interval, interval_count) do
case new(price, interval, interval_count) do
{:ok, plan} -> plan
{:error, {exception, reason}} -> raise exception, reason
end
end
end
|
lib/money/subscription/plan.ex
| 0.940763
| 0.693486
|
plan.ex
|
starcoder
|
defmodule SoftBank.ExchangeRates.CoinMarketCap do
@moduledoc """
Implements the `Money.ExchangeRates` for CoinMarketCap
Rates service.
## Required configuration:
The configuration key `:coin_market_cap_key` should be
set to your `app_id`. for example:
config :soft_bank,
coin_market_cap_key: "your_key"
or configure it via environment variable:
config :soft_bank,
coin_market_cap_key: {:system, "coin_market_cap_key"}
It is also possible to configure an alternative base url for this
service in case it changes in the future. For example:
config :soft_bank,
coin_market_cap_key: "your_key"
coin_market_cap_url: "https://pro-api.coinmarketcap.com"
"""
require Logger
alias SoftBank.ExchangeRates.CoinMarketCap.Retriever
alias SoftBank.Config
@behaviour Money.ExchangeRates
@rate_url "https://pro-api.coinmarketcap.com/v1"
@doc """
Update the retriever configuration to include the requirements
for CoinMarketCap Rates. This function is invoked when the
exchange rate service starts up, just after the ets table
:exchange_rates is created.
* `default_config` is the configuration returned by `Money.ExchangeRates.default_config/0`
Returns the configuration either unchanged or updated with
additional configuration specific to this exchange
rates retrieval module.
"""
def init(default_config) do
url = Application.get_env(:ex_money, :rate_url, @rate_url)
api_key = Application.get_env(:ex_money, :exchange_rates_api_key, nil)
Map.put(default_config, :retriever_options, %{url: url, api_key: api_key})
end
def decode_rates(body) do
%{"data" => data} = Money.json_library().decode!(body)
add_currencies_to_bank(data)
rates = marshall_rates(data)
rates
|> Cldr.Map.atomize_keys()
|> Enum.map(fn
{k, v} when is_float(v) -> {k, Decimal.from_float(v)}
{k, v} when is_integer(v) -> {k, Decimal.new(v)}
end)
|> Enum.into(%{})
end
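# Illustrative shape (derived from decode_rates/1 above): a listing entry
# %{"symbol" => "BTC", "quote" => %{"USD" => %{"price" => 40000.5}}} is keyed
# as "X" plus the first two symbol characters, so the decoded map contains
# %{XBT: Decimal.from_float(40000.5)}.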
defp marshall_rates(data) do
Enum.map(data, fn x ->
key = "X" <> String.slice(x["symbol"], 0..1)
value = x["quote"]["USD"]["price"]
{key, value}
end)
end
defp add_currencies_to_bank(data) do
Enum.each(data, fn x ->
key = "X" <> String.slice(x["symbol"], 0..1)
currency = %{
name: x["name"],
digits: 16,
symbol: key,
alt_code: x["slug"],
code: x["symbol"]
}
SoftBank.Currencies.new(currency)
end)
end
@doc """
Retrieves the latest exchange rates from CoinMarketCap site.
* `config` is the retrieval configuration. When invoked from the
exchange rates services this will be the config returned from
`Money.ExchangeRates.config/0`
Returns:
* `{:ok, rates}` if the rates can be retrieved
* `{:error, reason}` if rates cannot be retrieved
Typically this function is called by the exchange rates retrieval
service although it can be called outside that context as
required.
"""
@spec get_latest_rates(Money.ExchangeRates.Config.t()) :: {:ok, map()} | {:error, String.t()}
def get_latest_rates(config) do
url = config.retriever_options.url
api_key = config.retriever_options.api_key
retrieve_latest_rates(url, api_key, config)
end
defp retrieve_latest_rates(_url, nil, _config) do
{:error, api_key_not_configured()}
end
@latest_rates "/cryptocurrency/listings/latest"
defp retrieve_latest_rates(url, api_key, config) do
endpoint = url <> @latest_rates <> "?CMC_PRO_API_KEY=" <> api_key
Retriever.retrieve_rates(endpoint, config)
end
@doc """
Retrieves the historic exchange rates from CoinMarketCap.
* `date` is a date returned by `Date.new/3` or any struct with the
elements `:year`, `:month` and `:day`.
* `config` is the retrieval configuration. When invoked from the
exchange rates services this will be the config returned from
`Money.ExchangeRates.config/0`
Returns:
* `{:ok, rates}` if the rates can be retrieved
* `{:error, reason}` if rates cannot be retrieved
Typically this function is called by the exchange rates retrieval
service although it can be called outside that context as
required.
"""
def get_historic_rates(date, config) do
url = config.retriever_options.url
api_key = config.retriever_options.api_key
retrieve_historic_rates(date, url, api_key, config)
end
defp retrieve_historic_rates(_date, _url, nil, _config) do
{:error, api_key_not_configured()}
end
@historic_rates "/historical/"
defp retrieve_historic_rates(%Date{calendar: Calendar.ISO} = date, url, api_key, config) do
date_string = Date.to_string(date)
# Appending the ISO date to the trailing-slash @historic_rates path is an
# assumption about the endpoint's URL format.
Retriever.retrieve_rates(
url <> @historic_rates <> date_string <> "?CMC_PRO_API_KEY=" <> api_key,
config
)
end
defp retrieve_historic_rates(%{year: year, month: month, day: day}, url, api_key, config) do
case Date.new(year, month, day) do
{:ok, date} -> retrieve_historic_rates(date, url, api_key, config)
error -> error
end
end
defp api_key_not_configured do
"exchange_rates_api_key is not configured. Rates are not retrieved."
end
end
|
lib/exchange_rates/coin_market_cap.ex
| 0.906839
| 0.415907
|
coin_market_cap.ex
|
starcoder
|
defmodule AshGraphql.Resource do
@moduledoc """
This Ash resource extension adds configuration for exposing a resource in a GraphQL API.
See `graphql/1` for more information.
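## Example

A sketch assembled from the entity examples defined in this module:

    graphql do
      type :post
      fields [:id, :title]

      queries do
        get :get_post, :default
        list :list_posts, :default
      end

      mutations do
        create :create_post, :default
      end
    end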
"""
alias Ash.Dsl.Extension
alias Ash.Query.Aggregate
alias AshGraphql.Resource
alias AshGraphql.Resource.{Mutation, Query}
@get %Ash.Dsl.Entity{
name: :get,
args: [:name, :action],
describe: "A query to fetch a record by primary key",
examples: [
"get :get_post, :default"
],
schema: Query.get_schema(),
target: Query,
auto_set_fields: [
type: :get
]
}
@list %Ash.Dsl.Entity{
name: :list,
schema: Query.list_schema(),
args: [:name, :action],
describe: "A query to fetch a list of records",
examples: [
"list :list_posts, :default"
],
target: Query,
auto_set_fields: [
type: :list
]
}
@create %Ash.Dsl.Entity{
name: :create,
schema: Mutation.create_schema(),
args: [:name, :action],
describe: "A mutation to create a record",
examples: [
"create :create_post, :default"
],
target: Mutation,
auto_set_fields: [
type: :create
]
}
@update %Ash.Dsl.Entity{
name: :update,
schema: Mutation.update_schema(),
args: [:name, :action],
describe: "A mutation to update a record",
examples: [
"update :update_post, :default"
],
target: Mutation,
auto_set_fields: [
type: :update
]
}
@destroy %Ash.Dsl.Entity{
name: :destroy,
schema: Mutation.destroy_schema(),
args: [:name, :action],
describe: "A mutation to destroy a record",
examples: [
"destroy :destroy_post, :default"
],
target: Mutation,
auto_set_fields: [
type: :destroy
]
}
@queries %Ash.Dsl.Section{
name: :queries,
describe: """
Queries (read actions) to expose for the resource.
""",
entities: [
@get,
@list
]
}
@mutations %Ash.Dsl.Section{
name: :mutations,
describe: """
Mutations (create/update/destroy actions) to expose for the resource.
""",
entities: [
@create,
@update,
@destroy
]
}
@graphql %Ash.Dsl.Section{
name: :graphql,
describe: """
Configuration for a given resource in graphql
""",
schema: [
type: [
type: :atom,
required: true,
doc: "The type to use for this entity in the graphql schema"
],
fields: [
type: {:custom, __MODULE__, :__fields, []},
required: true,
doc: "The fields from this entity to include in the graphql"
]
],
sections: [
@queries,
@mutations
]
}
@doc false
def __fields(fields) do
fields = List.wrap(fields)
if Enum.all?(fields, &is_atom/1) do
{:ok, fields}
else
{:error, "Expected `fields` to be a list of atoms"}
end
end
@transformers [
AshGraphql.Resource.Transformers.RequireIdPkey
]
use Extension, sections: [@graphql], transformers: @transformers
def queries(resource) do
Extension.get_entities(resource, [:graphql, :queries])
end
def mutations(resource) do
Extension.get_entities(resource, [:graphql, :mutations])
end
def type(resource) do
Extension.get_opt(resource, [:graphql], :type, nil)
end
def fields(resource) do
Extension.get_opt(resource, [:graphql], :fields, [])
end
@doc false
def queries(api, resource, schema) do
type = Resource.type(resource)
resource
|> queries()
|> Enum.map(fn query ->
%Absinthe.Blueprint.Schema.FieldDefinition{
arguments: args(query.type),
identifier: query.name,
middleware: [
{{AshGraphql.Graphql.Resolver, :resolve}, {api, resource, query.type, query.action}}
],
module: schema,
name: to_string(query.name),
type: query_type(query.type, type)
}
end)
end
@doc false
# sobelow_skip ["DOS.StringToAtom"]
def mutations(api, resource, schema) do
resource
|> mutations()
|> Enum.map(fn
%{type: :destroy} = mutation ->
%Absinthe.Blueprint.Schema.FieldDefinition{
arguments: [
%Absinthe.Blueprint.Schema.InputValueDefinition{
identifier: :id,
module: schema,
name: "id",
placement: :argument_definition,
type: :id
}
],
identifier: mutation.name,
middleware: [
{{AshGraphql.Graphql.Resolver, :mutate},
{api, resource, mutation.type, mutation.action}}
],
module: schema,
name: to_string(mutation.name),
type: String.to_atom("#{mutation.name}_result")
}
%{type: :create} = mutation ->
%Absinthe.Blueprint.Schema.FieldDefinition{
arguments: [
%Absinthe.Blueprint.Schema.InputValueDefinition{
identifier: :input,
module: schema,
name: "input",
placement: :argument_definition,
type: String.to_atom("#{mutation.name}_input")
}
],
identifier: mutation.name,
middleware: [
{{AshGraphql.Graphql.Resolver, :mutate},
{api, resource, mutation.type, mutation.action}}
],
module: schema,
name: to_string(mutation.name),
type: String.to_atom("#{mutation.name}_result")
}
mutation ->
%Absinthe.Blueprint.Schema.FieldDefinition{
arguments: [
%Absinthe.Blueprint.Schema.InputValueDefinition{
identifier: :id,
module: schema,
name: "id",
placement: :argument_definition,
type: :id
},
%Absinthe.Blueprint.Schema.InputValueDefinition{
identifier: :input,
module: schema,
name: "input",
placement: :argument_definition,
type: String.to_atom("#{mutation.name}_input")
}
],
identifier: mutation.name,
middleware: [
{{AshGraphql.Graphql.Resolver, :mutate},
{api, resource, mutation.type, mutation.action}}
],
module: schema,
name: to_string(mutation.name),
type: String.to_atom("#{mutation.name}_result")
}
end)
end
@doc false
# sobelow_skip ["DOS.StringToAtom"]
def mutation_types(resource, schema) do
resource
|> mutations()
|> Enum.flat_map(fn mutation ->
description =
if mutation.type == :destroy do
"The record that was successfully deleted"
else
"The successful result of the mutation"
end
result = %Absinthe.Blueprint.Schema.ObjectTypeDefinition{
description: "The result of the #{inspect(mutation.name)} mutation",
fields: [
%Absinthe.Blueprint.Schema.FieldDefinition{
description: description,
identifier: :result,
module: schema,
name: "result",
type: Resource.type(resource)
},
%Absinthe.Blueprint.Schema.FieldDefinition{
description: "Any errors generated, if the mutation failed",
identifier: :errors,
module: schema,
name: "errors",
type: %Absinthe.Blueprint.TypeReference.List{
of_type: :mutation_error
}
}
],
identifier: String.to_atom("#{mutation.name}_result"),
module: schema,
name: Macro.camelize("#{mutation.name}_result")
}
if mutation.type == :destroy do
[result]
else
input = %Absinthe.Blueprint.Schema.InputObjectTypeDefinition{
fields: mutation_fields(resource, schema, mutation),
identifier: String.to_atom("#{mutation.name}_input"),
module: schema,
name: Macro.camelize("#{mutation.name}_input")
}
[input, result]
end
end)
end
defp mutation_fields(resource, schema, mutation) do
fields = Resource.fields(resource)
attribute_fields =
resource
|> Ash.Resource.attributes()
|> Enum.filter(&(&1.name in fields))
|> Enum.filter(& &1.writable?)
|> Enum.map(fn attribute ->
type = field_type(attribute.type)
field_type =
if attribute.allow_nil? || mutation.type == :update do
type
else
%Absinthe.Blueprint.TypeReference.NonNull{
of_type: type
}
end
%Absinthe.Blueprint.Schema.FieldDefinition{
description: attribute.description,
identifier: attribute.name,
module: schema,
name: to_string(attribute.name),
type: field_type
}
end)
relationship_fields =
resource
|> Ash.Resource.relationships()
|> Enum.filter(&(&1.name in fields))
|> Enum.filter(fn relationship ->
Resource in Ash.Resource.extensions(relationship.destination)
end)
|> Enum.map(fn
%{cardinality: :one} = relationship ->
%Absinthe.Blueprint.Schema.FieldDefinition{
identifier: relationship.name,
module: schema,
name: to_string(relationship.name),
type: :id
}
%{cardinality: :many} = relationship ->
case mutation.type do
:update ->
%Absinthe.Blueprint.Schema.FieldDefinition{
identifier: relationship.name,
module: schema,
name: to_string(relationship.name),
type: :relationship_change
}
:create ->
%Absinthe.Blueprint.Schema.FieldDefinition{
identifier: relationship.name,
module: schema,
name: to_string(relationship.name),
type: %Absinthe.Blueprint.TypeReference.List{
of_type: :id
}
}
end
end)
attribute_fields ++ relationship_fields
end
defp query_type(:get, type), do: type
# sobelow_skip ["DOS.StringToAtom"]
defp query_type(:list, type), do: String.to_atom("page_of_#{type}")
defp args(:get) do
[
%Absinthe.Blueprint.Schema.InputValueDefinition{
name: "id",
identifier: :id,
type: :id,
description: "The id of the record"
}
]
end
defp args(:list) do
[
%Absinthe.Blueprint.Schema.InputValueDefinition{
name: "limit",
identifier: :limit,
type: :integer,
description: "The limit of records to return",
default_value: 20
},
%Absinthe.Blueprint.Schema.InputValueDefinition{
name: "offset",
identifier: :offset,
type: :integer,
description: "The count of records to skip",
default_value: 0
},
%Absinthe.Blueprint.Schema.InputValueDefinition{
name: "filter",
identifier: :filter,
type: :string,
description: "A json encoded filter to apply"
}
]
end
@doc false
def type_definitions(resource, schema) do
[
type_definition(resource, schema),
page_of(resource, schema)
]
end
# sobelow_skip ["DOS.StringToAtom"]
defp page_of(resource, schema) do
type = Resource.type(resource)
%Absinthe.Blueprint.Schema.ObjectTypeDefinition{
description: "A page of #{inspect(type)}",
fields: [
%Absinthe.Blueprint.Schema.FieldDefinition{
description: "The records contained in the page",
identifier: :results,
module: schema,
name: "results",
type: %Absinthe.Blueprint.TypeReference.List{
of_type: type
}
},
%Absinthe.Blueprint.Schema.FieldDefinition{
description: "The count of records",
identifier: :count,
module: schema,
name: "count",
type: :integer
}
],
identifier: String.to_atom("page_of_#{type}"),
module: schema,
name: Macro.camelize("page_of_#{type}")
}
end
defp type_definition(resource, schema) do
type = Resource.type(resource)
%Absinthe.Blueprint.Schema.ObjectTypeDefinition{
description: Ash.Resource.description(resource),
fields: fields(resource, schema),
identifier: type,
module: schema,
name: Macro.camelize(to_string(type))
}
end
defp fields(resource, schema) do
fields = Resource.fields(resource)
attributes(resource, schema, fields) ++
relationships(resource, schema, fields) ++
aggregates(resource, schema, fields)
end
defp attributes(resource, schema, fields) do
resource
|> Ash.Resource.attributes()
|> Enum.filter(&(&1.name in fields))
|> Enum.map(fn
%{name: :id} = attribute ->
%Absinthe.Blueprint.Schema.FieldDefinition{
description: attribute.description,
identifier: :id,
module: schema,
name: "id",
type: :id
}
attribute ->
%Absinthe.Blueprint.Schema.FieldDefinition{
description: attribute.description,
identifier: attribute.name,
module: schema,
name: to_string(attribute.name),
type: field_type(attribute.type)
}
end)
end
# sobelow_skip ["DOS.StringToAtom"]
defp relationships(resource, schema, fields) do
resource
|> Ash.Resource.relationships()
|> Enum.filter(&(&1.name in fields))
|> Enum.filter(fn relationship ->
Resource in Ash.Resource.extensions(relationship.destination)
end)
|> Enum.map(fn
%{cardinality: :one} = relationship ->
type = Resource.type(relationship.destination)
%Absinthe.Blueprint.Schema.FieldDefinition{
identifier: relationship.name,
module: schema,
name: to_string(relationship.name),
middleware: [
{{AshGraphql.Graphql.Resolver, :resolve_assoc}, {:one, relationship.name}}
],
arguments: [],
type: type
}
%{cardinality: :many} = relationship ->
type = Resource.type(relationship.destination)
query_type = String.to_atom("page_of_#{type}")
%Absinthe.Blueprint.Schema.FieldDefinition{
identifier: relationship.name,
module: schema,
name: to_string(relationship.name),
middleware: [
{{AshGraphql.Graphql.Resolver, :resolve_assoc}, {:many, relationship.name}}
],
arguments: args(:list),
type: query_type
}
end)
end
defp aggregates(resource, schema, fields) do
resource
|> Ash.Resource.aggregates()
|> Enum.filter(&(&1.name in fields))
|> Enum.map(fn aggregate ->
{:ok, type} = Aggregate.kind_to_type(aggregate.kind)
%Absinthe.Blueprint.Schema.FieldDefinition{
identifier: aggregate.name,
module: schema,
name: to_string(aggregate.name),
type: field_type(type)
}
end)
end
defp field_type(Ash.Type.String), do: :string
defp field_type(Ash.Type.UUID), do: :string
defp field_type(Ash.Type.Integer), do: :integer
defp field_type({:array, type}) do
%Absinthe.Blueprint.TypeReference.List{
of_type: field_type(type)
}
end
end
|
lib/resource/resource.ex
| 0.833392
| 0.42937
|
resource.ex
|
starcoder
|
defmodule MFL.League do
@moduledoc """
Wrapper for requests which return information for a specific
league.
This module contains functions that make API requests that
require a league `id` as an argument.
The structure for every call is `MFL.League.request_type(year, id, options)`
where the `id` is a league `id` and the MyFantasyLeague request
name is "requestType".
See the `MFL` module documentation for a discussion of optional
request/function parameters.
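For example (year and league id are illustrative):

    MFL.League.schedule("2018", "12345", w: "1")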
"""
import MFL.Request
@doc """
Returns league settings data for the specified league.
Also includes some franchise information and links to previous years'
home pages for that league.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=league)
"""
def league(year, league, options \\ []) do
retrieve_league_node(["league"], year, league, options)
end
@doc """
Returns league rules for a given league.
Rules are labelled using abbreviations; descriptions
for rule abbreviations are available via `MFL.all_rules/2`.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=rules)
"""
def rules(year, league, options \\ []) do
records = retrieve_league_node(["rules", "positionRules"], year, league, options)
case records do
{:error, message} ->
{:error, message}
records ->
flatten_rules(records)
end
end
@doc """
Returns rosters (list of players and player data) and franchise
`id` for each franchise.
Each franchise has an `id` (string) and a list of players (maps).
Each player map includes roster status (`"ROSTER"`|`"TAXI SQUAD"`
|`"INJURED RESERVE"`) and possibly other data depending on league
settings, such as salary information in salary cap leagues.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=rosters)
"""
def rosters(year, league, options \\ []) do
retrieve_league_node(["rosters", "franchise"], year, league, options)
end
@doc """
Returns a list of player contract information.
Note that salary values are returned as strings,
and the associated numbers may/may not have decimal
values.
This appears somewhat arbitrary and not related to whether
the league settings allow for sub-$1 salaries.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=salaries)
"""
def salaries(year, league, options \\ []) do
retrieve_league_node(["salaries", "leagueUnit", "player"], year, league, options)
end
@doc """
Returns a list of franchises and league standings.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=leagueStandings)
"""
def league_standings(year, league, options \\ []) do
retrieve_league_node(["leagueStandings", "franchise"], year, league, options)
end
@doc """
Returns a list of weekly matchup information.
The returned data include score and winning/losing
franchise in each matchup for each week. The results
can be filtered by week and/or franchise via the `w:`
and `f:` options.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=schedule)
"""
def schedule(year, league, options \\ []) do
retrieve_league_node(["schedule", "weeklySchedule"], year, league, options)
end
@doc """
Returns a list of weekly team/player scores.
A week number (as a string) or `"YTD"` can provided
to specify a week or weeks; otherwise results default
to the current week.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=weeklyResults)
"""
def weekly_results(year, league, options \\ []) do
retrieve_league_node(["weeklyResults"], year, league, options)
end
@doc """
Returns live scoring results for a given week.
Accepts week number option to specify a week or weeks;
otherwise results default to the current week.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=liveScoring)
"""
def live_scoring(year, league, options \\ []) do
retrieve_league_node(["liveScoring"], year, league, options)
end
@doc """
Returns all player scores (including free agents) for a given week.
This request supports options for filtering by week, year,
position, and specific player as well as other summary options.
Note that per the documentation the league ID is optional, but
calls without a league ID do not appear to produce meaningful data
and are not supported.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=playerScores)
"""
def player_scores(year, league, options \\ []) do
retrieve_league_node(["playerScores"], year, league, options)
end
@doc """
Returns draft results for specified league
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=draftResults)
"""
def draft_results(year, league, options \\ []) do
retrieve_league_node(["draftResults", "draftUnit"], year, league, options)
end
@doc """
Returns list of future draft picks by franchise.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=futureDraftPicks)
"""
def future_draft_picks(year, league, options \\ []) do
retrieve_league_node(["futureDraftPicks", "franchise"], year, league, options)
end
@doc """
Returns list of auction results
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=auctionResults)
"""
def auction_results(year, league, options \\ []) do
retrieve_league_node(["auctionResults", "auctionUnit", "auction"], year, league, options)
end
@doc """
Returns a list of player `id`s for free agents.
Just `id`s are returned; these must then be merged with data
from the other requests (e.g. `MFL.players/2` or elsewhere
to incorporate any related data such as the player's name or
team.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=freeAgents)
"""
def free_agents(year, league, options \\ []) do
decoded = retrieve_league_node(["freeAgents", "leagueUnit", "player"], year, league, options)
case decoded do
{:error, message} ->
%{error: message}
records ->
records
|> Enum.map(&Map.take(&1, ["id"]))
|> Enum.map(&Map.values(&1))
|> List.flatten()
end
end
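# Illustrative sketch: free_agents/3 returns bare ids, so they are typically
# merged with data from another request such as MFL.players/2 (mentioned in
# the docs above; here it is assumed to take a year plus default options and
# to return maps keyed by "id"):
#
#     ids = MFL.League.free_agents("2018", "12345")
#     MFL.players("2018") |> Enum.filter(&(&1["id"] in ids))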
@doc """
Returns list of transactions.
Supports several filters, e.g. week, franchise, transaction type
and number of days.
Note that the maps for different types of transactions have different
keys. Add/drop-type transactions also appear to have a kind of pipe notation
such that the `"transaction"` key for this kind of transaction may look like:
```
"transaction" => "1234,|2|,6789" # $2 bid on player 1234, drop 6789
"transaction" => "1234,|1|" # $1 bid on player 1234, drop no one
"transaction" => "|6789" # drop 6789
```
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=transactions)
"""
def transactions(year, league, options \\ []) do
retrieve_league_node(["transactions", "transaction"], year, league, options)
end
@doc """
Returns list of projected scores for specified players.
Note that if only one player is returned, the return value is a map,
not a list with one map element.
Accepts week, position and free agent filters. Expects a player `id`
or comma-delimited list. If no player is provided, it appears to
return the projected score for an arbitrary player. If no week is
provided, returns results for the current week. If `count:` is
specified, returns that many arbitrary players.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=projectedScores)
"""
def projected_scores(year, league, options \\ []) do
retrieve_league_node(["projectedScores"], year, league, options)
end
@doc """
Returns list of message board topics (threads).
Each topic has an `"id"` key that can be passed to `MFL.League.message_board_thread/4`
as the `thread` argument.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=messageBoard)
"""
def message_board(year, league, options \\ []) do
retrieve_league_node(["messageBoard", "thread"], year, league, options)
end
@doc """
Returns list of messages for a given thread.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=messageBoardThread)
"""
def message_board_thread(year, league, thread, options \\ []) do
options = Keyword.merge(options, thread: thread)
retrieve_league_node(["messageBoardThread", "post"], year, league, options)
end
@doc """
Returns players' "status".
Note if only one player is returned, the return value is a map,
not a list with one map element. If no week is specified, defaults
to the current week.
`"status"` appears to formatted as follows:
```
"status" => "Joe's Team - S" # Started in specified week for Joe's Team
"status" => "Joe's Team - NS" # Did not start in specified week for Joe's Team
"status" => "Joe's Team - S<br />Free Agent" # Started in specified week for Joe's Team, now a free agent(?)
"status" => "Free Agent" # Was a free agent
```
There may be other statuses heretofore unobserved.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=auctionResults)
"""
def player_status(year, league, player_list, options \\ []) do
nodes =
if length(player_list) > 1 do
["playerStatuses", "playerStatus"]
else
["playerStatus"]
end
player_list = Enum.join(player_list, "%2C")
options = Keyword.merge(options, p: player_list)
case fetch_league("playerStatus", year, league, options) do
{:ok, response} ->
decode_nodes(response.body, nodes)
{:error, message} ->
%{error: message}
end
end
@doc """
Returns list of NFL teams and points allowed by position.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=pointsAllowed)
"""
def points_allowed(year, league, options \\ []) do
retrieve_league_node(["pointsAllowed", "team"], year, league, options)
end
@doc """
Returns list of NFL or fantasy pool picks.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=pool)
"""
def pool(year, league, options \\ []) do
case fetch_league("pool", year, league, options) do
{:ok, response} ->
decode_nodes(response.body, ["poolPicks"])
{:error, message} ->
%{error: message}
end
end
@doc """
Returns a list of all playoff brackets for the league.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=playoffBrackets)
"""
def playoff_brackets(year, league, options \\ []) do
retrieve_league_node(["playoffBrackets", "playoffBracket"], year, league, options)
end
@doc """
Returns skins/tabs/home page modules set up by commissioner.
[MyFantasyLeague documentation](https://www03.myfantasyleague.com/2018/api_info?STATE=test&CMD=export&TYPE=appearance)
"""
def appearance(year, league, options \\ []) do
retrieve_league_node(["appearance"], year, league, options)
end
defp flatten_rules(map) when is_map(map) do
Map.put(map, "rule", flatten_rule_node(Map.get(map, "rule")))
end
defp flatten_rules(list) when is_list(list) do
Enum.map(list, &flatten_rules/1)
end
defp flatten_rule_node(list) when is_list(list) do
Enum.map(list, &flatten_nodes(&1, ["event", "points", "range"]))
end
defp flatten_rule_node(map) when is_map(map) do
flatten_nodes(map, ["event", "points", "range"])
end
defp flatten_nodes(map, nodes) do
Enum.reduce(nodes, map, &Map.put(&2, &1, &2[&1]["$t"]))
end
end
|
lib/mfl/league.ex
| 0.828523
| 0.658452
|
league.ex
|
starcoder
|
defmodule MDACube do
@moduledoc """
Experimental Multi-Dimensional Attribute Cube.
Allows setting attributes by coordinates. Coordinates may be partial, in which
case every member of an unspecified dimension shares the same attribute value.
Possibly useful for: ranking by multiple attributes.
Provides great visibility.
Example (a minimal, illustrative sketch; dimension and attribute names are
made up):
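    cube =
      MDACube.new()
      |> MDACube.set(%{lang: :en}, :greeting, "hello")
      |> MDACube.set(%{lang: :fr}, :greeting, "bonjour")
      |> MDACube.set(%{lang: :fr, tone: :formal}, :greeting, "bonjour madame")

    MDACube.count(cube)
    #=> 2

    Enum.map(cube, & &1.attributes.greeting)
    #=> ["hello", "bonjour madame"]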
"""
defstruct [
dimensions: %{}, # %{dimension1: %MapSet{} = items1}
attributes: %{}, # %{attribute1: %{CoordinatesMap => value}}
]
def new(), do: %__MODULE__{}
@doc """
Set attribute by coordinates (can be partial), attribute label and value
"""
def set(%__MODULE__{} = cube, coordinates, attribute_label, value)
when is_map(coordinates) do
facts = cube.attributes
|> Map.get(attribute_label, %{})
|> Map.put(coordinates, value)
attributes = Map.put(cube.attributes, attribute_label, facts)
dimensions = for dimension <- coordinates |> Map.keys do
members = cube.dimensions
|> Map.get(dimension, %MapSet{})
|> MapSet.put(coordinates[dimension])
{dimension, members}
end
|> Enum.into(cube.dimensions)
%{cube | attributes: attributes, dimensions: dimensions}
end
@doc """
Returns cells count
"""
def count(%__MODULE__{dimensions: dimensions} = _cube)
when map_size(dimensions) == 0, do: 0
def count(%__MODULE__{} = cube) do
cube.dimensions
|> Map.values
|> Enum.reduce(1, fn x, acc -> MapSet.size(x) * acc end)
end
end
defimpl Enumerable, for: MDACube do
@moduledoc """
Enumerable implementation for MDACube, Enum module is fully supported.
"""
defstruct [
index: 0,
count: 0,
dimensions: [],
attributes: [],
cube: nil
]
@doc """
Enumerable reduce implementation
"""
def reduce(%MDACube{} = cube, action, fun) do
reduce(get_iterable(cube), action, fun)
end
def reduce(_iterable, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(iterable, {:suspend, acc}, fun), do:
{:suspended, acc, &reduce(iterable, &1, fun)}
def reduce(%__MODULE__{index: index, count: index}, {:cont, acc}, _fun), do:
{:done, acc}
def reduce(%__MODULE__{index: index} = iterable, {:cont, acc}, fun) do
coordinates = get_row_coordinates(iterable, index)
attributes = get_row(iterable, coordinates)
data = %{coordinates: coordinates, attributes: attributes}
reduce(%{iterable | index: index+1}, fun.(data, acc), fun)
end
def count(cube), do: {:ok, MDACube.count(cube)}
@doc """
Enumerable member? implementation
"""
def member?(cube, x), do: {:ok, get_row(cube, x.coordinates) == x.attributes}
@doc """
Enumerable slice implementation
"""
def slice(cube) do
reducer = fn x, acc -> {:cont, [x | acc]} end
slicer = fn index, length ->
iterable = %{get_iterable(cube) | index: index, count: index+length}
iterable
|> reduce({:cont, []}, reducer)
|> elem(1)
|> :lists.reverse()
end
{:ok, MDACube.count(cube), slicer}
end
defp get(coordinates, attribute) when is_map(coordinates) do
attribute_facts = attribute.facts
coordinates = attribute.subsets
|> Enum.map(&(Map.take(coordinates, &1)))
|> Enum.find(&(Map.has_key?(attribute_facts, &1)))
coordinates && attribute_facts[coordinates]
end
defp get_row(%__MODULE__{} = iterable, coordinates) do
iterable.attributes
|> Enum.reduce(%{}, fn attribute, acc ->
Map.put(acc, attribute.label, get(coordinates, attribute))
end)
end
defp get_row_coordinates(iterable, index) do
members_indexes = get_members_indexes(iterable, index)
Enum.zip(iterable.dimensions, members_indexes)
|> Enum.reduce(%{}, fn {item, member_index}, acc ->
Map.put(acc, item.dimension, Enum.at(item.members, member_index))
end)
end
defp get_members_indexes(iterable, index) do
iterable.dimensions
|> Enum.reverse
|> do_get_members_indexes(index)
|> Enum.reverse
end
defp do_get_members_indexes([], 0), do: []
defp do_get_members_indexes([%{members_count: members_count} | tail], index) do
member_index = rem(index, members_count)
rest_index = div(index, members_count)
[member_index | do_get_members_indexes(tail, rest_index)]
end
defp get_iterable(cube) do
%__MODULE__{
index: 0,
count: MDACube.count(cube),
dimensions: dimensions_ordered(cube),
attributes: cube.attributes |> aggregate_attributes,
cube: cube}
end
defp aggregate_attributes(attributes) do
attributes
|> Enum.map(fn {label, facts} ->
subsets = facts
|> Enum.reduce(MapSet.new(), fn {coordinates, _value}, acc ->
MapSet.union(acc, [coordinates |> Map.keys |> Enum.sort] |> MapSet.new)
end)
|> MapSet.to_list
|> Enum.sort_by(&(length(&1)), &>=/2)
%{label: label, subsets: subsets, facts: facts}
end)
end
defp dimensions_ordered(%MDACube{} = cube) do
for dimension <- cube.dimensions |> Map.keys |> Enum.sort do
members = cube.dimensions
|> Map.get(dimension)
|> MapSet.to_list
|> Enum.sort
%{dimension: dimension,
members: members,
members_count: length(members)}
end
end
end
|
lib/mdacube.ex
| 0.722625
| 0.502991
|
mdacube.ex
|
starcoder
|
defmodule Sanbase.SocialData.MetricAdapter do
@moduledoc """
Provides access and metadata for social metrics - these metrics are currently taken from internal service called metricshub.
All `_total` metrics are served from 2 different places depending on the invocation.
The ones with `slug` argument are served from clickhouse, the others with `text` argument
from metricshub.
"""
@behaviour Sanbase.Metric.Behaviour
import Sanbase.Metric.Transform
alias Sanbase.SocialData.SocialHelper
alias Sanbase.Model.Project
@aggregations [:sum]
@social_volume_timeseries_metrics [
# Social volume counts the mentions of a given word or words describing as subject
# A project can be addressed by different words.
# Example: `btc` and `bitcoin` refer to bitcoin
"social_volume_telegram",
"social_volume_reddit",
"social_volume_twitter",
"social_volume_bitcointalk",
"social_volume_total"
]
@community_messages_count_timeseries_metrics [
# Community messages count counts the total number of messages in a project's
# own social medium. All messages are counted. Handles spam.
"community_messages_count_telegram",
"community_messages_count_total"
]
@social_dominance_timeseries_metrics [
"social_dominance_telegram",
"social_dominance_reddit",
"social_dominance_total"
]
@sentiment_timeseries_metrics for name <- ["sentiment"],
type <- ["positive", "negative", "balance", "volume_consumed"],
source <-
["total"] ++ Sanbase.SocialData.SocialHelper.sources(),
do: "#{name}_#{type}_#{source}"
@active_users_timeseries_metrics ["social_active_users"]
@timeseries_metrics @social_dominance_timeseries_metrics ++
@social_volume_timeseries_metrics ++
@community_messages_count_timeseries_metrics ++
@sentiment_timeseries_metrics ++
@active_users_timeseries_metrics
@histogram_metrics []
@table_metrics []
@metrics @histogram_metrics ++ @timeseries_metrics ++ @table_metrics
@social_volume_metrics_access_map @social_volume_timeseries_metrics
|> Enum.into(
%{},
&{&1, %{"historical" => :restricted, "realtime" => :free}}
)
@access_map (@metrics -- @social_volume_timeseries_metrics)
|> Enum.reduce(%{}, fn metric, acc ->
Map.put(acc, metric, :restricted)
end)
|> Map.merge(@social_volume_metrics_access_map)
@min_plan_map Enum.reduce(@metrics, %{}, fn metric, acc -> Map.put(acc, metric, :free) end)
@required_selectors Enum.into(@metrics, %{}, &{&1, []})
|> Map.put("social_active_users", [[:source]])
@default_complexity_weight 1
@impl Sanbase.Metric.Behaviour
def has_incomplete_data?(_), do: false
@impl Sanbase.Metric.Behaviour
def complexity_weight(_), do: @default_complexity_weight
@impl Sanbase.Metric.Behaviour
def required_selectors(), do: @required_selectors
@impl Sanbase.Metric.Behaviour
def timeseries_data(metric, selector, from, to, interval, _opts)
when metric in @social_volume_timeseries_metrics do
"social_volume_" <> source = metric
Sanbase.SocialData.social_volume(selector, from, to, interval, source)
|> transform_to_value_pairs(:mentions_count)
end
def timeseries_data(metric, %{} = selector, from, to, interval, _opts)
when metric in @social_dominance_timeseries_metrics do
"social_dominance_" <> source = metric
Sanbase.SocialData.social_dominance(selector, from, to, interval, source)
|> transform_to_value_pairs(:dominance)
end
def timeseries_data(metric, %{slug: _slug} = selector, from, to, interval, _opts)
when metric in @community_messages_count_timeseries_metrics do
"community_messages_count_" <> source = metric
Sanbase.SocialData.community_messages_count(selector, from, to, interval, source)
|> transform_to_value_pairs(:mentions_count)
end
def timeseries_data(metric, %{} = selector, from, to, interval, _opts)
when metric in @sentiment_timeseries_metrics do
"sentiment_" <> type_source = metric
{type, source} = SocialHelper.split_by_source(type_source)
Sanbase.SocialData.sentiment(selector, from, to, interval, source, type)
|> transform_to_value_pairs(:value)
end
def timeseries_data(metric, %{source: _source} = selector, from, to, interval, _opts)
when metric in @active_users_timeseries_metrics do
Sanbase.SocialData.social_active_users(selector, from, to, interval)
end
@impl Sanbase.Metric.Behaviour
def aggregated_timeseries_data(metric, selector, from, to, opts)
when metric in @social_volume_timeseries_metrics or
metric in @community_messages_count_timeseries_metrics do
case timeseries_data(metric, selector, from, to, "1h", opts) do
{:ok, result} ->
value = Enum.reduce(result, 0, &(&1.value + &2))
{:ok, %{value: value}}
{:error, error} ->
{:error, error}
end
end
def aggregated_timeseries_data(metric, selector, from, to, opts)
when metric in @social_dominance_timeseries_metrics do
case timeseries_data(metric, selector, from, to, "1h", opts) do
{:ok, result} ->
value =
Enum.map(result, & &1.value)
|> Sanbase.Math.average()
{:ok, %{value: value}}
{:error, error} ->
{:error, error}
end
end
@impl Sanbase.Metric.Behaviour
def slugs_by_filter(_metric, _from, _to, _operator, _threshold, _opts) do
{:error, "Slugs filtering is not implemented for Social Data."}
end
@impl Sanbase.Metric.Behaviour
def slugs_order(_metric, _from, _to, _direction, _opts) do
{:error, "Slugs ordering is not implemented for Social Data."}
end
@impl Sanbase.Metric.Behaviour
def human_readable_name(metric) when metric in @metrics do
human_readable_name =
String.split(metric, "_")
|> Enum.map(&String.capitalize/1)
|> Enum.join(" ")
{:ok, human_readable_name}
end
@impl Sanbase.Metric.Behaviour
def available_aggregations(), do: @aggregations
@impl Sanbase.Metric.Behaviour
def available_slugs(),
do: {:ok, Project.List.projects_slugs(preload?: false)}
@impl Sanbase.Metric.Behaviour
def available_slugs("social_volume_" <> _source),
do: {:ok, Project.List.projects_slugs(preload?: false)}
def available_slugs("social_dominance_" <> _source),
do: {:ok, Project.List.projects_slugs(preload?: false)}
def available_slugs("community_messages_count_" <> _source),
do: {:ok, Project.List.projects_by_non_null_field(:telegram_link) |> Enum.map(& &1.slug)}
@impl Sanbase.Metric.Behaviour
def available_timeseries_metrics(), do: @timeseries_metrics
@impl Sanbase.Metric.Behaviour
def available_histogram_metrics(), do: @histogram_metrics
@impl Sanbase.Metric.Behaviour
def available_table_metrics(), do: @table_metrics
@impl Sanbase.Metric.Behaviour
def available_metrics(), do: @metrics
@impl Sanbase.Metric.Behaviour
def available_metrics(%{slug: slug}) do
with %Project{telegram_link: telegram_link} <- Project.by_slug(slug, preload?: false) do
metrics =
case is_binary(telegram_link) do
true -> @metrics
false -> @metrics -- @community_messages_count_timeseries_metrics
end
{:ok, metrics}
end
end
@impl Sanbase.Metric.Behaviour
def free_metrics(), do: []
@impl Sanbase.Metric.Behaviour
def restricted_metrics(), do: @metrics
@impl Sanbase.Metric.Behaviour
def access_map(), do: @access_map
@impl Sanbase.Metric.Behaviour
def min_plan_map(), do: @min_plan_map
@impl Sanbase.Metric.Behaviour
def metadata(metric) do
selectors =
case metric do
"community_messages_count" <> _ -> [:slug]
"social_active_users" -> [:source]
_ -> [:slug, :text]
end
{:ok,
%{
metric: metric,
min_interval: "5m",
default_aggregation: :sum,
available_aggregations: @aggregations,
available_selectors: selectors,
data_type: :timeseries,
complexity_weight: @default_complexity_weight
}}
end
@impl Sanbase.Metric.Behaviour
def first_datetime(metric, _selector) do
{_metric, source} = SocialHelper.split_by_source(metric)
source |> source_first_datetime()
end
@impl Sanbase.Metric.Behaviour
def last_datetime_computed_at(_metric, _selector), do: {:ok, Timex.now()}
# Private functions
# total has the datetime of the earliest of all - bitcointalk
defp source_first_datetime("total"), do: source_first_datetime("bitcointalk")
defp source_first_datetime("telegram"), do: {:ok, ~U[2016-03-29 00:00:00Z]}
defp source_first_datetime("twitter"), do: {:ok, ~U[2018-02-13 00:00:00Z]}
defp source_first_datetime("reddit"), do: {:ok, ~U[2016-01-01 00:00:00Z]}
defp source_first_datetime("bitcointalk"), do: {:ok, ~U[2011-06-01 00:00:00Z]}
end
|
lib/sanbase/social_data/metric_adapter.ex
| 0.916787
| 0.470797
|
metric_adapter.ex
|
starcoder
|
defmodule Rinku do
@moduledoc """
A pattern for composing functions to execute in a chain.
Execution will stop when all links in the chain have been resolved, or any link in the chain returns an error.
The initial input will be provided as the first argument in the chain.
The result of each link in the chain will be supplied to the next link in the chain.
The input will always be the first argument provided to the next link in the chain.
"""
defstruct [:links, :resolved, :result]
alias Rinku.Link
alias Rinku.Resolved
@type t() :: %__MODULE__{
links: [Link.t()],
resolved: [Resolved.t()],
result: any()
}
@type link_callback() ::
(... -> any() | {:error, any()})
| {(... -> any() | {:error, any()}), term() | list()}
| {module(), atom(), term() | list()}
@doc """
Create a new Rinku chain.
iex> Rinku.new()
%Rinku{
links: [],
resolved: [
%Rinku.Resolved{
name: :seed,
result: nil
}
],
result: nil
}
"""
@spec new(initial_value :: any(), input_name :: String.t() | atom()) :: t()
def new(initial_value \\ nil, input_name \\ :seed) do
%__MODULE__{
links: [],
resolved: [%Resolved{result: initial_value, name: input_name}],
result: nil
}
end
@doc """
Add a new link to the Rinku chain.
"""
@spec link(rinku :: t(), new_link :: link_callback(), link_name :: String.t() | atom()) :: t()
def link(%__MODULE__{links: links} = rinku, new_link, link_name \\ nil) do
link = Link.new(new_link, link_name)
%__MODULE__{rinku | links: [link | links]}
end
@doc """
Execute a built rinku chain.
"""
@spec run(rinku :: t()) :: t()
def run(%__MODULE__{links: links, resolved: resolved} = rinku) do
[final | _] =
resolved_links =
links
|> Enum.reverse()
|> Enum.reduce_while(resolved, fn link, [previous | _resolved] = links ->
resolved_link = Link.resolve(link, previous.result)
end_loop(resolved_link.result, [resolved_link | links])
end)
%__MODULE__{
rinku
| resolved: resolved_links,
result: final.result
}
end
@spec end_loop(Link.t(), list()) :: {:cont, any()} | {:halt, any()}
defp end_loop(result, updated_links) when is_tuple(result) do
if elem(result, 0) == :error do
{:halt, updated_links}
else
{:cont, updated_links}
end
end
defp end_loop(:error, updated_links), do: {:halt, updated_links}
defp end_loop(_loop_result, updated_links), do: {:cont, updated_links}
@doc """
Get the result from a processed chain.
iex> Rinku.new() |> Rinku.link(fn _ -> 1 end) |> Rinku.run() |> Rinku.result()
1
"""
@spec result(rinku :: t()) :: any() | {:error, any()}
def result(%__MODULE__{result: result}), do: result
@doc """
Get the result for a specific named execution step.
iex> Rinku.new() |> Rinku.link(fn _ -> 1 end, :step1) |> Rinku.link(fn _ -> 2 end, :step2) |> Rinku.run() |> Rinku.link_result(:step1)
1
"""
@spec link_result(rinku :: t(), link_name :: String.t()) :: any() | {:error, any()}
def link_result(%__MODULE__{resolved: resolved}, resolved_name) do
case Enum.find(resolved, &(&1.name == resolved_name)) do
nil -> nil
resolved -> resolved.result
end
end
end
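# Error short-circuit sketch (illustrative): once a link returns an error
# tuple, the remaining links are skipped and the error becomes the result.
#
#     Rinku.new(1)
#     |> Rinku.link(fn n -> n + 1 end, :inc)
#     |> Rinku.link(fn _ -> {:error, :boom} end, :fail)
#     |> Rinku.link(fn n -> n * 10 end, :never_runs)
#     |> Rinku.run()
#     |> Rinku.result()
#     #=> {:error, :boom}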
|
lib/rinku.ex
| 0.795142
| 0.597872
|
rinku.ex
|
starcoder
|
defmodule Pun do
defp count(text, phrase) do
others = String.split(text, phrase) |> Enum.count
others - 1
end
def search(text, use_pronunciation \\ false) do
words = text |> Helper.parse |> Parser.get_parsed_words(use_pronunciation)
combinations = get_combinations words
search_pun text, combinations
end
def search_from_sentences sentences do
sentences |> Enum.map(fn x ->
result = search x
result_with_pronunciation = search x, true
case {result.surface, result_with_pronunciation.surface} do
{"", ""} -> nil
{lhs, ""} -> result
{"", rhs} -> result_with_pronunciation
{lhs, rhs} -> if String.length(lhs) >= String.length(rhs), do: result, else: result_with_pronunciation
end
end) |> Enum.filter(fn x -> x != nil end)
end
def is_same_yomi(lhs, rhs) do
Enum.join(Enum.map(lhs, fn(x) -> x.yomi end), "") == Enum.join(Enum.map(rhs, fn(x) -> x.yomi end), "")
end
@doc """
True when the base reading is a prefix of the target reading that extends partway into the target's last word.
"""
def starts_with_yomi(base, target) do
base_yomi = base |> Enum.map(fn x -> x.yomi end) |> Enum.join("")
target_yomi = target |> Enum.map(fn x -> x.yomi end) |> Enum.join("")
non_last_target_yomi = target |> Enum.map(fn x -> x.yomi end) |> Enum.reverse |> tl |> Enum.reverse |> Enum.join("")
!String.starts_with?(non_last_target_yomi, base_yomi) && String.starts_with?(target_yomi, base_yomi)
end
@doc """
True when the base reading is a suffix of the target reading that extends back partway into the target's first word.
"""
def ends_with_yomi(base, target) do
base_yomi = base |> Enum.map(fn x -> x.yomi end) |> Enum.join("")
target_yomi = target |> Enum.map(fn x -> x.yomi end) |> Enum.join("")
non_first_target_yomi = target |> tl |> Enum.map(fn x -> x.yomi end) |> Enum.join("")
!String.ends_with?(non_first_target_yomi, base_yomi) && String.ends_with?(target_yomi, base_yomi)
end
defp remove_empty_and_unique(list) do
list |> Enum.filter(fn x -> x != [] end)
|> Enum.reduce([], fn(x, acc) ->
if !Enum.find(acc, fn y -> y == x end) do
[x | acc]
else
acc
end
end)
end
def get_combinations(words) do
len = Enum.count words
Range.new(0, len-1) |>
Enum.map(fn at ->
Range.new(1, len-at) |>
Enum.map(fn at2 ->
Enum.slice words, at, at2 end)
end)
|>
Enum.reduce([], fn(x, acc) ->
remove_empty_and_unique(x) ++ acc
end)
end
def is_same_meaning lhs, rhs do
lhs_surface = lhs |> Enum.map(fn x -> x.surface end) |> Enum.join("")
rhs_surface = rhs |> Enum.map(fn x -> x.surface end) |> Enum.join("")
lhs_surface == rhs_surface
end
def starts_same_meaning lhs, rhs do
lhs_surface = lhs |> Enum.map(fn x -> x.surface end) |> Enum.join("")
rhs_surface = rhs |> Enum.map(fn x -> x.surface end) |> Enum.join("")
String.starts_with?(lhs_surface, rhs_surface)
end
def ends_same_meaning lhs, rhs do
lhs_surface = lhs |> Enum.map(fn x -> x.surface end) |> Enum.join("")
rhs_surface = rhs |> Enum.map(fn x -> x.surface end) |> Enum.join("")
String.ends_with?(lhs_surface, rhs_surface)
end
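# Two candidate word spans overlap ("duplication") when their positions in the
# sentence intersect; `.at` is assumed to be the word's index assigned by the
# parser.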
def is_duplication lhs, rhs do
lhs_last_word = lhs |> Enum.reverse |> hd
rhs_last_word = rhs |> Enum.reverse |> hd
case {(hd lhs).at <= (hd rhs).at, (hd rhs).at <= lhs_last_word.at} do
{true, true} -> true
_ ->
case {(hd lhs).at <= rhs_last_word.at, rhs_last_word.at <= lhs_last_word.at} do
{true, true} -> true
_ -> false
end
end
end
defp get_longest_word list do
list |> Enum.reduce([], fn (x, acc) ->
case {(x |> Enum.map(fn y -> y.yomi end) |> Enum.join("") |> String.length),
(acc |> Enum.map(fn y -> y.yomi end) |> Enum.join("") |> String.length) } do
{a, b} when a > b -> x
_ -> acc
end
end)
end
defp get_longest_pun pun_list do
pun_list |> Enum.reduce(%{:yomi => "", :surface => "", :checked_yomi => ""}, fn (x, acc) ->
case x do
nil -> acc
_ ->
yomi = x.base |> Enum.map(fn y -> y.yomi end) |> Enum.join("")
case (String.length(yomi) > String.length(acc.yomi) && String.length(yomi) != 1) do
true ->
%{:yomi => yomi,
:surface => x.base |> Enum.map(fn y -> y.surface end) |> Enum.join(""),
:checked_yomi => x.checked |> Enum.map(fn y -> y.yomi end) |> Enum.join(""),
:checked_surface => x.checked |> Enum.map(fn y -> y.surface end) |> Enum.join("")
}
false -> acc
end
end
end)
end
defp search_middle_pun selected_words, check_words, same_word_count do
case {starts_with_yomi(selected_words, check_words), ends_with_yomi(selected_words, check_words)} do
{true, false} when same_word_count == 1 -> if !starts_same_meaning(selected_words, check_words), do: check_words, else: false
{false, true} when same_word_count == 1 -> if !ends_same_meaning(selected_words, check_words), do: check_words, else: false
_ -> false
end
end
defp get_words_yomi_length(words) do
words |> Enum.map(fn x -> x.yomi end) |> Enum.join("") |> String.length
end
defp is_all_noun words do
words |> Enum.all?(fn word -> word.part == "名詞" end)
end
defp filter_puns puns do
puns |> Enum.filter(fn x -> x != false end) |>
Enum.filter(fn pun ->
len = get_words_yomi_length(pun)
if len <= 3, do: is_all_noun(pun), else: true
end)
end
defp search_pun text, combinations do
combinations |> Enum.map(fn selected_words ->
puns = combinations |> Enum.map(fn check_words ->
if is_duplication selected_words, check_words do
false
else
same_word_count = count(text, (selected_words |> Enum.map(fn x -> x.surface end) |> Enum.join("")))
case is_same_yomi(selected_words, check_words) do
true -> if !is_same_meaning(selected_words, check_words), do: check_words, else: false
false -> search_middle_pun selected_words, check_words, same_word_count
end
end
end) |> filter_puns
len = get_words_yomi_length(selected_words)
case Enum.empty?(puns) do
false -> if (len <= 2) && !is_all_noun(selected_words), do: nil, else: %{:base => selected_words, :checked => puns |> get_longest_word}
_ -> nil
end
end) |> get_longest_pun
end
end
|
lib/pun/pun.ex
| 0.610105
| 0.413418
|
pun.ex
|
starcoder
|
defmodule Artem.ImportIntrospection do
@external_resource "./README.md"
@moduledoc """
#{File.read!(@external_resource) |> String.split("---", parts: 2) |> List.last()}
"""
alias Absinthe.Schema
@type import_introspection_option ::
{:path, String.t() | Macro.t()} | {:provider, {module(), keyword()}}
@doc """
Import types defined using the results of the introspection query.
### Examples
```
import_introspection path: "/path/to/introspection.json"
import_introspection provider: {FileProvider, file: "test/fixtures/test.json"}
```
"""
defmacro import_introspection(opts) when is_list(opts) do
__CALLER__
|> do_import_introspection(nil, opts)
end
@doc """
See `import_introspection/1`
"""
@spec import_introspection(String.t() | Macro.t(), [import_introspection_option()]) :: Macro.t()
defmacro import_introspection(introspection, opts \\ []) do
__CALLER__
|> do_import_introspection(introspection, opts)
end
defp do_import_introspection(env, nil, opts) do
case Keyword.fetch(opts, :path) do
{:ok, path} ->
[
quote do
@__absinthe_import_introspection_path__ unquote(path)
end,
do_import_introspection(
env,
quote do
File.read!(@__absinthe_import_introspection_path__)
end,
opts
),
quote do
@external_resource @__absinthe_import_introspection_path__
end
]
:error ->
case Keyword.fetch(opts, :provider) do
{:ok, {provider, opts}} ->
[
do_import_introspection(
env,
quote do
case unquote(provider).get(unquote(opts)) do
{:ok, body} -> body
{:error, error} -> raise Schema.Notation.Error, error
end
end,
opts
)
]
:error ->
raise Schema.Notation.Error,
"Must provide `:path` option to `import_introspection` unless passing a raw json string as the first argument"
end
end
end
defp do_import_introspection(env, json, opts) do
ref = Schema.Notation.build_reference(env)
quote do
case Artem.IntrospectionSchemaBuilder.build_definitions(
unquote(json),
__MODULE__,
unquote(Macro.escape(ref)),
unquote(Macro.escape(opts))
) do
{:ok, definitions} ->
@__absinthe_sdl_definitions__ definitions ++
(Module.get_attribute(
__MODULE__,
:__absinthe_sdl_definitions__
) || [])
{:error, error} ->
raise Absinthe.Schema.Notation.Error,
"`import_introspection` could not parse JSON:\n#{error}"
end
end
end
end
|
lib/artem/import_introspection.ex
| 0.717804
| 0.548915
|
import_introspection.ex
|
starcoder
|
defmodule LineBuffer do
@moduledoc """
Buffer lines like a boss.
"""
defmodule State do
@typedoc """
`%State{}`'s type
"""
@type t :: %__MODULE__{}
@doc false
defstruct [
splitter: "\n",
buf: "",
]
end
@spec new(String.t()) :: State.t()
@doc ~S"""
Create a new line buffer
## Parameters
- splitter: A string to use to split input into lines. Pass nil to use the default "\n"
## Returns
`%State{}` that is the first parameter to all other module functions.
## Examples
```elixir
# Default construction
iex> LineBuffer.new()
%LineBuffer.State{buf: "", splitter: "\n"}
# Specific splitter
iex> LineBuffer.new("\r\n")
%LineBuffer.State{buf: "", splitter: "\r\n"}
```
"""
def new(splitter \\ "\n"), do: %State{splitter: splitter}
@spec add_data(State.t(), String.t()) :: {State.t(), [String.t()]}
@doc ~S"""
Add data to a line buffer
## Parameters
- state: An initialized `%State{}`
- new_data: A `String.t()` to use to split input into lines, defaults to `"\n"`
## Returns
`{updated_state, [line_without_delimiter]}`
## Examples
```elixir
iex> lb = LineBuffer.new()
%LineBuffer.State{buf: "", splitter: "\n"}
iex> LineBuffer.add_data(lb, "foo\n")
{%LineBuffer.State{buf: "", splitter: "\n"}, ["foo"]}
iex> lb = LineBuffer.new()
%LineBuffer.State{buf: "", splitter: "\n"}
iex> LineBuffer.add_data(lb, "foo\nbar")
{%LineBuffer.State{buf: "bar", splitter: "\n"}, ["foo"]}
iex> lb = LineBuffer.new()
%LineBuffer.State{buf: "", splitter: "\n"}
iex> LineBuffer.add_data(lb, "foo\nbar\n")
{%LineBuffer.State{buf: "", splitter: "\n"}, ["foo", "bar"]}
```
"""
def add_data(state, new_data) do
working_buf = state.buf <> new_data
split_result = String.split(working_buf, state.splitter, trim: false)
{new_buf, lines} = List.pop_at(split_result, -1)
case {new_buf, lines} do
{"" = _buf, [] = lines} -> {state, lines} # had no data, added no data
{"" = buf, lines} -> {%{state| buf: buf}, lines} # ended up with one or more complete lines
{buf, [] = lines} -> {%{state| buf: buf}, lines} # ended up with more data but no complete lines
{buf, lines} -> {%{state| buf: buf}, lines} # ended up with more data and one or more complete lines
end
end
@spec peek(State.t()) :: String.t()
@doc ~S"""
Get the current string being buffered.
## Parameters
- state: An initialized `%State{}`
## Returns
`String.t()`
## Examples
```elixir
iex> lb = LineBuffer.new()
%LineBuffer.State{buf: "", splitter: "\n"}
iex> {updated_lb, _} = LineBuffer.add_data(lb, "foo\nbar")
{%LineBuffer.State{buf: "bar", splitter: "\n"}, ["foo"]}
iex> LineBuffer.peek(updated_lb)
"bar"
```
"""
def peek(state), do: state.buf
@spec flush(State.t()) :: {State.t(), String.t()}
@doc ~S"""
Flush (empty) the buffer.
## Parameters
- state: An initialized `%State{}`
## Returns
New and emptied state and the old buffered data: `{%State{}, String.t}`
## Examples
```elixir
iex> lb = LineBuffer.new()
%LineBuffer.State{buf: "", splitter: "\n"}
iex> {updated_lb, _} = LineBuffer.add_data(lb, "foo\nbar")
{%LineBuffer.State{buf: "bar", splitter: "\n"}, ["foo"]}
iex> LineBuffer.flush(updated_lb)
{%LineBuffer.State{buf: "", splitter: "\n"}, "bar"}
```
"""
def flush(state), do: {%{state| buf: ""}, state.buf}
@spec get_splitter(State.t()) :: String.t()
@doc ~S"""
Get the splitter from state
## Parameters
- state: An initialized `%State{}`
## Returns
The splitter from state (a `String.t()`)
## Examples
```elixir
iex> lb = LineBuffer.new()
%LineBuffer.State{buf: "", splitter: "\n"}
iex> LineBuffer.get_splitter(lb)
"\n"
```
"""
def get_splitter(state), do: state.splitter
@spec set_splitter(State.t(), String.t()) :: {State.t(), [String.t()]}
@doc ~S"""
Set the splitter.
Changing the splitter may cause new lines to be returned
that were not considered lines before. Therefore this function is roughly
equivalent to creating a new LineBuffer and adding the old line buffer's
data to it.
## Parameters
- state: An initialized `%State{}`
- splitter: A string to use as the new splitter/delimiter
## Returns
`{state, [line_without_delimiter]}`
## Examples
```elixir
iex> lb = LineBuffer.new("\r\n")
%LineBuffer.State{buf: "", splitter: "\r\n"}
iex> {updated_lb, _} = LineBuffer.add_data(lb, "foo\nbar\n")
{%LineBuffer.State{buf: "foo\nbar\n", splitter: "\r\n"}, []}
iex> LineBuffer.set_splitter(updated_lb, "\n")
{%LineBuffer.State{buf: "", splitter: "\n"}, ["foo", "bar"]}
```
"""
def set_splitter(state, splitter) do
splitter
|> new()
|> add_data(state.buf)
end
end
|
lib/line_buffer.ex
| 0.900767
| 0.813238
|
line_buffer.ex
|
starcoder
|
defmodule Exexif.Decode do
@moduledoc """
Decode tags and (in some cases) their parameters
"""
def tag(:tiff, 0x0100, value), do: {:image_width, value}
def tag(:tiff, 0x0101, value), do: {:image_height, value}
def tag(:tiff, 0x010D, value), do: {:document_name, value}
def tag(:tiff, 0x010E, value), do: {:image_description, value}
def tag(:tiff, 0x010F, value), do: {:make, value}
def tag(:tiff, 0x0110, value), do: {:model, value}
def tag(:tiff, 0x0112, value), do: {:orientation, orientation(value)}
def tag(:tiff, 0x011A, value), do: {:x_resolution, value}
def tag(:tiff, 0x011B, value), do: {:y_resolution, value}
def tag(:tiff, 0x0128, value), do: {:resolution_units, resolution(value)}
def tag(:tiff, 0x0131, value), do: {:software, value}
def tag(:tiff, 0x0132, value), do: {:modify_date, inspect(value)}
def tag(:tiff, 0x8769, value), do: {:exif, value}
def tag(:tiff, 0x8825, value), do: {:gps, value}
def tag(:exif, 0x0201, value), do: {:thumbnail_offset, value}
def tag(:exif, 0x0202, value), do: {:thumbnail_size, value}
def tag(_, 0x829A, value), do: {:exposure_time, value}
def tag(_, 0x829D, value), do: {:f_number, value}
def tag(_, 0x8822, value), do: {:exposure_program, exposure_program(value)}
def tag(_, 0x8824, value), do: {:spectral_sensitivity, value}
def tag(_, 0x8827, value), do: {:iso_speed_ratings, value}
def tag(_, 0x8828, value), do: {:oecf, value}
def tag(_, 0x8830, value), do: {:sensitivity_type, sensitivity_type(value)}
def tag(_, 0x8831, value), do: {:standard_output_sensitivity, value}
def tag(_, 0x8832, value), do: {:recommended_exposure, value}
def tag(_, 0x9000, value), do: {:exif_version, version(value)}
def tag(_, 0x9003, value), do: {:datetime_original, value}
def tag(_, 0x9004, value), do: {:datetime_digitized, value}
def tag(_, 0x9101, value), do: {:component_configuration, component_configuration(value)}
def tag(_, 0x9102, value), do: {:compressed_bits_per_pixel, value}
def tag(_, 0x9201, value), do: {:shutter_speed_value, value}
def tag(_, 0x9202, value), do: {:aperture_value, value}
def tag(_, 0x9203, value), do: {:brightness_value, value}
def tag(_, 0x9204, value), do: {:exposure_bias_value, value}
def tag(_, 0x9205, value), do: {:max_aperture_value, value}
def tag(_, 0x9206, value), do: {:subject_distance, value}
def tag(_, 0x9207, value), do: {:metering_mode, metering_mode(value)}
def tag(_, 0x9208, value), do: {:light_source, value}
def tag(_, 0x9209, value), do: {:flash, flash(value)}
def tag(_, 0x920A, value), do: {:focal_length, value}
def tag(_, 0x9214, value), do: {:subject_area, value}
def tag(_, 0x927C, value), do: {:maker_note, value}
def tag(_, 0x9286, value), do: {:user_comment, value}
def tag(_, 0x9290, value), do: {:subsec_time, value}
  def tag(_, 0x9291, value), do: {:subsec_time_original, value}
def tag(_, 0x9292, value), do: {:subsec_time_digitized, value}
def tag(_, 0xA000, value), do: {:flash_pix_version, version(value)}
def tag(_, 0xA001, value), do: {:color_space, color_space(value)}
def tag(_, 0xA002, value), do: {:exif_image_width, value}
def tag(_, 0xA003, value), do: {:exif_image_height, value}
def tag(_, 0xA004, value), do: {:related_sound_file, value}
def tag(_, 0xA20B, value), do: {:flash_energy, value}
def tag(_, 0xA20C, value), do: {:spatial_frequency_response, value}
def tag(_, 0xA20E, value), do: {:focal_plane_x_resolution, value}
def tag(_, 0xA20F, value), do: {:focal_plane_y_resolution, value}
def tag(_, 0xA210, value),
do: {:focal_plane_resolution_unit, focal_plane_resolution_unit(value)}
def tag(_, 0xA214, value), do: {:subject_location, value}
def tag(_, 0xA215, value), do: {:exposure_index, value}
def tag(_, 0xA217, value), do: {:sensing_method, sensing_method(value)}
def tag(_, 0xA300, value), do: {:file_source, file_source(value)}
def tag(_, 0xA301, value), do: {:scene_type, scene_type(value)}
def tag(_, 0xA302, value), do: {:cfa_pattern, value}
def tag(_, 0xA401, value), do: {:custom_rendered, custom_rendered(value)}
def tag(_, 0xA402, value), do: {:exposure_mode, exposure_mode(value)}
def tag(_, 0xA403, value), do: {:white_balance, white_balance(value)}
def tag(_, 0xA404, value), do: {:digital_zoom_ratio, value}
def tag(_, 0xA405, value), do: {:focal_length_in_35mm_film, value}
def tag(_, 0xA406, value), do: {:scene_capture_type, scene_capture_type(value)}
def tag(_, 0xA407, value), do: {:gain_control, gain_control(value)}
def tag(_, 0xA408, value), do: {:contrast, contrast(value)}
def tag(_, 0xA409, value), do: {:saturation, saturation(value)}
def tag(_, 0xA40A, value), do: {:sharpness, sharpness(value)}
def tag(_, 0xA40B, value), do: {:device_setting_description, value}
def tag(_, 0xA40C, value), do: {:subject_distance_range, subject_distance_range(value)}
def tag(_, 0xA420, value), do: {:image_unique_id, value}
def tag(_, 0xA432, value), do: {:lens_info, value}
def tag(_, 0xA433, value), do: {:lens_make, value}
def tag(_, 0xA434, value), do: {:lens_model, value}
def tag(_, 0xA435, value), do: {:lens_serial_number, value}
# http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/GPS.html
Exexif.Data.Gps.fields()
|> Enum.with_index()
|> Enum.each(fn {e, i} ->
def tag(:gps, unquote(i), value), do: {unquote(e), value}
end)
def tag(type, tag, value) do
{~s[#{type} tag(0x#{:io_lib.format("~.16B", [tag])})], inspect(value)}
end
# Value decodes
defp orientation(1), do: "Horizontal (normal)"
defp orientation(2), do: "Mirror horizontal"
defp orientation(3), do: "Rotate 180"
defp orientation(4), do: "Mirror vertical"
defp orientation(5), do: "Mirror horizontal and rotate 270 CW"
defp orientation(6), do: "Rotate 90 CW"
defp orientation(7), do: "Mirror horizontal and rotate 90 CW"
defp orientation(8), do: "Rotate 270 CW"
defp orientation(_), do: "Unsupported"
defp resolution(1), do: "None"
defp resolution(2), do: "Pixels/in"
defp resolution(3), do: "Pixels/cm"
defp resolution(_), do: "Unsupported"
defp exposure_program(0), do: "Unknown"
defp exposure_program(1), do: "Manual"
defp exposure_program(2), do: "Program AE"
defp exposure_program(3), do: "Aperture-priority AE"
defp exposure_program(4), do: "Shutter speed priority AE"
defp exposure_program(5), do: "Creative (Slow speed)"
defp exposure_program(6), do: "Action (High speed)"
defp exposure_program(7), do: "Portrait"
defp exposure_program(8), do: "Landscape"
defp exposure_program(9), do: "Bulb"
defp exposure_program(_), do: "Unsupported"
defp sensitivity_type(0), do: "Unknown"
defp sensitivity_type(1), do: "Standard Output Sensitivity"
defp sensitivity_type(2), do: "Recommended Exposure Index"
defp sensitivity_type(3), do: "ISO Speed"
  defp sensitivity_type(4), do: "Standard Output Sensitivity and Recommended Exposure Index"
defp sensitivity_type(5), do: "Standard Output Sensitivity and ISO Speed"
defp sensitivity_type(6), do: "Recommended Exposure Index and ISO Speed"
defp sensitivity_type(7),
do: "Standard Output Sensitivity, Recommended Exposure Index and ISO Speed"
defp sensitivity_type(_), do: "Unsupported"
@comp_conf {"-", "Y", "Cb", "Cr", "R", "G", "B"}
defp component_configuration(list) do
for cc <- list do
elem(@comp_conf, cc)
end
|> Enum.join(",")
end
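  # Illustrative: `component_configuration([1, 2, 3, 0])` maps each index into
  # @comp_conf and returns "Y,Cb,Cr,-".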
defp metering_mode(0), do: "Unknown"
defp metering_mode(1), do: "Average"
defp metering_mode(2), do: "Center-weighted average"
defp metering_mode(3), do: "Spot"
defp metering_mode(4), do: "Multi-spot"
defp metering_mode(5), do: "Multi-segment"
defp metering_mode(6), do: "Partial"
defp metering_mode(_), do: "Other"
defp color_space(0x1), do: "sRGB"
defp color_space(0x2), do: "Adobe RGB"
defp color_space(0xFFFD), do: "Wide Gamut RGB"
defp color_space(0xFFFE), do: "ICC Profile"
defp color_space(0xFFFF), do: "Uncalibrated"
defp color_space(_), do: "Unsupported"
defp focal_plane_resolution_unit(1), do: "None"
defp focal_plane_resolution_unit(2), do: "inches"
defp focal_plane_resolution_unit(3), do: "cm"
defp focal_plane_resolution_unit(4), do: "mm"
defp focal_plane_resolution_unit(5), do: "um"
defp focal_plane_resolution_unit(_), do: "Unsupported"
defp sensing_method(1), do: "Not defined"
defp sensing_method(2), do: "One-chip color area"
defp sensing_method(3), do: "Two-chip color area"
defp sensing_method(4), do: "Three-chip color area"
defp sensing_method(5), do: "Color sequential area"
defp sensing_method(7), do: "Trilinear"
defp sensing_method(8), do: "Color sequential linear"
defp sensing_method(_), do: "Unsupported"
defp file_source(1), do: "Film Scanner"
defp file_source(2), do: "Reflection Print Scanner"
defp file_source(3), do: "Digital Camera"
defp file_source(0x03000000), do: "Sigma Digital Camera"
defp file_source(_), do: "Unsupported"
defp custom_rendered(0), do: "Normal"
defp custom_rendered(1), do: "Custom"
defp custom_rendered(v) when is_number(v), do: "Unknown (#{v})"
defp scene_type(1), do: "Directly photographed"
defp scene_type(_), do: "Unsupported"
defp exposure_mode(0), do: "Auto"
defp exposure_mode(1), do: "Manual"
defp exposure_mode(2), do: "Auto bracket"
defp exposure_mode(_), do: "Unsupported"
defp white_balance(0), do: "Auto"
defp white_balance(1), do: "Manual"
defp white_balance(_), do: "Unsupported"
defp scene_capture_type(0), do: "Standard"
defp scene_capture_type(1), do: "Landscape"
defp scene_capture_type(2), do: "Portrait"
defp scene_capture_type(3), do: "Night"
defp scene_capture_type(_), do: "Unsupported"
defp gain_control(0), do: "None"
defp gain_control(1), do: "Low gain up"
defp gain_control(2), do: "High gain up"
defp gain_control(3), do: "Low gain down"
defp gain_control(4), do: "High gain down"
defp gain_control(_), do: "Unsupported"
defp contrast(0), do: "Normal"
defp contrast(1), do: "Low"
defp contrast(2), do: "High"
defp contrast(_), do: "Unsupported"
defp saturation(0), do: "Normal"
defp saturation(1), do: "Low"
defp saturation(2), do: "High"
defp saturation(_), do: "Unsupported"
defp sharpness(0), do: "Normal"
defp sharpness(1), do: "Soft"
defp sharpness(2), do: "Hard"
defp sharpness(_), do: "Unsupported"
defp subject_distance_range(0), do: "Unknown"
defp subject_distance_range(1), do: "Macro"
defp subject_distance_range(2), do: "Close"
defp subject_distance_range(3), do: "Distant"
defp subject_distance_range(_), do: "Unsupported"
defp flash(0x0), do: "No Flash"
defp flash(0x1), do: "Fired"
defp flash(0x5), do: "Fired, Return not detected"
defp flash(0x7), do: "Fired, Return detected"
defp flash(0x8), do: "On, Did not fire"
defp flash(0x9), do: "On, Fired"
defp flash(0xD), do: "On, Return not detected"
defp flash(0xF), do: "On, Return detected"
defp flash(0x10), do: "Off, Did not fire"
defp flash(0x14), do: "Off, Did not fire, Return not detected"
defp flash(0x18), do: "Auto, Did not fire"
defp flash(0x19), do: "Auto, Fired"
defp flash(0x1D), do: "Auto, Fired, Return not detected"
defp flash(0x1F), do: "Auto, Fired, Return detected"
defp flash(0x20), do: "No flash function"
defp flash(0x30), do: "Off, No flash function"
defp flash(0x41), do: "Fired, Red-eye reduction"
defp flash(0x45), do: "Fired, Red-eye reduction, Return not detected"
defp flash(0x47), do: "Fired, Red-eye reduction, Return detected"
defp flash(0x49), do: "On, Red-eye reduction"
defp flash(0x4D), do: "On, Red-eye reduction, Return not detected"
defp flash(0x4F), do: "On, Red-eye reduction, Return detected"
defp flash(0x50), do: "Off, Red-eye reduction"
defp flash(0x58), do: "Auto, Did not fire, Red-eye reduction"
defp flash(0x59), do: "Auto, Fired, Red-eye reduction"
defp flash(0x5D), do: "Auto, Fired, Red-eye reduction, Return not detected"
defp flash(0x5F), do: "Auto, Fired, Red-eye reduction, Return detected"
defp flash(_), do: "Unsupported"
defp version([?0, major, minor1, minor2]) do
<<major, ?., minor1, minor2>>
end
defp version([major1, major2, minor1, minor2]) do
<<major1, major2, ?., minor1, minor2>>
end
end
|
lib/exexif/decode.ex
| 0.759225
| 0.580947
|
decode.ex
|
starcoder
|
defmodule Riptide.Store.Postgres do
@moduledoc """
This store persists data to a single Postgres table as materialized paths. It is best used in scenarios where your application will have multiple erlang nodes running that all need shared access to data. Note with this store Postgres is treated like a dumb key/value store and does not take advantage of any other Postgres capabilities.
## Configuration
Add `postgrex` as a dependency to your `mix.exs`.
```elixir
defp deps do
[
{:riptide, "~> 0.4.0"},
{:postgrex, "~> 0.15.3"}
]
end
```
And then you can configure the store:
```elixir
config :riptide,
store: %{
read: {Riptide.Store.Postgres, []},
write: {Riptide.Store.Postgres, []},
}
```
You can start up a named `Postgrex` pool manually but this module provides a convenient way for you to do that. Add this to your `application.ex`:
```elixir
children = [
{Riptide.Store.Postgres, [
hostname: "localhost",
database: "riptide",
username: "postgres",
password: "<PASSWORD>",
]},
Riptide,
]
opts = [strategy: :one_for_one, name: Riptide.Supervisor]
Supervisor.start_link(children, opts)
```
Note, make sure the Postgres pool starts up *before* Riptide.
## Options
- `:table` - name of table defaults to `riptide` (optional)
- `:name` - name of Postgrex pool, defaults to `postgres` (optional)
- `:transaction_timeout` - duration for transaction timeout in milliseconds, defaults to 1 minute (optional)
"""
@behaviour Riptide.Store
@delimiter "×"
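  # Keys are materialized paths: a path such as ["users", "alice"] is stored as
  # "users×alice×" (see encode_path/1 below), so prefix queries reduce to
  # ordered range scans over the primary key.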
@doc """
Convenience implementation of `Supervisor.child_spec/1` to start up a `Postgrex` pool with name `:postgres`
## Examples
```elixir
children = [
{Riptide.Store.Postgres, [
hostname: "localhost",
database: "riptide",
username: "postgres",
password: "<PASSWORD>",
]},
Riptide,
]
opts = [strategy: :one_for_one, name: Riptide.Supervisor]
Supervisor.start_link(children, opts)
```
"""
def child_spec(opts) do
Postgrex.child_spec(Keyword.merge([name: :postgres], opts))
end
@impl true
def init(opts) do
Postgrex.query!(
opts_name(opts),
"""
CREATE TABLE IF NOT EXISTS "#{opts_table(opts)}" (
path text COLLATE "C",
value jsonb,
PRIMARY KEY(path)
);
""",
[]
)
:ok
end
defp opts_table(opts), do: Keyword.get(opts, :table, "riptide")
defp opts_name(opts), do: Keyword.get(opts, :name, :postgres)
defp opts_transaction_timeout(opts),
do: Keyword.get(opts, :transaction_timeout, :timer.minutes(1))
@impl true
def mutation(merges, deletes, opts) do
opts
|> opts_name()
|> Postgrex.transaction(
fn conn ->
:ok = delete(deletes, conn, opts)
:ok = merge(merges, conn, opts)
end,
timeout: :timer.hours(1)
)
|> case do
{:ok, _} -> :ok
result -> {:error, result}
end
end
defp merge([], _conn, _opts), do: :ok
defp merge(merges, conn, opts) do
merges
|> Stream.chunk_every(30_000)
|> Enum.map(fn layers ->
{_, statement, params} =
layers
|> Enum.reduce({1, [], []}, fn {path, value}, {index, statement, params} ->
{
index + 2,
["($#{index}, $#{index + 1})" | statement],
[encode_path(path), value | params]
}
end)
Postgrex.query!(
conn,
"INSERT INTO \"#{opts_table(opts)}\"(path, value) VALUES #{Enum.join(statement, ", ")} ON CONFLICT (path) DO UPDATE SET value = excluded.value",
params
)
end)
:ok
end
defp delete([], _conn, _opts), do: :ok
defp delete(layers, conn, opts) do
{arguments, statement} =
layers
|> Enum.with_index()
|> Stream.map(fn {{path, _}, index} ->
{[encode_path(path) <> "%"], "(path LIKE $#{index + 1})"}
end)
|> Enum.reduce({[], []}, fn {args, field}, {a, b} -> {args ++ a, [field | b]} end)
statement = Enum.join(statement, " OR ")
Postgrex.query!(
conn,
"DELETE FROM \"#{opts_table(opts)}\" WHERE #{statement}",
arguments
)
:ok
end
defp encode_prefix(path) do
Enum.join(path, @delimiter)
end
defp encode_path(path) do
Enum.join(path, @delimiter) <> @delimiter
end
defp decode_path(input) do
String.split(input, @delimiter, trim: true)
end
@impl true
def query(paths, store_opts) do
# {full, partial} = Enum.split_with(paths, fn {_path, opts} -> opts[:limit] == nil end)
Stream.resource(
fn ->
{holder, conn} = txn_start(store_opts)
Postgrex.query!(conn, "SET enable_seqscan = OFF;", [])
{holder, conn}
end,
fn
{holder, conn} ->
{Stream.concat([
query_partial(paths, conn, store_opts)
# query_full(full, conn)
]), holder}
holder ->
{:halt, holder}
end,
fn holder -> txn_end(holder) end
)
end
defp query_partial(paths, conn, store_opts) do
paths
|> Stream.map(fn {path, opts} ->
{path, query_path(path, opts, conn, store_opts)}
end)
end
defp query_full([], _conn), do: []
defp query_full(paths, conn) do
{values, args, _} =
Enum.reduce(paths, {[], [], 0}, fn {path, opts}, {values, args, count} ->
combined = encode_prefix(path)
{min, max} = Riptide.Store.Prefix.range(combined, opts)
{
values ++ ["($#{count + 1}, $#{count + 2}, $#{count + 3})"],
args ++ [combined, encode_path(min), encode_path(max)],
count + 3
}
end)
statement = """
WITH ranges (prefix, min, max) AS (VALUES #{Enum.join(values, ", ")})
SELECT ranges.prefix, path, value FROM riptide JOIN ranges ON riptide.path >= ranges.min AND riptide.path < ranges.max
"""
conn
|> Postgrex.stream(
statement,
args,
max_rows: 1000
)
|> Stream.flat_map(fn item -> item.rows end)
|> Stream.chunk_by(fn [prefix, _path, _value] -> prefix end)
|> Stream.map(fn chunk ->
[prefix, _, _] = Enum.at(chunk, 0)
{
decode_path(prefix),
Stream.map(chunk, fn [_, path, value] -> {decode_path(path), value} end)
}
end)
end
defp query_path(path, opts, conn, store_opts) do
combined = encode_prefix(path)
{min, max} = Riptide.Store.Prefix.range(combined, opts)
conn
|> Postgrex.stream(
"SELECT path, value FROM \"#{opts_table(store_opts)}\" WHERE path >= $1 AND path < $2 ORDER BY path ASC",
[encode_path(min), encode_path(max)]
)
|> Stream.flat_map(fn item -> item.rows end)
|> Stream.map(fn [path, value] -> {decode_path(path), value} end)
end
defp txn_start(store_opts) do
self = self()
{:ok, child} =
Task.start_link(fn ->
Postgrex.transaction(
opts_name(store_opts),
fn conn ->
send(self, {:conn, conn})
receive do
{:conn, :done} -> :ok
end
end,
timeout: opts_transaction_timeout(store_opts)
)
end)
conn =
receive do
{:conn, conn} -> conn
end
{child, conn}
end
defp txn_end(holder) do
send(holder, {:conn, :done})
end
end
|
packages/elixir/lib/riptide/store/store_postgres.ex
| 0.843041
| 0.763374
|
store_postgres.ex
|
starcoder
|
defmodule VintageNetWiFi.Cookbook do
@moduledoc """
Recipes for common WiFi network configurations
For example, if you want the standard configuration for the most common type of WiFi
network (WPA2 Preshared Key networks), pass the SSID and password to `wpa_psk/2`
"""
alias VintageNetWiFi.WPA2
@doc """
  Return a configuration for connecting to an open WiFi network
  Pass an SSID. If the SSID is ok, you'll get an `:ok` tuple with the
  configuration. If there's a problem, you'll get an error tuple with a reason.
"""
@spec open_wifi(String.t()) :: {:ok, map()} | {:error, WPA2.invalid_ssid_error()}
def open_wifi(ssid) when is_binary(ssid) do
with :ok <- WPA2.validate_ssid(ssid) do
{:ok,
%{
type: VintageNetWiFi,
vintage_net_wifi: %{
networks: [
%{
key_mgmt: :none,
ssid: ssid
}
]
},
ipv4: %{method: :dhcp}
}}
end
end
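  # Illustrative result, derived from the function above:
  #
  #     iex> VintageNetWiFi.Cookbook.open_wifi("my_network")
  #     {:ok,
  #      %{
  #        type: VintageNetWiFi,
  #        vintage_net_wifi: %{networks: [%{key_mgmt: :none, ssid: "my_network"}]},
  #        ipv4: %{method: :dhcp}
  #      }}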
@doc """
Return a configuration for connecting to a WPA-PSK network
Pass an SSID and passphrase. If the SSID and passphrase are ok, you'll get an
`:ok` tuple with the configuration. If there's a problem, you'll get an error
tuple with a reason.
"""
@spec wpa_psk(String.t(), String.t()) ::
{:ok, map()} | {:error, WPA2.invalid_ssid_error() | WPA2.invalid_passphrase_error()}
def wpa_psk(ssid, passphrase) when is_binary(ssid) and is_binary(passphrase) do
with :ok <- WPA2.validate_ssid(ssid),
:ok <- WPA2.validate_passphrase(passphrase) do
{:ok,
%{
type: VintageNetWiFi,
vintage_net_wifi: %{
networks: [
%{
key_mgmt: :wpa_psk,
ssid: ssid,
psk: passphrase
}
]
},
ipv4: %{method: :dhcp}
}}
end
end
@doc """
Return a configuration for connecting to a WPA-EAP PEAP network
Pass an SSID and login credentials. If valid, you'll get an
`:ok` tuple with the configuration. If there's a problem, you'll get an error
tuple with a reason.
"""
@spec wpa_eap_peap(String.t(), String.t(), String.t()) ::
{:ok, map()} | {:error, WPA2.invalid_ssid_error()}
def wpa_eap_peap(ssid, username, passphrase)
when is_binary(ssid) and is_binary(username) and is_binary(passphrase) do
with :ok <- WPA2.validate_ssid(ssid) do
{:ok,
%{
type: VintageNetWiFi,
vintage_net_wifi: %{
networks: [
%{
key_mgmt: :wpa_eap,
ssid: ssid,
identity: username,
            password: passphrase,
eap: "PEAP",
phase2: "auth=MSCHAPV2"
}
]
},
ipv4: %{method: :dhcp}
}}
end
end
@doc """
Return a configuration for creating an open access point
Pass an SSID and an optional IPv4 class C network.
"""
@spec open_access_point(String.t(), VintageNet.any_ip_address()) ::
{:ok, map()} | {:error, term()}
def open_access_point(ssid, ipv4_subnet \\ "192.168.24.0") do
with :ok <- WPA2.validate_ssid(ssid),
{:ok, {a, b, c, _d}} <- VintageNet.IP.ip_to_tuple(ipv4_subnet) do
our_address = {a, b, c, 1}
dhcp_start = {a, b, c, 10}
dhcp_end = {a, b, c, 250}
{:ok,
%{
type: VintageNetWiFi,
vintage_net_wifi: %{
networks: [
%{
mode: :ap,
ssid: ssid,
key_mgmt: :none
}
]
},
ipv4: %{
method: :static,
address: our_address,
netmask: {255, 255, 255, 0}
},
dhcpd: %{
start: dhcp_start,
end: dhcp_end
}
}}
end
end
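  # Illustrative addressing, derived from the defaults above: for the default
  # subnet "192.168.24.0" the access point takes 192.168.24.1 and offers DHCP
  # leases from 192.168.24.10 through 192.168.24.250.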
end
|
lib/vintage_net_wifi/cookbook.ex
| 0.790409
| 0.591074
|
cookbook.ex
|
starcoder
|
defmodule PassiveSupport.Range do
@moduledoc """
Helper functions for working with ranges.
Ranges have some interesting characteristics in Elixir. A range literal
is the language's simplest representation of a Stream; the use case for
them is rather limited compared to other languages; and as of version 1.12.0,
it is the first data type in Elixir to make use of a ternary operator (`..///3`)
All of this can mean exactly one thing: Ranges are for lovers. And by
virtue of that fact, this library maintainer's personal soft spot for
the data type has to be categorical proof that he is, in fact, a lover.
This module defines a number of functions that help in determining the
characteristics of a range, especially in terms of another range, as
well as some functions that aid in manipulating ranges for various
use cases — the existence of all of which, as of yet, are unproven.
Nevertheless, if any of these hypothetical workflows are eventually
found to be extant, these functions will all doubtlessly prove invaluable
to whatever intrepid, frontier programmer is brave enough to address
the challenging burdens, somehow lightened by these desperate grasps
for relevance.
"""
@doc ~S"""
Returns `true` if `other` is a number that falls within `range`, or
if `other_range` is fully contained within `range`, regardless of
polarity. Returns `false` for all other values of `other`.
## Examples
iex> includes?(1..5, 3)
true
iex> includes?(1..5, 5)
true
iex> includes?(1..5, :math.pi)
true
iex> includes?(1..5, :no)
false
iex> includes?(1..5, nil)
false
iex> includes?(1..5, 2..4)
true
iex> includes?(1..5, 4..6)
false
iex> includes?(1..5, 0..2)
false
iex> includes?(5..1, 3)
true
iex> includes?(5..1, 2..4)
true
iex> includes?(5..1, 4..2)
true
"""
@doc since: "0.1.0"
@spec includes?(Range.t, any) :: boolean
def includes?(range, other_start..other_finish), do:
includes?(range, other_start) and includes?(range, other_finish)
def includes?(start..finish, point) when start <= finish, do:
start <= point and point <= finish
def includes?(start..finish, point), do:
start >= point and point >= finish
@doc ~S"""
Returns `true` if either end of either range falls within the other.
Returns `false` if the second argument is not a range, or if the
ranges have opposing polarities.
## Examples
iex> overlaps?(1..5, 4..6)
true
iex> overlaps?(4..6, 1..5)
true
iex> overlaps?(1..5, 6..7)
false
iex> overlaps?(1..5, 2..4)
true
iex> overlaps?(2..4, 1..5)
true
iex> overlaps?(5..1, 4..6)
false
iex> overlaps?(4..6, 5..1)
false
iex> overlaps?(1..5, 6..4)
false
iex> overlaps?(6..4, 1..5)
false
iex> overlaps?(6..4, 5..1)
true
"""
@doc since: "0.1.0"
@spec overlaps?(Range.t, Range.t) :: boolean
def overlaps?(start_1..finish_1, start_a..finish_a)
when ((start_1 > finish_1) and (start_a < finish_a))
or ((start_1 < finish_1) and (start_a > finish_a)),
do:
false
def overlaps?(start_1..finish_1, start_a..finish_a), do:
includes?(start_1..finish_1, start_a) or includes?(start_a..finish_a, start_1) or includes?(start_1..finish_1, finish_a)
@doc ~S"""
Returns `true` if either range begins immediately after the other.
Returns `false` if the ranges have opposing polarities
## Examples
iex> adjacent?(1..5, 6..10)
true
iex> adjacent?(6..10, 1..5)
true
iex> adjacent?(10..6, 5..1)
true
iex> adjacent?(5..1, 10..6)
true
iex> adjacent?(0..4, 6..10)
false
iex> adjacent?(6..10, 0..4)
false
iex> adjacent?(10..6, 1..5)
false
"""
@doc since: "0.1.0"
@spec adjacent?(Range.t, Range.t) :: boolean
def adjacent?(start_1..finish_1, start_b..finish_b) when start_1 > finish_1, do:
finish_1-1 == start_b or finish_b-1 == start_1
def adjacent?(start_1..finish_1, start_b..finish_b), do:
finish_1+1 == start_b or finish_b + 1 == start_1
@doc ~S"""
If the provided ranges overlap or are adjacent, returns a new range
spanning the entirety of both
## Examples
iex> join(1..5, 6..10)
1..10
iex> join(1..5, 4..8)
1..8
iex> join(10..20, 5..15)
5..20
iex> join(1..10, 2..8)
1..10
iex> join(1..2, 5..10)
** (ArgumentError) Cannot join 1..2 and 5..10
iex> join(1..5, 10..5)
** (ArgumentError) Cannot join 1..5 and 10..5//-1
"""
@doc since: "0.1.0"
@spec join(Range.t, Range.t) :: Range.t
def join(range_1, range_a) do
case overlaps?(range_1, range_a) or adjacent?(range_1, range_a) do
true ->
Enum.min([min(range_1), min(range_a)])..Enum.max([max(range_1), max(range_a)])
false ->
raise ArgumentError, "Cannot join #{inspect range_1} and #{inspect range_a}"
end
end
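  # Design note: because the result is built as `min..max`, join/2 always
  # returns an ascending range, even when one or both inputs are descending.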
@doc ~S"""
Returns the size of the range
## Examples
iex> size(1..5)
5
iex> size(0..5)
6
iex> size(5..0)
6
"""
@doc since: "0.1.0"
@spec size(Range.t) :: integer
def size(start..start), do:
1
def size(start..finish) when finish < start, do:
1+start-finish
def size(start..finish), do:
1+finish-start
@doc ~S"""
Returns the first number of the range
## Examples
iex> first(0..5)
0
iex> first(5..0)
5
"""
@doc since: "0.1.0"
@spec first(Range.t) :: integer
def first(start.._finish), do:
start
@doc ~S"""
Returns the last number of the range
## Examples
iex> last(0..5)
5
iex> last(5..0)
0
"""
@doc since: "0.1.0"
@spec last(Range.t) :: integer
def last(_start..finish), do:
finish
@doc ~S"""
Returns the larger end of the range
## Examples
iex> max(0..5)
5
iex> max(5..0)
5
"""
@doc since: "0.1.0"
def max(start..finish) when finish < start, do:
start
def max(_start..finish), do:
finish
@doc ~S"""
Returns the smaller end of the range
## Examples
iex> min(0..5)
0
iex> min(5..0)
0
"""
@doc since: "0.1.0"
def min(start..finish) when finish < start, do:
finish
def min(start.._finish), do:
start
@doc ~S"""
Returns a new range that immediately follows the range provided, with an equivalent size
## Examples
iex> next_page(1..10)
11..20
iex> next_page(10..1)
0..-9
"""
@doc since: "0.1.0"
@spec next_page(Range.t) :: Range.t
def next_page(start..finish) when finish < start, do:
finish-1..finish-(1+start-finish)
def next_page(start..finish), do:
finish+1..finish+(1+finish-start)
@doc ~S"""
Returns a new range that immediately precedes the range provided, with an equivalent size
## Examples
iex> prev_page(1..10)
-9..0
iex> prev_page(10..1)
20..11
"""
@doc since: "0.1.0"
@spec prev_page(Range.t) :: Range.t
def prev_page(start..finish) when finish < start, do:
(start+(1+start-finish))..finish+(1+start-finish)
def prev_page(start..finish), do:
(start-(1+finish-start))..start-1
end
|
lib/passive_support/base/range.ex
| 0.88578
| 0.72645
|
range.ex
|
starcoder
|
defmodule Riak.CRDT.Map do
@moduledoc """
Encapsulates Riak maps
"""
require Record
@doc """
Creates a new `map`
"""
def new(), do: :riakc_map.new()
@doc """
Get the `map` size
"""
def size(map) when Record.is_record(map, :map), do: :riakc_map.size(map)
def size(nil), do: {:error, :nil_object}
def size({:error, term}), do: {:error, term}
@doc """
Fetch the value associated to `key` with the `key_type` on `map`
"""
def get(map, key_type, key) when Record.is_record(map, :map) do
:riakc_map.fetch({key, key_type}, map)
end
def get(nil, _, _), do: {:error, :nil_object}
def get({:error, term}, _, _), do: {:error, term}
@doc """
Update the `key` on the `map` by passing the function `fun`
to update the value based on the current value (if exists) as argument
The key_type must be :register, :map, :set, :flag or :counter
"""
def update(map, key_type, key, fun) when Record.is_record(map, :map)
and is_atom(key_type)
and is_binary(key)
and is_function(fun, 1) do
:riakc_map.update({key, key_type}, fun, map)
end
def update(nil, _, _, _), do: {:error, :nil_object}
def update({:error, term}, _, _, _), do: {:error, term}
@doc """
Update the `key` on the `map` by passing the `value`
The value can be any other CRDT
"""
def put(map, key, value) when Record.is_record(map, :map)
and is_binary(key) do
key_type = Riak.CRDT.type(value)
fun = fn _ -> value end
:riakc_map.update({key, key_type}, fun, map)
end
def put(nil, _, _), do: {:error, :nil_object}
def put({:error, term}, _, _), do: {:error, term}
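  # Illustrative usage (assumes a register CRDT module such as
  # Riak.CRDT.Register with a new/1 constructor; adjust to the register API in
  # your client version):
  #
  #     map = Riak.CRDT.Map.new()
  #     map = Riak.CRDT.Map.put(map, "name", Riak.CRDT.Register.new("Mary"))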
@doc """
Delete a `key` from the `map`
"""
def delete(map, key) when Record.is_record(map, :map) do
:riakc_map.erase(key, map)
end
def delete(nil, _), do: {:error, :nil_object}
def delete({:error, term}, _), do: {:error, term}
@doc """
Get the original value of the `map`
"""
def value(map) when Record.is_record(map, :map), do: :riakc_map.value(map)
def value(nil), do: {:error, :nil_object}
def value({:error, term}), do: {:error, term}
@doc """
List all keys of the `map`
"""
def keys(map) when Record.is_record(map, :map), do: :riakc_map.fetch_keys(map)
def keys(nil), do: {:error, :nil_object}
def keys({:error, term}), do: {:error, term}
@doc """
Test if the `key` is contained in the `map`
"""
def has_key?(map, key) when Record.is_record(map, :map) do
:riakc_map.is_key(key, map)
end
def has_key?(nil, _), do: {:error, :nil_object}
def has_key?({:error, term}, _), do: {:error, term}
end
|
lib/riak/crdt/map.ex
| 0.791217
| 0.709887
|
map.ex
|
starcoder
|
defmodule ExMaps do
@moduledoc """
Public ExMaps application interface.
"""
alias ExMaps.{DirectionsCoordinator, DistanceMatrixCoordinator}
@typedoc """
  General parameters.
  Format of the output of a Google Maps API call.
  Please note that json is recommended by the Google docs.
"""
@type output_format :: :json | :xml
@type protocol :: :https | :http
@type waypoint :: String.t() | {float, float} | %{place_id: String.t()}
@type origins :: String.t() | {float, float} | %{place_id: String.t()}
@type destinations :: String.t() | {float, float} | %{place_id: String.t()}
@type ttl :: integer()
@typedoc """
Required Distance Calculations API request parameters.
* `origin` — It can be passed in three different forms, as the address string,
latitude/longitude tuple or map containing PlaceID.
* `destination` — It can be passed in three different forms, as the address string,
latitude/longitude tuple or map containing PlaceID.
"""
@type coordinates :: [%{origin: waypoint, destination: waypoint}]
@typedoc """
Required Distance Matrix API request parameters.
* `origin` — It can be passed in three different forms, as the address string,
latitude/longitude tuple or map containing PlaceID.
* `destination` — It can be passed in three different forms, as the address string,
latitude/longitude tuple or map containing PlaceID.
"""
@type matrix_coordinates :: [%{origins: [waypoint], destinations: [waypoint]}]
@typedoc """
Shared APIs request optional parameters. Detailed description can be found below:
https://developers.google.com/maps/documentation/directions/intro
* `mode` - Specifies the mode of transport to use when calculating directions.
Defaults to driving.
* `waypoints` - A list of waypoints.
* `alternatives` - If set to true, API may provide more than one route alternative.
* `avoid` - List of specific routes to avoid.
* `language` - Directions may be provided in specified language (but not all JSON / XML answer fields)
* `units` - If not present, unit system of the origin's country or region will be returned.
* `region` - Biasing on a specific region.
* `arrival_time` - Desired arrival time in seconds since midnight, January 1, 1970 UTC.
* `departure_time` - Desired departure time in seconds since midnight, January 1, 1970 UTC.
* `traffic_model` - It may only be specified for driving directions where the request includes a departure_time.
* `transit_mode` - It may only be specified for transit directions.
* `transit_routing_preference` - It may bias the options returned.
"""
@type mode :: :driving | :walking | :bicycling | :transit
@type waypoints :: :waypoints
@type alternatives :: boolean()
@type avoid :: [avoid_value]
@type avoid_value :: :tolls | :highways | :ferries | :indoor
@type language :: String.t()
@type units :: :metric | :imperial
@type region :: String.t()
@type arrival_time :: integer
@type departure_time :: integer
@type traffic_model :: :best_guess | :pessimistic | :optimistic
@type transit_mode :: :bus | :subway | :train | :tram | :rail
@type transit_routing_preference :: :less_walking | :fewer_transfers
@type option ::
mode
| output_format
| waypoints
| alternatives
| language
| units
| region
| arrival_time
| departure_time
| traffic_model
| transit_mode
| transit_routing_preference
@type options :: [{option, term}]
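  # Illustrative options list combining several of the option types above:
  #
  #     [mode: :transit, language: "pl", units: :metric, alternatives: true]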
@doc """
Returns calculated directions between provided locations.
  It checks whether directions with the same set of options were already calculated
  and saved in the cache; if not, it calls the Google API, fetches the result, saves it
  in the cache and returns it.
## Examples
iex> ExMaps.get_directions([%{origin: "Warsaw", destination: "Amsterdam"}], units: :metric)
[%{"geocoded_waypoints" => ... }]
"""
@spec get_directions(coordinates, options) :: [map]
def get_directions(coordinates, options \\ []) when is_list(coordinates) do
DirectionsCoordinator.spawn_workers(coordinates, options)
end
@doc """
Returns travel distance and time for a matrix of origins and destinations.
  It checks whether a matrix with the same set of options was already requested
  and saved in the cache; if not, it calls the Google API, fetches the result, saves it
  in the cache and returns it.
## Examples
iex> ExMaps.get_distance_matrix([%{origins: ["Warsaw", "Kraków"], destinations: ["Amsterdam", "Utrecht"]}], language: "pl")
[%{"destination_addresses" => ...}]
"""
@spec get_distance_matrix(matrix_coordinates, options) :: [map]
def get_distance_matrix(matrix_coordinates, options \\ []) do
DistanceMatrixCoordinator.spawn_workers(matrix_coordinates, options)
end
end
|
lib/ex_maps.ex
| 0.9027
| 0.591487
|
ex_maps.ex
|
starcoder
|
defmodule AdventOfCode.Day03 do
import AdventOfCode.Utils
@spec part1([binary()]) :: integer()
def part1(args) do
frequencies = parse_args(args) |> dominant_bits()
gamma = frequencies |> Integer.undigits(2)
epsilon = frequencies |> Enum.map(&Bitwise.bxor(&1, 1)) |> Integer.undigits(2)
gamma * epsilon
end
@spec part2([binary()]) :: integer()
def part2(args) do
data = parse_args(args) |> Enum.to_list()
oxygen_rating = filter_rating_by(data, :oxygen)
    co2_rating = filter_rating_by(data, :co2)
    oxygen_rating * co2_rating
end
@spec filter_rating_by([[integer]], atom) :: integer
defp filter_rating_by(entries, type) do
entries
|> filter_rating_by(type, 0)
|> Integer.undigits(2)
end
@spec filter_rating_by([[integer]], atom, integer) :: [integer]
defp filter_rating_by(entries, _, _) when length(entries) == 1 do
hd(entries)
end
defp filter_rating_by(entries, type, position) when length(entries) > 1 do
selected_bit =
entries
|> dominant_bits()
|> Enum.at(position)
required_bit =
case type do
:oxygen -> selected_bit
        :co2 -> Bitwise.bxor(selected_bit, 1)
end
entries
|> Enum.filter(&(Enum.at(&1, position) == required_bit))
|> filter_rating_by(type, position + 1)
end
@spec dominant_bits([[integer]]) :: [integer]
defp dominant_bits(entries) do
[first_element] = Enum.take(entries, 1)
init = List.duplicate(0, length(first_element))
{number_ones, total} =
Enum.reduce(entries, {init, 0}, fn entry, {count, total} ->
{merge(count, entry), total + 1}
end)
threshold = total / 2
Enum.map(number_ones, &bool_to_int(&1 >= threshold))
end
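  # Illustrative: for entries [[1, 0, 1], [1, 1, 0], [0, 1, 1]] the column sums
  # are [2, 2, 2] against a threshold of 1.5, so dominant_bits/1 returns
  # [1, 1, 1].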
@spec merge([integer], [integer]) :: [integer]
defp merge(a, b) do
Enum.zip(a, b) |> Enum.map(fn {a, b} -> a + b end)
end
@spec bool_to_int(boolean) :: 0 | 1
defp bool_to_int(true), do: 1
defp bool_to_int(false), do: 0
@spec parse_args([binary()]) :: [[integer()]]
defp parse_args(args), do: Enum.map(args, &parse_line/1)
defp parse_line(line), do: String.graphemes(line) |> Enum.map(&parse_int!/1)
end
|
lib/advent_of_code/day_03.ex
| 0.766162
| 0.444806
|
day_03.ex
|
starcoder
|
defmodule Txbox.Transactions do
@moduledoc """
Collection of functions for composing Ecto queries.
The functions in this module can be broadly split into two types, expressions
and queries.
## Expressions
Expression functions can be used to compose queries following the Elixir
pipeline syntax.
iex> Tx
...> |> Transactions.confirmed(true)
...> |> Transactions.tagged(["space", "photos"])
%Ecto.Query{}
## Queries
Query functions interface with the repo and either create or return records
from the repo.
iex> Transactions.list_tx()
[%Tx{}, ...]
"""
import Ecto.Query, warn: false
alias Ecto.Multi
alias Txbox.Transactions.{Tx, MapiResponse}
@query_keys [:channel, :search, :tagged, :from, :to, :at, :order, :limit, :offset, :rawtx]
@doc """
Returns the application's configured Repo.
Ensure your application's Repo is configured in `config.exs`:
config :txbox, repo: MyApp.Repo
"""
@spec repo() :: module
def repo(), do: Application.get_env(:txbox, :repo)
@doc """
  Get a transaction by its internal ID or TXID.
  Can optionally pass an `Ecto.Queryable.t` as the first argument to compose
  queries.
## Examples
      # Find a tx by its Txbox uuid
      iex> tx = Transactions.find_tx "e9d356cf-47e9-47c3-bfc8-c12673877302"
      # Composed query, found by txid
      iex> tx = Transactions.channel("mychannel")
...> |> Transactions.confirmed(true)
...> |> Transactions.find_tx("6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110")
"""
@doc group: :query
@spec get_tx(Ecto.Queryable.t, binary) :: Ecto.Schema.t | nil
def get_tx(tx \\ Tx, id) when is_binary(id) do
pre_qry = from(r in MapiResponse, order_by: [desc: r.inserted_at])
qry = tx
|> preload(status: ^pre_qry)
|> optimize_select
case String.match?(id, ~r/^[a-f0-9]{64}$/i) do
true ->
qry
|> repo().get_by(txid: id)
false ->
qry
|> repo().get(id)
end
end
@doc false
def list_tx(), do: list_tx(Tx, %{})
@doc false
def list_tx(Tx = tx), do: list_tx(tx, %{})
def list_tx(%Ecto.Query{} = tx), do: list_tx(tx, %{})
@doc """
Returns a list of transactions.
  Can optionally pass an `Ecto.Queryable.t` as the first argument to compose
  queries. If a map of query options is given as a second argument, the query
  is filtered by those arguments.
## Examples
      iex> txns = Transactions.channel("mychannel")
...> |> Transactions.confirmed(true)
...> |> Transactions.list_tx
"""
@doc group: :query
@spec list_tx(Ecto.Queryable.t, map) :: list(Ecto.Schema.t)
  def list_tx(tx, params) when is_map(params) do
pre_qry = from(r in MapiResponse, order_by: [desc: r.inserted_at])
tx
|> preload(status: ^pre_qry)
|> query(params)
|> optimize_select
|> repo().all
end
@doc """
Returns a list of transactions that must be pushed or have their status
confirmed by mAPI.
This is used internally by `Txbox.Mapi.Queue` to fetch transactions for
automatic processing.
## Options
The accepted options are:
* `:max_status_attempts` - How many times to poll mAPI for confirmation status. Defaults to `20`.
* `:retry_status_after` - Number of seconds before polling mAPI for confirmation status. Defaults to `300` (5 minutes).
"""
@doc group: :query
@spec list_tx_for_mapi(keyword) :: list(Ecto.Schema.t)
def list_tx_for_mapi(opts \\ []) do
max_status_attempts = Keyword.get(opts, :max_status_attempts, 20)
retry_status_after = Keyword.get(opts, :retry_status_after, 300)
retry_datetime = DateTime.now!("Etc/UTC") |> DateTime.add(-retry_status_after)
pre_qry = from(r in MapiResponse, order_by: [desc: r.inserted_at])
Tx
|> join(:left, [t], r in subquery(pre_qry), on: r.tx_guid == t.guid)
|> preload(status: ^pre_qry)
|> where([t],
t.state == "queued"
and fragment("SELECT COUNT(*) FROM txbox_mapi_responses WHERE type = ? AND tx_guid = ?", "push", t.guid) < 1)
|> or_where([t, r],
t.state == "pushed"
and (is_nil(r) or r.inserted_at < ^retry_datetime)
and fragment("SELECT COUNT(*) FROM txbox_mapi_responses WHERE type = ? AND tx_guid = ?", "status", t.guid) < ^max_status_attempts)
|> repo().all
end
@doc """
Creates a transaction from the given params.
Returns an `:ok` / `:error` tuple response.
## Examples
iex> {:ok, tx} = Transactions.create_tx(%{
...> txid: "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110",
...> channel: "mychannel"
...> })
"""
@doc group: :query
@spec create_tx(map) :: {:ok, Ecto.Schema.t} | {:error, Ecto.Changeset.t()}
def create_tx(attrs \\ %{}) do
%Tx{}
|> Tx.changeset(attrs)
|> repo().insert
end
@doc """
Updates the transaction with the given params.
Returns an `:ok` / `:error` tuple response.
## Examples
iex> {:ok, tx} = Transactions.update_tx(tx, %{
...> meta: %{
...> title: "Hubble Ultra-Deep Field"
...> }
...> })
"""
@doc group: :query
@spec update_tx(Ecto.Schema.t, map) ::
{:ok, Ecto.Schema.t} |
{:error, Ecto.Changeset.t()}
def update_tx(%Tx{} = tx, attrs \\ %{}) do
tx
|> Tx.changeset(attrs)
|> repo().update
|> case do
{:ok, tx} ->
pre_qry = from(r in MapiResponse, order_by: [desc: r.inserted_at])
{:ok, repo().preload(tx, [status: pre_qry], force: true)}
error ->
error
end
end
@doc """
Updates the given transaction's state.
Returns an `:ok` / `:error` tuple response.
## Examples
iex> {:ok, tx} = Transactions.update_tx_state(tx, "pushed")
"""
@doc group: :query
@spec update_tx_state(Ecto.Schema.t, String.t) ::
{:ok, Ecto.Schema.t} |
{:error, Ecto.Changeset.t()}
def update_tx_state(%Tx{} = tx, state),
do: update_tx(tx, %{state: state})
@doc """
Updates the given transaction's state, and stores the mAPI response.
Returns an `:ok` / `:error` tuple response.
## Examples
iex> {:ok, tx} = Transactions.update_tx_state(tx, "pushed", mapi_response)
"""
@doc group: :query
@spec update_tx_state(Ecto.Schema.t, String.t, Manic.JSONEnvelope.t | map) ::
{:ok, Ecto.Schema.t} |
{:error, Ecto.Changeset.t} |
{:error, %{required(atom) => Ecto.Changeset.t}}
def update_tx_state(%Tx{state: "queued"} = tx, state, mapi_response) do
mapi = Ecto.build_assoc(tx, :mapi_responses)
Multi.new
|> Multi.insert(:mapi_response, MapiResponse.push_changeset(mapi, mapi_response))
|> Multi.update(:tx, Fsmx.transition_changeset(tx, state, mapi_response))
|> update_tx_state
end
def update_tx_state(%Tx{state: "pushed"} = tx, state, mapi_response) do
mapi = Ecto.build_assoc(tx, :mapi_responses)
Multi.new
|> Multi.insert(:mapi_response, MapiResponse.status_changeset(mapi, mapi_response))
|> Multi.update(:tx, Fsmx.transition_changeset(tx, state, mapi_response))
|> update_tx_state
end
def update_tx_state(%Tx{} = tx, state, _mapi_response) do
Multi.new
|> Multi.update(:tx, Fsmx.transition_changeset(tx, state))
|> update_tx_state
end
defp update_tx_state(%Multi{} = multi) do
case repo().transaction(multi) do
{:ok, %{tx: tx}} ->
pre_qry = from(r in MapiResponse, order_by: [desc: r.inserted_at])
{:ok, repo().preload(tx, [status: pre_qry], force: true)}
{:error, name, changeset, _} ->
{:error, %{name => changeset}}
{:error, error} ->
{:error, error}
end
end
@doc """
Deletes the given transaction from the repo.
Returns an `:ok` / `:error` tuple response.
## Examples
iex> {:ok, tx} = Transactions.get_tx("6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110")
...> |> Transactions.delete_tx
"""
@doc group: :query
@spec delete_tx(Ecto.Schema.t) :: {:ok, Ecto.Schema.t} | {:error, Ecto.Changeset.t()}
def delete_tx(%Tx{} = tx),
do: repo().delete(tx)
@doc """
Returns a list of transactions filtered by the given search term.
  Performs a full text search on the transactions' metadata. Can optionally pass
  an `Ecto.Queryable.t` as the first argument to compose queries.
## Examples
iex> {:ok, txns} = Transactions.search_tx("unwriter bitpic")
"""
@doc group: :query
@spec search_tx(Ecto.Queryable.t, String.t) :: list(Ecto.Schema.t)
def search_tx(tx \\ Tx, term) when is_binary(term) do
tx
|> search(term)
|> list_tx
end
@doc """
Query by the given query map.
"""
@doc group: :expression
@spec query(Ecto.Queryable.t, map) :: Ecto.Queryable.t
def query(tx, %{} = qry) do
qry
|> normalize_query
|> Enum.reduce(tx, &build_query/2)
end
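  # Illustrative composition (keys outside @query_keys are ignored):
  #
  #     Tx
  #     |> Transactions.query(%{channel: "mychannel", tagged: ["photos"], limit: 10})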
@doc """
Search by the given term.
"""
@doc group: :expression
@spec search(Ecto.Queryable.t, String.t) :: Ecto.Queryable.t
def search(tx, term) when is_binary(term) do
tx
|> where(fragment("search_vector @@ plainto_tsquery(?)", ^term))
|> order_by(fragment("ts_rank(search_vector, plainto_tsquery(?)) DESC", ^term))
end
@doc """
Query by the given channel name.
"""
@doc group: :expression
@spec channel(Ecto.Queryable.t, binary) :: Ecto.Queryable.t
def channel(tx, "_"), do: tx
def channel(tx, chan)
when is_binary(chan),
do: where(tx, channel: ^chan)
@doc """
Query by the given tag or list of tags.
Optionally tags can be specified as a comma seperated string
"""
@doc group: :expression
@spec tagged(Ecto.Queryable.t, list | String.t) :: Ecto.Queryable.t
def tagged(tx, tags) when is_list(tags),
do: where(tx, fragment("tags @> ?", ^tags))
def tagged(tx, tags) when is_binary(tags),
do: tagged(tx, String.split(tags, ",") |> Enum.map(&String.trim/1))
@doc """
Query by the transaction confirmation status.
"""
@doc group: :expression
@spec confirmed(Ecto.Queryable.t, boolean) :: Ecto.Queryable.t
def confirmed(tx, conf \\ true)
def confirmed(tx, true),
do: where(tx, [t], t.state == "confirmed")
def confirmed(tx, false),
do: where(tx, [t], t.state != "confirmed")
@doc """
Ensures rawtx is selected in the given query.
"""
@doc group: :expression
@spec with_rawtx(Ecto.Queryable.t) :: Ecto.Queryable.t
def with_rawtx(tx), do: select(tx, [t], t)
# Normalizes a query map by converting all keys to strings, taking the
# allowed keys, and converting back to atoms
defp normalize_query(query) do
query
|> Map.new(fn {k, v} -> {normalize_key(k), v} end)
|> Map.take(Enum.map(@query_keys, &Atom.to_string/1))
|> Map.new(fn {k, v} -> {String.to_atom(k), v} end)
end
# Normalizes the given key as a string
defp normalize_key(key) when is_atom(key), do: Atom.to_string(key)
defp normalize_key(key), do: key
# Composes a query from the given tuple, adding to the existing queryable
defp build_query({:search, term}, tx), do: search(tx, term)
defp build_query({:channel, chan}, tx), do: channel(tx, chan)
defp build_query({:tagged, tags}, tx), do: tagged(tx, tags)
defp build_query({:from, height}, tx),
do: where(tx, [t], t.block_height >= ^height)
defp build_query({:to, height}, tx),
do: where(tx, [t], t.block_height <= ^height)
defp build_query({:at, true}, tx), do: confirmed(tx, true)
defp build_query({:at, "-null"}, tx), do: confirmed(tx, true)
defp build_query({:at, false}, tx), do: confirmed(tx, false)
defp build_query({:at, nil}, tx), do: confirmed(tx, false)
defp build_query({:at, "null"}, tx), do: confirmed(tx, false)
defp build_query({:at, height}, tx),
do: where(tx, [t], t.block_height == ^height)
defp build_query({:order, "created_at"}, tx),
do: order_by(tx, asc: :inserted_at)
defp build_query({:order, "inserted_at"}, tx),
do: order_by(tx, asc: :inserted_at)
defp build_query({:order, "-created_at"}, tx),
do: order_by(tx, desc: :inserted_at)
defp build_query({:order, "-inserted_at"}, tx),
do: order_by(tx, desc: :inserted_at)
defp build_query({:order, "i"}, tx),
do: order_by(tx, asc: :block_height)
defp build_query({:order, "block_height"}, tx),
do: order_by(tx, asc: :block_height)
defp build_query({:order, "-i"}, tx),
do: order_by(tx, desc: :block_height)
defp build_query({:order, "-block_height"}, tx),
do: order_by(tx, desc: :block_height)
defp build_query({:order, _order}, tx), do: tx
defp build_query({:limit, num}, tx), do: limit(tx, ^num)
defp build_query({:offset, num}, tx), do: offset(tx, ^num)
defp build_query({:rawtx, val}, tx) when val in [false, nil], do: tx
defp build_query({:rawtx, _}, tx), do: with_rawtx(tx)
# Optimizes the select query unless already set
defp optimize_select(%{select: sel} = tx) when not is_nil(sel), do: tx
defp optimize_select(tx) do
keys = Tx.__schema__(:fields)
|> Enum.reject(& &1 == :rawtx)
select(tx, ^keys)
end
end
|
lib/txbox/transactions.ex
| 0.86674
| 0.431614
|
transactions.ex
|
starcoder
|
defmodule PactElixir.Response do
@moduledoc """
Represent the expected response.
"""
# @derive [Poison.Encoder]
defstruct [:body, :headers, :status, :matching_rules]
def new(attributes \\ %{}) do
value_or_default = &value_from_map(attributes, &1, &2)
%PactElixir.Response{
body: value_or_default.(:body, "") |> collect_values_for_body(),
headers: value_or_default.(:headers, %{"Content-Type" => "application/json"}),
status: value_or_default.(:status, 200),
matching_rules: value_or_default.(:body, %{}) |> matching_rules()
}
# |> PactElixir.TermDetector.recursively_update_terms()
end
defp value_from_map(attributes, name, default) do
attributes[name] || attributes[:"#{name}"] || default
end
def collect_values_for_body(body) when is_map(body) do
body
|> Map.to_list()
|> Enum.map(fn
{k, %PactElixir.TypeMatcher{value: value}} -> {k, collect_values_for_body(value)}
{k, %PactElixir.Term{generate: value, regex: _pattern}} -> {k, value}
{k, %{} = v} -> {k, collect_values_for_body(v)}
{k, v} -> {k, v}
end)
|> Enum.into(%{})
end
def collect_values_for_body(body) do
body
end
def matching_rules(body), do: do_matching_rules({:body, body}, [:"$"], %{})
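  # Illustrative, derived from the clauses below: a TypeMatcher in the body
  # yields a dotted JSON-path rule.
  #
  #     iex> PactElixir.Response.matching_rules(%{name: %PactElixir.TypeMatcher{value: "Mary"}})
  #     %{"$.body.name" => %{"match" => "type"}}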
def do_matching_rules(
{path, %PactElixir.TypeMatcher{value: value} = matcher},
previous_paths,
rules
) do
do_matching_rules(
{path, value},
previous_paths,
rules |> add_rule(path, previous_paths, matcher)
)
end
def do_matching_rules(
{path, %PactElixir.Term{generate: value} = matcher},
previous_paths,
rules
) do
do_matching_rules(
{path, value},
previous_paths,
rules |> add_rule(path, previous_paths, matcher)
)
end
def do_matching_rules({path, content}, previous_paths, rules) when is_map(content) do
content
|> Enum.reduce(rules, fn {key, value}, rules ->
do_matching_rules({key, value}, previous_paths ++ [path], rules)
end)
end
def do_matching_rules({path, values}, previous_paths, rules) when is_list(values) do
values
|> Enum.with_index()
|> Enum.reduce(rules, fn {value, index}, rules ->
do_matching_rules({key_for_list_element(path, index), value}, previous_paths, rules)
end)
end
def do_matching_rules({_path, value}, _previous_paths, _rules) when is_tuple(value),
do: raise(ArgumentError, "Tuples are not supported. Given #{value |> inspect()}")
def do_matching_rules(_content, _previous_paths, rules), do: rules
def add_rule(rules, key, previous_paths, %PactElixir.TypeMatcher{}) do
rules |> Map.put(Enum.join(previous_paths ++ [key], "."), %{"match" => "type"})
end
def add_rule(rules, key, previous_paths, %PactElixir.Term{regex: regex}) do
rules
|> Map.put(Enum.join(previous_paths ++ [key], "."), %{"match" => "regex", "regex" => regex})
end
def key_for_list_element(path, index) do
"#{path}[#{index}]"
end
end
defimpl Poison.Encoder, for: PactElixir.Response do
def encode(
%PactElixir.Response{
body: body,
headers: headers,
status: status,
matching_rules: matching_rules
},
options
) do
Poison.Encoder.Map.encode(
%{
body: body,
headers: headers,
status: status,
matchingRules: matching_rules
},
options
)
end
end
|
lib/pact_elixir/response.ex
| 0.588416
| 0.449393
|
response.ex
|
starcoder
|
defmodule Receiver do
@moduledoc ~S"""
Conveniences for creating processes that hold important state.
A wrapper around an `Agent` that adds callbacks and reduces boilerplate code, making it
quick and easy to store important state in a separate supervised process.
# Use cases
* Creating a "stash" to persist process state across restarts. See [example](#stash) below.
* Application or server configuration. See [example](#config) below.
* Storing mutable state outside of a worker process, or as a shared repository
for multiple processes running the same module code. See [example](#a-look-at-callbacks) below.
  * Testing higher-order functions. By passing a function that calls a `Receiver` process into a higher-order
    function, you can test whether the function is executed as intended by checking the change in state.
    See the `ExUnitReceiver` module documentation.
## <a name="stash"></a>Using as a stash
defmodule Counter do
use GenServer
use Receiver, as: :stash
def start_link(arg) do
GenServer.start_link(__MODULE__, arg, name: __MODULE__)
end
def increment(num) do
GenServer.cast(__MODULE__, {:increment, num})
end
def get do
GenServer.call(__MODULE__, :get)
end
# The stash is started with the initial state of the counter. If the stash is already
# started when `start_stash/1` is called then its state will not change. The current state
# of the stash is returned as the initial counter state whenever the counter is started.
def init(arg) do
start_stash(fn -> arg end)
{:ok, get_stash()}
end
def handle_cast({:increment, num}, state) do
{:noreply, state + num}
end
def handle_call(:get, _from, state) do
{:reply, state, state}
end
# The stash is updated to the current counter state before the counter exits.
# This state will be stored for use as the initial state of the counter when
# it restarts, allowing the state to persist in the event of failure.
def terminate(_reason, state) do
update_stash(fn _ -> state end)
end
end
The line `use Receiver, as: :stash` creates an `Agent` named with the `:via` semantics of the `Registry` module.
The stash is supervised in the `Receiver` application supervision tree, not in your own application's. It also
defines the following *private* client functions in the `Counter` module:
* `start_stash/0` - Defaults the inital state to an empty list.
* `start_stash/1` - Expects an anonymous function that will return the initial state when called.
* `start_stash/3` - Expects a module, function name, and list of args that will return the initial state
when called.
* `stop_stash/2` - Optional `reason` and `timeout` args. See `stop/3` for more information.
* `get_stash/0` - Returns the current state of the stash.
* `get_stash/1` - Expects an anonymous function that accepts a single argument. The state of the stash
is passed to the anonymous function, and the result of the function is returned.
* `update_stash/1` - Updates the state of the stash. Expects an anonymous function that receives
the current state as an argument and returns the updated state.
* `get_and_update_stash/1` - Gets and updates the stash. Expects an anonymous function that receives the
current state as an argument and returns a two element tuple, the first element being the value to
return, the second element is the updated state.
If no `:as` option were given in this example then the default function names are used:
* `start_receiver/0`
* `start_receiver/1`
* `start_receiver/3`
* `stop_receiver/2`
* `get_receiver/0`
* `get_receiver/1`
* `update_receiver/1`
* `get_and_update_receiver/1`
See more detail on the generated functions in the [client functions](#client-functions) section below.
The `Counter` can now be supervised and its state will be isolated from failure and persisted across restarts.
# Start the counter under a supervisor
{:ok, _pid} = Supervisor.start_link([{Counter, 0}], strategy: :one_for_one)
# Get the state of the counter
Counter.get()
#=> 0
# Increment the counter
Counter.increment(2)
#=> :ok
# Get the updated state of the counter
Counter.get()
#=> 2
# Stop the counter, initiating a restart and losing the counter state
GenServer.stop(Counter)
#=> :ok
# Get the counter state, which was persisted across restarts with help of the stash
Counter.get()
#=> 2
## <a name="client-functions"></a>Client functions
When we `use Receiver, as: :stash` above, the following private function definitions
are automatically generated inside the `Counter` module:
defp start_stash do
Receiver.start_supervised({__MODULE__, :stash}, fn -> [] end)
end
defp start_stash(fun) do
Receiver.start_supervised({__MODULE__, :stash}, fun)
end
      defp start_stash(module, fun, args) do
Receiver.start_supervised({__MODULE__, :stash}, module, fun, args)
end
defp stop_stash(reason \\ :normal, timeout \\ :infinity) do
Receiver.stop({__MODULE__, :stash}, reason, timeout)
end
defp get_stash do
Receiver.get({__MODULE__, :stash})
end
defp get_stash(fun) do
Receiver.get({__MODULE__, :stash}, fun)
end
defp update_stash(fun) do
Receiver.update({__MODULE__, :stash}, fun)
end
defp get_and_update_stash(fun) do
Receiver.get_and_update({__MODULE__, :stash}, fun)
end
These are private to encourage starting, stopping, and updating the stash from only the `Counter` API.
A receiver can always be manipulated by calling the `Receiver` functions directly
i.e. `Receiver.update({Counter, :stash}, & &1 + 1)`, but use these functions with caution to avoid
race conditions.
## <a name="config"></a>Using as a configuration store
A `Receiver` can be used to store application configuration, and even be initialized
at startup. Since the receiver processes are supervised in a separate application
that is started as a dependency of yours, it will already be ready to start even before your
application's `start/2` callback has returned:
defmodule MyApp do
@doc false
use Application
use Receiver, as: :config
def start(_app, _type) do
start_config(fn ->
Application.get_env(:my_app, :configuration, [setup: :default])
|> Enum.into(%{})
end)
children = [
MyApp.Worker,
MyApp.Task
]
Supervisor.start_link(children, strategy: :one_for_one, name: MyApp)
end
def config, do: get_config()
end
Now the configuration can be globally read with the public `MyApp.config/0`.
MyApp.config()
#=> %{setup: :default}
MyApp.config.setup
#=> :default
## <a name="a-look-at-callbacks"></a>A look at callbacks
The first argument to all of the callbacks is the name of the receiver. This will either be the atom passed to
the `:as` option or the default name `:receiver`. The intent is to avoid any naming collisions with other `handle_*`
callbacks.
defmodule Account do
use GenServer
use Receiver, as: :ledger
# Client API
def start_link(initial_balance) do
start_ledger(fn -> %{} end)
GenServer.start_link(__MODULE__, initial_balance)
end
def get_balance_history(pid) do
get_ledger(fn ledger -> Map.get(ledger, pid) end)
end
def transact(pid, amount) do
GenServer.cast(pid, {:transact, amount})
end
# GenServer callbacks
def init(initial_balance) do
pid = self()
update_ledger(fn ledger -> Map.put(ledger, pid, [initial_balance]) end)
{:ok, initial_balance}
end
def handle_cast({:transact, amount}, balance) do
pid = self()
new_balance = balance + amount
update_ledger(fn ledger -> Map.update(ledger, pid, [new_balance], &([new_balance | &1])) end)
{:noreply, new_balance}
end
# Receiver callbacks
def handle_start(:ledger, pid, _state) do
IO.inspect(pid, label: "Started ledger")
IO.inspect(self(), label: "From caller")
end
def handle_get(:ledger, history) do
current_balance = history |> List.first()
IO.inspect(self(), label: "Handling get from")
IO.inspect(current_balance, label: "Current balance")
{:reply, history}
end
def handle_update(:ledger, _old_state, new_state) do
pid = self()
new_balance = new_state |> Map.get(pid) |> List.first()
IO.inspect(pid, label: "Handling update from")
IO.inspect(new_balance, label: "Balance updated to")
end
end
All of the callbacks are invoked within the calling process, not the receiver process.
{:ok, one} = Account.start_link(10.0)
# Started ledger: #PID<0.213.0>
# From caller: #PID<0.206.0>
# Handling update from: #PID<0.214.0>
# Balance updated to: 10.0
#=> {:ok, #PID<0.214.0>}
Process.whereis(Receiver.Sup)
#=> #PID<0.206.0>
Receiver.whereis({Account, :ledger})
#=> #PID<0.213.0>
self()
#=> #PID<0.210.0>
In `Account.start_link/1` a ledger is started with a call to `start_ledger/1`. `#PID<0.213.0>` is the ledger pid,
and the calling process `#PID<0.206.0>` handles the `handle_start/3` callback as can be seen in the output.
The calling process in this case is `Receiver.Sup`, the `DynamicSupervisor` that supervises all receivers
when started with the private convenience functions and is the process that makes the actual call to
`Receiver.start_link/1`.
When `init/1` is invoked in the account server (`#PID<0.214.0>`) it updates the ledger with its starting balance by
making a call to `update_ledger/1`, and receives the `handle_update/3` callback.
{:ok, two} = Account.start_link(15.0)
# Handling update from: #PID<0.219.0>
# Balance updated to: 15.0
#=> {:ok, #PID<0.219.0>}
When `start_link/1` is called the second time the ledger already exists so the call to `start_ledger/1` is
a noop and the `handle_start/3` callback is never invoked.
Account.get_balance_history(one)
# Handling get from: #PID<0.210.0>
# Current balance: 10.0
#=> [10.0]
Account.get_balance_history(two)
# Handling get from: #PID<0.210.0>
# Current balance: 15.0
#=> [15.0]
Account.transact(one, 15.0)
# Handling update from: #PID<0.214.0>
# Balance updated to: 25.0
#=> :ok
This may be confusing at first, and it's different from the way callbacks are dispatched in a GenServer for
example. The important thing to remember is that the receiver does not invoke the callbacks, they are always
invoked from the process that's sending it the message.
A `Receiver` is meant to be isolated from complex and potentially error-prone operations. It only exists to
hold important state and should be protected from failure and remain highly available. The callbacks provide
an opportunity to perform additional operations with the receiver data, such as interacting with the outside
world, that may have no impact on the return value and do not expose the receiver itself to errors or block
the process from answering other callers. The goal is to keep the functions passed to the receiver as simple
as possible and perform more complex operations in the callbacks.
"""
use Agent, restart: :transient
@typedoc "The receiver name"
@type receiver :: atom | {module, atom} | pid
@typedoc "Return values of `start_supervised/3` and `start_supervised/5`"
@type on_start_supervised :: DynamicSupervisor.on_start_child() | start_error
@typedoc "Return values of `start/3` and `start/5`"
@type on_start :: Agent.on_start() | start_error
@typedoc "Error tuple returned for pattern matching on function results"
@type start_error ::
{:error, {%UndefinedFunctionError{} | %FunctionClauseError{}, stacktrace :: list}}
@typedoc "Error returned from bad arguments"
@type not_found_error :: {:error, {%Receiver.NotFoundError{}, stacktrace :: list}}
@typedoc "A list of function arguments"
@type args :: [term]
@typedoc "A list of arguments accepted by `start*` functions"
@type start_args ::
[module | fun]
| [module | fun | options]
| [module | atom | args]
| [module | atom | args | options]
@typedoc "Option values used by the `start*` functions"
@type option :: {:as, atom} | {:name, atom}
@typedoc "Options used by the `start*` functions"
@type options :: [option]
@typedoc "The receiver state"
@type state :: term
@typedoc "The registered name of a receiver"
@type registered_name :: {:via, Registry, {Receiver.Registry, {module, atom}}}
@typedoc "The receiver attributes required for successful start and registration"
@type start_attrs :: %{
module: module,
receiver: atom,
name: atom | registered_name,
args: args
}
@doc """
Invoked in the calling process after the receiver is started. All `start*` functions will block until it returns.
`atom` is the name of the receiver passed to the `:as` option at start. Defaults to `:receiver`.
`pid` is the PID of the receiver process, `state` is the starting state of the receiver after the initializing
function is called.
If the receiver was already started when `start*` was called then the callback will not be invoked.
The return value is ignored.
"""
@callback handle_start(atom, pid, state) :: term
@doc """
Invoked in the calling process after the receiver is stopped. `stop/3` will block until it returns.
`atom` is the name of the receiver passed to the `:as` option at start. Defaults to `:receiver`.
`reason` is the exit reason, `state` is the receiver state at the time of shutdown. See `Agent.stop/3`
for more information.
The return value is ignored.
"""
@callback handle_stop(atom, reason :: term, state) :: term
@doc """
Invoked in the calling process after a `get` request is sent to the receiver. `get/1` and `get/2`
will block until it returns.
`atom` is the name of the receiver passed to the `:as` option at start. Defaults to `:receiver`.
`return_value` is the return value of the `get*` anonymous function. With a basic `get` function this is
often the current state of the receiver.
Returning `{:reply, reply}` causes `reply` to be the return value of `get/1` and `get/2`
(and the private `get_receiver` client functions).
"""
@callback handle_get(atom, return_value :: term) :: {:reply, reply :: term}
@doc """
Invoked in the calling process after an `update` is sent to the receiver. `update/2` will
block until it returns.
`atom` is the name of the receiver passed to the `:as` option at start. Defaults to `:receiver`.
`old_state` is the state of the receiver before `update/2` was called. `state` is the updated
state of the receiver.
The return value is ignored.
"""
@callback handle_update(atom, old_state :: state, state) :: term
@doc """
Invoked in the calling process after a `get_and_update` is sent to the receiver. `get_and_update/2` will
block until it returns.
`atom` is the name of the receiver passed to the `:as` option at start. Defaults to `:receiver`.
`return_val` is the first element of the tuple (the return value) of the anonymous function passed to
`get_and_update/2`.
`state` is the second element of the tuple and is the new state of the receiver.
Returning `{:reply, reply}` causes `reply` to be the return value of `get_and_update/2`
(and the private `get_and_update_receiver` client function). Any other return value
raises a `Receiver.CallbackError`.
"""
@callback handle_get_and_update(atom, return_value :: term, state) :: {:reply, reply :: term}
@optional_callbacks handle_start: 3,
handle_stop: 3,
handle_get: 2,
handle_update: 3,
handle_get_and_update: 3
@doc """
Starts a `Receiver` process linked to the current process.
This is the function used to start a receiver as part of a supervision tree. It accepts a list
containing from two to five arguments.
Usually this should be used to build a child spec in your supervision tree.
## Examples
children = [
{Receiver, [One, fn -> 1 end]},
{Receiver, [Two, fn -> 2 end, [name: Two]]},
{Receiver, [Three, Kernel, :+, [2, 1]]},
{Receiver, [Four, Kernel, :+, [2, 2], [name: Four]]}
]
Supervisor.start_link(children, strategy: :one_for_one)
Only use this if you want to supervise your own receiver from application startup. In most cases you can
simply use the `start_supervised*` functions to start a supervised receiver dynamically in an isolated
application. See `start_supervised/3` and `start_supervised/5` for more information.
"""
@spec start_link(start_args) :: on_start
def start_link(list_of_args) when is_list(list_of_args) do
apply(__MODULE__, :start_link, list_of_args)
end
@spec start_link(module, (() -> term), options) :: on_start
def start_link(module, fun, opts \\ [])
when is_atom(module) and is_function(fun, 0) and is_list(opts) do
do_start(:link, module, [fun], opts)
end
@spec start_link(module, module, atom, args, options) :: on_start
def start_link(module, mod, fun, args, opts \\ [])
when is_atom(module) and is_atom(mod) and is_atom(fun) and is_list(args) and is_list(opts) do
do_start(:link, module, [mod, fun, args], opts)
end
@doc """
Starts a new receiver without links (outside of a supervision tree).
See `start_link/3` for more information.
"""
@spec start(module, (() -> term), options) :: on_start
def start(module, fun, opts \\ [])
when is_atom(module) and is_function(fun, 0) and is_list(opts) do
do_start(:nolink, module, [fun], opts)
end
@spec start(module, module, atom, args, options) :: on_start
def start(module, mod, fun, args, opts \\ [])
when is_atom(module) and is_atom(mod) and is_atom(fun) and is_list(args) and is_list(opts) do
do_start(:nolink, module, [mod, fun, args], opts)
end
@spec do_start(:link | :nolink, module, args, options) :: on_start
defp do_start(link, module, args, opts) do
attrs = get_start_attrs(module, args, opts)
start_function =
case link do
:link -> :start_link
:nolink -> :start
end
Agent
|> apply(start_function, [initialization_func(self(), attrs), [name: attrs.name]])
|> invoke_handle_start_callback(module, attrs)
end
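@doc """
Starts a receiver under the `Receiver.Sup` dynamic supervisor, so the process
is supervised by the `Receiver` application rather than by the caller's
supervision tree. Accepts the same arguments as `start_link/3` and
`start_link/5`.
"""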
@spec start_supervised(module, (() -> term), options) :: on_start_supervised
def start_supervised(module, fun, opts \\ [])
when is_atom(module) and is_function(fun, 0) and is_list(opts) do
do_start_supervised([module, fun, opts])
end
@spec start_supervised(module, module, atom, args, options) :: on_start_supervised
def start_supervised(module, mod, fun, args, opts \\ [])
when is_atom(module) and is_atom(mod) and is_atom(fun) and is_list(args) and is_list(opts) do
do_start_supervised([module, mod, fun, args, opts])
end
@spec do_start_supervised(args) :: on_start_supervised
defp do_start_supervised(args) do
child = {Receiver, args}
DynamicSupervisor.start_child(Receiver.Sup, child)
end
@spec invoke_handle_start_callback(
Agent.on_start() | DynamicSupervisor.on_start_child(),
module,
start_attrs
) ::
on_start | on_start_supervised
defp invoke_handle_start_callback(on_start_result, module, attrs) do
with {:ok, pid} <- on_start_result do
initial_state =
receive do
{:initial_state, result} -> result
end
apply(module, :handle_start, [attrs.receiver, pid, initial_state])
{:ok, pid}
end
rescue
# Catch `UndefinedFunctionError` (raised from invoking the `handle_start/3` callback on a
# module that hasn't defined it) and `FunctionClauseError` (raised from a bad pattern match,
# due to an invalid receiver name passed with the `:as` option). At this point the receiver
# has already been started and needs to be stopped gracefully so it isn't orphaned, then return
# the error tuple.
exception in [UndefinedFunctionError, FunctionClauseError] ->
Agent.stop(attrs.name)
{:error, {exception, __STACKTRACE__}}
end
@spec get_start_attrs(module, args, options) :: start_attrs
defp get_start_attrs(module, args, opts) do
receiver = Keyword.get(opts, :as, :receiver)
%{
module: module,
receiver: receiver,
name: Keyword.get(opts, :name, registered_name(module, receiver)),
args: args
}
end
@spec initialization_func(pid, start_attrs) :: (() -> state)
defp initialization_func(caller, attrs) do
# If an atom is provided as the `:name` option at `start*` it overrides the `:via` naming pattern,
# skipping registration with the `Registry`. In this case the process needs to be manually registered
# on initialization so the PID is associated with the receiver name and registered process name.
# If the process has already been registered with the `:via` pattern then `Registry.register/3` returns
# `{:error, {:already_registered, pid}}` and is effectively a noop. We do this from within the
# initialization function because the calling process will be the one registered. See `Registry.register/3`.
fn ->
task = apply(Task.Supervisor, :async, [Receiver.TaskSup | attrs.args])
Registry.register(Receiver.Registry, {attrs.module, attrs.receiver}, attrs.name)
send(caller, {:initial_state, result = Task.await(task)})
result
end
end
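@doc """
Gets the receiver state. `get/1` returns the state itself; `get/2` applies
`fun` to the state and uses its result. Either way, the callback module's
`handle_get/2` is invoked in the calling process and its `{:reply, reply}`
return determines the value returned to the caller.
"""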
@spec get(receiver) :: term
def get(name), do: get(name, & &1)
@spec get(receiver, (state -> term)) :: term
def get(name, fun)
when is_function(fun, 1) and
((not is_nil(name) and is_atom(name)) or is_pid(name) or is_tuple(name)) do
name
|> validate_name()
|> do_get(fun)
end
defp do_get({module, receiver} = name, fun) do
state = Agent.get(whereis(name), fun)
case apply(module, :handle_get, [receiver, state]) do
{:reply, reply} ->
reply
other ->
raise Receiver.CallbackError, """
handle_get/2 must have a return in the form:
* {:reply, reply}
where `reply` is the value to return from the get/1 or get/2 function
Got #{inspect(other)}
"""
end
end
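@doc """
Updates the receiver state by applying `fun` to the current state. Invokes
the callback module's `handle_update/3` in the calling process with the old
and new state, then returns `:ok`.
"""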
@spec update(receiver, (state -> state)) :: :ok
def update(name, fun)
when is_function(fun, 1) and
((not is_nil(name) and is_atom(name)) or is_pid(name) or is_tuple(name)) do
name
|> validate_name()
|> do_update(fun)
end
defp do_update({module, receiver} = name, fun) do
{old_state, new_state} =
Agent.get_and_update(whereis(name), fn old ->
new = fun.(old)
{{old, new}, new}
end)
apply(module, :handle_update, [receiver, old_state, new_state])
:ok
end
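@doc """
Gets and updates the receiver state. `fun` receives the current state and
must return a two element tuple: the value to return and the new state. The
callback module's `handle_get_and_update/3` determines the final reply.
"""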
@spec get_and_update(receiver, (state -> {term, state})) :: term
def get_and_update(name, fun)
when is_function(fun, 1) and
((not is_nil(name) and is_atom(name)) or is_pid(name) or is_tuple(name)) do
name
|> validate_name()
|> do_get_and_update(fun)
end
defp do_get_and_update({module, receiver} = name, fun) do
{return_val, new_state} =
Agent.get_and_update(whereis(name), fn old ->
{return, new} = fun.(old)
{{return, new}, new}
end)
case apply(module, :handle_get_and_update, [receiver, return_val, new_state]) do
{:reply, reply} ->
reply
other ->
raise Receiver.CallbackError, """
handle_get_and_update/3 must have a return in the form:
* {:reply, reply}
where `reply` is the value to return from get_and_update/2
Got #{inspect(other)}
"""
end
end
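@doc """
Stops the receiver with the given `reason` and `timeout`, invoking the
callback module's `handle_stop/3` in the calling process with the state the
receiver held at shutdown. Returns `:ok`.
"""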
@spec stop(receiver, reason :: term, timeout) :: :ok
def stop(name, reason \\ :normal, timeout \\ :infinity)
when (not is_nil(name) and is_atom(name)) or is_pid(name) or
is_tuple(name) do
name
|> validate_name()
|> do_stop(reason, timeout)
end
defp do_stop({module, receiver} = name, reason, timeout) do
pid = whereis(name)
state = Agent.get(pid, & &1)
with :ok <- Agent.stop(pid, reason, timeout) do
apply(module, :handle_stop, [receiver, reason, state])
:ok
end
end
@spec validate_name(receiver) :: {module, atom} | not_found_error
defp validate_name(name) do
case which_receiver(name) do
{_, _} = tuple ->
tuple
_ ->
name = with {mod, atom} <- name, do: "{#{inspect(mod)}, #{inspect(atom)}}"
stacktrace =
self()
|> Process.info(:current_stacktrace)
|> elem(1)
|> List.delete_at(0)
|> List.delete_at(0)
exception = %Receiver.NotFoundError{
message: """
Expected input to be one of the following terms associated with a Receiver:
* atom - the global process name
* pid - the identifier of the process
* {module, atom} - the callback module and receiver name
No Receiver is associated with the input: #{inspect(name)}
"""
}
reraise exception, stacktrace
end
end
@doc """
Returns the PID of a receiver process, or `nil` if it does not exist.
Accepts one argument: a two-element tuple containing the callback module and
the name of the receiver, a registered process name, or a PID.
"""
@spec whereis(receiver) :: pid | nil
def whereis(nil), do: nil
def whereis(pid) when is_pid(pid), do: pid |> which_receiver() |> whereis()
def whereis(name) when is_atom(name), do: name |> which_receiver() |> whereis()
def whereis({mod, receiver} = name) when is_atom(mod) and is_atom(receiver) do
case Registry.lookup(Receiver.Registry, name) do
[{pid, _}] -> pid
_ -> nil
end
end
@doc """
Returns a two element tuple containing the callback module and name of the receiver associated
with a PID or a registered process name.
Accepts one argument, a PID or a `name`. `name` must be an atom that can be used to
register a process with `Process.register/2`.
"""
@spec which_receiver(receiver) :: {module, atom} | nil
def which_receiver(pid) when is_pid(pid) do
case Registry.keys(Receiver.Registry, pid) do
[{_, _} = name] -> name
[] -> nil
end
end
def which_receiver({_, _} = tuple) do
case Registry.lookup(Receiver.Registry, tuple) do
[{pid, name}] when is_pid(pid) and is_atom(name) -> tuple
_ -> nil
end
end
def which_receiver(name) when is_atom(name) do
with pid when is_pid(pid) <- Process.whereis(name), do: which_receiver(pid)
end
@doc """
Returns the `name` of a registered process associated with a receiver. `name` must be an atom that
can be used to register a process with `Process.register/2`.
Accepts one argument, a PID or a two element tuple containing the callback module and the name of the
receiver. Returns nil if no name was registered with the process.
"""
@spec which_name(pid | receiver) :: atom | nil
def which_name(pid) when is_pid(pid), do: do_which_name(pid)
def which_name({_, _} = tuple), do: do_which_name(tuple)
def which_name(name) when is_atom(name), do: do_which_name(name)
defp do_which_name(receiver) do
with {_, _} = receiver <- which_receiver(receiver),
[{_, name}] <- Registry.lookup(Receiver.Registry, receiver),
do: name
end
@spec registered_name(module, receiver) :: registered_name
defp registered_name(module, receiver) do
{:via, Registry, {Receiver.Registry, {module, receiver}}}
end
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
@behaviour Receiver
{test, opts} = Keyword.pop(opts, :test, false)
as = Keyword.get(opts, :as, :receiver)
if test do
defp unquote(:"start_#{as}")() do
start_supervised({Receiver, [__MODULE__, fn -> [] end, unquote(opts)]})
end
defp unquote(:"start_#{as}")(fun) do
start_supervised({Receiver, [__MODULE__, fun, unquote(opts)]})
end
defp unquote(:"start_#{as}")(module, fun, args) do
start_supervised({Receiver, [__MODULE__, module, fun, args, unquote(opts)]})
end
else
defp unquote(:"start_#{as}")() do
Receiver.start_supervised(__MODULE__, fn -> [] end, unquote(opts))
end
defp unquote(:"start_#{as}")(fun) do
Receiver.start_supervised(__MODULE__, fun, unquote(opts))
end
defp unquote(:"start_#{as}")(module, fun, args) do
Receiver.start_supervised(__MODULE__, module, fun, args, unquote(opts))
end
end
defp unquote(:"stop_#{as}")(reason \\ :normal, timeout \\ :infinity) do
Receiver.stop({__MODULE__, unquote(as)}, reason, timeout)
end
defp unquote(:"get_#{as}")() do
Receiver.get({__MODULE__, unquote(as)})
end
defp unquote(:"get_#{as}")(fun) do
Receiver.get({__MODULE__, unquote(as)}, fun)
end
defp unquote(:"update_#{as}")(fun) do
Receiver.update({__MODULE__, unquote(as)}, fun)
end
defp unquote(:"get_and_update_#{as}")(fun) do
Receiver.get_and_update({__MODULE__, unquote(as)}, fun)
end
defoverridable "start_#{as}": 0,
"start_#{as}": 1,
"start_#{as}": 3,
"stop_#{as}": 0,
"stop_#{as}": 1,
"stop_#{as}": 2,
"get_#{as}": 0,
"get_#{as}": 1,
"update_#{as}": 1,
"get_and_update_#{as}": 1
@doc false
def handle_stop(unquote(as), _reason, _state), do: :ok
@doc false
def handle_start(unquote(as), _pid, _state), do: :ok
@doc false
def handle_get(unquote(as), return_val), do: {:reply, return_val}
@doc false
def handle_update(unquote(as), _old_state, _new_state), do: :ok
@doc false
def handle_get_and_update(unquote(as), return_val, _new_state), do: {:reply, return_val}
defoverridable handle_stop: 3,
handle_start: 3,
handle_get: 2,
handle_update: 3,
handle_get_and_update: 3
end
end
end
|
lib/receiver.ex
| 0.920706
| 0.706494
|
receiver.ex
|
starcoder
|
defmodule ICouch.StreamChunk do
@moduledoc """
Struct module for stream chunks.
The `name` field holds the document ID when streaming documents, a
`{document ID, attachment name}` tuple when streaming attachments, and
`nil` when streaming changes.
"""
defstruct [:ref, :type, :name, :data]
@type t :: %__MODULE__{
ref: pid | reference,
type: :document | :attachment,
name: String.t | {String.t, String.t} | nil,
data: binary | map | ICouch.Document.t | nil
}
def for_document(ref, doc_id, data),
do: %__MODULE__{ref: ref, type: :document, name: doc_id, data: data}
def for_attachment(ref, doc_id, filename, data),
do: %__MODULE__{ref: ref, type: :attachment, name: {doc_id, filename}, data: data}
end
defmodule ICouch.StreamEnd do
@moduledoc """
Struct module for stream ends.
The document stream ends when all attachments have been streamed.
"""
defstruct [:ref, :type, :name]
@type t :: %__MODULE__{
ref: pid | reference,
type: :document | :attachment,
name: String.t | {String.t, String.t} | nil
}
def for_document(ref, doc_id),
do: %__MODULE__{ref: ref, type: :document, name: doc_id}
def for_attachment(ref, doc_id, filename),
do: %__MODULE__{ref: ref, type: :attachment, name: {doc_id, filename}}
end
# Internal module for ibrowse stream transformations
defmodule ICouch.StreamTransformer do
@moduledoc false
alias ICouch.{StreamChunk, StreamEnd}
@doc false
def spawn(:document, doc_id, stream_to),
do: Kernel.spawn(__MODULE__, :transform_document, [self(), doc_id, stream_to, nil, "", nil])
def spawn(:attachment, {doc_id, filename}, stream_to),
do: Kernel.spawn(__MODULE__, :transform_attachment, [self(), doc_id, filename, stream_to, nil])
@doc false
def set_req_id(pid, req_id),
do: send(pid, {:set_req_id, req_id})
@doc false
def get_headers(pid) do
receive do
{__MODULE__, ^pid, status_code, headers} -> {status_code, headers}
end
end
@doc false
def cancel(pid) do
send(pid, :cancel)
:ok
end
@doc false
def transform_document(origin, doc_id, stream_to, multipart, buffer, req_id) do
receive do
{:set_req_id, new_req_id} ->
transform_document(origin, doc_id, stream_to, multipart, buffer, new_req_id)
:cancel ->
if req_id != nil, do: :ibrowse.stream_close(req_id)
:cancel
{:ibrowse_async_headers, ^req_id, status_code, headers} ->
send(origin, {__MODULE__, self(), status_code, headers})
multipart = case ICouch.Multipart.get_boundary(headers) do
{:ok, _, boundary} ->
{:init, boundary}
_ ->
nil
end
transform_document(origin, doc_id, stream_to, multipart, buffer, req_id)
{:ibrowse_async_raw_req, ^req_id} ->
transform_document(origin, doc_id, stream_to, multipart, buffer, req_id)
{:ibrowse_async_response, ^req_id, data} when multipart in [nil, :end] ->
transform_document(origin, doc_id, stream_to, multipart, buffer <> data, req_id)
{:ibrowse_async_response, ^req_id, data} ->
transform_document_multipart(origin, doc_id, stream_to, multipart, buffer <> data, req_id)
{:ibrowse_async_response_end, ^req_id} ->
if multipart == nil, do: decode_send_doc(doc_id, stream_to, buffer)
send(stream_to, StreamEnd.for_document(self(), doc_id))
:ok
{:ibrowse_async_response_timeout, ^req_id} ->
send(origin, {__MODULE__, self(), :timeout, []})
:ok
end
end
@doc false
def transform_attachment(origin, doc_id, filename, stream_to, req_id) do
receive do
{:set_req_id, new_req_id} ->
transform_attachment(origin, doc_id, filename, stream_to, new_req_id)
:cancel ->
if req_id != nil, do: :ibrowse.stream_close(req_id)
:cancel
{:ibrowse_async_headers, ^req_id, status_code, headers} ->
send(origin, {__MODULE__, self(), status_code, headers})
transform_attachment(origin, doc_id, filename, stream_to, req_id)
{:ibrowse_async_raw_req, ^req_id} ->
transform_attachment(origin, doc_id, filename, stream_to, req_id)
{:ibrowse_async_response, ^req_id, data} ->
send(stream_to, StreamChunk.for_attachment(self(), doc_id, filename, data))
transform_attachment(origin, doc_id, filename, stream_to, req_id)
{:ibrowse_async_response_end, ^req_id} ->
send(stream_to, StreamEnd.for_attachment(self(), doc_id, filename))
:ok
{:ibrowse_async_response_timeout, ^req_id} ->
send(origin, {__MODULE__, self(), :timeout, []})
:ok
end
end
defp transform_document_multipart(origin, doc_id, stream_to, {:init, boundary} = multipart, buffer, req_id) do
case ICouch.Multipart.split_part(buffer, boundary) do
{"", nil, _} ->
transform_document(origin, doc_id, stream_to, multipart, buffer, req_id)
{"", _, rest} when rest != nil ->
transform_document_multipart(origin, doc_id, stream_to, {:document, "", boundary}, rest, req_id)
_ ->
transform_document(origin, doc_id, stream_to, :end, "", req_id)
end
end
defp transform_document_multipart(origin, doc_id, stream_to, {:document, doc_buffer, boundary}, buffer, req_id) do
case ICouch.Multipart.split_part(buffer, boundary) do
{_, _, nil} ->
transform_document(origin, doc_id, stream_to, :end, "", req_id)
{data, next_headers, rest} ->
doc_buffer = doc_buffer <> data
if next_headers != nil do
decode_send_doc(doc_id, stream_to, doc_buffer)
case Regex.run(~r/attachment; *filename="([^"]*)"/, Map.get(next_headers, "content-disposition", "")) do
[_, filename] ->
transform_document_multipart(origin, doc_id, stream_to, {:attachment, filename, boundary}, rest, req_id)
_ ->
transform_document_multipart(origin, doc_id, stream_to, {:attachment, nil, boundary}, rest, req_id)
end
else
transform_document(origin, doc_id, stream_to, {:document, doc_buffer, boundary}, rest, req_id)
end
end
end
defp transform_document_multipart(origin, doc_id, stream_to, {:attachment, filename, boundary} = multipart, buffer, req_id) do
case ICouch.Multipart.split_part(buffer, boundary) do
{data, next_headers, rest} ->
if byte_size(data) > 0 do
send(stream_to, StreamChunk.for_attachment(self(), doc_id, filename, data))
end
if next_headers != nil or rest == nil do
send(stream_to, StreamEnd.for_attachment(self(), doc_id, filename))
end
cond do
next_headers != nil ->
case Regex.run(~r/attachment; *filename="([^"]*)"/, Map.get(next_headers, "content-disposition", "")) do
[_, filename] ->
transform_document_multipart(origin, doc_id, stream_to, {:attachment, filename, boundary}, rest, req_id)
_ ->
transform_document_multipart(origin, doc_id, stream_to, {:attachment, nil, boundary}, rest, req_id)
end
rest == nil ->
transform_document(origin, doc_id, stream_to, :end, "", req_id)
true ->
transform_document(origin, doc_id, stream_to, multipart, rest, req_id)
end
end
end
defp decode_send_doc(doc_id, stream_to, buffer) do
case ICouch.Document.from_api(buffer) do
{:ok, %{attachment_data: atts} = doc} when map_size(atts) > 0 ->
send(stream_to, StreamChunk.for_document(self(), doc_id, %{doc | attachment_data: %{}}))
Enum.each(atts, fn {att_name, att_data} ->
send(stream_to, StreamChunk.for_attachment(self(), doc_id, att_name, att_data))
send(stream_to, StreamEnd.for_attachment(self(), doc_id, att_name))
end)
{:ok, doc} ->
send(stream_to, StreamChunk.for_document(self(), doc_id, doc))
_ ->
send(stream_to, StreamChunk.for_document(self(), doc_id, nil))
:ok
end
end
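# Assembles a complete document from the stream messages in the caller's
# mailbox: waits for the document chunk, then appends attachment chunks into
# `attachment_data` until the document's stream end arrives.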
@doc false
def collect_document(ref, timeout) do
receive do
%StreamChunk{ref: ^ref, type: :document, data: doc} ->
collect_attachments(ref, doc, timeout)
after timeout ->
cancel(ref)
{:error, :timeout}
end
end
defp collect_attachments(ref, doc, timeout) do
receive do
%StreamChunk{ref: ^ref, type: :attachment, name: {_, name}, data: data} ->
case doc do
%{attachment_data: %{^name => existing_data} = attachment_data} ->
collect_attachments(ref, %{doc | attachment_data: %{attachment_data | name => existing_data <> data}}, timeout)
%{attachment_data: attachment_data} ->
collect_attachments(ref, %{doc | attachment_data: Map.put(attachment_data, name, data)}, timeout)
end
%StreamEnd{ref: ^ref, type: :attachment} ->
collect_attachments(ref, doc, timeout)
%StreamEnd{ref: ^ref, type: :document} ->
{:ok, doc}
after timeout ->
cancel(ref)
{:error, :timeout}
end
end
@doc false
def collect_attachment(ref, timeout),
do: collect_attachment(ref, "", timeout)
defp collect_attachment(ref, buffer, timeout) do
receive do
%StreamChunk{ref: ^ref, type: :attachment, data: data} ->
collect_attachment(ref, buffer <> data, timeout)
%StreamEnd{ref: ^ref, type: :attachment} ->
{:ok, buffer}
after timeout ->
cancel(ref)
{:error, :timeout}
end
end
end
|
lib/icouch/stream.ex
| 0.748168
| 0.405449
|
stream.ex
|
starcoder
|
defmodule RowBinary do
@moduledoc """
`RowBinary` format encoding for ClickHouse.
`RowBinary` is a binary format used to ingest data into ClickHouse efficiently. See https://clickhouse.yandex/docs/en/interfaces/formats/#rowbinary .
You can either use `RowBinary.encode/2` to manually encode a field, or implement the `RowBinary.RowBinaryEncoding` protocol for your struct.
## Examples
iex> RowBinary.encode(17, [:int8])
<<17>>
"""
use Bitwise
defprotocol RowBinaryEncoding do
@moduledoc ~S"""
The `RowBinary.RowBinaryEncoding` protocol can be used to implement custom RowBinary encoders for structs.
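
A minimal sketch of an implementation for a hypothetical struct (the struct
and its fields are illustrative, not part of this library):

defimpl RowBinary.RowBinaryEncoding, for: MyApp.Point do
  def encode(%MyApp.Point{x: x, y: y}) do
    RowBinary.encode(x, [:int32]) <> RowBinary.encode(y, [:int32])
  end
end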
"""
@doc """
Converts any struct into RowBinary format.
"""
@spec encode(any) :: binary
def encode(struct)
end
@doc """
Encodes a single value into a binary in *RowBinary* format.
You must provide the `value` you want to convert and a type definition in `types`.
`types` must be a list containing the type definitions, including modifiers, in order. For example,
an `Array(Nullable(UInt8))` type would look like `[:array, :nullable, :uint8]`.
Some types require an additional argument. For example, the `FixedString` type requires the byte size:
A `Nullable(FixedString(16))` would look like this: `[:nullable, :fixedstring, 16]`.
## Examples
iex> RowBinary.encode(17, [:int8])
<<17>>
iex> RowBinary.encode("hello", [:enum8, %{"hello" => 0, "world" => 1}])
<<0>>
iex> RowBinary.encode(["foo", nil, "barbazbarbaz"], [:array, :nullable, :fixedstring, 8])
<<3, 0, 102, 111, 111, 0, 0, 0, 0, 0, 1, 0, 98, 97, 114, 98, 97, 122, 98, 97>>
iex> RowBinary.encode(1337, [:int8]) # 1337 is out of range for 8 bit integers
** (ArgumentError) value=1337 with wrong types=[:int8]
## Supported types
* `:int8`: Signed 8 bit `integer`
* `:int16`: Signed 16 bit `integer`
* `:int32`: Signed 32 bit `integer`
* `:int64`: Signed 64 bit `integer`
* `:uint8`: Unsigned 8 bit `integer`
* `:uint16`: Unsigned 16 bit `integer`
* `:uint32`: Unsigned 32 bit `integer`
* `:uint64`: Unsigned 64 bit `integer`
* `:float32`: 32 bit `float`
* `:float64`: 64 bit `float`
* `:date`: Elixir `Date` type
* `:datetime`: Elixir `DateTime` type
* `:string`: Elixir `binary`
* `:fixedstring`: Elixir binary. Needs `byte size` as parameter
* `:ipv4`: IPv4 tuple in the format `{_,_,_,_}` as returned by `:inet.parse_ipv4_address/1`
* `:ipv6`: IPv6 tuple in the format `{_,_,_,_,_,_,_,_}` as returned by `:inet.parse_ipv6_address/1`
* `:uuid`: UUID value as binary, as returned by `UUID.uuid4/1`
* `:enum8`: An 8 bit enum type. Needs a mapping as parameter
* `:enum16`: A 16 bit enum type. Needs a mapping as parameter
* `:nullable`: Marks a subsequent type as `Nullable` and accepts `nil` as value
* `:array`: Defines a subsequent type as `Array(T)`. Needs a list as value
Currently `Decimal` and `AggregateFunction` types are not implemented.
`Nested` types also don't have a specific type, but can be handled manually, via `:array` types. See https://clickhouse.yandex/docs/en/data_types/nested_data_structures/nested/ .
Enums (`:enum8` and `:enum16`) require a mapping as parameter. You should use a `Map` structure as a translation table.
The following example will map the `"hello"` string to the value `0` and encode it as an 8 bit integer:
iex> RowBinary.encode("hello", [:enum8, %{"hello" => 0, "world" => 1}])
<<0>>
Note that currently not all type combinations are checked correctly. Things like `Nullable(Array(Int8))` (`[:nullable, :array, :int8]`) are not allowed in Clickhouse.
The function returns a binary in RowBinary format. If errors are encountered (e.g. integer overflows, UUID parsing, Date or DateTime overflows, ...), an `ArgumentError` exception is raised.
"""
def encode(value, types) when is_list(types) do
RowBinary.Encode.encode(value, types)
end
end
|
lib/row_binary.ex
| 0.911059
| 0.684956
|
row_binary.ex
|
starcoder
|
defmodule ExWire.Struct.WarpQueue do
@moduledoc """
`WarpQueue` maintains the current state of an active warp. This means we will
track the `block_chunk` hashes and `state_chunk` hashes given to us, so we
can request each from our connected peers. This structure is also persisted
during a warp sync, so that if interrupted, we can resume a warp where we
left off.
TODO: This will likely need to be updated to handle warping from more than
one direct peer.
"""
require Logger
alias Blockchain.{Block, Blocktree}
alias Exth.Time
alias ExWire.Packet.Capability.Par.SnapshotManifest
alias MerklePatriciaTree.Trie
@type t :: %__MODULE__{
manifest: SnapshotManifest.manifest() | nil,
manifest_hashes: MapSet.t(EVM.hash()),
manifest_block_hashes: MapSet.t(EVM.hash()),
manifest_state_hashes: MapSet.t(EVM.hash()),
chunk_requests: MapSet.t(EVM.hash()),
retrieved_chunks: MapSet.t(EVM.hash()),
processed_chunks: MapSet.t(EVM.hash()),
processed_blocks: MapSet.t(integer()),
processed_accounts: integer(),
warp_start: Time.time(),
block_tree: Blocktree.t(),
state_root: EVM.hash()
}
defstruct [
:manifest,
:manifest_hashes,
:manifest_block_hashes,
:manifest_state_hashes,
:chunk_requests,
:retrieved_chunks,
:processed_chunks,
:processed_blocks,
:processed_accounts,
:warp_start,
:block_tree,
:state_root
]
@empty_trie Trie.empty_trie_root_hash()
@doc """
Creates a new `WarpQueue`.
"""
def new() do
%__MODULE__{
manifest: nil,
manifest_hashes: MapSet.new(),
manifest_block_hashes: MapSet.new(),
manifest_state_hashes: MapSet.new(),
chunk_requests: MapSet.new(),
retrieved_chunks: MapSet.new(),
processed_chunks: MapSet.new(),
processed_blocks: MapSet.new(),
processed_accounts: 0,
warp_start: Time.time_start(),
block_tree: Blocktree.new_tree(),
state_root: @empty_trie
}
end
@doc """
Handle receiving a new manifest from a peer. The current behaviour is to
ignore all but the first received manifest, but later on, we may add matching
manifests to track similar peers.
"""
@spec new_manifest(t(), SnapshotManifest.manifest()) :: t()
def new_manifest(warp_queue, manifest) do
if warp_queue.manifest do
# Right now, ignore new manifests
warp_queue
else
manifest_block_hashes = MapSet.new(manifest.block_hashes)
manifest_state_hashes = MapSet.new(manifest.state_hashes)
manifest_hashes = MapSet.union(manifest_block_hashes, manifest_state_hashes)
%{
warp_queue
| manifest: manifest,
manifest_hashes: manifest_hashes,
manifest_block_hashes: manifest_block_hashes,
manifest_state_hashes: manifest_state_hashes
}
end
end
@doc """
When we receive a new block chunk, we want to remove it from requests
and add it to our processing queue.
"""
@spec new_block_chunk(t(), EVM.hash()) :: t()
def new_block_chunk(warp_queue, chunk_hash) do
updated_chunk_requests = MapSet.delete(warp_queue.chunk_requests, chunk_hash)
updated_retrieved_chunks = MapSet.put(warp_queue.retrieved_chunks, chunk_hash)
%{
warp_queue
| chunk_requests: updated_chunk_requests,
retrieved_chunks: updated_retrieved_chunks
}
end
@doc """
When we receive a new state chunk, we simply add it to our queue, which we'll
later process.
"""
@spec new_state_chunk(t(), EVM.hash()) :: t()
def new_state_chunk(warp_queue, chunk_hash) do
updated_chunk_requests = MapSet.delete(warp_queue.chunk_requests, chunk_hash)
updated_retrieved_chunks = MapSet.put(warp_queue.retrieved_chunks, chunk_hash)
%{
warp_queue
| chunk_requests: updated_chunk_requests,
retrieved_chunks: updated_retrieved_chunks
}
end
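@doc """
Determines the next batch of chunk hashes to request from peers, bounded by
both `request_limit` (the maximum number of in-flight requests) and
`queue_limit` (the maximum number of retrieved but unprocessed chunks).
Returns the updated warp queue and the list of hashes to request.
"""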
@spec get_hashes_to_request(t(), number(), number()) :: {t(), list(EVM.hash())}
def get_hashes_to_request(
warp_queue = %__MODULE__{
chunk_requests: chunk_requests,
retrieved_chunks: retrieved_chunks,
processed_chunks: processed_chunks
},
request_limit,
queue_limit
) do
queued_count =
Enum.count(
MapSet.difference(
retrieved_chunks,
processed_chunks
)
)
allowed_by_parallelism = request_limit - MapSet.size(chunk_requests)
allowed_by_queue = queue_limit - queued_count
desired_requests = min(allowed_by_parallelism, allowed_by_queue)
if desired_requests > 0 do
unfetched_block_hashes =
warp_queue.manifest_block_hashes
|> MapSet.difference(chunk_requests)
|> MapSet.difference(retrieved_chunks)
|> MapSet.difference(processed_chunks)
|> MapSet.to_list()
unfetched_state_hashes =
warp_queue.manifest_state_hashes
|> MapSet.difference(chunk_requests)
|> MapSet.difference(retrieved_chunks)
|> MapSet.difference(processed_chunks)
|> MapSet.to_list()
total_unfetched_hashes = unfetched_block_hashes ++ unfetched_state_hashes
total_unfetched_count = Enum.count(total_unfetched_hashes)
if min(total_unfetched_count, desired_requests) > 0 do
hashes_to_request = Enum.take(total_unfetched_hashes, desired_requests)
:ok =
Logger.debug(fn ->
"[Warp] Retrieving #{Enum.count(hashes_to_request)} of #{total_unfetched_count} hash(es) needed."
end)
new_chunk_requests =
MapSet.union(
chunk_requests,
MapSet.new(hashes_to_request)
)
{
%{warp_queue | chunk_requests: new_chunk_requests},
hashes_to_request
}
else
{warp_queue, []}
end
else
{warp_queue, []}
end
end
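@doc """
Reports the status of the warp: `{:pending, reason}` while work remains,
`:success` once the best block and state root match the manifest, or
`{:failure, reason}` if a hash or state root mismatch is detected.
"""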
@spec status(t()) :: {:pending, atom()} | :success | {:failure, atom()}
def status(warp_queue) do
cond do
is_nil(warp_queue.manifest) ->
{:pending, :no_manifest}
MapSet.size(warp_queue.chunk_requests) > 0 ->
{:pending, :awaiting_requests}
!MapSet.equal?(warp_queue.manifest_hashes, warp_queue.processed_chunks) ->
{:pending, :awaiting_processing}
is_nil(warp_queue.block_tree.best_block) ->
{:failure, :missing_best_block}
warp_queue.block_tree.best_block.header.number != warp_queue.manifest.block_number ->
# Still catching up to the manifest's block; `:awaiting_blocks` is chosen
# here so the return matches the `{:pending, atom()}` shape in the spec.
{:pending, :awaiting_blocks}
warp_queue.block_tree.best_block.block_hash != warp_queue.manifest.block_hash ->
:ok =
Logger.error(fn ->
"[Warp] Mismatched block hash: expected: #{
Exth.encode_hex(warp_queue.manifest.block_hash)
}, got: #{Exth.encode_hex(warp_queue.block_tree.best_block.block_hash)}"
end)
{:failure, :mismatched_block_hash}
warp_queue.state_root != warp_queue.manifest.state_root ->
:ok =
Logger.error(fn ->
"[Warp] Mismatched state root: expected: #{
Exth.encode_hex(warp_queue.manifest.state_root)
}, got: #{Exth.encode_hex(warp_queue.state_root)}"
end)
{:failure, :mismatched_state_root}
true ->
:success
end
end
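@doc """
Records a fully processed state chunk, accumulating the total number of
processed accounts and storing the resulting state root.
"""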
@spec processed_state_chunk(t(), EVM.hash(), integer(), EVM.hash()) :: t()
def processed_state_chunk(warp_queue, chunk_hash, processed_accounts, state_root) do
next_processed_accounts = warp_queue.processed_accounts + processed_accounts
# Show some stats for debugging
:ok =
Logger.debug(fn ->
"[Warp] Completed: #{next_processed_accounts} account(s) in #{
Time.elapsed(warp_queue.warp_start, :second)
} at #{Time.rate(next_processed_accounts, warp_queue.warp_start, "accts", :second)} with new state root #{
Exth.encode_hex(state_root)
}"
end)
%{
warp_queue
| processed_accounts: next_processed_accounts,
processed_chunks: MapSet.put(warp_queue.processed_chunks, chunk_hash),
state_root: state_root
}
end
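@doc """
Records a fully processed block chunk, merging the processed block numbers,
marking the chunk as processed and updating the best block of the block
tree.
"""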
@spec processed_block_chunk(
t(),
EVM.hash(),
Block.t(),
list(integer())
) :: t()
def processed_block_chunk(warp_queue, chunk_hash, block, processed_blocks) do
next_processed_blocks =
MapSet.union(
warp_queue.processed_blocks,
MapSet.new(processed_blocks)
)
next_block_tree = Blocktree.update_best_block(warp_queue.block_tree, block)
# Show some stats for debugging
list =
next_processed_blocks
|> MapSet.to_list()
|> Enum.sort()
min = List.first(list)
{max, missing} =
Enum.reduce(Enum.drop(list, 1), {min, 0}, fn el, {last, count} ->
{el, count + el - last - 1}
end)
:ok =
Logger.debug(fn ->
"[Warp] Completed: #{min}..#{max} with #{missing} missing block(s) in #{
Time.elapsed(warp_queue.warp_start, :second)
} at #{Time.rate(Enum.count(list), warp_queue.warp_start, "blks", :second)}"
end)
%{
warp_queue
| processed_blocks: next_processed_blocks,
processed_chunks: MapSet.put(warp_queue.processed_chunks, chunk_hash),
block_tree: next_block_tree
}
end
end
|
apps/ex_wire/lib/ex_wire/struct/warp_queue.ex
| 0.756088
| 0.509886
|
warp_queue.ex
|
starcoder
|
defmodule AmqpOne.TypeManager.Type, do: defstruct [:name, :class, :label,
:doc, :fields, :choices, :descriptor, :source, provides: [], encodings: []]
defmodule AmqpOne.TypeManager.Encoding, do: defstruct [:name, :code, :category, :label, :width]
defmodule AmqpOne.TypeManager.Descriptor, do: defstruct [:name, :code]
defmodule AmqpOne.TypeManager.Field, do:
defstruct [:name, :type, :default, :label, mandatory: false,
multiple: false, requires: []]
defmodule AmqpOne.TypeManager.Choice, do: defstruct [:name, :value, :doc]
defmodule AmqpOne.TypeManager do
@moduledoc """
Manages type specifications.
You can add types and query types. All predefined types
are handled directly; user-defined types are stored in
an ETS table.
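
For example, a minimal sketch of registering and looking up a user-defined
type (the type name and class below are purely illustrative):

alias AmqpOne.TypeManager
alias AmqpOne.TypeManager.Type

{:ok, _pid} = TypeManager.start_link()
TypeManager.add_type("my-type", %Type{name: "my-type", class: :primitive})
%Type{name: "my-type"} = TypeManager.type_spec("my-type")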
"""
alias AmqpOne.TypeManager.XML
require AmqpOne.TypeManager.XML
alias AmqpOne.TypeManager.{Type, Field, Descriptor, Encoding, Choice}
require Logger
@type class_t :: :primitive | :composite | :restricted | :union
@type category_t :: :fixed | :variable | :compound | :array
defstruct [:type_store, :struct_store]
@type_ets_name AmqpOne.TypeManager.Types
@struct_ets_name AmqpOne.TypeManager.Structs
@doc """
The type specification for the standard types as specified in the xml spec
of the AMQP 1.0 standard.
"""
@spec type_spec(String.t) :: Type.t | nil
# Generate the primitive types
XML.typespec AmqpOne.TypeManager.XML.xml_spec()
# These clauses must always come last: they look up user-defined types in
# ETS and return nil when no type is found.
def type_spec(%{__struct__: name}) do
case :ets.lookup(@type_ets_name, name) do
[] -> nil
[{^name, type}] -> type
end
end
def type_spec(name) do
case :ets.lookup(@type_ets_name, name) do
[] -> nil
[{^name, type}] -> type
end
end
@doc """
Identifies the struct for the (complex) type, if it is registered
with the type.
"""
@spec struct_for_type(Type.t) :: %{__struct__: atom} | nil
def struct_for_type(type) do
case :ets.lookup(@struct_ets_name, type) do
[] -> nil
[{^type, struct}] -> struct
end
end
def start_link() do
ret_val = Agent.start_link(fn() ->
# use ETS default options: set, key on position 1, protected
%__MODULE__{
type_store: :ets.new(@type_ets_name, [:named_table]),
struct_store: :ets.new(@struct_ets_name, [:named_table])
} end,
name: __MODULE__)
add_frame_types()
ret_val
end
def stop() do
Agent.stop(__MODULE__, :normal)
end
@doc """
Adds a type with an explicit name. For structs, the struct and the type
are also registered in the struct store.
"""
def add_type(%{__struct__: name} = s, %Type{} = t) do
#Logger.info "add struct #{name} = #{inspect s} for type #{t.name}"
Agent.get(__MODULE__, fn(%__MODULE__{type_store: ts, struct_store: ss}) ->
true = :ets.insert(ts, {name, t})
true = :ets.insert(ss, {t, s})
end)
end
def add_type(name, %Type{} = t) do
#Logger.info "add with #{name} the type #{t.name}"
Agent.get(__MODULE__, fn(%__MODULE__{type_store: ts}) ->
true = :ets.insert(ts, {name, t})
end)
end
@doc """
Adds a type with the name(s) implicitly given in the type specification
"""
def add_type(%Type{} = t) do
Agent.get(__MODULE__, fn(%__MODULE__{type_store: ts}) ->
get_names(t)
|> Enum.each(fn name -> true = :ets.insert(ts, {name, t}) end)
add_provides_types(t, ts)
end)
end
defp get_names(%Type{} = t) do
case t.descriptor do
nil -> [t.name]
[%Descriptor{name: n, code: c}] -> [n, c]
end
end
defp add_provides_types(%Type{provides: provides} = t, type_store) do
provides
|> Enum.each(fn(p) ->
case type_spec(p) do
nil -> true = :ets.insert(type_store, {p, [t]})
l when is_list(l) -> true = :ets.insert(type_store, {p, [t | l]})
%Type{} -> Logger.error "Type #{p} is already present, ignoring"
end
end)
end
@doc """
Adds all required frame types to the type manager. This function
must be called during startup of the type manager.
"""
def add_frame_types() do
AmqpOne.Transport.Frame.init()
end
end
|
lib/type.ex
| 0.533884
| 0.519521
|
type.ex
|
starcoder
|
defmodule AstroEx.Unit.DMS do
@moduledoc """
Degrees, minutes, seconds (DMS) representation of an angle, with
conversions to and from the other `AstroEx.Unit` types.
"""
alias AstroEx.Unit.{Arcmin, Arcsec, Degrees, HMS, Radian}
alias AstroEx.Utils.Math
@enforce_keys [:value]
defstruct [:value]
@typep degrees :: -360..360
@typep minutes :: 0..59
@typep seconds :: number()
@typep dms :: {degrees(), minutes(), seconds()}
@type t :: %__MODULE__{value: dms()}
@doc """
## Examples
iex> AstroEx.Unit.DMS.new("03:00:00.0")
#AstroEx.Unit.DMS<03:00:00.0>
"""
def new(str) when is_binary(str),
do: str |> parse_dms() |> new()
def new({deg, min, sec} = dms)
when is_integer(deg) and is_integer(min) and is_number(sec),
do: %__MODULE__{value: dms}
def new(deg, min, sec),
do: new({deg, min, sec})
@doc """
## Examples
iex> "03:00:00.0"
...> |> AstroEx.Unit.DMS.new()
...> |> AstroEx.Unit.DMS.to_degrees()
#AstroEx.Unit.Degrees<3.0>
iex> "03:00:00.0"
...> |> AstroEx.Unit.DMS.new()
...> |> AstroEx.Unit.cast(AstroEx.Unit.Degrees)
#AstroEx.Unit.Degrees<3.0>
"""
def to_degrees(%__MODULE__{value: value}),
do: value |> convert_to_degrees() |> Degrees.new()
@doc """
## Examples
iex> "03:00:00.0"
...> |> AstroEx.Unit.DMS.new()
...> |> AstroEx.Unit.DMS.to_radian()
#AstroEx.Unit.Radian<0.05236>
iex> "03:00:00.0"
...> |> AstroEx.Unit.DMS.new()
...> |> AstroEx.Unit.cast(AstroEx.Unit.Radian)
#AstroEx.Unit.Radian<0.05236>
"""
def to_radian(%__MODULE__{} = value),
do: value |> to_degrees() |> Degrees.to_radian()
@doc """
## Examples
iex> "03:00:00.0"
...> |> AstroEx.Unit.DMS.new()
...> |> AstroEx.Unit.DMS.to_arcsec()
#AstroEx.Unit.Arcsec<10800.0>
iex> "03:00:00.0"
...> |> AstroEx.Unit.DMS.new()
...> |> AstroEx.Unit.cast(AstroEx.Unit.Arcsec)
#AstroEx.Unit.Arcsec<10800.0>
"""
def to_arcsec(%__MODULE__{} = value),
do: value |> to_degrees() |> Arcsec.from_degrees()
@doc """
## Examples
iex> "03:00:00.0"
...> |> AstroEx.Unit.DMS.new()
...> |> AstroEx.Unit.DMS.to_arcmin()
#AstroEx.Unit.Arcmin<180.0>
iex> "03:00:00.0"
...> |> AstroEx.Unit.DMS.new()
...> |> AstroEx.Unit.cast(AstroEx.Unit.Arcmin)
#AstroEx.Unit.Arcmin<180.0>
"""
def to_arcmin(%__MODULE__{} = value),
do: value |> to_degrees() |> Arcmin.from_degrees()
@doc """
## Examples
iex> "03:00:00.0"
...> |> AstroEx.Unit.DMS.new()
...> |> AstroEx.Unit.DMS.to_hms()
#AstroEx.Unit.HMS<00:12:00.0>
iex> "03:00:00.0"
...> |> AstroEx.Unit.DMS.new()
...> |> AstroEx.Unit.cast(AstroEx.Unit.HMS)
#AstroEx.Unit.HMS<00:12:00.0>
"""
def to_hms(%__MODULE__{} = value),
do: value |> to_degrees() |> HMS.from_degrees()
@doc """
## Examples
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.DMS.from_degrees()
#AstroEx.Unit.DMS<180:00:00.0>
"""
def from_degrees(%Degrees{value: value}), do: from_degrees(value)
def from_degrees(val) when is_integer(val), do: from_degrees(val * 1.0)
def from_degrees(val) when is_float(val), do: val |> calculate_dms() |> new()
defp parse_dms(str) do
[deg, min, sec] = String.split(str, ":")
deg = String.to_integer(deg)
min = String.to_integer(min)
sec = String.to_float(sec)
{deg, min, sec}
end
defp calculate_dms(degrees) do
degrees
|> calculate_deg()
|> calculate_min()
|> calculate_sec()
end
defp calculate_deg(val) when val < 0,
do: val |> abs |> calculate_deg() |> invert_deg()
defp calculate_deg(val), do: Math.divmod(val, 1)
defp calculate_min({deg, remdr}) do
remdr
|> (&(&1 * 60)).()
|> Math.divmod(1)
|> Tuple.insert_at(0, deg)
end
defp calculate_sec({deg, min, remdr}),
do: {deg, min, remdr * 60.0}
# Negate only the degree component; multiplying by -1.0 would turn the
# integer degree into a float and fail the `is_integer/1` guard in `new/1`.
defp invert_deg({deg, remdr}), do: {-deg, remdr}
defp convert_to_degrees(%__MODULE__{value: value}),
do: convert_to_degrees(value)
defp convert_to_degrees({deg, _, _} = dms) when deg < 0,
do: dms |> do_convert_to_degrees() |> (&(&1 * -1)).() |> (&(deg + &1)).()
defp convert_to_degrees({deg, _, _} = dms),
do: dms |> do_convert_to_degrees() |> (&(deg + &1)).()
defp do_convert_to_degrees({_, min, sec}) do
[min, sec / 60.0]
|> Enum.sum()
|> (&(&1 / 60.0)).()
end
defimpl AstroEx.Unit, for: __MODULE__ do
alias AstroEx.Unit.{Arcmin, Arcsec, Degrees, DMS, HMS, Radian}
def cast(%DMS{} = dms, Arcmin), do: DMS.to_arcmin(dms)
def cast(%DMS{} = dms, Arcsec), do: DMS.to_arcsec(dms)
def cast(%DMS{} = dms, Degrees), do: DMS.to_degrees(dms)
def cast(%DMS{} = dms, DMS), do: dms
def cast(%DMS{} = dms, HMS), do: DMS.to_hms(dms)
def cast(%DMS{} = dms, Radian), do: DMS.to_radian(dms)
def cast(%{value: value}, Float), do: value
def cast(%{value: value}, Integer), do: trunc(value)
def to_string(%{value: val}), do: val |> format_values() |> to_s()
def from_degrees(val), do: DMS.from_degrees(val)
defp to_s({deg, min, sec}), do: "#{deg}:#{min}:#{sec}"
defp format_values({deg, min, sec}),
do: {pad(trunc(deg)), pad(trunc(min)), pad(sec)}
defp pad(val) when is_float(val) and val < 10,
do: val |> Float.round(1) |> Float.to_string() |> pad(4)
defp pad(val) when is_integer(val) and val < 10,
do: val |> Integer.to_string() |> pad(2)
defp pad(val) when is_integer(val),
do: val |> Integer.to_string()
defp pad(val) when is_float(val),
do: val |> Float.round(1) |> Float.to_string()
# No default argument here: a `n \\ 2` default would define a pad/1 that
# conflicts with the pad/1 clauses above and fail to compile.
defp pad(val, n) when is_binary(val),
do: String.pad_leading(val, n, "0")
end
defimpl Inspect, for: __MODULE__ do
alias AstroEx.Unit
import Inspect.Algebra
def inspect(value, _opts) do
value = Unit.to_string(value)
concat(["#AstroEx.Unit.DMS<", value, ">"])
end
end
end
|
lib/astro_ex/unit/dms.ex
| 0.817975
| 0.557243
|
dms.ex
|
starcoder
|
defmodule Tyyppi.T do
@moduledoc """
Raw type wrapper. All the macros exported by this module are available in `Tyyppi`.
Require and use `Tyyppi` instead.
"""
use Boundary, deps: [Tyyppi]
alias Tyyppi.{Stats, T}
require Logger
@doc false
defguardp is_params(params) when is_list(params) or is_atom(params)
@typep kind :: :type | :remote_type | :user_type | :ann_type | :atom | :var
@typep ast_lead :: :->
@typep visibility :: :typep | :type | :opaque | :built_in
@typep simple ::
nil
| :a_function
| :a_set
| :abstract_expr
| :af_atom
| :af_clause
| :af_lit_atom
| :af_variable
| :any
| :atom
| :binary
| :boolean
| :byte
| :check_schedulers
| :deflated
| :des3_cbc
| :filename
| :filename_all
| :fun
| :idn
| :input
| :integer
| :iovec
| :iter
| :iterator
| :list
| :map
| :maybe_improper_list
| :module
| :non_neg_integer
| :nonempty_list
| :nonempty_string
| :orddict
| :pid
| :pos_integer
| :queue
| :range
| :receive
| :record
| :reference
| :relation
| :set
| :string
| :term
| :timeout
| :tree
| :tuple
| :union
@type ast :: Macro.t() | {module(), atom(), list() | nil | non_neg_integer()}
@type raw :: {kind() | ast_lead(), non_neg_integer() | keyword(), simple() | [ast()], [raw()]}
@typedoc """
The type information in a human-readable format.
For remote types, it’s gathered from
[`Code.Typespec`](https://github.com/elixir-lang/elixir/blob/master/lib/elixir/lib/code/typespec.ex#L1),
for built-in like `atom()` it’s simply constructed on the fly.
"""
@type t(wrapped) :: %__MODULE__{
type: visibility(),
module: module(),
name: atom(),
params: [atom()],
source: binary() | nil,
definition: raw() | nil,
quoted: wrapped
}
defstruct ~w|type module name params source definition quoted|a
@spec loaded?(type :: T.t(wrapped)) :: boolean() when wrapped: term()
@doc "Returns `true` if the type definition was loaded, `false` otherwise."
def loaded?(%T{definition: nil}), do: false
def loaded?(%T{}), do: true
@spec parse_quoted({:| | {:., keyword(), list()} | atom(), keyword(), list() | nil}) ::
Tyyppi.T.t(wrapped)
when wrapped: term()
@doc false
def parse_quoted({:|, _, [_, _]} = union) do
union
|> union()
|> parse_definition()
|> Stats.type()
end
def parse_quoted({{:., _, [{:__aliases__, _, aliases}, fun]}, _, params})
when is_params(params) do
params = params |> normalize_params() |> length()
Stats.type({Module.concat(aliases), fun, params})
end
def parse_quoted({{:., _, [module, fun]}, _, params}) when is_params(params) do
params = params |> normalize_params() |> length()
Stats.type({module, fun, params})
end
def parse_quoted({fun, _, params}) when is_atom(fun) and fun != :{} and is_params(params),
do: Stats.type({:type, 0, fun, param_names(params)})
def parse_quoted(any) do
Logger.debug("[🚰 T.parse_quoted/1]: " <> inspect(any))
any
|> parse_definition()
|> Stats.type()
end
@doc false
def parse_definition(atom) when is_atom(atom), do: {:atom, 0, atom}
def parse_definition(list) when is_list(list),
do: {:type, 0, :union, Enum.map(list, &parse_definition/1)}
def parse_definition(tuple) when is_tuple(tuple) do
case Macro.decompose_call(tuple) do
:error -> {:type, 0, :tuple, tuple |> decompose_tuple() |> Enum.map(&parse_definition/1)}
{:{}, list} when is_list(list) -> {:type, 0, :tuple, Enum.map(list, &parse_definition/1)}
_ -> parse_quoted(tuple).definition
end
end
defp decompose_tuple({:{}, _, list}) when is_list(list), do: list
defp decompose_tuple(tuple), do: Tuple.to_list(tuple)
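# `union/2` flattens a nested `:|` AST such as `a | b | c` into a flat list
# of the union's member ASTs, preserving their order.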
@doc false
def union(ast, acc \\ [])
def union({:|, _, [t1, t2]}, acc), do: union(t2, [t1 | acc])
def union(t, acc), do: Enum.reverse([t | acc])
@doc false
@spec normalize_params([raw()] | any()) :: [raw()]
def normalize_params(params) when is_list(params), do: params
def normalize_params(_params), do: []
@doc false
@spec param_names([raw()] | any()) :: [atom()]
def param_names(params) when is_list(params) do
params
|> Enum.reduce([], fn kv, acc ->
case kv do
{k, _} -> [k | acc]
{k, _, _} -> [k | acc]
_ -> acc
end
end)
|> Enum.reverse()
end
def param_names(_), do: []
# FIXME
@doc false
def collectable?(%Tyyppi.T{
definition:
{:type, _, :map,
[{:type, _, :map_field_exact, [{:atom, _, :__struct__}, {:atom, _, _struct}]} | _]}
}),
do: false
def collectable?(any), do: not is_nil(Collectable.impl_for(any))
@doc false
def enumerable?(%Tyyppi.T{
definition:
{:type, _, :map,
[{:type, _, :map_field_exact, [{:atom, _, :__struct__}, {:atom, _, _struct}]} | _]}
}),
do: false
def enumerable?(any), do: not is_nil(Enumerable.impl_for(any))
defimpl String.Chars do
@moduledoc false
use Boundary, classify_to: Tyyppi.T
defp stringify([]), do: ~s|[]|
defp stringify({:atom, _, atom}) when atom in [nil, false, true], do: ~s|#{atom}|
defp stringify({:atom, _, atom}), do: ~s|:#{atom}|
defp stringify({:var, _, name}), do: ~s|_#{name}|
defp stringify({:type, _, type}), do: ~s|#{type}()|
defp stringify({:type, _, type, params}) do
params = Enum.map_join(params, ", ", &stringify/1)
~s|#{type}(#{params})|
end
defp stringify({:remote_type, _, type}) when is_list(type),
do: Enum.map_join(type, ", ", &stringify/1)
defp stringify({:remote_type, _, type, params}) do
params = Enum.map_join(params, ", ", &stringify/1)
~s|#{type}(#{params})|
end
defp stringify(any), do: inspect(any)
def to_string(%T{module: nil, name: nil, definition: {:type, _, _type, _params} = type}),
do: stringify(type)
def to_string(%T{module: module, name: name, params: params}),
do: stringify({:type, 0, "#{inspect(module)}.#{name}", params})
end
end
|
lib/tyyppi/t.ex
| 0.815269
| 0.661376
|
t.ex
|
starcoder
|
defmodule McProtocol.Handler do
@type t :: module
@moduledoc """
Basic component for the connection state machine.
This behaviour is one of the two components that makes McProtocol flexible,
the other one being the Orchestrator. To interact directly with the protocol
on the standard acceptor, you need to implement this behavior.
## Interaction
A handler has two ways to interact with the connection it's associated with,
synchronous, and asynchronous.
Synchronous interaction is named a transition (it transitions the connection
state machine into a new state). Transitions can do things like set protocol
encryption, send packets, raw data, or transition to a new protocol state. It
allows you to control the precise order of operations.
(Most of this is not done yet) Asynchronous interaction is done by messaging.
Any process can interact with any connection, as long as it has the pid and
state cookie. Because the exact order of operations can not be controlled,
  things like setting encryption or compression are not possible.
"""
@type protocol_direction :: :Client | :Server
@type protocol_mode :: :Handshake | :Status | :Login | :Play
@typedoc """
The protocol play mode contains additional information on what state the
connection is in when the protocol_mode is :Play.
There are currently 3 defined modes:
* :init - When the connection just switched from the login mode.
* :reset - The connection has been reset, and is in a well defined state so
that another handler can take over.
* :in_world - The player is currently in a world, and you should expect to
receive movement packets and more from the client. Care should be taken to
handle things like movement packets when returning the connection to the :reset
play_mode.
When the connection play_mode is set to :reset, the connection is required to
be in the following state:
* Respawn or Join Game packet has just been sent. (as in not yet spawned in
world)
* Gamemode set is 0
* Dimension set is 0
  * Difficulty set is 0
* Level type is "default"
* Reduced Debug Info is false
"""
@type play_mode :: :init | :reset | :in_world | nil
@typedoc """
A command to transition the connection state machine to a new state.
* set_encryption - Sets the encryption data for both reading and writing.
* send_packet - Encodes and sends the provided packet struct.
* send_data - Sends the provided raw data to the socket. DO NOT use.
* stash - Updates the stash of the socket. When using this, make sure you
are only updating things you are allowed to touch.
* handler_process - Tells the connection to monitor this process. If the
process stops, it will be handled as a handler crash.
* next - Tells the orchestrator that the handler is done with the connection.
The second element will be returned to the orchestrator as the handler return
value.
* close - There is nothing more that can be done on this connection, and
it should be closed. Examples of this are when the player has been kicked or
when the status exchange has been completed.
"""
  @type transition :: {:set_encryption, %McProtocol.Crypto.Transport.CryptData{}}
| {:set_compression, non_neg_integer}
| {:send_packet, McProtocol.Packet.t}
| {:send_data, iodata}
| {:stash, Stash.t}
| {:handler_process, pid}
| {:next, return_value :: any}
| :close
@type transitions :: [transition]
@type handler :: module
@type handler_state :: term
@doc """
This callback is the first thing called when the handler is given control.
"""
@callback enter(args :: any, stash :: Stash.t) :: {transitions, handler_state}
@doc """
When a packet is received on the connection, this callback is called.
"""
@callback handle(packet :: McProtocol.Packet.In.t, stash :: Stash.t, state :: handler_state) :: {transitions, handler_state}
@doc """
This callback the absolute last thing called when control is taken away from
a handler. You are not able to influence the state of anything related to the
connection from here, and it should only be used to gracefully stop things
like related processes.
"""
@callback leave(stash :: Stash.t, state :: handler_state) :: nil
  defmacro __using__(_opts) do
quote do
@behaviour McProtocol.Handler
def leave(_stash, _handler_state), do: nil
defoverridable [leave: 2]
end
end
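  # Example (a hypothetical minimal handler; the transitions shown are a
  # sketch, not part of this module):
  #
  #     defmodule MyServer.EchoHandler do
  #       use McProtocol.Handler
  #
  #       def enter(_args, _stash), do: {[], nil}
  #
  #       def handle(_packet, _stash, state) do
  #         # Inspect the decoded packet and emit transitions, e.g.
  #         # {[{:send_packet, some_packet}], state}
  #         {[], state}
  #       end
  #     end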
defmodule Stash do
@type t :: %__MODULE__{
direction: McProtocol.Handler.protocol_direction,
mode: McProtocol.Handler.protocol_mode,
play_mode: McProtocol.Handler.play_mode,
connection: %McProtocol.Acceptor.ProtocolState.Connection{},
identity: %{authed: boolean, name: String.t, uuid: McProtocol.UUID.t} | nil,
entity_id: non_neg_integer,
}
defstruct(
direction: nil,
mode: :Handshake,
play_mode: nil,
connection: nil,
# Stores player identity from the authentication protocol phase.
identity: nil,
# Because the entity id of a player can never change once it's set by the
# server, we need to keep track of this through the lifetime of the connection.
# Currently set statically to 0 for simplicity.
entity_id: 0,
)
end
end
|
lib/handler/handler.ex
| 0.86923
| 0.532729
|
handler.ex
|
starcoder
|
defmodule Project2 do
  def main(args \\ []) do
    # :switches (or :strict) is required by newer OptionParser versions;
    # the positional arguments land in `input`.
    {_, input, _} = OptionParser.parse(args, switches: [])

    if length(input) == 3 do
      numNodes = String.to_integer(List.first(input))

      if numNodes > 1 do
        algorithm = List.last(input)
        {topology, _} = List.pop_at(input, 1)

        case algorithm do
          "gossip" ->
            IO.puts "Using Gossip algorithm"
            actors = init_actors(numNodes)
            init_algorithm(actors, topology, numNodes, algorithm)

          "push-sum" ->
            IO.puts "Using push-sum algorithm"
            actors = init_actors_push_sum(numNodes)
            init_algorithm(actors, topology, numNodes, algorithm)

          _ ->
            IO.puts "Invalid algorithm"
            IO.puts "Enter gossip or push-sum"
        end
      end
    else
      IO.puts "Invalid input. Number of arguments should be 3"
      IO.puts "Example: ./project2 30 2D gossip"
    end
  end
  def init_actors(numNodes) do
    middleNode = trunc(numNodes/2)

    Enum.map(1..numNodes, fn x ->
      # Bindings made inside a cond/if branch are not visible outside it,
      # so take the branch result instead.
      rumour = if x == middleNode, do: "This is rumour", else: ""
      {:ok, actor} = Client.start_link(rumour)
      actor
    end)
  end
  def init_actors_push_sum(numNodes) do
    middleNode = trunc(numNodes/2)

    Enum.map(1..numNodes, fn x ->
      rumour = if x == middleNode, do: "This is rumour", else: ""
      # Client.start_link returns the pid of the process; the node id is
      # passed as a float for the push-sum s/w ratio.
      {:ok, actor} = Client.start_link([x * 1.0, rumour])
      actor
    end)
  end
  def init_algorithm(actors, topology, numNodes, algorithm) do
    :ets.new(:count, [:set, :public, :named_table])
    :ets.insert(:count, {"spread", 0})

    # Take the neighbor map from the case expression itself; rebinding
    # `neighbors` inside a branch would not be visible out here.
    neighbors =
      case topology do
        "full" ->
          IO.puts "Using full topology"
          get_full_neighbors(actors)

        "2D" ->
          IO.puts "Using 2D topology"
          get_2d_neighbors(actors, topology)

        "line" ->
          IO.puts "Using line topology"
          get_line_neighbors(actors) # Gives map of host, neighbors

        "imp2D" ->
          IO.puts "Using imp2D topology"
          get_2d_neighbors(actors, topology)

        _ ->
          IO.puts "Invalid topology"
          IO.puts "Enter full/2D/line/imp2D"
          %{}
      end

    set_neighbors(neighbors)
    prev = System.monotonic_time(:millisecond)

    if (algorithm == "gossip") do
      gossip(actors, neighbors, numNodes)
    else
      push_sum(actors, neighbors, numNodes)
    end

    IO.puts "Time required: " <> to_string(System.monotonic_time(:millisecond) - prev) <> " ms"
  end
def gossip(actors, neighbors, numNodes) do
  for {k, _v} <- neighbors do
Client.send_message(k)
end
actors = check_actors_alive(actors)
[{_, spread}] = :ets.lookup(:count, "spread")
if ((spread/numNodes) < 0.9 && length(actors) > 1) do
neighbors = Enum.filter(neighbors, fn {k,_} -> Enum.member?(actors, k) end)
gossip(actors, neighbors, numNodes)
else
IO.puts "Spread: " <> to_string(spread * 100/numNodes) <> " %"
end
end
  def push_sum(actors, neighbors, numNodes) do
    for {k, _v} <- neighbors do
      Client.send_message_push_sum(k)
    end

    actors = check_actors_alive_push_sum(actors)
    [{_, spread}] = :ets.lookup(:count, "spread")

    if ((spread/numNodes) < 0.9 && length(actors) > 1) do
      neighbors = Enum.filter(neighbors, fn ({k,_}) -> Enum.member?(actors, k) end)
      push_sum(actors, neighbors, numNodes)
    else
      IO.puts "Spread: " <> to_string(spread * 100/numNodes) <> " %"
    end
  end
def check_actors_alive(actors) do
current_actors = Enum.map(actors, fn x -> if (Process.alive?(x) && Client.get_count(x) < 10 && Client.has_neighbors(x)) do x end end)
List.delete(Enum.uniq(current_actors), nil)
end
def check_actors_alive_push_sum(actors) do
current_actors = Enum.map(actors,
fn x ->
diff = Client.get_diff(x)
if(Process.alive?(x) && Client.has_neighbors(x) && (abs(List.first(diff)) > :math.pow(10, -10)
|| abs(List.last(diff)) > :math.pow(10, -10))) do
x
end
end)
List.delete(Enum.uniq(current_actors), nil)
end
def get_full_neighbors(actors) do
Enum.reduce(actors, %{}, fn (x, acc) -> Map.put(acc, x, Enum.filter(actors, fn y -> y != x end)) end)
end
def get_line_neighbors(actors) do
# actors_with_index = %{pid1 => 0, pid2 => 1, pid3 => 2}
actors_with_index = Stream.with_index(actors, 0) |> Enum.reduce(%{}, fn({v,k}, acc) -> Map.put(acc, v, k) end)
first = Enum.at(actors,0)
lastIndex = length(actors) - 1
last = Enum.at(actors, lastIndex)
Enum.reduce(actors, %{}, fn (x, acc) -> {:ok, currentIndex} = Map.fetch(actors_with_index, x)
cond do
x == first -> Map.put(acc, x, [Enum.at(actors, 1)])
x == last -> Map.put(acc, x, [Enum.at(actors, lastIndex - 1)])
true -> Map.put(acc, x, [Enum.at(actors, currentIndex - 1), Enum.at(actors, currentIndex + 1)])
end end)
end
  def get_2d_neighbors(actors, topology) do
    actors_with_index = Stream.with_index(actors, 0) |> Enum.reduce(%{}, fn({v,k}, acc) -> Map.put(acc, k, v) end)
    numNodes = length(actors)
    xMax = trunc(:math.ceil(:math.sqrt(numNodes)))
    yMax = xMax
    yMulti = yMax
    xLimit = xMax - 1
    yLimit = yMax - 1

    Enum.reduce(0..yLimit, %{}, fn y, neighbors ->
      Enum.reduce(0..xLimit, neighbors, fn x, neighbors ->
        i = y * yMulti + x

        if (i < numNodes) do
          # Rebind `adjacents` with the result of each `if`; assignments
          # made inside an `if` body are not visible outside of it.
          adjacents = []
          adjacents = if x > 0, do: [i - 1 | adjacents], else: adjacents
          adjacents = if x < xLimit && (i + 1) < numNodes, do: [i + 1 | adjacents], else: adjacents
          adjacents = if y > 0, do: [i - yMulti | adjacents], else: adjacents
          adjacents = if y < yLimit && (i + yMulti) < numNodes, do: [i + yMulti | adjacents], else: adjacents

          # Add a random extra neighbor for the imperfect 2D topology.
          # :rand.uniform(n) gives random number: 1 <= x <= n
          adjacents =
            case topology do
              "imp2D" -> get_random_node_imp2D(adjacents, numNodes) ++ adjacents
              _ -> adjacents
            end

          {:ok, actor} = Map.fetch(actors_with_index, i)

          neighbor_pids = Enum.map(adjacents, fn idx ->
            {:ok, n} = Map.fetch(actors_with_index, idx)
            n
          end)

          Map.put(neighbors, actor, neighbor_pids)
        else
          neighbors
        end
      end)
    end)
  end
def set_neighbors(neighbors) do
for {k, v} <- neighbors do
Client.set_neighbors(k, v)
end
end
  def get_random_node_imp2D(neighbors, numNodes) do
    random_node_index = :rand.uniform(numNodes) - 1

    # Retry until we draw an index that is not already a neighbor, and
    # return the result of the recursive call instead of discarding it.
    if Enum.member?(neighbors, random_node_index) do
      get_random_node_imp2D(neighbors, numNodes)
    else
      [random_node_index]
    end
  end
def print_rumour_count(actors) do
Enum.each(actors, fn x -> IO.inspect x
IO.puts to_string(Client.get_rumour(x)) <> " Count: " <>to_string(Client.get_count(x))
end)
end
end
|
project2/lib/project2.ex
| 0.536799
| 0.553626
|
project2.ex
|
starcoder
|
defmodule Wavex.Chunk.BAE do
@moduledoc """
A BAE (Broadcast Audio Extension) chunk.
"""
alias Wavex.{
FourCC,
CString
}
@enforce_keys [
:size,
:description,
:originator,
:originator_reference,
:origination_date,
:origination_time,
:time_reference_low,
:time_reference_high,
:version
]
defstruct [
:size,
:description,
:originator,
:originator_reference,
:origination_date,
:origination_time,
:time_reference_low,
:time_reference_high,
:version,
:umid,
:loudness_value,
:loudness_range,
:max_true_peak_level,
:max_momentary_loudness,
:max_short_term_loudness
]
@type t :: %__MODULE__{
size: non_neg_integer,
description: binary,
originator: binary,
originator_reference: binary,
origination_date: Date.t(),
origination_time: Time.t(),
time_reference_low: non_neg_integer,
time_reference_high: non_neg_integer,
version: non_neg_integer,
umid: <<_::512>> | nil,
loudness_value: integer | nil,
loudness_range: integer | nil,
max_true_peak_level: integer | nil,
max_momentary_loudness: integer | nil,
max_short_term_loudness: integer | nil
}
@type date_binary :: <<_::80>>
@type time_binary :: <<_::64>>
@four_cc "bext"
@doc """
The ID that identifies a BAE chunk.
"""
@spec four_cc :: FourCC.t()
def four_cc, do: @four_cc
  @spec date(date_binary) :: {:ok, Date.t()} | {:error, {:unreadable_date, date_binary}}
defp date(
<<
year_binary::binary-size(4),
_::8,
month_binary::binary-size(2),
_::8,
day_binary::binary-size(2)
>> = binary
) do
    with {year, ""} <- Integer.parse(year_binary),
         {month, ""} <- Integer.parse(month_binary),
         {day, ""} <- Integer.parse(day_binary),
         {:ok, date} <- Date.new(year, month, day) do
      {:ok, date}
    else
      _ -> {:error, {:unreadable_date, binary}}
    end
end
  @spec time(time_binary) :: {:ok, Time.t()} | {:error, {:unreadable_time, time_binary}}
defp time(
<<
hour_binary::binary-size(2),
_::8,
minute_binary::binary-size(2),
_::8,
second_binary::binary-size(2)
>> = binary
) do
    with {hour, ""} <- Integer.parse(hour_binary),
         {minute, ""} <- Integer.parse(minute_binary),
         {second, ""} <- Integer.parse(second_binary),
         {:ok, time} <- Time.new(hour, minute, second) do
      {:ok, time}
    else
      _ -> {:error, {:unreadable_time, binary}}
    end
end
defp read_v0(%__MODULE__{size: size} = chunk, etc) do
skip_bytes = size - 348
with <<
_::binary-size(skip_bytes),
etc::binary
>> <- etc do
{:ok, chunk, etc}
else
binary when is_binary(binary) -> {:error, :unexpected_eof}
end
end
defp read_v1(%__MODULE__{size: size} = chunk, etc) do
skip_bytes = size - 412
with <<
# 356 - 419
umid::binary-size(64),
# 420 - ...
_::binary-size(skip_bytes),
etc::binary
>> <- etc do
{:ok, %__MODULE__{chunk | umid: umid}, etc}
else
binary when is_binary(binary) -> {:error, :unexpected_eof}
end
end
defp read_v2(%__MODULE__{size: size} = chunk, etc) do
skip_bytes = size - 422
with <<
# 356 - 419
umid::binary-size(64),
# 420 - 421
loudness_value::16-signed-little,
# 422 - 423
loudness_range::16-signed-little,
# 424 - 425
max_true_peak_level::16-signed-little,
# 426 - 427
max_momentary_loudness::16-signed-little,
# 428 - 429
max_short_term_loudness::16-signed-little,
# 430 - ...
_::binary-size(skip_bytes),
etc::binary
>> <- etc do
{:ok,
%__MODULE__{
chunk
| umid: umid,
loudness_value: loudness_value,
loudness_range: loudness_range,
max_true_peak_level: max_true_peak_level,
max_momentary_loudness: max_momentary_loudness,
max_short_term_loudness: max_short_term_loudness
}, etc}
else
binary when is_binary(binary) -> {:error, :unexpected_eof}
end
end
@doc ~S"""
Read a BAE chunk.
"""
@spec read(binary) ::
{:ok, t, binary}
| {:error,
:unexpected_eof
| {:unexpected_four_cc, %{actual: FourCC.t(), expected: FourCC.t()}}
| {:unreadable_date, date_binary}
| {:unreadable_time, time_binary}
| {:unsupported_bae_version, non_neg_integer}}
def read(binary) do
with <<
# 0 - 3
bext_id::binary-size(4),
# 4 - 7
size::32-little,
# 8 - 263
description::binary-size(256),
# 264 - 295
originator::binary-size(32),
# 296 - 327
originator_reference::binary-size(32),
# 328 - 337
date_binary::binary-size(10),
# 338 - 345
time_binary::binary-size(8),
# 346 - 349
time_reference_low::32-little,
# 350 - 353
time_reference_high::32-little,
# 354 - 355
version::16-little,
etc::binary
>> <- binary,
:ok <- FourCC.verify(bext_id, @four_cc),
{:ok, date} <- date(date_binary),
{:ok, time} <- time(time_binary),
chunk <- %__MODULE__{
size: size,
description: CString.read(description),
originator: CString.read(originator),
originator_reference: CString.read(originator_reference),
origination_date: date,
origination_time: time,
time_reference_low: time_reference_low,
time_reference_high: time_reference_high,
version: version
},
{:ok, _, _} = result <-
(case version do
0x00 -> read_v0(chunk, etc)
0x01 -> read_v1(chunk, etc)
0x02 -> read_v2(chunk, etc)
_ -> {:error, {:unsupported_bae_version, version}}
end) do
result
else
binary when is_binary(binary) -> {:error, :unexpected_eof}
error -> error
end
end
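  # Example (sketch): reading a chunk from a binary that is assumed to
  # start with a well-formed "bext" chunk.
  #
  #     {:ok, %Wavex.Chunk.BAE{version: version}, rest} = Wavex.Chunk.BAE.read(binary)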
end
|
lib/wavex/chunk/bae.ex
| 0.763748
| 0.560674
|
bae.ex
|
starcoder
|
defmodule CoAP.Payload do
defstruct segments: [], multipart: false, data: <<>>, size: nil
alias CoAP.Block
def empty(), do: %__MODULE__{}
def add(%__MODULE__{segments: segments}, number, segment) do
%__MODULE__{
multipart: true,
segments: [{number, segment} | segments]
}
end
def to_binary(%__MODULE__{data: data, segments: []}), do: data
def to_binary(%__MODULE__{segments: segments, data: <<>>}) do
segments
|> Enum.reverse()
|> List.keysort(0)
|> Enum.uniq_by(&elem(&1, 0))
|> Enum.map(&elem(&1, 1))
|> Enum.join(<<>>)
end
@doc """
Extract the next segment of the payload's data given the
current offset and a requested size
Examples
iex> data = Enum.take(StreamData.binary(length: 1024), 1) |> hd()
iex> {_bytes, block, payload} = CoAP.Payload.segment_at(data, 256, 0)
iex> {block, payload.multipart, payload.size}
{%CoAP.Block{number: 0, more: true, size: 256}, true, 256}
iex> data = Enum.take(StreamData.binary(length: 1024), 1) |> hd()
iex> payload = %CoAP.Payload{data: data, size: 256}
iex> {_bytes, block, next_payload} = CoAP.Payload.segment_at(payload, 2)
iex> {block, next_payload.multipart, next_payload.size}
{%CoAP.Block{number: 2, more: true, size: 256}, true, 256}
iex> data = Enum.take(StreamData.binary(length: 1024), 1) |> hd()
iex> payload = %CoAP.Payload{data: data, size: 256}
iex> {_bytes, block, next_payload} = CoAP.Payload.segment_at(payload, 3)
iex> {block, next_payload.multipart, next_payload.size}
{%CoAP.Block{number: 3, more: false, size: 256}, false, 256}
iex> data = Enum.take(StreamData.binary(length: 1048), 1) |> hd()
iex> payload = %CoAP.Payload{data: data, size: 256}
iex> {bytes, block, _next_payload} = CoAP.Payload.segment_at(payload, 4)
iex> {block, byte_size(bytes)}
{%CoAP.Block{number: 4, more: false, size: 256}, 24}
iex> CoAP.Payload.segment_at(<<18, 5, 10, 3, 1, 2, 3>>, 512, 0)
{
<<18, 5, 10, 3, 1, 2, 3>>,
%CoAP.Block{number: 0, more: false, size: 512},
%CoAP.Payload{segments: [], multipart: false, data: <<18, 5, 10, 3, 1, 2, 3>>, size: 512}
}
"""
def segment_at(payload, number \\ nil)
def segment_at(%__MODULE__{data: <<>>, size: size}, _number),
do: {<<>>, Block.build({0, false, size}), %__MODULE__{}}
def segment_at(
%__MODULE__{data: data, size: size} = payload,
number
) do
offset = size * number
data_size = byte_size(data)
part_size = Enum.min([data_size - offset, size])
more = data_size > offset + part_size
data = data |> :binary.part(offset, part_size)
block = Block.build({number, more, size})
{data, block, %{payload | multipart: more}}
end
# This is the only time we can set size
def segment_at(data, size, number) when is_binary(data) do
%__MODULE__{data: data, size: size} |> segment_at(number)
end
end
|
lib/coap/payload.ex
| 0.848816
| 0.609379
|
payload.ex
|
starcoder
|
defprotocol Vow.Generatable do
@moduledoc """
Generatable protocol used by `Vow.gen/2` for generating data from
vows.
## Default Generators
There are a handful of default generators that are less than
optimal to use. The following all use relatively open-ended
generators with (potentially) restrictive filters, which have
a high chance to raise (see `StreamData.filter/3` for more details).
* `Vow.Also`
* `Vow.Amp`
* `Vow.Merge`
* `Function`
* `Regex`
* `Vow.Pat` (if `Expat` is installed)
`Vow.Ref` can create potentially recursive definitions (which is fine
for validating data), but can be potentially problematic for data
generation (as this does not use `StreamData.tree/2`).
If any of these vows are not overriden with explicit overrides in
`Vow.gen/2`, or using `Vow.with_gen/2`, then a warning for each of these
will be logged on that `Vow.gen/2` call (unless the `:ignore_warn?` option
is set to `true`).
"""
@fallback_to_any true
if Code.ensure_loaded?(StreamData) do
@type generator :: StreamData.t(term)
else
@type generator :: term
end
@type gen_fun :: (() -> generator)
@typedoc """
The options that can (optionally) be passed to a
`gen/2` call:
* ignore_warn? - whether or not generator warnings related to
'problematic' default generators should be logged (defaults to `false`)
"""
@type gen_opt :: {:ignore_warn?, boolean}
@type result :: {:ok, generator} | {:error, reason :: term}
@doc """
Given a `Vow.t`, returns either a generator or an error tuple
describing the problem that occurred while attempting to create
the generator.
The 'generator' in question will generate data that conforms to
the given vow.
"""
@spec gen(t, [gen_opt]) :: result
def gen(generatable, opts \\ [])
end
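# Example (a sketch using the Range implementation below; generated terms
# may be integers or sub-ranges conforming to the vow):
#
#     {:ok, generator} = Vow.Generatable.gen(1..10)
#     Enum.take(generator, 3)
#     #=> e.g. [2, 7, 3..9]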
if Code.ensure_loaded?(StreamData) do
defimpl Vow.Generatable, for: StreamData do
@moduledoc false
@impl Vow.Generatable
def gen(stream_data, _opts) do
{:ok, stream_data}
end
end
defimpl Vow.Generatable, for: Function do
@moduledoc false
alias Vow.Utils
import StreamData
import StreamDataUtils
@impl Vow.Generatable
def gen(vow, opts) when is_function(vow, 1) do
if Map.has_key?(supported_functions(), vow) do
{:ok, Map.get(supported_functions(), vow)}
else
ignore_warn? = Keyword.get(opts, :ignore_warn?, false)
_ = Utils.no_override_warn(vow, ignore_warn?)
{:ok, filter(string(:printable), vow)}
end
end
def gen(vow, _opts) do
{:error, {:invalid_function_arity, vow}}
end
@spec supported_functions() :: %{optional((term -> boolean)) => StreamData.t(term)}
defp supported_functions do
# credo:disable-for-previous-line Credo.Check.Refactor.ABCSize
%{
&is_boolean/1 => boolean(),
&is_atom/1 => atom(:alphanumeric),
&is_binary/1 => binary(),
&is_bitstring/1 => bitstring(),
&is_float/1 => float(),
&is_integer/1 => integer(),
&is_number/1 => one_of([integer(), float()]),
&is_nil/1 => constant(nil),
&is_map/1 => map_of(simple(), simple()),
&is_list/1 => list_of(simple()),
&is_tuple/1 => tuple_of(simple())
}
end
end
defimpl Vow.Generatable, for: List do
@moduledoc false
@impl Vow.Generatable
def gen(vow, opts) do
vow
|> Enum.reduce({:ok, []}, fn
_, {:error, reason} ->
{:error, reason}
v, {:ok, acc} ->
case @protocol.gen(v, opts) do
{:error, reason} -> {:error, reason}
{:ok, data} -> {:ok, [data | acc]}
end
end)
|> case do
{:error, reason} -> {:error, reason}
{:ok, data} -> {:ok, StreamData.fixed_list(Enum.reverse(data))}
end
end
end
defimpl Vow.Generatable, for: Tuple do
@moduledoc false
@impl Vow.Generatable
def gen(vow, opts) do
vow
|> Tuple.to_list()
|> Enum.reduce({:ok, []}, &reducer(&1, &2, opts))
|> to_tuple()
end
@spec reducer(Vow.t(), {:ok, [Vow.t()]} | {:error, reason}, keyword) ::
{:ok, [Vow.t()]} | {:error, reason}
when reason: term
defp reducer(_, {:error, reason}, _opts) do
{:error, reason}
end
defp reducer(vow, {:ok, acc}, opts) do
case @protocol.gen(vow, opts) do
{:error, reason} -> {:error, reason}
{:ok, data} -> {:ok, [data | acc]}
end
end
@spec to_tuple({:ok, [Vow.t()]} | {:error, reason}) ::
{:ok, StreamData.t(tuple)} | {:error, reason}
when reason: term
defp to_tuple({:error, reason}) do
{:error, reason}
end
defp to_tuple({:ok, data}) do
tuple = List.to_tuple(Enum.reverse(data))
{:ok, StreamData.tuple(tuple)}
end
end
defimpl Vow.Generatable, for: Map do
@moduledoc false
@impl Vow.Generatable
def gen(vow, opts) do
vow
|> Enum.reduce({:ok, %{}}, fn
_, {:error, reason} ->
{:error, reason}
{k, v}, {:ok, acc} ->
case @protocol.gen(v, opts) do
{:error, reason} -> {:error, reason}
{:ok, data} -> {:ok, Map.put(acc, k, data)}
end
end)
|> case do
{:error, reason} -> {:error, reason}
{:ok, data} -> {:ok, StreamData.fixed_map(data)}
end
end
end
defimpl Vow.Generatable, for: MapSet do
@moduledoc false
@impl Vow.Generatable
    # A bare %{} pattern matches any map, so guard on size to match only
    # the empty MapSet.
    def gen(%MapSet{map: map}, _opts) when map_size(map) == 0 do
      {:ok, StreamData.constant(MapSet.new([]))}
    end
def gen(vow, _opts) do
{:ok,
StreamData.one_of([
StreamData.member_of(vow),
StreamData.map(
StreamData.uniq_list_of(StreamData.member_of(vow)),
&MapSet.new/1
)
])}
end
end
defimpl Vow.Generatable, for: Regex do
@moduledoc false
alias Vow.Utils
import StreamData
@impl Vow.Generatable
def gen(vow, opts) do
ignore_warn? = Keyword.get(opts, :ignore_warn?, false)
_ = Utils.no_override_warn(vow, ignore_warn?)
{:ok, filter(string(:printable), &Regex.match?(vow, &1))}
end
end
defimpl Vow.Generatable, for: Range do
@moduledoc false
@impl Vow.Generatable
def gen(min..max, _opts) do
{:ok,
StreamData.one_of([
StreamData.integer(min..max),
StreamDataUtils.range(min..max)
])}
end
end
defimpl Vow.Generatable, for: Date.Range do
@moduledoc false
@impl Vow.Generatable
def gen(vow, _opts) do
{:ok,
StreamData.one_of([
StreamDataUtils.date(range: vow),
StreamDataUtils.date_range(range: vow)
])}
end
end
defimpl Vow.Generatable, for: Any do
@moduledoc false
@impl Vow.Generatable
def gen(%{__struct__: mod} = vow, opts) do
case @protocol.Map.gen(Map.delete(vow, :__struct__), opts) do
{:error, reason} -> {:error, reason}
{:ok, data} -> {:ok, StreamData.map(data, &Map.put(&1, :__struct__, mod))}
end
end
    def gen(vow, _opts) do
      # Wrap in :ok to match the result type returned by every other impl.
      {:ok, StreamData.constant(vow)}
    end
end
end
|
lib/vow/generatable.ex
| 0.910443
| 0.57087
|
generatable.ex
|
starcoder
|
defmodule Disco.EventStore.Client do
@moduledoc """
The `Disco.EventStore.Client` specification.
A client is used to interact with `Disco.EventStore` while keeping details isolated.
Like other components in `Disco`, even the `Disco.EventStore.Client` is built as a
behaviour that implements default callbacks. This means that the simplest definition of
a client can be achieved like the following:
```
defmodule MyApp.EventStoreClient do
use Disco.EventStore.Client
end
```
"""
@doc """
Called to emit an event to the event store.
"""
@callback emit(type :: binary(), payload :: map()) ::
{:ok, event :: map()} | {:error, reason :: any()}
@doc """
Called to load emitted events that need to be consumed.
"""
@callback load_events_with_types(event_types :: [binary()]) :: list()
@doc """
  Called to obtain the current offset for a given consumer. Returns `0` by default.
"""
@callback get_consumer_offset(consumer :: binary()) :: integer()
@doc """
Called to load events to be consumed after a given offset.
"""
@callback load_events_after_offset(events_listened :: [binary], offset :: integer) :: [map()]
@doc """
Called to update current offset counter.
"""
@callback update_consumer_offset(consumer :: binary(), offset :: integer()) :: {:ok, integer()}
defmacro __using__(_opts) do
quote do
@behaviour Disco.EventStore.Client
alias Disco.EventStore
alias Disco.EventStore.Client
@doc """
Emits an event.
"""
@spec emit(type :: binary, payload :: map()) :: {:ok, event :: map()}
def emit(type, %{} = payload), do: Client.build_event(type, payload) |> EventStore.emit()
@doc """
Loads events with given types.
"""
@spec load_events_with_types(event_types :: [binary()]) :: list()
def load_events_with_types(event_types), do: EventStore.list_events_with_types(event_types)
@doc """
Returns current offset for a given consumer.
"""
@spec get_consumer_offset(consumer :: binary()) :: integer()
def get_consumer_offset(consumer), do: EventStore.event_consumer_offset(consumer)
@doc """
Updates current offset counter.
"""
@spec update_consumer_offset(consumer :: binary(), offset :: integer()) :: {:ok, integer()}
def update_consumer_offset(consumer, offset) do
EventStore.update_event_consumer_offset(consumer, offset)
end
@doc """
Loads events emitted after a given offset.
"""
@spec load_events_after_offset(events_listened :: [binary], offset :: integer()) :: [map()]
def load_events_after_offset(events_listened, offset) do
EventStore.load_events_after_offset(events_listened, offset)
end
end
end
@doc """
Builds an event map.
"""
@spec build_event(type :: binary(), payload :: map()) :: event :: map()
def build_event(type, payload) do
%{type: type, payload: Disco.EventPayloadEncoder.encode(payload)}
end
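  # Example (sketch): with the client module defined in the moduledoc above,
  #
  #     {:ok, event} = MyApp.EventStoreClient.emit("user_registered", %{id: 1})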
end
|
lib/disco/event_store/client.ex
| 0.909802
| 0.62065
|
client.ex
|
starcoder
|
defmodule CSSEx.Helpers.Functions do
@moduledoc """
Default base functions to use in stylesheets.
```
@fn::opacity(red, 0.8)
```
"""
@doc """
Lighten function, takes a color in the form of a string and a number representing
the percentage to lighten and returns a CSS rgba() string.
@fn::lighten(orange, 10)
"""
def lighten(_ctx_content, color, percentage) do
{
:ok,
%CSSEx.HSLA{l: %CSSEx.Unit{value: l} = l_unit} = hsla
} = CSSEx.HSLA.new_hsla(color)
{percentage, _} = Float.parse(percentage)
new_l =
case l + percentage do
n_l when n_l <= 100 and n_l >= 0 -> n_l
n_l when n_l > 100 -> 100
n_l when n_l < 0 -> 0
end
%CSSEx.HSLA{hsla | l: %CSSEx.Unit{l_unit | value: new_l}}
|> CSSEx.RGBA.from_hsla()
|> elem(1)
|> to_string
end
@doc """
Darken function, takes a color in the form of a string and a number representing
the percentage to darken and returns a CSS rgba() string.
@fn::darken(orange, 25)
"""
def darken(_ctx_content, color, percentage) do
{
:ok,
%CSSEx.HSLA{l: %CSSEx.Unit{value: l} = l_unit} = hsla
} = CSSEx.HSLA.new_hsla(color)
{percentage, _} = Float.parse(percentage)
new_l =
case l - percentage do
n_l when n_l <= 100 and n_l >= 0 -> n_l
n_l when n_l > 100 -> 100
n_l when n_l < 0 -> 0
end
%CSSEx.HSLA{hsla | l: %CSSEx.Unit{l_unit | value: new_l}}
|> CSSEx.RGBA.from_hsla()
|> elem(1)
|> to_string
end
@doc """
Opacity function, takes a color in the form of a string and a number representing
the desired alpha channel value in the form of a float and returns a CSS rgba()
string.
@fn::opacity(orange, 0.5)
"""
def opacity(_ctx_content, color, alpha) do
{:ok, %CSSEx.RGBA{} = rgba} = CSSEx.RGBA.new_rgba(color)
{parsed_alpha, _} = Float.parse(alpha)
n_alpha =
case parsed_alpha do
n when n <= 1 and n >= 0 -> n
n when n > 1 -> 1
n when n < 0 -> 0
end
%CSSEx.RGBA{rgba | a: n_alpha}
|> to_string()
end
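  # Example (sketch; the first argument is the evaluation context and is not
  # used by these functions — the exact output string depends on the
  # CSSEx.RGBA String.Chars implementation):
  #
  #     CSSEx.Helpers.Functions.opacity(nil, "orange", "0.5")
  #     #=> an rgba() string such as "rgba(255,165,0,0.5)"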
end
|
lib/helpers/functions.ex
| 0.870184
| 0.776538
|
functions.ex
|
starcoder
|
defmodule PgContrivance.Postgres do
@moduledoc """
  The base functions that interface directly with PostgreSQL via Postgrex.
Uses `postgrex` for communicating to the database
and a connection pool, such as `poolboy`.
## Options
Postgres options split in different categories described
below. All options should be given via the repository
configuration.
### Compile time options
Those options should be set in the config file and require
recompilation in order to make an effect.
* `:adapter` - The adapter name, in this case, `Ecto.Adapters.Postgres`
* `:name`- The name of the Repo supervisor process
* `:pool` - The connection pool module, defaults to `Ecto.Pools.Poolboy`
* `:pool_timeout` - The default timeout to use on pool calls, defaults to `5000`
* `:timeout` - The default timeout to use on queries, defaults to `15000`
### Connection options
* `:hostname` - Server hostname
* `:port` - Server port (default: 5432)
* `:username` - Username
* `:password` - <PASSWORD>
* `:parameters` - Keyword list of connection parameters
* `:ssl` - Set to true if ssl should be used (default: false)
* `:ssl_opts` - A list of ssl options, see Erlang's `ssl` docs
* `:connect_timeout` - The timeout for establishing new connections (default: 5000)
* `:extensions` - Specify extensions to the postgres adapter
* `:after_connect` - A `{mod, fun, args}` to be invoked after a connection is established
### Storage options
* `:encoding` - the database encoding (default: "UTF8")
* `:template` - the template to create the database from
* `:lc_collate` - the collation order
* `:lc_ctype` - the character classification
"""
@name __MODULE__
@doc """
Connect to postgresql using the application config connection merged with
opts passed in.
The application should define a :pg_contrivance config
`config :pg_contrivance, connection: [database: "contrived"]`
"""
def start_link() do
    connection_options()
|> Postgrex.start_link()
end
def connection_options() do
Application.get_env(:pg_contrivance, :connection, [])
|> Keyword.put(:name, @name)
end
@doc """
If there isn't a connection process started then one is added to the command
"""
def query(statement, params, opts \\ []) when is_binary(statement) and is_list(params) do
Postgrex.query(@name, statement, params, opts)
end
def query!(statement, params, opts \\ []) when is_binary(statement) and is_list(params) do
Postgrex.query!(@name, statement, params, opts)
end
  def transaction(statement, params, opts \\ []) when is_binary(statement) and is_list(params) do
    result =
      Postgrex.transaction(@name, fn(conn) ->
        case Postgrex.query(conn, statement, params, opts) do
          {:ok, result} ->
            result

          {:error, err} ->
            IO.puts "TRANSACTION FAILED - ROLLING BACK"
            # Roll back the transaction we are already inside of, instead
            # of opening a second one just to roll it back.
            DBConnection.rollback(conn, err)
        end
      end)

    case result do
      {:ok, value} -> value
      {:error, _reason} = error -> error
    end
  end
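  # Example (sketch; assumes the :pg_contrivance connection config points at
  # a reachable database):
  #
  #     {:ok, _pid} = PgContrivance.Postgres.start_link()
  #     {:ok, %Postgrex.Result{rows: [[42]]}} =
  #       PgContrivance.Postgres.query("SELECT $1::int + 1", [41])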
end
|
lib/pg_contrivance/postgres.ex
| 0.850748
| 0.55929
|
postgres.ex
|
starcoder
|
defmodule APDS9960.ALS do
@moduledoc "The ambient light and RGB color sensing."
alias APDS9960.{Comm, Sensor}
@doc """
Returns all the current Color / ALS settings.
"""
@spec settings(Sensor.t()) :: %{
adc_integration_time: byte,
enabled: boolean,
gain: 0..3,
interrupt_enabled: boolean,
interrupt_persistence: byte,
saturation_interrupt: boolean,
          threshold: %{high: 0..0xFFFF, low: 0..0xFFFF},
wait_long_enabled: boolean,
wait_time: byte
}
def settings(%Sensor{} = sensor) do
%{
enabled: enabled?(sensor),
interrupt_enabled: interrupt_enabled?(sensor),
adc_integration_time: get_adc_integration_time(sensor),
wait_time: get_wait_time(sensor),
threshold: get_threshold(sensor),
interrupt_persistence: get_interrupt_persistence(sensor),
wait_long_enabled: wait_long_enabled?(sensor),
gain: get_gain(sensor),
saturation_interrupt: saturation_interrupt_enabled?(sensor)
}
end
## ALS Enable
@spec enabled?(Sensor.t()) :: boolean
def enabled?(%Sensor{transport: i2c}) do
{:ok, %{als: value}} = Comm.get_enable(i2c)
value == 1
end
@spec enable(Sensor.t(), 0 | 1) :: :ok
def enable(%Sensor{transport: i2c}, value \\ 1) do
Comm.set_enable(i2c, als: value)
end
@spec interrupt_enabled?(Sensor.t()) :: boolean
def interrupt_enabled?(%Sensor{transport: i2c}) do
{:ok, %{als_interrupt: value}} = Comm.get_enable(i2c)
value == 1
end
@spec enable_interrupt(Sensor.t(), 0 | 1) :: :ok
def enable_interrupt(%Sensor{transport: i2c}, value \\ 1) do
Comm.set_enable(i2c, als_interrupt: value)
end
@spec wait_enabled?(Sensor.t()) :: boolean
def wait_enabled?(%Sensor{transport: i2c}) do
{:ok, %{wait: value}} = Comm.get_enable(i2c)
value == 1
end
@spec enable_wait(Sensor.t(), 0 | 1) :: :ok
def enable_wait(%Sensor{transport: i2c}, value \\ 1) do
Comm.set_enable(i2c, wait: value)
end
## ALS ADC Integration Time
@spec get_adc_integration_time(Sensor.t()) :: byte
def get_adc_integration_time(%Sensor{transport: i2c}) do
{:ok, data} = Comm.get_adc_integration_time(i2c)
:binary.decode_unsigned(data)
end
@spec set_adc_integration_time(Sensor.t(), byte) :: :ok
def set_adc_integration_time(%Sensor{transport: i2c}, byte) do
Comm.set_adc_integration_time(i2c, <<byte>>)
end
## Wait Time
@spec get_wait_time(Sensor.t()) :: byte
def get_wait_time(%Sensor{transport: i2c}) do
{:ok, data} = Comm.get_wait_time(i2c)
:binary.decode_unsigned(data)
end
@spec set_wait_time(Sensor.t(), byte) :: :ok
def set_wait_time(%Sensor{transport: i2c}, byte) do
Comm.set_wait_time(i2c, <<byte>>)
end
## ALS low/high threshold
@spec get_threshold(Sensor.t()) :: %{high: 0..0xFFFF, low: 0..0xFFFF}
def get_threshold(%Sensor{transport: i2c}) do
{:ok, x} = Comm.get_als_threshold(i2c)
%{low: x.low, high: x.high}
end
@spec set_threshold(Sensor.t(), {low :: 0..0xFFFF, high :: 0..0xFFFF}) :: :ok
def set_threshold(%Sensor{transport: i2c}, {low, high}) do
Comm.set_als_threshold(i2c, {low, high})
end
## ALS Interrupt Persistence
@spec get_interrupt_persistence(Sensor.t()) :: 0..15
def get_interrupt_persistence(%Sensor{transport: i2c}) do
{:ok, x} = Comm.get_interrupt_persistence(i2c)
x.als
end
@spec set_interrupt_persistence(Sensor.t(), 0..15) :: :ok
def set_interrupt_persistence(%Sensor{transport: i2c}, byte) do
Comm.set_interrupt_persistence(i2c, als: byte)
end
## Wait Long Enable
@spec wait_long_enabled?(Sensor.t()) :: boolean
def wait_long_enabled?(%Sensor{transport: i2c}) do
{:ok, x} = Comm.get_config1(i2c)
x.wait_long == 1
end
@spec enable_wait_long(Sensor.t(), 0 | 1) :: :ok
def enable_wait_long(%Sensor{transport: i2c}, value \\ 1) do
Comm.set_config1(i2c, wait_long: value)
end
## ALS Gain Control
@spec get_gain(Sensor.t()) :: 0..3
def get_gain(%Sensor{transport: i2c}) do
{:ok, x} = Comm.get_control(i2c)
x.als_and_color_gain
end
@spec set_gain(Sensor.t(), 0..3) :: :ok
def set_gain(%Sensor{transport: i2c}, value) do
Comm.set_control(i2c, als_and_color_gain: value)
end
## Clear diode Saturation Interrupt Enable
@spec saturation_interrupt_enabled?(Sensor.t()) :: boolean
def saturation_interrupt_enabled?(%Sensor{transport: i2c}) do
{:ok, x} = Comm.get_config2(i2c)
x.als_saturation_interrupt == 1
end
@spec enable_saturation_interrupt(Sensor.t(), 0 | 1) :: :ok
def enable_saturation_interrupt(%Sensor{transport: i2c}, value \\ 1) do
Comm.set_config2(i2c, als_saturation_interrupt: value)
end
## ALS Status
@spec status(Sensor.t()) :: %{
clear_photo_diode_saturation: boolean,
interrupt: boolean,
valid: boolean
}
def status(%Sensor{transport: i2c}) do
{:ok, x} = Comm.status(i2c)
%{
interrupt: x.als_interrupt == 1,
valid: x.als_valid == 1,
clear_photo_diode_saturation: x.als_interrupt == 1
}
end
## Color Data
  @spec read_color(Sensor.t(), keyword()) :: %{
red: 0..0xFFFF,
green: 0..0xFFFF,
blue: 0..0xFFFF,
clear: 0..0xFFFF
}
def read_color(%Sensor{} = sensor, _opts \\ []) do
{:ok, x} = Comm.color_data(sensor.transport)
%{
red: x.red,
green: x.green,
blue: x.blue,
clear: x.clear
}
end
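  # Example (sketch; `sensor` is assumed to be an already-initialized
  # APDS9960.Sensor struct, and the readings are illustrative):
  #
  #     :ok = APDS9960.ALS.enable(sensor)
  #     APDS9960.ALS.read_color(sensor)
  #     #=> %{red: 120, green: 85, blue: 60, clear: 240}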
# Clear Channel Interrupt Clear
@spec clear_interrupt(Sensor.t()) :: :ok
def clear_interrupt(%Sensor{transport: i2c}) do
Comm.clear_als_clear_channel_interrupt(i2c)
end
end
|
lib/apds9960/als.ex
| 0.873822
| 0.482673
|
als.ex
|
starcoder
|
defmodule Clover.Script do
@moduledoc """
A data structure for handling `Clover.Message`s
"""
alias Clover.{
Error,
Message
}
alias Clover.Util.Logger
import Kernel, except: [match?: 2]
@type match_mode :: :overhear | :respond
@type script :: {module :: atom, function :: atom} | function()
@type data :: term
@type response ::
:nomatch
| :noreply
| {:noreply, data}
| Message.t()
| {Message.t(), data}
| [Message.t()]
| :invalid_return
@enforce_keys [:match, :respond]
defstruct match: nil,
match_mode: :respond,
respond: nil
@type t :: %__MODULE__{
match: Regex.t(),
match_mode: match_mode,
respond: script
}
# Descends into the list of scripts, attempting to match the last script first, to preserve the order in which
# scripts were declared
@spec handle_message(Message.t(), data :: map, [t()] | atom) :: response()
def handle_message(_message, _data, []), do: :noreply
def handle_message(message, data, [script | []]),
do: handle(script, message, data)
def handle_message(message, data, [script | tail]) do
case handle_message(message, data, tail) do
:nomatch -> handle(script, message, data)
reply -> reply
end
end
@spec handle(t, Message.t(), data) :: response
# If the script is a module, then skip the match and try all of the module's scripts
def handle(%__MODULE__{respond: mod}, %Message{} = message, data)
when is_atom(mod) do
handle_message(message, data, mod.scripts())
end
def handle(%__MODULE__{} = script, %Message{} = message, data) do
case match(script, message) do
nil ->
:nomatch
match ->
validated =
script
|> respond(message, match, data)
|> validate_response()
case validated do
{:ok, response} ->
response
{:error, %Error{} = error} ->
log(:error, Error.message(error))
:nomatch
end
end
end
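  # Example (sketch; `MyBot.Scripts.pong/3` is a hypothetical responder that
  # returns a `Clover.Message`):
  #
  #     script = Script.new(:respond, ~r/ping/, {MyBot.Scripts, :pong})
  #     Script.handle_message(message, %{}, [script])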
def match(%__MODULE__{match_mode: :overhear} = script, message) do
match(script.match, message)
end
def match(%__MODULE__{match_mode: :respond} = script, message) do
original_text = message.text
mention_format = Message.mention_format(message, :me)
case Message.trim_leading_mention(message, mention_format) do
%{text: ^original_text} -> nil
trimmed -> match(script.match, trimmed)
end
end
def match(%Regex{} = regex, %Message{text: text}) do
case Regex.run(regex, text) do
nil -> nil
captures -> %{captures: captures, named_captures: Regex.named_captures(regex, text)}
end
end
def respond(%__MODULE__{respond: {mod, fun}}, message, match, data) do
apply(mod, fun, [message, match, data])
end
@spec validate_response(response) :: {:ok, response} | {:error, %Error{}}
defp validate_response(response) do
case response do
%Message{action: action} when action in [:say, :typing] ->
{:ok, response}
{%Message{action: action}, _new_data} when action in [:say, :typing] ->
{:ok, response}
messages when is_list(messages) ->
{:ok, response}
{:noreply, _new_data} ->
{:ok, response}
:noreply ->
{:ok, response}
:nomatch ->
{:ok, response}
invalid_return ->
{:error, Error.exception({:invalid_script_return, invalid_return})}
end
end
@doc """
Create a new script struct
## Examples
iex> Script.new(:overhear, ~r/hi/, {SomeModule, :some_function})
%Script{match: ~r/hi/, match_mode: :overhear, respond: {SomeModule, :some_function}}
iex> Script.new(%Script{match: ~r/hi/, match_mode: :overhear, respond: {SomeModule, :some_function}})
%Script{match: ~r/hi/, match_mode: :overhear, respond: {SomeModule, :some_function}}
iex> Script.new({:overhear, ~r/hi/, {SomeModule, :some_function}})
%Script{match: ~r/hi/, match_mode: :overhear, respond: {SomeModule, :some_function}}
"""
def new(%__MODULE__{} = struct), do: struct
def new(tuple) when is_tuple(tuple), do: from_tuple(tuple)
def new(mode, match, {mod, fun}) when is_atom(mod) and is_atom(fun) do
%__MODULE__{match: match, match_mode: mode, respond: {mod, fun}}
end
def new(mode, match, script) when is_atom(script) do
%__MODULE__{match: match, match_mode: mode, respond: script}
end
@doc """
Given a script struct, return a tuple
## Examples
iex> Script.to_tuple(%Script{match: ~r/hi/, match_mode: :overhear, respond: {SomeModule, :some_function}})
{:overhear, ~r/hi/, {SomeModule, :some_function}}
"""
def to_tuple(%__MODULE__{match: match, match_mode: mode, respond: respond}) do
{mode, match, respond}
end
@doc """
Given a script tuple, return a struct
## Examples
iex> Script.from_tuple({:overhear, ~r/hi/, {SomeModule, :some_function}})
%Script{match: ~r/hi/, match_mode: :overhear, respond: {SomeModule, :some_function}}
"""
def from_tuple({mode, match, respond}) when mode in [:overhear, :respond] do
%__MODULE__{match: match, match_mode: mode, respond: respond}
end
defp log(level, message, opts \\ []) do
Logger.log(level, "message worker", message, opts)
end
end
|
lib/script.ex
| 0.841744
| 0.514278
|
script.ex
|
starcoder
|
defmodule BrDocs do
@moduledoc ~S"""
Generation, validation and formatting for Brazilian docs.
Currently supported docs:
* `CPF` it's a Brazilian identification number for individuals (like SSN, in USA).
* `CNPJ` it's a Brazilian identification number for companies.
"""
alias BrDocs.Doc
@doc """
Used mostly for testing, yet you can generate a valid doc. Returns a `BrDocs.Doc`.
The kind must be one of #{Doc.formatted_available_docs()}.
## Options
* `formatted` - a boolean to format the doc after generation. Defaults to `false`.
## Examples
iex> BrDocs.generate(:cpf)
%BrDocs.Doc{kind: :cpf, value: "12345678909"}
iex> BrDocs.generate(:cpf, formatted: true)
%BrDocs.Doc{kind: :cpf, value: "123.456.789-09"}
iex> BrDocs.generate(:cnpj)
%BrDocs.Doc{kind: :cnpj, value: "11444777000161"}
iex> BrDocs.generate(:cnpj, formatted: true)
%BrDocs.Doc{kind: :cnpj, value: "11.444.777/0001-61"}
"""
@spec generate(atom(), keyword()) :: BrDocs.Doc.t()
def generate(kind, opts \\ [formatted: false])
def generate(:cpf, opts), do: BrDocs.CPF.generate(opts)
def generate(:cnpj, opts), do: BrDocs.CNPJ.generate(opts)
@doc """
Formats the value. Returns a formatted `BrDocs.Doc`.
The atom argument must be one of #{Doc.formatted_available_docs()}.
## Examples
iex> BrDocs.format("12345678909", :cpf)
%BrDocs.Doc{kind: :cpf, value: "123.456.789-09"}
iex> BrDocs.format("11444777000161", :cnpj)
%BrDocs.Doc{kind: :cnpj, value: "11.444.777/0001-61"}
"""
@spec format(String.t(), atom()) :: BrDocs.Doc.t()
def format(value, kind)
def format(value, :cpf), do: BrDocs.CPF.Formatter.format(value)
def format(value, :cnpj), do: BrDocs.CNPJ.Formatter.format(value)
@doc """
Formats the `BrDocs.Doc` value. Returns a formatted `BrDocs.Doc`.
## Examples
iex> BrDocs.format(%BrDocs.Doc{kind: :cpf, value: "12345678909"})
%BrDocs.Doc{kind: :cpf, value: "123.456.789-09"}
iex> BrDocs.format(%BrDocs.Doc{kind: :cnpj, value: "11444777000161"})
%BrDocs.Doc{kind: :cnpj, value: "11.444.777/0001-61"}
"""
@spec format(BrDocs.Doc.t()) :: BrDocs.Doc.t()
def format(%Doc{kind: :cpf} = brdoc), do: BrDocs.CPF.Formatter.format(brdoc)
def format(%Doc{kind: :cnpj} = brdoc), do: BrDocs.CNPJ.Formatter.format(brdoc)
@doc """
Validates the value. Returns a boolean.
The atom argument must be one of #{Doc.formatted_available_docs()}.
## Examples
iex> BrDocs.validate("12345678909", :cpf)
true
iex> BrDocs.validate("12345678900", :cpf)
false
iex> BrDocs.validate("123.456.789-09", :cpf)
true
iex> BrDocs.validate("123.456.789-00", :cpf)
false
iex> BrDocs.validate("11444777000161", :cnpj)
true
iex> BrDocs.validate("11444777000160", :cnpj)
false
iex> BrDocs.validate("11.444.777/0001-61", :cnpj)
true
iex> BrDocs.validate("11.444.777/0001-60", :cnpj)
false
"""
  @spec validate(String.t(), atom()) :: boolean()
def validate(value, kind)
def validate(value, :cpf), do: BrDocs.CPF.Validator.validate(value)
def validate(value, :cnpj), do: BrDocs.CNPJ.Validator.validate(value)
@doc """
Validates `BrDocs.Doc`. Returns a boolean.
## Examples
iex> BrDocs.validate(%BrDocs.Doc{kind: :cpf, value: "12345678909"})
true
iex> BrDocs.validate(%BrDocs.Doc{kind: :cpf, value: "12345678900"})
false
iex> BrDocs.validate(%BrDocs.Doc{kind: :cpf, value: "123.456.789-09"})
true
iex> BrDocs.validate(%BrDocs.Doc{kind: :cpf, value: "123.456.789-00"})
false
iex> BrDocs.validate(%BrDocs.Doc{kind: :cnpj, value: "11444777000161"})
true
iex> BrDocs.validate(%BrDocs.Doc{kind: :cnpj, value: "11444777000160"})
false
iex> BrDocs.validate(%BrDocs.Doc{kind: :cnpj, value: "11.444.777/0001-61"})
true
iex> BrDocs.validate(%BrDocs.Doc{kind: :cnpj, value: "11.444.777/0001-60"})
false
"""
  @spec validate(BrDocs.Doc.t()) :: boolean()
def validate(%Doc{kind: :cpf} = brdoc), do: BrDocs.CPF.Validator.validate(brdoc)
def validate(%Doc{kind: :cnpj} = brdoc), do: BrDocs.CNPJ.Validator.validate(brdoc)
end
|
lib/brdocs.ex
| 0.89594
| 0.471284
|
brdocs.ex
|
starcoder
|
defmodule AkinML.Axon.Name do
require Axon
require Logger
@epochs 10
@learning_rate 0.001
@loss :mean_squared_error
@dropout_rate 0.1
@input_columns [
"Bag Distance",
"Chunk Set",
"Dice Sorensen",
"Metaphone",
"Double Metaphone",
"Double Metaphone Chunks",
"Jaccard",
"Jaro-Winkler",
"Levenshtein",
"NGram",
"Overlap",
"Sorted Chunks",
"Tversky",
"Initials",
"Match"
]
def train() do
# split the data into test and train sets, each with inputs and targets
{test_inputs, test_targets, train_inputs, train_targets} =
Axon.Data.split_inputs(
"./lib/axon/new.data",
input_columns: @input_columns,
target_column: "Match",
test_train_ratio: 0.1
)
# train the model
model = do_training(train_inputs, train_targets)
# make some predictions
test_inputs
|> Enum.zip(test_targets)
|> Enum.each(fn {name_input, actual_match} ->
      predicted_match = predict(model, name_input) |> Float.round(8)
      # scalar/1 may return a float (0.0/1.0), which would not match the
      # integer patterns below, so round before matching.
      actual_match = round(scalar(actual_match))

      color =
        case actual_match do
          0 -> if predicted_match < 0.98, do: :green, else: :red
          1 -> if predicted_match > 0.98, do: :green, else: :red
        end
Logger.info("Actual: #{actual_match}. Predicted: #{predicted_match}.", ansi_color: color)
end)
end
def do_training(inputs, targets) do
model =
Axon.input({nil, Enum.count(@input_columns)})
|> Axon.dense(14)
|> Axon.dropout(rate: @dropout_rate)
|> Axon.dense(1)
optimizer = Axon.Optimizers.adamw(@learning_rate)
%{params: trained_params} =
model
|> Axon.Training.step(@loss, optimizer)
|> Axon.Training.train(inputs, targets, epochs: @epochs)
{model, trained_params}
end
def predict({model, trained_params}, name_input) do
model
|> Axon.predict(trained_params, name_input)
|> Nx.to_flat_list()
|> List.first()
end
def scalar(tensor) do
tensor |> Nx.to_flat_list() |> List.first()
end
end
|
lib/axon/name.ex
| 0.725065
| 0.438004
|
name.ex
|
starcoder
|
defmodule Ecto do
@moduledoc ~S"""
Ecto is split into 4 main components:
* `Ecto.Repo` - repositories are wrappers around the data store.
Via the repository, we can create, update, destroy and query existing entries.
A repository needs an adapter and credentials to communicate to the database
* `Ecto.Schema` - schemas are used to map any data source into an Elixir
struct. We will often use them to map tables into Elixir data but that's
one of their use cases and not a requirement for using Ecto
* `Ecto.Changeset` - changesets provide a way for developers to filter
and cast external parameters, as well as a mechanism to track and
validate changes before they are applied to your data
* `Ecto.Query` - written in Elixir syntax, queries are used to retrieve
information from a given repository. Queries in Ecto are secure, avoiding
common problems like SQL Injection, while still being composable, allowing
developers to build queries piece by piece instead of all at once
In the following sections, we will provide an overview of those components and
how they interact with each other. Feel free to access their respective module
documentation for more specific examples, options and configuration.
If you want to quickly check a sample application using Ecto, please check
the [getting started guide](http://hexdocs.pm/ecto/getting-started.html) and
the accompanying sample application.
After exploring the documentation and guides, consider checking out the
["What's new in Ecto 2.0"](http://pages.plataformatec.com.br/ebook-whats-new-in-ecto-2-0)
free ebook to learn more about many features in Ecto 2.0 such as `many_to_many`,
schemaless queries, concurrent testing and more.
## Repositories
`Ecto.Repo` is a wrapper around the database. We can define a
repository as follows:
defmodule Repo do
use Ecto.Repo, otp_app: :my_app
end
Where the configuration for the Repo must be in your application
environment, usually defined in your `config/config.exs`:
config :my_app, Repo,
adapter: Ecto.Adapters.Postgres,
database: "ecto_simple",
username: "postgres",
password: "<PASSWORD>",
hostname: "localhost",
# OR use a URL to connect instead
url: "postgres://postgres:postgres@localhost/ecto_simple"
Each repository in Ecto defines a `start_link/0` function that needs to be invoked
before using the repository. In general, this function is not called directly,
but used as part of your application supervision tree.
If your application was generated with a supervisor (by passing `--sup` to `mix new`)
you will have a `lib/my_app.ex` file containing the application start callback that
defines and starts your supervisor. You just need to edit the `start/2` function to
start the repo as a supervisor on your application's supervisor:
def start(_type, _args) do
import Supervisor.Spec
children = [
supervisor(Repo, [])
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
## Schema
  Schemas allow developers to define the shape of their data.
Let's see an example:
defmodule Weather do
use Ecto.Schema
# weather is the DB table
schema "weather" do
field :city, :string
field :temp_lo, :integer
field :temp_hi, :integer
field :prcp, :float, default: 0.0
end
end
By defining a schema, Ecto automatically defines a struct with
the schema fields:
iex> weather = %Weather{temp_lo: 30}
iex> weather.temp_lo
30
The schema also allows us to interact with a repository:
iex> weather = %Weather{temp_lo: 0, temp_hi: 23}
iex> Repo.insert!(weather)
%Weather{...}
After persisting `weather` to the database, it will return a new copy of
`%Weather{}` with the primary key (the `id`) set. We can use this value
to read a struct back from the repository:
# Get the struct back
iex> weather = Repo.get Weather, 1
%Weather{id: 1, ...}
# Delete it
iex> Repo.delete!(weather)
%Weather{...}
> NOTE: by using `Ecto.Schema`, an `:id` field with type `:id` (:id means :integer) is
> generated by default, which is the primary key of the Schema. If you want
> to use a different primary key, you can declare custom `@primary_key`
> before the `schema/2` call. Consult the `Ecto.Schema` documentation
> for more information.
Notice how the storage (repository) and the data are decoupled. This provides
two main benefits:
* By having structs as data, we guarantee they are light-weight,
serializable structures. In many languages, the data is often represented
by large, complex objects, with entwined state transactions, which makes
serialization, maintenance and understanding hard;
* You do not need to define schemas in order to interact with repositories,
operations like `all`, `insert_all` and so on allow developers to directly
access and modify the data, keeping the database at your fingertips when
necessary;
## Changesets
Although in the example above we have directly inserted and deleted the
struct in the repository, operations on top of schemas are done through
changesets so Ecto can efficiently track changes.
Changesets allow developers to filter, cast, and validate changes before
we apply them to the data. Imagine the given schema:
defmodule User do
use Ecto.Schema
import Ecto.Changeset
schema "users" do
field :name
field :email
field :age, :integer
end
def changeset(user, params \\ %{}) do
user
|> cast(params, [:name, :email, :age])
|> validate_required([:name, :email])
|> validate_format(:email, ~r/@/)
|> validate_inclusion(:age, 18..100)
end
end
The `changeset/2` function first invokes `Ecto.Changeset.cast/4` with
the struct, the parameters and a list of allowed fields; this returns a changeset.
  The parameters argument is a map with binary keys and values that will
  be cast based on the types defined in the schema.
Any parameter that was not explicitly listed in the fields list will be ignored.
After casting, the changeset is given to many `Ecto.Changeset.validate_*/2`
functions that validate only the **changed fields**. In other words:
if a field was not given as a parameter, it won't be validated at all.
  For example, if the params map contains only the "name" and "email" keys,
the "age" validation won't run.
Once a changeset is built, it can be given to functions like `insert` and
`update` in the repository that will return an `:ok` or `:error` tuple:
case Repo.update(changeset) do
{:ok, user} ->
# user updated
{:error, changeset} ->
# an error occurred
end
The benefit of having explicit changesets is that we can easily provide
different changesets for different use cases. For example, one
could easily provide specific changesets for registering and updating
users:
def registration_changeset(user, params) do
# Changeset on create
end
def update_changeset(user, params) do
# Changeset on update
end
Changesets are also capable of transforming database constraints,
like unique indexes and foreign key checks, into errors. Allowing
developers to keep their database consistent while still providing
proper feedback to end users. Check `Ecto.Changeset.unique_constraint/3`
for some examples as well as the other `_constraint` functions.
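  For instance, a minimal sketch that converts a violation of a
  hypothetical unique index on `users.email` into a changeset error:
      def registration_changeset(user, params) do
        user
        |> cast(params, [:name, :email, :age])
        |> validate_required([:name, :email])
        |> unique_constraint(:email)
      end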
## Query
Last but not least, Ecto allows you to write queries in Elixir and send
them to the repository, which translates them to the underlying database.
Let's see an example:
import Ecto.Query, only: [from: 2]
query = from u in User,
where: u.age > 18 or is_nil(u.email),
select: u
# Returns %User{} structs matching the query
Repo.all(query)
In the example above we relied on our schema but queries can also be
made directly against a table by giving the table name as a string. In
such cases, the data to be fetched must be explicitly outlined:
query = from u in "users",
where: u.age > 18 or is_nil(u.email),
select: %{name: u.name, age: u.age}
# Returns maps as defined in select
Repo.all(query)
Queries are defined and extended with the `from` macro. The supported
keywords are:
* `:distinct`
* `:where`
* `:order_by`
* `:offset`
* `:limit`
* `:lock`
* `:group_by`
* `:having`
* `:join`
* `:select`
* `:preload`
Examples and detailed documentation for each of those are available
in the `Ecto.Query` module. Functions supported in queries are listed
in `Ecto.Query.API`.
When writing a query, you are inside Ecto's query syntax. In order to
access params values or invoke Elixir functions, you need to use the `^`
operator, which is overloaded by Ecto:
def min_age(min) do
from u in User, where: u.age > ^min
end
Besides `Repo.all/1` which returns all entries, repositories also
provide `Repo.one/1` which returns one entry or nil, `Repo.one!/1`
which returns one entry or raises, `Repo.get/2` which fetches
entries for a particular ID and more.
Finally, if you need an escape hatch, Ecto provides fragments
(see `Ecto.Query.API.fragment/1`) to inject SQL (and non-SQL)
fragments into queries. Also, most adapters provide direct
APIs for queries, like `Ecto.Adapters.SQL.query/4`, allowing
developers to completely bypass Ecto queries.
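As a rough sketch of a fragment, assuming a bound `email` variable (the
`lower/1` call is plain SQL):
from u in User,
where: fragment("lower(?)", u.email) == ^String.downcase(email)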
## Other topics
### Associations
Ecto supports defining associations on schemas:
defmodule Post do
use Ecto.Schema
schema "posts" do
has_many :comments, Comment
end
end
defmodule Comment do
use Ecto.Schema
schema "comments" do
field :title, :string
belongs_to :post, Post
end
end
When an association is defined, Ecto also defines a field in the schema
with the association name. By default, associations are not loaded into
this field:
iex> post = Repo.get(Post, 42)
iex> post.comments
#Ecto.Association.NotLoaded<...>
However, developers can use the preload functionality in queries to
automatically pre-populate the field:
Repo.all from p in Post, preload: [:comments]
Preloading can also be done with a pre-defined join value:
Repo.all from p in Post,
join: c in assoc(p, :comments),
where: c.votes > p.votes,
preload: [comments: c]
Finally, for the simple cases, preloading can also be done after
a collection was fetched:
posts = Repo.all(Post) |> Repo.preload(:comments)
The `Ecto` module also provides conveniences for working
with associations. For example, `Ecto.assoc/2` returns a query
with all associated data to a given struct:
import Ecto
# Get all comments for the given post
Repo.all assoc(post, :comments)
# Or build a query on top of the associated comments
query = from c in assoc(post, :comments), where: not is_nil(c.title)
Repo.all(query)
Another function in `Ecto` is `build_assoc/3`, which allows
someone to build an associated struct with the proper fields:
Repo.transaction fn ->
post = Repo.insert!(%Post{title: "Hello", body: "world"})
# Build a comment from post
comment = Ecto.build_assoc(post, :comments, body: "Excellent!")
Repo.insert!(comment)
end
In the example above, `Ecto.build_assoc/3` is equivalent to:
%Comment{post_id: post.id, body: "Excellent!"}
You can find more information about defining associations and each
respective association module in `Ecto.Schema` docs.
> NOTE: Ecto does not lazy load associations. While lazily loading
> associations may sound convenient at first, in the long run it
> becomes a source of confusion and performance issues.
### Embeds
Ecto also supports embeds. While associations keep parent and child
entries in different tables, embeds store the child alongside the
parent.
Databases like MongoDB have native support for embeds. Databases
like PostgreSQL use a mixture of JSONB (`embeds_one/3`) and ARRAY
(`embeds_many/3`) columns to provide this functionality.
Check `Ecto.Schema.embeds_one/3` and `Ecto.Schema.embeds_many/3`
for more information.
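For illustration, a minimal sketch of an embedded address (the `Order` and
`Address` schemas are hypothetical):
defmodule Order do
use Ecto.Schema
schema "orders" do
# stored in a single JSONB column on PostgreSQL
embeds_one :shipping_address, Address
end
end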
### Mix tasks and generators
Ecto provides many tasks to help your workflow as well as code generators.
You can find all available tasks by typing `mix help` inside a project
with Ecto listed as a dependency.
Ecto generators will automatically open the generated files if you have
the `ECTO_EDITOR` environment variable set.
#### Migrations
Ecto supports database migrations. You can generate a migration with:
$ mix ecto.gen.migration create_posts
This will create a new file inside `priv/repo/migrations` with the `change`
function. Check `Ecto.Migration` for more information.
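A generated migration might be filled in like this sketch (the table and
column names are illustrative):
defmodule MyApp.Repo.Migrations.CreatePosts do
use Ecto.Migration
def change do
create table(:posts) do
add :title, :string
timestamps()
end
end
end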
#### Repo resolution
Ecto requires developers to specify the key `:ecto_repos` in their application
configuration before using tasks like `ecto.create` and `ecto.migrate`. For example:
config :my_app, :ecto_repos, [MyApp.Repo]
config :my_app, MyApp.Repo,
adapter: Ecto.Adapters.Postgres,
database: "ecto_simple",
username: "postgres",
password: "<PASSWORD>",
hostname: "localhost"
"""
@doc """
Returns the schema primary keys as a keyword list.
"""
@spec primary_key(Ecto.Schema.t) :: Keyword.t
def primary_key(%{__struct__: schema} = struct) do
Enum.map schema.__schema__(:primary_key), fn(field) ->
{field, Map.fetch!(struct, field)}
end
end
@doc """
Returns the schema primary keys as a keyword list.
Raises `Ecto.NoPrimaryKeyFieldError` if the schema has no
primary key field.
"""
@spec primary_key!(Ecto.Schema.t) :: Keyword.t | no_return
def primary_key!(%{__struct__: schema} = struct) do
case primary_key(struct) do
[] -> raise Ecto.NoPrimaryKeyFieldError, schema: schema
pk -> pk
end
end
@doc """
Builds a struct from the given `assoc` in `struct`.
## Examples
If the relationship is a `has_one` or `has_many` and
the key is set in the given struct, the key will automatically
be set in the built association:
iex> post = Repo.get(Post, 13)
%Post{id: 13}
iex> build_assoc(post, :comments)
%Comment{id: nil, post_id: 13}
Note though it doesn't happen with `belongs_to` cases, as the
key is often the primary key and such is usually generated
dynamically:
iex> comment = Repo.get(Comment, 13)
%Comment{id: 13, post_id: 25}
iex> build_assoc(comment, :post)
%Post{id: nil}
You can also pass the attributes, which can be a map or
a keyword list, to set the struct's fields except the
association key.
iex> build_assoc(post, :comments, text: "cool")
%Comment{id: nil, post_id: 13, text: "cool"}
iex> build_assoc(post, :comments, %{text: "cool"})
%Comment{id: nil, post_id: 13, text: "cool"}
iex> build_assoc(post, :comments, post_id: 1)
%Comment{id: nil, post_id: 13}
"""
def build_assoc(%{__struct__: schema} = struct, assoc, attributes \\ %{}) do
assoc = Ecto.Association.association_from_schema!(schema, assoc)
assoc.__struct__.build(assoc, struct, drop_meta(attributes))
end
defp drop_meta(%{} = attrs), do: Map.drop(attrs, [:__struct__, :__meta__])
defp drop_meta([_|_] = attrs), do: Keyword.drop(attrs, [:__struct__, :__meta__])
@doc """
Builds a query for the association in the given struct or structs.
## Examples
In the example below, we get all comments associated to the given
post:
post = Repo.get Post, 1
Repo.all Ecto.assoc(post, :comments)
`assoc/2` can also receive a list of posts, as long as the posts are
not empty:
posts = Repo.all from p in Post, where: is_nil(p.published_at)
Repo.all Ecto.assoc(posts, :comments)
This function can also be used to dynamically load through associations
by giving it a list. For example, to get all authors for all comments for
the given posts, do:
posts = Repo.all from p in Post, where: is_nil(p.published_at)
Repo.all Ecto.assoc(posts, [:comments, :author])
"""
def assoc(struct_or_structs, assocs) do
[assoc | assocs] = List.wrap(assocs)
structs = List.wrap(struct_or_structs)
if structs == [] do
raise ArgumentError, "cannot retrieve association #{inspect assoc} for empty list"
end
schema = hd(structs).__struct__
assoc = %{owner_key: owner_key} =
Ecto.Association.association_from_schema!(schema, assoc)
values =
Enum.uniq for(struct <- structs,
assert_struct!(schema, struct),
key = Map.fetch!(struct, owner_key),
do: key)
Ecto.Association.assoc_query(assoc, assocs, nil, values)
end
@doc """
Checks if an association is loaded.
## Examples
iex> post = Repo.get(Post, 1)
iex> Ecto.assoc_loaded?(post.comments)
false
iex> post = post |> Repo.preload(:comments)
iex> Ecto.assoc_loaded?(post.comments)
true
"""
def assoc_loaded?(association) do
case association do
%Ecto.Association.NotLoaded{} -> false
_ -> true
end
end
@doc """
Gets the metadata from the given struct.
"""
def get_meta(struct, :context),
do: struct.__meta__.context
def get_meta(struct, :state),
do: struct.__meta__.state
def get_meta(struct, :source),
do: struct.__meta__.source |> elem(1)
def get_meta(struct, :prefix),
do: struct.__meta__.source |> elem(0)
@doc """
Returns a new struct with updated metadata.
It is possible to set:
* `:source` - changes the struct query source
* `:prefix` - changes the struct query prefix
* `:context` - changes the struct meta context
* `:state` - changes the struct state
"""
@spec put_meta(Ecto.Schema.t, [source: String.t, prefix: String.t,
context: term, state: :built | :loaded | :deleted]) :: Ecto.Schema.t
def put_meta(struct, opts) do
update_in struct.__meta__, &update_meta(opts, &1)
end
defp update_meta([{:state, state}|t], meta) do
if state in [:built, :loaded, :deleted] do
update_meta t, %{meta | state: state}
else
raise ArgumentError, "invalid state #{inspect state}"
end
end
defp update_meta([{:source, source}|t], %{source: {prefix, _}} = meta) do
update_meta t, %{meta | source: {prefix, source}}
end
defp update_meta([{:prefix, prefix}|t], %{source: {_, source}} = meta) do
update_meta t, %{meta | source: {prefix, source}}
end
defp update_meta([{:context, context}|t], meta) do
update_meta t, %{meta | context: context}
end
defp update_meta([], meta) do
meta
end
defp update_meta([{k, _}], _meta) do
raise ArgumentError, "unknown meta key #{inspect k}"
end
defp assert_struct!(module, %{__struct__: struct}) do
if struct != module do
raise ArgumentError, "expected a homogeneous list containing the same struct, " <>
"got: #{inspect module} and #{inspect struct}"
else
true
end
end
end
|
lib/ecto.ex
| 0.8789
| 0.620047
|
ecto.ex
|
starcoder
|
defmodule Flickrex.Flickr do
@moduledoc """
Flickr API Modules.
These modules and functions map to the methods from the Flickr [API
Documentation](https://www.flickr.com/services/api/).
Arguments for the API functions should be strings, or integers (if the API
accepts a number). Any additional `opts` will be set as params for the Rest
operation.
Each function returns an operation that can be executed with
`Flickrex.request/2`.
Some Flickr methods require user access tokens that were granted read, write,
or delete permissions.
## Examples
Get the five most recent public photos:
get_recent = Flickrex.Flickr.Photos.get_recent(per_page: 5)
{:ok, resp} = Flickrex.request(get_recent)
%{"photos" => photos} = resp.body
Test logging in as a user, by configuring the tokens for the request:
config = [oauth_token: "...", oauth_token_secret: "..."]
{:ok, resp} = Flickrex.Flickr.Test.login() |> Flickrex.request(config)
%{"user" => user} = resp.body
The API methods will return an error tuple if there was a problem with the
request:
{:error, resp} = Flickrex.Flickr.Photos.get_info(nil) |> Flickrex.request()
resp.body == %{"code" => 1, "message" => "Photo not found", "stat" => "fail"}
"""
# Flickr directory
flickr_dir = Path.join(File.cwd!(), "/lib/flickr")
# A local copy of "flickr.reflection.getMethods"
methods_file = Path.join(flickr_dir, "flickr.reflection.getMethods.json")
# Directory holding a JSON info file for each method
methods_dir = Path.join(flickr_dir, "getMethodInfo")
methods_files = File.ls!(methods_dir)
# Recompile this module when the source files change
@external_resource methods_file
for method_file <- methods_files do
@external_resource Path.join(methods_dir, method_file)
end
# Group Flickr methods based on the module they will be included in
methods_modules =
methods_file
|> File.read!()
|> Jason.decode!()
|> get_in(["methods", "method"])
|> Enum.map(fn %{"_content" => m} -> m end)
|> Enum.group_by(fn m -> m |> String.split(".") |> List.delete_at(-1) end)
for {[_ | namespaces], methods} <- methods_modules do
# Generate a module name for each method namespace, e.g.
# `Flickrex.Flickr.Photos.People` for "flickr.photos.people"
aliases = ["flickrex", "flickr"] ++ namespaces
module =
aliases
|> Enum.map(&String.capitalize/1)
|> Enum.map(&String.to_atom/1)
|> Module.concat()
defmodule module do
alias Flickrex.Decoder
@type arg :: String.Chars.t()
@type opts :: Flickrex.Rest.args()
@type operation :: Flickrex.Operation.Rest.t()
for method <- methods do
# Generate the function name from the method, e.g. `get_list` for
# "flickr.photos.people.getList"
function =
method
|> String.split(".")
|> Enum.reverse()
|> List.first()
|> Macro.underscore()
|> String.to_atom()
method_file = "#{method}.json"
method_info =
case method_file in methods_files do
true ->
methods_dir
|> Path.join(method_file)
|> File.read!()
|> Jason.decode!()
false ->
%{}
end
description =
method_info
|> get_in(["method", "description", "_content"])
|> String.replace("\"/services/api/", "\"https://www.flickr.com/services/api/")
|> String.replace(~r/<br\ ?\/>/, " ")
needs_login = get_in(method_info, ["method", "needslogin"])
requiredperms = get_in(method_info, ["method", "requiredperms"])
method_response =
case get_in(method_info, ["method", "response", "_content"]) do
nil ->
nil
"<?xml" <> _rest = xml_doc ->
xml_doc
"<rsp" <> _rest = rsp_tag ->
rsp_tag
content_tag ->
"""
<rsp stat="ok">
#{content_tag}
</rsp>
"""
end
example_response =
case method_response do
nil ->
nil
response_content ->
response_content
|> String.replace(~r/<!-- .* -->/U, "")
|> Decoder.XML.parse_data()
end
permission_code =
if is_binary(requiredperms) do
String.to_integer(requiredperms)
else
requiredperms
end
permission =
case permission_code do
0 -> "no"
1 -> "read"
2 -> "write"
3 -> "delete"
end
arguments =
method_info
|> get_in(["arguments", "argument"])
|> Enum.reject(fn a -> a["name"] == "api_key" end)
|> Enum.map(fn a ->
case is_binary(a["optional"]) do
true -> Map.put(a, "optional", String.to_integer(a["optional"]))
false -> a
end
end)
|> Enum.map(fn argument ->
content =
argument
|> Map.get("_content")
|> String.replace("\n", " ")
|> String.replace("\"/services/api/", "\"https://www.flickr.com/services/api/")
Map.put(argument, "_content", content)
end)
{required_args, optional_args} =
Enum.split_with(arguments, fn
%{"optional" => 0} -> true
_ -> false
end)
args_names = Enum.map(required_args, fn %{"name" => name} -> String.to_atom(name) end)
args_vars = Enum.map(args_names, &Macro.var(&1, __MODULE__))
args_specs = Enum.map(args_names, fn _ -> Macro.var(:arg, __MODULE__) end)
args_params = Enum.zip(args_names, args_vars)
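# For required args such as [:photo_id], this yields a function head like
# `def get_info(photo_id, opts \\ [])` with params [photo_id: photo_id].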
doc_source = """
<%= @description %>
<%= if @needs_login == 1 do %>
This method requires authentication with "<%= @permission %>" permission.
<% else %>
This method does not require authentication.
<% end %>
<%= if length(@required_args) > 0 do %>
## Arguments
<% end %>
<%= for arg <- @required_args do %>
* `<%= arg["name"] %>` - <%= arg["_content"] %>
<% end %>
<%= if length(@optional_args) > 0 do %>
## Options
<% end %>
<%= for arg <- @optional_args do %>
* `<%= arg["name"] %>` - <%= arg["_content"] %>
<% end %>
<%= unless is_nil(@example_response) do %>
## Example response
```elixir
<%= inspect(@example_response, pretty: true) %>
```
<% end %>
"""
assigns = [
description: description,
required_args: required_args,
optional_args: optional_args,
example_response: example_response,
needs_login: needs_login,
permission: permission
]
doc = EEx.eval_string(doc_source, assigns: assigns)
verb =
case permission_code do
0 -> :get
1 -> :get
2 -> :post
3 -> :post
end
function_arity = length(required_args) + 1
if function_arity > 1 do
@doc false
@spec unquote(function)() :: operation
def unquote(function)() do
unquote(function)([])
end
@doc false
@spec unquote(function)(opts) :: operation
def unquote(function)(opts) when is_list(opts) do
IO.warn(
"calling `#{unquote(function)}/1` with required arguments as options is " <>
"deprecated. Use `#{unquote(function)}/#{unquote(function_arity)}` instead."
)
Flickrex.Rest.unquote(verb)(unquote(method), opts)
end
end
@doc doc
@spec unquote(function)(unquote_splicing(args_specs), opts) :: operation
def unquote(function)(unquote_splicing(args_vars), opts \\ []) do
Flickrex.Rest.unquote(verb)(unquote(method), unquote(args_params) ++ opts)
end
end
end
end
end
|
lib/flickrex/flickr.ex
| 0.802478
| 0.456228
|
flickr.ex
|
starcoder
|
defmodule Circuits.UART.Framing do
@moduledoc """
A behaviour for implementing framers for data received over a UART.
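As an illustration, a minimal sketch of a newline-delimited framer (the
module name and buffering strategy are illustrative, not part of this
library):
defmodule MyApp.LineFramer do
@behaviour Circuits.UART.Framing
def init(_args), do: {:ok, ""}
def add_framing(data, state), do: {:ok, data <> "\n", state}
def remove_framing(new_data, buffer) do
parts = String.split(buffer <> new_data, "\n")
{lines, [rest]} = Enum.split(parts, -1)
status = if rest == "", do: :ok, else: :in_frame
{status, lines, rest}
end
# return the partial frame rather than dropping it on timeout
def frame_timeout(buffer), do: {:ok, [buffer], ""}
def flush(_direction, _buffer), do: ""
end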
"""
@doc """
Initialize the state of the framer based on options passed to
`Circuits.UART.open/3`.
This function should return the initial state for the framer or
an error.
"""
@callback init(args :: term) :: {:ok, state} | {:error, reason} when state: term, reason: term
@doc """
Add framing to the passed in data.
The returned `frame_data` will be sent out the UART.
"""
@callback add_framing(data :: term, state :: term) ::
{:ok, framed_data, new_state} | {:error, reason, new_state}
when new_state: term,
framed_data: binary,
reason: term
@doc """
Remove the framing off received data. If a partial frame is left over at the
end, then `:in_frame` should be returned. All of the frames received should
be returned in the second tuple.
The terms returned as the second part of the tuple can be anything. They can be
the binary messages without the framing, structs based on your commands, or anything
else. If you have errors in the protocol, for example a bad checksum, one convention
is to return an error tuple `{:error, :echksum, message}`.
For debugging you may want to include the message and framing with the error for
simpler debugging.
"""
@callback remove_framing(new_data :: binary, state :: term) ::
{:in_frame, [term], new_state} | {:ok, [term], new_state}
when new_state: term
@doc """
If `remove_framing/2` returned `:in_frame` and a user-specified timeout for
reassembling frames has elapsed, then this function is called. Depending on
the semantics of the framing, a partial frame may be returned or the
incomplete frame may be dropped.
"""
@callback frame_timeout(state :: term) :: {:ok, [term], new_state} when new_state: term
@doc """
This is called when the user invokes `Circuits.UART.flush/2`. Any partially
received frames should be dropped.
"""
@callback flush(direction :: :receive | :transmit | :both, state :: term) :: new_state
when new_state: term
end
|
lib/uart/framing.ex
| 0.836821
| 0.576393
|
framing.ex
|
starcoder
|
defmodule AWS.DataSync do
@moduledoc """
AWS DataSync
AWS DataSync is a managed data transfer service that makes it simpler for
you to automate moving data between on-premises storage and Amazon Simple
Storage Service (Amazon S3) or Amazon Elastic File System (Amazon EFS).
This API interface reference for AWS DataSync contains documentation for a
programming interface that you can use to manage AWS DataSync.
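## Example
A rough usage sketch; the client fields shown are illustrative and should
match however your `AWS.Client` is configured:
client = %AWS.Client{
access_key_id: "AKIA...",
secret_access_key: "...",
region: "us-east-1",
endpoint: "amazonaws.com",
proto: "https",
port: 443
}
{:ok, result, _response} = AWS.DataSync.list_tasks(client, %{})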
"""
@doc """
Cancels execution of a task.
When you cancel a task execution, the transfer of some files is abruptly
interrupted. The contents of files that are transferred to the destination
might be incomplete or inconsistent with the source files. However, if you
start a new task execution on the same task and you allow the task
execution to complete, file content on the destination is complete and
consistent. This applies to other unexpected failures that interrupt a task
execution. In all of these cases, AWS DataSync successfully completes the
transfer when you start the next task execution.
"""
def cancel_task_execution(client, input, options \\ []) do
request(client, "CancelTaskExecution", input, options)
end
@doc """
Activates an AWS DataSync agent that you have deployed on your host. The
activation process associates your agent with your account. In the
activation process, you specify information such as the AWS Region that you
want to activate the agent in. You activate the agent in the AWS Region
where your target locations (in Amazon S3 or Amazon EFS) reside. Your tasks
are created in this AWS Region.
You can activate the agent in a VPC (virtual private cloud) or provide the
agent access to a VPC endpoint so you can run tasks without going over the
public internet.
You can use an agent for more than one location. If a task uses multiple
agents, all of them need to have status AVAILABLE for the task to run. If
you use multiple agents for a source location, the status of all the agents
must be AVAILABLE for the task to run.
Agents are automatically updated by AWS on a regular basis, using a
mechanism that ensures minimal interruption to your tasks.
"""
def create_agent(client, input, options \\ []) do
request(client, "CreateAgent", input, options)
end
@doc """
Creates an endpoint for an Amazon EFS file system.
"""
def create_location_efs(client, input, options \\ []) do
request(client, "CreateLocationEfs", input, options)
end
@doc """
Creates an endpoint for an Amazon FSx for Windows file system.
"""
def create_location_fsx_windows(client, input, options \\ []) do
request(client, "CreateLocationFsxWindows", input, options)
end
@doc """
Defines a file system on a Network File System (NFS) server that can be
read from or written to.
"""
def create_location_nfs(client, input, options \\ []) do
request(client, "CreateLocationNfs", input, options)
end
@doc """
Creates an endpoint for a self-managed object storage bucket. For more
information about self-managed object storage locations, see
`create-object-location`.
"""
def create_location_object_storage(client, input, options \\ []) do
request(client, "CreateLocationObjectStorage", input, options)
end
@doc """
Creates an endpoint for an Amazon S3 bucket.
For more information, see
https://docs.aws.amazon.com/datasync/latest/userguide/create-locations-cli.html#create-location-s3-cli
in the *AWS DataSync User Guide*.
"""
def create_location_s3(client, input, options \\ []) do
request(client, "CreateLocationS3", input, options)
end
@doc """
Defines a file system on a Server Message Block (SMB) server that can be
read from or written to.
"""
def create_location_smb(client, input, options \\ []) do
request(client, "CreateLocationSmb", input, options)
end
@doc """
Creates a task. A task is a set of two locations (source and destination)
and a set of Options that you use to control the behavior of a task. If you
don't specify Options when you create a task, AWS DataSync populates them
with service defaults.
When you create a task, it first enters the CREATING state. During CREATING
AWS DataSync attempts to mount the on-premises Network File System (NFS)
location. The task transitions to the AVAILABLE state without waiting for
the AWS location to become mounted. If required, AWS DataSync mounts the
AWS location before each task execution.
If an agent that is associated with a source (NFS) location goes offline,
the task transitions to the UNAVAILABLE status. If the status of the task
remains in the CREATING status for more than a few minutes, it means that
your agent might be having trouble mounting the source NFS file system.
Check the task's ErrorCode and ErrorDetail. Mount issues are often caused
by either a misconfigured firewall or a mistyped NFS server hostname.
"""
def create_task(client, input, options \\ []) do
request(client, "CreateTask", input, options)
end
@doc """
Deletes an agent. To specify which agent to delete, use the Amazon Resource
Name (ARN) of the agent in your request. The operation disassociates the
agent from your AWS account. However, it doesn't delete the agent virtual
machine (VM) from your on-premises environment.
"""
def delete_agent(client, input, options \\ []) do
request(client, "DeleteAgent", input, options)
end
@doc """
Deletes the configuration of a location used by AWS DataSync.
"""
def delete_location(client, input, options \\ []) do
request(client, "DeleteLocation", input, options)
end
@doc """
Deletes a task.
"""
def delete_task(client, input, options \\ []) do
request(client, "DeleteTask", input, options)
end
@doc """
Returns metadata such as the name, the network interfaces, and the status
(that is, whether the agent is running or not) for an agent. To specify
which agent to describe, use the Amazon Resource Name (ARN) of the agent in
your request.
"""
def describe_agent(client, input, options \\ []) do
request(client, "DescribeAgent", input, options)
end
@doc """
Returns metadata, such as the path information about an Amazon EFS
location.
"""
def describe_location_efs(client, input, options \\ []) do
request(client, "DescribeLocationEfs", input, options)
end
@doc """
Returns metadata, such as the path information about an Amazon FSx for
Windows location.
"""
def describe_location_fsx_windows(client, input, options \\ []) do
request(client, "DescribeLocationFsxWindows", input, options)
end
@doc """
Returns metadata, such as the path information, about an NFS location.
"""
def describe_location_nfs(client, input, options \\ []) do
request(client, "DescribeLocationNfs", input, options)
end
@doc """
Returns metadata about a self-managed object storage server location. For
more information about self-managed object storage locations, see
`create-object-location`.
"""
def describe_location_object_storage(client, input, options \\ []) do
request(client, "DescribeLocationObjectStorage", input, options)
end
@doc """
Returns metadata, such as bucket name, about an Amazon S3 bucket location.
"""
def describe_location_s3(client, input, options \\ []) do
request(client, "DescribeLocationS3", input, options)
end
@doc """
Returns metadata, such as the path and user information about an SMB
location.
"""
def describe_location_smb(client, input, options \\ []) do
request(client, "DescribeLocationSmb", input, options)
end
@doc """
Returns metadata about a task.
"""
def describe_task(client, input, options \\ []) do
request(client, "DescribeTask", input, options)
end
@doc """
Returns detailed metadata about a task that is being executed.
"""
def describe_task_execution(client, input, options \\ []) do
request(client, "DescribeTaskExecution", input, options)
end
@doc """
Returns a list of agents owned by an AWS account in the AWS Region
specified in the request. The returned list is ordered by agent Amazon
Resource Name (ARN).
By default, this operation returns a maximum of 100 agents. This operation
supports pagination that enables you to optionally reduce the number of
agents returned in a response.
If you have more agents than are returned in a response (that is, the
response returns only a truncated list of your agents), the response
contains a marker that you can specify in your next request to fetch the
next page of agents.
"""
def list_agents(client, input, options \\ []) do
request(client, "ListAgents", input, options)
end
@doc """
Returns a list of source and destination locations.
If you have more locations than are returned in a response (that is, the
response returns only a truncated list of your locations), the response
contains a token that you can specify in your next request to fetch the
next page of locations.
"""
def list_locations(client, input, options \\ []) do
request(client, "ListLocations", input, options)
end
@doc """
Returns all the tags associated with a specified resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Returns a list of executed tasks.
"""
def list_task_executions(client, input, options \\ []) do
request(client, "ListTaskExecutions", input, options)
end
@doc """
Returns a list of all the tasks.
"""
def list_tasks(client, input, options \\ []) do
request(client, "ListTasks", input, options)
end
@doc """
Starts a specific invocation of a task. A `TaskExecution` value represents
an individual run of a task. Each task can have at most one `TaskExecution`
at a time.
`TaskExecution` has the following transition phases: INITIALIZING |
PREPARING | TRANSFERRING | VERIFYING | SUCCESS/FAILURE.
For detailed information, see the Task Execution section in the Components
and Terminology topic in the *AWS DataSync User Guide*.
"""
def start_task_execution(client, input, options \\ []) do
request(client, "StartTaskExecution", input, options)
end
@doc """
Applies a key-value pair to an AWS resource.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes a tag from an AWS resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates the name of an agent.
"""
def update_agent(client, input, options \\ []) do
request(client, "UpdateAgent", input, options)
end
@doc """
Updates the metadata associated with a task.
"""
def update_task(client, input, options \\ []) do
request(client, "UpdateTask", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "datasync"}
host = build_host("datasync", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "FmrsService.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/data_sync.ex
| 0.801781
| 0.623749
|
data_sync.ex
|
starcoder
|
defmodule ExPlasma.Transaction.Signed do
@moduledoc """
Holds functions related to transactions containing signatures.
"""
alias ExPlasma.Crypto
alias ExPlasma.Signature
alias ExPlasma.Transaction
alias ExPlasma.Transaction.Witness
alias ExPlasma.TypedData
alias ExPlasma.Utils.RlpDecoder
@type tx_bytes() :: binary()
@type decoding_error() :: :malformed_rlp | :malformed_transaction | :malformed_witnesses
@type validation_error() :: {:witnesses, :malformed_witnesses}
@type sigs() :: list(Crypto.sig_t()) | []
@doc """
Decodes a binary expecting it to represent a signed transactions with
the signatures being the first element of the decoded RLP list.
Returns {:ok, signed_tx_rlp_items} if the encoded RLP can be decoded,
or {:error, atom} otherwise.
Only validates that the RLP is structurally correct.
Does not perform any other kind of validation; use validate/1 for that.
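A rough usage sketch (`signed_tx_bytes` is assumed to be the RLP-encoded
binary of a signed transaction):
{:ok, [sigs | typed_tx_rlp_items]} = ExPlasma.Transaction.Signed.decode(signed_tx_bytes)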
"""
@spec decode(tx_bytes()) :: {:ok, list()} | {:error, decoding_error()}
def decode(signed_tx_bytes) do
with {:ok, tx_rlp_items} <- RlpDecoder.decode(signed_tx_bytes),
{:ok, signed_tx_rlp_items} <- validate_rlp_items(tx_rlp_items) do
{:ok, signed_tx_rlp_items}
end
end
@doc """
Validate a signed transaction.
Returns :ok if valid or {:error, {:witnesses, :malformed_witnesses}} otherwise.
"""
@spec validate(Transaction.t()) :: :ok | {:error, validation_error()}
def validate(transaction), do: validate_sigs(transaction.sigs)
@doc """
Recovers the witnesses for non-empty signatures, in the order they appear in transaction's signatures.
Returns {:ok, witness_list} if witnesses are recoverable,
or {:error, :corrupted_witness} otherwise.
"""
@spec get_witnesses(Transaction.t()) :: {:ok, list(Witness.t())} | {:error, Witness.recovery_error()}
def get_witnesses(%Transaction{sigs: []}), do: {:ok, []}
def get_witnesses(transaction) do
hash = TypedData.hash(transaction)
transaction.sigs
|> Enum.reverse()
|> Enum.reduce_while({:ok, []}, fn signature, {:ok, addresses} ->
case Witness.recover(hash, signature) do
{:ok, address} ->
{:cont, {:ok, [address | addresses]}}
error ->
{:halt, error}
end
end)
end
@spec compute_signatures(Transaction.t(), list(String.t())) :: {:ok, sigs()} | {:error, :not_signable}
def compute_signatures(transaction, keys) when is_list(keys) do
case TypedData.impl_for(transaction) do
nil ->
{:error, :not_signable}
_ ->
eip712_hash = TypedData.hash(transaction)
sigs = Enum.map(keys, fn key -> Signature.signature_digest(eip712_hash, key) end)
{:ok, sigs}
end
end
defp validate_rlp_items([sigs | _typed_tx_rlp_items] = rlp) when is_list(sigs), do: {:ok, rlp}
defp validate_rlp_items([_sigs | _typed_tx_rlp_items]), do: {:error, :malformed_witnesses}
defp validate_rlp_items(_), do: {:error, :malformed_transaction}
defp validate_sigs([sig | rest]) do
case Witness.valid?(sig) do
true -> validate_sigs(rest)
false -> {:error, {:witnesses, :malformed_witnesses}}
end
end
defp validate_sigs([]), do: :ok
end
|
lib/ex_plasma/transaction/signed.ex
| 0.920883
| 0.543954
|
signed.ex
|
starcoder
|
defmodule Mix.Tasks.Cadet.Users.Import do
@moduledoc """
Import user and grouping information from several csv files.
To use this, you need to prepare 3 csv files:
1. List of all the students together with their group names
2. List of all the leaders together with their group names
3. List of all the mentors together with their group names
Each file must be a comma-separated csv in this format:
```
name,username,group_name
```
(Username could be e.g. NUSNET ID)
Note that group names must be unique.
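Run the task (it prompts for each csv path; leave a path blank to skip it):
$ mix cadet.users.import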
"""
@shortdoc "Import user and grouping information from csv files."
use Mix.Task
require Logger
alias Cadet.{Accounts, Course, Repo}
alias Cadet.Course.Group
alias Cadet.Accounts.User
def run(_args) do
# Required for Ecto to work properly, from Mix.Ecto
if function_exported?(Mix.Task, :run, 2), do: Mix.Task.run("app.start")
students_csv_path = trimmed_gets("Path to students csv (leave blank to skip): ")
leaders_csv_path = trimmed_gets("Path to leaders csv (leave blank to skip): ")
mentors_csv_path = trimmed_gets("Path to mentors csv (leave blank to skip): ")
Repo.transaction(fn ->
students_csv_path != "" && process_students_csv(students_csv_path)
leaders_csv_path != "" && process_leaders_csv(leaders_csv_path)
mentors_csv_path != "" && process_mentors_csv(mentors_csv_path)
end)
end
defp process_students_csv(path) when is_binary(path) do
if File.exists?(path) do
csv_stream = path |> File.stream!() |> CSV.decode(strip_fields: true)
for {:ok, [name, username, group_name]} <- csv_stream do
with {:ok, group = %Group{}} <- Course.get_or_create_group(group_name),
{:ok, %User{}} <-
Accounts.insert_or_update_user(%{
username: username,
name: name,
role: :student,
group: group
}) do
:ok
else
error ->
Logger.error(
"Unable to insert student (name: #{name}, username: #{username}, " <>
"group_name: #{group_name})"
)
Logger.error("error: #{inspect(error, pretty: true)}")
Repo.rollback(error)
end
end
Logger.info("Imported students csv at #{path}")
else
Logger.error("Cannot find students csv at #{path}")
end
end
defp process_leaders_csv(path) when is_binary(path) do
if File.exists?(path) do
csv_stream = path |> File.stream!() |> CSV.decode(strip_fields: true)
for {:ok, [name, username, group_name]} <- csv_stream do
with {:ok, leader = %User{}} <-
Accounts.insert_or_update_user(%{username: username, name: name, role: :staff}),
{:ok, %Group{}} <-
Course.insert_or_update_group(%{name: group_name, leader: leader}) do
:ok
else
error ->
Logger.error(
"Unable to insert leader (name: #{name}, username: #{username}, " <>
"group_name: #{group_name})"
)
Logger.error("error: #{inspect(error, pretty: true)}")
Repo.rollback(error)
end
end
Logger.info("Imported leaders csv at #{path}")
else
Logger.error("Cannot find leaders csv at #{path}")
end
end
defp process_mentors_csv(path) when is_binary(path) do
if File.exists?(path) do
csv_stream = path |> File.stream!() |> CSV.decode(strip_fields: true)
for {:ok, [name, username, group_name]} <- csv_stream do
with {:ok, mentor = %User{}} <-
Accounts.insert_or_update_user(%{username: username, name: name, role: :staff}),
{:ok, %Group{}} <-
Course.insert_or_update_group(%{name: group_name, mentor: mentor}) do
:ok
else
error ->
Logger.error(
"Unable to insert mentor (name: #{name}, username: #{username}, " <>
"group_name: #{group_name})"
)
Logger.error("error: #{inspect(error, pretty: true)}")
Repo.rollback(error)
end
end
Logger.info("Imported mentors csv at #{path}")
else
Logger.error("Cannot find mentors csv at #{path}")
end
end
@spec trimmed_gets(String.t()) :: String.t()
defp trimmed_gets(prompt) when is_binary(prompt) do
prompt
|> IO.gets()
|> String.trim()
end
end
|
lib/mix/tasks/users/import.ex
| 0.66769
| 0.623878
|
import.ex
|
starcoder
|
defmodule Braintree.PaymentMethodNonce do
@moduledoc """
Create a payment method nonce from an existing payment method token
"""
use Braintree.Construction
alias Braintree.HTTP
alias Braintree.ErrorResponse, as: Error
@type t :: %__MODULE__{
default: String.t,
description: String.t,
nonce: String.t,
three_d_secure_info: String.t,
type: String.t,
details: map,
is_locked: boolean,
consumed: boolean,
security_questions: [any]
}
defstruct default: nil,
description: nil,
nonce: nil,
three_d_secure_info: nil,
type: nil,
is_locked: false,
details: nil,
consumed: false,
security_questions: nil
@doc """
Create a payment method nonce from `token`
## Example
{:ok, payment_method_nonce} = Braintree.PaymentMethodNonce.create(token)
payment_method_nonce.nonce
"""
@spec create(String.t, Keyword.t) :: {:ok, t} | {:error, Error.t}
def create(payment_method_token, opts \\ []) do
path = "payment_methods/#{payment_method_token}/nonces"
with {:ok, payload} <- HTTP.post(path, opts) do
{:ok, new(payload)}
end
end
@doc """
Find a payment method nonce, or return an error response if the token is invalid
## Example
{:ok, payment_method} = Braintree.PaymentMethodNonce.find(token)
payment_method.type #CreditCard
"""
@spec find(String.t, Keyword.t) :: {:ok, t} | {:error, Error.t}
def find(nonce, opts \\ []) do
path = "payment_method_nonces/" <> nonce
with {:ok, payload} <- HTTP.get(path, opts) do
{:ok, new(payload)}
end
end
@doc false
def new(%{"payment_method_nonce" => map}) do
super(map)
end
end
|
lib/payment_method_nonce.ex
| 0.888205
| 0.469581
|
payment_method_nonce.ex
|
starcoder
|
defmodule Snitch.Data.Schema.Promotion do
@moduledoc """
Models coupon based `promotions`.
Allows creation of PromoCodes and uses a set of rules to apply set of
actions on the payload to provide discounts.
"""
use Snitch.Data.Schema
alias Snitch.Data.Schema.{PromotionAction, PromotionRule}
alias Snitch.Tools.EctoType.UnixTimestamp
@typedoc """
Represents a promotion struct.
Fields
- `code`: Unique code to identify the promotion. Made available to user for
applying a promotion.
- `name`: A kind of label to identify the `promotion` with.
- `starts_at`: The time at which the promotion will start.
- `expires_at`: The time at which the promotion will end.
- `usage_limit`: This is used to set the number of times this code can be used
throughout its life for all the users.
- `current_usage_count`: Tracks the number of times the promotion has been used.
- `match_policy`: The policy used while checking the rules of an action: an
`all` policy means all the rules should be satisfied, whereas an `any` policy
requires any one of them to be satisfied.
- `active?`: Used to mark the promotion active or inactive.
- `archived_at`: This is used to check whether a promotion is archived. An
archived promotion is no longer active and is present only for record-keeping.
"""
@type t :: %__MODULE__{}
@match_policy ~w(all any)s
schema "snitch_promotions" do
field(:code, :string)
field(:name, :string)
field(:description, :string)
field(:starts_at, :utc_datetime_usec, default: DateTime.utc_now())
field(:expires_at, :utc_datetime_usec)
field(:usage_limit, :integer, default: 0)
field(:current_usage_count, :integer, default: 0)
field(:match_policy, :string, default: "all")
field(:active?, :boolean, default: false)
field(:archived_at, UnixTimestamp, default: 0)
# associations
has_many(:rules, PromotionRule, on_replace: :delete, on_delete: :delete_all)
has_many(:actions, PromotionAction, on_replace: :delete, on_delete: :delete_all)
timestamps()
end
@required_fields ~w(code name)a
@optional_fields ~w(description starts_at expires_at usage_limit match_policy
active? archived_at)a
@create_fields @optional_fields ++ @required_fields
@doc """
Returns a create changeset for `Promotion.t()`.
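## Example
A minimal sketch (the code and name values are illustrative):
params = %{code: "WELCOME10", name: "Welcome offer"}
changeset = Promotion.create_changeset(%Promotion{}, params)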
"""
def create_changeset(%__MODULE__{} = promotion, params) do
promotion
|> cast(params, @create_fields)
|> common_changeset()
|> cast_assoc(:rules, with: &PromotionRule.changeset/2)
|> cast_assoc(:actions, with: &PromotionAction.changeset/2)
end
@doc """
Returns a changeset to update the rules for a promotion.
### Note
- The function uses `cast_assoc` for managing associations, so the
rules specified by `cast_assoc` apply.
__See__
`Ecto.Changeset.cast_assoc(changeset, name, opts \\ [])`
- The `:rules` association needs to be preloaded before calling
this update function.
"""
def rule_update_changeset(%__MODULE__{} = promotion, params) do
promotion
|> cast(params, @create_fields)
|> common_changeset()
|> cast_assoc(:rules, with: &PromotionRule.changeset/2)
end
@doc """
Returns a changeset to update the actions for a promotion.
### Note
- The function uses `cast_assoc` for managing associations, so the
rules specified by `cast_assoc` apply.
__See__
`Ecto.Changeset.cast_assoc(changeset, name, opts \\ [])`
- The `:actions` association needs to be preloaded before calling
this update function.
- `on_replace: :nilify_all` is used for `actions` because, if a promotion
is updated and an action is removed from the params, the action record should
not be deleted, as it keeps track of adjustments against the order.
"""
def action_update_changeset(%__MODULE__{} = promotion, params) do
promotion
|> cast(params, @create_fields)
|> common_changeset()
|> cast_assoc(:actions, with: &PromotionAction.changeset/2)
end
@doc """
Returns an update changeset for `Promotion.t()`.
"""
def update_changeset(%__MODULE__{} = promotion, params) do
promotion
|> cast(params, @create_fields)
|> common_changeset()
|> cast_assoc(:rules, with: &PromotionRule.changeset/2)
|> cast_assoc(:actions, with: &PromotionAction.changeset/2)
end
defp common_changeset(changeset) do
changeset
|> validate_required(@required_fields)
|> validate_future_date(:expires_at)
|> validate_inclusion(:match_policy, @match_policy)
|> validate_starts_at_before_expiry()
|> unique_constraint(:code,
name: :unique_promotion_code,
message: "has already been taken"
)
end
# checks if `expires_at` is after `starts_at`
defp validate_starts_at_before_expiry(%Ecto.Changeset{valid?: true} = changeset) do
handle_start_and_expiry_date(
changeset,
get_change(changeset, :starts_at),
get_change(changeset, :expires_at)
)
end
defp validate_starts_at_before_expiry(changeset), do: changeset
defp handle_start_and_expiry_date(changeset, nil, nil) do
changeset
end
defp handle_start_and_expiry_date(changeset, nil = _starts_at, expires_at) do
{:data, date} = fetch_field(changeset, :starts_at)
handle_date_related_changeset(
changeset,
date,
expires_at,
:expires_at,
"expires_at should be after starts_at"
)
end
defp handle_start_and_expiry_date(changeset, starts_at, nil = _expires_at) do
{:data, date} = fetch_field(changeset, :expires_at)
handle_date_related_changeset(
changeset,
starts_at,
date,
:starts_at,
"starts_at should be before expires_at"
)
end
defp handle_start_and_expiry_date(changeset, starts_at, expires_at) do
handle_date_related_changeset(
changeset,
starts_at,
expires_at,
:expires_at,
"expires_at should be after starts_at"
)
end
defp handle_date_related_changeset(changeset, starts_at, expires_at, key, error) do
if DateTime.compare(expires_at, starts_at) == :gt do
changeset
else
add_error(changeset, key, error)
end
end
end
|
apps/snitch_core/lib/core/data/schema/promotion/promotion.ex
| 0.879987
| 0.519704
|
promotion.ex
|
starcoder
|
defmodule Ueberauth.Strategy.Foursquare do
@moduledoc """
Foursquare Strategy for Überauth.
### Setup
Create an application in Foursquare for you to use.
Register a new application at: [foursquare developer page](https://developer.foursquare.com/) and get the `client_id` and `client_secret`.
Include the provider in your configuration for Ueberauth
config :ueberauth, Ueberauth,
providers: [
foursquare: { Ueberauth.Strategy.Foursquare, [] }
]
Then include the configuration for Foursquare.
config :ueberauth, Ueberauth.Strategy.Foursquare.OAuth,
client_id: System.get_env("FOURSQUARE_CLIENT_ID"),
client_secret: System.get_env("FOURSQUARE_CLIENT_SECRET")
If you haven't already, create a pipeline and setup routes for your callback handler
pipeline :auth do
Ueberauth.plug "/auth"
end
scope "/auth" do
pipe_through [:browser, :auth]
get "/:provider/callback", AuthController, :callback
end
Create an endpoint for the callback where you will handle the `Ueberauth.Auth` struct
defmodule MyApp.AuthController do
use MyApp.Web, :controller
def callback_phase(%{ assigns: %{ ueberauth_failure: fails } } = conn, _params) do
# do things with the failure
end
def callback_phase(%{ assigns: %{ ueberauth_auth: auth } } = conn, params) do
# do things with the auth
end
end
"""
use Ueberauth.Strategy, uid_field: :id,
oauth2_module: Ueberauth.Strategy.Foursquare.OAuth
alias Ueberauth.Auth.Info
alias Ueberauth.Auth.Credentials
alias Ueberauth.Auth.Extra
@doc """
Handles the initial redirect to the Foursquare authentication page
"""
def handle_request!(conn) do
opts = [
redirect_uri: callback_url(conn)
]
module = option(conn, :oauth2_module)
redirect!(conn, apply(module, :authorize_url!, [opts]))
end
@doc """
Handles the callback from Foursquare. When there is a failure from Foursquare, the failure is included in the
`ueberauth_failure` struct. Otherwise, the information returned from Foursquare is included in the `Ueberauth.Auth` struct.
"""
def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
module = option(conn, :oauth2_module)
token = apply(module, :get_token!, [[code: code], [redirect_uri: callback_url(conn)]])
if token.access_token == nil do
set_errors!(conn, [error(token.other_params["error"], token.other_params["error_description"])])
else
fetch_user(conn, token)
end
end
@doc false
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
@doc """
Cleans up the private area of the connection used for passing the raw Foursquare response around during the callback
"""
def handle_cleanup!(conn) do
conn
|> put_private(:foursquare_user, nil)
|> put_private(:foursquare_token, nil)
end
@doc """
Includes the credentials from Foursquare response
"""
def credentials(conn) do
token = conn.private.foursquare_token
%Credentials{
token: token.access_token,
refresh_token: token.refresh_token,
expires_at: token.expires_at,
token_type: token.token_type,
expires: !!token.expires_at,
scopes: []
}
end
@doc """
Fetches the fields to populate the info section of the `Ueberauth.Auth` struct
"""
def info(conn) do
user = conn.private.foursquare_user
%Info{
name: "#{user["firstName"]} #{user["lastName"]}",
first_name: user["firstName"],
last_name: user["lastName"],
email: (user["contact"] || %{})["email"],
phone: (user["contact"] || %{})["phone"],
image: user["photo"],
location: user["homeCity"],
description: user["bio"]
}
end
@doc """
Stores the raw information (including the token) obtained from the Foursquare
"""
def extra(conn) do
%Extra {
raw_info: %{
token: conn.private.foursquare_token,
user: conn.private.foursquare_user
}
}
end
@doc """
Fetches the uid field from the response
"""
def uid(conn) do
uid_field =
conn
|> option(:uid_field)
|> to_string
conn.private.foursquare_user[uid_field]
end
defp option(conn, key) do
default_value = default_options() |> Keyword.get(key)
options(conn) |> Keyword.get(key, default_value)
end
defp fetch_user(conn, token) do
conn = put_private(conn, :foursquare_token, token)
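# Foursquare requires the API version date (`v`) and the OAuth token as
# query parameters on every request.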
response = OAuth2.Client.get(
OAuth2.Client.new([]),
"https://api.foursquare.com/v2/users/self",
[],
params: %{v: 20170115, oauth_token: token.access_token}
)
case response do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: body}}
when status_code in 200..399 ->
put_private(conn, :foursquare_user, body["response"]["user"])
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
end
|
lib/ueberauth/strategy/foursquare.ex
| 0.699973
| 0.441854
|
foursquare.ex
|
starcoder
|
defmodule Timex.DateTime.Helpers do
@moduledoc false
alias Timex.{Types, Timezone, TimezoneInfo, AmbiguousDateTime, AmbiguousTimezoneInfo}
@doc """
Constructs an empty DateTime, for internal use only
"""
def empty() do
%DateTime{year: 0, month: 1, day: 1,
hour: 0, minute: 0, second: 0,
microsecond: {0,0},
time_zone: nil,
zone_abbr: nil,
utc_offset: 0, std_offset: 0}
end
@doc """
Constructs a DateTime from an Erlang date or datetime tuple and a timezone.
Intended for internal use only.
"""
@spec construct(Types.date, Types.valid_timezone) :: DateTime.t | AmbiguousDateTime.t | {:error, term}
@spec construct(Types.datetime, Types.valid_timezone) :: DateTime.t | AmbiguousDateTime.t | {:error, term}
def construct({_, _, _} = date, timezone) do
construct({date, {0,0,0,0}}, timezone)
end
def construct({{_,_,_} = date, {h,mm,s}}, timezone) do
construct({date,{h,mm,s,0}}, timezone)
end
def construct({{y,m,d} = date, {h,mm,s,us}}, timezone) do
seconds_from_zeroyear = :calendar.datetime_to_gregorian_seconds({date,{h,mm,s}})
case Timezone.name_of(timezone) do
{:error, _} = err -> err
tzname ->
case Timezone.resolve(tzname, seconds_from_zeroyear) do
{:error, _} = err -> err
%TimezoneInfo{} = tz ->
%DateTime{:year => y, :month => m, :day => d,
:hour => h, :minute => mm, :second => s,
:microsecond => construct_microseconds(us),
:time_zone => tz.full_name, :zone_abbr => tz.abbreviation,
:utc_offset => tz.offset_utc, :std_offset => tz.offset_std}
%AmbiguousTimezoneInfo{before: b, after: a} ->
bd = %DateTime{:year => y, :month => m, :day => d,
:hour => h, :minute => mm, :second => s,
:microsecond => construct_microseconds(us),
:time_zone => b.full_name, :zone_abbr => b.abbreviation,
:utc_offset => b.offset_utc, :std_offset => b.offset_std}
ad = %DateTime{:year => y, :month => m, :day => d,
:hour => h, :minute => mm, :second => s,
:microsecond => construct_microseconds(us),
:time_zone => a.full_name, :zone_abbr => a.abbreviation,
:utc_offset => a.offset_utc, :std_offset => a.offset_std}
%AmbiguousDateTime{before: bd, after: ad}
end
end
end
def construct_microseconds(0), do: {0,0}
def construct_microseconds(n), do: {n, precision(n)}
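# Derives the stored precision from the number of significant
# (non-trailing-zero) digits, e.g. 123000 -> 3 and 123456 -> 6.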
defp precision(0), do: 0
defp precision(n) when is_integer(n) do
ns = Integer.to_string(n)
n_width = byte_size(ns)
trimmed = byte_size(String.trim_trailing(ns, "0"))
6 - (n_width - trimmed)
end
end
|
deps/timex/lib/datetime/helpers.ex
| 0.806396
| 0.462534
|
helpers.ex
|
starcoder
|
defmodule Current.Stream do
@moduledoc false
defstruct [:repo, :queryable, :options, :state]
def __build__(repo, queryable, options) do
key = Keyword.get(options, :key, :id)
direction = Keyword.get(options, :direction, :asc)
chunk = Keyword.get(options, :chunk, 1_000)
%__MODULE__{
repo: repo,
queryable: queryable,
options: %{key: key, direction: direction, chunk: chunk},
state: %{}
}
end
end
defmodule Current.Stream.Chunk do
@moduledoc false
defstruct [:stream, :rows]
end
defimpl Enumerable, for: Current.Stream do
require Ecto.Query
def count(_), do: {:error, __MODULE__}
def member?(_, _), do: {:error, __MODULE__}
def slice(_), do: {:error, __MODULE__}
def reduce(_, {:halt, acc}, _fun) do
{:halted, acc}
end
def reduce(stream, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(stream, &1, fun)}
end
def reduce(stream, {:cont, acc}, fun) do
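# Fetch the next chunk with keyset pagination: filter past the last seen
# key (see offset/2 below) and cap the result at the configured chunk size.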
key = stream.options[:key]
chunk = stream.options[:chunk]
rows =
stream.queryable
|> offset(stream)
|> Ecto.Query.limit(^chunk)
|> stream.repo.all()
case List.last(rows) do
nil ->
{:done, acc}
%{^key => last_seen_key} ->
state = Map.put(stream.state, :last_seen_key, last_seen_key)
stream = %Current.Stream{stream | state: state}
chunk = %Current.Stream.Chunk{stream: stream, rows: rows}
Enumerable.reduce(chunk, {:cont, acc}, fun)
end
end
defp offset(query, %Current.Stream{state: state}) when state == %{} do
query
end
defp offset(query, stream) do
key = stream.options[:key]
direction = stream.options[:direction]
last_seen_key = stream.state.last_seen_key
case direction do
:asc ->
query |> Ecto.Query.where([r], field(r, ^key) > ^last_seen_key)
:desc ->
query |> Ecto.Query.where([r], field(r, ^key) < ^last_seen_key)
end
end
end
defimpl Enumerable, for: Current.Stream.Chunk do
def count(_), do: {:error, __MODULE__}
def member?(_, _), do: {:error, __MODULE__}
def slice(_), do: {:error, __MODULE__}
def reduce(_, {:halt, acc}, _fun) do
{:halted, acc}
end
def reduce(chunk, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(chunk, &1, fun)}
end
def reduce(%Current.Stream.Chunk{rows: []} = chunk, {:cont, acc}, fun) do
Enumerable.reduce(chunk.stream, {:cont, acc}, fun)
end
def reduce(%Current.Stream.Chunk{rows: [row | remaining]} = chunk, {:cont, acc}, fun) do
reduce(%Current.Stream.Chunk{chunk | rows: remaining}, fun.(row, acc), fun)
end
end
|
lib/current/stream.ex
| 0.755997
| 0.42182
|
stream.ex
|
starcoder
|
defmodule XGPS.Parser do
def start_link do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
def init([]) do
{:ok, %{}}
end
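# Parses a raw NMEA sentence (e.g. "$GPRMC,...*6A") into an
# XGPS.Messages.RMC or XGPS.Messages.GGA struct. Returns {:unknown, content}
# for unrecognized sentence types and {:error, :checksum} when the checksum
# does not match.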
def parse_sentence(sentence) do
case unwrap_sentence(sentence) do
{:ok, body} ->
body
|> match_type
|> parse_content
{:error, :checksum} ->
{:error, :checksum}
end
end
defp unwrap_sentence(sentence) do
{body, checksum} = split(sentence)
calculated_checksum = body |> XGPS.Tools.calculate_checksum() |> XGPS.Tools.int_to_hex_string()
case calculated_checksum == checksum do
true -> {:ok, body}
false -> {:error,:checksum}
end
end
defp match_type(body) do
parts = String.split(body, ",")
get_type(parts)
end
defp split(sentence_raw) do
sentence =
sentence_raw
|> String.trim_leading("\n")
|> String.trim_leading("$")
|> String.trim_trailing("\n")
|> String.trim_trailing("\r")
[main, checksum] = String.split(sentence, "*", parts: 2)
{main, checksum}
end
defp get_type(["GPRMC"|content]), do: {:rmc, content}
defp get_type(["GPGGA"|content]), do: {:gga, content}
defp get_type(content), do: {:unknown, content}
defp parse_content({:rmc, content}) do
case length(content) do
12 ->
%XGPS.Messages.RMC{
time: parse_time(Enum.at(content, 0)),
status: Enum.at(content, 1) |> parse_string,
latitude: parse_latitude(Enum.at(content, 2),Enum.at(content, 3)),
longitude: parse_longitude(Enum.at(content, 4),Enum.at(content, 5)),
speed_over_groud: parse_float(Enum.at(content, 6)),
track_angle: parse_float(Enum.at(content, 7)),
date: Enum.at(content, 8) |> parse_date,
magnetic_variation: parse_float(Enum.at(content, 9))
}
_ -> {:unknown, :unknown_content_length}
end
end
defp parse_content({:gga, content}) do
case length(content) do
14 ->
%XGPS.Messages.GGA{
fix_taken: parse_time(Enum.at(content, 0)),
latitude: parse_latitude(Enum.at(content, 1),Enum.at(content, 2)),
longitude: parse_longitude(Enum.at(content, 3),Enum.at(content, 4)),
fix_quality: parse_int(Enum.at(content, 5)),
number_of_satelites_tracked: parse_int(Enum.at(content, 6)),
horizontal_dilution: parse_float(Enum.at(content, 7)),
altitude: {parse_float(Enum.at(content, 8)), parse_metric(Enum.at(content, 9))},
height_over_goeid: {parse_float(Enum.at(content, 10)), parse_metric(Enum.at(content, 11))},
time_since_last_dgps: Enum.at(content, 12) |> parse_string,
dgps_station_id: Enum.at(content, 13) |> parse_string
}
_ -> {:unknown, :unknown_content_length}
end
end
defp parse_content({:unknown, content}) do
{:unknown, content}
end
defp parse_float(""), do: nil
defp parse_float(value) do
{float, _} = Float.parse(value)
float
end
defp parse_int(""), do: nil
defp parse_int(value) do
{integer, _} = Integer.parse(value)
integer
end
defp parse_metric("M"), do: :meter
defp parse_metric(_), do: :unknown
defp parse_string(""), do: nil
defp parse_string(value), do: value
# byte_size/1 must be used on binaries; length/1 only works on lists
defp parse_time(time) when byte_size(time) < 6, do: nil
defp parse_time(time) do
parts = String.split(time, ".")
parse_hours_minutes_seconds_ms(parts)
end
defp parse_hours_minutes_seconds_ms([main]) when byte_size(main) != 6, do: :unknown_format
defp parse_hours_minutes_seconds_ms([main, _millis]) when byte_size(main) != 6, do: :unknown_format
defp parse_hours_minutes_seconds_ms([main, ""]), do: parse_hours_minutes_seconds_ms([main,"0"])
defp parse_hours_minutes_seconds_ms([main]), do: parse_hours_minutes_seconds_ms([main,"0"])
defp parse_hours_minutes_seconds_ms([main, millis]) do
{ms,_} = Integer.parse(millis)
{h,_} = Integer.parse(String.slice(main, 0, 2))
{m,_} = Integer.parse(String.slice(main, 2, 2))
{s,_} = Integer.parse(String.slice(main, 4, 2))
{:ok, time} = Time.new(h,m,s,ms)
time
end
defp parse_date(date_raw) when byte_size(date_raw) != 6, do: :unknown_format
defp parse_date(date_raw) do
{day,_} = String.slice(date_raw,0,2) |> Integer.parse
{month,_} = String.slice(date_raw,2,2) |> Integer.parse
{year,_} = ("20" <> String.slice(date_raw, 4, 2)) |> Integer.parse
{:ok, date} = Date.new(year, month, day)
date
end
defp parse_latitude("", ""), do: nil
defp parse_latitude(string, bearing) do
{deg, _} = String.slice(string,0,2) |> Float.parse
{min, _} = String.slice(string,2,100) |> Float.parse
XGPS.Tools.lat_to_decimal_degrees(deg,min,bearing)
end
defp parse_longitude("", ""), do: nil
defp parse_longitude(string, bearing) do
{deg, _} = String.slice(string,0,3) |> Float.parse
{min, _} = String.slice(string,3,100) |> Float.parse
XGPS.Tools.lon_to_decimal_degrees(deg,min,bearing)
end
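  # For illustration (the conversion itself is delegated to XGPS.Tools): the
  # NMEA latitude field "4807.038" with bearing "N" splits into 48 degrees and
  # 7.038 minutes, i.e. roughly 48.1173 decimal degrees; southern and western
  # bearings are expected to negate the result.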
end
|
lib/xgps/parser.ex
| 0.584153
| 0.453322
|
parser.ex
|
starcoder
|
defmodule Bamboo.ElasticEmail.Utilities do
@moduledoc """
Utilities for working with the Elastic Email API.
The decode_query/{1,2} and encode_query/{1,2} functions are based heavily
on Plug.Conn.Query, but the Elastic Email API accepts repeated values for
list values instead of requiring `[]` be appended to the key name. Nested
key names are not supported.
"""
@doc """
Decode an Elastic Email API query string. Because the decoded query is
returned as a map, a list is always returned.
iex> decode_query("foo=bar")["foo"]
["bar"]
If a value is given more than once, a list is returned:
iex> decode_query("foo=bar&foo=baz")["foo"]
["bar", "baz"]
Decoding an empty string returns an empty map.
iex> decode_query("")
%{}
"""
def decode_query(query, initial \\ %{})
def decode_query("", initial), do: initial
def decode_query(query, initial) do
parts = :binary.split(query, "&", [:global])
Enum.reduce(Enum.reverse(parts), initial, &decode_www_pair(&1, &2))
end
@doc """
Encodes an Elastic Email API query string. Maps can be encoded:
iex> encode_query(%{foo: "bar", baz: "bat"})
"baz=bat&foo=bar"
Encoding keyword lists preserves the order of the fields:
iex> encode_query([foo: "bar", baz: "bat"])
"foo=bar&baz=bat"
When encoding keyword lists with duplicate keys, the keys are repeated:
iex> encode_query([foo: "bar", foo: "bat"])
"foo=bar&foo=bat"
Encoding maps or keys with simple lists will have the same result as
repeated keys in a keyword list:
iex> encode_query(%{foo: ["bar", "bat"]})
"foo=bar&foo=bat"
Encoding a list of maps works the same way:
iex> encode_query([%{foo: "bar"}, %{foo: "bat"}])
"foo=bar&foo=bat"
Nested maps and keyword lists are not supported and raise exceptions:
iex> encode_query(%{foo: %{bar: "baz"}})
** (ArgumentError) cannot encode nested structures for foo
iex> encode_query(%{foo: [bar: "baz"]})
** (ArgumentError) cannot encode nested structures for foo
Structs work as well as maps:
iex> encode_query(%Point{x: 1, y: 1})
"x=1&y=1"
Other structures raise an exception:
iex> encode_query(3)
** (ArgumentError) can only encode maps, keyword lists, or lists of maps, got: 3
"""
def encode_query(kv, encoder \\ &to_string/1) do
kv
|> encode_pair(encoder)
|> IO.iodata_to_binary()
|> String.trim_leading("&")
|> String.replace("&&", "&", global: true)
end
defp decode_www_pair(binary, acc) do
current =
case :binary.split(binary, "=") do
[key, value] ->
{decode_www_form(key), decode_www_form(value)}
[key] ->
{decode_www_form(key), nil}
end
decode_pair(current, acc)
end
defp decode_www_form(value) do
URI.decode_www_form(value)
rescue
ArgumentError ->
# credo:disable-for-lines:1 Credo.Check.Warning.RaiseInsideRescue
raise Plug.Conn.InvalidQueryError,
message: "invalid www-form encoding on query-string, got #{value}"
end
# Decodes the given tuple and stores it in the accumulator. It parses the key
# and stores the value into the current accumulator. Parameter lists are
  # added to the accumulator in reverse order, so be sure to pass the
# parameters in reverse order.
defp decode_pair({key, value}, acc), do: assign_map(acc, key, value)
defp assign_map(acc, key, value) do
case acc do
%{^key => values} -> Map.put(acc, key, [value | values])
%{} -> Map.put(acc, key, [value])
end
end
# covers structs
defp encode_pair(%{__struct__: struct} = map, encoder) when is_atom(struct) do
encode_pair(Map.from_struct(map), encoder)
end
defp encode_pair(%{} = map, encoder), do: encode_kv(map, encoder)
defp encode_pair([], _encoder), do: []
defp encode_pair([head | _] = list, encoder) when is_list(list) and is_tuple(head) do
encode_kv(list, encoder)
end
defp encode_pair([head | _] = list, encoder) when is_list(list) and is_map(head) do
list
|> Enum.flat_map(&[?&, encode_pair(&1, encoder)])
|> prune()
end
# covers nil
defp encode_pair(nil, _encoder), do: []
defp encode_pair(value, _encoder) do
raise ArgumentError,
"can only encode maps, keyword lists, or lists of maps, got: #{inspect(value)}"
end
defp encode_kv(kv, encoder) do
mapper = fn
{_, value} when value in [%{}, []] ->
[]
{field, value} when is_map(value) ->
raise ArgumentError, "cannot encode nested structures for #{field}"
{field, [head | _] = value} when is_list(value) and (is_tuple(head) or is_map(head)) ->
raise ArgumentError, "cannot encode nested structures for #{field}"
{field, value} when is_list(value) ->
field = encode_key(field)
[?&, Enum.map(value, &[?&, field, ?=, encode_value(&1, encoder)])]
{field, value} ->
[?&, [encode_key(field), ?=, encode_value(value, encoder)]]
end
kv
|> Enum.flat_map(mapper)
|> prune()
end
defp encode_key(item), do: URI.encode_www_form(to_string(item))
defp encode_value(item, encoder), do: URI.encode_www_form(encoder.(item))
defp prune([?& | t]), do: t
defp prune([]), do: []
end
|
lib/bamboo/elastic_email/utilities.ex
| 0.831759
| 0.539711
|
utilities.ex
|
starcoder
|
defmodule AWS.Appflow do
@moduledoc """
Welcome to the Amazon AppFlow API reference.
This guide is for developers who need detailed information about the Amazon
AppFlow API operations, data types, and errors.
Amazon AppFlow is a fully managed integration service that enables you to
securely transfer data between software as a service (SaaS) applications like
Salesforce, Marketo, Slack, and ServiceNow, and AWS services like Amazon S3 and
Amazon Redshift.
Use the following links to get started on the Amazon AppFlow API:
*
[Actions](https://docs.aws.amazon.com/appflow/1.0/APIReference/API_Operations.html): An alphabetical list of all Amazon AppFlow API operations.
* [Data
types](https://docs.aws.amazon.com/appflow/1.0/APIReference/API_Types.html): An
alphabetical list of all Amazon AppFlow data types.
* [Common parameters](https://docs.aws.amazon.com/appflow/1.0/APIReference/CommonParameters.html):
Parameters that all Query operations can use.
* [Common errors](https://docs.aws.amazon.com/appflow/1.0/APIReference/CommonErrors.html):
Client and server errors that all operations can return.
If you're new to Amazon AppFlow, we recommend that you review the [Amazon AppFlow User
Guide](https://docs.aws.amazon.com/appflow/latest/userguide/what-is-appflow.html).
Amazon AppFlow API users can use vendor-specific mechanisms for OAuth, and
include applicable OAuth attributes (such as `auth-code` and `redirecturi`) with
the connector-specific `ConnectorProfileProperties` when creating a new
connector profile using Amazon AppFlow API operations. For example, Salesforce
users can refer to the [ *Authorize Apps with OAuth*
](https://help.salesforce.com/articleView?id=remoteaccess_authenticate.htm)
documentation.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2020-08-23",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "appflow",
global?: false,
protocol: "rest-json",
service_id: "Appflow",
signature_version: "v4",
signing_name: "appflow",
target_prefix: nil
}
end
@doc """
Creates a new connector profile associated with your AWS account.
There is a soft quota of 100 connector profiles per AWS account. If you need
more connector profiles than this quota allows, you can submit a request to the
Amazon AppFlow team through the Amazon AppFlow support channel.
"""
def create_connector_profile(%Client{} = client, input, options \\ []) do
url_path = "/create-connector-profile"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Enables your application to create a new flow using Amazon AppFlow.
You must create a connector profile before calling this API. Please note that
the Request Syntax below shows syntax for multiple destinations, however, you
can only transfer data to one item in this list at a time. Amazon AppFlow does
not currently support flows to multiple destinations at once.
"""
def create_flow(%Client{} = client, input, options \\ []) do
url_path = "/create-flow"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Enables you to delete an existing connector profile.
"""
def delete_connector_profile(%Client{} = client, input, options \\ []) do
url_path = "/delete-connector-profile"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Enables your application to delete an existing flow.
Before deleting the flow, Amazon AppFlow validates the request by checking the
flow configuration and status. You can delete flows one at a time.
"""
def delete_flow(%Client{} = client, input, options \\ []) do
url_path = "/delete-flow"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Provides details regarding the entity used with the connector, with a
description of the data model for each entity.
"""
def describe_connector_entity(%Client{} = client, input, options \\ []) do
url_path = "/describe-connector-entity"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns a list of `connector-profile` details matching the provided
`connector-profile` names and `connector-types`.
Both input lists are optional, and you can use them to filter the result.
If no names or `connector-types` are provided, returns all connector profiles in
a paginated form. If there is no match, this operation returns an empty list.
"""
def describe_connector_profiles(%Client{} = client, input, options \\ []) do
url_path = "/describe-connector-profiles"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Describes the connectors vended by Amazon AppFlow for specified connector types.
If you don't specify a connector type, this operation describes all connectors
vended by Amazon AppFlow. If there are more connectors than can be returned in
  one page, the response contains a `nextToken` object, which can be passed in
to the next call to the `DescribeConnectors` API operation to retrieve the next
page.
"""
def describe_connectors(%Client{} = client, input, options \\ []) do
url_path = "/describe-connectors"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Provides a description of the specified flow.
"""
def describe_flow(%Client{} = client, input, options \\ []) do
url_path = "/describe-flow"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Fetches the execution history of the flow.
"""
def describe_flow_execution_records(%Client{} = client, input, options \\ []) do
url_path = "/describe-flow-execution-records"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns the list of available connector entities supported by Amazon AppFlow.
For example, you can query Salesforce for *Account* and *Opportunity* entities,
or query ServiceNow for the *Incident* entity.
"""
def list_connector_entities(%Client{} = client, input, options \\ []) do
url_path = "/list-connector-entities"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Lists all of the flows associated with your account.
"""
def list_flows(%Client{} = client, input, options \\ []) do
url_path = "/list-flows"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Retrieves the tags that are associated with a specified flow.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
url_path = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Activates an existing flow.
For on-demand flows, this operation runs the flow immediately. For schedule and
event-triggered flows, this operation activates the flow.
"""
def start_flow(%Client{} = client, input, options \\ []) do
url_path = "/start-flow"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deactivates the existing flow.
For on-demand flows, this operation returns an `unsupportedOperationException`
error message. For schedule and event-triggered flows, this operation
deactivates the flow.
"""
def stop_flow(%Client{} = client, input, options \\ []) do
url_path = "/stop-flow"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Applies a tag to the specified flow.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes a tag from the specified flow.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{URI.encode(resource_arn)}"
headers = []
{query_params, input} =
[
{"tagKeys", "tagKeys"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates a given connector profile associated with your account.
"""
def update_connector_profile(%Client{} = client, input, options \\ []) do
url_path = "/update-connector-profile"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates an existing flow.
"""
def update_flow(%Client{} = client, input, options \\ []) do
url_path = "/update-flow"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
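  # Illustrative usage (a minimal sketch: the client is assumed to come from
  # AWS.Client.create/3 in the aws-elixir library, and the flow name is
  # hypothetical):
  #
  #     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
  #     {:ok, result, _response} = AWS.Appflow.describe_flow(client, %{"flowName" => "my-flow"})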
end
|
lib/aws/generated/appflow.ex
| 0.873107
| 0.548613
|
appflow.ex
|
starcoder
|
defmodule Bolt.Cogs.Assign do
@moduledoc false
@behaviour Nosedrum.Command
alias Bolt.Schema.SelfAssignableRoles
alias Bolt.{Converters, ErrorFormatters, Helpers, Humanizer, ModLog, Repo}
alias Nosedrum.Predicates
alias Nostrum.Api
alias Nostrum.Cache.GuildCache
require Logger
@impl true
def usage, do: ["assign <role:role...>"]
@impl true
def description,
do: """
Assigns the given self-assignable role to yourself.
To see which roles are self-assignable, use `lsar`.
Aliased to `iam`.
**Examples**:
```rs
// Assign the role 'Movie Nighter'
assign movie nighter
```
"""
@impl true
def predicates, do: [&Predicates.guild_only/1]
@impl true
def command(msg, [role_name]) do
response =
with roles_row when roles_row != nil <- Repo.get(SelfAssignableRoles, msg.guild_id),
{:ok, role} <- Converters.to_role(msg.guild_id, role_name, true),
true <- role.id in roles_row.roles,
{:ok} <- Api.add_guild_member_role(msg.guild_id, msg.author.id, role.id) do
ModLog.emit(
msg.guild_id,
"SELF_ASSIGNABLE_ROLES",
"gave #{Humanizer.human_user(msg.author)}" <>
" the self-assignable role #{Humanizer.human_role(msg.guild_id, role.id)}"
)
"👌 gave you the `#{Helpers.clean_content(role.name)}` role"
else
nil ->
"🚫 this guild has no self-assignable roles configured"
false ->
"🚫 that role is not self-assignable"
error ->
ErrorFormatters.fmt(msg, error)
end
{:ok, _msg} = Api.create_message(msg.channel_id, response)
end
def command(msg, args) when length(args) >= 2 do
case Repo.get(SelfAssignableRoles, msg.guild_id) do
nil ->
response = "🚫 this guild has no self-assignable roles configured"
{:ok, _msg} = Api.create_message(msg.channel_id, response)
roles_row ->
# Let's check if there's a multi-word role matching the arguments...
maybe_multi_word_name = Enum.join(args, " ")
conversion_result = Converters.to_role(msg.guild_id, maybe_multi_word_name, true)
if match?({:ok, _role}, conversion_result) do
# If yes, we only have a single role we care about,
# and the author specified a multi-word role.
# Pass it along to the single-role command handler, it will perform the rest of the work.
command(msg, [maybe_multi_word_name])
else
# Otherwise, assume we got a list of roles to assign.
converted_roles = Enum.map(args, &Converters.to_role(msg.guild_id, &1, true))
response = assign_converted(msg, converted_roles, roles_row.roles)
{:ok, _msg} = Api.create_message(msg.channel_id, response)
end
end
end
def command(msg, []) do
    response = "🚫 expected one or more role names to assign, got nothing"
{:ok, _msg} = Api.create_message(msg.channel_id, response)
end
  @spec assign_converted(
          Nostrum.Struct.Message.t(),
          [Nostrum.Struct.Guild.Role.t()],
          [Nostrum.Struct.Guild.Role.id()]
        ) :: String.t()
defp assign_converted(msg, converted_roles, self_assignable_roles) do
valid_roles =
converted_roles
|> Stream.filter(&match?({:ok, _role}, &1))
|> Enum.map(&elem(&1, 1))
selected_self_assignable_roles =
valid_roles
|> Enum.filter(&(&1.id in self_assignable_roles))
not_selfassignable_errors =
valid_roles
|> MapSet.new()
|> MapSet.difference(MapSet.new(selected_self_assignable_roles))
|> Enum.map(&"`#{&1.name}` is not self-assignable")
errors =
converted_roles
|> Stream.filter(&match?({:error, _reason}, &1))
|> Enum.map(&elem(&1, 1))
|> Kernel.++(not_selfassignable_errors)
if Enum.empty?(selected_self_assignable_roles) do
"🚫 no valid roles to be given - if you meant to assign a single role, " <>
"check your spelling. errors:\n#{errors |> Stream.map(&"• #{&1}") |> Enum.join("\n")}"
else
with {:ok, member} when member != nil <-
GuildCache.select(
msg.guild_id,
&Map.get(&1.members, msg.author.id)
),
{:ok, _member} <-
Api.modify_guild_member(msg.guild_id, msg.author.id,
roles: Enum.uniq(member.roles ++ Enum.map(selected_self_assignable_roles, & &1.id))
) do
added_role_list =
selected_self_assignable_roles
|> Stream.map(& &1.name)
|> Stream.map(&Helpers.clean_content/1)
|> Stream.map(&"`#{&1}`")
|> Enum.join(", ")
ModLog.emit(
msg.guild_id,
"SELF_ASSIGNABLE_ROLES",
"gave #{Humanizer.human_user(msg.author)}" <>
" the self-assignable roles #{added_role_list}"
)
if Enum.empty?(errors) do
"👌 gave you the role(s) #{added_role_list}"
else
"""
👌 gave you the role(s) #{added_role_list}, but could not give you the others:
#{errors |> Stream.map(&"• #{&1}") |> Enum.join("\n")}
"""
end
else
{:ok, nil} ->
"❌ you are currently not cached for this guild"
error ->
ErrorFormatters.fmt(msg, error)
end
end
end
end
|
lib/bolt/cogs/assign.ex
| 0.838894
| 0.583619
|
assign.ex
|
starcoder
|
defmodule ExUnit do
@moduledoc """
Basic unit testing framework for Elixir.
## Example
A basic setup for ExUnit is shown below:
# File: assertion_test.exs
# 1) Start ExUnit.
ExUnit.start
# 2) Create a new test module (test case) and use `ExUnit.Case`.
defmodule AssertionTest do
# 3) Notice we pass `async: true`, this runs the test case
# concurrently with other test cases
use ExUnit.Case, async: true
# 4) A test is a function whose name starts with
# `test` and receives a context.
def test_always_pass(_) do
assert true
end
# 5) Use the `test` macro instead of `def` for clarity.
test "the truth" do
assert true
end
end
To run the tests above, run the file
using `elixir` from the command line. Assuming you named the file
`assertion_test.exs`, you can run it as:
bin/elixir assertion_test.exs
## Case, Callbacks and Assertions
See `ExUnit.Case` and `ExUnit.Callbacks`
for more information about defining test cases.
The `ExUnit.Assertions` module contains
a set of macros to easily generate assertions with appropriate
error messages.
## Integration with Mix
Mix is the project management and build tool for Elixir. Invoking `mix test`
from the command line will run the tests in each file matching the pattern
`*_test.exs` found in the `test` directory of your project.
You must create a `test_helper.exs` file inside the
`test` directory and put the code common to all tests there.
The minimum example of a `test_helper.exs` file would be:
# test/test_helper.exs
ExUnit.start
Mix will load the `test_helper.exs` file before executing the tests.
It is not necessary to `require` the `test_helper.exs` file in your test files.
See `Mix.Tasks.Test` for more information.
"""
@typedoc "The state returned by ExUnit.Test and ExUnit.TestCase"
@type state :: nil | :passed | { :failed, failed } | { :invalid, invalid }
@type failed :: { :error | :exit | :throw | :EXIT, reason :: term, stacktrace :: [tuple] }
@type invalid :: module
defrecord Test, [:name, :case, :state, :time, :tags, :line] do
@moduledoc """
A record that keeps information about the test.
It is received by formatters and also accessible
in the metadata under the key `:test`.
"""
record_type name: atom, case: module, state: ExUnit.state,
time: non_neg_integer, tags: Keyword.t
end
defrecord TestCase, [:name, :state, :tests] do
@moduledoc """
A record that keeps information about the test case.
It is received by formatters and also accessible
in the metadata under the key `:case`.
"""
record_type name: module, state: ExUnit.state, tests: [ExUnit.Test.t]
end
use Application.Behaviour
@doc false
def start(_type, []) do
pid = ExUnit.Sup.start_link
ExUnit.Server.start_load
pid
end
@doc """
Starts ExUnit and automatically runs tests right before the
VM terminates. It accepts a set of options to configure `ExUnit`
(the same ones accepted by `configure/1`).
If you want to run tests manually, you can set `:autorun` to `false`.
"""
def start(options // []) do
:application.start(:elixir)
:application.start(:ex_unit)
configure(options)
if :application.get_env(:ex_unit, :autorun) != { :ok, false } do
:application.set_env(:ex_unit, :autorun, false)
System.at_exit fn
0 ->
failures = ExUnit.run
System.at_exit fn _ ->
if failures > 0, do: System.halt(1)
end
_ ->
:ok
end
end
end
@doc """
Configures ExUnit.
## Options
ExUnit supports the following options:
* `:color` - When color should be used by specific formatters.
Defaults to the result of `IO.ANSI.terminal?/1`;
* `:formatter` - The formatter that will print results.
Defaults to `ExUnit.CLIFormatter`;
* `:max_cases` - Maximum number of cases to run in parallel.
Defaults to `:erlang.system_info(:schedulers_online)`;
* `:trace` - Set ExUnit into trace mode, this sets `:max_cases` to `1`
and prints each test case and test while running;
* `:autorun` - If ExUnit should run by default on exit, defaults to `true`;
* `:include` - Specify which tests are run by skipping tests that do not match the filter
* `:exclude` - Specify which tests are run by skipping tests that match the filter
"""
def configure(options) do
Enum.each options, fn { k, v } ->
:application.set_env(:ex_unit, k, v)
end
end
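  # For example, using options from the list documented above:
  #
  #     ExUnit.configure(max_cases: 4, trace: true)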
@doc """
Returns ExUnit configuration.
"""
def configuration do
:application.get_all_env(:ex_unit)
end
@doc """
API used to run the tests. It is invoked automatically
if ExUnit is started via `ExUnit.start/1`.
Returns the number of failures.
"""
def run do
{ async, sync, load_us } = ExUnit.Server.start_run
opts = Keyword.put_new(configuration, :color, IO.ANSI.terminal?)
ExUnit.Runner.run async, sync, opts, load_us
end
end
|
lib/ex_unit/lib/ex_unit.ex
| 0.869313
| 0.851212
|
ex_unit.ex
|
starcoder
|
defmodule Saxmerl.Handler do
@moduledoc false
import Saxmerl.Records
@behaviour Saxy.Handler
@impl true
def handle_event(:start_element, element, state) do
{:ok, start_element(element, state)}
end
@impl true
def handle_event(:end_element, element, state) do
{:ok, end_element(element, state)}
end
@impl true
def handle_event(:characters, element, state) do
{:ok, characters(element, state)}
end
@impl true
def handle_event(_event_name, _event_data, state) do
{:ok, state}
end
# Event handlers
defp start_element({name, attributes}, state) do
%{dynamic_atoms?: dynamic_atoms?, stack: stack, child_count: child_count} = state
element = make_element(name, attributes, stack, child_count, dynamic_atoms?)
%{state | stack: [element | stack], child_count: [0 | child_count]}
end
defp end_element(_name, %{stack: [root]} = state) do
%{state | stack: [reverse_element_content(root)]}
end
defp end_element(_name, state) do
%{stack: stack, child_count: child_count} = state
[current | [parent | stack]] = stack
[_ | [count | child_count]] = child_count
current = reverse_element_content(current)
parent = prepend_element_content(parent, current)
%{state | stack: [parent | stack], child_count: [count + 1 | child_count]}
end
defp characters(characters, state) do
%{stack: [current | stack]} = state
text = xmlText(value: String.to_charlist(characters))
current = prepend_element_content(current, text)
%{state | stack: [current | stack]}
end
# Helpers
defp prepend_element_content(xmlElement(content: content) = current, object) do
xmlElement(current, content: [object | content])
end
defp reverse_element_content(xmlElement(content: content) = element) do
xmlElement(element, content: Enum.reverse(content))
end
defp make_element(binary_name, attributes, stack, child_count, dynamic_atoms?) do
{namespace, local} = split_name(binary_name)
name = binary_to_atom(dynamic_atoms?, binary_name)
nsinfo = make_nsinfo(namespace, local)
attributes = make_attributes(attributes, dynamic_atoms?)
namespace = make_namespace()
parents = make_parents(stack)
position = determine_element_position(child_count)
content = []
xmlElement(
name: name,
expanded_name: name,
pos: position,
nsinfo: nsinfo,
namespace: namespace,
parents: parents,
attributes: attributes,
content: content
)
end
defp determine_element_position([count | _]), do: count + 1
defp determine_element_position([]), do: 1
defp split_name(name) do
case String.split(name, ":", parts: 2) do
[local] -> {<<>>, local}
[namespace, local] -> {namespace, local}
end
end
defp make_nsinfo(<<>>, _local), do: []
defp make_nsinfo(namespace, local),
do: {String.to_charlist(namespace), String.to_charlist(local)}
defp make_namespace(), do: xmlNamespace()
defp make_parents(stack, acc \\ [])
defp make_parents([], acc), do: Enum.reverse(acc)
defp make_parents([current | stack], acc) do
xmlElement(name: name, pos: pos) = current
make_parents(stack, [{name, pos} | acc])
end
defp make_attributes(attributes, dynamic_atoms?, count \\ 0, acc \\ [])
defp make_attributes([], _dynamic_atoms?, _count, acc), do: Enum.reverse(acc)
defp make_attributes([{binary_name, value} | attributes], dynamic_atoms?, count, acc) do
{namespace, local} = split_name(binary_name)
name = binary_to_atom(dynamic_atoms?, binary_name)
attribute =
xmlAttribute(
name: name,
expanded_name: name,
nsinfo: make_nsinfo(namespace, local),
pos: count + 1,
value: String.to_charlist(value)
)
make_attributes(attributes, dynamic_atoms?, count + 1, [attribute | acc])
end
defp binary_to_atom(true, binary), do: String.to_atom(binary)
defp binary_to_atom(false, binary), do: String.to_existing_atom(binary)
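  # Illustrative invocation (a sketch: the initial state shape is inferred from
  # the fields this handler reads, and Saxy.parse_string/3 is Saxy's
  # event-driven entry point):
  #
  #     Saxy.parse_string("<doc><item/></doc>", Saxmerl.Handler,
  #       %{dynamic_atoms?: true, stack: [], child_count: []})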
end
|
lib/saxmerl/handler.ex
| 0.648578
| 0.478529
|
handler.ex
|
starcoder
|
if Code.ensure_loaded?(Phoenix.LiveView) do
defmodule PromEx.Plugins.PhoenixLiveView do
@moduledoc """
This plugin captures metrics emitted by PhoenixLiveView. Specifically, it captures events related to the
mount, handle_event, and handle_params callbacks for live views and live components.
This plugin supports the following options:
- `metric_prefix`: This option is OPTIONAL and is used to override the default metric prefix of
    `[otp_app, :prom_ex, :phoenix_live_view]`. If this changes, you will also want to set
    `phoenix_live_view_metric_prefix` in your `dashboard_assigns` to the snakecase version of your
    prefix; the default `phoenix_live_view_metric_prefix` is `{otp_app}_prom_ex_phoenix_live_view`.
This plugin exposes the following metric groups:
- `:phoenix_live_view_event_metrics`
- `:phoenix_live_view_component_event_metrics`
To use plugin in your application, add the following to your PromEx module:
```
defmodule WebApp.PromEx do
use PromEx, otp_app: :web_app
@impl true
def plugins do
[
...
PromEx.Plugins.PhoenixLiveView
]
end
@impl true
def dashboards do
[
...
{:prom_ex, "phoenix_live_view.json"}
]
end
end
```
"""
use PromEx.Plugin
alias Phoenix.LiveView.Socket
@live_view_mount_stop [:phoenix, :live_view, :mount, :stop]
@live_view_mount_exception [:phoenix, :live_view, :mount, :exception]
@live_view_handle_event_stop [:phoenix, :live_view, :handle_event, :stop]
@live_view_handle_event_exception [:phoenix, :live_view, :handle_event, :exception]
# Coming soon
# @live_view_handle_params_stop [:phoenix, :live_view, :handle_params, :stop]
# @live_view_handle_params_exception [:phoenix, :live_view, :handle_params, :exception]
# Coming soon
# @live_component_handle_event_stop [:phoenix, :live_component, :handle_event, :stop]
# @live_component_handle_event_exception [:phoenix, :live_component, :handle_event, :exception]
@impl true
def event_metrics(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
metric_prefix = Keyword.get(opts, :metric_prefix, PromEx.metric_prefix(otp_app, :phoenix_live_view))
# Event metrics definitions
[
live_view_event_metrics(metric_prefix),
live_component_event_metrics(metric_prefix)
]
end
defp live_view_event_metrics(metric_prefix) do
bucket_intervals = [10, 100, 250, 500, 1_000, 2_000, 5_000, 10_000]
Event.build(
:phoenix_live_view_event_metrics,
[
distribution(
metric_prefix ++ [:mount, :duration, :milliseconds],
event_name: @live_view_mount_stop,
measurement: :duration,
description: "The time it takes for the live view to complete the mount callback.",
reporter_options: [
buckets: bucket_intervals
],
tag_values: &get_mount_socket_tags/1,
tags: [:action, :module],
unit: {:native, :millisecond}
),
distribution(
metric_prefix ++ [:mount, :exception, :duration, :milliseconds],
event_name: @live_view_mount_exception,
measurement: :duration,
description:
"The time it takes for the live view to complete the mount callback that resulted in an exception",
reporter_options: [
buckets: bucket_intervals
],
tag_values: &get_mount_socket_exception_tags/1,
tags: [:action, :module, :kind, :reason],
unit: {:native, :millisecond}
),
distribution(
metric_prefix ++ [:handle_event, :duration, :milliseconds],
event_name: @live_view_handle_event_stop,
measurement: :duration,
description: "The time it takes for the live view to complete the handle_event callback.",
reporter_options: [
buckets: bucket_intervals
],
tag_values: &get_handle_event_socket_tags/1,
tags: [:event, :action, :module],
unit: {:native, :millisecond}
),
distribution(
metric_prefix ++ [:handle_event, :exception, :duration, :milliseconds],
event_name: @live_view_handle_event_exception,
measurement: :duration,
description:
"The time it takes for the live view to complete the handle_event callback that resulted in an exception.",
reporter_options: [
buckets: bucket_intervals
],
tag_values: &get_handle_event_exception_socket_tags/1,
tags: [:event, :action, :module, :kind, :reason],
unit: {:native, :millisecond}
)
]
)
end
defp live_component_event_metrics(_metric_prefix) do
Event.build(
:phoenix_live_view_component_event_metrics,
[]
)
end
defp get_handle_event_exception_socket_tags(%{socket: socket = %Socket{}} = metadata) do
%{
event: metadata.event,
action: get_live_view_action(socket),
module: get_live_view_module(socket),
kind: metadata.kind,
reason: metadata.reason
}
end
defp get_handle_event_socket_tags(%{socket: socket = %Socket{}} = metadata) do
%{
event: metadata.event,
action: get_live_view_action(socket),
module: get_live_view_module(socket)
}
end
defp get_mount_socket_tags(%{socket: socket = %Socket{}}) do
%{
action: get_live_view_action(socket),
module: get_live_view_module(socket)
}
end
defp get_mount_socket_exception_tags(%{socket: socket = %Socket{}, kind: kind, reason: reason}) do
%{
action: get_live_view_action(socket),
module: get_live_view_module(socket),
kind: kind,
reason: reason
}
end
defp get_live_view_module(%Socket{} = socket) do
socket
|> Map.get(:view, :unknown)
|> normalize_module_name()
end
defp get_live_view_action(%Socket{} = socket) do
socket.assigns
|> Map.get(:live_action, :unknown)
end
defp normalize_module_name(name) when is_atom(name) do
name
|> Atom.to_string()
|> String.trim_leading("Elixir.")
end
defp normalize_module_name(name), do: name
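    # e.g. normalize_module_name(MyAppWeb.PageLive) yields "MyAppWeb.PageLive"
    # (the module name here is purely illustrative).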
end
else
defmodule PromEx.Plugins.PhoenixLiveView do
@moduledoc false
use PromEx.Plugin
@impl true
def event_metrics(_opts) do
PromEx.Plugin.no_dep_raise(__MODULE__, "PhoenixLiveView")
end
end
end
|
lib/prom_ex/plugins/phoenix_live_view.ex
| 0.879761
| 0.63202
|
phoenix_live_view.ex
|
starcoder
|
defmodule Construct.Compiler.AST.Types do
@moduledoc false
@builtin Construct.Type.builtin()
@doc """
Returns typespec AST for given type
iex> spec([CommaList, {:array, :integer}]) |> Macro.to_string()
"list(:integer)"
iex> spec({:array, :string}) |> Macro.to_string()
"list(String.t())"
iex> spec({:map, CustomType}) |> Macro.to_string()
"%{optional(term) => CustomType.t()}"
iex> spec(:string) |> Macro.to_string()
"String.t()"
iex> spec(CustomType) |> Macro.to_string()
"CustomType.t()"
"""
@spec spec(Construct.Type.t()) :: Macro.t()
def spec(type) when is_list(type) do
type |> List.last() |> spec()
end
def spec({:array, type}) do
quote do
list(unquote(spec(type)))
end
end
def spec({:map, type}) do
quote do
%{optional(term) => unquote(spec(type))}
end
end
def spec({typec, _arg}) do
quote do
unquote(typec).t()
end
end
def spec(:string) do
quote do
String.t()
end
end
def spec(:decimal) do
quote do
Decimal.t()
end
end
def spec(:utc_datetime) do
quote do
DateTime.t()
end
end
def spec(:naive_datetime) do
quote do
NaiveDateTime.t()
end
end
def spec(:date) do
quote do
Date.t()
end
end
def spec(:time) do
quote do
Time.t()
end
end
def spec(type) when type in @builtin do
type
end
def spec(type) when is_atom(type) do
quote do
unquote(type).t()
end
end
def spec(type) do
type
end
@doc """
Returns typespec AST for given term
iex> typeof(nil) |> Macro.to_string()
"nil"
iex> typeof(1.42) |> Macro.to_string()
"float()"
iex> typeof("string") |> Macro.to_string()
"String.t()"
iex> typeof(CustomType) |> Macro.to_string()
"CustomType.t()"
iex> typeof(&NaiveDateTime.utc_now/0) |> Macro.to_string()
"NaiveDateTime.t()"
"""
@spec typeof(term()) :: Macro.t()
def typeof(term) when is_nil(term) do
nil
end
def typeof(term) when is_integer(term) do
{:integer, [], []}
end
def typeof(term) when is_float(term) do
{:float, [], []}
end
def typeof(term) when is_boolean(term) do
{:boolean, [], []}
end
def typeof(term) when is_binary(term) do
quote do
String.t()
end
end
def typeof(term) when is_pid(term) do
{:pid, [], []}
end
def typeof(term) when is_reference(term) do
{:reference, [], []}
end
def typeof(%{__struct__: struct}) when is_atom(struct) do
quote do
unquote(struct).t()
end
end
def typeof(term) when is_map(term) do
{:map, [], []}
end
def typeof(term) when is_atom(term) do
quote do
unquote(term).t()
end
end
def typeof(term) when is_list(term) do
{:list, [], []}
end
def typeof(term) when is_function(term, 0) do
typeof(term.())
end
def typeof(_) do
{:term, [], []}
end
end
|
lib/construct/compiler/ast/ast_types.ex
| 0.745398
| 0.590986
|
ast_types.ex
|
starcoder
|
defmodule Singleton do
@moduledoc """
Singleton.
The top supervisor of singleton is a DynamicSupervisor. Singleton
can manage many singleton processes at the same time. Each singleton
is identified by its unique `name` term.
"""
@doc """
Start a new singleton process. Optionally provide the `on_conflict`
parameter which will be called whenever a singleton process shuts
down due to another instance being present in the cluster.
This function needs to be executed on all nodes where the singleton
process is allowed to live. The actual process will be started only
once; a manager process is started on each node for each singleton
to ensure that the process continues on (possibly) another node in
case of node disconnects or crashes.
"""
def start_child(
supervisor_name,
module,
args,
name,
on_conflict \\ fn -> nil end
) do
child_name = name(module, args)
spec =
{Singleton.Manager,
[
mod: module,
args: args,
name: name,
child_name: child_name,
on_conflict: on_conflict
]}
case Process.whereis(supervisor_name) do
nil ->
raise("""
No process found with name #{supervisor_name}.
Singleton.Supervisor must be added to your application's supervision tree.
If your application includes a supervision tree in `application.ex`, you can
simply add `Singleton.Supervisor` to the list of children.
children = [
...,
        {Singleton.Supervisor, name: MyApp.Singleton}
]
supervisor = Supervisor.start_link(children, opts)
""")
_pid ->
DynamicSupervisor.start_child(supervisor_name, spec)
end
end
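  # Illustrative call, to be run on every node in the cluster (the worker
  # module and name are hypothetical; only one MyApp.Worker instance will run
  # cluster-wide):
  #
  #     Singleton.start_child(MyApp.Singleton, MyApp.Worker, [], :my_worker)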
def stop_child(supervisor_name, module, args) do
child_name = name(module, args)
case Process.whereis(child_name) do
nil -> {:error, :not_found}
pid -> DynamicSupervisor.terminate_child(supervisor_name, pid)
end
end
defp name(module, args) do
bin = :crypto.hash(:sha, :erlang.term_to_binary({module, args}))
String.to_atom("singleton_" <> Base.encode64(bin, padding: false))
end
end
|
lib/singleton.ex
| 0.845065
| 0.404713
|
singleton.ex
|
starcoder
|
defmodule EverythingLocation.Options do
@moduledoc """
  A struct for validating the parameters before passing them to the API
"""
use Ecto.Model
import Ecto.Schema
@required ~w(api_key address1)
@optional ~w(geocode certify suggest enhance address2 address3 address4 address5 address6 address7 address8 locality administrative_area postal_code country)
@input_mapping %{
address1: "Address1",
address2: "Address2",
address3: "Address3",
address4: "Address4",
address5: "Address5",
address6: "Address6",
address7: "Address7",
address8: "Address8",
locality: "Locality",
administrative_area: "AdministrativeArea",
postal_code: "PostalCode",
country: "Country"
}
schema "Options" do
field :geocode, :boolean, default: false
field :certify, :boolean, default: false
field :suggest, :boolean, default: false
field :enhance, :boolean, default: true
field :address1, :string
field :address2, :string
field :address3, :string
field :address4, :string
field :address5, :string
field :address6, :string
field :address7, :string
field :address8, :string
field :locality, :string
field :administrative_area, :string
field :postal_code, :string
field :country, :string
field :api_key, :string
end
@doc """
  You can pass a map of key-value pairs which will be cast and validated for use with the EverythingLocation API.
"""
  @spec changeset(%EverythingLocation.Options{}, map) :: %Ecto.Changeset{}
def changeset(model, params) do
model
|> cast(params, @required, @optional)
end
@doc """
  Takes an EverythingLocation.Options struct and converts it to a map of key-value pairs formatted for the EverythingLocation API.
"""
  @spec create(%EverythingLocation.Options{}) :: map | {:error, String.t()}
def create({:error, _} = error), do: error
def create(%EverythingLocation.Options{} = data) do
%{lqtkey: data.api_key}
|> add_inputs(data)
|> add_settings(data)
end
defp add_inputs(params, data) do
inputs = Enum.reduce(@input_mapping, %{}, fn ({k, v}, acc) ->
Map.put(acc, v, Map.get(data,k))
end)
Map.put(params, :input, [inputs])
end
defp add_settings(params, data) do
[:geocode, :certify, :suggest, :enhance] |> Enum.reduce(params, fn(x, acc) ->
case Map.get(data, x) do
true -> Map.put(acc, x, "on")
_ -> acc
end
end)
end
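  # Illustrative flow (values are hypothetical; absent address fields appear in
  # the input map with nil values, and enhance defaults to true):
  #
  #     %EverythingLocation.Options{api_key: "KEY", address1: "1 Main St", geocode: true}
  #     |> EverythingLocation.Options.create()
  #     #=> %{lqtkey: "KEY", geocode: "on", enhance: "on",
  #     #     input: [%{"Address1" => "1 Main St", ...}]}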
end
|
lib/everything_location/options.ex
| 0.810929
| 0.483526
|
options.ex
|
starcoder
|
defmodule AWS.Discovery do
@moduledoc """
AWS Application Discovery Service
AWS Application Discovery Service helps you plan application migration
projects by automatically identifying servers, virtual machines (VMs),
software, and software dependencies running in your on-premises data
centers. Application Discovery Service also collects application
performance data, which can help you assess the outcome of your migration.
The data collected by Application Discovery Service is securely retained in
an Amazon-hosted and managed database in the cloud. You can export the data
as a CSV or XML file into your preferred visualization tool or
cloud-migration solution to plan your migration. For more information, see
the Application Discovery Service
[FAQ](http://aws.amazon.com/application-discovery/faqs/).
Application Discovery Service offers two modes of operation.
<ul> <li> **Agentless discovery** mode is recommended for environments that
use VMware vCenter Server. This mode doesn't require you to install an
agent on each host. Agentless discovery gathers server information
regardless of the operating systems, which minimizes the time required for
initial on-premises infrastructure assessment. Agentless discovery doesn't
collect information about software and software dependencies. It also
doesn't work in non-VMware environments. We recommend that you use
agent-based discovery for non-VMware environments and if you want to
collect information about software and software dependencies. You can also
run agent-based and agentless discovery simultaneously. Use agentless
discovery to quickly complete the initial infrastructure assessment and
then install agents on select hosts to gather information about software
and software dependencies.
</li> <li> **Agent-based discovery** mode collects a richer set of data
than agentless discovery by using Amazon software, the AWS Application
Discovery Agent, which you install on one or more hosts in your data
center. The agent captures infrastructure and application information,
including an inventory of installed software applications, system and
process performance, resource utilization, and network dependencies between
workloads. The information collected by agents is secured at rest and in
transit to the Application Discovery Service database in the cloud.
</li> </ul> Application Discovery Service integrates with application
discovery solutions from AWS Partner Network (APN) partners. Third-party
application discovery tools can query the Application Discovery Service and
write to the Application Discovery Service database using a public API. You
can then import the data into either a visualization tool or
cloud-migration solution.
<important> Application Discovery Service doesn't gather sensitive
information. All data is handled according to the [AWS Privacy
Policy](http://aws.amazon.com/privacy/). You can operate Application
Discovery Service using offline mode to inspect collected data before it is
shared with the service.
</important> Your AWS account must be granted access to Application
Discovery Service, a process called *whitelisting*. This is true for AWS
partners and customers alike. To request access, sign up for the AWS
Application Discovery Service
[here](http://aws.amazon.com/application-discovery/preview/). We will send
you information about how to get started.
This API reference provides descriptions, syntax, and usage examples for
each of the actions and data types for the Application Discovery Service.
The topic for each action shows the API request parameters and the
response. Alternatively, you can use one of the AWS SDKs to access an API
that is tailored to the programming language or platform that you're using.
For more information, see [AWS SDKs](http://aws.amazon.com/tools/#SDKs).
This guide is intended for use with the [ *AWS Application Discovery
Service User Guide*
](http://docs.aws.amazon.com/application-discovery/latest/userguide/).
"""
@doc """
Associates one or more configuration items with an application.
"""
def associate_configuration_items_to_application(client, input, options \\ []) do
request(client, "AssociateConfigurationItemsToApplication", input, options)
end
@doc """
Creates an application with the given name and description.
"""
def create_application(client, input, options \\ []) do
request(client, "CreateApplication", input, options)
end
@doc """
Creates one or more tags for configuration items. Tags are metadata that
help you categorize IT assets. This API accepts a list of multiple
configuration items.
"""
def create_tags(client, input, options \\ []) do
request(client, "CreateTags", input, options)
end
@doc """
Deletes a list of applications and their associations with configuration
items.
"""
def delete_applications(client, input, options \\ []) do
request(client, "DeleteApplications", input, options)
end
@doc """
Deletes the association between configuration items and one or more tags.
This API accepts a list of multiple configuration items.
"""
def delete_tags(client, input, options \\ []) do
request(client, "DeleteTags", input, options)
end
@doc """
Lists agents or the Connector by ID or lists all agents/Connectors
associated with your user account if you did not specify an ID.
"""
def describe_agents(client, input, options \\ []) do
request(client, "DescribeAgents", input, options)
end
@doc """
Retrieves attributes for a list of configuration item IDs. All of the
supplied IDs must be for the same asset type (server, application, process,
or connection). Output fields are specific to the asset type selected. For
example, the output for a *server* configuration item includes a list of
attributes about the server, such as host name, operating system, and
number of network cards.
For a complete list of outputs for each asset type, see [Querying
Discovered Configuration
Items](http://docs.aws.amazon.com/application-discovery/latest/APIReference/querying-configuration-items.html#DescribeConfigurations).
"""
def describe_configurations(client, input, options \\ []) do
request(client, "DescribeConfigurations", input, options)
end
@doc """
Retrieves the status of a given export process. You can retrieve status
from a maximum of 100 processes.
"""
def describe_export_configurations(client, input, options \\ []) do
request(client, "DescribeExportConfigurations", input, options)
end
@doc """
Retrieves a list of configuration items that are tagged with a specific
tag. Or retrieves a list of all tags assigned to a specific configuration
item.
"""
def describe_tags(client, input, options \\ []) do
request(client, "DescribeTags", input, options)
end
@doc """
Disassociates one or more configuration items from an application.
"""
def disassociate_configuration_items_from_application(client, input, options \\ []) do
request(client, "DisassociateConfigurationItemsFromApplication", input, options)
end
@doc """
Exports all discovered configuration data to an Amazon S3 bucket or an
application that enables you to view and evaluate the data. Data includes
tags and tag associations, processes, connections, servers, and system
performance. This API returns an export ID which you can query using the
*DescribeExportConfigurations* API. The system imposes a limit of two
configuration exports in six hours.
"""
def export_configurations(client, input, options \\ []) do
request(client, "ExportConfigurations", input, options)
end
@doc """
Retrieves a short summary of discovered assets.
"""
def get_discovery_summary(client, input, options \\ []) do
request(client, "GetDiscoverySummary", input, options)
end
@doc """
Retrieves a list of configuration items according to criteria you specify
in a filter. The filter criteria identify relationship requirements.
"""
def list_configurations(client, input, options \\ []) do
request(client, "ListConfigurations", input, options)
end
@doc """
Retrieves a list of servers which are one network hop away from a specified
server.
"""
def list_server_neighbors(client, input, options \\ []) do
request(client, "ListServerNeighbors", input, options)
end
@doc """
Instructs the specified agents or Connectors to start collecting data.
"""
def start_data_collection_by_agent_ids(client, input, options \\ []) do
request(client, "StartDataCollectionByAgentIds", input, options)
end
@doc """
Instructs the specified agents or Connectors to stop collecting data.
"""
def stop_data_collection_by_agent_ids(client, input, options \\ []) do
request(client, "StopDataCollectionByAgentIds", input, options)
end
@doc """
Updates metadata about an application.
"""
def update_application(client, input, options \\ []) do
request(client, "UpdateApplication", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "discovery"}
host = get_host("discovery", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AWSPoseidonService_V2015_11_01.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
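  # Sketch of a call (the client map mirrors the fields read by request/4,
  # get_host/2 and get_url/2; a real client comes from the surrounding AWS
  # library, and the credential fields feed AWS.Request.sign_v4/5):
  #
  #     client = %{service: nil, region: "us-west-2", endpoint: "amazonaws.com",
  #                proto: "https", port: 443,
  #                access_key_id: "...", secret_access_key: "..."}
  #     AWS.Discovery.get_discovery_summary(client, %{})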
end
|
lib/aws/discovery.ex
| 0.87637
| 0.444806
|
discovery.ex
|
starcoder
|
defmodule Cocktail.Schedule do
@moduledoc """
Struct used to represent a schedule of recurring events.
Use the `new/2` function to create a new schedule, and the
`add_recurrence_rule/2` function to add rules to describe how to repeat.
Currently, Cocktail supports the following types of repeat rules:
* Weekly - Every week, relative to the schedule's start time
* Daily - Every day at the schedule's start time
* Hourly - Every hour, starting at the schedule's start time
* Minutely - Every minute, starting at the schedule's start time
* Secondly - Every second, starting at the schedule's start time
Once a schedule has been created, you can use `occurrences/2` to generate
a stream of occurrences, which are either `t:Cocktail.time/0`s or
`t:Cocktail.Span.t/0`s if a `duration` option was given to the schedule.
Various options can be given to modify the way the repeat rule and schedule
behave. See `add_recurrence_rule/3` for details on them.
"""
alias Cocktail.{Builder, Parser, Rule, ScheduleState}
@typedoc """
Struct used to represent a schedule of recurring events.
This type should be considered opaque, so its fields shouldn't be modified
directly. Instead, use the functions provided in this module to create and
manipulate schedules.
## Fields:
* `:start_time` - The schedule's start time
* `:duration` - The duration of each occurrence (in seconds)
"""
@type t :: %__MODULE__{
recurrence_rules: [Rule.t()],
recurrence_times: [Cocktail.time()],
exception_times: [Cocktail.time()],
start_time: Cocktail.time(),
duration: pos_integer | nil
}
@enforce_keys [:start_time]
defstruct recurrence_rules: [],
recurrence_times: [],
exception_times: [],
start_time: nil,
duration: nil
@doc """
Creates a new schedule using the given start time and options.
This schedule will be empty and needs recurrence rules added to it before it is useful.
Use `add_recurrence_rule/3` to add rules to a schedule.
## Options
* `:duration` - The duration of each event in the schedule (in seconds).
## Examples
iex> new(~N[2017-01-01 06:00:00], duration: 3_600)
#Cocktail.Schedule<>
"""
@spec new(Cocktail.time(), Cocktail.schedule_options()) :: t
def new(start_time, options \\ []) do
%__MODULE__{
start_time: no_ms(start_time),
duration: options[:duration]
}
end
@doc false
@spec set_start_time(t, Cocktail.time()) :: t
def set_start_time(schedule, start_time), do: %{schedule | start_time: no_ms(start_time)}
@doc false
@spec set_duration(t, pos_integer) :: t
def set_duration(schedule, duration), do: %{schedule | duration: duration}
@doc false
@spec set_end_time(t, Cocktail.time()) :: t
def set_end_time(%__MODULE__{start_time: start_time} = schedule, end_time) do
duration = Timex.diff(end_time, start_time, :seconds)
%{schedule | duration: duration}
end
@doc false
@spec add_recurrence_rule(t, Rule.t()) :: t
def add_recurrence_rule(%__MODULE__{} = schedule, %Rule{} = rule) do
%{schedule | recurrence_rules: [rule | schedule.recurrence_rules]}
end
@doc """
Adds a recurrence rule of the given frequency to a schedule.
The frequency can be one of `:monthly`, `:weekly`, `:daily`, `:hourly`, `:minutely` or `:secondly`
## Options
* `:interval` - How often to repeat, given the frequency. For example a `:daily` rule with interval `2` would be "every other day".
  * `:count` - The number of times this rule can produce an occurrence. *(not yet supported)*
* `:until` - The end date/time after which the rule will no longer produce occurrences.
* `:days_of_month` - Restrict this rule to specific days of the month. (e.g. `[-1, 10, 31]`)
* `:days` - Restrict this rule to specific days. (e.g. `[:monday, :wednesday, :friday]`)
* `:hours` - Restrict this rule to certain hours of the day. (e.g. `[10, 12, 14]`)
* `:minutes` - Restrict this rule to certain minutes of the hour. (e.g. `[0, 15, 30, 45]`)
* `:seconds` - Restrict this rule to certain seconds of the minute. (e.g. `[0, 30]`)
## Examples
iex> start_time = ~N[2017-01-01 06:00:00]
...> start_time |> new() |> add_recurrence_rule(:daily, interval: 2, hours: [10, 14])
#Cocktail.Schedule<Every 2 days on the 10th and 14th hours of the day>
"""
@spec add_recurrence_rule(t, Cocktail.frequency(), Cocktail.rule_options()) :: t
def add_recurrence_rule(%__MODULE__{} = schedule, frequency, options \\ []) do
rule =
options
|> Keyword.put(:frequency, frequency)
|> Rule.new()
add_recurrence_rule(schedule, rule)
end
@doc """
Adds a one-off recurrence time to the schedule.
This recurrence time can be any time after (or including) the schedule's start
time. When generating occurrences from this schedule, the given time will be
included in the set of occurrences alongside any recurrence rules.
"""
@spec add_recurrence_time(t, Cocktail.time()) :: t
def add_recurrence_time(%__MODULE__{} = schedule, time),
do: %{schedule | recurrence_times: [no_ms(time) | schedule.recurrence_times]}
@doc """
Adds an exception time to the schedule.
This exception time will cancel out any occurrence generated from the
schedule's recurrence rules or recurrence times.
"""
@spec add_exception_time(t, Cocktail.time()) :: t
def add_exception_time(%__MODULE__{} = schedule, time),
do: %{schedule | exception_times: [no_ms(time) | schedule.exception_times]}
@doc """
Creates a stream of occurrences from the given schedule.
An optional `start_time` can be supplied to not start at the schedule's start time.
The occurrences that are produced by the stream can be one of several types:
* If the schedule's start time is a `t:DateTime.t/0`, then it will produce
`t:DateTime.t/0`s
  * If the schedule's start time is a `t:NaiveDateTime.t/0`, then it will
produce `t:NaiveDateTime.t/0`s
* If a duration is supplied when creating the schedule, the stream will
produce `t:Cocktail.Span.t/0`s with `:from` and `:until` fields matching
the type of the schedule's start time
## Examples
# using a NaiveDateTime
iex> start_time = ~N[2017-01-01 06:00:00]
...> schedule = start_time |> new() |> add_recurrence_rule(:daily, interval: 2, hours: [10, 14])
...> schedule |> occurrences() |> Enum.take(3)
[~N[2017-01-01 10:00:00],
~N[2017-01-01 14:00:00],
~N[2017-01-03 10:00:00]]
# using an alternate start time
iex> start_time = ~N[2017-01-01 06:00:00]
...> schedule = start_time |> new() |> add_recurrence_rule(:daily, interval: 2, hours: [10, 14])
...> schedule |> occurrences(~N[2017-10-01 06:00:00]) |> Enum.take(3)
[~N[2017-10-02 10:00:00],
~N[2017-10-02 14:00:00],
~N[2017-10-04 10:00:00]]
# using a DateTime with a time zone
iex> start_time = Timex.to_datetime(~N[2017-01-02 10:00:00], "America/Los_Angeles")
...> schedule = start_time |> new() |> add_recurrence_rule(:daily)
...> schedule |> occurrences() |> Enum.take(3) |> Enum.map(&Timex.format!(&1, "{ISO:Extended}"))
["2017-01-02T10:00:00-08:00",
"2017-01-03T10:00:00-08:00",
"2017-01-04T10:00:00-08:00"]
# using a NaiveDateTime with a duration
iex> start_time = ~N[2017-02-01 12:00:00]
...> schedule = start_time |> new(duration: 3_600) |> add_recurrence_rule(:weekly)
...> schedule |> occurrences() |> Enum.take(3)
[%Cocktail.Span{from: ~N[2017-02-01 12:00:00], until: ~N[2017-02-01 13:00:00]},
%Cocktail.Span{from: ~N[2017-02-08 12:00:00], until: ~N[2017-02-08 13:00:00]},
%Cocktail.Span{from: ~N[2017-02-15 12:00:00], until: ~N[2017-02-15 13:00:00]}]
"""
@spec occurrences(t, Cocktail.time() | nil) :: Enumerable.t()
def occurrences(%__MODULE__{} = schedule, start_time \\ nil) do
schedule
|> ScheduleState.new(no_ms(start_time))
|> Stream.unfold(&ScheduleState.next_time/1)
end
@doc """
Add an end time to all recurrence rules in the schedule.
This has the same effect as if you'd passed the `:until` option when adding
all recurrence rules to the schedule.
"""
@spec end_all_recurrence_rules(t, Cocktail.time()) :: t
def end_all_recurrence_rules(%__MODULE__{recurrence_rules: rules} = schedule, end_time),
do: %{schedule | recurrence_rules: Enum.map(rules, &Rule.set_until(&1, end_time))}
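  # Hedged sketch: schedule |> end_all_recurrence_rules(~N[2017-12-31 00:00:00])
  # rewrites every rule via Rule.set_until/2, exactly as if until: had been
  # passed to each add_recurrence_rule/3 call.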
@doc """
Parses a string in iCalendar format into a `t:Cocktail.Schedule.t/0`.
see `Cocktail.Parser.ICalendar.parse/1` for details.
"""
@spec from_i_calendar(String.t()) :: {:ok, t} | {:error, term}
def from_i_calendar(i_calendar_string), do: Parser.ICalendar.parse(i_calendar_string)
@doc """
Builds an iCalendar format string representation of a `t:Cocktail.Schedule.t/0`.
see `Cocktail.Builder.ICalendar.build/1` for details.
"""
@spec to_i_calendar(t) :: String.t()
def to_i_calendar(%__MODULE__{} = schedule), do: Builder.ICalendar.build(schedule)
@doc """
Builds a human readable string representation of a `t:Cocktail.Schedule.t/0`.
see `Cocktail.Builder.String.build/1` for details.
"""
@spec to_string(t) :: String.t()
def to_string(%__MODULE__{} = schedule), do: Builder.String.build(schedule)
@spec no_ms(Cocktail.time() | nil) :: Cocktail.time() | nil
defp no_ms(nil), do: nil
defp no_ms(time), do: %{time | microsecond: {0, 0}}
defimpl Inspect, for: __MODULE__ do
import Inspect.Algebra
def inspect(schedule, _) do
concat(["#Cocktail.Schedule<", Builder.String.build(schedule), ">"])
end
end
end
|
lib/cocktail/schedule.ex
| 0.942348
| 0.794783
|
schedule.ex
|
starcoder
|
defmodule Distancia do
@moduledoc """
Distancia is a module which provides functions that calculate distances between two points.
It can perform calculations in various metrics:
- Euclidean (`Distancia.euclidean/2`)
- Manhattan (`Distancia.manhattan/2`)
- Chebyshev (`Distancia.chebyshev/2`)
- Hamming (`Distancia.hamming/2`)
"""
@typedoc """
Point can be represented as a list, a tuple, a number or a string.
"""
@type point :: list | tuple | number | String.t()
@doc """
Calculates the Euclidean distance between points `p` and `q`.
The Euclidean distance is the most common understanding of distance - a straight line between two points in the Cartesian plane.
## Examples
iex> Distancia.euclidean(1, 2)
1.0
iex> Distancia.euclidean([2, -1, 6], [8, 5, -4])
13.114877048604
iex> Distancia.euclidean({1.34, 2}, {-3.13, 5.3})
5.556158745032398
"""
@spec euclidean(point, point) :: number
defdelegate euclidean(p, q), to: Distancia.Euclidean, as: :calculate
@doc """
Calculates the Manhattan distance between points `p` and `q`.
The Manhattan distance is the sum of the lengths of the horizontal and vertical segments connecting two points in a grid.
## Examples
iex> Distancia.manhattan(1, 2)
1
iex> Distancia.manhattan([2, -1, 6], [8, 5, -4])
22
iex> Distancia.manhattan({1.34, 2}, {-3.13, 5.3})
7.77
"""
@spec manhattan(point, point) :: number
defdelegate manhattan(p, q), to: Distancia.Manhattan, as: :calculate
@doc """
Calculates the Chebyshev distance between points `p` and `q`.
Simply speaking, the Chebyshev distance is equal to the minimum number of moves needed by a king to go from one point to another in the game of chess.
## Examples
iex> Distancia.chebyshev(1, 2)
1
iex> Distancia.chebyshev([2, -1, 6], [8, 5, -4])
10
iex> Distancia.chebyshev({1.34, 2}, {-3.13, 5.3})
4.47
"""
@spec chebyshev(point, point) :: number
defdelegate chebyshev(p, q), to: Distancia.Chebyshev, as: :calculate
@doc """
Calculates the Hamming distance between strings `p` and `q`.
The Hamming distance is the number of positions at which the two strings differ.
## Examples
iex> Distancia.hamming(1, 2)
1
iex> Distancia.hamming("This is Sparta!", "<NAME>!")
2
iex> Distancia.hamming("2173896", "2233796")
3
"""
@spec hamming(point, point) :: number
defdelegate hamming(p, q), to: Distancia.Hamming, as: :calculate
end
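# A minimal sketch of how a Hamming implementation could look for strings;
# this is an illustration under assumptions, not the actual Distancia.Hamming
# module (which is not shown in this file):
defmodule DistanciaSketch.Hamming do
  # Counts positions at which the two same-length strings differ.
  def calculate(p, q) when is_binary(p) and is_binary(q) do
    String.graphemes(p)
    |> Enum.zip(String.graphemes(q))
    |> Enum.count(fn {a, b} -> a != b end)
  end
end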
|
lib/distancia.ex
| 0.954732
| 0.825132
|
distancia.ex
|
starcoder
|
defmodule Wavex do
@moduledoc """
Read LPCM WAVE data.
"""
alias Wavex.FourCC
alias Wavex.Chunk.{
BAE,
Data,
Format,
RIFF
}
@enforce_keys [
:riff,
:format,
:data
]
defstruct [
:riff,
:format,
:data,
:bae
]
@type t :: %__MODULE__{
riff: RIFF.t(),
format: Format.t(),
data: Data.t(),
bae: BAE.t() | nil
}
@chunks_required %{
Format.four_cc() => {Format, :format},
Data.four_cc() => {Data, :data}
}
@chunks_optional %{
BAE.four_cc() => {BAE, :bae}
}
@chunks Map.merge(@chunks_optional, @chunks_required)
@spec map8(binary, (non_neg_integer -> non_neg_integer), binary) :: binary
defp map8(binary, function, acc \\ <<>>)
defp map8(<<sample, etc::binary>>, function, acc),
do: map8(etc, function, <<function.(sample)>> <> acc)
defp map8(<<>>, _, acc), do: String.reverse(acc)
@spec map16(binary, (integer -> integer), binary) :: binary
defp map16(binary, function, acc \\ <<>>)
defp map16(<<sample::16-signed-little, etc::binary>>, function, acc),
do: map16(etc, function, <<function.(sample)::16-signed-big>> <> acc)
defp map16(<<>>, _, acc), do: String.reverse(acc)
@spec map24(binary, (integer -> integer), binary) :: binary
defp map24(binary, function, acc \\ <<>>)
defp map24(<<sample::24-signed-little, etc::binary>>, function, acc),
do: map24(etc, function, <<function.(sample)::24-signed-big>> <> acc)
defp map24(<<>>, _, acc), do: String.reverse(acc)
@spec skip_chunk(binary) :: {:ok, binary} | {:error, :unexpected_eof}
defp skip_chunk(binary) when is_binary(binary) do
with <<size::32-little, etc::binary>> <- binary,
size <- round(size / 2) * 2,
<<_::binary-size(size), etc::binary>> <- etc do
{:ok, etc}
else
_ -> {:error, :unexpected_eof}
end
end
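  # Note: RIFF chunks are word-aligned — round(size / 2) * 2 above bumps an
  # odd chunk size up by one so the trailing pad byte is skipped as well.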
@spec read_chunks(binary, map) :: {:ok, map} | {:error, :unexpected_eof}
defp read_chunks(binary, chunks \\ %{})
defp read_chunks(<<>>, chunks), do: {:ok, chunks}
defp read_chunks(<<four_cc::binary-size(4), etc::binary>> = binary, chunks) do
case Map.fetch(@chunks, four_cc) do
{:ok, {module, key}} ->
with {:ok, chunk, etc} <- module.read(binary) do
read_chunks(etc, Map.put(chunks, key, chunk))
end
_ ->
with {:ok, etc} <- skip_chunk(etc) do
read_chunks(etc, chunks)
end
end
end
defp read_chunks(binary, _) when is_binary(binary), do: {:error, :unexpected_eof}
@spec verify_riff_size(non_neg_integer, binary) ::
:ok
| {:error,
{:unexpected_riff_size, %{expected: non_neg_integer, actual: non_neg_integer}}}
defp verify_riff_size(actual, binary) do
case byte_size(binary) - 0x0008 do
^actual -> :ok
expected -> {:error, {:unexpected_riff_size, %{expected: expected, actual: actual}}}
end
end
@spec verify_chunks(map) :: :ok | {:error, {:missing_chunks, [atom]}}
defp verify_chunks(chunks) do
chunks_missing =
for {_, {module, key}} <- @chunks_required, !match?(%{^key => %^module{}}, chunks) do
module
end
case chunks_missing do
[] -> :ok
missing -> {:error, {:missing_chunks, missing}}
end
end
@doc """
The duration of a wave file in seconds.
"""
@spec duration(t) :: number
def duration(%__MODULE__{
data: %Data{size: size},
format: %Format{byte_rate: byte_rate}
}) do
size / byte_rate
end
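  # Illustrative numbers (not from the source): a data chunk of 88_200 bytes
  # at a byte rate of 176_400 B/s (44.1 kHz, 16-bit, stereo) gives 0.5 s.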
@doc ~S"""
Map over the data of a `Wavex` value.
"""
@spec map(t, (integer -> integer)) :: t
def map(
%__MODULE__{
data: %Data{data: data} = data_chunk,
format: %Format{bits_per_sample: bits_per_sample}
} = wave,
function
) do
data =
case bits_per_sample do
0x08 -> map8(data, function)
0x10 -> map16(data, function)
0x18 -> map24(data, function)
end
%__MODULE__{wave | data: %Data{data_chunk | data: data}}
end
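  # Hedged usage sketch: apply a function to every sample, e.g.
  #
  #     Wavex.map(wave, &(-&1))
  #
  # to invert the phase of a previously read 16-bit %Wavex{} value `wave`.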
@doc ~S"""
Read LPCM WAVE data.
"""
@spec read(binary) ::
{:ok, t}
| {:error,
:unexpected_eof
| :zero_channels
| {:missing_chunks, [atom]}
| {:unexpected_block_align, %{expected: non_neg_integer, actual: non_neg_integer}}
| {:unexpected_byte_rate, %{expected: non_neg_integer, actual: non_neg_integer}}
| {:unexpected_format_size, non_neg_integer}
| {:unexpected_four_cc, %{actual: FourCC.t(), expected: FourCC.t()}}
| {:unsupported_bae_version, non_neg_integer}
| {:unsupported_bits_per_sample, non_neg_integer}
| {:unsupported_format, non_neg_integer}
| {:unreadable_date, BAE.date_binary()}
| {:unreadable_time, BAE.time_binary()}}
def read(binary) when is_binary(binary) do
with {:ok, %RIFF{size: riff_size} = riff, etc} <- RIFF.read(binary),
:ok <- verify_riff_size(riff_size, binary),
{:ok, chunks} <- read_chunks(etc),
:ok <- verify_chunks(chunks) do
{:ok, struct(Wavex, Map.put(chunks, :riff, riff))}
end
end
end
|
lib/wavex.ex
| 0.778944
| 0.57332
|
wavex.ex
|
starcoder
|
defmodule Ecto.Adapters.SQL do
@moduledoc """
Behaviour and implementation for SQL adapters.
The implementation for SQL adapters provides a
pool-based implementation of SQL and also exposes
a query function to developers.
Developers that use `Ecto.Adapters.SQL` should implement
a connection module with specifics on how to connect
to the database and also how to translate the queries
to SQL.
See `Ecto.Adapters.Connection` for connection processes and
`Ecto.Adapters.SQL.Query` for the query semantics.
"""
@doc false
defmacro __using__(adapter) do
quote do
@behaviour Ecto.Adapter
@behaviour Ecto.Adapter.Migration
@behaviour Ecto.Adapter.Transaction
@conn __MODULE__.Connection
@adapter unquote(adapter)
## Worker
@doc false
defmacro __before_compile__(_env) do
:ok
end
@doc false
def start_link(repo, opts) do
{:ok, _} = Application.ensure_all_started(@adapter)
Ecto.Adapters.SQL.start_link(@conn, @adapter, repo, opts)
end
## Query
@doc false
def id_types(_repo) do
%{binary_id: Ecto.UUID}
end
@doc false
def all(repo, query, params, opts) do
Ecto.Adapters.SQL.all(repo, @conn.all(query), query, params, id_types(repo), opts)
end
@doc false
def update_all(repo, query, params, opts) do
Ecto.Adapters.SQL.count_all(repo, @conn.update_all(query), params, opts)
end
@doc false
def delete_all(repo, query, params, opts) do
Ecto.Adapters.SQL.count_all(repo, @conn.delete_all(query), params, opts)
end
@doc false
# Nil ids are generated in the database.
def insert(repo, source, params, {key, :id, nil}, returning, opts) do
insert(repo, source, params, nil, [key|returning], opts)
end
# Nil binary_ids are generated in the adapter.
def insert(repo, source, params, {key, :binary_id, nil}, returning, opts) do
{req, resp} = Ecto.Adapters.SQL.bingenerate(key, id_types(repo))
case insert(repo, source, req ++ params, nil, returning, opts) do
{:ok, values} -> {:ok, resp ++ values}
{:error, _} = err -> err
end
end
def insert(repo, source, params, _autogenerate, returning, opts) do
{fields, values} = :lists.unzip(params)
sql = @conn.insert(source, fields, returning)
Ecto.Adapters.SQL.model(repo, sql, values, returning, opts)
end
@doc false
def update(repo, source, fields, filter, _autogenerate, returning, opts) do
{fields, values1} = :lists.unzip(fields)
{filter, values2} = :lists.unzip(filter)
sql = @conn.update(source, fields, filter, returning)
Ecto.Adapters.SQL.model(repo, sql, values1 ++ values2, returning, opts)
end
@doc false
def delete(repo, source, filter, _autogenerate, opts) do
{filter, values} = :lists.unzip(filter)
Ecto.Adapters.SQL.model(repo, @conn.delete(source, filter, []), values, [], opts)
end
## Transaction
@doc false
def transaction(repo, opts, fun) do
Ecto.Adapters.SQL.transaction(repo, opts, fun)
end
@doc false
def rollback(_repo, value) do
throw {:ecto_rollback, value}
end
## Migration
@doc false
def execute_ddl(repo, definition, opts) do
sql = @conn.execute_ddl(definition)
Ecto.Adapters.SQL.query(repo, sql, [], opts)
:ok
end
@doc false
def ddl_exists?(repo, object, opts) do
sql = @conn.ddl_exists(object)
%{rows: [{count}]} = Ecto.Adapters.SQL.query(repo, sql, [], opts)
count > 0
end
defoverridable [all: 4, update_all: 4, delete_all: 4,
insert: 6, update: 7, delete: 5,
execute_ddl: 3, ddl_exists?: 3]
end
end
alias Ecto.Adapters.Pool
alias Ecto.Adapters.SQL.Sandbox
@doc """
Runs custom SQL query on given repo.
In case of success, it must return an `:ok` tuple containing
a map with at least two keys:
* `:num_rows` - the number of rows affected
* `:rows` - the result set as a list. `nil` may be returned
instead of the list if the command does not yield any row
as result (but still yields the number of affected rows,
like a `delete` command without returning would)
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000)
* `:log` - When false, does not log the query
## Examples
iex> Ecto.Adapters.SQL.query(MyRepo, "SELECT $1 + $2", [40, 2])
%{rows: [{42}], num_rows: 1}
"""
@spec query(Ecto.Repo.t, String.t, [term], Keyword.t) ::
%{rows: nil | [tuple], num_rows: non_neg_integer} | no_return
def query(repo, sql, params, opts \\ []) do
case query(repo, sql, params, nil, opts) do
{{:ok, result}, entry} ->
log(repo, entry)
result
{{:error, err}, entry} ->
log(repo, entry)
raise err
:noconnect ->
# :noconnect can never be the reason a call fails because
# it is converted to {:nodedown, node}. This means the exit
# reason can be easily identified.
exit({:noconnect, {__MODULE__, :query, [repo, sql, params, opts]}})
end
end
defp query(repo, sql, params, outer_queue_time, opts) do
{pool_mod, pool, timeout} = repo.__pool__
opts = Keyword.put_new(opts, :timeout, timeout)
timeout = Keyword.fetch!(opts, :timeout)
log? = Keyword.get(opts, :log, true)
query_fun = fn({mod, conn}, inner_queue_time) ->
query(mod, conn, inner_queue_time || outer_queue_time, sql, params, log?, opts)
end
case Pool.run(pool_mod, pool, timeout, query_fun) do
{:ok, result} ->
result
{:error, :noconnect} ->
:noconnect
{:error, :noproc} ->
raise ArgumentError, "repo #{inspect repo} is not started, " <>
"please ensure it is part of your supervision tree"
end
end
defp query(mod, conn, _queue_time, sql, params, false, opts) do
{mod.query(conn, sql, params, opts), nil}
end
defp query(mod, conn, queue_time, sql, params, true, opts) do
{query_time, res} = :timer.tc(mod, :query, [conn, sql, params, opts])
entry = %Ecto.LogEntry{query: sql, params: params, result: res,
query_time: query_time, queue_time: queue_time,
connection_pid: conn}
{res, entry}
end
defp log(_repo, nil), do: :ok
defp log(repo, entry), do: repo.log(entry)
@doc ~S"""
Starts a transaction for test.
This function works by starting a transaction and storing the connection
back in the pool with an open transaction. On every test, we restart
the test transaction rolling back to the appropriate savepoint.
**IMPORTANT:** Test transactions only work if the connection pool is
`Ecto.Adapters.SQL.Sandbox`
## Example
The first step is to configure your database to use the
`Ecto.Adapters.SQL.Sandbox` pool. You set those options in your
`config/config.exs`:
config :my_app, Repo,
pool: Ecto.Adapters.SQL.Sandbox
Since you don't want those options in your production database, we
typically recommend to create a `config/test.exs` and add the
following to the bottom of your `config/config.exs` file:
import_config "config/#{Mix.env}.exs"
Now with the test database properly configured, you can write
transactional tests:
# At the end of your test_helper.exs
# From now, all tests happen inside a transaction
Ecto.Adapters.SQL.begin_test_transaction(TestRepo)
defmodule PostTest do
# Tests that use the shared repository cannot be async
use ExUnit.Case
setup do
# Go back to a clean slate at the beginning of every test
Ecto.Adapters.SQL.restart_test_transaction(TestRepo)
:ok
end
test "create comment" do
assert %Post{} = TestRepo.insert!(%Post{})
end
end
In some cases, you may want to start the test transaction only
for specific tests and then roll it back. You can do it as:
defmodule PostTest do
# Tests that use the shared repository cannot be async
use ExUnit.Case
setup_all do
# Wrap this case in a transaction
Ecto.Adapters.SQL.begin_test_transaction(TestRepo)
# Roll it back once we are done
on_exit fn ->
Ecto.Adapters.SQL.rollback_test_transaction(TestRepo)
end
:ok
end
setup do
# Go back to a clean slate at the beginning of every test
Ecto.Adapters.SQL.restart_test_transaction(TestRepo)
:ok
end
test "create comment" do
assert %Post{} = TestRepo.insert!(%Post{})
end
end
"""
@spec begin_test_transaction(Ecto.Repo.t, Keyword.t) :: :ok
def begin_test_transaction(repo, opts \\ []) do
test_transaction(:begin, repo, opts)
end
@doc """
Restarts a test transaction, see `begin_test_transaction/2`.
"""
@spec restart_test_transaction(Ecto.Repo.t, Keyword.t) :: :ok
def restart_test_transaction(repo, opts \\ []) do
test_transaction(:restart, repo, opts)
end
@spec rollback_test_transaction(Ecto.Repo.t, Keyword.t) :: :ok
def rollback_test_transaction(repo, opts \\ []) do
test_transaction(:rollback, repo, opts)
end
defp test_transaction(fun, repo, opts) do
case repo.__pool__ do
{Sandbox, pool, timeout} ->
opts = Keyword.put_new(opts, :timeout, timeout)
test_transaction(pool, fun, &repo.log/1, opts)
{pool_mod, _, _} ->
raise """
cannot #{fun} test transaction with pool #{inspect pool_mod}.
In order to use test transactions with Ecto SQL, you need to
configure your repository to use #{inspect Sandbox}:
pool: #{inspect Sandbox}
"""
end
end
defp test_transaction(pool, fun, log, opts) do
timeout = Keyword.fetch!(opts, :timeout)
case apply(Sandbox, fun, [pool, log, opts, timeout]) do
:ok ->
:ok
{:error, :sandbox} when fun == :begin ->
raise "cannot begin test transaction because we are already inside one"
end
end
## Worker
@doc false
def start_link(connection, adapter, repo, opts) do
unless Code.ensure_loaded?(connection) do
raise """
could not find #{inspect connection}.
Please verify you have added #{inspect adapter} as a dependency:
{#{inspect adapter}, ">= 0.0.0"}
And remember to recompile Ecto afterwards by cleaning the current build:
mix deps.clean ecto
"""
end
{default_pool_mod, default_pool_name, _} = repo.__pool__
pool_mod = Keyword.get(opts, :pool, default_pool_mod)
opts = opts
|> Keyword.put(:timeout, Keyword.get(opts, :connect_timeout, 5000))
|> Keyword.put_new(:name, default_pool_name)
|> Keyword.put_new(:size, 10)
pool_mod.start_link(connection, opts)
end
## Query
@doc false
def bingenerate(key, id_types) do
%{binary_id: binary_id} = id_types
{:ok, value} = binary_id.dump(binary_id.generate)
{[{key, value}], [{key, unwrap(value)}]}
end
defp unwrap(%Ecto.Query.Tagged{value: value}), do: value
defp unwrap(value), do: value
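  # Shape sketch (the UUID value is hypothetical): with id_types including
  # binary_id: Ecto.UUID, bingenerate(:id, id_types) returns
  # {[id: dumped_uuid], [id: "601d74e4-..."]} — the first list goes into the
  # insert statement, the second is merged back into the returned values.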
@doc false
def all(repo, sql, query, params, id_types, opts) do
%{rows: rows} = query(repo, sql, params, opts)
fields = extract_fields(query.select.fields, query.sources)
Enum.map(rows, &process_row(&1, fields, id_types))
end
@doc false
def count_all(repo, sql, params, opts) do
%{num_rows: num} = query(repo, sql, params, opts)
{num, nil}
end
@doc false
def model(repo, sql, values, returning, opts) do
case query(repo, sql, values, opts) do
%{rows: nil, num_rows: 1} ->
{:ok, []}
%{rows: [values], num_rows: 1} ->
{:ok, Enum.zip(returning, Tuple.to_list(values))}
%{num_rows: 0} ->
{:error, :stale}
end
end
defp extract_fields(fields, sources) do
Enum.map fields, fn
{:&, _, [idx]} ->
{_source, model} = pair = elem(sources, idx)
{length(model.__schema__(:fields)), pair}
_ ->
{1, nil}
end
end
defp process_row(row, fields, id_types) do
Enum.map_reduce(fields, 0, fn
{1, nil}, idx ->
{elem(row, idx), idx + 1}
{count, {source, model}}, idx ->
if all_nil?(row, idx, count) do
{nil, idx + count}
else
{model.__schema__(:load, source, idx, row, id_types), idx + count}
end
end) |> elem(0)
end
defp all_nil?(_tuple, _idx, 0), do: true
defp all_nil?(tuple, idx, _count) when elem(tuple, idx) != nil, do: false
defp all_nil?(tuple, idx, count), do: all_nil?(tuple, idx + 1, count - 1)
## Transactions
@doc false
def transaction(repo, opts, fun) do
{pool_mod, pool, timeout} = repo.__pool__
opts = Keyword.put_new(opts, :timeout, timeout)
timeout = Keyword.fetch!(opts, :timeout)
trans_fun = fn(ref, {mod, _conn}, depth, queue_time) ->
mode = transaction_mode(pool_mod, pool, timeout)
transaction(repo, ref, mod, mode, depth, queue_time, timeout, opts, fun)
end
case Pool.transaction(pool_mod, pool, timeout, trans_fun) do
{:ok, {{:return, result}, entry}} ->
log(repo, entry)
result
{:ok, {{:raise, class, reason, stack}, entry}} ->
log(repo, entry)
:erlang.raise(class, reason, stack)
{:ok, {{:error, err}, entry}} ->
log(repo, entry)
raise err
{:ok, :noconnect} ->
exit({:noconnect, {__MODULE__, :transaction, [repo, opts, fun]}})
{:error, :noproc} ->
raise ArgumentError, "repo #{inspect repo} is not started, " <>
"please ensure it is part of your supervision tree"
end
end
defp transaction_mode(Sandbox, pool, timeout), do: Sandbox.mode(pool, timeout)
defp transaction_mode(_, _, _), do: :raw
defp transaction(repo, ref, mod, mode, depth, queue_time, timeout, opts, fun) do
case begin(repo, mod, mode, depth, queue_time, opts) do
{{:ok, _}, entry} ->
try do
log(repo, entry)
value = fun.()
commit(repo, ref, mod, mode, depth, timeout, opts, {:return, {:ok, value}})
catch
:throw, {:ecto_rollback, value} ->
res = {:return, {:error, value}}
rollback(repo, ref, mod, mode, depth, nil, timeout, opts, res)
class, reason ->
stack = System.stacktrace()
res = {:raise, class, reason, stack}
rollback(repo, ref, mod, mode, depth, nil, timeout, opts, res)
end
{{:error, _err}, _entry} = error ->
Pool.break(ref, timeout)
error
:noconnect ->
:noconnect
end
end
defp begin(repo, mod, mode, depth, queue_time, opts) do
sql = begin_sql(mod, mode, depth)
query(repo, sql, [], queue_time, opts)
end
defp begin_sql(mod, :raw, 1), do: mod.begin_transaction
defp begin_sql(mod, :raw, :sandbox), do: mod.savepoint "ecto_sandbox"
defp begin_sql(mod, _, depth), do: mod.savepoint "ecto_#{depth}"
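  # Sketch of the SQL this typically produces (assuming a standard SQL
  # connection module): depth 1 issues BEGIN/COMMIT/ROLLBACK, while a nested
  # transaction at depth 2 issues SAVEPOINT ecto_2 and, on failure,
  # ROLLBACK TO SAVEPOINT ecto_2.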
defp commit(repo, ref, mod, :raw, 1, timeout, opts, result) do
case query(repo, mod.commit, [], nil, opts) do
{{:ok, _}, entry} ->
{result, entry}
{{:error, _}, _entry} = error ->
Pool.break(ref, timeout)
error
:noconnect ->
{result, nil}
end
end
defp commit(_repo, _ref, _mod, _mode, _depth, _timeout, _opts, result) do
{result, nil}
end
defp rollback(repo, ref, mod, mode, depth, queue_time, timeout, opts, result) do
sql = rollback_sql(mod, mode, depth)
case query(repo, sql, [], queue_time, opts) do
{{:ok, _}, entry} ->
{result, entry}
{{:error, _}, _entry} = error ->
Pool.break(ref, timeout)
error
:noconnect ->
{result, nil}
end
end
defp rollback_sql(mod, :raw, 1), do: mod.rollback
defp rollback_sql(mod, :sandbox, :sandbox) do
mod.rollback_to_savepoint "ecto_sandbox"
end
defp rollback_sql(mod, _, depth) do
mod.rollback_to_savepoint "ecto_#{depth}"
end
end
|
lib/ecto/adapters/sql.ex
| 0.861815
| 0.506836
|
sql.ex
|
starcoder
|
defmodule Day3 do
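  @moduledoc """
  Appears to solve Advent of Code 2021, Day 3 ("Binary Diagnostic"):
  `solve/1` returns the `{gamma, epsilon}` rate pair decoded from the
  diagnostic report, and `solve2/1` returns the `{oxygen, co2}`
  life-support rating pair.
  """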
def solve(input) do
normalized =
input
|> String.trim()
|> String.split("\n")
|> normalize([], [])
[first | _] = normalized
Enum.reduce(0..(Enum.count(first) - 1), [], fn position, acc ->
[count(normalized, position, {0, 0}) | acc]
end)
|> Enum.reverse()
|> evaluate(<<>>, <<>>)
end
def solve2(input) do
normalized =
input
|> String.trim()
|> String.split("\n")
|> normalize([], [])
[first | _] = normalized
width = Enum.count(first) - 1
[[o2]] =
Enum.reduce(0..width, normalized, fn
_, {:break, a} ->
{:break, a}
position, acc ->
{zeros, ones} = count(acc, position, {0, 0})
check = if ones >= zeros, do: 1, else: 0
a =
Enum.filter(acc, fn item ->
Enum.at(item, position) == check
end)
if Enum.count(a) == 1, do: {:break, a}, else: a
end)
|> case do
{:break, a} -> [a]
a -> a
end
[[co2]] =
Enum.reduce(0..width, normalized, fn
_, {:break, a} ->
{:break, a}
position, acc ->
{zeros, ones} = count(acc, position, {0, 0})
check =
case {zeros, ones} do
{zeros, ones} when zeros < ones -> 0
{zeros, ones} when ones < zeros -> 1
{zeros, ones} when zeros == ones -> 0
_ -> 1
end
a =
Enum.filter(acc, fn item ->
Enum.at(item, position) == check
end)
if Enum.count(a) == 1, do: {:break, a}, else: a
end)
|> case do
{:break, a} -> [a]
a -> a
end
{Enum.reduce(o2, <<>>, fn v, a -> a <> "#{v}" end) |> String.to_integer(2), Enum.reduce(co2, <<>>, fn v, a -> a <> "#{v}" end) |> String.to_integer(2)}
end
## part1
def evaluate([{zero, one} | rest], g, e) when zero > one do
evaluate(rest, g <> "0", e <> "1")
end
def evaluate([{zero, one} | rest], g, e) when zero < one do
evaluate(rest, g <> "1", e <> "0")
end
def evaluate([], g, e), do: {String.to_integer(g, 2), String.to_integer(e, 2)}
def count([item | rest], position, {zeros, ones}) do
case Enum.at(item, position) do
0 -> count(rest, position, {zeros + 1, ones})
1 -> count(rest, position, {zeros, ones + 1})
end
end
def count([], _, {zeros, ones}), do: {zeros, ones}
def normalize([<<"0", chars::binary>> | rest], current, acc) do
normalize([chars | rest], [0 | current], acc)
end
def normalize([<<"1", chars::binary>> | rest], current, acc) do
normalize([chars | rest], [1 | current], acc)
end
def normalize([<<>> | rest], current, acc) do
normalize(rest, [], [Enum.reverse(current) | acc])
end
def normalize([], [], acc) do
Enum.reverse(acc)
end
end
|
lib/day3.ex
| 0.650467
| 0.512876
|
day3.ex
|
starcoder
|
defmodule Logi.Condition do
@moduledoc "Sink Applicable Condition."
@typedoc "The condition to determine which messages to be consumed by a sink."
@type condition :: severity_condition | location_condition
@typedoc """
Condition based on the specified severity pattern.
### min
- The messages with `min` or higher severity will be consumed.
### {min, max}
- The messages with severity between `min` and `max` will be consumed.
### severities
- The messages with severity included in `severities` will be consumed.
## Examples
```elixir
iex> [:emergency, :alert] = Logi.Condition.normalize(:alert) # level
iex> [:warning, :notice, :info] = Logi.Condition.normalize({:info, :warning}) # range
iex> [:alert, :debug, :info] = Logi.Condition.normalize([:debug, :info, :alert]) # list
```
"""
@type severity_condition ::
min :: Logi.severity |
{min :: Logi.severity, max :: Logi.severity} |
severities :: [Logi.severity]
@typedoc """
The messages which satisfy `severity` (default is `debug`) and are sent from the specified location will be consumed.
The location is specified by `application` and `module` (OR condition).
NOTE: Modules that do not belong to any application are forbidden.
## Examples
```elixir
iex> Logi.Condition.is_condition(%{:application => :stdlib}) # application
iex> Logi.Condition.is_condition(%{:application => [:stdlib, :kernel]}) # applications
iex> Logi.Condition.is_condition(%{:module => :lists}) # module
iex> Logi.Condition.is_condition(%{:module => [:lists, :dict]}) # modules
iex> Logi.Condition.is_condition(%{:application => :kernel, :module => [:lists, :dict]}) # application and modules
iex> Logi.Condition.is_condition(%{:severity => [:info, :alert], :module => :lists}) # severity and module
```
"""
@type location_condition :: %{
:severity => severity_condition,
:application => Logi.Location.application | [Logi.Location.application],
:module => module | [module]
}
@typedoc """
The normalized form of a `t:condition/0`.
## Examples
```elixir
iex> normalize = fn (c) -> :lists.sort(Logi.Condition.normalize c) end
iex> normalize.(:info)
[:alert, :critical, :emergency, :error, :info, :notice, :warning]
iex> normalize.({:info, :alert})
[:alert, :critical, :error, :info, :notice, :warning]
iex> normalize.(%{:severity => [:info], :application => [:kernel, :stdlib]})
[info: :kernel, info: :stdlib]
iex> normalize.(%{:severity => [:info], :module => [:lists, Logi]})
[{:info, :logi_ex, Logi}, {:info, :stdlib, :lists}]
iex> normalize.(%{:severity => [:info], :application => :kernel, :module => [:lists, Logi]})
[{:info, :kernel}, {:info, :logi_ex, Logi}, {:info, :stdlib, :lists}]
```
"""
@type normalized_condition :: [
Logi.severity |
{Logi.severity, Logi.Location.application} |
{Logi.severity, Logi.Location.application, module}
]
@doc "Returns `true` if `x` is a valid `t:condition/0` value, otherwise `false`."
@spec condition?(any) :: boolean
def condition?(x) do
:logi_condition.is_condition x
end
@doc "Returns a normalized form of `condition`."
@spec normalize(condition) :: normalized_condition
def normalize(condition) do
:logi_condition.normalize condition
end
end
|
lib/logi/condition.ex
| 0.885074
| 0.849535
|
condition.ex
|
starcoder
|
defmodule ContexSampleWeb.ScalesLive do
use Phoenix.LiveView
use Phoenix.HTML
alias Contex.{Axis, Scale, TimeScale, ContinuousLinearScale}
def render(assigns) do
~L"""
<h3>Fun With Scales</h3>
<div class="container">
<div class="row">
<div class="column">
<table>
<%= for scale <- @scales do %>
<tr>
<td><%= scale.title %></td>
<td style="text-align:center;"><%= plot_axis(scale) %></td>
</tr>
<% end %>
</table>
</div>
</div>
</div>
"""
end
def mount(_params, _session, socket) do
socket =
socket
|> make_test_scales()
{:ok, socket}
end
defp plot_axis(%{scale: scale}=details) do
formatter = details[:formatter]
scale = if formatter, do: %{scale | custom_tick_formatter: formatter}, else: scale
axis = Axis.new_bottom_axis(scale)
rotation = details[:rotation]
axis = if rotation, do: %{axis | rotation: rotation}, else: axis
height = if rotation, do: 60, else: 20
{d_min, d_max} = scale.domain
output =
~s"""
<small style="color:#aa3333">Domain: #{d_min}</small> → <small style="color:#aa3333">#{d_max}</small>
<svg height="#{height}" width="600" viewBox="-50 0 550 #{height}" >
#{Axis.to_svg(axis)}
</svg>
"""
{:safe, [output]}
end
defp make_test_scales(socket) do
scales = [
%{title: "Time: Five seconds", scale: make_time_scale(~N[2020-01-01 13:00:00], ~N[2020-01-01 13:00:05])},
%{title: "Time: Ten seconds", scale: make_time_scale(~N[2020-01-01 13:00:01], ~N[2020-01-01 13:00:10])},
%{title: "Time: Five minutes", scale: make_time_scale(~N[2020-01-01 13:12:00], ~N[2020-01-01 13:17:00])},
%{title: "Time: Five minutes, hour rollover", scale: make_time_scale(~N[2020-01-01 13:58:00], ~N[2020-01-01 14:03:00])},
%{title: "Time: Ten minutes", scale: make_time_scale(~N[2020-01-01 13:00:00], ~N[2020-01-01 13:10:00])},
%{title: "Time: Five days", scale: make_time_scale(~N[2020-01-01 13:00:00], ~N[2020-01-05 13:00:00]), rotation: 45},
%{title: "Time: Ten days", scale: make_time_scale(~N[2020-01-01 13:00:00], ~N[2020-01-10 13:00:00])},
%{title: "Time: One month", scale: make_time_scale(~N[2020-01-01 13:00:00], ~N[2020-02-01 13:00:00])},
%{title: "Time: Five months", scale: make_time_scale(~N[2020-01-01 13:00:00], ~N[2020-05-01 13:00:00])},
%{title: "Time: One year", scale: make_time_scale(~N[2019-01-01 13:00:00], ~N[2020-01-01 13:00:00])},
%{title: "Time: Five years", scale: make_time_scale(~N[2019-01-01 13:00:00], ~N[2024-01-01 13:00:00])},
%{title: "Time: Ten years", scale: make_time_scale(~N[2019-01-01 13:00:00], ~N[2029-01-01 13:00:00])},
%{title: "Number: Tiny numbers", scale: make_linear_scale(0.000001, 0.000007), rotation: 45},
%{title: "Number: Tiny numbers II", scale: make_linear_scale(0.000001, 0.0000111), rotation: 45},
%{title: "Number: Big numbers", scale: make_linear_scale(1_000_000, 11_000_000), rotation: 45},
%{title: "Number: Big numbers, more intervals", scale: make_linear_scale(0, 13_000_000, 40), rotation: 45},
%{title: "Number: Custom formatter", scale: make_linear_scale(1_000_000, 50_000_000), formatter: &million_formatter/1},
]
assign(socket, scales: scales)
end
defp make_time_scale(d1, d2) do
TimeScale.new() |> TimeScale.domain(d1, d2) |> Scale.set_range(0.0, 450.0)
end
defp make_linear_scale(d1, d2, intervals \\ 0) do
ContinuousLinearScale.new()
|> ContinuousLinearScale.domain(d1, d2)
|> Scale.set_range(0.0, 450.0)
|> ContinuousLinearScale.interval_count(intervals)
end
defp million_formatter(value) when is_number(value), do: "#{:erlang.float_to_binary(value/1_000_000.0, [decimals: 0])}M"
end
|
lib/contexsample_web/live/scales.ex
| 0.711932
| 0.464051
|
scales.ex
|
starcoder
|
defmodule AshCsv.DataLayer do
@behaviour Ash.DataLayer
alias Ash.Actions.Sort
alias Ash.Dsl.Extension
@impl true
def can?(_, :read), do: true
def can?(_, :create), do: true
def can?(_, :update), do: true
def can?(_, :destroy), do: true
def can?(_, :sort), do: true
def can?(_, :filter), do: true
def can?(_, :limit), do: true
def can?(_, :offset), do: true
def can?(_, :boolean_filter), do: true
def can?(_, :transact), do: true
def can?(_, :delete_with_query), do: false
def can?(_, {:filter_expr, _}), do: true
def can?(_, {:sort, _}), do: true
def can?(_, _), do: false
@csv %Ash.Dsl.Section{
name: :csv,
examples: [
"""
csv do
file "priv/data/tags.csv"
create? true
header? true
separator '-'
columns [:id, :name]
end
"""
],
schema: [
file: [
type: :string,
doc: "The file to read the data from",
required: true
],
create?: [
type: :boolean,
doc:
"Whether or not the file should be created if it does not exist (this will only happen on writes)",
default: false
],
header?: [
type: :boolean,
default: false,
doc: "If the csv file has a header that should be skipped"
],
separator: [
type: {:custom, __MODULE__, :separator_opt, []},
default: ?,,
doc: "The separator to use, defaults to a comma. Pass in a character (not a string)."
],
columns: [
type: {:custom, __MODULE__, :columns_opt, []},
doc: "The order that the attributes appear in the columns of the CSV"
]
]
}
def file(resource) do
resource
|> Extension.get_opt([:csv], :file, "", true)
|> Path.expand(File.cwd!())
end
def columns(resource) do
Extension.get_opt(resource, [:csv], :columns, [], true)
end
def separator(resource) do
Extension.get_opt(resource, [:csv], :separator, nil, true)
end
def header?(resource) do
Extension.get_opt(resource, [:csv], :header?, nil, true)
end
def create?(resource) do
Extension.get_opt(resource, [:csv], :create?, nil, true)
end
@impl true
def limit(query, limit, _), do: {:ok, %{query | limit: limit}}
@impl true
def offset(query, offset, _), do: {:ok, %{query | offset: offset}}
@impl true
def filter(query, filter, _resource) do
{:ok, %{query | filter: filter}}
end
@impl true
def sort(query, sort, _resource) do
{:ok, %{query | sort: sort}}
end
@doc false
def columns_opt(columns) do
if Enum.all?(columns, &is_atom/1) do
{:ok, columns}
else
{:error, "Expected all columns to be atoms"}
end
end
@doc false
def separator_opt(val) when is_integer(val) do
{:ok, val}
end
def separator_opt(val) do
{:error, "Expected a character for separator, got #{val}"}
end
@sections [@csv]
@moduledoc """
The data layer implementation for AshCsv
# Table of Contents
#{Ash.Dsl.Extension.doc_index(@sections)}
#{Ash.Dsl.Extension.doc(@sections)}
"""
use Extension, sections: @sections
defmodule Query do
@moduledoc false
defstruct [:resource, :sort, :filter, :limit, :offset]
end
@impl true
def run_query(query, resource) do
case read_file(resource) do
{:ok, results} ->
offset_records =
results
|> filter_matches(query.filter)
|> Sort.runtime_sort(query.sort)
|> Enum.drop(query.offset || 0)
if query.limit do
{:ok, Enum.take(offset_records, query.limit)}
else
{:ok, offset_records}
end
{:error, error} ->
{:error, error}
end
rescue
e in File.Error ->
if create?(resource) do
{:ok, []}
else
{:error, e}
end
end
@impl true
def create(resource, changeset) do
case run_query(%Query{resource: resource}, resource) do
{:ok, records} ->
create_from_records(records, resource, changeset)
{:error, error} ->
{:error, error}
end
end
@impl true
def update(resource, changeset) do
resource
|> do_read_file()
|> do_update(resource, changeset)
end
@impl true
def destroy(resource, %{data: record}) do
resource
|> do_read_file()
|> do_destroy(resource, record)
end
defp cast_stored(resource, keys) do
Enum.reduce_while(keys, {:ok, resource.__struct__}, fn {key, value}, {:ok, record} ->
with attribute when not is_nil(attribute) <- Ash.Resource.Info.attribute(resource, key),
{:value, value} when not is_nil(value) <- {:value, stored_value(value, attribute)},
{:ok, loaded} <- Ash.Type.cast_stored(attribute.type, value) do
{:cont, {:ok, struct(record, [{key, loaded}])}}
else
{:value, nil} ->
{:cont, {:ok, struct(record, [{key, nil}])}}
nil ->
{:halt, {:error, "#{key} is not an attribute"}}
:error ->
{:halt, {:error, "#{key} could not be loaded"}}
end
end)
end
defp stored_value(value, attribute) do
if value == "" and Ash.Type.ecto_type(attribute.type) not in [:string, :uuid, :binary_id] do
nil
else
value
end
end
@impl true
def resource_to_query(resource, _) do
%Query{resource: resource}
end
@impl true
def transaction(resource, fun) do
file = file(resource)
:global.trans({{:csv, file}, System.unique_integer()}, fn ->
try do
Process.put({:csv_in_transaction, file(resource)}, true)
{:res, fun.()}
catch
{{:csv_rollback, ^file}, value} ->
{:error, value}
end
end)
|> case do
{:res, result} -> {:ok, result}
{:error, error} -> {:error, error}
:aborted -> {:error, "transaction failed"}
end
end
@impl true
def rollback(resource, error) do
throw({{:csv_rollback, file(resource)}, error})
end
@impl true
def in_transaction?(resource) do
Process.get({:csv_in_transaction, file(resource)}, false) == true
end
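  # Hedged usage sketch (MyApp.Tag is a hypothetical resource backed by this
  # data layer); :global.trans/2 keyed on the file path serializes concurrent
  # writers to the same CSV:
  #
  #     AshCsv.DataLayer.transaction(MyApp.Tag, fn ->
  #       # reads and writes against the backing CSV happen here
  #     end)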
def filter_matches(records, nil), do: records
def filter_matches(records, filter) do
Enum.filter(records, &Ash.Filter.Runtime.matches?(nil, &1, filter.expression))
end
# sobelow_skip ["Traversal.FileModule"]
defp do_destroy({:ok, results}, resource, record) do
columns = columns(resource)
pkey = Ash.Resource.Info.primary_key(resource)
changeset_pkey = Map.take(record, pkey)
results
|> Enum.reduce_while({:ok, []}, fn result, {:ok, results} ->
key_vals =
columns
|> Enum.zip(result)
|> Enum.reject(fn {key, _value} ->
key == :_
end)
cast(resource, key_vals, pkey, changeset_pkey, result, results)
end)
|> case do
{:ok, rows} ->
lines =
rows
|> CSV.encode(separator: separator(resource))
|> Enum.to_list()
resource
|> file()
|> File.write(lines, [:write])
|> case do
:ok ->
:ok
{:error, error} ->
{:error, "Error while writing to CSV: #{inspect(error)}"}
end
end
end
defp do_destroy({:error, error}, _, _), do: {:error, error}
defp cast(resource, key_vals, pkey, changeset_pkey, result, results) do
case cast_stored(resource, key_vals) do
{:ok, casted} ->
if Map.take(casted, pkey) == changeset_pkey do
{:cont, {:ok, results}}
else
{:cont, {:ok, [result | results]}}
end
{:error, error} ->
{:halt, {:error, error}}
end
end
defp do_update({:error, error}, _, _) do
{:error, error}
end
# sobelow_skip ["Traversal.FileModule"]
defp do_update({:ok, results}, resource, changeset) do
columns = columns(resource)
pkey = Ash.Resource.Info.primary_key(resource)
changeset_pkey =
Enum.into(pkey, %{}, fn key ->
{key, Ash.Changeset.get_attribute(changeset, key)}
end)
results
|> Enum.reduce_while({:ok, []}, fn result, {:ok, results} ->
key_vals =
columns
|> Enum.zip(result)
|> Enum.reject(fn {key, _value} ->
key == :_
end)
dump(resource, changeset, results, result, key_vals, pkey, changeset_pkey)
end)
|> case do
{:ok, rows} ->
lines =
rows
|> CSV.encode(separator: separator(resource))
|> Enum.to_list()
resource
|> file()
|> File.write(lines, [:write])
|> case do
:ok ->
{:ok, struct(changeset.data, changeset.attributes)}
{:error, error} ->
{:error, "Error while writing to CSV: #{inspect(error)}"}
end
end
end
defp dump(resource, changeset, results, result, key_vals, pkey, changeset_pkey) do
case cast_stored(resource, key_vals) do
{:ok, casted} ->
if Map.take(casted, pkey) == changeset_pkey do
dump_row(resource, changeset, results)
else
{:cont, {:ok, [result | results]}}
end
{:error, error} ->
{:halt, {:error, error}}
end
end
defp dump_row(resource, changeset, results) do
Enum.reduce_while(Enum.reverse(columns(resource)), {:ok, []}, fn key, {:ok, row} ->
value = Ash.Changeset.get_attribute(changeset, key)
{:cont, {:ok, [to_string(value) | row]}}
end)
|> case do
{:ok, new_row} ->
{:cont, {:ok, [new_row | results]}}
{:error, error} ->
{:halt, {:error, error}}
end
end
defp read_file(resource) do
columns = columns(resource)
resource
|> do_read_file()
|> case do
{:ok, results} ->
do_cast_stored(results, columns, resource)
{:error, error} ->
{:error, error}
end
end
defp do_cast_stored(results, columns, resource) do
results
|> Enum.reduce_while({:ok, []}, fn result, {:ok, results} ->
key_vals =
columns
|> Enum.zip(result)
|> Enum.reject(fn {key, _value} ->
key == :_
end)
case cast_stored(resource, key_vals) do
{:ok, casted} -> {:cont, {:ok, [casted | results]}}
{:error, error} -> {:halt, {:error, error}}
end
end)
end
defp do_read_file(resource) do
amount_to_drop =
if header?(resource) do
1
else
0
end
resource
|> file()
|> File.stream!()
|> Stream.drop(amount_to_drop)
|> CSV.decode(separator: separator(resource))
|> Enum.reduce_while({:ok, []}, fn
{:ok, result}, {:ok, results} ->
{:cont, {:ok, [result | results]}}
{:error, error}, _ ->
{:halt, {:error, error}}
end)
end
# sobelow_skip ["Traversal.FileModule"]
defp create_from_records(records, resource, changeset, retry? \\ false) do
pkey = Ash.Resource.Info.primary_key(resource)
pkey_value = Map.take(changeset.attributes, pkey)
if Enum.any?(records, fn record -> Map.take(record, pkey) == pkey_value end) do
{:error, "Record is not unique"}
else
row =
Enum.reduce_while(columns(resource), {:ok, []}, fn key, {:ok, row} ->
value = Map.get(changeset.attributes, key)
{:cont, {:ok, [to_string(value) | row]}}
end)
case row do
{:ok, row} ->
lines =
[Enum.reverse(row)]
|> CSV.encode(separator: separator(resource))
|> Enum.to_list()
resource
|> file()
|> File.write(lines, [:append])
|> case do
:ok ->
{:ok, struct(resource, changeset.attributes)}
{:error, :enoent} when retry? ->
{:error, "Error while writing to CSV: #{inspect(:enoent)}"}
{:error, :enoent} ->
File.mkdir_p!(Path.dirname(file(resource)))
create_from_records(records, resource, changeset, true)
{:error, error} ->
{:error, "Error while writing to CSV: #{inspect(error)}"}
end
{:error, error} ->
{:error, error}
end
end
end
end
|
lib/ash_csv/data_layer.ex
| 0.750827
| 0.42316
|
data_layer.ex
|
starcoder
|
defmodule CrockfordBase32 do
@moduledoc """
The main module implements Douglas Crockford's [Base32](https://www.crockford.com/base32.html) encoding.
"""
import Bitwise, only: [bor: 2, bsl: 2]
defmacro __using__(opts \\ []) do
alias CrockfordBase32.FixedEncoding
opts = Macro.prewalk(opts, &Macro.expand(&1, __CALLER__))
bits_size = Keyword.get(opts, :bits_size)
type = Keyword.get(opts, :type, :bitstring)
if bits_size != nil do
quote do
require FixedEncoding
FixedEncoding.generate(unquote(bits_size), unquote(type))
end
end
end
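  # Hedged usage sketch: a module encoding fixed 128-bit values might be
  # defined as
  #
  #     defmodule MyFixed128 do
  #       use CrockfordBase32, bits_size: 128
  #     end
  #
  # where MyFixed128 is a hypothetical name, not part of this library.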
@doc """
Encode an integer or a binary in Crockford's Base32.
After encoding, the returned string only contains this character set (`"0123456789ABCDEFGHJKMNPQRSTVWXYZ"`, 10 digits and
22 letters, excluding `"I"`, `"L"`, `"O"` and `"U"`); if `checksum: true` is set, the last character will be one of the check
symbols (`"*~$=U"`) or one of the previous 10 digits and 22 letters.
## Example
iex> CrockfordBase32.encode(1234567)
"15NM7"
iex> CrockfordBase32.encode(1234567, checksum: true)
"15NM7S"
iex> CrockfordBase32.encode(1234567, split_size: 3)
"15N-M7"
iex> CrockfordBase32.encode(1234567, split_size: 3, checksum: true)
"15N-M7S"
## Options
* `:checksum`, optional, a boolean, defaults to `false`; if set to `true`, a check
symbol is calculated and appended to the returned string.
* `:split_size`, optional, a positive integer; if set, it is used as the chunk size for splitting
the returned string with hyphen(s).
"""
@spec encode(integer() | binary(), Keyword.t()) :: String.t()
def encode(value, opts \\ [])
def encode(value, opts) when is_integer(value) do
value
|> may_checksum(Keyword.get(opts, :checksum, false))
|> integer_to_encode()
|> may_split_by_split_size_with_hyphen(Keyword.get(opts, :split_size))
end
def encode(value, opts) when is_binary(value) do
value
|> may_checksum(Keyword.get(opts, :checksum, false))
|> bytes_to_encode()
|> may_split_by_split_size_with_hyphen(Keyword.get(opts, :split_size))
end
@doc """
Decodes an encoded string to an integer; all hyphens are removed and case is ignored.
If the encoded string carries a check symbol, `checksum: true` is required when decoding.
## Example
iex> CrockfordBase32.decode_to_integer("16JD", checksum: true)
{:ok, 1234}
iex> CrockfordBase32.decode_to_integer("16j")
{:ok, 1234}
iex> CrockfordBase32.decode_to_integer("16j*", checksum: true)
{:error, "invalid_checksum"}
## Options
* `:checksum`, optional, a boolean, defaults to `false`, meaning the encoded string is expected without a trailing check symbol;
if set to `true`, ensure the input string ends with a check symbol, otherwise `{:error, "invalid_checksum"}` is returned.
"""
@spec decode_to_integer(String.t(), Keyword.t()) :: {:ok, integer} | {:error, String.t()}
def decode_to_integer(string, opts \\ [])
def decode_to_integer(<<>>, _opts) do
error_invalid()
end
def decode_to_integer(string, opts) when is_binary(string) do
string
|> remove_hyphen()
|> may_split_with_checksum(Keyword.get(opts, :checksum, false))
|> decoding_integer()
rescue
_error ->
error_invalid()
end
@doc """
Decodes an encoded string to a binary; all hyphens are removed and case is ignored.
If the encoded string carries a check symbol, `checksum: true` is required when decoding.
## Example
iex> CrockfordBase32.decode_to_binary("C5H66")
{:ok, "abc"}
iex> CrockfordBase32.decode_to_binary("C5H66C", checksum: true)
{:ok, "abc"}
iex> CrockfordBase32.decode_to_binary("C5H66D", checksum: true)
{:error, "invalid_checksum"}
## Options
The same to the options of `decode_to_integer/2`.
"""
def decode_to_binary(string, opts \\ [])
def decode_to_binary(<<>>, _opts) do
error_invalid()
end
def decode_to_binary(string, opts) when is_binary(string) do
string
|> remove_hyphen()
|> may_split_with_checksum(Keyword.get(opts, :checksum, false))
|> decoding_string()
rescue
_error ->
error_invalid()
end
defp may_split_with_checksum(str, false), do: {str, nil}
defp may_split_with_checksum(str, true) do
String.split_at(str, -1)
end
defp remove_hyphen(str) do
String.replace(str, "-", "")
end
defp decoding_integer({str, nil}) do
{:ok, decoding_integer(str, 0)}
end
defp decoding_integer({str, <<checksum::integer-size(8)>>}) do
check_value = d(checksum)
integer = decoding_integer(str, 0)
if check_value != rem(integer, 37) do
{:error, "invalid_checksum"}
else
{:ok, integer}
end
end
defp decoding_integer(_), do: {:error, "invalid_checksum"}
defp decoding_integer(<<>>, acc), do: acc
defp decoding_integer(<<byte::integer-size(8), rest::binary>>, acc) do
acc = acc * 32 + d(byte)
decoding_integer(rest, acc)
end
defp decoding_string({str, nil}) do
decode_string(str, <<>>)
end
defp decoding_string({str, <<checksum::integer-size(8)>>}) do
with {:ok, decoded} = result <- decode_string(str, <<>>),
checksum_of_decoded <-
decoded
|> bytes_to_integer_nopadding(0)
|> calculate_checksum() do
if checksum_of_decoded != checksum do
{:error, "invalid_checksum"}
else
result
end
else
{:error, _} = error ->
error
end
end
defp integer_to_encode({value, checksum}) do
integer_to_encode(value, checksum)
end
defp integer_to_encode(0, []), do: "0"
defp integer_to_encode(0, ["0"]), do: "00"
defp integer_to_encode(0, encoded), do: to_string(encoded)
defp integer_to_encode(value, encoded) when value > 0 do
remainder = rem(value, 32)
value = div(value, 32)
integer_to_encode(value, [e(remainder) | encoded])
end
defp encode_bytes_maybe_padding(value, expected_size, checksum) do
do_encode_bytes_maybe_padding(value, expected_size, checksum)
end
defp do_encode_bytes_maybe_padding(0, _, []), do: "0"
defp do_encode_bytes_maybe_padding(0, 0, acc), do: to_string(acc)
defp do_encode_bytes_maybe_padding(0, size, acc) when size > 0 do
encode_bytes_maybe_padding(0, size - 1, [e(0) | acc])
end
defp do_encode_bytes_maybe_padding(value, size, acc) do
remainder = rem(value, 32)
value = div(value, 32)
encode_bytes_maybe_padding(value, size - 1, [e(remainder) | acc])
end
defp bytes_to_integer_nopadding(<<>>, n), do: n
defp bytes_to_integer_nopadding(<<bytes::integer-size(1)>>, n) do
bsl(n, 1) |> bor(bytes)
end
defp bytes_to_integer_nopadding(<<bytes::integer-size(2)>>, n) do
bsl(n, 2) |> bor(bytes)
end
defp bytes_to_integer_nopadding(<<bytes::integer-size(3)>>, n) do
bsl(n, 3) |> bor(bytes)
end
defp bytes_to_integer_nopadding(<<bytes::integer-size(4)>>, n) do
bsl(n, 4) |> bor(bytes)
end
defp bytes_to_integer_nopadding(<<bytes::integer-size(5), rest::bitstring>>, n) do
bytes_to_integer_nopadding(rest, bsl(n, 5) |> bor(bytes))
end
defp bytes_to_integer_with_padding(<<>>, n), do: n
defp bytes_to_integer_with_padding(<<bytes::integer-size(1)>>, n) do
bsl(n, 5) |> bor(bsl(bytes, 4))
end
defp bytes_to_integer_with_padding(<<bytes::integer-size(2)>>, n) do
bsl(n, 5) |> bor(bsl(bytes, 3))
end
defp bytes_to_integer_with_padding(<<bytes::integer-size(3)>>, n) do
bsl(n, 5) |> bor(bsl(bytes, 2))
end
defp bytes_to_integer_with_padding(<<bytes::integer-size(4)>>, n) do
bsl(n, 5) |> bor(bsl(bytes, 1))
end
defp bytes_to_integer_with_padding(<<bytes::integer-size(5), rest::bitstring>>, n) do
bytes_to_integer_with_padding(rest, bsl(n, 5) |> bor(bytes))
end
defp bytes_to_encode({bytes, checksum}) do
bytes
|> bytes_to_integer_with_padding(0)
|> encode_bytes_maybe_padding(encoded_length_of_bytes(bytes), checksum)
end
defp encoded_length_of_bytes(bytes) do
bit_size = bit_size(bytes)
base = div(bit_size, 5)
case rem(bit_size, 5) do
0 -> base
_ -> base + 1
end
end
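  # e.g. a 3-byte input like "abc" is 24 bits: div(24, 5) == 4 with remainder
  # 4, so it encodes to 5 symbols — matching the "C5H66" doctest above.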
defp may_checksum(input, true) when is_integer(input) do
{input, [<<calculate_checksum(input)::integer>>]}
end
defp may_checksum(input, true) when is_binary(input) do
int = bytes_to_integer_nopadding(input, 0)
{input, [<<calculate_checksum(int)::integer>>]}
end
defp may_checksum(input, _) do
{input, []}
end
defp calculate_checksum(int) do
int |> rem(37) |> e()
end
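  # e.g. 1234567 rem 37 == 25 and e(25) == ?S, which is why the encode
  # doctest above yields "15NM7S" when checksum: true.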
defp may_split_by_split_size_with_hyphen(encoded, split_size)
when is_integer(split_size) and split_size > 0 do
split_with_hyphen(encoded, split_size, [])
end
defp may_split_by_split_size_with_hyphen(encoded, _), do: encoded
defp split_with_hyphen(str, size, prepared) when byte_size(str) > size do
<<chunk::size(size)-binary, rest::binary>> = str
split_with_hyphen(rest, size, [chunk | prepared])
end
defp split_with_hyphen(str, _size, []), do: str
defp split_with_hyphen(rest, _size, prepared) do
Enum.reverse([rest | prepared]) |> Enum.join("-")
end
encoding_symbol_charlist = '0123456789ABCDEFGHJKMNPQRSTVWXYZ'
check_symbol_charlist = '*~$=U'
alphabet = encoding_symbol_charlist ++ check_symbol_charlist
@doc false
@compile {:inline, e: 1}
for {alphabet, index} <- Enum.with_index(alphabet) do
def e(unquote(index)), do: unquote(alphabet)
end
# also generate clauses for the lowercase alphabet (a-z) when decoding
@compile {:inline, d: 1}
@doc false
for {alphabet, index} <- Enum.with_index(alphabet) do
def d(unquote(alphabet)), do: unquote(index)
if alphabet in ?A..?Z do
def d(unquote(alphabet+32)), do: unquote(index)
end
end
# O
def d(79), do: 0
# L
def d(76), do: 1
# I
def d(73), do: 1
# invalid
def d(input), do: raise "invalid: #{inspect input}"
@doc false
def error_invalid(), do: {:error, "invalid"}
@compile {:inline, decode_string: 2}
defp decode_string(<<>>, acc) do
decoded_size = bit_size(acc)
case rem(decoded_size, 8) do
0 ->
{:ok, acc}
padding_size ->
data_size = decoded_size - padding_size
case acc do
<<decoded::bitstring-size(data_size), 0::size(padding_size)>> ->
{:ok, decoded}
_ ->
error_invalid()
end
end
end
# also generate clauses for the lowercase alphabet (a-z) when decoding with an accumulator
for {alphabet, index} <- Enum.with_index(encoding_symbol_charlist) do
defp decode_string(<<unquote(alphabet), rest::bitstring>>, acc) do
decode_string(rest, <<acc::bitstring, unquote(index)::5>>)
end
if alphabet in ?A..?Z do
defp decode_string(<<unquote(alphabet+32), rest::bitstring>>, acc) do
decode_string(rest, <<acc::bitstring, unquote(index)::5>>)
end
end
end
defp decode_string(input, _acc), do: raise "invalid: #{inspect input}"
end
|
lib/crockford_base32.ex
| 0.920169
| 0.508971
|
crockford_base32.ex
|
starcoder
|
defmodule IO do
@moduledoc """
Module responsible for doing IO. Many functions in this
module expects an IO device and an io data encoded in UTF-8.
An IO device must be a pid or an atom representing a process.
For convenience, Elixir provides `:stdio` and `:stderr` as
shortcut to Erlang's `:standard_io` and `:standard_error`.
An io data can be:
* A list of integers representing a string. Any unicode
character must be represented with one entry in the list,
this entry being an integer with the codepoint value;
* A binary in which unicode characters are represented
with many bytes (Elixir's default representation);
* A list of binaries or a list of char lists (as described above);
* If none of the above, `to_binary` is invoked on the
given argument;
"""
import :erlang, only: [group_leader: 0]
@doc """
Reads `count` bytes from the IO device. It returns:
* `data` - The input characters.
* :eof - End of file was encountered.
* {:error, reason} - Other (rare) error condition,
for instance {:error, :estale} if reading from an
NFS file system.
"""
def read(device // group_leader(), count) do
:io.get_chars(map_dev(device), "", count)
end
@doc """
Reads `count` bytes from the IO device as binary,
no unicode conversion happens.
Check `read/2` for more information.
"""
def binread(device // group_leader(), count) do
case :file.read(map_dev(device), count) do
{ :ok, data } -> data
other -> other
end
end
@doc """
Reads a line from the IO device. It returns:
* `data` - The input characters.
* :eof - End of file was encountered.
* {:error, reason} - Other (rare) error condition,
for instance {:error, :estale} if reading from an
NFS file system.
This function does the same as `gets/2`,
except the prompt is not required as argument.
"""
def readline(device // group_leader()) do
:io.get_line(map_dev(device), "")
end
@doc """
Reads a line from the IO device as binary,
no unicode conversion happens.
Check `readline/1` for more information.
"""
def binreadline(device // group_leader()) do
case :file.read_line(map_dev(device)) do
{ :ok, data } -> data
other -> other
end
end
@doc """
Writes the given argument to the given device.
By default the device is the standard output.
The argument is expected to be a chardata (i.e.
a char list or an unicode binary).
It returns `:ok` if it succeeds.
## Examples
IO.write "sample"
#=> "sample"
IO.write :stderr, "error"
#=> "error"
"""
def write(device // group_leader(), item) do
:io.put_chars map_dev(device), to_iodata(item)
end
@doc """
Writes the given argument to the given device
as a binary, no unicode conversion happens.
Check `write/2` for more information.
"""
def binwrite(device // group_leader(), item) do
:file.write map_dev(device), to_iodata(item)
end
@doc """
Writes the argument to the device, similarly to write
but adds a new line at the end. The argument is expected
to be a chardata.
"""
def puts(device // group_leader(), item) do
erl_dev = map_dev(device)
:io.put_chars erl_dev, [to_iodata(item), ?\n]
end
@doc """
Inspects and writes the given argument to the device
followed by a new line. A set of options can be given.
## Examples
IO.inspect Process.list
"""
def inspect(item, opts // []) do
inspect group_leader(), item, opts
end
@doc """
Inspects the item with options using the given device.
"""
def inspect(device, item, opts) do
puts device, Kernel.inspect(item, opts)
item
end
@doc """
Gets a number of bytes from the io device. If the
io device is a unicode device, count implies
the number of unicode codepoints to be retrieved.
Otherwise, the number of raw bytes. It returns:
* `data` - The input characters.
* :eof - End of file was encountered.
* {:error, reason} - Other (rare) error condition,
for instance {:error, :estale} if reading from an
NFS file system.
"""
def getn(prompt, count // 1)
def getn(prompt, count) when is_integer(count) do
getn(group_leader, prompt, count)
end
def getn(device, prompt) do
getn(device, prompt, 1)
end
@doc """
Gets a number of bytes from the io device. If the
io device is a unicode device, count implies
the number of unicode codepoints to be retrieved.
Otherwise, the number of raw bytes.
"""
def getn(device, prompt, count) do
:io.get_chars(map_dev(device), to_iodata(prompt), count)
end
@doc false
def getb(prompt, count // 1)
def getb(prompt, count) when is_integer(count) do
IO.write "[WARNING] IO.getb is deprecated, please use IO.getn instead\n#{Exception.format_stacktrace}"
getn(prompt, count)
end
def getb(device, prompt) do
IO.write "[WARNING] IO.getb is deprecated, please use IO.getn instead\n#{Exception.format_stacktrace}"
getn(device, prompt)
end
@doc false
def getb(device, prompt, count) do
IO.write "[WARNING] IO.getb is deprecated, please use IO.getn instead\n#{Exception.format_stacktrace}"
getn(device, prompt, count)
end
@doc """
Reads a line from the IO device. It returns:
* `data` - The characters in the line terminated
by a LF (or end of file).
* :eof - End of file was encountered.
* {:error, reason} - Other (rare) error condition,
for instance {:error, :estale} if reading from an
NFS file system.
"""
def gets(device // group_leader(), prompt) do
:io.get_line(map_dev(device), to_iodata(prompt))
end
# Map the Elixir names for standard io and error to Erlang names
defp map_dev(:stdio), do: :standard_io
defp map_dev(:stderr), do: :standard_error
defp map_dev(other), do: other
defp to_iodata(io) when is_list(io) or is_binary(io), do: io
defp to_iodata(other), do: to_binary(other)
end
|
lib/elixir/lib/io.ex
| 0.843525
| 0.729182
|
io.ex
|
starcoder
|
# based on XKCD 287 - https://xkcd.com/287/
defmodule Entry do
@moduledoc """
Defines a struct for an entry in an order, including an item and a quantity
"""
@type t :: %__MODULE__{
item: Item,
quantity: integer
}
defstruct item: Item, quantity: 1
@type order() :: [t, ...]
@spec new(Item.t, pos_integer) :: t
@doc ~S"""
Create a new `Entry` struct with an Item and quantity
## Examples
iex> Entry.new(Item.parse("caviar,$99.99"), 2)
%Entry{item: %Item{name: "caviar", price: %Money{amount: 9999, currency: :USD}}, quantity: 2}
"""
def new(item, quantity) when quantity > 0,
do: %Entry{item: item, quantity: quantity}
@spec subtotal(t) :: Money.t
@doc ~S"""
Returns total for this item and quantity
## Examples
iex> Entry.subtotal(Entry.new(Item.parse("caviar,$99.99"), 2))
%Money{amount: 19998, currency: :USD}
"""
def subtotal(%Entry{item: %Item{name: _, price: price}, quantity: quantity}),
do: Money.multiply(price, quantity)
@spec total(order()) :: Money.t
@doc ~S"""
Returns the total of a list of entries
## Examples
iex> Entry.total([
...> Entry.new(Item.parse("tuna sandwich,$3.50"), 2),
...> Entry.new(Item.parse("caesar salad,$5.25"), 1),
...> Entry.new(Item.parse("onion rings,$2.00"), 1),
...> Entry.new(Item.parse("fountain drink,$1.00"), 3)
...> ])
%Money{amount: 1725, currency: :USD}
"""
def total(entries) do
Enum.map(entries, &subtotal/1) |> Enum.reduce(&Money.add/2)
end
@spec print(order()) :: :ok
@doc ~S"""
Prints a list of entries
"""
def print(entries) do
alias TableRex.Table
rows = Enum.map(entries,
fn(entry) ->
[entry.quantity,
entry.item.name,
Money.to_string(Entry.subtotal(entry))]
end)
header = ["", "Item", "Cost"]
Table.new(rows, header)
|> Table.put_column_meta(0, align: :right)
|> Table.put_column_meta(2, align: :right)
|> Table.render!
|> IO.puts
end
end
|
lib/entry.ex
| 0.834407
| 0.507141
|
entry.ex
|
starcoder
|