code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule UniversalOrbitMap do
  @moduledoc """
  Day 6 — https://adventofcode.com/2019/day/6

  Orbits are parsed into a map of `object => object it directly orbits`,
  which forms a tree rooted at COM. Part 1 counts direct plus indirect
  orbits; part 2 counts the orbital transfers between YOU and SAN.
  """

  @doc """
  Counts the total number of direct and indirect orbits.

  iex> ["COM)B", "B)C", "C)D", "D)E", "E)F", "B)G", "G)H", "D)I", "E)J", "J)K", "K)L"]
  iex> |> UniversalOrbitMap.part1()
  42
  """
  @spec part1(Enumerable.t()) :: integer()
  def part1(in_stream) do
    in_stream
    |> Stream.map(&parse_orbit/1)
    |> Map.new()
    |> count_orbits()
  end

  @doc """
  Counts the minimum number of orbital transfers needed to move YOU to the
  object SAN is orbiting.

  iex> ["COM)B", "B)C", "C)D", "D)E", "E)F", "B)G", "G)H", "D)I", "E)J", "J)K", "K)L", "K)YOU", "I)SAN"]
  iex> |> UniversalOrbitMap.part2()
  4
  """
  @spec part2(Enumerable.t()) :: integer()
  def part2(in_stream) do
    in_stream
    |> Stream.map(&parse_orbit/1)
    |> Map.new()
    |> count_transfers_from("YOU")
    |> sum_transfers_to("SAN")
  end

  # "A)B" means "B orbits A"; reversing before List.to_tuple/1 yields
  # {child, parent}, so Map.new/1 builds a child => parent lookup.
  # (Original @spec claimed Enumerable.t(); the function returns a tuple.)
  @spec parse_orbit(String.t()) :: {String.t(), String.t()}
  defp parse_orbit(line) do
    String.trim_trailing(line)
    |> String.split(")")
    |> Enum.reverse()
    |> List.to_tuple()
  end

  @spec count_orbits(map()) :: integer()
  defp count_orbits(map), do: count_orbits(map, Map.keys(map))

  # The second argument is either a list of keys still to process, a single
  # key, or nil (we walked past the root); the clauses dispatch on shape.
  @spec count_orbits(map(), [String.t()] | String.t() | nil) :: integer()
  defp count_orbits(_, []), do: 0

  defp count_orbits(map, [key | tail]) do
    count_orbits(map, key) + count_orbits(map, tail)
  end

  # The root (COM) has no parent; -1 cancels the +1 counted for the root
  # itself so each object contributes exactly its chain length.
  defp count_orbits(_, nil), do: -1
  defp count_orbits(map, key), do: 1 + count_orbits(map, map[key])

  # Walks from `obj` towards the root, recording each ancestor's distance.
  # Returns {orbit_map, distances} for sum_transfers_to/2.
  # (Original @spec claimed Enumerable.t(); the function returns a 2-tuple.)
  @spec count_transfers_from(map(), String.t()) :: {map(), map()}
  defp count_transfers_from(map, obj) do
    count_transfers_from(%{}, map, map[obj], 0)
  end

  @spec count_transfers_from(map(), map(), String.t() | nil, integer()) :: {map(), map()}
  defp count_transfers_from(moves_map, map, nil, _), do: {map, moves_map}

  defp count_transfers_from(moves_map, map, obj, moves) do
    Map.put(moves_map, obj, moves)
    |> count_transfers_from(map, map[obj], moves + 1)
  end

  # Walks from `obj` towards the root until hitting an ancestor already
  # recorded from the other endpoint, then sums the two partial distances.
  @spec sum_transfers_to({map(), map()}, String.t()) :: integer()
  defp sum_transfers_to({map, moves_map}, obj) do
    sum_transfers_to(moves_map, map, map[obj], 0)
  end

  @spec sum_transfers_to(map(), map(), String.t(), integer()) :: integer()
  defp sum_transfers_to(moves_map, _, obj, moves) when is_map_key(moves_map, obj) do
    moves_map[obj] + moves
  end

  defp sum_transfers_to(moves_map, map, obj, moves) do
    sum_transfers_to(moves_map, map, map[obj], moves + 1)
  end
end
|
lib/advent_of_code_2019/day06.ex
| 0.81257
| 0.602354
|
day06.ex
|
starcoder
|
defmodule Harald.HCI.Event.CommandStatus do
  @moduledoc """
  The HCI_Command_Status event is used to indicate that the command described by
  the Command_Opcode parameter has been received, and that the Controller is
  currently performing the task for this command.
  This event is needed to provide mechanisms for asynchronous operation, which
  makes it possible to prevent the Host from waiting for a command to finish. If
  the command cannot begin to execute (a parameter error may have occurred, or the
  command may currently not be allowed), the Status event parameter will contain
  the corresponding error code, and no complete event will follow since the
  command was not started. The Num_HCI_Command_Packets event parameter allows the
  Controller to indicate the number of HCI command packets the Host can send to
  the Controller. If the Controller requires the Host to stop sending commands,
  the Num_HCI_Command_Packets event parameter will be set to zero. To indicate to
  the Host that the Controller is ready to receive HCI command packets, the
  Controller generates an HCI_Command_Status event with Status 0x00 and
  Command_Opcode 0x0000 and the Num_HCI_Command_Packets event parameter set to 1
  or more. Command_Opcode 0x0000 is a special value indicating that this event is
  not associated with a command sent by the Host. The Controller can send an
  HCI_Command_Status event with Command Opcode 0x0000 at any time to change the
  number of outstanding HCI command packets that the Host can send before waiting.
  Reference: Version 5.2, Vol 4, Part E, 7.7.15
  """
  alias Harald.Serializable
  alias Harald.ErrorCode, as: Status
  require Logger

  @behaviour Serializable

  @type t :: %__MODULE__{}

  # `status` holds the atom produced by Status.name!/1 after deserialization.
  defstruct [
    :num_hci_command_packets,
    :opcode,
    :status
  ]

  @event_code 0x0F

  @doc """
  Returns the Command Status event code.
  """
  def event_code, do: @event_code

  @impl Serializable
  # NOTE(review): serialize/1 emits num_hci_command_packets, opcode (16 bits),
  # then status as a binary, while deserialize/1 below reads status (8 bits),
  # num_hci_command_packets, then opcode as a 2-byte binary — the order given
  # in the spec reference above — and converts status to an atom. A struct
  # round-tripped through deserialize/1 therefore cannot be fed back through
  # serialize/1 unchanged; confirm which layout is intended.
  def serialize(%__MODULE__{} = data) do
    {:ok,
     <<
       data.num_hci_command_packets::8,
       data.opcode::16,
       data.status::binary
     >>}
  end

  @impl Serializable
  # Parses the 4-byte event parameter block; the status byte is translated
  # to its named atom via Harald.ErrorCode.name!/1.
  def deserialize(<<status::8, num_hci_command_packets::8, opcode::binary-2>>) do
    cs = %__MODULE__{
      num_hci_command_packets: num_hci_command_packets,
      opcode: opcode,
      status: Status.name!(status)
    }

    {:ok, cs}
  end

  # Anything that is not exactly 4 bytes is returned unparsed as an error.
  def deserialize(bin), do: {:error, bin}
end
|
lib/harald/hci/event/command_status.ex
| 0.75183
| 0.520618
|
command_status.ex
|
starcoder
|
defmodule Serum.File do
  @moduledoc """
  Defines a struct representing a file to be read or written.
  ## Fields
  * `src`: Source path
  * `dest`: Destination path
  * `in_data`: Data read from a file
  * `out_data`: Data to be written to a file
  """
  require Serum.Result, as: Result
  import Serum.IOProxy, only: [put_msg: 2]

  defstruct [:src, :dest, :in_data, :out_data]

  @type t :: %__MODULE__{
          src: binary() | nil,
          dest: binary() | nil,
          in_data: IO.chardata() | String.Chars.t() | nil,
          out_data: IO.chardata() | String.Chars.t() | nil
        }

  @doc """
  Reads data from a file described by the given `Serum.File` struct.
  An error will be returned if `src` is `nil`.
  """
  @spec read(t()) :: Result.t(t())
  def read(%__MODULE__{src: nil}) do
    msg = "a Serum.File struct with 'src = nil' cannot be used with Serum.File.read/1"
    Result.fail(Simple, [msg])
  end

  def read(%__MODULE__{src: src} = file) do
    src
    |> File.read()
    |> case do
      {:ok, contents} when is_binary(contents) ->
        print_read(src)
        Result.return(%__MODULE__{file | in_data: contents})

      {:error, posix} ->
        Result.fail(POSIX, [posix], file: file)
    end
  end

  @doc """
  Writes data to a file described by the given `Serum.File` struct.
  An error will be returned if `dest` is `nil`.
  """
  @spec write(t()) :: Result.t(t())
  def write(%__MODULE__{dest: nil}) do
    msg = "a Serum.File struct with 'dest = nil' cannot be used with Serum.File.write/1"
    Result.fail(Simple, [msg])
  end

  def write(%__MODULE__{dest: dest, out_data: out_data} = file) do
    dest
    |> File.open([:write, :utf8], &IO.write(&1, out_data))
    |> case do
      {:ok, _fun_result} ->
        print_write(dest)
        Result.return(file)

      {:error, posix} ->
        Result.fail(POSIX, [posix], file: file)
    end
  end

  # Emits the console message for a completed read of `src`.
  @spec print_read(binary()) :: Result.t({})
  defp print_read(src), do: put_msg(:read, src)

  # Emits the console message for a completed write to `dest`.
  @spec print_write(binary()) :: Result.t({})
  defp print_write(dest), do: put_msg(:gen, dest)
end
|
lib/serum/file.ex
| 0.757525
| 0.560824
|
file.ex
|
starcoder
|
defmodule AMQPX.Receiver.Standard.Retry do
  @moduledoc """
  Tracks how many times a message has been seen, in a public ETS table, so
  that a receiver can stop retrying a delivery once a configured limit is
  exceeded.
  """

  defstruct [
    :table,
    :limit,
    :identity,
    :delay
  ]

  @doc """
  Retry options.
  `identity` specifies the list of methods to generate a unique term for a
  message. The first non-`nil` result is used. If all methods evaluate to
  `nil`, retry tracking is not used for that message.
  `delay` specifies the time to wait (in milliseconds) before rejecting the
  delivery, to prevent a hot retry loop.
  """
  @type option ::
          {:limit, integer()}
          | {:identity, [AMQPX.MessageIdentity.identity_function()]}
          | {:delay, nil | integer()}

  # No options means retry tracking is disabled; all other functions accept
  # nil state and short-circuit.
  def init(nil), do: nil

  def init(opts) do
    # :public so the table is usable from whichever process handles the message.
    table = :ets.new(__MODULE__, [:public])
    limit = opts |> Keyword.fetch!(:limit)
    identity = opts |> Keyword.get(:identity, [:message_id, :payload_hash])
    delay = opts |> Keyword.get(:delay)

    %__MODULE__{
      table: table,
      limit: limit,
      identity: identity,
      delay: delay
    }
  end

  # Returns true once the message's seen-counter exceeds `limit`; otherwise
  # increments the counter and returns false. Messages without a derivable
  # identity are never considered exhausted.
  def exhausted?(payload, meta, handler, state)
  def exhausted?(_, _, _, nil), do: false

  def exhausted?(
        payload,
        meta,
        handler,
        %__MODULE__{table: table, limit: limit, identity: identity}
      ) do
    case AMQPX.MessageIdentity.get(payload, meta, handler, identity) do
      nil ->
        false

      key ->
        seen_times =
          case :ets.lookup(table, key) do
            [] -> 0
            [{^key, n}] -> n
          end

        # A retry limit of 0 allows us to see a message once. A retry limit
        # of 1 lets us see a message twice, and so on.
        if seen_times > limit do
          true
        else
          :ets.insert(table, {key, seen_times + 1})
          false
        end
    end
  end

  # Blocks for the configured delay (if any) before the caller rejects the
  # delivery, preventing a hot retry loop.
  def delay(state)
  def delay(%__MODULE__{delay: delay}) when is_integer(delay), do: Process.sleep(delay)
  def delay(_), do: nil

  # Drops the retry counter for a message, e.g. after successful handling.
  def clear(payload, meta, handler, state)
  def clear(_, _, _, nil), do: nil

  def clear(payload, meta, handler, %__MODULE__{table: table, identity: identity}) do
    case AMQPX.MessageIdentity.get(payload, meta, handler, identity) do
      nil ->
        nil

      key ->
        :ets.delete(table, key)
    end
  end
end
|
lib/amqpx/receiver/standard/retry.ex
| 0.810629
| 0.47384
|
retry.ex
|
starcoder
|
defmodule Alchemy.AuditLog do
@moduledoc """
This module contains functions and types related to audit logs.
"""
alias Alchemy.{Guild.Role, OverWrite, User, Webhook}
alias Alchemy.Discord.Guilds
import Alchemy.Discord.RateManager, only: [send_req: 2]
import Alchemy.Structs
@type snowflake :: String.t()
@typedoc """
Represents the Audit Log information of a guild.
- `webhooks`
List of webhooks found in the Audit Log.
- `user`
List of users found in the Audit Log.
- `audit_log_entries`
List of entries in the Audit Log.
"""
@type t :: %__MODULE__{
webhooks: Alchemy.Webhook.t(),
users: Alchemy.User.t(),
audit_log_entries: [entry]
}
defstruct [:webhooks, :users, :audit_log_entries]
  @doc false
  # Builds an AuditLog struct from a raw JSON-decoded map: the "webhooks"
  # and "users" lists become structs, and each audit log entry goes through
  # `Entry.from_map/1`. `field_map/3` and `map_struct/2` are imported from
  # Alchemy.Structs.
  def from_map(map) do
    map
    |> field_map("webhooks", &map_struct(&1, Webhook))
    |> field_map("users", &map_struct(&1, User))
    |> field_map(
      "audit_log_entries",
      &Enum.map(&1, fn x ->
        __MODULE__.Entry.from_map(x)
      end)
    )
  end
@typedoc """
An enumeration of action types.
"""
@type action ::
:guild_update
| :channel_create
| :channel_update
| :channel_delete
| :channel_overwrite_create
| :channel_overwrite_update
| :channel_overwrite_delete
| :member_kick
| :member_prune
| :member_ban_add
| :member_ban_remove
| :member_update
| :member_role_update
| :role_create
| :role_update
| :role_delete
| :invite_create
| :invite_update
| :invite_delete
| :webhook_create
| :webhook_update
| :webhook_delete
| :emoji_create
| :emoji_update
| :message_delete
@typedoc """
Additional information fields in an audit log based on `action_type`.
`:member_prune` -> `[:delete_member_days, :members_removed]`
`:message_delete` -> `[:channel_id, :count]`
`:channel_overwrite_create | delete | update` -> [:id, :type, :role_name]
"""
@type options :: %{
optional(:delete_member_days) => String.t(),
optional(:members_removed) => String.t(),
optional(:channel_id) => snowflake,
optional(:count) => integer,
optional(:id) => snowflake,
optional(:type) => String.t(),
optional(:role_name) => String.t()
}
@typedoc """
An entry in an audit log.
- `target_id`
The id of the affected entity.
- `changes`
The changes made to the `target_id`.
- `user_id`
The user who made the changes.
- `id`
The id of the entry
- `action_type`
The type of action that occurred
- `options`
Additional map of information for certain action types.
- `reason`
The reason for the change
"""
@type entry :: %__MODULE__.Entry{
target_id: String.t(),
changes: [change],
user_id: snowflake,
id: snowflake,
action_type: action,
options: options
}
defmodule Entry do
@moduledoc false
import Alchemy.Structs
defstruct [:target_id, :changes, :user_id, :id, :action_type, :options, :reason]
@audit_log_events %{
1 => :guild_update,
10 => :channel_create,
11 => :channel_update,
12 => :channel_delete,
13 => :channel_overwrite_create,
14 => :channel_overwrite_update,
15 => :channel_overwrite_delete,
20 => :member_kick,
21 => :member_prune,
22 => :member_ban_add,
23 => :member_ban_remove,
24 => :member_update,
25 => :member_role_update,
30 => :role_create,
31 => :role_update,
32 => :role_delete,
40 => :invite_create,
41 => :invite_update,
42 => :invite_delete,
50 => :webhook_create,
51 => :webhook_update,
52 => :webhook_delete,
60 => :emoji_create,
61 => :emoji_update,
72 => :message_delete
}
@events_to_int for {k, v} <- @audit_log_events, into: %{}, do: {v, k}
def action_to_int(k) do
@events_to_int[k]
end
def from_map(map) do
action_type = Map.get(@audit_log_events, map["action_type"])
options =
for {k, v} <- map["options"], into: %{} do
# this is safe, because there's a set amount of keys.
{String.to_atom(k), v}
end
|> Map.get_and_update(:count, fn
nil ->
:pop
x ->
{a, _} = Integer.parse(x)
{x, a}
end)
map
|> field_map("action_type", fn _ -> action_type end)
|> field_map("options", fn _ -> options end)
|> field_map("changes", &map_struct(&1, Alchemy.AuditLog.Change))
|> to_struct(__MODULE__)
end
end
@typedoc """
The type of an audit log change.
- `new_value`
The new value after the change.
- `old_value`
The value prior to the change.
- `key`
The type of change that occurred. This also dictates the type of
`new_value` and `old_value`
[more information on this relation](https://discordapp.com/developers/docs/resources/audit-log#audit-log-change-object-audit-log-change-key)
"""
@type change :: %__MODULE__.Change{
new_value: any,
old_value: any,
key: String.t()
}
defmodule Change do
@moduledoc false
import Alchemy.Structs
defstruct [:new_value, :old_value, :key]
def from_map(map) do
key_change =
case map["key"] do
"$add" -> &map_struct(&1, Role)
"$remove" -> &map_struct(&1, Role)
"permission_overwrites" -> &struct(OverWrite, &1)
_ -> & &1
end
map
|> field_map("key", key_change)
|> to_struct(__MODULE__)
end
end
@doc """
Returns an audit log entry for a guild.
Requires `:view_audit_log` permission.
## Options
- `user_id`
Filters the log for a user id.
- `action_type`
The type of audit log event
- `before`
Filter the log before a certain entry id.
- `limit`
How many entries are returned (default 50, between 1 and 100).
"""
@spec get_guild_log(snowflake,
user_id: snowflake,
action_type: action,
before: snowflake,
limit: integer
) :: {:ok, __MODULE__.t()} | {:error, term}
def get_guild_log(guild, options \\ []) do
options =
Keyword.get_and_update(options, :action_type, fn
nil ->
:pop
x ->
{x, __MODULE__.Entry.action_to_int(x)}
end)
{Guilds, :get_audit_log, [guild, options]}
|> send_req("/guilds/#{guild}/audit-log")
end
end
|
lib/Structs/audit_log.ex
| 0.794505
| 0.470493
|
audit_log.ex
|
starcoder
|
defmodule RisteysWeb.PhenocodeView do
use RisteysWeb, :view
require Integer
def render("assocs.json", %{
phenocode: phenocode,
assocs: assocs,
hr_prior_distribs: hr_prior_distribs,
hr_outcome_distribs: hr_outcome_distribs
}) do
%{
"plot" => data_assocs_plot(phenocode, assocs),
"table" => data_assocs_table(phenocode.id, assocs, hr_prior_distribs, hr_outcome_distribs)
}
end
def render("assocs.csv", %{assocs: assocs}) do
header =
~w(prior_name outcome_name hr_lag prior_longname outcome_longname hr ci_min ci_max p N)
assocs =
Enum.map(assocs, fn assoc ->
[
assoc.prior_name,
assoc.outcome_name,
assoc.lagged_hr_cut_year,
assoc.prior_longname,
assoc.outcome_longname,
assoc.hr,
assoc.ci_min,
assoc.ci_max,
assoc.pvalue,
assoc.nindivs
]
end)
Enum.concat([header], assocs)
|> CSV.encode()
|> Enum.join()
end
def render("drugs.json", %{drug_stats: drug_stats}) do
Enum.map(drug_stats, fn drug ->
ci_min = drug.score - 1.96 * drug.stderr
ci_max = drug.score + 1.96 * drug.stderr
%{
name: drug.description,
score_num: drug.score,
score_str: round(drug.score, 2),
ci_min_num: ci_min,
ci_min_str: round(ci_min, 2),
ci_max_num: ci_max,
ci_max_str: round(ci_max, 2),
pvalue_num: drug.pvalue,
pvalue_str: pvalue_str(drug.pvalue),
n_indivs: drug.n_indivs,
atc: drug.atc,
atc_link: atc_link_wikipedia(drug.atc)
}
end)
end
def render("drugs.csv", %{drug_stats: drug_stats}) do
header = ~w(ATC name score score_ci_min score_ci_max p N)
stats =
Enum.map(drug_stats, fn drug ->
ci_min = drug.score - 1.96 * drug.stderr
ci_max = drug.score + 1.96 * drug.stderr
[drug.atc, drug.description, drug.score, ci_min, ci_max, drug.pvalue, drug.n_indivs]
end)
Enum.concat([header], stats)
|> CSV.encode()
|> Enum.join()
end
  # -- Endpoint Explainer --
  defp get_explainer_step(steps, name) do
    # Get a step by name
    Enum.find(steps, fn %{name: step_name} -> step_name == name end)
  end

  # Rewrites boolean condition strings into human-readable text.
  defp readable_conditions(conditions) do
    Enum.map(conditions, fn condition ->
      condition
      |> String.replace("!", "not ")
      |> String.replace("_NEVT", "number of events ")
      |> String.replace("&", "and ")
      |> String.replace("|", "or ")
    end)
  end

  # Comma-separated list of ICD revision numbers.
  defp readable_icdver(icd_numbers) do
    icd_numbers
    |> Enum.map(&Integer.to_string/1)
    |> Enum.intersperse(", ")
  end

  # {label, value} metadata pairs for an endpoint, dropping missing values.
  defp readable_metadata(endpoint) do
    [
      {"Level in the ICD hierarchy", endpoint.level},
      {"Special", endpoint.special},
      {"First used in FinnGen datafreeze", endpoint.version},
      {"Parent code in ICD-10", endpoint.parent},
      {"Name in latin", endpoint.latin}
    ]
    |> Enum.reject(fn {_col, val} -> is_nil(val) end)
  end
  # Renders an ICD-10 rule cell: shows the expanded ICD codes as links when
  # there are between 1 and 10 of them, otherwise falls back to the original
  # rule string.
  defp cell_icd10(filter, key_orig_rule, key_expanded) do
    if Map.has_key?(filter, key_expanded) do
      %{
        ^key_orig_rule => orig_rule,
        ^key_expanded => expanded_rule
      } = filter

      max_icds = 10

      if length(expanded_rule) > 0 and length(expanded_rule) <= max_icds do
        render_icds(expanded_rule, true)
      else
        orig_rule
      end
    else
      filter[key_orig_rule]
    end
  end

  # Renders sorted ICD codes as <abbr> tags (optionally linked to the WHO
  # browser), comma separated.
  # NOTE(review): the empty-list clause returns "" while the other clause
  # returns a list of tags — callers appear to handle both shapes; confirm.
  defp render_icds([], _url), do: ""

  defp render_icds(icds, url?) do
    icds
    |> Enum.sort()
    |> Enum.map(fn icd ->
      content =
        if url? do
          icd10_url(icd.code, icd.code)
        else
          icd.code
        end

      abbr(content, icd.description)
    end)
    |> Enum.intersperse(", ")
  end
defp relative_count(steps, count) do
# Compute the percentage of the given count across meaningful steps
check_steps =
MapSet.new([
:filter_registries,
:precond_main_mode_icdver,
:min_number_events,
:includes
])
max =
steps
|> Enum.filter(fn %{name: name} -> name in check_steps end)
|> Enum.map(fn %{nindivs_post_step: ncases} -> ncases end)
|> Enum.reject(&is_nil/1)
|> Enum.max()
count / max * 100
end
  # -- Ontology --
  # Builds one external link per ontology source, using only the first id of
  # each source. NOTE(review): the inner `case` has no fallback clause, so an
  # unknown source string will raise CaseClauseError — confirm the set of
  # sources is closed.
  defp ontology_links(ontology) do
    # Helper function to link to external resources
    linker = fn source, id ->
      link =
        case source do
          "DOID" ->
            "https://www.ebi.ac.uk/ols/search?q=" <> id <> "&ontology=doid"

          "EFO" ->
            "https://www.ebi.ac.uk/gwas/efotraits/EFO_" <> id

          "MESH" ->
            "https://meshb.nlm.nih.gov/record/ui?ui=" <> id

          "SNOMED" ->
            "https://browser.ihtsdotools.org/?perspective=full&conceptId1=" <>
              id <> "&edition=en-edition"
        end

      ahref(source, link)
    end

    # Sources with no ids are dropped entirely.
    ontology = Enum.reject(ontology, fn {_source, ids} -> ids == [] end)

    for {source, ids} <- ontology, into: [] do
      first_id = Enum.at(ids, 0)
      linker.(source, first_id)
    end
  end
  # -- Stats --
  # Builds {title, stats} rows for the mortality table, one per HR lag
  # window; lags with no data render as "-" placeholders.
  defp mortality_table(stats) do
    lags = [
      {0, "1998–2019"},
      {15, "15 years"},
      {5, "5 years"},
      {1, "1 year"}
    ]

    no_data = %{
      absolute_risk: "-",
      hr: "-",
      pvalue: "-",
      n_individuals: "-"
    }

    for {lag, title} <- lags do
      data = Enum.find(stats, fn %{lagged_hr_cut_year: lag_hr} -> lag_hr == lag end)

      stat =
        if not is_nil(data) do
          # "HR [ci_min, ci_max]" with 2-decimal formatting
          hr =
            "#{data.hr |> round(2)} [#{data.hr_ci_min |> round(2)}, #{data.hr_ci_max |> round(2)}]"

          %{
            absolute_risk: data.absolute_risk |> round(2),
            hr: hr,
            pvalue: data.pvalue |> pvalue_str(),
            n_individuals: data.n_individuals
          }
        else
          no_data
        end

      {title, stat}
    end
  end
  # Plot entries for the non-lagged (cut == 0) associations, annotated with
  # the direction ("before"/"after") relative to the phenocode of interest.
  defp data_assocs_plot(phenocode, assocs) do
    assocs
    |> Enum.filter(fn %{lagged_hr_cut_year: cut} ->
      # keep only non-lagged HR on plot
      cut == 0
    end)
    |> Enum.map(fn assoc ->
      # Find direction given phenocode of interest
      {other_pheno_name, other_pheno_longname, other_pheno_category, direction} =
        if phenocode.name == assoc.prior_name do
          {assoc.outcome_name, assoc.outcome_longname, assoc.outcome_category, "after"}
        else
          {assoc.prior_name, assoc.prior_longname, assoc.prior_category, "before"}
        end

      %{
        "name" => other_pheno_name,
        "longname" => other_pheno_longname,
        "category" => other_pheno_category,
        "direction" => direction,
        "hr" => assoc.hr,
        "hr_str" => round(assoc.hr, 2),
        "ci_min" => round(assoc.ci_min, 2),
        "ci_max" => round(assoc.ci_max, 2),
        "pvalue_str" => pvalue_str(assoc.pvalue),
        "pvalue_num" => assoc.pvalue,
        "nindivs" => assoc.nindivs
      }
    end)
  end
  # Builds the association table rows. Each row pairs the "before" and
  # "after" statistics (per HR lag: none, 1y, 5y, 15y) between `pheno_id`
  # and one other phenocode; binned HR percent ranks are merged in first
  # for the CompBox plot.
  defp data_assocs_table(pheno_id, assocs, hr_prior_distribs, hr_outcome_distribs) do
    # Merge binned HR distrib in assocs table
    binned_prior_hrs =
      for bin <- hr_prior_distribs, into: %{}, do: {bin.pheno_id, bin.percent_rank}

    binned_outcome_hrs =
      for bin <- hr_outcome_distribs, into: %{}, do: {bin.pheno_id, bin.percent_rank}

    assocs =
      Enum.map(
        assocs,
        fn assoc ->
          if assoc.outcome_id == pheno_id do
            hr_binned = Map.get(binned_prior_hrs, assoc.prior_id)
            Map.put(assoc, :hr_binned, hr_binned)
          else
            hr_binned = Map.get(binned_outcome_hrs, assoc.outcome_id)
            Map.put(assoc, :hr_binned, hr_binned)
          end
        end
      )

    # Takes the associations from the database and transform them to
    # values for the association table, such that each table row has
    # "before" and "after" associations with the given pheno_id.
    no_stats = %{
      "hr" => nil,
      "hr_str" => nil,
      "ci_min" => nil,
      "ci_max" => nil,
      "pvalue" => nil,
      "nindivs" => nil,
      "lagged_hr_cut_year" => nil,
      # value for CompBox plot
      "hr_binned" => nil
    }

    # rows :: %{other_pheno_id => %{lag => %{"before"/"after" => stats}, ...}}
    rows =
      Enum.reduce(assocs, %{}, fn assoc, acc ->
        to_record(acc, assoc, pheno_id)
      end)

    Enum.map(rows, fn {other_id, lag_data} ->
      # For every lag window, substitute the placeholder when data is missing.
      no_lag_before =
        case get_in(lag_data, [0, "before"]) do
          nil ->
            no_stats

          stats ->
            stats
        end

      no_lag_after =
        case get_in(lag_data, [0, "after"]) do
          nil ->
            no_stats

          stats ->
            stats
        end

      lag_1y_before =
        case get_in(lag_data, [1, "before"]) do
          nil ->
            no_stats

          stats ->
            stats
        end

      lag_1y_after =
        case get_in(lag_data, [1, "after"]) do
          nil ->
            no_stats

          stats ->
            stats
        end

      lag_5y_before =
        case get_in(lag_data, [5, "before"]) do
          nil ->
            no_stats

          stats ->
            stats
        end

      lag_5y_after =
        case get_in(lag_data, [5, "after"]) do
          nil ->
            no_stats

          stats ->
            stats
        end

      lag_15y_before =
        case get_in(lag_data, [15, "before"]) do
          nil ->
            no_stats

          stats ->
            stats
        end

      lag_15y_after =
        case get_in(lag_data, [15, "after"]) do
          nil ->
            no_stats

          stats ->
            stats
        end

      %{
        "id" => other_id,
        "name" => lag_data["name"],
        "longname" => lag_data["longname"],
        "all" => %{
          "before" => no_lag_before,
          "after" => no_lag_after
        },
        "lagged_1y" => %{
          "before" => lag_1y_before,
          "after" => lag_1y_after
        },
        "lagged_5y" => %{
          "before" => lag_5y_before,
          "after" => lag_5y_after
        },
        "lagged_15y" => %{
          "before" => lag_15y_before,
          "after" => lag_15y_after
        }
      }
    end)
  end
  # Folds one association into the accumulator keyed by the *other*
  # phenocode's id, nested as acc[other_id][lag]["before" | "after"].
  defp to_record(res, assoc, pheno_id) do
    # Takes an association and transform it to a suitable value for a
    # row in the association table.
    [dir, other_pheno] =
      if pheno_id == assoc.prior_id do
        [
          "after",
          %{
            id: assoc.outcome_id,
            name: assoc.outcome_name,
            longname: assoc.outcome_longname
          }
        ]
      else
        [
          "before",
          %{
            id: assoc.prior_id,
            name: assoc.prior_name,
            longname: assoc.prior_longname
          }
        ]
      end

    lag = assoc.lagged_hr_cut_year

    new_stats = %{
      "hr" => assoc.hr,
      "hr_str" => round(assoc.hr, 2),
      "ci_min" => round(assoc.ci_min, 2),
      "ci_max" => round(assoc.ci_max, 2),
      "pvalue" => assoc.pvalue,
      "pvalue_str" => pvalue_str(assoc.pvalue),
      "nindivs" => assoc.nindivs,
      "hr_binned" => assoc.hr_binned
    }

    # Create pheno mapping if not existing
    res =
      if is_nil(Map.get(res, other_pheno.id)) do
        Map.put(res, other_pheno.id, %{})
      else
        res
      end

    # Create inner lag mapping if not existing
    res =
      if is_nil(get_in(res, [other_pheno.id, lag])) do
        put_in(res, [other_pheno.id, lag], %{})
      else
        res
      end

    res
    |> put_in([other_pheno.id, lag, dir], new_stats)
    |> put_in([other_pheno.id, "name"], other_pheno.name)
    |> put_in([other_pheno.id, "longname"], other_pheno.longname)
  end
  # Sorts "CHR-POS-REF-ALT" variant strings numerically by chromosome,
  # then position. NOTE(review): String.to_integer/1 raises for non-numeric
  # chromosomes such as "X" — confirm inputs here are numeric only.
  defp sort_variants(variants) do
    # Sort variant by CHR, POS.
    Enum.sort_by(variants, fn {variant, _genes} ->
      [chr, pos, _ref, _alt] = String.split(variant, "-")
      chr = String.to_integer(chr)
      pos = String.to_integer(pos)
      [chr, pos]
    end)
  end

  # Comma-separated links to the FinnGen gene result page for each gene.
  defp list_genes(genes) do
    genes
    |> Enum.map(fn gene -> gene.name end)
    |> Enum.map(fn name -> ahref(name, "https://results.finngen.fi/gene/" <> name) end)
    |> Enum.intersperse(", ")
  end
  # -- Helpers --
  # <abbr> tag showing `title` as a tooltip.
  defp abbr(text, title) do
    # "data_title" will be converted to "data-title" in HTML
    content_tag(:abbr, text, data_title: title)
  end

  # Link into the WHO ICD-10 (2016) browser for the given code.
  defp icd10_url(text, icd) do
    ahref(text, "https://icd.who.int/browse10/2016/en#/#{icd}")
  end

  # External link opening in a new tab, with safe rel attributes.
  defp ahref(text, link) do
    content_tag(:a, text,
      href: link,
      rel: "external nofollow noopener noreferrer",
      target: "_blank"
    )
  end

  # Fixed-precision number formatting; the "-" missing-value marker passes
  # through unchanged. "~.P. f" is Erlang io format: precision P, space pad.
  defp round(number, precision) do
    case number do
      "-" -> "-"
      _ -> :io_lib.format("~.#{precision}. f", [number]) |> to_string()
    end
  end

  # Converts a fraction to a percentage; missing values ("-" or nil) pass
  # through as "-".
  defp percentage(number) do
    case number do
      "-" ->
        "-"

      nil ->
        "-"

      _ ->
        number * 100
    end
  end

  defp pvalue_str(pvalue) do
    # Print the given pvalue using scientific notation, display
    # "<1e-100" if very low.
    cond do
      is_nil(pvalue) ->
        "-"

      pvalue < 1.0e-100 ->
        "<1e-100"

      true ->
        # See http://erlang.org/doc/man/io.html#format-2
        :io_lib.format("~.2. e", [pvalue]) |> to_string()
    end
  end

  # Wikipedia anchor for an ATC code; the section is the 3-char ATC group.
  defp atc_link_wikipedia(atc) do
    short = String.slice(atc, 0..2)
    "https://en.wikipedia.org/wiki/ATC_code_#{short}##{atc}"
  end
end
|
web_finngen_r8/lib/risteys_web/views/phenocode_view.ex
| 0.550366
| 0.431584
|
phenocode_view.ex
|
starcoder
|
defmodule Easing.Range do
  @moduledoc """
  Range struct for Easing
  This struct is basically a reimplementation of Elixir's `Range` struct
  but removing the limitations on only working with `Integer` constraints and steps
  """
  defstruct first: nil, last: nil, step: nil

  @one_second 1_000

  @type range :: %Easing.Range{first: number(), last: number(), step: number()}

  @spec new(number(), number(), number()) :: range()
  @doc """
  Convenience function for creating a new `Easing.Range` struct
  * first: represents the starting % of the range. Value should be: `value >= 0 and < 1`
  * last: represents the ending % of the range. Value should be: `value > 0 and <= 1`
  * step: the incremental value between `first` and `last`
  """
  def new(first, last, step) do
    %__MODULE__{first: first, last: last, step: step}
  end

  @spec calculate(integer(), integer()) :: range()
  @doc """
  Creates a new `Easing.Range` struct from a desired duration and target fps
  * duration_in_ms - total duration of the animation, only accepts `Integer`
  * fps - target frames per second of the animation, only accepts `Integer`
  ## Examples:
      iex> Easing.Range.calculate(1000, 1)
      %Easing.Range{first: 0, last: 1, step: 1.0}
  """
  def calculate(duration_in_ms, fps) when is_integer(duration_in_ms) and is_integer(fps) do
    # frame interval in ms (@one_second / fps) as a fraction of the duration
    %__MODULE__{first: 0, last: 1, step: (@one_second / fps) / duration_in_ms}
  end

  def calculate(duration_in_ms, fps) do
    raise ArgumentError, "Easing.Range.calculate/2 can only accept values in Integer form " <>
      "got: (#{duration_in_ms}, #{fps})"
  end

  @spec size(range()) :: integer()
  @doc """
  Returns the size of the `Easing.Range`
  Sizes are *inclusive* across a range. So a range from `0` - `1` with a step of `0.1` will have
  `11` values, not `10` because the `0` value is included in that result.
  ## Examples:
      iex> Easing.Range.calculate(1000, 60) |> Easing.Range.size()
      61
  """
  def size(%{__struct__: __MODULE__, first: first, last: last, step: step}) do
    # :erlang./ forces float division regardless of operand types.
    (abs(:erlang./(last - first, step)) |> Kernel.trunc()) + 1
  end

  defimpl Enumerable, for: Easing.Range do
    def reduce(%{__struct__: Easing.Range, first: first, last: last, step: step}, acc, fun) do
      reduce(first, last, acc, fun, step)
    end

    defp reduce(_first, _last, {:halt, acc}, _fun, _step) do
      {:halted, acc}
    end

    defp reduce(first, last, {:suspend, acc}, fun, step) do
      {:suspended, acc, &reduce(first, last, &1, fun, step)}
    end

    # todo: this is probably shit performance
    defp reduce(first, last, {:cont, acc}, fun, step) do
      cond do
        # Past the end (in either direction): done.
        (step > 0 and first > last) or (step < 0 and first < last) ->
          {:done, acc}

        # Rounding to 10 decimal places absorbs float accumulation error so
        # the final element emitted is exactly `last`.
        (step > 0 and Float.ceil(first + 0.0, 10) >= last) or (step < 0 and Float.ceil(first + 0.0, 10) <= last) ->
          {_, acc} = fun.(last, acc)
          {:done, acc}

        true ->
          reduce(first + step, last, fun.(first, acc), fun, step)
      end
    end

    # NOTE(review): rem/2 only accepts integers, so membership checks on
    # float-stepped ranges will raise — confirm intended.
    def member?(%{__struct__: Easing.Range, first: first, last: last, step: step} = range, value) do
      cond do
        Easing.Range.size(range) == 0 ->
          {:ok, false}

        first <= last ->
          {:ok, first <= value and value <= last and rem(value - first, step) == 0}

        true ->
          {:ok, last <= value and value <= first and rem(value - first, step) == 0}
      end
    end

    def count(range) do
      {:ok, Easing.Range.size(range)}
    end

    # Contiguous slices are produced by stepping from the offset element.
    def slice(%{__struct__: Easing.Range, first: first, step: step} = range) do
      {:ok, Easing.Range.size(range), &slice(first + &1 * step, step, &2)}
    end

    defp slice(current, _step, 1), do: [current]
    defp slice(current, step, remaining), do: [current | slice(current + step, step, remaining - 1)]
  end
end
|
lib/easing/range.ex
| 0.853608
| 0.864597
|
range.ex
|
starcoder
|
--- Part Two ---
You notice that the device repeats the same frequency change list over and over. To calibrate the device, you need to find the first frequency it reaches twice.
For example, using the same list of changes above, the device would loop as follows:
Current frequency 0, change of +1; resulting frequency 1.
Current frequency 1, change of -2; resulting frequency -1.
Current frequency -1, change of +3; resulting frequency 2.
Current frequency 2, change of +1; resulting frequency 3.
(At this point, the device continues from the start of the list.)
Current frequency 3, change of +1; resulting frequency 4.
Current frequency 4, change of -2; resulting frequency 2, which has already been seen.
In this example, the first frequency reached twice is 2. Note that your device might need to repeat its list of frequency changes many times before a duplicate frequency is found, and that duplicates might be found while in the middle of processing the list.
Here are other examples:
+1, -1 first reaches 0 twice.
+3, +3, +4, -2, -4 first reaches 10 twice.
-6, +3, +8, +5, -6 first reaches 5 twice.
+7, +7, -2, -7, -4 first reaches 14 twice.
What is the first frequency your device reaches twice?
"""
defmodule Day1 do
  @moduledoc false

  # Part 1: apply every frequency change once, starting from 0.
  def frequency do
    file()
    |> Enum.reduce(0, &parse/2)
  end

  # Part 2: replay the change list, threading {current, seen, dup} state
  # through, until a frequency is reached twice (dup becomes non-nil).
  def frequency({_current, _seen, _dup} = state) do
    file()
    |> Enum.reduce_while(state, &parse/2)
    |> duplicate()
  end

  # No duplicate found in this pass — run the list again from where we left off.
  def duplicate({next, seen, nil}) do
    frequency({next, seen, nil})
  end

  def duplicate({_next, _seen, dup}) do
    dup
  end

  # Reads "assets/day1.txt" into a list of {operation, integer} tuples.
  # File.stream!/1 replaces the original File.open! + IO.stream pair, which
  # leaked the file handle; String.trim/1 replaces the deprecated
  # String.strip/1.
  def file do
    "assets/day1.txt"
    |> File.stream!()
    |> Enum.map(&String.trim/1)
    |> Enum.map(fn line ->
      {operation, frequency} = String.split_at(line, 1)
      {int, _} = Integer.parse(frequency)
      {operation, int}
    end)
  end

  def calculate("+", frequency, current), do: current + frequency
  def calculate("-", frequency, current), do: current - frequency

  # reduce_while step for part 2: halt as soon as a frequency repeats.
  # NOTE(review): `seen` is a list, so membership checks and `++` are O(n)
  # each — a MapSet would be faster if the puzzle input loops many times.
  def parse({operation, frequency}, {current, seen, _}) do
    next = calculate(operation, frequency, current)

    if !Enum.member?(seen, next) do
      {:cont, {next, seen ++ [next], nil}}
    else
      {:halt, {next, seen, next}}
    end
  end

  # Plain reduce step for part 1: accumulate the running frequency.
  def parse({operation, frequency}, current) do
    calculate(operation, frequency, current)
  end
end
|
2018/lib/day1.ex
| 0.893143
| 0.928797
|
day1.ex
|
starcoder
|
defmodule Telnet.MSSP do
  @moduledoc """
  Parse MSSP data
  Telnet option or plaintext
  """

  @se 240
  @iac 255
  @mssp_var 1
  @mssp_val 2

  @doc """
  Parse MSSP subnegotiation options
  Returns `{:ok, map}` of variable name => value strings, or `:error` when
  the binary does not terminate with IAC SE.
  """
  def parse(data) do
    case mssp(data, :start, []) do
      :error ->
        :error

      data ->
        data =
          data
          |> Enum.reject(&is_start?/1)
          |> Enum.into(%{}, fn map ->
            {to_string(Enum.reverse(map[:name])), to_string(Enum.reverse(map[:value]))}
          end)

        {:ok, data}
    end
  end

  defp is_start?(:start), do: true
  defp is_start?(_), do: false

  # Recursive byte-walk over the subnegotiation body. `current` is either
  # :start (no variable seen yet) or a %{type:, name:, value:} accumulator;
  # name and value are collected as reversed char lists.
  def mssp(<<>>, _current, _stack) do
    :error
  end

  # IAC SE terminates the option: return every collected accumulator.
  def mssp(<<@iac, @se, _data::binary>>, current, stack) do
    [current | stack]
  end

  def mssp(<<@iac, data::binary>>, current, stack) do
    mssp(data, current, stack)
  end

  def mssp(<<@se, data::binary>>, current, stack) do
    mssp(data, current, stack)
  end

  # MSSP_VAR starts a new variable; push the previous accumulator.
  def mssp(<<@mssp_var, data::binary>>, current, stack) do
    mssp(data, %{type: :name, name: [], value: []}, [current | stack])
  end

  # MSSP_VAL switches the current variable to value collection.
  # CLEANUP: the original recursed with a redundant extra
  # Map.put(current, :type, :value) on the already-updated accumulator;
  # passing `current` directly is equivalent.
  def mssp(<<@mssp_val, data::binary>>, current, stack) do
    current =
      current
      |> Map.put(:type, :value)
      |> Map.put(:value_start, true)

    mssp(data, current, stack)
  end

  # Skip any bytes that appear before the first MSSP_VAR.
  def mssp(<<_byte::size(8), data::binary>>, :start, stack) do
    mssp(data, :start, stack)
  end

  def mssp(<<byte::size(8), data::binary>>, current, stack) do
    case current[:type] do
      :name ->
        mssp(data, Map.put(current, :name, [byte | current.name]), stack)

      :value ->
        mssp(data, append_value(current, byte), stack)
    end
  end

  # Successive MSSP_VALs for the same variable are joined with ", "
  # (inserted reversed here, since the whole value list is reversed at the
  # end in parse/1).
  defp append_value(current, byte) do
    case {current.value_start, current.value} do
      {true, []} ->
        current
        |> Map.put(:value, [byte | current.value])
        |> Map.put(:value_start, false)

      {true, value} ->
        current
        |> Map.put(:value, [byte, " ", "," | value])
        |> Map.put(:value_start, false)

      {false, value} ->
        Map.put(current, :value, [byte | value])
    end
  end

  @doc """
  Parse text as a response to `mssp-request`
  Should include MSSP-REPLY-START and end with MSSP-REPLY-END
  """
  def parse_text(text) do
    data =
      text
      |> String.replace("\r", "")
      |> String.split("\n")
      |> find_mssp_text([])

    case data do
      :error ->
        :error

      data ->
        Enum.into(data, %{}, &parse_mssp_text_line/1)
    end
  end

  def find_mssp_text([], _stack) do
    :error
  end

  # A START marker (re)initializes collection; an END marker returns what
  # was gathered since the last START.
  def find_mssp_text(["MSSP-REPLY-START" | data], _stack) do
    find_mssp_text(data, [])
  end

  def find_mssp_text(["MSSP-REPLY-END" | _data], stack) do
    stack
  end

  def find_mssp_text([line | data], stack) do
    find_mssp_text(data, [line | stack])
  end

  # A line is "NAME\tVALUE[\tVALUE...]"; keep everything after the first tab.
  def parse_mssp_text_line(line) do
    [name | values] = String.split(line, "\t")
    {name, Enum.join(values, "\t")}
  end
end
|
lib/telnet/mssp.ex
| 0.546496
| 0.477493
|
mssp.ex
|
starcoder
|
defmodule Robotica.Plugins.HDMI do
  @moduledoc """
  HDMI matrix plugin
  """
  use GenServer
  use Robotica.Plugin
  require Logger

  alias Robotica.Devices.HDMI

  defmodule Config do
    @moduledoc false
    @type t :: %__MODULE__{}
    # NOTE(review): :destination is declared here but config_schema/0 below
    # only validates :host — confirm whether :destination is still used.
    defstruct [:host, :destination]
  end

  ## Server Callbacks

  def init(plugin) do
    # Periodic polling is currently disabled; re-enable these lines to refresh
    # output state every 60s via the :poll message handled below.
    # {:ok, _timer} = :timer.send_interval(60_000, :poll)
    # Process.send_after(self(), :poll, 0)
    {:ok, plugin}
  end

  def config_schema do
    %{
      struct_type: Config,
      host: {:string, true}
    }
  end

  # Publish the input currently routed to `output` (e.g. topic "output2" -> "3").
  @spec publish_device_output(Robotica.Plugin.t(), integer, integer) :: :ok
  defp publish_device_output(%Robotica.Plugin{} = state, input, output) do
    topic = "output#{output}"
    publish_state_raw(state, topic, Integer.to_string(input))
  end

  # Publish that `output` is (temporarily) off, e.g. while switching.
  @spec publish_device_output_off(Robotica.Plugin.t(), integer) :: :ok
  defp publish_device_output_off(%Robotica.Plugin{} = state, output) do
    topic = "output#{output}"
    publish_state_raw(state, topic, "OFF")
  end

  # Publish that `output` is unreachable (device error).
  @spec publish_device_output_hard_off(Robotica.Plugin.t(), integer) :: :ok
  defp publish_device_output_hard_off(%Robotica.Plugin{} = state, output) do
    topic = "output#{output}"
    publish_state_raw(state, topic, "HARD_OFF")
  end

  # Query the matrix for each output's routed input and publish it.
  # Stops at the first device error and returns it.
  @spec poll(Robotica.Plugin.t(), list(integer)) :: :ok | {:error, String.t()}
  defp poll(%Robotica.Plugin{}, []), do: :ok

  defp poll(%Robotica.Plugin{} = state, [output | tail]) do
    case HDMI.get_input_for_output(state.config.host, output) do
      {:ok, input} ->
        publish_device_output(state, input, output)
        poll(state, tail)

      {:error, error} ->
        Logger.error("HDMI #{state.config.host}: error: #{error}")
        {:error, error}
    end
  end

  def handle_info(:poll, %Robotica.Plugin{} = state) do
    outputs = [1, 2, 3, 4]

    case poll(state, outputs) do
      :ok ->
        :ok

      {:error, _error} ->
        # On any device error, mark every output as unreachable.
        Enum.each(outputs, fn remaining_output ->
          publish_device_output_hard_off(state, remaining_output)
        end)
    end

    {:noreply, state}
  end

  def handle_info(message, %Robotica.Plugin{} = state) do
    Logger.error("HDMI #{state.config.host}: Got unhandled message #{inspect(message)}.")
    # Fix: the original clause returned the Logger result, which is not a
    # valid handle_info/2 return and would crash the GenServer.
    {:noreply, state}
  end

  # Switch `command.input` to `command.output`, publishing OFF while
  # switching and the final routed input (or HARD_OFF on failure) after.
  # NOTE: its {:noreply, state} return is discarded by handle_cast/2 below.
  def handle_command(%Robotica.Plugin{} = state, command) do
    Logger.info("HDMI #{state.config.host}: #{command.input} #{command.output}")
    publish_command(state.location, state.device, command)
    publish_device_output_off(state, command.output)

    case HDMI.switch(state.config.host, command.input, command.output) do
      :ok ->
        publish_device_output(state, command.input, command.output)

      {:error, error} ->
        Logger.error("HDMI #{state.config.host}: error: #{error}")
        publish_device_output_hard_off(state, command.output)
    end

    {:noreply, state}
  end

  def handle_cast({:mqtt, _, :command, command}, %Robotica.Plugin{} = state) do
    case Robotica.Config.validate_hdmi_command(command) do
      {:ok, command} ->
        case check_type(command, "hdmi") do
          {command, true} -> handle_command(state, command)
          {_, false} -> state
        end

      {:error, error} ->
        Logger.error(
          "HDMI #{state.config.host}: Invalid hdmi command received: #{inspect(error)}."
        )
    end

    {:noreply, state}
  end
end
|
robotica/lib/robotica/plugins/hdmi.ex
| 0.710226
| 0.412234
|
hdmi.ex
|
starcoder
|
defmodule Resx.Producer do
    @moduledoc """
      A producer is an interface for working with a referenceable resource.

      This could be anything from a type of storage or networking protocol, to
      generation of content (e.g. you might expose some procedural generation
      algorithm as a producer), to manipulations on other resources (there is
      already a common way to handle this through `Resx.Transformer`).
    """
    alias Resx.Resource
    alias Resx.Resource.Content
    alias Resx.Resource.Reference

    @doc """
      Implement the behaviour for retrieving the URI schemes this producer can
      handle.

      Return a list of the scheme names.
    """
    @callback schemes() :: [String.t, ...]

    @doc """
      Implement the behaviour for retrieving a resource.

      The reference to the resource can either be an existing `Resx.Resource.Reference`
      struct, or a URI.

      The `options` keyword allows for your implementation to expose some configurable
      settings.

      If the resource was successfully retrieved return `{ :ok, resource }`. Where
      `resource` is the `Resx.Resource` struct. Otherwise return an appropriate
      error.
    """
    @callback open(reference :: Resx.ref, options :: keyword) :: { :ok, resource :: Resource.t(Content.t) } | Resx.error(Resx.resource_error | Resx.reference_error)

    @doc """
      Optionally implement the behaviour for retrieving a resource stream.

      The reference to the resource can either be an existing `Resx.Resource.Reference`
      struct, or a URI.

      The `options` keyword allows for your implementation to expose some configurable
      settings.

      If the resource was successfully retrieved return `{ :ok, resource }`. Where
      `resource` is the `Resx.Resource` struct. Otherwise return an appropriate
      error.
    """
    @callback stream(reference :: Resx.ref, options :: keyword) :: { :ok, resource :: Resource.t(Content.Stream.t) } | Resx.error(Resx.resource_error | Resx.reference_error)

    @doc """
      Optionally implement the behaviour for checking whether a resource exists for the
      given reference.

      If an implementation is not provided it will determine whether the resource exists
      by opening the resource.

      The reference to the resource can either be an existing `Resx.Resource.Reference`
      struct, or a URI.

      If the resource exists return `{ :ok, true }`, if it does not exist return
      `{ :ok, false }`. Otherwise return an appropriate error.
    """
    @callback exists?(reference :: Resx.ref) :: { :ok, exists :: boolean } | Resx.error(Resx.reference_error)

    @doc """
      Implement the behaviour for checking if two references point to the same
      resource.

      The reference to the resource can either be an existing `Resx.Resource.Reference`
      struct, or a URI.

      If the references are alike return `true`, otherwise return `false`.
    """
    @callback alike?(reference_a :: Resx.ref, reference_b :: Resx.ref) :: boolean

    @doc """
      Implement the behaviour to retrieve source (if any).

      The reference to the resource can either be an existing `Resx.Resource.Reference`
      struct, or a URI.

      If the source can be retrieved return `{ :ok, source }`, where `source` is
      either the reference chain for the source or `nil` if there is none. Otherwise
      return an appropriate error.
    """
    @callback source(reference :: Resx.ref) :: { :ok, source :: Resx.ref | nil } | Resx.error(Resx.reference_error)

    @doc """
      Implement the behaviour to retrieve the URI for a resource reference.

      The reference to the resource is an existing `Resx.Resource.Reference`
      struct.

      If the URI can be created return `{ :ok, uri }`. Otherwise return an
      appropriate error.
    """
    @callback resource_uri(reference :: Reference.t) :: { :ok, Resx.uri } | Resx.error(Resx.resource_error | Resx.reference_error)

    @doc """
      Optionally implement the behaviour to retrieve the attribute for a resource.

      The reference to the resource can either be an existing `Resx.Resource.Reference`
      struct, or a URI.

      If the attribute was successfully retrieved for the resource return
      `{ :ok, value }`, where `value` is the value of the attribute. Otherwise
      return an appropriate error.
    """
    @callback resource_attribute(reference :: Resx.ref, field :: Resource.attribute_key) :: { :ok, attribute_value :: any } | Resx.error(Resx.resource_error | Resx.reference_error | :unknown_key)

    @doc """
      Implement the behaviour to retrieve the attributes for a resource.

      The reference to the resource can either be an existing `Resx.Resource.Reference`
      struct, or a URI.

      If the attributes were successfully retrieved for the resource return
      `{ :ok, %{ key => value } }`, where `key` is the field names of the attribute,
      and `value` is the value of the attribute. Otherwise return an appropriate error.
    """
    @callback resource_attributes(reference :: Resx.ref) :: { :ok, attribute_values :: %{ optional(Resource.attribute_key) => any } } | Resx.error(Resx.resource_error | Resx.reference_error)

    @doc """
      Optionally implement the behaviour to retrieve the attribute keys for a resource.

      The reference to the resource can either be an existing `Resx.Resource.Reference`
      struct, or a URI.

      If the attribute was successfully retrieved for the resource return
      `{ :ok, keys }`, where `keys` are the field names of the different attributes.
      Otherwise return an appropriate error.
    """
    @callback resource_attribute_keys(reference :: Resx.ref) :: { :ok, [field :: Resource.attribute_key] } | Resx.error(Resx.resource_error | Resx.reference_error)

    @doc false
    defmacro __using__(_opts) do
        quote do
            @behaviour Resx.Producer

            # Default stream/2: open the resource eagerly and wrap its content
            # in a lazy Content.Stream.
            @impl Resx.Producer
            def stream(reference, opts \\ []) do
                case __MODULE__.open(reference, opts) do
                    { :ok, resource = %Resource{ content: content } } -> { :ok, %{ resource | content: Content.Stream.new(content) } }
                    error -> error
                end
            end

            # Default exists?/1: probe by opening the resource.
            @impl Resx.Producer
            def exists?(reference) do
                case __MODULE__.open(reference, []) do
                    # Fix: the callback contract is { :ok, boolean }; the
                    # original returned a bare `true` here.
                    { :ok, _ } -> { :ok, true }
                    error -> error
                end
            end

            # Default resource_attribute/2: look the field up in the full
            # attribute map.
            @impl Resx.Producer
            def resource_attribute(reference, field) do
                case __MODULE__.resource_attributes(reference) do
                    { :ok, attributes } ->
                        if Map.has_key?(attributes, field) do
                            { :ok, attributes[field] }
                        else
                            { :error, { :unknown_key, field } }
                        end
                    error -> error
                end
            end

            # Default resource_attribute_keys/1: the keys of the attribute map.
            @impl Resx.Producer
            def resource_attribute_keys(reference) do
                case __MODULE__.resource_attributes(reference) do
                    { :ok, attributes } -> { :ok, Map.keys(attributes) }
                    error -> error
                end
            end

            defoverridable [stream: 1, stream: 2, exists?: 1, resource_attribute: 2, resource_attribute_keys: 1]
        end
    end
end
|
lib/resx/producer.ex
| 0.893978
| 0.498779
|
producer.ex
|
starcoder
|
defmodule Graph.Directed do
  @moduledoc false

  # These neighbor lookups run in the inner loop of every traversal below,
  # so they are inlined for performance.
  @compile {:inline, [in_neighbors: 2, in_neighbors: 3, out_neighbors: 2, out_neighbors: 3]}

  import Graph.Utils, only: [vertex_id: 1]

  # Topological sort: returns the vertices in topological order, or `false`
  # when the graph is cyclic (detected when the in-neighbor forest over the
  # reverse postorder does not cover every vertex).
  def topsort(%Graph{vertices: vs} = g) do
    l = reverse_postorder(g)

    if length(forest(g, &in_neighbors/3, l)) == map_size(vs) do
      Enum.map(l, &Map.get(vs, &1))
    else
      false
    end
  end

  # Vertices in depth-first preorder.
  def preorder(%Graph{vertices: vs} = g) do
    g
    |> reverse_preorder()
    |> Stream.map(fn id -> Map.get(vs, id) end)
    |> Enum.reverse()
  end

  # Vertices in depth-first postorder.
  def postorder(%Graph{vertices: vs} = g) do
    g
    |> reverse_postorder()
    |> Stream.map(fn id -> Map.get(vs, id) end)
    |> Enum.reverse()
  end

  # An arborescence is a directed rooted tree; true when a root exists.
  def is_arborescence?(%Graph{} = g) do
    arborescence_root(g) != nil
  end

  # Finds the root of an arborescence, or nil. A tree must have exactly
  # |V| - 1 edges, and the root is the unique vertex with in-degree 0.
  # NOTE(review): any other in-degree (or a second in-degree-0 vertex)
  # raises inside the reduce/match and is converted to nil by the trailing
  # function-level catch — intentional control flow, not an oversight.
  def arborescence_root(%Graph{vertices: vs, out_edges: oe} = g) do
    num_edges = Enum.reduce(oe, 0, fn {_, out}, sum -> sum + MapSet.size(out) end)
    num_vertices = map_size(vs)

    if num_edges == num_vertices - 1 do
      [root] =
        Enum.reduce(vs, [], fn {v_id, v}, acc ->
          case length(in_neighbors(g, v_id)) do
            1 -> acc
            0 when acc == [] -> [v]
          end
        end)

      root
    else
      nil
    end
  catch
    _type, _err ->
      nil
  end

  # Acyclic iff there are no self-loops and a topological sort exists.
  def is_acyclic?(%Graph{} = g) do
    has_loops?(g) == false and topsort(g) != false
  end

  # True when any vertex has an edge to itself; throw is used for early exit.
  def has_loops?(%Graph{vertices: vs} = g) do
    for {v_id, _} <- vs do
      if is_reflexive_vertex(g, v_id) do
        throw(:has_loop)
      end
    end

    false
  catch
    _, :has_loop ->
      true
  end

  # All vertices that have self-loops.
  def loop_vertices(%Graph{vertices: vs} = g) do
    for {v_id, v} <- vs, is_reflexive_vertex(g, v_id), do: v
  end

  # Weakly connected components: edges are followed in both directions
  # via inout/3.
  def components(%Graph{vertices: vs} = g) do
    for component <- forest(g, &inout/3) do
      for id <- component, do: Map.get(vs, id)
    end
  end

  # Strongly connected components (Kosaraju-style: in-neighbor forest over
  # the reverse postorder).
  def strong_components(%Graph{vertices: vs} = g) do
    for component <- forest(g, &in_neighbors/3, reverse_postorder(g)) do
      for id <- component, do: Map.get(vs, id)
    end
  end

  # All vertices reachable from `vs`, including the start vertices.
  def reachable(%Graph{vertices: vertices} = g, vs) when is_list(vs) do
    vs = Enum.map(vs, &vertex_id/1)
    for id <- :lists.append(forest(g, &out_neighbors/3, vs, :first)), do: Map.get(vertices, id)
  end

  # As reachable/2, but start vertices are only included if re-entered.
  def reachable_neighbors(%Graph{vertices: vertices} = g, vs) when is_list(vs) do
    vs = Enum.map(vs, &vertex_id/1)

    for id <- :lists.append(forest(g, &out_neighbors/3, vs, :not_first)),
        do: Map.get(vertices, id)
  end

  # All vertices that can reach `vs`, including the start vertices.
  def reaching(%Graph{vertices: vertices} = g, vs) when is_list(vs) do
    vs = Enum.map(vs, &vertex_id/1)
    for id <- :lists.append(forest(g, &in_neighbors/3, vs, :first)), do: Map.get(vertices, id)
  end

  # As reaching/2, but start vertices are only included if re-entered.
  def reaching_neighbors(%Graph{vertices: vertices} = g, vs) when is_list(vs) do
    vs = Enum.map(vs, &vertex_id/1)
    for id <- :lists.append(forest(g, &in_neighbors/3, vs, :not_first)), do: Map.get(vertices, id)
  end

  # 3-arity neighbor lookups prepend results onto the accumulator `vs`.
  def in_neighbors(%Graph{} = g, v, []) do
    in_neighbors(g, v)
  end

  def in_neighbors(%Graph{in_edges: ie}, v, vs) do
    case Map.get(ie, v) do
      nil -> vs
      v_in -> MapSet.to_list(v_in) ++ vs
    end
  end

  def in_neighbors(%Graph{in_edges: ie}, v) do
    case Map.get(ie, v) do
      nil -> []
      v_in -> MapSet.to_list(v_in)
    end
  end

  def out_neighbors(%Graph{} = g, v, []) do
    out_neighbors(g, v)
  end

  def out_neighbors(%Graph{out_edges: oe}, v, vs) do
    case Map.get(oe, v) do
      nil -> vs
      v_out -> MapSet.to_list(v_out) ++ vs
    end
  end

  def out_neighbors(%Graph{out_edges: oe}, v) do
    case Map.get(oe, v) do
      nil -> []
      v_out -> MapSet.to_list(v_out)
    end
  end

  ## Private

  defp is_reflexive_vertex(g, v) do
    Enum.member?(out_neighbors(g, v), v)
  end

  # Builds a DFS spanning forest over all (or the given) vertex ids,
  # following edges via `fun` (in_neighbors/out_neighbors/inout).
  defp forest(%Graph{vertices: vs} = g, fun) do
    forest(g, fun, Map.keys(vs))
  end

  defp forest(g, fun, vs) do
    forest(g, fun, vs, :first)
  end

  defp forest(g, fun, vs, handle_first) do
    {_, acc} =
      List.foldl(vs, {MapSet.new(), []}, fn v, {visited, acc} ->
        pretraverse(handle_first, v, fun, g, visited, acc)
      end)

    acc
  end

  # :first includes the start vertex itself; :not_first starts from its
  # neighbors (used by the *_neighbors variants above).
  defp pretraverse(:first, v, fun, g, visited, acc) do
    ptraverse([v], fun, g, visited, [], acc)
  end

  defp pretraverse(:not_first, v, fun, g, visited, acc) do
    if MapSet.member?(visited, v) do
      {visited, acc}
    else
      ptraverse(fun.(g, v, []), fun, g, visited, [], acc)
    end
  end

  # Depth-first walk collecting one tree of the forest into `results`.
  defp ptraverse([v | vs], fun, g, visited, results, acc) do
    if MapSet.member?(visited, v) do
      ptraverse(vs, fun, g, visited, results, acc)
    else
      visited = MapSet.put(visited, v)
      ptraverse(fun.(g, v, vs), fun, g, visited, [v | results], acc)
    end
  end

  # An empty tree is dropped; a non-empty one becomes a component in `acc`.
  defp ptraverse([], _fun, _g, visited, [], acc), do: {visited, acc}
  defp ptraverse([], _fun, _g, visited, results, acc), do: {visited, [results | acc]}

  defp reverse_preorder(g) do
    :lists.append(forest(g, &out_neighbors/3))
  end

  # DFS finish order, most-recently-finished first.
  defp reverse_postorder(%Graph{vertices: vs} = g) do
    {_, l} = posttraverse(Map.keys(vs), g, MapSet.new(), [])
    l
  end

  defp posttraverse([v | vs], g, visited, acc) do
    {visited, acc} =
      if MapSet.member?(visited, v) do
        {visited, acc}
      else
        visited = MapSet.put(visited, v)
        {visited2, acc2} = posttraverse(out_neighbors(g, v, []), g, visited, acc)
        {visited2, [v | acc2]}
      end

    posttraverse(vs, g, visited, acc)
  end

  defp posttraverse([], _g, visited, acc), do: {visited, acc}

  # Union of in- and out-neighbors: treats edges as undirected.
  defp inout(g, v, vs) do
    in_neighbors(g, v, out_neighbors(g, v, vs))
  end
end
|
lib/graph/directed.ex
| 0.602412
| 0.609757
|
directed.ex
|
starcoder
|
defmodule Scenic.Cache.Hash do
  @moduledoc """
  Simple functions to load a file, following the hashing rules.

  Hashes are computed with `:crypto` and rendered as unpadded
  URL-safe Base64 strings.
  """

  # Hash algorithms accepted by compute/verify; :sha is the default.
  @hash_types [:sha, :sha224, :sha256, :sha384, :sha512, :ripemd160]
  @default_hash :sha

  # ===========================================================================
  defmodule Error do
    @moduledoc "Raised when a hash check fails or a hash type is invalid."
    defexception message: "Hash check failed"
  end

  # --------------------------------------------------------
  @doc "Returns the list of supported hash type atoms."
  def valid_hash_types(), do: @hash_types

  # --------------------------------------------------------
  @doc "Returns true when `hash_type` is one of the supported hash types."
  def valid_hash_type?(hash_type), do: Enum.member?(@hash_types, hash_type)

  # --------------------------------------------------------
  @doc "Returns `hash_type` unchanged, raising `Error` when it is unsupported."
  def valid_hash_type!(hash_type) do
    # Consistency: reuse valid_hash_type?/1 instead of repeating the
    # Enum.member?/2 check.
    if valid_hash_type?(hash_type) do
      hash_type
    else
      msg = "Invalid hash type: #{hash_type}\r\n" <> "Must be one of: #{inspect(@hash_types)}"
      raise Error, message: msg
    end
  end

  # --------------------------------------------------------
  @doc """
  Computes the hash of `data` with `hash_type`, returned as unpadded
  URL-safe Base64. Raises `Error` on an invalid hash type.
  """
  def compute(data, hash_type) do
    valid_hash_type!(hash_type)
    |> :crypto.hash(data)
    |> Base.url_encode64(padding: false)
  end

  # --------------------------------------------------------
  @doc """
  Computes the hash of the file at `path`. Returns `{:ok, hash}`,
  `{:error, :invalid_hash_type}`, or `{:error, reason}` on file errors.
  """
  def compute_file(path, hash_type) do
    do_compute_file(
      path,
      hash_type,
      valid_hash_type?(hash_type)
    )
  end

  defp do_compute_file(_, _, false), do: {:error, :invalid_hash_type}

  defp do_compute_file(path, hash_type, true) do
    # start the hash context
    hash_context = :crypto.hash_init(hash_type)

    # since there is no File.stream option, only File.stream!, catch the error
    try do
      # stream the file into the hash
      hash =
        File.stream!(path, [], 2048)
        |> Enum.reduce(hash_context, &:crypto.hash_update(&2, &1))
        |> :crypto.hash_final()
        |> Base.url_encode64(padding: false)

      {:ok, hash}
    rescue
      err ->
        # finalize the context so its resources are released before returning
        :crypto.hash_final(hash_context)

        case err do
          %{reason: reason} -> {:error, reason}
          _ -> {:error, :hash}
        end
    end
  end

  # --------------------------------------------------------
  @doc "As `compute_file/2`, but raises on an invalid hash type or file error."
  def compute_file!(path, hash_type) do
    # start the hash context
    hash_context =
      valid_hash_type!(hash_type)
      |> :crypto.hash_init()

    # stream the file into the hash
    File.stream!(path, [], 2048)
    |> Enum.reduce(hash_context, &:crypto.hash_update(&2, &1))
    |> :crypto.hash_final()
    |> Base.url_encode64(padding: false)
  end

  # --------------------------------------------------------
  @doc "Returns `{:ok, data}` when `data` hashes to `hash`, else `{:error, :hash_failure}`."
  def verify(data, hash, hash_type) do
    case compute(data, hash_type) == hash do
      true -> {:ok, data}
      false -> {:error, :hash_failure}
    end
  end

  # --------------------------------------------------------
  @doc "As `verify/3`, but returns `data` directly, raising `Error` on mismatch."
  def verify!(data, hash, hash_type) do
    case compute(data, hash_type) == hash do
      true -> data
      false -> raise Error
    end
  end

  # --------------------------------------------------------
  @doc "Verifies a file against the hash encoded in its path (see `path_params/1`)."
  def verify_file(path_data), do: path_params(path_data) |> do_verify_file()

  defp do_verify_file({path, hash, hash_type}) do
    case compute_file(path, hash_type) do
      {:ok, computed_hash} ->
        case computed_hash == hash do
          true -> {:ok, hash}
          false -> {:error, :hash_failure}
        end

      err ->
        err
    end
  end

  # --------------------------------------------------------
  @doc "As `verify_file/1`, but raises `Error` on mismatch."
  def verify_file!(path_data), do: path_params(path_data) |> do_verify_file!()

  defp do_verify_file!({path, hash, hash_type}) do
    case compute_file!(path, hash_type) == hash do
      true -> hash
      false -> raise Error
    end
  end

  # --------------------------------------------------------
  @doc """
  Extracts the hash from a path of the form "name.ext.hash"
  (everything after the last dot).
  """
  def from_path(path) do
    String.split(path, ".")
    |> List.last()
  end

  # --------------------------------------------------------
  @doc """
  Normalizes the various path/hash/hash_type argument shapes into a
  `{path_or_data, hash, hash_type}` tuple, defaulting the hash type to
  `#{inspect(@default_hash)}` and reading a missing hash from the path suffix.
  Raises `Error` on an invalid hash type.
  """
  def path_params(path)

  def path_params(path) when is_bitstring(path) do
    hash = from_path(path)
    path_params({path, hash, @default_hash})
  end

  def path_params({path, hash_type}) when is_atom(hash_type) do
    hash = from_path(path)
    path_params({path, hash, hash_type})
  end

  def path_params({path_or_data, hash}), do: path_params({path_or_data, hash, @default_hash})

  def path_params({path_or_data, hash, hash_type})
      when is_binary(path_or_data) and is_bitstring(hash) and is_atom(hash_type) do
    {path_or_data, hash, valid_hash_type!(hash_type)}
  end

  def path_params(path_or_data, hash_or_type), do: path_params({path_or_data, hash_or_type})
  def path_params(path_or_data, hash, hash_type), do: path_params({path_or_data, hash, hash_type})
end
|
lib/scenic/cache/hash.ex
| 0.628863
| 0.406921
|
hash.ex
|
starcoder
|
defmodule Csp.CLI do
  @moduledoc """
  Command line interface for constraint satisfaction.
  """
  alias Csp
  alias Csp.Problems
  alias Csp.AC3

  @doc """
  Usage:
  ```
  $ mix escript.build
  $ ./csp
  ```
  """
  def main(_args) do
    IO.puts("Let's try out some example constraint satisfaction problems.")
    trial_problem_selection()
  end

  ## Helpers

  # Interactive menu loop: dispatches to one of the trial problems, restarts
  # on unexpected input, and terminates on "q".
  defp trial_problem_selection() do
    IO.puts("Select a trial problem (type `1`, `2`, ...), or terminate (type `q`):")
    IO.puts("\t1. Sudoku")
    IO.puts("\t2. Squares")
    IO.puts("\t3. Map coloring")
    IO.puts("\t4. N queens")
    IO.puts("\tq. Exit")

    problem = IO.read(:line) |> String.trim()

    if problem != "q" do
      case Integer.parse(problem) do
        {problem, ""} ->
          case problem do
            1 -> trial_sudoku_problem()
            2 -> trial_squares_problem()
            3 -> trial_map_coloring_problem()
            4 -> trial_n_queens_problem()
            # Fix: the original case was non-exhaustive, so entering e.g. "5"
            # crashed with CaseClauseError instead of restarting the menu.
            other ->
              IO.puts("Unexpected input: #{other}; restarting.")
              trial_problem_selection()
          end

        unexpected ->
          # Fix: inspect/1 is required here — `unexpected` may be a partial
          # parse like {5, "x"}, and interpolating a tuple raises.
          IO.puts("Unexpected input: #{inspect(unexpected)}; restarting.")
          trial_problem_selection()
      end
    else
      IO.puts("Terminating.")
    end
  end

  # Solves the Wikipedia Sudoku example with AC-3 and pretty-prints the result.
  defp trial_sudoku_problem() do
    original_cells_map = Problems.wiki_sudoku_cells_map()
    IO.puts("\nThis is the input Sudoku puzzle:\n")
    Problems.pretty_print_sudoku(original_cells_map)

    ws_csp = Problems.wiki_sudoku()

    IO.puts(
      "\nCSP definition has #{length(ws_csp.variables)} variables " <>
        "and #{length(ws_csp.constraints)} inequality constraints."
    )

    {time, {:solved, ws_csp_solved}} = :timer.tc(fn -> AC3.solve(ws_csp) end)
    IO.puts("Solved with AC-3 in #{time / 1_000_000} seconds.")

    # After solving, every domain is a singleton list; unwrap it.
    solution_cells_map =
      ws_csp_solved.domains
      |> Enum.map(fn {cell, [value]} -> {cell, value} end)
      |> Enum.into(%{})

    IO.puts("\nSolution:\n")
    Problems.pretty_print_sudoku(solution_cells_map)
    IO.puts("")
    trial_problem_selection()
  end

  # Finds all (x, y) with y = x * x up to a user-chosen bound, optionally
  # reducing domains with AC-3 before brute-force search.
  defp trial_squares_problem() do
    IO.puts("\nSelect the max value of x and y (integer between 5 and 1_000_000_000):")
    max_value = IO.read(:line) |> String.trim()
    {max_value, ""} = Integer.parse(max_value)

    IO.puts("\nThis is the input squares puzzle:\n")

    IO.puts(
      "\t>> Find all pairs (x, y), such that y = x * x, \n" <>
        "\t if x and y are integers between 0 and #{max_value}.\n"
    )

    csp = Problems.squares(max_value)
    IO.puts("Original CSP (note variables' domains!):\n#{inspect(csp)}\n")
    IO.puts("We will need to supplement AC-3 with brute-force search to solve it.")
    IO.puts("Do you want to run AC-3 before doing brute force search? (y/n)")
    run_ac3 = IO.read(:line) |> String.trim()

    if run_ac3 == "y" do
      IO.puts("First, we will reduce the domains of our variables via AC-3.\n")
      {time, {:reduced, csp}} = :timer.tc(fn -> AC3.solve(csp) end)
      IO.puts("AC-3 run took #{time / 1_000_000} seconds, and reduced domains of variables to:\n")
      IO.puts("#{inspect(csp.domains)}\n")
      trial_squares_problem_brute_force_part(csp)
    else
      trial_squares_problem_brute_force_part(csp)
    end
  end

  # Shared tail of the squares demo: brute-force search and result printing.
  def trial_squares_problem_brute_force_part(csp) do
    IO.puts("Now we can run brute force search.\n")

    {time, {:solved, solutions}} =
      :timer.tc(fn -> Csp.solve(csp, method: :brute_force, all: true) end)

    IO.puts(
      "Brute force search run took #{time / 1_000_000} seconds, " <>
        "and found the following solutions:\n"
    )

    solution_string =
      Enum.map(solutions, fn solution -> "\t#{inspect(solution)}" end) |> Enum.join("\n")

    IO.puts(solution_string)
    IO.puts("")
    trial_problem_selection()
  end

  # Map coloring of Australian states via backtracking search.
  def trial_map_coloring_problem() do
    IO.puts("Let's solve map coloring problem for Australian states with backtracking search.\n")
    csp = Problems.map_coloring()
    IO.puts("CSP is defined like this:\n#{inspect(csp)}\n\n")
    IO.puts("Running backtracking...")

    {time, {:solved, solutions}} =
      :timer.tc(fn -> Csp.solve(csp, method: :backtracking, all: true) end)

    IO.puts(
      "Backtracking run took #{time / 1_000_000} seconds, " <>
        "and found the following solutions:\n"
    )

    solution_string =
      Enum.map(solutions, fn solution -> "\t#{inspect(solution)}\n" end) |> Enum.join("")

    IO.puts(solution_string)
    IO.puts("")
    trial_problem_selection()
  end

  # N Queens via backtracking with a choice of CSP encodings and optional AC-3.
  def trial_n_queens_problem() do
    IO.puts(
      "Let's solve N Queens problem with backtracking, " <>
        "and see how constraint selection affects performance.\n"
    )

    IO.puts("Select N:")
    n = IO.read(:line) |> String.trim()
    {n, ""} = Integer.parse(n)

    IO.puts(
      """
      Do you want to use:
      \t1. Optimal N Queens CSP representation (fastest)
      \t2. Row-based queen placement constraint (slower)
      \t3. Global queen placement constraint (the slowest)
      Type 1, 2, or 3:
      """
      |> String.trim_trailing("\n")
    )

    placement_constraint_type = IO.read(:line) |> String.trim()
    {placement_constraint_type, ""} = Integer.parse(placement_constraint_type)

    IO.puts("\nEnable AC-3 (y/n)?:")
    ac3 = IO.read(:line) |> String.trim()
    ac3 = ac3 == "y"

    csp =
      case placement_constraint_type do
        1 -> Problems.nqueens(n)
        2 -> Problems.nqueens_slow(n, true)
        3 -> Problems.nqueens_slow(n, false)
      end

    IO.puts("Generated CSP with #{length(csp.constraints)} constraints.")
    IO.puts("Solving...")

    {time, {:solved, solution}} =
      :timer.tc(fn -> Csp.solve(csp, method: :backtracking, ac3: ac3) end)

    IO.puts("Solved in #{inspect(time / 1_000_000)} seconds:\n")

    case placement_constraint_type do
      1 -> Problems.pretty_print_nqueens(solution, n)
      _ -> Problems.pretty_print_nqueens_slow(solution, n)
    end

    IO.puts("\n")
    trial_problem_selection()
  end
end
|
lib/csp/cli.ex
| 0.874921
| 0.783699
|
cli.ex
|
starcoder
|
defmodule Strukt.Validator.Builder do
  @moduledoc """
  This module compiles a validator pipeline.
  It is largely based on `Plug.Builder`, with minimal changes.
  """

  @doc """
  Compiles the pipeline.

  Each pipeline element should be a tuple of `{validator_name, options, guards}`

  This function expects a reversed pipeline, i.e. the last validator to be called
  comes first in the list.

  This function returns a tuple where the first element is a quoted reference to the
  changeset being validated, and the second element being the compiled quoted pipeline.

  ## Example

      Strukt.Validator.Builder.compile(env, [
        {Strukt.Validators.RequireOnInsert, [:field], quote(do: changeset.action == :insert)},
        {Strukt.Validators.RequireOnUpdate, [:other_field], quote(do: changeset.action == :update)},
        {Strukt.Validators.RequireOnChange, [:other_field], true}
      ], [])
  """
  def compile(env, pipeline, builder_opts \\ []) do
    module = env.module
    changeset = Macro.var(:changeset, __MODULE__)

    # Fold the (reversed) pipeline so the first validator to run ends up as
    # the outermost case expression, wrapping the rest of the chain as `acc`.
    ast =
      Enum.reduce(pipeline, changeset, fn {validator, opts, guards}, acc ->
        {validator, opts, guards}
        |> init_validator()
        |> quote_validator(acc, env, builder_opts)
      end)

    {ast, _} =
      Macro.postwalk(ast, nil, fn
        # Ensure all guard references to the changeset binding in the resulting AST
        # refer to the correct context
        {:changeset, meta, context}, acc when context in [nil, module] ->
          {{:changeset, meta, __MODULE__}, acc}

        node, acc ->
          {node, acc}
      end)

    {changeset, ast}
  end

  # Module validators (atoms whose name starts with "Elixir.") get an init/1
  # call baked into the AST; any other atom is treated as a local/imported
  # function validator.
  defp init_validator({validator, opts, guards}) do
    case Atom.to_charlist(validator) do
      ~c"Elixir." ++ _ -> init_module_validator(validator, opts, guards)
      _ -> init_fun_validator(validator, opts, guards)
    end
  end

  defp init_module_validator(validator, opts, guards) do
    {:module, validator, quote(do: unquote(validator).init(unquote(escape(opts)))), guards}
  end

  defp init_fun_validator(validator, opts, guards) do
    {:function, validator, escape(opts), guards}
  end

  defp escape(opts), do: Macro.escape(opts, unquote: true)

  # Wraps one validator call around the already-compiled remainder (`acc`),
  # raising with a descriptive message when the validator returns anything
  # other than an %Ecto.Changeset{}.
  defp quote_validator({ty, validator, opts, guards}, acc, _env, _builder_opts) do
    call = quote_validator_call(ty, validator, opts)

    error_message =
      case ty do
        :module -> "expected #{inspect(validator)}.validate/2 to return an Ecto.Changeset"
        :function -> "expected #{validator}/2 to return an Ecto.Changeset"
      end <> ", all validators must receive a changeset and return a changeset"

    # `generated: true` suppresses warnings for the synthesized clauses.
    quote generated: true do
      case unquote(compile_guards(call, guards)) do
        %Ecto.Changeset{} = changeset ->
          unquote(acc)

        other ->
          raise unquote(error_message) <> ", got: #{inspect(other)}"
      end
    end
  end

  defp quote_validator_call(:function, validator, opts) do
    quote do: unquote(validator)(changeset, unquote(opts))
  end

  defp quote_validator_call(:module, validator, opts) do
    quote do: unquote(validator).validate(changeset, unquote(opts))
  end

  # A guard of literal `true` means "always run". Otherwise the validator is
  # only invoked when the guard expression holds; if it does not, the
  # changeset passes through untouched.
  defp compile_guards(call, true), do: call

  defp compile_guards(call, guards) do
    quote do
      case true do
        true when unquote_splicing(guards) -> unquote(call)
        true -> changeset
      end
    end
  end
end
|
lib/validator/builder.ex
| 0.828072
| 0.497803
|
builder.ex
|
starcoder
|
defmodule Yams.Query do
  # Query DSL over a Yams time-keyed stream: bucket the stream by time window,
  # then compute per-bucket aggregations via the macros below
  # (minimum/maximum/count/count_where/percentile), filter with `where`, and
  # collapse to results with `aggregates/1` + `as_stream!/1`.
  require Logger

  defmodule State do
    # range: {from_ts, to_ts} of the query; stream: the underlying lazy stream
    # of {timestamp, datum} pairs (or buckets/aggregates after transformation).
    defstruct range: {:none, :none}, stream: nil
  end

  defmodule Bucket do
    # One time window holding the raw {timestamp, datum} pairs plus the
    # aggregations pushed so far (as a keyword-style list).
    defstruct start_t: nil, end_t: nil, data: [], aggregations: []
  end

  defmodule Aggregate do
    # A bucket reduced to only its aggregation results (as a map).
    defstruct start_t: nil, end_t: nil, aggregations: %{}
  end

  # Time-unit conversion front-ends; all bucketing is done in nanosecond keys.
  def bucket(state, seconds, "seconds") do
    bucket(state, Yams.seconds_to_key(seconds), "nanoseconds")
  end

  def bucket(state, ms, "milliseconds") do
    bucket(state, Yams.ms_to_key(ms), "nanoseconds")
  end

  # Chunks the stream into fixed-width windows of `nanoseconds`, measured from
  # the query range start. Each chunk becomes a %Bucket{} whose start_t/end_t
  # are the min/max timestamps actually present in that chunk.
  def bucket(%State{stream: stream, range: {from_ts, _}} = state, nanoseconds, "nanoseconds") do
    chunked = Stream.chunk_by(stream, fn {time, _} ->
      Float.floor((time - from_ts) / nanoseconds)
    end)
    |> Stream.map(fn bucket ->
      {{mini, _}, {maxi, _}} = Enum.min_max_by(bucket, fn {t, _} -> t end)
      %Bucket{data: bucket, start_t: mini, end_t: maxi}
    end)

    struct(state, stream: chunked)
  end

  # Prepends a {label, value} aggregation result onto a bucket.
  def push_aggregate(bucket, key, value) do
    struct(bucket, aggregations: [{key, value} | bucket.aggregations])
  end

  # Percentile that tolerates empty/singleton inputs instead of raising.
  def safe_percentile(data, p) do
    case data do
      [] -> 0
      [n] -> n
      others -> Statistics.percentile(others, p)
    end
  end

  # bind_row/1 rewrites a quoted user expression so every "row.field" string
  # literal becomes the AST for `Map.get(row, "field")`, bound against the
  # `row` variable that the macros below inject via var!.
  defp bind_row([{e, m, args} | rest]) do
    [{e, m, bind_row(args)} | bind_row(rest)]
  end

  defp bind_row({comparator, meta, args}) do
    {comparator, meta, bind_row(args)}
  end

  defp bind_row("row." <> str) do
    # Hand-built AST for Map.get(row, str).
    {
      {:., [], [
        {:__aliases__, [alias: false], [:Map]},
        :get
      ]}, [],
      [Macro.var(:row, nil), str]
    }
  end

  defp bind_row([prim | rest]) do
    [bind_row(prim) | bind_row(rest)]
  end

  defp bind_row(prim) do
    prim
  end

  # Runs `evaluator` over each datum of each bucket and pushes
  # `aggregator.(values)` under `label`. Aggregating a stream that has
  # already been collapsed to %Aggregate{}s is a no-op (with a warning).
  def aggregate_buckets(state, evaluator, aggregator, label) do
    %State{stream: stream} = state
    new_stream = Stream.map(stream, fn
      %Bucket{} = b ->
        data = Enum.map(b.data, fn {_, datum} ->
          evaluator.(datum)
        end)
        value = aggregator.(data)
        Yams.Query.push_aggregate(b, label, value)
      a ->
        Logger.warn("Cannot make an aggregate on an aggregate stream!")
        a
    end)

    struct(state, stream: new_stream)
  end

  # Shared expansion for the minimum/maximum macros: wrap the rowified user
  # expression in a per-datum evaluator and delegate to aggregate_buckets/4.
  defp minimax(state, expr, aggregator, label) do
    rowified = bind_row(expr)
    quote do
      require Logger
      func = fn t ->
        var!(row) = t
        unquote(rowified)
      end
      Yams.Query.aggregate_buckets(
        unquote(state),
        func,
        unquote(aggregator),
        unquote(label)
      )
    end
  end

  # Min/max that return 0 for empty buckets instead of raising.
  def safe_min([]), do: 0
  def safe_min(data), do: Enum.min(data)
  def safe_max([]), do: 0
  def safe_max(data), do: Enum.max(data)

  defmacro minimum(state, expr, label) do
    minimax(state, expr, &Yams.Query.safe_min/1, label)
  end

  defmacro maximum(state, expr, label) do
    minimax(state, expr, &Yams.Query.safe_max/1, label)
  end

  # Counts the evaluated values per bucket (length of the mapped list).
  defmacro count(state, expr, label) do
    rowified = bind_row(expr)
    quote do
      require Logger
      func = fn t ->
        var!(row) = t
        unquote(rowified)
      end
      aggregator = fn data -> length(data) end
      Yams.Query.aggregate_buckets(
        unquote(state),
        func,
        aggregator,
        unquote(label)
      )
    end
  end

  # Counts only the rows for which the rowified predicate holds.
  # NOTE(review): `unquote(state)` appears twice in this expansion (pattern
  # match and final struct/2 call), so the state expression is evaluated
  # twice at the call site — harmless for a plain variable, surprising for a
  # side-effecting expression. Also `aggregator` is bound but unused here.
  defmacro count_where(state, expr, label) do
    rowified = bind_row(expr)
    quote do
      require Logger
      predicate = fn t ->
        var!(row) = t
        unquote(rowified)
      end
      aggregator = fn data -> length(data) end
      %State{stream: stream} = unquote(state)
      new_stream = Stream.map(stream, fn
        %Bucket{} = b ->
          value = Enum.reduce(b.data, 0, fn {_t, x}, acc ->
            if(predicate.(x)) do
              acc + 1
            else
              acc
            end
          end)
          Yams.Query.push_aggregate(b, unquote(label), value)
        a ->
          Logger.warn("Cannot make an aggregate on an aggregate stream!")
          a
      end)
      struct(unquote(state), stream: new_stream)
    end
  end

  # Per-bucket percentile of the evaluated values (see safe_percentile/2).
  defmacro percentile(state, expr, perc, label) do
    rowified = bind_row(expr)
    quote do
      require Logger
      func = fn t ->
        var!(row) = t
        unquote(rowified)
      end
      aggregator = fn data ->
        Yams.Query.safe_percentile(data, unquote(perc))
      end
      Yams.Query.aggregate_buckets(
        unquote(state),
        func,
        aggregator,
        unquote(label)
      )
    end
  end

  # Filters either raw bucket data (keeping the bucket, dropping rows) or
  # whole aggregates (dropping the aggregate when the predicate fails, with
  # the predicate applied to the aggregations map).
  defmacro where(state, expr) do
    rowified = bind_row(expr)
    quote do
      predicate = fn t ->
        var!(row) = t
        unquote(rowified)
      end
      %State{stream: stream} = s = unquote(state)
      new_stream = Stream.flat_map(stream, fn
        %Bucket{} = b ->
          data = Enum.filter(b.data, fn {_, datum} ->
            predicate.(datum)
          end)
          [struct(b, data: data)]
        %Aggregate{} = a ->
          if predicate.(a.aggregations) do
            [a]
          else
            []
          end
      end)
      struct(s, stream: new_stream)
    end
  end

  # Collapses each %Bucket{} to an %Aggregate{}, dropping the raw data and
  # converting the aggregation keyword list into a map.
  def aggregates(%State{stream: stream} = state) do
    new_stream = Stream.map(stream, fn %Bucket{aggregations: aggs} = b ->
      %Aggregate{
        start_t: b.start_t,
        end_t: b.end_t,
        aggregations: Enum.into(aggs, %{})
      }
    end)

    struct(state, stream: new_stream)
  end

  # Unwraps the lazy stream from the query state.
  def as_stream!(%State{stream: stream}), do: stream
end
|
lib/yams/query.ex
| 0.564939
| 0.498962
|
query.ex
|
starcoder
|
defmodule MssqlEcto.Connection do
  # Connection layer of the MSSQL Ecto adapter: prepares/executes queries
  # through DBConnection (Mssqlex/ODBC underneath) and delegates SQL
  # generation to MssqlEcto.Query.
  alias Mssqlex.Query
  alias MssqlEcto.Query, as: SQL
  @typedoc "The prepared query which is an SQL command"
  @type prepared :: String.t()
  @typedoc "The cache query which is a DBConnection Query"
  @type cached :: map
  @doc """
  Receives options and returns `DBConnection` supervisor child
  specification.
  """
  @spec child_spec(options :: Keyword.t()) :: {module, Keyword.t()}
  def child_spec(opts) do
    DBConnection.child_spec(Mssqlex.Protocol, opts)
  end
  @doc """
  Prepares and executes the given query with `DBConnection`.
  """
  @spec prepare_execute(
          connection :: DBConnection.t(),
          name :: String.t(),
          prepared,
          params :: [term],
          options :: Keyword.t()
        ) :: {:ok, query :: map, term} | {:error, Exception.t()}
  def prepare_execute(conn, name, prepared_query, params, options) do
    # Strip the `?N` ordinal markers for the ODBC driver, but reorder the
    # params so they line up with where those ordinals appeared in the SQL.
    statement = sanitise_query(prepared_query)
    ordered_params = order_params(prepared_query, params)
    case DBConnection.prepare_execute(
           conn,
           %Query{name: name, statement: statement},
           ordered_params,
           options
         ) do
      {:ok, query, result} ->
        # Hand back the original (un-sanitised) statement so the query cache
        # keeps the form with the ordinal placeholders intact.
        {:ok, %{query | statement: prepared_query},
         process_rows(result, options)}
      {:error, %Mssqlex.Error{}} = error ->
        if is_erlang_odbc_no_data_found_bug?(error, prepared_query) do
          # Erlang's ODBC driver reports "no data" for DML that touched zero
          # rows; treat that as a successful empty result instead of an error.
          {:ok, %Query{name: "", statement: prepared_query},
           %{num_rows: 0, rows: []}}
        else
          error
        end
      {:error, error} ->
        raise error
    end
  end
  @doc """
  Executes the given prepared query with `DBConnection`.
  """
  @spec execute(
          connection :: DBConnection.t(),
          prepared_query :: prepared,
          params :: [term],
          options :: Keyword.t()
        ) :: {:ok, term} | {:error, Exception.t()}
  @spec execute(
          connection :: DBConnection.t(),
          prepared_query :: cached,
          params :: [term],
          options :: Keyword.t()
        ) :: {:ok, term} | {:error | :reset, Exception.t()}
  def execute(conn, %Query{} = query, params, options) do
    # Reorder params against the cached (still-ordinal) statement, then
    # sanitise the statement for the driver.
    ordered_params =
      query.statement
      |> IO.iodata_to_binary()
      |> order_params(params)
    sanitised_query = sanitise_query(query.statement)
    # NOTE(review): Map.put on a %Query{} struct works but `%{query | ...}`
    # would preserve struct guarantees; left as-is to keep behavior identical.
    query = Map.put(query, :statement, sanitised_query)
    case DBConnection.prepare_execute(conn, query, ordered_params, options) do
      {:ok, _query, result} ->
        {:ok, process_rows(result, options)}
      {:error, %Mssqlex.Error{}} = error ->
        if is_erlang_odbc_no_data_found_bug?(error, query.statement) do
          {:ok, %{num_rows: 0, rows: []}}
        else
          error
        end
      {:error, error} ->
        raise error
    end
  end
  # Fallback: wrap a raw SQL string/iodata in a %Query{} and re-dispatch.
  def execute(conn, statement, params, options) do
    execute(conn, %Query{name: "", statement: statement}, params, options)
  end
  # Reorders `params` to match the order in which their 1-based `?N`
  # ordinals appear in the SQL. Returns `params` unchanged when the query
  # contains no ordinals.
  defp order_params(query, params) do
    # Remove quoted string literals first so `?N` sequences inside strings
    # are not mistaken for parameter markers.
    sanitised =
      Regex.replace(
        ~r/(([^\\]|^))["'].*?[^\\]['"]/,
        IO.iodata_to_binary(query),
        "\\g{1}"
      )
    ordering =
      Regex.scan(~r/\?([0-9]+)/, sanitised)
      |> Enum.map(fn [_, x] -> String.to_integer(x) end)
    if length(ordering) != length(params) do
      raise "\nError: number of params received (#{length(params)}) does not match expected (#{
              length(ordering)
            })"
    end
    # Prepend + reverse keeps this O(n) in the number of ordinals.
    ordered_params =
      ordering
      |> Enum.reduce([], fn ix, acc -> [Enum.at(params, ix - 1) | acc] end)
      |> Enum.reverse()
    case ordered_params do
      [] -> params
      _ -> ordered_params
    end
  end
  # Replaces `?N` ordinal markers (outside quoted literals, via the
  # lookahead) with plain `?` placeholders understood by ODBC.
  defp sanitise_query(query) do
    query
    |> IO.iodata_to_binary()
    |> String.replace(
      ~r/(\?([0-9]+))(?=(?:[^\\"']|[\\"'][^\\"']*[\\"'])*$)/,
      "?"
    )
  end
  # Detects the Erlang ODBC quirk where a DML statement that affected no
  # rows surfaces as "No SQL-driver information available." instead of an
  # empty result.
  defp is_erlang_odbc_no_data_found_bug?({:error, error}, statement) do
    is_dml =
      statement
      |> IO.iodata_to_binary()
      |> (fn string ->
            String.starts_with?(string, "INSERT") ||
              String.starts_with?(string, "DELETE") ||
              String.starts_with?(string, "UPDATE")
          end).()
    is_dml and error.message =~ "No SQL-driver information available."
  end
  # Applies the optional :decode_mapper to each row; a nil rows field
  # (e.g. for DML results) is deliberately left as nil.
  defp process_rows(result, options) do
    decoder = options[:decode_mapper] || fn x -> x end
    Map.update!(result, :rows, fn row ->
      unless is_nil(row), do: Enum.map(row, decoder)
    end)
  end
  @doc """
  Receives the exception returned by `query/4`.
  The constraints are in the keyword list and must return the
  constraint type, like `:unique`, and the constraint name as
  a string, for example:
  [unique: "posts_title_index"]
  Must return an empty list if the error does not come
  from any constraint.
  """
  @spec to_constraints(exception :: Exception.t()) :: Keyword.t()
  def to_constraints(%Mssqlex.Error{} = error), do: error.constraint_violations
  @doc """
  Returns a stream that prepares and executes the given query with
  `DBConnection`.
  """
  @spec stream(
          connection :: DBConnection.conn(),
          prepared_query :: prepared,
          params :: [term],
          options :: Keyword.t()
        ) :: Enum.t()
  def stream(_conn, _prepared, _params, _options) do
    raise("not implemented")
  end
  ## Queries
  # Thin delegations to the SQL generator (MssqlEcto.Query).
  def all(query), do: SQL.all(query)
  def update_all(query, prefix \\ nil), do: SQL.update_all(query, prefix)
  @doc false
  def delete_all(query), do: SQL.delete_all(query)
  def insert(prefix, table, header, rows, on_conflict, returning),
    do: SQL.insert(prefix, table, header, rows, on_conflict, returning)
  def update(prefix, table, fields, filters, returning),
    do: SQL.update(prefix, table, fields, filters, returning)
  def delete(prefix, table, filters, returning),
    do: SQL.delete(prefix, table, filters, returning)
  ## Migration
  def execute_ddl(command), do: MssqlEcto.Migration.execute_ddl(command)
end
|
lib/mssql_ecto/connection.ex
| 0.850049
| 0.413714
|
connection.ex
|
starcoder
|
defmodule Litelist.Discussions do
  @moduledoc """
  The Discussions context.
  """
  import Ecto.Query, warn: false
  alias Litelist.Repo
  alias Litelist.Discussions.Discussion

  @doc """
  Returns the list of discussions.
  ## Examples
      iex> list_discussions()
      [%Discussion{}, ...]
  """
  def list_discussions, do: Repo.all(Discussion)

  @doc """
  Returns every discussion created by the given neighbor.
  ## Examples
      iex> list_discussions_by_neighbor(neighbor)
      [%Discussion{}, ...]
  """
  def list_discussions_by_neighbor(neighbor) do
    query = from(d in Discussion, where: d.neighbor_id == ^neighbor.id)
    Repo.all(query)
  end

  @doc """
  Gets a single discussion.
  Raises `Ecto.NoResultsError` if the Discussion does not exist.
  ## Examples
      iex> get_discussion!(123)
      %Discussion{}
      iex> get_discussion!(456)
      ** (Ecto.NoResultsError)
  """
  def get_discussion!(id) do
    Repo.get!(Discussion, id)
  end

  @doc """
  Creates a discussion from the given attributes.
  ## Examples
      iex> create_discussion(%{field: value})
      {:ok, %Discussion{}}
      iex> create_discussion(%{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def create_discussion(attrs \\ %{}) do
    changeset = Discussion.changeset(%Discussion{}, attrs)
    Repo.insert(changeset)
  end

  @doc """
  Updates an existing discussion with the given attributes.
  ## Examples
      iex> update_discussion(discussion, %{field: new_value})
      {:ok, %Discussion{}}
      iex> update_discussion(discussion, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_discussion(%Discussion{} = discussion, attrs) do
    changeset = Discussion.changeset(discussion, attrs)
    Repo.update(changeset)
  end

  @doc """
  Deletes a Discussion.
  ## Examples
      iex> delete_discussion(discussion)
      {:ok, %Discussion{}}
      iex> delete_discussion(discussion)
      {:error, %Ecto.Changeset{}}
  """
  def delete_discussion(%Discussion{} = discussion), do: Repo.delete(discussion)

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking discussion changes.
  ## Examples
      iex> change_discussion(discussion)
      %Ecto.Changeset{source: %Discussion{}}
  """
  def change_discussion(%Discussion{} = discussion) do
    Discussion.changeset(discussion, %{})
  end
end
|
lib/litelist/discussions/discussions.ex
| 0.716318
| 0.478346
|
discussions.ex
|
starcoder
|
defmodule Fares.OneWay do
  @moduledoc """
  Calculates the lowest, highest and reduced one-way fares for a particular trip for the given mode.
  Commuter rail and ferry fares distinguish between the possible sets of stops.
  Bus fares for express buses do not distinguish between the local and express portions;
  the express fare is always returned.
  """
  alias Fares.{Fare, Repo}
  alias Routes.Route
  alias Schedules.Trip
  # Filters applied to the fare repo unless a route-specific filter overrides them.
  @default_filters [duration: :single_trip]
  # Foxboro fares are filtered as round trips (see get_fares/5).
  @default_foxboro_filters [duration: :round_trip]
  # Placeholder trip used when the caller passes no trip information.
  @default_trip %Trip{name: "", id: ""}
  @doc """
  Lowest-priced full (non-reduced) fare for the trip, or `nil` if the route
  is nil or no matching fare exists.
  """
  @spec recommended_fare(
          Route.t() | map,
          Trip.t() | map,
          Stops.Stop.id_t(),
          Stops.Stop.id_t(),
          (Keyword.t() -> [Fare.t()])
        ) ::
          Fare.t() | nil
  def recommended_fare(route, trip, origin_id, destination_id, fare_fn \\ &Repo.all/1)
  def recommended_fare(nil, _, _, _, _), do: nil
  def recommended_fare(route, nil, origin_id, destination_id, fare_fn) do
    recommended_fare(route, @default_trip, origin_id, destination_id, fare_fn)
  end
  def recommended_fare(route, trip, origin_id, destination_id, fare_fn) do
    route
    |> get_fares(trip, origin_id, destination_id, fare_fn)
    |> Enum.filter(fn fare -> fare.reduced == nil end)
    |> Enum.min_by(& &1.cents, fn -> nil end)
  end
  @doc """
  Highest-priced full (non-reduced) fare for the trip, or `nil`.
  """
  @spec base_fare(
          Route.t() | map,
          Trip.t() | map,
          Stops.Stop.id_t(),
          Stops.Stop.id_t(),
          (Keyword.t() -> [Fare.t()])
        ) ::
          Fare.t() | nil
  def base_fare(route, trip, origin_id, destination_id, fare_fn \\ &Repo.all/1)
  def base_fare(nil, _, _, _, _), do: nil
  def base_fare(route, nil, origin_id, destination_id, fare_fn) do
    base_fare(route, @default_trip, origin_id, destination_id, fare_fn)
  end
  def base_fare(route, trip, origin_id, destination_id, fare_fn) do
    route
    |> get_fares(trip, origin_id, destination_id, fare_fn)
    |> Enum.filter(fn fare -> fare.reduced == nil end)
    |> Enum.max_by(& &1.cents, fn -> nil end)
  end
  @doc """
  Any reduced fare for the trip, or `nil`.
  """
  @spec reduced_fare(
          Route.t() | map,
          Trip.t() | map,
          Stops.Stop.id_t(),
          Stops.Stop.id_t(),
          (Keyword.t() -> [Fare.t()])
        ) ::
          Fare.t() | nil
  def reduced_fare(route, trip, origin_id, destination_id, fare_fn \\ &Repo.all/1)
  def reduced_fare(nil, _, _, _, _), do: nil
  def reduced_fare(route, trip, origin_id, destination_id, fare_fn) do
    # The reduced fare is always the same so we just return any element from the list
    route
    |> get_fares(trip, origin_id, destination_id, fare_fn)
    |> Enum.filter(fn fare -> fare.reduced != nil end)
    |> List.first()
  end
  # Builds the filter keyword list for the fare repo from the route/trip and
  # fetches the matching fares via `fare_fn`.
  @spec get_fares(
          Route.t() | map,
          Trip.t() | map,
          Stops.Stop.id_t(),
          Stops.Stop.id_t(),
          (Keyword.t() -> [Fare.t()])
        ) ::
          [Fare.t() | nil]
  defp get_fares(route, trip, origin_id, destination_id, fare_fn) do
    route_filters =
      route.type
      |> Route.type_atom()
      |> name_or_mode_filter(route, origin_id, destination_id, trip)
    # Foxboro is the one service filtered as a round trip.
    default_filters =
      if {:name, :foxboro} in route_filters do
        @default_foxboro_filters
      else
        @default_filters
      end
    default_filters
    |> Keyword.merge(route_filters)
    |> fare_fn.()
  end
  # Clause order matters below: the rail-replacement and Foxboro clauses must
  # win over the generic bus/commuter-rail clauses.
  defp name_or_mode_filter(:subway, _route, _origin_id, _destination_id, _trip) do
    [mode: :subway]
  end
  # Rail replacement buses are free regardless of mode.
  defp name_or_mode_filter(_, %{description: :rail_replacement_bus}, _, _, _) do
    [name: :free_fare]
  end
  defp name_or_mode_filter(_, %{id: "CR-Foxboro"}, _, _, _) do
    [name: :foxboro]
  end
  defp name_or_mode_filter(:bus, %{id: route_id}, origin_id, _destination_id, _trip) do
    name =
      cond do
        Fares.express?(route_id) -> :express_bus
        Fares.silver_line_airport_stop?(route_id, origin_id) -> :free_fare
        Fares.silver_line_rapid_transit?(route_id) -> :subway
        true -> :local_bus
      end
    [name: name]
  end
  defp name_or_mode_filter(:commuter_rail, _, origin_id, destination_id, trip) do
    # Fall back to the whole commuter-rail mode when the stop pair has no
    # specific zone fare.
    case Fares.fare_for_stops(:commuter_rail, origin_id, destination_id, trip) do
      {:ok, name} ->
        [name: name]
      :error ->
        [mode: :commuter_rail]
    end
  end
  defp name_or_mode_filter(:ferry, _, origin_id, destination_id, _) do
    # NOTE(review): assumes fare_for_stops always returns {:ok, name} for
    # ferry stop pairs — elem(1) would crash on :error. Confirm upstream.
    [name: :ferry |> Fares.fare_for_stops(origin_id, destination_id) |> elem(1)]
  end
end
|
apps/fares/lib/one_way.ex
| 0.763175
| 0.509703
|
one_way.ex
|
starcoder
|
defmodule GenStage.DemandDispatcher do
  @moduledoc """
  A dispatcher that sends batches to the highest demand.
  This is the default dispatcher used by `GenStage`. In order
  to avoid greedy consumers, it is recommended that all consumers
  have exactly the same maximum demand.
  """
  @behaviour GenStage.Dispatcher
  # State shape: {demands, pending, max}
  #   demands — [{counter, pid, ref}] kept sorted by counter, descending
  #             (see add_demand/4), so dispatch targets the highest demand
  #   pending — demand left over from cancelled consumers, re-absorbed by
  #             future asks instead of being forwarded upstream
  #   max     — demand seen on the first ask; used to warn on mixed maximums
  @doc false
  def init(_opts) do
    {:ok, {[], 0, nil}}
  end
  @doc false
  def info(msg, state) do
    # Deliver the info message to the producer process itself.
    send(self(), msg)
    {:ok, state}
  end
  @doc false
  def subscribe(_opts, {pid, ref}, {demands, pending, max}) do
    # New consumers start at zero demand; appending keeps the descending
    # order since 0 is the minimum counter.
    {:ok, 0, {demands ++ [{0, pid, ref}], pending, max}}
  end
  @doc false
  def cancel({_, ref}, {demands, pending, max}) do
    # Fold the cancelled consumer's outstanding demand into `pending` so it
    # can satisfy future asks without over-requesting upstream.
    {current, demands} = pop_demand(ref, demands)
    {:ok, 0, {demands, current + pending, max}}
  end
  @doc false
  def ask(counter, {pid, ref}, {demands, pending, max}) do
    # The first ask fixes the expected maximum demand for all consumers.
    max = max || counter
    if counter > max do
      warning =
        'GenStage producer DemandDispatcher expects a maximum demand of ~tp. ' ++
          'Using different maximum demands will overload greedy consumers. ' ++
          'Got demand for ~tp events from ~tp~n'
      :error_logger.warning_msg(warning, [max, counter, pid])
    end
    {current, demands} = pop_demand(ref, demands)
    demands = add_demand(current + counter, pid, ref, demands)
    # Absorb as much of this ask as possible from cancelled consumers'
    # leftover demand before asking upstream for more.
    already_sent = min(pending, counter)
    {:ok, counter - already_sent, {demands, pending - already_sent, max}}
  end
  @doc false
  def dispatch(events, length, {demands, pending, max}) do
    {events, demands} = dispatch_demand(events, length, demands)
    {:ok, events, {demands, pending, max}}
  end
  # No events left to deliver.
  defp dispatch_demand([], _length, demands) do
    {[], demands}
  end
  # Head of the (descending) list has zero demand, so no consumer can take
  # events; keep the leftovers buffered.
  defp dispatch_demand(events, _length, [{0, _, _} | _] = demands) do
    {events, demands}
  end
  defp dispatch_demand(events, length, [{counter, pid, ref} | demands]) do
    {deliver_now, deliver_later, length, counter} = split_events(events, length, counter)
    Process.send(pid, {:"$gen_consumer", {self(), ref}, deliver_now}, [:noconnect])
    # Re-insert with the reduced counter, keeping the list sorted.
    demands = add_demand(counter, pid, ref, demands)
    dispatch_demand(deliver_later, length, demands)
  end
  # All events fit within this consumer's demand.
  defp split_events(events, length, counter) when length <= counter do
    {events, [], 0, counter - length}
  end
  defp split_events(events, length, counter) do
    {now, later} = Enum.split(events, counter)
    {now, later, length - counter, 0}
  end
  # Insert keeping the list sorted by counter, descending.
  defp add_demand(counter, pid, ref, [{c, _, _} | _] = demands) when counter > c do
    [{counter, pid, ref} | demands]
  end
  defp add_demand(counter, pid, ref, [demand | demands]) do
    [demand | add_demand(counter, pid, ref, demands)]
  end
  defp add_demand(counter, pid, ref, []) when is_integer(counter) do
    [{counter, pid, ref}]
  end
  # Remove and return the demand entry for `ref` (element index 2 of the
  # {counter, pid, ref} tuple).
  defp pop_demand(ref, demands) do
    {{current, _pid, ^ref}, rest} = List.keytake(demands, ref, 2)
    {current, rest}
  end
end
|
deps/gen_stage/lib/gen_stage/dispatchers/demand_dispatcher.ex
| 0.759493
| 0.577138
|
demand_dispatcher.ex
|
starcoder
|
defmodule Fly.Postgres.LSN do
  @moduledoc """
  Data structure that represents a PostgreSQL LSN or Log Sequence Number.
  Two LSN values can be compared using the `replicated?/2` function. An LSN
  associated with the DB modification has a `source` of `:insert`. On a replica
  instance, that can be used to see when the insert has been replicated locally.
  """
  alias __MODULE__

  defstruct fpart: nil, offset: nil, source: nil

  @type t :: %LSN{
          fpart: nil | integer,
          offset: nil | integer,
          source: :not_replicating | :insert | :replay
        }

  @doc """
  Create a new `Fly.Postgres.LSN` struct from a queried WAL value.

  Raises `ArgumentError` when the string is not a valid LSN.
  """
  def new(nil, :replay) do
    # A nil replay LSN means this node is not a replica at all.
    %LSN{fpart: nil, offset: nil, source: :not_replicating}
  end

  def new(lsn, source) when is_binary(lsn) and source in [:insert, :replay] do
    # An LSN string looks like "16/B374D848": hex file part "/" hex offset.
    # BUGFIX: the original used `=` for the Integer.parse steps, so malformed
    # hex raised a MatchError and never reached the intended ArgumentError.
    # `<-` routes any parse failure (including trailing garbage) into `else`.
    with [file_part_str, offset_str] <- String.split(lsn, "/"),
         {fpart, ""} <- Integer.parse(file_part_str, 16),
         {offset, ""} <- Integer.parse(offset_str, 16) do
      %LSN{fpart: fpart, offset: offset, source: source}
    else
      _ -> raise ArgumentError, "invalid lsn format #{inspect(lsn)}"
    end
  end

  # F1/O1 is at least as new as F2/O2 if (F1 > F2) or (F1 == F2 and O1 >= O2)
  @doc """
  Compare two `Fly.Postgres.LSN` structs to determine if the transaction representing a
  data change on the primary has been replayed locally.
  They are compared where the replay/replica value is in argument 1 and the
  insert value is in argument two.
  ## Examples
      repo |> last_wal_replay() |> replicated?(primary_lsn)
  """
  def replicated?(replay_lsn, insert_lsn)
  # Not a replica: there is nothing to wait for, so report as replicated.
  def replicated?(%LSN{source: :not_replicating}, %LSN{source: :insert}), do: true

  def replicated?(%LSN{fpart: f1, offset: o1, source: :replay}, %LSN{
        fpart: f2,
        offset: o2,
        source: :insert
      }) do
    f1 > f2 or (f1 == f2 and o1 >= o2)
  end

  @doc """
  After performing a database modification, calling `current_wal_insert/1`
  returns a value that can be used to compare against a WAL value from the
  replica database to determine when the changes have been replayed on the
  replica.
  """
  def current_wal_insert(repo) do
    %Postgrex.Result{rows: [[lsn]]} =
      repo.query!("select CAST(pg_current_wal_insert_lsn() AS TEXT)")

    new(lsn, :insert)
  end

  @doc """
  When talking to a replica database, this returns a value for what changes have
  been replayed on the replica from the primary.
  """
  def last_wal_replay(repo) do
    %Postgrex.Result{rows: [[lsn]]} = repo.query!("select CAST(pg_last_wal_replay_lsn() AS TEXT)")
    new(lsn, :replay)
  end
end
|
lib/lsn/lsn.ex
| 0.900325
| 0.564219
|
lsn.ex
|
starcoder
|
defmodule Seedex do
  @moduledoc """
  Functions to populate database with seed data.
  """
  require Logger
  @doc """
  `seed/4` inserts data in the given table
  ## Arguments
  * `module` - The module containing the Ecto schema.
  * `constraints` - The fields used to identify a record. The record will be updated
    if a record with matching fields already exist. The default value is `[:id]`
  * `data` - The data to insert. It should be a list of maps. If it is not passed,
    a single record will be created using the function passed.
  * `process` - A function to post-process each created record. It is required
    only if `data` is omitted.
  ## Examples
  ```elixir
  seed MyApp.Point, [:x, :y], fn point ->
    point
    |> Map.put(:x, 4)
    |> Map.put(:y, 7)
    |> Map.put(:name, "Home")
  end
  seed MyApp.User, [
    %{name: "Daniel", age: 26},
    %{name: "Ai", age: 24}
  ]
  ```
  """
  @spec seed(module :: atom, constraints :: [atom], data :: [map], process :: (struct -> struct)) ::
          :ok
  def seed(module, constraints \\ [:id], data \\ [], process \\ nil) do
    dispatch_seed(module, constraints, data, process, update: true)
  end
  @doc """
  Same as `seed/4` but does not update the record if it already exists
  """
  @spec seed_once(
          module :: atom,
          constraints :: [atom],
          data :: (struct -> struct) | [map],
          process :: (struct -> struct)
        ) :: :ok
  def seed_once(module, constraints \\ [:id], data \\ [], process \\ nil) do
    dispatch_seed(module, constraints, data, process, update: false)
  end
  # Default post-processing function: leaves the record untouched.
  defp identity(x), do: x
  # The dispatch_seed clauses disambiguate which optional arguments the
  # caller actually supplied, since `data` may be a list or a function and
  # `constraints` may be omitted entirely.
  # all four arguments were passed explicitly
  defp dispatch_seed(module, constraints, data, func, opts) when is_function(func, 1),
    do: do_seed(module, constraints, data, func, opts)
  # 3 arguments passed
  defp dispatch_seed(module, [h | t], data, nil, opts) when is_atom(h) and is_list(data),
    do: do_seed(module, [h | t], data, &identity/1, opts)
  defp dispatch_seed(module, [h | t], func, nil, opts) when is_atom(h) and is_function(func, 1),
    do: do_seed(module, [h | t], [], func, opts)
  defp dispatch_seed(module, [h | t], func, nil, opts) when is_map(h) and is_function(func, 1),
    do: do_seed(module, [:id], [h | t], func, opts)
  # 2 arguments passed
  defp dispatch_seed(module, func, [], nil, opts) when is_function(func, 1),
    do: do_seed(module, [:id], [], func, opts)
  defp dispatch_seed(module, [h | t], [], nil, opts) when is_map(h),
    do: do_seed(module, [:id], [h | t], &identity/1, opts)
  defp dispatch_seed(_module, _constraints, _data, _func, _opts),
    do: raise(ArgumentError, "invalid arguments to seed")
  # With no data given, seed a single empty record shaped by `process`.
  defp do_seed(module, constraints, [], process, opts),
    do: do_seed(module, constraints, [%{}], process, opts)
  defp do_seed(module, constraints, data, process, opts) do
    Enum.each(data, fn record ->
      record = struct(module, record) |> process.()
      insert_seed(module, record, constraints, opts)
    end)
  end
  # Insert the record, update it, or leave it alone depending on whether a
  # matching record exists and whether updates were requested.
  defp insert_seed(module, record, constraints, opts) do
    existing = fetch_record(module, record, constraints)
    cond do
      existing && opts[:update] ->
        update_record(record, existing)
      !existing ->
        Logger.debug("Inserting record #{inspect(record)}")
        repo().insert(record)
      true ->
        :ok
    end
  end
  # Look up an existing record by the constraint fields; nil when the
  # constraints produce no usable query (all values nil).
  defp fetch_record(module, record, constraints) do
    case make_query(record, constraints) do
      [] ->
        nil
      query ->
        repo().get_by(module, query)
    end
  end
  defp make_query(record, constraints) do
    constraints
    |> Enum.map(&{&1, Map.fetch!(record, &1)})
    |> Enum.reject(fn {_k, v} -> is_nil(v) end)
  end
  defp update_record(record, existing) do
    changeset = make_changeset(record, existing)
    Logger.debug("Updating #{inspect(record)} with changes: #{inspect(changeset.changes)}")
    repo().update!(changeset)
  end
  # Build a changeset from the new record's fields, skipping unloaded
  # associations, nils, and Ecto metadata.
  defp make_changeset(record, existing) do
    {changeset, changes} = {Ecto.Changeset.change(existing), Map.from_struct(record)}
    Enum.reduce(changes, changeset, fn
      {_key, %Ecto.Association.NotLoaded{}}, changeset ->
        changeset
      {_key, nil}, changeset ->
        changeset
      {key, _value}, changeset when key in ["__meta__", :__meta__] ->
        changeset
      {key, %Ecto.Association.BelongsTo{} = assoc}, changeset ->
        Ecto.Changeset.put_assoc(changeset, key, assoc)
      {key, value}, changeset ->
        Ecto.Changeset.put_change(changeset, key, value)
    end)
  end
  # The repo is resolved at runtime from application config.
  defp repo do
    Application.get_env(:seedex, :repo)
  end
end
|
lib/seedex.ex
| 0.897531
| 0.88578
|
seedex.ex
|
starcoder
|
defmodule CucumberExpressions.ParameterType.Transformer do
  @moduledoc false
  # Validates and wraps the user-supplied transformer for a ParameterType.
  # A transformer is an MFA (arity always 2) applied either before (:pre) or
  # after (:post) matching; `new/2` normalizes the many accepted input shapes
  # into a %{pre: ..., post: ...} map of Transformer structs (or nils).
  alias CucumberExpressions.ParameterType.SyntaxError
  @stages [:pre, :post]
  # NOTE(review): the defstruct defaults document the expected shapes
  # ({module, function, arity} and a stage list) rather than usable values.
  defstruct paradigm: {:atom, :atom, :integer},
            stage: @stages
  def new(a, stage \\ :pre)
  # No transformer at all.
  def new(nil, _), do: %{pre: nil, post: nil}
  # A bare module implies a `run/2` callback.
  def new(module, stage) when is_atom(module), do: new({module, :run, 2}, stage)
  # {module, function} implies arity 2.
  def new({module, function}, stage), do: new({module, function, 2}, stage)
  # Separate pre and post transformers supplied; normalize each.
  def new(%{pre: pre, post: post}, _) do
    %{pre: new(pre, :pre).pre, post: new(post, :post).post}
  end
  def new(mfa = {module, function, arity}, stage) do
    # Ensure the module is loaded so function_exported works reliably.
    Code.ensure_loaded(module)
    if :erlang.function_exported(module, function, arity) do
      if stage in @stages do
        # The unused stage slot is explicitly nil.
        %{
          flip_stage(stage) => nil,
          stage => struct(__MODULE__, %{paradigm: mfa, stage: stage})
        }
      else
        raise_error(
          "has an invalid stage: #{stage}. Valid ones are: #{inspect(@stages)}",
          :invalid_stage
        )
      end
    else
      raise_error("is non-existent", :non_existent)
    end
  end
  def new(_, _) do
    raise_error("is invalid", :invalid)
  end
  defp flip_stage(:pre), do: :post
  defp flip_stage(:post), do: :pre
  # Apply the transformer MFA to `str`; the second argument is a context
  # placeholder. Only tagged {:ok, _} / {:error, _} results are accepted.
  def run(%__MODULE__{paradigm: {module, function, 2}}, str) do
    module
    |> apply(function, [str, :ctx])
    |> case do
      ok = {:ok, _} ->
        ok
      error = {:error, _} ->
        error
      unknown_format ->
        raise_error(
          "returns an incompatible format: #{inspect(unknown_format)}",
          :incompatible_format
        )
    end
  end
  # No transformer configured: pass the string through unchanged.
  def run(nil, str), do: {:ok, str}
  def raise_error(msg, error_code = :incompatible_format) do
    """
    Transformer supplied for `ParameterType` #{msg}.
    Kindly return a tagged tuple:
    * {:ok, result}
    * {:error, error}
    """
    |> SyntaxError.raise(error_code)
  end
  def raise_error(msg, error_code) do
    """
    Transformer supplied for `ParameterType` #{msg}.
    Kindly specify a remote function:
    `{module, function, arity}` where arity is always `2`.
    """
    |> SyntaxError.raise(error_code)
  end
end
|
apps/cucumber_expressions/lib/cucumber_expressions/parameter_type/lib/transformer.ex
| 0.736874
| 0.533094
|
transformer.ex
|
starcoder
|
defmodule Square.Catalog do
alias Tesla.Multipart
@moduledoc """
Documentation for `Square.Catalog`.
"""
@doc """
Deletes a set of [CatalogItem](#type-catalogitem)s based on the
provided list of target IDs and returns a set of successfully deleted IDs in
the response. Deletion is a cascading event such that all children of the
targeted object are also deleted. For example, deleting a CatalogItem will
also delete all of its [CatalogItemVariation](#type-catalogitemvariation)
children.
`BatchDeleteCatalogObjects` succeeds even if only a portion of the targeted
IDs can be deleted. The response will only include IDs that were
actually deleted.
```
def batch_delete_catalog_objects(client, body)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `body` | [`Batch Delete Catalog Objects Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/batch-delete-catalog-objects-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |
### Response Type
[`Batch Delete Catalog Objects Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/batch-delete-catalog-objects-response.md)
### Example Usage
iex> body = %{
object_ids: ["W62UWFY35CWMYGVWK6TWJDNI", "AA27W3M2GGTF3H6AVPNB77CK"]
}
iex> Square.client |> Square.Catalog.batch_delete_catalog_objects(body)
"""
@spec batch_delete_catalog_objects(Tesla.Client.t(), map) ::
{:error, any} | {:ok, Tesla.Env.t()}
def batch_delete_catalog_objects(client, body \\ %{}),
do: Tesla.post(client, "catalog/batch-delete", body)
@doc """
Returns a set of objects based on the provided ID.
Each [CatalogItem](#type-catalogitem) returned in the set includes all of its
child information including: all of its
[CatalogItemVariation](#type-catalogitemvariation) objects, references to
its [CatalogModifierList](#type-catalogmodifierlist) objects, and the ids of
any [CatalogTax](#type-catalogtax) objects that apply to it.
```
def batch_retrieve_catalog_objects(body)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `body` | [`Batch Retrieve Catalog Objects Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/batch-retrieve-catalog-objects-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |
### Response Type
[`Batch Retrieve Catalog Objects Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/batch-retrieve-catalog-objects-response.md)
### Example Usage
iex> body = %{
object_ids: ["W62UWFY35CWMYGVWK6TWJDNI", "AA27W3M2GGTF3H6AVPNB77CK"],
include_related_objects: true
}
iex> Square.client |> Square.Catalog.batch_retrieve_catalog_objects(body)
"""
@spec batch_retrieve_catalog_objects(Tesla.Client.t(), map) ::
{:error, any} | {:ok, Tesla.Env.t()}
def batch_retrieve_catalog_objects(client, body \\ %{}),
do: Tesla.post(client, "catalog/batch-retrieve", body)
@doc """
Creates or updates up to 10,000 target objects based on the provided
list of objects. The target objects are grouped into batches and each batch is
inserted/updated in an all-or-nothing manner.
If an object within a batch is malformed in some way, or violates a database constraint, the entire batch
containing that item will be disregarded. However, other batches in the same
request may still succeed. Each batch may contain up to 1,000 objects, and
batches will be processed in order as long as the total object count for the
request (items, variations, modifier lists, discounts, and taxes) is no more
than 10,000.
```
def batch_upsert_catalog_objects(body:)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `body` | [`Batch Upsert Catalog Objects Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/batch-upsert-catalog-objects-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |
### Response Type
[`Batch Upsert Catalog Objects Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/batch-upsert-catalog-objects-response.md)
### Example Usage
iex> body = %{
idempotency_key: "<KEY>",
batches: [%{
objects: [
%{
type: "ITEM",
id: "#Tea",
present_at_all_locations: true,
item_data: %{
name: "Tea",
description: "Hot Leaf Juice",
category_id: "#Beverages",
tax_ids: ["#SalesTax"],
variations: [%{
type: "ITEM_VARIATION",
id: "#Tea_Mug",
present_at_all_locations: true,
item_variation_data: %{
item_id: "#Tea",
name: "Mug",
pricing_type: "FIXED_PRICING",
price_money: %{
amount: 150,
currency: "USD"
}
}
}]
}
},
%{
type: "ITEM",
id: "#Coffee",
present_at_all_locations: true,
item_data: %{
name: "Coffee",
description: "Hot Bean Juice",
category_id: "#Beverages",
tax_id: "#SalesTax",
variations: [
%{
type: "ITEM_VARIATION",
id: "#Coffee_Regular",
present_at_all_locations: true,
item_variation_data: %{
item_id: "#Coffee",
name: "Regular",
pricing_type: "FIXED_PRICING",
price_money: %{
amount: 250,
currency: "USD"
}
}
}, %{
type: "ITEM_VARIATION",
id: "#Coffee_Large",
present_at_all_locations: true,
item_variation_data: %{
item_id: "#Coffee",
name: "Large",
pricing_type: "FIXED_PRICING",
price_money: %{
amount: 350,
currency: "USD"
}
}
}]
}
},
%{
type: "CATEGORY",
id: "#Beverages",
present_at_all_locations: true,
category_data: %{
name: "Beverages",
}
},
%{
type: "TAX",
id: "#SalesTax",
present_at_all_locations: true,
tax_data: %{
name: "Sales Tax",
calculation_phase: "TAX_SUBTOTAL_PHASE",
inclusion_type: "ADDITIVE",
percentage: "5.0",
applies_to_custom_amounts: true,
enabled: true
}
}
],
}]
}
iex> Square.client |> Square.Catalog.batch_upsert_catalog_objects(body)
"""
@spec batch_upsert_catalog_objects(Tesla.Client.t(), map) ::
{:error, any} | {:ok, Tesla.Env.t()}
def batch_upsert_catalog_objects(client, body \\ %{}),
do: Tesla.post(client, "catalog/batch-upsert", body)
@doc """
Upload an image file to create a new [CatalogImage](#type-catalogimage) for an existing
[CatalogObject](#type-catalogobject). Images can be uploaded and linked in this request or created independently
(without an object assignment) and linked to a [CatalogObject](#type-catalogobject) at a later time.
CreateCatalogImage accepts HTTP multipart/form-data requests with a JSON part and an image file part in
JPEG, PJPEG, PNG, or GIF format. The maximum file size is 15MB. The following is an example of such an HTTP request:
```
POST /v2/catalog/images
Accept: application/json
Content-Type: multipart/form-data;boundary="boundary"
Square-Version: XXXX-XX-XX
Authorization: Bearer {ACCESS_TOKEN}
--boundary
Content-Disposition: form-data; name="request"
Content-Type: application/json
{
"idempotency_key":"<KEY>",
"object_id": "ND6EA5AAJEO5WL3JNNIAQA32",
"image":{
"id":"#TEMP_ID",
"type":"IMAGE",
"image_data":{
"caption":"A picture of a cup of coffee"
}
}
}
--boundary
Content-Disposition: form-data; name="image"; filename="Coffee.jpg"
Content-Type: image/jpeg
{ACTUAL_IMAGE_BYTES}
--boundary
```
Additional information and an example cURL request can be found in the [Create a Catalog Image recipe](https://developer.squareup.com/docs/more-apis/catalog/cookbook/create-catalog-images).
```
def create_catalog_image(client, [request: nil, image_file: nil])
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `request` | [`Create Catalog Image Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/create-catalog-image-request.md) | Form, Optional | - |
| `image_file` | `File | UploadIO` | Form, Optional | - |
### Response Type
[`Create Catalog Image Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/create-catalog-image-response.md)
### Example Usage
iex> request = %{
idempotency_key: "528dea59-7bfb-43c1-bd48-4a6bba7dd61f86",
image: {
type: "IMAGE",
id: "#TEMP_ID"
}
}
iex> Square.client |> Square.Catalog.create_catalog_image(request: request)
"""
@spec create_catalog_image(Tesla.Client.t(), list) :: {:error, any} | {:ok, Tesla.Env.t()}
def create_catalog_image(client, params \\ []) do
mp =
case params do
[request: request, image_file: image_file] ->
Multipart.new()
|> Multipart.add_field("request", request, headers: [{"content-type: application/json"}])
|> Multipart.add_file(image_file, detect_content_type: true)
[request: request] ->
Multipart.new()
|> Multipart.add_field("request", request, headers: [{"content-type: application/json"}])
[image_file: image_file] ->
Multipart.new() |> Multipart.add_file(image_file, detect_content_type: true)
_ ->
Multipart.new()
end
Tesla.post(client, "catalog/images", mp)
end
@doc """
Returns information about the Square Catalog API, such as batch size
limits for `BatchUpsertCatalogObjects`.
```
def catalog_info(client)
```
### Response Type
[`Catalog Info Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/catalog-info-response.md)
### Example Usage
iex> Square.client |> Square.Catalog.catalog_info()
"""
@spec catalog_info(Tesla.Client.t()) :: {:error, any} | {:ok, Tesla.Env.t()}
def catalog_info(client), do: Tesla.get(client, "catalog/info")
@doc """
Returns a list of [CatalogObject](#type-catalogobject)s that includes
all objects of a set of desired types (for example, all [CatalogItem](#type-catalogitem)
and [CatalogTax](#type-catalogtax) objects) in the catalog. The `types` parameter
is specified as a comma-separated list of valid [CatalogObject](#type-catalogobject) types:
`ITEM`, `ITEM_VARIATION`, `MODIFIER`, `MODIFIER_LIST`, `CATEGORY`, `DISCOUNT`, `TAX`, `IMAGE`.

__Important:__ ListCatalog does not return deleted catalog items. To retrieve
deleted catalog items, use SearchCatalogObjects and set `include_deleted_objects`
to `true`.
```
def list_catalog(client, [
  cursor: nil,
  types: nil
])
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `cursor` | `String` | Query, Optional | The pagination cursor returned in the previous response. Leave unset for an initial request.<br>See [Pagination](https://developer.squareup.com/docs/basics/api101/pagination) for more information. |
| `types` | `String` | Query, Optional | An optional case-insensitive, comma-separated list of object types to retrieve, for example<br>`ITEM,ITEM_VARIATION,CATEGORY,IMAGE`.<br><br>The legal values are taken from the CatalogObjectType enum:<br>`ITEM`, `ITEM_VARIATION`, `CATEGORY`, `DISCOUNT`, `TAX`,<br>`MODIFIER`, `MODIFIER_LIST`, or `IMAGE`. |
### Response Type
[`List Catalog Response Map`](/doc/models/list-catalog-response.md)
### Example Usage

    iex> Square.client |> Square.Catalog.list_catalog()
"""
@spec list_catalog(Tesla.Client.t(), list) :: {:error, any} | {:ok, Tesla.Env.t()}
# Keyword params are passed straight through as query-string parameters.
def list_catalog(client, params \\ []), do: Tesla.get(client, "catalog/list", query: params)
@doc """
Creates or updates the target [CatalogObject](#type-catalogobject).

### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `body` | [`Upsert Catalog Object Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/upsert-catalog-object-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |

### Response Type

[`Upsert Catalog Object Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/upsert-catalog-object-response.md)

### Example Usage

    iex> body = %{
    ...>   idempotency_key: "<KEY>",
    ...>   type: "ITEM",
    ...>   id: "#Cocoa",
    ...>   item_data: %{
    ...>     name: "Cocoa",
    ...>     description: "Hot chocolate",
    ...>     abbreviation: "Ch"
    ...>   }
    ...> }
    iex> Square.client |> Square.Catalog.upsert_catalog_object(body)
"""
@spec upsert_catalog_object(Tesla.Client.t(), map) ::
        {:error, any} | {:ok, Tesla.Env.t()}
def upsert_catalog_object(client, body \\ %{}) do
  Tesla.post(client, "catalog/object", body)
end
@doc """
Deletes a single [CatalogObject](#type-catalogobject) based on the
provided ID and returns the set of successfully deleted IDs in the response.

Deletion is a cascading event such that all children of the targeted object
are also deleted. For example, deleting a [CatalogItem](#type-catalogitem)
will also delete all of its [CatalogItemVariation](#type-catalogitemvariation) children.
```
def delete_catalog_object(client, object_id)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `object_id` | `String` | Template, Required | The ID of the catalog object to be deleted. When an object is deleted, other<br>objects in the graph that depend on that object will be deleted as well (for example, deleting a<br>catalog item will delete its catalog item variations). |
### Response Type
[`Delete Catalog Object Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/delete-catalog-object-response.md)
### Example Usage

    iex> object_id = "object_id8"
    iex> Square.client |> Square.Catalog.delete_catalog_object(object_id)
"""
@spec delete_catalog_object(Tesla.Client.t(), binary) ::
        {:error, any} | {:ok, Tesla.Env.t()}
# The object id is interpolated into the path, not passed as a query param.
def delete_catalog_object(client, object_id),
  do: Tesla.delete(client, "catalog/object/#{object_id}")
@doc """
Returns a single [CatalogItem](#type-catalogitem) as a
[CatalogObject](#type-catalogobject) based on the provided ID. The returned
object includes all of the relevant [CatalogItem](#type-catalogitem)
information including: [CatalogItemVariation](#type-catalogitemvariation)
children, references to its
[CatalogModifierList](#type-catalogmodifierlist) objects, and the ids of
any [CatalogTax](#type-catalogtax) objects that apply to it.

### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `object_id` | `String` | Template, Required | The object ID of any type of catalog objects to be retrieved. |
| `include_related_objects` | `Boolean` | Query, Optional | If `true`, the response will include additional objects that are related to the<br>requested object, as follows:<br><br>If the `object` field of the response contains a CatalogItem,<br>its associated CatalogCategory, CatalogTax objects,<br>CatalogImages and CatalogModifierLists<br>will be returned in the `related_objects` field of the response. If the `object`<br>field of the response contains a CatalogItemVariation,<br>its parent CatalogItem will be returned in the `related_objects` field of<br>the response.<br><br>Default value: `false` |

### Response Type

[`Retrieve Catalog Object Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/retrieve-catalog-object-response.md)

### Example Usage

    iex> object_id = "object_id8"
    iex> Square.client |> Square.Catalog.retrieve_catalog_object(object_id)
"""
@spec retrieve_catalog_object(Tesla.Client.t(), binary, list) ::
        {:error, any} | {:ok, Tesla.Env.t()}
def retrieve_catalog_object(client, object_id, params \\ []) do
  Tesla.get(client, "catalog/object/#{object_id}", query: params)
end
@doc """
Queries the targeted catalog using a variety of query types:
`CatalogQuerySortedAttribute`,
`CatalogQueryExact`,
`CatalogQueryRange`,
`CatalogQueryText`,
`CatalogQueryItemsForTax`,
`CatalogQueryItemsForModifierList`,
`CatalogQueryItemsForItemOptions`,
`CatalogQueryItemVariationsForItemOptionValues`.
```
def search_catalog_objects(client, body)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `body` | [`Search Catalog Objects Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/search-catalog-objects-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |
### Response Type
[`Search Catalog Objects Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/search-catalog-objects-response.md)
### Example Usage

    iex> body = %{
    ...>   object_types: "ITEM",
    ...>   prefix_query: %{
    ...>     attribute_name: "name",
    ...>     attribute_prefix: "tea"
    ...>   },
    ...>   limit: 100
    ...> }
    iex> Square.client |> Square.Catalog.search_catalog_objects(body)
"""
@spec search_catalog_objects(Tesla.Client.t(), any) ::
        {:error, any} | {:ok, Tesla.Env.t()}
# The body is posted as-is; Tesla middleware configured on the client is
# presumably responsible for JSON-encoding it — verify against Square.client.
def search_catalog_objects(client, body \\ %{}), do: Tesla.post(client, "catalog/search", body)
@doc """
Updates the [CatalogModifierList](#type-catalogmodifierlist) objects
that apply to the targeted [CatalogItem](#type-catalogitem) without having
to perform an upsert on the entire item.
```
def update_modifier_lists(client, body)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `body` | [`Update Item Modifier Lists Request Map`](/doc/models/update-item-modifier-lists-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |
### Response Type
[`Update Item Modifier Lists Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/update-item-modifier-lists-response.md)
### Example Usage

    iex> body = %{
    ...>   item_ids: ["H42BRLUJ5KTZTTMPVSLFAACQ", "2JXOBJIHCWBQ4NZ3RIXQGJA6"],
    ...>   modifier_lists_to_enable: ["H42BRLUJ5KTZTTMPVSLFAACQ", "2JXOBJIHCWBQ4NZ3RIXQGJA6"],
    ...>   modifier_lists_to_disable: ["7WRC16CJZDVLSNDQ35PP6YAD"]
    ...> }
    iex> Square.client |> Square.Catalog.update_modifier_lists(body)
"""
@spec update_modifier_lists(Tesla.Client.t(), any) ::
        {:error, any} | {:ok, Tesla.Env.t()}
def update_modifier_lists(client, body \\ %{}) do
  # BUG FIX: the path was "catalog/update-item-modifiers-lists" (stray "s");
  # the Square endpoint is POST /v2/catalog/update-item-modifier-lists.
  Tesla.post(client, "catalog/update-item-modifier-lists", body)
end
@doc """
Updates the [CatalogTax](#type-catalogtax) objects that apply to the
targeted [CatalogItem](#type-catalogitem) without having to perform an
upsert on the entire item.
```
def update_item_taxes(client, body)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `body` | [`Update Item Taxes Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/update-item-taxes-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |
### Response Type
[`Update Item Taxes Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/update-item-taxes-response.md)
### Example Usage

    iex> body = %{
    ...>   item_ids: ["H42BRLUJ5KTZTTMPVSLFAACQ", "2JXOBJIHCWBQ4NZ3RIXQGJA6"],
    ...>   taxes_to_enable: ["4WRCNHCJZDVLSNDQ35PP6YAD"],
    ...>   taxes_to_disable: ["AQCEGCEBBQONINDOHRGZISEX"]
    ...> }
    iex> Square.client |> Square.Catalog.update_item_taxes(body)
"""
@spec update_item_taxes(Tesla.Client.t(), any) :: {:error, any} | {:ok, Tesla.Env.t()}
def update_item_taxes(client, body \\ %{}),
  do: Tesla.post(client, "catalog/update-item-taxes", body)
end
|
lib/api/catalog_api.ex
| 0.943595
| 0.806796
|
catalog_api.ex
|
starcoder
|
defmodule Helper.QueryBuilder do
  @moduledoc """
  Handle common query pieces across the project.
  """
  import Ecto.Query, warn: false

  alias GroupherServer.CMS
  alias CMS.Model.Repo, as: CMSRepo

  # Moderation-state flags, resolved once at compile time from CMS constants.
  @audit_illegal CMS.Constant.pending(:illegal)
  @audit_failed CMS.Constant.pending(:audit_failed)

  @doc """
  Load inner user field.

  Joins the `:user` association, selects only the joined users, then applies
  the common filters via `filter_pack/2`.
  """
  def load_inner_users(queryable, filter) do
    queryable
    |> join(:inner, [f], u in assoc(f, :user))
    |> select([f, u], u)
    |> filter_pack(filter)
  end

  @doc """
  Restrict to records inserted in the latest x months.
  """
  def recent_inserted(queryable, months: count) do
    end_of_today = Timex.now() |> Timex.end_of_day()
    x_months_ago = Timex.today() |> Timex.shift(months: -count) |> Timex.to_datetime()

    queryable
    |> where([q], q.inserted_at >= ^x_months_ago)
    |> where([q], q.inserted_at <= ^end_of_today)
  end

  @doc """
  Restrict to records inserted in the latest x days.
  """
  def recent_inserted(queryable, days: count) do
    end_of_today = Timex.now() |> Timex.end_of_day()
    x_days_ago = Timex.today() |> Timex.shift(days: -count) |> Timex.to_datetime()

    queryable
    |> where([q], q.inserted_at >= ^x_days_ago)
    |> where([q], q.inserted_at <= ^end_of_today)
  end

  # Commented-out keyword-list sort strategies kept for reference; the
  # count-based sorts below use sort_by_count/3 instead.
  # defp sort_strategy(:desc_inserted), do: [desc: :inserted_at, desc: :views]
  # defp sort_strategy(:most_views), do: [desc: :views, desc: :inserted_at]
  # defp sort_strategy(:least_views), do: [asc: :views, desc: :inserted_at]
  # defp strategy(:most_stars), do: [desc: :views, desc: :inserted_at]

  # Order a queryable by the row count of one of its associations (e.g. stars).
  # Left-joins the association and groups by the parent id so the count
  # aggregates per record; rows with no association still appear (count 0).
  defp sort_by_count(queryable, field, direction) do
    queryable
    |> join(:left, [p], s in assoc(p, ^field))
    |> group_by([p], p.id)
    |> select([p], p)
    |> order_by([_, s], {^direction, fragment("count(?)", s.id)})
  end

  @doc """
  Apply a map of common filters (sorting, time windows, tags, community,
  pagination, moderation state) to a queryable.

  Unrecognized entries fall through unchanged, so callers may pass their
  complete filter map.
  """
  def filter_pack(queryable, filter) when is_map(filter) do
    Enum.reduce(filter, queryable, fn
      {:sort, :desc_active}, queryable ->
        queryable |> order_by(desc: :active_at)

      {:sort, :desc_inserted}, queryable ->
        # queryable |> order_by(^sort_strategy(:desc_inserted))
        queryable |> order_by(desc: :inserted_at)

      {:sort, :asc_inserted}, queryable ->
        queryable |> order_by(asc: :inserted_at)

      {:sort, :desc_index}, queryable ->
        queryable |> order_by(desc: :index)

      {:sort, :asc_index}, queryable ->
        queryable |> order_by(asc: :index)

      {:sort, :most_views}, queryable ->
        queryable |> order_by(desc: :views, desc: :inserted_at)

      {:sort, :least_views}, queryable ->
        queryable |> order_by(asc: :views, desc: :inserted_at)

      {:sort, :most_stars}, queryable ->
        queryable |> sort_by_count(:stars, :desc)

      {:sort, :least_stars}, queryable ->
        queryable |> sort_by_count(:stars, :asc)

      {:length, :most_words}, queryable ->
        queryable |> order_by(desc: :length)

      {:length, :least_words}, queryable ->
        queryable |> order_by(asc: :length)

      {:when, :today}, queryable ->
        # date = DateTime.utc_now() |> Timex.to_datetime()
        # NOTE(review): uses server-local time; pass an explicit timezone
        # (e.g. Timex.now("America/Chicago")) if the server is not in the
        # same timezone as the audience — confirm.
        date = Timex.now()

        queryable
        |> where([p], p.inserted_at >= ^Timex.beginning_of_day(date))
        |> where([p], p.inserted_at <= ^Timex.end_of_day(date))

      {:when, :this_week}, queryable ->
        date = Timex.now()

        queryable
        |> where([p], p.inserted_at >= ^Timex.beginning_of_week(date))
        |> where([p], p.inserted_at <= ^Timex.end_of_week(date))

      {:when, :this_month}, queryable ->
        date = Timex.now()

        queryable
        |> where([p], p.inserted_at >= ^Timex.beginning_of_month(date))
        |> where([p], p.inserted_at <= ^Timex.end_of_month(date))

      {:when, :this_year}, queryable ->
        date = Timex.now()

        queryable
        |> where([p], p.inserted_at >= ^Timex.beginning_of_year(date))
        |> where([p], p.inserted_at <= ^Timex.end_of_year(date))

      {:article_tag, tag_name}, queryable ->
        from(
          q in queryable,
          join: t in assoc(q, :article_tags),
          where: t.raw == ^tag_name
        )

      # distinct + group_by keeps each article once even when it matches
      # several of the requested tags
      {:article_tags, tag_name_list}, queryable ->
        from(
          q in queryable,
          join: t in assoc(q, :article_tags),
          where: t.raw in ^tag_name_list,
          distinct: q.id,
          group_by: q.id
        )

      {:category, catetory_raw}, queryable ->
        from(
          q in queryable,
          join: t in assoc(q, :categories),
          where: t.raw == ^catetory_raw
        )

      # thread names are stored upper-cased
      {:thread, thread}, queryable ->
        thread = thread |> to_string |> String.upcase()
        from(q in queryable, where: q.thread == ^thread)

      {:community_id, community_id}, queryable ->
        from(
          q in queryable,
          join: t in assoc(q, :community),
          where: t.id == ^community_id
        )

      {:community_raw, community_raw}, queryable ->
        from(
          q in queryable,
          join: t in assoc(q, :community),
          where: t.raw == ^community_raw
        )

      # note: joins :communities (many-to-many), unlike :community above
      {:community, community_raw}, queryable ->
        from(
          q in queryable,
          join: t in assoc(q, :communities),
          where: t.raw == ^community_raw
        )

      {:first, first}, queryable ->
        queryable |> limit(^first)

      {:mark_delete, bool}, queryable ->
        queryable |> where([p], p.mark_delete == ^bool)

      {:pending, :legal}, queryable ->
        queryable |> where([p], p.pending != ^@audit_illegal)

      {:pending, :audit_failed}, queryable ->
        queryable |> where([p], p.pending == ^@audit_failed)

      # silently ignore filters this builder does not understand
      {_, _}, queryable ->
        queryable
    end)
  end

  @doc """
  Handle spec needs for CMS query filter.

  The first clause pattern-matches the queryable against the `CMSRepo`
  schema module itself, so the GitHub-specific sorts only apply to repo
  queries; any other queryable falls through unchanged.
  """
  def domain_query(CMSRepo = queryable, filter) do
    Enum.reduce(filter, queryable, fn
      {:sort, :most_github_star}, queryable ->
        queryable |> order_by(desc: :star_count)

      {:sort, :most_github_fork}, queryable ->
        queryable |> order_by(desc: :fork_count)

      {:sort, :most_github_watch}, queryable ->
        queryable |> order_by(desc: :watch_count)

      {:sort, :most_github_pr}, queryable ->
        queryable |> order_by(desc: :prs_count)

      {:sort, :most_github_issue}, queryable ->
        queryable |> order_by(desc: :issues_count)

      {_, _}, queryable ->
        queryable
    end)
  end

  def domain_query(queryable, _filter), do: queryable
end
|
lib/helper/query_builder.ex
| 0.744192
| 0.528594
|
query_builder.ex
|
starcoder
|
defmodule Scenic.Primitive.SceneRef do
  @moduledoc """
  A reference to another graph or component.

  When rendering a graph, the SceneRef primitive causes the render to stop
  what it is doing, render another graph, then continue on where it left off.

  The SceneRef primitive is usually added for you when you use a Component
  via the Primitive.Components helpers.

  However, it can also be useful directly if you want to declare multiple
  graphs in a single scene and reference them from each other. This is
  done when you want to limit the data scanned and sent when just a portion
  of your graph is changing.

  Be careful not to create circular references!

  ## Data

  The data for a SceneRef can take one of several forms.
  * `scene_name` - an atom naming a scene you are managing yourself
  * `{scene_name, sub_id}` - an atom naming a scene you are managing yourself and a sub-id
  * `pid` - the pid of a running scene (rarely used)
  * `{pid, sub_id}` - the pid of a running scene and a sub_id (rarely used)
  * `{:graph, scene, sub_id}` - a full graph key - must already be in `ViewPort.Tables`
  * `{{module, data}, sub_id}` - init data for a dynamic scene (very common)

  ## Styles

  The SceneRef is special in that it accepts all styles and transforms, even if they
  are non-standard. These are then inherited by any dynamic scenes that get created.

  ## Usage

  You should add/modify primitives via the helper functions in
  [`Scenic.Primitives`](Scenic.Primitives.html#scene_ref/3)
  """
  use Scenic.Primitive

  # ============================================================================
  # data verification and serialization

  # --------------------------------------------------------
  # Error text shown when the data fails verification below.
  @doc false
  def info(data),
    do: """
    #{IO.ANSI.red()}#{__MODULE__} data must point to a valid scene or component.
    #{IO.ANSI.yellow()}Received: #{inspect(data)}
    #{IO.ANSI.default_color()}
    """

  # --------------------------------------------------------
  # One clause per accepted data shape (see the module doc); anything else
  # is rejected with :invalid_data. Note the bare-pid form is normalized to
  # {pid, nil} while a bare scene name is passed through unchanged.
  @doc false
  def verify(name) when is_atom(name), do: {:ok, name}
  def verify({name, id}) when is_atom(name), do: {:ok, {name, id}}
  def verify(pid) when is_pid(pid), do: {:ok, {pid, nil}}
  def verify({pid, id}) when is_pid(pid), do: {:ok, {pid, id}}
  def verify({:graph, scene, id}), do: {:ok, {:graph, scene, id}}
  def verify({{module, data}, id}) when is_atom(module), do: {:ok, {{module, data}, id}}
  def verify(_), do: :invalid_data

  # ============================================================================
  # filter and gather styles

  @doc """
  Returns a list of styles recognized by this primitive.
  """
  @spec valid_styles() :: [:all, ...]
  def valid_styles(), do: [:all]

  # SceneRef accepts every style unchanged (see "Styles" in the module doc).
  def filter_styles(styles) when is_map(styles), do: styles
end
|
lib/scenic/primitive/scene_ref.ex
| 0.863536
| 0.564579
|
scene_ref.ex
|
starcoder
|
use Croma
defmodule DistAgent.Config do
  # Test builds use much shorter intervals so timer-driven behaviour can be
  # exercised quickly. These are compile-time constants: Mix.env/0 is
  # evaluated when the module is compiled, not at runtime.
  @default_tick_interval (if Mix.env() == :test, do: 1_000, else: 60_000)
  @default_quota_collection_interval (if Mix.env() == :test, do: 1_000, else: 5 * 60_000)

  @moduledoc """
  `dist_agent` defines the following application configs:

  - `:tick_interval`
    Time interval (in milliseconds) between periodic tick events.
    Defaults to `#{@default_tick_interval}`.
    By using smaller value you can increase the precision of low-resolution timers with higher overhead.
  - `:quota_collection_interval`
    Time interval (in milliseconds) between periodic collections of number of distributed agent.
    Defaults to `#{@default_quota_collection_interval}`.
    By using smaller value you can get more accurate view of total number of distributed agents with higher overhead.

  Note that each `dist_agent` process uses application configs stored in the local node.
  If you want to configure the options above you must set them on all nodes in your cluster.

  In addition to the configurations above, the following configurations defined by the underlying libraries are also available:

  - `RaftFleet.Config`
  - `RaftKV.Config`

  ## About `:rafted_value_config_maker` option for `:raft_fleet`

  `:raft_fleet` provides a way to configure each consensus group by setting an implementation of `RaftFleet.RaftedValueConfigMaker` behaviour
  as `:rafted_value_config_maker` option.
  `:dist_agent` and underlying `:raft_kv` respect this option; they use the callback module (if any)
  when creating a `t:RaftedValue.Config.t/0`.
  `:dist_agent` defines `DistAgent.Quota` as a consensus group, and in order to construct a `t:RaftedValue.Config.t/0`
  for `DistAgent.Quota` in your implementation of `RaftFleet.RaftedValueConfigMaker` behaviour,
  you can use `DistAgent.Quota.make_rv_config/1`.
  See also the moduledoc of `RaftKV.Config`.
  """

  # Reads :tick_interval from the :dist_agent application env at call time,
  # falling back to the compile-time default above. Croma's `defun` also
  # validates the pos_integer return type.
  defun tick_interval() :: pos_integer do
    Application.get_env(:dist_agent, :tick_interval, @default_tick_interval)
  end

  # Same lookup for :quota_collection_interval.
  defun quota_collection_interval() :: pos_integer do
    Application.get_env(:dist_agent, :quota_collection_interval, @default_quota_collection_interval)
  end
end
|
lib/dist_agent/config.ex
| 0.835484
| 0.47171
|
config.ex
|
starcoder
|
defmodule Surface.Catalogue do
  @moduledoc """
  A behaviour to provide additional information about the catalogue.

  Optional for local catalogues. Usually required if you want to share
  your components as a library.
  """

  @doc """
  Returns a keyword list of config options to be used by the catalogue tool.

  Available options:

  * `head_css` - CSS related content to be added to the `<head>...</head>` section
    of each example or playground.
  * `head_js` - JS related content to be added to the `<head>...</head>` section
    of each example or playground.
  * `example` - A keyword list of options to be applied for all examples
    in the catalogue.
  * `playground` - A keyword list of options to be applied for all playgrounds
    in the catalogue.
  """
  @callback config :: keyword()

  # Fallback asset tags used when neither the catalogue nor the user
  # overrides them (see get_config/1 merge order below).
  @default_config [
    head_css: """
    <link phx-track-static rel="stylesheet" href="/css/app.css"/>
    """,
    head_js: """
    <script defer type="module" src="/js/app.js"></script>
    """
  ]

  defmacro __using__(_opts) do
    quote do
      @behaviour Surface.Catalogue
      import Surface.Catalogue, only: [load_asset: 2]
    end
  end

  @doc """
  Loads a text file as module attribute so you can inject its content directly
  in `head_css` or `head_js` config options.

  Useful to avoid depending on external css or js code. The path should be relative
  to the caller's folder.

  Available options:

  * `as` - the name of the module attribute to be generated.
  """
  defmacro load_asset(file, opts) do
    as = Keyword.fetch!(opts, :as)

    quote do
      # @external_resource makes the compiler recompile the caller when the
      # asset file changes.
      path = Path.join(__DIR__, unquote(file))
      @external_resource path
      Module.put_attribute(__MODULE__, unquote(as), File.read!(path))
    end
  end

  # Extracts the :catalogue metadata attached to a module's docs chunk,
  # adding the English moduledoc under :doc. Returns nil when the module has
  # no docs chunk or no catalogue metadata.
  @doc false
  def get_metadata(module) do
    case Code.fetch_docs(module) do
      {:docs_v1, _, _, "text/markdown", docs, %{catalogue: meta}, _} ->
        doc = Map.get(docs, "en")
        meta |> Map.new() |> Map.put(:doc, doc)

      _ ->
        nil
    end
  end

  # Builds the effective config for a module by layering, lowest to highest
  # precedence: defaults < catalogue config < per-type (:example/:playground)
  # config < the module's own :config metadata.
  # NOTE(review): assumes get_metadata/1 returns a map with a :type key; a
  # module without catalogue metadata would make `meta` nil and raise here —
  # confirm callers only pass catalogue modules.
  @doc false
  def get_config(module) do
    meta = get_metadata(module)
    user_config = Map.get(meta, :config, [])
    catalogue = Keyword.get(user_config, :catalogue)
    catalogue_config = get_catalogue_config(catalogue)
    {type_config, catalogue_config} = Keyword.split(catalogue_config, [:example, :playground])

    @default_config
    |> Keyword.merge(catalogue_config)
    |> Keyword.merge(type_config[meta.type] || [])
    |> Keyword.merge(user_config)
  end

  # Fetches the :subject option or raises a compile error pointing at the
  # caller's file/line with a hint on how to set it.
  @doc false
  def fetch_subject!(config, type, caller) do
    case Keyword.fetch(config, :subject) do
      {:ok, subject} ->
        subject

      _ ->
        message = """
        no subject defined for #{inspect(type)}

        Hint: You can define the subject using the :subject option. Example:

          use #{inspect(type)}, subject: MyApp.MyButton
        """

        Surface.IOHelper.compile_error(message, caller.file, caller.line)
    end
  end

  defp get_catalogue_config(nil) do
    []
  end

  # Only call the catalogue's config/0 if the module actually compiles/loads;
  # otherwise fall back to an empty config.
  defp get_catalogue_config(catalogue) do
    if module_loaded?(catalogue) do
      catalogue.config()
    else
      []
    end
  end

  defp module_loaded?(module) do
    match?({:module, _mod}, Code.ensure_compiled(module))
  end
end
|
lib/surface/catalogue.ex
| 0.857141
| 0.408542
|
catalogue.ex
|
starcoder
|
defmodule Poly1305 do
  require Chacha20
  import Bitwise

  @moduledoc """
  Poly1305 message authentication

  https://tools.ietf.org/html/rfc7539
  """

  @typedoc """
  Encryption key
  """
  @type key :: binary
  @typedoc """
  Per-message nonce

  By convention, the first 4 bytes should be sender-specific.
  The trailing 8 bytes may be as simple as a counter.
  """
  @type nonce :: binary
  @typedoc """
  MAC tag
  """
  @type tag :: binary

  # Clamp the `r` half of the key as required by RFC 7539 section 2.5.
  defp clamp(r), do: r &&& 0x0FFFFFFC0FFFFFFC0FFFFFFC0FFFFFFF

  # Split the 32-byte one-time key into the clamped multiplier `r` and the
  # addend `s`, both read as 16-byte little-endian integers.
  defp split_key(k),
    do:
      {k |> binary_part(0, 16) |> :binary.decode_unsigned(:little) |> clamp,
       k |> binary_part(16, 16) |> :binary.decode_unsigned(:little)}

  # The Poly1305 prime: 2^130 - 5.
  defp p, do: 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFB

  @doc """
  Compute a Message authentication code

  The one-time key should never be reused.
  """
  @spec hmac(binary, key) :: tag
  def hmac(m, k) do
    {r, s} = split_key(k)
    val = process_message(m, r, 0) + s

    # The tag is the accumulator truncated/padded to 16 little-endian bytes.
    val
    |> :binary.encode_unsigned(:little)
    |> result_align
  end

  # Derive the one-time Poly1305 key from the cipher key and nonce
  # (RFC 7539 section 2.6): the first 32 bytes of ChaCha20 block 0.
  @doc false
  def key_gen(k, n), do: k |> Chacha20.block(n, 0) |> binary_part(0, 32)

  # Force the encoded accumulator to exactly 16 bytes: truncate if the sum
  # overflowed into a 17th byte, zero-pad if leading zero bytes were dropped.
  defp result_align(s) when byte_size(s) >= 16, do: binary_part(s, 0, 16)
  defp result_align(s) when byte_size(s) < 16, do: align_pad(s, 16)

  # 2^n as an exact integer. Replaces the previous float-based
  # `:math.pow/2 |> round` round-trip with an integer bit shift.
  defp int_pow_two(n), do: 1 <<< n

  # Fold the message into the accumulator 16 bytes at a time; the final
  # (possibly short) block is marked with a high bit at its actual bit width
  # (2^s term in new_a/4), per RFC 7539 section 2.5.
  defp process_message(<<>>, _r, a), do: a

  defp process_message(<<i::unsigned-little-integer-size(128), rest::binary>>, r, a),
    do: process_message(rest, r, new_a(i, a, r, 128))

  defp process_message(m, r, a),
    do: m |> :binary.decode_unsigned(:little) |> new_a(a, r, bit_size(m))

  # One accumulator step: a = ((a + block + 2^s) * r) mod (2^130 - 5).
  defp new_a(i, a, r, s), do: rem(r * (a + i + int_pow_two(s)), p())

  @doc """
  authenticated encryption with additional data - encryption

  - message to be encrypted
  - shared secret key
  - one-time use nonce
  - additional authenticated data

  The return value will be a tuple of `{ciphertext, MAC}`

  The algorithm is applied as described in RFC7539:

  - The key and nonce are used to encrypt the message with ChaCha20.
  - The one-time MAC key is derived from the cipher key and nonce.
  - The ciphertext and additional data are authenticated with the MAC
  """
  @spec aead_encrypt(binary, key, nonce, binary) :: {binary, tag}
  def aead_encrypt(m, k, n, a \\ "") do
    otk = key_gen(k, n)
    c = Chacha20.crypt(m, k, n, 1)
    # MAC input layout (RFC 7539 section 2.8):
    # pad16(AAD) || pad16(ciphertext) || le64(len(AAD)) || le64(len(ciphertext))
    md = align_pad(a, 16) <> align_pad(c, 16) <> msg_length(a) <> msg_length(c)
    {c, hmac(md, otk)}
  end

  @doc """
  authenticated encryption with additional data - decryption

  - encrypted message
  - shared secret key
  - one-time use nonce
  - additional authenticated data
  - MAC

  On success, returns the plaintext message. If the message cannot be
  authenticated `:error` is returned.
  """
  @spec aead_decrypt(binary, key, nonce, binary, tag) :: binary | :error
  def aead_decrypt(c, k, n, a \\ "", t) do
    otk = key_gen(k, n)
    md = align_pad(a, 16) <> align_pad(c, 16) <> msg_length(a) <> msg_length(c)
    m = Chacha20.crypt(c, k, n, 1)
    # Constant-time tag comparison; reject the plaintext on mismatch.
    if md |> hmac(otk) |> same_hmac?(t), do: m, else: :error
  end

  # le64 length field: byte length as little-endian, zero-padded to 8 bytes.
  defp msg_length(s), do: s |> byte_size |> :binary.encode_unsigned(:little) |> align_pad(8)

  # Zero-pad `s` on the right up to the next multiple of `n` bytes.
  defp align_pad(s, n) do
    case s |> byte_size |> rem(n) do
      # Already the proper width
      0 ->
        s

      r ->
        s <> zeroes(n - r)
    end
  end

  # A binary of `n` zero bytes. `:binary.copy/2` replaces the previous
  # byte-by-byte accumulation loop.
  defp zeroes(n), do: :binary.copy(<<0>>, n)

  @doc """
  compare two HMACs in constant time
  """
  @spec same_hmac?(binary, binary) :: boolean
  def same_hmac?(a, b), do: Equivalex.equal?(a, b)
end
|
lib/poly1305.ex
| 0.830628
| 0.51129
|
poly1305.ex
|
starcoder
|
alias Subtitle.Frame
defmodule Subtitle.SubRip.Parser do
  @moduledoc """
  Parse a single Frame. The parser is defined as a finite state machine where
  each line of text contains a fraction of a Frame (index or time or caption).
  The Frame struct is returned once it's completely parsed, it returns an
  intermediate parser state otherwise.
  """

  # Ordered list of FSM states; transition/1 simply advances to the next
  # element, and reaching :frame_end emits the completed frame.
  @states [
    :frame_index,
    :frame_time,
    :frame_caption,
    :frame_end
  ]

  # :index          - frame sequence number parsed from the index line
  # :state          - current FSM state (one of @states)
  # :frame          - the Frame struct being filled in
  # :caption_buffer - caption lines accumulated in reverse order
  defstruct [
    :index,
    :state,
    frame: %Frame{},
    caption_buffer: []
  ]

  @doc """
  Returns a new parser struct configured to start parsing a new frame
  """
  def new() do
    %__MODULE__{
      state: :frame_index
    }
  end

  @doc """
  Parses a line of text into a frame part. The parser struct holds the
  information about what part of the Frame is expected next.

  You need to call this function with the previous parser state and the next
  subtitle line until a Frame is completed. When all the information about the
  frame is extracted, the new frame is returned.
  """
  # @spec t() :: {:ok, Frame.t()} | {:cont, t()}
  # :frame_index — expect a line containing only digits; non-matching lines
  # are skipped (the state does not advance).
  # NOTE(review): `frame` is bound but unused in this clause (compiler
  # warning) — could be `frame: _frame`.
  def parse(%__MODULE__{state: :frame_index, frame: frame} = parser, line) do
    case Regex.scan(~r/^(\d+)$/, line) do
      [] ->
        continue(parser)

      [[_match, value]] ->
        parser
        |> Map.put(:index, String.to_integer(value))
        |> transition()
    end
  end

  # :frame_time — expect "HH:MM:SS,mmm --> HH:MM:SS,mmm"; stores the parsed
  # begin/end times on the frame and advances to :frame_caption.
  # NOTE(review): the SRT fractional field is milliseconds, but the captured
  # value is passed as the microsecond argument of Time.new/4 — confirm this
  # is intended (or compensated for downstream).
  def parse(%__MODULE__{state: :frame_time, frame: frame} = parser, line) do
    case Regex.scan(
           ~r/^(\d{2}):(\d{2}):(\d{2}),(\d{3}) --> (\d{2}):(\d{2}):(\d{2}),(\d{3})$/,
           line
         ) do
      [] ->
        continue(parser)

      [
        [
          _,
          begin_hour,
          begin_minute,
          begin_second,
          begin_microsecond,
          end_hour,
          end_minute,
          end_second,
          end_microsecond
        ]
      ] ->
        {:ok, begin_time} =
          Time.new(
            String.to_integer(begin_hour),
            String.to_integer(begin_minute),
            String.to_integer(begin_second),
            String.to_integer(begin_microsecond)
          )

        {:ok, end_time} =
          Time.new(
            String.to_integer(end_hour),
            String.to_integer(end_minute),
            String.to_integer(end_second),
            String.to_integer(end_microsecond)
          )

        parser
        |> put_frame(%{frame | begin_time: begin_time, end_time: end_time})
        |> transition()
    end
  end

  # :frame_caption — a bare newline terminates the caption: flush the buffer
  # into the frame and advance (to :frame_end, which emits the frame).
  def parse(
        %__MODULE__{state: :frame_caption, caption_buffer: buffer, frame: frame} = parser,
        "\n"
      ) do
    parser
    |> put_frame(%{frame | caption: buffer_to_caption(buffer)})
    |> transition()
  end

  # :frame_caption — any other line is another caption line; prepend it to
  # the buffer (reversed later) and keep collecting.
  def parse(%__MODULE__{state: :frame_caption} = parser, line) do
    parser
    |> append_buffer(line)
    |> continue()
  end

  # Wrap an intermediate parser state for the caller to feed the next line.
  defp continue(%__MODULE__{} = parser) do
    {:cont, parser}
  end

  # Advance to the next state in @states; reaching :frame_end returns the
  # completed frame instead of a parser.
  defp transition(%__MODULE__{state: state, frame: frame} = parser) do
    index = Enum.find_index(@states, &(&1 == state))

    case Enum.at(@states, index + 1) do
      :frame_end -> {:ok, frame}
      state -> continue(%{parser | state: state})
    end
  end

  defp put_frame(%__MODULE__{} = parser, %Frame{} = frame) do
    %{parser | frame: frame}
  end

  # Prepend for O(1) accumulation; buffer_to_caption/1 reverses at the end.
  defp append_buffer(%__MODULE__{caption_buffer: buffer} = parser, data) do
    %{parser | caption_buffer: [data | buffer]}
  end

  # Join the (reversed) caption lines into a single trimmed string. If the
  # bytes are not valid UTF-8, retry after re-encoding each line from latin1.
  defp buffer_to_caption(buffer) do
    buffer
    |> Enum.reverse()
    |> to_string()
    |> String.trim_trailing()
  rescue
    _error in UnicodeConversionError ->
      # FIXME: If the file was read with an incorrect encoding we might need to
      # fallback to latin1
      buffer
      |> Enum.reverse()
      |> Enum.map(fn value ->
        :unicode.characters_to_binary(value, :latin1)
      end)
      |> to_string()
      |> String.trim_trailing()
  end
end
|
lib/subtitle/sub_rip/parser.ex
| 0.869417
| 0.694782
|
parser.ex
|
starcoder
|
defmodule Platformsh do
alias Platformsh.Get, as: Get
defmodule Config do
@moduledoc """
Reads Platform.sh configuration from environment variables.
See: https://docs.platform.sh/development/variables.html
The following are 'magic' properties that may exist on a Config object. Before accessing a property, check its
existence with hasattr(config, variableName). Attempting to access a nonexistent variable will throw an exception.
Attributes:
(The following properties are available at build time and run time.)
project (string):
The project ID.
application_name (string):
The name of the application, as defined in its configuration.
tree_id (string):
An ID identifying the application tree before it was built: a unique hash is generated based on the contents
of the application's files in the repository.
app_dir (string):
The absolute path to the application.
project_entropy (string):
A random string generated for each project, useful for generating hash keys.
(The following properties are only available at runtime.)
branch (string):
The Git branch name.
environment (string):
The environment ID (usually the Git branch plus a hash).
document_root (string):
The absolute path to the web root of the application.
smtp_host (string):
The hostname of the Platform.sh default SMTP server (an empty string if emails are disabled on the
environment.
port (string):
The TCP port number the application should listen to for incoming requests.
socket (string):
The Unix socket the application should listen to for incoming requests.
. Platform.sh Environment Variables
https://docs.platform.sh/development/variables.html
"""
@doc """
Local index of the variables that can be accessed as direct properties (build and
runtime). The key is the property that will be read. The value is the environment variables, minus prefix,
that contains the value to look up.
"""
def environment() do
env_prefix = 'PLATFORM_'
%{
# Local index of the variables that can be accessed at build-time
project: Get.value(:project, env_prefix),
app_dir: Get.value(:app_dir, env_prefix),
application_name: Get.value(:application_name, env_prefix),
tree_id: Get.value(:tree_id, env_prefix),
project_entropy: Get.value(:project_entropy, env_prefix),
mode: Get.value(:mode, env_prefix),
# Local index of the variables that can be accessed as direct properties
# (runtime only).
branch: Get.value(:branch, env_prefix),
environment: Get.value(:environment, env_prefix),
document_root: Get.value(:document_root, env_prefix),
smtp_host: Get.value(:smtp_host, env_prefix),
# Local index of variables available at runtime that have no prefix.
port: Get.value(:port),
socket: Get.value(:socket),
# Local index of variables available at runtime that need decoding
routes: Get.value(:routes, env_prefix),
relationships: Get.value(:relationships, env_prefix),
application: Get.value(:application, env_prefix),
variables: Get.value(:variables, env_prefix)
}
end
@doc """
Checks whether the code is running on a platform with valid environment variables.
Returns:
  bool:
    True if configuration can be used, False otherwise.
"""
def is_valid_platform?() do
  # PLATFORM_APPLICATION_NAME is present in both build and runtime phases,
  # so its absence means we are not on Platform.sh at all.
  not is_nil(Map.get(environment(), :application_name))
end
@doc """
Checks whether the code is running in a build environment.
Returns:
  bool: True if running in build environment, False otherwise.
"""
def in_build?() do
  # During build the PLATFORM_ENVIRONMENT variable is not yet set.
  is_valid_platform?() and is_nil(Map.get(environment(), :environment))
end
@doc """
Checks whether the code is running in a runtime environment.
Returns:
  bool: True if in a runtime environment, False otherwise.
"""
def in_runtime?() do
  # Compare against nil explicitly: `and` returns its right operand, so the
  # previous `is_valid_platform?() and environment()[:environment]` leaked the
  # raw environment string instead of the documented boolean.
  is_valid_platform?() and environment()[:environment] != nil
end
@doc """
Retrieves the credentials for accessing a relationship.
Args:
  relationship (string):
    The relationship name as defined in .platform.app.yaml
  for the moment it returns the first in the index of clustered services
Returns:
  The credentials dict for the service pointed to by the relationship.
"""
def credentials(relationship) do
  # A relationship maps to a list of service instances; only the first
  # instance is returned for now (match raises if the list is empty).
  [first_instance | _rest] = environment()[:relationships][relationship]
  first_instance
end
@doc """
Retrieves the unfiltered credentials for accessing a relationship.
Returns:
  The credentials dict for the service pointed to by the relationship.
"""
def credentials() do
  # Full relationships map as decoded from PLATFORM_RELATIONSHIPS
  # (nil when not in a runtime environment).
  Map.get(environment(), :relationships)
end
@doc """
variables/1 Returns a variable from the VARIABLES dict.
Note:
  Variables prefixed with `env`: can be accessed as normal environment variables. This method will return
  such a variable by the name with the prefix still included. Generally it's better to access those variables
  directly.
Args:
  name (string):
    The name of the variable to retrieve.
Returns:
  The value of the variable, or nil if it is not defined. This may be a
  string or a dict.
"""
def variables(name) do
  # Map.get/2 already returns nil for a missing key, replacing the previous
  # hand-rolled Map.has_key?/if/else. Behavior is unchanged, including the
  # BadMapError raised when called outside a runtime environment (variables
  # map is nil there).
  Map.get(environment()[:variables], name)
end
@doc """
variables/0 Returns the full variables dict.
If you're looking for a specific variable, the variable() method is a more robust option.
This method is for classes where you want to scan the whole variables list looking for a pattern.
It's valid for there to be no variables defined at all, so there's no guard for missing values.
Returns:
  The full variables dict.
"""
def variables() do
  Map.get(environment(), :variables)
end
@doc """
routes/0 Return the routes definition.
Returns:
  The routes dict, or nil when the routes are not accessible in the current
  environment (e.g. during the build phase).
"""
def routes() do
  # NOTE(review): the earlier doc claimed a RuntimeError was raised in the
  # wrong environment, but no code here raises — nil is returned instead.
  environment()[:routes]
end
@doc """
routes/1 Get route definition by route ID.
Args:
  route_id (string):
    The ID of the route to load.
Returns:
  The route definition, or nil when no route exists for that ID.
"""
def routes(route_id) do
  # NOTE(review): the earlier doc claimed a KeyError for an unknown ID, but
  # Access semantics (`[]`) return nil for missing keys — no exception here.
  environment()[:routes][route_id]
end
@doc """
Returns the application definition dict.
This is, approximately, the .platform.app.yaml file as a nested dict. However, it also has other information
added by Platform.sh as part of the build and deploy process.
Returns:
  The application definition dict.
"""
def application() do
  Map.get(environment(), :application)
end
@doc """
Determines if the current environment is a Platform.sh Dedicated Enterprise environment.
Returns:
  bool:
    True on an Enterprise environment, False otherwise.
"""
def on_dedicated_enterprise?() do
  # BUG FIX: the mode value comes from System.get_env/1 and is therefore a
  # binary (String); comparing it to the charlist 'enterprise' was always
  # false. Compare against a string instead.
  is_valid_platform?() and environment()[:mode] == "enterprise"
end
@doc """
Determines if the current environment is a production environment.
Note:
  There may be a few edge cases where this is not entirely correct on Enterprise, if the production branch is
  not named `production`. In that case you'll need to use your own logic.
Returns:
  bool:
    True if the environment is a production environment, False otherwise. It will also return False if not
    running on Platform.sh or in the build phase.
"""
def on_production?() do
  # Enterprise deployments conventionally use "production"; Grid uses "master".
  expected_branch =
    if on_dedicated_enterprise?() do
      "production"
    else
      "master"
    end

  environment()[:branch] == expected_branch
end
@doc """
Determines if any routes are defined.
Returns:
  bool:
    True if routes are defined, False otherwise.
"""
def has_routes?() do
  not is_nil(environment()[:routes])
end
@doc """
Determines if any relationships are defined, and thus credentials are available.
Returns:
  bool:
    True if at least one relationship is defined, False otherwise.
"""
# NOTE(review): by Elixir convention this predicate would be named
# `has_relationships?`; the bare name is kept so existing callers keep working.
def has_relationships() do
  not is_nil(environment()[:relationships])
end
@doc """
Determines if a relationship is defined, and thus has credentials available.
Args:
  relationship (string):
    The name of the relationship to check.
Returns:
  bool:
    True if the relationship is defined, False otherwise (including when no
    relationships are available at all, e.g. during build).
"""
def has_relationship(relationship) do
  # Robustness fix: outside a runtime environment :relationships is nil, and
  # Map.has_key?(nil, _) raised BadMapError. A missing map now answers false.
  case environment()[:relationships] do
    nil -> false
    relationships -> Map.has_key?(relationships, relationship)
  end
end
@doc """
Returns just the names of the defined relationships.
Returns:
  A list of relationship names (empty when no relationships are defined,
  e.g. during the build phase).
"""
def relationships() do
  # Robustness fix: Map.keys(nil) raised BadMapError outside runtime;
  # fall back to an empty map so callers get [] instead of a crash.
  Map.keys(environment()[:relationships] || %{})
end
@doc """
Formats a service credentials dict as a DSN URL for use with Ecto.
Returns:
  a string in the format "ecto://user:password@host/database".
"""
def ecto_dsn_formatter(config) do
  # Interpolate the string-keyed credential fields directly; a missing key
  # interpolates as an empty string, matching the previous behavior.
  "ecto://#{config["username"]}:#{config["password"]}@#{config["host"]}/#{config["path"]}"
end
@doc """
Guesses a relational database for ecto
Returns:
a string in the format of a dsn url for ecto or nil if none found,
this is guess work so we don't want to crash on no value
"""
def guess_relational_database() do
# Only meaningful at runtime; returns nil (if without else) during build.
if in_runtime?() do
# Find the first relationship whose first service instance speaks a
# relational scheme. credentials/0 returns the relationships map, so
# Enum.find yields a {relationship_name, instances} tuple (or nil —
# NOTE(review): a nil here crashes Tuple.delete_at below; confirm a
# relational service is always present when this is called).
cred =
Enum.find(Platformsh.Config.credentials(), fn {_rel, cred} ->
[config | _tail] = cred
String.contains?(config["scheme"], ["mysql", "pgsql"])
end)
# Drop the relationship name (element 0), leaving [instances]; then take
# the first instance config out of the nested list via the match.
[[config | _tailer] = _outer_list | _tail] = Tuple.to_list(Tuple.delete_at(cred, 0))
Platformsh.Config.ecto_dsn_formatter(config)
end
end
end
end
defmodule Platformsh do
  defmodule Get do
    @moduledoc """
    Helpers for reading Platform.sh environment variables.
    See: https://docs.platform.sh/development/variables.html
    """

    @doc """
    Decodes a Platform.sh environment variable.
    Args:
      variable (string):
        Base64-encoded JSON (the content of an environment variable), or nil.
    Returns:
      A dict (if representing a JSON object), or a scalar type; nil passes through.
    Raises:
      JSON decoding error.
    """
    def decode(nil), do: nil
    def decode(variable), do: Poison.decode!(Base.decode64!(variable))

    @doc """
    value/1 Reads an unprefixed environment variable.
    Args:
      item (atom):
        The variable to read (:port or :socket).
    """
    def value(item) do
      # PORT and SOCKET are the only Platform.sh variables without the prefix.
      unprefixed_runtime = %{
        port: "PORT",
        socket: "SOCKET"
      }

      System.get_env(unprefixed_runtime[item])
    end

    @doc """
    value/2 Reads an environment variable, taking the prefix into account.
    Args:
      item (atom):
        The variable to read.
      env_prefix (string):
        The environment variable prefix (e.g. "PLATFORM_").
    """
    def value(item, env_prefix) do
      # Plain string variables available at build time.
      # Fix: APPLICATION_NAME was a charlist ('...') while its siblings were
      # strings; normalized to a string for consistency.
      direct_build = %{
        project: "PROJECT",
        app_dir: "APP_DIR",
        application_name: "APPLICATION_NAME",
        tree_id: "TREE_ID",
        project_entropy: "PROJECT_ENTROPY"
      }

      # Plain string variables available at runtime only.
      direct_runtime = %{
        branch: "BRANCH",
        environment: "ENVIRONMENT",
        document_root: "DOCUMENT_ROOT",
        smtp_host: "SMTP_HOST"
      }

      # Base64-encoded JSON variables that must be decoded before use.
      encoded_runtime = %{
        routes: "ROUTES",
        relationships: "RELATIONSHIPS",
        application: "APPLICATION",
        variables: "VARIABLES"
      }

      cond do
        Map.has_key?(direct_build, item) ->
          System.get_env("#{env_prefix}#{direct_build[item]}")

        Map.has_key?(direct_runtime, item) ->
          System.get_env("#{env_prefix}#{direct_runtime[item]}")

        Map.has_key?(encoded_runtime, item) ->
          decode(System.get_env("#{env_prefix}#{encoded_runtime[item]}"))

        # Fix: the catch-all was written `True ->` — that is the module alias
        # atom :"Elixir.True", which only matched by accident of truthiness.
        true ->
          nil
      end
    end
  end
end
defmodule PlatformshConfigMagic do
  # Fix: @moduledoc sat outside the defmodule, where module attributes are
  # invalid at the top level of a file; moved inside the module.
  @moduledoc """
  Magically exports a guessed DATABASE url for ecto.
  """

  # Runs magic/0 when this module is loaded; it must return :ok or the load fails.
  @on_load :magic

  @doc """
  Sets DATABASE_URL from the guessed relational service credentials, unless
  the variable is already present in the environment.
  """
  def magic() do
    database_url = Platformsh.Config.guess_relational_database()

    if System.get_env("DATABASE_URL") == nil && database_url do
      System.put_env("DATABASE_URL", database_url)
    end

    :ok
  end
end
|
lib/platformshconfig.ex
| 0.897535
| 0.646376
|
platformshconfig.ex
|
starcoder
|
defmodule RDF.IRI do
@moduledoc """
A structure for IRIs.
This structure just wraps a plain IRI string and doesn't bother with the
components of the IRI, since in the context of RDF there are usually very many
IRIs and parsing them isn't needed in most cases. For these reasons we don't
use Elixirs built-in `URI` structure, because it would be unnecessary
expensive in terms of performance and memory.
The component parts can always be retrieved with the `RDF.IRI.parse/1`
function, which returns Elixirs built-in `URI` structure. Note, that `URI`
doesn't escape Unicode characters by default, so it's a suitable structure for
IRIs.
see <https://tools.ietf.org/html/rfc3987>
"""
alias RDF.Namespace
import RDF.Guards
@type t :: %__MODULE__{value: String.t()}
@type coercible :: String.t() | URI.t() | module | t
# The wrapped IRI string is the only field and is mandatory.
@enforce_keys [:value]
defstruct [:value]
# see https://tools.ietf.org/html/rfc3986#appendix-B
@scheme_regex Regex.recompile!(~r/^([a-z][a-z0-9\+\-\.]*):/i)
@doc """
The default base IRI to be used when reading a serialization and no `base_iri` option is provided.
The value can be set via the `default_base_iri` configuration. For example:
config :rdf,
default_base_iri: "http://my_app.example/"
You can also set `:default_base_iri` to a module-function tuple `{mod, fun}`
with a function which should be called to determine the default base IRI.
See [section 5.1.4 of RFC 3987](https://tools.ietf.org/html/rfc3986#page-29)
"""
# This case runs at compile time: a plain configured value is frozen into the
# module; only the {mod, fun} form is re-evaluated on every call.
case Application.get_env(:rdf, :default_base_iri) do
{mod, fun} ->
def default_base(), do: apply(unquote(mod), unquote(fun), [])
default_base ->
@default_base default_base
def default_base, do: @default_base
end
@doc """
Creates a `RDF.IRI`.
"""
@spec new(coercible) :: t
def new(iri)
def new(iri) when is_binary(iri), do: %__MODULE__{value: iri}
def new(term) when maybe_ns_term(term), do: Namespace.resolve_term!(term)
def new(%URI{} = uri), do: uri |> URI.to_string() |> new
def new(%__MODULE__{} = iri), do: iri
@doc """
Creates a `RDF.IRI`, but checks if the given IRI is valid.
If the given IRI is not valid a `RDF.IRI.InvalidError` is raised.
see `valid?/1`
"""
@spec new!(coercible) :: t
def new!(iri)
def new!(iri) when is_binary(iri), do: iri |> valid!() |> new()
# since terms of a namespace are already validated
def new!(term) when maybe_ns_term(term), do: new(term)
def new!(%URI{} = uri), do: uri |> valid!() |> new()
def new!(%__MODULE__{} = iri), do: valid!(iri)
@doc """
Appends a String to a `RDF.IRI`.
## Example
iex> ~I<http://example.com/> |> RDF.IRI.append("foo")
~I<http://example.com/foo>
iex> EX.foo |> RDF.IRI.append("bar")
EX.foobar
iex> EX.Foo |> RDF.IRI.append("bar")
RDF.iri(EX.Foobar)
"""
@spec append(t | module, String.t()) :: t
def append(iri, string)
def append(%__MODULE__{} = iri, string) do
# Plain binary concatenation — no path-separator handling is performed.
%__MODULE__{iri | value: iri.value <> string}
end
def append(term, string) when maybe_ns_term(term) do
term |> Namespace.resolve_term!() |> append(string)
end
@doc """
Coerces an IRI serving as a base IRI.
As opposed to `new/1` this also accepts bare `RDF.Vocabulary.Namespace` modules
and uses the base IRI from their definition.
"""
@spec coerce_base(coercible) :: t
def coerce_base(base_iri)
def coerce_base(module) when maybe_ns_term(module) do
# Vocabulary namespaces expose their base IRI via __base_iri__/0; anything
# else falls through to the regular term resolution in new/1.
if RDF.Vocabulary.Namespace.vocabulary_namespace?(module) do
apply(module, :__base_iri__, [])
|> new()
else
new(module)
end
end
def coerce_base(base_iri), do: new(base_iri)
@doc """
Returns the given value unchanged if it's a valid IRI, otherwise raises an exception.
## Examples
iex> RDF.IRI.valid!("http://www.example.com/foo")
"http://www.example.com/foo"
iex> RDF.IRI.valid!(RDF.IRI.new("http://www.example.com/foo"))
RDF.IRI.new("http://www.example.com/foo")
iex> RDF.IRI.valid!("not an iri")
** (RDF.IRI.InvalidError) Invalid IRI: "not an iri"
"""
@spec valid!(coercible) :: coercible
def valid!(iri) do
if not valid?(iri), do: raise(RDF.IRI.InvalidError, "Invalid IRI: #{inspect(iri)}")
iri
end
@doc """
Checks if the given IRI is valid.
Note: This currently checks only if the given IRI is absolute.
## Examples
iex> RDF.IRI.valid?("http://www.example.com/foo")
true
iex> RDF.IRI.valid?("not an iri")
false
"""
@spec valid?(coercible) :: boolean
# TODO: Provide a more elaborate validation
def valid?(iri), do: absolute?(iri)
@doc """
Checks if the given value is an absolute IRI.
An absolute IRI is defined in [RFC3987](http://www.ietf.org/rfc/rfc3987.txt)
containing a scheme along with a path and optional query and fragment segments.
"""
@spec absolute?(any) :: boolean
def absolute?(iri)
# For binaries, "absolute" means "has a scheme prefix" per @scheme_regex.
def absolute?(value) when is_binary(value), do: not is_nil(scheme(value))
def absolute?(%__MODULE__{value: value}), do: absolute?(value)
def absolute?(%URI{scheme: nil}), do: false
def absolute?(%URI{scheme: _}), do: true
def absolute?(term) when maybe_ns_term(term) do
case Namespace.resolve_term(term) do
{:ok, iri} -> absolute?(iri)
_ -> false
end
end
def absolute?(_), do: false
@doc """
Resolves a relative IRI against a base IRI.
as specified in [section 5.1 Establishing a Base URI of RFC3986](http://tools.ietf.org/html/rfc3986#section-5.1).
Only the basic algorithm in [section 5.2 of RFC3986](http://tools.ietf.org/html/rfc3986#section-5.2)
is used; neither Syntax-Based Normalization nor Scheme-Based Normalization are performed.
Characters additionally allowed in IRI references are treated in the same way that unreserved
characters are treated in URI references, per [section 6.5 of RFC3987](http://tools.ietf.org/html/rfc3987#section-6.5)
If the given `base` is not an absolute IRI `nil` is returned.
"""
@spec absolute(coercible, coercible) :: t | nil
def absolute(iri, base) do
cond do
# Already absolute: the base is irrelevant.
absolute?(iri) -> new(iri)
# A relative IRI can only be resolved against an absolute base.
not absolute?(base) -> nil
true -> merge(base, iri)
end
end
@doc """
Merges two IRIs.
This function merges two IRIs as per
[RFC 3986, section 5.2](https://tools.ietf.org/html/rfc3986#section-5.2).
"""
@spec merge(coercible, coercible) :: t
def merge(base, rel) do
# Delegates the RFC 3986 merge algorithm to Elixir's URI module.
base
|> parse()
|> URI.merge(parse(rel))
|> new()
end
@doc """
Returns the scheme of the given IRI
If the given string is not a valid absolute IRI, `nil` is returned.
## Examples
iex> RDF.IRI.scheme("http://www.example.com/foo")
"http"
iex> RDF.IRI.scheme("not an iri")
nil
"""
@spec scheme(coercible) :: String.t() | nil
def scheme(iri)
def scheme(%__MODULE__{value: value}), do: scheme(value)
def scheme(%URI{scheme: scheme}), do: scheme
def scheme(term) when maybe_ns_term(term), do: Namespace.resolve_term!(term) |> scheme()
def scheme(iri) when is_binary(iri) do
# Regex.run returns nil when no scheme is present, so the `with`
# falls through and returns nil.
with [_, scheme] <- Regex.run(@scheme_regex, iri) do
scheme
end
end
@doc """
Parses an IRI into its components and returns them as an `URI` struct.
"""
@spec parse(coercible) :: URI.t()
def parse(iri)
def parse(iri) when is_binary(iri), do: URI.parse(iri)
def parse(term) when maybe_ns_term(term), do: Namespace.resolve_term!(term) |> parse()
def parse(%__MODULE__{value: value}), do: URI.parse(value)
def parse(%URI{} = uri), do: uri
@doc """
Checks whether `iri` lies in `namespace`.
## Examples
iex> RDF.IRI.in_namespace?(~I<http://example.com/foo>, ~I<http://example.com/>)
true
iex> RDF.IRI.in_namespace?(~I<http://example.com/foo/bar>, "http://example.com/")
true
iex> RDF.IRI.in_namespace?(~I<http://example.com/#foo>, EX)
true
"""
@spec in_namespace?(t | module, String.t() | t | module) :: boolean
def in_namespace?(iri, namespace)
# Namespace membership is a plain string-prefix test.
def in_namespace?(%__MODULE__{value: value}, namespace) when is_binary(namespace),
do: String.starts_with?(value, namespace)
def in_namespace?(term, namespace) when maybe_ns_term(term),
do: term |> Namespace.resolve_term!() |> in_namespace?(namespace)
def in_namespace?(iri, namespace) when maybe_ns_term(namespace),
do: in_namespace?(iri, coerce_base(namespace))
def in_namespace?(iri, %__MODULE__{} = namespace),
do: in_namespace?(iri, __MODULE__.to_string(namespace))
# NOTE(review): with this catch-all disabled, unsupported argument
# combinations raise FunctionClauseError instead of returning false.
# def in_namespace?(_, _), do: false
@doc """
Tests for value equality of IRIs.
Returns `nil` when the given arguments are not comparable as IRIs.
see <https://www.w3.org/TR/rdf-concepts/#section-Graph-URIref>
"""
@spec equal_value?(t | RDF.Literal.t() | atom, t | RDF.Literal.t() | URI.t() | atom) ::
boolean | nil
def equal_value?(left, right)
def equal_value?(%__MODULE__{value: left}, %__MODULE__{value: right}),
do: left == right
def equal_value?(%__MODULE__{} = left, %RDF.Literal{} = right),
do: RDF.Literal.equal_value?(right, left)
def equal_value?(%__MODULE__{value: left}, %URI{} = right),
do: left == URI.to_string(right)
# Swap the arguments so namespace terms are always handled on the right.
def equal_value?(left, %__MODULE__{} = right) when maybe_ns_term(left),
do: equal_value?(right, left)
def equal_value?(%__MODULE__{} = left, right) when maybe_ns_term(right) do
case Namespace.resolve_term(right) do
{:ok, iri} -> equal_value?(left, iri)
_ -> nil
end
end
# Not comparable as IRIs: three-valued logic, nil rather than false.
def equal_value?(_, _),
do: nil
@doc """
Returns the given IRI as a string.
Note that this function can also handle `RDF.Vocabulary.Namespace` terms.
## Examples
iex> RDF.IRI.to_string RDF.IRI.new("http://example.com/#foo")
"http://example.com/#foo"
iex> RDF.IRI.to_string EX.foo
"http://example.com/#foo"
iex> RDF.IRI.to_string EX.Foo
"http://example.com/#Foo"
"""
@spec to_string(t | module) :: String.t()
def to_string(iri)
def to_string(%__MODULE__{value: value}),
do: value
def to_string(term) when maybe_ns_term(term),
do: term |> new() |> __MODULE__.to_string()
# Makes IRIs work with string interpolation and Kernel.to_string/1.
defimpl String.Chars do
def to_string(iri), do: RDF.IRI.to_string(iri)
end
end
|
lib/rdf/iri.ex
| 0.831212
| 0.608332
|
iri.ex
|
starcoder
|
defmodule FutureMadeConcerts.Duration do
  @moduledoc """
  Provides functions to convert millisecond durations to different formats.
  """
  @minute :timer.minutes(1)
  @hour :timer.hours(1)
  import FutureMadeConcerts.Gettext
  @type milliseconds :: pos_integer()

  @doc """
  Given a duration in milliseconds, returns a string with the duration formatted
  as hours, minutes and seconds, omitting units where appropriate.
  iex> milliseconds = :timer.seconds(5)
  iex> FutureMadeConcerts.Duration.hms(milliseconds)
  "0:05"
  iex> milliseconds = :timer.seconds(61)
  iex> FutureMadeConcerts.Duration.hms(milliseconds)
  "1:01"
  iex> milliseconds = :timer.hours(2)
  iex> FutureMadeConcerts.Duration.hms(milliseconds)
  "2:00:00"
  """
  @spec hms(milliseconds()) :: String.t()
  def hms(milliseconds) do
    milliseconds
    |> System.convert_time_unit(:millisecond, :second)
    |> clock_format()
  end

  @doc """
  Given a duration in milliseconds, returns a localized, human-readable
  representation of that duration.
  iex> FutureMadeConcerts.Duration.human(100)
  "Less than a minute"
  Durations are rounded to the minute:
  iex> milliseconds = :timer.seconds(61)
  iex> FutureMadeConcerts.Duration.human(milliseconds)
  "1 minute"
  iex> milliseconds = :timer.seconds(95)
  iex> FutureMadeConcerts.Duration.human(milliseconds)
  "2 minutes"
  """
  @spec human(milliseconds()) :: String.t()
  def human(milliseconds) when milliseconds < @minute do
    gettext("Less than a minute")
  end

  def human(milliseconds) when milliseconds < @hour do
    minutes = milliseconds |> rounded_seconds() |> rounded_minutes()
    ngettext("1 minute", "%{count} minutes", minutes)
  end

  def human(milliseconds) when milliseconds >= @hour do
    minutes = milliseconds |> rounded_seconds() |> rounded_minutes()
    hours_fragment = ngettext("1 hour", "%{count} hours", div(minutes, 60))

    case rem(minutes, 60) do
      0 ->
        hours_fragment

      leftover ->
        minutes_fragment = ngettext("1 minute", "%{count} minutes", leftover)
        gettext("%{hours} and %{minutes}", %{hours: hours_fragment, minutes: minutes_fragment})
    end
  end

  # Milliseconds -> whole seconds, rounding up only when more than 500 ms remain.
  defp rounded_seconds(milliseconds) do
    whole = System.convert_time_unit(milliseconds, :millisecond, :second)
    if rem(milliseconds, 1000) > 500, do: whole + 1, else: whole
  end

  # Seconds -> whole minutes, rounding up only when more than 30 s remain.
  defp rounded_minutes(seconds) do
    whole = div(seconds, 60)
    if rem(seconds, 60) > 30, do: whole + 1, else: whole
  end

  # Under a minute renders as "0:SS".
  defp clock_format(seconds) when seconds <= 59, do: "0:#{pad(seconds)}"
  defp clock_format(seconds), do: clock_format(div(seconds, 60), rem(seconds, 60))

  # "M:SS" under an hour, otherwise "H:MM:SS".
  defp clock_format(minutes, seconds) when minutes <= 59, do: "#{minutes}:#{pad(seconds)}"

  defp clock_format(minutes, seconds) do
    "#{div(minutes, 60)}:#{pad(rem(minutes, 60))}:#{pad(seconds)}"
  end

  # Left-pads a non-negative integer to two digits.
  defp pad(value), do: value |> Integer.to_string() |> String.pad_leading(2, "0")
end
|
lib/future_made_concerts/duration.ex
| 0.779867
| 0.459743
|
duration.ex
|
starcoder
|
defmodule ExCell.Base do
@moduledoc false
# Injects the cell API (name/class/params/container helpers) into the module
# that calls `use`; all defs below are created inside the caller at compile time.
defmacro __using__(opts \\ []) do
quote do
import ExCell.View
# Adapter and namespace are fixed at `use` time from the given options.
@adapter unquote(opts[:adapter])
@namespace unquote(opts[:namespace])
# Exposes the configured adapter (nil when not given).
def __adapter__, do: @adapter
@doc """
Returns the name of the module as a string. Module namespaces are replaced
by a dash.
## Example
iex(0)> AvatarCell.name()
"AvatarCell"
iex(1)> User.AvatarCell.name()
"User-AvatarCell"
"""
def name, do: ExCell.relative_name(__MODULE__, @namespace)
@doc """
Generates the CSS class name based on the cell name. Can be overriden
to pre- or postfix the class name or to create a distinct class name with
CSS modules.
## Examples
iex(0)> AvatarCell.class_name()
"AvatarCell"
"""
def class_name, do: name()
@doc """
Generates the HTML attribute name based on the cell name. Can be overriden
to pre- or postfix the attribute name.
## Examples
iex(0)> AvatarCell.cell_name()
"AvatarCell"
"""
def cell_name, do: name()
@doc false
def params, do: %{}
@doc """
Combines the parameters set on the cell with custom parameters for a
specific instance
## Examples
iex(0)> AvatarCell.params
%{hello: "world"}
iex(0)> AvatarCell.params(%{foo: "bar"})
%{hello: "world", foo: "bar"}
"""
def params(values), do: Map.merge(params(), values)
@doc """
Returns the container of a cell as a Phoenix.Tag.
iex(0)> Phoenix.HTML.safe_to_string(AvatarCell.container)
"<div class=\\"AvatarCell\\" data-cell=\\"AvatarCell\\" data-cell-params=\\"{}\\">"
"""
def container, do: container(%{}, [], do: nil)
@doc """
Returns the container of a cell as a Phoenix.Tag with it's content.
iex(0)> Phoenix.HTML.safe_to_string(AvatarCell.container(do: "Hello"))
"<div class=\\"AvatarCell\\" data-cell=\\"AvatarCell\\" data-cell-params=\\"{}\\">Hello</div>"
"""
def container(do: content), do: container(%{}, [], do: content)
def container(callback) when is_function(callback), do: container(%{}, [], callback)
@doc """
Returns the container of a cell as a Phoenix.Tag with options.
## Options
Adds attributes to the HTML tag of the cell, the following options can be
used to extend certain funtionality of the cell:
- `:class` - adds a custom class name to the cell class
- `:tag` - sets the tagname of the cell, defaults to `:div`
- `:data` - adds data attributes to the default `data-cell` and `data-cell-params` data attributes
## Examples
iex(0)> Phoenix.HTML.safe_to_string(AvatarCell.container(tag: :a, data: [foo: "bar"], class: "Moo", href: "/"))
"<a class=\\"AvatarCell Moo\\" data-foo="bar" data-cell=\\"AvatarCell\\" data-cell-params=\\"{}\\">"
"""
def container(options) when is_list(options), do: container(%{}, options, do: nil)
def container(options, content) when is_list(options), do: container(%{}, options, content)
@doc """
Returns the container of a cell as a Phoenix.Tag with attributes added to
the data-cell-params attribute. This is used to add parameters to `cell-js` cells.
## Examples
iex(0)> Phoenix.HTML.safe_to_string(AvatarCell.container(%{ foo: "bar" }))
"<a class=\\"AvatarCell\\" data-cell=\\"AvatarCell\\" data-cell-params=\\"{"foo":"bar"}">"
"""
def container(%{} = params), do: container(params, [], do: nil)
def container(%{} = params, do: content), do: container(params, [], do: content)
def container(%{} = params, callback) when is_function(callback),
do: container(params, [], callback)
def container(%{} = params, options) when is_list(options),
do: container(params, options, do: nil)
# All container/0..2 clauses funnel into this one, which delegates to
# ExCell.container/5 with a fresh UUID per rendered container.
def container(%{} = params, options, content),
do: ExCell.container(__MODULE__, UUID.uuid4(), params, options, content)
# Cells may override these to customize naming and default params.
defoverridable class_name: 0, cell_name: 0, params: 0
end
end
end
|
lib/ex_cell/base.ex
| 0.85817
| 0.41185
|
base.ex
|
starcoder
|
defmodule Annex.Layer.Dropout do
@moduledoc """
Given a `frequency` the dropout layer randomly drops an input at the `frequency`.
"""
use Annex.Debug, debug: true
alias Annex.{
Data,
Layer,
Layer.Backprop,
Layer.Dropout,
Utils
}
use Layer
@type t :: %__MODULE__{
frequency: float()
}
@type data :: Data.data()
defstruct [:frequency]
# Guard-safe check: frequency must be a float in the closed range [0.0, 1.0].
defguard is_frequency(x) when is_float(x) and x >= 0.0 and x <= 1.0
@impl Layer
# NOTE(review): LayerConfig and AnnexError are not in the alias list above —
# presumably brought into scope by `use Layer`; confirm.
@spec init_layer(LayerConfig.t(Dropout)) :: t()
def init_layer(%LayerConfig{} = cfg) do
cfg
|> LayerConfig.details()
|> Map.fetch(:frequency)
|> case do
# Valid configuration: build the layer struct.
{:ok, frequency} when is_frequency(frequency) ->
%Dropout{frequency: frequency}
# :frequency present but not a float in [0.0, 1.0].
{:ok, not_frequency} ->
raise %AnnexError{
message: "Dropout.build/1 requires a :frequency that is a float between 0.0 and 1.0",
details: [
invalid_frequency: not_frequency,
reason: :invalid_frequency_value
]
}
# :frequency missing from the config details entirely.
:error ->
raise %AnnexError{
message: "Dropout.build/1 requires a :frequency that is a float between 0.0 and 1.0",
details: [
reason: {:key_not_found, :frequency}
]
}
end
end
@spec frequency(t()) :: float()
def frequency(%Dropout{frequency: f}), do: f
@impl Layer
@spec feedforward(t(), data()) :: {t(), data()}
def feedforward(%Dropout{} = layer, inputs) do
# The layer itself is stateless across calls; only the inputs are masked.
{layer, drop(inputs, frequency(layer))}
end
@impl Layer
# Dropout has no trainable parameters, so backprop passes the error through unchanged.
@spec backprop(t(), data(), Backprop.t()) :: {t(), data(), Backprop.t()}
def backprop(%Dropout{} = dropout, error, backprop), do: {dropout, error, backprop}
# Applies the random mask element-wise via the Data type's :map operation.
defp drop(inputs, frequency) do
data_type = Data.infer_type(inputs)
dropper = fn value -> zeroize_by_frequency(frequency, value) end
Data.apply_op(data_type, inputs, :map, [dropper])
end
# NOTE(review): surviving values are passed through unscaled — there is no
# 1/(1 - frequency) compensation here; confirm that is intended.
defp zeroize_by_frequency(frequency, value) do
if Utils.random_float() <= frequency, do: 0.0, else: value
end
end
|
lib/annex/layer/dropout.ex
| 0.925281
| 0.599749
|
dropout.ex
|
starcoder
|
defmodule AWS.Transcoder do
@moduledoc """
AWS Elastic Transcoder Service
The AWS Elastic Transcoder Service.
"""
@doc """
The CancelJob operation cancels an unfinished job.
<note> You can only cancel a job that has a status of `Submitted`. To
prevent a pipeline from starting to process a job while you're getting the
job identifier, use `UpdatePipelineStatus` to temporarily pause the
pipeline.
</note>
"""
def cancel_job(client, id, input, options \\ []) do
  # DELETE on the job resource; the service answers 202 Accepted.
  request(client, :delete, "/2012-09-25/jobs/#{URI.encode(id)}", [], input, options, 202)
end
@doc """
When you create a job, Elastic Transcoder returns JSON data that includes
the values that you specified plus information about the job that is
created.
If you have specified more than one output for your jobs (for example, one
output for the Kindle Fire and another output for the Apple iPhone 4s), you
currently must use the Elastic Transcoder API to list the jobs (as opposed
to the AWS Console).
"""
def create_job(client, input, options \\ []) do
  # POST to the jobs collection; the service answers 201 Created.
  request(client, :post, "/2012-09-25/jobs", [], input, options, 201)
end
@doc """
The CreatePipeline operation creates a pipeline with settings that you
specify.
"""
def create_pipeline(client, input, options \\ []) do
  request(client, :post, "/2012-09-25/pipelines", [], input, options, 201)
end
@doc """
The CreatePreset operation creates a preset with settings that you specify.
<important> Elastic Transcoder checks the CreatePreset settings to ensure
that they meet Elastic Transcoder requirements and to determine whether
they comply with H.264 standards. If your settings are not valid for
Elastic Transcoder, Elastic Transcoder returns an HTTP 400 response
(`ValidationException`) and does not create the preset. If the settings are
valid for Elastic Transcoder but aren't strictly compliant with the H.264
standard, Elastic Transcoder creates the preset and returns a warning
message in the response. This helps you determine whether your settings
comply with the H.264 standard while giving you greater flexibility with
respect to the video that Elastic Transcoder produces.
</important> Elastic Transcoder uses the H.264 video-compression format.
For more information, see the International Telecommunication Union
publication *Recommendation ITU-T H.264: Advanced video coding for generic
audiovisual services*.
"""
def create_preset(client, input, options \\ []) do
  request(client, :post, "/2012-09-25/presets", [], input, options, 201)
end
@doc """
The DeletePipeline operation removes a pipeline.
You can only delete a pipeline that has never been used or that is not
currently in use (doesn't contain any active jobs). If the pipeline is
currently in use, `DeletePipeline` returns an error.
"""
def delete_pipeline(client, id, input, options \\ []) do
  request(client, :delete, "/2012-09-25/pipelines/#{URI.encode(id)}", [], input, options, 202)
end
@doc """
The DeletePreset operation removes a preset that you've added in an AWS
region.
<note> You can't delete the default presets that are included with Elastic
Transcoder.
</note>
"""
def delete_preset(client, id, input, options \\ []) do
  request(client, :delete, "/2012-09-25/presets/#{URI.encode(id)}", [], input, options, 202)
end
@doc """
The ListJobsByPipeline operation gets a list of the jobs currently in a
pipeline.
Elastic Transcoder returns all of the jobs currently in the specified
pipeline. The response body contains one element for each job that
satisfies the search criteria.
"""
def list_jobs_by_pipeline(client, pipeline_id, options \\ []) do
  # Read-only GET: no request body, no expected status pinned.
  request(client, :get, "/2012-09-25/jobsByPipeline/#{URI.encode(pipeline_id)}", [], nil, options, nil)
end
@doc """
The ListJobsByStatus operation gets a list of jobs that have a specified
status. The response body contains one element for each job that satisfies
the search criteria.
"""
def list_jobs_by_status(client, status, options \\ []) do
  request(client, :get, "/2012-09-25/jobsByStatus/#{URI.encode(status)}", [], nil, options, nil)
end
@doc """
The ListPipelines operation gets a list of the pipelines associated with
the current AWS account.
"""
def list_pipelines(client, options \\ []) do
  request(client, :get, "/2012-09-25/pipelines", [], nil, options, nil)
end
@doc """
The ListPresets operation gets a list of the default presets included with
Elastic Transcoder and the presets that you've added in an AWS region.
"""
def list_presets(client, options \\ []) do
  request(client, :get, "/2012-09-25/presets", [], nil, options, nil)
end
@doc """
The ReadJob operation returns detailed information about a job.
"""
def read_job(client, id, options \\ []) do
  request(client, :get, "/2012-09-25/jobs/#{URI.encode(id)}", [], nil, options, nil)
end
@doc """
The ReadPipeline operation gets detailed information about a pipeline.
"""
def read_pipeline(client, id, options \\ []) do
  request(client, :get, "/2012-09-25/pipelines/#{URI.encode(id)}", [], nil, options, nil)
end
@doc """
The ReadPreset operation gets detailed information about a preset.
"""
def read_preset(client, id, options \\ []) do
  request(client, :get, "/2012-09-25/presets/#{URI.encode(id)}", [], nil, options, nil)
end
@doc """
The TestRole operation tests the IAM role used to create the pipeline.
The `TestRole` action lets you determine whether the IAM role you are using
has sufficient permissions to let Elastic Transcoder perform tasks
associated with the transcoding process. The action attempts to assume the
specified IAM role, checks read access to the input and output buckets, and
tries to send a test notification to Amazon SNS topics that you specify.
"""
def test_role(client, input, options \\ []) do
  # Unlike the read operations, this POSTs a body and expects exactly 200.
  request(client, :post, "/2012-09-25/roleTests", [], input, options, 200)
end
@doc """
Use the `UpdatePipeline` operation to update settings for a pipeline.
<important> When you change pipeline settings, your changes take effect
immediately. Jobs that you have already submitted and that Elastic
Transcoder has not started to process are affected in addition to jobs that
you submit after you change settings.
</important>
"""
def update_pipeline(client, id, input, options \\ []) do
  path = "/2012-09-25/pipelines/#{URI.encode(id)}"
  request(client, :put, path, [], input, options, 200)
end
@doc """
With the UpdatePipelineNotifications operation, you can update Amazon
Simple Notification Service (Amazon SNS) notifications for a pipeline.
When you update notifications for a pipeline, Elastic Transcoder returns
the values that you specified in the request.
"""
def update_pipeline_notifications(client, id, input, options \\ []) do
  path = "/2012-09-25/pipelines/#{URI.encode(id)}/notifications"
  request(client, :post, path, [], input, options, nil)
end
@doc """
The UpdatePipelineStatus operation pauses or reactivates a pipeline, so
that the pipeline stops or restarts the processing of jobs.
Changing the pipeline status is useful if you want to cancel one or more
jobs. You can't cancel jobs after Elastic Transcoder has started processing
them; if you pause the pipeline to which you submitted the jobs, you have
more time to get the job IDs for the jobs that you want to cancel, and to
send a `CancelJob` request.
"""
def update_pipeline_status(client, id, input, options \\ []) do
  path = "/2012-09-25/pipelines/#{URI.encode(id)}/status"
  request(client, :post, path, [], input, options, nil)
end
# Shared request plumbing: stamps the client with the service name, builds
# the absolute URL, signs the headers with SigV4 and dispatches the call.
defp request(client, method, url, headers, input, options, success_status_code) do
  client = %{client | service: "elastictranscoder"}
  host = get_host("elastictranscoder", client)
  full_url = get_url(host, url, client)

  # Host and Content-Type are prepended so they take part in signing.
  base_headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"} | headers
  ]

  payload = encode_payload(input)
  signed_headers = AWS.Request.sign_v4(client, method, full_url, base_headers, payload)
  perform_request(method, full_url, payload, signed_headers, options, success_status_code)
end
# Clause used when no explicit success status is expected: any of the
# statuses 200/202/204 counts as success; 200 with an empty body returns
# only the raw response (nothing to decode).
defp perform_request(method, url, payload, headers, options, nil) do
  case HTTPoison.request(method, url, payload, headers, options) do
    {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
      {:ok, response}
    {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
      {:ok, Poison.Parser.parse!(body), response}
    {:ok, response=%HTTPoison.Response{status_code: 202, body: body}} ->
      {:ok, Poison.Parser.parse!(body), response}
    {:ok, response=%HTTPoison.Response{status_code: 204, body: body}} ->
      {:ok, Poison.Parser.parse!(body), response}
    {:ok, _response=%HTTPoison.Response{body: body}} ->
      # Any other status is treated as a service error; surface its message.
      # NOTE(review): parse! raises if the error body is not valid JSON
      # (e.g. empty) — confirm that crashing here is acceptable.
      reason = Poison.Parser.parse!(body)["message"]
      {:error, reason}
    {:error, %HTTPoison.Error{reason: reason}} ->
      # Transport-level failure (DNS, timeout, ...).
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Clause used when the caller pinned an exact expected status code
# (e.g. 200 for TestRole/UpdatePipeline). An empty success body yields
# `{:ok, nil, response}`; any other status is decoded as an error message.
defp perform_request(method, url, payload, headers, options, success_status_code) do
  case HTTPoison.request(method, url, payload, headers, options) do
    {:ok, response=%HTTPoison.Response{status_code: ^success_status_code, body: ""}} ->
      {:ok, nil, response}
    {:ok, response=%HTTPoison.Response{status_code: ^success_status_code, body: body}} ->
      {:ok, Poison.Parser.parse!(body), response}
    {:ok, _response=%HTTPoison.Response{body: body}} ->
      # NOTE(review): parse! raises on a non-JSON error body — confirm
      # that crashing here is acceptable.
      reason = Poison.Parser.parse!(body)["message"]
      {:error, reason}
    {:error, %HTTPoison.Error{reason: reason}} ->
      # Transport-level failure (DNS, timeout, ...).
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Builds the request host: "localhost" for the special "local" region,
# otherwise "<prefix>.<region>.<endpoint>".
defp get_host(endpoint_prefix, client) do
  case client.region do
    "local" -> "localhost"
    region -> "#{endpoint_prefix}.#{region}.#{client.endpoint}"
  end
end
# Assembles the absolute URL. Note: a trailing "/" is always appended to
# the request path.
defp get_url(host, url, %{proto: proto, port: port}) do
  Enum.join([proto, "://", host, ":", port, url, "/"])
end
# JSON-encodes the request body; a nil input becomes an empty payload.
defp encode_payload(nil), do: ""
defp encode_payload(input), do: Poison.Encoder.encode(input, [])
end
|
lib/aws/transcoder.ex
| 0.843815
| 0.416025
|
transcoder.ex
|
starcoder
|
defmodule Kazan.Codegen.Models do
  @moduledoc false
  # Macros for generating client code from OAI specs.
  require EEx
  alias Kazan.Codegen.Models.ModelDesc

  @doc """
  Generates structs for all the data definitions in an OAPI spec.
  This will read the provided file at compile time and use it to generate a
  struct for each data definition. The structs will be named as they are in the
  provided file.
  It will also generate some model description data that can be used to write
  serializers/deserializers for each of the structs.
  """
  defmacro from_spec(spec_file) do
    models = parse_models(spec_file)
    resource_id_index = build_resource_id_index(models)

    # One `defmodule` AST form per model, each carrying generated docs and
    # a defstruct with the model's property names.
    spec_forms =
      for {module_name, desc} <- models do
        property_names = Map.keys(desc.properties)
        documentation = model_docs(desc.id, desc.description, desc.properties)

        quote do
          defmodule unquote(module_name) do
            @moduledoc unquote(documentation)
            defstruct unquote(property_names)
          end
        end
      end

    quote do
      # Marking the spec as an external resource recompiles the caller
      # whenever the spec file changes.
      Module.put_attribute(__MODULE__, :external_resource, unquote(spec_file))
      unquote_splicing(spec_forms)
      # Function returns a map of module name -> ModelDesc
      defp model_descs do
        unquote(Macro.escape(models))
      end

      # Returns a map of ResourceId to module
      defp resource_id_index do
        unquote(Macro.escape(resource_id_index))
      end
    end
  end

  @doc """
  Builds a module name atom from an OAI model name.
  """
  @spec module_name(String.t(), Keyword.t()) :: atom | nil
  def module_name(model_name, opts \\ []) do
    components = module_name_components(model_name)

    if Keyword.get(opts, :unsafe, false) do
      Module.concat(components)
    else
      # safe_concat raises unless the atoms already exist; return nil so
      # callers can treat unknown models as "not found" instead of crashing.
      try do
        Module.safe_concat(components)
      rescue
        ArgumentError ->
          nil
      end
    end
  end

  @doc """
  Parses a $ref for a definition into a models module name.
  """
  @spec definition_ref_to_module_name(String.t()) :: nil | :atom
  def definition_ref_to_module_name(nil), do: nil

  def definition_ref_to_module_name("#/definitions/" <> model_def) do
    module_name(model_def)
  end

  @spec parse_models(String.t()) :: [ModelDesc.t()]
  defp parse_models(spec_file) do
    definitions =
      spec_file
      |> File.read!()
      |> Poison.decode!()
      |> Map.get("definitions")

    # First we need to go over all of the definitions and call module_name
    # on their names w/ unsafe. This ensures that the atoms for each models module name are defined, and lets us use the safe module_name call everywhere else...
    Enum.each(definitions, fn {name, _} -> module_name(name, unsafe: true) end)

    # Most of the top-level definitions in the kube spec are models.
    # However, there are a few that are used in $ref statements to define common
    # property types instead.
    # We can tell these apart by whether or not they have "properties" or not.
    is_model = fn {_, model} -> Map.has_key?(model, "properties") end

    refs =
      definitions
      |> Enum.reject(is_model)
      |> Enum.map(fn {name, data} -> {module_name(name), data} end)
      |> Enum.into(%{})

    definitions
    |> Enum.filter(is_model)
    |> Enum.map(&ModelDesc.from_oai_desc(&1, refs))
    |> Enum.map(fn desc -> {desc.module_name, desc} end)
    |> Enum.into(%{})
  end

  # Compile-time EEx template that renders the @moduledoc for each generated
  # model module.
  EEx.function_from_string(
    :defp,
    :model_docs,
    """
    <%= model_description %>
    OpenAPI Definition: `<%= id %>`
    ### Properties
    <%= for {name, property} <- properties do %>
    * `<%= name %>` <%= if doc = property_type_doc(property) do %>:: <%= doc %> <% end %>
    * <%= process_description(property.description) %> <% end %>
    """,
    [:id, :model_description, :properties]
  )

  # Creates a property type doc string.
  @spec property_type_doc(Property.t()) :: String.t() | nil
  defp property_type_doc(property) do
    if property.ref do
      "`#{doc_ref(property.ref)}`"
    else
      case property.type do
        "array" ->
          "[ #{property_type_doc(property.items)} ]"

        "integer" ->
          "`Integer`"

        "number" ->
          "`Float`"

        "object" ->
          "`Map`"

        "string" ->
          case property.format do
            "date" -> "`Date`"
            "date-time" -> "`DateTime`"
            _ -> "`String`"
          end

        "boolean" ->
          "`Boolean`"
      end
    end
  end

  # Pre-processes a fields description.
  # Useful for making actual links out of HTTP links etc.
  @spec process_description(String.t()) :: String.t()
  defp process_description(nil), do: ""

  defp process_description(desc) do
    String.replace(
      desc,
      ~r{ (more info): (https?://.*)(\s|$)}i,
      " [\\1](\\2)."
    )
  end

  # Strips the `Elixir.` prefix from an atom for use in documentation.
  # Atoms will not be linked if they include the Elixir. prefix.
  defp doc_ref(str) do
    str |> Atom.to_string() |> String.replace(~r/^Elixir./, "")
  end

  # The Kube OAI specs have some extremely long namespace prefixes on them.
  # These really long names make for a pretty ugly API in Elixir, so we chop off
  # some common prefixes.
  # We also need to categorise things into API specific models or models that
  # live in the models module.
  # NOTE(review): the spec below declares an improper list, but the clauses
  # build ordinary proper lists with `++` — confirm and consider fixing the
  # spec.
  @spec module_name_components(String.t()) ::
          nonempty_improper_list(atom, String.t())
  defp module_name_components(name) do
    to_components = fn str ->
      str |> String.split(".") |> Enum.map(&titlecase_once/1)
    end

    case name do
      # Deprecated
      "io.k8s.kubernetes.pkg.api." <> rest ->
        [Kazan.Apis] ++ to_components.(rest)

      # Deprecated
      "io.k8s.kubernetes.pkg.apis." <> rest ->
        [Kazan.Apis] ++ to_components.(rest)

      "io.k8s.api." <> rest ->
        [Kazan.Apis] ++ to_components.(rest)

      "io.k8s.apimachinery.pkg.apis." <> rest ->
        [Kazan.Models.Apimachinery] ++ to_components.(rest)

      "io.k8s.apimachinery.pkg." <> rest ->
        [Kazan.Models.Apimachinery] ++ to_components.(rest)

      "io.k8s.kube-aggregator.pkg.apis." <> rest ->
        [Kazan.Models.KubeAggregator] ++ to_components.(rest)

      "io.k8s.apiextensions-apiserver.pkg.apis." <> rest ->
        [Kazan.Models.ApiextensionsApiserver] ++ to_components.(rest)
    end
  end

  @spec build_resource_id_index(%{atom => ModelDesc.t()}) :: %{
          ResourceId => atom
        }
  defp build_resource_id_index(model_map) do
    model_map
    |> Map.values()
    |> Enum.flat_map(fn model_desc ->
      Enum.map(model_desc.resource_ids, fn resource_id ->
        {resource_id, model_desc.module_name}
      end)
    end)
    |> Enum.into(%{})
  end

  # Uppercases the first character of str
  # This is different from capitalize, in that it leaves the rest of the string
  # alone.
  defp titlecase_once(str) do
    first_letter = String.first(str)
    String.replace_prefix(str, first_letter, String.upcase(first_letter))
  end
end
|
lib/kazan/codegen/models.ex
| 0.762026
| 0.442697
|
models.ex
|
starcoder
|
defmodule IslandsEngine.Island do
  @moduledoc """
  An island is a set of coordinates that can be placed in a board
  """

  # NOTE(review): this @doc precedes attributes/defstruct rather than a named
  # function; consider folding it into @moduledoc or the @typedoc below.
  @doc """
  A struct that wraps all the data of an island: its coordinates and its hit_coordinates
  """
  @enforce_keys [:coordinates, :hit_coordinates]
  defstruct [:coordinates, :hit_coordinates]

  alias IslandsEngine.{Coordinate}

  @typedoc """
  A struct that wraps the coordinates of an island and all coordinates that were hit by a player
  """
  @type t :: %__MODULE__{
          coordinates: MapSet.t(Coordinate.t()),
          hit_coordinates: MapSet.t(Coordinate.t())
        }

  @doc """
  Returns all available types of islands
  """
  @spec types :: [:atoll | :dot | :l_shape | :s_shape | :square]
  def types, do: [:atoll, :dot, :l_shape, :s_shape, :square]

  @doc """
  Checks if two islands overlap any of their coordinates - they collide.
  """
  @spec overlaps?(t(), t()) :: boolean
  def overlaps?(%__MODULE__{} = island_a, %__MODULE__{} = island_b) do
    not MapSet.disjoint?(island_a.coordinates, island_b.coordinates)
  end

  @doc """
  Checks wether an island has all of its tiles forested (hit)
  """
  @spec forested?(t()) :: boolean()
  def forested?(%__MODULE__{} = island) do
    MapSet.equal?(island.coordinates, island.hit_coordinates)
  end

  @doc """
  Guesses if a coordinate is inside an island
  """
  @spec guess(t(), Coordinate.t()) :: {:hit, t()} | :miss
  def guess(%__MODULE__{} = island, %Coordinate{} = coordinate) do
    case MapSet.member?(island.coordinates, coordinate) do
      true ->
        # Record the hit by adding the coordinate to the hit set.
        hit_coordinates = MapSet.put(island.hit_coordinates, coordinate)
        {:hit, %{island | hit_coordinates: hit_coordinates}}

      false ->
        :miss
    end
  end

  @doc """
  Creates a new island, with the given shape starting from the coordinate `upper_left`
  """
  @spec new(atom(), Coordinate.t()) ::
          {:ok, t()} | {:error, :invalid_coordinate | :invalid_island_shape}
  def new(type, %Coordinate{} = upper_left) do
    # Both failure shapes ({:error, :invalid_island_shape} from offsets/1 and
    # {:error, :invalid_coordinate} from add_coordinates/2) fall through the
    # `with` unchanged.
    with {:ok, offsets} <- offsets(type),
         %MapSet{} = coordinates <- add_coordinates(offsets, upper_left) do
      {:ok, %__MODULE__{coordinates: coordinates, hit_coordinates: MapSet.new()}}
    else
      error -> error
    end
  end

  # Translates each {row, col} offset by `upper_left`, accumulating a MapSet;
  # halts with {:error, :invalid_coordinate} as soon as one offset is invalid.
  defp add_coordinates(offsets, upper_left) do
    Enum.reduce_while(offsets, MapSet.new(), fn offset, acc ->
      add_coordinate(acc, upper_left, offset)
    end)
  end

  defp add_coordinate(coordinates, %Coordinate{row: row, col: col}, {row_offset, col_offset}) do
    case Coordinate.new(row + row_offset, col + col_offset) do
      {:ok, coordinate} ->
        {:cont, MapSet.put(coordinates, coordinate)}

      {:error, :invalid_coordinate} ->
        {:halt, {:error, :invalid_coordinate}}
    end
  end

  # Shape offset tables, expressed relative to the island's upper-left tile.
  defp offsets(:square) do
    {:ok, [{0, 0}, {0, 1}, {1, 0}, {1, 1}]}
  end

  defp offsets(:atoll) do
    {:ok, [{0, 0}, {0, 1}, {1, 1}, {2, 0}, {2, 1}]}
  end

  defp offsets(:dot) do
    {:ok, [{0, 0}]}
  end

  defp offsets(:l_shape) do
    {:ok, [{0, 0}, {1, 0}, {2, 0}, {2, 1}]}
  end

  defp offsets(:s_shape) do
    {:ok, [{0, 1}, {0, 2}, {1, 0}, {1, 1}]}
  end

  defp offsets(_shape), do: {:error, :invalid_island_shape}
end
|
lib/islands_engine/island.ex
| 0.927174
| 0.814717
|
island.ex
|
starcoder
|
defmodule Poker do
  @moduledoc """
  An Elixir library to work with Poker hands.
  Source: <https://github.com/wojtekmach/poker_elixir>
  Documentation: <http://hexdocs.pm/poker/>
  ## Example
  ```elixir
  hand1 = "As Ks Qs Js Ts"
  hand2 = "Ac Ad Ah As Kc"
  Poker.hand_rank(hand1) # => {:straight_flush, :A}
  Poker.hand_rank(hand2) # => {:four_of_a_kind, :A, :K}
  Poker.hand_compare(hand1, hand2) # => :gt
  ```
  """

  @doc """
  Returns the best rank & hand out of hole cards and community cards.
      iex> Poker.best_hand("4c 5d", "3c 6c 7d Ad Ac")
      {{:straight, 7}, {{7,:d}, {6,:c}, {5,:d}, {4,:c}, {3,:c}}}
  """
  def best_hand(hole_cards, community_cards) when is_binary(hole_cards) do
    best_hand(parse_hand(hole_cards), community_cards)
  end

  def best_hand(hole_cards, community_cards) when is_binary(community_cards) do
    best_hand(hole_cards, parse_hand(community_cards))
  end

  def best_hand(hole_cards, community_cards) do
    hole_cards = Tuple.to_list(hole_cards)
    community_cards = Tuple.to_list(community_cards)
    cards = hole_cards ++ community_cards

    # Evaluate every 5-card combination and keep the highest-valued one.
    hand = comb(5, cards)
    |> Enum.sort_by(fn cards ->
      cards |> List.to_tuple |> hand_value
    end)
    |> Enum.reverse
    |> hd
    |> List.to_tuple

    {hand_rank(hand), sort_hand(hand)}
  end

  # All m-element combinations of a list (classic recursive definition).
  defp comb(0, _), do: [[]]
  defp comb(_, []), do: []

  defp comb(m, [h|t]) do
    (for l <- comb(m-1, t), do: [h|l]) ++ comb(m, t)
  end

  @doc """
  Compares two poker hands and returns :gt, :eq or :lt when the first hand is respectively more valuable, equally valuable or less valuable than the second hand.
      iex> Poker.hand_compare("Ac Qd Ah As Kc", "Ac Ad Ah Kc Kc")
      :lt
  """
  def hand_compare(hand1, hand2) when is_binary(hand1) do
    hand_compare(parse_hand(hand1), hand2)
  end

  def hand_compare(hand1, hand2) when is_binary(hand2) do
    hand_compare(hand1, parse_hand(hand2))
  end

  def hand_compare(hand1, hand2) do
    r = hand_value(hand1) - hand_value(hand2)

    cond do
      r > 0 -> :gt
      r == 0 -> :eq
      r < 0 -> :lt
    end
  end

  @doc """
  Returns hand value - a number than uniquely identifies a given hand.
  The bigger the number the more valuable a given hand is.
      iex> Poker.hand_value("Ac Kc Qc Jc Tc")
      8014
  """
  def hand_value(str) when is_binary(str) do
    str |> parse_hand |> hand_value
  end

  # Each rank category gets a 1000-point band; kickers are folded in below.
  # NOTE(review): for :flush and :high_card the kicker values are *summed*
  # rather than positionally weighted, so hands within the same category can
  # collide or order incorrectly (e.g. an A-5-4-3-2 flush sums to 28, below
  # a K-Q-4-3-2 flush at 34) — verify whether "uniquely identifies" in the
  # doc above is actually guaranteed.
  def hand_value(hand) do
    case hand_rank(hand) do
      {:straight_flush, a} -> 8_000 + card_value(a)
      {:four_of_a_kind, _a, b} -> 7_000 + card_value(b)
      {:full_house, a, b} -> 6_000 + 15 * card_value(a) + card_value(b)
      {:flush, _r, a, b, c, d, e} -> 5_000 + card_value(a) + card_value(b) + card_value(c) + card_value(d) + card_value(e)
      {:straight, a} -> 4_000 + card_value(a)
      {:three_of_a_kind, a, b, c} -> 3_000 + 15 * card_value(a) + card_value(b) + card_value(c)
      {:two_pair, a, b, c} -> 2_000 + 15 * card_value(a) + 15 * card_value(b) + card_value(c)
      {:one_pair, a, b, c, d} -> 1_000 + 15 * card_value(a) + card_value(b) + card_value(c) + card_value(d)
      {:high_card, a, b, c, d, e} -> card_value(a) + card_value(b) + card_value(c) + card_value(d) + card_value(e)
    end
  end

  @doc """
  Returns rank of a given hand.
      iex> Poker.hand_rank("Ac Kc Qc Jc Tc")
      {:straight_flush, :A}
      iex> Poker.hand_rank("Kc Qc Jc Tc 9c")
      {:straight_flush, :K}
      iex> Poker.hand_rank("5c 4c 3c 2c Ac")
      {:straight_flush, 5}
      iex> Poker.hand_rank("Ac Ad Ah As Kd")
      {:four_of_a_kind, :A, :K}
      iex> Poker.hand_rank("Ac Ad Ah Kc Kd")
      {:full_house, :A, :K}
      iex> Poker.hand_rank("Kc Kd Kh Ac Ad")
      {:full_house, :K, :A}
      iex> Poker.hand_rank("Ac Qc Jc Tc 9c")
      {:flush, :c, :A, :Q, :J, :T, 9}
      iex> Poker.hand_rank("Ac Kc Qc Jc Td")
      {:straight, :A}
      iex> Poker.hand_rank("Kc Qc Jc Tc 9d")
      {:straight, :K}
      iex> Poker.hand_rank("5c 4c 3c 2c Ad")
      {:straight, 5}
      iex> Poker.hand_rank("Ac Ad Ah Kc Qc")
      {:three_of_a_kind, :A, :K, :Q}
      iex> Poker.hand_rank("Ac Ad Kc Kd Qc")
      {:two_pair, :A, :K, :Q}
      iex> Poker.hand_rank("Ac Ad Kc Qc Jd")
      {:one_pair, :A, :K, :Q, :J}
      iex> Poker.hand_rank("Ac Qc Jd Td 9c")
      {:high_card, :A, :Q, :J, :T, 9}
  """
  def hand_rank(str) when is_binary(str) do
    parse_hand(str) |> hand_rank
  end

  def hand_rank(hand) do
    unless length(Tuple.to_list(hand)) == 5 do
      raise ArgumentError, "Must pass 5 cards, got: #{inspect(hand)}"
    end

    # Sort descending by card value so pattern matching below can rely on
    # equal ranks being adjacent.
    hand = sort_hand(hand)

    if is_straight(hand) do
      {{r1,_}, {r2,_}, _, _, _} = hand

      # Ace-low "wheel" (A,5,4,3,2) counts as a 5-high straight.
      r =
        if r1 == :A && r2 == 5 do
          5
        else
          r1
        end

      if is_flush(hand) do
        {:straight_flush, r}
      else
        {:straight, r}
      end
    else
      # Clauses ordered strongest-to-weakest; the first match wins.
      case hand do
        {{a,_}, {a,_}, {a,_}, {a,_}, {b,_}} -> {:four_of_a_kind, a, b}
        {{a,_}, {a,_}, {a,_}, {b,_}, {b,_}} -> {:full_house, a, b}
        {{a,_}, {a,_}, {b,_}, {b,_}, {b,_}} -> {:full_house, b, a}
        {{r1,a}, {r2,a}, {r3,a}, {r4,a}, {r5,a}} -> {:flush, a, r1, r2, r3, r4, r5}
        {{a,_}, {a,_}, {a,_}, {b,_}, {c,_}} -> {:three_of_a_kind, a, b, c}
        {{a,_}, {a,_}, {b,_}, {b,_}, {c,_}} -> {:two_pair, a, b, c}
        {{a,_}, {a,_}, {b,_}, {c,_}, {d,_}} -> {:one_pair, a, b, c, d}
        {{a,_}, {b,_}, {c,_}, {d,_}, {e,_}} -> {:high_card, a, b, c, d, e}
      end
    end
  end

  defp is_straight(str) when is_binary(str) do
    str |> parse_hand |> is_straight
  end

  # Expects a descending-sorted hand; the `a == :A && b == 5` alternative
  # admits the ace-low wheel.
  defp is_straight({{a,_}, {b,_}, {c,_}, {d,_}, {e,_}}) do
    (card_value(a) == card_value(b) + 1 || a == :A && b == 5) &&
    card_value(b) == card_value(c) + 1 &&
    card_value(c) == card_value(d) + 1 &&
    card_value(d) == card_value(e) + 1
  end

  defp is_flush({{_,a},{_,a},{_,a},{_,a},{_,a}}), do: true
  defp is_flush({_,_,_,_,_}), do: false

  # Numeric card values; ace is always high here (the wheel is special-cased
  # in hand_rank/is_straight).
  defp card_value(:A), do: 14
  defp card_value(:K), do: 13
  defp card_value(:Q), do: 12
  defp card_value(:J), do: 11
  defp card_value(:T), do: 10
  defp card_value(i) when is_integer(i) and i >= 2 and i <= 9, do: i

  @doc """
  Accepts a string and returns a tuple of cards. A card is a tuple of rank and suit.
      iex> Poker.parse_hand("Ac Kd")
      {{:A, :c}, {:K, :d}}
  """
  def parse_hand(str) do
    str
    |> String.split(" ")
    |> Enum.map(&parse_card/1)
    |> List.to_tuple
  end

  defp parse_card(str) do
    [rank, suit] = String.codepoints(str)
    {parse_rank(rank), String.to_atom(suit)}
  end

  defp parse_rank("A"), do: :A
  defp parse_rank("K"), do: :K
  defp parse_rank("Q"), do: :Q
  defp parse_rank("J"), do: :J
  defp parse_rank("T"), do: :T
  defp parse_rank(str), do: String.to_integer(str)

  # Sorts a hand tuple descending by card value.
  defp sort_hand(hand) do
    hand
    |> Tuple.to_list
    |> Enum.sort_by(fn {rank,_} -> card_value(rank) end)
    |> Enum.reverse
    |> List.to_tuple
  end
end
|
lib/poker.ex
| 0.867078
| 0.781122
|
poker.ex
|
starcoder
|
defmodule DayTwelve do
  # Advent of Code 2020, day 12 part 1: follow the navigation instructions
  # and return the Manhattan distance of the ship from the origin.
  def solve(input) do
    {_facing, x, y} =
      input
      |> String.split("\n", trim: true)
      |> Enum.map(&parse_instruction/1)
      |> Enum.reduce({:east, 0, 0}, &apply_instruction/2)

    abs(x) + abs(y)
  end

  # "R90" -> {:turn, "R", 90}; "F10" -> {:move, "F", 10}; etc.
  defp parse_instruction(line) do
    [cmd, raw] = String.split(line, "", trim: true, parts: 2)
    amount = String.to_integer(raw)

    case cmd do
      "R" -> {:turn, "R", amount}
      "L" -> {:turn, "L", amount}
      _ -> {:move, cmd, amount}
    end
  end

  defp apply_instruction({:turn, way, amount}, {facing, x, y}) do
    {turn(facing, amount, way), x, y}
  end

  defp apply_instruction({:move, way, amount}, {facing, x, y}) do
    {dx, dy} = move(way, amount, facing)
    {facing, x + dx, y + dy}
  end

  # "F" moves in the direction the ship currently faces.
  defp move("F", amount, facing), do: move(facing_letter(facing), amount)
  defp move(way, amount, _facing), do: move(way, amount)

  defp facing_letter(:east), do: "E"
  defp facing_letter(:west), do: "W"
  defp facing_letter(:north), do: "N"
  defp facing_letter(:south), do: "S"

  # Public because part two reuses it for waypoint moves.
  def move("E", amount), do: {amount, 0}
  def move("W", amount), do: {-amount, 0}
  def move("N", amount), do: {0, amount}
  def move("S", amount), do: {0, -amount}

  # Turns are handled 90 degrees at a time; a left turn is three rights.
  defp turn(dir, deg, way) when deg > 90 do
    dir |> turn(90, way) |> turn(deg - 90, way)
  end

  defp turn(dir, 90, "R"), do: clockwise(dir)
  defp turn(dir, 90, "L"), do: dir |> clockwise() |> clockwise() |> clockwise()

  defp clockwise(:east), do: :south
  defp clockwise(:south), do: :west
  defp clockwise(:west), do: :north
  defp clockwise(:north), do: :east
end
defmodule DayTwelve.PartTwo do
  # Advent of Code 2020, day 12 part 2: the ship chases a waypoint that
  # starts at {10, 1} relative to the ship.
  import DayTwelve, only: [move: 2]

  def solve(input) do
    {{x, y}, _waypoint} =
      input
      |> String.split("\n", trim: true)
      |> Enum.map(&parse_instruction/1)
      |> Enum.reduce({{0, 0}, {10, 1}}, &step/2)

    abs(x) + abs(y)
  end

  defp parse_instruction(line) do
    [cmd, raw] = String.split(line, "", trim: true, parts: 2)
    amount = String.to_integer(raw)

    case cmd do
      "R" -> {:turn, "R", amount}
      "L" -> {:turn, "L", amount}
      "F" -> {:move_to_waypoint, cmd, amount}
      _ -> {:move_waypoint, cmd, amount}
    end
  end

  # "F": move the ship `amount` times the waypoint offset.
  defp step({:move_to_waypoint, _, amount}, {{x, y}, {dx, dy} = waypoint}) do
    {{x + dx * amount, y + dy * amount}, waypoint}
  end

  # N/S/E/W shift the waypoint, not the ship.
  defp step({:move_waypoint, cmd, amount}, {loc, {wx, wy}}) do
    {dx, dy} = move(cmd, amount)
    {loc, {wx + dx, wy + dy}}
  end

  # R/L rotate the waypoint around the ship.
  defp step({:turn, way, amount}, {loc, waypoint}) do
    {loc, turn_waypoint(waypoint, amount, way)}
  end

  def turn_waypoint(waypoint, deg, way) when deg > 90 do
    waypoint |> turn_waypoint(90, way) |> turn_waypoint(deg - 90, way)
  end

  def turn_waypoint({x, y}, 90, "L"), do: {-y, x}
  def turn_waypoint({x, y}, 90, "R"), do: {y, -x}
end
|
adv_2020/lib/day_12.ex
| 0.601945
| 0.6687
|
day_12.ex
|
starcoder
|
defmodule Jiffy do
  @moduledoc """
  A JSON parser as a NIF.
  # Data Format
  | Elixir | -> JSON | -> Elixir |
  | ---------------------------- | ---------------- | ------- |
  | `nil` | `null` | `nil` |
  | `true` | `true` | `true` |
  | `false` | `false` | `false` |
  | `'hi'` | `[104, 105]` | `[104, 105]` |
  | `"hi"` | `"hi"` | `"hi"` |
  | `:hi` | `"hi"` | `"hi"` |
  | `1` | `1` | `1` |
  | `1.25` | `1.25` | `1.25` |
  | `[]` | `[]` | `[]` |
  | `[true, 1.0]` | `[true, 1.0]` | `[true, 1.0]` |
  | `%{"foo" => "bar"}` | `{"foo": "bar"}` | `%{"foo" => "bar"}` |
  | `%{foo: "bar"}` | `{"foo": "bar"}` | `%{"foo" => "bar"}` |
  """

  # Options always passed to the underlying :jiffy NIF.
  @encode_options [:use_nil]
  @decode_options [:use_nil, :return_maps]

  @doc """
  Encode a value to JSON.
  # Unsupported structures
  * Encoding Keywords currently is not supported.
  * Encoding DateTime, Date or other Date-related Elixir structures will return
  `{:error, {:invalid_ejson, any}}`. If you want to encode them - you need to cast
  them to string before encoding.
  # Options
  * `:uescape` - Escapes UTF-8 sequences to produce a 7-bit clean output.
  * `:pretty` - Produce JSON using two-space indentation.
  * `:force_utf8` - Force strings to encode as UTF-8 by fixing broken
  surrogate pairs and/or using the replacement character to remove
  broken UTF-8 sequences in data.
  * `:escape_forward_slashes` - Escapes the `/` character which can be
  useful when encoding URLs in some cases.
  * `{:bytes_per_red, n}` - Refer to the `decode/2` options.
  * `{:bytes_per_iter, n}` - Refer to the `decode/2` options.
  # Examples
      iex> Jiffy.encode([1, 2, 3])
      {:ok, "[1,2,3]"}
  """
  @spec encode(any, opts :: :jiffy.encode_option()) :: {:ok, any()} | {:error, any()}
  def encode(data, opts \\ []) do
    {:ok, encode!(data, opts)}
  catch
    # :jiffy throws (rather than raises) {:error, reason} tuples.
    {:error, reason} -> {:error, reason}
  end

  @doc """
  Encode a value to JSON, raises an exception on error.
  For list of options see `encode/2`.
  # Examples
      iex> Jiffy.encode!([1, 2, 3])
      "[1,2,3]"
  """
  @spec encode!(any, opts :: :jiffy.encode_option()) :: any() | no_return()
  def encode!(data, opts \\ []) do
    :jiffy.encode(data, @encode_options ++ opts)
  end

  @doc """
  Encode a value to JSON, raising an exception on error.
  Delegates to `encode!/1`.
  """
  def encode_to_iodata!(data) do
    encode!(data)
  end

  @doc """
  Decode JSON to a value.
  # Options
  * `:return_trailer` - If any non-whitespace is found after the first
  JSON term is decoded the return value of decode/2 becomes
  `{:has_trailer, first_term, rest_iodata}`. This is useful to
  decode multiple terms in a single binary.
  * `{:bytes_per_red, n}` where `n` >= 0 - This controls the number of
  bytes that Jiffy will process as an equivalent to a reduction. Each
  20 reductions we consume 1% of our allocated time slice for the current
  process. When the Erlang VM indicates we need to return from the NIF.
  * `{:bytes_per_iter, n}` where `n` >= 0 - Backwards compatible option
  that is converted into the `bytes_per_red` value.
  # Examples
      iex> Jiffy.decode("[1,2,3]")
      {:ok, [1, 2, 3]}
  """
  @spec decode(String.t(), opts :: :jiffy.decode_option()) :: {:ok, any()} | {:error, atom()}
  def decode(data, opts \\ []) do
    {:ok, decode!(data, opts)}
  catch
    # :jiffy throws (rather than raises) {:error, reason} tuples.
    {:error, reason} -> {:error, reason}
  end

  @doc """
  Decode JSON to a value, raises an exception on error.
  For list of options see `decode/2`.
  # Examples
      iex> Jiffy.decode!("[1,2,3]")
      [1, 2, 3]
  """
  @spec decode!(String.t(), opts :: :jiffy.decode_option()) :: any() | no_return()
  def decode!(data, opts \\ []) do
    :jiffy.decode(data, @decode_options ++ opts)
  end
end
|
src/jiffy.ex
| 0.925752
| 0.815306
|
jiffy.ex
|
starcoder
|
defmodule ForgeAbi.RequestSendTx do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
tx: ForgeAbi.Transaction.t() | nil,
wallet: ForgeAbi.WalletInfo.t() | nil,
token: String.t(),
commit: boolean
}
defstruct [:tx, :wallet, :token, :commit]
field :tx, 1, type: ForgeAbi.Transaction
field :wallet, 2, type: ForgeAbi.WalletInfo, deprecated: true
field :token, 3, type: :string, deprecated: true
field :commit, 4, type: :bool
end
defmodule ForgeAbi.ResponseSendTx do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
code: ForgeAbi.StatusCode.t(),
hash: String.t()
}
defstruct [:code, :hash]
field :code, 1, type: ForgeAbi.StatusCode, enum: true
field :hash, 2, type: :string
end
defmodule ForgeAbi.RequestGetTx do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
hash: String.t()
}
defstruct [:hash]
field :hash, 1, type: :string
end
defmodule ForgeAbi.ResponseGetTx do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
code: ForgeAbi.StatusCode.t(),
info: ForgeAbi.TransactionInfo.t() | nil
}
defstruct [:code, :info]
field :code, 1, type: ForgeAbi.StatusCode, enum: true
field :info, 2, type: ForgeAbi.TransactionInfo
end
defmodule ForgeAbi.RequestGetBlock do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
height: non_neg_integer
}
defstruct [:height]
field :height, 1, type: :uint64
end
defmodule ForgeAbi.ResponseGetBlock do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
code: ForgeAbi.StatusCode.t(),
block: ForgeAbi.BlockInfo.t() | nil
}
defstruct [:code, :block]
field :code, 1, type: ForgeAbi.StatusCode, enum: true
field :block, 2, type: ForgeAbi.BlockInfo
end
defmodule ForgeAbi.RequestGetBlocks do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
paging: ForgeAbi.PageInput.t() | nil,
height_filter: ForgeAbi.RangeFilter.t() | nil,
empty_excluded: boolean
}
defstruct [:paging, :height_filter, :empty_excluded]
field :paging, 1, type: ForgeAbi.PageInput
field :height_filter, 2, type: ForgeAbi.RangeFilter
field :empty_excluded, 3, type: :bool
end
defmodule ForgeAbi.ResponseGetBlocks do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
code: ForgeAbi.StatusCode.t(),
page: ForgeAbi.PageInfo.t() | nil,
blocks: [ForgeAbi.BlockInfoSimple.t()]
}
defstruct [:code, :page, :blocks]
field :code, 1, type: ForgeAbi.StatusCode, enum: true
field :page, 2, type: ForgeAbi.PageInfo
field :blocks, 3, repeated: true, type: ForgeAbi.BlockInfoSimple
end
defmodule ForgeAbi.RequestDeclareNode do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
validator: boolean,
issuer: String.t()
}
defstruct [:validator, :issuer]
field :validator, 1, type: :bool
field :issuer, 2, type: :string
end
defmodule ForgeAbi.ResponseDeclareNode do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
code: ForgeAbi.StatusCode.t(),
wallet: ForgeAbi.WalletInfo.t() | nil,
tx: ForgeAbi.Transaction.t() | nil
}
defstruct [:code, :wallet, :tx]
field :code, 1, type: ForgeAbi.StatusCode, enum: true
field :wallet, 3, type: ForgeAbi.WalletInfo
field :tx, 4, type: ForgeAbi.Transaction
end
defmodule ForgeAbi.RequestGetAccountState do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
address: String.t(),
keys: [String.t()],
height: non_neg_integer
}
defstruct [:address, :keys, :height]
field :address, 1, type: :string
field :keys, 2, repeated: true, type: :string
field :height, 3, type: :uint64
end
defmodule ForgeAbi.ResponseGetAccountState do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
code: ForgeAbi.StatusCode.t(),
state: ForgeAbi.AccountState.t() | nil
}
defstruct [:code, :state]
field :code, 1, type: ForgeAbi.StatusCode, enum: true
field :state, 2, type: ForgeAbi.AccountState
end
defmodule ForgeAbi.RequestGetAssetState do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
address: String.t(),
keys: [String.t()],
height: non_neg_integer
}
defstruct [:address, :keys, :height]
field :address, 1, type: :string
field :keys, 2, repeated: true, type: :string
field :height, 3, type: :uint64
end
defmodule ForgeAbi.ResponseGetAssetState do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
code: ForgeAbi.StatusCode.t(),
state: ForgeAbi.AssetState.t() | nil
}
defstruct [:code, :state]
field :code, 1, type: ForgeAbi.StatusCode, enum: true
field :state, 2, type: ForgeAbi.AssetState
end
# Generated protobuf (proto3) message modules for Forge state-query RPCs.
# Do not edit by hand — regenerate from the .proto schema instead.
# Request/Response pairs follow the convention: every Response carries a
# `code` (ForgeAbi.StatusCode enum) plus the payload for that call.
defmodule ForgeAbi.RequestGetForgeState do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          keys: [String.t()],
          height: non_neg_integer
        }
  defstruct [:keys, :height]

  field :keys, 1, repeated: true, type: :string
  # Field number 2 is skipped; number gaps are legal in protobuf and
  # presumably reserved in the .proto — confirm against the schema.
  field :height, 3, type: :uint64
end

defmodule ForgeAbi.ResponseGetForgeState do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          state: ForgeAbi.ForgeState.t() | nil
        }
  defstruct [:code, :state]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :state, 2, type: ForgeAbi.ForgeState
end

defmodule ForgeAbi.RequestGetSwapState do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          address: String.t(),
          keys: [String.t()],
          height: non_neg_integer
        }
  defstruct [:address, :keys, :height]

  field :address, 1, type: :string
  field :keys, 2, repeated: true, type: :string
  field :height, 3, type: :uint64
end

defmodule ForgeAbi.ResponseGetSwapState do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          state: ForgeAbi.SwapState.t() | nil
        }
  defstruct [:code, :state]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :state, 2, type: ForgeAbi.SwapState
end

defmodule ForgeAbi.RequestGetDelegateState do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          address: String.t(),
          keys: [String.t()],
          height: non_neg_integer
        }
  defstruct [:address, :keys, :height]

  field :address, 1, type: :string
  field :keys, 2, repeated: true, type: :string
  field :height, 3, type: :uint64
end

defmodule ForgeAbi.ResponseGetDelegateState do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          state: ForgeAbi.DelegateState.t() | nil
        }
  defstruct [:code, :state]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :state, 2, type: ForgeAbi.DelegateState
end

# Empty request message: the call takes no parameters.
defmodule ForgeAbi.RequestGetChainInfo do
  @moduledoc false
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{}
  defstruct []
end

defmodule ForgeAbi.ResponseGetChainInfo do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          info: ForgeAbi.ChainInfo.t() | nil
        }
  defstruct [:code, :info]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :info, 2, type: ForgeAbi.ChainInfo
end
# Generated protobuf (proto3) message modules for node/search/net-info RPCs.
# Do not edit by hand — regenerate from the .proto schema instead.
defmodule ForgeAbi.RequestGetNodeInfo do
  @moduledoc false
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{}
  defstruct []
end

defmodule ForgeAbi.ResponseGetNodeInfo do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          info: ForgeAbi.NodeInfo.t() | nil
        }
  defstruct [:code, :info]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :info, 2, type: ForgeAbi.NodeInfo
end

defmodule ForgeAbi.RequestSearch do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t(),
          value: String.t()
        }
  defstruct [:key, :value]

  field :key, 1, type: :string
  field :value, 2, type: :string
end

defmodule ForgeAbi.ResponseSearch do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          txs: [ForgeAbi.TransactionInfo.t()]
        }
  defstruct [:code, :txs]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :txs, 2, repeated: true, type: ForgeAbi.TransactionInfo
end

defmodule ForgeAbi.RequestGetUnconfirmedTxs do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          paging: ForgeAbi.PageInput.t() | nil
        }
  defstruct [:paging]

  field :paging, 1, type: ForgeAbi.PageInput
end

defmodule ForgeAbi.ResponseGetUnconfirmedTxs do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          page: ForgeAbi.PageInfo.t() | nil,
          unconfirmed_txs: ForgeAbi.UnconfirmedTxs.t() | nil
        }
  defstruct [:code, :page, :unconfirmed_txs]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :page, 2, type: ForgeAbi.PageInfo
  field :unconfirmed_txs, 3, type: ForgeAbi.UnconfirmedTxs
end

defmodule ForgeAbi.RequestGetNetInfo do
  @moduledoc false
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{}
  defstruct []
end

defmodule ForgeAbi.ResponseGetNetInfo do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          net_info: ForgeAbi.NetInfo.t() | nil
        }
  defstruct [:code, :net_info]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :net_info, 2, type: ForgeAbi.NetInfo
end

defmodule ForgeAbi.RequestGetValidatorsInfo do
  @moduledoc false
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{}
  defstruct []
end

defmodule ForgeAbi.ResponseGetValidatorsInfo do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          validators_info: ForgeAbi.ValidatorsInfo.t() | nil
        }
  defstruct [:code, :validators_info]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :validators_info, 2, type: ForgeAbi.ValidatorsInfo
end
# Generated protobuf (proto3) message modules for subscribe/config RPCs.
# Do not edit by hand — regenerate from the .proto schema instead.
defmodule ForgeAbi.RequestSubscribe do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          topic: String.t(),
          filter: String.t()
        }
  defstruct [:topic, :filter]

  field :topic, 1, type: :string
  field :filter, 2, type: :string
end

defmodule ForgeAbi.ResponseSubscribe do
  @moduledoc false
  use Protobuf, syntax: :proto3

  # `value` is a protobuf oneof: exactly one of the tagged fields below is
  # set, decoded as `{field_name_atom, payload}`. The wide, non-contiguous
  # field-number ranges (3..42 for transactions, 16/17 for ABCI block
  # events, 129..135 for state updates) come from the .proto and group the
  # event kinds by number band.
  @type t :: %__MODULE__{
          value: {atom, any},
          code: ForgeAbi.StatusCode.t()
        }
  defstruct [:value, :code]

  oneof :value, 0
  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :topic, 2, type: :string, oneof: 0
  field :transfer, 3, type: ForgeAbi.Transaction, oneof: 0
  field :account_migrate, 4, type: ForgeAbi.Transaction, oneof: 0
  field :confirm, 5, type: ForgeAbi.Transaction, oneof: 0
  field :create_asset, 6, type: ForgeAbi.Transaction, oneof: 0
  field :exchange, 7, type: ForgeAbi.Transaction, oneof: 0
  field :begin_block, 16, type: AbciVendor.RequestBeginBlock, oneof: 0
  field :end_block, 17, type: AbciVendor.RequestEndBlock, oneof: 0
  field :declare, 19, type: ForgeAbi.Transaction, oneof: 0
  field :update_asset, 20, type: ForgeAbi.Transaction, oneof: 0
  field :consensus_upgrade, 21, type: ForgeAbi.Transaction, oneof: 0
  field :sys_upgrade, 23, type: ForgeAbi.Transaction, oneof: 0
  field :stake, 24, type: ForgeAbi.Transaction, oneof: 0
  field :delegate, 25, type: ForgeAbi.Transaction, oneof: 0
  field :revoke_delegate, 28, type: ForgeAbi.Transaction, oneof: 0
  field :deposit_token, 29, type: ForgeAbi.Transaction, oneof: 0
  field :withdraw_token, 30, type: ForgeAbi.Transaction, oneof: 0
  field :approve_withdraw, 31, type: ForgeAbi.Transaction, oneof: 0
  field :revoke_withdraw, 32, type: ForgeAbi.Transaction, oneof: 0
  field :setup_swap, 33, type: ForgeAbi.Transaction, oneof: 0
  field :revoke_swap, 34, type: ForgeAbi.Transaction, oneof: 0
  field :retrieve_swap, 35, type: ForgeAbi.Transaction, oneof: 0
  field :poke, 36, type: ForgeAbi.Transaction, oneof: 0
  field :consume_asset, 38, type: ForgeAbi.Transaction, oneof: 0
  field :acquire_asset, 39, type: ForgeAbi.Transaction, oneof: 0
  field :upgrade_node, 40, type: ForgeAbi.Transaction, oneof: 0
  field :update_validator, 41, type: ForgeAbi.Transaction, oneof: 0
  field :update_consensus_params, 42, type: ForgeAbi.Transaction, oneof: 0
  field :account_state, 129, type: ForgeAbi.AccountState, oneof: 0
  field :asset_state, 130, type: ForgeAbi.AssetState, oneof: 0
  field :forge_state, 131, type: ForgeAbi.ForgeState, oneof: 0
  field :delegate_state, 134, type: ForgeAbi.DelegateState, oneof: 0
  field :swap_state, 135, type: ForgeAbi.SwapState, oneof: 0
end

defmodule ForgeAbi.RequestUnsubscribe do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          topic: String.t()
        }
  defstruct [:topic]

  field :topic, 1, type: :string
end

defmodule ForgeAbi.ResponseUnsubscribe do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t()
        }
  defstruct [:code]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
end

defmodule ForgeAbi.RequestGetConfig do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          parsed: boolean
        }
  defstruct [:parsed]

  field :parsed, 1, type: :bool
end

defmodule ForgeAbi.ResponseGetConfig do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          config: String.t()
        }
  defstruct [:code, :config]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :config, 2, type: :string
end
# Generated protobuf (proto3) message modules for stats and list RPCs.
# Do not edit by hand — regenerate from the .proto schema instead.
defmodule ForgeAbi.ByDay do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          start_date: String.t(),
          end_date: String.t()
        }
  defstruct [:start_date, :end_date]

  field :start_date, 1, type: :string
  field :end_date, 2, type: :string
end

defmodule ForgeAbi.ByHour do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          date: String.t()
        }
  defstruct [:date]

  field :date, 1, type: :string
end

defmodule ForgeAbi.RequestGetForgeStats do
  @moduledoc false
  use Protobuf, syntax: :proto3

  # oneof: the caller asks for stats either by day range or by hour-of-day.
  @type t :: %__MODULE__{
          value: {atom, any}
        }
  defstruct [:value]

  oneof :value, 0
  field :day_info, 1, type: ForgeAbi.ByDay, oneof: 0
  field :date, 2, type: ForgeAbi.ByHour, oneof: 0
end

defmodule ForgeAbi.ResponseGetForgeStats do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          forge_stats: ForgeAbi.ForgeStats.t() | nil
        }
  defstruct [:code, :forge_stats]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :forge_stats, 2, type: ForgeAbi.ForgeStats
end

defmodule ForgeAbi.RequestListTransactions do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          paging: ForgeAbi.PageInput.t() | nil,
          time_filter: ForgeAbi.TimeFilter.t() | nil,
          address_filter: ForgeAbi.AddressFilter.t() | nil,
          type_filter: ForgeAbi.TypeFilter.t() | nil,
          validity_filter: ForgeAbi.ValidityFilter.t() | nil
        }
  defstruct [:paging, :time_filter, :address_filter, :type_filter, :validity_filter]

  field :paging, 1, type: ForgeAbi.PageInput
  field :time_filter, 2, type: ForgeAbi.TimeFilter
  field :address_filter, 3, type: ForgeAbi.AddressFilter
  field :type_filter, 4, type: ForgeAbi.TypeFilter
  field :validity_filter, 5, type: ForgeAbi.ValidityFilter
end

defmodule ForgeAbi.ResponseListTransactions do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          page: ForgeAbi.PageInfo.t() | nil,
          transactions: [ForgeAbi.IndexedTransaction.t()]
        }
  defstruct [:code, :page, :transactions]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :page, 2, type: ForgeAbi.PageInfo
  field :transactions, 3, repeated: true, type: ForgeAbi.IndexedTransaction
end

defmodule ForgeAbi.RequestListAssets do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          paging: ForgeAbi.PageInput.t() | nil,
          owner_address: String.t()
        }
  defstruct [:paging, :owner_address]

  field :paging, 1, type: ForgeAbi.PageInput
  field :owner_address, 2, type: :string
end

defmodule ForgeAbi.ResponseListAssets do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          page: ForgeAbi.PageInfo.t() | nil,
          assets: [ForgeAbi.IndexedAssetState.t()]
        }
  defstruct [:code, :page, :assets]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :page, 2, type: ForgeAbi.PageInfo
  field :assets, 3, repeated: true, type: ForgeAbi.IndexedAssetState
end

defmodule ForgeAbi.RequestListStakes do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          paging: ForgeAbi.PageInput.t() | nil,
          address_filter: ForgeAbi.AddressFilter.t() | nil
        }
  defstruct [:paging, :address_filter]

  field :paging, 1, type: ForgeAbi.PageInput
  field :address_filter, 2, type: ForgeAbi.AddressFilter
end

defmodule ForgeAbi.ResponseListStakes do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          page: ForgeAbi.PageInfo.t() | nil,
          stakes: [ForgeAbi.IndexedStakeState.t()]
        }
  defstruct [:code, :page, :stakes]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :page, 2, type: ForgeAbi.PageInfo
  field :stakes, 3, repeated: true, type: ForgeAbi.IndexedStakeState
end
# Generated protobuf (proto3) message modules for account/block/swap list RPCs.
# Do not edit by hand — regenerate from the .proto schema instead.
defmodule ForgeAbi.RequestListAccount do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          owner_address: String.t()
        }
  defstruct [:owner_address]

  field :owner_address, 1, type: :string
end

defmodule ForgeAbi.ResponseListAccount do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          account: ForgeAbi.IndexedAccountState.t() | nil
        }
  defstruct [:code, :account]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :account, 2, type: ForgeAbi.IndexedAccountState
end

defmodule ForgeAbi.RequestListTopAccounts do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          paging: ForgeAbi.PageInput.t() | nil
        }
  defstruct [:paging]

  field :paging, 1, type: ForgeAbi.PageInput
end

defmodule ForgeAbi.ResponseListTopAccounts do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          page: ForgeAbi.PageInfo.t() | nil,
          accounts: [ForgeAbi.IndexedAccountState.t()]
        }
  defstruct [:code, :page, :accounts]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :page, 2, type: ForgeAbi.PageInfo
  field :accounts, 3, repeated: true, type: ForgeAbi.IndexedAccountState
end

defmodule ForgeAbi.RequestListAssetTransactions do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          paging: ForgeAbi.PageInput.t() | nil,
          address: String.t()
        }
  defstruct [:paging, :address]

  field :paging, 1, type: ForgeAbi.PageInput
  field :address, 2, type: :string
end

defmodule ForgeAbi.ResponseListAssetTransactions do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          page: ForgeAbi.PageInfo.t() | nil,
          transactions: [ForgeAbi.IndexedTransaction.t()]
        }
  defstruct [:code, :page, :transactions]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :page, 2, type: ForgeAbi.PageInfo
  field :transactions, 3, repeated: true, type: ForgeAbi.IndexedTransaction
end

defmodule ForgeAbi.RequestListBlocks do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          paging: ForgeAbi.PageInput.t() | nil,
          proposer: String.t(),
          time_filter: ForgeAbi.TimeFilter.t() | nil,
          height_filter: ForgeAbi.RangeFilter.t() | nil,
          num_txs_filter: ForgeAbi.RangeFilter.t() | nil,
          num_invalid_txs_filter: ForgeAbi.RangeFilter.t() | nil
        }
  defstruct [
    :paging,
    :proposer,
    :time_filter,
    :height_filter,
    :num_txs_filter,
    :num_invalid_txs_filter
  ]

  field :paging, 1, type: ForgeAbi.PageInput
  field :proposer, 2, type: :string
  field :time_filter, 3, type: ForgeAbi.TimeFilter
  field :height_filter, 4, type: ForgeAbi.RangeFilter
  field :num_txs_filter, 5, type: ForgeAbi.RangeFilter
  field :num_invalid_txs_filter, 6, type: ForgeAbi.RangeFilter
end

defmodule ForgeAbi.ResponseListBlocks do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          page: ForgeAbi.PageInfo.t() | nil,
          blocks: [ForgeAbi.IndexedBlock.t()]
        }
  defstruct [:code, :page, :blocks]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :page, 2, type: ForgeAbi.PageInfo
  field :blocks, 3, repeated: true, type: ForgeAbi.IndexedBlock
end

defmodule ForgeAbi.RequestListSwap do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          paging: ForgeAbi.PageInput.t() | nil,
          sender: String.t(),
          receiver: String.t(),
          available: boolean
        }
  defstruct [:paging, :sender, :receiver, :available]

  field :paging, 1, type: ForgeAbi.PageInput
  field :sender, 2, type: :string
  field :receiver, 3, type: :string
  field :available, 4, type: :bool
end

defmodule ForgeAbi.ResponseListSwap do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          page: ForgeAbi.PageInfo.t() | nil,
          swap: [ForgeAbi.SwapState.t()]
        }
  defstruct [:code, :page, :swap]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :page, 2, type: ForgeAbi.PageInfo
  field :swap, 3, repeated: true, type: ForgeAbi.SwapState
end

defmodule ForgeAbi.RequestGetSwapStatistics do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          address: String.t()
        }
  defstruct [:address]

  field :address, 1, type: :string
end

defmodule ForgeAbi.ResponseGetSwapStatistics do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          statistics: ForgeAbi.SwapStatistics.t() | nil
        }
  defstruct [:code, :statistics]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :statistics, 2, type: ForgeAbi.SwapStatistics
end

defmodule ForgeAbi.RequestGetHealthStatus do
  @moduledoc false
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{}
  defstruct []
end

defmodule ForgeAbi.ResponseGetHealthStatus do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          code: ForgeAbi.StatusCode.t(),
          health_status: ForgeAbi.HealthStatus.t() | nil
        }
  defstruct [:code, :health_status]

  field :code, 1, type: ForgeAbi.StatusCode, enum: true
  field :health_status, 2, type: ForgeAbi.HealthStatus
end
|
lib/protobuf/gen/rpc.pb.ex
| 0.732305
| 0.588564
|
rpc.pb.ex
|
starcoder
|
# Only define the plug when Plug is available as a dependency.
if Code.ensure_loaded?(Plug) do
  defmodule WeChat.Plug.EventHandler do
    @moduledoc """
    Plug for handling WeChat server push messages (event callbacks).

    ## Usage

    Add the code below to your `router`:

    - Single-app setup:

    ```elixir
    forward "/wx/event", WeChat.Plug.EventHandler, client: WxOfficialAccount, handler: &Module.handle_event/2
    ```

    - Multi-app setup:

      Set the mount path to the following format: `/xxx/:appid/xxx`

    ```elixir
    scope "/wx/event/:appid" do
      forward "/", WeChat.Plug.EventHandler, handler: &Module.handle_event/2
    end
    ```

    ## Options

    - `handler`: required, `t:WeChat.ServerMessage.EventHandler.handle_event_fun/0`
    - `client`: optional, `t:WeChat.client/0`
    """
    import Plug.Conn
    alias WeChat.ServerMessage

    @doc false
    # Normalizes the option list to a map and fails fast at compile/boot time
    # when the mandatory :handler option is missing.
    def init(opts) do
      opts = Map.new(opts)
      unless Map.has_key?(opts, :handler) do
        raise ArgumentError, "please set :handler when using #{inspect(__MODULE__)}"
      end
      opts
    end

    @doc false
    # GET, single-app: a :client was configured at mount time. Delegates to
    # handle_get — presumably WeChat's URL-verification handshake; confirm in
    # ServerMessage.EventHandler.
    def call(%{method: "GET", query_params: query_params} = conn, %{client: client}) do
      {status, resp} = ServerMessage.EventHandler.handle_get(query_params, client)
      send_resp(conn, status, resp)
    end
    # GET, multi-app: resolves the client from the :appid path parameter.
    # An unknown (or absent) appid yields nil from get_client_by_appid and
    # falls through to 400.
    def call(%{method: "GET", path_params: path_params, query_params: query_params} = conn, _opts) do
      with appid <- path_params["appid"],
           client when client != nil <- WeChat.get_client_by_appid(appid) do
        {status, resp} = ServerMessage.EventHandler.handle_get(query_params, client)
        send_resp(conn, status, resp)
      else
        _ ->
          send_resp(conn, 400, "Bad Request")
      end
    end
    # POST, single-app: a pushed event message; delegates body handling to the
    # user-supplied handler. This clause must stay before the multi-app POST
    # clause — %{handler: handler} would also match opts that contain :client.
    def call(%{method: "POST"} = conn, %{client: client, handler: handler}) do
      {status, resp} = ServerMessage.EventHandler.handle_post(conn, client, handler)
      send_resp(conn, status, resp)
    end
    # POST, multi-app: resolve the client by appid, then handle the message.
    def call(%{method: "POST", path_params: path_params} = conn, %{handler: handler}) do
      with appid <- path_params["appid"],
           client when client != nil <- WeChat.get_client_by_appid(appid) do
        {status, resp} = ServerMessage.EventHandler.handle_post(conn, client, handler)
        send_resp(conn, status, resp)
      else
        _ ->
          send_resp(conn, 400, "Bad Request")
      end
    end
    # Fallback for any other HTTP verb.
    # NOTE(review): status 404 with an "Invalid Method" body is inconsistent;
    # 405 Method Not Allowed would be conventional — confirm no caller relies
    # on 404 before changing.
    def call(conn, _opts) do
      send_resp(conn, 404, "Invalid Method")
    end
  end
end
|
lib/wechat/plug/event_handler.ex
| 0.551091
| 0.474753
|
event_handler.ex
|
starcoder
|
defmodule Petrovich.Detector do
  @moduledoc """
  Guesses the gender by person's name.
  """

  alias Petrovich.GenderStore
  alias Petrovich.Utils.ResultJoiner

  @doc """
  Detects a gender by name.

  This function receives two arguments:

  1. `name` raw person's name in nomenative case
  2. `type` which shows what part of person's name it is

  You can use any of these values as `type`:
  [:firstname, :middlename, :lastname]

  On success it returns a tuple `{:ok, "detected_gender"}`.
  Or `:error` in cases when it is impossible to detect gender.

  ## Examples

      iex> Detector.detect_gender("Игорь", :firstname)
      {:ok, "male"}

      iex> Detector.detect_gender("Саша", :firstname)
      {:ok, "androgynous"}

      iex> Detector.detect_gender("123", :firstname)
      :error

  """
  @spec detect_gender(String.t(), atom()) :: {:ok, String.t()} | :error
  def detect_gender(name, type) do
    %{"exceptions" => exceptions, "suffixes" => suffixes} =
      GenderStore.get("gender", to_string(type))

    # Hyphenated names are classified part by part; the joiner then picks
    # the first successful classification among the parts.
    name
    |> String.downcase()
    |> String.split("-")
    |> Enum.map(&classify_part(&1, exceptions, suffixes))
    |> ResultJoiner.join_any_results(&first_success/1)
  end

  # Classifies one hyphen-separated name part: exceptions win over suffix rules.
  defp classify_part(part, exceptions, suffixes) do
    part
    |> by_exception(exceptions)
    |> by_suffix(suffixes)
  end

  # Extracts the gender from the first {:ok, gender} in the results.
  # Crashes (as the original did) if no successful result is present.
  defp first_success(results) do
    results
    |> Enum.find(&match?({:ok, _}, &1))
    |> elem(1)
  end

  # Looks the part up in the explicit exception lists, keyed by gender.
  defp by_exception(part, exceptions) do
    case Enum.find(exceptions, fn {_gender, names} -> part in names end) do
      {gender, _names} -> {:ok, gender}
      nil -> {:error, part}
    end
  end

  # An earlier exception hit short-circuits past the suffix rules.
  defp by_suffix({:ok, gender}, _suffixes), do: {:ok, gender}

  defp by_suffix({:error, part}, suffixes) do
    case Enum.find(suffixes, fn {_gender, endings} -> matches_suffix?(part, endings) end) do
      {gender, _endings} -> {:ok, gender}
      nil -> :error
    end
  end

  # True when the part ends (grapheme-wise) with any of the given endings.
  defp matches_suffix?(part, endings) do
    part_len = String.length(part)

    Enum.any?(endings, fn ending ->
      ending_len = String.length(ending)
      String.slice(part, part_len - ending_len, ending_len) == ending
    end)
  end
end
|
lib/petrovich/detector.ex
| 0.840799
| 0.559982
|
detector.ex
|
starcoder
|
defmodule Majudge do
  @moduledoc """
  Majudge keeps the contexts that define your domain
  and business logic.
  Contexts are also responsible for managing your data, regardless
  if it comes from the database, an external API or others.
  """
  # Ratings ordered from best to worst; all comparison logic below relies
  # on this ordering.
  @default_ratings [:excellent, :verygood, :good, :average, :fair, :poor]

  defmodule Candidate do
    @derive {Jason.Encoder, only: [:name, :id, :thumbnail, :value]}
    # `value` holds the raw ballot data; `distance` holds the computed
    # [{rating, votes_to_remove}] list produced by distance/2.
    defstruct name: "Unknown", id: nil, thumbnail: nil, value: [], distance: []
  end

  # to simplify readability, I will be using the symbols:
  # excellent, verygood, good, average, fair, poor
  # Find the number of median votes that must be removed
  # before the rating would change to the given rating.
  # above_cur is the total number of ratings that are
  # higher than the current rating.
  # This function assumes that the median value is equal
  # to or lower than the given rating.
  # Returns {distance, cumulative_votes_at_or_above_this_rating}.
  def _distance_above(rating_votes, total, above_cur) do
    rating_and_above = rating_votes + above_cur
    cond do
      rating_and_above >= total / 2 ->
        rating_and_above}
        {0, rating_and_above}
      true ->
        {total - rating_and_above * 2, rating_and_above}
    end
  end
|
lib/majudge.ex
| 0.747063
| 0.61396
|
majudge.ex
|
starcoder
|
defmodule Reactivity.DSL.SignalObs do
@moduledoc false
alias Observables.Obs
alias Reactivity.Quality.Context
@doc """
Turns a plain Observable into a Signal Observable
Wraps each of its Values v into a tuple `{v, []}`,
the empty list being a list of potential Contexts.
"""
def from_plain_obs(obs) do
obs
|> Obs.map(fn v -> {v, []} end)
end
@doc """
Turns a Signal Observable back into a plain Observable
Unboxes each of its Values v from its encompassing Message `{v, c}`,
effectively stripping it from any associated Contexts it might have.
"""
def to_plain_obs(sobs) do
{vobs, _cobs} =
sobs
|> Obs.unzip()
vobs
end
@doc """
Transforms a Signal Observable to an Observable carrying only its Contexts
"""
def to_context_obs(sobs) do
{_vobs, cobs} =
sobs
|> Obs.unzip()
cobs
end
@doc """
Adds the appropriate Contexts for the given Guarantee to a Signal Observable.
The Context is added to the back of the list of Contexts `[c]`
that is part of the Message tuple `{v, [c]}` emitted by a Signal Observable.
"""
def add_context(sobs, cg) do
acobs = Context.new_context_obs(sobs, cg)
{vobs, cobs} =
sobs
|> Obs.unzip()
ncobs =
cobs
|> Obs.zip(acobs)
|> Obs.map(fn {pc, ac} -> pc ++ [ac] end)
Obs.zip(vobs, ncobs)
end
@doc """
Removes a Context from a Signal Observable by its index.
"""
def remove_context(sobs, i) do
sobs
|> Obs.map(fn {v, cs} ->
new_cs =
cs
|> List.delete_at(i)
{v, new_cs}
end)
end
@doc """
Removes all Contexts from the Signal Observable, safe for the one at the given index.
"""
def keep_context(sobs, i) do
sobs
|> Obs.map(fn {v, cs} ->
c =
cs
|> Enum.at(i)
new_cs = [c]
{v, new_cs}
end)
end
@doc """
Removes all Contexts from the Signal Observable.
"""
def clear_context(sobs) do
sobs
|> Obs.map(fn {v, _c} -> {v, []} end)
end
@doc """
Sets the appropriate Contexts of a Signal Observable for the given Guarantee.
Replaces all existing Contexts.
"""
def set_context(sobs, cg) do
sobs
|> clear_context
|> add_context(cg)
end
end
|
lib/reactivity/dsl/signal_obs.ex
| 0.845257
| 0.561095
|
signal_obs.ex
|
starcoder
|
defmodule NcsaHmac.Signer do
@default_hash :sha512
@service_name "NCSA.HMAC"
@moduledoc """
The Signer module provides functions for generating a cryptographic hash based on the details of a web request.
"""
@doc """
Generate the complete signature for the request details
Required paramters:
* `:request_details` - A Map of the key elements from the request that are
needed to compute a correct signature, required key-values: "method", "path", "params",
and "content-type", optional values: "date", "service-name"
* `:key_id` - The database id of the record. This is also the publically
visible and unencrypted piece of the request signature
* `:key_secret` - The signing_key or sercret_key that is used to sign the request.
This is the shared secret that must be known to both the requesting server
as well as the receiveing server. The signing_key should be kept securely and
not shared publically.
Optional opts:
* `:hash_type` - Specifies the cryptographic hash function to use when computing
the signature, defaults to :sha512.
Request Details:
* `:method` - The HTTP verb used for the request, GET, POST, PUT, etc.
* `:path` - The http request path. (Everything between the hostname and the query params)
* `:params` - The body of the request. (Excludes query string parameters)
* `:content-type` - The header content-type string, defaults to "application/json"
* `:service-name` - An arbitrary string appended to the the beginning of the authorization signature. Defaults to "NCSA.HMAC"
Set the signature signature string which will be added to the `Authorization`
header. Authorization string takes the form:
'NCSA.HMAC auth_id:base64_encoded_cryptographic_signature'
or 'SERVICE.NAME auth_id:base64_encoded_cryptographic_signature'
"""
def sign(request_details, key_id, key_secret, hash_type \\ @default_hash) do
validate_key!(key_id, "key_id")
validate_key!(key_secret, "key_secret")
authorization_string(
key_id,
signature(request_details, key_secret, hash_type),
service_name(request_details["service-name"])
)
end
@doc """
Create a canonical string from the request that will be used to computed
the signature.
The `canonicalize_request` method performs several steps:
Set the `Date` field, unless it was already set.
Calculate the MD5 Hash of the request parameters and set the `Content-Digest`
field.
Canonicalize the request fields. The helps ensure that only guaranteed fields
are used to calculate the header. It also helps ensure that the same request
signature will be calculated the same way every time.
"""
def canonicalize_request(request_details) do
request_details = request_details
|> set_request_date
|> Map.put("content-digest", content_digest(request_details["params"]))
NcsaHmac.Canonical.string(
request_details["method"],
request_details["path"],
request_details["date"],
request_details["content-digest"],
request_details["content-type"]
)
end
@doc """
Compute the cryptographic signature from the canonical request string using
the key_secret and hash_type specified in the function call.
"""
def signature(request_details, key_secret, hash_type \\ @default_hash) do
Base.encode64(
:crypto.hmac(hash_type, key_secret, canonicalize_request(request_details))
)
end
@doc """
For interoperabiltiy, request parameters are converted to json and returned
in a deterministic order, so hash computation is unlikely to produce
different results on different systems.
For this reason we use the JSON package rather than Poision.
"""
def normalize_parameters(params) when is_map(params) do
case JSON.encode params do
{:ok, json_params} -> json_params
{:error, params} -> params
end
end
def normalize_parameters(params), do: params
defp authorization_string(key_id, signature, service_name) do
"#{service_name} #{key_id}:#{signature}"
end
defp service_name(""), do: @service_name
defp service_name(nil), do: @service_name
defp service_name(service_name), do: service_name
defp content_digest(""), do: ""
defp content_digest(params) when params == %{}, do: ""
defp content_digest(params) do
Base.encode16(:erlang.md5(normalize_parameters(params)), case: :lower)
end
defp set_request_date(request_details) do
date = Map.get(request_details, "date")
Map.put(request_details, "date", set_date(date))
end
defp set_date(nil) do
{_, iso_time} = Timex.Format.DateTime.Formatter.format(Timex.now, "{ISO:Extended:Z}")
iso_time
end
defp set_date(""), do: set_date(nil)
defp set_date(date), do: date
defp validate_key!(key, key_type) do
case key do
nil -> raise NcsaHmac.SigningError, message: "#{key_type} is required"
"" -> raise NcsaHmac.SigningError, message: "#{key_type} is required"
_ -> "carry on"
end
end
end
|
lib/ncsa_hmac/signer.ex
| 0.88275
| 0.57818
|
signer.ex
|
starcoder
|
defmodule ExAws.AutoScaling do
  @moduledoc """
  Operations on AWS EC2 Auto Scaling.

  A work-in-progress selection of operations from the
  [Amazon EC2 Auto Scaling API](https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_Operations_List.html).

  Example:

  ```elixir
  ExAws.AutoScaling.describe_auto_scaling_instances(
    instance_ids: ["i-0598c7d356eba48d7", "i-1234567890abcdef0"]
  )
  ```
  """
  use ExAws.Utils,
    format_type: :xml,
    non_standard_keys: %{}

  # Version of the EC2 Auto Scaling query API this module speaks.
  @version "2011-01-01"

  @type lifecycle_action_opts :: [
          instance_id: binary,
          lifecycle_action_token: binary
        ]

  @type lifecycle_action_result :: :continue | :abandon

  @doc """
  Completes the lifecycle action for the specified token or instance with the
  specified result.

  Doc: https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_CompleteLifecycleAction.html
  """
  @spec complete_lifecycle_action(
          auto_scaling_group_name :: binary,
          lifecycle_hook_name :: binary,
          lifecycle_action_result :: lifecycle_action_result
        ) :: ExAws.Operation.Query.t()
  @spec complete_lifecycle_action(
          auto_scaling_group_name :: binary,
          lifecycle_hook_name :: binary,
          lifecycle_action_result :: lifecycle_action_result,
          opts :: lifecycle_action_opts
        ) :: ExAws.Operation.Query.t()
  def complete_lifecycle_action(
        auto_scaling_group_name,
        lifecycle_hook_name,
        lifecycle_action_result,
        opts \\ []
      ) do
    # The wire format wants "CONTINUE" / "ABANDON" rather than the atom form.
    result_name = lifecycle_action_result |> Atom.to_string() |> String.upcase()

    required = [
      {"AutoScalingGroupName", auto_scaling_group_name},
      {"LifecycleHookName", lifecycle_hook_name},
      {"LifecycleActionResult", result_name}
    ]

    build_request(required ++ opts, :complete_lifecycle_action)
  end

  @doc """
  Describes one or more Auto Scaling instances.

  Doc: https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_DescribeAutoScalingInstances.html
  """
  @type describe_auto_scaling_instances_opts :: [
          instance_ids: [binary, ...],
          max_records: integer,
          next_token: binary
        ]
  @spec describe_auto_scaling_instances() :: ExAws.Operation.Query.t()
  @spec describe_auto_scaling_instances(opts :: describe_auto_scaling_instances_opts) ::
          ExAws.Operation.Query.t()
  def describe_auto_scaling_instances(opts \\ []) do
    build_request(opts, :describe_auto_scaling_instances)
  end

  @doc """
  Records a heartbeat for the lifecycle action associated with the specified
  token or instance.

  Doc: https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_RecordLifecycleActionHeartbeat.html
  """
  @spec record_lifecycle_action_heartbeat(
          auto_scaling_group_name :: binary,
          lifecycle_hook_name :: binary
        ) :: ExAws.Operation.Query.t()
  @spec record_lifecycle_action_heartbeat(
          auto_scaling_group_name :: binary,
          lifecycle_hook_name :: binary,
          opts :: lifecycle_action_opts
        ) :: ExAws.Operation.Query.t()
  def record_lifecycle_action_heartbeat(auto_scaling_group_name, lifecycle_hook_name, opts \\ []) do
    required = [
      {"AutoScalingGroupName", auto_scaling_group_name},
      {"LifecycleHookName", lifecycle_hook_name}
    ]

    build_request(required ++ opts, :record_lifecycle_action_heartbeat)
  end

  # Expands each high-level option into its query parameter(s), then wraps
  # everything in an ExAws.Operation.Query.
  defp build_request(opts, action) do
    opts
    |> Enum.flat_map(&format_param/1)
    |> request(action)
  end

  defp request(params, action) do
    # "describe_auto_scaling_instances" -> "DescribeAutoScalingInstances"
    action_name = action |> Atom.to_string() |> Macro.camelize()

    query_params =
      params
      |> filter_nil_params
      |> Map.put("Action", action_name)
      |> Map.put("Version", @version)

    %ExAws.Operation.Query{
      path: "/",
      params: query_params,
      service: :autoscaling,
      action: action,
      parser: &ExAws.AutoScaling.Parsers.parse/2
    }
  end

  # Instance id lists become indexed "InstanceIds.member.N" parameters.
  defp format_param({:instance_ids, instance_ids}),
    do: format(instance_ids, prefix: "InstanceIds.member")

  # Everything else goes through the generic ExAws.Utils formatter.
  defp format_param({key, value}), do: format([{key, value}])
end
|
lib/ex_aws/auto_scaling.ex
| 0.843847
| 0.710025
|
auto_scaling.ex
|
starcoder
|
defmodule Eecrit.AnimalUseReportTxs do
  @moduledoc """
  Builds the animal-use report view model.

  The data flows through these shapes:

      [... {animal_id, procedure_id, count} ...]
      [... [animal_id, {procedure_id, count}, {procedure_id, count}...] ...]
      [... [animal, {procedure, count}, {procedure, count}...] ...]
      [... [animal-view, procedure-view, procedure-view...] ...]
      (then a sorted version of the above)

  Most of the work is done on a list-of-lists structure, where the head of
  each sublist is about an animal and the tail is about procedures.
  """
  alias Eecrit.Pile
  alias Eecrit.OldAnimalSource
  alias Eecrit.OldUseSource
  alias Eecrit.OldProcedureSource
  alias Eecrit.ViewModel

  defmodule P do
    # Applies `hd_transform` to the head of each sublist and `tail_transform`
    # to every element of the tail, preserving the list-of-lists shape.
    defp two_level_transform(list_of_lists, hd_transform, tail_transform) do
      Enum.map(list_of_lists, fn [head | tail] ->
        [hd_transform.(head) | Enum.map(tail, tail_transform)]
      end)
    end

    @doc """
    [... {animal_id, procedure_id, count} ... ] # becomes:
    [... [animal_id, {procedure_id, count}, {procedure_id, count}...] ...
    """
    def create_list_of_lists(uses, ids_of_all_available_animals) do
      # Every available animal gets a bucket, even if it has no uses.
      empty_buckets = Map.new(ids_of_all_available_animals, fn id -> {id, []} end)

      filled_buckets =
        Enum.reduce(uses, empty_buckets, fn {animal_id, procedure_id, count}, acc ->
          # Map.update!/3 (like the original `%{map | key => _}`) raises if a
          # use refers to an animal outside the available set.
          Map.update!(acc, animal_id, &[{procedure_id, count} | &1])
        end)

      for {animal_id, procedure_counts} <- filled_buckets,
          do: [animal_id | procedure_counts]
    end

    @doc """
    [... [animal_id, {procedure_id, count}, {procedure_id, count}...] ...
    [... [animal, {procedure, count}, {procedure, count}...] ...
    """
    def convert_ids_to_models(list_of_lists, animals, procedures) do
      procedures_by_id = Pile.index(procedures, :id)
      animals_by_id = Pile.index(animals, :id)

      two_level_transform(
        list_of_lists,
        &Map.get(animals_by_id, &1),
        fn {procedure_id, count} -> {Map.get(procedures_by_id, procedure_id), count} end
      )
    end

    @doc """
    [... [animal, {procedure, count}, {procedure, count}...] ...
    [... [animal-view, procedure-view, procedure-view...] ...]
    """
    def convert_models_to_model_views(list_of_lists) do
      two_level_transform(
        list_of_lists,
        &ViewModel.animal/1,
        fn {procedure, count} -> ViewModel.procedure(procedure, use_count: count) end
      )
    end

    # Sorts procedures within each animal, then the animals themselves.
    def two_level_sort(list_of_lists) do
      list_of_lists
      |> Enum.map(fn [animal | procedures] -> [animal | Pile.sort_by_name_key(procedures)] end)
      |> Pile.sort_human_alphabetically(fn [animal | _] -> animal.name end)
    end

    # Merges duplicate {animal, procedure} pairs by summing their counts.
    def sum_counts_of_duplicate_animal_procedure_pairs(uses) do
      uses
      |> Enum.group_by(
        fn {animal_id, procedure_id, _count} -> {animal_id, procedure_id} end,
        fn {_animal_id, _procedure_id, count} -> count end
      )
      |> Enum.map(fn {{animal_id, procedure_id}, counts} ->
        {animal_id, procedure_id, Enum.sum(counts)}
      end)
    end

    # Runs the whole pipeline described in the moduledoc.
    def view_model(uses, animals, procedures) do
      animal_ids = for animal <- animals, do: animal.id

      uses
      |> sum_counts_of_duplicate_animal_procedure_pairs()
      |> create_list_of_lists(animal_ids)
      |> convert_ids_to_models(animals, procedures)
      |> convert_models_to_model_views()
      |> two_level_sort()
    end

    # Fetches the distinct procedures referenced by the use tuples.
    def procedures_from_use_tuples(uses) do
      procedure_ids =
        uses
        |> Enum.map(fn {_animal_id, procedure_id, _count} -> procedure_id end)
        |> Enum.uniq()

      OldProcedureSource.all(with_ids: procedure_ids)
    end

    # Fetches animals concurrently with the rest of the report work.
    def gratuitous_parallelism(date_range) do
      Task.async(fn -> OldAnimalSource.all(ever_in_service_during: date_range) end)
    end
  end

  ### PUBLIC
  def run(date_range) do
    animals_task = P.gratuitous_parallelism(date_range)
    uses = OldUseSource.use_counts(date_range)
    procedures = P.procedures_from_use_tuples(uses)
    P.view_model(uses, Task.await(animals_task), procedures)
  end
end
|
Code.require_file("../util/util.ex", __DIR__)
Code.require_file("../day02/el.ex", __DIR__)
defmodule Day05 do
  # Advent of Code 2019, day 5: extends the Day02 Intcode machine with
  # parameter modes, input/output opcodes (3/4), jumps (5/6) and
  # comparisons (7/8). A "program" is a map with :memory (an Erlang :array),
  # :pc (instruction pointer) and :io (%{input: fun, output: fun}).

  # Mode 1 ("immediate") accessor: the operand is the literal value stored at
  # pc + index. Only :get is matched — writing to an immediate operand is a
  # bug and would crash here, intentionally.
  def parameter_mode_immediate(index) do
    fn
      (%{memory: memory, pc: pointer}, :get) ->
        :array.get(pointer + index, memory)
    end
  end

  # Reads the raw instruction word at the current program counter.
  def fetch_instruction(%{memory: memory, pc: pointer} = program) do
    {program, :array.get(pointer, memory)}
  end

  # Total instruction length (opcode + operands) per opcode. An unknown
  # opcode falls through no cond clause and crashes — surfacing corrupt
  # programs early rather than silently skipping.
  def decode_instruction_length(opcode) do
    cond do
      opcode in [1, 2, 7, 8] -> 4
      opcode in [5, 6] -> 3
      opcode in [3, 4] -> 2
      opcode in [99] -> 1
    end
  end

  # The low two decimal digits of the instruction word are the opcode;
  # `decode_instruction_length` is injected so Day02's table could be
  # swapped in.
  def decode_opcode({%{} = program, instruction}, decode_instruction_length) do
    opcode = rem(instruction, 100)
    {program, {instruction, opcode, decode_instruction_length.(opcode)} }
  end

  # Maps one digit of the mode prefix to an operand-accessor closure.
  # 0 = position mode (Day02), 1 = immediate mode.
  def decode_parameter_mode(mode, index) do
    case mode do
      0 -> Day02.parameter_mode_position(index)
      1 -> parameter_mode_immediate(index)
    end
  end

  # Builds one accessor per operand. Mode digits are read right-to-left from
  # the decimal prefix; digits beyond those present default to 0 (position
  # mode) via the infinite zero stream.
  def decode_instruction_parameters(params_count, mode, parameter_mode) do
    Integer.digits(mode)
    |> Enum.reverse
    |> Stream.concat(Stream.repeatedly(fn -> 0 end))
    |> Enum.zip(1..params_count)
    |> Enum.map(fn {mode, index} -> parameter_mode.(mode, index) end)
  end

  # Strips the opcode (low two digits) to leave the mode prefix, then
  # resolves each of the instruction's operands into an accessor.
  def decode_parameters({%{} = program, {instruction, opcode, opcode_length}}, parameter_mode) do
    mode = div(instruction, 100)
    {program, {opcode, decode_instruction_parameters(opcode_length-1, mode, parameter_mode)}}
  end

  # Opcode 3: read a value from the io input fun and store it at operand a.
  # Opcode 4: send the value of operand a to the io output fun.
  # Anything else falls through to Day02 (add / multiply / halt).
  def execute_instruction_a({%{memory: _, pc: pointer, io: %{input: input, output: output} = io} = program, instruction}) do
    case instruction do
      {3, [a | _]} ->
        # input/1 returns both the value read and the (possibly updated) io
        # state, so stateful input sources work.
        {invalue, newio} = input.(io)
        %{program |
          memory: a.(program, {:set, invalue}),
          io: newio,
          pc: pointer + 2
        }
      {4, [a | _]} ->
        %{program |
          io: output.(io, a.(program, :get)),
          pc: pointer + 2
        }
      inst -> Day02.execute_instruction({program, inst})
    end
  end

  # Opcode 5: jump-if-true, 6: jump-if-false (otherwise fall through to the
  # next instruction). Opcode 7: less-than, 8: equals — both store 1 or 0 at
  # operand c. Anything else falls through to execute_instruction_a.
  def execute_instruction_b({%{memory: _, pc: pointer} = program, instruction}) do
    case instruction do
      {5, [a, b | _]} -> %{program |
        pc: (if a.(program, :get) != 0, do: b.(program, :get), else: pointer + 3)
      }
      {6, [a, b | _]} -> %{program |
        pc: (if a.(program, :get) == 0, do: b.(program, :get), else: pointer + 3)
      }
      {7, [a, b, c]} -> %{program |
        memory: c.(program, {:set, (if a.(program, :get) < b.(program, :get), do: 1, else: 0)}),
        pc: pointer + 4
      }
      {8, [a, b, c]} -> %{program |
        memory: c.(program, {:set, (if a.(program, :get) == b.(program, :get), do: 1, else: 0)}),
        pc: pointer + 4
      }
      inst -> Day05.execute_instruction_a({program, inst})
    end
  end

  # Fetch/decode/execute loop. A program with pc: nil has halted (Day02's
  # halt convention — TODO confirm against Day02); a program without a pc is
  # freshly loaded and starts at 0.
  def run_program(program) do
    case program do
      %{pc: nil} -> program
      %{memory: _, pc: _, io: _} ->
        program
        |> fetch_instruction
        |> decode_opcode(&decode_instruction_length/1)
        |> decode_parameters(&decode_parameter_mode/2)
        |> execute_instruction_b
        |> run_program
      %{memory: memory, io: io} ->
        run_program(%{memory: memory, io: io, pc: 0})
    end
  end
end
|
day05/el.ex
| 0.507324
| 0.471892
|
el.ex
|
starcoder
|
defmodule ShopDeed.DecodeError do
  # Error returned (or raised) when a deck string cannot be decoded.
  @type t :: %__MODULE__{message: String.t()}

  defexception [:message]

  # Exception behaviour callback: the stored message is the display text.
  @impl true
  def message(%{message: msg}), do: msg
end
defmodule ShopDeed.Decoder do
  # Decodes Artifact-style deck codes: a prefixed, base64-encoded binary
  # containing a version/hero-count byte, a checksum, a name, and a stream of
  # variable-length-encoded hero/card entries.
  use Bitwise

  alias ShopDeed.{Card, Constants, Deck, DecodeError, Hero}

  # Decodes `deck_string` into {:ok, %Deck{}} with its heroes, cards and
  # name, or {:error, %DecodeError{}} describing the first validation that
  # failed (bad prefix, bad base64, wrong version, checksum mismatch).
  @spec decode(String.t()) :: {:error, ShopDeed.DecodeError.t()} | {:ok, ShopDeed.Deck.t()}
  def decode(deck_string) do
    with {:ok, bytes} <- validate_prefix(Constants.prefix(), deck_string),
         {:ok, decoded_bytes} <- decode_bytes(bytes),
         {version, checksum, hero_count, card_bytes, name} <- split_bytes(decoded_bytes),
         :ok <- validate_version(Constants.version(), version),
         :ok <- validate_checksum(checksum, card_bytes) do
      {heroes, left_over_bytes} = read_heroes(card_bytes, hero_count)
      cards = read_cards(left_over_bytes)
      {:ok, %Deck{cards: cards, heroes: heroes, name: name}}
    else
      {:error, message} ->
        {:error, %DecodeError{message: message}}
    end
  end

  # The deck string must start with the 3-byte site prefix; returns the rest.
  defp validate_prefix(expected_prefix, <<prefix::bytes-size(3)>> <> rest)
       when prefix == expected_prefix do
    {:ok, rest}
  end

  defp validate_prefix(expected_prefix, _bytes) do
    {:error, "Must start with prefix '#{expected_prefix}'"}
  end

  # Deck codes use a custom URL-safe alphabet ('-' for '/', '_' for '=');
  # undo that substitution before standard base64 decoding.
  defp decode_bytes(bytes) do
    decoded =
      bytes
      |> String.replace("-", "/")
      |> String.replace("_", "=")
      |> Base.decode64()

    case decoded do
      :error -> {:error, "Unable to base64 decode string: #{bytes}"}
      ok -> ok
    end
  end

  # Header layout: version (4 bits), 1 unused bit, hero count (3 bits), then
  # a checksum byte and the name length; the name occupies the final
  # `name_length` bytes, everything between is card data.
  defp split_bytes(
         <<version::4, _::1, hero_count::3, checksum::integer, name_length::integer>> <> rest
       ) do
    card_bytes_length = byte_size(rest) - name_length
    <<card_bytes::bytes-size(card_bytes_length)>> <> name_bytes = rest
    {version, checksum, hero_count, card_bytes, name_bytes}
  end

  # The encoded version must match the version this decoder understands.
  defp validate_version(expected_version, version) when expected_version == version, do: :ok
  # Bug fix: the message previously interpolated the deck's *own* version,
  # producing a self-contradictory error. Report the required version.
  defp validate_version(expected, _version), do: {:error, "Version must be equal to #{expected}"}

  # The checksum is the low byte of the sum of all card bytes.
  defp validate_checksum(checksum, card_bytes) do
    calculated_checksum = card_bytes |> :binary.bin_to_list() |> Enum.sum() &&& 0xFF

    case calculated_checksum == checksum do
      false -> {:error, "Checksum mismatch"}
      true -> :ok
    end
  end

  # Reads `count` hero entries off the front of the card bytes. Hero ids are
  # delta-encoded: each entry's id is carried into the next read.
  defp read_heroes(bytes, count), do: read_heroes(bytes, count, 0, [])

  defp read_heroes(bytes, 0, _carry, cards), do: {cards, bytes}

  defp read_heroes(bytes, count, carry, heroes) do
    # A hero is encoded like a card; its "count" field is the turn number.
    {%Card{count: turn, id: id}, rest} = read_card(bytes, carry)
    read_heroes(rest, count - 1, id, heroes ++ [%Hero{turn: turn, id: id}])
  end

  # Reads card entries (same delta-encoding of ids) until the bytes run out.
  defp read_cards(bytes), do: read_cards(bytes, 0, [])

  defp read_cards("", _carry, cards), do: cards

  defp read_cards(bytes, carry, cards) do
    {card, rest} = read_card(bytes, carry)
    read_cards(rest, card.id, cards ++ [card])
  end

  # First byte: top 2 bits are (count - 1); the id delta follows as a
  # 5-bit-start varint, added to the carried previous id.
  defp read_card(<<count::2, _::6, _rest::bits>> = bytes, carry) do
    {id_info, rest} = read_encoded_32(bytes, 5)

    {%Card{
       id: id_info + carry,
       count: count + 1
     }, rest}
  end

  # Varint reader: the first chunk contributes `num_bits` bits; each
  # continuation chunk contributes 7 more, shifted progressively higher.
  defp read_encoded_32(<<chunk::8, rest::bits>>, num_bits) do
    chunk |> read_chunk(num_bits) |> read_encoded_32(rest, 7, num_bits)
  end

  # Stop once a chunk's continue bit was clear. NOTE(review): the
  # `num_bits != 0` guard mirrors the reference decoder's "or no bits
  # requested" stop condition — confirm against the format spec.
  defp read_encoded_32({false, result}, rest, num_bits, _shift) when num_bits != 0 do
    {result, rest}
  end

  defp read_encoded_32({_continue, result}, <<chunk::8, rest::bits>>, num_bits, shift) do
    chunk |> read_chunk(num_bits, shift, result) |> read_encoded_32(rest, 7, shift + 7)
  end

  # Reads num_bits from bytes into out_bits, offset by the shift.
  defp read_chunk(bytes, num_bits, shift \\ 0, out_bits \\ 0) do
    continue_bit = 1 <<< num_bits
    # Wipe out all bits that don't concern us
    new_bits = bytes &&& continue_bit - 1
    # Prepend the newly read bits
    out_bits = out_bits ||| new_bits <<< shift
    # The bit just above the payload signals "more chunks follow".
    continue = (bytes &&& continue_bit) != 0
    {continue, out_bits}
  end
end
|
lib/decoder.ex
| 0.71413
| 0.492005
|
decoder.ex
|
starcoder
|
defmodule AWS.KMS do
@moduledoc """
AWS Key Management Service
AWS Key Management Service (AWS KMS) is an encryption and key management
web service. This guide describes the AWS KMS operations that you can call
programmatically. For general information about AWS KMS, see the [ *AWS Key
Management Service Developer Guide*
](http://docs.aws.amazon.com/kms/latest/developerguide/).
<note> AWS provides SDKs that consist of libraries and sample code for
various programming languages and platforms (Java, Ruby, .Net, macOS,
Android, etc.). The SDKs provide a convenient way to create programmatic
access to AWS KMS and other AWS services. For example, the SDKs take care
of tasks such as signing requests (see below), managing errors, and
retrying requests automatically. For more information about the AWS SDKs,
including how to download and install them, see [Tools for Amazon Web
Services](http://aws.amazon.com/tools/).
</note> We recommend that you use the AWS SDKs to make programmatic API
calls to AWS KMS.
Clients must support TLS (Transport Layer Security) 1.0. We recommend TLS
1.2. Clients must also support cipher suites with Perfect Forward Secrecy
(PFS) such as Ephemeral Diffie-Hellman (DHE) or Elliptic Curve Ephemeral
Diffie-Hellman (ECDHE). Most modern systems such as Java 7 and later
support these modes.
**Signing Requests**
Requests must be signed by using an access key ID and a secret access key.
We strongly recommend that you *do not* use your AWS account (root) access
key ID and secret key for everyday work with AWS KMS. Instead, use the
access key ID and secret access key for an IAM user, or you can use the AWS
Security Token Service to generate temporary security credentials that you
can use to sign requests.
All AWS KMS operations require [Signature Version
4](http://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
**Logging API Requests**
AWS KMS supports AWS CloudTrail, a service that logs AWS API calls and
related events for your AWS account and delivers them to an Amazon S3
bucket that you specify. By using the information collected by CloudTrail,
you can determine what requests were made to AWS KMS, who made the request,
when it was made, and so on. To learn more about CloudTrail, including how
to turn it on and find your log files, see the [AWS CloudTrail User
Guide](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/).
**Additional Resources**
For more information about credentials and request signing, see the
following:
<ul> <li> [AWS Security
Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)
- This topic provides general information about the of credentials used for
accessing AWS.
</li> <li> [Temporary Security
Credentials](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html)
- This section of the *IAM User Guide* describes how to create and use
temporary security credentials.
</li> <li> [Signature Version 4 Signing
Process](http://docs.aws.amazon.com/general/latest/gr/signature-version-4.html)
- This set of topics walks you through the process of signing a request
using an access key ID and a secret access key.
</li> </ul> **Commonly Used APIs**
Of the APIs discussed in this guide, the following will prove the most
useful for most applications. You will likely perform actions other than
these, such as creating keys and assigning policies, by using the console.
<ul> <li> `Encrypt`
</li> <li> `Decrypt`
</li> <li> `GenerateDataKey`
</li> <li> `GenerateDataKeyWithoutPlaintext`
</li> </ul>
"""
@doc """
Cancels the deletion of a customer master key (CMK). When this operation is
successful, the CMK is set to the `Disabled` state. To enable a CMK, use
`EnableKey`. You cannot perform this operation on a CMK in a different AWS
account.
For more information about scheduling and canceling deletion of a CMK, see
[Deleting Customer Master
Keys](http://docs.aws.amazon.com/kms/latest/developerguide/deleting-keys.html)
in the *AWS Key Management Service Developer Guide*.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def cancel_key_deletion(client, input, options \\ []) do
request(client, "CancelKeyDeletion", input, options)
end
@doc """
Connects or reconnects a [custom key
store](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-overview.html)
to its associated AWS CloudHSM cluster.
The custom key store must be connected before you can create customer
master keys (CMKs) in the key store or use the CMKs it contains. You can
disconnect and reconnect a custom key store at any time.
To connect a custom key store, its associated AWS CloudHSM cluster must
have at least one active HSM. To get the number of active HSMs in a
cluster, use the
[DescribeClusters](http://docs.aws.amazon.com/cloudhsm/latest/APIReference/API_DescribeClusters)
operation. To add HSMs to the cluster, use the
[CreateHsm](http://docs.aws.amazon.com/cloudhsm/latest/APIReference/API_CreateHsm)
operation.
The connection process can take an extended amount of time to complete; up
to 20 minutes. This operation starts the connection process, but it does
not wait for it to complete. When it succeeds, this operation quickly
returns an HTTP 200 response and a JSON object with no properties. However,
this response does not indicate that the custom key store is connected. To
get the connection state of the custom key store, use the
`DescribeCustomKeyStores` operation.
During the connection process, AWS KMS finds the AWS CloudHSM cluster that
is associated with the custom key store, creates the connection
infrastructure, connects to the cluster, logs into the AWS CloudHSM client
as the [ `kmsuser` crypto
user](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-concepts.html#concept-kmsuser)
(CU), and rotates its password.
The `ConnectCustomKeyStore` operation might fail for various reasons. To
find the reason, use the `DescribeCustomKeyStores` operation and see the
`ConnectionErrorCode` in the response. For help interpreting the
`ConnectionErrorCode`, see `CustomKeyStoresListEntry`.
To fix the failure, use the `DisconnectCustomKeyStore` operation to
disconnect the custom key store, correct the error, use the
`UpdateCustomKeyStore` operation if necessary, and then use
`ConnectCustomKeyStore` again.
If you are having trouble connecting or disconnecting a custom key store,
see [Troubleshooting a Custom Key
Store](http://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
in the *AWS Key Management Service Developer Guide*.
"""
def connect_custom_key_store(client, input, options \\ []) do
request(client, "ConnectCustomKeyStore", input, options)
end
@doc """
Creates a display name for a customer master key (CMK). You can use an
alias to identify a CMK in selected operations, such as `Encrypt` and
`GenerateDataKey`.
Each CMK can have multiple aliases, but each alias points to only one CMK.
The alias name must be unique in the AWS account and region. To simplify
code that runs in multiple regions, use the same alias name, but point it
to a different CMK in each region.
Because an alias is not a property of a CMK, you can delete and change the
aliases of a CMK without affecting the CMK. Also, aliases do not appear in
the response from the `DescribeKey` operation. To get the aliases of all
CMKs, use the `ListAliases` operation.
An alias must start with the word `alias` followed by a forward slash
(`alias/`). The alias name can contain only alphanumeric characters,
forward slashes (/), underscores (_), and dashes (-). Alias names cannot
begin with `aws`; that alias name prefix is reserved by Amazon Web Services
(AWS).
The alias and the CMK it is mapped to must be in the same AWS account and
the same region. You cannot perform this operation on an alias in a
different AWS account.
To map an existing alias to a different CMK, call `UpdateAlias`.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def create_alias(client, input, options \\ []) do
request(client, "CreateAlias", input, options)
end
@doc """
Creates a [custom key
store](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-overview.html)
that is associated with an [AWS CloudHSM
cluster](http://docs.aws.amazon.com/cloudhsm/latest/userguide/clusters.html)
that you own and manage.
This operation is part of the [Custom Key Store
feature](http://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
feature in AWS KMS, which combines the convenience and extensive
integration of AWS KMS with the isolation and control of a single-tenant
key store.
When the operation completes successfully, it returns the ID of the new
custom key store. Before you can use your new custom key store, you need to
use the `ConnectCustomKeyStore` operation to connect the new key store to
its AWS CloudHSM cluster.
The `CreateCustomKeyStore` operation requires the following elements.
<ul> <li> You must specify an active AWS CloudHSM cluster in the same
account and AWS Region as the custom key store. You can use an existing
cluster or [create and activate a new AWS CloudHSM
cluster](http://docs.aws.amazon.com/cloudhsm/latest/userguide/create-cluster.html)
for the key store. AWS KMS does not require exclusive use of the cluster.
</li> <li> You must include the content of the *trust anchor certificate*
for the cluster. You created this certificate, and saved it in the
`customerCA.crt` file, when you [initialized the
cluster](http://docs.aws.amazon.com/cloudhsm/latest/userguide/initialize-cluster.html#sign-csr).
</li> <li> You must provide the password of the dedicated [ `kmsuser`
crypto
user](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-concepts.html#concept-kmsuser)
(CU) account in the cluster.
Before you create the custom key store, use the
[createUser](http://docs.aws.amazon.com/cloudhsm/latest/userguide/cloudhsm_mgmt_util-createUser.html)
command in `cloudhsm_mgmt_util` to create [a crypto user (CU) named
`kmsuser`
](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-concepts.html#concept-kmsuser)in
specified AWS CloudHSM cluster. AWS KMS uses the `kmsuser` CU account to
create and manage key material on your behalf. For instructions, see
[Create the kmsuser Crypto
User](http://docs.aws.amazon.com/kms/latest/developerguide/create-keystore.html#before-keystore)
in the *AWS Key Management Service Developer Guide*.
</li> </ul> The AWS CloudHSM cluster that you specify must meet the
following requirements.
<ul> <li> The cluster must be active and be in the same AWS account and
Region as the custom key store.
</li> <li> Each custom key store must be associated with a different AWS
CloudHSM cluster. The cluster cannot be associated with another custom key
store or have the same cluster certificate as a cluster that is associated
with another custom key store. To view the cluster certificate, use the AWS
CloudHSM
[DescribeClusters](http://docs.aws.amazon.com/cloudhsm/latest/APIReference/API_DescribeClusters.html)
operation. Clusters that share a backup history have the same cluster
certificate.
</li> <li> The cluster must be configured with subnets in at least two
different Availability Zones in the Region. Because AWS CloudHSM is not
supported in all Availability Zones, we recommend that the cluster have
subnets in all Availability Zones in the Region.
</li> <li> The cluster must contain at least two active HSMs, each in a
different Availability Zone.
</li> </ul> New custom key stores are not automatically connected. After
you create your custom key store, use the `ConnectCustomKeyStore` operation
to connect the custom key store to its associated AWS CloudHSM cluster.
Even if you are not going to use your custom key store immediately, you
might want to connect it to verify that all settings are correct and then
disconnect it until you are ready to use it.
If this operation succeeds, it returns the ID of the new custom key store.
For help with failures, see [Troubleshoot a Custom Key
Store](http://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
in the *AWS KMS Developer Guide*.
"""
def create_custom_key_store(client, input, options \\ []) do
request(client, "CreateCustomKeyStore", input, options)
end
@doc """
Adds a grant to a customer master key (CMK). The grant specifies who can
use the CMK and under what conditions. When setting permissions, grants are
an alternative to key policies.
To perform this operation on a CMK in a different AWS account, specify the
key ARN in the value of the `KeyId` parameter. For more information about
grants, see
[Grants](http://docs.aws.amazon.com/kms/latest/developerguide/grants.html)
in the *AWS Key Management Service Developer Guide*.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def create_grant(client, input, options \\ []) do
request(client, "CreateGrant", input, options)
end
@doc """
Creates a customer master key (CMK) in the caller's AWS account.
You can use a CMK to encrypt small amounts of data (4 KiB or less)
directly, but CMKs are more commonly used to encrypt data keys, which are
used to encrypt raw data. For more information about data keys and the
difference between CMKs and data keys, see the following:
<ul> <li> The `GenerateDataKey` operation
</li> <li> [AWS Key Management Service
Concepts](http://docs.aws.amazon.com/kms/latest/developerguide/concepts.html)
in the *AWS Key Management Service Developer Guide*
</li> </ul> If you plan to [import key
material](http://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html),
use the `Origin` parameter with a value of `EXTERNAL` to create a CMK with
no key material.
To create a CMK in a [custom key
store](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-overview.html),
use `CustomKeyStoreId` parameter to specify the custom key store. You must
also use the `Origin` parameter with a value of `AWS_CLOUDHSM`. The AWS
CloudHSM cluster that is associated with the custom key store must have at
least two active HSMs, each in a different Availability Zone in the Region.
You cannot use this operation to create a CMK in a different AWS account.
"""
def create_key(client, input, options \\ []) do
request(client, "CreateKey", input, options)
end
@doc """
Decrypts ciphertext. Ciphertext is plaintext that has been previously
encrypted by using any of the following operations:
<ul> <li> `GenerateDataKey`
</li> <li> `GenerateDataKeyWithoutPlaintext`
</li> <li> `Encrypt`
</li> </ul> Note that if a caller has been granted access permissions to
all keys (through, for example, IAM user policies that grant `Decrypt`
permission on all resources), then ciphertext encrypted by using keys in
other accounts where the key grants access to the caller can be decrypted.
To remedy this, we recommend that you do not grant `Decrypt` access in an
IAM user policy. Instead grant `Decrypt` access only in key policies. If
you must grant `Decrypt` access in an IAM user policy, you should scope the
resource to specific keys or to specific trusted accounts.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def decrypt(client, input, options \\ []) do
request(client, "Decrypt", input, options)
end
@doc """
Deletes the specified alias. You cannot perform this operation on an alias
in a different AWS account.
Because an alias is not a property of a CMK, you can delete and change the
aliases of a CMK without affecting the CMK. Also, aliases do not appear in
the response from the `DescribeKey` operation. To get the aliases of all
CMKs, use the `ListAliases` operation.
Each CMK can have multiple aliases. To change the alias of a CMK, use
`DeleteAlias` to delete the current alias and `CreateAlias` to create a new
alias. To associate an existing alias with a different customer master key
(CMK), call `UpdateAlias`.
"""
def delete_alias(client, input, options \\ []) do
request(client, "DeleteAlias", input, options)
end
@doc """
Deletes a [custom key
store](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-overview.html).
This operation does not delete the AWS CloudHSM cluster that is associated
with the custom key store, or affect any users or keys in the cluster.
The custom key store that you delete cannot contain any AWS KMS [customer
master keys
(CMKs)](http://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#master_keys).
Before deleting the key store, verify that you will never need to use any
of the CMKs in the key store for any cryptographic operations. Then, use
`ScheduleKeyDeletion` to delete the AWS KMS customer master keys (CMKs)
from the key store. When the scheduled waiting period expires, the
`ScheduleKeyDeletion` operation deletes the CMKs. Then it makes a best
effort to delete the key material from the associated cluster. However, you
might need to manually [delete the orphaned key
material](http://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-orphaned-key)
from the cluster and its backups.
After all CMKs are deleted from AWS KMS, use `DisconnectCustomKeyStore` to
disconnect the key store from AWS KMS. Then, you can delete the custom key
store.
Instead of deleting the custom key store, consider using
`DisconnectCustomKeyStore` to disconnect it from AWS KMS. While the key
store is disconnected, you cannot create or use the CMKs in the key store.
But, you do not need to delete CMKs and you can reconnect a disconnected
custom key store at any time.
If the operation succeeds, it returns a JSON object with no properties.
This operation is part of the [Custom Key Store
feature](http://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
feature in AWS KMS, which combines the convenience and extensive
integration of AWS KMS with the isolation and control of a single-tenant
key store.
"""
def delete_custom_key_store(client, input, options \\ []) do
request(client, "DeleteCustomKeyStore", input, options)
end
@doc """
Deletes key material that you previously imported. This operation makes the
specified customer master key (CMK) unusable. For more information about
importing key material into AWS KMS, see [Importing Key
Material](http://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*. You cannot perform
this operation on a CMK in a different AWS account.
When the specified CMK is in the `PendingDeletion` state, this operation
does not change the CMK's state. Otherwise, it changes the CMK's state to
`PendingImport`.
After you delete key material, you can use `ImportKeyMaterial` to reimport
the same key material into the CMK.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def delete_imported_key_material(client, input, options \\ []) do
request(client, "DeleteImportedKeyMaterial", input, options)
end
@doc """
Gets information about [custom key
stores](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-overview.html)
in the account and region.
This operation is part of the [Custom Key Store
feature](http://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive
integration of AWS KMS with the isolation and control of a single-tenant
key store.
By default, this operation returns information about all custom key stores
in the account and region. To get only information about a particular
custom key store, use either the `CustomKeyStoreName` or `CustomKeyStoreId`
parameter (but not both).
To determine whether the custom key store is connected to its AWS CloudHSM
cluster, use the `ConnectionState` element in the response. If an attempt
to connect the custom key store failed, the `ConnectionState` value is
`FAILED` and the `ConnectionErrorCode` element in the response indicates
the cause of the failure. For help interpreting the `ConnectionErrorCode`,
see `CustomKeyStoresListEntry`.
Custom key stores have a `DISCONNECTED` connection state if the key store
has never been connected or you use the `DisconnectCustomKeyStore`
operation to disconnect it. If your custom key store state is `CONNECTED`
but you are having trouble using it, make sure that its associated AWS
CloudHSM cluster is active and contains the minimum number of HSMs required
for the operation, if any.
For help repairing your custom key store, see the [Troubleshooting Custom
Key
Stores](http://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
topic in the *AWS Key Management Service Developer Guide*.
"""
def describe_custom_key_stores(client, input, options \\ []),
  do: request(client, "DescribeCustomKeyStores", input, options)
@doc """
Provides detailed information about the specified customer master key
(CMK).
If you use `DescribeKey` on a predefined AWS alias, that is, an AWS alias
with no key ID, AWS KMS associates the alias with an [AWS managed
CMK](http://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#master_keys)
and returns its `KeyId` and `Arn` in the response.
To perform this operation on a CMK in a different AWS account, specify the
key ARN or alias ARN in the value of the KeyId parameter.
"""
def describe_key(client, input, options \\ []),
  do: request(client, "DescribeKey", input, options)
@doc """
Sets the state of a customer master key (CMK) to disabled, thereby
preventing its use for cryptographic operations. You cannot perform this
operation on a CMK in a different AWS account.
For more information about how key state affects the use of a CMK, see [How
Key State Affects the Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def disable_key(client, input, options \\ []),
  do: request(client, "DisableKey", input, options)
@doc """
Disables [automatic rotation of the key
material](http://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
for the specified customer master key (CMK). You cannot perform this
operation on a CMK in a different AWS account.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def disable_key_rotation(client, input, options \\ []),
  do: request(client, "DisableKeyRotation", input, options)
@doc """
Disconnects the [custom key
store](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-overview.html)
from its associated AWS CloudHSM cluster. While a custom key store is
disconnected, you can manage the custom key store and its customer master
keys (CMKs), but you cannot create or use CMKs in the custom key store. You
can reconnect the custom key store at any time.
<note> While a custom key store is disconnected, all attempts to create
customer master keys (CMKs) in the custom key store or to use existing CMKs
in cryptographic operations will fail. This action can prevent users from
storing and accessing sensitive data.
</note> <p/> To find the connection state of a custom key store, use the
`DescribeCustomKeyStores` operation. To reconnect a custom key store, use
the `ConnectCustomKeyStore` operation.
If the operation succeeds, it returns a JSON object with no properties.
This operation is part of the [Custom Key Store
feature](http://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive
integration of AWS KMS with the isolation and control of a single-tenant
key store.
"""
def disconnect_custom_key_store(client, input, options \\ []),
  do: request(client, "DisconnectCustomKeyStore", input, options)
@doc """
Sets the key state of a customer master key (CMK) to enabled. This allows
you to use the CMK for cryptographic operations. You cannot perform this
operation on a CMK in a different AWS account.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def enable_key(client, input, options \\ []),
  do: request(client, "EnableKey", input, options)
@doc """
Enables [automatic rotation of the key
material](http://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
for the specified customer master key (CMK). You cannot perform this
operation on a CMK in a different AWS account.
You cannot enable automatic rotation of CMKs with imported key material or
CMKs in a [custom key
store](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-overview.html).
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def enable_key_rotation(client, input, options \\ []),
  do: request(client, "EnableKeyRotation", input, options)
@doc """
Encrypts plaintext into ciphertext by using a customer master key (CMK).
The `Encrypt` operation has two primary use cases:
<ul> <li> You can encrypt up to 4 kilobytes (4096 bytes) of arbitrary data
such as an RSA key, a database password, or other sensitive information.
</li> <li> To move encrypted data from one AWS region to another, you can
use this operation to encrypt in the new region the plaintext data key that
was used to encrypt the data in the original region. This provides you with
an encrypted copy of the data key that can be decrypted in the new region
and used there to decrypt the encrypted data.
</li> </ul> To perform this operation on a CMK in a different AWS account,
specify the key ARN or alias ARN in the value of the KeyId parameter.
Unless you are moving encrypted data from one region to another, you don't
use this operation to encrypt a generated data key within a region. To get
data keys that are already encrypted, call the `GenerateDataKey` or
`GenerateDataKeyWithoutPlaintext` operation. Data keys don't need to be
encrypted again by calling `Encrypt`.
To encrypt data locally in your application, use the `GenerateDataKey`
operation to return a plaintext data encryption key and a copy of the key
encrypted under the CMK of your choosing.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def encrypt(client, input, options \\ []),
  do: request(client, "Encrypt", input, options)
@doc """
Returns a data encryption key that you can use in your application to
encrypt data locally.
You must specify the customer master key (CMK) under which to generate the
data key. You must also specify the length of the data key using either the
`KeySpec` or `NumberOfBytes` field. You must specify one field or the
other, but not both. For common key lengths (128-bit and 256-bit symmetric
keys), we recommend that you use `KeySpec`. To perform this operation on a
CMK in a different AWS account, specify the key ARN or alias ARN in the
value of the KeyId parameter.
This operation returns a plaintext copy of the data key in the `Plaintext`
field of the response, and an encrypted copy of the data key in the
`CiphertextBlob` field. The data key is encrypted under the CMK specified
in the `KeyId` field of the request.
We recommend that you use the following pattern to encrypt data locally in
your application:
<ol> <li> Use this operation (`GenerateDataKey`) to get a data encryption
key.
</li> <li> Use the plaintext data encryption key (returned in the
`Plaintext` field of the response) to encrypt data locally, then erase the
plaintext data key from memory.
</li> <li> Store the encrypted data key (returned in the `CiphertextBlob`
field of the response) alongside the locally encrypted data.
</li> </ol> To decrypt data locally:
<ol> <li> Use the `Decrypt` operation to decrypt the encrypted data key
into a plaintext copy of the data key.
</li> <li> Use the plaintext data key to decrypt data locally, then erase
the plaintext data key from memory.
</li> </ol> To return only an encrypted copy of the data key, use
`GenerateDataKeyWithoutPlaintext`. To return a random byte string that is
cryptographically secure, use `GenerateRandom`.
If you use the optional `EncryptionContext` field, you must store at least
enough information to be able to reconstruct the full encryption context
when you later send the ciphertext to the `Decrypt` operation. It is a good
practice to choose an encryption context that you can reconstruct on the
fly to better secure the ciphertext. For more information, see [Encryption
Context](http://docs.aws.amazon.com/kms/latest/developerguide/encryption-context.html)
in the *AWS Key Management Service Developer Guide*.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def generate_data_key(client, input, options \\ []),
  do: request(client, "GenerateDataKey", input, options)
@doc """
Returns a data encryption key encrypted under a customer master key (CMK).
This operation is identical to `GenerateDataKey` but returns only the
encrypted copy of the data key.
To perform this operation on a CMK in a different AWS account, specify the
key ARN or alias ARN in the value of the KeyId parameter.
This operation is useful in a system that has multiple components with
different degrees of trust. For example, consider a system that stores
encrypted data in containers. Each container stores the encrypted data and
an encrypted copy of the data key. One component of the system, called the
*control plane*, creates new containers. When it creates a new container,
it uses this operation (`GenerateDataKeyWithoutPlaintext`) to get an
encrypted data key and then stores it in the container. Later, a different
component of the system, called the *data plane*, puts encrypted data into
the containers. To do this, it passes the encrypted data key to the
`Decrypt` operation, then uses the returned plaintext data key to encrypt
data, and finally stores the encrypted data in the container. In this
system, the control plane never sees the plaintext data key.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def generate_data_key_without_plaintext(client, input, options \\ []),
  do: request(client, "GenerateDataKeyWithoutPlaintext", input, options)
@doc """
Returns a random byte string that is cryptographically secure.
By default, the random byte string is generated in AWS KMS. To generate the
byte string in the AWS CloudHSM cluster that is associated with a [custom
key
store](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-overview.html),
specify the custom key store ID.
For more information about entropy and random number generation, see the
[AWS Key Management Service Cryptographic
Details](https://d0.awsstatic.com/whitepapers/KMS-Cryptographic-Details.pdf)
whitepaper.
"""
def generate_random(client, input, options \\ []),
  do: request(client, "GenerateRandom", input, options)
@doc """
Gets a key policy attached to the specified customer master key (CMK). You
cannot perform this operation on a CMK in a different AWS account.
"""
def get_key_policy(client, input, options \\ []),
  do: request(client, "GetKeyPolicy", input, options)
@doc """
Gets a Boolean value that indicates whether [automatic rotation of the key
material](http://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
is enabled for the specified customer master key (CMK).
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
<ul> <li> Disabled: The key rotation status does not change when you
disable a CMK. However, while the CMK is disabled, AWS KMS does not rotate
the backing key.
</li> <li> Pending deletion: While a CMK is pending deletion, its key
rotation status is `false` and AWS KMS does not rotate the backing key. If
you cancel the deletion, the original key rotation status is restored.
</li> </ul> To perform this operation on a CMK in a different AWS account,
specify the key ARN in the value of the `KeyId` parameter.
"""
def get_key_rotation_status(client, input, options \\ []),
  do: request(client, "GetKeyRotationStatus", input, options)
@doc """
Returns the items you need in order to import key material into AWS KMS
from your existing key management infrastructure. For more information
about importing key material into AWS KMS, see [Importing Key
Material](http://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*.
You must specify the key ID of the customer master key (CMK) into which you
will import key material. This CMK's `Origin` must be `EXTERNAL`. You must
also specify the wrapping algorithm and type of wrapping key (public key)
that you will use to encrypt the key material. You cannot perform this
operation on a CMK in a different AWS account.
This operation returns a public key and an import token. Use the public key
to encrypt the key material. Store the import token to send with a
subsequent `ImportKeyMaterial` request. The public key and import token
from the same response must be used together. These items are valid for 24
hours. When they expire, they cannot be used for a subsequent
`ImportKeyMaterial` request. To get new ones, send another
`GetParametersForImport` request.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def get_parameters_for_import(client, input, options \\ []),
  do: request(client, "GetParametersForImport", input, options)
@doc """
Imports key material into an existing AWS KMS customer master key (CMK)
that was created without key material. You cannot perform this operation on
a CMK in a different AWS account. For more information about creating CMKs
with no key material and then importing key material, see [Importing Key
Material](http://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*.
Before using this operation, call `GetParametersForImport`. Its response
includes a public key and an import token. Use the public key to encrypt
the key material. Then, submit the import token from the same
`GetParametersForImport` response.
When calling this operation, you must specify the following values:
<ul> <li> The key ID or key ARN of a CMK with no key material. Its `Origin`
must be `EXTERNAL`.
To create a CMK with no key material, call `CreateKey` and set the value of
its `Origin` parameter to `EXTERNAL`. To get the `Origin` of a CMK, call
`DescribeKey`.
</li> <li> The encrypted key material. To get the public key to encrypt the
key material, call `GetParametersForImport`.
</li> <li> The import token that `GetParametersForImport` returned. This
token and the public key used to encrypt the key material must have come
from the same response.
</li> <li> Whether the key material expires and if so, when. If you set an
expiration date, you can change it only by reimporting the same key
material and specifying a new expiration date. If the key material expires,
AWS KMS deletes the key material and the CMK becomes unusable. To use the
CMK again, you must reimport the same key material.
</li> </ul> When this operation is successful, the key state of the CMK
changes from `PendingImport` to `Enabled`, and you can use the CMK. After
you successfully import key material into a CMK, you can reimport the same
key material into that CMK, but you cannot import different key material.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def import_key_material(client, input, options \\ []),
  do: request(client, "ImportKeyMaterial", input, options)
@doc """
Gets a list of all aliases in the caller's AWS account and region. You
cannot list aliases in other accounts. For more information about aliases,
see `CreateAlias`.
By default, the `ListAliases` command returns all aliases in the account
and region. To get only the aliases that point to a particular customer
master key (CMK), use the `KeyId` parameter.
The `ListAliases` response might include several aliases that have no
`TargetKeyId` field. These are predefined aliases that AWS has created but
has not yet associated with a CMK. Aliases that AWS creates in your
account, including predefined aliases, do not count against your [AWS KMS
aliases
limit](http://docs.aws.amazon.com/kms/latest/developerguide/limits.html#aliases-limit).
"""
def list_aliases(client, input, options \\ []),
  do: request(client, "ListAliases", input, options)
@doc """
Gets a list of all grants for the specified customer master key (CMK).
To perform this operation on a CMK in a different AWS account, specify the
key ARN in the value of the `KeyId` parameter.
"""
def list_grants(client, input, options \\ []),
  do: request(client, "ListGrants", input, options)
@doc """
Gets the names of the key policies that are attached to a customer master
key (CMK). This operation is designed to get policy names that you can use
in a `GetKeyPolicy` operation. However, the only valid policy name is
`default`. You cannot perform this operation on a CMK in a different AWS
account.
"""
def list_key_policies(client, input, options \\ []),
  do: request(client, "ListKeyPolicies", input, options)
@doc """
Gets a list of all customer master keys (CMKs) in the caller's AWS account
and region.
"""
def list_keys(client, input, options \\ []),
  do: request(client, "ListKeys", input, options)
@doc """
Returns a list of all tags for the specified customer master key (CMK).
You cannot perform this operation on a CMK in a different AWS account.
"""
def list_resource_tags(client, input, options \\ []),
  do: request(client, "ListResourceTags", input, options)
@doc """
Returns a list of all grants for which the grant's `RetiringPrincipal`
matches the one specified.
A typical use is to list all grants that you are able to retire. To retire
a grant, use `RetireGrant`.
"""
def list_retirable_grants(client, input, options \\ []),
  do: request(client, "ListRetirableGrants", input, options)
@doc """
Attaches a key policy to the specified customer master key (CMK). You
cannot perform this operation on a CMK in a different AWS account.
For more information about key policies, see [Key
Policies](http://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html)
in the *AWS Key Management Service Developer Guide*.
"""
def put_key_policy(client, input, options \\ []),
  do: request(client, "PutKeyPolicy", input, options)
@doc """
Encrypts data on the server side with a new customer master key (CMK)
without exposing the plaintext of the data on the client side. The data is
first decrypted and then reencrypted. You can also use this operation to
change the encryption context of a ciphertext.
You can reencrypt data using CMKs in different AWS accounts.
Unlike other operations, `ReEncrypt` is authorized twice, once as
`ReEncryptFrom` on the source CMK and once as `ReEncryptTo` on the
destination CMK. We recommend that you include the `"kms:ReEncrypt*"`
permission in your [key
policies](http://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html)
to permit reencryption from or to the CMK. This permission is automatically
included in the key policy when you create a CMK through the console, but
you must include it manually when you create a CMK programmatically or when
you set a key policy with the `PutKeyPolicy` operation.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def re_encrypt(client, input, options \\ []),
  do: request(client, "ReEncrypt", input, options)
@doc """
Retires a grant. To clean up, you can retire a grant when you're done using
it. You should revoke a grant when you intend to actively deny operations
that depend on it. The following are permitted to call this API:
<ul> <li> The AWS account (root user) under which the grant was created
</li> <li> The `RetiringPrincipal`, if present in the grant
</li> <li> The `GranteePrincipal`, if `RetireGrant` is an operation
specified in the grant
</li> </ul> You must identify the grant to retire by its grant token or by
a combination of the grant ID and the Amazon Resource Name (ARN) of the
customer master key (CMK). A grant token is a unique variable-length
base64-encoded string. A grant ID is a 64 character unique identifier of a
grant. The `CreateGrant` operation returns both.
"""
def retire_grant(client, input, options \\ []),
  do: request(client, "RetireGrant", input, options)
@doc """
Revokes the specified grant for the specified customer master key (CMK).
You can revoke a grant to actively deny operations that depend on it.
To perform this operation on a CMK in a different AWS account, specify the
key ARN in the value of the `KeyId` parameter.
"""
def revoke_grant(client, input, options \\ []),
  do: request(client, "RevokeGrant", input, options)
@doc """
Schedules the deletion of a customer master key (CMK). You may provide a
waiting period, specified in days, before deletion occurs. If you do not
provide a waiting period, the default period of 30 days is used. When this
operation is successful, the key state of the CMK changes to
`PendingDeletion`. Before the waiting period ends, you can use
`CancelKeyDeletion` to cancel the deletion of the CMK. After the waiting
period ends, AWS KMS deletes the CMK and all AWS KMS data associated with
it, including all aliases that refer to it.
<important> Deleting a CMK is a destructive and potentially dangerous
operation. When a CMK is deleted, all data that was encrypted under the CMK
is unrecoverable. To prevent the use of a CMK without deleting it, use
`DisableKey`.
</important> If you schedule deletion of a CMK from a [custom key
store](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-overview.html),
when the waiting period expires, `ScheduleKeyDeletion` deletes the CMK from
AWS KMS. Then AWS KMS makes a best effort to delete the key material from
the associated AWS CloudHSM cluster. However, you might need to manually
[delete the orphaned key
material](http://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-orphaned-key)
from the cluster and its backups.
You cannot perform this operation on a CMK in a different AWS account.
For more information about scheduling a CMK for deletion, see [Deleting
Customer Master
Keys](http://docs.aws.amazon.com/kms/latest/developerguide/deleting-keys.html)
in the *AWS Key Management Service Developer Guide*.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def schedule_key_deletion(client, input, options \\ []),
  do: request(client, "ScheduleKeyDeletion", input, options)
@doc """
Adds or edits tags for a customer master key (CMK). You cannot perform this
operation on a CMK in a different AWS account.
Each tag consists of a tag key and a tag value. Tag keys and tag values are
both required, but tag values can be empty (null) strings.
You can only use a tag key once for each CMK. If you use the tag key again,
AWS KMS replaces the current tag value with the specified value.
For information about the rules that apply to tag keys and tag values, see
[User-Defined Tag
Restrictions](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/allocation-tag-restrictions.html)
in the *AWS Billing and Cost Management User Guide*.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def tag_resource(client, input, options \\ []),
  do: request(client, "TagResource", input, options)
@doc """
Removes the specified tags from the specified customer master key (CMK).
You cannot perform this operation on a CMK in a different AWS account.
To remove a tag, specify the tag key. To change the tag value of an
existing tag key, use `TagResource`.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def untag_resource(client, input, options \\ []),
  do: request(client, "UntagResource", input, options)
@doc """
Associates an existing alias with a different customer master key (CMK).
Each CMK can have multiple aliases, but the aliases must be unique within
the account and region. You cannot perform this operation on an alias in a
different AWS account.
This operation works only on existing aliases. To change the alias of a CMK
to a new value, use `CreateAlias` to create a new alias and `DeleteAlias`
to delete the old alias.
Because an alias is not a property of a CMK, you can create, update, and
delete the aliases of a CMK without affecting the CMK. Also, aliases do not
appear in the response from the `DescribeKey` operation. To get the aliases
of all CMKs in the account, use the `ListAliases` operation.
An alias name can contain only alphanumeric characters, forward slashes
(/), underscores (_), and dashes (-). An alias must start with the word
`alias` followed by a forward slash (`alias/`). Alias names cannot begin
with `aws`; that alias name prefix is reserved by Amazon Web Services
(AWS).
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def update_alias(client, input, options \\ []),
  do: request(client, "UpdateAlias", input, options)
@doc """
Changes the properties of a custom key store. Use the `CustomKeyStoreId`
parameter to identify the custom key store you want to edit. Use the
remaining parameters to change the properties of the custom key store.
You can only update a custom key store that is disconnected. To disconnect
the custom key store, use `DisconnectCustomKeyStore`. To reconnect the
custom key store after the update completes, use `ConnectCustomKeyStore`.
To find the connection state of a custom key store, use the
`DescribeCustomKeyStores` operation.
Use the `NewCustomKeyStoreName` parameter to change the friendly name of
the custom key store to the value that you specify.
Use the `KeyStorePassword` parameter to tell AWS KMS the current password of
the [ `kmsuser` crypto user
(CU)](http://docs.aws.amazon.com/kms/latest/developerguide/key-store-concepts.html#concept-kmsuser)
in the associated AWS CloudHSM cluster. You can use this parameter to fix
connection failures that occur when AWS KMS cannot log into the associated
cluster because the `kmsuser` password has changed. This value does not
change the password in the AWS CloudHSM cluster.
Use the `CloudHsmClusterId` parameter to associate the custom key store
with a related AWS CloudHSM cluster, that is, a cluster that shares a
backup history with the original cluster. You can use this parameter to
repair a custom key store if its AWS CloudHSM cluster becomes corrupted or
is deleted, or when you need to create or restore a cluster from a backup.
The cluster ID must identify an AWS CloudHSM cluster with the following
requirements.
<ul> <li> The cluster must be active and be in the same AWS account and
Region as the custom key store.
</li> <li> The cluster must have the same cluster certificate as the
original cluster. You cannot use this parameter to associate the custom key
store with an unrelated cluster. To view the cluster certificate, use the
AWS CloudHSM
[DescribeClusters](http://docs.aws.amazon.com/cloudhsm/latest/APIReference/API_DescribeClusters.html)
operation. Clusters that share a backup history have the same cluster
certificate.
</li> <li> The cluster must be configured with subnets in at least two
different Availability Zones in the Region. Because AWS CloudHSM is not
supported in all Availability Zones, we recommend that the cluster have
subnets in all Availability Zones in the Region.
</li> <li> The cluster must contain at least two active HSMs, each in a
different Availability Zone.
</li> </ul> If the operation succeeds, it returns a JSON object with no
properties.
This operation is part of the [Custom Key Store
feature](http://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive
integration of AWS KMS with the isolation and control of a single-tenant
key store.
"""
def update_custom_key_store(client, input, options \\ []),
  do: request(client, "UpdateCustomKeyStore", input, options)
@doc """
Updates the description of a customer master key (CMK). To see the
description of a CMK, use `DescribeKey`.
You cannot perform this operation on a CMK in a different AWS account.
The result of this operation varies with the key state of the CMK. For
details, see [How Key State Affects Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def update_key_description(client, input, options \\ []) do
request(client, "UpdateKeyDescription", input, options)
end
@spec request(map(), binary(), map(), list()) ::
  {:ok, Poison.Parser.t | nil, Poison.Response.t} |
  {:error, Poison.Parser.t} |
  {:error, HTTPoison.Error.t}
# Signs and POSTs an amz-json-1.1 request to the KMS endpoint, decoding the
# JSON body on success and mapping API errors to `{:error, {type, message}}`.
defp request(client, action, input, options) do
  client = %{client | service: "kms"}
  host = get_host("kms", client)
  url = get_url(host, client)
  payload = Poison.Encoder.encode(input, [])

  unsigned_headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "TrentService.#{action}"}
  ]

  signed_headers = AWS.Request.sign_v4(client, "POST", url, unsigned_headers, payload)

  case HTTPoison.post(url, payload, signed_headers, options) do
    # Empty 200 body: nothing to decode.
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body), response}

    # Non-200: KMS returns a JSON document with "__type" and "message".
    {:ok, %HTTPoison.Response{body: body}} ->
      parsed = Poison.Parser.parse!(body)
      {:error, {parsed["__type"], parsed["message"]}}

    # Rebuild the error struct carrying only the reason, matching the
    # original contract (any other fields are dropped).
    {:error, %HTTPoison.Error{reason: why}} ->
      {:error, %HTTPoison.Error{reason: why}}
  end
end
# Resolves the request host: "localhost" for the special "local" region,
# otherwise "<prefix>.<region>.<endpoint>".
defp get_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp get_host(endpoint_prefix, client),
  do: "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
# Builds the base URL from the client's protocol and port configuration.
defp get_url(host, %{proto: proto, port: port}), do: "#{proto}://#{host}:#{port}/"
end
|
lib/aws/kms.ex
| 0.877556
| 0.444505
|
kms.ex
|
starcoder
|
defmodule ExPool.Manager do
  @moduledoc """
  Module to start a pool, and check-in and check-out workers.

  When a worker is available, it can be checked-out from the pool.
  In case there are no available workers it will respond `{:waiting, state}`
  and enqueue the pending request.

  When a worker is checked-in with any pending requests, it will
  respond with `{:check_out, {requester_identifier, worker, state}}` and
  remove the current request from the pending queue.

  ## Example:

  ```elixir
  defmodule HardWorker do
    def start_link(_), do: Agent.start_link(fn -> :ok end)
  end

  alias ExPool.Manager

  state = Manager.new(worker_mod: HardWorker, size: 1)

  # There is a worker available. It can be checked-out.
  {:ok, {worker, state}} = Manager.check_out(state, :request_1)

  # There are no workers available. The current request identified
  # by :request_2 has to wait for available workers.
  {:waiting, state} = Manager.check_out(state, :request_2)

  # A worker is checked-in but there is a request pending. The
  # worker is checked_out to be used by the pending request (:request_2).
  {:check_out, {:request_2, worker, state}} = Manager.check_in(state, worker)

  # There are no pending requests. The worker is checked-in properly.
  {:ok, state} = Manager.check_in(state, worker)
  ```
  """

  alias ExPool.State
  alias ExPool.Manager.Populator
  alias ExPool.Manager.Info
  alias ExPool.Manager.Joiner
  alias ExPool.Manager.Requester
  alias ExPool.Manager.DownHandler

  @doc """
  Create a new pool state with the given configuration.

  (See State.new/1 for more info about configuration options)
  """
  @spec new(config :: [Keyword]) :: State.t
  def new(config),
    do: State.new(config) |> prepopulate

  # Starts the configured workers via Populator and checks each one into
  # the fresh pool state so they are immediately available.
  defp prepopulate(state) do
    {workers, state} = Populator.populate(state)

    Enum.reduce workers, state, fn (worker, state) ->
      {:ok, state} = check_in(state, worker)
      state
    end
  end

  @doc """
  Gathers information about the current state of the pool.

  ## Format:

      %{
        workers: %{
          free: <number_of_available_workers>,
          in_use: <number_of_workers_in_use>,
          total: <total_number_of_workers>
        },
        waiting: <number_of_processes_waiting_for_an_available_worker>
      }
  """
  @spec info(State.t) :: map
  def info(state), do: Info.get(state)

  @doc """
  Check-out a worker from the pool.

  It receives the state, and a term to identify the current check-out
  request. In case there are no workers available, the same term will
  be returned by check-in to identify the requester of the worker.
  """
  @spec check_out(State.t, from :: any) :: {:ok, {pid, State.t}} | {:waiting, State.t}
  def check_out(state, from),
    do: Requester.request(state, from)

  @doc """
  Check-in a worker from the pool.

  When returning a worker to the pool, there are 2 possible scenarios:

    * There aren't any worker requests pending: The worker is stored in the
      pool and responds with `{:ok, state}`.
    * There is any worker request pending: The worker is not stored. Instead
      the term identifying the request (`from`) is returned expecting the
      caller to yield the resource to the requester. It responds with
      `{:check_out, {from, worker, state}}`.
  """
  @spec check_in(State.t, pid) ::
    {:ok, State.t} | {:check_out, {from :: any, worker :: pid, State.t}}
  def check_in(state, worker) do
    case Joiner.join(state, worker) do
      # Worker died before it could rejoin: replace it with a fresh one.
      {:dead_worker, state} -> handle_dead_worker(state)
      {:check_out, {from, worker, state}} -> {:check_out, {from, worker, state}}
      {:ok, state} -> {:ok, state}
    end
  end

  @doc """
  Handle a process down.

  There are 2 types of monitored processes that can crash:

    * worker - If the crashed process is a worker, a new one is started
      and monitored
    * client - If the crashed process is a client, the worker that the
      process was using is returned to the pool
  """
  @spec process_down(State.t, reference) :: any
  def process_down(state, ref) do
    case DownHandler.process_down(state, ref) do
      {:dead_worker, state} -> handle_dead_worker(state)
      {:check_in, {worker, state}} -> check_in(state, worker)
      {:ok, state} -> {:ok, state}
    end
  end

  # Starts a replacement worker via Populator and checks it into the pool
  # (which may immediately hand it to a waiting requester).
  defp handle_dead_worker(state) do
    {worker, state} = Populator.add(state)
    check_in(state, worker)
  end
end
|
lib/ex_pool/manager.ex
| 0.789599
| 0.871748
|
manager.ex
|
starcoder
|
defmodule LibJudge.Filter do
  @moduledoc """
  A collection of filters to do common searches on rules.

  Each filter returns a single-argument function designed to be
  used with `Enum.filter/2`.
  """

  alias LibJudge.Rule
  alias LibJudge.Tokenizer

  require Logger

  @type filter :: (Tokenizer.rule() -> boolean)

  @doc "Builds a filter accepting only tokens of the given `type`."
  @spec token_type(Tokenizer.token_type()) :: filter
  def token_type(type) do
    fn
      {^type, _info} ->
        true

      _ ->
        false
    end
  end

  @doc """
  Builds a filter accepting only the rule equal to `rule_str`.

  If `rule_str` cannot be parsed, the returned filter rejects everything.
  """
  @spec rule_is(String.t()) :: filter
  def rule_is(rule_str) do
    {:ok, rule} = Rule.from_string(rule_str)

    fn
      {:rule, {_type, ^rule, _body, _examples}} ->
        true

      _ ->
        false
    end
  rescue
    # Function-level rescue: a parse failure (raise or MatchError above)
    # yields an always-false filter, which is the correct value to RETURN
    # from `rule_is/1` itself.
    _ -> fn _ -> false end
  end

  @doc "Builds a filter accepting rules whose string form starts with `prefix`."
  @spec rule_starts_with(String.t()) :: filter
  def rule_starts_with(prefix) do
    fn
      {:rule, {_type, rule = %Rule{}, _body, _examples}} ->
        case Rule.to_string(rule) do
          {:ok, string} -> String.starts_with?(string, prefix)
          _ -> false
        end

      _ ->
        false
    end
  end

  @doc "Builds a filter accepting rules of the given `type`."
  @spec rule_type(Rule.rule_type()) :: filter
  def rule_type(type) do
    fn
      {:rule, {^type, %Rule{type: ^type}, _body, _examples}} -> true
      _ -> false
    end
  end

  @doc "Builds a filter accepting rules whose body contains `text`."
  @spec body_contains(String.t()) :: filter
  def body_contains(text) do
    fn
      {:rule, {_type, _rule, body, _examples}} when is_binary(body) ->
        String.contains?(body, text)

      _ ->
        false
    end
  end

  @doc "Builds a filter accepting rules that carry at least one example."
  @spec has_examples() :: filter
  def has_examples do
    fn
      {:rule, {_type, _rule, _body, [_at_least_one | _example]}} -> true
      _ -> false
    end
  end

  @doc "Builds a filter accepting rules whose body matches `regex`."
  @spec body_matches(Regex.t()) :: filter
  def body_matches(regex) do
    fn
      {:rule, {_type, _rule, body, _examples}} when is_binary(body) -> Regex.match?(regex, body)
      _ -> false
    end
  end

  @doc """
  Builds a filter accepting rules whose stringified rule matches `regex`.

  Rules that cannot be stringified are rejected.
  """
  @spec rule_matches(Regex.t()) :: filter
  def rule_matches(regex) do
    fn
      {:rule, {_type, rule, _body, _examples}} ->
        try do
          Regex.match?(regex, Rule.to_string!(rule))
        rescue
          # BUG FIX: previously this returned `fn _ -> false end` — a truthy
          # value — so the filter ACCEPTED every rule that failed to
          # stringify. The filter must return a boolean here.
          _ -> false
        end

      _ ->
        false
    end
  end

  @doc "Builds a filter accepting rules with any example matching `regex`."
  @spec example_matches(Regex.t()) :: filter
  def example_matches(regex) do
    fn
      {:rule, {_type, _rule, _body, examples}} when is_list(examples) ->
        Enum.reduce(examples, false, fn
          x, acc when is_binary(x) -> Regex.match?(regex, x) || acc
          _, acc -> acc
        end)

      _ ->
        false
    end
  end

  @doc "Combines two filters with a logical OR."
  @spec either(filter, filter) :: filter
  def either(filter1, filter2) do
    fn x ->
      filter1.(x) or filter2.(x)
    end
  end

  @doc """
  Combines a list of filters with a logical OR.

  Raises `Enum.EmptyError` if `filters` is empty.
  """
  @spec any([filter]) :: filter
  def any(filters) do
    Enum.reduce(
      filters,
      &either/2
    )
  end

  @doc "Combines two filters with a logical AND."
  @spec both(filter, filter) :: filter
  def both(filter1, filter2) do
    fn x ->
      filter1.(x) and filter2.(x)
    end
  end

  @doc """
  Combines a list of filters with a logical AND.

  Raises `Enum.EmptyError` if `filters` is empty.
  """
  @spec all([filter]) :: filter
  def all(filters) do
    Enum.reduce(
      filters,
      &both/2
    )
  end
end
|
lib/lib_judge/filter.ex
| 0.847195
| 0.487063
|
filter.ex
|
starcoder
|
defmodule Adventofcode.Day14DockingData do
  @moduledoc """
  Day 14: Docking Data.

  Part 1 applies the 36-bit mask to the *value* ("X" keeps the bit).
  Part 2 applies the mask to the *address* ("0" keeps the bit, "1"
  overwrites it, "X" floats over both 0 and 1).
  """

  use Adventofcode

  alias __MODULE__.{Part1, Part2}

  def part_1(input) do
    input
    |> parse
    |> Part1.solve()
  end

  def part_2(input) do
    input
    |> parse
    |> Part2.solve()
  end

  defmodule Part1 do
    # BUG FIX: the default mask used lowercase "x", which never matches the
    # uppercase "X" (keep-bit) clause in apply_mask/2 — a write before any
    # mask instruction would produce literal "x" bits and crash
    # String.to_integer/2. Uppercase "X" is the proper identity mask.
    defstruct mask: String.duplicate("X", 36), mem: %{}

    def solve(instructions) when is_list(instructions) do
      instructions
      |> Enum.reduce(%__MODULE__{}, &solve/2)
      |> sum
    end

    def solve({:mask, mask}, state) do
      %{state | mask: mask}
    end

    def solve({:mem, [address, value]}, %__MODULE__{mask: mask, mem: mem} = state) do
      value = value |> apply_mask(mask) |> String.to_integer(2)
      %{state | mem: Map.put(mem, address, value)}
    end

    def sum(%__MODULE__{mem: mem}) do
      mem
      |> Map.values()
      |> Enum.sum()
    end

    # Overlays the mask onto the value's 36-bit string: "X" keeps the
    # original bit, "0"/"1" overwrite it.
    def apply_mask(num, mask) do
      [to_bitmask(num), mask]
      |> Enum.map(&String.graphemes/1)
      |> Enum.zip()
      |> Enum.map(fn
        {current, "X"} -> current
        {_, new} -> new
      end)
      |> Enum.join("")
    end

    defp to_bitmask(val), do: val |> Integer.to_string(2) |> String.pad_leading(36, "0")
  end

  defmodule Part2 do
    # Same fix as Part1: "X" (uppercase) is the identity for the address
    # decoder's grammar; lowercase "x" matched no clause meaningfully.
    defstruct mask: String.duplicate("X", 36), mem: %{}

    def solve(instructions) when is_list(instructions) do
      instructions
      |> Enum.reduce(%__MODULE__{}, &solve/2)
      |> sum
    end

    def solve({:mask, mask}, state) do
      %{state | mask: mask}
    end

    # Writes `value` to every decoded address produced by the floating mask.
    def solve({:mem, [address, value]}, %__MODULE__{mask: mask} = state) do
      address
      |> apply_mask(mask)
      |> Enum.map(&String.to_integer(&1, 2))
      |> Enum.reduce(state, &%{&2 | mem: Map.put(&2.mem, &1, value)})
    end

    def sum(%__MODULE__{mem: mem}) do
      mem
      |> Map.values()
      |> Enum.sum()
    end

    # Overlays the mask onto the address ("0" keeps, anything else
    # overwrites), then expands every "X" into both 0 and 1 variants.
    # Returns a list of 36-char bit strings.
    def apply_mask(num, mask) do
      [to_bitmask(num), mask]
      |> Enum.map(&String.graphemes/1)
      |> Enum.zip()
      |> Enum.map(fn
        {current, "0"} -> current
        {_, new} -> new
      end)
      |> apply_floating("")
      |> Enum.map(&String.reverse/1)
    end

    def apply_floating(mask) when is_binary(mask) do
      mask
      |> String.graphemes()
      |> apply_floating("")
      |> Enum.map(&String.reverse/1)
    end

    # Builds results by PREPENDING each bit, so the accumulated strings are
    # reversed; callers reverse them once at the end.
    def apply_floating([], result), do: [result]

    def apply_floating(["X" | tail], result) do
      ["0" <> result, "1" <> result]
      |> Enum.flat_map(&apply_floating(tail, &1))
    end

    def apply_floating([b | tail], result) do
      [b <> result]
      |> Enum.flat_map(&apply_floating(tail, &1))
    end

    defp to_bitmask(val), do: val |> Integer.to_string(2) |> String.pad_leading(36, "0")
  end

  defp parse(input) do
    input
    |> String.trim()
    |> String.split("\n")
    |> Enum.map(&parse_line/1)
  end

  defp parse_line("mask = " <> mask), do: {:mask, mask}

  defp parse_line("mem[" <> mem) do
    {:mem,
     ~r/\d+/
     |> Regex.scan(mem)
     |> List.flatten()
     |> Enum.map(&String.to_integer/1)}
  end
end
|
lib/day_14_docking_data.ex
| 0.7237
| 0.559651
|
day_14_docking_data.ex
|
starcoder
|
defmodule Chalk do
  @moduledoc """
  `Chalk` is a client that makes your life easy when you need to request
  GraphQL APIs.
  """

  alias __MODULE__.{GraphQLResponse, Request}

  @doc """
  Makes a GraphQL query against a client and returns a `%GraphQLResponse{}` struct.

  ## Arguments

    * `request_params`, a keyword list that may contain:
      - `url`, the client URL
      - `options`, keyword list with options for the request
      - `headers`, keyword list with headers for the request, i.e: `[{"authorization", "Bearer 234"}]`
    * `query_params`, keyword list with params used to build the query
    * `variables`, map with variables that will be used in the query

  ## Examples

      iex> request_params = [url: "https://test.com/"]
      iex> query_params = [users: [:name, :age, friends: [:id, :name]]]
      iex> Chalk.query(request_params, query_params)
      %GraphQLResponse{}

      iex> request_params = [url: "https://test.com/", headers: [{"Authorization", "Bearer 23333"}]]
      iex> query_params = ["User(id: $id)": [:name, :age, friends: [:id, :name]]]
      iex> variables = %{id: 123}
      iex> Chalk.query(request_params, query_params, variables)
      %GraphQLResponse{}
  """
  @spec query(request_params :: keyword(), query_params :: keyword(), variables :: map()) ::
          {:ok | :error, GraphQLResponse.t()} | {:error, {:chalk, :BAD_RESPOSE | :CLIENT_ERROR}}
  def query(request_params, query_params, variables \\ %{}) do
    Request.graphql_query(request_params, build_query(query_params), variables)
  end

  @doc """
  Builds a query string in the format expected by GraphQL.

  ## Arguments

    * `query_params`, keyword list with params used to build the query

  ## Examples

      iex> query_params = [users: [:name, :age, friends: [:id, :name]]]
      iex> Chalk.build_query(query_params)
      "query{users{name age friends{id name}}}"

      iex> query_params = ["User(id: $id)": [:name, :age, friends: [:id, :name]]]
      iex> Chalk.build_query(query_params)
      "query{User(id: $id){name age friends{id name}}}"
  """
  @spec build_query(query_params :: Keyword.t()) :: String.t()
  def build_query(query_params) do
    "query" <> wrap(to_graphql(query_params))
  end

  # A selection list is rendered as each `{field, sub_fields}` pair
  # concatenated with no separator.
  defp to_graphql(selections) when is_list(selections) do
    Enum.map_join(selections, "", fn {field, sub_fields} ->
      "#{camelize(field)}#{sub_selection(sub_fields)}"
    end)
  end

  defp to_graphql(selection) when is_tuple(selection), do: to_graphql([selection])
  defp to_graphql(field) when is_atom(field), do: camelize(field)

  # Fields inside a selection are space-separated and wrapped in braces.
  defp sub_selection(fields) when is_list(fields) do
    fields
    |> Enum.map_join(" ", &to_graphql/1)
    |> wrap()
  end

  # snake_case -> camelCase: the first segment keeps its case, the rest are
  # capitalized and concatenated.
  defp camelize(key) do
    [head | rest] =
      key
      |> to_string()
      |> String.split("_")

    head <> Enum.map_join(rest, "", &String.capitalize/1)
  end

  defp wrap(string), do: "{" <> string <> "}"
end
|
lib/chalk.ex
| 0.81372
| 0.446615
|
chalk.ex
|
starcoder
|
defmodule GitGud.Web.ReactComponents do
  @moduledoc """
  Functions to make rendering React components.
  """

  import Phoenix.HTML.Tag

  @doc """
  Generates a div containing the named React component with no props or attrs.

  Returns safe html: `{:safe, [60, "div", ...]}`.

  You can utilize this in your Phoenix views:

  ```
  <%= GitGud.Web.ReactComponents.react_component("MyComponent") %>
  ```

  The resulting `<div>` tag is formatted specifically for the included javascript
  helper to then turn into your named React component.
  """
  def react_component(name), do: react_component(name, %{})

  @doc """
  Generates a div containing the named React component with the given `props`.

  Returns safe html: `{:safe, [60, "div", ...]}`.

  Props can be passed in as a Map or a List.

  You can utilize this in your Phoenix views:

  ```
  <%= GitGud.Web.ReactComponents.react_component("MyComponent", %{language: "elixir", awesome: true}) %>
  ```

  The resulting `<div>` tag is formatted specifically for the included javascript
  helper to then turn into your named React component and then pass in the `props` specified.
  """
  def react_component(name, props) when is_list(props), do: react_component(name, Enum.into(props, %{}))
  def react_component(name, props) when is_map(props) do
    # Props are JSON-encoded into data attributes the JS helper reads.
    props = Poison.encode!(props)
    content_tag(:div, "", [{:data, [react_class: name, react_props: props]}])
  end

  @doc """
  Generates a div containing the named React component with the given `props` and `attrs`.

  Returns safe html: `{:safe, [60, "div", ...]}`.

  You can utilize this in your Phoenix views:

  ```
  <%= GitGud.Web.ReactComponents.react_component(
    "MyComponent",
    %{language: "elixir", awesome: true},
    class: "my-component"
  ) %>
  ```

  The resulting `<div>` tag is formatted specifically for the included javascript
  helper to then turn into your named React component and then pass in the `props` specified.
  """
  def react_component(name, props, attrs) when is_list(props), do: react_component(name, Enum.into(props, %{}), attrs)
  def react_component(name, props, attrs) when is_map(props) do
    props = Poison.encode!(props)
    react_attrs = [react_class: name, react_props: props]
    # Caller-supplied attrs come first; the data attributes are appended.
    content_tag(:div, "", attrs ++ [{:data, react_attrs}])
  end
end
|
apps/gitgud_web/lib/gitgud_web/react_components.ex
| 0.858363
| 0.76432
|
react_components.ex
|
starcoder
|
defmodule Robotica.Plugins.SonOff do
  @moduledoc """
  Sonoff plugin.

  Bridges a Sonoff device speaking the Tasmota-style MQTT topics: commands
  are published to `cmnd/<topic>/power<postfix>`, power state is read from
  `stat/<topic>/POWER<postfix>`, and availability from `tele/<topic>/LWT`.
  """

  use GenServer
  use Robotica.Plugin

  require Logger

  defmodule Config do
    @moduledoc false
    # topic: the device's MQTT topic; postfix: optional channel suffix for
    # multi-relay devices (e.g. "1", "2").
    @type t :: %__MODULE__{topic: String.t(), postfix: String.t()}
    defstruct [:topic, :postfix]
  end

  defmodule State do
    @moduledoc false
    # last_power caches the most recent POWER payload so it can be re-published
    # when the device comes back online.
    @type t :: %__MODULE__{
            config: Config.t(),
            location: String.t(),
            device: String.t(),
            last_power: String.t() | nil
          }
    defstruct [:config, :location, :device, :last_power]
  end

  # Re-publishes the device's power state on the plugin's "power" state topic.
  @spec publish_device_state(State.t(), String.t()) :: :ok
  defp publish_device_state(%State{} = state, device_state) do
    publish_state_raw(state, "power", device_state)
  end

  # @spec publish_device_error(State.t()) :: :ok
  # defp publish_device_error(%State{} = state) do
  #   publish_state_raw(state, "power", "ERROR")
  # end

  # Published when the device's LWT says it is offline.
  @spec publish_device_hard_off(State.t()) :: :ok
  defp publish_device_hard_off(%State{} = state) do
    publish_state_raw(state, "power", "HARD_OFF")
  end

  # Published when the device is online but no power state has been seen yet.
  @spec publish_device_unknown(State.t()) :: :ok
  defp publish_device_unknown(%State{} = state) do
    publish_state_raw(state, "power", "")
  end

  ## Server Callbacks

  # Builds the initial state and subscribes to the device's power-state and
  # last-will topics; both subscriptions deliver raw payloads and resend
  # retained values.
  def init(plugin) do
    state = %State{
      config: plugin.config,
      location: plugin.location,
      device: plugin.device,
      last_power: nil
    }

    postfix = get_postfix(state)

    Robotica.Subscriptions.subscribe(
      ["stat", plugin.config.topic, "POWER#{postfix}"],
      :power,
      self(),
      :raw,
      :resend
    )

    Robotica.Subscriptions.subscribe(
      ["tele", plugin.config.topic, "LWT"],
      :lwt,
      self(),
      :raw,
      :resend
    )

    {:ok, state}
  end

  # Schema used by the config loader to validate this plugin's config block.
  def config_schema do
    %{
      struct_type: Config,
      topic: {:string, true},
      postfix: {:string, false}
    }
  end

  # A nil postfix means "no channel suffix".
  defp get_postfix(%State{} = state) do
    case state.config.postfix do
      nil -> ""
      postfix -> postfix
    end
  end

  # Translates a robotica command into a Tasmota power payload ("ON"/"OFF")
  # and publishes it on the device's cmnd topic; other actions are ignored.
  defp handle_command(%State{} = state, command) do
    publish_command(state.location, state.device, command)

    power =
      case command.action do
        "turn_on" -> "ON"
        "turn_off" -> "OFF"
        _ -> nil
      end

    if power != nil do
      postfix = get_postfix(state)
      :ok = Robotica.Mqtt.publish_raw("cmnd/#{state.config.topic}/power#{postfix}", power)
    else
      :ok
    end
  end

  # Incoming robotica command: validate, then act only on "device" commands.
  # Note: the state is not changed by commands; the {:noreply, state} below
  # always uses the original state.
  def handle_cast({:mqtt, _, :command, command}, %State{} = state) do
    case Robotica.Config.validate_device_command(command) do
      {:ok, command} ->
        case check_type(command, "device") do
          {command, true} -> handle_command(state, command)
          {_, false} -> state
        end

      {:error, error} ->
        Logger.error("Invalid sonoff command received: #{inspect(error)}.")
    end

    {:noreply, state}
  end

  # Device reported its power state: forward it and remember it.
  def handle_cast({:mqtt, _, :power, power}, %State{} = state) do
    publish_device_state(state, power)
    state = %State{state | last_power: power}
    {:noreply, state}
  end

  # Last-will update: offline -> HARD_OFF; online with a cached power state
  # -> re-publish it; online but never seen -> unknown ("").
  def handle_cast({:mqtt, _, :lwt, msg}, %State{} = state) do
    cond do
      msg != "Online" -> publish_device_hard_off(state)
      state.last_power != nil -> publish_device_state(state, state.last_power)
      true -> publish_device_unknown(state)
    end

    {:noreply, state}
  end
end
|
robotica/lib/robotica/plugins/sonoff.ex
| 0.633864
| 0.460713
|
sonoff.ex
|
starcoder
|
defmodule ExBitstamp do
@moduledoc """
Wraps Bitstamp HTTP API into functions respective of public and private API endpoints.
The `currency_pair` parameter in functions is expected to be an `ExBitstamp.CurrencyPair` struct. See module
documentation for more info on all available convenience functions, but in short, supported currency pairs can
be obtained by calling appropriately named functions:
alias ExBitstamp.CurrencyPair
ExBitstamp.ticker(CurrencyPair.btcusd())
API call functions return successful results as a `{:ok, results}` tuple or `{:error, {error_type, reason}`
tuple in case of an error. See respective functions' docs for examples of specific return values.
`ExBitstamp` module uses v2 Bitstamp API endpoints whenever possible.
Module functions which take an optional `creds` parameter hit Bitstamp's private API which requires valid
API credentials for signature generation. By default, if no credentials are provided as an argument ExBitstamp
will try to fetch credentials from config. Config should be defined as:
config :ex_bitstamp,
creds: %{
customer_id: "customer_id",
key: "key",
secret: "key"
}
Otherwise, if credentials are provided as an argument, `ExBitstamp.Credentials` struct should be used to
pass the credentials to functions.
"""
alias ExBitstamp.{ApiClient, CurrencyPair, Credentials, BankWithdrawal}
@doc """
Fetches ticker data for a currency pair.
Example successful response:
{:ok,
%{
"ask" => "7117.11",
"bid" => "7100.00",
"high" => "7499.00",
"last" => "7100.00",
"low" => "5920.72",
"open" => "6878.65",
"timestamp" => "1517927052",
"volume" => "71876.22396439",
"vwap" => "6707.61"
}}
"""
@spec ticker(CurrencyPair.t()) :: tuple()
def ticker(%CurrencyPair{} = currency_pair), do: public("/v2/ticker/#{segment(currency_pair)}/")
@doc """
Fetches hourly ticker data for a currency pair.
Example successful response:
{:ok,
%{
"ask" => "6905.42",
"bid" => "6900.51",
"high" => "7281.80",
"last" => "6900.60",
"low" => "6784.18",
"open" => "7165.04",
"timestamp" => "1517927653",
"volume" => "4102.91186873",
"vwap" => "7076.36"
}}
"""
@spec ticker_hour(CurrencyPair.t()) :: tuple()
def ticker_hour(%CurrencyPair{} = currency_pair),
do: public("/v2/ticker_hour/#{segment(currency_pair)}/")
@doc """
Fetches order book data for a currency pair.
Example successful response:
{:ok,
%{
"asks" => [
["7021.91", "1.11552422"],
["7021.92", "0.00216174"],
["7022.70", ...],
[...],
...
],
"bids" => [
["6867.88", "20.63509100"],
["6865.74", "2.94040800"],
["6861.31", ...],
[...],
...
],
"timestamp" => "1517927756"
}}
"""
@spec order_book(CurrencyPair.t()) :: tuple()
def order_book(%CurrencyPair{} = currency_pair),
do: public("/v2/order_book/#{segment(currency_pair)}/")
@doc """
Fetches transactions data for a currency pair.
Accepts list of optional parameters as a keyword list. Allowed key is `time` with on of the following values:
`"minute"`, `"hour"` (default) or `"day"`. See [Bitstamp API docs](https://www.bitstamp.net/api/#transactions)
for more info.
Example successful response:
{:ok,
[
%{
"amount" => "0.51879098",
"date" => "1517928659",
"price" => "7011.46",
"tid" => "52902107",
...
},
%{
"amount" => "0.23897801",
"date" => "1517928659",
"price" => "7011.45",
...
},
%{"amount" => "0.01480362", "date" => "1517928659", ...},
%{"amount" => "0.04021837", ...},
%{...},
...
]}
"""
@spec transactions(CurrencyPair.t(), list()) :: tuple()
def transactions(%CurrencyPair{} = currency_pair, opts \\ []),
do: public("/v2/transactions/#{segment(currency_pair)}/", opts)
@doc """
Fetches trading pairs info.
Example successful response:
{:ok,
[
%{
"base_decimals" => 8,
"counter_decimals" => 2,
"description" => "Litecoin / U.S. dollar",
"minimum_order" => "5.0 USD",
"name" => "LTC/USD",
"trading" => "Enabled",
"url_symbol" => "ltcusd"
},
%{
"base_decimals" => 8,
"counter_decimals" => 2,
"description" => "Ether / U.S. dollar",
"minimum_order" => "5.0 USD",
"name" => "ETH/USD",
"trading" => "Enabled",
"url_symbol" => "ethusd"
},
...
]}
"""
@spec trading_pairs_info() :: tuple()
def trading_pairs_info(), do: public("/v2/trading-pairs-info/")
@doc """
Fetches EUR/USD conversion rate.
Example successful response:
{:ok, %{"buy" => "1.235", "sell" => "1.235"}}
"""
@spec eur_usd() :: tuple()
def eur_usd(), do: public("/eur_usd/")
@doc """
Fetches account balance data for a currency pair.
Example successful response:
{:ok,
%{
"btc_available" => "0.00000000",
"btc_balance" => "0.00000000",
"btc_reserved" => "0.00000000",
"fee" => 0.25,
"usd_available" => "0.00",
"usd_balance" => "0.00",
"usd_reserved" => "0.00"
}}
"""
@spec balance(CurrencyPair.t(), Credentials.t() | nil) :: tuple()
def balance(%CurrencyPair{} = currency_pair, creds \\ nil),
do: private("/v2/balance/#{segment(currency_pair)}/", [], creds)
@doc """
Fetches account balance data for all currencies.
Example successful response:
{:ok,
%{
"xrp_reserved" => "0.00000000",
"bcheur_fee" => "0.12",
"ltc_balance" => "0.00000000",
"ltcbtc_fee" => "0.25",
"btc_balance" => "0.00000000",
"ltc_reserved" => "0.00000000",
"eth_balance" => "0.39706665",
"eur_available" => "0.00",
"xrpbtc_fee" => "0.25",
"bchusd_fee" => "0.12",
"bch_available" => "0.00000000",
"eurusd_fee" => "0.25",
"ethusd_fee" => "0.25",
"btc_available" => "0.00000000",
"xrpeur_fee" => "0.25",
"eur_balance" => "0.00",
"btceur_fee" => "0.25",
"usd_balance" => "0.00",
"bch_balance" => "0.00000000",
"xrpusd_fee" => "0.25",
"ltcusd_fee" => "0.25",
"eth_available" => "0.00000000",
"bch_reserved" => "0.00000000",
"ltceur_fee" => "0.25",
"etheur_fee" => "0.25",
"eur_reserved" => "0.00",
"ethbtc_fee" => "0.25",
"xrp_balance" => "0.00000000",
"ltc_available" => "0.00000000",
"bchbtc_fee" => "0.12",
"eth_reserved" => "0.00000000",
"btcusd_fee" => "0.25",
"usd_available" => "0.00",
"xrp_available" => "0.00000000",
"usd_reserved" => "0.00",
"btc_reserved" => "0.00000000"
}}
"""
@spec balance_all(Credentials.t() | nil) :: tuple()
def balance_all(creds \\ nil), do: private("/v2/balance/", [], creds)
@doc """
Fetches user transaction data for all currencies.
Example successful response:
{:ok,
[
%{
"btc" => 0.0,
"datetime" => "2018-02-02 13:08:20",
"eth" => "0.41245141",
"eth_eur" => 725.54,
"eur" => "-299.25",
"fee" => "0.75",
"id" => 51366122,
"order_id" => 880205621,
"type" => "2",
"usd" => 0.0
},
%{
"btc" => 0.0,
"btc_usd" => "0.00",
"datetime" => "2018-02-02 13:00:29",
"eur" => "300.00",
"fee" => "0.00",
"id" => 51351200,
"type" => "0",
"usd" => 0.0
},
....
]}
"""
@spec user_transactions_all(Credentials.t() | nil) :: tuple()
def user_transactions_all(creds \\ nil), do: private("/v2/user_transactions/", [], creds)
@doc """
Fetches user transaction data for a currency pair.
Example successful response:
{:ok,
[
%{
"btc" => 0.0,
"datetime" => "2018-02-02 13:08:20",
"eth" => "0.41245141",
"eth_eur" => 725.54,
"eur" => "-299.25",
"fee" => "0.75",
"id" => 51366122,
"order_id" => 880205621,
"type" => "2",
"usd" => 0.0
},
%{
"btc" => 0.0,
"btc_usd" => "0.00",
"datetime" => "2018-02-02 13:00:29",
"eur" => "300.00",
"fee" => "0.00",
"id" => 51351200,
"type" => "0",
"usd" => 0.0
},
....
]}
"""
@spec user_transactions(CurrencyPair.t(), Credentials.t() | nil) :: tuple()
def user_transactions(%CurrencyPair{} = currency_pair, creds \\ nil),
do: private("/v2/user_transactions/#{segment(currency_pair)}/", [], creds)
@doc """
Fetches open orders data for a currency pair.
Example successful response:
{:ok,
[
%{
"amount" => "0.10000000",
"datetime" => "2018-02-13 16:24:00",
"id" => "951827494",
"price" => "750.00",
"type" => "1"
}
]}
"""
@spec open_orders(CurrencyPair.t(), Credentials.t() | nil) :: tuple()
def open_orders(%CurrencyPair{} = currency_pair, creds \\ nil),
do: private("/v2/open_orders/#{segment(currency_pair)}/", [], creds)
@doc """
Fetches open orders data for all currencies.
Example successful response:
{:ok,
[
%{
"amount" => "0.10000000",
"currency_pair" => "ETH/EUR",
"datetime" => "2018-02-13 16:24:00",
"id" => "951827494",
"price" => "750.00",
"type" => "1"
}
]}
"""
@spec open_orders_all(Credentials.t() | nil) :: tuple()
def open_orders_all(creds \\ nil), do: private("/v2/open_orders/all/", [], creds)
@doc """
Fetches order status.
Example successful response:
{:ok, %{"status" => "Open", "transactions" => []}}
"""
@spec order_status(String.t(), Credentials.t() | nil) :: tuple()
def order_status(id, creds \\ nil), do: private("/order_status/", [id: id], creds)
@doc """
Cancels an order.
Example successful response:
{:ok, %{"amount" => 0.1, "id" => 951827494, "price" => 750.0, "type" => 1}}
"""
@spec cancel_order(String.t(), Credentials.t() | nil) :: tuple()
def cancel_order(id, creds \\ nil), do: private("/v2/cancel_order/", [id: id], creds)
@doc """
Cancels all orders.
Example successful response:
{:ok, true}
"""
@spec cancel_all_orders(Credentials.t() | nil) :: tuple()
def cancel_all_orders(creds \\ nil), do: private("/cancel_all_orders/", [], creds)
@doc """
Places a limit buy order for a currency pair.
Accepts list of optional parameters as a keyword list. Allowed keys are: `limit_price` or `daily_order`.
Only one of these can be present. See [Bitstamp API docs](https://www.bitstamp.net/api/#buy-order)
for more info.
"""
@spec buy(CurrencyPair.t(), float(), float(), list() | nil, Credentials.t() | nil) :: tuple()
def buy(%CurrencyPair{} = currency_pair, amount, price, opts \\ [], creds \\ nil)
when is_list(opts),
do:
private(
"/v2/buy/#{segment(currency_pair)}/",
opts ++ [amount: to_string(amount), price: to_string(price)],
creds
)
@doc """
Places a buy market order for a currency pair.
"""
@spec buy_market(CurrencyPair.t(), float(), Credentials.t() | nil) :: tuple()
def buy_market(%CurrencyPair{} = currency_pair, amount, creds \\ nil),
do: private("/v2/buy/market/#{segment(currency_pair)}/", [amount: to_string(amount)], creds)
@doc """
Places a limit sell order for a currency pair.
Accepts list of optional parameters as a keyword list. Allowed keys are: `limit_price` or `daily_order`.
Only one of these can be present. See [Bitstamp API docs](https://www.bitstamp.net/api/#sell-order)
for more info.
"""
@spec sell(CurrencyPair.t(), float(), float(), list() | nil, Credentials.t() | nil) :: tuple()
def sell(%CurrencyPair{} = currency_pair, amount, price, opts \\ [], creds \\ nil)
when is_list(opts),
do:
private(
"/v2/sell/#{segment(currency_pair)}/",
opts ++ [amount: to_string(amount), price: to_string(price)],
creds
)
@doc """
Places a sell market order for a currency pair.
"""
@spec sell_market(CurrencyPair.t(), float(), Credentials.t() | nil) :: tuple()
def sell_market(%CurrencyPair{} = currency_pair, amount, creds \\ nil),
do: private("/v2/sell/market/#{segment(currency_pair)}/", [amount: to_string(amount)], creds)
@doc """
Fetches all withdrawal requests.
"""
@spec withdrawal_requests(list(), Credentials.t() | nil) :: tuple()
def withdrawal_requests(opts \\ [], creds \\ nil) when is_list(opts),
do: private("/v2/withdrawal-requests/", opts, creds)
@doc """
Executes bitcoin withdrawal.
"""
@spec withdrawal_btc(float(), String.t(), boolean(), Credentials.t() | nil) :: tuple()
def withdrawal_btc(amount, address, instant, creds \\ nil) do
instant =
case instant do
true -> 1
false -> 0
end
coin_withdrawal(creds, "bitcoin_withdrawal", amount, address, [instant: instant], :v1)
end
@doc """
Executes litecoin withdrawal.
"""
@spec withdrawal_ltc(float(), String.t(), Credentials.t() | nil) :: tuple()
def withdrawal_ltc(amount, address, creds \\ nil),
do: coin_withdrawal(creds, "ltc_withdrawal", amount, address)
@doc """
Executes ethereum withdrawal.
"""
@spec withdrawal_eth(float(), String.t(), Credentials.t() | nil) :: tuple()
def withdrawal_eth(amount, address, creds \\ nil),
do: coin_withdrawal(creds, "eth_withdrawal", amount, address)
@doc """
Executes ripple withdrawal.
"""
@spec withdrawal_xrp(float(), String.t(), String.t() | nil, Credentials.t() | nil) :: tuple()
def withdrawal_xrp(amount, address, destination_tag \\ nil, creds \\ nil) do
opts =
case destination_tag do
nil -> []
tag -> [destination_tag: tag]
end
coin_withdrawal(creds, "xrp_withdrawal", amount, address, opts)
end
@doc """
Executes bitcoin cash withdrawal.
"""
@spec withdrawal_bch(float(), String.t(), Credentials.t() | nil) :: tuple()
def withdrawal_bch(amount, address, creds \\ nil),
do: coin_withdrawal(creds, "bch_withdrawal", amount, address)
@doc """
Executes ripple withdrawal using v1 API.
"""
@spec withdrawal_ripple(float(), String.t(), String.t(), Credentials.t() | nil) :: tuple()
def withdrawal_ripple(amount, address, currency, creds \\ nil),
do: coin_withdrawal(creds, "ripple_withdrawal", amount, address, [currency: currency], :v1)
defp coin_withdrawal(creds, endpoint, amount, address, opts \\ [], version \\ :v2),
do:
private(
"/#{version(version)}#{endpoint}/",
opts ++ [amount: to_string(amount), address: address],
creds
)
@doc """
Retrieves bitcoin deposit address.
"""
@spec deposit_address_btc(Credentials.t() | nil) :: tuple()
def deposit_address_btc(creds \\ nil),
do: coin_deposit_address(creds, "bitcoin_deposit_address", :v1)
@doc """
Retrieves litecoin deposit address.
"""
@spec deposit_address_ltc(Credentials.t() | nil) :: tuple()
def deposit_address_ltc(creds \\ nil), do: coin_deposit_address(creds, "ltc_address")
@doc """
Retrieves the account's ethereum deposit address.
"""
@spec deposit_address_eth(Credentials.t() | nil) :: tuple()
def deposit_address_eth(creds \\ nil) do
  coin_deposit_address(creds, "eth_address")
end
@doc """
Retrieves the account's XRP deposit address.
"""
@spec deposit_address_xrp(Credentials.t() | nil) :: tuple()
def deposit_address_xrp(creds \\ nil) do
  coin_deposit_address(creds, "xrp_address")
end
@doc """
Retrieves the account's bitcoin cash deposit address.
"""
@spec deposit_address_bch(Credentials.t() | nil) :: tuple()
def deposit_address_bch(creds \\ nil) do
  coin_deposit_address(creds, "bch_address")
end
@doc """
Retrieves the account's ripple deposit address (legacy v1 endpoint).
"""
@spec deposit_address_ripple(Credentials.t() | nil) :: tuple()
def deposit_address_ripple(creds \\ nil) do
  coin_deposit_address(creds, "ripple_address", :v1)
end
# Shared helper for deposit-address endpoints; issues an empty-bodied
# private call against the versioned endpoint path.
defp coin_deposit_address(creds, endpoint, version \\ :v2) do
  private("/#{version(version)}#{endpoint}/", [], creds)
end
@doc """
Retrieves the account's unconfirmed bitcoin transaction data.
"""
@spec unconfirmed_btc(Credentials.t() | nil) :: tuple()
def unconfirmed_btc(creds \\ nil) do
  private("/unconfirmed_btc/", [], creds)
end
@doc """
Executes a transfer of funds from a sub account to the main account.

When `sub_account_id` is given, the transfer is made from that specific
sub account; otherwise the API infers the sub account from the credentials.
"""
@spec transfer_to_main(float(), String.t(), any(), Credentials.t() | nil) :: tuple()
def transfer_to_main(amount, currency, sub_account_id \\ nil, creds \\ nil) do
  # Always serialize the amount: the branch without a sub account previously
  # sent the raw number, unlike every other withdrawal/transfer endpoint.
  opts = [amount: to_string(amount), currency: currency]

  opts =
    case sub_account_id do
      nil -> opts
      id -> opts ++ [subAccount: id]
    end

  private("/transfer-to-main/", opts, creds)
end
@doc """
Executes a transfer of funds from the main account to a sub account.
"""
# The previous spec listed only three argument types for this 4-arity
# function (it omitted `sub_account_id`), so Dialyzer could never match it.
@spec transfer_from_main(float(), String.t(), any(), Credentials.t() | nil) :: tuple()
def transfer_from_main(amount, currency, sub_account_id, creds \\ nil) do
  opts = [amount: to_string(amount), currency: currency, subAccount: sub_account_id]
  private("/transfer-from-main/", opts, creds)
end
@doc """
Executes a bank withdrawal described by a `BankWithdrawal` struct.
"""
@spec open_bank_withdrawal(BankWithdrawal.t(), Credentials.t() | nil) :: tuple()
def open_bank_withdrawal(%BankWithdrawal{} = bank_withdrawal, creds \\ nil) do
  # Map.to_list/1 on a struct also emits the :__struct__ metadata key, which
  # would leak into the form body; strip struct metadata first.
  params =
    bank_withdrawal
    |> Map.from_struct()
    |> Map.to_list()

  private("/v2/withdrawal/open/", params, creds)
end
@doc """
Retrieves the status of a previously opened bank withdrawal.
"""
@spec bank_withdrawal_status(String.t(), Credentials.t() | nil) :: tuple()
def bank_withdrawal_status(id, creds \\ nil) do
  private("/v2/withdrawal/status/", [id: id], creds)
end
@doc """
Cancels a previously opened bank withdrawal.
"""
@spec cancel_bank_withdrawal(String.t(), Credentials.t() | nil) :: tuple()
def cancel_bank_withdrawal(id, creds \\ nil) do
  private("/v2/withdrawal/cancel/", [id: id], creds)
end
@doc """
Creates a new liquidation address for the given liquidation currency.
"""
@spec new_liquidation_address(String.t(), Credentials.t() | nil) :: tuple()
def new_liquidation_address(liquidation_currency, creds \\ nil),
  do: private("/v2/liquidation_address/new/", [liquidation_currency: liquidation_currency], creds)
@doc """
Retrieves transactions for a liquidation address.

When no address is given, transactions for all liquidation addresses
are returned.
"""
@spec liquidation_address_info(String.t() | nil, Credentials.t() | nil) :: tuple()
def liquidation_address_info(address \\ nil, creds \\ nil) do
  opts = if address, do: [address: address], else: []
  private("/v2/liquidation_address/info/", opts, creds)
end
# Performs an unauthenticated GET against the public API and decodes the
# JSON body, tagging transport failures and decode failures separately.
defp public(uri, data \\ []) do
  with {:ok, response} <- ApiClient.get(uri, [], data) do
    case Poison.decode(response.body) do
      {:ok, decoded} -> {:ok, decoded}
      {:error, reason} -> {:error, {:poison_decode_error, reason}}
      {:error, reason, _position} -> {:error, {:poison_decode_error, reason}}
    end
  else
    {:error, reason} -> {:error, {:http_error, reason}}
  end
end
# Performs an authenticated POST against the private API. Explicitly passed
# credentials take precedence; otherwise credentials come from the
# :ex_bitstamp application config.
defp private(uri, data, creds) do
  creds = creds_or_config(creds)

  case ApiClient.post(uri, {:form, data ++ signature(creds)}) do
    {:error, reason} ->
      {:error, {:http_error, reason}}

    {:ok, response} ->
      case Poison.decode(response.body) do
        {:ok, decoded} -> {:ok, decoded}
        {:error, reason} -> {:error, {:poison_decode_error, reason}}
        {:error, reason, _position} -> {:error, {:poison_decode_error, reason}}
      end
  end
end

# Resolves the effective credentials. Previously the application env was
# read three times per request; read it once and build the struct from it.
defp creds_or_config(%Credentials{} = creds), do: creds

defp creds_or_config(_) do
  config = Application.get_env(:ex_bitstamp, :creds)

  %Credentials{
    key: config.key,
    secret: config.secret,
    customer_id: config.customer_id
  }
end
# Builds the Bitstamp authentication fields for a private request: the API
# key, a nonce, and an HMAC-SHA256 signature over "<nonce><customer_id><key>"
# hex-encoded in uppercase by Base.encode16/1.
defp signature(%{key: key, secret: secret, customer_id: customer_id}) do
  # Millisecond OS time as nonce. NOTE(review): two requests in the same
  # millisecond would reuse a nonce and likely be rejected — TODO confirm.
  nonce = :os.system_time(:millisecond)
  signature =
    "#{nonce}#{customer_id}#{key}"
    |> encrypt_hmac_sha256(secret)
    |> Base.encode16()
  [key: key, nonce: nonce, signature: signature]
end
# HMAC-SHA256 of `message` keyed with `key`, via Erlang's :crypto.mac/4
# (the OTP 22+ replacement for the removed :crypto.hmac/3).
defp encrypt_hmac_sha256(message, key), do: :crypto.mac(:hmac, :sha256, key, message)
# Maps an API version tag to its URL path prefix: v1 endpoints live at the
# API root, v2 endpoints under "v2/".
defp version(:v1), do: ""
defp version(:v2), do: "v2/"
# Builds the lowercase currency-pair URL segment, e.g. from: "BTC", to: "USD" -> "btcusd".
defp segment(%CurrencyPair{from: from, to: to}), do: String.downcase(from <> to)
end
|
lib/ex_bitstamp.ex
| 0.898438
| 0.64058
|
ex_bitstamp.ex
|
starcoder
|
defmodule Octicons do
  @moduledoc """
  Octicons are a scalable set of icons handcrafted with <3 by GitHub.
  See [the website][octicons] for an up-to-date reference of all of the available icons.
  [octicons]: https://octicons.github.com/
  """

  alias Octicons.Storage

  @type octicon_name :: String.t() | atom
  @type t :: map

  @doc """
  Retrieves the attributes of the icon or `nil` if the named icon doesn't exist.
  ## Examples
  ```
  iex> Octicons.icon(:beaker)
  %{
    "aria-hidden" => "true",
    "class" => "octicons octicons-beaker",
    "figma" => %{"file" => "FP7lqd1V00LUaT5zvdklkkZr", "id" => "0:26"},
    "height" => 16,
    "keywords" => ["experiment", "labs", "experimental", "feature", "test",
     "science", "education", "study", "development", "testing"],
    "name" => "beaker",
    "path" => "<path fill-rule=\\"evenodd\\" d=\\"M14.38 14.59L11 7V3h1V2H3v1h1v4L.63 14.59A1 1 0 0 0 1.54 16h11.94c.72 0 1.2-.75.91-1.41h-.01zM3.75 10L5 7V3h5v4l1.25 3h-7.5zM8 8h1v1H8V8zM7 7H6V6h1v1zm0-3h1v1H7V4zm0-3H6V0h1v1z\\"/>",
    "symbol" => "beaker",
    "version" => "1.1",
    "viewBox" => "0 0 16 16",
    "width" => 16
  }
  ```
  """
  @spec icon(octicon_name) :: t | nil
  def icon(name) when is_atom(name), do: icon(Atom.to_string(name))

  def icon(name) do
    name
    |> Storage.get_data()
    |> merge_additional_info(name)
  end

  @doc """
  Returns the SVG tag that renders the icon, or `nil` when the icon is unknown.
  ## Options
  * `:"aria-label"` Aria label for the SVG tag. When `aria-label` is specified, the `aria-hidden`
    attribute is removed.
  * `:class` CSS class text to add to the classes already present
  * `:height` Height in pixels to render the icon. If only `:height` is specified, width is
    calculated to maintain the aspect ratio.
  * `:width` Width in pixels to render the icon. If only `:width` is specified, height is
    calculated to maintain the aspect ratio.
  ## Examples
  ```
  iex> Octicons.to_svg(:beaker)
  "<svg aria-hidden=\\"true\\" class=\\"octicons octicons-beaker\\" height=\\"16\\" \
  version=\\"1.1\\" viewBox=\\"0 0 16 16\\" width=\\"16\\"><path fill-rule=\\"evenodd\\" \
  d=\\"M14.38 14.59L11 7V3h1V2H3v1h1v4L.63 14.59A1 1 0 0 0 1.54 16h11.94c.72 0 \
  1.2-.75.91-1.41h-.01zM3.75 10L5 7V3h5v4l1.25 3h-7.5zM8 8h1v1H8V8zM7 \
  7H6V6h1v1zm0-3h1v1H7V4zm0-3H6V0h1v1z\\"/></svg>"
  ```
  """
  @doc since: "v0.7.0"
  # The spec previously promised String.t() even though the nil clause below
  # returns nil for unknown icons; the union type reflects actual behavior.
  @spec to_svg(octicon_name | t, keyword) :: String.t() | nil
  def to_svg(icon, options \\ [])

  def to_svg(nil, _), do: nil

  def to_svg(name, options) when is_atom(name) or is_binary(name), do: to_svg(icon(name), options)

  def to_svg(icon_data, options) when is_list(options),
    do: to_svg(icon_data, to_string_key_map(options))

  def to_svg(icon_data = %{}, options) do
    symbol = icon_data["symbol"]
    path = icon_data["path"]

    "<svg #{html_attributes(symbol, options)}>#{path}</svg>"
  end

  @doc false
  @deprecated "Use Octicons.to_svg/2 in its place."
  def toSVG(icon, options \\ []), do: to_svg(icon, options)

  @doc """
  Get the version of the packaged Octicons data.
  ## Examples
  ```
  iex> Octicons.version()
  "9.0.0"
  ```
  """
  @spec version() :: String.t()
  def version do
    Storage.get_version()
  end

  # When an aria-label is supplied the icon is meaningful content, so expose
  # it as role="img" and drop aria-hidden.
  defp aria(map, %{"aria-label" => label}) do
    map
    |> Map.merge(%{"aria-label" => label})
    |> Map.merge(%{"role" => "img"})
    |> Map.delete("aria-hidden")
  end

  defp aria(map, _), do: map

  # Appends caller-supplied classes after the default "octicons octicons-<key>".
  defp class(map, key, %{"class" => option_class}) do
    Map.merge(
      map,
      %{
        "class" => String.trim("octicons octicons-#{key} #{option_class}")
      }
    )
  end

  defp class(map, _, _), do: map

  # Normalize numeric height/width options to strings first, then resolve the
  # missing dimension (if any) while preserving the icon's aspect ratio.
  defp dimensions(map, key, options = %{"height" => height}) when not is_binary(height) do
    dimensions(map, key, Map.put(options, "height", Integer.to_string(height)))
  end

  defp dimensions(map, key, options = %{"width" => width}) when not is_binary(width) do
    dimensions(map, key, Map.put(options, "width", Integer.to_string(width)))
  end

  defp dimensions(map, _, %{"height" => height, "width" => width}) do
    Map.merge(map, %{"height" => height, "width" => width})
  end

  defp dimensions(map, key, %{"height" => height}) do
    data = Storage.get_data(key)

    Map.merge(
      map,
      %{
        "height" => height,
        "width" => round(parse_int(height) * data["width"] / data["height"])
      }
    )
  end

  defp dimensions(map, key, %{"width" => width}) do
    data = Storage.get_data(key)

    Map.merge(
      map,
      %{
        "height" => round(parse_int(width) * data["height"] / data["width"]),
        "width" => width
      }
    )
  end

  defp dimensions(map, _, _), do: map

  defp default_options(key) do
    data = Storage.get_data(key)

    %{
      "version" => "1.1",
      "width" => data["width"],
      "height" => data["height"],
      "viewBox" => "0 0 #{data["width"]} #{data["height"]}",
      "class" => "octicons octicons-#{key}",
      "aria-hidden" => "true"
    }
  end

  # Builds the attribute string for the <svg> tag: icon data, defaults and
  # caller options merged (options win), then non-attribute keys dropped.
  defp html_attributes(key, options) do
    key
    |> Storage.get_data()
    |> Map.merge(default_options(key))
    |> Map.merge(options)
    |> dimensions(key, options)
    |> class(key, options)
    |> aria(options)
    |> Map.drop(["figma", "keywords", "name", "path"])
    |> Map.to_list()
    |> Enum.map(fn {key, value} -> "#{key}=\"#{value}\"" end)
    |> Enum.join(" ")
    |> String.trim()
  end

  defp merge_additional_info(nil, _), do: nil

  defp merge_additional_info(map, name) do
    map
    |> Map.merge(default_options(name))
    |> Map.merge(%{"symbol" => name})
  end

  defp parse_int(text) do
    {int, _} = Integer.parse(text)
    int
  end

  # Converts an atom-keyed option keyword list into a string-keyed map.
  defp to_string_key_map(list) do
    Map.new(list, fn {key, value} -> {Atom.to_string(key), value} end)
  end
end
|
lib/octicons.ex
| 0.912526
| 0.752843
|
octicons.ex
|
starcoder
|
# Scrolling layout modal component. Great for displaying content within a modal that needs to scroll.
# data is a tuple in the form of ` elixir {label, component, component_data, component_opts}`
# style opts
# `width: :integer`
# `height: :integer`
# `frame_width: :integer`
# `frame_height: :integer`
# `content_height: :integer`
# `content_width: :integer`
# `show_check: :boolean`
# `show_close: :boolean`
# ``` elixir
# <%= graph font_size: 20 %>
# <%= component FloUI.Modal.ScrollLayout,
# {"Label", FloUI.SelectionList, {@selection_list, @selected}, [id: :project_list]},
# id: :scroll_layout,
# width: 500,
# height: 520,
# frame_width: 480,
# frame_height: 500,
# content_width: 480,
# content_height: @content_height,
# show_check: true,
# show_close: true
# %>
# ```
# """
# use Scenic.Component
# alias Scenic.Graph
# import Scenic.Primitives
# alias FloUI.Theme
# alias FloUI.Modal.Body
# alias FloUI.Modal.Header
# import FloUI.Scrollable.Components
# @graph Graph.build(font_size: 16)
# def validate(nil), do: :invalid_data
# def validate(data), do: {:ok, data}
# def init(scene, {title, cmp, data, cmp_opts}, opts) do
# scene =
# assign(scene,
# graph: @graph,
# title: title,
# component: cmp,
# component_data: data,
# component_opts: cmp_opts,
# width: opts[:width] || 500,
# height: opts[:height] || 500,
# frame_width: opts[:frame_width] || 480,
# frame_height: opts[:frame_height] || 500,
# content_width: opts[:content_width] || 500,
# content_height: opts[:content_height] || 500,
# show_check: opts[:show_check] || false,
# show_close: opts[:show_close] || false
# )
# |> render_layout
# {:ok, scene}
# end
# def handle_event({:click, :btn_check}, _from, scene) do
# send_parent_event(scene, :modal_done)
# {:noreply, scene}
# end
# def handle_event({:click, :btn_close}, _from, scene) do
# send_parent_event(scene, :modal_close)
# {:noreply, scene}
# end
# def handle_event(event, _, scene) do
# {:cont, event, scene}
# end
# defp render_layout(
# %{
# assigns: %{
# graph: graph,
# title: title,
# component: cmp,
# component_data: cmp_data,
# component_opts: cmp_opts,
# width: width,
# height: height,
# frame_width: frame_width,
# frame_height: frame_height,
# content_width: content_width,
# content_height: content_height,
# show_check: show_check,
# show_close: show_close
# }
# } = scene
# ) do
# graph =
# graph
# |> Body.add_to_graph(nil, translate: {0, 50}, width: width, height: height)
# |> scrollable(
# %{
# frame: {frame_width, frame_height},
# content: %{x: 0, y: 10, width: content_width, height: content_height}
# },
# &cmp.add_to_graph(&1, cmp_data, cmp_opts),
# id: :scroll_box,
# translate: {0, 60},
# vertical_scroll_bar: [
# scroll_buttons: true,
# scroll_bar_theme: Scenic.Primitive.Style.Theme.preset(:dark),
# scroll_bar_thickness: 15
# ]
# )
# |> Header.add_to_graph(title, width: width, show_check: show_check, show_close: show_close)
# assign(scene, graph: graph)
# |> push_graph(graph)
# end
# end
|
lib/modal/scroll_layout.ex
| 0.690246
| 0.664568
|
scroll_layout.ex
|
starcoder
|
defmodule BMP280 do
  use GenServer

  require Logger

  alias BMP280.{Calc, Comm, Measurement, Transport}

  # Default sea-level pressure estimate (Pa) used for altitude calculation
  # until a caller updates or forces it.
  @sea_level_pa 100_000
  @default_bmp280_bus_address 0x77
  # Milliseconds between automatic sensor reads.
  @polling_interval 1000

  @typedoc """
  The type of sensor in use
  If the sensor is unknown, then number in the parts ID register is used.
  """
  @type sensor_type() :: :bmp180 | :bmp280 | :bme280 | :bme680 | 0..255

  @moduledoc """
  Read temperature and pressure from a Bosch BM280, BME280, or BME680 sensor
  """

  @typedoc """
  BMP280 GenServer start_link options
  * `:name` - a name for the GenServer
  * `:bus_name` - which I2C bus to use (e.g., `"i2c-1"`)
  * `:bus_address` - the address of the BMP280 (defaults to 0x77)
  * `:sea_level_pa` - a starting estimate for the sea level pressure in Pascals
  """
  @type options() :: [
          name: GenServer.name(),
          bus_name: String.t(),
          bus_address: 0x76 | 0x77,
          sea_level_pa: number()
        ]

  @doc """
  Start a new GenServer for interacting with a BMP280
  Normally, you'll want to pass the `:bus_name` option to specify the I2C
  bus going to the BMP280.
  """
  @spec start_link(options()) :: GenServer.on_start()
  def start_link(init_arg) do
    # Only :name is a GenServer option; everything else is passed to init/1.
    options = Keyword.take(init_arg, [:name])
    GenServer.start_link(__MODULE__, init_arg, options)
  end

  @doc """
  Return the type of sensor
  This function returns the cached result of reading the ID register.
  if the part is recognized. If not, it returns the integer read.
  """
  @spec sensor_type(GenServer.server()) :: sensor_type()
  def sensor_type(server) do
    GenServer.call(server, :sensor_type)
  end

  @doc """
  Measure the current temperature, pressure, altitude
  An error is returned if no measurement is available yet.
  """
  @spec measure(GenServer.server()) :: {:ok, Measurement.t()} | {:error, any()}
  def measure(server) do
    GenServer.call(server, :measure)
  end

  @deprecated "Use BMP280.measure/1 instead"
  def read(server), do: measure(server)

  @doc """
  Update the sea level pressure estimate
  The sea level pressure should be specified in Pascals. The estimate
  is used for altitude calculations.
  """
  @spec update_sea_level_pressure(GenServer.server(), number()) :: :ok
  def update_sea_level_pressure(server, new_estimate) do
    GenServer.call(server, {:update_sea_level, new_estimate})
  end

  @doc """
  Force the altitude to a known value
  Altitude calculations depend on the accuracy of the sea level pressure estimate. Since
  the sea level pressure changes based on the weather, it needs to be kept up to date
  or altitude measurements can be pretty far off. Another way to set the sea level pressure
  is to report a known altitude. Call this function with the current altitude in meters.
  This function returns an error if no measurement has been taken yet.
  """
  @spec force_altitude(GenServer.server(), number()) :: :ok | {:error, any()}
  def force_altitude(server, altitude_m) do
    GenServer.call(server, {:force_altitude, altitude_m})
  end

  @doc """
  Detect the type of sensor that is located at the I2C address
  If the sensor is a known BMP280 or BME280 the response will either contain
  `:bmp280` or `:bme280`. If the sensor does not report back that it is one of
  those two types of sensors the return value will contain the id value that
  was reported back from the sensor.
  The bus address is likely going to be 0x77 (the default) or 0x76.
  """
  @spec detect(String.t(), 0x76 | 0x77) :: {:ok, sensor_type()} | {:error, any()}
  def detect(bus_name, bus_address \\ @default_bmp280_bus_address) do
    with {:ok, transport} <- Transport.open(bus_name, bus_address) do
      Comm.sensor_type(transport)
    end
  end

  @impl GenServer
  def init(args) do
    bus_name = Keyword.get(args, :bus_name, "i2c-1")
    bus_address = Keyword.get(args, :bus_address, @default_bmp280_bus_address)

    Logger.info(
      "[BMP280] Starting on bus #{bus_name} at address #{inspect(bus_address, base: :hex)}"
    )

    with {:ok, transport} <- Transport.open(bus_name, bus_address),
         {:ok, sensor_type} <- Comm.sensor_type(transport) do
      state = %{
        transport: transport,
        calibration: nil,
        sea_level_pa: Keyword.get(args, :sea_level_pa, @sea_level_pa),
        sensor_type: sensor_type,
        last_measurement: nil
      }

      # Defer the slow sensor initialization to handle_continue so init/1
      # returns quickly and doesn't block the supervisor.
      {:ok, state, {:continue, :init_sensor}}
    else
      _error ->
        {:stop, :device_not_found}
    end
  end

  @impl GenServer
  def handle_continue(:init_sensor, state) do
    Logger.info("[BMP280] Initializing sensor type #{state.sensor_type}")

    # Initialize the sensor, take a first reading, then start the polling loop.
    new_state =
      state
      |> init_sensor()
      |> read_and_put_new_measurement()

    schedule_measurement()

    {:noreply, new_state}
  end

  @impl GenServer
  def handle_call(:measure, _from, state) do
    # Serve the cached measurement taken by the polling loop; it is nil only
    # before the first successful read.
    if state.last_measurement do
      {:reply, {:ok, state.last_measurement}, state}
    else
      {:reply, {:error, :no_measurement}, state}
    end
  end

  def handle_call(:sensor_type, _from, state) do
    {:reply, state.sensor_type, state}
  end

  def handle_call({:update_sea_level, new_estimate}, _from, state) do
    {:reply, :ok, %{state | sea_level_pa: new_estimate}}
  end

  def handle_call({:force_altitude, altitude_m}, _from, state) do
    # Derive the sea-level pressure from the last measured pressure and the
    # caller-supplied known altitude.
    if state.last_measurement do
      sea_level = Calc.sea_level_pressure(state.last_measurement.pressure_pa, altitude_m)
      {:reply, :ok, %{state | sea_level_pa: sea_level}}
    else
      {:reply, {:error, :no_measurement}, state}
    end
  end

  @impl GenServer
  def handle_info(:schedule_measurement, state) do
    schedule_measurement()
    {:noreply, read_and_put_new_measurement(state)}
  end

  # Arms the next polling tick.
  defp schedule_measurement() do
    Process.send_after(self(), :schedule_measurement, @polling_interval)
  end

  # Dispatches init to the sensor-specific module for the detected part.
  defp init_sensor(state) do
    state.sensor_type |> sensor_module() |> apply(:init, [state])
  end

  defp read_sensor(state) do
    state.sensor_type |> sensor_module() |> apply(:read, [state])
  end

  # Reads the sensor and caches the measurement; on failure the previous
  # measurement is kept and the error is logged.
  defp read_and_put_new_measurement(state) do
    case read_sensor(state) do
      {:ok, measurement} ->
        %{state | last_measurement: measurement}

      {:error, reason} ->
        Logger.error("[BMP280] Error reading measurement: #{inspect(reason)}")
        state
    end
  end

  defp sensor_module(:bmp180), do: BMP280.BMP180Sensor
  defp sensor_module(:bmp280), do: BMP280.BMP280Sensor
  defp sensor_module(:bme280), do: BMP280.BME280Sensor
  defp sensor_module(:bme680), do: BMP280.BME680Sensor
end
|
lib/bmp280.ex
| 0.919667
| 0.555496
|
bmp280.ex
|
starcoder
|
defmodule Eventually do
  @moduledoc """
  Macros to support assertions/refutations that might not be correct immediately
  but will eventually become so due to, say, eventual consistency.
  """

  @default_timeout 1000
  @default_interval 10

  @doc """
  Assert that the passed clause eventually returns a truthy value.
  Returns the first value that was truthy.
  """
  defmacro assert_eventually(
             clause,
             timeout \\ @default_timeout,
             interval \\ @default_interval
           ) do
    quote do
      fun = fn -> unquote(clause) end

      # Fully qualify the helper: the unqualified call only compiled when the
      # caller had eventually/4 in scope, which breaks under selective imports
      # such as `import Eventually, only: [assert_eventually: 1]`.
      case unquote(__MODULE__).eventually(fun, true, unquote(timeout), unquote(interval)) do
        {:ok, value} ->
          value

        {:fail, value} ->
          raise ExUnit.AssertionError,
            expr: unquote(escape_quoted(:assert_eventually, clause)),
            message: "Expected truthy, last got #{inspect(value)}"
      end
    end
  end

  @doc """
  Assert that the passed clause eventually returns a falsy value.
  Returns the first value that was falsy.
  """
  defmacro refute_eventually(
             clause,
             timeout \\ @default_timeout,
             interval \\ @default_interval
           ) do
    quote do
      fun = fn -> unquote(clause) end

      # Qualified for the same selective-import reason as assert_eventually/3.
      case unquote(__MODULE__).eventually(fun, false, unquote(timeout), unquote(interval)) do
        {:ok, value} ->
          value

        {:fail, value} ->
          raise ExUnit.AssertionError,
            expr: unquote(escape_quoted(:refute_eventually, clause)),
            message: "Expected false or nil, last got #{inspect(value)}"
      end
    end
  end

  @doc false
  @spec eventually(fun(), boolean(), non_neg_integer(), non_neg_integer()) ::
          {:ok, term()} | {:fail, any()}
  def eventually(fun, result, timeout, interval),
    do:
      do_eventually(
        fun,
        result,
        interval,
        DateTime.add(DateTime.utc_now(), timeout, :millisecond)
      )

  # Polls `fun` every `interval` ms until its truthiness matches `result` or
  # the deadline passes; returns the last observed value either way.
  defp do_eventually(fun, result, interval, stop_at) do
    case check_condition(fun, result) do
      {:ok, value} ->
        {:ok, value}

      {:fail, value} ->
        if DateTime.compare(stop_at, DateTime.utc_now()) == :lt do
          {:fail, value}
        else
          Process.sleep(interval)
          do_eventually(fun, result, interval, stop_at)
        end
    end
  end

  # Convert the truthiness of the condition to a boolean reflecting whether
  # it matches the test result
  defp check_condition(fun, result) do
    value = fun.()

    if (value && result) || (!value && !result) do
      {:ok, value}
    else
      {:fail, value}
    end
  end

  defp escape_quoted(kind, expr) do
    Macro.escape({kind, [], [expr]})
  end
end
|
lib/eventually.ex
| 0.80271
| 0.547283
|
eventually.ex
|
starcoder
|
defmodule Map.Parser do
  @moduledoc """
  Parses an OpenStreetMap XML (`.osm`) file into a `Map.Parsed` struct of
  nodes, ways and relations.
  """

  # NOTE(review): the project's `Map.*` namespace shadows Elixir's `Map`
  # module in reading; `Map.put/3` below still resolves to the standard
  # library. Renaming the namespace would touch all callers, so it is kept.

  @default_map_path "data/map.osm"

  def default_map_path, do: @default_map_path

  def load_default, do: load(default_map_path())

  @doc """
  Loads and parses the OSM XML file at `map_path`.

  Raises if the file cannot be read or does not have an `<osm>` root element.
  """
  def load(map_path) when is_binary(map_path) do
    raw = File.read!(map_path)
    parsed = Saxy.SimpleForm.parse_string(raw)
    {:ok, {"osm", _attr, children}} = parsed

    # Elements marked action="delete" are editor leftovers and must not end
    # up in the parsed map.
    children = Enum.reject(children, &deleted?(&1))

    nodes = nodes(children)
    ways = ways(children, nodes)
    relations = relations(children, ways, nodes)

    %Map.Parsed{ways: filter_typed(ways), nodes: filter_typed(nodes), relations: relations}
  end

  # Keeps only objects whose tags contain a binary "type" value.
  @spec filter_typed(%{optional(binary()) => Map.Way.t() | Map.Node.t()}) ::
          %{optional(binary()) => Map.Way.t() | Map.Node.t()}
  defp filter_typed(objs) do
    Enum.reduce(objs, %{}, fn
      {key, obj = %{tags: %{type: type}}}, acc when is_binary(type) -> Map.put(acc, key, obj)
      {_key, _obj}, acc -> acc
    end)
  end

  # Renamed from is_deleted?/1: the is_ prefix is reserved for guard-safe
  # macros by convention.
  defp deleted?({_tag, attrs_list, _children}) do
    Enum.member?(attrs_list, {"action", "delete"})
  end

  defp deleted?(_), do: false

  # Builds relations, resolving member refs against the already-parsed ways
  # and nodes maps.
  defp relations(parsed, ways, nodes) do
    parsed
    |> filter_by("relation")
    |> Enum.into(%{}, fn w ->
      {"relation", attrs_list, children} = w
      attrs = Enum.into(attrs_list, %{})

      members =
        children
        |> filter_by("member")
        |> Enum.map(fn {"member", member_attrs_list, _} ->
          member_attrs = Enum.into(member_attrs_list, %{})

          ref =
            case member_attrs["type"] do
              "way" -> ways[member_attrs["ref"]]
              "node" -> nodes[member_attrs["ref"]]
            end

          %{ref: ref, role: member_attrs["role"]}
        end)

      rel =
        %Map.Relation{
          id: attrs["id"],
          tags: tags(children),
          members: members
        }
        |> Map.Element.with_bbox()
        |> Map.Relation.purge_member_bbox()

      {attrs["id"], rel}
    end)
  end

  defp ways(parsed, nodes) do
    parsed
    |> filter_by("way")
    |> Enum.into(%{}, fn w ->
      {"way", attrs_list, children} = w
      attrs = Enum.into(attrs_list, %{})

      way_nodes =
        children
        |> filter_by("nd")
        |> Enum.map(fn {"nd", [{"ref", ref}], _} -> nodes[ref] end)
        |> ensure_right_hand_winding()

      # `length/1` is O(n); comparing to the empty list is constant time.
      if way_nodes == [], do: raise("Way without nodes: #{inspect(w)}")

      {attrs["id"],
       Map.Element.with_bbox(%Map.Way{
         id: attrs["id"],
         tags: tags(children),
         nodes: way_nodes
       })}
    end)
  end

  defp nodes(parsed) do
    parsed
    |> filter_by("node")
    |> Enum.into(%{}, fn w ->
      {"node", attrs_list, children} = w
      attrs = Enum.into(attrs_list, %{})

      {attrs["id"],
       %Map.Node{
         id: attrs["id"],
         tags: tags(children),
         lon: to_f(attrs["lon"]),
         lat: to_f(attrs["lat"])
       }}
    end)
  end

  # For closed ways (first node == last node) with negative signed area,
  # reverse the node order so the winding direction is consistent.
  defp ensure_right_hand_winding([]), do: []

  defp ensure_right_hand_winding(nodes) do
    if hd(nodes) == List.last(nodes) && area(nodes) < 0,
      do: Enum.reverse(nodes),
      else: nodes
  end

  # Shoelace-style signed area over lon/lat pairs; only the sign is used.
  defp area(nodes) do
    Enum.reduce(tl(nodes), {0, hd(nodes)}, fn curr, {area, prev} ->
      area = area + (curr.lon - prev.lon) * (prev.lat + curr.lat)
      {area, curr}
    end)
    |> elem(0)
  end

  defp filter_by(enum, tag_name) do
    Enum.filter(enum, &match?({^tag_name, _attr, _children}, &1))
  end

  # NOTE(review): String.to_atom/1 creates atoms from file content; atoms are
  # never garbage collected, so a hostile or huge OSM file could exhaust the
  # atom table. Kept for backward compatibility with existing tag lookups.
  defp tags(enum) do
    enum
    |> filter_by("tag")
    |> Enum.into(%{}, fn {"tag", [{"k", key}, {"v", val}], _children} ->
      {String.to_atom(key), weak_bool(val)}
    end)
  end

  defp to_f(str) do
    {f, ""} = Float.parse(str)
    f
  end

  # OSM uses "yes"/"no" for boolean tag values; everything else passes through.
  defp weak_bool("yes"), do: true
  defp weak_bool("no"), do: false
  defp weak_bool(x), do: x
end
|
lib/map/parser.ex
| 0.66628
| 0.486514
|
parser.ex
|
starcoder
|
defmodule Meeseeks.CSS do
  @moduledoc """
  Compile CSS selector syntax into `Meeseeks.Selector`s.
  ## Supported Syntax
  | Pattern | Example | Notes |
  | --- | --- | --- |
  | **Basic Selectors** | --- | --- |
  | `*` | `*` | Matches any for `ns` or `tag` |
  | `tag` | `div` | |
  | `ns\\|tag` | `foo\\|div` | |
  | `#id` | `div#bar`, `#bar` | |
  | `.class` | `div.baz`, `.baz` | |
  | `[attr]` | `a[href]`, `[lang]` | |
  | `[^attrPrefix]` | `div[^data-]` | |
  | `[attr=val]` | `a[rel="nofollow"]` | |
  | `[attr~=valIncludes]` | `div[things~=thing1]` | |
  | `[attr\\|=valDash]` | `p[lang\\|=en]` | |
  | `[attr^=valPrefix]` | `a[href^=https:]` | |
  | `[attr$=valSuffix]` | `img[src$=".png"]` | |
  | `[attr*=valContaining]` | `a[href*=admin]` | |
  | ​ | | |
  | **Pseudo Classes** | --- | --- |
  | `:first-child` | `li:first-child` | |
  | `:first-of-type` | `li:first-of-type` | |
  | `:last-child` | `tr:last-child` | |
  | `:last-of-type` | `tr:last-of-type` | |
  | `:not` | `:not(p:nth-child(even))` | Selectors cannot contain combinators or the `not` pseudo class |
  | `:nth-child(n)` | `p:nth-child(even)` | Supports even, odd, 1.., or *a*n+*b* formulas |
  | `:nth-last-child(n)` | `p:nth-last-child(2)` | Supports even, odd, 1.., or *a*n+*b* formulas |
  | `:nth-last-of-type(n)` | `p:nth-last-of-type(2n+1)` | Supports even, odd, 1.., or *a*n+*b* formulas |
  | `:nth-of-type(n)` | `p:nth-of-type(1)` | Supports even, odd, 1.., or *a*n+*b* formulas |
  | ​ | | |
  | **Combinators** | --- | --- |
  | `X Y` | `div.header .logo` | `Y` descendant of `X` |
  | `X > Y` | `ol > li` | `Y` child of `X` |
  | `X + Y` | `div + p` | `Y` is sibling directly after `X` |
  | `X ~ Y` | `div ~ p` | `Y` is any sibling after `X` |
  | `X, Y, Z` | `button.standard, button.alert` | Matches `X`, `Y`, or `Z` |
  ## Examples
      iex> import Meeseeks.CSS
      iex> css("a[href^=\\"https://\\"]")
      %Meeseeks.Selector.Element{
        combinator: nil,
        selectors: [
          %Meeseeks.Selector.Element.Tag{value: "a"},
          %Meeseeks.Selector.Element.Attribute.ValuePrefix{
            attribute: "href",
            value: "https://"}]}
      iex> css("ul, ol")
      [%Meeseeks.Selector.Element{
         combinator: nil,
         selectors: [%Meeseeks.Selector.Element.Tag{value: "ul"}]},
       %Meeseeks.Selector.Element{
         combinator: nil,
         selectors: [%Meeseeks.Selector.Element.Tag{value: "ol"}]}]
  """

  alias Meeseeks.Selector.CSS

  @doc """
  Compiles a string representing CSS selector syntax into one or more
  `Meeseeks.Selector`s.
  When a static string literal is provided this work will be done during
  compilation, but if a string with interpolated values or a var is provided
  this work will occur at run time.
  """
  # Literal strings are compiled to selector structs at macro-expansion time
  # and embedded in the caller's AST via Macro.escape/1.
  defmacro css(string_literal) when is_binary(string_literal) do
    string_literal
    |> CSS.compile_selectors()
    |> Macro.escape()
  end

  # Anything non-literal (vars, interpolation) defers compilation to runtime.
  defmacro css(other) do
    quote do: CSS.compile_selectors(unquote(other))
  end
end
|
lib/meeseeks/css.ex
| 0.864811
| 0.676002
|
css.ex
|
starcoder
|
defmodule Unicode.String do
@moduledoc """
This module provides functions that implement somee
of the [Unicode](https://unicode.org) stanards:
* The [Unicode Case Folding](https://www.unicode.org/versions/Unicode13.0.0/ch03.pdf) algorithm
to provide case-independent equality checking irrespective of language or script.
* The [Unicode Segmentation](https://unicode.org/reports/tr29/) algorithm to detect,
break or splut strings into grapheme clusters, works and sentences.
"""
alias Unicode.String.Segment
alias Unicode.String.Break
alias Unicode.Property
defdelegate fold(string), to: Unicode.String.Case.Folding
defdelegate fold(string, type), to: Unicode.String.Case.Folding
@type string_interval :: {String.t, String.t}
@type break_type :: :grapheme | :word | :line | :sentence
@type error_return :: {:error, String.t}
@type options :: [
{:locale, String.t},
{:break, break_type},
{:suppressions, boolean}
]
@type split_options :: [
{:locale, String.t},
{:break, break_type},
{:suppressions, boolean},
{:trim, boolean}
]
@type break_or_no_break :: :break | :no_break
@type break_match ::
{break_or_no_break, {String.t, {String.t, String.t}}} |
{break_or_no_break, {String.t, String.t}}
@doc """
Compares two strings in a case insensitive
manner.
Case folding is applied to the two string
arguments which are then compared with the
`==` operator.
## Arguments
* `string_a` and `string_b` are two strings
to be compared
* `type` is the case folding type to be
applied. The alternatives are `:full`,
`:simple` and `:turkic`. The default is
`:full`.
## Returns
* `true` or `false`
## Notes
* This function applies the [Unicode Case Folding
algorithm](https://www.unicode.org/versions/Unicode13.0.0/ch03.pdf)
* The algorithm does not apply any treatment to diacritical
marks hence "compare strings without accents" is not
part of this function.
## Examples
iex> Unicode.String.equals_ignoring_case? "ABC", "abc"
true
iex> Unicode.String.equals_ignoring_case? "beißen", "beissen"
true
iex> Unicode.String.equals_ignoring_case? "grüßen", "grussen"
false
"""
@spec equals_ignoring_case?(String.t, String.t, atom()) :: boolean
def equals_ignoring_case?(string_a, string_b, type \\ :full) do
  # Case-fold both sides with the same folding type, then compare directly.
  fold_with_type = &fold(&1, type)
  fold_with_type.(string_a) == fold_with_type.(string_b)
end
@default_locale "root"
@doc """
Returns a boolean indicating if the
requested break is applicable
at the point between the two string
segments represented by `{string_before, string_after}`.
## Arguments
* `string` is any `String.t`.
* `options` is a keyword list of
options.
## Returns
* `true` or `false` or
* raises an exception if there is an error
## Options
* `:locale` is any locale returned by
`Unicode.String.Segment.known_locales/0`.
The default is "root" which corresponds
to the break rules defined by the
[Unicode Segmentation](https://unicode.org/reports/tr29/) rules.
* `:break` is the type of break. It is one of
`:grapheme`, `:word`, `:line` or `:sentence`. The
default is `:word`.
* `:suppressions` is a boolean which,
if `true`, will suppress breaks for common
abbreviations defined for the `locale`. The
default is `true`.
## Examples
iex> Unicode.String.break? {"This is ", "some words"}
true
iex> Unicode.String.break? {"This is ", "some words"}, break: :sentence
false
iex> Unicode.String.break? {"This is one. ", "This is some words."}, break: :sentence
true
"""
@spec break?(string_interval, options) :: boolean
def break?({string_before, string_after}, options \\ []) do
  # Delegate to break/2 and collapse its tagged result to a boolean,
  # raising on invalid options just like the tagged API reports them.
  {string_before, string_after}
  |> break(options)
  |> case do
    {:break, _match} -> true
    {:no_break, _match} -> false
    {:error, reason} -> raise ArgumentError, reason
  end
end
@doc """
Returns match data indicating whether the requested
break is applicable at the point between the two
string segments given as `{string_before, string_after}`.

## Arguments

* `string` is any `String.t`.

* `options` is a keyword list of options.

## Returns

A tuple indicating if a break would be applicable
at this point between `string_before` and
`string_after`.

* `{:break, {string_before, {matched_string, remaining_string}}}` or

* `{:no_break, {string_before, {matched_string, remaining_string}}}` or

* `{:error, reason}`

## Options

* `:locale` is any locale returned by
  `Unicode.String.Segment.known_locales/0`.
  The default is "root" which corresponds
  to the break rules defined by the
  [Unicode Segmentation](https://unicode.org/reports/tr29/) rules.

* `:break` is the type of break. It is one of
  `:grapheme`, `:word`, `:line` or `:sentence`. The
  default is `:word`.

* `:suppressions` is a boolean which,
  if `true`, will suppress breaks for common
  abbreviations defined for the `locale`. The
  default is `true`.

## Examples

    iex> Unicode.String.break {"This is ", "some words"}
    {:break, {"This is ", {"s", "ome words"}}}

    iex> Unicode.String.break {"This is ", "some words"}, break: :sentence
    {:no_break, {"This is ", {"s", "ome words"}}}

    iex> Unicode.String.break {"This is one. ", "This is some words."}, break: :sentence
    {:break, {"This is one. ", {"T", "his is some words."}}}

"""
@spec break(string_interval, options) :: break_match | error_return
def break({string_before, string_after}, options \\ []) do
  requested_break = Keyword.get(options, :break, :word)
  requested_locale = Keyword.get(options, :locale, @default_locale)

  # Validation order is significant: the break type is checked before
  # the locale, so an invalid break wins when both are invalid.
  with {:ok, break} <- validate(:break, requested_break),
       {:ok, locale} <- validate(:locale, requested_locale) do
    Break.break({string_before, string_after}, locale, break, options)
  end
end
@doc """
Returns a lazy enumerable that splits a string on demand.

## Arguments

* `string` is any `String.t`.

* `options` is a keyword list of options.

## Returns

* A function that implements the enumerable
  protocol or

* `{:error, reason}`

## Options

* `:locale` is any locale returned by
  `Unicode.String.Segment.known_locales/0`.
  The default is "root" which corresponds
  to the break rules defined by the
  [Unicode Segmentation](https://unicode.org/reports/tr29/) rules.

* `:break` is the type of break. It is one of
  `:grapheme`, `:word`, `:line` or `:sentence`. The
  default is `:word`.

* `:suppressions` is a boolean which,
  if `true`, will suppress breaks for common
  abbreviations defined for the `locale`. The
  default is `true`.

* `:trim` is a boolean indicating if segments
  that are comprised of only white space are to be
  excluded from the returned list. The default
  is `false`.

## Examples

    iex> enum = Unicode.String.splitter "This is a sentence. And another.", break: :word, trim: true
    iex> Enum.take enum, 3
    ["This", "is", "a"]

"""
@spec splitter(String.t, split_options) :: function | error_return
def splitter(string, options) when is_binary(string) do
  requested_break = Keyword.get(options, :break, :word)
  requested_locale = Keyword.get(options, :locale, @default_locale)

  with {:ok, break} <- validate(:break, requested_break),
       {:ok, locale} <- validate(:locale, requested_locale) do
    # Each unfold step delegates to Break.next/4, producing one segment
    # at a time without materialising the whole split.
    Stream.unfold(string, fn rest -> Break.next(rest, locale, break, options) end)
  end
end
@doc """
Returns the next segment in a string.

## Arguments

* `string` is any `String.t`.

* `options` is a keyword list of options.

## Returns

A tuple with the segment and the remainder of the string, or `""`
when the string has been fully consumed.

* `{next_string, rest_of_the_string}` or

* `{:error, reason}`

## Options

* `:locale` is any locale returned by
  `Unicode.String.Segment.known_locales/0`.
  The default is "root" which corresponds
  to the break rules defined by the
  [Unicode Segmentation](https://unicode.org/reports/tr29/) rules.

* `:break` is the type of break. It is one of
  `:grapheme`, `:word`, `:line` or `:sentence`. The
  default is `:word`.

* `:suppressions` is a boolean which,
  if `true`, will suppress breaks for common
  abbreviations defined for the `locale`. The
  default is `true`.

## Examples

    iex> Unicode.String.next "This is a sentence. And another.", break: :word
    {"This", " is a sentence. And another."}

    iex> Unicode.String.next "This is a sentence. And another.", break: :sentence
    {"This is a sentence. ", "And another."}

"""
@spec next(String.t, split_options) :: String.t | nil | error_return
def next(string, options \\ []) when is_binary(string) do
  requested_break = Keyword.get(options, :break, :word)
  requested_locale = Keyword.get(options, :locale, @default_locale)

  with {:ok, break} <- validate(:break, requested_break),
       {:ok, locale} <- validate(:locale, requested_locale) do
    Break.next(string, locale, break, options)
  end
end
@doc """
Splits a string according to the
specified break type.

## Arguments

* `string` is any `String.t`.

* `options` is a keyword list of options.

## Returns

* A list of strings after applying the
  specified break rules or

* `{:error, reason}`

## Options

* `:locale` is any locale returned by
  `Unicode.String.Segment.known_locales/0`.
  The default is "root" which corresponds
  to the break rules defined by the
  [Unicode Segmentation](https://unicode.org/reports/tr29/) rules.

* `:break` is the type of break. It is one of
  `:grapheme`, `:word`, `:line` or `:sentence`. The
  default is `:word`.

* `:suppressions` is a boolean which,
  if `true`, will suppress breaks for common
  abbreviations defined for the `locale`. The
  default is `true`.

* `:trim` is a boolean indicating if segments
  that are comprised of only white space are to be
  excluded from the returned list. The default
  is `false`.

## Examples

    iex> Unicode.String.split "This is a sentence. And another.", break: :word
    ["This", " ", "is", " ", "a", " ", "sentence", ".", " ", "And", " ", "another", "."]

    iex> Unicode.String.split "This is a sentence. And another.", break: :word, trim: true
    ["This", "is", "a", "sentence", ".", "And", "another", "."]

    iex> Unicode.String.split "This is a sentence. And another.", break: :sentence
    ["This is a sentence. ", "And another."]

"""
@spec split(String.t, split_options) :: [String.t, ...] | error_return
def split(string, options \\ []) when is_binary(string) do
  requested_break = Keyword.get(options, :break, :word)
  requested_locale = Keyword.get(options, :locale, @default_locale)

  result =
    with {:ok, break} <- validate(:break, requested_break),
         {:ok, locale} <- validate(:locale, requested_locale) do
      Break.split(string, locale, break, options)
    end

  # On validation failure `result` is an error tuple, which maybe_trim/2
  # passes through untouched.
  maybe_trim(result, options[:trim])
end
# Drops whitespace-only segments when trimming was requested; any other
# value (including an error tuple from a failed split) passes through.
defp maybe_trim(segments, true) when is_list(segments) do
  Enum.reject(segments, fn segment -> Property.white_space?(segment) end)
end

defp maybe_trim(segments, _trim?), do: segments
# Accepts only locales known to the segmentation data; anything else is
# reported via Segment's standard error tuple.
defp validate(:locale, locale) do
  case locale in Segment.known_locales() do
    true -> {:ok, locale}
    false -> {:error, Segment.unknown_locale_error(locale)}
  end
end
@breaks [:word, :grapheme, :line, :sentence]

# Accepts only the supported break kinds; the guard expands @breaks at
# compile time, so membership is checked in the function head.
defp validate(:break, break) when break in @breaks, do: {:ok, break}

defp validate(:break, break) do
  {:error, "Unknown break #{inspect break}. Valid breaks are #{inspect @breaks}"}
end
end
|
lib/unicode/string.ex
| 0.941318
| 0.757481
|
string.ex
|
starcoder
|
defmodule Mix.Tasks.Run do
  use Mix.Task

  @shortdoc "Run the given file or expression"

  @moduledoc """
  Runs the given file or expression in the context of the application.

  Before running the code, it invokes the `app.start` task which compiles
  and loads your project.

  It is the goal of this task to provide a subset of the functionality
  existent in the `elixir` executable, including setting up the `System.argv`:

      mix run -e Hello.world
      mix run my_script.exs arg1 arg2 arg3

  Many command line options need to be passed to the `elixir` executable
  directly, which can be done as follows:

      elixir --sname hello -S mix run -e "My.code"

  ## Command line options

  * `--eval`, `-e` - Evaluates the given code
  * `--require`, `-r` - Requires pattern before running the command
  * `--parallel-require`, `-pr` - Requires pattern in parallel
  * `--no-halt` - Does not halt the system after running the command
  * `--no-compile` - Does not compile even if files require compilation
  * `--no-start` - Does not start applications after compilation
  """
  def run(args) do
    { opts, head, _ } = OptionParser.parse_head(args,
      aliases: [r: :require, pr: :parallel_require, e: :eval],
      switches: [parallel_require: :keep, require: :keep])

    # Require the project to be available
    Mix.Project.get!
    Mix.Task.run "app.start", args

    # Determine the file to run (if any) and set System.argv accordingly.
    # "--" separates mix options from script arguments with no script.
    file =
      case head do
        ["--"|t] -> System.argv(t); nil
        [h|t] -> System.argv(t); h
        [] -> System.argv([]); nil
      end

    Enum.each opts, fn({ key, value }) ->
      case key do
        :parallel_require ->
          value |> filter_patterns |> Kernel.ParallelRequire.files
        :require ->
          value |> filter_patterns |> Enum.each &Code.require_file(&1)
        :eval ->
          Code.eval_string(value)
        _ ->
          :ok
      end
    end

    # Bug fix: this previously called `Code.require_file(h)` — but `h` is
    # only bound inside one clause of the `case` above. `file` carries the
    # chosen script (or nil) for every clause.
    if file, do: Code.require_file(file)
    if opts[:no_halt], do: :timer.sleep(:infinity)
  end

  # Expands a wildcard pattern and keeps only regular files (deduplicated).
  defp filter_patterns(pattern) do
    Enum.filter(Enum.uniq(Path.wildcard(pattern)), &File.regular?(&1))
  end
end
|
lib/mix/lib/mix/tasks/run.ex
| 0.789112
| 0.507202
|
run.ex
|
starcoder
|
defmodule Mix.Tasks.Escript.Build do
  use Mix.Task
  use Bitwise, only_operators: true

  @shortdoc "Builds an escript for the project"

  @moduledoc ~S"""
  Builds an escript for the project.

  An escript is an executable that can be invoked from the
  command line. An escript can run on any machine that has
  Erlang/OTP installed and by default does not require Elixir to
  be installed, as Elixir is embedded as part of the escript.

  This task guarantees the project and its dependencies are
  compiled and packages them inside an escript. Before invoking
  `mix escript.build`, it is only necessary to define a `:escript`
  key with a `:main_module` option in your `mix.exs` file:

      escript: [main_module: MyApp.CLI]

  Escripts should be used as a mechanism to share scripts between
  developers and not as a deployment mechanism. For running live
  systems, consider using `mix run` or building releases. See
  the `Application` module for more information on systems life-
  cycles.

  By default, this task starts the current application. If this
  is not desired, set the `:app` configuration to nil.

  This task also removes documentation and debugging chunks from
  the compiled `.beam` files to reduce the size of the escript.
  If this is not desired, check the `:strip_beams` option.

  > Note: escripts do not support projects and dependencies
  > that need to store or read artifacts from the priv directory.

  ## Command line options

  Expects the same command line options as `mix compile`.

  ## Configuration

  The following option must be specified in your `mix.exs` under `:escript`
  key:

    * `:main_module` - the module to be invoked once the escript starts.
      The module must contain a function named `main/1` that will receive the
      command line arguments as binaries.

  The remaining options can be specified to further customize the escript:

    * `:name` - the name of the generated escript.
      Defaults to app name.

    * `:path` - the path to write the escript to.
      Defaults to app name.

    * `:app` - the app that starts with the escript.
      Defaults to app name. Set it to `nil` if no application should
      be started.

    * `:strip_beam` - if `true` strips BEAM code in the escript to remove chunks
      unnecessary at runtime, such as debug information and documentation.
      Defaults to `true`.

    * `:embed_elixir` - if `true` embeds Elixir and its children apps
      (`ex_unit`, `mix`, etc.) mentioned in the `:applications` list inside the
      `application/0` function in `mix.exs`.

      Defaults to `true` for Elixir projects, `false` for Erlang projects.

      Note: if you set this to `false` for an Elixir project, you will have to add paths to Elixir's
      `ebin` directories to `ERL_LIBS` environment variable when running the resulting escript, in
      order for the code loader to be able to find `:elixir` application and its children
      applications (if they are used).

    * `:shebang` - shebang interpreter directive used to execute the escript.
      Defaults to `"#! /usr/bin/env escript\n"`.

    * `:comment` - comment line to follow shebang directive in the escript.
      Defaults to `""`.

    * `:emu_args` - emulator arguments to embed in the escript file.
      Defaults to `""`.

  There is one project-level option that affects how the escript is generated:

    * `language: :elixir | :erlang` - set it to `:erlang` for Erlang projects
      managed by Mix. Doing so will ensure Elixir is not embedded by default.
      Your app will still be started as part of escript loading, with the
      config used during build.

  ## Example

      defmodule MyApp.MixProject do
        use Mix.Project

        def project do
          [
            app: :my_app,
            version: "0.0.1",
            escript: escript()
          ]
        end

        def escript do
          [main_module: MyApp.CLI]
        end
      end

      defmodule MyApp.CLI do
        def main(_args) do
          IO.puts("Hello from MyApp!")
        end
      end

  """

  @impl true
  def run(args) do
    Mix.Project.get!()
    Mix.Task.run("loadpaths", args)

    unless "--no-compile" in args do
      Mix.Project.compile(args)
    end

    project = Mix.Project.config()
    language = Keyword.get(project, :language, :elixir)
    escriptize(project, language)
  end

  # Validates the escript configuration, assembles the in-memory zip of
  # beams and writes the final executable (shebang + comment + emu args +
  # zip) to disk.
  defp escriptize(project, language) do
    escript_opts = project[:escript] || []

    if Mix.Project.umbrella?() do
      Mix.raise("Building escripts for umbrella projects is unsupported")
    end

    script_name = Mix.Local.name_for(:escript, project)
    filename = escript_opts[:path] || script_name
    main = escript_opts[:main_module]

    unless script_name do
      error_message =
        "Could not generate escript, no name given, " <>
          "set :name escript option or :app in the project settings"

      Mix.raise(error_message)
    end

    unless main do
      error_message =
        "Could not generate escript, please set :main_module " <>
          "in your project configuration (under :escript option) to a module that implements main/1"

      Mix.raise(error_message)
    end

    unless Code.ensure_loaded?(main) do
      error_message =
        "Could not generate escript, module #{main} defined as " <>
          ":main_module could not be loaded"

      Mix.raise(error_message)
    end

    app = Keyword.get(escript_opts, :app, project[:app])
    strip_beam? = Keyword.get(escript_opts, :strip_beam, true)
    escript_mod = String.to_atom(Atom.to_string(app) <> "_escript")

    beam_paths =
      [project_files(), deps_files(), core_files(escript_opts, language)]
      |> Stream.concat()
      |> prepare_beam_paths()
      |> Map.merge(consolidated_paths(project))

    tuples = gen_main(project, escript_mod, main, app, language) ++ read_beams(beam_paths)
    tuples = if strip_beam?, do: strip_beams(tuples), else: tuples

    case :zip.create('mem', tuples, [:memory]) do
      {:ok, {'mem', zip}} ->
        shebang = escript_opts[:shebang] || "#! /usr/bin/env escript\n"
        comment = build_comment(escript_opts[:comment])
        emu_args = build_emu_args(escript_opts[:emu_args], escript_mod)

        script = IO.iodata_to_binary([shebang, comment, emu_args, zip])
        File.mkdir_p!(Path.dirname(filename))
        File.write!(filename, script)
        set_perms(filename)

      {:error, error} ->
        Mix.raise("Error creating escript: #{error}")
    end

    # Bug fix: the success message contained a corrupted interpolation
    # ("#(unknown)"); it should report the path that was just written.
    Mix.shell().info("Generated escript #{filename} with MIX_ENV=#{Mix.env()}")
    :ok
  end

  defp project_files() do
    get_files(Mix.Project.app_path())
  end

  # Returns the .app/.beam files plus all regular files under priv/ for the
  # application rooted at `app` (a build path).
  defp get_files(app) do
    Path.wildcard("#{app}/ebin/*.{app,beam}") ++
      (Path.wildcard("#{app}/priv/**/*") |> Enum.filter(&File.regular?/1))
  end

  # Adds the executable bits to the generated file.
  defp set_perms(filename) do
    stat = File.stat!(filename)
    :ok = File.chmod(filename, stat.mode ||| 0o111)
  end

  defp deps_files() do
    deps = Mix.Dep.cached()
    Enum.flat_map(deps, fn dep -> get_files(dep.opts[:build]) end)
  end

  # Files for Elixir itself (plus the extra apps it pulls in) when embedding
  # Elixir into the escript; empty otherwise.
  defp core_files(escript_opts, language) do
    if Keyword.get(escript_opts, :embed_elixir, language == :elixir) do
      Enum.flat_map([:elixir | extra_apps()], &app_files/1)
    else
      []
    end
  end

  defp extra_apps() do
    Mix.Project.config()[:app]
    |> extra_apps_in_app_tree()
    |> Enum.uniq()
  end

  # Walks the application dependency tree collecting the Elixir umbrella
  # apps (eex, ex_unit, iex, logger, mix) that must be embedded.
  defp extra_apps_in_app_tree(app) when app in [:kernel, :stdlib, :elixir] do
    []
  end

  defp extra_apps_in_app_tree(app) when app in [:eex, :ex_unit, :iex, :logger, :mix] do
    [app]
  end

  defp extra_apps_in_app_tree(app) do
    _ = Application.load(app)

    case Application.spec(app) do
      nil ->
        []

      spec ->
        applications =
          Keyword.get(spec, :applications, []) ++ Keyword.get(spec, :included_applications, [])

        Enum.flat_map(applications, &extra_apps_in_app_tree/1)
    end
  end

  defp app_files(app) do
    case :code.where_is_file('#{app}.app') do
      :non_existing -> Mix.raise("Could not find application #{app}")
      file -> get_files(Path.dirname(Path.dirname(file)))
    end
  end

  # Maps each path by its basename; later sources override earlier ones.
  defp prepare_beam_paths(paths) do
    for path <- paths, into: %{}, do: {Path.basename(path), path}
  end

  defp read_beams(items) do
    Enum.map(items, fn {basename, beam_path} ->
      {String.to_charlist(basename), File.read!(beam_path)}
    end)
  end

  defp strip_beams(tuples) do
    for {basename, maybe_beam} <- tuples do
      case Path.extname(basename) do
        ".beam" -> {basename, strip_beam(maybe_beam)}
        _ -> {basename, maybe_beam}
      end
    end
  end

  # Drops documentation/debug chunks from a .beam binary and recompresses.
  defp strip_beam(beam) when is_binary(beam) do
    {:ok, _, all_chunks} = :beam_lib.all_chunks(beam)
    strip_chunks = ['Abst', 'CInf', 'Dbgi', 'Docs']
    preserved_chunks = for {name, _} = chunk <- all_chunks, name not in strip_chunks, do: chunk
    {:ok, content} = :beam_lib.build_module(preserved_chunks)
    compress(content)
  end

  defp compress(binary) do
    {:ok, file} = :ram_file.open(binary, [:write, :binary])
    {:ok, _} = :ram_file.compress(file)
    {:ok, binary} = :ram_file.get_file(file)
    :ok = :ram_file.close(file)
    binary
  end

  defp consolidated_paths(config) do
    if config[:consolidate_protocols] do
      Mix.Project.consolidation_path(config)
      |> Path.join("*")
      |> Path.wildcard()
      |> prepare_beam_paths()
    else
      %{}
    end
  end

  defp build_comment(user_comment) do
    "%% #{user_comment}\n"
  end

  defp build_emu_args(user_args, escript_mod) do
    "%%! -escript main #{escript_mod} #{user_args}\n"
  end

  # Generates the escript entry-point module (named `<app>_escript`) that
  # boots Elixir when embedded, loads config, starts the app and calls the
  # user's main/1. Returns the [{charlist_name, beam_binary}] zip entry.
  defp gen_main(project, name, module, app, language) do
    config =
      if File.regular?(project[:config_path]) do
        {config, _} = Mix.Config.eval!(project[:config_path])
        Macro.escape(config)
      else
        []
      end

    module_body =
      quote do
        @module unquote(module)
        @config unquote(config)
        @app unquote(app)

        @spec main(OptionParser.argv()) :: any
        def main(args) do
          unquote(main_body_for(language))
        end

        defp load_config(config) do
          each_fun = fn {app, kw} ->
            set_env_fun = fn {k, v} -> :application.set_env(app, k, v, persistent: true) end
            :lists.foreach(set_env_fun, kw)
          end

          :lists.foreach(each_fun, config)
          :ok
        end

        defp start_app(nil) do
          :ok
        end

        defp start_app(app) do
          case :application.ensure_all_started(app) do
            {:ok, _} ->
              :ok

            {:error, {app, reason}} ->
              formatted_error =
                case :code.ensure_loaded(Application) do
                  {:module, Application} -> Application.format_error(reason)
                  {:error, _} -> :io_lib.format('~p', [reason])
                end

              error_message = [
                "Could not start application ",
                :erlang.atom_to_binary(app, :utf8),
                ": ",
                formatted_error,
                ?\n
              ]

              io_error(error_message)
              :erlang.halt(1)
          end
        end

        defp io_error(message) do
          :io.put_chars(:standard_error, message)
        end
      end

    {:module, ^name, binary, _} = Module.create(name, module_body, Macro.Env.location(__ENV__))
    [{'#{name}.beam', binary}]
  end

  defp main_body_for(:elixir) do
    quote do
      erl_version = :erlang.system_info(:otp_release)

      case :string.to_integer(erl_version) do
        {num, _} when num >= 20 ->
          :ok

        _ ->
          error_message = [
            "Incompatible Erlang/OTP release: ",
            erl_version,
            ".\nThis escript requires at least Erlang/OTP 20.0\n"
          ]

          io_error(error_message)
          :erlang.halt(1)
      end

      case :application.ensure_all_started(:elixir) do
        {:ok, _} ->
          load_config(@config)
          start_app(@app)
          args = Enum.map(args, &List.to_string(&1))
          Kernel.CLI.run(fn _ -> @module.main(args) end, true)

        error ->
          io_error(["Failed to start Elixir.\n", :io_lib.format('error: ~p~n', [error])])
          :erlang.halt(1)
      end
    end
  end

  defp main_body_for(:erlang) do
    quote do
      load_config(@config)
      start_app(@app)
      @module.main(args)
    end
  end
end
|
lib/mix/lib/mix/tasks/escript.build.ex
| 0.784732
| 0.473718
|
escript.build.ex
|
starcoder
|
defmodule SiteEncrypt.Acme.Client.API do
  @moduledoc """
  Low level API for interacting with an ACME CA server.

  This module is a very incomplete implementation of the ACME client, as described in
  [RFC8555](https://tools.ietf.org/html/rfc8555). Internally, the module uses `Mint.HTTP` to
  communicate with the server. All functions will internally make a blocking HTTP request to
  the server. Therefore it's advised to invoke the functions of this module from within a separate
  process, powered by `Task`.

  To use the client, you first need to create the session with `new_session/3`. Then you can
  interact with the server using the remaining functions of this module. The session doesn't hold
  any resources open, so you can safely use it from multiple processes.
  """

  # Bug fix: `alias SiteEncrypt.HttpClient` was declared twice; the
  # redundant duplicate has been removed.
  alias SiteEncrypt.HttpClient
  alias SiteEncrypt.Acme.Client.Crypto

  defmodule Session do
    @moduledoc false
    defstruct ~w/http_opts account_key kid directory nonce/a

    defimpl Inspect do
      def inspect(session, _opts), do: "##{inspect(session.__struct__)}<#{session.directory.url}>"
    end
  end

  @type session :: %Session{
          http_opts: Keyword.t(),
          account_key: JOSE.JWK.t(),
          kid: nil | String.t(),
          directory: nil | directory,
          nonce: nil | String.t()
        }

  @type directory :: %{
          url: String.t(),
          key_change: String.t(),
          new_account: String.t(),
          new_nonce: String.t(),
          new_order: String.t(),
          revoke_cert: String.t()
        }

  @type error :: Mint.Types.error() | HTTP.response()

  @type order :: %{
          :status => status,
          :authorizations => [String.t()],
          :finalize => String.t(),
          :location => String.t(),
          optional(:certificate) => String.t()
        }

  @type challenge :: %{
          :status => status,
          :type => String.t(),
          :url => String.t(),
          optional(:token) => String.t()
        }

  @type status :: :invalid | :pending | :ready | :processing | :valid

  @type session_opts :: [verify_server_cert: boolean]

  @doc """
  Creates a new session to the given CA.

  - `directory_url` has to point to the GET directory resource, such as
    https://acme-v02.api.letsencrypt.org/directory or
    https://acme-staging-v02.api.letsencrypt.org/directory

  - `account_key` is the private key of the CA account. If you want to create the new account, you
    need to generate this key yourself, for example with

        JOSE.JWK.generate_key({:rsa, _key_size = 4096})

    Note that this will not create the account. You need to invoke `new_account/2` to do that.
    It is your responsibility to safely store the private key somewhere.

    If you want to access the existing account, you should pass the same key used for the account
    creation. In this case you'll usually need to invoke `fetch_kid/1` to fetch the key identifier
    from the CA server.

  Note that this function will make an in-process GET HTTP request to the given directory URL.
  """
  @spec new_session(String.t(), JOSE.JWK.t(), session_opts) ::
          {:ok, session} | {:error, error}
  def new_session(directory_url, account_key, http_opts \\ []) do
    with {response, session} <- initialize_session(http_opts, account_key, directory_url),
         :ok <- validate_response(response) do
      directory =
        response.payload
        |> normalize_keys(~w/keyChange newAccount newNonce newOrder revokeCert/)
        |> Map.merge(session.directory)

      {:ok, %Session{session | directory: directory}}
    end
  end

  @doc "Creates the new account at the CA server."
  @spec new_account(session, [String.t()]) :: {:ok, session} | {:error, error}
  def new_account(session, emails) do
    url = session.directory.new_account
    payload = %{"contact" => Enum.map(emails, &"mailto:#{&1}"), "termsOfServiceAgreed" => true}

    with {:ok, response, session} <- jws_request(session, :post, url, :jwk, payload) do
      # The account URL in the "location" header becomes the key identifier.
      location = :proplists.get_value("location", response.headers)
      {:ok, %Session{session | kid: location}}
    end
  end

  @doc """
  Obtains the key identifier of the existing account.

  You only need to invoke this function if the session is created using the key of the existing
  account.
  """
  @spec fetch_kid(session) :: {:ok, session} | {:error, error}
  def fetch_kid(session) do
    url = session.directory.new_account
    payload = %{"onlyReturnExisting" => true}

    with {:ok, response, session} <- jws_request(session, :post, url, :jwk, payload) do
      location = :proplists.get_value("location", response.headers)
      {:ok, %Session{session | kid: location}}
    end
  end

  @doc "Creates a new order on the CA server."
  @spec new_order(session, [String.t()]) :: {:ok, order, session} | {:error, error}
  def new_order(session, domains) do
    payload = %{"identifiers" => Enum.map(domains, &%{"type" => "dns", "value" => &1})}

    with {:ok, response, session} <-
           jws_request(session, :post, session.directory.new_order, :kid, payload) do
      location = :proplists.get_value("location", response.headers)

      result =
        response.payload
        |> normalize_keys(~w/authorizations finalize status/)
        |> Map.update!(:status, &parse_status!/1)
        |> Map.put(:location, location)

      {:ok, result, session}
    end
  end

  @doc "Obtains the status of the given order."
  @spec order_status(session, order) :: {:ok, order, session} | {:error, error}
  def order_status(session, order) do
    with {:ok, response, session} <- jws_request(session, :post, order.location, :kid) do
      result =
        response.payload
        |> normalize_keys(~w/authorizations finalize status certificate/)
        |> Map.update!(:status, &parse_status!/1)

      {:ok, Map.merge(order, result), session}
    end
  end

  @doc "Obtains authorization challenges from the CA."
  @spec authorization(session, String.t()) :: {:ok, [challenge], session}
  def authorization(session, authorization) do
    with {:ok, response, session} <- jws_request(session, :post, authorization, :kid) do
      challenges =
        response.payload
        |> Map.fetch!("challenges")
        |> Stream.map(&normalize_keys(&1, ~w/status token type url/))
        |> Enum.map(&Map.update!(&1, :status, fn value -> parse_status!(value) end))

      {:ok, challenges, session}
    end
  end

  @doc "Returns the status and the token of the http-01 challenge."
  @spec challenge(session, challenge) ::
          {:ok, %{status: status, token: String.t()}, session} | {:error, error}
  def challenge(session, challenge) do
    # Empty JSON object signals the CA that the challenge may be validated.
    payload = %{}

    with {:ok, response, session} <- jws_request(session, :post, challenge.url, :kid, payload) do
      result =
        response.payload
        |> normalize_keys(~w/status token/)
        |> Map.update!(:status, &parse_status!/1)

      {:ok, result, session}
    end
  end

  @doc "Finalizes the given order."
  @spec finalize(session, order, binary) :: {:ok, %{status: status}, session} | {:error, error}
  def finalize(session, order, csr) do
    payload = %{"csr" => Base.url_encode64(csr, padding: false)}

    with {:ok, response, session} <- jws_request(session, :post, order.finalize, :kid, payload) do
      result =
        response.payload
        |> normalize_keys(~w/status/)
        |> Map.update!(:status, &parse_status!/1)

      {:ok, result, session}
    end
  end

  @doc "Obtains the certificate and chain from a finalized order."
  @spec get_cert(session, order) :: {:ok, String.t(), String.t(), session} | {:error, error}
  def get_cert(session, order) do
    with {:ok, response, session} <- jws_request(session, :post, order.certificate, :kid) do
      # Split the PEM bundle after the first END CERTIFICATE marker: the
      # leaf certificate comes first, the remainder is the chain.
      [cert | chain] = String.split(response.body, ~r/^\-+END CERTIFICATE\-+$\K/m, parts: 2)
      {:ok, Crypto.normalize_pem(cert), Crypto.normalize_pem(to_string(chain)), session}
    end
  end

  defp initialize_session(http_opts, account_key, directory_url) do
    http_request(
      %Session{
        http_opts: http_opts,
        account_key: account_key,
        directory: %{url: directory_url}
      },
      :get,
      directory_url
    )
  end

  # Signed (JWS) request. Acquires a nonce if needed and transparently
  # retries once when the server rejects the nonce as stale.
  defp jws_request(session, verb, url, id_field, payload \\ "") do
    with {:ok, session} <- get_nonce(session) do
      headers = [{"content-type", "application/jose+json"}]
      body = jws_body(session, url, id_field, payload)
      # The nonce is single-use; clear it so the next request fetches a new one.
      session = %Session{session | nonce: nil}

      case http_request(session, verb, url, headers: headers, body: body) do
        {%{status: status} = response, session} when status in 200..299 ->
          {:ok, response, session}

        {%{payload: %{"type" => "urn:ietf:params:acme:error:badNonce"}}, session} ->
          jws_request(session, verb, url, id_field, payload)

        {response, session} ->
          {:error, response, session}
      end
    end
  end

  # A HEAD to new-nonce primes the session with a replay nonce.
  defp get_nonce(%Session{nonce: nil} = session) do
    with {response, session} <- http_request(session, :head, session.directory.new_nonce),
         :ok <- validate_response(response),
         do: {:ok, session}
  end

  defp get_nonce(session), do: {:ok, session}

  defp jws_body(session, url, id_field, payload) do
    protected =
      Map.merge(
        %{"alg" => "RS256", "nonce" => session.nonce, "url" => url},
        id_map(id_field, session)
      )

    plain_text = if payload == "", do: "", else: Jason.encode!(payload)
    {_, signed} = JOSE.JWS.sign(session.account_key, plain_text, protected)
    Jason.encode!(signed)
  end

  # Account identification: the full public JWK before the account exists,
  # the key identifier (account URL) afterwards.
  defp id_map(:jwk, session) do
    {_modules, public_map} = JOSE.JWK.to_public_map(session.account_key)
    %{"jwk" => public_map}
  end

  defp id_map(:kid, session), do: %{"kid" => session.kid}

  # Plain HTTP request; decodes JSON payloads and captures any replay nonce
  # returned by the server into the session.
  defp http_request(session, verb, url, opts \\ []) do
    opts =
      opts
      |> Keyword.put_new(:headers, [])
      |> Keyword.update!(:headers, &[{"user-agent", "site_encrypt native client"} | &1])
      |> Keyword.merge(session.http_opts)

    response = HttpClient.request(verb, url, opts)

    content_type = :proplists.get_value("content-type", response.headers, "")

    payload =
      if String.starts_with?(content_type, "application/json") or
           String.starts_with?(content_type, "application/problem+json"),
         do: Jason.decode!(response.body)

    session =
      case Enum.find(response.headers, &match?({"replay-nonce", _nonce}, &1)) do
        {"replay-nonce", nonce} -> %Session{session | nonce: nonce}
        nil -> session
      end

    {Map.put(response, :payload, payload), session}
  end

  defp parse_status!("invalid"), do: :invalid
  defp parse_status!("pending"), do: :pending
  defp parse_status!("ready"), do: :ready
  defp parse_status!("processing"), do: :processing
  defp parse_status!("valid"), do: :valid

  # Keeps only `allowed_keys` and converts them to snake_case atom keys.
  defp normalize_keys(map, allowed_keys) do
    map
    |> Map.take(allowed_keys)
    |> Enum.into(%{}, fn {key, value} ->
      {key |> Macro.underscore() |> String.to_atom(), value}
    end)
  end

  defp validate_response(response),
    do: if(response.status in 200..299, do: :ok, else: {:error, response})
end
|
lib/site_encrypt/acme/client/api.ex
| 0.911145
| 0.489259
|
api.ex
|
starcoder
|
defmodule Mix.Shell.Process do
  @moduledoc """
  This is Mix shell that uses the current process mailbox
  for communication instead of IO.

  When a developer calls `info("hello")`, the following
  message will be sent to the current process:

      { :mix_shell, :info, ["hello"] }

  This is mainly useful in tests, allowing us to assert
  if given messages were received or not. Since we need
  to guarantee a clean slate in between tests, there
  is also a flush function responsible for flushing all
  `:mix_shell` related tasks from the process inbox.
  """

  # Bug fix: this attribute was misspelled `@behavior`; Elixir's behaviour
  # checking only recognises the `@behaviour` spelling.
  @behaviour Mix.Shell

  @doc """
  Flush all :mix_shell messages from the current process.

  If a callback is given, it is invoked for each received message.

  ## Examples

      flush IO.inspect(&1)

  """
  def flush(callback // fn(x) -> x end) do
    receive do
      { :mix_shell, _, _ } = message ->
        callback.(message)
        flush(callback)
    after
      # Zero timeout: drain whatever is already queued, never block.
      0 -> :done
    end
  end

  @doc """
  Executes the given command and forwards its messages to
  the current process.
  """
  def cmd(command) do
    put_app
    Mix.Shell.cmd(command, fn(data) ->
      self <- { :mix_shell, :run, [data] }
    end)
  end

  @doc """
  Simply forwards the message to the current process.
  """
  def info(message) do
    put_app
    self <- { :mix_shell, :info, [IO.ANSI.escape(message, false)] }
  end

  @doc """
  Simply forwards the message to the current process.
  """
  def error(message) do
    put_app
    self <- { :mix_shell, :error, [IO.ANSI.escape(message, false)] }
  end

  @doc """
  Simply forwards the message to the current process.

  It also checks the inbox for an input message matching:

      { :mix_shell_input, :yes?, value }

  If one does not exist, it will abort since there is no shell
  process input given. Value must be true or false.
  """
  def yes?(message) do
    put_app
    self <- { :mix_shell, :yes?, [IO.ANSI.escape(message, false)] }

    receive do
      { :mix_shell_input, :yes?, response } -> response
    after
      0 -> raise Mix.Error, message: "No shell process input given for yes?/1"
    end
  end

  # Sends the "==> app" banner message when Mix.Shell.output_app? says the
  # current app name should be printed.
  def put_app do
    if Mix.Shell.output_app? do
      self <- { :mix_shell, :info, ["==> #{Mix.project[:app]}"] }
    end
  end
end
|
lib/mix/lib/mix/shell/process.ex
| 0.778439
| 0.446676
|
process.ex
|
starcoder
|
defmodule Statistics.Distributions.Exponential do
  @moduledoc """
  The exponential distribution.

  `lambda` is the rate parameter and must be greater than zero.
  """

  alias Statistics.Math

  @doc """
  The probability density function

  ## Examples

      iex> Statistics.Distributions.Exponential.pdf().(1)
      0.36787944117144233

  """
  @spec pdf() :: fun
  @spec pdf(number) :: fun
  def pdf(), do: pdf(1)

  def pdf(lambda) do
    fn value ->
      # Negative support is reported as 0 before the rate is inspected,
      # mirroring the evaluation order of the distribution's definition.
      if value < 0 do
        0
      else
        if lambda <= 0, do: :nan, else: lambda * Math.exp(-lambda * value)
      end
    end
  end

  @doc """
  The cumulative density function

  ## Examples

      iex> Statistics.Distributions.Exponential.cdf().(1)
      0.6321205588285577

  """
  @spec cdf() :: fun
  @spec cdf(number) :: fun
  def cdf(), do: cdf(1)

  def cdf(lambda) do
    fn value ->
      if value < 0 do
        0
      else
        if lambda <= 0, do: :nan, else: 1 - Math.exp(-lambda * value)
      end
    end
  end

  @doc """
  The percentile-point function

  ## Examples

      iex> Statistics.Distributions.Exponential.ppf().(0.1)
      0.10536051565782628

  """
  @spec ppf() :: fun
  @spec ppf(number) :: fun
  def ppf(), do: ppf(1)

  def ppf(lambda) do
    fn p ->
      # `p == 1` maps to infinity and is checked first; out-of-range
      # probabilities (or a negative rate) yield :nan.
      if p == 1 do
        :inf
      else
        if p < 0 or p > 1 or lambda < 0 do
          :nan
        else
          -1 * Math.ln(1 - p) / lambda
        end
      end
    end
  end

  @doc """
  Draw a random number from the distribution with specified lambda

  Uses the [rejection sampling method](https://en.wikipedia.org/wiki/Rejection_sampling)

  ## Examples

      iex> Statistics.Distributions.Exponential.rand()
      0.145709384787

  """
  @spec rand() :: number
  @spec rand(number) :: number
  def rand(), do: rand(1)

  def rand(lambda) do
    # Draw a candidate from the same range the original implementation
    # used and accept it with probability proportional to the pdf;
    # otherwise recurse and try again.
    candidate = Math.rand() * lambda * 100

    if pdf(lambda).(candidate) > Math.rand() do
      candidate
    else
      rand(lambda)
    end
  end
end
|
lib/statistics/distributions/exponential.ex
| 0.915606
| 0.668679
|
exponential.ex
|
starcoder
|
defmodule Util.Cache do
  @moduledoc false
  # Simple LRU cache, with optional persistence to disk.
  #
  # Each "frame" maps to a pair of named tables derived from its cfg_name
  # (see expand/1): an in-memory LRU (ETS via LruCache) and an on-disk DETS
  # file. `start/2` warms the ETS table from disk; `save/1` flushes it back.

  @doc """
  Start the cache service for a collection
  """
  def start(frame, size \\ 50) do
    {ets_name, dets_name, dets_path} = expand(frame)
    # Boot only once: LruCache registers a process under the ETS table name.
    unless Process.whereis(ets_name) do
      alias Searchex.Command.CmdHelpers
      File.mkdir_p(CmdHelpers.cache_dir(frame))
      LruCache.start_link(ets_name, size)
      # Warm the in-memory table from the on-disk DETS file, then close it
      # again (the DETS file is reopened on every save).
      :dets.open_file(dets_name, [file: dets_path])
      :ets.from_dets(ets_name, dets_name)
      :dets.close(dets_name)
    end
    :ok
  end

  @doc "Stop the cache process"
  def stop(frame) do
    # Persist before shutting anything down.
    save(frame)
    {ets_name, dets_name, _} = expand(frame)
    if Process.whereis(ets_name), do: GenServer.stop(ets_name)
    # NOTE(review): DETS tables are not normally registered under the table
    # name, so this second whereis is presumably a defensive no-op — verify.
    if Process.whereis(dets_name), do: GenServer.stop(dets_name)
  end

  @doc "Save the cache data to disk"
  def save(frame) do
    # A halted frame indicates an aborted pipeline — skip persistence then.
    unless frame.halted do
      {ets_name, dets_name, dets_path} = expand(frame)
      if Process.whereis(ets_name) do
        :dets.open_file(dets_name, [file: dets_path])
        :ets.to_dets(ets_name, dets_name)
        :dets.close(dets_name)
      end
    end
    frame
  end

  # Two-arity variant so `save` can be used where a callback receives an
  # extra (ignored) argument; returns the frame for pipelining.
  def save(frame, _el) do
    save(frame)
    frame
  end

  @doc "Get a value from the cache"
  # Empty keys never hit the cache.
  def get_cache(_frame, ""), do: nil
  def get_cache(frame, key) do
    {ets_name, _, _} = expand(frame)
    # Lazily ensures the cache is booted before the lookup.
    start(frame)
    LruCache.get(ets_name, Util.Ext.Term.to_atom(key))
  end

  @doc "Put a value in the cache"
  def put_cache(frame, key, val) do
    start(frame)
    {ets_name, _, _} = expand(frame)
    # NOTE(review): keys are converted to atoms — assumes a bounded key
    # space, since atoms are never garbage collected.
    LruCache.put(ets_name, Util.Ext.Term.to_atom(key), val)
    key
  end

  @doc "Remove all values from cache"
  def clear_cache(frame) do
    start(frame)
    {ets_name, _, _} = expand(frame)
    # Reaches into LruCache's backing ETS table directly, then persists the
    # now-empty table so the DETS file is cleared as well.
    :ets.delete_all_objects(ets_name)
    save(frame)
  end

  # Derives {ets_table_name, dets_table_name, dets_file_path} for a frame
  # from its cfg_name, replacing slashes so the names are valid atoms.
  defp expand(frame) do
    alias Searchex.Command.CmdHelpers
    cfg_str = to_string(frame.cfg_name) |> String.replace("/", "_")
    ets_name = Util.Ext.Term.to_atom("ets_#{cfg_str}")
    dets_name = Util.Ext.Term.to_atom("dets_#{cfg_str}")
    dets_path = CmdHelpers.cache_file(frame) |> String.to_charlist
    {ets_name, dets_name, dets_path}
  end
end
|
lib/util/cache.ex
| 0.526586
| 0.429848
|
cache.ex
|
starcoder
|
defmodule EventStore.Notifications.Listener do
  @moduledoc false

  # Listener subscribes to event notifications using PostgreSQL's `LISTEN`
  # command. Whenever events are appended to storage a `NOTIFY` command is
  # executed by a trigger. The notification payload contains the first and last
  # event number of the appended events. These events are then read from storage
  # and published to interested subscribers.

  use GenStage

  require Logger

  alias EventStore.MonitoredServer
  alias EventStore.Notifications.Listener

  # `ref` is the active `Postgrex.Notifications` listen reference, or nil
  # while the database connection is down. `queue` buffers parsed events
  # until downstream demand arrives.
  defstruct [:listen_to, :ref, demand: 0, queue: :queue.new()]

  def start_link(opts \\ []) do
    listen_to = Keyword.fetch!(opts, :listen_to)
    start_opts = Keyword.take(opts, [:name, :timeout, :debug, :spawn_opt])

    state = %Listener{listen_to: listen_to}

    GenStage.start_link(__MODULE__, state, start_opts)
  end

  def init(%Listener{} = state) do
    %Listener{listen_to: listen_to} = state

    :ok = MonitoredServer.monitor(listen_to)

    {:producer, state}
  end

  # Monitored connection is up: (re)issue `LISTEN` and record the ref.
  def handle_info({:UP, listen_to, _pid}, %Listener{listen_to: listen_to} = state) do
    {:noreply, [], listen_for_events(state)}
  end

  # Monitored connection went down: clear the listen ref so notifications
  # are ignored until the connection is re-established.
  def handle_info({:DOWN, listen_to, _pid, _reason}, %Listener{listen_to: listen_to} = state) do
    {:noreply, [], %Listener{state | ref: nil}}
  end

  # Ignore notifications while the database connection is down (`ref: nil`).
  #
  # FIX: this clause must come BEFORE the catch-all notification clause
  # below. In the original ordering the catch-all matched first (any
  # `%Listener{}` matches, including `ref: nil`), making this clause
  # unreachable — notifications were processed even when the connection
  # was known to be down.
  def handle_info(
        {:notification, _connection_pid, _ref, _channel, _payload},
        %Listener{ref: nil} = state
      ) do
    {:noreply, [], state}
  end

  # Notification received from PostgreSQL's `NOTIFY`
  def handle_info({:notification, _connection_pid, _ref, channel, payload}, %Listener{} = state) do
    Logger.debug(fn ->
      "Listener received notification on channel " <>
        inspect(channel) <> " with payload: " <> inspect(payload)
    end)

    %Listener{queue: queue} = state

    # Notify payload contains the stream uuid, stream id, and first / last
    # stream versions (e.g. "stream-12345,1,1,5"). The payload is parsed
    # from the right (via `String.reverse/1` and `parts: 4`) so a stream
    # uuid containing commas cannot corrupt the three trailing integers.
    [last, first, stream_id, stream_uuid] =
      payload
      |> String.reverse()
      |> String.split(",", parts: 4)
      |> Enum.map(&String.reverse/1)

    # Assertive parses: a malformed payload is a bug and should crash.
    {stream_id, ""} = Integer.parse(stream_id)
    {first_stream_version, ""} = Integer.parse(first)
    {last_stream_version, ""} = Integer.parse(last)

    event = {stream_uuid, stream_id, first_stream_version, last_stream_version}

    state = %Listener{
      state
      | queue: :queue.in(event, queue)
    }

    dispatch_events([], state)
  end

  def handle_demand(incoming_demand, %Listener{demand: pending_demand} = state) do
    dispatch_events([], %Listener{state | demand: incoming_demand + pending_demand})
  end

  defp listen_for_events(%Listener{} = state) do
    %Listener{listen_to: listen_to} = state

    {:ok, ref} = Postgrex.Notifications.listen(listen_to, "events")

    %Listener{state | ref: ref}
  end

  # No outstanding demand: emit whatever has been accumulated so far.
  defp dispatch_events(events, %Listener{demand: 0} = state) do
    {:noreply, Enum.reverse(events), state}
  end

  # Pop queued events one at a time while demand remains.
  defp dispatch_events(events, %Listener{} = state) do
    %Listener{demand: demand, queue: queue} = state

    case :queue.out(queue) do
      {{:value, event}, queue} ->
        state = %Listener{state | demand: demand - 1, queue: queue}

        dispatch_events([event | events], state)

      {:empty, _queue} ->
        {:noreply, Enum.reverse(events), state}
    end
  end
end
|
lib/event_store/notifications/listener.ex
| 0.813757
| 0.428592
|
listener.ex
|
starcoder
|
defmodule HashDict do
@moduledoc """
WARNING: this module is deprecated.
Use the `Map` module instead.
"""
use Dict
# The dict is a hash trie: each node is an 8-slot tuple indexed by 3 bits
# of the key's hash.  A slot holds either [] (empty), a [key | value] cons
# pair (leaf), or a {key, value, subtree} tuple (leaf plus deeper node).
@node_bitmap 0b111
@node_shift 3
@node_size 8
@node_template :erlang.make_tuple(@node_size, [])
@opaque t :: %__MODULE__{size: non_neg_integer, root: term}
@doc false
defstruct size: 0, root: @node_template
# Inline common instructions
@compile :inline_list_funcs
@compile {:inline, key_hash: 1, key_mask: 1, key_shift: 1}
# Emits a deprecation warning naming the calling function/arity.
defmacrop warn_deprecated() do
quote do
{function, arity} = __ENV__.function
IO.warn String.trim_trailing("""
HashDict.#{function}/#{arity} is deprecated since the HashDict module is
deprecated; use maps and the Map module instead
""")
end
end
@doc """
Creates a new empty dict.
"""
@spec new :: Dict.t
def new do
warn_deprecated()
%HashDict{}
end
# Inserts `value` under `key`; the returned counter is 1 iff a new key
# was added, keeping `size` accurate.
def put(%HashDict{root: root, size: size}, key, value) do
warn_deprecated()
{root, counter} = do_put(root, key, value, key_hash(key))
%HashDict{root: root, size: size + counter}
end
# Updates an existing key with `fun`; raises KeyError when `key` is absent.
def update!(%HashDict{root: root, size: size} = dict, key, fun) when is_function(fun, 1) do
warn_deprecated()
{root, counter} = do_update(root, key, fn -> raise KeyError, key: key, term: dict end,
fun, key_hash(key))
%HashDict{root: root, size: size + counter}
end
# Updates `key` with `fun`, inserting `initial` when the key is absent.
def update(%HashDict{root: root, size: size}, key, initial, fun) when is_function(fun, 1) do
warn_deprecated()
{root, counter} = do_update(root, key, fn -> initial end, fun, key_hash(key))
%HashDict{root: root, size: size + counter}
end
# Returns {:ok, value} or :error.
def fetch(%HashDict{root: root}, key) do
warn_deprecated()
do_fetch(root, key, key_hash(key))
end
# Removes `key`; returns the dict unchanged when the key is absent.
def delete(dict, key) do
warn_deprecated()
case dict_delete(dict, key) do
{dict, _value} -> dict
:error -> dict
end
end
# Removes `key`, returning {value_or_default, dict}.
def pop(dict, key, default \\ nil) do
warn_deprecated()
case dict_delete(dict, key) do
{dict, value} -> {value, dict}
:error -> {default, dict}
end
end
def size(%HashDict{size: size}) do
warn_deprecated()
size
end
@doc false
# Reducer used by the Enumerable implementation; supports halting and
# suspension per the Enumerable contract.
def reduce(%HashDict{root: root}, acc, fun) do
do_reduce(root, acc, fun, @node_size, fn
{:suspend, acc} -> {:suspended, acc, &{:done, elem(&1, 1)}}
{:halt, acc} -> {:halted, acc}
{:cont, acc} -> {:done, acc}
end)
end
## General helpers
@doc false
# Deletes `key`, returning {dict, deleted_value} or :error when absent.
def dict_delete(%HashDict{root: root, size: size}, key) do
case do_delete(root, key, key_hash(key)) do
{root, value} -> {%HashDict{root: root, size: size - 1}, value}
:error -> :error
end
end
## Dict manipulation
# Walks the trie, consuming 3 hash bits per level, until the key's slot
# is found or shown to be empty.
defp do_fetch(node, key, hash) do
index = key_mask(hash)
case elem(node, index) do
[^key | v] -> {:ok, v}
{^key, v, _} -> {:ok, v}
{_, _, n} -> do_fetch(n, key, key_shift(hash))
_ -> :error
end
end
# Returns {new_node, counter} where counter is 1 when a key was added.
defp do_put(node, key, value, hash) do
index = key_mask(hash)
case elem(node, index) do
[] ->
{put_elem(node, index, [key | value]), 1}
[^key | _] ->
{put_elem(node, index, [key | value]), 0}
[k | v] ->
# Slot collision with a different key: push the new entry into a
# fresh subtree keyed by the next 3 hash bits.
n = put_elem(@node_template, key_mask(key_shift(hash)), [key | value])
{put_elem(node, index, {k, v, n}), 1}
{^key, _, n} ->
{put_elem(node, index, {key, value, n}), 0}
{k, v, n} ->
{n, counter} = do_put(n, key, value, key_shift(hash))
{put_elem(node, index, {k, v, n}), counter}
end
end
# Like do_put/4, but computes the stored value via the `initial`/`fun`
# thunks instead of taking it directly.
defp do_update(node, key, initial, fun, hash) do
index = key_mask(hash)
case elem(node, index) do
[] ->
{put_elem(node, index, [key | initial.()]), 1}
[^key | value] ->
{put_elem(node, index, [key | fun.(value)]), 0}
[k | v] ->
n = put_elem(@node_template, key_mask(key_shift(hash)), [key | initial.()])
{put_elem(node, index, {k, v, n}), 1}
{^key, value, n} ->
{put_elem(node, index, {key, fun.(value), n}), 0}
{k, v, n} ->
{n, counter} = do_update(n, key, initial, fun, key_shift(hash))
{put_elem(node, index, {k, v, n}), counter}
end
end
# Returns {new_node, deleted_value} or :error; collapses emptied subtrees
# back into leaf slots on the way up.
defp do_delete(node, key, hash) do
index = key_mask(hash)
case elem(node, index) do
[] ->
:error
[^key | value] ->
{put_elem(node, index, []), value}
[_ | _] ->
:error
{^key, value, n} ->
{put_elem(node, index, do_compact_node(n)), value}
{k, v, n} ->
case do_delete(n, key, key_shift(hash)) do
{@node_template, value} ->
# Subtree became empty: demote the slot back to a plain leaf.
{put_elem(node, index, [k | v]), value}
{n, value} ->
{put_elem(node, index, {k, v, n}), value}
:error ->
:error
end
end
end
# Generates one do_compact_node/1 clause per slot index: pulls the first
# occupied slot's entry up, collapsing the node to a bare leaf when it
# becomes empty.
Enum.each 0..(@node_size - 1), fn index ->
defp do_compact_node(node) when elem(node, unquote(index)) != [] do
case elem(node, unquote(index)) do
[k | v] ->
case put_elem(node, unquote(index), []) do
@node_template -> [k | v]
n -> {k, v, n}
end
{k, v, n} ->
{k, v, put_elem(node, unquote(index), do_compact_node(n))}
end
end
end
## Dict reduce
defp do_reduce_each(_node, {:halt, acc}, _fun, _next) do
{:halted, acc}
end
# Suspension captures a continuation so enumeration can be resumed later.
defp do_reduce_each(node, {:suspend, acc}, fun, next) do
{:suspended, acc, &do_reduce_each(node, &1, fun, next)}
end
defp do_reduce_each([], acc, _fun, next) do
next.(acc)
end
defp do_reduce_each([k | v], {:cont, acc}, fun, next) do
next.(fun.({k, v}, acc))
end
defp do_reduce_each({k, v, n}, {:cont, acc}, fun, next) do
do_reduce(n, fun.({k, v}, acc), fun, @node_size, next)
end
# Folds over a node's slots from slot `count` down to the first.
defp do_reduce(node, acc, fun, count, next) when count > 0 do
do_reduce_each(:erlang.element(count, node), acc, fun, &do_reduce(node, &1, fun, count - 1, next))
end
defp do_reduce(_node, acc, _fun, 0, next) do
next.(acc)
end
## Key operations
import Bitwise
defp key_hash(key) do
:erlang.phash2(key)
end
# Low 3 bits of the hash select the slot within a node.
defp key_mask(hash) do
hash &&& @node_bitmap
end
# Drops the 3 bits consumed at the current trie level.
defp key_shift(hash) do
hash >>> @node_shift
end
end
defimpl Enumerable, for: HashDict do
  # Delegates enumeration to the HashDict trie reducer.
  def reduce(dict, acc, fun) do
    HashDict.reduce(dict, acc, fun)
  end

  # A {key, value} pair is a member iff the key maps to exactly that value.
  def member?(dict, {key, value}) do
    case HashDict.fetch(dict, key) do
      {:ok, ^value} -> {:ok, true}
      _ -> {:ok, false}
    end
  end

  # Anything that is not a two-element tuple cannot be a member.
  def member?(_dict, _other), do: {:ok, false}

  # Size is tracked on the struct, so counting is O(1).
  def count(dict), do: {:ok, HashDict.size(dict)}
end
defimpl Collectable, for: HashDict do
  # Collects {key, value} pairs into the dict, starting from `original`.
  def into(original) do
    collector = fn
      dict, {:cont, {key, value}} -> HashDict.put(dict, key, value)
      dict, :done -> dict
      _dict, :halt -> :ok
    end

    {original, collector}
  end
end
defimpl Inspect, for: HashDict do
  import Inspect.Algebra

  # Renders as #HashDict<[...]> using the list form of the entries.
  def inspect(dict, opts) do
    entries = Inspect.List.inspect(HashDict.to_list(dict), opts)

    concat(["#HashDict<", entries, ">"])
  end
end
|
lib/elixir/lib/hash_dict.ex
| 0.72086
| 0.483405
|
hash_dict.ex
|
starcoder
|
defmodule AWS.LexModelBuilding do
@moduledoc """
Amazon Lex Build-Time Actions
Amazon Lex is an AWS service for building conversational voice and text
interfaces. Use these actions to create, update, and delete conversational
bots for new and existing client applications.
"""
@doc """
Creates a new version of the bot based on the `$LATEST` version. If the
`$LATEST` version of this resource hasn't changed since you created the
last version, Amazon Lex doesn't create a new version. It returns the last
created version.
<note> You can update only the `$LATEST` version of the bot. You can't
update the numbered versions that you create with the `CreateBotVersion`
operation.
</note> When you create the first version of a bot, Amazon Lex sets the
version to 1. Subsequent versions increment by 1. For more information, see
`versioning-intro`.
This operation requires permission for the `lex:CreateBotVersion` action.
"""
def create_bot_version(client, name, input, options \\ []) do
path_ = "/bots/#{URI.encode(name)}/versions"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates a new version of an intent based on the `$LATEST` version of the
intent. If the `$LATEST` version of this intent hasn't changed since you
last updated it, Amazon Lex doesn't create a new version. It returns the
last version you created.
<note> You can update only the `$LATEST` version of the intent. You can't
update the numbered versions that you create with the `CreateIntentVersion`
operation.
</note> When you create a version of an intent, Amazon Lex sets the version
to 1. Subsequent versions increment by 1. For more information, see
`versioning-intro`.
This operation requires permissions to perform the
`lex:CreateIntentVersion` action.
"""
def create_intent_version(client, name, input, options \\ []) do
path_ = "/intents/#{URI.encode(name)}/versions"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates a new version of a slot type based on the `$LATEST` version of the
specified slot type. If the `$LATEST` version of this resource has not
changed since the last version that you created, Amazon Lex doesn't create
a new version. It returns the last version that you created.
<note> You can update only the `$LATEST` version of a slot type. You can't
update the numbered versions that you create with the
`CreateSlotTypeVersion` operation.
</note> When you create a version of a slot type, Amazon Lex sets the
version to 1. Subsequent versions increment by 1. For more information, see
`versioning-intro`.
This operation requires permissions for the `lex:CreateSlotTypeVersion`
action.
"""
def create_slot_type_version(client, name, input, options \\ []) do
path_ = "/slottypes/#{URI.encode(name)}/versions"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Deletes all versions of the bot, including the `$LATEST` version. To delete
a specific version of the bot, use the `DeleteBotVersion` operation. The
`DeleteBot` operation doesn't immediately remove the bot schema. Instead,
it is marked for deletion and removed later.
Amazon Lex stores utterances indefinitely for improving the ability of your
bot to respond to user inputs. These utterances are not removed when the
bot is deleted. To remove the utterances, use the `DeleteUtterances`
operation.
If a bot has an alias, you can't delete it. Instead, the `DeleteBot`
operation returns a `ResourceInUseException` exception that includes a
reference to the alias that refers to the bot. To remove the reference to
the bot, delete the alias. If you get the same exception again, delete the
referring alias until the `DeleteBot` operation is successful.
This operation requires permissions for the `lex:DeleteBot` action.
"""
def delete_bot(client, name, input, options \\ []) do
path_ = "/bots/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes an alias for the specified bot.
You can't delete an alias that is used in the association between a bot and
a messaging channel. If an alias is used in a channel association, the
`DeleteBot` operation returns a `ResourceInUseException` exception that
includes a reference to the channel association that refers to the bot. You
can remove the reference to the alias by deleting the channel association.
If you get the same exception again, delete the referring association until
the `DeleteBotAlias` operation is successful.
"""
def delete_bot_alias(client, bot_name, name, input, options \\ []) do
path_ = "/bots/#{URI.encode(bot_name)}/aliases/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes the association between an Amazon Lex bot and a messaging platform.
This operation requires permission for the
`lex:DeleteBotChannelAssociation` action.
"""
def delete_bot_channel_association(client, bot_alias, bot_name, name, input, options \\ []) do
path_ = "/bots/#{URI.encode(bot_name)}/aliases/#{URI.encode(bot_alias)}/channels/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes a specific version of a bot. To delete all versions of a bot, use
the `DeleteBot` operation.
This operation requires permissions for the `lex:DeleteBotVersion` action.
"""
def delete_bot_version(client, name, version, input, options \\ []) do
path_ = "/bots/#{URI.encode(name)}/versions/#{URI.encode(version)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes all versions of the intent, including the `$LATEST` version. To
delete a specific version of the intent, use the `DeleteIntentVersion`
operation.
You can delete a version of an intent only if it is not referenced. To
delete an intent that is referred to in one or more bots (see
`how-it-works`), you must remove those references first.
<note> If you get the `ResourceInUseException` exception, it provides an
example reference that shows where the intent is referenced. To remove the
reference to the intent, either update the bot or delete it. If you get the
same exception when you attempt to delete the intent again, repeat until
the intent has no references and the call to `DeleteIntent` is successful.
</note> This operation requires permission for the `lex:DeleteIntent`
action.
"""
def delete_intent(client, name, input, options \\ []) do
path_ = "/intents/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes a specific version of an intent. To delete all versions of a
intent, use the `DeleteIntent` operation.
This operation requires permissions for the `lex:DeleteIntentVersion`
action.
"""
def delete_intent_version(client, name, version, input, options \\ []) do
path_ = "/intents/#{URI.encode(name)}/versions/#{URI.encode(version)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes all versions of the slot type, including the `$LATEST` version. To
delete a specific version of the slot type, use the `DeleteSlotTypeVersion`
operation.
You can delete a version of a slot type only if it is not referenced. To
delete a slot type that is referred to in one or more intents, you must
remove those references first.
<note> If you get the `ResourceInUseException` exception, the exception
provides an example reference that shows the intent where the slot type is
referenced. To remove the reference to the slot type, either update the
intent or delete it. If you get the same exception when you attempt to
delete the slot type again, repeat until the slot type has no references
and the `DeleteSlotType` call is successful.
</note> This operation requires permission for the `lex:DeleteSlotType`
action.
"""
def delete_slot_type(client, name, input, options \\ []) do
path_ = "/slottypes/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes a specific version of a slot type. To delete all versions of a slot
type, use the `DeleteSlotType` operation.
This operation requires permissions for the `lex:DeleteSlotTypeVersion`
action.
"""
def delete_slot_type_version(client, name, version, input, options \\ []) do
path_ = "/slottypes/#{URI.encode(name)}/version/#{URI.encode(version)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes stored utterances.
Amazon Lex stores the utterances that users send to your bot. Utterances
are stored for 15 days for use with the `GetUtterancesView` operation, and
then stored indefinitely for use in improving the ability of your bot to
respond to user input.
Use the `DeleteUtterances` operation to manually delete stored utterances
for a specific user. When you use the `DeleteUtterances` operation,
utterances stored for improving your bot's ability to respond to user input
are deleted immediately. Utterances stored for use with the
`GetUtterancesView` operation are deleted after 15 days.
This operation requires permissions for the `lex:DeleteUtterances` action.
"""
def delete_utterances(client, bot_name, user_id, input, options \\ []) do
path_ = "/bots/#{URI.encode(bot_name)}/utterances/#{URI.encode(user_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Returns metadata information for a specific bot. You must provide the bot
name and the bot version or alias.
This operation requires permissions for the `lex:GetBot` action.
"""
def get_bot(client, name, version_or_alias, options \\ []) do
path_ = "/bots/#{URI.encode(name)}/versions/#{URI.encode(version_or_alias)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns information about an Amazon Lex bot alias. For more information
about aliases, see `versioning-aliases`.
This operation requires permissions for the `lex:GetBotAlias` action.
"""
def get_bot_alias(client, bot_name, name, options \\ []) do
path_ = "/bots/#{URI.encode(bot_name)}/aliases/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns a list of aliases for a specified Amazon Lex bot.
This operation requires permissions for the `lex:GetBotAliases` action.
"""
def get_bot_aliases(client, bot_name, max_results \\ nil, name_contains \\ nil, next_token \\ nil, options \\ []) do
path_ = "/bots/#{URI.encode(bot_name)}/aliases/"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(name_contains) do
[{"nameContains", name_contains} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns information about the association between an Amazon Lex bot and a
messaging platform.
This operation requires permissions for the `lex:GetBotChannelAssociation`
action.
"""
def get_bot_channel_association(client, bot_alias, bot_name, name, options \\ []) do
path_ = "/bots/#{URI.encode(bot_name)}/aliases/#{URI.encode(bot_alias)}/channels/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns a list of all of the channels associated with the specified bot.
The `GetBotChannelAssociations` operation requires permissions for the
`lex:GetBotChannelAssociations` action.
"""
def get_bot_channel_associations(client, bot_alias, bot_name, max_results \\ nil, name_contains \\ nil, next_token \\ nil, options \\ []) do
path_ = "/bots/#{URI.encode(bot_name)}/aliases/#{URI.encode(bot_alias)}/channels/"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(name_contains) do
[{"nameContains", name_contains} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Gets information about all of the versions of a bot.
The `GetBotVersions` operation returns a `BotMetadata` object for each
version of a bot. For example, if a bot has three numbered versions, the
`GetBotVersions` operation returns four `BotMetadata` objects in the
response, one for each numbered version and one for the `$LATEST` version.
The `GetBotVersions` operation always returns at least one version, the
`$LATEST` version.
This operation requires permissions for the `lex:GetBotVersions` action.
"""
def get_bot_versions(client, name, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/bots/#{URI.encode(name)}/versions/"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns bot information as follows:
<ul> <li> If you provide the `nameContains` field, the response includes
information for the `$LATEST` version of all bots whose name contains the
specified string.
</li> <li> If you don't specify the `nameContains` field, the operation
returns information about the `$LATEST` version of all of your bots.
</li> </ul> This operation requires permission for the `lex:GetBots`
action.
"""
def get_bots(client, max_results \\ nil, name_contains \\ nil, next_token \\ nil, options \\ []) do
path_ = "/bots/"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(name_contains) do
[{"nameContains", name_contains} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns information about a built-in intent.
This operation requires permission for the `lex:GetBuiltinIntent` action.
"""
def get_builtin_intent(client, signature, options \\ []) do
path_ = "/builtins/intents/#{URI.encode(signature)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Gets a list of built-in intents that meet the specified criteria.
This operation requires permission for the `lex:GetBuiltinIntents` action.
"""
def get_builtin_intents(client, locale \\ nil, max_results \\ nil, next_token \\ nil, signature_contains \\ nil, options \\ []) do
path_ = "/builtins/intents/"
headers = []
query_ = []
query_ = if !is_nil(signature_contains) do
[{"signatureContains", signature_contains} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(locale) do
[{"locale", locale} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Gets a list of built-in slot types that meet the specified criteria.
For a list of built-in slot types, see [Slot Type
Reference](https://developer.amazon.com/public/solutions/alexa/alexa-skills-kit/docs/built-in-intent-ref/slot-type-reference)
in the *Alexa Skills Kit*.
This operation requires permission for the `lex:GetBuiltInSlotTypes`
action.
"""
def get_builtin_slot_types(client, locale \\ nil, max_results \\ nil, next_token \\ nil, signature_contains \\ nil, options \\ []) do
path_ = "/builtins/slottypes/"
headers = []
query_ = []
query_ = if !is_nil(signature_contains) do
[{"signatureContains", signature_contains} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(locale) do
[{"locale", locale} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Exports the contents of a Amazon Lex resource in a specified format.
"""
def get_export(client, export_type, name, resource_type, version, options \\ []) do
path_ = "/exports/"
headers = []
query_ = []
query_ = if !is_nil(version) do
[{"version", version} | query_]
else
query_
end
query_ = if !is_nil(resource_type) do
[{"resourceType", resource_type} | query_]
else
query_
end
query_ = if !is_nil(name) do
[{"name", name} | query_]
else
query_
end
query_ = if !is_nil(export_type) do
[{"exportType", export_type} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Gets information about an import job started with the `StartImport`
operation.
"""
def get_import(client, import_id, options \\ []) do
path_ = "/imports/#{URI.encode(import_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns information about an intent. In addition to the intent name, you
must specify the intent version.
This operation requires permissions to perform the `lex:GetIntent` action.
"""
def get_intent(client, name, version, options \\ []) do
path_ = "/intents/#{URI.encode(name)}/versions/#{URI.encode(version)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Gets information about all of the versions of an intent.
The `GetIntentVersions` operation returns an `IntentMetadata` object for
each version of an intent. For example, if an intent has three numbered
versions, the `GetIntentVersions` operation returns four `IntentMetadata`
objects in the response, one for each numbered version and one for the
`$LATEST` version.
The `GetIntentVersions` operation always returns at least one version, the
`$LATEST` version.
This operation requires permissions for the `lex:GetIntentVersions` action.
"""
def get_intent_versions(client, name, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/intents/#{URI.encode(name)}/versions/"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns intent information as follows:
<ul> <li> If you specify the `nameContains` field, returns the `$LATEST`
version of all intents that contain the specified string.
</li> <li> If you don't specify the `nameContains` field, returns
information about the `$LATEST` version of all intents.
</li> </ul> The operation requires permission for the `lex:GetIntents`
action.
"""
def get_intents(client, max_results \\ nil, name_contains \\ nil, next_token \\ nil, options \\ []) do
path_ = "/intents/"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(name_contains) do
[{"nameContains", name_contains} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns information about a specific version of a slot type. In addition to
specifying the slot type name, you must specify the slot type version.
This operation requires permissions for the `lex:GetSlotType` action.
"""
def get_slot_type(client, name, version, options \\ []) do
path_ = "/slottypes/#{URI.encode(name)}/versions/#{URI.encode(version)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Gets information about all versions of a slot type.
The `GetSlotTypeVersions` operation returns a `SlotTypeMetadata` object for
each version of a slot type. For example, if a slot type has three numbered
versions, the `GetSlotTypeVersions` operation returns four
`SlotTypeMetadata` objects in the response, one for each numbered version
and one for the `$LATEST` version.
The `GetSlotTypeVersions` operation always returns at least one version,
the `$LATEST` version.
This operation requires permissions for the `lex:GetSlotTypeVersions`
action.
"""
def get_slot_type_versions(client, name, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/slottypes/#{URI.encode(name)}/versions/"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns slot type information as follows:

<ul> <li> If you specify the `nameContains` field, returns the `$LATEST`
version of all slot types that contain the specified string.

</li> <li> If you don't specify the `nameContains` field, returns
information about the `$LATEST` version of all slot types.

</li> </ul> The operation requires permission for the `lex:GetSlotTypes`
action.
"""
def get_slot_types(client, max_results \\ nil, name_contains \\ nil, next_token \\ nil, options \\ []) do
  # Keep only the optional filters supplied by the caller.
  query_params =
    [
      {"maxResults", max_results},
      {"nameContains", name_contains},
      {"nextToken", next_token}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  request(client, :get, "/slottypes/", query_params, [], nil, options, 200)
end
@doc """
Use the `GetUtterancesView` operation to get information about the
utterances that your users have made to your bot. You can use this list to
tune the utterances that your bot responds to.

For example, say that you have created a bot to order flowers. After your
users have used your bot for a while, use the `GetUtterancesView` operation
to see the requests that they have made and whether they have been
successful. You might find that the utterance "I want flowers" is not being
recognized. You could add this utterance to the `OrderFlowers` intent so
that your bot recognizes that utterance.

After you publish a new version of a bot, you can get information about the
old version and the new so that you can compare the performance across the
two versions.

Utterance statistics are generated once a day. Data is available for the
last 15 days. You can request information for up to 5 versions of your bot
in each request. Amazon Lex returns the most frequent utterances received
by the bot in the last 15 days. The response contains information about a
maximum of 100 utterances for each version.

If you set `childDirected` field to true when you created your bot, or if
you opted out of participating in improving Amazon Lex, utterances are not
available.

This operation requires permissions for the `lex:GetUtterancesView` action.
"""
def get_utterances_view(client, bot_name, bot_versions, status_type, options \\ []) do
  # This AWS API (unusually) takes snake_case query keys; `view=aggregation`
  # is baked into the path.
  query_params =
    [{"bot_versions", bot_versions}, {"status_type", status_type}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  path = "/bots/#{URI.encode(bot_name)}/utterances?view=aggregation"
  request(client, :get, path, query_params, [], nil, options, 200)
end
@doc """
Gets a list of tags associated with the specified resource. Only bots, bot
aliases, and bot channels can have tags associated with them.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
  path = "/tags/#{URI.encode(resource_arn)}"
  request(client, :get, path, [], [], nil, options, 200)
end
@doc """
Creates an Amazon Lex conversational bot or replaces an existing bot. When
you create or update a bot you are only required to specify a name, a
locale, and whether the bot is directed toward children under age 13. You
can use this to add intents later, or to remove intents from an existing
bot. When you create a bot with the minimum information, the bot is created
or updated but Amazon Lex returns the <code/> response `FAILED`. You can
build the bot after you add one or more intents. For more information about
Amazon Lex bots, see `how-it-works`.

If you specify the name of an existing bot, the fields in the request
replace the existing values in the `$LATEST` version of the bot. Amazon Lex
removes any fields that you don't provide values for in the request, except
for the `idleTTLInSeconds` and `privacySettings` fields, which are set to
their default values. If you don't specify values for required fields,
Amazon Lex throws an exception.

This operation requires permissions for the `lex:PutBot` action. For more
information, see `security-iam`.
"""
def put_bot(client, name, input, options \\ []) do
  # Writes always target the mutable $LATEST version of the bot.
  path = "/bots/#{URI.encode(name)}/versions/$LATEST"
  request(client, :put, path, [], [], input, options, 200)
end
@doc """
Creates an alias for the specified version of the bot or replaces an alias
for the specified bot. To change the version of the bot that the alias
points to, replace the alias. For more information about aliases, see
`versioning-aliases`.

This operation requires permissions for the `lex:PutBotAlias` action.
"""
def put_bot_alias(client, bot_name, name, input, options \\ []) do
  path = "/bots/#{URI.encode(bot_name)}/aliases/#{URI.encode(name)}"
  request(client, :put, path, [], [], input, options, 200)
end
@doc """
Creates an intent or replaces an existing intent.

To define the interaction between the user and your bot, you use one or
more intents. For a pizza ordering bot, for example, you would create an
`OrderPizza` intent.

To create an intent or replace an existing intent, you must provide the
following:

<ul> <li> Intent name. For example, `OrderPizza`.

</li> <li> Sample utterances. For example, "Can I order a pizza, please."
and "I want to order a pizza."

</li> <li> Information to be gathered. You specify slot types for the
information that your bot will request from the user. You can specify
standard slot types, such as a date or a time, or custom slot types such as
the size and crust of a pizza.

</li> <li> How the intent will be fulfilled. You can provide a Lambda
function or configure the intent to return the intent information to the
client application. If you use a Lambda function, when all of the intent
information is available, Amazon Lex invokes your Lambda function. If you
configure your intent to return the intent information to the client
application.

</li> </ul> You can specify other optional information in the request, such
as:

<ul> <li> A confirmation prompt to ask the user to confirm an intent. For
example, "Shall I order your pizza?"

</li> <li> A conclusion statement to send to the user after the intent has
been fulfilled. For example, "I placed your pizza order."

</li> <li> A follow-up prompt that asks the user for additional activity.
For example, asking "Do you want to order a drink with your pizza?"

</li> </ul> If you specify an existing intent name to update the intent,
Amazon Lex replaces the values in the `$LATEST` version of the intent with
the values in the request. Amazon Lex removes fields that you don't provide
in the request. If you don't specify the required fields, Amazon Lex throws
an exception. When you update the `$LATEST` version of an intent, the
`status` field of any bot that uses the `$LATEST` version of the intent is
set to `NOT_BUILT`.

For more information, see `how-it-works`.

This operation requires permissions for the `lex:PutIntent` action.
"""
def put_intent(client, name, input, options \\ []) do
  # Writes always target the mutable $LATEST version of the intent.
  path = "/intents/#{URI.encode(name)}/versions/$LATEST"
  request(client, :put, path, [], [], input, options, 200)
end
@doc """
Creates a custom slot type or replaces an existing custom slot type.

To create a custom slot type, specify a name for the slot type and a set of
enumeration values, which are the values that a slot of this type can
assume. For more information, see `how-it-works`.

If you specify the name of an existing slot type, the fields in the request
replace the existing values in the `$LATEST` version of the slot type.
Amazon Lex removes the fields that you don't provide in the request. If you
don't specify required fields, Amazon Lex throws an exception. When you
update the `$LATEST` version of a slot type, if a bot uses the `$LATEST`
version of an intent that contains the slot type, the bot's `status` field
is set to `NOT_BUILT`.

This operation requires permissions for the `lex:PutSlotType` action.
"""
def put_slot_type(client, name, input, options \\ []) do
  path = "/slottypes/#{URI.encode(name)}/versions/$LATEST"
  request(client, :put, path, [], [], input, options, 200)
end
@doc """
Starts a job to import a resource to Amazon Lex.
"""
def start_import(client, input, options \\ []) do
  # A successful import submission answers 201 Created, not 200.
  request(client, :post, "/imports/", [], [], input, options, 201)
end
@doc """
Adds the specified tags to the specified resource. If a tag key already
exists, the existing value is replaced with the new value.
"""
def tag_resource(client, resource_arn, input, options \\ []) do
  path = "/tags/#{URI.encode(resource_arn)}"
  # 204 No Content on success; the response body is empty.
  request(client, :post, path, [], [], input, options, 204)
end
@doc """
Removes tags from a bot, bot alias or bot channel.
"""
def untag_resource(client, resource_arn, input, options \\ []) do
  # `tagKeys` is lifted out of the input map and sent as a query parameter.
  {query_params, input} = AWS.Request.build_params([{"tagKeys", "tagKeys"}], input)
  path = "/tags/#{URI.encode(resource_arn)}"
  request(client, :delete, path, query_params, [], input, options, 204)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
# Builds, signs (SigV4) and performs one HTTP call against the Lex Model
# Building service. NOTE: the URL (including query string) must be fully
# assembled BEFORE sign_v4/5 runs — do not reorder these steps.
defp request(client, method, path, query, headers, input, options, success_status_code) do
  # Pin the service name used for SigV4 credential scoping.
  client = %{client | service: "lex"}
  host = build_host("models.lex", client)

  url =
    host
    |> build_url(path, client)
    |> add_query(query, client)

  additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
  headers = AWS.Request.add_headers(additional_headers, headers)
  # A nil input encodes to the payload representation the signer expects.
  payload = encode!(client, input)
  headers = AWS.Request.sign_v4(client, method, url, headers, payload)
  perform_request(client, method, url, payload, headers, options, success_status_code)
end
# Executes the signed request and normalizes the outcome into
# {:ok, decoded_body | nil, raw_response} | {:error, reason}.
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
  case AWS.Client.request(client, method, url, payload, headers, options) do
    # The two stacked `when` clauses act as an OR: accept any of 200/202/204
    # when no specific success code was requested, or exactly the expected one.
    {:ok, %{status_code: status_code, body: body} = response}
    when is_nil(success_status_code) and status_code in [200, 202, 204]
    when status_code == success_status_code ->
      # An empty body is passed through as nil instead of being JSON-decoded.
      body = if(body != "", do: decode!(client, body))
      {:ok, body, response}

    # Any other successful transport result is an unexpected API response.
    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    # Transport-level failures pass through untouched.
    error = {:error, _reason} -> error
  end
end
# Resolves the hostname to call. Clause order matters: the "local" region
# short-circuits to a custom endpoint (or localhost) for testing, before the
# general "<prefix>.<region>.<endpoint>" AWS form.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
  endpoint
end

defp build_host(_endpoint_prefix, %{region: "local"}) do
  "localhost"
end

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Assembles the base URL from the client's configured scheme and port.
defp build_url(host, path, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}#{path}"
end
# Appends an encoded query string to the URL; a URL with no query parameters
# is returned unchanged (no trailing "?").
defp add_query(url, [], _client), do: url

defp add_query(url, query, client) do
  url <> "?" <> encode!(client, query, :query)
end
# Delegates payload encoding to the configured client codec; defaults to JSON
# (also used with :query for query-string encoding).
defp encode!(client, payload, format \\ :json) do
  AWS.Client.encode!(client, payload, format)
end
# Delegates JSON decoding of response bodies to the configured client codec.
defp decode!(client, payload) do
  AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/lex_model_building.ex
| 0.866175
| 0.492615
|
lex_model_building.ex
|
starcoder
|
defmodule HttpApi.Database do
  @moduledoc """
  GenServer that owns an ETS table to store and retrieve the football results.

  The data is loaded on application start and the results are saved as Protobuf
  Structs, so it will be easy to encode them afterwards either to json (with
  poison) or protobuf (with exprotobuf).

  The purpose of this process is to create and keep the ETS table alive,
  while providing a public api to the table. ETS operations won't go through
  the server process and instead will be handled immediately in the client process.
  """

  # Provides access to the GenServer API.
  use GenServer

  ## Client

  @doc """
  Starts the ETS table owner process and links it to the caller.
  """
  # FIX: this previously called GenServer.start/3, which does NOT link the
  # process. A function named `start_link` must link so that a supervisor
  # owns (and can restart) the table-owner process; unlinked, a supervisor
  # would never notice the process dying and the table would be lost.
  def start_link(_) do
    GenServer.start_link(__MODULE__, nil, name: __MODULE__)
  end

  @doc """
  Returns a list of all objects in the table.
  """
  def all do
    :ets.tab2list(__MODULE__)
  end

  @doc """
  Returns a list of all objects in the table matched to the match specification.
  """
  def select(match_spec) do
    :ets.select(__MODULE__, match_spec)
  end

  ## Server

  # Creates the ETS table (:protected — only this process writes, any process
  # may read; read_concurrency optimizes for the read-mostly access pattern)
  # and loads the CSV data before entering the loop.
  @impl true
  def init(_) do
    :ets.new(
      __MODULE__,
      [:named_table, :set, :protected, read_concurrency: true]
    )

    load_data()
    {:ok, nil}
  end

  # Loads the data from the CSV file to the ETS table. Each CSV row becomes
  # one {id, %HttpApi.Protobuf.Match{}} entry keyed by the row's first column.
  defp load_data do
    Application.app_dir(:http_api, "priv/Data.csv")
    |> File.stream!()
    |> NimbleCSV.RFC4180.parse_stream()
    |> Enum.each(fn [id | values] ->
      match =
        values
        |> process_values()
        |> HttpApi.Protobuf.Match.new()

      :ets.insert(__MODULE__, {id, match})
    end)
  end

  # Processes the row values so they can be used to initialize a Match struct.
  # The clause intentionally matches exactly 11 columns so malformed rows
  # crash loudly instead of inserting partial data.
  defp process_values([
         league,
         season,
         date,
         home_team,
         away_team,
         fthg,
         ftag,
         ftr,
         hthg,
         htag,
         htr
       ]) do
    [
      Div: league,
      Season: season,
      Date: date,
      HomeTeam: home_team,
      AwayTeam: away_team,
      FTHG: String.to_integer(fthg),
      FTAG: String.to_integer(ftag),
      FTR: ftr,
      HTHG: String.to_integer(hthg),
      HTAG: String.to_integer(htag),
      HTR: htr
    ]
  end
end
|
lib/http_api/database.ex
| 0.702224
| 0.413033
|
database.ex
|
starcoder
|
defmodule Phoenix.Socket do
  @moduledoc """
  Holds state for every channel, pointing to its transport,
  pubsub server and more.

  ## Socket Fields

  * `assigns` - The map of socket assigns, default: `%{}`
  * `channel` - The channel module where this socket originated
  * `channel_pid` - The channel pid
  * `endpoint` - The endpoint module where this socket originated
  * `joined` - If the socket has effectively joined the channel
  * `pubsub_server` - The registered name of the socket's PubSub server
  * `ref` - The latest ref sent by the client
  * `topic` - The string topic, ie `"rooms:123"`
  * `transport` - The socket's transport, ie: `Phoenix.Transports.WebSocket`
  * `transport_pid` - The pid of the socket's transport process
  """

  defmodule InvalidMessageError do
    @moduledoc """
    Raised when the socket message is invalid.
    """
    # Standard exception with only the conventional :message field.
    defexception [:message]
  end

  alias Phoenix.Socket

  # NOTE(review): `assigns: %{}` in the typespec reads as an empty-map type on
  # modern Elixir; `map` is presumably what was intended — confirm before
  # tightening against Dialyzer.
  @type t :: %Socket{assigns: %{},
                     channel: atom,
                     channel_pid: pid,
                     endpoint: atom,
                     joined: boolean,
                     pubsub_server: atom,
                     ref: term,
                     topic: String.t,
                     transport: atom,
                     transport_pid: pid}

  # All fields default to nil except assigns (empty map) and joined (false),
  # matching the moduledoc's field list.
  defstruct assigns: %{},
            channel: nil,
            channel_pid: nil,
            endpoint: nil,
            joined: false,
            pubsub_server: nil,
            ref: nil,
            topic: nil,
            transport: nil,
            transport_pid: nil
end
defmodule Phoenix.Socket.Message do
  @moduledoc """
  Defines a message dispatched over transport to channels and vice-versa.

  The message format requires the following keys:

  * `topic` - The string topic or topic:subtopic pair namespace, ie "messages", "messages:123"
  * `event`- The string event name, ie "phx_join"
  * `payload` - The message payload
  * `ref` - The unique string ref
  """

  defstruct [:topic, :event, :payload, :ref]

  @doc """
  Converts a map with string keys into a message struct.

  Raises `Phoenix.Socket.InvalidMessageError` if not valid.
  """
  def from_map!(map) when is_map(map) do
    %Phoenix.Socket.Message{
      topic: fetch!(map, "topic"),
      event: fetch!(map, "event"),
      payload: fetch!(map, "payload"),
      ref: fetch!(map, "ref")
    }
  end

  # Looks up a required key, raising the message-specific error (with the
  # same "missing key" text the KeyError-based version produced) when absent.
  defp fetch!(map, key) do
    case Map.fetch(map, key) do
      {:ok, value} ->
        value

      :error ->
        raise Phoenix.Socket.InvalidMessageError, message: "missing key #{inspect key}"
    end
  end
end
defmodule Phoenix.Socket.Reply do
  @moduledoc """
  Defines a reply sent from channels to transports.

  The message format requires the following keys:

  * `topic` - The string topic or topic:subtopic pair namespace, ie "messages", "messages:123"
  * `status` - The reply status as an atom
  * `payload` - The reply payload
  * `ref` - The unique string ref
  """

  # All fields default to nil (equivalent to the keyword-list defstruct form).
  defstruct [:topic, :status, :payload, :ref]
end
defmodule Phoenix.Socket.Broadcast do
  @moduledoc """
  Defines a message sent from pubsub to channels and vice-versa.

  The message format requires the following keys:

  * `topic` - The string topic or topic:subtopic pair namespace, ie "messages", "messages:123"
  * `event`- The string event name, ie "phx_join"
  * `payload` - The message payload
  """

  # All fields default to nil (equivalent to the keyword-list defstruct form).
  defstruct [:topic, :event, :payload]
end
|
lib/phoenix/socket.ex
| 0.837321
| 0.409368
|
socket.ex
|
starcoder
|
defmodule Msgpax.Ext do
  @moduledoc """
  A struct used to represent the MessagePack [Extension
  type](https://github.com/msgpack/msgpack/blob/master/spec.md#formats-ext).

  ## Examples

  Let's say we want to be able to serialize a custom type that consists of a
  byte `data` repeated `reps` times. We could represent this as a `RepByte`
  struct in Elixir:

      defmodule RepByte do
        defstruct [:data, :reps]
      end

  A simple (albeit not space efficient) approach to encoding such data is simply
  a binary containing `data` for `reps` times: `%RepByte{data: ?a, reps: 2}`
  would be encoded as `"aa"`.

  We can now define the `Msgpax.Packer` protocol for the `RepByte` struct to
  tell `Msgpax` how to encode this struct (we'll choose `10` as an arbitrary
  integer to identify the type of this extension).

      defimpl Msgpax.Packer, for: RepByte do
        @rep_byte_ext_type 10

        def pack(%RepByte{data: b, reps: reps}) do
          @rep_byte_ext_type
          |> Msgpax.Ext.new(String.duplicate(<<b>>, reps))
          |> Msgpax.Packer.pack()
        end
      end

  Now, we can pack `RepByte`s:

      iex> packed = Msgpax.pack!(%RepByte{data: ?a, reps: 3})
      iex> Msgpax.unpack!(packed)
      #Msgpax.Ext<10, "aaa">

  ### Unpacking

  As seen in the example above, since the `RepByte` struct is *packed* as a
  MessagePack extension, it will be unpacked as that extension later on; what we
  may want, however, is to unpack that extension back to a `RepByte` struct.

  To do this, we can pass an `:ext` option to `Msgpax.unpack/2` (and other
  unpacking functions). This option has to be a module that implements the
  `Msgpax.Ext.Unpacker` behaviour; it will be used to unpack extensions to
  arbitrary Elixir terms.

  For our `RepByte` example, we could create an unpacker module like this:

      defmodule MyExtUnpacker do
        @behaviour Msgpax.Ext.Unpacker
        @rep_byte_ext_type 10

        def unpack(%Msgpax.Ext{type: @rep_byte_ext_type, data: data}) do
          <<byte, _rest::binary>> = data
          {:ok, %RepByte{data: byte, reps: byte_size(data)}}
        end
      end

  With this in place, we can now unpack a packed `RepByte` back to a `RepByte`
  struct:

      iex> packed = Msgpax.pack!(%RepByte{data: ?a, reps: 3})
      iex> Msgpax.unpack!(packed, ext: MyExtUnpacker)
      %RepByte{data: ?a, reps: 3}

  """

  @type type :: 0..127

  @type t :: %__MODULE__{
          type: type,
          data: binary
        }

  defstruct [:type, :data]

  @doc """
  Creates a new `Msgpax.Ext` struct.

  `type` must be an integer in `0..127` and it will be used as the type of the
  extension (whose meaning depends on your application). `data` must be a binary
  containing the serialized extension (whose serialization depends on your
  application).

  ## Examples

      iex> Msgpax.Ext.new(24, "foo")
      #Msgpax.Ext<24, "foo">

  """
  def new(type, data) when is_binary(data) and type in 0..127,
    do: %__MODULE__{type: type, data: data}

  defimpl Inspect do
    import Inspect.Algebra

    def inspect(%{type: type, data: data}, opts) do
      # Render the type and payload with their native Inspect implementations
      # so user inspect options (base, limits, ...) are honored.
      type_doc = Inspect.Integer.inspect(type, opts)
      data_doc = Inspect.BitString.inspect(data, opts)

      concat(["#Msgpax.Ext<", type_doc, ", ", data_doc, ">"])
    end
  end
end
|
lib/msgpax/ext.ex
| 0.873633
| 0.662943
|
ext.ex
|
starcoder
|
defmodule BSV.Contract.PushTxHelpers do
  @moduledoc """
  Helper module for implementing the technique known as `OP_PUSH_TX` in
  `BSV.Contract` modules.

  `OP_PUSH_TX` is a technique that enables true "smart contracts" to be deployed
  on Bitcoin. The technique can be defined as:

  * Push the transaction preimage into an unlocking script
  * In the locking script we can verify it is the correct preimage by using
    Script to create a signature and verifying it with `OP_CHECKSIG`
  * From there we can extract any data from the preimage and use it in our
    smart contracts.

  The technique allows for storing and tracking state across Bitcoin
  transactions, defining spending conditions in locking scripts, and much more.

  ## Usage

  To use these helpers, import this module into your contract module.

      defmodule MyContract do
        use BSV.Contract
        import BSV.Contract.PushTxHelpers

        def locking_script(ctx, _params) do
          check_tx(ctx)
        end

        def unlocking_script(ctx, _params) do
          push_tx(ctx)
        end
      end
  """

  alias BSV.{Contract, Sig, UTXO}
  use Contract.Helpers

  # Prefix of the secp256k1 curve order, completed at script runtime by
  # push_order/1 below.
  @order_prefix Base.decode16!("414136d08c5ed2bf3ba048afe6dcaebafe", case: :mixed)
  # NOTE(review): "<KEY>" looks like a redacted placeholder — Base.decode16!/2
  # will raise at compile time on this value. Restore the real hex-encoded
  # public key constants before building.
  @pubkey_a Base.decode16!("<KEY>", case: :mixed)
  @pubkey_b Base.decode16!("<KEY>", case: :mixed)
  @pubkey_opt Base.decode16!("02b405d7f0322a89d0f9f3a98e6f938fdc1c969a8d1382a2bf66a71ae74a1e83b0", case: :mixed)
  # DER signature prefix; the r value is fixed, s is appended at runtime.
  @sig_prefix Base.decode16!("3044022079be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f817980220", case: :mixed)
  # SIGHASH_ALL | SIGHASH_FORKID (0x01 | 0x40).
  @sighash_flag 0x41

  @doc """
  Assuming the top stack item is a Tx Preimage, gets the tx version number and
  places it on the stack on top of the preimage.
  """
  @spec get_version(Contract.t()) :: Contract.t()
  def get_version(%Contract{} = contract) do
    contract
    |> op_dup()
    |> slice(0, 4)
    |> decode_uint(:little)
  end

  @doc """
  Assuming the top stack item is a Tx Preimage, gets the 32 byte prevouts hash
  and places it on the stack on top of the preimage.
  """
  @spec get_prevouts_hash(Contract.t()) :: Contract.t()
  def get_prevouts_hash(%Contract{} = contract) do
    contract
    |> op_dup()
    |> slice(4, 32)
  end

  @doc """
  Assuming the top stack item is a Tx Preimage, gets the 32 byte sequence hash
  and places it on the stack on top of the preimage.
  """
  @spec get_sequence_hash(Contract.t()) :: Contract.t()
  def get_sequence_hash(%Contract{} = contract) do
    contract
    |> op_dup()
    |> slice(36, 32)
  end

  @doc """
  Assuming the top stack item is a Tx Preimage, gets the 36 byte outpoint and
  places it on the stack on top of the preimage.
  """
  @spec get_outpoint(Contract.t()) :: Contract.t()
  def get_outpoint(%Contract{} = contract) do
    contract
    |> op_dup()
    |> slice(68, 36)
  end

  @doc """
  Assuming the top stack item is a Tx Preimage, gets the locking script and
  places it on the stack on top of the preimage.

  State can be placed in the locking script and so this becomes an invaluable
  method for extracting and using that state.
  """
  @spec get_script(Contract.t()) :: Contract.t()
  def get_script(%Contract{} = contract) do
    # The script sits between the 104-byte preimage prefix and the fixed
    # 52-byte suffix, preceded by its varint length.
    contract
    |> op_dup()
    |> trim(104)
    |> trim(-52)
    |> trim_varint()
  end

  @doc """
  Assuming the top stack item is a Tx Preimage, gets the input satoshis number
  and places it on the stack on top of the preimage.
  """
  @spec get_satoshis(Contract.t()) :: Contract.t()
  def get_satoshis(%Contract{} = contract) do
    contract
    |> op_dup()
    |> slice(-52, 8)
    |> decode_uint(:little)
  end

  @doc """
  Assuming the top stack item is a Tx Preimage, gets the input sequence number
  and places it on the stack on top of the preimage.
  """
  @spec get_sequence(Contract.t()) :: Contract.t()
  def get_sequence(%Contract{} = contract) do
    contract
    |> op_dup()
    |> slice(-44, 4)
    |> decode_uint(:little)
  end

  @doc """
  Assuming the top stack item is a Tx Preimage, gets the 32 byte outputs hash
  and places it on the stack on top of the preimage.
  """
  @spec get_outputs_hash(Contract.t()) :: Contract.t()
  def get_outputs_hash(%Contract{} = contract) do
    contract
    |> op_dup()
    |> slice(-40, 32)
  end

  @doc """
  Assuming the top stack item is a Tx Preimage, gets the tx locktime number and
  places it on the stack on top of the preimage.
  """
  @spec get_lock_time(Contract.t()) :: Contract.t()
  def get_lock_time(%Contract{} = contract) do
    contract
    |> op_dup()
    |> slice(-8, 4)
    |> decode_uint(:little)
  end

  @doc """
  Assuming the top stack item is a Tx Preimage, gets the preimage sighash type
  and places it on the stack on top of the preimage.
  """
  @spec get_sighash_type(Contract.t()) :: Contract.t()
  def get_sighash_type(%Contract{} = contract) do
    contract
    |> op_dup()
    |> slice(-4, 4)
    |> decode_uint(:little)
  end

  @doc """
  Pushes the corrent Tx Preimage onto the stack. If no context is available in
  the [`contract`](`t:BSV.Contract.t/0`) or if this is called in a locking
  script, then 181 bytes of zeros are pushed onto the script instead.
  """
  @spec push_tx(Contract.t()) :: Contract.t()
  def push_tx(%Contract{ctx: {tx, vin}, subject: %UTXO{txout: txout}} = contract) do
    preimage = Sig.preimage(tx, vin, txout, @sighash_flag)
    push(contract, preimage)
  end

  # 1448 bits = 181 zero bytes, a size-compatible placeholder preimage.
  def push_tx(%Contract{} = contract),
    do: push(contract, <<0::1448>>)

  @doc """
  Assuming the top stack item is a Tx Preimage, creates and verifies a signature
  with `OP_CHECKSIG`.

  The Tx Preimage is removed from the stack and replaced with the result from
  `OP_CHECKSIG`.
  """
  # The opcode sequence below implements in-script ECDSA signing; the order
  # of every step is significant — do not reorder.
  @spec check_tx(Contract.t()) :: Contract.t()
  def check_tx(%Contract{} = contract) do
    contract
    |> op_hash256()
    |> prepare_sighash()
    |> push_order()
    |> div_order()
    |> sighash_msb_is_0_or_255()
    |> op_if(
      fn contract ->
        contract
        |> op_2()
        |> op_pick()
        |> op_add()
      end,
      &op_1add/1
    )
    |> sighash_mod_gt_order()
    |> op_if(&op_sub/1, &op_nip/1)
    |> push_sig()
    |> op_swap()
    |> op_if(&push(&1, @pubkey_a), &push(&1, @pubkey_b))
    |> op_checksig()
  end

  @doc """
  As `check_tx/1` but verifies the signature with `OP_CHECKSIGVERIFY`.
  """
  @spec check_tx!(Contract.t()) :: Contract.t()
  def check_tx!(%Contract{} = contract) do
    # Swap the trailing OP_CHECKSIG emitted by check_tx/1 for the VERIFY form.
    contract = check_tx(contract)
    update_in(contract.script.chunks, & List.replace_at(&1, -1, :OP_CHECKSIGVERIFY))
  end

  # Prepares the sighash and MSB
  defp prepare_sighash(contract) do
    contract
    |> reverse(32)
    |> push(<<0x1F>>)
    |> op_split()
    |> op_tuck()
    |> op_cat()
    |> decode_uint(:little)
  end

  # Pushes the secp256k1 order onto the stack
  defp push_order(contract) do
    contract
    |> push(@order_prefix)
    |> push(<<0>>)
    |> op_15()
    |> op_num2bin()
    |> op_invert()
    |> op_cat()
    |> push(<<0>>)
    |> op_cat()
  end

  # Divides the order by 2
  defp div_order(contract) do
    contract
    |> op_dup()
    |> op_2()
    |> op_div()
  end

  # Is the sighash MSB 0x00 or 0xFF
  defp sighash_msb_is_0_or_255(contract) do
    contract
    |> op_rot()
    |> op_3()
    |> op_roll()
    |> op_dup()
    |> push(<<255>>)
    |> op_equal()
    |> op_swap()
    |> push(<<0>>)
    |> op_equal()
    |> op_boolor()
    |> op_tuck()
  end

  # Is the sighash mod greater than the secp256k1 order
  defp sighash_mod_gt_order(contract) do
    contract
    |> op_3()
    |> op_roll()
    |> op_tuck()
    |> op_mod()
    |> op_dup()
    |> op_4()
    |> op_roll()
    |> op_greaterthan()
  end

  # Constructs and pushes the signature onto the stack
  defp push_sig(contract) do
    contract
    |> push(@sig_prefix)
    |> op_swap()
    |> reverse(32)
    |> op_cat()
    |> push(@sighash_flag)
    |> op_cat()
  end

  @doc """
  Assuming the top stack item is a Tx Preimage, creates and verifies a signature
  with `OP_CHECKSIG`.

  This uses the [optimal OP_PUSH_TX approach](https://xiaohuiliu.medium.com/optimal-op-push-tx-ded54990c76f)
  which compiles to 87 bytes (compared to 438 as per `check_tx/1`).

  However, due to the [Low-S Constraint](https://bitcoin.stackexchange.com/questions/85946/low-s-value-in-bitcoin-signature)
  the most significant byte of the sighash must be less than a theshold of `0x7E`.
  There is a roughly 50% chance the signature being invalid. Therefore, when
  using this technique it is necessary to check the preimage and if necessary
  keep maleating the transaction until it is valid.
  """
  @spec check_tx_opt(Contract.t()) :: Contract.t()
  def check_tx_opt(%Contract{} = contract) do
    contract
    |> op_hash256()
    |> add_1_to_hash()
    |> push_sig_opt()
    |> push(@pubkey_opt)
    |> op_checksig()
  end

  @doc """
  As `check_tx_opt/1` but verifies the signature with `OP_CHECKSIGVERIFY`.
  """
  @spec check_tx_opt!(Contract.t()) :: Contract.t()
  def check_tx_opt!(%Contract{} = contract) do
    # Swap the trailing OP_CHECKSIG emitted by check_tx_opt/1 for the VERIFY form.
    contract = check_tx_opt(contract)
    update_in(contract.script.chunks, & List.replace_at(&1, -1, :OP_CHECKSIGVERIFY))
  end

  # Adds 1 to the sighash MSB
  defp add_1_to_hash(contract) do
    contract
    |> op_1()
    |> op_split()
    |> op_swap()
    |> op_bin2num()
    |> op_1add()
    |> op_swap()
    |> op_cat()
  end

  # Constructs and pushes the signature onto the stack (optimal version)
  defp push_sig_opt(contract) do
    contract
    |> push(@sig_prefix)
    |> op_swap()
    |> op_cat()
    |> push(@sighash_flag)
    |> op_cat()
  end
end
|
lib/bsv/contract/push_tx_helpers.ex
| 0.836388
| 0.563138
|
push_tx_helpers.ex
|
starcoder
|
defmodule Radixir.Crypto.RSAPublicKey do
  @moduledoc false

  defstruct version: nil,
            public_modulus: nil,
            public_exponent: nil

  @type t :: %Radixir.Crypto.RSAPublicKey{
          version: atom,
          public_modulus: integer,
          public_exponent: integer
        }

  # Builds a struct from an Erlang `:RSAPublicKey` record tuple
  # ({:RSAPublicKey, modulus, exponent}).
  # NOTE(review): `version` is never populated here — confirm whether it is
  # needed by callers before removing the field.
  def from_sequence(rsa_key_seq) do
    %Radixir.Crypto.RSAPublicKey{}
    |> struct(
      public_modulus: elem(rsa_key_seq, 1),
      public_exponent: elem(rsa_key_seq, 2)
    )
  end

  # Converts the struct back to the Erlang `:RSAPublicKey` record tuple,
  # returning {:ok, tuple} or {:error, reason} for non-struct input.
  def as_sequence(rsa_public_key) do
    case rsa_public_key do
      %Radixir.Crypto.RSAPublicKey{} ->
        {:ok,
         {
           :RSAPublicKey,
           Map.get(rsa_public_key, :public_modulus),
           Map.get(rsa_public_key, :public_exponent)
         }}

      _ ->
        {:error, "invalid Radixir.Crypto.RSAPublicKey: #{rsa_public_key}"}
    end
  end

  # Returns the hex-encoded digest of the DER-encoded key.
  #
  # Options:
  #   * :digest_type - algorithm passed to :crypto.hash/2 (default :sha256)
  #   * :colons - insert ":" between hex byte pairs (default false)
  #
  # Returns the fingerprint string, or passes through `encode_der/1`'s
  # {:error, _} tuple if encoding fails.
  def get_fingerprint(rsa_public_key = %__MODULE__{}, opts \\ []) do
    # parse opts
    digest_type = Keyword.get(opts, :digest_type, :sha256)
    colons = Keyword.get(opts, :colons, false)

    # encode_der and hash
    with {:ok, der_encoded} <- encode_der(rsa_public_key),
         digest = :crypto.hash(digest_type, der_encoded),
         hex_fp = Base.encode16(digest, case: :lower),
         do: add_fingerprint_colons(hex_fp, colons)
  end

  # DER-encodes the key in SubjectPublicKeyInfo format.
  def encode_der(rsa_public_key = %__MODULE__{}) do
    # hack to encode same defaults as openssl in SubjectPublicKeyInfo format:
    # PEM-encode, strip the armor lines, then base64-decode back to DER.
    with {:ok, key_sequence} <- as_sequence(rsa_public_key) do
      pem_entry = :public_key.pem_entry_encode(:SubjectPublicKeyInfo, key_sequence)

      der_encoded =
        :public_key.pem_encode([pem_entry])
        |> String.trim()
        |> String.split("\n")
        |> Enum.filter(fn line -> !String.contains?(line, "-----") end)
        |> Enum.join("")
        |> Base.decode64!()

      {:ok, der_encoded}
    end
  end

  # Decodes a DER binary into {:ok, %__MODULE__{}} | {:error, :invalid_public_key}.
  # The :format option also supports :RSAPublicKey.
  def decode_der(der_encoded, opts \\ []) do
    # parse opts
    format = Keyword.get(opts, :format, :SubjectPublicKeyInfo)

    # decode and parse
    :public_key.der_decode(format, der_encoded)
    |> from_der_encoded_0()
  end

  # Protocols

  defimpl Inspect do
    import Inspect.Algebra

    @doc """
    Formats the RSAPublicKey and includes the SHA256 fingerprint.

    example:
    ```
    #Radixir.Crypto.RSAPublicKey<
    fingerprint_sha256=7a:40:1c:b9:4b:b8:a5:bb:6b:98:b6:1b:8b:7a:24:8d:45:9b:e5:54
    17:7e:66:26:7e:95:11:9d:39:14:7b:b2>
    ```
    """
    def inspect(data, _opts) do
      # Fix: this previously passed [format: :sha256, colons: true], but
      # get_fingerprint/2 reads the :digest_type key — the digest choice only
      # worked because :sha256 happens to be the default.
      fp_opts = [digest_type: :sha256, colons: true]

      fp_sha256_parts_doc =
        Radixir.Crypto.RSAPublicKey.get_fingerprint(data, fp_opts)
        |> String.split(":")
        |> fold_doc(fn doc, acc -> glue(doc, ":", acc) end)

      fp_sha256_doc =
        glue("fingerprint_sha256=", "", fp_sha256_parts_doc)
        |> group()
        |> nest(2)

      glue("#Radixir.Crypto.RSAPublicKey<", "", fp_sha256_doc)
      |> concat(">")
      |> nest(2)
    end
  end

  # Helpers

  # Inserts ":" between hex byte pairs when requested (and the input is a
  # valid string); otherwise returns the data untouched.
  defp add_fingerprint_colons(data, true) do
    case String.valid?(data) do
      true ->
        String.splitter(data, "", trim: true)
        |> Enum.chunk_every(2)
        |> Enum.map(fn chunk_list ->
          Enum.join(chunk_list, "")
        end)
        |> Enum.join(":")

      false ->
        data
    end
  end

  defp add_fingerprint_colons(data, _false) do
    data
  end

  # SubjectPublicKeyInfo wraps the RSAPublicKey DER; unwrap and recurse.
  def from_der_encoded_0({:SubjectPublicKeyInfo, _, der_key}) do
    with {:RSAPublicKey, pub_mod, pub_exp} <- :public_key.der_decode(:RSAPublicKey, der_key),
         do: from_der_encoded_0({:RSAPublicKey, pub_mod, pub_exp})
  end

  def from_der_encoded_0({:RSAPublicKey, pub_mod, pub_exp}) do
    rsa_pub_key = from_sequence({:RSAPublicKey, pub_mod, pub_exp})
    {:ok, rsa_pub_key}
  end

  def from_der_encoded_0(_other) do
    {:error, :invalid_public_key}
  end
end
|
lib/radixir/crypto/rsa_public_key.ex
| 0.780662
| 0.490236
|
rsa_public_key.ex
|
starcoder
|
defmodule Ueberauth.Strategy do
  @moduledoc """
  The Strategy is the work-horse of the system.

  Strategies are implemented outside this library to meet your needs, the
  strategy provides a consistent API and behaviour.

  Each strategy operates through two phases.

  1. `request phase`
  2. `callback phase`

  These phases can be understood with the following pseudocode.

  ### Request Phase

      request (for the request phase - default /auth/:provider)
      |> relevant_strategy.handle_request!(conn)
      |> continue with request plug pipeline

  The request phase follows normal plug pipeline behaviour. The request will not
  continue if the strategy halted the connection.

  ### Callback Phase

      request (for a callback phase - default /auth/:provider/callback)
      |> relevant_strategy.handle_callback!(conn)
      if connection does not have ueberauth failure
      |> set ueberauth auth with relevant_strategy.auth
      |> cleanup from the strategy with relevant_strategy.handle_cleanup!
      |> continue with plug pipeline

  The callback phase is essentially a decorator and does not usually redirect or
  halt the request. Its result is that one of two cases will end up in your
  connection's assigns when it reaches your controller.

  * On Failure - An `Ueberauth.Failure` struct is available at `:ueberauth_failure`
  * On Success - An `Ueberauth.Auth` struct is available at `:ueberauth_auth`

  ### An example

  The simplest example is an email/password strategy. This does not intercept
  the request and just decorates it with the `Ueberauth.Auth` struct. (it is
  always successful)

      defmodule Ueberauth.Strategies.Identity do
        use Ueberauth.Strategy

        alias Ueberauth.Auth.Credentials
        alias Ueberauth.Auth.Extra

        def uid(conn), do: conn.params["email"]

        def extra(conn), do: struct(Extra, raw_info: conn.params)

        def credentials(conn) do
          %Credentials{
            other: %{
              password: conn.params["password"],
              password_confirmation: conn.params["password_confirmation"]
            }
          }
        end
      end

  After the strategy has run through the `c:handle_callback!/1` function, since
  there are no errors added, Ueberauth will add the constructed auth struct to
  the connection.

  The Auth struct is constructed like:

      def auth(conn) do
        %Auth{
          provider: strategy_name(conn),
          strategy: strategy(conn),
          uid: uid(conn),
          info: info(conn),
          extra: extra(conn),
          credentials: credentials(conn)
        }
      end

  Each component of the struct is a separate function and receives the connection
  object. From this Ueberauth will construct and assign the struct for processing
  in your own controller.

  ### Redirecting during the request phase

  Many strategies may require a redirect (looking at you OAuth). To do this,
  implement the `c:handle_request!/1` function.

      def handle_request!(conn) do
        callback_url = callback_url(conn)
        redirect!(conn, callback_url)
      end

  ### Callback phase

  The callback phase may not do anything other than instruct the strategy where
  to get the information to construct the auth struct. In that case define the
  functions for the components of the struct and fetch the information from the
  connection struct.

  In the case where you do need to take some other step, the `c:handle_callback!/1`
  function is where it's at.

      def handle_callback!(conn) do
        conn
        |> call_external_service_and_assign_result_to_private
      end

      def uid(conn) do
        fetch_from_my_private_area(conn, :username)
      end

      def handle_cleanup!(conn) do
        remove_my_private_area(conn)
      end

  This provides a simplistic pseudocode look at what a callback + cleanup phase
  might look like. By setting the result of your call to the external service in
  the connection's private assigns, you can use that to construct the auth struct
  in the auth component functions. Of course, as a good citizen you also cleanup
  the connection before the request continues.

  ### Cleanup phase

  The cleanup phase is provided for you to be a good citizen and clean up after
  your strategy. During the callback phase, you may need to temporarily store
  information in the private section of the conn struct. Once this is done,
  the cleanup phase exists to cleanup that temporary storage after the strategy
  has everything it needs.

  Implement the `c:handle_cleanup!/1` function and return the cleaned conn struct.

  ### Adding errors during callback

  You have two options when you're in the callback phase. Either you can let the
  connection go through and Ueberauth will construct the auth hash for you, or
  you can add errors.

  You should add errors before you leave your `c:handle_callback!/1` function.

      def handle_callback!(conn) do
        errors = if something_bad, do: [error("error_key", "Some message")], else: []

        if errors == [] do
          conn
        else
          set_errors!(errors)
        end
      end

  Once you've set errors, Ueberauth will not set the auth struct in the connection's
  assigns at `:ueberauth_auth`, instead it will set a `Ueberauth.Failure` struct at
  `:ueberauth_failure` with the information provided detailing the failure.
  """

  alias Ueberauth.Auth
  alias Ueberauth.Auth.Credentials
  alias Ueberauth.Auth.Info
  alias Ueberauth.Auth.Extra

  @doc """
  The request phase implementation for your strategy.

  Setup, redirect or otherwise in here. This is an information gathering phase
  and should provide the end user with a way to provide the information
  required for your application to authenticate them.
  """
  @callback handle_request!(Plug.Conn.t()) :: Plug.Conn.t()

  @doc """
  The callback phase implementation for your strategy.

  In this function you should make any external calls you need, check for
  errors etc. The result of this phase is that either a failure
  (`Ueberauth.Failure`) will be assigned to the connection's assigns at
  `ueberauth_failure` or an `Ueberauth.Auth` struct will be constructed and
  added to the assigns at `:ueberauth_auth`.
  """
  @callback handle_callback!(Plug.Conn.t()) :: Plug.Conn.t()

  @doc """
  The cleanup phase implementation for your strategy.

  The cleanup phase runs after the callback phase and is present to provide a
  mechanism to cleanup any temporary data your strategy may have placed in the
  connection.
  """
  @callback handle_cleanup!(Plug.Conn.t()) :: Plug.Conn.t()

  @doc """
  Provides the uid for the user.

  This is one of the component functions that is used to construct the auth
  struct. What you return here will be in the auth struct at the `uid` key.
  """
  @callback uid(Plug.Conn.t()) :: binary | nil

  @doc """
  Provides the info for the user.

  This is one of the component functions that is used to construct the auth
  struct. What you return here will be in the auth struct at the `info` key.
  """
  @callback info(Plug.Conn.t()) :: Info.t()

  @doc """
  Provides the extra params for the user.

  This is one of the component functions that is used to construct the auth
  struct. What you return here will be in the auth struct at the `extra` key.
  You would include any additional information within extra that does not fit
  in either `info` or `credentials`
  """
  @callback extra(Plug.Conn.t()) :: Extra.t()

  @doc """
  Provides the credentials for the user.

  This is one of the component functions that is used to construct the auth
  struct. What you return here will be in the auth struct at the `credentials`
  key.
  """
  @callback credentials(Plug.Conn.t()) :: Credentials.t()

  @doc """
  When defining your own strategy you should use Ueberauth.Strategy.

  This provides default callbacks for all required callbacks to meet the
  Ueberauth.Strategy behaviour and imports some helper functions found in
  `Ueberauth.Strategy.Helpers`

  ### Imports

  * Ueberauth.Strategy.Helpers
  * Plug.Conn

  ## Default Options

  When using the strategy you can pass a keyword list for default options:

      defmodule MyStrategy do
        use Ueberauth.Strategy, some: "options"
        # …
      end

      MyStrategy.default_options # [ some: "options" ]

  These options are made available to your strategy at `YourStrategy.default_options`.
  On a per usage level, other options can also be passed to the strategy to provide
  customization.
  """
  defmacro __using__(opts \\ []) do
    quote location: :keep do
      @behaviour Ueberauth.Strategy

      import Ueberauth.Strategy.Helpers
      import Plug.Conn, except: [request_url: 1]

      def default_options, do: unquote(opts)

      # Default no-op component implementations. The conn argument is
      # intentionally unused here (prefixed with `_` to avoid compiler
      # warnings in every strategy that does not override them).
      def uid(_conn), do: nil
      def info(_conn), do: %Info{}
      def extra(_conn), do: %Extra{}
      def credentials(_conn), do: %Credentials{}

      # Default phase implementations pass the connection through untouched.
      def handle_request!(conn), do: conn
      def handle_callback!(conn), do: conn
      def handle_cleanup!(conn), do: conn

      # Assembles the Ueberauth.Auth struct from the component functions.
      def auth(conn) do
        struct(
          Auth,
          provider: strategy_name(conn),
          strategy: strategy(conn),
          uid: uid(conn),
          info: info(conn),
          extra: extra(conn),
          credentials: credentials(conn)
        )
      end

      defoverridable uid: 1,
                     info: 1,
                     extra: 1,
                     credentials: 1,
                     handle_request!: 1,
                     handle_callback!: 1,
                     handle_cleanup!: 1
    end
  end

  @doc false
  def run_request(conn, strategy) do
    apply(strategy, :handle_request!, [conn])
  end

  @doc false
  def run_callback(conn, strategy) do
    handled_conn =
      strategy
      |> apply(:handle_callback!, [conn])
      |> handle_callback_result(strategy)

    # Cleanup always runs, even when the callback halted or failed.
    apply(strategy, :handle_cleanup!, [handled_conn])
  end

  # Skip auth construction when the strategy halted the conn, already set a
  # failure, or already set the auth struct itself.
  defp handle_callback_result(%{halted: true} = conn, _), do: conn
  defp handle_callback_result(%{assigns: %{ueberauth_failure: _}} = conn, _), do: conn
  defp handle_callback_result(%{assigns: %{ueberauth_auth: %{}}} = conn, _), do: conn

  defp handle_callback_result(conn, strategy) do
    auth = apply(strategy, :auth, [conn])
    Plug.Conn.assign(conn, :ueberauth_auth, auth)
  end
end
|
lib/ueberauth/strategy.ex
| 0.863017
| 0.548613
|
strategy.ex
|
starcoder
|
defmodule Maru.Validations do
  defmodule Regexp do
    @moduledoc """
    Param Validator: check whether param and regexp matched.
    """

    @doc false
    def validate_param!(attr_name, values, option) when is_list(values) do
      # A list param is validated element by element.
      Enum.map(values, fn value -> validate_param!(attr_name, value, option) end)
    end

    def validate_param!(attr_name, value, option) do
      if to_string(value) =~ option do
        true
      else
        raise Maru.Exceptions.Validation,
          param: attr_name,
          validator: :regexp,
          value: value,
          option: option
      end
    end
  end

  defmodule Values do
    @moduledoc """
    Param Validator: check whether param in list or range.
    """

    @doc false
    def validate_param!(attr_name, value, option) do
      if value in option do
        true
      else
        raise Maru.Exceptions.Validation,
          param: attr_name,
          validator: :values,
          value: value,
          option: option
      end
    end
  end

  defmodule AllowBlank do
    @moduledoc """
    Param Validator: check whether a blank value is illegal.
    """

    @doc false
    def validate_param!(_, _, true), do: true

    def validate_param!(attr_name, value, false) do
      if Maru.Utils.is_blank(value) do
        raise Maru.Exceptions.Validation,
          param: attr_name,
          validator: :allow_blank,
          value: value,
          option: false
      else
        true
      end
    end
  end

  defmodule MutuallyExclusive do
    @moduledoc """
    Param Validator: raise when exclusive params present at the same time.
    """

    @doc false
    def validate!(attr_names, params) do
      present = Enum.count(attr_names, fn name -> params[name] != nil end)

      if present > 1 do
        raise Maru.Exceptions.Validation,
          param: attr_names,
          validator: :mutually_exclusive,
          value: params
      end

      true
    end
  end

  defmodule ExactlyOneOf do
    @moduledoc """
    Param Validator: make sure only one of designated params present.
    """

    @doc false
    def validate!(attr_names, params) do
      present = Enum.count(attr_names, fn name -> params[name] != nil end)

      if present != 1 do
        raise Maru.Exceptions.Validation,
          param: attr_names,
          validator: :exactly_one_of,
          value: params
      end

      true
    end
  end

  defmodule AtLeastOneOf do
    @moduledoc """
    Param Validator: make sure at least one of designated params present.
    """

    @doc false
    def validate!(attr_names, params) do
      present = Enum.count(attr_names, fn name -> params[name] != nil end)

      if present == 0 do
        raise Maru.Exceptions.Validation,
          param: attr_names,
          validator: :at_least_one_of,
          value: params
      end

      true
    end
  end

  defmodule AllOrNoneOf do
    @moduledoc """
    Param Validator: make sure all or none of designated params present.
    """

    @doc false
    def validate!(attr_names, params) do
      present = Enum.count(attr_names, fn name -> params[name] != nil end)

      if present not in [0, length(attr_names)] do
        raise Maru.Exceptions.Validation,
          param: attr_names,
          validator: :all_or_none_of,
          value: params
      end

      true
    end
  end
end
|
lib/maru/validations.ex
| 0.795499
| 0.510924
|
validations.ex
|
starcoder
|
use Croma

defmodule Antikythera.Cron do
  @moduledoc """
  Calculate time schedules based on cron format strings.

  `parse/1` recognizes the [POSIX specifications of crontab format](http://www.unix.com/man-page/posix/1posix/crontab)
  with the extension of "step values" (explained below).
  The parsed object can be used to compute next matching time in `next/2`.
  Note that all times are in UTC, as is the case with `Antikythera.Time`.

  ## Schedule format

  - The cron schedule is specified by 5 fields separated by whitespaces.
  - Allowed values for each field are:
      - minutes      : 0-59
      - hours        : 0-23
      - day of month : 1-31
      - month        : 1-12
      - day of week  : 0-6 (0=Sunday)
  - Multiple elements can be used within a field by separating each by `,`.
  - An element shall be either a number or two numbers separated by a `-` (meaning an inclusive range).
  - A field may contain `*` which stands for "first-last".
  - Step values as in "/<skip>" can be used in conjunction with ranges.
    For example,
      - "0-18/4" is identical to "0,4,8,12,16", and
      - "*/10" in minutes field is identical to "0,10,20,30,40,50".
  - If both 'day of month' and 'day of week' are not "*", then the dates are the ones matching **either** of the fields.
    For example, "30 4 1,15 * 5" indicates both of the followings:
      - 4:30 on the 1st and 15th of each month
      - 4:30 on every Friday
  - Schedules that actually don't represent valid date are not allowed.
    For example, "0 0 31 4 *" is rejected as 31st of April does not exist.
  """

  alias Croma.Result, as: R
  alias Antikythera.{Time, MilliSecondsSinceEpoch}

  # Metaprogramming: generate one submodule per cron field (Antikythera.Cron.Minute,
  # .Hour, ...), each carrying its inclusive bounds and a validator for the
  # parsed representation (`:*` wildcard or a sorted list of in-range integers).
  [
    {Minute , 0, 59},
    {Hour , 0, 23},
    {DayOfMonth, 1, 31},
    {Month , 1, 12},
    {DayOfWeek , 0, 6},
  ] |> Enum.each(fn {mod, min, max} ->
    m = Module.safe_concat(__MODULE__, mod)
    defmodule m do
      defmodule Int do
        use Croma.SubtypeOfInt, min: min, max: max
      end

      defun min() :: Int.t, do: Int.min()
      defun max() :: Int.t, do: Int.max()

      @typedoc "Wildcard `:*` or sorted list of values."
      @type t :: :* | [Int.t]

      # Croma's clause-style `defun`: the body is a set of pattern-match clauses
      # over the single argument `v`.
      defun valid?(v :: term) :: boolean do
        :* -> true
        l when is_list(l) -> Enum.all?(l, &Int.valid?/1)
      end
    end
  end)

  # defmodule using variable name does not automatically make alias
  alias Antikythera.Cron.{Minute, Hour, DayOfMonth, Month, DayOfWeek}

  use Croma.Struct, recursive_new?: true, fields: [
    minute: Minute,
    hour: Hour,
    day_of_month: DayOfMonth,
    month: Month,
    day_of_week: DayOfWeek,
    source: Croma.String,
  ]

  # Same as `parse/1` but raises on malformed or impossible schedules.
  defun parse!(s :: v[String.t]) :: t do
    parse(s) |> R.get!()
  end

  # Parses a 5-field cron string into a validated struct, keeping the raw
  # string in `:source`.
  defun parse(s :: v[String.t]) :: R.t(t) do
    case String.split(s, " ", trim: true) do
      [minute, hour, day_of_month, month, day_of_week] ->
        # Croma.Result monadic block: short-circuits on the first {:error, _}.
        R.m do
          l1 <- parse_field(minute , Minute )
          l2 <- parse_field(hour , Hour )
          l3 <- parse_field(day_of_month, DayOfMonth)
          l4 <- parse_field(month , Month )
          l5 <- parse_field(day_of_week , DayOfWeek )
          if matching_dates_exist?(l3, l4) do
            {:ok, %__MODULE__{minute: l1, hour: l2, day_of_month: l3, month: l4, day_of_week: l5, source: s}}
          else
            {:error, {:invalid_value, [__MODULE__]}}
          end
        end
      _ -> {:error, {:invalid_value, [__MODULE__]}}
    end
  end

  defp matching_dates_exist?(day_of_month, month) do
    # The following combinations of month/day do not exist: 2/30, 2/31, 4/31, 6/31, 9/31, 11/31
    # Cron patterns that only specify those dates are prohibited in order to prevent infinite loops in `next/2`.
    case {day_of_month, month} do
      {[30 | _], [2]} -> false
      {[31] , ms } when is_list(ms) -> !Enum.all?(ms, &(&1 in [2, 4, 6, 9, 11]))
      _ -> true
    end
  end

  # Parses one field into `:*` or a sorted, deduplicated list of integers.
  # Malformed input (bad integers, unexpected shapes) is converted into an
  # error tuple by the rescue clause.
  defp parse_field(s, mod) do
    case s do
      "*" -> {:ok, :*}
      _ ->
        String.split(s, ",")
        |> Enum.map(&parse_element(&1, mod))
        |> R.sequence()
        |> R.map(&(List.flatten(&1) |> Enum.sort() |> Enum.uniq()))
    end
  rescue
    _ in [MatchError, ArgumentError, FunctionClauseError] -> {:error, {:invalid_value, [__MODULE__, mod]}}
  end

  # Parses a single comma-separated element: "*/step", "a-b[/step]" or "n".
  defp parse_element(str, mod) do
    case str do
      "*/" <> step -> {:ok, Enum.take_every(mod.min()..mod.max(), String.to_integer(step))}
      _ ->
        {range, step} = parse_range_and_step(str)
        {first, last} = parse_first_and_last(range)
        cond do
          first < mod.min() -> {:error, {:invalid_value, [__MODULE__, mod]}}
          last > mod.max() -> {:error, {:invalid_value, [__MODULE__, mod]}}
          true -> {:ok, Enum.take_every(first..last, step)}
        end
    end
  end

  defp parse_range_and_step(str) do
    case String.split(str, "/") do
      [r, s] -> {r, String.to_integer(s)}
      [_] -> {str, 1}
    end
  end

  defp parse_first_and_last(range) do
    case String.split(range, "-") do
      [f, l] -> {String.to_integer(f), String.to_integer(l)}
      [_] ->
        i = String.to_integer(range)
        {i, i}
    end
  end

  # Returns the first time strictly after `t` that matches the cron schedule.
  defun next(cron :: v[t], t :: v[Time.t]) :: v[Time.t] do
    # ensure that returned time is larger than the given time `t` by making "1 minute after `t`" as the starting point
    next_impl(cron, beginning_of_next_minute(t))
  end

  defp next_impl(cron, {_, ymd1, {h1, m1, _}, _} = t) do
    ymd2 = find_matching_date(cron, ymd1)
    if ymd2 == ymd1 do
      # no reset, `h1` and `m1` are still valid
      case find_matching_hour_and_minute(cron, h1, m1) do
        {h2, m2} -> {Time, ymd2, {h2, m2, 0}, 0}
        nil ->
          # can't find matching hour and minute in this day; search again from the beginning of the next day
          next_impl(cron, beginning_of_next_day(t))
      end
    else
      # hour and minute are reset to 0, we don't have to worry about carries
      {h2, m2} = find_matching_hour_and_minute(cron, 0, 0)
      {Time, ymd2, {h2, m2, 0}, 0}
    end
  end

  defp beginning_of_next_minute(t), do: Time.truncate_to_minute(t) |> Time.shift_minutes(1)
  defp beginning_of_next_day( t), do: Time.truncate_to_day(t) |> Time.shift_days(1)

  # Per POSIX: when both day-of-month and day-of-week are restricted, a date
  # matching EITHER field matches; hence `min/2` of the two candidates.
  defp find_matching_date(%__MODULE__{day_of_month: dom, day_of_week: dow} = cron, ymd) do
    case {dom, dow} do
      {:*, :*} -> ymd
      {_ , :*} -> find_matching_date_by_day_of_month(cron, ymd)
      {:*, _ } -> find_matching_date_by_day_of_week(cron, ymd)
      {_ , _ } -> min(find_matching_date_by_day_of_month(cron, ymd), find_matching_date_by_day_of_week(cron, ymd))
    end
  end

  defp find_matching_date_by_day_of_month(%__MODULE__{day_of_month: day_of_month} = cron, ymd) do
    {y, m, d1} = find_matching_month(cron, ymd)
    last_day_of_month = :calendar.last_day_of_the_month(y, m)
    case Enum.find(day_of_month, &(&1 >= d1)) do
      d2 when d2 <= last_day_of_month -> {y, m, d2}
      _ ->
        # can't find matching date in this month; search again from the 1st day of the next month
        find_matching_date_by_day_of_month(cron, next_month_1st(y, m))
    end
  end

  defp find_matching_date_by_day_of_week(%__MODULE__{day_of_week: day_of_week} = cron, ymd1) do
    {y, m, d1} = ymd2 = find_matching_month(cron, ymd1)
    dow1 = day_of_the_week(ymd2)
    d2 = d1 + num_days_to_day_of_week(day_of_week, dow1)
    if d2 <= :calendar.last_day_of_the_month(y, m) do
      {y, m, d2}
    else
      # can't find matching date in this month; search again from the 1st day of the next month
      find_matching_date_by_day_of_week(cron, next_month_1st(y, m))
    end
  end

  # Number of days forward (0..6) until the next listed day-of-week,
  # wrapping around the week when necessary.
  defp num_days_to_day_of_week(day_of_week, dow_offset) do
    case Enum.find(day_of_week, &(&1 >= dow_offset)) do
      nil -> hd(day_of_week) + 7 - dow_offset
      dow2 -> dow2 - dow_offset
    end
  end

  defp find_matching_month(%__MODULE__{month: month} = cron, {y, m1, d}) do
    case find_matching_value(month, m1) do
      nil -> find_matching_month(cron, {y + 1, 1, 1})
      ^m1 -> {y, m1, d}
      m2 -> {y, m2, 1}
    end
  end

  # NOTE: `defpt` appears to be an antikythera-specific macro (private function
  # exposed for tests) — confirm against Antikythera's macro definitions.
  defpt day_of_the_week(ymd) do
    case :calendar.day_of_the_week(ymd) do
      7 -> 0 # `:calendar.daynum` type is defined as `1..7`; we need to convert 7 to 0 (which represents sunday)
      dow -> dow
    end
  end

  defp next_month_1st(y, 12), do: {y + 1, 1 , 1}
  defp next_month_1st(y, m ), do: {y , m + 1, 1}

  # Finds the first matching {hour, minute} at or after {h1, m1} within the
  # same day, or nil when the day has no further match.
  defp find_matching_hour_and_minute(%__MODULE__{hour: hour, minute: minute} = cron, h1, m1) do
    case find_matching_value(hour, h1) do
      nil -> nil # can't find matching hour in this day
      ^h1 ->
        # no reset, `m1` is still valid
        case find_matching_value(minute, m1) do
          nil when h1 == 23 -> nil
          nil -> find_matching_hour_and_minute(cron, h1 + 1, 0)
          m2 -> {h1, m2}
        end
      h2 -> {h2, find_matching_value(minute, 0)}
    end
  end

  defp find_matching_value(:*, v), do: v
  defp find_matching_value(l , v), do: Enum.find(l, &(&1 >= v))

  # Convenience wrapper over `next/2` operating on epoch milliseconds.
  defun next_in_epoch_milliseconds(cron :: v[t], t :: v[MilliSecondsSinceEpoch.t]) :: v[MilliSecondsSinceEpoch.t] do
    next(cron, Time.from_epoch_milliseconds(t)) |> Time.to_epoch_milliseconds()
  end
end
|
lib/util/cron.ex
| 0.853699
| 0.536981
|
cron.ex
|
starcoder
|
defmodule Advent2019Web.Day07Controller do
  use Advent2019Web, :controller

  alias Advent2019Web.Day05Controller, as: Day05

  @doc """
  Given a program like the ones defined in day 5 and an input list,
  runs it and returns the output.

  This time the history and the final result are ignored, to make it simpler
  to build more complex structures involving multiple computers. The program
  must run to completion (`:finished`); anything else raises a `MatchError`.
  """
  def run_computing_element(program, input) do
    {_, _, output, _, :finished} = Day05.run_intcode(Day05.list_to_map(program), 0, input, [], [])
    output
  end

  @doc """
  Given a program and a list of lists of initial inputs, runs it with the first
  list of inputs, then calls the program again using the concatenation of the
  second initial input and the output of the first execution.

  This is repeated until all initial inputs are consumed, then the output of
  the last block is returned.
  """
  def run_computing_pipeline(program, [last_input]) do
    # Last stage: its output is the result of the whole pipeline.
    run_computing_element(program, last_input)
  end

  def run_computing_pipeline(program, [this_input, next_input | other_inputs]) do
    # Feed this stage's output into the next stage's input and recurse.
    intermediate_output = run_computing_element(program, this_input)
    run_computing_pipeline(program, [next_input ++ intermediate_output | other_inputs])
  end

  @doc """
  Initializes one independent computer state per input list.

  The state of each step of the loop is initialized separately; from now on
  they evolve independently. When a program hangs, it simply returns its
  current state, the (empty) input list, the output and the position, so that
  the loop can continue and at the next step will fill the input and keep
  running.
  """
  def pipeline_initial_states(program, inputs) do
    for input <- inputs do
      %{
        heap: program,
        position: 0,
        input: input,
        output: [],
        history: []
      }
    end
  end

  @doc """
  Runs the feedback-loop pipeline starting from the first machine with no
  carried-over output.
  """
  def run_pipeline_with_loop(computer_states) do
    run_pipeline_with_loop(computer_states, 0, [])
  end

  def run_pipeline_with_loop(computer_states, index_to_run, output_from_previous_step) do
    state = Enum.at(computer_states, index_to_run)
    IO.puts("Running machine number #{index_to_run}")

    {new_heap, new_position, new_output, new_history, exit_state} =
      Day05.run_intcode(
        state[:heap],
        state[:position],
        state[:input] ++ output_from_previous_step,
        state[:output],
        state[:history]
      )

    # `new_output` is the COMPLETE output of this computer across loops;
    # only the newly produced part is forwarded to the next machine.
    extra_output = new_output -- state[:output]
    IO.puts("Exit state was: #{exit_state}")

    is_last_one = index_to_run == length(computer_states) - 1

    if exit_state == :finished and is_last_one do
      # the last one finished and is successful, this is the end
      extra_output
    else
      # update the states and continue from the next machine, wrapping around
      new_states =
        List.replace_at(computer_states, index_to_run, %{
          heap: new_heap,
          position: new_position,
          # it ended, so the input is either empty or will never be used
          input: [],
          output: new_output,
          history: new_history
        })

      next_index = if is_last_one, do: 0, else: index_to_run + 1
      run_pipeline_with_loop(new_states, next_index, extra_output)
    end
  end

  @doc """
  All permutations of a list.

  More readable version of
  https://stackoverflow.com/questions/33756396/how-can-i-get-permutations-of-a-list
  """
  def permutations([]), do: [[]]

  def permutations(list) do
    # recursively call itself on every element picked from the list and the remaining ones
    for h <- list, t <- permutations(list -- [h]), do: [h | t]
  end

  @doc """
  Turns a phase permutation into per-stage input lists; the first stage also
  receives the initial signal `0`.
  """
  def inputs_from_permutation([initial | others]) do
    [[initial, 0] | Enum.map(others, &[&1])]
  end

  @doc """
  Solves part 1: finds the phase permutation maximizing the pipeline output.
  """
  def solve1(conn, params) do
    program = params["_json"]

    # Evaluate each candidate once; the original re-ran the winning pipeline
    # a second time just to recover its result.
    {best_input, result} =
      [0, 1, 2, 3, 4]
      |> permutations()
      |> Enum.map(fn sol -> {sol, run_computing_pipeline(program, inputs_from_permutation(sol))} end)
      |> Enum.max_by(fn {_sol, output} -> output end)

    json(conn, %{
      result: result,
      best_input: best_input
    })
  end
end
|
lib/advent2019_web/controllers/day07_controller.ex
| 0.605799
| 0.559079
|
day07_controller.ex
|
starcoder
|
defmodule PhoenixMTM.Helpers do
  @moduledoc """
  Provides HTML helpers for Phoenix.
  """

  import Phoenix.HTML, only: [html_escape: 1]
  import Phoenix.HTML.Form, only: [input_name: 2, input_id: 2, hidden_input: 3]

  @doc ~S"""
  Generates a list of checkboxes and labels to update a Phoenix
  many_to_many relationship.

  ## Basic Example

      <%= PhoenixMTM.Helpers.collection_checkboxes f, :tags,
            Enum.map(@tags, &({&1.name, &1.id})),
            selected: Enum.map(f.data.tags, &(&1.id)) %>

  ## Options

    * `:selected` - a list of options that should be pre-selected
    * `:input_opts` - a list of attributes to be applied to each checkbox input
    * `:label_opts` - a list of attributes to be applied to each checkbox label
    * `:wrapper` - a function wrapping each checkbox/label pair in extra HTML;
      receives the pair and must return a `safe` tuple, e.g.
      `wrapper: &Phoenix.HTML.Tag.content_tag(:p, &1)`
    * `:mapper` - a function customizing the HTML structure of each pair;
      receives the form, field name, input options, label text, label options
      and helper options, and must return a `safe` tuple
      (e.g. `mapper: &CustomMappers.bootstrap/6`, commonly built with
      `use PhoenixMTM.Mappers`)

  A hidden input with an empty value is always appended so that unchecking
  every box still submits the parameter.
  """
  def collection_checkboxes(form, field, collection, opts \\ []) do
    field_name = input_name(form, field) <> "[]"
    selected_values = Keyword.get(opts, :selected, [])
    base_input_opts = Keyword.get(opts, :input_opts, [])
    base_label_opts = Keyword.get(opts, :label_opts, [])
    wrapper = Keyword.get(opts, :wrapper, & &1)

    mapper =
      if {:nested, true} in opts do
        # Deprecated legacy flag: force the nested mapper and warn.
        IO.write :stderr, """
        warning: using nested option is deprecated. Use nested mapper instead.
        #{Exception.format_stacktrace}
        """
        &PhoenixMTM.Mappers.nested/6
      else
        Keyword.get(opts, :mapper, &PhoenixMTM.Mappers.unwrapped/6)
      end

    checkbox_tags =
      for {label_content, value} <- collection do
        id = input_id(form, field) <> "_#{value}"

        checkbox_opts =
          base_input_opts
          |> Keyword.put(:type, "checkbox")
          |> Keyword.put(:id, id)
          |> Keyword.put(:name, field_name)
          |> Keyword.put(:value, "#{value}")
          |> mark_checked(selected_values, value)

        pair = mapper.(form, field, checkbox_opts, label_content, base_label_opts ++ [for: id], opts)
        wrapper.(pair)
      end

    html_escape(checkbox_tags ++ hidden_input(form, field, name: field_name, value: ""))
  end

  # Adds `checked: true` when the value is among the pre-selected ones.
  defp mark_checked(input_opts, selected, value) do
    if value in selected do
      Keyword.put(input_opts, :checked, true)
    else
      input_opts
    end
  end
end
|
lib/phoenix_mtm/helpers.ex
| 0.77223
| 0.533094
|
helpers.ex
|
starcoder
|
defmodule TaiShang.Gene.Generator do
  @moduledoc """
  NFT-based gene generator.

  Builds a binary "gene" payload by applying per-slot random rules
  (a base-2 part and a base-10 part) constrained by per-slot limits.
  """

  alias TaiShang.Gene.Rules
  alias Utils.TypeTranslator
  require Logger

  @doc """
  Generate gene by params.

  8 Bit Gene Example:
  ```
  init_rules_base2 = TaiShang.Gene.Rules.gen_init_rule(16, 4, 4) # 16 b = 2 bit
  limits_base2 = TaiShang.Gene.Rules.gen_limit(16, [1,1,1,1,0,0,0,0,1,1,1,1])
  init_rules_base10 = TaiShang.Gene.Rules.gen_init_rule(16, 6, 6)
  limits_base10 = TaiShang.Gene.Rules.gen_limit(6, [20, 30, 40, 50, 60, 70])
  TaiShang.Gene.Generator.generate_gene(init_rules_base2, init_rules_base10, limits_base2, limits_base10)
  ```
  """
  # NOTE(review): the name looks like a typo for `generate_gene/2`; kept as-is
  # since external callers may rely on it. Unpacks the mixed maps and delegates
  # to `generate_gene/4`.
  def generate_gen(rules_mixed, limits_mixed) do
    %{rules_base2: rules_base2, rules_base10: rules_base10} =
      rules_mixed
    %{limits_base2: limits_base2, limits_base10: limits_base10} =
      limits_mixed
    generate_gene(rules_base2, rules_base10, limits_base2, limits_base10)
  end

  # Generates both gene parts, concatenates them into the final binary payload
  # and logs the inputs/result for traceability.
  def generate_gene(init_rules_base2, init_rules_base10, limits_base2, limits_base10) do
    gene_base2 = do_generate_gene(:base2, init_rules_base2, limits_base2)
    gene_base10 = do_generate_gene(:base10, init_rules_base10, limits_base10)
    payload = gene_base2 <> gene_base10
    Logger.info("+------------------------------+")
    Logger.info("base2 rules: #{inspect(init_rules_base2)}")
    Logger.info("limits base2: #{inspect(limits_base2)}")
    Logger.info("base10 rules: #{inspect(init_rules_base10)}")
    Logger.info("limits base10: #{inspect(limits_base10)}")
    Logger.info("rnd gene generated: #{inspect(payload)}")
    payload
  end

  # Applies each rule to its paired limit (rules and limits are positionally
  # zipped), then folds the resulting digit list into a binary.
  def do_generate_gene(type, init_rules, limits) do
    init_rules
    |> Enum.zip(limits)
    |> Enum.map(fn {init_rule, limit} ->
      case type do
        :base2 ->
          Rules.handle_base2_by_rule(limit, init_rule)
        :base10 ->
          Rules.handle_base10_by_rule(limit, init_rule)
      end
    end)
    |> handle_result(type)
  end

  # Base-2 digits are packed via the project's TypeTranslator.
  def handle_result(payload, :base2) do
    TypeTranslator.base2_list_to_bin(payload)
  end

  # Base-10 values are treated as raw bytes.
  def handle_result(payload, :base10) do
    :binary.list_to_bin(payload)
  end
end
|
lib/tai_shang/gene/generator.ex
| 0.77518
| 0.52476
|
generator.ex
|
starcoder
|
defmodule Oban.Plugins.Lifeline do
  @moduledoc """
  Naively transition jobs stuck `executing` back to `available`.

  The `Lifeline` plugin periodically rescues orphaned jobs, i.e. jobs that are stuck in the
  `executing` state because the node was shut down before the job could finish. Rescuing is
  purely based on time, rather than any heuristic about the job's expected execution time or
  whether the node is still alive.

  If an executing job has exhausted all attempts, the Lifeline plugin will mark it `discarded`
  rather than `available`.

  > #### 🌟 DynamicLifeline {: .info}
  >
  > This plugin may transition jobs that are genuinely `executing` and cause duplicate execution.
  > For more accurate rescuing or to rescue jobs that have exhausted retry attempts see the
  > `DynamicLifeline` plugin in [Oban Pro](dynamic_lifeline.html).

  ## Using the Plugin

  Rescue orphaned jobs that are still `executing` after the default of 60 minutes:

      config :my_app, Oban,
        plugins: [Oban.Plugins.Lifeline],
        ...

  Override the default period to rescue orphans after a more aggressive period of 5 minutes:

      config :my_app, Oban,
        plugins: [{Oban.Plugins.Lifeline, rescue_after: :timer.minutes(5)}],
        ...

  ## Options

  * `:interval` — the number of milliseconds between rescue attempts. The default is `60_000ms`.

  * `:rescue_after` — the maximum amount of time, in milliseconds, that a job may execute before
    being rescued. 60 minutes by default, and rescuing is performed once a minute.

  ## Instrumenting with Telemetry

  The `Oban.Plugins.Lifeline` plugin adds the following metadata to the `[:oban, :plugin, :stop]`
  event:

  * `:rescued_count` — the number of jobs transitioned back to `available`

  * `:discarded_count` — the number of jobs transitioned to `discarded`
  """

  @behaviour Oban.Plugin

  use GenServer

  import Ecto.Query, only: [where: 3]

  alias Oban.{Job, Peer, Plugin, Repo, Validation}

  @type option ::
          Plugin.option()
          | {:interval, timeout()}
          | {:rescue_after, pos_integer()}

  defmodule State do
    @moduledoc false

    defstruct [
      :conf,
      :name,
      :timer,
      interval: :timer.minutes(1),
      rescue_after: :timer.minutes(60)
    ]
  end

  @impl Plugin
  @spec start_link([option()]) :: GenServer.on_start()
  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: opts[:name])
  end

  @impl Plugin
  def validate(opts) do
    Validation.validate(opts, fn
      {:conf, _} -> :ok
      {:name, _} -> :ok
      {:interval, interval} -> Validation.validate_integer(:interval, interval)
      {:rescue_after, interval} -> Validation.validate_integer(:rescue_after, interval)
      option -> {:error, "unknown option provided: #{inspect(option)}"}
    end)
  end

  @impl GenServer
  def init(opts) do
    Validation.validate!(opts, &validate/1)

    state =
      State
      |> struct!(opts)
      |> schedule_rescue()

    {:ok, state}
  end

  @impl GenServer
  def terminate(_reason, %State{timer: timer}) do
    if is_reference(timer), do: Process.cancel_timer(timer)

    :ok
  end

  @impl GenServer
  def handle_info(:rescue, %State{} = state) do
    meta = %{conf: state.conf, plugin: __MODULE__}

    :telemetry.span([:oban, :plugin], meta, fn ->
      case check_leadership_and_rescue_jobs(state) do
        {:ok, {rescued_count, discarded_count}} when is_integer(rescued_count) ->
          meta =
            meta
            |> Map.put(:rescued_count, rescued_count)
            |> Map.put(:discarded_count, discarded_count)

          {:ok, meta}

        error ->
          {:error, Map.put(meta, :error, error)}
      end
    end)

    {:noreply, schedule_rescue(state)}
  end

  # Scheduling

  defp schedule_rescue(state) do
    timer = Process.send_after(self(), :rescue, state.interval)

    %{state | timer: timer}
  end

  # Rescuing

  defp check_leadership_and_rescue_jobs(state) do
    if Peer.leader?(state.conf) do
      Repo.transaction(state.conf, fn ->
        time = DateTime.add(DateTime.utc_now(), -state.rescue_after, :millisecond)
        base = where(Job, [j], j.state == "executing" and j.attempted_at < ^time)

        {rescued_count, _} = transition_available(base, state)
        {discard_count, _} = transition_discarded(base, state)

        {rescued_count, discard_count}
      end)
    else
      # FIX: this must mirror the `{:ok, result}` shape that `Repo.transaction/2`
      # returns above. The original `{:ok, 0, 0}` was a 3-tuple that failed the
      # `{:ok, {rescued, discarded}}` match in `handle_info/2`, so every tick on
      # a non-leader node was reported as a telemetry error.
      {:ok, {0, 0}}
    end
  end

  defp transition_available(query, state) do
    Repo.update_all(
      state.conf,
      where(query, [j], j.attempt < j.max_attempts),
      set: [state: "available"]
    )
  end

  defp transition_discarded(query, state) do
    Repo.update_all(state.conf, where(query, [j], j.attempt >= j.max_attempts),
      set: [state: "discarded", discarded_at: DateTime.utc_now()]
    )
  end
end
|
lib/oban/plugins/lifeline.ex
| 0.876456
| 0.643882
|
lifeline.ex
|
starcoder
|
defmodule Nostrum.Struct.Channel do
@moduledoc """
Struct and helper functions for working with channels.
## Channel Struct
The channel struct is used by Nostrum to represent a _Discord Channel Object_. More information can be found on the [Discord API Channel Documentation](https://discord.com/developers/docs/resources/channel#channels-resource).
The struct can have one of several forms depending on the type of channel. You can read more about the individual channel types [below](#module-channel-types).
A typical channel would appear as:
```elixir
%Nostrum.Struct.Channel{
guild_id: 766435015768539156,
id: 827333533688397865,
name: "announcements",
nsfw: false,
permission_overwrites: [],
position: 1,
type: 5,
}
```
The channel struct implements `String.Chars` protocol through the `mention/1` function. This example uses our channel from the previous code block.
```elixir
channel |> to_string()
"<#766435015768539156>"
```
## Channel Cache
The [`ChannelCache`](`Nostrum.Cache.ChannelCache`) module provides functionality for you to retrieve information about any channel that your application can see. It provides two functions: [`get/1`](`Nostrum.Cache.ChannelCache.get/1`) and [`get!/1`](`Nostrum.Cache.ChannelCache.get!/1`).
## Example
```elixir
Nostrum.Cache.ChannelCache.get!(827333533688397865)
%Nostrum.Struct.Channel{
application_id: nil,
bitrate: nil,
guild_id: 766435015768539156,
icon: nil,
id: 827333533688397865,
last_message_id: nil,
last_pin_timestamp: nil,
name: "announcements",
nsfw: false,
owner_id: nil,
parent_id: nil,
permission_overwrites: [
%Nostrum.Struct.Overwrite{
allow: 0,
deny: 2048,
id: 766435015768539156,
type: :role
}
],
position: 1,
recipients: nil,
topic: nil,
type: 5,
user_limit: nil
}
```
More details of the cache can be found at `Nostrum.Cache.ChannelCache`.
## Helper Functions
This module contains two functions for assisting with channel structs. `mention/1` to convert the channel into a mention as a string, and `link/1` to convert the channel into a hyperlink as a string. Further details and examples are detailed in the [Functions section of this module.](#functions)
## Api Functions
The Nostrum Api contains numerous functions related to channels. Notable functions relating to channels are shown below.
- `Nostrum.Api.create_guild_channel/2`
- `Nostrum.Api.get_channel/1`
- `Nostrum.Api.modify_channel/3`
- `Nostrum.Api.delete_channel/2`
- `Nostrum.Api.add_pinned_channel_message/2`
- `Nostrum.Api.create_channel_invite/3`
- `Nostrum.Api.get_guild_channels/1`
- `Nostrum.Api.modify_guild_channel_positions/2`
> Note: This is not an exhaustive list, for full details please see the `Nostrum.Api` module documentation.
## Channel Types
Channels take the shape of various types depending on their use and not all fields are always used. The currently implemented channel types are detailed below. The type of channel is determined by the `:type` field.
This diagram represents the regular channel types `0`, `2`, `5` and `13`.

The currently implemented channel types are:
| |Channel Type | |
|---- |-------------------- |--------------------------------------------------------------- |
|`0` |[`GUILD_TEXT`](`t:guild_text_channel/0`) |_A text channel within a server_ |
|`1` |[`DM`](`t:dm_channel/0`) |_A direct message between users_ |
|`2` |[`GUILD_VOICE`](`t:guild_voice_channel/0`) |_A voice channel within a server_ |
|`3` |[`GROUP_DM`](`t:group_dm_channel/0`) |_A direct message between multiple users_ |
|`4` |[`GUILD_CATEGORY`](`t:guild_category_channel/0`) |_A category that contains up to 50 channels_ |
|`5` |[`GUILD_NEWS`](`t:guild_news_channel/0`) |_A channel that users can follow and crosspost_ |
|`6` |[`GUILD_STORE`](`t:guild_store_channel/0`) |_A channel to sell games on Discord_ |
|`10` |[`GUILD_NEWS_THREAD`](`t:guild_news_thread_channel/0`) |_A temporary sub-channel within a news channel_ |
|`11` |[`GUILD_PUBLIC_THREAD`](`t:guild_public_thread_channel/0`) |_A temporary sub-channel within a text channel_ |
|`12` |[`GUILD_PRIVATE_THREAD`](`t:guild_private_thread_channel/0`) |_A temporary private sub-channel within a text channel_ |
|`13` |[`GUILD_STAGE_VOICE`](`t:guild_stage_voice_channel/0`) |_A voice channel for hosting events with an audience_ |
More information about _Discord Channel Types_ can be found on the [Discord API Channel Type Documentation](https://discord.com/developers/docs/resources/channel#channel-object-channel-types).
"""
# Lets a channel be interpolated directly into a string as its mention,
# e.g. "#{channel}" produces "<#766435015768539156>".
defimpl String.Chars do
  @spec to_string(Nostrum.Struct.Channel.t()) :: String.t()
  def to_string(channel), do: @for.mention(channel)
end
alias Nostrum.Struct.{Channel, Guild, Message, Overwrite, User}
alias Nostrum.{Snowflake, Util}
defstruct [
:id,
:type,
:guild_id,
:position,
:permission_overwrites,
:name,
:topic,
:nsfw,
:last_message_id,
:bitrate,
:user_limit,
:rate_limit_per_user,
:recipients,
:icon,
:owner_id,
:application_id,
:parent_id,
:last_pin_timestamp,
:rtc_region,
:video_quality_mode,
:message_count,
:member_count,
:thread_metadata,
:member,
:default_auto_archive_duration,
:permissions
]
@typedoc """
The id of the channel object.
"""
@type id :: Snowflake.t()
@typedoc """
The type of channel.
More information about _Discord Channel Types_ can be found under the [`types`](#module-channel-types) on the [Discord API Channel Type Documentation](https://discord.com/developers/docs/resources/channel#channel-object-channel-types).
"""
@type type :: integer()
@typedoc """
The id of the guild the channel is located in.
"""
@type guild_id :: Guild.id()
@typedoc """
The position of the channel in the sidebar of the guild.
"""
@type position :: integer()
@typedoc """
A list of permission overwrites applied to the channel.
"""
@type permission_overwrites :: [Overwrite.t()]
@typedoc """
The name of the channel.
"""
@type name :: String.t()
@typedoc """
The topic of the channel.
"""
@type topic :: String.t()
@typedoc """
Whether the NSFW setting is enabled for this channel.
"""
@type nsfw :: boolean()
@typedoc """
The id of the last message sent in the channel.
"""
@type last_message_id :: Message.id() | nil
@typedoc """
The bitrate of the voice channel.
"""
@type bitrate :: integer()
@typedoc """
The users rate limit.
Amount of seconds a user has to wait before sending another message (0-21600); bots, as well as users with the permission manage_messages or manage_channel, are unaffected
"""
@typedoc since: "0.5.0"
@type rate_limit_per_user :: integer() | nil
@typedoc """
The user limit of a voice channel.
"""
@type user_limit :: integer()
@typedoc """
A list of users in a group DM.
"""
@type recipients :: [User.t()]
@typedoc """
The hash of the channels icon.
"""
@type icon :: String.t() | nil
@typedoc """
The id of the user of a group direct message or thread.
This applies to user created channels.
"""
@type owner_id :: User.id()
@typedoc """
The id of the application that created a group direct message or thread.
This applies to bot created channels.
"""
@type application_id :: Snowflake.t() | nil
@typedoc """
The id of the parent channel that this channel is located under.
For threads, that is the channel that contains the thread. For regular channels, it is the category that the channel is located under.
"""
@type parent_id :: Channel.id() | nil
@typedoc """
Timestamp for the last pinned message.
"""
@type last_pin_timestamp :: DateTime.t() | nil
@typedoc """
Region id for the channel.
More information about _region ids_ can be found on the [Discord API Voice Region Object Documentation](https://discord.com/developers/docs/resources/voice#voice-region-object).
"""
@typedoc since: "0.5.0"
@type rtc_region :: String.t() | nil
@typedoc """
The video quality mode of the channel.
More information about _video quality modes_ can be found on the [Discord API Video Quality Mode Documentation](https://discord.com/developers/docs/resources/channel#channel-object-video-quality-modes).
"""
@typedoc since: "0.5.0"
@type video_quality_mode :: integer() | nil
@typedoc """
Approximate count of messages in a thread, capped at 50.
"""
@typedoc since: "0.5.0"
@type message_count :: integer() | nil
@typedoc """
Approximate count of members in a thread, capped at 50.
"""
@typedoc since: "0.5.0"
@type member_count :: integer() | nil
@typedoc """
The video quality mode of the voice channel.
More information about _video quality modes_ can be found on the [Discord API Video Quality Mode Documentation](https://discord.com/developers/docs/resources/channel#channel-object-video-quality-modes).
"""
@typedoc since: "0.5.0"
@type thread_metadata :: %{
archived: archived,
auto_archive_duration: auto_archive_duration,
archive_timestamp: archive_timestamp,
locked: boolean()
}
@typedoc """
When the thread was archived.
"""
@typedoc since: "0.5.0"
@type archive_timestamp :: DateTime.t() | nil
@typedoc """
The threads locked status.
"""
@typedoc since: "0.5.0"
@type locked :: boolean()
@typedoc """
The threads archived status.
"""
@typedoc since: "0.5.0"
@type archived :: boolean()
@typedoc """
Archive duration for the thread in minutes.
- 60, 1 hour
- 1440, 24 hours
- 4320, 3 days
- 10080, 7 days
"""
@typedoc since: "0.5.0"
@type auto_archive_duration :: integer()
@typedoc """
Present when the bot joins a thread.
Note: This is omitted on threads that the bot can immediately access on `:GUILD_CREATE` events received.
"""
@typedoc since: "0.5.0"
@type member :: %{
id: id,
user_id: user_id,
join_timestamp: join_timestamp,
flags: flags
}
@typedoc """
User id of the threads creator.
"""
@typedoc since: "0.5.0"
@type user_id :: Snowflake.t() | nil
@typedoc """
When the user joined the thread.
"""
@typedoc since: "0.5.0"
@type join_timestamp :: DateTime.t()
@typedoc """
User thread settings, currently only used for notifications.
"""
@typedoc since: "0.5.0"
@type flags :: integer()
@typedoc """
Default duration for newly created threads in minutes.
- 60, 1 hour
- 1440, 24 hours
- 4320, 3 days
- 10080, 7 days
"""
@typedoc since: "0.5.0"
@type default_auto_archive_duration :: integer()
@typedoc """
Computed permissions of the invoking user.
Permissions for the invoking user in the channel, including overwrites, only included when part of the resolved data received on a slash command interaction
"""
@typedoc since: "0.5.0"
@type permissions :: String.t()
@typedoc """
Type 0 partial channel object representing a text channel within a guild.
"""
@type guild_text_channel :: %__MODULE__{
id: id,
guild_id: guild_id,
name: name,
type: type,
position: position,
permission_overwrites: permission_overwrites,
rate_limit_per_user: rate_limit_per_user,
nsfw: nsfw,
topic: topic,
last_message_id: last_message_id,
parent_id: parent_id,
default_auto_archive_duration: default_auto_archive_duration
}
@typedoc """
Type 1 partial channel object representing a direct message.
"""
@type dm_channel :: %__MODULE__{
id: id,
type: 1,
last_message_id: last_message_id,
recipients: recipients,
last_pin_timestamp: last_pin_timestamp
}
@typedoc """
Type 2 partial channel object representing an audio channel within a guild.
"""
@type guild_voice_channel :: %__MODULE__{
id: id,
type: 2,
guild_id: guild_id,
position: position,
permission_overwrites: permission_overwrites,
name: name,
nsfw: nsfw,
bitrate: bitrate,
user_limit: user_limit,
parent_id: parent_id,
rtc_region: rtc_region
}
@typedoc """
Type 3 partial channel object representing a group direct message.
"""
@type group_dm_channel :: %__MODULE__{
id: id,
type: 3,
name: name,
last_message_id: last_message_id,
recipients: recipients,
icon: icon,
owner_id: owner_id,
application_id: application_id
}
@typedoc """
Type 4 partial channel object representing a channel category.
> Note: Other channels `parent_id` field refers to this type of object.
"""
@type guild_category_channel :: %__MODULE__{
id: id,
type: 4,
guild_id: guild_id,
position: position,
permission_overwrites: permission_overwrites,
name: name,
nsfw: nsfw,
parent_id: parent_id
}
@typedoc """
Type 5 partial channel object representing a news channel.
"""
@typedoc since: "0.5.0"
@type guild_news_channel :: %__MODULE__{
id: id,
type: 5,
guild_id: guild_id,
name: name,
position: position,
permission_overwrites: permission_overwrites,
nsfw: nsfw,
topic: topic,
last_message_id: last_message_id,
parent_id: parent_id,
default_auto_archive_duration: default_auto_archive_duration
}
@typedoc """
Type 6 partial channel object representing a store channel.
"""
@typedoc since: "0.5.0"
@type guild_store_channel :: %__MODULE__{
id: id,
guild_id: guild_id,
name: name,
type: type,
position: position,
permission_overwrites: permission_overwrites,
nsfw: nsfw,
parent_id: parent_id
}
@typedoc """
Type 10 partial channel object representing a news thread.
"""
@typedoc since: "0.5.0"
@type guild_news_thread_channel :: %__MODULE__{
id: id,
guild_id: guild_id,
parent_id: parent_id,
owner_id: owner_id,
name: name,
type: type,
last_message_id: last_message_id,
message_count: message_count,
member_count: member_count,
rate_limit_per_user: rate_limit_per_user,
thread_metadata: thread_metadata
}
@typedoc """
Type 11 partial channel object representing a standard thread.
"""
@typedoc since: "0.5.0"
@type guild_public_thread_channel :: %__MODULE__{
id: id,
guild_id: guild_id,
parent_id: parent_id,
owner_id: owner_id,
name: name,
type: type,
last_message_id: last_message_id,
message_count: message_count,
member_count: member_count,
rate_limit_per_user: rate_limit_per_user,
thread_metadata: thread_metadata
}
@typedoc """
Type 12 partial channel object representing a private thread.
"""
@typedoc since: "0.5.0"
@type guild_private_thread_channel :: %__MODULE__{
id: id,
guild_id: guild_id,
parent_id: parent_id,
owner_id: owner_id,
name: name,
type: type,
last_message_id: last_message_id,
message_count: message_count,
member_count: member_count,
rate_limit_per_user: rate_limit_per_user,
thread_metadata: thread_metadata
}
@typedoc """
Type 13 channel object representing a stage channel.
"""
@typedoc since: "0.5.0"
@type guild_stage_voice_channel :: %__MODULE__{
id: id,
guild_id: guild_id,
parent_id: parent_id,
owner_id: owner_id,
name: name,
type: type,
last_message_id: last_message_id,
message_count: message_count,
member_count: member_count,
rate_limit_per_user: rate_limit_per_user,
thread_metadata: thread_metadata
}
@typedoc """
A partial channel object representing a channel mention.
More information about the _Discord Channel Mention Object_ can be found at the [Discord API Channel Mention Object
Documentation](https://discord.com/developers/docs/resources/channel#channel-mention-object).
"""
@type channel_mention :: %__MODULE__{
id: id,
guild_id: guild_id,
type: type,
name: name
}
@typedoc """
Guild channel types
"""
@typedoc deprecated: "See t.0"
@type guild_channel ::
guild_text_channel
| guild_voice_channel
| guild_category_channel
@typedoc """
All valid text channels.
"""
@typedoc deprecated: "See t.0"
@type text_channel ::
guild_text_channel
| dm_channel
| group_dm_channel
@typedoc """
A `Nostrum.Struct.Channel` that represents a voice channel
"""
@typedoc deprecated: "See t:guild_voice_channel/0"
@type voice_channel :: guild_voice_channel
@typedoc """
All valid channel types.
"""
@type t ::
guild_text_channel
| dm_channel
| guild_voice_channel
| group_dm_channel
| guild_category_channel
| guild_news_channel
| guild_store_channel
| guild_news_thread_channel
| guild_public_thread_channel
| guild_private_thread_channel
| guild_stage_voice_channel
@doc """
Convert a channel into a mention.
Handles the conversion of a `Nostrum.Struct.Channel` into the required format to _mention_ the channel within a message. Mentioning the channel will provide a clickable link to take the user to the channel.
## Parameters
- channel: `t:Nostrum.Struct.Channel.t/0`
## Examples
```elixir
Nostrum.Cache.ChannelCache.get(381889573426429952)
|> Nostrum.Struct.Channel.mention()
"<#381889573426429952>"
```
"""
@spec mention(t) :: String.t()
def mention(%__MODULE__{id: id}), do: "<##{id}>"
@doc false
# Builds a Channel struct from a raw map (string- or atom-keyed) as received
# from the gateway/API.
def to_struct(map) do
  new =
    map
    # Normalize keys to atoms so they line up with the struct fields.
    |> Map.new(fn {k, v} -> {Util.maybe_to_atom(k), v} end)
    # Cast id-like fields to snowflakes, nested maps to their structs, and
    # timestamp fields via Util.maybe_to_datetime/1. Note Map.update/4 puts
    # the nil default for absent keys, which matches the struct defaults.
    |> Map.update(:id, nil, &Util.cast(&1, Snowflake))
    |> Map.update(:guild_id, nil, &Util.cast(&1, Snowflake))
    |> Map.update(:permission_overwrites, nil, &Util.cast(&1, {:list, {:struct, Overwrite}}))
    |> Map.update(:last_message_id, nil, &Util.cast(&1, Snowflake))
    |> Map.update(:recipients, nil, &Util.cast(&1, {:list, {:struct, User}}))
    |> Map.update(:owner_id, nil, &Util.cast(&1, Snowflake))
    |> Map.update(:application_id, nil, &Util.cast(&1, Snowflake))
    |> Map.update(:parent_id, nil, &Util.cast(&1, Snowflake))
    |> Map.update(:last_pin_timestamp, nil, &Util.maybe_to_datetime/1)
    |> Map.update(:archive_timestamp, nil, &Util.maybe_to_datetime/1)
    |> Map.update(:join_timestamp, nil, &Util.maybe_to_datetime/1)

  struct(__MODULE__, new)
end
end
|
lib/nostrum/struct/channel.ex
| 0.926827
| 0.848408
|
channel.ex
|
starcoder
|
defmodule HTTPDate.Formatter do
  @moduledoc false

  # Three-letter month abbreviations, indexed by calendar month number.
  defp integer_to_month(1), do: "Jan"
  defp integer_to_month(2), do: "Feb"
  defp integer_to_month(3), do: "Mar"
  defp integer_to_month(4), do: "Apr"
  defp integer_to_month(5), do: "May"
  defp integer_to_month(6), do: "Jun"
  defp integer_to_month(7), do: "Jul"
  defp integer_to_month(8), do: "Aug"
  defp integer_to_month(9), do: "Sep"
  defp integer_to_month(10), do: "Oct"
  defp integer_to_month(11), do: "Nov"
  defp integer_to_month(12), do: "Dec"

  # Three-letter weekday abbreviations, ISO numbering (1 = Monday).
  defp integer_to_short_weekday(1), do: "Mon"
  defp integer_to_short_weekday(2), do: "Tue"
  defp integer_to_short_weekday(3), do: "Wed"
  defp integer_to_short_weekday(4), do: "Thu"
  defp integer_to_short_weekday(5), do: "Fri"
  defp integer_to_short_weekday(6), do: "Sat"
  defp integer_to_short_weekday(7), do: "Sun"

  # Full weekday names, ISO numbering (1 = Monday).
  defp integer_to_weekday(1), do: "Monday"
  defp integer_to_weekday(2), do: "Tuesday"
  defp integer_to_weekday(3), do: "Wednesday"
  defp integer_to_weekday(4), do: "Thursday"
  defp integer_to_weekday(5), do: "Friday"
  defp integer_to_weekday(6), do: "Saturday"
  defp integer_to_weekday(7), do: "Sunday"

  # Left-pads the decimal form of `integer` to `length` chars with `padchr`.
  defp pad_integer(integer, length, padchr), do: to_string(integer) |> String.pad_leading(length, padchr)

  @doc false
  # Formats a datetime-like struct (fields :year/:month/:day/:hour/:minute/
  # :second plus :calendar) as an HTTP date string in one of three formats:
  # :imf_fixdate (RFC 7231 preferred), :rfc850, or :asctime.
  #
  # Fix: the weekday is computed with Date.day_of_week/1 instead of calling
  # `date.calendar.day_of_week(year, month, day)` directly — that 3-arity
  # Calendar callback was replaced by a 4-arity version in Elixir 1.11, so
  # the direct call raises UndefinedFunctionError on modern releases.
  def format_date(date, format), do: format_date(date, format, Date.day_of_week(date)) |> IO.iodata_to_binary

  # IMF-fixdate, e.g. "Sun, 06 Nov 1994 08:49:37 GMT".
  defp format_date(date, :imf_fixdate, weekday) do
    [
      integer_to_short_weekday(weekday),
      ", ",
      pad_integer(date.day, 2, "0"),
      " ",
      integer_to_month(date.month),
      " ",
      pad_integer(date.year, 4, "0"),
      " ",
      pad_integer(date.hour, 2, "0"),
      ":",
      pad_integer(date.minute, 2, "0"),
      ":",
      pad_integer(date.second, 2, "0"),
      " GMT"
    ]
  end

  # RFC 850, e.g. "Sunday, 06-Nov-94 08:49:37 GMT" (two-digit year).
  defp format_date(date, :rfc850, weekday) do
    [
      integer_to_weekday(weekday),
      ", ",
      pad_integer(date.day, 2, "0"),
      "-",
      integer_to_month(date.month),
      "-",
      # Two-digit year: pad, then keep only the last two characters.
      pad_integer(date.year, 2, "0") |> String.slice(-2, 2),
      " ",
      pad_integer(date.hour, 2, "0"),
      ":",
      pad_integer(date.minute, 2, "0"),
      ":",
      pad_integer(date.second, 2, "0"),
      " GMT"
    ]
  end

  # asctime(), e.g. "Sun Nov  6 08:49:37 1994" (day is space-padded).
  defp format_date(date, :asctime, weekday) do
    [
      integer_to_short_weekday(weekday),
      " ",
      integer_to_month(date.month),
      " ",
      pad_integer(date.day, 2, " "),
      " ",
      pad_integer(date.hour, 2, "0"),
      ":",
      pad_integer(date.minute, 2, "0"),
      ":",
      pad_integer(date.second, 2, "0"),
      " ",
      pad_integer(date.year, 4, "0")
    ]
  end
end
|
lib/http_date/formatter.ex
| 0.638948
| 0.539954
|
formatter.ex
|
starcoder
|
defmodule Confex do
@moduledoc """
Confex simplifies reading configuration at run-time with adapter-based system for resolvers.
# Configuration tuples
Whenever there is a configuration that should be resolved at run-time you need to replace it's value in `config.exs`
by Confex configuration type. Common structure:
```elixir
@type fetch_statement :: {adapter :: atom() | module(), value_type :: value_type, key :: String.t, default :: any()}
| {value_type :: value_type, key :: String.t}
| {key :: String.t, default :: any()}
| {key :: String.t}
```
If `value_type` is set, Confex will automatically cast it's value. Otherwise, default type of `:string` is used.
| Confex Type | Elixir Type | Description |
| ----------- | ----------------- | ----------- |
| `:string` | `String.t` | Default. |
| `:integer` | `Integer.t` | Parse Integer value in string. |
| `:float` | `Float.t` | Parse Float value in string. |
| `:boolean` | `true` or `false` | Cast 'true', '1', 'yes' to `true`; 'false', '0', 'no' to `false`. |
| `:atom` | `atom()` | Cast string to atom. |
| `:module` | `module()` | Cast string to module name. |
| `:list` | `List.t` | Cast comma-separated string (`1,2,3`) to list (`[1, 2, 3]`). |
| `:charlist` | `charlist()` | Cast string to charlist. |
# Custom type casting
You can use your own casting function by replacing type with `{module, function, arguments}` tuple,
Confex will call that function with `apply(module, function, [value] ++ arguments)`.
This function returns either `{:ok, value}` or `{:error, reason :: String.t}` tuple.
# Adapters
* `:system` - read configuration from system environment;
* `:system_file` - read file path from system environment and read configuration from this file.
You can create adapter by implementing `Confex.Adapter` behaviour with your own logic.
# Examples
* `var` - any bare values will be left as-is;
* `{:system, "ENV_NAME", default}` - read string from "ENV_NAME" environment variable or return `default` \
if it's not set or has empty value;
* `{:system, "ENV_NAME"}` - same as above, with default value `nil`;
* `{:system, :integer, "ENV_NAME", default}` - read string from "ENV_NAME" environment variable and cast it \
to integer or return `default` if it's not set or has empty value;
* `{:system, :integer, "ENV_NAME"}` - same as `{:system, :integer, "ENV_NAME", nil}`;
* `{{:via, MyAdapter}, :string, "ENV_NAME", default}` - read value by key "ENV_NAME" via adapter `MyAdapter` \
or return `default` if it's not set or has empty value;
* `{{:via, MyAdapter}, :string, "ENV_NAME"}` - same as above, with default value `nil`;
* `{:system, {MyApp.MyType, :cast, [:foo]}, "ENV_NAME"}` - `MyApp.MyType.cast(value, :foo)` call would be made \
to resolve environment variable value.
"""
alias Confex.Resolver
@typep app :: Application.app()
@typep key :: Application.key()
@typep value :: Application.value()
@type configuration_tuple ::
{value_type :: Confex.Type.t(), key :: String.t(), default :: any()}
| {value_type :: Confex.Type.t(), key :: String.t()}
| {key :: String.t(), default :: any()}
| {key :: String.t()}
@doc """
Returns the value for key in app’s environment in a tuple.
This function mimics `Application.fetch_env/2` function.
If the configuration parameter does not exist or can not be parsed, the function returns :error.
## Example
iex> :ok = System.put_env("MY_TEST_ENV", "foo")
...> Application.put_env(:myapp, :test_var, {:system, "MY_TEST_ENV"})
...> {:ok, "foo"} = #{__MODULE__}.fetch_env(:myapp, :test_var)
{:ok, "foo"}
iex> :ok = System.delete_env("MY_TEST_ENV")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV", "bar"})
...> {:ok, "bar"} = #{__MODULE__}.fetch_env(:myapp, :test_var)
{:ok, "bar"}
iex> :ok = System.delete_env("MY_TEST_ENV")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV"})
...> :error = #{__MODULE__}.fetch_env(:myapp, :test_var)
:error
iex> :ok = System.put_env("MY_TEST_ENV", "foo")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV"})
...> :error = #{__MODULE__}.fetch_env(:myapp, :test_var)
:error
iex> Application.put_env(:myapp, :test_var, 1)
...> {:ok, 1} = #{__MODULE__}.fetch_env(:myapp, :test_var)
{:ok, 1}
"""
@spec fetch_env(app :: app(), key :: key()) :: {:ok, value()} | :error
def fetch_env(app, key) do
  # Look the raw value up first, then run it through the resolver; any
  # failure in either step collapses to a bare :error.
  case Application.fetch_env(app, key) do
    {:ok, config} ->
      case Resolver.resolve(config) do
        {:ok, resolved} -> {:ok, resolved}
        {:error, _reason} -> :error
      end

    :error ->
      :error
  end
end
@doc """
Returns the value for key in app’s environment.
This function mimics `Application.fetch_env!/2` function.
If the configuration parameter does not exist or can not be parsed, raises `ArgumentError`.
## Example
iex> :ok = System.put_env("MY_TEST_ENV", "foo")
...> Application.put_env(:myapp, :test_var, {:system, "MY_TEST_ENV"})
...> "foo" = #{__MODULE__}.fetch_env!(:myapp, :test_var)
"foo"
iex> :ok = System.delete_env("MY_TEST_ENV")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV", "bar"})
...> "bar" = #{__MODULE__}.fetch_env!(:myapp, :test_var)
"bar"
iex> :ok = System.delete_env("MY_TEST_ENV")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV"})
...> #{__MODULE__}.fetch_env!(:myapp, :test_var)
** (ArgumentError) can't fetch value for key `:test_var` of application `:myapp`, can not resolve key MY_TEST_ENV value via adapter Elixir.Confex.Adapters.SystemEnvironment
iex> :ok = System.put_env("MY_TEST_ENV", "foo")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV"})
...> #{__MODULE__}.fetch_env!(:myapp, :test_var)
** (ArgumentError) can't fetch value for key `:test_var` of application `:myapp`, can not cast "foo" to Integer
iex> Application.put_env(:myapp, :test_var, 1)
...> 1 = #{__MODULE__}.fetch_env!(:myapp, :test_var)
1
"""
@spec fetch_env!(app :: app(), key :: key()) :: value() | no_return
def fetch_env!(app, key) do
  # Application.fetch_env!/2 raises when the key is missing; resolution
  # failures are converted into an ArgumentError with the resolver message.
  with {:ok, resolved} <- app |> Application.fetch_env!(key) |> Resolver.resolve() do
    resolved
  else
    {:error, {_reason, message}} ->
      raise ArgumentError, "can't fetch value for key `#{inspect(key)}` of application `#{inspect(app)}`, #{message}"
  end
end
@doc """
Returns the value for key in app’s environment.
This function mimics `Application.get_env/2` function.
If the configuration parameter does not exist or can not be parsed, returns default value or `nil`.
## Example
iex> :ok = System.put_env("MY_TEST_ENV", "foo")
...> Application.put_env(:myapp, :test_var, {:system, "MY_TEST_ENV"})
...> "foo" = #{__MODULE__}.get_env(:myapp, :test_var)
"foo"
iex> :ok = System.delete_env("MY_TEST_ENV")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV", "bar"})
...> "bar" = #{__MODULE__}.get_env(:myapp, :test_var)
"bar"
iex> :ok = System.delete_env("MY_TEST_ENV")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV"})
...> nil = #{__MODULE__}.get_env(:myapp, :test_var)
nil
iex> :ok = System.delete_env("MY_TEST_ENV")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV"})
...> "baz" = #{__MODULE__}.get_env(:myapp, :test_var, "baz")
"baz"
iex> :ok = System.put_env("MY_TEST_ENV", "foo")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV"})
...> nil = #{__MODULE__}.get_env(:myapp, :test_var)
nil
iex> nil = #{__MODULE__}.get_env(:myapp, :does_not_exist)
nil
iex> Application.put_env(:myapp, :test_var, 1)
...> 1 = #{__MODULE__}.get_env(:myapp, :test_var)
1
"""
@spec get_env(app :: app(), key :: key(), default :: value()) :: value()
def get_env(app, key, default \\ nil) do
  # Delegates to fetch_env/2, which already collapses both "key missing"
  # and "resolution failed" into :error — exactly the cases that should
  # fall back to the default.
  case fetch_env(app, key) do
    {:ok, config} -> config
    :error -> default
  end
end
@doc """
Reads all key-value pairs from an application environment and replaces them
with resolved values.
# Example
iex> :ok = System.put_env("MY_TEST_ENV", "foo")
...> Application.put_env(:myapp, :test_var, {:system, "MY_TEST_ENV"})
...> Confex.resolve_env!(:myapp)
...> "foo" = Application.get_env(:myapp, :test_var)
"foo"
iex> :ok = System.put_env("MY_TEST_ENV", "foo")
...> Application.put_env(:myapp, :test_var, {:system, :integer, "MY_TEST_ENV"})
...> Confex.resolve_env!(:myapp)
** (ArgumentError) can't fetch value for key `:test_var` of application `:myapp`, can not cast "foo" to Integer
The optional `opts` argument is passed through to the internal call to
`Application.put_env/4` and may be used to set the `timeout` and/or
`persistent` parameters.
*Warning!* Do not use this function if you want to change your environment
while VM is running. All `{:system, _}` tuples would be replaced with actual values.
"""
@spec resolve_env!(app :: app(), opts :: Keyword.t()) :: [{key(), value()}] | no_return
def resolve_env!(app, opts \\ []) do
  # Resolve every key in the app's environment, writing resolved values back.
  app
  |> Application.get_all_env()
  |> Enum.map(&resolve_and_update_env(app, &1, opts))
end

# Resolves a single {key, config} pair and persists the resolved value back
# into the application environment; raises ArgumentError on failure.
defp resolve_and_update_env(app, {key, config}, opts) do
  case Resolver.resolve(config) do
    {:ok, config} ->
      # `opts` may carry :timeout / :persistent for Application.put_env/4.
      :ok = Application.put_env(app, key, config, opts)
      {key, config}

    {:error, {_reason, message}} ->
      raise ArgumentError, "can't fetch value for key `#{inspect(key)}` of application `#{inspect(app)}`, " <> message
  end
end
@doc """
Recursively merges configuration with default values.
Both values must be either in `Keyword` or `Map` structures, otherwise ArgumentError is raised.
## Example
iex> [b: 3, a: 1] = #{__MODULE__}.merge_configs!([a: 1], [a: 2, b: 3])
[b: 3, a: 1]
iex> %{a: 1, b: 3} = #{__MODULE__}.merge_configs!(%{a: 1}, %{a: 2, b: 3})
%{a: 1, b: 3}
iex> #{__MODULE__}.merge_configs!(%{a: 1}, [b: 2])
** (ArgumentError) can not merge default values [b: 2] with configuration %{a: 1} because their types mismatch, \
expected both to be either Map or Keyword structures
"""
@spec merge_configs!(config :: Keyword.t() | map, defaults :: Keyword.t() | map) :: Keyword.t() | map
# No defaults to merge: return the configuration unchanged.
def merge_configs!(config, []), do: config
# No configuration present: fall back to the defaults as-is.
def merge_configs!(nil, defaults), do: defaults

def merge_configs!(config, defaults) do
  # Merge only homogeneous pairs (both Keyword or both Map), resolving any
  # Confex tuples in the merged result; mixed types are a caller error.
  cond do
    Keyword.keyword?(config) and Keyword.keyword?(defaults) ->
      defaults
      |> Keyword.merge(config, &compare/3)
      |> Resolver.resolve!()

    is_map(config) and is_map(defaults) ->
      defaults
      |> Map.merge(config, &compare/3)
      |> Resolver.resolve!()

    true ->
      raise ArgumentError,
            "can not merge default values #{inspect(defaults)} " <>
              "with configuration #{inspect(config)} because their types mismatch, " <>
              "expected both to be either Map or Keyword structures"
  end
end

# Merge conflict resolver: v1 comes from `defaults`, v2 from `config`.
# Scalar conflicts keep the configured value (v2); nested Keyword/Map values
# are merged recursively.
# NOTE(review): the recursive call passes v1 (the default) in the `config`
# position and v2 (the config) in the `defaults` position, which appears to
# invert precedence for nested structures — confirm this is intended.
defp compare(_k, v1, v2) do
  if is_map(v2) or Keyword.keyword?(v2) do
    merge_configs!(v1, v2)
  else
    v2
  end
end
# Helper to include configuration into module and validate it at compile-time/run-time.
@doc false
defmacro __using__(opts) do
  quote bind_quoted: [opts: opts], location: :keep do
    # :otp_app is mandatory; every other option becomes a default that is
    # recursively merged with the application environment at call time.
    @otp_app Keyword.fetch!(opts, :otp_app)
    @module_config_defaults Keyword.delete(opts, :otp_app)

    @doc """
    Returns module configuration.
    If application environment contains values in `Keyword` or `Map` struct,
    defaults from macro usage will be recursively merged with application configuration.
    If one of the configuration parameters does not exist or can not be resolved, raises `ArgumentError`.
    """
    @spec config() :: any()
    def config do
      @otp_app
      |> Confex.get_env(__MODULE__)
      |> Confex.merge_configs!(@module_config_defaults)
      |> Confex.Resolver.resolve!()
      |> validate_config!()
    end

    # Default validation is a no-op; using modules may override it to check
    # the resolved configuration and raise on invalid values.
    @spec validate_config!(config :: any()) :: any()
    def validate_config!(config), do: config

    defoverridable validate_config!: 1
  end
end
end
|
lib/confex.ex
| 0.932668
| 0.787931
|
confex.ex
|
starcoder
|
defmodule Data.Opportunities do
@moduledoc """
The public API for `opportunity` resources
"""
alias Data.{Opportunity, Repo}
import Ecto.Query, except: [update: 2]
# Default listing options; any of these can be overridden by the caller.
@defaults %{filters: %{}, include: [], sort_by: :updated_at, page: 1, page_size: 15}

# Returns a paginated page of opportunities: applies ordering, filters, and
# pagination, then preloads the associations named in `opts.include`.
def all(opts \\ %{}) do
  opts = Map.merge(@defaults, opts)

  Opportunity
  |> Ecto.Query.order_by(^opts.sort_by)
  |> filters(opts.filters)
  |> Repo.paginate(page: opts.page, page_size: opts.page_size)
  |> include(opts.include)
end
# Fetches a single opportunity matching the given params, or nil.
def get(params), do: Repo.get_by(Opportunity, params)

# Inserts a new opportunity, deriving "level"/"type" from its labels first.
def insert(params) do
  params = map_labels(params)

  %Opportunity{}
  |> Opportunity.changeset(params)
  |> Repo.insert()
end

# Upserts keyed on "url": inserts when unseen, otherwise updates the
# existing record.
def insert_or_update(%{"url" => url} = params) do
  case get(%{url: url}) do
    nil -> insert(params)
    opportunity -> update(opportunity, params)
  end
end

# Updates an opportunity looked up by integer primary key.
def update(id, params) when is_integer(id) do
  Opportunity
  |> Repo.get(id)
  |> update(params)
end

# Updates the given struct, deriving "level"/"type" from its labels first.
def update(struct, params) do
  params = map_labels(params)

  struct
  |> Opportunity.changeset(params)
  |> Repo.update()
end
defp filter({field, value}, query) when is_list(value),
do: where(query, [o], field(o, ^field) in ^value)
defp filter({field, value}, query), do: where(query, [o], field(o, ^field) == ^value)
defp filters(query, filters), do: Enum.reduce(filters, query, &filter/2)
defp include(results, nil), do: results
defp include(%{entries: entries} = results, schemas) do
preloaded = Repo.preload(entries, schemas)
Map.put(results, :entries, preloaded)
end
defp level_mapping, do: Application.get_env(:data, :level_label_mapping)
defp map_from_labels([], _mapping), do: nil
defp map_from_labels(labels, mapping) do
labels = Enum.map(labels, &(&1 |> Map.get("name") |> String.downcase()))
mapping =
Enum.find(mapping, fn {_, mappings} -> length(mappings -- mappings -- labels) != 0 end)
case mapping do
{level, _} -> level
_ -> nil
end
end
defp map_labels(%{"labels" => labels} = params) do
params
|> Map.put("level", map_from_labels(labels, level_mapping()))
|> Map.put("type", map_from_labels(labels, type_mapping()))
end
defp map_labels(params), do: params
defp type_mapping, do: Application.get_env(:data, :type_label_mapping)
end
|
apps/data/lib/data/opportunities.ex
| 0.724578
| 0.470372
|
opportunities.ex
|
starcoder
|
defmodule Sqlitex.Query do
  @moduledoc """
  Helpers for preparing, binding and executing SQL queries against a
  sqlite connection.
  """

  alias Sqlitex.Statement

  @doc """
  Runs a query and returns the results.
  ## Parameters
  * `db` - A sqlite database.
  * `sql` - The query to run as a string.
  * `opts` - Options to pass into the query. See below for details.
  ## Options
  * `bind` - If your query has parameters in it, you should provide the options
  to bind as a list.
  * `into` - The collection to put results into. This defaults to a list.
  ## Returns
  * [results...] on success
  * {:error, _} on failure.
  """
  @spec query(Sqlitex.connection, String.t | char_list) :: [[]] | Sqlitex.sqlite_error
  @spec query(Sqlitex.connection, String.t | char_list, [bind: [], into: Enum.t]) :: [Enum.t] | Sqlitex.sqlite_error
  def query(db, sql, opts \\ []) do
    bindings = Keyword.get(opts, :bind, [])
    collectable = Keyword.get(opts, :into, [])

    # Each step either produces {:ok, _} or an error tuple that is
    # returned to the caller untouched.
    case Statement.prepare(db, sql) do
      {:ok, prepared} ->
        case Statement.bind_values(prepared, bindings) do
          {:ok, bound} ->
            case Statement.fetch_all(bound, collectable) do
              {:ok, results} -> {:ok, results}
              failure -> failure
            end

          failure ->
            failure
        end

      failure ->
        failure
    end
  end

  @doc """
  Same as `query/3` but raises a Sqlitex.QueryError on error.
  Returns the results otherwise.
  """
  @spec query!(Sqlitex.connection, String.t | char_list) :: [[]]
  @spec query!(Sqlitex.connection, String.t | char_list, [bind: [], into: Enum.t]) :: [Enum.t]
  def query!(db, sql, opts \\ []) do
    case query(db, sql, opts) do
      {:ok, results} -> results
      {:error, reason} -> raise Sqlitex.QueryError, reason: reason
    end
  end

  @doc """
  Runs a query and returns the results as a list of rows each represented as
  a list of column values.
  ## Parameters
  * `db` - A sqlite database.
  * `sql` - The query to run as a string.
  * `opts` - Options to pass into the query. See below for details.
  ## Options
  * `bind` - If your query has parameters in it, you should provide the options
  to bind as a list.
  ## Returns
  * {:ok, %{rows: [[1, 2], [2, 3]], columns: [:a, :b], types: [:INTEGER, :INTEGER]}} on success
  * {:error, _} on failure.
  """
  @spec query_rows(Sqlitex.connection, String.t | char_list) :: {:ok, %{}} | Sqlitex.sqlite_error
  @spec query_rows(Sqlitex.connection, String.t | char_list, [bind: []]) :: {:ok, %{}} | Sqlitex.sqlite_error
  def query_rows(db, sql, opts \\ []) do
    bindings = Keyword.get(opts, :bind, [])

    case Statement.prepare(db, sql) do
      {:ok, prepared} ->
        case Statement.bind_values(prepared, bindings) do
          {:ok, bound} ->
            # :raw_list keeps rows as plain lists; column metadata comes
            # from the bound statement struct.
            case Statement.fetch_all(bound, :raw_list) do
              {:ok, rows} ->
                {:ok, %{rows: rows, columns: bound.column_names, types: bound.column_types}}

              failure ->
                failure
            end

          failure ->
            failure
        end

      failure ->
        failure
    end
  end

  @doc """
  Same as `query_rows/3` but raises a Sqlitex.QueryError on error.
  Returns the results otherwise.
  """
  @spec query_rows!(Sqlitex.connection, String.t | char_list) :: [[]]
  @spec query_rows!(Sqlitex.connection, String.t | char_list, [bind: []]) :: [Enum.t]
  def query_rows!(db, sql, opts \\ []) do
    case query_rows(db, sql, opts) do
      {:ok, results} -> results
      {:error, reason} -> raise Sqlitex.QueryError, reason: reason
    end
  end
end
|
deps/sqlitex/lib/sqlitex/query.ex
| 0.886248
| 0.547525
|
query.ex
|
starcoder
|
defmodule Modbux.Response do
  @moduledoc """
  Response helper, functions that handles Server & Slave response messages.
  """
  alias Modbux.Helper

  # Modbus exception codes (1..11) mapped to the atoms this library uses.
  # NOTE(review): atom names are this project's convention — confirm against
  # the Modbus spec names (1=illegal function, 2=illegal data address, ...).
  @exceptions %{
    1 => :efun,
    2 => :eaddr,
    3 => :einval,
    4 => :edevice,
    5 => :ack,
    6 => :sbusy,
    7 => :nack,
    8 => :ememp,
    9 => :error,
    10 => :egpath,
    11 => :egtarg
  }

  # Builds the wire-format response for a request tuple. The `^count` pins
  # assert that the number of supplied values matches the requested count,
  # crashing (let-it-crash) on a mismatch.
  @spec pack({:fc | :phr | :rc | :rhr | :ri | :rir, integer, any, maybe_improper_list | integer}, any) ::
          <<_::24, _::_*8>>
  # Read coils (function code 1): values are a bit list.
  def pack({:rc, slave, _address, count}, values) do
    ^count = Enum.count(values)
    data = Helper.bitlist_to_bin(values)
    reads(slave, 1, data)
  end

  # Read discrete inputs (function code 2).
  def pack({:ri, slave, _address, count}, values) do
    ^count = Enum.count(values)
    data = Helper.bitlist_to_bin(values)
    reads(slave, 2, data)
  end

  # Read holding registers (function code 3): values are 16-bit registers.
  def pack({:rhr, slave, _address, count}, values) do
    ^count = Enum.count(values)
    data = Helper.reglist_to_bin(values)
    reads(slave, 3, data)
  end

  # Read input registers (function code 4).
  def pack({:rir, slave, _address, count}, values) do
    ^count = Enum.count(values)
    data = Helper.reglist_to_bin(values)
    reads(slave, 4, data)
  end

  # Write single coil (function code 5) echoes address and value.
  def pack({:fc, slave, address, value}, nil) when is_integer(value) do
    write(:d, slave, 5, address, value)
  end

  # Write single holding register (function code 6).
  def pack({:phr, slave, address, value}, nil) when is_integer(value) do
    write(:a, slave, 6, address, value)
  end

  # Write multiple coils (function code 15) echoes address and count only.
  def pack({:fc, slave, address, values}, nil) when is_list(values) do
    writes(:d, slave, 15, address, values)
  end

  # Write multiple registers (function code 16).
  def pack({:phr, slave, address, values}, nil) when is_list(values) do
    writes(:a, slave, 16, address, values)
  end

  # Parses a raw response binary against the original request tuple.
  # The binary pattern pins `slave`, the function code and (for writes) the
  # echoed address, so a response for a different request fails to match.
  # Read responses return the decoded value list; write confirmations
  # return nil; exception frames return {:error, reason}.
  @spec parse(any, <<_::24, _::_*8>>) :: nil | [any] | {:error, any} | {:error, byte, <<_::104>>}
  def parse({:rc, slave, _address, count}, <<slave, 1, bytes, data::binary>>) do
    # Assert the byte count field matches the expected count of coils.
    ^bytes = Helper.byte_count(count)
    Helper.bin_to_bitlist(count, data)
  end

  def parse({:ri, slave, _address, count}, <<slave, 2, bytes, data::binary>>) do
    ^bytes = Helper.byte_count(count)
    Helper.bin_to_bitlist(count, data)
  end

  def parse({:rhr, slave, _address, count}, <<slave, 3, bytes, data::binary>>) do
    # Registers are 2 bytes each.
    ^bytes = 2 * count
    Helper.bin_to_reglist(count, data)
  end

  def parse({:rir, slave, _address, count}, <<slave, 4, bytes, data::binary>>) do
    ^bytes = 2 * count
    Helper.bin_to_reglist(count, data)
  end

  # Single-coil write confirmation: 0 is encoded as 0x0000, 1 as 0xFF00.
  def parse({:fc, slave, address, 0}, <<slave, 5, address::16, 0x00, 0x00>>) do
    nil
  end

  def parse({:fc, slave, address, 1}, <<slave, 5, address::16, 0xFF, 0x00>>) do
    nil
  end

  # Single-register write confirmation echoes the written value.
  def parse({:phr, slave, address, value}, <<slave, 6, address::16, value::16>>) do
    nil
  end

  # Multi-write confirmations echo the count, which must match the request.
  def parse({:fc, slave, address, values}, <<slave, 15, address::16, count::16>>) do
    ^count = Enum.count(values)
    nil
  end

  def parse({:phr, slave, address, values}, <<slave, 16, address::16, count::16>>) do
    ^count = Enum.count(values)
    nil
  end

  # Error messages.
  def parse(_cmd, <<_slave, _fc, error_code>>) when error_code in 1..11 do
    {:error, @exceptions[error_code]}
  end

  def parse(_cmd, <<slave, _fc, _error_code>>) do
    {:error, slave, "Unknown error"}
  end

  # Expected byte length of the response for a given request:
  # slave + function + byte-count header (3) plus the payload.
  @spec length({:fc | :phr | :rc | :rhr | :ri | :rir, any, any, any}) :: number
  def length({:rc, _slave, _address, count}) do
    3 + Helper.byte_count(count)
  end

  def length({:ri, _slave, _address, count}) do
    3 + Helper.byte_count(count)
  end

  def length({:rhr, _slave, _address, count}) do
    3 + 2 * count
  end

  def length({:rir, _slave, _address, count}) do
    3 + 2 * count
  end

  # Write confirmations are always 6 bytes: slave + fc + address + value/count.
  def length({:fc, _slave, _address, _}) do
    6
  end

  def length({:phr, _slave, _address, _}) do
    6
  end

  # Frame for read responses: slave, function code, byte count, payload.
  defp reads(slave, function, data) do
    bytes = :erlang.byte_size(data)
    <<slave, function, bytes, data::binary>>
  end

  # :d (discrete/coil) writes encode the value as 0xFF/0x00 + padding byte.
  defp write(:d, slave, function, address, value) do
    <<slave, function, address::16, Helper.bool_to_byte(value), 0x00>>
  end

  # :a (analog/register) writes encode the value as a 16-bit integer.
  defp write(:a, slave, function, address, value) do
    <<slave, function, address::16, value::16>>
  end

  # Multi-write confirmations only echo the address and the value count.
  defp writes(_type, slave, function, address, values) do
    count = Enum.count(values)
    <<slave, function, address::16, count::16>>
  end
end
|
lib/helpers/response.ex
| 0.647575
| 0.560764
|
response.ex
|
starcoder
|
defmodule ExAws.Comprehend do
  @moduledoc """
  Operations on the AWS Comprehend service.
  ## Basic usage
  ```elixir
  alias ExAws.Comprehend
  # Detect dominant language
  ExAws.Comprehend.detect_dominant_language("Amazon.com, Inc. is located in Seattle, WA and was founded July 5th, 1994 by <NAME>, allowing customers to buy everything from books to blenders.Seattle is north of Portland and south of Vancouver, BC. Other notable Seattle - based companies are Starbucks and Boeing.")
  |> ExAws.request
  # Detect Entities
  # You must specify the language of the input text
  ExAws.Comprehend.detect_entities("It is raining today in Seattle", "en")
  |> ExAws.request
  # Detect Sentiment
  # You must specify the language of the input text
  ExAws.Comprehend.detect_sentiment("Amazon.com, Inc. is located in Seattle, WA and was founded July 5th, 1994 by <NAME>, allowing customers to buy everything from books to blenders.Seattle is north of Portland and south of Vancouver, BC. Other notable Seattle - based companies are Starbucks and Boeing", "en")
  |> ExAws.request
  ```
  """

  # x-amz-target namespace prefix used by the Comprehend JSON 1.1 protocol.
  @namespace "Comprehend_20171127"

  @type text :: binary
  @type language_code :: binary

  @doc """
  Detect dominant language
  """
  # Fixed: specs previously referenced `ExAws.Operations.JSON.t()` (plural),
  # a module that does not exist — the code builds `ExAws.Operation.JSON`.
  @spec detect_dominant_language(
          text :: binary
        ) :: ExAws.Operation.JSON.t()
  def detect_dominant_language(
        text
      ) do
    data = %{"Text" => text}
    request(:detect_dominant_language, data)
  end

  @doc """
  Detect entities
  You must specify the language of the input text.
  Examples
  ```
  ExAws.Comprehend.detect_entities("It is raining today in Seattle", "en")
  |> ExAws.request
  ```
  """
  @spec detect_entities(
          text :: binary,
          language_code :: binary
        ) :: ExAws.Operation.JSON.t()
  def detect_entities(
        text,
        language_code
      ) do
    data = %{
      "Text" => text,
      "LanguageCode" => language_code
    }

    request(:detect_entities, data)
  end

  @doc """
  Detect key phrases
  You must specify the language of the input text.
  Examples
  ```
  ExAws.Comprehend.detect_key_phrases("It is raining today in Seattle", "en")
  |> ExAws.request
  ```
  """
  @spec detect_key_phrases(
          text :: binary,
          language_code :: binary
        ) :: ExAws.Operation.JSON.t()
  def detect_key_phrases(
        text,
        language_code
      ) do
    data = %{
      "Text" => text,
      "LanguageCode" => language_code
    }

    request(:detect_key_phrases, data)
  end

  @doc """
  Detect sentiment
  You must specify the language of the input text.
  Examples
  ```
  ExAws.Comprehend.detect_sentiment("Amazon.com, Inc. is located in Seattle, WA and was founded July 5th, 1994 by <NAME>, allowing customers to buy everything from books to blenders.Seattle is north of Portland and south of Vancouver, BC. Other notable Seattle - based companies are Starbucks and Boeing", "en")
  |> ExAws.request
  ```
  """
  @spec detect_sentiment(
          text :: binary,
          language_code :: binary
        ) :: ExAws.Operation.JSON.t()
  def detect_sentiment(
        text,
        language_code
      ) do
    data = %{
      "Text" => text,
      "LanguageCode" => language_code
    }

    request(:detect_sentiment, data)
  end

  @doc """
  Detect syntax
  You must specify the language of the input text.
  Examples
  ```
  ExAws.Comprehend.detect_syntax("It is raining today in Seattle.", "en")
  |> ExAws.request
  ```
  """
  @spec detect_syntax(
          text :: binary,
          language_code :: binary
        ) :: ExAws.Operation.JSON.t()
  def detect_syntax(
        text,
        language_code
      ) do
    data = %{
      "Text" => text,
      "LanguageCode" => language_code
    }

    request(:detect_syntax, data)
  end

  # Builds the ExAws JSON operation: the operation name is the CamelCase form
  # of the atom, sent via the x-amz-target header per the JSON 1.1 protocol.
  defp request(op, data, opts \\ %{}) do
    operation =
      op
      |> Atom.to_string()
      |> Macro.camelize()

    ExAws.Operation.JSON.new(
      :comprehend,
      %{
        data: data,
        headers: [
          {"x-amz-target", "#{@namespace}.#{operation}"},
          {"content-type", "application/x-amz-json-1.1"}
        ]
      }
      |> Map.merge(opts)
    )
  end
end
|
lib/ex_aws/comprehend.ex
| 0.789964
| 0.875946
|
comprehend.ex
|
starcoder
|
defmodule TripPlan.PersonalDetail do
  @moduledoc """
  Extra information attached to trip legs travelled on personal
  transportation (e.g. walking): total distance plus turn-by-turn steps.
  """

  defstruct [distance: 0.0, steps: []]

  @type t :: %__MODULE__{
          distance: float,
          steps: [__MODULE__.Step.t()]
        }
end
defmodule TripPlan.PersonalDetail.Step do
  @moduledoc """
  A turn-by-turn direction
  """
  defstruct distance: 0.0,
            relative_direction: :depart,
            absolute_direction: :north,
            street_name: ""

  # Fixed: the type previously omitted :street_name even though the struct
  # declares it; Dialyzer would reject literal %__MODULE__{street_name: ...}.
  @type t :: %__MODULE__{
          distance: float,
          relative_direction: relative_direction,
          absolute_direction: absolute_direction | nil,
          street_name: String.t()
        }

  @type relative_direction ::
          :depart
          | :slightly_left
          | :left
          | :hard_left
          | :slightly_right
          | :right
          | :hard_right
          | :continue
          | :circle_clockwise
          | :circle_counterclockwise
          | :elevator
          | :uturn_left
          | :uturn_right

  @type absolute_direction ::
          :north
          | :northeast
          | :east
          | :southeast
          | :south
          | :southwest
          | :west
          | :northwest

  @doc """
  Human-readable English phrase for a relative direction atom.
  """
  @spec human_relative_direction(relative_direction) :: binary
  def human_relative_direction(:depart), do: "Depart"
  def human_relative_direction(:slightly_left), do: "Slightly left"
  def human_relative_direction(:left), do: "Left"
  def human_relative_direction(:hard_left), do: "Hard left"
  def human_relative_direction(:slightly_right), do: "Slightly right"
  def human_relative_direction(:right), do: "Right"
  def human_relative_direction(:hard_right), do: "Hard right"
  def human_relative_direction(:continue), do: "Continue"
  def human_relative_direction(:circle_clockwise), do: "Enter the traffic circle"
  def human_relative_direction(:circle_counterclockwise), do: "Enter the traffic circle"
  def human_relative_direction(:elevator), do: "Take the elevator"
  def human_relative_direction(:uturn_left), do: "Make a U-turn"
  def human_relative_direction(:uturn_right), do: "Make a U-turn"
end
|
apps/trip_plan/lib/trip_plan/personal_detail.ex
| 0.853303
| 0.5592
|
personal_detail.ex
|
starcoder
|
defmodule DarkEcto.Types.CFAbsoluteTime do
  @moduledoc """
  Representation for a datetime stored as a unix epoch integer
  """
  alias DarkMatter.DateTimes

  @typedoc """
  Stored as an `t:pos_integer/0` in the database but used at runtime as a `t:DateTime.t/0`.
  """
  @type t() :: DateTime.t()
  @type t_db() :: pos_integer()

  @behaviour Ecto.Type

  # Core Foundation absolute time: nanoseconds since 2001-01-01 UTC
  # (not the 1970 unix epoch).
  @opts [
    unit: :nanosecond,
    epoch: ~U[2001-01-01 00:00:00.000000Z]
  ]

  @doc """
  Ecto storage type
  """
  @spec type() :: :integer
  def type, do: :integer

  @doc """
  Ecto embed type
  """
  @spec embed_as(format :: atom()) :: :dump
  def embed_as(_), do: :dump

  @doc """
  Cast runtime values.
  """
  # Casting normalizes to the DB representation (an offset integer),
  # not to a DateTime. nil and non-negative integers pass through.
  @spec cast(any) :: {:ok, t_db()} | :error
  def cast(nil) do
    {:ok, nil}
  end

  def cast(integer) when is_integer(integer) and integer >= 0 do
    {:ok, integer}
  end

  def cast(value) do
    unit = Keyword.fetch!(@opts, :unit)
    epoch = Keyword.fetch!(@opts, :epoch)
    epoch_time = DateTime.to_unix(epoch, unit)

    # Anything DateTimes.cast_datetime understands becomes nanoseconds
    # since the CF epoch; everything else is a cast error.
    case DateTimes.cast_datetime(value) do
      %DateTime{} = datetime ->
        unix = DateTime.to_unix(datetime, unit) - epoch_time
        {:ok, unix}

      _ ->
        :error
    end
  rescue
    # NOTE(review): broad rescue hides bugs in DateTimes.cast_datetime;
    # kept as-is since callers may rely on :error for malformed input.
    _ -> :error
  end

  @doc """
  Load database data.
  """
  # Converts the stored CF offset integer back into a DateTime.
  @spec load(any()) :: {:ok, t()} | :error
  def load(nil), do: {:ok, nil}

  def load(integer) when is_integer(integer) and integer >= 0 do
    unit = Keyword.fetch!(@opts, :unit)
    epoch = Keyword.fetch!(@opts, :epoch)
    epoch_time = DateTime.to_unix(epoch, unit)

    case DateTime.from_unix(integer + epoch_time, unit) do
      {:ok, %DateTime{} = datetime} -> {:ok, datetime}
      _ -> :error
    end
  end

  # Fallback clause accepts datetime-ish values directly (e.g. from embeds).
  def load(value) do
    case DateTimes.cast_datetime(value) do
      %DateTime{} = datetime -> {:ok, datetime}
      _ -> :error
    end
  rescue
    _ -> :error
  end

  @doc """
  Dump values into the database.
  """
  # NOTE(review): dump/1 delegates to load/1, which converts integers INTO
  # DateTimes — yet type/0 is :integer. This looks inverted (dump would be
  # expected to delegate to cast/1, producing the integer form); confirm
  # against how embed_as(:dump) is exercised before changing.
  @spec dump(any()) :: {:ok, t()} | :error
  def dump(value) do
    load(value)
  end

  @doc """
  Implement equality.
  """
  # Equality is defined on the casted (integer) representation so a DateTime
  # and its equivalent CF offset compare equal.
  @spec equal?(any, any) :: boolean()
  def equal?(left, right) do
    with {:ok, left} <- cast(left),
         {:ok, right} <- cast(right) do
      left == right
    else
      :error -> false
    end
  end
end
|
lib/dark_ecto/types/cf_absolute_time.ex
| 0.900605
| 0.457258
|
cf_absolute_time.ex
|
starcoder
|
defmodule Mix.Shell.Process do
  @moduledoc """
  Mix shell that uses the current process mailbox for communication.
  This module provides a Mix shell implementation that uses
  the current process mailbox for communication instead of IO.
  As an example, when `Mix.shell.info("hello")` is called,
  the following message will be sent to the calling process:
      {:mix_shell, :info, ["hello"]}
  This is mainly useful in tests, allowing us to assert
  if given messages were received or not instead of performing
  checks on some captured IO. Since we need to guarantee a clean
  slate between tests, there is also a `flush/1` function
  responsible for flushing all `:mix_shell` related messages
  from the process inbox.
  ## Examples
      Mix.shell.info "hello"
      receive do {:mix_shell, :info, [msg]} -> msg end
      #=> "hello"
      send self(), {:mix_shell_input, :prompt, "Pretty cool"}
      Mix.shell.prompt?("How cool was that?!")
      #=> "Pretty cool"
  """

  @behaviour Mix.Shell

  @doc """
  Flushes all `:mix_shell` and `:mix_shell_input` messages from the current process.
  If a callback is given, it is invoked for each received message.
  ## Examples
      flush &IO.inspect(&1)
  """
  def flush(callback \\ fn x -> x end) do
    receive do
      {:mix_shell, _, _} = message ->
        callback.(message)
        flush(callback)

      {:mix_shell_input, _, _} = message ->
        callback.(message)
        flush(callback)
    after
      # Zero timeout: drain only what is already queued, never block.
      0 -> :done
    end
  end

  @doc """
  Prints the currently running application if it
  was not printed yet.
  """
  # Fixed: paren-less `send self` / bare zero-arity local calls are
  # deprecated and emit warnings on modern Elixir; behavior is unchanged.
  def print_app do
    if name = Mix.Shell.printable_app_name() do
      send(self(), {:mix_shell, :info, ["==> #{name}"]})
    end
  end

  @doc """
  Executes the given command and forwards its messages to
  the current process.
  """
  def cmd(command, opts \\ []) do
    print_app? = Keyword.get(opts, :print_app, true)

    Mix.Shell.cmd(command, opts, fn data ->
      if print_app?, do: print_app()
      send(self(), {:mix_shell, :run, [data]})
    end)
  end

  @doc """
  Forwards the message to the current process.
  """
  def info(message) do
    print_app()
    send(self(), {:mix_shell, :info, [format(message)]})
  end

  @doc """
  Forwards the message to the current process.
  """
  def error(message) do
    print_app()
    send(self(), {:mix_shell, :error, [format(message)]})
  end

  # Renders ANSI-formatted iodata to a plain binary (colors disabled).
  defp format(message) do
    message |> IO.ANSI.format(false) |> IO.iodata_to_binary()
  end

  @doc """
  Forwards the message to the current process.
  It also checks the inbox for an input message matching:
      {:mix_shell_input, :prompt, value}
  If one does not exist, it will abort since there was no shell
  process inputs given. `value` must be a string.
  ## Examples
  The following will answer with `"Meg"` to the prompt
  `"What's your name?"`:
      # The response is sent before calling prompt/1 so that prompt/1 can read it
      send self(), {:mix_shell_input, :prompt, "Meg"}
      Mix.shell.prompt("What's your name?")
  """
  def prompt(message) do
    print_app()
    send(self(), {:mix_shell, :prompt, [message]})

    receive do
      {:mix_shell_input, :prompt, response} -> response
    after
      0 -> raise "no shell process input given for prompt/1"
    end
  end

  @doc """
  Forwards the message to the current process.
  It also checks the inbox for an input message matching:
      {:mix_shell_input, :yes?, value}
  If one does not exist, it will abort since there was no shell
  process inputs given. `value` must be `true` or `false`.
  ## Example
      # Send the response to self() first so that yes?/1 will be able to read it
      send self(), {:mix_shell_input, :yes?, true}
      Mix.shell.yes?("Are you sure you want to continue?")
  """
  def yes?(message) do
    print_app()
    send(self(), {:mix_shell, :yes?, [message]})

    receive do
      {:mix_shell_input, :yes?, response} -> response
    after
      0 -> raise "no shell process input given for yes?/1"
    end
  end
end
|
lib/mix/lib/mix/shell/process.ex
| 0.717111
| 0.441312
|
process.ex
|
starcoder
|
defmodule Chain.Transaction do
  @moduledoc """
  An Ethereum-style transaction: RLP (de)serialization, secp256k1
  signing/recovery (with EIP-155 replay protection), validation, and
  application against a chain state via the EVM.
  """
  alias Chain.TransactionReceipt

  @enforce_keys [:chain_id]
  defstruct nonce: 1,
            gasPrice: 0,
            gasLimit: 0,
            to: nil,
            value: 0,
            chain_id: nil,
            signature: nil,
            init: nil,
            data: nil

  @type t :: %Chain.Transaction{}

  # Simple field accessors; nil data/init normalize to "".
  def nonce(%Chain.Transaction{nonce: nonce}), do: nonce
  def data(%Chain.Transaction{data: nil}), do: ""
  def data(%Chain.Transaction{data: data}), do: data
  def gas_price(%Chain.Transaction{gasPrice: gas_price}), do: gas_price
  def gas_limit(%Chain.Transaction{gasLimit: gas_limit}), do: gas_limit
  def value(%Chain.Transaction{value: val}), do: val
  def signature(%Chain.Transaction{signature: sig}), do: sig

  # Payload is `init` for contract creations (to == nil), `data` otherwise.
  def payload(%Chain.Transaction{to: nil, init: nil}), do: ""
  def payload(%Chain.Transaction{to: nil, init: init}), do: init
  def payload(%Chain.Transaction{data: nil}), do: ""
  def payload(%Chain.Transaction{data: data}), do: data

  # For creations the destination is the derived new contract address.
  def to(%Chain.Transaction{to: nil} = tx), do: new_contract_address(tx)
  def to(%Chain.Transaction{to: to}), do: to
  def chain_id(%Chain.Transaction{chain_id: chain_id}), do: chain_id

  @spec from_rlp(binary()) :: Chain.Transaction.t()
  def from_rlp(bin) do
    [nonce, gas_price, gas_limit, to, value, init, rec, r, s] = Rlp.decode!(bin)
    to = Rlpx.bin2addr(to)

    # The 6th RLP field is `init` for creations and `data` for calls,
    # distinguished by whether `to` is present.
    %Chain.Transaction{
      nonce: Rlpx.bin2num(nonce),
      gasPrice: Rlpx.bin2num(gas_price),
      gasLimit: Rlpx.bin2num(gas_limit),
      to: to,
      value: Rlpx.bin2num(value),
      init: if(to == nil, do: init, else: nil),
      data: if(to != nil, do: init, else: nil),
      signature: Secp256k1.rlp_to_bitcoin(rec, r, s),
      chain_id: Secp256k1.chain_id(rec)
    }
  end

  # Debug helper: prints a human-readable summary of the transaction.
  @spec print(Chain.Transaction.t()) :: :ok
  def print(tx) do
    hash = Base16.encode(hash(tx))
    from = Base16.encode(from(tx))
    to = Base16.encode(to(tx))
    type = Atom.to_string(type(tx))
    value = value(tx)
    code = Base16.encode(payload(tx))

    # Fixed: the `if` previously had no `else`, so payloads of 40 chars or
    # fewer were replaced by nil and printed as empty. Short payloads are
    # now printed verbatim; long ones are truncated with their length.
    code =
      if byte_size(code) > 40 do
        binary_part(code, 0, 37) <> "... [#{byte_size(code)}]"
      else
        code
      end

    IO.puts("")
    IO.puts("\tTransaction: #{hash} Type: #{type}")
    IO.puts("\tFrom: #{from} To: #{to}")
    IO.puts("\tValue: #{value} Code: #{code}")
    # rlp = to_rlp(tx) |> Rlp.encode!()
    # IO.puts("\tRLP: #{Base16.encode(rlp)}")
    :ok
  end

  @spec valid?(Chain.Transaction.t()) :: boolean()
  def valid?(tx) do
    validate(tx) == true
  end

  @spec type(Chain.Transaction.t()) :: :call | :create
  def type(tx) do
    if contract_creation?(tx) do
      :create
    else
      :call
    end
  end

  # Structural validation; returns true or {check_number, failing_value}.
  # (Check number 3 is intentionally absent in the original numbering.)
  @spec validate(Chain.Transaction.t()) :: true | {non_neg_integer(), any()}
  def validate(tx) do
    with {1, %Chain.Transaction{}} <- {1, tx},
         {2, 65} <- {2, byte_size(signature(tx))},
         {4, true} <- {4, value(tx) >= 0},
         {5, true} <- {5, gas_price(tx) >= 0},
         {6, true} <- {6, gas_limit(tx) >= 0},
         {7, true} <- {7, byte_size(payload(tx)) >= 0} do
      true
    else
      {nr, error} -> {nr, error}
    end
  end

  @spec contract_creation?(Chain.Transaction.t()) :: boolean()
  def contract_creation?(%Chain.Transaction{to: to}) do
    to == nil
  end

  # Standard Ethereum CREATE address: keccak256(rlp([sender, nonce]))[12..].
  @spec new_contract_address(Chain.Transaction.t()) :: binary()
  def new_contract_address(%Chain.Transaction{to: to}) when to != nil do
    nil
  end

  def new_contract_address(%Chain.Transaction{nonce: nonce} = tx) do
    address = Wallet.address!(origin(tx))

    Rlp.encode!([address, nonce])
    |> Hash.keccak_256()
    |> Hash.to_address()
  end

  @spec to_rlp(Chain.Transaction.t()) :: [...]
  def to_rlp(tx) do
    [tx.nonce, gas_price(tx), gas_limit(tx), tx.to, tx.value, payload(tx)] ++
      Secp256k1.bitcoin_to_rlp(tx.signature, tx.chain_id)
  end

  @spec from(Chain.Transaction.t()) :: <<_::160>>
  def from(tx) do
    Wallet.address!(origin(tx))
  end

  # Recovers the signer's public key from the signature over the
  # (keccak-hashed) signing message.
  @spec recover(Chain.Transaction.t()) :: binary()
  def recover(tx) do
    Secp256k1.recover!(signature(tx), to_message(tx), :kec)
  end

  # {:fake, pubkey} signatures bypass recovery (used for test/system txs).
  @spec origin(Chain.Transaction.t()) :: Wallet.t()
  def origin(%Chain.Transaction{signature: {:fake, pubkey}}) do
    Wallet.from_address(pubkey)
  end

  def origin(tx) do
    recover(tx) |> Wallet.from_pubkey()
  end

  @spec sign(Chain.Transaction.t(), <<_::256>>) :: Chain.Transaction.t()
  def sign(tx = %Chain.Transaction{}, priv) do
    %{tx | signature: Secp256k1.sign(priv, to_message(tx), :kec)}
  end

  # Fake-signed txs have no canonical RLP, so they hash the signing message.
  def hash(tx = %Chain.Transaction{signature: {:fake, _pubkey}}) do
    to_message(tx) |> Diode.hash()
  end

  @spec hash(Chain.Transaction.t()) :: binary()
  def hash(tx) do
    to_rlp(tx) |> Rlp.encode!() |> Diode.hash()
  end

  # Signing message: chain_id nil/0 use the legacy pre-EIP-155 encoding;
  # otherwise the EIP-155 form appends [chain_id, 0, 0].
  @spec to_message(Chain.Transaction.t()) :: binary()
  def to_message(tx = %Chain.Transaction{chain_id: nil}) do
    # pre EIP-155 encoding
    [tx.nonce, gas_price(tx), gas_limit(tx), tx.to, tx.value, payload(tx)]
    |> Rlp.encode!()
  end

  def to_message(tx = %Chain.Transaction{chain_id: 0}) do
    # pre EIP-155 encoding
    [tx.nonce, gas_price(tx), gas_limit(tx), tx.to, tx.value, payload(tx)]
    |> Rlp.encode!()
  end

  def to_message(tx = %Chain.Transaction{chain_id: chain_id}) do
    # EIP-155 encoding
    [tx.nonce, gas_price(tx), gas_limit(tx), tx.to, tx.value, payload(tx), chain_id, 0, 0]
    |> Rlp.encode!()
  end

  # Applies the transaction to `state` in the context of block `env`.
  # Checks the sender nonce, deducts value + max fee up front, then runs the
  # EVM and settles the gas refund according to the outcome.
  @spec apply(Chain.Transaction.t(), Chain.Block.t(), Chain.State.t(), Keyword.t()) ::
          {:error, atom()} | {:ok, Chain.State.t(), Chain.Receipt.t()}
  def apply(
        tx = %Chain.Transaction{nonce: nonce},
        env = %Chain.Block{},
        state = %Chain.State{},
        opts \\ []
      ) do
    # :io.format("tx origin: ~p~n", [origin(tx)])
    from = from(tx)

    # IO.puts("Nonce: #{nonce} => #{Chain.State.account(state, from).nonce}")
    # Note: Even a non-existing account can send transaction, as long as value and gasprice are 0
    # :io.format("Trying nonce ~p (should be ~p) on account ~p~n", [nonce, Chain.State.ensure_account(state, from).nonce, from])
    # :io.format("~p~n", [:erlang.process_info(self(), :current_stacktrace)])
    case Chain.State.ensure_account(state, from) do
      from_acc = %Chain.Account{nonce: ^nonce} ->
        fee = gas_limit(tx) * gas_price(tx)

        # Deducting fee and value from source account
        from_acc = %{from_acc | nonce: nonce + 1, balance: from_acc.balance - tx.value - fee}

        if from_acc.balance < 0 do
          {:error, :not_enough_balance}
        else
          do_apply(tx, env, state, from, from_acc, opts)
        end

      %Chain.Account{nonce: low} when low < nonce ->
        {:error, :nonce_too_high}

      _acc ->
        {:error, :wrong_nonce}
    end
  end

  defp do_apply(tx, env, state, from, from_acc, opts) do
    # Creating / finding destination account
    {acc, code} =
      if contract_creation?(tx) do
        acc = Chain.Account.new(nonce: 1)
        {acc, payload(tx)}
      else
        acc = Chain.State.ensure_account(state, to(tx))
        {acc, Chain.Account.code(acc)}
      end

    # Adding tx.value to it's balance
    acc = %{acc | balance: acc.balance + tx.value}

    new_state =
      state
      |> Chain.State.set_account(from, from_acc)
      |> Chain.State.set_account(to(tx), acc)

    evm = Evm.init(tx, new_state, env, code, opts)
    ret = Evm.eval(evm)

    case ret do
      {:ok, evm} ->
        # Success: refund unused gas to the sender.
        new_state = Evm.state(evm)
        from_acc = Chain.State.account(new_state, from)
        from_acc = %{from_acc | balance: from_acc.balance + Evm.gas(evm) * gas_price(tx)}
        new_state = Chain.State.set_account(new_state, from, from_acc)

        # The destination might be selfdestructed
        new_state =
          case Chain.State.account(new_state, to(tx)) do
            nil ->
              new_state

            acc ->
              # For creations, the EVM output becomes the deployed code.
              acc = if contract_creation?(tx), do: %{acc | code: Evm.out(evm)}, else: acc
              Chain.State.set_account(new_state, to(tx), acc)
          end

        # :io.format("evm: ~240p~n", [evm])
        {:ok, new_state,
         %TransactionReceipt{
           evmout: Evm.out(evm),
           return_data: Evm.return_data(evm),
           data: Evm.input(evm),
           logs: Evm.logs(evm),
           trace: Evm.trace(evm),
           gas_used: gas_limit(tx) - Evm.gas(evm),
           msg: :ok
         }}

      {:evmc_revert, evm} ->
        # Revert: state changes are discarded; the sender gets back the
        # transferred value and the unused portion of the gas.
        # Only applying the delta fee (see new_state vs. state)
        gas_used = gas_limit(tx) - Evm.gas(evm)

        state =
          Chain.State.set_account(state, from, %{
            from_acc
            | balance: from_acc.balance + Evm.gas(evm) * gas_price(tx) + tx.value
          })

        {:ok, state,
         %TransactionReceipt{msg: :evmc_revert, gas_used: gas_used, evmout: Evm.out(evm)}}

      {other, evm} ->
        # Hard failure: the full gas fee is consumed, only tx.value returns.
        # Only applying the full fee (restoring the tx.value)
        gas_used = gas_limit(tx) - Evm.gas(evm)

        state =
          Chain.State.set_account(state, from, %{
            from_acc
            | balance: from_acc.balance + tx.value
          })

        {:ok, state, %TransactionReceipt{msg: other, gas_used: gas_used, evmout: Evm.out(evm)}}
    end
  end
end
|
lib/chain/transaction.ex
| 0.767603
| 0.632673
|
transaction.ex
|
starcoder
|
defmodule CSV.LineAggregator do
  use CSV.Defaults
  alias CSV.LineAggregator.CorruptStreamError

  @moduledoc ~S"""
  The CSV LineAggregator module - aggregates lines in a stream that are part
  of a common escape sequence.
  """

  @doc """
  Aggregates the common escape sequences of a stream with the given separator.
  ## Options
  Options get transferred from the decoder. They are:
    * `:separator` – The field separator
  """
  def aggregate(stream, options \\ []) do
    separator = options |> Keyword.get(:separator, @separator)
    multiline_escape_max_lines = options |> Keyword.get(:multiline_escape_max_lines, @multiline_escape_max_lines)

    # Accumulator: {lines collected inside an open quote, how many so far}.
    # Raises if an escape sequence spans too many lines or never closes.
    stream |> Stream.transform(fn -> { [], 0 } end, fn line, { collected, collected_size } ->
      case collected do
        [] -> start_aggregate(line, separator)
        _ when collected_size < multiline_escape_max_lines ->
          continue_aggregate(collected, collected_size + 1, line, separator)
        _ -> raise CorruptStreamError,
          message: "Stream halted with escape sequence spanning more than #{multiline_escape_max_lines} lines. Use the multiline_escape_max_lines option to increase this threshold."
      end
    end, fn { collected, _ } ->
      # Stream finished: any lines still collected mean an unterminated quote.
      case collected do
        [] -> :ok
        _ -> raise CorruptStreamError,
          message: "Stream halted with unterminated escape sequence"
      end
    end)
  end

  # If the line leaves a quote open, start collecting; otherwise emit as-is.
  defp start_aggregate(line, separator) do
    cond do
      is_open?(line, separator) ->
        { [], { [line], 1 } }
      true ->
        { [line], { [], 0 } }
    end
  end

  # A collected sequence ends when a line closes the quote AND its remainder
  # does not immediately open a new one; the joined lines are then emitted.
  defp continue_aggregate(collected, collected_size, line, separator) do
    { is_closing, tail } = is_closing?(line, separator)
    cond do
      is_closing && is_open?(tail, separator) ->
        { [], { collected ++ [line], collected_size } }
      is_closing ->
        { [collected ++ [line] |> Enum.join(@delimiter)], { [], collected_size } }
      true ->
        { [], { collected ++ [line], collected_size } }
    end
  end

  # Scans a line that starts inside a quote; each double quote toggles the
  # quoted state. Returns {closed?, remainder-after-scan}.
  defp is_closing?(line, separator) do
    is_closing?(line, "", true, separator)
  end
  defp is_closing?(<< @double_quote :: utf8 >> <> tail, _, quoted, separator) do
    is_closing?(tail, << @double_quote :: utf8 >>, !quoted, separator)
  end
  defp is_closing?(<< head :: utf8 >> <> tail, _, quoted, separator) do
    is_closing?(tail, << head :: utf8 >>, quoted, separator)
  end
  defp is_closing?("", _, quoted, _) do
    { !quoted, "" }
  end

  # Scans a line from outside a quote, tracking the previous token so a
  # quote only opens a field at line start or right after a separator.
  defp is_open?(line, separator) do
    is_open?(line, "", false, separator)
  end
  defp is_open?(<< @double_quote :: utf8 >> <> tail, last_token, false, separator) when last_token == << separator :: utf8 >> do
    is_open?(tail, @double_quote, true, separator)
  end
  defp is_open?(<< @double_quote :: utf8 >> <> tail, "", false, separator) do
    is_open?(tail, @double_quote, true, separator)
  end
  defp is_open?(<< @double_quote :: utf8 >> <> tail, _, quoted, separator) do
    is_open?(tail, @double_quote, !quoted, separator)
  end
  defp is_open?(<< head :: utf8 >> <> tail, _, quoted, separator) do
    is_open?(tail, << head :: utf8 >>, quoted, separator)
  end
  # Fallback for bytes that are not valid utf8 codepoints.
  defp is_open?(<< head >> <> tail, _, quoted, separator) do
    is_open?(tail, << head >>, quoted, separator)
  end
  defp is_open?("", _, quoted, _) do
    quoted
  end
end
|
data/web/deps/csv/lib/csv/line_aggregator.ex
| 0.711732
| 0.456107
|
line_aggregator.ex
|
starcoder
|
defmodule AWS.CodeBuild do
@moduledoc """
AWS CodeBuild
AWS CodeBuild is a fully managed build service in the cloud. AWS CodeBuild
compiles your source code, runs unit tests, and produces artifacts that are
ready to deploy. AWS CodeBuild eliminates the need to provision, manage,
and scale your own build servers. It provides prepackaged build
environments for the most popular programming languages and build tools,
such as Apache Maven, Gradle, and more. You can also fully customize build
environments in AWS CodeBuild to use your own build tools. AWS CodeBuild
scales automatically to meet peak build requests. You pay only for the
build time you consume. For more information about AWS CodeBuild, see the *
[AWS CodeBuild User
Guide](https://docs.aws.amazon.com/codebuild/latest/userguide/welcome.html).*
AWS CodeBuild supports these operations:
<ul> <li> `BatchDeleteBuilds`: Deletes one or more builds.
</li> <li> `BatchGetBuilds`: Gets information about one or more builds.
</li> <li> `BatchGetProjects`: Gets information about one or more build
projects. A *build project* defines how AWS CodeBuild runs a build. This
includes information such as where to get the source code to build, the
build environment to use, the build commands to run, and where to store the
build output. A *build environment* is a representation of operating
system, programming language runtime, and tools that AWS CodeBuild uses to
run a build. You can add tags to build projects to help manage your
resources and costs.
</li> <li> `BatchGetReportGroups`: Returns an array of report groups.
</li> <li> `BatchGetReports`: Returns an array of reports.
</li> <li> `CreateProject`: Creates a build project.
</li> <li> `CreateReportGroup`: Creates a report group. A report group
contains a collection of reports.
</li> <li> `CreateWebhook`: For an existing AWS CodeBuild build project
that has its source code stored in a GitHub or Bitbucket repository,
enables AWS CodeBuild to start rebuilding the source code every time a code
change is pushed to the repository.
</li> <li> `DeleteProject`: Deletes a build project.
</li> <li> `DeleteReport`: Deletes a report.
</li> <li> `DeleteReportGroup`: Deletes a report group.
</li> <li> `DeleteResourcePolicy`: Deletes a resource policy that is
identified by its resource ARN.
</li> <li> `DeleteSourceCredentials`: Deletes a set of GitHub, GitHub
Enterprise, or Bitbucket source credentials.
</li> <li> `DeleteWebhook`: For an existing AWS CodeBuild build project
that has its source code stored in a GitHub or Bitbucket repository, stops
AWS CodeBuild from rebuilding the source code every time a code change is
pushed to the repository.
</li> <li> `DescribeTestCases`: Returns a list of details about test cases
for a report.
</li> <li> `GetResourcePolicy`: Gets a resource policy that is identified
by its resource ARN.
</li> <li> `ImportSourceCredentials`: Imports the source repository
credentials for an AWS CodeBuild project that has its source code stored in
a GitHub, GitHub Enterprise, or Bitbucket repository.
</li> <li> `InvalidateProjectCache`: Resets the cache for a project.
</li> <li> `ListBuilds`: Gets a list of build IDs, with each build ID
representing a single build.
</li> <li> `ListBuildsForProject`: Gets a list of build IDs for the
specified build project, with each build ID representing a single build.
</li> <li> `ListCuratedEnvironmentImages`: Gets information about Docker
images that are managed by AWS CodeBuild.
</li> <li> `ListProjects`: Gets a list of build project names, with each
build project name representing a single build project.
  </li> <li> `ListReportGroups`: Gets a list of ARNs for the report groups in
  the current AWS account.
  </li> <li> `ListReports`: Gets a list of ARNs for the reports in the current
  AWS account.
</li> <li> `ListReportsForReportGroup`: Returns a list of ARNs for the
reports that belong to a `ReportGroup`.
</li> <li> `ListSharedProjects`: Gets a list of ARNs associated with
projects shared with the current AWS account or user.
  </li> <li> `ListSharedReportGroups`: Gets a list of ARNs associated with
  report groups shared with the current AWS account or user.
</li> <li> `ListSourceCredentials`: Returns a list of
`SourceCredentialsInfo` objects. Each `SourceCredentialsInfo` object
includes the authentication type, token ARN, and type of source provider
for one set of credentials.
</li> <li> `PutResourcePolicy`: Stores a resource policy for the ARN of a
`Project` or `ReportGroup` object.
</li> <li> `StartBuild`: Starts running a build.
</li> <li> `StopBuild`: Attempts to stop running a build.
</li> <li> `UpdateProject`: Changes the settings of an existing build
project.
</li> <li> `UpdateReportGroup`: Changes a report group.
</li> <li> `UpdateWebhook`: Changes the settings of an existing webhook.
</li> </ul>
"""
@doc """
Deletes one or more builds.
"""
def batch_delete_builds(client, input, options \\ []) do
request(client, "BatchDeleteBuilds", input, options)
end
@doc """
Retrieves information about one or more batch builds.
"""
def batch_get_build_batches(client, input, options \\ []) do
request(client, "BatchGetBuildBatches", input, options)
end
@doc """
Gets information about one or more builds.
"""
def batch_get_builds(client, input, options \\ []) do
request(client, "BatchGetBuilds", input, options)
end
@doc """
Gets information about one or more build projects.
"""
def batch_get_projects(client, input, options \\ []) do
request(client, "BatchGetProjects", input, options)
end
@doc """
Returns an array of report groups.
"""
def batch_get_report_groups(client, input, options \\ []) do
request(client, "BatchGetReportGroups", input, options)
end
@doc """
Returns an array of reports.
"""
def batch_get_reports(client, input, options \\ []) do
request(client, "BatchGetReports", input, options)
end
@doc """
Creates a build project.
"""
def create_project(client, input, options \\ []) do
request(client, "CreateProject", input, options)
end
@doc """
Creates a report group. A report group contains a collection of reports.
"""
def create_report_group(client, input, options \\ []) do
request(client, "CreateReportGroup", input, options)
end
@doc """
For an existing AWS CodeBuild build project that has its source code stored
in a GitHub or Bitbucket repository, enables AWS CodeBuild to start
rebuilding the source code every time a code change is pushed to the
repository.
<important> If you enable webhooks for an AWS CodeBuild project, and the
project is used as a build step in AWS CodePipeline, then two identical
builds are created for each commit. One build is triggered through
webhooks, and one through AWS CodePipeline. Because billing is on a
per-build basis, you are billed for both builds. Therefore, if you are
using AWS CodePipeline, we recommend that you disable webhooks in AWS
CodeBuild. In the AWS CodeBuild console, clear the Webhook box. For more
information, see step 5 in [Change a Build Project's
Settings](https://docs.aws.amazon.com/codebuild/latest/userguide/change-project.html#change-project-console).
</important>
"""
def create_webhook(client, input, options \\ []) do
request(client, "CreateWebhook", input, options)
end
@doc """
Deletes a batch build.
"""
def delete_build_batch(client, input, options \\ []) do
request(client, "DeleteBuildBatch", input, options)
end
@doc """
Deletes a build project. When you delete a project, its builds are not
deleted.
"""
def delete_project(client, input, options \\ []) do
request(client, "DeleteProject", input, options)
end
@doc """
Deletes a report.
"""
def delete_report(client, input, options \\ []) do
request(client, "DeleteReport", input, options)
end
@doc """
`DeleteReportGroup`: Deletes a report group. Before you delete a report
group, you must delete its reports. Use
[ListReportsForReportGroup](https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ListReportsForReportGroup.html)
to get the reports in a report group. Use
[DeleteReport](https://docs.aws.amazon.com/codebuild/latest/APIReference/API_DeleteReport.html)
to delete the reports. If you call `DeleteReportGroup` for a report group
that contains one or more reports, an exception is thrown.
"""
def delete_report_group(client, input, options \\ []) do
request(client, "DeleteReportGroup", input, options)
end
@doc """
Deletes a resource policy that is identified by its resource ARN.
"""
def delete_resource_policy(client, input, options \\ []) do
request(client, "DeleteResourcePolicy", input, options)
end
@doc """
Deletes a set of GitHub, GitHub Enterprise, or Bitbucket source
credentials.
"""
def delete_source_credentials(client, input, options \\ []) do
request(client, "DeleteSourceCredentials", input, options)
end
@doc """
For an existing AWS CodeBuild build project that has its source code stored
in a GitHub or Bitbucket repository, stops AWS CodeBuild from rebuilding
the source code every time a code change is pushed to the repository.
"""
def delete_webhook(client, input, options \\ []) do
request(client, "DeleteWebhook", input, options)
end
@doc """
Retrieves one or more code coverage reports.
"""
def describe_code_coverages(client, input, options \\ []) do
request(client, "DescribeCodeCoverages", input, options)
end
@doc """
Returns a list of details about test cases for a report.
"""
def describe_test_cases(client, input, options \\ []) do
request(client, "DescribeTestCases", input, options)
end
@doc """
Gets a resource policy that is identified by its resource ARN.
"""
def get_resource_policy(client, input, options \\ []) do
request(client, "GetResourcePolicy", input, options)
end
@doc """
Imports the source repository credentials for an AWS CodeBuild project that
has its source code stored in a GitHub, GitHub Enterprise, or Bitbucket
repository.
"""
def import_source_credentials(client, input, options \\ []) do
request(client, "ImportSourceCredentials", input, options)
end
@doc """
Resets the cache for a project.
"""
def invalidate_project_cache(client, input, options \\ []) do
request(client, "InvalidateProjectCache", input, options)
end
@doc """
Retrieves the identifiers of your build batches in the current region.
"""
def list_build_batches(client, input, options \\ []) do
request(client, "ListBuildBatches", input, options)
end
@doc """
Retrieves the identifiers of the build batches for a specific project.
"""
def list_build_batches_for_project(client, input, options \\ []) do
request(client, "ListBuildBatchesForProject", input, options)
end
@doc """
Gets a list of build IDs, with each build ID representing a single build.
"""
def list_builds(client, input, options \\ []) do
request(client, "ListBuilds", input, options)
end
@doc """
Gets a list of build IDs for the specified build project, with each build
ID representing a single build.
"""
def list_builds_for_project(client, input, options \\ []) do
request(client, "ListBuildsForProject", input, options)
end
@doc """
Gets information about Docker images that are managed by AWS CodeBuild.
"""
def list_curated_environment_images(client, input, options \\ []) do
request(client, "ListCuratedEnvironmentImages", input, options)
end
@doc """
Gets a list of build project names, with each build project name
representing a single build project.
"""
def list_projects(client, input, options \\ []) do
request(client, "ListProjects", input, options)
end
@doc """
Gets a list ARNs for the report groups in the current AWS account.
"""
def list_report_groups(client, input, options \\ []) do
request(client, "ListReportGroups", input, options)
end
@doc """
Returns a list of ARNs for the reports in the current AWS account.
"""
def list_reports(client, input, options \\ []) do
request(client, "ListReports", input, options)
end
@doc """
Returns a list of ARNs for the reports that belong to a `ReportGroup`.
"""
def list_reports_for_report_group(client, input, options \\ []) do
request(client, "ListReportsForReportGroup", input, options)
end
@doc """
Gets a list of projects that are shared with other AWS accounts or users.
"""
def list_shared_projects(client, input, options \\ []) do
request(client, "ListSharedProjects", input, options)
end
@doc """
Gets a list of report groups that are shared with other AWS accounts or
users.
"""
def list_shared_report_groups(client, input, options \\ []) do
request(client, "ListSharedReportGroups", input, options)
end
@doc """
Returns a list of `SourceCredentialsInfo` objects.
"""
def list_source_credentials(client, input, options \\ []) do
request(client, "ListSourceCredentials", input, options)
end
@doc """
Stores a resource policy for the ARN of a `Project` or `ReportGroup`
object.
"""
def put_resource_policy(client, input, options \\ []) do
request(client, "PutResourcePolicy", input, options)
end
@doc """
Restarts a build.
"""
def retry_build(client, input, options \\ []) do
request(client, "RetryBuild", input, options)
end
@doc """
Restarts a batch build.
"""
def retry_build_batch(client, input, options \\ []) do
request(client, "RetryBuildBatch", input, options)
end
@doc """
Starts running a build.
"""
def start_build(client, input, options \\ []) do
request(client, "StartBuild", input, options)
end
@doc """
Starts a batch build for a project.
"""
def start_build_batch(client, input, options \\ []) do
request(client, "StartBuildBatch", input, options)
end
@doc """
Attempts to stop running a build.
"""
def stop_build(client, input, options \\ []) do
request(client, "StopBuild", input, options)
end
@doc """
Stops a running batch build.
"""
def stop_build_batch(client, input, options \\ []) do
request(client, "StopBuildBatch", input, options)
end
@doc """
Changes the settings of a build project.
"""
def update_project(client, input, options \\ []) do
request(client, "UpdateProject", input, options)
end
@doc """
Updates a report group.
"""
def update_report_group(client, input, options \\ []) do
request(client, "UpdateReportGroup", input, options)
end
@doc """
Updates the webhook associated with an AWS CodeBuild build project.
<note> If you use Bitbucket for your repository, `rotateSecret` is ignored.
</note>
"""
def update_webhook(client, input, options \\ []) do
request(client, "UpdateWebhook", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "codebuild"}
host = build_host("codebuild", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "CodeBuild_20161006.#{action}"}
]
payload = Poison.Encoder.encode(input, %{})
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/code_build.ex
| 0.851073
| 0.636367
|
code_build.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.