| code (string, 114–1.05M chars) | path (string, 3–312 chars) | quality_prob (float64, 0.5–0.99) | learning_prob (float64, 0.2–1) | filename (string, 3–168 chars) | kind (1 class: starcoder) |
|---|---|---|---|---|---|
defmodule Ello.Search.Post.Trending do
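@moduledoc """
Builds the boosting portion of an Elasticsearch `function_score` query for
trending post search: a gaussian recency decay plus `field_value_factor`
boosts on comment, repost, view, and love counts.
"""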
def build_boosting_queries(search_struct) do
search_struct
|> boost_recent
|> boost_comment_count
|> boost_repost_count
|> boost_view_count
|> boost_love_count
|> update_score_mode
end
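# For a trending search, the pipeline above leaves the struct's `query`
# field shaped along these lines (a sketch; the weight/scale/offset/factor
# values come from application config and are illustrative here):
#
#     %{query: %{function_score: %{
#       functions: [
#         %{gauss: %{created_at: %{scale: "14d", offset: "7d"}}, weight: 2},
#         %{field_value_factor: %{field: "comment_count", modifier: "log1p", factor: 2}},
#         ...
#       ],
#       score_mode: "multiply"
#     }}}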
defp boost_recent(%{trending: true, following: false, category: nil} = search_struct) do
weight = Application.get_env(:ello_search, :post_trending_recency_weight)
scale = Application.get_env(:ello_search, :post_trending_recency_scale)
offset = Application.get_env(:ello_search, :post_trending_recency_offset)
recent_boost = %{gauss: %{created_at: %{scale: scale, offset: offset}}, weight: weight}
update_function_score(search_struct, :functions, &([recent_boost | &1]))
end
defp boost_recent(%{trending: true, following: true, category: nil} = search_struct) do
weight = Application.get_env(:ello_search, :following_trending_recency_weight)
scale = Application.get_env(:ello_search, :following_trending_recency_scale)
offset = Application.get_env(:ello_search, :following_trending_recency_offset)
recent_boost = %{gauss: %{created_at: %{scale: scale, offset: offset}}, weight: weight}
update_function_score(search_struct, :functions, &([recent_boost | &1]))
end
defp boost_recent(%{trending: true, following: false} = search_struct) do
weight = Application.get_env(:ello_search, :category_trending_recency_weight)
scale = Application.get_env(:ello_search, :category_trending_recency_scale)
offset = Application.get_env(:ello_search, :category_trending_recency_offset)
recent_boost = %{gauss: %{created_at: %{scale: scale, offset: offset}}, weight: weight}
update_function_score(search_struct, :functions, &([recent_boost | &1]))
end
defp boost_recent(search_struct) do
put_in(search_struct.query[:sort], %{created_at: %{order: "desc"}})
end
defp boost_comment_count(%{trending: true} = search_struct) do
factor = Application.get_env(:ello_search, :post_trending_comment_boost)
update_function_score(search_struct, :functions, &([%{field_value_factor: %{field: "comment_count", modifier: "log1p", factor: factor}} | &1]))
end
defp boost_comment_count(search_struct), do: search_struct
defp boost_repost_count(%{trending: true} = search_struct) do
factor = Application.get_env(:ello_search, :post_trending_repost_boost)
update_function_score(search_struct, :functions, &([%{field_value_factor: %{field: "repost_count", modifier: "log1p", factor: factor}} | &1]))
end
defp boost_repost_count(search_struct), do: search_struct
defp boost_view_count(%{trending: true} = search_struct) do
factor = Application.get_env(:ello_search, :post_trending_view_boost)
update_function_score(search_struct, :functions, &([%{field_value_factor: %{field: "view_count", modifier: "log1p", factor: factor}} | &1]))
end
defp boost_view_count(search_struct), do: search_struct
defp boost_love_count(%{trending: true} = search_struct) do
factor = Application.get_env(:ello_search, :post_trending_love_boost)
update_function_score(search_struct, :functions, &([%{field_value_factor: %{field: "love_count", modifier: "log1p", factor: factor}} | &1]))
end
defp boost_love_count(search_struct), do: search_struct
defp update_score_mode(%{trending: true} = search_struct) do
put_in(search_struct.query[:query][:function_score][:score_mode], "multiply")
end
defp update_score_mode(search_struct), do: search_struct
defp update_function_score(struct, key, fun) do
update_in(struct.query[:query][:function_score][key], fun)
end
end
|
apps/ello_search/lib/ello_search/post/trending.ex
| 0.546496
| 0.465266
|
trending.ex
|
starcoder
|
defmodule UrbitEx.Session do
@moduledoc """
GenServer to keep state of an Urbit Session.
Creates a struct keeping track of global data, and client functions to add and remove channels.
"""
alias UrbitEx.{API, Channel, S3Config, Unread, Resource, Notification}
@derive Jason.Encoder
use GenServer
defstruct ship: "",
url: "",
cookie: "",
keys: [],
groups: [],
channels: [],
unread: [],
profile: %{},
contacts: %{},
invites: [],
metadata: [],
notifications: %{},
group_joins: [],
pending_dms: [],
s3: %S3Config{},
settings: %{},
status: :init
@doc """
Starts the GenServer as a linked process.
Takes a keyword list with initialization options, and an atom to name the process.
"""
def start_link(options \\ [], name \\ :urbit) when is_list(options) do
GenServer.start_link(__MODULE__, struct(__MODULE__, options), name: name)
end
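# Example (a sketch; the url value is illustrative):
#
#     {:ok, _pid} = UrbitEx.Session.start_link([url: "http://localhost:8080"], :urbit)
#     UrbitEx.Session.read(:urbit)
#     #=> %UrbitEx.Session{url: "http://localhost:8080", status: :init, ...}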
@doc """
Starts the GenServer as an unlinked process.
Takes a keyword list with initialization options, and an atom to name the process.
"""
def start(options \\ [], name \\ :urbit) when is_list(options) do
GenServer.start(__MODULE__, struct(__MODULE__, options), name: name)
end
@doc """
Resets state with a new url and cookie.
Takes a pid or atom name of the server to reset, an url string and a cookie string.
"""
def reconnect(pid, url, cookie), do: GenServer.call(pid, {:reconnect, url, cookie})
@doc """
Creates an eyre channel and attaches it to the session.
Takes a keyword list with three optional keys: `:name` to give the new channel, `:parent`, name or pid of the parent session, and `:keep_state`, a boolean to determine whether the channel should send events for automatic state-keeping by the parent session.
"""
def add_channel(opts \\ []) do
name = opts[:name] || :main
parent = opts[:parent] || :urbit
keep_state = Keyword.get(opts, :keep_state, true)
{:ok, cpid} = Channel.start([parent: parent, keep_state: keep_state, name: name], name)
# todo error handling
Channel.connect(cpid)
end
@doc """
Closes an open eyre channel.
Takes a pid or atom name of the parent session, and atom name or pid of the channel to close.
Returns :ok
"""
def close_channel(pid \\ :urbit, name) do
s = read(pid)
c = Channel.read(name)
API.close_channel(s, c)
cpid = Process.whereis(name)
GenServer.cast(pid, {:channel_closed, cpid})
Process.exit(cpid, :kill)
:ok
end
@doc """
Returns the state of the Session.
Takes a pid or atom name of the Session process.
Aliased as `UrbitEx.get`.
"""
def read(pid \\ :urbit) do
GenServer.call(pid, :get)
end
def save_contacts(pid, contacts), do: GenServer.call(pid, {:save, {:contacts, contacts}})
def set_value(pid, key, value), do: GenServer.call(pid, {:save, {key, value}})
def add(pid, key, item), do: send(pid, {:add, {key, item}})
# server
@impl true
def init(session) do
{:ok, session}
end
@impl true
def handle_call({:reconnect, url, cookie} , _from, session) do
[ship] = Regex.run(~r|~.+=|, cookie)
ship = String.replace(ship, ~r|[~=]|,"")
ns = %{session | url: url, cookie: cookie, ship: ship}
{:reply, ns, ns}
end
@impl true
def handle_call(:get, _from, session) do
{:reply, session, session}
end
@impl true
def handle_call({:save, {:contacts, contacts}}, _from, session) do
profile = contacts[UrbitEx.Utils.add_tilde(session.ship)]
new_session = %{session | contacts: contacts, profile: profile}
{:reply, :contacts, new_session}
end
@impl true
def handle_call({:save, {key, value}}, _from, session) do
ns = Map.put(session, key, value)
{:reply, key, ns}
end
@impl true
def handle_cast({:channel_closed, pid}, session) do
new_session = %{session | channels: List.delete(session.channels, pid)}
IO.inspect("closing channel")
{:noreply, new_session}
end
@impl true
def handle_info({:channel_added, name, pid}, session) do
IO.inspect("channel #{name} added")
new_session = %{session | channels: [pid | session.channels]}
{:noreply, new_session}
end
# TODO move data processing to reducer, keep genserver as dumb as possible
defp lookup_notes(type) when is_atom(type) do
case type do
:mention -> :mentions
:message -> :messages
:link -> :links
:post -> :posts
:note -> :notes
:comment -> :comments
:"add-members" -> :joined
:"remove-members" -> :left
end
end
@impl true
def handle_info({:add_or_update, {:notifications, notif}}, session) do
type = lookup_notes(notif.type)
notes = session.notifications[type]
filtered = Enum.filter(notes, & &1.resource != notif.resource)
new = [notif | filtered]
notifications = Map.put(session.notifications, type, new)
{:noreply, %{session | notifications: notifications}}
end
@impl true
def handle_info({:add, {key, item}}, session) do
{:noreply, Map.put(session, key, [item | Map.get(session, key)])}
end
@impl true
def handle_info({:remove, {:notifications, type, resource, _index, _time}}, session) do
notifications = session.notifications[lookup_notes(type)]
filtered = Enum.filter(notifications, & &1.resource != resource)
new = Map.put(session.notifications, lookup_notes(type), filtered)
{:noreply, %{session | notifications: new}}
end
@impl true
def handle_info({:remove, {key, item}}, session) do
{:noreply, Map.put(session, key, List.delete(Map.get(session, key), item))}
end
@impl true
def handle_info({:add_or_update, {:contacts, ship, key, value}}, session) do
to_update = session.contacts[ship]
new_contacts =
if to_update do
updated = Map.put(to_update, key, value)
Map.put(session.contacts, ship, updated)
else
contact = %{key => value}
Map.put(session.contacts, ship, contact)
end
{:noreply, %{session | contacts: new_contacts}}
end
# this applies to metadata and groups
@impl true
def handle_info({:add_or_update, {key, item}}, session) do
list = Map.get(session, key)
old = list |> Enum.find(& &1.resource == item.resource)
if old do
newlist = [item | List.delete(list, old)]
ns = Map.put(session, key, newlist)
{:noreply, ns}
else
{:noreply, Map.put(session, key, [item | list])}
end
end
@impl true
def handle_info({:update, {:unread, :add_count, resource, index, timestamp}}, session) do
old = Enum.find(session.unread, & &1.resource == resource && &1.index == index && &1.count)
new = if old do
%{old | last: timestamp, count: old.count + 1}
else
Unread.newcount(resource, index, timestamp)
end
new_unreads = [new | List.delete(session.unread, old)]
{:noreply, %{session | unread: new_unreads}}
end
@impl true
def handle_info({:update, {:unread, :clear_count, resource, index}}, session) do
old = Enum.find(session.unread, & &1.resource == resource && &1.index == index && &1.count)
new_unreads = List.delete(session.unread, old)
{:noreply, %{session | unread: new_unreads}}
end
@impl true
def handle_info({:update, {:unread, :add_each, resource, index, timestamp}}, session) do
old = Enum.find(session.unread, & &1.resource == resource && &1.each)
new = if old do
%{old | last: timestamp, each: [ index | old.each]}
else
Unread.neweach(resource, index, timestamp)
end
new_unreads = [new | List.delete(session.unread, old)]
{:noreply, %{session | unread: new_unreads}}
end
@impl true
def handle_info({:update, {:unread, :clear_each, resource, index}}, session) do
# todo fix this
old = Enum.find(session.unread, & &1.resource == resource && &1.each)
if old do
new = %{old | each: List.delete(old.each, index)}
new_unreads = [new | List.delete(session.unread, old)]
{:noreply, %{session | unread: new_unreads}}
else
{:noreply, session}
end
end
@impl true
def handle_info({:update, {:groups, :add_members, resource, members}}, session) do
group = session.groups |> Enum.find(& &1.resource == resource)
if group do
updated_group = %{group | members: group.members ++ members}
new_groups = [updated_group | List.delete(session.groups, updated_group)]
{:noreply, %{session | groups: new_groups}}
else
{:noreply, session}
end
end
@impl true
def handle_info({:update, {:groups, :remove_members, resource, members}}, session) do
group = session.groups |> Enum.find(& &1.resource == resource)
updated_group = %{group | members: group.members -- members}
new_groups = [updated_group | List.delete(session.groups, updated_group)]
{:noreply, %{session | groups: new_groups}}
end
@impl true
def handle_info({:update, {:groups, :policy, resource, diff}}, session) do
group = session.groups |> Enum.find(& &1.resource == resource)
updated_group = %{group | policy: diff["replace"]}
new_groups = [updated_group | List.delete(session.groups, updated_group)]
{:noreply, %{session | groups: new_groups}}
end
@impl true
def handle_info({:update, {:groups, :tags, resource, tag, ships}}, session) do
group = session.groups |> Enum.find(& &1.resource == resource)
updated_group = %{group | tags: %{tag => ships}}
new_groups = [updated_group | List.delete(session.groups, updated_group)]
{:noreply, %{session | groups: new_groups}}
end
@impl true
def handle_info({:update, {:s3, struct}}, session) do
data = Map.merge(session.s3, struct, fn (_k, v1, v2) ->
s1 = Jason.encode!(v1)
s2 = Jason.encode!(v2)
if String.length(s1) > String.length(s2), do: v1, else: v2
end)
{:noreply, %{session | s3: data}}
end
@impl true
def handle_info({:update, {:s3, :add_bucket, bucket}}, session) do
newbucket = [bucket | session.s3.buckets]
news3 = Map.put(session.s3, :buckets, newbucket)
{:noreply, %{session | s3: news3}}
end
@impl true
def handle_info({:update, {:s3, :remove_bucket, bucket}}, session) do
newbucket = List.delete(session.s3.buckets, bucket)
news3 = Map.put(session.s3, :buckets, newbucket)
{:noreply, %{session | s3: news3}}
end
@impl true
def handle_info({:update, {:s3, key, value}}, session) do
ns3 = Map.put(session.s3, key, value)
{:noreply, %{session | s3: ns3}}
end
@impl true
def handle_info({:update, {_key, _item}}, session) do
{:noreply, session}
end
end
# login: 370310366L
# password: router2727
|
lib/types/session.ex
| 0.577019
| 0.411584
|
session.ex
|
starcoder
|
defmodule Tablespoon.Transport.FakeModem do
@moduledoc """
Transport implementation which pretends to be a modem.
By default, sends an initial "OK\n" line, then responds to any lines it receives with "OK".
However, there are some configuration variables that can be set to change that.
- connect_error_rate: the percent of connection attempts which fail (1 to 100)
- send_error_rate: the percent of messages which result in a sending error (1 to 100)
- response_error_rate: the percent of responses which reply with "ERROR" instead of "OK" (1 to 100)
- disconnect_rate: the percent of replies which result in a disconnection (1 to 100)
- delay_range: a range of milliseconds by which to delay replies
"""
@behaviour Tablespoon.Transport
alias Tablespoon.Protocol.Line
require Logger
defstruct [
:ref,
:buffer,
connect_error_rate: 0,
send_error_rate: 0,
response_error_rate: 0,
disconnect_rate: 0,
delay_range: 0..0
]
@impl Tablespoon.Transport
def new(opts \\ []) do
struct(__MODULE__, opts)
end
@impl Tablespoon.Transport
def connect(%__MODULE__{} = t) do
if trigger?(t.connect_error_rate) do
{:error, :failed_to_connect}
else
ref = make_ref()
t = %{t | ref: ref, buffer: ""}
reply(t)
{:ok, t}
end
end
@impl Tablespoon.Transport
def close(%__MODULE__{} = t) do
%{t | ref: nil, buffer: ""}
end
@impl Tablespoon.Transport
def send(%__MODULE__{ref: nil}, _) do
{:error, :not_connected}
end
def send(%__MODULE__{} = t, data) do
if trigger?(t.send_error_rate) do
{:error, :trigger_failed}
else
buffer = IO.iodata_to_binary([t.buffer, data])
t = %{t | buffer: buffer}
handle_buffer(t)
end
end
@impl Tablespoon.Transport
def stream(%__MODULE__{ref: ref} = t, {ref, message}) do
if trigger?(t.disconnect_rate) do
t = %{t | ref: nil, buffer: ""}
{:ok, t, [:closed]}
else
{:ok, t, [message]}
end
end
def stream(%__MODULE__{}, _) do
:unknown
end
defp reply(t, data \\ "OK") do
delay = Enum.random(t.delay_range)
_ =
for message <- [data, "\r", "\n"] do
send_after(self(), {t.ref, {:data, message}}, delay)
end
:ok
end
defp handle_buffer(t) do
case Line.decode(t.buffer) do
{:ok, line, rest} ->
t = %{t | buffer: rest}
handle_line(t, line)
{:error, :too_short} ->
{:ok, t}
end
end
defp handle_line(t, "AT*RELAYOUT" <> _ = line) do
reply(t, line)
if trigger?(t.response_error_rate) do
reply(t, "ERROR")
else
reply(t)
end
handle_buffer(t)
end
defp handle_line(t, "") do
handle_buffer(t)
end
def trigger?(rate) do
Enum.random(1..100) <= rate
end
defp send_after(pid, message, delay) when delay > 0 do
Process.send_after(pid, message, delay)
end
defp send_after(pid, message, _delay) do
Kernel.send(pid, message)
end
end
|
lib/tablespoon/transport/fake_modem.ex
| 0.740831
| 0.437343
|
fake_modem.ex
|
starcoder
|
defmodule Stop do
defstruct name: nil, ref: nil, timetable: [], lines: []
@last_time_checked {2017, 4, 10}
@last_time_checked_formatted @last_time_checked
|> Tuple.to_list
|> Enum.join("-")
@moduledoc """
Dublin Bus API
=============
Access to the Real Time Passenger Information (RTPI) for Dublin Bus services.
The API are focused on retrieving bus stop and timetables
Disclaimer
----------
This service is in no way affiliated with Dublin Bus or the providers of the RTPI service.
Data are retrieved by parsing the still-in-development [RTPI](http://rtpi.ie/) site. As with any website
scraping, the html could change without notice and break the API.
Rtpi.ie html parsing works as of **#{@last_time_checked_formatted}**
Test
-----
Parsing functions are tested both against fixtures and the actual website; this could lead to failing tests if an
internet connection is missing. It can also detect whether something has changed in the rtpi.ie website html.
"""
defmodule Row do
@moduledoc false
defstruct [:line, :direction, :time]
end
@info_url "https://data.dublinked.ie/cgi-bin/rtpi/busstopinformation?format=json&stopid="
@real_time_url "https://data.dublinked.ie/cgi-bin/rtpi/realtimebusinformation?format=json&stopid="
@search_url "http://www.rtpi.ie/Text/StopResults.aspx?did=-1&search="
@regex Regex.compile!("stopRef=(?<stop>.*)&stopName")
@typedoc """
A struct that represents a row in a bus stop timetable; time could be an absolute (16:13) or relative time (5m).
"""
@type row :: %Row{
line: String.t,
direction: String.t,
time: String.t
}
@typedoc """
A struct that represents a single stop; it could contain the `timetable` or the `lines` that serve the stop.
`name` and `ref` are always available.
"""
@type stop :: %Stop{
name: String.t,
ref: String.t,
lines: list(String.t),
timetable: list(row)}
@doc """
Return the last time it was checked that the html parsing is still working
"""
def last_time_checked, do: @last_time_checked
@doc """
Return the last time it was checked that the html parsing is still working as a string (yyyy-MM-dd)
"""
def last_time_checked_formatted, do: @last_time_checked_formatted
defp make_request(url) do
{:ok, res} = url
|> HTTPoison.get
|> get_body
|> elem(1)
|> Poison.decode
res
end
@doc"""
Return the requested `Stop`
"""
@spec get_info(String.t) :: stop
def get_info(stop) when is_binary(stop) do
[ok: info, ok: real_time] = [@info_url, @real_time_url]
|> Enum.map(fn url -> url <> stop end)
|> Task.async_stream(&make_request/1)
|> Enum.to_list
[%{"fullname" => name}|_] = info["results"]
timetable = real_time["results"]
|> Enum.map(&parse_row/1)
%Stop{
ref: stop,
name: name,
timetable: timetable
}
end
@spec get_info(integer) :: stop
def get_info(id) when is_integer(id) do
get_info(Integer.to_string(id))
end
@doc """
Return a list of `Stop` matching the `query` provided. It only returns the first ten results
"""
@spec search(String.t) :: list(stop)
def search(query) do
url = @search_url <> URI.encode(query)
body = url
|> HTTPoison.get([], [follow_redirect: false])
|> get_body
case body do
{:ok, body} ->
case Floki.find(body, "#GridViewStopResults") do
[] -> []
elements -> elements
|> hd # get the only element
|> Tuple.to_list
|> List.last # look for the children of the table (tr)
|> tl # discard the header
|> Enum.map(&parse_stop/1)
|> Enum.reject(&is_nil(&1))
end
{:redirect, stop} ->
{stop_int, ""} = Integer.parse(stop)
[get_info(stop_int)]
{:no_results} -> []
end
end
defp get_body({:ok,
%HTTPoison.Response{status_code: 200,
body: body}}) do
{:ok, body}
end
defp get_body({:ok,
%HTTPoison.Response{status_code: 302,
headers: headers}}) do
{_, location} = headers
|> Enum.find(fn(header) -> elem(header, 0) == "Location" end)
%{"stop" => stop} = Regex.named_captures(@regex, location)
{:redirect, stop}
end
defp get_body(_), do: {:no_results}
defp parse_stop({"tr", _ ,
[{"td", _ , [line]},
{"td", _ , [name]}, lines_html]}) do
lines = try do
lines_html
|> Floki.find("tr")
|> Enum.map(&Floki.find(&1, "td"))
|> Enum.map(fn x -> x |> Enum.map(&Floki.text/1) end)
|> Enum.map(&List.to_tuple/1)
|> Enum.into(%{})
rescue
_ -> %{}
end
%Stop{name: Floki.text(name),
ref: line,
lines: lines}
end
defp parse_stop(_), do: nil
defp parse_row(%{"duetime" => time, "destination" => destination, "route" => line}), do: %Row{time: time,
line: line,
direction: destination}
defp parse_row({"tr", _,
[{"td", [{"class", "gridServiceItem"}, _], [line]}, _,
{"td", [{"class", "gridDestinationItem"}, _],
[{"span", _, [direction]}]}, _,
{"td", [{"class", "gridTimeItem"}, _], [time]}, _]}) do
%Row{time: time,
line: line,
direction: direction}
end
end
|
lib/stop.ex
| 0.777933
| 0.519095
|
stop.ex
|
starcoder
|
defmodule D4 do
@moduledoc """
--- Day 4: Passport Processing ---
You arrive at the airport only to realize that you grabbed your North Pole Credentials instead of your passport. While these documents are extremely similar, North Pole Credentials aren't issued by a country and therefore aren't actually valid documentation for travel in most of the world.
It seems like you're not the only one having problems, though; a very long line has formed for the automatic passport scanners, and the delay could upset your travel itinerary.
Due to some questionable network security, you realize you might be able to solve both of these problems at the same time.
The automatic passport scanners are slow because they're having trouble detecting which passports have all required fields. The expected fields are as follows:
byr (Birth Year)
iyr (Issue Year)
eyr (Expiration Year)
hgt (Height)
hcl (Hair Color)
ecl (Eye Color)
pid (Passport ID)
cid (Country ID)
Passport data is validated in batch files (your puzzle input). Each passport is represented as a sequence of key:value pairs separated by spaces or newlines. Passports are separated by blank lines.
Count the number of valid passports - those that have all required fields. Treat cid as optional. In your batch file, how many passports are valid?
--- Part Two ---
The line is moving more quickly now, but you overhear airport security talking about how passports with invalid data are getting through. Better add some data validation, quick!
You can continue to ignore the cid field, but each other field has strict rules about what values are valid for automatic validation:
byr (Birth Year) - four digits; at least 1920 and at most 2002.
iyr (Issue Year) - four digits; at least 2010 and at most 2020.
eyr (Expiration Year) - four digits; at least 2020 and at most 2030.
hgt (Height) - a number followed by either cm or in:
If cm, the number must be at least 150 and at most 193.
If in, the number must be at least 59 and at most 76.
hcl (Hair Color) - a # followed by exactly six characters 0-9 or a-f.
ecl (Eye Color) - exactly one of: amb blu brn gry grn hzl oth.
pid (Passport ID) - a nine-digit number, including leading zeroes.
cid (Country ID) - ignored, missing or not.
Your job is to count the passports where all required fields are both present and valid according to the above rules. Here are some example values:
Count the number of valid passports - those that have all required fields and valid values. Continue to treat cid as optional. In your batch file, how many passports are valid?
"""
@behaviour Day
@required MapSet.new(["byr", "iyr", "eyr", "hgt", "hcl", "ecl", "pid"])
defp initial_pass(input) do
input
|> Enum.join("\n")
|> String.split("\n\n")
|> Enum.map(fn pass ->
kvs = String.split(pass)
if length(kvs) < 7 do
nil
else
map =
kvs
|> Enum.map(&String.split(&1, ":"))
|> Map.new(fn [a, b] -> {a, b} end)
if MapSet.subset?(@required, MapSet.new(Map.keys(map))), do: map, else: nil
end
end)
|> Enum.filter(& &1)
end
defp digits(x, 4), do: Regex.match?(~r/^\d{4}$/, x)
defp digits(x, 9), do: Regex.match?(~r/^\d{9}$/, x)
defp btwn(x, min, max) do
x = Utils.to_int(x)
min <= x and x <= max
end
defp hgt_fmt(x), do: Regex.match?(~r/^(\d{3}cm|\d{2}in)$/, x)
defp hgt_bnd([x, "cm"]), do: btwn(x, 150, 193)
defp hgt_bnd([x, "in"]), do: btwn(x, 59, 76)
defp hgt_bnd(x), do: hgt_bnd(Regex.run(~r/(\d+)(\w+)/, x, capture: :all_but_first))
defp hex(x), do: Regex.match?(~r/^#[a-f0-9]{6}$/, x)
defp ecl(x), do: x in ["amb", "blu", "brn", "gry", "grn", "hzl", "oth"]
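# Spot-checks for the validators above, using example values from the puzzle:
#   digits("2002", 4) #=> true
#   hgt_bnd("190cm")  #=> true
#   hgt_bnd("60in")   #=> true
#   hex("#123abc")    #=> true
#   hex("123abc")     #=> false
#   ecl("wat")        #=> false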
defp second_pass(passports) do
requirements = [
{"byr", &digits(&1, 4)},
{"iyr", &digits(&1, 4)},
{"eyr", &digits(&1, 4)},
{"pid", &digits(&1, 9)},
{"byr", &btwn(&1, 1920, 2002)},
{"iyr", &btwn(&1, 2010, 2020)},
{"eyr", &btwn(&1, 2020, 2030)},
{"hgt", &hgt_fmt/1},
{"hgt", &hgt_bnd/1},
{"hcl", &hex/1},
{"ecl", &ecl/1}
]
passports
|> Enum.filter(fn passport ->
Enum.all?(requirements, fn {prop, fun} ->
apply(fun, [passport[prop]])
end)
end)
end
@impl true
def solve(input) do
passports = initial_pass(input)
part_1 = Enum.count(passports)
part_2 =
passports
|> second_pass
|> Enum.count()
{part_1, part_2}
end
end
|
lib/days/04.ex
| 0.675229
| 0.768255
|
04.ex
|
starcoder
|
defmodule ExRabbitMQ.Producer do
@moduledoc """
A behaviour module that abstracts away the handling of RabbitMQ connections and channels.
It also provides hooks to allow the programmer to publish a message without having to directly
access the AMQP interfaces.
For a connection configuration example see `ExRabbitMQ.Connection.Config`.
#### Example usage for a producer implementing a `GenServer`
```elixir
defmodule MyExRabbitMQProducer do
@module __MODULE__
use GenServer
use ExRabbitMQ.Producer
def start_link() do
GenServer.start_link(@module, :ok)
end
def init(state) do
new_state =
xrmq_init(:my_connection_config, state)
|> xrmq_extract_state()
{:ok, new_state}
end
def handle_cast({:publish, something}, state) do
xrmq_basic_publish(something, "", "my_queue")
{:noreply, state}
end
# optional override when there is a need to set up the channel right after the connection has been established.
def xrmq_channel_setup(channel, state) do
# any other channel setup goes here...
{:ok, state}
end
end
```
"""
@doc """
Initiates a connection or reuses an existing one.
When a connection is established then a new channel is opened.
Next, `c:xrmq_channel_setup/2` is called to do any extra work on the opened channel.
This variant accepts an atom as the argument for the `connection_key` parameters and
uses this atom to read the connection's configuration.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
For the configuration format see the top section of `ExRabbitMQ.Producer`.
"""
@callback xrmq_init(connection_key :: atom, state :: term) ::
{:ok, new_state :: term}
| {:error, reason :: term, new_state :: term}
@doc """
Initiates a connection or reuses an existing one.
When a connection is established then a new channel is opened.
Next, `c:xrmq_channel_setup/2` is called to do any extra work on the opened channel.
This variant accepts a `ExRabbitMQ.Connection` struct as the argument for the `connection_config` parameter.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
For the configuration format see the top section of `ExRabbitMQ.Producer`.
"""
@callback xrmq_init(connection_config :: struct, state :: term) ::
{:ok, new_state :: term}
| {:error, reason :: term, new_state :: term}
@doc """
Returns a part of the `:exrabbitmq` configuration section, specified with the
`key` argument.
For the configuration format see the top section of `ExRabbitMQ.Producer`.
"""
@callback xrmq_get_env_config(key :: atom) :: keyword
@doc """
Returns the connection configuration as it was passed to `c:xrmq_init/2`.
This configuration is set in the wrapper process's dictionary.
For the configuration format see the top section of `ExRabbitMQ.Producer`.
"""
@callback xrmq_get_connection_config() :: term
@doc """
This hook is called when a connection has been established and a new channel has been opened.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_channel_setup(channel :: term, state :: term) ::
{:ok, new_state :: term}
| {:error, reason :: term, new_state :: term}
@doc """
This hook is called when a connection has been established and a new channel has been opened,
right after `c:xrmq_channel_setup/2`.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_channel_open(channel :: term, state :: term) ::
{:ok, new_state :: term}
| {:error, reason :: term, new_state :: term}
@doc """
This overridable function publishes the `payload` to the `exchange` using the provided `routing_key`.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_basic_publish(
payload :: term,
exchange :: String.t(),
routing_key :: String.t(),
opts :: [term]
) ::
:ok
| {:error, reason :: :blocked | :closing | :no_channel}
@doc """
Helper function that extracts the `state` argument from the passed in tuple.
"""
@callback xrmq_extract_state({:ok, state :: term} | {:error, reason :: term, state :: term}) ::
state :: term
require ExRabbitMQ.AST.Common
require ExRabbitMQ.AST.Producer.GenServer
defmacro __using__(_) do
common_ast = ExRabbitMQ.AST.Common.ast()
inner_ast = ExRabbitMQ.AST.Producer.GenServer.ast()
quote location: :keep do
require Logger
alias ExRabbitMQ.Constants
alias ExRabbitMQ.Connection
alias ExRabbitMQ.Connection.Config, as: ConnectionConfig
alias ExRabbitMQ.ChannelRipper
unquote(inner_ast)
def xrmq_init(connection_key, state)
when is_atom(connection_key) do
xrmq_init(xrmq_get_connection_config(connection_key), state)
end
def xrmq_init(%ConnectionConfig{} = connection_config, state) do
connection_config = xrmq_set_connection_config_defaults(connection_config)
connection_pids_group_name = Constants.connection_pids_group_name()
connection_pids =
case :pg2.get_local_members(connection_pids_group_name) do
[] -> []
[_pid | _rest_pids] = pids -> pids
{:error, {:no_such_group, ^connection_pids_group_name}} -> []
end
connection_pid =
case Enum.find(connection_pids, fn c -> Connection.subscribe(c, connection_config) end) do
nil ->
{:ok, pid} = ExRabbitMQ.Connection.Supervisor.start_child(connection_config)
Connection.subscribe(pid, connection_config)
pid
pid ->
pid
end
Process.link(connection_pid)
xrmq_set_connection_pid(connection_pid)
xrmq_set_connection_config(connection_config)
{:ok, channel_ripper_pid} = ChannelRipper.start()
xrmq_set_channel_ripper_pid(channel_ripper_pid)
xrmq_open_channel(state)
end
unquote(common_ast)
end
end
end
|
lib/ex_rabbit_m_q/producer.ex
| 0.894086
| 0.801315
|
producer.ex
|
starcoder
|
defmodule Prog do
@moduledoc """
Documentation for `Prog`.
"""
@doc """
Solves Advent of Code 2020, Day 1: finds the pair (part 1) and the triple
(part 2) of entries that sum to 2020 and prints the product of each.
"""
def solve do
{:ok, raw} = File.read("data/day_1/input")
strings = String.split(raw, "\n", trim: true)
as_ints = Enum.map(strings, fn(s) -> String.to_integer(s) end)
sorted = Enum.sort(as_ints)
head = List.first(sorted)
tail = List.last(sorted)
rem = List.delete_at(sorted, 0) |> List.delete_at(-1)
part_1_res = find_it(2020, head, tail, rem)
IO.puts part_1_res
part_2_res = find_it(2020, sorted)
IO.puts part_2_res
:world
end
# O(n log(n))
# Part 1
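# Two-pointer scan over the sorted list: when head + tail overshoots the
# target, pull the tail inward; when it undershoots, advance the head.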
def find_it(target, head, tail, rem) do
# 1000, 2000
cond do
head + tail == target ->
head * tail
head + tail > target ->
next_tail = List.last(rem)
updated_rem = List.delete_at(rem, -1)
find_it(target, head, next_tail, updated_rem)
head + tail < target ->
next_head = List.first(rem)
updated_rem = List.delete_at(rem, 0)
find_it(target, next_head, tail, updated_rem)
end
end
# O(n^2 log(n))
# Part 2
def find_it(target, rem) do
res = rem
|> Enum.with_index
|> Enum.map(fn({invariant, i}) ->
current_rem = List.delete_at(rem, i)
head = List.first(current_rem)
tail = List.last(current_rem)
current_rem_go = List.delete_at(current_rem, 0) |> List.delete_at(-1)
find_it_iter(target, head, tail, invariant, current_rem_go)
end)
# misses return 0, so the max is the product of the matching triple
Enum.max(res)
end
def find_it_iter(target, head, tail, invariant, rem) do
cond do
head == nil || tail == nil ->
0
head + tail + invariant == target ->
head * tail * invariant
head + tail + invariant > target ->
next_tail = List.last(rem)
updated_rem = List.delete_at(rem, -1)
find_it_iter(target, head, next_tail, invariant, updated_rem)
head + tail + invariant < target ->
next_head = List.first(rem)
updated_rem = List.delete_at(rem, 0)
find_it_iter(target, next_head, tail, invariant, updated_rem)
end
end
end
Prog.solve
|
lib/days/day_1.ex
| 0.867275
| 0.455501
|
day_1.ex
|
starcoder
|
defmodule Kino.DataTable do
@moduledoc """
A kino for interactively viewing tabular data.
The data must be a tabular data supported by `Table`.
## Examples
data = [
%{id: 1, name: "Elixir", website: "https://elixir-lang.org"},
%{id: 2, name: "Erlang", website: "https://www.erlang.org"}
]
Kino.DataTable.new(data)
The tabular view allows you to quickly preview the data
and analyze it thanks to sorting capabilities.
data = Process.list() |> Enum.map(&Process.info/1)
Kino.DataTable.new(
data,
keys: [:registered_name, :initial_call, :reductions, :stack_size]
)
"""
@behaviour Kino.Table
@type t :: Kino.JS.Live.t()
@doc """
Creates a new kino displaying given tabular data.
## Options
* `:keys` - a list of keys to include in the table for each record.
The order is reflected in the rendered table. Optional
* `:name` - The displayed name of the table. Defaults to `"Data"`
* `:sorting_enabled` - whether the table should support sorting the
data. Sorting requires traversal of the whole enumerable, so it
may not be desirable for large lazy enumerables. Defaults to `true`
"""
@spec new(Table.Reader.t(), keyword()) :: t()
def new(tabular, opts \\ []) do
tabular = normalize_tabular(tabular)
name = Keyword.get(opts, :name, "Data")
sorting_enabled = Keyword.get(opts, :sorting_enabled, true)
{data_rows, %{columns: data_columns}} =
if keys = opts[:keys] do
Table.to_rows_with_info(tabular, only: keys)
else
Table.to_rows_with_info(tabular)
end
Kino.Table.new(__MODULE__, {data_rows, data_columns, name, sorting_enabled})
end
defp normalize_tabular([%struct{} | _] = tabular) do
Enum.map(tabular, fn
%^struct{} = item ->
Map.reject(item, fn {key, _val} ->
key |> Atom.to_string() |> String.starts_with?("_")
end)
other ->
raise ArgumentError,
"expected a list of %#{inspect(struct)}{} structs, but got: #{inspect(other)}"
end)
end
defp normalize_tabular(tabular), do: tabular
@impl true
def init({data_rows, data_columns, name, sorting_enabled}) do
features = Kino.Utils.truthy_keys(pagination: true, sorting: sorting_enabled)
info = %{name: name, features: features}
total_rows = Enum.count(data_rows)
{:ok, info,
%{
data_rows: data_rows,
total_rows: total_rows,
columns: Enum.map(data_columns, fn key -> %{key: key, label: inspect(key)} end)
}}
end
@impl true
def get_data(rows_spec, state) do
records = query(state.data_rows, rows_spec)
rows =
Enum.map(records, fn record ->
%{fields: Map.new(record, fn {key, value} -> {key, inspect(value)} end)}
end)
{:ok, %{columns: state.columns, rows: rows, total_rows: state.total_rows}, state}
end
defp query(data, rows_spec) do
sorted_data =
if order_by = rows_spec[:order_by] do
Enum.sort_by(data, & &1[order_by], rows_spec.order)
else
data
end
Enum.slice(sorted_data, rows_spec.offset, rows_spec.limit)
end
end
|
lib/kino/data_table.ex
| 0.910177
| 0.741768
|
data_table.ex
|
starcoder
|
defmodule Arango.Graph do
@moduledoc "ArangoDB Graph methods"
alias Arango.Request
defmodule EdgeDefinition do
@moduledoc false
defstruct [:collection, :from, :to]
@type t :: %__MODULE__{
# The name of the collection
collection: String.t,
# The vertex types an edge can come from
from: [String.t],
# The vertex types an edge can go to
to: [String.t],
}
end
defimpl Jason.Encoder, for: EdgeDefinition do
def encode(value, opts) do
value |> Map.from_struct() |> Jason.Encode.map(opts)
end
end
defmodule Edge do
@moduledoc false
defstruct [:type, :from, :to, :data]
@type t :: %__MODULE__{
# The edge type
type: String.t,
# The from document
from: String.t,
# The to document
to: String.t,
data: map
}
end
# TODO: do we need a struct for structs with a single field?
defmodule VertexCollection do
@moduledoc false
defstruct [:collection]
@type t :: %__MODULE__{
# The name of the collection
collection: String.t,
}
end
@doc """
List all graphs
GET /_api/gharial
"""
@spec graphs() :: Request.t
def graphs() do
%Request{
endpoint: :graph,
http_method: :get,
path: "gharial"
}
end
@doc """
Create a graph
POST /_api/gharial
"""
@spec create(String.t, [EdgeDefinition.t], [String.t]) :: Request.t
def create(graph_name, edge_definitions \\ [], orphan_collections \\ []) do
body = %{
"name" => graph_name,
"edgeDefinitions" => edge_definitions,
"orphanCollections" => orphan_collections
}
%Request{
endpoint: :graph,
http_method: :post,
path: "gharial",
body: body
}
end
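# Example (a sketch; the graph and collection names are illustrative):
#
#     edge_def = %Arango.Graph.EdgeDefinition{collection: "knows", from: ["people"], to: ["people"]}
#     Arango.Graph.create("social", [edge_def])
#     #=> %Arango.Request{endpoint: :graph, http_method: :post, path: "gharial", body: %{...}}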
@doc """
Drop a graph
DELETE /_api/gharial/{graph-name}
"""
@spec drop(String.t) :: Request.t
def drop(graph_name) do
%Request{
endpoint: :graph,
http_method: :delete,
path: "gharial/#{graph_name}"
}
end
@doc """
Get a graph
GET /_api/gharial/{graph-name}
"""
@spec graph(String.t) :: Request.t
def graph(graph_name) do
%Request{
endpoint: :graph,
http_method: :get,
path: "gharial/#{graph_name}"
}
end
@doc """
List edge definitions
GET /_api/gharial/{graph-name}/edge
"""
@spec edges(String.t) :: Request.t
def edges(graph_name) do
%Request{
endpoint: :graph,
http_method: :get,
path: "gharial/#{graph_name}/edge"
}
end
@doc """
Add edge definition
POST /_api/gharial/{graph-name}/edge
"""
@spec extend_edge_definitions(String.t, EdgeDefinition.t) :: Request.t
def extend_edge_definitions(graph_name, edge_definition) do
body = Map.from_struct(edge_definition)
%Request{
endpoint: :graph,
http_method: :post,
path: "gharial/#{graph_name}/edge",
body: body
}
end
@doc """
Create an edge
POST /_api/gharial/{graph-name}/edge/{collection-name}
"""
@spec edge_create(String.t, String.t, Edge.t) :: Request.t
def edge_create(graph_name, collection_name, edge) do
body = %{
"type" => edge.type,
"_from" => edge.from,
"_to" => edge.to,
} |> Map.merge(edge.data || %{})
%Request{
endpoint: :graph,
http_method: :post,
path: "gharial/#{graph_name}/edge/#{collection_name}",
body: body
}
end
@doc """
Remove an edge
DELETE /_api/gharial/{graph-name}/edge/{collection-name}/{edge-key}
"""
@spec edge_delete(String.t, String.t, String.t) :: Request.t
def edge_delete(graph_name, collection_name, edge_key) do
%Request{
endpoint: :graph,
http_method: :delete,
path: "gharial/#{graph_name}/edge/#{collection_name}/#{edge_key}"
}
end
@doc """
Get an edge
GET /_api/gharial/{graph-name}/edge/{collection-name}/{edge-key}
"""
@spec edge(String.t, String.t, String.t) :: Request.t
def edge(graph_name, collection_name, edge_key) do
%Request{
endpoint: :graph,
http_method: :get,
path: "gharial/#{graph_name}/edge/#{collection_name}/#{edge_key}"
}
end
@doc """
Modify an edge
PATCH /_api/gharial/{graph-name}/edge/{collection-name}/{edge-key}
"""
@spec edge_update(String.t, String.t, String.t, map()) :: Request.t
def edge_update(graph_name, collection_name, edge_key, edge_body) do
%Request{
endpoint: :graph,
http_method: :patch,
path: "gharial/#{graph_name}/edge/#{collection_name}/#{edge_key}",
body: edge_body
}
end
@doc """
Replace an edge
PUT /_api/gharial/{graph-name}/edge/{collection-name}/{edge-key}
"""
@spec edge_replace(String.t, String.t, String.t, Edge.t) :: Request.t
def edge_replace(graph_name, collection_name, edge_key, edge) do
body = %{
"type" => edge.type,
"_from" => edge.from,
"_to" => edge.to,
} |> Map.merge(edge.data || %{})
%Request{
endpoint: :graph,
http_method: :put,
path: "gharial/#{graph_name}/edge/#{collection_name}/#{edge_key}",
body: body
}
end
@doc """
Remove an edge definition from the graph
DELETE /_api/gharial/{graph-name}/edge/{definition-name}
"""
@spec edge_definition_delete(String.t, String.t) :: Request.t
def edge_definition_delete(graph_name, edge_definition_name) do
%Request{
endpoint: :graph,
http_method: :delete,
path: "gharial/#{graph_name}/edge/#{edge_definition_name}"
}
end
@doc """
Replace an edge definition
PUT /_api/gharial/{graph-name}/edge/{definition-name}
"""
@spec edge_definition_replace(String.t, String.t, EdgeDefinition.t) :: Request.t
def edge_definition_replace(graph_name, edge_definition_name, edge_definition) do
%Request{
endpoint: :graph,
http_method: :put,
path: "gharial/#{graph_name}/edge/#{edge_definition_name}",
body: edge_definition
}
end
@doc """
List vertex collections
GET /_api/gharial/{graph-name}/vertex
"""
@spec vertex_collections(String.t) :: Request.t
def vertex_collections(graph_name) do
%Request{
endpoint: :graph,
http_method: :get,
path: "gharial/#{graph_name}/vertex",
}
end
@doc """
Add vertex collection
POST /_api/gharial/{graph-name}/vertex
"""
@spec vertex_collection_create(String.t, VertexCollection.t) :: Request.t
def vertex_collection_create(graph_name, vertex_collection) do
body = Map.from_struct(vertex_collection)
%Request{
endpoint: :graph,
http_method: :post,
path: "gharial/#{graph_name}/vertex",
body: body
}
end
@doc """
Remove vertex collection
DELETE /_api/gharial/{graph-name}/vertex/{collection-name}
"""
@spec vertex_collection_delete(String.t, String.t) :: Request.t
def vertex_collection_delete(graph_name, collection_name) do
%Request{
endpoint: :graph,
http_method: :delete,
path: "gharial/#{graph_name}/vertex/#{collection_name}",
}
end
@doc """
Create a vertex
POST /_api/gharial/{graph-name}/vertex/{collection-name}
"""
@spec vertex_create(String.t, String.t, map()) :: Request.t
def vertex_create(graph_name, collection_name, vertex_body) do
%Request{
endpoint: :graph,
http_method: :post,
path: "gharial/#{graph_name}/vertex/#{collection_name}",
body: vertex_body
}
end
@doc """
Remove a vertex
DELETE /_api/gharial/{graph-name}/vertex/{collection-name}/{vertex-key}
"""
@spec vertex_delete(String.t, String.t, String.t) :: Request.t
def vertex_delete(graph_name, collection_name, vertex_key) do
%Request{
endpoint: :graph,
http_method: :delete,
path: "gharial/#{graph_name}/vertex/#{collection_name}/#{vertex_key}",
}
end
@doc """
Get a vertex
GET /_api/gharial/{graph-name}/vertex/{collection-name}/{vertex-key}
"""
@spec vertex(String.t, String.t, String.t) :: Request.t
def vertex(graph_name, collection_name, vertex_key) do
%Request{
endpoint: :graph,
http_method: :get,
path: "gharial/#{graph_name}/vertex/#{collection_name}/#{vertex_key}",
}
end
@doc """
Modify a vertex
PATCH /_api/gharial/{graph-name}/vertex/{collection-name}/{vertex-key}
"""
@spec vertex_update(String.t, String.t, String.t, map()) :: Request.t
def vertex_update(graph_name, collection_name, vertex_key, vertex_body) do
%Request{
endpoint: :graph,
http_method: :patch,
path: "gharial/#{graph_name}/vertex/#{collection_name}/#{vertex_key}",
body: vertex_body
}
end
@doc """
Replace a vertex
PUT /_api/gharial/{graph-name}/vertex/{collection-name}/{vertex-key}
"""
@spec vertex_replace(String.t, String.t, String.t, map()) :: Request.t
def vertex_replace(graph_name, collection_name, vertex_key, vertex_body) do
%Request{
endpoint: :graph,
http_method: :put,
path: "gharial/#{graph_name}/vertex/#{collection_name}/#{vertex_key}",
body: vertex_body
}
end
end
|
lib/arango/graph.ex
| 0.613121
| 0.423279
|
graph.ex
|
starcoder
|
defmodule Tempo.Iso8601.Parser.Numbers do
@moduledoc """
Numbers aren't just numbers in ISO8601 when considering
the extension formats. In some situations they may:
* Have exponents
* Have precision
* Have unknown digits
And how these are formed varies by whether the
number is being parsed for an implicit form,
extended form or explicit form.
## Implicit form
* Numbers are always positive with no sign
except when it's the year, in which case it may have
a negative sign
* Numbers are always a positive integer, or the
"unknown" symbol `X` in any digit location. Numbers
are either 2, 3 or 4 digits wide (decades are three
digits) and this implementation does not currently
support more than 4 digits for years.
* Neither exponents nor significant digits are supported
## Extended form
* Same as the Implicit Form
## Explicit form
* Numbers may be positive or negative
* The "unknown" symbol `X` may appear in
any digit location.
* The symbol `X*` means the entire
field is unspecified.
* Exponent and significant digits are supported,
but only if the number is an integer (ie does
not have unknown digits)
"""
import NimbleParsec
import Tempo.Iso8601.Parser.Helpers
def positive_number(combinator \\ empty(), opts)
def positive_number(combinator, n) when is_integer(n) do
combinator
|> choice([
integer(n) |> optional(exponent()) |> optional(significant()),
digit_or_unknown() |> times(n)
])
|> reduce(:form_number)
|> label("positive number")
end
def positive_number(combinator, opts) do
combinator
|> choice([
integer(opts)
|> lookahead_not(unknown())
|> optional(exponent())
|> optional(significant()),
digit_or_unknown() |> times(opts)
])
|> reduce(:form_number)
|> label("positive number")
end
def maybe_negative_number(combinator \\ empty(), opts) do
combinator
|> optional(negative())
|> positive_number(opts)
|> reduce(:form_number)
|> label("maybe negative number")
end
def exponent do
ignore(string("E"))
|> integer(min: 1)
|> unwrap_and_tag(:exponent)
end
def significant do
ignore(string("S"))
|> integer(min: 1)
|> unwrap_and_tag(:significant)
end
def form_number([integer]) when is_integer(integer) do
integer
end
def form_number([?-, integer | rest]) when is_integer(integer) do
form_number([-integer | rest])
end
def form_number([integer, {:exponent, exponent} | rest]) do
form_number([(integer * :math.pow(10, exponent)) |> trunc | rest])
end
def form_number([integer, {:significant, significant}]) do
{integer, significant}
end
def form_number([tuple]) when is_tuple(tuple) do
tuple
end
def form_number([list]) when is_list(list) do
list
end
def form_number(other) do
other
end
def fraction do
ignore(decimal_separator())
|> times(ascii_char([?0..?9]), min: 1)
|> reduce({List, :to_integer, []})
|> unwrap_and_tag(:fraction)
|> label("fraction")
end
end
|
lib/parser/numbers.ex
| 0.822474
| 0.594581
|
numbers.ex
|
starcoder
|
defmodule Mix.Tasks.Sobelow do
use Mix.Task
@moduledoc """
Sobelow is a static analysis tool for discovering
vulnerabilities in Phoenix applications.
This tool should be run in the root of the project directory
with the following command:
mix sobelow
## Command line options
* `--root -r` - Specify application root directory
* `--verbose -v` - Print vulnerable code snippets
* `--ignore -i` - Ignore modules
* `--ignore-files` - Ignore files
* `--details -d` - Get module details
* `--all-details` - Get all module details
* `--private` - Skip update checks
* `--mark-skip-all` - Mark all printed findings as skippable
* `--clear-skip` - Clear configuration added by `--mark-skip-all`
* `--skip` - Skip functions flagged with `#sobelow_skip` or tagged with `--mark-skip-all`
* `--router` - Specify router location
* `--exit` - Return non-zero exit status
* `--threshold` - Only return findings at or above a given confidence level
* `--format` - Specify findings output format
* `--quiet` - Return no output if there are no findings
* `--compact` - Minimal, single-line findings
* `--save-config` - Generates a configuration file based on command line options
* `--config` - Run Sobelow with configuration file
## Ignoring modules
If specific modules, or classes of modules are not relevant
to the scan, it is possible to ignore them with a
comma-separated list.
mix sobelow -i XSS.Raw,Traversal
## Supported modules
* XSS
* XSS.Raw
* XSS.SendResp
* XSS.ContentType
* XSS.HTML
* SQL
* SQL.Query
* SQL.Stream
* Config
* Config.CSRF
* Config.Headers
* Config.CSP
* Config.HTTPS
* Config.HSTS
* Config.Secrets
* Config.CSWH
* Vuln
* Vuln.CookieRCE
* Vuln.HeaderInject
* Vuln.PlugNull
* Vuln.Redirect
* Vuln.Coherence
* Vuln.Ecto
* Traversal
* Traversal.SendFile
* Traversal.FileModule
* Traversal.SendDownload
* Misc
* Misc.BinToTerm
* Misc.FilePath
* RCE.EEx
* RCE.CodeModule
* CI
* CI.System
* CI.OS
* DOS
* DOS.StringToAtom
* DOS.ListToAtom
* DOS.BinToAtom
"""
@switches [
verbose: :boolean,
root: :string,
ignore: :string,
ignore_files: :string,
details: :string,
all_details: :boolean,
private: :boolean,
diff: :string,
skip: :boolean,
mark_skip_all: :boolean,
clear_skip: :boolean,
router: :string,
exit: :string,
format: :string,
config: :boolean,
save_config: :boolean,
quiet: :boolean,
compact: :boolean,
flycheck: :boolean,
out: :string,
threshold: :string
]
@aliases [v: :verbose, r: :root, i: :ignore, d: :details, f: :format]
# For escript entry
def main(argv) do
run(argv)
end
def run(argv) do
{opts, _, _} = OptionParser.parse(argv, aliases: @aliases, switches: @switches)
root = Keyword.get(opts, :root, ".")
config = Keyword.get(opts, :config, false)
conf_file = root <> "/.sobelow-conf"
conf_file? = config && File.exists?(conf_file)
opts =
if is_nil(Keyword.get(opts, :exit)) && Enum.member?(argv, "--exit") do
[{:exit, "low"} | opts]
else
opts
end
opts =
if conf_file? do
{:ok, opts} = File.read!(conf_file) |> Code.string_to_quoted()
opts
else
opts
end
{verbose, diff, details, private, skip, mark_skip_all, clear_skip, router, exit_on, format,
ignored, ignored_files, all_details, out, threshold} = get_opts(opts, root, conf_file?)
set_env(:verbose, verbose)
if with_code = Keyword.get(opts, :with_code) do
Mix.Shell.IO.info("WARNING: --with-code is deprecated, please use --verbose instead.\n")
set_env(:verbose, with_code)
end
set_env(:root, root)
set_env(:details, details)
set_env(:private, private)
set_env(:skip, skip)
set_env(:mark_skip_all, mark_skip_all)
set_env(:clear_skip, clear_skip)
set_env(:router, router)
set_env(:exit_on, exit_on)
set_env(:format, format)
set_env(:ignored, ignored)
set_env(:ignored_files, ignored_files)
set_env(:out, out)
set_env(:threshold, threshold)
save_config = Keyword.get(opts, :save_config)
cond do
diff ->
run_diff(argv)
!is_nil(save_config) ->
Sobelow.save_config(conf_file)
!is_nil(all_details) ->
Sobelow.all_details()
!is_nil(details) ->
Sobelow.details()
true ->
Sobelow.run()
end
end
# This diff check is strictly used for testing/debugging and
# isn't meant for general use.
def run_diff(argv) do
diff_idx = Enum.find_index(argv, fn i -> i === "--diff" end)
{_, list} = List.pop_at(argv, diff_idx)
{diff_target, list} = List.pop_at(list, diff_idx)
args = Enum.join(list, " ") |> to_charlist()
diff_target = to_charlist(diff_target)
:os.cmd('mix sobelow ' ++ args ++ ' > sobelow.tempdiff')
IO.puts(:os.cmd('diff sobelow.tempdiff ' ++ diff_target))
end
def set_env(key, value) do
Application.put_env(:sobelow, key, value)
end
defp get_opts(opts, root, conf_file?) do
verbose = Keyword.get(opts, :verbose, false)
details = Keyword.get(opts, :details, nil)
all_details = Keyword.get(opts, :all_details)
private = Keyword.get(opts, :private, false)
diff = Keyword.get(opts, :diff, false)
skip = Keyword.get(opts, :skip, false)
mark_skip_all = Keyword.get(opts, :mark_skip_all, false)
clear_skip = Keyword.get(opts, :clear_skip, false)
router = Keyword.get(opts, :router)
out = Keyword.get(opts, :out)
exit_on =
case String.downcase(Keyword.get(opts, :exit, "None")) do
"high" -> :high
"medium" -> :medium
"low" -> :low
_ -> false
end
format =
cond do
Keyword.get(opts, :quiet) -> "quiet"
Keyword.get(opts, :compact) -> "compact"
Keyword.get(opts, :flycheck) -> "flycheck"
true -> Keyword.get(opts, :format, "txt") |> String.downcase()
end
format = out_format(out, format)
{ignored, ignored_files} =
if conf_file? do
{Keyword.get(opts, :ignore, []),
Keyword.get(opts, :ignore_files, []) |> Enum.map(&Path.expand(&1, root))}
else
ignored =
Keyword.get(opts, :ignore, "")
|> String.split(",")
ignored_files =
Keyword.get(opts, :ignore_files, "")
|> String.split(",")
|> Enum.reject(fn file -> file == "" end)
|> Enum.map(&Path.expand(&1, root))
{ignored, ignored_files}
end
threshold =
case String.downcase(Keyword.get(opts, :threshold, "low")) do
"high" -> :high
"medium" -> :medium
_ -> :low
end
{verbose, diff, details, private, skip, mark_skip_all, clear_skip, router, exit_on, format,
ignored, ignored_files, all_details, out, threshold}
end
# Future updates will include format hinting based on the outfile name. Additional output
# formats will also be added.
defp out_format(nil, format), do: format
defp out_format("", format), do: format
defp out_format(_out, format) do
cond do
format in ["json", "quiet", "sarif"] -> format
true -> "json"
end
end
end
|
lib/mix/tasks/sobelow.ex
| 0.648466
| 0.521959
|
sobelow.ex
|
starcoder
|
defmodule Pow.Store.Backend.MnesiaCache do
@moduledoc """
GenServer based key value Mnesia cache store with auto expiration.
When the MnesiaCache starts, it'll initialize invalidators for all stored
keys using the `expire` value. If the `expire` datetime is past, it'll
call the invalidator immediately.
Mnesia will create a `Mnesia.Node` directory in the current working directory
to write files to. This can be changed by setting the `-mnesia dir` config:
config :mnesia, dir: '/path/to/dir'
The directory path should be accessible, otherwise MnesiaCache will crash on
startup.
## Distribution
The MnesiaCache is built to handle multi-node setup.
If you initialize with `extra_db_nodes: Node.list()`, it'll automatically
connect to the cluster. If there is no other nodes available, the data
persisted to disk will be loaded, but if a cluster is running, the data in
the existing cluster nodes will be loaded instead of the local data. This
could potentially cause data loss, but is an accepted risk as all data stored
by Pow should be ephemeral.
When a cache key expires, the expiration will be verified before deletion to
ensure that it hasn't been updated by another node. When a key is updated on
a node, the node will ping all other nodes to refresh their invalidators so
the new TTL is used.
All nodes spun up will by default persist to disk. If you start up multiple
nodes from the same physical directory you have to make sure that each node
has a unique directory path configured. This can be done using different
config files, or by using a system environment variable:
config :mnesia, dir: to_charlist(System.get_env("MNESIA_DIR"))
You can use `Pow.Store.Backend.MnesiaCache.Unsplit` to automatically recover
from network split issues. All partitioned nodes will have their table
flushed and reloaded from the oldest node in the cluster.
## Usage
To start the GenServer, add it to your application `start/2` method:
defmodule MyApp.Application do
use Application
def start(_type, _args) do
children = [
MyApp.Repo,
MyAppWeb.Endpoint,
Pow.Store.Backend.MnesiaCache
# # Or in a distributed system:
# {Pow.Store.Backend.MnesiaCache, extra_db_nodes: Node.list()},
# Pow.Store.Backend.MnesiaCache.Unsplit # Recover from netsplit
]
opts = [strategy: :one_for_one, name: MyAppWeb.Supervisor]
Supervisor.start_link(children, opts)
end
# ...
end
## Initialization options
* `:extra_db_nodes` - list of nodes in cluster to connect to.
* `:table_opts` - options to add to table definition. This value defaults
to `[disc_copies: [node()]]`.
* `:timeout` - timeout value in milliseconds for how long to wait until the
cache table has initiated. Defaults to 15 seconds.
## Configuration options
* `:ttl` - integer value in milliseconds for ttl of records (required).
* `:namespace` - string value to use for namespacing keys, defaults to
"cache".
"""
use GenServer
alias Pow.{Config, Store.Backend.Base}
require Logger
@behaviour Base
@mnesia_cache_tab __MODULE__
@spec start_link(Config.t()) :: GenServer.on_start()
def start_link(config) do
# TODO: Remove by 1.1.0
case Config.get(config, :nodes) do
nil -> :ok
_nodes -> IO.warn("use of `:nodes` config value for #{inspect __MODULE__} is no longer used")
end
GenServer.start_link(__MODULE__, config, name: __MODULE__)
end
@impl Base
def put(config, record_or_records) do
ttl = ttl!(config)
GenServer.cast(__MODULE__, {:cache, config, record_or_records, ttl})
end
@impl Base
def delete(config, key) do
GenServer.cast(__MODULE__, {:delete, config, key})
end
@impl Base
def get(config, key) do
table_get(key, config)
end
@impl Base
def all(config, match) do
table_all(match, config)
end
# Callbacks
@impl GenServer
@spec init(Config.t()) :: {:ok, map()}
def init(config) do
init_mnesia(config)
{:ok, %{invalidators: init_invalidators(config)}}
end
@impl GenServer
@spec handle_cast({:cache, Config.t(), Base.record() | [Base.record()], integer()}, map()) :: {:noreply, map()}
def handle_cast({:cache, config, record_or_records, ttl}, %{invalidators: invalidators} = state) do
invalidators =
record_or_records
|> table_insert(ttl, config)
|> Enum.reduce(invalidators, fn {key, _}, invalidators ->
append_invalidator(key, invalidators, ttl, config)
end)
refresh_invalidators_in_cluster(config)
{:noreply, %{state | invalidators: invalidators}}
end
@spec handle_cast({:delete, Config.t(), Base.key() | [Base.key()]}, map()) :: {:noreply, map()}
def handle_cast({:delete, config, key}, %{invalidators: invalidators} = state) do
invalidators =
key
|> table_delete(config)
|> clear_invalidator(invalidators)
{:noreply, %{state | invalidators: invalidators}}
end
@spec handle_cast({:refresh_invalidators, Config.t()}, map()) :: {:noreply, map()}
def handle_cast({:refresh_invalidators, config}, %{invalidators: invalidators} = state) do
{:noreply, %{state | invalidators: init_invalidators(config, invalidators)}}
end
@impl GenServer
@spec handle_info({:invalidate, Config.t(), [Base.key()]}, map()) :: {:noreply, map()}
def handle_info({:invalidate, config, key}, %{invalidators: invalidators} = state) do
invalidators = delete_or_reschedule(key, invalidators, config)
{:noreply, %{state | invalidators: invalidators}}
end
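# Invalidation can race with updates from other nodes: if the record is gone
# the invalidators are returned unchanged, if it has expired the record is
# deleted, and if its TTL was extended the invalidator is rescheduled with
# the remaining time.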
defp delete_or_reschedule(key, invalidators, config) do
config
|> fetch(key)
|> case do
nil ->
invalidators
{_value, expire} ->
case Enum.max([expire - timestamp(), 0]) do
0 ->
key
|> table_delete(config)
|> clear_invalidator(invalidators)
ttl ->
append_invalidator(key, invalidators, ttl, config)
end
end
end
defp append_invalidator(key, invalidators, ttl, config) do
invalidators = clear_invalidator(key, invalidators)
invalidator = trigger_ttl(key, ttl, config)
Map.put(invalidators, key, invalidator)
end
defp trigger_ttl(key, ttl, config) do
Process.send_after(self(), {:invalidate, config, key}, ttl)
end
defp refresh_invalidators_in_cluster(config) do
:running_db_nodes
|> :mnesia.system_info()
|> Enum.reject(& &1 == node())
|> Enum.each(&:rpc.call(&1, GenServer, :cast, [__MODULE__, {:refresh_invalidators, config}]))
end
defp clear_invalidator(key, invalidators) do
case Map.get(invalidators, key) do
nil -> nil
invalidator -> Process.cancel_timer(invalidator)
end
Map.delete(invalidators, key)
end
defp table_get(key, config) do
config
|> fetch(key)
|> case do
{value, _expire} -> value
nil -> :not_found
end
end
defp fetch(config, key) do
mnesia_key = mnesia_key(config, key)
{@mnesia_cache_tab, mnesia_key}
|> :mnesia.dirty_read()
|> case do
[{@mnesia_cache_tab, ^mnesia_key, value} | _rest] -> value
[] -> nil
end
end
defp table_all(key_match, config) do
mnesia_key_match = mnesia_key(config, key_match)
@mnesia_cache_tab
|> :mnesia.dirty_select([{{@mnesia_cache_tab, mnesia_key_match, :_}, [], [:"$_"]}])
|> Enum.map(fn {@mnesia_cache_tab, key, {value, _expire}} -> {unwrap(key), value} end)
end
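# Keys are stored as [namespace | key] lists; unwrap/1 restores the original
# key, whether it is a single term or a composite list.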
defp unwrap([_namespace, key]), do: key
defp unwrap([_namespace | key]), do: key
defp table_insert(record_or_records, ttl, config) do
expire = timestamp() + ttl
records = List.wrap(record_or_records)
{:atomic, _result} =
:mnesia.sync_transaction(fn ->
Enum.map(records, fn {key, value} ->
mnesia_key = mnesia_key(config, key)
value = {value, expire}
:mnesia.write({@mnesia_cache_tab, mnesia_key, value})
end)
end)
records
end
defp table_delete(key, config) do
{:atomic, key} =
:mnesia.sync_transaction(fn ->
mnesia_key = mnesia_key(config, key)
:mnesia.delete({@mnesia_cache_tab, mnesia_key})
key
end)
key
end
defp init_mnesia(config) do
config
|> find_active_cluster_nodes()
|> case do
[] -> init_cluster(config)
nodes -> join_cluster(config, nodes)
end
end
defp find_active_cluster_nodes(config) do
visible_nodes = Node.list()
db_nodes = Config.get(config, :extra_db_nodes, [])
db_nodes
|> Enum.filter(& &1 in visible_nodes)
|> Enum.filter(&:rpc.block_call(&1, :mnesia, :system_info, [:is_running]) == :yes)
end
defp init_cluster(config) do
with :ok <- start_mnesia(),
:ok <- change_table_copy_type(config),
:ok <- create_table(config),
:ok <- wait_for_table(config) do
Logger.info("[#{inspect __MODULE__}] Mnesia cluster initiated on #{inspect node()}")
else
{:error, reason} ->
Logger.error("[inspect __MODULE__}] Couldn't initialize mnesia cluster because: #{inspect reason}")
{:error, reason}
end
end
defp join_cluster(config, cluster_nodes) do
with :ok <- set_mnesia_master_nodes(cluster_nodes),
:ok <- start_mnesia(),
:ok <- connect_to_cluster(cluster_nodes),
:ok <- change_table_copy_type(config),
:ok <- sync_table(config, cluster_nodes),
:ok <- wait_for_table(config) do
Logger.info("[#{inspect __MODULE__}] Joined mnesia cluster nodes #{inspect cluster_nodes} for #{inspect node()}")
:ok
else
{:error, reason} ->
Logger.error("[#{inspect __MODULE__}] Couldn't join mnesia cluster because: #{inspect reason}")
{:error, reason}
end
end
defp start_mnesia do
case Application.start(:mnesia) do
{:error, {:already_started, :mnesia}} -> :ok
:ok -> :ok
end
end
defp set_mnesia_master_nodes(cluster_nodes) do
case :mnesia.system_info(:running_db_nodes) do
[] ->
:ok
_nodes ->
Application.stop(:mnesia)
:mnesia.set_master_nodes(@mnesia_cache_tab, cluster_nodes)
end
end
defp change_table_copy_type(config) do
copy_type = get_copy_type(config, node())
case :mnesia.change_table_copy_type(:schema, node(), copy_type) do
{:atomic, :ok} -> :ok
{:aborted, {:already_exists, :schema, _, _}} -> :ok
end
end
defp get_copy_type(config, node) do
types = [:ram_copies, :disc_copies, :disc_only_copies]
table_opts = Config.get(config, :table_opts, [])
Enum.find(types, :disc_copies, fn type ->
nodes = table_opts[type] || []
node in nodes
end)
end
defp create_table(config) do
table_opts = Config.get(config, :table_opts, [disc_copies: [node()]])
table_def = Keyword.merge(table_opts, [type: :ordered_set])
case :mnesia.create_table(@mnesia_cache_tab, table_def) do
{:atomic, :ok} -> :ok
{:aborted, {:already_exists, @mnesia_cache_tab}} -> :ok
end
end
defp sync_table(_config, [cluster_node | _rest]) do
copy_type = :rpc.block_call(cluster_node, :mnesia, :table_info, [@mnesia_cache_tab, :storage_type])
case :mnesia.add_table_copy(@mnesia_cache_tab, node(), copy_type) do
{:atomic, :ok} -> :ok
{:aborted, {:already_exists, _, _}} -> :ok
any -> {:error, any}
end
end
defp wait_for_table(config) do
timeout = Config.get(config, :timeout, :timer.seconds(15))
:mnesia.wait_for_tables([@mnesia_cache_tab], timeout)
end
defp connect_to_cluster([cluster_node | _cluster_nodes]) do
case :mnesia.change_config(:extra_db_nodes, [cluster_node]) do
{:ok, _} -> :ok
{:error, reason} -> {:error, reason}
end
end
defp mnesia_key(config, key) do
[namespace(config) | List.wrap(key)]
end
defp namespace(config), do: Config.get(config, :namespace, "cache")
defp init_invalidators(config, existing_invalidators \\ %{}) do
clear_all_invalidators(existing_invalidators)
{:atomic, invalidators} =
:mnesia.sync_transaction(fn ->
:mnesia.foldl(fn
{@mnesia_cache_tab, key, {_value, expire}}, invalidators when is_list(key) ->
ttl = Enum.max([expire - timestamp(), 0])
key
|> unwrap()
|> append_invalidator(invalidators, ttl, config)
# TODO: Remove by 1.1.0
{@mnesia_cache_tab, key, {_key, _value, _config, expire}}, invalidators when is_binary(key) and is_number(expire) ->
Logger.warn("[#{inspect __MODULE__}] Deleting old record #{inspect key}")
:mnesia.delete({@mnesia_cache_tab, key})
invalidators
{@mnesia_cache_tab, key, _value}, invalidators ->
Logger.warn("[#{inspect __MODULE__}] Found unexpected record #{inspect key}, please delete it")
invalidators
end,
%{},
@mnesia_cache_tab)
end)
invalidators
end
defp clear_all_invalidators(invalidators) do
invalidators
|> Map.keys()
|> Enum.reduce(invalidators, fn key, invalidators ->
clear_invalidator(key, invalidators)
end)
end
defp timestamp, do: :os.system_time(:millisecond)
defp ttl!(config) do
Config.get(config, :ttl) || raise_ttl_error()
end
@spec raise_ttl_error :: no_return
defp raise_ttl_error,
do: Config.raise_error("`:ttl` configuration option is required for #{inspect(__MODULE__)}")
# TODO: Remove by 1.1.0
@deprecated "Use `put/2` instead"
@doc false
def put(config, key, value), do: put(config, {key, value})
# TODO: Remove by 1.1.0
@deprecated "Use `all/2` instead"
@doc false
def keys(config), do: all(config, :_)
end
|
lib/pow/store/backend/mnesia_cache.ex
| 0.799325
| 0.497376
|
mnesia_cache.ex
|
starcoder
|
defmodule Oban.Job do
@moduledoc """
A Job is an Ecto schema used for asynchronous execution.
Job changesets are created by your application code and inserted into the database for
asynchronous execution. Jobs can be inserted along with other application data as part of a
transaction, which guarantees that jobs will only be triggered from a successful transaction.
"""
@moduledoc since: "0.1.0"
use Ecto.Schema
import Ecto.Changeset
@type args :: map()
@type errors :: [%{at: DateTime.t(), attempt: pos_integer(), error: binary()}]
@type tags :: [binary()]
@type unique_field :: [:args | :queue | :worker]
@type unique_period :: pos_integer() | :infinity
@type unique_state :: [
:available
| :scheduled
| :executing
| :retryable
| :completed
| :discarded
]
@type unique_option ::
{:fields, [unique_field()]}
| {:period, unique_period()}
| {:states, [unique_state()]}
@type option ::
{:args, args()}
| {:max_attempts, pos_integer()}
| {:priority, pos_integer()}
| {:queue, atom() | binary()}
| {:schedule_in, pos_integer()}
| {:scheduled_at, DateTime.t()}
| {:tags, tags()}
| {:unique, [unique_option()]}
| {:worker, atom() | binary()}
@type t :: %__MODULE__{
id: pos_integer(),
state: binary(),
queue: binary(),
worker: binary(),
args: args(),
errors: errors(),
tags: tags(),
attempt: non_neg_integer(),
attempted_by: [binary()],
max_attempts: pos_integer(),
priority: pos_integer(),
inserted_at: DateTime.t(),
scheduled_at: DateTime.t(),
attempted_at: DateTime.t(),
completed_at: DateTime.t(),
discarded_at: DateTime.t(),
unique: %{fields: [unique_field()], period: unique_period(), states: [unique_state()]},
unsaved_error: %{kind: atom(), reason: term(), stacktrace: Exception.stacktrace()}
}
schema "oban_jobs" do
field :state, :string, default: "available"
field :queue, :string, default: "default"
field :worker, :string
field :args, :map
field :errors, {:array, :map}, default: []
field :tags, {:array, :string}, default: []
field :attempt, :integer, default: 0
field :attempted_by, {:array, :string}
field :max_attempts, :integer, default: 20
field :priority, :integer, default: 0
field :attempted_at, :utc_datetime_usec
field :completed_at, :utc_datetime_usec
field :discarded_at, :utc_datetime_usec
field :inserted_at, :utc_datetime_usec
field :scheduled_at, :utc_datetime_usec
field :unique, :map, virtual: true
field :unsaved_error, :map, virtual: true
end
@permitted_params ~w(
args
attempt
attempted_by
attempted_at
completed_at
discarded_at
errors
inserted_at
max_attempts
priority
queue
scheduled_at
state
tags
worker
)a
@required_params ~w(worker args)a
@doc """
Construct a new job changeset ready for insertion into the database.
## Options
* `:max_attempts` — the maximum number of times a job can be retried if there are errors
during execution
* `:priority` — a numerical indicator from 0 to 3 of how important this job is relative to
other jobs in the same queue. The lower the number, the higher the job's priority.
* `:queue` — a named queue to push the job into. Jobs may be pushed into any queue, regardless
of whether jobs are currently being processed for the queue.
* `:schedule_in` - the number of seconds until the job should be executed
* `:scheduled_at` - a time in the future after which the job should be executed
* `:tags` — a list of tags to group and organize related jobs, e.g. to identify scheduled jobs
* `:unique` — a keyword list of options specifying how uniqueness will be calculated. The
options define which fields will be used, for how long, and for which states.
* `:worker` — a module to execute the job in. The module must implement the `Oban.Worker`
behaviour.
## Examples
Insert a job with the `:default` queue:
%{id: 1, user_id: 2}
|> Oban.Job.new(queue: :default, worker: MyApp.Worker)
|> Oban.insert()
Generate a pre-configured job for `MyApp.Worker` and push it:
%{id: 1, user_id: 2} |> MyApp.Worker.new() |> Oban.insert()
Schedule a job to run in 5 seconds:
%{id: 1} |> MyApp.Worker.new(schedule_in: 5) |> Oban.insert()
Insert a job, ensuring that it is unique within the past minute:
%{id: 1} |> MyApp.Worker.new(unique: [period: 60]) |> Oban.insert()
Insert a unique job based only on the worker field, and within multiple states:
fields = [:worker]
states = [:available, :scheduled, :executing, :retryable, :completed]
%{id: 1}
|> MyApp.Worker.new(unique: [fields: fields, period: 60, states: states])
|> Oban.insert()
"""
@doc since: "0.1.0"
@spec new(args(), [option]) :: Ecto.Changeset.t()
def new(args, opts \\ []) when is_map(args) and is_list(opts) do
params =
opts
|> Keyword.put(:args, args)
|> Map.new()
|> coerce_field(:queue)
|> coerce_field(:worker)
|> normalize_tags()
%__MODULE__{}
|> cast(params, @permitted_params)
|> validate_required(@required_params)
|> put_scheduling(params[:schedule_in])
|> put_uniqueness(params[:unique])
|> put_state()
|> validate_length(:queue, min: 1, max: 128)
|> validate_length(:worker, min: 1, max: 128)
|> validate_number(:max_attempts, greater_than: 0)
|> validate_number(:priority, greater_than: -1, less_than: 4)
end
@unique_fields ~w(args queue worker)a
@unique_period 60
@unique_states ~w(scheduled available executing retryable completed discarded)a
@doc """
A canonical list of all possible job states.
This may be used to build up `:unique` options without duplicating states in application code.
## Examples
iex> Oban.Job.states() -- [:completed, :discarded]
[:scheduled, :available, :executing, :retryable]
"""
def states, do: @unique_states
@doc """
Convert a Job changeset into a map suitable for database insertion.
## Examples
Convert a worker generated changeset into a plain map:
%{id: 123}
|> MyApp.Worker.new()
|> Oban.Job.to_map()
"""
@doc since: "0.9.0"
@spec to_map(Ecto.Changeset.t(t())) :: map()
def to_map(%Ecto.Changeset{} = changeset) do
changeset
|> apply_changes()
|> Map.from_struct()
|> Map.take(@permitted_params)
|> Enum.reject(fn {_, val} -> is_nil(val) end)
|> Map.new()
end
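# The :queue and :worker params may be given as atoms (e.g. MyApp.Worker);
# they are normalized to strings with the "Elixir." prefix stripped.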
defp coerce_field(params, field) do
case Map.get(params, field) do
value when is_atom(value) and not is_nil(value) ->
update_in(params, [field], &to_clean_string/1)
value when is_binary(value) ->
update_in(params, [field], &to_clean_string/1)
_ ->
params
end
end
@doc false
@spec valid_unique_opt?({:fields | :period | :states, [atom()] | integer()}) :: boolean()
def valid_unique_opt?({:fields, [_ | _] = fields}), do: fields -- @unique_fields == []
def valid_unique_opt?({:period, :infinity}), do: true
def valid_unique_opt?({:period, period}), do: is_integer(period) and period > 0
def valid_unique_opt?({:states, [_ | _] = states}), do: states -- states() == []
def valid_unique_opt?(_option), do: false
defp put_scheduling(changeset, value) do
case value do
in_seconds when is_integer(in_seconds) ->
scheduled_at = DateTime.add(DateTime.utc_now(), in_seconds)
put_change(changeset, :scheduled_at, scheduled_at)
nil ->
changeset
_ ->
add_error(changeset, :schedule_in, "invalid value")
end
end
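# A job given :schedule_in or :scheduled_at starts out in the "scheduled"
# state; otherwise the schema default of "available" applies.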
defp put_state(changeset) do
case fetch_change(changeset, :scheduled_at) do
{:ok, _} -> put_change(changeset, :state, "scheduled")
:error -> changeset
end
end
defp put_uniqueness(changeset, value) do
case value do
[_ | _] = opts ->
unique =
opts
|> Keyword.put_new(:fields, @unique_fields)
|> Keyword.put_new(:period, @unique_period)
|> Keyword.put_new(:states, @unique_states)
|> Map.new()
case validate_unique_opts(unique) do
:ok ->
put_change(changeset, :unique, unique)
{:error, field, value} ->
add_error(changeset, :unique, "invalid unique option for #{field}, #{inspect(value)}")
end
nil ->
changeset
_ ->
add_error(changeset, :unique, "invalid unique options")
end
end
defp normalize_tags(%{tags: [_ | _] = tags} = params) do
normalize = fn string ->
string
|> to_string()
|> String.trim()
|> String.downcase()
end
tags =
tags
|> Enum.map(normalize)
|> Enum.reject(&(&1 == ""))
|> Enum.uniq()
%{params | tags: tags}
end
defp normalize_tags(params), do: params
defp validate_unique_opts(unique) do
Enum.reduce_while(unique, :ok, fn {key, val}, _acc ->
if valid_unique_opt?({key, val}) do
{:cont, :ok}
else
{:halt, {:error, key, val}}
end
end)
end
defp to_clean_string(value) do
value
|> to_string()
|> String.trim_leading("Elixir.")
end
end
|
lib/oban/job.ex
| 0.904734
| 0.527012
|
job.ex
|
starcoder
|
defmodule ExAudit.Repo do
@moduledoc """
Adds ExAudit version tracking to your Ecto.Repo actions. The following functions are
extended to detect if the given struct or changeset is in the list of :tracked_schemas
given in :ex_audit config:
insert: 2,
update: 2,
insert_or_update: 2,
delete: 2,
insert!: 2,
update!: 2,
insert_or_update!: 2,
delete!: 2
If the given struct or changeset is not tracked, the original Ecto.Repo function is
executed: the functions are marked as overridable, and the overridden implementations
call `Kernel.super/1` whenever the given struct or changeset is not tracked.
## How to use it
Just `use ExAudit.Repo` after `Ecto.Repo`
```elixir
defmodule MyApp.Repo do
use Ecto.Repo,
otp_app: :my_app,
adapter: Ecto.Adapters.Postgres
use ExAudit.Repo
end
```
## Shared options
All normal Ecto.Repo options will work the same; however, there are additional options specific to ex_audit:
* `:ex_audit_custom` - Keyword list of custom data that should be placed in new version entries. Entries in this
list overwrite data with the same keys from the ExAudit.track call
* `:ignore_audit` - If true, ex_audit will not track changes made to entities
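For example, custom audit data can be passed per call (a sketch; `current_user` is a hypothetical value from your context):
```elixir
MyApp.Repo.insert(changeset, ex_audit_custom: [user_id: current_user.id])
```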
"""
defmacro __using__(_opts) do
quote location: :keep do
@behaviour ExAudit.Repo
# These are the Ecto.Repo functions that ExAudit "extends" but these are not
# marked as overridable in Ecto.Repo. (ecto v3.4.2)
defoverridable(
insert: 2,
update: 2,
insert_or_update: 2,
delete: 2,
insert!: 2,
update!: 2,
insert_or_update!: 2,
delete!: 2
)
@doc """
Decides, based on the `:tracked_schemas` config, whether the current schema is tracked or not.
Can be overridden for custom tracking logic.
E.g.
```
def tracked?(struct_or_schema) do
tracked? =
case Process.get(__MODULE__) do
%{tracked?: true} -> true
_ -> false
end
tracked? && super(struct_or_schema)
end
```
"""
def tracked?(struct_or_changeset) do
tracked_schemas = Application.get_env(:ex_audit, :tracked_schemas, [])
schema =
case struct_or_changeset do
%Ecto.Changeset{} = changeset ->
Map.get(changeset.data, :__struct__)
_ ->
Map.get(struct_or_changeset, :__struct__)
end
schema in tracked_schemas
end
@compile {:inline, tracked?: 1}
defoverridable(tracked?: 1)
def insert(struct, opts) do
if tracked?(struct) do
ExAudit.Schema.insert(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end
def update(struct, opts) do
if tracked?(struct) do
ExAudit.Schema.update(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end
def insert_or_update(changeset, opts) do
if tracked?(changeset) do
ExAudit.Schema.insert_or_update(
__MODULE__,
get_dynamic_repo(),
changeset,
opts
)
else
super(changeset, opts)
end
end
def delete(struct, opts) do
if tracked?(struct) do
ExAudit.Schema.delete(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end
def insert!(struct, opts) do
if tracked?(struct) do
ExAudit.Schema.insert!(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end
def update!(struct, opts) do
if tracked?(struct) do
ExAudit.Schema.update!(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end
def insert_or_update!(changeset, opts) do
if tracked?(changeset) do
ExAudit.Schema.insert_or_update!(
__MODULE__,
get_dynamic_repo(),
changeset,
opts
)
else
super(changeset, opts)
end
end
def delete!(struct, opts) do
if tracked?(struct) do
ExAudit.Schema.delete!(
__MODULE__,
get_dynamic_repo(),
struct,
opts
)
else
super(struct, opts)
end
end
def default_options(_operation), do: []
defoverridable(default_options: 1)
defoverridable(child_spec: 1)
# additional functions
def history(struct, opts \\ []) do
ExAudit.Queryable.history(__MODULE__, struct, opts)
end
def revert(version, opts \\ []) do
ExAudit.Queryable.revert(__MODULE__, version, opts)
end
end
end
@doc """
Gathers the version history for the given struct, ordered by the time the changes
happened from newest to oldest.
### Options
* `:render_structs` if true, renders the _resulting_ struct of the patch for every version in its history.
This will shift the ids of the versions one down, so visualisations are correct and corresponding "Revert"
buttons revert the struct back to the visualized state.
An additional version is appended that contains the oldest ID and the oldest struct known. In most cases, the
`original` will be `nil`, which means that reverting this version would delete the struct.
`false` by default.
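For example, assuming a `MyApp.Repo` that uses `ExAudit.Repo` and a previously fetched `user` struct:
```elixir
versions = MyApp.Repo.history(user, render_structs: true)
```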
"""
@callback history(struct, opts :: list) :: [version :: struct]
@doc """
Undoes the changes made in the given version, as well as all of the following versions.
Inserts a new version entry in the process, with the `:rollback` flag set to true
### Options
* `:preload` if your changeset depends on assocs being preloaded on the struct before
updating it, you can define a list of assocs to be preloaded with this option
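For example (a sketch; `version` would come from `history/2`):
```elixir
{:ok, user} = MyApp.Repo.revert(version, preload: [:posts])
```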
"""
@callback revert(version :: struct, opts :: list) ::
{:ok, struct} | {:error, changeset :: Ecto.Changeset.t()}
@callback default_options(operation :: atom) :: keyword
end
|
lib/repo/repo.ex
| 0.870101
| 0.748191
|
repo.ex
|
starcoder
|
defmodule AWS.AuditManager do
@moduledoc """
Welcome to the Audit Manager API reference.
This guide is for developers who need detailed information about the Audit
Manager API operations, data types, and errors.
Audit Manager is a service that provides automated evidence collection so that
you can continuously audit your Amazon Web Services usage, and assess the
effectiveness of your controls to better manage risk and simplify compliance.
Audit Manager provides pre-built frameworks that structure and automate
assessments for a given compliance standard. Frameworks include a pre-built
collection of controls with descriptions and testing procedures, which are
grouped according to the requirements of the specified compliance standard or
regulation. You can also customize frameworks and controls to support internal
audits with unique requirements.
Use the following links to get started with the Audit Manager API:
*
[Actions](https://docs.aws.amazon.com/audit-manager/latest/APIReference/API_Operations.html): An alphabetical list of all Audit Manager API operations.
* [Data
types](https://docs.aws.amazon.com/audit-manager/latest/APIReference/API_Types.html):
An alphabetical list of all Audit Manager data types.
* [Common parameters](https://docs.aws.amazon.com/audit-manager/latest/APIReference/CommonParameters.html):
Parameters that all Query operations can use.
* [Common errors](https://docs.aws.amazon.com/audit-manager/latest/APIReference/CommonErrors.html):
Client and server errors that all operations can return.
If you're new to Audit Manager, we recommend that you review the [Audit Manager User
Guide](https://docs.aws.amazon.com/audit-manager/latest/userguide/what-is.html).
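A minimal usage sketch, assuming a client built following `aws-elixir`
conventions (the credentials and region below are placeholders):
```
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
{:ok, status, _http_response} = AWS.AuditManager.get_account_status(client)
```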
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-07-25",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "auditmanager",
global?: false,
protocol: "rest-json",
service_id: "AuditManager",
signature_version: "v4",
signing_name: "auditmanager",
target_prefix: nil
}
end
@doc """
Associates an evidence folder to the specified assessment report in Audit
Manager.
"""
def associate_assessment_report_evidence_folder(
%Client{} = client,
assessment_id,
input,
options \\ []
) do
url_path = "/assessments/#{AWS.Util.encode_uri(assessment_id)}/associateToAssessmentReport"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Associates a list of evidence to an assessment report in an Audit Manager
assessment.
"""
def batch_associate_assessment_report_evidence(
%Client{} = client,
assessment_id,
input,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/batchAssociateToAssessmentReport"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Create a batch of delegations for a specified assessment in Audit Manager.
"""
def batch_create_delegation_by_assessment(
%Client{} = client,
assessment_id,
input,
options \\ []
) do
url_path = "/assessments/#{AWS.Util.encode_uri(assessment_id)}/delegations"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes the delegations in the specified Audit Manager assessment.
"""
def batch_delete_delegation_by_assessment(
%Client{} = client,
assessment_id,
input,
options \\ []
) do
url_path = "/assessments/#{AWS.Util.encode_uri(assessment_id)}/delegations"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Disassociates a list of evidence from the specified assessment report in Audit
Manager.
"""
def batch_disassociate_assessment_report_evidence(
%Client{} = client,
assessment_id,
input,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/batchDisassociateFromAssessmentReport"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Uploads one or more pieces of evidence to the specified control in the
assessment in Audit Manager.
"""
def batch_import_evidence_to_assessment_control(
%Client{} = client,
assessment_id,
control_id,
control_set_id,
input,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/controlSets/#{AWS.Util.encode_uri(control_set_id)}/controls/#{AWS.Util.encode_uri(control_id)}/evidence"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates an assessment in Audit Manager.
"""
def create_assessment(%Client{} = client, input, options \\ []) do
url_path = "/assessments"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a custom framework in Audit Manager.
"""
def create_assessment_framework(%Client{} = client, input, options \\ []) do
url_path = "/assessmentFrameworks"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates an assessment report for the specified assessment.
"""
def create_assessment_report(%Client{} = client, assessment_id, input, options \\ []) do
url_path = "/assessments/#{AWS.Util.encode_uri(assessment_id)}/reports"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new custom control in Audit Manager.
"""
def create_control(%Client{} = client, input, options \\ []) do
url_path = "/controls"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes an assessment in Audit Manager.
"""
def delete_assessment(%Client{} = client, assessment_id, input, options \\ []) do
url_path = "/assessments/#{AWS.Util.encode_uri(assessment_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a custom framework in Audit Manager.
"""
def delete_assessment_framework(%Client{} = client, framework_id, input, options \\ []) do
url_path = "/assessmentFrameworks/#{AWS.Util.encode_uri(framework_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes an assessment report from an assessment in Audit Manager.
"""
def delete_assessment_report(
%Client{} = client,
assessment_id,
assessment_report_id,
input,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/reports/#{AWS.Util.encode_uri(assessment_report_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a custom control in Audit Manager.
"""
def delete_control(%Client{} = client, control_id, input, options \\ []) do
url_path = "/controls/#{AWS.Util.encode_uri(control_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deregisters an account in Audit Manager.
"""
def deregister_account(%Client{} = client, input, options \\ []) do
url_path = "/account/deregisterAccount"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes the specified member account as a delegated administrator for Audit
Manager.
When you remove a delegated administrator from your Audit Manager settings, or
when you deregister a delegated administrator from Organizations, you continue
to have access to the evidence that you previously collected under that account.
However, Audit Manager will stop collecting and attaching evidence to that
delegated administrator account moving forward.
"""
def deregister_organization_admin_account(%Client{} = client, input, options \\ []) do
url_path = "/account/deregisterOrganizationAdminAccount"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Disassociates an evidence folder from the specified assessment report in Audit
Manager.
"""
def disassociate_assessment_report_evidence_folder(
%Client{} = client,
assessment_id,
input,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/disassociateFromAssessmentReport"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns the registration status of an account in Audit Manager.
"""
def get_account_status(%Client{} = client, options \\ []) do
url_path = "/account/status"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns an assessment from Audit Manager.
"""
def get_assessment(%Client{} = client, assessment_id, options \\ []) do
url_path = "/assessments/#{AWS.Util.encode_uri(assessment_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a framework from Audit Manager.
"""
def get_assessment_framework(%Client{} = client, framework_id, options \\ []) do
url_path = "/assessmentFrameworks/#{AWS.Util.encode_uri(framework_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns the URL of a specified assessment report in Audit Manager.
"""
def get_assessment_report_url(
%Client{} = client,
assessment_id,
assessment_report_id,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/reports/#{AWS.Util.encode_uri(assessment_report_id)}/url"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of changelogs from Audit Manager.
"""
def get_change_logs(
%Client{} = client,
assessment_id,
control_id \\ nil,
control_set_id \\ nil,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/assessments/#{AWS.Util.encode_uri(assessment_id)}/changelogs"
headers = []
query_params = []
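# Optional parameters are prepended to the query string only when present;
# nil arguments are omitted from the request.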
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(control_set_id) do
[{"controlSetId", control_set_id} | query_params]
else
query_params
end
query_params =
if !is_nil(control_id) do
[{"controlId", control_id} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a control from Audit Manager.
"""
def get_control(%Client{} = client, control_id, options \\ []) do
url_path = "/controls/#{AWS.Util.encode_uri(control_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of delegations from an audit owner to a delegate.
"""
def get_delegations(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/delegations"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns evidence from Audit Manager.
"""
def get_evidence(
%Client{} = client,
assessment_id,
control_set_id,
evidence_folder_id,
evidence_id,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/controlSets/#{AWS.Util.encode_uri(control_set_id)}/evidenceFolders/#{AWS.Util.encode_uri(evidence_folder_id)}/evidence/#{AWS.Util.encode_uri(evidence_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns all evidence from a specified evidence folder in Audit Manager.
"""
def get_evidence_by_evidence_folder(
%Client{} = client,
assessment_id,
control_set_id,
evidence_folder_id,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/controlSets/#{AWS.Util.encode_uri(control_set_id)}/evidenceFolders/#{AWS.Util.encode_uri(evidence_folder_id)}/evidence"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns an evidence folder from the specified assessment in Audit Manager.
"""
def get_evidence_folder(
%Client{} = client,
assessment_id,
control_set_id,
evidence_folder_id,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/controlSets/#{AWS.Util.encode_uri(control_set_id)}/evidenceFolders/#{AWS.Util.encode_uri(evidence_folder_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns the evidence folders from a specified assessment in Audit Manager.
"""
def get_evidence_folders_by_assessment(
%Client{} = client,
assessment_id,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/assessments/#{AWS.Util.encode_uri(assessment_id)}/evidenceFolders"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of evidence folders associated with a specified control of an
assessment in Audit Manager.
"""
def get_evidence_folders_by_assessment_control(
%Client{} = client,
assessment_id,
control_id,
control_set_id,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/evidenceFolders-by-assessment-control/#{AWS.Util.encode_uri(control_set_id)}/#{AWS.Util.encode_uri(control_id)}"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns the name of the delegated Amazon Web Services administrator account for
the organization.
"""
def get_organization_admin_account(%Client{} = client, options \\ []) do
url_path = "/account/organizationAdminAccount"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of the in-scope Amazon Web Services services for the specified
assessment.
"""
def get_services_in_scope(%Client{} = client, options \\ []) do
url_path = "/services"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns the settings for the specified account.
"""
def get_settings(%Client{} = client, attribute, options \\ []) do
url_path = "/settings/#{AWS.Util.encode_uri(attribute)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of the frameworks available in the Audit Manager framework
library.
"""
def list_assessment_frameworks(
%Client{} = client,
framework_type,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/assessmentFrameworks"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(framework_type) do
[{"frameworkType", framework_type} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of assessment reports created in Audit Manager.
"""
def list_assessment_reports(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/assessmentReports"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of current and past assessments from Audit Manager.
"""
def list_assessments(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/assessments"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of controls from Audit Manager.
"""
def list_controls(
%Client{} = client,
control_type,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/controls"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(control_type) do
[{"controlType", control_type} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of keywords that are pre-mapped to the specified control data source.
"""
def list_keywords_for_data_source(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
source,
options \\ []
) do
url_path = "/dataSourceKeywords"
headers = []
query_params = []
query_params =
if !is_nil(source) do
[{"source", source} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of all Audit Manager notifications.
"""
def list_notifications(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/notifications"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns a list of tags for the specified resource in Audit Manager.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Enables Audit Manager for the specified account.
"""
def register_account(%Client{} = client, input, options \\ []) do
url_path = "/account/registerAccount"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Enables an account within the organization as the delegated administrator for
Audit Manager.
"""
def register_organization_admin_account(%Client{} = client, input, options \\ []) do
url_path = "/account/registerOrganizationAdminAccount"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Tags the specified resource in Audit Manager.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes a tag from a resource in Audit Manager.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
{query_params, input} =
[
{"tagKeys", "tagKeys"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Edits an Audit Manager assessment.
"""
def update_assessment(%Client{} = client, assessment_id, input, options \\ []) do
url_path = "/assessments/#{AWS.Util.encode_uri(assessment_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates a control within an assessment in Audit Manager.
"""
def update_assessment_control(
%Client{} = client,
assessment_id,
control_id,
control_set_id,
input,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/controlSets/#{AWS.Util.encode_uri(control_set_id)}/controls/#{AWS.Util.encode_uri(control_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates the status of a control set in an Audit Manager assessment.
"""
def update_assessment_control_set_status(
%Client{} = client,
assessment_id,
control_set_id,
input,
options \\ []
) do
url_path =
"/assessments/#{AWS.Util.encode_uri(assessment_id)}/controlSets/#{AWS.Util.encode_uri(control_set_id)}/status"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates a custom framework in Audit Manager.
"""
def update_assessment_framework(%Client{} = client, framework_id, input, options \\ []) do
url_path = "/assessmentFrameworks/#{AWS.Util.encode_uri(framework_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates the status of an assessment in Audit Manager.
"""
def update_assessment_status(%Client{} = client, assessment_id, input, options \\ []) do
url_path = "/assessments/#{AWS.Util.encode_uri(assessment_id)}/status"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates a custom control in Audit Manager.
"""
def update_control(%Client{} = client, control_id, input, options \\ []) do
url_path = "/controls/#{AWS.Util.encode_uri(control_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates Audit Manager settings for the current user account.
"""
def update_settings(%Client{} = client, input, options \\ []) do
url_path = "/settings"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Validates the integrity of an assessment report in Audit Manager.
"""
def validate_assessment_report_integrity(%Client{} = client, input, options \\ []) do
url_path = "/assessmentReports/integrity"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
end
|
lib/aws/generated/audit_manager.ex
| 0.778776
| 0.487673
|
audit_manager.ex
|
starcoder
|
defmodule Ccxtex do
alias Ccxtex.OHLCVS.Opts
alias Ccxtex.{Ticker, Trade, Utils, OHLCV, Market}
@type result_tuple :: {:ok, any} | {:error, String.t()}
@moduledoc """
Ccxtex main module: queries cryptocurrency exchange data (exchanges, tickers, OHLCV candles, trades and markets) through the ccxt JavaScript library via NodeJS.
"""
@doc """
Usage example:
`{:ok, exchanges} = exchanges()`
Return value example:
```
[
...
%{
has: %{
cancel_order: true,
cancel_orders: false,
cors: false,
create_deposit_address: true,
create_limit_order: true,
create_market_order: false,
create_order: true,
deposit: false,
edit_order: true,
fetch_balance: true,
fetch_closed_orders: "emulated",
fetch_currencies: true,
fetch_deposit_address: true,
fetch_funding_fees: false,
fetch_l2_order_book: true,
fetch_markets: true,
fetch_my_trades: true,
fetch_ohlcv: true,
fetch_open_orders: true,
fetch_order: "emulated",
fetch_order_book: true,
fetch_order_books: false,
fetch_orders: "emulated",
fetch_ticker: true,
fetch_tickers: true,
fetch_trades: true,
fetch_trading_fees: true,
private_api: true,
public_api: true,
withdraw: true
},
id: "poloniex",
timeout: 10000
}
]
```
"""
@spec exchanges() :: result_tuple
def exchanges() do
with {:ok, exchanges} <- call_js_main(:exchanges, []) do
{:ok, exchanges}
else
err_tup -> err_tup
end
end
@doc """
Fetches a list of ohlcv data, takes OHLCVS.Opts argument
```
opts =
Ccxtex.OHLCVS.Opts.make!(%{
exchange: "poloniex",
base: "ETH",
quote: "USDT",
timeframe: "1h",
since: ~N[2018-01-01T00:00:00],
limit: 100
})
{:ok, ohlcvs} = fetch_ohlcvs(opts)
```
Return value example:
```
%Ccxtex.OHLCV{
base: "ETH",
base_volume: 4234.62695691,
close: 731.16,
exchange: "bitfinex2",
high: 737.07,
low: 726,
open: 736.77,
quote: "USDT",
timestamp: ~N[2018-01-01 00:00:00.000]
}
```
"""
@spec fetch_ohlcvs(Opts.t()) :: result_tuple
def fetch_ohlcvs(%Ccxtex.OHLCVS.Opts{} = opts) do
since_unix =
if opts.since do
opts.since
|> DateTime.from_naive!("Etc/UTC")
|> DateTime.to_unix(:millisecond)
end
opts =
opts
|> Map.from_struct()
|> Map.put(:since, since_unix)
with {:ok, ohlcvs} <- call_js_main(:fetchOhlcvs, [opts]) do
ohlcvs =
ohlcvs
|> Utils.parse_ohlcvs()
|> Enum.map(&OHLCV.make!/1)
{:ok, ohlcvs}
else
err_tup -> err_tup
end
end
@doc """
Fetches ticker for a given exchange, base and quote symbols
```
exchange = "bitstamp"
base = "ETH"
quote = "USD"
{:ok, ticker} = fetch_ticker(exchange, base, quote)
```
Return value example:
```
%Ccxtex.Ticker{
ask: 577.35,
ask_volume: nil,
average: nil,
base_volume: 73309.52075575,
bid: 576.8,
bid_volume: nil,
change: nil,
close: 577.35,
datetime: "2018-05-24T14:06:09.000Z",
high: 619.95,
info: %{
ask: "577.35",
bid: "576.80",
high: "619.95",
last: "577.35",
low: "549.28",
open: "578.40",
timestamp: "1527170769",
volume: "73309.52075575",
vwap: "582.86"
},
last: 577.35,
low: 549.28,
open: 578.4,
percentage: nil,
previous_close: nil,
quote_volume: 42729187.26769644,
pair_symbol: "ETH/USD",
timestamp: 1527170769000,
vwap: 582.86
}
```
"""
@spec fetch_ticker(String.t(), String.t(), String.t()) :: result_tuple
def fetch_ticker(exchange, base, quote) do
opts = %{
exchange: exchange,
symbol: base <> "/" <> quote
}
with {:ok, ticker} <- call_js_main(:fetchTicker, [opts]) do
ticker =
ticker
|> MapKeys.to_snake_case()
|> Ticker.make!()
{:ok, ticker}
else
err_tup -> err_tup
end
end
@doc """
Fetches all or some tickers for a given exchange
Usage:
```
exchange = "poloniex"
ticker = fetch_tickers(exchange)
```
Return value example:
```
[
...
%Ccxtex.Ticker{
ask: 577.35,
ask_volume: nil,
average: nil,
base_volume: 73309.52075575,
bid: 576.8,
bid_volume: nil,
change: nil,
close: 577.35,
datetime: "2018-05-24T14:06:09.000Z",
high: 619.95,
info: %{
ask: "577.35",
bid: "576.80",
high: "619.95",
last: "577.35",
low: "549.28",
open: "578.40",
timestamp: "1527170769",
volume: "73309.52075575",
vwap: "582.86"
},
last: 577.35,
low: 549.28,
open: 578.4,
percentage: nil,
previous_close: nil,
quote_volume: 42729187.26769644,
pair_symbol: "ETH/USD",
timestamp: 1527170769000,
vwap: 582.86
}
...
]
```
"""
@spec fetch_tickers([String.t()] | nil, map | nil) :: result_tuple
def fetch_tickers(symbols \\ nil, params \\ nil) do
with {:ok, tickers} <- call_js_main(:fetchTickers, [symbols, params]) do
tickers =
tickers
|> Enum.map(fn {k, v} -> {k, Map.put(v, "symbol", k)} end)
|> Enum.map(fn {k, v} -> {k, MapKeys.to_snake_case(v)} end)
|> Enum.map(fn {k, v} -> {k, Ticker.make!(v)} end)
|> Enum.into(Map.new())
{:ok, tickers}
else
err_tup -> process_error(err_tup)
end
end
@doc """
Fetches all or some trades for a given symbol on a given exchange
Usage:
```
exchange = "poloniex"
ticker = fetch_trades(exchange, symbol)
```
Return value example:
```
[
...
%Ccxtex.Trade{
info: %{...}, # the original decoded JSON as is
id: "12345-67890:09876/54321", # string trade id
timestamp: 1502962946216, # Unix timestamp in milliseconds
datetime: "2017-08-17 12:42:48.000", # ISO 8601 datetime with milliseconds
symbol: "ETH/BTC", # symbol
order: "12345-67890:09876/54321", # string order id or nil
type: "limit", # order type: "market", "limit" or nil
side: "buy", # direction of the trade: "buy" or "sell"
price: 0.06917684, # float price in quote currency
amount: 1.5 # amount of base currency
}
...
]
```
"""
@spec fetch_trades(Ccxtex.Trades.Opts.t()) :: result_tuple
def fetch_trades(%Ccxtex.Trades.Opts{} = opts) do
since_unix =
if opts.since do
opts.since
|> DateTime.from_naive!("Etc/UTC")
|> DateTime.to_unix(:millisecond)
end
opts =
opts
|> Map.from_struct()
|> Map.put(:since, since_unix)
with {:ok, trades} <- call_js_main(:fetchTrades, [opts]) do
trades =
trades
|> Utils.parse_trades()
|> Enum.map(&Trade.make!/1)
{:ok, trades}
else
err_tup -> err_tup
end
end
@doc """
Fetches markets for a given exchange
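Usage:
```
{:ok, markets} = Ccxtex.fetch_markets("bitstamp")
```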
Response example
```
[
...
%Ccxtex.Market{
active: true,
base: "ETH",
base_id: "eth",
id: "etheur",
info: %{
"base_decimals" => 8,
"counter_decimals" => 2,
"description" => "Ether / Euro",
"minimum_order" => "5.0 EUR",
"name" => "ETH/EUR",
"trading" => "Enabled",
"url_symbol" => "etheur"
},
limits: %{
"amount" => %{"min" => 1.0e-8},
"cost" => %{"min" => 5},
"price" => %{"min" => 0.01}
},
precision: %{"amount" => 8, "price" => 2},
quote: "EUR",
quote_id: "eur",
symbol: "ETH/EUR",
symbol_id: "eth_eur"
}
...
]
```
"""
@spec fetch_markets(String.t()) :: result_tuple
def fetch_markets(exchange) do
with {:ok, markets} <- call_js_main(:fetchMarkets, [exchange]) do
markets =
markets
|> Enum.map(&MapKeys.to_snake_case/1)
|> Enum.map(&Market.make!/1)
{:ok, markets}
else
err_tup -> err_tup
end
end
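# Bridges into the ccxt JavaScript library via NodeJS; exec.js dispatches to
# the named function with the given arguments.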
def call_js_main(jsfn, args) do
NodeJS.call({"exec.js", jsfn}, args)
end
@spec process_error({:error, String.t()}) :: {:error, String.t()}
defp process_error(errtup = {:error, reason}) do
cond do
String.contains?(reason, "fetchTickers not supported") ->
{:error, "fetchTickers not supported"}
true ->
errtup
end
end
end
|
lib/ccxtex.ex
| 0.804751
| 0.759337
|
ccxtex.ex
|
starcoder
|
defmodule Hui do
@moduledoc """
Hui 辉 ("shine" in Chinese) is an [Elixir](https://elixir-lang.org) client and library for
[Solr enterprise search platform](http://lucene.apache.org/solr/).
### Usage
- Searching Solr: `q/1`, `q/6`, `search/2`, `search/7`
- Updating: `update/3`, `delete/3`, `delete_by_query/3`, `commit/2`
- Other: `suggest/2`, `suggest/5`, `spellcheck/3`
- [README](https://hexdocs.pm/hui/readme.html#usage)
"""
import Hui.Guards
alias Hui.Request
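@typedoc "A Solr endpoint: a URL string, a configured endpoint key (atom), or a `Hui.URL.t/0` struct."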
@type url :: binary | atom | Hui.URL.t
@doc """
Issue a keyword list or structured query to the default Solr endpoint.
The query can either be a keyword list, a standard query struct (`Hui.Q`)
or a struct list. This function is a shortcut for `search/2` with `:default` as URL key.
### Example
```
Hui.q(%Hui.Q{q: "loch", fq: ["type:illustration", "format:image/jpeg"]})
Hui.q(q: "loch", rows: 5, facet: true, "facet.field": ["year", "subject"])
# supply a list of Hui structs for more complex query, e.g. faceting
Hui.q( [%Hui.Q{q: "author:I*", rows: 5}, %Hui.F{field: ["cat", "author_str"], mincount: 1}])
# DisMax
x = %Hui.D{q: "run", qf: "description^2.3 title", mm: "2<-25% 9<-3", pf: "title", ps: 1, qs: 3}
y = %Hui.Q{rows: 10, start: 10, fq: ["edited:true"]}
z = %Hui.F{field: ["cat", "author_str"], mincount: 1}
Hui.q([x, y, z])
```
"""
@spec q(Hui.Q.t | Request.query_struct_list | Keyword.t) :: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def q(%Hui.Q{} = query), do: search(:default, [query])
def q(query) when is_list(query), do: search(:default, query)
@doc """
Issue a keyword list or structured query to the default Solr endpoint, raising an exception in case of failure.
See `q/1`.
"""
@spec q!(Hui.Q.t | Request.query_struct_list | Keyword.t) :: HTTPoison.Response.t
def q!(%Hui.Q{} = query), do: Request.search(:default, true, [query])
def q!(query) when is_list(query), do: Request.search(:default, true, query)
@doc """
Convenience function for issuing various typical queries to the default Solr endpoint.
### Example
```
Hui.q("scott")
# keywords
Hui.q("loch", 10, 20)
# .. with paging parameters
Hui.q("\\\"apache documentation\\\"~5", 1, 0, "stream_content_type_str:text/html", ["subject"])
# .. plus filter(s) and facet fields
```
"""
@spec q(binary, nil|integer, nil|integer, nil|binary|list(binary), nil|binary|list(binary), nil|binary)
:: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def q(keywords, rows \\ nil, start \\ nil, filters \\ nil, facet_fields \\ nil, sort \\ nil)
def q(keywords, _, _, _, _, _) when is_nil_empty(keywords), do: {:error, %Hui.Error{reason: :einval}}
def q(keywords, rows, start, filters, facet_fields, sort) do
q = %Hui.Q{q: keywords, rows: rows, start: start, fq: filters, sort: sort}
f = %Hui.F{field: facet_fields}
Request.search(:default, false, [q,f])
end
@doc """
Convenience function for issuing various typical queries to the default Solr endpoint,
raise an exception in case of failure.
"""
@spec q!(binary, nil|integer, nil|integer, nil|binary|list(binary), nil|binary|list(binary), nil|binary)
:: HTTPoison.Response.t
def q!(keywords, rows \\ nil, start \\ nil, filters \\ nil, facet_fields \\ nil, sort \\ nil)
def q!(keywords, _, _, _, _, _) when is_nil_empty(keywords), do: raise %Hui.Error{reason: :einval}
def q!(keywords, rows, start, filters, facet_fields, sort) do
q = %Hui.Q{q: keywords, rows: rows, start: start, fq: filters, sort: sort}
f = %Hui.F{field: facet_fields}
Request.search(:default, true, [q,f])
end
@doc """
Issue a keyword list or structured query to a specified Solr endpoint.
### Example - parameters
```
# structured query with permitted or qualified Solr parameters
url = "http://localhost:8983/solr/collection"
Hui.search(url, %Hui.Q{q: "loch", rows: 5, wt: "xml", fq: ["type:illustration", "format:image/jpeg"]})
# a keyword list of arbitrary parameters
Hui.search(url, q: "edinburgh", rows: 10)
# supply a list of Hui structs for more complex query e.g. DisMax
x = %Hui.D{q: "run", qf: "description^2.3 title", mm: "2<-25% 9<-3", pf: "title", ps: 1, qs: 3}
y = %Hui.Q{rows: 10, start: 10, fq: ["edited:true"]}
z = %Hui.F{field: ["cat", "author_str"], mincount: 1}
Hui.search(url, [x, y, z])
# SolrCloud query
x = %Hui.Q{q: "john", collection: "library,commons", rows: 10, distrib: true, "shards.tolerant": true, "shards.info": true}
Hui.search(url, x)
# Add results highlighting (snippets) with `Hui.H`
x = %Hui.Q{q: "features:photo", rows: 5}
y = %Hui.H{fl: "features", usePhraseHighlighter: true, fragsize: 250, snippets: 3 }
Hui.search(url, [x, y])
```
### Example - URL endpoints
```
url = "http://localhost:8983/solr/collection"
Hui.search(url, q: "loch")
url = :library
Hui.search(url, q: "edinburgh", rows: 10)
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "suggest"}
Hui.search(url, suggest: true, "suggest.dictionary": "mySuggester", "suggest.q": "el")
```
See `Hui.URL.configured_url/1` and `Hui.URL.encode_query/1` for more details on Solr parameter keyword list.
`t:Hui.URL.t/0` struct also enables HTTP headers and HTTPoison options to be specified
in keyword lists. HTTPoison options provide further controls for a request, e.g. `timeout`, `recv_timeout`,
`max_redirect`, `params` etc.
```
# setting up a header and a 10s receiving connection timeout
url = %Hui.URL{url: "..", headers: [{"accept", "application/json"}], options: [recv_timeout: 10000]}
Hui.search(url, q: "solr rocks")
```
See `HTTPoison.request/5` for more details on HTTPoison options.
### Example - faceting
```
x = %Hui.Q{q: "author:I*", rows: 5}
y = %Hui.F{field: ["cat", "author_str"], mincount: 1}
Hui.search(:library, [x, y])
# more elaborated faceting query
x = %Hui.Q{q: "*", rows: 5}
range1 = %Hui.F.Range{range: "price", start: 0, end: 100, gap: 10, per_field: true}
range2 = %Hui.F.Range{range: "popularity", start: 0, end: 5, gap: 1, per_field: true}
y = %Hui.F{field: ["cat", "author_str"], mincount: 1, range: [range1, range2]}
Hui.search(:default, [x, y])
```
The above `Hui.search(:default, [x, y])` example issues a request that results in
the following Solr response header, showing the corresponding generated and encoded parameters.
```
"responseHeader" => %{
"QTime" => 106,
"params" => %{
"f.popularity.facet.range.end" => "5",
"f.popularity.facet.range.gap" => "1",
"f.popularity.facet.range.start" => "0",
"f.price.facet.range.end" => "100",
"f.price.facet.range.gap" => "10",
"f.price.facet.range.start" => "0",
"facet" => "true",
"facet.field" => ["cat", "author_str"],
"facet.mincount" => "1",
"facet.range" => ["price", "popularity"],
"q" => "*",
"rows" => "5"
},
"status" => 0,
"zkConnected" => true
}
```
"""
@spec search(url, Hui.Q.t | Request.query_struct_list | Keyword.t) :: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def search(url, %Hui.Q{} = query), do: Request.search(url, [query])
def search(url, query) when is_list(query), do: Request.search(url, query)
@doc """
Issue a keyword list or structured query to a specified Solr endpoint, raising an exception in case of failure.
See `search/2`.
"""
@spec search!(url, Hui.Q.t | Request.query_struct_list | Keyword.t) :: HTTPoison.Response.t
def search!(url, %Hui.Q{} = query), do: Request.search(url, true, [query])
def search!(url, query) when is_list(query), do: Request.search(url, true, query)
@doc """
Convenience function for issuing various typical queries to a specified Solr endpoint.
See `q/6`.
"""
@spec search(url, binary, nil|integer, nil|integer, nil|binary|list(binary), nil|binary|list(binary), nil|binary)
:: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def search(url, keywords, rows \\ nil, start \\ nil, filters \\ nil, facet_fields \\ nil, sort \\ nil)
def search(url, keywords, _, _, _, _, _) when is_nil_empty(keywords) or is_nil_empty(url), do: {:error, %Hui.Error{reason: :einval}}
def search(url, keywords, rows, start, filters, facet_fields, sort) do
q = %Hui.Q{q: keywords, rows: rows, start: start, fq: filters, sort: sort}
f = %Hui.F{field: facet_fields}
Request.search(url, false, [q,f])
end
@doc """
Convenience function for issuing various typical queries to a specified Solr endpoint,
raising an exception in case of failure.
See `q/6`.
"""
@spec search!(url, binary, nil|integer, nil|integer, nil|binary|list(binary), nil|binary|list(binary), nil|binary)
:: HTTPoison.Response.t
def search!(url, keywords, rows \\ nil, start \\ nil, filters \\ nil, facet_fields \\ nil, sort \\ nil)
def search!(url, keywords, _, _, _, _, _) when is_nil_empty(keywords) or is_nil_empty(url), do: raise %Hui.Error{reason: :einval}
def search!(url, keywords, rows, start, filters, facet_fields, sort) do
q = %Hui.Q{q: keywords, rows: rows, start: start, fq: filters, sort: sort}
f = %Hui.F{field: facet_fields}
Request.search(url, true, [q,f])
end
@doc """
Issue a spell checking query to a specified Solr endpoint.
### Example
```
spellcheck_query = %Hui.Sp{q: "delll ultra sharp", count: 10, "collateParam.q.op": "AND", dictionary: "default"}
Hui.spellcheck(:library, spellcheck_query)
```
"""
@spec spellcheck(url, Hui.Sp.t) :: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def spellcheck(url, %Hui.Sp{} = spellcheck_query_struct), do: Request.search(url, [spellcheck_query_struct])
@doc """
Issue a spell checking query to a specified Solr endpoint, raising an exception in case of failure.
"""
@spec spellcheck!(url, Hui.Sp.t) :: HTTPoison.Response.t
def spellcheck!(url, %Hui.Sp{} = spellcheck_query_struct), do: Request.search(url, true, [spellcheck_query_struct])
@doc """
Issue a spell checking query together with a standard query to a specified Solr endpoint.
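### Example
A minimal sketch combining both structs, reusing values from the `spellcheck/2` example above:
```
query = %Hui.Q{q: "delll ultra sharp", rows: 5}
spellcheck_query = %Hui.Sp{count: 10, dictionary: "default"}
Hui.spellcheck(:library, spellcheck_query, query)
```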
"""
@spec spellcheck(url, Hui.Sp.t, Hui.Q.t) :: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def spellcheck(url, %Hui.Sp{} = spellcheck_query_struct, %Hui.Q{} = query_struct), do: Request.search(url, [query_struct, spellcheck_query_struct])
@doc """
Issue a spell checking query together with a standard query to a specified Solr endpoint, raising an exception in case of failure.
"""
@spec spellcheck!(url, Hui.Sp.t, Hui.Q.t) :: HTTPoison.Response.t
def spellcheck!(url, %Hui.Sp{} = spellcheck_query_struct, %Hui.Q{} = query_struct), do: Request.search(url, true, [query_struct, spellcheck_query_struct])
@doc """
Issue a structured suggester query to a specified Solr endpoint.
### Example
```
suggest_query = %Hui.S{q: "ha", count: 10, dictionary: "name_infix"}
Hui.suggest(:library, suggest_query)
```
"""
@spec suggest(url, Hui.S.t) :: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def suggest(url, %Hui.S{} = suggest_query_struct), do: Request.search(url, [suggest_query_struct])
@doc """
Issue a structured suggester query to a specified Solr endpoint, raising an exception in case of failure.
"""
@spec suggest!(url, Hui.S.t) :: HTTPoison.Response.t
def suggest!(url, %Hui.S{} = suggest_query_struct), do: Request.search(url, true, [suggest_query_struct])
@doc """
Convenience function for issuing a suggester query to a specified Solr endpoint.
### Example
```
Hui.suggest(:autocomplete, "t")
Hui.suggest(:autocomplete, "bo", 5, ["name_infix", "ln_prefix", "fn_prefix"], "1939")
```
"""
@spec suggest(url, binary, nil|integer, nil|binary|list(binary), nil|binary)
:: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def suggest(url, q, count \\ nil, dictionaries \\ nil, context \\ nil)
def suggest(url, q, _, _, _) when is_nil_empty(q) or is_nil_empty(url), do: {:error, %Hui.Error{reason: :einval}}
def suggest(url, q, count, dictionaries, context) do
suggest_query = %Hui.S{q: q, count: count, dictionary: dictionaries, cfq: context}
Request.search(url, false, [suggest_query])
end
@doc """
Convenience function for issuing a suggester query to a specified Solr endpoint,
raising an exception in case of failure.
"""
@spec suggest!(url, binary, nil|integer, nil|binary|list(binary), nil|binary)
:: HTTPoison.Response.t
def suggest!(url, q, count \\ nil, dictionaries \\ nil, context \\ nil)
def suggest!(url, q, _, _, _) when is_nil_empty(q) or is_nil_empty(url), do: raise %Hui.Error{reason: :einval}
def suggest!(url, q, count, dictionaries, context) do
suggest_query = %Hui.S{q: q, count: count, dictionary: dictionaries, cfq: context}
Request.search(url, true, [suggest_query])
end
@doc """
Issue a MoreLikeThis (mlt) query to a specified Solr endpoint.
### Example
```
query = %Hui.Q{q: "apache", rows: 10, wt: "xml"}
mlt = %Hui.M{fl: "manu,cat", mindf: 10, mintf: 200, "match.include": true, count: 10}
Hui.mlt(:library, query, mlt)
```
"""
@spec mlt(url, Hui.Q.t, Hui.M.t) :: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def mlt(url, %Hui.Q{} = query_struct, %Hui.M{} = mlt_query_struct), do: Request.search(url, [query_struct, mlt_query_struct])
@doc """
Issue a MoreLikeThis (mlt) query to a specified Solr endpoint, raising an exception in case of failure.
"""
@spec mlt!(url, Hui.Q.t, Hui.M.t) :: HTTPoison.Response.t
def mlt!(url, %Hui.Q{} = query_struct, %Hui.M{} = mlt_query_struct), do: Request.search(url, true, [query_struct, mlt_query_struct])
@doc """
Updates or adds Solr documents to an index or collection.
This function accepts documents as a single map or a list of maps and commits the docs
to the index immediately by default - set `commit` to `false` for manual or
auto commits later. It can also operate in binary mode, accepting
text containing any valid Solr update data or commands.
An index/update handler endpoint should be specified through a `t:Hui.URL.t/0` struct
or a URL config key. A content type header is required so that Solr knows the
incoming data format (JSON, XML etc.) and can process data accordingly.
### Example
```
# Index handler for JSON-formatted update
headers = [{"Content-type", "application/json"}]
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "update", headers: headers}
# Solr docs in maps
doc1 = %{
"actors" => ["<NAME>", "<NAME>", "<NAME>", "<NAME>"],
"desc" => "A married daughter who longs for her mother's love is visited by the latter, a successful concert pianist.",
"directed_by" => ["<NAME>"],
"genre" => ["Drama", "Music"],
"id" => "tt0077711",
"initial_release_date" => "1978-10-08",
"name" => "<NAME>"
}
doc2 = %{
"actors" => ["<NAME>", "<NAME>", "<NAME>"],
"desc" => "A nurse is put in charge of a mute actress and finds that their personas are melding together.",
"directed_by" => ["<NAME>"],
"genre" => ["Drama", "Thriller"],
"id" => "tt0060827",
"initial_release_date" => "1967-09-21",
"name" => "Persona"
}
Hui.update(url, doc1) # add a single doc
Hui.update(url, [doc1, doc2]) # add a list of docs
# Don't commit the docs e.g. mass ingestion when index handler is setup for autocommit.
Hui.update(url, [doc1, doc2], false)
# Send to a configured endpoint
Hui.update(:updater, [doc1, doc2])
# Binary mode, add and commit a doc
Hui.update(url, "{\\\"add\\\":{\\\"doc\\\":{\\\"name\\\":\\\"Blade Runner\\\",\\\"id\\\":\\\"tt0083658\\\",..}},\\\"commit\\\":{}}")
# Binary mode, delete a doc via XML
headers = [{"Content-type", "application/xml"}]
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "update", headers: headers}
Hui.update(url, "<delete><id>9780141981727</id></delete>")
```
See `Hui.Request.update/3` for more advanced update options.
"""
@spec update(binary | Hui.URL.t, binary | map | list(map), boolean) :: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def update(url, docs, commit \\ true)
def update(url, docs, _commit) when is_binary(docs), do: Request.update(url, docs)
def update(url, docs, commit) when is_map(docs) or is_list(docs), do: Request.update(url, %Hui.U{doc: docs, commit: commit})
@doc """
Updates or adds Solr documents to an index or collection, raising an exception in case of failure.
"""
@spec update!(binary | Hui.URL.t, binary | map | list(map), boolean) :: HTTPoison.Response.t
def update!(url, docs, commit \\ true)
def update!(url, docs, _commit) when is_binary(docs), do: Request.update(url, true, docs)
def update!(url, docs, commit) when is_map(docs) or is_list(docs), do: Request.update(url, true, %Hui.U{doc: docs, commit: commit})
@doc """
Deletes Solr documents.
This function accepts a single ID or a list of IDs and immediately deletes the corresponding
documents from the Solr index (committing by default).
An index/update handler endpoint should be specified through a `t:Hui.URL.t/0` struct
or a URL config key. A JSON content type header for the URL is required so that Solr knows the
incoming data format and can process data accordingly.
### Example
```
# Index handler for JSON-formatted update
headers = [{"Content-type", "application/json"}]
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "update", headers: headers}
Hui.delete(url, "tt2358891") # delete a single doc
Hui.delete(url, ["tt2358891", "tt1602620"]) # delete a list of docs
Hui.delete(url, ["tt2358891", "tt1602620"], false) # delete without immediate commit
```
"""
@spec delete(binary | Hui.URL.t, binary | list(binary), boolean) :: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def delete(url, ids, commit \\ true)
def delete(url, ids, commit) when is_binary(ids) or is_list(ids), do: Request.update(url, %Hui.U{delete_id: ids, commit: commit})
@doc """
Deletes Solr documents, raising an exception in case of failure.
"""
@spec delete!(binary | Hui.URL.t, binary | list(binary), boolean) :: HTTPoison.Response.t
def delete!(url, ids, commit \\ true)
def delete!(url, ids, commit) when is_binary(ids) or is_list(ids), do: Request.update(url, true, %Hui.U{delete_id: ids, commit: commit})
@doc """
Deletes Solr documents by filter queries.
This function accepts a single filter query or a list of filter queries and immediately deletes the
corresponding documents from the Solr index (committing by default).
An index/update handler endpoint should be specified through a `t:Hui.URL.t/0` struct
or a URL config key. A JSON content type header for the URL is required so that Solr knows the
incoming data format and can process data accordingly.
### Example
```
# Index handler for JSON-formatted update
headers = [{"Content-type", "application/json"}]
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "update", headers: headers}
Hui.delete_by_query(url, "name:Persona") # delete with a single filter
Hui.delete_by_query(url, ["genre:Drama", "name:Persona"]) # delete with a list of filters
```
"""
@spec delete_by_query(binary | Hui.URL.t, binary | list(binary), boolean) :: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def delete_by_query(url, queries, commit \\ true)
def delete_by_query(url, queries, commit) when is_binary(queries) or is_list(queries), do: Request.update(url, %Hui.U{delete_query: queries, commit: commit})
@doc """
Deletes Solr documents by filter queries, raising an exception in case of failure.
"""
@spec delete_by_query!(binary | Hui.URL.t, binary | list(binary), boolean) :: HTTPoison.Response.t
def delete_by_query!(url, queries, commit \\ true)
def delete_by_query!(url, queries, commit) when is_binary(queries) or is_list(queries), do: Request.update(url, true, %Hui.U{delete_query: queries, commit: commit})
@doc """
Commit any added or deleted Solr documents to the index.
This provides a (separate) mechanism to commit previously added or deleted documents to
the Solr index for different updating and index maintenance scenarios. By default, the commit
waits for a new Solr searcher to be regenerated, so that the commit result is made available
for search.
An index/update handler endpoint should be specified through a `t:Hui.URL.t/0` struct
or a URL config key. A JSON content type header for the URL is required so that Solr knows the
incoming data format and can process data accordingly.
### Example
```
# Index handler for JSON-formatted update
headers = [{"Content-type", "application/json"}]
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "update", headers: headers}
Hui.commit(url) # commits, make new docs available for search
Hui.commit(url, false) # commits op only, new docs to be made available later
```
Use `Hui.Request.update/3` for other types of commit and index optimisation, e.g. expunge deleted docs to
physically remove docs from the index, which could be a system-intensive operation.
"""
@spec commit(binary | Hui.URL.t, boolean) :: {:ok, HTTPoison.Response.t} | {:error, Hui.Error.t}
def commit(url, wait_searcher \\ true)
def commit(url, wait_searcher), do: Request.update(url, %Hui.U{commit: true, waitSearcher: wait_searcher})
@doc """
Commit any added or deleted Solr documents to the index, raising an exception in case of failure.
"""
@spec commit!(binary | Hui.URL.t, boolean) :: HTTPoison.Response.t
def commit!(url, wait_searcher \\ true)
def commit!(url, wait_searcher), do: Request.update(url, true, %Hui.U{commit: true, waitSearcher: wait_searcher})
end
|
lib/hui.ex
| 0.890711
| 0.78899
|
hui.ex
|
starcoder
|
defmodule EctoSchemaStore.Fetch do
@moduledoc false
defmacro build(schema, repo) do
quote do
defp __preload__(model, preload), do: preload_assocs model, preload
defp __to_map__(model, true), do: to_map(model)
defp __to_map__(model, false), do: model
defp __order_by__(query, nil), do: query
defp __order_by__(query, order_list) when is_atom order_list do
__order_by__ query, [order_list]
end
defp __order_by__(query, order_list) do
from m in query,
order_by: ^order_list
end
defp __limit_to_first__(results) when is_list results do
Enum.at results, 0
end
defp __limit_to_first__([]) do
nil
end
defp __limit_to_first__(results), do: results
@doc """
Fetch all records from `#{unquote(schema)}`.
"""
def all, do: all []
@doc """
Fetch all records from `#{unquote(schema)}` filtered by provided fields map.
Options:
* `preload` - Atom or array of atoms with the associations to preload.
* `to_map` - Should the record model be converted from its struct to a generic map. Default: `false`
* `order_by` - Order the results by the provided keyword list.
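For example, given a hypothetical `UserStore` built from this macro:
```elixir
UserStore.all(%{active: true}, preload: [:posts], order_by: [desc: :inserted_at])
```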
"""
def all(filters, opts \\ [])
def all(%Ecto.Query{} = query, opts) do
preload = Keyword.get opts, :preload, []
destruct = Keyword.get opts, :destructure, false
to_map = Keyword.get opts, :to_map, false
to_map = destruct || to_map
order_list = Keyword.get opts, :order_by, nil
query
|> __order_by__(order_list)
|> unquote(repo).all
|> __preload__(preload)
|> __to_map__(to_map)
end
def all(filters, opts) do
preload = Keyword.get opts, :preload, []
destruct = Keyword.get opts, :destructure, false
to_map = Keyword.get opts, :to_map, false
to_map = destruct or to_map
order_list = Keyword.get opts, :order_by, nil
case build_query(filters) do
{:error, _} = error -> error
{:ok, query} ->
query
|> __order_by__(order_list)
|> unquote(repo).all
|> __preload__(preload)
|> __to_map__(to_map)
end
end
@doc """
Count the number of records that match the provided query filters.
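For example, with a hypothetical `UserStore`:
```elixir
UserStore.count_records(%{active: true})
```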
"""
def count_records(filters \\ [])
def count_records(%Ecto.Query{} = query) do
query =
from q in query,
select: count(q.id)
unquote(repo).one(query)
end
def count_records(filters) do
case build_query(filters) do
{:error, _} = error -> error
{:ok, query} -> count_records query
end
end
@doc """
Fetch a single record from `#{unquote(schema)}` filtered by the provided record id or fields map. If multiple
records match, the first record is returned; this operation will not return an error if more than
one record is found.
Options:
* `preload` - Atom or array of atoms with the associations to preload.
* `to_map` - Should the record model be converted from its struct to a generic map. Default: `false`
* `order_by` - Order the results by the provided keyword list.
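For example, fetching by id with a hypothetical `UserStore`:
```elixir
UserStore.one(42, preload: :posts)
```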
"""
def one(filters, opts \\ [])
def one(nil, _opts), do: nil
def one(id, opts) when is_binary(id), do: one String.to_integer(id), opts
def one(id, opts) when is_integer(id) and id > 0, do: one %{id: id}, opts
def one(%Ecto.Query{} = query, opts) do
preload = Keyword.get opts, :preload, []
destruct = Keyword.get opts, :destructure, false
to_map = Keyword.get opts, :to_map, false
to_map = destruct or to_map
order_list = Keyword.get opts, :order_by, nil
query
|> __order_by__(order_list)
|> unquote(repo).all
|> __limit_to_first__
|> __preload__(preload)
|> __to_map__(to_map)
end
def one(filters, opts) do
preload = Keyword.get opts, :preload, []
destruct = Keyword.get opts, :destructure, false
to_map = Keyword.get opts, :to_map, false
to_map = destruct or to_map
order_list = Keyword.get opts, :order_by, nil
case build_query(filters) do
{:error, _} = error -> error
{:ok, query} ->
query
|> __order_by__(order_list)
|> unquote(repo).all
|> __limit_to_first__
|> __preload__(preload)
|> __to_map__(to_map)
end
end
@doc """
Reloads a single record for `#{unquote(schema)}` from the database.
"""
def refresh(record), do: one record.id
@doc """
Preloads child associations.
"""
def preload_assocs(record, :all), do: preload_assocs(record, schema_associations())
def preload_assocs(record, fields) when is_list fields do
unquote(repo).preload(record, fields)
end
def preload_assocs(record, field), do: preload_assocs(record, [field])
@doc """
Helper to order a preload by a provided Ecto repo order by value.
```elixir
store.preload_assocs(model, [field: order_preload_by(:name)])
```
The same as:
```elixir
import Ecto.Query, only: [from: 2]
store.preload_assocs(model, [field: (from(s in Schema, order_by: s.name))])
```
"""
def order_preload_by(order_params) do
order_by build_query!(), ^order_params
end
@doc """
Returns true if any records match the provided query filters.
"""
def exists?(filters), do: count_records(filters) > 0
@doc """
Convert the provided record to a generic map and Ecto date or time values to
Elixir 1.3 equivalents. Replaces `destructure`.
"""
def to_map(record) when is_list record do
Enum.map record, fn(entry) -> to_map entry end
end
def to_map(record), do: convert_model_to_map record
defp convert_model_to_map(model, convert_ecto \\ true)
defp convert_model_to_map(nil, _convert_ecto), do: nil
defp convert_model_to_map(%{} = model, convert_ecto) do
keys = List.delete Map.keys(model), :__meta__
keys = List.delete keys, :__struct__
key_values = for key <- keys do
convert_value key, Map.get(model, key), convert_ecto
end
Enum.into key_values, %{}
end
defp convert_model_to_map(value, _convert_ecto), do: value
defp convert_value(key, %Ecto.Association.NotLoaded{}, true), do: {key, :not_loaded}
defp convert_value(key, %DateTime{} = value, true), do: {key, value}
defp convert_value(key, %Date{} = value, true), do: {key, value}
defp convert_value(key, %Time{} = value, true), do: {key, value}
defp convert_value(key, %NaiveDateTime{} = value, true), do: {key, value}
defp convert_value(key, %{} = value, convert_ecto), do: {key, convert_model_to_map(value, convert_ecto)}
defp convert_value(key, [%{} = h | t], convert_ecto) do
first = convert_model_to_map(h, convert_ecto)
rest = for entry <- t do
convert_model_to_map(entry, convert_ecto)
end
{key, [first | rest]}
end
defp convert_value(key, value, _convert_ecto), do: {key, value}
end
end
end
|
lib/ecto_schema_store/fetch.ex
| 0.831006
| 0.501831
|
fetch.ex
|
starcoder
|
defmodule Xray do
@moduledoc """
Xray offers utility functions for inspecting string binaries, their
code points, and their base2 representations.
This package was the result of my own studying of Elixir strings and binaries.
It's unlikely you would actually use this as a dependency, but I offer it up
for public use in the hopes that it may be educational.
"""
@doc """
This function prints a report on the provided input string. This may not work
especially well when the input contains non-printable characters (YMMV).
For each character in the string, the following information is shown:
- code point as a decimal, e.g. `228`
- code point in its Elixir Unicode representation, e.g. `\\u00E4`
- a link to a page containing more information about this Unicode code point
- count of the number of bytes required to represent this code point using UTF-8 encoding
- an inspection of the UTF-8 binaries, e.g. `<<195, 164>>`
- a `Base2` representation (i.e. 1's and 0's) of the encoded code point
The `Base2` representation (what we would be tempted to call the "binary" representation)
highlights control bits in red to help show how [UTF-8](https://en.wikipedia.org/wiki/UTF-8)
identifies how many bytes are required to encode each character.
## Examples
```
iex> Xray.inspect("cät")
======================================================
Input String: cät
Character Count: 3
Byte Count: 4
Is valid? true
Is printable? true
======================================================
c Codepoint: 99 (\\u0063) https://codepoints.net/U+0063
Script(s): latin
Byte Count: 1
UTF-8: <<99>>
Base2: 01100011
ä Codepoint: 228 (\\u00E4) https://codepoints.net/U+00E4
Script(s): latin
Byte Count: 2
UTF-8: <<195, 164>>
Base2: 11000011 10100100
t Codepoint: 116 (\\u0074) https://codepoints.net/U+0074
Script(s): latin
Byte Count: 1
UTF-8: <<116>>
Base2: 01110100
```
"""
@spec inspect(value :: binary) :: list
def inspect(value) when is_binary(value) do
value
|> heading()
|> String.codepoints()
|> Enum.map(&character_profile/1)
end
# This headlines the `inspect/1` output.
defp heading(value) do
IO.puts("======================================================")
IO.puts("Input String: #{value}")
IO.puts("Character Count: #{String.length(value)}")
IO.puts("Byte Count: #{byte_size(value)}")
IO.puts("Is valid? #{String.valid?(value)}")
IO.puts("Is printable? #{String.printable?(value)}")
IO.puts("======================================================")
value
end
# This is our formatting card for displaying info about a single character
defp character_profile(x) do
IO.puts("")
IO.puts(
character_heading(x) <>
" Codepoint: #{codepoint(x)} (\\u#{codepoint(x, as_hex: true)}) #{link(x)}"
)
IO.puts(indent("Is printable? #{String.printable?(x)}"))
IO.puts(indent("Script(s): #{Enum.join(Unicode.script(x), ",")}"))
IO.puts(indent("Byte Count: #{byte_size(x)}"))
IO.inspect(x, binaries: :as_binaries, label: " UTF-8")
IO.puts(indent("Base2: " <> to_base2(x)))
end
defp character_heading(x) do
case String.printable?(x) do
true -> IO.ANSI.bright() <> x <> IO.ANSI.reset()
_ -> IO.ANSI.bright() <> "�" <> IO.ANSI.reset()
end
end
@doc """
Reveals the integer codepoint for the given single character; when run with
the default options, this is equivalent to the question-mark operator, e.g.
`?x` but this function works with variables (whereas the question mark only
evaluates literal characters).
## Options:
### `:as_hex` (boolean) default: `false`
When true, returns the hexadecimal representation of the codepoint number. The
hexadecimal representation is useful when looking up documentation, e.g. on
[Wikipedia](https://en.wikipedia.org/wiki/List_of_Unicode_characters) or on
websites like [codepoints.net](https://codepoints.net/).
## Examples
iex> Xray.codepoint("ä")
228
iex> Xray.codepoint("ä", as_hex: true)
"00E4"
"""
@spec codepoint(binary, opts :: keyword) :: integer | String.t()
def codepoint(<<codepoint::utf8>>, opts \\ []) do
case Keyword.get(opts, :as_hex) do
true -> codepoint_as_hex(codepoint)
_ -> codepoint
end
end
@doc """
Given a string binary, this returns a list of the codepoints that represent
each of the characters in the string. This is what you might expect
`String.codepoints/1` to return, but instead of returning a list of the
component *characters* like `String.codepoints/1` does, this function
returns the *numbers* (which is what code points are).
Note that this function returns a string: if a list of integers were returned, it would be
automatically displayed as a charlist in IEx, hiding the numbers we actually want to see.
## Options
- `:as_hex` (see `codepoint/2`)
## Examples
iex> Xray.codepoints("cät")
"99, 228, 116"
"""
@spec codepoints(string :: binary, opts :: keyword) :: list
def codepoints(string, opts \\ []) when is_binary(string) do
string
|> String.codepoints()
|> Enum.map(fn x -> codepoint(x, opts) end)
|> Enum.join(", ")
# We want to see the numbers!!!
# IO.inspect(x, charlists: :as_lists)
end
# Converts a character like ä to its hexadecimal representation like `00E4`
defp codepoint_as_hex(codepoint) do
list = Integer.to_charlist(codepoint, 16)
String.pad_leading(to_string(list), 4, ["0"])
end
# Simple indentation
defp indent(string) do
" " <> string
end
# get a link to some official documentation about this codepoint
# e.g. https://codepoints.net/U+00E4
defp link(<<codepoint::utf8>>) do
slug = "U+#{codepoint_as_hex(codepoint)}"
IO.ANSI.blue() <> "https://codepoints.net/#{slug}" <> IO.ANSI.reset()
end
defp to_base2(x) when is_binary(x) do
base2 = Base2.encode2(x, padding: :all)
parse_bytes(base2, "") <> IO.ANSI.reset()
end
# Some nice formatting of the base2 representation highlighting the control bits that
# flag multi-byte characters in UTF-8.
# Start of 4-byte encoding of codepoint
defp parse_bytes("11110" <> <<byte_rest::binary-size(3), other_bytes::binary>>, acc) do
parse_bytes(
other_bytes,
acc <> IO.ANSI.red() <> "11110" <> IO.ANSI.green() <> byte_rest <> " "
)
end
# Start of 3-byte encoding of codepoint
defp parse_bytes("1110" <> <<byte_rest::binary-size(4), other_bytes::binary>>, acc) do
parse_bytes(
other_bytes,
acc <> IO.ANSI.red() <> "1110" <> IO.ANSI.green() <> byte_rest <> " "
)
end
# Start of 2-byte encoding of codepoint
defp parse_bytes("110" <> <<byte_rest::binary-size(5), other_bytes::binary>>, acc) do
parse_bytes(other_bytes, acc <> IO.ANSI.red() <> "110" <> IO.ANSI.green() <> byte_rest <> " ")
end
# Start of secondary byte
defp parse_bytes("10" <> <<byte_rest::binary-size(6), other_bytes::binary>>, acc) do
parse_bytes(other_bytes, acc <> IO.ANSI.red() <> "10" <> IO.ANSI.green() <> byte_rest <> " ")
end
# Start of a single byte codepoint
defp parse_bytes("0" <> <<byte_rest::binary-size(7), other_bytes::binary>>, acc) do
parse_bytes(other_bytes, acc <> IO.ANSI.red() <> "0" <> IO.ANSI.green() <> byte_rest <> " ")
end
# All done!
defp parse_bytes("", acc), do: acc
end
|
lib/xray.ex
| 0.92944
| 0.894835
|
xray.ex
|
starcoder
|
defmodule Robotica.Plugins.Hs100 do
@moduledoc """
hs100 switch plugin
"""
use GenServer
use Robotica.Plugin
require Logger
defmodule Config do
@moduledoc false
@type t :: %__MODULE__{id: String.t()}
defstruct [:id]
end
defmodule State do
@moduledoc false
@type t :: %__MODULE__{
config: Config.t(),
location: String.t(),
device: String.t()
}
defstruct [:config, :location, :device]
end
## Server Callbacks
def init(plugin) do
state = %State{
config: plugin.config,
location: plugin.location,
device: plugin.device
}
case TpLinkHs100.Client.get_device(state.config.id) do
:error ->
publish_device_hard_off(state)
{:ok, device} ->
device_state = if device.sysinfo["relay_state"] == 0, do: "OFF", else: "ON"
publish_device_state(state, device_state)
end
TpLinkHs100.Client.add_handler(self())
{:ok, state}
end
def config_schema do
%{
struct_type: Config,
id: {:string, true}
}
end
@spec publish_device_state(State.t(), String.t()) :: :ok
defp publish_device_state(%State{} = state, device_state) do
publish_state_raw(state, "power", device_state)
end
@spec publish_device_error(State.t()) :: :ok
defp publish_device_error(%State{} = state) do
publish_state_raw(state, "power", "ERROR")
end
@spec publish_device_hard_off(State.t()) :: :ok
defp publish_device_hard_off(%State{} = state) do
publish_state_raw(state, "power", "HARD_OFF")
end
@spec handle_command(State.t(), map()) :: :ok | nil
def handle_command(%State{} = state, command) do
publish_command(state.location, state.device, command)
{power, device_state} =
case command.action do
"turn_on" -> {true, "ON"}
"turn_off" -> {false, "OFF"}
_ -> {nil, nil}
end
if power != nil and device_state != nil do
case TpLinkHs100.Client.get_device(state.config.id) do
:error ->
:ok
{:ok, device} ->
case TpLinkHs100.Device.switch(device, power) do
:ok -> publish_device_state(state, device_state)
{:error, _} -> publish_device_error(state)
end
end
end
end
def handle_cast({:mqtt, _, :command, command}, %State{} = state) do
case Robotica.Config.validate_device_command(command) do
{:ok, command} ->
case check_type(command, "device") do
{command, true} -> handle_command(state, command)
{_, false} -> state
end
{:error, error} ->
Logger.error("Invalid hs100 command received: #{inspect(error)}.")
end
{:noreply, state}
end
def handle_cast({:added, %TpLinkHs100.Device{} = device}, %State{} = state) do
if device.id == state.config.id do
device_state = if device.sysinfo["relay_state"] == 0, do: "OFF", else: "ON"
:ok = publish_device_state(state, device_state)
end
{:noreply, state}
end
def handle_cast({:updated, %TpLinkHs100.Device{} = device}, %State{} = state) do
if device.id == state.config.id do
device_state = if device.sysinfo["relay_state"] == 0, do: "OFF", else: "ON"
:ok = publish_device_state(state, device_state)
end
{:noreply, state}
end
def handle_cast({:deleted, %TpLinkHs100.Device{} = device}, %State{} = state) do
if device.id == state.config.id do
:ok = publish_device_hard_off(state)
end
{:noreply, state}
end
end
|
robotica/lib/robotica/plugins/hs100.ex
| 0.61173
| 0.406774
|
hs100.ex
|
starcoder
|
defmodule Ace.HTTP.Worker do
@moduledoc """
Run a Raxx application in isolation to handle a single HTTP exchange.
- The application consists of a behaviour module and initial state.
- An HTTP exchange is a single response to a single request.
See `Raxx.Server` for details on implementing a valid module.
A worker must be started for each message sent, even if messages are sent
on a single connection, e.g. via HTTP pipelining or HTTP/2 streams.
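For example (a sketch assuming a `MyApp` module implementing `Raxx.Server` and
an open `Ace.HTTP.Channel`):
    {:ok, worker} = Ace.HTTP.Worker.start_link({MyApp, %{}}, channel)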
"""
@typep application :: {module, any}
use GenServer
@enforce_keys [
:app_module,
:app_state,
:channel,
:channel_monitor
]
defstruct @enforce_keys
@doc """
Start a new worker linked to the calling process.
"""
@spec start_link(application, Ace.HTTP.Channel.t()) :: GenServer.on_start()
def start_link({module, config}, channel) do
GenServer.start_link(__MODULE__, {module, config, channel}, [])
end
@doc false
def child_spec(channel) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [channel]},
type: :worker,
restart: :temporary,
shutdown: 500
}
end
## Server Callbacks
@impl GenServer
def init({module, config, channel}) do
channel_monitor = Ace.HTTP.Channel.monitor_endpoint(channel)
nil = Process.put(Ace.HTTP.Channel, channel)
{:ok,
%__MODULE__{
app_module: module,
app_state: config,
channel: channel,
channel_monitor: channel_monitor
}}
end
@impl GenServer
def handle_info({channel, part}, state = %{channel: channel}) do
Ace.HTTP.Channel.ack(channel)
Raxx.Server.handle({state.app_module, state.app_state}, part)
|> do_send(state)
end
def handle_info({:DOWN, ref, :process, _pid, reason}, state = %{channel_monitor: ref}) do
{:stop, reason, state}
end
def handle_info(other, state) do
Raxx.Server.handle({state.app_module, state.app_state}, other)
|> do_send(state)
end
defp do_send({parts, new_app_state}, state) do
new_state = %{state | app_state: new_app_state}
case Ace.HTTP.Channel.send(state.channel, parts) do
{:ok, _channel} ->
case List.last(parts) do
%{body: false} ->
{:stop, :normal, new_state}
%Raxx.Tail{} ->
{:stop, :normal, new_state}
%{body: body} when is_binary(body) ->
{:stop, :normal, new_state}
_ ->
{:noreply, new_state}
end
{:error, :connection_closed} ->
{:stop, :normal, new_state}
end
end
end
|
lib/ace/http/worker.ex
| 0.865835
| 0.416352
|
worker.ex
|
starcoder
|
defmodule AOC.Day4 do
@moduledoc """
Solution to Day 4 of the Advent of code 2021
https://adventofcode.com/2021/day/4
"""
@doc """
Read the input file
Returns the data as a tuple with inputs as first element and bingo boards as second
"""
@spec get_inputs(Path.t()) :: {String.t(), [[[String.t()]]]}
def get_inputs(f \\ "lib/inputs/day4.txt") do
File.read!(f)
|> String.trim()
|> String.split("\n\n")
|> then(fn [hd | tail] ->
{hd,
tail
|> Enum.map(
&(String.split(&1, "\n")
|> Enum.map(fn s -> String.split(s) end))
)}
end)
end
@doc """
Transpose the board. Useful when we need to traverse column-wise.
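For example:
    transpose([[1, 2], [3, 4]])
    # => [[1, 3], [2, 4]]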
"""
def transpose(board) do
Enum.reverse(Enum.zip_reduce(board, [], fn e, acc -> [e | acc] end))
end
def flatten([head | tail]), do: flatten(head) ++ flatten(tail)
def flatten([]), do: []
def flatten(head), do: [head]
@doc """
Convert the bingo boards from [[1,2,3] [4,5,6] [7,8,9]...] to
[[{1, false}, {2, false}, {3, false}] ...]
"""
def create_boards(inputs) do
inputs
|> Enum.map(fn board ->
for r <- board do
for n <- r do
{String.to_integer(n), false}
end
end
end)
end
@doc """
Given a board and a number, mark the matching entries as true wherever the number appears on the board.
"""
def mark_num_on_board(board, num) do
for row <- board do
for {n, m?} <- row do
if n == num, do: {n, true}, else: {n, m?}
end
end
end
@doc """
Check if all numbers in a row or column are marked true
"""
def check_row_col_for_win(rc) do
Enum.all?(rc, fn {_n, m?} -> m? end)
end
@doc """
Check if the board has won i.e any row or column is completely marked true
"""
def check_board_for_win(board) do
if(
Enum.any?(
for r <- board do
check_row_col_for_win(r)
end
) or
Enum.any?(
for r <- transpose(board) do
check_row_col_for_win(r)
end
)
) do
board
else
nil
end
end
def call_to_win(_boards, []) do
nil
end
@doc """
Find the first board that wins given the number.
If no winners are found, repeat with the next input number.
"""
def call_to_win(boards, [num | tail]) do
new_boards =
boards
|> Stream.map(&mark_num_on_board(&1, num))
w =
Enum.find(
new_boards
|> Enum.map(&check_board_for_win/1),
fn x -> x != nil end
)
if w != nil, do: {w, num}, else: call_to_win(Enum.to_list(new_boards), tail)
end
def call_to_lose(_boards, []) do
nil
end
@doc """
Find the last board that wins.
When a board wins, it is removed from the list of boards and the process is repeated with the next numbers.
If no boards remain after removing this round's winners, return one of those winners and the number.
If no board wins, repeat with the next input number.
"""
def call_to_lose(boards, [num | tail]) do
new_boards =
boards
|> Stream.map(&mark_num_on_board(&1, num))
winners =
Enum.filter(
new_boards
|> Enum.map(&check_board_for_win/1),
fn x -> x != nil end
)
# Enum.filter/2 always returns a list, so check for the empty list rather than nil
if winners != [] do
rem_boards = Enum.to_list(new_boards) -- winners
if rem_boards == [],
do: {hd(winners), num},
else: call_to_lose(rem_boards, tail)
else
call_to_lose(Enum.to_list(new_boards), tail)
end
end
@doc """
Find the score of a winning board.
Score is sum of unmarked numbers * number which won the board
"""
def calculate_score({board, num}) do
num *
(for r <- board do
for {n, m?} <- r do
if not m?, do: n, else: 0
end
end
|> flatten
|> Enum.sum())
end
@doc """
To guarantee victory against the giant squid, figure out which board will win first.
What will your final score be if you choose that board?
mix run -e "IO.inspect(AOC.Day4.part1)"
"""
def part1() do
{i, b} = get_inputs("lib/inputs/day4.txt")
inputs = String.split(i, ",") |> Enum.map(&String.to_integer/1)
b |> create_boards |> call_to_win(inputs) |> calculate_score
end
@doc """
Figure out which board will win last. Once it wins, what would its final score be?
mix run -e "IO.inspect(AOC.Day4.part2)"
"""
def part2() do
{i, b} = get_inputs("lib/inputs/day4.txt")
inputs = String.split(i, ",") |> Enum.map(&String.to_integer/1)
b |> create_boards |> call_to_lose(inputs) |> calculate_score
end
end
|
elixir/advent_of_code/lib/2021/day4.ex
| 0.801237
| 0.63375
|
day4.ex
|
starcoder
|
defmodule Neoscan.Blocks do
@moduledoc """
The boundary for the Blocks system.
"""
import Ecto.Query, warn: true
alias Neoscan.Repo
alias Neoscan.Block
alias Neoscan.BlockMeta
alias Neoscan.Counters
alias Neoscan.Transaction
alias Neoscan.Transfer
require Logger
@page_size 15
@missing_block_range 1_000
@doc """
Gets a single block by its height or hash value
## Examples
iex> get(123)
%Block{}
iex> get(456)
nil
"""
def get(hash) do
block = _get(hash)
unless is_nil(block) do
transfers =
Repo.all(
from(t in Transfer, where: t.block_index == ^block.index, preload: [:transaction])
)
transfers =
transfers
|> Enum.filter(&(not is_nil(&1.transaction)))
|> Enum.map(& &1.transaction.hash)
Map.put(block, :transfers, transfers)
end
end
defp _get(hash) when is_binary(hash) do
Repo.one(
from(
e in Block,
where: e.hash == ^hash,
preload: [
transactions: ^transaction_query()
]
)
)
end
defp _get(index) when is_integer(index) do
Repo.one(
from(
e in Block,
where: e.index == ^index,
preload: [
transactions: ^transaction_query()
]
)
)
end
defp transaction_query do
from(t in Transaction, select: t.hash)
end
@doc """
Returns the list of paginated blocks.
## Examples
iex> paginate(page)
[%Block{}, ...]
"""
def paginate(page), do: paginate(page, @page_size)
def paginate(page, page_size) do
block_query =
from(
e in Block,
order_by: [
desc: e.index
],
limit: ^page_size,
select:
merge(
e,
%{
lag: fragment("extract(epoch FROM (? - lead(?) OVER ()))::integer", e.time, e.time)
}
)
)
Repo.paginate(block_query,
page: page,
page_size: page_size,
options: [total_entries: Counters.count_blocks() || 0]
)
end
def get_missing_block_indexes do
query = """
SELECT * FROM generate_series((SELECT GREATEST(0, (SELECT MAX(index) FROM blocks) - #{
@missing_block_range - 1
})),
(SELECT MAX(index) FROM blocks)) as index
EXCEPT SELECT * FROM (SELECT index FROM blocks ORDER BY index DESC LIMIT #{
@missing_block_range
}) as t(index)
"""
result = Ecto.Adapters.SQL.query!(Repo, query, [])
List.flatten(result.rows)
end
def get_max_index do
max_index =
Repo.one(
from(
b in Block,
order_by: [
desc: b.index
],
limit: 1,
select: b.index
)
)
if is_nil(max_index), do: -1, else: max_index
end
def get_cumulative_fees(indexes) do
max_query =
from(
b in Block,
where: b.cumulative_sys_fee > 0.0,
select: b.cumulative_sys_fee,
order_by: [desc: :index],
limit: 1
)
max = Repo.one(max_query) || Decimal.new(0)
all_query =
from(
b in Block,
where: b.index in ^indexes,
select: map(b, [:index, :cumulative_sys_fee])
)
values = Repo.all(all_query)
map =
Map.new(values, fn %{index: index, cumulative_sys_fee: cumulative_sys_fee} ->
{index, cumulative_sys_fee || max}
end)
map = Map.put(map, -1, Decimal.new(0))
map = Enum.reduce(indexes, map, fn index, acc -> Map.put_new(acc, index, max) end)
map
end
def last_index do
Repo.one(from(c in BlockMeta, where: c.id == 1, select: c.index)) || 0
end
end
|
apps/neoscan/lib/neoscan/blocks/blocks.ex
| 0.712432
| 0.427576
|
blocks.ex
|
starcoder
|
defmodule StableMarriage.Match do
@doc """
Start the process with two preference structures: maps (or keyword lists) of each person's ranked preferences.
All men are initially known as bachelors.
First bachelor starts the proposal cycle.
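For example, with small hypothetical preference maps:
    men = %{adam: [:ada, :bea], ben: [:ada, :bea]}
    women = %{ada: [:ben, :adam], bea: [:adam, :ben]}
    StableMarriage.Match.start(men, women)
    # => %{ada: :ben, bea: :adam}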
"""
def start(men_prefs, women_prefs) do
men_prefs = Enum.into(men_prefs, %{})
women_prefs = Enum.into(women_prefs, %{})
bachelors = Enum.map(men_prefs, fn({man, _}) -> man end)
propose(%{}, bachelors, men_prefs, women_prefs)
end
@doc """
Proposal Cycle
A man proposes to a woman.
She accepts his proposal,
or she accepts and rejects another who then must propose,
or she reject his proposal and he must propose to someone else.
If no one was rejected, the next available bachelor proposes.
The result is complete when there are no more bachelors remaining.
"""
def propose(results, [], _, _), do: results
def propose(results, [nil | bachelors], men_prefs, women_prefs) do
propose(results, bachelors, men_prefs, women_prefs)
end
def propose(results, [suitor | bachelors], men_prefs, women_prefs) do
{modified_men_prefs, woman} = next_proposal_for_suitor(suitor, men_prefs)
previous_suitor = results[woman]
[accepted, rejected] = choose_between_suitors(suitor, previous_suitor, women_prefs[woman])
new_results = Map.put(results, woman, accepted)
propose(new_results, [rejected | bachelors], modified_men_prefs, women_prefs)
end
@doc """
Cycle through a woman's preferred list of suitors.
Return quickly if there isn't a current engagement.
Stop when either a suitor or previous suitor is found first.
Return a list in the format [preferred_suitor, rejected]
"""
def choose_between_suitors(suitor, nil, _), do: [suitor, nil]
def choose_between_suitors(suitor, previous_suitor, [preferred | _])
when suitor == preferred do
[suitor, previous_suitor]
end
def choose_between_suitors(suitor, previous_suitor, [preferred | _])
when previous_suitor == preferred do
[previous_suitor, suitor]
end
def choose_between_suitors(suitor, previous_suitor, [_ | remainder]) do
choose_between_suitors(suitor, previous_suitor, remainder)
end
@doc """
Pop the first choice from the suitor's list.
Return modified pref list of all men, and the popped woman record.
"""
def next_proposal_for_suitor(suitor, men_prefs) do
[woman | modified_suitors_prefs] = men_prefs[suitor]
modified_men_prefs = Map.put(men_prefs, suitor, modified_suitors_prefs)
{modified_men_prefs, woman}
end
end
|
lib/stable_marriage/match.ex
| 0.560493
| 0.549459
|
match.ex
|
starcoder
|
defmodule ServerSentEvent do
@moduledoc """
**Push updates to Web clients over HTTP or using dedicated server-push protocols.**
Messages are sent in the following form, with the `text/event-stream` MIME type:
```txt
data: This is the first message.
data: This is the second message, it
data: has two lines.
event: custom
data: This message has event type 'custom'.
```
A living standard is available from [WHATWG](https://html.spec.whatwg.org/#server-sent-events).
The contents of a server-sent-event are:
| Field | Description |
| ----- | ----------- |
| **type** | The type of an event |
| **lines** | The data contents of the event split by line |
| **id** | Value to send in `last-event-id` header when reconnecting |
| **retry** | Time to wait before retrying connection in milliseconds |
| **comments** | Any lines from original block that were marked as comments |
"""
@new_line ["\r\n", "\r", "\n"]
@field_name_terminator [": ", ":"]
@type t :: %__MODULE__{
type: nil | String.t(),
lines: [String.t()],
id: nil | String.t(),
retry: nil | integer(),
comments: [String.t()]
}
defstruct type: nil,
lines: [],
id: nil,
retry: nil,
comments: []
@doc """
This event stream format's MIME type is `text/event-stream`.
"""
@spec mime_type() :: String.t()
def mime_type() do
"text/event-stream"
end
@doc """
Create a `ServerSentEvent` struct.
## Examples
iex> SSE.new("my data")
...> |> Map.get(:lines)
["my data"]
iex> SSE.new("some\\r\\nlines")
...> |> Map.get(:lines)
["some", "lines"]
iex> SSE.new("some\\nlines")
...> |> Map.get(:lines)
["some", "lines"]
iex> SSE.new("my data", id: "45")
...> |> Map.get(:id)
"45"
iex> SSE.new("my data", retry: 45)
...> |> Map.get(:retry)
45
iex> SSE.new("my data", type: "update")
...> |> Map.get(:type)
"update"
"""
@spec new(String.t(), list()) :: t()
def new(data, opts \\ []) do
lines = String.split(data, @new_line)
%__MODULE__{
type: Keyword.get(opts, :type, nil),
lines: lines,
id: Keyword.get(opts, :id, nil),
retry: Keyword.get(opts, :retry, nil)
}
end
@doc """
Does the event have any data lines.
An event without any data lines will not trigger any browser events.
"""
@spec empty?(event :: t()) :: boolean
def empty?(_event = %{lines: []}), do: true
def empty?(_event = %{lines: _}), do: false
@doc """
Format an event to be sent as part of a stream
serialize accepts the same arguments as new to create and serialize in one step.
**NOTE:** Each data/comment line must be without newline characters.
## Examples
*In these examples this module has been aliased to `SSE`*.
iex> SSE.serialize("my data", type: "update")
"event: update\\ndata: my data\\n\\n"
iex> %SSE{type: "greeting", lines: ["Hi,", "there"], comments: ["comment"]}
...> |> SSE.serialize()
"event: greeting\\n: comment\\ndata: Hi,\\ndata: there\\n\\n"
iex> %SSE{lines: ["message with id"], id: "some-id"}
...> |> SSE.serialize()
"data: message with id\\nid: some-id\\n\\n"
iex> %SSE{lines: ["message setting retry to 10s"], retry: 10_000}
...> |> SSE.serialize()
"data: message setting retry to 10s\\nretry: 10000\\n\\n"
"""
@spec serialize(event :: t()) :: String.t()
@spec serialize(String.t(), list()) :: String.t()
def serialize(event = %__MODULE__{}) do
(type_line(event) ++
comment_lines(event) ++ data_lines(event) ++ id_line(event) ++ retry_line(event) ++ ["\n"])
|> Enum.join("\n")
end
def serialize(data, opts \\ []) do
new(data, opts)
|> serialize()
end
defp type_line(%{type: nil}) do
[]
end
defp type_line(%{type: type}) do
single_line?(type) || raise ArgumentError, "event type must be a single line"
["event: " <> type]
end
defp comment_lines(%{comments: comments}) do
Enum.map(comments, fn comment ->
single_line?(comment) || raise ArgumentError, "comment must be a single line"
": " <> comment
end)
end
defp data_lines(%{lines: lines}) do
Enum.map(lines, fn line ->
single_line?(line) || raise ArgumentError, "data line must be a single line"
"data: " <> line
end)
end
defp id_line(%{id: nil}) do
[]
end
defp id_line(%{id: id}) do
single_line?(id) || raise ArgumentError, "id must be a single line"
["id: " <> id]
end
defp retry_line(%{retry: nil}) do
[]
end
defp retry_line(%{retry: retry}) when is_integer(retry) do
["retry: " <> to_string(retry)]
end
defp single_line?(text) do
length(:binary.split(text, @new_line)) == 1
end
@doc """
Parse all events from text stream.
## Examples
*In these examples this module has been aliased to `SSE`*.
iex> SSE.parse_all("data: First message\\n\\ndata: Second\\ndata: message\\n\\nrest")
{:ok,
{
[
%SSE{lines: ["First message"]},
%SSE{lines: ["Second", "message"]}
],
"rest"
}
}
iex> SSE.parse_all("data: This is the first message\\n\\n")
{:ok, {[%SSE{lines: ["This is the first message"]}], ""}}
iex> SSE.parse_all("data: This is the first message\\n\\nrest")
{:ok, {[%SSE{lines: ["This is the first message"]}], "rest"}}
iex> SSE.parse_all("data: This message is not complete")
{:ok, {[], "data: This message is not complete"}}
iex> SSE.parse_all("This line is invalid\\nit doesn't contain a colon\\n")
{:error, {:malformed_line, "This line is invalid"}}
iex> SSE.parse_all("data: This is the first message\\n\\nThis line is invalid\\n")
{:error, {:malformed_line, "This line is invalid"}}
iex> SSE.parse_all("data: This is the first message\\n\\nThis line is yet to terminate")
{:ok, {[%SSE{lines: ["This is the first message"]}], "This line is yet to terminate"}}
"""
@spec parse_all(String.t()) ::
{:ok, {[event :: t()], rest :: String.t()}}
| {:error, term}
def parse_all(stream) do
case do_parse_all(stream, []) do
{:ok, {evts, rest}} ->
{:ok, {Enum.reverse(evts), rest}}
err ->
err
end
end
defp do_parse_all(stream, events) do
case parse(stream) do
{:ok, {nil, rest}} ->
{:ok, {events, rest}}
{:ok, {evt, rest}} ->
do_parse_all(rest, [evt | events])
err ->
err
end
end
@doc ~S"""
Parse the next event from text stream, if present.
## Examples
*In these examples this module has been aliased to `SSE`*.
iex> SSE.parse("data: This is the first message\n\n")
{:ok, {%SSE{lines: ["This is the first message"]}, ""}}
iex> SSE.parse("data:First whitespace character is optional\n\n")
{:ok, {%SSE{lines: ["First whitespace character is optional"]}, ""}}
iex> SSE.parse("data: This message\ndata: has two lines.\n\n")
{:ok, {%SSE{lines: ["This message", "has two lines."]}, ""}}
iex> SSE.parse("data: This is the first message\n\nrest")
{:ok, {%SSE{lines: ["This is the first message"]}, "rest"}}
iex> SSE.parse("data: This message is not complete")
{:ok, {nil, "data: This message is not complete"}}
iex> SSE.parse("This line is invalid\nit doesn't contain a colon\n")
{:error, {:malformed_line, "This line is invalid"}}
iex> SSE.parse("event: custom\ndata: This message is type custom\n\n")
{:ok, {%SSE{type: "custom", lines: ["This message is type custom"]}, ""}}
iex> SSE.parse("id: 100\ndata: This message has an id\n\n")
{:ok, {%SSE{id: "100", lines: ["This message has an id"]}, ""}}
iex> SSE.parse("retry: 5000\ndata: This message retries after 5s.\n\n")
{:ok, {%SSE{retry: 5000, lines: ["This message retries after 5s."]}, ""}}
iex> SSE.parse("retry: five thousand\ndata: retry value is not a valid integer\n\n")
{:error, {:invalid_retry_value, "five thousand"}}
iex> SSE.parse(": This is a comment\n\n")
{:ok, {%SSE{comments: ["This is a comment"]}, ""}}
iex> SSE.parse("data: data can have more :'s in it'\n\n")
{:ok, {%SSE{lines: ["data can have more :'s in it'"]}, ""}}
iex> SSE.parse("DATA: field names are case-sensitive\n\n")
{:error, {:invalid_field_name, "DATA"}}
iex> SSE.parse("unknown: what is this field?\n\n")
{:error, {:invalid_field_name, "unknown"}}
# It is possible for an event stream using `CRLF` to be split mid line delimiter.
# In this case the parser needs to clear the leading newline character.
iex> SSE.parse("data: This is the first message\r\n\r")
{:ok, {%SSE{lines: ["This is the first message"]}, ""}}
iex> SSE.parse("\ndata: This is the second message\r\n\r\n")
{:ok, {%SSE{lines: ["This is the second message"]}, ""}}
"""
@spec parse(String.t()) ::
{:ok, {event :: t() | nil, rest :: String.t()}}
| {:error, term}
def parse(<<lead_character, rest::binary>>) when lead_character in [?\r, ?\n] do
parse(rest)
end
def parse(stream) do
do_parse(stream, %__MODULE__{}, stream)
end
defp do_parse(stream, event, original) do
case pop_line(stream) do
nil ->
{:ok, {nil, original}}
{"", rest} ->
{:ok, {event, rest}}
{line, rest} ->
with {:ok, event} <- process_line(line, event),
do: do_parse(rest, event, original)
end
end
defp pop_line(stream) do
case :binary.split(stream, @new_line) do
[^stream] ->
nil
[line, rest] ->
{line, rest}
end
end
defp process_line(line, event) do
case :binary.split(line, @field_name_terminator) do
["", value] ->
process_field("comment", value, event)
[field, value] ->
process_field(field, value, event)
_ ->
{:error, {:malformed_line, line}}
end
end
defp process_field("event", type, event) do
{:ok, Map.put(event, :type, type)}
end
defp process_field("data", line, event = %{lines: lines}) do
{:ok, %{event | lines: lines ++ [line]}}
end
defp process_field("id", id, event) do
{:ok, Map.put(event, :id, id)}
end
defp process_field("retry", timeout, event) do
case Integer.parse(timeout) do
{timeout, ""} ->
{:ok, Map.put(event, :retry, timeout)}
_err ->
{:error, {:invalid_retry_value, timeout}}
end
end
defp process_field("comment", comment, event = %{comments: comments}) do
{:ok, %{event | comments: comments ++ [comment]}}
end
defp process_field(other_field_name, _value, _event) do
{:error, {:invalid_field_name, other_field_name}}
end
end
|
lib/server_sent_event.ex
| 0.910483
| 0.738846
|
server_sent_event.ex
|
starcoder
|
defmodule AWS.MigrationHubRefactorSpaces do
@moduledoc """
Amazon Web Services Migration Hub Refactor Spaces
This API reference provides descriptions, syntax, and other details about each
of the actions and data types for Amazon Web Services Migration Hub Refactor
Spaces (Refactor Spaces).
The topic for each action shows the API request parameters and the response.
Alternatively, you can use one of the Amazon Web Services SDKs to access an API
that is tailored to the programming language or platform that you're using. For
more information, see [Amazon Web Services SDKs](http://aws.amazon.com/tools/#SDKs).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2021-10-26",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "refactor-spaces",
global?: false,
protocol: "rest-json",
service_id: "Migration Hub Refactor Spaces",
signature_version: "v4",
signing_name: "refactor-spaces",
target_prefix: nil
}
end
@doc """
Creates an Amazon Web Services Migration Hub Refactor Spaces application.
The account that owns the environment also owns the applications created inside
the environment, regardless of the account that creates the application.
Refactor Spaces provisions the Amazon API Gateway and Network Load Balancer for
the application proxy inside your account.
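For illustration, a hypothetical call (the environment identifier and input map are made up):
    client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
    create_application(client, "env-1234567890", %{"Name" => "my-application"})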
"""
def create_application(%Client{} = client, environment_identifier, input, options \\ []) do
url_path = "/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Creates an Amazon Web Services Migration Hub Refactor Spaces environment.
The caller owns the environment resource, and they are referred to as the
*environment owner*. The environment owner has cross-account visibility and
control of Refactor Spaces resources that are added to the environment by other
accounts that the environment is shared with. When creating an environment,
Refactor Spaces provisions a transit gateway in your account.
"""
def create_environment(%Client{} = client, input, options \\ []) do
url_path = "/environments"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Creates an Amazon Web Services Migration Hub Refactor Spaces route.
The account owner of the service resource is always the environment owner,
regardless of which account creates the route. Routes target a service in the
application. If an application does not have any routes, then the first route
must be created as a `DEFAULT` `RouteType`.
When you create a route, Refactor Spaces configures the Amazon API Gateway to
send traffic to the target service as follows:
* If the service has a URL endpoint, and the endpoint resolves to a
private IP address, Refactor Spaces routes traffic using the API Gateway VPC
link.
* If the service has a URL endpoint, and the endpoint resolves to a
public IP address, Refactor Spaces routes traffic over the public internet.
* If the service has a Lambda function endpoint, then Refactor
Spaces uses the API Gateway Lambda integration.
A health check is performed on the service when the route is created. If the
health check fails, the route transitions to `FAILED`, and no traffic is sent to
the service.
For Lambda functions, the Lambda function state is checked. If the function is
not active, the function configuration is updated so that Lambda resources are
provisioned. If the Lambda state is `Failed`, then the route creation fails. For
more information, see the [GetFunctionConfiguration's State response parameter](https://docs.aws.amazon.com/lambda/latest/dg/API_GetFunctionConfiguration.html#SSS-GetFunctionConfiguration-response-State)
in the *Lambda Developer Guide*.
For public URLs, a connection is opened to the public endpoint. If the URL is
not reachable, the health check fails. For private URLs, a target group is
created and the target group health check is run.
The `HealthCheckProtocol`, `HealthCheckPort`, and `HealthCheckPath` are the same
protocol, port, and path specified in the URL or health URL, if used. All other
settings use the default values, as described in [Health checks for your target groups](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/target-group-health-checks.html).
The health check is considered successful if at least one target within the
target group transitions to a healthy state.
"""
def create_route(
%Client{} = client,
application_identifier,
environment_identifier,
input,
options \\ []
) do
url_path =
"/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications/#{AWS.Util.encode_uri(application_identifier)}/routes"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Creates an Amazon Web Services Migration Hub Refactor Spaces service.
The account owner of the service is always the environment owner, regardless of
which account in the environment creates the service. Services have either a URL
endpoint in a virtual private cloud (VPC), or a Lambda function endpoint.
If an Amazon Web Services resource is launched in a service VPC, and you want it
to be accessible to all of an environment’s services with VPCs and routes, apply
the `RefactorSpacesSecurityGroup` to the resource. Alternatively, to add more
cross-account constraints, apply your own security group.
"""
def create_service(
%Client{} = client,
application_identifier,
environment_identifier,
input,
options \\ []
) do
url_path =
"/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications/#{AWS.Util.encode_uri(application_identifier)}/services"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Deletes an Amazon Web Services Migration Hub Refactor Spaces application.
Before you can delete an application, you must first delete any services or
routes within the application.
"""
def delete_application(
%Client{} = client,
application_identifier,
environment_identifier,
input,
options \\ []
) do
url_path =
"/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications/#{AWS.Util.encode_uri(application_identifier)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Deletes an Amazon Web Services Migration Hub Refactor Spaces environment.
Before you can delete an environment, you must first delete any applications and
services within the environment.
"""
def delete_environment(%Client{} = client, environment_identifier, input, options \\ []) do
url_path = "/environments/#{AWS.Util.encode_uri(environment_identifier)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Deletes the resource policy set for the environment.
"""
def delete_resource_policy(%Client{} = client, identifier, input, options \\ []) do
url_path = "/resourcepolicy/#{AWS.Util.encode_uri(identifier)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Deletes an Amazon Web Services Migration Hub Refactor Spaces route.
"""
def delete_route(
%Client{} = client,
application_identifier,
environment_identifier,
route_identifier,
input,
options \\ []
) do
url_path =
"/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications/#{AWS.Util.encode_uri(application_identifier)}/routes/#{AWS.Util.encode_uri(route_identifier)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Deletes an Amazon Web Services Migration Hub Refactor Spaces service.
"""
def delete_service(
%Client{} = client,
application_identifier,
environment_identifier,
service_identifier,
input,
options \\ []
) do
url_path =
"/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications/#{AWS.Util.encode_uri(application_identifier)}/services/#{AWS.Util.encode_uri(service_identifier)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Gets an Amazon Web Services Migration Hub Refactor Spaces application.
"""
def get_application(
%Client{} = client,
application_identifier,
environment_identifier,
options \\ []
) do
url_path =
"/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications/#{AWS.Util.encode_uri(application_identifier)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Gets an Amazon Web Services Migration Hub Refactor Spaces environment.
"""
def get_environment(%Client{} = client, environment_identifier, options \\ []) do
url_path = "/environments/#{AWS.Util.encode_uri(environment_identifier)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Gets the resource-based permission policy that is set for the given environment.
"""
def get_resource_policy(%Client{} = client, identifier, options \\ []) do
url_path = "/resourcepolicy/#{AWS.Util.encode_uri(identifier)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Gets an Amazon Web Services Migration Hub Refactor Spaces route.
"""
def get_route(
%Client{} = client,
application_identifier,
environment_identifier,
route_identifier,
options \\ []
) do
url_path =
"/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications/#{AWS.Util.encode_uri(application_identifier)}/routes/#{AWS.Util.encode_uri(route_identifier)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Gets an Amazon Web Services Migration Hub Refactor Spaces service.
"""
def get_service(
%Client{} = client,
application_identifier,
environment_identifier,
service_identifier,
options \\ []
) do
url_path =
"/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications/#{AWS.Util.encode_uri(application_identifier)}/services/#{AWS.Util.encode_uri(service_identifier)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists all the Amazon Web Services Migration Hub Refactor Spaces applications
within an environment.
"""
def list_applications(
%Client{} = client,
environment_identifier,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists all the virtual private clouds (VPCs) that are part of an Amazon Web
Services Migration Hub Refactor Spaces environment.
"""
def list_environment_vpcs(
%Client{} = client,
environment_identifier,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/environments/#{AWS.Util.encode_uri(environment_identifier)}/vpcs"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists Amazon Web Services Migration Hub Refactor Spaces environments owned by a
caller account or shared with the caller account.
"""
def list_environments(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/environments"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists all the Amazon Web Services Migration Hub Refactor Spaces routes within an
application.
"""
def list_routes(
%Client{} = client,
application_identifier,
environment_identifier,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path =
"/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications/#{AWS.Util.encode_uri(application_identifier)}/routes"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists all the Amazon Web Services Migration Hub Refactor Spaces services within
an application.
"""
def list_services(
%Client{} = client,
application_identifier,
environment_identifier,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path =
"/environments/#{AWS.Util.encode_uri(environment_identifier)}/applications/#{AWS.Util.encode_uri(application_identifier)}/services"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists the tags of a resource.
The caller account must be the same as the resource’s `OwnerAccountId`. Listing
tags in other accounts is not supported.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Attaches a resource-based permission policy to the Amazon Web Services Migration
Hub Refactor Spaces environment.
The policy must contain the same actions and condition statements as the
`arn:aws:ram::aws:permission/AWSRAMDefaultPermissionRefactorSpacesEnvironment`
permission in Resource Access Manager. The policy must not contain new lines or
blank lines.
"""
def put_resource_policy(%Client{} = client, input, options \\ []) do
url_path = "/resourcepolicy"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Adds to or modifies the tags of the given resource.
Tags are metadata which can be used to manage a resource. To tag a resource, the
caller account must be the same as the resource’s `OwnerAccountId`. Tagging
resources in other accounts is not supported.
Amazon Web Services Migration Hub Refactor Spaces does not propagate tags to
orchestrated resources, such as an environment’s transit gateway.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Removes the tags of the given resource.
Tags are metadata which can be used to manage a resource. To untag a resource,
the caller account must be the same as the resource’s `OwnerAccountId`.
Untagging resources across accounts is not supported.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
{query_params, input} =
[
{"TagKeys", "tagKeys"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
end
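# Usage sketch (not part of the generated module): the client setup below
# follows the aws-elixir convention; the credentials and the input map keys are
# illustrative assumptions, not values confirmed by this file.
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#     {:ok, body, _response} =
#       AWS.MigrationHubRefactorSpaces.create_environment(client, %{
#         "Name" => "refactor-env",
#         "NetworkFabricType" => "TRANSIT_GATEWAY"
#       })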
|
lib/aws/generated/migration_hub_refactor_spaces.ex
| 0.845544
| 0.623348
|
migration_hub_refactor_spaces.ex
|
starcoder
|
defmodule Elsol.Query do
@moduledoc """
An example of a simple and extensible Elixir module for building a Solr query
string according to the existing syntax, using a single `build` function with
pattern matching and recursion.
For sub-parameters (dotted) such as `facet.count`, `facet.range.gap`, substitute `.`
with `_` in field keys (atoms), e.g. `facet_count`, `facet_range_gap`.
Note that because of the nature of how Elixir handles maps and lists,
query params will naturally be rendered in alphabetical order.
Most Solr params are optional (nil) and are not rendered unless a value is given.
Change `name: "/select"` for other custom request handlers.
## Examples, use the `%Elsol.Query{}` struct to build a simple query
iex> Elsol.Query.build(%Elsol.Query{q: "market", rows: 20, fq: ["postcode:1234", "city:london"], fl: "*"})
"/select?fl=*&fq=postcode:1234&fq=city:london&q=market&rows=20"
## use the `%Elsol.Query{}` and `%Elsol.Query.Facet` structs to include faceting
iex> Elsol.Query.build( Map.merge %Elsol.Query{q: "{!lucene}*:*"}, %Elsol.Query.Facet{facet_field: ["postcode", "city"]})
"/select?facet=true&facet.field=postcode&facet.field=city&q={!lucene}*:*"
"""
defstruct url: nil, name: "/select", q: nil, fq: nil, start: nil, rows: nil,
wt: "json", tr: nil, sort: nil, echoParams: nil, fl: nil, collection: %Elsol.Collection{}
def build(params) when is_map(params) do
cond do
Map.has_key?(params, :name) ->
Elsol.Collection.path(params.collection) # this can be an empty string or /collection_name
<> params.name
<> "?"
<> build( Map.drop(params, [:url, :name]) |> Map.to_list ) # query params
true -> build( Map.to_list(params) )
end
end
def build([head|tail]) do
(build(head) <> build(tail)) |> String.trim_trailing("&")
end
def build({k,v}) when is_bitstring(v) or is_integer(v) do
(Atom.to_string(k) |> String.replace("_",".")) <> "=#{v}&"
end
def build({k, v}) when is_boolean(v), do: (Atom.to_string(k) |> String.replace("_",".")) <> "=#{v}&"
def build({k,v}) when is_list(v) and length(v)>0 do
build({k, hd(v)}) <> build({k, tl(v)})
end
def build({:collection, %Elsol.Collection{query: query} = _collection}) when is_bitstring(query), do: "collection=#{query}&"
def build({:collection, _collection}), do: ""
def build({:__struct__, _}), do: ""
def build({_, nil}), do: ""
def build({_,[]}), do: ""
def build([]), do: ""
end
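# A small follow-up sketch: the underscore-to-dot substitution described in the
# moduledoc can be seen directly by feeding `build/1` a bare keyword list (it
# accepts any map or list of pairs, not only the structs above).
Elsol.Query.build(facet_range_gap: "+1DAY", facet_limit: 10)
#=> "facet.range.gap=+1DAY&facet.limit=10"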
|
lib/elsol/query.ex
| 0.874386
| 0.685074
|
query.ex
|
starcoder
|
defmodule Plug.Telemetry do
@moduledoc """
A plug to instrument the pipeline with `:telemetry` events.
When plugged, the event prefix is a required option:
plug Plug.Telemetry, event_prefix: [:my, :plug]
In the example above, two events will be emitted:
* `[:my, :plug, :start]` - emitted when the plug is invoked.
The event carries a single measurement, `:time`, which is the
system time in native units at the moment the event is emitted.
The only metadata is the whole `Plug.Conn` under the `:conn` key.
* `[:my, :plug, :stop]` - emitted right before the request is sent.
The event carries a single measurement, `:duration`, which is the
monotonic time difference between the stop and start events.
The same as for the start event, the only metadata is the `Plug.Conn`
struct under the `:conn` key.
After the Plug is added, please be sure to add
[:telemetry](https://github.com/beam-telemetry/telemetry) as
project dependency.
Note that this plug measures only the time between its invocation and
the rest of the plug pipeline - this can be used to exclude some plugs
from measurement.
## Time unit
Both `:time` and `:duration` measurements are presented in the `:native`
time unit. You can read more about it in the docs for `System.convert_time_unit/3`.
## Example
defmodule InstrumentedPlug do
use Plug.Router
plug :match
plug Plug.Telemetry, event_prefix: [:my, :plug]
plug Plug.Parsers, parsers: [:urlencoded, :multipart]
plug :dispatch
get "/" do
send_resp(conn, 200, "Hello, world!")
end
end
In this example, the stop event's `duration` includes the time
it takes to parse the request, dispatch it to the correct handler,
and execute the handler. The events are not emitted for requests
not matching any handlers, since the plug is placed after the match plug.
"""
@behaviour Plug
@impl true
def init(opts) do
event_prefix = opts[:event_prefix] || raise ArgumentError, ":event_prefix is required"
ensure_valid_event_prefix!(event_prefix)
start_event = event_prefix ++ [:start]
stop_event = event_prefix ++ [:stop]
{start_event, stop_event}
end
@impl true
def call(conn, {start_event, stop_event}) do
start_time = System.monotonic_time()
:telemetry.execute(start_event, %{time: System.system_time()}, %{conn: conn})
Plug.Conn.register_before_send(conn, fn conn ->
duration = System.monotonic_time() - start_time
:telemetry.execute(stop_event, %{duration: duration}, %{conn: conn})
conn
end)
end
defp ensure_valid_event_prefix!(event_prefix) do
if is_list(event_prefix) && Enum.all?(event_prefix, &is_atom/1) do
:ok
else
raise ArgumentError,
"expected :event_prefix to be a list of atoms, got: #{inspect(event_prefix)}"
end
end
end
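# A minimal follow-up sketch (the handler module below is an assumption, not
# part of Plug.Telemetry) showing how the start/stop events documented above
# can be consumed with :telemetry.attach/4.
defmodule MyApp.PlugTelemetryLogger do
require Logger
def attach do
:telemetry.attach("log-plug-stop", [:my, :plug, :stop], &__MODULE__.handle_event/4, nil)
end
def handle_event([:my, :plug, :stop], %{duration: duration}, %{conn: conn}, _config) do
# Convert the :native-unit duration emitted by Plug.Telemetry for display.
ms = System.convert_time_unit(duration, :native, :millisecond)
Logger.info("#{conn.method} #{conn.request_path} took #{ms}ms")
end
end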
|
lib/plug/telemetry.ex
| 0.920634
| 0.705024
|
telemetry.ex
|
starcoder
|
defmodule CRUD do
@moduledoc """
A module for easy access to the database.
"""
@moduledoc since: "1.0.5"
use Ecto.Schema
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
import Ecto.Query, only: [from: 2, where: 2, where: 3, offset: 2]
@cont Keyword.get(opts, :context)
@behaviour __MODULE__
@doc """
Returns the current Repo
"""
def context(), do: @cont
@doc """
Adds a new entity to the database
## Takes in parameters:
- `mod`: Module
- `opts`: Map or `key: value` parameters separated by commas
## Returns:
- `{:ok, struct}`
- `{:error, error as a string or list of errors}`
## Examples:
- `iex> MyApp.CRUD.add(MyApp.MyModule, %{key1: value1, key2: value2})`
`{:ok, struct}`
- `iex> MyApp.CRUD.add(MyApp.MyModule, key1: value1, key2: value2)`
`{:ok, struct}`
"""
def add(mod, opts), do: @cont.insert(set_field(mod, opts)) |> response(mod)
@doc """
Retrieves a structure from the DB
## Takes in parameters:
- Using the `id` of a record in the database
- `mod`: Module
- `id`: Structure identifier in the database
- Search by a bunch of `key: value` pairs of a record in the database
- `mod`: Module
- `opts`: Map or `key: value` parameters separated by commas
## Returns:
- `{:ok, struct}`
- `{:error, error as a string}`
## Examples:
- `iex> MyApp.CRUD.get(MyApp.MyModule, 1)`
`{:ok, struct}`
- `iex> MyApp.CRUD.get(MyApp.MyModule, id: 1)`
`{:ok, struct}`
- `iex> MyApp.CRUD.get(MyApp.MyModule, %{id: 1})`
`{:ok, struct}`
"""
def get(mod, id) when is_integer(id) or is_binary(id) do
@cont.get(mod, id) |> response(mod)
end
@doc """
Retrieves structure from DB
## Takes in parameters:
- Using `id` records from the database
- `mod`: Module
- `id`: Structure identifier in the database
- Search by a bunch of `keys: value` of a record in the database
- `mod`: Module
- `opts`: Map or paramatras `keys: value` separated by commas
## Returns:
- `{:ok, struct}`
- `{:error, error as a string}`
## Examples:
- `iex> MyApp.CRUD.add(MyApp.MyModule, 1)`
`{:ok, struct}`
- `iex> MyApp.CRUD.add(MyApp.MyModule, id: 1)`
`{:ok, struct}`
- `iex> MyApp.CRUD.add(MyApp.MyModule, %{id: 1})`
`{:ok, struct}`
"""
def get(mod, opts) when is_list(opts) or is_map(opts) do
@cont.get_by(mod, opts_to_map(opts)) |> response(mod)
end
@doc """
Returns a list of structures from the database corresponding to the given Module
## Takes in parameters:
- `mod`: Module
## Returns:
- `{:ok, list of structures}`
- `{:ok, []}`
## Examples
- `iex> MyApp.CRUD.get_all(MyApp.MyModule)`
`{:ok, list of structures}`
"""
def get_all(mod) do
{:ok, @cont.all(from(item in mod, select: item, order_by: item.id))}
end
@doc """
Returns a list of structures from the database corresponding to the given Module
## Takes in parameters:
- `mod`: Module
- `opts`: Map or `key: value` parameters separated by commas
## Returns
- `{:ok, list of structures}`
- `{:ok, []}`
## Examples
- `iex> MyApp.CRUD.get_all(MyApp.MyModule, id: 1)`
`{:ok, list of structures}`
- `iex> MyApp.CRUD.get_all(MyApp.MyModule, %{id: 1})`
`{:ok, list of structures}`
"""
def get_all(mod, opts) when is_list(opts) or is_map(opts) do
{:ok, @cont.all(from(i in mod, select: i, order_by: i.id) |> filter(opts))}
end
@doc """
Returns the specified number of items for the module
## Takes in parameters:
- `mod`: Module
- `limit`: Number of items to display
## Returns
- `{:ok, list of structures}`
- `{:ok, []}`
## Examples
- `iex> MyApp.CRUD.get_few(MyApp.MyModule, 200)`
`{:ok, list of structures}`
"""
def get_few(mod, limit) when is_integer(limit) do
{:ok, @cont.all(from(i in mod, select: i, order_by: i.id, limit: ^limit))}
end
@doc """
Returns the specified number of items for a module starting from a specific item
## Takes in parameters:
- `mod`: Module
- `limit`: Number of items to display
- `offset`: First element number
## Returns
- `{:ok, list of structures}`
- `{:ok, []}`
## Examples
- `iex> MyApp.CRUD.get_few(MyApp.MyModule, 200, 50)`
`{:ok, list of structures}`
"""
def get_few(mod, limit, offset) when is_integer(limit) and is_integer(offset) do
query = from(i in mod, select: i, order_by: i.id, limit: ^limit, offset: ^offset)
{:ok, @cont.all(query)}
end
@doc """
Returns the specified number of items for a module that match the given filters
## Takes in parameters:
- `mod`: Module
- `limit`: Number of items to display
- `opts`: Map or `key: value` parameters separated by commas
## Returns
- `{:ok, list of structures}`
- `{:ok, []}`
## Examples
- `iex> MyApp.CRUD.get_few(MyApp.MyModule, 200, key: value)`
`{:ok, list of structures}`
- `iex> MyApp.CRUD.get_few(MyApp.MyModule, 200, %{key: value})`
`{:ok, list of structures}`
"""
def get_few(mod, limit, opts) when is_list(opts) or is_map(opts) do
query = from(i in mod, select: i, order_by: i.id, limit: ^limit)
{:ok, @cont.all(query |> filter(opts))}
end
@doc """
Returns the specified number of items for a module starting from a specific item
## Takes in parameters:
- `mod`: Module
- `limit`: Number of items to display
- `offset`: First element number
- `opts`: Map or `key: value` parameters separated by commas
## Returns
- `{:ok, list of structures}`
- `{:ok, []}`
## Examples
- `iex> MyApp.CRUD.get_few(MyApp.MyModule, 200, 50, key: value)`
`{:ok, list of structures}`
- `iex> MyApp.CRUD.get_few(MyApp.MyModule, 200, 50, %{key: value})`
`{:ok, list of structures}`
"""
def get_few(mod, limit, offset, opts) when is_list(opts) or is_map(opts) do
query = from(i in mod, select: i, limit: ^limit)
{:ok, @cont.all(query |> filter(opts) |> offset(^offset))}
end
@doc """
Makes changes to the structure from the database
## Takes in parameters:
- `item`: Structure for change
- `opts`: Map or `key: value` parameters separated by commas
## Returns
- `{:ok, structure}`
- `{:error, error as a string or list of errors}`
## Examples
- `iex> MyApp.CRUD.update(item, key: value)`
`{:ok, structure}`
- `iex> MyApp.CRUD.update(item, %{key: value})`
`{:ok, structure}`
"""
def update(item, opts) when is_struct(item),
do: item.__struct__.changeset(item, opts_to_map(opts)) |> @cont.update()
@doc """
Makes changes to the structure from the database
## Takes in parameters:
- `mod`: Module
- `id`: Structure identifier in the database
- `opts`: Map or `key: value` parameters separated by commas
## Returns
- `{:ok, structure}`
- `{:error, error as a string or list of errors}`
## Examples
- `iex> MyApp.CRUD.update(MyApp.MyModule, 1, key: value)`
`{:ok, structure}`
- `iex> MyApp.CRUD.update(MyApp.MyModule, 1, %{key: value})`
`{:ok, structure}`
"""
def update(mod, id, opts) when is_integer(id) or is_binary(id),
do: get(mod, id) |> update_response(opts)
@doc """
Makes changes to the structure from the database
## Takes in parameters:
- `mod`: Module
- `key`: Field from structure
- `val`: Field value
- `opts`: Map or `key: value` parameters separated by commas
## Returns
- `{:ok, structure}`
- `{:error, error as a string or list of errors}`
## Examples
- `iex> MyApp.CRUD.update(MyApp.MyModule, :key, 1, key: value)`
`{:ok, structure}`
- `iex> MyApp.CRUD.update(MyApp.MyModule, :key, 1, %{key: value})`
`{:ok, structure}`
"""
def update(mod, key, val, opts), do: get(mod, [{key, val}]) |> update_response(opts)
@doc """
Removes the specified structure from the database
## Takes in parameters:
- `item`: Structure
## Returns
- `{:ok, structure}`
- `{:error, error as a string or list of errors}`
## Examples
- `iex> MyApp.CRUD.delete(structure)`
`{:ok, structure}`
"""
def delete(item) when is_struct(item) do
try do
@cont.delete(item)
rescue
_ -> {:error, module_title(item) <> " is not found"}
end
end
@doc """
Removes the specified structure from the database
## Takes in parameters:
- `mod`: Module
- `id`: Structure identifier in the database
## Returns
- `{:ok, structure}`
- `{:error, error as a string or list of errors}`
## Examples
- `iex> MyApp.CRUD.delete(MyApp.MyModule, 1)`
`{:ok, structure}`
"""
def delete(mod, id), do: get(mod, id) |> delete_response()
@doc """
Returns a list of structures in which the values of the specified fields partially or completely correspond to the entered text
## Takes in parameters:
- `mod`: Module
- `opts`: Map or `key: value` parameters of fields to match (values are matched partially via `ilike`)
## Returns
- `{:ok, list of structures}`
- `{:ok, []}`
## Examples
- `iex> MyApp.CRUD.find(MyApp.MyModule, key: "sample")`
`{:ok, list of structures}`
- `iex> MyApp.CRUD.find(MyApp.MyModule, %{key: "sample"})`
`{:ok, list of structures}`
"""
def find(mod, opts),
do: from(item in mod, select: item) |> find(opts_to_map(opts), Enum.count(opts), 0)
defp set_field(mod, opts), do: mod.changeset(mod.__struct__, opts_to_map(opts))
defp opts_to_map(opts) when is_map(opts), do: opts
defp opts_to_map(opts) when is_list(opts),
do: Enum.reduce(opts, %{}, fn {key, value}, acc -> Map.put(acc, key, value) end)
defp find(query, opts, count, acc) do
{key, val} = Enum.at(opts, acc)
result = query |> where([i], ilike(field(i, ^key), ^"%#{val}%"))
if acc < count - 1,
do: find(result, opts, count, acc + 1),
else: {:ok, @cont.all(result)}
end
defp filter(query, opts), do: filter(query, opts, Enum.count(opts), 0)
defp filter(query, opts, count, acc) do
fields = Map.new([Enum.at(opts, acc)]) |> Map.to_list()
result = query |> where(^fields)
if acc < count - 1, do: filter(result, opts, count, acc + 1), else: result
end
defp module_title(mod) when is_struct(mod), do: module_title(mod.__struct__)
defp module_title(mod), do: Module.split(mod) |> Enum.at(Enum.count(Module.split(mod)) - 1)
defp error_handler(err) when is_struct(err),
do: Enum.map(err.errors, fn {key, {msg, _}} -> error_str(key, msg) end)
defp error_handler(err) when is_tuple(err),
do: Enum.map([err], fn {_, message} -> message end)
defp error_handler(error), do: error
defp delete_response({:error, reason}), do: {:error, error_handler(reason)}
defp delete_response({:ok, item}), do: delete(item)
defp update_response({:error, reason}, _opts), do: {:error, error_handler(reason)}
defp update_response({:ok, item}, opts), do: update(item, opts)
defp response(nil, mod), do: {:error, module_title(mod) <> " not found"}
defp response({:error, reason}, _module), do: {:error, error_handler(reason)}
defp response({:ok, item}, _module), do: {:ok, item}
defp response(item, _module), do: {:ok, item}
defp error_str(key, msg), do: "#{Atom.to_string(key) |> String.capitalize()}: #{msg}"
end
end
end
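# Usage sketch under stated assumptions: `MyApp.Repo` is a hypothetical Ecto
# repo and `MyApp.User` a hypothetical schema with a `changeset/2`; neither is
# defined in this file.
#
#     defmodule MyApp.CRUD do
#       use CRUD, context: MyApp.Repo
#     end
#
#     MyApp.CRUD.add(MyApp.User, name: "Ada")   #=> {:ok, %MyApp.User{}}
#     MyApp.CRUD.get(MyApp.User, 1)             #=> {:ok, %MyApp.User{}}
#     MyApp.CRUD.find(MyApp.User, name: "ad")   #=> {:ok, [%MyApp.User{}]}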
|
lib/CRUD.ex
| 0.895042
| 0.470372
|
CRUD.ex
|
starcoder
|
defmodule TimeMachinex.Compiler do
@moduledoc false
@test_env Mix.env() == :test
@default_precision :microsecond
@doc false
@spec compile(Keyword.t()) :: :ok
def compile(opts \\ []) do
config =
:time_machinex
|> Application.get_env(TimeMachinex, [])
|> Keyword.merge(opts)
|> Keyword.put_new(:dynamic, @test_env)
|> Keyword.put_new(:default_precision, @default_precision)
Code.compiler_options(ignore_module_conflict: true)
Code.compile_quoted(
quote do
defmodule TimeMachinex do
@moduledoc ~S"""
Define a generic clock api
"""
unquote(now(config))
unquote(utc_now(config))
unquote(adapter(config))
unquote(configure(config))
end
end
)
Code.compiler_options(ignore_module_conflict: false)
:ok
end
@spec now(Keyword.t()) :: term
defp now(config) do
precision = config[:default_precision]
fun =
if config[:dynamic] do
quote do
@spec now(keyword) :: DateTime.t()
def now(opts \\ []) do
precision = Keyword.get(opts, :precision, unquote(precision))
DateTime.truncate(adapter().now(), precision)
end
end
else
quote do
@spec now(keyword) :: term
defmacro now(opts \\ []) do
precision = Keyword.get(opts, :precision, unquote(precision))
if precision == :microsecond do
adapter().quoted_now()
else
quote do
DateTime.truncate(unquote(adapter().quoted_now()), unquote(precision))
end
end
end
end
end
quote do
@doc """
Return the current time from the configured adapter
Options are:
precision: :microsecond | :millisecond | :second, default: #{unquote(inspect(precision))}
## Examples
```elixir
iex> TimeMachinex.now
~U[2019-12-16 00:35:07.571Z]
```
"""
unquote(fun)
end
end
@spec utc_now(Keyword.t()) :: term
defp utc_now(config) do
precision = config[:default_precision]
fun =
if config[:dynamic] do
quote do
@spec utc_now(keyword) :: DateTime.t()
def utc_now(opts \\ []) do
precision = Keyword.get(opts, :precision, unquote(precision))
UTCDateTime.truncate(adapter().utc_now(), precision)
end
end
else
quote do
@spec utc_now(keyword) :: term
defmacro utc_now(opts \\ []) do
precision = Keyword.get(opts, :precision, unquote(precision))
if precision == :microsecond do
adapter().quoted_utc_now()
else
quote do
UTCDateTime.truncate(unquote(adapter().quoted_utc_now()), unquote(precision))
end
end
end
end
end
quote do
@doc """
Return the current time from the configured adapter
Options are:
precision: :microsecond | :millisecond | :second, default: #{unquote(inspect(precision))}
## Examples
```elixir
iex> TimeMachinex.utc_now
~Z[2019-12-16 00:35:11.422092]
```
"""
unquote(fun)
end
end
@spec configure(Keyword.t()) :: term
defp configure(config) do
quote do
@doc ~S"""
[Re]Configure TimeMachinex.
## Examples
```elixir
iex> TimeMachinex.configure(adapter: TimeMachinex.ManagedClock)
:ok
```
"""
@spec configure(Keyword.t()) :: :ok
def configure(opts) do
unquote(config)
|> Keyword.merge(opts)
|> unquote(__MODULE__).compile()
end
end
end
@spec adapter(Keyword.t()) :: term
defp adapter(config) do
quote do
@spec adapter :: module
defp adapter,
do:
:time_machinex
|> Application.get_env(TimeMachinex, unquote(config))
|> Keyword.get(:adapter, TimeMachinex.SystemClock)
end
end
end
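# Sketch: recompiling the clock at runtime, e.g. to pin time in tests. The
# `TimeMachinex.ManagedClock` adapter is the one referenced in the configure/1
# doc above; `dynamic: true` keeps `now/1` a plain function rather than a macro.
#
#     TimeMachinex.Compiler.compile(adapter: TimeMachinex.ManagedClock, dynamic: true)
#     TimeMachinex.now(precision: :second)
#     #=> a DateTime truncated to whole seconds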
|
lib/time_machinex/compiler.ex
| 0.877457
| 0.700677
|
compiler.ex
|
starcoder
|
defmodule Explorer.PolarsBackend.Shared do
# A collection of **private** helpers shared in Explorer.PolarsBackend.
@moduledoc false
alias Explorer.DataFrame, as: DataFrame
alias Explorer.PolarsBackend.DataFrame, as: PolarsDataFrame
alias Explorer.PolarsBackend.LazyDataFrame, as: PolarsLazyFrame
alias Explorer.PolarsBackend.Native
alias Explorer.PolarsBackend.Series, as: PolarsSeries
alias Explorer.Series, as: Series
def apply_native(df_or_s, fun, args \\ [])
def apply_native(%Series{} = series, fun, args) do
case apply(Native, fun, [series.data | args]) do
{:ok, %PolarsDataFrame{} = new_df} -> create_dataframe(new_df)
{:ok, %PolarsLazyFrame{} = new_df} -> create_dataframe(new_df)
{:ok, %PolarsSeries{} = new_series} -> create_series(new_series)
{:ok, value} -> value
{:error, error} -> raise "#{error}"
end
end
def apply_native(%DataFrame{} = df, fun, args) do
case apply(Native, fun, [df.data | args]) do
{:ok, %PolarsDataFrame{} = new_df} -> update_dataframe(new_df, df)
{:ok, %PolarsLazyFrame{} = new_df} -> update_dataframe(new_df, df)
{:ok, %PolarsSeries{} = new_series} -> create_series(new_series)
{:ok, value} -> value
{:error, error} -> raise "#{error}"
end
end
def create_dataframe(%module{} = polars_df) when module in [PolarsDataFrame, PolarsLazyFrame],
do: %DataFrame{data: polars_df, groups: []}
def update_dataframe(%module{} = polars_df, %DataFrame{} = df)
when module in [PolarsDataFrame, PolarsLazyFrame],
do: %DataFrame{df | data: polars_df}
def create_series(%PolarsSeries{} = polars_series) do
{:ok, dtype} = Native.s_dtype(polars_series)
%Series{data: polars_series, dtype: normalise_dtype(dtype)}
end
def normalise_dtype("u32"), do: :integer
def normalise_dtype("i32"), do: :integer
def normalise_dtype("i64"), do: :integer
def normalise_dtype("f64"), do: :float
def normalise_dtype("bool"), do: :boolean
def normalise_dtype("str"), do: :string
def normalise_dtype("date"), do: :date
def normalise_dtype("datetime"), do: :datetime
def normalise_dtype("datetime[ms]"), do: :datetime
def normalise_dtype("datetime[μs]"), do: :datetime
def internal_from_dtype(:integer), do: "i64"
def internal_from_dtype(:float), do: "f64"
def internal_from_dtype(:boolean), do: "bool"
def internal_from_dtype(:string), do: "str"
def internal_from_dtype(:date), do: "date"
def internal_from_dtype(:datetime), do: "datetime[μs]"
end
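# Quick sketch: the dtype mapping above is a plain pattern match, so it can be
# exercised directly (note the module is @moduledoc false, i.e. internal API).
Explorer.PolarsBackend.Shared.normalise_dtype("datetime[μs]")
#=> :datetime
Explorer.PolarsBackend.Shared.internal_from_dtype(:float)
#=> "f64"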
|
lib/explorer/polars_backend/shared.ex
| 0.7237
| 0.619399
|
shared.ex
|
starcoder
|
defmodule DiffList do
@moduledoc """
Difference lists are a way of encoding a list as the action of prepending it.
Instead of a list being `[1, 2, 3]`, it is the anonymous function `fn(ys) -> [1, 2, 3] ++ ys end`.
Difference lists are fast for left-associated appends (`list ++ [x]`) as they are represented as function composition.
Refer to [this](http://h2.jaguarpaw.co.uk/posts/demystifying-dlist/) excellent blog post for more information.
"""
alias DiffList.Utils
@type difflist :: (list -> list)
@doc """
Converts a list into a difference list.
"""
@spec from_list(list) :: difflist
def from_list(xs) do
fn ys -> xs ++ ys end
end
@doc """
Convert a difference list into a list.
## Examples
iex> DiffList.from_list([1, 2, 3]) |> DiffList.to_list
[1, 2, 3]
"""
@spec to_list(difflist) :: list
def to_list(difflist) do
difflist.([])
end
@doc """
Returns an empty difference list.
"""
@spec empty() :: difflist
def empty, do: from_list([])
@doc """
Returns a difference list of one item.
## Example
iex> DiffList.singleton(1) |> DiffList.to_list
[1]
"""
@spec singleton(any) :: difflist
def singleton(x) do
cons(empty(), x)
end
@doc """
Append a difference list to another difference list.
## Example
iex> x = DiffList.from_list([1, 2, 3])
iex> y = DiffList.from_list([4, 5, 6])
iex> DiffList.append(x, y) |> DiffList.to_list
[1, 2, 3, 4, 5, 6]
"""
@spec append(difflist, difflist) :: difflist
def append(difflist_a, difflist_b) do
Utils.compose(difflist_a, difflist_b)
end
@doc """
Prepends an item to a difference list.
The difference list equivalent of `[x] ++ list`.
## Example
iex> x = DiffList.from_list([2, 3])
iex> DiffList.cons(x, 1) |> DiffList.to_list
[1, 2, 3]
"""
@spec cons(difflist, any) :: difflist
def cons(difflist, x) do
Utils.compose(Utils.list_cons(x), difflist)
end
@doc """
Appends an item to a difference list.
The difference list equivalent of `list ++ [x]`.
## Example
iex> x = DiffList.from_list([1, 2])
iex> DiffList.snoc(x, 3) |> DiffList.to_list
[1, 2, 3]
"""
@spec snoc(difflist, any) :: difflist
def snoc(difflist, x) do
Utils.compose(difflist, Utils.list_cons(x))
end
@doc """
Gets the first element of a difference list.
Essentially the same as `hd(list)`.
## Example
iex> x = DiffList.from_list([1, 2, 3])
iex> DiffList.head(x)
1
"""
@spec head(difflist) :: any
def head(difflist), do: list(&Utils.const/2, difflist)
@doc """
Gets the tail a difference list.
Essentially the same as `tl(list)`.
## Example
iex> x = DiffList.from_list([1, 2, 3])
iex> DiffList.tail(x) |> DiffList.to_list
[2, 3]
"""
@spec tail(difflist) :: list
def tail(difflist), do: list(&Utils.flipped_const/2, difflist)
@doc """
Concatenates a list of difference lists into one difference list.
## Example
iex> x = DiffList.from_list([1, 2, 3])
iex> y = DiffList.from_list([4, 5, 6])
iex> z = [x, y]
iex> DiffList.concat(z) |> DiffList.to_list
[1, 2, 3, 4, 5, 6]
"""
@spec concat(list(difflist)) :: difflist
def concat(difflists) do
Enum.reduce(difflists, empty(), fn(x, acc) -> append(acc, x) end)
end
defp list(fun, difflist) do
case to_list(difflist) do
[] -> nil
[h | t] -> fun.(h, from_list(t))
end
end
end
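# A short sketch: repeated snoc builds up composed functions in O(1) each; the
# list is materialised only once, by to_list/1.
dl = Enum.reduce(1..4, DiffList.empty(), fn x, acc -> DiffList.snoc(acc, x) end)
DiffList.to_list(dl)
#=> [1, 2, 3, 4]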
|
lib/difflist.ex
| 0.818156
| 0.688665
|
difflist.ex
|
starcoder
|
defmodule Tanx.ContinuousGame.Mazes do
pi = :math.pi()
@mazes %{
standard: %Tanx.Game.Arena{
size: {20.0, 20.0},
walls: [
[
{10, 10},
{5.5, 10},
{5.5, 8},
{5, 8},
{5, 10},
{-8, 10},
{-8, 7},
{-8.5, 7},
{-8.5, 10},
{-10, 10},
{-10, 5.5},
{-8, 5.5},
{-8, 5},
{-10, 5},
{-10, -8},
{-7, -8},
{-7, -8.5},
{-10, -8.5},
{-10, -10},
{-5.5, -10},
{-5.5, -8},
{-5, -8},
{-5, -10},
{8, -10},
{8, -7},
{8.5, -7},
{8.5, -10},
{10, -10},
{10, -5.5},
{8, -5.5},
{8, -5},
{10, -5},
{10, 8},
{7, 8},
{7, 8.5},
{10, 8.5}
],
[{-8, 3}, {-4.5, 3}, {-5, 4}, {-1, 4}, {-5, 4}, {-3, 0}, {-4.5, 3}],
[{8, -3}, {4.5, -3}, {5, -4}, {1, -4}, {5, -4}, {3, 0}, {4.5, -3}],
[{-6, 6}, {-6, 8}, {-3, 8}, {-6, 8}],
[{6, -6}, {6, -8}, {3, -8}, {6, -8}],
[{-8, 1}, {-5.5, 1}, {-4, -2}, {-5.5, 1}],
[{8, -1}, {5.5, -1}, {4, 2}, {5.5, -1}],
[{-4, 6}, {0, 6}, {1, 4}, {3, 4}, {1, 4}, {0, 6}],
[{4, -6}, {0, -6}, {-1, -4}, {-3, -4}, {-1, -4}, {0, -6}],
[{4, 6}, {6, 2}],
[{-4, -6}, {-6, -2}],
[{7.5, 3}, {6, 6}, {8, 6}, {6, 6}],
[{-7.5, -3}, {-6, -6}, {-8, -6}, {-6, -6}],
[{2, 6}, {1, 8}, {3, 8}, {1, 8}],
[{-2, -6}, {-1, -8}, {-3, -8}, {-1, -8}],
[{-1, 10}, {-1, 8}],
[{1, -10}, {1, -8}],
[{10, 1}, {8, 1}],
[{-10, -1}, {-8, -1}]
],
entry_points: %{
"nw" => %Tanx.Game.Arena.EntryPoint{
pos: {-9.25, 9.25},
heading: -pi / 2,
buffer_left: 0.75,
buffer_right: 1.25,
buffer_up: 0.75,
buffer_down: 4.25
},
"ne" => %Tanx.Game.Arena.EntryPoint{
pos: {9.25, 9.25},
heading: pi,
buffer_left: 4.25,
buffer_right: 0.75,
buffer_up: 0.75,
buffer_down: 1.25
},
"se" => %Tanx.Game.Arena.EntryPoint{
pos: {9.25, -9.25},
heading: pi / 2,
buffer_left: 1.25,
buffer_right: 0.75,
buffer_up: 4.25,
buffer_down: 0.75
},
"sw" => %Tanx.Game.Arena.EntryPoint{
pos: {-9.25, -9.25},
heading: 0.0,
buffer_left: 0.75,
buffer_right: 4.25,
buffer_up: 1.25,
buffer_down: 0.75
}
}
}
}
def get(name) do
@mazes[name]
end
end
|
apps/tanx/lib/tanx/continuous_game/mazes.ex
| 0.514888
| 0.457803
|
mazes.ex
|
starcoder
|
defmodule Kl.Primitives do
alias Kl.Equality
alias Kl.Env, as: E
alias Kl.Types, as: T
alias Kl.Eval
import Kl.Curry
require IEx
@spec kl_and(boolean, boolean) :: boolean
def kl_and(x, y) when is_boolean(x) and is_boolean(y), do: x and y
@spec kl_or(boolean, boolean) :: boolean
def kl_or(x, y) when is_boolean(x) and is_boolean(y), do: x or y
@spec kl_if(boolean, T.kl_term, T.kl_term) :: T.kl_term
def kl_if(x, y, z) when is_boolean(x), do: if x, do: y, else: z
@spec trap_error(Exception.t | T.kl_term, fun) :: T.kl_term
def trap_error(%Kl.SimpleError{} = x, f), do: f.(x)
def trap_error(x, _f), do: x
@spec simple_error(String.t) :: Exception.t
def simple_error(x) when is_binary(x), do: raise Kl.SimpleError, message: x
@spec error_to_string(Exception.t) :: String.t
def error_to_string(x) do
if match?(%Kl.SimpleError{}, x) do
x.message
else
Exception.format_banner(:error, x)
end
end
@spec intern(String.t) :: atom
def intern(x), do: String.to_atom(x)
@spec set(atom, T.kl_term) :: atom
def set(x, y) do
:ok = E.set_var(x, y)
y
end
@spec value(atom) :: T.kl_term
def value(x), do: E.get_var(x)
@spec number?(T.kl_term) :: boolean
def number?(x), do: is_number(x)
@spec add(number, number) :: number
def add(x, y), do: x + y
@spec subtract(number, number) :: number
def subtract(x, y), do: x - y
@spec multiply(number, number) :: number
def multiply(x, y), do: x * y
@spec divide(number, number) :: number
def divide(x, y), do: x / y
@spec greater_than(number, number) :: boolean
def greater_than(x, y), do: x > y
@spec less_than(number, number) :: boolean
def less_than(x, y), do: x < y
@spec greater_or_equal_than(number, number) :: boolean
def greater_or_equal_than(x, y), do: x >= y
@spec less_or_equal_than(number, number) :: boolean
def less_or_equal_than(x, y), do: x <= y
@spec string?(T.kl_term) :: boolean
def string?(x), do: is_binary(x)
@spec pos(String.t, number) :: String.t
def pos(x, y) do
z = String.at(x, y)
if is_nil(z) do
raise "string index is out of bounds"
else
z
end
end
@spec tlstr(String.t) :: String.t
def tlstr(x) when is_binary(x) do
if String.length(x) == 0 do
raise "argument is empty string"
else
{_, tlstr} = String.split_at(x, 1)
tlstr
end
end
@spec cn(String.t, String.t) :: String.t
def cn(x, y) when is_binary(x) and is_binary(y), do: x <> y
@spec str(T.kl_term) :: String.t
def str(x) do
cond do
match?([], x) -> raise "[] is not an atom in Shen; str cannot convert it to a string."
is_atom(x) -> to_string(x)
is_number(x) -> to_string(x)
is_binary(x) -> ~s("#{x}")
is_pid(x) -> inspect(x)
is_function(x) -> inspect(x)
true -> raise "#{inspect(x)} is not an atom, stream or closure; str cannot convert it to a string."
end
end
@spec string_to_n(String.t) :: integer
def string_to_n(x), do: List.first String.to_charlist(x)
@spec n_to_string(integer) :: String.t
def n_to_string(x), do: <<x>>
@spec absvector(integer) :: {:vector, pid}
def absvector(x) do
{:ok, p} = Agent.start_link(
fn ->
try do
:array.new(x)
rescue
ex -> ex
end
end)
{:vector, p}
end
@spec put_to_address({:vector, pid}, integer, T.kl_term) :: {:vector, pid}
def put_to_address({:vector, p}, y, z) do
Agent.update(p,
fn(a) ->
try do
:array.set(y, z, a)
rescue
ex -> ex
end
end)
{:vector, p}
end
@spec get_from_address({:vector, pid}, integer) :: T.kl_term
def get_from_address({:vector, p}, y) do
Agent.get(p,
fn(a) ->
try do
:array.get(y, a)
rescue
ex -> ex
end
end)
end
@spec absvector?(T.kl_term) :: boolean
def absvector?({:vector, p}) when is_pid(p), do: true
def absvector?(_), do: false
@spec cons?(T.kl_term) :: boolean
def cons?({:cons, _h, _t}), do: true
def cons?(_), do: false
@spec cons(T.kl_term, T.kl_term) :: {:cons, T.kl_term, T.kl_term}
def cons(x, y), do: {:cons, x, y}
@spec kl_hd({:cons, T.kl_term, T.kl_term}) :: T.kl_term
def kl_hd({:cons, h, _}), do: h
@spec kl_tl({:cons, T.kl_term, T.kl_term}) :: T.kl_term
def kl_tl({:cons, _, t}), do: t
# TODO: remove tuple
@spec write_byte(integer, {:stream, pid | :stdio} | :stdio) :: integer
def write_byte(b, :stdio), do: write_byte(b, {:stream, :stdio})
def write_byte(b, {:stream, s}) do
:ok = IO.binwrite(s, <<b>>)
b
end
@spec read_byte({:stream, pid | :stdio} | :stdio) :: integer
def read_byte(:stdio), do: read_byte({:stream, :stdio})
def read_byte({:stream, s}) do
c = IO.binread(s, 1)
if c == :eof do
-1
else
<<n, _>> = c <> <<0>>
n
end
end
@spec open(String.t, atom) :: {:stream, File.io_device}
def open(x, y) do
m = case y do
:in -> :read
:out -> :write
_ -> raise "invalid direction"
end
{:ok, p} = File.open("#{elem(File.cwd, 1)}/#{E.get_var(:"*home-directory*")}/#{x}", [m])
{:stream, p}
end
@spec close({:stream, pid}) :: nil
def close({:stream, x}) do
:ok = File.close(x)
nil
end
@spec equal?(T.kl_term, T.kl_term) :: boolean
def equal?(x, y), do: Equality.equal?(x, y)
@spec eval_kl({:cons, T.kl_term, T.kl_term} | T.kl_term) :: T.kl_term
def eval_kl({:cons, _, _} = x) do
x |> cons_to_list |> Eval.eval(%{})
end
def eval_kl(x), do: x
@spec get_time(T.kl_term) :: integer
def get_time(x) do
now = DateTime.utc_now() |> DateTime.to_unix()
case x do
:unix -> now
:run -> now - Agent.get(:env, fn state -> state[:start_time] end)
_ -> raise "get-time does not understand the parameter #{inspect(x)}"
end
end
@spec type(T.kl_term, atom) :: T.kl_term
def type(x, _y), do: x
def mapping do
%{
and: &kl_and/2,
or: &kl_or/2,
if: &kl_if/3,
"trap-error": &trap_error/2,
"simple-error": &simple_error/1,
"error-to-string": &error_to_string/1,
intern: &intern/1,
set: &set/2,
value: &value/1,
number?: &number?/1,
+: &add/2,
-: &subtract/2,
*: &multiply/2,
/: &divide/2,
>: &greater_than/2,
<: &less_than/2,
>=: &greater_or_equal_than/2,
<=: &less_or_equal_than/2,
"string?": &string?/1,
pos: &pos/2,
tlstr: &tlstr/1,
cn: &cn/2,
str: &str/1,
"string->n": &string_to_n/1,
"n->string": &n_to_string/1,
absvector: &absvector/1,
"address->": &put_to_address/3,
"<-address": &get_from_address/2,
"absvector?": &absvector?/1,
"cons?": &cons?/1,
cons: &cons/2,
"hd": &kl_hd/1,
"tl": &kl_tl/1,
"write-byte": &write_byte/2,
"read-byte": &read_byte/1,
open: &open/2,
close: &close/1,
"=": &equal?/2,
"eval-kl": &eval_kl/1,
"get-time": &get_time/1,
type: &type/2
}
|> Enum.map(fn({n, f}) -> {n, curry(f)} end)
|> Enum.into(%{})
end
@spec cons_to_list({:cons, T.kl_term, T.kl_term} | T.kl_term) :: list(T.kl_term) | T.kl_term
def cons_to_list({:cons, h, t}), do: [cons_to_list(h) | cons_to_list(t)]
def cons_to_list(x), do: x
end
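# Sketch: KL cons cells are plain tagged tuples, so they round-trip through
# cons_to_list/1.
alias Kl.Primitives
pair = Primitives.cons(1, Primitives.cons(2, []))
#=> {:cons, 1, {:cons, 2, []}}
Primitives.cons_to_list(pair)
#=> [1, 2]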
|
lib/primitives.ex
| 0.705278
| 0.5425
|
primitives.ex
|
starcoder
|
defmodule DealerReviewScraper.Review do
@moduledoc """
Review context
All functions used to parse data from HTML into a `t()` live here.
"""
defstruct title: nil,
date: nil,
user: nil,
user_comment: nil,
user_overall_rating: nil,
user_detailed_rating: nil
@typedoc "A review"
@type t() :: %__MODULE__{
title: String.t() | nil,
date: String.t() | nil,
user: String.t() | nil,
user_comment: String.t() | nil,
user_overall_rating: integer() | nil,
user_detailed_rating: map() | nil
}
@doc """
Receives a body of HTML extracted from a page, extracts each user's review, and
returns a list of `t()`
iex> parse_document(body)
{:ok, [Review.t()]}
iex> parse_document("")
{:error, :empty_body_detected}
"""
@spec parse_document(binary()) :: {:ok, [t()]} | {:error, :empty_body_detected}
def parse_document(body) do
body
|> Floki.parse_document!()
|> find_reviews()
end
defp find_reviews(document) do
case Floki.find(document, ".review-entry") do
[] ->
{:error, :empty_body_detected}
reviews ->
result = Enum.map(reviews, &parse_review/1)
{:ok, result}
end
end
defp parse_review(review) do
%__MODULE__{
title: parse_title(review),
date: parse_date(review),
user: parse_user(review),
user_comment: parse_user_comment(review),
user_overall_rating: parse_user_rating(review),
user_detailed_rating: parse_user_detailed_rating(review)
}
end
defp parse_title(review) do
review
|> Floki.find(".review-title")
|> Floki.text()
|> String.replace("\"", "")
end
defp parse_date(review) do
review
|> Floki.find(".review-date div:first-child")
|> Floki.text()
end
defp parse_user(review) do
review
|> Floki.find(".review-wrapper span.notranslate")
|> Floki.text()
|> String.replace("by ", "")
end
defp parse_user_comment(review) do
review
|> Floki.find(".review-whole")
|> Floki.text()
|> String.replace(["\"", "\n", "\r"], "")
|> String.trim()
end
defp parse_user_rating(review) do
review
|> Floki.find(".dealership-rating .rating-static:first-child")
|> Floki.attribute("class")
|> List.first()
|> String.split()
|> extracting_ratings()
end
defp parse_user_detailed_rating(review) do
[recommend | rating_remain] =
review
|> Floki.find(".review-ratings-all .table .tr")
|> Enum.reverse()
rating_remain
|> Enum.map(fn data ->
name = parse_user_detailed_rating_name(data)
value = parse_user_detailed_rating_value(data)
{name, value}
end)
|> Map.new()
|> Map.put(:recommend_dealer, parse_user_detailed_rating_recommend(recommend))
end
defp parse_user_detailed_rating_name(data) do
data
|> Floki.find("div.small-text")
|> Floki.text()
|> String.downcase()
|> String.replace(" ", "_")
|> String.to_atom()
end
defp parse_user_detailed_rating_value(data) do
data
|> Floki.find("div.rating-static-indv")
|> Floki.attribute("class")
|> List.first()
|> String.split()
|> extracting_ratings()
end
defp parse_user_detailed_rating_recommend(recommend) do
recommend
|> Floki.text()
|> String.upcase()
|> String.contains?("YES")
end
defp extracting_ratings(ratings) when is_list(ratings) do
ratings
|> Enum.map(&extracting_rating/1)
|> Enum.reject(&is_atom/1)
end
defp extracting_ratings(_), do: :error
defp extracting_rating(<<"rating-", rating::binary-size(2)>>), do: String.to_integer(rating)
defp extracting_rating(_), do: :none
end
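# Usage sketch: `body` would be the HTML of a dealer page fetched elsewhere
# (e.g. with an HTTP client); the selectors above expect the full review markup
# of such a page.
#
#     {:ok, reviews} = DealerReviewScraper.Review.parse_document(body)
#     Enum.map(reviews, & &1.user_overall_rating)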
|
lib/review/review.ex
| 0.731922
| 0.42913
|
review.ex
|
starcoder
|
defmodule AWS.Forecast do
@moduledoc """
Provides APIs for creating and managing Amazon Forecast resources.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2018-06-26",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "forecast",
global?: false,
protocol: "json",
service_id: "forecast",
signature_version: "v4",
signing_name: "forecast",
target_prefix: "AmazonForecast"
}
end
@doc """
Creates an Amazon Forecast predictor.
Amazon Forecast creates predictors with AutoPredictor, which involves applying
the optimal combination of algorithms to each time series in your datasets. You
can use `CreateAutoPredictor` to create new predictors or upgrade/retrain
existing predictors.
## Creating new predictors
The following parameters are required when creating a new predictor:
* `PredictorName` - A unique name for the predictor.
* `DatasetGroupArn` - The ARN of the dataset group used to train the
predictor.
* `ForecastFrequency` - The granularity of your forecasts (hourly,
daily, weekly, etc).
* `ForecastHorizon` - The number of time steps being forecasted.
When creating a new predictor, do not specify a value for
`ReferencePredictorArn`.
## Upgrading and retraining predictors
The following parameters are required when retraining or upgrading a predictor:
* `PredictorName` - A unique name for the predictor.
* `ReferencePredictorArn` - The ARN of the predictor to retrain or
upgrade.
When upgrading or retraining a predictor, only specify values for the
`ReferencePredictorArn` and `PredictorName`.
"""
def create_auto_predictor(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateAutoPredictor", input, options)
end
@doc """
Creates an Amazon Forecast dataset.
The information about the dataset that you provide helps Forecast understand how
to consume the data for model training. This includes the following:
* * `DataFrequency` * - How frequently your historical time-series
data is collected.
* * `Domain` * and * `DatasetType` * - Each dataset has an
associated dataset domain and a type within the domain. Amazon Forecast provides
a list of predefined domains and types within each domain. For each unique
dataset domain and type within the domain, Amazon Forecast requires your data to
include a minimum set of predefined fields.
* * `Schema` * - A schema specifies the fields in the dataset,
including the field name and data type.
After creating a dataset, you import your training data into it and add the
dataset to a dataset group. You use the dataset group to create a predictor. For
more information, see `howitworks-datasets-groups`.
To get a list of all your datasets, use the `ListDatasets` operation.
For example Forecast datasets, see the [Amazon Forecast Sample GitHub repository](https://github.com/aws-samples/amazon-forecast-samples).
The `Status` of a dataset must be `ACTIVE` before you can import training data.
Use the `DescribeDataset` operation to get the status.
"""
def create_dataset(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDataset", input, options)
end
@doc """
Creates a dataset group, which holds a collection of related datasets.
You can add datasets to the dataset group when you create the dataset group, or
later by using the `UpdateDatasetGroup` operation.
After creating a dataset group and adding datasets, you use the dataset group
when you create a predictor. For more information, see
`howitworks-datasets-groups`.
To get a list of all your datasets groups, use the `ListDatasetGroups`
operation.
The `Status` of a dataset group must be `ACTIVE` before you can use the dataset
group to create a predictor. To get the status, use the `DescribeDatasetGroup`
operation.
"""
def create_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDatasetGroup", input, options)
end
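# Usage sketch (illustrative input map; the group name and domain below are
# assumptions, not values taken from this module):
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-west-2")
#     AWS.Forecast.create_dataset_group(client, %{
#       "DatasetGroupName" => "demo_dataset_group",
#       "Domain" => "RETAIL"
#     })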
@doc """
Imports your training data to an Amazon Forecast dataset.
You provide the location of your training data in an Amazon Simple Storage
Service (Amazon S3) bucket and the Amazon Resource Name (ARN) of the dataset
that you want to import the data to.
You must specify a `DataSource` object that includes an AWS Identity and Access
Management (IAM) role that Amazon Forecast can assume to access the data, as
Amazon Forecast makes a copy of your data and processes it in an internal AWS
system. For more information, see `aws-forecast-iam-roles`.
The training data must be in CSV format. The delimiter must be a comma (,).
You can specify the path to a specific CSV file, the S3 bucket, or to a folder
in the S3 bucket. For the latter two cases, Amazon Forecast imports all files up
to the limit of 10,000 files.
Because dataset imports are not aggregated, your most recent dataset import is
the one that is used when training a predictor or generating a forecast. Make
sure that your most recent dataset import contains all of the data you want to
model off of, and not just the new data collected since the previous import.
To get a list of all your dataset import jobs, filtered by specified criteria,
use the `ListDatasetImportJobs` operation.
"""
def create_dataset_import_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDatasetImportJob", input, options)
end
@doc """
Explainability is only available for Forecasts and Predictors generated from an
AutoPredictor (`CreateAutoPredictor`)
Creates an Amazon Forecast Explainability.
Explainability helps you better understand how the attributes in your datasets
impact forecast. Amazon Forecast uses a metric called Impact scores to quantify
the relative impact of each attribute and determine whether they increase or
decrease forecast values.
To enable Forecast Explainability, your predictor must include at least one of
the following: related time series, item metadata, or additional datasets like
Holidays and the Weather Index.
CreateExplainability accepts either a Predictor ARN or Forecast ARN. To receive
aggregated Impact scores for all time series and time points in your datasets,
provide a Predictor ARN. To receive Impact scores for specific time series and
time points, provide a Forecast ARN.
## CreateExplainability with a Predictor ARN
You can only have one Explainability resource per predictor. If you already
enabled `ExplainPredictor` in `CreateAutoPredictor`, that predictor already has
an Explainability resource.
The following parameters are required when providing a Predictor ARN:
* `ExplainabilityName` - A unique name for the Explainability.
* `ResourceArn` - The Arn of the predictor.
* `TimePointGranularity` - Must be set to “ALL”.
* `TimeSeriesGranularity` - Must be set to “ALL”.
Do not specify a value for the following parameters:
* `DataSource` - Only valid when TimeSeriesGranularity is
“SPECIFIC”.
* `Schema` - Only valid when TimeSeriesGranularity is “SPECIFIC”.
* `StartDateTime` - Only valid when TimePointGranularity is
“SPECIFIC”.
* `EndDateTime` - Only valid when TimePointGranularity is
“SPECIFIC”.
## CreateExplainability with a Forecast ARN
You can specify a maximum of 50 time series and 500 time points.
The following parameters are required when providing a Forecast ARN:
* `ExplainabilityName` - A unique name for the Explainability.
* `ResourceArn` - The Arn of the forecast.
* `TimePointGranularity` - Either “ALL” or “SPECIFIC”.
* `TimeSeriesGranularity` - Either “ALL” or “SPECIFIC”.
If you set TimeSeriesGranularity to “SPECIFIC”, you must also provide the
following:
* `DataSource` - The S3 location of the CSV file specifying your
time series.
* `Schema` - The Schema defines the attributes and attribute types
listed in the Data Source.
If you set TimePointGranularity to “SPECIFIC”, you must also provide the
following:
* `StartDateTime` - The first timestamp in the range of time points.
* `EndDateTime` - The last timestamp in the range of time points.
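## Example
A sketch of a Predictor-ARN request (the name and ARN below are placeholders):
    input = %{
      "ExplainabilityName" => "my_explainability",
      "ResourceArn" => "arn:aws:forecast:us-east-1:111122223333:predictor/my_predictor",
      "ExplainabilityConfig" => %{
        "TimePointGranularity" => "ALL",
        "TimeSeriesGranularity" => "ALL"
      }
    }
    {:ok, _result, _response} = create_explainability(client, input)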
"""
def create_explainability(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateExplainability", input, options)
end
@doc """
Exports an Explainability resource created by the `CreateExplainability`
operation.
Exported files are exported to an Amazon Simple Storage Service (Amazon S3)
bucket.
You must specify a `DataDestination` object that includes an Amazon S3 bucket
and an AWS Identity and Access Management (IAM) role that Amazon Forecast can
assume to access the Amazon S3 bucket. For more information, see
`aws-forecast-iam-roles`.
The `Status` of the export job must be `ACTIVE` before you can access the export
in your Amazon S3 bucket. To get the status, use the
`DescribeExplainabilityExport` operation.
"""
def create_explainability_export(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateExplainabilityExport", input, options)
end
@doc """
Creates a forecast for each item in the `TARGET_TIME_SERIES` dataset that was
used to train the predictor.
This is known as inference. To retrieve the forecast for a single item at low
latency, use the `QueryForecast` operation. To export the complete forecast
into your Amazon
Simple Storage Service (Amazon S3) bucket, use the `CreateForecastExportJob`
operation.
The range of the forecast is determined by the `ForecastHorizon` value, which
you specify in the `CreatePredictor` request. When you query a forecast, you can
request a specific date range within the forecast.
To get a list of all your forecasts, use the `ListForecasts` operation.
The forecasts generated by Amazon Forecast are in the same time zone as the
dataset that was used to create the predictor.
For more information, see `howitworks-forecast`.
The `Status` of the forecast must be `ACTIVE` before you can query or export the
forecast. Use the `DescribeForecast` operation to get the status.
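## Example
A sketch of generating a forecast at the default quantiles (the name and
predictor ARN below are placeholders):
    input = %{
      "ForecastName" => "my_forecast",
      "PredictorArn" => "arn:aws:forecast:us-east-1:111122223333:predictor/my_predictor",
      "ForecastTypes" => ["0.1", "0.5", "0.9"]
    }
    {:ok, _result, _response} = create_forecast(client, input)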
"""
def create_forecast(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateForecast", input, options)
end
@doc """
Exports a forecast created by the `CreateForecast` operation to your Amazon
Simple Storage Service (Amazon S3) bucket.
The forecast file name will match the following convention:
`<ForecastExportJobName>_<ExportTimestamp>_<PartNumber>`
where the <ExportTimestamp> component is in Java SimpleDateFormat
(yyyy-MM-ddTHH-mm-ssZ).
You must specify a `DataDestination` object that includes an AWS Identity and
Access Management (IAM) role that Amazon Forecast can assume to access the
Amazon S3 bucket. For more information, see `aws-forecast-iam-roles`.
For more information, see `howitworks-forecast`.
To get a list of all your forecast export jobs, use the `ListForecastExportJobs`
operation.
The `Status` of the forecast export job must be `ACTIVE` before you can access
the forecast in your Amazon S3 bucket. To get the status, use the
`DescribeForecastExportJob` operation.
"""
def create_forecast_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateForecastExportJob", input, options)
end
@doc """
This operation creates a legacy predictor that does not include all the
predictor functionalities provided by Amazon Forecast.
To create a predictor that is compatible with all aspects of Forecast, use
`CreateAutoPredictor`.
Creates an Amazon Forecast predictor.
In the request, provide a dataset group and either specify an algorithm or let
Amazon Forecast choose an algorithm for you using AutoML. If you specify an
algorithm, you also can override algorithm-specific hyperparameters.
Amazon Forecast uses the algorithm to train a predictor using the latest version
of the datasets in the specified dataset group. You can then generate a forecast
using the `CreateForecast` operation.
To see the evaluation metrics, use the `GetAccuracyMetrics` operation.
You can specify a featurization configuration to fill and aggregate the data
fields in the `TARGET_TIME_SERIES` dataset to improve model training. For more
information, see `FeaturizationConfig`.
For RELATED_TIME_SERIES datasets, `CreatePredictor` verifies that the
`DataFrequency` specified when the dataset was created matches the
`ForecastFrequency`. TARGET_TIME_SERIES datasets don't have this restriction.
Amazon Forecast also verifies the delimiter and timestamp format. For more
information, see `howitworks-datasets-groups`.
By default, predictors are trained and evaluated at the 0.1 (P10), 0.5 (P50),
and 0.9 (P90) quantiles. You can choose custom forecast types to train and
evaluate your predictor by setting the `ForecastTypes`.
## AutoML
If you want Amazon Forecast to evaluate each algorithm and choose the one that
minimizes the `objective function`, set `PerformAutoML` to `true`. The
`objective function` is defined as the mean of the weighted losses over the
forecast types. By default, these are the p10, p50, and p90 quantile losses. For
more information, see `EvaluationResult`.
When AutoML is enabled, the following properties are disallowed:
* `AlgorithmArn`
* `HPOConfig`
* `PerformHPO`
* `TrainingParameters`
To get a list of all of your predictors, use the `ListPredictors` operation.
Before you can use the predictor to create a forecast, the `Status` of the
predictor must be `ACTIVE`, signifying that training has completed. To get the
status, use the `DescribePredictor` operation.
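## Example
A sketch of an AutoML request for daily data with a 10-day horizon (the name
and dataset group ARN below are placeholders):
    input = %{
      "PredictorName" => "my_predictor",
      "ForecastHorizon" => 10,
      "PerformAutoML" => true,
      "InputDataConfig" => %{
        "DatasetGroupArn" => "arn:aws:forecast:us-east-1:111122223333:dataset-group/my_group"
      },
      "FeaturizationConfig" => %{"ForecastFrequency" => "D"}
    }
    {:ok, _result, _response} = create_predictor(client, input)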
"""
def create_predictor(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePredictor", input, options)
end
@doc """
Exports backtest forecasts and accuracy metrics generated by the
`CreateAutoPredictor` or `CreatePredictor` operations.
Two folders containing CSV files are exported to your specified S3 bucket.
The export file names will match the following convention:
`<ExportJobName>_<ExportTimestamp>_<PartNumber>.csv`
The <ExportTimestamp> component is in Java SimpleDateFormat
(yyyy-MM-ddTHH-mm-ssZ).
You must specify a `DataDestination` object that includes an Amazon S3 bucket
and an AWS Identity and Access Management (IAM) role that Amazon Forecast can
assume to access the Amazon S3 bucket. For more information, see
`aws-forecast-iam-roles`.
The `Status` of the export job must be `ACTIVE` before you can access the export
in your Amazon S3 bucket. To get the status, use the
`DescribePredictorBacktestExportJob` operation.
"""
def create_predictor_backtest_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePredictorBacktestExportJob", input, options)
end
@doc """
Deletes an Amazon Forecast dataset that was created using the `CreateDataset`
operation.
You can only delete datasets that have a status of `ACTIVE` or `CREATE_FAILED`.
To get the status use the `DescribeDataset` operation.
Forecast does not automatically update any dataset groups that contain the
deleted dataset. In order to update the dataset group, use the
`UpdateDatasetGroup` operation, omitting the deleted dataset's ARN.
"""
def delete_dataset(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDataset", input, options)
end
@doc """
Deletes a dataset group created using the `CreateDatasetGroup` operation.
You can only delete dataset groups that have a status of `ACTIVE`,
`CREATE_FAILED`, or `UPDATE_FAILED`. To get the status, use the
`DescribeDatasetGroup` operation.
This operation deletes only the dataset group, not the datasets in the group.
"""
def delete_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDatasetGroup", input, options)
end
@doc """
Deletes a dataset import job created using the `CreateDatasetImportJob`
operation.
You can delete only dataset import jobs that have a status of `ACTIVE` or
`CREATE_FAILED`. To get the status, use the `DescribeDatasetImportJob`
operation.
"""
def delete_dataset_import_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDatasetImportJob", input, options)
end
@doc """
Deletes an Explainability resource.
You can delete only Explainability resources that have a status of `ACTIVE` or `CREATE_FAILED`.
To get the status, use the `DescribeExplainability` operation.
"""
def delete_explainability(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteExplainability", input, options)
end
@doc """
Deletes an Explainability export.
"""
def delete_explainability_export(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteExplainabilityExport", input, options)
end
@doc """
Deletes a forecast created using the `CreateForecast` operation.
You can delete only forecasts that have a status of `ACTIVE` or `CREATE_FAILED`.
To get the status, use the `DescribeForecast` operation.
You can't delete a forecast while it is being exported. After a forecast is
deleted, you can no longer query the forecast.
"""
def delete_forecast(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteForecast", input, options)
end
@doc """
Deletes a forecast export job created using the `CreateForecastExportJob`
operation.
You can delete only export jobs that have a status of `ACTIVE` or
`CREATE_FAILED`. To get the status, use the `DescribeForecastExportJob`
operation.
"""
def delete_forecast_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteForecastExportJob", input, options)
end
@doc """
Deletes a predictor created using the `CreateAutoPredictor` or `CreatePredictor`
operations.
You can delete only predictors that have a status of `ACTIVE` or `CREATE_FAILED`.
To get the status, use the `DescribePredictor` operation.
"""
def delete_predictor(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePredictor", input, options)
end
@doc """
Deletes a predictor backtest export job.
"""
def delete_predictor_backtest_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePredictorBacktestExportJob", input, options)
end
@doc """
Deletes an entire resource tree.
This operation will delete the parent resource and its child resources.
Child resources are resources that were created from another resource. For
example, when a forecast is generated from a predictor, the forecast is the
child resource and the predictor is the parent resource.
Amazon Forecast resources possess the following parent-child resource
hierarchies:
* **Dataset**: dataset import jobs
* **Dataset Group**: predictors, predictor backtest export jobs,
forecasts, forecast export jobs
* **Predictor**: predictor backtest export jobs, forecasts, forecast
export jobs
* **Forecast**: forecast export jobs
`DeleteResourceTree` will only delete Amazon Forecast resources, and will not
delete datasets or exported files stored in Amazon S3.
"""
def delete_resource_tree(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteResourceTree", input, options)
end
@doc """
Describes a predictor created using the `CreateAutoPredictor` operation.
"""
def describe_auto_predictor(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAutoPredictor", input, options)
end
@doc """
Describes an Amazon Forecast dataset created using the `CreateDataset`
operation.
In addition to listing the parameters specified in the `CreateDataset` request,
this operation includes the following dataset properties:
* `CreationTime`
* `LastModificationTime`
* `Status`
"""
def describe_dataset(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDataset", input, options)
end
@doc """
Describes a dataset group created using the `CreateDatasetGroup` operation.
In addition to listing the parameters provided in the `CreateDatasetGroup`
request, this operation includes the following properties:
* `DatasetArns` - The datasets belonging to the group.
* `CreationTime`
* `LastModificationTime`
* `Status`
"""
def describe_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDatasetGroup", input, options)
end
@doc """
Describes a dataset import job created using the `CreateDatasetImportJob`
operation.
In addition to listing the parameters provided in the `CreateDatasetImportJob`
request, this operation includes the following properties:
* `CreationTime`
* `LastModificationTime`
* `DataSize`
* `FieldStatistics`
* `Status`
* `Message` - If an error occurred, information about the error.
"""
def describe_dataset_import_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDatasetImportJob", input, options)
end
@doc """
Describes an Explainability resource created using the `CreateExplainability`
operation.
"""
def describe_explainability(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeExplainability", input, options)
end
@doc """
Describes an Explainability export created using the
`CreateExplainabilityExport` operation.
"""
def describe_explainability_export(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeExplainabilityExport", input, options)
end
@doc """
Describes a forecast created using the `CreateForecast` operation.
In addition to listing the properties provided in the `CreateForecast` request,
this operation lists the following properties:
* `DatasetGroupArn` - The dataset group that provided the training
data.
* `CreationTime`
* `LastModificationTime`
* `Status`
* `Message` - If an error occurred, information about the error.
"""
def describe_forecast(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeForecast", input, options)
end
@doc """
Describes a forecast export job created using the `CreateForecastExportJob`
operation.
In addition to listing the properties provided by the user in the
`CreateForecastExportJob` request, this operation lists the following
properties:
* `CreationTime`
* `LastModificationTime`
* `Status`
* `Message` - If an error occurred, information about the error.
"""
def describe_forecast_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeForecastExportJob", input, options)
end
@doc """
This operation is only valid for legacy predictors created with `CreatePredictor`.
If you are not using a legacy predictor, use `DescribeAutoPredictor`.
Describes a predictor created using the `CreatePredictor` operation.
In addition to listing the properties provided in the `CreatePredictor` request,
this operation lists the following properties:
* `DatasetImportJobArns` - The dataset import jobs used to import
training data.
* `AutoMLAlgorithmArns` - If AutoML is performed, the algorithms
that were evaluated.
* `CreationTime`
* `LastModificationTime`
* `Status`
* `Message` - If an error occurred, information about the error.
"""
def describe_predictor(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribePredictor", input, options)
end
@doc """
Describes a predictor backtest export job created using the
`CreatePredictorBacktestExportJob` operation.
In addition to listing the properties provided by the user in the
`CreatePredictorBacktestExportJob` request, this operation lists the following
properties:
* `CreationTime`
* `LastModificationTime`
* `Status`
* `Message` (if an error occurred)
"""
def describe_predictor_backtest_export_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribePredictorBacktestExportJob", input, options)
end
@doc """
Provides metrics on the accuracy of the models that were trained by the
`CreatePredictor` operation.
Use metrics to see how well the model performed and to decide whether to use the
predictor to generate a forecast. For more information, see [Predictor Metrics](https://docs.aws.amazon.com/forecast/latest/dg/metrics.html).
This operation generates metrics for each backtest window that was evaluated.
The number of backtest windows (`NumberOfBacktestWindows`) is specified using
the `EvaluationParameters` object, which is optionally included in the
`CreatePredictor` request. If `NumberOfBacktestWindows` isn't specified, the
number defaults to one.
The parameters of the `filling` method determine which items contribute to the
metrics. If you want all items to contribute, specify `zero`. If you want only
those items that have complete data in the range being evaluated to contribute,
specify `nan`. For more information, see `FeaturizationMethod`.
Before you can get accuracy metrics, the `Status` of the predictor must be
`ACTIVE`, signifying that training has completed. To get the status, use the
`DescribePredictor` operation.
"""
def get_accuracy_metrics(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAccuracyMetrics", input, options)
end
@doc """
Returns a list of dataset groups created using the `CreateDatasetGroup`
operation.
For each dataset group, this operation returns a summary of its properties,
including its Amazon Resource Name (ARN). You can retrieve the complete set of
properties by using the dataset group ARN with the `DescribeDatasetGroup`
operation.
"""
def list_dataset_groups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatasetGroups", input, options)
end
@doc """
Returns a list of dataset import jobs created using the `CreateDatasetImportJob`
operation.
For each import job, this operation returns a summary of its properties,
including its Amazon Resource Name (ARN). You can retrieve the complete set of
properties by using the ARN with the `DescribeDatasetImportJob` operation. You
can filter the list by providing an array of `Filter` objects.
"""
def list_dataset_import_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatasetImportJobs", input, options)
end
@doc """
Returns a list of datasets created using the `CreateDataset` operation.
For each dataset, a summary of its properties, including its Amazon Resource
Name (ARN), is returned. To retrieve the complete set of properties, use the ARN
with the `DescribeDataset` operation.
"""
def list_datasets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatasets", input, options)
end
@doc """
Returns a list of Explainability resources created using the
`CreateExplainability` operation.
This operation returns a summary for each Explainability. You can filter the
list using an array of `Filter` objects.
To retrieve the complete set of properties for a particular Explainability
resource, use the ARN with the `DescribeExplainability` operation.
"""
def list_explainabilities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListExplainabilities", input, options)
end
@doc """
Returns a list of Explainability exports created using the
`CreateExplainabilityExport` operation.
This operation returns a summary for each Explainability export. You can filter
the list using an array of `Filter` objects.
To retrieve the complete set of properties for a particular Explainability
export, use the ARN with the `DescribeExplainabilityExport` operation.
"""
def list_explainability_exports(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListExplainabilityExports", input, options)
end
@doc """
Returns a list of forecast export jobs created using the
`CreateForecastExportJob` operation.
For each forecast export job, this operation returns a summary of its
properties, including its Amazon Resource Name (ARN). To retrieve the complete
set of properties, use the ARN with the `DescribeForecastExportJob` operation.
You can filter the list using an array of `Filter` objects.
"""
def list_forecast_export_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListForecastExportJobs", input, options)
end
@doc """
Returns a list of forecasts created using the `CreateForecast` operation.
For each forecast, this operation returns a summary of its properties, including
its Amazon Resource Name (ARN). To retrieve the complete set of properties,
specify the ARN with the `DescribeForecast` operation. You can filter the list
using an array of `Filter` objects.
"""
def list_forecasts(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListForecasts", input, options)
end
@doc """
Returns a list of predictor backtest export jobs created using the
`CreatePredictorBacktestExportJob` operation.
This operation returns a summary for each backtest export job. You can filter
the list using an array of `Filter` objects.
To retrieve the complete set of properties for a particular backtest export job,
use the ARN with the `DescribePredictorBacktestExportJob` operation.
"""
def list_predictor_backtest_export_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPredictorBacktestExportJobs", input, options)
end
@doc """
Returns a list of predictors created using the `CreateAutoPredictor` or
`CreatePredictor` operations.
For each predictor, this operation returns a summary of its properties,
including its Amazon Resource Name (ARN).
You can retrieve the complete set of properties by using the ARN with the
`DescribeAutoPredictor` and `DescribePredictor` operations. You can filter the
list using an array of `Filter` objects.
"""
def list_predictors(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPredictors", input, options)
end
@doc """
Lists the tags for an Amazon Forecast resource.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Stops a resource.
The resource undergoes the following states: `CREATE_STOPPING` and
`CREATE_STOPPED`. You cannot resume a resource once it has been stopped.
This operation can be applied to the following resources (and their
corresponding child resources):
* Dataset Import Job
* Predictor Job
* Forecast Job
* Forecast Export Job
* Predictor Backtest Export Job
* Explainability Job
* Explainability Export Job
"""
def stop_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopResource", input, options)
end
@doc """
Associates the specified tags with the resource identified by the specified `resourceArn`.
If existing tags on a resource are not specified in the request parameters, they
are not changed. When a resource is deleted, the tags associated with that
resource are also deleted.
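## Example
A sketch (the ARN and tag values below are placeholders):
    input = %{
      "ResourceArn" => "arn:aws:forecast:us-east-1:111122223333:forecast/my_forecast",
      "Tags" => [%{"Key" => "environment", "Value" => "staging"}]
    }
    {:ok, _result, _response} = tag_resource(client, input)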
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Deletes the specified tags from a resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Replaces the datasets in a dataset group with the specified datasets.
The `Status` of the dataset group must be `ACTIVE` before you can use the
dataset group to create a predictor. Use the `DescribeDatasetGroup` operation to
get the status.
"""
def update_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateDatasetGroup", input, options)
end
end
|
lib/aws/generated/forecast.ex
| 0.952827
| 0.574335
|
forecast.ex
|
starcoder
|
defmodule PlayfabEx.Client.Default.SharedGroupData do
@doc """
Adds users to the set of those able to update both the shared data, as well as the set of users in the group. Only users in the group can add new members. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
@link https://api.playfab.com/documentation/client/method/AddSharedGroupMembers
"""
@spec add_shared_group_members(map()) :: {:ok, map} | {:error, String.t}
def add_shared_group_members(params) do
fetch("Client/AddSharedGroupMembers", params)
end
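# Usage sketch - the group and player ids below are placeholders; the request
# shape follows the PlayFab client API:
#
#     add_shared_group_members(%{
#       "SharedGroupId" => "my-shared-group",
#       "PlayFabIds" => ["player-id-1", "player-id-2"]
#     })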
@doc """
Requests the creation of a shared group object, containing key/value pairs which may be updated by all members of the group. Upon creation, the current user will be the only member of the group. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
@link https://api.playfab.com/documentation/client/method/CreateSharedGroup
"""
@spec create_shared_group(map()) :: {:ok, map} | {:error, String.t}
def create_shared_group(params) do
fetch("Client/CreateSharedGroup", params)
end
@doc """
Retrieves data stored in a shared group object, as well as the list of members in the group. Non-members of the group may use this to retrieve group data, including membership, but they will not receive data for keys marked as private. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
@link https://api.playfab.com/documentation/client/method/GetSharedGroupData
"""
@spec get_shared_group_data(map()) :: {:ok, map} | {:error, String.t}
def get_shared_group_data(params) do
fetch("Client/GetSharedGroupData", params)
end
@doc """
Removes users from the set of those able to update the shared data and the set of users in the group. Only users in the group can remove members. If as a result of the call, zero users remain with access, the group and its associated data will be deleted. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
@link https://api.playfab.com/documentation/client/method/RemoveSharedGroupMembers
"""
@spec remove_shared_group_members(map()) :: {:ok, map} | {:error, String.t}
def remove_shared_group_members(params) do
fetch("Client/RemoveSharedGroupMembers", params)
end
@doc """
Adds, updates, and removes data keys for a shared group object. If the permission is set to Public, all fields updated or added in this call will be readable by users not in the group. By default, data permissions are set to Private. Regardless of the permission setting, only members of the group can update the data. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
@link https://api.playfab.com/documentation/client/method/UpdateSharedGroupData
"""
@spec update_shared_group_data(map()) :: {:ok, map} | {:error, String.t}
def update_shared_group_data(params) do
fetch("Client/UpdateSharedGroupData", params)
end
defp fetch(path, params) do
string_params = Poison.encode!(params)
try do
response = Tesla.post("https://#{title_id()}.playfabapi.com/#{path}", string_params, headers: %{"Content-Type" => "application/json", "X-SecretKey" => secret_key()})
|> Map.get(:body)
|> Poison.decode!
{:ok, response}
rescue
_ -> {:error, "Unable to fetch data from playfab"}
end
end
defp title_id() do
Application.get_env(:playfab_ex, :title_id)
end
defp secret_key() do
Application.get_env(:playfab_ex, :secret_key)
end
end
|
lib/client/default/shared_group_data.ex
| 0.645679
| 0.408749
|
shared_group_data.ex
|
starcoder
|
defmodule Ockam.Stream do
@moduledoc """
Defines an Ockam Stream
"""
@doc false
use GenServer
alias Ockam.Router
require Logger
# Set the name of the supervisor that will be used to start workers.
# The supervisor is assumed to be a DynamicSupervisor later in this module.
@supervisor Ockam.Stream.Supervisor
defstruct [:address]
@typedoc "The stream address type."
@type address :: Router.address()
@typedoc "The stream type."
@type t :: %__MODULE__{address: address}
@doc """
Attaches a consumer to the given `stream`.
"""
def attach_consumer(%__MODULE__{address: address}, consumer) when is_binary(consumer) do
Router.whereis(address) |> GenServer.call({:attach_consumer, consumer})
end
@doc """
Sends a message to the given `stream`.
"""
@spec send(t, any) :: any
def send(%__MODULE__{address: address}, message), do: Router.route(address, message)
@doc """
Returns the `pid` of the given `stream`.
"""
@spec whereis(t) :: pid
def whereis(%__MODULE__{address: address}), do: Router.whereis(address)
@doc """
Returns a list of all streams currently known to `@supervisor`.
"""
@spec list() :: [t]
def list do
@supervisor
|> DynamicSupervisor.which_children()
|> Enum.reduce([], fn {_, pid, _, _}, workers ->
address = GenServer.call(pid, :get_address)
[%__MODULE__{address: address} | workers]
end)
end
@doc """
Destroys the given stream.
"""
@spec destroy(t) :: :ok | {:error, :not_found}
def destroy(%__MODULE__{address: address} = worker) do
pid = whereis(worker)
Router.unregister(address)
DynamicSupervisor.terminate_child(@supervisor, pid)
end
@doc """
Creates a new stream.
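## Example
A sketch, assuming the Router and the stream supervisor are running and
`consumer_address` is the binary address of a registered consumer worker:
    {:ok, stream} = Ockam.Stream.create()
    :ok = Ockam.Stream.attach_consumer(stream, consumer_address)
    Ockam.Stream.send(stream, "hello")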
"""
@spec create(Keyword.t()) :: {:ok, t} | {:error, term}
def create(options \\ []) when is_list(options) do
options = Enum.into(options, %{})
options = Map.put_new_lazy(options, :address, fn -> Router.get_unused_address() end)
on_start_child = DynamicSupervisor.start_child(@supervisor, {__MODULE__, options})
with {:ok, _pid, worker} <- on_start_child, do: {:ok, worker}
end
@doc false
def start_link(%{address: nil}), do: {:error, :address_cannot_be_nil}
def start_link(%{address: address} = options) do
with {:ok, pid} <- GenServer.start_link(__MODULE__, options, name: {:via, Router, address}) do
{:ok, pid, %__MODULE__{address: address}}
end
end
@doc false
@impl true
def init(%{address: address}) do
{:ok, %{address: address, consumers: [], messages: []}}
end
@doc false
@impl true
def handle_call(:get_address, _from, %{address: address} = state),
do: {:reply, address, state}
@doc false
@impl true
def handle_call({:attach_consumer, consumer}, _from, %{consumers: consumers} = state) do
{:reply, :ok, %{state | consumers: [consumer | consumers]}}
end
@doc false
@impl true
def handle_info(message, %{consumers: consumers, messages: messages} = state) do
Enum.each(consumers, fn consumer -> Router.route(consumer, message) end)
{:noreply, %{state | messages: [message | messages]}}
end
end
|
implementations/elixir/lib/ockam/stream.ex
| 0.904619
| 0.419618
|
stream.ex
|
starcoder
|
defmodule Hextille.Cube do
alias Hextille.Cube, as: Cube
@moduledoc """
Cube module that represents hexagon tiles using Cube coordinates.
Instead of names x, y, z this module uses names q, r, s.
Cube coordinates have a constraint `q + r + s = 0`,
even with floating point cube coordinates. This has to be always respected.
Axes are aligned in the following order:
```
-r
+s .^. +q
.´ `.
| |
`. .´
-q `.´ -s
+r
```
"""
defstruct q: 0, r: 0, s: 0
@doc """
Creates a Cube or throws `ArgumentError` if the given arguments don't
satisfy constraint `q + r + s = 0`. This function should be the preferred way
to create new Hexagons when using this module.
## Examples:
iex> Cube.create!(1, -2, 1)
%Cube{q: 1, r: -2, s: 1}
iex> Cube.create!(4, -2, 1)
** (ArgumentError) Invalid coordinates, constraint q + r + s = 0
"""
def create!(q, r, s) do
if q + r + s == 0 do
%Cube{q: q, r: r, s: s}
else
raise ArgumentError, message: "Invalid coordinates, constraint q + r + s = 0"
end
end
@doc """
Addition of cube coordinates.
Returns a new Cube with added coordinates.
## Examples:
iex> a = %Cube{q: 1, r: -2, s: 1}
iex> b = %Cube{q: 3, r: -2, s: -1}
iex> Cube.add(a, b)
%Cube{q: 4, r: -4, s: 0}
"""
def add(%Cube{} = a, %Cube{} = b) do
%Cube{
q: a.q + b.q,
r: a.r + b.r,
s: a.s + b.s
}
end
@doc """
Subtraction of cube coordinates.
Returns a new Cube with subtracted coordinates.
## Examples:
iex> a = %Cube{q: 1, r: -2, s: 1}
iex> b = %Cube{q: 3, r: -2, s: -1}
iex> Cube.subtract(a, b)
%Cube{q: -2, r: 0, s: 2}
"""
def subtract(%Cube{} = a, %Cube{} = b) do
%Cube{
q: a.q - b.q,
r: a.r - b.r,
s: a.s - b.s
}
end
@doc """
Scale cube coordinates with given multiplier `k`.
Returns a new scaled Cube.
## Examples:
iex> a = %Cube{q: 1, r: -2, s: 1}
iex> Cube.scale(a, 3)
%Cube{q: 3, r: -6, s: 3}
"""
def scale(%Cube{} = h, k) when is_integer(k) do
%Cube{
q: h.q * k,
r: h.r * k,
s: h.s * k
}
end
@doc ~S"""
Rotate cube coordinates 60° to given direction `:left` or `:right`.
Returns a new Cube at rotated position.
Rotations visualized
```
.^. .^. .^.
.´2 0`.´ `.´0 2`.
| | | |
.^. -2 .^. .^. -2 .^.
.´ `.´ `.´ `.´ `.
| | | | |
`. .^. .^. .^. .^.
`.´ `.´s q`.´ `.´-2 2`.
| | | | |
`. .^. r .^. .^. 0 .´
`.´ `.´ `.´ `.´
| | | |
`. .^. .^. .´
`.´ `.´ `.´
```
## Examples:
iex> a = %Cube{q: 2, r: -2, s: 0}
iex> Cube.rotate(a, :left)
%Cube{q: 0, r: -2, s: 2}
iex> Cube.rotate(a, :right)
%Cube{q: 2, r: 0, s: -2}
"""
def rotate(%Cube{} = h, direction) do
case direction do
:left -> %Cube{q: -h.s, r: -h.q, s: -h.r}
:right -> %Cube{q: -h.r, r: -h.s, s: -h.q}
end
end
@doc ~S"""
Returns a new Cube representing the `direction` as a vector.
Directions are enumerated in following order:
```
:north_west :north_east
.^.
.´s q`.
:west | | :east
`. r .´
`.´
:south_west :south_east
```
## Examples:
iex> Cube.directions(:north_east)
%Cube{q: 1, r: -1, s: 0}
iex> Cube.directions(:east)
%Cube{q: 1, r: 0, s: -1}
iex> Cube.directions(:south_east)
%Cube{q: 0, r: 1, s: -1}
iex> Cube.directions(:south_west)
%Cube{q: -1, r: 1, s: 0}
iex> Cube.directions(:west)
%Cube{q: -1, r: 0, s: 1}
iex> Cube.directions(:north_west)
%Cube{q: 0, r: -1, s: 1}
"""
def directions(direction) do
case direction do
:north_east -> %Cube{q: 1, r: -1, s: 0}
:east -> %Cube{q: 1, r: 0, s: -1}
:south_east -> %Cube{q: 0, r: 1, s: -1}
:south_west -> %Cube{q: -1, r: 1, s: 0}
:west -> %Cube{q: -1, r: 0, s: 1}
:north_west -> %Cube{q: 0, r: -1, s: 1}
end
end
@doc ~S"""
Finds the neighbouring tile of hexagon `a` at the given `direction`.
Returns a new Cube representing the neighbour Cube.
Directions are enumerated in following order:
```
:north_west :north_east
.^.
.´s q`.
:west | | :east
`. r .´
`.´
:south_west :south_east
```
Neighbours visualized
```
.^. .^.
.´1 1`.´0 2`.
| | |
.^. -2 .^. -2 .^.
.´1 0`.´0 1`.´-1 2`.
| | | |
`. -1 .^. -1 .^. -1 .´
`.´0 0`.´-1 1`.´
| | |
`. 0 .^. 0 .´
`.´ `.´
```
## Examples:
iex> a = %Cube{q: 1, r: -1, s: 0}
iex> Cube.neighbour(a, :north_east)
%Cube{q: 2, r: -2, s: 0}
iex> Cube.neighbour(a, :north_west)
%Cube{q: 1, r: -2, s: 1}
"""
def neighbour(%Cube{} = h, direction) do
Cube.add(h, Cube.directions(direction))
end
@doc ~S"""
Returns the distance of hexagon `a` from the origin as an integer.
## Examples:
iex> a = %Cube{q: 1, r: -2, s: 1}
iex> Cube.length(a)
2
iex> b = %Cube{q: -2, r: -3, s: 5}
iex> Cube.length(b)
5
"""
def length(%Cube{} = h) do
div((abs(h.q) + abs(h.r) + abs(h.s)), 2)
end
@doc ~S"""
Calculates the distance between hexagons `a` and `b`.
Returns an integer.
## Examples:
iex> a = %Cube{q: 1, r: -2, s: 1}
iex> b = %Cube{q: -2, r: -3, s: 5}
iex> Cube.distance(a, b)
4
"""
def distance(%Cube{} = a, %Cube{} = b) do
Cube.length(Cube.subtract(a, b))
end
@doc ~S"""
Rounds a hexagon with float coordinates to the nearest integer coordinates,
preserving the constraint `q + r + s = 0`.
## Examples:
iex> a = %Cube{q: 1.5, r: -2.25, s: 0.75}
iex> Cube.round_hex(a)
%Cube{q: 1, r: -2, s: 1}
iex> b = %Cube{q: 1.2, r: 2.5, s: -3.7}
iex> Cube.round_hex(b)
%Cube{q: 1, r: 3, s: -4}
"""
def round_hex(%Cube{} = h) do
q = round(h.q)
r = round(h.r)
s = round(h.s)
q_diff = abs(q - h.q)
r_diff = abs(r - h.r)
s_diff = abs(s - h.s)
cond do
(q_diff > r_diff && q_diff > s_diff) ->
%Cube{q: -r - s, r: r, s: s}
(r_diff > s_diff) ->
%Cube{q: q, r: -q - s, s: s}
true ->
%Cube{q: q, r: r, s: -q - r}
end
end
end
|
lib/cube.ex
| 0.947345
| 0.866698
|
cube.ex
|
starcoder
|
defmodule Ankh.HTTP2.Frame.PushPromise do
@moduledoc false
defmodule Flags do
@moduledoc false
@type t :: %__MODULE__{end_headers: boolean(), padded: boolean()}
defstruct end_headers: false, padded: false
defimpl Ankh.HTTP2.Frame.Encodable do
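# PUSH_PROMISE flags byte layout (RFC 7540, section 6.6): bit 0x8 is PADDED and
# bit 0x4 is END_HEADERS, i.e. <<_::4, padded::1, end_headers::1, _::2>>.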
def decode(%Flags{} = flags, <<_::4, 1::1, 1::1, _::2>>, _) do
{:ok, %{flags | end_headers: true, padded: true}}
end
def decode(%Flags{} = flags, <<_::4, 1::1, 0::1, _::2>>, _) do
{:ok, %{flags | end_headers: false, padded: true}}
end
def decode(%Flags{} = flags, <<_::4, 0::1, 1::1, _::2>>, _) do
{:ok, %{flags | end_headers: true, padded: false}}
end
def decode(%Flags{} = flags, <<_::4, 0::1, 0::1, _::2>>, _) do
{:ok, %{flags | end_headers: false, padded: false}}
end
def decode(_flags, _data, _options), do: {:error, :decode_error}
def encode(%Flags{end_headers: true, padded: true}, _) do
{:ok, <<0::4, 1::1, 1::1, 0::2>>}
end
def encode(%Flags{end_headers: true, padded: false}, _) do
{:ok, <<0::4, 0::1, 1::1, 0::2>>}
end
def encode(%Flags{end_headers: false, padded: true}, _) do
{:ok, <<0::4, 1::1, 0::1, 0::2>>}
end
def encode(%Flags{end_headers: false, padded: false}, _) do
{:ok, <<0::4, 0::1, 0::1, 0::2>>}
end
def encode(_flags, _options), do: {:error, :encode_error}
end
end
defmodule Payload do
@moduledoc false
alias Ankh.HTTP2.Stream, as: HTTP2Stream
@type t :: %__MODULE__{
pad_length: non_neg_integer(),
promised_stream_id: HTTP2Stream.id(),
hbf: binary()
}
defstruct pad_length: 0, promised_stream_id: 0, hbf: []
defimpl Ankh.HTTP2.Frame.Encodable do
alias Ankh.HTTP2.Frame.PushPromise.{Flags, Payload}
def decode(
%Payload{} = payload,
<<pl::8, _::1, psi::31, data::binary>>,
flags: %Flags{padded: true}
) do
{:ok,
%{
payload
| pad_length: pl,
promised_stream_id: psi,
hbf: binary_part(data, 0, byte_size(data) - pl)
}}
end
def decode(
%Payload{} = payload,
<<_::8, _::1, psi::31, hbf::binary>>,
flags: %Flags{padded: false}
) do
{:ok, %{payload | promised_stream_id: psi, hbf: hbf}}
end
def decode(_payload, _data, _options), do: {:error, :decode_error}
def encode(
%Payload{pad_length: pad_length, promised_stream_id: psi, hbf: hbf},
flags: %Flags{padded: true}
) do
{:ok, [<<pad_length::8, 0::1, psi::31>>, hbf, :binary.copy(<<0>>, pad_length)]}
end
def encode(%Payload{promised_stream_id: psi, hbf: hbf}, flags: %Flags{padded: false}) do
{:ok, [<<0::1, psi::31>>, hbf]}
end
def encode(_payload, _options), do: {:error, :encode_error}
end
end
use Ankh.HTTP2.Frame, type: 0x5, flags: Flags, payload: Payload
end
|
lib/ankh/http2/frame/push_promise.ex
| 0.781331
| 0.434701
|
push_promise.ex
|
starcoder
|
defmodule ParentTeen do
@moduledoc """
Parent asks the teen if the baby is okay. Teen can't answer until the baby
says it's okay. The baby takes a couple of seconds. How does the teen handle this?
Teen answers `handle_call` with `:noreply` but keeps the `from` in `state`.
When teen gets the `handle_cast` from the baby, it does a `reply` to the
`from` in its state.
If the baby takes > 5 sec, then the call from parent to teen will time out.
But this provides a way for the teen to stop executing the handle_call while
it's waiting for the baby to buzz in.
```
iex(1)> ParentTeen.go
Goo goo?
Is the baby okay?
Tell you in a minute.
Gah gah :)
Yes, the baby's fine.
:ok
```
"""
def go do
{:ok, teen} = Teen.start_link
{:ok, baby} = GenServer.start_link(Baby, :ok)
{:ok, parent} = GenServer.start_link(Parent, teen)
GenServer.cast(baby, {:say_youre_fine_in_a_couple_seconds, teen})
IO.puts GenServer.call(parent, :is_the_baby_okay)
end
end
defmodule Parent do
use GenServer
def handle_call(:is_the_baby_okay, _from, state) do
IO.puts "Is the baby okay?"
{:reply, GenServer.call(state, :is_the_baby_okay), state}
end
end
defmodule Teen do
use GenServer
def start_link,
do: GenServer.start_link(__MODULE__, %{baby_good: false, parent_from: nil})
def handle_call(:is_the_baby_okay, _from, %{baby_good: true} = state) do
{:reply, "yes", state}
end
def handle_call(:is_the_baby_okay, from, _state) do
IO.puts "Tell you in a minute."
{:noreply, %{parent_from: from, baby_good: false}}
end
def handle_cast(:im_fine, %{parent_from: nil}),
do: {:noreply, %{baby_good: true, parent_from: nil}}
def handle_cast(:im_fine, %{parent_from: from}) do
GenServer.reply(from, "Yes, the baby's fine.")
{:noreply, %{parent_from: nil, baby_good: true}}
end
end
defmodule Baby do
use GenServer
def handle_cast({:say_youre_fine_in_a_couple_seconds, teen}, state) do
IO.puts "Goo goo?"
:timer.sleep 2000
IO.puts "Gah gah :)"
GenServer.cast(teen, :im_fine)
{:noreply, state}
end
end
|
lib/parent_teen.ex
| 0.59302
| 0.880077
|
parent_teen.ex
|
starcoder
|
defmodule Day21 do
def part1(rules, string) do
Parser.parse(rules)
|> Enum.reduce(string, &execute/2)
end
def part2(rules, string) do
Parser.parse(rules)
|> Enum.reverse
|> Enum.map(&reverse_command/1)
|> Enum.reduce(string, &execute/2)
end
defp execute(command, string) do
case command do
{:move, pos1, pos2} ->
{prefix, rest} = String.split_at(string, pos1)
{char, suffix} = String.split_at(rest, 1)
string = prefix <> suffix
{prefix, suffix} = String.split_at(string, pos2)
prefix <> char <> suffix
{:reverse, pos1, pos2} ->
{prefix, rest} = String.split_at(string, pos1)
{subject, suffix} = String.split_at(rest, pos2 - pos1 + 1)
prefix <> String.reverse(subject) <> suffix
{:rotate, :left, amount} ->
rotate_left(string, amount)
{:rotate, :right, amount} ->
rotate_right(string, amount)
{:rotate_based, letter} ->
rotate_based(string, letter)
{:rotate_based_reversed, letter} ->
rotate_based_reversed(string, letter, string)
{:swap_position, pos1, pos2} ->
char1 = String.at(string, pos1)
char2 = String.at(string, pos2)
string
|> string_put(pos1, char2)
|> string_put(pos2, char1)
{:swap_letter, letter1, letter2} ->
string
|> String.replace(letter1, "\0")
|> String.replace(letter2, letter1)
|> String.replace("\0", letter2)
end
end
defp rotate_based_reversed(string, letter, goal) do
case rotate_based(string, letter) do
^goal -> string
_ -> rotate_based_reversed(rotate_left(string, 1), letter, goal)
end
end
defp rotate_based(string, letter) do
[prefix, _] = String.split(string, letter)
index = byte_size(prefix)
amount = if index >= 4, do: index + 2, else: index + 1
rotate_right(string, amount)
end
defp rotate_left(string, amount) do
amount = rem(amount, byte_size(string))
{prefix, suffix} = String.split_at(string, amount)
suffix <> prefix
end
defp rotate_right(string, amount) do
amount = rem(amount, byte_size(string))
{prefix, suffix} = String.split_at(string, -amount)
suffix <> prefix
end
defp string_put(string, position, char) do
<<prefix::binary-size(position), _::size(8), suffix::binary>> = string
prefix <> char <> suffix
end
defp reverse_command(command) do
case command do
{:move, pos1, pos2} ->
{:move, pos2, pos1}
{:rotate, dir, amount} ->
dir = case dir do
:left -> :right
:right -> :left
end
{:rotate, dir, amount}
{:rotate_based, letter} ->
{:rotate_based_reversed, letter}
_ ->
command
end
end
end
defmodule Parser do
import NimbleParsec
defp pack(list, tag) do
List.to_tuple([tag | list])
end
defp to_atom([word]), do: String.to_atom(word)
blank = ignore(optional(ascii_char([?\s])))
letter = ascii_string([?a..?z], min: 1)
swap_position = ignore(string("swap position "))
|> integer(min: 1)
|> ignore(string(" with position "))
|> integer(min: 1)
|> reduce({:pack, [:swap_position]})
swap_letter = ignore(string("swap letter "))
|> concat(letter)
|> ignore(string(" with letter "))
|> concat(letter)
|> reduce({:pack, [:swap_letter]})
reverse = ignore(string("reverse positions "))
|> integer(min: 1)
|> ignore(string(" through "))
|> integer(min: 1)
|> reduce({:pack, [:reverse]})
side = choice([string("left"), string("right")])
|> reduce({:to_atom, []})
rotate_side = ignore(string("rotate "))
|> concat(side)
|> concat(blank)
|> integer(min: 1)
|> ignore(string(" step") |> optional(string("s")))
|> reduce({:pack, [:rotate]})
rotate_based = ignore(string("rotate based on position of letter "))
|> concat(letter)
|> unwrap_and_tag(:rotate_based)
move = ignore(string("move position "))
|> integer(min: 1)
|> ignore(string(" to position "))
|> integer(min: 1)
|> reduce({:pack, [:move]})
defparsec :command, choice([swap_letter, swap_position, reverse,
rotate_side, rotate_based, move])
def parse(input) do
Enum.map(input, fn line ->
{:ok, [result], "", _, _, _} = command(line)
result
end)
end
end
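# Usage sketch with the AoC 2016 day 21 sample instructions; per the puzzle
# statement, "abcde" scrambles to "decab":
#
#   rules = [
#     "swap position 4 with position 0",
#     "swap letter d with letter b",
#     "reverse positions 0 through 4",
#     "rotate left 1 step",
#     "move position 1 to position 4",
#     "move position 3 to position 0",
#     "rotate based on position of letter b",
#     "rotate based on position of letter d"
#   ]
#   "decab" = Day21.part1(rules, "abcde")
#
# part2 runs the reversed commands to unscramble (reversing the rotate-based
# rule is only guaranteed unambiguous for the 8-character puzzle input).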
|
day21/lib/day21.ex
| 0.576304
| 0.478773
|
day21.ex
|
starcoder
|
defmodule Siariwyd.Normalise do
@moduledoc false
require Logger
defp maybe_ast_realise(value, opts \\ [])
defp maybe_ast_realise(value, _opts)
when is_atom(value)
or is_bitstring(value)
or is_boolean(value)
or is_function(value)
or is_map(value)
or is_number(value)
or is_pid(value)
or is_port(value)
or is_reference(value) do
value
end
# quoted module attribute - leave alone
defp maybe_ast_realise({:@, _, [{attr_name, _, _}]} = value, _opts) when is_atom(attr_name) do
value
end
# list with maybe quoted elements
defp maybe_ast_realise(value, _opts) when is_list(value) do
value |> Enum.map(fn v -> v |> maybe_ast_realise end)
end
# quoted map
defp maybe_ast_realise({:%{}, _, args} = _value, _opts) do
args
|> Stream.map(fn {k,v} ->
{k |> maybe_ast_realise, v |> maybe_ast_realise}
end)
|> Enum.into(%{})
end
# quoted tuple
defp maybe_ast_realise({:{}, _, args} = _value, _opts) do
args
|> Enum.map(fn v -> v |> maybe_ast_realise end)
|> List.to_tuple
end
defp maybe_ast_realise({_, _, _} = value, _opts) do
case value |> Macro.validate do
:ok -> value |> Code.eval_quoted([], __ENV__) |> elem(0)
_ -> value
end
end
# default
defp maybe_ast_realise(value, _opts) do
value
end
def maybe_ast_realise_fun(value)
def maybe_ast_realise_fun(nil) do
nil
end
def maybe_ast_realise_fun(value) when is_tuple(value) do
value
|> maybe_ast_realise
|> maybe_ast_realise_fun
end
def maybe_ast_realise_fun(value) when is_function(value) do
value
end
def maybe_ast_realise_fun!(value)
def maybe_ast_realise_fun!(value) when is_function(value) do
value
end
def maybe_ast_realise_fun!(value) do
message = "maybe_ast_realise_fun!: value not a function #{inspect value}"
Logger.error message
raise ArgumentError, message: message
end
def maybe_ast_realise_module(value)
def maybe_ast_realise_module(nil) do
nil
end
def maybe_ast_realise_module(value) when is_tuple(value) do
value
|> maybe_ast_realise
|> maybe_ast_realise_module
end
def maybe_ast_realise_module(value) when is_atom(value) do
value
end
def maybe_ast_realise_module!(value)
def maybe_ast_realise_module!(value) when is_atom(value) do
value
end
def maybe_ast_realise_module!(value) do
message = "maybe_ast_realise_module!: value not a module #{inspect value}"
Logger.error message
raise ArgumentError, message: message
end
def normalise_function_names(names) do
names
|> List.wrap
|> Enum.reject(&is_nil/1)
end
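# For example: normalise_function_names(:insert) returns [:insert], and
# normalise_function_names([:a, nil, :b]) returns [:a, :b].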
def validate_function_names!(names) do
names = names |> normalise_function_names
true = names |> Enum.all?(fn name -> is_atom(name) end)
names
end
end
|
lib/siariwyd/normalise.ex
| 0.64131
| 0.420332
|
normalise.ex
|
starcoder
|
defmodule Crontab.DateHelper do
@moduledoc false
@type unit :: :year | :month | :day | :hour | :minute | :second | :microsecond
@units [
{:year, {nil, nil}},
{:month, {1, 12}},
{:day, {1, :end_of_month}},
{:hour, {0, 23}},
{:minute, {0, 59}},
{:second, {0, 59}},
{:microsecond, {{0, 0}, {999_999, 6}}}
]
@doc """
Get Start of a period of a date.
## Examples
iex> Crontab.DateHelper.beginning_of(~N[2016-03-14 01:45:45.123], :year)
~N[2016-01-01 00:00:00]
"""
@spec beginning_of(NaiveDateTime.t(), unit) :: NaiveDateTime.t()
def beginning_of(date, unit) do
_beginning_of(date, proceeding_units(unit))
end
@doc """
Get the end of a period of a date.
## Examples
iex> Crontab.DateHelper.end_of(~N[2016-03-14 01:45:45.123], :year)
~N[2016-12-31 23:59:59.999999]
"""
@spec end_of(NaiveDateTime.t(), unit) :: NaiveDateTime.t()
def end_of(date, unit) do
_end_of(date, proceeding_units(unit))
end
@doc """
Find the last occurrence of weekday in month.
"""
@spec last_weekday(NaiveDateTime.t(), Calendar.day_of_week()) :: Calendar.day()
def last_weekday(date, weekday) do
date
|> end_of(:month)
|> last_weekday(weekday, :end)
end
@doc """
Find the nth weekday of month.
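## Examples
The second Friday (ISO weekday 5) of March 2016 falls on the 11th:
    iex> Crontab.DateHelper.nth_weekday(~N[2016-03-14 01:45:45.123], 5, 2)
    11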
"""
@spec nth_weekday(NaiveDateTime.t(), Calendar.day_of_week(), integer) :: Calendar.day()
def nth_weekday(date, weekday, n) do
date
|> beginning_of(:month)
|> nth_weekday(weekday, n, :start)
end
@doc """
Find the last occurrence of weekday in month.
"""
@spec last_weekday_of_month(NaiveDateTime.t()) :: Calendar.day()
def last_weekday_of_month(date) do
last_weekday_of_month(end_of(date, :month), :end)
end
@doc """
Find the nearest weekday (Monday to Friday) to the given date, staying within the date's month.
"""
@spec next_weekday_to(NaiveDateTime.t()) :: Calendar.day()
def next_weekday_to(date = %NaiveDateTime{year: year, month: month, day: day}) do
weekday = :calendar.day_of_the_week(year, month, day)
next_day = NaiveDateTime.add(date, 86_400, :second)
previous_day = NaiveDateTime.add(date, -86_400, :second)
cond do
weekday == 7 && next_day.month == date.month -> next_day.day
weekday == 7 -> NaiveDateTime.add(date, -86_400 * 2, :second).day
weekday == 6 && previous_day.month == date.month -> previous_day.day
weekday == 6 -> NaiveDateTime.add(date, 86_400 * 2, :second).day
true -> date.day
end
end
@spec inc_year(NaiveDateTime.t()) :: NaiveDateTime.t()
def inc_year(date) do
leap_year? =
date
|> NaiveDateTime.to_date()
|> Date.leap_year?()
if leap_year? do
NaiveDateTime.add(date, 366 * 86_400, :second)
else
NaiveDateTime.add(date, 365 * 86_400, :second)
end
end
@spec dec_year(NaiveDateTime.t()) :: NaiveDateTime.t()
def dec_year(date) do
leap_year? =
date
|> NaiveDateTime.to_date()
|> Date.leap_year?()
if leap_year? do
NaiveDateTime.add(date, -366 * 86_400, :second)
else
NaiveDateTime.add(date, -365 * 86_400, :second)
end
end
@spec inc_month(NaiveDateTime.t()) :: NaiveDateTime.t()
def inc_month(date = %NaiveDateTime{day: day}) do
days =
date
|> NaiveDateTime.to_date()
|> Date.days_in_month()
NaiveDateTime.add(date, (days + 1 - day) * 86_400, :second)
end
@spec dec_month(NaiveDateTime.t()) :: NaiveDateTime.t()
def dec_month(date) do
days =
date
|> NaiveDateTime.to_date()
|> Date.days_in_month()
NaiveDateTime.add(date, days * -86_400, :second)
end
@spec _beginning_of(NaiveDateTime.t(), [{unit, {any, any}}]) :: NaiveDateTime.t()
defp _beginning_of(date, [{unit, {lower, _}} | tail]) do
_beginning_of(Map.put(date, unit, lower), tail)
end
defp _beginning_of(date, []), do: date
@spec _end_of(NaiveDateTime.t(), [{unit, {any, any}}]) :: NaiveDateTime.t()
defp _end_of(date, [{unit, {_, :end_of_month}} | tail]) do
upper =
date
|> NaiveDateTime.to_date()
|> Date.days_in_month()
_end_of(Map.put(date, unit, upper), tail)
end
defp _end_of(date, [{unit, {_, upper}} | tail]) do
_end_of(Map.put(date, unit, upper), tail)
end
defp _end_of(date, []), do: date
@spec proceeding_units(unit) :: [{unit, {any, any}}]
defp proceeding_units(unit) do
[_ | units] =
@units
|> Enum.reduce([], fn {key, value}, acc ->
cond do
Enum.count(acc) > 0 ->
Enum.concat(acc, [{key, value}])
key == unit ->
[{key, value}]
true ->
[]
end
end)
units
end
@spec nth_weekday(NaiveDateTime.t(), Calendar.day_of_week(), non_neg_integer, :start) :: Calendar.day()
defp nth_weekday(date = %NaiveDateTime{}, _, 0, :start),
do: NaiveDateTime.add(date, -86_400, :second).day
defp nth_weekday(date = %NaiveDateTime{year: year, month: month, day: day}, weekday, n, :start) do
if :calendar.day_of_the_week(year, month, day) == weekday do
nth_weekday(NaiveDateTime.add(date, 86_400, :second), weekday, n - 1, :start)
else
nth_weekday(NaiveDateTime.add(date, 86_400, :second), weekday, n, :start)
end
end
@spec last_weekday_of_month(NaiveDateTime.t(), :end) :: Calendar.day()
defp last_weekday_of_month(date = %NaiveDateTime{year: year, month: month, day: day}, :end) do
weekday = :calendar.day_of_the_week(year, month, day)
if weekday > 5 do
last_weekday_of_month(NaiveDateTime.add(date, -86_400, :second), :end)
else
day
end
end
@spec last_weekday(NaiveDateTime.t(), non_neg_integer, :end) :: Calendar.day()
defp last_weekday(date = %NaiveDateTime{year: year, month: month, day: day}, weekday, :end) do
if :calendar.day_of_the_week(year, month, day) == weekday do
day
else
last_weekday(NaiveDateTime.add(date, -86_400, :second), weekday, :end)
end
end
end
|
lib/crontab/date_helper.ex
| 0.911407
| 0.628407
|
date_helper.ex
|
starcoder
|
defmodule Utils.Slide do
def case do
[
"
We create a case statement.
```elixir
case \"snowy\" do
\"sunny\" -> \"wear a t-shirt\"
\"rainy\" -> \"wear a rain jacket\"
\"cold\" -> \"wear a sweater\"
\"snowy\" -> \"wear a thick coat\"
end
```
",
"
Check if snowy equals sunny.
```elixir
case \"snowy\" do
\"snowy\" === \"sunny\" -> \"wear a t-shirt\"
\"rainy\" -> \"wear a rain jacket\"
\"cold\" -> \"wear a sweater\"
\"snowy\" -> \"wear a thick coat\"
end
```
",
"
It's false, so check if snowy equals rainy.
```elixir
case \"snowy\" do
false -> \"wear a t-shirt\"
\"snowy\" === \"rainy\" -> \"wear a rain jacket\"
\"cold\" -> \"wear a sweater\"
\"snowy\" -> \"wear a thick coat\"
end
```
",
"
It's false, so check if snowy equals cold.
```elixir
case \"snowy\" do
false -> \"wear a t-shirt\"
false -> \"wear a rain jacket\"
\"snowy\" === \"cold\" -> \"wear a sweater\"
\"snowy\" -> \"wear a thick coat\"
end
```
",
"
It's false, so check if snowy equals snowy.
```elixir
case \"snowy\" do
false -> \"wear a t-shirt\"
false -> \"wear a rain jacket\"
false -> \"wear a sweater\"
\"snowy\" === \"snowy\" -> \"wear a thick coat\"
end
```
",
"
snowy equals snowy.
```elixir
case \"snowy\" do
false -> \"wear a t-shirt\"
false -> \"wear a rain jacket\"
false -> \"wear a sweater\"
true -> \"wear a thick coat\"
end
```
",
"
Return wear a thick coat.
```elixir
\"wear a thick coat\"
```
"
]
|> Enum.map(&Kino.Markdown.new/1)
|> Kino.Slide.new()
end
def cond do
[
"
Check if plant is dead.
```elixir
daylight = true
days_since_watered = 14
plant = \"healthy\"
cond do
plant === \"dead\" -> \"get a new plant\"
plant === \"wilting\" && !daylight -> \"use a UV light\"
plant === \"wilting\" && daylight -> \"put the plant in sunlight\"
days_since_watered >= 14 -> \"water the plant\"
end
```
",
"
`false`, so check if plant is wilting and it's dark.
```elixir
daylight = true
days_since_watered = 14
plant = \"healthy\"
cond do
false -> \"get a new plant\"
plant === \"wilting\" && !daylight -> \"use a UV light\"
plant === \"wilting\" && daylight -> \"put the plant in sunlight\"
days_since_watered >= 14 -> \"water the plant\"
end
```
",
"
`false`, so check if plant is wilting and it's sunny.
```elixir
daylight = true
days_since_watered = 14
plant = \"healthy\"
cond do
false -> \"get a new plant\"
false -> \"use a UV light\"
plant === \"wilting\" && daylight -> \"put the plant in sunlight\"
days_since_watered >= 14 -> \"water the plant\"
end
```
",
"
`false`, so check if days_since_watered is >= 14.
```elixir
daylight = true
days_since_watered = 14
plant = \"healthy\"
cond do
false -> \"get a new plant\"
false -> \"use a UV light\"
false -> \"put the plant in sunlight\"
days_since_watered >= 14 -> \"water the plant\"
end
```
",
"
`true`! days_since_watered is >= 14.
```elixir
daylight = true
days_since_watered = 14
plant = \"healthy\"
cond do
false -> \"get a new plant\"
false -> \"use a UV light\"
false -> \"put the plant in sunlight\"
true -> \"water the plant\"
end
```
",
"
Water the plant.
```elixir
\"water the plant\"
```
"
]
|> Enum.map(&Kino.Markdown.new/1)
|> Kino.Slide.new()
end
def functions do
[
"
First, we define the `double` function and call it.
```elixir
double = fn number -> number * 2 end
double.(3)
```
",
"
The `double` function executes in place of the `double.(3)` call, with `number` bound to `3`.
```elixir
double = fn number -> number * 2 end
fn 3 -> 3 * 2 end
```
",
"
The function evaluates the function body between the `->` and the `end`
```elixir
double = fn number -> number * 2 end
3 * 2
```
",
"
`3` * `2` is `6`, so the function call returns `6`.
```elixir
double = fn number -> number * 2 end
6
```
"
]
|> Enum.map(&Kino.Markdown.new/1)
|> Kino.Slide.new()
end
def recursion do
[
"
The base case would return the accumulator when the list is empty.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
end
```
",
"
Otherwise, we'll add the head to an accumulator and recurse on the tail of the list.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
def sum([head | tail], accumulator) do
sum(tail, accumulator + head)
end
end
Recursion.sum([4, 5, 6], 0)
```
",
"
The `sum/2` function is called with the list `[4, 5, 6]`.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
def sum([4 | [5, 6]], 0) do
sum([5, 6], 4 + 0)
end
end
Recursion.sum([4, 5, 6], 0)
```
",
"
The `sum/2` function is called again on the tail of the list `[5, 6]`.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
def sum([5 | [6]], 4) do
sum([6], 5 + 4)
end
end
Recursion.sum([4, 5, 6], 0)
```
",
"
The `sum/2` function is called again on the tail of the list `[6]`.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
def sum([6 | []], 9) do
sum([], 6 + 9)
end
end
Recursion.sum([4, 5, 6], 0)
```
",
"
The `sum/2` function is called again on the tail of the list `[]`. This triggers the base case to return the accumulator.
```elixir
defmodule Recursion do
def sum([], 15), do: 15
def sum([head | tail], accumulator) do
sum(tail, accumulator + head)
end
end
Recursion.sum([4, 5, 6], 0)
```
",
"
And our function returns `15`.
```elixir
defmodule Recursion do
def sum([], accumulator), do: accumulator
def sum([head | tail], accumulator) do
sum(tail, accumulator + head)
end
end
15
```
"
]
|> Enum.map(&Kino.Markdown.new/1)
|> Kino.Slide.new()
end
def reduce do
[
"
First, we call the `Enum.reduce/2` function with a list and a function.
```elixir
Enum.reduce([1, 2, 3, 4], fn integer, accumulator -> integer + accumulator end)
```
",
"
The first element in the list, `1`, is the initial accumulator value.
```elixir
Enum.reduce([2, 3, 4], fn integer, 1 -> integer + 1 end)
```
",
"
The function is called on the next element `2`. The next accumulator is 2 + 1
```elixir
Enum.reduce([3, 4], fn 2, 1 -> 2 + 1 end)
```
",
"
The function is called on the next element `3`. The next accumulator is 3 + 3
```elixir
Enum.reduce([4], fn 3, 3 -> 3 + 3 end)
```
",
"
The function is called on the next element `4`. The next accumulator is 4 + 6
```elixir
Enum.reduce([], fn 4, 6 -> 4 + 6 end)
```
",
"
4 + 6 equals 10.
```elixir
Enum.reduce([], fn 4, 6 -> 10 end)
```
",
"
`10` is the last accumulator value, so `Enum.reduce/2` returns `10`.
```elixir
10
```
"
]
|> Enum.map(&Kino.Markdown.new/1)
|> Kino.Slide.new()
end
end
|
utils/lib/slide.ex
| 0.516108
| 0.875255
|
slide.ex
|
starcoder
|
defmodule AssertHTML.DSL do
@moduledoc ~S"""
Adds additional syntax for passing the current context inside a block
### Example: pass context
```
assert_html html, ".container" do
assert_html "form", action: "/users" do
refute_html ".flash_message"
assert_html ".control_group" do
assert_html "label", class: "title", text: ~r{<NAME>}
assert_html "input", class: "control", type: "text"
end
assert_html("a", text: "Submit", class: "button")
end
assert_html ".user_list" do
assert_html "li"
end
end
```
### Example 2: print current context for debugging
```
assert_html(html, ".selector") do
IO.inspect(assert_html, label: "current context html")
end
```
"""
alias AssertHTML, as: HTML
alias AssertHTML.Debug
defmacro assert_html(context, selector \\ nil, attributes \\ nil, maybe_do_block \\ nil) do
Debug.log(context: context, selector: selector, attributes: attributes, maybe_do_block: maybe_do_block)
{args, block} = extract_block([context, selector, attributes], maybe_do_block)
call_html_method(:assert, args, block)
|> Debug.log_dsl()
end
defmacro refute_html(context, selector \\ nil, attributes \\ nil, maybe_do_block \\ nil) do
Debug.log(context: context, selector: selector, attributes: attributes, maybe_do_block: maybe_do_block)
{args, block} = extract_block([context, selector, attributes], maybe_do_block)
call_html_method(:refute, args, block)
|> Debug.log_dsl()
end
defp call_html_method(matcher, args, block \\ nil)
defp call_html_method(:assert, args, nil) do
quote do
HTML.assert_html(unquote_splicing(args))
end
end
defp call_html_method(:refute, args, nil) do
quote do
HTML.refute_html(unquote_splicing(args))
end
end
defp call_html_method(matcher, args, block) do
block_arg =
quote do
fn unquote(context_var()) ->
unquote(Macro.prewalk(block, &postwalk/1))
end
end
call_html_method(matcher, args ++ [block_arg])
end
# Extract the do: block if it exists
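# For example (an illustrative call, not from the original source):
#   extract_block([html, ".a", [class: "x", do: block]], nil)
#   #=> {[html, ".a", [class: "x"]], block}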
defp extract_block(args, do: do_block) do
{args, do_block}
end
defp extract_block(args, _maybe_block) do
args
|> Enum.reverse()
|> Enum.reduce({[], nil}, fn
arg, {args, block} when is_list(arg) ->
{maybe_block, updated_arg} = Keyword.pop(arg, :do)
{
(updated_arg == [] && args) || [updated_arg | args],
block || maybe_block
}
nil, {args, block} ->
{args, block}
arg, {args, block} ->
{[arg | args], block}
end)
end
# Replace assert_html without arguments with the current context
defp postwalk({:assert_html, env, nil}) do
context_var(env)
end
defp postwalk({:assert_html, env, arguments}) do
context = context_var(env)
{args, block} = extract_block([context | arguments], nil)
call_html_method(:assert, args, block)
end
# Replace refute_html without arguments with the current context
defp postwalk({:refute_html, env, nil}) do
context_var(env)
end
defp postwalk({:refute_html, env, arguments}) do
context = context_var(env)
{args, block} = extract_block([context | arguments], nil)
call_html_method(:refute, args, block)
end
defp postwalk(segment) do
segment
end
defp context_var(env \\ []) do
{:assert_html_context, env, nil}
end
end
|
lib/assert_html/dsl.ex
| 0.822653
| 0.716268
|
dsl.ex
|
starcoder
|
defmodule Oli.Interop.LegacySupport do
@resources [
"x-oli-workbook_page",
"x-oli-inline-assessment"
]
@supported [
"activity_link",
"link",
"sym",
"em",
"sub",
"sup",
"term",
"var",
"code",
"codeblock",
"p",
"ul",
"ol",
"dl",
"dd",
"dt",
"li",
"iframe",
"audio",
"youtube",
"table",
"th",
"td",
"tr",
"title",
"caption",
"materials",
"quote",
"image",
"example",
"section",
"wb:inline"
]
@converted [
"sym",
"materials",
"material",
"pullout",
"anchor",
"extra"
]
@unsupported [
"applet",
"director",
"flash",
"mathematica",
"panopto",
"unity",
"video",
"vimeo",
"custom"
]
@pending [
"labels",
"preconditions",
"supplements",
"unordered",
"schedule",
"include",
"progress_constraints",
"essay",
"introduction",
"conclusion",
"embed_activity",
"fill_in_the_blank",
"ordering",
"numeric",
"short_answer",
"image_hotspot",
"variable",
"page",
"pool_ref",
"pool",
"cite",
"foreign",
"ipa",
"bdo",
"formula",
"alternatives",
"alternative",
"composite",
"conjugation",
"dialog",
"definition",
"figure",
"inquiry",
"theorem",
"command",
"dependencies",
"activity",
"activity_report",
"multipanel",
"xref",
"pref:choose",
"pref:when",
"pref:otherwise",
"pref:if",
"pref:label",
"pref:value",
"bib:entry"
]
# The legacy resource types that Torus supports (beyond project and organization)
def resources, do: @resources
# The content elements that Torus provides direct support for.
def supported, do: @supported
# The content elements that Torus supports by converting these elements into
# elements that Torus does support. This conversion could be a mapping, or
# simply the removal of the element (leaving behind the content of the element)
def converted, do: @converted
# These are elements that Torus will never support, and that a Legacy course must
# replace or remove prior to conversion
def unsupported, do: @unsupported
# These are the elements that Torus plans to support in some way (either directly or
# via conversion) but does not currently support.
def pending, do: @pending
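# Example (illustrative membership check):
#   "video" in Oli.Interop.LegacySupport.unsupported() #=> true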
end
|
lib/oli/interop/legacy_support.ex
| 0.563378
| 0.48054
|
legacy_support.ex
|
starcoder
|
defmodule ToyRobot.Simulation do
alias ToyRobot.{Robot, Table, Simulation}
defstruct [:table, :robot]
@doc """
Returns next position of the robot.
## Examples
iex> alias ToyRobot.{Robot, Table, Simulation}
[ToyRobot.Robot, ToyRobot.Table, ToyRobot.Simulation]
iex> table = %Table{north_boundary: 5, east_boundary: 5}
%Table{north_boundary: 5, east_boundary: 5}
iex> simulation = %Simulation{
...> table: table,
...> robot: %Robot{north: 0, east: 0, facing: :north}
...> }
iex> Simulation.next_position(simulation)
%Robot{north: 1, east: 0, facing: :north}
"""
def next_position(%{robot: robot} = _simulation) do
Robot.move(robot)
end
@doc """
Move the robot one step forward in its facing direction,
unless that position passes the boundaries of the table.
## Examples
### A valid movement
iex> alias ToyRobot.{Robot, Table, Simulation}
[ToyRobot.Robot, ToyRobot.Table, ToyRobot.Simulation]
iex> table = %Table{north_boundary: 4, east_boundary: 4}
%Table{north_boundary: 4, east_boundary: 4}
iex> simulation = %Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}
%Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}
iex> Simulation.move(simulation)
{:ok, %Simulation{table: table, robot: %Robot{north: 1, east: 0, facing: :north}}}
### An invalid movement
iex> alias ToyRobot.{Robot, Table, Simulation}
[ToyRobot.Robot, ToyRobot.Table, ToyRobot.Simulation]
iex> table = %Table{north_boundary: 4, east_boundary: 4}
%Table{north_boundary: 4, east_boundary: 4}
iex> simulation = %Simulation{table: table, robot: %Robot{north: 4, east: 0, facing: :north}}
%Simulation{table: table, robot: %Robot{north: 4, east: 0, facing: :north}}
iex> Simulation.move(simulation)
{:error, :at_table_boundary}
"""
def move(%{robot: robot, table: table} = simulation) do
moved_robot = Robot.move(robot)
if Table.valid_position?(table, moved_robot) do
{:ok, %{simulation | robot: moved_robot}}
else
{:error, :at_table_boundary}
end
end
@doc """
Returns the robot's current position.
## Examples
iex> alias ToyRobot.{Robot, Table, Simulation}
[ToyRobot.Robot, ToyRobot.Table, ToyRobot.Simulation]
iex> table = %Table{north_boundary: 4, east_boundary: 4}
%Table{north_boundary: 4, east_boundary: 4}
iex> simulation = %Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}
%Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}
iex> Simulation.report(simulation)
%Robot{north: 0, east: 0, facing: :north}
"""
def report(%Simulation{robot: robot}) do
robot
end
@doc """
Simulate robot placement.
## Examples
With valid position:
iex> alias ToyRobot.{Robot, Table, Simulation}
[ToyRobot.Robot, ToyRobot.Table, ToyRobot.Simulation]
iex> table = %Table{north_boundary: 4, east_boundary: 4}
%Table{north_boundary: 4, east_boundary: 4}
iex> Simulation.place(table, %{north: 0, east: 0, facing: :north})
{:ok, %Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}}
With invalid position:
iex> alias ToyRobot.{Table, Simulation}
[ToyRobot.Table, ToyRobot.Simulation]
iex> table = %Table{north_boundary: 4, east_boundary: 4}
%Table{north_boundary: 4, east_boundary: 4}
iex> Simulation.place(table, %{north: 6, east: 0, facing: :north})
{:error, :invalid_position}
"""
def place(table, placement) do
if Table.valid_position?(table, placement) do
{:ok, %Simulation{table: table, robot: struct(Robot, placement)}}
else
{:error, :invalid_position}
end
end
@doc """
Turn the robot left
## Examples
iex> alias ToyRobot.{Robot, Table, Simulation}
[ToyRobot.Robot, ToyRobot.Table, ToyRobot.Simulation]
iex> table = %Table{north_boundary: 4, east_boundary: 4}
%Table{north_boundary: 4, east_boundary: 4}
iex> simulation = %Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}
%Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}
iex> Simulation.turn_left(simulation)
{:ok, %Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :west}}}
"""
def turn_left(%Simulation{robot: robot} = simulation) do
{:ok, %{simulation | robot: Robot.turn_left(robot)}}
end
@doc """
Turn the robot right
## Examples
iex> alias ToyRobot.{Robot, Table, Simulation}
[ToyRobot.Robot, ToyRobot.Table, ToyRobot.Simulation]
iex> table = %Table{north_boundary: 4, east_boundary: 4}
%Table{north_boundary: 4, east_boundary: 4}
iex> simulation = %Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}
%Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}
iex> Simulation.turn_right(simulation)
{:ok, %Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :east}}}
"""
def turn_right(%Simulation{robot: robot} = simulation) do
{:ok, %{simulation | robot: Robot.turn_right(robot)}}
end
@doc """
Turn the robot in the opposite direction
## Examples
iex> alias ToyRobot.{Robot, Table, Simulation}
[ToyRobot.Robot, ToyRobot.Table, ToyRobot.Simulation]
iex> table = %Table{north_boundary: 4, east_boundary: 4}
%Table{north_boundary: 4, east_boundary: 4}
iex> simulation = %Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}
%Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :north}}
iex> Simulation.uturn(simulation)
{:ok, %Simulation{table: table, robot: %Robot{north: 0, east: 0, facing: :south}}}
"""
def uturn(%Simulation{robot: robot} = simulation) do
{:ok, %{simulation | robot: Robot.uturn(robot)}}
end
end
|
lib/toy_robot/simulation.ex
| 0.913953
| 0.770465
|
simulation.ex
|
starcoder
|
defmodule Cadet.Test.Seeds do
@moduledoc """
This module contains functions that seed more complex setups into the DB for tests.
"""
import Cadet.Factory
@doc """
This sets up the common assessments environment by inserting relevant entries into the DB.
Returns a map of the following format:
%{
  courses: %{
    course1: course1,
    course2: course2
  },
  course_regs: %{
    avenger1_cr: avenger1_cr,
    group: group,
    students: students,
    admin1_cr: admin1_cr
  },
  role_crs: %{
    staff: avenger1_cr,
    student: student1a_cr,
    admin: admin1_cr
  },
  assessment_configs: assessment_configs,
  assessments: %{
    "mission" => %{
      assessment: assessment,
      programming_questions: programming_questions,
      mcq_questions: mcq_questions,
      voting_questions: voting_questions,
      submissions: submissions,
      programming_answers: programming_answers,
      mcq_answers: mcq_answers,
      voting_answers: voting_answers
    },
    "path" => ...,
    "practical" => ...
  }
}
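A minimal usage sketch (illustrative; the "mission" key assumes the
config types seeded below):
    %{assessments: %{"mission" => mission}} = Cadet.Test.Seeds.assessments()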
"""
def assessments do
if Cadet.Env.env() == :test do
# Course
course1 = insert(:course)
course2 = insert(:course, %{course_name: "Algorithm", course_short_name: "CS2040S"})
# Users
avenger1 = insert(:user, %{name: "avenger", latest_viewed_course: course1})
admin1 = insert(:user, %{name: "admin", latest_viewed_course: course1})
studenta1admin2 = insert(:user, %{name: "student a", latest_viewed_course: course1})
studentb1 = insert(:user, %{latest_viewed_course: course1})
studentc1 = insert(:user, %{latest_viewed_course: course1})
# CourseRegistration and Group
avenger1_cr = insert(:course_registration, %{user: avenger1, course: course1, role: :staff})
admin1_cr = insert(:course_registration, %{user: admin1, course: course1, role: :admin})
group = insert(:group, %{leader: avenger1_cr})
student1a_cr =
insert(:course_registration, %{
user: studenta1admin2,
course: course1,
role: :student,
group: group
})
student1b_cr =
insert(:course_registration, %{
user: studentb1,
course: course1,
role: :student,
group: group
})
student1c_cr =
insert(:course_registration, %{
user: studentc1,
course: course1,
role: :student,
group: group
})
students = [student1a_cr, student1b_cr, student1c_cr]
_admin2cr =
insert(:course_registration, %{user: studenta1admin2, course: course2, role: :admin})
assessment_configs = [
insert(:assessment_config, %{course: course1, order: 1, type: "mission"}),
insert(:assessment_config, %{course: course1, order: 2}),
insert(:assessment_config, %{
course: course1,
order: 3,
show_grading_summary: false,
is_manually_graded: false,
type: "path"
}),
insert(:assessment_config, %{course: course1, order: 4}),
insert(:assessment_config, %{
course: course1,
order: 5,
type: "practical"
})
]
# 1..5 |> Enum.map(&insert(:assessment_config, %{course: course1, order: &1}))
assessments =
assessment_configs
|> Enum.reduce(
%{},
fn config, acc ->
Map.put(acc, config.type, insert_assessments(config, students, course1))
end
)
%{
courses: %{
course1: course1,
course2: course2
},
course_regs: %{
avenger1_cr: avenger1_cr,
group: group,
students: students,
admin1_cr: admin1_cr
},
role_crs: %{
staff: avenger1_cr,
student: student1a_cr,
admin: admin1_cr
},
assessment_configs: assessment_configs,
assessments: assessments
}
end
end
defp insert_assessments(assessment_config, students, course) do
assessment =
insert(:assessment, %{course: course, config: assessment_config, is_published: true})
contest_assessment =
insert(:assessment, %{course: course, config: assessment_config, is_published: true})
programming_questions =
Enum.map(1..3, fn id ->
insert(:programming_question, %{
display_order: id,
assessment: assessment,
max_xp: 1000,
show_solution: assessment.config.type == "path"
})
end)
mcq_questions =
Enum.map(4..6, fn id ->
insert(:mcq_question, %{
display_order: id,
assessment: assessment,
max_xp: 500,
show_solution: assessment.config.type == "path"
})
end)
voting_questions =
Enum.map(7..9, fn id ->
insert(:voting_question, %{
display_order: id,
assessment: assessment,
max_xp: 100,
show_solution: assessment.config.type == "path",
question: build(:voting_question_content, contest_number: contest_assessment.number)
})
end)
submissions =
students
|> Enum.take(2)
|> Enum.map(&insert(:submission, %{assessment: assessment, student: &1}))
# Programming Answers
programming_answers =
Enum.map(submissions, fn submission ->
Enum.map(programming_questions, fn question ->
insert(:answer, %{
xp: 800,
question: question,
submission: submission,
answer: build(:programming_answer)
})
end)
end)
mcq_answers =
Enum.map(submissions, fn submission ->
Enum.map(mcq_questions, fn question ->
insert(:answer, %{
xp: 500,
question: question,
submission: submission,
answer: build(:mcq_answer)
})
end)
end)
voting_answers =
Enum.map(submissions, fn submission ->
Enum.map(voting_questions, fn question ->
insert(:answer, %{
xp: 100,
question: question,
submission: submission,
answer: build(:voting_answer)
})
end)
end)
%{
assessment: assessment,
programming_questions: programming_questions,
mcq_questions: mcq_questions,
voting_questions: voting_questions,
submissions: submissions,
programming_answers: programming_answers,
mcq_answers: mcq_answers,
voting_answers: voting_answers
}
end
end
|
test/support/seeds.ex
| 0.672869
| 0.426441
|
seeds.ex
|
starcoder
|
defmodule EXNN.Sensor do
@moduledoc """
_Sensor server metamodule to be used within your implementation_
#### Modules using EXNN.Sensor are turned into Sensor servers.
Sensor modules *MUST* implement either
a `sense/2` function emitting a tuple containing scalar impulses
of length compatible with the configured dimension,
or a `sync/2` function which returns sensor.
Both functions take (sensor, {origin, :sync}) as arguments.
A sensor has a forward(sensor, value) function available.
In case we want to change a sensor's state during sync, we
can override a `before_synch(state)` function in case we
don't overridde the sync function.
They share the underlying genome as state, which can
be merged with custom attributes and default values
passign a state option to the use macro.
A sensor receives or propagates a signal from the outside world
and broadcasts it to the neuron of the front layer.
## State Attributes
- id: primary id
- outs: neuron of the first layer
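## Example
A minimal sketch of a sensor implementation (the module name and the
impulse value are illustrative, assuming a one-dimensional sensor):
    defmodule ConstantSensor do
      use EXNN.Sensor
      # sense/2 must return a tuple of scalar impulses
      def sense(_sensor, {_origin, :sync}), do: {1.0}
    end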
"""
defmacro __using__(options \\ []) do
caller = __CALLER__.module
quote location: :keep do
require Logger
use EXNN.NodeServer
defstruct unquote(options)
|> Keyword.get(:state, [])
|> Keyword.merge([id: nil, outs: []])
@doc "#sense must be implemented in the sensor implementation"
def sync(sensor, metadata) do
sensor = before_sync(sensor)
forward(sensor, sense(sensor, metadata))
end
def sense(_state, _metadata) do
raise "NotImplementedError"
end
def forward(sensor, value) do
spread_value = format_impulse(sensor, value)
cast_out = fn(out_id) ->
EXNN.NodeServer.forward(out_id, spread_value, [{sensor.id, value}])
end
sensor.outs |> Enum.each(cast_out)
Logger.debug "[EXNN.Sensor] - fanned out #{inspect value} (#{inspect spread_value}) from #{sensor.id} to #{inspect sensor.outs}"
sensor
end
def before_sync(sensor), do: sensor
@doc "value must be an enumerable compatible with the
dimension of the sensor"
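# For example (illustrative): a sensor with id :s1 and value {0.5, 0.7}
# yields [s1_2: 0.7, s1_1: 0.5]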
def format_impulse(sensor, tuple) do
sensor_id = sensor.id
iterator = fn(val, {list, index})->
step = {:"#{sensor_id}_#{index}", val}
{[step | list], index + 1}
end
{list, _num} = tuple
|> Tuple.to_list
|> List.foldl({[], 1}, iterator)
list
end
defimpl EXNN.Connection do
def signal(sensor, :sync, metadata) do
unquote(caller).sync(sensor, metadata)
end
end
defoverridable [before_sync: 1, sync: 2, sense: 2]
end
end
end
|
lib/exnn/sensor.ex
| 0.892824
| 0.6306
|
sensor.ex
|
starcoder
|
defmodule Cldr.Time.Interval.Backend do
@moduledoc false
def define_time_interval_module(config) do
backend = config.backend
config = Macro.escape(config)
quote location: :keep, bind_quoted: [config: config, backend: backend] do
defmodule Time.Interval do
@moduledoc """
Interval formats allow for software to format intervals like "Jan 10-12, 2008" as a
shorter and more natural format than "Jan 10, 2008 - Jan 12, 2008". They are designed
to take a start and end date, time or datetime plus a formatting pattern
and use that information to produce a localized format.
See `Cldr.Interval.to_string/3` and `Cldr.Time.Interval.to_string/3`
"""
@doc """
Returns a string representing the formatted
interval formed by two times.
## Arguments
* `from` is any map that conforms to the
`Calendar.time` type.
* `to` is any map that conforms to the
`Calendar.time` type. `to` must occur
on or after `from`.
* `options` is a keyword list of options. The default is `[]`.
## Options
* `:format` is one of `:short`, `:medium` or `:long` or a
specific format type or a string representation of an interval
format. The default is `:medium`.
* `:style` supports different formatting styles. The
alternatives are `:time`, `:zone`,
and `:flex`. The default is `:time`.
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct. The default is `#{backend}.get_locale/0`
* `number_system:` a number system into which the formatted date digits should
be transliterated
## Returns
* `{:ok, string}` or
* `{:error, {exception, reason}}`
## Notes
* For more information on interval format string
see `Cldr.Interval`.
* The available predefined formats that can be applied are the
keys of the map returned by `Cldr.DateTime.Format.interval_formats("en", :gregorian)`
where `"en"` can be replaced by any configured locale name and `:gregorian`
is the underlying `CLDR` calendar type.
* In the case where `from` and `to` are equal, a single
time is formatted instead of an interval
## Examples
iex> #{inspect(__MODULE__)}.to_string ~T[10:00:00], ~T[10:03:00], format: :short
{:ok, "10 – 10"}
iex> #{inspect(__MODULE__)}.to_string ~T[10:00:00], ~T[10:03:00], format: :medium
{:ok, "10:00 – 10:03"}
iex> #{inspect(__MODULE__)}.to_string ~T[10:00:00], ~T[10:03:00], format: :long
{:ok, "10:00 – 10:03"}
iex> #{inspect(__MODULE__)}.to_string ~T[10:00:00], ~T[10:03:00],
...> format: :long, style: :flex
{:ok, "10:00 – 10:03 in the morning"}
iex> #{inspect(__MODULE__)}.to_string ~U[2020-01-01 00:00:00.0Z], ~U[2020-01-01 10:00:00.0Z],
...> format: :long, style: :flex
{:ok, "12:00 – 10:00 in the morning"}
iex> #{inspect(__MODULE__)}.to_string ~U[2020-01-01 00:00:00.0Z], ~U[2020-01-01 10:00:00.0Z],
...> format: :long, style: :zone
{:ok, "00:00 – 10:00 Etc/UTC"}
iex> #{inspect(__MODULE__)}.to_string ~T[10:00:00], ~T[10:03:00],
...> format: :long, style: :flex, locale: "th"
{:ok, "10:00 – 10:03 ในตอนเช้า"}
"""
@spec to_string(Elixir.Calendar.time, Elixir.Calendar.time, Keyword.t) ::
{:ok, String.t} | {:error, {module, String.t}}
def to_string(from, to, options \\ []) do
locale = unquote(backend).get_locale
options = Keyword.put_new(options, :locale, locale)
Cldr.Time.Interval.to_string(from, to, unquote(backend), options)
end
@doc """
Returns a string representing the formatted
interval formed by two times or raises an
exception.
## Arguments
* `from` is any map that conforms to the
`Calendar.time` type.
* `to` is any map that conforms to the
`Calendar.time` type. `to` must occur
on or after `from`.
* `options` is a keyword list of options. The default is `[]`.
## Options
* `:format` is one of `:short`, `:medium` or `:long` or a
specific format type or a string representation of an interval
format. The default is `:medium`.
* `:style` supports different formatting styles. The
alternatives are `:time`, `:zone`,
and `:flex`. The default is `:time`.
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct. The default is `#{backend}.get_locale/0`
* `number_system:` a number system into which the formatted date digits should
be transliterated
## Returns
* `string` or
* raises an exception
## Notes
* For more information on interval format string
see `Cldr.Interval`.
* The available predefined formats that can be applied are the
keys of the map returned by `Cldr.DateTime.Format.interval_formats("en", :gregorian)`
where `"en"` can be replaced by any configured locale name and `:gregorian`
is the underlying `CLDR` calendar type.
* In the case where `from` and `to` are equal, a single
time is formatted instead of an interval
## Examples
iex> #{inspect(__MODULE__)}.to_string! ~T[10:00:00], ~T[10:03:00], format: :short
"10 – 10"
iex> #{inspect(__MODULE__)}.to_string! ~T[10:00:00], ~T[10:03:00], format: :medium
"10:00 – 10:03"
iex> #{inspect(__MODULE__)}.to_string! ~T[10:00:00], ~T[10:03:00], format: :long
"10:00 – 10:03"
iex> #{inspect(__MODULE__)}.to_string! ~T[10:00:00], ~T[10:03:00],
...> format: :long, style: :flex
"10:00 – 10:03 in the morning"
iex> #{inspect(__MODULE__)}.to_string! ~U[2020-01-01 00:00:00.0Z], ~U[2020-01-01 10:00:00.0Z],
...> format: :long, style: :flex
"12:00 – 10:00 in the morning"
iex> #{inspect(__MODULE__)}.to_string! ~U[2020-01-01 00:00:00.0Z], ~U[2020-01-01 10:00:00.0Z],
...> format: :long, style: :zone
"00:00 – 10:00 Etc/UTC"
iex> #{inspect(__MODULE__)}.to_string! ~T[10:00:00], ~T[10:03:00],
...> format: :long, style: :flex, locale: "th"
"10:00 – 10:03 ในตอนเช้า"
"""
@spec to_string!(Elixir.Calendar.time, Elixir.Calendar.time, Keyword.t) ::
String.t | no_return()
def to_string!(from, to, options \\ []) do
locale = unquote(backend).get_locale
options = Keyword.put_new(options, :locale, locale)
Cldr.Time.Interval.to_string!(from, to, unquote(backend), options)
end
end
end
end
end
|
lib/cldr/backend/interval/time.ex
| 0.918911
| 0.642671
|
time.ex
|
starcoder
|
defmodule Geocoder.Providers.OpenCageData do
use HTTPoison.Base
use Towel
@endpoint "http://api.opencagedata.com/"
@path "geocode/v1/json"
def geocode(opts) do
request(@path, opts |> extract_opts())
|> fmap(&parse_geocode/1)
end
def geocode_list(opts) do
request_all(@path, opts |> extract_opts())
|> fmap(fn(r) -> Enum.map(r, &parse_geocode/1) end)
end
def reverse_geocode(opts) do
request(@path, opts |> extract_opts())
|> fmap(&parse_reverse_geocode/1)
end
def reverse_geocode_list(opts) do
request_all(@path, opts |> extract_opts())
|> fmap(fn(r) -> Enum.map(r, &parse_reverse_geocode/1) end)
end
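# For example (illustrative inputs; only recognized keys survive):
#   extract_opts(address: "Berlin")    #=> [q: "Berlin"]
#   extract_opts(latlng: {52.5, 13.4}) #=> [q: "52.5,13.4"]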
defp extract_opts(opts) do
opts
|> Keyword.put(:q, case opts |> Keyword.take([:address, :latlng]) |> Keyword.values do
[{lat, lon}] -> "#{lat},#{lon}"
[query] -> query
_ -> nil
end)
|> Keyword.take([:q, :key, :bounds, :language, :add_request, :countrycode, :jsonp,
:limit, :min_confidence, :no_annotations, :no_dedupe, :pretty])
end
defp parse_geocode(response) do
coords = geocode_coords(response)
bounds = geocode_bounds(response)
location = geocode_location(response)
%{coords | bounds: bounds, location: location}
end
defp parse_reverse_geocode(response) do
coords = geocode_coords(response)
location = geocode_location(response)
%{coords | location: location}
end
defp geocode_coords(%{"geometry" => coords}) do
%{"lat" => lat, "lng" => lon} = coords
%Geocoder.Coords{lat: lat, lon: lon}
end
defp geocode_bounds(%{"bounds" => bounds}) do
%{"northeast" => %{"lat" => north, "lng" => east},
"southwest" => %{"lat" => south, "lng" => west}} = bounds
%Geocoder.Bounds{top: north, right: east, bottom: south, left: west}
end
defp geocode_bounds(_), do: %Geocoder.Bounds{}
@map %{
"house_number" => :street_number,
"road" => :street,
"city" => :city,
"state" => :state,
"county" => :county,
"postcode" => :postal_code,
"country" => :country,
"country_code" => :country_code
}
defp geocode_location(%{"components" => components, "formatted" => formatted_address}) do
reduce = fn {type, name}, location ->
Map.put(location, Map.get(@map, type), name)
end
location = %Geocoder.Location{formatted_address: formatted_address}
components
|> Enum.reduce(location, reduce)
|> Map.drop([nil])
end
defp request_all(path, params) do
httpoison_options = Application.get_env(:geocoder, Geocoder.Worker)[:httpoison_options] || []
get(path, [], Keyword.merge(httpoison_options, params: Enum.into(params, %{})))
|> fmap(&Map.get(&1, :body))
|> fmap(&Map.get(&1, "results"))
end
defp request(path, params) do
request_all(path, params)
|> fmap(&List.first/1)
end
defp process_url(url) do
@endpoint <> url
end
defp process_response_body(body) do
body |> Poison.decode!
end
end
|
lib/geocoder/providers/open_cage_data.ex
| 0.613237
| 0.512632
|
open_cage_data.ex
|
starcoder
|
defmodule Commanded.Aggregates.AggregateLifespan do
@moduledoc """
The `Commanded.Aggregates.AggregateLifespan` behaviour is used to control an
aggregate lifespan.
By default an aggregate instance process will run indefinitely once started.
You can control this by implementing the
`Commanded.Aggregates.AggregateLifespan` behaviour in a module and configuring
it in your router.
After a command successfully executes, and creates at least one domain event,
the `c:after_event/1` function is called passing the last created event. The
returned inactivity timeout value is used to shutdown the aggregate process if
no other messages are received.
## Supported return values
- Non-negative integer - specify an inactivity timeout, in milliseconds.
- `:infinity` - prevent the aggregate instance from shutting down.
- `:hibernate` - send the process into hibernation.
- `:stop` - immediately shutdown the aggregate process.
### Hibernation
A hibernated process will continue its loop once a message is in its message
queue. Hibernating an aggregate causes garbage collection and minimises the
memory used by the process. Hibernating should not be used aggressively as too
much time could be spent garbage collecting.
## Example
Define a module that implements the `Commanded.Aggregates.AggregateLifespan`
behaviour:
defmodule BankAccountLifespan do
@behaviour Commanded.Aggregates.AggregateLifespan
def after_event(%BankAccountOpened{}), do: :infinity
def after_event(%MoneyDeposited{}), do: 60_000
def after_event(%BankAccountClosed{}), do: :stop
end
Then specify the module as the `lifespan` option when registering
the applicable commands in your router:
defmodule BankRouter do
use Commanded.Commands.Router
dispatch [OpenAccount, CloseAccount],
to: BankAccount,
lifespan: BankAccountLifespan,
identity: :account_number
end
"""
@doc """
Aggregate process will be stopped after specified inactivity timeout unless
`:infinity`, `:hibernate`, or `:stop` are returned.
"""
@callback after_event(event :: struct()) :: timeout() | :hibernate | :stop
end
|
lib/commanded/aggregates/aggregate_lifespan.ex
| 0.847179
| 0.548674
|
aggregate_lifespan.ex
|
starcoder
|
defmodule Wand.Test.Helpers.System do
import Mox
def stub_update_deps() do
message = "Resolving Hex dependencies"
expect(Wand.SystemMock, :cmd, fn "mix", ["deps.get"], _opts -> {message, 0} end)
end
def stub_failed_update_deps() do
message =
"Could not find a Mix.Project, please ensure you are running Mix in a directory with a mix.exs file"
expect(Wand.SystemMock, :cmd, fn "mix", ["deps.get"], _opts -> {message, 1} end)
end
def stub_cleanup_deps() do
expect(Wand.SystemMock, :cmd, fn "mix", ["deps.unlock", "--unused"], _opts -> {"", 0} end)
end
def stub_failed_cleanup_deps() do
message = "** (CompileError) mix.lock:2"
expect(Wand.SystemMock, :cmd, fn "mix", ["deps.unlock", "--unused"], _opts -> {message, 1} end)
end
def stub_get_deps() do
[
["earmark", "~> 1.2"],
["mox", "~> 0.3.2", [["only", ":test"]]],
["ex_doc", ">= 0.0.0", [["only", ":dev"]]]
]
|> stub_get_deps()
end
def stub_get_deps(deps) do
message = WandCore.Poison.encode!(deps)
expect(Wand.SystemMock, :cmd, fn "mix", ["wand_core.init"], _opts -> {message, 0} end)
end
def stub_failed_get_deps() do
expect(Wand.SystemMock, :cmd, fn "mix", ["wand_core.init"], _opts -> {"", 1} end)
end
def stub_get_bad_deps() do
message =
[["mox", "~> 0.3.2", [["only", "test"]]], ["oops", "ex_doc", ">= 0.0.0", [["only", "dev"]]]]
|> WandCore.Poison.encode!()
expect(Wand.SystemMock, :cmd, fn "mix", ["wand_core.init"], _opts -> {message, 0} end)
end
def stub_outdated() do
message = "A green version"
expect(Wand.SystemMock, :cmd, fn "mix", ["hex.outdated"], _opts -> {message, 0} end)
end
def stub_core_version(), do: stub_core_version(Wand.version())
def stub_core_version(version) do
message = "#{version}\n"
expect(Wand.SystemMock, :cmd, fn "mix", ["wand_core.version"], _opts -> {message, 0} end)
end
def stub_core_version_missing() do
message = "** (Mix) The task"
expect(Wand.SystemMock, :cmd, fn "mix", ["wand_core.version"], _opts -> {message, 1} end)
end
def stub_install_core() do
message = "Resolving Hex dependencies"
expect(Wand.SystemMock, :cmd, fn "mix",
["archive.install", "hex", "wand_core", "--force"],
_opts ->
{message, 0}
end)
end
def stub_failed_install_core() do
message = "Elixir.Mix.Local.Installer.Fetch"
expect(Wand.SystemMock, :cmd, fn "mix",
["archive.install", "hex", "wand_core", "--force"],
_opts ->
{message, 1}
end)
end
end
|
test/support/system.ex
| 0.660391
| 0.474509
|
system.ex
|
starcoder
|
defmodule OLEDVirtual.MultiDisplay do
@moduledoc """
Multi display to invoke several displays simultaneously.
It supports the same functions as `OLEDVirtual.Display` and `OLED.Display`.
The only difference is that the functions always return a list of `{display, result}` tuples,
where `display` is the display module like `MyApp.MyDisplay` and `result` is the function result.
The display functions are invoked simultaneously using `Task.async/1`, so they do not block each other.
When used, the multi display expects an `:app` as option.
The `:app` should be the app that has the configuration.
## Example
defmodule MyApp.MyMultiDisplay do
use OLEDVirtual.MultiDisplay, app: :my_app
end
Could be configured with:
config :my_app, MyApp.MyMultiDisplay,
displays: [
MyApp.OledVirtual,
MyAppFirmware.Oled,
]
And then used like this:
MyApp.MyMultiDisplay.rect(0, 0, 127, 63)
MyApp.MyMultiDisplay.display()
See `OLED.Display` for all draw and display functions.
## Telemetry Events
Each function call on each display emits a telemetry event.
- `[:oled_virtual, :multi_display, <function_name>]`
Where `<function_name>` is the invoked function name as an atom, e.g. `:display`
The event contains the following measurements:
- `:duration` - The duration of the function call in milliseconds
The event contains the following metadata:
- `:display` - The display module
- `:multi_display` - The multi display module
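A minimal sketch of attaching a handler for the `:display` event (the
handler id and the logging logic are illustrative):
    :telemetry.attach(
      "log-multi-display",
      [:oled_virtual, :multi_display, :display],
      fn _event_name, measurements, metadata, _config ->
        # measurements.duration is the call duration in milliseconds
        IO.inspect({metadata.display, measurements.duration}, label: "display call")
      end,
      nil
    )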
"""
defmacro __using__(opts) do
quote bind_quoted: [opts: opts, moduledoc: @moduledoc] do
@moduledoc moduledoc
|> String.replace(
~r/MyApp\.MyMultiDisplay/,
Enum.join(Module.split(__MODULE__), ".")
)
|> String.replace(~r/:my_app/, ":#{Atom.to_string(Keyword.fetch!(opts, :app))}")
@app Keyword.fetch!(opts, :app)
@me __MODULE__
def module_config(),
do: Application.get_env(@app, @me, [])
@spec display() :: [{module(), :ok}]
def display() do
execute(:display, [])
end
@spec display_frame(data :: binary(), opts :: OLED.Display.Server.display_frame_opts()) :: [
{module(), :ok}
]
def display_frame(data, opts \\ []) do
execute(:display_frame, [data, opts])
end
@spec display_raw_frame(data :: binary(), opts :: OLED.Display.Server.display_frame_opts()) ::
[{module(), :ok}]
def display_raw_frame(data, opts \\ []) do
execute(:display_raw_frame, [data, opts])
end
@spec clear() :: [{module(), :ok}]
def clear() do
execute(:clear, [])
end
@spec clear(pixel_state :: OLED.Display.Server.pixel_state()) :: [{module(), :ok}]
def clear(pixelstate) do
execute(:clear, [pixelstate])
end
@spec put_buffer(data :: binary()) :: [{module(), :ok | {:error, term()}}]
def put_buffer(data) do
execute(:put_buffer, [data])
end
@spec get_buffer() :: [{module(), {:ok, binary()}}]
def get_buffer() do
execute(:get_buffer, [])
end
@spec put_pixel(x :: integer(), y :: integer(), opts :: OLED.Display.Server.pixel_opts()) ::
[{module(), :ok}]
def put_pixel(x, y, opts \\ []) do
execute(:put_pixel, [x, y, opts])
end
@spec line(
x1 :: integer(),
y1 :: integer(),
x2 :: integer(),
y2 :: integer(),
opts :: OLED.Display.Server.pixel_opts()
) :: [{module(), :ok}]
def line(x1, y1, x2, y2, opts \\ []) do
execute(:line, [x1, y1, x2, y2, opts])
end
@spec line_h(
x :: integer(),
y :: integer(),
width :: integer(),
opts :: OLED.Display.Server.pixel_opts()
) :: [{module(), :ok}]
def line_h(x, y, width, opts \\ []) do
execute(:line_h, [x, y, width, opts])
end
@spec line_v(
x :: integer(),
y :: integer(),
height :: integer(),
opts :: OLED.Display.Server.pixel_opts()
) :: [{module(), :ok}]
def line_v(x, y, height, opts \\ []) do
execute(:line_v, [x, y, height, opts])
end
@spec rect(
x :: integer(),
y :: integer(),
width :: integer(),
height :: integer(),
opts :: OLED.Display.Server.pixel_opts()
) :: [{module(), :ok}]
def rect(x, y, width, height, opts \\ []) do
execute(:rect, [x, y, width, height, opts])
end
@spec circle(
x0 :: integer(),
y0 :: integer(),
r :: integer(),
opts :: OLED.Display.Server.pixel_opts()
) :: [{module(), :ok}]
def circle(x0, y0, r, opts \\ []) do
execute(:circle, [x0, y0, r, opts])
end
@spec fill_rect(
x :: integer(),
y :: integer(),
width :: integer(),
height :: integer(),
opts :: OLED.Display.Server.pixel_opts()
) :: [{module(), :ok}]
def fill_rect(x, y, width, height, opts \\ []) do
execute(:fill_rect, [x, y, width, height, opts])
end
@spec get_dimensions() :: [
{module(), {:ok, width :: integer(), height :: integer()} | {:error, term()}}
]
def get_dimensions() do
execute(:get_dimensions, [])
end
defp execute(function, opts) when is_atom(function) and is_list(opts) do
displays = Keyword.get(module_config(), :displays, [])
displays
|> Enum.map(fn display ->
Task.async(fn ->
start_time = :erlang.monotonic_time()
result = apply(display, function, opts)
end_time = :erlang.monotonic_time()
duration = (end_time - start_time) / 1_000_000
Task.start(fn ->
:telemetry.execute(
[:oled_virtual, :multi_display, function],
%{duration: duration},
%{display: display, multi_display: @me}
)
end)
{display, result}
end)
end)
|> Task.await_many()
end
end
end
end
|
lib/oled_virtual/multi_display.ex
| 0.909451
| 0.479138
|
multi_display.ex
|
starcoder
|
defmodule Type.Iolist do
@moduledoc false
# this module is a PRIVATE module to segregate the challenging
# iolist logic away from the Type module.
# note that the public functions here don't conform to any of the
# function schemata implemented by any of the other modules.
import Type, only: :macros
alias Type.{Bitstring, List}
@char 0..0x10FFFF
@binary %Bitstring{size: 0, unit: 8}
@ltype Type.union([@char, @binary, iolist()])
@final Type.union([], @binary)
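# In other words (per the definitions above): an iolist element is a
# char, a binary, or a nested iolist, and an iolist tail ends in [] or a binary.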
# INTERSECTIONS
def intersection_with([]), do: []
def intersection_with(list) when is_list(list) do
Type.intersection(list, iolist())
end
def intersection_with(list = %List{}) do
# iolist is char | binary | iolist
type = [@char, @binary, iolist()]
|> Enum.map(&Type.intersection(&1, list.type))
|> Type.union
final = Type.intersection(list.final, @final)
if final == none() or type == none() do
none()
else
%List{type: type, final: final, nonempty: list.nonempty}
end
end
def intersection_with(_), do: none()
# COMPARISONS
def compare_list(list) do
case Type.compare(@ltype, list.type) do
:eq -> Type.compare(@final, list.final)
ordered -> ordered
end
end
def compare_list_inv(list) do
case compare_list(list) do
:gt -> :lt
:eq -> :eq
:lt -> :gt
end
end
# SUBTYPE
def subtype_of_iolist?(list) do
Type.subtype?(list.type, @ltype) and Type.subtype?(list.final, @final)
end
def supertype_of_iolist?(list) do
Type.subtype?(@ltype, list.type) and Type.subtype?(@final, list.final)
and not list.nonempty
end
alias Type.Message
# USABLE_AS
def usable_as_list(target = %List{nonempty: true}, meta) do
u1 = Type.usable_as(@ltype, target.type, meta)
u2 = Type.usable_as(@final, target.final, meta)
case Type.ternary_and(u1, u2) do
:ok -> {:maybe, [Message.make(iolist(), target, meta)]}
# TODO: make this report the internal error as well.
{:maybe, _} -> {:maybe, [Message.make(iolist(), target, meta)]}
{:error, _} -> {:error, Message.make(iolist(), target, meta)}
end
end
def usable_as_list(target, meta) do
case Type.usable_as(@ltype, target.type, meta) do
{:error, _} -> {:maybe, [Message.make(@ltype, target.type, meta)]}
any -> any
end
|> Type.ternary_and(Type.usable_as(@final, target.final, meta))
|> case do
:ok -> :ok
{:maybe, _} -> {:maybe, [Message.make(iolist(), target, meta)]}
{:error, _} -> {:error, Message.make(iolist(), target, meta)}
end
end
def usable_as_iolist(challenge = %{nonempty: nonempty}, meta) do
case Type.usable_as(challenge.type, @ltype, meta) do
{:error, _} when not nonempty ->
{:maybe, [Message.make(challenge.type, @ltype, meta)]}
any -> any
end
|> Type.ternary_and(Type.usable_as(challenge.final, @final, meta))
|> case do
:ok -> :ok
{:maybe, _} -> {:maybe, [Message.make(challenge, iolist(), meta)]}
{:error, _} -> {:error, Message.make(challenge, iolist(), meta)}
end
end
end
|
lib/type/iolist.ex
| 0.515864
| 0.479382
|
iolist.ex
|
starcoder
|
defmodule Train do
@moduledoc"""
Train a network to respond to the given scape.
"""
@doc"""
Default training on XOR scape. Generates genotype, then trains it x number of iterations.
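A minimal usage sketch (the iteration count is illustrative):
    Train.genotype(100)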
"""
def genotype(iterations) do
geno = Genotype.create
network(geno, iterations)
end
def network(geno_initial, iterations) do
[{:error, error_initial}, {:training_size, _training_size}] = Network.link_and_process(geno_initial)
if iterations !== 0 do
geno_perturbed = Genotype.update(geno_initial)
[{:error, error_perturbed}, {:training_size, _}] = Network.link_and_process(geno_perturbed)
IO.inspect([error_initial, error_perturbed], label: "both errors")
best = case error_initial > error_perturbed do
true -> geno_perturbed
false -> geno_initial
end
network(best, iterations - 1)
else
IO.inspect error_initial, label: "Final error"
geno_initial
end
end
def network_average(geno_initial, iterations) do
initial_fitness = average_fitness(geno_initial)
if iterations !== 0 do
perturbed_geno = Genotype.update(geno_initial)
perturbed_fitness = average_fitness(perturbed_geno)
IO.inspect([initial_fitness, perturbed_fitness], label: "both fitnesses")
best = case perturbed_fitness < initial_fitness do
true -> perturbed_geno
false -> geno_initial
end
network_average(best, iterations - 1)
else
IO.inspect(initial_fitness, label: "Final fitness")
geno_initial
end
end
@doc"""
Because the network/2 module often yields a fitness that is not better than the previous iteration, average_fitness/3
seeks to produce the most reliably fit genotype.
"""
def average_fitness(genotype) do
iterated_fitness = iterate(genotype, 5, [])
Enum.sum(iterated_fitness) / length(iterated_fitness)
end
def iterate(genotype, count, acc) do
case count == 0 do
  false ->
    [{:error, error}, {:training_size, _training_size}] = Network.link_and_process(genotype)
    iterate(genotype, count - 1, [error | acc])
  true ->
    acc
end
end
end
|
lib/train.ex
| 0.721841
| 0.519338
|
train.ex
|
starcoder
|
defmodule TypeCheck.TypeError.DefaultFormatter do
@behaviour TypeCheck.TypeError.Formatter
def format(problem_tuple, location \\ []) do
res =
do_format(problem_tuple)
|> indent() # Ensure we start with four spaces, which multi-line exception pretty-printing expects
|> indent()
location_string(location) <> res
|> String.trim()
end
defp location_string([]), do: ""
defp location_string(location) do
raw_file = location[:file]
line = location[:line]
file = String.replace_prefix(raw_file, File.cwd! <> "/", "")
"At #{file}:#{line}:\n"
end
@doc """
Transforms a `problem_tuple` into a human-readable explanation string.
C.f. `TypeCheck.TypeError.Formatter` for more information about problem tuples.
"""
@spec do_format(TypeCheck.TypeError.Formatter.problem_tuple()) :: String.t()
def do_format(problem_tuple)
def do_format({%TypeCheck.Builtin.Atom{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not an atom."
end
def do_format({%TypeCheck.Builtin.Binary{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a binary."
end
def do_format({%TypeCheck.Builtin.Bitstring{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a bitstring."
end
def do_format({%TypeCheck.Builtin.SizedBitstring{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a bitstring."
end
def do_format({s = %TypeCheck.Builtin.SizedBitstring{}, :wrong_size, _, val}) do
cond do
s.unit_size == nil ->
"`#{inspect(val, inspect_value_opts())}` has a different bit_size (#{bit_size(val)}) than expected (#{s.prefix_size})."
s.prefix_size == 0 ->
"`#{inspect(val, inspect_value_opts())}` has a different bit_size (#{bit_size(val)}) than expected (_ * #{s.unit_size})."
true ->
"`#{inspect(val, inspect_value_opts())}` has a different bit_size (#{bit_size(val)}) than expected (#{s.prefix_size} + _ * #{s.unit_size})."
end
end
def do_format({%TypeCheck.Builtin.Boolean{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a boolean."
end
def do_format({s = %TypeCheck.Builtin.FixedList{}, :not_a_list, _, val}) do
problem = "`#{inspect(val, inspect_value_opts())}` is not a list."
compound_check(val, s, problem)
end
def do_format(
{s = %TypeCheck.Builtin.FixedList{}, :different_length,
%{expected_length: expected_length}, val}
) do
problem = "`#{inspect(val, inspect_value_opts())}` has #{length(val)} elements rather than #{expected_length}."
compound_check(val, s, problem)
end
def do_format(
{s = %TypeCheck.Builtin.FixedList{}, :element_error, %{problem: problem, index: index},
val}
) do
compound_check(val, s, "at index #{index}:\n", do_format(problem))
end
def do_format({s = %TypeCheck.Builtin.FixedMap{}, :not_a_map, _, val}) do
problem = "`#{inspect(val, inspect_value_opts())}` is not a map."
compound_check(val, s, problem)
end
def do_format({s = %TypeCheck.Builtin.FixedMap{}, :missing_keys, %{keys: keys}, val}) do
keys_str =
keys
|> Enum.map(&inspect/1)
|> Enum.join(", ")
problem = "`#{inspect(val, inspect_value_opts())}` is missing the following required key(s): `#{keys_str}`."
compound_check(val, s, problem)
end
def do_format(
{s = %TypeCheck.Builtin.FixedMap{}, :value_error, %{problem: problem, key: key}, val}
) do
compound_check(val, s, "under key `#{inspect(key, inspect_type_opts())}`:\n", do_format(problem))
end
def do_format({%TypeCheck.Builtin.Float{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a float."
end
def do_format({%TypeCheck.Builtin.Function{param_types: list}, :no_match, _, val}) when is_list(list) and is_function(val) do
{:arity, arity} = Function.info(val, :arity)
"`#{inspect(val, inspect_value_opts())}` (arity #{arity}) is not a function of arity `#{length(list)}`."
end
def do_format({%TypeCheck.Builtin.Function{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a function."
end
def do_format({s = %TypeCheck.Builtin.Guarded{}, :type_failed, %{problem: problem}, val}) do
compound_check(val, s, do_format(problem))
end
def do_format({s = %TypeCheck.Builtin.Guarded{}, :guard_failed, %{bindings: bindings}, val}) do
guard_str = Inspect.Algebra.format(Inspect.Algebra.color(Macro.to_string(s.guard), :builtin_type, struct(Inspect.Opts, inspect_type_opts())), 80)
problem = """
`#{guard_str}` evaluated to false or nil.
bound values: #{inspect(bindings, inspect_type_opts())}
"""
compound_check(val, s, "type guard:\n", problem)
end
def do_format({%TypeCheck.Builtin.Integer{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not an integer."
end
def do_format({%TypeCheck.Builtin.PosInteger{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a positive integer."
end
def do_format({%TypeCheck.Builtin.NegInteger{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a negative integer."
end
def do_format({%TypeCheck.Builtin.NonNegInteger{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a non-negative integer."
end
def do_format({s = %TypeCheck.Builtin.List{}, :not_a_list, _, val}) do
compound_check(val, s, "`#{inspect(val, inspect_value_opts())}` is not a list.")
end
def do_format(
{s = %TypeCheck.Builtin.List{}, :element_error, %{problem: problem, index: index}, val}
) do
compound_check(val, s, "at index #{index}:\n", do_format(problem))
end
def do_format({%TypeCheck.Builtin.Literal{value: expected_value}, :not_same_value, %{}, val}) do
"`#{inspect(val, inspect_value_opts())}` is not the same value as `#{inspect(expected_value, inspect_type_opts())}`."
end
def do_format({s = %TypeCheck.Builtin.Map{}, :not_a_map, _, val}) do
compound_check(val, s, "`#{inspect(val, inspect_value_opts())}` is not a map.")
end
def do_format({s = %TypeCheck.Builtin.Map{}, :key_error, %{problem: problem}, val}) do
compound_check(val, s, "key error:\n", do_format(problem))
end
def do_format({s = %TypeCheck.Builtin.Map{}, :value_error, %{problem: problem, key: key}, val}) do
compound_check(val, s, "under key `#{inspect(key, inspect_type_opts())}`:\n", do_format(problem))
end
def do_format({s = %TypeCheck.Builtin.NamedType{}, :named_type, %{problem: problem}, val}) do
child_str =
indent(do_format(problem))
"""
`#{inspect(val, inspect_value_opts())}` does not match the definition of the named type `#{Inspect.Algebra.format(Inspect.Algebra.color(to_string(s.name), :named_type, struct(Inspect.Opts, inspect_type_opts())), 80)}`
which is: `#{TypeCheck.Inspect.inspect_binary(s, [show_long_named_type: true] ++ inspect_type_opts())}`. Reason:
#{child_str}
"""
# compound_check(val, s, do_format(problem))
end
def do_format({%TypeCheck.Builtin.None{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` does not match `none()` (no value matches `none()`)."
end
def do_format({%TypeCheck.Builtin.Number{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a number."
end
def do_format({s = %TypeCheck.Builtin.OneOf{}, :all_failed, %{problems: problems}, val}) do
message =
problems
|> Enum.with_index()
|> Enum.map(fn {problem, index} ->
"""
#{index})
#{indent(do_format(problem))}
"""
end)
|> Enum.join("\n")
compound_check(val, s, "all possibilities failed:\n", message)
end
def do_format({%TypeCheck.Builtin.PID{}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` is not a pid."
end
def do_format({s = %TypeCheck.Builtin.Range{}, :not_an_integer, _, val}) do
compound_check(val, s, "`#{inspect(val, inspect_value_opts())}` is not an integer.")
end
def do_format({s = %TypeCheck.Builtin.Range{range: range}, :not_in_range, _, val}) do
compound_check(val, s, "`#{inspect(val, inspect_value_opts())}` falls outside the range #{inspect(range, inspect_type_opts())}.")
end
def do_format({s = %TypeCheck.Builtin.FixedTuple{}, :not_a_tuple, _, val}) do
problem = "`#{inspect(val, inspect_value_opts())}` is not a tuple."
compound_check(val, s, problem)
end
def do_format(
{s = %TypeCheck.Builtin.FixedTuple{}, :different_size, %{expected_size: expected_size},
val}
) do
problem = "`#{inspect(val, inspect_value_opts())}` has #{tuple_size(val)} elements rather than #{expected_size}."
compound_check(val, s, problem)
end
def do_format(
{s = %TypeCheck.Builtin.FixedTuple{}, :element_error, %{problem: problem, index: index},
val}
) do
compound_check(val, s, "at index #{index}:\n", do_format(problem))
end
def do_format({s = %TypeCheck.Builtin.Tuple{}, :no_match, _, val}) do
problem = "`#{inspect(val, inspect_value_opts())}` is not a tuple."
compound_check(val, s, problem)
end
def do_format({%TypeCheck.Builtin.ImplementsProtocol{protocol: protocol_name}, :no_match, _, val}) do
"`#{inspect(val, inspect_value_opts())}` does not implement the protocol `#{protocol_name}`"
end
def do_format({s = %mod{}, :param_error, %{index: index, problem: problem}, val}) when mod in [TypeCheck.Spec, TypeCheck.Builtin.Function] do
# compound_check(val, s, "at parameter no. #{index + 1}:\n", do_format(problem))
name = Map.get(s, :name, "#Function<...>")
function_with_arity = IO.ANSI.format_fragment([:white, "#{name}/#{Enum.count(val)}", :red])
param_spec = s.param_types |> Enum.at(index) |> TypeCheck.Inspect.inspect_binary(inspect_type_opts())
arguments = val |> Enum.map(&inspect/1) |> Enum.join(", ")
raw_call = if mod == TypeCheck.Builtin.Function do
"#{name}.(#{arguments})"
else
"#{name}(#{arguments})"
end
call = IO.ANSI.format_fragment([:white, raw_call, :red])
value = Enum.at(val, index)
value_str = inspect(value, inspect_value_opts())
"""
The call to `#{function_with_arity}` failed,
because parameter no. #{index + 1} does not adhere to the spec `#{param_spec}`.
Rather, its value is: `#{value_str}`.
Details:
The call `#{call}`
does not adhere to spec `#{TypeCheck.Inspect.inspect_binary(s, inspect_type_opts())}`. Reason:
parameter no. #{index + 1}:
#{indent(indent(indent(do_format(problem))))}
"""
end
def do_format(
{s = %mod{}, :return_error, %{problem: problem, arguments: arguments}, val}
) when mod in [TypeCheck.Spec, TypeCheck.Builtin.Function] do
name = Map.get(s, :name, "#Function<...>")
function_with_arity = IO.ANSI.format_fragment([:white, "#{name}/#{Enum.count(arguments)}", :red])
result_spec = s.return_type |> TypeCheck.Inspect.inspect_binary(inspect_type_opts())
arguments_str = arguments |> Enum.map(fn val -> inspect(val, inspect_value_opts()) end) |> Enum.join(", ")
arguments_str = IO.ANSI.format_fragment([:white, arguments_str, :white])
raw_call = if mod == TypeCheck.Builtin.Function do
"#{name}.(#{arguments_str})"
else
"#{name}(#{arguments_str})"
end
call = IO.ANSI.format_fragment([:white, raw_call, :red])
val_str = inspect(val, inspect_value_opts())
"""
The call to `#{function_with_arity}` failed,
because the returned result does not adhere to the spec `#{result_spec}`.
Rather, its value is: `#{val_str}`.
Details:
The result of calling `#{call}`
does not adhere to spec `#{TypeCheck.Inspect.inspect_binary(s, inspect_type_opts())}`. Reason:
Returned result:
#{indent(indent(indent(do_format(problem))))}
"""
end
defp compound_check(val, s, child_prefix \\ nil, child_problem) do
child_str =
if child_prefix do
indent(child_prefix <> indent(child_problem))
else
indent(child_problem)
end
"""
`#{inspect(val, inspect_value_opts())}` does not check against `#{TypeCheck.Inspect.inspect_binary(s, inspect_type_opts())}`. Reason:
#{child_str}
"""
end
defp indent(str) do
String.replace(" " <> str, "\n", "\n ")
end
defp inspect_value_opts() do
# [reset_color: :red, syntax_colors: ([reset: :white] ++ TypeCheck.Inspect.default_colors())]
if IO.ANSI.enabled? do
[reset_color: :red, syntax_colors: ([reset: :red] ++ TypeCheck.Inspect.default_colors())]
else
[]
end
end
defp inspect_type_opts() do
if IO.ANSI.enabled? do
[reset_color: :red, syntax_colors: ([reset: :red] ++ TypeCheck.Inspect.default_colors())]
else
[]
end
end
end
|
lib/type_check/type_error/default_formatter.ex
| 0.797714
| 0.447883
|
default_formatter.ex
|
starcoder
|
defmodule StepFlow.WorkflowView do
use StepFlow, :view
alias StepFlow.{ArtifactView, JobView, RightView, WorkflowView}
def render("index.json", %{workflows: %{data: workflows, total: total}}) do
%{
data: render_many(workflows, WorkflowView, "workflow.json"),
total: total
}
end
def render("show.json", %{workflow: workflow}) do
%{data: render_one(workflow, WorkflowView, "workflow.json")}
end
def render("created.json", %{workflow: workflow}) do
%{data: render_one(workflow, WorkflowView, "workflow_created.json")}
end
def render("workflow.json", %{workflow: workflow}) do
result = %{
schema_version: workflow.schema_version,
id: workflow.id,
identifier: workflow.identifier,
version_major: workflow.version_major,
version_minor: workflow.version_minor,
version_micro: workflow.version_micro,
tags: workflow.tags,
reference: workflow.reference,
steps: workflow.steps,
parameters: workflow.parameters,
created_at: workflow.inserted_at
}
result =
if is_list(workflow.artifacts) do
artifacts = render_many(workflow.artifacts, ArtifactView, "artifact.json")
Map.put(result, :artifacts, artifacts)
else
result
end
result =
if is_list(workflow.jobs) do
jobs = render_many(workflow.jobs, JobView, "job.json")
Map.put(result, :jobs, jobs)
else
result
end
if is_list(workflow.rights) do
rights = render_many(workflow.rights, RightView, "right.json")
Map.put(result, :rights, rights)
else
result
end
end
def render("workflow_created.json", %{workflow: workflow}) do
%{
schema_version: workflow.schema_version,
id: workflow.id,
identifier: workflow.identifier,
version_major: workflow.version_major,
version_minor: workflow.version_minor,
version_micro: workflow.version_micro,
tags: workflow.tags,
reference: workflow.reference,
parameters: workflow.parameters,
created_at: workflow.inserted_at
}
end
end
|
lib/step_flow/view/workflow_view.ex
| 0.587707
| 0.456349
|
workflow_view.ex
|
starcoder
|
defmodule StrawHat.GraphQL.Types do
@moduledoc """
Common Absinthe Types.
## Interfaces
`straw_hat_node`
Just a simple interface that will force you to have an ID
`straw_hat_mutation_response`
Shape of the mutation response.
**Important:** read the usage guide because `payload` field is not included
due to limitations and avoiding macros.
`straw_hat_pagination`
Shape of the pagination.
**Important:** read the usage guide because `entries` field is not included
due to limitations and avoiding macros.
## Objects
- `straw_hat_pagination_page`
#### Errors
- `straw_hat_error_metadata`
- `straw_hat_error`
These just map `t:StrawHat.Error.t/0` and `t:StrawHat.Error.ErrorMetadata.t/0`
## Input Objects
- `straw_hat_pagination_page_input`
"""
use Absinthe.Schema.Notation
alias StrawHat.GraphQL.MetadataResolver
interface :straw_hat_node do
field(:id, non_null(:id))
end
interface :straw_hat_mutation_response do
@desc "If the mutation happened without any problem"
field(:successful, non_null(:boolean))
@desc "List of errors when the mutation failed (successful: false)"
field(:errors, list_of(:straw_hat_error))
# Super Important
# Due to limitations we can't include it here: we do not know the type
# of the response
# field(:payload, TYPE)
end
interface :straw_hat_pagination do
field(:page, non_null(:straw_hat_pagination_page))
field(:total_entries, non_null(:integer))
field(:total_pages, non_null(:integer))
# Super Important
# Due to limitations we can't include it here: we do not know the type
# of the response
# field(:entries, list_of(:TYPE))
end
input_object :straw_hat_pagination_page_input do
@desc "Number of page to load"
field(:page_number, non_null(:integer))
@desc "Size of the page"
field(:page_size, non_null(:integer))
end
object :straw_hat_error_metadata do
field :key, :string do
resolve(&MetadataResolver.key/3)
end
field :value, :string do
resolve(&MetadataResolver.value/3)
end
end
object :straw_hat_error do
field(:id, non_null(:id))
@desc "Identifier of the error"
field(:code, non_null(:string))
@desc "Categorize or group the error"
field(:type, :string)
@desc "Information relative to the error"
field(:metadata, list_of(:straw_hat_error_metadata))
end
object :straw_hat_pagination_page do
@desc "Number of page to load"
field(:page_number, non_null(:integer))
@desc "Number of page to load"
field(:page_size, non_null(:integer))
end
end
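# Usage sketch (illustrative; `:user` and `:user_mutation_response` are
# hypothetical). A consuming schema implements the interface and supplies the
# `payload` field that, as noted above, the interface cannot declare itself:
#
#     object :user_mutation_response do
#       interface(:straw_hat_mutation_response)
#       field(:successful, non_null(:boolean))
#       field(:errors, list_of(:straw_hat_error))
#       field(:payload, :user)
#     end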
|
lib/straw_hat_graphql/types.ex
| 0.818954
| 0.54698
|
types.ex
|
starcoder
|
defmodule StringConverter do
@moduledoc """
StringConverter converts a string value to the proper type [integer | float | string]
"""
@doc """
converts a string to the proper type [integer | float | string]
## Examples
iex> StringConverter.to_type("34")
34
iex> StringConverter.to_type(".3f")
0.3
iex> StringConverter.to_type("abc3")
"abc3"
"""
@spec to_type(String.t()) :: integer() | float() | String.t()
def to_type(value) when is_binary(value) do
parse_integer(Integer.parse(value), value)
end
@doc """
converts a string to an integer
## Examples
iex> StringConverter.to_integer("-25")
-25
iex> StringConverter.to_integer("2.4")
:error
"""
@spec to_integer(String.t(), any()) :: integer() | any()
def to_integer(value, error \\ :error) when is_binary(value) do
case parse_integer(value) do
x when is_integer(x) -> x
_ -> error
end
end
defp parse_integer(value) when is_binary(value),
do: parse_integer(Integer.parse(value), value, & &1)
defp parse_integer(ret, value, parse_float_func \\ &parse_float/1)
defp parse_integer({num, ""}, _value, _parse_float_func), do: num
defp parse_integer({_num, _remainder}, value, parse_float_func), do: parse_float_func.(value)
defp parse_integer(:error, value, parse_float_func), do: parse_float_func.(value)
@doc """
converts a string to a float
## Examples
iex> StringConverter.to_float("0.3f")
0.3
iex> StringConverter.to_float("3x")
:error
"""
@spec to_float(String.t(), any()) :: float() | any()
def to_float(value, error \\ :error) when is_binary(value) do
case parse_float(value) do
x when is_float(x) -> x
_ -> error
end
end
defp parse_float(value) when is_binary(value), do: parse_float(Float.parse(value), value)
defp parse_float({num, ""}, _value), do: num
defp parse_float({num, "f"}, _value), do: num
defp parse_float({num, ".f"}, _value), do: num
defp parse_float({_num, _remainder}, value), do: value
defp parse_float(:error, <<".", _rest::binary>> = value),
do: parse_float(Float.parse("0" <> value), value)
defp parse_float(:error, value), do: value
end
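# Usage sketch (illustrative), e.g. when loading loosely-typed CSV cells:
#
#     iex> Enum.map(["34", "2.5", "abc3"], &StringConverter.to_type/1)
#     [34, 2.5, "abc3"]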
|
lib/string_converter.ex
| 0.890571
| 0.573678
|
string_converter.ex
|
starcoder
|
defmodule EQRCode do
@moduledoc """
Simple QR Code Generator written in Elixir with no other dependencies.
To generate the SVG QR code:
```elixir
qr_code_content = "your_qr_code_content"
qr_code_content
|> EQRCode.encode()
|> EQRCode.svg()
```
"""
alias EQRCode.{Encode, ReedSolomon, Matrix}
@doc """
Encode the binary.
"""
@spec encode(binary) :: Matrix.t()
def encode(bin) when byte_size(bin) <= 154 do
data =
Encode.encode(bin)
|> ReedSolomon.encode()
Encode.version(bin)
|> Matrix.new()
|> Matrix.draw_finder_patterns()
|> Matrix.draw_seperators()
|> Matrix.draw_alignment_patterns()
|> Matrix.draw_timing_patterns()
|> Matrix.draw_dark_module()
|> Matrix.draw_reserved_format_areas()
|> Matrix.draw_reserved_version_areas()
|> Matrix.draw_data_with_mask(data)
|> Matrix.draw_format_areas()
|> Matrix.draw_version_areas()
|> Matrix.draw_quite_zone()
end
def encode(bin) when is_nil(bin) do
raise(ArgumentError, message: "you must pass in some input")
end
def encode(_),
do: raise(ArgumentError, message: "your input is too long. keep it under 155 bytes")
@doc """
Encode the binary with custom pattern bits. Only supports version 5.
"""
@spec encode(binary, bitstring) :: Matrix.t()
def encode(bin, bits) when byte_size(bin) <= 106 do
data =
Encode.encode(bin, bits)
|> ReedSolomon.encode()
Matrix.new(5)
|> Matrix.draw_finder_patterns()
|> Matrix.draw_seperators()
|> Matrix.draw_alignment_patterns()
|> Matrix.draw_timing_patterns()
|> Matrix.draw_dark_module()
|> Matrix.draw_reserved_format_areas()
|> Matrix.draw_data_with_mask0(data)
|> Matrix.draw_format_areas()
|> Matrix.draw_quite_zone()
end
def encode(_, _), do: IO.puts("Binary too long.")
@doc """
```elixir
qr_code_content
|> EQRCode.encode()
|> EQRCode.svg(color: "#cc6600", shape: "circle", width: 300)
```
You can specify the following attributes of the QR code:
* `color`: In hexadecimal format. The default is `#000`
* `shape`: Only `square` or `circle`. The default is `square`
* `width`: The width of the QR code in pixels. Without the width attribute, the QR code size will be dynamically generated based on the input string.
* `viewbox`: When set to `true`, the SVG element will specify its height and width using `viewBox`, instead of explicit `height` and `width` tags.
Default options are `[color: "#000", shape: "square"]`.
"""
defdelegate svg(matrix, options \\ []), to: EQRCode.SVG
end
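# Usage sketch (illustrative; "qr.svg" is an arbitrary output path):
#
#     svg = "https://example.com" |> EQRCode.encode() |> EQRCode.svg(width: 300)
#     File.write!("qr.svg", svg)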
|
lib/eqrcode.ex
| 0.901453
| 0.807119
|
eqrcode.ex
|
starcoder
|
defmodule Solana do
@moduledoc """
A library for interacting with the Solana blockchain.
"""
@typedoc "See `t:Solana.Key.t/0`"
@type key :: Solana.Key.t()
@typedoc "See `t:Solana.Key.pair/0`"
@type keypair :: Solana.Key.pair()
@doc """
See `Solana.Key.pair/0`
"""
defdelegate keypair(), to: Solana.Key, as: :pair
@doc """
Decodes or extracts a `t:Solana.Key.t/0` from a Base58-encoded string or a
`t:Solana.Key.pair/0`.
Returns `{:ok, key}` if the key is valid, or an error tuple if it's not.
"""
def pubkey(pair_or_encoded)
def pubkey({_sk, pk}), do: Solana.Key.check(pk)
defdelegate pubkey(encoded), to: Solana.Key, as: :decode
@doc """
Decodes or extracts a `t:Solana.Key.t/0` from a Base58-encoded string or a
`t:Solana.Key.pair/0`.
Throws an `ArgumentError` if it fails to retrieve the public key.
"""
def pubkey!(pair_or_encoded)
def pubkey!(pair = {_sk, _pk}) do
case pubkey(pair) do
{:ok, key} -> key
_ -> raise ArgumentError, "invalid keypair: #{inspect(pair)}"
end
end
defdelegate pubkey!(encoded), to: Solana.Key, as: :decode!
@doc """
The public key for the [Rent system
variable](https://docs.solana.com/developing/runtime-facilities/sysvars#rent).
"""
def rent(), do: pubkey!("SysvarRent111111111111111111111111111111111")
@doc """
The public key for the [RecentBlockhashes system
variable](https://docs.solana.com/developing/runtime-facilities/sysvars#recentblockhashes)
"""
def recent_blockhashes(), do: pubkey!("SysvarRecentB1ockHashes11111111111111111111")
@doc """
The public key for the [Clock system
variable](https://docs.solana.com/developing/runtime-facilities/sysvars#clock)
"""
def clock(), do: pubkey!("SysvarC1ock11111111111111111111111111111111")
@doc """
The public key for the [BPF Loader
program](https://docs.solana.com/developing/runtime-facilities/programs#bpf-loader)
"""
def bpf_loader(), do: pubkey!("BPFLoaderUpgradeab1e11111111111111111111111")
@doc false
def lamports_per_sol(), do: 1_000_000_000
end
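# Usage sketch (illustrative):
#
#     pair = Solana.keypair()
#     {:ok, public_key} = Solana.pubkey(pair)
#     ^public_key = Solana.pubkey!(pair)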
|
lib/solana.ex
| 0.844553
| 0.485234
|
solana.ex
|
starcoder
|
defmodule Flippant.Adapter.Memory do
@moduledoc """
This module provides ETS backed rule storage.
The Memory adapter is not persisted between application restarts, and
therefore is intended for testing.
"""
use GenServer
import Flippant.Rules, only: [enabled_for_actor?: 2]
@doc """
Starts the Memory adapter.
"""
@spec start_link(Keyword.t()) :: GenServer.on_start()
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
# Callbacks
def init(_opts) do
{:ok, :ets.new(:features, read_concurrency: true)}
end
def handle_cast({:add, feature}, table) do
case :ets.lookup(table, feature) do
[] -> :ets.insert(table, {feature, []})
_ -> true
end
{:noreply, table}
end
def handle_cast({:add, feature, rule}, table) do
case :ets.lookup(table, feature) do
[{_, rules}] -> :ets.insert(table, {feature, merge_rules(rule, rules)})
[] -> :ets.insert(table, {feature, [rule]})
end
{:noreply, table}
end
def handle_cast(:clear, table) do
:ets.delete_all_objects(table)
{:noreply, table}
end
def handle_cast({:remove, feature}, table) do
:ets.delete(table, feature)
{:noreply, table}
end
def handle_cast({:remove, feature, group, []}, table) do
case :ets.lookup(table, feature) do
[{_, rules}] -> :ets.insert(table, {feature, without_group(rules, group)})
_ -> true
end
{:noreply, table}
end
def handle_cast({:remove, feature, group, values}, table) do
case :ets.lookup(table, feature) do
[{_, rules}] -> :ets.insert(table, {feature, diff_rules({group, values}, rules)})
_ -> true
end
{:noreply, table}
end
def handle_cast({:rename, old_name, new_name}, table) do
with [{_, rules}] <- :ets.lookup(table, old_name),
true <- :ets.insert(table, {new_name, rules}),
true <- :ets.delete(table, old_name),
do: true
{:noreply, table}
end
def handle_cast({:restore, loaded}, table) do
for {feature, rules} <- loaded do
true = :ets.insert(table, {feature, rules})
end
{:noreply, table}
end
def handle_cast(:setup, table) do
{:noreply, table}
end
def handle_call({:breakdown, actor}, _from, table) do
fun = fn {feature, rules}, acc ->
Map.put(acc, feature, breakdown_value(rules, actor))
end
{:reply, :ets.foldl(fun, %{}, table), table}
end
def handle_call({:enabled?, feature, actor}, _from, table) do
enabled =
case :ets.lookup(table, feature) do
[{_, rules}] -> enabled_for_actor?(rules, actor)
[] -> false
end
{:reply, enabled, table}
end
def handle_call({:exists?, feature, group}, _from, table) do
exists =
case :ets.lookup(table, feature) do
[{_, rules}] -> contains_group?(rules, group)
[] -> false
end
{:reply, exists, table}
end
def handle_call({:features, group}, _from, table) do
{:reply, get_features(table, group), table}
end
# Helpers
defp breakdown_value(rules, :all) do
Enum.into(rules, %{})
end
defp breakdown_value(rules, actor) do
enabled_for_actor?(rules, actor)
end
defp contains_group?(_, :any) do
true
end
defp contains_group?(rules, group) do
Enum.any?(rules, &(elem(&1, 0) == group))
end
def change_values({group, values}, rules, fun) do
mvalues =
case Enum.find(rules, &(elem(&1, 0) == group)) do
{_, rvalues} -> fun.(rvalues, values)
_ -> values
end
List.keystore(rules, group, 0, {group, Enum.sort(mvalues)})
end
defp diff_rules(rule, rules) do
change_values(rule, rules, fn old, new -> old -- new end)
end
defp merge_rules(rule, rules) do
change_values(rule, rules, fn old, new -> Enum.uniq(new ++ old) end)
end
defp get_features(table, :all) do
table
|> :ets.tab2list()
|> Enum.map(&elem(&1, 0))
end
defp get_features(table, group) do
table
|> :ets.tab2list()
|> Enum.filter(fn {_, rules} -> Enum.any?(rules, &(elem(&1, 0) == group)) end)
|> Enum.map(&elem(&1, 0))
end
defp without_group(rules, group) do
List.keydelete(rules, group, 0)
end
end
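# Usage sketch (illustrative; in practice the adapter is driven through
# Flippant's public API rather than by these GenServer messages directly):
#
#     {:ok, _pid} = Flippant.Adapter.Memory.start_link()
#     GenServer.cast(Flippant.Adapter.Memory, {:add, "search", {"staff", []}})
#     GenServer.call(Flippant.Adapter.Memory, {:features, :all})
#     #=> ["search"]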
|
lib/flippant/adapters/memory.ex
| 0.703346
| 0.504455
|
memory.ex
|
starcoder
|
defmodule Reaper.UrlBuilder do
alias Reaper.Collections.Extractions
@moduledoc """
This module builds a URL to download a source file
"""
@doc """
Returns a string containing the URL with all query string parameters based on the given `SmartCity.Dataset`
"""
@spec build(SmartCity.Dataset.t()) :: String.t()
def build(%SmartCity.Dataset{technical: %{sourceUrl: url, sourceQueryParams: query_params}} = _dataset)
when query_params == %{},
do: build_url_path(url)
def build(%SmartCity.Dataset{technical: %{sourceUrl: url, sourceQueryParams: query_params}} = dataset) do
last_success_time = extract_last_success_time(dataset.id)
string_params =
query_params
|> evaluate_parameters(last_success_time: last_success_time)
|> URI.encode_query()
"#{build_url_path(url)}?#{string_params}"
end
@spec decode_http_extract_step(%{assigns: any, context: %{queryParams: any, url: binary}}) ::
binary
def decode_http_extract_step(%{context: %{url: url, queryParams: query_params}, assigns: assigns})
when query_params == %{} do
build_safe_url_path(url, assigns)
end
def decode_http_extract_step(%{context: %{url: url, queryParams: query_params}, assigns: assigns}) do
string_params =
query_params
|> safe_evaluate_parameters(assigns)
|> URI.encode_query()
"#{build_safe_url_path(url, assigns)}?#{string_params}"
end
def build_safe_url_path(url, bindings) do
regex = ~r"{{(.+?)}}"
Regex.replace(regex, url, fn _match, var_name ->
bindings[String.to_atom(var_name)]
end)
end
defp build_url_path(url) do
EEx.eval_string(url)
end
defp extract_last_success_time(dataset_id) do
case Extractions.get_last_fetched_timestamp!(dataset_id) do
nil -> false
time -> time
end
end
def safe_evaluate_parameters(parameters, bindings) do
Enum.map(
parameters,
&safe_evaluate_parameter(&1, bindings)
)
end
defp safe_evaluate_parameter({key, %{} = param_map}, bindings) do
evaluated_map =
Enum.map(param_map, fn param ->
safe_evaluate_parameter(param, bindings)
end)
|> Enum.into(%{})
{key, evaluated_map}
end
defp safe_evaluate_parameter({key, value}, bindings) do
regex = ~r"{{(.+?)}}"
value =
Regex.replace(regex, to_string(value), fn _match, var_name ->
bindings[String.to_atom(var_name)]
end)
{key, value}
end
defp evaluate_parameters(parameters, bindings) do
Enum.map(
parameters,
&evaluate_parameter(&1, bindings)
)
end
defp evaluate_parameter({key, value}, bindings) do
{key, EEx.eval_string(value, bindings)}
end
end
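# Usage sketch (illustrative; the `:id` binding is hypothetical):
#
#     iex> Reaper.UrlBuilder.build_safe_url_path("https://example.com/api/{{id}}", id: "123")
#     "https://example.com/api/123"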
|
apps/reaper/lib/reaper/url_builder.ex
| 0.842475
| 0.433022
|
url_builder.ex
|
starcoder
|
defmodule Ada.CLI do
@default_target_node :"<EMAIL>"
@cli_node :"cli@127.0.0.1"
@moduledoc """
Remotely control an Ada device via the Erlang distribution.
Among other things, the CLI allows to:
- control the display brightness
- manage device data (users, locations, tasks)
- manage device preferences
- run or preview tasks
- backup the database with the active backup strategy
- pull the device database to a local file
- restore the device database from a local file
By default, it will try to connect to `#{@default_target_node}`, but
all commands support a `-t` option to specify a different node name.
In addition, it relies on the release's Erlang cookie (as specified in
`rel/vm.args`) and the one used by the CLI (as specified in
`lib/ada/cli/helpers.ex`) being identical.
As an example, we can add a new user and setup a news digest about UK news, sent every day at 9am:
```
$ ./ada create_user mary <EMAIL>
Created User with ID 3
$ ./ada create_scheduled_task send_news_by_tag daily:9 --user_id 3 --tag 'uk/uk'
Created scheduled_task with ID 9
```
You can run a task (irrespective of its frequency) with:
```
$ ./ada run_scheduled_task 9
```
If you're interested in previewing its data, the CLI can render a
shell-friendly version of a task's result with:
```
$ ./ada preview_scheduled_task 9 | less -r
```
As shown in the example, you can pipe the result to `less -r` to scroll down
via keyboard in ANSI colors.
"""
use ExCLI.DSL, escript: true
alias Ada.{CLI.Helpers, CLI.Format, CRUD}
name "ada"
description """
Remotely control an Ada device via the Erlang distribution.
"""
long_description @moduledoc
command :list_users do
option :target_node, aliases: [:t]
aliases [:lsu]
description "Lists the system users"
long_description "Lists the system users"
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
target_node
|> :rpc.call(CRUD, :list, [Ada.Schema.User])
|> Format.list_users()
|> IO.puts()
end
end
command :create_user do
option :target_node, aliases: [:t]
option(:last_fm_username)
aliases [:cu]
description "Creates a new system user"
long_description "Creates a new system user"
argument(:name)
argument(:email)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
target_node
|> :rpc.call(CRUD, :create, [Ada.Schema.User, context])
|> Format.user_created()
|> IO.puts()
end
end
command :update_user do
option :target_node, aliases: [:t]
aliases [:uu]
description "Updates an existing system user"
long_description "Updates an existing system user"
argument(:id, type: :integer)
option(:last_fm_username)
option(:name)
option(:email)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
user = :rpc.call(target_node, CRUD, :find, [Ada.Schema.User, context.id])
target_node
|> :rpc.call(CRUD, :update, [Ada.Schema.User, user, context])
|> Format.user_updated()
|> IO.puts()
end
end
command :delete_user do
option :target_node, aliases: [:t]
aliases [:du]
description "Deletes a system user"
long_description "Deletes a system user"
argument(:id, type: :integer)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
user = :rpc.call(target_node, CRUD, :find, [Ada.Schema.User, context.id])
:rpc.call(target_node, CRUD, :delete, [user])
|> Format.user_deleted()
|> IO.puts()
end
end
command :brightness do
option :target_node, aliases: [:t]
aliases [:b]
description "Controls the device brightness"
long_description "Controls the device brightness"
argument(:operation)
option(:intensity, type: :integer)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
current_brightness = :rpc.call(target_node, Ada.Display, :get_brightness, [])
case context.operation do
"up" ->
:rpc.call(target_node, Ada.Display, :set_brightness, [
inc_brightness(current_brightness, 10)
])
|> Format.brightness_changed()
|> IO.puts()
"down" ->
:rpc.call(target_node, Ada.Display, :set_brightness, [
dec_brightness(current_brightness, 10)
])
|> Format.brightness_changed()
|> IO.puts()
"set" ->
:rpc.call(target_node, Ada.Display, :set_brightness, [
context.intensity
])
|> Format.brightness_changed()
|> IO.puts()
other ->
IO.puts("""
==> Unsupported option #{other}.
Valid values are:
- up
- down
- set --intensity <integer-between-0-and-255>
""")
System.halt(1)
end
end
end
command :add_current_location do
option :target_node, aliases: [:t]
aliases [:acl]
description "Adds the current location"
long_description """
Adds the current location. Requires CoreLocationCLI to be installed and it may
prompt for user authorization.
"""
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
Helpers.ensure_location_dependencies!()
location_attributes = Helpers.get_current_location_data()
target_node
|> :rpc.call(CRUD, :create, [Ada.Schema.Location, location_attributes])
|> Format.location_created()
|> IO.puts()
end
end
command :update_location do
option :target_node, aliases: [:t]
aliases [:ul]
description "Updates an existing location"
long_description "Updates an existing location"
argument(:id, type: :integer)
option(:lat)
option(:lng)
option(:name)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
location = :rpc.call(target_node, CRUD, :find, [Ada.Schema.Location, context.id])
target_node
|> :rpc.call(CRUD, :update, [Ada.Schema.Location, location, context])
|> Format.location_updated()
|> IO.puts()
end
end
command :delete_location do
option :target_node, aliases: [:t]
aliases [:dl]
description "Deletes a system location"
long_description "Deletes a system location"
argument(:id, type: :integer)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
location = :rpc.call(target_node, CRUD, :find, [Ada.Schema.Location, context.id])
:rpc.call(target_node, CRUD, :delete, [location])
|> Format.location_deleted()
|> IO.puts()
end
end
command :create_scheduled_task do
option :target_node, aliases: [:t]
aliases [:cst]
description "Creates a new scheduled task"
long_description "Creates a new scheduled task"
argument(:workflow_name)
argument(:frequency)
option(:user_id, type: :integer)
option(:location_id, type: :integer)
option(:tag)
option(:interval_in_hours)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
available_workflows = :rpc.call(target_node, Ada.Workflow.Register, :all, [])
workflow_name = parse_workflow_name(context.workflow_name, available_workflows)
params = Map.take(context, [:user_id, :location_id, :tag, :interval_in_hours])
frequency = parse_frequency(context.frequency)
attributes = %{workflow_name: workflow_name, params: params, frequency: frequency}
target_node
|> :rpc.call(CRUD, :create, [Ada.Schema.ScheduledTask, attributes])
|> Format.scheduled_task_created()
|> IO.puts()
end
end
command :update_scheduled_task do
option :target_node, aliases: [:t]
aliases [:ust]
description "Updates an existing scheduled task"
long_description "Updates an existing scheduled task"
argument(:id, type: :integer)
option(:frequency)
option(:user_id, type: :integer)
option(:location_id, type: :integer)
option(:tag)
option(:email)
option(:interval_in_hours, type: :integer)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
scheduled_task = :rpc.call(target_node, CRUD, :find, [Ada.Schema.ScheduledTask, context.id])
params =
case Map.take(context, [:user_id, :location_id, :tag, :interval_in_hours]) do
map when map_size(map) == 0 -> %{}
non_empty_params -> %{params: Map.merge(scheduled_task.params, non_empty_params)}
end
frequency =
case Map.get(context, :frequency) do
nil -> %{}
frequency_string -> %{frequency: parse_frequency(frequency_string)}
end
attributes = Map.merge(frequency, params)
target_node
|> :rpc.call(CRUD, :update, [Ada.Schema.ScheduledTask, scheduled_task, attributes])
|> Format.scheduled_task_updated()
|> IO.puts()
end
end
command :list_scheduled_tasks do
option :target_node, aliases: [:t]
aliases [:lst]
description "Lists configured scheduled task"
long_description "Lists configured scheduled tasks"
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
users = :rpc.call(target_node, CRUD, :list, [Ada.Schema.User])
locations = :rpc.call(target_node, CRUD, :list, [Ada.Schema.Location])
scheduled_tasks = :rpc.call(target_node, CRUD, :list, [Ada.Schema.ScheduledTask])
scheduled_tasks
|> Format.list_scheduled_tasks(users, locations)
|> IO.puts()
end
end
command :run_scheduled_task do
option :target_node, aliases: [:t]
aliases [:rst]
description "Runs the specified scheduled task"
long_description "Runs the specifed scheduled task"
argument(:id, type: :integer)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
scheduled_task = :rpc.call(target_node, CRUD, :find, [Ada.Schema.ScheduledTask, context.id])
target_node
|> :rpc.call(Ada.Scheduler, :run_one_sync, [scheduled_task])
|> Format.scheduled_task_result()
|> IO.puts()
end
end
command :preview_scheduled_task do
option :target_node, aliases: [:t]
aliases [:pst]
description "Previews the specified scheduled task"
long_description "Previews the specifed scheduled task"
argument(:id, type: :integer)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
scheduled_task = :rpc.call(target_node, CRUD, :find, [Ada.Schema.ScheduledTask, context.id])
target_node
|> :rpc.call(Ada.Scheduler, :preview, [scheduled_task])
|> Format.preview(scheduled_task)
|> IO.puts()
end
end
command :set_preference do
option :target_node, aliases: [:t]
description "Sets a preference on the device"
long_description "Sets a preference on the device"
argument(:preference_name)
argument(:preference_value)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
preference_name = parse_preference_name(context.preference_name)
:ok =
:rpc.call(target_node, Ada.Preferences, :set, [preference_name, context.preference_value])
IO.puts("Preference #{context.preference_name} updated to #{context.preference_value}")
end
end
command :pull_db do
option :target_node, aliases: [:t]
description "Pull a copy of the system database"
long_description "Pull a copy of the system database"
option(:target_file)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
target_file =
Map.get_lazy(context, :target_file, fn ->
now = DateTime.utc_now() |> DateTime.to_iso8601()
"ada-v1-#{now}.db"
end)
Helpers.connect!(@cli_node, target_node)
repo_config = :rpc.call(target_node, Ada.Repo, :config, [])
db_file_path = repo_config[:database]
db_file_contents = :rpc.call(target_node, File, :read!, [db_file_path])
File.write!(target_file, db_file_contents)
IO.puts("DB file written at #{target_file}")
end
end
command :push_db do
option :target_node, aliases: [:t]
description "Restore the device system database from a local copy"
long_description "Restore the device system database from a local copy"
argument(:source_file)
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
source_file_contents = File.read!(context.source_file)
repo_config = :rpc.call(target_node, Ada.Repo, :config, [])
db_file_path = repo_config[:database]
:ok = :rpc.call(target_node, File, :write!, [db_file_path, source_file_contents])
:ok = :rpc.call(target_node, Application, :stop, [:ada])
{:ok, _} = :rpc.call(target_node, Application, :ensure_all_started, [:ranch])
{:ok, _} = :rpc.call(target_node, Application, :ensure_all_started, [:ada])
IO.puts("DB file pushed")
end
end
command :backup_db do
option :target_node, aliases: [:t]
description "Backs up the device database with the configured strategy"
long_description "Backs up the device database with the configured strategy"
run context do
target_node = Map.get(context, :target_node, @default_target_node)
Helpers.connect!(@cli_node, target_node)
{:ok, path} = :rpc.call(target_node, Ada.Backup.Uploader, :save_now, [])
IO.puts("Backup file saved at #{path}")
end
end
command :fish_autocomplete do
description "Generate autocomplete rules for the Fish shell"
long_description """
Generate autocomplete rules for the Fish shell
Load with: ada fish_autocomplete | source
"""
run _context do
Ada.CLI.FishCompletion.render()
|> IO.puts()
end
end
def commands, do: @app.commands
defp inc_brightness(brightness, inc) do
if brightness + inc >= 255, do: 255, else: brightness + inc
end
defp dec_brightness(brightness, dec) do
if brightness - dec <= 1, do: 1, else: brightness - dec
end
@splitter ~r(\:|\.)
defp parse_frequency(frequency_string) do
case String.split(frequency_string, @splitter) do
["hourly", minute] ->
%{type: "hourly", minute: String.to_integer(minute)}
["daily", hour, minute] ->
%{type: "daily", hour: String.to_integer(hour), minute: String.to_integer(minute)}
["daily", hour] ->
%{type: "daily", hour: String.to_integer(hour), minute: 0}
["weekly", day_of_week, hour] ->
%{
type: "weekly",
day_of_week: String.to_integer(day_of_week),
hour: String.to_integer(hour)
}
other ->
IO.puts("""
==> Incorrectly formatted frequency value #{other}.
Allowed values are:
- hourly:10 (every hour at 10 past)
- daily:14 (every day at 2pm)
- daily:14.30 (every day at 2.30pm)
- weekly:1:15 (every monday at 3pm)
""")
System.halt(1)
end
end
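# Illustration (added; not part of the original module) of the frequency
# grammar accepted above:
#
#     parse_frequency("hourly:10")   #=> %{type: "hourly", minute: 10}
#     parse_frequency("daily:9")     #=> %{type: "daily", hour: 9, minute: 0}
#     parse_frequency("weekly:1:15") #=> %{type: "weekly", day_of_week: 1, hour: 15}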
defp parse_workflow_name(workflow_name_string, available_workflows) do
suffix_strings =
Enum.map(available_workflows, fn aw ->
[_, _, suffix] = Module.split(aw)
Macro.underscore(suffix)
end)
case workflow_name_string do
"send_last_fm_report" ->
Ada.Workflow.SendLastFmReport
"send_news_by_tag" ->
Ada.Workflow.SendNewsByTag
"send_weather_forecast" ->
Ada.Workflow.SendWeatherForecast
other ->
IO.puts("""
==> Invalid workflow name #{other}.
Valid names are: #{inspect(suffix_strings)}
""")
System.halt(1)
end
end
defp parse_preference_name(name_string) do
case name_string do
"timezone" ->
:timezone
other ->
IO.puts("""
==> Invalid preference name #{other}.
Valid names are: ["timezone"].
""")
System.halt(1)
end
end
end
|
lib/ada/cli.ex
| 0.868646
| 0.786787
|
cli.ex
|
starcoder
|
defmodule Cereal.Utils do
@doc """
Given the name of a serializer, returns a string
that represents the type of the entity being processed.
"""
def module_to_type(module) do
module
|> Module.split()
|> List.last()
|> String.replace("Serializer", "")
|> String.replace("View", "")
|> underscore()
end
@doc """
Given a set of included relations, returns a normalized list
of child relations. Implicitly filters out any relation in the normalized tree
that cannot be safely converted from a binary string to an atom.
"""
@spec normalize_includes(String.t) :: Keyword.t
def normalize_includes(include) do
include
|> String.split(",")
|> normalize_include_paths()
end
defp normalize_include_paths(paths), do: normalize_include_paths(paths, [])
defp normalize_include_paths([], normalized), do: normalized
defp normalize_include_paths([path | paths], normalized) do
normalized =
path
|> String.split(".")
|> normalize_relationship_path()
|> deep_merge_relationship_paths(normalized)
normalize_include_paths(paths, normalized)
end
defp normalize_relationship_path([]), do: []
defp normalize_relationship_path([rel | rest]) do
case string_to_atom(rel) do
nil -> []
key -> Keyword.put([], key, normalize_relationship_path(rest))
end
end
defp deep_merge_relationship_paths(left, right), do: Keyword.merge(left, right, &deep_merge_relationship_paths/3)
defp deep_merge_relationship_paths(_, left, right), do: deep_merge_relationship_paths(left, right)
@doc """
Takes a keyword list of comma separated strings keyed by serializer name and
converts the strings into lists of atoms. Implicitly removes any binary string
that cannot be safely converted to an atom.
Example:
# Input
[user: "name,id,location", comment: "type"]
# Output
[user: [:name, :id, :location], comment: [:type]]
"""
@spec build_fields_list([{atom(), String.t()}] | String.t()) :: [{atom(), [atom()]}]
def build_fields_list([{_, _} | _] = fields) do
Enum.map(fields, fn {key, fields_str} -> {key, build_fields_list(fields_str)} end)
end
def build_fields_list(fields) when is_binary(fields) do
fields
|> String.split(",")
|> Enum.map(&string_to_atom/1)
|> Enum.filter(& &1 != nil)
end
def build_fields_list(_), do: []
# Attempts to convert an arbitrary String.t() into an existing atom. If an
# exception is raised, or a non-binary is passed in, we simply return `nil`.
@spec string_to_atom(String.t()) :: atom() | nil
defp string_to_atom(str) when is_binary(str) do
try do
String.to_existing_atom(str)
rescue
_ -> nil
end
end
defp string_to_atom(atom) when is_atom(atom), do: atom
defp string_to_atom(_), do: nil
@doc false
def underscore(""), do: ""
def underscore(<<h, t :: binary>>) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<h, t, rest :: binary>>, _) when h in ?A..?Z and not (t in ?A..?Z or t == ?.) do
<<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
end
defp do_underscore(<<h, t :: binary>>, prev) when h in ?A..?Z and prev not in ?A..?Z do
<<?_, to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<?., t :: binary>>, _) do
<<?/>> <> underscore(t)
end
defp do_underscore(<<h, t :: binary>>, _) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<>>, _) do
<<>>
end
defp to_lower_char(char) when char in ?A..?Z, do: char + 32
defp to_lower_char(char), do: char
end
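# Usage sketch (illustrative; `MyApp.UserSerializer` is hypothetical, and the
# fields example relies on :name and :id already existing as atoms):
#
#     iex> Cereal.Utils.module_to_type(MyApp.UserSerializer)
#     "user"
#     iex> Cereal.Utils.build_fields_list("name,id")
#     [:name, :id]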
|
lib/cereal/utils.ex
| 0.865423
| 0.453383
|
utils.ex
|
starcoder
|
defmodule ExWareki.Data do
@moduledoc """
The Data module is a store of era data. You may not need to use this module directly.
"""
def data() do
raw_data()
|> Enum.map(fn d -> parse(d) end)
end
@doc """
str2date/1 converts a date string into a date map
## Examples
iex> ExWareki.Data.str2date("2019-09-13")
%{year: 2019, month: 9, day: 13}
iex> ExWareki.Data.str2date("-")
%{year: -1, month: -1, day: -1}
iex> ExWareki.Data.str2date("2019-03")
%{year: -1, month: -1, day: -1}
"""
def str2date(str_date) do
date_array = String.split(str_date, "-")
case length(date_array) do
3 ->
[year, month, day] = date_array
%{
year: String.to_integer(year),
month: String.to_integer(month),
day: String.to_integer(day),
}
_ ->
%{year: -1, month: -1, day: -1}
end
end
@doc """
parse/1 converts an entry of raw_data/0 into a normalized era map
## Examples
iex> ExWareki.Data.parse(%{name: "平成", yomi: "へいせい", begin_at: "1989-1-8", end_at: "2019-5-1"})
%{name: "平成", yomi: "へいせい", begin_year: 1989, begin_month: 1, begin_day: 8, end_year: 2019, end_month: 5, end_day: 1}
"""
def parse(data) do
begin_at = str2date(data.begin_at)
end_at = str2date(data.end_at)
%{
name: data.name,
yomi: data.yomi,
begin_year: begin_at.year,
begin_month: begin_at.month,
begin_day: begin_at.day,
end_year: end_at.year,
end_month: end_at.month,
end_day: end_at.day,
}
end
def raw_data() do
[
%{
name: "白雉",
yomi: "はくち",
begin_at: "650-3-22",
end_at: "654-11-24",
},
%{
name: "-",
yomi: "-",
begin_at: "654-11-24",
end_at: "686-8-14",
},
%{
name: "朱鳥",
yomi: "しゅちょう",
begin_at: "686-8-14",
end_at: "686-10-1",
},
%{
name: "-",
yomi: "-",
begin_at: "686-10-1",
end_at: "701-5-3",
},
%{
name: "大宝",
yomi: "たいほう",
begin_at: "701-5-3",
end_at: "704-6-16",
},
%{
name: "慶雲",
yomi: "けいうん",
begin_at: "704-6-16",
end_at: "708-2-7",
},
%{
name: "和銅",
yomi: "わどう",
begin_at: "708-2-7",
end_at: "715-10-3",
},
%{
name: "養老",
yomi: "ようろう",
begin_at: "717-12-24",
end_at: "724-3-3",
},
%{
name: "神亀",
yomi: "じんき",
begin_at: "724-3-3",
end_at: "729-9-2",
},
%{
name: "天平",
yomi: "てんぴょう",
begin_at: "729-9-2",
end_at: "749-5-4",
},
%{
name: "天平感宝",
yomi: "てんぴょうかんぽう",
begin_at: "749-5-4",
end_at: "749-8-19",
},
%{
name: "天平勝宝",
yomi: "てんぴょうしょうほう",
begin_at: "749-8-19",
end_at: "757-9-6",
},
%{
name: "天平宝字",
yomi: "てんぴょうほうじ",
begin_at: "757-9-6",
end_at: "765-2-1",
},
%{
name: "天平神護",
yomi: "てんぴょうじんご",
begin_at: "765-2-1",
end_at: "767-9-13",
},
%{
name: "神護景雲",
yomi: "じんごけいうん",
begin_at: "767-9-13",
end_at: "770-10-23",
},
%{
name: "宝亀",
yomi: "ほうき",
begin_at: "770-10-23",
end_at: "781-1-30",
},
%{
name: "天応",
yomi: "てんおう",
begin_at: "781-1-30",
end_at: "782-9-30",
},
%{
name: "延暦",
yomi: "えんりゃく",
begin_at: "782-9-30",
end_at: "806-6-8",
},
%{
name: "弘仁",
yomi: "こうにん",
begin_at: "810-10-20",
end_at: "824-2-8",
},
%{
name: "天長",
yomi: "てんちょう",
begin_at: "824-2-8",
end_at: "834-2-14",
},
%{
name: "承和",
yomi: "じょうわ",
begin_at: "834-2-14",
end_at: "848-7-16",
},
%{
name: "嘉祥",
yomi: "かしょう",
begin_at: "848-7-16",
end_at: "851-6-1",
},
%{
name: "仁寿",
yomi: "にんじゅ",
begin_at: "851-6-1",
end_at: "854-12-23",
},
%{
name: "斉衡",
yomi: "さいこう",
begin_at: "854-12-23",
end_at: "857-3-20",
},
%{
name: "天安",
yomi: "てんあん",
begin_at: "857-3-20",
end_at: "859-5-20",
},
%{
name: "貞観",
yomi: "じょうがん",
begin_at: "859-5-20",
end_at: "877-6-1",
},
%{
name: "元慶",
yomi: "がんぎょう",
begin_at: "877-6-1",
end_at: "885-3-11",
},
%{
name: "仁和",
yomi: "にんな",
begin_at: "885-3-11",
end_at: "889-5-30",
},
%{
name: "寛平",
yomi: "かんぴょう",
begin_at: "889-5-30",
end_at: "898-5-2",
},
%{
name: "昌泰",
yomi: "しょうたい",
begin_at: "898-5-20",
end_at: "901-8-31",
},
%{
name: "延喜",
yomi: "えんぎ",
begin_at: "901-8-31",
end_at: "923-5-29",
},
%{
name: "延長",
yomi: "えんちょう",
begin_at: "923-5-29",
end_at: "931-5-16",
},
%{
name: "承平",
yomi: "じょうへい",
begin_at: "931-5-16",
end_at: "938-6-22",
},
%{
name: "天慶",
yomi: "てんぎょう",
begin_at: "938-6-22",
end_at: "947-5-15",
},
%{
name: "天暦",
yomi: "てんりゃく",
begin_at: "947-5-15",
end_at: "957-11-21",
},
%{
name: "天徳",
yomi: "てんとく",
begin_at: "957-11-21",
end_at: "961-3-5",
},
%{
name: "応和",
yomi: "おうわ",
begin_at: "961-3-5",
end_at: "964-8-19",
},
%{
name: "康保",
yomi: "こうほう",
begin_at: "964-8-19",
end_at: "968-9-8",
},
%{
name: "安和",
yomi: "あんな",
begin_at: "968-9-8",
end_at: "970-5-3",
},
%{
name: "天禄",
yomi: "てんろく",
begin_at: "970-5-3",
end_at: "974-1-16",
},
%{
name: "天延",
yomi: "てんえん",
begin_at: "974-1-16",
end_at: "976-8-11",
},
%{
name: "貞元",
yomi: "じょうげん",
begin_at: "976-8-11",
end_at: "978-12-31",
},
%{
name: "天元",
yomi: "てんげん",
begin_at: "978-12-31",
end_at: "983-5-29",
},
%{
name: "永観",
yomi: "えいかん",
begin_at: "983-5-29",
end_at: "985-5-19",
},
%{
name: "寛和",
yomi: "かんな",
begin_at: "985-5-19",
end_at: "987-5-5",
},
%{
name: "永延",
yomi: "えいえん",
begin_at: "987-5-5",
end_at: "989-9-10",
},
%{
name: "永祚",
yomi: "えいそ",
begin_at: "989-9-10",
end_at: "990-11-26",
},
%{
name: "正暦",
yomi: "しょうりゃく",
begin_at: "990-11-26",
end_at: "995-3-25",
},
%{
name: "長徳",
yomi: "ちょうとく",
begin_at: "995-3-25",
end_at: "999-2-1",
},
%{
name: "長保",
yomi: "ちょうほう",
begin_at: "999-2-1",
end_at: "1004-8-8",
},
%{
name: "寛弘",
yomi: "かんこう",
begin_at: "1004-8-8",
end_at: "1013-2-8",
},
%{
name: "長和",
yomi: "ちょうわ",
begin_at: "1013-2-8",
end_at: "1017-5-21",
},
%{
name: "寛仁",
yomi: "かんにん",
begin_at: "1017-5-21",
end_at: "1021-3-17",
},
%{
name: "治安",
yomi: "じあん",
begin_at: "1021-3-17",
end_at: "1024-8-19",
},
%{
name: "万寿",
yomi: "まんじゅ",
begin_at: "1024-8-19",
end_at: "1028-8-18",
},
%{
name: "長元",
yomi: "ちょうげん",
begin_at: "1028-8-18",
end_at: "1037-5-9",
},
%{
name: "長暦",
yomi: "ちょうりゃく",
begin_at: "1037-5-9",
end_at: "1040-12-16",
},
%{
name: "長久",
yomi: "ちょうきゅう",
begin_at: "1040-12-16",
end_at: "1044-12-16",
},
%{
name: "寛徳",
yomi: "かんとく",
begin_at: "1044-12-16",
end_at: "1046-5-22",
},
%{
name: "永承",
yomi: "えいしょう",
begin_at: "1046-5-22",
end_at: "1053-2-2",
},
%{
name: "天喜",
yomi: "てんき",
begin_at: "1053-2-2",
end_at: "1058-9-19",
},
%{
name: "康平",
yomi: "こうへい",
begin_at: "1058-9-19",
end_at: "1065-9-4",
},
%{
name: "治暦",
yomi: "じりゃく",
begin_at: "1065-9-4",
end_at: "1069-5-6",
},
%{
name: "延久",
yomi: "えんきゅう",
begin_at: "1069-5-6",
end_at: "1074-9-16",
},
%{
name: "承保",
yomi: "じょうほう",
begin_at: "1074-9-16",
end_at: "1077-12-5",
},
%{
name: "承暦",
yomi: "じょうりゃく",
begin_at: "1077-12-5",
end_at: "1081-3-22",
},
%{
name: "永保",
yomi: "えいほう",
begin_at: "1081-3-22",
end_at: "1084-3-15",
},
%{
name: "応徳",
yomi: "おうとく",
begin_at: "1084-3-15",
end_at: "1087-5-11",
},
%{
name: "寛治",
yomi: "かんじ",
begin_at: "1087-5-11",
end_at: "1095-1-23",
},
%{
name: "嘉保",
yomi: "かほう",
begin_at: "1095-1-23",
end_at: "1097-1-3",
},
%{
name: "永長",
yomi: "えいちょう",
begin_at: "1097-1-3",
end_at: "1097-12-27",
},
%{
name: "承徳",
yomi: "じょうとく",
begin_at: "1097-12-27",
end_at: "1099-9-15",
},
%{
name: "康和",
yomi: "こうわ",
begin_at: "1099-9-15",
end_at: "1104-3-8",
},
%{
name: "長治",
yomi: "ちょうじ",
begin_at: "1104-3-8",
end_at: "1106-5-13",
},
%{
name: "嘉承",
yomi: "かしょう",
begin_at: "1106-5-13",
end_at: "1108-9-9",
},
%{
name: "天仁",
yomi: "てんにん",
begin_at: "1108-9-9",
end_at: "1110-7-31",
},
%{
name: "天永",
yomi: "てんえい",
begin_at: "1110-7-31",
end_at: "1113-8-25",
},
%{
name: "永久",
yomi: "えいきゅう",
begin_at: "1113-8-25",
end_at: "1118-4-25",
},
%{
name: "元永",
yomi: "げんえい",
begin_at: "1118-4-25",
end_at: "1120-5-9",
},
%{
name: "保安",
yomi: "ほうあん",
begin_at: "1120-5-9",
end_at: "1124-5-18",
},
%{
name: "天治",
yomi: "てんじ",
begin_at: "1124-5-18",
end_at: "1126-2-15",
},
%{
name: "大治",
yomi: "だいじ",
begin_at: "1126-2-15",
end_at: "1131-2-28",
},
%{
name: "天承",
yomi: "てんしょう",
begin_at: "1131-2-28",
end_at: "1132-9-21",
},
%{
name: "長承",
yomi: "ちょうしょう",
begin_at: "1132-9-21",
end_at: "1135-6-10",
},
%{
name: "保延",
yomi: "ほうえん",
begin_at: "1135-6-10",
end_at: "1141-8-13",
},
%{
name: "永治",
yomi: "えいじ",
begin_at: "1141-8-13",
end_at: "1142-5-25",
},
%{
name: "康治",
yomi: "こうじ",
begin_at: "1142-5-25",
end_at: "1144-3-28",
},
%{
name: "天養",
yomi: "てんよう",
begin_at: "1144-3-28",
end_at: "1145-8-12",
},
%{
name: "久安",
yomi: "きゅうあん",
begin_at: "1145-8-12",
end_at: "1151-2-14",
},
%{
name: "仁平",
yomi: "にんぺい",
begin_at: "1151-2-14",
end_at: "1154-12-4",
},
%{
name: "久寿",
yomi: "きゅうじゅ",
begin_at: "1154-12-4",
end_at: "1156-5-18",
},
%{
name: "保元",
yomi: "ほうげん",
begin_at: "1156-5-18",
end_at: "1159-5-9",
},
%{
name: "平治",
yomi: "へいじ",
begin_at: "1159-5-9",
end_at: "1160-2-18",
},
%{
name: "永暦",
yomi: "えいりゃく",
begin_at: "1160-2-18",
end_at: "1161-9-24",
},
%{
name: "応保",
yomi: "おうほう",
begin_at: "1161-9-24",
end_at: "1163-5-4",
},
%{
name: "長寛",
yomi: "ちょうかん",
begin_at: "1163-5-4",
end_at: "1165-7-14",
},
%{
name: "永万",
yomi: "えいまん",
begin_at: "1165-7-14",
end_at: "1166-9-23",
},
%{
name: "仁安",
yomi: "にんあん",
begin_at: "1166-9-23",
end_at: "1169-5-6",
},
%{
name: "嘉応",
yomi: "かおう",
begin_at: "1169-5-6",
end_at: "1171-5-27",
},
%{
name: "承安",
yomi: "しょうあん",
begin_at: "1171-5-27",
end_at: "1175-8-16",
},
%{
name: "安元",
yomi: "あんげん",
begin_at: "1175-8-16",
end_at: "1177-8-29",
},
%{
name: "治承",
yomi: "じしょう",
begin_at: "1177-8-29",
end_at: "1181-8-25",
},
%{
name: "養和",
yomi: "ようわ",
begin_at: "1181-8-25",
end_at: "1182-6-29",
},
%{
name: "寿永",
yomi: "じゅえい",
begin_at: "1182-6-29",
end_at: "1184-5-27",
},
%{
name: "元暦",
yomi: "げんりゃく",
begin_at: "1184-5-27",
end_at: "1185-9-9",
},
%{
name: "建久",
yomi: "けんきゅう",
begin_at: "1190-5-16",
end_at: "1199-5-23",
},
%{
name: "正治",
yomi: "しょうじ",
begin_at: "1199-5-23",
end_at: "1201-3-19",
},
%{
name: "建仁",
yomi: "けんにん",
begin_at: "1201-3-19",
end_at: "1204-3-23",
},
%{
name: "元久",
yomi: "げんきゅう",
begin_at: "1204-3-23",
end_at: "1206-6-5",
},
%{
name: "建永",
yomi: "けんえい",
begin_at: "1206-6-5",
end_at: "1207-11-16",
},
%{
name: "承元",
yomi: "じょうげん",
begin_at: "1207-11-16",
end_at: "1211-4-23",
},
%{
name: "建暦",
yomi: "けんりゃく",
begin_at: "1211-4-23",
end_at: "1214-1-18",
},
%{
name: "建保",
yomi: "けんぽう",
begin_at: "1214-1-18",
end_at: "1219-5-27",
},
%{
name: "承久",
yomi: "じょうきゅう",
begin_at: "1219-5-27",
end_at: "1222-5-25",
},
%{
name: "貞応",
yomi: "じょうおう",
begin_at: "1222-5-25",
end_at: "1224-12-31",
},
%{
name: "元仁",
yomi: "げんにん",
begin_at: "1224-12-31",
end_at: "1225-5-28",
},
%{
name: "嘉禄",
yomi: "かろく",
begin_at: "1225-5-28",
end_at: "1228-1-18",
},
%{
name: "安貞",
yomi: "あんてい",
begin_at: "1228-1-18",
end_at: "1229-3-31",
},
%{
name: "寛喜",
yomi: "かんき",
begin_at: "1229-3-31",
end_at: "1232-4-23",
},
%{
name: "貞永",
yomi: "じょうえい",
begin_at: "1232-4-23",
end_at: "1233-5-25",
},
%{
name: "天福",
yomi: "てんぷく",
begin_at: "1233-5-25",
end_at: "1234-11-27",
},
%{
name: "文暦",
yomi: "ぶんりゃく",
begin_at: "1234-11-27",
end_at: "1235-11-1",
},
%{
name: "嘉禎",
yomi: "かてい",
begin_at: "1235-11-1",
end_at: "1238-12-30",
},
%{
name: "暦仁",
yomi: "りゃくにん",
begin_at: "1238-12-30",
end_at: "1239-3-13",
},
%{
name: "延応",
yomi: "えんおう",
begin_at: "1239-3-13",
end_at: "1240-8-5",
},
%{
name: "仁治",
yomi: "にんじ",
begin_at: "1240-8-5",
end_at: "1243-3-18",
},
%{
name: "寛元",
yomi: "かんげん",
begin_at: "1243-3-18",
end_at: "1247-4-5",
},
%{
name: "宝治",
yomi: "ほうじ",
begin_at: "1247-4-5",
end_at: "1249-5-2",
},
%{
name: "建長",
yomi: "けんちょう",
begin_at: "1249-5-2",
end_at: "1256-10-24",
},
%{
name: "康元",
yomi: "こうげん",
begin_at: "1256-10-24",
end_at: "1257-3-31",
},
%{
name: "正嘉",
yomi: "しょうか",
begin_at: "1257-3-31",
end_at: "1259-4-20",
},
%{
name: "正元",
yomi: "しょうげん",
begin_at: "1259-4-20",
end_at: "1260-5-24",
},
%{
name: "文応",
yomi: "ぶんおう",
begin_at: "1260-5-24",
end_at: "1261-3-22",
},
%{
name: "弘長",
yomi: "こうちょう",
begin_at: "1261-3-22",
end_at: "1264-3-27",
},
%{
name: "文永",
yomi: "ぶんえい",
begin_at: "1264-3-27",
end_at: "1275-5-22",
},
%{
name: "建治",
yomi: "けんじ",
begin_at: "1275-5-22",
end_at: "1278-3-23",
},
%{
name: "弘安",
yomi: "こうあん",
begin_at: "1278-3-23",
end_at: "1288-5-29",
},
%{
name: "正応",
yomi: "しょうおう",
begin_at: "1288-5-29",
end_at: "1293-9-6",
},
%{
name: "永仁",
yomi: "えいにん",
begin_at: "1293-9-6",
end_at: "1299-5-25",
},
%{
name: "正安",
yomi: "しょうあん",
begin_at: "1299-5-25",
end_at: "1302-12-10",
},
%{
name: "乾元",
yomi: "けんげん",
begin_at: "1302-12-10",
end_at: "1303-9-16",
},
%{
name: "嘉元",
yomi: "かげん",
begin_at: "1303-9-16",
end_at: "1307-1-18",
},
%{
name: "徳治",
yomi: "とくじ",
begin_at: "1307-1-18",
end_at: "1308-11-22",
},
%{
name: "延慶",
yomi: "えんきょう",
begin_at: "1308-11-22",
end_at: "1311-5-17",
},
%{
name: "応長",
yomi: "おうちょう",
begin_at: "1311-5-17",
end_at: "1312-4-27",
},
%{
name: "正和",
yomi: "しょうわ",
begin_at: "1312-4-27",
end_at: "1317-3-16",
},
%{
name: "文保",
yomi: "ぶんぽう",
begin_at: "1317-3-16",
end_at: "1319-5-18",
},
%{
name: "元応",
yomi: "げんおう",
begin_at: "1319-5-18",
end_at: "1321-3-22",
},
%{
name: "元亨",
yomi: "げんこう",
begin_at: "1321-3-22",
end_at: "1324-12-25",
},
%{
name: "正中",
yomi: "しょうちゅう",
begin_at: "1324-12-25",
end_at: "1326-5-28",
},
%{
name: "嘉暦",
yomi: "かりゃく",
begin_at: "1326-5-28",
end_at: "1329-9-22",
},
%{
name: "元徳",
yomi: "げんとく",
begin_at: "1329-9-22",
end_at: "1332-5-23",
},
%{
name: "興国",
yomi: "こうこく",
begin_at: "1340-5-25",
end_at: "1347-1-20",
},
%{
name: "正平",
yomi: "しょうへい",
begin_at: "1347-1-20",
end_at: "1370-8-16",
},
%{
name: "建徳",
yomi: "けんとく",
begin_at: "1370-8-16",
end_at: "1372-5-1",
},
%{
name: "文中",
yomi: "ぶんちゅう",
begin_at: "1372-5-1",
end_at: "1375-6-26",
},
%{
name: "天授",
yomi: "てんじゅ",
begin_at: "1375-6-26",
end_at: "1381-3-6",
},
%{
name: "弘和",
yomi: "こうわ",
begin_at: "1381-3-6",
end_at: "1384-5-18",
},
%{
name: "元中",
yomi: "げんちゅう",
begin_at: "1384-5-18",
end_at: "1392-11-19",
},
%{
name: "暦応",
yomi: "りゃくおう",
begin_at: "1338-10-11",
end_at: "1342-6-1",
},
%{
name: "康永",
yomi: "こうえい",
begin_at: "1342-6-1",
end_at: "1345-11-15",
},
%{
name: "貞和",
yomi: "じょうわ",
begin_at: "1345-11-15",
end_at: "1350-4-4",
},
%{
name: "観応",
yomi: "かんのう",
begin_at: "1350-4-4",
end_at: "1352-11-4",
},
%{
name: "文和",
yomi: "ぶんな",
begin_at: "1352-11-4",
end_at: "1356-4-29",
},
%{
name: "延文",
yomi: "えんぶん",
begin_at: "1356-4-29",
end_at: "1361-5-4",
},
%{
name: "康安",
yomi: "こうあん",
begin_at: "1361-5-4",
end_at: "1362-10-11",
},
%{
name: "貞治",
yomi: "じょうじ",
begin_at: "1362-10-11",
end_at: "1368-3-7",
},
%{
name: "応安",
yomi: "おうあん",
begin_at: "1368-3-7",
end_at: "1375-3-29",
},
%{
name: "永和",
yomi: "えいわ",
begin_at: "1375-3-29",
end_at: "1379-4-9",
},
%{
name: "康暦",
yomi: "こうりゃく",
begin_at: "1379-4-9",
end_at: "1381-3-20",
},
%{
name: "永徳",
yomi: "えいとく",
begin_at: "1381-3-20",
end_at: "1384-3-19",
},
%{
name: "至徳",
yomi: "しとく",
begin_at: "1384-3-19",
end_at: "1387-10-5",
},
%{
name: "嘉慶",
yomi: "かきょう",
begin_at: "1387-10-5",
end_at: "1389-3-7",
},
%{
name: "康応",
yomi: "こうおう",
begin_at: "1389-3-7",
end_at: "1390-4-12",
},
%{
name: "明徳",
yomi: "めいとく",
begin_at: "1390-4-12",
end_at: "1394-8-2",
},
%{
name: "正長",
yomi: "しょうちょう",
begin_at: "1428-6-10",
end_at: "1429-10-3",
},
%{
name: "永享",
yomi: "えいきょう",
begin_at: "1429-10-3",
end_at: "1441-3-10",
},
%{
name: "嘉吉",
yomi: "かきつ",
begin_at: "1441-3-10",
end_at: "1444-2-23",
},
%{
name: "文安",
yomi: "ぶんあん",
begin_at: "1444-2-23",
end_at: "1449-8-16",
},
%{
name: "宝徳",
yomi: "ほうとく",
begin_at: "1449-8-16",
end_at: "1452-8-10",
},
%{
name: "享徳",
yomi: "きょうとく",
begin_at: "1452-8-10",
end_at: "1455-9-6",
},
%{
name: "康正",
yomi: "こうしょう",
begin_at: "1455-9-6",
end_at: "1457-10-16",
},
%{
name: "長禄",
yomi: "ちょうろく",
begin_at: "1457-10-16",
end_at: "1461-2-1",
},
%{
name: "寛正",
yomi: "かんしょう",
begin_at: "1461-2-1",
end_at: "1466-3-14",
},
%{
name: "文正",
yomi: "ぶんしょう",
begin_at: "1466-3-14",
end_at: "1467-4-9",
},
%{
name: "文明",
yomi: "ぶんめい",
begin_at: "1469-6-8",
end_at: "1487-8-9",
},
%{
name: "長享",
yomi: "ちょうきょう",
begin_at: "1487-8-9",
end_at: "1489-9-16",
},
%{
name: "延徳",
yomi: "えんとく",
begin_at: "1489-9-16",
end_at: "1492-8-12",
},
%{
name: "明応",
yomi: "めいおう",
begin_at: "1492-8-12",
end_at: "1501-3-18",
},
%{
name: "文亀",
yomi: "ぶんき",
begin_at: "1501-3-18",
end_at: "1504-3-16",
},
%{
name: "永正",
yomi: "えいしょう",
begin_at: "1504-3-16",
end_at: "1521-9-23",
},
%{
name: "大永",
yomi: "だいえい",
begin_at: "1521-9-23",
end_at: "1528-9-3",
},
%{
name: "享禄",
yomi: "きょうろく",
begin_at: "1528-9-3",
end_at: "1532-8-29",
},
%{
name: "天文",
yomi: "てんぶん",
begin_at: "1532-8-29",
end_at: "1555-11-7",
},
%{
name: "弘治",
yomi: "こうじ",
begin_at: "1555-11-7",
end_at: "1558-3-18",
},
%{
name: "永禄",
yomi: "えいろく",
begin_at: "1558-3-18",
end_at: "1570-5-27",
},
%{
name: "元亀",
yomi: "げんき",
begin_at: "1570-5-27",
end_at: "1573-8-25",
},
%{
name: "文禄",
yomi: "ぶんろく",
begin_at: "1593-1-10",
end_at: "1596-12-16",
},
%{
name: "慶長",
yomi: "けいちょう",
begin_at: "1596-12-16",
end_at: "1615-9-5",
},
%{
name: "寛永",
yomi: "かんえい",
begin_at: "1624-4-17",
end_at: "1645-1-13",
},
%{
name: "正保",
yomi: "しょうほう",
begin_at: "1645-1-13",
end_at: "1648-4-7",
},
%{
name: "慶安",
yomi: "けいあん",
begin_at: "1648-4-7",
end_at: "1652-10-20",
},
%{
name: "承応",
yomi: "じょうおう",
begin_at: "1652-10-20",
end_at: "1655-5-18",
},
%{
name: "明暦",
yomi: "めいれき",
begin_at: "1655-5-18",
end_at: "1658-8-21",
},
%{
name: "万治",
yomi: "まんじ",
begin_at: "1658-8-21",
end_at: "1661-5-23",
},
%{
name: "寛文",
yomi: "かんぶん",
begin_at: "1661-5-23",
end_at: "1673-10-30",
},
%{
name: "延宝",
yomi: "えんぽう",
begin_at: "1673-10-30",
end_at: "1681-11-9",
},
%{
name: "天和",
yomi: "てんな",
begin_at: "1681-11-9",
end_at: "1684-4-5",
},
%{
name: "貞享",
yomi: "じょうきょう",
begin_at: "1684-4-5",
end_at: "1688-10-23",
},
%{
name: "元禄",
yomi: "げんろく",
begin_at: "1688-10-23",
end_at: "1704-4-16",
},
%{
name: "宝永",
yomi: "ほうえい",
begin_at: "1704-4-16",
end_at: "1711-6-11",
},
%{
name: "正徳",
yomi: "しょうとく",
begin_at: "1711-6-11",
end_at: "1716-8-9",
},
%{
name: "享保",
yomi: "きょうほう",
begin_at: "1716-8-9",
end_at: "1736-6-7",
},
%{
name: "元文",
yomi: "げんぶん",
begin_at: "1736-6-7",
end_at: "1741-4-12",
},
%{
name: "寛保",
yomi: "かんぽう",
begin_at: "1741-4-12",
end_at: "1744-4-3",
},
%{
name: "延享",
yomi: "えんきょう",
begin_at: "1744-4-3",
end_at: "1748-8-5",
},
%{
name: "寛延",
yomi: "かんえん",
begin_at: "1748-8-5",
end_at: "1751-12-14",
},
%{
name: "宝暦",
yomi: "ほうれき",
begin_at: "1751-12-14",
end_at: "1764-6-30",
},
%{
name: "明和",
yomi: "めいわ",
begin_at: "1764-6-30",
end_at: "1772-12-10",
},
%{
name: "安永",
yomi: "あんえい",
begin_at: "1772-12-10",
end_at: "1781-4-25",
},
%{
name: "天明",
yomi: "てんめい",
begin_at: "1781-4-25",
end_at: "1789-2-19",
},
%{
name: "寛政",
yomi: "かんせい",
begin_at: "1789-2-19",
end_at: "1801-3-19",
},
%{
name: "享和",
yomi: "きょうわ",
begin_at: "1801-3-19",
end_at: "1804-3-22",
},
%{
name: "文化",
yomi: "ぶんか",
begin_at: "1804-3-22",
end_at: "1818-5-26",
},
%{
name: "文政",
yomi: "ぶんせい",
begin_at: "1818-5-26",
end_at: "1831-1-23",
},
%{
name: "天保",
yomi: "てんぽう",
begin_at: "1831-1-23",
end_at: "1845-1-9",
},
%{
name: "弘化",
yomi: "こうか",
begin_at: "1845-1-9",
end_at: "1848-4-1",
},
%{
name: "嘉永",
yomi: "かえい",
begin_at: "1848-4-1",
end_at: "1855-1-15",
},
%{
name: "安政",
yomi: "あんせい",
begin_at: "1855-1-15",
end_at: "1860-4-8",
},
%{
name: "万延",
yomi: "まんえん",
begin_at: "1860-4-8",
end_at: "1861-3-29",
},
%{
name: "文久",
yomi: "ぶんきゅう",
begin_at: "1861-3-29",
end_at: "1864-3-27",
},
%{
name: "元治",
yomi: "げんじ",
begin_at: "1864-3-27",
end_at: "1865-5-1",
},
%{
name: "慶応",
yomi: "けいおう",
begin_at: "1865-5-1",
end_at: "1868-10-23",
},
%{
name: "明治",
yomi: "めいじ",
begin_at: "1868-1-25",
end_at: "1912-7-29",
},
%{
name: "大正",
yomi: "たいしょう",
begin_at: "1912-7-30",
end_at: "1926-12-24",
},
%{
name: "昭和",
yomi: "しょうわ",
begin_at: "1926-12-25",
end_at: "1989-1-7",
},
%{
name: "平成",
yomi: "へいせい",
begin_at: "1989-1-8",
end_at: "2019-4-30",
},
%{
name: "令和",
yomi: "れいわ",
begin_at: "2019-5-1",
end_at: "",
},
]
end
end
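# Usage sketch (illustrative): find the era entry with a given name.
#
#     iex> ExWareki.Data.data() |> Enum.find(&(&1.name == "令和")) |> Map.get(:begin_year)
#     2019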
|
lib/ex_wareki/data.ex
| 0.649356
| 0.532
|
data.ex
|
starcoder
|
defmodule Phoenix.LiveDashboard.SystemInfo do
# Helpers for fetching and formatting system info.
@moduledoc false
def format_uptime(uptime) do
{d, {h, m, _s}} = :calendar.seconds_to_daystime(div(uptime, 1000))
cond do
d > 0 -> "#{d}d#{h}h#{m}m"
h > 0 -> "#{h}h#{m}m"
true -> "#{m}m"
end
end
def format_bytes(bytes) when is_integer(bytes) do
cond do
bytes >= memory_unit(:TB) -> format_bytes(bytes, :TB)
bytes >= memory_unit(:GB) -> format_bytes(bytes, :GB)
bytes >= memory_unit(:MB) -> format_bytes(bytes, :MB)
bytes >= memory_unit(:KB) -> format_bytes(bytes, :KB)
true -> format_bytes(bytes, :B)
end
end
defp format_bytes(bytes, :B) when is_integer(bytes), do: "#{bytes} B"
defp format_bytes(bytes, unit) when is_integer(bytes) do
value = bytes / memory_unit(unit)
"#{:erlang.float_to_binary(value, decimals: 1)} #{unit}"
end
defp memory_unit(:TB), do: 1024 * 1024 * 1024 * 1024
defp memory_unit(:GB), do: 1024 * 1024 * 1024
defp memory_unit(:MB), do: 1024 * 1024
defp memory_unit(:KB), do: 1024
def fetch_info(node) do
:rpc.call(node, __MODULE__, :info_callback, [])
end
def fetch_usage(node) do
:rpc.call(node, __MODULE__, :usage_callback, [])
end
@doc false
def info_callback do
%{
system_info: %{
banner: :erlang.system_info(:system_version),
elixir_version: System.version(),
phoenix_version: Application.spec(:phoenix, :vsn) || "None",
dashboard_version: Application.spec(:phoenix_live_dashboard, :vsn) || "None",
system_architecture: :erlang.system_info(:system_architecture)
},
system_limits: %{
atoms: :erlang.system_info(:atom_limit),
ports: :erlang.system_info(:port_limit),
processes: :erlang.system_info(:process_limit)
},
system_usage: usage_callback()
}
end
@doc false
def usage_callback do
%{
atoms: :erlang.system_info(:atom_count),
ports: :erlang.system_info(:port_count),
processes: :erlang.system_info(:process_count),
io: io(),
uptime: :erlang.statistics(:wall_clock) |> elem(0),
memory: memory(),
total_run_queue: :erlang.statistics(:total_run_queue_lengths_all),
cpu_run_queue: :erlang.statistics(:total_run_queue_lengths)
}
end
defp io() do
{{:input, input}, {:output, output}} = :erlang.statistics(:io)
{input, output}
end
defp memory() do
memory = :erlang.memory()
total = memory[:total]
process = memory[:processes]
atom = memory[:atom]
binary = memory[:binary]
code = memory[:code]
ets = memory[:ets]
%{
total: total,
process: process,
atom: atom,
binary: binary,
code: code,
ets: ets,
other: total - process - atom - binary - code - ets
}
end
end
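# Usage sketch (illustrative):
#
#     iex> Phoenix.LiveDashboard.SystemInfo.format_bytes(1536)
#     "1.5 KB"
#     iex> Phoenix.LiveDashboard.SystemInfo.format_uptime(90_061_000)
#     "1d1h1m"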
|
lib/phoenix/live_dashboard/system_info.ex
| 0.626696
| 0.401072
|
system_info.ex
|
starcoder
|
defmodule Basalt.Hex do
alias Basalt.Hex, as: Hex
@moduledoc """
Basalt hexagon tiles ("hexes") are represented using "Cube"
coordinates by default. Hexagons can be conceived as having
three primary axes, as in a cube sliced on a diagonal plane. Hexes are
oriented "pointy"-side up, ⬢. The other orientation is "flat", ⬣.
Cube notation utilizes "q", "r", "s" for coordinates and have a
constraint `q + r + s = 0` which is always respected.
## API
Most of the functions in this module return `:ok` or
`{:ok, result}` in case of success, `{:error, reason}`
otherwise. Those functions also have a variant
that ends with `!` which returns the result (instead of the
`{:ok, result}` tuple) in case of success or raises an
exception in case it fails. For example:
iex> Hex.create(1, 0, -1)
{:ok, %Hex{q: 1, r: 0, s: -1}}
iex> Hex.create(1, 1, 1)
{:error, "invalid coordinates, constraint q + r + s = 0"}
iex> Hex.create!(1, 0, -1)
%Hex{q: 1, r: 0, s: -1}
iex> Hex.create!(1, 1, 1)
** (ArgumentError) invalid coordinates, constraint q + r + s = 0
In general, a developer should use the former in case they want
to react if the Hex cannot be created. The latter should be used
when the developer expects their software to fail in case the
coordinates are invalid (i.e. it is literally an exception).
"""
defstruct q: 0, r: 0, s: 0
@typedoc "Hex Tile"
@opaque t :: %__MODULE__{q: integer, r: integer, s: integer}
defguardp is_valid_cube_coords(q, r, s) when q + r + s == 0
@doc """
Creates a new Hex.
Returns `{:ok, Hex}`, where `Hex` is a hexagon object that contains
coordinates "q", "r", "s". Returns `{:error, reason}` if an error occurs.
"""
@spec create(number, number, number) :: {:ok, t} | {:error, String.t()}
def create(q, r, s) when not is_valid_cube_coords(q, r, s) do
{:error, "invalid coordinates, constraint q + r + s = 0"}
end
def create(q, r, s), do: {:ok, %Hex{q: q, r: r, s: s}}
@doc """
Creates a new Hex.
Returns `Hex`, a hexagon object that contains coordinates
"q", "r", "s", or raises an `ArgumentError` exception if the
given arguments don't satisfy the constraint `q + r + s = 0`.
"""
@spec create!(number, number, number) :: t
def create!(q, r, s) do
case create(q, r, s) do
{:ok, hex} ->
hex
{:error, message} ->
raise ArgumentError, message: message
end
end
@doc """
Adds two Hexes.
Returns a new Hex.
"""
@spec add(t, t) :: t
def add(%Hex{} = a, %Hex{} = b) do
Hex.create!(a.q + b.q, a.r + b.r, a.s + b.s)
end
@doc """
Subtracts two Hexes.
Returns a new Hex.
"""
@spec subtract(t, t) :: t
def subtract(%Hex{} = a, %Hex{} = b) do
Hex.create!(a.q - b.q, a.r - b.r, a.s - b.s)
end
@doc """
Scales a hex with given multiplier `k`.
Returns a new scaled Hex.
"""
@spec scale(t, non_neg_integer) :: t
def scale(%Hex{} = hex, k) when is_integer(k) do
Hex.create!(hex.q * k, hex.r * k, hex.s * k)
end
@doc """
The distance ("length") between a given Hex and the hexagonal grid's
origin.
Returns an integer representing the tile "steps" from origin.
"""
@spec distance(t) :: integer
def distance(%Hex{} = hex) do
round((abs(hex.q) + abs(hex.r) + abs(hex.s)) / 2)
end
@doc """
The distance between two hexes is the "length" between the two
hexes.
Returns an integer representing the tile "steps" between the two hexes.
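For example:
iex> Hex.distance(Hex.create!(1, 0, -1), Hex.create!(-1, 0, 1))
2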
"""
@spec distance(t, t) :: non_neg_integer
def distance(%Hex{} = a, %Hex{} = b) do
Hex.distance(subtract(a, b))
end
@doc """
Returns a new Hex representing the `direction` offset. Assuming "pointy"
hexagon orientation, the 6 surrounding hexes can be referred to using compass
directions, with the exception of "north" and "south".
These directions can also be represented by an integer between
0 and 5, with 0 representing "east" and rotating clockwise; 5
represents "north_east".
"""
@spec direction_offset(atom | non_neg_integer) :: t
def direction_offset(direction)
when is_integer(direction) and 0 <= direction and direction <= 5 do
neighbor_directions = [
:east,
:south_east,
:south_west,
:west,
:north_west,
:north_east
]
neighbor_directions
|> Enum.at(direction)
|> direction_offset()
end
def direction_offset(direction) do
case direction do
:east -> Hex.create!(1, -1, 0)
:south_east -> Hex.create!(0, -1, 1)
:south_west -> Hex.create!(-1, 0, 1)
:west -> Hex.create!(-1, 1, 0)
:north_west -> Hex.create!(0, 1, -1)
:north_east -> Hex.create!(1, 0, -1)
_ -> :error
end
end
@doc """
Returns the direct neighboring Hex in any given direction.
"""
@spec neighbor(t, atom) :: t
def neighbor(%Hex{} = hex, direction) do
add(hex, direction_offset(direction))
end
@doc """
Returns all neighboring Hexes, starting with the "eastern" Hex and rotating clockwise.
"""
@spec neighbors(t) :: list
def neighbors(%Hex{} = hex) do
Enum.map(0..5, &neighbor(hex, &1))
end
@doc """
Returns a boolean indicating whether the two Hexes are immediate neighbors.
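For example:
iex> Hex.neighbor?(Hex.create!(0, 0, 0), Hex.create!(1, -1, 0))
true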
"""
@spec neighbor?(t, t) :: boolean
def neighbor?(%Hex{} = hex_a, %Hex{} = hex_b) do
distance(hex_a, hex_b) == 1
end
@doc """
Returns all neighboring Hexes within a given radius (including the given Hex
itself), starting with the outermost "east" `q` Hex-line and working down
through each `r` Hex-line.
"""
@spec neighborhood(t, non_neg_integer) :: list
def neighborhood(%Hex{} = hex, radius) do
radius_range = radius..-radius
for dq <- radius_range,
dr <-
Enum.max([-radius, -dq - radius])..Enum.min([radius, -dq + radius]) do
Hex.add(hex, Hex.create!(dq, dr, -dq - dr))
end
end
@doc """
Returns a new Hex representing the diagonal offset. Assuming "pointy"
hexagon orientation, the 6 diagonal hexes can be represented using compass
directions, with the exception of "east" and "west".
These directions can also be represented by an integer between
0 and 5, with 0 representing "north" and rotating clockwise; 5
represents "north_west".
"""
@spec diagonal_offset(atom | non_neg_integer) :: t
def diagonal_offset(direction)
when is_integer(direction) and 0 <= direction and direction <= 5 do
diagonal_directions = [
:north,
:north_east,
:south_east,
:south,
:south_west,
:north_west
]
diagonal_directions
|> Enum.at(direction)
|> diagonal_offset()
end
def diagonal_offset(direction) do
case direction do
:north -> Hex.create!(1, 1, -2)
:north_east -> Hex.create!(2, -1, -1)
:south_east -> Hex.create!(1, -2, 1)
:south -> Hex.create!(-1, -1, 2)
:south_west -> Hex.create!(-2, 1, 1)
:north_west -> Hex.create!(-1, 2, -1)
_ -> :error
end
end
@doc """
Returns the diagonal neighboring Hex in any given direction.
"""
@spec diagonal_neighbor(t, atom) :: t
def diagonal_neighbor(%Hex{} = hex, direction) do
add(hex, diagonal_offset(direction))
end
@doc """
Returns all diagonal neighboring Hexes, starting with the "northern" Hex.
"""
@spec diagonal_neighbors(t) :: list
def diagonal_neighbors(%Hex{} = hex) do
Enum.map(0..5, &diagonal_neighbor(hex, &1))
end
@doc """
Compates two Hexes, returns a boolean indicating whether the Hexes
are equal.
"""
@spec equal?(t, t) :: boolean
def equal?(%Hex{} = a, %Hex{} = b) do
a.q == b.q and a.r == b.r and a.s == b.s
end
@doc """
Inverse of Hex.equal?/2.
Compares two Hexes, returns a boolean indicating whether the Hexes
are not equal.
"""
@spec not_equal?(t, t) :: boolean
def not_equal?(%Hex{} = a, %Hex{} = b) do
not equal?(a, b)
end
end
|
lib/basalt.ex
| 0.96336
| 0.743378
|
basalt.ex
|
starcoder
|
defmodule Rummage.Phoenix.SortView do
@moduledoc """
Sort View Module for Rummage. This has view helpers that can generate rummagable links and forms.
Usage:
```elixir
defmodule MyApp.ProductView do
use MyApp.Web, :view
use Rummage.Phoenix.View, only: [:paginate]
end
```
OR
```elixir
defmodule MyApp.ProductView do
use MyApp.Web, :view
use Rummage.Phoenix.View
end
```
"""
use Rummage.Phoenix.ThemeAdapter
import Phoenix.HTML
@doc """
Helper functions for sorting.
Provides the helper function `sort_link/3` for creating sort links in an html.eex
file when using `Phoenix`.
Usage:
Just add the following code in the index template. Make sure that you're passing
rummage from the controller. Please look at the
[README](https://github.com/Excipients/rummage_phoenix) for more details
```elixir
<%= sort_link @conn, @rummage, [field: :name, ci: true] %>
```
"""
def sort_link(conn, rummage, opts \\ []) do
current_sort_params = rummage["sort"]
field = Keyword.fetch!(opts, :field)
asc_icon = Keyword.get(opts, :asc_icon)
asc_text = Keyword.get(opts, :asc_text, "↑")
desc_icon = Keyword.get(opts, :desc_icon)
desc_text = Keyword.get(opts, :desc_text, "↓")
name = opts[:name] || Phoenix.Naming.humanize(field)
assoc = opts[:assoc] || []
{sort_field, current_order} = get_sort_field_and_current_order(current_sort_params, field, assoc)
sort_field = opts[:ci] && sort_field <> ".ci" || sort_field
sort_params = %{"assoc" => assoc, "field" => sort_field}
url = index_path(opts, [conn, :index, %{rummage: Map.put(rummage, "sort", sort_params)}])
text = case current_order do
"asc" -> sort_text_or_image(url, [img: desc_icon, text: desc_text], name)
"desc" -> sort_text_or_image(url, [img: asc_icon, text: asc_text], name)
_ -> sort_text_or_image(url, [], name)
end
sort_text url, do: text
end
defp index_path(opts, params) do
helpers = opts[:helpers]
path_function_name = String.to_atom("#{opts[:struct]}_path")
apply(helpers, path_function_name, params)
end
defp get_sort_field_and_current_order(current_sort_params, field, assoc)
defp get_sort_field_and_current_order(c, field, _) when c in [nil, "", [], %{}], do: {"#{field}.asc", nil}
defp get_sort_field_and_current_order(%{"assoc" => current_assoc, "field" => current_field}, field, assoc) do
current_sort_field = current_field
|> String.split(".ci")
|> Enum.at(0)
cond do
current_assoc != assoc -> {"#{field}.asc", nil}
Regex.match?(~r/^#{field}.asc+$/, current_sort_field) ->
{"#{field}.desc", "desc"}
Regex.match?(~r/^#{field}.desc+$/, current_sort_field) ->
{"#{field}.asc", "asc"}
true -> {"#{field}.asc", nil}
end
end
defp get_sort_field_and_current_order(%{"field" => current_field}, field, assoc) do
current_sort_field = current_field
|> String.split(".ci")
|> Enum.at(0)
cond do
[] != assoc -> {"#{field}.asc", nil}
Regex.match?(~r/^#{field}.asc+$/, current_sort_field) ->
{"#{field}.desc", "desc"}
Regex.match?(~r/^#{field}.desc+$/, current_sort_field) ->
{"#{field}.asc", "asc"}
true -> {"#{field}.asc", nil}
end
end
end
|
lib/rummage_phoenix/hooks/views/sort_view.ex
| 0.675015
| 0.785185
|
sort_view.ex
|
starcoder
|
defmodule Period do
@moduledoc """
A Period represents a timeframe.
## Creating a Period
A Period is an opaque type, so it's not meant to be created directly like
other structs; rather, you should use `Period.new/3`, `Period.new!/3` or
`Period.from_naive/3` to create a Period.
Internally, Periods work with timestamps (`:microsecond` precision), so
any DateTime values extracted from a Period will use the default `Etc/UTC`
timezone. The caller is responsible for retaining timezones if needed.
## Date.Range
A Period can be converted into an Elixir core `Date.Range` struct by using
`Period.to_range/1`.
"""
@enforce_keys [:lower, :upper, :lower_state, :upper_state]
defstruct lower: nil,
upper: nil,
lower_state: :included,
upper_state: :excluded
@opaque t :: %__MODULE__{
lower: timestamp,
upper: timestamp,
lower_state: boundry_state,
upper_state: boundry_state
}
@typedoc "The states a boundry can be in"
@type boundry_state :: :included | :excluded
@typedoc "Unix timestamp in microseconds"
@type timestamp :: integer
@typedoc "The input types for creating a new periods"
@type datetime :: DateTime.t() | integer
@doc """
Creates a new `%Period{}` struct.
A Period can only be created from timestamps or `DateTime` structs, which will
be converted to timestamps. This period does not further concern itself with
timezones.
Timestamps need to be in `:microsecond` precision.
## Examples
iex> {:ok, period} = Period.new(1517171882222330, 1517171882222335)
iex> period
#Period<[#DateTime<2018-01-28 20:38:02.222330Z>, #DateTime<2018-01-28 20:38:02.222335Z>)>
iex> opts = [lower_state: :excluded, upper_state: :included]
iex> {:ok, period} = Period.new(1517171882222330, 1517171882222335, opts)
iex> period
#Period<(#DateTime<2018-01-28 20:38:02.222330Z>, #DateTime<2018-01-28 20:38:02.222335Z>]>
iex> from = DateTime.from_unix!(1517171882222330, :microsecond)
iex> to = DateTime.from_unix!(1517171882222335, :microsecond)
iex> {:ok, period} = Period.new(from, to)
iex> period
#Period<[#DateTime<2018-01-28 20:38:02.222330Z>, #DateTime<2018-01-28 20:38:02.222335Z>)>
iex> from = DateTime.from_unix!(1517171882222335, :microsecond)
iex> to = DateTime.from_unix!(1517171882222330, :microsecond)
iex> Period.new(from, to)
{:error, "In strict mode the lower date cannot be before the upper date (2018-01-28 20:38:02.222335Z, 2018-01-28 20:38:02.222330Z)."}
"""
@spec new(datetime, datetime, Keyword.t()) :: {:ok, t} | {:error, term}
def new(lower, upper, opts \\ [])
def new(%DateTime{} = lower, %DateTime{} = upper, opts) do
lower = DateTime.to_unix(lower, :microsecond)
upper = DateTime.to_unix(upper, :microsecond)
new(lower, upper, opts)
end
def new(lower, upper, opts) when is_integer(lower) and is_integer(upper) do
lb = Keyword.get(opts, :lower_state, :included)
ub = Keyword.get(opts, :upper_state, :excluded)
cond do
lower > upper -> err_order(lower, upper)
lower == upper && lb != ub -> err_include_exclude(lb, ub)
true -> {:ok, %Period{lower: lower, upper: upper, lower_state: lb, upper_state: ub}}
end
end
@doc """
Same as `new/3`, but does raise on errors.
## Examples
iex> Period.new!(1517171882222330, 1517171882222335)
#Period<[#DateTime<2018-01-28 20:38:02.222330Z>, #DateTime<2018-01-28 20:38:02.222335Z>)>
iex> opts = [lower_state: :excluded, upper_state: :included]
iex> Period.new!(1517171882222330, 1517171882222335, opts)
#Period<(#DateTime<2018-01-28 20:38:02.222330Z>, #DateTime<2018-01-28 20:38:02.222335Z>]>
iex> from = DateTime.from_unix!(1517171882222330, :microsecond)
iex> to = DateTime.from_unix!(1517171882222335, :microsecond)
iex> Period.new!(from, to)
#Period<[#DateTime<2018-01-28 20:38:02.222330Z>, #DateTime<2018-01-28 20:38:02.222335Z>)>
iex> from = DateTime.from_unix!(1517171882222335, :microsecond)
iex> to = DateTime.from_unix!(1517171882222330, :microsecond)
iex> Period.new!(from, to)
** (ArgumentError) In strict mode the lower date cannot be after the upper date (2018-01-28 20:38:02.222335Z, 2018-01-28 20:38:02.222330Z).
"""
@spec new!(datetime, datetime, Keyword.t()) :: t | no_return
def new!(lower, upper, opts \\ []) do
case new(lower, upper, opts) do
{:error, err} -> raise ArgumentError, err
{:ok, period} -> period
end
end
@doc """
Convenience function to use `Period` with naive datetime values.
Does simply attach the `Etc/UTC` timezone to the naive datetime.
"""
@spec from_naive(NaiveDateTime.t(), NaiveDateTime.t(), Keyword.t()) :: {:ok, t} | {:error, term}
def from_naive(%NaiveDateTime{} = from, %NaiveDateTime{} = to, opts \\ []) do
from = DateTime.from_naive!(from, "Etc/UTC")
to = DateTime.from_naive!(to, "Etc/UTC")
new(from, to, opts)
end
@spec from_naive!(NaiveDateTime.t(), NaiveDateTime.t(), Keyword.t()) :: t | no_return
def from_naive!(%NaiveDateTime{} = from, %NaiveDateTime{} = to, opts \\ []) do
from = DateTime.from_naive!(from, "Etc/UTC")
to = DateTime.from_naive!(to, "Etc/UTC")
new!(from, to, opts)
end
@doc """
Gets the lower boundary of the period.
Returns the boundary state and the date of the boundary.
## Example
iex> period = Period.new!(1517171882222330, 1517171882222335)
iex> {:included, dt} = Period.get_lower_boundry(period)
iex> dt
#DateTime<2018-01-28 20:38:02.222330Z>
"""
@spec get_lower_boundry(t) :: {boundry_state, DateTime.t()}
def get_lower_boundry(%Period{lower: lower, lower_state: lb}) do
{lb, to_datetime(lower)}
end
@doc """
Gets the upper boundary of the period.
Returns the boundary state and the date of the boundary.
## Example
iex> period = Period.new!(1517171882222330, 1517171882222335)
iex> {:excluded, dt} = Period.get_upper_boundry(period)
iex> dt
#DateTime<2018-01-28 20:38:02.222335Z>
"""
@spec get_upper_boundry(t) :: {boundry_state, DateTime.t()}
def get_upper_boundry(%Period{upper: upper, upper_state: ub}) do
{ub, to_datetime(upper)}
end
@doc """
Gets the boundary notation for both boundaries
## Example
iex> period = Period.new!(1517171882222330, 1517171882222335)
iex> Period.get_boundry_notation(period)
{"[", ")"}
"""
@spec get_boundry_notation(t) :: {binary(), binary()}
def get_boundry_notation(%Period{} = p) do
{get_lower_boundry_notation(p), get_upper_boundry_notation(p)}
end
@doc """
Gets the boundary notation for the lower boundary
## Example
iex> period = Period.new!(1517171882222330, 1517171882222335)
iex> Period.get_lower_boundry_notation(period)
"["
iex> period = Period.new!(1517171882222330, 1517171882222335, lower_state: :excluded)
iex> Period.get_lower_boundry_notation(period)
"("
"""
@spec get_lower_boundry_notation(t) :: binary()
def get_lower_boundry_notation(%Period{lower_state: :included}), do: "["
def get_lower_boundry_notation(%Period{lower_state: :excluded}), do: "("
@doc """
Gets the boundary notation for the upper boundary
## Example
iex> period = Period.new!(1517171882222330, 1517171882222335)
iex> Period.get_upper_boundry_notation(period)
")"
iex> period = Period.new!(1517171882222330, 1517171882222335, upper_state: :included)
iex> Period.get_upper_boundry_notation(period)
"]"
"""
@spec get_upper_boundry_notation(t) :: binary()
def get_upper_boundry_notation(%Period{upper_state: :included}), do: "]"
def get_upper_boundry_notation(%Period{upper_state: :excluded}), do: ")"
@doc """
Make a period inclusive on both ends.
## Example
iex> period = Period.new!(1517171882222330, 1517171882222335)
iex> Period.make_inclusive(period)
#Period<[#DateTime<2018-01-28 20:38:02.222330Z>, #DateTime<2018-01-28 20:38:02.222334Z>]>
"""
@spec make_inclusive(t) :: t
def make_inclusive(%Period{lower: lower, lower_state: :excluded} = period) do
%{period | lower: lower + 1, lower_state: :included}
|> make_inclusive()
end
def make_inclusive(%Period{upper: upper, upper_state: :excluded} = period) do
%{period | upper: upper - 1, upper_state: :included}
|> make_inclusive()
end
def make_inclusive(period) do
period
end
@doc """
Converts the period into a core `%Date.Range{}`.
Only works with periods that are inclusive on both boundaries, as that's a
restriction of `Date.Range` structs.
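A sketch of the intended flow (illustrative, not a doctest):
period = Period.new!(1517171882222330, 1517171882222335)
{:error, _reason} = Period.to_range(period) # the upper boundary is excluded
{:ok, range} = period |> Period.make_inclusive() |> Period.to_range()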
"""
@spec to_range(t) :: {:ok, Date.Range.t()} | {:error, term}
def to_range(%Period{lower: lower, upper: upper, lower_state: :included, upper_state: :included}) do
{:ok, Date.range(to_datetime(lower), to_datetime(upper))}
end
def to_range(%Period{}) do
{:error, "Date.Range's must be inclusive on both ends"}
end
# Helpers
defp to_datetime(boundry), do: DateTime.from_unix!(boundry, :microsecond)
# Exceptions / Errors
@spec err_order(integer, integer) :: {:error, term}
defp err_order(lower, upper) do
details = "(#{to_datetime(lower)}, #{to_datetime(upper)})"
msg = "In strict mode the lower date cannot be before the upper date #{details}."
{:error, msg}
end
@spec err_include_exclude(boundry_state, boundry_state) :: {:error, term}
defp err_include_exclude(lb, ub) do
bounds =
%Period{lower: nil, upper: nil, lower_state: lb, upper_state: ub}
|> get_boundry_notation()
|> Tuple.to_list()
|> Enum.join("")
msg =
"Cannot hold the same date for the lower and upper bound if one boundry is included" <>
" and the other one is not (#{bounds})."
{:error, msg}
end
end
defimpl Inspect, for: Period do
import Inspect.Algebra
def inspect(period, _opts) do
concat([
"#Period<",
Period.get_lower_boundry_notation(period),
inspect(DateTime.from_unix!(period.lower, :microsecond)),
", ",
inspect(DateTime.from_unix!(period.upper, :microsecond)),
Period.get_upper_boundry_notation(period),
">"
])
end
end
|
lib/period.ex
| 0.938498
| 0.596433
|
period.ex
|
starcoder
|
defmodule Xattr do
import Xattr.Nif
@moduledoc ~S"""
API module for accessing custom extended filesystem attributes.
Attributes managed by this module are stored in isolation, in a custom namespace.
Because implementation concepts of extended attributes differ across supported
platforms, it would not be possible to provide a unified API covering every
platform-specific use case.
Some kernels and filesystems may place various limits on extended attribute
functionality, and so it is best to use them only to store a few short pieces of
metadata which are not crucial to application functionality.
## Implementation
Elixir Xattr is implemented as NIF library with two platform-dependent
backends:
* *Xattr* - Unix extended attributes supported by Linux and macOS
* *Windows* - alternate data streams available in Windows/NTFS
### Xattr
This backend works as an Erlang wrapper for [`xattr(7)`](http://man7.org/linux/man-pages/man7/xattr.7.html)
functionality available in Unix world. Attributes are always prefixed with
`user.ElixirXattr` namespace.
### Windows
On Windows, NTFS has a feature called [*Alternate Data Streams*](https://blogs.technet.microsoft.com/askcore/2013/03/24/alternate-data-streams-in-ntfs/).
Briefly: a file can have multiple contents (data streams).
Attributes are stored in the `ElixirXattr` data stream, which is automatically
created when setting an attribute if the stream does not exist. They are
saved in simple binary format, as a contiguous list of *size:data* cells:
```txt
v - name C-string size v - value binary size
+---+------------+---+-----------+---+----------+---+-------+
| 5 | n a m e \0 | 5 | v a l u e | 4 | f o o \0 | 3 | b a r | ...
+---+------------+---+-----------+---+----------+---+-------+
^ - name C-string, note \0 suffix ^ - value binary data
```
### Unicode
Unicode filenames are supported (and as such proper encoding conversions
are performed when needed).
Attribute names and values themselves are not processed and are stored as-is.
### Attribute name types
Because attribute names can be represented by various Erlang types, they
are prefixed with *type tags* during serialization:
* `a$` - atoms
* `s$` - strings
For example, with the Xattr backend, calling `Xattr.set("foo.txt", "example", "value")`
will create the `user.ElixirXattr.s$example` extended attribute on file `foo.txt`.
### Extended attributes & file system links
On both Unix and Windows implementations, attribute storage is attached to
file system data, not file/link entries. Therefore attributes are shared
between all hard links / file and its symlinks.
## Errors
Because of the nature of error handling on both Unix and Windows, only specific
error codes are translated to atoms. Other codes are stringified to some human
readable name, on Unix using [`strerror`](https://linux.die.net/man/3/strerror)
and on Windows to form `'Windows Error {hexadecimal error code}'` (Windows
version of strerror returns localized messages on non-English installations).
The following errors are represented as atoms and as such can be pattern-matched:
* `:enoattr` - attribute was not found
* `:enotsup` - extended attributes are not supported for this file
* `:enoent` - file does not exist
* `:invalfmt` - attribute storage is corrupted and should be regenerated
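## Example
A typical round-trip, based on the function docs below (illustrative; error
handling elided):
:ok = Xattr.set("foo.txt", "hello", "world")
{:ok, true} = Xattr.has("foo.txt", "hello")
{:ok, "world"} = Xattr.get("foo.txt", "hello")
:ok = Xattr.rm("foo.txt", "hello")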
"""
@tag_atom "a$"
@tag_str "s$"
@type name_t :: String.t() | atom
@doc """
Lists names of all extended attributes of `path`.
The order of items in returned list is unspecified. If given `path` has no
attributes, `{:ok, []}` is returned.
## Example
Xattr.set("foo.txt", "hello", "world")
Xattr.set("foo.txt", :foo, "bar")
{:ok, list} = Xattr.ls("foo.txt")
# list should be permutation of ["hello", :foo]
"""
@spec ls(Path.t()) :: {:ok, [name_t]} | {:error, term}
def ls(path) do
path = IO.chardata_to_string(path) <> <<0>>
with {:ok, lst} <- listxattr_nif(path) do
decode_list(lst)
end
end
@doc """
The same as `ls/1`, but raises an exception if it fails.
"""
@spec ls!(Path.t()) :: [name_t] | no_return
def ls!(path) do
case ls(path) do
{:ok, result} ->
result
{:error, reason} ->
raise Xattr.Error,
reason: reason,
action: "list all extended attributes of",
path: IO.chardata_to_string(path)
end
end
@doc """
Checks whether `path` has extended attribute `name`.
## Example
Xattr.set("foo.txt", "hello", "world")
Xattr.has("foo.txt", "hello") == {:ok, true}
Xattr.has("foo.txt", :foo) == {:ok, false}
"""
@spec has(Path.t(), name :: name_t) :: {:ok, boolean} | {:error, term}
def has(path, name) when is_binary(name) or is_atom(name) do
path = IO.chardata_to_string(path) <> <<0>>
name = encode_name(name) <> <<0>>
hasxattr_nif(path, name)
end
@doc """
The same as `has/2`, but raises an exception if it fails.
"""
@spec has!(Path.t(), name :: name_t) :: boolean | no_return
def has!(path, name) do
case has(path, name) do
{:ok, result} ->
result
{:error, reason} ->
raise Xattr.Error,
reason: reason,
action: "check attribute existence of",
path: IO.chardata_to_string(path)
end
end
@doc """
Gets extended attribute value.
If attribute `name` does not exist, `{:error, :enoattr}` is returned.
## Example
Xattr.set("foo.txt", "hello", "world")
Xattr.get("foo.txt", "hello") == {:ok, "world"}
Xattr.get("foo.txt", :foo) == {:error, :enoattr}
"""
@spec get(Path.t(), name :: name_t) :: {:ok, binary} | {:error, term}
def get(path, name) when is_binary(name) or is_atom(name) do
path = IO.chardata_to_string(path) <> <<0>>
name = encode_name(name) <> <<0>>
getxattr_nif(path, name)
end
@doc """
The same as `get/2`, but raises an exception if it fails.
"""
@spec get!(Path.t(), name :: name_t) :: binary | no_return
def get!(path, name) do
case get(path, name) do
{:ok, result} ->
result
{:error, reason} ->
raise Xattr.Error,
reason: reason,
action: "get attribute of",
path: IO.chardata_to_string(path)
end
end
@doc """
Sets extended attribute value.
If attribute `name` does not exist, it is created.
## Example
Xattr.set("foo.txt", "hello", "world")
Xattr.get("foo.txt", "hello") == {:ok, "world"}
"""
@spec set(Path.t(), name :: name_t, value :: binary) :: :ok | {:error, term}
def set(path, name, value)
when (is_binary(name) or is_atom(name)) and is_binary(value) do
path = IO.chardata_to_string(path) <> <<0>>
name = encode_name(name) <> <<0>>
setxattr_nif(path, name, value)
end
@doc """
The same as `set/3`, but raises an exception if it fails.
"""
@spec set!(Path.t(), name :: name_t, value :: binary) :: :ok | no_return
def set!(path, name, value) do
case set(path, name, value) do
:ok ->
:ok
{:error, reason} ->
raise Xattr.Error,
reason: reason,
action: "remove attribute of",
path: IO.chardata_to_string(path)
end
end
@doc """
Removes extended attribute.
If attribute `name` does not exist, `{:error, :enoattr}` is returned.
## Example
Xattr.set("foo.txt", "hello", "world")
Xattr.set("foo.txt", :foo, "bar")
Xattr.rm("foo.txt", "foo")
{:ok, ["hello"]} = Xattr.ls("foo.txt")
"""
@spec rm(Path.t(), name :: name_t) :: :ok | {:error, term}
def rm(path, name) when is_binary(name) or is_atom(name) do
path = IO.chardata_to_string(path) <> <<0>>
name = encode_name(name) <> <<0>>
removexattr_nif(path, name)
end
@doc """
The same as `rm/2`, but raises an exception if it fails.
"""
@spec rm!(Path.t(), name :: name_t) :: :ok | no_return
def rm!(path, name) do
case rm(path, name) do
:ok ->
:ok
{:error, reason} ->
raise Xattr.Error,
reason: reason,
action: "remove attribute of",
path: IO.chardata_to_string(path)
end
end
defp encode_name(name) when is_atom(name) do
@tag_atom <> to_string(name)
end
defp encode_name(name) when is_binary(name) do
@tag_str <> name
end
defp decode_name(@tag_atom <> bin) do
{:ok, String.to_atom(bin)}
end
defp decode_name(@tag_str <> bin) do
{:ok, bin}
end
defp decode_name(_) do
{:error, :invalfmt}
end
defp decode_list(lst) do
decode_list(lst, {:ok, []})
end
defp decode_list([], acc) do
acc
end
defp decode_list([name_enc | rest], {:ok, lst}) do
case decode_name(name_enc) do
{:ok, name} -> decode_list(rest, {:ok, [name | lst]})
err -> err
end
end
end
defmodule Xattr.Error do
defexception [:reason, :path, action: ""]
def message(%{action: action, reason: reason, path: path}) do
formatted = fmt(action, reason)
"could not #{action} #{inspect(path)}: #{formatted}"
end
defp fmt(_action, :enoattr) do
"no such attribute"
end
defp fmt(_action, :invalfmt) do
"corrupted attribute data"
end
defp fmt(_action, reason) do
case IO.iodata_to_binary(:file.format_error(reason)) do
"unknown POSIX error" <> _ -> inspect(reason)
formatted_reason -> formatted_reason
end
end
end
|
lib/xattr.ex
| 0.902939
| 0.722148
|
xattr.ex
|
starcoder
|
defmodule Gradient.TypeData do
@spec all_pp_test_data() :: [{name :: String.t(), type :: tuple(), expected :: String.t()}]
def all_pp_test_data() do
[
value_test_data(),
builtin_types_test_data(),
op_types_test_data(),
fun_types_test_data(),
map_types_test_data(),
tuple_types_test_data(),
other_types_test_data()
]
|> List.flatten()
end
def value_test_data() do
[
{"integer value", {:integer, 0, 12}, "12"},
{"atom value", {:atom, 0, :ok}, ~s(:"ok")},
{"boolean false", {:atom, 0, false}, "false"},
{"boolean true", {:atom, 0, true}, "true"},
{"nil", {:atom, 0, nil}, "nil"}
]
end
def builtin_types_test_data() do
[
{"integer type", {:type, 0, :integer, []}, "integer()"},
{"float type", {:type, 0, :float, []}, "float()"},
{"atom type", {:type, 0, :atom, []}, "atom()"},
{"boolean type", {:type, 0, :boolean, []}, "boolean()"},
{"binary type", {:type, 0, :binary, []}, "binary()"},
{"range type", {:type, 0, :range, [{:integer, 0, 1}, {:integer, 0, 10}]}, "1..10"},
{"list type", {:type, 0, :list, []}, "list()"},
{"nonempty_list type", {:type, 0, :nonempty_list, []}, "nonempty_list()"},
{"nil() or []", {:type, 0, nil, []}, "[]"},
{"type with args", {:type, 0, :list, [{:type, 0, :integer, []}]}, "list(integer())"}
]
end
def op_types_test_data() do
[
{"binary operator", {:op, 0, :+, {:integer, 0, 1}, {:integer, 0, 2}}, "1 + 2"},
{"unary operator", {:op, 0, :not, {:var, 0, :status}}, "not status"}
]
end
def fun_types_test_data() do
[
{"any fun type", {:type, 0, :fun, []}, "fun()"},
{"fun with any args returning a specific type",
{:type, 0, :fun, [{:type, 0, :any}, {:atom, 0, :ok}]}, ~s((... -> :"ok"\))},
{"fun with specific arg types returning a specific type",
{:type, 0, :fun, [{:type, 0, :product, [{:type, 0, :atom, []}]}, {:type, 0, :atom, []}]},
"(atom() -> atom())"}
]
end
def map_types_test_data() do
[
{"any map type", {:type, 0, :map, :any}, "map()"},
{"complex map type",
{:type, 0, :map,
[
{:type, 0, :map_field_assoc, [{:atom, 0, :value_a}, {:integer, 0, 5}]},
{:type, 0, :map_field_exact, [{:atom, 0, :value_b}, {:atom, 0, :neo}]}
]}, ~s(%{optional(:"value_a"\) => 5, required(:"value_b"\) => :"neo"})}
]
end
def tuple_types_test_data() do
[
{"any tuple type", {:type, 0, :tuple, :any}, "tuple()"},
{"tuple {:ok, 8}", {:type, 0, :tuple, [{:atom, 0, :ok}, {:integer, 0, 8}]}, ~s({:"ok", 8})}
]
end
def other_types_test_data() do
[
{"var type", {:var, 0, :a}, "a"},
{"anotated type", {:ann_type, 0, [{:var, 0, :name}, {:type, 0, :integer, []}]},
"name :: integer()"},
{"remote type without args",
{:remote_type, 0, [{:atom, 0, MyModule}, {:atom, 0, :my_fun}, []]}, "MyModule.my_fun()"},
{"remote type with args",
{:remote_type, 0, [{:atom, 0, MyModule}, {:atom, 0, :my_fun}, [{:type, 0, :integer, []}]]},
"MyModule.my_fun(integer())"},
{"user type", {:user_type, 0, :my_type, [{:type, 0, :atom, []}]}, "my_type(atom())"}
]
end
end
|
test/support/type_data.ex
| 0.590071
| 0.523603
|
type_data.ex
|
starcoder
|
defmodule BSV.UTXO do
@moduledoc """
A UTXO is a data structure representing an unspent transaction output.
A UTXO consists of a `t:BSV.OutPoint.t/0` and the `t:BSV.TxOut.t/0` itself.
UTXOs are used in the `BSV.TxBuilder` module to create transaction inputs.
"""
alias BSV.{OutPoint, Script, Tx, TxOut}
import BSV.Util, only: [decode: 2, reverse_bin: 1]
defstruct outpoint: nil, txout: nil
@typedoc "UTXO struct"
@type t() :: %__MODULE__{
outpoint: OutPoint.t(),
txout: TxOut.t()
}
@doc """
Builds a `t:BSV.UTXO.t/0` from the given map of params. Useful for building
UTXOs from JSON APIs.
Returns the result in an `:ok` / `:error` tuple pair.
## Params
The required params are:
* `txid` - Transaction ID
* `vout` - Vector of the output in a transaction. Also accepts `outputIndex`
* `satoshis` - Number of satoshis. Also accepts `amount`
* `script` - Hex-encoded locking script
## Examples
iex> UTXO.from_params(%{
...> "txid" => "5e3014372338f079f005eedc85359e4d96b8440e7dbeb8c35c4182e0c19a1a12",
...> "vout" => 0,
...> "satoshis" => 15399,
...> "script" => "76a91410bdcba3041b5e5517a58f2e405293c14a7c70c188ac"
...> })
{:ok, %UTXO{
outpoint: %OutPoint{
hash: <<18, 26, 154, 193, 224, 130, 65, 92, 195, 184, 190, 125, 14, 68, 184, 150, 77, 158, 53, 133, 220, 238, 5, 240, 121, 240, 56, 35, 55, 20, 48, 94>>,
vout: 0
},
txout: %TxOut{
satoshis: 15399,
script: %Script{chunks: [
:OP_DUP,
:OP_HASH160,
<<16, 189, 203, 163, 4, 27, 94, 85, 23, 165, 143, 46, 64, 82, 147, 193, 74, 124, 112, 193>>,
:OP_EQUALVERIFY,
:OP_CHECKSIG
]}
}
}}
"""
@spec from_params(map()) :: {:ok, t()} | {:error, term()}
def from_params(%{"txid" => txid, "script" => script} = params) do
with {:ok, hash} <- decode(txid, :hex),
{:ok, vout} <- take_any_param(params, ["vout", "outputIndex"]),
{:ok, satoshis} <- take_any_param(params, ["satoshis", "amount"]),
{:ok, script} <- Script.from_binary(script, encoding: :hex)
do
outpoint = struct(OutPoint, hash: reverse_bin(hash), vout: vout)
txout = struct(TxOut, satoshis: satoshis, script: script)
{:ok, struct(__MODULE__, outpoint: outpoint, txout: txout)}
end
end
@doc """
Builds a `t:BSV.UTXO.t/0` from the given map of params.
As `from_params/1` but returns the result or raises an exception.
"""
@spec from_params!(map()) :: t()
def from_params!(%{} = params) do
case from_params(params) do
{:ok, utxo} ->
utxo
{:error, error} ->
raise BSV.DecodeError, error
end
end
@doc """
Builds a `t:BSV.UTXO.t/0` from the given transaction and vout index. Useful
for building UTXOs when you already have the full transaction being spent
from.
"""
@spec from_tx(Tx.t(), TxOut.vout()) :: t() | nil
def from_tx(%Tx{outputs: outputs} = tx, vout) when vout < length(outputs) do
with %TxOut{} = txout <- Enum.at(outputs, vout) do
outpoint = %OutPoint{hash: Tx.get_hash(tx), vout: vout}
%__MODULE__{outpoint: outpoint, txout: txout}
end
end
# Takes the first value from the list of keys on the given map of params
defp take_any_param(params, keys) do
case Map.take(params, keys) |> Map.values() do
[value | _] ->
{:ok, value}
_ ->
{:error, {:param_not_found, keys}}
end
end
end
|
lib/bsv/utxo.ex
| 0.87701
| 0.687289
|
utxo.ex
|
starcoder
|
defmodule Linnerud do
@doc """
Linear Regression with the Linnerud dataset
features: physiological data
weight
waist
pulse
targets: exersize data
chins
situps
jumps
"""
def run do
# load dataset
features = load_linnerud_feature_dataset()
targets = load_linnerud_target_dataset()
# setup features
pulses = features[:pulse]
waists = features[:waist]
weights = features[:weight]
number_of_data = length pulses
bias = for _times <- 1..number_of_data, do: 1.0
x = [bias, weights, waists, pulses]
x = Matrix.transpose(x)
# setup targets
y = [ targets[:chins] ]
y = Matrix.transpose(y)
# setup gradientDescent params
alpha = 0.00003
iterations = 10000
theta = [[0], [0], [0], [0]]
# train
theta = LenearRegression.gradientDescent(x, y, theta, alpha, iterations)
x_test = Matrix.transpose([[1],[191],[36],[50]])
y_test = [[5]]
# predict
predicted_chins = LenearRegression.predict(x_test, theta)
# computeCost
error = LenearRegression.computeCost(x_test, y_test, theta)
IO.puts "====== test data ====="
IO.puts "x: "
IO.inspect x_test
IO.puts "y: "
IO.inspect y_test
IO.puts "===== pridiction ====="
IO.inspect predicted_chins
IO.puts "error: "
IO.inspect error
end
@doc """
Polynomial Linear Regression with the Linnerud dataset
features: physiological data
weight
waist
pulse
targets: exercise data
chins
situps
jumps
"""
def run_polynomial do
features = load_linnerud_feature_dataset()
targets = load_linnerud_target_dataset()
pulses = features[:pulse]
waists = features[:waist]
weights = features[:weight]
number_of_data = length pulses
bias = for _times <- 1..number_of_data, do: 1.0
square_pulses = pulses |> Enum.map(fn x -> x * x end)
square_waists = waists |> Enum.map(fn x -> x * x end)
square_weights = weights |> Enum.map(fn x -> x * x end)
pulses_waists = pulses |> Enum.with_index |> Enum.map(fn {x,i} -> x * Enum.at(waists, i) end)
pulses_weights = pulses |> Enum.with_index |> Enum.map(fn {x,i} -> x * Enum.at(weights, i) end)
waists_weights = waists |> Enum.with_index |> Enum.map(fn {x,i} -> x * Enum.at(weights, i) end)
x = [bias, weights, waists, pulses,
square_weights, square_waists, square_pulses,
pulses_waists, pulses_weights, waists_weights]
x = Matrix.transpose(x)
y = [ targets[:chins] ]
y = Matrix.transpose(y)
theta = [[0],[0],[0],[0],[0],[0],[0],[0],[0],[0]]
alpha = 0.000000001
iterations = 10000
theta = LenearRegression.gradientDescent(x, y, theta, alpha, iterations)
test_weight = 191
test_waists = 36
test_pulse = 50
x_test = [[1],[test_weight],[test_waists],[test_pulse],
[test_weight*test_weight], [test_waists*test_waists], [test_pulse*test_pulse],
[test_pulse*test_waists], [test_pulse*test_weight], [test_waists*test_weight]]
predicted_chins = LenearRegression.predict(Matrix.transpose(x_test), theta)
IO.inspect predicted_chins
end
@doc """
plot dataset
"""
def plot do
features = load_linnerud_feature_dataset()
targets = load_linnerud_target_dataset()
plot_datasets(features, targets)
end
defp plot_datasets(features, target) do
plotter = Explotter.new
Explotter.plot(plotter, features[:weight], target[:chins], :ro, [label: "weight and chins"])
Explotter.plot(plotter, features[:waist], target[:chins], :ro, [label: "waist and chins"])
Explotter.plot(plotter, features[:pulse], target[:chins], :ro, [label: "pulse and chins"])
Explotter.show(plotter)
end
defp load_linnerud_feature_dataset do
"apps/sample/dataset/sample_data/linnerud_physiological.csv"
|> load_linnerud_dataset
end
defp load_linnerud_target_dataset do
"apps/sample/dataset/sample_data/linnerud_exercise.csv"
|> load_linnerud_dataset
end
defp load_linnerud_dataset(filepath) do
data = filepath
|> DatasetLoader.load([headers: true])
Map.keys(Enum.at(data, 0))
|> Enum.map(fn key -> {
key
|> String.downcase
|> String.to_atom,
data
|> Enum.map(fn d -> d[key] end)
|> Enum.map(fn d -> elem(Float.parse(d), 0) end)
} end)
end
end
|
apps/sample/lib/linnerud.ex
| 0.664758
| 0.791055
|
linnerud.ex
|
starcoder
|
defmodule Elastic.HTTP do
alias Elastic.AWS
@moduledoc ~S"""
Used to make raw calls to Elastic Search.
Each function returns a tuple indicating whether or not the request
succeeded or failed (`:ok` or `:error`), the status code of the response,
and then the processed body of the response.
For example, a request like this:
```elixir
Elastic.HTTP.get("/answer/_search")
```
Would return a response like this:
```
{:ok, 200,
%{"_shards" => %{"failed" => 0, "successful" => 5, "total" => 5},
"hits" => %{"hits" => [%{"_id" => "1", "_index" => "answer", "_score" => 1.0,
"_source" => %{"text" => "I like using Elastic Search"}, "_type" => "answer"}],
"max_score" => 1.0, "total" => 1}, "timed_out" => false, "took" => 7}}
```
"""
alias Elastic.ResponseHandler
@doc """
Makes a request using the GET HTTP method, and can take a body.
```
Elastic.HTTP.get("/answer/_search", body: %{query: ...})
```
"""
def get(url, options \\ []) do
request(:get, url, options)
end
@doc """
Makes a request using the POST HTTP method, and can take a body.
"""
def post(url, options \\ []) do
request(:post, url, options)
end
@doc """
Makes a request using the PUT HTTP method:
```
Elastic.HTTP.put("/answers/answer/1", body: %{
text: "I like using Elastic Search"
})
```
"""
def put(url, options \\ []) do
request(:put, url, options)
end
@doc """
Makes a request using the DELETE HTTP method:
```
Elastic.HTTP.delete("/answers/answer/1")
```
"""
def delete(url, options \\ []) do
request(:delete, url, options)
end
@doc """
Makes a request using the HEAD HTTP method:
```
Elastic.HTTP.head("/answers")
```
"""
def head(url, options \\ []) do
request(:head, url, options)
end
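@doc """
Makes a request to the `_bulk` endpoint using the POST HTTP method.
The `:body` option is expected to be a newline-delimited JSON string (a
trailing newline is appended automatically). A sketch (the index name and
document are illustrative):
ops = [
~s({"index":{"_index":"answer","_type":"answer","_id":"1"}}),
~s({"text":"Hello"})
]
Elastic.HTTP.bulk(body: Enum.join(ops, "\\n"))
"""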
def bulk(options) do
headers = Keyword.get(options, :headers, [])
body = Keyword.get(options, :body, "") <> "\n"
options = Keyword.put(options, :body, body)
url = build_url(:post, "_bulk", headers, body)
HTTPotion.post(url, options) |> process_response
end
defp base_url do
Elastic.base_url || "http://localhost:9200"
end
defp request(method, url, options) do
body = Keyword.get(options, :body, []) |> encode_body
options = options
|> Keyword.put(:body, body)
|> Keyword.put(:timeout, 30_000)
headers = Keyword.get(options, :headers, [])
url = build_url(method, url, headers, body)
apply(HTTPotion, method, [url, options]) |> process_response
end
defp process_response(response) do
ResponseHandler.process(response)
end
defp encode_body([]) do
[]
end
defp encode_body(body) do
{:ok, encoded_body} = Poison.encode(body)
encoded_body
end
defp build_url(method, url, headers, body) do
url = URI.merge(base_url(), url)
if AWS.enabled?,
do: AWS.sign_url(method, url, headers, body),
else: url
end
end
|
lib/elastic/http.ex
| 0.899274
| 0.803328
|
http.ex
|
starcoder
|
defmodule Mix.Tasks.Ecto.Gen.Migration do
use Mix.Task
import Macro, only: [camelize: 1, underscore: 1]
import Mix.Generator
import Mix.Ecto
import Mix.EctoSQL
@shortdoc "Generates a new migration for the repo"
@aliases [
r: :repo
]
@switches [
change: :string,
repo: [:string, :keep],
no_compile: :boolean,
no_deps_check: :boolean
]
@moduledoc """
Generates a migration.
The repository must be set under `:ecto_repos` in the
current app configuration or given via the `-r` option.
## Examples
mix ecto.gen.migration add_posts_table
mix ecto.gen.migration add_posts_table -r Custom.Repo
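The `--change` switch (accepted by this task's option parser) can be used to
pre-fill the body of the generated `change/0` function:
mix ecto.gen.migration add_posts_table --change "create table(:posts)"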
The generated migration filename will be prefixed with the current
timestamp in UTC which is used for versioning and ordering.
By default, the migration will be generated to the
"priv/YOUR_REPO/migrations" directory of the current application
but it can be configured to be any subdirectory of `priv` by
specifying the `:priv` key under the repository configuration.
This generator will automatically open the generated file if
you have `ECTO_EDITOR` set in your environment variable.
## Command line options
* `-r`, `--repo` - the repo to generate migration for
* `--no-compile` - does not compile applications before running
* `--no-deps-check` - does not check dependencies before running
## Configuration
If the current app configuration specifies a custom migration module
the generated migration code will use that rather than the default
`Ecto.Migration`:
config :ecto_sql, migration_module: MyApplication.CustomMigrationModule
"""
@impl true
def run(args) do
no_umbrella!("ecto.gen.migration")
repos = parse_repo(args)
Enum.map repos, fn repo ->
case OptionParser.parse!(args, strict: @switches, aliases: @aliases) do
{opts, [name]} ->
ensure_repo(repo, args)
path = Path.join(source_repo_priv(repo), "migrations")
base_name = "#{underscore(name)}.exs"
file = Path.join(path, "#{timestamp()}_#{base_name}")
unless File.dir?(path), do: create_directory path
fuzzy_path = Path.join(path, "*_#{base_name}")
if Path.wildcard(fuzzy_path) != [] do
Mix.raise "migration can't be created, there is already a migration file with name #{name}."
end
assigns = [mod: Module.concat([repo, Migrations, camelize(name)]), change: opts[:change]]
create_file file, migration_template(assigns)
if open?(file) and Mix.shell.yes?("Do you want to run this migration?") do
Mix.Task.run "ecto.migrate", ["-r", inspect(repo)]
end
file
{_, _} ->
Mix.raise "expected ecto.gen.migration to receive the migration file name, " <>
"got: #{inspect Enum.join(args, " ")}"
end
end
end
defp timestamp do
{{y, m, d}, {hh, mm, ss}} = :calendar.universal_time()
"#{y}#{pad(m)}#{pad(d)}#{pad(hh)}#{pad(mm)}#{pad(ss)}"
end
defp pad(i) when i < 10, do: << ?0, ?0 + i >>
defp pad(i), do: to_string(i)
defp migration_module do
case Application.get_env(:ecto_sql, :migration_module, Ecto.Migration) do
migration_module when is_atom(migration_module) -> migration_module
other -> Mix.raise "Expected :migration_module to be a module, got: #{inspect(other)}"
end
end
embed_template :migration, """
defmodule <%= inspect @mod %> do
use <%= inspect migration_module() %>
def change do
<%= @change %>
end
end
"""
end
|
lib/mix/tasks/ecto.gen.migration.ex
| 0.738858
| 0.419232
|
ecto.gen.migration.ex
|
starcoder
|
defmodule Mix.Ecto do
# Conveniences for writing Mix.Tasks in Ecto.
@moduledoc false
@doc """
Parses the repository as the first argument in the given list
and ensure the repository is loaded and available.
"""
@spec parse_repo([term]) :: {Ecto.Repo.t, [term]} | no_return
def parse_repo([h|t]) when is_binary(h) and h != "" do
{Module.concat([h]), t}
end
def parse_repo([h|t]) when is_atom(h) and h != :"" do
{h, t}
end
def parse_repo(_) do
raise Mix.Error, message: "invalid arguments, expected a repo as first argument"
end
@doc """
Ensures the given module is a repository.
"""
@spec ensure_repo(module) :: Ecto.Repo.t | no_return
def ensure_repo(repo) do
case Code.ensure_compiled(repo) do
{:module, _} ->
if function_exported?(repo, :__repo__, 0) do
repo
else
raise Mix.Error, message: "module #{inspect repo} is not a Ecto.Repo, it does not define __repo__/0"
end
{:error, error} ->
raise Mix.Error, message: "could not load #{inspect repo}, error: #{inspect error}"
end
end
@doc """
Ensures the given repository is started and running.
"""
@spec ensure_started(Ecto.Repo.t) :: Ecto.Repo.t | no_return
def ensure_started(repo) do
case repo.start_link do
:ok -> repo
{:ok, _} -> repo
{:error, {:already_started, _}} -> repo
{:error, error} ->
raise Mix.Error, message: "could not start repo #{inspect repo}, error: #{inspect error}"
end
end
@doc """
Gets the migrations path from a repository.
"""
@spec migrations_path(Ecto.Repo.t) :: String.t | no_return
def migrations_path(repo) do
if function_exported?(repo, :priv, 0) do
Path.join(repo.priv, "migrations")
else
raise Mix.Error, message: "expected repo #{inspect repo} to define priv/0 in order to use migrations"
end
end
@doc """
Asks if the user wants to open a file based on ECTO_EDITOR.
"""
@spec open?(binary) :: boolean
def open?(file) do
editor = System.get_env("ECTO_EDITOR") || ""
if editor != "" do
System.cmd(editor, [inspect(file)])
true
else
false
end
end
@doc """
Gets a path relative to the application path.
Raises on umbrella application.
"""
def no_umbrella!(task) do
if Mix.Project.umbrella? do
raise Mix.Error, message: "cannot run task #{inspect task} from umbrella application"
end
end
@doc """
Returns `true` if module implements behaviour.
"""
def ensure_implements(module, behaviour, message) do
all = Keyword.take(module.__info__(:attributes), [:behaviour])
unless [behaviour] in Keyword.values(all) do
raise Mix.Error, message: "Expected #{inspect module} to implement #{inspect behaviour} " <>
"in order to #{message}"
end
end
end
|
lib/mix/ecto.ex
| 0.696578
| 0.446917
|
ecto.ex
|
starcoder
|
defmodule DeepGet do
@moduledoc """
`DeepGet` allows you to take an object (map/struct/keyword list/list) or list
of them, nested to an arbitrary level, and extract the values corresponding
to a list of keys.
"""
@doc """
Fetches a list of all objects under a struct/map/keyword list/list matching
the list of keys provided.
If `object` is a list, it will retain the key at the level it is currently
working on, and check all map-like/keyable objects in the list for the next
key.
The end result is a flattened list of values for every item in the
deeply-nested structure that matched the full key path.
## Examples
iex> %{ids: [%{id: 1}, %{id: 2}]} |> DeepGet.deep_get([:ids, :id])
[1, 2]
iex> [%{name: %{first: "Alice"}}, %{name: %{first: "Bob"}}] |> DeepGet.deep_get([:name, :first])
["Alice", "Bob"]
"""
def deep_get(nil, _path), do: []
def deep_get(_object, nil), do: []
def deep_get(_object, []), do: []
def deep_get(object, path) do
object
|> do_deep_get(List.wrap(path), [])
|> List.flatten()
|> Enum.reverse()
end
# TODO: Would it be useful if part of the path were an index?
# Example: deep_get(object, [:people, :phone_numbers, [0]])?
defp do_deep_get([], _path, results) do
# No need to add an empty list to a list that's going to get flattened
results
end
defp do_deep_get(object, [], results) do
# Reached the end of the path, so add whatever object we're in to the list
[object | results]
end
defp do_deep_get(%{} = map, path, results) do
[current | remaining_path] = path
if Map.has_key?(map, current) do
nested_results =
map
|> Map.get(current)
|> do_deep_get(remaining_path, [])
[nested_results | results]
else
# If the map/struct doesn't have the key, it can't have any of the
# sub-keys, so just pop back up the stack
results
end
end
defp do_deep_get(list, path, results) when is_list(list) do
if Keyword.keyword?(list) do
[current | remaining_path] = path
if Keyword.has_key?(list, current) do
nested_results =
list
|> Keyword.get(current)
|> do_deep_get(remaining_path, [])
[nested_results | results]
else
results
end
else
nested_results =
list
|> Enum.map(&do_deep_get(&1, path, []))
|> Enum.reverse()
[nested_results | results]
end
end
defp do_deep_get(_non_map_or_list, _path, results) do
# Still have path remaining, but nothing that can accept a key
results
end
end
|
lib/deep_get.ex
| 0.634317
| 0.481637
|
deep_get.ex
|
starcoder
|
defmodule Client do
@moduledoc """
The client module contains two functions of note, `do_request/6` and
`do_request!/6` which perform HTTP actions as well as encoding and decoding
data and setting headers involved in the request.
## Actions
Actions are the part that actually makes the HTTP request, if that is what you
choose to do with this library. It is fairly generic. Some actions are provided.
### Provided Actions
- `Client.get/3` / `Client.get!/3`
- `Client.post/3` / `Client.post!/3`
- `Client.patch/3` / `Client.patch!/3`
- `Client.put/3` / `Client.put!/3`
- `Client.delete/3` / `Client.delete!/3`
The provided actions are all simple wrappers around HTTPoison to make the
arguments resemble what the callback requires in `do_request/6` and
`do_request!/6`
"""
alias HTTPoison.Response
defmodule ResponseNot200Error do
defexception [:message]
def exception(%Response{status_code: status}) do
msg = "status code: #{status}"
{:error, %__MODULE__{message: msg}}
end
def exception(value) do
msg = "expected %HTTPoison.Response{}, got: #{inspect value}"
{:error, %__MODULE__{message: msg}}
end
end
def get(href, payload, headers),
do: HTTPoison.get(href, headers, payload)
def post(href, payload, headers),
do: HTTPoison.post(href, payload, headers)
def patch(href, payload, headers),
do: HTTPoison.patch(href, payload, headers)
def put(href, payload, headers),
do: HTTPoison.put(href, payload, headers)
def delete(href, _payload, headers),
do: HTTPoison.delete(href, headers)
def get!(href, payload, headers),
do: HTTPoison.get!(href, headers, payload)
def post!(href, payload, headers),
do: HTTPoison.post!(href, payload, headers)
def patch!(href, payload, headers),
do: HTTPoison.patch!(href, payload, headers)
def put!(href, payload, headers),
do: HTTPoison.put!(href, payload, headers)
def delete!(href, _payload, headers),
do: HTTPoison.delete!(href, headers)
@doc """
Sequences calls to encoder, action, and decoder to perform HTTPoison requests.
It is important to understand how this client works in order to properly use it.
It provides two implementations of a single function `do_request/6`, which takes
the arguments explained below:
| Argument | description |
| ---------- | ----------- |
| `href` | The URL of the resource to be queried |
| `payload` | A Map, Struct, or List to be sent to the server |
| `headers` | The headers to be sent with the query |
| `encoder` | This is an encoder from the Client package, a list of encoders is provided below |
| `decoder` | This is a decoder from the Client package, a list of decoders is provided below |
| `action` | This is an HTTPoison verb. Usage defined below |
## Notes
When using `do_request/6`, your actions all need to return a tuple of the format
`{:ok, data}` or `{:error, reason}`, any other formats will not be properly
handled by `do_request/6`.
## Examples
data = Client.do_request(
"https://httpbin.org/post",
%{"key" => "value", "key2" => ["value1", "value2"]},
%{"Header" => "Header/Value"},
Client.Encoders.JSON,
Client.Decoders.JSON,
&Client.post(&1, &2, &3)
)
assert data == {
:ok,
%{
"args" => %{},
"data" => "{\\"key2\\":[\\"value1\\",\\"value2\\"],\\"key\\":\\"value\\"}",
"files" => %{},
"form" => %{},
"headers" => %{
"Accept" => "application/json",
"Content-Length" => "42",
"Content-Type" => "application/json",
"Header" => "Header/Value",
"Host" => "httpbin.org",
"User-Agent" => "hackney/1.6.1"
},
"json" => %{
"key" => "value",
"key2" => ["value1", "value2"]
},
"origin" => "127.0.0.1",
"url" => "https://httpbin.org/post"
}
}
iex> Client.do_request("a.com", %{"key" => "value"}, %{}, Client.Encoders.JSON, Client.Decoders.JSON, fn _href, payload, _headers -> {:ok, %HTTPoison.Response{status_code: 200, body: payload}} end)
{:ok, %{"key" => "value"}}
"""
def do_request(href, payload, headers, encoder, decoder, action) do
with {:ok, payload} <- encoder.encode(payload) do
headers = encoder.headers
|> Map.merge(decoder.headers)
|> Map.merge(headers)
with {:ok, response} <- action.(href, payload, headers),
do: handle_response(response, decoder)
end
end
@doc """
Aggressive version of `do_request/6`. Aggressive means raising errors rather
than returning error structs.
## Notes
When using `do_request!/6`, your actions must all return `data` directly,
outside of the tuple used in the safer version. The reason for this is we expect
errors in this case to be raised rather than returned.
## Examples
iex> Client.do_request!("a.com", %{"key" => "value"}, %{}, Client.Encoders.JSON, Client.Decoders.JSON, fn _href, payload, _headers -> %HTTPoison.Response{status_code: 200, body: payload} end)
%{"key" => "value"}
"""
def do_request!(href, payload, headers, encoder, decoder, action) do
payload = encoder.encode!(payload)
headers = encoder.headers
|> Map.merge(decoder.headers)
|> Map.merge(headers)
href
|> action.(payload, headers)
|> handle_response!(decoder)
end
defp handle_response(%Response{status_code: status, body: body}=response, decoder) do
cond do
status in [200, 201] ->
decoder.decode(body)
status == 204 ->
{:ok, :no_content}
true ->
ResponseNot200Error.exception(response)
end
end
defp handle_response!(%Response{status_code: status, body: body}=response, decoder) do
cond do
status in [200, 201] ->
decoder.decode!(body)
status == 204 ->
:no_content
true ->
with {:error, error} <- ResponseNot200Error.exception(response),
do: raise error
end
end
end
|
lib/client.ex
| 0.88856
| 0.401746
|
client.ex
|
starcoder
|
defmodule Challenges.One do
@doc """
From https://edabit.com/challenge/qvpHxkZnTh4m9Dk4X
Create a function that determines whether or not a player is holding a Full House in their hand. A hand is represented as an array of 5 cards. A full house is defined as a pair of cards and a three-of-a-kind.
To illustrate: ["A", "A", "A", "K", "K"] would be a Full House, since the player holds 3 aces and 2 kings.
"""
def is_poker_full_house?(hand) do
# A full house is exactly a pair plus a three-of-a-kind
counts = hand |> Enum.frequencies() |> Map.values() |> Enum.sort()
counts == [2, 3]
end
@doc """
From https://edabit.com/challenge/mE4XYBoSEtJmTCQZu
Write a function that adds two numbers. The catch, however, is that the numbers will be strings.
Numbers can be any length.
Notes
- If there are any non-numerical characters, return "-1".
- If one option is blank the code should still work.
- Your function should be able to add any size number.
- Your function doesn't have to add negative numbers.
"""
def add_str_nums(number_1, number_2) do
"123"
end
@doc """
From https://edabit.com/challenge/48wCSbfh6Q42ihGiJ
A group of n people stand in a circle awaiting a winner. Starting from an arbitrary position(0), the conductor
removes every kth person from the circle until one person remains standing, who is then granted a treat (see
examples).
Create a function that takes 2 arguments — the number of people to be in the circle n, and the step size k, and
returns the original position (index) of the person who wins.
"""
# Standard Josephus recurrence (zero-indexed): J(1) = 0, J(n) = (J(n - 1) + k) rem n
def josephus_permutation(1, _k), do: 0
def josephus_permutation(n, k) do
rem(josephus_permutation(n - 1, k) + k, n)
end
@doc """
See https://edabit.com/challenge/oCcYqckRMPwFLsJPZ
A prison can be represented as a list of cells. Each cell contains exactly one prisoner. A 1 represents an unlocked
cell and a 0 represents a locked cell.
[1, 1, 0, 0, 0, 1, 0]
Starting from the leftmost cell, you are tasked with seeing how many prisoners you can set free, with a catch. Each
time you free a prisoner, the locked cells become unlocked, and the unlocked cells become locked again.
So, if we use the example above:
[1, 1, 0, 0, 0, 1, 0]
# You free the prisoner in the first cell.
[0, 0, 1, 1, 1, 0, 1]
# You free the prisoner in the third cell (2nd one locked).
[1, 1, 0, 0, 0, 1, 0]
# You free the prisoner in the sixth cell (3rd, 4th and 5th locked).
[0, 0, 1, 1, 1, 0, 1]
# You free the prisoner in the seventh cell - and you are done!
Here, we have freed 4 prisoners in total.
Create a function that, given this unique prison arrangement, returns the number of freed prisoners.
Notes
- You must free a prisoner in order for the locks to switch. So in the second example, where the input is [1, 1, 1],
after you release the first prisoner, the locks change to [0, 0, 0]. Since all cells are locked, you can release
no more prisoners.
- You always start with the leftmost element in the list (the first prison cell). If all the prison cells to your
right are zeroes, you cannot free any more prisoners.
"""
def freed_prisoners(cells_array) do
-1
end
@doc """
Advent of Code Day 3: Crossed Wires.
See https://adventofcode.com/2019/day/3 for a complete description of the problem.
"""
def manhattan_distance_crossed_wires(input) do
-1
end
@doc """
See https://edabit.com/challenge/2WcypxLy2SCFYZWXu
For this challenge, the input will be a (long) string.
A word encountered for the first time is a stranger. A word encountered thrice becomes an acquaintance. A word
encountered 5 times becomes a friend.
Create a function that takes the string and returns an array of two arrays. The first is an array of acquaintances
in the order they became an acquaintance (see example). The second is an array of friends in the order they became a friend. Words in the friend array should no longer be in the acquaintance array.
Notes
- All words should be in lowercase.
- Punctuation should be stripped except for apostrophes (e.g. doesn't, aren't, etc).
"""
def no_strangers(input) do
[]
end
end
|
lib/challenges_one.ex
| 0.830353
| 0.907845
|
challenges_one.ex
|
starcoder
|
defmodule Exchange.MatchingEngine do
@moduledoc """
This is the core of the Exchange.
The matching engine is responsible for matching the orders on the order book.
"""
use GenServer
alias Exchange.{Order, OrderBook}
@type ticker :: atom
require Logger
# Client
@check_expiration_rate 1_000
@price_broadcast_rate 1_000
def start_link(ticker: ticker, currency: currency, min_price: min_price, max_price: max_price)
when is_atom(currency) and is_atom(ticker) and is_number(min_price) and is_number(max_price) do
name = via_tuple(ticker)
GenServer.start_link(__MODULE__, [ticker, currency, min_price, max_price], name: name)
end
@doc """
Places an order on the matching engine identified by the ticker.
## Market Order
A market order is placed with the highest max_price when buying or the
lowest min_price when selling. If there is sufficient liquidity on the
order book the order is fulfilled; otherwise the remaining unfulfilled
part is put on the order book with the max/min price set.
## Limit Order
Places a limit order on the matching engine identified by the ticker.
If there is a match the order is fulfilled; otherwise it enters
the order book queue at the chosen price point.
## Marketable Limit Order
Places a marketable limit order on the matching engine identified by the ticker.
This order's price point is set to the min price (ask_min) if it
is a buy order or to the max price (bid_max) if it is a sell order.
If there is a match the order is fulfilled; otherwise it enters
the order book queue at the chosen price point.
## Stop Loss Order
A stop loss order is activated once the price reaches its stop price, at
which point it is matched against the book (see the `:check_stop_loss`
message handled by this engine).
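## Example
# Hypothetical usage; `order` is an `%Exchange.Order{}` struct built elsewhere.
:ok = Exchange.MatchingEngine.place_order(:AUXLND, order)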
"""
@spec place_order(ticker, Order.order()) :: atom
def place_order(ticker, order) do
GenServer.call(via_tuple(ticker), {:place_order, order})
end
@doc """
Cancels an order and removes it from the Order Book
"""
@spec cancel_order(ticker, String.t()) :: atom
def cancel_order(ticker, order_id) do
GenServer.call(via_tuple(ticker), {:cancel_order, order_id})
end
@doc """
Returns the current Order Book
"""
@spec order_book_entries(ticker) :: {atom, OrderBook.order_book()}
def order_book_entries(ticker) do
GenServer.call(via_tuple(ticker), :order_book_entries)
end
@doc """
Returns the current maximum bidding price
"""
@spec bid_max(ticker) :: {atom, Money}
def bid_max(ticker) do
GenServer.call(via_tuple(ticker), :bid_max)
end
@doc """
Returns the current highest bid volume
"""
@spec bid_volume(ticker) :: {atom, number}
def bid_volume(ticker) do
GenServer.call(via_tuple(ticker), :bid_volume)
end
@doc """
Returns the current minimum asking price
"""
@spec ask_min(ticker) :: {atom, Money}
def ask_min(ticker) do
GenServer.call(via_tuple(ticker), :ask_min)
end
@doc """
Returns the current Spread
"""
@spec spread(ticker) :: {atom, Money}
def spread(ticker) do
GenServer.call(via_tuple(ticker), :spread)
end
@doc """
Returns the current highest asking volume
"""
@spec ask_volume(ticker) :: {atom, number}
def ask_volume(ticker) do
GenServer.call(via_tuple(ticker), :ask_volume)
end
@doc """
Returns the number of open buy orders
"""
@spec total_bid_orders(ticker) :: {atom, number}
def total_bid_orders(ticker) do
GenServer.call(via_tuple(ticker), :total_bid_orders)
end
@doc """
Returns the number of open sell orders
"""
@spec total_ask_orders(ticker) :: {atom, number}
def total_ask_orders(ticker) do
GenServer.call(via_tuple(ticker), :total_ask_orders)
end
@doc """
Returns the list of open orders
"""
@spec open_orders(ticker) :: {atom, list()}
def open_orders(ticker) do
GenServer.call(via_tuple(ticker), :open_orders)
end
@doc """
Returns the list of open orders from a trader
"""
@spec open_orders_by_trader(ticker, String.t()) :: {atom, list()}
def open_orders_by_trader(ticker, trader_id) do
GenServer.call(via_tuple(ticker), {:open_orders_by_trader, trader_id})
end
@doc """
Returns the open order for the given order_id
"""
@spec open_order_by_id(ticker, String.t()) :: {atom, Exchange.Order.order()}
def open_order_by_id(ticker, order_id) do
GenServer.call(via_tuple(ticker), {:open_order_by_id, order_id})
end
@doc """
Returns the price of the last trade on the given side
"""
@spec last_price(ticker :: atom, side :: atom) :: {atom, number}
def last_price(ticker, side) do
GenServer.call(via_tuple(ticker), {:last_price, side})
end
@doc """
Returns the size of the last trade on the given side
"""
@spec last_size(ticker :: atom, side :: atom) :: {atom, number}
def last_size(ticker, side) do
GenServer.call(via_tuple(ticker), {:last_size, side})
end
defp via_tuple(ticker) do
{:via, Registry, {:matching_engine_registry, ticker}}
end
# Server
def init([ticker, currency, min_price, max_price]) do
order_book = %Exchange.OrderBook{
name: ticker,
currency: currency,
buy: %{},
sell: %{},
order_ids: Map.new(),
expiration_list: [],
completed_trades: [],
expired_orders: [],
ask_min: max_price - 1,
bid_max: min_price + 1,
max_price: max_price,
min_price: min_price
}
order_book = order_book_restore!(order_book)
Process.send_after(self(), :check_expiration, @check_expiration_rate)
Process.send_after(self(), :price_broadcast, @price_broadcast_rate)
{:ok, order_book}
end
def order_book_restore!(order_book) do
open_orders = Exchange.Utils.fetch_live_orders(order_book.name)
if Enum.count(open_orders) > 0 do
open_orders
|> Enum.reduce(order_book, fn order, order_book ->
OrderBook.price_time_match(order_book, order)
end)
else
order_book
end
end
def handle_info(:check_stop_loss, order_book) do
order_book = OrderBook.stop_loss_activation(order_book) |> broadcast_trades!()
{:noreply, order_book}
end
def handle_info(:price_broadcast, order_book) do
price_info = %Exchange.Adapters.MessageBus.PriceBroadcast{
ticker: order_book.name,
ask_min: order_book.ask_min,
bid_max: order_book.bid_max
}
message_bus().cast_event(:price_broadcast, price_info)
Process.send_after(self(), :price_broadcast, @price_broadcast_rate)
{:noreply, order_book}
end
def handle_info(:check_expiration, order_book) do
order_book = OrderBook.check_expired_orders!(order_book)
if Enum.count(order_book.expired_orders) > 0 do
Enum.each(
order_book.expired_orders,
fn order ->
message_bus().cast_event(:order_expired, %Exchange.Adapters.MessageBus.OrderExpired{
order: order
})
end
)
end
order_book = OrderBook.flush_expired_orders!(order_book)
Process.send_after(self(), :check_expiration, @check_expiration_rate)
{:noreply, order_book}
end
def handle_call(:ask_min, _from, order_book) do
ask_min =
order_book.ask_min
|> Money.new(order_book.currency)
{:reply, {:ok, ask_min}, order_book}
end
def handle_call(:bid_max, _from, order_book) do
bid_max =
order_book.bid_max
|> Money.new(order_book.currency)
{:reply, {:ok, bid_max}, order_book}
end
def handle_call(:ask_volume, _from, order_book) do
ask_volume = Exchange.OrderBook.highest_ask_volume(order_book)
{:reply, {:ok, ask_volume}, order_book}
end
def handle_call(:bid_volume, _from, order_book) do
bid_volume = Exchange.OrderBook.highest_bid_volume(order_book)
{:reply, {:ok, bid_volume}, order_book}
end
def handle_call(:open_orders, _from, order_book) do
open_orders = Exchange.OrderBook.open_orders(order_book)
{:reply, {:ok, open_orders}, order_book}
end
def handle_call({:open_orders_by_trader, trader_id}, _from, order_book) do
open_orders_by_trader = Exchange.OrderBook.open_orders_by_trader(order_book, trader_id)
{:reply, {:ok, open_orders_by_trader}, order_book}
end
def handle_call({:open_order_by_id, order_id}, _from, order_book) do
order = Exchange.OrderBook.fetch_order_by_id(order_book, order_id)
{:reply, {:ok, order}, order_book}
end
def handle_call(:spread, _from, order_book) do
spread =
OrderBook.spread(order_book)
|> Money.new(order_book.currency)
{:reply, {:ok, spread}, order_book}
end
def handle_call({:place_order, %Order{} = order}, _from, order_book) do
if OrderBook.order_exists?(order_book, order.order_id) do
{:reply, :error, order_book}
else
order = Order.assign_prices(order, order_book)
validity = Order.validate_price(order, order_book)
case validity do
:ok ->
message_bus().cast_event(:order_queued, %Exchange.Adapters.MessageBus.OrderQueued{
order: order
})
order_book =
order_book
|> OrderBook.price_time_match(order)
|> broadcast_trades!
send(self(), :check_stop_loss)
{:reply, :ok, order_book}
{:error, cause} ->
{:reply, {:error, cause}, order_book}
end
end
end
def handle_call(:order_book_entries, _from, order_book) do
{:reply, {:ok, order_book}, order_book}
end
def handle_call({:cancel_order, order_id}, _from, order_book) do
if OrderBook.order_exists?(order_book, order_id) do
cancelled_order = OrderBook.fetch_order_by_id(order_book, order_id)
current_time = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
if (is_integer(cancelled_order.exp_time) and
cancelled_order.exp_time < current_time) or !is_integer(cancelled_order.exp_time) do
order_book = OrderBook.dequeue_order_by_id(order_book, order_id)
message_bus().cast_event(:order_cancelled, %Exchange.Adapters.MessageBus.OrderCancelled{
order: cancelled_order
})
{:reply, :ok, order_book}
else
{:reply, :error, order_book}
end
else
{:reply, :error, order_book}
end
end
def handle_call(:total_bid_orders, _from, order_book) do
total_bid_orders = Exchange.OrderBook.total_bid_orders(order_book)
{:reply, {:ok, total_bid_orders}, order_book}
end
def handle_call(:total_ask_orders, _from, order_book) do
total_ask_orders = Exchange.OrderBook.total_ask_orders(order_book)
{:reply, {:ok, total_ask_orders}, order_book}
end
def handle_call({:last_price, side}, _from, order_book) do
price = Exchange.OrderBook.last_price(order_book, side)
{:reply, {:ok, price}, order_book}
end
def handle_call({:last_size, side}, _from, order_book) do
size = Exchange.OrderBook.last_size(order_book, side)
{:reply, {:ok, size}, order_book}
end
defp broadcast_trades!(order_book) do
trades = OrderBook.completed_trades(order_book)
if Enum.count(trades) > 0 do
trades
|> Enum.each(fn t ->
message_bus().cast_event(:trade_executed, %Exchange.Adapters.MessageBus.TradeExecuted{
trade: t
})
end)
OrderBook.flush_trades!(order_book)
else
order_book
end
end
defp message_bus do
Application.get_env(:exchange, :message_bus_adapter, Exchange.Adapters.EventBus)
end
end
|
lib/exchange/matching_engine.ex
| 0.867668
| 0.64484
|
matching_engine.ex
|
starcoder
|
use Dogma.RuleBuilder
defrule Dogma.Rule.FinalCondition, [catch_all: true] do
@moduledoc """
A rule that checks that the last condition of a `cond` statement is `true`.
For example, prefer this:
cond do
1 + 2 == 5 ->
"Nope"
1 + 3 == 5 ->
"Uh, uh"
true ->
"OK"
end
Not this:
cond do
1 + 2 == 5 ->
"Nope"
1 + 3 == 5 ->
"Nada"
_ ->
"OK"
end
This rule will only catch those `cond` statements where the last condition
is a literal or a `_`. Complex expressions and function calls will not
generate an error.
For example, neither of the following will generate an error:
cond do
some_predicate? -> "Nope"
var == :atom -> "Yep"
end
cond do
var == :atom -> "Nope"
some_predicate? -> "Yep"
end
An atom may also be used as a catch-all expression in a `cond`, since it
evaluates to a truthy value. Suggested atoms are `:else` or `:otherwise`.
To allow one of these instead of `true`, pass it to the rule as a
`:catch_all` option.
If you would like to enforce the use of `_` as your catch-all condition, pass
the atom `:_` into the `:catch_all` option.
cond do
_ -> "Yep"
end
cond do
:_ -> "Yep"
end
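To allow `:otherwise` instead, the rule can be overridden in your mix
config (a sketch following Dogma's `override` convention; the atom is
illustrative):
config :dogma,
override: [
%Dogma.Rule.FinalCondition{ catch_all: :otherwise },
]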
"""
def test(rule, script) do
script
|> Script.walk(&check_ast(&1, &2, rule.catch_all))
|> Enum.reverse
end
defp check_ast({:cond, _, [[do: children]]} = ast, errors, check) do
{:->, meta, [[con] | _]} = List.last(children)
if error?(con, check) do
{ast, [error(meta[:line], check) | errors]}
else
{ast, errors}
end
end
# Case for a custom cond function
defp check_ast({:cond, _, _} = ast, errors, _) do
{ast, errors}
end
defp check_ast(ast, errors, _) do
{ast, errors}
end
defp error?(con, con), do: false
defp error?({:_, _,_}, :_), do: false
defp error?({:_, _,_}, _), do: true
defp error?({_, _, _}, _), do: false
defp error?(_, _), do: true
defp error(line, :_), do: error(line, '_')
defp error(line, check) do
%Error{
rule: __MODULE__,
message:
"Always use #{inspect check} as the last condition of a cond statement",
line: Dogma.Script.line(line)
}
end
end
|
lib/dogma/rule/final_condition.ex
| 0.794185
| 0.583292
|
final_condition.ex
|
starcoder
|
defmodule Exhort.SAT.Builder do
@moduledoc """
Provide for the building of a model for eventual solving.
All functions except `build/0` are pure Elixir.
Create a new builder with `new/0`. Build a model with `build/0`.
`build/0` interacts with the underlying native implementation, returning a
`Exhort.SAT.Model` struct.
"""
alias __MODULE__
alias Exhort.NIF.Nif
alias Exhort.SAT.BoolVar
alias Exhort.SAT.Builder
alias Exhort.SAT.Constraint
alias Exhort.SAT.DSL
alias Exhort.SAT.IntervalVar
alias Exhort.SAT.IntVar
alias Exhort.SAT.LinearExpression
alias Exhort.SAT.Model
alias Exhort.SAT.Vars
require __MODULE__
defmacro __using__(_options) do
quote do
alias Exhort.SAT.BoolVar
alias Exhort.SAT.Builder
alias Exhort.SAT.Constraint
alias Exhort.SAT.Expr
alias Exhort.SAT.IntervalVar
alias Exhort.SAT.IntVar
alias Exhort.SAT.LinearExpression
alias Exhort.SAT.Model
alias Exhort.SAT.SolverResponse
require Exhort.SAT.Builder
require Exhort.SAT.Constraint
require Exhort.SAT.Expr
require Exhort.SAT.LinearExpression
require Exhort.SAT.SolverResponse
end
end
@type t :: %__MODULE__{}
defstruct res: nil, vars: %Vars{}, constraints: [], objectives: [], decision_strategy: nil
@doc """
Start a new builder.
"""
@spec new() :: Builder.t()
def new do
%Builder{}
end
@doc """
Add an item or list of items to the builder.
"""
@spec add(Builder.t(), list() | BoolVar.t() | IntVar.t() | Constraint.t()) :: Builder.t()
def add(builder, list) when is_list(list) do
Enum.reduce(list, builder, &add(&2, &1))
end
def add(%Builder{vars: vars} = builder, %BoolVar{} = var) do
%Builder{builder | vars: Vars.add(vars, var)}
end
def add(%Builder{vars: vars} = builder, %IntVar{} = var) do
%Builder{builder | vars: Vars.add(vars, var)}
end
def add(%Builder{vars: vars} = builder, %IntervalVar{} = var) do
%Builder{builder | vars: Vars.add(vars, var)}
end
def add(%Builder{constraints: constraints} = builder, %Constraint{} = constraint) do
%Builder{builder | constraints: constraints ++ [constraint]}
end
@doc """
Define a boolean variable in the model.
"""
def def_bool_var(%Builder{vars: vars} = builder, name) do
%Builder{builder | vars: Vars.add(vars, %BoolVar{name: name})}
end
@doc """
Define an integer variable in the model.
- `name` is the variable name
- `domain` is the lower and upper bounds of the integer as a tuple,
`{lower_bound, upper_bound}`
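For example (a sketch):
builder |> Builder.def_int_var("x", {0, 10})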
"""
def def_int_var(%Builder{vars: vars} = builder, name, domain) do
%Builder{builder | vars: Vars.add(vars, %IntVar{name: name, domain: domain})}
end
@doc """
Define an interval variable in the model.
See https://developers.google.com/optimization/reference/python/sat/python/cp_model#intervalvar
- `name` is the variable name
- `start` is the start of the interval
- `size` is the size of the interval
- `stop` is the end of the interval
"""
def def_interval_var(%Builder{vars: vars} = builder, name, start, size, stop) do
%Builder{
builder
| vars:
Vars.add(vars, %IntervalVar{
name: name,
start: start,
size: size,
stop: stop
})
}
end
@doc """
Create a named constant. `value` should be a constant integer.
"""
def def_constant(%Builder{vars: vars} = builder, name, value) do
%Builder{builder | vars: Vars.add(vars, %IntVar{name: name, domain: value})}
end
@doc """
See `Exhort.SAT.Constraint`.
Define a bounded constraint.
The expression must include a boundary like `==`, `<=`, `>`, etc.
```
x < y
```
The components of the expression may be simple mathematical expressions,
including the use of `+` and `*`:
```
x * y == z
```
The `sum/1` function may be used to sum over a series of terms:
```
sum(x + y) == z
```
The variables in the expression are defined in the model and do not by default
reference the variables in Elixir scope. The pin operator, `^` may be used to
reference a scoped Elixir variable.
For example, where `x` is a model variable (e.g., `def_int_var(x, {0, 3}`))
and `y` is an Elixir variable (e.g., `y = 2`):
```
x < ^y
```
A `for` comprehension may be used to generate list values:
```
sum(for {x, y} <- ^list, do: x * y) == z
```
As a larger example:
```
y = 20
z = [{0, 1}, {2, 3}, {4, 5}]
Builder.new()
|> Builder.def_int_var(x, {0, 3})
|> Builder.constrain(sum(for {a, b} <- ^z, do: ^a * ^b) < y)
|> Builder.build()
...
```
"""
defmacro constrain(builder, expr, opts \\ []) do
expr =
case expr do
{:==, m1, [lhs, {:abs, _m2, [var]}]} ->
{:"abs==", m1, [lhs, var]}
expr ->
expr
end
{op, _, [lhs, rhs]} = expr
lhs = DSL.transform_expression(lhs)
rhs = DSL.transform_expression(rhs)
opts = Enum.map(opts, &DSL.transform_expression(&1))
quote do
%Builder{} = builder = unquote(builder)
%Builder{
builder
| constraints:
builder.constraints ++
[%Constraint{defn: {unquote(lhs), unquote(op), unquote(rhs), unquote(opts)}}]
}
end
end
@doc """
Define a constraint on the model using variables.
- `constraint` is specified as an atom. See `Exhort.SAT.Constraint`.
- `lhs` and `rhs` may each either be an atom, string, `LinearExpression`, or
an existing `BoolVar` or `IntVar`.
- `opts` may specify a restriction on the constraint:
- `if: BoolVar` specifies that a constraint only takes effect if `BoolVar`
is true
- `unless: BoolVar` specifies that a constraint only takes effect if
`BoolVar` is false
- `:==` - `lhs == rhs`
- `:abs==` - `lhs == abs(rhs)`
- `:"all!="` - Require each element the provide list has a different value
from all the rest
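For example (a sketch; the variable names must already be defined on the
builder):
builder |> Builder.constrain("x", :<, "y")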
"""
@spec constrain(
builder :: Builder.t(),
lhs :: atom() | String.t() | BoolVar.t() | IntVar.t() | LinearExpression.t(),
constraint :: Constraint.constraint(),
rhs :: atom() | String.t() | BoolVar.t() | IntVar.t() | LinearExpression.t(),
opts :: [{:if, BoolVar.t()}] | [{:unless, BoolVar.t()}]
) :: Builder.t()
def constrain(%Builder{} = builder, lhs, constraint, rhs, opts \\ []) do
%Builder{
builder
| constraints: builder.constraints ++ [%Constraint{defn: {lhs, constraint, rhs, opts}}]
}
end
@doc """
Apply the constraint to the given list.
See `Exhort.SAT.Constraint` for the list of constraints.
"""
@spec constrain_list(Builder.t(), Constraint.constraint(), list(), opts :: Keyword.t()) ::
Builder.t()
def constrain_list(%Builder{} = builder, constraint, list, opts \\ []) do
%Builder{
builder
| constraints: builder.constraints ++ [%Constraint{defn: {constraint, list, opts}}]
}
end
@doc """
Add a constraint on the variable named by `literal` to the list of items in `list`.
"""
@spec max_equality(Builder.t(), literal :: atom() | String.t() | IntVar.t(), list()) ::
Builder.t()
def max_equality(builder, literal, list) do
%Builder{builder | objectives: builder.objectives ++ [{:max_equality, literal, list}]}
end
@doc """
Specify an objective to minimize `expression`.
"""
defmacro minimize(builder, expression) do
expression = DSL.transform_expression(expression)
quote do
%Builder{vars: vars} = builder = unquote(builder)
%Builder{builder | objectives: builder.objectives ++ [{:minimize, unquote(expression)}]}
end
end
@doc """
Specify an objective to maximize `expression`.
"""
defmacro maximize(builder, expression) do
expression = DSL.transform_expression(expression)
quote do
%Builder{vars: vars} = builder = unquote(builder)
%Builder{builder | objectives: builder.objectives ++ [{:maximize, unquote(expression)}]}
end
end
@doc """
Specify a decision strategy on a list of variables.
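For example (a sketch; variable names are illustrative):
Builder.decision_strategy(builder, ["x", "y"], :choose_first, :select_min_value)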
"""
@spec decision_strategy(
Builder.t(),
list(),
variable_selection_strategy ::
:choose_first
| :choose_lowest_min
| :choose_highest_max
| :choose_min_domain_size
| :choose_max_domain_size,
domain_reduction_strategy ::
:select_min_value
| :select_max_value
| :select_lower_half
| :select_upper_half
| :select_median_value
) :: Builder.t()
def decision_strategy(
builder,
vars,
variable_selection_strategy,
domain_reduction_strategy
) do
%Builder{
builder
| decision_strategy: {vars, variable_selection_strategy, domain_reduction_strategy}
}
end
@doc """
Provide a reduce that accepts the builder as the first argument and the
enumerable as the second, facilitating pipelines with the `Builder`.
"""
@spec reduce(Builder.t(), Enumerable.t(), function()) :: Builder.t()
def reduce(builder, items, f) do
Enum.reduce(items, builder, f)
end
@doc """
Build the model. Once the model is built it may be solved.
This function interacts with the underlying native model.
"""
@spec build(Builder.t()) :: Model.t()
def build(%Builder{} = builder) do
builder = %Builder{builder | res: Nif.new_builder_nif()}
vars =
Vars.iter(builder.vars)
|> Enum.reduce(%Vars{}, fn
%BoolVar{name: name} = var, vars ->
%BoolVar{res: res} = new_bool_var(builder, name)
Vars.add(vars, %BoolVar{var | res: res})
%IntVar{name: name, domain: {upper_bound, lower_bound}} = var, vars ->
%IntVar{res: res} = new_int_var(builder, upper_bound, lower_bound, name)
Vars.add(vars, %IntVar{var | res: res})
%IntVar{name: name, domain: constant} = var, vars ->
%IntVar{res: res} = new_constant(builder, name, constant)
Vars.add(vars, %IntVar{var | res: res})
%IntervalVar{name: name, start: start, size: size, stop: stop} = var, vars ->
start =
Vars.get(vars, start)
|> LinearExpression.resolve(vars)
size = LinearExpression.resolve(size, vars)
stop =
Vars.get(vars, stop)
|> LinearExpression.resolve(vars)
%IntervalVar{res: res} = new_interval_var(builder, name, start, size, stop)
Vars.add(vars, %IntervalVar{
var
| res: res,
start: start,
size: size,
stop: stop
})
end)
builder = %Builder{builder | vars: vars}
constraints =
builder.constraints
|> Enum.map(fn
%Constraint{defn: {lhs, :==, rhs, opts}} = constraint ->
lhs = LinearExpression.resolve(lhs, vars)
rhs = LinearExpression.resolve(rhs, vars)
res = builder |> add_equal(lhs, rhs) |> modify(opts, vars)
%Constraint{constraint | res: res}
%Constraint{defn: {lhs, :!=, rhs, opts}} = constraint ->
lhs = LinearExpression.resolve(lhs, vars)
rhs = LinearExpression.resolve(rhs, vars)
res = builder |> add_not_equal(lhs, rhs) |> modify(opts, vars)
%Constraint{constraint | res: res}
%Constraint{defn: {lhs, :>, rhs, opts}} = constraint ->
lhs = LinearExpression.resolve(lhs, vars)
rhs = LinearExpression.resolve(rhs, vars)
res = builder |> add_greater_than(lhs, rhs) |> modify(opts, vars)
%Constraint{constraint | res: res}
%Constraint{defn: {lhs, :>=, rhs, opts}} = constraint ->
lhs = LinearExpression.resolve(lhs, vars)
rhs = LinearExpression.resolve(rhs, vars)
res = builder |> add_greater_or_equal(lhs, rhs) |> modify(opts, vars)
%Constraint{constraint | res: res}
%Constraint{defn: {lhs, :<, rhs, opts}} = constraint ->
lhs = LinearExpression.resolve(lhs, vars)
rhs = LinearExpression.resolve(rhs, vars)
res = builder |> add_less_than(lhs, rhs) |> modify(opts, vars)
%Constraint{constraint | res: res}
%Constraint{defn: {lhs, :<=, rhs, opts}} = constraint ->
lhs = LinearExpression.resolve(lhs, vars)
rhs = LinearExpression.resolve(rhs, vars)
res = builder |> add_less_or_equal(lhs, rhs) |> modify(opts, vars)
%Constraint{constraint | res: res}
%Constraint{defn: {lhs, :"abs==", rhs, opts}} = constraint when is_integer(lhs) ->
res = builder |> add_abs_equal(lhs, Vars.get(vars, rhs)) |> modify(opts, vars)
%Constraint{constraint | res: res}
%Constraint{defn: {lhs, :"abs==", rhs, opts}} = constraint ->
res =
builder
|> add_abs_equal(Vars.get(vars, lhs), Vars.get(vars, rhs))
|> modify(opts, vars)
%Constraint{constraint | res: res}
%Constraint{defn: {:"all!=", list, opts}} = constraint ->
list = Enum.map(list, &LinearExpression.resolve(&1, vars))
res = builder |> add_all_different(list) |> modify(opts, vars)
%Constraint{constraint | res: res}
%Constraint{defn: {:no_overlap, list, opts}} = constraint ->
res = builder |> add_no_overlap(list) |> modify(opts, vars)
%Constraint{constraint | res: res}
end)
builder = %Builder{builder | constraints: constraints}
builder.objectives
|> Enum.map(fn
{:max_equality, name, list} ->
add_max_equality(builder, Vars.get(vars, name), list)
{:minimize, var1} ->
add_minimize(builder, Vars.get(vars, var1))
{:maximize, expr1} ->
expr1 = LinearExpression.resolve(expr1, vars)
add_maximize(builder, expr1)
end)
add_decision_strategy(builder, builder.decision_strategy, vars)
%Model{res: builder.res, vars: vars, constraints: constraints}
end
defp to_str(val) when is_atom(val), do: Atom.to_string(val)
defp to_str(val), do: val
defp new_bool_var(%{res: res} = _cp_model_builder, name) do
res = Nif.new_bool_var_nif(res, to_str(name))
%BoolVar{res: res, name: name}
end
defp new_int_var(%{res: res} = _cp_model_builder, upper_bound, lower_bound, name) do
res = Nif.new_int_var_nif(res, upper_bound, lower_bound, to_str(name))
%IntVar{res: res, name: name, domain: {upper_bound, lower_bound}}
end
defp new_constant(%{res: res} = _cp_model_builder, name, value) do
res = Nif.new_constant_nif(res, to_str(name), value)
%IntVar{res: res, name: name, domain: value}
end
defp new_interval_var(%{res: res} = _cp_model_builder, name, start, size, stop) do
res = Nif.new_interval_var_nif(res, to_str(name), start.res, size.res, stop.res)
%IntervalVar{res: res, name: name, start: start, size: size, stop: stop}
end
defp add_equal(cp_model_builder, %LinearExpression{} = expr1, %LinearExpression{} = expr2) do
Nif.add_equal_expr1_expr2_nif(cp_model_builder.res, expr1.res, expr2.res)
end
defp add_not_equal(cp_model_builder, %LinearExpression{} = expr1, %LinearExpression{} = expr2) do
Nif.add_not_equal_expr1_expr2_nif(cp_model_builder.res, expr1.res, expr2.res)
end
defp add_greater_or_equal(
cp_model_builder,
%LinearExpression{} = expr1,
%LinearExpression{} = expr2
) do
Nif.add_greater_or_equal_expr1_expr2_nif(cp_model_builder.res, expr1.res, expr2.res)
end
defp add_greater_than(
cp_model_builder,
%LinearExpression{} = expr1,
%LinearExpression{} = expr2
) do
Nif.add_greater_than_expr1_expr2_nif(cp_model_builder.res, expr1.res, expr2.res)
end
defp add_less_than(cp_model_builder, %LinearExpression{} = expr1, %LinearExpression{} = expr2) do
Nif.add_less_than_expr1_expr2_nif(cp_model_builder.res, expr1.res, expr2.res)
end
defp add_less_or_equal(
cp_model_builder,
%LinearExpression{} = expr1,
%LinearExpression{} = expr2
) do
Nif.add_less_or_equal_expr1_expr2_nif(cp_model_builder.res, expr1.res, expr2.res)
end
defp add_abs_equal(cp_model_builder, %IntVar{} = var1, %IntVar{} = var2) do
Nif.add_abs_equal_nif(cp_model_builder.res, var1.res, var2.res)
end
defp add_abs_equal(cp_model_builder, int1, %IntVar{} = var2) when is_integer(int1) do
Nif.add_abs_equal_constant_nif(cp_model_builder.res, int1, var2.res)
end
defp add_all_different(%Builder{res: builder_res} = _builder, list) do
list
|> Enum.map(& &1.res)
|> then(fn var_list ->
Nif.add_all_different_nif(builder_res, var_list)
end)
end
defp add_no_overlap(%Builder{res: builder_res, vars: vars} = _builder, list) do
list
|> Enum.map(fn var ->
vars
|> Vars.get(var)
|> then(& &1.res)
end)
|> then(fn var_list ->
Nif.add_no_overlap_nif(builder_res, var_list)
end)
end
defp modify(constraint, opts, vars) do
Enum.each(opts, fn
{:if, sym} ->
only_enforce_if(constraint, Vars.get(vars, sym))
{:unless, sym} ->
only_enforce_if(constraint, bool_not(Vars.get(vars, sym)))
end)
constraint
end
defp only_enforce_if(constraint, %BoolVar{} = var) do
Nif.only_enforce_if_nif(constraint, var.res)
end
defp bool_not(%BoolVar{} = var) do
%BoolVar{var | res: Nif.bool_not_nif(var.res)}
end
defp add_max_equality(%Builder{res: builder_res, vars: vars}, %IntVar{} = var1, list) do
list
|> Enum.map(fn var ->
Vars.get(vars, var)
|> then(& &1.res)
end)
|> then(fn var_list ->
Nif.add_max_equality_nif(builder_res, var1.res, var_list)
end)
end
defp add_minimize(%Builder{res: builder_res}, %IntVar{} = var1) do
Nif.add_minimize_nif(builder_res, var1.res)
end
defp add_maximize(%Builder{res: builder_res}, %LinearExpression{} = expr1) do
Nif.add_maximize_nif(builder_res, expr1.res)
end
defp add_decision_strategy(_, nil, _), do: nil
defp add_decision_strategy(
builder,
{list, variable_selection_strategy, domain_reduction_strategy},
vars
) do
variable_selection_strategies = %{
choose_first: 0,
choose_lowest_min: 1,
choose_highest_max: 2,
choose_min_domain_size: 3,
choose_max_domain_size: 4
}
domain_reduction_strategies = %{
select_min_value: 0,
select_max_value: 1,
select_lower_half: 2,
select_upper_half: 3,
select_median_value: 4
}
list
|> Enum.map(fn var ->
Vars.get(vars, var)
|> then(& &1.res)
end)
|> then(fn var_list ->
Nif.add_decision_strategy_nif(
builder.res,
var_list,
Map.fetch!(variable_selection_strategies, variable_selection_strategy),
Map.fetch!(domain_reduction_strategies, domain_reduction_strategy)
)
end)
end
end
|
lib/exhort/sat/builder.ex
| 0.919453
| 0.612136
|
builder.ex
|
starcoder
|
defmodule CodeCorps.GitHub.SuccessAPI do
@moduledoc ~S"""
A mocked github API layer which returns a default successful response for all
GitHub API requests.
All tests in the test environment use this module as a mock for GitHub API
requests by default.
If certain tests explicitly depend on the data returned by GitHub, they can be
mocked individually using the `CodeCorps.GitHub.TestHelpers.with_mock_api`
macro.
As support for new GitHub endpoints is added, defaults for those endpoints
should be added here.
To assert a request has been made to GitHub as a result of an action, the
`assert_received` test helper can be used:
```
assert_received({:get, "https://api.github.com/user", body, headers, options})
```
"""
import CodeCorps.GitHub.TestHelpers
defmodule UnhandledGitHubEndpointError do
defexception message: "You have a GitHub API endpoint that's unhandled in tests."
end
defmodule GitHubMockResponseError do
defexception message: "There was a problem in building a response for your mocked GitHub API."
end
def request(method, url, body, headers, options) do
send(self(), {method, url, body, headers, options})
with {:ok, body} = get_body(method, url, body, headers, options) |> Poison.encode,
{:ok, code} = method |> success_code()
do
response = %HTTPoison.Response{body: body, request_url: url, status_code: code}
{:ok, response}
end
end
defp get_body(:head, _, _, _, _), do: ""
defp get_body(:post, "https://github.com/login/oauth/access_token", _, _, _) do
%{"access_token" => "foo_auth_token"}
end
defp get_body(method, "https://api.github.com/" <> endpoint, body, headers, options) do
get_body(method, endpoint |> String.split("/"), body, headers, options)
end
defp get_body(:get, ["user"], _, _, _), do: load_endpoint_fixture("user")
defp get_body(_method, ["installation", "repositories"], _, _, _) do
load_endpoint_fixture("installation_repositories")
end
defp get_body(:post, ["installations", _id, "access_tokens"], _, _, _) do
%{
"token" => "<PASSWORD>",
"expires_at" => Timex.now() |> Timex.shift(hours: 1) |> DateTime.to_iso8601
}
end
defp get_body(:get, ["repos", _owner, _repo, "issues", "comments"], _, _, _) do
load_endpoint_fixture("issues_comments")
end
defp get_body(:get, ["repos", _owner, _repo, "issues", _number], _, _, _) do
load_endpoint_fixture("issue")
end
defp get_body(:post, ["repos", _owner, _repo, "issues"], _, _, _) do
load_endpoint_fixture("issue")
end
defp get_body(:patch, ["repos", _owner, _repo, "issues", _number], _, _, _) do
load_endpoint_fixture("issue")
end
defp get_body(:post, ["repos", _owner, _repo, "issues", _number, "comments"], _, _, _) do
load_endpoint_fixture("issue_comment")
end
defp get_body(:patch, ["repos", _owner, _repo, "issues", "comments", _id], _, _, _) do
load_endpoint_fixture("issue_comment")
end
defp get_body(:get, ["repos", _owner, _repo, "issues"], _, _, _) do
load_endpoint_fixture("issues")
end
defp get_body(:get, ["repos", _owner, _repo, "pulls"], _, _, _) do
load_endpoint_fixture("pulls")
end
defp get_body(:get, ["repos", _owner, _repo, "pulls", _number], _, _, _) do
load_endpoint_fixture("pull_request")
end
defp get_body(method, endpoint, _, _, _) when is_binary(endpoint) do
raise UnhandledGitHubEndpointError, message: "You have an unhandled :#{method} request to #{endpoint}"
end
defp get_body(method, uri_parts, _, _, _) when is_list uri_parts do
endpoint = uri_parts |> Enum.join("/")
raise UnhandledGitHubEndpointError, message: "You have an unhandled API :#{method} request to #{endpoint}"
end
@spec success_code(atom) :: {:ok, integer} | {:error, :unexpected_code}
defp success_code(:get), do: {:ok, 200}
defp success_code(:post), do: {:ok, 201}
defp success_code(:patch), do: {:ok, 202}
defp success_code(:put), do: {:ok, 202}
defp success_code(:delete), do: {:ok, 204}
defp success_code(:head), do: {:ok, 204}
defp success_code(_), do: {:error, :unexpected_code}
end
|
test/support/github/success_api.ex
| 0.738669
| 0.589687
|
success_api.ex
|
starcoder
|
defmodule ExAws.Dynamo.Decoder do
@moduledoc """
Decodes a dynamo response into a struct.
If Dynamo.Decodable is implemented for the struct it will be called
after the completion of the coercion.
This is important for handling nested maps if you wanted the nested maps
to have atom keys.
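For example (a sketch; `User` stands in for any struct module):
item |> ExAws.Dynamo.Decoder.decode(as: User)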
"""
def decode(item, as: struct_module) do
item
|> decode
|> binary_map_to_struct(struct_module)
|> ExAws.Dynamo.Decodable.decode()
end
@doc """
Convert Dynamo format to Elixir
Functions which convert the Dynamo-style values into normal Elixir values.
Use these if you just want the Dynamo result to look more like Elixir without
coercing it into a particular struct.
"""
def decode(%{"BOOL" => true}), do: true
def decode(%{"BOOL" => false}), do: false
def decode(%{"BOOL" => "true"}), do: true
def decode(%{"BOOL" => "false"}), do: false
def decode(%{"NULL" => true}), do: nil
def decode(%{"NULL" => "true"}), do: nil
def decode(%{"B" => value}), do: Base.decode64!(value)
def decode(%{"S" => value}), do: value
def decode(%{"M" => value}), do: value |> decode
def decode(%{"BS" => values}), do: MapSet.new(values)
def decode(%{"SS" => values}), do: MapSet.new(values)
def decode(%{"NS" => values}) do
values
|> Stream.map(&binary_to_number/1)
|> Enum.into(MapSet.new())
end
def decode(%{"L" => values}) do
Enum.map(values, &decode/1)
end
def decode(%{"N" => value}) when is_binary(value), do: binary_to_number(value)
def decode(%{"N" => value}) when value |> is_integer or value |> is_float, do: value
def decode(%{} = item) do
item
|> Enum.reduce(%{}, fn {k, v}, map ->
Map.put(map, k, decode(v))
end)
end
@doc "Attempts to convert a number to a float, and then an integer"
def binary_to_number(binary) when is_binary(binary) do
try do
String.to_float(binary)
rescue
ArgumentError -> String.to_integer(binary)
end
end
def binary_to_number(binary), do: binary
@doc "Converts a map with binary keys to the specified struct"
def binary_map_to_struct(bmap, module) do
module.__struct__
|> Map.from_struct()
|> Enum.reduce(%{}, fn {k, v}, map ->
Map.put(map, k, Map.get(bmap, Atom.to_string(k), v))
end)
|> Map.put(:__struct__, module)
end
end
|
lib/ex_aws/dynamo/decoder.ex
| 0.735737
| 0.623706
|
decoder.ex
|
starcoder
|
defmodule ExWire.Packet.Capability.Eth.GetBlockBodies do
@moduledoc """
Request the bodies for a set of blocks by hash.
```
`GetBlockBodies` [`+0x05`, `hash_0`: `B_32`, `hash_1`: `B_32`, ...]
Require peer to return a BlockBodies message. Specify the set of blocks that
we're interested in with the hashes.
```
"""
@behaviour ExWire.Packet
alias Blockchain.Block, as: BlockchainBlock
alias ExWire.Bridge.Sync
alias ExWire.Packet.Capability.Eth.BlockBodies
alias ExWire.Struct.Block
require Logger
@sync Application.get_env(:ex_wire, :sync_mock, Sync)
@type t :: %__MODULE__{
hashes: [binary()]
}
defstruct hashes: []
@doc """
Returns the relative message id offset for this message.
This will help determine what its message ID is relative to other Packets in the same Capability.
"""
@impl true
@spec message_id_offset() :: 5
def message_id_offset do
0x05
end
@doc """
Given a GetBlockBodies packet, serializes for transport over Eth Wire Protocol.
## Examples
iex> %ExWire.Packet.Capability.Eth.GetBlockBodies{hashes: [<<5>>, <<6>>]}
...> |> ExWire.Packet.Capability.Eth.GetBlockBodies.serialize
[<<5>>, <<6>>]
"""
@impl true
@spec serialize(t) :: ExRLP.t()
def serialize(packet = %__MODULE__{}) do
packet.hashes
end
@doc """
Given an RLP-encoded GetBlockBodies packet from Eth Wire Protocol,
decodes into a GetBlockBodies struct.
## Examples
iex> ExWire.Packet.Capability.Eth.GetBlockBodies.deserialize([<<5>>, <<6>>])
%ExWire.Packet.Capability.Eth.GetBlockBodies{hashes: [<<5>>, <<6>>]}
"""
@impl true
@spec deserialize(ExRLP.t()) :: t
def deserialize(rlp) do
# assert the RLP payload is a non-empty list of hashes
hashes = [_h | _t] = rlp
%__MODULE__{
hashes: hashes
}
end
@doc """
Handles a GetBlockBodies message by looking up each requested hash and
responding with the block bodies we have; hashes we cannot find are skipped.
"""
@impl true
@spec handle(ExWire.Packet.packet()) :: ExWire.Packet.handle_response()
def handle(packet = %__MODULE__{}) do
bodies =
case @sync.get_current_trie() do
{:ok, trie} ->
get_block_bodies(trie, packet.hashes)
{:error, error} ->
_ =
Logger.debug(fn ->
"Error calling Sync.get_current_trie(): #{error}. Returning empty headers."
end)
[]
end
{:send, BlockBodies.new(bodies)}
end
defp get_block_bodies(trie, hashes) do
hashes
|> Stream.map(fn hash ->
case BlockchainBlock.get_block(hash, trie) do
{:ok, block} ->
Block.new(block)
:not_found ->
nil
end
end)
|> Stream.reject(&is_nil/1)
|> Enum.to_list()
end
end
|
apps/ex_wire/lib/ex_wire/packet/capability/eth/get_block_bodies.ex
| 0.885798
| 0.848282
|
get_block_bodies.ex
|
starcoder
|
defmodule Membrane.RTP.Header do
@moduledoc """
Describes RTP Header defined in [RFC3550](https://tools.ietf.org/html/rfc3550#page-13)
```
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|V=2|P|X| CC |M| PT | sequence number |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| timestamp |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| synchronization source (SSRC) identifier |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
| contributing source (CSRC) identifiers |
| .... |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
```
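For example, a minimal header (field values are illustrative; only the four
enforced keys are required):
```
%Membrane.RTP.Header{ssrc: 0x12345678, payload_type: 96, timestamp: 154_000, sequence_number: 1}
```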
"""
alias Membrane.RTP
@typedoc """
This field identifies the version of RTP. The version defined by this specification is 2.
"""
@type version :: 2
@typedoc """
The interpretation of the marker is defined by a profile
"""
@type marker :: boolean()
@typedoc """
Timestamp of a packet in ticks of clock according to `t:RTP.clock_rate_t/0`.
Its initial value is random, so it should not be interpreted as an absolute time, but rather used to calculate
time difference from other timestamps.
"""
@type timestamp_t() :: non_neg_integer()
@typedoc """
A 16-bit integer sequential number of a packet.
Its initial value should be random.
"""
@type sequence_number_t() :: non_neg_integer()
@type t :: %__MODULE__{
version: version(),
ssrc: RTP.ssrc_t(),
marker: marker(),
payload_type: RTP.payload_type_t(),
timestamp: timestamp_t(),
sequence_number: sequence_number_t(),
csrcs: [RTP.ssrc_t()],
extensions: [__MODULE__.Extension.t()]
}
@enforce_keys [
:ssrc,
:payload_type,
:timestamp,
:sequence_number
]
defstruct @enforce_keys ++
[
version: 2,
marker: false,
csrcs: [],
extensions: []
]
end
|
lib/membrane/rtp/header.ex
| 0.694924
| 0.635166
|
header.ex
|
starcoder
|
defmodule ElixirALE.GPIO do
use GenServer
@moduledoc """
This is an Elixir interface to Linux GPIOs. Each GPIO is an
independent GenServer.
"""
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, opts}
}
end
defmodule State do
@moduledoc false
defstruct port: nil, pin: 0, direction: nil, logic_polarity: nil, callbacks: []
end
@type pin_number :: non_neg_integer
@type pin_value :: 0 | 1 | true | false
@type pin_direction :: :input | :output
@type logic_polarity :: :active_low | :active_high
@type int_direction :: :rising | :falling | :both | :none
# Public API
@doc """
Start and link a new GPIO GenServer. `pin` should be a valid
GPIO pin number on the system and `pin_direction` should be
`:input` or `:output`.
Including `:active_low?` in `opts` will explicitly configure the
GPIO logic polarity on startup. Expected values are `true` or `false`.
If set to `true`, then the polarity is set to `:active_low`.
If set to `false`, then the polarity is set to `:active_high` (default).
Note that in :active_low polarity, the logical state read and written
by this module is inverted from the physical state on the pin.
Interrupt polling follows the logical state.
ElixirALE.GPIO.start_link(16, :output, active_low?: true)
Including `:start_value` in `opts` will explicitly write the
GPIO logical value on start. Expected values are `1`, `0`, `true`, or `false`
ElixirALE.GPIO.start_link(16, :output, start_value: 1)
"""
@spec start_link(pin_number(), pin_direction, [term]) :: GenServer.on_start()
def start_link(pin, pin_direction, opts \\ []) do
{active_low?, opts} = Keyword.pop(opts, :active_low?, false)
{start_value, opts} = Keyword.pop(opts, :start_value)
logic_polarity = if active_low?, do: :active_low, else: :active_high
GenServer.start_link(__MODULE__, [pin, pin_direction, logic_polarity, start_value], opts)
end
@doc """
Helper method for reading the pin number that the GPIO GenServer
is linked to.
"""
@spec pin(GenServer.server()) :: pin_number()
def pin(pid), do: GenServer.call(pid, :pin)
@doc """
Helper method for reading the logic polarity of this GPIO.
"""
@spec logic_polarity(GenServer.server()) :: logic_polarity()
def logic_polarity(pid), do: GenServer.call(pid, :logic_polarity)
@doc """
Free the resources associated with pin and stop the GenServer.
"""
@spec release(GenServer.server()) :: :ok
def release(pid) do
GenServer.cast(pid, :release)
end
@doc """
Write the specified value to the GPIO. The GPIO should be configured
as an output. Valid values are `0` or `false` for logic low and `1`
or `true` for logic high. Other non-zero values will result in logic
high being output.
"""
@spec write(GenServer.server(), pin_value()) :: :ok | {:error, term}
def write(pid, value) when is_integer(value) do
GenServer.call(pid, {:write, value})
end
def write(pid, true), do: write(pid, 1)
def write(pid, false), do: write(pid, 0)
@doc """
Read the current value of the pin.
"""
@spec read(GenServer.server()) :: pin_value() | {:error, term}
def read(pid) do
GenServer.call(pid, :read)
end
@doc """
Turn on "interrupts" on the input pin. The pin's logical state can be
monitored for `:rising` transitions, `:falling` transitions, or `:both`.
The process that calls this method will receive the messages.
If the logic polarity is configured as `:active_low`, then
the logical edges will be inverted from the physical edges.
(ie: logical :rising -> physical :falling)
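For example (a sketch; pin 17 is illustrative):
{:ok, pid} = ElixirALE.GPIO.start_link(17, :input)
:ok = ElixirALE.GPIO.set_int(pid, :both)
# The caller then receives messages like {:gpio_interrupt, 17, :rising}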
"""
@spec set_int(GenServer.server(), int_direction) :: :ok | {:error, term}
def set_int(pid, direction) do
true = pin_interrupt_condition?(direction)
GenServer.call(pid, {:set_int, direction, self()})
end
# gen_server callbacks
def init([pin, pin_direction, logic_polarity]) do
executable = :code.priv_dir(:elixir_ale) ++ '/ale'
port =
Port.open({:spawn_executable, executable}, [
{:args, ["gpio", "#{pin}", Atom.to_string(pin_direction), Atom.to_string(logic_polarity)]},
{:packet, 2},
:use_stdio,
:binary,
:exit_status
])
state = %State{port: port, pin: pin, direction: pin_direction, logic_polarity: logic_polarity}
{:ok, state}
end
def init([pin, pin_direction, logic_polarity, nil]) do
init([pin, pin_direction, logic_polarity])
end
def init([pin, pin_direction, logic_polarity, bool]) when is_boolean(bool) do
start_value = if bool, do: 1, else: 0
init([pin, pin_direction, logic_polarity, start_value])
end
def init([pin, pin_direction, logic_polarity, start_value]) when is_integer(start_value) do
with {:ok, state} <- init([pin, pin_direction, logic_polarity]),
:ok <- call_port(state, :write, start_value) do
{:ok, state}
else
{:error, reason} -> {:stop, reason}
error -> error
end
end
def handle_call(:pin, _from, state) do
{:reply, state.pin, state}
end
def handle_call(:logic_polarity, _from, state) do
{:reply, state.logic_polarity, state}
end
def handle_call(:read, _from, state) do
response = call_port(state, :read, [])
{:reply, response, state}
end
def handle_call({:write, value}, _from, state) do
response = call_port(state, :write, value)
{:reply, response, state}
end
def handle_call({:set_int, direction, requestor}, _from, state) do
response = call_port(state, :set_int, direction)
new_callbacks = insert_unique(state.callbacks, requestor)
state = %{state | callbacks: new_callbacks}
{:reply, response, state}
end
def handle_cast(:release, state) do
{:stop, :normal, state}
end
def handle_info({_, {:data, <<?n, message::binary>>}}, state) do
msg = :erlang.binary_to_term(message)
handle_port(msg, state)
end
defp call_port(state, command, arguments) do
msg = {command, arguments}
send(state.port, {self(), {:command, :erlang.term_to_binary(msg)}})
receive do
{_, {:data, <<?r, response::binary>>}} ->
:erlang.binary_to_term(response)
after
500 -> :timedout
end
end
defp handle_port({:gpio_interrupt, condition}, state) do
# IO.puts "Got interrupt on pin #{state.pin}, #{condition}"
msg = {:gpio_interrupt, state.pin, condition}
for pid <- state.callbacks do
send(pid, msg)
end
{:noreply, state}
end
defp pin_interrupt_condition?(:rising), do: true
defp pin_interrupt_condition?(:falling), do: true
defp pin_interrupt_condition?(:both), do: true
defp pin_interrupt_condition?(:none), do: true
defp pin_interrupt_condition?(_), do: false
defp insert_unique(list, item) do
if Enum.member?(list, item) do
list
else
[item | list]
end
end
end
|
lib/elixir_ale/gpio.ex
| 0.862352
| 0.490785
|
gpio.ex
|
starcoder
|
defmodule Tds.Protocol.Grammar do
@moduledoc """
Grammar Definition for Token Description, General Rules.
Data structure encodings in TDS are defined in terms of the
fundamental definitions.
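These macros are intended for use inside bitstring patterns, for example
(a sketch; the field layout is illustrative):
import Tds.Protocol.Grammar
<<token::byte(), length::ushortlen(), rest::binary>> = data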
"""
@doc """
A single bit value of either 0 or 1.
"""
defmacro bit(n \\ 1), do: quote(do: size(1) - unit(unquote(n)))
@doc """
An unsigned single byte (8-bit) value.
The range is 0 to 255.
"""
defmacro byte(n \\ 1), do: quote(do: unsigned - size(unquote(n)) - unit(8))
@doc """
An unsigned single byte (8-bit) value representing the length of the
associated data.
The range is 0 to 255.
"""
defmacro bytelen, do: quote(do: little - unsigned - 8)
@doc """
An unsigned 2-byte (16-bit) value.
The range is 0 to 65535.
"""
defmacro ushort,
do: quote(do: unsigned - integer - size(8) - unit(2))
@doc """
A signed 4-byte (32-bit) value.
The range is -(2^31) to (2^31)-1.
"""
defmacro long, do: quote(do: signed - 32)
@doc """
An unsigned 4-byte (32-bit) value.
The range is 0 to (2^32)-1.
"""
defmacro ulong, do: quote(do: unsigned - 32)
@doc """
An unsigned 4-byte (32-bit) value.
The range when used as a numeric value is 0 to (2^32)- 1.
"""
defmacro dword, do: quote(do: unsigned - 32)
@doc """
A signed 8-byte (64-bit) value.
The range is -(2^63) to (2^63)-1.
"""
defmacro longlong, do: quote(do: signed - 64)
@doc """
An unsigned 8-byte (64-bit) value.
The range is 0 to (2^64)-1.
"""
defmacro ulonglong, do: quote(do: unsigned - 64)
@doc """
An unsigned single byte (8-bit) value representing a character.
The range is 0 to 255.
"""
defmacro uchar(n \\ 1),
do: quote(do: unsigned - size(unquote(n)) - unit(8))
@doc """
An unsigned 2-byte (16-bit) value representing the length of the associated
data.
The range is 0 to 65535.
"""
defmacro ushortlen, do: quote(do: little - unsigned - integer - 16)
@doc """
An unsigned 2-byte (16-bit) value representing the length of the associated
character or binary data.
The range is 0 to 8000.
"""
defmacro ushortcharbinlen, do: quote(do: little - unsigned - integer - 16)
@doc """
A signed 4-byte (32-bit) value representing the length of the associated data.
The range is -(2^31) to (2^31)-1.
"""
defmacro longlen, do: quote(do: little - signed - integer - 32)
@doc """
An unsigned 8-byte (64-bit) value representing the length of the associated
data.
The range is 0 to (2^64)-1.
"""
defmacro ulonglonglen, do: quote(do: little - unsigned - integer - 64)
@doc """
An unsigned single byte (8-bit) value representing the precision of a
numeric number.
"""
defmacro precision, do: quote(do: unsigned - integer - 8)
@doc """
An unsigned single byte (8-bit) value representing the scale of a
numeric number.
"""
defmacro scale, do: quote(do: unsigned - integer - 8)
@doc """
A single byte (8-bit) value representing a NULL value.
"""
defmacro gen_null, do: quote(do: size(8))
@doc """
A 2-byte (16-bit) or 4-byte (32-bit) value representing a T-SQL NULL value
for a character or binary data type.
Please refer to TYPE_VARBYTE (see section 2.2.5.2.3 in MS-TDS.pdf)
for additional details.
"""
defmacro charbin_null(n \\ 2) when n in [2, 4],
do: quote(do: size(unquote(n)) - unit(8))
@doc """
A FRESERVEDBIT is a BIT value used for padding that does not transmit
information.
FRESERVEDBIT fields SHOULD be set to 0b0 and **MUST be ignored on receipt**.
"""
defmacro freservedbit(n \\ 1),
do: quote(do: size(1) - unit(unquote(n)))
@doc """
A FRESERVEDBYTE is a BYTE value used for padding that does not transmit
information.
FRESERVEDBYTE fields SHOULD be set to 0x00 and **MUST be ignored on
receipt**.
"""
defmacro freservedbyte(n \\ 1),
do: quote(do: size(unquote(n)) - unit(8))
@doc """
A single Unicode character in UCS-2 encoding, as specified in
[Unicode](https://go.microsoft.com/fwlink/?LinkId=90550).
"""
defmacro unicodechar(n \\ 1), do: quote(do: size(unquote(n)) - unit(16))
defmacro bigbinary(n), do: quote(do: binary - size(unquote(n)) - unit(8))
end
|
lib/tds/protocol/grammar.ex
| 0.797557
| 0.625524
|
grammar.ex
|
starcoder
|
defmodule TxtLocalEx.HttpMessenger do
@behaviour TxtLocalEx.Messenger
@ets_bucket_prefix "txt-local-rate-limited-api"
@dry_run_on "1"
# Define API endpoints
@send_sms_path "/send/?"
@bulk_send_path "/bulk_json/?"
@message_status_path "/status_message/?"
@batch_status_path "/status_batch/?"
@api_name "TXT_LOCAL_API"
alias TxtLocalEx.Request
# Public API
@doc """
The send_sms/6 function sends an SMS to a
given phone number from a given sender.
## Example:
```
iex(1)> TxtLocalEx.HttpMessenger.send_sms("API-KEY", "SENDER", "RECEIVER", "message text")
%{
"balance" => 1162,
"batch_id" => 123456789,
"cost" => 2,
"message" => %{
"content" => "This is your message",
"num_parts" => 1,
"sender" => "<NAME>"
},
"messages" => [
%{"id" => "1151346216", "recipient" => 447123456789},
%{"id" => "1151347780", "recipient" => 447987654321}
],
"num_messages" => 2,
"status" => "success"
}
```
"""
@spec send_sms(String.t(), String.t(), String.t(), String.t(), String.t(), String.t()) :: map()
def send_sms(api_key, from, to, body, receipt_url \\ "", custom \\ "") do
# raises ApiLimitExceeded if rate limit exceeded
if rate_limit_enabled?(), do: check_rate_limit!(api_key)
sms_payload = send_sms_payload(api_key, from, to, body, receipt_url, custom)
case Request.request(:post, @send_sms_path, sms_payload) do
{:ok, response} -> response.body
{:error, error} -> raise error
end
end
@doc """
The bulk_send/3 function sends different messages to multiple recipients in bulk.
## Example:
```
iex(1)> messages = [
%{
"number" => "mobile-number",
"text" => "This is your message"
}
]
iex(2)> TxtLocalEx.HttpMessenger.bulk_send("API-KEY", "SENDER", messages)
%{
"balance_post_send" => 12344,
"balance_pre_send" => 12346,
"messages" => [
%{
"balance" => 12345,
"batch_id" => 596486325,
"cost" => 2,
"custom" => "message-custom-id",
"message" => %{
"content" => "This is your message",
"num_parts" => 2,
"sender" => "SBTEST"
},
"messages" => [%{"id" => 1, "recipient" => "mobile-number"}],
"num_messages" => 1,
"receipt_url" => ""
}
],
"status" => "success",
"total_cost" => 2
}
"""
@spec bulk_send(String.t(), String.t(), list(), String.t() | nil) :: map() | {:error, String.t()}
def bulk_send(api_key, from, messages, receipt_url \\ nil)
def bulk_send(_api_key, _from, [], _), do: %{messages: []}
def bulk_send(api_key, from, messages, receipt_url) when is_list(messages) do
# raises ApiLimitExceeded if rate limit exceeded
if rate_limit_enabled?(), do: check_rate_limit!(api_key)
payload = bulk_send_payload(api_key, from, messages, receipt_url)
case Request.request(:post, @bulk_send_path, payload) do
{:ok, response} -> response.body
{:error, error} -> raise TxtLocalEx.Errors.ApiError, error.reason
end
end
def bulk_send(_api_key, _from, _messages, _), do: {:error, "Invalid messages payload"}
@doc """
The time_to_next_bucket/1 function gets the time in seconds to the next bucket limit.
## Example:
```
iex(1)> TxtLocalEx.HttpMessenger.time_to_next_bucket("API-KEY")
{:ok, 5} # 5 seconds to next bucket reset
```
"""
@spec time_to_next_bucket(String.t()) :: tuple()
def time_to_next_bucket(api_key) do
{_, _, ms_to_next_bucket, _, _} =
ExRated.inspect_bucket(ets_bucket_name(api_key), time_scale_in_ms(), api_limit())
sec_to_next_bucket = round(ms_to_next_bucket / 1000.0)
{:ok, sec_to_next_bucket}
end
@doc """
The name/0 function returns the name of the API Client.
## Example:
```
iex(1)> TxtLocalEx.HttpMessenger.name
"[TxtLocal] Test"
```
"""
@spec name() :: String.t()
def name do
@api_name
end
@doc """
The message_status/2 function can be used to determine the delivery status of a sent message.
## Example:
```
iex(1)> TxtLocalEx.HttpMessenger.message_status("API-KEY", "MESSAGE-ID")
%{
"message" => %{
"id" => 1151895224,
"recipient" => 918123456789,
"type" => "sms",
"status" => "D",
"date" => "2013-07-04 14:31:18"
},
"status" => "success"
}
```
"""
@spec message_status(String.t(), String.t()) :: map()
def message_status(api_key, message_id) do
# raises ApiLimitExceeded if rate limit exceeded
if rate_limit_enabled?(), do: check_rate_limit!(api_key)
message_payload = message_status_payload(api_key, message_id)
case Request.request(:post, @message_status_path, message_payload) do
{:ok, response} -> response.body
{:error, error} -> raise TxtLocalEx.Errors.ApiError, error.reason
end
end
@doc """
The batch_status/2 function can be used to generate a delivery report for an entire batch send
## Example:
```
iex(1)> TxtLocalEx.HttpMessenger.batch_status("API-KEY", "BATCH-ID")
%{
"batch_id" => 136546495,
"num_messages" => 2,
"num_delivered" => 2,
"num_undelivered" => 0,
"num_unknown" => 0,
"num_invalid" => 0,
"messages" => [%{"recipient" => 918123456789, "status" => "D"}],
"status" => "success"
}
```
"""
@spec batch_status(String.t(), String.t()) :: map()
def batch_status(api_key, batch_id) do
# raises ApiLimitExceeded if rate limit exceeded
if rate_limit_enabled?(), do: check_rate_limit!(api_key)
batch_payload = batch_status_payload(api_key, batch_id)
case Request.request(:post, @batch_status_path, batch_payload) do
{:ok, response} -> response.body
{:error, error} -> raise error
end
end
# Private API
defp send_sms_payload(api_key, from, to, body, "", "") do
%{
"apiKey" => api_key,
"message" => body,
"sender" => from,
"numbers" => to,
"test" => dry_run?()
}
end
defp send_sms_payload(api_key, from, to, body, receipt_url, "") do
%{
"apiKey" => api_key,
"message" => body,
"sender" => from,
"numbers" => to,
"receipt_url" => receipt_url,
"test" => dry_run?()
}
end
defp send_sms_payload(api_key, from, to, body, receipt_url, custom) do
%{
"apiKey" => api_key,
"message" => body,
"sender" => from,
"numbers" => to,
"receipt_url" => receipt_url,
"custom" => custom,
"test" => dry_run?()
}
end
defp message_status_payload(api_key, message_id) do
%{
"apiKey" => api_key,
"message_id" => message_id
}
end
defp batch_status_payload(api_key, batch_id) do
%{
"apiKey" => api_key,
"batch_id" => batch_id
}
end
defp bulk_send_payload(api_key, from, messages, receipt_url) do
data_payload =
%{
"sender" => from,
"messages" => messages,
"receiptUrl" => receipt_url,
"test" => dry_run?()
}
|> Jason.encode!()
%{
"apiKey" => api_key,
"data" => data_payload
}
end
defp dry_run? do
Application.get_env(:txt_local_ex, :dry_run) == @dry_run_on
end
defp check_rate_limit!(api_key) do
case ExRated.check_rate(ets_bucket_name(api_key), time_scale_in_ms(), api_limit()) do
{:ok, current_count} ->
{:ok, current_count}
{:error, current_count} ->
raise %TxtLocalEx.Errors.ApiLimitExceeded{
reason: "API rate limit exceeded - #{current_count}",
args: [time_scale_in_ms(), api_limit()]
}
end
end
defp rate_limit_enabled? do
{
Application.get_env(:txt_local_ex, :rate_limit_count),
Application.get_env(:txt_local_ex, :rate_limit_scale)
}
|> case do
{nil, _} ->
false
{_, nil} ->
false
{_, _} ->
true
end
end
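# A hypothetical config enabling rate limiting (both values are parsed with
# Integer.parse/1 below, so they are configured as strings; numbers are
# illustrative):
#
# config :txt_local_ex, rate_limit_count: "100", rate_limit_scale: "60000"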
defp time_scale_in_ms do
{time_scale, _} = Integer.parse(Application.get_env(:txt_local_ex, :rate_limit_scale))
time_scale
end
defp api_limit do
{api_limit_rate, _} = Integer.parse(Application.get_env(:txt_local_ex, :rate_limit_count))
api_limit_rate
end
defp ets_bucket_name(api_key) do
@ets_bucket_prefix <> api_key
end
end
|
lib/messengers/http_messenger.ex
| 0.743541
| 0.441011
|
http_messenger.ex
|
starcoder
|
defmodule CustomSet do
alias CustomSet, as: Set
defstruct [:value, :left, :right, size: 0]
@opaque t ::
%Set{value: nil, left: nil, right: nil, size: 0}
| %Set{value: any, left: t, right: t, size: pos_integer}
@spec new(Enum.t()) :: t
def new(enumerable), do: Enum.reduce(enumerable, %Set{}, &add(&2, &1))
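# A quick usage sketch:
# CustomSet.new([3, 1, 2]) |> CustomSet.contains?(2) #=> true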
@spec empty?(t) :: boolean
def empty?(%Set{size: 0}), do: true
def empty?(_set), do: false
@spec contains?(t, any) :: boolean
def contains?(%Set{size: 0}, _element), do: false
def contains?(%Set{value: element}, element), do: true
def contains?(%Set{value: value, left: left}, element) when element < value do
contains?(left, element)
end
def contains?(%Set{right: right}, element), do: contains?(right, element)
@spec equal?(t, t) :: boolean
def equal?(%Set{size: 0}, %Set{size: 0}), do: true
def equal?(set_a, %Set{value: b, left: l_b, right: r_b} = set_b)
when set_a.size == set_b.size do
case split_at(set_a, b) do
{smaller, true, larger} -> equal?(smaller, l_b) and equal?(larger, r_b)
_ -> false
end
end
def equal?(_, _), do: false
@spec disjoint?(t, t) :: boolean
def disjoint?(%Set{size: 0}, _set_b), do: true
def disjoint?(_set_a, %Set{size: 0}), do: true
def disjoint?(%Set{value: a, left: l_a, right: r_a}, set_b) do
case split_at(set_b, a) do
{smaller, false, larger} -> disjoint?(l_a, smaller) and disjoint?(r_a, larger)
_ -> false
end
end
@spec subset?(t, t) :: boolean
def subset?(%Set{size: 0}, _set_b), do: true
def subset?(%Set{value: a, left: l_a, right: r_a} = set_a, set_b)
when set_a.size <= set_b.size do
case split_at(set_b, a) do
{smaller, true, larger} -> subset?(l_a, smaller) and subset?(r_a, larger)
_ -> false
end
end
def subset?(_set_a, _set_b), do: false
@spec add(t, any) :: t
def add(%Set{size: 0}, element), do: %Set{value: element, size: 1, left: %Set{}, right: %Set{}}
def add(%Set{value: element} = set, element), do: set
def add(%Set{value: value, left: left, right: right} = tree, element) when element < value do
left = add(left, element)
%{tree | size: left.size + right.size + 1, left: left}
end
def add(%Set{left: left, right: right} = tree, element) do
right = add(right, element)
%{tree | size: left.size + right.size + 1, right: right}
end
@spec intersection(t, t) :: t
def intersection(%Set{size: 0}, _set), do: %Set{}
def intersection(_set, %Set{size: 0}), do: %Set{}
def intersection(%Set{value: a, left: l_a, right: r_a}, set_b) do
{smaller, found, larger} = split_at(set_b, a)
left = intersection(l_a, smaller)
right = intersection(r_a, larger)
if found do
%Set{value: a, left: left, right: right, size: left.size + right.size + 1}
else
union(left, right)
end
end
@spec difference(t, t) :: t
def difference(%Set{size: 0}, _set), do: %Set{}
def difference(set, %Set{size: 0}), do: set
def difference(%Set{value: a, left: l_a, right: r_a}, set_b) do
{smaller, found, larger} = split_at(set_b, a)
left = difference(l_a, smaller)
right = difference(r_a, larger)
if found do
union(left, right)
else
%Set{value: a, left: left, right: right, size: left.size + right.size + 1}
end
end
@spec union(t, t) :: t
def union(%Set{size: 0}, set), do: set
def union(set, %Set{size: 0}), do: set
def union(%Set{value: a, left: l_a, right: r_a}, set_b) do
{smaller, _found, larger} = split_at(set_b, a)
left = union(l_a, smaller)
right = union(r_a, larger)
%Set{value: a, left: left, right: right, size: left.size + right.size + 1}
end
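# Splits the tree around `pivot`, returning `{smaller, found?, larger}`:
# `smaller` holds all elements less than the pivot, `larger` all elements
# greater, and `found?` says whether the pivot itself was present. This is
# the workhorse behind equal?/disjoint?/subset?/intersection/difference/union.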
@spec split_at(t, any) :: {t, boolean, t}
defp split_at(%Set{size: 0}, _pivot), do: {%Set{}, false, %Set{}}
defp split_at(%Set{value: pivot, left: left, right: right}, pivot) do
{left, true, right}
end
defp split_at(%Set{value: value, left: left, right: right} = set, pivot) when pivot < value do
{smaller, found, larger} = split_at(left, pivot)
{smaller, found, %{set | left: larger, size: right.size + larger.size + 1}}
end
defp split_at(%Set{left: left, right: right} = set, pivot) do
{smaller, found, larger} = split_at(right, pivot)
{%{set | right: smaller, size: left.size + smaller.size + 1}, found, larger}
end
end
|
elixir/custom-set/lib/custom_set.ex
| 0.902276
| 0.800731
|
custom_set.ex
|
starcoder
|
defmodule DataDaemon.Util do
@moduledoc false
@doc ~S"""
Pack a metric as iodata.
"""
@spec package(DataDaemon.key(), DataDaemon.value(), DataDaemon.type(), Keyword.t()) :: iodata
def package(key, value, type, opts \\ []) do
[key, ?:, pack_value(value), ?|, pack_type(type)]
|> tag(opts[:tags])
end
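# For example (illustrative; tag rendering follows pack_tag/1 below):
#   package("page.views", 1, :counter, tags: [env: "prod"])
#   # => iodata that renders as "page.views:1|c|#env:prod"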
@spec pack_value(DataDaemon.value()) :: iodata
defp pack_value(value) when is_list(value), do: value
defp pack_value(value), do: to_string(value)
@spec tag(iodata, nil | DataDaemon.tags()) :: iodata
defp tag(data, nil), do: data
defp tag(data, []), do: data
defp tag(data, tags), do: [data, "|#", Enum.intersperse(Enum.map(tags, &pack_tag/1), ?,)]
@spec pack_type(DataDaemon.type()) :: String.t()
defp pack_type(:counter), do: "c"
defp pack_type(:distribution), do: "d"
defp pack_type(:gauge), do: "g"
defp pack_type(:histogram), do: "h"
defp pack_type(:set), do: "s"
defp pack_type(:timing), do: "ms"
defp pack_type(type), do: type
defp pack_tag({tag, value}), do: [to_string(tag), ?:, pack_tag_value(value)]
defp pack_tag(tag), do: to_string(tag)
defp pack_tag_value({:system, env_var}), do: System.get_env(env_var)
defp pack_tag_value({:config, app, value}), do: Application.get_env(app, value)
defp pack_tag_value(value), do: to_string(value)
@unix ~N[1970-01-01 00:00:00]
@doc ~S"""
Convert a given timestamp to iso8601.
Passing `nil` will return the current time.
"""
@spec iso8601(NaiveDateTime.t() | DateTime.t() | nil | integer) :: String.t()
def iso8601(nil), do: NaiveDateTime.to_iso8601(NaiveDateTime.utc_now()) <> "Z"
def iso8601(ts = %NaiveDateTime{}), do: NaiveDateTime.to_iso8601(ts) <> "Z"
def iso8601(ts = %DateTime{}), do: DateTime.to_iso8601(ts)
def iso8601(ts) when is_integer(ts), do: iso8601(NaiveDateTime.add(@unix, ts, :millisecond))
@doc ~S"""
Fetch a setting from either the passed options or the application config.
"""
@spec config(Keyword.t(), atom, atom, atom, term) :: integer | String.t() | term
def config(opts, app, key, setting, default \\ nil) do
case Keyword.get_lazy(
opts,
setting,
fn ->
Keyword.get(Application.get_env(app, key, []), setting, default)
end
) do
{:system, var} ->
System.get_env(var)
val ->
quote do
unquote(val)
end
|> Code.eval_quoted()
|> elem(0)
end
end
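# e.g. config([], :my_app, :data_daemon, :port, 8125) — names here are
# hypothetical — reads Application.get_env(:my_app, :data_daemon)[:port],
# falling back to 8125; a {:system, "PORT"} value is resolved via
# System.get_env/1 at call time.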
@doc ~S"""
Forces a value to integer.
"""
@spec to_integer!(integer | String.t()) :: integer | no_return
def to_integer!(value) when is_integer(value), do: value
def to_integer!(value) when is_binary(value), do: String.to_integer(value)
def to_integer!({:system, var}), do: to_integer!(System.get_env(var))
end
|
lib/data_daemon/util.ex
| 0.870101
| 0.485112
|
util.ex
|
starcoder
|
defmodule Video.Track do
@known_params [
:from,
:to,
:text,
:parent_ref,
:videos,
:group,
:direction
]
@type plain :: [{binary(), Video.Timestamp.t() | :start, Video.Timestamp.t() | :end}]
# 32*8=256
@type hash :: <<_::256>>
@type t :: %__MODULE__{
from: binary(),
to: binary(),
group: binary(),
direction: :forward | :backward,
text: binary(),
parent_ref: module() | binary(),
videos: plain()
}
@enforce_keys @known_params
defstruct @known_params
defguard valid_hash(str) when is_binary(str) and byte_size(str) == 32
@doc """
Loads all referenced videos and turns them into a single stream of
coordinates. It also calculates the hash for these.
"""
@spec render(t()) :: {hash(), [Video.TimedPoint.t()]}
def render(%__MODULE__{videos: videos}) do
tsvs =
videos
|> Enum.map(&elem(&1, 0))
|> Enum.uniq()
|> Parallel.map(fn file -> {file, Video.TrimmedSource.new_from_path(file)} end)
|> Enum.into(%{})
tsv_list =
Parallel.map(videos, fn {file, from, to} ->
Video.TrimmedSource.extract(tsvs[file], from, to)
end)
{calc_hash(tsv_list), coords(tsv_list)}
end
@spec calc_hash([Video.TrimmedSource.t()]) :: hash()
defp calc_hash(tsv_list) when is_list(tsv_list) do
tsv_list
|> Enum.map(&Video.TrimmedSource.hash_ident(&1))
|> Enum.reduce(:crypto.hash_init(:md5), fn str, hsh ->
:crypto.hash_update(hsh, str)
end)
|> :crypto.hash_final()
|> Base.encode16(case: :lower)
end
# Experimentally determined time to add between two consecutive videos to
# avoid long-term drift. It is unclear why this is needed, given that the
# video length already determines where the next video's coordinates start.
@video_concat_bump_ms 85
# Returns the lat/lon coordinates of all videos together with their time
# offsets in milliseconds, relative to the beginning of the trimmed and
# concatenated video.
@spec coords([Video.TrimmedSource.t()]) :: [Video.TimedPoint.t()]
defp coords(tsv_list) when is_list(tsv_list) do
tsv_list
|> Enum.reduce({0, []}, fn tsv, {duration_so_far, acc} ->
%{first: %{time_offset_ms: cur_time_offset_ms}, coords: coords} =
Video.TrimmedSource.coords(tsv)
coords =
Enum.map(
coords,
&Map.put(
&1,
:time_offset_ms,
&1.time_offset_ms - cur_time_offset_ms + duration_so_far
)
)
dur = duration_so_far + Video.TrimmedSource.duration_ms(tsv) + @video_concat_bump_ms
{dur, acc ++ coords}
end)
|> elem(1)
end
end
|
lib/video/track.ex
| 0.848251
| 0.418905
|
track.ex
|
starcoder
|
defmodule Resourceful.Collection.Ecto do
defmodule NoRepoError do
defexception message:
"No Ecto.Repo has been specified! You must either " <>
"pass one explicity using the :ecto_repo option or you " <>
"can specify a global default by setting the config " <>
"option :ecto_repo for :resourceful."
end
import Ecto.Query, only: [limit: 2], warn: false
def all(queryable, opts), do: repo(opts).all(queryable)
def any?(queryable, opts) do
queryable
|> limit(1)
|> all(opts)
|> Enum.any?()
end
def total(queryable, opts), do: repo(opts).aggregate(queryable, :count)
def repo(opts) do
Keyword.get(opts, :ecto_repo) ||
Application.get_env(:resourceful, :ecto_repo) ||
raise(NoRepoError)
end
end
defimpl Resourceful.Collection.Delegate, for: Ecto.Query do
import Ecto.Query, warn: false
def cast_filter(_, {field, op, val}), do: {cast_field(field), op, val}
def cast_sorter(_, {order, field}), do: {order, cast_field(field)}
def collection(_), do: Resourceful.Collection.Ecto
def filters(_), do: Resourceful.Collection.Ecto.Filters
def paginate(queryable, _, -1), do: queryable
def paginate(queryable, number, size) do
by_limit(queryable, size, (number - 1) * size)
end
def sort(queryable, sorters) do
queryable
|> exclude(:order_by)
|> do_sort(sorters)
end
defp do_sort(queryable, sorters) do
Enum.reduce(sorters, queryable, fn sorter, q -> apply_sorter(q, sorter) end)
end
defp apply_sorter(queryable, {dir, {namespace, col}}) do
order_by(queryable, [_, {^namespace, q}], {^dir, field(q, ^col)})
end
defp apply_sorter(queryable, sorter), do: order_by(queryable, ^sorter)
defp by_limit(queryable, limit, offset) do
queryable
|> limit(^limit)
|> offset(^offset)
end
defp cast_field(%{query_alias: query_alias}), do: query_alias
defp cast_field(%{map_to: map_to}), do: map_to
defp cast_field({namespace, field}), do: {to_atom(namespace), to_atom(field)}
defp cast_field(field) when is_atom(field), do: field
defp cast_field(field)
when is_list(field) and length(field) == 1,
do: to_atom(hd(field))
defp cast_field(field) when is_list(field) do
{
field
|> Stream.drop(-1)
|> Stream.map(&to_string/1)
|> Enum.join(".")
|> to_atom(),
field
|> List.last()
|> to_atom()
}
end
defp cast_field(field) when is_binary(field) do
case String.contains?(field, ".") do
true ->
field
|> String.split(".")
|> cast_field()
_ ->
to_atom(field)
end
end
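# e.g. cast_field("author.name") — a hypothetical field — yields
# {:author, :name}: the leading segments collapse into a namespace atom and
# the final segment becomes the column atom. Atoms must already exist
# (String.to_existing_atom/1), so arbitrary input cannot create new atoms.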
defp to_atom(field) when is_binary(field), do: String.to_existing_atom(field)
defp to_atom(field) when is_atom(field), do: field
end
defimpl Resourceful.Collection.Delegate, for: Atom do
alias Resourceful.Collection.Delegate
import Ecto.Queryable, only: [to_query: 1]
def cast_filter(module, filter) do
module
|> to_query()
|> Delegate.cast_filter(filter)
end
def cast_sorter(module, sorter) do
module
|> to_query()
|> Delegate.cast_sorter(sorter)
end
def collection(module) do
module
|> to_query()
|> Delegate.collection()
end
def filters(module) do
module
|> to_query()
|> Delegate.filters()
end
def paginate(module, number, size) do
module
|> to_query()
|> Delegate.paginate(number, size)
end
def sort(module, sorters) do
module
|> to_query()
|> Delegate.sort(sorters)
end
end
|
lib/resourceful/collection/ecto.ex
| 0.638723
| 0.438545
|
ecto.ex
|
starcoder
|
defmodule ExStoneOpenbank do
@moduledoc """
ExStoneOpenbank is a library for calling Stone Open Bank APIs.
In order to use this library you need to create an application profile. Please see docs of
the service at https://docs.openbank.stone.com.br
The steps to use this SDK are:
1. Create a user and open a payment account on Stone Openbank;
2. Create an application profile on Stone Openbank;
3. Add this library to your application;
4. Add configuration for your app(s);
5. Start calling for your bank API.
## Registration
For user registration, just follow the normal flow of signing up for the service. That includes:
1. Register your user;
2. Open a payment account;
3. Wait for approval (you will be notified);
4. Test your account using our web/mobile banking solutions;
For application registration, you will need to submit some parameters. Please, refer to our API docs:
https://docs.openbank.stone.com.br
## Library configuration
First, add this as a dependency:
{:ex_stone_openbank, "~> {{ latest version here }}"}
The SDK does not start a supervision tree on its own. So, to configure it you need to add at
least two servers to your main application's supervision tree. This is usually in a file named
`application.ex`. Here is an example:
defmodule MyApp.Application do
@moduledoc false
use Application
def start(_type, _args) do
children = [
# important JWKS for webhooks! It only uses the sandbox? key from the config.
{ExStoneOpenbank.Webhooks.StoneJWKS, stone_bank()},
# Your application authenticator
{ExStoneOpenbank.Authenticator, stone_bank()},
# ... other configurations like Repo, Endpoint and so on
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
defp stone_bank do
[
name: :my_app,
private_key: get_private_pem_key(), # fetch the key
client_id: get_client_id(), # fetch the client_id
sandbox?: true, # tells the environment of the application (see docs)
consent_redirect_url: "my url" (only needed if consent is enabled)
]
end
end
The possible configuration parameters are:
- `name` (mandatory): will be used on all calls to `ExStoneOpenbank`
- `private_key` (mandatory): must be a PEM encoded string
- `client_id` (mandatory): the id of the application registration
- `sandbox?` (optional, defaults to `false`): which environment the app is registered to
- `consent_redirect_url` (optional): the URL the service will redirect to once consent is granted
by a user
## Call APIs :)
With that you are ready to call any APIs. They are provided under the namespace `ExStoneOpenbank.API`.
For example:
- `ExStoneOpenbank.API.PaymentAccounts`: has functions to work with balances, list all accounts and
so on
- `ExStoneOpenbank.API.Payments`: pays barcode bankslips
- `ExStoneOpenbank.API.Transfers`: makes internal (Stone account to Stone account) and external
(Stone account to other institution)
... and so on!
All APIs expect the config name as the first parameter. This makes it possible to have
more than one Stone application, each with its own authenticator.
## Cursor based pagination endpoints
We use cursor-based pagination for list endpoints. Therefore, all endpoints that return a
`ExStoneOpenbank.Page` will have:
- `data`: the list of items retrieved
- `cursor`: details about the next/before pages and the limit used
You can keep paginating with `ExStoneOpenbank.Page.next/1` and `ExStoneOpenbank.Page.before/1`
passing the page result.
Here is an example:
account_id = # some id
{:ok, page} = ExStoneOpenbank.API.PaymentAccounts.list_statements(:my_app, account_id, limit: 50)
page.data # => items
# Suppose there are enough items to paginate (if not, the response is {:error, :no_more_pages})
{:ok, page2} = ExStoneOpenbank.Page.next(page)
{:ok, ^page} = ExStoneOpenbank.Page.before(page2)
# You can also query the state before attempting the call
ExStoneOpenbank.Page.has_next?(page) # => true | false
"""
end
|
lib/ex_stone_openbank.ex
| 0.758511
| 0.446555
|
ex_stone_openbank.ex
|
starcoder
|
defmodule ExDiceRoller.Compilers.Separator do
@moduledoc """
Handles the `,` separator for rolls.
The separator allows for multiple, separate dice expressions to be evaluated
and only one returned based on provided options:
* `:highest`: returns the highest calculated value and is the default option
* `:lowest`: returns the lowest calculated value
Examples:
iex> ExDiceRoller.roll("1,2")
2
iex> ExDiceRoller.roll("1,1")
1
iex> ExDiceRoller.roll("1,2", opts: [:highest])
2
iex> ExDiceRoller.roll("1,2", opts: [:lowest])
1
iex> ExDiceRoller.roll("1d6+2,10d8+3", opts: [:highest])
49
iex> ExDiceRoller.roll("1d6+8,10d8+5", opts: [:lowest])
14
Separator expressions can be wrapped in parentheses to be used as a
subexpression in a larger expression.
Examples:
iex> ExDiceRoller.roll("(5d1,2d1)+5", opts: [:highest])
10
iex> ExDiceRoller.roll("(5d1,2d1)+5", opts: [:lowest])
7
## Separator Use And Keeping Dice
The separator can be used alongside kept dice rolls, provided:
* one side is a list and the other a number
* both sides are lists of equal length
When both sides are lists of equal length, the separator compares the
values from both lists by index.
iex> ExDiceRoller.roll("5d6,5d100", opts: [:keep, :lowest])
[2, 2, 6, 4, 5]
iex> ExDiceRoller.roll("5d6,5d100", opts: [:keep, :highest])
[47, 6, 49, 91, 54]
iex> ExDiceRoller.roll("(5d2,5d6)+5", opts: [:highest, :keep])
[7, 9, 9, 11, 6]
iex> ExDiceRoller.roll("(5d1,5d100)+5", opts: [:lowest, :keep])
[6, 6, 6, 6, 6]
iex> ExDiceRoller.roll("5d6, 3", opts: [:keep])
[3, 3, 6, 4, 5]
iex> ExDiceRoller.roll("3, 5d6", opts: [:keep])
[3, 4, 4, 6, 3]
iex> ExDiceRoller.roll("4, xd5", x: ["1d4", 2.5], opts: [:keep])
[5, 4, 4, 4]
iex> ExDiceRoller.roll("2d4, 1d8", opts: [:keep])
** (ArgumentError) cannot use separator on lists of differing lengths
"""
@behaviour ExDiceRoller.Compiler
alias ExDiceRoller.{Args, Compiler, ListComprehension}
@impl true
def compile({:sep, left_expr, right_expr}) do
compile_sep(Compiler.delegate(left_expr), Compiler.delegate(right_expr))
end
@spec compile_sep(Compiler.compiled_val(), Compiler.compiled_val()) :: Compiler.compiled_fun()
defp compile_sep(l, r) when is_function(l) and is_function(r),
do: fn args -> high_low(l.(args), r.(args), args) end
defp compile_sep(l, r) when is_function(l),
do: fn args -> high_low(l.(args), r, args) end
defp compile_sep(l, r) when is_function(r),
do: fn args -> high_low(l, r.(args), args) end
defp compile_sep(l, r), do: fn args -> high_low(l, r, args) end
@spec high_low(Compiler.calculated_val(), Compiler.calculated_val(), Compiler.args()) ::
Compiler.calculated_val()
defp high_low(l, l, _), do: l
defp high_low(l, r, args) when is_list(args) do
case Args.find_first(args, [:highest, :lowest]) do
:highest -> ListComprehension.apply(l, r, :highest, "separator", &do_high_low/3)
:lowest -> ListComprehension.apply(l, r, :lowest, "separator", &do_high_low/3)
_ -> ListComprehension.apply(l, r, :highest, "separator", &do_high_low/3)
end
end
@spec do_high_low(Compiler.calculated_val(), Compiler.calculated_val(), :highest | :lowest) ::
Compiler.calculated_val()
defp do_high_low(l, r, :highest) when l > r, do: l
defp do_high_low(_, r, :highest), do: r
defp do_high_low(l, r, :lowest) when l < r, do: l
defp do_high_low(_, r, :lowest), do: r
end
|
lib/compilers/separator.ex
| 0.840177
| 0.72812
|
separator.ex
|
starcoder
|
defmodule ExSlackBot do
@moduledoc ~s"""
`ExSlackBot` provides a base upon which Slack bots can more easily be built. Each bot is addressable by name and supervised, so errors are handled and crashed bots are restarted.
"""
require Logger
defmacro __using__(override_name \\ nil) do
quote do
require Logger
use GenServer
@override_name unquote(override_name)
defp default_name do
elems = String.split(to_string(__MODULE__) |> String.downcase, ".")
String.replace(List.last(elems), ~r/slackbot|bot/, "") |> String.to_atom
end
def name do
case @override_name do
nil -> default_name
n -> n
end
end
def start_link do
GenServer.start_link __MODULE__, [], name: name
end
def init([]) do
{:ok, %{}}
end
def handle_cast(%{id: slack_id, channel: ch, file: file, args: [cmd | args]} = msg, state) do
attrs = args_to_attributes(args)
attrs = case file do
nil -> attrs
f -> Map.put(attrs, :file, f)
end
reply = try do
call(cmd, msg, attrs, state)
rescue
err ->
err_msg = Exception.format_stacktrace(__STACKTRACE__)
{:reply, %{
color: "danger",
pretext: "Failed to invoke Bot function `#{inspect(__MODULE__)}.#{cmd}(#{inspect(attrs)}, #{inspect(state)})`",
text: "```#{err_msg}```"
}, state}
end
handle_reply(ch, reply)
end
defp call(cmd, msg, attrs, state) do
# Logger.debug "apply(#{inspect(__MODULE__)}, #{inspect(cmd)}, [#{inspect(attrs, pretty: true)}, #{inspect(state, pretty: true)}])"
:erlang.apply(__MODULE__, String.to_atom(cmd), [attrs, state])
end
defp handle_reply(_, {:noreply, state}) do
{:noreply, state}
end
defp handle_reply(channel, {:reply, msg, state}) when is_binary(msg) do
%{ok: true} = Slackex.request("chat.postMessage", [
as_user: true,
channel: channel,
text: msg
])
{:noreply, state}
end
defp handle_reply(channel, {:reply, msg, state}) when is_map(msg) do
attachments = [
%{
fallback: Map.get(msg, :summary, ""),
pretext: Map.get(msg, :pretext, ""),
text: Map.get(msg, :text, ""),
title: Map.get(msg, :title, ""),
color: Map.get(msg, :color, ""),
mrkdwn_in: ["pretext", "text"]
}
]
{:ok, json} = JSX.encode(attachments)
%{ok: true} = Slackex.request("chat.postMessage", [
as_user: true,
channel: channel,
text: "",
attachments: json
])
{:noreply, state}
end
defp handle_reply(_, msg) do
raise "Invalid reply message: #{inspect(msg)}. Should be `{:noreply, state}` or `{:reply, msg, state}`"
end
defp args_to_attributes(args) do
Map.new args, fn a ->
case String.split(a, "=") do
[flag] -> {String.to_atom(flag), true}
[k, "true"] -> {String.to_atom(k), true}
[k, "false"] -> {String.to_atom(k), false}
[k, v] -> {String.to_atom(k), v}
end
end
end
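# e.g. (illustrative): args_to_attributes(["verbose", "env=prod", "force=false"])
# => %{verbose: true, env: "prod", force: false}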
defoverridable [
name: 0,
start_link: 0,
init: 1,
handle_cast: 2,
handle_reply: 2,
call: 4
]
end
end
end
|
lib/exslackbot.ex
| 0.758242
| 0.694147
|
exslackbot.ex
|
starcoder
|
defmodule SmartCity.Ingestion do
alias SmartCity.Helpers
alias SmartCity.Ingestion.Transformation
@moduledoc """
Struct defining an ingestion update event.
```javascript
const Ingestion = {
"id": "",
"name", "",
"allow_duplicates": boolean,
"cadence": "",
"extractSteps": [],
"schema": [],
"targetDatasetId": "",
"sourceFormat": "",
"topLevelSelector": "",
"transformations": [],
}
```
"""
@type not_required(type) :: type | nil
@type t :: %SmartCity.Ingestion{
id: String.t(),
name: String.t(),
allow_duplicates: not_required(boolean()),
cadence: not_required(String.t()),
extractSteps: list(map()),
schema: list(map()),
sourceFormat: String.t(),
targetDataset: String.t(),
topLevelSelector: not_required(String.t()),
transformations: list(Transformation.t())
}
@derive Jason.Encoder
defstruct id: nil,
name: nil,
allow_duplicates: true,
cadence: "never",
extractSteps: [],
schema: [],
targetDataset: nil,
sourceFormat: nil,
topLevelSelector: nil,
transformations: []
use Accessible
@doc """
Returns a new `SmartCity.Ingestion`.
Can be created from `Map` with string or atom keys.
Raises an `ArgumentError` when passed invalid input.
## Parameters
- msg: Map with string or atom keys that defines the ingestion metadata
Required Keys:
- id
- name
- targetDataset
- sourceFormat
- schema
- extractSteps
- transformations
Defaults:
- cadence defaults to "never"
- allow_duplicates defaults to true
"""
@spec new(map()) :: SmartCity.Ingestion.t()
def new(%{"targetDataset" => _} = msg) do
msg
|> Helpers.to_atom_keys()
|> new()
end
def new(
%{
id: _,
name: _,
targetDataset: _,
sourceFormat: type,
schema: schema,
extractSteps: extractSteps,
transformations: transformations
} = msg
) do
msg
|> Map.put(:schema, Helpers.to_atom_keys(schema))
|> Map.put(:extractSteps, Helpers.to_atom_keys(extractSteps))
|> Map.put(:transformations, Enum.map(transformations, &Transformation.new/1))
|> Map.replace!(:sourceFormat, Helpers.mime_type(type))
|> create()
end
def new(msg) do
raise ArgumentError, "Invalid ingestion metadata: #{inspect(msg)}"
end
defp create(%__MODULE__{} = struct) do
struct |> Map.from_struct() |> create()
end
defp create(map), do: struct(%__MODULE__{}, map)
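# An illustrative construction (all field values are assumptions, not taken
# from the docs above; sourceFormat is normalized via Helpers.mime_type/1):
#
#   SmartCity.Ingestion.new(%{
#     "id" => "ing-1",
#     "name" => "sample",
#     "targetDataset" => "ds-1",
#     "sourceFormat" => "text/csv",
#     "schema" => [],
#     "extractSteps" => [],
#     "transformations" => []
#   })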
end
|
lib/smart_city/ingestion.ex
| 0.871844
| 0.728169
|
ingestion.ex
|
starcoder
|