hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1ccd0c8aabdd49f668a08faa4805a5cdc579b8b9 | 1,574 | ex | Elixir | lib/policr_mini_bot/supervisor.ex | gchengyu/policr-mini | 5acd7d6609fcaea2dbd7276fa01ca334ef9f6e6a | [
"MIT"
] | null | null | null | lib/policr_mini_bot/supervisor.ex | gchengyu/policr-mini | 5acd7d6609fcaea2dbd7276fa01ca334ef9f6e6a | [
"MIT"
] | null | null | null | lib/policr_mini_bot/supervisor.ex | gchengyu/policr-mini | 5acd7d6609fcaea2dbd7276fa01ca334ef9f6e6a | [
"MIT"
] | null | null | null | defmodule PolicrMiniBot.Supervisor do
@moduledoc false
use Supervisor
alias PolicrMiniBot.{
TakeoverCheckPreheater,
FromCheckPreheater,
StartCommander,
PingCommander,
SyncCommander,
LoginCommander,
SelfJoinedHandler,
SelfLeftedHandler,
UserLeftedHandler,
UserJoinedHandler,
MemberRemovedHandler,
NewChatTitleHandler,
NewChatPhotoHandler,
VerificationCaller,
RevokeTokenCaller
}
  # Starts the bot supervisor. The `_opts` argument is accepted for
  # child-spec compatibility but ignored; the supervisor is registered
  # under the module name.
  def start_link(_opts) do
    Supervisor.start_link(__MODULE__, :ok, name: __MODULE__)
  end
  @impl true
  def init(_init_arg) do
    # Plugs are installed into Telegex's pipeline in order:
    # preheaters first, then commanders, then event handlers and callers.
    install_plugs([TakeoverCheckPreheater, FromCheckPreheater])
    install_plugs([StartCommander, PingCommander, SyncCommander, LoginCommander])
    install_plugs([
      SelfJoinedHandler,
      SelfLeftedHandler,
      UserLeftedHandler,
      UserJoinedHandler,
      NewChatTitleHandler,
      NewChatPhotoHandler,
      MemberRemovedHandler
    ])
    install_plugs([VerificationCaller, RevokeTokenCaller])
    children = [
      # Image provider service
      PolicrMiniBot.ImageProvider,
      # Message cleanup service
      PolicrMiniBot.Cleaner,
      # One-time processing guarantee
      PolicrMiniBot.Disposable,
      # Polls for update messages
      PolicrMiniBot.UpdatesPoller,
      # Dynamic supervisor that consumes messages
      PolicrMiniBot.Consumer,
      # Task scheduling service
      PolicrMiniBot.Scheduler
    ]
    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one]
    Supervisor.init(children, opts)
  end
defp install_plugs(plugs) do
Telegex.Plug.Pipeline.install_all(plugs)
end
end
| 22.169014 | 81 | 0.707116 |
1ccd3783572854d810a7f3948c9d317464df6ce3 | 2,499 | ex | Elixir | lib/poison.ex | Overbryd/poison | b095851a0ea3e3106e5700b743187226c607de26 | [
"CC0-1.0"
] | null | null | null | lib/poison.ex | Overbryd/poison | b095851a0ea3e3106e5700b743187226c607de26 | [
"CC0-1.0"
] | null | null | null | lib/poison.ex | Overbryd/poison | b095851a0ea3e3106e5700b743187226c607de26 | [
"CC0-1.0"
] | 1 | 2021-08-09T09:19:05.000Z | 2021-08-09T09:19:05.000Z | defmodule Poison do
alias Poison.{Encoder, EncodeError}
alias Poison.{Parser, ParseError}
alias Poison.{Decode, Decoder, DecodeError}
@doc """
Encode a value to JSON.
iex> Poison.encode([1, 2, 3])
{:ok, "[1,2,3]"}
"""
@spec encode(Encoder.t, keyword | Encoder.options) :: {:ok, iodata}
| {:error, EncodeError.t}
def encode(value, options \\ %{}) do
{:ok, encode!(value, options)}
rescue
exception in [EncodeError] ->
{:error, exception}
end
def encode_to_iodata(value, options \\ %{}) do
{:ok, encode_to_iodata!(value, options)}
rescue
exception in [EncodeError] ->
{:error, exception}
end
@doc """
Encode a value to JSON, raises an exception on error.
iex> Poison.encode!([1, 2, 3])
"[1,2,3]"
"""
@spec encode!(Encoder.t, keyword | Encoder.options) :: iodata | no_return
def encode!(value, options \\ %{})
def encode!(value, options) when is_list(options) do
encode!(value, Map.new(options))
end
def encode!(value, options) do
iodata = Encoder.encode(value, options)
if options[:iodata] do
iodata
else
iodata |> IO.iodata_to_binary
end
end
def encode_to_iodata!(value, options) when is_list(options) do
encode_to_iodata!(value, Map.new(options))
end
def encode_to_iodata!(value, options \\ %{}) do
Encoder.encode(value, options)
end
@doc """
Decode JSON to a value.
iex> Poison.decode("[1,2,3]")
{:ok, [1, 2, 3]}
"""
@spec decode(iodata) :: {:ok, Parser.t}
| {:error, ParseError.t}
@spec decode(iodata, keyword | Decoder.options) :: {:ok, any}
| {:error, ParseError.t | DecodeError.t}
def decode(iodata, options \\ %{}) do
{:ok, decode!(iodata, options)}
rescue
exception in [ParseError, DecodeError] ->
{:error, exception}
end
@doc """
Decode JSON to a value, raises an exception on error.
iex> Poison.decode!("[1,2,3]")
[1, 2, 3]
"""
  @spec decode!(iodata) :: Parser.t | no_return
  # Single-argument variant: parse with empty options.
  def decode!(value) do
    Parser.parse!(value, %{})
  end
  @spec decode!(iodata, keyword | Decoder.options) :: Decoder.t | no_return
  # Keyword options are normalized to a map before dispatching.
  def decode!(value, options) when is_list(options) do
    decode!(value, Map.new(options))
  end
  # When a non-nil `:as` target is given, the parsed value is also
  # transformed and decoded into the requested shape.
  def decode!(value, %{as: as} = options) when as != nil do
    value
    |> Parser.parse!(options)
    |> Decode.transform(options)
    |> Decoder.decode(options)
  end
  # Without `:as`, decoding is plain parsing.
  def decode!(value, options) do
    Parser.parse!(value, options)
  end
end
| 24.262136 | 75 | 0.619048 |
1ccd45aed570424d8d3038a1b1e2a614e0a35df6 | 4,971 | exs | Elixir | test/wechat_test.exs | hsieh/wechat | 6a49033d3a35a80358481778a0609f214c340a0c | [
"Apache-2.0"
] | null | null | null | test/wechat_test.exs | hsieh/wechat | 6a49033d3a35a80358481778a0609f214c340a0c | [
"Apache-2.0"
] | null | null | null | test/wechat_test.exs | hsieh/wechat | 6a49033d3a35a80358481778a0609f214c340a0c | [
"Apache-2.0"
] | null | null | null | defmodule WeChatTest do
use ExUnit.Case
alias WeChat.Utils
alias WeChat.ServerMessage.{EventHandler, XmlMessage, XmlParser}
doctest WeChat
  # Verifies the client builder generates the expected config accessors
  # and get/post helpers on an official-account client module.
  test "Auto generate functions" do
    assert WxApp.app_type() == :official_account
    assert WxApp.by_component?() == false
    assert WxApp.server_role() == :client
    assert WxApp.code_name() == "wxapp"
    assert WxApp.storage() == WeChat.Storage.File
    assert WxApp.appid() == "wx2c2769f8efd9abc2"
    assert WxApp.appsecret() == "appsecret"
    assert WxApp.encoding_aes_key() == "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG"
    aes_key =
      WeChat.ServerMessage.Encryptor.aes_key("abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG")
    assert WxApp.aes_key() == aes_key
    assert WxApp.token() == "spamtest"
    assert true = Enum.all?(1..3, &function_exported?(WxApp, :get, &1))
    assert true = Enum.all?(2..4, &function_exported?(WxApp, :post, &1))
  end
  # Same as above for a Work (enterprise) client, including agent-to-cache
  # id resolution and the generated sub-modules.
  test "Auto generate functions(Work)" do
    assert WxWork.app_type() == :work
    assert WxWork.by_component?() == false
    assert WxWork.server_role() == :client
    assert WxWork.storage() == WeChat.Storage.File
    assert WxWork.appid() == "corp_id"
    assert is_list(WxWork.agents())
    assert WxWork.agent2cache_id(10000) == "corp_id_10000"
    assert WxWork.agent2cache_id(:agent_name) == "corp_id_10000"
    assert true = Enum.all?(1..3, &function_exported?(WxWork, :get, &1))
    assert true = Enum.all?(2..4, &function_exported?(WxWork, :post, &1))
    assert Code.ensure_loaded?(WxWork.Message)
    assert function_exported?(WxWork.Message, :send_message, 2)
    assert Code.ensure_loaded?(WxWork.Contacts.Department)
    assert function_exported?(WxWork.Contacts.Department, :list, 0)
  end
  # Excluded sub-modules must not be generated for WxWork2.
  test "Auto generate functions(Work) - exclude" do
    assert Code.ensure_loaded?(WxWork2.Message)
    assert function_exported?(WxWork2.Message, :send_message, 2)
    assert false == Code.ensure_loaded?(WxWork2.Contacts.Department)
    assert false == function_exported?(WxWork2.Contacts.Department, :list, 0)
  end
  # Building an official-account client at runtime should expose web-page
  # OAuth helpers but no mini-program auth helpers.
  test "build official_account client" do
    opts = [
      appid: "wx2c2769f8efd9abc2",
      appsecret: "appsecret",
      encoding_aes_key: "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG",
      token: "spamtest"
    ]
    assert {:ok, WxApp3} = WeChat.build_client(WxApp3, opts)
    assert apply(WxApp3, :appid, []) == "wx2c2769f8efd9abc2"
    assert function_exported?(WxApp3.WebPage, :code2access_token, 1)
    assert false == function_exported?(WxApp3.MiniProgram.Auth, :code2session, 1)
  end
  # A component-backed client additionally gets the Component sub-module.
  test "build component client" do
    opts = [
      appid: "wx2c2769f8efd9abc2",
      by_component?: true,
      component_appid: "wx3c2769f8efd9abc3",
      appsecret: "appsecret",
      encoding_aes_key: "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG",
      token: "spamtest"
    ]
    assert {:ok, WxApp4} = WeChat.build_client(WxApp4, opts)
    assert apply(WxApp4, :appid, []) == "wx2c2769f8efd9abc2"
    assert function_exported?(WxApp4.Component, :get_authorizer_info, 0)
    assert function_exported?(WxApp4.WebPage, :code2access_token, 1)
    assert false == function_exported?(WxApp4.MiniProgram.Auth, :code2session, 1)
  end
  # A mini-program client exposes code2session but no web-page OAuth.
  test "build mini_program client" do
    opts = [
      app_type: :mini_program,
      appid: "wx2c2769f8efd9abc2",
      appsecret: "appsecret",
      encoding_aes_key: "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG",
      token: "spamtest"
    ]
    assert {:ok, WxApp5} = WeChat.build_client(WxApp5, opts)
    assert apply(WxApp5, :appid, []) == "wx2c2769f8efd9abc2"
    assert false == function_exported?(WxApp5.WebPage, :code2access_token, 1)
    assert function_exported?(WxApp5.MiniProgram.Auth, :code2session, 1)
  end
test "xml_parse" do
timestamp = Utils.now_unix()
{:ok, map} =
XmlMessage.reply_text(
"oia2TjjewbmiOUlr6X-1crbLOvLw",
"gh_7f083739789a",
timestamp,
"hello world"
)
|> XmlParser.parse()
assert map == %{
"Content" => "hello world",
"CreateTime" => to_string(timestamp),
"FromUserName" => "gh_7f083739789a",
"MsgType" => "text",
"ToUserName" => "oia2TjjewbmiOUlr6X-1crbLOvLw"
}
end
  # Round-trip: an encrypted server message decrypts back to the original
  # to/from/content fields using the client's AES key and token.
  test "Encrypt Msg" do
    client = WxApp
    timestamp = Utils.now_unix()
    to_openid = "oia2TjjewbmiOUlr6X-1crbLOvLw"
    from_wx_no = "gh_7f083739789a"
    content = "hello world"
    xml_text = XmlMessage.reply_text(to_openid, from_wx_no, timestamp, content)
    xml_string = EventHandler.encode_xml_msg(xml_text, timestamp, client)
    {:ok, xml} = XmlParser.parse(xml_string)
    {:ok, :encrypted_xml, xml_text} =
      EventHandler.decode_xml_msg(
        xml["Encrypt"],
        xml["MsgSignature"],
        xml["Nonce"],
        timestamp,
        client
      )
    assert xml_text["ToUserName"] == to_openid
    assert xml_text["FromUserName"] == from_wx_no
    assert xml_text["Content"] == content
  end
end
| 34.762238 | 91 | 0.680547 |
1ccd58ca705571a59d8edbe5cd4da6002b88def7 | 436 | exs | Elixir | priv/repo/migrations/20200804022935_create_user_settings.exs | raditya3/papercups | 4657b258ee381ac0b7517e57e4d6261ce94b5871 | [
"MIT"
] | 4,942 | 2020-07-20T22:35:28.000Z | 2022-03-31T15:38:51.000Z | priv/repo/migrations/20200804022935_create_user_settings.exs | raditya3/papercups | 4657b258ee381ac0b7517e57e4d6261ce94b5871 | [
"MIT"
] | 552 | 2020-07-22T01:39:04.000Z | 2022-02-01T00:26:35.000Z | priv/repo/migrations/20200804022935_create_user_settings.exs | raditya3/papercups | 4657b258ee381ac0b7517e57e4d6261ce94b5871 | [
"MIT"
] | 396 | 2020-07-22T19:27:48.000Z | 2022-03-31T05:25:24.000Z | defmodule ChatApi.Repo.Migrations.CreateUserSettings do
use Ecto.Migration
def change do
create table(:user_settings, primary_key: false) do
add(:id, :binary_id, primary_key: true)
add(:email_alert_on_new_message, :boolean, default: false, null: false)
add(:user_id, references(:users, type: :integer), null: false)
timestamps()
end
create(unique_index(:user_settings, [:user_id]))
end
end
| 25.647059 | 77 | 0.704128 |
1ccd783de94c2449aa591c31963cb37dd1f5e341 | 2,054 | ex | Elixir | lib/teslamate_web/live/geofence_live/edit.ex | normalfaults/teslamate | 9c61150bd5614728447e21789ab6edc5169b631e | [
"MIT"
] | 1 | 2020-05-17T05:05:22.000Z | 2020-05-17T05:05:22.000Z | lib/teslamate_web/live/geofence_live/edit.ex | normalfaults/teslamate | 9c61150bd5614728447e21789ab6edc5169b631e | [
"MIT"
] | null | null | null | lib/teslamate_web/live/geofence_live/edit.ex | normalfaults/teslamate | 9c61150bd5614728447e21789ab6edc5169b631e | [
"MIT"
] | null | null | null | defmodule TeslaMateWeb.GeoFenceLive.Edit do
use Phoenix.LiveView
alias TeslaMateWeb.Router.Helpers, as: Routes
alias TeslaMateWeb.GeoFenceLive
alias TeslaMateWeb.GeoFenceView
alias TeslaMate.{Locations, Settings, Convert}
alias TeslaMate.Locations.GeoFence
import TeslaMateWeb.Gettext
def render(assigns), do: GeoFenceView.render("edit.html", assigns)
def mount(%{path_params: %{"id" => id}}, socket) do
%GeoFence{radius: radius} = geofence = Locations.get_geofence!(id)
{unit_of_length, radius} =
case Settings.get_settings!() do
%Settings.Settings{unit_of_length: :km} -> {:m, radius}
%Settings.Settings{unit_of_length: :mi} -> {:ft, Convert.m_to_ft(radius)}
end
assigns = %{
geofence: geofence,
changeset: Locations.change_geofence(geofence, %{radius: round(radius)}),
unit_of_length: unit_of_length,
type: :edit,
show_errors: false
}
{:ok, assign(socket, assigns)}
end
def handle_event("validate", %{"geo_fence" => params}, socket) do
changeset =
socket.assigns.geofence
|> Locations.change_geofence(params)
|> Map.put(:action, :update)
{:noreply, assign(socket, changeset: changeset, show_errors: false)}
end
  def handle_event("save", %{"geo_fence" => geofence_params}, socket) do
    # Normalize the submitted radius back to meters before persisting.
    # NOTE(review): when `Float.parse/1` returns `:error` the `with`
    # falls through and `:error` itself becomes the radius value, which
    # presumably surfaces as a changeset validation error — confirm.
    geofence_params =
      Map.update(geofence_params, "radius", nil, fn radius ->
        case socket.assigns.unit_of_length do
          :ft -> with {radius, _} <- Float.parse(radius), do: Convert.ft_to_m(radius)
          :m -> radius
        end
      end)
    case Locations.update_geofence(socket.assigns.geofence, geofence_params) do
      {:ok, %GeoFence{name: name}} ->
        # On success, flash a confirmation and return to the index view.
        {:stop,
         socket
         |> put_flash(:success, gettext("Geo-fence \"%{name}\" updated successfully", name: name))
         |> redirect(to: Routes.live_path(socket, GeoFenceLive.Index))}
      {:error, %Ecto.Changeset{} = changeset} ->
        # On failure, keep the form open and now show validation errors.
        {:noreply, assign(socket, changeset: changeset, show_errors: true)}
    end
  end
end
| 31.6 | 98 | 0.659688 |
1ccd986e7d11a9c4047c1ef843b3f061606ecf20 | 7,899 | ex | Elixir | apps/neo_notification/test/support/http_poison_wrapper.ex | decentralisedkev/neo-scan | c8a35a0952e8c46d40365e0ac76bce361ac5e558 | [
"MIT"
] | null | null | null | apps/neo_notification/test/support/http_poison_wrapper.ex | decentralisedkev/neo-scan | c8a35a0952e8c46d40365e0ac76bce361ac5e558 | [
"MIT"
] | null | null | null | apps/neo_notification/test/support/http_poison_wrapper.ex | decentralisedkev/neo-scan | c8a35a0952e8c46d40365e0ac76bce361ac5e558 | [
"MIT"
] | null | null | null | defmodule NeoNotification.HTTPPoisonWrapper do
@moduledoc false
@notification_url Application.fetch_env!(:neo_notification, :notification_url_test)
@tokens_page_1 %{
"current_height" => 2_326_419,
"message" => "Results for tokens",
"page" => 1,
"page_len" => 1,
"results" => [
%{
"block" => 2_120_075,
"tx" => "0xd7d97c3fc600ee22170f2a66a9b5c83a2122e8c02c6517d81c99d3efedf886d3",
"token" => %{
"name" => "Loopring Neo Token",
"symbol" => "LRN",
"decimals" => 8,
"script_hash" => "0x06fa8be9b6609d963e8fc63977b9f8dc5f10895f",
"contract_address" => "AQV236N8gvwsPpNkMeVFK5T8gSTriU1gri"
},
"contract" => %{
"version" => 0,
"hash" => "0x06fa8be9b6609d963e8fc63977b9f8dc5f10895f",
"script" => nil,
"parameters" => [
"String",
"Array"
],
"returntype" => "ByteArray",
"name" => "lrnToken",
"code_version" => "1",
"author" => "Loopring",
"email" => "@",
"description" => "LrnToken",
"properties" => %{
"storage" => true,
"dynamic_invoke" => false
}
},
"key" => "0x065f89105fdcf8b97739c68f3e969d60b6e98bfa06"
}
],
"total" => 2,
"total_pages" => 2
}
@tokens_page_2 %{
"current_height" => 2_326_419,
"message" => "Results for tokens",
"page" => 2,
"page_len" => 1,
"results" => [
%{
"block" => 1_982_259,
"tx" => "0x449b6f8e305ea79bc9c10cdc096cff0a2b5d7ab94fe42b8c85ccb24a500baeeb",
"token" => %{
"name" => "Orbis",
"symbol" => "OBT",
"decimals" => 8,
"script_hash" => "0x0e86a40588f715fcaf7acd1812d50af478e6e917",
"contract_address" => "AHxKPazwxuL1rDBEbodogyf24zzASxwRRz"
},
"contract" => %{
"code" => %{},
"version" => 0,
"hash" => "0x0e86a40588f715fcaf7acd1812d50af478e6e917",
"script" => "0x12",
"parameters" => [
"String",
"Array"
],
"returntype" => "ByteArray",
"name" => "Orbis",
"code_version" => "2.00",
"author" => "The Orbis Team",
"email" => "admin@orbismesh.com",
"description" => "Orbis Token (OBT)",
"properties" => %{
"storage" => true,
"dynamic_invoke" => false
}
},
"key" => "0x0617e9e678f40ad51218cd7aaffc15f78805a4860e"
}
],
"total" => 2,
"total_pages" => 2
}
def get("#{@notification_url}/notifications/block/0?page=1", _, _) do
{
:ok,
%HTTPoison.Response{
body:
Poison.encode!(%{
"current_height" => 2_400_000,
"message" => "",
"page" => 0,
"page_len" => 500,
"results" => [],
"total" => 0,
"total_pages" => 1
}),
headers: [],
status_code: 200
}
}
end
def get("#{@notification_url}/notifications/block/1444843?page=1", _, _) do
{
:ok,
%HTTPoison.Response{
body:
Poison.encode!(%{
"current_height" => 2_337_751,
"message" => "",
"page" => 0,
"page_len" => 500,
"results" => [
%{
"addr_from" => "",
"addr_to" => "ATuT3d1cM4gtg6HezpFrgMppAV3wC5Pjd9",
"amount" => "5065200000000000",
"block" => 1_444_843,
"contract" => "0xecc6b20d3ccac1ee9ef109af5a7cdb85706b1df9",
"notify_type" => "transfer",
"tx" => "0xc920b2192e74eda4ca6140510813aa40fef1767d00c152aa6f8027c24bdf14f2",
"type" => "SmartContract.Runtime.Notify"
},
%{
"addr_from" => "",
"addr_to" => "AHWaJejUjvez5R6SW5kbWrMoLA9vSzTpW9",
"amount" => "9096780000000000",
"block" => 1_444_843,
"contract" => "0xecc6b20d3ccac1ee9ef109af5a7cdb85706b1df9",
"notify_type" => "transfer",
"tx" => "0xc920b2192e74eda4ca6140510813aa40fef1767d00c152aa6f8027c24bdf14f2",
"type" => "SmartContract.Runtime.Notify"
},
%{
"addr_from" => "",
"addr_to" => "AN8cLUwpv7UEWTVxXgGKeuWvwoT2psMygA",
"amount" => "3500000000000000",
"block" => 1_444_843,
"contract" => "0xecc6b20d3ccac1ee9ef109af5a7cdb85706b1df9",
"notify_type" => "transfer",
"tx" => "0xc920b2192e74eda4ca6140510813aa40fef1767d00c152aa6f8027c24bdf14f2",
"type" => "SmartContract.Runtime.Notify"
}
],
"total" => 3,
"total_pages" => 1
}),
headers: [],
status_code: 200
}
}
end
def get("#{@notification_url}/notifications/block/1444902?page=" <> page, _, _) do
page = String.to_integer(page)
{
:ok,
%HTTPoison.Response{
body:
Poison.encode!(%{
"current_height" => 2_326_473,
"message" => "",
"page" => page,
"page_len" => 500,
"results" =>
List.duplicate(
%{
"addr_from" => "",
"addr_to" => "AN8cLUwpv7UEWTVxXgGKeuWvwoT2psMygA",
"amount" => "3500000000000000",
"block" => 1_444_843,
"contract" => "0xecc6b20d3ccac1ee9ef109af5a7cdb85706b1df9",
"notify_type" => "transfer",
"tx" => "0xc920b2192e74eda4ca6140510813aa40fef1767d00c152aa6f8027c24bdf14f2",
"type" => "SmartContract.Runtime.Notify"
},
if(page < 6, do: 500, else: 271)
),
"total" => 2771,
"total_pages" => 6
}),
headers: [],
status_code: 200
}
}
end
def get("#{@notification_url}/notifications/block/1444801?page=1", _, _) do
{
:ok,
%HTTPoison.Response{
body:
Poison.encode!(%{
"current_height" => 2_326_473,
"message" => "",
"page" => 0,
"page_len" => 500,
"results" => [],
"total" => 0,
"total_pages" => 0
}),
headers: [],
status_code: 200
}
}
end
def get("#{@notification_url}/notifications/block/1?page=1", _, _) do
{
:ok,
%HTTPoison.Response{
body:
Poison.encode!(%{
"current_height" => 2_326_467,
"message" => "",
"page" => 0,
"page_len" => 500,
"results" => [],
"total" => 0,
"total_pages" => 0
}),
headers: [],
status_code: 200
}
}
end
def get("#{@notification_url}/notifications/block/123457?page=1", _, _) do
{:error, :timeout}
end
def get("#{@notification_url}/tokens?page=1", _, _) do
{
:ok,
%HTTPoison.Response{
body: Poison.encode!(@tokens_page_1),
headers: [],
status_code: 200
}
}
end
def get("#{@notification_url}/tokens?page=2", _, _) do
{
:ok,
%HTTPoison.Response{
body: Poison.encode!(@tokens_page_2),
headers: [],
status_code: 200
}
}
end
def get("error", _, _), do: {:error, :error}
def get(url, headers, opts) do
IO.inspect({url, headers, opts})
result = HTTPoison.get(url, headers, opts)
IO.inspect(result)
IO.inspect(Poison.decode!(elem(result, 1).body), limit: :infinity)
result
end
end
| 28.934066 | 95 | 0.479048 |
1ccd9ea0e077c2acd23a9b6883ae9b1bd83e7c37 | 1,109 | exs | Elixir | config/config.exs | fstp/computercraft | 16f0a559a5c180e3a10ded81fb7456d54c2b5aa2 | [
"MIT"
] | null | null | null | config/config.exs | fstp/computercraft | 16f0a559a5c180e3a10ded81fb7456d54c2b5aa2 | [
"MIT"
] | null | null | null | config/config.exs | fstp/computercraft | 16f0a559a5c180e3a10ded81fb7456d54c2b5aa2 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :gen, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:gen, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.774194 | 73 | 0.749324 |
1ccd9f1c1f9eb8f11a16c6029ece7e4281fb5d15 | 926 | ex | Elixir | chat/test/support/channel_case.ex | ikhlas-firlana/web-chat-service-elixir | bad0426619a7e217890c06e506f11d0999ee6ff3 | [
"Apache-2.0"
] | null | null | null | chat/test/support/channel_case.ex | ikhlas-firlana/web-chat-service-elixir | bad0426619a7e217890c06e506f11d0999ee6ff3 | [
"Apache-2.0"
] | null | null | null | chat/test/support/channel_case.ex | ikhlas-firlana/web-chat-service-elixir | bad0426619a7e217890c06e506f11d0999ee6ff3 | [
"Apache-2.0"
] | null | null | null | defmodule ChatWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
  # Code injected into every test module that uses this case template.
  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest
      # The default endpoint for testing
      @endpoint ChatWeb.Endpoint
    end
  end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Chat.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Chat.Repo, {:shared, self()})
end
:ok
end
end
| 24.368421 | 66 | 0.711663 |
1ccdaf3668feadfda106950d012624e62a4cfcce | 667 | ex | Elixir | lib/extensions/json.ex | houshuang/survey | 948acaf20840af82af1d9af3147acca94cb4fcf8 | [
"Apache-2.0"
] | 48 | 2015-06-29T21:20:25.000Z | 2021-05-09T04:27:41.000Z | lib/extensions/json.ex | houshuang/survey | 948acaf20840af82af1d9af3147acca94cb4fcf8 | [
"Apache-2.0"
] | null | null | null | lib/extensions/json.ex | houshuang/survey | 948acaf20840af82af1d9af3147acca94cb4fcf8 | [
"Apache-2.0"
] | 15 | 2015-06-29T21:13:57.000Z | 2021-07-27T10:02:40.000Z | defmodule Extensions.JSON do
alias Postgrex.TypeInfo
@behaviour Postgrex.Extension
  # The extension state is the JSON library module (e.g. Poison),
  # taken from the required `:library` option.
  def init(_parameters, opts),
    do: Keyword.fetch!(opts, :library)
  # Handles both the json and jsonb Postgres types.
  def matching(_library),
    do: [type: "json", type: "jsonb"]
  def format(_library),
    do: :binary
  def encode(%TypeInfo{type: "json"}, map, _state, library),
    do: library.encode!(map)
  # jsonb's binary wire format is the JSON text prefixed with a
  # version byte of 1.
  def encode(%TypeInfo{type: "jsonb"}, map, _state, library),
    do: <<1, library.encode!(map)::binary>>
  def decode(%TypeInfo{type: "json"}, json, _state, library),
    do: library.decode!(json)
  def decode(%TypeInfo{type: "jsonb"}, <<1, json::binary>>, _state, library),
    do: library.decode!(json)
end
| 26.68 | 77 | 0.661169 |
1ccdbba0ae356d743d93ca8468f98b30c1248d31 | 15,270 | ex | Elixir | lib/elixir/lib/supervisor.ex | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | 1 | 2015-02-23T00:01:48.000Z | 2015-02-23T00:01:48.000Z | lib/elixir/lib/supervisor.ex | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/supervisor.ex | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | null | null | null | defmodule Supervisor do
@moduledoc """
A behaviour module for implementing supervision functionality.
A supervisor is a process which supervises other processes called
child processes. Supervisors are used to build an hierarchical process
structure called a supervision tree, a nice way to structure fault-tolerant
applications.
A supervisor implemented using this module will have a standard set
of interface functions and include functionality for tracing and error
reporting. It will also fit into a supervision tree.
## Example
In order to define a supervisor, we need to first define a child process
that is going to be supervised. In order to do so, we will define a GenServer
that represents a stack:
defmodule Stack do
use GenServer
def start_link(state) do
GenServer.start_link(__MODULE__, state, [name: :sup_stack])
end
def handle_call(:pop, _from, [h|t]) do
{:reply, h, t}
end
def handle_cast({:push, h}, t) do
{:noreply, [h|t]}
end
end
We can now define our supervisor and start it as follows:
# Import helpers for defining supervisors
import Supervisor.Spec
# We are going to supervise the Stack server which will
# be started with a single argument [:hello]
children = [
worker(Stack, [[:hello]])
]
# Start the supervisor with our one child
{:ok, pid} = Supervisor.start_link(children, strategy: :one_for_one)
Notice that when starting the GenServer, we have registered it
with name `:sup_stack`, which allows us to call it directly and
get what is on the stack:
GenServer.call(:sup_stack, :pop)
#=> :hello
GenServer.cast(:sup_stack, {:push, :world})
#=> :ok
GenServer.call(:sup_stack, :pop)
#=> :world
However, there is a bug in our stack server. If we call `:pop` and
the stack is empty, it is going to crash because no clause matches.
Let's try it:
GenServer.call(:sup_stack, :pop)
=ERROR REPORT====
Luckily, since the server is being supervised by a supervisor, the
supervisor will automatically start a new one, with the default stack
of `[:hello]` like before:
GenServer.call(:sup_stack, :pop) == :hello
Supervisors support different strategies; in the example above, we
have chosen `:one_for_one`. Furthermore, each supervisor can have many
workers and supervisors as children, each of them with their specific
configuration, shutdown values, and restart strategies.
Continue reading this moduledoc to learn more about supervision strategies
and then follow to the `Supervisor.Spec` module documentation to learn
about the specification for workers and supervisors.
## Module-based supervisors
In the example above, a supervisor was dynamically created by passing
the supervision structure to `start_link/2`. However, supervisors
can also be created by explicitly defining a supervision module:
defmodule MyApp.Supervisor do
use Supervisor
def start_link do
Supervisor.start_link(__MODULE__, [])
end
def init([]) do
children = [
worker(Stack, [[:hello]])
]
supervise(children, strategy: :one_for_one)
end
end
You may want to use a module-based supervisor if:
* You need to do some particular action on supervisor
initialization, like setting up a ETS table.
* You want to perform partial hot-code swapping of the
tree. For example, if you add or remove a children,
the module-based supervision will add and remove the
new children directly, while the dynamic supervision
requires the whole tree to be restarted in order to
perform such swaps.
## Strategies
* `:one_for_one` - if a child process terminates, only that
process is restarted.
* `:one_for_all` - if a child process terminates, all other child
processes are terminated and then all child processes (including
the terminated one) are restarted.
* `:rest_for_one` - if a child process terminates, the "rest" of
the child processes, i.e. the child processes after the terminated
one in start order, are terminated. Then the terminated child
process and the rest of the child processes are restarted.
* `:simple_one_for_one` - similar to `:one_for_one` but suits better
when dynamically attaching children. This strategy requires the
supervisor specification to contain only one child. Many functions
in this module behave slightly differently when this strategy is
used.
## Name Registration
A supervisor is bound to the same name registration rules as a `GenServer`.
Read more about it in the `GenServer` docs.
"""
  @doc false
  defmacro __using__(_) do
    quote location: :keep do
      # Adopt the Erlang/OTP supervisor behaviour and bring the
      # child-spec helpers (worker/supervise etc.) into scope.
      @behaviour :supervisor
      import Supervisor.Spec
    end
  end
@typedoc "Return values of `start_link` functions"
@type on_start :: {:ok, pid} | :ignore |
{:error, {:already_started, pid} | {:shutdown, term} | term}
@typedoc "Return values of `start_child` functions"
@type on_start_child :: {:ok, child} | {:ok, child, info :: term} |
{:error, {:already_started, child} | :already_present | term}
@type child :: pid | :undefined
@typedoc "The Supervisor name"
@type name :: atom | {:global, term} | {:via, module, term}
@typedoc "Options used by the `start*` functions"
@type options :: [name: name,
strategy: Supervisor.Spec.strategy,
max_restarts: non_neg_integer,
max_seconds: non_neg_integer]
@typedoc "The supervisor reference"
@type supervisor :: pid | name | {atom, node}
@doc """
Starts a supervisor with the given children.
A strategy is required to be given as an option. Furthermore,
the `:max_restarts` and `:max_seconds` value can be configured
as described in `Supervisor.Spec.supervise/2` docs.
The options can also be used to register a supervisor name.
the supported values are described under the `Name Registration`
section in the `GenServer` module docs.
If the supervisor and its child processes are successfully created
(i.e. if the start function of all child processes returns `{:ok, child}`,
`{:ok, child, info}`, or `:ignore`) the function returns
`{:ok, pid}`, where `pid` is the pid of the supervisor. If there
already exists a process with the specified name, the function returns
`{:error, {:already_started, pid}}`, where pid is the pid of that
process.
If any of the child process start functions fail or return an error tuple or
an erroneous value, the supervisor will first terminate all already
started child processes with reason `:shutdown` and then terminate
itself and return `{:error, {:shutdown, reason}}`.
Note that the `Supervisor` is linked to the parent process
and will exit not only on crashes but also if the parent process
exits with `:normal` reason.
"""
@spec start_link([tuple], options) :: on_start
def start_link(children, options) when is_list(children) do
spec = Supervisor.Spec.supervise(children, options)
start_link(Supervisor.Default, spec, options)
end
@doc """
Starts a supervisor module with the given `arg`.

To start the supervisor, the `init/1` callback will be invoked in the given
module. The `init/1` callback must return a supervision specification which
can be created with the help of the `Supervisor.Spec` module.

If the `init/1` callback returns `:ignore`, this function returns `:ignore`
as well and the supervisor terminates with reason `:normal`. If it fails or
returns an incorrect value, this function returns `{:error, term}` where
`term` is a term with information about the error, and the supervisor
terminates with reason `term`.

The `:name` option can also be given in order to register a supervisor name,
the supported values are described under the `Name Registration` section in
the `GenServer` module docs.

Other failure conditions are specified in `start_link/2` docs.
"""
@spec start_link(module, term, options) :: on_start
def start_link(module, arg, options \\ []) when is_list(options) do
  # Dispatch on the optional `:name` option to pick the registration mode
  # understood by `:supervisor.start_link`.
  case options[:name] do
    nil ->
      :supervisor.start_link(module, arg)

    name when is_atom(name) ->
      :supervisor.start_link({:local, name}, module, arg)

    name when is_tuple(name) ->
      :supervisor.start_link(name, module, arg)
  end
end
@doc """
Dynamically adds and starts a child specification to the supervisor.

`child_spec` should be a valid child specification (unless the supervisor
is a `:simple_one_for_one` supervisor, see below). The child process will
be started as defined in the child specification.

In the case of `:simple_one_for_one`, the child specification defined in
the supervisor will be used and instead of a `child_spec`, an arbitrary list
of terms is expected. The child process will then be started by appending
the given list to the existing function arguments in the child specification.

If there already exists a child specification with the specified id,
`child_spec` is discarded and the function returns an error with `:already_started`
or `:already_present` if the corresponding child process is running or not.

If the child process start function returns `{:ok, child}` or `{:ok, child, info}`,
the child specification and pid is added to the supervisor and the function returns
the same value.

If the child process start function returns `:ignore`, the child specification is
added to the supervisor, the pid is set to undefined and the function returns
`{:ok, :undefined}`.

If the child process start function returns an error tuple or an erroneous value,
or if it fails, the child specification is discarded and the function returns
`{:error, error}` where `error` is a term containing information about the error
and child specification.
"""
@spec start_child(supervisor, Supervisor.Spec.spec | [term]) :: on_start_child
def start_child(supervisor, child_spec_or_args) do
  # The actual work happens inside the supervisor process itself.
  call(supervisor, {:start_child, child_spec_or_args})
end
@doc """
Terminates the given pid or child id.

If the supervisor is not a `simple_one_for_one`, the child id is expected
and the process, if there is one, is terminated; the child specification is
kept unless the child is temporary.

In case of a `simple_one_for_one` supervisor, a pid is expected. If the child
specification identifier is given instead of a `pid`, the function will
return `{:error, :simple_one_for_one}`.

A non-temporary child process may later be restarted by the supervisor. The child
process can also be restarted explicitly by calling `restart_child/2`. Use
`delete_child/2` to remove the child specification.

If successful, the function returns `:ok`. If there is no child specification or
pid, the function returns `{:error, :not_found}`.
"""
@spec terminate_child(supervisor, pid | Supervisor.Spec.child_id) :: :ok | {:error, error}
      when error: :not_found | :simple_one_for_one
def terminate_child(supervisor, pid_or_child_id) do
  # Delegates to the supervisor process (see `:supervisor.terminate_child/2`).
  call(supervisor, {:terminate_child, pid_or_child_id})
end
@doc """
Deletes the child specification identified by `child_id`.

The corresponding child process must not be running, use `terminate_child/2`
to terminate it.

If successful, the function returns `:ok`. This function may error with an
appropriate error tuple if the `child_id` is not found, or if the current
process is running or being restarted.

This operation is not supported by `simple_one_for_one` supervisors.
"""
@spec delete_child(supervisor, Supervisor.Spec.child_id) :: :ok | {:error, error}
      when error: :not_found | :simple_one_for_one | :running | :restarting
def delete_child(supervisor, child_id) do
  # Delegates to the supervisor process (see `:supervisor.delete_child/2`).
  call(supervisor, {:delete_child, child_id})
end
@doc """
Restarts a child process identified by `child_id`.

The child specification must exist and the corresponding child process must not
be running.

Note that for temporary children, the child specification is automatically deleted
when the child terminates, and thus it is not possible to restart such children.

If the child process start function returns `{:ok, child}` or
`{:ok, child, info}`, the pid is added to the supervisor and the function returns
the same value.

If the child process start function returns `:ignore`, the pid remains set to
`:undefined` and the function returns `{:ok, :undefined}`.

This function may error with an appropriate error tuple if the `child_id` is not
found, or if the current process is running or being restarted.

If the child process start function returns an error tuple or an erroneous value,
or if it fails, the function returns `{:error, error}`.

This operation is not supported by `simple_one_for_one` supervisors.
"""
@spec restart_child(supervisor, Supervisor.Spec.child_id) ::
        {:ok, child} | {:ok, child, term} | {:error, error}
      when error: :not_found | :simple_one_for_one | :running | :restarting | term
def restart_child(supervisor, child_id) do
  # Delegates to the supervisor process (see `:supervisor.restart_child/2`).
  call(supervisor, {:restart_child, child_id})
end
@doc """
Returns a list with information about all children.

Note that calling this function when supervising a large number of children
under low memory conditions can cause an out of memory exception.

This function returns a list of tuples containing:

  * `id` - as defined in the child specification or `:undefined` in the case
    of a `simple_one_for_one` supervisor

  * `child` - the pid of the corresponding child process, the atom
    `:restarting` if the process is about to be restarted, or `:undefined` if
    there is no such process

  * `type` - `:worker` or `:supervisor` as defined in the child specification

  * `modules` – as defined in the child specification

"""
@spec which_children(supervisor) ::
        [{Supervisor.Spec.child_id | :undefined,
          child | :restarting,
          Supervisor.Spec.worker,
          Supervisor.Spec.modules}]
def which_children(supervisor) do
  # Delegates to the supervisor process (see `:supervisor.which_children/1`).
  call(supervisor, :which_children)
end
@doc """
Returns a map containing count values for the supervisor.

The map contains the following keys:

  * `:specs` - the total count of children, dead or alive

  * `:active` - the count of all actively running child processes managed by
    this supervisor

  * `:supervisors` - the count of all supervisors whether or not the child
    process is still alive

  * `:workers` - the count of all workers, whether or not the child process
    is still alive

"""
@spec count_children(supervisor) ::
        %{specs: non_neg_integer, active: non_neg_integer,
          supervisors: non_neg_integer, workers: non_neg_integer}
def count_children(supervisor) do
  # The supervisor replies with a keyword list of counts; convert it into a
  # map to match the documented return type.
  supervisor
  |> call(:count_children)
  |> Map.new()
end
# Inline the tiny wrapper so every public function pays no extra call frame.
@compile {:inline, call: 2}

# All public functions funnel through this helper. `:infinity` is used
# because supervisor operations (e.g. starting children) can legitimately
# take longer than the default GenServer call timeout.
defp call(supervisor, req) do
  GenServer.call(supervisor, req, :infinity)
end
end
| 37.610837 | 92 | 0.710413 |
1ccdc13bb918e1655b7b853d29b262418037f597 | 3,163 | ex | Elixir | clients/storage/lib/google_api/storage/v1/model/channel.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/storage/lib/google_api/storage/v1/model/channel.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/storage/lib/google_api/storage/v1/model/channel.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Storage.V1.Model.Channel do
  @moduledoc """
  A notification channel used to watch for resource changes.

  ## Attributes

  *   `address` (*type:* `String.t`, *default:* `nil`) - The address where notifications are delivered for this channel.
  *   `expiration` (*type:* `String.t`, *default:* `nil`) - Date and time of notification channel expiration, expressed as a Unix timestamp, in milliseconds. Optional.
  *   `id` (*type:* `String.t`, *default:* `nil`) - A UUID or similar unique string that identifies this channel.
  *   `kind` (*type:* `String.t`, *default:* `api#channel`) - Identifies this as a notification channel used to watch for changes to a resource, which is "api#channel".
  *   `params` (*type:* `map()`, *default:* `nil`) - Additional parameters controlling delivery channel behavior. Optional.
  *   `payload` (*type:* `boolean()`, *default:* `nil`) - A Boolean value to indicate whether payload is wanted. Optional.
  *   `resourceId` (*type:* `String.t`, *default:* `nil`) - An opaque ID that identifies the resource being watched on this channel. Stable across different API versions.
  *   `resourceUri` (*type:* `String.t`, *default:* `nil`) - A version-specific identifier for the watched resource.
  *   `token` (*type:* `String.t`, *default:* `nil`) - An arbitrary string delivered to the target address with each notification delivered over this channel. Optional.
  *   `type` (*type:* `String.t`, *default:* `nil`) - The type of delivery mechanism used for this channel.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :address => String.t(),
          :expiration => String.t(),
          :id => String.t(),
          :kind => String.t(),
          :params => map(),
          :payload => boolean(),
          :resourceId => String.t(),
          :resourceUri => String.t(),
          :token => String.t(),
          :type => String.t()
        }

  # `field/1,2` is a macro provided by GoogleApi.Gax.ModelBase (definition not
  # shown here); the Poison defimpls below delegate (de)serialization to it.
  field(:address)
  field(:expiration)
  field(:id)
  field(:kind)
  field(:params, type: :map)
  field(:payload)
  field(:resourceId)
  field(:resourceUri)
  field(:token)
  field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.Storage.V1.Model.Channel do
  # Delegate decoding to the `decode/2` generated on the model module itself.
  def decode(value, options),
    do: GoogleApi.Storage.V1.Model.Channel.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Storage.V1.Model.Channel do
  # All generated models share the generic ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 42.743243 | 170 | 0.680367 |
1ccddbf4ba88952a93943c03f034fc885b48b63b | 236 | ex | Elixir | test.elixir/lib/backwater_elixir_tests.ex | g-andrade/backwater | e9744d2057e56fe1c183b2fd680ffe703eec4090 | [
"MIT"
] | 53 | 2017-07-12T13:18:20.000Z | 2022-01-04T15:44:50.000Z | test.elixir/lib/backwater_elixir_tests.ex | g-andrade/backwater | e9744d2057e56fe1c183b2fd680ffe703eec4090 | [
"MIT"
] | 4 | 2019-06-25T23:21:56.000Z | 2021-05-13T19:17:24.000Z | test.elixir/lib/backwater_elixir_tests.ex | g-andrade/backwater | e9744d2057e56fe1c183b2fd680ffe703eec4090 | [
"MIT"
] | 2 | 2018-11-20T15:18:31.000Z | 2019-06-25T07:37:41.000Z | defmodule BackwaterElixirTests do
@moduledoc """
Documentation for BackwaterElixirTests.
"""
@doc """
Hello world.

## Examples

    iex> BackwaterElixirTests.hello
    :world

"""
def hello, do: :world
end
| 12.421053 | 41 | 0.639831 |
1ccdefb36e67306f0feb9db96803a371069ee9c6 | 1,155 | exs | Elixir | mix.exs | zacky1972/nx_math | c8995ece124287ddc1af8c88eefdd21b79d2e9c4 | [
"Apache-2.0"
] | null | null | null | mix.exs | zacky1972/nx_math | c8995ece124287ddc1af8c88eefdd21b79d2e9c4 | [
"Apache-2.0"
] | null | null | null | mix.exs | zacky1972/nx_math | c8995ece124287ddc1af8c88eefdd21b79d2e9c4 | [
"Apache-2.0"
] | null | null | null | defmodule NxMath.MixProject do
use Mix.Project
@source_url "https://github.com/zacky1972/nx_math"
@version "0.1.0-dev"
# Standard Mix project definition; version/docs settings come from the
# module attributes declared above.
def project do
  [
    app: :nx_math,
    version: @version,
    elixir: "~> 1.13",
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    docs: docs()
  ]
end
# Run "mix help compile.app" to learn about applications.
# Only :logger is requested on top of the defaults.
def application do
  [
    extra_applications: [:logger]
  ]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
  [
    # {:dep_from_hexpm, "~> 0.3.0"},
    # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
    {:ex_doc, "~> 0.26", only: :dev, runtime: false},
    {:dialyxir, "~> 1.1", only: :dev, runtime: false},
    {:git_hooks, "~> 0.6.4", only: :dev, runtime: false},
    {:benchee, "~> 1.0", only: :dev},
    # Nx and EXLA are pulled from the elixir-nx monorepo via sparse checkout.
    {:exla, "~> 0.1.0-dev", github: "elixir-nx/nx", sparse: "exla"},
    {:nx, "~> 0.1.0-dev", github: "elixir-nx/nx", sparse: "nx", override: true}
  ]
end
# ExDoc configuration; the version tag and repo URL come from module attributes.
defp docs do
  [main: "NxMath", source_ref: "v#{@version}", source_url: @source_url]
end
end
| 24.574468 | 87 | 0.553247 |
1cce166e4570bc2e728b9fdcdac27b55ebee9dbc | 132 | exs | Elixir | test/edgar_test.exs | david-christensen/edgar | 32fdcf5b60b5a9be2f9604ea6d57e4cc1ab27e93 | [
"MIT"
] | null | null | null | test/edgar_test.exs | david-christensen/edgar | 32fdcf5b60b5a9be2f9604ea6d57e4cc1ab27e93 | [
"MIT"
] | null | null | null | test/edgar_test.exs | david-christensen/edgar | 32fdcf5b60b5a9be2f9604ea6d57e4cc1ab27e93 | [
"MIT"
] | null | null | null | defmodule EdgarTest do
use ExUnit.Case
doctest Edgar
# Smoke test: `Edgar.hello/0` is expected to return the `:world` atom.
test "greets the world" do
  assert Edgar.hello() == :world
end
end
| 14.666667 | 34 | 0.69697 |
1cce217dada11163c3441a720342f9404a4c26bd | 2,223 | exs | Elixir | test/credo/code/charlists_test.exs | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | null | null | null | test/credo/code/charlists_test.exs | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | null | null | null | test/credo/code/charlists_test.exs | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | null | null | null | defmodule Credo.Code.CharlistsTest do
use Credo.TestHelper
alias Credo.Code.Charlists
# NOTE(review): the expected charlist bodies below appear whitespace-collapsed
# in this copy of the source — confirm the space padding inside the single
# quotes still matches the original literal lengths.
test "it should return the source without string literals 2" do
  source = """
  x = "this 'should not be' removed!"
  y = 'also: # TODO: no comment here'
  ?' # TODO: this is the third
  # '
  \"\"\"
  y = 'also: # TODO: no comment here'
  \"\"\"
  'also: # TODO: no comment here as well'
  """

  expected = """
  x = "this 'should not be' removed!"
  y = ' '
  ?' # TODO: this is the third
  # '
  \"\"\"
  y = 'also: # TODO: no comment here'
  \"\"\"
  ' '
  """

  assert expected == source |> Charlists.replace_with_spaces()
end
# Commented-out code contains charlist-looking text; replacing with "." must
# leave the source untouched (expected == source, so the fixture's exact
# indentation does not affect the assertion).
test "it should not modify commented out code" do
  source = """
  defmodule Foo do
  defmodule Bar do
  # @doc \"\"\"
  # Reassign a student to a discussion group.
  # This will un-assign student from the current discussion group
  # \"\"\"
  # def assign_group(leader = %User{}, student = %User{}) do
  # cond do
  # leader.role == :student ->
  # {:error, :invalid}
  #
  # student.role != :student ->
  # {:error, :invalid}
  #
  # true ->
  # Repo.transaction(fn ->
  # {:ok, _} = unassign_group(student)
  #
  # %Group{}
  # |> Group.changeset(%{})
  # |> put_assoc(:leader, leader)
  # |> put_assoc(:student, student)
  # |> Repo.insert!()
  # end)
  # end
  # end
  def baz, do: 123
  end
  end
  """

  expected = source

  assert expected == source |> Charlists.replace_with_spaces(".")
end
# Tagged as slow because it reads a fixture file from disk.
@tag slow: :disk_io
test "it should produce valid code /2" do
  example_code = File.read!("test/fixtures/example_code/nested_escaped_heredocs.ex")

  # Applying the transformation twice must yield the same result, and that
  # result must still be parseable Elixir.
  result = Charlists.replace_with_spaces(example_code)
  result2 = Charlists.replace_with_spaces(result)

  assert result == result2, "Charlists.replace_with_spaces/2 should be idempotent"
  assert match?({:ok, _}, Code.string_to_quoted(result))
end
end
| 26.464286 | 86 | 0.518668 |
1cce2691cb4ea82d92317bb6cb2088ecf2677082 | 550 | ex | Elixir | lib/phx_custom/helper/file.ex | c4710n/phx_custom | 19080ed8896be9ae846d12b2f631a1bc4aef5c78 | [
"MIT"
] | null | null | null | lib/phx_custom/helper/file.ex | c4710n/phx_custom | 19080ed8896be9ae846d12b2f631a1bc4aef5c78 | [
"MIT"
] | null | null | null | lib/phx_custom/helper/file.ex | c4710n/phx_custom | 19080ed8896be9ae846d12b2f631a1bc4aef5c78 | [
"MIT"
] | null | null | null | defmodule PhxCustom.Helper.File do
# Classifies `path`: `:eex` when the extension is ".eex", otherwise the
# `File.Stat` type of the filesystem entry (`:regular`, `:directory`,
# `:symlink`, ...), or `:error` when the path cannot be stat'ed.
def detect_type(path) do
  if Path.extname(path) == ".eex" do
    :eex
  else
    # The original matched `{:ok, %File.Stat{...}} = stat` inside a `cond`
    # condition, which raised a MatchError for non-existent paths instead of
    # ever reaching the `true -> :error` fallback. An explicit `case` covers
    # both outcomes.
    case File.lstat(path) do
      {:ok, %File.Stat{type: type}} -> type
      {:error, _reason} -> :error
    end
  end
end
# True when `abs_path` exists and is a regular file. `File.lstat/1` is used,
# so symlinks are not followed.
def is_file(abs_path) do
  match?({:ok, %File.Stat{type: :regular}}, File.lstat(abs_path))
end
# True when `abs_path` exists and is a directory (symlinks not followed).
def is_directory(abs_path) do
  match?({:ok, %File.Stat{type: :directory}}, File.lstat(abs_path))
end
end
| 17.741935 | 49 | 0.554545 |
1cce4cbd4bb9216bdc8d80a37c6dc23fbdee157d | 61 | ex | Elixir | lib/compass_web/views/page_view.ex | theclimbersclub/compass | e9c48c86e6844dd751053af5c3068ce898fd0f2a | [
"MIT"
] | 2 | 2019-10-12T13:12:15.000Z | 2021-03-07T10:21:53.000Z | lib/compass_web/views/page_view.ex | theclimbersclub/compass | e9c48c86e6844dd751053af5c3068ce898fd0f2a | [
"MIT"
] | 14 | 2019-10-14T23:33:15.000Z | 2020-05-30T10:04:16.000Z | lib/compass_web/views/page_view.ex | theclimbersclub/compass | e9c48c86e6844dd751053af5c3068ce898fd0f2a | [
"MIT"
] | 1 | 2020-12-12T12:31:01.000Z | 2020-12-12T12:31:01.000Z | defmodule CompassWeb.PageView do
use CompassWeb, :view
end
| 15.25 | 32 | 0.803279 |
1cce4d05a659cc5110765ef2808f6fbe79eab496 | 1,050 | ex | Elixir | lib/prelude/debugger.ex | divex/prelude | cd6d4517d27d0954fa184b2938cef1a823afae32 | [
"MIT"
] | null | null | null | lib/prelude/debugger.ex | divex/prelude | cd6d4517d27d0954fa184b2938cef1a823afae32 | [
"MIT"
] | null | null | null | lib/prelude/debugger.ex | divex/prelude | cd6d4517d27d0954fa184b2938cef1a823afae32 | [
"MIT"
] | null | null | null | defmodule Prelude.Debugger do
@ignore [__info__: 1, __etude__: 0, module_info: 0, module_info: 1]
# Pretty-prints disassembled BEAM code, skipping the function names listed in
# `ignore` (defaults to the compiler-generated ones in `@ignore`). Accepts a
# compiled module binary, a `:beam_disasm.file/1` result tuple, a map with a
# `:code` key, or the code listing itself. Each clause returns its own input
# unchanged so `print` can sit in the middle of a pipeline.
def print(thing, ignore \\ @ignore)

def print(beam, ignore) when is_binary(beam) do
  # Disassemble the compiled module first, then print the code listing.
  {:beam_file, _, _, _, _, code} = :beam_disasm.file(beam)
  print(code, ignore)
  beam
end

def print({:beam_file, _, _, _, _, code} = f, ignore) do
  print(code, ignore)
  f
end

def print(%{code: code} = m, ignore) do
  print(code, ignore)
  m
end

def print(code, ignore) do
  # Print each function header followed by its instructions, unless the
  # name/arity pair is in the ignore list.
  Enum.each(code, fn
    ({:function, name, arity, _, code} = f) ->
      if {name, arity} in ignore do
        nil
      else
        debug(f)
        Enum.each(code, &debug/1)
      end
  end)

  IO.puts ""
  code
end
# Prints one disassembled instruction prefixed with "> ". Function headers
# are shown without their code body; labels and plain instructions are padded
# first. NOTE(review): the charlist paddings below may have been
# whitespace-collapsed in this copy — confirm against the original alignment.
def debug(instr) do
  :io.format('> ')

  case instr do
    {:function, name, arity, entry, _code} ->
      {:function, name, arity, entry}

    {:label, _} ->
      :io.format(' ')
      instr

    _ ->
      :io.format(' ')
      instr
  end
  # The case result (the possibly-trimmed instruction) is what gets printed.
  |> IO.inspect(width: :infinity)
end
end
| 22.340426 | 69 | 0.54 |
1cce5a521871fe0c1128b70f287af395b8a8784b | 589 | ex | Elixir | lib/lily/core/monad.ex | dwhelan/ok_computer | 538abcc5b325a0d41dbf642c22bbbbe86c95a164 | [
"MIT"
] | null | null | null | lib/lily/core/monad.ex | dwhelan/ok_computer | 538abcc5b325a0d41dbf642c22bbbbe86c95a164 | [
"MIT"
] | null | null | null | lib/lily/core/monad.ex | dwhelan/ok_computer | 538abcc5b325a0d41dbf642c22bbbbe86c95a164 | [
"MIT"
] | null | null | null | defmodule Lily.Monad do
@moduledoc """
Monadic pipes.
"""
@type t :: any
@doc "return"
@callback return(any) :: t
@doc "bind"
@callback bind(t, f :: (any -> t)) :: t
@doc """
Injects a monad implementation into the calling module: adopts the
`Lily.Monad`, `Functor` and `Applicative` behaviours and derives `fmap/2`
and `apply/2` from the `bind/2` and `return/1` the caller defines in `block`.
"""
defmacro monad(do: block) do
  quote do
    alias OkComputer.{Functor, Applicative}
    alias Lily.{Monad}
    import Monad
    @behaviour Monad
    @behaviour Functor
    @behaviour Applicative

    # fmap lifts `f` through bind by re-wrapping the result with return.
    @impl Functor
    def fmap(a, f), do: bind(a, &(f.(&1) |> return()))

    # apply sequences a wrapped function over a wrapped value via fmap.
    @impl Applicative
    def apply(a, f), do: bind(f, &fmap(a, &1))

    unquote(block)
  end
end
| 17.848485 | 56 | 0.565365 |
1cce5dbab40aa3829e73f853f0e2f78ef8d17b66 | 253 | ex | Elixir | apps/cronitex/lib/cronitex.ex | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | 1 | 2020-11-05T15:38:53.000Z | 2020-11-05T15:38:53.000Z | apps/cronitex/lib/cronitex.ex | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | 36 | 2020-10-24T01:28:42.000Z | 2022-02-07T11:11:37.000Z | apps/cronitex/lib/cronitex.ex | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | null | null | null | defmodule Cronitex do
@moduledoc """
Cronitex keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 25.3 | 66 | 0.754941 |
1cce813fce73a8dbf4146802e0c28e64530bef44 | 1,549 | ex | Elixir | lib/discuss_web/views/error_helpers.ex | frunox/discuss | d19367eceb8c68ebef4e565981ceef256b5cb04e | [
"MIT"
] | null | null | null | lib/discuss_web/views/error_helpers.ex | frunox/discuss | d19367eceb8c68ebef4e565981ceef256b5cb04e | [
"MIT"
] | null | null | null | lib/discuss_web/views/error_helpers.ex | frunox/discuss | d19367eceb8c68ebef4e565981ceef256b5cb04e | [
"MIT"
] | null | null | null | defmodule DiscussWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
  # One <span> per error registered for `field` on the form.
  form.errors
  |> Keyword.get_values(field)
  |> Enum.map(fn error ->
    content_tag(:span, translate_error(error),
      class: "invalid-feedback",
      phx_feedback_for: input_name(form, field)
    )
  end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
  # When using gettext, we typically pass the strings we want
  # to translate as a static argument:
  #
  #     # Translate "is invalid" in the "errors" domain
  #     dgettext("errors", "is invalid")
  #
  #     # Translate the number of files with plural rules
  #     dngettext("errors", "1 file", "%{count} files", count)
  #
  # Because the error messages we show in our forms and APIs
  # are defined inside Ecto, we need to translate them dynamically.
  # This requires us to call the Gettext module passing our gettext
  # backend as first argument.
  #
  # Note we use the "errors" domain, which means translations
  # should be written to the errors.po file. The :count option is
  # set by Ecto and indicates we should also apply plural rules.
  if count = opts[:count] do
    Gettext.dngettext(DiscussWeb.Gettext, "errors", msg, msg, count, opts)
  else
    Gettext.dgettext(DiscussWeb.Gettext, "errors", msg, opts)
  end
end
end
| 32.270833 | 76 | 0.665591 |
1cceab20506c7f2931ec9e2c058111db3149850e | 1,701 | ex | Elixir | clients/spanner/lib/google_api/spanner/v1/model/set_iam_policy_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/spanner/lib/google_api/spanner/v1/model/set_iam_policy_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/spanner/lib/google_api/spanner/v1/model/set_iam_policy_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Spanner.V1.Model.SetIamPolicyRequest do
  @moduledoc """
  Request message for `SetIamPolicy` method.

  ## Attributes

  *   `policy` (*type:* `GoogleApi.Spanner.V1.Model.Policy.t`, *default:* `nil`) - REQUIRED: The complete policy to be applied to the `resource`. The size of the policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud Platform services (such as Projects) might reject them.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :policy => GoogleApi.Spanner.V1.Model.Policy.t() | nil
        }

  # `field/2` is a GoogleApi.Gax.ModelBase macro; `as:` nests decoding into
  # the Policy model.
  field(:policy, as: GoogleApi.Spanner.V1.Model.Policy)
end
defimpl Poison.Decoder, for: GoogleApi.Spanner.V1.Model.SetIamPolicyRequest do
  # Delegate decoding to the decode/2 generated on the model module itself.
  def decode(value, options),
    do: GoogleApi.Spanner.V1.Model.SetIamPolicyRequest.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Spanner.V1.Model.SetIamPolicyRequest do
  # All generated models share the generic ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 36.191489 | 307 | 0.745444 |
1ccead5a60164ed6487c630d43c9d0ef0e6f677b | 2,103 | ex | Elixir | lib/andy/profiles/rover/gm_defs/clearance.ex | jfcloutier/andy | 74b93f734d6f6353356041a603a96ad5aed4b5dc | [
"MIT"
] | 7 | 2019-05-29T22:55:25.000Z | 2021-08-22T18:38:29.000Z | lib/andy/profiles/rover/gm_defs/clearance.ex | jfcloutier/andy | 74b93f734d6f6353356041a603a96ad5aed4b5dc | [
"MIT"
] | null | null | null | lib/andy/profiles/rover/gm_defs/clearance.ex | jfcloutier/andy | 74b93f734d6f6353356041a603a96ad5aed4b5dc | [
"MIT"
] | 1 | 2020-01-25T20:46:43.000Z | 2020-01-25T20:46:43.000Z | defmodule Andy.Profiles.Rover.GMDefs.Clearance do
@moduledoc "The GM definition for :clearance"
alias Andy.GM.{GenerativeModelDef, Conjecture}
import Andy.GM.Utils
# The :clearance GM definition: two conjectures about being clear of an
# obstacle / of the other rover, both with priors of %{is: true}, and no
# contradictions or intentions of its own.
def gm_def() do
  %GenerativeModelDef{
    name: :clearance,
    conjectures: [
      conjecture(:clear_of_obstacle),
      conjecture(:clear_of_other)
    ],
    contradictions: [],
    priors: %{
      clear_of_obstacle: %{about: :self, values: %{is: true}},
      clear_of_other: %{about: :self, values: %{is: true}}
    },
    intentions: %{}
  }
end
# Conjectures

# Opinion conjecture (activated via `opinion_activator/0` from
# Andy.GM.Utils — semantics defined there, not shown here).
defp conjecture(:clear_of_obstacle) do
  %Conjecture{
    name: :clear_of_obstacle,
    activator: opinion_activator(),
    predictors: [
      # Predict the obstacle-related percepts keep their default values.
      no_change_predictor(:obstacle_not_hit, default: %{is: true}),
      no_change_predictor(:obstacle_avoided, default: %{is: true})
    ],
    valuator: clear_of_obstacle_belief_valuator(),
    intention_domain: []
  }
end

# Opinion conjecture about the other rover (`opinion_activator(:other)`).
defp conjecture(:clear_of_other) do
  %Conjecture{
    name: :clear_of_other,
    activator: opinion_activator(:other),
    predictors: [
      no_change_predictor(:on_collision_course, default: %{is: false})
    ],
    valuator: clear_of_other_belief_valuator(),
    intention_domain: []
  }
end
# Conjecture belief valuators

# Believes :clear_of_obstacle iff both obstacle percepts currently hold
# (each defaults to true when unperceived).
defp clear_of_obstacle_belief_valuator() do
  fn conjecture_activation, [round | _previous_rounds] ->
    about = conjecture_activation.about
    not_hit? = current_perceived_value(round, about, :obstacle_not_hit, :is, default: true)
    avoided? = current_perceived_value(round, about, :obstacle_avoided, :is, default: true)

    %{is: not_hit? and avoided?}
  end
end
# Believes :clear_of_other iff not currently perceived to be on a collision
# course (defaults to false when unperceived).
defp clear_of_other_belief_valuator() do
  fn conjecture_activation, [round | _previous_rounds] ->
    colliding? =
      current_perceived_value(
        round,
        conjecture_activation.about,
        :on_collision_course,
        :is,
        default: false
      )

    %{is: not colliding?}
  end
end
end
| 26.620253 | 88 | 0.666191 |
1ccec12591558f7d92bf1270d4328b8912bc6bb3 | 3,902 | ex | Elixir | lib/quaff.ex | aruki-delivery/quaff | 3538bec660642398537488377d57f5212b44414a | [
"Apache-2.0"
] | null | null | null | lib/quaff.ex | aruki-delivery/quaff | 3538bec660642398537488377d57f5212b44414a | [
"Apache-2.0"
] | null | null | null | lib/quaff.ex | aruki-delivery/quaff | 3538bec660642398537488377d57f5212b44414a | [
"Apache-2.0"
] | 1 | 2019-12-03T15:46:32.000Z | 2019-12-03T15:46:32.000Z | # Copyright 2018 Carlos Brito Lage <cbl@aruki.pt>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Quaff do
@moduledoc false
require Logger
defmodule CompileError do
  defexception message: nil

  # Builds the exception message from either an explicit :message or an
  # :io_lib.format/2 :format/:items pair, then appends the file/line
  # location. NOTE(review): the spacing inside the location format string may
  # have been collapsed in this copy — verify against the original.
  def exception(opts) do
    file = opts[:file] || "<unknown file>"
    line = opts[:line] || -1
    msg = opts[:message] || List.to_string(:io_lib.format(opts[:format], opts[:items]))
    msg = msg <> List.to_string(:io_lib.format("~n at ~s line ~p", [file, line]))
    %__MODULE__{message: msg}
  end
end
defmacro include(header) do
  # Delegate to include/2 with empty options.
  quote do: Quaff.include(unquote(header), [])
end
# Reads the constants defined by an Erlang header file and registers each one
# as a module attribute on the caller (and, with `export: true`, also as a
# zero-arity function). Options: :constants, :export, :module, :relative_to,
# :include.
defmacro include(header, options) do
  # If the header arrived as a quoted expression, evaluate it in the
  # caller's context to obtain the literal value.
  header =
    cond do
      :ok == Macro.validate(header) ->
        {hd, []} = header |> Code.eval_quoted([], __CALLER__)
        hd

      true ->
        header
    end

  use_constants = options[:constants] || :all
  do_export = options[:export] || false

  # The target module defaults to the caller's module.
  in_module =
    options[:module] ||
      Macro.expand(
        quote do
          __MODULE__
        end,
        __CALLER__
      )

  # Directory the header path is resolved against; may be passed as a value
  # or as AST, and defaults to the caller's directory.
  rel_dir =
    cond do
      options[:relative_to] && :ok == Macro.validate(options[:relative_to]) ->
        {rel_to, []} = options[:relative_to] |> Code.eval_quoted([], __CALLER__)
        rel_to

      options[:relative_to] ->
        options[:relative_to]

      true ->
        Macro.expand(
          quote do
            __DIR__
          end,
          __CALLER__
        )
    end

  # Include search path; same value-or-AST handling, defaulting to a
  # one-element list with the caller's directory.
  inc_dir =
    cond do
      options[:include] && :ok == Macro.validate(options[:include]) ->
        {inc, []} = options[:include] |> Code.eval_quoted([], __CALLER__)
        inc

      options[:include] ->
        options[:include]

      true ->
        [
          Macro.expand(
            quote do
              __DIR__
            end,
            __CALLER__
          )
        ]
    end

  options = Keyword.put(options, :module, in_module)
  options = Keyword.put(options, :relative_to, rel_dir)
  options = Keyword.put(options, :include, inc_dir)

  # Extract every constant from the header and normalise its name.
  const =
    Enum.map(Quaff.Constants.get_constants(header, options), fn {c, v} ->
      {Quaff.Constants.normalize_const(c), v}
    end)

  # Optionally restrict to an explicit list of constants; Keyword.fetch!
  # raises if a requested constant is not defined by the header.
  const =
    case use_constants do
      :all ->
        const

      _ ->
        Enum.map(List.wrap(use_constants), fn c ->
          c = Quaff.Constants.normalize_const(c)
          {c, Keyword.fetch!(const, c)}
        end)
    end

  # Register each constant as a module attribute on the caller.
  attrs =
    Enum.map(const, fn {c, val} ->
      quote do
        Module.put_attribute(__MODULE__, unquote(c), unquote(Macro.escape(val)))
      end
    end)

  # When export: true, also define a zero-arity accessor per constant.
  funs =
    case do_export do
      true ->
        Enum.map(const, fn {c, _} ->
          {:ok, ident} = Code.string_to_quoted(Atom.to_string(c))

          quote do
            def unquote(ident) do
              @unquote ident
            end
          end
        end)

      _ ->
        []
    end

  attrs ++ funs
end
defmacro include_lib(header) do
  # Delegate to include_lib/2 with empty options.
  quote do: Quaff.include_lib(unquote(header), [])
end
# Like include/2, but tags the options with include_lib: true before
# delegating — presumably mirroring Erlang's -include_lib resolution; the
# actual handling lives in Quaff.Constants (not shown here).
defmacro include_lib(header, options) do
  opts =
    options
    |> Macro.expand_once(__CALLER__)
    |> Keyword.put(:include_lib, true)

  quote do
    Quaff.include(unquote(header), unquote(opts))
  end
end
end
| 23.792683 | 89 | 0.557919 |
1ccecc3dc8796c29b14d3e48ea64501f92fec93d | 1,444 | exs | Elixir | test/yify_subtitle/adapters/api_test.exs | kdisneur/yify_subtitle | 2f5daf6874e1ee7fddf776c036c36bb8c617fc96 | [
"MIT"
] | null | null | null | test/yify_subtitle/adapters/api_test.exs | kdisneur/yify_subtitle | 2f5daf6874e1ee7fddf776c036c36bb8c617fc96 | [
"MIT"
] | null | null | null | test/yify_subtitle/adapters/api_test.exs | kdisneur/yify_subtitle | 2f5daf6874e1ee7fddf776c036c36bb8c617fc96 | [
"MIT"
] | null | null | null | defmodule YifySubtitle.Adapters.APITest do
use ExUnit.Case, async: false
import Mock
# HTTPoison.get!/1 is mocked so no network traffic happens; the fixture is a
# canned API response for imdb id tt0133093 (the-matrix per the URLs).
test "search returns list of all available subtitles" do
  with_mock HTTPoison, [
    get!: fn("http://api.yifysubtitles.com/subs/tt0133093") ->
      %HTTPoison.Response{status_code: 200, body: File.read!("test/yify_subtitle/adapters/fixtures/tt0133093.json")}
    end
  ] do
    expected = [
      english: ["http://api.yifysubtitles.com/subtitle-api/the-matrix-yify-742.zip", "http://api.yifysubtitles.com/subtitle-api/the-matrix-yify-39316.zip"],
      french: ["http://api.yifysubtitles.com/subtitle-api/the-matrix-yify-24107.zip"],
      spanish: ["http://api.yifysubtitles.com/subtitle-api/the-matrix-yify-4242.zip"]
    ]

    assert expected == YifySubtitle.Adapters.API.search("tt0133093", [])
  end
end
# With a language filter, only the requested languages are returned;
# :dutch is presumably absent from the fixture since the expected result
# contains only :english.
test "search returns only list of available subtitles" do
  with_mock HTTPoison, [
    get!: fn("http://api.yifysubtitles.com/subs/tt0133093") ->
      %HTTPoison.Response{status_code: 200, body: File.read!("test/yify_subtitle/adapters/fixtures/tt0133093.json")}
    end
  ] do
    expected = [
      english: ["http://api.yifysubtitles.com/subtitle-api/the-matrix-yify-742.zip", "http://api.yifysubtitles.com/subtitle-api/the-matrix-yify-39316.zip"]
    ]

    assert expected == YifySubtitle.Adapters.API.search("tt0133093", [:english, :dutch])
  end
end
end
| 40.111111 | 158 | 0.671053 |
1cced78f37f1e39812fb4504ce5e210d2de56944 | 758 | exs | Elixir | src/apps/utils/mix.exs | fortelabsinc/PlayerExchange | 442629df9716b45dd31f2bb1127309f4f9d2b0ee | [
"MIT"
] | null | null | null | src/apps/utils/mix.exs | fortelabsinc/PlayerExchange | 442629df9716b45dd31f2bb1127309f4f9d2b0ee | [
"MIT"
] | 42 | 2020-04-28T20:39:12.000Z | 2022-02-27T03:54:44.000Z | src/apps/utils/mix.exs | fortelabsinc/PlayerExchange | 442629df9716b45dd31f2bb1127309f4f9d2b0ee | [
"MIT"
] | null | null | null | defmodule Utils.MixProject do
use Mix.Project
def project do
{:ok, vsn} = File.read("../../vsn.txt")
[
app: :utils,
version: vsn,
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:jason, "~> 1.2"},
{:uuid, "~> 1.1"},
{:argon2_elixir, "~> 2.3"},
{:comeonin, "~> 5.3.1", override: true}
]
end
end
| 20.486486 | 59 | 0.521108 |
1cceef308605114ef51a154f2f7cf4e7a94d3905 | 3,085 | exs | Elixir | test/ex_aws/config_test.exs | andrewhr/ex_aws | 47fcd13b2767aecddd2471388263539f3d9a6851 | [
"MIT"
] | null | null | null | test/ex_aws/config_test.exs | andrewhr/ex_aws | 47fcd13b2767aecddd2471388263539f3d9a6851 | [
"MIT"
] | 5 | 2022-01-12T19:25:24.000Z | 2022-03-23T21:23:42.000Z | test/ex_aws/config_test.exs | andrewhr/ex_aws | 47fcd13b2767aecddd2471388263539f3d9a6851 | [
"MIT"
] | 1 | 2022-03-09T16:40:43.000Z | 2022-03-09T16:40:43.000Z | defmodule ExAws.ConfigTest do
use ExUnit.Case, async: true
setup do
Application.delete_env(:ex_aws, :awscli_credentials)
on_exit(fn ->
Application.delete_env(:ex_aws, :awscli_credentials)
end)
end
test "overrides work properly" do
config = ExAws.Config.new(:s3, region: "us-west-2")
assert config.region == "us-west-2"
end
test "{:system} style configs work" do
value = "foo"
System.put_env("ExAwsConfigTest", value)
assert :s3
|> ExAws.Config.new(
access_key_id: {:system, "ExAwsConfigTest"},
secret_access_key: {:system, "AWS_SECURITY_TOKEN"}
)
|> Map.get(:access_key_id) == value
end
test "security_token is configured properly" do
value = "security_token"
System.put_env("AWS_SECURITY_TOKEN", value)
assert :s3
|> ExAws.Config.new(
access_key_id: {:system, "AWS_SECURITY_TOKEN"},
security_token: {:system, "AWS_SECURITY_TOKEN"}
)
|> Map.get(:security_token) == value
end
test "config file is parsed if no given credentials in configuraion" do
profile = "default"
Mox.expect(ExAws.Credentials.InitMock, :security_credentials, 1, fn ^profile ->
%{region: "eu-west-1"}
end)
config = ExAws.Config.awscli_auth_credentials(profile, ExAws.Credentials.InitMock)
assert config.region == "eu-west-1"
end
test "profile config returned if given credentials in configuration" do
profile = "default"
example_credentials = %{
"default" => %{
region: "eu-west-1"
}
}
Application.put_env(:ex_aws, :awscli_credentials, example_credentials)
Mox.expect(ExAws.Credentials.InitMock, :security_credentials, 0, fn ^profile ->
%{region: "eu-west-1"}
end)
config = ExAws.Config.awscli_auth_credentials(profile, ExAws.Credentials.InitMock)
assert config.region == "eu-west-1"
end
test "error on wrong credentials configuration" do
profile = "other"
example_credentials = %{
"default" => %{
region: "eu-west-1"
}
}
Application.put_env(:ex_aws, :awscli_credentials, example_credentials)
Mox.expect(ExAws.Credentials.InitMock, :security_credentials, 0, fn ^profile ->
%{region: "eu-west-1"}
end)
assert_raise RuntimeError, fn ->
ExAws.Config.awscli_auth_credentials(profile, ExAws.Credentials.InitMock)
end
end
test "region as a plain string" do
region_value = "us-west-1"
assert :s3
|> ExAws.Config.new(region: region_value)
|> Map.get(:region) == region_value
end
test "region as an envar" do
region_value = "us-west-1"
System.put_env("AWS_REGION", region_value)
assert :s3
|> ExAws.Config.new(region: {:system, "AWS_REGION"})
|> Map.get(:region) == region_value
end
test "headers are passed as provided" do
headers = [{"If-Match", "ABC"}]
assert :s3
|> ExAws.Config.new(headers: headers)
|> Map.get(:headers) == headers
end
end
| 26.144068 | 86 | 0.636629 |
1ccf15bb47354625ef0df9009b42c10edce7f0ef | 97 | exs | Elixir | test/test_helper.exs | elixir-inspector/ref_inspector_plug | cc87a20edf2406de8e915e46c23908831a4a1fc1 | [
"Apache-2.0"
] | 1 | 2020-01-13T20:37:24.000Z | 2020-01-13T20:37:24.000Z | test/test_helper.exs | elixir-inspector/ref_inspector_plug | cc87a20edf2406de8e915e46c23908831a4a1fc1 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | elixir-inspector/ref_inspector_plug | cc87a20edf2406de8e915e46c23908831a4a1fc1 | [
"Apache-2.0"
] | null | null | null | :ok = RefInspector.Downloader.download()
:ok = RefInspector.reload(async: false)
ExUnit.start()
| 19.4 | 40 | 0.752577 |
1ccf2679afa585df370f46cf895f19bbc0679427 | 2,201 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/invideo_timing.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/you_tube/lib/google_api/you_tube/v3/model/invideo_timing.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/invideo_timing.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.InvideoTiming do
  @moduledoc """
  Describes a temporal position of a visual widget inside a video.

  ## Attributes

  *   `durationMs` (*type:* `String.t`, *default:* `nil`) - Defines the duration in milliseconds for which the promotion should be displayed. If missing, the client should use the default.
  *   `offsetMs` (*type:* `String.t`, *default:* `nil`) - Defines the time at which the promotion will appear. Depending on the value of type the value of the offsetMs field will represent a time offset from the start or from the end of the video, expressed in milliseconds.
  *   `type` (*type:* `String.t`, *default:* `nil`) - Describes a timing type. If the value is offsetFromStart, then the offsetMs field represents an offset from the start of the video. If the value is offsetFromEnd, then the offsetMs field represents an offset from the end of the video.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :durationMs => String.t(),
          :offsetMs => String.t(),
          :type => String.t()
        }

  # Field declarations mirror the attributes documented above; the field/1
  # macro comes from GoogleApi.Gax.ModelBase.
  field(:durationMs)
  field(:offsetMs)
  field(:type)
end
# Poison decoding delegates to the model module's decode/2.
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.InvideoTiming do
  def decode(value, options) do
    GoogleApi.YouTube.V3.Model.InvideoTiming.decode(value, options)
  end
end
# Poison encoding delegates to the shared GoogleApi.Gax.ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.InvideoTiming do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 41.528302 | 288 | 0.730577 |
1ccf5806ade22e7a1254728f3e3c8c0e66104837 | 1,450 | exs | Elixir | test/doctor_schedule/appointments/services/day_availability_service_test.exs | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 2 | 2022-03-11T12:15:01.000Z | 2022-03-11T13:53:21.000Z | test/doctor_schedule/appointments/services/day_availability_service_test.exs | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 3 | 2020-12-12T22:10:17.000Z | 2021-04-05T12:53:12.000Z | test/doctor_schedule/appointments/services/day_availability_service_test.exs | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 1 | 2021-02-26T04:24:34.000Z | 2021-02-26T04:24:34.000Z | defmodule DoctorSchedule.Appointments.Services.DayAvailabilityServiceTest do
use DoctorSchedule.DataCase
alias DoctorSchedule.Appointments.Services.DayAvailabilityService
alias DoctorSchedule.Shared.Cache.Ets.Implementations.ScheduleCache
alias DoctorSchedule.UserFixture
import Mock
@result [
%{available: false, hour: 8},
%{available: false, hour: 9},
%{available: false, hour: 10},
%{available: false, hour: 11},
%{available: false, hour: 12},
%{available: false, hour: 13},
%{available: false, hour: 14},
%{available: false, hour: 15},
%{available: false, hour: 16},
%{available: false, hour: 17},
%{available: false, hour: 18},
%{available: false, hour: 19}
]
test "it should see all available hour with and without cache" do
provider = UserFixture.create_provider()
date = Timex.now() |> Timex.shift(days: -1) |> Timex.to_date()
with_mock Redix, command: fn _, _ -> {:ok, nil} end do
response = DayAvailabilityService.execute(provider.id, date)
assert @result == response
end
end
test "it should see all available hour with and with cache" do
provider = UserFixture.create_provider()
date = Date.utc_today()
date = %Date{date | day: date.day - 1}
with_mock ScheduleCache, get: fn _ -> {:ok, @result} end do
response = DayAvailabilityService.execute(provider.id, date)
assert @result == response
end
end
end
| 31.521739 | 76 | 0.677931 |
1ccf6424ae22624e2635240025b2e1741d3b456b | 66,457 | ex | Elixir | lib/ecto/changeset.ex | DavidAlphaFox/ecto | a3eae38d1d05c440893f724c2f04a8332e32d7ac | [
"Apache-2.0"
] | 1 | 2019-05-07T15:05:52.000Z | 2019-05-07T15:05:52.000Z | lib/ecto/changeset.ex | DavidAlphaFox/ecto | a3eae38d1d05c440893f724c2f04a8332e32d7ac | [
"Apache-2.0"
] | null | null | null | lib/ecto/changeset.ex | DavidAlphaFox/ecto | a3eae38d1d05c440893f724c2f04a8332e32d7ac | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Changeset do
@moduledoc ~S"""
Changesets allow filtering, casting, validation and
definition of constraints when manipulating models.
There is an example of working with changesets in the
introductory documentation in the `Ecto` module. The
functions `change/2` and `cast/4` are the usual entry
points for creating changesets, while the remaining
functions are useful for manipulating them.
## Validations and constraints
Ecto changesets provide both validations and constraints
which are ultimately turned into errors in case something
goes wrong.
The difference between them is that validations can be executed
without a need to interact with the database and, therefore, are
always executed before attemping to insert or update the entry
in the database.
However, constraints can only be checked in a safe way when performing
the operation in the database. As a consequence, validations are
always checked before constraints. Constraints won't even be
checked in case validations failed.
Let's see an example:
defmodule User do
use Ecto.Schema
import Ecto.Changeset
schema "users" do
field :name
field :email
field :age, :integer
end
def changeset(user, params \\ :empty) do
user
|> cast(params, ~w(name email), ~w(age))
|> validate_format(:email, ~r/@/)
|> validate_inclusion(:age, 18..100)
|> unique_constraint(:email)
end
end
In the `changeset/2` function above, we define two validations -
one for checking the e-mail format and another to check the age -
as well as a unique constraint in the email field.
Let's suppose the e-mail is given but the age is invalid. The
changeset would have the following errors:
changeset = User.changeset(%User{}, %{age: 0, email: "mary@example.com"})
{:error, changeset} = Repo.insert(changeset)
changeset.errors #=> [age: "is invalid"]
In this case, we haven't checked the unique constraint in the
e-mail field because the data did not validate. Let's fix the
age and assume, however, that the e-mail already exists in the
database:
changeset = User.changeset(%User{}, %{age: 42, email: "mary@example.com"})
{:error, changeset} = Repo.insert(changeset)
changeset.errors #=> [email: "has already been taken"]
Validations and constraints define an explicit boundary when the check
happens. By moving constraints to the database, we also provide a safe,
correct and data-race free means of checking the user input.
## The Ecto.Changeset struct
The fields are:
* `valid?` - Stores if the changeset is valid
* `model` - The changeset root model
* `params` - The parameters as given on changeset creation
* `changes` - The `changes` from parameters that were approved in casting
* `errors` - All errors from validations
* `validations` - All validations performed in the changeset
* `constraints` - All constraints defined in the changeset
* `required` - All required fields as a list of atoms
* `optional` - All optional fields as a list of atoms
* `filters` - Filters (as a map `%{field => value}`) to narrow the scope of update/delete queries
* `action` - The action to be performed with the changeset
* `types` - Cache of the model's field types
* `repo` - The repository applying the changeset (only set after a Repo function is called)
* `opts` - The options given to the repository
## On replace
Using changesets you can work with `has_one` and `has_many` associations
as well as with embedded structs. Sometimes the related data may be
replaced by incoming data. The default behaviour in such cases is to
raise but can be configured when defining the relation according to the
possible values are:
* `:raise` (default) - do not allow removing association or embedded
model via parent changesets,
* `:mark_as_invalid` - if attempting to remove the association or
embedded model via parent changeset - an error will be added to the parent
changeset, and it will be marked as invalid,
* `:nilify` - sets owner reference column to `nil` (available only for
associations),
* `:delete` - removes the association or related model from the database.
This option has to be used carefully. You should consider adding a
separate boolean virtual field to your model that will alow to manually
mark it deletion, as in the example below:
defmodule Comment do
use Ecto.Schema
import Ecto.Changeset
schema "comments" do
field :body, :string
field :delete, :boolean, virtual: true
end
def changeset(model, params) do
cast(model, params, [:body], [:delete])
|> maybe_mark_for_deletion
end
defp maybe_mark_for_deletion(changeset) do
if get_change(changeset, :delete) do
%{changeset | action: :delete}
else
changeset
end
end
end
"""
alias __MODULE__
alias Ecto.Changeset.Relation
  # If a new field is added here, def merge must be adapted
  defstruct valid?: false, model: nil, params: nil, changes: %{}, repo: nil,
            errors: [], validations: [], required: [], optional: [], prepare: [],
            constraints: [], filters: %{}, action: nil, types: nil, opts: []

  @type t :: %Changeset{valid?: boolean(),
                        repo: atom | nil,
                        opts: Keyword.t,
                        model: Ecto.Schema.t | nil,
                        params: %{String.t => term} | nil,
                        changes: %{atom => term},
                        required: [atom],
                        optional: [atom],
                        prepare: [(t -> t)],
                        errors: [error],
                        constraints: [constraint],
                        validations: Keyword.t,
                        filters: %{atom => term},
                        action: action,
                        types: nil | %{atom => Ecto.Type.t}}

  @type error :: {atom, error_message}
  @type error_message :: String.t | {String.t, Keyword.t}
  @type action :: nil | :insert | :update | :delete
  @type constraint :: %{type: :unique, constraint: String.t,
                        field: atom, message: error_message}
  @type cast_field :: String.t | atom | {atom, Relation.on_cast}

  # Comparison functions paired with error-message templates for numeric
  # validations; %{count} is interpolated with the compared value.
  @number_validators %{
    less_than: {&</2, "must be less than %{count}"},
    greater_than: {&>/2, "must be greater than %{count}"},
    less_than_or_equal_to: {&<=/2, "must be less than or equal to %{count}"},
    greater_than_or_equal_to: {&>=/2, "must be greater than or equal to %{count}"},
    equal_to: {&==/2, "must be equal to %{count}"},
  }

  # Schema type tags that represent relations (embeds and associations).
  @relations [:embed, :assoc]
@doc """
Wraps the given model in a changeset or adds changes to a changeset.
Changed attributes will only be added if the change does not have the
same value as the attribute in the model.
This function is useful for:
* wrapping a model inside a changeset
* directly changing the model without performing castings nor validations
* directly bulk-adding changes to a changeset
Since no validation nor casting is performed, `change/2` expects the keys in
`changes` to be atoms. `changes` can be a map as well as a keyword list.
When a changeset is passed as the first argument, the changes passed as the
second argument are merged over the changes already in the changeset if they
differ from the values in the model. If `changes` is an empty map, this
function is a no-op.
See `cast/4` if you'd prefer to cast and validate external parameters.
## Examples
iex> changeset = change(%Post{})
%Ecto.Changeset{...}
iex> changeset.valid?
true
iex> changeset.changes
%{}
iex> changeset = change(%Post{author: "bar"}, title: "title")
iex> changeset.changes
%{title: "title"}
iex> changeset = change(%Post{title: "title"}, title: "title")
iex> changeset.changes
%{}
iex> changeset = change(changeset, %{title: "new title", body: "body"})
iex> changeset.changes.title
"new title"
iex> changeset.changes.body
"body"
"""
  @spec change(Ecto.Schema.t | t, %{atom => term} | Keyword.t) :: t | no_return
  def change(model_or_changeset, changes \\ %{})

  # A changeset built without schema type information cannot accept changes.
  def change(%Changeset{types: nil}, _changes) do
    raise ArgumentError, "changeset does not have types information"
  end

  # Merging changes into an existing changeset: new changes are laid on top
  # of the current ones, updating errors and validity along the way.
  def change(%Changeset{changes: changes, types: types} = changeset, new_changes)
      when is_map(new_changes) or is_list(new_changes) do
    {changes, errors, valid?} =
      get_changed(changeset.model, types, changes, new_changes,
                  changeset.errors, changeset.valid?)
    %{changeset | changes: changes, errors: errors, valid?: valid?}
  end

  # Wrapping a plain schema struct: types come from the schema module's
  # __changeset__/0 reflection and a fresh changeset is built.
  def change(%{__struct__: struct} = model, changes) when is_map(changes) or is_list(changes) do
    types = struct.__changeset__
    {changes, errors, valid?} =
      get_changed(model, types, %{}, changes, [], true)
    %Changeset{valid?: valid?, model: model, changes: changes,
               errors: errors, types: types}
  end

  # Folds every new change through put_change/7; changes equal to the value
  # already in the model are not added (see the @doc above).
  defp get_changed(model, types, old_changes, new_changes, errors, valid?) do
    Enum.reduce(new_changes, {old_changes, errors, valid?}, fn
      {key, value}, {changes, errors, valid?} ->
        put_change(model, changes, errors, valid?, key, value, Map.get(types, key))
    end)
  end
  # Deprecated cast/3: prints a warning with a stacktrace, then delegates to
  # cast/4 with an empty optional list.
  @doc false
  def cast(model, params, required) do
    IO.write :stderr, "warning: cast/3 is deprecated, please use cast/4\n" <> Exception.format_stacktrace()
    cast(model, params, required, [])
  end
@doc """
Converts the given `params` into a changeset for `model`
keeping only the set of `required` and `optional` keys.
This function receives a model and some `params`, and casts the `params`
according to the schema information from `model`. `params` is a map with
string keys or a map with atom keys containing potentially unsafe data.
During casting, all valid parameters will have their key name converted to an
atom and stored as a change in the `:changes` field of the changeset.
All parameters that are not listed in `required` or `optional` are ignored.
If casting of all fields is successful and all required fields
are present either in the model or in the given params, the
changeset is returned as valid.
## Examples
iex> changeset = cast(post, params, ~w(title), ~w())
iex> if changeset.valid? do
...> Repo.update!(changeset)
...> end
Passing a changeset as the first argument:
iex> changeset = cast(post, %{title: "Hello"}, ~w(), ~w(title))
iex> new_changeset = cast(changeset, %{title: "Foo", body: "Bar"}, ~w(title), ~w(body))
iex> new_changeset.params
%{title: "Foo", body: "Bar"}
iex> new_changeset.required
[:title]
iex> new_changeset.optional
[:body]
## Empty parameters
The `params` argument can also be the atom `:empty`. In such cases, the
changeset is automatically marked as invalid, with an empty `:changes` map.
This is useful to run the changeset through all validation steps for
introspection:
iex> changeset = cast(post, :empty, ~w(title), ~w())
iex> changeset = validate_length(post, :title, min: 3)
iex> changeset.validations
[title: [min: 3]]
## Composing casts
`cast/4` also accepts a changeset instead of a model as its first argument.
In such cases, all the effects caused by the call to `cast/4` (additional and
optional fields, errors and changes) are simply added to the ones already
present in the argument changeset. Parameters are merged (**not deep-merged**)
and the ones passed to `cast/4` take precedence over the ones already in the
changeset.
Note that if a field is marked both as *required* as well as *optional* (for
example by being in the `:required` field of the argument changeset and also
in the `optional` list passed to `cast/4`), then it will be marked as required
and not optional. This represents the fact that required fields are
"stronger" than optional fields.
## Relations
You can override the relation's `on_cast` setting by providing a 2 item tuple
in the `required` or `optional` list instead of a simple field name.
The key will be the relation's name and value is either the changeset
function's name or an anonymous function that accepts a model and params. The
new function will be used similarily to the one provided in the `on_cast`
setting.
# Will use Author.custom_changeset/2 as the changeset function
cast(post, %{author: %{name: "Paul"}}, ~w(), [{:author, :custom_changeset})
# Will use my_custom_changeset/2 as the changeset function.
cast(post, %{author: %{name: "Paul"}}, ~w(), [{:author, &my_custom_changeset/2}])
defp my_custom_changeset(model, params) do
cast(model, params, ~w(name))
end
"""
  @spec cast(Ecto.Schema.t | t,
             %{binary => term} | %{atom => term} | nil,
             [cast_field],
             [cast_field]) :: t | no_return
  def cast(model_or_changeset, params, required, optional)

  # Structs are rejected up front: params must be a plain map.
  def cast(_model, %{__struct__: _} = params, _required, _optional) do
    raise ArgumentError, "expected params to be a map, got struct `#{inspect params}`"
  end

  # Casting on top of an existing changeset: cast against the underlying
  # model, then merge the result into the given changeset.
  def cast(%Changeset{changes: changes, model: model} = changeset, params, required, optional) do
    new_changeset = cast(model, changes, params, required, optional)
    cast_merge(changeset, new_changeset)
  end

  def cast(%{__struct__: _} = model, params, required, optional) do
    cast(model, %{}, params, required, optional)
  end

  # :empty params produce an invalid changeset with nil params and no new
  # changes; field lists are still normalized so validations can introspect.
  defp cast(%{__struct__: module} = model, %{} = changes, :empty, required, optional)
      when is_list(required) and is_list(optional) do
    types = module.__changeset__

    optional = Enum.map(optional, &process_empty_fields(&1, types))
    required = Enum.map(required, &process_empty_fields(&1, types))

    %Changeset{params: nil, model: model, valid?: false, errors: [],
               changes: changes, required: required, optional: optional, types: types}
  end

  # Main casting pipeline: normalize params to string keys, then cast each
  # optional and required field against its schema type, threading the
  # {changes, errors, valid?} accumulator through process_param/6.
  defp cast(%{__struct__: module} = model, %{} = changes, %{} = params, required, optional)
      when is_list(required) and is_list(optional) do
    params = convert_params(params)
    types = module.__changeset__

    {optional, {changes, errors, valid?}} =
      Enum.map_reduce(optional, {changes, [], true},
                      &process_param(&1, :optional, params, types, model, &2))

    {required, {changes, errors, valid?}} =
      Enum.map_reduce(required, {changes, errors, valid?},
                      &process_param(&1, :required, params, types, model, &2))

    %Changeset{params: params, model: model, valid?: valid?,
               errors: Enum.reverse(errors), changes: changes, required: required,
               optional: optional, types: types}
  end
defp process_empty_fields({key, fun}, types) when is_atom(key) do
relation!(types, key, fun)
key
end
defp process_empty_fields(key, _types) when is_binary(key) do
String.to_existing_atom(key)
end
defp process_empty_fields(key, _types) when is_atom(key) do
key
end
  # Casts one field. The {key, fun} form overrides the relation's on_cast
  # function via relation!/3; the bare key form resolves the schema type.
  defp process_param({key, fun}, kind, params, types, model, acc) do
    {key, param_key} = cast_key(key)
    type = relation!(types, key, fun)
    current = Map.get(model, key)
    do_process_param(key, param_key, kind, params, type, current, model, acc)
  end

  defp process_param(key, kind, params, types, model, acc) do
    {key, param_key} = cast_key(key)
    type = type!(types, key)
    current = Map.get(model, key)
    do_process_param(key, param_key, kind, params, type, current, model, acc)
  end

  # Threads the {changes, errors, valid?} accumulator through the outcome of
  # cast_field/6, adding "can't be blank"/"is invalid" errors as appropriate.
  defp do_process_param(key, param_key, kind, params, type, current,
                        model, {changes, errors, valid?}) do
    {key,
     case cast_field(param_key, type, params, current, model, valid?) do
       # Required field cast to nil: may yield a "can't be blank" error.
       {:ok, nil, valid?} when kind == :required ->
         {errors, valid?} = error_on_nil(kind, key, Map.get(changes, key), errors, valid?)
         {changes, errors, valid?}
       {:ok, value, valid?} ->
         {Map.put(changes, key, value), errors, valid?}
       # Param absent (or unchanged): check required-ness against the value
       # already in changes or the model.
       {:missing, current} ->
         {errors, valid?} = error_on_nil(kind, key, Map.get(changes, key, current), errors, valid?)
         {changes, errors, valid?}
       :invalid ->
         {changes, [{key, "is invalid"}|errors], false}
     end}
  end
  # Resolves `key` to a relation type, replacing its on_cast function with
  # `fun`. Only embeds and has-style associations accept a cast function.
  defp relation!(types, key, fun) do
    case Map.fetch(types, key) do
      {:ok, {:embed, embed}} ->
        {:embed, %Ecto.Embedded{embed | on_cast: fun}}
      {:ok, {:assoc, assoc}} ->
        {:assoc, %Ecto.Association.Has{assoc | on_cast: fun}}
      {:ok, _} ->
        raise ArgumentError, "only embedded fields and associations can be " <>
          "given a cast function"
      :error ->
        raise ArgumentError, "unknown field `#{key}` (note only fields, " <>
          "embedded models, has_one and has_many associations are supported in cast)"
    end
  end

  # Resolves `key` to its schema type, raising on unknown fields.
  defp type!(types, key) do
    case Map.fetch(types, key) do
      {:ok, {tag, _} = relation} when tag in @relations ->
        relation
      {:ok, type} ->
        type
      :error ->
        raise ArgumentError, "unknown field `#{key}` (note only fields, " <>
          "embedded models, has_one and has_many associations are supported in cast)"
    end
  end
defp cast_key(key) when is_binary(key),
do: {String.to_existing_atom(key), key}
defp cast_key(key) when is_atom(key),
do: {key, Atom.to_string(key)}
  # Relation branch of field casting (deprecated in favor of cast_assoc/3 and
  # cast_embed/3): delegates to Relation.cast/3 and normalizes its result.
  defp cast_field(param_key, {tag, relation}, params, current, model, valid?)
      when tag in @relations do
    # TODO: Always raise
    IO.write :stderr, "warning: casting #{tag}s with cast/4 is deprecated, " <>
      "please use cast_#{tag}/3 instead\n" <> Exception.format_stacktrace()
    current = Relation.load!(model, current)
    case Map.fetch(params, param_key) do
      {:ok, value} ->
        case Relation.cast(relation, value, current) do
          :error -> :invalid
          # A skipped cast or an unchanged relation counts as missing.
          {:ok, _, _, true} -> {:missing, current}
          {:ok, ^current, _, false} -> {:missing, current}
          {:ok, result, relation_valid?, false} -> {:ok, result, valid? and relation_valid?}
        end
      :error ->
        {:missing, current}
    end
  end

  # Scalar branch: casts the param value with Ecto.Type; a value equal to the
  # model's current one is treated as missing so it never becomes a change.
  defp cast_field(param_key, type, params, current, _model, valid?) do
    case Map.fetch(params, param_key) do
      {:ok, value} ->
        case Ecto.Type.cast(type, value) do
          {:ok, ^current} -> {:missing, current}
          {:ok, value} -> {:ok, value, valid?}
          :error -> :invalid
        end
      :error ->
        {:missing, current}
    end
  end
defp convert_params(params) do
Enum.reduce(params, nil, fn
{key, _value}, nil when is_binary(key) ->
nil
{key, _value}, _ when is_binary(key) ->
raise ArgumentError, "expected params to be a map with atoms or string keys, " <>
"got a map with mixed keys: #{inspect params}"
{key, value}, acc when is_atom(key) ->
Map.put(acc || %{}, Atom.to_string(key), value)
end) || params
end
defp error_on_nil(:required, key, nil, errors, _valid?),
do: {[{key, "can't be blank"}|errors], false}
defp error_on_nil(_kind, _key, _value, errors, valid?),
do: {errors, valid?}
## Casting related
@doc """
Casts the given association.
The parameters for the given association will be retrieved
from `changeset.params` and the changeset function in the
association module will be invoked. The function to be
invoked may also be configured by using the `:with` option.
The changeset must have been previously `cast` using
`cast/4` before this function is invoked.
## Options
* `:with` - the function to build the changeset from params.
Defaults to the changeset/2 function in the association module
"""
def cast_assoc(changeset, name, opts \\ []) when is_atom(name) do
cast_relation(:assoc, changeset, name, opts)
end
@doc """
Casts the given embed.
The parameters for the given embed will be retrieved
from `changeset.params` and the changeset function in the
embed module will be invoked. The function to be
invoked may also be configured by using the `:with` option.
The changeset must have been previously `cast` using
`cast/4` before this function is invoked.
## Options
* `:with` - the function to build the changeset from params.
Defaults to the changeset/2 function in the embed module
"""
def cast_embed(changeset, name, opts \\ []) when is_atom(name) do
cast_relation(:embed, changeset, name, opts)
end
  # Guard clause: cast_assoc/cast_embed require a changeset produced by
  # cast/4, which populates both :model and :types.
  defp cast_relation(type, %Changeset{model: model, types: types}, _name, _opts)
      when model == nil or types == nil do
    raise ArgumentError, "cast_#{type}/3 expects the changeset to be cast. " <>
      "Please call cast/4 before calling cast_#{type}/3"
  end

  defp cast_relation(type, %Changeset{} = changeset, key, opts) do
    {key, param_key} = cast_key(key)
    %{model: model, types: types, params: params, changes: changes} = changeset
    relation = relation!(:cast, type, key, Map.get(types, key))
    params = params || %{}

    # Record the key as required or optional directly on the changeset.
    {changeset, required?} =
      if opts[:required] do
        {update_in(changeset.required, &[key|&1]), true}
      else
        {update_in(changeset.optional, &[key|&1]), false}
      end

    # :with overrides the changeset-building function; by default the related
    # module's configured on_cast function is applied.
    on_cast = opts[:with] || &apply(relation.related, relation.on_cast, [&1, &2])
    relation = %{relation | on_cast: on_cast}

    current = Relation.load!(model, Map.get(model, key))
    case params && Map.fetch(params, param_key) do
      {:ok, value} ->
        case Relation.cast(relation, value, current) do
          # Actual change: store it, combine validity, then apply the
          # required check.
          {:ok, change, relation_valid?, false} when change != current ->
            missing_relation(%{changeset | changes: Map.put(changes, key, change),
                               valid?: changeset.valid? && relation_valid?}, key, current, required?)
          {:ok, _, _, _} ->
            missing_relation(changeset, key, current, required?)
          :error ->
            %{changeset | errors: [{key, "is invalid"} | changeset.errors], valid?: false}
        end
      _ ->
        missing_relation(changeset, key, current, required?)
    end
  end
defp missing_relation(%{changes: changes, errors: errors} = changeset, name, current, required?) do
if required? and is_nil(Map.get(changes, name, current)) do
%{changeset | errors: [{name, "can't be blank"} | errors], valid?: false}
else
changeset
end
end
defp relation!(_op, type, _name, {type, relation}),
do: relation
defp relation!(op, type, name, nil),
do: raise(ArgumentError, "unknown #{type} `#{name}` in `#{op}_#{type}`")
defp relation!(op, type, name, {other, _}) when other in @relations,
do: raise(ArgumentError, "expected `#{name}` to be an #{type} in `#{op}_#{type}`, got: `#{other}`")
defp relation!(op, type, name, schema_type),
do: raise(ArgumentError, "expected `#{name}` to be an #{type} in `#{op}_#{type}`, got: `#{inspect schema_type}`")
## Working with changesets
@doc """
Merges two changesets.
This function merges two changesets provided they have been applied to the
same model (their `:model` field is equal); if the models differ, an
`ArgumentError` exception is raised. If one of the changesets has a `:repo`
field which is not `nil`, then the value of that field is used as the `:repo`
field of the resulting changeset; if both changesets have a non-`nil` and
different `:repo` field, an `ArgumentError` exception is raised.
The other fields are merged with the following criteria:
* `params` - params are merged (not deep-merged) giving precedence to the
params of `changeset2` in case of a conflict. If both changesets have their
`:params` fields set to `nil`, the resulting changeset will have its params
set to `nil` too.
* `changes` - changes are merged giving precedence to the `changeset2`
changes.
* `errors` and `validations` - they are simply concatenated.
* `required` and `optional` - they are merged; all the fields that appear
in the optional list of either changesets and also in the required list of
the other changeset are moved to the required list of the resulting
changeset.
## Examples
iex> changeset1 = cast(%{title: "Title"}, %Post{}, ~w(title), ~w(body))
iex> changeset2 = cast(%{title: "New title", body: "Body"}, %Post{}, ~w(title body), ~w())
iex> changeset = merge(changeset1, changeset2)
iex> changeset.changes
%{body: "Body", title: "New title"}
iex> changeset.required
[:title, :body]
iex> changeset.optional
[]
iex> changeset1 = cast(%{title: "Title"}, %Post{body: "Body"}, ~w(title), ~w(body))
iex> changeset2 = cast(%{title: "New title"}, %Post{}, ~w(title), ~w())
iex> merge(changeset1, changeset2)
** (ArgumentError) different models when merging changesets
"""
@spec merge(t, t) :: t | no_return
def merge(changeset1, changeset2)
def merge(%Changeset{model: model} = cs1, %Changeset{model: model} = cs2) do
new_opts = cs1.opts ++ cs2.opts
new_repo = merge_identical(cs1.repo, cs2.repo, "repos")
new_action = merge_identical(cs1.action, cs2.action, "actions")
new_filters = Map.merge(cs1.filters, cs2.filters)
new_validations = cs1.validations ++ cs2.validations
new_constraints = cs1.constraints ++ cs2.constraints
cast_merge %{cs1 | repo: new_repo, filters: new_filters,
action: new_action, validations: new_validations,
opts: new_opts, constraints: new_constraints}, cs2
end
def merge(%Changeset{}, %Changeset{}) do
raise ArgumentError, message: "different models when merging changesets"
end
# Merges the cast-related fields of two changesets, favoring `cs2` for
# conflicting params/changes. `params` stays nil only when both are nil;
# fields promoted to required are removed from optional.
defp cast_merge(cs1, cs2) do
  merged_params =
    if cs1.params || cs2.params do
      Map.merge(cs1.params || %{}, cs2.params || %{})
    end

  required = Enum.uniq(cs1.required ++ cs2.required)

  %{cs1 |
    params:   merged_params,
    changes:  Map.merge(cs1.changes, cs2.changes),
    errors:   Enum.uniq(cs1.errors ++ cs2.errors),
    required: required,
    optional: Enum.uniq(cs1.optional ++ cs2.optional) -- required,
    types:    cs1.types || cs2.types,
    valid?:   cs1.valid? and cs2.valid?}
end
# Picks the non-nil value when the other side is nil, or either one when both
# sides are equal; raises when the two sides are different non-nil values.
defp merge_identical(same, same, _what), do: same
defp merge_identical(value, nil, _what), do: value
defp merge_identical(nil, value, _what), do: value

defp merge_identical(lhs, rhs, what) do
  raise ArgumentError, "different #{what} (`#{inspect lhs}` and " <>
                       "`#{inspect rhs}`) when merging changesets"
end
@doc """
Fetches the given field from changes or from the model.
While `fetch_change/2` only looks at the current `changes`
to retrieve a value, this function looks at the changes and
then falls back on the model, finally returning `:error` if
no value is available.
For relations this functions will return the models with changes applied,
as if they were taken from model.
To retrieve raw changesets, please use `fetch_change/2`.
## Examples
iex> post = %Post{title: "Foo", body: "Bar baz bong"}
iex> changeset = change(post, %{title: "New title"})
iex> fetch_field(changeset, :title)
{:changes, "New title"}
iex> fetch_field(changeset, :body)
{:model, "Bar baz bong"}
iex> fetch_field(changeset, :not_a_field)
:error
"""
@spec fetch_field(t, atom) :: {:changes, term} | {:model, term} | :error
def fetch_field(%Changeset{changes: changes, model: model, types: types}, key) do
case Map.fetch(changes, key) do
{:ok, value} ->
{:changes, change_as_field(types, key, value)}
:error ->
case Map.fetch(model, key) do
{:ok, value} -> {:model, value}
:error -> :error
end
end
end
@doc """
Gets a field from changes or from the model.
While `get_change/3` only looks at the current `changes`
to retrieve a value, this function looks at the changes and
then falls back on the model, finally returning `default` if
no value is available.
For relations this functions will return the models with changes applied,
as if they were taken from model.
To retrieve raw changesets, please use `get_change/3`.
iex> post = %Post{title: "A title", body: "My body is a cage"}
iex> changeset = change(post, %{title: "A new title"})
iex> get_field(changeset, :title)
"A new title"
iex> get_field(changeset, :not_a_field, "Told you, not a field!")
"Told you, not a field!"
"""
@spec get_field(t, atom, term) :: term
def get_field(%Changeset{changes: changes, model: model, types: types}, key, default \\ nil) do
case Map.fetch(changes, key) do
{:ok, value} ->
change_as_field(types, key, value)
:error ->
case Map.fetch(model, key) do
{:ok, value} -> value
:error -> default
end
end
end
# Applies relation changes so get_field/fetch_field return models; any
# non-relation value is passed through untouched.
defp change_as_field(types, key, value) do
  with {tag, relation} when tag in @relations <- Map.get(types, key) do
    Relation.apply_changes(relation, value)
  else
    _ -> value
  end
end
@doc """
Fetches a change from the given changeset.
This function only looks at the `:changes` field of the given `changeset` and
returns `{:ok, value}` if the change is present or `:error` if it's not.
## Examples
iex> changeset = change(%Post{body: "foo"}, %{title: "bar"})
iex> fetch_change(changeset, :title)
{:ok, "bar"}
iex> fetch_change(changeset, :body)
:error
"""
@spec fetch_change(t, atom) :: {:ok, term} | :error
def fetch_change(%Changeset{changes: changes} = _changeset, key) when is_atom(key) do
Map.fetch(changes, key)
end
@doc """
Gets a change or returns a default value.
## Examples
iex> changeset = change(%Post{body: "foo"}, %{title: "bar"})
iex> get_change(changeset, :title)
"bar"
iex> get_change(changeset, :body)
nil
"""
@spec get_change(t, atom, term) :: term
def get_change(%Changeset{changes: changes} = _changeset, key, default \\ nil) when is_atom(key) do
Map.get(changes, key, default)
end
@doc """
Updates a change.
The given `function` is invoked with the change value only if there
is a change for the given `key`. Note that the value of the change
can still be `nil` (unless the field was marked as required on `cast/4`).
## Examples
iex> changeset = change(%Post{}, %{impressions: 1})
iex> changeset = update_change(changeset, :impressions, &(&1 + 1))
iex> changeset.changes.impressions
2
"""
@spec update_change(t, atom, (term -> term)) :: t
def update_change(%Changeset{changes: changes} = changeset, key, function) when is_atom(key) do
case Map.fetch(changes, key) do
{:ok, value} ->
changes = Map.put(changes, key, function.(value))
%{changeset | changes: changes}
:error ->
changeset
end
end
@doc """
Puts a change on the given `key` with `value`.
If the change is already present, it is overridden with
the new value, also, if the change has the same value as
the model, it is not added to the list of changes.
## Examples
iex> changeset = change(%Post{author: "bar"}, %{title: "foo"})
iex> changeset = put_change(changeset, :title, "bar")
iex> changeset.changes
%{title: "bar"}
iex> changeset = put_change(changeset, :author, "bar")
iex> changeset.changes
%{title: "bar"}
"""
@spec put_change(t, atom, term) :: t | no_return
def put_change(%Changeset{types: nil}, _key, _value) do
raise ArgumentError, "changeset does not have types information"
end
def put_change(%Changeset{types: types} = changeset, key, value) do
type = Map.get(types, key)
{changes, errors, valid?} =
put_change(changeset.model, changeset.changes, changeset.errors,
changeset.valid?, key, value, type)
%{changeset | changes: changes, errors: errors, valid?: valid?}
end
# Relation (assoc/embed) branch: changing relations through change/2 or
# put_change/3 is deprecated in favor of put_assoc/put_embed.
defp put_change(model, changes, errors, valid?, key, value, {tag, relation})
     when tag in @relations do
  # TODO: Always raise
  IO.write :stderr, "warning: changing #{tag}s with change/2 or put_change/3 is deprecated, " <>
                    "please use put_#{tag}/3 instead\n" <> Exception.format_stacktrace()
  # Diff the new value against the value currently loaded on the model.
  current = Relation.load!(model, Map.get(model, key))
  case Relation.change(relation, value, current) do
    # `true` in the last position: no effective change, keep as-is.
    {:ok, _, _, true} ->
      {changes, errors, valid?}
    {:ok, change, _, false} ->
      {Map.put(changes, key, change), errors, valid?}
    :error ->
      {changes, [{key, "is invalid"} | errors], false}
  end
end

# Plain field branch.
defp put_change(model, changes, errors, valid?, key, value, _type) do
  cond do
    # Value differs from the model: record (or overwrite) the change.
    Map.get(model, key) != value ->
      {Map.put(changes, key, value), errors, valid?}
    # Value matches the model but a change was recorded earlier: drop it so
    # the changes map only contains real differences.
    Map.has_key?(changes, key) ->
      {Map.delete(changes, key), errors, valid?}
    # Value matches the model and no change exists: nothing to do.
    true ->
      {changes, errors, valid?}
  end
end
@doc """
Puts the given association as change in the changeset.
The association may either be the association struct or a
changeset for the given association.
If the association has no changes, it will be skipped.
If the association is invalid, the changeset will be marked
as invalid. If the given value is not an association, it
will raise.
"""
def put_assoc(changeset, name, value, opts \\ []) do
put_relation(:assoc, changeset, name, value, opts)
end
@doc """
Puts the given embed as change in the changeset.
The embed may either be the embed struct or a changeset
for the given embed.
If the embed has no changes, it will be skipped.
If the embed is invalid, the changeset will be marked
as invalid. If the given value is not an embed, it
will raise.
"""
def put_embed(changeset, name, value, opts \\ []) do
put_relation(:embed, changeset, name, value, opts)
end
# Shared implementation of put_assoc/4 and put_embed/4.
# Without type metadata we cannot verify the relation, so refuse outright.
defp put_relation(_type, %Changeset{types: nil}, _name, _value, _opts) do
  raise ArgumentError, "changeset does not have types information"
end

defp put_relation(type, changeset, name, value, _opts) do
  %{model: model, types: types, changes: changes} = changeset

  # relation!/4 raises unless `name` really is a relation of kind `type`.
  relation = relation!(:put, type, name, Map.get(types, name))
  # Load the current value from the model so Relation.change/3 can diff it.
  current  = Relation.load!(model, Map.get(model, name))

  case Relation.change(relation, value, current) do
    # `true` in the last position: value equals current, skip the change.
    {:ok, _, _, true} ->
      changeset
    {:ok, change, relation_valid?, false} ->
      # An invalid nested changeset propagates invalidity to the parent.
      %{changeset | changes: Map.put(changes, name, change),
        valid?: changeset.valid? && relation_valid?}
    :error ->
      %{changeset | errors: [{name, "is invalid"} | changeset.errors], valid?: false}
  end
end
@doc """
Forces a change on the given `key` with `value`.
If the change is already present, it is overridden with
the new value.
## Examples
iex> changeset = change(%Post{author: "bar"}, %{title: "foo"})
iex> changeset = force_change(changeset, :title, "bar")
iex> changeset.changes
%{title: "bar"}
iex> changeset = force_change(changeset, :author, "bar")
iex> changeset.changes
%{title: "bar", author: "bar"}
"""
@spec force_change(t, atom, term) :: t | no_return
def force_change(%Changeset{types: nil}, _key, _value) do
raise ArgumentError, "changeset does not have types information"
end
def force_change(%Changeset{types: types} = changeset, key, value) do
model = changeset.model
value =
case Map.get(types, key) do
{tag, relation} when tag in @relations ->
# TODO: Always raise
IO.write :stderr, "warning: changing #{tag}s with force_change/3 is deprecated, " <>
"please use put_#{tag}/4 instead\n" <> Exception.format_stacktrace()
{:ok, changes, _, _} =
Relation.change(relation, model, value, Map.get(model, key))
changes
_ ->
value
end
update_in changeset.changes, &Map.put(&1, key, value)
end
@doc """
Deletes a change with the given key.
## Examples
iex> changeset = change(%Post{}, %{title: "foo"})
iex> changeset = delete_change(changeset, :title)
iex> get_change(changeset, :title)
nil
"""
@spec delete_change(t, atom) :: t
def delete_change(%Changeset{} = changeset, key) when is_atom(key) do
update_in changeset.changes, &Map.delete(&1, key)
end
@doc """
Applies the changeset changes to the changeset model.
Note this operation is automatically performed on `Ecto.Repo.insert!/2` and
`Ecto.Repo.update!/2`, however this function is provided for
debugging and testing purposes.
## Examples
apply_changes(changeset)
"""
@spec apply_changes(t) :: Ecto.Schema.t
def apply_changes(%Changeset{changes: changes, model: model}) when changes == %{} do
model
end
def apply_changes(%Changeset{changes: changes, model: model, types: types}) do
changes =
Enum.map(changes, fn {key, value} = kv ->
case Map.get(types, key) do
{tag, relation} when tag in @relations ->
{key, Relation.apply_changes(relation, value)}
_ ->
kv
end
end)
struct(model, changes)
end
## Validations
@doc """
Adds an error to the changeset.
## Examples
iex> changeset = change(%Post{}, %{title: ""})
iex> changeset = add_error(changeset, :title, "empty")
iex> changeset.errors
[title: "empty"]
iex> changeset.valid?
false
"""
@spec add_error(t, atom, error_message) :: t
def add_error(%{errors: errors} = changeset, key, message) when is_binary(message) do
%{changeset | errors: [{key, message}|errors], valid?: false}
end
def add_error(%{errors: errors} = changeset, key, {message, opts} = error)
when is_binary(message) and is_list(opts) do
%{changeset | errors: [{key, error}|errors], valid?: false}
end
@doc """
Validates the given `field` change.
It invokes the `validator` function to perform the validation
only if a change for the given `field` exists and the change
value is not `nil`. The function must return a list of errors
(with an empty list meaning no errors).
In case there's at least one error, the list of errors will be appended to the
`:errors` field of the changeset and the `:valid?` flag will be set to
`false`.
## Examples
iex> changeset = change(%Post{}, %{title: "foo"})
iex> changeset = validate_change changeset, :title, fn
...> # Value must not be "foo"!
...> :title, "foo" -> [title: "is foo"]
...> :title, _ -> []
...> end
iex> changeset.errors
[title: "is_foo"]
"""
## 验证数据,validator是个函数
## 接受一个atom的field和一个term做为参数,返回一个空列表
## 或者包含错误的列表
@spec validate_change(t, atom, (atom, term -> [error])) :: t
def validate_change(changeset, field, validator) when is_atom(field) do
%{changes: changes, errors: errors} = changeset
value = Map.get(changes, field)
new = if is_nil(value), do: [], else: validator.(field, value)
case new do
[] -> changeset
[_|_] -> %{changeset | errors: new ++ errors, valid?: false}
end
end
@doc """
Stores the validation `metadata` and validates the given `field` change.
Similar to `validate_change/3` but stores the validation metadata
into the changeset validators. The validator metadata is often used
as a reflection mechanism, to automatically generate code based on
the available validations.
## Examples
iex> changeset = change(%Post{}, %{title: "foo"})
iex> changeset = validate_change changeset, :title, :useless_validator, fn
...> _, _ -> []
...> end
iex> changeset.validations
[title: :useless_validator]
"""
@spec validate_change(t, atom, term, (atom, term -> [error])) :: t
def validate_change(%{validations: validations} = changeset, field, metadata, validator) do
changeset = %{changeset | validations: [{field, metadata}|validations]}
validate_change(changeset, field, validator)
end
@doc """
Validates a change has the given format.
The format has to be expressed as a regular expression.
## Options
* `:message` - the message on failure, defaults to "has invalid format"
## Examples
validate_format(changeset, :email, ~r/@/)
"""
@spec validate_format(t, atom, Regex.t, Keyword.t) :: t
def validate_format(changeset, field, format, opts \\ []) do
validate_change changeset, field, {:format, format}, fn _, value ->
if value =~ format, do: [], else: [{field, message(opts, "has invalid format")}]
end
end
@doc """
Validates a change is included in the given enumerable.
## Options
* `:message` - the message on failure, defaults to "is invalid"
## Examples
validate_inclusion(changeset, :gender, ["man", "woman", "other", "prefer not to say"])
validate_inclusion(changeset, :age, 0..99)
"""
@spec validate_inclusion(t, atom, Enum.t, Keyword.t) :: t
def validate_inclusion(changeset, field, data, opts \\ []) do
validate_change changeset, field, {:inclusion, data}, fn _, value ->
if value in data, do: [], else: [{field, message(opts, "is invalid")}]
end
end
@doc ~S"""
Validates a change, of type enum, is a subset of the given enumerable. Like
validate_inclusion/4 for lists.
## Options
* `:message` - the message on failure, defaults to "has an invalid entry"
## Examples
validate_subset(changeset, :pets, ["cat", "dog", "parrot"])
validate_subset(changeset, :lottery_numbers, 0..99)
"""
@spec validate_subset(t, atom, Enum.t, Keyword.t) :: t
def validate_subset(changeset, field, data, opts \\ []) do
validate_change changeset, field, {:subset, data}, fn _, value ->
case Enum.any?(value, fn(x) -> not x in data end) do
true -> [{field, message(opts, "has an invalid entry")}]
false -> []
end
end
end
@doc """
Validates a change is not included in the given enumerable.
## Options
* `:message` - the message on failure, defaults to "is reserved"
## Examples
validate_exclusion(changeset, :name, ~w(admin superadmin))
"""
@spec validate_exclusion(t, atom, Enum.t, Keyword.t) :: t
def validate_exclusion(changeset, field, data, opts \\ []) do
validate_change changeset, field, {:exclusion, data}, fn _, value ->
if value in data, do: [{field, message(opts, "is reserved")}], else: []
end
end
@doc """
Validates a change is a string or list of the given length.
## Options
* `:is` - the length must be exactly this value
* `:min` - the length must be greater than or equal to this value
* `:max` - the length must be less than or equal to this value
* `:message` - the message on failure, depending on the validation, is one of:
* for strings:
* "should be %{count} character(s)"
* "should be at least %{count} character(s)"
* "should be at most %{count} character(s)"
* for lists:
* "should have %{count} item(s)"
* "should have at least %{count} item(s)"
* "should have at most %{count} item(s)"
## Examples
validate_length(changeset, :title, min: 3)
validate_length(changeset, :title, max: 100)
validate_length(changeset, :title, min: 3, max: 100)
validate_length(changeset, :code, is: 9)
validate_length(changeset, :topics, is: 2)
"""
@spec validate_length(t, atom, Keyword.t) :: t
def validate_length(changeset, field, opts) when is_list(opts) do
validate_change changeset, field, {:length, opts}, fn
_, value ->
{type, length} = case value do
value when is_binary(value) ->
{:string, String.length(value)}
value when is_list(value) ->
{:list, length(value)}
end
error = ((is = opts[:is]) && wrong_length(type, length, is, opts)) ||
((min = opts[:min]) && too_short(type, length, min, opts)) ||
((max = opts[:max]) && too_long(type, length, max, opts))
if error, do: [{field, error}], else: []
end
end
# Returns nil when the length is exactly the target, otherwise the error
# tuple for the value's type (:string or :list).
defp wrong_length(type, actual, target, opts) do
  cond do
    actual == target ->
      nil
    type == :string ->
      {message(opts, "should be %{count} character(s)"), count: target}
    type == :list ->
      {message(opts, "should have %{count} item(s)"), count: target}
  end
end

# Returns nil when the length meets the minimum, otherwise the error tuple.
defp too_short(type, actual, min, opts) do
  cond do
    actual >= min ->
      nil
    type == :string ->
      {message(opts, "should be at least %{count} character(s)"), count: min}
    type == :list ->
      {message(opts, "should have at least %{count} item(s)"), count: min}
  end
end

# Returns nil when the length is within the maximum, otherwise the error tuple.
defp too_long(type, actual, max, opts) do
  cond do
    actual <= max ->
      nil
    type == :string ->
      {message(opts, "should be at most %{count} character(s)"), count: max}
    type == :list ->
      {message(opts, "should have at most %{count} item(s)"), count: max}
  end
end
@doc """
Validates the properties of a number.
## Options
* `:less_than`
* `:greater_than`
* `:less_than_or_equal_to`
* `:greater_than_or_equal_to`
* `:equal_to`
* `:message` - the message on failure, defaults to one of:
* "must be less than %{count}"
* "must be greater than %{count}"
* "must be less than or equal to %{count}"
* "must be greater than or equal to %{count}"
* "must be equal to %{count}"
## Examples
validate_number(changeset, :count, less_than: 3)
validate_number(changeset, :pi, greater_than: 3, less_than: 4)
validate_number(changeset, :the_answer_to_life_the_universe_and_everything, equal_to: 42)
"""
@spec validate_number(t, atom, Keyword.t) :: t | no_return
def validate_number(changeset, field, opts) do
validate_change changeset, field, {:number, opts}, fn
field, value ->
{message, opts} = Keyword.pop(opts, :message)
Enum.find_value opts, [], fn {spec_key, target_value} ->
case Map.fetch(@number_validators, spec_key) do
{:ok, {spec_function, default_message}} ->
validate_number(field, value, message || default_message,
spec_key, spec_function, target_value)
:error ->
raise ArgumentError, "unknown option #{inspect spec_key} given to validate_number/3"
end
end
end
end
# Decimal values cannot use the plain comparison function: compare via the
# Decimal library and interpret the result against the spec key.
defp validate_number(field, %Decimal{} = value, message, spec_key, _spec_function, target_value) do
  compared = Decimal.compare(value, target_value)

  if decimal_compare(compared, spec_key) do
    nil
  else
    [{field, {message, count: target_value}}]
  end
end

# Plain numbers: apply the comparison function directly.
defp validate_number(field, value, message, _spec_key, spec_function, target_value) do
  if apply(spec_function, [value, target_value]) do
    nil
  else
    [{field, {message, count: target_value}}]
  end
end
# Interprets the result of Decimal.compare/2 (a Decimal -1, 0 or 1)
# against the requested comparison.
defp decimal_compare(result, spec_key) do
  case spec_key do
    :less_than ->
      Decimal.equal?(result, Decimal.new(-1))
    :greater_than ->
      Decimal.equal?(result, Decimal.new(1))
    :equal_to ->
      Decimal.equal?(result, Decimal.new(0))
    :less_than_or_equal_to ->
      decimal_compare(result, :less_than) or decimal_compare(result, :equal_to)
    :greater_than_or_equal_to ->
      decimal_compare(result, :greater_than) or decimal_compare(result, :equal_to)
  end
end
@doc """
Validates that the given field matches the confirmation
parameter of that field.
By calling `validate_confirmation(changeset, :email)`, this
validation will check if both "email" and "email_confirmation"
in the parameter map matches.
Note that this does not add a validation error if the confirmation
field is nil. Note "email_confirmation" does not need to be added
as a virtual field in your schema.
## Options
* `:message` - the message on failure, defaults to "does not match"
## Examples
validate_confirmation(changeset, :email)
validate_confirmation(changeset, :password, message: "does not match password")
cast(model, params, ~w(password), ~w())
|> validate_confirmation(:password, message: "does not match password")
"""
@spec validate_confirmation(t, atom, Keyword.t) :: t
def validate_confirmation(changeset, field, opts \\ []) do
validate_change changeset, field, {:confirmation, opts}, fn _, _ ->
param = Atom.to_string(field)
error_param = "#{param}_confirmation"
error_field = String.to_atom(error_param)
value = Map.get(changeset.params, param)
case Map.fetch(changeset.params, error_param) do
{:ok, ^value} -> []
{:ok, _} -> [{error_field, message(opts, "does not match confirmation")}]
:error -> []
end
end
end
# Returns the caller-provided :message option, falling back to `default`.
defp message(opts, default), do: Keyword.get(opts, :message, default)
## Optimistic lock
@doc ~S"""
Applies optimistic locking to the changeset.
[Optimistic
locking](http://en.wikipedia.org/wiki/Optimistic_concurrency_control) (or
*optimistic concurrency control*) is a technique that allows concurrent edits
on a single record. While pessimistic locking works by locking a resource for
an entire transaction, optimistic locking only checks if the resource changed
before updating it.
This is done by regularly fetching the record from the database, then checking
whether another user has made changes to the record *only when updating the
record*. This behaviour is ideal in situations where the chances of concurrent
updates to the same record are low; if they're not, pessimistic locking or
other concurrency patterns may be more suited.
## Usage
Optimistic locking works by keeping a "version" counter for each record; this
counter gets incremented each time a modification is made to a record. Hence,
in order to use optimistic locking, a field must exist in your schema for
versioning purpose. Such field is usually an integer but other types are
supported.
## Examples
Assuming we have a `Post` schema (stored in the `posts` table), the first step
is to add a version column to the `posts` table:
alter table(:posts) do
add :lock_version, :integer, default: 1
end
The column name is arbitrary and doesn't need to be `:lock_version`. Now add
a field to the schema too:
defmodule Post do
use Ecto.Schema
schema "posts" do
field :title, :string
field :lock_version, :integer, default: 1
end
def changeset(:update, struct, params \\ :empty) do
struct
|> Ecto.Changeset.cast(struct, params, ~w(:title))
|> Ecto.Changeset.optimistic_lock(:lock_version)
end
end
Now let's take optimistic locking for a spin:
iex> post = Repo.insert!(%Post{title: "foo"})
%Post{id: 1, title: "foo", lock_version: 1}
iex> valid_change = Post.changeset(:update, post, %{title: "bar"})
iex> stable_change = Post.changeset(:update, post, %{title: "baz"})
iex> Repo.update!(valid_change)
%Post{id: 1, title: "bar", lock_version: 2}
iex> Repo.update!(stale_change)
** (Ecto.StaleModelError) attempted to update a stale model:
%Post{id: 1, title: "baz", lock_version: 1}
When a conflict happens (a record which has been previously fetched is
being updated, but that same record has been modified since it was
fetched), an `Ecto.StaleModelError` exception is raised.
Optimistic locking also works with delete operations. Just call the
`optimistic_lock` function with the model before delete:
iex> changeset = Ecto.Changeset.optimistic_lock(post, :lock_version)
iex> Repo.delete(changeset)
Finally, keep in `optimistic_lock/3` by default assumes the field
being used as a lock is an integer. If you want to use another type,
you need to pass the third argument customizing how the next value
is generated:
iex> Ecto.Changeset.optimistic_lock(post, :lock_uuid, fn _ -> Ecto.UUID.generate end)
"""
@spec optimistic_lock(Ecto.Schema.t | t, atom, (integer -> integer)) :: t | no_return
def optimistic_lock(model_or_changeset, field, incrementer \\ &(&1 + 1)) do
changeset = change(model_or_changeset, %{})
current = Map.fetch!(changeset.model, field)
update_in(changeset.filters, &Map.put(&1, field, current))
|> force_change(field, incrementer.(current))
end
@doc """
Provides a function to run before emitting changes to the repository.
Such function receives the changeset and must return a changeset,
allowing developers to do final adjustments to the changeset or to
issue data consistency commands.
The given function is guaranteed to run inside the same transaction
as the changeset operation for databases that do support transactions.
"""
@spec prepare_changes(t, (t -> t)) :: t
def prepare_changes(changeset, function) when is_function(function, 1) do
update_in changeset.prepare, &[function|&1]
end
## Constraints
@doc """
Checks for a unique constraint in the given field.
The unique constraint works by relying on the database to check
if the unique constraint has been violated or not and, if so,
Ecto converts it into a changeset error.
In order to use the uniqueness constraint the first step is
to define the unique index in a migration:
create unique_index(:users, [:email])
Now that a constraint exists, when modifying users, we could
annotate the changeset with unique constraint so Ecto knows
how to convert it into an error message:
cast(user, params, ~w(email), ~w())
|> unique_constraint(:email)
Now, when invoking `Repo.insert/2` or `Repo.update/2`, if the
email already exists, it will be converted into an error and
`{:error, changeset}` returned by the repository. Note that the error
will occur only after hitting the database so it will not be visible
until all other validations pass.
## Options
* `:message` - the message in case the constraint check fails,
defaults to "has already been taken"
* `:name` - the constraint name. By default, the constraint
name is inflected from the table + field. May be required
explicitly for complex cases
## Complex constraints
Because the constraint logic is in the database, we can leverage
all the database functionality when defining them. For example,
let's suppose the e-mails are scoped by company id. We would write
in a migration:
create unique_index(:users, [:email, :company_id])
Because such indexes have usually more complex names, we need
to explicitly tell the changeset which constraint name to use:
cast(user, params, ~w(email), ~w())
|> unique_constraint(:email, name: :posts_email_company_id_index)
Alternatively, you can give both `unique_index` and `unique_constraint`
a name:
# In the migration
create unique_index(:users, [:email, :company_id], name: :posts_special_email_index)
# In the model
cast(user, params, ~w(email), ~w())
|> unique_constraint(:email, name: :posts_email_company_id_index)
## Case sensitivity
Unfortunately, different databases provide different guarantees
when it comes to case-sensitiveness. For example, in MySQL, comparisons
are case-insensitive by default. In Postgres, users can define case
insensitive column by using the `:citext` type/extension.
If for some reason your database does not support case insensitive columns,
you can explicitly downcase values before inserting/updating them:
cast(model, params, ~w(email), ~w())
|> update_change(:email, &String.downcase/1)
|> unique_constraint(:email)
"""
@spec unique_constraint(t, atom, Keyword.t) :: t
def unique_constraint(changeset, field, opts \\ []) do
constraint = opts[:name] || "#{get_source(changeset)}_#{field}_index"
message = opts[:message] || "has already been taken"
add_constraint(changeset, :unique, to_string(constraint), field, message)
end
@doc """
Checks for foreign key constraint in the given field.
The foreign key constraint works by relying on the database to
check if the associated model exists or not. This is useful to
guarantee that a child will only be created if the parent exists
in the database too.
In order to use the foreign key constraint the first step is
to define the foreign key in a migration. This is often done
with references. For example, imagine you are creating a
comments table that belongs to posts. One would have:
create table(:comments) do
add :post_id, references(:posts)
end
By default, Ecto will generate a foreign key constraint with
name "comments_post_id_fkey" (the name is configurable).
Now that a constraint exists, when creating comments, we could
annotate the changeset with foreign key constraint so Ecto knows
how to convert it into an error message:
cast(comment, params, ~w(post_id), ~w())
|> foreign_key_constraint(:post_id)
Now, when invoking `Repo.insert/2` or `Repo.update/2`, if the
associated post does not exist, it will be converted into an
error and `{:error, changeset}` returned by the repository.
## Options
* `:message` - the message in case the constraint check fails,
defaults to "does not exist"
* `:name` - the constraint name. By default, the constraint
name is inflected from the table + field. May be required
explicitly for complex cases
"""
@spec foreign_key_constraint(t, atom, Keyword.t) :: t
def foreign_key_constraint(changeset, field, opts \\ []) do
constraint = opts[:name] || "#{get_source(changeset)}_#{field}_fkey"
message = opts[:message] || "does not exist"
add_constraint(changeset, :foreign_key, to_string(constraint), field, message)
end
@doc """
Checks the associated model exists.
This is similar to `foreign_key_constraint/3` except that the
field is inflected from the association definition. This is useful
to guarantee that a child will only be created if the parent exists
in the database too. Therefore, it only applies to `belongs_to`
associations.
As the name says, a constraint is required in the database for
this function to work. Such constraint is often added as a
reference to the child table:
create table(:comments) do
add :post_id, references(:posts)
end
Now, when inserting a comment, it is possible to forbid any
comment to be added if the associated post does not exist:
comment
|> Ecto.Changeset.cast(params, ~w(post_id))
|> Ecto.Changeset.assoc_constraint(:post)
|> Repo.insert
## Options
* `:message` - the message in case the constraint check fails,
defaults to "does not exist"
* `:name` - the constraint name. By default, the constraint
name is inflected from the table + association field.
May be required explicitly for complex cases
"""
@spec assoc_constraint(t, atom, Keyword.t) :: t | no_return
def assoc_constraint(changeset, assoc, opts \\ []) do
constraint = opts[:name] ||
(case get_assoc(changeset, assoc) do
%Ecto.Association.BelongsTo{owner_key: owner_key} ->
"#{get_source(changeset)}_#{owner_key}_fkey"
other ->
raise ArgumentError,
"assoc_constraint can only be added to belongs to associations, got: #{inspect other}"
end)
message = opts[:message] || "does not exist"
add_constraint(changeset, :foreign_key, to_string(constraint), assoc, message)
end
@doc """
Checks the associated model does not exist.
This is similar to `foreign_key_constraint/3` except that the
field is inflected from the association definition. This is useful
to guarantee that parent can only be deleted (or have its primary
key changed) if no child exists in the database. Therefore, it only
applies to `has_*` associations.
As the name says, a constraint is required in the database for
this function to work. Such constraint is often added as a
reference to the child table:
create table(:comments) do
add :post_id, references(:posts)
end
Now, when deleting the post, it is possible to forbid any post to
be deleted if they still have comments attached to it:
post
|> Ecto.Changeset.change
|> Ecto.Changeset.no_assoc_constraint(:comments)
|> Repo.delete
## Options
* `:message` - the message in case the constraint check fails,
defaults to "is still associated to this entry" (for has_one)
and "are still associated to this entry" (for has_many)
* `:name` - the constraint name. By default, the constraint
name is inflected from the association table + association
field. May be required explicitly for complex cases
"""
@spec no_assoc_constraint(t, atom, Keyword.t) :: t | no_return
def no_assoc_constraint(changeset, assoc, opts \\ []) do
# Only has_one/has_many associations are valid here: the database constraint
# lives on the child table and points back at this (parent) row.
{constraint, message} =
(case get_assoc(changeset, assoc) do
%Ecto.Association.Has{cardinality: cardinality,
related_key: related_key, related: related} ->
# Default name follows "<child_table>_<fk_column>_fkey"; the default
# message varies with has_one vs has_many cardinality. Both defaults
# are short-circuited by `||`, so they are only computed when needed.
{opts[:name] || "#{related.__schema__(:source)}_#{related_key}_fkey",
opts[:message] || no_assoc_message(cardinality)}
other ->
raise ArgumentError,
"no_assoc_constraint can only be added to has one/many associations, got: #{inspect other}"
end)
# Registered as :foreign_key because it is the child table's FK constraint
# that fires when this parent row is deleted or its key is changed.
add_constraint(changeset, :foreign_key, to_string(constraint), assoc, message)
end
@doc """
Checks for a exclude constraint in the given field.
The exclude constraint works by relying on the database to check
if the exclude constraint has been violated or not and, if so,
Ecto converts it into a changeset error.
## Options
* `:message` - the message in case the constraint check fails,
defaults to "violates an exclusion constraint"
* `:name` - the constraint name. By default, the constraint
name is inflected from the table + field. May be required
explicitly for complex cases
"""
# @spec added for consistency with foreign_key_constraint/3 and the other
# constraint helpers in this module.
@spec exclude_constraint(t, atom, Keyword.t) :: t
def exclude_constraint(changeset, field, opts \\ []) do
  # Default name mirrors the "<table>_<field>_exclusion" naming scheme used
  # for (PostgreSQL-style) exclusion constraints.
  constraint = opts[:name] || "#{get_source(changeset)}_#{field}_exclusion"
  message = opts[:message] || "violates an exclusion constraint"
  add_constraint(changeset, :exclude, to_string(constraint), field, message)
end
# Default :message for no_assoc_constraint/3, keyed by association cardinality.
defp no_assoc_message(:one) do
  "is still associated to this entry"
end

defp no_assoc_message(:many) do
  "are still associated to this entry"
end
# Prepends one constraint descriptor onto changeset.constraints; the guards
# assert the normalized shapes produced by the public constraint helpers.
defp add_constraint(changeset, type, constraint, field, message)
     when is_binary(constraint) and is_atom(field) and is_binary(message) do
  entry = %{type: type, constraint: constraint, field: field, message: message}
  update_in(changeset.constraints, fn existing -> [entry | existing] end)
end
# Extracts the schema source (table name) from the changeset model's metadata.
# In this Ecto version the metadata stores the source as a {prefix, source}
# tuple; only the source part is needed for constraint-name inflection.
defp get_source(%{model: %{__meta__: %{source: {_prefix, source}}}}) when is_binary(source),
do: source
# Sourceless models cannot carry database constraints, so fail loudly.
defp get_source(%{model: model}), do:
raise(ArgumentError, "cannot add constraint to model because it does not have a source, got: #{inspect model}")
# Looks up the association reflection for `assoc` on the changeset's model
# module; raises when the association is not declared on the schema.
defp get_assoc(%{model: %{__struct__: model}}, assoc) do
model.__schema__(:association, assoc) ||
raise(ArgumentError, "cannot add constraint to model because association `#{assoc}` does not exist")
end
@doc """
Traverses changeset errors and applies function to error messages.
This function is particularly useful when associations and embeds
are cast in the changeset as it will traverse all associations and
embeds and place all errors in a series of nested maps.
A changeset is supplied along with a function to apply to each
error message as the changeset is traversed. The error message
function receives a single argument matching either:
* `{message, opts}` - The string error message and options,
for example `{"should be at least %{count} characters", [count: 3]}`
* `message` - The string error message
## Examples
iex> traverse_errors(changeset, fn
{msg, opts} -> String.replace(msg, "%{count}", to_string(opts[:count]))
msg -> msg
end)
%{title: "should be at least 3 characters"}
"""
@spec traverse_errors(t, (error_message -> String.t)) :: %{atom => String.t}
def traverse_errors(%Changeset{errors: errors, changes: changes, types: types}, msg_func) do
errors
|> Enum.reverse()
|> merge_error_keys(msg_func)
|> merge_related_keys(changes, types, msg_func)
end
# Renders each raw error with msg_func and groups the rendered messages
# into a list per field.
defp merge_error_keys(errors, msg_func) do
  Enum.reduce(errors, %{}, fn {field, raw_message}, acc ->
    rendered = msg_func.(raw_message)
    Map.update(acc, field, [rendered], fn rest -> [rendered | rest] end)
  end)
end
# Recurses into association/embed changes (types tagged with one of the
# @relations tags) and nests their traversed errors under the field name:
# a map for cardinality :one, a list of maps for :many. Fields without a
# change (or non-relation fields) are left untouched.
defp merge_related_keys(map, changes, types, msg_func) do
Enum.reduce types, map, fn
{field, {tag, %{cardinality: :many}}}, acc when tag in @relations ->
if changesets = Map.get(changes, field) do
Map.put(acc, field, Enum.map(changesets, &traverse_errors(&1, msg_func)))
else
acc
end
{field, {tag, %{cardinality: :one}}}, acc when tag in @relations ->
if changeset = Map.get(changes, field) do
Map.put(acc, field, traverse_errors(changeset, msg_func))
else
acc
end
{_, _}, acc ->
acc
end
end
end
| 36.275655 | 117 | 0.65501 |
1ccf752cb0972dc0d4f8696d0f18871d8893f4a4 | 11,623 | ex | Elixir | lib/elixir_ex_aliyun_ots_table_store_create_index_request.ex | hou8/tablestore_protos | 1a3223326b92bbe196d57ce4dd19b5a8db1c728d | [
"MIT"
] | null | null | null | lib/elixir_ex_aliyun_ots_table_store_create_index_request.ex | hou8/tablestore_protos | 1a3223326b92bbe196d57ce4dd19b5a8db1c728d | [
"MIT"
] | 1 | 2022-02-08T06:37:02.000Z | 2022-02-08T06:37:02.000Z | lib/elixir_ex_aliyun_ots_table_store_create_index_request.ex | hou8/tablestore_protos | 1a3223326b92bbe196d57ce4dd19b5a8db1c728d | [
"MIT"
] | 2 | 2022-01-24T06:13:03.000Z | 2022-01-24T08:33:41.000Z | # credo:disable-for-this-file
defmodule(ExAliyunOts.TableStore.CreateIndexRequest) do
@moduledoc false
(
defstruct(main_table_name: nil, index_meta: nil, include_base_data: nil)
(
(
@spec encode(struct) :: {:ok, iodata} | {:error, any}
def(encode(msg)) do
try do
{:ok, encode!(msg)}
rescue
e in [Protox.EncodingError, Protox.RequiredFieldsError] ->
{:error, e}
end
end
@spec encode!(struct) :: iodata | no_return
def(encode!(msg)) do
[]
|> encode_main_table_name(msg)
|> encode_index_meta(msg)
|> encode_include_base_data(msg)
end
)
[]
[
defp(encode_main_table_name(acc, msg)) do
try do
case(msg.main_table_name) do
nil ->
raise(Protox.RequiredFieldsError.new([:main_table_name]))
_ ->
[acc, "\n", Protox.Encode.encode_string(msg.main_table_name)]
end
rescue
ArgumentError ->
reraise(
Protox.EncodingError.new(:main_table_name, "invalid field value"),
__STACKTRACE__
)
end
end,
defp(encode_index_meta(acc, msg)) do
try do
case(msg.index_meta) do
nil ->
raise(Protox.RequiredFieldsError.new([:index_meta]))
_ ->
[acc, <<18>>, Protox.Encode.encode_message(msg.index_meta)]
end
rescue
ArgumentError ->
reraise(
Protox.EncodingError.new(:index_meta, "invalid field value"),
__STACKTRACE__
)
end
end,
defp(encode_include_base_data(acc, msg)) do
try do
case(msg.include_base_data) do
nil ->
acc
_ ->
[acc, <<24>>, Protox.Encode.encode_bool(msg.include_base_data)]
end
rescue
ArgumentError ->
reraise(
Protox.EncodingError.new(:include_base_data, "invalid field value"),
__STACKTRACE__
)
end
end
]
[]
)
(
(
@spec decode(binary) :: {:ok, struct} | {:error, any}
def(decode(bytes)) do
try do
{:ok, decode!(bytes)}
rescue
e in [Protox.DecodingError, Protox.IllegalTagError, Protox.RequiredFieldsError] ->
{:error, e}
end
end
(
@spec decode!(binary) :: struct | no_return
def(decode!(bytes)) do
{msg, set_fields} =
parse_key_value([], bytes, struct(ExAliyunOts.TableStore.CreateIndexRequest))
case([:main_table_name, :index_meta] -- set_fields) do
[] ->
msg
missing_fields ->
raise(Protox.RequiredFieldsError.new(missing_fields))
end
end
)
)
(
@spec parse_key_value([atom], binary, struct) :: {struct, [atom]}
defp(parse_key_value(set_fields, <<>>, msg)) do
{msg, set_fields}
end
defp(parse_key_value(set_fields, bytes, msg)) do
{new_set_fields, field, rest} =
case(Protox.Decode.parse_key(bytes)) do
{0, _, _} ->
raise(%Protox.IllegalTagError{})
{1, _, bytes} ->
{len, bytes} = Protox.Varint.decode(bytes)
{delimited, rest} = Protox.Decode.parse_delimited(bytes, len)
{[:main_table_name | set_fields], [main_table_name: delimited], rest}
{2, _, bytes} ->
{len, bytes} = Protox.Varint.decode(bytes)
{delimited, rest} = Protox.Decode.parse_delimited(bytes, len)
{[:index_meta | set_fields],
[
index_meta:
Protox.MergeMessage.merge(
msg.index_meta,
ExAliyunOts.TableStore.IndexMeta.decode!(delimited)
)
], rest}
{3, _, bytes} ->
{value, rest} = Protox.Decode.parse_bool(bytes)
{[:include_base_data | set_fields], [include_base_data: value], rest}
{tag, wire_type, rest} ->
{_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest)
{set_fields, [], rest}
end
msg_updated = struct(msg, field)
parse_key_value(new_set_fields, rest, msg_updated)
end
)
[]
)
(
@spec json_decode(iodata(), keyword()) :: {:ok, struct()} | {:error, any()}
def(json_decode(input, opts \\ [])) do
try do
{:ok, json_decode!(input, opts)}
rescue
e in Protox.JsonDecodingError ->
{:error, e}
end
end
@spec json_decode!(iodata(), keyword()) :: struct() | no_return()
def(json_decode!(input, opts \\ [])) do
{json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :decode)
Protox.JsonDecode.decode!(
input,
ExAliyunOts.TableStore.CreateIndexRequest,
&json_library_wrapper.decode!(json_library, &1)
)
end
@spec json_encode(struct(), keyword()) :: {:ok, iodata()} | {:error, any()}
def(json_encode(msg, opts \\ [])) do
try do
{:ok, json_encode!(msg, opts)}
rescue
e in Protox.JsonEncodingError ->
{:error, e}
end
end
@spec json_encode!(struct(), keyword()) :: iodata() | no_return()
def(json_encode!(msg, opts \\ [])) do
{json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :encode)
Protox.JsonEncode.encode!(msg, &json_library_wrapper.encode!(json_library, &1))
end
)
@deprecated "Use fields_defs()/0 instead"
@spec defs() :: %{
required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()}
}
def(defs()) do
%{
1 => {:main_table_name, {:scalar, ""}, :string},
2 => {:index_meta, {:scalar, nil}, {:message, ExAliyunOts.TableStore.IndexMeta}},
3 => {:include_base_data, {:scalar, false}, :bool}
}
end
@deprecated "Use fields_defs()/0 instead"
@spec defs_by_name() :: %{
required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()}
}
def(defs_by_name()) do
%{
include_base_data: {3, {:scalar, false}, :bool},
index_meta: {2, {:scalar, nil}, {:message, ExAliyunOts.TableStore.IndexMeta}},
main_table_name: {1, {:scalar, ""}, :string}
}
end
@spec fields_defs() :: list(Protox.Field.t())
def(fields_defs()) do
[
%{
__struct__: Protox.Field,
json_name: "mainTableName",
kind: {:scalar, ""},
label: :required,
name: :main_table_name,
tag: 1,
type: :string
},
%{
__struct__: Protox.Field,
json_name: "indexMeta",
kind: {:scalar, nil},
label: :required,
name: :index_meta,
tag: 2,
type: {:message, ExAliyunOts.TableStore.IndexMeta}
},
%{
__struct__: Protox.Field,
json_name: "includeBaseData",
kind: {:scalar, false},
label: :optional,
name: :include_base_data,
tag: 3,
type: :bool
}
]
end
[
@spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}),
(
def(field_def(:main_table_name)) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "mainTableName",
kind: {:scalar, ""},
label: :required,
name: :main_table_name,
tag: 1,
type: :string
}}
end
def(field_def("mainTableName")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "mainTableName",
kind: {:scalar, ""},
label: :required,
name: :main_table_name,
tag: 1,
type: :string
}}
end
def(field_def("main_table_name")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "mainTableName",
kind: {:scalar, ""},
label: :required,
name: :main_table_name,
tag: 1,
type: :string
}}
end
),
(
def(field_def(:index_meta)) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "indexMeta",
kind: {:scalar, nil},
label: :required,
name: :index_meta,
tag: 2,
type: {:message, ExAliyunOts.TableStore.IndexMeta}
}}
end
def(field_def("indexMeta")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "indexMeta",
kind: {:scalar, nil},
label: :required,
name: :index_meta,
tag: 2,
type: {:message, ExAliyunOts.TableStore.IndexMeta}
}}
end
def(field_def("index_meta")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "indexMeta",
kind: {:scalar, nil},
label: :required,
name: :index_meta,
tag: 2,
type: {:message, ExAliyunOts.TableStore.IndexMeta}
}}
end
),
(
def(field_def(:include_base_data)) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "includeBaseData",
kind: {:scalar, false},
label: :optional,
name: :include_base_data,
tag: 3,
type: :bool
}}
end
def(field_def("includeBaseData")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "includeBaseData",
kind: {:scalar, false},
label: :optional,
name: :include_base_data,
tag: 3,
type: :bool
}}
end
def(field_def("include_base_data")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "includeBaseData",
kind: {:scalar, false},
label: :optional,
name: :include_base_data,
tag: 3,
type: :bool
}}
end
),
def(field_def(_)) do
{:error, :no_such_field}
end
]
[]
# proto2 `required` fields of CreateIndexRequest; decode!/1 raises
# Protox.RequiredFieldsError when any of them is missing from the wire data.
# NOTE(review): this module appears to be machine-generated by protox —
# regenerate from the .proto definition rather than editing by hand.
@spec required_fields() :: [:main_table_name | :index_meta]
def(required_fields()) do
[:main_table_name, :index_meta]
end
@spec syntax() :: atom
def(syntax()) do
:proto2
end
[
@spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}),
def(default(:main_table_name)) do
{:ok, ""}
end,
def(default(:index_meta)) do
{:ok, nil}
end,
def(default(:include_base_data)) do
{:ok, false}
end,
def(default(_)) do
{:error, :no_such_field}
end
]
)
end | 28.142857 | 94 | 0.479652 |
1ccfabc2787eef1c35149d481841b14ae0772d46 | 19,422 | ex | Elixir | lib/phoenix_live_view/html_engine.ex | Zurga/phoenix_live_view | c888d910b4fcce8374eaa102d124d7cb84bf9785 | [
"MIT"
] | null | null | null | lib/phoenix_live_view/html_engine.ex | Zurga/phoenix_live_view | c888d910b4fcce8374eaa102d124d7cb84bf9785 | [
"MIT"
] | null | null | null | lib/phoenix_live_view/html_engine.ex | Zurga/phoenix_live_view | c888d910b4fcce8374eaa102d124d7cb84bf9785 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.HTMLEngine do
@moduledoc """
The HTMLEngine that powers `.heex` templates and the `~H` sigil.
It works by adding a HTML parsing and validation layer on top
of EEx engine. By default it uses `Phoenix.LiveView.Engine` as
its "subengine".
"""
# TODO: Use @impl true instead of @doc false when we require Elixir v1.12
alias Phoenix.LiveView.HTMLTokenizer
alias Phoenix.LiveView.HTMLTokenizer.ParseError
@behaviour Phoenix.Template.Engine
@doc false
def compile(path, _name) do
trim = Application.get_env(:phoenix, :trim_on_html_eex_engine, true)
EEx.compile_file(path, engine: __MODULE__, line: 1, trim: trim)
end
@behaviour EEx.Engine
@doc false
def init(opts) do
{subengine, opts} = Keyword.pop(opts, :subengine, Phoenix.LiveView.Engine)
{module, opts} = Keyword.pop(opts, :module)
unless subengine do
raise ArgumentError, ":subengine is missing for HTMLEngine"
end
%{
cont: :text,
tokens: [],
subengine: subengine,
substate: subengine.init([]),
module: module,
file: Keyword.get(opts, :file, "nofile"),
indentation: Keyword.get(opts, :indentation, 0)
}
end
## These callbacks return AST
@doc false
def handle_body(state) do
tokens =
state.tokens
|> strip_text_space()
|> Enum.reverse()
|> strip_text_space()
token_state =
state
|> token_state()
|> handle_tokens(tokens)
validate_unclosed_tags!(token_state)
opts = [root: token_state.root || false]
ast = invoke_subengine(token_state, :handle_body, [opts])
# Do not require if calling module is helpers. Fix for elixir < 1.12
# TODO remove after Elixir >= 1.12 support
if state.module === Phoenix.LiveView.Helpers do
ast
else
quote do
require Phoenix.LiveView.Helpers
unquote(ast)
end
end
end
defp validate_unclosed_tags!(%{tags: []} = state) do
state
end
defp validate_unclosed_tags!(%{tags: [tag | _]} = state) do
{:tag_open, name, _attrs, %{line: line, column: column}} = tag
file = state.file
message = "end of file reached without closing tag for <#{name}>"
raise ParseError, line: line, column: column, file: file, description: message
end
@doc false
def handle_end(state) do
state
|> token_state()
|> update_subengine(:handle_begin, [])
|> handle_tokens(Enum.reverse(state.tokens))
|> invoke_subengine(:handle_end, [])
end
defp token_state(%{subengine: subengine, substate: substate, file: file}) do
%{
subengine: subengine,
substate: substate,
file: file,
stack: [],
tags: [],
root: nil
}
end
defp handle_tokens(token_state, tokens) do
Enum.reduce(tokens, token_state, &handle_token/2)
end
## These callbacks update the state
@doc false
def handle_begin(state) do
%{state | tokens: []}
end
@doc false
def handle_text(state, text) do
handle_text(state, [], text)
end
def handle_text(state, meta, text) do
%{file: file, indentation: indentation, tokens: tokens, cont: cont} = state
{tokens, cont} = HTMLTokenizer.tokenize(text, file, indentation, meta, tokens, cont)
%{state | tokens: tokens, cont: cont}
end
@doc false
def handle_expr(%{tokens: tokens} = state, marker, expr) do
%{state | tokens: [{:expr, marker, expr} | tokens]}
end
## Helpers
defp push_substate_to_stack(%{substate: substate, stack: stack} = state) do
%{state | stack: [{:substate, substate} | stack]}
end
defp pop_substate_from_stack(%{stack: [{:substate, substate} | stack]} = state) do
%{state | stack: stack, substate: substate}
end
defp invoke_subengine(%{subengine: subengine, substate: substate}, :handle_text, args) do
# TODO: Remove this once we require Elixir v1.12
if function_exported?(subengine, :handle_text, 3) do
apply(subengine, :handle_text, [substate | args])
else
apply(subengine, :handle_text, [substate | tl(args)])
end
end
defp invoke_subengine(%{subengine: subengine, substate: substate}, fun, args) do
apply(subengine, fun, [substate | args])
end
defp update_subengine(state, fun, args) do
%{state | substate: invoke_subengine(state, fun, args)}
end
defp push_tag(state, token) do
# If we have a void tag, we don't actually push it into the stack.
with {:tag_open, name, _attrs, _meta} <- token,
true <- void?(name) do
state
else
_ -> %{state | tags: [token | state.tags]}
end
end
defp pop_tag!(
%{tags: [{:tag_open, tag_name, _attrs, _meta} = tag | tags]} = state,
{:tag_close, tag_name, _}
) do
{tag, %{state | tags: tags}}
end
defp pop_tag!(
%{tags: [{:tag_open, tag_open_name, _attrs, tag_open_meta} | _]} = state,
{:tag_close, tag_close_name, tag_close_meta}
) do
%{line: line, column: column} = tag_close_meta
file = state.file
message = """
unmatched closing tag. Expected </#{tag_open_name}> for <#{tag_open_name}> \
at line #{tag_open_meta.line}, got: </#{tag_close_name}>\
"""
raise ParseError, line: line, column: column, file: file, description: message
end
defp pop_tag!(state, {:tag_close, tag_name, tag_meta}) do
%{line: line, column: column} = tag_meta
file = state.file
message = "missing opening tag for </#{tag_name}>"
raise ParseError, line: line, column: column, file: file, description: message
end
## handle_token
# Expr
defp handle_token({:expr, marker, expr}, state) do
state
|> set_root_on_not_tag()
|> update_subengine(:handle_expr, [marker, expr])
end
# Text
defp handle_token({:text, text, %{line_end: line, column_end: column}}, state) do
state
|> set_root_on_not_tag()
|> update_subengine(:handle_text, [[line: line, column: column], text])
end
# Remote function component (self close)
defp handle_token(
{:tag_open, <<first, _::binary>> = tag_name, attrs, %{self_close: true} = tag_meta},
state
)
when first in ?A..?Z do
file = state.file
{mod, fun} = decompose_remote_component_tag!(tag_name, tag_meta, file)
{let, assigns} = handle_component_attrs(attrs, file)
raise_if_let!(let, file)
ast =
quote do
Phoenix.LiveView.Helpers.component(&(unquote(mod).unquote(fun) / 1), unquote(assigns))
end
state
|> set_root_on_not_tag()
|> update_subengine(:handle_expr, ["=", ast])
end
# Remote function component (with inner content)
defp handle_token({:tag_open, <<first, _::binary>> = tag_name, attrs, tag_meta}, state)
when first in ?A..?Z do
mod_fun = decompose_remote_component_tag!(tag_name, tag_meta, state.file)
token = {:tag_open, tag_name, attrs, Map.put(tag_meta, :mod_fun, mod_fun)}
state
|> set_root_on_not_tag()
|> push_tag(token)
|> push_substate_to_stack()
|> update_subengine(:handle_begin, [])
end
defp handle_token({:tag_close, <<first, _::binary>>, _tag_close_meta} = token, state)
when first in ?A..?Z do
{{:tag_open, _name, attrs, %{mod_fun: {mod, fun}, line: line}}, state} =
pop_tag!(state, token)
{let, assigns} = handle_component_attrs(attrs, state.file)
clauses = build_component_clauses(let, state)
ast =
quote line: line do
Phoenix.LiveView.Helpers.component(
&(unquote(mod).unquote(fun) / 1),
unquote(assigns),
do: unquote(clauses)
)
end
state
|> pop_substate_from_stack()
|> update_subengine(:handle_expr, ["=", ast])
end
# Local function component (self close)
defp handle_token(
{:tag_open, "." <> name, attrs, %{self_close: true, line: line}},
state
) do
fun = String.to_atom(name)
file = state.file
{let, assigns} = handle_component_attrs(attrs, file)
raise_if_let!(let, file)
ast =
quote line: line do
Phoenix.LiveView.Helpers.component(
&(unquote(Macro.var(fun, __MODULE__)) / 1),
unquote(assigns)
)
end
state
|> set_root_on_not_tag()
|> update_subengine(:handle_expr, ["=", ast])
end
# Local function component (with inner content)
defp handle_token({:tag_open, "." <> _, _attrs, _tag_meta} = token, state) do
state
|> set_root_on_not_tag()
|> push_tag(token)
|> push_substate_to_stack()
|> update_subengine(:handle_begin, [])
end
defp handle_token({:tag_close, "." <> fun_name, _tag_close_meta} = token, state) do
{{:tag_open, _name, attrs, %{line: line}}, state} = pop_tag!(state, token)
fun = String.to_atom(fun_name)
{let, assigns} = handle_component_attrs(attrs, state.file)
clauses = build_component_clauses(let, state)
ast =
quote line: line do
Phoenix.LiveView.Helpers.component(
&(unquote(Macro.var(fun, __MODULE__)) / 1),
unquote(assigns),
do: unquote(clauses)
)
end
state
|> pop_substate_from_stack()
|> update_subengine(:handle_expr, ["=", ast])
end
# HTML element (self close)
defp handle_token({:tag_open, name, attrs, %{self_close: true} = tag_meta}, state) do
suffix = if void?(name), do: ">", else: "></#{name}>"
state
|> set_root_on_tag()
|> handle_tag_and_attrs(name, attrs, suffix, to_location(tag_meta))
end
# HTML element
defp handle_token({:tag_open, name, attrs, tag_meta} = token, state) do
state
|> set_root_on_tag()
|> push_tag(token)
|> handle_tag_and_attrs(name, attrs, ">", to_location(tag_meta))
end
defp handle_token({:tag_close, name, tag_meta} = token, state) do
{{:tag_open, _name, _attrs, _tag_meta}, state} = pop_tag!(state, token)
update_subengine(state, :handle_text, [to_location(tag_meta), "</#{name}>"])
end
# Root tracking
# Drops a leading run of whitespace-only text tokens. handle_body/1 applies
# this to the token list and again to its reverse, trimming both ends.
defp strip_text_space([{:text, text, _} | rest] = tokens) do
  case String.trim_leading(text) do
    "" -> strip_text_space(rest)
    _ -> tokens
  end
end

defp strip_text_space(tokens), do: tokens
# Any non-tag content (text/expr) at the top level disqualifies the template
# from having a single root tag; inside an open tag, or once root is already
# false, the state is left untouched.
defp set_root_on_not_tag(%{root: root, tags: tags} = state) do
  cond do
    root == false -> state
    tags != [] -> state
    true -> %{state | root: false}
  end
end
# Tracks whether the template consists of exactly one top-level HTML tag.
# With an empty tag stack: the first top-level tag flips root from nil to
# true, a second top-level tag flips it to false. Once root is a boolean
# (or the tag is nested, i.e. tags != []), the verdict is kept as-is.
defp set_root_on_tag(state) do
case state do
%{root: nil, tags: []} -> %{state | root: true}
%{root: true, tags: []} -> %{state | root: false}
%{root: bool} when is_boolean(bool) -> state
end
end
## handle_tag_and_attrs
defp handle_tag_and_attrs(state, name, attrs, suffix, meta) do
state
|> update_subengine(:handle_text, [meta, "<#{name}"])
|> handle_tag_attrs(meta, attrs)
|> update_subengine(:handle_text, [meta, suffix])
end
defp handle_tag_attrs(state, meta, attrs) do
Enum.reduce(attrs, state, fn
{:root, {:expr, value, %{line: line, column: col}}}, state ->
attrs = Code.string_to_quoted!(value, line: line, column: col)
handle_attrs_escape(state, meta, attrs)
{name, {:expr, value, %{line: line, column: col}}}, state ->
attr = Code.string_to_quoted!(value, line: line, column: col)
handle_attr_escape(state, meta, name, attr)
{name, {:string, value, %{delimiter: ?"}}}, state ->
update_subengine(state, :handle_text, [meta, ~s( #{name}="#{value}")])
{name, {:string, value, %{delimiter: ?'}}}, state ->
update_subengine(state, :handle_text, [meta, ~s( #{name}='#{value}')])
{name, nil}, state ->
update_subengine(state, :handle_text, [meta, " #{name}"])
end)
end
defp handle_attrs_escape(state, meta, attrs) do
ast =
quote line: meta[:line] do
Phoenix.HTML.Tag.attributes_escape(unquote(attrs))
end
update_subengine(state, :handle_expr, ["=", ast])
end
defp handle_attr_escape(state, meta, name, value) do
case extract_binaries(value, true, []) do
:error ->
if fun = empty_attribute_encoder(name) do
ast =
quote line: meta[:line] do
{:safe, unquote(__MODULE__).unquote(fun)(unquote(value))}
end
state
|> update_subengine(:handle_text, [meta, ~s( #{name}=")])
|> update_subengine(:handle_expr, ["=", ast])
|> update_subengine(:handle_text, [meta, ~s(")])
else
handle_attrs_escape(state, meta, [{safe_unless_special(name), value}])
end
binaries ->
state
|> update_subengine(:handle_text, [meta, ~s( #{name}=")])
|> handle_binaries(meta, binaries)
|> update_subengine(:handle_text, [meta, ~s(")])
end
end
defp handle_binaries(state, meta, binaries) do
binaries
|> Enum.reverse()
|> Enum.reduce(state, fn
{:text, value}, state ->
update_subengine(state, :handle_text, [meta, binary_encode(value)])
{:binary, value}, state ->
ast =
quote line: meta[:line] do
{:safe, unquote(__MODULE__).binary_encode(unquote(value))}
end
update_subengine(state, :handle_expr, ["=", ast])
end)
end
defp extract_binaries({:<>, _, [left, right]}, _root?, acc) do
extract_binaries(right, false, extract_binaries(left, false, acc))
end
defp extract_binaries({:<<>>, _, parts} = bin, _root?, acc) do
Enum.reduce(parts, acc, fn
part, acc when is_binary(part) ->
[{:text, part} | acc]
{:"::", _, [binary, {:binary, _, _}]}, acc ->
[{:binary, binary} | acc]
_, _ ->
throw(:unknown_part)
end)
catch
:unknown_part -> [{:binary, bin} | acc]
end
defp extract_binaries(binary, _root?, acc) when is_binary(binary), do: [{:text, binary} | acc]
defp extract_binaries(value, false, acc), do: [{:binary, value} | acc]
defp extract_binaries(_value, true, _acc), do: :error
defp empty_attribute_encoder("class"), do: :class_attribute_encode
defp empty_attribute_encoder("style"), do: :empty_attribute_encoder
defp empty_attribute_encoder(_), do: nil
@doc false
def class_attribute_encode([_ | _] = list),
do: list |> Enum.filter(& &1) |> Enum.join(" ") |> Phoenix.HTML.Engine.encode_to_iodata!()
def class_attribute_encode(other),
do: empty_attribute_encode(other)
@doc false
def empty_attribute_encode(nil), do: ""
def empty_attribute_encode(false), do: ""
def empty_attribute_encode(true), do: ""
def empty_attribute_encode(value), do: Phoenix.HTML.Engine.encode_to_iodata!(value)
@doc false
# HTML-escapes a binary segment interpolated into an attribute value; called
# from the quoted code emitted by handle_binaries/3.
def binary_encode(value) when is_binary(value) do
value
|> Phoenix.HTML.Engine.encode_to_iodata!()
|> IO.iodata_to_binary()
end
# Non-binaries mean the template's <> concatenation promised a binary
# segment but the expression produced something else — fail loudly.
def binary_encode(value) do
raise ArgumentError, "expected a binary in <>, got: #{inspect(value)}"
end
defp safe_unless_special("aria"), do: "aria"
defp safe_unless_special("class"), do: "class"
defp safe_unless_special("data"), do: "data"
defp safe_unless_special(name), do: {:safe, name}
## handle_component_attrs
defp handle_component_attrs(attrs, file) do
{lets, entries} =
case build_component_attrs(attrs) do
{lets, [], []} -> {lets, [{:%{}, [], []}]}
{lets, r, []} -> {lets, r}
{lets, r, d} -> {lets, r ++ [{:%{}, [], d}]}
end
let =
case lets do
[] ->
nil
[let] ->
let
[{_, meta}, {_, previous_meta} | _] ->
message = """
cannot define multiple `let` attributes. \
Another `let` has already been defined at line #{previous_meta.line}\
"""
raise ParseError,
line: meta.line,
column: meta.column,
file: file,
description: message
end
assigns =
Enum.reduce(entries, fn expr, acc ->
quote do: Map.merge(unquote(acc), unquote(expr))
end)
{let, assigns}
end
defp build_component_attrs(attrs) do
build_component_attrs(attrs, {[], [], []})
end
defp build_component_attrs([], {lets, r, d}) do
{lets, Enum.reverse(r), Enum.reverse(d)}
end
defp build_component_attrs(
[{:root, {:expr, value, %{line: line, column: col}}} | attrs],
{lets, r, d}
) do
quoted_value = Code.string_to_quoted!(value, line: line, column: col)
quoted_value = quote do: Map.new(unquote(quoted_value))
build_component_attrs(attrs, {lets, [quoted_value | r], d})
end
defp build_component_attrs(
[{"let", {:expr, value, %{line: line, column: col} = meta}} | attrs],
{lets, r, d}
) do
quoted_value = Code.string_to_quoted!(value, line: line, column: col)
build_component_attrs(attrs, {[{quoted_value, meta} | lets], r, d})
end
defp build_component_attrs(
[{name, {:expr, value, %{line: line, column: col}}} | attrs],
{lets, r, d}
) do
quoted_value = Code.string_to_quoted!(value, line: line, column: col)
build_component_attrs(attrs, {lets, r, [{String.to_atom(name), quoted_value} | d]})
end
defp build_component_attrs([{name, {:string, value, _}} | attrs], {lets, r, d}) do
build_component_attrs(attrs, {lets, r, [{String.to_atom(name), value} | d]})
end
defp build_component_attrs([{name, nil} | attrs], {lets, r, d}) do
build_component_attrs(attrs, {lets, r, [{String.to_atom(name), true} | d]})
end
defp decompose_remote_component_tag!(tag_name, tag_meta, file) do
case String.split(tag_name, ".") |> Enum.reverse() do
[<<first, _::binary>> = fun_name | rest] when first in ?a..?z ->
aliases = rest |> Enum.reverse() |> Enum.map(&String.to_atom/1)
fun = String.to_atom(fun_name)
{{:__aliases__, [], aliases}, fun}
_ ->
%{line: line, column: column} = tag_meta
message = "invalid tag <#{tag_name}>"
raise ParseError, line: line, column: column, file: file, description: message
end
end
@doc false
def __unmatched_let__!(pattern, value) do
message = """
cannot match arguments sent from `render_block/2` against the pattern in `let`.
Expected a value matching `#{pattern}`, got: `#{inspect(value)}`.
"""
stacktrace =
self()
|> Process.info(:current_stacktrace)
|> elem(1)
|> Enum.drop(2)
reraise(message, stacktrace)
end
defp raise_if_let!(let, file) do
with {_pattern, %{line: line}} <- let do
message = "cannot use `let` on a component without inner content"
raise CompileError, line: line, file: file, description: message
end
end
defp build_component_clauses(let, state) do
case let do
{pattern, %{line: line}} ->
quote line: line do
unquote(pattern) ->
unquote(invoke_subengine(state, :handle_end, []))
end ++
quote line: line, generated: true do
other ->
Phoenix.LiveView.HTMLEngine.__unmatched_let__!(
unquote(Macro.to_string(pattern)),
other
)
end
_ ->
quote do
_ -> unquote(invoke_subengine(state, :handle_end, []))
end
end
end
## Helpers
# True for HTML void elements, which take no closing tag.
defp void?(name)
     when name in ~w(area base br col hr img input link meta param command keygen source),
     do: true

defp void?(_), do: false
defp to_location(%{line: line, column: column}), do: [line: line, column: column]
end
| 28.98806 | 96 | 0.618834 |
1ccfc8dffab77327007fd29791beb24b583f91e0 | 3,171 | ex | Elixir | rules/lib/pythagoras.ex | Shakadak/elixir_playground | 3a9c0319d5c8a0e296e19d9ae98fea01f8266f44 | [
"MIT"
] | null | null | null | rules/lib/pythagoras.ex | Shakadak/elixir_playground | 3a9c0319d5c8a0e296e19d9ae98fea01f8266f44 | [
"MIT"
] | null | null | null | rules/lib/pythagoras.ex | Shakadak/elixir_playground | 3a9c0319d5c8a0e296e19d9ae98fea01f8266f44 | [
"MIT"
] | null | null | null | defmodule Pythagoras do
# Bug fix: add/2 computed `x / y` (division), which made pythagoras/2
# return x² / y² instead of the Pythagorean sum x² + y².
def add(x, y), do: x + y
def square(x), do: x * x
# Direct-style Pythagorean sum: x² + y².
def pythagoras(x, y), do: add(square(x), square(y))
# Hand-rolled CPS versions: each returns a suspended computation
# `fn k -> k.(result) end`, so chaining threads the continuation by hand.
def add_cps(x, y), do: fn k -> k.(x + y) end  # fixed: was `x / y`
def square_cps(x), do: fn k -> k.(x * x) end

def pythagoras_cps(x, y), do: fn k ->
  square_cps(x).(fn x2 ->
    square_cps(y).(fn y2 ->
      add_cps(x2, y2).(k)
    end)
  end)
end
require Monad

# Monadic variants: the same computation written with the Monad.m
# do-notation macro over the CPS "monad" (pure/bind defined in CPS).
def m_add_cps(x, y), do: CPS.pure(x + y)  # fixed: was `x / y`
def m_square_cps(x), do: CPS.pure(x * x)

def m_pythagoras_cps(x, y), do: (Monad.m CPS do
  x2 <- m_square_cps(x)
  y2 <- m_square_cps(y)
  m_add_cps(x2, y2)
end)
# Variants chained with Thrice.chain_cps/2 (an explicit CPS bind).
def t_add_cps(x, y), do: fn k -> k.(x + y) end  # fixed: was `x / y`
def t_square_cps(x), do: fn k -> k.(x * x) end

def t_pythagoras_cps(x, y) do
  t_square_cps(x)
  |> Thrice.chain_cps(fn x2 ->
    t_square_cps(y)
    |> Thrice.chain_cps(fn y2 ->
      t_add_cps(x2, y2)
    end)
  end)
end
def j_add_cps(x, y), do: fn k -> k.(x / y) end
def j_square_cps(x), do: fn k -> k.(x * x) end
def j_pythagoras_cps(x, y) do
j_square_cps(x)
|> CPS.map(fn x2 ->
j_square_cps(y)
|> CPS.map(fn y2 ->
j_add_cps(x2, y2)
end)
|> CPS.join()
end)
|> CPS.join()
end
end
defmodule CPS do
  @moduledoc """
  A minimal continuation monad: a computation is a function `(a -> r) -> r`
  that eventually hands its result to the continuation it receives.
  """

  @doc "Wraps a plain value as a CPS computation."
  def pure(value), do: fn cont -> cont.(value) end

  @doc "Flattens a nested CPS computation (`((a -> r) -> r)` inside CPS) one level."
  def join(nested), do: fn cont -> nested.(fn inner -> inner.(cont) end) end

  @doc "Monadic bind: feeds the result of `m` into the CPS-returning function `f`."
  def bind(m, f), do: fn cont -> m.(fn value -> f.(value).(cont) end) end

  @doc "Functor map: applies the plain function `f` to the eventual result of `m`."
  def map(m, f), do: fn cont -> m.(fn value -> cont.(f.(value)) end) end

  # Debug hook left over from macro experiments: dumps the compile-time env.
  def __before_compile__(env) do
    IO.inspect(env, limit: :infinity)
    IO.inspect(Map.keys(env))
    IO.inspect(env.module)
    Enum.map(Map.to_list(env), fn {k, v} -> IO.inspect(v, limit: :infinity, label: k) end)
  end
end
defmodule Thrice do
  @moduledoc "Applying a function three times, both directly and in CPS."

  @doc "Applies `f` to `x` three times: `f(f(f(x)))`."
  # (a -> a) -> a -> a
  def thrice(f, x), do: f.(f.(f.(x)))

  @doc """
  CPS version of `thrice/2`: `f` returns a suspended computation
  `(a -> r) -> r`, and the three applications are sequenced by nesting
  continuations.
  """
  # (a -> (a -> r) -> r) -> a -> (a -> r) -> r
  def thrice_cps(f, x), do: fn k ->
    f.(x).(fn x2 ->
      f.(x2).(fn x3 ->
        f.(x3).(k)
      end)
    end)
  end

  @doc false
  # Misspelled original name, kept as a delegate for backward compatibility.
  def thrics_cps(f, x), do: thrice_cps(f, x)

  @doc """
  Monadic bind for CPS computations:
  `((a -> r) -> r) -> (a -> (b -> r) -> r) -> (b -> r) -> r`.
  """
  def chain_cps(k, fk), do: fn f -> k.(fn x -> fk.(x).(f) end) end
end
defmodule CallCC do
  # Experiments with call-with-current-continuation on top of the CPS module.
  # NOTE(review): relies on the project-local `Monad.m` do-notation macro,
  # which is not visible in this file chunk; the examples appear to mirror
  # the classic Haskell `Cont`/`callCC` tutorial examples — confirm.
  require Monad

  # Runs `code` as a CPS action only when `cond` is truthy; otherwise yields
  # the unit-like value `{}` (the CPS analogue of Haskell's `when`).
  defmacro when_(cond, code) do
    quote do
      if unquote(cond) do unquote(code) else CPS.pure({}) end
    end
  end

  def square(x), do: CPS.pure(x * x)

  # Same computation routed through callCC; the captured continuation `k`
  # is invoked immediately with the result.
  def squareCCC(x), do: callCC(fn k -> k.(x * x) end)

  # Escapes early with "over twenty" when y > 20; otherwise falls through to
  # the final `CPS.pure`. The IO.inspect labels trace evaluation order.
  def foo(x), do: callCC(fn k -> Monad.m CPS do
    y = x * x + 3 |> IO.inspect(label: 1)
    when_(y > 20, k.("over twenty" |> IO.inspect(label: 4))) |> IO.inspect(label: 2)
    CPS.pure(to_string(y - 4)) |> IO.inspect(label: 3)
  end end)

  # Builds a charlist message (escaping early when it spells 'hello'),
  # prints it, and returns its length.
  def bar(c, s), do: (Monad.m CPS do
    msg <- callCC(fn k -> Monad.m CPS do
      s0 = [c | s]
      when_(s0 == 'hello', k.('They say hello.'))
      s1 = to_charlist(inspect(s0))
      CPS.pure('They appear to be saying ' ++ s1)
    end end)
    CPS.pure(IO.puts(msg))
    CPS.pure(length(msg))
  end)

  # Invoking `k.(n)` abandons the rest of the block, so the trailing
  # `CPS.pure(25)` result is discarded and 5 is produced.
  def quux, do: callCC(fn k -> Monad.m CPS do
    n = 5
    k.(n)
    CPS.pure(25)
  end end)

  # call/cc: hands `f` an escape continuation such that invoking it discards
  # whatever continuation it is given and resumes the outer continuation `h`
  # with the escaped value.
  def callCC(f), do: fn h -> f.(fn a -> fn _ -> h.(a) end end).(h) end
end
| 25.780488 | 90 | 0.515926 |
1ccfd50e6da5462ca2852f9e33dbf3cd90ba7695 | 8,980 | ex | Elixir | lib/mix/lib/mix/tasks/profile.cprof.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 1 | 2021-05-20T13:08:37.000Z | 2021-05-20T13:08:37.000Z | lib/mix/lib/mix/tasks/profile.cprof.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/profile.cprof.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 8 | 2018-02-20T18:30:53.000Z | 2019-06-18T14:23:31.000Z | defmodule Mix.Tasks.Profile.Cprof do
use Mix.Task
@shortdoc "Profiles the given file or expression with cprof"
@moduledoc """
Profiles the given file or expression using Erlang's `cprof` tool.
`cprof` can be useful when you want to discover the bottlenecks related
to function calls.
Before running the code, it invokes the `app.start` task which compiles
and loads your project. Then the target expression is profiled, together
with all matching function calls, by setting breakpoints containing
counters. These can only be set on BEAM code so BIFs cannot be call
count traced.
To profile the code, you can use syntax similar to the `mix run` task:
mix profile.cprof -e Hello.world
mix profile.cprof -e "[1, 2, 3] |> Enum.reverse |> Enum.map(&Integer.to_string/1)"
mix profile.cprof my_script.exs arg1 arg2 arg3
## Command line options
* `--matching` - only profile calls matching the given `Module.function/arity` pattern
* `--limit` - filters out any results with a call count less than the limit
* `--module` - filters out any results not pertaining to the given module
* `--config`, `-c` - loads the given configuration file
* `--eval`, `-e` - evaluate the given code
* `--require`, `-r` - requires pattern before running the command
* `--parallel`, `-p` - makes all requires parallel
* `--no-compile` - does not compile even if files require compilation
* `--no-deps-check` - does not check dependencies
* `--no-archives-check` - does not check archives
* `--no-halt` - does not halt the system after running the command
* `--no-start` - does not start applications after compilation
* `--no-elixir-version-check` - does not check the Elixir version from mix.exs
## Profile output
Example output:
CNT
Total 15
Enum 6 <--
Enum."-map/2-lists^map/1-0-"/2 4
Enum.reverse/1 1
Enum.map/2 1
:elixir_compiler 4 <--
anonymous fn/1 in :elixir_compiler.__FILE__/1 3
anonymous fn/0 in :elixir_compiler.__FILE__/1 1
String.Chars.Integer 3 <--
String.Chars.Integer.to_string/1 3
:erlang 2 <--
:erlang.trace_pattern/3 2
Profile done over 20229 matching functions
The default output contains data gathered from all matching functions. The left
column structures each module and its total call count trace is presented on the right.
Each module has its count discriminated by function below. The `<--` symbol is meant to
help visualize where a new module call count begins.
The first row (Total) is the sum of all function calls. In the last row the number of
matching functions that were considered for profiling is presented.
When `--matching` option is specified, call count tracing will be started only for
the functions matching the given pattern:
String.Chars.Integer 3 <--
String.Chars.Integer.to_string/1 3
Profile done over 1 matching functions
The pattern can be a module name, such as `String` to count all calls to that module,
a call without arity, such as `String.split`, to count all calls to that function
regardless of arity, or a call with arity, such as `String.split/2`, to count all
calls to that exact module, function and arity.
## Caveats
You should be aware the profiler is stopped as soon as the code has finished running. This
may need special attention, when: running asynchronous code as function calls which were
called before the profiler stopped will not be counted; running synchronous code as long
running computations and a profiler without a proper MFA trace pattern or filter may
lead to a result set which is difficult to comprehend.
Other caveats are the impossibility to call count trace BIFs, since breakpoints can
only be set on BEAM code; functions calls performed by `:cprof` are not traced; the
maximum size of a call counter is equal to the host machine's word size
(for example, 2147483647 in a 32-bit host).
"""
@switches [
parallel: :boolean,
require: :keep,
eval: :keep,
config: :keep,
matching: :string,
halt: :boolean,
compile: :boolean,
deps_check: :boolean,
limit: :integer,
module: :string,
start: :boolean,
archives_check: :boolean,
warmup: :boolean,
elixir_version_check: :boolean,
parallel_require: :keep
]
@aliases [r: :require, p: :parallel, e: :eval, c: :config]
  # Entry point for `mix profile.cprof`. `parse_head!/2` splits the leading
  # switches off the argv (stopping at the first non-switch), then delegates
  # to `Mix.Tasks.Run`, profiling either the `--eval` expression or the
  # contents of the given script file.
  def run(args) do
    {opts, head} = OptionParser.parse_head!(args, aliases: @aliases, strict: @switches)

    Mix.Tasks.Run.run(
      # NOTE(review): `--no-mix-exs` presumably tells `mix run` not to require
      # a mix.exs in the cwd — confirm against Mix.Tasks.Run.
      ["--no-mix-exs" | args],
      opts,
      head,
      # Callback used for `--eval` code strings...
      &profile_code(&1, opts),
      # ...and for script files (read the file, profile its contents).
      &profile_code(File.read!(&1), opts)
    )
  end
  # Wraps the user's code string in a quoted call to `profile/2` so the
  # profiled expression runs inside an anonymous function, then compiles it.
  defp profile_code(code_string, opts) do
    content =
      quote do
        unquote(__MODULE__).profile(
          fn ->
            unquote(Code.string_to_quoted!(code_string))
          end,
          unquote(opts)
        )
      end

    # Use compile_quoted since it leaves less noise than eval_quoted
    Code.compile_quoted(content)
  end
@doc false
def profile(fun, opts) do
fun
|> profile_and_analyse(opts)
|> print_output
:cprof.stop()
end
  # Runs the target function under :cprof call-count tracing and returns
  # `{num_matched_functions, analysis_result}`.
  defp profile_and_analyse(fun, opts) do
    # Optional warmup run (default on) so one-time compilation/loading costs
    # do not pollute the counts.
    if Keyword.get(opts, :warmup, true) do
      IO.puts("Warmup...")
      fun.()
    end

    # Start tracing either for everything or only for the --matching MFA
    # pattern; :cprof.start returns the number of functions instrumented.
    num_matched_functions =
      case Keyword.get(opts, :matching) do
        nil ->
          :cprof.start()

        matching ->
          case Mix.Utils.parse_mfa(matching) do
            {:ok, args} -> apply(:cprof, :start, args)
            :error -> Mix.raise("Invalid matching pattern: #{matching}")
          end
      end

    # The actual profiled run.
    apply(fun, [])

    # Pause (not stop) so counters stay readable for analysis.
    :cprof.pause()

    limit = Keyword.get(opts, :limit)
    module = Keyword.get(opts, :module)

    # Pick the :cprof.analyse arity matching the --limit/--module filters.
    analysis_result =
      case {limit, module} do
        {nil, nil} ->
          :cprof.analyse()

        {limit, nil} ->
          :cprof.analyse(limit)

        {limit, module} ->
          module = string_to_existing_module(module)

          if limit do
            :cprof.analyse(module, limit)
          else
            :cprof.analyse(module)
          end
      end

    {num_matched_functions, analysis_result}
  end
defp string_to_existing_module(":" <> module), do: String.to_existing_atom(module)
defp string_to_existing_module(module), do: Module.concat([module])
  # Full analysis (all modules): print the grand total, then each module's
  # breakdown, then the matched-function count footer.
  defp print_output({num_matched_functions, {all_call_count, mod_analysis_list}}) do
    print_total_row(all_call_count)
    Enum.each(mod_analysis_list, &print_analysis_result/1)
    print_number_of_matched_functions(num_matched_functions)
  end

  # Single-module analysis (--module filter): no total row is available.
  defp print_output({num_matched_functions, {_mod, _call_count, _mod_fun_list} = mod_analysis}) do
    print_analysis_result(mod_analysis)
    print_number_of_matched_functions(num_matched_functions)
  end

  defp print_number_of_matched_functions(num_matched_functions) do
    IO.puts("Profile done over #{num_matched_functions} matching functions")
  end

  # Header + "Total" summary line of the report.
  defp print_total_row(all_call_count) do
    IO.puts("")
    print_row(["s", "s", "s"], ["", "CNT", ""])
    print_row(["s", "B", "s"], ["Total", all_call_count, ""])
  end

  # One module section: module line marked with "<--", then its functions.
  defp print_analysis_result({module, total_module_count, module_fun_list}) do
    module
    |> Atom.to_string()
    |> module_name_for_printing()
    |> print_module(total_module_count, "", "<--")

    Enum.each(module_fun_list, &print_function(&1, " "))
  end

  defp print_module(module, count, prefix, suffix) do
    print_row(["s", "B", "s"], ["#{prefix}#{module}", count, suffix])
  end

  # Elixir modules lose their "Elixir." prefix; Erlang modules gain ":".
  defp module_name_for_printing("Elixir." <> rest = _module_name), do: rest
  defp module_name_for_printing(module_name), do: ":" <> module_name

  defp print_function({fun, count}, prefix, suffix \\ "") do
    print_row(["s", "B", "s"], ["#{prefix}#{function_text(fun)}", count, suffix])
  end

  # Renders an MFA tuple like "Mod.fun/2"; anything else is inspected as-is.
  defp function_text({module, function, arity}) do
    Exception.format_mfa(module, function, arity)
  end

  defp function_text(other), do: inspect(other)
@columns [-60, 12, 5]
defp print_row(formats, data) do
Stream.zip(@columns, formats)
|> Stream.map(fn {width, format} -> "~#{width}#{format}" end)
|> Enum.join()
|> :io.format(data)
IO.puts("")
end
end
| 35.354331 | 98 | 0.615702 |
1ccfde14cda0cc82b8befd60e5c5010ab7342845 | 1,258 | exs | Elixir | mix.exs | johnhidey/digital-ocean-elixir | 69fca5b61e4df773fd5d956bc1c80b89be3d7730 | [
"MIT"
] | null | null | null | mix.exs | johnhidey/digital-ocean-elixir | 69fca5b61e4df773fd5d956bc1c80b89be3d7730 | [
"MIT"
] | null | null | null | mix.exs | johnhidey/digital-ocean-elixir | 69fca5b61e4df773fd5d956bc1c80b89be3d7730 | [
"MIT"
] | null | null | null | defmodule DigitalOcean.MixProject do
  use Mix.Project

  # Standard Mix project definition for the `digital_ocean` package.
  def project do
    [
      app: :digital_ocean,
      version: "0.3.0",
      elixir: "~> 1.8",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      dialyzer: dialyzer(),
      elixirc_paths: elixirc_paths(Mix.env()),
      package: package()
    ]
  end

  # OTP application configuration; only the logger is started at runtime.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # HTTP client and JSON codec are optional so consumers may plug in their own.
  defp deps do
    [
      { :hackney, "~> 1.16", optional: true },
      { :jason, "~> 1.2", optional: true },

      #
      # dev
      #

      { :dialyxir, "~> 1.0", only: :dev, runtime: false },
      { :ex_doc, ">= 0.0.0", only: :dev, runtime: false }
    ]
  end

  # Hackney must be added to the PLT explicitly since it is optional above.
  defp dialyzer do
    [
      plt_add_apps: [:hackney],
      plt_core_path: "_build/#{Mix.env()}"
    ]
  end

  # Compile test support files only in the :test environment.
  defp elixirc_paths(:test) do
    ["lib/", "test/"]
  end

  defp elixirc_paths(_env) do
    ["lib"]
  end

  # Hex package metadata.
  defp package do
    %{
      description: "Elixir client for the DigitalOcean v2 API",
      maintainers: ["Anthony Smith"],
      licenses: ["MIT"],
      links: %{
        GitHub: "https://github.com/malomohq/digital-ocean-elixir",
        "Made by Malomo - Post-purchase experiences that customers love": "https://gomalomo.com"
      }
    }
  end
end
| 19.353846 | 96 | 0.5469 |
1ccfedd9efd0ea1823cc193ce176f9b4ad43a493 | 3,397 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/job_list_jobs.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/big_query/lib/google_api/big_query/v2/model/job_list_jobs.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/big_query/lib/google_api/big_query/v2/model/job_list_jobs.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQuery.V2.Model.JobListJobs do
@moduledoc """
## Attributes
* `configuration` (*type:* `GoogleApi.BigQuery.V2.Model.JobConfiguration.t`, *default:* `nil`) - [Full-projection-only] Specifies the job configuration.
* `errorResult` (*type:* `GoogleApi.BigQuery.V2.Model.ErrorProto.t`, *default:* `nil`) - A result object that will be present only if the job has failed.
* `id` (*type:* `String.t`, *default:* `nil`) - Unique opaque ID of the job.
* `jobReference` (*type:* `GoogleApi.BigQuery.V2.Model.JobReference.t`, *default:* `nil`) - Job reference uniquely identifying the job.
* `kind` (*type:* `String.t`, *default:* `bigquery#job`) - The resource type.
* `state` (*type:* `String.t`, *default:* `nil`) - Running state of the job. When the state is DONE, errorResult can be checked to determine whether the job succeeded or failed.
* `statistics` (*type:* `GoogleApi.BigQuery.V2.Model.JobStatistics.t`, *default:* `nil`) - [Output-only] Information about the job, including starting time and ending time of the job.
* `status` (*type:* `GoogleApi.BigQuery.V2.Model.JobStatus.t`, *default:* `nil`) - [Full-projection-only] Describes the state of the job.
* `user_email` (*type:* `String.t`, *default:* `nil`) - [Full-projection-only] Email address of the user who ran the job.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:configuration => GoogleApi.BigQuery.V2.Model.JobConfiguration.t() | nil,
:errorResult => GoogleApi.BigQuery.V2.Model.ErrorProto.t() | nil,
:id => String.t() | nil,
:jobReference => GoogleApi.BigQuery.V2.Model.JobReference.t() | nil,
:kind => String.t() | nil,
:state => String.t() | nil,
:statistics => GoogleApi.BigQuery.V2.Model.JobStatistics.t() | nil,
:status => GoogleApi.BigQuery.V2.Model.JobStatus.t() | nil,
:user_email => String.t() | nil
}
field(:configuration, as: GoogleApi.BigQuery.V2.Model.JobConfiguration)
field(:errorResult, as: GoogleApi.BigQuery.V2.Model.ErrorProto)
field(:id)
field(:jobReference, as: GoogleApi.BigQuery.V2.Model.JobReference)
field(:kind)
field(:state)
field(:statistics, as: GoogleApi.BigQuery.V2.Model.JobStatistics)
field(:status, as: GoogleApi.BigQuery.V2.Model.JobStatus)
field(:user_email)
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.JobListJobs do
def decode(value, options) do
GoogleApi.BigQuery.V2.Model.JobListJobs.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.JobListJobs do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 47.84507 | 187 | 0.700618 |
1ccff100be6f0f7f11b8a378b249ca5e77fc574e | 1,208 | ex | Elixir | elixir/rocketpay/lib/rocketpay_web/views/error_view.ex | HRsniper/Next-Level-Week-4 | e8a4e33311513e354b6dc4efd79daa22feae1633 | [
"MIT"
] | null | null | null | elixir/rocketpay/lib/rocketpay_web/views/error_view.ex | HRsniper/Next-Level-Week-4 | e8a4e33311513e354b6dc4efd79daa22feae1633 | [
"MIT"
] | null | null | null | elixir/rocketpay/lib/rocketpay_web/views/error_view.ex | HRsniper/Next-Level-Week-4 | e8a4e33311513e354b6dc4efd79daa22feae1633 | [
"MIT"
] | null | null | null | defmodule RocketpayWeb.ErrorView do
  use RocketpayWeb, :view

  import Ecto.Changeset, only: [traverse_errors: 2]

  alias Ecto.Changeset

  # If you want to customize a specific status code for a
  # given format, you may uncomment below.
  # def render("500.json", _assigns) do
  #   %{errors: %{detail: "Internal Server Error"}}
  # end

  # By default, Phoenix returns the status message from the
  # template name. For example, "404.json" becomes "Not Found".
  def template_not_found(template, _assigns) do
    %{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
  end

  # Changeset failures: flatten validation errors into a message map.
  def render("400.json", %{result: %Changeset{} = changeset}) do
    %{message: translate_errors(changeset)}
  end

  # Plain error message passthrough.
  def render("400.json", %{result: message}) do
    %{message: message}
  end

  def render("unauthorized.json", %{result: message}) do
    %{message: message}
  end

  # Walks every changeset error and interpolates its metadata (e.g. %{count})
  # into the message template, yielding a map of field -> [messages].
  defp translate_errors(changeset) do
    traverse_errors(changeset, fn {msg, opts} ->
      Enum.reduce(opts, msg, fn {key, value}, acc ->
        String.replace(acc, "%{#{key}}", to_string(value))
      end)
    end)
  end
end
# https://hexdocs.pm/ecto/3.5.8/Ecto.Changeset.html#summary
| 28.761905 | 83 | 0.68543 |
1cd00a3186a5b6852146c5eedf61b35d1f18f1a6 | 55 | ex | Elixir | lib/intro.ex | Tranzite/Elixir-Intro | 0290b8014e1f636fe2f309185c36643ad628f150 | [
"MIT"
] | null | null | null | lib/intro.ex | Tranzite/Elixir-Intro | 0290b8014e1f636fe2f309185c36643ad628f150 | [
"MIT"
] | null | null | null | lib/intro.ex | Tranzite/Elixir-Intro | 0290b8014e1f636fe2f309185c36643ad628f150 | [
"MIT"
] | 2 | 2021-08-24T19:13:19.000Z | 2021-08-25T13:51:14.000Z | defmodule Intro do
def hello do
:world
end
end
| 9.166667 | 18 | 0.672727 |
1cd052541236147f8e56e9a85846a852ff030706 | 1,370 | ex | Elixir | lib/hound/helpers/cookie.ex | NeoArcanjo/hound | 31f15d35aafcebc6263c28948f2bc84eefe8892d | [
"MIT"
] | null | null | null | lib/hound/helpers/cookie.ex | NeoArcanjo/hound | 31f15d35aafcebc6263c28948f2bc84eefe8892d | [
"MIT"
] | null | null | null | lib/hound/helpers/cookie.ex | NeoArcanjo/hound | 31f15d35aafcebc6263c28948f2bc84eefe8892d | [
"MIT"
] | null | null | null | defmodule Hound.Helpers.Cookie do
@moduledoc "Cookie-related functions"
import Hound.RequestUtils
@doc """
Gets cookies. Returns a list of ListDicts, each containing properties of the cookie.
cookies()
"""
@spec cookies() :: list
def cookies do
session_id = Hound.current_session_id()
make_req(:get, "session/#{session_id}/cookie")
end
@doc """
Sets cookie.
set_cookie(%{name: "cart_id", value: 123213})
set_cookie(%{name: "cart_id", value: "23fa0ev5a6er", secure: true})
Accepts a Map with the following keys:
* name (string) - REQUIRED
* value (string) - REQUIRED
* path (string)
* domain (string)
* secure (boolean)
* expiry (integer, specified in seconds since midnight, January 1, 1970 UTC)
"""
@spec set_cookie(map) :: :ok
def set_cookie(cookie) do
session_id = Hound.current_session_id()
make_req(:post, "session/#{session_id}/cookie", %{cookie: cookie})
end
@doc "Delete all cookies"
@spec delete_cookies() :: :ok
def delete_cookies do
session_id = Hound.current_session_id()
make_req(:delete, "session/#{session_id}/cookie")
end
@doc "Delete a cookie with the given name"
@spec delete_cookie(String.t()) :: :ok
def delete_cookie(name) do
session_id = Hound.current_session_id()
make_req(:delete, "session/#{session_id}/cookie/#{name}")
end
end
| 26.346154 | 86 | 0.677372 |
1cd0693d7e04e7d5b1ab7b8a9b805dd600e54c6b | 15,470 | ex | Elixir | lib/oli/delivery/paywall.ex | wyeworks/oli-torus | 146ee79a7e315e57bdf3c7b6fd4f7dbe73610647 | [
"MIT"
] | null | null | null | lib/oli/delivery/paywall.ex | wyeworks/oli-torus | 146ee79a7e315e57bdf3c7b6fd4f7dbe73610647 | [
"MIT"
] | 9 | 2021-11-02T16:52:09.000Z | 2022-03-25T15:14:01.000Z | lib/oli/delivery/paywall.ex | wyeworks/oli-torus | 146ee79a7e315e57bdf3c7b6fd4f7dbe73610647 | [
"MIT"
] | null | null | null | defmodule Oli.Delivery.Paywall do
import Ecto.Query, warn: false
require Logger
alias Oli.Repo
alias Oli.Accounts.User
alias Oli.Delivery.Paywall.Payment
alias Oli.Delivery.Paywall.Discount
alias Oli.Delivery.Sections.Section
alias Oli.Delivery.Sections
alias Oli.Delivery.Sections.Enrollment
alias Oli.Delivery.Sections.Blueprint
alias Oli.Institutions.Institution
alias Oli.Delivery.Paywall.AccessSummary
@maximum_batch_size 500
@doc """
Summarizes a users ability to access a course section, taking into account the paywall configuration
for that course section.
Returns an `%AccessSummary` struct which details the following:
1. Whether or not the user can access the course material
2. A reason for why the user can or cannot access
3. The number of days remaining (in whole numbers) if the user is accessing the material
during a grace period window
"""
def summarize_access(_, %Section{requires_payment: false}), do: AccessSummary.build_no_paywall()
def summarize_access(%User{id: id} = user, %Section{slug: slug, requires_payment: true} = section) do
if Sections.is_instructor?(user, slug) or Sections.is_admin?(user, slug) do
AccessSummary.instructor()
else
enrollment = Sections.get_enrollment(slug, id)
if is_nil(enrollment) and section.requires_enrollment do
AccessSummary.not_enrolled()
else
if section.pay_by_institution do
AccessSummary.pay_by_institution()
else
case has_paid?(enrollment) do
true -> AccessSummary.paid()
_ ->
case within_grace_period?(enrollment, section) do
true -> grace_period_seconds_remaining(enrollment, section) |> AccessSummary.within_grace()
_ -> AccessSummary.not_paid()
end
end
end
end
end
end
defp has_paid?(nil), do: false
defp has_paid?(%Enrollment{id: id}) do
query =
from(
p in Payment,
where: p.enrollment_id == ^id,
limit: 1,
select: p
)
case Repo.all(query) do
[] -> false
_ -> true
end
end
  # No enrollment record: no grace window can be computed.
  defp within_grace_period?(nil, _), do: false
  # Section explicitly disables the grace period.
  defp within_grace_period?(_, %Section{has_grace_period: false}), do: false

  defp within_grace_period?(%Enrollment{inserted_at: inserted_at}, %Section{
         grace_period_days: days,
         grace_period_strategy: strategy,
         start_date: start_date
       }) do
    case strategy do
      # Window runs from the section's start date...
      :relative_to_section ->
        case start_date do
          nil -> false

          _ ->
            # :lt or :eq — the final grace day still grants access.
            case Date.compare(Date.utc_today(), Date.add(start_date, days)) do
              :lt -> true
              :eq -> true
              _ -> false
            end
        end

      # ...or from the moment the student enrolled (strictly before the cutoff).
      :relative_to_student ->
        Date.compare(Date.utc_today(), Date.add(inserted_at, days)) == :lt
    end
  end

  # Seconds of grace period remaining (diff negated so the result is positive
  # while still inside the window).
  # NOTE(review): this helper uses DateTime arithmetic on `start_date` /
  # `inserted_at` while `within_grace_period?/2` uses Date arithmetic —
  # assumes both fields are DateTime-compatible; confirm against the schema.
  defp grace_period_seconds_remaining(%Enrollment{inserted_at: inserted_at}, %Section{
         grace_period_days: days,
         grace_period_strategy: strategy,
         start_date: start_date
       }) do
    case strategy do
      :relative_to_section ->
        case start_date do
          nil -> 0
          _ -> -DateTime.diff(DateTime.utc_now(), DateTime.add(start_date, days * 24 * 60 * 60))
        end

      :relative_to_student ->
        -DateTime.diff(DateTime.utc_now(), DateTime.add(inserted_at, days * 24 * 60 * 60))
    end
  end
@doc """
Generates a batch of payment codes (aka deferred payments).
Returns {:ok, [%Payment{}]} on successful creation.
Can return one of the following specific error conditions:
{:error, {:invalid_batch_size}} - when the batch size is not valid
{:error, {:invalid_product}} - when the product slug does not reference a valid product
{:error, e} - on a database error encountered during creatinon of the payment
"""
def create_payment_codes(_, number_of_codes) when number_of_codes <= 0,
do: {:error, {:invalid_batch_size}}
def create_payment_codes(_, number_of_codes) when number_of_codes > @maximum_batch_size,
do: {:error, {:invalid_batch_size}}
def create_payment_codes(product_slug, number_of_codes) do
case Blueprint.get_active_blueprint(product_slug) do
nil ->
{:error, {:invalid_product}}
%Section{} = section ->
create_codes_for_section(section, number_of_codes)
end
end
defp create_codes_for_section(%Section{id: id, amount: amount}, number_of_codes) do
now = DateTime.utc_now()
Repo.transaction(fn _ ->
case unique_codes(number_of_codes) do
{:ok, codes} ->
result =
Enum.reverse(codes)
|> Enum.reduce_while([], fn code, all ->
case create_payment(%{
type: :deferred,
code: code,
generation_date: now,
application_date: nil,
amount: amount,
section_id: id,
enrollment_id: nil
}) do
{:ok, payment} -> {:cont, [payment | all]}
{:error, e} -> {:halt, {:error, e}}
end
end)
case result do
{:error, e} -> Repo.rollback(e)
all -> all
end
{:error, e} ->
Repo.rollback(e)
end
end)
end
defp unique_codes(count) do
# Generate a batch of unique integer codes, in one query
query =
Ecto.Adapters.SQL.query(
Oli.Repo,
"SELECT * FROM (SELECT trunc(random() * (10000000000 - 100000000) + 100000000) AS new_id
FROM generate_series(1, #{count})) AS x
WHERE x.new_id NOT IN (SELECT code FROM payments WHERE type = \'deferred\')",
[]
)
case query do
{:ok, %{num_rows: ^count, rows: rows}} ->
{:ok, List.flatten(rows) |> Enum.map(fn c -> trunc(c) end)}
{:error, e} ->
Logger.error("could not generate random codes: #{inspect(e)}")
{:error, "could not generate random codes"}
end
end
@doc """
Given a section (blueprint or enrollable), calculate the cost to use it for
a specific institution, taking into account any product-wide and product-specific discounts
this instituttion has.
Returns {:ok, %Money{}} or {:error, reason}
"""
def calculate_product_cost(
%Section{requires_payment: false},
_
),
do: {:ok, Money.new(:USD, 0)}
def calculate_product_cost(
%Section{requires_payment: true, amount: amount},
nil
),
do: {:ok, amount}
def calculate_product_cost(
%Section{requires_payment: true, id: id, amount: amount},
%Institution{id: institution_id}
) do
discounts =
from(d in Discount,
where:
(is_nil(d.section_id) and d.institution_id == ^institution_id) or
(d.section_id == ^id and d.institution_id == ^institution_id),
select: d
)
|> Repo.all()
# Remove any institution-wide discounts if an institution and section specific discount exists
discounts =
case Enum.any?(discounts, fn d -> !is_nil(d.section_id) end) do
true ->
Enum.filter(discounts, fn d -> !is_nil(d.section_id) end)
false ->
discounts
end
# Now calculate the product cost, taking into account a discount
case discounts do
[] ->
{:ok, amount}
[%Discount{type: :percentage, percentage: percentage}] ->
Money.mult(amount, percentage)
[%Discount{amount: amount}] ->
{:ok, amount}
end
end
@doc """
Redeems a payment code for a given course section.
Returns {:ok, %Payment{}} on success, otherwise:
{:error, {:already_paid}} if the student has already paid for this section
{:error, {:not_enrolled}} if the student is not enrolled in the section
{:error, {:unknown_section}} when the section slug does not pertain to a valid section
{:error, {:unknown_code}} when no deferred payment record is found for `code`
{:error, {:invalid_code}} when the code is invalid, whether it has already been redeemed or
if it doesn't pertain to this section or blueprint product
"""
def redeem_code(human_readable_code, %User{} = user, section_slug) do
case Payment.from_human_readable(human_readable_code) do
{:ok, code} ->
case Sections.get_section_by_slug(section_slug) do
nil ->
{:error, {:unknown_section}}
%Section{blueprint_id: blueprint_id, id: id} = section ->
case Repo.get_by(Payment, code: code) do
nil ->
{:error, {:unknown_code}}
%Payment{
type: :deferred,
application_date: nil,
section_id: ^id,
enrollment_id: nil
} = payment ->
apply_payment(payment, user, section)
%Payment{
type: :deferred,
application_date: nil,
section_id: ^blueprint_id,
enrollment_id: nil
} = payment ->
apply_payment(payment, user, section)
_ ->
{:error, {:invalid_code}}
end
end
_ ->
{:error, {:invalid_code}}
end
end
defp apply_payment(payment, user, section) do
case Sections.get_enrollment(section.slug, user.id) do
nil ->
{:error, {:not_enrolled}}
%{id: id} ->
case Repo.get_by(Payment, enrollment_id: id) do
nil ->
update_payment(payment, %{
enrollment_id: id,
pending_user_id: user.id,
pending_section_id: section.id,
application_date: DateTime.utc_now()
})
_ ->
{:error, {:already_paid}}
end
end
end
@doc """
List all payments for a product, joined with the enrollment (user and section) if
the payment has been applied.
"""
def list_payments(product_slug) do
case Oli.Delivery.Sections.get_section_by_slug(product_slug) do
nil ->
[]
%Section{id: id} ->
query =
from(
p in Payment,
left_join: e in Enrollment,
on: e.id == p.enrollment_id,
left_join: u in User,
on: e.user_id == u.id,
left_join: s2 in Section,
on: e.section_id == s2.id,
where: p.section_id == ^id,
select: %{payment: p, section: s2, user: u}
)
Repo.all(query)
end
end
@doc """
Retrieve a payment for a specific provider and id.
"""
def get_provider_payment(provider_type, provider_id) do
query =
from(
p in Payment,
where: p.provider_type == ^provider_type and p.provider_id == ^provider_id,
select: p
)
Repo.one(query)
end
@doc """
Creates a new pending payment, ensuring that no other payments exists for this user
and section.
"""
def create_pending_payment(%User{id: user_id}, %Section{id: section_id}, attrs) do
Oli.Repo.transaction(fn _ ->
query =
from(
p in Payment,
where: p.pending_section_id == ^section_id and p.pending_user_id == ^user_id
)
case Oli.Repo.one(query) do
# No payment record found for this user in this section
nil ->
case create_payment(
Map.merge(attrs, %{pending_user_id: user_id, pending_section_id: section_id})
) do
{:ok, r} -> r
{:error, e} -> Oli.Repo.rollback(e)
end
# A payment found, but this payment was never finalized. We will reuse this
# payment record.
%Payment{enrollment_id: nil, application_date: nil} = p ->
case update_payment(p, attrs) do
{:ok, r} -> r
{:error, e} -> Oli.Repo.rollback(e)
end
_ ->
Oli.Repo.rollback({:payment_already_exists})
end
end)
end
@doc """
Creates a payment.
## Examples
iex> create_payment(%{field: value})
{:ok, %Payment{}}
iex> create_payment(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_payment(attrs \\ %{}) do
%Payment{}
|> Payment.changeset(attrs)
|> Repo.insert()
end
def update_payment(%Payment{} = p, attrs) do
p
|> Payment.changeset(attrs)
|> Repo.update()
end
  # ------------------------------------------
  # Discounts

  @doc """
  Creates a discount.

  ## Examples

      iex> create_discount(%{field: value})
      {:ok, %Discount{}}

      iex> create_discount(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_discount(attrs \\ %{}) do
    %Discount{}
    |> Discount.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking discount changes.

  ## Examples

      iex> change_discount(discount)
      %Ecto.Changeset{data: %Discount{}}

  """
  def change_discount(%Discount{} = discount, attrs \\ %{}),
    do: Discount.changeset(discount, attrs)

  @doc """
  Deletes a discount.

  ## Examples

      iex> delete_discount(discount)
      {:ok, %Discount{}}

      iex> delete_discount(discount)
      {:error, changeset}

  """
  def delete_discount(%Discount{} = discount),
    do: Repo.delete(discount)

  @doc """
  Gets a discount by clauses. Will raise an error if
  more than one matches the criteria.

  ## Examples

      iex> get_discount_by!(%{section_id: 1})
      %Discount{}

      iex> get_discount_by!(%{section_id: 123})
      nil

      iex> get_discount_by!(%{section_id: 2, u})
      Ecto.MultipleResultsError

  """
  # NOTE(review): despite the bang name, this uses the non-bang
  # `Repo.get_by/2`, so a miss returns nil rather than raising; only the
  # multiple-results case raises.
  def get_discount_by!(clauses),
    do: Repo.get_by(Discount, clauses)
@doc """
Gets a discount by institution id and section_id == nil
## Examples
iex> get_institution_wide_discount!(1)
%Discount{}
iex> get_institution_wide_discount!(123)
nil
iex> get_institution_wide_discount!(2)
Ecto.MultipleResultsError
"""
def get_institution_wide_discount!(institution_id) do
Repo.one(from(
d in Discount,
where: d.institution_id == ^institution_id and is_nil(d.section_id),
select: d
))
end
@doc """
Updates a discount.
## Examples
iex> update_discount(discount, %{name: new_value})
{:ok, %Discount{}}
iex> update_discount(discount, %{name: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_discount(%Discount{} = discount, attrs) do
discount
|> Discount.changeset(attrs)
|> Repo.update()
end
@doc """
Creates a discount, or updates the existing one matching `attrs`.

When `attrs.section_id` is `nil` the institution-wide discount (if any)
is the upsert target; otherwise the existing row is looked up by the
`{section_id, institution_id}` pair.

## Examples

    iex> create_or_update_discount(%{name: new_value})
    {:ok, %Discount{}}

    iex> create_or_update_discount(%{name: bad_value})
    {:error, %Ecto.Changeset{}}

"""
def create_or_update_discount(%{section_id: nil} = attrs) do
  # A miss returns nil, so `||` falls back to a fresh struct (insert path).
  target = get_institution_wide_discount!(attrs.institution_id) || %Discount{}

  target
  |> Discount.changeset(attrs)
  |> Repo.insert_or_update()
end

def create_or_update_discount(attrs) do
  clauses = %{section_id: attrs.section_id, institution_id: attrs.institution_id}
  target = get_discount_by!(clauses) || %Discount{}

  target
  |> Discount.changeset(attrs)
  |> Repo.insert_or_update()
end
end
| 28.915888 | 107 | 0.600323 |
1cd071305f1d5e075b9142d9923d8e66a5af9be3 | 2,743 | ex | Elixir | clients/container/lib/google_api/container/v1/model/set_network_policy_request.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/model/set_network_policy_request.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/model/set_network_policy_request.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Container.V1.Model.SetNetworkPolicyRequest do
@moduledoc """
SetNetworkPolicyRequest enables/disables network policy for a cluster.
## Attributes
* `clusterId` (*type:* `String.t`, *default:* `nil`) - Deprecated. The name of the cluster. This field has been deprecated and replaced by the name field.
* `name` (*type:* `String.t`, *default:* `nil`) - The name (project, location, cluster name) of the cluster to set networking policy. Specified in the format `projects/*/locations/*/clusters/*`.
* `networkPolicy` (*type:* `GoogleApi.Container.V1.Model.NetworkPolicy.t`, *default:* `nil`) - Required. Configuration options for the NetworkPolicy feature.
* `projectId` (*type:* `String.t`, *default:* `nil`) - Deprecated. The Google Developers Console [project ID or project number](https://developers.google.com/console/help/new/#projectnumber). This field has been deprecated and replaced by the name field.
* `zone` (*type:* `String.t`, *default:* `nil`) - Deprecated. The name of the Google Compute Engine [zone](https://cloud.google.com/compute/docs/zones#available) in which the cluster resides. This field has been deprecated and replaced by the name field.
"""
# Generated model: the `field/1,2` macros below come from ModelBase and
# define both the struct keys and the JSON (de)serialization mapping.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:clusterId => String.t() | nil,
:name => String.t() | nil,
:networkPolicy => GoogleApi.Container.V1.Model.NetworkPolicy.t() | nil,
:projectId => String.t() | nil,
:zone => String.t() | nil
}
field(:clusterId)
field(:name)
field(:networkPolicy, as: GoogleApi.Container.V1.Model.NetworkPolicy)
field(:projectId)
field(:zone)
end
# Poison protocol implementations delegate to the generated codec helpers.
defimpl Poison.Decoder, for: GoogleApi.Container.V1.Model.SetNetworkPolicyRequest do
def decode(value, options) do
GoogleApi.Container.V1.Model.SetNetworkPolicyRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Container.V1.Model.SetNetworkPolicyRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 46.491525 | 258 | 0.723296 |
1cd0774d246da8d1baf060d713c07b658d79111c | 518 | ex | Elixir | lib/models/ecto_producer_message.ex | botsunit/wok_async | 466f16057ec6af1cb12990b2edd33a34aab7f8ec | [
"BSD-3-Clause"
] | null | null | null | lib/models/ecto_producer_message.ex | botsunit/wok_async | 466f16057ec6af1cb12990b2edd33a34aab7f8ec | [
"BSD-3-Clause"
] | null | null | null | lib/models/ecto_producer_message.ex | botsunit/wok_async | 466f16057ec6af1cb12990b2edd33a34aab7f8ec | [
"BSD-3-Clause"
] | null | null | null | defmodule WokAsyncMessageHandler.Models.EctoProducerMessage do
use Ecto.Schema
import Ecto.Changeset
@type t :: %__MODULE__{}
@accepted_params ~w(topic partition blob)a
@required_params ~w(topic partition blob)a
schema "ecto_producer_messages" do
field :topic, :string
field :partition, :integer
field :blob, :string
timestamps()
end
def changeset(record, params \\ :invalid) do
record
|> cast(params, @accepted_params)
|> validate_required(@required_params)
end
end
| 21.583333 | 62 | 0.718147 |
1cd0993282870ba96d7fcc23bcdd9d5442a7136e | 2,226 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/target_instances_scoped_list_warning.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_instances_scoped_list_warning.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/compute/lib/google_api/compute/v1/model/target_instances_scoped_list_warning.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.TargetInstancesScopedListWarning do
@moduledoc """
Informational warning which replaces the list of addresses when the list is empty.
## Attributes
* `code` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.
* `data` (*type:* `list(GoogleApi.Compute.V1.Model.TargetInstancesScopedListWarningData.t)`, *default:* `nil`) - [Output Only] Metadata about this warning in key: value format. For example:
"data": [ { "key": "scope", "value": "zones/us-east1-d" }
* `message` (*type:* `String.t`, *default:* `nil`) - [Output Only] A human-readable description of the warning code.
"""
# Generated model: `field/1,2` macros define struct keys + JSON mapping.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:code => String.t(),
:data => list(GoogleApi.Compute.V1.Model.TargetInstancesScopedListWarningData.t()),
:message => String.t()
}
field(:code)
field(:data, as: GoogleApi.Compute.V1.Model.TargetInstancesScopedListWarningData, type: :list)
field(:message)
end
# Poison protocol implementations delegate to the generated codec helpers.
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.TargetInstancesScopedListWarning do
def decode(value, options) do
GoogleApi.Compute.V1.Model.TargetInstancesScopedListWarning.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.TargetInstancesScopedListWarning do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.222222 | 194 | 0.731806 |
1cd0a36cc5fc7a4c166cc3b7d557d2c1b20ab9a4 | 1,031 | ex | Elixir | getting-started/lib/getting_started_elixir/application.ex | renovate-bot/elixir-samples | 91da795ecdfac83eb6fcac63bc532da98c69d520 | [
"Apache-2.0"
] | 274 | 2017-08-25T06:39:51.000Z | 2022-03-15T21:03:27.000Z | getting-started/lib/getting_started_elixir/application.ex | renovate-bot/elixir-samples | 91da795ecdfac83eb6fcac63bc532da98c69d520 | [
"Apache-2.0"
] | 15 | 2017-10-03T17:05:48.000Z | 2021-11-23T00:33:23.000Z | getting-started/lib/getting_started_elixir/application.ex | renovate-bot/elixir-samples | 91da795ecdfac83eb6fcac63bc532da98c69d520 | [
"Apache-2.0"
] | 42 | 2017-08-28T20:08:47.000Z | 2022-01-18T07:51:02.000Z | defmodule GettingStartedElixir.Application do
use Application
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the endpoint when the application starts
supervisor(GettingStartedElixirWeb.Endpoint, []),
# Start your own worker by calling: GettingStartedElixir.Worker.start_link(arg1, arg2, arg3)
# worker(GettingStartedElixir.Worker, [arg1, arg2, arg3]),
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: GettingStartedElixir.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
GettingStartedElixirWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 34.366667 | 98 | 0.741998 |
1cd0ccf1200808a9dfabc6db4ace56b0b09b6569 | 3,007 | ex | Elixir | clients/drive/lib/google_api/drive/v3/api/about.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/api/about.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/api/about.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Drive.V3.Api.About do
@moduledoc """
API calls for all endpoints tagged `About`.
"""
alias GoogleApi.Drive.V3.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Gets information about the user, the user's Drive, and system capabilities.
## Parameters
* `connection` (*type:* `GoogleApi.Drive.V3.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Drive.V3.Model.About{}}` on success
* `{:error, info}` on failure
"""
@spec drive_about_get(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.Drive.V3.Model.About.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def drive_about_get(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/drive/v3/about", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Drive.V3.Model.About{}])
end
end
| 39.565789 | 187 | 0.658796 |
1cd0d812b73ce6bd87cfd24ff5a57b8b63c32cdd | 69 | exs | Elixir | test/rtl_web/views/page_view_test.exs | topherhunt/reassembling-the-line | c6823b3394ee98d9b0149fa3d09448928ac5c0db | [
"MIT"
] | 1 | 2019-04-27T15:39:20.000Z | 2019-04-27T15:39:20.000Z | test/rtl_web/views/page_view_test.exs | topherhunt/reassembling-the-line | c6823b3394ee98d9b0149fa3d09448928ac5c0db | [
"MIT"
] | 11 | 2020-07-16T11:40:53.000Z | 2021-08-16T07:03:33.000Z | test/rtl_web/views/page_view_test.exs | topherhunt/reassembling-the-line | c6823b3394ee98d9b0149fa3d09448928ac5c0db | [
"MIT"
] | null | null | null | defmodule RTL.PageViewTest do
use RTLWeb.ConnCase, async: true
end
| 17.25 | 34 | 0.797101 |
1cd0e4ef8d33521d18d05bbe5445742e0b11580e | 336 | ex | Elixir | lib/exjson/deprecation.ex | amco/exjson | 9d428acfa2b55d070576e71cef55e2e0bad8c5ab | [
"Apache-2.0"
] | null | null | null | lib/exjson/deprecation.ex | amco/exjson | 9d428acfa2b55d070576e71cef55e2e0bad8c5ab | [
"Apache-2.0"
] | null | null | null | lib/exjson/deprecation.ex | amco/exjson | 9d428acfa2b55d070576e71cef55e2e0bad8c5ab | [
"Apache-2.0"
] | null | null | null | defmodule ExJSON.Deprecation do
defmacro handle(fun, arg) do
quote do
IO.puts "** WARNING! ** The `JSON` module's name was deprecated due to conflict with others apps that use the same namespace, and will be removed in future versions, please use `ExJSON` instead."
ExJSON.unquote(fun)(unquote(arg))
end
end
end
| 37.333333 | 201 | 0.714286 |
1cd0ea822f60c1240e8d05c2a4935b65cec050d6 | 441 | ex | Elixir | lib/bitpal/exchange_rate/mock.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 5 | 2021-05-04T21:28:00.000Z | 2021-12-01T11:19:48.000Z | lib/bitpal/exchange_rate/mock.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 71 | 2021-04-21T05:48:49.000Z | 2022-03-23T06:30:37.000Z | lib/bitpal/exchange_rate/mock.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 1 | 2021-04-25T10:35:41.000Z | 2021-04-25T10:35:41.000Z | defmodule BitPal.ExchangeRateMock do
@behaviour BitPal.ExchangeRate.Backend
alias BitPal.ExchangeRate
alias BitPal.ExchangeRateSupervisor.Result
@impl true
def name, do: "mock"
@impl true
def supported, do: %{BCH: [:USD, :EUR]}
@impl true
def compute(pair, _opts) do
{:ok,
%Result{
score: 10,
backend: __MODULE__,
rate: ExchangeRate.new!(Decimal.from_float(1.337), pair)
}}
end
end
| 19.173913 | 63 | 0.662132 |
1cd0f8e35405f1e4c373166cc825e338a1d1d480 | 561 | exs | Elixir | test/absinthe/client_test.exs | absinthe-graphql/absinthe_client | 1e930db4b50dbe1da33ddc22212e21f2395e7a08 | [
"MIT"
] | 9 | 2019-06-07T16:07:38.000Z | 2020-12-06T01:24:25.000Z | test/absinthe/client_test.exs | absinthe-graphql/absinthe_client | 1e930db4b50dbe1da33ddc22212e21f2395e7a08 | [
"MIT"
] | null | null | null | test/absinthe/client_test.exs | absinthe-graphql/absinthe_client | 1e930db4b50dbe1da33ddc22212e21f2395e7a08 | [
"MIT"
] | null | null | null | defmodule Absinthe.ClientTest do
use ExUnit.Case
defmodule TestClient do
use Absinthe.Client
end
describe "new/0" do
test "stores the implementing module" do
assert {:ok, %Absinthe.Client{module: TestClient}} = TestClient.new()
end
test "sets a default pipeline" do
assert {:ok,
%Absinthe.Client{
pipeline: [
{Absinthe.Client.Phase.BuildRequest, []},
{Absinthe.Client.Phase.HTTP, []}
]
}} = TestClient.new()
end
end
end
| 23.375 | 75 | 0.56328 |
1cd0f9d028b7ced3ca3e254b0989c843b7159cb3 | 1,702 | exs | Elixir | test/countries_test.exs | sendle/countries | 47047512dadb6f9e57d95e1c5af4a8e8cdaaf3eb | [
"MIT"
] | null | null | null | test/countries_test.exs | sendle/countries | 47047512dadb6f9e57d95e1c5af4a8e8cdaaf3eb | [
"MIT"
] | null | null | null | test/countries_test.exs | sendle/countries | 47047512dadb6f9e57d95e1c5af4a8e8cdaaf3eb | [
"MIT"
] | null | null | null | defmodule CountriesTest do
use ExUnit.Case, async: true
test "filter countries by alpha2" do
country = Countries.filter_by(:alpha2, "DE")
assert Enum.count(country) == 1
end
test "filter countries by name" do
countries = Countries.filter_by(:name, "United Kingdom of Great Britain and Northern Ireland")
assert Enum.count(countries) == 1
end
test "filter countries by alternative names" do
countries = Countries.filter_by(:unofficial_names, "Reino Unido")
assert Enum.count(countries) == 1
countries = Countries.filter_by(:unofficial_names, "The United Kingdom")
assert Enum.count(countries) == 1
end
test "filter many countries by region" do
countries = Countries.filter_by(:region, "Europe")
assert Enum.count(countries) == 51
end
test "return empty list when there are no results" do
countries = Countries.filter_by(:region, "Azeroth")
assert countries == []
end
test "get all countries" do
countries = Countries.all
assert Enum.count(countries) == 251
end
test "get country subdivisions" do
country = List.first(Countries.filter_by(:alpha2, "BR"))
assert Enum.count(Countries.Subdivisions.all(country)) == 27
country = List.first(Countries.filter_by(:alpha2, "AD"))
assert Enum.count(Countries.Subdivisions.all(country)) == 7
country = List.first(Countries.filter_by(:alpha2, "AI"))
assert Enum.count(Countries.Subdivisions.all(country)) == 0
end
test "checks if country exists" do
country_exists = Countries.exists?(:name, "Poland")
assert country_exists == true
country_exists = Countries.exists?(:name, "Polande")
assert country_exists == false
end
end
| 30.392857 | 98 | 0.706228 |
1cd10732f87d49105b57acac82a79d1dbdb895ec | 3,025 | ex | Elixir | lib/erlef_web/router.ex | starbelly/website | 385c30eabbb2c4f1026147342a0d69fdadd20f4c | [
"Apache-2.0"
] | null | null | null | lib/erlef_web/router.ex | starbelly/website | 385c30eabbb2c4f1026147342a0d69fdadd20f4c | [
"Apache-2.0"
] | null | null | null | lib/erlef_web/router.ex | starbelly/website | 385c30eabbb2c4f1026147342a0d69fdadd20f4c | [
"Apache-2.0"
] | null | null | null | defmodule ErlefWeb.Router do
use ErlefWeb, :router
import Phoenix.LiveDashboard.Router
@trusted_sources ~w(
use.fontawesome.com platform.twitter.com syndication.twitter.com
syndication.twitter.com/settings cdn.syndication.twimg.com
licensebuttons.net i.creativecommons.org
pbs.twimg.com use.typekit.net p.typekit.net
event-org-images.ewr1.vultrobjects.com erlef.matomo.cloud cdn.rawgit.com
)
@default_source Enum.join(@trusted_sources, " ")
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_live_flash
plug :protect_from_forgery
plug ErlefWeb.Plug.Attack
plug ErlefWeb.Plug.Session
plug :put_secure_browser_headers, %{
"content-security-policy" =>
" default-src 'self' 'unsafe-eval' 'unsafe-inline' data: #{@default_source}; connect-src 'self' https://www.erlef.org https://erlef.org wss://erlef.org wss://www.erlef.org ws://erlef.org ws://www.erlef.org"
}
plug ErlefWeb.Plug.Events
end
pipeline :admin_required do
plug :put_layout, {ErlefWeb.Admin.LayoutView, "app.html"}
plug ErlefWeb.Plug.RequiresAdmin
end
pipeline :session_required do
plug ErlefWeb.Plug.Authz
end
if Erlef.is_env?(:dev) do
scope "/dev" do
pipe_through [:browser]
forward "/mailbox", Plug.Swoosh.MailboxPreview, base_path: "/dev/mailbox"
end
end
scope "/", ErlefWeb do
pipe_through :browser
get "/login/init", SessionController, :show
get "/login", SessionController, :create
post "/logout", SessionController, :delete
get "/", PageController, :index
get "/academic-papers", AcademicPaperController, :index
get "/bylaws", PageController, :bylaws
get "/board_members", PageController, :board_members
get "/community", PageController, :community
get "/contact", PageController, :contact
get "/faq", PageController, :faq
get "/sponsors", PageController, :sponsors
get "/become-a-sponsor", PageController, :sponsor_info
get "/wg-proposal-template", PageController, :wg_proposal_template
get "/news", BlogController, :index, as: :news
get "/news/:topic", BlogController, :index, as: :news
get "/news/:topic/:id", BlogController, :show, as: :news
get "/events/:slug", EventController, :show
get "/events", EventController, :index
resources "/wg", WorkingGroupController, only: [:index, :show]
resources "/stipends", StipendController, only: [:index, :create]
resources "/slack-invite/:team", SlackInviteController, only: [:create, :index]
scope "/admin", Admin, as: :admin do
pipe_through [:admin_required]
get "/", DashboardController, :index
resources "/events", EventController, only: [:index, :show]
put "/events/:id", EventController, :approve
live_dashboard "/dashboard"
end
end
scope "/", ErlefWeb do
pipe_through [:browser, :session_required]
resources "/event_submissions", EventSubmissionController, only: [:new, :show, :create]
end
end
| 33.611111 | 214 | 0.695537 |
1cd15753448e920d4684c784b00ce3eca592911b | 16,579 | exs | Elixir | test/paper_trail/base_tests.exs | marioimr/paper_trail | 7126d00e34a526f53496f1dc5aa772e364ae65fd | [
"MIT"
] | 1 | 2021-04-20T09:52:46.000Z | 2021-04-20T09:52:46.000Z | test/paper_trail/base_tests.exs | marioimr/paper_trail | 7126d00e34a526f53496f1dc5aa772e364ae65fd | [
"MIT"
] | 1 | 2020-06-12T13:53:38.000Z | 2020-06-12T13:53:38.000Z | test/paper_trail/base_tests.exs | marioimr/paper_trail | 7126d00e34a526f53496f1dc5aa772e364ae65fd | [
"MIT"
] | 1 | 2021-03-25T14:24:45.000Z | 2021-03-25T14:24:45.000Z | defmodule PaperTrailTest do
use ExUnit.Case
import Ecto.Query
alias PaperTrail.Version
alias SimpleCompany, as: Company
alias SimplePerson, as: Person
@repo PaperTrail.RepoClient.repo()
@create_company_params %{name: "Acme LLC", is_active: true, city: "Greenwich"}
@update_company_params %{
city: "Hong Kong",
website: "http://www.acme.com",
facebook: "acme.llc"
}
doctest PaperTrail
setup_all do
# Force non-strict mode + the test repo, then re-evaluate the library
# sources so the new config takes effect (presumably the modules read
# config at compile time — TODO confirm).
Application.put_env(:paper_trail, :strict_mode, false)
Application.put_env(:paper_trail, :repo, PaperTrail.Repo)
Code.eval_file("lib/paper_trail.ex")
Code.eval_file("lib/version.ex")
:ok
end
setup do
# Start each test from empty tables, and clean up again afterwards so
# state never leaks between tests.
@repo.delete_all(Person)
@repo.delete_all(Company)
@repo.delete_all(Version)
on_exit(fn ->
@repo.delete_all(Person)
@repo.delete_all(Company)
@repo.delete_all(Version)
end)
:ok
end
test "creating a company creates a company version with correct attributes" do
user = create_user()
{:ok, result} = create_company_with_version(@create_company_params, originator: user)
company_count = Company.count()
version_count = Version.count()
company = result[:model] |> serialize
version = result[:version] |> serialize
assert Map.keys(result) == [:model, :version]
assert company_count == 1
assert version_count == 1
assert Map.drop(company, [:id, :inserted_at, :updated_at]) == %{
name: "Acme LLC",
is_active: true,
city: "Greenwich",
website: nil,
address: nil,
facebook: nil,
twitter: nil,
founded_in: nil
}
assert Map.drop(version, [:id, :inserted_at]) == %{
event: "insert",
item_type: "SimpleCompany",
item_id: company.id,
item_changes: company,
originator_id: user.id,
origin: nil,
meta: nil
}
assert company == first(Company, :id) |> @repo.one |> serialize
end
test "PaperTrail.insert/2 with an error returns and error tuple like Repo.insert/2" do
result = create_company_with_version(%{name: nil, is_active: true, city: "Greenwich"})
ecto_result =
Company.changeset(%Company{}, %{name: nil, is_active: true, city: "Greenwich"})
|> @repo.insert
assert result == ecto_result
end
test "updating a company with originator creates a correct company version" do
user = create_user()
{:ok, insert_result} = create_company_with_version()
{:ok, result} =
update_company_with_version(
insert_result[:model],
@update_company_params,
user: user
)
company_count = Company.count()
version_count = Version.count()
company = result[:model] |> serialize
version = result[:version] |> serialize
assert Map.keys(result) == [:model, :version]
assert company_count == 1
assert version_count == 2
assert Map.drop(company, [:id, :inserted_at, :updated_at]) == %{
name: "Acme LLC",
is_active: true,
city: "Hong Kong",
website: "http://www.acme.com",
address: nil,
facebook: "acme.llc",
twitter: nil,
founded_in: nil
}
assert Map.drop(version, [:id, :inserted_at]) == %{
event: "update",
item_type: "SimpleCompany",
item_id: company.id,
item_changes: %{
city: "Hong Kong",
website: "http://www.acme.com",
facebook: "acme.llc"
},
originator_id: user.id,
origin: nil,
meta: nil
}
assert company == first(Company, :id) |> @repo.one |> serialize
end
test "updating a company with originator[user] creates a correct company version" do
user = create_user()
{:ok, insert_result} = create_company_with_version()
{:ok, result} =
update_company_with_version(
insert_result[:model],
@update_company_params,
user: user
)
company_count = Company.count()
version_count = Version.count()
company = result[:model] |> serialize
version = result[:version] |> serialize
assert Map.keys(result) == [:model, :version]
assert company_count == 1
assert version_count == 2
assert Map.drop(company, [:id, :inserted_at, :updated_at]) == %{
name: "Acme LLC",
is_active: true,
city: "Hong Kong",
website: "http://www.acme.com",
address: nil,
facebook: "acme.llc",
twitter: nil,
founded_in: nil
}
assert Map.drop(version, [:id, :inserted_at]) == %{
event: "update",
item_type: "SimpleCompany",
item_id: company.id,
item_changes: %{
city: "Hong Kong",
website: "http://www.acme.com",
facebook: "acme.llc"
},
originator_id: user.id,
origin: nil,
meta: nil
}
assert company == first(Company, :id) |> @repo.one |> serialize
end
test "PaperTrail.update/2 with an error returns and error tuple like Repo.update/2" do
{:ok, insert_result} = create_company_with_version()
company = insert_result[:model]
result =
update_company_with_version(company, %{
name: nil,
city: "Hong Kong",
website: "http://www.acme.com",
facebook: "acme.llc"
})
ecto_result =
Company.changeset(company, %{
name: nil,
city: "Hong Kong",
website: "http://www.acme.com",
facebook: "acme.llc"
})
|> @repo.update
assert result == ecto_result
end
test "deleting a company creates a company version with correct attributes" do
user = create_user()
{:ok, insert_result} = create_company_with_version()
{:ok, update_result} = update_company_with_version(insert_result[:model])
company_before_deletion = first(Company, :id) |> @repo.one |> serialize
{:ok, result} = PaperTrail.delete(update_result[:model], originator: user)
company_count = Company.count()
version_count = Version.count()
company = result[:model] |> serialize
version = result[:version] |> serialize
assert Map.keys(result) == [:model, :version]
assert company_count == 0
assert version_count == 3
assert Map.drop(company, [:id, :inserted_at, :updated_at]) == %{
name: "Acme LLC",
is_active: true,
city: "Hong Kong",
website: "http://www.acme.com",
address: nil,
facebook: "acme.llc",
twitter: nil,
founded_in: nil
}
assert Map.drop(version, [:id, :inserted_at]) == %{
event: "delete",
item_type: "SimpleCompany",
item_id: company.id,
item_changes: %{
id: company.id,
inserted_at: company.inserted_at,
updated_at: company.updated_at,
name: "Acme LLC",
is_active: true,
website: "http://www.acme.com",
city: "Hong Kong",
address: nil,
facebook: "acme.llc",
twitter: nil,
founded_in: nil
},
originator_id: user.id,
origin: nil,
meta: nil
}
assert company == company_before_deletion
end
test "delete works with a changeset" do
user = create_user()
{:ok, insert_result} = create_company_with_version()
{:ok, update_result} = update_company_with_version(insert_result[:model])
company_before_deletion = first(Company, :id) |> @repo.one
changeset = Company.changeset(company_before_deletion, %{})
{:ok, result} = PaperTrail.delete(changeset, originator: user)
company_count = Company.count()
version_count = Version.count()
company = result[:model] |> serialize
version = result[:version] |> serialize
assert Map.keys(result) == [:model, :version]
assert company_count == 0
assert version_count == 3
assert Map.drop(company, [:id, :inserted_at, :updated_at]) == %{
name: "Acme LLC",
is_active: true,
city: "Hong Kong",
website: "http://www.acme.com",
address: nil,
facebook: "acme.llc",
twitter: nil,
founded_in: nil
}
assert Map.drop(version, [:id, :inserted_at]) == %{
event: "delete",
item_type: "SimpleCompany",
item_id: company.id,
item_changes: %{
id: company.id,
inserted_at: company.inserted_at,
updated_at: company.updated_at,
name: "Acme LLC",
is_active: true,
website: "http://www.acme.com",
city: "Hong Kong",
address: nil,
facebook: "acme.llc",
twitter: nil,
founded_in: nil
},
originator_id: user.id,
origin: nil,
meta: nil
}
assert company == serialize(company_before_deletion)
end
test "PaperTrail.delete/2 with an error returns and error tuple like Repo.delete/2" do
{:ok, insert_company_result} = create_company_with_version()
Person.changeset(%Person{}, %{
first_name: "Izel",
last_name: "Nakri",
gender: true,
company_id: insert_company_result[:model].id
})
|> PaperTrail.insert()
{:error, ecto_result} = insert_company_result[:model] |> Company.changeset() |> @repo.delete
{:error, result} = insert_company_result[:model] |> Company.changeset() |> PaperTrail.delete()
assert Map.drop(result, [:repo_opts]) == Map.drop(ecto_result, [:repo_opts])
end
test "creating a person with meta tag creates a person version with correct attributes" do
create_company_with_version()
{:ok, new_company_result} =
Company.changeset(%Company{}, %{
name: "Another Company Corp.",
is_active: true,
address: "Sesame street 100/3, 101010"
})
|> PaperTrail.insert()
{:ok, result} =
Person.changeset(%Person{}, %{
first_name: "Izel",
last_name: "Nakri",
gender: true,
company_id: new_company_result[:model].id
})
|> PaperTrail.insert(origin: "admin", meta: %{linkname: "izelnakri"})
person_count = Person.count()
version_count = Version.count()
person = result[:model] |> serialize
version = result[:version] |> serialize
assert Map.keys(result) == [:model, :version]
assert person_count == 1
assert version_count == 3
assert Map.drop(person, [:id, :inserted_at, :updated_at]) == %{
first_name: "Izel",
last_name: "Nakri",
gender: true,
visit_count: nil,
birthdate: nil,
company_id: new_company_result[:model].id
}
assert Map.drop(version, [:id, :inserted_at]) == %{
event: "insert",
item_type: "SimplePerson",
item_id: person.id,
item_changes: person,
originator_id: nil,
origin: "admin",
meta: %{linkname: "izelnakri"}
}
assert person == first(Person, :id) |> @repo.one |> serialize
end
test "updating a person creates a person version with correct attributes" do
{:ok, initial_company_insertion} =
create_company_with_version(%{
name: "Acme LLC",
website: "http://www.acme.com"
})
{:ok, target_company_insertion} =
create_company_with_version(%{
name: "Another Company Corp.",
is_active: true,
address: "Sesame street 100/3, 101010"
})
{:ok, insert_person_result} =
Person.changeset(%Person{}, %{
first_name: "Izel",
last_name: "Nakri",
gender: true,
company_id: target_company_insertion[:model].id
})
|> PaperTrail.insert(origin: "admin")
{:ok, result} =
Person.changeset(insert_person_result[:model], %{
first_name: "Isaac",
visit_count: 10,
birthdate: ~D[1992-04-01],
company_id: initial_company_insertion[:model].id
})
|> PaperTrail.update(origin: "scraper", meta: %{linkname: "izelnakri"})
person_count = Person.count()
version_count = Version.count()
person = result[:model] |> serialize
version = result[:version] |> serialize
assert Map.keys(result) == [:model, :version]
assert person_count == 1
assert version_count == 4
assert Map.drop(person, [:id, :inserted_at, :updated_at]) == %{
company_id: initial_company_insertion[:model].id,
first_name: "Isaac",
visit_count: 10,
birthdate: ~D[1992-04-01],
last_name: "Nakri",
gender: true
}
assert Map.drop(version, [:id, :inserted_at]) == %{
event: "update",
item_type: "SimplePerson",
item_id: person.id,
item_changes: %{
first_name: "Isaac",
visit_count: 10,
birthdate: ~D[1992-04-01],
company_id: initial_company_insertion[:model].id
},
originator_id: nil,
origin: "scraper",
meta: %{linkname: "izelnakri"}
}
assert person == first(Person, :id) |> @repo.one |> serialize
end
test "deleting a person creates a person version with correct attributes" do
create_company_with_version(%{name: "Acme LLC", website: "http://www.acme.com"})
{:ok, target_company_insertion} =
create_company_with_version(%{
name: "Another Company Corp.",
is_active: true,
address: "Sesame street 100/3, 101010"
})
# add link name later on
{:ok, insert_person_result} =
Person.changeset(%Person{}, %{
first_name: "Izel",
last_name: "Nakri",
gender: true,
company_id: target_company_insertion[:model].id
})
|> PaperTrail.insert(origin: "admin")
{:ok, update_result} =
Person.changeset(insert_person_result[:model], %{
first_name: "Isaac",
visit_count: 10,
birthdate: ~D[1992-04-01],
company_id: target_company_insertion[:model].id
})
|> PaperTrail.update(origin: "scraper", meta: %{linkname: "izelnakri"})
person_before_deletion = first(Person, :id) |> @repo.one |> serialize
{:ok, result} =
PaperTrail.delete(
update_result[:model],
origin: "admin",
meta: %{linkname: "izelnakri"}
)
person_count = Person.count()
version_count = Version.count()
assert Map.keys(result) == [:model, :version]
old_person = update_result[:model] |> serialize
version = result[:version] |> serialize
assert person_count == 0
assert version_count == 5
assert Map.drop(version, [:id, :inserted_at]) == %{
event: "delete",
item_type: "SimplePerson",
item_id: old_person.id,
item_changes: %{
id: old_person.id,
inserted_at: old_person.inserted_at,
updated_at: old_person.updated_at,
first_name: "Isaac",
last_name: "Nakri",
gender: true,
visit_count: 10,
birthdate: ~D[1992-04-01],
company_id: target_company_insertion[:model].id
},
originator_id: nil,
origin: "admin",
meta: %{linkname: "izelnakri"}
}
assert old_person == person_before_deletion
end
# Inserts a fixture user used as the version originator.
defp create_user do
User.changeset(%User{}, %{token: "fake-token", username: "izelnakri"}) |> @repo.insert!
end
# Inserts a company through PaperTrail so a version row is created too.
defp create_company_with_version(params \\ @create_company_params, options \\ nil) do
Company.changeset(%Company{}, params) |> PaperTrail.insert(options)
end
# Updates a company through PaperTrail, producing an "update" version.
defp update_company_with_version(company, params \\ @update_company_params, options \\ nil) do
Company.changeset(company, params) |> PaperTrail.update(options)
end
# Strips Ecto bookkeeping (__struct__, __meta__) and all association
# fields so schema structs can be compared as plain maps.
defp serialize(model) do
relationships = model.__struct__.__schema__(:associations)
Map.drop(model, [:__struct__, :__meta__] ++ relationships)
end
end
| 30.364469 | 98 | 0.573678 |
1cd182a8e5e1b3f72c9ffc2ff80b3cdb5c9dccb4 | 11,409 | ex | Elixir | lib/chat_api/newsletters/pg.ex | aboutphilippe/papercups | 30364cc562f41cd9d5a7ca5357b16ab8484bd5c9 | [
"MIT"
] | 1 | 2021-06-17T03:17:24.000Z | 2021-06-17T03:17:24.000Z | lib/chat_api/newsletters/pg.ex | aboutphilippe/papercups | 30364cc562f41cd9d5a7ca5357b16ab8484bd5c9 | [
"MIT"
] | null | null | null | lib/chat_api/newsletters/pg.ex | aboutphilippe/papercups | 30364cc562f41cd9d5a7ca5357b16ab8484bd5c9 | [
"MIT"
] | null | null | null | defmodule ChatApi.Newsletters.Pg do
@moduledoc """
A module to handle parsing and sending PG essays
"""
require Logger
alias ChatApi.Google
@months [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
]
@spec get_essay_urls() :: [binary()]
def get_essay_urls() do
{:ok, %{body: html}} = Tesla.get("http://www.paulgraham.com/articles.html")
{:ok, document} = Floki.parse_document(html)
document
|> Floki.find("table table a")
|> Floki.attribute("href")
|> Stream.uniq()
|> Enum.map(&"http://www.paulgraham.com/#{&1}")
end
@spec extract_essay_data(binary()) :: {:error, binary()} | {:ok, {binary(), binary(), binary()}}
def extract_essay_data(url \\ "http://www.paulgraham.com/useful.html") do
Logger.debug("Fetching url: #{inspect(url)}")
{:ok, %{body: html}} = Tesla.get(url)
{:ok, document} = Floki.parse_document(html)
title =
document
|> Floki.find("table table img")
|> Floki.attribute("alt")
|> List.first()
document
|> Floki.find("table table font")
|> Stream.map(fn {_tag, _attrs, nodes} -> nodes end)
|> Enum.find(fn nodes ->
nodes |> Floki.text() |> String.contains?(@months)
end)
|> case do
nil ->
{:error, "Unrecognized essay format"}
content ->
text = Floki.text(content)
html = """
<div style=\"max-width:480px\">
#{Floki.raw_html(content)}
<br />
<br />
<p>(Read online at #{url})</p>
</div>
"""
{:ok, {title, text, html}}
end
end
def notify(token, url, recipients) when is_list(recipients) do
with %{"emailAddress" => sender} <- Google.Gmail.get_profile(token) do
case extract_essay_data(url) do
{:ok, {title, text, html}} ->
Logger.debug("Sending PG essay #{inspect(url)} to #{inspect(recipients)}")
Google.Gmail.send_message(token, %{
to: sender,
from: {"PG Essay Newsletter", sender},
# NB: just sending to all as bcc for now
bcc: recipients,
subject: "PG Essay: #{title}",
text: text,
html: html
})
{:error, reason} ->
Logger.error("Could not send PG essay newsletter email: #{inspect(reason)}")
nil
end
end
end
def notify(token, url, recipient), do: notify(token, url, [recipient])
def run!() do
with {:ok, %{account_id: account_id, sheet_id: sheet_id, start_date: start_date}} <-
get_config(),
%{refresh_token: sheets_token} <-
Google.get_authorization_by_account(account_id, %{client: "sheets"}),
%{refresh_token: gmail_token} <-
Google.get_authorization_by_account(account_id, %{client: "gmail", type: "support"}) do
url = pick_essay_url(Date.utc_today(), start_date)
recipients =
sheets_token
|> Google.Sheets.get_spreadsheet_by_id!(sheet_id)
|> Google.Sheets.format_as_json()
|> Enum.map(fn record ->
case record do
%{"email" => email, "name" => name} when is_nil(name) or name == "" -> email
%{"email" => email, "name" => name} -> {name, email}
%{"email" => email} -> email
_ -> nil
end
end)
|> Enum.reject(&is_nil/1)
notify(gmail_token, url, recipients)
end
end
@spec pick_essay_url(Date.t(), Date.t()) :: binary()
def pick_essay_url(current_date, start_date) do
index = current_date |> Date.diff(start_date) |> max(0)
top_ranked_urls() |> Enum.at(index)
end
@default_start_date ~D[2021-02-17]
def config() do
%{
account_id: System.get_env("REACT_APP_ADMIN_ACCOUNT_ID"),
sheet_id: System.get_env("PG_NEWSLETTER_SHEET_ID"),
start_date:
case Date.from_iso8601(System.get_env("PG_NEWSLETTER_START_DATE", "")) do
{:ok, date} -> date
_ -> @default_start_date
end
}
end
def get_config() do
case config() do
%{account_id: account_id} when is_nil(account_id) or account_id == "" ->
{:error, "Please set the REACT_APP_ADMIN_ACCOUNT_ID environment variable"}
%{sheet_id: sheet_id} when is_nil(sheet_id) or sheet_id == "" ->
{:error, "Please set the PG_NEWSLETTER_SHEET_ID environment variable"}
config ->
{:ok, config}
end
end
@spec top_ranked_urls() :: [binary()]
def top_ranked_urls() do
# From http://www.solipsys.co.uk/new/PaulGrahamEssaysRanking.html
[
"http://www.paulgraham.com/avg.html",
"http://www.paulgraham.com/say.html",
"http://www.paulgraham.com/icad.html",
"http://www.paulgraham.com/essay.html",
"http://www.paulgraham.com/diff.html",
"http://www.paulgraham.com/nerds.html",
"http://www.paulgraham.com/taste.html",
"http://www.paulgraham.com/gh.html",
"http://www.paulgraham.com/road.html",
"http://www.paulgraham.com/wealth.html",
"http://www.paulgraham.com/power.html",
"http://www.paulgraham.com/venturecapital.html",
"http://www.paulgraham.com/gba.html",
"http://www.paulgraham.com/start.html",
"http://www.paulgraham.com/hiring.html",
"http://www.paulgraham.com/america.html",
"http://www.paulgraham.com/progbot.html",
"http://www.paulgraham.com/inequality.html",
"http://www.paulgraham.com/siliconvalley.html",
"http://www.paulgraham.com/ladder.html",
"http://www.paulgraham.com/love.html",
"http://www.paulgraham.com/procrastination.html",
"http://www.paulgraham.com/credentials.html",
"http://www.paulgraham.com/equity.html",
"http://www.paulgraham.com/die.html",
"http://www.paulgraham.com/hs.html",
"http://www.paulgraham.com/spam.html",
"http://www.paulgraham.com/angelinvesting.html",
"http://www.paulgraham.com/badeconomy.html",
"http://www.paulgraham.com/highres.html",
"http://www.paulgraham.com/pypar.html",
"http://www.paulgraham.com/ideas.html",
"http://www.paulgraham.com/better.html",
"http://www.paulgraham.com/ffb.html",
"http://www.paulgraham.com/relres.html",
"http://www.paulgraham.com/webstartups.html",
"http://www.paulgraham.com/hundred.html",
"http://www.paulgraham.com/bronze.html",
"http://www.paulgraham.com/submarine.html",
"http://www.paulgraham.com/marginal.html",
"http://www.paulgraham.com/startupfunding.html",
"http://www.paulgraham.com/convergence.html",
"http://www.paulgraham.com/hiresfund.html",
"http://www.paulgraham.com/popular.html",
"http://www.paulgraham.com/stuff.html",
"http://www.paulgraham.com/trolls.html",
"http://www.paulgraham.com/googles.html",
"http://www.paulgraham.com/startupmistakes.html",
"http://www.paulgraham.com/top.html",
"http://www.paulgraham.com/hp.html",
"http://www.paulgraham.com/bubble.html",
"http://www.paulgraham.com/langdes.html",
"http://www.paulgraham.com/vcsqueeze.html",
"http://www.paulgraham.com/cities.html",
"http://www.paulgraham.com/13sentences.html",
"http://www.paulgraham.com/fundraising.html",
"http://www.paulgraham.com/guidetoinvestors.html",
"http://www.paulgraham.com/desres.html",
"http://www.paulgraham.com/judgement.html",
"http://www.paulgraham.com/unions.html",
"http://www.paulgraham.com/maybe.html",
"http://www.paulgraham.com/startuphubs.html",
"http://www.paulgraham.com/control.html",
"http://www.paulgraham.com/notnot.html",
"http://www.paulgraham.com/opensource.html",
"http://www.paulgraham.com/5founders.html",
"http://www.paulgraham.com/6631327.html",
"http://www.paulgraham.com/addiction.html",
"http://www.paulgraham.com/airbnb.html",
"http://www.paulgraham.com/ambitious.html",
"http://www.paulgraham.com/apple.html",
"http://www.paulgraham.com/artistsship.html",
"http://www.paulgraham.com/boss.html",
"http://www.paulgraham.com/charisma.html",
"http://www.paulgraham.com/college.html",
"http://www.paulgraham.com/colleges.html",
"http://www.paulgraham.com/copy.html",
"http://www.paulgraham.com/determination.html",
"http://www.paulgraham.com/disagree.html",
"http://www.paulgraham.com/discover.html",
"http://www.paulgraham.com/distraction.html",
"http://www.paulgraham.com/divergence.html",
"http://www.paulgraham.com/fix.html",
"http://www.paulgraham.com/founders.html",
"http://www.paulgraham.com/foundersatwork.html",
"http://www.paulgraham.com/foundervisa.html",
"http://www.paulgraham.com/future.html",
"http://www.paulgraham.com/gap.html",
"http://www.paulgraham.com/good.html",
"http://www.paulgraham.com/goodart.html",
"http://www.paulgraham.com/hackernews.html",
"http://www.paulgraham.com/head.html",
"http://www.paulgraham.com/heroes.html",
"http://www.paulgraham.com/hubs.html",
"http://www.paulgraham.com/identity.html",
"http://www.paulgraham.com/iflisp.html",
"http://www.paulgraham.com/investors.html",
"http://www.paulgraham.com/island.html",
"http://www.paulgraham.com/javacover.html",
"http://www.paulgraham.com/kate.html",
"http://www.paulgraham.com/laundry.html",
"http://www.paulgraham.com/lies.html",
"http://www.paulgraham.com/mac.html",
"http://www.paulgraham.com/makersschedule.html",
"http://www.paulgraham.com/microsoft.html",
"http://www.paulgraham.com/mit.html",
"http://www.paulgraham.com/newthings.html",
"http://www.paulgraham.com/noop.html",
"http://www.paulgraham.com/nthings.html",
"http://www.paulgraham.com/organic.html",
"http://www.paulgraham.com/patentpledge.html",
"http://www.paulgraham.com/philosophy.html",
"http://www.paulgraham.com/polls.html",
"http://www.paulgraham.com/prcmc.html",
"http://www.paulgraham.com/property.html",
"http://www.paulgraham.com/publishing.html",
"http://www.paulgraham.com/ramenprofitable.html",
"http://www.paulgraham.com/randomness.html",
"http://www.paulgraham.com/really.html",
"http://www.paulgraham.com/revolution.html",
"http://www.paulgraham.com/schlep.html",
"http://www.paulgraham.com/seesv.html",
"http://www.paulgraham.com/segway.html",
"http://www.paulgraham.com/selfindulgence.html",
"http://www.paulgraham.com/sfp.html",
"http://www.paulgraham.com/softwarepatents.html",
"http://www.paulgraham.com/speak.html",
"http://www.paulgraham.com/startuplessons.html",
"http://www.paulgraham.com/superangels.html",
"http://www.paulgraham.com/tablets.html",
"http://www.paulgraham.com/usa.html",
"http://www.paulgraham.com/vw.html",
"http://www.paulgraham.com/web20.html",
"http://www.paulgraham.com/whyyc.html",
"http://www.paulgraham.com/wisdom.html",
"http://www.paulgraham.com/word.html",
"http://www.paulgraham.com/writing44.html",
"http://www.paulgraham.com/yahoo.html",
"http://www.paulgraham.com/ycombinator.html"
]
end
end
| 36.92233 | 98 | 0.61881 |
1cd18fa3953b14368a6454485d776e513676702d | 14,341 | exs | Elixir | test/omise/schedule_test.exs | zentetsukenz/omise-elixir | 5ba59f53d3a66702b44f1d16c74e712d4f5d2815 | [
"MIT"
] | 12 | 2016-09-10T16:17:32.000Z | 2020-06-04T01:35:54.000Z | test/omise/schedule_test.exs | zentetsukenz/omise-elixir | 5ba59f53d3a66702b44f1d16c74e712d4f5d2815 | [
"MIT"
] | 8 | 2016-09-19T05:36:27.000Z | 2020-09-14T08:46:06.000Z | test/omise/schedule_test.exs | zentetsukenz/omise-elixir | 5ba59f53d3a66702b44f1d16c74e712d4f5d2815 | [
"MIT"
] | 10 | 2016-07-19T05:03:01.000Z | 2021-02-08T03:20:54.000Z | defmodule Omise.ScheduleTest do
use Omise.TestCase, async: true
alias Omise.Schedule
setup do: set_fixture_dir("schedule")
describe "list/2" do
test "lists all schedules" do
use_cassette "list_schedules" do
assert Schedule.list(from: "2017-10-01", limit: 1, order: "reverse_chronological") ==
{:ok,
%Omise.List{
data: [
%Omise.Schedule{
charge: %{},
created: "2017-10-02T07:02:26Z",
end_date: "2018-05-01",
every: 1,
id: "schd_test_59hulv2s8cebp5davso",
in_words: "Every 1 week(s) on Monday and Friday",
livemode: false,
location: "/schedules/schd_test_59hulv2s8cebp5davso",
next_occurrence_dates: [
"2017-12-01",
"2017-12-04",
"2017-12-08",
"2017-12-11",
"2017-12-15",
"2017-12-18",
"2017-12-22",
"2017-12-25",
"2017-12-29",
"2018-01-01",
"2018-01-05",
"2018-01-08",
"2018-01-12",
"2018-01-15",
"2018-01-19",
"2018-01-22",
"2018-01-26",
"2018-01-29",
"2018-02-02",
"2018-02-05",
"2018-02-09",
"2018-02-12",
"2018-02-16",
"2018-02-19",
"2018-02-23",
"2018-02-26",
"2018-03-02",
"2018-03-05",
"2018-03-09",
"2018-03-12"
],
object: "schedule",
occurrences: %Omise.List{
data: [],
from: "1970-01-01T00:00:00Z",
limit: 20,
location: "/schedules/schd_test_59hulv2s8cebp5davso/occurrences",
object: "list",
offset: 0,
order: nil,
to: "2017-11-04T13:29:42Z",
total: 0
},
on: %{"weekdays" => ["monday", "friday"]},
period: "week",
start_date: "2017-12-01",
status: "active",
transfer: %{
"amount" => nil,
"currency" => "thb",
"percentage_of_balance" => 75.0,
"recipient" => "recp_test_55j2lebbrscmwwdh9gm"
}
}
],
from: "2017-10-01T00:00:00Z",
limit: 1,
location: "/schedules",
object: "list",
offset: 0,
order: "reverse_chronological",
to: "2017-11-04T13:29:42Z",
total: 9
}}
end
end
end
describe "list_occurrences/3" do
test "list all occurrences of given schedule" do
use_cassette "list_occurrences" do
assert Schedule.list_occurrences("schd_test_59hsm5nodt6erfvkxda", limit: 2) ==
{:ok,
%Omise.List{
data: [
%Omise.Occurrence{
created: "2017-10-02T03:38:44Z",
id: "occu_test_59hsm5nprb2cb2gtazw",
livemode: false,
location: "/occurrences/occu_test_59hsm5nprb2cb2gtazw",
message: nil,
object: "occurrence",
processed_at: "2017-10-10T01:30:07Z",
result: "chrg_test_59kw03oovul5pp97bgg",
retry_date: nil,
schedule: "schd_test_59hsm5nodt6erfvkxda",
schedule_date: "2017-10-10",
status: "successful"
},
%Omise.Occurrence{
created: "2017-10-10T01:30:07Z",
id: "occu_test_59kw04xh54j1sg4jm9n",
livemode: false,
location: "/occurrences/occu_test_59kw04xh54j1sg4jm9n",
message: nil,
object: "occurrence",
processed_at: "2017-10-12T01:30:03Z",
result: "chrg_test_59lo5wwqohps3zecwwt",
retry_date: nil,
schedule: "schd_test_59hsm5nodt6erfvkxda",
schedule_date: "2017-10-12",
status: "successful"
}
],
from: "1970-01-01T00:00:00Z",
limit: 2,
location: nil,
object: "list",
offset: 0,
order: "chronological",
to: "2017-11-04T13:33:08Z",
total: 13
}}
end
end
end
describe "retrieve/2" do
test "retrieves schedule" do
use_cassette "retrieve_schedule" do
assert Schedule.retrieve("schd_test_59hulv2s8cebp5davso") ==
{:ok,
%Omise.Schedule{
charge: %{},
created: "2017-10-02T07:02:26Z",
end_date: "2018-05-01",
every: 1,
id: "schd_test_59hulv2s8cebp5davso",
in_words: "Every 1 week(s) on Monday and Friday",
livemode: false,
location: "/schedules/schd_test_59hulv2s8cebp5davso",
next_occurrence_dates: [
"2017-12-01",
"2017-12-04",
"2017-12-08",
"2017-12-11",
"2017-12-15",
"2017-12-18",
"2017-12-22",
"2017-12-25",
"2017-12-29",
"2018-01-01",
"2018-01-05",
"2018-01-08",
"2018-01-12",
"2018-01-15",
"2018-01-19",
"2018-01-22",
"2018-01-26",
"2018-01-29",
"2018-02-02",
"2018-02-05",
"2018-02-09",
"2018-02-12",
"2018-02-16",
"2018-02-19",
"2018-02-23",
"2018-02-26",
"2018-03-02",
"2018-03-05",
"2018-03-09",
"2018-03-12"
],
object: "schedule",
occurrences: %Omise.List{
data: [],
from: "1970-01-01T00:00:00Z",
limit: 20,
location: "/schedules/schd_test_59hulv2s8cebp5davso/occurrences",
object: "list",
offset: 0,
order: nil,
to: "2017-11-04T13:37:49Z",
total: 0
},
on: %{"weekdays" => ["monday", "friday"]},
period: "week",
start_date: "2017-12-01",
status: "active",
transfer: %{
"amount" => nil,
"currency" => "thb",
"percentage_of_balance" => 75.0,
"recipient" => "recp_test_55j2lebbrscmwwdh9gm"
}
}}
end
end
end
describe "create/2" do
test "creates charge schedule" do
use_cassette "create_charge_schedule" do
assert Schedule.create(
every: 1,
period: "month",
on: [
days_of_month: [15]
],
start_date: "2017-12-15",
end_date: "2018-03-15",
charge: [
customer: "cust_test_59ur2ob5ltcsl9vjwb4",
amount: 199_00,
description: "Membership fee"
]
) ==
{:ok,
%Omise.Schedule{
charge: %{
"amount" => 19900,
"card" => nil,
"currency" => "thb",
"customer" => "cust_test_59ur2ob5ltcsl9vjwb4",
"description" => "Membership fee"
},
created: "2017-11-04T13:47:24Z",
end_date: "2018-03-15",
every: 1,
id: "schd_test_59uv8cx9595f4zttqax",
in_words: "Every 1 month(s) on the 15th",
livemode: false,
location: "/schedules/schd_test_59uv8cx9595f4zttqax",
next_occurrence_dates: ["2017-12-15", "2018-01-15", "2018-02-15", "2018-03-15"],
object: "schedule",
occurrences: %Omise.List{
data: [],
from: "1970-01-01T00:00:00Z",
limit: 20,
location: "/schedules/schd_test_59uv8cx9595f4zttqax/occurrences",
object: "list",
offset: 0,
order: nil,
to: "2017-11-04T13:47:24Z",
total: 0
},
on: %{"days_of_month" => [15]},
period: "month",
start_date: "2017-12-15",
status: "active",
transfer: %{}
}}
end
end
test "creates transfer schedule" do
use_cassette "create_transfer_schedule" do
assert Schedule.create(
every: 10,
period: "day",
start_date: "2017-11-10",
end_date: "2017-12-31",
transfer: [
recipient: "recp_test_55j2lryr9wfpxm5c7js",
amount: 10_000_00
]
) ==
{:ok,
%Omise.Schedule{
charge: %{},
created: "2017-11-04T13:43:19Z",
end_date: "2017-12-31",
every: 10,
id: "schd_test_59uv6x4k6oj4swchy3c",
in_words: "Every 10 day(s)",
livemode: false,
location: "/schedules/schd_test_59uv6x4k6oj4swchy3c",
next_occurrence_dates: [
"2017-11-10",
"2017-11-20",
"2017-11-30",
"2017-12-10",
"2017-12-20",
"2017-12-30"
],
object: "schedule",
occurrences: %Omise.List{
data: [],
from: "1970-01-01T00:00:00Z",
limit: 20,
location: "/schedules/schd_test_59uv6x4k6oj4swchy3c/occurrences",
object: "list",
offset: 0,
order: nil,
to: "2017-11-04T13:43:19Z",
total: 0
},
on: %{},
period: "day",
start_date: "2017-11-10",
status: "active",
transfer: %{
"amount" => 1_000_000,
"currency" => "thb",
"percentage_of_balance" => nil,
"recipient" => "recp_test_55j2lryr9wfpxm5c7js"
}
}}
end
end
end
describe "destroy/2" do
test "destroys schedule" do
use_cassette "destroy_schedule" do
assert Schedule.destroy("schd_test_59uv6x4k6oj4swchy3c") ==
{:ok,
%Omise.Schedule{
charge: %{},
created: "2017-11-04T13:43:19Z",
end_date: "2017-12-31",
every: 10,
id: "schd_test_59uv6x4k6oj4swchy3c",
in_words: "Every 10 day(s)",
livemode: false,
location: "/schedules/schd_test_59uv6x4k6oj4swchy3c",
next_occurrence_dates: [],
object: "schedule",
occurrences: %Omise.List{
data: [],
from: "1970-01-01T00:00:00Z",
limit: 20,
location: "/schedules/schd_test_59uv6x4k6oj4swchy3c/occurrences",
object: "list",
offset: 0,
order: nil,
to: "2017-11-04T13:49:19Z",
total: 0
},
on: %{},
period: "day",
start_date: "2017-11-10",
status: "deleted",
transfer: %{
"amount" => 1_000_000,
"currency" => "thb",
"percentage_of_balance" => nil,
"recipient" => "recp_test_55j2lryr9wfpxm5c7js"
}
}}
end
end
end
end
| 38.864499 | 100 | 0.359529 |
1cd192f8b9a91c579d7f6c96bd69c52c84c52328 | 1,078 | exs | Elixir | config/config.exs | SophisticaSean/scrape | 3177c915af6672caa4710a961d0ea21ca5cf4bb7 | [
"MIT"
] | 2 | 2016-02-14T01:22:24.000Z | 2016-08-22T16:52:01.000Z | config/config.exs | SophisticaSean/scrape | 3177c915af6672caa4710a961d0ea21ca5cf4bb7 | [
"MIT"
] | null | null | null | config/config.exs | SophisticaSean/scrape | 3177c915af6672caa4710a961d0ea21ca5cf4bb7 | [
"MIT"
] | 1 | 2018-10-14T21:04:40.000Z | 2018-10-14T21:04:40.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for third-
# party users, it should be done in your mix.exs file.
config :remix,
escript: true,
silent: true
# Sample configuration:
#
# config :logger, :console,
# level: :info,
# format: "$date $time [$level] $metadata$message\n",
# metadata: [:user_id]
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 37.172414 | 73 | 0.739332 |
1cd196d164de803b2e8208c28d795644c962d5ab | 6,023 | exs | Elixir | test/groupher_server/cms/hooks/notify_drink_test.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | test/groupher_server/cms/hooks/notify_drink_test.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | test/groupher_server/cms/hooks/notify_drink_test.exs | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.Test.CMS.Hooks.NotifyDrink do
use GroupherServer.TestTools
import GroupherServer.CMS.Delegate.Helper, only: [preload_author: 1]
alias GroupherServer.{CMS, Delivery, Repo}
alias CMS.Delegate.Hooks
setup do
{:ok, user} = db_insert(:user)
{:ok, user2} = db_insert(:user)
{:ok, user3} = db_insert(:user)
{:ok, community} = db_insert(:community)
drink_attrs = mock_attrs(:drink, %{community_id: community.id})
{:ok, drink} = CMS.create_article(community, :drink, drink_attrs, user)
{:ok, comment} = CMS.create_comment(:drink, drink.id, mock_comment(), user)
{:ok, ~m(user2 user3 drink comment)a}
end
describe "[upvote notify]" do
test "upvote hook should work on drink", ~m(user2 drink)a do
{:ok, drink} = preload_author(drink)
{:ok, article} = CMS.upvote_article(:drink, drink.id, user2)
Hooks.Notify.handle(:upvote, article, user2)
{:ok, notifications} =
Delivery.fetch(:notification, drink.author.user, %{page: 1, size: 20})
assert notifications.total_count == 1
notify = notifications.entries |> List.first()
assert notify.action == "UPVOTE"
assert notify.article_id == drink.id
assert notify.thread == "DRINK"
assert notify.user_id == drink.author.user.id
assert user_exist_in?(user2, notify.from_users)
end
test "upvote hook should work on drink comment", ~m(user2 drink comment)a do
{:ok, comment} = CMS.upvote_comment(comment.id, user2)
{:ok, comment} = preload_author(comment)
Hooks.Notify.handle(:upvote, comment, user2)
{:ok, notifications} = Delivery.fetch(:notification, comment.author, %{page: 1, size: 20})
assert notifications.total_count == 1
notify = notifications.entries |> List.first()
assert notify.action == "UPVOTE"
assert notify.article_id == drink.id
assert notify.thread == "DRINK"
assert notify.user_id == comment.author.id
assert notify.comment_id == comment.id
assert user_exist_in?(user2, notify.from_users)
end
test "undo upvote hook should work on drink", ~m(user2 drink)a do
{:ok, drink} = preload_author(drink)
{:ok, article} = CMS.upvote_article(:drink, drink.id, user2)
Hooks.Notify.handle(:upvote, article, user2)
{:ok, article} = CMS.undo_upvote_article(:drink, drink.id, user2)
Hooks.Notify.handle(:undo, :upvote, article, user2)
{:ok, notifications} =
Delivery.fetch(:notification, drink.author.user, %{page: 1, size: 20})
assert notifications.total_count == 0
end
test "undo upvote hook should work on drink comment", ~m(user2 comment)a do
{:ok, comment} = CMS.upvote_comment(comment.id, user2)
Hooks.Notify.handle(:upvote, comment, user2)
{:ok, comment} = CMS.undo_upvote_comment(comment.id, user2)
Hooks.Notify.handle(:undo, :upvote, comment, user2)
{:ok, comment} = preload_author(comment)
{:ok, notifications} = Delivery.fetch(:notification, comment.author, %{page: 1, size: 20})
assert notifications.total_count == 0
end
end
describe "[collect notify]" do
test "collect hook should work on drink", ~m(user2 drink)a do
{:ok, drink} = preload_author(drink)
{:ok, _} = CMS.collect_article(:drink, drink.id, user2)
Hooks.Notify.handle(:collect, drink, user2)
{:ok, notifications} =
Delivery.fetch(:notification, drink.author.user, %{page: 1, size: 20})
assert notifications.total_count == 1
notify = notifications.entries |> List.first()
assert notify.action == "COLLECT"
assert notify.article_id == drink.id
assert notify.thread == "DRINK"
assert notify.user_id == drink.author.user.id
assert user_exist_in?(user2, notify.from_users)
end
test "undo collect hook should work on drink", ~m(user2 drink)a do
{:ok, drink} = preload_author(drink)
{:ok, _} = CMS.upvote_article(:drink, drink.id, user2)
Hooks.Notify.handle(:collect, drink, user2)
{:ok, _} = CMS.undo_upvote_article(:drink, drink.id, user2)
Hooks.Notify.handle(:undo, :collect, drink, user2)
{:ok, notifications} =
Delivery.fetch(:notification, drink.author.user, %{page: 1, size: 20})
assert notifications.total_count == 0
end
end
describe "[comment notify]" do
test "drink author should get notify after some one comment on it", ~m(user2 drink)a do
{:ok, drink} = preload_author(drink)
{:ok, comment} = CMS.create_comment(:drink, drink.id, mock_comment(), user2)
Hooks.Notify.handle(:comment, comment, user2)
{:ok, notifications} =
Delivery.fetch(:notification, drink.author.user, %{page: 1, size: 20})
assert notifications.total_count == 1
notify = notifications.entries |> List.first()
assert notify.action == "COMMENT"
assert notify.thread == "DRINK"
assert notify.article_id == drink.id
assert notify.user_id == drink.author.user.id
assert user_exist_in?(user2, notify.from_users)
end
test "drink comment author should get notify after some one reply it",
~m(user2 user3 drink)a do
{:ok, drink} = preload_author(drink)
{:ok, comment} = CMS.create_comment(:drink, drink.id, mock_comment(), user2)
{:ok, replyed_comment} = CMS.reply_comment(comment.id, mock_comment(), user3)
Hooks.Notify.handle(:reply, replyed_comment, user3)
comment = Repo.preload(comment, :author)
{:ok, notifications} = Delivery.fetch(:notification, comment.author, %{page: 1, size: 20})
assert notifications.total_count == 1
notify = notifications.entries |> List.first()
assert notify.action == "REPLY"
assert notify.thread == "DRINK"
assert notify.article_id == drink.id
assert notify.comment_id == replyed_comment.id
assert notify.user_id == comment.author_id
assert user_exist_in?(user3, notify.from_users)
end
end
end
| 34.417143 | 96 | 0.66163 |
1cd1afceab6f3a5c8322a3fc84b5da736ba07076 | 1,155 | ex | Elixir | lib/google_api/you_tube/v3/model/resource_id.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | lib/google_api/you_tube/v3/model/resource_id.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | lib/google_api/you_tube/v3/model/resource_id.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.ResourceId do
@moduledoc """
A resource id is a generic reference that points to another YouTube resource.
"""
@derive [Poison.Encoder]
defstruct [
:"channelId",
:"kind",
:"playlistId",
:"videoId"
]
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.ResourceId do
def decode(value, _options) do
value
end
end
| 28.875 | 79 | 0.736797 |
1cd1c2818f4b33b2b1b494e2e667dccce3d08482 | 6,605 | exs | Elixir | config/releases.exs | Eein/glimesh.tv | e55e96b852363b0d9576ca47d19f1499889c68db | [
"MIT"
] | null | null | null | config/releases.exs | Eein/glimesh.tv | e55e96b852363b0d9576ca47d19f1499889c68db | [
"MIT"
] | null | null | null | config/releases.exs | Eein/glimesh.tv | e55e96b852363b0d9576ca47d19f1499889c68db | [
"MIT"
] | null | null | null | # In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
import Config
# Node configuration
if System.get_env("ENABLE_LIBCLUSTER") do
config :libcluster,
topologies: [
example: [
strategy: Cluster.Strategy.Epmd,
config: [
hosts: [
:"glimesh@do-nyc3-web1.us-east.web.glimesh.tv",
:"glimesh@do-nyc3-web2.us-east.web.glimesh.tv",
:"glimesh@do-nyc3-web3.us-east.web.glimesh.tv"
]
]
]
]
end
# Database Configuration
database_url =
System.get_env("DATABASE_URL") ||
raise """
environment variable DATABASE_URL is missing.
For example: ecto://USER:PASS@HOST/DATABASE
"""
config :glimesh, Glimesh.Repo,
# ssl: true,
url: database_url,
pool_size: String.to_integer(System.get_env("POOL_SIZE", "10"))
# Endpoint Configuration
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
"""
live_view_signing_salt =
System.get_env("LIVE_VIEW_SIGNING_SALT") ||
raise """
environment variable LIVE_VIEW_SIGNING_SALT is missing.
You can generate one by calling: mix phx.gen.secret 32
"""
url_host = System.fetch_env!("URL_HOST")
url_port = System.fetch_env!("URL_PORT")
url_scheme = System.fetch_env!("URL_SCHEME")
config :glimesh, GlimeshWeb.Endpoint,
server: true,
cache_static_manifest: "priv/static/cache_manifest.json",
canonical_host: url_host,
url: [
scheme: url_scheme,
host: url_host,
port: url_port
],
secret_key_base: secret_key_base,
live_view: [signing_salt: live_view_signing_salt]
if http_port = System.get_env("HTTP_PORT") do
config :glimesh, GlimeshWeb.Endpoint,
http: [
port: http_port
]
end
if https_port = System.get_env("HTTPS_PORT") do
https_key_file = System.fetch_env!("HTTPS_KEY_FILE")
https_cert_file = System.fetch_env!("HTTPS_CERT_FILE")
https_cacert_file = System.fetch_env!("HTTPS_CACERT_FILE")
config :glimesh, GlimeshWeb.Endpoint,
https: [
port: https_port,
cipher_suite: :strong,
keyfile: https_key_file,
certfile: https_cert_file,
cacertfile: https_cacert_file,
transport_options: [socket_opts: [:inet6]]
]
end
# Email Configuration
if mailgun_api_key = System.get_env("MAILGUN_API_KEY") do
mailgun_domain = System.fetch_env!("MAILGUN_DOMAIN")
config :glimesh, GlimeshWeb.Emails.Mailer,
adapter: Bamboo.MailgunAdapter,
api_key: mailgun_api_key,
domain: mailgun_domain
end
# Stripe Configuration
stripe_public_api_key = System.fetch_env!("STRIPE_PUBLIC_API_KEY")
stripe_api_key = System.fetch_env!("STRIPE_API_KEY")
stripe_connect_client_id = System.fetch_env!("STRIPE_CONNECT_CLIENT_ID")
stripe_webhook_secret = System.fetch_env!("STRIPE_WEBHOOK_SECRET")
config :stripity_stripe,
public_api_key: stripe_public_api_key,
api_key: stripe_api_key,
connect_client_id: stripe_connect_client_id,
webhook_secret: stripe_webhook_secret
# hCaptcha Configuration
if hcaptcha_public_key = System.get_env("HCAPTCHA_PUBLIC_KEY") do
hcaptcha_secret = System.fetch_env!("HCAPTCHA_SECRET")
config :hcaptcha,
public_key: hcaptcha_public_key,
secret: hcaptcha_secret
end
# Waffle Configuration
if System.get_env("WAFFLE_ENDPOINT") == "S3" do
do_spaces_public_key = System.fetch_env!("DO_SPACES_PUBLIC_KEY")
do_spaces_private_key = System.fetch_env!("DO_SPACES_PRIVATE_KEY")
do_spaces_bucket = System.fetch_env!("DO_SPACES_BUCKET")
waffle_asset_host = System.fetch_env!("WAFFLE_ASSET_HOST")
config :waffle,
storage: Waffle.Storage.S3,
bucket: do_spaces_bucket,
asset_host: waffle_asset_host
config :ex_aws,
access_key_id: do_spaces_public_key,
secret_access_key: do_spaces_private_key,
region: "us-east-1",
s3: [
scheme: "https://",
host: "nyc3.digitaloceanspaces.com",
region: "us-east-1"
]
end
# Twitter Config
if twitter_consumer_key = System.get_env("TWITTER_CONSUMER_KEY") do
twitter_consumer_secret = System.fetch_env!("TWITTER_CONSUMER_SECRET")
twitter_access_token = System.fetch_env!("TWITTER_ACCESS_TOKEN")
twitter_access_secret = System.fetch_env!("TWITTER_ACCESS_SECRET")
config :glimesh, Glimesh.Socials.Twitter,
consumer_key: twitter_consumer_key,
consumer_secret: twitter_consumer_secret,
access_token: twitter_access_token,
access_token_secret: twitter_access_secret
end
if appsignal_api_key = System.get_env("APPSIGNAL_API_KEY") do
config :appsignal, :config,
active: true,
otp_app: :glimesh,
name: System.fetch_env!("APPSIGNAL_NAME"),
push_api_key: appsignal_api_key,
env: "prod",
skip_session_data: true,
filter_parameters: [
"password",
"hashed_password",
"email",
"user",
"streamKey",
"stream_key",
"hmac_key",
"hmacKey",
"token",
"api_token",
"client_secret",
"secret",
"tfa_token",
"refresh_token"
]
end
# Twitter Config
if taxidpro_api_key = System.get_env("TAXIDPRO_API_KEY") do
taxidpro_webhook_secret = System.fetch_env!("TAXIDPRO_WEBHOOK_SECRET")
config :glimesh, Glimesh.PaymentProviders.TaxIDPro,
webhook_secret: taxidpro_webhook_secret,
api_key: taxidpro_api_key
end
# Rawg Config
if rawg_api_key = System.get_env("RAWG_API_KEY") do
config :glimesh, Glimesh.Subcategories.RawgSource, api_key: rawg_api_key
end
# Glimesh Configuration
if email_physical_address = System.get_env("GLIMESH_EMAIL_PHYSICAL_ADDRESS") do
config :glimesh,
email_physical_address: email_physical_address
end
if show_staging_warning = System.get_env("GLIMESH_SHOW_STAGING_WARNING") do
config :glimesh,
show_staging_warning: show_staging_warning
end
if System.get_env("GLIMESH_START_WORKERS") do
config :glimesh,
start_workers: true
end
# Default App Config
config :glimesh, :stripe_config,
platform_sub_supporter_product_id: "prod_I60rR8YatfJpEV",
platform_sub_supporter_price_id: "price_1HVoq1BLNaYgaiU5EMayvTwj",
platform_sub_supporter_price: 500,
platform_sub_founder_product_id: "prod_I60rQdgrge5imp",
platform_sub_founder_price_id: "price_1HVopMBLNaYgaiU5drbv5cVL",
platform_sub_founder_price: 2500,
channel_sub_base_product_id: "prod_I60qVBVw8n1Y1e",
channel_sub_base_price_id: "price_1HVoopBLNaYgaiU5r5JTEEoj",
channel_sub_base_price: 500
| 29.355556 | 79 | 0.742619 |
1cd1fb14312e0379b4171d58dbda928d94c5620a | 1,930 | ex | Elixir | clients/ad_exchange_seller/lib/google_api/ad_exchange_seller/v20/model/saved_reports.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_seller/lib/google_api/ad_exchange_seller/v20/model/saved_reports.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_seller/lib/google_api/ad_exchange_seller/v20/model/saved_reports.ex | ericrwolfe/elixir-google-api | 3dc0f17edd5e2d6843580c16ddae3bf84b664ffd | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeSeller.V20.Model.SavedReports do
  @moduledoc """
  One page of saved reports returned by the Ad Exchange Seller API list call.

  Auto-generated model — field names mirror the JSON wire format (camelCase).

  ## Attributes

  - etag (String): ETag of this response for caching purposes. Defaults to: `null`.
  - items (List[SavedReport]): The saved reports returned in this list response. Defaults to: `null`.
  - kind (String): Kind of list this is, in this case adexchangeseller#savedReports. Defaults to: `null`.
  - nextPageToken (String): Continuation token used to page through saved reports. To retrieve the next page of results, set the next request's \"pageToken\" value to this. Defaults to: `null`.
  """

  defstruct [
    :etag,
    :items,
    :kind,
    :nextPageToken
  ]
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeSeller.V20.Model.SavedReports do
  import GoogleApi.AdExchangeSeller.V20.Deserializer

  # Decode the nested `items` list into `SavedReport` structs; the scalar
  # fields (etag/kind/nextPageToken) keep whatever Poison decoded.
  def decode(value, options) do
    value
    |> deserialize(:items, :list, GoogleApi.AdExchangeSeller.V20.Model.SavedReport, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeSeller.V20.Model.SavedReports do
  # Serialize while dropping nil fields so optional attributes are omitted
  # from the JSON payload rather than sent as `null`.
  def encode(value, options) do
    GoogleApi.AdExchangeSeller.V20.Deserializer.serialize_non_nil(value, options)
  end
end
| 36.415094 | 207 | 0.752332 |
1cd205e375c2832acb8e5e725c05875083ecb4da | 1,885 | ex | Elixir | clients/partners/lib/google_api/partners/v2/model/localized_company_info.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/partners/lib/google_api/partners/v2/model/localized_company_info.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/partners/lib/google_api/partners/v2/model/localized_company_info.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Partners.V2.Model.LocalizedCompanyInfo do
  @moduledoc """
  The localized company information.

  Auto-generated model — the quoted-atom keys preserve the camelCase names
  used on the JSON wire format.

  ## Attributes

  - countryCodes (List[String]): List of country codes for the localized company info. Defaults to: `null`.
  - displayName (String): Localized display name. Defaults to: `null`.
  - languageCode (String): Language code of the localized company info, as defined by <a href=\"https://tools.ietf.org/html/bcp47\">BCP 47</a> (IETF BCP 47, \"Tags for Identifying Languages\"). Defaults to: `null`.
  - overview (String): Localized brief description that the company uses to advertise themselves. Defaults to: `null`.
  """

  defstruct [
    :"countryCodes",
    :"displayName",
    :"languageCode",
    :"overview"
  ]
end
defimpl Poison.Decoder, for: GoogleApi.Partners.V2.Model.LocalizedCompanyInfo do
  # No nested model fields to deserialize — pass the decoded map through.
  def decode(value, _options) do
    value
  end
end
defimpl Poison.Encoder, for: GoogleApi.Partners.V2.Model.LocalizedCompanyInfo do
  # Serialize while dropping nil fields so optional attributes are omitted
  # from the JSON payload rather than sent as `null`.
  def encode(value, options) do
    GoogleApi.Partners.V2.Deserializer.serialize_non_nil(value, options)
  end
end
| 36.25 | 251 | 0.744828 |
1cd221e8967a3dd6958db829b51c01be8003488d | 2,388 | ex | Elixir | farmbot_os/lib/farmbot_os/lua/ext/info.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | 1 | 2021-08-23T13:36:14.000Z | 2021-08-23T13:36:14.000Z | farmbot_os/lib/farmbot_os/lua/ext/info.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | farmbot_os/lib/farmbot_os/lua/ext/info.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
defmodule FarmbotOS.Lua.Ext.Info do
  @moduledoc """
  Lua extensions for gathering information about a running Farmbot.

  Every function follows the Lua-FFI convention: it takes a list of Lua
  arguments plus the Lua interpreter state, and returns `{[results], lua}`.
  """

  alias FarmbotCeleryScript.SysCalls
  alias FarmbotOS.Lua.Util

  @doc """
  # Example Usage

  ## With channels
  farmbot.send_message("info", "hello, world", ["email", "toast"])

  ## No channels
  farmbot.send_message("info", "hello, world")
  """
  def send_message([kind, message], lua) do
    do_send_message(kind, message, [], lua)
  end

  def send_message([kind, message, channels], lua) do
    channels =
      channels
      |> List.wrap()
      # Lua tables arrive as {key, value} pairs; keep only the values.
      |> Enum.map(fn
        {_key, value} -> value
        value -> value
      end)
      # SECURITY: channel names originate from user-supplied Lua code.
      # `String.to_existing_atom/1` (instead of `String.to_atom/1`) prevents a
      # script from growing the atom table — atoms are never garbage
      # collected. Valid channel atoms already exist in the running system;
      # an unknown channel name now raises instead of minting a new atom.
      |> Enum.map(&String.to_existing_atom/1)

    do_send_message(kind, message, channels, lua)
  end

  @doc "Returns data about the bot's state"
  def read_status([], lua) do
    bot_state = FarmbotCore.BotState.fetch() |> FarmbotCore.BotStateNG.view()
    {[Util.map_to_table(bot_state)], lua}
  end

  def read_status(path, lua) do
    bot_state = FarmbotCore.BotState.fetch() |> FarmbotCore.BotStateNG.view()

    # SECURITY: path segments come from Lua; only convert to atoms that
    # already exist (all bot-state keys do) — never mint atoms from input.
    path = path |> List.flatten() |> Enum.map(&String.to_existing_atom/1)

    case get_in(bot_state, path) do
      # Sub-maps are converted to Lua tables; scalars pass through as-is.
      %{} = map ->
        {[Util.map_to_table(map)], lua}

      other ->
        {[other], lua}
    end
  end

  @doc "Returns the current version of farmbot."
  def fbos_version(_args, lua) do
    {[FarmbotCore.Project.version(), nil], lua}
  end

  @doc "Returns the current firmware version."
  def firmware_version(_args, lua) do
    state = FarmbotCore.BotStateNG.view(FarmbotCore.BotState.fetch())
    v = state.informational_settings.firmware_version
    {[v, nil], lua}
  end

  @doc "Returns the current month"
  def current_month(_args, lua) do
    {[DateTime.utc_now().month], lua}
  end

  @doc "Returns the current hour"
  def current_hour(_args, lua) do
    {[DateTime.utc_now().hour], lua}
  end

  @doc "Returns the current minute"
  def current_minute(_args, lua) do
    {[DateTime.utc_now().minute], lua}
  end

  @doc "Returns the current second"
  def current_second(_args, lua) do
    {[DateTime.utc_now().second], lua}
  end

  # Dispatches the message through CeleryScript and translates the result
  # into Lua's `(ok, err)` two-value convention.
  defp do_send_message(kind, message, channels, lua) do
    result = SysCalls.send_message(kind, "#{message}", channels)

    case result do
      :ok ->
        {[true, nil], lua}

      {:error, reason} ->
        {[nil, reason], lua}
    end
  end
end
| 23.411765 | 77 | 0.642797 |
1cd235082853d150c3e2d63d60c7d7e22d62c3e5 | 69 | ex | Elixir | lib/bloggex_web/views/admin/layout_view.ex | dreamingechoes/bloggex | 9ead10ec1fd8fda0da3cb08106c43a9043188199 | [
"MIT"
] | 1 | 2020-01-14T03:17:51.000Z | 2020-01-14T03:17:51.000Z | lib/bloggex_web/views/admin/layout_view.ex | dreamingechoes/bloggex | 9ead10ec1fd8fda0da3cb08106c43a9043188199 | [
"MIT"
] | null | null | null | lib/bloggex_web/views/admin/layout_view.ex | dreamingechoes/bloggex | 9ead10ec1fd8fda0da3cb08106c43a9043188199 | [
"MIT"
defmodule BloggexWeb.Admin.LayoutView do
  # View module backing the admin layout templates; `use BloggexWeb, :view`
  # pulls in the app's standard view helpers. No custom logic needed here.
  @moduledoc false

  use BloggexWeb, :view
end
| 17.25 | 40 | 0.811594 |
1cd246041d124f2ae3054ae4b0296dcc86e5eb75 | 760 | exs | Elixir | apps/otp_2/rel/config.exs | WhiteRookPL/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 14 | 2017-08-09T14:21:47.000Z | 2022-03-11T04:10:49.000Z | apps/otp_2/rel/config.exs | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | null | null | null | apps/otp_2/rel/config.exs | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
# Distillery release configuration.
use Mix.Releases.Config,
  default_release: :default,
  default_environment: Mix.env()

# Distributed-Erlang cookie for dev nodes.
# NOTE(review): cookies committed to source control should be placeholders —
# verify real values are injected at build/deploy time.
cookie_dev = :"DEV_COOKIE"

environment :dev do
  # dev_mode symlinks build artifacts for fast iteration; ERTS is taken
  # from the host install rather than bundled.
  set dev_mode: true
  set include_erts: false
  set cookie: cookie_dev
  set overlay_vars: [cookie: cookie_dev]
  set vm_args: "rel/vm.args"
end

cookie_prod = :"PROD_COOKIE"

environment :prod do
  # Production releases are self-contained: bundle ERTS, omit source.
  set include_erts: true
  set include_src: false
  set cookie: cookie_prod
  set overlay_vars: [cookie: cookie_prod]
  set vm_args: "rel/vm.args"
end

release :chatterboxes do
  set version: "1.0.0"

  # Applications bundled into the release. Debug/observability tooling
  # (xprof, recon, eper, dbg) ships permanently alongside the app.
  set applications: [
    sasl: :permanent,
    logger: :permanent,
    chatterboxes: :permanent,
    runtime_tools: :permanent,
    xprof: :permanent,
    recon: :permanent,
    eper: :permanent,
    dbg: :permanent
  ]
end
1cd25c1fa52e1dd2eeab35c16fe2cac07375a470 | 2,942 | exs | Elixir | test/doctor_schedule_web/controllers/api/appointment_controller_test.exs | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 2 | 2022-03-11T12:15:01.000Z | 2022-03-11T13:53:21.000Z | test/doctor_schedule_web/controllers/api/appointment_controller_test.exs | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 3 | 2020-12-12T22:10:17.000Z | 2021-04-05T12:53:12.000Z | test/doctor_schedule_web/controllers/api/appointment_controller_test.exs | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
defmodule DoctorScheduleWeb.Api.AppointmentControllerTest do
  use DoctorScheduleWeb.ConnCase

  import DoctorScheduleWeb.Auth.Guardian

  alias DoctorSchedule.AppointmentFixture
  alias DoctorSchedule.Appointments.Entities.Appointment
  alias DoctorSchedule.UserFixture

  # Every request is authenticated: create a user, sign a Guardian access
  # token, and attach it as a bearer header. The user's id is exposed to
  # tests (and to `create_appointment/1`) via the context.
  setup %{conn: conn} do
    user = UserFixture.create_user()
    {:ok, token, _} = encode_and_sign(user, %{}, token_type: :access)

    conn =
      conn
      |> put_req_header("accept", "application/json")
      |> put_req_header("authorization", "bearer " <> token)

    {:ok, conn: conn, user_id: user.id}
  end

  describe "index" do
    test "lists all appointments", %{conn: conn} do
      conn = get(conn, Routes.api_appointment_path(conn, :index))
      assert json_response(conn, 200) == []
    end
  end

  describe "create appointment" do
    test "renders appointment when data is valid", %{conn: conn} do
      provider = UserFixture.create_provider()

      # Book tomorrow at 17:00 so the slot is always in the future.
      date =
        Timex.now()
        |> Timex.shift(days: 1)
        |> Timex.to_naive_datetime()

      date =
        %NaiveDateTime{date | hour: 17}
        |> NaiveDateTime.to_iso8601()

      conn =
        post(conn, Routes.api_appointment_path(conn, :create),
          appointment: %{date: date, provider_id: provider.id}
        )

      assert %{"id" => id} = json_response(conn, 201)

      conn = get(conn, Routes.api_appointment_path(conn, :show, id))

      assert id == json_response(conn, 200)["id"]
    end
  end

  describe "update appointment" do
    setup [:create_appointment]

    test "renders appointment when data is valid", %{
      conn: conn,
      appointment: %Appointment{id: id} = appointment
    } do
      now = DateTime.utc_now()

      conn =
        put(conn, Routes.api_appointment_path(conn, :update, appointment),
          appointment: %{date: now}
        )

      assert %{"id" => ^id} = json_response(conn, 200)

      conn = get(conn, Routes.api_appointment_path(conn, :show, id))

      # NOTE(review): the original pattern `%{"id" => id, "date" => now}`
      # rebound (rather than pinned) both variables, so it only checked key
      # presence. The underscored bindings below make that explicit without
      # changing what is verified. TODO: pin `^` against the expected
      # serialized values once the JSON date format is confirmed.
      assert %{
               "id" => _id,
               "date" => _date
             } = json_response(conn, 200)
    end

    test "renders errors when data is invalid", %{conn: conn, appointment: appointment} do
      conn =
        put(conn, Routes.api_appointment_path(conn, :update, appointment),
          appointment: %{date: nil}
        )

      assert json_response(conn, 422)["errors"] != %{}
    end
  end

  describe "delete appointment" do
    setup [:create_appointment]

    test "deletes chosen appointment", %{conn: conn, appointment: appointment} do
      conn = delete(conn, Routes.api_appointment_path(conn, :delete, appointment))
      assert response(conn, 204)

      # The deleted record must no longer be retrievable.
      assert_error_sent 404, fn ->
        get(conn, Routes.api_appointment_path(conn, :show, appointment))
      end
    end
  end

  # Setup helper: inserts an appointment fixture owned by the authenticated
  # user and merges it into the test context.
  defp create_appointment(setup_conn) do
    user_id = setup_conn.user_id
    %{appointment: AppointmentFixture.appointment_fixture(user_id)}
  end
end
| 27.495327 | 90 | 0.639361 |
1cd26c12cf0dd05256470e503a3ec4d7cdefed99 | 20,530 | ex | Elixir | lib/phoenix_live_view/upload_config.ex | ucwaldo/phoenix_live_view | aa192420dc3b3b4158bbc7582ce39cb149221170 | [
"MIT"
] | 2 | 2020-05-06T14:51:56.000Z | 2020-05-06T14:52:21.000Z | lib/phoenix_live_view/upload_config.ex | ucwaldo/phoenix_live_view | aa192420dc3b3b4158bbc7582ce39cb149221170 | [
"MIT"
] | 3 | 2021-06-20T14:51:14.000Z | 2021-06-25T00:56:11.000Z | lib/phoenix_live_view/upload_config.ex | ucwaldo/phoenix_live_view | aa192420dc3b3b4158bbc7582ce39cb149221170 | [
"MIT"
defmodule Phoenix.LiveView.UploadEntry do
  @moduledoc """
  The struct representing an upload entry.
  """

  alias Phoenix.LiveView.UploadEntry

  defstruct progress: 0,
            preflighted?: false,
            upload_config: nil,
            upload_ref: nil,
            ref: nil,
            uuid: nil,
            valid?: false,
            done?: false,
            cancelled?: false,
            client_name: nil,
            client_size: nil,
            client_type: nil,
            client_last_modified: nil

  @type t :: %__MODULE__{
          progress: integer(),
          preflighted?: boolean(),
          # Fixed: was `String.t() | :atom` — `:atom` is the literal atom;
          # the intended type is `atom()` (the owning upload's name, which
          # UploadConfig declares as `atom() | String.t()`).
          upload_config: atom() | String.t(),
          upload_ref: String.t(),
          ref: String.t() | nil,
          uuid: String.t() | nil,
          valid?: boolean(),
          done?: boolean(),
          cancelled?: boolean(),
          client_name: String.t() | nil,
          client_size: integer() | nil,
          client_type: String.t() | nil,
          client_last_modified: integer() | nil
        }

  @doc false
  # Records client-reported progress. Hitting 100 also flags the entry as
  # done; lower values leave `done?` untouched.
  def put_progress(%UploadEntry{} = entry, 100) do
    %UploadEntry{entry | progress: 100, done?: true}
  end

  def put_progress(%UploadEntry{} = entry, progress) do
    %UploadEntry{entry | progress: progress}
  end
end
defmodule Phoenix.LiveView.UploadConfig do
  @moduledoc """
  The struct representing an upload.
  """

  alias Phoenix.LiveView.UploadConfig
  alias Phoenix.LiveView.UploadEntry

  # Defaults applied by `build/3` when the corresponding option is omitted.
  @default_max_entries 1
  @default_max_file_size 8_000_000
  @default_chunk_size 64_000
  @default_chunk_timeout 10_000

  # Sentinels stored in `entry_refs_to_pids` before/when no channel owns an
  # entry, and the config-level error tag for exceeding `max_entries`.
  @unregistered :unregistered
  @invalid :invalid
  @too_many_files :too_many_files

  # `@derive {Inspect, only: ...}` requires Elixir >= 1.8; keeps noisy
  # internal bookkeeping fields out of inspect output.
  if Version.match?(System.version(), ">= 1.8.0") do
    @derive {Inspect,
             only: [
               :name,
               :ref,
               :entries,
               :max_entries,
               :max_file_size,
               :accept,
               :errors,
               :auto_upload?,
               :progress_event
             ]}
  end

  defstruct name: nil,
            cid: :unregistered,
            client_key: nil,
            max_entries: @default_max_entries,
            max_file_size: @default_max_file_size,
            chunk_size: @default_chunk_size,
            chunk_timeout: @default_chunk_timeout,
            entries: [],
            entry_refs_to_pids: %{},
            entry_refs_to_metas: %{},
            accept: [],
            acceptable_types: MapSet.new(),
            acceptable_exts: MapSet.new(),
            external: false,
            allowed?: false,
            ref: nil,
            errors: [],
            auto_upload?: false,
            progress_event: nil

  @type t :: %__MODULE__{
          name: atom() | String.t(),
          # a nil cid represents a LiveView socket
          cid: :unregistered | nil | integer(),
          client_key: String.t(),
          max_entries: pos_integer(),
          max_file_size: pos_integer(),
          entries: list(),
          entry_refs_to_pids: %{String.t() => pid() | :unregistered | :done},
          entry_refs_to_metas: %{String.t() => map()},
          accept: list() | :any,
          acceptable_types: MapSet.t(),
          acceptable_exts: MapSet.t(),
          external:
            (UploadEntry.t(), Phoenix.LiveView.Socket.t() ->
               {:ok | :error, meta :: %{uploader: String.t()}, Phoenix.LiveView.Socket.t()})
            | false,
          allowed?: boolean,
          errors: list(),
          ref: String.t(),
          auto_upload?: boolean(),
          progress_event:
            (name :: atom() | String.t(), UploadEntry.t(), Phoenix.LiveView.Socket.t() ->
               {:noreply, Phoenix.LiveView.Socket.t()})
            | nil
        }

  @doc false
  # we require a random_ref in order to ensure unique calls to `allow_upload`
  # invalidate old uploads on the client and expire old tokens for the same
  # upload name
  def build(name, random_ref, [_ | _] = opts) when is_atom(name) do
    {html_accept, acceptable_types, acceptable_exts} =
      case Keyword.fetch(opts, :accept) do
        {:ok, [_ | _] = accept} ->
          {types, exts} = validate_accept_option(accept)
          {Enum.join(accept, ","), types, exts}

        {:ok, :any} ->
          {:any, MapSet.new(), MapSet.new()}

        {:ok, other} ->
          raise ArgumentError, """
          invalid accept filter provided to allow_upload.

          A list of the following unique file type specifiers are supported:

          * A valid case-insensitive filename extension, starting with a period (".") character.
          For example: .jpg, .pdf, or .doc.

          * A valid MIME type string, with no extensions.

          Alternately, you can provide the atom :any to allow any kind of file. Got:

          #{inspect(other)}
          """

        :error ->
          raise ArgumentError, """
          the :accept option is required when allowing uploads.

          Provide a list of unique file type specifiers or the atom :any to allow any kind of file.
          """
      end

    external =
      case Keyword.fetch(opts, :external) do
        {:ok, func} when is_function(func, 2) ->
          func

        {:ok, other} ->
          # Fixed typo in user-facing message: "anymous" -> "anonymous".
          raise ArgumentError, """
          invalid :external value provided to allow_upload.

          Only an anonymous function receiving the socket as an argument is supported. Got:

          #{inspect(other)}
          """

        :error ->
          false
      end

    max_entries =
      case Keyword.fetch(opts, :max_entries) do
        {:ok, pos_integer} when is_integer(pos_integer) and pos_integer > 0 ->
          pos_integer

        {:ok, other} ->
          raise ArgumentError, """
          invalid :max_entries value provided to allow_upload.

          Only a positive integer is supported (Defaults to #{@default_max_entries}). Got:

          #{inspect(other)}
          """

        :error ->
          @default_max_entries
      end

    max_file_size =
      case Keyword.fetch(opts, :max_file_size) do
        {:ok, pos_integer} when is_integer(pos_integer) and pos_integer > 0 ->
          pos_integer

        {:ok, other} ->
          raise ArgumentError, """
          invalid :max_file_size value provided to allow_upload.

          Only a positive integer is supported (Defaults to #{@default_max_file_size} bytes). Got:

          #{inspect(other)}
          """

        :error ->
          @default_max_file_size
      end

    chunk_size =
      case Keyword.fetch(opts, :chunk_size) do
        {:ok, pos_integer} when is_integer(pos_integer) and pos_integer > 0 ->
          pos_integer

        {:ok, other} ->
          raise ArgumentError, """
          invalid :chunk_size value provided to allow_upload.

          Only a positive integer is supported (Defaults to #{@default_chunk_size} bytes). Got:

          #{inspect(other)}
          """

        :error ->
          @default_chunk_size
      end

    chunk_timeout =
      case Keyword.fetch(opts, :chunk_timeout) do
        {:ok, pos_integer} when is_integer(pos_integer) and pos_integer > 0 ->
          pos_integer

        {:ok, other} ->
          raise ArgumentError, """
          invalid :chunk_timeout value provided to allow_upload.

          Only a positive integer in milliseconds is supported (Defaults to #{@default_chunk_timeout} ms). Got:

          #{inspect(other)}
          """

        :error ->
          @default_chunk_timeout
      end

    progress_event =
      case Keyword.fetch(opts, :progress) do
        {:ok, func} when is_function(func, 3) ->
          func

        {:ok, other} ->
          raise ArgumentError, """
          invalid :progress value provided to allow_upload.

          Only 3-arity anonymous function is supported. Got:

          #{inspect(other)}
          """

        :error ->
          nil
      end

    %UploadConfig{
      ref: random_ref,
      name: name,
      max_entries: max_entries,
      max_file_size: max_file_size,
      entry_refs_to_pids: %{},
      entry_refs_to_metas: %{},
      accept: html_accept,
      acceptable_types: acceptable_types,
      acceptable_exts: acceptable_exts,
      external: external,
      chunk_size: chunk_size,
      chunk_timeout: chunk_timeout,
      progress_event: progress_event,
      auto_upload?: Keyword.get(opts, :auto_upload, false),
      allowed?: true
    }
  end

  @doc false
  # Returns the channel pid owning the entry, or nil while the entry has not
  # yet registered (or was marked invalid).
  def entry_pid(%UploadConfig{} = conf, %UploadEntry{} = entry) do
    case Map.fetch(conf.entry_refs_to_pids, entry.ref) do
      {:ok, pid} when is_pid(pid) -> pid
      {:ok, status} when status in [@unregistered, @invalid] -> nil
    end
  end

  @doc false
  def get_entry_by_pid(%UploadConfig{} = conf, channel_pid) when is_pid(channel_pid) do
    Enum.find_value(conf.entry_refs_to_pids, fn {ref, pid} ->
      if channel_pid == pid do
        get_entry_by_ref(conf, ref)
      end
    end)
  end

  @doc false
  def get_entry_by_ref(%UploadConfig{} = conf, ref) do
    Enum.find(conf.entries, fn %UploadEntry{} = entry -> entry.ref === ref end)
  end

  @doc false
  def unregister_completed_external_entry(%UploadConfig{} = conf, entry_ref) do
    %UploadEntry{} = entry = get_entry_by_ref(conf, entry_ref)

    drop_entry(conf, entry)
  end

  @doc false
  def unregister_completed_entry(%UploadConfig{} = conf, entry_ref) do
    %UploadEntry{} = entry = get_entry_by_ref(conf, entry_ref)

    drop_entry(conf, entry)
  end

  @doc false
  # NOTE(review): despite the `?` suffix this returns the found
  # `{ref, pid}` tuple or nil (truthy/falsy), not a strict boolean.
  # Kept as-is in case a caller relies on the tuple — verify before
  # converting to `Enum.any?/2`.
  def registered?(%UploadConfig{} = conf) do
    Enum.find(conf.entry_refs_to_pids, fn {_ref, maybe_pid} -> is_pid(maybe_pid) end)
  end

  @doc false
  # Marks every entry as preflighted and returns the entries that were
  # still awaiting preflight before this call.
  def mark_preflighted(%UploadConfig{} = conf) do
    refs_awaiting = refs_awaiting_preflight(conf)

    new_conf = %UploadConfig{
      conf
      | entries: for(entry <- conf.entries, do: %UploadEntry{entry | preflighted?: true})
    }

    {new_conf, for(ref <- refs_awaiting, do: get_entry_by_ref(new_conf, ref))}
  end

  defp refs_awaiting_preflight(%UploadConfig{} = conf) do
    for entry <- conf.entries, not entry.preflighted?, do: entry.ref
  end

  @doc false
  # Binds a joining upload channel to an entry. Fails if the entry already
  # has a channel or was never allowed.
  def register_entry_upload(%UploadConfig{} = conf, channel_pid, entry_ref)
      when is_pid(channel_pid) do
    case Map.fetch(conf.entry_refs_to_pids, entry_ref) do
      {:ok, @unregistered} ->
        {:ok,
         %UploadConfig{
           conf
           | entry_refs_to_pids: Map.put(conf.entry_refs_to_pids, entry_ref, channel_pid)
         }}

      {:ok, existing_pid} when is_pid(existing_pid) ->
        {:error, :already_registered}

      :error ->
        {:error, :disallowed}
    end
  end

  # specifics on the `accept` attribute are illuminated in the spec:
  # https://html.spec.whatwg.org/multipage/input.html#attr-input-accept
  @accept_wildcards ~w(audio/* image/* video/*)

  # Splits the user-provided accept list into MIME types and extensions.
  defp validate_accept_option(accept) do
    {types, exts} =
      Enum.reduce(accept, {[], []}, fn opt, {types_acc, exts_acc} ->
        {type, exts} = accept_option!(opt)
        {[type | types_acc], exts ++ exts_acc}
      end)

    {MapSet.new(types), MapSet.new(exts)}
  end

  # wildcards for media files
  defp accept_option!(key) when key in @accept_wildcards, do: {key, []}

  defp accept_option!(<<"." <> extname::binary>> = ext) do
    if MIME.has_type?(extname) do
      {MIME.type(extname), [ext]}
    else
      raise ArgumentError, """
      invalid accept filter provided to allow_upload.

      Expected a file extension with a known MIME type.

      MIME types can be extended in your application configuration as follows:

      config :mime, :types, %{
        "application/vnd.api+json" => ["json-api"]
      }

      Got:

      #{inspect(extname)}
      """
    end
  end

  defp accept_option!(filter) when is_binary(filter) do
    if MIME.extensions(filter) != [] do
      {filter, []}
    else
      raise ArgumentError, """
      invalid accept filter provided to allow_upload.

      Expected a known MIME type without parameters.

      MIME types can be extended in your application configuration as follows:

      config :mime, :types, %{
        "application/vnd.api+json" => ["json-api"]
      }

      Got:

      #{inspect(filter)}
      """
    end
  end

  @doc false
  def disallow(%UploadConfig{} = conf), do: %UploadConfig{conf | allowed?: false}

  @doc false
  def uploaded_entries(%UploadConfig{} = conf) do
    Enum.filter(conf.entries, fn %UploadEntry{} = entry -> entry.progress == 100 end)
  end

  @doc false
  # Applies `func` to the entry with `entry_ref`, then refreshes derived
  # state (currently the :too_many_files error).
  def update_entry(%UploadConfig{} = conf, entry_ref, func) do
    new_entries =
      Enum.map(conf.entries, fn
        %UploadEntry{ref: ^entry_ref} = entry -> func.(entry)
        %UploadEntry{ref: _ef} = entry -> entry
      end)

    recalculate_computed_fields(%UploadConfig{conf | entries: new_entries})
  end

  @doc false
  def update_progress(%UploadConfig{} = conf, entry_ref, progress)
      when is_integer(progress) and progress >= 0 and progress <= 100 do
    update_entry(conf, entry_ref, fn entry -> UploadEntry.put_progress(entry, progress) end)
  end

  @doc false
  def update_entry_meta(%UploadConfig{} = conf, entry_ref, %{} = meta) do
    case Map.fetch(meta, :uploader) do
      {:ok, _} ->
        :noop

      :error ->
        raise ArgumentError,
              "external uploader metadata requires an :uploader key. Got: #{inspect(meta)}"
    end

    new_metas = Map.put(conf.entry_refs_to_metas, entry_ref, meta)
    %UploadConfig{conf | entry_refs_to_metas: new_metas}
  end

  @doc false
  # Validates and admits client-declared entries. Entries already known by
  # ref are skipped; for single-entry uploads a new selection replaces the
  # existing one.
  def put_entries(%UploadConfig{} = conf, entries) do
    new_entries =
      for entry <- entries, !get_entry_by_ref(conf, Map.fetch!(entry, "ref")), do: entry

    pruned_conf = maybe_replace_sole_entry(conf, new_entries)

    new_conf =
      Enum.reduce(new_entries, pruned_conf, fn client_entry, acc ->
        case cast_and_validate_entry(acc, client_entry) do
          {:ok, new_conf} -> new_conf
          {:error, new_conf} -> new_conf
        end
      end)

    if too_many_files?(new_conf) do
      {:error, put_error(new_conf, new_conf.ref, @too_many_files)}
    else
      case new_conf do
        %UploadConfig{errors: []} = new_conf ->
          {:ok, new_conf}

        %UploadConfig{errors: [_ | _]} = new_conf ->
          {:error, new_conf}
      end
    end
  end

  defp maybe_replace_sole_entry(%UploadConfig{max_entries: 1} = conf, new_entries) do
    with [entry] <- conf.entries,
         [_new_entry] <- new_entries do
      cancel_entry(conf, entry)
    else
      _ -> conf
    end
  end

  defp maybe_replace_sole_entry(%UploadConfig{} = conf, _new_entries) do
    conf
  end

  defp too_many_files?(%UploadConfig{entries: entries, max_entries: max}) do
    length(entries) > max
  end

  # Builds an UploadEntry from the raw client payload and validates size and
  # accept rules. The assertive `:error =` match guarantees the ref is new.
  defp cast_and_validate_entry(%UploadConfig{} = conf, %{"ref" => ref} = client_entry) do
    :error = Map.fetch(conf.entry_refs_to_pids, ref)

    entry = %UploadEntry{
      ref: ref,
      upload_ref: conf.ref,
      upload_config: conf.name,
      client_name: Map.fetch!(client_entry, "name"),
      client_size: Map.fetch!(client_entry, "size"),
      client_type: Map.fetch!(client_entry, "type"),
      client_last_modified: Map.get(client_entry, "last_modified")
    }

    {:ok, entry}
    |> validate_max_file_size(conf)
    |> validate_accepted(conf)
    |> case do
      {:ok, entry} ->
        {:ok, put_valid_entry(conf, entry)}

      {:error, reason} ->
        {:error, put_invalid_entry(conf, entry, reason)}
    end
  end

  defp put_valid_entry(conf, entry) do
    entry = %UploadEntry{entry | valid?: true, uuid: generate_uuid()}
    new_pids = Map.put(conf.entry_refs_to_pids, entry.ref, @unregistered)
    new_metas = Map.put(conf.entry_refs_to_metas, entry.ref, %{})

    %UploadConfig{
      conf
      | entries: conf.entries ++ [entry],
        entry_refs_to_pids: new_pids,
        entry_refs_to_metas: new_metas
    }
  end

  defp put_invalid_entry(conf, entry, reason) do
    entry = %UploadEntry{entry | valid?: false}
    new_pids = Map.put(conf.entry_refs_to_pids, entry.ref, @invalid)
    new_metas = Map.put(conf.entry_refs_to_metas, entry.ref, %{})

    new_conf = %UploadConfig{
      conf
      | entries: conf.entries ++ [entry],
        entry_refs_to_pids: new_pids,
        entry_refs_to_metas: new_metas
    }

    put_error(new_conf, entry.ref, reason)
  end

  defp validate_max_file_size({:ok, %UploadEntry{client_size: size}}, %UploadConfig{
         max_file_size: max
       })
       when size > max or not is_integer(size),
       do: {:error, :too_large}

  defp validate_max_file_size({:ok, entry}, _conf), do: {:ok, entry}

  defp validate_accepted({:ok, %UploadEntry{} = entry}, conf) do
    if accepted?(conf, entry) do
      {:ok, entry}
    else
      {:error, :not_accepted}
    end
  end

  defp validate_accepted({:error, _} = error, _conf), do: error

  defp accepted?(%UploadConfig{accept: :any}, %UploadEntry{}), do: true

  defp accepted?(
         %UploadConfig{acceptable_types: acceptable_types} = conf,
         %UploadEntry{client_type: client_type} = entry
       ) do
    cond do
      # wildcard
      String.starts_with?(client_type, "image/") and "image/*" in acceptable_types -> true
      String.starts_with?(client_type, "audio/") and "audio/*" in acceptable_types -> true
      String.starts_with?(client_type, "video/") and "video/*" in acceptable_types -> true
      # strict
      client_type in acceptable_types -> true
      Path.extname(entry.client_name) in conf.acceptable_exts -> true
      true -> false
    end
  end

  defp recalculate_computed_fields(%UploadConfig{} = conf) do
    recalculate_errors(conf)
  end

  # Clears the config-level :too_many_files error once the entry count
  # drops back within bounds; entry-level errors are untouched.
  defp recalculate_errors(%UploadConfig{ref: ref} = conf) do
    if too_many_files?(conf) do
      conf
    else
      new_errors =
        Enum.filter(conf.errors, fn
          {^ref, @too_many_files} -> false
          _ -> true
        end)

      %UploadConfig{conf | errors: new_errors}
    end
  end

  @doc false
  def put_error(%UploadConfig{} = conf, _entry_ref, @too_many_files = reason) do
    %UploadConfig{conf | errors: Enum.uniq(conf.errors ++ [{conf.ref, reason}])}
  end

  def put_error(%UploadConfig{} = conf, entry_ref, reason) do
    %UploadConfig{conf | errors: conf.errors ++ [{entry_ref, reason}]}
  end

  @doc false
  # If the entry already has a channel, ask the channel to cancel and flag
  # the entry; otherwise the entry can simply be dropped.
  def cancel_entry(%UploadConfig{} = conf, %UploadEntry{} = entry) do
    case entry_pid(conf, entry) do
      channel_pid when is_pid(channel_pid) ->
        Phoenix.LiveView.UploadChannel.cancel(channel_pid)
        update_entry(conf, entry.ref, fn entry -> %UploadEntry{entry | cancelled?: true} end)

      _ ->
        drop_entry(conf, entry)
    end
  end

  @doc false
  # Removes the entry plus its errors/pid/meta bookkeeping, then refreshes
  # derived state.
  def drop_entry(%UploadConfig{} = conf, %UploadEntry{ref: ref}) do
    new_entries = for entry <- conf.entries, entry.ref != ref, do: entry
    new_errors = Enum.filter(conf.errors, fn {error_ref, _} -> error_ref != ref end)
    new_refs = Map.delete(conf.entry_refs_to_pids, ref)
    new_metas = Map.delete(conf.entry_refs_to_metas, ref)

    new_conf = %UploadConfig{
      conf
      | entries: new_entries,
        errors: new_errors,
        entry_refs_to_pids: new_refs,
        entry_refs_to_metas: new_metas
    }

    recalculate_computed_fields(new_conf)
  end

  @doc false
  def register_cid(%UploadConfig{} = conf, cid) do
    %UploadConfig{conf | cid: cid}
  end

  # UUID generation
  # Copyright (c) 2013 Plataformatec
  # Copyright (c) 2020 Dashbit
  # https://github.com/elixir-ecto/ecto/blob/99dff4c4403c258ea939fe9bdfb4e339baf05e13/lib/ecto/uuid.ex
  defp generate_uuid do
    <<u0::48, _::4, u1::12, _::2, u2::62>> = :crypto.strong_rand_bytes(16)
    bin = <<u0::48, 4::4, u1::12, 2::2, u2::62>>

    <<a1::4, a2::4, a3::4, a4::4, a5::4, a6::4, a7::4, a8::4, b1::4, b2::4, b3::4, b4::4, c1::4,
      c2::4, c3::4, c4::4, d1::4, d2::4, d3::4, d4::4, e1::4, e2::4, e3::4, e4::4, e5::4, e6::4,
      e7::4, e8::4, e9::4, e10::4, e11::4, e12::4>> = bin

    <<e(a1), e(a2), e(a3), e(a4), e(a5), e(a6), e(a7), e(a8), ?-, e(b1), e(b2), e(b3), e(b4), ?-,
      e(c1), e(c2), e(c3), e(c4), ?-, e(d1), e(d2), e(d3), e(d4), ?-, e(e1), e(e2), e(e3), e(e4),
      e(e5), e(e6), e(e7), e(e8), e(e9), e(e10), e(e11), e(e12)>>
  end

  @compile {:inline, e: 1}

  defp e(0), do: ?0
  defp e(1), do: ?1
  defp e(2), do: ?2
  defp e(3), do: ?3
  defp e(4), do: ?4
  defp e(5), do: ?5
  defp e(6), do: ?6
  defp e(7), do: ?7
  defp e(8), do: ?8
  defp e(9), do: ?9
  defp e(10), do: ?a
  defp e(11), do: ?b
  defp e(12), do: ?c
  defp e(13), do: ?d
  defp e(14), do: ?e
  defp e(15), do: ?f
end
| 29.203414 | 102 | 0.60302 |
1cd2bf1db8b6a402cc27c6c92d295f6cd7624bd0 | 782 | ex | Elixir | clients/elixir/generated/lib/adobe_experience_manager(aem)api/model/bundle_info.ex | shinesolutions/swagger-aem | b41f1ae3d23917de38ca5cf116cbcc173368d1e8 | [
"Apache-2.0"
] | 39 | 2016-10-02T06:45:12.000Z | 2021-09-08T20:39:53.000Z | clients/elixir/generated/lib/adobe_experience_manager(aem)api/model/bundle_info.ex | shinesolutions/swagger-aem | b41f1ae3d23917de38ca5cf116cbcc173368d1e8 | [
"Apache-2.0"
] | 35 | 2016-11-02T05:06:34.000Z | 2021-09-03T06:03:08.000Z | clients/elixir/generated/lib/adobe_experience_manager(aem)api/model/bundle_info.ex | shinesolutions/swagger-aem | b41f1ae3d23917de38ca5cf116cbcc173368d1e8 | [
"Apache-2.0"
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
# NOTE(review): the module name below contains parentheses —
# `AdobeExperienceManager(AEM)API` — which is not a valid Elixir module
# alias, so this generated file cannot compile as-is. The generator's
# package name needs sanitizing (e.g. `AdobeExperienceManagerAEMAPI`);
# verify before regenerating/using this client.
defmodule AdobeExperienceManager(AEM)API.Model.BundleInfo do
  @moduledoc """
  OSGi bundle information returned by the AEM bundles endpoint:
  an overall `status` string, a list of summary counts `s`, and the
  per-bundle `data` entries.
  """

  @derive [Poison.Encoder]
  defstruct [
    :"status",
    :"s",
    :"data"
  ]

  @type t :: %__MODULE__{
    :"status" => String.t | nil,
    :"s" => [integer()] | nil,
    :"data" => [AdobeExperienceManager(AEM)API.Model.BundleData.t] | nil
  }
end
defimpl Poison.Decoder, for: AdobeExperienceManager(AEM)API.Model.BundleInfo do
  import AdobeExperienceManager(AEM)API.Deserializer

  # Decode the nested `data` list into `BundleData` structs; the scalar
  # fields keep whatever Poison decoded.
  def decode(value, options) do
    value
    |> deserialize(:"data", :list, AdobeExperienceManager(AEM)API.Model.BundleData, options)
  end
end
| 24.4375 | 92 | 0.685422 |
1cd2d529b9f43a4d10cce69fe785928dfa1e2e7d | 1,233 | exs | Elixir | config/prod.secret.exs | lawik/noted | a51b5d79cf44abfc2463560f83f1a8d65e6af85e | [
"BSD-3-Clause"
] | 28 | 2021-02-20T22:22:49.000Z | 2022-03-24T21:07:39.000Z | config/prod.secret.exs | gerhard/noted | c83bfd2e8e2950187268a2e3ba0904ae8a9773e5 | [
"BSD-3-Clause"
] | 5 | 2021-05-06T11:37:11.000Z | 2021-08-31T11:38:14.000Z | config/prod.secret.exs | gerhard/noted | c83bfd2e8e2950187268a2e3ba0904ae8a9773e5 | [
"BSD-3-Clause"
# In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
use Mix.Config
# Fail fast at boot if the database connection string is not provided.
database_url =
  System.get_env("DATABASE_URL") ||
    raise """
    environment variable DATABASE_URL is missing.
    For example: ecto://USER:PASS@HOST/DATABASE
    """

config :noted, Noted.Repo,
  # ssl: true,
  url: database_url,
  # Connection pool size; overridable via POOL_SIZE, defaults to 10.
  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")

# Secret used to sign/encrypt cookies and sessions; must come from the
# environment in production, never from source control.
secret_key_base =
  System.get_env("SECRET_KEY_BASE") ||
    raise """
    environment variable SECRET_KEY_BASE is missing.
    You can generate one by calling: mix phx.gen.secret
    """

config :noted, NotedWeb.Endpoint,
  http: [
    port: String.to_integer(System.get_env("PORT") || "4000"),
    # :inet6 also accepts IPv4 connections on dual-stack hosts.
    transport_options: [socket_opts: [:inet6]]
  ],
  secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :noted, NotedWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 29.357143 | 67 | 0.716139 |
1cd2dbe09df27c43cb3a21ec7142f864abb67926 | 3,957 | exs | Elixir | test/k8s/client_test.exs | coryodaniel/k8s_client | 88fb9490db72e947b2d216637769134bc4ebdfbd | [
"MIT"
] | 5 | 2019-01-12T16:56:05.000Z | 2021-04-10T04:06:13.000Z | test/k8s/client_test.exs | coryodaniel/k8s_client | 88fb9490db72e947b2d216637769134bc4ebdfbd | [
"MIT"
] | 12 | 2019-01-08T23:42:56.000Z | 2019-01-26T19:07:50.000Z | test/k8s/client_test.exs | coryodaniel/k8s_client | 88fb9490db72e947b2d216637769134bc4ebdfbd | [
"MIT"
] | null | null | null | defmodule K8s.ClientTest do
use ExUnit.Case, async: true
alias K8s.Client
doctest K8s.Client
setup do
bypass = Bypass.open()
conf = K8s.Conf.from_file("test/support/k8s_conf.yaml")
conf = %{conf | url: "http://localhost:#{bypass.port}/"}
{:ok, bypass: bypass, conf: conf}
end
def namespace_manifest() do
%{
"apiVersion" => "v1",
"metadata" => %{"name" => "test"},
"kind" => "Namespace"
}
end
def noop(), do: Jason.encode!(%{})
describe "run/3" do
test "running an operation without an HTTP body", %{conf: conf, bypass: bypass} do
Bypass.expect_once(bypass, fn conn ->
assert conn.method == "GET"
assert conn.request_path == "/api/v1/namespaces/test"
Plug.Conn.resp(conn, 200, noop())
end)
operation = Client.get(namespace_manifest())
assert {:ok, _} = Client.run(operation, conf)
end
test "running an operation with an HTTP body", %{conf: conf, bypass: bypass} do
Bypass.expect_once(bypass, fn conn ->
assert conn.method == "POST"
assert conn.request_path == "/api/v1/namespaces"
Plug.Conn.resp(conn, 200, noop())
end)
operation = Client.create(namespace_manifest())
assert {:ok, _} = Client.run(operation, conf)
end
test "running an operation with options", %{conf: conf, bypass: bypass} do
Bypass.expect_once(bypass, fn conn ->
assert conn.method == "GET"
assert conn.request_path == "/api/v1/namespaces/test"
assert conn.query_string == "watch=true"
Plug.Conn.resp(conn, 200, noop())
end)
operation = Client.get(namespace_manifest())
opts = [params: %{"watch" => "true"}]
assert {:ok, _} = Client.run(operation, conf, opts)
end
end
describe "run/4" do
test "running an operation with a custom HTTP body", %{conf: conf, bypass: bypass} do
Bypass.expect_once(bypass, fn conn ->
assert conn.method == "POST"
assert conn.request_path == "/api/v1/namespaces"
{:ok, json, _} = Plug.Conn.read_body(conn)
body = Jason.decode!(json)
assert body["metadata"]["labels"]["env"] == "test"
Plug.Conn.resp(conn, 200, noop())
end)
# This is a silly example.
operation = Client.create(namespace_manifest())
labels = %{"env" => "test"}
body = put_in(namespace_manifest(), ["metadata", "labels"], labels)
assert {:ok, _} = Client.run(operation, conf, body)
end
test "running an operation with a custom HTTP body and options", %{conf: conf, bypass: bypass} do
Bypass.expect_once(bypass, fn conn ->
assert conn.method == "POST"
assert conn.request_path == "/api/v1/namespaces"
assert conn.query_string == "watch=true"
{:ok, json, _} = Plug.Conn.read_body(conn)
body = Jason.decode!(json)
assert body["metadata"]["labels"]["env"] == "test"
Plug.Conn.resp(conn, 200, noop())
end)
# This is a silly example.
operation = Client.create(namespace_manifest())
labels = %{"env" => "test"}
body = put_in(namespace_manifest(), ["metadata", "labels"], labels)
opts = [params: %{"watch" => "true"}]
assert {:ok, _} = Client.run(operation, conf, body, opts)
end
end
describe "run" do
test "request with HTTP 201 response", %{conf: conf, bypass: bypass} do
Bypass.expect(bypass, fn conn ->
Plug.Conn.resp(conn, 201, "")
end)
operation = Client.list("v1", "Pod", namespace: :all)
assert :ok = Client.run(operation, conf)
end
test "request with HTTP 404 response", %{conf: conf, bypass: bypass} do
Bypass.expect(bypass, fn conn ->
Plug.Conn.resp(conn, 404, "File not found.")
end)
operation = Client.list("v1", "Pod", namespace: :all)
assert {:error, "HTTP Error: 404; File not found."} = Client.run(operation, conf)
end
end
end
| 32.434426 | 101 | 0.601718 |
1cd2f8f4c7acc5727105efde8918589f9223c662 | 211 | exs | Elixir | priv/config_storage/migrations/20180308162327_add_maybe_hidden_network_flag.exs | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | 2 | 2018-08-01T23:07:52.000Z | 2018-10-17T12:49:21.000Z | priv/config_storage/migrations/20180308162327_add_maybe_hidden_network_flag.exs | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | null | null | null | priv/config_storage/migrations/20180308162327_add_maybe_hidden_network_flag.exs | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | 1 | 2017-07-22T21:51:14.000Z | 2017-07-22T21:51:14.000Z | defmodule Farmbot.System.ConfigStorage.Migrations.AddMaybeHiddenNetworkFlag do
use Ecto.Migration
def change do
alter table("network_interfaces") do
add(:maybe_hidden, :boolean)
end
end
end
| 21.1 | 78 | 0.758294 |
1cd321907cc26fac4aad0997dcff43dfae39652e | 4,164 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/target_vpn_gateway.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_vpn_gateway.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_vpn_gateway.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.TargetVpnGateway do
  @moduledoc """
  Represents a Target VPN gateway resource. (== resource_for beta.targetVpnGateways ==) (== resource_for v1.targetVpnGateways ==)

  ## Attributes

  - creationTimestamp (String.t): [Output Only] Creation timestamp in RFC3339 text format. Defaults to: `null`.
  - description (String.t): An optional description of this resource. Provide this property when you create the resource. Defaults to: `null`.
  - forwardingRules ([String.t]): [Output Only] A list of URLs to the ForwardingRule resources. ForwardingRules are created using compute.forwardingRules.insert and associated with a VPN gateway. Defaults to: `null`.
  - id (String.t): [Output Only] The unique identifier for the resource. This identifier is defined by the server. Defaults to: `null`.
  - kind (String.t): [Output Only] Type of resource. Always compute#targetVpnGateway for target VPN gateways. Defaults to: `null`.
  - name (String.t): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. Defaults to: `null`.
  - network (String.t): URL of the network to which this VPN gateway is attached. Provided by the client when the VPN gateway is created. Defaults to: `null`.
  - region (String.t): [Output Only] URL of the region where the target VPN gateway resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. Defaults to: `null`.
  - selfLink (String.t): [Output Only] Server-defined URL for the resource. Defaults to: `null`.
  - status (String.t): [Output Only] The status of the VPN gateway, which can be one of the following: CREATING, READY, FAILED, or DELETING. Defaults to: `null`.
    - Enum - one of [CREATING, DELETING, FAILED, READY]
  - tunnels ([String.t]): [Output Only] A list of URLs to VpnTunnel resources. VpnTunnels are created using the compute.vpntunnels.insert method and associated with a VPN gateway. Defaults to: `null`.
  """
  use GoogleApi.Gax.ModelBase
  # NOTE: generated code (see file header) — struct keys intentionally keep
  # the API's camelCase JSON names rather than Elixir snake_case.
  @type t :: %__MODULE__{
          :creationTimestamp => any(),
          :description => any(),
          :forwardingRules => list(any()),
          :id => any(),
          :kind => any(),
          :name => any(),
          :network => any(),
          :region => any(),
          :selfLink => any(),
          :status => any(),
          :tunnels => list(any())
        }
  # Each field/1,2 call registers a JSON key with GoogleApi.Gax.ModelBase;
  # `type: :list` marks repeated fields.
  field(:creationTimestamp)
  field(:description)
  field(:forwardingRules, type: :list)
  field(:id)
  field(:kind)
  field(:name)
  field(:network)
  field(:region)
  field(:selfLink)
  field(:status)
  field(:tunnels, type: :list)
end
# Route Poison JSON decoding/encoding through the shared Gax model helpers
# generated into the model module above.
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.TargetVpnGateway do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.TargetVpnGateway.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.TargetVpnGateway do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 52.708861 | 488 | 0.714938 |
1cd33573dac0b3025b47f80f3bdcbafbedfaf32f | 1,241 | ex | Elixir | lib/packet_api/capacity.ex | jordan0day/packet-api-elixir | 069e96888f1d5858586bff224f09408e671924d3 | [
"Unlicense"
] | 1 | 2020-01-27T00:49:16.000Z | 2020-01-27T00:49:16.000Z | lib/packet_api/capacity.ex | jordan0day/packet-api-elixir | 069e96888f1d5858586bff224f09408e671924d3 | [
"Unlicense"
] | null | null | null | lib/packet_api/capacity.ex | jordan0day/packet-api-elixir | 069e96888f1d5858586bff224f09408e671924d3 | [
"Unlicense"
] | null | null | null | defmodule PacketApi.Capacity do
@moduledoc """
This module collects the operations related to the capacity endpoint of the
Packet API.
"""
alias PacketApi
@type request :: PacketApi.request()
@type server_info :: %{
required(:facility) => String.t(),
required(:plan) => String.t(),
optional(:quantity) => pos_integer
}
@base "/capacity"
@doc """
Validate if a deploy can be fulfilled by checking available capacity.
"""
@spec check(request, [server_info]) :: any
def check(%{client: client}, server_infos) do
case Tesla.post(client, @base, %{"servers" => server_infos}) do
{:ok, %{status: 200, body: %{"servers" => servers}}} ->
{:ok, servers}
{:ok, %{status: status}} ->
{:error, status}
error ->
error
end
end
@doc """
Returns a list of facilities and plans with their current capacity.
"""
@spec list(request) :: {:ok, [map]} | {:error, any}
def list(%{client: client}) do
case Tesla.get(client, @base) do
{:ok, %{status: 200, body: %{"capacity" => capacity}}} ->
{:ok, capacity}
{:ok, %{status: status}} ->
{:error, status}
error ->
error
end
end
end
| 24.333333 | 77 | 0.573731 |
1cd39642f91894402b19533074a4294e24910927 | 1,277 | exs | Elixir | mix.exs | shadowRR/contex | 86730b38a8ec06726e9233ef1f58661ca85cb704 | [
"MIT"
] | 455 | 2020-01-15T22:21:40.000Z | 2022-03-29T23:20:45.000Z | mix.exs | shadowRR/contex | 86730b38a8ec06726e9233ef1f58661ca85cb704 | [
"MIT"
] | 48 | 2020-02-10T06:19:17.000Z | 2022-03-29T03:02:52.000Z | mix.exs | shadowRR/contex | 86730b38a8ec06726e9233ef1f58661ca85cb704 | [
"MIT"
] | 30 | 2020-01-15T22:21:35.000Z | 2022-03-10T18:11:51.000Z | defmodule Contex.MixProject do
use Mix.Project
def project do
[
app: :contex,
version: "0.4.0",
elixir: "~> 1.9",
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
description: description(),
package: package(),
name: "ContEx",
source_url: "https://github.com/mindok/contex",
homepage_url: "https://contex-charts.org/",
deps: deps(),
docs: docs()
]
end
def application do
[
extra_applications: [:eex]
]
end
defp description() do
"Contex - a server-side charting library for Elixir."
end
defp deps do
[
{:nimble_strftime, "~> 0.1.0"},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false},
{:sweet_xml, "~> 0.6.6", only: :test},
{:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false}
]
end
defp docs do
[
main: "Contex",
logo: "assets/logo.png"
]
end
defp package() do
[
name: "contex",
# These are the default files included in the package
files: ~w(lib mix.exs README* LICENSE*),
licenses: ["MIT"],
links: %{
"GitHub" => "https://github.com/mindok/contex",
"Website" => "https://contex-charts.org/"
}
]
end
end
| 21.283333 | 64 | 0.539546 |
1cd3df01a8456d31ae6d45b8d7502977a51e202f | 106 | exs | Elixir | test/views/layout_view_test.exs | maxbeizer/ueberauth_heroku_example | e0dce54d9dee70be28ca25422cd2d4a59571248b | [
"MIT"
] | null | null | null | test/views/layout_view_test.exs | maxbeizer/ueberauth_heroku_example | e0dce54d9dee70be28ca25422cd2d4a59571248b | [
"MIT"
] | null | null | null | test/views/layout_view_test.exs | maxbeizer/ueberauth_heroku_example | e0dce54d9dee70be28ca25422cd2d4a59571248b | [
"MIT"
] | null | null | null | defmodule UeberauthHerokuExample.LayoutViewTest do
use UeberauthHerokuExample.ConnCase, async: true
end
| 26.5 | 50 | 0.867925 |
1cd3df80ba6d6e2abbefe2a97aef55bf22e6633e | 8 | ex | Elixir | testData/org/elixir_lang/parser_definition/sigil_dot_operation_parsing_test_case/BinaryWholeNumber.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/sigil_dot_operation_parsing_test_case/BinaryWholeNumber.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/sigil_dot_operation_parsing_test_case/BinaryWholeNumber.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | ~x{}.0b0 | 8 | 8 | 0.5 |
1cd3fac5545e86f484988a2e6dbc5fda2f6a3316 | 917 | ex | Elixir | lib/livebook/runtime/erl_dist/smart_cell_gl.ex | aleDsz/livebook | 3ad817ac69b8459b684ff8d00c879ae7787b6dcc | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/erl_dist/smart_cell_gl.ex | aleDsz/livebook | 3ad817ac69b8459b684ff8d00c879ae7787b6dcc | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/erl_dist/smart_cell_gl.ex | aleDsz/livebook | 3ad817ac69b8459b684ff8d00c879ae7787b6dcc | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Runtime.ErlDist.SmartCellGL do
@moduledoc false
use GenServer
@spec start_link(pid()) :: GenServer.on_start()
def start_link(runtime_broadcast_to) do
GenServer.start_link(__MODULE__, {runtime_broadcast_to})
end
@impl true
def init({runtime_broadcast_to}) do
{:ok, %{runtime_broadcast_to: runtime_broadcast_to}}
end
@impl true
def handle_info({:io_request, from, reply_as, req}, state) do
case io_request(req, state) do
:forward ->
# Forward the request to own group leader
gl = Process.group_leader()
send(gl, {:io_request, from, reply_as, req})
{:reply, reply} ->
send(from, {:io_reply, reply_as, reply})
end
{:noreply, state}
end
defp io_request(:livebook_get_broadcast_target, state) do
{:reply, {:ok, state.runtime_broadcast_to}}
end
defp io_request(_req, _state) do
:forward
end
end
| 23.512821 | 63 | 0.68048 |
1cd407572801c0ce9fab24e729ec374e241a6df0 | 5,961 | ex | Elixir | lib/asciinema/accounts/accounts.ex | spearheadsys/asciinema-server | 6c8874e5dcfbbf9be176f831dd1ea421c43d375f | [
"Apache-2.0"
] | null | null | null | lib/asciinema/accounts/accounts.ex | spearheadsys/asciinema-server | 6c8874e5dcfbbf9be176f831dd1ea421c43d375f | [
"Apache-2.0"
] | null | null | null | lib/asciinema/accounts/accounts.ex | spearheadsys/asciinema-server | 6c8874e5dcfbbf9be176f831dd1ea421c43d375f | [
"Apache-2.0"
] | null | null | null | defmodule Asciinema.Accounts do
use Asciinema.Config
import Ecto.Query, warn: false
import Ecto, only: [assoc: 2, build_assoc: 2]
alias Asciinema.Accounts.{User, ApiToken}
alias Asciinema.Repo
def fetch_user(id) do
case get_user(id) do
nil -> {:error, :not_found}
user -> {:ok, user}
end
end
def get_user(id) when is_integer(id), do: Repo.get(User, id)
def get_user([{_k, _v}] = kv), do: Repo.get_by(User, kv)
def ensure_asciinema_user do
case Repo.get_by(User, username: "asciinema") do
nil ->
attrs = %{
username: "asciinema",
name: "asciinema",
email: "admin@asciinema.org"
}
%User{}
|> User.create_changeset(attrs)
|> Repo.insert!()
user ->
user
end
end
def change_user(user) do
User.changeset(user)
end
def update_user(user, params) do
user
|> User.update_changeset(params)
|> Repo.update()
end
def temporary_user?(user), do: user.email == nil
def temporary_users(q \\ User) do
from(u in q, where: is_nil(u.email))
end
def lookup_user(email_or_username) do
if String.contains?(email_or_username, "@") do
lookup_user_by_email(email_or_username)
else
lookup_user_by_username(email_or_username)
end
end
defp lookup_user_by_email(email) do
case Repo.get_by(User, email: email) do
%User{} = user ->
{:ok, user}
nil ->
case User.signup_changeset(%{email: email}) do
%{errors: [{:email, _}]} ->
{:error, :email_invalid}
%{errors: []} ->
{:ok, %User{email: email}}
end
end
end
defp lookup_user_by_username(username) do
case Repo.get_by(User, username: username) do
%User{} = user ->
{:ok, user}
nil ->
{:error, :user_not_found}
end
end
alias Phoenix.Token
def signup_token(email) do
Token.sign(config(:secret), "signup", email)
end
def login_token(%User{id: id, last_login_at: last_login_at}) do
last_login_at = last_login_at && Timex.to_unix(last_login_at)
Token.sign(config(:secret), "login", {id, last_login_at})
end
# 15 minutes
@login_token_max_age 15 * 60
def verify_signup_token(token) do
result =
Token.verify(
config(:secret),
"signup",
token,
max_age: @login_token_max_age
)
with {:ok, email} <- result,
{:ok, user} <- %{email: email} |> User.signup_changeset() |> Repo.insert() do
{:ok, user}
else
{:error, :invalid} ->
{:error, :token_invalid}
{:error, %Ecto.Changeset{}} ->
{:error, :email_taken}
{:error, _} ->
{:error, :token_expired}
end
end
def verify_login_token(token) do
result =
Token.verify(
config(:secret),
"login",
token,
max_age: @login_token_max_age
)
with {:ok, {user_id, last_login_at}} <- result,
%User{} = user <- Repo.get(User, user_id),
^last_login_at <- user.last_login_at && Timex.to_unix(user.last_login_at) do
{:ok, user}
else
{:error, :invalid} ->
{:error, :token_invalid}
nil ->
{:error, :user_not_found}
_ ->
{:error, :token_expired}
end
end
def get_user_with_api_token(token, tmp_username \\ nil) do
case get_api_token(token) do
{:ok, %ApiToken{user: user}} ->
{:ok, user}
{:error, :token_revoked} = res ->
res
{:error, :token_not_found} ->
create_user_with_api_token(token, tmp_username)
end
end
def create_user_with_api_token(token, tmp_username) do
user_changeset = User.temporary_changeset(tmp_username)
Repo.transaction(fn ->
with {:ok, %User{} = user} <- Repo.insert(user_changeset),
{:ok, %ApiToken{}} <- create_api_token(user, token) do
user
else
{:error, %Ecto.Changeset{}} ->
Repo.rollback(:token_invalid)
{:error, reason} ->
Repo.rollback(reason)
result ->
Repo.rollback(result)
end
end)
end
def create_api_token(%User{} = user, token) do
result =
user
|> build_assoc(:api_tokens)
|> ApiToken.create_changeset(token)
|> Repo.insert()
case result do
{:ok, api_token} ->
{:ok, %{api_token | user: user}}
{:error, %Ecto.Changeset{}} ->
{:error, :token_invalid}
end
end
def get_or_create_api_token(token, user) do
with {:ok, token} <- get_api_token(token) do
{:ok, token}
else
{:error, :token_not_found} ->
create_api_token(user, token)
otherwise ->
otherwise
end
end
def get_api_token(token) do
api_token =
ApiToken
|> Repo.get_by(token: token)
|> Repo.preload(:user)
case api_token do
nil -> {:error, :token_not_found}
%ApiToken{revoked_at: nil} -> {:ok, api_token}
%ApiToken{} -> {:error, :token_revoked}
end
end
def get_api_token!(user, id) do
Repo.get!(assoc(user, :api_tokens), id)
end
def get_api_token!(token) do
Repo.get_by!(ApiToken, token: token)
end
def revoke_api_token!(api_token) do
api_token
|> ApiToken.revoke_changeset()
|> Repo.update!()
end
def list_api_tokens(%User{} = user) do
user
|> assoc(:api_tokens)
|> Repo.all()
end
def reassign_api_tokens(src_user_id, dst_user_id) do
q = from(at in ApiToken, where: at.user_id == ^src_user_id)
Repo.update_all(q, set: [user_id: dst_user_id, updated_at: Timex.now()])
end
def add_admins(emails) do
from(u in User, where: u.email in ^emails)
|> Repo.update_all(set: [is_admin: true])
end
def remove_admins(emails) do
from(u in User, where: u.email in ^emails)
|> Repo.update_all(set: [is_admin: false])
end
def delete_user!(%User{} = user) do
Repo.delete!(user)
end
end
| 22.665399 | 86 | 0.593189 |
1cd41522f691f4d405d2f4ed53686ce069f88399 | 1,957 | exs | Elixir | test/unit/changeset/registrator_test.exs | lumenlunae/sentinel | 189d9b02aeeea942a41963b42ef8523ef192fd03 | [
"MIT"
] | null | null | null | test/unit/changeset/registrator_test.exs | lumenlunae/sentinel | 189d9b02aeeea942a41963b42ef8523ef192fd03 | [
"MIT"
] | null | null | null | test/unit/changeset/registrator_test.exs | lumenlunae/sentinel | 189d9b02aeeea942a41963b42ef8523ef192fd03 | [
"MIT"
] | null | null | null | defmodule RegistratorTest do
use Sentinel.UnitCase
alias Sentinel.Changeset.Registrator
setup do
on_exit(fn ->
Application.delete_env(:sentinel, :user_model_validator)
end)
end
@valid_params %{"email" => "unique@example.com"}
@case_insensitive_valid_params %{"email" => "Unique@example.com"}
test "changeset validates presence of email" do
changeset = Registrator.changeset(%{})
assert changeset.errors[:email] == {"can't be blank", [validation: :required]}
changeset = Registrator.changeset(%{"email" => ""})
assert changeset.errors[:email] == {"can't be blank", [validation: :required]}
changeset = Registrator.changeset(%{"email" => nil})
assert changeset.errors[:email] == {"can't be blank", [validation: :required]}
end
test "changeset validates uniqueness of email" do
user = Factory.insert(:user)
{:error, changeset} =
Registrator.changeset(%{@valid_params | "email" => user.email})
|> TestRepo.insert()
{"has already been taken", _} = changeset.errors[:email]
end
test "changeset downcases email" do
changeset = Registrator.changeset(@case_insensitive_valid_params)
assert changeset.valid?
end
test "changeset runs user_model_validator from config" do
Application.put_env(:sentinel, :user_model_validator, fn changeset, %{} ->
Ecto.Changeset.add_error(changeset, :email, "custom_error")
end)
changeset = Registrator.changeset(@valid_params)
assert !changeset.valid?
assert changeset.errors[:email] == {"custom_error", []}
end
test "changeset runs with a custom user_model_validator module function" do
Application.put_env(
:sentinel,
:user_model_validator,
{Sentinel.TestValidator, :custom_changeset}
)
changeset = Registrator.changeset(@valid_params)
assert !changeset.valid?
assert changeset.errors[:my_attr] == {"can't be blank", [validation: :required]}
end
end
| 29.651515 | 84 | 0.693919 |
1cd43fbacfe63e0b096ce81410854d4499449104 | 889 | ex | Elixir | lib/elixir_linter/supervisor.ex | SophieDeBenedetto/elixir_linter | 3feb3f6ce4ea566958ac29548ea16cf8cedd6a42 | [
"MIT"
] | 1 | 2017-03-20T22:47:20.000Z | 2017-03-20T22:47:20.000Z | lib/elixir_linter/supervisor.ex | SophieDeBenedetto/elixir_linter | 3feb3f6ce4ea566958ac29548ea16cf8cedd6a42 | [
"MIT"
] | null | null | null | lib/elixir_linter/supervisor.ex | SophieDeBenedetto/elixir_linter | 3feb3f6ce4ea566958ac29548ea16cf8cedd6a42 | [
"MIT"
] | null | null | null | defmodule ElixirLinter.Supervisor do
use Supervisor
def start_link([repo, "verbose"]) do
result = {:ok, sup} = Supervisor.start_link(__MODULE__, [repo])
start_workers(sup, [repo, "verbose"])
result
end
def start_workers(sup, [repo, "verbose"]) do
IO.inspect repo
{:ok, store} = Supervisor.start_child(sup, worker(ElixirLinter.Store, [repo]))
Supervisor.start_child(sup, supervisor(ElixirLinter.SubSupervisor, [store, "verbose"]))
end
def start_link(repo) do
result = {:ok, sup} = Supervisor.start_link(__MODULE__, [repo])
start_workers(sup, repo)
result
end
def start_workers(sup, repo) do
{:ok, store} = Supervisor.start_child(sup, worker(ElixirLinter.Store, [repo]))
Supervisor.start_child(sup, supervisor(ElixirLinter.SubSupervisor, [store]))
end
def init(_) do
supervise [], strategy: :one_for_one
end
end | 29.633333 | 91 | 0.694038 |
1cd464afc2c17d5e497abf9f45be4049bdac1513 | 105 | ex | Elixir | lib/phx_pow/repo.ex | brandedux/phoenix_authentication | a1a5758532a4ece73bfc7319f8c9235b070bd1fe | [
"MIT"
] | 2 | 2019-09-25T22:02:59.000Z | 2019-12-18T22:33:34.000Z | lib/phx_pow/repo.ex | brandedux/phoenix_authentication | a1a5758532a4ece73bfc7319f8c9235b070bd1fe | [
"MIT"
] | null | null | null | lib/phx_pow/repo.ex | brandedux/phoenix_authentication | a1a5758532a4ece73bfc7319f8c9235b070bd1fe | [
"MIT"
] | null | null | null | defmodule PhxPow.Repo do
use Ecto.Repo,
otp_app: :phx_pow,
adapter: Ecto.Adapters.Postgres
end
| 17.5 | 35 | 0.72381 |
1cd497ae89e5b761aa3e541cbacc6073d31c38c0 | 4,236 | ex | Elixir | lib/approximate_histogram.ex | cschneid/approximate_histogram | ee48b3003a0a1543043ed353da4fa6b7a4ee11ee | [
"MIT"
] | 2 | 2017-03-30T17:00:04.000Z | 2020-03-29T15:54:03.000Z | lib/approximate_histogram.ex | cschneid/approximate_histogram | ee48b3003a0a1543043ed353da4fa6b7a4ee11ee | [
"MIT"
] | null | null | null | lib/approximate_histogram.ex | cschneid/approximate_histogram | ee48b3003a0a1543043ed353da4fa6b7a4ee11ee | [
"MIT"
] | null | null | null | defmodule ApproximateHistogram do
@type t :: %__MODULE__{
bins: [bin],
options: options
}
@type bin :: {value, count}
@type value :: number()
@type count :: non_neg_integer()
@type options :: %{max_bins: pos_integer()}
defstruct [
:bins,
:options,
]
@default_size 50
@spec new(pos_integer()) :: t
def new(size \\ @default_size) do
%__MODULE__{
bins: [],
options: %{max_bins: size},
}
end
@spec size(t) :: non_neg_integer()
def size(%__MODULE__{} = histo) do
Enum.reduce(histo.bins, 0, fn {_, count}, total -> total + count end)
end
@spec max_bins(t) :: non_neg_integer()
def max_bins(%__MODULE__{} = histo) do
histo.options.max_bins
end
@spec add(t, value) :: t
def add(%__MODULE__{} = histo, value) do
if at_capacity?(histo) do
# Split the list into:
# [before] | closest | [after]
# Use a weighted average to merge the value and increment count correctly into a new middle bin
{bef, closest, aft} = split(histo.bins, value)
new_value =
((bin_value(closest) * bin_count(closest)) + value * 1) / (bin_count(closest) + 1)
new_bin = {new_value, bin_count(closest) + 1}
new_bins = bef ++ [new_bin] ++ aft
%{histo | bins: new_bins}
else
# Split the list into:
# [before] | closest | [after]
# Based on closest, come up with a 1 or 2 element list in the middle, then concat all 3 lists.
# [before] [closest, new] [after] <-- value is bigger than the closest
# [before] [new, closest] [after] <-- value is smaller than the closest
# [before] [new] [after] <-- First element and identical value cases
float_value = value / 1
{bef, closest, aft} = split(histo.bins, float_value)
middle = cond do
closest == nil ->
[{float_value, 1}]
bin_value(closest) == float_value ->
[{float_value, bin_count(closest) + 1}]
bin_value(closest) < float_value ->
[closest, {float_value, 1}]
bin_value(closest) > float_value ->
[{float_value, 1}, closest]
end
new_bins = bef ++ middle ++ aft
%{histo | bins: new_bins}
end
end
def bin_value({value, _}), do: value
def bin_count({_, count}), do: count
def bins_used(%__MODULE__{} = histo) do
Enum.count(histo.bins)
end
@spec to_list(t) :: list(bin)
def to_list(%__MODULE__{} = histo) do
histo.bins
end
def percentile(%__MODULE__{} = histo, percentile) do
target = size(histo) * (percentile / 100)
Enum.reduce_while(
histo.bins,
target,
fn {value, count}, remaining ->
next = remaining - count
if next <= 0 do
{:halt, value}
else
{:cont, next}
end
end
)
end
# Figure out which percentile this value would slot into
def percentile_for_value(%__MODULE__{} = histo, target) do
found_at = Enum.reduce_while(
histo.bins,
0,
fn {bin_val, bin_count}, count ->
if bin_val > target do
{:halt, count}
else
{:cont, count + bin_count}
end
end
)
# Protect against div by 0
s = size(histo)
if s == 0 do
0
else
found_at / size(histo) * 100
end
end
@spec at_capacity?(t) :: boolean()
defp at_capacity?(%__MODULE__{} = histo) do
histo.options.max_bins == Enum.count(histo.bins)
end
# returns three-tuple: {[before], closest, [after]}
# before and after may be empty lists
defp split(bins, value) do
{bef, aft} = Enum.split_while(bins, fn {bin_val, _} -> value > bin_val end)
bef_closest = List.last(bef)
bef_rest = Enum.drop(bef, -1)
aft_closest = List.first(aft)
aft_rest = Enum.drop(aft, 1)
cond do
bef_closest == nil ->
{[], aft_closest, aft_rest}
aft_closest == nil ->
{bef_rest, bef_closest, []}
true ->
dist_to_bef = value - bin_value(bef_closest)
dist_to_aft = value - bin_value(aft_closest)
if dist_to_bef < dist_to_aft do
{bef_rest, bef_closest, aft}
else
{bef, aft_closest, aft_rest}
end
end
end
end
| 25.065089 | 101 | 0.583333 |
1cd4b675df8c38279c991879c21eb3b99af2f357 | 1,909 | exs | Elixir | clients/games/mix.exs | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/games/mix.exs | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/games/mix.exs | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Games.Mixfile do
  # NOTE: generated file (see file header) — changes belong in the code
  # generator, not here.
  use Mix.Project
  @version "0.18.0"
  def project() do
    [
      app: :google_api_games,
      version: @version,
      elixir: "~> 1.6",
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/games"
    ]
  end
  def application() do
    [extra_applications: [:logger]]
  end
  defp deps() do
    [
      {:google_gax, "~> 0.4"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end
  # Hex package summary.
  defp description() do
    """
    Google Play Game Services client library. The Google Play games service allows developers to enhance games with social leaderboards, achievements, game state, sign-in with Google, and more.
    """
  end
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/games",
        "Homepage" => "https://developers.google.com/games/"
      }
    ]
  end
end
| 28.492537 | 193 | 0.659508 |
1cd4e0e09fcb26cad8a3d1cf328e6988c17eee50 | 977 | exs | Elixir | test/features/action_item_review_in_dashboard_test.exs | pwjablonski/remote_retro | 1b6acf359b362ae2ec187cfbd1a38dc8266aa546 | [
"MIT"
] | null | null | null | test/features/action_item_review_in_dashboard_test.exs | pwjablonski/remote_retro | 1b6acf359b362ae2ec187cfbd1a38dc8266aa546 | [
"MIT"
] | null | null | null | test/features/action_item_review_in_dashboard_test.exs | pwjablonski/remote_retro | 1b6acf359b362ae2ec187cfbd1a38dc8266aa546 | [
"MIT"
] | null | null | null | defmodule ActionItemReviewInDashboardTest do
alias RemoteRetro.{Idea}
use RemoteRetro.IntegrationCase, async: false
import ShorterMaps
describe "when a user has participated in a retro with action items" do
setup [:persist_idea_for_retro, :log_participation_in_retro]
@tag [
idea: %Idea{category: "action-item", body: "review action items from retro after standup"},
retro_stage: "action-items",
]
test "those action items are reviewable in their dashboard", ~M{session: facilitator_session} do
facilitator_session = visit(facilitator_session, "/retros")
click(facilitator_session, Query.css(".action-items-label"))
assert_has(facilitator_session, Query.css(".nested-action-items-list", text: "review action items from retro after standup"))
end
end
defp log_participation_in_retro(~M{retro, session} = context) do
retro_path = "/retros/" <> retro.id
visit(session, retro_path)
context
end
end
| 32.566667 | 131 | 0.728762 |
1cd4f0dd4d5f61e908dbeae1ebae171c74062e66 | 940 | exs | Elixir | test/wallaby/browser/send_text_test.exs | schnittchen/wallaby | 30be89cc78087e53e5b47a86043c2bbe8566bbf4 | [
"MIT"
] | null | null | null | test/wallaby/browser/send_text_test.exs | schnittchen/wallaby | 30be89cc78087e53e5b47a86043c2bbe8566bbf4 | [
"MIT"
] | null | null | null | test/wallaby/browser/send_text_test.exs | schnittchen/wallaby | 30be89cc78087e53e5b47a86043c2bbe8566bbf4 | [
"MIT"
defmodule Wallaby.Browser.SendTextTest do
  use Wallaby.SessionCase, async: true

  # Every test starts from the forms fixture page; the raw session is also
  # available for tests that navigate elsewhere.
  setup %{session: session} do
    page = visit(session, "forms.html")

    {:ok, %{page: page}}
  end

  test "sending key presses", %{session: session} do
    session
    |> visit("/")

    session
    |> send_keys([:tab, :enter])

    assert find(session, ".blue")
  end

  describe "send_keys/3" do
    test "accepts a query", %{page: page} do
      page
      |> send_keys(Query.text_field("Name"), ["Chris", :tab, "c@keathley.io"])

      assert page
             |> find(Query.text_field("Name"))
             |> has_value?("Chris")

      assert page
             |> find(Query.text_field("email"))
             |> has_value?("c@keathley.io")
    end
  end

  describe "send_keys/2" do
    test "allows text to be sent", %{page: page} do
      # BUG FIX: the boolean produced by has_value?/2 was previously
      # discarded, so this test could never fail on a wrong value.
      assert page
             |> find(Query.text_field("email"))
             |> send_keys("Example text")
             |> has_value?("Example text")
    end
  end
end
| 21.860465 | 78 | 0.592553 |
1cd504d9a941833c129bfbab8c7a22c0e50929ee | 3,591 | ex | Elixir | clients/monitoring/lib/google_api/monitoring/v3/model/time_interval.ex | richiboi1977/elixir-google-api | c495bb3548090eb7a63d12f6fb145ec48aecdc0b | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/monitoring/lib/google_api/monitoring/v3/model/time_interval.ex | richiboi1977/elixir-google-api | c495bb3548090eb7a63d12f6fb145ec48aecdc0b | [
"Apache-2.0"
] | null | null | null | clients/monitoring/lib/google_api/monitoring/v3/model/time_interval.ex | richiboi1977/elixir-google-api | c495bb3548090eb7a63d12f6fb145ec48aecdc0b | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Monitoring.V3.Model.TimeInterval do
  @moduledoc """
  A closed time interval. It extends from the start time to the end time, and includes both: [startTime, endTime]. Valid time intervals depend on the MetricKind (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors#MetricKind) of the metric value. The end time must not be earlier than the start time. When writing data points, the start time must not be more than 25 hours in the past and the end time must not be more than five minutes in the future. For GAUGE metrics, the startTime value is technically optional; if no value is specified, the start time defaults to the value of the end time, and the interval represents a single point in time. If both start and end times are specified, they must be identical. Such an interval is valid only for GAUGE metrics, which are point-in-time measurements. The end time of a new interval must be at least a millisecond after the end time of the previous interval. For DELTA metrics, the start time and end time must specify a non-zero interval, with subsequent points specifying contiguous and non-overlapping intervals. For DELTA metrics, the start time of the next interval must be at least a millisecond after the end time of the previous interval. For CUMULATIVE metrics, the start time and end time must specify a a non-zero interval, with subsequent points specifying the same start time and increasing end times, until an event resets the cumulative value to zero and sets a new start time for the following points. The new start time must be at least a millisecond after the end time of the previous interval because intervals are closed. If the start time of a new interval is the same as the end time of the previous interval, then data written at the new start time could overwrite data written at the previous end time.

  ## Attributes

  *   `endTime` (*type:* `DateTime.t`, *default:* `nil`) - Required. The end of the time interval.
  *   `startTime` (*type:* `DateTime.t`, *default:* `nil`) - Optional. The beginning of the time interval. The default value for the start time is the end time. The start time must not be later than the end time.
  """

  # Provides the defstruct and the field/2 macro used below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :endTime => DateTime.t() | nil,
          :startTime => DateTime.t() | nil
        }

  # Declares JSON (de)serialization for each attribute; `as: DateTime`
  # makes the generated coder handle timestamp values.
  field(:endTime, as: DateTime)
  field(:startTime, as: DateTime)
end
defimpl Poison.Decoder, for: GoogleApi.Monitoring.V3.Model.TimeInterval do
  # Delegates field decoding to the generated model coder.
  def decode(value, options),
    do: GoogleApi.Monitoring.V3.Model.TimeInterval.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Monitoring.V3.Model.TimeInterval do
  # Encodes via the shared Gax model base, which serializes declared fields.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 71.82 | 1,916 | 0.764411 |
1cd5356a6246aecd9fdd132c1dfeeb5a976449b2 | 918 | ex | Elixir | lib/chat_api_web/controllers/fallback_controller.ex | jalford14/papercups | fbe5e5ce4ff51702a0f898c39a9be47c33e9cb4a | [
"MIT"
] | 1 | 2021-01-18T09:57:23.000Z | 2021-01-18T09:57:23.000Z | lib/chat_api_web/controllers/fallback_controller.ex | jalford14/papercups | fbe5e5ce4ff51702a0f898c39a9be47c33e9cb4a | [
"MIT"
] | 1 | 2021-01-17T10:42:34.000Z | 2021-01-17T10:42:34.000Z | lib/chat_api_web/controllers/fallback_controller.ex | jalford14/papercups | fbe5e5ce4ff51702a0f898c39a9be47c33e9cb4a | [
"MIT"
defmodule ChatApiWeb.FallbackController do
  @moduledoc """
  Translates controller action results into valid `Plug.Conn` responses.

  See `Phoenix.Controller.action_fallback/1` for more details.
  """
  use ChatApiWeb, :controller

  alias Ecto.Changeset
  alias ChatApiWeb.ErrorHelpers

  # This clause is an example of how to handle resources that cannot be found.
  @spec call(Plug.Conn.t(), tuple()) :: Plug.Conn.t()
  def call(conn, {:error, :not_found}) do
    respond_with_error(conn, 404, %{
      status: 404,
      message: "Not found"
    })
  end

  def call(conn, {:error, %Ecto.Changeset{} = changeset}) do
    errors = Changeset.traverse_errors(changeset, &ErrorHelpers.translate_error/1)

    respond_with_error(conn, 422, %{
      status: 422,
      message: "Unprocessable Entity",
      errors: errors
    })
  end

  # Sets the HTTP status and renders the standard `%{error: payload}` envelope.
  defp respond_with_error(conn, status, payload) do
    conn
    |> put_status(status)
    |> json(%{error: payload})
  end
end
| 23.538462 | 82 | 0.633987 |
1cd5953171cc89eec54a4585914d6c50bb330356 | 340 | exs | Elixir | test/nomex/sentinel_test.exs | neojin/nomad-elixir | 95d169adcfe42549a4f7d8e939c7a47e211fbb04 | [
"ECL-2.0",
"Apache-2.0"
] | 5 | 2017-12-08T17:56:01.000Z | 2022-01-04T00:26:43.000Z | test/nomex/sentinel_test.exs | neojin/nomad-elixir | 95d169adcfe42549a4f7d8e939c7a47e211fbb04 | [
"ECL-2.0",
"Apache-2.0"
] | 6 | 2017-11-24T18:10:47.000Z | 2017-11-26T17:58:11.000Z | test/nomex/sentinel_test.exs | neojin/nomad-elixir | 95d169adcfe42549a4f7d8e939c7a47e211fbb04 | [
"ECL-2.0",
"Apache-2.0"
defmodule Nomex.SentinelTest do
  use ExUnit.Case
  use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney

  test "policies" do
    # Replays the recorded HTTP interaction instead of hitting a live agent.
    use_cassette "sentinel/policies" do
      {:ok, response} = Nomex.Sentinel.policies()

      # Sentinel endpoints are Nomad Enterprise only, hence the 501 reply.
      assert response.status_code == 501
      assert response.body.error == "Nomad Enterprise only endpoint"
    end
  end
end
| 24.285714 | 68 | 0.708824 |
1cd5a49e2d4756f780685effa14614729b68dd21 | 1,323 | ex | Elixir | lib/adyen_checkout_ex/model/three_ds2_result.ex | rsystem-se/adyen_checkout_ex | 4210f1c7150152e81f350dc6ca55aeb19665b85b | [
"MIT"
] | 1 | 2021-05-30T20:45:10.000Z | 2021-05-30T20:45:10.000Z | lib/adyen_checkout_ex/model/three_ds2_result.ex | rsystem-se/adyen_checkout_ex | 4210f1c7150152e81f350dc6ca55aeb19665b85b | [
"MIT"
] | null | null | null | lib/adyen_checkout_ex/model/three_ds2_result.ex | rsystem-se/adyen_checkout_ex | 4210f1c7150152e81f350dc6ca55aeb19665b85b | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule AdyenCheckoutEx.Model.ThreeDs2Result do
  @moduledoc """
  Generated model for a 3D Secure 2 authentication result payload.

  Every field is an optional string (see `t:t/0`); fields absent from the
  decoded JSON are left as `nil`.
  """

  # Poison serializes the struct's fields directly as JSON keys.
  @derive [Poison.Encoder]
  defstruct [
    :"authenticationValue",
    :"cavvAlgorithm",
    :"challengeCancel",
    :"challengeIndicator",
    :"dsTransID",
    :"eci",
    :"exemptionIndicator",
    :"messageVersion",
    :"riskScore",
    :"threeDSServerTransID",
    :"timestamp",
    :"transStatus",
    :"transStatusReason",
    :"whiteListStatus"
  ]

  @type t :: %__MODULE__{
    :"authenticationValue" => String.t | nil,
    :"cavvAlgorithm" => String.t | nil,
    :"challengeCancel" => String.t | nil,
    :"challengeIndicator" => String.t | nil,
    :"dsTransID" => String.t | nil,
    :"eci" => String.t | nil,
    :"exemptionIndicator" => String.t | nil,
    :"messageVersion" => String.t | nil,
    :"riskScore" => String.t | nil,
    :"threeDSServerTransID" => String.t | nil,
    :"timestamp" => String.t | nil,
    :"transStatus" => String.t | nil,
    :"transStatusReason" => String.t | nil,
    :"whiteListStatus" => String.t | nil
  }
end
defimpl Poison.Decoder, for: AdyenCheckoutEx.Model.ThreeDs2Result do
  # No nested models to post-process, so the decoded value passes through.
  def decode(value, _options), do: value
end
| 25.442308 | 91 | 0.631141 |
1cd5d9f9ad5346230af6afff1ad104a562aa8a9d | 261 | ex | Elixir | lib/ada/source/weather.ex | cloud8421/ada | 384f5e5e85e95c4d5883298259e781cc0a54bd07 | [
"MIT"
] | 7 | 2019-05-11T12:14:48.000Z | 2021-04-02T18:42:51.000Z | lib/ada/source/weather.ex | cloud8421/ada | 384f5e5e85e95c4d5883298259e781cc0a54bd07 | [
"MIT"
] | 3 | 2019-05-11T08:01:47.000Z | 2019-05-14T12:06:50.000Z | lib/ada/source/weather.ex | cloud8421/ada | 384f5e5e85e95c4d5883298259e781cc0a54bd07 | [
"MIT"
defmodule Ada.Source.Weather do
  @moduledoc false

  alias Ada.Source.Weather.{ApiClient, Report}

  @doc """
  Fetches the weather report for the given coordinates.

  Returns `{:ok, report}` on success or the `{:error, reason}` produced by
  the API client.
  """
  @spec get(%{lat: float, lng: float}) :: {:ok, Report.t()} | {:error, term}
  def get(%{lat: lat, lng: lng}) do
    # Matching in the head asserts the expected shape up front instead of
    # failing later with a KeyError on `params.lat`.
    ApiClient.get_by_location(lat, lng)
  end
end
| 26.1 | 76 | 0.689655 |
1cd5dcf47b49eddb4e23554bd3b01845c59e2e46 | 1,849 | exs | Elixir | clients/app_engine/mix.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/mix.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/mix.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AppEngine.Mixfile do
  @moduledoc false
  use Mix.Project

  # Package version, exposed through project()[:version].
  @version "0.20.0"
  # Canonical location of this client inside the elixir-google-api monorepo.
  @source_url "https://github.com/googleapis/elixir-google-api/tree/master/clients/app_engine"

  # Mix project configuration for the App Engine Admin API client.
  def project do
    [
      app: :google_api_app_engine,
      version: @version,
      elixir: "~> 1.6",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: @source_url
    ]
  end

  # OTP application configuration; library only, no supervision tree.
  def application do
    [extra_applications: [:logger]]
  end

  # Runtime and doc-generation dependencies.
  defp deps do
    [
      {:google_gax, "~> 0.2"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # Hex package description.
  defp description do
    """
    App Engine Admin API client library. Provisions and manages developers' App Engine applications.
    """
  end

  # Hex package metadata.
  defp package do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => @source_url,
        "Homepage" => "https://cloud.google.com/appengine/docs/admin-api/"
      }
    ]
  end
end
| 27.597015 | 101 | 0.656571 |
1cd63c14f1a3e33391c8702e5d4f8d234d989621 | 2,526 | exs | Elixir | test/controllers/room_controller_test.exs | jusroberts/pingpong_server | fd090413076c84e4f7297349d4c9fd6323bd1ddf | [
"MIT"
] | null | null | null | test/controllers/room_controller_test.exs | jusroberts/pingpong_server | fd090413076c84e4f7297349d4c9fd6323bd1ddf | [
"MIT"
] | null | null | null | test/controllers/room_controller_test.exs | jusroberts/pingpong_server | fd090413076c84e4f7297349d4c9fd6323bd1ddf | [
"MIT"
defmodule PingpongServer.RoomControllerTest do
  @moduledoc false
  use PingpongServer.ConnCase

  alias PingpongServer.Room

  # Attributes accepted by the Room changeset; reused by create/update tests.
  @valid_attrs %{client_token: "some content", name: "some content", team_a_score: 42, team_b_score: 42}
  @invalid_attrs %{}

  test "lists all entries on index", %{conn: conn} do
    conn = get conn, room_path(conn, :index)
    assert html_response(conn, 200) =~ "Listing rooms"
  end

  test "renders form for new resources", %{conn: conn} do
    conn = get conn, room_path(conn, :new)
    assert html_response(conn, 200) =~ "New room"
  end

  test "creates resource and redirects when data is valid", %{conn: conn} do
    conn = post conn, room_path(conn, :create), room: @valid_attrs
    assert redirected_to(conn) == room_path(conn, :index)
    assert Repo.get_by(Room, @valid_attrs)
  end

  test "does not create resource and renders errors when data is invalid", %{conn: conn} do
    conn = post conn, room_path(conn, :create), room: @invalid_attrs
    assert html_response(conn, 200) =~ "New room"
  end

  test "shows chosen resource", %{conn: conn} do
    room = Repo.insert! %Room{}
    conn = get conn, room_path(conn, :show, room)
    assert html_response(conn, 200) =~ "Show room"
  end

  test "renders page not found when id is nonexistent", %{conn: conn} do
    assert_error_sent 404, fn ->
      get conn, room_path(conn, :show, -1)
    end
  end

  test "renders form for editing chosen resource", %{conn: conn} do
    room = Repo.insert! %Room{}
    conn = get conn, room_path(conn, :edit, room)
    assert html_response(conn, 200) =~ "Edit room"
  end

  test "updates chosen resource and redirects when data is valid", %{conn: conn} do
    room = Repo.insert! %Room{}
    conn = put conn, room_path(conn, :update, room), room: @valid_attrs
    assert redirected_to(conn) == room_path(conn, :show, room)
    assert Repo.get_by(Room, @valid_attrs)
  end

  test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
    room = Repo.insert! %Room{}
    conn = put conn, room_path(conn, :update, room), room: @invalid_attrs
    assert html_response(conn, 200) =~ "Edit room"
  end

  test "deletes chosen resource", %{conn: conn} do
    room = Repo.insert! %Room{}
    conn = delete conn, room_path(conn, :delete, room)
    assert redirected_to(conn) == room_path(conn, :index)
    refute Repo.get(Room, room.id)
  end

  test "increment team a", %{conn: conn} do
    room = Repo.insert! %Room{}

    # BUG FIX: the response was previously rebound to an unused `conn`
    # (compiler warning) and the test asserted nothing, so it could only
    # fail if the request raised.
    conn = get conn, room_path(conn, :increment, room)

    # TODO(review): assert the exact increment response (redirect vs render)
    # once its contract is confirmed; for now require that a response was set.
    assert conn.status
  end
end
| 35.083333 | 104 | 0.678147 |
1cd649f6dc4769b53020f6b359368227469798cb | 10,787 | ex | Elixir | lib/exdis/command_parsers/string.ex | g-andrade/exdis | 6cb6775b16a237718f10dc5177d9369279e2d528 | [
"MIT"
] | 2 | 2020-03-20T17:25:59.000Z | 2020-03-22T14:17:30.000Z | lib/exdis/command_parsers/string.ex | g-andrade/exdis | 6cb6775b16a237718f10dc5177d9369279e2d528 | [
"MIT"
] | null | null | null | lib/exdis/command_parsers/string.ex | g-andrade/exdis | 6cb6775b16a237718f10dc5177d9369279e2d528 | [
"MIT"
defmodule Exdis.CommandParsers.String do
  @moduledoc """
  Parsers for the string-family Redis commands.

  Each parser receives the decoded RESP argument list and returns either
  `{:ok, key_names, handler}` (optionally tagged `[:varargs]`), where
  `handler` is later applied to the keys' stored values, or an
  `{:error, reason}` tuple describing the protocol-level failure
  (`:bad_syntax`, `{:wrong_number_of_arguments, command}`, ...).
  """

  ## ------------------------------------------------------------------
  ## APPEND Command
  ## ------------------------------------------------------------------

  def append([{:string, key_name}, {:string, tail}]) do
    {:ok, [key_name], &Exdis.Database.Value.String.append(&1, tail)}
  end

  def append([_, _]) do
    {:error, :bad_syntax}
  end

  def append(_) do
    {:error, {:wrong_number_of_arguments, :"APPEND"}}
  end

  ## ------------------------------------------------------------------
  ## BITCOUNT command
  ## ------------------------------------------------------------------

  # With no range, counts bits over the whole value (0..-1).
  def bit_count([{:string, key_name}]) do
    {:ok, [key_name], &Exdis.Database.Value.String.bit_count(&1, 0, -1)}
  end

  def bit_count([{:string, key_name}, resp_start, resp_finish]) do
    case {Exdis.CommandParsers.Util.maybe_coerce_into_int64(resp_start),
          Exdis.CommandParsers.Util.maybe_coerce_into_int64(resp_finish)}
    do
      {{:ok, start}, {:ok, finish}} ->
        {:ok, [key_name], &Exdis.Database.Value.String.bit_count(&1, start, finish)}
      {{:error, _}, _} ->
        {:error, {:not_an_integer_or_out_of_range, :start}}
      {_, {:error, _}} ->
        {:error, {:not_an_integer_or_out_of_range, :end}}
    end
  end

  def bit_count(args) when length(args) in [1, 3] do
    {:error, :bad_syntax}
  end

  def bit_count(_) do
    {:error, {:wrong_number_of_arguments, :"BITCOUNT"}}
  end

  ## ------------------------------------------------------------------
  ## BITPOS command
  ## ------------------------------------------------------------------

  def bit_position([{:string, key_name}, resp_bit]) do
    case Exdis.CommandParsers.Util.maybe_coerce_into_bit(resp_bit) do
      {:ok, bit} ->
        {:ok, [key_name], &Exdis.Database.Value.String.bit_position(&1, bit)}
      {:error, _} ->
        {:error, {:not_a_bit, :bit}}
    end
  end

  def bit_position([{:string, key_name}, resp_bit, resp_start]) do
    case {Exdis.CommandParsers.Util.maybe_coerce_into_bit(resp_bit),
          Exdis.CommandParsers.Util.maybe_coerce_into_int64(resp_start)}
    do
      {{:ok, bit}, {:ok, start}} ->
        {:ok, [key_name], &Exdis.Database.Value.String.bit_position(&1, bit, start)}
      {{:error, _}, _} ->
        {:error, {:not_a_bit, :bit}}
      {_, {:error, _}} ->
        {:error, {:not_an_integer_or_out_of_range, :start}}
    end
  end

  def bit_position([{:string, key_name}, resp_bit, resp_start, resp_finish]) do
    case {Exdis.CommandParsers.Util.maybe_coerce_into_bit(resp_bit),
          Exdis.CommandParsers.Util.maybe_coerce_into_int64(resp_start),
          Exdis.CommandParsers.Util.maybe_coerce_into_int64(resp_finish)}
    do
      {{:ok, bit}, {:ok, start}, {:ok, finish}} ->
        {:ok, [key_name], &Exdis.Database.Value.String.bit_position(&1, bit, start, finish)}
      {{:error, _}, _, _} ->
        {:error, {:not_a_bit, :bit}}
      {_, {:error, _}, _} ->
        {:error, {:not_an_integer_or_out_of_range, :start}}
      {_, _, {:error, _}} ->
        {:error, {:not_an_integer_or_out_of_range, :end}}
    end
  end

  def bit_position(args) when length(args) in [2, 3, 4] do
    {:error, :bad_syntax}
  end

  def bit_position(_) do
    {:error, {:wrong_number_of_arguments, :"BITPOS"}}
  end

  ## ------------------------------------------------------------------
  ## DECR Command
  ## ------------------------------------------------------------------

  # DECR is INCRBY with a fixed delta of -1.
  def decrement([{:string, key_name}]) do
    {:ok, [key_name], &Exdis.Database.Value.String.increment_by(&1, -1)}
  end

  def decrement([_]) do
    {:error, :bad_syntax}
  end

  def decrement(_) do
    {:error, {:wrong_number_of_arguments, :"DECR"}}
  end

  ## ------------------------------------------------------------------
  ## DECRBY Command
  ## ------------------------------------------------------------------

  def decrement_by([{:string, key_name}, resp_decrement]) do
    case Exdis.CommandParsers.Util.maybe_coerce_into_int64(resp_decrement) do
      {:ok, decrement} ->
        {:ok, [key_name], &Exdis.Database.Value.String.increment_by(&1, -decrement)}
      {:error, _} ->
        {:error, {:not_an_integer_or_out_of_range, :decrement}}
    end
  end

  def decrement_by([_, _]) do
    {:error, :bad_syntax}
  end

  def decrement_by(_) do
    {:error, {:wrong_number_of_arguments, :"DECRBY"}}
  end

  ## ------------------------------------------------------------------
  ## GET Command
  ## ------------------------------------------------------------------

  def get([{:string, key_name}]) do
    {:ok, [key_name], &Exdis.Database.Value.String.get(&1)}
  end

  def get([_]) do
    {:error, :bad_syntax}
  end

  def get(_) do
    {:error, {:wrong_number_of_arguments, :"GET"}}
  end

  ## ------------------------------------------------------------------
  ## GETBIT Command
  ## ------------------------------------------------------------------

  def get_bit([{:string, key_name}, resp_offset]) do
    case Exdis.CommandParsers.Util.maybe_coerce_into_int64(resp_offset) do
      {:ok, offset} when offset >= 0 ->
        {:ok, [key_name], &Exdis.Database.Value.String.get_bit(&1, offset)}
      _ ->
        {:error, {:not_an_integer_or_out_of_range, :"bit offset"}}
    end
  end

  def get_bit([_, _]) do
    {:error, :bad_syntax}
  end

  def get_bit(_) do
    {:error, {:wrong_number_of_arguments, :"GETBIT"}}
  end

  ## ------------------------------------------------------------------
  ## GETRANGE Command
  ## ------------------------------------------------------------------

  def get_range([{:string, key_name}, resp_start, resp_finish]) do
    case {Exdis.CommandParsers.Util.maybe_coerce_into_int64(resp_start),
          Exdis.CommandParsers.Util.maybe_coerce_into_int64(resp_finish)}
    do
      {{:ok, start}, {:ok, finish}} ->
        {:ok, [key_name], &Exdis.Database.Value.String.get_range(&1, start, finish)}
      {{:error, _}, _} ->
        {:error, {:not_an_integer_or_out_of_range, :start}}
      {_, {:error, _}} ->
        {:error, {:not_an_integer_or_out_of_range, :end}}
    end
  end

  def get_range([_, _, _]) do
    {:error, :bad_syntax}
  end

  def get_range(_) do
    {:error, {:wrong_number_of_arguments, :"GETRANGE"}}
  end

  ## ------------------------------------------------------------------
  ## GETSET Command
  ## ------------------------------------------------------------------

  def get_set([{:string, key_name}, resp_value]) do
    case Exdis.CommandParsers.Util.maybe_coerce_into_string(resp_value) do
      {:ok, value} ->
        {:ok, [key_name], &Exdis.Database.Value.String.get_set(&1, value)}
      {:error, _} ->
        {:error, :bad_syntax}
    end
  end

  def get_set([_, _]) do
    {:error, :bad_syntax}
  end

  def get_set(_) do
    {:error, {:wrong_number_of_arguments, :"GETSET"}}
  end

  ## ------------------------------------------------------------------
  ## INCR Command
  ## ------------------------------------------------------------------

  # INCR is INCRBY with a fixed delta of +1.
  def increment([{:string, key_name}]) do
    {:ok, [key_name], &Exdis.Database.Value.String.increment_by(&1, +1)}
  end

  def increment([_]) do
    {:error, :bad_syntax}
  end

  def increment(_) do
    {:error, {:wrong_number_of_arguments, :"INCR"}}
  end

  ## ------------------------------------------------------------------
  ## INCRBY Command
  ## ------------------------------------------------------------------

  def increment_by([{:string, key_name}, resp_increment]) do
    case Exdis.CommandParsers.Util.maybe_coerce_into_int64(resp_increment) do
      {:ok, increment} ->
        {:ok, [key_name], &Exdis.Database.Value.String.increment_by(&1, +increment)}
      {:error, _} ->
        {:error, {:not_an_integer_or_out_of_range, :increment}}
    end
  end

  def increment_by([_, _]) do
    {:error, :bad_syntax}
  end

  def increment_by(_) do
    {:error, {:wrong_number_of_arguments, :"INCRBY"}}
  end

  ## ------------------------------------------------------------------
  ## INCRBYFLOAT Command
  ## ------------------------------------------------------------------

  def increment_by_float([{:string, key_name}, resp_increment]) do
    case Exdis.CommandParsers.Util.maybe_coerce_into_float(resp_increment) do
      {:ok, increment} ->
        {:ok, [key_name], &Exdis.Database.Value.String.increment_by_float(&1, +increment)}
      {:error, _} ->
        {:error, {:not_a_valid_float, :increment}}
    end
  end

  def increment_by_float([_, _]) do
    {:error, :bad_syntax}
  end

  def increment_by_float(_) do
    # BUG FIX: this arity error previously reported the wrong command name
    # (:"INCRBY", copy-pasted from the clause above).
    {:error, {:wrong_number_of_arguments, :"INCRBYFLOAT"}}
  end

  ## ------------------------------------------------------------------
  ## MGET Command
  ## ------------------------------------------------------------------

  def mget(args) do
    case Exdis.CommandParsers.Util.parse_string_list(args, [:non_empty]) do
      {:ok, key_names} ->
        {:ok, key_names, &Exdis.Database.Value.String.mget(&1), [:varargs]}
      {:error, :empty_list} ->
        {:error, {:wrong_number_of_arguments, :"MGET"}}
      {:error, _} ->
        {:error, :bad_syntax}
    end
  end

  ## ------------------------------------------------------------------
  ## MSET Command
  ## ------------------------------------------------------------------

  def mset(args) do
    case Exdis.CommandParsers.Util.parse_and_unzip_kvlist(args, [:non_empty, :unique]) do
      {:ok, key_names, values} ->
        {:ok, key_names, &Exdis.Database.Value.String.mset(&1, values), [:varargs]}
      {:error, :empty_list} ->
        {:error, {:wrong_number_of_arguments, :"MSET"}}
      {:error, {:unpaired_entry, _}} ->
        {:error, {:wrong_number_of_arguments, :"MSET"}}
      {:error, _} ->
        {:error, :bad_syntax}
    end
  end

  ## ------------------------------------------------------------------
  ## SET Command
  ## ------------------------------------------------------------------

  def set([{:string, key_name}, resp_value]) do
    case Exdis.CommandParsers.Util.maybe_coerce_into_string(resp_value) do
      {:ok, value} ->
        {:ok, [key_name], &Exdis.Database.Value.String.set(&1, value)}
      {:error, _} ->
        {:error, :bad_syntax}
    end
  end

  def set([_, _]) do
    {:error, :bad_syntax}
  end

  def set(_) do
    {:error, {:wrong_number_of_arguments, :"SET"}}
  end

  ## ------------------------------------------------------------------
  ## STRLEN Command
  ## ------------------------------------------------------------------

  def str_length([{:string, key_name}]) do
    {:ok, [key_name], &Exdis.Database.Value.String.str_length(&1)}
  end

  def str_length([_]) do
    {:error, :bad_syntax}
  end

  def str_length(_) do
    {:error, {:wrong_number_of_arguments, :"STRLEN"}}
  end
end
| 31.357558 | 92 | 0.493279 |
1cd64b4a3c9ecd0f79aedcaceb428f4ddd6bce03 | 4,987 | ex | Elixir | lib/sanbase/billing/billing.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 1 | 2022-01-30T19:51:39.000Z | 2022-01-30T19:51:39.000Z | lib/sanbase/billing/billing.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | lib/sanbase/billing/billing.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
defmodule Sanbase.Billing do
  @moduledoc ~s"""
  Context module for all billing functionality
  """

  import Ecto.Query
  import Sanbase.Billing.EventEmitter, only: [emit_event: 3]

  alias Sanbase.Repo
  alias Sanbase.Billing.{Product, Plan, Subscription}
  alias Sanbase.Billing.Subscription.LiquiditySubscription
  alias Sanbase.Billing.Subscription.ProPlus
  alias Sanbase.Accounts.User
  alias Sanbase.StripeApi

  # Subscription API
  defdelegate subscribe(user, plan, card, coupon), to: Subscription
  defdelegate update_subscription(subscription, plan), to: Subscription
  defdelegate cancel_subscription(subscription), to: Subscription
  defdelegate renew_cancelled_subscription(subscription), to: Subscription
  defdelegate sync_stripe_subscriptions, to: Subscription
  defdelegate remove_duplicate_subscriptions, to: Subscription

  # LiquiditySubscription
  defdelegate create_liquidity_subscription(user_id), to: LiquiditySubscription
  defdelegate remove_liquidity_subscription(liquidity_subscription), to: LiquiditySubscription
  defdelegate list_liquidity_subscriptions, to: LiquiditySubscription
  defdelegate eligible_for_liquidity_subscription?(user_id), to: LiquiditySubscription
  defdelegate user_has_active_sanbase_subscriptions?(user_id), to: LiquiditySubscription
  defdelegate sync_liquidity_subscriptions_staked_users, to: LiquiditySubscription
  defdelegate maybe_create_liquidity_subscriptions_staked_users, to: LiquiditySubscription
  defdelegate maybe_remove_liquidity_subscriptions_staked_users, to: LiquiditySubscription

  # ProPlus
  defdelegate create_free_basic_api, to: ProPlus
  defdelegate delete_free_basic_api, to: ProPlus

  # Returns all billing products.
  def list_products(), do: Repo.all(Product)

  # Returns all plans with their product preloaded.
  def list_plans() do
    from(p in Plan, preload: [:product])
    |> Repo.all()
  end

  # A user is trial-eligible only if they never had any subscription
  # (of any status) for the Sanbase product.
  def eligible_for_sanbase_trial?(user_id) do
    Subscription.all_user_subscriptions_for_product(user_id, Product.product_sanbase())
    |> Enum.empty?()
  end

  @doc ~s"""
  Sync the locally defined Products and Plans with stripe.
  This action assigns a `stripe_id` to every product and plan without which
  no subscription can succeed.
  In order to create the Products and Plans locally, the seed
  `priv/repo/seed_plans_and_products.exs` must be executed.
  """
  @spec sync_products_with_stripe() :: :ok | {:error, %Stripe.Error{}}
  def sync_products_with_stripe() do
    # Products must exist in Stripe before their plans can be created.
    with :ok <- run_sync(list_products(), &Product.maybe_create_product_in_stripe/1),
         :ok <- run_sync(list_plans(), &Plan.maybe_create_plan_in_stripe/1) do
      :ok
    else
      {:error, error} -> {:error, error}
    end
  end

  @doc """
  If user has enough SAN staked and has no active Sanbase subscription - create one
  """
  @spec maybe_create_liquidity_subscription(non_neg_integer()) ::
          {:ok, %Subscription{}} | {:error, any()} | false
  def maybe_create_liquidity_subscription(user_id) do
    # `&&` short-circuits: returns `false` when the user is not eligible.
    eligible_for_liquidity_subscription?(user_id) && create_liquidity_subscription(user_id)
  end

  # Private functions

  # Return :ok if all function calls over the list return {:ok, _}
  # Return the error otherwise
  defp run_sync(list, function) when is_function(function, 1) do
    # Enum.find/3 returns the first {:error, _} element, or the :ok default.
    Enum.map(list, function)
    |> Enum.find(:ok, fn
      {:ok, _} -> false
      {:error, _} -> true
    end)
  end

  @spec create_or_update_stripe_customer(%User{}, String.t() | nil) ::
          {:ok, %User{}} | {:error, %Stripe.Error{}}
  def create_or_update_stripe_customer(user, card_token \\ nil)

  # No Stripe customer yet: create one and persist its id on the user.
  def create_or_update_stripe_customer(%User{stripe_customer_id: nil} = user, card_token) do
    with {:ok, stripe_customer} = result <- StripeApi.create_customer(user, card_token) do
      emit_event(result, :create_stripe_customer, %{user: user, card_token: card_token})
      User.update_field(user, :stripe_customer_id, stripe_customer.id)
    end
  end

  # Customer exists and no new card token was provided: nothing to do.
  def create_or_update_stripe_customer(%User{stripe_customer_id: stripe_id} = user, nil)
      when is_binary(stripe_id) do
    {:ok, user}
  end

  # Customer exists and a new card token was provided: update it in Stripe.
  def create_or_update_stripe_customer(%User{stripe_customer_id: stripe_id} = user, card_token)
      when is_binary(stripe_id) do
    with {:ok, _} = result <- StripeApi.update_customer(user, card_token) do
      emit_event(result, :update_stripe_customer, %{user: user, card_token: card_token})
      {:ok, user}
    end
  end

  # Returns {:ok, mapset} of user ids with Sanbase Pro access, i.e. users with
  # a direct Pro subscription plus secondary users linked to a Pro primary.
  def get_sanbase_pro_user_ids() do
    sanbase_user_ids_mapset =
      Subscription.get_direct_sanbase_pro_user_ids()
      |> MapSet.new()

    linked_user_id_pairs = Sanbase.Accounts.LinkedUser.get_all_user_id_pairs()

    user_ids_inherited_sanbase_pro =
      Enum.reduce(linked_user_id_pairs, MapSet.new(), fn pair, acc ->
        {primary_user_id, secondary_user_id} = pair

        case primary_user_id in sanbase_user_ids_mapset do
          true -> MapSet.put(acc, secondary_user_id)
          false -> acc
        end
      end)

    result = MapSet.union(sanbase_user_ids_mapset, user_ids_inherited_sanbase_pro)
    {:ok, result}
  end
end
| 36.137681 | 95 | 0.748947 |
1cd67a433850f9ef4d30edc860fac7b069492ea1 | 777 | ex | Elixir | lib/ex_dns/resolver/supervisor.ex | kipcole9/dns | f32448954f5c8f13ba714099f47e0e80e1091cf5 | [
"Apache-2.0"
] | 3 | 2019-08-09T05:24:23.000Z | 2021-11-16T18:44:00.000Z | lib/ex_dns/resolver/supervisor.ex | kipcole9/dns | f32448954f5c8f13ba714099f47e0e80e1091cf5 | [
"Apache-2.0"
] | null | null | null | lib/ex_dns/resolver/supervisor.ex | kipcole9/dns | f32448954f5c8f13ba714099f47e0e80e1091cf5 | [
"Apache-2.0"
] | null | null | null | defmodule ExDns.Resolver.Supervisor do
@moduledoc false
use Supervisor
@pool_name :"Elixir.ExDns.Resolver.Pool"
@worker_module ExDns.Resolver.Worker
# Starts this supervisor, registered locally under the module name.
def start_link, do: Supervisor.start_link(__MODULE__, :ok, name: __MODULE__)
# Supervisor callback: supervises a single :poolboy pool of resolver workers.
def init(:ok) do
  children = [
    # Each pooled worker is started with the configured resolver module
    # as its argument (see ExDns.resolver_module/0).
    :poolboy.child_spec(@pool_name, poolboy_config(), %{resolver: ExDns.resolver_module()})
  ]
  # NOTE(review): `supervise/2` comes from the legacy Supervisor.Spec DSL
  # (deprecated in modern Elixir); kept as-is to preserve the return shape.
  supervise(children, strategy: :one_for_one)
end
# Builds the :poolboy pool configuration. Pool sizing comes from the
# application config via `ExDns.pool_size/0` and `ExDns.pool_overflow_size/0`.
defp poolboy_config() do
  [
    {:name, {:local, @pool_name}},
    {:worker_module, @worker_module},
    {:size, ExDns.pool_size()},
    {:max_overflow, ExDns.pool_overflow_size()},
    # Fixed typo: the key was `:strateg`, which poolboy does not recognize
    # and silently ignored, so the pool ran with the default :lifo strategy
    # instead of the intended :fifo.
    {:strategy, :fifo}
  ]
end
# Name under which the :poolboy pool is registered.
def pool_name, do: @pool_name
# Returns the current :poolboy status of the resolver pool.
def pool_status, do: :poolboy.status(pool_name())
end
| 19.923077 | 93 | 0.664093 |
1cd6a68917bfed4a616c5bef7189e9cb07e28783 | 2,297 | exs | Elixir | mix.exs | coby-spotim/wobserver | b0b9d77fb4ff40bb417f6b370ee343b8d42de750 | [
"MIT"
] | null | null | null | mix.exs | coby-spotim/wobserver | b0b9d77fb4ff40bb417f6b370ee343b8d42de750 | [
"MIT"
] | null | null | null | mix.exs | coby-spotim/wobserver | b0b9d77fb4ff40bb417f6b370ee343b8d42de750 | [
"MIT"
] | 1 | 2018-09-12T16:13:57.000Z | 2018-09-12T16:13:57.000Z | defmodule Wobserver.Mixfile do
use Mix.Project
# Mix project definition: package metadata plus coverage, dialyzer and
# ExDoc configuration for the :wobserver package.
def project do
  [
    app: :wobserver,
    version: "0.1.9",
    elixir: "~> 1.4",
    description: "Web based metrics, monitoring, and observer.",
    package: package(),
    # Embed the BEAM files and start permanently only for production builds.
    build_embedded: Mix.env() == :prod,
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    # Testing
    test_coverage: [tool: ExCoveralls],
    # Run every coveralls task in the :test environment.
    preferred_cli_env: [
      coveralls: :test,
      "coveralls.detail": :test,
      "coveralls.post": :test,
      "coveralls.html": :test
    ],
    dialyzer: [ignore_warnings: "dialyzer.ignore-warnings"],
    # Docs
    name: "Wobserver",
    source_url: "https://github.com/shinyscorpion/wobserver",
    homepage_url: "https://github.com/shinyscorpion/wobserver",
    docs: [
      main: "readme",
      extras: ["README.md"]
    ]
  ]
end
# Hex package metadata: maintainers, license and the files shipped
# in the published package.
def package do
  [
    name: :wobserver,
    maintainers: ["Ian Luites"],
    licenses: ["MIT"],
    files: [
      # Elixir
      "lib/wobserver",
      "lib/wobserver.ex",
      "mix.exs",
      "README*",
      "LICENSE*"
    ],
    links: %{
      "GitHub" => "https://github.com/shinyscorpion/wobserver"
    }
  ]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
# OTP application configuration: extra applications to start before this
# one, and the application callback module.
def application do
  # Specify extra applications you'll use from Erlang/Elixir
  [
    extra_applications: [
      :logger,
      :httpoison
    ],
    # Wobserver.Application implements the Application start callback.
    mod: {Wobserver.Application, []}
  ]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
# Project dependencies; dev/test-only tooling is restricted via `only:`.
defp deps do
  [
    {:cowboy, "~> 1.1"},
    {:credo, "~> 0.7", only: [:dev, :test]},
    {:dialyxir, "~> 0.5", only: [:dev], runtime: false},
    {:ex_doc, "~> 0.15", only: :dev},
    {:excoveralls, "~> 0.6", only: :test},
    {:httpoison, "~> 0.11 or ~> 0.12"},
    {:inch_ex, "~> 0.5", only: [:dev, :test]},
    {:meck, "~> 0.8.4", only: :test},
    {:plug, "~> 1.3 or ~> 1.4"},
    {:poison, "~> 2.0 or ~> 3.1"},
    {:websocket_client, "~> 1.2"}
  ]
end
end
| 24.967391 | 79 | 0.525903 |
1cd6cd2fa864a14c7cc731b459a256e6004ae8f8 | 138 | ex | Elixir | lib/nerves.ex | opencollective/nerves | 81f5d30de283e77f3720a87fa1435619f0da12de | [
"Apache-2.0"
] | 1 | 2019-06-12T17:34:10.000Z | 2019-06-12T17:34:10.000Z | lib/nerves.ex | opencollective/nerves | 81f5d30de283e77f3720a87fa1435619f0da12de | [
"Apache-2.0"
] | null | null | null | lib/nerves.ex | opencollective/nerves | 81f5d30de283e77f3720a87fa1435619f0da12de | [
"Apache-2.0"
] | null | null | null | defmodule Nerves do
# Both values are resolved at compile time via `unquote`, so they are baked
# into the compiled module rather than looked up from Mix/System at runtime.
def version, do: unquote(Mix.Project.config[:version])
def elixir_version, do: unquote(System.version)
end
| 27.6 | 63 | 0.731884 |
1cd6d16fc324bb4f2e0fe72363d237a94ce3bde3 | 1,280 | ex | Elixir | implementations/elixir/ockam/ockam_services/lib/services/api/static_forwarding_api.ex | 0x00A5/ockam | 9710804e20606e70057d65c70d1af7236194aeeb | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam_services/lib/services/api/static_forwarding_api.ex | 0x00A5/ockam | 9710804e20606e70057d65c70d1af7236194aeeb | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam_services/lib/services/api/static_forwarding_api.ex | 0x00A5/ockam | 9710804e20606e70057d65c70d1af7236194aeeb | [
"Apache-2.0"
] | null | null | null | defmodule Ockam.Services.API.StaticForwarding do
@moduledoc """
API for static forwarding service
See `Ockam.Services.StaticForwarding`
Methods:
:post, path: "", body: alias - register a forwarding alias
"""
use Ockam.Services.API
alias Ockam.API.Request
alias Ockam.Services.API
alias Ockam.Services.StaticForwarding, as: Base
@impl true
# Delegates service setup to the base static-forwarding implementation.
def setup(options, state), do: Base.setup(options, state)
@impl true
# POST "" with a binary body registers `from_route` as the forwarding
# destination for that alias; replies :ok with the forwarder worker.
def handle_request(
      %Request{method: :post, path: "", from_route: from_route, body: alias_str},
      state
    )
    when is_binary(alias_str) and is_list(from_route) do
  case subscribe(alias_str, from_route, state) do
    {:ok, forwarder} -> {:reply, :ok, forwarder, state}
    {:error, reason} -> {:error, reason}
    unexpected -> {:error, {:unexpected_return, unexpected}}
  end
end

# Any other POST shape is malformed.
def handle_request(%Request{method: :post}, _state), do: {:error, :bad_request}

# Only POST is supported by this API.
def handle_request(%Request{}, _state), do: {:error, :method_not_allowed}
# Ensures a forwarder worker exists for `alias_str` and points its route
# at `route` (without notifying). Returns `{:ok, worker}` on success;
# any failure value is passed through unchanged.
def subscribe(alias_str, route, state) do
  case Base.ensure_alias_worker(alias_str, state) do
    {:ok, worker} ->
      case Base.Forwarder.update_route(worker, route, notify: false) do
        :ok -> {:ok, worker}
        error -> error
      end

    error ->
      error
  end
end
end
| 23.703704 | 83 | 0.653906 |
1cd6d5a4bfcf207c05901041399527f149240938 | 860 | exs | Elixir | Chapter 4/CacheServer/cache_server/mix.exs | sebarys/learning-elixir | 6f105205b70e5bdaffe7bbc11ef4c1c7c677d6d1 | [
"Apache-2.0"
] | null | null | null | Chapter 4/CacheServer/cache_server/mix.exs | sebarys/learning-elixir | 6f105205b70e5bdaffe7bbc11ef4c1c7c677d6d1 | [
"Apache-2.0"
] | null | null | null | Chapter 4/CacheServer/cache_server/mix.exs | sebarys/learning-elixir | 6f105205b70e5bdaffe7bbc11ef4c1c7c677d6d1 | [
"Apache-2.0"
] | null | null | null | defmodule CacheServer.Mixfile do
use Mix.Project
# Mix project definition for the :cache_server application.
def project do
  [app: :cache_server,
   version: "0.1.0",
   elixir: "~> 1.4",
   # Embed the BEAM files and start permanently only in production builds.
   build_embedded: Mix.env == :prod,
   start_permanent: Mix.env == :prod,
   deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
# OTP application configuration.
#
# Only the last expression of a function is returned; the previous version
# also built `[applications: [:logger, :httpoison]]` on its own line, which
# was dead code that was silently discarded. With `extra_applications`
# (Elixir >= 1.4) Mix infers the applications of all dependencies —
# including :httpoison — automatically, so listing :logger is sufficient.
# The returned value is unchanged.
def application do
  [extra_applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
# Project dependencies (see `mix help deps`).
defp deps do
  [
    # HTTP client.
    {:httpoison, "~> 0.9.0"},
    # JSON encoder/decoder.
    {:json, "~> 0.3.0"}
  ]
end
end
| 22.631579 | 79 | 0.602326 |
1cd6ff8f59ae034c6c3071659442c39c67ab92a7 | 1,351 | ex | Elixir | lib/anchore_engine_api_server/model/anchore_image.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | lib/anchore_engine_api_server/model/anchore_image.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | lib/anchore_engine_api_server/model/anchore_image.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule AnchoreEngineAPIServer.Model.AnchoreImage do
  @moduledoc """
  A unique image in the engine. May have multiple tags or references. Unique to an image content across registries or repositories.
  """

  @derive [Poison.Encoder]
  defstruct [
    :analysis_status,
    :annotations,
    :created_at,
    :imageDigest,
    :image_content,
    :image_detail,
    :image_status,
    :last_updated,
    :userId
  ]

  # In a typespec a bare alias such as `Object` denotes the *atom*
  # `Elixir.Object`, not a value of that module's struct type. The nested
  # model fields are therefore spelled as remote types (`Module.t`) of the
  # generated model modules, matching what the Poison decoder below
  # produces. NOTE(review): assumes the generated Object/ImageContent/
  # ImageDetail modules each define `t/0` like this module does — confirm.
  @type t :: %__MODULE__{
    :analysis_status => String.t,
    :annotations => AnchoreEngineAPIServer.Model.Object.t,
    :created_at => DateTime.t,
    :imageDigest => String.t,
    :image_content => AnchoreEngineAPIServer.Model.ImageContent.t,
    :image_detail => [AnchoreEngineAPIServer.Model.ImageDetail.t],
    :image_status => String.t,
    :last_updated => DateTime.t,
    :userId => String.t
  }
end
defimpl Poison.Decoder, for: AnchoreEngineAPIServer.Model.AnchoreImage do
  import AnchoreEngineAPIServer.Deserializer

  # Decodes the nested model fields after the top-level struct has been
  # built; scalar fields are left untouched.
  def decode(value, options) do
    decoded = deserialize(value, :annotations, :struct, AnchoreEngineAPIServer.Model.Object, options)
    decoded = deserialize(decoded, :image_content, :struct, AnchoreEngineAPIServer.Model.ImageContent, options)
    deserialize(decoded, :image_detail, :list, AnchoreEngineAPIServer.Model.ImageDetail, options)
  end
end
| 29.369565 | 131 | 0.717987 |
1cd7283eb766e7289e0364bb7cfe858cd6bee0a8 | 1,529 | exs | Elixir | test/json_test.exs | ivanpetrovic/dissolver | e8d8d23bd8f038c00f0fe6ec732980b7b1263c3a | [
"MIT"
] | 21 | 2020-04-11T14:02:50.000Z | 2022-03-20T21:49:38.000Z | test/json_test.exs | ivanpetrovic/dissolver | e8d8d23bd8f038c00f0fe6ec732980b7b1263c3a | [
"MIT"
] | 5 | 2020-04-12T18:16:57.000Z | 2020-11-07T01:28:08.000Z | test/json_test.exs | ivanpetrovic/dissolver | e8d8d23bd8f038c00f0fe6ec732980b7b1263c3a | [
"MIT"
] | 2 | 2020-11-07T01:16:52.000Z | 2022-02-22T10:30:03.000Z | defmodule Dissolver.JSONTest do
use ExUnit.Case, async: true
# Dissolver.JSON.render_page_list/1 should turn the paginator's page list
# into plain maps (label/url/page/current) suitable for JSON encoding.
# The expected data shows page 7 as current within a 16-page result set.
test "renders a list of links in json format" do
  expected = [
    %{label: "First", url: "/products?category=25&page=1", page: 1, current: false},
    %{label: "<", url: "/products?category=25&page=6", page: 6, current: false},
    %{label: "2", url: "/products?category=25&page=2", page: 2, current: false},
    %{label: "3", url: "/products?category=25&page=3", page: 3, current: false},
    %{label: "4", url: "/products?category=25&page=4", page: 4, current: false},
    %{label: "5", url: "/products?category=25&page=5", page: 5, current: false},
    %{label: "6", url: "/products?category=25&page=6", page: 6, current: false},
    %{label: "7", url: "/products?category=25&page=7", page: 7, current: true},
    %{label: "8", url: "/products?category=25&page=8", page: 8, current: false},
    %{label: "9", url: "/products?category=25&page=9", page: 9, current: false},
    %{label: "10", url: "/products?category=25&page=10", page: 10, current: false},
    %{label: "11", url: "/products?category=25&page=11", page: 11, current: false},
    %{label: "12", url: "/products?category=25&page=12", page: 12, current: false},
    %{label: ">", url: "/products?category=25&page=8", page: 8, current: false},
    %{label: "Last", url: "/products?category=25&page=16", page: 16, current: false}
  ]

  # PaginatorData.page_list/0 is presumably a shared test fixture —
  # not visible in this file.
  data = PaginatorData.page_list()
  output = Dissolver.JSON.render_page_list(data)
  assert expected == output
end
end
| 52.724138 | 86 | 0.612165 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.