hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e805caa1be21939906007c31cab59713df10746b | 491 | ex | Elixir | lib/wax_demo_web/views/error_view.ex | skunkwerks/wax_demo | e0c562ea168654d2475f70cba1ef29e74fc0dd24 | [
"Apache-2.0"
] | 5 | 2019-02-14T18:39:20.000Z | 2022-01-31T05:49:23.000Z | lib/wax_demo_web/views/error_view.ex | skunkwerks/wax_demo | e0c562ea168654d2475f70cba1ef29e74fc0dd24 | [
"Apache-2.0"
] | 18 | 2019-03-17T14:21:37.000Z | 2022-03-25T20:55:56.000Z | lib/wax_demo_web/views/error_view.ex | skunkwerks/wax_demo | e0c562ea168654d2475f70cba1ef29e74fc0dd24 | [
"Apache-2.0"
] | 4 | 2019-03-17T13:13:24.000Z | 2022-02-01T19:50:22.000Z | defmodule WaxDemoWeb.ErrorView do
use WaxDemoWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 28.882353 | 61 | 0.735234 |
e805cb71ec10fd57c6fb8a9a1a85fa1b58ad238a | 2,491 | ex | Elixir | apps/omg_watcher/lib/omg_watcher/web/router.ex | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/lib/omg_watcher/web/router.ex | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/lib/omg_watcher/web/router.ex | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.Web.Router do
use OMG.Watcher.Web, :router
pipeline :api do
plug(:accepts, ["json"])
plug(:enforce_json_content)
end
scope "/", OMG.Watcher.Web do
pipe_through([:api])
post("/account.get_balance", Controller.Account, :get_balance)
post("/account.get_transactions", Controller.Transaction, :get_transactions)
post("/account.get_utxos", Controller.Account, :get_utxos)
post("/in_flight_exit.get_data", Controller.InFlightExit, :get_in_flight_exit)
post("/in_flight_exit.get_competitor", Controller.InFlightExit, :get_competitor)
post("/in_flight_exit.prove_canonical", Controller.InFlightExit, :prove_canonical)
post("/in_flight_exit.get_input_challenge_data", Controller.InFlightExit, :get_input_challenge_data)
post("/in_flight_exit.get_output_challenge_data", Controller.InFlightExit, :get_output_challenge_data)
post("/transaction.all", Controller.Transaction, :get_transactions)
post("/transaction.get", Controller.Transaction, :get_transaction)
post("/transaction.submit", Controller.Transaction, :submit)
post("/transaction.create", Controller.Transaction, :create)
post("/utxo.get_exit_data", Controller.Utxo, :get_utxo_exit)
post("/utxo.get_challenge_data", Controller.Challenge, :get_utxo_challenge)
post("/status.get", Controller.Status, :get_status)
# NOTE: This *has to* be the last route, catching all unhandled paths
match(:*, "/*path", Controller.Fallback, Route.NotFound)
end
def enforce_json_content(conn, _opts) do
headers = conn |> get_req_header("content-type")
if "application/json" in headers do
conn
else
conn
|> json(
OMG.Utils.HttpRPC.Error.serialize(
"operation:invalid_content",
"Content type of application/json header is required for all requests."
)
)
|> halt()
end
end
end
| 37.179104 | 106 | 0.728222 |
e805cbc6e20c039959ff417670e64619db9dda85 | 1,897 | ex | Elixir | clients/content/lib/google_api/content/v2/model/products_list_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/content/lib/google_api/content/v2/model/products_list_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/products_list_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.ProductsListResponse do
@moduledoc """
## Attributes
* `kind` (*type:* `String.t`, *default:* `content#productsListResponse`) - Identifies what kind of resource this is. Value: the fixed string "content#productsListResponse".
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The token for the retrieval of the next page of products.
* `resources` (*type:* `list(GoogleApi.Content.V2.Model.Product.t)`, *default:* `nil`) -
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:kind => String.t(),
:nextPageToken => String.t(),
:resources => list(GoogleApi.Content.V2.Model.Product.t())
}
field(:kind)
field(:nextPageToken)
field(:resources, as: GoogleApi.Content.V2.Model.Product, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.ProductsListResponse do
def decode(value, options) do
GoogleApi.Content.V2.Model.ProductsListResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.ProductsListResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.792453 | 176 | 0.722193 |
e80625e29e466043fe5af743f64648d0d0b1269a | 924 | ex | Elixir | lib/tmp/worker.ex | preciz/tmp | d26ced0cbe0e1034f7cabd798fd8ec6a7abf347d | [
"MIT"
] | 10 | 2020-08-16T18:49:42.000Z | 2021-11-03T11:19:52.000Z | lib/tmp/worker.ex | preciz/tmp | d26ced0cbe0e1034f7cabd798fd8ec6a7abf347d | [
"MIT"
] | 6 | 2020-09-17T04:57:12.000Z | 2022-02-24T04:12:16.000Z | lib/tmp/worker.ex | preciz/tmp | d26ced0cbe0e1034f7cabd798fd8ec6a7abf347d | [
"MIT"
] | 1 | 2021-08-31T18:58:03.000Z | 2021-08-31T18:58:03.000Z | defmodule Tmp.Worker do
@moduledoc """
Executes the function given to `Tmp.dir/2` in a GenServer process
"""
use GenServer, restart: :temporary
defmodule State do
@enforce_keys [:path, :function]
defstruct [:path, :function]
end
@spec execute(Path.t(), function, timeout) :: term()
def execute(path, function, timeout) when is_function(function, 1) do
state = %State{path: path, function: function}
{:ok, pid} = start_link(state)
GenServer.call(pid, :execute, timeout)
end
def start_link(%State{} = state) do
GenServer.start_link(__MODULE__, state)
end
@impl GenServer
def init(%State{path: path} = state) do
Tmp.Monitor.monitor(path)
{:ok, state}
end
@impl GenServer
def handle_call(:execute, _from, %State{path: path, function: function} = state) do
File.mkdir_p!(path)
reply = function.(path)
{:stop, :normal, reply, state}
end
end
| 22 | 85 | 0.666667 |
e806494148e5a2e8240e401a165cc2cf70a639fe | 2,017 | exs | Elixir | clients/deployment_manager/mix.exs | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/deployment_manager/mix.exs | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/deployment_manager/mix.exs | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DeploymentManager.Mixfile do
use Mix.Project
@version "0.18.4"
def project() do
[
app: :google_api_deployment_manager,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/deployment_manager"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Cloud Deployment Manager V2 API client library. The Google Cloud Deployment Manager v2 API provides services for configuring, deploying, and viewing Google Cloud services and APIs via templates which specify deployments of Cloud resources.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/deployment_manager",
"Homepage" => "https://cloud.google.com/deployment-manager"
}
]
end
end
| 30.104478 | 243 | 0.675756 |
e8065c38bc2dc002acf7119c528dd33f37e41c1e | 1,010 | exs | Elixir | apps/db/mix.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/db/mix.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/db/mix.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | defmodule DB.Mixfile do
use Mix.Project
def project do
[
app: :db,
version: "2.0.0-dev",
build_path: "../../_build",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.8",
elixirc_paths: elixirc_paths(Mix.env),
start_permanent: Mix.env == :prod,
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
],
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:db_connection, "~> 2.0"}
]
end
end
| 22.444444 | 56 | 0.580198 |
e8069151c82cbe690fb61dfb3d8ac1342ad43bf1 | 1,866 | ex | Elixir | lib/termato/sock_handler.ex | andyl/termato | ae8af988ef85b98232ddea45f196b7a71d1dc619 | [
"MIT"
] | null | null | null | lib/termato/sock_handler.ex | andyl/termato | ae8af988ef85b98232ddea45f196b7a71d1dc619 | [
"MIT"
] | null | null | null | lib/termato/sock_handler.ex | andyl/termato | ae8af988ef85b98232ddea45f196b7a71d1dc619 | [
"MIT"
] | null | null | null | defmodule Termato.SockHandler do
@behaviour :cowboy_websocket
# ----- Setup & Teardown Callbacks -----
def init(req, state) do
{:cowboy_websocket, req, state}
end
def websocket_init(state) do
IO.inspect self(), label: "WEBSOCKET INIT"
Process.sleep(250)
Termato.SockPidstore.add_client(self())
{:ok, state}
end
def terminate(reason, _req, state) do
IO.inspect([self(), reason], label: "WEBSOCKET TERMINATE")
Termato.SockPidstore.rm_client(self())
{:ok, state}
end
# ----- API -----
def broadcast(msg_type) do
~s({"type": "#{msg_type}"})
|> send_all()
end
def broadcast(type, secs) when is_integer(secs) do
text = Util.Seconds.to_s(secs)
klas = Util.Seconds.klas(secs)
~s({"type": "#{type}", "secs": #{secs}, "text": "#{text}", "klas": "#{klas}"})
|> send_all()
end
def broadcast(msg_type, msg_value) do
~s({"type": "#{msg_type}", "value": "#{msg_value}"})
|> send_all()
end
defp send_all(message) do
Termato.SockPidstore.clients()
|> Enum.each(&(send_one(&1, message)))
end
defp send_one(pid, message) do
send(pid, message)
end
# ----- Event Callbacks -----
def websocket_info({:timeout, _ref, message}, state) do
IO.inspect message, label: "TIMEOUT"
{[{:text, message}], state}
end
def websocket_info(data, state) do
# IO.inspect data, label: "INFODATA"
{[{:text, data}], state}
end
def websocket_handle({:text, message = "HEARTBEAT"}, state) do
# IO.inspect self(), label: "HEARTBEAT"
{[{:text, message}], state}
end
def websocket_handle({:text, message}, state) do
# IO.inspect message, label: "MESSAGE"
{[{:text, message}], state}
end
def websocket_handle(data, state) do
IO.inspect data, label: "GENERIC HANDLER"
{[{:text, data}], state}
end
end
| 23.923077 | 82 | 0.610397 |
e806edcae8aa0478f65e63ed30dceccbeed85d03 | 238 | ex | Elixir | app/lib/noodl_web/live/guide/stream/stream.ex | nathanjohnson320/noodl | 2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967 | [
"MIT"
] | 1 | 2021-01-20T20:00:50.000Z | 2021-01-20T20:00:50.000Z | app/lib/noodl_web/live/guide/stream/stream.ex | nathanjohnson320/noodl | 2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967 | [
"MIT"
] | null | null | null | app/lib/noodl_web/live/guide/stream/stream.ex | nathanjohnson320/noodl | 2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967 | [
"MIT"
] | null | null | null | defmodule NoodlWeb.Live.Guide.Stream do
@moduledoc ~S"""
LiveView for the guide stream page
"""
use NoodlWeb, :live_view
def mount(_params, session, socket) do
{:ok, socket |> Authentication.assign_user(session)}
end
end
| 21.636364 | 56 | 0.710084 |
e8071c3f371570d0200cf82502d2023e37ce5256 | 3,317 | ex | Elixir | lib/gat_core/connection_tcp.ex | SylwBar/gat_core | b37c12e1004e1ee11b3d515f51f02319d0ac3a93 | [
"MIT"
] | null | null | null | lib/gat_core/connection_tcp.ex | SylwBar/gat_core | b37c12e1004e1ee11b3d515f51f02319d0ac3a93 | [
"MIT"
] | null | null | null | lib/gat_core/connection_tcp.ex | SylwBar/gat_core | b37c12e1004e1ee11b3d515f51f02319d0ac3a93 | [
"MIT"
] | null | null | null | defmodule GATCore.ConnectionTCP do
# Server will send KA every 20 seconds
@server_ka_timer_msec 20_000
# Client should send any message ay least every 10 minutes
@client_ka_timer_msec 600_000
use GenServer
require Logger
# ----- ConnectionTCP API -----
def start(object_id, socket) do
GenServer.start(__MODULE__, [object_id, socket])
end
def disconnect(pid) do
GenServer.cast(pid, :disconnect)
end
# ----- ConnectionTCP callbacks -----
@impl true
def init([object_id, socket]) do
:ok = :inet.setopts(socket, active: true)
{:ok, {{ip1, ip2, ip3, ip4}, port}} = :inet.peername(socket)
peer_str = "#{ip1}.#{ip2}.#{ip3}.#{ip4}:#{port}"
{:ok, _} = Registry.register(Registry.ConnectionsTCP, object_id, peer_str)
Logger.info("ConnectionTCP #{inspect(self())} #{inspect(object_id)}: started.")
server_ka_timer_ref = :erlang.send_after(@server_ka_timer_msec, self(), :server_ka_timer_exp)
client_ka_timer_ref = :erlang.send_after(@client_ka_timer_msec, self(), :client_ka_timer_exp)
last_rx_time = :erlang.system_time(:millisecond)
state = %{
object_id: object_id,
socket: socket,
server_ka_timer_ref: server_ka_timer_ref,
client_ka_timer_ref: client_ka_timer_ref,
last_rx_time: last_rx_time
}
{:ok, state}
end
@impl true
def handle_cast(:disconnect, state) do
Logger.info("ConnectionTCP #{inspect(self())} #{inspect(state.object_id)}: got :disconnect.")
:gen_tcp.close(state.socket)
{:stop, :normal, %{}}
end
@impl true
def handle_info({:tcp, _port, packet}, state) do
case CBOR.decode(packet) do
{:ok, cbor, <<>>} ->
last_rx_time = :erlang.system_time(:millisecond)
case cbor do
# Client KA message received
[0, 0, 0, _, _] ->
:ok
cbor ->
Logger.warning(
"ConnectionTCP #{inspect(self())} #{inspect(state.object_id)}: not recognized CBOR: #{inspect(cbor)}"
)
end
{:noreply, %{state | last_rx_time: last_rx_time}}
_ ->
Logger.warning(
"ConnectionTCP #{inspect(self())} #{inspect(state.object_id)}: not recognized packet: #{inspect(packet)}"
)
{:noreply, state}
end
end
def handle_info(:server_ka_timer_exp, state) do
ka_pkt = GATCore.Packet.gen_core_keep_alive()
:ok = :gen_tcp.send(state.socket, ka_pkt)
server_ka_timer_ref = :erlang.send_after(@server_ka_timer_msec, self(), :server_ka_timer_exp)
{:noreply, %{state | server_ka_timer_ref: server_ka_timer_ref}}
end
def handle_info(:client_ka_timer_exp, state) do
if :erlang.system_time(:millisecond) - state.last_rx_time > @client_ka_timer_msec do
Logger.info(
"ConnectionTCP #{inspect(self())} #{inspect(state.object_id)}: client timeout, disconnecting."
)
:gen_tcp.close(state.socket)
{:stop, :normal, %{}}
else
client_ka_timer_ref =
:erlang.send_after(@client_ka_timer_msec, self(), :client_ka_timer_exp)
{:noreply, %{state | client_ka_timer_ref: client_ka_timer_ref}}
end
end
def handle_info({:tcp_closed, _port}, state) do
Logger.info("ConnectionTCP #{inspect(self())} #{inspect(state.object_id)}: got :tcp_closed.")
{:stop, :normal, %{}}
end
end
| 31.590476 | 115 | 0.653301 |
e80736f06db98cfd53a890f139d608bd68e0eb25 | 2,527 | ex | Elixir | lib/mastery/boundary/proctor.ex | grekko/mastery | f80dfcb660f23187970442ea237e4128ce3ac262 | [
"MIT"
] | null | null | null | lib/mastery/boundary/proctor.ex | grekko/mastery | f80dfcb660f23187970442ea237e4128ce3ac262 | [
"MIT"
] | null | null | null | lib/mastery/boundary/proctor.ex | grekko/mastery | f80dfcb660f23187970442ea237e4128ce3ac262 | [
"MIT"
] | null | null | null | defmodule Mastery.Boundary.Proctor do
alias Mastery.Boundary.{QuizManager, QuizSession}
use GenServer
require Logger
def start_link(options \\ []) do
GenServer.start_link(__MODULE__, [], options)
end
def init(quizzes) do
{:ok, quizzes}
end
def schedule_quiz(proctor \\ __MODULE__, quiz, templates, start_at, end_at) do
quiz = %{
fields: quiz,
templates: templates,
start_at: start_at,
end_at: end_at
}
GenServer.call(proctor, {:schedule_quiz, quiz})
end
def handle_call({:schedule_quiz, quiz}, _from, quizzes) do
now = DateTime.utc_now()
ordered_quizzes =
[quiz | quizzes]
|> start_quizzes(now)
|> Enum.sort(fn a, b ->
date_time_less_then_or_equal?(a.start_at, b.start_at)
end)
build_reply_with_timeout({:reply, :ok}, ordered_quizzes, now)
end
def handle_info(:timeout, quizzes) do
now = DateTime.utc_now()
remaining_quizzes = start_quizzes(quizzes, now)
build_reply_with_timeout({:noreply}, remaining_quizzes, now)
end
def handle_info({:end_quiz, quiz_title}, quizzes) do
QuizManager.remove_quiz(quiz_title)
quiz_title
|> QuizSession.active_sessions_for()
|> QuizSession.end_sessions()
Logger.info("Stopped Quiz #{quiz_title}")
handle_info(:timeout, quizzes)
end
defp build_reply_with_timeout(reply, quizzes, now) do
reply
|> append_state(quizzes)
|> maybe_append_timeout(quizzes, now)
end
defp append_state(tuple, quizzes), do: Tuple.append(tuple, quizzes)
defp maybe_append_timeout(tuple, [], _now), do: tuple
defp maybe_append_timeout(tuple, quizzes, now) do
timeout =
quizzes
|> hd
|> Map.fetch!(:start_at)
|> DateTime.diff(now, :millisecond)
Tuple.append(tuple, timeout)
end
def start_quizzes(quizzes, now) do
{ready, not_ready} =
Enum.split_while(quizzes, fn quiz -> date_time_less_then_or_equal?(quiz.start_at, now) end)
Enum.each(ready, fn quiz -> start_quiz(quiz, now) end)
not_ready
end
def start_quiz(quiz, now) do
Logger.info("Starting quiz #{quiz.fields.title}")
QuizManager.build_quiz(quiz.fields)
Enum.each(quiz.templates, fn templates ->
QuizManager.add_template(quiz.fields.title, templates)
end)
timeout = DateTime.diff(quiz.end_at, now, :millisecond)
Process.send_after(self(), {:end_quiz, quiz.fields.title}, timeout)
end
defp date_time_less_then_or_equal?(a, b) do
DateTime.compare(a, b) in ~w[lt eq]a
end
end
| 25.785714 | 97 | 0.684211 |
e80738203a2162ebd17f280da629732e122d7b9a | 1,117 | exs | Elixir | mix.exs | bcardarella/elixir-fixtures | 2c49e0af3713bd912dc7bb639324da423d155299 | [
"MIT"
] | 139 | 2016-02-07T12:28:18.000Z | 2022-02-15T11:39:58.000Z | mix.exs | bcardarella/elixir-fixtures | 2c49e0af3713bd912dc7bb639324da423d155299 | [
"MIT"
] | 17 | 2016-05-16T18:59:26.000Z | 2020-09-11T16:53:53.000Z | mix.exs | bcardarella/elixir-fixtures | 2c49e0af3713bd912dc7bb639324da423d155299 | [
"MIT"
] | 10 | 2016-02-23T09:25:09.000Z | 2020-02-29T14:21:21.000Z | defmodule EctoFixtures.Mixfile do
use Mix.Project
def project do
[app: :ecto_fixtures,
version: "0.0.2",
elixir: "~> 1.2",
name: "Ecto Fixtures",
deps: deps(),
package: package(),
description: description()]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[applications: [:logger, :elixir_uuid, :ecto]]
end
def description do
"""
Ecto Fixtures provides a simple DSL for quickly creating fixture
data for your test suite.
"""
end
def package do
[contributors: ["Brian Cardarella"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/dockyard/ecto_fixtures"}]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type `mix help deps` for more examples and options
defp deps do
[
{:ecto, "> 0.0.0"},
{:postgrex, "> 0.0.0", only: :test},
{:elixir_uuid, "~> 1.2"}
]
end
end
| 21.901961 | 77 | 0.597135 |
e8073e993cca7939fce0d9a87231a7f915fa1cbb | 4,136 | ex | Elixir | clients/run/lib/google_api/run/v1/model/binding.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/run/lib/google_api/run/v1/model/binding.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/run/lib/google_api/run/v1/model/binding.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Run.V1.Model.Binding do
@moduledoc """
Associates `members` with a `role`.
## Attributes
* `condition` (*type:* `GoogleApi.Run.V1.Model.Expr.t`, *default:* `nil`) - The condition that is associated with this binding. If the condition evaluates to `true`, then this binding applies to the current request. If the condition evaluates to `false`, then this binding does not apply to the current request. However, a different role binding might grant the same role to one or more of the members in this binding. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
* `members` (*type:* `list(String.t)`, *default:* `nil`) - Specifies the identities requesting access for a Cloud Platform resource. `members` can have the following values: * `allUsers`: A special identifier that represents anyone who is on the internet; with or without a Google account. * `allAuthenticatedUsers`: A special identifier that represents anyone who is authenticated with a Google account or a service account. * `user:{emailid}`: An email address that represents a specific Google account. For example, `alice@example.com` . * `serviceAccount:{emailid}`: An email address that represents a service account. For example, `my-other-app@appspot.gserviceaccount.com`. * `group:{emailid}`: An email address that represents a Google group. For example, `admins@example.com`. * `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a user that has been recently deleted. For example, `alice@example.com?uid=123456789012345678901`. If the user is recovered, this value reverts to `user:{emailid}` and the recovered user retains the role in the binding. * `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a service account that has been recently deleted. For example, `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If the service account is undeleted, this value reverts to `serviceAccount:{emailid}` and the undeleted service account retains the role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a Google group that has been recently deleted. For example, `admins@example.com?uid=123456789012345678901`. If the group is recovered, this value reverts to `group:{emailid}` and the recovered group retains the role in the binding. * `domain:{domain}`: The G Suite domain (primary) that represents all the users of that domain. For example, `google.com` or `example.com`.
* `role` (*type:* `String.t`, *default:* `nil`) - Role that is assigned to `members`. For example, `roles/viewer`, `roles/editor`, or `roles/owner`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:condition => GoogleApi.Run.V1.Model.Expr.t(),
:members => list(String.t()),
:role => String.t()
}
field(:condition, as: GoogleApi.Run.V1.Model.Expr)
field(:members, type: :list)
field(:role)
end
defimpl Poison.Decoder, for: GoogleApi.Run.V1.Model.Binding do
def decode(value, options) do
GoogleApi.Run.V1.Model.Binding.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Run.V1.Model.Binding do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 78.037736 | 1,972 | 0.749275 |
e807418bf3fedf9d6999ae559ebf4284974098dd | 178 | exs | Elixir | priv/repo/migrations/20210311200419_add_eclrs_version_to_files.exs | RatioPBC/epi-viaduct-nys | 99fb637785ea207aee5449fa01fa59dd18ec8bf2 | [
"MIT"
] | 2 | 2021-06-22T21:01:49.000Z | 2021-11-04T18:36:48.000Z | priv/repo/migrations/20210311200419_add_eclrs_version_to_files.exs | RatioPBC/epi-viaduct-nys | 99fb637785ea207aee5449fa01fa59dd18ec8bf2 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210311200419_add_eclrs_version_to_files.exs | RatioPBC/epi-viaduct-nys | 99fb637785ea207aee5449fa01fa59dd18ec8bf2 | [
"MIT"
] | null | null | null | defmodule NYSETL.Repo.Migrations.AddEclrsVersionToFiles do
use Ecto.Migration
def change do
alter table("files") do
add :eclrs_version, :integer
end
end
end
| 17.8 | 58 | 0.724719 |
e807734d7ea908ed936469dbce99271e4eb9b1f9 | 2,217 | exs | Elixir | test/choicest/comparisons/comparison_test.exs | Cadiac/choicest | e3799e3b80024754c6babb561c1b14f916f92373 | [
"MIT"
] | 1 | 2018-07-01T08:18:20.000Z | 2018-07-01T08:18:20.000Z | test/choicest/comparisons/comparison_test.exs | Cadiac/choicest | e3799e3b80024754c6babb561c1b14f916f92373 | [
"MIT"
] | null | null | null | test/choicest/comparisons/comparison_test.exs | Cadiac/choicest | e3799e3b80024754c6babb561c1b14f916f92373 | [
"MIT"
] | null | null | null | defmodule Choicest.ComparisonsTest do
use Choicest.DataCase
alias Choicest.Core
describe "comparisons" do
alias Choicest.Model.Image
alias Choicest.Model.Comparison
alias Choicest.Model.Collection
@valid_image_attrs %{description: "some description", original_filename: "some original_filename", content_type: "image/jpeg", file_size: 42, uploaded_by: "uploaded_by"}
@valid_collection_attrs %{"description" => "some description", "name" => "some name", "voting_active" => true}
def collection_fixture(attrs \\ %{}) do
{:ok, collection} =
attrs
|> Enum.into(@valid_collection_attrs)
|> Core.create_collection()
collection
end
def image_fixture(collection_id, attrs \\ %{}) do
attrs = attrs |> Enum.into(@valid_image_attrs)
{:ok, image} = Core.create_image(collection_id, attrs)
image
end
def comparison_fixture(collection_id) do
%Image{id: winner_id} = image_fixture(collection_id)
%Image{id: loser_id} = image_fixture(collection_id)
{:ok, comparison} = Core.create_comparison(collection_id, winner_id, loser_id)
comparison
end
test "create_comparison/1 with correct ids creates comparison" do
%Collection{id: collection_id} = collection_fixture()
%Image{id: winner_id} = image_fixture(collection_id)
%Image{id: loser_id} = image_fixture(collection_id)
assert {:ok, %Comparison{} = comparison} = Core.create_comparison(collection_id, winner_id, loser_id)
assert comparison.winner_id == winner_id
assert comparison.loser_id == loser_id
end
test "get_comparison!/1 returns the comparison with given id" do
%Collection{id: collection_id} = collection_fixture()
comparison = comparison_fixture(collection_id)
assert Core.get_comparison!(collection_id, comparison.id) == comparison
end
test "list_image_comparisons returns empty comparisons for image with no comparisons" do
%Collection{id: collection_id} = collection_fixture()
image = image_fixture(collection_id)
assert %{lost_against: [], won_against: []} = Core.list_image_comparisons!(collection_id, image.id);
end
end
end
| 33.590909 | 173 | 0.706811 |
e80779a3dc2a9de18ca758e125b43c62b9a44ef2 | 31,765 | ex | Elixir | lib/elixir/lib/kernel/typespec.ex | cdfuller/elixir | 3bd3f88d57d7fff6cab7b171294b89fb08eedfe7 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/kernel/typespec.ex | cdfuller/elixir | 3bd3f88d57d7fff6cab7b171294b89fb08eedfe7 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/kernel/typespec.ex | cdfuller/elixir | 3bd3f88d57d7fff6cab7b171294b89fb08eedfe7 | [
"Apache-2.0"
] | null | null | null | defmodule Kernel.Typespec do
@moduledoc false
## Deprecated API moved to Code.Typespec
@doc false
@deprecated "Use Code.Typespec.spec_to_quoted/2 instead"
def spec_to_ast(name, spec) do
Code.Typespec.spec_to_quoted(name, spec)
end
@doc false
@deprecated "Use Code.Typespec.type_to_quoted/1 instead"
def type_to_ast(type) do
Code.Typespec.type_to_quoted(type)
end
@doc false
@deprecated "Use Code.fetch_docs/1 instead"
# Returns {{name, arity}, doc} pairs for every :type entry in the module's
# docs chunk: nil when undocumented, false when hidden, the "en" string
# otherwise. Returns nil when the docs chunk cannot be fetched.
def beam_typedocs(module) when is_atom(module) or is_binary(module) do
case Code.fetch_docs(module) do
{:docs_v1, _, _, _, _, _, docs} ->
# Only :type entries are kept; function/macro/callback docs are skipped.
for {{:type, name, arity}, _, _, doc, _} <- docs do
case doc do
:none -> {{name, arity}, nil}
:hidden -> {{name, arity}, false}
%{"en" => doc_string} -> {{name, arity}, doc_string}
end
end
{:error, _} ->
nil
end
end
@doc false
@deprecated "Use Code.Typespec.fetch_types/1 instead"
def beam_types(module) when is_atom(module) or is_binary(module) do
case Code.Typespec.fetch_types(module) do
{:ok, types} -> types
:error -> nil
end
end
@doc false
@deprecated "Use Code.Typespec.fetch_specs/1 instead"
def beam_specs(module) when is_atom(module) or is_binary(module) do
case Code.Typespec.fetch_specs(module) do
{:ok, specs} -> specs
:error -> nil
end
end
@doc false
@deprecated "Use Code.Typespec.fetch_callbacks/1 instead"
def beam_callbacks(module) when is_atom(module) or is_binary(module) do
case Code.Typespec.fetch_callbacks(module) do
{:ok, callbacks} -> callbacks
:error -> nil
end
end
## Hooks for Module functions
# True if the module currently being compiled has accumulated a @type,
# @opaque or @typep definition whose signature matches {name, arity}.
# Reads the compiler's per-module ETS "bag" table via :elixir_module.
def defines_type?(module, {name, arity} = signature)
when is_atom(module) and is_atom(name) and arity in 0..255 do
{_set, bag} = :elixir_module.data_tables(module)
finder = fn {_kind, expr, _caller} ->
type_to_signature(expr) == signature
end
:lists.any(finder, get_typespecs(bag, [:type, :opaque, :typep]))
end
def spec_to_callback(module, {name, arity} = signature)
when is_atom(module) and is_atom(name) and arity in 0..255 do
{set, bag} = :elixir_module.data_tables(module)
filter = fn {:spec, expr, pos} ->
if spec_to_signature(expr) == signature do
kind = :callback
store_typespec(bag, kind, expr, pos)
case :ets.lookup(set, {:function, name, arity}) do
[{{:function, ^name, ^arity}, line, _, doc, doc_meta}] ->
store_doc(set, kind, name, arity, line, :doc, doc, doc_meta)
_ ->
nil
end
true
else
false
end
end
:lists.filter(filter, get_typespecs(bag, :spec)) != []
end
## Typespec definition and storage
@doc """
Defines a typespec.
Invoked by `Kernel.@/1` expansion.
"""
def deftypespec(:spec, expr, _line, _file, module, pos) do
{_set, bag} = :elixir_module.data_tables(module)
store_typespec(bag, :spec, expr, pos)
end
def deftypespec(kind, expr, line, _file, module, pos)
when kind in [:callback, :macrocallback] do
{set, bag} = :elixir_module.data_tables(module)
case spec_to_signature(expr) do
{name, arity} ->
# store doc only once in case callback has multiple clauses
unless :ets.member(set, {kind, name, arity}) do
{line, doc} = get_doc_info(set, :doc, line)
store_doc(set, kind, name, arity, line, :doc, doc, %{})
end
:error ->
:error
end
store_typespec(bag, kind, expr, pos)
end
def deftypespec(kind, expr, line, file, module, pos)
when kind in [:type, :typep, :opaque] do
{set, bag} = :elixir_module.data_tables(module)
case type_to_signature(expr) do
{name, arity} when kind == :typep ->
{line, doc} = get_doc_info(set, :typedoc, line)
if doc do
warning =
"type #{name}/#{arity} is private, @typedoc's are always discarded for private types"
:elixir_errors.erl_warn(line, file, warning)
end
{name, arity} ->
{line, doc} = get_doc_info(set, :typedoc, line)
spec_meta = if kind == :opaque, do: %{opaque: true}, else: %{}
store_doc(set, :type, name, arity, line, :typedoc, doc, spec_meta)
:error ->
:error
end
store_typespec(bag, kind, expr, pos)
end
defp get_typespecs(bag, keys) when is_list(keys) do
:lists.flatmap(&get_typespecs(bag, &1), keys)
end
defp get_typespecs(bag, key) do
:ets.lookup_element(bag, {:accumulate, key}, 2)
catch
:error, :badarg -> []
end
defp take_typespecs(bag, keys) when is_list(keys) do
:lists.flatmap(&take_typespecs(bag, &1), keys)
end
defp take_typespecs(bag, key) do
:lists.map(&elem(&1, 1), :ets.take(bag, {:accumulate, key}))
end
defp store_typespec(bag, key, expr, pos) do
:ets.insert(bag, {{:accumulate, key}, {key, expr, pos}})
:ok
end
defp store_doc(set, kind, name, arity, line, doc_kind, doc, spec_meta) do
doc_meta = get_doc_meta(spec_meta, doc_kind, set)
:ets.insert(set, {{kind, name, arity}, line, doc, doc_meta})
end
defp get_doc_info(set, attr, line) do
case :ets.take(set, attr) do
[{^attr, {line, doc}, _}] -> {line, doc}
[] -> {line, nil}
end
end
defp get_doc_meta(spec_meta, doc_kind, set) do
case :ets.take(set, {doc_kind, :meta}) do
[{{^doc_kind, :meta}, metadata, _}] -> Map.merge(metadata, spec_meta)
[] -> spec_meta
end
end
# Extracts {name, arity} from a spec AST. A `when` guard wraps the actual
# spec, so unwrap it first, then delegate to type_to_signature/1.
defp spec_to_signature({:when, _, [spec, _]}), do: type_to_signature(spec)
defp spec_to_signature(other), do: type_to_signature(other)
# Extracts {name, arity} from a `name :: definition` AST.
# A bare name (atom context in place of an args list) is a 0-arity type.
defp type_to_signature({:::, _, [{name, _, context}, _]})
when is_atom(name) and name != ::: and is_atom(context),
do: {name, 0}
defp type_to_signature({:::, _, [{name, _, args}, _]}) when is_atom(name) and name != :::,
do: {name, length(args)}
# Anything else is not a valid type definition.
defp type_to_signature(_), do: :error
## Translation from Elixir AST to typespec AST
@doc false
def translate_typespecs_for_module(_set, bag) do
type_typespecs = take_typespecs(bag, [:type, :opaque, :typep])
defined_type_pairs = collect_defined_type_pairs(type_typespecs)
state = %{
defined_type_pairs: defined_type_pairs,
used_type_pairs: [],
local_vars: %{},
undefined_type_error_enabled?: true
}
{types, state} = :lists.mapfoldl(&translate_type/2, state, type_typespecs)
{specs, state} = :lists.mapfoldl(&translate_spec/2, state, take_typespecs(bag, :spec))
{callbacks, state} = :lists.mapfoldl(&translate_spec/2, state, take_typespecs(bag, :callback))
{macrocallbacks, state} =
:lists.mapfoldl(&translate_spec/2, state, take_typespecs(bag, :macrocallback))
optional_callbacks = :lists.flatten(get_typespecs(bag, :optional_callbacks))
used_types = filter_used_types(types, state)
{used_types, specs, callbacks, macrocallbacks, optional_callbacks}
end
defp collect_defined_type_pairs(type_typespecs) do
fun = fn {_kind, expr, pos}, type_pairs ->
%{file: file, line: line} = env = :elixir_locals.get_cached_env(pos)
case type_to_signature(expr) do
{name, arity} = type_pair ->
if built_in_type?(name, arity) do
message = "type #{name}/#{arity} is a built-in type and it cannot be redefined"
compile_error(env, message)
end
if Map.has_key?(type_pairs, type_pair) do
compile_error(env, "type #{name}/#{arity} is already defined")
end
Map.put(type_pairs, type_pair, {file, line})
:error ->
compile_error(env, "invalid type specification: #{Macro.to_string(expr)}")
end
end
:lists.foldl(fun, %{}, type_typespecs)
end
defp filter_used_types(types, state) do
fun = fn {_kind, {name, arity} = type_pair, _line, _type, export} ->
if not export and not :lists.member(type_pair, state.used_type_pairs) do
%{^type_pair => {file, line}} = state.defined_type_pairs
:elixir_errors.erl_warn(line, file, "type #{name}/#{arity} is unused")
false
else
true
end
end
:lists.filter(fun, types)
end
defp translate_type({kind, {:::, _, [{name, _, args}, definition]}, pos}, state) do
caller = :elixir_locals.get_cached_env(pos)
state = clean_local_state(state)
args =
if is_atom(args) do
[]
else
for(arg <- args, do: variable(arg))
end
vars = :lists.filter(&match?({:var, _, _}, &1), args)
var_names = :lists.map(&elem(&1, 2), vars)
state = :lists.foldl(&update_local_vars(&2, &1), state, var_names)
{spec, state} = typespec(definition, var_names, caller, state)
type = {name, spec, vars}
arity = length(args)
ensure_no_unused_local_vars!(caller, state.local_vars)
{kind, export} =
case kind do
:type -> {:type, true}
:typep -> {:type, false}
:opaque -> {:opaque, true}
end
invalid_args = :lists.filter(&(not valid_variable_ast?(&1)), args)
unless invalid_args == [] do
invalid_args = :lists.join(", ", :lists.map(&Macro.to_string/1, invalid_args))
message =
"@type definitions expect all arguments to be variables. The type " <>
"#{name}/#{arity} has an invalid argument(s): #{invalid_args}"
compile_error(caller, message)
end
if underspecified?(kind, arity, spec) do
message = "@#{kind} type #{name}/#{arity} is underspecified and therefore meaningless"
:elixir_errors.erl_warn(caller.line, caller.file, message)
end
{{kind, {name, arity}, caller.line, type, export}, state}
end
defp valid_variable_ast?({variable_name, _, context})
when is_atom(variable_name) and is_atom(context),
do: true
defp valid_variable_ast?(_), do: false
defp underspecified?(:opaque, 0, {:type, _, type, []}) when type in [:any, :term], do: true
defp underspecified?(_kind, _arity, _spec), do: false
defp translate_spec({kind, {:when, _meta, [spec, guard]}, pos}, state) do
caller = :elixir_locals.get_cached_env(pos)
translate_spec(kind, spec, guard, caller, state)
end
defp translate_spec({kind, spec, pos}, state) do
caller = :elixir_locals.get_cached_env(pos)
translate_spec(kind, spec, [], caller, state)
end
defp translate_spec(kind, {:::, meta, [{name, _, args}, return]}, guard, caller, state)
when is_atom(name) and name != ::: do
translate_spec(kind, meta, name, args, return, guard, caller, state)
end
defp translate_spec(_kind, {name, _meta, _args} = spec, _guard, caller, _state)
when is_atom(name) and name != ::: do
spec = Macro.to_string(spec)
compile_error(caller, "type specification missing return type: #{spec}")
end
defp translate_spec(_kind, spec, _guard, caller, _state) do
spec = Macro.to_string(spec)
compile_error(caller, "invalid type specification: #{spec}")
end
defp translate_spec(kind, meta, name, args, return, guard, caller, state) when is_atom(args),
do: translate_spec(kind, meta, name, [], return, guard, caller, state)
defp translate_spec(kind, meta, name, args, return, guard, caller, state) do
ensure_no_defaults!(args)
state = clean_local_state(state)
unless Keyword.keyword?(guard) do
error = "expected keywords as guard in type specification, got: #{Macro.to_string(guard)}"
compile_error(caller, error)
end
line = line(meta)
vars = Keyword.keys(guard)
{fun_args, state} = fn_args(meta, args, return, vars, caller, state)
spec = {:type, line, :fun, fun_args}
{spec, state} =
case guard_to_constraints(guard, vars, meta, caller, state) do
{[], state} -> {spec, state}
{constraints, state} -> {{:type, line, :bounded_fun, [spec, constraints]}, state}
end
ensure_no_unused_local_vars!(caller, state.local_vars)
arity = length(args)
{{kind, {name, arity}, caller.line, spec}, state}
end
# Types Elixir provides on top of Erlang's built-ins; these names cannot be
# redefined by user code. Everything else defers to :erl_internal.is_type/2,
# which covers the Erlang built-in types.
# TODO: Remove char_list type by v2.0
defp built_in_type?(:char_list, 0), do: true
defp built_in_type?(:charlist, 0), do: true
defp built_in_type?(:as_boolean, 1), do: true
defp built_in_type?(:struct, 0), do: true
defp built_in_type?(:nonempty_charlist, 0), do: true
defp built_in_type?(:keyword, 0), do: true
defp built_in_type?(:keyword, 1), do: true
defp built_in_type?(:var, 0), do: true
defp built_in_type?(name, arity), do: :erl_internal.is_type(name, arity)
defp ensure_no_defaults!(args) do
fun = fn
{:::, _, [left, right]} ->
ensure_not_default(left)
ensure_not_default(right)
left
other ->
ensure_not_default(other)
other
end
:lists.foreach(fun, args)
end
defp ensure_not_default({:\\, _, [_, _]}) do
raise ArgumentError, "default arguments \\\\ not supported in typespecs"
end
defp ensure_not_default(_), do: :ok
defp guard_to_constraints(guard, vars, meta, caller, state) do
line = line(meta)
fun = fn
{_name, {:var, _, context}}, {constraints, state} when is_atom(context) ->
{constraints, state}
{name, type}, {constraints, state} ->
{spec, state} = typespec(type, vars, caller, state)
constraint = [{:atom, line, :is_subtype}, [{:var, line, name}, spec]]
state = update_local_vars(state, name)
{[{:type, line, :constraint, constraint} | constraints], state}
end
{constraints, state} = :lists.foldl(fun, {[], state}, guard)
{:lists.reverse(constraints), state}
end
## To typespec conversion
# Fetches the line number from AST metadata, defaulting to 0 when absent.
defp line(meta) do
Keyword.get(meta, :line, 0)
end
# Handle unions
defp typespec({:|, meta, [_, _]} = exprs, vars, caller, state) do
exprs = collect_union(exprs)
{union, state} = :lists.mapfoldl(&typespec(&1, vars, caller, &2), state, exprs)
{{:type, line(meta), :union, union}, state}
end
# Handle binaries
defp typespec({:<<>>, meta, []}, _, _, state) do
line = line(meta)
{{:type, line, :binary, [{:integer, line, 0}, {:integer, line, 0}]}, state}
end
defp typespec(
{:<<>>, meta, [{:::, unit_meta, [{:_, _, ctx1}, {:*, _, [{:_, _, ctx2}, unit]}]}]},
_,
_,
state
)
when is_atom(ctx1) and is_atom(ctx2) and is_integer(unit) and unit >= 0 do
line = line(meta)
{{:type, line, :binary, [{:integer, line, 0}, {:integer, line(unit_meta), unit}]}, state}
end
defp typespec({:<<>>, meta, [{:::, size_meta, [{:_, _, ctx}, size]}]}, _, _, state)
when is_atom(ctx) and is_integer(size) and size >= 0 do
line = line(meta)
{{:type, line, :binary, [{:integer, line(size_meta), size}, {:integer, line, 0}]}, state}
end
defp typespec(
{
:<<>>,
meta,
[
{:::, size_meta, [{:_, _, ctx1}, size]},
{:::, unit_meta, [{:_, _, ctx2}, {:*, _, [{:_, _, ctx3}, unit]}]}
]
},
_,
_,
state
)
when is_atom(ctx1) and is_atom(ctx2) and is_atom(ctx3) and is_integer(size) and
is_integer(unit) and size >= 0 and unit >= 0 do
args = [{:integer, line(size_meta), size}, {:integer, line(unit_meta), unit}]
{{:type, line(meta), :binary, args}, state}
end
defp typespec({:<<>>, _meta, _args}, _vars, caller, _state) do
message =
"invalid binary specification, expected <<_::size>>, <<_::_*unit>>, " <>
"or <<_::size, _::_*unit>> with size and unit being non-negative integers"
compile_error(caller, message)
end
## Handle maps and structs
defp typespec({:map, meta, args}, _vars, _caller, state) when args == [] or is_atom(args) do
{{:type, line(meta), :map, :any}, state}
end
defp typespec({:%{}, meta, fields} = map, vars, caller, state) do
fun = fn
{{:required, meta2, [k]}, v}, state ->
{arg1, state} = typespec(k, vars, caller, state)
{arg2, state} = typespec(v, vars, caller, state)
{{:type, line(meta2), :map_field_exact, [arg1, arg2]}, state}
{{:optional, meta2, [k]}, v}, state ->
{arg1, state} = typespec(k, vars, caller, state)
{arg2, state} = typespec(v, vars, caller, state)
{{:type, line(meta2), :map_field_assoc, [arg1, arg2]}, state}
{k, v}, state ->
{arg1, state} = typespec(k, vars, caller, state)
{arg2, state} = typespec(v, vars, caller, state)
{{:type, line(meta), :map_field_exact, [arg1, arg2]}, state}
{:|, _, [_, _]}, _state ->
error =
"invalid map specification. When using the | operator in the map key, " <>
"make sure to wrap the key type in parentheses: #{Macro.to_string(map)}"
compile_error(caller, error)
_, _state ->
compile_error(caller, "invalid map specification: #{Macro.to_string(map)}")
end
{fields, state} = :lists.mapfoldl(fun, state, fields)
{{:type, line(meta), :map, fields}, state}
end
defp typespec({:%, _, [name, {:%{}, meta, fields}]}, vars, caller, state) do
# We cannot set a function name to avoid tracking
# as a compile time dependency, because for structs it actually is one.
module = Macro.expand(name, caller)
struct =
module
|> Macro.struct!(caller)
|> Map.delete(:__struct__)
|> Map.to_list()
unless Keyword.keyword?(fields) do
compile_error(caller, "expected key-value pairs in struct #{Macro.to_string(name)}")
end
types =
:lists.map(
fn {field, _} -> {field, Keyword.get(fields, field, quote(do: term()))} end,
struct
)
fun = fn {field, _} ->
unless Keyword.has_key?(struct, field) do
compile_error(
caller,
"undefined field #{inspect(field)} on struct #{Macro.to_string(name)}"
)
end
end
:lists.foreach(fun, fields)
typespec({:%{}, meta, [__struct__: module] ++ types}, vars, caller, state)
end
# Handle records
defp typespec({:record, meta, [atom]}, vars, caller, state) do
typespec({:record, meta, [atom, []]}, vars, caller, state)
end
defp typespec({:record, meta, [tag, field_specs]}, vars, caller, state)
when is_atom(tag) and is_list(field_specs) do
# We cannot set a function name to avoid tracking
# as a compile time dependency because for records it actually is one.
case Macro.expand({tag, [], [{:{}, [], []}]}, caller) do
{_, _, [name, fields | _]} when is_list(fields) ->
types =
:lists.map(
fn {field, _} -> Keyword.get(field_specs, field, quote(do: term())) end,
fields
)
fun = fn {field, _} ->
unless Keyword.has_key?(fields, field) do
compile_error(caller, "undefined field #{field} on record #{inspect(tag)}")
end
end
:lists.foreach(fun, field_specs)
typespec({:{}, meta, [name | types]}, vars, caller, state)
_ ->
compile_error(caller, "unknown record #{inspect(tag)}")
end
end
defp typespec({:record, _meta, [_tag, _field_specs]}, _vars, caller, _state) do
message = "invalid record specification, expected the record name to be an atom literal"
compile_error(caller, message)
end
# Handle ranges
defp typespec({:.., meta, [left, right]}, vars, caller, state) do
{left, state} = typespec(left, vars, caller, state)
{right, state} = typespec(right, vars, caller, state)
:ok = validate_range(left, right, caller)
{{:type, line(meta), :range, [left, right]}, state}
end
# Handle special forms
defp typespec({:__MODULE__, _, atom}, vars, caller, state) when is_atom(atom) do
typespec(caller.module, vars, caller, state)
end
defp typespec({:__aliases__, _, _} = alias, vars, caller, state) do
# We set a function name to avoid tracking
# aliases in typespecs as compile time dependencies.
atom = Macro.expand(alias, %{caller | function: {:typespec, 0}})
typespec(atom, vars, caller, state)
end
# Handle funs
defp typespec([{:->, meta, [args, return]}], vars, caller, state)
when is_list(args) do
{args, state} = fn_args(meta, args, return, vars, caller, state)
{{:type, line(meta), :fun, args}, state}
end
# Handle type operator
defp typespec(
{:::, meta, [{var_name, var_meta, context}, expr]} = ann_type,
vars,
caller,
state
)
when is_atom(var_name) and is_atom(context) do
case typespec(expr, vars, caller, state) do
{{:ann_type, _, _}, _state} ->
message =
"invalid type annotation. Type annotations cannot be nested: " <>
"#{Macro.to_string(ann_type)}"
# TODO: Make this an error on v2.0 and remove the code below
:elixir_errors.erl_warn(caller.line, caller.file, message)
# This may be generating an invalid typespec but we need to generate it
# to avoid breaking existing code that was valid but only broke dialyzer
{right, state} = typespec(expr, vars, caller, state)
{{:ann_type, line(meta), [{:var, line(var_meta), var_name}, right]}, state}
{right, state} ->
{{:ann_type, line(meta), [{:var, line(var_meta), var_name}, right]}, state}
end
end
defp typespec({:::, meta, [left, right]} = expr, vars, caller, state) do
message =
"invalid type annotation. When using the | operator to represent the union of types, " <>
"make sure to wrap type annotations in parentheses: #{Macro.to_string(expr)}"
# TODO: Make this an error on v2.0, and remove the code below and
# the :undefined_type_error_enabled? key from the state
:elixir_errors.erl_warn(caller.line, caller.file, message)
# This may be generating an invalid typespec but we need to generate it
# to avoid breaking existing code that was valid but only broke dialyzer
state = %{state | undefined_type_error_enabled?: false}
{left, state} = typespec(left, vars, caller, state)
state = %{state | undefined_type_error_enabled?: true}
{right, state} = typespec(right, vars, caller, state)
{{:ann_type, line(meta), [left, right]}, state}
end
# Handle unary ops
defp typespec({op, meta, [integer]}, _, _, state) when op in [:+, :-] and is_integer(integer) do
line = line(meta)
{{:op, line, op, {:integer, line, integer}}, state}
end
# Handle remote calls in the form of @module_attribute.type.
# These are not handled by the general remote type clause as calling
# Macro.expand/2 on the remote does not expand module attributes (but expands
# things like __MODULE__).
defp typespec(
{{:., meta, [{:@, _, [{attr, _, _}]}, name]}, _, args} = orig,
vars,
caller,
state
) do
remote = Module.get_attribute(caller.module, attr)
unless is_atom(remote) and remote != nil do
message =
"invalid remote in typespec: #{Macro.to_string(orig)} (@#{attr} is #{inspect(remote)})"
compile_error(caller, message)
end
{remote_spec, state} = typespec(remote, vars, caller, state)
{name_spec, state} = typespec(name, vars, caller, state)
type = {remote_spec, meta, name_spec, args}
remote_type(type, vars, caller, state)
end
# Handle remote calls
defp typespec({{:., meta, [remote, name]}, _, args} = orig, vars, caller, state) do
# We set a function name to avoid tracking
# aliases in typespecs as compile time dependencies.
remote = Macro.expand(remote, %{caller | function: {:typespec, 0}})
unless is_atom(remote) do
compile_error(caller, "invalid remote in typespec: #{Macro.to_string(orig)}")
end
{remote_spec, state} = typespec(remote, vars, caller, state)
{name_spec, state} = typespec(name, vars, caller, state)
type = {remote_spec, meta, name_spec, args}
remote_type(type, vars, caller, state)
end
# Handle tuples
defp typespec({:tuple, meta, []}, _vars, _caller, state) do
{{:type, line(meta), :tuple, :any}, state}
end
defp typespec({:{}, meta, t}, vars, caller, state) when is_list(t) do
{args, state} = :lists.mapfoldl(&typespec(&1, vars, caller, &2), state, t)
{{:type, line(meta), :tuple, args}, state}
end
defp typespec({left, right}, vars, caller, state) do
typespec({:{}, [], [left, right]}, vars, caller, state)
end
# Handle blocks
defp typespec({:__block__, _meta, [arg]}, vars, caller, state) do
typespec(arg, vars, caller, state)
end
# Handle variables or local calls
defp typespec({name, meta, atom}, vars, caller, state) when is_atom(atom) do
if :lists.member(name, vars) do
state = update_local_vars(state, name)
{{:var, line(meta), name}, state}
else
typespec({name, meta, []}, vars, caller, state)
end
end
# Handle local calls
defp typespec({:string, meta, args}, vars, caller, state) do
warning =
"string() type use is discouraged. " <>
"For character lists, use charlist() type, for strings, String.t()\n" <>
Exception.format_stacktrace(Macro.Env.stacktrace(caller))
:elixir_errors.erl_warn(caller.line, caller.file, warning)
{args, state} = :lists.mapfoldl(&typespec(&1, vars, caller, &2), state, args)
{{:type, line(meta), :string, args}, state}
end
defp typespec({:nonempty_string, meta, args}, vars, caller, state) do
warning =
"nonempty_string() type use is discouraged. " <>
"For non-empty character lists, use nonempty_charlist() type, for strings, String.t()\n" <>
Exception.format_stacktrace(Macro.Env.stacktrace(caller))
:elixir_errors.erl_warn(caller.line, caller.file, warning)
{args, state} = :lists.mapfoldl(&typespec(&1, vars, caller, &2), state, args)
{{:type, line(meta), :nonempty_string, args}, state}
end
defp typespec({type, _meta, []}, vars, caller, state) when type in [:charlist, :char_list] do
if type == :char_list do
warning = "the char_list() type is deprecated, use charlist()"
:elixir_errors.erl_warn(caller.line, caller.file, warning)
end
typespec(quote(do: :elixir.charlist()), vars, caller, state)
end
defp typespec({:nonempty_charlist, _meta, []}, vars, caller, state) do
typespec(quote(do: :elixir.nonempty_charlist()), vars, caller, state)
end
defp typespec({:struct, _meta, []}, vars, caller, state) do
typespec(quote(do: :elixir.struct()), vars, caller, state)
end
defp typespec({:as_boolean, _meta, [arg]}, vars, caller, state) do
typespec(quote(do: :elixir.as_boolean(unquote(arg))), vars, caller, state)
end
defp typespec({:keyword, _meta, args}, vars, caller, state) when length(args) <= 1 do
typespec(quote(do: :elixir.keyword(unquote_splicing(args))), vars, caller, state)
end
defp typespec({:fun, meta, args}, vars, caller, state) do
{args, state} = :lists.mapfoldl(&typespec(&1, vars, caller, &2), state, args)
{{:type, line(meta), :fun, args}, state}
end
defp typespec({name, meta, args}, vars, caller, state) do
{args, state} = :lists.mapfoldl(&typespec(&1, vars, caller, &2), state, args)
arity = length(args)
case :erl_internal.is_type(name, arity) do
true ->
{{:type, line(meta), name, args}, state}
false ->
if state.undefined_type_error_enabled? and
not Map.has_key?(state.defined_type_pairs, {name, arity}) do
compile_error(caller, "type #{name}/#{arity} undefined")
end
state =
if :lists.member({name, arity}, state.used_type_pairs) do
state
else
%{state | used_type_pairs: [{name, arity} | state.used_type_pairs]}
end
{{:user_type, line(meta), name, args}, state}
end
end
# Handle literals
# An atom literal becomes a singleton atom type (line 0: no metadata).
defp typespec(atom, _, _, state) when is_atom(atom) do
{{:atom, 0, atom}, state}
end
# An integer literal becomes a singleton integer type.
defp typespec(integer, _, _, state) when is_integer(integer) do
{{:integer, 0, integer}, state}
end
# `[]` is the empty-list type.
defp typespec([], vars, caller, state) do
typespec({nil, [], []}, vars, caller, state)
end
# `[...]` is a non-empty list of any type.
defp typespec([{:..., _, atom}], vars, caller, state) when is_atom(atom) do
typespec({:nonempty_list, [], []}, vars, caller, state)
end
# `[t, ...]` is a non-empty list of t.
defp typespec([spec, {:..., _, atom}], vars, caller, state) when is_atom(atom) do
typespec({:nonempty_list, [], [spec]}, vars, caller, state)
end
# `[t]` is a (possibly empty) list of t.
defp typespec([spec], vars, caller, state) do
typespec({:list, [], [spec]}, vars, caller, state)
end
# A multi-element list is treated as a keyword list: each element must be a
# {atom, type} pair (enforced by validate_kw/3), and the result is a list of
# the union of those pairs, folded right-to-left to preserve written order.
defp typespec(list, vars, caller, state) when is_list(list) do
[head | tail] = :lists.reverse(list)
union =
:lists.foldl(
fn elem, acc -> {:|, [], [validate_kw(elem, list, caller), acc]} end,
validate_kw(head, list, caller),
tail
)
typespec({:list, [], [union]}, vars, caller, state)
end
# Anything not matched by the clauses above is not a valid typespec.
defp typespec(other, _vars, caller, _state) do
compile_error(caller, "unexpected expression in typespec: #{Macro.to_string(other)}")
end
## Helpers
defp compile_error(caller, desc) do
raise CompileError, file: caller.file, line: caller.line, description: desc
end
defp remote_type({remote, meta, name, args}, vars, caller, state) do
{args, state} = :lists.mapfoldl(&typespec(&1, vars, caller, &2), state, args)
{{:remote_type, line(meta), [remote, name, args]}, state}
end
# Flattens a right-nested union AST (`a | b | c`) into a flat member list.
defp collect_union({:|, _, [left, right]}), do: [left | collect_union(right)]
defp collect_union(other), do: [other]
defp validate_kw({key, _} = t, _, _caller) when is_atom(key), do: t
defp validate_kw(_, original, caller) do
compile_error(caller, "unexpected list in typespec: #{Macro.to_string(original)}")
end
# Validates a range typespec's endpoints. The first two clauses normalize
# unary-minus literals ({:op, _, :-, ...}) into plain negative integers,
# then the endpoints must be integer literals with first strictly less
# than last; anything else raises a compile error in the caller's env.
defp validate_range({:op, _, :-, {:integer, meta, first}}, last, caller) do
validate_range({:integer, meta, -first}, last, caller)
end
defp validate_range(first, {:op, _, :-, {:integer, meta, last}}, caller) do
validate_range(first, {:integer, meta, -last}, caller)
end
defp validate_range({:integer, _, first}, {:integer, _, last}, _caller) when first < last do
:ok
end
defp validate_range(_, _, caller) do
message =
"invalid range specification, expected both sides to be integers, " <>
"with the left side lower than the right side"
compile_error(caller, message)
end
defp fn_args(meta, args, return, vars, caller, state) do
{fun_args, state} = fn_args(meta, args, vars, caller, state)
{spec, state} = typespec(return, vars, caller, state)
case [fun_args, spec] do
[{:type, _, :any}, {:type, _, :any, []}] -> {[], state}
x -> {x, state}
end
end
defp fn_args(meta, [{:..., _, _}], _vars, _caller, state) do
{{:type, line(meta), :any}, state}
end
defp fn_args(meta, args, vars, caller, state) do
{args, state} = :lists.mapfoldl(&typespec(&1, vars, caller, &2), state, args)
{{:type, line(meta), :product, args}, state}
end
# Converts a bare-variable AST node (atom name, atom context) into an
# Erlang abstract-format var; any other expression passes through as-is.
defp variable({name, meta, args}) when is_atom(name) and is_atom(args) do
{:var, line(meta), name}
end
defp variable(expr), do: expr
defp clean_local_state(state) do
%{state | local_vars: %{}}
end
# Records a usage of `var_name` in the current typespec's local-var map:
# first sighting -> :used_once, any further sighting -> :used_multiple
# (used later to warn about type variables that appear only once).
defp update_local_vars(%{local_vars: local_vars} = state, var_name) do
  bumped =
    Map.update(local_vars, var_name, :used_once, fn
      :used_once -> :used_multiple
      :used_multiple -> :used_multiple
    end)

  %{state | local_vars: bumped}
end
defp ensure_no_unused_local_vars!(caller, local_vars) do
fun = fn
{name, :used_once} -> compile_error(caller, "type variable #{name} is unused")
_ -> :ok
end
:lists.foreach(fun, :maps.to_list(local_vars))
end
end
| 32.883023 | 99 | 0.623328 |
e807aaaebf1d4e0a122401f76db7ff91607a48c8 | 1,184 | exs | Elixir | config/config.exs | pk4media/elixir_device | 54aa60ebc294fb9523ac836cc86705410afda6b5 | [
"MIT"
] | 1 | 2017-02-21T06:17:15.000Z | 2017-02-21T06:17:15.000Z | config/config.exs | pk4media/elixir_device | 54aa60ebc294fb9523ac836cc86705410afda6b5 | [
"MIT"
] | null | null | null | config/config.exs | pk4media/elixir_device | 54aa60ebc294fb9523ac836cc86705410afda6b5 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# Fallback device categories (semantics assumed from the key names:
# `empty` = no user agent, `unknown` = unclassifiable -- confirm against
# the device library's docs). Mix.Config merges repeated `config/2` calls
# for the same app, so a single call with both keys is equivalent to the
# two separate calls it replaces.
config :device,
  empty: :desktop,
  unknown: :phone
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :device, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:device, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 32.888889 | 73 | 0.747466 |
e807abf4fb66e8f63df4e12ddc259378928b6adb | 1,034 | ex | Elixir | lib/nostrum/struct/event/channel_pins_update.ex | mckethanor/nostrum | 13a3927c872c1540266e9f1ba4bcad4182baa9bf | [
"MIT"
] | null | null | null | lib/nostrum/struct/event/channel_pins_update.ex | mckethanor/nostrum | 13a3927c872c1540266e9f1ba4bcad4182baa9bf | [
"MIT"
] | null | null | null | lib/nostrum/struct/event/channel_pins_update.ex | mckethanor/nostrum | 13a3927c872c1540266e9f1ba4bcad4182baa9bf | [
"MIT"
] | 1 | 2021-09-13T20:59:42.000Z | 2021-09-13T20:59:42.000Z | defmodule Nostrum.Struct.Event.ChannelPinsUpdate do
@moduledoc "Represents an update to channel pins."
@moduledoc since: "0.5.0"
alias Nostrum.Struct.Channel
alias Nostrum.Struct.Guild
alias Nostrum.Util
defstruct [:guild_id, :channel_id, :last_pin_timestamp]
@typedoc "The ID of the guild, if the pin update was on a guild"
@type guild_id :: Guild.id() | nil
@typedoc "The ID of the channel"
@type channel_id :: Channel.id()
@typedoc "The time at which the most recent pinned message was pinned"
@type last_pin_timestamp :: DateTime.t() | nil
@typedoc "Event sent when a message is pinned or unpinned in a text channel"
@type t :: %__MODULE__{
guild_id: guild_id,
channel_id: channel_id,
last_pin_timestamp: last_pin_timestamp
}
@doc false
def to_struct(map) do
  # Normalize all keys to atoms first (string keys arrive from the gateway
  # payload; Util.maybe_to_atom leaves atom keys untouched).
  atomized = Map.new(map, fn {key, value} -> {Util.maybe_to_atom(key), value} end)

  # Mirror Map.update/4 semantics: convert the timestamp when present,
  # otherwise default the key to nil.
  fields =
    case atomized do
      %{last_pin_timestamp: timestamp} ->
        %{atomized | last_pin_timestamp: Util.maybe_to_datetime(timestamp)}

      _ ->
        Map.put(atomized, :last_pin_timestamp, nil)
    end

  struct(__MODULE__, fields)
end
end
| 27.945946 | 78 | 0.681818 |
e807bd586893c5e670bfea51dbc0b878a6d99d95 | 373 | exs | Elixir | day06/el_a.exs | mason-bially/aoc-2021 | 866ffc7e85f690c2ad9195da2304bfb515819023 | [
"MIT"
] | null | null | null | day06/el_a.exs | mason-bially/aoc-2021 | 866ffc7e85f690c2ad9195da2304bfb515819023 | [
"MIT"
] | null | null | null | day06/el_a.exs | mason-bially/aoc-2021 | 866ffc7e85f690c2ad9195da2304bfb515819023 | [
"MIT"
] | null | null | null | Code.require_file("el.ex", __DIR__)
# Parse the comma-separated input file into a frequency map (value => count)
# via the project's Util helpers (defined in el.ex, loaded above).
initial =
File.stream!("day06/input", [encoding: :utf8], 1)
|> Util.stream_comma_seperated
|> Enum.map(&String.trim/1)
|> Stream.map(&elem(Integer.parse(&1), 0))
|> Enum.reduce(%{}, &Util.counter/2)
# Advance the simulation 80 steps and print the total count across all
# buckets. (Presumably AoC 2021 day 6 part one, the 80-day lanternfish
# simulation, judging by the path -- confirm in Day06.simulate/1.)
Enum.reduce(1..80, initial, fn _, acc -> Day06.simulate(acc) end)
|> Enum.map(fn {_, v} -> v end)
|> Enum.sum
|> IO.inspect # prints the final answer and returns it
| 26.642857 | 65 | 0.632708 |
e807e4c7e8755ac60448e0809f6e41df953c7852 | 3,780 | exs | Elixir | test/ptr_web/controllers/cellar_controller_test.exs | francocatena/ptr | 4c8a960cdcb1c8523334fcc0cddba6b7fb3b3e60 | [
"MIT"
] | null | null | null | test/ptr_web/controllers/cellar_controller_test.exs | francocatena/ptr | 4c8a960cdcb1c8523334fcc0cddba6b7fb3b3e60 | [
"MIT"
] | 2 | 2021-03-09T01:59:47.000Z | 2022-02-10T17:08:54.000Z | test/ptr_web/controllers/cellar_controller_test.exs | francocatena/ptr | 4c8a960cdcb1c8523334fcc0cddba6b7fb3b3e60 | [
"MIT"
] | null | null | null | defmodule PtrWeb.CellarControllerTest do
use PtrWeb.ConnCase
use Ptr.Support.LoginHelper
import Ptr.Support.FixtureHelper
@create_attrs %{identifier: "some identifier", name: "some name"}
@update_attrs %{identifier: "some updated identifier", name: "some updated name"}
@invalid_attrs %{identifier: nil, name: nil}
describe "unauthorized access" do
test "requires user authentication on all actions", %{conn: conn} do
Enum.each(
[
get(conn, Routes.cellar_path(conn, :index)),
get(conn, Routes.cellar_path(conn, :new)),
post(conn, Routes.cellar_path(conn, :create, %{})),
get(conn, Routes.cellar_path(conn, :show, "123")),
get(conn, Routes.cellar_path(conn, :edit, "123")),
put(conn, Routes.cellar_path(conn, :update, "123", %{})),
delete(conn, Routes.cellar_path(conn, :delete, "123"))
],
fn conn ->
assert html_response(conn, 302)
assert conn.halted
end
)
end
end
describe "index" do
setup [:create_cellar]
@tag login_as: "test@user.com"
test "lists all cellars", %{conn: conn, cellar: cellar} do
conn = get(conn, Routes.cellar_path(conn, :index))
response = html_response(conn, 200)
assert response =~ "Cellars"
assert response =~ cellar.name
end
end
describe "empty index" do
@tag login_as: "test@user.com"
test "lists no cellars", %{conn: conn} do
conn = get(conn, Routes.cellar_path(conn, :index))
assert html_response(conn, 200) =~ "you have no cellars"
end
end
describe "new cellar" do
@tag login_as: "test@user.com"
test "renders form", %{conn: conn} do
conn = get(conn, Routes.cellar_path(conn, :new))
assert html_response(conn, 200) =~ "New cellar"
end
end
describe "create cellar" do
@tag login_as: "test@user.com"
test "redirects to show when data is valid", %{conn: conn} do
conn = post(conn, Routes.cellar_path(conn, :create), cellar: @create_attrs)
assert %{id: id} = redirected_params(conn)
assert redirected_to(conn) == Routes.cellar_path(conn, :show, id)
end
@tag login_as: "test@user.com"
test "renders errors when data is invalid", %{conn: conn} do
conn = post(conn, Routes.cellar_path(conn, :create), cellar: @invalid_attrs)
assert html_response(conn, 200) =~ "New cellar"
end
end
describe "edit cellar" do
setup [:create_cellar]
@tag login_as: "test@user.com"
test "renders form for editing chosen cellar", %{conn: conn, cellar: cellar} do
conn = get(conn, Routes.cellar_path(conn, :edit, cellar))
assert html_response(conn, 200) =~ "Edit cellar"
end
end
describe "update cellar" do
setup [:create_cellar]
@tag login_as: "test@user.com"
test "redirects when data is valid", %{conn: conn, cellar: cellar} do
conn = put(conn, Routes.cellar_path(conn, :update, cellar), cellar: @update_attrs)
assert redirected_to(conn) == Routes.cellar_path(conn, :show, cellar)
end
@tag login_as: "test@user.com"
test "renders errors when data is invalid", %{conn: conn, cellar: cellar} do
conn = put(conn, Routes.cellar_path(conn, :update, cellar), cellar: @invalid_attrs)
assert html_response(conn, 200) =~ "Edit cellar"
end
end
describe "delete cellar" do
setup [:create_cellar]
@tag login_as: "test@user.com"
test "deletes chosen cellar", %{conn: conn, cellar: cellar} do
conn = delete(conn, Routes.cellar_path(conn, :delete, cellar))
assert redirected_to(conn) == Routes.cellar_path(conn, :index)
end
end
defp create_cellar(_) do
{:ok, cellar, _} = fixture(:cellar)
{:ok, cellar: cellar}
end
end
| 30.24 | 89 | 0.643386 |
e807fbff73d951680d3dc792950aae2d68f4728c | 2,476 | ex | Elixir | clients/admin/lib/google_api/admin/datatransfer_v1/model/application.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/admin/lib/google_api/admin/datatransfer_v1/model/application.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/admin/lib/google_api/admin/datatransfer_v1/model/application.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Admin.Datatransfer_v1.Model.Application do
  @moduledoc """
  Applications resources represent applications installed on the domain that support transferring ownership of user data.

  ## Attributes

  *   `etag` (*type:* `String.t`, *default:* `nil`) - Etag of the resource.
  *   `id` (*type:* `String.t`, *default:* `nil`) - The application's ID.
  *   `kind` (*type:* `String.t`, *default:* `admin#datatransfer#ApplicationResource`) - Identifies the resource as a DataTransfer Application Resource.
  *   `name` (*type:* `String.t`, *default:* `nil`) - The application's name.
  *   `transferParams` (*type:* `list(GoogleApi.Admin.Datatransfer_v1.Model.ApplicationTransferParam.t)`, *default:* `nil`) - The list of all possible transfer parameters for this application. These parameters can be used to select the data of the user in this application to be transferred.
  """

  # NOTE: this file is auto generated (see header); do not hand-edit field order.
  # The `use` below presumably injects the `field/1-3` macros and the
  # encode/decode plumbing used by the Poison implementations further down.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :etag => String.t() | nil,
          :id => String.t() | nil,
          :kind => String.t() | nil,
          :name => String.t() | nil,
          :transferParams =>
            list(GoogleApi.Admin.Datatransfer_v1.Model.ApplicationTransferParam.t()) | nil
        }

  field(:etag)
  field(:id)
  field(:kind)
  field(:name)

  # Nested list field: each element is decoded into ApplicationTransferParam.
  field(:transferParams,
    as: GoogleApi.Admin.Datatransfer_v1.Model.ApplicationTransferParam,
    type: :list
  )
end
defimpl Poison.Decoder, for: GoogleApi.Admin.Datatransfer_v1.Model.Application do
  # Delegate JSON decoding to the model's generated `decode/2`.
  def decode(value, options),
    do: GoogleApi.Admin.Datatransfer_v1.Model.Application.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Admin.Datatransfer_v1.Model.Application do
  # Delegate JSON encoding to the shared ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 38.6875 | 291 | 0.712036 |
e8081aa8ce4bbe3013bb6790669270a4eeb67905 | 44,064 | ex | Elixir | lib/ecto/schema.ex | rbishop/ecto | a8a3215c9e2e35f7556f54c8d47d78a3670796d8 | [
"Apache-2.0"
] | null | null | null | lib/ecto/schema.ex | rbishop/ecto | a8a3215c9e2e35f7556f54c8d47d78a3670796d8 | [
"Apache-2.0"
] | null | null | null | lib/ecto/schema.ex | rbishop/ecto | a8a3215c9e2e35f7556f54c8d47d78a3670796d8 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Schema do
@moduledoc ~S"""
Defines a schema for a model.
A schema is a struct with associated metadata that is persisted to a
repository. Every schema model is also a struct, that means that you work
with models just like you would work with structs.
## Example
defmodule User do
use Ecto.Schema
schema "users" do
field :name, :string
field :age, :integer, default: 0
has_many :posts, Post
end
end
By default, a schema will generate a primary key named `id`
of type `:integer` and `belongs_to` associations in the schema will generate
foreign keys of type `:integer`. Those setting can be configured
below.
## Schema attributes
The schema supports some attributes to be set before hand,
configuring the defined schema.
Those attributes are:
* `@primary_key` - configures the schema primary key. It expects
a tuple with the primary key name, type (:id or :binary_id) and options. Defaults
to `{:id, :id, autogenerate: true}`. When set to
false, does not define a primary key in the model;
* `@foreign_key_type` - configures the default foreign key type
used by `belongs_to` associations. Defaults to `:integer`;
* `@timestamps_opts` - configures the default timestamps type
used by `timestamps`. Defaults to `[type: Ecto.DateTime, usec: false]`;
* `@derive` - the same as `@derive` available in `Kernel.defstruct/1`
as the schema defines a struct behind the scenes;
The advantage of configuring the schema via those attributes is
that they can be set with a macro to configure application wide
defaults.
For example, if your database does not support autoincrementing
primary keys and requires something like UUID or a RecordID, you
configure and use`:binary_id` as your primary key type as follows:
# Define a module to be used as base
defmodule MyApp.Model do
defmacro __using__(_) do
quote do
use Ecto.Model
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
end
end
end
# Now use MyApp.Model to define new models
defmodule MyApp.Comment do
use MyApp.Model
schema "comments" do
belongs_to :post, MyApp.Post
end
end
Any models using `MyApp.Model` will get the `:id` field with type
`:binary_id` as primary key. We explain what the `:binary_id` type
entails in the next section.
The `belongs_to` association on `MyApp.Comment` will also define
a `:post_id` field with `:binary_id` type that references the `:id`
field of the `MyApp.Post` model.
## Primary keys
Ecto supports two ID types, called `:id` and `:binary_id` which are
often used as the type for primary keys and associations.
The `:id` type is used when the primary key is an integer while the
`:binary_id` is used when the primary key is in binary format, which
may be `Ecto.UUID` for databases like PostgreSQL and MySQL, or some
specific ObjectID or RecordID often imposed by NoSQL databases.
In both cases, both types have their semantics specified by the
underlying adapter/database. For example, if you use the `:id`
type with `:autogenerate`, it means the database will be responsible
for auto-generation the id if it supports it.
Similarly, the `:binary_id` type may be generated in the adapter
for cases like UUID but it may also be handled by the database if
required. In any case, both scenarios are handled transparently by
Ecto.
Besides `:id` and `:binary_id`, which are often used by primary
and foreign keys, Ecto provides a huge variety of types to be used
by the remaining columns.
## Types and casting
When defining the schema, types need to be given. Types are split
in two categories, primitive types and custom types.
### Primitive types
The primitive types are:
Ecto type | Elixir type | Literal syntax in query
:---------------------- | :---------------------- | :---------------------
`:id` | `integer` | 1, 2, 3
`:binary_id` | `binary` | `<<int, int, int, ...>>`
`:integer` | `integer` | 1, 2, 3
`:float` | `float` | 1.0, 2.0, 3.0
`:boolean` | `boolean` | true, false
`:string` | UTF-8 encoded `string` | "hello"
`:binary` | `binary` | `<<int, int, int, ...>>`
`{:array, inner_type}` | `list` | `[value, value, value, ...]`
`:decimal` | [`Decimal`](https://github.com/ericmj/decimal)
`:map` | `map`
**Note:** For the `:array` type, replace `inner_type` with one of
the valid types, such as `:string`.
### Custom types
Besides providing primitive types, Ecto allows custom types to be
implemented by developers, allowing Ecto behaviour to be extended.
A custom type is a module that implements the `Ecto.Type` behaviour.
By default, Ecto provides the following custom types:
Custom type | Database type | Elixir type
:---------------------- | :---------------------- | :---------------------
`Ecto.DateTime` | `:datetime` | `%Ecto.DateTime{}`
`Ecto.Date` | `:date` | `%Ecto.Date{}`
`Ecto.Time` | `:time` | `%Ecto.Time{}`
`Ecto.UUID` | `:uuid` | "uuid-string"
Read the `Ecto.Type` documentation for more information on implementing
your own types.
### The map type
The map type allows developers to store an Elixir map directly
in the database:
# In your migration
create table(:users) do
add :data, :map
end
# In your model
field :data, :map
# Now in your code
%User{data: %{"foo" => "bar"}} |> Repo.insert!
%User{data: %{"foo" => value}} = Repo.one(User)
value #=> "bar"
Keep in mind that we advise the map keys to be strings or integers
instead of atoms. Atoms may be accepted depending on how maps are
serialized but the database will always return atom keys as strings
due to security reasons.
In order to support maps, different databases may employ different
techniques. For example, PostgreSQL will store those values in jsonb
fields, allowing you to even query parts of it. MySQL and MSSQL, on
the other hand, do not yet provide a JSON type, so the value will be
stored in a text field.
For maps to work in such databases, Ecto will need a JSON library.
By default Ecto will use [Poison](http://github.com/devinus/poison)
which needs to be added your deps in `mix.exs`:
{:poison, "~> 1.0"}
You can however tell Ecto to use any other library by configuring it:
config :ecto, :json_library, YourLibraryOfChoice
### Casting
When directly manipulating the struct, it is the responsibility of
the developer to ensure the field values have the proper type. For
example, you can create a user struct with an invalid value
for `age`:
iex> user = %User{age: "0"}
iex> user.age
"0"
However, if you attempt to persist the struct above, an error will
be raised since Ecto validates the types when sending them to the
adapter/database.
Therefore, when working and manipulating external data, it is
recommended the usage of `Ecto.Changeset`'s that are able to filter
and properly cast external data:
changeset = Ecto.Changeset.cast(%User{}, %{"age" => "0"}, [:age], [])
user = Repo.insert!(changeset)
In fact, `Ecto.Changeset` and custom types provide a powerful
combination to extend Ecto types and queries.
Finally, models can also have virtual fields by passing the
`virtual: true` option. These fields are not persisted to the database
and can optionally not be type checked by declaring type `:any`.
## Reflection
Any schema module will generate the `__schema__` function that can be
used for runtime introspection of the schema:
* `__schema__(:source)` - Returns the source as given to `schema/2`;
* `__schema__(:primary_key)` - Returns a list of the field that is the primary
key or [] if there is none;
* `__schema__(:fields)` - Returns a list of all non-virtual field names;
* `__schema__(:type, field)` - Returns the type of the given non-virtual field;
* `__schema__(:types)` - Returns a keyword list of all non-virtual
field names and their type;
* `__schema__(:associations)` - Returns a list of all association field names;
* `__schema__(:association, assoc)` - Returns the association reflection of the given assoc;
* `__schema__(:embeds)` - Returns a list of all embedded field names;
* `__schema__(:embed, embed)` - Returns the embedding reflection of the given embed;
* `__schema__(:read_after_writes)` - Non-virtual fields that must be read back
from the database after every write (insert or update);
* `__schema__(:autogenerate)` - Non-virtual fields that are auto generated on insert;
* `__schema__(:autogenerate_id)` - Primary key that is auto generated on insert;
Furthermore, both `__struct__` and `__changeset__` functions are
defined so structs and changeset functionalities are available.
"""
defmodule Metadata do
@moduledoc """
Stores metadata of a struct.
The fields are:
* `state` - the state in a struct's lifetime, one of `:built`,
`:loaded`, `:deleted`
* `source` - the source for the model alongside the query prefix,
defaults to `{nil, "source"}`
* `context` - context stored by the database
"""
defstruct [:state, :source, :context]
defimpl Inspect do
import Inspect.Algebra
def inspect(metadata, opts) do
concat ["#Ecto.Schema.Metadata<", to_doc(metadata.state, opts), ">"]
end
end
end
  @doc false
  defmacro __using__(_) do
    quote do
      import Ecto.Schema, only: [schema: 2, embedded_schema: 1]

      # Per-module defaults consumed by schema/2; a module may override these
      # attributes before calling `schema/2` (see the moduledoc).
      @primary_key {:id, :id, autogenerate: true}
      @timestamps_opts []
      @foreign_key_type :id
      @before_compile Ecto.Schema

      # Accumulating attributes that the field/assoc/embed macros append to
      # while the schema block compiles; schema/2 reads them back.
      Module.register_attribute(__MODULE__, :ecto_fields, accumulate: true)
      Module.register_attribute(__MODULE__, :ecto_assocs, accumulate: true)
      Module.register_attribute(__MODULE__, :ecto_embeds, accumulate: true)
      Module.register_attribute(__MODULE__, :ecto_raw, accumulate: true)
      Module.register_attribute(__MODULE__, :ecto_autogenerate, accumulate: true)
      Module.put_attribute(__MODULE__, :ecto_autogenerate_id, nil)
    end
  end
  @doc """
  Defines an embedded schema.

  This macro is literally a shortcut for:

      @primary_key {:id, :binary_id, autogenerate: true}
      schema "embedded \#{inspect __MODULE__}" do

  In other words, it sets an autogenerated `:binary_id` primary key and
  derives the schema source name from the current module.
  """
  defmacro embedded_schema(opts) do
    quote do
      @primary_key {:id, :binary_id, autogenerate: true}
      schema "embedded #{inspect __MODULE__}", unquote(opts)
    end
  end
  @doc """
  Defines a schema with a source name and field definitions.
  """
  defmacro schema(source, [do: block]) do
    quote do
      source = unquote(source)

      unless is_binary(source) do
        raise ArgumentError, "schema source must be a string, got: #{inspect source}"
      end

      # Accumulators for struct fields and changeset types, filled in by the
      # field/assoc/embed macros while the schema body is evaluated below.
      Module.register_attribute(__MODULE__, :changeset_fields, accumulate: true)
      Module.register_attribute(__MODULE__, :struct_fields, accumulate: true)

      # Every schema struct carries a __meta__ field recording its lifecycle
      # state and source (see Ecto.Schema.Metadata).
      Module.put_attribute(__MODULE__, :struct_fields,
        {:__meta__, %Metadata{state: :built, source: {nil, source}}})

      # Expand @primary_key (defaulted in __using__/1, overridable per module).
      primary_key_fields =
        case @primary_key do
          false ->
            []
          {name, type, opts} ->
            Ecto.Schema.__field__(__MODULE__, name, type, true, opts)
            [name]
          other ->
            raise ArgumentError, "@primary_key must be false or {name, type, opts}"
        end

      # Evaluate the schema body; try/after ensures the import of Ecto.Schema
      # is scoped to the block.
      try do
        import Ecto.Schema
        unquote(block)
      after
        :ok
      end

      # Accumulated attributes are built in reverse; restore definition order.
      fields = @ecto_fields |> Enum.reverse
      assocs = @ecto_assocs |> Enum.reverse
      embeds = @ecto_embeds |> Enum.reverse

      # Generate defstruct, __changeset__ and the __schema__ reflection
      # functions documented in the moduledoc.
      Module.eval_quoted __ENV__, [
        Ecto.Schema.__struct__(@struct_fields),
        Ecto.Schema.__changeset__(@changeset_fields),
        Ecto.Schema.__schema__(source, fields, primary_key_fields),
        Ecto.Schema.__types__(fields),
        Ecto.Schema.__assocs__(assocs),
        Ecto.Schema.__embeds__(embeds),
        Ecto.Schema.__read_after_writes__(@ecto_raw),
        Ecto.Schema.__autogenerate__(@ecto_autogenerate_id)]
    end
  end
## API
@doc """
Defines a field on the model schema with given name and type.
## Options
* `:default` - Sets the default value on the schema and the struct.
The default value is calculated at compilation time, so don't use
expressions like `Ecto.DateTime.local` or `Ecto.UUID.generate` as
they would then be the same for all records
* `:autogenerate` - Annotates the field to be autogenerated before
insertion if not value is set.
* `:read_after_writes` - When true, the field only sent on insert
if not nil and always read back from the repository during inserts
and updates.
For relational databases, this means the RETURNING option of those
statements are used. For this reason, MySQL does not support this
option and will raise an error if a model is inserted/updated with
read after writes fields.
* `:virtual` - When true, the field is not persisted to the database.
Notice virtual fields do not support `:autogenerate` nor
`:read_after_writes`.
"""
  defmacro field(name, type \\ :string, opts \\ []) do
    quote do
      # `false` marks a non-primary-key field; schema/2 passes `true` when it
      # defines the primary key through the same helper.
      Ecto.Schema.__field__(__MODULE__, unquote(name), unquote(type), false, unquote(opts))
    end
  end
@doc """
Generates `:inserted_at` and `:updated_at` timestamp fields.
When using `Ecto.Model`, the fields generated by this macro
will automatically be set to the current time when inserting
and updating values in a repository.
## Options
* `:type` - the timestamps type, defaults to `Ecto.DateTime`.
* `:usec` - boolean, sets whether microseconds are used in timestamps.
Microseconds will be 0 if false. Defaults to false.
* `:inserted_at` - the name of the column for insertion times or `false`
* `:updated_at` - the name of the column for update times or `false`
All options can be pre-configured by setting `@timestamps_opts`.
"""
  defmacro timestamps(opts \\ []) do
    # bind_quoted injects the already-evaluated macro arguments (here `opts`)
    # into the generated code.
    quote bind_quoted: binding do
      # Precedence: library defaults < module-level @timestamps_opts <
      # call-site opts (last merge wins).
      timestamps =
        [inserted_at: :inserted_at, updated_at: :updated_at,
         type: Ecto.DateTime, usec: false]
        |> Keyword.merge(@timestamps_opts)
        |> Keyword.merge(opts)

      # A column name of `false` disables that timestamp field entirely.
      if inserted_at = Keyword.fetch!(timestamps, :inserted_at) do
        Ecto.Schema.field(inserted_at, Keyword.fetch!(timestamps, :type), [])
      end

      if updated_at = Keyword.fetch!(timestamps, :updated_at) do
        Ecto.Schema.field(updated_at, Keyword.fetch!(timestamps, :type), [])
      end

      @ecto_timestamps timestamps
    end
  end
@doc ~S"""
Indicates a one-to-many association with another model.
The current model has zero or more records of the other model. The other
model often has a `belongs_to` field with the reverse association.
## Options
* `:foreign_key` - Sets the foreign key, this should map to a field on the
other model, defaults to the underscored name of the current model
suffixed by `_id`
* `:references` - Sets the key on the current model to be used for the
association, defaults to the primary key on the model
* `:through` - If this association must be defined in terms of existing
associations. Read below for more information
* `:on_delete` - The action taken on associations when parent model
is deleted. May be `:nothing` (default), `:nilify_all`, `:delete_all`
or `:fetch_and_delete`. See `Ecto.Model.Dependent` for more info.
Notice `:on_delete` may also be set in migrations when creating
a reference. If supported, relying on the database is often prefered
* `:on_replace` - The action taken on associations when the model is
replaced when casting or manipulating parent changeset. May be
`:delete` (default) or `:nilify`. See `Ecto.Changeset`'s section on
related models for more info
* `:on_cast` - The default changeset function to call during casting
of a nested association which can be overridden in `Ecto.Changeset.cast/4`.
It's an atom representing the function name in the associated model's
module which will receive the module and the parameters for casting
(default: `:changeset`)
* `:defaults` - Default values to use when building the association
## Examples
defmodule Post do
use Ecto.Model
schema "posts" do
has_many :comments, Comment
end
end
# Get all comments for a given post
post = Repo.get(Post, 42)
comments = Repo.all assoc(post, :comments)
# The comments can come preloaded on the post struct
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :comments))
post.comments #=> [%Comment{...}, ...]
## has_many/has_one :through
Ecto also supports defining associations in terms of other associations
via the `:through` option. Let's see an example:
defmodule Post do
use Ecto.Model
schema "posts" do
has_many :comments, Comment
has_one :permalink, Permalink
# In the has_many :through example below, in the list
# `[:comments, :author]` the `:comments` refers to the
# `has_many :comments` in the Post model's own schema
# and the `:author` refers to the `belongs_to :author`
# of the Comment module's schema (the module below).
# (see the description below for more details)
has_many :comments_authors, through: [:comments, :author]
# Specify the association with custom source
has_many :tags, {"posts_tags", Tag}
end
end
defmodule Comment do
use Ecto.Model
schema "comments" do
belongs_to :author, Author
belongs_to :post, Post
has_one :post_permalink, through: [:post, :permalink]
end
end
In the example above, we have defined a `has_many :through` association
named `:comments_authors`. A `:through` association always expect a list
and the first element of the list must be a previously defined association
in the current module. For example, `:comments_authors` first points to
`:comments` in the same module (Post), which then points to `:author` in
the next model `Comment`.
This `:through` associations will return all authors for all comments
that belongs to that post:
# Get all comments for a given post
post = Repo.get(Post, 42)
authors = Repo.all assoc(post, :comments_authors)
`:through` associations are read-only as they are useful to avoid repetition
allowing the developer to easily retrieve data that is often seem together
but stored across different tables.
`:through` associations can also be preloaded. In such cases, not only
the `:through` association is preloaded but all intermediate steps are
preloaded too:
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :comments_authors))
post.comments_authors #=> [%Author{...}, ...]
# The comments for each post will be preloaded too
post.comments #=> [%Comment{...}, ...]
# And the author for each comment too
hd(post.comments).author #=> %Author{...}
Finally, `:through` can be used with multiple associations (not only 2)
and with associations of any kind, including `belongs_to` and others
`:through` associations. When the `:through` association is expected to
return one or no item, `has_one :through` should be used instead, as in
the example at the beginning of this section:
# How we defined the association above
has_one :post_permalink, through: [:post, :permalink]
# Get a preloaded comment
[comment] = Repo.all(Comment) |> Repo.preload(:post_permalink)
comment.post_permalink #=> %Permalink{...}
"""
  defmacro has_many(name, queryable, opts \\ []) do
    quote do
      # Compile-time delegation to Ecto.Schema.__has_many__/4, which records
      # the association on the calling schema module.
      Ecto.Schema.__has_many__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
    end
  end
@doc ~S"""
Indicates a one-to-one association with another model.
The current model has zero or one records of the other model. The other
model often has a `belongs_to` field with the reverse association.
## Options
* `:foreign_key` - Sets the foreign key, this should map to a field on the
other model, defaults to the underscored name of the current model
suffixed by `_id`
* `:references` - Sets the key on the current model to be used for the
association, defaults to the primary key on the model
* `:through` - If this association must be defined in terms of existing
associations. Read the section in `has_many/3` for more information
* `:on_delete` - The action taken on associations when parent model
is deleted. May be `:nothing` (default), `:nilify_all`, `:delete_all`
or `:fetch_and_delete`. See `Ecto.Model.Dependent` for more info.
Notice `:on_delete` may also be set in migrations when creating
a reference. If supported, relying on the database is often prefered
* `:on_replace` - The action taken on associations when the model is
replaced when casting or manipulating parent changeset. May be
`:delete` (default) or `:nilify`. See `Ecto.Changeset`'s section on
related models for more info
* `:on_cast` - The default changeset function to call during casting
of a nested association which can be overridden in `Ecto.Changeset.cast/4`.
It's an atom representing the function name in the associated model's
module which will receive the module and the parameters for casting
(default: `:changeset`)
* `:defaults` - Default values to use when building the association
## Examples
defmodule Post do
use Ecto.Model
schema "posts" do
has_one :permalink, Permalink
# Specify the association with custom source
has_one :category, {"posts_categories", Category}
end
end
# The permalink can come preloaded on the post struct
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :permalink))
post.permalink #=> %Permalink{...}
"""
  defmacro has_one(name, queryable, opts \\ []) do
    quote do
      # Compile-time delegation to Ecto.Schema.__has_one__/4, which records
      # the association on the calling schema module.
      Ecto.Schema.__has_one__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
    end
  end
@doc ~S"""
Indicates a one-to-one association with another model.
The current model belongs to zero or one records of the other model. The other
model often has a `has_one` or a `has_many` field with the reverse association.
You should use `belongs_to` in the table that contains the foreign key. Imagine
a company <-> manager relationship. If the company contains the `manager_id` in
the underlying database table, we say the company belongs to manager.
In fact, when you invoke this macro, a field with the name of foreign key is
automatically defined in the schema for you.
## Options
* `:foreign_key` - Sets the foreign key field name, defaults to the name
of the association suffixed by `_id`. For example, `belongs_to :company`
will define foreign key of `:company_id`
* `:references` - Sets the key on the other model to be used for the
association, defaults to: `:id`
* `:define_field` - When false, does not automatically define a `:foreign_key`
field, implying the user is defining the field manually elsewhere
* `:type` - Sets the type of automatically defined `:foreign_key`.
Defaults to: `:integer` and be set per schema via `@foreign_key_type`
All other options are forwarded to the underlying foreign key definition
and therefore accept the same options as `field/3`.
## Examples
defmodule Comment do
use Ecto.Model
schema "comments" do
belongs_to :post, Post
end
end
# The post can come preloaded on the comment record
[comment] = Repo.all(from(c in Comment, where: c.id == 42, preload: :post))
comment.post #=> %Post{...}
## Polymorphic associations
One common use case for belongs to associations is to handle
polymorphism. For example, imagine you have defined a Comment
model and you wish to use it for commenting on both tasks and
posts.
Some abstractions would force you to define some sort of
polymorphic association with two fields in your database:
* commentable_type
* commentable_id
The problem with this approach is that it breaks references in
the database. You can't use foreign keys and it is very inneficient
both in terms of query time and storage.
In Ecto, we have two ways to solve this issue. The simplest one
is to define multiple fields in the Comment model, one for each
association:
* task_id
* post_id
Unless you have dozens of columns, this is simpler for the developer,
more DB friendly and more efficient on all aspects.
Alternatively, because Ecto does not tie a model to a given table,
we can use separate tables for each association. Let's start over
and define a new Comment model:
defmodule Comment do
use Ecto.Model
schema "abstract table: comments" do
# This will be used by associations on each "concrete" table
field :assoc_id, :integer
end
end
Notice we have changed the table name to "abstract table: comments".
You can choose whatever name you want, the point here is that this
particular table will never exist.
Now in your Post and Task models:
defmodule Post do
use Ecto.Model
schema "posts" do
has_many :comments, {"posts_comments", Comment}, foreign_key: :assoc_id
end
end
defmodule Task do
use Ecto.Model
schema "tasks" do
has_many :comments, {"tasks_comments", Comment}, foreign_key: :assoc_id
end
end
Now each association uses its own specific table, "posts_comments"
and "tasks_comments", which must be created on migrations. The
advantage of this approach is that we never store unrelated data
together, also ensuring we keep databases references fast and correct.
When using this technique, the only limitation is that you cannot
build comments directly. For example, the command below
Repo.insert!(%Comment{})
will attempt to use the abstract table. Instead, one should
Repo.insert!(build(post, :comments))
where `build/2` is defined in `Ecto.Model`. You can also
use `assoc/2` in both `Ecto.Model` and in the query syntax
to easily retrieve associated comments to a given post or
task:
# Fetch all comments associated to the given task
Repo.all(assoc(task, :comments))
Finally, if for some reason you wish to query one of comments
table directly, you can also specify the tuple source in
the query syntax:
Repo.all from(c in {"posts_comments", Comment}), ...)
"""
  defmacro belongs_to(name, queryable, opts \\ []) do
    quote do
      # Compile-time delegation to Ecto.Schema.__belongs_to__/4, which records
      # the association (and, per the docs above, defines the foreign-key field).
      Ecto.Schema.__belongs_to__(__MODULE__, unquote(name), unquote(queryable), unquote(opts))
    end
  end
## Embeds
@doc ~S"""
Indicates an embedding of one model.
The current model has zero or one records of the other model embedded
inside of it. It uses a field similar to the `:map` type for storage,
but allows embedded models to have all the things regular models can -
callbacks, structured fields, etc. All typecasting operations are
performed on an embedded model alongside the operations on the parent
model.
You must declare your `embeds_one/3` field with type `:map` at the
database level.
## Options
* `:on_cast` - the default changeset function to call during casting,
which can be overridden in `Ecto.Changeset.cast/4`. It's an atom representing
the function name in the embedded model's module which will receive
the module and the parameters for casting (default: `:changeset`).
* `:strategy` - the strategy for storing models in the database.
Ecto supports only the `:replace` strategy out of the box which is the
default. Read the strategy in `embeds_many/3` for more info.
* `:on_replace` - The action taken on embeds when it is replaced
during casting or manipulating parent changeset. For now, it
supports `:delete`, implying delete callbacks will be invoked.
## Examples
defmodule Order do
use Ecto.Model
schema "orders" do
embeds_one :item, Item
end
end
defmodule Item do
use Ecto.Model
# A required field for all embedded documents
@primary_key {:id, :binary_id, autogenerate: true}
schema "" do
field :name
end
end
# The item is loaded with the order
order = Repo.get!(Order, 42)
order.item #=> %Item{...}
Adding and removal of embeds can only be done via the `Ecto.Changeset`
API so Ecto can properly track the embedded model life-cycle:
order = Repo.get!(Order, 42)
# Generate a changeset
changeset = Ecto.Changeset.change(order)
# Change, put a new one or remove an item
changeset = Ecto.Changeset.put_change(changeset, :item, nil)
# Update the order
changeset = Repo.update!(changeset)
"""
# Expands into a call to `__embeds_one__/4`, which validates the options and
# registers the embed as an `{:embed, struct}` field on the schema.
defmacro embeds_one(name, model, opts \\ []) do
  quote do
    Ecto.Schema.__embeds_one__(__MODULE__, unquote(name), unquote(model), unquote(opts))
  end
end
@doc ~S"""
Indicates an embedding of many models.
The current model has zero or more records of the other model embedded
inside of it, contained in a list. Embedded models have all the things
regular models do - callbacks, structured fields, etc.
It is recommended to declare your `embeds_many/3` field with type
`{:array, :map}` and default value of `[]` at the database level.
In fact, Ecto will automatically translate `nil` values from the
database into empty lists for embeds many (this behaviour is specific
to `embeds_many/3` fields in order to mimic `has_many/3`).
## Options
* `:on_cast` - the default changeset function to call during casting,
which can be overridden in `Ecto.Changeset.cast/4`. It's an atom representing
the function name in the embedded model's module which will receive
the module and the parameters for casting (default: `:changeset`).
* `:strategy` - the strategy for storing models in the database.
Ecto supports only the `:replace` strategy out of the box which is the
default. Read strategy section below for more info.
* `:on_replace` - The action taken on embeds when it is replaced
during casting or manipulating parent changeset. For now, it
supports `:delete`, implying delete callbacks will be invoked.
## Examples
defmodule Order do
use Ecto.Model
schema "orders" do
embeds_many :items, Item
end
end
defmodule Item do
use Ecto.Model
# embedded_schema is a shortcut for:
#
# @primary_key {:id, :binary_id, autogenerate: true}
# schema "embedded Item" do
#
embedded_schema do
field :name
end
end
# The items are loaded with the order
order = Repo.get!(Order, 42)
order.items #=> [%Item{...}, ...]
Adding and removal of embeds can only be done via the `Ecto.Changeset`
API so Ecto can properly track the embedded models life-cycle:
order = Repo.get!(Order, 42)
# Generate a changeset
changeset = Ecto.Changeset.change(order)
# Change, put a new one or remove all items
changeset = Ecto.Changeset.put_change(changeset, :items, [])
# Update the order
changeset = Repo.update!(changeset)
## Strategy
A strategy configures how models should be inserted, updated and deleted
from the database. Changing the strategy may affect how items are stored in
the database, although embeds_many will always have them as a list in the
model.
Ecto supports only the `:replace` strategy out of the box which is the
default. This means all embeds in the model always fully replace the entries
in the database.
For example, if you have a collection with a 100 items, the 100 items will
be sent whenever any of them change. The approach is useful when you need the
parent and embeds to always be consistent.
Other databases may support different strategies, like one that only changes
the embeds that have effectively changed, also reducing the amount of data
sent to the database. This is especially common in NoSQL databases.
Please check your adapter documentation in case it supports other strategies.
"""
# Expands into a call to `__embeds_many__/4`, which validates the options,
# forces the field default to `[]` and registers the embed on the schema.
defmacro embeds_many(name, model, opts \\ []) do
  quote do
    Ecto.Schema.__embeds_many__(__MODULE__, unquote(name), unquote(model), unquote(opts))
  end
end
## Callbacks

# Builds a model struct from raw storage `data` (a map or a list), casting
# every value through `loader`, stamping `__meta__` as `:loaded` with the
# given source/prefix/context, and finally running the model's `after_load`
# callbacks.
@doc false
def __load__(model, prefix, source, context, data, loader) do
  source = source || model.__schema__(:source)
  struct = model.__struct__()
  fields = model.__schema__(:types)
  loaded = do_load(struct, fields, data, loader)
  # Mark the struct as persisted and remember where it was loaded from.
  loaded = Map.put(loaded, :__meta__,
    %Metadata{state: :loaded, source: {prefix, source}, context: context})
  Ecto.Model.Callbacks.__apply__(model, :after_load, loaded)
end

# Map input: values are looked up by the string version of each field name.
defp do_load(struct, fields, map, loader) when is_map(map) do
  Enum.reduce(fields, struct, fn
    {field, type}, acc ->
      value = load!(type, Map.get(map, Atom.to_string(field)), loader)
      Map.put(acc, field, value)
  end)
end

# List input: values are consumed positionally — assumes the list is ordered
# exactly like `fields`, one raw value per field (TODO confirm with callers).
defp do_load(struct, fields, list, loader) when is_list(list) do
  Enum.reduce(fields, {struct, list}, fn
    {field, type}, {acc, [h|t]} ->
      value = load!(type, h, loader)
      {Map.put(acc, field, value), t}
  end) |> elem(0)
end

# Casts `value` with `loader`, raising ArgumentError when the cast fails.
defp load!(type, value, loader) do
  case loader.(type, value) do
    {:ok, value} -> value
    :error -> raise ArgumentError, "cannot load `#{inspect value}` as type #{inspect type}"
  end
end
# Registers a regular field on the schema being compiled.
#
# Validates the type and the default value, records the field for changeset
# casting and in the struct definition, and — for non-virtual fields — also
# records persistence metadata (`:ecto_raw`, autogeneration, `:ecto_fields`).
@doc false
def __field__(mod, name, type, pk?, opts) do
  check_type!(name, type, opts[:virtual])
  default = default_for_type(type, opts)
  check_default!(name, type, default)

  # Every field (virtual or not) participates in casting and gets a
  # struct slot with its default value.
  Module.put_attribute(mod, :changeset_fields, {name, type})
  put_struct_field(mod, name, default)

  unless opts[:virtual] do
    read_after_writes = opts[:read_after_writes]
    autogenerate = opts[:autogenerate]

    if read_after_writes do
      Module.put_attribute(mod, :ecto_raw, name)
    end

    if autogenerate do
      store_autogenerate!(mod, name, type, pk?)
    end

    # The two options contradict each other, so reject the combination.
    if read_after_writes && autogenerate do
      raise ArgumentError, "cannot mark the same field as autogenerate and read_after_writes"
    end

    Module.put_attribute(mod, :ecto_fields, {name, type})
  end
end
# Options accepted by both `has_many/3` and `has_one/3`.
@valid_has_options [:foreign_key, :references, :through, :on_delete,
                    :defaults, :on_cast, :on_replace]

@doc false
def __has_many__(mod, name, queryable, opts) do
  check_options!(opts, @valid_has_options, "has_many/3")
  if is_list(queryable) and Keyword.has_key?(queryable, :through) do
    # `:through` associations are reflection-only: they are not added to
    # the changeset fields below.
    association(mod, :many, name, Ecto.Association.HasThrough, queryable)
  else
    struct =
      association(mod, :many, name, Ecto.Association.Has, [queryable: queryable] ++ opts)
    Module.put_attribute(mod, :changeset_fields, {name, {:assoc, struct}})
  end
end

# Same shape as `__has_many__/4`, with cardinality `:one`.
@doc false
def __has_one__(mod, name, queryable, opts) do
  check_options!(opts, @valid_has_options, "has_one/3")
  if is_list(queryable) and Keyword.has_key?(queryable, :through) do
    association(mod, :one, name, Ecto.Association.HasThrough, queryable)
  else
    struct =
      association(mod, :one, name, Ecto.Association.Has, [queryable: queryable] ++ opts)
    Module.put_attribute(mod, :changeset_fields, {name, {:assoc, struct}})
  end
end

@doc false
def __belongs_to__(mod, name, queryable, opts) do
  check_options!(opts, [:foreign_key, :references, :define_field, :type], "belongs_to/3")
  # Default foreign key is "<assoc>_id"; its type falls back to the
  # module-wide @foreign_key_type attribute.
  opts = Keyword.put_new(opts, :foreign_key, :"#{name}_id")
  foreign_key_type = opts[:type] || Module.get_attribute(mod, :foreign_key_type)
  if Keyword.get(opts, :define_field, true) do
    __field__(mod, opts[:foreign_key], foreign_key_type, false, opts)
  end
  association(mod, :one, name, Ecto.Association.BelongsTo, [queryable: queryable] ++ opts)
end
# NOTE(review): the docs for embeds_one/3 and embeds_many/3 advertise an
# :on_replace option, but it is not in the valid lists below and would be
# rejected by check_options!/3 — confirm which is intended.
@doc false
def __embeds_one__(mod, name, model, opts) do
  check_options!(opts, [:on_cast, :strategy], "embeds_one/3")
  embed(mod, :one, name, model, opts)
end

@doc false
def __embeds_many__(mod, name, model, opts) do
  check_options!(opts, [:on_cast, :strategy], "embeds_many/3")
  # embeds_many fields always default to an empty list.
  opts = Keyword.put(opts, :default, [])
  embed(mod, :many, name, model, opts)
end
## Quoted callbacks
#
# Each function below returns quoted code that is injected into the module
# using the schema, defining the reflection functions callers rely on.

# Defines `__changeset__/0`, returning the field => type map used for casting.
@doc false
def __changeset__(changeset_fields) do
  map = changeset_fields |> Enum.into(%{}) |> Macro.escape()
  quote do
    def __changeset__, do: unquote(map)
  end
end

# Defines the module struct from the accumulated fields and defaults.
@doc false
def __struct__(struct_fields) do
  quote do
    defstruct unquote(Macro.escape(struct_fields))
  end
end

# Defines the `__schema__/1` clauses for :source, :fields and :primary_key.
@doc false
def __schema__(source, fields, primary_key) do
  field_names = Enum.map(fields, &elem(&1, 0))
  quote do
    def __schema__(:source), do: unquote(Macro.escape(source))
    def __schema__(:fields), do: unquote(field_names)
    def __schema__(:primary_key), do: unquote(primary_key)
  end
end

# Defines one `__schema__(:type, field)` clause per field plus a catch-all
# returning nil, and `__schema__(:types)` with the full keyword list.
@doc false
def __types__(fields) do
  quoted =
    Enum.map(fields, fn {name, type} ->
      quote do
        def __schema__(:type, unquote(name)) do
          unquote(Macro.escape(type))
        end
      end
    end)
  types = Macro.escape(fields)
  quote do
    def __schema__(:types), do: unquote(types)
    unquote(quoted)
    def __schema__(:type, _), do: nil
  end
end

# Same pattern as `__types__/1`, for association reflections.
@doc false
def __assocs__(assocs) do
  quoted =
    Enum.map(assocs, fn {name, refl} ->
      quote do
        def __schema__(:association, unquote(name)) do
          unquote(Macro.escape(refl))
        end
      end
    end)
  assoc_names = Enum.map(assocs, &elem(&1, 0))
  quote do
    def __schema__(:associations), do: unquote(assoc_names)
    unquote(quoted)
    def __schema__(:association, _), do: nil
  end
end
# Same pattern as `__assocs__/1`, for embed reflections.
@doc false
def __embeds__(embeds) do
  quoted =
    Enum.map(embeds, fn {name, refl} ->
      quote do
        def __schema__(:embed, unquote(name)) do
          unquote(Macro.escape(refl))
        end
      end
    end)
  embed_names = Enum.map(embeds, &elem(&1, 0))
  quote do
    def __schema__(:embeds), do: unquote(embed_names)
    unquote(quoted)
    def __schema__(:embed, _), do: nil
  end
end

# Reversed presumably to restore declaration order (accumulated module
# attributes collect newest-first) — verify against the accumulation site.
@doc false
def __read_after_writes__(fields) do
  quote do
    def __schema__(:read_after_writes), do: unquote(Enum.reverse(fields))
  end
end

@doc false
def __autogenerate__(id) do
  quote do
    def __schema__(:autogenerate_id), do: unquote(id)
  end
end

# Raises a descriptive error when a module used Ecto.Model/Ecto.Schema but
# never defined a schema (no :struct_fields attribute was accumulated).
@doc false
def __before_compile__(env) do
  unless Module.get_attribute(env.module, :struct_fields) do
    raise "module #{inspect env.module} uses Ecto.Model (or Ecto.Schema) but it " <>
          "does not define a schema. Please cherry pick the functionality you want " <>
          "instead, for example, by importing Ecto.Query, Ecto.Model or others"
  end
end
## Private

# Registers an association: reserves its struct slot (holding a NotLoaded
# placeholder), builds the association struct via the given module's
# struct/3, stores it under :ecto_assocs, and returns the struct.
defp association(mod, cardinality, name, association, opts) do
  not_loaded = %Ecto.Association.NotLoaded{__owner__: mod,
    __field__: name, __cardinality__: cardinality}
  put_struct_field(mod, name, not_loaded)
  opts = [cardinality: cardinality] ++ opts
  struct = association.struct(mod, name, opts)
  Module.put_attribute(mod, :ecto_assocs, {name, struct})
  struct
end

# Registers an embed both as an {:embed, struct} field (via __field__/5)
# and under the :ecto_embeds attribute.
defp embed(mod, cardinality, name, model, opts) do
  opts = [cardinality: cardinality, related: model] ++ opts
  struct = Ecto.Embedded.struct(mod, name, opts)
  __field__(mod, name, {:embed, struct}, false, opts)
  Module.put_attribute(mod, :ecto_embeds, {name, struct})
end

# Adds an entry to the struct definition, raising if the name is already
# taken by another field or association.
defp put_struct_field(mod, name, assoc) do
  fields = Module.get_attribute(mod, :struct_fields)
  if List.keyfind(fields, name, 0) do
    raise ArgumentError, "field/association #{inspect name} is already set on schema"
  end
  Module.put_attribute(mod, :struct_fields, {name, assoc})
end
# Ensures every option key in `opts` belongs to `valid`, raising an
# ArgumentError mentioning `fun_arity` otherwise. Returns :ok when all
# options are accepted.
defp check_options!(opts, valid, fun_arity) do
  invalid = Enum.find(opts, fn {key, _value} -> not(key in valid) end)

  case invalid do
    nil ->
      :ok
    {key, _value} ->
      raise ArgumentError, "invalid option #{inspect key} for #{fun_arity}"
  end
end
# Validates a field type at compile time: :any is restricted to virtual
# fields, primitive types always pass, and custom types must be compiled
# modules exporting type/0. Anything else raises.
defp check_type!(name, type, virtual?) do
  cond do
    type == :any and not virtual? ->
      raise ArgumentError, "only virtual fields can have type :any, " <>
                           "invalid type for field #{inspect name}"
    Ecto.Type.primitive?(type) ->
      true
    is_atom(type) ->
      if Code.ensure_compiled?(type) and function_exported?(type, :type, 0) do
        type
      else
        raise ArgumentError, "invalid or unknown type #{inspect type} for field #{inspect name}"
      end
    true ->
      raise ArgumentError, "invalid type #{inspect type} for field #{inspect name}"
  end
end

# Ensures the field default can be dumped as `type`; :binary_id and embed
# fields skip the check entirely.
defp check_default!(_name, :binary_id, _default), do: :ok
defp check_default!(_name, {:embed, _}, _default), do: :ok
defp check_default!(name, type, default) do
  case Ecto.Type.dump(type, default) do
    {:ok, _} ->
      :ok
    :error ->
      raise ArgumentError, "invalid default argument `#{inspect default}` for " <>
                           "field #{inspect name} of type #{inspect type}"
  end
end
# Registers a field marked with :autogenerate.
#
# For primary keys whose type maps to :id/:binary_id the field is stored
# under :ecto_autogenerate_id — at most one per schema. All other fields
# must use a non-primitive custom type that exports generate/0 and are
# stored under :ecto_autogenerate.
defp store_autogenerate!(mod, name, type, true) do
  if id = autogenerate_id(type) do
    if Module.get_attribute(mod, :ecto_autogenerate_id) do
      raise ArgumentError, "only one primary key with ID type may be marked as autogenerated"
    end
    Module.put_attribute(mod, :ecto_autogenerate_id, {name, id})
  else
    # Not an ID type: fall through to the regular autogenerate checks.
    store_autogenerate!(mod, name, type, false)
  end
end

defp store_autogenerate!(mod, name, type, false) do
  cond do
    _ = autogenerate_id(type) ->
      raise ArgumentError, "only primary keys allow :autogenerate for type #{inspect type}, " <>
                           "field #{inspect name} is not a primary key"
    Ecto.Type.primitive?(type) ->
      raise ArgumentError, "field #{inspect name} does not support :autogenerate because it uses a " <>
                           "primitive type #{inspect type}"
    # Note the custom type has already been loaded in check_type!/3
    not function_exported?(type, :generate, 0) ->
      raise ArgumentError, "field #{inspect name} does not support :autogenerate because it uses a " <>
                           "custom type #{inspect type} that does not define generate/0"
    true ->
      Module.put_attribute(mod, :ecto_autogenerate, {name, type})
  end
end

# Returns :id or :binary_id when `type` (or a custom type's underlying
# type/0) is one of those atoms; nil otherwise.
defp autogenerate_id(type) do
  id = if Ecto.Type.primitive?(type), do: type, else: type.type
  if id in [:id, :binary_id], do: id, else: nil
end
# Computes a field's compile-time default value: array fields fall back
# to `[]` when no :default option is given, every other type falls back
# to nil.
defp default_for_type({:array, _}, opts), do: Keyword.get(opts, :default, [])
defp default_for_type(_type, opts), do: Keyword.get(opts, :default)
end
| 34.451916 | 105 | 0.66417 |
e8083053a3e0788836272abf229b522961923354 | 1,866 | exs | Elixir | clients/drive/mix.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/drive/mix.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/drive/mix.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Drive.Mixfile do
  @moduledoc false

  use Mix.Project

  # Release version of the generated Drive API client package.
  @version "0.13.0"

  def project() do
    [
      app: :google_api_drive,
      version: @version,
      elixir: "~> 1.6",
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/drive"
    ]
  end

  def application() do
    [extra_applications: [:logger]]
  end

  defp deps() do
    [
      {:google_gax, "~> 0.2"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # One-line summary used as the Hex package description.
  defp description() do
    """
    Drive API client library. Manages files in Drive including uploading, downloading, searching, detecting changes, and updating sharing permissions.
    """
  end

  # Hex package metadata (files, maintainers, license, links).
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/drive",
        "Homepage" => "https://developers.google.com/drive/"
      }
    ]
  end
end
| 27.850746 | 150 | 0.65702 |
e80831afbba4ac7780c4fca7be6717411c25cdb7 | 24,326 | ex | Elixir | lib/elixir/lib/uri.ex | hurrycaner/elixir | c54daf97764c222a3ade6c84ac89bacb68abb0fb | [
"Apache-2.0"
] | 1 | 2021-05-05T02:11:24.000Z | 2021-05-05T02:11:24.000Z | lib/elixir/lib/uri.ex | hurrycaner/elixir | c54daf97764c222a3ade6c84ac89bacb68abb0fb | [
"Apache-2.0"
] | 6 | 2021-03-19T12:33:21.000Z | 2021-04-02T17:52:45.000Z | lib/elixir/lib/uri.ex | hurrycaner/elixir | c54daf97764c222a3ade6c84ac89bacb68abb0fb | [
"Apache-2.0"
] | null | null | null | defmodule URI do
@moduledoc """
Utilities for working with URIs.
This module provides functions for working with URIs (for example, parsing
URIs or encoding query strings). The functions in this module are implemented
according to [RFC 3986](https://tools.ietf.org/html/rfc3986).
URIs are structs behind the scenes. You can access the URI fields directly
but you should not create a new `URI` directly via the struct syntax. Instead
use the functions in this module.
"""
defstruct scheme: nil,
path: nil,
query: nil,
fragment: nil,
authority: nil,
userinfo: nil,
host: nil,
port: nil
@type t :: %__MODULE__{
scheme: nil | binary,
path: nil | binary,
query: nil | binary,
fragment: nil | binary,
authority: nil | binary,
userinfo: nil | binary,
host: nil | binary,
port: nil | :inet.port_number()
}
import Bitwise
@reserved_characters ':/?#[]@!$&\'()*+,;='
@formatted_reserved_characters Enum.map_join(@reserved_characters, ", ", &<<?`, &1, ?`>>)
@doc """
Returns the default port for a given `scheme`.
If the scheme is unknown to the `URI` module, this function returns
`nil`. The default port for any scheme can be configured globally
via `default_port/2`.
## Examples
iex> URI.default_port("ftp")
21
iex> URI.default_port("ponzi")
nil
"""
@spec default_port(binary) :: nil | non_neg_integer
def default_port(scheme) when is_binary(scheme) do
:elixir_config.get({:uri, scheme}, nil)
end
@doc """
Registers the default `port` for the given `scheme`.
After this function is called, `port` will be returned by
`default_port/1` for the given scheme `scheme`. Note that this function
changes the default port for the given `scheme` *globally*, meaning for
every application.
It is recommended for this function to be invoked in your
application's start callback in case you want to register
new URIs.
"""
@spec default_port(binary, non_neg_integer) :: :ok
def default_port(scheme, port) when is_binary(scheme) and is_integer(port) and port >= 0 do
:elixir_config.put({:uri, scheme}, port)
end
@doc """
Encodes `enumerable` into a query string using `encoding`.
Takes an enumerable that enumerates as a list of two-element
tuples (for instance, a map or a keyword list) and returns a string
in the form of `key1=value1&key2=value2...`.
Keys and values can be any term that implements the `String.Chars`
protocol with the exception of lists, which are explicitly forbidden.
You can specify one of the following `encoding` strategies:
* `:www_form` - (default, since v1.12.0) keys and values are URL encoded as
per `encode_www_form/1`. This is the format typically used by browsers on
query strings and form data. It encodes " " as "+".
* `:rfc3986` - (since v1.12.0) the same as `:www_form` except it encodes
" " as "%20" according [RFC 3986](https://tools.ietf.org/html/rfc3986).
This is the best option if you are encoding in a non-browser situation,
since encoding spaces as "+" can be ambiguous to URI parsers. This can
inadvertently lead to spaces being interpreted as literal plus signs.
Encoding defaults to `:www_form` for backward compatibility.
## Examples
iex> query = %{"foo" => 1, "bar" => 2}
iex> URI.encode_query(query)
"bar=2&foo=1"
iex> query = %{"key" => "value with spaces"}
iex> URI.encode_query(query)
"key=value+with+spaces"
iex> query = %{"key" => "value with spaces"}
iex> URI.encode_query(query, :rfc3986)
"key=value%20with%20spaces"
iex> URI.encode_query(%{key: [:a, :list]})
** (ArgumentError) encode_query/2 values cannot be lists, got: [:a, :list]
"""
@spec encode_query(Enum.t(), :rfc3986 | :www_form) :: binary
def encode_query(enumerable, encoding \\ :www_form) do
  Enum.map_join(enumerable, "&", &encode_kv_pair(&1, encoding))
end

# Lists have no unambiguous query-string representation, so they are
# rejected up front for both keys and values.
defp encode_kv_pair({key, _}, _encoding) when is_list(key) do
  raise ArgumentError, "encode_query/2 keys cannot be lists, got: #{inspect(key)}"
end

defp encode_kv_pair({_, value}, _encoding) when is_list(value) do
  raise ArgumentError, "encode_query/2 values cannot be lists, got: #{inspect(value)}"
end

# RFC 3986 mode: only unreserved characters stay literal (" " -> "%20").
defp encode_kv_pair({key, value}, :rfc3986) do
  encode(Kernel.to_string(key), &char_unreserved?/1) <>
    "=" <> encode(Kernel.to_string(value), &char_unreserved?/1)
end

# x-www-form-urlencoded mode: like the above except " " becomes "+".
defp encode_kv_pair({key, value}, :www_form) do
  encode_www_form(Kernel.to_string(key)) <> "=" <> encode_www_form(Kernel.to_string(value))
end
@doc """
Decodes `query` into a map.
Given a query string in the form of `key1=value1&key2=value2...`, this
function inserts each key-value pair in the query string as one entry in the
given `map`. Keys and values in the resulting map will be binaries. Keys and
values will be percent-unescaped.
You can specify one of the following `encoding` options:
* `:www_form` - (default, since v1.12.0) keys and values are decoded as per
`decode_www_form/1`. This is the format typically used by browsers on
query strings and form data. It decodes "+" as " ".
* `:rfc3986` - (since v1.12.0) keys and values are decoded as per
`decode/1`. The result is the same as `:www_form` except for leaving "+"
as is in line with [RFC 3986](https://tools.ietf.org/html/rfc3986).
Encoding defaults to `:www_form` for backward compatibility.
Use `query_decoder/1` if you want to iterate over each value manually.
## Examples
iex> URI.decode_query("foo=1&bar=2")
%{"bar" => "2", "foo" => "1"}
iex> URI.decode_query("percent=oh+yes%21", %{"starting" => "map"})
%{"percent" => "oh yes!", "starting" => "map"}
iex> URI.decode_query("percent=oh+yes%21", %{}, :rfc3986)
%{"percent" => "oh+yes!"}
"""
@spec decode_query(binary, %{optional(binary) => binary}, :rfc3986 | :www_form) :: %{
        optional(binary) => binary
      }
def decode_query(query, map \\ %{}, encoding \\ :www_form)

# A struct accumulator (deprecated dict usage). This clause must come
# before the plain-map clause below, since every struct is also a map.
def decode_query(query, %_{} = dict, encoding) when is_binary(query) do
  IO.warn(
    "URI.decode_query/3 expects the second argument to be a map, other usage is deprecated"
  )
  decode_query_into_dict(query, dict, encoding)
end

# The supported path: accumulate pairs into a plain map.
def decode_query(query, map, encoding) when is_binary(query) and is_map(map) do
  decode_query_into_map(query, map, encoding)
end

# Any other accumulator is treated as a dict (deprecated).
def decode_query(query, dict, encoding) when is_binary(query) do
  IO.warn(
    "URI.decode_query/3 expects the second argument to be a map, other usage is deprecated"
  )
  decode_query_into_dict(query, dict, encoding)
end

# Recursively consumes one decoded pair at a time until the query is empty.
defp decode_query_into_map(query, map, encoding) do
  case decode_next_query_pair(query, encoding) do
    nil ->
      map
    {{key, value}, rest} ->
      decode_query_into_map(rest, Map.put(map, key, value), encoding)
  end
end

defp decode_query_into_dict(query, dict, encoding) do
  case decode_next_query_pair(query, encoding) do
    nil ->
      dict
    {{key, value}, rest} ->
      # Avoid warnings about Dict being deprecated
      dict_module = Dict
      decode_query_into_dict(rest, dict_module.put(dict, key, value), encoding)
  end
end
@doc """
Returns a stream of two-element tuples representing key-value pairs in the
given `query`.
Key and value in each tuple will be binaries and will be percent-unescaped.
You can specify one of the following `encoding` options:
* `:www_form` - (default, since v1.12.0) keys and values are decoded as per
`decode_www_form/1`. This is the format typically used by browsers on
query strings and form data. It decodes "+" as " ".
* `:rfc3986` - (since v1.12.0) keys and values are decoded as per
`decode/1`. The result is the same as `:www_form` except for leaving "+"
as is in line with [RFC 3986](https://tools.ietf.org/html/rfc3986).
Encoding defaults to `:www_form` for backward compatibility.
## Examples
iex> URI.query_decoder("foo=1&bar=2") |> Enum.to_list()
[{"foo", "1"}, {"bar", "2"}]
iex> URI.query_decoder("food=bread%26butter&drinks=tap%20water+please") |> Enum.to_list()
[{"food", "bread&butter"}, {"drinks", "tap water please"}]
iex> URI.query_decoder("food=bread%26butter&drinks=tap%20water+please", :rfc3986) |> Enum.to_list()
[{"food", "bread&butter"}, {"drinks", "tap water+please"}]
"""
@spec query_decoder(binary, :rfc3986 | :www_form) :: Enumerable.t()
def query_decoder(query, encoding \\ :www_form) when is_binary(query) do
  # Lazily emits one decoded {key, value} tuple per pair.
  Stream.unfold(query, &decode_next_query_pair(&1, encoding))
end

# Returns {decoded_pair, rest_of_query}, or nil when the query is exhausted
# (which also terminates the Stream.unfold above).
defp decode_next_query_pair("", _encoding) do
  nil
end

defp decode_next_query_pair(query, encoding) do
  # Split off the first pair at "&"; a missing "&" means this is the last pair.
  {undecoded_next_pair, rest} =
    case :binary.split(query, "&") do
      [next_pair, rest] -> {next_pair, rest}
      [next_pair] -> {next_pair, ""}
    end

  # Split the pair at "="; a pair without "=" decodes to an empty value.
  next_pair =
    case :binary.split(undecoded_next_pair, "=") do
      [key, value] ->
        {decode_with_encoding(key, encoding), decode_with_encoding(value, encoding)}
      [key] ->
        {decode_with_encoding(key, encoding), ""}
    end

  {next_pair, rest}
end

defp decode_with_encoding(string, :www_form) do
  decode_www_form(string)
end

defp decode_with_encoding(string, :rfc3986) do
  decode(string)
end
@doc ~s"""
Checks if `character` is a reserved one in a URI.
As specified in [RFC 3986, section 2.2](https://tools.ietf.org/html/rfc3986#section-2.2),
the following characters are reserved: #{@formatted_reserved_characters}
## Examples
iex> URI.char_reserved?(?+)
true
"""
@spec char_reserved?(byte) :: boolean
def char_reserved?(character) do
character in @reserved_characters
end
@doc """
Checks if `character` is an unreserved one in a URI.
As specified in [RFC 3986, section 2.3](https://tools.ietf.org/html/rfc3986#section-2.3),
the following characters are unreserved:
* Alphanumeric characters: `A-Z`, `a-z`, `0-9`
* `~`, `_`, `-`, `.`
## Examples
iex> URI.char_unreserved?(?_)
true
"""
@spec char_unreserved?(byte) :: boolean
def char_unreserved?(character) do
character in ?0..?9 or character in ?a..?z or character in ?A..?Z or character in '~_-.'
end
@doc """
Checks if `character` is allowed unescaped in a URI.
This is the default used by `URI.encode/2` where both
[reserved](`char_reserved?/1`) and [unreserved characters](`char_unreserved?/1`)
are kept unescaped.
## Examples
iex> URI.char_unescaped?(?{)
false
"""
@spec char_unescaped?(byte) :: boolean
def char_unescaped?(character) do
char_reserved?(character) or char_unreserved?(character)
end
@doc """
Percent-escapes all characters that require escaping in `string`.
This means reserved characters, such as `:` and `/`, and the
so-called unreserved characters, which have the same meaning both
escaped and unescaped, won't be escaped by default.
See `encode_www_form/1` if you are interested in escaping reserved
characters too.
This function also accepts a `predicate` function as an optional
argument. If passed, this function will be called with each byte
in `string` as its argument and should return a truthy value (anything other
than `false` or `nil`) if the given byte should be left as is, or return a
falsy value (`false` or `nil`) if the character should be escaped. Defaults
to `URI.char_unescaped?/1`.
## Examples
iex> URI.encode("ftp://s-ite.tld/?value=put it+й")
"ftp://s-ite.tld/?value=put%20it+%D0%B9"
iex> URI.encode("a string", &(&1 != ?i))
"a str%69ng"
"""
@spec encode(binary, (byte -> as_boolean(term))) :: binary
def encode(string, predicate \\ &char_unescaped?/1)
    when is_binary(string) and is_function(predicate, 1) do
  # Each byte is kept as is when the predicate is truthy, otherwise it is
  # percent-escaped.
  for <<byte <- string>>, into: "", do: percent(byte, predicate)
end
@doc """
Encodes `string` as "x-www-form-urlencoded".
Note "x-www-form-urlencoded" is not specified as part of
RFC 3986. However, it is a commonly used format to encode
query strings and form data by browsers.
## Example
iex> URI.encode_www_form("put: it+й")
"put%3A+it%2B%D0%B9"
"""
@spec encode_www_form(binary) :: binary
def encode_www_form(string) when is_binary(string) do
  for <<byte <- string>>, into: "" do
    # Same escaping as encode/2 with char_unreserved?/1, except the escaped
    # space ("%20") becomes "+" per x-www-form-urlencoded.
    case percent(byte, &char_unreserved?/1) do
      "%20" -> "+"
      percent -> percent
    end
  end
end
# Percent-escapes a single byte unless `predicate` says it may stay literal.
defp percent(char, predicate) do
  if predicate.(char) do
    <<char>>
  else
    high = hex(bsr(char, 4))
    low = hex(band(char, 15))
    <<?%, high, low>>
  end
end

# Maps a nibble (0..15) to its uppercase hexadecimal digit character.
defp hex(digit) when digit in 0..9, do: ?0 + digit
defp hex(digit), do: ?A + digit - 10
@doc """
Percent-unescapes a URI.

## Examples

    iex> URI.decode("https%3A%2F%2Felixir-lang.org")
    "https://elixir-lang.org"

"""
@spec decode(binary) :: binary
def decode(uri) do
  try do
    unpercent(uri, "", false)
  catch
    :malformed_uri -> raise ArgumentError, "malformed URI #{inspect(uri)}"
  end
end

@doc """
Decodes `string` as "x-www-form-urlencoded".

Note "x-www-form-urlencoded" is not specified as part of
RFC 3986. However, it is a commonly used format to encode
query strings and form data by browsers.

## Examples

    iex> URI.decode_www_form("%3Call+in%2F")
    "<all in/"

"""
@spec decode_www_form(binary) :: binary
def decode_www_form(string) when is_binary(string) do
  try do
    unpercent(string, "", true)
  catch
    :malformed_uri -> raise ArgumentError, "malformed URI #{inspect(string)}"
  end
end

# Walks the binary byte by byte, accumulating the unescaped result.
# When `spaces?` is true, "+" is additionally decoded to a space
# (x-www-form-urlencoded behaviour); otherwise it is copied verbatim.
defp unpercent(<<?+, rest::binary>>, acc, true) do
  unpercent(rest, <<acc::binary, ?\s>>, true)
end

defp unpercent(<<?%, hex1, hex2, rest::binary>>, acc, spaces?) do
  byte = bsl(hex_to_dec(hex1), 4) + hex_to_dec(hex2)
  unpercent(rest, <<acc::binary, byte>>, spaces?)
end

# A "%" not followed by two hexadecimal digits is malformed.
defp unpercent(<<?%, _::binary>>, _acc, _spaces?), do: throw(:malformed_uri)

defp unpercent(<<byte, rest::binary>>, acc, spaces?) do
  unpercent(rest, <<acc::binary, byte>>, spaces?)
end

defp unpercent(<<>>, acc, _spaces?), do: acc

# Converts one hex digit character to its integer value, throwing on
# anything else (surfaced as ArgumentError by the public functions above).
defp hex_to_dec(n) when n in ?0..?9, do: n - ?0
defp hex_to_dec(n) when n in ?a..?f, do: n - ?a + 10
defp hex_to_dec(n) when n in ?A..?F, do: n - ?A + 10
defp hex_to_dec(_n), do: throw(:malformed_uri)
@doc """
Parses a well-formed URI into its components.
This function can parse both absolute and relative URLs. You can check
if a URI is absolute or relative by checking if the `scheme` field is
nil or not. Furthermore, this function expects both absolute and
relative URIs to be well-formed and does not perform any validation.
See the "Examples" section below.
When a URI is given without a port, the value returned by
`URI.default_port/1` for the URI's scheme is used for the `:port` field.
When a URI hostname is an IPv6 literal, it has the `[]` unwrapped before
being stored in the `:host` field. Note this doesn't match the formal
grammar for hostnames, which preserves the `[]` around the IP. You can
parse the IP address by calling `:inet.parse_address/1` (remember to
call `String.to_charlist/1` to convert the host to a charlist before
calling `:inet`).
If a `%URI{}` struct is given to this function, this function returns it
unmodified.
## Examples
iex> URI.parse("https://elixir-lang.org/")
%URI{
authority: "elixir-lang.org",
fragment: nil,
host: "elixir-lang.org",
path: "/",
port: 443,
query: nil,
scheme: "https",
userinfo: nil
}
iex> URI.parse("//elixir-lang.org/")
%URI{
authority: "elixir-lang.org",
fragment: nil,
host: "elixir-lang.org",
path: "/",
port: nil,
query: nil,
scheme: nil,
userinfo: nil
}
iex> URI.parse("/foo/bar")
%URI{
authority: nil,
fragment: nil,
host: nil,
path: "/foo/bar",
port: nil,
query: nil,
scheme: nil,
userinfo: nil
}
iex> URI.parse("foo/bar")
%URI{
authority: nil,
fragment: nil,
host: nil,
path: "foo/bar",
port: nil,
query: nil,
scheme: nil,
userinfo: nil
}
iex> URI.parse("//[fe80::]/")
%URI{
authority: "[fe80::]",
fragment: nil,
host: "fe80::",
path: "/",
port: nil,
query: nil,
scheme: nil,
userinfo: nil
}
"""
@spec parse(t | binary) :: t
def parse(%URI{} = uri), do: uri

def parse(string) when is_binary(string) do
  # Regex taken verbatim from https://tools.ietf.org/html/rfc3986#appendix-B;
  # the numbered comments below refer to its capture groups.
  regex = ~r{^(([a-z][a-z0-9\+\-\.]*):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?}i
  parts = Regex.run(regex, string)

  # destructure/2 tolerates a shorter `parts` list: missing trailing
  # captures simply bind to nil.
  destructure [
    _full,
    # 1: "scheme:"
    _scheme_with_colon,
    # 2: "scheme"
    scheme,
    # 3: "//authority"
    authority_with_slashes,
    # 4: "authority"
    _authority,
    # 5: path
    path,
    # 6: "?query"
    query_with_question_mark,
    # 7: query
    _query,
    # 8: "#fragment"
    _fragment_with_hash,
    # 9: fragment
    fragment
  ],
  parts

  scheme = nillify(scheme)
  path = nillify(path)
  query = nillify_query(query_with_question_mark)
  {authority, userinfo, host, port} = split_authority(authority_with_slashes)

  # Schemes are case-insensitive: normalize to lowercase and fall back to
  # the scheme's registered default port when none was given.
  scheme = scheme && String.downcase(scheme)
  port = port || (scheme && default_port(scheme))

  %URI{
    scheme: scheme,
    path: path,
    query: query,
    fragment: fragment,
    authority: authority,
    userinfo: userinfo,
    host: host,
    port: port
  }
end
# Strips the leading "?" from a captured query, or returns nil when the
# URI had no query component at all.
defp nillify_query("?" <> query), do: query
defp nillify_query(_other), do: nil

# Split an authority into its userinfo, host and port parts.
#
# Note that the host field is returned *without* [] even if, according to
# RFC3986 grammar, a native IPv6 address requires them.
defp split_authority("") do
  {nil, nil, nil, nil}
end

defp split_authority("//") do
  {"", nil, "", nil}
end

defp split_authority("//" <> authority) do
  # "userinfo@host:port", where host may be a bracketed IPv6 literal.
  regex = ~r/(^(.*)@)?(\[[a-zA-Z0-9:.]*\]|[^:]*)(:(\d*))?/
  components = Regex.run(regex, authority)
  destructure [_, _, userinfo, host, _, port], components
  userinfo = nillify(userinfo)
  # Unwrap the [] from IPv6 literals before storing the host.
  host = if nillify(host), do: host |> String.trim_leading("[") |> String.trim_trailing("]")
  port = if nillify(port), do: String.to_integer(port)
  {authority, userinfo, host, port}
end

# Regex.run returns empty strings sometimes. We want
# to replace those with nil for consistency.
defp nillify(""), do: nil
defp nillify(other), do: other
  @doc """
  Returns the string representation of the given [URI struct](`t:t/0`).

  ## Examples

      iex> uri = URI.parse("http://google.com")
      iex> URI.to_string(uri)
      "http://google.com"

      iex> uri = URI.parse("foo://bar.baz")
      iex> URI.to_string(uri)
      "foo://bar.baz"

  Note that when creating this string representation, the `:authority` value will be
  used if the `:host` is `nil`. Otherwise, the `:userinfo`, `:host`, and `:port` will
  be used.

      iex> URI.to_string(%URI{authority: "foo@example.com:80"})
      "//foo@example.com:80"

      iex> URI.to_string(%URI{userinfo: "bar", host: "example.org", port: 81})
      "//bar@example.org:81"

      iex> URI.to_string(%URI{
      ...>   authority: "foo@example.com:80",
      ...>   userinfo: "bar",
      ...>   host: "example.org",
      ...>   port: 81
      ...> })
      "//bar@example.org:81"

  """
  @spec to_string(t) :: binary
  # The serialization itself lives in the String.Chars protocol
  # implementation for URI (defined below); delegating keeps
  # URI.to_string/1 and Kernel.to_string/1 in agreement.
  defdelegate to_string(uri), to: String.Chars.URI
  @doc ~S"""
  Merges two URIs.

  This function merges two URIs as per
  [RFC 3986, section 5.2](https://tools.ietf.org/html/rfc3986#section-5.2).

  ## Examples

      iex> URI.merge(URI.parse("http://google.com"), "/query") |> to_string()
      "http://google.com/query"

      iex> URI.merge("http://example.com", "http://google.com") |> to_string()
      "http://google.com"

  """
  @spec merge(t | binary, t | binary) :: t
  def merge(uri, rel)

  # Merging only makes sense onto an absolute base URI.
  def merge(%URI{authority: nil}, _rel) do
    raise ArgumentError, "you must merge onto an absolute URI"
  end

  # The reference carries its own scheme, so it is already absolute; only
  # its dot-segments need normalizing.
  def merge(_base, %URI{scheme: rel_scheme} = rel) when rel_scheme != nil do
    %{rel | path: remove_dot_segments_from_path(rel.path)}
  end

  # Network-path reference ("//host/..."): everything comes from `rel`
  # except the scheme, which is inherited from the base.
  def merge(base, %URI{authority: authority} = rel) when authority != nil do
    %{rel | scheme: base.scheme, path: remove_dot_segments_from_path(rel.path)}
  end

  # Empty relative path: keep the base path; the query is replaced only if
  # the reference supplies one, the fragment always comes from `rel`.
  def merge(%URI{} = base, %URI{path: rel_path} = rel) when rel_path in ["", nil] do
    %{base | query: rel.query || base.query, fragment: rel.fragment}
  end

  # General case: resolve the relative path against the base path.
  def merge(%URI{} = base, %URI{} = rel) do
    new_path = merge_paths(base.path, rel.path)
    %{base | path: new_path, query: rel.query, fragment: rel.fragment}
  end

  # Accept binaries for either argument by parsing them first.
  def merge(base, rel) do
    merge(parse(base), parse(rel))
  end
  # A nil base path behaves like the root path.
  defp merge_paths(nil, rel_path), do: merge_paths("/", rel_path)

  # An absolute relative path replaces the base path entirely.
  defp merge_paths(_, "/" <> _ = rel_path), do: remove_dot_segments_from_path(rel_path)

  defp merge_paths(base_path, rel_path) do
    # path_to_segments/1 returns segments in reverse order, so the head is
    # the base path's final segment, which merging must drop.
    [_ | base_segments] = path_to_segments(base_path)

    path_to_segments(rel_path)
    |> Kernel.++(base_segments)
    |> remove_dot_segments([])
    |> Enum.join("/")
  end
defp remove_dot_segments_from_path(nil) do
nil
end
defp remove_dot_segments_from_path(path) do
path
|> path_to_segments()
|> remove_dot_segments([])
|> Enum.join("/")
end
  # Removes "." and ".." segments. Input segments arrive in reverse order
  # (last segment first); `acc` accumulates the output in forward order.
  # Clause order is significant.

  # Input exhausted but a trailing ".." remains at the front of the output:
  # it cancels against the segment right after it.
  defp remove_dot_segments([], [head, ".." | acc]), do: remove_dot_segments([], [head | acc])
  # Input exhausted, nothing left to cancel: done.
  defp remove_dot_segments([], acc), do: acc
  # "." segments are simply dropped.
  defp remove_dot_segments(["." | tail], acc), do: remove_dot_segments(tail, acc)

  # Output already starts with two ".." — keep pushing; those leading ".."
  # have nothing left to cancel against.
  defp remove_dot_segments([head | tail], ["..", ".." | _] = acc),
    do: remove_dot_segments(tail, [head | acc])

  # A ".." one-deep in the output cancels the segment in front of it.
  defp remove_dot_segments(segments, [_, ".." | acc]), do: remove_dot_segments(segments, acc)
  # Ordinary segment: push onto the output.
  defp remove_dot_segments([head | tail], acc), do: remove_dot_segments(tail, [head | acc])
defp path_to_segments(path) do
path |> String.split("/") |> Enum.reverse()
end
end
defimpl String.Chars, for: URI do
  # A relative path is ambiguous once a host/authority is present, so it is
  # rejected outright instead of silently producing a wrong URI string.
  def to_string(%{host: host, authority: authority, path: path} = uri)
      when (host != nil or authority != nil) and is_binary(path) and
             path != "" and binary_part(path, 0, 1) != "/" do
    raise ArgumentError,
          ":path in URI must be nil or an absolute path if :host or :authority are given, " <>
            "got: #{inspect(uri)}"
  end

  def to_string(%{scheme: scheme, port: port, path: path, query: query, fragment: fragment} = uri) do
    # Omit the port when it equals the scheme's registered default.
    uri =
      case scheme && URI.default_port(scheme) do
        ^port -> %{uri | port: nil}
        _ -> uri
      end

    # Based on https://tools.ietf.org/html/rfc3986#section-5.3
    authority = extract_authority(uri)

    # Assemble as iodata and flatten once, avoiding repeated binary
    # concatenation.
    IO.iodata_to_binary([
      if(scheme, do: [scheme, ?:], else: []),
      if(authority, do: ["//" | authority], else: []),
      if(path, do: path, else: []),
      if(query, do: ["?" | query], else: []),
      if(fragment, do: ["#" | fragment], else: [])
    ])
  end

  # No host: whatever pre-parsed authority string is stored is used verbatim.
  defp extract_authority(%{host: nil, authority: authority}) do
    authority
  end

  defp extract_authority(%{host: host, userinfo: userinfo, port: port}) do
    # According to the grammar at
    # https://tools.ietf.org/html/rfc3986#appendix-A, a "host" can have a colon
    # in it only if it's an IPv6 or "IPvFuture" address, so if there's a colon
    # in the host we can safely surround it with [].
    [
      if(userinfo, do: [userinfo | "@"], else: []),
      if(String.contains?(host, ":"), do: ["[", host | "]"], else: host),
      if(port, do: [":" | Integer.to_string(port)], else: [])
    ]
  end
end
| 30.521957 | 105 | 0.623859 |
e80847869325ab852cfec8ab10c93f1350921ab8 | 3,297 | exs | Elixir | apps/service_gather/test/gather_test.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | apps/service_gather/test/gather_test.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | apps/service_gather/test/gather_test.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | defmodule GatherTest do
  use Gather.Case
  import Mox
  import Events, only: [extract_start: 0, extract_end: 0]
  import AssertAsync

  require Temp.Env

  @instance Gather.Application.instance()
  @moduletag capture_log: true

  alias Gather.Extraction

  # Swap the real writer for Gather.WriterMock for the duration of this
  # module so tests can assert on writer interactions via Mox.
  Temp.Env.modify([
    %{
      app: :service_gather,
      key: Gather.Extraction,
      update: fn config ->
        Keyword.put(config, :writer, Gather.WriterMock)
      end
    }
  ])

  # Expectations are set from the test process but fulfilled by other
  # processes, hence global mode; verify all expectations on exit.
  setup :set_mox_global
  setup :verify_on_exit!

  # Don't let extraction workers leak between tests.
  setup do
    on_exit(fn ->
      Gather.Extraction.Supervisor.kill_all_children()
    end)

    :ok
  end

  # Fresh "extractions" view state and a fresh HTTP stub per test.
  setup do
    Brook.Test.clear_view_state(@instance, "extractions")
    [bypass: Bypass.open()]
  end
test "extract csv file", %{bypass: bypass} do
test = self()
{:ok, dummy_process} = Agent.start_link(fn -> :dummy_process end)
Bypass.expect(bypass, "GET", "/file.csv", fn conn ->
Plug.Conn.resp(conn, 200, "one,two,three\nfour,five,six")
end)
Gather.WriterMock
|> stub(:start_link, fn args ->
send(test, {:start_link, args})
{:ok, dummy_process}
end)
|> stub(:write, fn server, messages, opts ->
send(test, {:write, server, messages, opts})
:ok
end)
extract =
Extract.new!(
version: 1,
id: "extract-id-1",
dataset_id: "test-ds1",
subset_id: "Johnny",
destination: "topic-1",
steps: [
Extract.Http.Get.new!(url: "http://localhost:#{bypass.port}/file.csv"),
Extract.Decode.Csv.new!(headers: ["A", "B", "C"])
],
dictionary: [
Dictionary.Type.String.new!(name: "A"),
Dictionary.Type.String.new!(name: "b"),
Dictionary.Type.String.new!(name: "C")
]
)
Brook.Test.send(@instance, extract_start(), "testing", extract)
assert_receive {:write, ^dummy_process, messages, [extract: ^extract]}, 5_000
assert messages == [
%{"a" => "one", "b" => "two", "c" => "three"},
%{"a" => "four", "b" => "five", "c" => "six"}
]
assert extract == Extraction.Store.get!(extract.dataset_id, extract.subset_id)
end
test "removes stored extraction on #{extract_end()}" do
extract =
Extract.new!(
id: "extract-45",
dataset_id: "ds45",
subset_id: "get_some_data",
destination: "topic1",
steps: []
)
Brook.Test.with_event(@instance, fn ->
Extraction.Store.persist(extract)
end)
Brook.Test.send(@instance, extract_end(), "testing", extract)
assert_async do
assert nil == Extraction.Store.get!(extract.dataset_id, extract.subset_id)
end
end
test "sends extract_end on extract completion" do
test = self()
{:ok, dummy_process} = Agent.start_link(fn -> :dummy_process end)
extract =
Extract.new!(
id: "extract-45",
dataset_id: "ds45",
subset_id: "get_some_data",
destination: "topic1",
steps: []
)
Gather.WriterMock
|> expect(:start_link, fn args ->
send(test, {:start_link, args})
{:ok, dummy_process}
end)
Brook.Test.send(@instance, extract_start(), "testing", extract)
assert_receive {:brook_event, %{type: extract_end(), data: ^extract}}, 5_000
end
end
| 24.977273 | 82 | 0.590537 |
e80852516680e5ea206401481a1d42bede0350fe | 3,473 | exs | Elixir | task2/elixir/_success.exs | rolandolucio/kajooly-challenge-001 | 28bd00bb3bc2e610e4eadd7e62921322317bc94e | [
"MIT"
] | 1 | 2021-09-22T05:07:42.000Z | 2021-09-22T05:07:42.000Z | task2/elixir/_success.exs | rolandolucio/kajooly-challenge-001 | 28bd00bb3bc2e610e4eadd7e62921322317bc94e | [
"MIT"
] | null | null | null | task2/elixir/_success.exs | rolandolucio/kajooly-challenge-001 | 28bd00bb3bc2e610e4eadd7e62921322317bc94e | [
"MIT"
] | null | null | null | defmodule Solution do
@moduledoc """
Implementation Details
Aproach: Cahmpion vs Challenger
fixing from fail_3
failed to do it different goin back to the loop loop base
LOCAL ok ~100ms
LEETCODE EXAMPLE TEST CASESES: OK ~363ms
LEETCODE SUBMIT:
Success
Details
Runtime: 446 ms, faster than 100.00% of Elixir online submissions for 3Sum Closest.
Memory Usage: 49.6 MB, less than 100.00% of Elixir online submissions for 3Sum Closest.
131 / 131 test cases passed.
Status: Accepted
Runtime: 446 ms
Memory Usage: 49.6 MB
"""
@spec three_sum_closest(nums :: [integer], target :: integer) :: integer
def three_sum_closest( list ,_target) when length(list) <=3 ,do: Enum.sum(list)
def three_sum_closest( list, target) do
# Enum.uniq(list)
base = list
|> Enum.sort
|> Enum.with_index
# |> List.to_tuple # if timeout next try to tupple
reverse = Enum.reverse(base)
# init(base, reverse, target)
# |> Enum.max
{_, {sum, _}} = loop(base,reverse,target)
sum
end
  # First pass: no champion yet, so seed with a sentinel {sum, distance}
  # large enough that the first real candidate always wins.
  def loop([a | tail], reverse, target) do
    case xyz([a | tail], reverse, target, {999999, 999999}) do
      {:done, champ} -> {:ok, champ}
      {:eoc, champ} -> loop(tail, reverse, target, champ)
    end
  end

  # Subsequent passes: carry the current champion {sum, distance} along.
  def loop([a | tail], reverse, target, champ) do
    case xyz([a | tail], reverse, target, champ) do
      {:done, champ} -> {:eol, champ}
      {:eoc, champ} -> loop(tail, reverse, target, champ)
      # case loop( tail, reverse, target,champ) do
      #   { :eol , champ} -> {:eol, champ}
      #   { :eoc, champ} -> loop( tail, reverse, target,champ)
      # end
    end
  end

  # NOTE(review): this clause appears after the [a | tail] clause of loop/4
  # above, so in practice it only fires once that clause stops matching,
  # i.e. on the empty list; the compiler may warn about clause grouping.
  def loop(base, _, _, champ) when length(base) <= 2, do: {:eol, champ}
  # Evaluates the triplet {a, b, z}: a and b walk from the low (sorted)
  # end, z from the high end; the bi < zi guard keeps the indices distinct.
  def xyz([{av, ai}, {bv, bi} | tail], [{zv, zi} | lefty], target, ochamp) when (bi < zi) do
    challv = av + bv + zv
    challd = abs(target - challv)

    case way(target, {challv, challd}, ochamp) do
      # Exact hit: stop the whole search.
      {:ok, champ} -> {:done, champ}
      # Sum below target: advance the low-side pointer (drop b).
      {:left, champ} -> xyz([{av, ai} | tail], [{zv, zi} | lefty], target, champ)
      # Sum above target: advance the high-side pointer (drop z).
      {:right, champ} -> xyz([{av, ai}, {bv, bi} | tail], lefty, target, champ)
    end
  end

  # Pointers crossed or the lists ran out: end of this candidate scan.
  def xyz(_, _, _, champ), do: {:eoc, champ}
  # Decides the next scan direction and updates the champion.
  # Clause order matters: the exact-match clause must come first.
  def way(target, {challv, _challd}, {_champv, _champd}) when challv == target, do: {:ok, {target, 0}}
  # Below target: grow the sum by moving the low-side pointer.
  def way(target, {challv, challd}, {champv, champd}) when challv < target, do: {:left, battle({challv, challd}, {champv, champd})}
  # At or above target: shrink the sum by moving the high-side pointer.
  def way(_target, {challv, challd}, {champv, champd}), do: {:right, battle({challv, challd}, {champv, champd})}

  # Champion vs challenger: keep whichever sum is closer to the target;
  # ties go to the challenger (<=).
  def battle({challv, challd}, {_champv, champd}) when challd <= champd, do: {challv, challd}
  def battle(_chall, {champv, champd}), do: {champv, champd}
  # Demo driver: runs a handful of known leetcode cases and prints results.
  # Each IO.puts line shows "target,expected" for the case underneath it.
  def main() do
    # Input: nums = [-1,2,1,-4], target = 1
    # Output: 2
    # Explanation: The sum that is closest to the target is 2. (-1 + 2 + 1 = 2).
    # Example 2:
    IO.puts("#{1},#{2}")

    three_sum_closest([-1, 2, 1, -4], 1)
    |> IO.inspect

    # Input: nums = [0,0,0], target = 1
    # Output: 0
    IO.puts("#{1},#{0}")

    three_sum_closest([0, 0, 0], 1)
    |> IO.inspect

    # test case 44/131
    # Input:
    # [0,2,1,-3]
    # 1
    # 0
    # three_sum_closest([0,2,1,-3],1)
    IO.puts("#{1},#{0}")

    three_sum_closest([0, 2, 1, -3], 1)
    |> IO.inspect

    # test case 46/131
    # Input
    # [1,1,-1,-1,3]
    # 3
    # Expected
    # 3
    IO.puts("#{3},#{3}")

    three_sum_closest([1, 1, -1, -1, 3], 3)
    |> IO.inspect
  end
end
# Run the demo when this script is executed (e.g. `elixir _success.exs`).
Solution.main
| 29.184874 | 133 | 0.583933 |
e80866c29e5faa91409dd98f35a3e18fcbdf4b90 | 3,020 | ex | Elixir | lib/code_corps/stripe_service/stripe_connect_card.ex | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 275 | 2015-06-23T00:20:51.000Z | 2021-08-19T16:17:37.000Z | lib/code_corps/stripe_service/stripe_connect_card.ex | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 1,304 | 2015-06-26T02:11:54.000Z | 2019-12-12T21:08:00.000Z | lib/code_corps/stripe_service/stripe_connect_card.ex | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 140 | 2016-01-01T18:19:47.000Z | 2020-11-22T06:24:47.000Z | defmodule CodeCorps.StripeService.StripeConnectCardService do
alias CodeCorps.{Repo, StripeConnectAccount, StripeConnectCard,
StripeConnectCustomer, StripePlatformCard, StripePlatformCustomer}
alias CodeCorps.StripeService.Adapters.StripeConnectCardAdapter
import CodeCorps.MapUtils, only: [rename: 3, keys_to_string: 1]
import Ecto.Query # needed for match
@api Application.get_env(:code_corps, :stripe)
def find_or_create(%StripePlatformCard{} = platform_card, %StripeConnectCustomer{} = connect_customer, %StripePlatformCustomer{} = platform_customer, %StripeConnectAccount{} = connect_account) do
case get_from_db(connect_account.id, platform_card.id) do
%StripeConnectCard{} = existing_card -> {:ok, existing_card}
nil -> create(platform_card, connect_customer, platform_customer, connect_account)
end
end
def update(%StripeConnectCard{} = record, %{} = attributes) do
with {:ok, %Stripe.Card{} = updated_stripe_card} <- update_on_stripe(record, attributes)
do
{:ok, updated_stripe_card}
else
failure -> failure
end
end
defp create(%StripePlatformCard{} = platform_card, %StripeConnectCustomer{} = connect_customer, %StripePlatformCustomer{} = platform_customer, %StripeConnectAccount{} = connect_account) do
platform_customer_id = platform_customer.id_from_stripe
platform_card_id = platform_card.id_from_stripe
connect_customer_id = connect_customer.id_from_stripe
connect_account_id = connect_account.id_from_stripe
attributes =
platform_card
|> create_non_stripe_attributes(connect_account)
with {:ok, %Stripe.Token{} = connect_token} <-
@api.Token.create(%{customer: platform_customer_id, card: platform_card_id}, connect_account: connect_account_id),
{:ok, %Stripe.Card{} = connect_card} <-
@api.Card.create(%{customer: connect_customer_id, source: connect_token.id}, connect_account: connect_account_id),
{:ok, params} <-
StripeConnectCardAdapter.to_params(connect_card, attributes)
do
%StripeConnectCard{}
|> StripeConnectCard.create_changeset(params)
|> Repo.insert
end
end
defp get_from_db(connect_account_id, platform_card_id) do
StripeConnectCard
|> where([c], c.stripe_connect_account_id == ^connect_account_id)
|> where([c], c.stripe_platform_card_id == ^platform_card_id)
|> Repo.one
end
defp create_non_stripe_attributes(platform_card, connect_account) do
platform_card
|> Map.from_struct
|> Map.take([:id])
|> rename(:id, :stripe_platform_card_id)
|> Map.put(:stripe_connect_account_id, connect_account.id)
|> keys_to_string
end
defp update_on_stripe(%StripeConnectCard{} = record, attributes) do
params =
attributes
|> Map.put(:customer, record.stripe_platform_card.customer_id_from_stripe)
@api.Card.update(
record.id_from_stripe,
params,
connect_account: record.stripe_connect_account.id_from_stripe)
end
end
| 39.220779 | 197 | 0.738411 |
e808aaefa8e6637d866d7e50b4d4e9a99fd1bbcf | 8,870 | ex | Elixir | lib/mix/tasks/phx.gen.auth/injector.ex | udanieli/phoenix | 40b79c8d227eadb2af90f19b26c297a87f3b69aa | [
"MIT"
] | null | null | null | lib/mix/tasks/phx.gen.auth/injector.ex | udanieli/phoenix | 40b79c8d227eadb2af90f19b26c297a87f3b69aa | [
"MIT"
] | null | null | null | lib/mix/tasks/phx.gen.auth/injector.ex | udanieli/phoenix | 40b79c8d227eadb2af90f19b26c297a87f3b69aa | [
"MIT"
] | 1 | 2021-06-22T08:06:06.000Z | 2021-06-22T08:06:06.000Z | defmodule Mix.Tasks.Phx.Gen.Auth.Injector do
@moduledoc false
alias Mix.Phoenix.{Context, Schema}
alias Mix.Tasks.Phx.Gen.Auth.HashingLibrary
@type schema :: %Schema{}
@type context :: %Context{schema: schema}
@doc """
Injects a dependency into the contents of mix.exs
"""
@spec mix_dependency_inject(String.t(), String.t()) :: {:ok, String.t()} | :already_injected | {:error, :unable_to_inject}
def mix_dependency_inject(mixfile, dependency) do
with :ok <- ensure_not_already_injected(mixfile, dependency),
{:ok, new_mixfile} <- do_mix_dependency_inject(mixfile, dependency) do
{:ok, new_mixfile}
end
end
@spec do_mix_dependency_inject(String.t(), String.t()) :: {:ok, String.t()} | {:error, :unable_to_inject}
defp do_mix_dependency_inject(mixfile, dependency) do
string_to_split_on = """
defp deps do
[
"""
case split_with_self(mixfile, string_to_split_on) do
{beginning, splitter, rest} ->
new_mixfile = IO.iodata_to_binary([beginning, splitter, " ", dependency, ?,, ?\n, rest])
{:ok, new_mixfile}
_ ->
{:error, :unable_to_inject}
end
end
@doc """
Injects configuration for test environment into `file`.
"""
@spec test_config_inject(String.t(), HashingLibrary.t()) :: {:ok, String.t()} | :already_injected | {:error, :unable_to_inject}
def test_config_inject(file, %HashingLibrary{} = hashing_library) when is_binary(file) do
code_to_inject =
hashing_library
|> test_config_code()
|> normalize_line_endings_to_file(file)
inject_unless_contains(
file,
code_to_inject,
# Matches the entire line and captures the line ending. In the
# replace string:
#
# * the entire matching line is inserted with \\0,
# * the actual code is injected with &2,
# * and the appropriate newlines are injected using \\2.
&Regex.replace(~r/(use Mix\.Config|import Config)(\r\n|\n|$)/, &1, "\\0\\2#{&2}\\2", global: false)
)
end
@doc """
Instructions to provide the user when `test_config_inject/2` fails.
"""
@spec test_config_help_text(String.t(), HashingLibrary.t()) :: String.t()
def test_config_help_text(file_path, %HashingLibrary{} = hashing_library) do
"""
Add the following to #{Path.relative_to_cwd(file_path)}:
#{hashing_library |> test_config_code() |> indent_spaces(4)}
"""
end
defp test_config_code(%HashingLibrary{test_config: test_config}) do
String.trim("""
# Only in tests, remove the complexity from the password hashing algorithm
#{test_config}
""")
end
@router_plug_anchor_line "plug :put_secure_browser_headers"
@doc """
Injects the fetch_current_<schema> plug into router's browser pipeline
"""
@spec router_plug_inject(String.t(), context) :: {:ok, String.t()} | :already_injected | {:error, :unable_to_inject}
def router_plug_inject(file, %Context{schema: schema}) when is_binary(file) do
inject_unless_contains(
file,
router_plug_code(schema),
# Matches the entire line containing `anchor_line` and captures
# the whitespace before the anchor. In the replace string
#
# * the entire matching line is inserted with \\0,
# * the captured indent is inserted using \\1,
# * the actual code is injected with &2,
# * and the appropriate newline is injected using \\2
&Regex.replace(~r/^(\s*)#{@router_plug_anchor_line}.*(\r\n|\n|$)/Um, &1, "\\0\\1#{&2}\\2", global: false)
)
end
@doc """
Instructions to provide the user when `inject_router_plug/2` fails.
"""
@spec router_plug_help_text(String.t(), context) :: String.t()
def router_plug_help_text(file_path, %Context{schema: schema}) do
"""
Add the #{router_plug_name(schema)} plug to the :browser pipeline in #{Path.relative_to_cwd(file_path)}:
pipeline :browser do
...
#{@router_plug_anchor_line}
#{router_plug_code(schema)}
end
"""
end
defp router_plug_code(%Schema{} = schema) do
"plug " <> router_plug_name(schema)
end
defp router_plug_name(%Schema{} = schema) do
":fetch_current_#{schema.singular}"
end
@doc """
Injects a menu in the application layout
"""
@spec app_layout_menu_inject(String.t(), schema) :: {:ok, String.t()} | :already_injected | {:error, :unable_to_inject}
def app_layout_menu_inject(file, %Schema{} = schema) when is_binary(file) do
with {:error, :unable_to_inject} <- app_layout_menu_inject_at_end_of_nav_tag(file, schema),
{:error, :unable_to_inject} <- app_layout_menu_inject_after_opening_body_tag(file, schema) do
{:error, :unable_to_inject}
end
end
@doc """
Instructions to provide the user when `app_layout_menu_inject/2` fails.
"""
@spec app_layout_menu_help_text(String.t(), schema) :: String.t()
def app_layout_menu_help_text(file_path, %Schema{} = schema) do
"""
Add a render call for #{inspect(app_layout_menu_template_name(schema))} to #{Path.relative_to_cwd(file_path)}:
<nav role="navigation">
#{app_layout_menu_code_to_inject(schema)}
</nav>
"""
end
@doc """
Menu code to inject into the application layout template.
"""
@spec app_layout_menu_code_to_inject(schema) :: String.t()
def app_layout_menu_code_to_inject(%Schema{} = schema) do
"<%= render \"#{app_layout_menu_template_name(schema)}\", assigns %>"
end
@doc """
Name of the template containing the menu
"""
@spec app_layout_menu_template_name(schema) :: String.t()
def app_layout_menu_template_name(%Schema{} = schema) do
"_#{schema.singular}_menu.html"
end
defp app_layout_menu_inject_at_end_of_nav_tag(file, schema) do
inject_unless_contains(
file,
app_layout_menu_code_to_inject(schema),
&Regex.replace(~r/(\s*)<\/nav>/m, &1, "\\1 #{&2}\\0", global: false)
)
end
defp app_layout_menu_inject_after_opening_body_tag(file, schema) do
anchor_line = "<body>"
inject_unless_contains(
file,
app_layout_menu_code_to_inject(schema),
# Matches the entire line containing `anchor_line` and captures
# the whitespace before the anchor. In the replace string, the
# entire matching line is inserted with \\0, then a newline then
# the indent that was captured using \\1. &2 is the code to
# inject.
&Regex.replace(~r/^(\s*)#{anchor_line}.*(\r\n|\n|$)/Um, &1, "\\0\\1 #{&2}\\2", global: false)
)
end
@doc """
Injects code unless the existing code already contains `code_to_inject`
"""
@spec inject_unless_contains(String.t(), String.t(), (String.t(), String.t() -> String.t())) ::
{:ok, String.t()} | :already_injected | {:error, :unable_to_inject}
def inject_unless_contains(code, code_to_inject, inject_fn) when is_binary(code) and is_binary(code_to_inject) and is_function(inject_fn, 2) do
with :ok <- ensure_not_already_injected(code, code_to_inject) do
new_code = inject_fn.(code, code_to_inject)
if code != new_code do
{:ok, new_code}
else
{:error, :unable_to_inject}
end
end
end
@doc """
Injects snippet before the final end in a file
"""
@spec inject_before_final_end(String.t(), String.t()) :: {:ok, String.t()} | :already_injected
def inject_before_final_end(code, code_to_inject) when is_binary(code) and is_binary(code_to_inject) do
if String.contains?(code, code_to_inject) do
:already_injected
else
new_code =
code
|> String.trim_trailing()
|> String.trim_trailing("end")
|> Kernel.<>(code_to_inject)
|> Kernel.<>("end\n")
{:ok, new_code}
end
end
@spec ensure_not_already_injected(String.t(), String.t()) :: :ok | :already_injected
defp ensure_not_already_injected(file, inject) do
if String.contains?(file, inject) do
:already_injected
else
:ok
end
end
@spec split_with_self(String.t(), String.t()) :: {String.t(), String.t(), String.t()} | :error
defp split_with_self(contents, text) do
case :binary.split(contents, text) do
[left, right] -> {left, text, right}
[_] -> :error
end
end
@spec normalize_line_endings_to_file(String.t(), String.t()) :: String.t()
defp normalize_line_endings_to_file(code, file) do
String.replace(code, "\n", get_line_ending(file))
end
@spec get_line_ending(String.t()) :: String.t()
defp get_line_ending(file) do
case Regex.run(~r/\r\n|\n|$/, file) do
[line_ending] -> line_ending
[] -> "\n"
end
end
defp indent_spaces(string, number_of_spaces) when is_binary(string) and is_integer(number_of_spaces) do
indent = String.duplicate(" ", number_of_spaces)
string
|> String.split("\n")
|> Enum.map(&(indent <> &1))
|> Enum.join("\n")
end
end
| 33.598485 | 145 | 0.663021 |
e808b69dd74bb76d338c33bb8fa9cd82bf51509a | 1,232 | ex | Elixir | lib/blanks.ex | flowerett/elixir-koans | 174f4610e846f59cc34b41a36b813f5d684fd510 | [
"MIT"
] | 44 | 2018-06-17T03:37:04.000Z | 2022-01-31T06:28:02.000Z | lib/blanks.ex | flowerett/elixir-koans | 174f4610e846f59cc34b41a36b813f5d684fd510 | [
"MIT"
] | 18 | 2018-04-01T10:25:20.000Z | 2021-06-24T09:38:06.000Z | lib/blanks.ex | flowerett/elixir-koans | 174f4610e846f59cc34b41a36b813f5d684fd510 | [
"MIT"
] | 18 | 2018-07-27T17:08:54.000Z | 2022-01-20T09:08:53.000Z | defmodule Blanks do
def replace(ast, replacements) do
replacements = List.wrap(replacements)
ast
|> Macro.prewalk(replacements, &pre/2)
|> elem(0)
end
defp pre({:assert_receive, _, args} = node, replacements) do
{args, replacements} = Macro.prewalk(args, replacements, &pre_pin/2)
{put_elem(node, 2, args), replacements}
end
defp pre({:___, _, _}, [first | remainder]), do: {first, remainder}
defp pre(node, acc), do: {node, acc}
defp pre_pin({:___, _, _}, [first | remainder]), do: {pin(first), remainder}
defp pre_pin(node, acc), do: {node, acc}
defp pin(var) when is_tuple(var) do
quote do
^unquote(var)
end
end
defp pin(var), do: var
def count(ast) do
ast
|> Macro.prewalk(0, &count/2)
|> elem(1)
end
defp count({:___, _, _} = node, acc), do: {node, acc+1}
defp count(node, acc), do: {node, acc}
def replace_line({:__block__, meta, lines}, replacement_fn) do
replaced_lines = Enum.map(lines, fn(line) ->
replace_line(line, replacement_fn)
end)
{:__block__, meta, replaced_lines}
end
def replace_line(line, replacement_fn) do
if count(line) > 0 do
replacement_fn.(line)
else
line
end
end
end
| 24.64 | 78 | 0.62987 |
e808dc7a17153d2e8e5379d7bf8048841aa1e454 | 1,309 | exs | Elixir | test/yggdrasil/rabbitmq/connection/generator_test.exs | gmtprime/yggdrasil_rabbitmq | 56345469888ad91f35b2445dab96b7c19217943f | [
"MIT"
] | 3 | 2020-02-21T11:11:10.000Z | 2020-03-12T12:32:39.000Z | test/yggdrasil/rabbitmq/connection/generator_test.exs | gmtprime/yggdrasil_rabbitmq | 56345469888ad91f35b2445dab96b7c19217943f | [
"MIT"
] | null | null | null | test/yggdrasil/rabbitmq/connection/generator_test.exs | gmtprime/yggdrasil_rabbitmq | 56345469888ad91f35b2445dab96b7c19217943f | [
"MIT"
] | null | null | null | defmodule Yggdrasil.RabbitMQ.Connection.GeneratorTest do
use ExUnit.Case
alias AMQP.Channel
alias Yggdrasil.RabbitMQ.Client
alias Yggdrasil.RabbitMQ.Connection.Generator
setup do
client = %Client{pid: self(), tag: :publisher, namespace: __MODULE__}
{:ok, [client: client]}
end
describe "with_channel/2" do
test "returns a new channel", %{client: client} do
assert {:ok, %Channel{pid: pid}} = Generator.with_channel(client)
assert is_pid(pid) and Process.alive?(pid)
end
test "returns the same channel for the same process", %{client: client} do
assert {:ok, channel} = Generator.with_channel(client)
assert {:ok, ^channel} = Generator.with_channel(client)
end
test "returns an error when no connection is available",
%{client: client} do
namespace = __MODULE__.Disconnected
config = [rabbitmq: [hostname: "disconnected"]]
Application.put_env(:yggdrasil, namespace, config)
client = %Client{client | namespace: namespace}
assert {:error, _} = Generator.with_channel(client)
end
end
describe "with_channel/3" do
test "returns a custom callback", %{client: client} do
callback = fn _channel -> :ok end
assert :ok = Generator.with_channel(client, callback)
end
end
end
| 29.75 | 78 | 0.682964 |
e808e76a9d45efc29d6e2fea2ef8160f34de1451 | 2,624 | ex | Elixir | apps/api_web/lib/date_helpers.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 62 | 2019-01-17T12:34:39.000Z | 2022-03-20T21:49:47.000Z | apps/api_web/lib/date_helpers.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 375 | 2019-02-13T15:30:50.000Z | 2022-03-30T18:50:41.000Z | apps/api_web/lib/date_helpers.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 14 | 2019-01-16T19:35:57.000Z | 2022-02-26T18:55:54.000Z | defmodule DateHelpers do
@moduledoc """
Helper functions for working with dates/times.
"""
# UNIX epoch in gregorian seconds (seconds since year 0)
@gregorian_offset :calendar.datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}})
@doc """
Adds a number of seconds to a Date.
The seconds are relative to 12 hours before noon (local time) on the given
date.
## Examples
iex> DateHelpers.add_seconds_to_date(~D[2017-11-03], 86401)
#DateTime<2017-11-04 00:00:01-04:00 EDT America/New_York>
# falling back
iex> DateHelpers.add_seconds_to_date(~D[2017-11-05], 3600)
#DateTime<2017-11-05 01:00:00-05:00 EST America/New_York>
iex> DateHelpers.add_seconds_to_date(~D[2017-11-05], 7200)
#DateTime<2017-11-05 02:00:00-05:00 EST America/New_York>
iex> DateHelpers.add_seconds_to_date(~D[2017-11-05], 43200)
#DateTime<2017-11-05 12:00:00-05:00 EST America/New_York>
# springing forward
iex> DateHelpers.add_seconds_to_date(~D[2017-03-12], 3600)
#DateTime<2017-03-12 00:00:00-05:00 EST America/New_York>
iex> DateHelpers.add_seconds_to_date(~D[2017-03-12], 7200)
#DateTime<2017-03-12 01:00:00-05:00 EST America/New_York>
iex> DateHelpers.add_seconds_to_date(~D[2017-03-12], 43200)
#DateTime<2017-03-12 12:00:00-04:00 EDT America/New_York>
# if you're doing a lot of these, you can pre-convert to seconds
iex> seconds = DateHelpers.unix_midnight_seconds(~D[2018-04-06])
iex> DateHelpers.add_seconds_to_date(seconds, 61)
#DateTime<2018-04-06 00:01:01-04:00 EDT America/New_York>
"""
@spec add_seconds_to_date(Date.t() | non_neg_integer, non_neg_integer) :: DateTime.t()
def add_seconds_to_date(date, seconds)
def add_seconds_to_date(unix_seconds, seconds)
when is_integer(unix_seconds) and is_integer(seconds) do
Parse.Timezone.unix_to_local(unix_seconds + seconds)
end
def add_seconds_to_date(%Date{} = date, seconds) do
date
|> unix_midnight_seconds
|> add_seconds_to_date(seconds)
end
@doc """
Returns a UNIX timestamp for 12 hours before noon on the given day.
"""
def unix_midnight_seconds(%Date{year: year, month: month, day: day}) do
gregorian_noon = :calendar.datetime_to_gregorian_seconds({{year, month, day}, {12, 0, 0}})
utc_noon = gregorian_noon - @gregorian_offset
local_utc_noon = Parse.Timezone.unix_to_local(utc_noon)
local_noon = %{local_utc_noon | hour: 12, minute: 0, second: 0}
local_noon_unix = DateTime.to_unix(local_noon)
twelve_hours = 12 * 3600
local_noon_unix - twelve_hours
end
end
| 36.957746 | 94 | 0.705412 |
e80914223777237d4a39bca8bf53a500b86aca5b | 2,105 | ex | Elixir | lib/boots_of_speed/server.ex | Baradoy/boots_of_speed | 6004da8fb43e15cb7443e0bd00fe70c936a41015 | [
"MIT"
] | 1 | 2019-04-18T05:10:21.000Z | 2019-04-18T05:10:21.000Z | lib/boots_of_speed/server.ex | Baradoy/boots_of_speed | 6004da8fb43e15cb7443e0bd00fe70c936a41015 | [
"MIT"
] | 3 | 2020-07-16T17:41:44.000Z | 2021-05-08T03:39:49.000Z | lib/boots_of_speed/server.ex | Baradoy/boots_of_speed | 6004da8fb43e15cb7443e0bd00fe70c936a41015 | [
"MIT"
] | null | null | null | defmodule BootsOfSpeed.Server do
@moduledoc """
Handles creating and retrieving game agents
"""
use GenServer
defmodule State do
@moduledoc """
Server state
"""
defstruct supervisor: nil, games: nil, game_state_supervisor: nil
end
def child_spec(arg) do
%{
id: __MODULE__,
name: __MODULE__,
start: {__MODULE__, :start_link, [arg]}
}
end
# API
def start_link([supervisor]) do
GenServer.start_link(__MODULE__, supervisor, name: __MODULE__)
end
def fetch_game_state_server(game_name, pid \\ __MODULE__) do
GenServer.call(pid, {:fetch_game_state_server, game_name})
end
def start_game_state_server(game_name, pid \\ __MODULE__) do
GenServer.call(pid, {:start_game_state_server, game_name})
end
# Callbacks
def init(supervisor) when is_pid(supervisor) do
Process.flag(:trap_exit, true)
games = :ets.new(:games, [:private])
{:ok, %State{supervisor: supervisor, games: games}}
end
def handle_call({:fetch_game_state_server, game_name}, _, %{games: games} = state) do
case :ets.lookup(games, game_name) do
[{_game_name, game_state_agent}] ->
{:reply, {:ok, game_state_agent}, state}
_ ->
{:reply, {:error, "Game does not exist"}, state}
end
end
def handle_call({:start_game_state_server, game_name}, _, %State{} = state) do
%State{games: games} = state
game_state_agent = start_new_game(game_name, games)
{:reply, {:ok, game_state_agent}, state}
end
def handle_info({:DOWN, _ref, _, _, _}, %State{} = state) do
{:noreply, state}
end
def handle_info({:EXIT, pid, _reason}, %State{games: games} = state) do
case :ets.match(games, {:"$1", pid}) do
[[game_name]] ->
true = :ets.delete(games, game_name)
{:noreply, state}
[] ->
{:noreply, state}
end
end
## Private
defp start_new_game(game_name, games) do
{:ok, game_state_agent} = BootsOfSpeed.GameStateAgent.start_link(game_name)
true = :ets.insert(games, {game_name, game_state_agent})
game_state_agent
end
end
| 24.764706 | 87 | 0.653682 |
e80925c53f444f01d3e0b8d626c7b988e31b6bfa | 7,409 | ex | Elixir | test/support/v1/services/test_two_pool/test_two_worker_entity.ex | noizu/simple_pool | 59251a3391ff82152a31626072955b95f83c18ee | [
"MIT"
] | null | null | null | test/support/v1/services/test_two_pool/test_two_worker_entity.ex | noizu/simple_pool | 59251a3391ff82152a31626072955b95f83c18ee | [
"MIT"
] | null | null | null | test/support/v1/services/test_two_pool/test_two_worker_entity.ex | noizu/simple_pool | 59251a3391ff82152a31626072955b95f83c18ee | [
"MIT"
] | null | null | null | #-------------------------------------------------------------------------------
# Author: Keith Brings
# Copyright (C) 2018 Noizu Labs, Inc. All rights reserved.
#-------------------------------------------------------------------------------
defmodule Noizu.SimplePool.Support.TestTwoWorkerEntity do
  @moduledoc """
  Test-support worker inner-state entity for
  `Noizu.SimplePool.Support.TestTwoPool`.

  Stores the last value received through the `test_s_call*`/`test_s_cast*`
  helpers under its `:data` map so tests can assert on message delivery.
  Also provides ERP-style `id/ref/sref/entity/record` helpers for the
  `"ref.noizu-test-2.<identifier>"` sref format.
  """
  @vsn 1.0

  #-----------------------------------------------------------------------------
  # aliases, imports, uses,
  #-----------------------------------------------------------------------------
  require Logger

  #-----------------------------------------------------------------------------
  # Struct & Types
  #-----------------------------------------------------------------------------
  @type t :: %__MODULE__{
    identifier: Types.entity_reference,
    data: Map.t,
    vsn: Types.vsn
  }

  defstruct [
    identifier: nil,
    data: %{},
    vsn: @vsn
  ]

  use Noizu.SimplePool.InnerStateBehaviour,
      pool: Noizu.SimplePool.Support.TestTwoPool,
      override: [:load, :supervisor_hint]

  # Derives an integer supervisor hint from identifiers shaped "test_<int>";
  # any other identifier shape will raise a MatchError.
  def supervisor_hint(ref) do
    "test_" <> ts = id(ref)
    String.to_integer(ts)
  end

  #-----------------------------------------------------------------------------
  # Behaviour
  #-----------------------------------------------------------------------------
  # Loading just rebuilds a fresh entity from the reference; there is no
  # persistence layer in this test support module.
  def load(ref), do: load(ref, nil, nil)
  def load(ref, context), do: load(ref, nil, context)
  def load(ref, _options, _context) do
    %__MODULE__{
      identifier: id(ref)
    }
  end

  #-----------------------------------------------------------------------------
  # Implementation
  #-----------------------------------------------------------------------------
  # Each helper records the received value under a key matching the call
  # type, so tests can verify which code path delivered the message.
  def test_s_call!(this, value, _context) do
    state = put_in(this, [Access.key(:data), :s_call!], value)
    {:reply, :s_call!, state}
  end
  def test_s_call(this, value, _context), do: {:reply, :s_call, put_in(this, [Access.key(:data), :s_call], value)}
  def test_s_cast!(this, value, _context), do: {:noreply, put_in(this, [Access.key(:data), :s_cast!], value)}
  def test_s_cast(this, value, _context), do: {:noreply, put_in(this, [Access.key(:data), :s_cast], value)}

  #-----------------------------------------------------------------------------
  # call_forwarding
  #-----------------------------------------------------------------------------
  def call_forwarding({:test_s_call!, value}, context, _from, %__MODULE__{} = this), do: test_s_call!(this, value, context)
  def call_forwarding({:test_s_call, value}, context, _from, %__MODULE__{} = this), do: test_s_call(this, value, context)
  def call_forwarding({:test_s_cast!, value}, context, %__MODULE__{} = this), do: test_s_cast!(this, value, context)
  def call_forwarding({:test_s_cast, value}, context, %__MODULE__{} = this), do: test_s_cast(this, value, context)

  #-------------------
  # id/1
  #-------------------
  def id({:ref, __MODULE__, identifier}), do: identifier
  def id("ref.noizu-test-2." <> identifier), do: identifier
  def id(%__MODULE__{} = entity), do: entity.identifier

  #-------------------
  # ref/1
  #-------------------
  def ref({:ref, __MODULE__, identifier}), do: {:ref, __MODULE__, identifier}
  def ref("ref.noizu-test-2." <> identifier), do: {:ref, __MODULE__, identifier}
  def ref(%__MODULE__{} = entity), do: {:ref, __MODULE__, entity.identifier}

  #-------------------
  # sref/1
  #-------------------
  def sref({:ref, __MODULE__, identifier}), do: "ref.noizu-test-2.#{identifier}"
  def sref("ref.noizu-test-2." <> identifier), do: "ref.noizu-test-2.#{identifier}"
  def sref(%__MODULE__{} = entity), do: "ref.noizu-test-2.#{entity.identifier}"

  #-------------------
  # entity/2
  #-------------------
  def entity(ref, options \\ %{})
  def entity({:ref, __MODULE__, identifier}, _options), do: %__MODULE__{identifier: identifier}
  def entity("ref.noizu-test-2." <> identifier, _options), do: %__MODULE__{identifier: identifier}
  def entity(%__MODULE__{} = entity, _options), do: entity

  #-------------------
  # entity!/2
  #-------------------
  def entity!(ref, options \\ %{})
  def entity!({:ref, __MODULE__, identifier}, _options), do: %__MODULE__{identifier: identifier}
  def entity!("ref.noizu-test-2." <> identifier, _options), do: %__MODULE__{identifier: identifier}
  def entity!(%__MODULE__{} = entity, _options), do: entity

  #-------------------
  # record/2
  #-------------------
  def record(ref, options \\ %{})
  def record({:ref, __MODULE__, identifier}, _options), do: %__MODULE__{identifier: identifier}
  def record("ref.noizu-test-2." <> identifier, _options), do: %__MODULE__{identifier: identifier}
  def record(%__MODULE__{} = entity, _options), do: entity

  #-------------------
  # record!/2
  #-------------------
  def record!(ref, options \\ %{})
  def record!({:ref, __MODULE__, identifier}, _options), do: %__MODULE__{identifier: identifier}
  def record!("ref.noizu-test-2." <> identifier, _options), do: %__MODULE__{identifier: identifier}
  def record!(%__MODULE__{} = entity, _options), do: entity

  # NOTE(review): id_ok/1 delegates to ref/1, not id/1 — confirm this is the
  # intended behaviour (the sibling *_ok helpers all wrap their namesakes).
  def id_ok(o) do
    r = ref(o)
    r && {:ok, r} || {:error, o}
  end
  def ref_ok(o) do
    r = ref(o)
    r && {:ok, r} || {:error, o}
  end
  def sref_ok(o) do
    r = sref(o)
    r && {:ok, r} || {:error, o}
  end
  def entity_ok(o, options \\ %{}) do
    r = entity(o, options)
    r && {:ok, r} || {:error, o}
  end
  def entity_ok!(o, options \\ %{}) do
    r = entity!(o, options)
    r && {:ok, r} || {:error, o}
  end

  # ERP protocol implementation mirroring the module-level helpers above.
  defimpl Noizu.ERP, for: Noizu.SimplePool.Support.TestTwoWorkerEntity do
    def id(obj) do
      obj.identifier
    end # end sref/1

    def ref(obj) do
      {:ref, Noizu.SimplePool.Support.TestTwoWorkerEntity, obj.identifier}
    end # end ref/1

    def sref(obj) do
      "ref.noizu-test-2.#{obj.identifier}"
    end # end sref/1

    def record(obj, _options \\ nil) do
      obj
    end # end record/2

    def record!(obj, _options \\ nil) do
      obj
    end # end record/2

    def entity(obj, _options \\ nil) do
      obj
    end # end entity/2

    def entity!(obj, _options \\ nil) do
      obj
    end # end defimpl EntityReferenceProtocol, for: Tuple

    # NOTE(review): as at module level, id_ok/1 wraps ref/1 rather than id/1.
    def id_ok(o) do
      r = ref(o)
      r && {:ok, r} || {:error, o}
    end
    def ref_ok(o) do
      r = ref(o)
      r && {:ok, r} || {:error, o}
    end
    def sref_ok(o) do
      r = sref(o)
      r && {:ok, r} || {:error, o}
    end
    def entity_ok(o, options \\ %{}) do
      r = entity(o, options)
      r && {:ok, r} || {:error, o}
    end
    def entity_ok!(o, options \\ %{}) do
      r = entity!(o, options)
      r && {:ok, r} || {:error, o}
    end
  end

  #-----------------------------------------------------------------------------
  # Inspect Protocol
  #-----------------------------------------------------------------------------
  defimpl Inspect, for: Noizu.SimplePool.Support.TestTwoWorkerEntity do
    import Inspect.Algebra

    # Renders "#TestTwoWorkerEntity(<id>)<...fields...>"; field details are
    # only expanded when the inspect limit is :infinity.
    def inspect(entity, opts) do
      heading = "#TestTwoWorkerEntity(#{inspect entity.identifier})"
      {seperator, end_seperator} = if opts.pretty, do: {"\n ", "\n"}, else: {" ", " "}
      inner = cond do
        opts.limit == :infinity ->
          concat(["<#{seperator}", to_doc(Map.from_struct(entity), opts), "#{end_seperator}>"])
        true -> "<>"
      end
      concat [heading, inner]
    end # end inspect/2
  end # end defimpl
end # end defmacro
| 33.224215 | 123 | 0.498448 |
e809313f50a5038fec73a2aee062ac931808821c | 245 | ex | Elixir | lib/leather_web/views/plaid_item_view.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 67 | 2016-10-24T04:11:40.000Z | 2021-11-25T16:46:51.000Z | lib/leather_web/views/plaid_item_view.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 6 | 2017-08-17T21:43:50.000Z | 2021-11-03T13:13:49.000Z | lib/leather_web/views/plaid_item_view.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 7 | 2017-08-13T01:43:37.000Z | 2022-01-11T04:38:27.000Z | defmodule LeatherWeb.PlaidItemView do
@moduledoc false
use LeatherWeb, :view
def render("plaid_item.json", %{plaid_item: plaid_item}) do
%{
id: plaid_item.id,
institution_name: plaid_item.institution_name
}
end
end
| 18.846154 | 61 | 0.702041 |
e8093cf69b95a5fbdb77eb84c5c2ee31ec4f1579 | 541 | ex | Elixir | lib/covid19_api_web/views/error_view.ex | HenningLanghorst/jhu_csse_covid19_api | 46861f31ce7154f74c377d37a31bd4283524894f | [
"MIT"
] | null | null | null | lib/covid19_api_web/views/error_view.ex | HenningLanghorst/jhu_csse_covid19_api | 46861f31ce7154f74c377d37a31bd4283524894f | [
"MIT"
] | null | null | null | lib/covid19_api_web/views/error_view.ex | HenningLanghorst/jhu_csse_covid19_api | 46861f31ce7154f74c377d37a31bd4283524894f | [
"MIT"
] | null | null | null | defmodule Covid19ApiWeb.ErrorView do
use Covid19ApiWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.json", _assigns) do
# %{errors: %{detail: "Internal Server Error"}}
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.json" becomes
# "Not Found".
def template_not_found(template, _assigns) do
%{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
end
end
| 31.823529 | 83 | 0.722736 |
e80944a7bb57ce33a2c28d7f67ba3e22202c0a17 | 2,944 | ex | Elixir | lib/database/patch.ex | m4ta1l/bors-ng | a4d04fd740e24d03833cd46a76f0f9e5be96f818 | [
"Apache-2.0"
] | 1 | 2020-09-05T11:41:59.000Z | 2020-09-05T11:41:59.000Z | lib/database/patch.ex | m4ta1l/bors-ng | a4d04fd740e24d03833cd46a76f0f9e5be96f818 | [
"Apache-2.0"
] | 15 | 2020-12-11T03:57:12.000Z | 2022-03-27T16:46:53.000Z | lib/database/patch.ex | m4ta1l/bors-ng | a4d04fd740e24d03833cd46a76f0f9e5be96f818 | [
"Apache-2.0"
] | 1 | 2020-04-01T13:34:19.000Z | 2020-04-01T13:34:19.000Z | defmodule BorsNG.Database.Patch do
@moduledoc """
Corresponds to a pull request in GitHub.
A closed patch may not be r+'ed,
nor can a patch associated with a completed batch be r+'ed again,
though a patch may be merged and r+'ed at the same time.
"""
use BorsNG.Database.Model
@type t :: %__MODULE__{}
@type id :: pos_integer
schema "patches" do
belongs_to(:project, Project)
field(:into_branch, :string)
field(:pr_xref, :integer)
field(:title, :string)
field(:body, :string)
field(:commit, :string)
field(:open, :boolean, default: true)
field(:priority, :integer, default: 0)
field(:is_single, :boolean, default: false)
belongs_to(:author, User)
timestamps()
end
@spec changeset(t | Ecto.Changeset.t(), map) :: Ecto.Changeset.t()
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [
:pr_xref,
:title,
:body,
:commit,
:project_id,
:author_id,
:open,
:into_branch,
:priority,
:is_single
])
|> unique_constraint(:pr_xref, name: :patches_pr_xref_index)
end
@spec all_for_batch(Batch.id()) :: Ecto.Queryable.t()
def all_for_batch(batch_id) do
from(p in Patch,
join: l in LinkPatchBatch,
on: l.patch_id == p.id,
where: l.batch_id == ^batch_id,
order_by: [desc: p.pr_xref]
)
end
@spec all_links_not_err() :: Ecto.Queryable.t()
defp all_links_not_err do
from(l in LinkPatchBatch,
join: b in Batch,
on: l.batch_id == b.id and b.state != ^:error and b.state != ^:canceled
)
end
@spec all(:awaiting_review) :: Ecto.Queryable.t()
def all(:awaiting_review) do
all = all_links_not_err()
from(p in Patch,
left_join: l in subquery(all),
on: l.patch_id == p.id,
where: is_nil(l.batch_id),
where: p.open,
order_by: [desc: p.pr_xref]
)
end
@spec all_for_project(Project.id(), :open | :awaiting_review) :: Ecto.Queryable.t()
def all_for_project(project_id, :open) do
from(p in Patch,
where: p.open,
where: p.project_id == ^project_id,
order_by: [desc: p.pr_xref]
)
end
def all_for_project(project_id, :awaiting_review) do
from(p in Patch.all(:awaiting_review),
where: p.project_id == ^project_id,
order_by: [desc: p.pr_xref]
)
end
@spec dups_in_batches() :: Ecto.Queryable.t()
def dups_in_batches do
all = all_links_not_err()
from(p in Patch,
left_join: l in subquery(all),
on: l.patch_id == p.id,
where: p.open,
group_by: p.id,
having: count(p.id) > 1
)
end
@spec ci_skip?(t) :: boolean()
def ci_skip?(patch) do
rexp = ~r/\[ci skip\]\[skip ci\]\[skip netlify\]/
title = patch.title || ""
body = patch.body || ""
String.match?(title, rexp) or String.match?(body, rexp)
end
end
| 24.949153 | 85 | 0.615829 |
e809566144f69eda25fb57027f042d4dd4cf67c0 | 867 | exs | Elixir | test/ecto/uuid_test.exs | timgestson/ecto | 1c1eb6d322db04cfa48a4fc81da1332e91adbc1f | [
"Apache-2.0"
] | null | null | null | test/ecto/uuid_test.exs | timgestson/ecto | 1c1eb6d322db04cfa48a4fc81da1332e91adbc1f | [
"Apache-2.0"
] | null | null | null | test/ecto/uuid_test.exs | timgestson/ecto | 1c1eb6d322db04cfa48a4fc81da1332e91adbc1f | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.UUIDTest do
use ExUnit.Case, async: true
@test_uuid "601D74E4-A8D3-4B6E-8365-EDDB4C893327"
@test_uuid_binary << 0x60, 0x1D, 0x74, 0xE4, 0xA8, 0xD3, 0x4B, 0x6E, 0x83, 0x65, 0xED, 0xDB, 0x4C, 0x89, 0x33, 0x27 >>
test "cast" do
assert Ecto.UUID.cast(@test_uuid) == {:ok, @test_uuid}
assert Ecto.UUID.cast(@test_uuid_binary) == {:ok, @test_uuid}
assert Ecto.UUID.cast(nil) == :error
end
test "load" do
assert Ecto.UUID.load(@test_uuid_binary) == {:ok, @test_uuid}
assert Ecto.UUID.load(@test_uuid) == :error
end
test "dump" do
assert Ecto.UUID.dump(@test_uuid) == {:ok, %Ecto.Query.Tagged{value: @test_uuid_binary, type: :uuid}}
assert Ecto.UUID.dump(@test_uuid_binary) == :error
end
test "generate" do
assert << _::64, ?-, _::32, ?-, _::32, ?-, _::32, ?-, _::96 >> = Ecto.UUID.generate
end
end
| 32.111111 | 120 | 0.645905 |
e8096ae95799f197ecb9998e03de692267ff7f59 | 1,930 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/feature.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/feature.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/vision/lib/google_api/vision/v1/model/feature.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vision.V1.Model.Feature do
  @moduledoc """
  The type of Google Cloud Vision API detection to perform, and the maximum number of results to return for that type. Multiple `Feature` objects can be specified in the `features` list.

  ## Attributes

  *   `maxResults` (*type:* `integer()`, *default:* `nil`) - Maximum number of results of this type. Does not apply to `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
  *   `model` (*type:* `String.t`, *default:* `nil`) - Model to use for the feature. Supported values: "builtin/stable" (the default if unset) and "builtin/latest".
  *   `type` (*type:* `String.t`, *default:* `nil`) - The feature type.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :maxResults => integer(),
          :model => String.t(),
          :type => String.t()
        }

  # field/1 comes from GoogleApi.Gax.ModelBase and defines the struct
  # key plus its JSON (de)serialization mapping.
  field(:maxResults)
  field(:model)
  field(:type)
end
# Delegates JSON decoding to the generated model implementation.
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.Feature do
  def decode(value, options) do
    GoogleApi.Vision.V1.Model.Feature.decode(value, options)
  end
end
# Delegates JSON encoding to the shared Gax model base encoder.
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.Feature do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.415094 | 186 | 0.709845 |
e80977004162623edf66435c894acf3a42b69bce | 44,132 | ex | Elixir | lib/mongo.ex | Virtual-Repetitions/mongodb | eea06db138ae3b4824c7f208fda621075fbf28a7 | [
"Apache-2.0"
] | null | null | null | lib/mongo.ex | Virtual-Repetitions/mongodb | eea06db138ae3b4824c7f208fda621075fbf28a7 | [
"Apache-2.0"
] | 2 | 2021-07-10T18:53:51.000Z | 2021-08-06T15:36:58.000Z | lib/mongo.ex | Virtual-Repetitions/mongodb | eea06db138ae3b4824c7f208fda621075fbf28a7 | [
"Apache-2.0"
] | 3 | 2021-07-10T18:01:00.000Z | 2021-08-05T00:39:17.000Z | defmodule Mongo do
@moduledoc """
The main entry point for doing queries. All functions take a topology to
run the query on.
## Generic options
All operations take these options.
* `:timeout` - The maximum time that the caller is allowed the to hold the
connection’s state (ignored when using a run/transaction connection,
default: `5_000`)
* `:pool` - The pooling behaviour module to use, this option is required
unless the default `DBConnection.Connection` pool is used
* `:pool_timeout` - The maximum time to wait for a reply when making a
synchronous call to the pool (default: `5_000`)
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`)
* `:log` - A function to log information about a call, either
a 1-arity fun, `{module, function, args}` with `DBConnection.LogEntry.t`
prepended to `args` or `nil`. See `DBConnection.LogEntry` (default: `nil`)
* `:database` - the database to run the operation on
* `:connect_timeout_ms` - maximum timeout for connect (default: `5_000`)
## Read options
All read operations that returns a cursor take the following options
for controlling the behaviour of the cursor.
* `:batch_size` - Number of documents to fetch in each batch
* `:limit` - Maximum number of documents to fetch with the cursor
* `:read_preference` - specifies the rules for selecting a server to query
## Write options
All write operations take the following options for controlling the
write concern.
* `:w` - The number of servers to replicate to before returning from write
operators, a 0 value will return immediately, :majority will wait until
the operation propagates to a majority of members in the replica set
(Default: 1)
* `:j` If true, the write operation will only return after it has been
committed to journal - (Default: false)
* `:wtimeout` - If the write concern is not satisfied in the specified
interval, the operation returns an error
"""
use Bitwise
use Mongo.Messages
alias Mongo.Query
alias Mongo.ReadPreference
alias Mongo.TopologyDescription
alias Mongo.Topology
alias Mongo.UrlParser
@timeout 5000
@dialyzer nowarn_function: [count_documents!: 4]
@type conn :: DbConnection.Conn
@type collection :: String.t()
@opaque cursor :: Mongo.Cursor.t() | Mongo.AggregationCursor.t()
@type result(t) :: :ok | {:ok, t} | {:error, Mongo.Error.t()}
@type write_result(t) ::
:ok | {:ok, t} | {:error, Mongo.Error.t()} | {:error, Mongo.WriteError.t()}
@type result!(t) :: nil | t | no_return
defmacrop bangify(result) do
quote do
case unquote(result) do
{:ok, value} -> value
{:error, error} -> raise error
:ok -> nil
end
end
end
@type initial_type :: :unknown | :single | :replica_set_no_primary | :sharded
@doc """
Start and link to a database connection process.
### Options
* `:database` - The database to use (required)
* `:hostname` - The host to connect to (require)
* `:port` - The port to connect to your server (default: 27017)
* `:url` - A mongo connection url. Can be used in place of `:hostname` and
* `:socket_dir` - Connect to MongoDB via UNIX sockets in the given directory.
The socket name is derived based on the port. This is the preferred method
for configuring sockets and it takes precedence over the hostname. If you
are connecting to a socket outside of the MongoDB convection, use
`:socket` instead.
* `:socket` - Connect to MongoDB via UNIX sockets in the given path.
This option takes precedence over `:hostname` and `:socket_dir`.
* `:database` (optional)
* `:seeds` - A list of host names in the cluster. Can be used in place of
`:hostname` (optional)
* `:username` - The User to connect with (optional)
* `:password` - The password to connect with (optional)
* `:auth` - List of additional users to authenticate as a keyword list with
`:username` and `:password` keys (optional)
* `:auth_source` - The database to authenticate against
* `:set_name` - The name of the replica set to connect to (required if
connecting to a replica set)
* `:type` - a hint of the topology type. See `t:initial_type/0` for
valid values (default: `:unknown`)
* `:pool` - The pool module to use, see `DBConnection` for pool dependent
options, this option must be included with all requests contacting the
pool if not `DBConnection.Connection` (default: `DBConnection.Connection`)
* `:idle` - The idle strategy, `:passive` to avoid checkin when idle and
* `:active` to checking when idle (default: `:passive`)
* `:idle_timeout` - The idle timeout to ping the database (default: `1_000`)
* `:connect_timeout_ms` - The maximum timeout for the initial connection
(default: `5_000`)
* `:backoff_min` - The minimum backoff interval (default: `1_000`)
* `:backoff_max` - The maximum backoff interval (default: `30_000`)
* `:backoff_type` - The backoff strategy, `:stop` for no backoff and to
stop, `:exp` of exponential, `:rand` for random and `:ran_exp` for random
exponential (default: `:rand_exp`)
* `:after_connect` - A function to run on connect use `run/3`. Either a
1-arity fun, `{module, function, args}` with `DBConnection.t`, prepended
to `args` or `nil` (default: `nil`)
* `:auth_mechanism` - options for the mongo authentication mechanism,
currently only supports `:x509` atom as a value
* `:ssl` - Set to `true` if ssl should be used (default: `false`)
* `:ssl_opts` - A list of ssl options, see the ssl docs
### Error Reasons
* `:single_topology_multiple_hosts` - A topology of `:single` was set
but multiple hosts were given
* `:set_name_bad_topology` - A `:set_name` was given but the topology was
set to something other than `:replica_set_no_primary` or `:single`
"""
@spec start_link(Keyword.t()) :: {:ok, pid} | {:error, Mongo.Error.t() | atom}
def start_link(opts) do
opts
|> UrlParser.parse_url()
|> Mongo.ConfigHide.mask_password()
|> Topology.start_link()
end
def child_spec(opts, child_opts \\ []) do
child_opts
|> Map.new()
|> Map.merge(%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]}
})
end
@doc """
Generates a new `BSON.ObjectId`.
"""
@spec object_id :: BSON.ObjectId.t()
def object_id do
Mongo.IdServer.new()
end
@doc """
Performs aggregation operation using the aggregation pipeline.
## Options
* `:allow_disk_use` - Enables writing to temporary files (Default: false)
* `:collation` - Optionally specifies a collation to use in MongoDB 3.4 and
* `:max_time` - Specifies a time limit in milliseconds
* `:use_cursor` - Use a cursor for a batched response (Default: true)
"""
  @spec aggregate(GenServer.server(), collection, [BSON.document()], Keyword.t()) :: cursor
  def aggregate(topology_pid, coll, pipeline, opts \\ []) do
    # Build the `aggregate` command document; options the caller did not
    # supply are nil and get removed by filter_nils/1.
    query =
      [
        aggregate: coll,
        pipeline: pipeline,
        allowDiskUse: opts[:allow_disk_use],
        collation: opts[:collation],
        maxTimeMS: opts[:max_time],
        hint: opts[:hint]
      ]
      |> filter_nils()

    # Probe used to learn the negotiated wire protocol version of `conn`.
    wv_query = %Query{action: :wire_version}

    with {:ok, query} <- Mongo.Session.add_session(query, opts[:session]),
         {:ok, conn, _, _} <- select_server(topology_pid, :read, opts),
         {:ok, _query, version} <- DBConnection.execute(conn, wv_query, [], defaults(opts)) do
      # Cursor batching needs wire version >= 1 and can be disabled with
      # `use_cursor: false` (defaults to true).
      cursor? = version >= 1 and Keyword.get(opts, :use_cursor, true)
      opts = Keyword.drop(opts, ~w(allow_disk_use max_time use_cursor)a)

      if cursor? do
        # Honor the caller's `:batch_size` when batching is available.
        query = query ++ [cursor: filter_nils(%{batchSize: opts[:batch_size]})]
        aggregation_cursor(conn, "$cmd", query, nil, opts)
      else
        query = query ++ [cursor: %{}]
        aggregation_cursor(conn, "$cmd", query, nil, opts)
      end
    end
  end
@doc """
Finds a document and updates it (using atomic modifiers).
## Options
* `:bypass_document_validation` - Allows the write to opt-out of document
level validation
* `:max_time` - The maximum amount of time to allow the query to run (in MS)
* `:projection` - Limits the fields to return for all matching documents.
* `:return_document` - Returns the replaced or inserted document rather than
the original. Values are :before or :after. (default is :before)
* `:sort` - Determines which document the operation modifies if the query
selects multiple documents.
* `:upsert` - Create a document if no document matches the query or updates
the document.
    * `:collation` - Optionally specifies a collation to use in MongoDB 3.4 and
      higher.
"""
  @spec find_one_and_update(
          GenServer.server(),
          collection,
          BSON.document(),
          BSON.document(),
          Keyword.t()
        ) :: result(BSON.document()) | {:ok, nil}
  def find_one_and_update(topology_pid, coll, filter, update, opts \\ []) do
    # Validates that `update` is an atomic-modifier document (see
    # modifier_docs/2); the result itself is unused.
    _ = modifier_docs(update, :update)

    # `findAndModify` command; nil-valued options are stripped below.
    query =
      [
        findAndModify: coll,
        query: filter,
        update: update,
        bypassDocumentValidation: opts[:bypass_document_validation],
        maxTimeMS: opts[:max_time],
        fields: opts[:projection],
        new: should_return_new(opts[:return_document]),
        sort: opts[:sort],
        upsert: opts[:upsert],
        collation: opts[:collation]
      ]
      |> filter_nils()

    # These options were folded into the command document above, so keep
    # them out of the connection-level options.
    opts =
      Keyword.drop(
        opts,
        ~w(bypass_document_validation max_time projection return_document sort upsert collation)a
      )

    # The affected document (or nil when nothing matched) is returned under
    # the reply's "value" key.
    with {:ok, query} <- Mongo.Session.add_session(query, opts[:session]),
         {:ok, conn, _, _} <- select_server(topology_pid, :write, opts),
         {:ok, doc} <- direct_command(conn, query, opts),
         do: {:ok, doc["value"]}
  end
@doc """
Finds a document and replaces it.
## Options
* `:bypass_document_validation` - Allows the write to opt-out of document
level validation
* `:max_time` - The maximum amount of time to allow the query to run (in MS)
* `:projection` - Limits the fields to return for all matching documents.
* `:return_document` - Returns the replaced or inserted document rather than
the original. Values are :before or :after. (default is :before)
* `:sort` - Determines which document the operation modifies if the query
selects multiple documents.
* `:upsert` - Create a document if no document matches the query or updates
the document.
* `:collation` - Optionally specifies a collation to use in MongoDB 3.4 and
higher.
"""
@spec find_one_and_replace(
GenServer.server(),
collection,
BSON.document(),
BSON.document(),
Keyword.t()
) :: result(BSON.document())
def find_one_and_replace(topology_pid, coll, filter, replacement, opts \\ []) do
_ = modifier_docs(replacement, :replace)
query =
filter_nils(
findAndModify: coll,
query: filter,
update: replacement,
bypassDocumentValidation: opts[:bypass_document_validation],
maxTimeMS: opts[:max_time],
fields: opts[:projection],
new: should_return_new(opts[:return_document]),
sort: opts[:sort],
upsert: opts[:upsert],
collation: opts[:collation]
)
opts =
Keyword.drop(
opts,
~w(bypass_document_validation max_time projection return_document sort upsert collation)a
)
with {:ok, conn, _, _} <- select_server(topology_pid, :write, opts),
{:ok, doc} <- direct_command(conn, query, opts),
do: {:ok, doc["value"]}
end
defp should_return_new(:after), do: true
defp should_return_new(:before), do: false
defp should_return_new(_), do: false
@doc """
Finds a document and deletes it.
## Options
* `:max_time` - The maximum amount of time to allow the query to run (in MS)
* `:projection` - Limits the fields to return for all matching documents.
* `:sort` - Determines which document the operation modifies if the query selects multiple documents.
* `:collation` - Optionally specifies a collation to use in MongoDB 3.4 and higher.
"""
  @spec find_one_and_delete(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
          result(BSON.document())
  def find_one_and_delete(topology_pid, coll, filter, opts \\ []) do
    # `findAndModify` with `remove: true` deletes the first matching
    # document; nil-valued options are stripped by filter_nils/1.
    query =
      filter_nils(
        findAndModify: coll,
        query: filter,
        remove: true,
        maxTimeMS: opts[:max_time],
        fields: opts[:projection],
        sort: opts[:sort],
        collation: opts[:collation]
      )

    # These options were folded into the command document above.
    opts = Keyword.drop(opts, ~w(max_time projection sort collation)a)

    # The deleted document is returned under the reply's "value" key.
    with {:ok, conn, _, _} <- select_server(topology_pid, :write, opts),
         {:ok, doc} <- direct_command(conn, query, opts),
         do: {:ok, doc["value"]}
  end
@doc false
@spec count(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
result(non_neg_integer)
def count(topology_pid, coll, filter, opts \\ []) do
query =
filter_nils(
count: coll,
query: filter,
limit: opts[:limit],
skip: opts[:skip],
hint: opts[:hint],
collation: opts[:collation]
)
opts = Keyword.drop(opts, ~w(limit skip hint collation)a)
# Mongo 2.4 and 2.6 returns a float
with {:ok, doc} <- command(topology_pid, query, opts),
do: {:ok, trunc(doc["n"])}
end
@doc false
@spec count!(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
result!(non_neg_integer)
def count!(topology_pid, coll, filter, opts \\ []) do
bangify(count(topology_pid, coll, filter, opts))
end
@doc """
Returns the count of documents that would match a find/4 query.
## Options
* `:limit` - Maximum number of documents to fetch with the cursor
* `:skip` - Number of documents to skip before returning the first
"""
  @spec count_documents(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
          result(non_neg_integer)
  def count_documents(topology_pid, coll, filter, opts \\ []) do
    # Express the count as an aggregation: match the filter, apply the
    # optional skip/limit window, then sum the surviving documents.
    # filter_nils drops the $skip/$limit stages when not requested, and
    # List.wrap turns each {op, arg} tuple into a one-entry stage document.
    pipeline =
      [
        {"$match", Map.new(filter)},
        {"$skip", opts[:skip]},
        {"$limit", opts[:limit]},
        {"$group", %{"_id" => nil, "n" => %{"$sum" => 1}}}
      ]
      |> filter_nils
      |> Enum.map(&List.wrap/1)

    documents =
      topology_pid
      |> Mongo.aggregate(coll, pipeline, opts)
      |> Enum.to_list()

    # An empty reply means the $match stage eliminated every document.
    case documents do
      [%{"n" => count}] -> {:ok, count}
      [] -> {:ok, 0}
    end
  end
@doc """
Similar to `count_documents/4` but unwraps the result and raises on error.
"""
@spec count_documents!(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
result!(non_neg_integer)
def count_documents!(topology_pid, coll, filter, opts \\ []) do
bangify(count_documents(topology_pid, coll, filter, opts))
end
@doc """
Estimate the number of documents in a collection using collection metadata.
"""
@spec estimated_document_count(GenServer.server(), collection, Keyword.t()) ::
result(non_neg_integer)
def estimated_document_count(topology_pid, coll, opts) do
opts = Keyword.drop(opts, [:skip, :limit, :hint, :collation])
count(topology_pid, coll, %{}, opts)
end
@doc """
Similar to `estimated_document_count/3` but unwraps the result and raises on
error.
"""
@spec estimated_document_count!(GenServer.server(), collection, Keyword.t()) ::
result!(non_neg_integer)
def estimated_document_count!(topology_pid, coll, opts) do
bangify(estimated_document_count(topology_pid, coll, opts))
end
@doc """
Finds the distinct values for a specified field across a collection.
## Options
* `:max_time` - Specifies a time limit in milliseconds
    * `:collation` - Optionally specifies a collation to use in MongoDB 3.4 and
      higher.
"""
@spec distinct(GenServer.server(), collection, String.t() | atom, BSON.document(), Keyword.t()) ::
result([BSON.t()])
def distinct(topology_pid, coll, field, filter, opts \\ []) do
query =
filter_nils(
distinct: coll,
key: field,
query: filter,
collation: opts[:collation],
maxTimeMS: opts[:max_time]
)
opts = Keyword.drop(opts, ~w(max_time)a)
with {:ok, conn, _, _} <- select_server(topology_pid, :read, opts),
{:ok, doc} <- direct_command(conn, query, opts),
do: {:ok, doc["values"]}
end
@doc """
Similar to `distinct/5` but unwraps the result and raises on error.
"""
@spec distinct!(GenServer.server(), collection, String.t() | atom, BSON.document(), Keyword.t()) ::
result!([BSON.t()])
def distinct!(topology_pid, coll, field, filter, opts \\ []) do
bangify(distinct(topology_pid, coll, field, filter, opts))
end
@doc """
Selects documents in a collection and returns a cursor for the selected
documents.
## Options
* `:comment` - Associates a comment to a query
* `:cursor_type` - Set to :tailable or :tailable_await to return a tailable
cursor
* `:max_time` - Specifies a time limit in milliseconds
* `:modifiers` - Meta-operators modifying the output or behavior of a query,
see http://docs.mongodb.org/manual/reference/operator/query-modifier/
* `:cursor_timeout` - Set to false if cursor should not close after 10
minutes (Default: true)
* `:sort` - Sorts the results of a query in ascending or descending order
* `:projection` - Limits the fields to return for all matching document
* `:skip` - The number of documents to skip before returning (Default: 0)
"""
@spec find(GenServer.server(), collection, BSON.document(), Keyword.t()) :: cursor
def find(topology_pid, coll, filter, opts \\ []) do
query =
[
{"$comment", opts[:comment]},
{"$maxTimeMS", opts[:max_time]},
{"$orderby", opts[:sort]}
] ++ Enum.into(opts[:modifiers] || [], [])
query = filter_nils(query)
query =
if query == [] do
filter
else
filter = normalize_doc(filter)
filter = if List.keymember?(filter, "$query", 0), do: filter, else: [{"$query", filter}]
filter ++ query
end
select = opts[:projection]
opts =
if Keyword.get(opts, :cursor_timeout, true),
do: opts,
else: [{:no_cursor_timeout, true} | opts]
drop = ~w(comment max_time modifiers sort cursor_type projection cursor_timeout)a
opts = cursor_type(opts[:cursor_type]) ++ Keyword.drop(opts, drop)
with {:ok, conn, slave_ok, _} <- select_server(topology_pid, :read, opts),
opts = Keyword.put(opts, :slave_ok, slave_ok),
do: cursor(conn, coll, query, select, opts)
end
@doc """
Selects a single document in a collection and returns either a document
or nil.
If multiple documents satisfy the query, this method returns the first document
according to the natural order which reflects the order of documents on the disk.
## Options
* `:comment` - Associates a comment to a query
* `:cursor_type` - Set to :tailable or :tailable_await to return a tailable
cursor
* `:max_time` - Specifies a time limit in milliseconds
* `:modifiers` - Meta-operators modifying the output or behavior of a query,
see http://docs.mongodb.org/manual/reference/operator/query-modifier/
* `:cursor_timeout` - Set to false if cursor should not close after 10
minutes (Default: true)
* `:projection` - Limits the fields to return for all matching document
* `:skip` - The number of documents to skip before returning (Default: 0)
"""
@spec find_one(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
BSON.document() | nil
def find_one(conn, coll, filter, opts \\ []) do
opts =
opts
|> Keyword.delete(:order_by)
|> Keyword.delete(:sort)
|> Keyword.put(:limit, 1)
|> Keyword.put(:batch_size, 1)
with [elem] <- Enum.to_list(find(conn, coll, filter, opts)) do
elem
else
[] -> nil
error -> error
end
end
@doc false
# Executes a low-level OP_QUERY find and returns the raw reply fields
# (cursor id, starting offset, count, and the documents themselves).
# `maybe_failure/1` converts protocol-level failure flags into `{:error, _}`.
def raw_find(conn, coll, query, select, opts) do
params = [query, select]
query = %Query{action: :find, extra: coll}
with {:ok, _query, reply} <- DBConnection.execute(conn, query, params, defaults(opts)),
:ok <- maybe_failure(reply),
op_reply(docs: docs, cursor_id: cursor_id, from: from, num: num) = reply,
do: {:ok, %{from: from, num: num, cursor_id: cursor_id, docs: docs}}
end
@doc false
# Fetches the next batch of documents for an open cursor via the `getMore`
# command. `getMore` must be the first key of the command document; unset
# options are stripped before sending.
def get_more(conn, coll, cursor, opts) do
  command =
    [
      getMore: cursor,
      collection: coll,
      batchSize: opts[:batch_size],
      maxTimeMS: opts[:max_time_ms]
    ]
    |> filter_nils()

  direct_command(conn, command, opts)
end
@doc false
# Tells the server to close the given cursors. Returns `:ok` on success and
# passes any non-matching `DBConnection.execute/4` result through unchanged.
def kill_cursors(conn, cursor_ids, opts) do
  query = %Query{action: :kill_cursors, extra: cursor_ids}

  case DBConnection.execute(conn, query, [], defaults(opts)) do
    {:ok, _query, :ok} -> :ok
    other -> other
  end
end
@doc """
Issue a database command. If the command has parameters use a keyword
list for the document because the "command key" has to be the first
in the document.
"""
@spec command(GenServer.server(), BSON.document(), Keyword.t()) :: result(BSON.document())
def command(topology_pid, query, opts \\ []) do
rp = ReadPreference.defaults(%{mode: :primary})
rp_opts = [read_preference: Keyword.get(opts, :read_preference, rp)]
with {:ok, conn, slave_ok, _} <- select_server(topology_pid, :read, rp_opts) do
opts = Keyword.put(opts, :slave_ok, slave_ok)
direct_command(conn, query, opts)
end
end
@doc false
# Runs a command on a specific connection (no server selection) and
# normalizes the wire-protocol reply into `{:ok, doc}` / `{:error, error}`.
@spec direct_command(pid, BSON.document(), Keyword.t()) ::
{:ok, BSON.document() | nil} | {:error, Mongo.Error.t()}
def direct_command(conn, query, opts \\ []) do
params = [query]
query = %Query{action: :command}
with {:ok, _query, reply} <- DBConnection.execute(conn, query, params, defaults(opts)) do
case reply do
# Protocol-level query failure: the flags word has the QueryFailure bit
# set and the single returned document carries "$err"/"code".
op_reply(flags: flags, docs: [%{"$err" => reason, "code" => code}])
when (@reply_query_failure &&& flags) != 0 ->
{:error, Mongo.Error.exception(message: reason, code: code)}
# CursorNotFound bit set: the cursor id was unknown to the server.
op_reply(flags: flags) when (@reply_cursor_not_found &&& flags) != 0 ->
{:error, Mongo.Error.exception(message: "cursor not found")}
# Command-level failure: the reply document itself reports ok: 0.
op_reply(docs: [%{"ok" => 0.0, "errmsg" => reason} = error]) ->
{:error, %Mongo.Error{message: "command failed: #{reason}", code: error["code"]}}
# Success: feed cluster/operation time back into the session (if any)
# and hand the document to the caller.
op_reply(docs: [%{"ok" => ok} = doc]) when ok == 1 ->
Mongo.Session.update_session(doc, opts[:session])
{:ok, doc}
# TODO: Check if needed
op_reply(docs: []) ->
{:ok, nil}
end
end
end
@doc """
Similar to `command/3` but unwraps the result and raises on error.
"""
@spec command!(GenServer.server(), BSON.document(), Keyword.t()) :: result!(BSON.document())
def command!(topology_pid, query, opts \\ []) do
bangify(command(topology_pid, query, opts))
end
@doc """
Insert a single document into the collection.
If the document is missing the `_id` field or it is `nil`, an ObjectId
will be generated, inserted into the document, and returned in the result struct.
## Examples
Mongo.insert_one(pid, "users", %{first_name: "John", last_name: "Smith"})
"""
@spec insert_one(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
write_result(Mongo.InsertOneResult.t())
def insert_one(topology_pid, coll, doc, opts \\ []) do
assert_single_doc!(doc)
{[id], [doc]} = assign_ids([doc])
write_concern =
filter_nils(%{
w: Keyword.get(opts, :w),
j: Keyword.get(opts, :j),
wtimeout: Keyword.get(opts, :wtimeout)
})
query =
[
insert: coll,
documents: [doc],
ordered: Keyword.get(opts, :ordered),
writeConcern: write_concern,
bypassDocumentValidation: Keyword.get(opts, :bypass_document_validation)
]
|> filter_nils()
with {:ok, query} <- Mongo.Session.add_session(query, opts[:session]),
{:ok, conn, _, _} <- select_server(topology_pid, :write, opts),
{:ok, doc} <- direct_command(conn, query, opts) do
case doc do
%{"writeErrors" => _} ->
{:error,
%Mongo.WriteError{n: doc["n"], ok: doc["ok"], write_errors: doc["writeErrors"]}}
_ ->
case Map.get(write_concern, :w) do
0 ->
{:ok, %Mongo.InsertOneResult{acknowledged: false}}
_ ->
{:ok, %Mongo.InsertOneResult{inserted_id: id}}
end
end
end
end
@doc """
Similar to `insert_one/4` but unwraps the result and raises on error.
"""
@spec insert_one!(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
result!(Mongo.InsertOneResult.t())
def insert_one!(topology_pid, coll, doc, opts \\ []) do
bangify(insert_one(topology_pid, coll, doc, opts))
end
@doc """
Insert multiple documents into the collection.
If any of the documents is missing the `_id` field or it is `nil`, an ObjectId
will be generated, and insertd into the document.
Ids of all documents will be returned in the result struct.
## Options
* `:continue_on_error` - even if insert fails for one of the documents
continue inserting the remaining ones (default: `false`)
* `:ordered` - A boolean specifying whether the mongod instance should
perform an ordered or unordered insert. (default: `true`)
## Examples
Mongo.insert_many(pid, "users", [%{first_name: "John", last_name: "Smith"}, %{first_name: "Jane", last_name: "Doe"}])
"""
@spec insert_many(GenServer.server(), collection, [BSON.document()], Keyword.t()) ::
write_result(Mongo.InsertManyResult.t())
def insert_many(topology_pid, coll, docs, opts \\ []) do
assert_many_docs!(docs)
{ids, docs} = assign_ids(docs)
write_concern =
filter_nils(%{
w: Keyword.get(opts, :w),
j: Keyword.get(opts, :j),
wtimeout: Keyword.get(opts, :wtimeout)
})
query =
filter_nils(
insert: coll,
documents: docs,
ordered: Keyword.get(opts, :ordered),
writeConcern: write_concern,
bypassDocumentValidation: Keyword.get(opts, :bypass_document_validation)
)
with {:ok, conn, _, _} <- select_server(topology_pid, :write, opts),
{:ok, doc} <- direct_command(conn, query, opts) do
case doc do
%{"writeErrors" => _} ->
{:error,
%Mongo.WriteError{n: doc["n"], ok: doc["ok"], write_errors: doc["writeErrors"]}}
_ ->
case Map.get(write_concern, :w) do
0 ->
{:ok, %Mongo.InsertManyResult{acknowledged: false}}
_ ->
{:ok, %Mongo.InsertManyResult{inserted_ids: ids}}
end
end
end
end
@doc """
Similar to `insert_many/4` but unwraps the result and raises on error.
"""
@spec insert_many!(GenServer.server(), collection, [BSON.document()], Keyword.t()) ::
result!(Mongo.InsertManyResult.t())
def insert_many!(topology_pid, coll, docs, opts \\ []) do
bangify(insert_many(topology_pid, coll, docs, opts))
end
@doc """
Remove a document matching the filter from the collection.
"""
@spec delete_one(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
write_result(Mongo.DeleteResult.t())
def delete_one(topology_pid, coll, filter, opts \\ []) do
do_delete(topology_pid, coll, filter, 1, opts)
end
@doc """
Similar to `delete_one/4` but unwraps the result and raises on error.
"""
@spec delete_one!(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
result!(Mongo.DeleteResult.t())
def delete_one!(topology_pid, coll, filter, opts \\ []) do
bangify(delete_one(topology_pid, coll, filter, opts))
end
@doc """
Remove all documents matching the filter from the collection.
"""
@spec delete_many(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
write_result(Mongo.DeleteResult.t())
def delete_many(topology_pid, coll, filter, opts \\ []) do
do_delete(topology_pid, coll, filter, 0, opts)
end
@doc """
Similar to `delete_many/4` but unwraps the result and raises on error.
"""
@spec delete_many!(GenServer.server(), collection, BSON.document(), Keyword.t()) ::
result!(Mongo.DeleteResult.t())
def delete_many!(topology_pid, coll, filter, opts \\ []) do
bangify(delete_many(topology_pid, coll, filter, opts))
end
# Shared implementation of delete_one/delete_many. `limit` is the server-side
# delete limit: 1 removes at most one document, 0 removes all matches.
defp do_delete(topology_pid, coll, filter, limit, opts) do
# Write concern is only sent when at least one of w/j/wtimeout is set.
write_concern =
filter_nils(%{
w: Keyword.get(opts, :w),
j: Keyword.get(opts, :j),
wtimeout: Keyword.get(opts, :wtimeout)
})
delete =
filter_nils(
q: filter,
limit: limit,
collation: Keyword.get(opts, :collation)
)
# `delete` must be the first key of the command document.
query =
filter_nils(
delete: coll,
deletes: [delete],
ordered: Keyword.get(opts, :ordered),
writeConcern: write_concern
)
with {:ok, conn, _, _} <- select_server(topology_pid, :write, opts),
{:ok, doc} <- direct_command(conn, query, opts) do
case doc do
%{"writeErrors" => write_errors} ->
{:error, %Mongo.WriteError{n: doc["n"], ok: doc["ok"], write_errors: write_errors}}
# Acknowledged reply: "n" is the number of deleted documents.
%{"n" => n} ->
{:ok, %Mongo.DeleteResult{deleted_count: n}}
# No "n" in the reply means the write was unacknowledged (w: 0).
%{"ok" => ok} when ok == 1 ->
{:ok, %Mongo.DeleteResult{acknowledged: false}}
end
end
end
@doc """
Replace a single document matching the filter with the new document.
## Options
* `:upsert` - if set to `true` creates a new document when no document
matches the filter (default: `false`)
"""
@spec replace_one(GenServer.server(), collection, BSON.document(), BSON.document(), Keyword.t()) ::
write_result(Mongo.UpdateResult.t())
def replace_one(topology_pid, coll, filter, replacement, opts \\ []) do
_ = modifier_docs(replacement, :replace)
do_update(topology_pid, coll, filter, replacement, false, opts)
end
@doc """
Similar to `replace_one/5` but unwraps the result and raises on error.
"""
@spec replace_one!(
GenServer.server(),
collection,
BSON.document(),
BSON.document(),
Keyword.t()
) :: result!(Mongo.UpdateResult.t())
def replace_one!(topology_pid, coll, filter, replacement, opts \\ []) do
bangify(replace_one(topology_pid, coll, filter, replacement, opts))
end
@doc """
Update a single document matching the filter.
Uses MongoDB update operators to specify the updates. For more information
please refer to the
[MongoDB documentation](http://docs.mongodb.org/manual/reference/operator/update/)
Example:
Mongo.update_one(MongoPool,
"my_test_collection",
%{"filter_field": "filter_value"},
%{"$set": %{"modified_field": "new_value"}})
## Options
* `:upsert` - if set to `true` creates a new document when no document
matches the filter (default: `false`)
"""
@spec update_one(GenServer.server(), collection, BSON.document(), BSON.document(), Keyword.t()) ::
write_result(Mongo.UpdateResult.t())
def update_one(topology_pid, coll, filter, update, opts \\ []) do
_ = modifier_docs(update, :update)
do_update(topology_pid, coll, filter, update, false, opts)
end
@doc """
Similar to `update_one/5` but unwraps the result and raises on error.
"""
@spec update_one!(GenServer.server(), collection, BSON.document(), BSON.document(), Keyword.t()) ::
result!(Mongo.UpdateResult.t())
def update_one!(topology_pid, coll, filter, update, opts \\ []) do
bangify(update_one(topology_pid, coll, filter, update, opts))
end
@doc """
Update all documents matching the filter.
Uses MongoDB update operators to specify the updates. For more information
please refer to the
[MongoDB documentation](http://docs.mongodb.org/manual/reference/operator/update/)
## Options
* `:upsert` - if set to `true` creates a new document when no document
matches the filter (default: `false`)
"""
@spec update_many(GenServer.server(), collection, BSON.document(), BSON.document(), Keyword.t()) ::
write_result(Mongo.UpdateResult.t())
def update_many(topology_pid, coll, filter, update, opts \\ []) do
_ = modifier_docs(update, :update)
do_update(topology_pid, coll, filter, update, true, opts)
end
@doc """
Similar to `update_many/5` but unwraps the result and raises on error.
"""
@spec update_many!(
GenServer.server(),
collection,
BSON.document(),
BSON.document(),
Keyword.t()
) :: result!(Mongo.UpdateResult.t())
def update_many!(topology_pid, coll, filter, update, opts \\ []) do
bangify(update_many(topology_pid, coll, filter, update, opts))
end
# Shared implementation of replace_one/update_one/update_many.
# `multi` tells the server whether to update every matching document.
defp do_update(topology_pid, coll, filter, update, multi, opts) do
# Write concern is only sent when at least one of w/j/wtimeout is set.
write_concern =
filter_nils(%{
w: Keyword.get(opts, :w),
j: Keyword.get(opts, :j),
wtimeout: Keyword.get(opts, :wtimeout)
})
update =
filter_nils(
q: filter,
u: update,
upsert: Keyword.get(opts, :upsert),
multi: multi,
collation: Keyword.get(opts, :collation),
arrayFilters: Keyword.get(opts, :array_filters)
)
# `update` must be the first key of the command document.
query =
[
update: coll,
updates: [update],
ordered: Keyword.get(opts, :ordered),
writeConcern: write_concern,
bypassDocumentValidation: Keyword.get(opts, :bypass_document_validation)
]
|> filter_nils()
with {:ok, query} <- Mongo.Session.add_session(query, opts[:session]),
{:ok, conn, _, _} <- select_server(topology_pid, :write, opts),
{:ok, doc} <- direct_command(conn, query, opts) do
case doc do
%{"writeErrors" => write_errors} ->
{:error, %Mongo.WriteError{n: doc["n"], ok: doc["ok"], write_errors: write_errors}}
# Acknowledged reply: "n" matched documents, "nModified" changed ones.
%{"n" => n, "nModified" => n_modified} ->
{:ok,
%Mongo.UpdateResult{
matched_count: n,
modified_count: n_modified,
upserted_ids: upserted_ids(doc["upserted"])
}}
# No counters in the reply means the write was unacknowledged (w: 0).
%{"ok" => ok} when ok == 1 ->
{:ok, %Mongo.UpdateResult{acknowledged: false}}
end
end
end
# Extracts the `_id` of every upserted document reported by the server;
# `nil` when the reply contained no "upserted" entry.
defp upserted_ids(nil), do: nil
defp upserted_ids(docs), do: for(doc <- docs, do: doc["_id"])
@doc """
Returns a cursor to enumerate all indexes
"""
@spec list_indexes(GenServer.server(), String.t(), Keyword.t()) :: cursor
def list_indexes(topology_pid, coll, opts \\ []) do
with {:ok, conn, _, _} <- select_server(topology_pid, :read, opts) do
aggregation_cursor(conn, "$cmd", [listIndexes: coll], nil, opts)
end
end
@doc """
Convenient function that returns a cursor with the names of the indexes.
"""
@spec list_index_names(GenServer.server(), String.t(), Keyword.t()) :: %Stream{}
def list_index_names(topology_pid, coll, opts \\ []) do
list_indexes(topology_pid, coll, opts)
|> Stream.map(fn %{"name" => name} -> name end)
end
@doc """
Getting Collection Names
"""
@spec show_collections(GenServer.server(), Keyword.t()) :: cursor
def show_collections(topology_pid, opts \\ []) do
##
# from the specs
# https://github.com/mongodb/specifications/blob/f4bb783627e7ed5c4095c5554d35287956ef8970/source/enumerate-collections.rst#post-mongodb-280-rc3-versions
#
# In versions 2.8.0-rc3 and later, the listCollections command returns a cursor!
#
with {:ok, conn, _, _} <- select_server(topology_pid, :read, opts) do
aggregation_cursor(conn, "$cmd", [listCollections: 1], nil, opts)
|> Stream.filter(fn coll -> coll["type"] == "collection" end)
|> Stream.map(fn coll -> coll["name"] end)
end
end
@doc """
Start new session for given `topology_pid`.
## Options
- `:causal_consistency` - whether the causal consistency should be persisted within
session. Default to `true`.
- `:read_concern` - what should be the level for read consistency in session. Should
be map with value `:level` that is one of the described in [*Read Concern*][rc]
documentation. Applied only when `:casual_consistency` is set to `true`. Bu default
uses cluster configuration.
- `:retry_writes` - whether retryable faliures should be retried. Defaults to `true`.
`:causal_consistency` can be set only during the session creation, but the `:read_concern`
can be set for each transaction independently.
[rc]: https://docs.mongodb.com/manual/reference/read-concern/index.html
"""
@spec start_session(GenServer.server(), keyword()) ::
{:ok, Mongo.Session.session()} | {:error, term()}
def start_session(topology_pid, opts \\ []) do
Mongo.SessionPool.checkout(topology_pid, opts)
end
@spec with_session(GenServer.server(), (Mongo.Session.session() -> return)) :: return
      when return: term()
@spec with_session(GenServer.server(), keyword(), (Mongo.Session.session() -> return)) :: return
      when return: term()
# Runs `func` with a freshly checked-out session, guaranteeing the session is
# ended afterwards even if `func` raises. Session-creation errors are
# returned unchanged.
def with_session(topology_pid, opts \\ [], func) do
  case start_session(topology_pid, opts) do
    {:ok, session} ->
      try do
        func.(session)
      after
        Mongo.Session.end_session(session)
      end

    error ->
      error
  end
end
# Picks a random suitable server for the given operation type and resolves it
# to a connection. Returns `{:ok, connection, slave_ok, mongos?}`.
def select_server(topology_pid, type, opts \\ []) do
with {:ok, servers, slave_ok, mongos?} <-
select_servers(topology_pid, type, opts) do
if Enum.empty?(servers) do
# NOTE(review): an empty server list yields `[]` in the connection
# position rather than a pid — confirm callers handle this shape.
{:ok, [], slave_ok, mongos?}
else
# Random choice spreads load across eligible servers.
with {:ok, connection} <-
servers
|> Enum.random()
|> get_connection(topology_pid) do
{:ok, connection, slave_ok, mongos?}
end
end
end
end
# Entry point that records the selection start time, used by the 4-arity
# clause for server-selection timeout accounting.
defp select_servers(topology_pid, type, opts) do
  select_servers(topology_pid, type, opts, System.monotonic_time())
end
# Server-selection timeout in milliseconds.
@sel_timeout 30000
# NOTE: Should think about the handling completely in the Topology GenServer
# in order to make the entire operation atomic instead of querying
# and then potentially having an outdated topology when waiting for the
# connection.
# Queries the current topology for suitable servers; when none are available
# yet, waits for a connection (bounded by @sel_timeout measured from
# `start_time`) and retries.
defp select_servers(topology_pid, type, opts, start_time) do
topology = Topology.topology(topology_pid)
with {:ok, servers, slave_ok, mongos?} <-
TopologyDescription.select_servers(topology, type, opts) do
if Enum.empty?(servers) do
case Topology.wait_for_connection(topology_pid, @sel_timeout, start_time) do
{:ok, _servers} ->
# A connection appeared; re-select against the refreshed topology.
select_servers(topology_pid, type, opts, start_time)
{:error, :selection_timeout} ->
{:error,
%Mongo.Error{type: :network, message: "Topology selection timeout", code: 89}}
end
else
{:ok, servers, slave_ok, mongos?}
end
end
end
# Resolves a server address to its connection pid. A `nil` server yields
# `{:ok, nil}` so callers can treat "no server" uniformly; lookup errors
# are passed through unchanged.
defp get_connection(nil, _pid), do: {:ok, nil}

defp get_connection(server, pid),
  do: Topology.connection_for_address(pid, server)
# Validates that a document's keys match the operation type (:update requires
# "$"-operators, :replace forbids them). Only the FIRST key of a document is
# inspected. Clause order matters: the keyword-list head must come before the
# generic list clause, which recurses over a list of documents.
defp modifier_docs([{key, _} | _], type),
do: key |> key_to_string |> modifier_key(type)
# Empty maps carry no keys to validate.
defp modifier_docs(map, _type) when is_map(map) and map_size(map) == 0,
do: :ok
defp modifier_docs(map, type) when is_map(map),
do: Enum.at(map, 0) |> elem(0) |> key_to_string |> modifier_key(type)
defp modifier_docs(list, type) when is_list(list),
do: Enum.map(list, &modifier_docs(&1, type))
# Validates a single (already stringified) top-level key against the
# operation type: replacement documents must not use "$"-operators, update
# documents must use only "$"-operators. Everything else is accepted.
defp modifier_key("$" <> _rest = other, :replace) do
  raise(ArgumentError, "replace does not allow atomic modifiers, got: #{other}")
end

defp modifier_key("$" <> _rest, :update), do: :ok

defp modifier_key(<<_, _::binary>> = other, :update) do
  raise(ArgumentError, "update only allows atomic modifiers, got: #{other}")
end

defp modifier_key(_key, _type), do: :ok
# Normalizes a document key to a binary: binaries pass through untouched,
# atoms are converted. (Guards are disjoint, so clause order is irrelevant.)
defp key_to_string(key) when is_binary(key), do: key
defp key_to_string(key) when is_atom(key), do: Atom.to_string(key)
# Builds a lazy find cursor; no I/O happens until it is enumerated.
defp cursor(conn, coll, query, select, opts) do
%Mongo.Cursor{conn: conn, coll: coll, query: query, select: select, opts: opts}
end
# Builds a lazy cursor for commands that reply with a server-side cursor
# document (aggregate, listIndexes, listCollections, ...).
defp aggregation_cursor(conn, coll, query, select, opts) do
%Mongo.AggregationCursor{conn: conn, coll: coll, query: query, select: select, opts: opts}
end
# Removes entries whose value is `nil`. Keyword lists stay (order-preserving)
# keyword lists; maps stay maps.
defp filter_nils(keyword) when is_list(keyword) do
  for {key, value} <- keyword, not is_nil(value), do: {key, value}
end

defp filter_nils(map) when is_map(map) do
  for {key, value} <- map, not is_nil(value), into: %{}, do: {key, value}
end
# Converts a document (map or keyword) into an ordered key/value list while
# asserting that its keys are homogeneous: mixing atom and binary keys raises
# via invalid_doc/1. The accumulator is tagged with the key kind seen so far
# (:unknown | :atom | :binary) to detect mixing in a single pass.
defp normalize_doc(doc) do
Enum.reduce(doc, {:unknown, []}, fn
{key, _value}, {:binary, _acc} when is_atom(key) ->
invalid_doc(doc)
{key, _value}, {:atom, _acc} when is_binary(key) ->
invalid_doc(doc)
{key, value}, {_, acc} when is_atom(key) ->
{:atom, [{key, value} | acc]}
{key, value}, {_, acc} when is_binary(key) ->
{:binary, [{key, value} | acc]}
end)
# Prepending reversed the order; restore the original key order.
|> elem(1)
|> Enum.reverse()
end
# Raises for documents that mix atom and binary keys, which cannot be
# normalized unambiguously.
defp invalid_doc(doc) do
  raise ArgumentError,
        "invalid document containing atom and string keys: #{inspect(doc)}"
end
# Translates the `:cursor_type` option into wire-protocol query flags.
defp cursor_type(nil), do: []

defp cursor_type(:tailable) do
  [tailable_cursor: true]
end

defp cursor_type(:tailable_await) do
  [tailable_cursor: true, await_data: true]
end
# Asserts the argument looks like a single document: a map, an empty list,
# a keyword-shaped list, or any term with a Mongo.Encoder implementation.
# Raises ArgumentError otherwise. Clause order matters: the specific shapes
# must precede the catch-all Encoder check.
defp assert_single_doc!(doc) when is_map(doc), do: :ok
defp assert_single_doc!([]), do: :ok
defp assert_single_doc!([{_, _} | _]), do: :ok
defp assert_single_doc!(other) do
unless Mongo.Encoder.impl_for(other),
do: raise(ArgumentError, "expected single document, got: #{inspect(other)}"),
else: :ok
end
# Asserts the argument is a list of documents, raising ArgumentError
# otherwise.
# NOTE(review): the first clause accepts any non-empty list whose FIRST
# element is not a tuple without inspecting the rest — confirm this shallow
# check is intentional. The second clause relies on assert_single_doc!
# returning a truthy :ok (or raising), so Enum.all?/2 only fails by raising.
defp assert_many_docs!([first | _]) when not is_tuple(first), do: :ok
defp assert_many_docs!(docs) when is_list(docs),
do: Enum.all?(docs, &assert_single_doc!/1) && :ok
defp assert_many_docs!(other) do
raise ArgumentError, "expected list of documents, got: #{inspect(other)}"
end
# Applies the module-wide default timeout unless the caller provided one.
defp defaults(opts) do
Keyword.put_new(opts, :timeout, @timeout)
end
# Encodes each document via the Mongo.Encoder protocol and ensures it
# carries an `_id`, returning `{ids, docs}` as parallel lists.
defp assign_ids(list) when is_list(list) do
  list
  |> Enum.map(fn doc -> doc |> Mongo.Encoder.encode() |> assign_id() end)
  |> Enum.unzip()
end
# Ensures a document has a non-nil id, returning `{id, doc}`. Existing
# non-nil ids (atom or binary key) are kept; otherwise a fresh id is
# generated and prepended by add_id/2. Clause order matters: the map clauses
# with guards must precede the generic is_map clause.
defp assign_id(%{_id: id} = map) when id != nil,
  do: {id, map}

defp assign_id(%{"_id" => id} = map) when id != nil,
  do: {id, map}

defp assign_id([{_, _} | _] = keyword) do
  case Keyword.take(keyword, [:_id, "_id"]) do
    [{_key, id} | _] when id != nil ->
      {id, keyword}

    # Fix: previously only `[]` was matched here, so a keyword document
    # carrying an explicit nil id (e.g. `[_id: nil]`) raised a
    # CaseClauseError instead of getting a generated id like the
    # equivalent map document (`%{_id: nil}`) does.
    _ ->
      add_id(keyword)
  end
end

defp assign_id(map) when is_map(map) do
  map |> Map.to_list() |> add_id
end
# Generates a fresh id and prepends it to the document, returning `{id, doc}`.
defp add_id(doc) do
  new_id = Mongo.IdServer.new()
  {new_id, add_id(doc, new_id)}
end
# Prepends the given id using the same key style (atom vs binary) as the
# document's existing keys; empty documents get a binary "_id" key.
defp add_id([{key, _} | _] = doc, id) when is_atom(key), do: [{:_id, id} | doc]

defp add_id([{key, _} | _] = doc, id) when is_binary(key), do: [{"_id", id} | doc]

defp add_id([], id) do
  # Why are you inserting empty documents =(
  [{"_id", id}]
end
# Inspects a raw OP_REPLY for protocol-level failures: the QueryFailure bit
# (with a "$err"/"code" error document) and the CursorNotFound bit are
# converted into `{:error, Mongo.Error}`; anything else is `:ok`.
defp maybe_failure(op_reply(flags: flags, docs: [%{"$err" => reason, "code" => code}]))
when (@reply_query_failure &&& flags) != 0,
do: {:error, Mongo.Error.exception(message: reason, code: code)}
defp maybe_failure(op_reply(flags: flags))
when (@reply_cursor_not_found &&& flags) != 0,
do: {:error, Mongo.Error.exception(message: "cursor not found")}
defp maybe_failure(_reply),
do: :ok
end
| 34.210853 | 156 | 0.633871 |
e8098318ef23b03e921986030ad8d3faec7eb78f | 1,063 | exs | Elixir | mix.exs | bajankristof/ex_raft | 09c3234355d2792922b17a71e3c32b3e57f0a277 | [
"Apache-2.0"
] | 1 | 2021-08-25T10:49:26.000Z | 2021-08-25T10:49:26.000Z | mix.exs | bajankristof/ex_raft | 09c3234355d2792922b17a71e3c32b3e57f0a277 | [
"Apache-2.0"
] | null | null | null | mix.exs | bajankristof/ex_raft | 09c3234355d2792922b17a71e3c32b3e57f0a277 | [
"Apache-2.0"
] | null | null | null | defmodule ExRaft.MixProject do
use Mix.Project
@version "0.2.1"
@source_url "https://github.com/bajankristof/ex_raft"
# Mix project definition for the ex_raft package (version/source URL come
# from the module attributes above).
def project do
[
app: :ex_raft,
version: @version,
name: "ExRaft",
description: "An Elixir implementation of the raft consensus protocol.",
elixir: "~> 1.12",
start_permanent: Mix.env() == :prod,
deps: deps(),
source_url: @source_url,
package: package(),
docs: [
main: "readme",
extras: ["README.md"],
source_url: @source_url
]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:gen_state_machine, "~> 3.0"},
{:ex_doc, "~> 0.25.1", only: :dev, runtime: false},
{:meck, "~> 0.9.2", only: :test}
]
end
# Hex package metadata (license and repository link).
defp package do
[
licenses: ["Apache-2.0"],
links: %{"GitHub" => "https://github.com/bajankristof/ex_raft"}
]
end
end
| 21.693878 | 78 | 0.569144 |
e809888fc055a0971b5df982cb8d24de1871775c | 312 | exs | Elixir | config/config.exs | ohyecloudy/example-websocket-upbit | e5cf3e165b6cc6403f01064caf6a20ad9fd90053 | [
"MIT"
] | null | null | null | config/config.exs | ohyecloudy/example-websocket-upbit | e5cf3e165b6cc6403f01064caf6a20ad9fd90053 | [
"MIT"
] | null | null | null | config/config.exs | ohyecloudy/example-websocket-upbit | e5cf3e165b6cc6403f01064caf6a20ad9fd90053 | [
"MIT"
] | 1 | 2022-03-30T12:10:31.000Z | 2022-03-30T12:10:31.000Z | import Config
# https://github.com/deadtrickster/prometheus-httpd/blob/master/doc/prometheus_httpd.md
# Prometheus metrics endpoint: served at /metrics on port 8081.
config :prometheus, :prometheus_http,
path: String.to_charlist("/metrics"),
format: :auto,
port: 8081
# Upbit KRW-market tickers subscribed to over the websocket.
config :example_websocket_upbit,
tickers: ["KRW-BTC", "KRW-ETH", "KRW-SOL", "KRW-ADA", "KRW-XRP"]
| 28.363636 | 87 | 0.730769 |
e809ba03163e6e39a89cb16d7df40ae9bd5c1aaa | 88 | exs | Elixir | test/tasks/mfa_test.exs | thomas9911/elixir_git_hooks | 01bad3d745921c7d197dfff96e562ba35df5ae46 | [
"MIT"
] | 95 | 2018-05-01T10:37:39.000Z | 2022-03-29T06:51:17.000Z | test/tasks/mfa_test.exs | thomas9911/elixir_git_hooks | 01bad3d745921c7d197dfff96e562ba35df5ae46 | [
"MIT"
] | 84 | 2018-06-11T18:50:37.000Z | 2022-03-28T00:06:28.000Z | test/tasks/mfa_test.exs | thomas9911/elixir_git_hooks | 01bad3d745921c7d197dfff96e562ba35df5ae46 | [
"MIT"
] | 13 | 2019-05-06T09:20:13.000Z | 2022-02-25T20:53:18.000Z | defmodule GitHooks.Tasks.MFATest do
use ExUnit.Case
# The module currently only runs the doctests of GitHooks.Tasks.MFA.
doctest GitHooks.Tasks.MFA
end
| 14.666667 | 35 | 0.795455 |
e80a0740012833ad6af73aa1810586862f8e1781 | 1,355 | exs | Elixir | mix.exs | revelrylabs/ecto_filters | e5c9718e3582c2ad38ef07a4e7155f746abbac83 | [
"MIT"
] | 10 | 2019-07-30T02:17:28.000Z | 2021-07-20T10:32:17.000Z | mix.exs | revelrylabs/ecto_filters | e5c9718e3582c2ad38ef07a4e7155f746abbac83 | [
"MIT"
] | null | null | null | mix.exs | revelrylabs/ecto_filters | e5c9718e3582c2ad38ef07a4e7155f746abbac83 | [
"MIT"
] | null | null | null | defmodule Ecto.Filters.MixProject do
use Mix.Project
# Mix project definition for the ecto_filters package.
def project do
[
app: :ecto_filters,
version: "0.3.0",
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package(),
description: description(),
name: "Ecto Filters",
source_url: "https://github.com/revelrylabs/ecto_filters",
homepage_url: "https://github.com/revelrylabs/ecto_filters",
docs: [main: "readme", extras: ["README.md"]]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# One-line package description shown on Hex.
defp description do
"Adds function to transform request params into ecto query expressions."
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ecto_sql, "~> 3.1.1", only: [:test]},
{:ex_doc, ">= 0.0.0", only: [:dev, :test]},
{:mix_test_watch, "~> 0.8", only: [:dev, :test], runtime: false},
{:credo, "~> 1.1.0", only: [:dev, :test], runtime: false}
]
end
# Hex package metadata: shipped files, maintainers, license and links.
defp package do
[
files: ["lib", "mix.exs", "README.md", "LICENSE", "CHANGELOG.md"],
maintainers: ["Revelry Labs"],
licenses: ["MIT"],
links: %{
"GitHub" => "https://github.com/revelrylabs/ecto_filters"
},
build_tools: ["mix"]
]
end
end
| 25.566038 | 76 | 0.577122 |
e80a0c71bf2c3a5c2fcea65062a124dc56076478 | 2,430 | exs | Elixir | config/prod.exs | jeantsai/phoenix-admin | 3f954f0c452d385438b616f7e91bc5d66bcc1adc | [
"MIT"
] | null | null | null | config/prod.exs | jeantsai/phoenix-admin | 3f954f0c452d385438b616f7e91bc5d66bcc1adc | [
"MIT"
] | 1 | 2021-03-09T01:41:00.000Z | 2021-03-09T01:41:00.000Z | config/prod.exs | jeantsai/phoenix-admin | 3f954f0c452d385438b616f7e91bc5d66bcc1adc | [
"MIT"
] | 1 | 2019-04-17T17:06:14.000Z | 2019-04-17T17:06:14.000Z | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
# HTTP port comes from the PORT env var, falling back to 4000; the cache
# manifest is produced by `mix phx.digest` (see the comment block above).
config :admin, AdminWeb.Endpoint,
http: [:inet6, port: System.get_env("PORT") || 4000],
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :admin, AdminWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# :inet6,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :admin, AdminWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases (distillery)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :admin, AdminWeb.Endpoint, server: true
#
# Note you can't rely on `System.get_env/1` when using releases.
# See the releases documentation accordingly.
# Finally import the config/prod.secret.exs which should be versioned
# separately.
import_config "prod.secret.exs"
| 33.75 | 69 | 0.711111 |
e80a22e7a5edabf90a410de7373c3baa6f4375e2 | 900 | ex | Elixir | web/controllers/api/authentication_controller.ex | cedretaber/bibliotheca | 642ec9908d6d98f16e25b6a482c52e9cbaa21ad2 | [
"MIT"
] | null | null | null | web/controllers/api/authentication_controller.ex | cedretaber/bibliotheca | 642ec9908d6d98f16e25b6a482c52e9cbaa21ad2 | [
"MIT"
] | 22 | 2017-05-15T07:34:08.000Z | 2018-02-25T07:26:18.000Z | web/controllers/api/authentication_controller.ex | cedretaber/bibliotheca | 642ec9908d6d98f16e25b6a482c52e9cbaa21ad2 | [
"MIT"
] | null | null | null | defmodule Bibliotheca.Api.AuthenticationController do
use Bibliotheca.Web, :controller
alias Bibliotheca.Auth.HMAC
alias Bibliotheca.User
import Bibliotheca.Plugs.Authentication, only: [realm: 0, header: 0]
plug :scrub_params, "email" when action in [:login]
plug :scrub_params, "password" when action in [:login]
def login(conn, %{"email" => email, "password" => password}) do
case Repo.get_by(User, email: email) do
nil ->
login(conn, nil)
user ->
if HMAC.verify_password(user.password_digest, password) do
conn = Guardian.Plug.api_sign_in(conn, user)
jwt = Guardian.Plug.current_token(conn)
conn
|> put_resp_header(header(), "#{realm()} #{jwt}")
|> send_resp(204, "")
else
login(conn, nil)
end
end
end
def login(conn, _param), do: send_resp(conn, 401, "")
end
| 27.272727 | 70 | 0.63 |
e80a241316b23b2ab6eaa901a460a1d4139eaea0 | 780 | exs | Elixir | test/channels/chat_channel_test.exs | outofboundstech/chatbot | a185ff7d6e7ae3f9d12a8b61baffe3d07e1ad069 | [
"FSFUL"
] | null | null | null | test/channels/chat_channel_test.exs | outofboundstech/chatbot | a185ff7d6e7ae3f9d12a8b61baffe3d07e1ad069 | [
"FSFUL"
] | 1 | 2016-05-17T04:53:02.000Z | 2016-05-17T04:53:02.000Z | test/channels/chat_channel_test.exs | outofboundstech/ChatBot | a185ff7d6e7ae3f9d12a8b61baffe3d07e1ad069 | [
"FSFUL"
] | null | null | null | defmodule ChatBot.ChatChannelTest do
use ChatBot.ChannelCase
alias ChatBot.ChatChannel
setup do
{:ok, _, socket} =
socket("user_id", %{some: :assign})
|> subscribe_and_join(ChatChannel, "chats:lobby")
{:ok, socket: socket}
end
test "ping replies with status ok", %{socket: socket} do
ref = push socket, "ping", %{"hello" => "there"}
assert_reply ref, :ok, %{"hello" => "there"}
end
test "shout broadcasts to chats:lobby", %{socket: socket} do
push socket, "shout", %{"hello" => "all"}
assert_broadcast "shout", %{"hello" => "all"}
end
test "broadcasts are pushed to the client", %{socket: socket} do
broadcast_from! socket, "broadcast", %{"some" => "data"}
assert_push "broadcast", %{"some" => "data"}
end
end
| 26.896552 | 66 | 0.626923 |
e80a249d7fdeaa910c6f17e8fc43e22be8347e8f | 344 | ex | Elixir | lib/tune_web/views/track_view.ex | wojtekmach/tune | 8b38bc629eeafb35aae920ac59b986323de800eb | [
"MIT"
] | 206 | 2020-08-23T17:50:03.000Z | 2022-03-28T04:39:01.000Z | lib/tune_web/views/track_view.ex | wojtekmach/tune | 8b38bc629eeafb35aae920ac59b986323de800eb | [
"MIT"
] | 112 | 2020-08-21T08:26:38.000Z | 2022-03-31T06:11:06.000Z | lib/tune_web/views/track_view.ex | wojtekmach/tune | 8b38bc629eeafb35aae920ac59b986323de800eb | [
"MIT"
] | 15 | 2020-08-25T02:30:23.000Z | 2021-12-16T14:19:35.000Z | defmodule TuneWeb.TrackView do
@moduledoc false
use TuneWeb, :view
alias Tune.Spotify.Schema.{Player, Track}
alias Tune.Link
@spec playing?(Track.t(), Player.t()) :: boolean()
# A track is "playing" when the player is in the :playing state and the
# player's current item has this track's id.
defp playing?(%Track{id: track_id}, %Player{status: :playing, item: item}) do
  match?(%{id: ^track_id}, item)
end

defp playing?(_track, _player), do: false
end
| 24.571429 | 88 | 0.686047 |
e80a28ae8aadf8719bbbb357518bc8961aef7ec3 | 1,487 | ex | Elixir | lib/celery/compilers/farmware_compiler.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | lib/celery/compilers/farmware_compiler.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | lib/celery/compilers/farmware_compiler.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | defmodule FarmbotOS.Celery.Compiler.Farmware do
alias FarmbotOS.Celery.Compiler
# Sugar for `execute_script/2` with the label fixed to "take-photo";
# the node's body params are forwarded unchanged.
def take_photo(%{body: params}, cs_scope) do
  execute_script(%{args: %{label: "take-photo"}, body: params}, cs_scope)
end
# Compiles an `execute_script` CeleryScript node into quoted Elixir code.
# `package` is the script label; the node's body params become the script's
# environment. Logging and execution happen at runtime inside the quote.
def execute_script(%{args: %{label: package}, body: params}, cs_scope) do
  # Pair each param's (stringified) label with its raw value.
  env =
    Enum.map(params, fn %{args: %{label: key, value: value}} ->
      {to_string(key), value}
    end)

  quote location: :keep do
    package = unquote(Compiler.celery_to_elixir(package, cs_scope))
    # Map.new/1 collapses duplicate keys (last wins); Macro.escape makes
    # the literal map injectable into the AST.
    env = unquote(Macro.escape(Map.new(env)))
    FarmbotOS.Celery.SysCallGlue.log(unquote(format_log(package)), true)
    FarmbotOS.Celery.SysCallGlue.execute_script(package, env)
  end
end
# Compiles a `set_user_env` node: every `pair` in the body becomes a runtime
# call to `SysCallGlue.set_user_env/2`; all calls are spliced into a single
# quoted block (the scope argument is unused here).
def set_user_env(%{body: pairs}, _cs_scope) do
  kvs =
    Enum.map(pairs, fn %{kind: :pair, args: %{label: key, value: value}} ->
      quote location: :keep do
        FarmbotOS.Celery.SysCallGlue.set_user_env(
          unquote(key),
          unquote(value)
        )
      end
    end)

  quote location: :keep do
    (unquote_splicing(kvs))
  end
end
# Maps a known package label to its human-readable log message; any other
# label falls back to a generic "Executing <package>" message.
def format_log(package) do
  case package do
    "camera-calibration" -> "Calibrating camera"
    "historical-camera-calibration" -> "Calibrating camera"
    "historical-plant-detection" -> "Running weed detector"
    "plant-detection" -> "Running weed detector"
    "take-photo" -> "Taking photo"
    other -> "Executing #{other}"
  end
end
end
| 33.044444 | 77 | 0.662408 |
e80a719f1eba278838713345b3e7fd52fe4a6e97 | 320 | ex | Elixir | mia_server/lib/mix/tasks/mia.server.ex | SteffenBauer/mia_elixir | 569388b1f9ddd09f8e21a4d9275c42a81d469857 | [
"MIT"
] | null | null | null | mia_server/lib/mix/tasks/mia.server.ex | SteffenBauer/mia_elixir | 569388b1f9ddd09f8e21a4d9275c42a81d469857 | [
"MIT"
] | null | null | null | mia_server/lib/mix/tasks/mia.server.ex | SteffenBauer/mia_elixir | 569388b1f9ddd09f8e21a4d9275c42a81d469857 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Mia.Server do
use Mix.Task
@shortdoc "Starts MIA server application"
def run(args) do
Mix.Task.run "run", run_args() ++ args
end
defp run_args do
if iex_running?(), do: [], else: ["--no-halt"]
end
defp iex_running? do
Code.ensure_loaded?(IEx) and IEx.started?
end
end
| 17.777778 | 50 | 0.65625 |
e80a9658e1a26705f3cce08b365a1ff7275c676a | 1,961 | ex | Elixir | clients/genomics/lib/google_api/genomics/v1/model/failed_event.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/genomics/lib/google_api/genomics/v1/model/failed_event.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/genomics/lib/google_api/genomics/v1/model/failed_event.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Genomics.V1.Model.FailedEvent do
  @moduledoc """
  This event is generated when the execution of a pipeline has failed. Note
  that other events may continue to occur after this event.

  ## Attributes

  - cause (String.t): The human readable description of the cause of the failure. Defaults to: `null`.
  - code (String.t): The Google standard error code that best describes this failure. Defaults to: `null`.
    - Enum - one of [OK, CANCELLED, UNKNOWN, INVALID_ARGUMENT, DEADLINE_EXCEEDED, NOT_FOUND, ALREADY_EXISTS, PERMISSION_DENIED, UNAUTHENTICATED, RESOURCE_EXHAUSTED, FAILED_PRECONDITION, ABORTED, OUT_OF_RANGE, UNIMPLEMENTED, INTERNAL, UNAVAILABLE, DATA_LOSS]
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :cause => any(),
          :code => any()
        }

  # `field/1` (from Gax.ModelBase) registers each attribute for JSON
  # (de)serialization.
  field(:cause)
  field(:code)
end
defimpl Poison.Decoder, for: GoogleApi.Genomics.V1.Model.FailedEvent do
  # Delegates JSON decoding to the generated model module.
  def decode(value, options) do
    GoogleApi.Genomics.V1.Model.FailedEvent.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Genomics.V1.Model.FailedEvent do
  # Delegates JSON encoding to the shared Gax model-base encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.711538 | 257 | 0.746558 |
e80aa7256daf3481dcaf4e9cdec0d6b5233e820f | 156 | ex | Elixir | lib/web/views/email_view.ex | suryatmodulus/accent | 6aaf34075c33f3d9d84d38237af4a39b594eb808 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | lib/web/views/email_view.ex | suryatmodulus/accent | 6aaf34075c33f3d9d84d38237af4a39b594eb808 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | lib/web/views/email_view.ex | doc-ai/accent | e337e16f3658cc0728364f952c0d9c13710ebb06 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule Accent.EmailView do
use Phoenix.View, root: "lib/web/templates"
import Accent.EmailViewStyleHelper
import Accent.EmailViewConfigHelper
end
| 22.285714 | 45 | 0.814103 |
e80ab443f85cb752ddae637e71a42047bc626e6e | 2,093 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_big_query_table.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_big_query_table.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_big_query_table.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2BigQueryTable do
  @moduledoc """
  Message defining the location of a BigQuery table. A table is uniquely
  identified by its project_id, dataset_id, and table_name. Within a query
  a table is often referenced with a string in the format of:
  `<project_id>:<dataset_id>.<table_id>` or
  `<project_id>.<dataset_id>.<table_id>`.

  ## Attributes

  *   `datasetId` (*type:* `String.t`, *default:* `nil`) - Dataset ID of the table.
  *   `projectId` (*type:* `String.t`, *default:* `nil`) - The Google Cloud Platform project ID of the project containing the table.
      If omitted, project ID is inferred from the API call.
  *   `tableId` (*type:* `String.t`, *default:* `nil`) - Name of the table.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :datasetId => String.t(),
          :projectId => String.t(),
          :tableId => String.t()
        }

  # `field/1` (from Gax.ModelBase) registers each attribute for JSON
  # (de)serialization.
  field(:datasetId)
  field(:projectId)
  field(:tableId)
end
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2BigQueryTable do
  # Delegates JSON decoding to the generated model module.
  def decode(value, options) do
    GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2BigQueryTable.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2BigQueryTable do
  # Delegates JSON encoding to the shared Gax model-base encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.086207 | 132 | 0.720497 |
e80abb6fe96d91886ac60ebaa78ba4490f30fc73 | 1,329 | ex | Elixir | elixir/lib/homework_web/resolvers/merchants_resolver.ex | eititan/web-homework | d7f6fc170009ef07e709057b9f701bdf44c4ca5b | [
"MIT"
] | null | null | null | elixir/lib/homework_web/resolvers/merchants_resolver.ex | eititan/web-homework | d7f6fc170009ef07e709057b9f701bdf44c4ca5b | [
"MIT"
] | null | null | null | elixir/lib/homework_web/resolvers/merchants_resolver.ex | eititan/web-homework | d7f6fc170009ef07e709057b9f701bdf44c4ca5b | [
"MIT"
] | null | null | null | defmodule HomeworkWeb.Resolvers.MerchantsResolver do
alias Homework.Merchants
@doc """
Get a list of merchants.

`args` are forwarded to `Merchants.list_merchants/1` unmodified
(supported filter keys are defined by the context — TODO confirm).
Always returns `{:ok, merchants}`.
"""
def merchants(_root, args, _info) do
  {:ok, Merchants.list_merchants(args)}
end
@doc """
Get a count of merchants.

Always returns `{:ok, count}`.
"""
def merchant_count(_root, _args, _info) do
  {:ok, Merchants.count_merchants()}
end
@doc """
Create a new merchant from the given arguments.

Returns `{:ok, merchant}` on success, or `{:error, message}` containing
the inspected failure otherwise.
"""
def create_merchant(_root, args, _info) do
  args
  |> Merchants.create_merchant()
  |> case do
    {:ok, _merchant} = success ->
      success

    failure ->
      {:error, "could not create merchant: #{inspect(failure)}"}
  end
end
@doc """
Updates a merchant for an id with args specified.

Returns `{:ok, merchant}` on success, or `{:error, message}` containing
the inspected failure otherwise. Raises if no merchant has the given id.
"""
def update_merchant(_root, %{id: id} = args, _info) do
  result =
    id
    |> Merchants.get_merchant!()
    |> Merchants.update_merchant(args)

  case result do
    {:ok, _merchant} = success ->
      success

    failure ->
      {:error, "could not update merchant: #{inspect(failure)}"}
  end
end
@doc """
Deletes a merchant for an id.

Returns `{:ok, merchant}` on success, or `{:error, message}` containing
the inspected failure otherwise. Raises if no merchant has the given id.
"""
def delete_merchant(_root, %{id: id}, _info) do
  merchant = Merchants.get_merchant!(id)

  case Merchants.delete_merchant(merchant) do
    {:ok, merchant} ->
      {:ok, merchant}

    error ->
      # Bug fix: this message previously said "could not update merchant"
      # (copy/paste from update_merchant), mislabeling delete failures.
      {:error, "could not delete merchant: #{inspect(error)}"}
  end
end
end
| 21.786885 | 64 | 0.619263 |
e80afb183b33ebae9be6c67c08274a0481daee26 | 3,440 | exs | Elixir | test/docdog_web/controllers/auth_controller_test.exs | sobolevn/docdog-engine | 5f7ad60465063fac3d56574f2bf27a4a52d0a91e | [
"MIT"
] | null | null | null | test/docdog_web/controllers/auth_controller_test.exs | sobolevn/docdog-engine | 5f7ad60465063fac3d56574f2bf27a4a52d0a91e | [
"MIT"
] | null | null | null | test/docdog_web/controllers/auth_controller_test.exs | sobolevn/docdog-engine | 5f7ad60465063fac3d56574f2bf27a4a52d0a91e | [
"MIT"
] | null | null | null | defmodule DocdogWeb.AuthControllerTest do
use DocdogWeb.ConnCase
use Plug.Test
# Ueberauth payload as delivered by a successful GitHub OAuth callback.
@create_attrs_from_github %Ueberauth.Auth{
  info: %{
    email: "petrov@example.com",
    nickname: "petr_petrov",
    name: "Petr Petroff",
    first_name: "Petr",
    last_name: "Petrov",
    image: ""
  }
}

# Auth payload with a nil email, so persisting the user must fail validation.
@invalid_attrs_from_github %Ueberauth.Auth{
  info: %{nickname: "foobar", email: nil, name: "dsfdsf", image: "dfsdf"}
}

# Payload representing a failed/denied OAuth handshake.
@failure_attrs_from_github %Ueberauth.Failure{}
# The sign-in page renders the marketing headline and a GitHub login link.
test "loads sign in page", %{conn: conn} do
  conn = get conn, "/auth/sign_in"

  assert html_response(conn, 200) =~ "<h1>Built for developers and professional translators</h1>"
  assert html_response(conn, 200) =~ ">Sign in with Github</a>"
end
# A successful OAuth callback signs the user in, flashes a success message
# and redirects to the default workplace page.
test "when success auth from Github authenticates user", %{conn: conn} do
  conn =
    conn
    |> assign(:ueberauth_auth, @create_attrs_from_github)
    |> post(auth_path(conn, :callback, :github))

  assert redirected_to(conn) == "/workplace/popular"
  assert get_flash(conn, :info) == "Successfully authenticated."
  assert html_response(conn, 302) =~ "You are being <a href=\"/workplace/popular\">redirected</a>."
end
# A stored redirect URL pointing back at the sign-in page is ignored in
# favor of the default workplace destination.
test "when success auth from Github authenticates user and redirect url is /auth/sign_in", %{conn: conn} do
  conn =
    conn
    |> init_test_session(%{redirect_url: "/auth/sign_in"})
    |> assign(:ueberauth_auth, @create_attrs_from_github)
    |> post(auth_path(conn, :callback, :github))

  assert redirected_to(conn) == "/workplace/popular"
  assert html_response(conn, 302) =~ "You are being <a href=\"/workplace/popular\">redirected</a>."
end
# A stored redirect URL inside the app is honored after successful sign-in.
test "when success auth from Github authenticates user and redirect url is /workplace/projects/123", %{conn: conn} do
  conn =
    conn
    |> init_test_session(%{redirect_url: "/workplace/projects/123"})
    |> assign(:ueberauth_auth, @create_attrs_from_github)
    |> post(auth_path(conn, :callback, :github))

  assert redirected_to(conn) == "/workplace/projects/123"
  assert html_response(conn, 302) =~ "You are being <a href=\"/workplace/projects/123\">redirected</a>."
end
# OAuth succeeds but the user record fails validation (nil email): the user
# is sent back to the sign-in page with the changeset errors in the flash.
test "when success auth from Github, but errors in model redirects to sign in page with error", %{conn: conn} do
  conn =
    conn
    |> assign(:ueberauth_auth, @invalid_attrs_from_github)
    |> post(auth_path(conn, :callback, :github))

  assert redirected_to(conn) == "/auth/sign_in"
  assert get_flash(conn, :error) == [email: {"can't be blank", [validation: :required]}]
end
# A failed OAuth handshake redirects back with an error flash and no session.
test "when failure from Github redirects back with error", %{conn: conn} do
  conn = assign(conn, :ueberauth_failure, @failure_attrs_from_github)
  conn = get(conn, auth_path(conn, :callback, :github))

  assert redirected_to(conn) == "/auth/sign_in"
  assert get_flash(conn, :error) == "Failed to authenticate."
  assert html_response(conn, 302) =~ "You are being <a href=\"/auth/sign_in\">redirected</a>"
  refute get_session(conn, :current_user)
end
# Placeholder: should verify guests are redirected away from the workplace.
test "when guest try to open workplace", %{conn: _} do
  # TODO: Implement
end
# Logging out clears the session user and redirects to the landing page.
test "logout", %{conn: conn} do
  conn =
    conn
    |> init_test_session(current_user: "a user")
    |> delete(auth_path(conn, :delete))

  assert redirected_to(conn) == page_path(conn, :index)
  assert get_flash(conn, :info) == "Successfully logged out."
  refute get_session(conn, :current_user)
end
end
| 35.463918 | 119 | 0.671802 |
e80b02c8acb395497bd7324b9427741cb5aa700b | 111 | ex | Elixir | lib/webpay/list.ex | keichan34/webpay | 49c8f5df78016632d90e7cdbfc63f34d46c26b1c | [
"MIT"
] | 1 | 2016-05-15T14:09:18.000Z | 2016-05-15T14:09:18.000Z | lib/webpay/list.ex | keichan34/webpay | 49c8f5df78016632d90e7cdbfc63f34d46c26b1c | [
"MIT"
] | null | null | null | lib/webpay/list.ex | keichan34/webpay | 49c8f5df78016632d90e7cdbfc63f34d46c26b1c | [
"MIT"
] | null | null | null | defmodule Webpay.List do
# Collection envelope: `object` is fixed to "list"; `url`, `count` and
# `data` default to nil until populated.
defstruct [{:object, "list"}, :url, :count, :data]

# NOTE(review): field types could be enumerated in the typespec once they
# are confirmed against the Webpay API responses.
@type t :: %Webpay.List{}
end
| 18.5 | 52 | 0.630631 |
e80b09ac2a250ff67c60987bd4e6aac39e0a5107 | 450 | ex | Elixir | lib/membrane/element/callback_context/prepare.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | lib/membrane/element/callback_context/prepare.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | lib/membrane/element/callback_context/prepare.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.Element.CallbackContext.Prepare do
@moduledoc """
Structure representing a context that is passed to the callback of the element
when it goes into `:prepared` state.
"""
@behaviour Membrane.Element.CallbackContext
@type t :: %__MODULE__{}
defstruct []
@impl true
# Expands (at compile time) to the struct literal `%__MODULE__{...entries}`.
# `_state` is ignored because this context struct carries no fields of its own.
defmacro from_state(_state, entries \\ []) do
  quote do
    %unquote(__MODULE__){
      unquote_splicing(entries)
    }
  end
end
end
| 21.428571 | 80 | 0.688889 |
e80b4631a01ae211d45fdca0c3994aae1d021cca | 661 | exs | Elixir | test/accent/transformers/camel_case_test.exs | dconger/accent | f7a76f94f8fd13d96c63ddb3c6fd45b6d2f2aaf7 | [
"MIT"
] | null | null | null | test/accent/transformers/camel_case_test.exs | dconger/accent | f7a76f94f8fd13d96c63ddb3c6fd45b6d2f2aaf7 | [
"MIT"
] | null | null | null | test/accent/transformers/camel_case_test.exs | dconger/accent | f7a76f94f8fd13d96c63ddb3c6fd45b6d2f2aaf7 | [
"MIT"
] | null | null | null | defmodule Accent.Transformer.CamelCaseTest do
use ExUnit.Case
describe "call/2" do
  test "converts snake_case to CamelCase" do
    assert Accent.Transformer.CamelCase.call("hello_world") == "HelloWorld"
  end

  # Leading/trailing underscores must not produce empty word segments.
  test "trims leading and trailing underscores from input" do
    assert Accent.Transformer.CamelCase.call("_hello_world_") == "HelloWorld"
  end

  # Atoms round-trip: an atom input yields an atom output.
  test "supports atom as an input" do
    assert Accent.Transformer.CamelCase.call(:hello_world) == :HelloWorld
  end

  # Consecutive underscores are collapsed rather than kept as empty words.
  test "properly handles multiple consecutive underscores" do
    assert Accent.Transformer.CamelCase.call("hello__world") == "HelloWorld"
  end
end
end
| 30.045455 | 79 | 0.726172 |
e80b5d752e8a34266f2491dfc2bbcd0c4634c095 | 1,577 | ex | Elixir | clients/content/lib/google_api/content/v2/model/liasettings_set_pos_data_provider_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/content/lib/google_api/content/v2/model/liasettings_set_pos_data_provider_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/liasettings_set_pos_data_provider_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.LiasettingsSetPosDataProviderResponse do
  @moduledoc """
  ## Attributes

  *   `kind` (*type:* `String.t`, *default:* `content#liasettingsSetPosDataProviderResponse`) - Identifies what kind of resource this is. Value: the fixed string "content#liasettingsSetPosDataProviderResponse".
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :kind => String.t()
        }

  # `field/1` (from Gax.ModelBase) registers the attribute for JSON
  # (de)serialization.
  field(:kind)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.LiasettingsSetPosDataProviderResponse do
  # Delegates JSON decoding to the generated model module.
  def decode(value, options) do
    GoogleApi.Content.V2.Model.LiasettingsSetPosDataProviderResponse.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.LiasettingsSetPosDataProviderResponse do
  # Delegates JSON encoding to the shared Gax model-base encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.553191 | 210 | 0.75967 |
e80bc96734d99d416c3c469eadcf8e867066cbbb | 289 | ex | Elixir | lib/api/repo.ex | PetrNikolas/blazeee | 0d97a39b53d19951ccd946b2c4949b054e1487cd | [
"MIT"
] | 17 | 2018-09-18T23:35:26.000Z | 2021-12-05T08:03:23.000Z | lib/api/repo.ex | PetrNikolas/blazeee | 0d97a39b53d19951ccd946b2c4949b054e1487cd | [
"MIT"
] | null | null | null | lib/api/repo.ex | PetrNikolas/blazeee | 0d97a39b53d19951ccd946b2c4949b054e1487cd | [
"MIT"
] | 2 | 2019-07-08T20:43:13.000Z | 2020-03-04T19:11:39.000Z | defmodule Api.Repo do
use Ecto.Repo, otp_app: :api, adapter: Ecto.Adapters.Postgres
@doc """
Dynamically loads the repository url from the
DATABASE_URL environment variable.
"""
def init(_type, opts) do
  url = System.get_env("DATABASE_URL")
  {:ok, Keyword.put(opts, :url, url)}
end
end
| 24.083333 | 66 | 0.702422 |
e80bd992e564eac92ab809fa899c6617ddf1c686 | 2,846 | ex | Elixir | lib/flipay/accounts.ex | neofelisho/flipay | 71a03a3f4c859892536faf600b93ff35da82fb88 | [
"MIT"
] | null | null | null | lib/flipay/accounts.ex | neofelisho/flipay | 71a03a3f4c859892536faf600b93ff35da82fb88 | [
"MIT"
] | null | null | null | lib/flipay/accounts.ex | neofelisho/flipay | 71a03a3f4c859892536faf600b93ff35da82fb88 | [
"MIT"
] | null | null | null | defmodule Flipay.Accounts do
@moduledoc """
The Accounts context.
"""
import Ecto.Query, warn: false
import Comeonin.Bcrypt, only: [checkpw: 2, dummy_checkpw: 0]
alias Flipay.Repo
alias Flipay.Accounts.User
alias Flipay.Guardian
@doc """
Returns the list of users.

## Examples

    iex> list_users()
    [%User{}, ...]

"""
def list_users do
  Repo.all(User)
end
@doc """
Gets a single user.

Raises `Ecto.NoResultsError` if the User does not exist.

## Examples

    iex> get_user!(123)
    %User{}

    iex> get_user!(456)
    ** (Ecto.NoResultsError)

"""
def get_user!(id), do: Repo.get!(User, id)
@doc """
Creates a user.

## Examples

    iex> create_user(%{field: value})
    {:ok, %User{}}

    iex> create_user(%{field: bad_value})
    {:error, %Ecto.Changeset{}}

"""
def create_user(attrs \\ %{}) do
  changeset = User.changeset(%User{}, attrs)
  Repo.insert(changeset)
end
@doc """
Updates a user.

## Examples

    iex> update_user(user, %{field: new_value})
    {:ok, %User{}}

    iex> update_user(user, %{field: bad_value})
    {:error, %Ecto.Changeset{}}

"""
def update_user(%User{} = user, attrs) do
  changeset = User.changeset(user, attrs)
  Repo.update(changeset)
end
@doc """
Deletes a User.

## Examples

    iex> delete_user(user)
    {:ok, %User{}}

    iex> delete_user(user)
    {:error, %Ecto.Changeset{}}

"""
def delete_user(%User{} = user) do
  Repo.delete(user)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking user changes.

## Examples

    iex> change_user(user)
    %Ecto.Changeset{source: %User{}}

"""
def change_user(%User{} = user) do
  User.changeset(user, %{})
end
@doc """
Signs in by email and password. On success returns the Guardian
encode-and-sign result; every failure collapses to `{:error, :unauthorized}`.

## Examples

    iex> Flipay.Accounts.token_sign_in("hello@world.com", "somePassword")
    {:ok, token_string, %{user resources from Guardian}}

"""
def token_sign_in(email, password) do
  with {:ok, user} <- email_password_auth(email, password) do
    Guardian.encode_and_sign(user)
  else
    _failure -> {:error, :unauthorized}
  end
end
# Looks the user up by email, then verifies the password; a failed lookup
# short-circuits the `with` and is returned as that step's `{:error, _}`.
defp email_password_auth(email, password) when is_binary(email) and is_binary(password) do
  with {:ok, user} <- get_by_email(email),
       do: verify_password(password, user)
end
# Fetches a user by email. On a miss, runs a dummy password check so the
# lookup failure takes about as long as a wrong-password failure
# (timing-attack mitigation).
defp get_by_email(email) when is_binary(email) do
  if user = Repo.get_by(User, email: email) do
    {:ok, user}
  else
    dummy_checkpw()
    {:error, "Login failed."}
  end
end
# Checks the plaintext password against the stored bcrypt hash.
defp verify_password(password, %User{} = user) when is_binary(password) do
  if checkpw(password, user.password_hash) do
    {:ok, user}
  else
    {:error, :invalid_password}
  end
end
end
| 19.360544 | 92 | 0.602249 |
e80c2e48aeab7caaf8d6c1a17521cb8f1a2670f9 | 225 | exs | Elixir | config/test.exs | plus-eg/protego | 61e1384a51fbf809f3588b45318cad4106e8116d | [
"MIT"
] | 11 | 2016-06-03T07:32:49.000Z | 2019-11-18T06:41:42.000Z | config/test.exs | plus-eg/protego | 61e1384a51fbf809f3588b45318cad4106e8116d | [
"MIT"
] | 1 | 2017-05-31T18:18:59.000Z | 2019-04-20T13:37:34.000Z | config/test.exs | plus-eg/protego | 61e1384a51fbf809f3588b45318cad4106e8116d | [
"MIT"
] | 2 | 2018-06-06T02:29:44.000Z | 2018-07-09T14:46:13.000Z | use Mix.Config
# Guardian JWT settings (NOTE(review): appears to be the test environment
# config, judging by the static secret key — confirm before reuse).
config :guardian, Guardian,
  # HMAC-SHA512 tokens only.
  allowed_algos: ["HS512"],
  verify_module: Guardian.JWT,
  issuer: "Protego",
  # Tokens stay valid for 30 days.
  ttl: { 30, :days },
  verify_issuer: true,
  # Hard-coded key — acceptable only outside production.
  secret_key: "secret_key",
  serializer: Protego.GuardianSerializer
| 20.454545 | 38 | 0.755556 |
e80c305e6766d082810cb6d40fc7ff9a638bca9c | 8,126 | exs | Elixir | test/command_runner/test_runner_test.exs | tlux/command_runner | d0a59aba7b1e1020aeeba498656036647bb76d92 | [
"MIT"
] | 1 | 2020-10-12T04:06:57.000Z | 2020-10-12T04:06:57.000Z | test/command_runner/test_runner_test.exs | tlux/command_runner | d0a59aba7b1e1020aeeba498656036647bb76d92 | [
"MIT"
] | null | null | null | test/command_runner/test_runner_test.exs | tlux/command_runner | d0a59aba7b1e1020aeeba498656036647bb76d92 | [
"MIT"
] | null | null | null | defmodule CommandRunner.CommandRunnerTest do
use ExUnit.Case
import CommandRunner.OSProcessHelper
import Liveness
alias CommandRunner.TestRunner
# Start a fresh TestRunner per test; start_supervised!/1 tears it down
# automatically when the test exits.
setup do
  start_supervised!(TestRunner)
  :ok
end
describe "stop/0" do
  test "stop associated OS processes" do
    task_supervisor = start_supervised!(Task.Supervisor)
    production_command_ref = make_ref()
    staging_command_ref = make_ref()

    # Two long-running commands executed concurrently, each under its own ref.
    task_a =
      Task.Supervisor.async(task_supervisor, fn ->
        TestRunner.run_command(
          "sleep 2",
          [],
          production_command_ref
        )
      end)

    task_b =
      Task.Supervisor.async(task_supervisor, fn ->
        TestRunner.run_command("sleep 2", [], staging_command_ref)
      end)

    assert eventually(fn ->
             TestRunner.command_running?(production_command_ref) &&
               TestRunner.command_running?(staging_command_ref)
           end)

    production_os_pid = TestRunner.os_pid(production_command_ref)
    staging_os_pid = TestRunner.os_pid(staging_command_ref)

    assert os_process_exists?(production_os_pid)
    assert os_process_exists?(staging_os_pid)

    # Stopping the runner must kill both OS processes and unblock callers
    # with the :stopped result.
    assert TestRunner.stop() == :ok

    refute os_process_exists?(production_os_pid)
    refute os_process_exists?(staging_os_pid)

    assert Task.await(task_a) == :stopped
    assert Task.await(task_b) == :stopped
  end
end
describe "stop/1" do
  # Near-duplicate of the stop/0 test, but passes an explicit stop reason.
  test "stop associated OS processes" do
    task_supervisor = start_supervised!(Task.Supervisor)
    production_command_ref = make_ref()
    staging_command_ref = make_ref()

    task_a =
      Task.Supervisor.async(task_supervisor, fn ->
        TestRunner.run_command(
          "sleep 2",
          [],
          production_command_ref
        )
      end)

    task_b =
      Task.Supervisor.async(task_supervisor, fn ->
        TestRunner.run_command("sleep 2", [], staging_command_ref)
      end)

    assert eventually(fn ->
             TestRunner.command_running?(production_command_ref) &&
               TestRunner.command_running?(staging_command_ref)
           end)

    production_os_pid = TestRunner.os_pid(production_command_ref)
    staging_os_pid = TestRunner.os_pid(staging_command_ref)

    assert os_process_exists?(production_os_pid)
    assert os_process_exists?(staging_os_pid)

    # An explicit :normal stop reason must behave like stop/0.
    assert TestRunner.stop(:normal) == :ok

    refute os_process_exists?(production_os_pid)
    refute os_process_exists?(staging_os_pid)

    assert Task.await(task_a) == :stopped
    assert Task.await(task_b) == :stopped
  end
end
describe "run_command/1" do
  test "successful command" do
    assert TestRunner.run_command("./test/fixtures/success_script.sh") ==
             {0, "Everything OK!\n"}
  end

  test "successful command with working dir" do
    File.cd!("test/fixtures", fn ->
      assert TestRunner.run_command("./success_script.sh") ==
               {0, "Everything OK!\n"}
    end)
  end

  # When no ref is given, the runner generates one internally.
  test "auto-generate command ref" do
    task_supervisor = start_supervised!(Task.Supervisor)

    Task.Supervisor.async(task_supervisor, fn ->
      TestRunner.run_command("sleep 1")
    end)

    # Peek at the server state to confirm the auto-generated key is a
    # reference.
    assert eventually(fn ->
             TestRunner
             |> :sys.get_state()
             |> Map.fetch!(:refs)
             |> Map.keys()
             |> List.first()
             |> is_reference()
           end)
  end

  # A non-zero exit status is returned with the captured output.
  test "failed command" do
    File.cd!("test/fixtures", fn ->
      assert TestRunner.run_command("./error_script.sh") ==
               {1, "Something went wrong\n"}
    end)
  end

  test "allow parallel execution for different refs" do
    task_supervisor = start_supervised!(Task.Supervisor)

    task_a =
      Task.Supervisor.async(task_supervisor, fn ->
        TestRunner.run_command("sleep 0.3")
      end)

    task_b =
      Task.Supervisor.async(task_supervisor, fn ->
        TestRunner.run_command("./test/fixtures/success_script.sh")
      end)

    assert Task.await(task_a) == {0, ""}
    assert Task.await(task_b) == {0, "Everything OK!\n"}
  end
end
describe "run_command/2" do
  test "change working dir" do
    assert TestRunner.run_command("./success_script.sh", cd: "test/fixtures") ==
             {0, "Everything OK!\n"}
  end

  # Env vars may be a keyword list or a map; non-string and nil values are
  # accepted and rendered into the script's environment.
  test "use env vars" do
    File.cd!("test/fixtures", fn ->
      assert TestRunner.run_command(
               "./success_script_with_env_vars.sh",
               env: [{"FOO", "Tobi"}]
             ) == {0, "Hello, Tobi!\n"}

      assert TestRunner.run_command(
               "./success_script_with_env_vars.sh",
               env: [{"FOO", 123}]
             ) == {0, "Hello, 123!\n"}

      assert TestRunner.run_command(
               "./success_script_with_env_vars.sh",
               env: %{"FOO" => "Fernando"}
             ) == {0, "Hello, Fernando!\n"}

      assert TestRunner.run_command(
               "./success_script_with_env_vars.sh",
               env: %{"FOO" => nil}
             ) == {0, "Hello, !\n"}
    end)
  end
end
describe "run_command/3" do
  # A second command issued under an already-busy ref is rejected with
  # :running instead of being queued.
  test "prevent parallel execution for same ref" do
    task_supervisor = start_supervised!(Task.Supervisor)
    command_ref = make_ref()

    task_a =
      Task.Supervisor.async(task_supervisor, fn ->
        TestRunner.run_command("sleep 0.5", [], command_ref)
      end)

    task_b =
      Task.Supervisor.async(task_supervisor, fn ->
        TestRunner.run_command(
          "./test/fixtures/success_script.sh",
          [],
          command_ref
        )
      end)

    assert Task.await(task_a) == {0, ""}
    assert Task.await(task_b) == :running
  end
end
describe "command_running?/1" do
  test "true when command running for env" do
    task_supervisor = start_supervised!(Task.Supervisor)
    command_ref = make_ref()

    Task.Supervisor.async(task_supervisor, fn ->
      TestRunner.run_command("sleep 0.2", [], command_ref)
    end)

    # Poll until the runner has registered the command as running.
    assert eventually(fn ->
             TestRunner.command_running?(command_ref)
           end)
  end

  test "false when no command running for env" do
    assert TestRunner.command_running?(make_ref()) == false
  end
end
describe "os_pid/1" do
  test "get OS process ID when command running for env" do
    task_supervisor = start_supervised!(Task.Supervisor)
    command_ref = make_ref()

    command_task =
      Task.Supervisor.async(task_supervisor, fn ->
        TestRunner.run_command("sleep 0.2", [], command_ref)
      end)

    assert eventually(fn ->
             TestRunner.command_running?(command_ref)
           end)

    # The returned id is a live OS process id for the running command.
    os_pid = TestRunner.os_pid(command_ref)

    assert os_pid
    assert is_integer(os_pid)
    assert os_process_exists?(os_pid)

    # Await so the task does not outlive the test.
    Task.await(command_task)
  end

  test "nil when no command running for env" do
    assert TestRunner.os_pid(make_ref()) == nil
  end
end
describe "stop_command/1" do
  test "kill running command" do
    task_supervisor = start_supervised!(Task.Supervisor)
    command_ref = make_ref()

    # Task containing a long running command
    command_task =
      Task.Supervisor.async(task_supervisor, fn ->
        TestRunner.run_command("sleep 2", [], command_ref)
      end)

    assert eventually(fn ->
             TestRunner.command_running?(command_ref)
           end)

    # Find out the OS process ID for the running command to verify
    # it is currently running and to verify whether it has been killed later
    # on in the test
    os_pid = TestRunner.os_pid(command_ref)
    assert os_process_exists?(os_pid)

    # Kills the command
    assert TestRunner.stop_command(command_ref) == :ok
    refute os_process_exists?(os_pid)
    assert Task.await(command_task) == :stopped
  end

  test "ok when no command running" do
    assert TestRunner.stop_command(make_ref()) == :ok
  end
end
end
| 28.815603 | 82 | 0.610879 |
e80c33eaa65b13ac550b79ea99d0061b6f9be032 | 3,795 | ex | Elixir | test/support/test_schema.ex | xosdy/ecto_tablestore | bea08b70fcf7a12932e677b63882cfb936956b1a | [
"MIT"
] | null | null | null | test/support/test_schema.ex | xosdy/ecto_tablestore | bea08b70fcf7a12932e677b63882cfb936956b1a | [
"MIT"
] | null | null | null | test/support/test_schema.ex | xosdy/ecto_tablestore | bea08b70fcf7a12932e677b63882cfb936956b1a | [
"MIT"
] | null | null | null | defmodule EctoTablestore.TestSchema.Order do
use EctoTablestore.Schema
import Ecto.Changeset

# Test schema with a composite primary key: an externally supplied binary id
# plus an auto-generated (auto-increment) integer key.
tablestore_schema "ecto_ots_test_order" do
  field(:id, :binary_id, primary_key: true, autogenerate: false)
  field(:internal_id, :id, primary_key: true, autogenerate: true)
  field(:name, :string)
  # no explicit type — Ecto defaults the field type to :string
  field(:desc)
  field(:num, :integer)
  field(:success?, :boolean)
  field(:price, :float)
end

# Minimal changeset used by tests; casts and requires the caller-supplied
# fields only (auto-generated keys are excluded).
def test_changeset(order, params \\ %{}) do
  order
  |> cast(params, [:id, :name, :num])
  |> validate_required([:id, :name, :num])
end
end
defmodule EctoTablestore.TestSchema.User do
  use EctoTablestore.Schema

  # Covers scalar, datetime, map and array column types plus timestamps().
  tablestore_schema "ecto_ots_test_user" do
    field(:id, :id, primary_key: true)
    field(:name, :string)
    field(:level, :integer)
    field(:level2, :integer)
    field(:naive_dt, :naive_datetime)
    field(:dt, :utc_datetime)
    field(:profile, :map)
    field(:tags, {:array, :string})
    timestamps()
  end
end
defmodule EctoTablestore.TestSchema.Student do
  use EctoTablestore.Schema

  # Test schema with a single binary-id partition key and a mix of
  # scalar attribute columns.
  tablestore_schema "ecto_ots_test_student" do
    field(:partition_key, :binary_id, primary_key: true)
    field(:class, :string)
    field(:name, :string)
    field(:age, :integer)
    field(:score, :float)
    field(:is_actived, :boolean)
    field(:comment, :string)
    field(:content, :string)
  end
end
defmodule EctoTablestore.TestSchema.Page do
  use EctoTablestore.Schema

  # Composite key: an auto-increment integer plus a string key component.
  tablestore_schema "ecto_ots_test_page" do
    field(:pid, :id, primary_key: true, autogenerate: true)
    field(:name, :string, primary_key: true)
    field(:content, :string)
    field(:age, :integer)
  end
end
defmodule EctoTablestore.TestSchema.User2 do
  use EctoTablestore.Schema

  tablestore_schema "ecto_ots_test_user2" do
    field(:id, :string, primary_key: true)
    field(:age, :integer)
    # default value is applied when no :name is given on insert
    field(:name, :string, default: "user_name_123")

    # Explicit timestamp options mirroring Ecto's :naive_datetime defaults.
    timestamps(
      type: :naive_datetime,
      autogenerate: {Ecto.Schema, :__timestamps__, [:naive_datetime]}
    )
  end
end
defmodule EctoTablestore.TestSchema.User3 do
  use EctoTablestore.Schema

  # Minimal schema: single string primary key plus one attribute column.
  tablestore_schema "ecto_ots_test_user3" do
    field(:id, :string, primary_key: true)
    field(:name, :string)
  end
end
defmodule EctoTablestore.TestSchema.User4 do
  use EctoTablestore.Schema

  # Exercises embedded schemas: an inline list embed, an inline single embed
  # with on_replace behaviour, and a single embed defined in a separate
  # module (EctoTablestore.TestSchema.EmbedItem).
  tablestore_schema "test_embed_user4" do
    field(:id, :string, primary_key: true)

    embeds_many :cars, Car, primary_key: false do
      field(:name, :string)
      field(:status, Ecto.Enum, values: [:foo, :bar, :baz])
    end

    embeds_one :info, Info, primary_key: false, on_replace: :update do
      field(:name, :string)
      field(:money, :decimal)
      field(:status, Ecto.Enum, values: [:foo, :bar, :baz])
    end

    embeds_one(:item, EctoTablestore.TestSchema.EmbedItem, on_replace: :update)
  end
end
defmodule EctoTablestore.TestSchema.EmbedItem do
  use Ecto.Schema

  # Plain Ecto embedded schema (no backing table, no primary key);
  # embedded by User4 above.
  @primary_key false
  embedded_schema do
    field(:name, :string)
  end
end
defmodule EctoTablestore.TestSchema.Post do
  use EctoTablestore.Schema

  # Uses the :hashids key type with library-default hashids settings.
  tablestore_schema "ecto_ots_test_post" do
    field(:keyid, :hashids, primary_key: true, autogenerate: true)
    field(:content, :string)
  end
end
defmodule EctoTablestore.TestSchema.Post2 do
  use EctoTablestore.Schema

  # Same idea as Post, but with explicit hashids configuration
  # (custom alphabet, minimum length and salt).
  tablestore_schema "ecto_ots_test_post2" do
    field(:id, EctoTablestore.Hashids,
      primary_key: true,
      autogenerate: true,
      hashids: [alphabet: "1234567890cfhistu", min_len: 5, salt: "testsalt"]
    )

    field(:content, :string)
  end
end
defmodule EctoTablestore.TestSchema.TransactionTestRange do
  use EctoTablestore.Schema

  # Composite key schema (string partition key + integer range key);
  # the table name suggests it backs local-transaction tests.
  tablestore_schema "test_txn_range" do
    field(:key, :string, primary_key: true)
    field(:key2, :integer, primary_key: true)
    field(:field1, :string)
    field(:status, :integer)
  end
end
| 24.803922 | 79 | 0.705402 |
e80c9e7a85c525a168f6cc4141577fc742a40e16 | 6,726 | exs | Elixir | test/event/handle_event_test.exs | beorc/commanded | 1330585f2c7397e1d4dd5d4789fea85a43ab031b | [
"MIT"
] | null | null | null | test/event/handle_event_test.exs | beorc/commanded | 1330585f2c7397e1d4dd5d4789fea85a43ab031b | [
"MIT"
] | null | null | null | test/event/handle_event_test.exs | beorc/commanded | 1330585f2c7397e1d4dd5d4789fea85a43ab031b | [
"MIT"
] | null | null | null | defmodule Commanded.Event.HandleEventTest do
use Commanded.StorageCase
import Commanded.Enumerable, only: [pluck: 2]
import Commanded.Assertions.EventAssertions
alias Commanded.EventStore
alias Commanded.Event.{AppendingEventHandler, UninterestingEvent}
alias Commanded.Helpers.EventFactory
alias Commanded.Helpers.{ProcessHelper, Wait}
alias Commanded.ExampleDomain.BankAccount.AccountBalanceHandler
alias Commanded.ExampleDomain.BankAccount.Events.{BankAccountOpened, MoneyDeposited}
describe "balance handler" do
setup do
{:ok, handler} = AccountBalanceHandler.start_link()
Wait.until(fn ->
assert AccountBalanceHandler.subscribed?()
end)
on_exit(fn ->
ProcessHelper.shutdown(handler)
end)
[handler: handler]
end
test "should be notified of events", %{handler: handler} do
events = [
%BankAccountOpened{account_number: "ACC123", initial_balance: 1_000},
%MoneyDeposited{amount: 50, balance: 1_050}
]
recorded_events = EventFactory.map_to_recorded_events(events)
send(handler, {:events, recorded_events})
Wait.until(fn ->
assert AccountBalanceHandler.current_balance() == 1_050
end)
end
test "should ignore uninterested events" do
{:ok, handler} = AccountBalanceHandler.start_link()
# include uninterested events within those the handler is interested in
events = [
%UninterestingEvent{},
%BankAccountOpened{account_number: "ACC123", initial_balance: 1_000},
%UninterestingEvent{},
%MoneyDeposited{amount: 50, balance: 1_050},
%UninterestingEvent{}
]
recorded_events = EventFactory.map_to_recorded_events(events)
send(handler, {:events, recorded_events})
Wait.until(fn ->
assert AccountBalanceHandler.current_balance() == 1_050
end)
end
end
# Converts plain event structs into event data ready for appending to a
# stream, stamping fresh causation/correlation identifiers and empty metadata.
defp to_event_data(events) do
  mapping_opts = [
    causation_id: UUID.uuid4(),
    correlation_id: UUID.uuid4(),
    metadata: %{}
  ]

  Commanded.Event.Mapper.map_to_event_data(events, mapping_opts)
end
describe "appending handler" do
  setup do
    on_exit(fn ->
      ProcessHelper.shutdown(AppendingEventHandler)
    end)
  end

  test "should ignore events created before the event handler's subscription when starting from `:current`" do
    stream_uuid = UUID.uuid4()
    initial_events = [%BankAccountOpened{account_number: "ACC123", initial_balance: 1_000}]
    new_events = [%MoneyDeposited{amount: 50, balance: 1_050}]

    # Append the initial event BEFORE the handler subscribes.
    :ok = EventStore.append_to_stream(stream_uuid, 0, to_event_data(initial_events))

    wait_for_event(BankAccountOpened)

    {:ok, handler} = AppendingEventHandler.start_link(start_from: :current)

    assert GenServer.call(handler, :last_seen_event) == nil

    :ok = EventStore.append_to_stream(stream_uuid, 1, to_event_data(new_events))

    wait_for_event(MoneyDeposited, fn event, recorded_event -> event.amount == 50 and recorded_event.event_number == 2 end)

    # Only the post-subscription event (number 2) must have been handled.
    Wait.until(fn ->
      assert AppendingEventHandler.received_events() == new_events

      [metadata] = AppendingEventHandler.received_metadata()
      assert Map.get(metadata, :event_number) == 2
      assert Map.get(metadata, :stream_id) == stream_uuid
      assert Map.get(metadata, :stream_version) == 2
      assert %DateTime{} = Map.get(metadata, :created_at)

      assert GenServer.call(handler, :last_seen_event) == 2
    end)
  end

  test "should receive events created before the event handler's subscription when starting from `:origin`" do
    stream_uuid = UUID.uuid4()
    initial_events = [%BankAccountOpened{account_number: "ACC123", initial_balance: 1_000}]
    new_events = [%MoneyDeposited{amount: 50, balance: 1_050}]

    :ok = EventStore.append_to_stream(stream_uuid, 0, to_event_data(initial_events))

    {:ok, _handler} = AppendingEventHandler.start_link(start_from: :origin)

    :ok = EventStore.append_to_stream(stream_uuid, 1, to_event_data(new_events))

    wait_for_event(MoneyDeposited)

    # From :origin the handler replays history: both events, in order.
    Wait.until(fn ->
      assert AppendingEventHandler.received_events() == initial_events ++ new_events

      received_metadata = AppendingEventHandler.received_metadata()
      assert pluck(received_metadata, :event_number) == [1, 2]
      assert pluck(received_metadata, :stream_version) == [1, 2]

      Enum.each(received_metadata, fn metadata ->
        assert Map.get(metadata, :stream_id) == stream_uuid
        assert %DateTime{} = Map.get(metadata, :created_at)
      end)
    end)
  end

  test "should ignore already seen events" do
    {:ok, handler} = AppendingEventHandler.start_link()

    events = [
      %BankAccountOpened{account_number: "ACC123", initial_balance: 1_000},
      %MoneyDeposited{amount: 50, balance: 1_050}
    ]

    recorded_events = EventFactory.map_to_recorded_events(events)

    Wait.until(fn ->
      assert AppendingEventHandler.subscribed?()
    end)

    # send each event twice to simulate duplicate receives
    Enum.each(recorded_events, fn recorded_event ->
      send(handler, {:events, [recorded_event]})
      send(handler, {:events, [recorded_event]})
    end)

    # Duplicates are de-duplicated: each event appears exactly once.
    Wait.until(fn ->
      assert AppendingEventHandler.received_events() == events
      assert pluck(AppendingEventHandler.received_metadata(), :stream_version) == [1, 2]
    end)
  end
end
describe "event handler name" do
  # parse_name/2 normalises whatever was given as `:name` into a string.
  test "should parse string" do
    assert Commanded.Event.Handler.parse_name(__MODULE__, "foo") == "foo"
  end

  test "should parse atom to string" do
    assert Commanded.Event.Handler.parse_name(__MODULE__, :foo) == ":foo"
  end

  test "should parse tuple to string" do
    assert Commanded.Event.Handler.parse_name(__MODULE__, {:foo, :bar}) == "{:foo, :bar}"
  end

  # Empty and nil names are rejected outright.
  test "should error when parsing empty string" do
    assert_raise RuntimeError, fn ->
      Commanded.Event.Handler.parse_name(__MODULE__, "")
    end
  end

  test "should error when parsing `nil`" do
    assert_raise RuntimeError, fn ->
      Commanded.Event.Handler.parse_name(__MODULE__, nil)
    end
  end
end
# Compiling a handler without `:name` must raise at compile time; the
# module is compiled dynamically so the error can be asserted here.
test "should ensure an event handler name is provided" do
  assert_raise RuntimeError, "UnnamedEventHandler expects `:name` to be given", fn ->
    Code.eval_string("""
    defmodule UnnamedEventHandler do
      use Commanded.Event.Handler
    end
    """)
  end
end

# Using the handler module itself as the name is accepted.
test "should allow using event handler module as name" do
  Code.eval_string("""
  defmodule EventHandler do
    use Commanded.Event.Handler, name: __MODULE__
  end
  """)
end
end
| 31.876777 | 125 | 0.68317 |
e80cad72dc33b644aaabead628cfd6ff23a62456 | 181 | ex | Elixir | lib/contento_web/views/error_view.ex | reubenbrown13/contento | 979831bc9052f810409e969f4d11f15c0e40bb55 | [
"MIT"
] | 95 | 2017-10-03T19:21:57.000Z | 2021-02-15T12:37:37.000Z | lib/contento_web/views/error_view.ex | reubenbrown13/contento | 979831bc9052f810409e969f4d11f15c0e40bb55 | [
"MIT"
] | 20 | 2017-10-10T12:26:54.000Z | 2020-11-12T11:30:36.000Z | lib/contento_web/views/error_view.ex | reubenbrown13/contento | 979831bc9052f810409e969f4d11f15c0e40bb55 | [
"MIT"
] | 17 | 2017-10-04T07:59:40.000Z | 2022-02-09T20:10:09.000Z | defmodule ContentoWeb.ErrorView do
use ContentoWeb, :view

# Fallback for any status template without a dedicated file (e.g.
# "404.html"): render the plain HTTP status message ("Not Found") instead.
def template_not_found(template, _assigns) do
  Phoenix.Controller.status_message_from_template(template)
end
end
| 22.625 | 61 | 0.812155 |
e80cee10d90aeebe438b06757203b746fbbe7a66 | 901 | ex | Elixir | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/ddos_protection_plan_properties_format.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/ddos_protection_plan_properties_format.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/ddos_protection_plan_properties_format.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Network.Model.DdosProtectionPlanPropertiesFormat do
  @moduledoc """
  DDoS protection plan properties.
  """

  @derive [Poison.Encoder]
  defstruct [
    :"resourceGuid",
    :"provisioningState",
    :"virtualNetworks"
  ]

  # All struct fields default to nil, so each type admits nil.
  # `virtualNetworks` holds SubResource structs: the spec must reference the
  # remote type `SubResource.t` — a bare module alias in a typespec denotes
  # the literal atom, not the struct type.
  @type t :: %__MODULE__{
          :"resourceGuid" => String.t() | nil,
          :"provisioningState" => String.t() | nil,
          :"virtualNetworks" => [Microsoft.Azure.Management.Network.Model.SubResource.t()] | nil
        }
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Network.Model.DdosProtectionPlanPropertiesFormat do
  import Microsoft.Azure.Management.Network.Deserializer

  # Post-processes the decoded struct: converts the raw `virtualNetworks`
  # list into SubResource structs via the shared deserializer helper.
  def decode(value, options) do
    value
    |> deserialize(:"virtualNetworks", :list, Microsoft.Azure.Management.Network.Model.SubResource, options)
  end
end
| 28.15625 | 108 | 0.738069 |
e80d0bdfb256472f33088c2d005d4f7d2d54c415 | 884 | exs | Elixir | test/elixir/test/delayed_commits_test.exs | van-mronov/couchdb | 25838d078b1cf8ef5554f41c0b51d8628ca712ba | [
"Apache-2.0"
] | 1 | 2017-07-05T18:50:12.000Z | 2017-07-05T18:50:12.000Z | test/elixir/test/delayed_commits_test.exs | van-mronov/couchdb | 25838d078b1cf8ef5554f41c0b51d8628ca712ba | [
"Apache-2.0"
] | 1 | 2017-09-05T15:46:20.000Z | 2017-09-05T15:46:20.000Z | test/elixir/test/delayed_commits_test.exs | garrensmith/couchdb | 25838d078b1cf8ef5554f41c0b51d8628ca712ba | [
"Apache-2.0"
] | null | null | null | defmodule DelayedCommitsTest do
use CouchTestCase

@moduledoc """
Test CouchDB delayed commits
This is a port of the delayed_commits.js suite
Note that delayed_commits is deprecated in 2.0, so this is a minimal
test to show it still works. delayed_commits will be removed in 3.0.
"""

@tag config: [
       {"couchdb", "delayed_commits", "true"}
     ]
@tag :with_db
test "delayed commit", context do
  db_name = context[:db_name]
  doc_id = "doc-1"

  resp = Couch.put("/#{db_name}/#{doc_id}", body: %{a: 2, b: 4})
  assert resp.status_code in 201..204
  assert resp.body["ok"]

  resp = Couch.get("/#{db_name}/#{doc_id}")
  assert resp.status_code == 200, "The new doc should be in the database"

  # With delayed commits the write only lives in memory; restarting the
  # cluster before a commit interval elapses loses it — hence the 404 below.
  restart_cluster()

  resp = Couch.get("/#{db_name}/#{doc_id}")
  assert resp.status_code == 404, "The new doc should be missing"
end
end
| 27.625 | 75 | 0.654977 |
e80d5c859448e4e99544d2347476321603e27685 | 3,764 | ex | Elixir | deps/absinthe/lib/absinthe/type/built_ins/scalars.ex | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
] | 3 | 2017-06-22T16:33:58.000Z | 2021-07-07T15:21:09.000Z | lib/absinthe/type/built_ins/scalars.ex | bruce/absinthe | 19b63d3aaa9fb75aad01ffd5e91d89e0b30d7f91 | [
"MIT"
] | null | null | null | lib/absinthe/type/built_ins/scalars.ex | bruce/absinthe | 19b63d3aaa9fb75aad01ffd5e91d89e0b30d7f91 | [
"MIT"
] | null | null | null | defmodule Absinthe.Type.BuiltIns.Scalars do
use Absinthe.Schema.Notation
@moduledoc false
# Built-in GraphQL scalar definitions. Each scalar wires a serializer
# (Elixir term -> wire value) and a parser (blueprint input node -> term).

scalar :integer, name: "Int" do
  description """
  The `Int` scalar type represents non-fractional signed whole numeric values.
  Int can represent values between `-(2^53 - 1)` and `2^53 - 1` since it is
  represented in JSON as double-precision floating point numbers specified
  by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
  """

  serialize &(&1)
  parse parse_with([Absinthe.Blueprint.Input.Integer], &parse_int/1)
end

scalar :float do
  description """
  The `Float` scalar type represents signed double-precision fractional
  values as specified by
  [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
  """

  serialize &(&1)

  # Integer input nodes are accepted too and coerced to floats.
  parse parse_with([Absinthe.Blueprint.Input.Integer,
                    Absinthe.Blueprint.Input.Float], &parse_float/1)
end

scalar :string do
  description """
  The `String` scalar type represents textual data, represented as UTF-8
  character sequences. The String type is most often used by GraphQL to
  represent free-form human-readable text.
  """

  serialize &to_string/1
  parse parse_with([Absinthe.Blueprint.Input.String], &parse_string/1)
end

scalar :id, name: "ID" do
  description """
  The `ID` scalar type represents a unique identifier, often used to
  refetch an object or as key for a cache. The ID type appears in a JSON
  response as a String; however, it is not intended to be human-readable.
  When expected as an input type, any string (such as `"4"`) or integer
  (such as `4`) input value will be accepted as an ID.
  """

  serialize &to_string/1

  # Integers are accepted as input and normalised to strings by parse_id/1.
  parse parse_with([Absinthe.Blueprint.Input.Integer,
                    Absinthe.Blueprint.Input.String], &parse_id/1)
end

scalar :boolean do
  description """
  The `Boolean` scalar type represents `true` or `false`.
  """

  serialize &(&1)
  parse parse_with([Absinthe.Blueprint.Input.Boolean], &parse_boolean/1)
end
# Integers are only safe when between -(2^53 - 1) and 2^53 - 1 due to being
# encoded in JavaScript and represented in JSON as double-precision floating
# point numbers, as specified by IEEE 754.
@max_int 9007199254740991
@min_int -9007199254740991

# Accepts only integers within the JSON-safe range above; anything else
# (floats, strings, out-of-range integers) is rejected.
@spec parse_int(any) :: {:ok, integer} | :error
defp parse_int(value) when is_integer(value) and value >= @min_int and value <= @max_int do
  {:ok, value}
end

defp parse_int(_) do
  :error
end
# Scalar coercion helpers: each returns {:ok, coerced_value} or :error.

@spec parse_float(any) :: {:ok, float} | :error
defp parse_float(number) when is_float(number), do: {:ok, number}
defp parse_float(number) when is_integer(number), do: {:ok, number * 1.0}
defp parse_float(_other), do: :error

@spec parse_string(any) :: {:ok, binary} | :error
defp parse_string(text) when is_binary(text), do: {:ok, text}
defp parse_string(_other), do: :error

# IDs may arrive as strings or integers; integers are normalised to strings.
@spec parse_id(any) :: {:ok, binary} | :error
defp parse_id(id) when is_binary(id), do: {:ok, id}
defp parse_id(id) when is_integer(id), do: {:ok, Integer.to_string(id)}
defp parse_id(_other), do: :error

@spec parse_boolean(any) :: {:ok, boolean} | :error
defp parse_boolean(flag) when is_boolean(flag), do: {:ok, flag}
defp parse_boolean(_other), do: :error
# Builds a parse function that unwraps values carried by any of the given
# blueprint input node types, maps an explicit null input node to
# {:ok, nil}, and feeds bare (non-node) values straight to the coercion.
# Clause order matters and mirrors the original: struct-with-value first,
# then Null, then everything else.
defp parse_with(node_types, coercion) do
  fn
    %{__struct__: node_type, value: value} ->
      if node_type in node_types, do: coercion.(value), else: :error

    %Absinthe.Blueprint.Input.Null{} ->
      {:ok, nil}

    other ->
      coercion.(other)
  end
end
end
| 28.732824 | 93 | 0.666312 |
e80d61c493669279afb271e9feadf85f6c0c210b | 212 | ex | Elixir | lib/twitter_feed/twitter_api/api.ex | SanketSapkal/elixir-twitter-scraper | 4e00fb2d30d000ac30f0a48c5a2d8e5a18c3b2e0 | [
"MIT"
] | 3 | 2018-07-13T07:13:21.000Z | 2020-11-10T05:14:50.000Z | lib/twitter_feed/twitter_api/api.ex | SanketSapkal/elixir-twitter-scraper | 4e00fb2d30d000ac30f0a48c5a2d8e5a18c3b2e0 | [
"MIT"
] | null | null | null | lib/twitter_feed/twitter_api/api.ex | SanketSapkal/elixir-twitter-scraper | 4e00fb2d30d000ac30f0a48c5a2d8e5a18c3b2e0 | [
"MIT"
] | 2 | 2019-05-22T17:11:51.000Z | 2020-01-07T19:33:59.000Z | defmodule TwitterFeed.TwitterApi.Api do
@moduledoc false

# Behaviour implemented by Twitter API clients (the real HTTP client and
# test doubles). Both callbacks return a raw response body as a string.
@callback get_home_page(handle :: String.t()) :: String.t()
# Fixed: `Integer.t` is not a defined type (the Integer module declares no
# `t/0`); the built-in `integer()` type is the correct spec.
@callback get_tweets(handle :: String.t(), last_tweet_retrieved :: integer()) :: String.t()
end
| 30.285714 | 89 | 0.75 |
e80d622f92a009998e850ecbb5595b8e870f226e | 536 | exs | Elixir | test/regressions/i061_void_elements_test.exs | aforward-oss/earmark | b44d4817aa2b4047b07f91d633ae83ed27c695ed | [
"Apache-2.0"
] | null | null | null | test/regressions/i061_void_elements_test.exs | aforward-oss/earmark | b44d4817aa2b4047b07f91d633ae83ed27c695ed | [
"Apache-2.0"
] | null | null | null | test/regressions/i061_void_elements_test.exs | aforward-oss/earmark | b44d4817aa2b4047b07f91d633ae83ed27c695ed | [
"Apache-2.0"
] | null | null | null | defmodule Regressions.I061VoidElementsTest do
use ExUnit.Case
import ExUnit.CaptureIO

# Markdown fixture: a void element (<img>, no closing tag) followed by bold
# text — regression input for issue #61.
@i61_void_elements ~s{<img src="whatever.png">\n**strong**}

test "Issue: https://github.com/pragdave/earmark/issues/61" do
  result = Earmark.to_html @i61_void_elements
  assert result == ~s[<img src=\"whatever.png\"><p><strong>strong</strong></p>\n]
end

# The conversion must also be silent: nothing is written to stderr.
test "Issue: https://github.com/pragdave/earmark/issues/61 no message" do
  assert capture_io(:stderr, fn ->
    Earmark.to_html @i61_void_elements
  end) == ""
end
end
| 31.529412 | 83 | 0.705224 |
e80d804452b2d72a8be3cb2727a3a5ac8a280f11 | 9,776 | exs | Elixir | test/mint/http2/frame_test.exs | appcues/xhttp | 488a6ba5fd418a52f697a8d5f377c629ea96af92 | [
"Apache-2.0"
] | null | null | null | test/mint/http2/frame_test.exs | appcues/xhttp | 488a6ba5fd418a52f697a8d5f377c629ea96af92 | [
"Apache-2.0"
] | null | null | null | test/mint/http2/frame_test.exs | appcues/xhttp | 488a6ba5fd418a52f697a8d5f377c629ea96af92 | [
"Apache-2.0"
] | null | null | null | defmodule Mint.HTTP2.FrameTest do
use ExUnit.Case, async: true
use ExUnitProperties
use Bitwise, skip_operators: true
import Mint.HTTP2.Frame, except: [decode_next: 1, encode_raw: 4]
alias Mint.HTTP2.{
Frame,
HPACK
}
# Flag helpers: valid frame/flag combinations produce the right bitmask;
# flags that do not belong to the frame type raise FunctionClauseError.
test "set_flags/2" do
  assert set_flags(:ping, [:ack]) == 0x01
  assert set_flags(:data, [:end_stream]) == 0x01
  assert_raise FunctionClauseError, fn -> set_flags(:data, [:ack]) end
end

test "set_flags/3" do
  # Three-arg form ORs new flags into an existing bitmask.
  assert set_flags(0x01, :data, [:padded]) == bor(0x01, 0x08)
  assert_raise FunctionClauseError, fn -> set_flags(0x00, :data, [:ack]) end
end

test "flag_set?/3" do
  assert flag_set?(0x08, :data, :padded) == true
  assert flag_set?(0x00, :data, :padded) == false
  assert_raise FunctionClauseError, fn -> flag_set?(0x00, :data, :ack) end
end

# An empty buffer is not an error — the decoder asks for more bytes.
test "decode_next/1 with an incomplete frame" do
  assert Frame.decode_next(<<>>) == :more
end
describe "DATA" do
  # Property: encode/decode round-trips for arbitrary stream ids and bodies.
  test "without padding" do
    check all stream_id <- non_zero_stream_id(),
              data <- binary() do
      assert_round_trip data(stream_id: stream_id, flags: 0x00, data: data, padding: nil)
    end
  end

  test "with padding" do
    check all stream_id <- non_zero_stream_id(),
              data <- binary(),
              padding <- binary() do
      assert_round_trip data(stream_id: stream_id, flags: 0x08, data: data, padding: padding)
    end
  end

  test "with bad padding" do
    # "payload" is 4 bytes, the pad length is >= 5 bytes
    payload = <<5::8, "data">>
    debug_data = "the padding length of a :data frame is bigger than the payload length"

    assert Frame.decode_next(encode_raw(0x00, 0x08, 3, payload)) ==
             {:error, {:protocol_error, debug_data}}
  end
end
describe "HEADERS" do
  # End-to-end: HPACK-encode a real header list, decode the frame, then
  # HPACK-decode the header block fragment back to the original headers.
  test "with meaningful hbf" do
    headers = [{"foo", "bar"}, {"baz", "bong"}, {"foo", "badung"}]

    {encoded_headers, _} =
      headers
      |> Enum.map(fn {name, value} -> {:no_store, name, value} end)
      |> HPACK.encode(HPACK.new(100_000))

    assert {:ok, headers(stream_id: 3, flags: 0x00, hbf: hbf, padding: nil), "rest"} =
             Frame.decode_next(encode_raw(0x01, 0x00, 3, encoded_headers) <> "rest")

    assert {:ok, ^headers, _} = HPACK.decode(hbf, HPACK.new(100_000))
  end

  test "without padding and without priority" do
    check all stream_id <- non_zero_stream_id(),
              hbf <- binary() do
      assert_round_trip headers(
                          stream_id: stream_id,
                          flags: 0x00,
                          exclusive?: nil,
                          stream_dependency: nil,
                          weight: nil,
                          hbf: hbf,
                          padding: nil
                        )
    end
  end

  test "with padding and priority" do
    # 0x08 = PADDED, 0x20 = PRIORITY.
    check all stream_id <- non_zero_stream_id(),
              hbf <- binary(),
              padding <- binary() do
      assert_round_trip headers(
                          stream_id: stream_id,
                          flags: bor(0x08, 0x20),
                          exclusive?: true,
                          stream_dependency: 19,
                          weight: 10,
                          hbf: hbf,
                          padding: padding
                        )
    end
  end
end
describe "PRIORITY" do
  test "regular" do
    check all stream_id <- non_zero_stream_id(),
              stream_dependency <- non_zero_stream_id(),
              weight <- positive_integer() do
      assert_round_trip priority(
                          stream_id: stream_id,
                          exclusive?: true,
                          stream_dependency: stream_dependency,
                          weight: weight,
                          flags: 0x00
                        )
    end
  end

  # PRIORITY payload must be exactly 5 bytes; empty payload is a size error.
  test "with bad length" do
    assert Frame.decode_next(encode_raw(0x02, 0x00, 3, "")) ==
             {:error, {:frame_size_error, :priority}}
  end
end
describe "RST_STREAM" do
  test "regular" do
    check all stream_id <- non_zero_stream_id(),
              error_code <- error_code() do
      assert_round_trip rst_stream(
                          stream_id: stream_id,
                          flags: 0x00,
                          error_code: error_code
                        )
    end
  end

  # RST_STREAM payload must be exactly 4 bytes; a 1-byte payload fails.
  test "with bad length" do
    assert Frame.decode_next(encode_raw(0x03, 0x00, 3, <<3::8>>)) ==
             {:error, {:frame_size_error, :rst_stream}}
  end
end
describe "SETTINGS" do
  test "with empty settings" do
    assert_round_trip settings(stream_id: 0, flags: 0x00, params: [])
  end

  # Round-trips every known SETTINGS parameter at once.
  test "with parameters" do
    check all header_table_size <- positive_integer(),
              enable_push <- boolean(),
              max_concurrent_streams <- non_negative_integer(),
              initial_window_size <- positive_integer(),
              max_frame_size <- positive_integer(),
              max_header_list_size <- positive_integer(),
              enable_connect_protocol <- boolean() do
      params = [
        header_table_size: header_table_size,
        enable_push: enable_push,
        max_concurrent_streams: max_concurrent_streams,
        initial_window_size: initial_window_size,
        max_frame_size: max_frame_size,
        max_header_list_size: max_header_list_size,
        enable_connect_protocol: enable_connect_protocol
      ]

      assert_round_trip settings(stream_id: 0, flags: 0x01, params: params)
    end
  end

  # SETTINGS payload length must be a multiple of 6 bytes.
  test "with bad length" do
    assert Frame.decode_next(encode_raw(0x04, 0x00, 0, <<_not_multiple_of_6 = 3::8>>)) ==
             {:error, {:frame_size_error, :settings}}
  end
end
describe "PUSH_PROMISE" do
  test "without padding" do
    check all stream_id <- non_zero_stream_id(),
              promised_stream_id <- non_zero_stream_id(),
              hbf <- binary() do
      assert_round_trip push_promise(
                          stream_id: stream_id,
                          flags: 0x00,
                          promised_stream_id: promised_stream_id,
                          hbf: hbf,
                          padding: nil
                        )
    end
  end

  test "with padding" do
    # 0x08 = PADDED flag.
    check all stream_id <- non_zero_stream_id(),
              promised_stream_id <- non_zero_stream_id(),
              hbf <- binary(),
              padding <- binary() do
      assert_round_trip push_promise(
                          stream_id: stream_id,
                          flags: 0x08,
                          promised_stream_id: promised_stream_id,
                          hbf: hbf,
                          padding: padding
                        )
    end
  end
end
describe "PING" do
  # PING carries exactly 8 opaque bytes; 0x01 is the ACK flag.
  test "regular" do
    check all opaque_data <- binary(length: 8) do
      assert_round_trip ping(stream_id: 0, flags: 0x01, opaque_data: opaque_data)
    end
  end

  test "with bad length" do
    assert Frame.decode_next(encode_raw(0x06, 0x00, 0, <<_not_multiple_of_6 = 3::8>>)) ==
             {:error, {:frame_size_error, :ping}}
  end
end
describe "GOAWAY" do
  # GOAWAY is a connection-level frame, so it always travels on stream 0.
  test "regular" do
    check all last_stream_id <- non_zero_stream_id(),
              error_code <- error_code(),
              debug_data <- binary() do
      assert_round_trip goaway(
                          stream_id: 0,
                          flags: 0x00,
                          last_stream_id: last_stream_id,
                          error_code: error_code,
                          debug_data: debug_data
                        )
    end
  end
end
describe "WINDOW_UPDATE" do
  # Valid both at connection level (stream 0) and on individual streams.
  test "regular" do
    check all stream_id <- one_of([constant(0), non_zero_stream_id()]),
              wsi <- positive_integer() do
      assert_round_trip window_update(
                          stream_id: stream_id,
                          flags: 0x00,
                          window_size_increment: wsi
                        )
    end
  end

  # A zero increment is a protocol error, not a size error.
  test "invalid window size increment" do
    assert Frame.decode_next(encode_raw(0x08, 0x00, 0, <<0::1, 0::31>>)) ==
             {:error, {:protocol_error, "bad WINDOW_SIZE increment"}}
  end

  test "with bad length" do
    assert Frame.decode_next(encode_raw(0x08, 0x00, 0, <<>>)) ==
             {:error, {:frame_size_error, :window_update}}
  end
end
describe "CONTINUATION" do
  # CONTINUATION carries only a header block fragment — no flags, no padding.
  test "regular" do
    check all stream_id <- non_zero_stream_id(),
              hbf <- binary() do
      assert_round_trip continuation(stream_id: stream_id, flags: 0x00, hbf: hbf)
    end
  end
end
# Encodes `frame`, appends trailing bytes, and asserts that decode_next
# returns the same frame plus the untouched trailing bytes — the core
# round-trip property used by every describe block above.
defp assert_round_trip(frame) do
  encoded = frame |> Frame.encode() |> IO.iodata_to_binary()
  assert Frame.decode_next(encoded <> "rest") == {:ok, frame, "rest"}
end

# Builds a raw frame binary from type / flags / stream id / payload.
defp encode_raw(type, flags, stream_id, payload) do
  IO.iodata_to_binary(Frame.encode_raw(type, flags, stream_id, payload))
end

# StreamData generator for non-zero (odd) stream ids.
defp non_zero_stream_id() do
  map(positive_integer(), &(&1 * 2 + 1))
end

defp non_negative_integer() do
  map(integer(), &abs/1)
end

# Generator over the full set of HTTP/2 error-code atoms.
defp error_code() do
  member_of([
    :no_error,
    :protocol_error,
    :internal_error,
    :flow_control_error,
    :settings_timeout,
    :stream_closed,
    :frame_size_error,
    :refused_stream,
    :cancel,
    :compression_error,
    :connect_error,
    :enhance_your_calm,
    :inadequate_security,
    :http_1_1_required
  ])
end
end
| 31.74026 | 95 | 0.54286 |
e80d8c728162244d8d757dc4ef0644d168356e03 | 1,650 | ex | Elixir | clients/genomics/lib/google_api/genomics/v1/model/range.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/genomics/lib/google_api/genomics/v1/model/range.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/genomics/lib/google_api/genomics/v1/model/range.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Genomics.V1.Model.Range do
  @moduledoc """
  A 0-based half-open genomic coordinate range for search requests.

  ## Attributes

  - end (String): The end position of the range on the reference, 0-based exclusive. Defaults to: `null`.
  - referenceName (String): The reference sequence name, for example `chr1`, `1`, or `chrX`. Defaults to: `null`.
  - start (String): The start position of the range on the reference, 0-based inclusive. Defaults to: `null`.
  """

  # :"end" must be written as a quoted atom — `end` is a reserved word.
  defstruct [
    :"end",
    :"referenceName",
    :"start"
  ]
end
defimpl Poison.Decoder, for: GoogleApi.Genomics.V1.Model.Range do
  # All fields are plain strings, so no nested deserialization is needed —
  # the decoded struct is returned unchanged.
  def decode(value, _options) do
    value
  end
end
defimpl Poison.Encoder, for: GoogleApi.Genomics.V1.Model.Range do
  # Delegates to the shared serializer; the name suggests it omits nil
  # fields from the JSON output — confirm in the Deserializer module.
  def encode(value, options) do
    GoogleApi.Genomics.V1.Deserializer.serialize_non_nil(value, options)
  end
end
| 33 | 143 | 0.733333 |
e80e06d270216e76f8717340121d1b118134a1c6 | 512 | ex | Elixir | lib/militerm_web/controllers/session_controller.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | 6 | 2017-06-16T10:26:35.000Z | 2021-04-07T15:01:00.000Z | lib/militerm_web/controllers/session_controller.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | 2 | 2020-04-14T02:17:46.000Z | 2021-03-10T11:09:05.000Z | lib/militerm_web/controllers/session_controller.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | null | null | null | defmodule MilitermWeb.SessionController do
use MilitermWeb, :controller

# NOTE(review): this alias appears unused in the visible code — confirm
# before removing.
alias Militerm.Accounts

# Completes login for a play session: binds the current user's account id
# to the given session id via the session service, then redirects home.
def auth_session(conn, %{"session_id" => session_id} = _params) do
  # let the session service know who logged in for this session
  %{id: user_id} = current_user(conn)

  case Militerm.Services.Session.authenticate_session(session_id, user_id) do
    :ok ->
      conn
      # |> put_flash()
      |> redirect(to: "/")

    :error ->
      # Session id was not recognised or could not be bound to this user.
      render(conn, "try-again.html")
  end
end
end
| 24.380952 | 79 | 0.650391 |
e80e1a0f5f210338bdc453401deff651e167d0b8 | 1,394 | ex | Elixir | plain-planner/elixir/test/support/data_case.ex | danielmarreirosdeoliveira/prototypes | 047a7d0cae84b31213c06b45304e41a18e0678cb | [
"Apache-2.0"
] | null | null | null | plain-planner/elixir/test/support/data_case.ex | danielmarreirosdeoliveira/prototypes | 047a7d0cae84b31213c06b45304e41a18e0678cb | [
"Apache-2.0"
] | 1 | 2020-05-10T13:37:43.000Z | 2020-05-10T13:37:43.000Z | plain-planner/elixir/test/support/data_case.ex | danielmarreirosdeoliveira/prototypes | 047a7d0cae84b31213c06b45304e41a18e0678cb | [
"Apache-2.0"
] | null | null | null | defmodule Plain.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate

# Injected into every test module that does `use Plain.DataCase`:
# repo alias plus the usual Ecto imports and this module's helpers.
using do
  quote do
    alias Plain.Repo

    import Ecto
    import Ecto.Changeset
    import Ecto.Query
    import Plain.DataCase
  end
end
# Checks out a sandboxed DB connection for each test. Non-async tests run
# in shared mode so processes spawned by the test reuse the connection.
# (`!` rather than `not`: tags[:async] may be nil for untagged tests.)
setup tags do
  :ok = Ecto.Adapters.SQL.Sandbox.checkout(Plain.Repo)

  if !tags[:async] do
    Ecto.Adapters.SQL.Sandbox.mode(Plain.Repo, {:shared, self()})
  end

  :ok
end
@doc """
A helper that transforms changeset errors into a map of messages.

    assert {:error, changeset} = Accounts.create_user(%{password: "short"})
    assert "password is too short" in errors_on(changeset).password
    assert %{password: ["password is too short"]} = errors_on(changeset)

"""
def errors_on(changeset) do
  Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
    # Interpolate %{key} placeholders (e.g. %{count}) from the error opts;
    # unknown keys fall back to the placeholder name itself.
    Regex.replace(~r"%{(\w+)}", message, fn _, key ->
      opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
    end)
  end)
end
end
| 25.814815 | 77 | 0.677188 |
e80e38097fc27b1fe86bd9ea68b1ae6746da944a | 1,822 | exs | Elixir | mix.exs | velimir/ex_aws | 2c3b0ae8bb19fbcf65f0515298a0db56ee551dda | [
"MIT"
] | null | null | null | mix.exs | velimir/ex_aws | 2c3b0ae8bb19fbcf65f0515298a0db56ee551dda | [
"MIT"
] | null | null | null | mix.exs | velimir/ex_aws | 2c3b0ae8bb19fbcf65f0515298a0db56ee551dda | [
"MIT"
] | null | null | null | defmodule ExAws.Mixfile do
use Mix.Project
@source_url "https://github.com/ex-aws/ex_aws"
@version "2.2.0"
  # Mix project definition for the ex_aws package.
  def project do
    [
      app: :ex_aws,
      version: @version,
      elixir: "~> 1.7",
      # test/support is only compiled in the :test environment (see elixirc_paths/1).
      elixirc_paths: elixirc_paths(Mix.env()),
      description: "Generic AWS client",
      name: "ExAws",
      source_url: @source_url,
      package: package(),
      deps: deps(),
      docs: docs(),
      # Optional deps must be listed explicitly for Dialyzer to see their PLTs.
      dialyzer: [
        plt_add_apps: [:mix, :hackney, :configparser_ex, :jsx]
      ]
    ]
  end
  # OTP application spec; ExAws is the application callback module.
  def application do
    [extra_applications: [:logger, :crypto], mod: {ExAws, []}]
  end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
  # Dependencies; HTTP client, JSON and XML codecs are optional so users can
  # pick their own implementations.
  defp deps() do
    [
      {:bypass, "~> 2.1", only: :test},
      {:configparser_ex, "~> 4.0", optional: true},
      {:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false},
      {:ex_doc, "~> 0.16", only: [:dev, :test]},
      {:hackney, "~> 1.9", optional: true},
      {:jason, "~> 1.1", optional: true},
      {:jsx, "~> 3.0", optional: true},
      {:mox, "~> 1.0", only: :test},
      {:sweet_xml, "~> 0.6", optional: true}
    ]
  end
  # Hex package metadata.
  defp package do
    [
      description: description(),
      files: ["priv", "lib", "config", "mix.exs", "README*"],
      maintainers: ["Bernard Duggan", "Ben Wilson"],
      licenses: ["MIT"],
      links: %{
        Changelog: "#{@source_url}/blob/master/CHANGELOG.md",
        GitHub: @source_url
      }
    ]
  end
defp description do
"""
AWS client for Elixir. Currently supports Dynamo, DynamoStreams, EC2,
Firehose, Kinesis, KMS, Lambda, RRDS, Route53, S3, SES, SNS, SQS, STS
"""
end
  # ExDoc configuration; docs are generated from the tagged release ref.
  defp docs do
    [
      main: "readme",
      source_ref: "v#{@version}",
      source_url: @source_url,
      extras: ["README.md"]
    ]
  end
end
| 24.293333 | 73 | 0.543908 |
e80e3a17fefcd2fde55afee4dbc497ea22558aec | 2,047 | ex | Elixir | clients/poly/lib/google_api/poly/v1/model/list_liked_assets_response.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/poly/lib/google_api/poly/v1/model/list_liked_assets_response.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/poly/lib/google_api/poly/v1/model/list_liked_assets_response.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# Auto-generated API model (see header above) — regenerate rather than edit by hand.
defmodule GoogleApi.Poly.V1.Model.ListLikedAssetsResponse do
  @moduledoc """
  A response message from a request to list.

  ## Attributes

  - assets ([Asset]): A list of assets that match the criteria specified in the request. Defaults to: `null`.
  - nextPageToken (String.t): The continuation token for retrieving the next page. If empty, indicates that there are no more pages. To get the next page, submit the same request specifying this value as the page_token. Defaults to: `null`.
  - totalSize (integer()): The total number of assets in the list, without pagination. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :assets => list(GoogleApi.Poly.V1.Model.Asset.t()),
          :nextPageToken => any(),
          :totalSize => any()
        }

  field(:assets, as: GoogleApi.Poly.V1.Model.Asset, type: :list)
  field(:nextPageToken)
  field(:totalSize)
end
# JSON decoding delegates to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Poly.V1.Model.ListLikedAssetsResponse do
  def decode(value, options) do
    GoogleApi.Poly.V1.Model.ListLikedAssetsResponse.decode(value, options)
  end
end
# JSON encoding delegates to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Poly.V1.Model.ListLikedAssetsResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.907407 | 240 | 0.737176 |
e80e4fe691b4b475e754af8acb6f527aaf0e0e94 | 2,052 | exs | Elixir | config/prod.exs | qhwa/bonfire | 4a368d6d5300539399dcaff167ac69e3165c2bff | [
"MIT"
] | 71 | 2020-03-09T02:09:30.000Z | 2022-03-09T06:10:23.000Z | config/prod.exs | qhwa/bonfire | 4a368d6d5300539399dcaff167ac69e3165c2bff | [
"MIT"
] | null | null | null | config/prod.exs | qhwa/bonfire | 4a368d6d5300539399dcaff167ac69e3165c2bff | [
"MIT"
] | 4 | 2020-04-03T02:28:05.000Z | 2021-11-24T20:07:25.000Z | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
# NOTE: host and port are hardcoded here; the endpoint serves plain HTTP on
# 4000 while URLs are generated for https://bonfire.ooo (TLS presumably
# terminated by a proxy — confirm deployment setup).
config :bonfire, BonfireWeb.Endpoint,
  server: true,
  http: [port: 4000, compress: true],
  url: [host: "bonfire.ooo", port: 443, scheme: "https"],
  cache_static_manifest: "priv/static/cache_manifest.json"

# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :bonfire, BonfireWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
# transport_options: [socket_opts: [:inet6]]
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :bonfire, BonfireWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# Finally, import config/prod.secret.exs, which loads secrets and
# configuration from environment variables.
# NOTE(review): the `import_config "prod.secret.exs"` line this comment refers
# to is not present in this file — confirm it was not accidentally dropped.
| 36 | 66 | 0.710526 |
e80e73d3d7910ab58c329a10c973492139cb41a9 | 1,583 | ex | Elixir | clients/custom_search/lib/google_api/custom_search/v1/model/context_facets.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/custom_search/lib/google_api/custom_search/v1/model/context_facets.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/custom_search/lib/google_api/custom_search/v1/model/context_facets.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
# Auto-generated API model (see header above) — regenerate rather than edit by hand.
defmodule GoogleApi.CustomSearch.V1.Model.ContextFacets do
  @moduledoc """

  ## Attributes

  * `anchor` (*type:* `String.t`, *default:* `nil`) -
  * `label` (*type:* `String.t`, *default:* `nil`) -
  * `label_with_op` (*type:* `String.t`, *default:* `nil`) -
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :anchor => String.t(),
          :label => String.t(),
          :label_with_op => String.t()
        }

  field(:anchor)
  field(:label)
  field(:label_with_op)
end
# JSON decoding delegates to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.CustomSearch.V1.Model.ContextFacets do
  def decode(value, options) do
    GoogleApi.CustomSearch.V1.Model.ContextFacets.decode(value, options)
  end
end
# JSON encoding delegates to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.CustomSearch.V1.Model.ContextFacets do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 29.867925 | 77 | 0.7012 |
e80ea08ebc20945207d317fbe8bae5d2f2d13e7c | 1,637 | exs | Elixir | test/membrane/core/element/lifecycle_controller_test.exs | membraneframework/membrane-core | 096c2546869824c49ad1d7412ffe02d050164611 | [
"Apache-2.0"
] | 515 | 2018-06-18T11:09:44.000Z | 2020-07-31T07:54:35.000Z | test/membrane/core/element/lifecycle_controller_test.exs | membraneframework/membrane-core | 096c2546869824c49ad1d7412ffe02d050164611 | [
"Apache-2.0"
] | 120 | 2018-06-07T08:34:06.000Z | 2020-07-30T07:09:50.000Z | test/membrane/core/element/lifecycle_controller_test.exs | membraneframework/membrane-core | 096c2546869824c49ad1d7412ffe02d050164611 | [
"Apache-2.0"
] | 13 | 2018-07-27T11:58:15.000Z | 2020-05-06T15:19:55.000Z | defmodule Membrane.Core.Element.LifecycleControllerTest do
use ExUnit.Case
alias Membrane.Core.Element.{InputQueue, LifecycleController, State}
alias Membrane.Core.Message
require Membrane.Core.Message
  # Minimal filter element used as the module under test's `module` field.
  defmodule DummyElement do
    use Membrane.Filter
    def_output_pad :output, caps: :any
  end
  # Builds an element State in :playing with one pull-mode input pad whose
  # stream has started but not ended — the precondition for the test below.
  setup do
    input_queue =
      InputQueue.init(%{
        demand_unit: :buffers,
        demand_pid: self(),
        demand_pad: :some_pad,
        log_tag: "test",
        toilet?: false,
        target_size: nil,
        min_demand_factor: nil
      })

    state =
      %{
        State.new(%{
          module: DummyElement,
          name: :test_name,
          parent_clock: nil,
          sync: nil,
          parent: self()
        })
        | type: :filter,
          pads_data: %{
            input:
              struct(Membrane.Element.PadData,
                ref: :input,
                accepted_caps: :any,
                direction: :input,
                pid: self(),
                mode: :pull,
                start_of_stream?: true,
                end_of_stream?: false,
                input_queue: input_queue,
                demand: 0
              )
          }
      }
      |> Bunch.Struct.put_in([:playback, :state], :playing)

    # InputQueue.init/1 sends an initial demand to demand_pid (this process);
    # consume it so it doesn't leak into test assertions.
    assert_received Message.new(:demand, _size, for_pad: :some_pad)

    [state: state]
  end
  # Leaving :playing must mark end_of_stream? on pads where the stream started.
  test "End of stream is generated when playback state changes from :playing to :prepared", %{
    state: state
  } do
    {:ok, state} = LifecycleController.handle_playback_state(:playing, :prepared, state)
    assert state.pads_data.input.end_of_stream?
  end
end
| 25.578125 | 94 | 0.564447 |
e80eaf2b5b2a3a0561bb86031c12022e9976d36c | 837 | ex | Elixir | apps/hefty/lib/hefty/repo/binance/order.ex | Cinderella-Man/crypto-streamer | b1e990d375f7143c5149930be991249f0d9c3ee3 | [
"MIT"
] | 49 | 2019-10-28T22:27:28.000Z | 2021-10-11T06:40:29.000Z | apps/hefty/lib/hefty/repo/binance/order.ex | Cinderella-Man/crypto-streamer | b1e990d375f7143c5149930be991249f0d9c3ee3 | [
"MIT"
] | 9 | 2019-08-30T13:15:36.000Z | 2019-10-10T21:25:14.000Z | apps/hefty/lib/hefty/repo/binance/order.ex | Cinderella-Man/crypto-streamer | b1e990d375f7143c5149930be991249f0d9c3ee3 | [
"MIT"
] | 7 | 2019-10-31T06:19:26.000Z | 2021-09-30T04:20:58.000Z | defmodule Hefty.Repo.Binance.Order do
use Ecto.Schema
  # UUID primary key, generated by the database adapter.
  @primary_key {:id, :binary_id, autogenerate: true}

  schema "orders" do
    # belongs_to(:symbol, Hefty.Repo.Binance.Pair, foreign_key: :symbol_id, type: :binary_id)
    field(:order_id, :integer)
    field(:symbol, :string)
    field(:client_order_id, :string)
    # Price/quantity fields are stored as strings — presumably to preserve the
    # exact decimal representation from the exchange API; confirm before changing.
    field(:price, :string)
    field(:original_quantity, :string)
    field(:executed_quantity, :string)
    field(:cummulative_quote_quantity, :string)
    field(:status, :string)
    field(:time_in_force, :string)
    field(:type, :string)
    field(:side, :string)
    field(:stop_price, :string)
    field(:iceberg_quantity, :string)
    # Exchange-provided timestamps, kept as raw integers.
    field(:time, :integer)
    field(:update_time, :integer)
    field(:strategy, :string)
    field(:trade_id, :integer)

    timestamps()
  end
  # NOTE(review): unimplemented stub — always returns nil and ignores `_id`.
  # Confirm whether this is intentional scaffolding or dead code to remove.
  def fetch(_id) do
  end
end
| 26.15625 | 93 | 0.677419 |
e80eb9eefa4775cddf36d1984aee364039c29fae | 3,527 | ex | Elixir | apps/exth_crypto/lib/cipher.ex | InoMurko/ethereum | 282ca2a23a897c5b9684ddf9abae2bf65691b039 | [
"MIT"
] | 22 | 2017-06-22T02:50:34.000Z | 2022-01-26T20:43:21.000Z | apps/exth_crypto/lib/cipher.ex | InoMurko/ethereum | 282ca2a23a897c5b9684ddf9abae2bf65691b039 | [
"MIT"
] | 9 | 2018-10-08T22:56:56.000Z | 2018-10-18T20:41:55.000Z | apps/exth_crypto/lib/cipher.ex | InoMurko/ethereum | 282ca2a23a897c5b9684ddf9abae2bf65691b039 | [
"MIT"
] | 5 | 2018-10-06T16:30:48.000Z | 2022-01-26T20:43:26.000Z | defmodule ExthCrypto.Cipher do
@moduledoc """
Module for symmetric encryption.
"""
@type mode :: :cbc | :ctr | :ecb
@type cipher :: {atom(), integer(), mode}
@type plaintext :: iodata()
@type ciphertext :: binary()
@type init_vector :: binary()
@type stream :: :crypto.stream_state()
  @doc """
  Encrypts the given plaintext for the given block cipher.

  ## Examples

      iex> ExthCrypto.Cipher.encrypt("execute order 66", ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector, {ExthCrypto.AES, ExthCrypto.AES.block_size, :cbc}) |> ExthCrypto.Math.bin_to_hex
      "4f0150273733727f994754fee054df7e18ec169892db5ba973cf8580b898651b"

      iex> ExthCrypto.Cipher.encrypt("execute order 66", ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector, {ExthCrypto.AES, ExthCrypto.AES.block_size, :ctr}) |> ExthCrypto.Math.bin_to_hex
      "2a7935444247175ff635309b9274e948"

      iex> ExthCrypto.Cipher.encrypt("execute order 66", ExthCrypto.Test.symmetric_key, {ExthCrypto.AES, ExthCrypto.AES.block_size, :ecb}) |> ExthCrypto.Math.bin_to_hex
      "a73c5576667b7b43a23a9fd930b5465d637a44d08bf702881a8d4e6a5d4944b5"
  """
  # Delegates to the cipher module (e.g. ExthCrypto.AES) for the given mode.
  @spec encrypt(plaintext, Key.symmetric_key(), init_vector, cipher) :: ciphertext
  def encrypt(plaintext, symmetric_key, init_vector, _cipher = {mod, _block_size, mode}) do
    mod.encrypt(plaintext, mode, symmetric_key, init_vector)
  end

  # Variant without an init vector (e.g. for ECB mode).
  @spec encrypt(plaintext, Key.symmetric_key(), cipher) :: ciphertext
  def encrypt(plaintext, symmetric_key, _cipher = {mod, _block_size, mode}) do
    mod.encrypt(plaintext, mode, symmetric_key)
  end
  @doc """
  Decrypts the given ciphertext from the given block cipher.

  ## Examples

      iex> "4f0150273733727f994754fee054df7e18ec169892db5ba973cf8580b898651b"
      ...> |> ExthCrypto.Math.hex_to_bin
      ...> |> ExthCrypto.Cipher.decrypt(ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector, {ExthCrypto.AES, ExthCrypto.AES.block_size, :cbc})
      <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>> <> "execute order 66"

      iex> "2a7935444247175ff635309b9274e948"
      ...> |> ExthCrypto.Math.hex_to_bin
      ...> |> ExthCrypto.Cipher.decrypt(ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector, {ExthCrypto.AES, ExthCrypto.AES.block_size, :ctr})
      "execute order 66"

      iex> "a73c5576667b7b43a23a9fd930b5465d637a44d08bf702881a8d4e6a5d4944b5"
      ...> |> ExthCrypto.Math.hex_to_bin
      ...> |> ExthCrypto.Cipher.decrypt(ExthCrypto.Test.symmetric_key, {ExthCrypto.AES, ExthCrypto.AES.block_size, :ecb})
      <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>> <> "execute order 66"
  """
  # Delegates to the cipher module for the given mode; CBC output retains the
  # leading IV-sized block (see first doctest).
  @spec decrypt(ciphertext, Key.symmetric_key(), init_vector, cipher) :: plaintext
  def decrypt(ciphertext, symmetric_key, init_vector, _cipher = {mod, _block_size, mode}) do
    mod.decrypt(ciphertext, mode, symmetric_key, init_vector)
  end

  # Variant without an init vector (e.g. for ECB mode).
  @spec decrypt(ciphertext, Key.symmetric_key(), cipher) :: plaintext
  def decrypt(ciphertext, symmetric_key, _cipher = {mod, _block_size, mode}) do
    mod.decrypt(ciphertext, mode, symmetric_key)
  end
  @doc """
  Generate a random initialization vector for the given type of cipher.

  ## Examples

      iex> ExthCrypto.Cipher.generate_init_vector(32) |> byte_size
      32

      iex> ExthCrypto.Cipher.generate_init_vector(32) == ExthCrypto.Cipher.generate_init_vector(32)
      false
  """
  # Uses the OS CSPRNG; suitable for cryptographic use.
  @spec generate_init_vector(non_neg_integer()) :: init_vector
  def generate_init_vector(block_size) do
    :crypto.strong_rand_bytes(block_size)
  end
end
| 42.493976 | 197 | 0.721009 |
e80ee3600e8402d430611da1edf234e038756393 | 1,381 | exs | Elixir | test/gateway/jwt/jwt_plug_test.exs | mmacai/reactive-interaction-gateway | edb9262c65b10a8a5dc21ebf326cf73638e97d36 | [
"Apache-2.0"
] | 1 | 2019-11-06T13:35:35.000Z | 2019-11-06T13:35:35.000Z | test/gateway/jwt/jwt_plug_test.exs | mmacai/reactive-interaction-gateway | edb9262c65b10a8a5dc21ebf326cf73638e97d36 | [
"Apache-2.0"
] | null | null | null | test/gateway/jwt/jwt_plug_test.exs | mmacai/reactive-interaction-gateway | edb9262c65b10a8a5dc21ebf326cf73638e97d36 | [
"Apache-2.0"
] | null | null | null | defmodule Gateway.JwtPlugTest do
@moduledoc false
use ExUnit.Case, async: true
use GatewayWeb.ConnCase
  # Each endpoint is exercised twice: once with a connection carrying the
  # required JWT scope (expects success) and once without (expects 403).

  test "should return 200 status for authorized /rg/sessions request" do
    conn =
      setup_conn(["getSessions"])
      |> get("/rg/sessions")

    assert response(conn, 200) =~ "[]"
  end

  test "should return 403 status for unauthorized /rg/sessions request" do
    conn =
      setup_conn()
      |> get("/rg/sessions")

    assert response(conn, 403) =~ "{\"msg\":\"Unauthorized\"}"
  end

  test "should return 200 status for authorized /rg/sessions/123 request" do
    conn =
      setup_conn(["getSessionConnections"])
      |> get("/rg/sessions/123")

    assert response(conn, 200) =~ "[]"
  end

  test "should return 403 status for unauthorized /rg/sessions/123 request" do
    conn =
      setup_conn()
      |> get("/rg/sessions/123")

    assert response(conn, 403) =~ "{\"msg\":\"Unauthorized\"}"
  end

  test "should return 204 status for authorized /rg/connections/abc123 request" do
    conn =
      setup_conn(["deleteConnection"])
      |> delete("/rg/connections/abc123")

    assert response(conn, 204) =~ "{}"
  end

  test "should return 403 status for unauthorized /rg/connections/abc123 request" do
    conn =
      setup_conn()
      |> delete("/rg/connections/abc123")

    assert response(conn, 403) =~ "{\"msg\":\"Unauthorized\"}"
  end
end
| 28.770833 | 84 | 0.644461 |
e80eecb8d42a2ca9a24f1e96a08d4dbafc8f657b | 74 | exs | Elixir | test/hexa_web/views/page_view_test.exs | libreearth/hexa | 81938c3a5abc710eb16055d73c43cbf60dbf487e | [
"MIT"
] | null | null | null | test/hexa_web/views/page_view_test.exs | libreearth/hexa | 81938c3a5abc710eb16055d73c43cbf60dbf487e | [
"MIT"
] | null | null | null | test/hexa_web/views/page_view_test.exs | libreearth/hexa | 81938c3a5abc710eb16055d73c43cbf60dbf487e | [
"MIT"
] | null | null | null | defmodule HexaWeb.PageViewTest do
use HexaWeb.ConnCase, async: true
end
| 18.5 | 35 | 0.810811 |
e80f059c235ac9617c9cdf05dd795776a37b3142 | 2,149 | ex | Elixir | lib/kvasir/client/consumer.ex | IanLuites/kvasir | fb8e577763bff0736c75d5edd227eaff570e64ea | [
"MIT"
] | 12 | 2019-11-28T10:58:51.000Z | 2022-02-08T18:15:12.000Z | lib/kvasir/client/consumer.ex | IanLuites/kvasir | fb8e577763bff0736c75d5edd227eaff570e64ea | [
"MIT"
] | null | null | null | lib/kvasir/client/consumer.ex | IanLuites/kvasir | fb8e577763bff0736c75d5edd227eaff570e64ea | [
"MIT"
] | null | null | null | defmodule Kvasir.Client.Consumer do
@moduledoc false
require Logger
  # Subscribes to `topic`, invoking `callback` with each decoded event batch
  # (and `:end` when the requested range is exhausted). See start_consume/4.
  def consume(config, topic, callback, opts) do
    start_consume(config, topic, callback, opts)
  end
  # Lazily streams decoded event batches from `topic` by bridging the
  # consumer callback into the calling process's mailbox.
  def stream(config, topic, opts) do
    Stream.resource(
      fn ->
        # The callback sends each batch (or :end) back to this process.
        streamer = self()
        start_consume(config, topic, &send(streamer, {:stream, &1}), opts)
      end,
      fn state ->
        # Blocks until the subscriber delivers the next batch; :end halts.
        receive do
          {:stream, :end} -> {:halt, state}
          {:stream, events} -> {events, state}
        end
      end,
      fn {:ok, pid, client} ->
        # Tear down both the subscriber and its dedicated brod client.
        :brod_topic_subscriber.stop(pid)
        :brod.stop_client(client)
      end
    )
  end
defp start_consume(config, topic, callback, opts) do
from = opts[:from] || :earliest
events = opts[:events] || []
consumerConfig = [
begin_offset: Kvasir.Offset.partition(from, 0),
offset_reset_policy: :reset_to_earliest
]
client = String.to_atom("Kvasir.Stream" <> to_string(:rand.uniform(10_000)))
:brod.start_client(config, client)
to =
case opts[:to] do
nil -> nil
offset when is_integer(offset) -> offset
time -> elem(Kvasir.Client.Info.offset(client, topic, time), 1)
end
{:ok, pid} =
:brod_topic_subscriber.start_link(
client,
topic,
:all,
consumerConfig,
[],
:message_set,
&handle_message/3,
%{topic: topic, callback: callback, to: to, events: events, client: client}
)
if is_integer(to) and (to < 0 or (is_integer(from) and to <= from)) do
callback.(:end)
end
{:ok, pid, client}
end
def handle_message(partition, message, state = %{callback: callback, to: to, topic: topic}) do
case Kvasir.Event.decode(message, partition: partition, topic: topic, encoding: :brod) do
{:ok, event} ->
callback.(event)
if to && List.last(event).__meta__.offset >= to do
callback.(:end)
end
{:ok, :ack, state}
{:error, reason} ->
Logger.error(fn -> "Kvasir: parse error (#{reason}), payload: #{inspect(message)}" end)
{:ok, :nack, state}
end
end
end
| 25.891566 | 96 | 0.58027 |
e80f0902a4b36a65dc14916e09d42f3e3106650a | 1,590 | exs | Elixir | apps/engine/test/engine/callback_test.exs | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 4 | 2020-11-30T17:38:57.000Z | 2021-01-23T21:29:41.000Z | apps/engine/test/engine/callback_test.exs | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 24 | 2020-11-30T17:32:48.000Z | 2021-02-22T06:25:22.000Z | apps/engine/test/engine/callback_test.exs | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | null | null | null | defmodule Engine.CallbackTest do
use Engine.DB.DataCase, async: true
alias Ecto.Multi
alias Engine.Callback
alias Engine.DB.ListenerState
  describe "update_listener_height/3" do
    test "it stores the listeners new height" do
      events = [build(:deposit_event, height: 100)]

      Multi.new()
      |> Callback.update_listener_height(events, :dog_listener)
      |> Repo.transaction()

      assert listener_for(:dog_listener, height: 100)
    end

    # The stored height must be the maximum across the event batch.
    test "it sets the height to the highest event" do
      events = [
        build(:deposit_event, height: 100),
        build(:deposit_event, height: 101),
        build(:deposit_event, height: 103)
      ]

      Multi.new()
      |> Callback.update_listener_height(events, :dog_listener)
      |> Repo.transaction()

      assert listener_for(:dog_listener, height: 103)
    end

    # A later batch with lower heights must not move the stored height backwards.
    test "does not update height if lower" do
      events = [build(:deposit_event, height: 100)]
      old_events = [build(:deposit_event, height: 1)]

      Multi.new()
      |> Callback.update_listener_height(events, :dog_listener)
      |> Repo.transaction()

      Multi.new()
      |> Callback.update_listener_height(old_events, :dog_listener)
      |> Repo.transaction()

      assert listener_for(:dog_listener, height: 100)
    end
  end
  # Checks that the listener row has the given height, e.g.:
  #   assert listener_for(:depositor, height: 100)
  # Works by pattern-matching the fetched record — a mismatch raises MatchError,
  # which fails the test; on success the (truthy) struct is returned.
  defp listener_for(listener, height: height) do
    name = "#{listener}"
    %ListenerState{height: ^height, listener: ^name} = Engine.Repo.get(ListenerState, name)
  end
end
| 28.392857 | 91 | 0.669811 |
e80f1259dd02b240d849a79710e92a6209ef3b1c | 1,880 | ex | Elixir | lib/brando/traits/villain.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 4 | 2020-10-30T08:40:38.000Z | 2022-01-07T22:21:37.000Z | lib/brando/traits/villain.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 1,162 | 2020-07-05T11:20:15.000Z | 2022-03-31T06:01:49.000Z | lib/brando/traits/villain.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | null | null | null | defmodule Brando.Trait.Villain do
@moduledoc """
Villain parsing
"""
use Brando.Trait
alias Brando.Exception.ConfigError
alias Ecto.Changeset
alias Brando.Blueprint.Attributes
@type changeset :: Changeset.t()
@type config :: list()
  @impl true
  # Derives a companion `:text` attribute holding rendered HTML for every
  # `:villain` attribute: `:data` -> `:html`, `:foo_data` -> `:foo_html`.
  def trait_attributes(attributes, _assets, _relations) do
    attributes
    |> Enum.filter(&(&1.type == :villain))
    |> Enum.map(fn
      %{name: :data} ->
        Attributes.build_attr(:html, :text, [])

      %{name: data_name} ->
        # Atom creation is bounded by the schema's declared fields, so
        # String.to_atom/1 is safe here.
        data_name
        |> to_string
        |> String.replace("_data", "_html")
        |> String.to_atom()
        |> Attributes.build_attr(:text, [])
    end)
  end
  @impl true
  # Compile-time sanity check: a schema using this trait must declare at least
  # one attribute of type :villain, otherwise configuration is broken.
  def validate(module, _config) do
    if module.__villain_fields__ == [] do
      raise ConfigError,
        message: """
        Resource `#{inspect(module)}` is declaring Brando.Trait.Villain, but there are no attributes of type `:villain` found.

            attributes do
              attribute :data, :villain
            end
        """
    end

    true
  end
  @doc """
  Generate HTML

  Casts all villain (polymorphic embed) fields on the changeset and, unless
  skipped, renders each field's HTML into its companion `_html` attribute.
  """
  @impl true
  # NOTE(review): this clause matches only when opts is exactly
  # `[skip_villain: true]` — extra options would fall through to the clause
  # below. Confirm callers always pass it alone.
  def changeset_mutator(module, _config, changeset, _user, skip_villain: true) do
    cast_poly(changeset, module.__villain_fields__())
  end

  def changeset_mutator(module, _config, changeset, _user, _opts) do
    case cast_poly(changeset, module.__villain_fields__()) do
      # Only generate HTML when the cast succeeded; otherwise return the
      # invalid changeset untouched so errors surface to the caller.
      %{valid?: true} = casted_changeset ->
        Enum.reduce(module.__villain_fields__(), casted_changeset, fn vf, mutated_changeset ->
          Brando.Villain.Schema.generate_html(mutated_changeset, vf.name)
        end)

      casted_changeset ->
        casted_changeset
    end
  end
  # Casts each villain field as a polymorphic embed on the changeset.
  defp cast_poly(changeset, villain_fields) do
    Enum.reduce(villain_fields, changeset, fn vf, mutated_changeset ->
      PolymorphicEmbed.cast_polymorphic_embed(mutated_changeset, vf.name)
    end)
  end
end
| 26.111111 | 126 | 0.65 |
e80f17e31e9ddd86974f5ed6c592856ae1680ae6 | 783 | exs | Elixir | test/langue/json/formatter_test.exs | suryatmodulus/accent | 6aaf34075c33f3d9d84d38237af4a39b594eb808 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | test/langue/json/formatter_test.exs | suryatmodulus/accent | 6aaf34075c33f3d9d84d38237af4a39b594eb808 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | test/langue/json/formatter_test.exs | doc-ai/accent | e337e16f3658cc0728364f952c0d9c13710ebb06 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule LangueTest.Formatter.Json do
use ExUnit.Case, async: true
Code.require_file("expectation_test.exs", __DIR__)
alias Langue.Formatter.Json
  # Expectation fixture modules under LangueTest.Formatter.Json.Expectation;
  # each one defines the input/output pairs for parse and serialize.
  @tests [
    Array,
    Empty,
    NilValue,
    EmptyValue,
    BooleanValue,
    IntegerValue,
    FloatValue,
    Simple,
    Nested,
    Complexe,
    PlaceholderValues
  ]

  # Generates one round-trip test per fixture at compile time.
  for test <- @tests, module = Module.concat(LangueTest.Formatter.Json.Expectation, test) do
    test "json #{test}" do
      {expected_parse, result_parse} = Accent.FormatterTestHelper.test_parse(unquote(module), Json)
      {expected_serialize, result_serialize} = Accent.FormatterTestHelper.test_serialize(unquote(module), Json)

      assert expected_parse == result_parse
      assert expected_serialize == result_serialize
    end
  end
end
| 24.46875 | 111 | 0.713921 |
e80f34d7a6f165854ee2505a7877fcbe7ca04dc2 | 632 | ex | Elixir | lib/kaffe/partition_selector.ex | jgaviria/kaffe | f7d54086bb7062c62cb012a1f73359843a0a625b | [
"MIT"
] | 130 | 2017-03-31T14:44:34.000Z | 2022-02-14T21:16:40.000Z | lib/kaffe/partition_selector.ex | jgaviria/kaffe | f7d54086bb7062c62cb012a1f73359843a0a625b | [
"MIT"
] | 87 | 2017-04-04T19:47:05.000Z | 2022-03-21T05:27:09.000Z | lib/kaffe/partition_selector.ex | jgaviria/kaffe | f7d54086bb7062c62cb012a1f73359843a0a625b | [
"MIT"
] | 56 | 2017-06-24T13:24:27.000Z | 2022-03-14T12:10:01.000Z | defmodule Kaffe.PartitionSelector do
@doc """
Cycle current from 0 to total-1.
## Examples
iex> Kaffe.PartitionSelector.round_robin(nil, 3)
0
iex> Kaffe.PartitionSelector.round_robin(0, 3)
1
iex> Kaffe.PartitionSelector.round_robin(1, 3)
2
iex> Kaffe.PartitionSelector.round_robin(2, 3)
0
"""
def round_robin(current, total) do
if current < total - 1 do
current + 1
else
0
end
end
def random(total) do
:crypto.rand_uniform(0, total)
end
def md5(key, total) do
:crypto.hash(:md5, key)
|> :binary.bin_to_list()
|> Enum.sum()
|> rem(total)
end
end
| 16.631579 | 50 | 0.639241 |
e80f5b3b8011c7a04e7b93f3ca8fc99b4965ab6f | 2,501 | exs | Elixir | test/phoenix/config_test.exs | benjamintanweihao/phoenix | eb4ef03852f447d67cd61355753147c39b520e1f | [
"MIT"
] | null | null | null | test/phoenix/config_test.exs | benjamintanweihao/phoenix | eb4ef03852f447d67cd61355753147c39b520e1f | [
"MIT"
] | null | null | null | test/phoenix/config_test.exs | benjamintanweihao/phoenix | eb4ef03852f447d67cd61355753147c39b520e1f | [
"MIT"
] | null | null | null | defmodule Phoenix.ConfigTest do
use ExUnit.Case, async: true
import Phoenix.Config
  # Seeds application env for :config_app, keyed by the test name so each
  # test reads its own isolated configuration.
  setup meta do
    config = [parsers: false, custom: true, otp_app: :phoenix_config]
    Application.put_env(:config_app, meta.test, config)
    :ok
  end

  # Defaults merged into the config when starting the config server.
  @defaults [static: [at: "/"]]
  # Env config overrides the passed defaults; unknown apps fall back to defaults.
  test "reads configuration from env", meta do
    config = from_env(:config_app, meta.test, [static: true])
    assert config[:parsers] == false
    assert config[:custom] == true
    assert config[:static] == true

    assert from_env(:unknown_app, meta.test, [static: true]) ==
             [static: true]
  end
  # The config server backs each module with a named ETS table that is
  # removed when the server stops.
  test "starts an ets table as part of the module", meta do
    {:ok, _pid} = start_link(:config_app, meta.test, @defaults)
    assert :ets.info(meta.test, :name) == meta.test
    assert :ets.lookup(meta.test, :parsers) == [parsers: false]
    assert :ets.lookup(meta.test, :static) == [static: [at: "/"]]
    assert :ets.lookup(meta.test, :custom) == [custom: true]

    assert stop(meta.test) == :ok
    assert :ets.info(meta.test, :name) == :undefined
  end
  # config_change/3 replaces (not merges) the module's config: keys absent
  # from the new config are dropped, and removing the module kills the table.
  test "can change configuration", meta do
    {:ok, _pid} = start_link(:config_app, meta.test, @defaults)

    # Nothing changed
    config_change(meta.test, [], [])
    assert :ets.lookup(meta.test, :parsers) == [parsers: false]
    assert :ets.lookup(meta.test, :static) == [static: [at: "/"]]
    assert :ets.lookup(meta.test, :custom) == [custom: true]

    # Something changed
    config_change(meta.test, [{meta.test, parsers: true}], [])
    assert :ets.lookup(meta.test, :parsers) == [parsers: true]
    assert :ets.lookup(meta.test, :static) == [static: [at: "/"]]
    assert :ets.lookup(meta.test, :custom) == []

    # Module removed
    config_change(meta.test, [], [meta.test])
    assert :ets.info(meta.test, :name) == :undefined
  end
  # cache/3: {:cache, v} stores v for subsequent calls; {:stale, v} returns v
  # without storing it. The cache is cleared by config_change/3.
  test "can cache", meta do
    {:ok, _pid} = start_link(:config_app, meta.test, @defaults)

    assert cache(meta.test, :__hello__, fn _ -> {:stale, 1} end) == 1
    assert cache(meta.test, :__hello__, fn _ -> {:cache, 2} end) == 2
    assert cache(meta.test, :__hello__, fn _ -> {:cache, 3} end) == 2
    assert cache(meta.test, :__hello__, fn _ -> {:stale, 3} end) == 2

    # Cache is reloaded on config_change
    config_change(meta.test, [{meta.test, []}], [])

    assert cache(meta.test, :__hello__, fn _ -> {:stale, 4} end) == 4
    assert cache(meta.test, :__hello__, fn _ -> {:cache, 5} end) == 5
    assert cache(meta.test, :__hello__, fn _ -> {:cache, 6} end) == 5
  end
end
| 36.246377 | 69 | 0.626549 |
e80f5f0cdfa1881e1ebe862492d969a520abc640 | 1,359 | ex | Elixir | lib/blue_jet/app/goods.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | lib/blue_jet/app/goods.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | lib/blue_jet/app/goods.ex | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z | defmodule BlueJet.Goods do
use BlueJet, :context
alias BlueJet.Goods.{Policy, Service}
  # Thin API layer: each action delegates to default/5 (from `use BlueJet,
  # :context`), which runs the request through Policy then Service.

  # Stockables
  def list_stockable(req), do: default(req, :list, :stockable, Policy, Service)
  def create_stockable(req), do: default(req, :create, :stockable, Policy, Service)
  def get_stockable(req), do: default(req, :get, :stockable, Policy, Service)
  def update_stockable(req), do: default(req, :update, :stockable, Policy, Service)
  def delete_stockable(req), do: default(req, :delete, :stockable, Policy, Service)

  # Unlockables
  def list_unlockable(req), do: default(req, :list, :unlockable, Policy, Service)
  def create_unlockable(req), do: default(req, :create, :unlockable, Policy, Service)
  def get_unlockable(req), do: default(req, :get, :unlockable, Policy, Service)
  def update_unlockable(req), do: default(req, :update, :unlockable, Policy, Service)
  def delete_unlockable(req), do: default(req, :delete, :unlockable, Policy, Service)

  # Depositables
  def list_depositable(req), do: default(req, :list, :depositable, Policy, Service)
  def create_depositable(req), do: default(req, :create, :depositable, Policy, Service)
  def get_depositable(req), do: default(req, :get, :depositable, Policy, Service)
  def update_depositable(req), do: default(req, :update, :depositable, Policy, Service)
  def delete_depositable(req), do: default(req, :delete, :depositable, Policy, Service)
end
| 56.625 | 87 | 0.738043 |
e80f6ff858e4889c4b0b9916becff925002a329a | 404 | ex | Elixir | apps/esperanto/lib/trybe/esperanto/parsers/bold.ex | betrybe/esperanto | 65e40c28914397ef77f5d2c4c29001330beba19f | [
"Apache-2.0"
] | 6 | 2021-07-19T20:00:22.000Z | 2021-11-03T03:27:40.000Z | apps/esperanto/lib/trybe/esperanto/parsers/bold.ex | betrybe/esperanto | 65e40c28914397ef77f5d2c4c29001330beba19f | [
"Apache-2.0"
] | null | null | null | apps/esperanto/lib/trybe/esperanto/parsers/bold.ex | betrybe/esperanto | 65e40c28914397ef77f5d2c4c29001330beba19f | [
"Apache-2.0"
] | null | null | null | defmodule Esperanto.Parsers.Bold do
  @moduledoc """
  Parses bold text delimited by `**`, wrapping the content in a `:strong` tag.
  """

  defmodule BoldBarrier do
    @moduledoc """
    Barrier that stops the bold parser at the closing `**` delimiter.
    """
    use Esperanto.Barriers.RegexBarrier, delimiter: ~r/^\*\*/
  end

  # Everything between the opening `**` and the `BoldBarrier` match is
  # enclosed in a `:strong` element by the generic enclosing-tag parser.
  use Esperanto.Parsers.Generics.EnclosingTag,
    start_delimiter: ~r/^\*\*/,
    barrier: BoldBarrier,
    enclosing_tag: :strong
end
| 22.444444 | 61 | 0.660891 |
e80f77b1037fb0abd2bfd9543575174fccaefa64 | 1,019 | ex | Elixir | lib/membrane/buffer/metric/byte_size.ex | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | lib/membrane/buffer/metric/byte_size.ex | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | lib/membrane/buffer/metric/byte_size.ex | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.Buffer.Metric.ByteSize do
@moduledoc """
Implementation of `Membrane.Buffer.Metric` for the `:bytes` unit
"""
@behaviour Membrane.Buffer.Metric
alias Membrane.{Buffer, Payload}
  @impl true
  # Preferred input buffer size for the `:bytes` unit (64 KiB).
  def input_buf_preferred_size, do: 65_536

  @impl true
  # Total size, in bytes, of all buffer payloads in the list.
  def buffers_size(buffers),
    do: buffers |> Enum.reduce(0, fn %Buffer{payload: p}, acc -> acc + Payload.size(p) end)

  @impl true
  # Splits the buffer list so the first part holds exactly `count` bytes;
  # a buffer straddling the boundary is itself split in two.
  def split_buffers(buffers, count), do: do_split_buffers(buffers, count, [])

  # Base case: requested byte count reached (or no buffers left). `acc` was
  # built by prepending, so reverse it to restore the original order.
  defp do_split_buffers(buffers, at_pos, acc) when at_pos == 0 or buffers == [] do
    {acc |> Enum.reverse(), buffers}
  end

  defp do_split_buffers([%Buffer{payload: p} = buf | rest], at_pos, acc) when at_pos > 0 do
    if at_pos < Payload.size(p) do
      # The boundary falls inside this buffer's payload: split the payload
      # and emit two buffers that share the original buffer's metadata.
      {p1, p2} = Payload.split_at(p, at_pos)
      acc = [%Buffer{buf | payload: p1} | acc] |> Enum.reverse()
      rest = [%Buffer{buf | payload: p2} | rest]
      {acc, rest}
    else
      # The whole buffer fits before the boundary: consume it and continue.
      do_split_buffers(rest, at_pos - Payload.size(p), [buf | acc])
    end
  end
end
| 29.114286 | 91 | 0.655545 |
e80f877bf935a02d57a3885b8b4dfb7c206d6420 | 2,402 | ex | Elixir | lib/faker/date.ex | secoint/faker | 36d0a1a38fd4dc5a53e732e16223e64eb54ff305 | [
"MIT"
] | 1 | 2019-02-11T20:46:52.000Z | 2019-02-11T20:46:52.000Z | lib/faker/date.ex | echenim/faker | 15172b7d9c2b7711173a5faf3e45bfc4e45d6a97 | [
"MIT"
] | 40 | 2019-10-26T09:24:17.000Z | 2021-08-03T04:29:01.000Z | lib/faker/date.ex | echenim/faker | 15172b7d9c2b7711173a5faf3e45bfc4e45d6a97 | [
"MIT"
] | null | null | null | defmodule Faker.Date do
import Faker.Util, only: [pick: 1]
@moduledoc """
Functions for generating dates
"""
  @doc """
  Returns a random date of birth for a person with an age specified by a number or range
  """
  @spec date_of_birth(integer | Range.t()) :: Date.t()
  def date_of_birth(age_or_range \\ 18..99)

  def date_of_birth(age) when is_integer(age) do
    {{year_now, month_now, day_now}, _time} = :calendar.local_time()

    # A person who is exactly `age` today was born between
    # (today - age - 1 years + 1 day) and (today - age years), inclusive.
    earliest_year = year_now - (age + 1)
    potential_earliest_date = {earliest_year, month_now, day_now + 1}
    potential_latest_date = {earliest_year + 1, month_now, day_now}

    # `day_now + 1` (or the same day in another year) may not exist in that
    # month — e.g. Feb 29 in a non-leap year; fall back to fixed safe dates
    # (Mar 1 / Feb 28) when the computed tuple is not a valid calendar date.
    earliest_date =
      if :calendar.valid_date(potential_earliest_date),
        do: {earliest_year, month_now, day_now + 1},
        else: {earliest_year, 3, 1}

    latest_date =
      if :calendar.valid_date(potential_latest_date),
        do: {earliest_year + 1, month_now, day_now},
        else: {earliest_year + 1, 2, 28}

    # Pick a uniformly random second within the valid birth window, then
    # convert it back into a date.
    earliest_as_seconds = :calendar.datetime_to_gregorian_seconds({earliest_date, {0, 0, 0}})
    lastest_as_seconds = :calendar.datetime_to_gregorian_seconds({latest_date, {23, 59, 59}})

    {chosen_date, _time} =
      earliest_as_seconds..lastest_as_seconds
      |> pick()
      |> :calendar.gregorian_seconds_to_datetime()

    {:ok, result} = Date.from_erl(chosen_date)
    result
  end

  # Range form: pick a concrete age from the range, then reuse the integer
  # clause above.
  def date_of_birth(age_range) do
    age_range
    |> pick()
    |> date_of_birth()
  end
@doc """
Returns a random date in the past up to N days, today not included
"""
@spec backward(integer) :: Date.t()
def backward(days) do
forward(-days)
end
@doc """
Returns a random date in the future up to N days, today not included
"""
@spec forward(integer) :: Date.t()
def forward(days) do
days
|> Faker.DateTime.forward()
|> DateTime.to_date()
end
@doc """
Returns a random date between two dates
## Examples
iex> Faker.Date.between(~D[2010-12-10], ~D[2016-12-25])
~D[2013-06-07]
iex> Faker.Date.between(~D[2000-12-20], ~D[2000-12-25])
~D[2000-12-20]
iex> Faker.Date.between(~D[2000-02-02], ~D[2016-02-05])
~D[2014-10-23]
iex> Faker.Date.between(~D[2010-12-20], ~D[2010-12-25])
~D[2010-12-21]
"""
@spec between(Date.t(), Date.t()) :: Date.t()
def between(from, to) do
from
|> Faker.DateTime.between(to)
|> DateTime.to_date()
end
end
| 26.988764 | 93 | 0.64363 |
e80f956cbc3a6b5c4a4b3f418e7724c6099ba15e | 947 | ex | Elixir | lib/phoenix_toggl/application.ex | arashm/phoenix-toggl | 3a7b8470c48efdbe84bd9dc29403aea851e90986 | [
"MIT"
] | null | null | null | lib/phoenix_toggl/application.ex | arashm/phoenix-toggl | 3a7b8470c48efdbe84bd9dc29403aea851e90986 | [
"MIT"
] | null | null | null | lib/phoenix_toggl/application.ex | arashm/phoenix-toggl | 3a7b8470c48efdbe84bd9dc29403aea851e90986 | [
"MIT"
] | 1 | 2020-02-14T20:31:58.000Z | 2020-02-14T20:31:58.000Z | defmodule PhoenixToggl.Application do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the Ecto repository
supervisor(PhoenixToggl.Repo, []),
# Start the endpoint when the application starts
supervisor(PhoenixToggl.Web.Endpoint, []),
# Start your own worker by calling: PhoenixToggl.Worker.start_link(arg1, arg2, arg3)
# worker(PhoenixToggl.Worker, [arg1, arg2, arg3]),
supervisor(PhoenixToggl.TimeBoundries.TimerMonitor.Supervisor, []),
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: PhoenixToggl.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 36.423077 | 90 | 0.721225 |
e80fc8e726c6730e2dcb9d8cbcd2d7df3bfceb6a | 1,847 | exs | Elixir | mix.exs | rxndxm/arangox_ecto | 78ee4cbdff38efdacc0e29049caa799c85bd08e0 | [
"Apache-2.0"
] | null | null | null | mix.exs | rxndxm/arangox_ecto | 78ee4cbdff38efdacc0e29049caa799c85bd08e0 | [
"Apache-2.0"
] | null | null | null | mix.exs | rxndxm/arangox_ecto | 78ee4cbdff38efdacc0e29049caa799c85bd08e0 | [
"Apache-2.0"
] | null | null | null | defmodule ArangoXEcto.MixProject do
use Mix.Project
@version "1.0.0"
@source_url "https://github.com/TomGrozev/arangox_ecto"
def project do
[
app: :arangox_ecto,
version: @version,
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps(),
# Hex
description: "An ArangoDB adapter for Ecto supporting Ecto queries and graph queries.",
package: package(),
# Docs
name: "ArangoX Ecto",
docs: docs()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger],
mod: [],
env: [
log_levels: [:info],
log_colours: %{info: :green, debug: :normal},
log_in_colour: System.get_env("MIX_ENV") == "dev"
]
]
end
# Run "mix help deps" to learn about dependencies.
  # Project dependencies. Ecto may be swapped for a local checkout via the
  # ECTO_PATH environment variable (see `ecto_dep/0`).
  defp deps do
    [
      ecto_dep(),
      {:arangox, "~> 0.5"},
      {:velocy, "~> 0.1"},
      {:jason, "~> 1.2"},
      {:geo, "~> 3.0"},
      # Dev/test-only tooling (docs, lint, doc coverage, git hooks)
      {:ex_doc, "~> 0.24", only: :dev, runtime: false},
      {:credo, "~> 1.5.0", only: [:dev, :test], runtime: false},
      {:doctor, "~> 0.18.0", only: [:dev, :test], runtime: false},
      {:git_hooks, "~> 0.6.0", only: [:test, :dev], runtime: false}
    ]
  end
defp ecto_dep do
if path = System.get_env("ECTO_PATH") do
{:ecto, path: path}
else
{:ecto, "~> 3.6"}
end
end
defp package do
[
maintainers: ["Tom Grozev"],
licenses: ["Apache-2.0"],
links: %{"GitHub" => @source_url},
files: ~w(.formatter.exs mix.exs README.md lib)
]
end
defp docs do
[
main: "readme",
logo: "images/logo.png",
extras: ["README.md"],
source_ref: "v#{@version}",
canonical: "http://hex.pm/arangox_ecto",
source_url: @source_url
]
end
end
| 23.379747 | 93 | 0.538711 |
e80ff1042f56aa6d658e1db31756f638267c3970 | 13,292 | exs | Elixir | test/absinthe/schema/notation_test.exs | hauptbenutzer/absinthe | f1d76e5c249ca337187971b0c96b1870f53716ed | [
"MIT"
] | null | null | null | test/absinthe/schema/notation_test.exs | hauptbenutzer/absinthe | f1d76e5c249ca337187971b0c96b1870f53716ed | [
"MIT"
] | null | null | null | test/absinthe/schema/notation_test.exs | hauptbenutzer/absinthe | f1d76e5c249ca337187971b0c96b1870f53716ed | [
"MIT"
] | 1 | 2018-11-16T02:34:40.000Z | 2018-11-16T02:34:40.000Z | defmodule Absinthe.Schema.NotationTest do
use Absinthe.Case, async: true
@moduletag :pending_schema
describe "arg" do
test "can be under field as an attribute" do
assert_no_notation_error("ArgFieldValid", """
object :foo do
field :picture, :string do
arg :size, :integer
end
end
""")
end
test "can be under directive as an attribute" do
assert_no_notation_error("ArgDirectiveValid", """
directive :test do
arg :if, :boolean
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"ArgToplevelInvalid",
"""
arg :name, :string
""",
"Invalid schema notation: `arg` must only be used within `directive`, `field`"
)
end
end
describe "directive" do
test "can be toplevel" do
assert_no_notation_error("DirectiveValid", """
directive :foo do
end
""")
end
test "cannot be non-toplevel" do
assert_notation_error(
"DirectiveInvalid",
"""
directive :foo do
directive :bar do
end
end
""",
"Invalid schema notation: `directive` must only be used toplevel"
)
end
end
describe "enum" do
test "can be toplevel" do
assert_no_notation_error("EnumValid", """
enum :foo do
end
""")
end
test "cannot be non-toplevel" do
assert_notation_error(
"EnumInvalid",
"""
enum :foo do
enum :bar do
end
end
""",
"Invalid schema notation: `enum` must only be used toplevel"
)
end
end
describe "field" do
test "can be under object as an attribute" do
assert_no_notation_error("FieldObjectValid", """
object :bar do
field :name, :string
end
""")
end
test "can be under input_object as an attribute" do
assert_no_notation_error("FieldInputObjectValid", """
input_object :bar do
field :name, :string
end
""")
end
test "can be under interface as an attribute" do
assert_no_notation_error("FieldInterfaceValid", """
interface :bar do
field :name, :string
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"FieldToplevelInvalid",
"""
field :foo, :string
""",
"Invalid schema notation: `field` must only be used within `input_object`, `interface`, `object`"
)
end
end
describe "input_object" do
test "can be toplevel" do
assert_no_notation_error("InputObjectValid", """
input_object :foo do
end
""")
end
test "cannot be non-toplevel" do
assert_notation_error(
"InputObjectInvalid",
"""
input_object :foo do
input_object :bar do
end
end
""",
"Invalid schema notation: `input_object` must only be used toplevel"
)
end
end
describe "expand" do
test "can be under directive as an attribute" do
assert_no_notation_error("InstructionValid", """
directive :bar do
expand fn _, _ -> :ok end
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"InstructionToplevelInvalid",
"""
expand fn _, _ -> :ok end
""",
"Invalid schema notation: `instruction` must only be used within `directive`"
)
end
test "cannot be within object" do
assert_notation_error(
"InstructionObjectInvalid",
"""
object :foo do
expand fn _, _ -> :ok end
end
""",
"Invalid schema notation: `instruction` must only be used within `directive`"
)
end
end
describe "interface" do
test "can be toplevel" do
assert_no_notation_error("InterfaceToplevelValid", """
interface :foo do
field :name, :string
resolve_type fn _, _ -> :bar end
end
""")
end
test "can be under object as an attribute" do
assert_no_notation_error("InterfaceObjectValid", """
interface :foo do
field :name, :string
resolve_type fn _, _ -> :bar end
end
object :bar do
interface :foo
field :name, :string
end
""")
end
test "cannot be under input_object as an attribute" do
assert_notation_error(
"InterfaceInputObjectInvalid",
"""
interface :foo do
field :name, :string
resolve_type fn _, _ -> :bar end
end
input_object :bar do
interface :foo
end
""",
"Invalid schema notation: `interface` (as an attribute) must only be used within `object`"
)
end
end
describe "interfaces" do
test "can be under object as an attribute" do
assert_no_notation_error("InterfacesValid", """
interface :bar do
field :name, :string
resolve_type fn _, _ -> :foo end
end
object :foo do
field :name, :string
interfaces [:bar]
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"InterfacesInvalid",
"""
interface :bar do
field :name, :string
end
interfaces [:bar]
""",
"Invalid schema notation: `interfaces` must only be used within `object`"
)
end
end
describe "is_type_of" do
test "can be under object as an attribute" do
assert_no_notation_error("IsTypeOfValid", """
object :bar do
is_type_of fn _, _ -> true end
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"IsTypeOfToplevelInvalid",
"""
is_type_of fn _, _ -> true end
""",
"Invalid schema notation: `is_type_of` must only be used within `object`"
)
end
test "cannot be within interface" do
assert_notation_error(
"IsTypeOfInterfaceInvalid",
"""
interface :foo do
is_type_of fn _, _ -> :bar end
end
""",
"Invalid schema notation: `is_type_of` must only be used within `object`"
)
end
end
describe "object" do
test "can be toplevel" do
assert_no_notation_error("ObjectValid", """
object :foo do
end
""")
end
test "cannot be non-toplevel" do
assert_notation_error(
"ObjectInvalid",
"""
object :foo do
object :bar do
end
end
""",
"Invalid schema notation: `object` must only be used toplevel"
)
end
test "cannot use reserved identifiers" do
assert_notation_error(
"ReservedIdentifierSubscription",
"""
object :subscription do
end
""",
"Invalid schema notation: cannot create an `object` with reserved identifier `subscription`"
)
assert_notation_error(
"ReservedIdentifierQuery",
"""
object :query do
end
""",
"Invalid schema notation: cannot create an `object` with reserved identifier `query`"
)
assert_notation_error(
"ReservedIdentifierMutation",
"""
object :mutation do
end
""",
"Invalid schema notation: cannot create an `object` with reserved identifier `mutation`"
)
end
end
describe "on" do
test "can be under directive as an attribute" do
assert_no_notation_error("OnValid", """
directive :foo do
on [Foo, Bar]
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"OnInvalid",
"""
on [Foo, Bar]
""",
"Invalid schema notation: `on` must only be used within `directive`"
)
end
end
describe "parse" do
test "can be under scalar as an attribute" do
assert_no_notation_error("ParseValid", """
scalar :foo do
parse &(&1)
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"ParseInvalid",
"""
parse &(&1)
""",
"Invalid schema notation: `parse` must only be used within `scalar`"
)
end
end
describe "resolve" do
test "can be under field as an attribute" do
assert_no_notation_error("ResolveValid", """
object :bar do
field :foo, :integer do
resolve fn _, _, _ -> {:ok, 1} end
end
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"ResolveInvalid",
"""
resolve fn _, _ -> {:ok, 1} end
""",
"Invalid schema notation: `resolve` must only be used within `field`"
)
end
test "cannot be within object" do
assert_notation_error(
"ResolveInvalid2",
"""
object :foo do
resolve fn _, _ -> {:ok, 1} end
end
""",
"Invalid schema notation: `resolve` must only be used within `field`"
)
end
end
describe "resolve_type" do
test "can be under interface as an attribute" do
assert_no_notation_error("ResolveTypeValidInterface", """
interface :bar do
resolve_type fn _, _ -> :baz end
end
""")
end
test "can be under union as an attribute" do
assert_no_notation_error("ResolveTypeValidUnion", """
union :bar do
resolve_type fn _, _ -> :baz end
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"ResolveTypeInvalidToplevel",
"""
resolve_type fn _, _ -> :bar end
""",
"Invalid schema notation: `resolve_type` must only be used within `interface`, `union`"
)
end
test "cannot be within object" do
assert_notation_error(
"ResolveTypeInvalidObject",
"""
object :foo do
resolve_type fn _, _ -> :bar end
end
""",
"Invalid schema notation: `resolve_type` must only be used within `interface`, `union`"
)
end
end
describe "scalar" do
test "can be toplevel" do
assert_no_notation_error("ScalarValid", """
scalar :foo do
end
""")
end
test "cannot be non-toplevel" do
assert_notation_error(
"ScalarInvalid",
"""
scalar :foo do
scalar :bar do
end
end
""",
"Invalid schema notation: `scalar` must only be used toplevel"
)
end
end
describe "serialize" do
test "can be under scalar as an attribute" do
assert_no_notation_error("SerializeValid", """
scalar :foo do
serialize &(&1)
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"SerializeInvalid",
"""
serialize &(&1)
""",
"Invalid schema notation: `serialize` must only be used within `scalar`"
)
end
end
describe "types" do
test "can be under union as an attribute" do
assert_no_notation_error("TypesValid", """
object :audi do
end
object :volvo do
end
union :brand do
types [:audi, :volvo]
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"TypesInvalid",
"types [:foo]",
"Invalid schema notation: `types` must only be used within `union`"
)
end
end
describe "value" do
test "can be under enum as an attribute" do
assert_no_notation_error("ValueValid", """
enum :color do
value :red
value :green
value :blue
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"ValueInvalid",
"value :b",
"Invalid schema notation: `value` must only be used within `enum`"
)
end
end
describe "description" do
test "can be under object as an attribute" do
assert_no_notation_error("DescriptionValid", """
object :item do
description \"""
Here's a description
\"""
end
""")
end
test "cannot be toplevel" do
assert_notation_error(
"DescriptionInvalid",
~s(description "test"),
"Invalid schema notation: `description` must not be used toplevel"
)
end
end
  @doc """
  Assert a notation error occurs.

  The schema body given in `text` is compiled inside a fresh test schema
  module named `MyTestSchema.<name>`, and the compilation must raise an
  `Absinthe.Schema.Notation.Error` whose message matches `message`.

  ## Examples

  ```
  iex> assert_notation_error("FieldToplevel", \"""
  field :name, :string
  \""", "Invalid schema notation: ...")
  ```

  """
  def assert_notation_error(name, text, message) do
    assert_raise(Absinthe.Schema.Notation.Error, message, fn ->
      """
      defmodule MyTestSchema.#{name} do
        use Absinthe.Schema

        query do
          #Query type must exist
        end

        #{text}
      end
      """
      |> Code.eval_string()
    end)
  end
  # Compiles the given schema body inside a fresh test schema module and
  # succeeds as long as compilation raises nothing. `Code.eval_string/1`
  # returns a `{result, bindings}` tuple, which is always truthy, so the
  # `assert` only guards against exceptions, not against a falsy result.
  def assert_no_notation_error(name, text) do
    assert """
           defmodule MyTestSchema.#{name} do
             use Absinthe.Schema

             query do
               #Query type must exist
             end

             #{text}
           end
           """
           |> Code.eval_string()
  end
end
| 22.760274 | 105 | 0.557102 |
e8100300c4c882cc57210518a0439cac787711cd | 1,232 | ex | Elixir | apps/neoscan_node/lib/neoscan_node.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 75 | 2017-07-23T02:45:32.000Z | 2021-12-13T11:04:17.000Z | apps/neoscan_node/lib/neoscan_node.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 252 | 2017-07-13T19:36:00.000Z | 2021-07-28T18:40:00.000Z | apps/neoscan_node/lib/neoscan_node.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 87 | 2017-07-23T02:45:34.000Z | 2022-03-02T14:54:27.000Z | defmodule NeoscanNode do
@moduledoc false
alias NeoscanNode.NodeChecker
alias NeoscanNode.Utils
@timeout 60_000
def get_last_block_index, do: NodeChecker.get_last_block_index()
def get_live_nodes, do: NodeChecker.get_live_nodes()
  # Fetches the block at `index` from a random node and enriches each of its
  # transactions with its NEP-5 transfers. Crashes (by design) if the node
  # call fails — callers rely on supervision/retry upstream.
  def get_block_with_transfers(index) do
    node_url = NodeChecker.get_random_node(index)
    {:ok, block} = NeoNode.get_block_by_height(node_url, index)
    # Transfers are fetched concurrently per transaction (@timeout per task).
    updated_transactions = Utils.pmap2(block.tx, &update_transaction(&1, index), @timeout)
    Map.put(block, :tx, updated_transactions)
  end

  # Adds a :transfers key to a single transaction map.
  defp update_transaction(transaction, index) do
    Map.put(transaction, :transfers, get_transfers(transaction, index))
  end

  # Only invocation transactions can carry NEP-5 transfers; they are read
  # from the node's application log.
  # NOTE(review): on any error this retries recursively with another random
  # node, with no retry limit — it loops until some node answers.
  defp get_transfers(%{type: :invocation_transaction, hash: hash} = transaction, index) do
    node_url = NodeChecker.get_random_application_log_node(index)

    case NeoNode.get_application_log(node_url, Base.encode16(hash, case: :lower)) do
      {:ok, transfers} ->
        transfers

      _ ->
        get_transfers(transaction, index)
    end
  end

  # All other transaction types have no transfers.
  defp get_transfers(_, _), do: []

  # Fetches NEP-5 token metadata for `contract` from a random node.
  def get_nep5_token_from_contract(index, contract) do
    node_url = NodeChecker.get_random_node(index)
    NeoNode.get_nep5_contract(node_url, contract)
  end
end
| 28 | 90 | 0.742695 |
e810083820cdf9a7ef98784ef2fe8a44872861f0 | 308 | ex | Elixir | lib/fireball_web/schema/types.ex | Catharz/fireball | bb9f123c0d2d71b16eda01d3838565e6e762de20 | [
"MIT"
] | null | null | null | lib/fireball_web/schema/types.ex | Catharz/fireball | bb9f123c0d2d71b16eda01d3838565e6e762de20 | [
"MIT"
] | null | null | null | lib/fireball_web/schema/types.ex | Catharz/fireball | bb9f123c0d2d71b16eda01d3838565e6e762de20 | [
"MIT"
] | null | null | null | defmodule FireballWeb.Schema.Types do
use Absinthe.Schema.Notation
use Timex
@desc "A time scalar for parsing & serializing dates."
scalar :iso_extended, description: "{ISO:Extended}" do
parse &Timex.format(&1.value, "{ISO:Extended}")
serialize &Timex.parse!(&1, "{ISO:Extended}")
end
end
| 28 | 56 | 0.711039 |
e81012e28dcdd4241d56e7d908fc1050ec2a452e | 1,096 | ex | Elixir | lib/crit/application.ex | jesseshieh/crit19 | 0bba407fea09afed72cbb90ca579ba34c537edef | [
"MIT"
] | null | null | null | lib/crit/application.ex | jesseshieh/crit19 | 0bba407fea09afed72cbb90ca579ba34c537edef | [
"MIT"
] | null | null | null | lib/crit/application.ex | jesseshieh/crit19 | 0bba407fea09afed72cbb90ca579ba34c537edef | [
"MIT"
] | null | null | null | defmodule Crit.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
  # Starts the application's supervision tree: Ecto repo, Phoenix endpoint,
  # audit log server, institution supervisor, and an in-memory cache.
  def start(_type, _args) do
    # List all child processes to be supervised
    children = [
      # Start the Ecto repository
      Crit.Repo,
      # Start the endpoint when the application starts
      CritWeb.Endpoint,
      # Starts a worker by calling: Crit.Worker.start_link(arg)
      Crit.Audit.ToEcto.Server,
      Crit.Setup.InstitutionSupervisor,
      # ConCache: entries live 48h by default, expiry checked every 24h.
      {ConCache, [name: Crit.Cache,
                  ttl_check_interval: :timer.hours(24),
                  global_ttl: :timer.hours(48)]}
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: Crit.Supervisor]
    Supervisor.start_link(children, opts)
  end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
  # Application callback: forwards runtime config changes to the endpoint.
  def config_change(changed, _new, removed) do
    CritWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 29.621622 | 63 | 0.687956 |
e81020b722f06c10e86bafcf53dc55bdbf837fda | 20,581 | ex | Elixir | lib/surface/api.ex | inspired-consulting/surface | 3a6a3a454704a9aaf83cd23a6393cbd09bdca8bd | [
"MIT"
] | null | null | null | lib/surface/api.ex | inspired-consulting/surface | 3a6a3a454704a9aaf83cd23a6393cbd09bdca8bd | [
"MIT"
] | null | null | null | lib/surface/api.ex | inspired-consulting/surface | 3a6a3a454704a9aaf83cd23a6393cbd09bdca8bd | [
"MIT"
] | null | null | null | defmodule Surface.API do
@moduledoc false
alias Surface.IOHelper
@types [
:any,
:css_class,
:list,
:event,
:boolean,
:string,
:time,
:date,
:datetime,
:naive_datetime,
:number,
:integer,
:decimal,
:map,
:fun,
:atom,
:module,
:changeset,
:form,
:keyword,
:struct,
:tuple,
:pid,
:port,
:reference,
:bitstring,
:range,
:mapset,
:regex,
:uri,
:path,
# Private
:generator,
:context_put,
:context_get
]
  # Injects the requested subset of the assign macros (:prop, :slot, :data)
  # into the caller and registers the module attributes that accumulate the
  # declared assigns. `include` selects which macros the caller gets.
  defmacro __using__(include: include) do
    # Each macro is imported for all of its supported arities.
    arities = %{
      prop: [2, 3],
      slot: [1, 2],
      data: [2, 3]
    }

    functions = for func <- include, arity <- arities[func], into: [], do: {func, arity}

    quote do
      import unquote(__MODULE__), only: unquote(functions)
      @before_compile unquote(__MODULE__)
      @after_compile unquote(__MODULE__)

      # All assigns, regardless of kind, accumulate here.
      Module.register_attribute(__MODULE__, :assigns, accumulate: true)

      # Any caller component can hold other components with slots
      Module.register_attribute(__MODULE__, :assigned_slots_by_parent, accumulate: false)

      # Track which functions change/read the context (see quoted_context_funcs).
      Module.register_attribute(__MODULE__, :changes_context?, accumulate: true)
      Module.register_attribute(__MODULE__, :gets_context?, accumulate: true)

      # One accumulating attribute per included assign kind.
      for func <- unquote(include) do
        Module.register_attribute(__MODULE__, func, accumulate: true)
      end
    end
  end
  # Before compilation: generates module docs from the declared assigns and
  # injects the reflection functions (__props__/0, __slots__/0, __data__/0,
  # context predicates) into the component module.
  defmacro __before_compile__(env) do
    generate_docs(env)

    [
      quoted_prop_funcs(env),
      quoted_slot_funcs(env),
      quoted_data_funcs(env),
      quoted_context_funcs(env)
    ]
  end
  # After compilation: runs all assign validations; each raises a compile
  # error (via IOHelper.compile_error) on failure.
  def __after_compile__(env, _) do
    validate_assigns!(env)
    validate_duplicated_assigns!(env)
    validate_slot_props_bindings!(env)
    validate_duplicate_root_props!(env)
  end
  # The three assign-declaration macros. Each delegates to build_assign_ast/5,
  # which validates the name/type/opts ASTs and registers the assign.

  @doc "Defines a property for the component"
  defmacro prop(name_ast, type_ast, opts_ast \\ []) do
    build_assign_ast(:prop, name_ast, type_ast, opts_ast, __CALLER__)
  end

  @doc "Defines a slot for the component"
  defmacro slot(name_ast, opts_ast \\ []) do
    # Slots take no user-supplied type; they are always registered as :any.
    build_assign_ast(:slot, name_ast, :any, opts_ast, __CALLER__)
  end

  @doc "Defines a data assign for the component"
  defmacro data(name_ast, type_ast, opts_ast \\ []) do
    build_assign_ast(:data, name_ast, type_ast, opts_ast, __CALLER__)
  end
  @doc false
  # All assigns (props, slots and data) registered on `module`,
  # in accumulation order. Only valid while `module` is being compiled.
  def get_assigns(module) do
    Module.get_attribute(module, :assigns, [])
  end

  @doc false
  def get_slots(module) do
    Module.get_attribute(module, :slot, [])
  end

  @doc false
  def get_props(module) do
    Module.get_attribute(module, :prop, [])
  end

  @doc false
  def get_data(module) do
    Module.get_attribute(module, :data, [])
  end

  @doc false
  # `{name, default}` pairs for every data assign that declared a `:default`
  # option (explicit `default: nil` is included).
  def get_defaults(module) do
    for %{name: name, opts: opts} <- get_data(module), Keyword.has_key?(opts, :default) do
      {name, opts[:default]}
    end
  end
  @doc false
  # Registers one assign on the calling module: appends it both to the
  # generic :assigns attribute and to the per-kind attribute (:prop, :slot
  # or :data). The doc string is taken from the pending @doc via pop_doc/1.
  def put_assign(caller, func, name, type, opts, opts_ast, line) do
    assign = %{
      func: func,
      name: name,
      type: type,
      doc: pop_doc(caller.module),
      opts: opts,
      opts_ast: opts_ast,
      line: line
    }

    Module.put_attribute(caller.module, :assigns, assign)
    Module.put_attribute(caller.module, assign.func, assign)
  end
@doc false
def sort_props(props) when is_list(props) do
Enum.sort_by(props, &{&1.name != :id, !&1.opts[:required], &1.line})
end
  # Groups all assigns by their effective name (the :as option, if present,
  # otherwise the declared name) and forwards only the groups with more than
  # one entry to the duplicate-reporting clause below.
  defp validate_duplicated_assigns!(env) do
    env.module
    |> Module.get_attribute(:assigns, [])
    |> Enum.group_by(fn %{name: name, opts: opts} -> opts[:as] || name end)
    |> Enum.filter(fn {_, list} -> length(list) > 1 end)
    |> validate_duplicated_assigns!(env)
  end
  # Reports each duplicated-name group; compile_error raises on the first.
  defp validate_duplicated_assigns!(assigns, env) do
    for assign <- assigns do
      validate_duplicated_assign!(assign, env)
    end
  end

  # Receives a `{name, [assign, duplicated | _]}` group. The error points at
  # the first declaration's line and explains what the name clashes with
  # (a built-in assign of the component type, or a previous declaration).
  defp validate_duplicated_assign!({name, [assign, duplicated | _]}, env) do
    component_type = Module.get_attribute(env.module, :component_type)
    builtin_assign? = name in Surface.Compiler.Helpers.builtin_assigns_by_type(component_type)
    details = existing_assign_details_message(builtin_assign?, duplicated)
    message = ~s(cannot use name "#{name}". #{details}.)
    IOHelper.compile_error(message, env.file, assign.line)
  end
defp validate_duplicate_root_props!(env) do
props =
env.module.__props__()
|> Enum.filter(& &1.opts[:root])
case props do
[prop, _dupicated | _] ->
message = """
cannot define multiple properties as `root: true`. \
Property `#{prop.name}` at line #{prop.line} was already defined as root.
Hint: choose a single property to be the root prop.
"""
IOHelper.compile_error(message, env.file, env.line)
_ ->
nil
end
end
  # Clash with a built-in assign of the component type.
  defp existing_assign_details_message(true = _builtin?, %{func: func}) do
    "There's already a built-in #{func} assign with the same name"
  end

  # Clash with a previously declared slot: suggest the :as option, which
  # renames the slot's generated assigns.
  defp existing_assign_details_message(false = _builtin?, %{func: func, line: line})
       when func == :slot do
    """
    There's already a #{func} assign with the same name at line #{line}.
    You could use the optional ':as' option in slot macro to name the related assigns.
    """
  end

  # Clash with any other previously declared assign.
  defp existing_assign_details_message(false = _builtin?, %{func: func, line: line}) do
    "There's already a #{func} assign with the same name at line #{line}"
  end
  # Injects `__data__/0`, exposing the module's data assigns for reflection.
  defp quoted_data_funcs(env) do
    data = get_data(env.module)

    quote do
      @doc false
      def __data__() do
        unquote(Macro.escape(data))
      end
    end
  end
  # Injects the prop reflection functions: __props__/0 (sorted list),
  # __validate_prop__/1 (name membership), __get_prop__/1 (lookup by name)
  # and __required_props_names__/0.
  defp quoted_prop_funcs(env) do
    props =
      env.module
      |> get_props()
      |> sort_props()

    # Precomputed at compile time and escaped into the generated functions.
    props_names = for p <- props, do: p.name
    props_by_name = for p <- props, into: %{}, do: {p.name, p}
    required_props_names = for %{name: name, opts: opts} <- props, opts[:required], do: name

    quote do
      @doc false
      def __props__() do
        unquote(Macro.escape(props))
      end

      @doc false
      def __validate_prop__(prop) do
        prop in unquote(props_names)
      end

      @doc false
      def __get_prop__(name) do
        Map.get(unquote(Macro.escape(props_by_name)), name)
      end

      @doc false
      def __required_props_names__() do
        unquote(Macro.escape(required_props_names))
      end
    end
  end
  # Injects the slot reflection functions: __slots__/0, __validate_slot__/1,
  # __get_slot__/1, __assigned_slots_by_parent__/0 and
  # __required_slots_names__/0. Duplicate slot names keep the first entry.
  defp quoted_slot_funcs(env) do
    slots = env.module |> get_slots() |> Enum.uniq_by(& &1.name)
    slots_names = Enum.map(slots, fn slot -> slot.name end)
    slots_by_name = for p <- slots, into: %{}, do: {p.name, p}

    required_slots_names =
      for %{name: name, opts: opts} <- slots, opts[:required] do
        name
      end

    # Defaults to an empty map when no parent assigned any slots.
    assigned_slots_by_parent = Module.get_attribute(env.module, :assigned_slots_by_parent) || %{}

    quote do
      @doc false
      def __slots__() do
        unquote(Macro.escape(slots))
      end

      @doc false
      def __validate_slot__(prop) do
        prop in unquote(slots_names)
      end

      @doc false
      def __get_slot__(name) do
        Map.get(unquote(Macro.escape(slots_by_name)), name)
      end

      @doc false
      def __assigned_slots_by_parent__() do
        unquote(Macro.escape(assigned_slots_by_parent))
      end

      @doc false
      def __required_slots_names__() do
        unquote(Macro.escape(required_slots_names))
      end
    end
  end
  # Injects __changes_context__?/1 and __gets_context__?/1: one `true`
  # clause per function recorded in the corresponding module attribute,
  # followed by a catch-all `false` clause.
  defp quoted_context_funcs(env) do
    funs_changing =
      env.module
      |> Module.get_attribute(:changes_context?, [])
      |> MapSet.new()

    funs_getting =
      env.module
      |> Module.get_attribute(:gets_context?, [])
      |> MapSet.new()

    quoted_changing =
      for fun <- funs_changing do
        quote do
          @doc false
          def __changes_context__?(unquote(fun)), do: true
        end
      end

    # Fallback clause must come after all specific clauses.
    quoted_changing_fallback =
      quote do
        def __changes_context__?(_fun), do: false
      end

    quoted_getting =
      for fun <- funs_getting do
        quote do
          @doc false
          def __gets_context__?(unquote(fun)), do: true
        end
      end

    quoted_getting_fallback =
      quote do
        def __gets_context__?(_fun), do: false
      end

    # Flattened so the caller can splice all definitions in order.
    List.flatten([quoted_changing, quoted_changing_fallback, quoted_getting, quoted_getting_fallback])
  end
# Validates every assign (prop/data/slot) accumulated on the module being
# compiled. Raises a compile error on the first invalid assign.
defp validate_assigns!(env) do
  env.module
  |> Module.get_attribute(:assigns, [])
  |> Enum.map(&validate_assign!(&1, env))
end
# Runs the three validation stages (type, option keys, option values) for a
# single assign. Returns `:ok` or raises a compile error pointing at the
# assign's declaration line.
defp validate_assign!(%{func: func, name: name, type: type, opts: opts, line: line}, env) do
  result =
    with :ok <- validate_type(func, name, type),
         :ok <- validate_opts_keys(func, name, type, opts) do
      validate_opts(func, name, type, opts, line, env)
    end

  case result do
    :ok ->
      :ok

    {:error, message} ->
      IOHelper.compile_error(message, Path.relative_to_cwd(env.file), line)
  end
end
# A valid assign name is a bare variable AST: `{name, meta, context}`.
# Returns the name atom, or raises a compile error in the caller's context.
defp validate_name_ast!(_func, {name, meta, context}, _caller)
     when is_atom(name) and is_list(meta) and is_atom(context) do
  name
end

defp validate_name_ast!(func, name_ast, caller) do
  message =
    "invalid #{func} name. Expected a variable name, got: #{Macro.to_string(name_ast)}"

  IOHelper.compile_error(message, caller.file, caller.line)
end
# The type of an assign must be given as a literal atom in the DSL.
# Returns the atom, or raises a compile error in the caller's context.
defp validate_type_ast!(_func, _name, type, _caller) when is_atom(type), do: type

defp validate_type_ast!(func, name, type_ast, caller) do
  message =
    "invalid type for #{func} #{name}. " <>
      "Expected an atom, got: #{Macro.to_string(type_ast)}\n"

  IOHelper.compile_error(message, caller.file, caller.line)
end
# Checks the assign type against the supported set in `@types`.
# Returns `:ok` or `{:error, message}`.
defp validate_type(_func, _name, type) when type in @types, do: :ok

defp validate_type(func, name, type) do
  message =
    "invalid type #{Macro.to_string(type)} for #{func} #{name}.\n" <>
      "Expected one of #{inspect(@types)}.\n" <>
      "Hint: Use :any if the type is not listed."

  {:error, message}
end
# Ensures every option key in `opts` is accepted for the given assign kind
# (`func`) and `type`. Returns `:ok`, or `{:error, message}` listing the
# unknown option(s) alongside the valid ones.
defp validate_opts_keys(func, _name, type, opts) do
  valid_opts = get_valid_opts(func, type, opts)

  # The original used `with` clauses that always match (an anti-pattern) and
  # recomputed `valid_opts` on the error path; a single `case` on the list
  # difference expresses the one real branching point directly.
  case Keyword.keys(opts) -- valid_opts do
    [] ->
      :ok

    unknown_options ->
      {:error, unknown_options_message(valid_opts, unknown_options)}
  end
end
# Options must be written as a literal keyword list in the DSL. Each value is
# normalized via `validate_opt_ast!/4`; anything else raises a compile error.
defp validate_opts_ast!(func, _name, opts, caller) when is_list(opts) do
  Enum.map(opts, fn {key, value} ->
    {key, validate_opt_ast!(func, key, value, caller)}
  end)
end

defp validate_opts_ast!(func, name, opts, caller) do
  message =
    "invalid options for #{func} #{name}. " <>
      "Expected a keyword list of options, got: #{Macro.to_string(opts)}\n"

  IOHelper.compile_error(message, caller.file, caller.line)
end
# Validates each `{key, value}` option pair, stopping at the first error.
# Returns `:ok` when every option is valid, otherwise the first
# `{:error, message}` produced by `validate_opt/8`.
defp validate_opts(func, name, type, opts, line, env) do
  Enum.find_value(opts, :ok, fn {key, value} ->
    case validate_opt(func, name, type, opts, key, value, line, env) do
      :ok -> nil
      error -> error
    end
  end)
end
# Returns the set of option keys accepted for each kind of assign.
defp get_valid_opts(:prop, _type, _opts) do
[:required, :default, :values, :values!, :accumulate, :root, :static]
end
defp get_valid_opts(:data, _type, _opts) do
[:default, :values, :values!]
end
defp get_valid_opts(:slot, _type, _opts) do
[:required, :args, :as]
end
# Normalizes the `:args` option of a slot at compile time. Each entry may be:
#   * a bare atom - a plain slot argument
#   * `name: ^prop` - an argument bound to a generator prop via the pin operator
# Anything else raises a compile error in the caller's context.
defp validate_opt_ast!(:slot, :args, args_ast, caller) do
Enum.map(args_ast, fn
# `key: ^generator` binding. `context` is `Elixir` or `nil` for variables
# written literally in user code (as opposed to macro-generated ones).
{name, {:^, _, [{generator, _, context}]}} when context in [Elixir, nil] ->
Macro.escape(%{name: name, generator: generator})
name when is_atom(name) ->
Macro.escape(%{name: name, generator: nil})
ast ->
message =
"invalid slot argument #{Macro.to_string(ast)}. " <>
"Expected an atom or a binding to a generator as `key: ^property_name`"
IOHelper.compile_error(message, caller.file, caller.line)
end)
end
# Every other option value is passed through untouched.
defp validate_opt_ast!(_func, _key, value, _caller) do
value
end
# Per-option validation clauses, matched in order. Each clause validates one
# `{key, value}` pair for a given assign kind and returns `:ok` or
# `{:error, message}`; the final catch-all accepts anything not restricted.
defp validate_opt(:prop, _name, _type, _opts, :root, value, _line, _env)
when not is_boolean(value) do
{:error, "invalid value for option :root. Expected a boolean, got: #{inspect(value)}"}
end
defp validate_opt(:prop, _name, _type, _opts, :static, value, _line, _env)
when not is_boolean(value) do
{:error, "invalid value for option :static. Expected a boolean, got: #{inspect(value)}"}
end
defp validate_opt(_func, _name, _type, _opts, :required, value, _line, _env)
when not is_boolean(value) do
{:error, "invalid value for option :required. Expected a boolean, got: #{inspect(value)}"}
end
# `:default` on a prop is accepted, but warns when combined with
# `required: true` (the default would never apply) or when the value
# conflicts with `:values!`/`:accumulate`.
defp validate_opt(:prop, name, _type, opts, :default, value, line, env) do
if Keyword.get(opts, :required, false) do
IOHelper.warn(
"setting a default value on a required prop has no effect. Either set the default value or set the prop as required, but not both.",
env,
line
)
end
warn_on_invalid_default(:prop, name, value, opts, line, env)
:ok
end
defp validate_opt(:data, name, _type, opts, :default, value, line, env) do
warn_on_invalid_default(:data, name, value, opts, line, env)
:ok
end
defp validate_opt(_func, _name, _type, _opts, :values, value, _line, _env)
when not is_list(value) and not is_struct(value, Range) do
{:error, "invalid value for option :values. Expected a list of values or a Range, got: #{inspect(value)}"}
end
defp validate_opt(:prop, _name, _type, _opts, :accumulate, value, _line, _env)
when not is_boolean(value) do
{:error, "invalid value for option :accumulate. Expected a boolean, got: #{inspect(value)}"}
end
defp validate_opt(:slot, _name, _type, _opts, :as, value, _line, _caller)
when not is_atom(value) do
{:error, "invalid value for option :as in slot. Expected an atom, got: #{inspect(value)}"}
end
# `:args` declared on the *default* slot of a slotable component: those args
# are not accessible there, so warn with a hint showing the explicit-template
# alternative. Note the second argument matches the slot name `:default`.
defp validate_opt(:slot, :default, _type, _opts, :args, value, line, env) do
if Module.defines?(env.module, {:__slot_name__, 0}) do
slot_name = Module.get_attribute(env.module, :__slot_name__)
prop_example =
value
|> Enum.map(fn %{name: name} -> "#{name}: #{name}" end)
|> Enum.join(", ")
component_name = Macro.to_string(env.module)
message = """
arguments for the default slot in a slotable component are not accessible - instead the arguments \
from the parent's #{slot_name} slot will be exposed via `:let={...}`.
Hint: You can remove these arguments, pull them up to the parent component, or make this component not slotable \
and use it inside an explicit template element:
```
<#template name="#{slot_name}">
<#{component_name} :let={#{prop_example}}>
...
</#{component_name}>
</#template>
```
"""
IOHelper.warn(message, env, line)
end
:ok
end
# Any other option/value combination is considered valid.
defp validate_opt(_func, _name, _type, _opts, _key, _value, _line, _env) do
:ok
end
# Emits compile-time warnings when a declared `:default` conflicts with other
# options. Checked in order (the first matching branch wins):
#   1. `accumulate: true` requires the default to be a list
#   2. with `accumulate: true`, every default entry must be in `:values!`
#   3. without accumulate, the default itself must be in `:values!`
# `:values` (without the bang) intentionally skips this validation.
defp warn_on_invalid_default(type, name, default, opts, line, env) do
accumulate? = Keyword.get(opts, :accumulate, false)
values! = Keyword.get(opts, :values!)
cond do
accumulate? and not is_list(default) ->
IOHelper.warn(
"#{type} `#{name}` default value `#{inspect(default)}` must be a list when `accumulate: true`",
env,
line
)
accumulate? and not is_nil(values!) and
not MapSet.subset?(MapSet.new(default), MapSet.new(values!)) ->
IOHelper.warn(
"""
#{type} `#{name}` default value `#{inspect(default)}` does not exist in `:values!`
Hint: Either choose an existing value or replace `:values!` with `:values` to skip validation.
""",
env,
line
)
not accumulate? and not is_nil(values!) and default not in values! ->
IOHelper.warn(
"""
#{type} `#{name}` default value `#{inspect(default)}` does not exist in `:values!`
Hint: Either choose an existing value or replace `:values!` with `:values` to skip validation.
""",
env,
line
)
true ->
:ok
end
end
# Formats the error message for unknown option keys, pluralizing "option"
# and unwrapping the single-element list case for nicer output.
defp unknown_options_message(valid_opts, unknown_options) do
  {plural, unknown_items} =
    if match?([_], unknown_options) do
      {"", hd(unknown_options)}
    else
      {"s", unknown_options}
    end

  "unknown option#{plural} #{inspect(unknown_items)}. " <>
    "Available options: #{inspect(valid_opts)}"
end
# Renders an options AST as source text without the surrounding "[" and "]"
# of the keyword-list literal, for use inside generated docs.
defp format_opts(opts_ast) do
  rendered = Macro.to_string(opts_ast)
  String.slice(rendered, 1..-2)
end
# Rebuilds the module's `@moduledoc` by appending auto-generated sections
# for properties, slots and events.
defp generate_docs(env) do
  moduledoc = Module.get_attribute(env.module, :moduledoc)
  do_generate_docs(moduledoc, env)
end
# `@moduledoc false` - docs are explicitly disabled; leave them alone.
defp do_generate_docs({_line, false}, _env), do: :ok

# No @moduledoc at all - synthesize one anchored at the module's line.
defp do_generate_docs(nil, env), do: do_generate_docs({env.line, nil}, env)

defp do_generate_docs({line, doc}, env) do
  sections = [
    doc,
    generate_props_docs(env.module),
    generate_slots_docs(env.module),
    generate_events_docs(env.module)
  ]

  docs =
    sections
    |> Enum.reject(&is_nil/1)
    |> Enum.join("\n")

  Module.put_attribute(env.module, :moduledoc, {line, docs})
end
# Renders the "## Properties" docs section, or `nil` when the component
# declares no non-event props.
defp generate_props_docs(module) do
  # Events are special properties documented in their own section.
  entries =
    for prop <- get_props(module), prop.type != :event do
      doc = if prop.doc, do: " - #{prop.doc}", else: ""
      opts = if prop.opts == [], do: "", else: ", #{format_opts(prop.opts_ast)}"
      "* **#{prop.name}** *#{inspect(prop.type)}#{opts}*#{doc}"
    end

  docs = entries |> Enum.reverse() |> Enum.join("\n")

  if docs != "" do
    """
    ## Properties
    #{docs}
    """
  end
end
# Renders the "## Slots" docs section, or `nil` when the component
# declares no slots.
defp generate_slots_docs(module) do
  entries =
    for slot <- get_slots(module) do
      doc = if slot.doc, do: " - #{slot.doc}", else: ""
      opts = if slot.opts == [], do: "", else: ", #{format_opts(slot.opts_ast)}"
      "* **#{slot.name}#{opts}**#{doc}"
    end

  docs = entries |> Enum.reverse() |> Enum.join("\n")

  if docs != "" do
    """
    ## Slots
    #{docs}
    """
  end
end
# Renders the "## Events" docs section from props of type `:event`, or `nil`
# when the component declares none.
defp generate_events_docs(module) do
  entries =
    for prop <- get_props(module), prop.type == :event do
      doc = if prop.doc, do: " - #{prop.doc}", else: ""
      opts = if prop.opts == [], do: "", else: ", #{format_opts(prop.opts_ast)}"
      "* **#{prop.name}#{opts}**#{doc}"
    end

  docs = entries |> Enum.reverse() |> Enum.join("\n")

  if docs != "" do
    """
    ## Events
    #{docs}
    """
  end
end
# Ensures every slot argument bound with `^prop` points at an existing prop
# of type `:list` (a generator). Runs after the introspection functions are
# compiled, since it calls `__slots__/0` / `__get_prop__/1` on the module.
defp validate_slot_props_bindings!(env) do
for slot <- env.module.__slots__(),
slot_props = Keyword.get(slot.opts, :args, []),
%{name: name, generator: generator} <- slot_props,
generator != nil do
case env.module.__get_prop__(generator) do
# The pinned property does not exist at all.
nil ->
existing_properties_names = env.module.__props__() |> Enum.map(& &1.name)
message = """
cannot bind slot argument `#{name}` to property `#{generator}`. \
Expected an existing property after `^`, \
got: an undefined property `#{generator}`.
Hint: Available properties are #{inspect(existing_properties_names)}\
"""
IOHelper.compile_error(message, env.file, slot.line)
# The property exists but is not a list, so it cannot act as a generator.
%{type: type} when type != :list ->
message = """
cannot bind slot argument `#{name}` to property `#{generator}`. \
Expected a property of type :list after `^`, \
got: a property of type #{inspect(type)}\
"""
IOHelper.compile_error(message, env.file, slot.line)
_ ->
:ok
end
end
end
# Reads and clears the pending `@doc` attribute on the module, returning the
# doc string (or `nil` when unset/disabled).
defp pop_doc(module) do
  attribute = Module.get_attribute(module, :doc)
  Module.delete_attribute(module, :doc)

  case attribute do
    {_line, doc} -> doc
    _ -> nil
  end
end
# Builds the quoted code that registers a prop/data/slot assign on the module.
# The name, opts and type ASTs are validated eagerly (raising compile errors
# in the caller's context) before the registration call is quoted.
defp build_assign_ast(func, name_ast, type_ast, opts_ast, caller) do
name = validate_name_ast!(func, name_ast, caller)
opts = validate_opts_ast!(func, name, opts_ast, caller)
type = validate_type_ast!(func, name, type_ast, caller)
# `bind_quoted` evaluates each value exactly once; the raw `opts_ast` is
# kept (escaped) so docs can render the options as the user wrote them.
quote bind_quoted: [
func: func,
name: name,
type: type,
opts: opts,
opts_ast: Macro.escape(opts_ast),
line: caller.line
] do
Surface.API.put_assign(__ENV__, func, name, type, opts, opts_ast, line)
end
end
end
| 27.009186 | 140 | 0.613916 |
e810397d61a071549555159843b483d7dcf6f0a6 | 1,382 | ex | Elixir | apps/admin_api/lib/admin_api/v1/auth/provider_auth.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | 1 | 2018-12-07T06:21:21.000Z | 2018-12-07T06:21:21.000Z | apps/admin_api/lib/admin_api/v1/auth/provider_auth.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/admin_api/lib/admin_api/v1/auth/provider_auth.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | defmodule AdminAPI.V1.ProviderAuth do
@moduledoc """
This module takes care of authenticating a provider for websocket connections.
"""
alias EWalletDB.Key
def authenticate(auth) do
auth
|> parse_header()
|> authenticate_access()
end
defp parse_header(auth) do
with header when not is_nil(header) <- auth[:auth_header],
[scheme, content] <- String.split(header, " ", parts: 2),
true <- scheme in ["Basic", "OMGProvider"],
{:ok, decoded} <- Base.decode64(content),
[access, secret] <- String.split(decoded, ":", parts: 2) do
auth
|> Map.put(:auth_access_key, access)
|> Map.put(:auth_secret_key, secret)
else
_ ->
auth
|> Map.put(:authenticated, false)
|> Map.put(:auth_error, :invalid_auth_scheme)
end
end
# Skip auth if it already failed since header parsing
defp authenticate_access(%{authenticated: false} = auth), do: auth
defp authenticate_access(auth) do
access_key = auth[:auth_access_key]
secret_key = auth[:auth_secret_key]
case Key.authenticate(access_key, secret_key) do
{:ok, key} ->
auth
|> Map.put(:authenticated, true)
|> Map.put(:key, key)
false ->
auth
|> Map.put(:authenticated, false)
|> Map.put(:auth_error, :invalid_access_secret_key)
end
end
end
| 27.64 | 80 | 0.623734 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.