hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ff6a48b96df5be7b287872e4245ac827324229ae | 3,765 | ex | Elixir | lib/arc/file.ex | hollar/arc | e2871e02c8aab0aaba885c8d141ed3a51a1ec8a8 | [
"Apache-2.0"
] | 1,213 | 2015-06-18T04:01:20.000Z | 2022-01-19T18:47:23.000Z | lib/arc/file.ex | hollar/arc | e2871e02c8aab0aaba885c8d141ed3a51a1ec8a8 | [
"Apache-2.0"
] | 260 | 2015-06-18T22:34:58.000Z | 2022-01-06T17:43:29.000Z | lib/arc/file.ex | hollar/arc | e2871e02c8aab0aaba885c8d141ed3a51a1ec8a8 | [
"Apache-2.0"
] | 270 | 2015-07-07T17:10:31.000Z | 2021-11-13T09:16:03.000Z | defmodule Arc.File do
  # Represents a file handled by Arc: either a file on disk (`:path`) or an
  # in-memory payload (`:binary`), plus the name it should be stored under.
  defstruct [:path, :file_name, :binary]

  # Builds a unique random path under the system tmp dir. When a file struct is
  # given, its extension is preserved so downstream processing can detect type.
  def generate_temporary_path(file \\ nil) do
    extension = Path.extname((file && file.path) || "")
    file_name =
      :crypto.strong_rand_bytes(20)
      |> Base.encode32()
      |> Kernel.<>(extension)
    Path.join(System.tmp_dir, file_name)
  end

  # Given a remote file: downloads it into a temporary local path first.
  # Any download failure collapses to {:error, :invalid_file_path}.
  def new(remote_path = "http" <> _) do
    uri = URI.parse(remote_path)
    filename = Path.basename(uri.path)
    case save_file(uri, filename) do
      {:ok, local_path} -> %Arc.File{path: local_path, file_name: filename}
      :error -> {:error, :invalid_file_path}
    end
  end

  # Accepts a path to an existing local file.
  def new(path) when is_binary(path) do
    case File.exists?(path) do
      true -> %Arc.File{path: path, file_name: Path.basename(path)}
      false -> {:error, :invalid_file_path}
    end
  end

  # Accepts an in-memory binary together with the filename to store it under.
  def new(%{filename: filename, binary: binary}) do
    %Arc.File{binary: binary, file_name: Path.basename(filename)}
  end

  # Accepts a map conforming to %Plug.Upload{} syntax
  def new(%{filename: filename, path: path}) do
    case File.exists?(path) do
      true -> %Arc.File{path: path, file_name: filename}
      false -> {:error, :invalid_file_path}
    end
  end

  # Guarantees the struct has an on-disk path, materializing an in-memory
  # binary to a temp file when necessary.
  def ensure_path(file = %{path: path}) when is_binary(path), do: file
  def ensure_path(file = %{binary: binary}) when is_binary(binary), do: write_binary(file)

  # Writes the in-memory binary to a freshly generated temporary file.
  defp write_binary(file) do
    path = generate_temporary_path(file)
    :ok = File.write!(path, file.binary)
    %__MODULE__{
      file_name: file.file_name,
      path: path
    }
  end

  # Downloads `uri` into a temp file that keeps `filename`'s extension.
  # Returns {:ok, local_path} or :error.
  defp save_file(uri, filename) do
    local_path =
      generate_temporary_path()
      |> Kernel.<>(Path.extname(filename))
    case save_temp_file(local_path, uri) do
      :ok -> {:ok, local_path}
      _ -> :error
    end
  end

  # Fetches the remote body and writes it to `local_path`.
  defp save_temp_file(local_path, remote_path) do
    remote_file = get_remote_path(remote_path)
    case remote_file do
      {:ok, body} -> File.write(local_path, body)
      {:error, error} -> {:error, error}
    end
  end

  # hakney :connect_timeout - timeout used when establishing a connection, in milliseconds
  # hakney :recv_timeout - timeout used when receiving from a connection, in milliseconds
  # poison :timeout - timeout to establish a connection, in milliseconds
  # :backoff_max - maximum backoff time, in milliseconds
  # :backoff_factor - a backoff factor to apply between attempts, in milliseconds
  defp get_remote_path(remote_path) do
    options = [
      follow_redirect: true,
      recv_timeout: Application.get_env(:arc, :recv_timeout, 5_000),
      connect_timeout: Application.get_env(:arc, :connect_timeout, 10_000),
      timeout: Application.get_env(:arc, :timeout, 10_000),
      max_retries: Application.get_env(:arc, :max_retries, 3),
      backoff_factor: Application.get_env(:arc, :backoff_factor, 1000),
      backoff_max: Application.get_env(:arc, :backoff_max, 30_000),
    ]
    request(remote_path, options)
  end

  # Performs the GET; retries with backoff on timeouts up to :max_retries,
  # folding every other failure into {:error, :arc_httpoison_error}.
  defp request(remote_path, options, tries \\ 0) do
    case :hackney.get(URI.to_string(remote_path), [], "", options) do
      {:ok, 200, _headers, client_ref} -> :hackney.body(client_ref)
      {:error, %{reason: :timeout}} ->
        case retry(tries, options) do
          {:ok, :retry} -> request(remote_path, options, tries + 1)
          {:error, :out_of_tries} -> {:error, :timeout}
        end
      _ -> {:error, :arc_httpoison_error}
    end
  end

  # Sleeps for an exponential backoff capped at :backoff_max and signals
  # whether another attempt should be made.
  # NOTE(review): on the first retry `tries` is 0, so the exponent is -1 and
  # the initial backoff is half of :backoff_factor — confirm this is intended.
  defp retry(tries, options) do
    cond do
      tries < options[:max_retries] ->
        backoff = round(options[:backoff_factor] * :math.pow(2, tries - 1))
        backoff = :erlang.min(backoff, options[:backoff_max])
        :timer.sleep(backoff)
        {:ok, :retry}
      true -> {:error, :out_of_tries}
    end
  end
end
| 30.609756 | 90 | 0.656042 |
ff6a70b6630af5c62fbd7e01f657ce71947fcd30 | 601 | ex | Elixir | lib/mix/lib/mix/tasks/compile.all.ex | gabrielelana/elixir | 7e78113f925d438568b7efa8eaded5ae43dce4b1 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/compile.all.ex | gabrielelana/elixir | 7e78113f925d438568b7efa8eaded5ae43dce4b1 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/compile.all.ex | gabrielelana/elixir | 7e78113f925d438568b7efa8eaded5ae43dce4b1 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Compile.All do
  use Mix.Task

  @moduledoc false
  @recursive true

  # This is an internal task used by mix compile which is meant to be
  # recursive and be invoked for each child project.
  def run(args) do
    Mix.Project.get!

    # Build the project structure so we can write down compiled files.
    Mix.Project.build_structure

    results =
      for compiler <- Mix.Tasks.Compile.compilers() do
        Mix.Task.run("compile.#{compiler}", args)
      end

    true = Code.prepend_path(Mix.Project.compile_path)

    # :ok from any compiler means something was (re)compiled.
    if :ok in results, do: :ok, else: :noop
  end
end
| 23.115385 | 70 | 0.675541 |
ff6ae2e5a18eb6681b0c78bf671bdd79d1e631c7 | 6,965 | ex | Elixir | apps/omg_eth/lib/omg_eth/root_chain/abi_function_selector.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 177 | 2018-08-24T03:51:02.000Z | 2020-05-30T13:29:25.000Z | apps/omg_eth/lib/omg_eth/root_chain/abi_function_selector.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 1,042 | 2018-08-25T00:52:39.000Z | 2020-06-01T05:15:17.000Z | apps/omg_eth/lib/omg_eth/root_chain/abi_function_selector.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 47 | 2018-08-24T12:06:33.000Z | 2020-04-28T11:49:25.000Z | # Copyright 2019-2020 OMG Network Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Eth.RootChain.AbiFunctionSelector do
  @moduledoc """
  We define Solidity Function selectors that help us decode returned values from function calls
  """

  # Each function below returns a hand-written %ABI.FunctionSelector{} for one
  # root-chain contract function; `method_id` is the 4-byte keccak selector and
  # `types` describes the calldata (or, for the getters near the bottom, the
  # return values — see the workaround note there).

  # workaround for https://github.com/omgnetwork/elixir-omg/issues/1632
  def start_exit() do
    %ABI.FunctionSelector{
      function: "startExit",
      input_names: [
        "utxoPosToExit",
        "rlpOutputTxToContract",
        "outputTxToContractInclusionProof",
        "rlpInputCreationTx",
        "inputCreationTxInclusionProof",
        "utxoPosInput"
      ],
      inputs_indexed: nil,
      method_id: <<191, 31, 49, 109>>,
      returns: [],
      type: :function,
      types: [{:uint, 256}, :bytes, :bytes, :bytes, :bytes, {:uint, 256}]
    }
  end

  def start_standard_exit() do
    %ABI.FunctionSelector{
      function: "startStandardExit",
      input_names: ["utxoPos", "rlpOutputTx", "outputTxInclusionProof"],
      inputs_indexed: nil,
      method_id: <<112, 224, 20, 98>>,
      returns: [],
      type: :function,
      types: [tuple: [{:uint, 256}, :bytes, :bytes]]
    }
  end

  def challenge_in_flight_exit_not_canonical() do
    %ABI.FunctionSelector{
      function: "challengeInFlightExitNotCanonical",
      input_names: [
        "inputTx",
        "inputUtxoPos",
        "inFlightTx",
        "inFlightTxInputIndex",
        "competingTx",
        "competingTxInputIndex",
        "competingTxPos",
        "competingTxInclusionProof",
        "competingTxWitness"
      ],
      inputs_indexed: [true, true, true, true, true, true, true, true, true],
      method_id: <<232, 54, 34, 152>>,
      returns: [],
      type: :function,
      types: [
        tuple: [
          :bytes,
          {:uint, 256},
          :bytes,
          {:uint, 16},
          :bytes,
          {:uint, 16},
          {:uint, 256},
          :bytes,
          :bytes
        ]
      ]
    }
  end

  def start_in_flight_exit() do
    %ABI.FunctionSelector{
      function: "startInFlightExit",
      input_names: ["inFlightTx", "inputTxs", "inputUtxosPos", "inputTxsInclusionProofs", "inFlightTxWitnesses"],
      inputs_indexed: nil,
      method_id: <<90, 82, 133, 20>>,
      returns: [],
      type: :function,
      types: [
        tuple: [
          :bytes,
          {:array, :bytes},
          {:array, {:uint, 256}},
          {:array, :bytes},
          {:array, :bytes}
        ]
      ]
    }
  end

  # min_exit_period/0, get_version/0, exit_games/0, vaults/0 are
  # victims of unfortinate bug: https://github.com/poanetwork/ex_abi/issues/25
  # All these selectors were intially pulled in with
  # `ABI.parse_specification(contract_abi_json_decoded,include_events?: true)`
  # and later modified so that `types` hold what `returns` should have because of
  # issue 25.
  # the commented properties of the struct is what it was generated,
  # the new types were added to mitigate the bug.
  def min_exit_period() do
    %ABI.FunctionSelector{
      function: "minExitPeriod",
      input_names: ["min_exit_period"],
      inputs_indexed: nil,
      method_id: <<212, 162, 180, 239>>,
      # returns: [uint: 256],
      type: :function,
      # types: []
      types: [uint: 256]
    }
  end

  def get_version() do
    %ABI.FunctionSelector{
      function: "getVersion",
      input_names: ["version"],
      inputs_indexed: nil,
      method_id: <<13, 142, 110, 44>>,
      # returns: [:string],
      type: :function,
      # types: []
      types: [:string]
    }
  end

  def exit_games() do
    %ABI.FunctionSelector{
      function: "exitGames",
      input_names: ["exit_game_address"],
      inputs_indexed: nil,
      method_id: <<175, 7, 151, 100>>,
      # returns: [:address],
      type: :function,
      # types: [uint: 256]
      types: [:address]
    }
  end

  def vaults() do
    %ABI.FunctionSelector{
      function: "vaults",
      input_names: ["vault_address"],
      inputs_indexed: nil,
      method_id: <<140, 100, 234, 74>>,
      # returns: [:address],
      type: :function,
      # types: [uint: 256]
      types: [:address]
    }
  end

  def child_block_interval() do
    %ABI.FunctionSelector{
      function: "childBlockInterval",
      input_names: ["child_block_interval"],
      inputs_indexed: nil,
      method_id: <<56, 169, 224, 188>>,
      # returns: [uint: 256],
      type: :function,
      # types: []
      types: [uint: 256]
    }
  end

  def next_child_block() do
    %ABI.FunctionSelector{
      function: "nextChildBlock",
      input_names: ["block_number"],
      inputs_indexed: nil,
      method_id: <<76, 168, 113, 79>>,
      # returns: [uint: 256],
      type: :function,
      # types: []
      types: [uint: 256]
    }
  end

  def blocks() do
    %ABI.FunctionSelector{
      function: "blocks",
      input_names: ["block_hash", "block_timestamp"],
      inputs_indexed: nil,
      method_id: <<242, 91, 63, 153>>,
      # returns: [bytes: 32, uint: 256],
      type: :function,
      # types: [uint: 256]
      types: [bytes: 32, uint: 256]
    }
  end

  def standard_exits() do
    %ABI.FunctionSelector{
      function: "standardExits",
      input_names: ["standard_exit_structs"],
      inputs_indexed: nil,
      method_id: <<12, 165, 182, 118>>,
      # returns: [
      #   array: {:tuple, [:bool, {:uint, 256}, {:bytes, 32}, :address, {:uint, 256}, {:uint, 256}]}
      # ],
      type: :function,
      # types: [array: {:uint, 160}]
      types: [
        array: {:tuple, [:bool, {:uint, 256}, {:bytes, 32}, :address, {:uint, 256}, {:uint, 256}]}
      ]
    }
  end

  def in_flight_exits() do
    %ABI.FunctionSelector{
      function: "inFlightExits",
      input_names: ["in_flight_exit_structs"],
      inputs_indexed: nil,
      method_id: <<206, 201, 225, 167>>,
      # returns: [
      #   array: {:tuple,
      #    [
      #      :bool,
      #      {:uint, 64},
      #      {:uint, 256},
      #      {:uint, 256},
      #      {:array, :tuple, 4},
      #      {:array, :tuple, 4},
      #      :address,
      #      {:uint, 256},
      #      {:uint, 256}
      #    ]}
      # ],
      type: :function,
      # types: [array: {:uint, 160}]
      types: [
        {:array, {:tuple, [:bool, {:uint, 64}, {:uint, 256}, {:uint, 256}, :address, {:uint, 256}, {:uint, 256}]}}
      ]
    }
  end
end
| 27.86 | 114 | 0.564537 |
ff6af392fe42fc068f9aba6fa370cf1475c4cb0b | 3,930 | ex | Elixir | lib/collections/user.ex | paperwork/service-users | 717463aa112fbb2d12ea23f672e18f4d39deb968 | [
"MIT"
] | 4 | 2019-03-17T15:27:31.000Z | 2020-05-19T17:35:21.000Z | lib/collections/user.ex | paperwork/service-users | 717463aa112fbb2d12ea23f672e18f4d39deb968 | [
"MIT"
] | null | null | null | lib/collections/user.ex | paperwork/service-users | 717463aa112fbb2d12ea23f672e18f4d39deb968 | [
"MIT"
] | 3 | 2019-02-15T21:48:51.000Z | 2019-04-08T06:43:38.000Z | defmodule Paperwork.Collections.User do
  require Logger

  # MongoDB collection name and the fields stripped from every response.
  @collection "users"
  @privates [:password]
  @enforce_keys []

  @type t :: %__MODULE__{
    id: BSON.ObjectId.t() | nil,
    email: String.t(),
    password: String.t() | nil,
    name: %{
      first_name: String.t(),
      last_name: String.t()
    },
    profile_photo: String.t() | nil,
    role: String.t(),
    created_at: DateTime.t(),
    updated_at: DateTime.t(),
    deleted_at: DateTime.t() | nil
  }

  defstruct \
    id: nil,
    email: "",
    password: nil,
    name: %{
      first_name: "",
      last_name: ""
    },
    profile_photo: nil,
    role: "",
    created_at: DateTime.utc_now(),
    updated_at: DateTime.utc_now(),
    deleted_at: nil

  use Paperwork.Collections

  # Looks up a user by raw BSON ObjectId.
  @spec show(id :: BSON.ObjectId.t) :: {:ok, %__MODULE__{}} | {:notfound, nil}
  def show(%BSON.ObjectId{value: _} = id) when is_map(id) do
    show(%__MODULE__{:id => id})
  end

  # Looks up a user by a global Paperwork.Id, using only its resource id part.
  # `system_id` is intentionally unused here (underscored to avoid a compiler
  # warning): lookups are local to this service's collection.
  @spec show(id :: Paperwork.Id.t) :: {:ok, %__MODULE__{}} | {:notfound, nil}
  def show(%Paperwork.Id{gid: gid, id: resource_id, system_id: _system_id} = id) when is_map(id) and is_binary(gid) and is_binary(resource_id) do
    show(%__MODULE__{:id => resource_id |> BSON.ObjectId.decode!()})
  end

  # Looks up a user by an ObjectId given in its hex-string form.
  @spec show(id :: String.t) :: {:ok, %__MODULE__{}} | {:notfound, nil}
  def show(id) when is_binary(id) do
    show(%__MODULE__{:id => id |> BSON.ObjectId.decode!()})
  end

  # Looks up a user by the id set on the given struct; private fields are
  # stripped before returning.
  @spec show(model :: __MODULE__.t) :: {:ok, %__MODULE__{}} | {:notfound, nil}
  def show(%__MODULE__{:id => _} = model) do
    collection_find(model, :id)
    |> strip_privates
  end

  # Verifies `password` against the stored Bcrypt hash for the user matching
  # `email`. Returns {:ok, user} (privates stripped) or {:nok, details}.
  # NOTE(review): when the email is unknown no Bcrypt verification runs, so
  # response timing differs for known vs unknown emails (user enumeration);
  # consider Bcrypt.no_user_verify/0 in the failure path — confirm first.
  @spec authenticate(model :: __MODULE__.t) :: {:ok, %__MODULE__{}} | {:nok, nil}
  def authenticate(%__MODULE__{:email => _email, :password => password} = model) do
    with \
      {:ok, found_user} <- collection_find(model, :email),
      true <- Bcrypt.verify_pass(password, Map.get(found_user, :password)) do
        {:ok, found_user |> strip_privates }
    else
      other ->
        {:nok, other}
    end
  end

  # Returns all users, with private fields stripped.
  @spec list() :: {:ok, [%__MODULE__{}]} | {:notfound, nil}
  def list() do
    %{}
    |> collection_find(true)
    |> strip_privates
  end

  # Inserts a new user, hashing the password first when one is present.
  @spec create(model :: __MODULE__.t) :: {:ok, %__MODULE__{}} | {:error, String.t}
  def create(%__MODULE__{} = model) do
    model
    |> set_password_if_given
    |> collection_insert
    |> strip_privates
  end

  # Struct variant of update/1: delegates to the map variant and rewraps.
  @spec update(model :: __MODULE__.t) :: {:ok, %__MODULE__{}} | {:error, String.t}
  def update(%__MODULE__{} = model) do
    updated_model =
      Map.from_struct(model)
      |> update()

    struct(__MODULE__, updated_model)
  end

  # Updates the user identified by `model.id` with the remaining fields via a
  # MongoDB `$set`; the password is re-hashed when a new one is given.
  @spec update(model :: Map.t) :: {:ok, %{}} | {:error, String.t}
  def update(%{} = model) do
    query = %{
      id: model |> Map.get(:id)
    }

    changeset =
      model
      |> Map.delete(:id)
      |> set_password_if_given

    %{
      "$set": changeset
    }
    |> collection_update_manually(query)
    |> strip_privates
  end

  # Struct variant: hashes :password in place when present.
  @spec set_password_if_given(model :: __MODULE__.t) :: %__MODULE__{}
  defp set_password_if_given(%__MODULE__{} = model) do
    updated_model =
      Map.from_struct(model)
      |> set_password_if_given

    struct(__MODULE__, updated_model)
  end

  # Replaces a plaintext :password with its Bcrypt hash.
  @spec set_password_if_given(model :: Map.t) :: %{}
  defp set_password_if_given(%{password: password} = model) when is_binary(password) do
    model
    |> Map.put(:password, Bcrypt.hash_pwd_salt(password))
  end

  # No (binary) password present — nothing to hash.
  @spec set_password_if_given(model :: Map.t) :: %{}
  defp set_password_if_given(%{} = model) do
    model
  end
end
| 29.772727 | 146 | 0.551145 |
ff6b25059f362849b5bf9d840e8b00ca0a40f221 | 767 | ex | Elixir | test/support/channel_case.ex | HoJSim/etog | dc2393e27915caf73eea0d8839ba198635c45ca6 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | HoJSim/etog | dc2393e27915caf73eea0d8839ba198635c45ca6 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | HoJSim/etog | dc2393e27915caf73eea0d8839ba198635c45ca6 | [
"MIT"
] | null | null | null | defmodule EtogWeb.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by
  channel tests.

  Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest

      # The default endpoint for testing
      @endpoint EtogWeb.Endpoint
    end
  end

  setup _tags do
    # NOTE(review): the moduledoc mentions wrapping each test in a database
    # transaction, but no sandbox/transaction checkout happens here — confirm
    # whether an Ecto sandbox setup is still needed or the doc is stale.
    :ok
  end
end
| 23.96875 | 59 | 0.724902 |
ff6b43cc3488180086b65e78ef9ec03a5984a092 | 297 | ex | Elixir | lib/step_flow/controllers/job_controller.ex | mathiaHT/ex_step_flow | 6496e9511239de64f00119428476338dfcde9dea | [
"MIT"
] | 4 | 2019-12-07T05:18:26.000Z | 2020-11-06T23:28:43.000Z | lib/step_flow/controllers/job_controller.ex | mathiaHT/ex_step_flow | 6496e9511239de64f00119428476338dfcde9dea | [
"MIT"
] | 53 | 2020-01-06T11:23:09.000Z | 2021-06-25T15:30:07.000Z | lib/step_flow/controllers/job_controller.ex | mathiaHT/ex_step_flow | 6496e9511239de64f00119428476338dfcde9dea | [
"MIT"
] | 3 | 2020-01-30T15:37:40.000Z | 2020-10-27T14:10:02.000Z | defmodule StepFlow.JobController do
  use StepFlow, :controller

  alias StepFlow.Jobs

  action_fallback(ExBackendWeb.FallbackController)

  # Lists the jobs matching the given query params, rendered as JSON.
  def index(conn, params) do
    conn
    |> put_view(StepFlow.JobView)
    |> render("index.json", jobs: Jobs.list_jobs(params))
  end
end
| 18.5625 | 50 | 0.720539 |
ff6b6312364413503dac9e79c44b0bfb261b1b32 | 1,863 | ex | Elixir | lib/states_language/ast/await.ex | entone/states_language | 19d663e6fb5e264b8c13e124ceb5ae1e9d8559e9 | [
"MIT"
] | 4 | 2020-05-13T04:38:37.000Z | 2020-05-13T20:24:21.000Z | lib/states_language/ast/await.ex | entone/states_language | 19d663e6fb5e264b8c13e124ceb5ae1e9d8559e9 | [
"MIT"
] | null | null | null | lib/states_language/ast/await.ex | entone/states_language | 19d663e6fb5e264b8c13e124ceb5ae1e9d8559e9 | [
"MIT"
] | null | null | null | defmodule StatesLanguage.AST.Await do
  @moduledoc false
  @behaviour StatesLanguage.AST

  alias StatesLanguage.AST.Resource
  alias StatesLanguage.Node

  # Generates the `handle_event` clauses that collect results from parallel
  # tasks for the given state. A running task is tracked as a bare pid; a
  # finished one as a `{pid, result}` tuple. Bindings that are only matched
  # (never read) are underscored so the generated module compiles without
  # "unused variable" warnings.
  @impl true
  def create(%Resource{
        name: state_name,
        node: %Node{
          resource_path: resource_path,
          output_path: output_path,
          event: event,
          is_end: is_end
        }
      }) do
    quote location: :keep do
      @impl true
      def handle_event(
            :internal,
            :await_parallel_tasks,
            unquote(state_name) = _state,
            %StatesLanguage{_tasks: tasks, data: data} = sl
          ) do
        debug("Checking tasks: #{inspect(tasks)}")

        # All entries are `{pid, result}` tuples once every task has reported.
        if Enum.all?(tasks, fn
             {_pid, _res} -> true
             _ -> false
           end) do
          res = Enum.map(tasks, fn {_p, res} -> res end)
          data = put_result(res, unquote(resource_path), unquote(output_path), data)

          case AST.do_stop?(unquote(is_end)) do
            true ->
              :stop

            false ->
              {:keep_state, %StatesLanguage{sl | data: data, _tasks: []},
               [{:next_event, :internal, unquote(event)}]}
          end
        else
          debug("Waiting for more parallel results")
          {:keep_state, sl}
        end
      end

      @impl true
      def handle_event(
            :info,
            {:task_processed, result, pid},
            unquote(state_name),
            %StatesLanguage{_tasks: tasks} = sl
          ) do
        debug("Got Result: #{inspect(pid)} #{inspect(tasks)}")

        # Replace the bare pid entry (pinned to the sender's pid) with its
        # result; already-finished `{pid, result}` tuples are left untouched.
        tasks =
          Enum.map(tasks, fn
            {_pid, _res} = d -> d
            ^pid = p -> {p, result}
            o -> o
          end)

        {:keep_state, %StatesLanguage{sl | _tasks: tasks},
         [{:next_event, :internal, :await_parallel_tasks}]}
      end
    end
  end
end
| 26.614286 | 84 | 0.515835 |
ff6b65ed72795a6d71e9a62ea2667d86f9826c92 | 1,134 | ex | Elixir | lib/zygalski/crypto.ex | lucidstack/zygalski | 6c39f8f6cd47852f15e8d2ea800b407059235874 | [
"MIT"
] | null | null | null | lib/zygalski/crypto.ex | lucidstack/zygalski | 6c39f8f6cd47852f15e8d2ea800b407059235874 | [
"MIT"
] | null | null | null | lib/zygalski/crypto.ex | lucidstack/zygalski | 6c39f8f6cd47852f15e8d2ea800b407059235874 | [
"MIT"
] | null | null | null | defmodule Zygalski.Crypto do
  @moduledoc """
  RSA encryption/decryption helpers built on named key pairs loaded from disk.
  """

  alias Zygalski.SslUtils

  # Encrypts `message` with the named public key and Base64-encodes the result.
  # Raises (MatchError) when the public key cannot be read/decoded.
  def encrypt(message, key_name) do
    {:ok, public_key} = key(key_name, :public)
    encrypted_message = message |> :public_key.encrypt_public(public_key) |> Base.encode64
    {:ok, encrypted_message}
  end

  # Decrypts a Base64-encoded cipher text with the named private key,
  # unlocked by `password`. Returns {:ok, message} or
  # {:error, :wrong_password} when the key cannot be decoded.
  def decrypt(cipher_text, password, key_name) do
    private_key = key(key_name, :private, password)
    decrypt_with_key(private_key, cipher_text)
  end

  defp decrypt_with_key({:ok, key}, cipher_text) do
    message = cipher_text |> decode_cipher_text |> :public_key.decrypt_private(key)
    {:ok, message}
  end

  # Key decoding failed — the cipher text is irrelevant here, so it is
  # underscored to avoid an unused-variable compiler warning.
  defp decrypt_with_key({:error, _}, _cipher_text),
    do: {:error, :wrong_password}

  # Base64-decodes the cipher text; raises on malformed input.
  defp decode_cipher_text(message) do
    {:ok, decoded_message} = message |> Base.decode64
    decoded_message
  end

  # Reads the raw PEM content for the named key of the given type.
  defp key_content(key_name, type) do
    {:ok, key_content} = SslUtils.key_path(key_name, type) |> File.read
    key_content
  end

  defp key(key_name, :private, password),
    do: key_name |> key_content(:private) |> Zygalski.Key.decode(password)

  defp key(key_name, :public),
    do: key_name |> key_content(:public) |> Zygalski.Key.decode
end
| 28.35 | 90 | 0.715168 |
ff6bb577638612013f0db8436c0605d6aa24775c | 1,692 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/thumbnail.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/thumbnail.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/thumbnail.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.Thumbnail do
@moduledoc """
A thumbnail is an image representing a YouTube resource.
## Attributes
- height (integer()): (Optional) Height of the thumbnail image. Defaults to: `null`.
- url (String.t): The thumbnail image's URL. Defaults to: `null`.
- width (integer()): (Optional) Width of the thumbnail image. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:height => any(),
:url => any(),
:width => any()
}
field(:height)
field(:url)
field(:width)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.Thumbnail do
def decode(value, options) do
GoogleApi.YouTube.V3.Model.Thumbnail.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.Thumbnail do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 31.333333 | 86 | 0.714539 |
ff6bd67f11b0ee431f2f2e13200954ea61f5f8a9 | 6,941 | ex | Elixir | lib/logger/lib/logger/backends/console.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 1 | 2021-05-20T13:08:37.000Z | 2021-05-20T13:08:37.000Z | lib/logger/lib/logger/backends/console.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | null | null | null | lib/logger/lib/logger/backends/console.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 8 | 2018-02-20T18:30:53.000Z | 2019-06-18T14:23:31.000Z | defmodule Logger.Backends.Console do
  @moduledoc false

  @behaviour :gen_event

  # Backend state:
  #   buffer/buffer_size - messages queued while an async IO request is in flight
  #   ref                - monitor ref of the outstanding io_request (nil when idle)
  #   output             - the iodata of the in-flight request, kept for retries
  #   max_buffer         - queued-message limit before we block awaiting IO
  defstruct buffer: [],
            buffer_size: 0,
            colors: nil,
            device: nil,
            format: nil,
            level: nil,
            max_buffer: nil,
            metadata: nil,
            output: nil,
            ref: nil

  # Default init: refuse to start when the configured device isn't registered.
  def init(:console) do
    config = Application.get_env(:logger, :console)
    device = Keyword.get(config, :device, :user)

    if Process.whereis(device) do
      {:ok, init(config, %__MODULE__{})}
    else
      {:error, :ignore}
    end
  end

  def init({__MODULE__, opts}) when is_list(opts) do
    config = configure_merge(Application.get_env(:logger, :console), opts)
    {:ok, init(config, %__MODULE__{})}
  end

  def handle_call({:configure, options}, state) do
    {:ok, :ok, configure(options, state)}
  end

  # Ignore events originating from other nodes.
  def handle_event({_level, gl, _event}, state) when node(gl) != node() do
    {:ok, state}
  end

  # Log event: write immediately when idle; buffer while IO is in flight;
  # once the buffer is full, block until the in-flight request completes.
  def handle_event({level, _gl, {Logger, msg, ts, md}}, state) do
    %{level: log_level, ref: ref, buffer_size: buffer_size, max_buffer: max_buffer} = state

    cond do
      not meet_level?(level, log_level) ->
        {:ok, state}

      is_nil(ref) ->
        {:ok, log_event(level, msg, ts, md, state)}

      buffer_size < max_buffer ->
        {:ok, buffer_event(level, msg, ts, md, state)}

      buffer_size === max_buffer ->
        state = buffer_event(level, msg, ts, md, state)
        {:ok, await_io(state)}
    end
  end

  def handle_event(:flush, state) do
    {:ok, flush(state)}
  end

  def handle_event(_, state) do
    {:ok, state}
  end

  # Completion of our outstanding async io_request.
  def handle_info({:io_reply, ref, msg}, %{ref: ref} = state) do
    {:ok, handle_io_reply(msg, state)}
  end

  # The device we were writing to died.
  def handle_info({:DOWN, ref, _, pid, reason}, %{ref: ref}) do
    raise "device #{inspect(pid)} exited: " <> Exception.format_exit(reason)
  end

  def handle_info(_, state) do
    {:ok, state}
  end

  def code_change(_old_vsn, state, _extra) do
    {:ok, state}
  end

  def terminate(_reason, _state) do
    :ok
  end

  ## Helpers

  # nil minimum level means "log everything".
  defp meet_level?(_lvl, nil), do: true

  defp meet_level?(lvl, min) do
    Logger.compare_levels(lvl, min) != :lt
  end

  # Runtime reconfiguration: persist the merged config and re-init.
  defp configure(options, state) do
    config = configure_merge(Application.get_env(:logger, :console), options)
    Application.put_env(:logger, :console, config)
    init(config, state)
  end

  defp init(config, state) do
    level = Keyword.get(config, :level)
    device = Keyword.get(config, :device, :user)
    format = Logger.Formatter.compile(Keyword.get(config, :format))
    colors = configure_colors(config)
    metadata = Keyword.get(config, :metadata, []) |> configure_metadata()
    max_buffer = Keyword.get(config, :max_buffer, 32)

    %{
      state
      | format: format,
        metadata: metadata,
        level: level,
        colors: colors,
        device: device,
        max_buffer: max_buffer
    }
  end

  # Reversed so take_metadata/2 (which prepends) restores the configured order.
  defp configure_metadata(:all), do: :all
  defp configure_metadata(metadata), do: Enum.reverse(metadata)

  # Merge options into the env config; :colors merges deeply, others replace.
  defp configure_merge(env, options) do
    Keyword.merge(env, options, fn
      :colors, v1, v2 -> Keyword.merge(v1, v2)
      _, _v1, v2 -> v2
    end)
  end

  defp configure_colors(config) do
    colors = Keyword.get(config, :colors, [])

    %{
      debug: Keyword.get(colors, :debug, :cyan),
      info: Keyword.get(colors, :info, :normal),
      warn: Keyword.get(colors, :warn, :yellow),
      error: Keyword.get(colors, :error, :red),
      enabled: Keyword.get(colors, :enabled, IO.ANSI.enabled?())
    }
  end

  # Format and asynchronously write one event, remembering the request ref.
  defp log_event(level, msg, ts, md, %{device: device} = state) do
    output = format_event(level, msg, ts, md, state)
    %{state | ref: async_io(device, output), output: output}
  end

  # Append a formatted event to the pending buffer (as iodata).
  defp buffer_event(level, msg, ts, md, state) do
    %{buffer: buffer, buffer_size: buffer_size} = state
    buffer = [buffer | format_event(level, msg, ts, md, state)]
    %{state | buffer: buffer, buffer_size: buffer_size + 1}
  end

  defp async_io(name, output) when is_atom(name) do
    case Process.whereis(name) do
      device when is_pid(device) ->
        async_io(device, output)

      nil ->
        raise "no device registered with the name #{inspect(name)}"
    end
  end

  # Send a raw Erlang IO-protocol request; the monitor detects device death.
  defp async_io(device, output) when is_pid(device) do
    ref = Process.monitor(device)
    send(device, {:io_request, self(), ref, {:put_chars, :unicode, output}})
    ref
  end

  # Block until the outstanding io_request completes (retrying on errors).
  defp await_io(%{ref: nil} = state), do: state

  defp await_io(%{ref: ref} = state) do
    receive do
      {:io_reply, ^ref, :ok} ->
        handle_io_reply(:ok, state)

      {:io_reply, ^ref, error} ->
        handle_io_reply(error, state)
        |> await_io()

      {:DOWN, ^ref, _, pid, reason} ->
        raise "device #{inspect(pid)} exited: " <> Exception.format_exit(reason)
    end
  end

  defp format_event(level, msg, ts, md, state) do
    %{format: format, metadata: keys, colors: colors} = state

    format
    |> Logger.Formatter.format(level, msg, ts, take_metadata(md, keys))
    |> color_event(level, colors, md)
  end

  # Pick the configured metadata keys, silently dropping missing ones.
  defp take_metadata(metadata, :all), do: metadata

  defp take_metadata(metadata, keys) do
    Enum.reduce(keys, [], fn key, acc ->
      case Keyword.fetch(metadata, key) do
        {:ok, val} -> [{key, val} | acc]
        :error -> acc
      end
    end)
  end

  defp color_event(data, _level, %{enabled: false}, _md), do: data

  # Per-message :ansi_color metadata overrides the level's configured color.
  defp color_event(data, level, %{enabled: true} = colors, md) do
    color = md[:ansi_color] || Map.fetch!(colors, level)
    [IO.ANSI.format_fragment(color, true), data | IO.ANSI.reset()]
  end

  # Flush the pending buffer as a single async write, if non-empty.
  defp log_buffer(%{buffer_size: 0, buffer: []} = state), do: state

  defp log_buffer(state) do
    %{device: device, buffer: buffer} = state
    %{state | ref: async_io(device, buffer), buffer: [], buffer_size: 0, output: buffer}
  end

  # Successful write: drop the monitor and start on the buffered messages.
  defp handle_io_reply(:ok, %{ref: ref} = state) do
    Process.demonitor(ref, [:flush])
    log_buffer(%{state | ref: nil, output: nil})
  end

  defp handle_io_reply({:error, {:put_chars, :unicode, _} = error}, state) do
    retry_log(error, state)
  end

  defp handle_io_reply({:error, :put_chars}, %{output: output} = state) do
    retry_log({:put_chars, :unicode, output}, state)
  end

  defp handle_io_reply({:error, error}, _) do
    raise "failure while logging console messages: " <> inspect(error)
  end

  # The device rejected the bytes: prune invalid unicode and resend once.
  defp retry_log(error, %{device: device, ref: ref, output: dirty} = state) do
    Process.demonitor(ref, [:flush])

    case :unicode.characters_to_binary(dirty) do
      {_, good, bad} ->
        clean = [good | Logger.Formatter.prune(bad)]
        %{state | ref: async_io(device, clean), output: clean}

      _ ->
        # A well behaved IO device should not error on good data
        raise "failure while logging consoles messages: " <> inspect(error)
    end
  end

  # Synchronously drain the in-flight request and any buffered messages.
  defp flush(%{ref: nil} = state), do: state

  defp flush(state) do
    state
    |> await_io()
    |> flush()
  end
end
| 26.903101 | 91 | 0.621524 |
ff6c18471408a5a74ec0a7d94aafc11872987ded | 61 | ex | Elixir | examples/demo/lib/jobs.ex | mbuhot/farq | d9ec81c518f38f9194dd6162a8ffed562481635f | [
"MIT"
] | null | null | null | examples/demo/lib/jobs.ex | mbuhot/farq | d9ec81c518f38f9194dd6162a8ffed562481635f | [
"MIT"
] | null | null | null | examples/demo/lib/jobs.ex | mbuhot/farq | d9ec81c518f38f9194dd6162a8ffed562481635f | [
"MIT"
] | null | null | null | defmodule Demo.Jobs do
  # Job queue for the demo app; Farq reads its configuration from the
  # :demo OTP application environment.
  use Farq.Queue, otp_app: :demo
end
| 12.2 | 32 | 0.737705 |
ff6c49bf0da85fc42592c4ad7a3fe91b084f090a | 3,015 | ex | Elixir | farmbot_core/lib/farmbot_core/asset_workers/fbos_config_worker.ex | gdwb/farmbot_os | 0ef2697c580c9fbf37a22daa063a64addfcb778d | [
"MIT"
] | 1 | 2021-08-23T13:36:14.000Z | 2021-08-23T13:36:14.000Z | farmbot_core/lib/farmbot_core/asset_workers/fbos_config_worker.ex | gdwb/farmbot_os | 0ef2697c580c9fbf37a22daa063a64addfcb778d | [
"MIT"
] | null | null | null | farmbot_core/lib/farmbot_core/asset_workers/fbos_config_worker.ex | gdwb/farmbot_os | 0ef2697c580c9fbf37a22daa063a64addfcb778d | [
"MIT"
defimpl FarmbotCore.AssetWorker, for: FarmbotCore.Asset.FbosConfig do
  @moduledoc """
  Asset worker for `FbosConfig`. This worker is never restarted; it instead
  reacts to GenServer messages carrying fresh configuration data.
  """
  use GenServer
  require Logger
  require FarmbotCore.Logger
  alias FarmbotCore.{Asset.FbosConfig, BotState}

  # Config fields whose changes are logged and mirrored into the bot state.
  @interesting_params [
    :network_not_found_timer,
    :os_auto_update,
    :sequence_body_log,
    :sequence_complete_log,
    :sequence_init_log
  ]

  @impl FarmbotCore.AssetWorker
  def preload(%FbosConfig{}), do: []

  @impl FarmbotCore.AssetWorker
  def tracks_changes?(%FbosConfig{}), do: true

  @impl FarmbotCore.AssetWorker
  def start_link(%FbosConfig{} = fbos_config, _args) do
    GenServer.start_link(__MODULE__, %FbosConfig{} = fbos_config)
  end

  @impl GenServer
  def init(%FbosConfig{} = fbos_config) do
    {:ok, %{fbos_config: fbos_config}}
  end

  @impl GenServer
  def handle_info(message, state) do
    # Nothing is expected to send plain messages here; log so stray mail is visible.
    Logger.debug("!!!UNKNOWN FBOS Config Worker Message: #{inspect(message)}")
    {:noreply, state}
  end

  @impl GenServer
  def handle_cast({:new_data, incoming}, %{fbos_config: %FbosConfig{} = current} = state) do
    _ = set_config_to_state(incoming, current)
    {:noreply, %{state | fbos_config: incoming}}
  end

  # Logs every interesting parameter that differs between the two configs,
  # then pushes the new config into the bot state.
  def set_config_to_state(new_fbos_config, old_fbos_config) do
    new_set = new_fbos_config |> Map.take(@interesting_params) |> MapSet.new()
    old_set = old_fbos_config |> Map.take(@interesting_params) |> MapSet.new()

    new_set
    |> MapSet.difference(old_set)
    |> Enum.each(&log_change/1)

    set_config_to_state(new_fbos_config)
  end

  # Mirrors the relevant config values into BotState.
  def set_config_to_state(fbos_config) do
    # firmware_hardware is set by FarmbotCore.Firmware.SideEffects
    :ok = BotState.set_config_value(:network_not_found_timer, fbos_config.network_not_found_timer)
    :ok = BotState.set_config_value(:os_auto_update, fbos_config.os_auto_update)
    # CeleryScript
    :ok = BotState.set_config_value(:sequence_body_log, fbos_config.sequence_body_log)
    :ok = BotState.set_config_value(:sequence_complete_log, fbos_config.sequence_complete_log)
    :ok = BotState.set_config_value(:sequence_init_log, fbos_config.sequence_init_log)
  end

  # One clause per known parameter so each change gets a human-friendly message.
  defp log_change({:os_auto_update, bool}),
    do: FarmbotCore.Logger.success(1, "Set OS auto update to #{bool}")

  defp log_change({:network_not_found_timer, minutes}),
    do: FarmbotCore.Logger.success(1, "Set connection attempt period to #{minutes} minutes")

  defp log_change({:sequence_body_log, bool}),
    do: FarmbotCore.Logger.success(1, "Set sequence step log messages to #{bool}")

  defp log_change({:sequence_complete_log, bool}),
    do: FarmbotCore.Logger.success(1, "Set sequence complete log messages to #{bool}")

  defp log_change({:sequence_init_log, bool}),
    do: FarmbotCore.Logger.success(1, "Set sequence init log messages to #{bool}")

  defp log_change({param, value}),
    do: FarmbotCore.Logger.success(1, "Set #{param} to #{value}")
end
| 35.470588 | 107 | 0.730348 |
ff6c50b1153fb201012779b49a3f609e83c51f3c | 2,468 | exs | Elixir | config/prod.exs | mikehelmick/meme-bot | 52a84cfb3f5ddcdddadf59b0ba3976f9e3f23800 | [
"Apache-2.0"
] | 7 | 2019-04-05T06:12:56.000Z | 2021-04-03T11:39:40.000Z | config/prod.exs | mikehelmick/meme-bot | 52a84cfb3f5ddcdddadf59b0ba3976f9e3f23800 | [
"Apache-2.0"
] | null | null | null | config/prod.exs | mikehelmick/meme-bot | 52a84cfb3f5ddcdddadf59b0ba3976f9e3f23800 | [
"Apache-2.0"
] | 3 | 2019-04-20T13:05:48.000Z | 2019-06-05T16:52:46.000Z | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :chatbot, ChatbotWeb.Endpoint,
load_from_system_env: false,
http: [port: {:system, "PORT"}],
check_origin: false,
server: true,
root: ".",
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :chatbot, ChatbotWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# :inet6,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :chatbot, ChatbotWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases (distillery)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :chatbot, ChatbotWeb.Endpoint, server: true
#
# Note you can't rely on `System.get_env/1` when using releases.
# See the releases documentation accordingly.
# Finally import the config/prod.secret.exs which should be versioned
# separately.
import_config "prod.secret.exs"
| 32.906667 | 69 | 0.713533 |
ff6c5581e094136cc54198e46d81935bb0640554 | 12,774 | ex | Elixir | clients/iam/lib/google_api/iam/v1/api/roles.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/iam/lib/google_api/iam/v1/api/roles.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/iam/lib/google_api/iam/v1/api/roles.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.IAM.V1.Api.Roles do
@moduledoc """
API calls for all endpoints tagged `Roles`.
"""
alias GoogleApi.IAM.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Gets a Role definition.
## Parameters
* `connection` (*type:* `GoogleApi.IAM.V1.Connection.t`) - Connection to server
* `roles_id` (*type:* `String.t`) - Part of `name`. The `name` parameter's value depends on the target resource for the
request, namely
[`roles`](/iam/reference/rest/v1/roles),
[`projects`](/iam/reference/rest/v1/projects.roles), or
[`organizations`](/iam/reference/rest/v1/organizations.roles). Each
resource type's `name` value format is described below:
* [`roles.get()`](/iam/reference/rest/v1/roles/get): `roles/{ROLE_NAME}`.
This method returns results from all
[predefined roles](/iam/docs/understanding-roles#predefined_roles) in
Cloud IAM. Example request URL:
`https://iam.googleapis.com/v1/roles/{ROLE_NAME}`
* [`projects.roles.get()`](/iam/reference/rest/v1/projects.roles/get):
`projects/{PROJECT_ID}/roles/{CUSTOM_ROLE_ID}`. This method returns only
[custom roles](/iam/docs/understanding-custom-roles) that have been
created at the project level. Example request URL:
`https://iam.googleapis.com/v1/projects/{PROJECT_ID}/roles/{CUSTOM_ROLE_ID}`
* [`organizations.roles.get()`](/iam/reference/rest/v1/organizations.roles/get):
`organizations/{ORGANIZATION_ID}/roles/{CUSTOM_ROLE_ID}`. This method
returns only [custom roles](/iam/docs/understanding-custom-roles) that
have been created at the organization level. Example request URL:
`https://iam.googleapis.com/v1/organizations/{ORGANIZATION_ID}/roles/{CUSTOM_ROLE_ID}`
Note: Wildcard (*) values are invalid; you must specify a complete project
ID or organization ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.IAM.V1.Model.Role{}}` on success
* `{:error, info}` on failure
"""
@spec iam_roles_get(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.IAM.V1.Model.Role.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
def iam_roles_get(connection, roles_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/roles/{rolesId}", %{
"rolesId" => URI.encode(roles_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.IAM.V1.Model.Role{}])
end
@doc """
Lists the Roles defined on a resource.
## Parameters
* `connection` (*type:* `GoogleApi.IAM.V1.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - Optional limit on the number of roles to include in the response.
* `:pageToken` (*type:* `String.t`) - Optional pagination token returned in an earlier ListRolesResponse.
* `:parent` (*type:* `String.t`) - The `parent` parameter's value depends on the target resource for the
request, namely
[`roles`](/iam/reference/rest/v1/roles),
[`projects`](/iam/reference/rest/v1/projects.roles), or
[`organizations`](/iam/reference/rest/v1/organizations.roles). Each
resource type's `parent` value format is described below:
* [`roles.list()`](/iam/reference/rest/v1/roles/list): An empty string.
This method doesn't require a resource; it simply returns all
[predefined roles](/iam/docs/understanding-roles#predefined_roles) in
Cloud IAM. Example request URL:
`https://iam.googleapis.com/v1/roles`
* [`projects.roles.list()`](/iam/reference/rest/v1/projects.roles/list):
`projects/{PROJECT_ID}`. This method lists all project-level
[custom roles](/iam/docs/understanding-custom-roles).
Example request URL:
`https://iam.googleapis.com/v1/projects/{PROJECT_ID}/roles`
* [`organizations.roles.list()`](/iam/reference/rest/v1/organizations.roles/list):
`organizations/{ORGANIZATION_ID}`. This method lists all
organization-level [custom roles](/iam/docs/understanding-custom-roles).
Example request URL:
`https://iam.googleapis.com/v1/organizations/{ORGANIZATION_ID}/roles`
Note: Wildcard (*) values are invalid; you must specify a complete project
ID or organization ID.
* `:showDeleted` (*type:* `boolean()`) - Include Roles that have been deleted.
* `:view` (*type:* `String.t`) - Optional view for the returned Role objects. When `FULL` is specified,
the `includedPermissions` field is returned, which includes a list of all
permissions in the role. The default value is `BASIC`, which does not
return the `includedPermissions` field.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.IAM.V1.Model.ListRolesResponse{}}` on success
* `{:error, info}` on failure
"""
@spec iam_roles_list(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.IAM.V1.Model.ListRolesResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def iam_roles_list(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query,
:parent => :query,
:showDeleted => :query,
:view => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/roles", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.IAM.V1.Model.ListRolesResponse{}])
end
@doc """
Queries roles that can be granted on a particular resource.
A role is grantable if it can be used as the role in a binding for a policy
for that resource.
## Parameters
* `connection` (*type:* `GoogleApi.IAM.V1.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.IAM.V1.Model.QueryGrantableRolesRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.IAM.V1.Model.QueryGrantableRolesResponse{}}` on success
* `{:error, info}` on failure
"""
@spec iam_roles_query_grantable_roles(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.IAM.V1.Model.QueryGrantableRolesResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def iam_roles_query_grantable_roles(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/roles:queryGrantableRoles", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.IAM.V1.Model.QueryGrantableRolesResponse{}])
end
end
| 48.022556 | 196 | 0.635353 |
ff6c568da4e56434b2891977890968431b4921c5 | 858 | exs | Elixir | mix.exs | infinitered/ueberauth_dwolla | 10d7a9d2657ac7774d8946c673daad2fa21bb3e6 | [
"MIT"
] | 2 | 2016-10-04T04:23:57.000Z | 2018-01-24T06:30:32.000Z | mix.exs | infinitered/ueberauth_dwolla | 10d7a9d2657ac7774d8946c673daad2fa21bb3e6 | [
"MIT"
] | 4 | 2016-10-04T04:25:34.000Z | 2016-11-01T04:24:59.000Z | mix.exs | infinitered/ueberauth_dwolla | 10d7a9d2657ac7774d8946c673daad2fa21bb3e6 | [
"MIT"
] | 5 | 2016-10-04T04:32:11.000Z | 2017-02-11T12:27:55.000Z | defmodule UeberauthDwolla.Mixfile do
use Mix.Project
def project do
[app: :ueberauth_dwolla,
version: "0.1.0",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger, :ueberauth, :oauth2]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[{:ueberauth, "~> 0.4"},
{:oauth2, "0.6.0"},
{:httpoison, "~> 0.7"},
{:ex_doc, "~> 0.1", only: :dev},
{:earmark, ">= 0.0.0", only: :dev}]
end
end
| 23.189189 | 77 | 0.583916 |
ff6c856df895a636541214d2b8712a9346ea47bc | 9,375 | exs | Elixir | test/litelist_web/controllers/business_controller_test.exs | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | 1 | 2021-01-13T15:46:49.000Z | 2021-01-13T15:46:49.000Z | test/litelist_web/controllers/business_controller_test.exs | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | 1 | 2021-01-13T16:42:24.000Z | 2021-01-13T16:42:24.000Z | test/litelist_web/controllers/business_controller_test.exs | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | null | null | null | defmodule LitelistWeb.BusinessControllerTest do
use LitelistWeb.ConnCase, async: true
import Phoenix.Controller
alias Litelist.Factory
alias Litelist.Auth.Guardian
alias LitelistWeb.Router.Helpers, as: Routes
@create_attrs %{company_name: "some company_name", contact_info: "some contact_info", description: "some description", location: "some location", slug: "some slug", title: "some title", type: "business", url: "my-cool-url"}
@update_attrs %{company_name: "some updated company_name", contact_info: "some updated contact_info", description: "some updated description", location: "some updated location", slug: "some updated slug", title: "some updated title", url: "some updated url"}
@invalid_attrs %{company_name: nil, contact_info: nil, description: nil, location: nil, slug: nil, title: nil, type: nil, url: nil}
describe "index" do
test "lists all businesss", %{conn: conn} do
conn = conn
|> get(Routes.business_path(conn, :index))
assert html_response(conn, 200)
assert view_template(conn) == "index.html"
end
end
describe "show" do
test "shows a business if the type matches", %{conn: conn} do
business = Factory.insert(:business)
conn = conn
|> get(Routes.business_path(conn, :show, business))
assert html_response(conn, 200)
assert view_template(conn) == "show.html"
end
test "redirects to index if the type does not match", %{conn: conn} do
not_a_business = Factory.insert(:job)
conn = conn
|> get(Routes.business_path(conn, :show, not_a_business))
assert redirected_to(conn) == Routes.business_path(conn, :index)
end
end
describe "new business" do
test "renders form", %{conn: conn} do
neighbor = Factory.insert(:neighbor)
conn = conn
|> login_neighbor(neighbor)
|> get(Routes.business_path(conn, :new))
assert html_response(conn, 200)
assert view_template(conn) == "new.html"
end
test "unautorized 401 redirect if not logged in", %{conn: conn} do
conn = conn
|> get(Routes.business_path(conn, :new))
assert response(conn, 401)
end
end
describe "create business" do
test "redirects to show when data is valid", %{conn: conn} do
neighbor = Factory.insert(:neighbor)
conn = conn
|> login_neighbor(neighbor)
|> post(Routes.business_path(conn, :create), post: @create_attrs)
assert %{id: id} = redirected_params(conn)
assert redirected_to(conn) == Routes.business_path(conn, :show, id)
conn = conn
|> recycle()
|> login_neighbor(neighbor)
conn = get conn, Routes.business_path(conn, :show, id)
assert html_response(conn, 200)
assert view_template(conn) == "show.html"
end
test "renders errors when data is invalid", %{conn: conn} do
neighbor = Factory.insert(:neighbor)
conn = conn
|> login_neighbor(neighbor)
|> post(Routes.business_path(conn, :create), post: @invalid_attrs)
assert html_response(conn, 200)
assert view_template(conn) == "new.html"
end
test "unautorized 401 redirect if not logged in", %{conn: conn} do
conn = conn
|> post(Routes.business_path(conn, :create), post: @create_attrs)
assert response(conn, 401)
end
# test "renders errors when url is not unique", %{conn: conn} do
# neighbor = Factory.insert(:neighbor)
# Factory.insert(:business, %{url: "my-cool-url"})
# conn = conn
# |> login_neighbor(neighbor)
# |> post(Routes.business_path(conn, :create), post: @create_attrs)
# assert html_response(conn, 200)
# assert view_template(conn) == "new.html"
# end
end
describe "edit business" do
test "renders form for editing chosen business", %{conn: conn} do
neighbor = Factory.insert(:neighbor)
business = Factory.insert(:business, %{neighbor_id: neighbor.id})
conn = conn
|> login_neighbor(neighbor)
|> get(Routes.business_path(conn, :edit, business))
assert html_response(conn, 200)
assert view_template(conn) == "edit.html"
end
test "renders form for editing chosen business as an admin", %{conn: conn} do
admin = Factory.insert(:admin)
business = Factory.insert(:business)
conn = conn
|> login_neighbor(admin)
|> get(Routes.business_path(conn, :edit, business))
assert html_response(conn, 200)
assert view_template(conn) == "edit.html"
end
test "redirects to index if business was not created by the neighbor", %{conn: conn} do
neighbor = Factory.insert(:neighbor)
not_my_business = Factory.insert(:business)
conn = conn
|> login_neighbor(neighbor)
|> get(Routes.business_path(conn, :edit, not_my_business))
assert redirected_to(conn) == Routes.business_path(conn, :index)
end
test "unautorized 401 redirect if not logged in", %{conn: conn} do
business = Factory.insert(:business)
conn = conn
|> get(Routes.business_path(conn, :edit, business))
assert response(conn, 401)
end
end
describe "update business" do
test "redirects when data is valid", %{conn: conn} do
neighbor = Factory.insert(:neighbor)
business = Factory.insert(:business, %{neighbor_id: neighbor.id})
conn = conn
|> login_neighbor(neighbor)
|> put(Routes.business_path(conn, :update, business), post: @update_attrs)
assert redirected_to(conn) == Routes.business_path(conn, :show, business)
conn = conn
|> recycle()
|> login_neighbor(neighbor)
conn = get conn, Routes.business_path(conn, :show, business)
assert html_response(conn, 200)
assert view_template(conn) == "show.html"
end
test "redirects when data is valid as an admin", %{conn: conn} do
admin = Factory.insert(:admin)
business = Factory.insert(:business)
conn = conn
|> login_neighbor(admin)
|> put(Routes.business_path(conn, :update, business), post: @update_attrs)
assert redirected_to(conn) == Routes.business_path(conn, :show, business)
conn = conn
|> recycle()
|> login_neighbor(admin)
conn = get conn, Routes.business_path(conn, :show, business)
assert html_response(conn, 200)
assert view_template(conn) == "show.html"
end
test "renders errors when data is invalid", %{conn: conn} do
neighbor = Factory.insert(:neighbor)
business = Factory.insert(:business, %{neighbor_id: neighbor.id})
conn = conn
|> login_neighbor(neighbor)
|> put(Routes.business_path(conn, :update, business), post: @invalid_attrs)
assert html_response(conn, 200)
assert view_template(conn) == "edit.html"
end
test "redirects to index if business was not created by the neighbor", %{conn: conn} do
neighbor = Factory.insert(:neighbor)
not_my_business = Factory.insert(:business)
conn = conn
|> login_neighbor(neighbor)
|> put(Routes.business_path(conn, :update, not_my_business), post: @invalid_attrs)
assert redirected_to(conn) == Routes.business_path(conn, :index)
end
test "unautorized 401 redirect if not logged in", %{conn: conn} do
business = Factory.insert(:business)
conn = conn
|> put(Routes.business_path(conn, :update, business), business: @invalid_attrs)
assert response(conn, 401)
end
end
describe "delete business" do
test "deletes chosen business", %{conn: conn} do
neighbor = Factory.insert(:neighbor)
business = Factory.insert(:business, %{neighbor_id: neighbor.id})
conn = conn
|> login_neighbor(neighbor)
|> delete(Routes.business_path(conn, :delete, business))
assert redirected_to(conn) == Routes.business_path(conn, :index)
assert_error_sent 404, fn ->
get conn, Routes.business_path(conn, :show, business)
end
end
test "deletes chosen business as an admin", %{conn: conn} do
admin = Factory.insert(:admin)
business = Factory.insert(:business)
conn = conn
|> login_neighbor(admin)
|> delete(Routes.business_path(conn, :delete, business))
assert redirected_to(conn) == Routes.business_path(conn, :index)
assert_error_sent 404, fn ->
get conn, Routes.business_path(conn, :show, business)
end
end
test "redirects to index if business was not created by the neighbor", %{conn: conn} do
neighbor = Factory.insert(:neighbor)
not_my_business = Factory.insert(:business)
conn = conn
|> login_neighbor(neighbor)
|> delete(Routes.business_path(conn, :delete, not_my_business))
assert redirected_to(conn) == Routes.business_path(conn, :index)
end
test "unautorized 401 redirect if not logged in", %{conn: conn} do
business = Factory.insert(:business)
conn = conn
|> delete(Routes.business_path(conn, :delete, business))
assert response(conn, 401)
end
end
defp login_neighbor(conn, neighbor) do
{:ok, token, _} = Guardian.encode_and_sign(neighbor, %{}, token_type: :access)
conn
|> put_req_header("authorization", "bearer: " <> token)
end
end
| 32.77972 | 260 | 0.64992 |
ff6ce01112d68735e740475095b601915aa2c875 | 3,270 | ex | Elixir | lib/wechat_pay/payment_methods/jsapi.ex | ZhengQingchen/wechat_pay | 18bf7166182bf4cf3a39ed70e88f891564fa2194 | [
"MIT"
] | null | null | null | lib/wechat_pay/payment_methods/jsapi.ex | ZhengQingchen/wechat_pay | 18bf7166182bf4cf3a39ed70e88f891564fa2194 | [
"MIT"
] | null | null | null | lib/wechat_pay/payment_methods/jsapi.ex | ZhengQingchen/wechat_pay | 18bf7166182bf4cf3a39ed70e88f891564fa2194 | [
"MIT"
] | null | null | null | defmodule WechatPay.JSAPI do
@moduledoc """
The **JSAPI** payment method.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=7_1)
"""
alias WechatPay.Utils.NonceStr
alias WechatPay.Utils.Signature
alias WechatPay.Config
alias WechatPay.API
import WechatPay.Shared
defmacro __using__(mod) do
quote do
@behaviour WechatPay.JSAPI.Behaviour
defdelegate config, to: unquote(mod)
define_shared_behaviour(WechatPay.JSAPI.Behaviour)
@impl WechatPay.JSAPI.Behaviour
def generate_pay_request(prepay_id),
do: WechatPay.JSAPI.generate_pay_request(prepay_id, config())
end
end
@doc """
Place an order
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_1)
"""
@spec place_order(map, Config.t()) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate place_order(attrs, config), to: API
@doc """
Query the order
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_2)
"""
@spec query_order(map, Configt.t()) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate query_order(attrs, config), to: API
@doc """
Close the order
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_3)
"""
@spec close_order(map, Config.t()) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate close_order(attrs, config), to: API
@doc """
Request to refund
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_4)
"""
@spec refund(map, Config.t()) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate refund(attrs, config), to: API
@doc """
Query the refund
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_5)
"""
@spec query_refund(map, Config.t()) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate query_refund(attrs, config), to: API
@doc """
Download bill
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_6)
"""
@spec download_bill(map, Config.t()) :: {:ok, String.t()} | {:error, HTTPoison.Error.t()}
defdelegate download_bill(attrs, config), to: API
@doc """
Report
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_8)
"""
@spec report(map, Config.t()) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate report(attrs, config), to: API
@doc """
Generate pay request info, which is required for the JavaScript API
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=7_7&index=6)
"""
@spec generate_pay_request(String.t(), Config.t()) :: map
def generate_pay_request(prepay_id, config) do
%{
"appId" => config.appid,
"timeStamp" => Integer.to_string(:os.system_time()),
"nonceStr" => NonceStr.generate(),
"package" => "prepay_id=#{prepay_id}",
"signType" => "MD5"
}
|> sign(config.apikey)
end
defp sign(data, apikey) do
data
|> Map.merge(%{"paySign" => Signature.sign(data, apikey)})
end
end
| 28.938053 | 91 | 0.653517 |
ff6d014bb3815bc8c8128fc30834d573e0f694bf | 35,045 | exs | Elixir | lib/iex/test/iex/helpers_test.exs | stevedomin/elixir | df1a7d36472a92aedc97c0afe9f782678b7aa7e5 | [
"Apache-2.0"
] | null | null | null | lib/iex/test/iex/helpers_test.exs | stevedomin/elixir | df1a7d36472a92aedc97c0afe9f782678b7aa7e5 | [
"Apache-2.0"
] | null | null | null | lib/iex/test/iex/helpers_test.exs | stevedomin/elixir | df1a7d36472a92aedc97c0afe9f782678b7aa7e5 | [
"Apache-2.0"
] | 8 | 2018-02-20T18:30:53.000Z | 2019-06-18T14:23:31.000Z | Code.require_file("../test_helper.exs", __DIR__)
defmodule IEx.HelpersTest do
use IEx.Case
import IEx.Helpers
describe "whereami" do
test "is disabled by default" do
assert capture_iex("whereami()") =~ "Pry session is not currently enabled"
end
test "shows current location for custom envs" do
whereami = capture_iex("whereami()", [], env: %{__ENV__ | line: 3})
assert whereami =~ "test/iex/helpers_test.exs:3"
assert whereami =~ "3: defmodule IEx.HelpersTest do"
end
test "prints message when location is not available" do
whereami = capture_iex("whereami()", [], env: %{__ENV__ | line: 30000})
assert whereami =~ "test/iex/helpers_test.exs:30000"
assert whereami =~ "Could not extract source snippet. Location is not available."
whereami = capture_iex("whereami()", [], env: %{__ENV__ | file: "nofile", line: 1})
assert whereami =~ "nofile:1"
assert whereami =~ "Could not extract source snippet. Location is not available."
end
end
if :erlang.system_info(:otp_release) >= '20' do
describe "breakpoints" do
setup do
on_exit(fn -> IEx.Pry.remove_breaks() end)
end
test "sets up a breakpoint with capture syntax" do
assert break!(URI.decode_query() / 2) == 1
assert IEx.Pry.breaks() == [{1, URI, {:decode_query, 2}, 1}]
end
test "sets up a breakpoint with call syntax" do
assert break!(URI.decode_query(_, %{})) == 1
assert IEx.Pry.breaks() == [{1, URI, {:decode_query, 2}, 1}]
end
test "sets up a breakpoint with guards syntax" do
assert break!(URI.decode_query(_, map) when is_map(map)) == 1
assert IEx.Pry.breaks() == [{1, URI, {:decode_query, 2}, 1}]
end
test "sets up a breakpoint on the given module" do
assert break!(URI, :decode_query, 2) == 1
assert IEx.Pry.breaks() == [{1, URI, {:decode_query, 2}, 1}]
end
test "resets breaks on the given id" do
assert break!(URI, :decode_query, 2) == 1
assert reset_break(1) == :ok
assert IEx.Pry.breaks() == [{1, URI, {:decode_query, 2}, 0}]
end
test "resets breaks on the given module" do
assert break!(URI, :decode_query, 2) == 1
assert reset_break(URI, :decode_query, 2) == :ok
assert IEx.Pry.breaks() == [{1, URI, {:decode_query, 2}, 0}]
end
test "removes breaks in the given module" do
assert break!(URI.decode_query() / 2) == 1
assert remove_breaks(URI) == :ok
assert IEx.Pry.breaks() == []
end
test "removes breaks on all modules" do
assert break!(URI.decode_query() / 2) == 1
assert remove_breaks() == :ok
assert IEx.Pry.breaks() == []
end
test "errors when setting up a breakpoint with invalid guard" do
assert_raise CompileError, ~r"cannot invoke local is_whatever/1 inside guard", fn ->
break!(URI.decode_query(_, map) when is_whatever(map))
end
end
test "errors when setting up a break with no beam" do
assert_raise RuntimeError,
"could not set breakpoint, could not find .beam file for IEx.HelpersTest",
fn -> break!(__MODULE__, :setup, 1) end
end
test "errors when setting up a break for unknown function" do
assert_raise RuntimeError,
"could not set breakpoint, unknown function/macro URI.unknown/2",
fn -> break!(URI, :unknown, 2) end
end
test "errors for non-Elixir modules" do
assert_raise RuntimeError,
"could not set breakpoint, module :elixir was not written in Elixir",
fn -> break!(:elixir, :unknown, 2) end
end
test "prints table with breaks" do
break!(URI, :decode_query, 2)
assert capture_io(fn -> breaks() end) == """
ID Module.function/arity Pending stops
---- ----------------------- ---------------
1 URI.decode_query/2 1
"""
assert capture_io(fn -> URI.decode_query("foo=bar", %{}) end) != ""
assert capture_io(fn -> breaks() end) == """
ID Module.function/arity Pending stops
---- ----------------------- ---------------
1 URI.decode_query/2 0
"""
assert capture_io(fn -> URI.decode_query("foo=bar", %{}) end) == ""
assert capture_io(fn -> breaks() end) == """
ID Module.function/arity Pending stops
---- ----------------------- ---------------
1 URI.decode_query/2 0
"""
end
test "does not print table when there are no breaks" do
assert capture_io(fn -> breaks() end) == "No breakpoints set\n"
end
end
end
describe "open" do
@iex_helpers "iex/lib/iex/helpers.ex"
@elixir_erl "elixir/src/elixir.erl"
{:ok, vsn} = :application.get_key(:stdlib, :vsn)
@lists_erl "lib/stdlib-#{vsn}/src/lists.erl"
@httpc_erl "src/http_client/httpc.erl"
@editor System.get_env("ELIXIR_EDITOR")
test "opens __FILE__ and __LINE__" do
System.put_env("ELIXIR_EDITOR", "echo __LINE__:__FILE__")
assert capture_iex("open({#{inspect(__ENV__.file)}, 3})") |> maybe_trim_quotes() ==
"3:#{__ENV__.file}"
after
System.put_env("ELIXIR_EDITOR", @editor)
end
test "opens Elixir module" do
assert capture_iex("open(IEx.Helpers)") |> maybe_trim_quotes() =~ ~r/#{@iex_helpers}:1$/
end
test "opens function" do
assert capture_iex("open(h)") |> maybe_trim_quotes() =~ ~r/#{@iex_helpers}:\d+$/
end
test "opens function/arity" do
assert capture_iex("open(b/1)") |> maybe_trim_quotes() =~ ~r/#{@iex_helpers}:\d+$/
assert capture_iex("open(h/0)") |> maybe_trim_quotes() =~ ~r/#{@iex_helpers}:\d+$/
end
test "opens module.function" do
assert capture_iex("open(IEx.Helpers.b)") |> maybe_trim_quotes() =~ ~r/#{@iex_helpers}:\d+$/
assert capture_iex("open(IEx.Helpers.h)") |> maybe_trim_quotes() =~ ~r/#{@iex_helpers}:\d+$/
end
test "opens module.function/arity" do
assert capture_iex("open(IEx.Helpers.b/1)") |> maybe_trim_quotes() =~
~r/#{@iex_helpers}:\d+$/
assert capture_iex("open(IEx.Helpers.h/0)") |> maybe_trim_quotes() =~
~r/#{@iex_helpers}:\d+$/
end
test "opens Erlang module" do
assert capture_iex("open(:elixir)") |> maybe_trim_quotes() =~ ~r/#{@elixir_erl}:\d+$/
end
test "opens Erlang module.function" do
assert capture_iex("open(:elixir.start)") |> maybe_trim_quotes() =~ ~r/#{@elixir_erl}:\d+$/
end
test "opens Erlang module.function/arity" do
assert capture_iex("open(:elixir.start/2)") |> maybe_trim_quotes() =~
~r/#{@elixir_erl}:\d+$/
end
test "opens OTP lists module" do
assert capture_iex("open(:lists)") |> maybe_trim_quotes() =~ ~r/#{@lists_erl}:\d+$/
end
test "opens OTP lists module.function" do
assert capture_iex("open(:lists.reverse)") |> maybe_trim_quotes() =~ ~r/#{@lists_erl}:\d+$/
end
test "opens OTP lists module.function/arity" do
assert capture_iex("open(:lists.reverse/1)") |> maybe_trim_quotes() =~
~r/#{@lists_erl}:\d+$/
end
test "opens OTP httpc module" do
assert capture_iex("open(:httpc)") |> maybe_trim_quotes() =~ ~r/#{@httpc_erl}:\d+$/
end
test "opens OTP httpc module.function" do
assert capture_iex("open(:httpc.request)") |> maybe_trim_quotes() =~ ~r/#{@httpc_erl}:\d+$/
end
test "opens OTP httpc module.function/arity" do
assert capture_iex("open(:httpc.request/1)") |> maybe_trim_quotes() =~
~r/#{@httpc_erl}:\d+$/
end
test "errors OTP preloaded module" do
assert capture_iex("open(:init)") =~ ~r"(Could not open)|(Invalid arguments)"
end
test "errors if module is not available" do
assert capture_iex("open(:unknown)") == "Could not open: :unknown. Module is not available."
end
test "errors if module.function is not available" do
assert capture_iex("open(:unknown.unknown)") ==
"Could not open: :unknown.unknown. Module is not available."
assert capture_iex("open(:elixir.unknown)") ==
"Could not open: :elixir.unknown. Function/macro is not available."
assert capture_iex("open(:lists.unknown)") ==
"Could not open: :lists.unknown. Function/macro is not available."
assert capture_iex("open(:httpc.unknown)") ==
"Could not open: :httpc.unknown. Function/macro is not available."
end
test "errors if module.function/arity is not available" do
assert capture_iex("open(:unknown.start/10)") ==
"Could not open: :unknown.start/10. Module is not available."
assert capture_iex("open(:elixir.start/10)") ==
"Could not open: :elixir.start/10. Function/macro is not available."
assert capture_iex("open(:lists.reverse/10)") ==
"Could not open: :lists.reverse/10. Function/macro is not available."
assert capture_iex("open(:httpc.request/10)") ==
"Could not open: :httpc.request/10. Function/macro is not available."
end
test "errors if module is in-memory" do
assert capture_iex("defmodule Foo, do: nil ; open(Foo)") =~
~r"Invalid arguments for open helper:"
after
cleanup_modules([Foo])
end
test "opens the current pry location" do
assert capture_iex("open()", [], env: %{__ENV__ | line: 3}) |> maybe_trim_quotes() ==
"#{__ENV__.file}:3"
end
test "errors if prying is not available" do
assert capture_iex("open()") == "Pry session is not currently enabled"
end
test "opens given {file, line}" do
assert capture_iex("open({#{inspect(__ENV__.file)}, 3})") |> maybe_trim_quotes() ==
"#{__ENV__.file}:3"
end
test "errors when given {file, line} is not available" do
assert capture_iex("open({~s[foo], 3})") ==
"Could not open: \"foo\". File is not available."
end
defp maybe_trim_quotes(string) do
case :os.type() do
{:win32, _} -> String.replace(string, "\"", "")
_ -> string
end
end
end
describe "clear" do
test "clear the screen with ansi" do
Application.put_env(:elixir, :ansi_enabled, true)
assert capture_iex("clear()") == "\e[H\e[2J"
Application.put_env(:elixir, :ansi_enabled, false)
assert capture_iex("clear()") =~
"Cannot clear the screen because ANSI escape codes are not enabled on this shell"
after
Application.delete_env(:elixir, :ansi_enabled)
end
end
describe "runtime_info" do
test "shows vm information" do
assert "\n## System and architecture" <> _ = capture_io(fn -> runtime_info() end)
end
end
describe "h" do
test "shows help" do
assert "* IEx.Helpers\n\nWelcome to Interactive Elixir" <> _ = capture_iex("h()")
end
test "prints non-Elixir module specs" do
assert capture_io(fn -> h(:timer.nonexistent_function()) end) ==
"No documentation for :timer.nonexistent_function was found\n"
assert capture_io(fn -> h(:timer.nonexistent_function() / 1) end) ==
"No documentation for :timer.nonexistent_function/1 was found\n"
assert capture_io(fn -> h(:erlang.trace_pattern()) end) ==
"No documentation for :erlang.trace_pattern was found\n"
assert capture_io(fn -> h(:erlang.trace_pattern() / 2) end) ==
"No documentation for :erlang.trace_pattern/2 was found\n"
assert capture_io(fn -> h(:timer.sleep() / 1) end) == """
* :timer.sleep/1
@spec sleep(time) :: :ok when Time: timeout(), time: var
Documentation is not available for non-Elixir modules. Showing only specs.
"""
assert capture_io(fn -> h(:timer.send_interval()) end) == """
* :timer.send_interval/3
@spec send_interval(time, pid, message) :: {:ok, tRef} | {:error, reason}
when Time: time(),
Pid: pid() | (regName :: atom()),
Message: term(),
TRef: tref(),
Reason: term(),
time: var,
pid: var,
message: var,
tRef: var,
reason: var
Documentation is not available for non-Elixir modules. Showing only specs.
* :timer.send_interval/2
@spec send_interval(time, message) :: {:ok, tRef} | {:error, reason}
when Time: time(),
Message: term(),
TRef: tref(),
Reason: term(),
time: var,
message: var,
tRef: var,
reason: var
Documentation is not available for non-Elixir modules. Showing only specs.
"""
end
test "prints module documentation" do
assert "* IEx.Helpers\n\nWelcome to Interactive Elixir" <> _ =
capture_io(fn -> h(IEx.Helpers) end)
assert capture_io(fn -> h(:whatever) end) ==
"Could not load module :whatever, got: nofile\n"
assert capture_io(fn -> h(:lists) end) ==
"Documentation is not available for non-Elixir modules, got: :lists\n"
end
test "prints function/macro documentation" do
pwd_h = "* def pwd()\n\nPrints the current working directory.\n\n"
c_h = "* def c(files, path \\\\ :in_memory)\n\nCompiles the given files."
eq_h =
"* def ==(left, right)\n\n @spec term() == term() :: boolean()\n\nReturns `true` if the two items are equal.\n\n"
def_h =
"* defmacro def(call, expr \\\\ nil)\n\nDefines a function with the given name and body."
assert capture_io(fn -> h(IEx.Helpers.pwd() / 0) end) =~ pwd_h
assert capture_io(fn -> h(IEx.Helpers.c() / 2) end) =~ c_h
assert capture_io(fn -> h(== / 2) end) =~ eq_h
assert capture_io(fn -> h(def / 2) end) =~ def_h
assert capture_io(fn -> h(IEx.Helpers.c() / 1) end) =~ c_h
assert capture_io(fn -> h(pwd) end) =~ pwd_h
assert capture_io(fn -> h(def) end) =~ def_h
end
test "prints __info__ documentation" do
h_output_module = capture_io(fn -> h(Module.__info__()) end)
assert capture_io(fn -> h(Module.UnlikelyTo.Exist.__info__()) end) == h_output_module
assert capture_io(fn -> h(Module.UnlikelyTo.Exist.__info__() / 1) end) == h_output_module
assert capture_io(fn -> h(__info__) end) ==
"No documentation for Kernel.__info__ was found\n"
end
test "considers underscored functions without docs by default" do
content = """
defmodule Sample do
def __foo__(), do: 0
@doc "Bar doc"
def __bar__(), do: 1
end
"""
filename = "sample.ex"
with_file(filename, content, fn ->
assert c(filename, ".") == [Sample]
assert capture_io(fn -> h(Sample.__foo__()) end) ==
"No documentation for Sample.__foo__ was found\n"
assert capture_io(fn -> h(Sample.__bar__()) end) == "* def __bar__()\n\nBar doc\n"
assert capture_io(fn -> h(Sample.__foo__() / 0) end) ==
"No documentation for Sample.__foo__/0 was found\n"
assert capture_io(fn -> h(Sample.__bar__() / 0) end) == "* def __bar__()\n\nBar doc\n"
end)
after
cleanup_modules([Sample])
end
test "prints callback documentation when function docs are not available" do
behaviour = """
defmodule MyBehaviour do
@doc "Docs for MyBehaviour.first"
@callback first(integer) :: integer
@callback second(integer) :: integer
@callback second(integer, integer) :: integer
end
"""
impl = """
defmodule Impl do
@behaviour MyBehaviour
def first(0), do: 0
@doc "Docs for Impl.second/1"
def second(0), do: 0
@doc "Docs for Impl.second/2"
def second(0, 0), do: 0
end
"""
files = ["my_behaviour.ex", "impl.ex"]
with_file(files, [behaviour, impl], fn ->
assert c(files, ".") |> Enum.sort() == [Impl, MyBehaviour]
assert capture_io(fn -> h(Impl.first() / 1) end) ==
"@callback first(integer()) :: integer()\n\nDocs for MyBehaviour.first\n"
assert capture_io(fn -> h(Impl.second() / 1) end) ==
"* def second(int)\n\nDocs for Impl.second/1\n"
assert capture_io(fn -> h(Impl.second() / 2) end) ==
"* def second(int1, int2)\n\nDocs for Impl.second/2\n"
assert capture_io(fn -> h(Impl.first()) end) ==
"@callback first(integer()) :: integer()\n\nDocs for MyBehaviour.first\n"
assert capture_io(fn -> h(Impl.second()) end) ==
"* def second(int)\n\nDocs for Impl.second/1\n* def second(int1, int2)\n\nDocs for Impl.second/2\n"
assert capture_io(fn -> h(MyBehaviour.first()) end) == """
No documentation for function MyBehaviour.first was found, but there is a callback with the same name.
You can view callback documentations with the b/1 helper.\n
"""
assert capture_io(fn -> h(MyBehaviour.second() / 2) end) == """
No documentation for function MyBehaviour.second/2 was found, but there is a callback with the same name.
You can view callback documentations with the b/1 helper.\n
"""
assert capture_io(fn -> h(MyBehaviour.second() / 3) end) ==
"No documentation for MyBehaviour.second/3 was found\n"
end)
after
cleanup_modules([Impl, MyBehaviour])
end
test "prints modules compiled without docs" do
Code.compiler_options(docs: false)
content = """
defmodule Sample do
@spec foo(any()) :: any()
def foo(arg), do: arg
end
"""
filename = "sample.ex"
with_file(filename, content, fn ->
assert c(filename, ".") == [Sample]
assert capture_io(fn -> h(Sample.foo() / 1) end) == """
* Sample.foo/1
@spec foo(any()) :: any()
Module was compiled without docs. Showing only specs.
"""
end)
after
Code.compiler_options(docs: true)
cleanup_modules([Sample])
end
end
describe "b" do
test "lists all callbacks for a module" do
assert capture_io(fn -> b(Mix) end) == "No callbacks for Mix were found\n"
assert capture_io(fn -> b(NoMix) end) == "Could not load module NoMix, got: nofile\n"
assert capture_io(fn -> b(Mix.SCM) end) =~ """
@callback accepts_options(app :: atom(), opts()) :: opts() | nil
@callback checked_out?(opts()) :: boolean()
"""
end
test "lists callback with multiple clauses" do
filename = "multiple_clauses_callback.ex"
content = """
defmodule MultipleClauseCallback do
@doc "callback"
@callback test(:foo) :: integer
@callback test(:bar) :: [integer]
end
"""
with_file(filename, content, fn ->
assert c(filename, ".") == [MultipleClauseCallback]
assert capture_io(fn -> b(MultipleClauseCallback) end) =~ """
@callback test(:foo) :: integer()
@callback test(:bar) :: [integer()]
"""
end)
after
cleanup_modules([MultipleClauseCallback])
end
test "prints callback documentation" do
assert capture_io(fn -> b(Mix.Task.stop()) end) ==
"No documentation for Mix.Task.stop was found\n"
assert capture_io(fn -> b(Mix.Task.run()) end) =~
"@callback run(command_line_args :: [binary()]) :: any()\n\nA task needs to implement `run`"
assert capture_io(fn -> b(NoMix.run()) end) == "Could not load module NoMix, got: nofile\n"
assert capture_io(fn -> b(Exception.message() / 1) end) ==
"@callback message(t()) :: String.t()\n\n"
end
end
describe "t" do
test "prints when there is no type information" do
assert capture_io(fn -> t(IEx) end) == "No type information for IEx was found\n"
end
test "prints all types in module" do
# Test that it shows at least two types
assert Enum.count(capture_io(fn -> t(Enum) end) |> String.split("\n"), fn line ->
String.starts_with?(line, "@type")
end) >= 2
end
test "prints type information" do
assert "@type t() :: " <> _ = capture_io(fn -> t(Enum.t()) end)
assert capture_io(fn -> t(Enum.t()) end) == capture_io(fn -> t(Enum.t() / 0) end)
assert "@opaque t(value)\n\n@type t() :: t(term())\n\n" =
capture_io(fn -> t(MapSet.t()) end)
assert capture_io(fn -> t(URI.t()) end) == capture_io(fn -> t(URI.t() / 0) end)
end
test "prints type documentation" do
content = """
defmodule TypeSample do
@typedoc "An id with description."
@type id_with_desc :: {number, String.t}
end
"""
filename = "typesample.ex"
with_file(filename, content, fn ->
assert c(filename, ".") == [TypeSample]
assert capture_io(fn -> t(TypeSample.id_with_desc() / 0) end) == """
@type id_with_desc() :: {number(), String.t()}
An id with description.
"""
assert capture_io(fn -> t(TypeSample.id_with_desc()) end) == """
@type id_with_desc() :: {number(), String.t()}
An id with description.
"""
end)
after
cleanup_modules([TypeSample])
end
end
describe "v" do
test "returns history" do
assert "** (RuntimeError) v(0) is out of bounds" <> _ = capture_iex("v(0)")
assert "** (RuntimeError) v(1) is out of bounds" <> _ = capture_iex("v(1)")
assert "** (RuntimeError) v(-1) is out of bounds" <> _ = capture_iex("v(-1)")
assert capture_iex("1\n2\nv(2)") == "1\n2\n2"
assert capture_iex("1\n2\nv(2)") == capture_iex("1\n2\nv(-1)")
assert capture_iex("1\n2\nv(2)") == capture_iex("1\n2\nv()")
end
end
describe "flush" do
test "flushes messages" do
assert capture_io(fn ->
send(self(), :hello)
flush()
end) == ":hello\n"
end
end
describe "pwd" do
test "prints the working directory" do
File.cd!(iex_path(), fn ->
assert capture_io(fn -> pwd() end) =~ ~r"lib[\\/]iex\n$"
end)
end
end
describe "ls" do
test "lists the current directory" do
File.cd!(iex_path(), fn ->
paths =
capture_io(fn -> ls() end)
|> String.split()
|> Enum.map(&String.trim/1)
assert "ebin" in paths
assert "mix.exs" in paths
end)
end
test "lists the given directory" do
assert capture_io(fn -> ls("~") end) == capture_io(fn -> ls(System.user_home()) end)
end
end
describe "exports" do
test "prints module exports" do
exports = capture_io(fn -> exports(IEx.Autocomplete) end)
assert exports == "expand/1 expand/2 exports/1 \n"
end
end
describe "import_file" do
test "imports a file" do
with_file("dot-iex", "variable = :hello\nimport IO", fn ->
capture_io(:stderr, fn ->
assert "** (CompileError) iex:1: undefined function variable/0" <> _ =
capture_iex("variable")
end)
assert "** (CompileError) iex:1: undefined function puts/1" <> _ =
capture_iex("puts \"hi\"")
assert capture_iex("import_file \"dot-iex\"\nvariable\nputs \"hi\"") ==
"IO\n:hello\nhi\n:ok"
end)
end
test "imports a file that imports another file" do
dot = "parent = true\nimport_file \"dot-iex-1\""
dot_1 = "variable = :hello\nimport IO"
with_file(["dot-iex", "dot-iex-1"], [dot, dot_1], fn ->
capture_io(:stderr, fn ->
assert "** (CompileError) iex:1: undefined function parent/0" <> _ =
capture_iex("parent")
end)
assert "** (CompileError) iex:1: undefined function puts/1" <> _ =
capture_iex("puts \"hi\"")
assert capture_iex("import_file \"dot-iex\"\nvariable\nputs \"hi\"\nparent") ==
"IO\n:hello\nhi\n:ok\ntrue"
end)
end
test "raises if file is missing" do
failing = capture_iex("import_file \"nonexistent\"")
assert "** (File.Error) could not read file" <> _ = failing
assert failing =~ "no such file or directory"
end
test "does not raise if file is missing and using import_file_if_available" do
assert "nil" == capture_iex("import_file_if_available \"nonexistent\"")
end
end
describe "import_if_available" do
test "imports a module only if available" do
assert "nil" == capture_iex("import_if_available NoSuchModule")
assert "[1, 2, 3]" == capture_iex("import_if_available Integer; digits 123")
assert "[1, 2, 3]" ==
capture_iex("import_if_available Integer, only: [digits: 1]; digits 123")
end
end
describe "c" do
test "compiles a file" do
assert_raise UndefinedFunctionError, ~r"function Sample\.run/0 is undefined", fn ->
Sample.run()
end
filename = "sample.ex"
with_file(filename, test_module_code(), fn ->
assert c(Path.expand(filename)) == [Sample]
refute File.exists?("Elixir.Sample.beam")
assert Sample.run() == :run
end)
after
cleanup_modules([Sample])
end
test "handles errors" do
ExUnit.CaptureIO.capture_io(fn ->
with_file("sample.ex", "raise \"oops\"", fn ->
assert_raise CompileError, fn -> c("sample.ex") end
end)
end)
end
test "compiles a file with multiple modules " do
assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined", fn ->
Sample.run()
end
filename = "sample.ex"
with_file(filename, test_module_code() <> "\n" <> another_test_module(), fn ->
assert c(filename) |> Enum.sort() == [Sample, Sample2]
assert Sample.run() == :run
assert Sample2.hello() == :world
end)
after
cleanup_modules([Sample, Sample2])
end
test "compiles multiple modules" do
assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined", fn ->
Sample.run()
end
filenames = ["sample1.ex", "sample2.ex"]
with_file(filenames, [test_module_code(), another_test_module()], fn ->
assert c(filenames) |> Enum.sort() == [Sample, Sample2]
assert Sample.run() == :run
assert Sample2.hello() == :world
end)
after
cleanup_modules([Sample, Sample2])
end
test "compiles Erlang modules" do
assert_raise UndefinedFunctionError, ~r"function :sample.hello/0 is undefined", fn ->
:sample.hello()
end
filename = "sample.erl"
with_file(filename, erlang_module_code(), fn ->
assert c(filename) == [:sample]
assert :sample.hello() == :world
refute File.exists?("sample.beam")
end)
after
cleanup_modules([:sample])
end
test "skips unknown files" do
assert_raise UndefinedFunctionError, ~r"function :sample.hello/0 is undefined", fn ->
:sample.hello()
end
filenames = ["sample.erl", "not_found.ex", "sample2.ex"]
with_file(filenames, [erlang_module_code(), "", another_test_module()], fn ->
assert c(filenames) |> Enum.sort() == [Sample2, :sample]
assert :sample.hello() == :world
assert Sample2.hello() == :world
end)
after
cleanup_modules([:sample, Sample2])
end
test "compiles file in path" do
assert_raise UndefinedFunctionError, ~r"function Sample\.run/0 is undefined", fn ->
Sample.run()
end
filename = "sample.ex"
with_file(filename, test_module_code(), fn ->
assert c(filename, ".") == [Sample]
assert File.exists?("Elixir.Sample.beam")
assert Sample.run() == :run
end)
after
cleanup_modules([Sample])
end
end
describe "l" do
test "loads a given module" do
assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined", fn ->
Sample.run()
end
assert l(:nonexistent_module) == {:error, :nofile}
filename = "sample.ex"
with_file(filename, test_module_code(), fn ->
assert c(filename, ".") == [Sample]
assert Sample.run() == :run
File.write!(filename, "defmodule Sample do end")
elixirc(["sample.ex"])
assert l(Sample) == {:module, Sample}
message = "function Sample.run/0 is undefined or private"
assert_raise UndefinedFunctionError, message, fn ->
Sample.run()
end
end)
after
# Clean up the old version left over after l()
cleanup_modules([Sample])
end
end
describe "nl" do
test "loads a given module on the given nodes" do
assert nl(:nonexistent_module) == {:error, :nofile}
assert nl([node()], Enum) == {:ok, [{:nonode@nohost, :loaded, Enum}]}
assert nl([:nosuchnode@badhost], Enum) == {:ok, [{:nosuchnode@badhost, :badrpc, :nodedown}]}
capture_log(fn ->
assert nl([node()], :lists) == {:ok, [{:nonode@nohost, :error, :sticky_directory}]}
end)
end
end
describe "r" do
test "raises when reloading a nonexistent module" do
assert_raise ArgumentError, "could not load nor find module: :nonexistent_module", fn ->
r(:nonexistent_module)
end
end
test "reloads elixir modules" do
message = ~r"function Sample.run/0 is undefined \(module Sample is not available\)"
assert_raise UndefinedFunctionError, message, fn ->
Sample.run()
end
filename = "sample.ex"
with_file(filename, test_module_code(), fn ->
assert capture_io(:stderr, fn ->
assert c(filename, ".") == [Sample]
assert Sample.run() == :run
File.write!(filename, "defmodule Sample do end")
assert {:reloaded, Sample, [Sample]} = r(Sample)
message = "function Sample.run/0 is undefined or private"
assert_raise UndefinedFunctionError, message, fn ->
Sample.run()
end
end) =~ "redefining module Sample (current version loaded from Elixir.Sample.beam)"
end)
after
# Clean up old version produced by the r helper
cleanup_modules([Sample])
end
test "reloads Erlang modules" do
assert_raise UndefinedFunctionError, ~r"function :sample.hello/0 is undefined", fn ->
:sample.hello()
end
filename = "sample.erl"
with_file(filename, erlang_module_code(), fn ->
assert c(filename, ".") == [:sample]
assert :sample.hello() == :world
File.write!(filename, other_erlang_module_code())
assert {:reloaded, :sample, [:sample]} = r(:sample)
assert :sample.hello() == :bye
end)
after
cleanup_modules([:sample])
end
end
describe "pid" do
test "builds a pid from string" do
assert inspect(pid("0.32767.3276")) == "#PID<0.32767.3276>"
assert inspect(pid("0.5.6")) == "#PID<0.5.6>"
assert_raise ArgumentError, fn ->
pid("0.6.-6")
end
end
test "builds a pid from integers" do
assert inspect(pid(0, 32767, 3276)) == "#PID<0.32767.3276>"
assert inspect(pid(0, 5, 6)) == "#PID<0.5.6>"
assert_raise FunctionClauseError, fn ->
pid(0, 6, -6)
end
end
end
describe "i" do
test "prints information about the data type" do
assert capture_io(fn -> i(:ok) end) =~ """
Term
:ok
Data type
Atom
Reference modules
Atom\
"""
end
test "handles functions that don't display result" do
assert capture_io(fn -> i(IEx.dont_display_result()) end) =~ """
Term
:"do not show this result in output"
Data type
Atom
Description
This atom is returned by IEx when a function that should not print its
return value on screen is executed.\
"""
end
defmodule MyIExInfoModule do
defstruct []
defimpl IEx.Info do
def info(_), do: [{"A", "it's A"}, {:b, "it's :b"}, {'c', "it's 'c'"}]
end
end
test "uses the IEx.Info protocol" do
assert capture_io(fn -> i(%MyIExInfoModule{}) end) =~ """
Term
%IEx.HelpersTest.MyIExInfoModule{}
A
it's A
b
it's :b
c
it's 'c'
"""
after
cleanup_modules([MyIExInfoModule])
end
end
defp test_module_code do
"""
defmodule Sample do
def run do
:run
end
end
"""
end
defp another_test_module do
"""
defmodule Sample2 do
def hello do
:world
end
end
"""
end
defp erlang_module_code do
"""
-module(sample).
-export([hello/0]).
hello() -> world.
"""
end
defp other_erlang_module_code do
"""
-module(sample).
-export([hello/0]).
hello() -> bye.
"""
end
defp cleanup_modules(mods) do
Enum.each(mods, fn mod ->
File.rm("#{mod}.beam")
:code.purge(mod)
true = :code.delete(mod)
end)
end
defp with_file(names, codes, fun) when is_list(names) and is_list(codes) do
Enum.each(Enum.zip(names, codes), fn {name, code} ->
File.write!(name, code)
end)
try do
fun.()
after
Enum.each(names, &File.rm/1)
end
end
defp with_file(name, code, fun) do
with_file(List.wrap(name), List.wrap(code), fun)
end
defp elixirc(args) do
executable = Path.expand("../../../../bin/elixirc", __DIR__)
System.cmd("#{executable}#{executable_extension()}", args, stderr_to_stdout: true)
end
defp iex_path do
Path.expand("../..", __DIR__)
end
if match?({:win32, _}, :os.type()) do
defp executable_extension, do: ".bat"
else
defp executable_extension, do: ""
end
end
| 32.329336 | 124 | 0.569953 |
ff6d151a9dcdde079642399c1dc3debc8742f184 | 216 | ex | Elixir | lib/amqpx/codec/json/poison.ex | ulfurinn/amqpx | 55d280cac013b58eb2e1b832a23119aae1978861 | [
"MIT"
] | 2 | 2020-01-13T19:55:24.000Z | 2020-03-13T14:12:08.000Z | lib/amqpx/codec/json/poison.ex | ulfurinn/amqpx | 55d280cac013b58eb2e1b832a23119aae1978861 | [
"MIT"
] | null | null | null | lib/amqpx/codec/json/poison.ex | ulfurinn/amqpx | 55d280cac013b58eb2e1b832a23119aae1978861 | [
"MIT"
] | 3 | 2018-12-03T10:44:22.000Z | 2021-08-06T13:06:31.000Z | defmodule AMQPX.Codec.JSON.Poison do
def decode(payload), do: Poison.Parser.parse(payload)
def decode(payload, args), do: Poison.Parser.parse(payload, args)
def encode(payload), do: Poison.encode(payload)
end
| 30.857143 | 67 | 0.75463 |
ff6d1b50166d4906aef4856b2d33c66e2c8729d0 | 80 | ex | Elixir | web/queries/country.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 356 | 2016-03-16T12:37:28.000Z | 2021-12-18T03:22:39.000Z | web/queries/country.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 30 | 2016-03-16T09:19:10.000Z | 2021-01-12T08:10:52.000Z | web/queries/country.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 72 | 2016-03-16T13:32:14.000Z | 2021-03-23T11:27:43.000Z | defmodule Nectar.Query.Country do
use Nectar.Query, model: Nectar.Country
end
| 20 | 41 | 0.8 |
ff6d21005f83dfd9b7f5baa82887d36a37306693 | 874 | exs | Elixir | bench/witchcraft/semigroup/float_bench.exs | florius0/witchcraft | 6c61c3ecd5b431c52e8b60aafb05596d9182205e | [
"MIT"
] | null | null | null | bench/witchcraft/semigroup/float_bench.exs | florius0/witchcraft | 6c61c3ecd5b431c52e8b60aafb05596d9182205e | [
"MIT"
] | null | null | null | bench/witchcraft/semigroup/float_bench.exs | florius0/witchcraft | 6c61c3ecd5b431c52e8b60aafb05596d9182205e | [
"MIT"
] | null | null | null | defmodule Witchcraft.Semigroup.FloatBench do
@moduledoc false
use Benchfella
use Witchcraft.Semigroup
#########
# Setup #
#########
# ---------- #
# Data Types #
# ---------- #
@float_a 10.232132171
@float_b -45.372189
##########
# Kernel #
##########
bench("Kernel.+/2", do: @float_a + @float_b)
#############
# Semigroup #
#############
bench("append/2", do: append(@float_a, @float_b))
bench("repeat/2", do: repeat(@float_a, times: 100))
# --------- #
# Operators #
# --------- #
bench("<>/2", do: @float_a <> @float_b)
# ---------- #
# Large Data #
# ---------- #
@big_float_a 1_234_567.890
@big_float_b 9_876.6543210
bench("$$$ Kernel.+/2", do: @big_float_a + @big_float_b)
bench("$$$ append/2", do: append(@big_float_a, @big_float_b))
bench("$$$ <>/2", do: @big_float_a <> @big_float_b)
end
| 18.208333 | 63 | 0.509153 |
ff6d660a0a9844f6f2c98846518cdeb17daa05b5 | 2,844 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/pubsub_location.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/pubsub_location.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/dataflow/lib/google_api/dataflow/v1b3/model/pubsub_location.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dataflow.V1b3.Model.PubsubLocation do
  @moduledoc """
  Identifies a pubsub location to use for transferring data into or out of a streaming Dataflow job.

  ## Attributes

  * `dropLateData` (*type:* `boolean()`, *default:* `nil`) - Indicates whether the pipeline allows late-arriving data.
  * `idLabel` (*type:* `String.t`, *default:* `nil`) - If set, contains a pubsub label from which to extract record ids. If left empty, record deduplication will be strictly best effort.
  * `subscription` (*type:* `String.t`, *default:* `nil`) - A pubsub subscription, in the form of "pubsub.googleapis.com/subscriptions//"
  * `timestampLabel` (*type:* `String.t`, *default:* `nil`) - If set, contains a pubsub label from which to extract record timestamps. If left empty, record timestamps will be generated upon arrival.
  * `topic` (*type:* `String.t`, *default:* `nil`) - A pubsub topic, in the form of "pubsub.googleapis.com/topics//"
  * `trackingSubscription` (*type:* `String.t`, *default:* `nil`) - If set, specifies the pubsub subscription that will be used for tracking custom time timestamps for watermark estimation.
  * `withAttributes` (*type:* `boolean()`, *default:* `nil`) - If true, then the client has requested to get pubsub attributes.
  """

  # Provides the struct definition plus JSON (de)serialization via `field/1,2`.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :dropLateData => boolean(),
          :idLabel => String.t(),
          :subscription => String.t(),
          :timestampLabel => String.t(),
          :topic => String.t(),
          :trackingSubscription => String.t(),
          :withAttributes => boolean()
        }

  # Each `field/1` call registers one JSON property with the Gax model machinery.
  field(:dropLateData)
  field(:idLabel)
  field(:subscription)
  field(:timestampLabel)
  field(:topic)
  field(:trackingSubscription)
  field(:withAttributes)
end
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.PubsubLocation do
  # Delegate to the generated model's own `decode/2`.
  def decode(value, options),
    do: GoogleApi.Dataflow.V1b3.Model.PubsubLocation.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.PubsubLocation do
  # Generic struct encoding provided by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 43.753846 | 201 | 0.707806 |
ff6d855e4170830c591a903df2b8845a8329b982 | 3,080 | ex | Elixir | test/registration/support/registration_test_case.ex | octowombat/commanded | 79a1965e276d3369dcf70ae65ef904d7e59f4a6a | [
"MIT"
] | 1,220 | 2017-10-31T10:56:40.000Z | 2022-03-31T17:40:19.000Z | test/registration/support/registration_test_case.ex | octowombat/commanded | 79a1965e276d3369dcf70ae65ef904d7e59f4a6a | [
"MIT"
] | 294 | 2017-11-03T10:33:41.000Z | 2022-03-24T08:36:42.000Z | test/registration/support/registration_test_case.ex | octowombat/commanded | 79a1965e276d3369dcf70ae65ef904d7e59f4a6a | [
"MIT"
] | 208 | 2017-11-03T10:56:47.000Z | 2022-03-14T05:49:38.000Z | defmodule Commanded.RegistrationTestCase do
import Commanded.SharedTestCase
define_tests do
alias Commanded.Registration.{RegisteredServer, RegisteredSupervisor}
setup do
supervisor = start_supervised!(RegisteredSupervisor)
[supervisor: supervisor]
end
describe "`start_child/3`" do
test "should return child process PID on success", %{
registry: registry,
registry_meta: registry_meta
} do
assert {:ok, pid} = RegisteredSupervisor.start_child(registry, registry_meta, "child")
assert is_pid(pid)
end
test "should return existing child process when already started", %{
registry: registry,
registry_meta: registry_meta
} do
assert {:ok, pid} = RegisteredSupervisor.start_child(registry, registry_meta, "child")
assert {:ok, ^pid} = RegisteredSupervisor.start_child(registry, registry_meta, "child")
end
end
describe "`start_link/3`" do
test "should return process PID on success", %{
registry: registry,
registry_meta: registry_meta
} do
assert {:ok, pid} = start_link(registry, registry_meta, "registered")
assert is_pid(pid)
end
test "should return existing process when already started", %{
registry: registry,
registry_meta: registry_meta
} do
assert {:ok, pid} = start_link(registry, registry_meta, "registered")
assert {:ok, ^pid} = start_link(registry, registry_meta, "registered")
end
end
describe "`whereis_name/1`" do
test "should return `:undefined` when not registered", %{
registry: registry,
registry_meta: registry_meta
} do
assert registry.whereis_name(registry_meta, "notregistered") == :undefined
end
test "should return `PID` when child registered", %{
registry: registry,
registry_meta: registry_meta
} do
assert {:ok, pid} = RegisteredSupervisor.start_child(registry, registry_meta, "child")
assert registry.whereis_name(registry_meta, "child") == pid
end
test "should return `PID` when process registered", %{
registry: registry,
registry_meta: registry_meta
} do
assert {:ok, pid} = start_link(registry, registry_meta, "registered")
assert registry.whereis_name(registry_meta, "registered") == pid
end
end
describe "`supervisor_child_spec/2`" do
test "should return a valid child_spec", %{registry: registry, registry_meta: registry_meta} do
assert registry.supervisor_child_spec(registry_meta, RegisteredSupervisor, "child") ==
%{
id: Commanded.Registration.RegisteredSupervisor,
start: {Commanded.Registration.RegisteredSupervisor, :start_link, ["child"]},
type: :supervisor
}
end
end
defp start_link(registry, registry_meta, name) do
registry.start_link(registry_meta, name, RegisteredServer, [], [])
end
end
end
| 34.222222 | 101 | 0.649026 |
ff6d9a741fa750f887255ce886a428a24d6171f4 | 139 | ex | Elixir | lib/html/bit_string.ex | lpil/jot | d4d0b3852db54a7e5c201c8a68ffa7894b2f0d42 | [
"MIT"
] | 1 | 2016-08-20T14:41:55.000Z | 2016-08-20T14:41:55.000Z | lib/html/bit_string.ex | lpil/jot | d4d0b3852db54a7e5c201c8a68ffa7894b2f0d42 | [
"MIT"
] | 26 | 2016-05-29T00:29:33.000Z | 2018-04-12T13:53:50.000Z | lib/html/bit_string.ex | lpil/jot | d4d0b3852db54a7e5c201c8a68ffa7894b2f0d42 | [
"MIT"
] | null | null | null | defimpl Jot.HTML.Chars, for: BitString do
def open_fragments(string) do
[string]
end
def close_fragments(_) do
[]
end
end
| 13.9 | 41 | 0.676259 |
ff6d9b974367f97b821985bb06fa470f038e4854 | 1,042 | ex | Elixir | web/channels/user_socket.ex | dev800/verk_web | 2118e4a42a621b6004a9d51750d389cf5061ac30 | [
"MIT"
] | null | null | null | web/channels/user_socket.ex | dev800/verk_web | 2118e4a42a621b6004a9d51750d389cf5061ac30 | [
"MIT"
] | null | null | null | web/channels/user_socket.ex | dev800/verk_web | 2118e4a42a621b6004a9d51750d389cf5061ac30 | [
"MIT"
] | 1 | 2018-06-24T04:11:11.000Z | 2018-06-24T04:11:11.000Z | defmodule VerkWeb.UserSocket do
use Phoenix.Socket
## Channels
channel "rooms:*", VerkWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# VerkWeb.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 30.647059 | 83 | 0.693858 |
ff6dd389685bffb72184725d507deaa5ef66ccff | 1,605 | ex | Elixir | lib/exnoops/api.ex | bfcarpio/exNoops-mirror | e9a08e8cfdc47da9ab3fef1cdd3bb1fd021b1cc5 | [
"MIT"
] | null | null | null | lib/exnoops/api.ex | bfcarpio/exNoops-mirror | e9a08e8cfdc47da9ab3fef1cdd3bb1fd021b1cc5 | [
"MIT"
] | null | null | null | lib/exnoops/api.ex | bfcarpio/exNoops-mirror | e9a08e8cfdc47da9ab3fef1cdd3bb1fd021b1cc5 | [
"MIT"
] | null | null | null | defmodule Exnoops.API do
@moduledoc false
require Logger
@base_url Application.fetch_env!(:exnoops, :base_url)
@http_impl Application.fetch_env!(:exnoops, :http)
defmodule Behaviour do
@moduledoc false
@callback http_get(String.t()) :: {:ok, map()}
@callback http_post(String.t(), String.t()) :: {:ok, map()}
end
@spec get(String.t(), keyword()) :: {atom(), term()}
def get(endpoint, opts) do
path = @base_url <> endpoint <> format_opts(opts)
Logger.debug("Performing GET request on #{path}")
path
|> @http_impl.http_get()
|> handle_response
end
@spec post(String.t(), keyword()) :: {atom(), term()}
def post(endpoint, body) do
path = @base_url <> endpoint
Logger.debug("Performing POST request on #{path}")
path
|> @http_impl.http_post(JSON.encode!(body))
|> handle_response
end
@spec format_opts(list()) :: String.t()
defp format_opts([]), do: ""
defp format_opts(opts) do
opts
|> Enum.map(fn
{k, [_h | _t] = list} -> "#{k}=" <> Enum.join(list, ",")
{k, v} -> "#{k}=#{v}"
end)
|> Enum.join("&")
|> (fn str -> "?" <> str end).()
end
@spec handle_response({atom(), map()}) :: tuple()
defp handle_response({:ok, %Mojito.Response{status_code: 200, body: body}}),
do: {:ok, JSON.decode!(body)}
defp handle_response({:ok, %Mojito.Response{status_code: 404}}), do: {:error, 404}
defp handle_response({:ok, %Mojito.Response{status_code: _status_code, body: body}}),
do: {:error, JSON.decode!(body)}
defp handle_response({:error, struct}), do: {:error, struct}
end
| 27.20339 | 87 | 0.611215 |
ff6dd41f0bf219649e6db263f3f2b370df363f4a | 230 | exs | Elixir | year_2020/test/day_17_test.exs | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | year_2020/test/day_17_test.exs | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | year_2020/test/day_17_test.exs | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | defmodule Day17Test do
use ExUnit.Case
test "it counts active cells" do
assert Day17.part_one(InputTestFile) == 112
end
test "it counts active hypercells" do
assert Day17.part_two(InputTestFile) == 848
end
end
| 19.166667 | 47 | 0.726087 |
ff6ddb87a62dab279413314f0837e2a9dc3ee090 | 361 | exs | Elixir | test/regressions/i070_autolink_with_parentheses_test.exs | brianbroderick/monocle | eeabecea658468479c04a02352271f6304447736 | [
"Apache-2.0"
] | 2 | 2018-02-11T01:18:24.000Z | 2020-01-12T17:19:22.000Z | test/regressions/i070_autolink_with_parentheses_test.exs | brianbroderick/monocle | eeabecea658468479c04a02352271f6304447736 | [
"Apache-2.0"
] | null | null | null | test/regressions/i070_autolink_with_parentheses_test.exs | brianbroderick/monocle | eeabecea658468479c04a02352271f6304447736 | [
"Apache-2.0"
] | null | null | null | defmodule Regressions.I070AutolinkWithParentheses do
use ExUnit.Case
test "Issue https://github.com/pragdave/earmark/issues/70" do
assert Monocle.as_html!(~s{[Wikipedia article on PATH](https://en.wikipedia.org/wiki/PATH_(variable))}) ==
~s{<p><a href="https://en.wikipedia.org/wiki/PATH_(variable)">Wikipedia article on PATH</a></p>\n}
end
end
| 36.1 | 110 | 0.728532 |
ff6e1c3745fc4257befb8c4db32f688ca419a577 | 2,040 | exs | Elixir | mix.exs | trejkaz/scenic_new | cdf815fc0b6fcadf6eecae12b6d71958e470f45d | [
"Apache-2.0"
] | null | null | null | mix.exs | trejkaz/scenic_new | cdf815fc0b6fcadf6eecae12b6d71958e470f45d | [
"Apache-2.0"
] | null | null | null | mix.exs | trejkaz/scenic_new | cdf815fc0b6fcadf6eecae12b6d71958e470f45d | [
"Apache-2.0"
] | null | null | null | defmodule ScenicNew.MixProject do
use Mix.Project
@version "0.10.4"
@github "https://github.com/boydm/scenic_new"
def project do
[
app: :scenic_new,
version: @version,
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
deps: deps(),
aliases: aliases(),
docs: [
main: "Mix.Tasks.Scenic.New"
],
description: description(),
package: [
contributors: ["Boyd Multerer"],
maintainers: ["Boyd Multerer"],
licenses: ["Apache 2"],
links: %{github: @github},
files: [
"templates/**/*.jpg",
"templates/**/gitignore",
"templates/**/*.exs",
"templates/**/*.config",
"templates/**/*.txt",
"templates/**/*.jpg",
"templates/**/*.png",
"templates/**/*.eex",
"config",
# "test",
"mix.exs",
".formatter.exs",
".gitignore",
"LICENSE",
"README.md",
"lib/**/*.ex"
]
],
preferred_cli_env: [
coveralls: :test,
"coveralls.html": :test,
"coveralls.json": :test
],
test_coverage: [tool: ExCoveralls]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: []
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ex_doc, "~> 0.19", only: [:dev, :docs], runtime: false},
{:excoveralls, "~> 0.5.7", only: :test}
]
end
defp aliases do
[
build: [&build_releases/1]
]
end
defp build_releases(_) do
Mix.Tasks.Compile.run([])
Mix.Tasks.Archive.Build.run([])
Mix.Tasks.Archive.Build.run(["--output=scenic_new.ez"])
File.rename("scenic_new.ez", "./archives/scenic_new.ez")
File.rename("scenic_new-#{@version}.ez", "./archives/scenic_new-#{@version}.ez")
end
defp description() do
"""
ScenicNew - Mix task to generate a starter application
"""
end
end
| 23.448276 | 84 | 0.52549 |
ff6e3350caeed2ee316d416d973ae5ad649ee07b | 7,587 | ex | Elixir | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/product.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/product.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/product.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidEnterprise.V1.Model.Product do
  @moduledoc """
  A Products resource represents an app in the Google Play store that is available to at least some users in the enterprise. (Some apps are restricted to a single enterprise, and no information about them is made available outside that enterprise.)

  The information provided for each product (localized name, icon, link to the full Google Play details page) is intended to allow a basic representation of the product within an EMM user interface.

  ## Attributes

  * `appTracks` (*type:* `list(GoogleApi.AndroidEnterprise.V1.Model.TrackInfo.t)`, *default:* `nil`) - The tracks visible to the enterprise.
  * `appVersion` (*type:* `list(GoogleApi.AndroidEnterprise.V1.Model.AppVersion.t)`, *default:* `nil`) - App versions currently available for this product.
  * `authorName` (*type:* `String.t`, *default:* `nil`) - The name of the author of the product (for example, the app developer).
  * `availableCountries` (*type:* `list(String.t)`, *default:* `nil`) - The countries which this app is available in.
  * `availableTracks` (*type:* `list(String.t)`, *default:* `nil`) - Deprecated, use appTracks instead.
  * `category` (*type:* `String.t`, *default:* `nil`) - The app category (e.g. RACING, SOCIAL, etc.)
  * `contentRating` (*type:* `String.t`, *default:* `nil`) - The content rating for this app.
  * `description` (*type:* `String.t`, *default:* `nil`) - The localized promotional description, if available.
  * `detailsUrl` (*type:* `String.t`, *default:* `nil`) - A link to the (consumer) Google Play details page for the product.
  * `distributionChannel` (*type:* `String.t`, *default:* `nil`) - How and to whom the package is made available. The value publicGoogleHosted means that the package is available through the Play store and not restricted to a specific enterprise. The value privateGoogleHosted means that the package is a private app (restricted to an enterprise) but hosted by Google. The value privateSelfHosted means that the package is a private app (restricted to an enterprise) and is privately hosted.
  * `features` (*type:* `list(String.t)`, *default:* `nil`) - Noteworthy features (if any) of this product.
  * `iconUrl` (*type:* `String.t`, *default:* `nil`) - A link to an image that can be used as an icon for the product. This image is suitable for use at up to 512px x 512px.
  * `kind` (*type:* `String.t`, *default:* `androidenterprise#product`) -
  * `lastUpdatedTimestampMillis` (*type:* `String.t`, *default:* `nil`) - The approximate time (within 7 days) the app was last published, expressed in milliseconds since epoch.
  * `minAndroidSdkVersion` (*type:* `integer()`, *default:* `nil`) - The minimum Android SDK necessary to run the app.
  * `permissions` (*type:* `list(GoogleApi.AndroidEnterprise.V1.Model.ProductPermission.t)`, *default:* `nil`) - A list of permissions required by the app.
  * `productId` (*type:* `String.t`, *default:* `nil`) - A string of the form app:<package name>. For example, app:com.google.android.gm represents the Gmail app.
  * `productPricing` (*type:* `String.t`, *default:* `nil`) - Whether this product is free, free with in-app purchases, or paid. If the pricing is unknown, this means the product is not generally available anymore (even though it might still be available to people who own it).
  * `recentChanges` (*type:* `String.t`, *default:* `nil`) - A description of the recent changes made to the app.
  * `requiresContainerApp` (*type:* `boolean()`, *default:* `nil`) - Deprecated.
  * `screenshotUrls` (*type:* `list(String.t)`, *default:* `nil`) - A list of screenshot links representing the app.
  * `signingCertificate` (*type:* `GoogleApi.AndroidEnterprise.V1.Model.ProductSigningCertificate.t`, *default:* `nil`) - The certificate used to sign this product.
  * `smallIconUrl` (*type:* `String.t`, *default:* `nil`) - A link to a smaller image that can be used as an icon for the product. This image is suitable for use at up to 128px x 128px.
  * `title` (*type:* `String.t`, *default:* `nil`) - The name of the product.
  * `workDetailsUrl` (*type:* `String.t`, *default:* `nil`) - A link to the managed Google Play details page for the product, for use by an Enterprise admin.
  """

  # Provides the struct definition plus JSON (de)serialization via `field/1,2`.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :appTracks => list(GoogleApi.AndroidEnterprise.V1.Model.TrackInfo.t()),
          :appVersion => list(GoogleApi.AndroidEnterprise.V1.Model.AppVersion.t()),
          :authorName => String.t(),
          :availableCountries => list(String.t()),
          :availableTracks => list(String.t()),
          :category => String.t(),
          :contentRating => String.t(),
          :description => String.t(),
          :detailsUrl => String.t(),
          :distributionChannel => String.t(),
          :features => list(String.t()),
          :iconUrl => String.t(),
          :kind => String.t(),
          :lastUpdatedTimestampMillis => String.t(),
          :minAndroidSdkVersion => integer(),
          :permissions => list(GoogleApi.AndroidEnterprise.V1.Model.ProductPermission.t()),
          :productId => String.t(),
          :productPricing => String.t(),
          :recentChanges => String.t(),
          :requiresContainerApp => boolean(),
          :screenshotUrls => list(String.t()),
          :signingCertificate =>
            GoogleApi.AndroidEnterprise.V1.Model.ProductSigningCertificate.t(),
          :smallIconUrl => String.t(),
          :title => String.t(),
          :workDetailsUrl => String.t()
        }

  # Each `field` call registers one JSON property; `as:`/`type: :list` select
  # the nested model module and list decoding respectively.
  field(:appTracks, as: GoogleApi.AndroidEnterprise.V1.Model.TrackInfo, type: :list)
  field(:appVersion, as: GoogleApi.AndroidEnterprise.V1.Model.AppVersion, type: :list)
  field(:authorName)
  field(:availableCountries, type: :list)
  field(:availableTracks, type: :list)
  field(:category)
  field(:contentRating)
  field(:description)
  field(:detailsUrl)
  field(:distributionChannel)
  field(:features, type: :list)
  field(:iconUrl)
  field(:kind)
  field(:lastUpdatedTimestampMillis)
  field(:minAndroidSdkVersion)
  field(:permissions, as: GoogleApi.AndroidEnterprise.V1.Model.ProductPermission, type: :list)
  field(:productId)
  field(:productPricing)
  field(:recentChanges)
  field(:requiresContainerApp)
  field(:screenshotUrls, type: :list)
  field(:signingCertificate, as: GoogleApi.AndroidEnterprise.V1.Model.ProductSigningCertificate)
  field(:smallIconUrl)
  field(:title)
  field(:workDetailsUrl)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidEnterprise.V1.Model.Product do
  # Delegate to the generated model's own `decode/2`.
  def decode(value, options),
    do: GoogleApi.AndroidEnterprise.V1.Model.Product.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AndroidEnterprise.V1.Model.Product do
  # Generic struct encoding provided by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 62.188525 | 493 | 0.696454 |
ff6e61293a662a0474c996cd238e1a0e868fc3cb | 834 | exs | Elixir | priv/repo/migrations/20210212102706_create_notes_and_tags.exs | lawik/noted | a51b5d79cf44abfc2463560f83f1a8d65e6af85e | [
"BSD-3-Clause"
] | 28 | 2021-02-20T22:22:49.000Z | 2022-03-24T21:07:39.000Z | priv/repo/migrations/20210212102706_create_notes_and_tags.exs | gerhard/noted | c83bfd2e8e2950187268a2e3ba0904ae8a9773e5 | [
"BSD-3-Clause"
] | 5 | 2021-05-06T11:37:11.000Z | 2021-08-31T11:38:14.000Z | priv/repo/migrations/20210212102706_create_notes_and_tags.exs | gerhard/noted | c83bfd2e8e2950187268a2e3ba0904ae8a9773e5 | [
"BSD-3-Clause"
] | 7 | 2021-02-24T19:18:28.000Z | 2021-09-13T16:07:08.000Z | defmodule Noted.Repo.Migrations.CreateNotesAndTags do
use Ecto.Migration
def change do
create table(:tags) do
add :name, :string
add :user_id, references(:users)
timestamps()
end
create unique_index(:tags, [:name, :user_id], name: :tag_unique_by_user)
create table(:notes) do
add :title, :string
add :body, :text
timestamps()
add :user_id, references(:users)
end
create table(:notes_tags) do
add :note_id, references(:notes, on_delete: :delete_all), primary_key: true
add :tag_id, references(:tags, on_delete: :delete_all), primary_key: true
timestamps()
end
create unique_index(:notes_tags, [:note_id, :tag_id], name: :note_tag_unique)
create index(:notes_tags, [:note_id])
create index(:notes_tags, [:tag_id])
end
end
| 24.529412 | 81 | 0.661871 |
ff6e879dbcc13d198cbf21b981141b61b0770211 | 1,285 | exs | Elixir | test/apps/phoenix-app/config/dev.exs | karolsluszniak/cloudless-box | ed7189e50f7c4b4c36b682e09b117fb1ba907b0f | [
"MIT"
] | 15 | 2015-10-15T20:58:00.000Z | 2021-08-28T19:50:42.000Z | test/apps/phoenix-app/config/dev.exs | karolsluszniak/cloudless-box | ed7189e50f7c4b4c36b682e09b117fb1ba907b0f | [
"MIT"
] | 1 | 2016-08-21T15:50:34.000Z | 2016-08-21T18:24:07.000Z | test/apps/phoenix-app/config/dev.exs | karolsluszniak/cloudless-box | ed7189e50f7c4b4c36b682e09b117fb1ba907b0f | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :phoenix_app, PhoenixApp.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin"]]
# Watch static and templates for browser reloading.
config :phoenix_app, PhoenixApp.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development.
# Do not configure such in production as keeping
# and calculating stacktraces is usually expensive.
config :phoenix, :stacktrace_depth, 20
# Configure your database
config :phoenix_app, PhoenixApp.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "phoenix_app_dev",
hostname: "localhost",
pool_size: 10
| 29.883721 | 74 | 0.714397 |
ff6e981a782db8e18d38661cdfa01c3a85b440b0 | 577 | ex | Elixir | lib/dragonhacks/shared_map.ex | rkalz/dragonhacks-backend | 377ed0383f403c720af30bc8ef4f3fdaf89b3e38 | [
"MIT"
] | 1 | 2019-04-03T01:48:59.000Z | 2019-04-03T01:48:59.000Z | lib/dragonhacks/shared_map.ex | rkalz/dragonhacks-backend | 377ed0383f403c720af30bc8ef4f3fdaf89b3e38 | [
"MIT"
] | null | null | null | lib/dragonhacks/shared_map.ex | rkalz/dragonhacks-backend | 377ed0383f403c720af30bc8ef4f3fdaf89b3e38 | [
"MIT"
] | 1 | 2019-03-31T21:21:47.000Z | 2019-03-31T21:21:47.000Z | defmodule Dragonhacks.SharedMap do
use GenServer
# Client
def start_link(name) do
GenServer.start_link(__MODULE__, nil, name: name)
end
def get(name, key, default) do
GenServer.call(name, {:get, key, default})
end
def put(name, key, val) do
GenServer.cast(name, {:put, key, val})
end
# Server
def init(_) do
{:ok, Map.new}
end
def handle_call({:get, key, default}, _, state) do
{:reply, Map.get(state, key, default), state}
end
def handle_cast({:put, key, val}, state) do
{:noreply, Map.put(state, key, val)}
end
end
| 19.233333 | 53 | 0.639515 |
ff6ea6767af00f8a55bc08967907aff0b2272689 | 1,636 | ex | Elixir | clients/analytics/lib/google_api/analytics/v3/model/analytics_dataimport_delete_upload_data_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/analytics_dataimport_delete_upload_data_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/analytics_dataimport_delete_upload_data_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Analytics.V3.Model.AnalyticsDataimportDeleteUploadDataRequest do
  @moduledoc """
  Request template for the delete upload data request.

  ## Attributes

  - customDataImportUids ([String.t]): A list of upload UIDs. Defaults to: `null`.
  """

  # Provides the struct definition plus JSON (de)serialization via `field/1,2`.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :customDataImportUids => list(any())
        }

  # Registers the single JSON property with the Gax model machinery.
  field(:customDataImportUids, type: :list)
end
defimpl Poison.Decoder,
  for: GoogleApi.Analytics.V3.Model.AnalyticsDataimportDeleteUploadDataRequest do
  # Delegate to the generated model's own `decode/2`.
  def decode(value, options),
    do:
      GoogleApi.Analytics.V3.Model.AnalyticsDataimportDeleteUploadDataRequest.decode(
        value,
        options
      )
end
defimpl Poison.Encoder,
  for: GoogleApi.Analytics.V3.Model.AnalyticsDataimportDeleteUploadDataRequest do
  # Generic struct encoding provided by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 32.72 | 98 | 0.762225 |
ff6ea9649e2e710680f8675d74ba920e9c1940eb | 6,341 | ex | Elixir | lib/Discord/events.ex | DerbyWars/alchemy | 7952a0edfd39958cb9e949aff4602263c38ab494 | [
"MIT"
] | 163 | 2017-03-01T09:02:35.000Z | 2022-03-09T23:31:48.000Z | lib/Discord/events.ex | DerbyWars/alchemy | 7952a0edfd39958cb9e949aff4602263c38ab494 | [
"MIT"
] | 91 | 2017-02-23T23:23:35.000Z | 2021-12-29T23:47:44.000Z | lib/Discord/events.ex | DerbyWars/alchemy | 7952a0edfd39958cb9e949aff4602263c38ab494 | [
"MIT"
] | 38 | 2017-03-23T13:16:44.000Z | 2022-02-26T15:53:32.000Z | defmodule Alchemy.Discord.Events do
# This module contains the protocols
@moduledoc false
# for updating the cache based on the events received from discord.
# This module is then used by EventStage.Cache
alias Alchemy.{Channel, Channel.DMChannel, Guild.Emoji, Guild, Message, User, VoiceState}
alias Alchemy.Guild.{GuildMember, Presence, Role}
alias Alchemy.Cache.{Channels, Guilds, PrivChannels}
import Alchemy.Structs
# A direct message was started with the bot
# type 1 == DM
# https://discord.com/developers/docs/resources/channel#channel-object-channel-types
def handle("CHANNEL_CREATE", %{"type" => 1} = dm_channel) do
PrivChannels.add_channel(dm_channel)
struct = to_struct(dm_channel, DMChannel)
{:dm_channel_create, [struct]}
end
def handle("CHANNEL_CREATE", %{"guild_id" => guild_id} = channel) do
struct = Channel.from_map(channel)
Guilds.add_channel(guild_id, channel)
{:channel_create, [struct]}
end
def handle("CHANNEL_UPDATE", %{"id" => channel_id} = channel) do
with {:ok, guild_id} <- Channels.lookup(channel_id) do
Guilds.update_channel(guild_id, channel)
end
{:channel_update, [Channel.from_map(channel)]}
end
def handle("CHANNEL_DELETE", %{"type" => 1} = dm_channel) do
PrivChannels.remove_channel(dm_channel["id"])
{:dm_channel_delete, [to_struct(dm_channel, DMChannel)]}
end
def handle("CHANNEL_DELETE", %{"id" => channel_id} = channel) do
with {:ok, guild_id} <- Channels.lookup(channel_id) do
Guilds.remove_channel(guild_id, channel_id)
end
Channels.remove_channel(channel_id)
{:channel_delete, [Channel.from_map(channel)]}
end
# The Cache manager is tasked of notifying, if, and only if this guild is new,
# and not in the unavailable guilds loaded before
def handle("GUILD_CREATE", guild) do
Guilds.add_guild(guild)
end
def handle("GUILD_UPDATE", guild) do
guild =
Guilds.update_guild(guild)
|> Guilds.de_index()
|> Guild.from_map()
{:guild_update, [guild]}
end
# The Cache is responsible for notifications in this case
def handle("GUILD_DELETE", guild) do
Guilds.remove_guild(guild)
end
def handle("GUILD_BAN_ADD", %{"guild_id" => id} = user) do
{:guild_ban, [to_struct(user, User), id]}
end
def handle("GUILD_BAN_REMOVE", %{"guild_id" => id} = user) do
{:guild_unban, [to_struct(user, User), id]}
end
def handle("GUILD_EMOJIS_UPDATE", data) do
Guilds.update_emojis(data)
{:emoji_update, [map_struct(data["emojis"], Emoji), data["guild_id"]]}
end
def handle("GUILD_INTEGRATIONS_UPDATE", %{"guild_id" => id}) do
{:integrations_update, [id]}
end
def handle("GUILD_MEMBER_ADD", %{"guild_id" => id}) do
{:member_join, [id]}
end
def handle("GUILD_MEMBERS_CHUNK", %{"guild_id" => id, "members" => m}) do
Guilds.add_members(id, m)
{:member_chunk, [id, Enum.map(m, &GuildMember.from_map/1)]}
end
def handle("GUILD_MEMBER_REMOVE", %{"guild_id" => id, "user" => user}) do
Guilds.remove_member(id, user)
{:member_leave, [to_struct(user, User), id]}
end
def handle("GUILD_MEMBER_UPDATE", %{"guild_id" => id} = data) do
# This key would get popped implicitly later, but I'd rather do it clearly here
Guilds.update_member(id, Map.delete(data, "guild_id"))
{:member_update, [GuildMember.from_map(data), id]}
end
def handle("GUILD_ROLE_CREATE", %{"guild_id" => id, "role" => role}) do
Guilds.add_role(id, role)
{:role_create, [to_struct(role, Role), id]}
end
def handle("GUILD_ROLE_UPDATE", %{"guild_id" => id, "role" => new_role = %{"id" => role_id}}) do
guild_result = Guilds.safe_call(id, {:section, "roles"})
old_role =
with {:ok, guild} <- guild_result,
role when not is_nil(role) <- guild[role_id] do
to_struct(role, Role)
else
_ -> nil
end
Guilds.update_role(id, new_role)
new_role = to_struct(new_role, Role)
{:role_update, [old_role, new_role, id]}
end
# Role deleted: drop it from the guild cache. Note the event args are
# [role_id, guild_id] — the reverse of most other clauses' [payload, guild_id].
def handle("GUILD_ROLE_DELETE", %{"guild_id" => guild_id, "role_id" => id}) do
  Guilds.remove_role(guild_id, id)
  {:role_delete, [id, guild_id]}
end
# New message: parse the full payload into a Message struct.
def handle("MESSAGE_CREATE", message) do
  struct = Message.from_map(message)
  {:message_create, [struct]}
end
# Message edited. Discord sends partial payloads here, so the resulting
# Message struct may have many nil fields — TODO confirm downstream handling.
def handle("MESSAGE_UPDATE", message) do
  {:message_update, [Message.from_map(message)]}
end
# Single message deleted: only ids are available, no message body.
def handle("MESSAGE_DELETE", %{"id" => msg_id, "channel_id" => chan_id}) do
  {:message_delete, [msg_id, chan_id]}
end
# Bulk delete: a list of message ids for one channel.
def handle("MESSAGE_DELETE_BULK", %{"ids" => ids, "channel_id" => chan_id}) do
  {:message_delete_bulk, [ids, chan_id]}
end
# Reaction added to a message. The raw emoji map is forwarded as-is
# (not converted to an Emoji struct, unlike GUILD_EMOJIS_UPDATE).
def handle("MESSAGE_REACTION_ADD", %{
  "user_id" => user_id,
  "channel_id" => channel_id,
  "message_id" => message_id,
  "emoji" => emoji
}) do
  {:message_reaction_add, [user_id, channel_id, message_id, emoji]}
end
# A single user's reaction removed; same argument shape as the add event.
def handle("MESSAGE_REACTION_REMOVE", %{
  "user_id" => user_id,
  "channel_id" => channel_id,
  "message_id" => message_id,
  "emoji" => emoji
}) do
  {:message_reaction_remove, [user_id, channel_id, message_id, emoji]}
end
# All reactions cleared from a message: no user/emoji detail is sent.
def handle("MESSAGE_REACTION_REMOVE_ALL", %{
  "channel_id" => channel_id,
  "message_id" => message_id
}) do
  {:message_reaction_remove_all, [channel_id, message_id]}
end
# Presence update carrying a guild id: update the cache first. This clause
# must stay ABOVE the generic presence clause — both emit the same event, but
# only guild-scoped presences are cached.
def handle("PRESENCE_UPDATE", %{"guild_id" => _id} = presence) do
  Guilds.update_presence(presence)
  {:presence_update, [Presence.from_map(presence)]}
end
# Presence update without a guild id (e.g. friends list): no caching.
def handle("PRESENCE_UPDATE", presence) do
  {:presence_update, [Presence.from_map(presence)]}
end
# Gateway handshake complete; only the shard info is forwarded.
# NOTE(review): unlike every other clause, the second tuple element is not
# wrapped in a list — confirm the dispatcher accepts this shape.
def handle("READY", payload) do
  {:ready, payload["shard"]}
end
# A user started typing in a channel.
def handle("TYPING_START", data) do
  chan_id = data["channel_id"]
  user_id = data["user_id"]
  timestamp = data["timestamp"]
  {:typing_start, [user_id, chan_id, timestamp]}
end
# Current user's settings changed. This head only matches payloads containing
# BOTH "username" and "avatar"; other settings payloads fall through to the
# catch-all clause — TODO confirm that is intended.
def handle("USER_SETTINGS_UPDATE", %{"username" => name, "avatar" => avatar}) do
  {:user_settings_update, [name, avatar]}
end
# Current user's account (name/avatar/etc.) changed.
def handle("USER_UPDATE", user) do
  {:user_update, [to_struct(user, User)]}
end
# Someone's voice state (mute/deafen/channel) changed: cache, then emit.
def handle("VOICE_STATE_UPDATE", voice) do
  Guilds.update_voice_state(voice)
  {:voice_state_update, [to_struct(voice, VoiceState)]}
end
# Catch-all for gateway events with no explicit clause above.
# NOTE(review): the atom is misspelled (:unkown instead of :unknown), but it
# is part of the emitted event contract — consumers may already match on
# :unkown, so renaming it here alone would be a breaking change. Fix it in a
# coordinated release together with all event consumers.
def handle(_, _) do
  {:unkown, []}
end
end
| 30.052133 | 98 | 0.664406 |
ff6eb5ad0702dde4c45bf4a941090754ca410f6b | 18,642 | ex | Elixir | lib/console_web/controllers/label_controller.ex | Oliv4945/console | fd7b85c8ff3b0702c8a524eb888104612946934c | [
"Apache-2.0"
] | null | null | null | lib/console_web/controllers/label_controller.ex | Oliv4945/console | fd7b85c8ff3b0702c8a524eb888104612946934c | [
"Apache-2.0"
] | null | null | null | lib/console_web/controllers/label_controller.ex | Oliv4945/console | fd7b85c8ff3b0702c8a524eb888104612946934c | [
"Apache-2.0"
] | null | null | null | defmodule ConsoleWeb.LabelController do
use ConsoleWeb, :controller
alias Console.Repo
alias Console.Labels
alias Console.Devices
alias Console.Labels.Label
alias Console.Alerts
alias Console.AuditActions
plug ConsoleWeb.Plug.AuthorizeAction
action_fallback(ConsoleWeb.FallbackController)
# Creates a label for the current organization. Server-controlled fields
# (organization id, creator email) are merged over the client params, the
# device-index label bar is refreshed via PubSub, and the action is audited.
# On {:error, changeset} the `with` falls through and the action_fallback
# (ConsoleWeb.FallbackController, see plug above) renders the error.
def create(conn, %{"label" => label_params} = attrs) do
  current_organization = conn.assigns.current_organization
  current_user = conn.assigns.current_user
  # Merge AFTER taking client params so server-side values always win.
  label_params =
    Map.merge(label_params, %{
      "organization_id" => current_organization.id,
      "creator" => current_user.email
    })
  with {:ok, %Label{} = label} <- Labels.create_label(current_organization, label_params) do
    ConsoleWeb.Endpoint.broadcast("graphql:device_index_labels_bar", "graphql:device_index_labels_bar:#{current_organization.id}:label_list_update", %{})
    # Record who created what, with the full request attrs for traceability.
    AuditActions.create_audit_action(
      current_organization.id,
      current_user.email,
      "label_controller_create",
      label.id,
      attrs
    )
    conn
    |> put_status(:created)
    |> put_resp_header("message", "Label #{label.name} added successfully")
    |> render("show.json", label: label)
  end
end
# Updates a label: notifies the label-show page and org resource subscribers,
# tells the router which devices changed, audits the action, and builds a
# flash message that mentions the rename when the name changed.
def update(conn, %{"id" => id, "label" => label_params} = attrs) do
  current_organization = conn.assigns.current_organization
  label = Labels.get_label!(current_organization, id) |> Labels.fetch_assoc([:devices])
  # Snapshot associated device ids and the pre-update name before mutating.
  device_ids = label.devices |> Enum.map(fn d -> d.id end)
  name = label.name
  with {:ok, %Label{} = label} <- Labels.update_label(label, label_params) do
    ConsoleWeb.Endpoint.broadcast("graphql:label_show", "graphql:label_show:#{label.id}:label_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:resources_update", "graphql:resources_update:#{current_organization.id}:organization_resources_update", %{})
    broadcast_router_update_devices(device_ids)
    # Distinguish a plain update from a rename in the user-facing message.
    msg =
      cond do
        label.name == name -> "Label #{label.name} updated successfully"
        true -> "The label #{name} was successfully updated to #{label.name}"
      end
    AuditActions.create_audit_action(
      current_organization.id,
      conn.assigns.current_user.email,
      "label_controller_update",
      label.id,
      attrs
    )
    conn
    |> put_resp_header("message", msg)
    |> render("show.json", label: label)
  end
end
# Deletes a label: removes alert nodes attached to it, refreshes every UI
# that shows labels or label/device relations, notifies affected devices and
# the router, and audits the action.
def delete(conn, %{"id" => id} = attrs) do
  current_organization = conn.assigns.current_organization
  label = Labels.get_label!(current_organization, id) |> Labels.fetch_assoc([:devices])
  # Capture affected device ids before the label and its joins are gone.
  device_ids = label.devices |> Enum.map(fn d -> d.id end)
  with {:ok, %Label{} = label} <- Labels.delete_label(label) do
    ConsoleWeb.Endpoint.broadcast("graphql:device_index_labels_bar", "graphql:device_index_labels_bar:#{current_organization.id}:label_list_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:devices_in_labels_update", "graphql:devices_in_labels_update:#{current_organization.id}:organization_devices_in_labels_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:flows_nodes_menu", "graphql:flows_nodes_menu:#{current_organization.id}:all_resources_update", %{})
    # Alerts attached to this label would otherwise reference a dead id.
    Alerts.delete_alert_nodes(id, "label")
    Enum.each(device_ids, fn device ->
      ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device}:device_update", %{})
    end)
    broadcast_router_update_devices(device_ids)
    AuditActions.create_audit_action(
      current_organization.id,
      conn.assigns.current_user.email,
      "label_controller_delete",
      id,
      attrs
    )
    conn
    |> put_resp_header("message", "#{label.name} deleted successfully")
    |> send_resp(:no_content, "")
  end
end
# Label-show dropdown: attach devices — given directly and/or indirectly via
# other labels — to the target label. Returning the {:error, ...} 3-tuple
# delegates rendering to the action fallback.
def add_devices_to_label(conn, %{"devices" => devices, "labels" => labels, "to_label" => to_label}) do
  # individual label show dropdown - add this label to a device
  current_organization = conn.assigns.current_organization
  destination_label = Labels.get_label!(current_organization, to_label)
  if length(devices) == 0 and length(labels) == 0 do
    {:error, :bad_request, "Please select a device or label"}
  else
    with {:ok, _devices_labels} <- Labels.add_devices_to_label(devices, labels, destination_label.id, current_organization) do
      # Refresh the target label's page/table, the org-wide label views, and
      # each affected device's label table.
      ConsoleWeb.Endpoint.broadcast("graphql:label_show", "graphql:label_show:#{destination_label.id}:label_update", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:label_show_table", "graphql:label_show_table:#{destination_label.id}:update_label_devices", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:device_index_labels_bar", "graphql:device_index_labels_bar:#{current_organization.id}:label_list_update", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:devices_in_labels_update", "graphql:devices_in_labels_update:#{current_organization.id}:organization_devices_in_labels_update", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:flows_nodes_menu", "graphql:flows_nodes_menu:#{current_organization.id}:all_resources_update", %{})
      Enum.each(devices, fn device ->
        ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device}:device_update", %{})
      end)
      broadcast_router_update_devices(devices)
      conn
      |> put_resp_header("message", "Devices added to label successfully")
      |> send_resp(:ok, "")
    end
  end
end
# Device-index dropdown: attach an EXISTING label to the selected devices.
def add_devices_to_label(conn, %{"devices" => devices, "to_label" => to_label}) do
  # device index dropdown - add label to selected devices
  current_organization = conn.assigns.current_organization
  apply_label_to_devices(devices, to_label, current_organization, conn)
end
# Device-index dropdown: create a NEW label and attach it to the selected
# devices (done transactionally in create_label_and_apply_to_devices/5).
def add_devices_to_label(conn, %{"devices" => devices, "new_label" => label_name}) do
  # device index dropdown - add label to selected devices
  current_organization = conn.assigns.current_organization
  current_user = conn.assigns.current_user
  create_label_and_apply_to_devices(devices, label_name, current_organization, current_user, conn)
end
# No explicit device list given: attach the existing label to EVERY device
# owned by the current organization.
def add_devices_to_label(conn, %{"to_label" => to_label}) do
  org = conn.assigns.current_organization
  all_device_ids = Enum.map(Devices.get_devices(org.id), & &1.id)
  apply_label_to_devices(all_device_ids, to_label, org, conn)
end
# No explicit device list given: create a new label and attach it to EVERY
# device owned by the current organization.
def add_devices_to_label(conn, %{"new_label" => label_name}) do
  org = conn.assigns.current_organization
  user = conn.assigns.current_user
  all_device_ids = Enum.map(Devices.get_devices(org.id), & &1.id)
  create_label_and_apply_to_devices(all_device_ids, label_name, org, user, conn)
end
# Label-show dropdown: detach the selected devices from one label.
# `delete_devices_from_label` returns {count, nil} on success (delete_all
# shape), matched loosely as {_, nil}.
def delete_devices_from_labels(conn, %{"devices" => devices, "label_id" => label_id}) do
  # individual label show dropdown - remove selected devices from label
  current_organization = conn.assigns.current_organization
  with {_, nil} <- Labels.delete_devices_from_label(devices, label_id, current_organization) do
    label = Labels.get_label!(label_id)
    ConsoleWeb.Endpoint.broadcast("graphql:label_show", "graphql:label_show:#{label.id}:label_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:label_show_table", "graphql:label_show_table:#{label.id}:update_label_devices", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:device_index_labels_bar", "graphql:device_index_labels_bar:#{current_organization.id}:label_list_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:devices_in_labels_update", "graphql:devices_in_labels_update:#{current_organization.id}:organization_devices_in_labels_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:flows_nodes_menu", "graphql:flows_nodes_menu:#{current_organization.id}:all_resources_update", %{})
    Enum.each(devices, fn device ->
      ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device}:device_update", %{})
    end)
    broadcast_router_update_devices(devices)
    conn
    |> put_resp_header("message", "Device(s) successfully removed from label")
    |> send_resp(:no_content, "")
  end
end
# Device-index table row: detach the given labels from one device.
# NOTE(review): the `else { :error } ->` branch matches a bare one-element
# tuple {:error}; a delete_all-style context function would normally return
# {count, nil} or {:error, reason}. Confirm that
# Labels.delete_labels_from_device/3 can actually return {:error} — if not,
# this 400 branch is dead code and other failures will crash with WithClauseError.
def delete_devices_from_labels(conn, %{"labels" => labels, "device_id" => device_id}) do
  # device index table - remove label from device entry in table row
  current_organization = conn.assigns.current_organization
  with {_, nil} <- Labels.delete_labels_from_device(labels, device_id, current_organization) do
    device = Devices.get_device!(device_id)
    ConsoleWeb.Endpoint.broadcast("graphql:devices_index_table", "graphql:devices_index_table:#{current_organization.id}:device_list_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:device_index_labels_bar", "graphql:device_index_labels_bar:#{current_organization.id}:label_list_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:device_show", "graphql:device_show:#{device.id}:device_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device.id}:device_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:devices_in_labels_update", "graphql:devices_in_labels_update:#{current_organization.id}:organization_devices_in_labels_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:flows_update", "graphql:flows_update:#{current_organization.id}:organization_flows_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:flows_nodes_menu", "graphql:flows_nodes_menu:#{current_organization.id}:all_resources_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device_id}:device_update", %{})
    Enum.each(labels, fn label ->
      ConsoleWeb.Endpoint.broadcast("graphql:label_show_table", "graphql:label_show_table:#{label}:update_label_devices", %{})
    end)
    broadcast_router_update_devices([device_id])
    conn
    |> put_resp_header("message", "Label(s) successfully removed from device")
    |> send_resp(:no_content, "")
  else { :error } ->
    conn
    |> send_resp(400, "")
  end
end
# Device-index dropdown: strip ALL labels from the selected devices.
def delete_devices_from_labels(conn, %{"devices" => devices}) do
  # device index dropdown - remove all labels from selected devices
  current_organization = conn.assigns.current_organization
  with {:ok, _} <- Labels.delete_all_labels_from_devices(devices, current_organization) do
    ConsoleWeb.Endpoint.broadcast("graphql:devices_index_table", "graphql:devices_index_table:#{current_organization.id}:device_list_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:device_index_labels_bar", "graphql:device_index_labels_bar:#{current_organization.id}:label_list_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:devices_in_labels_update", "graphql:devices_in_labels_update:#{current_organization.id}:organization_devices_in_labels_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:flows_nodes_menu", "graphql:flows_nodes_menu:#{current_organization.id}:all_resources_update", %{})
    Enum.each(devices, fn device ->
      ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device}:device_update", %{})
    end)
    broadcast_router_update_devices(devices)
    conn
    |> put_resp_header("message", "All labels successfully removed from devices")
    |> send_resp(:no_content, "")
  end
end
# Remove ALL devices from the given labels. The context returns the label
# structs (rebinding `labels`), which are then used to discover which devices
# were affected so they can be notified.
def delete_devices_from_labels(conn, %{"labels" => labels}) do
  current_organization = conn.assigns.current_organization
  with {:ok, labels} <- Labels.delete_all_devices_from_labels(labels, current_organization) do
    ConsoleWeb.Endpoint.broadcast("graphql:device_index_labels_bar", "graphql:device_index_labels_bar:#{current_organization.id}:label_list_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:devices_in_labels_update", "graphql:devices_in_labels_update:#{current_organization.id}:organization_devices_in_labels_update", %{})
    ConsoleWeb.Endpoint.broadcast("graphql:flows_nodes_menu", "graphql:flows_nodes_menu:#{current_organization.id}:all_resources_update", %{})
    # Collect the distinct device ids previously attached to these labels.
    assoc_labels = labels |> Labels.multi_fetch_assoc([:devices])
    assoc_devices = Enum.map(assoc_labels, fn l -> l.devices end) |> List.flatten() |> Enum.map(fn d -> d.id end) |> Enum.uniq()
    Enum.each(assoc_devices, fn device ->
      ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device}:device_update", %{})
    end)
    broadcast_router_update_devices(assoc_devices)
    conn
    |> put_resp_header("message", "All devices successfully removed from labels")
    |> send_resp(:no_content, "")
  end
end
# Catch-all clause (no params): strip every label from every device in the
# organization. Unlike the clauses above, the context call's return value is
# not checked — failures surface only as an exception.
def delete_devices_from_labels(conn, _params) do
  current_organization = conn.assigns.current_organization
  Labels.delete_all_labels_from_devices_for_org(current_organization)
  all_device_ids = Devices.get_devices(current_organization.id) |> Enum.map(fn d -> d.id end)
  ConsoleWeb.Endpoint.broadcast("graphql:devices_index_table", "graphql:devices_index_table:#{current_organization.id}:device_list_update", %{})
  ConsoleWeb.Endpoint.broadcast("graphql:flows_nodes_menu", "graphql:flows_nodes_menu:#{current_organization.id}:all_resources_update", %{})
  Enum.each(all_device_ids, fn device ->
    ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device}:device_update", %{})
  end)
  broadcast_router_update_devices(all_device_ids)
  conn
  |> put_resp_header("message", "All devices successfully removed from labels")
  |> send_resp(:no_content, "")
end
# Creates a label and attaches the given devices to it in ONE transaction
# (Ecto.Multi), so a failure in either step rolls back both.
# Note: `organization` and `current_organization` refer to the same org —
# every caller passes conn.assigns.current_organization.
defp create_label_and_apply_to_devices(devices, label_name, organization, user, conn) do
  current_organization = conn.assigns.current_organization
  cond do
    length(devices) == 0 -> {:error, :bad_request, "Please select a device"}
    # Enforce per-org label-name uniqueness up front for a friendly message.
    Labels.get_label_by_name(label_name, organization.id) != nil -> {:error, :bad_request, "That label already exists"}
    true ->
      label_changeset =
        %Label{}
        |> Label.changeset(%{"name" => label_name, "organization_id" => organization.id, "creator" => user.email})
      result =
        Ecto.Multi.new()
        |> Ecto.Multi.insert(:label, label_changeset)
        |> Ecto.Multi.run(:devices_labels, fn _repo, %{label: label} ->
          Labels.add_devices_to_label(devices, label.id, organization)
        end)
        |> Repo.transaction()
      with {:ok, %{devices_labels: _, label: _label }} <- result do
        ConsoleWeb.Endpoint.broadcast("graphql:device_index_labels_bar", "graphql:device_index_labels_bar:#{current_organization.id}:label_list_update", %{})
        # The first device's show page gets an individual refresh as well.
        device = Devices.get_device!(List.first(devices))
        ConsoleWeb.Endpoint.broadcast("graphql:devices_index_table", "graphql:devices_index_table:#{current_organization.id}:device_list_update", %{})
        ConsoleWeb.Endpoint.broadcast("graphql:device_show", "graphql:device_show:#{device.id}:device_update", %{})
        ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device.id}:device_update", %{})
        ConsoleWeb.Endpoint.broadcast("graphql:devices_in_labels_update", "graphql:devices_in_labels_update:#{current_organization.id}:organization_devices_in_labels_update", %{})
        ConsoleWeb.Endpoint.broadcast("graphql:flows_nodes_menu", "graphql:flows_nodes_menu:#{current_organization.id}:all_resources_update", %{})
        Enum.each(devices, fn device ->
          ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device}:device_update", %{})
          ConsoleWeb.Endpoint.broadcast("graphql:mobile_device_labels", "graphql:mobile_device_labels:#{device}:labels_update", %{})
        end)
        broadcast_router_update_devices(devices)
        conn
        |> put_resp_header("message", "Devices added to label successfully")
        |> send_resp(:no_content, "")
      end
  end
end
# Attaches an existing label to the given devices, then notifies every
# affected UI, the affected devices, and the router.
# Note: `organization` and `current_organization` refer to the same org —
# every caller passes conn.assigns.current_organization.
defp apply_label_to_devices(devices, label_id, organization, conn) do
  current_organization = conn.assigns.current_organization
  destination_label = Labels.get_label!(organization, label_id)
  if length(devices) == 0 do
    {:error, :bad_request, "Please select a device"}
  else
    with {:ok, devices_labels} <- Labels.add_devices_to_label(devices, destination_label.id, organization) do
      device = Devices.get_device!(List.first(devices))
      ConsoleWeb.Endpoint.broadcast("graphql:label_show_table", "graphql:label_show_table:#{destination_label.id}:update_label_devices", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:devices_index_table", "graphql:devices_index_table:#{current_organization.id}:device_list_update", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:device_index_labels_bar", "graphql:device_index_labels_bar:#{current_organization.id}:label_list_update", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:device_show", "graphql:device_show:#{device.id}:device_update", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device.id}:device_update", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:devices_in_labels_update", "graphql:devices_in_labels_update:#{current_organization.id}:organization_devices_in_labels_update", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:flows_update", "graphql:flows_update:#{current_organization.id}:organization_flows_update", %{})
      ConsoleWeb.Endpoint.broadcast("graphql:flows_nodes_menu", "graphql:flows_nodes_menu:#{current_organization.id}:all_resources_update", %{})
      # Only the rows actually inserted are notified (devices may already
      # have carried the label).
      assoc_devices = devices_labels |> Enum.map(fn dl -> dl.device_id end)
      Enum.each(assoc_devices, fn device ->
        ConsoleWeb.Endpoint.broadcast("graphql:device_show_labels_table", "graphql:device_show_labels_table:#{device}:device_update", %{})
        ConsoleWeb.Endpoint.broadcast("graphql:mobile_device_labels", "graphql:mobile_device_labels:#{device}:labels_update", %{})
      end)
      broadcast_router_update_devices(assoc_devices)
      conn
      |> put_resp_header("message", "Devices added to label successfully")
      |> send_resp(:no_content, "")
    end
  end
end
# Tells the router channel which devices changed so it can refetch them.
defp broadcast_router_update_devices(device_ids) do
  payload = %{"devices" => device_ids}
  ConsoleWeb.Endpoint.broadcast("device:all", "device:all:refetch:devices", payload)
end
end
| 54.19186 | 181 | 0.72975 |
ff6eb727ec75d0c075783060abe051d5c719d095 | 329 | exs | Elixir | priv/repo/migrations/20191119152028_tag_ingredient_link.exs | shaddysignal/drunkard | 8365c75cd98414dfe38481956e90dda26a177bdd | [
"Unlicense"
] | 2 | 2020-07-05T21:27:33.000Z | 2021-12-12T22:56:00.000Z | priv/repo/migrations/20191119152028_tag_ingredient_link.exs | shaddysignal/drunkard | 8365c75cd98414dfe38481956e90dda26a177bdd | [
"Unlicense"
] | 1 | 2021-05-11T08:14:48.000Z | 2021-05-11T08:14:48.000Z | priv/repo/migrations/20191119152028_tag_ingredient_link.exs | shaddysignal/drunkard | 8365c75cd98414dfe38481956e90dda26a177bdd | [
"Unlicense"
] | 1 | 2020-07-05T21:27:46.000Z | 2020-07-05T21:27:46.000Z | defmodule Drunkard.Repo.Migrations.Tag2Ingredient do
use Ecto.Migration
def change do
create table(:tag_ingredient_link, primary_key: false) do
add :tag_uuid, references(:tags, column: :uuid, type: :binary_id)
add :ingredient_uuid, references(:ingredients, column: :uuid, type: :binary_id)
end
end
end
| 29.909091 | 85 | 0.732523 |
ff6ed5fff6047c48b6155893e263dc1649776269 | 6,288 | ex | Elixir | lib/exrabbit/channel.ex | velimir0xff/exrabbit | 95a4332741bb42a355a0e6963e5a48794414be43 | [
"MIT"
] | null | null | null | lib/exrabbit/channel.ex | velimir0xff/exrabbit | 95a4332741bb42a355a0e6963e5a48794414be43 | [
"MIT"
] | null | null | null | lib/exrabbit/channel.ex | velimir0xff/exrabbit | 95a4332741bb42a355a0e6963e5a48794414be43 | [
"MIT"
] | null | null | null | defmodule Exrabbit.Channel do
@moduledoc """
This module exposes some channel-level AMQP methods.
Mostly the functions that don't belong in neither `Exrabbit.Producer` nor
`Exrabbit.Consumer` are kept here.
"""
use Exrabbit.Records
@type conn :: pid
@type chan :: pid
@type await_confirms_result :: :ok | {:error, :timeout} | {:error, :nack}
@doc """
Opens a fresh channel on the given, already-established AMQP connection.

Returns the channel pid; raises a `MatchError` if the underlying client
cannot open one.
"""
@spec open(conn) :: chan | no_return
def open(conn) when is_pid(conn) do
  {:ok, channel} = :amqp_connection.open_channel(conn)
  channel
end
@doc """
Closes a channel previously opened with `open/1`.
"""
@spec close(chan) :: :ok
def close(chan) do
  :amqp_channel.close(chan)
end
@doc """
Switch the channel to confirm-mode or tx-mode.

Once set, the mode cannot be changed afterwards.
"""
@spec set_mode(chan, :confirm | :tx) :: :ok
# Confirm-mode: the broker will ack/nack published messages (see await_confirms/2).
def set_mode(chan, :confirm) do
  # The match asserts the broker replied confirm.select-ok; anything else raises.
  confirm_select_ok() = :amqp_channel.call(chan, confirm_select())
  :ok
end
# Tx-mode: publishes become part of an AMQP transaction (see commit/1, rollback/1).
def set_mode(chan, :tx) do
  tx_select_ok() = :amqp_channel.call(chan, tx_select())
  :ok
end
@doc """
Set QoS (Quality of Service) on the channel.

The second argument should be an `Exrabbit.Records.basic_qos` record.
"""
@spec set_qos(chan, Exrabbit.Records.basic_qos) :: :ok
# The head pattern-matches a basic_qos record; any other term raises FunctionClauseError.
def set_qos(chan, basic_qos()=qos) do
  # Asserts the broker replied basic.qos-ok.
  basic_qos_ok() = :amqp_channel.call(chan, qos)
  :ok
end
@doc """
Acknowledge the message identified by its delivery tag.

## Options

  * `multiple: <boolean>` - when `true`, acknowledges every message up to and
    including the current one in a single request; default: `false`
"""
@spec ack(chan, binary) :: :ok
@spec ack(chan, binary, Keyword.t) :: :ok
def ack(chan, tag, opts \\ []) do
  multiple = Keyword.get(opts, :multiple, false)
  :amqp_channel.call(chan, basic_ack(delivery_tag: tag, multiple: multiple))
end
@doc """
Reject a message via `basic.nack` (RabbitMQ extension).

## Options

  * `multiple: <boolean>` - reject every message up to and including the
    current one; default: `false`
  * `requeue: <boolean>` - put rejected messages back into the queue;
    default: `true`
"""
@spec nack(chan, binary) :: :ok
@spec nack(chan, binary, Keyword.t) :: :ok
def nack(chan, tag, opts \\ []) do
  multiple = Keyword.get(opts, :multiple, false)
  requeue = Keyword.get(opts, :requeue, true)
  :amqp_channel.call(chan, basic_nack(delivery_tag: tag, multiple: multiple, requeue: requeue))
end
@doc """
Reject a single message via `basic.reject`.

## Options

  * `requeue: <boolean>` - put the message back into the queue; default: `true`
"""
@spec reject(chan, binary) :: :ok
@spec reject(chan, binary, Keyword.t) :: :ok
def reject(chan, tag, opts \\ []) do
  requeue = Keyword.get(opts, :requeue, true)
  :amqp_channel.call(chan, basic_reject(delivery_tag: tag, requeue: requeue))
end
@doc """
Wait until the broker has confirmed all messages published on this channel
(requires confirm-mode, see `set_mode/2`).

Returns `:ok`, or `{:error, reason}` where `reason` is one of:

  * `:timeout` - no reply arrived before `timeout` milliseconds elapsed
  * `:nack` - the broker negatively acknowledged at least one message
"""
@spec await_confirms(chan) :: await_confirms_result
@spec await_confirms(chan, non_neg_integer) :: await_confirms_result
def await_confirms(chan, timeout \\ confirm_timeout) do
  # wait_for_confirms/2 yields exactly true | false | :timeout.
  case :amqp_channel.wait_for_confirms(chan, timeout) do
    true -> :ok
    false -> {:error, :nack}
    :timeout -> {:error, :timeout}
  end
end
@doc """
Redeliver all currently unacknowledged messages.

## Options

  * `requeue: <boolean>` - when `false` (default), the messages will be
    redelivered to the original consumer; when `true`, the messages will be
    put back into the queue and potentially be delivered to another consumer
    of that queue
"""
@spec recover(chan) :: :ok
@spec recover(chan, Keyword.t) :: :ok
def recover(chan, options \\ []) do
  # Asserts the broker replied basic.recover-ok; raises MatchError otherwise.
  basic_recover_ok() = :amqp_channel.call(chan, basic_recover(requeue: Keyword.get(options, :requeue, false)))
  :ok
end
@doc """
Commit the transaction currently open on this channel (requires tx-mode,
see `set_mode/2`).

See http://www.rabbitmq.com/amqp-0-9-1-reference.html#tx.commit for details.
"""
@spec commit(chan) :: :ok
def commit(chan) do
  reply = :amqp_channel.call(chan, tx_commit())
  # Anything other than tx.commit-ok raises a MatchError.
  tx_commit_ok() = reply
  :ok
end
@doc """
Roll back the transaction currently open on this channel (requires tx-mode,
see `set_mode/2`).

See http://www.rabbitmq.com/amqp-0-9-1-reference.html#tx.rollback for details.
"""
@spec rollback(chan) :: :ok
def rollback(chan) do
  reply = :amqp_channel.call(chan, tx_rollback())
  # Anything other than tx.rollback-ok raises a MatchError.
  tx_rollback_ok() = reply
  :ok
end
@doc """
Delete an exchange.

## Options

  * `if_unused: <boolean>` - only delete the exchange if it has no queue
    bindings
"""
@spec exchange_delete(chan, binary) :: :ok
@spec exchange_delete(chan, binary, Keyword.t) :: :ok
def exchange_delete(chan, name, options \\ []) when is_binary(name) do
  # NOTE: `exchange_delete(...)` below is the arity-1 record macro brought in
  # by `use Exrabbit.Records`, NOT a recursive call — this function only has
  # arities 2 and 3, so the imported macro is not shadowed for arity 1.
  method = exchange_delete(
    exchange: name,
    if_unused: Keyword.get(options, :if_unused, false),
  )
  # Asserts the broker replied exchange.delete-ok; raises MatchError otherwise.
  exchange_delete_ok() = :amqp_channel.call(chan, method)
  :ok
end
@doc """
Remove all messages from a queue.

Returns how many messages were purged.
"""
@spec queue_purge(chan, binary) :: non_neg_integer
def queue_purge(chan, name) when is_binary(name) do
  # `queue_purge(queue: name)` is the arity-1 record macro from
  # Exrabbit.Records, not a recursive call (this function is arity 2).
  reply = :amqp_channel.call(chan, queue_purge(queue: name))
  queue_purge_ok(message_count: count) = reply
  count
end
@doc """
Delete a queue.

Returns the number of messages it contained.

## Options

  * `if_unused: <boolean>` - only delete the queue if it has no consumers
    (this options doesn't seem to work in the underlying Erlang client)
  * `if_empty: <boolean>` - only delete the queue if it has no messages
"""
@spec queue_delete(chan, binary) :: non_neg_integer
@spec queue_delete(chan, binary, Keyword.t) :: non_neg_integer
def queue_delete(chan, name, options \\ []) when is_binary(name) do
  # `queue_delete(...)` below is the arity-1 record macro from
  # Exrabbit.Records, not a recursive call (this function has arities 2/3).
  method = queue_delete(
    queue: name,
    if_unused: Keyword.get(options, :if_unused, false),
    if_empty: Keyword.get(options, :if_empty, false),
  )
  # Asserts queue.delete-ok and extracts the purged-message count.
  queue_delete_ok(message_count: cnt) = :amqp_channel.call(chan, method)
  cnt
end
# Default timeout (ms) for await_confirms/1, read from the :exrabbit app env
# key :confirm_timeout at call time; falls back to 15000.
defp confirm_timeout, do: Application.get_env(:exrabbit, :confirm_timeout, 15000)
end
| 26.309623 | 112 | 0.65633 |
ff6eeb5a3261ab35868e10d23bf869cb79a81535 | 3,366 | ex | Elixir | clients/data_catalog/lib/google_api/data_catalog/v1beta1/model/google_cloud_datacatalog_v1beta1_tag_template.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/data_catalog/lib/google_api/data_catalog/v1beta1/model/google_cloud_datacatalog_v1beta1_tag_template.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/data_catalog/lib/google_api/data_catalog/v1beta1/model/google_cloud_datacatalog_v1beta1_tag_template.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1TagTemplate do
@moduledoc """
A tag template defines a tag, which can have one or more typed fields.
The template is used to create and attach the tag to GCP resources.
[Tag template
roles](https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles)
provide permissions to create, edit, and use the template. See, for example,
the [TagTemplate
User](https://cloud.google.com/data-catalog/docs/how-to/template-user) role,
which includes permission to use the tag template to tag resources.
## Attributes
* `displayName` (*type:* `String.t`, *default:* `nil`) - The display name for this template. Defaults to an empty string.
* `fields` (*type:* `%{optional(String.t) => GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1TagTemplateField.t}`, *default:* `nil`) - Required. Map of tag template field IDs to the settings for the field.
This map is an exhaustive list of the allowed fields. This map must contain
at least one field and at most 500 fields.
The keys to this map are tag template field IDs. Field IDs can contain
letters (both uppercase and lowercase), numbers (0-9) and underscores (_).
Field IDs must be at least 1 character long and at most
64 characters long. Field IDs must start with a letter or underscore.
* `name` (*type:* `String.t`, *default:* `nil`) - The resource name of the tag template in URL format. Example:
* projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}
Note that this TagTemplate and its child resources may not actually be
stored in the location in this name.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:displayName => String.t(),
:fields => %{
optional(String.t()) =>
GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1TagTemplateField.t()
},
:name => String.t()
}
field(:displayName)
field(:fields,
as: GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1TagTemplateField,
type: :map
)
field(:name)
end
defimpl Poison.Decoder,
for: GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1TagTemplate do
def decode(value, options) do
GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1TagTemplate.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1TagTemplate do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.6 | 226 | 0.728758 |
ff6efc6cab4e9419d39f39cfdcda1a61defafcc4 | 87 | ex | Elixir | lib/hierbautberlin_web/views/user_registration_view.ex | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 13 | 2021-03-06T12:16:34.000Z | 2022-03-31T09:46:35.000Z | lib/hierbautberlin_web/views/user_registration_view.ex | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 148 | 2021-03-05T12:44:55.000Z | 2022-03-11T12:09:06.000Z | lib/hierbautberlin_web/views/user_registration_view.ex | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 2 | 2021-06-02T14:31:21.000Z | 2022-02-14T08:36:51.000Z | defmodule HierbautberlinWeb.UserRegistrationView do
use HierbautberlinWeb, :view
end
| 21.75 | 51 | 0.862069 |
ff6f43cd40bd8091039ee99a826875e954f99566 | 702 | exs | Elixir | test/remote/show_gateway_test.exs | fanhero/spreedly-elixir | ee65326a2c7ffccd4683b1be754e4a7db5857ab3 | [
"MIT"
] | 8 | 2018-03-09T18:12:26.000Z | 2020-08-25T02:29:12.000Z | test/remote/show_gateway_test.exs | fanhero/spreedly-elixir | ee65326a2c7ffccd4683b1be754e4a7db5857ab3 | [
"MIT"
] | 17 | 2017-01-05T17:14:51.000Z | 2020-02-04T14:48:02.000Z | test/remote/show_gateway_test.exs | fanhero/spreedly-elixir | ee65326a2c7ffccd4683b1be754e4a7db5857ab3 | [
"MIT"
] | 5 | 2017-05-01T18:23:54.000Z | 2021-03-19T01:34:26.000Z | defmodule Remote.ShowGatewayTest do
use Remote.Environment.Case
test "invalid credentials" do
bogus_env = Environment.new("invalid", "credentials")
{:error, reason} = Spreedly.show_gateway(bogus_env, "SomeToken")
assert reason =~ "Unable to authenticate"
end
test "non existent gateway" do
{:error, reason} = Spreedly.show_gateway(env(), "NonExistentToken")
assert reason =~ "Unable to find the specified gateway"
end
test "show gateway" do
token = create_test_gateway().token
{:ok, gateway} = Spreedly.show_gateway(env(), token)
assert gateway.gateway_type == "test"
assert gateway.name == "Spreedly Test"
assert gateway.token == token
end
end
| 30.521739 | 71 | 0.705128 |
ff6f681c8d2c8e6f089fa061a0cf8e039ecfafa3 | 14,750 | ex | Elixir | lib/mongo/bulk_write.ex | reetou/elixir-mongodb-driver | b17b8fa337a3fdea783bbb991984f93a4848b652 | [
"Apache-2.0"
] | null | null | null | lib/mongo/bulk_write.ex | reetou/elixir-mongodb-driver | b17b8fa337a3fdea783bbb991984f93a4848b652 | [
"Apache-2.0"
] | null | null | null | lib/mongo/bulk_write.ex | reetou/elixir-mongodb-driver | b17b8fa337a3fdea783bbb991984f93a4848b652 | [
"Apache-2.0"
] | null | null | null | defmodule Mongo.BulkWrite do
@moduledoc """
The driver supports the so-called bulk writes ([Specification](https://github.com/mongodb/specifications/blob/master/source/crud/crud.rst#basic)):
The motivation for bulk writes lies in the possibility of optimizing to group the same operations. The driver supports
* unordered and ordered bulk writes
* in-memory and stream bulk writes
## Unordered bulk writes
Unordered bulk writes have the highest optimization factor. Here all operations can be divided into
three groups (inserts, updates and deletes).
The order of execution within a group does not matter. However, the groups are executed in the
order: inserts, updates and deletes. The following example creates three records, changes them, and then
deletes all records. After execution, the collection is unchanged. It's valid, because of the execution order:
1. inserts
2. updates
3. deletes
## Example:
```
alias Mongo.BulkWrite
alias Mongo.UnorderedBulk
bulk = "bulk"
|> UnorderedBulk.new()
|> UnorderedBulk.insert_one(%{name: "Greta"})
|> UnorderedBulk.insert_one(%{name: "Tom"})
|> UnorderedBulk.insert_one(%{name: "Waldo"})
|> UnorderedBulk.update_one(%{name: "Greta"}, %{"$set": %{kind: "dog"}})
|> UnorderedBulk.update_one(%{name: "Tom"}, %{"$set": %{kind: "dog"}})
|> UnorderedBulk.update_one(%{name: "Waldo"}, %{"$set": %{kind: "dog"}})
|> UnorderedBulk.delete_one(%{kind: "dog"})
|> UnorderedBulk.delete_one(%{kind: "dog"})
|> UnorderedBulk.delete_one(%{kind: "dog"})
result = BulkWrite.write(:mongo, bulk, w: 1)
```
## Ordered bulk writes
Sometimes the order of execution is important for successive operations to yield a correct result.
In this case, one uses ordered bulk writes. The following example would not work with unordered bulk writes
  because the order within the update operations is undefined. The `update_many()` will only work if it is
executed after the `update_one()` functions.
```
bulk = "bulk"
|> OrderedBulk.new()
|> OrderedBulk.insert_one(%{name: "Greta"})
|> OrderedBulk.insert_one(%{name: "Tom"})
|> OrderedBulk.insert_one(%{name: "Waldo"})
|> OrderedBulk.update_one(%{name: "Greta"}, %{"$set": %{kind: "dog"}})
|> OrderedBulk.update_one(%{name: "Tom"}, %{"$set": %{kind: "dog"}})
|> OrderedBulk.update_one(%{name: "Waldo"}, %{"$set": %{kind: "dog"}})
|> OrderedBulk.update_many(%{kind: "dog"}, %{"$set": %{kind: "cat"}})
|> OrderedBulk.delete_one(%{kind: "cat"})
|> OrderedBulk.delete_one(%{kind: "cat"})
|> OrderedBulk.delete_one(%{kind: "cat"})
result = BulkWrite.write(:mongo, bulk, w: 1)
```
## Stream bulk writes
The examples shown initially filled the bulk with a few operations and then the bulk is written to the database.
This is all done in memory. For larger amounts of operations or imports of very long files, the main memory would
be unnecessarily burdened. It could come to some resource problems.
For such cases you could use streams. Unordered and ordered bulk writes can also be combined with Streams.
You set the maximum size of the bulk. Once the number of bulk operations has been reached,
it will be sent to the database. While streaming you can limit the memory consumption regarding the current task.
In the following example we import 1.000.000 integers into the MongoDB using the stream api:
  We need to create an insert operation (`BulkOps.get_insert_one()`) for each number. Then we call the `UnorderedBulk.write`
  function to import it. This function returns a stream that accumulates
  insert operations until the limit of `1000` is reached; each full group of operations is then written to
MongoDB.
## Example
```
1..1_000_000
|> Stream.map(fn i -> BulkOps.get_insert_one(%{number: i}) end)
|> UnorderedBulk.write(:mongo, "bulk", 1_000)
|> Stream.run()
```
## Benchmark
The following benchmark compares multiple `Mongo.insert_one()` calls with a stream using unordered bulk writes.
Both tests inserts documents into a replica set with `w: 1`.
```
Benchee.run(
%{
"inserts" => fn input ->
input
|> Enum.map(fn i -> %{number: i} end)
|> Enum.each(fn doc -> Mongo.insert_one!(top, "bulk_insert", doc) end)
end,
"streams" => fn input ->
input
|> Stream.map(fn i -> get_insert_one(%{number: i}) end)
|> Mongo.UnorderedBulk.write(top, "bulk", 1_0000)
|> Stream.run()
end,
},
inputs: %{
"Small" => Enum.to_list(1..10_000),
"Medium" => Enum.to_list(1..100_000),
"Bigger" => Enum.to_list(1..1_000_000)
}
)
```
Result:
```
##### With input Bigger #####
Name ips average deviation median 99th %
streams 0.0885 0.188 min ±0.00% 0.188 min 0.188 min
inserts 0.00777 2.14 min ±0.00% 2.14 min 2.14 min
Comparison:
streams 0.0885
inserts 0.00777 - 11.39x slower +1.96 min
##### With input Medium #####
Name ips average deviation median 99th %
streams 1.00 1.00 s ±8.98% 0.99 s 1.12 s
inserts 0.0764 13.09 s ±0.00% 13.09 s 13.09 s
Comparison:
streams 1.00
inserts 0.0764 - 13.12x slower +12.10 s
##### With input Small #####
Name ips average deviation median 99th %
streams 8.26 0.121 s ±30.46% 0.112 s 0.23 s
inserts 0.75 1.34 s ±7.15% 1.29 s 1.48 s
Comparison:
streams 8.26
inserts 0.75 - 11.07x slower +1.22 s
```
  The result is that using bulk writes is much faster (roughly 11x faster overall in these runs).
"""
import Keywords
import Mongo.Utils
import Mongo.WriteConcern
alias Mongo.UnorderedBulk
alias Mongo.OrderedBulk
alias Mongo.BulkWriteResult
alias Mongo.Session
@doc """
Executes unordered and ordered bulk writes.
## Unordered bulk writes
The operation are grouped (inserts, updates, deletes). The order of execution is:
1. inserts
2. updates
3. deletes
The execution order within the group is not preserved.
## Ordered bulk writes
Sequences of the same operations are grouped and sent as one command. The order is preserved.
If a group (inserts, updates or deletes) exceeds the limit `maxWriteBatchSize` it will be split into chunks.
Everything is done in memory, so this use case is limited by memory. A better approach seems to use streaming bulk writes.
"""
@spec write(GenServer.server, (UnorderedBulk.t | OrderedBulk.t), Keyword.t) :: Mongo.BulkWriteResult.t
def write(topology_pid, %UnorderedBulk{} = bulk, opts) do
with {:ok, session} <- Session.start_implicit_session(topology_pid, :write, opts),
result = one_bulk_write(topology_pid, session, bulk, opts),
:ok <- Session.end_implict_session(topology_pid, session) do
result
else
{:new_connection, _server} ->
:timer.sleep(1000)
write(topology_pid, bulk, opts)
end
end
def write(topology_pid, %OrderedBulk{coll: coll, ops: ops} = bulk, opts) do
write_concern = write_concern(opts)
empty = %BulkWriteResult{acknowledged: acknowledged?(write_concern)}
with {:ok, session} <- Session.start_implicit_session(topology_pid, :write, opts),
{:ok, limits} <- Mongo.limits(topology_pid),
max_batch_size <- limits.max_write_batch_size,
result = ops
|> get_op_sequence()
|> Enum.map(fn {cmd, docs} -> one_bulk_write_operation(session, cmd, coll, docs, max_batch_size, opts) end)
|> BulkWriteResult.reduce(empty) do
result
else
{:new_connection, _server} ->
:timer.sleep(1000)
write(topology_pid, bulk, opts)
end
end
##
# Executes one unordered bulk write. The execution order of operation groups is
#
# * inserts
# * updates
# * deletes
#
# The function returns a keyword list with the results of each operation group:
# For the details see https://github.com/mongodb/specifications/blob/master/source/crud/crud.rst#results
#
defp one_bulk_write(topology_pid, session, %UnorderedBulk{coll: coll, inserts: inserts, updates: updates, deletes: deletes}, opts) do
with {:ok, limits} <- Mongo.limits(topology_pid),
max_batch_size <- limits.max_write_batch_size,
insert_result <- one_bulk_write_operation(session, :insert, coll, inserts, max_batch_size, opts),
update_result <- one_bulk_write_operation(session, :update, coll, updates, max_batch_size, opts),
delete_result <- one_bulk_write_operation(session, :delete, coll, deletes, max_batch_size, opts) do
[insert_result, update_result, delete_result]
|> BulkWriteResult.reduce(%BulkWriteResult{acknowledged: acknowledged?(opts)})
end
end
###
# Executes the command `cmd` and collects the result.
#
defp one_bulk_write_operation(session, cmd, coll, docs, max_batch_size, opts) do
with result <- session |> run_commands(get_cmds(cmd, coll, docs, max_batch_size, opts), opts) |> collect(cmd) do
result
end
end
##
# Converts the list of operations into insert/update/delete commands
#
defp get_cmds(:insert, coll, docs, max_batch_size, opts), do: get_insert_cmds(coll, docs, max_batch_size, opts)
defp get_cmds(:update, coll, docs, max_batch_size, opts), do: get_update_cmds(coll, docs, max_batch_size, opts)
defp get_cmds(:delete, coll, docs, max_batch_size, opts), do: get_delete_cmds(coll, docs, max_batch_size, opts)
###
# Converts the list of operations into list of lists with same operations.
#
# [inserts, inserts, updates] -> [[inserts, inserts],[updates]]
#
defp get_op_sequence(ops) do
get_op_sequence(ops, [])
end
defp get_op_sequence([], acc), do: acc
defp get_op_sequence(ops, acc) do
[{kind, _doc} | _rest] = ops
{docs, rest} = find_max_sequence(kind, ops)
get_op_sequence(rest, [{kind, docs} | acc])
end
###
# Splits the sequence of operations into two parts
# 1) sequence of operations of kind `kind`
# 2) rest of operations
#
defp find_max_sequence(kind, rest) do
find_max_sequence(kind, rest, [])
end
defp find_max_sequence(_kind, [], acc) do
{acc, []}
end
defp find_max_sequence(kind, [{other, desc} | rest], acc) when kind == other do
find_max_sequence(kind, rest, [desc | acc])
end
defp find_max_sequence(_kind, rest, acc) do
{acc, rest}
end
##
# collects the returns values for each operation
#
# the update operation is more complex than insert or delete operation
#
defp collect({docs, ids}, :insert) do
docs
|> Enum.map(fn
{:ok, %{"n" => n} = doc} -> BulkWriteResult.insert_result(n, ids, doc["writeErrors"] || [])
{:ok, _other} -> BulkWriteResult.empty()
{:error, reason} -> BulkWriteResult.error(reason)
end)
|> BulkWriteResult.reduce()
end
defp collect(docs, :update) do
docs
|> Enum.map(fn
{:ok, %{"n" => n, "nModified" => modified, "upserted" => ids} = doc} -> l = length(ids)
BulkWriteResult.update_result(n - l, modified, l, filter_upsert_ids(ids), doc["writeErrors"] || [])
{:ok, %{"n" => matched, "nModified" => modified} = doc} -> BulkWriteResult.update_result(matched, modified, 0, [], doc["writeErrors"] || [])
{:ok, _other} -> BulkWriteResult.empty()
{:error, reason} -> BulkWriteResult.error(reason)
end)
|> BulkWriteResult.reduce()
end
defp collect(docs, :delete) do
docs
|> Enum.map(fn
{:ok, %{"n" => n} = doc } -> BulkWriteResult.delete_result(n, doc["writeErrors"] || [])
{:ok, _other} -> BulkWriteResult.empty()
{:error, reason} -> BulkWriteResult.error(reason)
end)
|> BulkWriteResult.reduce()
end
defp filter_upsert_ids(nil), do: []
defp filter_upsert_ids(upserted), do: Enum.map(upserted, fn doc -> doc["_id"] end)
defp run_commands(session, {cmds, ids}, opts) do
{Enum.map(cmds, fn cmd -> Mongo.exec_command_session(session, cmd, opts) end), ids}
end
defp run_commands(session, cmds, opts) do
Enum.map(cmds, fn cmd -> Mongo.exec_command_session(session, cmd, opts) end)
end
defp get_insert_cmds(coll, docs, max_batch_size, opts) do
{ids, docs} = assign_ids(docs)
cmds = docs
|> Enum.chunk_every(max_batch_size)
|> Enum.map(fn inserts -> get_insert_cmd(coll, inserts, opts) end)
{cmds, ids}
end
defp get_insert_cmd(coll, inserts, opts) do
[insert: coll,
documents: inserts,
writeConcern: write_concern(opts)] |> filter_nils()
end
defp get_delete_cmds(coll, docs, max_batch_size, opts) do
docs
|> Enum.chunk_every(max_batch_size)
|> Enum.map(fn deletes -> get_delete_cmd(coll, deletes, opts) end)
end
defp get_delete_cmd(coll, deletes, opts ) do
[delete: coll,
deletes: Enum.map(deletes, fn delete -> get_delete_doc(delete) end),
ordered: Keyword.get(opts, :ordered),
writeConcern: write_concern(opts)] |> filter_nils()
end
defp get_delete_doc({filter, opts}) do
[q: filter,
limit: Keyword.get(opts, :limit),
collation: Keyword.get(opts, :collaction)] |> filter_nils()
end
defp get_update_cmds(coll, docs, max_batch_size, opts) do
docs
|> Enum.chunk_every(max_batch_size)
|> Enum.map(fn updates -> get_update_cmd(coll, updates, opts) end)
end
defp get_update_cmd(coll, updates, opts) do
[ update: coll,
updates: Enum.map(updates, fn update -> get_update_doc(update) end),
ordered: Keyword.get(opts, :ordered),
writeConcern: write_concern(opts),
bypassDocumentValidation: Keyword.get(opts, :bypass_document_validation)
] |> filter_nils()
end
defp get_update_doc({filter, update, update_opts}) do
[ q: filter,
u: update,
upsert: Keyword.get(update_opts, :upsert),
multi: Keyword.get(update_opts, :multi) || false,
collation: Keyword.get(update_opts, :collation),
arrayFilters: Keyword.get(update_opts, :filters)
] |> filter_nils()
end
end
| 34.869976 | 177 | 0.636542 |
ff6f8158d2cbcef36267de148e4eb8cfc14c31ac | 1,476 | exs | Elixir | mix.exs | mhanberg/req | b397a0aad1e8314350b222aa04a81cc4e1e2f24b | [
"Apache-2.0"
] | null | null | null | mix.exs | mhanberg/req | b397a0aad1e8314350b222aa04a81cc4e1e2f24b | [
"Apache-2.0"
] | null | null | null | mix.exs | mhanberg/req | b397a0aad1e8314350b222aa04a81cc4e1e2f24b | [
"Apache-2.0"
] | null | null | null | defmodule Req.MixProject do
use Mix.Project
@version "0.1.1"
@source_url "https://github.com/wojtekmach/req"
  # Core project definition; docs/package metadata live in the helpers below.
  def project do
    [
      app: :req,
      version: @version,
      elixir: "~> 1.11",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      package: package(),
      docs: docs(),
      # NimbleCSV is optional, so its modules are excluded from xref checks.
      xref: [
        exclude: [
          NimbleCSV.RFC4180
        ]
      ]
    ]
  end
  # OTP application callback module plus extra applications to start.
  def application do
    [
      mod: {Req.Application, []},
      extra_applications: [:logger]
    ]
  end
  # Hex package metadata.
  defp package do
    [
      description:
        "Req is an HTTP client with a focus on ease of use and composability, built on top of Finch.",
      licenses: ["Apache-2.0"],
      links: %{
        "GitHub" => @source_url
      }
    ]
  end
  # Runtime deps plus test/docs-only tooling.
  defp deps do
    [
      {:finch, "~> 0.6.0"},
      {:mime, "~> 1.6 or ~> 2.0"},
      {:jason, "~> 1.0"},
      {:nimble_csv, "~> 1.0", optional: true},
      {:bypass, "~> 2.1", only: :test},
      {:ex_doc, ">= 0.0.0", only: :docs}
    ]
  end
  # ExDoc configuration; functions are grouped by their :api doc metadata.
  defp docs do
    [
      main: "Req",
      source_url: @source_url,
      source_ref: "v#{@version}",
      groups_for_functions: [
        "High-level API": &(&1[:api] == :high_level),
        "Low-level API": &(&1[:api] == :low_level),
        "Request steps": &(&1[:api] == :request),
        "Response steps": &(&1[:api] == :response),
        "Error steps": &(&1[:api] == :error)
      ],
      extras: [
        "CHANGELOG.md"
      ]
    ]
  end
end
| 20.788732 | 102 | 0.484417 |
ff6f99e97eaa2e5dc68ca26f61743d4f976e6cb2 | 65 | exs | Elixir | config/config.exs | pfcs/mailchimp | 3c7c2ea201259378aafb28c66e37d93adef70d17 | [
"MIT"
] | 1 | 2020-07-17T02:52:34.000Z | 2020-07-17T02:52:34.000Z | config/config.exs | pfcs/mailchimp | 3c7c2ea201259378aafb28c66e37d93adef70d17 | [
"MIT"
] | 1 | 2020-01-09T18:59:39.000Z | 2020-01-09T18:59:39.000Z | config/config.exs | ScriptDrop/mailchimp | efe6b240c81034d5a2e8cb328fa91f049782def5 | [
"MIT"
] | 1 | 2019-09-10T10:19:10.000Z | 2019-09-10T10:19:10.000Z | use Mix.Config
# NOTE(review): placeholder credentials — replace with a real Mailchimp API
# key (ideally supplied via environment-specific config), not this literal.
config :mailchimp,
  api_key: "your apikey-us12"
| 13 | 29 | 0.738462 |
ff6fa4fb4350ed056df9916afc135da8d1150736 | 393 | exs | Elixir | apps/bytepack/priv/repo/migrations/20200409054958_create_orgs_memberships.exs | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 313 | 2020-12-03T17:26:24.000Z | 2022-03-18T09:05:14.000Z | apps/bytepack/priv/repo/migrations/20200409054958_create_orgs_memberships.exs | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | null | null | null | apps/bytepack/priv/repo/migrations/20200409054958_create_orgs_memberships.exs | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 57 | 2020-12-03T17:41:53.000Z | 2022-03-17T17:28:16.000Z | defmodule Bytepack.Repo.Migrations.CreateOrgsMemberships do
use Ecto.Migration
  # Creates the join table linking users (members) to orgs.
  def change do
    create table(:orgs_memberships) do
      # Memberships are removed automatically when the org or user is deleted.
      add :org_id, references(:orgs, on_delete: :delete_all), null: false
      add :member_id, references(:users, on_delete: :delete_all), null: false
      timestamps()
    end
    # A user can belong to a given org at most once.
    create unique_index(:orgs_memberships, [:org_id, :member_id])
  end
end
| 28.071429 | 77 | 0.722646 |
ff6fb9919473bf0848d12db606d5385986f3dd81 | 2,851 | ex | Elixir | lib/chat_api_web/views/conversation_view.ex | Blazt0/papercups | 5996b268f1d52e4463d546dcc458f9ecd0a7ffcd | [
"MIT"
] | 1 | 2021-08-02T07:59:41.000Z | 2021-08-02T07:59:41.000Z | lib/chat_api_web/views/conversation_view.ex | Blazt0/papercups | 5996b268f1d52e4463d546dcc458f9ecd0a7ffcd | [
"MIT"
] | null | null | null | lib/chat_api_web/views/conversation_view.ex | Blazt0/papercups | 5996b268f1d52e4463d546dcc458f9ecd0a7ffcd | [
"MIT"
] | null | null | null | defmodule ChatApiWeb.ConversationView do
use ChatApiWeb, :view
alias ChatApiWeb.{
ConversationView,
MentionView,
MessageView,
CustomerView,
TagView
}
def render("index.json", %{conversations: conversations, pagination: pagination}) do
%{
data: render_many(conversations, ConversationView, "expanded.json"),
next: pagination.after,
previous: pagination.before,
limit: pagination.limit,
total: pagination.total_count
}
end
def render("index.json", %{conversations: conversations}) do
%{data: render_many(conversations, ConversationView, "expanded.json")}
end
def render("create.json", %{conversation: conversation}) do
%{data: render_one(conversation, ConversationView, "basic.json")}
end
def render("update.json", %{conversation: conversation}) do
%{data: render_one(conversation, ConversationView, "basic.json")}
end
def render("show.json", %{conversation: conversation}) do
%{data: render_one(conversation, ConversationView, "expanded.json")}
end
def render("basic.json", %{conversation: conversation}) do
%{
id: conversation.id,
object: "conversation",
source: conversation.source,
created_at: conversation.inserted_at,
closed_at: conversation.closed_at,
last_activity_at: conversation.last_activity_at,
status: conversation.status,
read: conversation.read,
priority: conversation.priority,
subject: conversation.subject,
account_id: conversation.account_id,
customer_id: conversation.customer_id,
assignee_id: conversation.assignee_id,
metadata: conversation.metadata
}
end
def render("expanded.json", %{conversation: conversation}) do
%{
id: conversation.id,
object: "conversation",
source: conversation.source,
created_at: conversation.inserted_at,
closed_at: conversation.closed_at,
last_activity_at: conversation.last_activity_at,
status: conversation.status,
read: conversation.read,
priority: conversation.priority,
subject: conversation.subject,
account_id: conversation.account_id,
customer_id: conversation.customer_id,
assignee_id: conversation.assignee_id,
metadata: conversation.metadata,
customer: render_one(conversation.customer, CustomerView, "customer.json"),
messages: render_many(conversation.messages, MessageView, "expanded.json"),
tags: render_tags(conversation.tags),
mentions: render_mentions(conversation.mentions)
}
end
defp render_tags([_ | _] = tags) do
render_many(tags, TagView, "tag.json")
end
defp render_tags(_tags), do: []
defp render_mentions([_ | _] = mentions) do
render_many(mentions, MentionView, "mention.json")
end
defp render_mentions(_mentions), do: []
end
| 30.98913 | 86 | 0.703262 |
ff6fba386afa8583bf93e92b893accee09b33c6c | 1,131 | exs | Elixir | elixir/cached_fibonacci/calculator/config/config.exs | pylebecq/learning | 5ef7309c340a281badb48d3e9bd2bd261e168b75 | [
"MIT"
] | null | null | null | elixir/cached_fibonacci/calculator/config/config.exs | pylebecq/learning | 5ef7309c340a281badb48d3e9bd2bd261e168b75 | [
"MIT"
] | null | null | null | elixir/cached_fibonacci/calculator/config/config.exs | pylebecq/learning | 5ef7309c340a281badb48d3e9bd2bd261e168b75 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :calculator, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:calculator, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.483871 | 73 | 0.751547 |
ff6fba4617c6fb4f120ebd51e2846cabfd751274 | 631 | exs | Elixir | day03/mix.exs | bfollek/advent2018ex | d186976be414b8091677f2b9d9cab1adbf6eafe7 | [
"MIT"
] | null | null | null | day03/mix.exs | bfollek/advent2018ex | d186976be414b8091677f2b9d9cab1adbf6eafe7 | [
"MIT"
] | null | null | null | day03/mix.exs | bfollek/advent2018ex | d186976be414b8091677f2b9d9cab1adbf6eafe7 | [
"MIT"
] | null | null | null | defmodule Day03.MixProject do
use Mix.Project
  # Standard Mix project definition for the day03 application.
  def project do
    [
      app: :day03,
      version: "0.1.0",
      elixir: "~> 1.8",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end
  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end
  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      # Git dependency (not on Hex).
      {:private, git: "https://github.com/bfollek/private.git"}
      # {:dep_from_hexpm, "~> 0.3.0"},
      # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
    ]
  end
| 21.033333 | 87 | 0.580032 |
ff6fd2cc3b5dbc4dfc12a67fb204244860249612 | 1,167 | ex | Elixir | lib/hello_web/channels/user_socket.ex | mentos1386/ElixirHelloWorld | 84fe10c6ef394eb1a78b69ce33d5a82a6e7c1ac2 | [
"MIT"
] | 1 | 2018-01-12T20:21:56.000Z | 2018-01-12T20:21:56.000Z | hello/lib/hello_web/channels/user_socket.ex | nash-elixir/intro-to-phoenix | 46e78713a0e8dcd895ad4f22d75b40c55ae41435 | [
"MIT"
] | null | null | null | hello/lib/hello_web/channels/user_socket.ex | nash-elixir/intro-to-phoenix | 46e78713a0e8dcd895ad4f22d75b40c55ae41435 | [
"MIT"
] | null | null | null | defmodule HelloWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", HelloWeb.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
  # Accepts every connection: client params are ignored and no
  # authentication/verification is performed here (see notes above).
  def connect(_params, socket) do
    {:ok, socket}
  end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# HelloWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 30.710526 | 83 | 0.701799 |
ff6ff634a6e3abfbe0440687429a1324ef61303a | 357 | exs | Elixir | priv/repo/seeds.exs | ustrajunior/lv_template | 633c85d8c5810a130bbf24077845dda49e82ca3f | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | ustrajunior/lv_template | 633c85d8c5810a130bbf24077845dda49e82ca3f | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | ustrajunior/lv_template | 633c85d8c5810a130bbf24077845dda49e82ca3f | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# LvTemplate.Repo.insert!(%LvTemplate.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.75 | 61 | 0.711485 |
ff702736fce8a25428f75a4aad13c0ef1f58dad2 | 2,503 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/google_cloud_healthcare_v1beta1_fhir_rest_import_resources_error_details.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/google_cloud_healthcare_v1beta1_fhir_rest_import_resources_error_details.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/google_cloud_healthcare_v1beta1_fhir_rest_import_resources_error_details.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.GoogleCloudHealthcareV1beta1FhirRestImportResourcesErrorDetails do
  @moduledoc """
  Error response of importing resources.
  This structure is included in the
  error
  details to describe the detailed error
  after the operation finishes with some failure.
  ## Attributes
  *   `errorCount` (*type:* `String.t`, *default:* `nil`) - The number of resources that had errors.
  *   `fhirStore` (*type:* `String.t`, *default:* `nil`) - The name of the FHIR store where resources have been imported, in the
      format
      `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}`.
  *   `inputSize` (*type:* `String.t`, *default:* `nil`) - The total number of resources included in the source data. This is the sum
      of the success and error counts.
  *   `successCount` (*type:* `String.t`, *default:* `nil`) - The number of resources that have been imported.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :errorCount => String.t(),
          :fhirStore => String.t(),
          :inputSize => String.t(),
          :successCount => String.t()
        }
  # field/1 (from GoogleApi.Gax.ModelBase) registers each JSON key for
  # encoding/decoding; file is auto generated — see the header note.
  field(:errorCount)
  field(:fhirStore)
  field(:inputSize)
  field(:successCount)
end
defimpl Poison.Decoder,
  for:
    GoogleApi.HealthCare.V1beta1.Model.GoogleCloudHealthcareV1beta1FhirRestImportResourcesErrorDetails do
  # `@for` is the struct module this impl targets; decoding is delegated to
  # the generated decode/2 on that model module.
  def decode(value, options), do: @for.decode(value, options)
end
defimpl Poison.Encoder,
  for:
    GoogleApi.HealthCare.V1beta1.Model.GoogleCloudHealthcareV1beta1FhirRestImportResourcesErrorDetails do
  # Generic field-by-field encoding shared by all generated Gax models.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 35.757143 | 133 | 0.729125 |
ff7065c144e79756767a4eca9fc8b1b5971edc88 | 955 | exs | Elixir | test/ecto/integration/streaming_test.exs | mpope9/ecto_duckdb | 9cde0c76ac35cab65bf569bbfe90c1dd4999b94c | [
"MIT"
] | 2 | 2021-12-04T08:38:19.000Z | 2021-12-04T23:52:21.000Z | test/ecto/integration/streaming_test.exs | mpope9/ecto_duckdb | 9cde0c76ac35cab65bf569bbfe90c1dd4999b94c | [
"MIT"
] | null | null | null | test/ecto/integration/streaming_test.exs | mpope9/ecto_duckdb | 9cde0c76ac35cab65bf569bbfe90c1dd4999b94c | [
"MIT"
] | null | null | null | defmodule Ecto.Integration.StreamingTest do
use Ecto.Integration.Case
alias Ecto.Integration.TestRepo
alias EctoDuckDB.Integration.User
import Ecto.Query
test "handles streams properly" do
# TODO: We really need to get proper sandboxing in place
before_count = User |> select([u], u) |> TestRepo.all() |> Enum.count()
{:ok, _} = TestRepo.insert(User.changeset(%User{}, %{name: "Bill"}))
{:ok, _} = TestRepo.insert(User.changeset(%User{}, %{name: "Shannon"}))
{:ok, _} = TestRepo.insert(User.changeset(%User{}, %{name: "Tom"}))
{:ok, _} = TestRepo.insert(User.changeset(%User{}, %{name: "Tiffany"}))
{:ok, _} = TestRepo.insert(User.changeset(%User{}, %{name: "Dave"}))
{:ok, count} =
TestRepo.transaction(fn ->
User
|> select([u], u)
|> TestRepo.stream()
|> Enum.map(fn user -> user end)
|> Enum.count()
end)
assert 5 == count - before_count
end
end
| 30.806452 | 75 | 0.608377 |
ff70c446ea668e1f9f05fb93a434c0f5a0bb234a | 4,237 | exs | Elixir | test/commcare_api/patient_case_test.exs | RatioPBC/commcare_api | 94e6056521565a8d86269920a566a06cdf376645 | [
"Apache-2.0"
] | null | null | null | test/commcare_api/patient_case_test.exs | RatioPBC/commcare_api | 94e6056521565a8d86269920a566a06cdf376645 | [
"Apache-2.0"
] | null | null | null | test/commcare_api/patient_case_test.exs | RatioPBC/commcare_api | 94e6056521565a8d86269920a566a06cdf376645 | [
"Apache-2.0"
defmodule CommcareAPI.PatientCaseTest do
  use ExUnit.Case, async: true

  alias CommcareAPI.PatientCase

  # Loads a JSON fixture from test/fixtures/commcare and builds a
  # `PatientCase` from it, asserting that construction succeeds.
  # Extracted to remove the read/decode/new boilerplate repeated in every test.
  defp patient_case_fixture(filename) do
    json =
      ["test/fixtures/commcare", filename]
      |> Path.join()
      |> File.read!()
      |> Jason.decode!()

    {:ok, patient_case} = PatientCase.new(json)
    patient_case
  end

  describe "new/1" do
    test "it returns what questionnaire needs" do
      patient_case = patient_case_fixture("case-with-test-results-and-contacts.json")

      assert patient_case.case_id == "00000000-8434-4475-b111-bb3a902b398b"
      assert patient_case.date_tested == ~D[2020-05-13]
      assert patient_case.dob == ~D[1987-05-05]
      assert patient_case.domain == "ratio_pbc"
      assert patient_case.full_name == "Test JME3"
      assert patient_case.owner_id == "000000009299465ab175357b95b89e7c"
      assert patient_case.phone_home == "5035550100"

      # Only the presence/shape of the child cases matters, not extra keys.
      assert match?(
               %{
                 "00000000-c0f6-45bf-94a0-b858f59b48a7" => %{"case_id" => "00000000-c0f6-45bf-94a0-b858f59b48a7"},
                 "00000000-be32-49fc-ad5b-c6898afcf8aa" => %{"case_id" => "00000000-be32-49fc-ad5b-c6898afcf8aa"}
               },
               patient_case.child_cases
             )
    end

    test "it return what search needs" do
      patient_case = patient_case_fixture("case-with-test-results-and-contacts.json")

      assert patient_case.case_id == "00000000-8434-4475-b111-bb3a902b398b"
      assert patient_case.dob == ~D[1987-05-05]
      assert patient_case.domain == "ratio_pbc"
      assert patient_case.first_name == "Test"
      assert patient_case.last_name == "JME3"
      assert patient_case.owner_id == "000000009299465ab175357b95b89e7c"
      assert patient_case.phone_home == "5035550100"
      assert patient_case.street == "123 Main St"
      assert patient_case.city == "Test"
      assert patient_case.state == "NY"
      assert patient_case.zip_code == "90210"
      assert patient_case.case_type == "patient"
    end

    test "it return interviewee parent name info" do
      patient_case = patient_case_fixture("contact-with-interviewee-parent-name.json")

      assert patient_case.case_id == "00000000-eb0f-454c-ae1b-6da8ef431cfc"
      assert patient_case.domain == "ratio_pbc"
      assert patient_case.first_name == "Test"
      assert patient_case.last_name == "ParentGuardianTest"
      assert patient_case.case_type == "contact"
      assert patient_case.interviewee_parent_name == "Test ParentGuardianTest"
    end
  end

  describe "new/1, for bad lab_results," do
    test "returns nil for date_tested when specimen_collection_date is not present in the lab_result child case in the JSON" do
      patient_case = patient_case_fixture("case-without-date-tested.json")
      assert patient_case.date_tested == nil
    end

    test "returns nil for date_tested when there is no lab result child case in the JSON" do
      patient_case = patient_case_fixture("case-without-lab-result.json")
      assert patient_case.date_tested == nil
    end
  end

  describe "new/1, for bad dob," do
    test "returns nil for dob when dob is not present in the JSON" do
      patient_case = patient_case_fixture("case-with-blank-dob.json")
      assert patient_case.dob == nil
    end

    test "returns nil for dob when dob is an empty string in the JSON" do
      patient_case = patient_case_fixture("case-with-blank-dob.json")
      assert patient_case.dob == nil
    end

    test "returns nil for the dob when the field doesn't exist in the JSON" do
      patient_case = patient_case_fixture("case-without-dob-field.json")
      assert patient_case.dob == nil
    end
  end
end
| 46.054348 | 127 | 0.695067 |
ff70d401a0a49491bcf03ca29947e126df6197be | 396 | exs | Elixir | test/module_example_test.exs | koladilip/elixir-examples | 49553fe39bbff6e35d1cb2c26fcb6d87fe4e28d3 | [
"MIT"
] | null | null | null | test/module_example_test.exs | koladilip/elixir-examples | 49553fe39bbff6e35d1cb2c26fcb6d87fe4e28d3 | [
"MIT"
] | null | null | null | test/module_example_test.exs | koladilip/elixir-examples | 49553fe39bbff6e35d1cb2c26fcb6d87fe4e28d3 | [
"MIT"
] | null | null | null | defmodule ModuleExampleTest do
  use ExUnit.Case
  # Also runs any doctests embedded in ModuleExample's documentation.
  doctest ModuleExample

  # `name/0` is asserted to return the module's own atom.
  test "name of the module" do
    assert ModuleExample.name() == ModuleExample
  end

  # Each `child_moduleN/0` is asserted to return the matching nested module atom.
  test "name of the child module1" do
    assert ModuleExample.child_module1() == ModuleExample.ChildModule1
  end

  test "name of the child module2" do
    assert ModuleExample.child_module2() == ModuleExample.ChildModule2
  end
end
| 23.294118 | 70 | 0.752525 |
ff70e0b9821ecee4ada7e536fa6be3557c218d0d | 2,609 | ex | Elixir | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/billing.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/billing.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/billing.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.ServiceConsumerManagement.V1.Model.Billing do
  @moduledoc """
  Billing related configuration of the service.

  The following example shows how to configure monitored resources and metrics
  for billing:

      monitored_resources:
      - type: library.googleapis.com/branch
        labels:
        - key: /city
          description: The city where the library branch is located in.
        - key: /name
          description: The name of the branch.
      metrics:
      - name: library.googleapis.com/book/borrowed_count
        metric_kind: DELTA
        value_type: INT64
      billing:
        consumer_destinations:
        - monitored_resource: library.googleapis.com/branch
          metrics:
          - library.googleapis.com/book/borrowed_count

  ## Attributes

  *   `consumerDestinations` (*type:* `list(GoogleApi.ServiceConsumerManagement.V1.Model.BillingDestination.t)`, *default:* `nil`) - Billing configurations for sending metrics to the consumer project.
      There can be multiple consumer destinations per service, each one must have
      a different monitored resource type. A metric can be used in at most
      one consumer destination.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :consumerDestinations =>
            list(GoogleApi.ServiceConsumerManagement.V1.Model.BillingDestination.t())
        }

  # `field/3` is a GoogleApi.Gax.ModelBase macro declaring how this attribute
  # is (de)serialized: a JSON list of BillingDestination models.
  field(
    :consumerDestinations,
    as: GoogleApi.ServiceConsumerManagement.V1.Model.BillingDestination,
    type: :list
  )
end
defimpl Poison.Decoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.Billing do
  # Delegate decoding to the model's own `decode/2`, generated by
  # `GoogleApi.Gax.ModelBase` from the declared fields.
  def decode(struct, options),
    do: GoogleApi.ServiceConsumerManagement.V1.Model.Billing.decode(struct, options)
end
defimpl Poison.Encoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.Billing do
  # Encode via the shared Gax model encoder, which maps declared fields to JSON.
  def encode(struct, options), do: GoogleApi.Gax.ModelBase.encode(struct, options)
end
| 34.786667 | 200 | 0.728632 |
ff711bc2250b6d9271de7cf8ad96bf393b9729b9 | 1,316 | ex | Elixir | lib/dialyxir/warnings/contract_supertype.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | 1,455 | 2015-01-03T02:53:19.000Z | 2022-03-12T00:31:25.000Z | lib/dialyxir/warnings/contract_supertype.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
] | 330 | 2015-05-14T13:53:13.000Z | 2022-03-29T17:12:23.000Z | lib/dialyxir/warnings/contract_supertype.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | [
"Apache-2.0"
defmodule Dialyxir.Warnings.ContractSupertype do
  @moduledoc """
  The @spec, while not incorrect, is more general than the type
  returned by the function.

  ## Example

      defmodule Example do
        @spec ok() :: any
        def ok() do
          :ok
        end
      end
  """

  @behaviour Dialyxir.Warning

  # Atom identifying this warning class to the Dialyxir dispatcher.
  @impl Dialyxir.Warning
  @spec warning() :: :contract_supertype
  def warning, do: :contract_supertype

  # One-line summary; only the function name from the raw warning is used.
  @impl Dialyxir.Warning
  @spec format_short([String.t()]) :: String.t()
  def format_short([_module, function_name | _rest]) do
    "Type specification for #{function_name} is a supertype of the success typing."
  end

  # Detailed report contrasting the declared @spec with Dialyzer's inferred
  # success typing, both pretty-printed via Erlex.
  @impl Dialyxir.Warning
  @spec format_long([String.t()]) :: String.t()
  def format_long([module, function_name, arity, contract, signature]) do
    module_string = Erlex.pretty_print(module)
    contract_string = Erlex.pretty_print_contract(contract)
    signature_string = Erlex.pretty_print_contract(signature)

    """
    Type specification is a supertype of the success typing.

    Function:
    #{module_string}.#{function_name}/#{arity}

    Type specification:
    @spec #{function_name}#{contract_string}

    Success typing:
    @spec #{function_name}#{signature_string}
    """
  end

  # Long-form explanation shown to users; reuses the module docs verbatim,
  # so @moduledoc above is part of the runtime output and must not drift.
  @impl Dialyxir.Warning
  @spec explain() :: String.t()
  def explain do
    @moduledoc
  end
end
| 23.927273 | 78 | 0.672492 |
ff715797a48af2e8a57d1cffa58156192afd653e | 2,861 | exs | Elixir | test/choicest/core/core_test.exs | Cadiac/choicest | e3799e3b80024754c6babb561c1b14f916f92373 | [
"MIT"
] | 1 | 2018-07-01T08:18:20.000Z | 2018-07-01T08:18:20.000Z | test/choicest/core/core_test.exs | Cadiac/choicest | e3799e3b80024754c6babb561c1b14f916f92373 | [
"MIT"
] | null | null | null | test/choicest/core/core_test.exs | Cadiac/choicest | e3799e3b80024754c6babb561c1b14f916f92373 | [
"MIT"
] | null | null | null | defmodule Choicest.CoreTest do
  use Choicest.DataCase

  alias Choicest.Core

  describe "Core" do
    alias Choicest.Model.Collection

    # Shared attribute fixtures; string keys mirror incoming request params.
    @valid_attrs %{"description" => "some description", "name" => "some name", "voting_active" => true}
    @update_attrs %{"description" => "some updated description", "name" => "some updated name", "voting_active" => false}
    @invalid_attrs %{"description" => nil, "name" => nil, "voting_active" => nil}

    # Inserts a collection built from @valid_attrs merged with `attrs`.
    def collection_fixture(attrs \\ %{}) do
      {:ok, collection} =
        attrs
        |> Enum.into(@valid_attrs)
        |> Core.create_collection()

      collection
    end

    test "list_collections/0 returns all Core" do
      collection = collection_fixture()
      assert Core.list_collections() == [collection]
    end

    test "get_collection!/1 returns the collection with given id" do
      collection = collection_fixture()
      assert Core.get_collection!(collection.id) == collection
    end

    test "get_collection_by_slug!/1 returns the collection with given slug" do
      collection = collection_fixture()
      assert Core.get_collection_by_slug!(collection.slug) == collection
    end

    test "create_collection/1 with valid data creates a collection" do
      assert {:ok, %Collection{} = collection} = Core.create_collection(@valid_attrs)
      assert collection.description == @valid_attrs["description"]
      assert collection.name == @valid_attrs["name"]
      assert collection.voting_active == @valid_attrs["voting_active"]
    end

    test "create_collection/1 with invalid data returns error changeset" do
      assert {:error, %Ecto.Changeset{}} = Core.create_collection(@invalid_attrs)
    end

    test "update_collection/2 with valid data updates the collection" do
      collection = collection_fixture()
      assert {:ok, collection} = Core.update_collection(collection, @update_attrs)
      assert %Collection{} = collection
      assert collection.description == @update_attrs["description"]
      assert collection.name == @update_attrs["name"]
      assert collection.voting_active == @update_attrs["voting_active"]
    end

    test "update_collection/2 with invalid data returns error changeset" do
      collection = collection_fixture()
      assert {:error, %Ecto.Changeset{}} = Core.update_collection(collection, @invalid_attrs)
      # A failed update must leave the stored record unchanged.
      assert collection == Core.get_collection!(collection.id)
    end

    test "delete_collection/1 deletes the collection" do
      collection = collection_fixture()
      assert {:ok, %Collection{}} = Core.delete_collection(collection)
      assert_raise Ecto.NoResultsError, fn -> Core.get_collection!(collection.id) end
    end

    test "change_collection/1 returns a collection changeset" do
      collection = collection_fixture()
      assert %Ecto.Changeset{} = Core.change_collection(collection)
    end
  end
end
| 38.146667 | 121 | 0.701153 |
ff717adda5319801329e263d42a141d35601eb9d | 1,195 | exs | Elixir | config/config.exs | davidsulc/scraping_hub_ex | 78fbf8f4c18f8f3650148b9c8861f22856dbb8b1 | [
"Apache-2.0"
] | 2 | 2021-03-30T07:59:54.000Z | 2021-04-06T12:32:40.000Z | config/config.exs | davidsulc/scrapy_cloud_ex | 78fbf8f4c18f8f3650148b9c8861f22856dbb8b1 | [
"Apache-2.0"
] | null | null | null | config/config.exs | davidsulc/scrapy_cloud_ex | 78fbf8f4c18f8f3650148b9c8861f22856dbb8b1 | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE(review): `Mix.Config` is deprecated in recent Elixir releases in favor
# of `import Config` — worth migrating once the minimum Elixir version allows.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure your application as:
#
#     config :scrapy_cloud_ex, key: :value
#
# and access this configuration in your application as:
#
#     Application.get_env(:scrapy_cloud_ex, :key)
#
# You can also configure a 3rd-party app:
#
#     config :logger, level: :info
#

# Raise the logger level during tests only, suppressing debug-level noise.
if Mix.env() == :test, do: config(:logger, level: :info)

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env()}.exs"
ff7183d13aec5093ebd98887fb3623b381d79848 | 98 | ex | Elixir | lib/mao/repo.ex | levijcl/mao | aae70589605ada84ca571a51f5ee4377091e0eb3 | [
"MIT"
] | null | null | null | lib/mao/repo.ex | levijcl/mao | aae70589605ada84ca571a51f5ee4377091e0eb3 | [
"MIT"
] | null | null | null | lib/mao/repo.ex | levijcl/mao | aae70589605ada84ca571a51f5ee4377091e0eb3 | [
"MIT"
] | null | null | null | defmodule MAO.Repo do
  # Ecto repository for the :mao OTP application, backed by PostgreSQL.
  # `otp_app: :mao` tells Ecto which application's config holds the
  # connection settings for this repo.
  use Ecto.Repo,
    otp_app: :mao,
    adapter: Ecto.Adapters.Postgres
end
| 16.333333 | 35 | 0.704082 |
ff71e57d22f60093e8a949bca161b4bcb4347c12 | 1,238 | ex | Elixir | lib/es_client/driver.ex | tlux/es_client | 6336dfab012d2d7ed5339c84d2f6c5f298d79f28 | [
"MIT"
] | null | null | null | lib/es_client/driver.ex | tlux/es_client | 6336dfab012d2d7ed5339c84d2f6c5f298d79f28 | [
"MIT"
] | null | null | null | lib/es_client/driver.ex | tlux/es_client | 6336dfab012d2d7ed5339c84d2f6c5f298d79f28 | [
"MIT"
] | null | null | null | defmodule ESClient.Driver do
  @moduledoc """
  A behavior that must be implemented by custom Elasticsearch drivers. A driver
  sends a request to the Elasticsearch endpoint and retrieves the particular
  response.
  """

  @typedoc """
  A type representing request headers.
  """
  # Accepts keyword lists, tuple lists with string names, or maps.
  @type req_headers ::
          Keyword.t(binary)
          | [{String.t(), binary}]
          | %{optional(atom | String.t()) => binary}

  @typedoc """
  A type representing response headers.
  """
  @type resp_headers :: [{String.t(), binary}]

  @typedoc """
  A type representing successful response data.
  """
  # The `optional(atom) => any` clause leaves room for driver-specific extras.
  @type resp :: %{
          :status_code => non_neg_integer,
          :headers => resp_headers,
          :body => binary,
          optional(atom) => any
        }

  @typedoc """
  A type representing failed response data.
  """
  @type error :: %{:reason => any, optional(atom) => any}

  @doc """
  A callback for implementing an own function to send a request to a HTTP
  endpoint.
  """
  @callback request(
              verb :: ESClient.verb(),
              url :: URI.t(),
              body :: binary,
              headers :: req_headers,
              opts :: Keyword.t()
            ) :: {:ok, resp} | {:error, error}
end
| 25.791667 | 79 | 0.572698 |
ff71ee9b4e11bb0ec22f9a6bbd6246b0e9d2da04 | 13,846 | exs | Elixir | test/aws/request_test.exs | justinludwig/aws-elixir | c66dfebecec62587dada50602c31c76d307d812c | [
"Apache-2.0"
] | null | null | null | test/aws/request_test.exs | justinludwig/aws-elixir | c66dfebecec62587dada50602c31c76d307d812c | [
"Apache-2.0"
] | null | null | null | test/aws/request_test.exs | justinludwig/aws-elixir | c66dfebecec62587dada50602c31c76d307d812c | [
"Apache-2.0"
] | null | null | null | defmodule AWS.RequestTest do
  use ExUnit.Case, async: true

  alias AWS.Client
  alias AWS.Request

  test "sign_v4 extracts credentials, service and region information from a Client map, generates an AWS signature version 4 for a request, and returns a new set of HTTP headers with Authorization and X-Aws-Date headers" do
    client = %Client{
      access_key_id: "access-key-id",
      secret_access_key: "secret-access-key",
      region: "us-east-1",
      service: "ec2"
    }

    # Fixed timestamp keeps the derived signature deterministic.
    now = ~N[2015-05-14 16:50:05]
    method = "GET"
    url = "https://ec2.us-east-1.amazonaws.com?Action=DescribeInstances&Version=2014-10-01"
    headers = [{"Host", "ec2.us-east-1.amazonaws.com"}, {"Header", "Value"}]
    actual = Request.sign_v4(client, now, method, url, headers, "")

    expected = [
      {"Authorization",
       "AWS4-HMAC-SHA256 Credential=access-key-id/20150514/us-east-1/ec2/aws4_request, SignedHeaders=header;host;x-amz-content-sha256;x-amz-date, Signature=0d8cc4ca3b83dbd62a9b1870d04005dc03ab2fc5f6d1398513e197c04f5cbb53"},
      {"X-Amz-Content-SHA256",
       "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"},
      {"X-Amz-Date", "20150514T165005Z"},
      {"Host", "ec2.us-east-1.amazonaws.com"},
      {"Header", "Value"}
    ]

    assert expected == actual
  end

  # Regression test: header names differing only near the sort boundary must
  # still be canonicalized in the correct order before signing.
  test "sign_v4 generate signature with similar headers correctly (sorting problem)" do
    client = AWS.Client.create("my-access-key-id", "my-secret-access-key", "us-east-1")
    client = %{client | service: "s3"}
    host = "https://aws-beam-projects-test.s3.amazonaws.com"
    path = "/foo/my_important_file.txt.enc"

    {:ok, now, _} = DateTime.from_iso8601("2021-04-05T15:10:53Z")

    result =
      AWS.Request.sign_v4(
        client,
        now,
        :put,
        host <> path,
        [
          {"User-Agent", "aws-sdk-ruby3/3.113.1 ruby/2.7.2 x86_64-linux aws-sdk-s3/1.93.0"},
          {"X-Amz-Server-Side-Encryption-Customer-Algorithm", "AES256"},
          {"X-Amz-Server-Side-Encryption-Customer-Key",
           "TIjv09mJiv+331Evgfq8eONO2y/G4aztRqEeAwx9y2U="},
          {"X-Amz-Server-Side-Encryption-Customer-Key-Md5", "BaUscNABVnd0nRlQecUFPA=="},
          {"Content-Md5", "VDMfSlWzfS823+nFvkpWzg=="},
          {"Host", "aws-beam-projects-test.s3.amazonaws.com"}
        ],
        "My important file..."
      )

    {_name, auth_header} = List.keyfind(result, "Authorization", 0)

    assert auth_header ==
             "AWS4-HMAC-SHA256 Credential=my-access-key-id/20210405/us-east-1/s3/aws4_request, SignedHeaders=content-md5;host;user-agent;x-amz-content-sha256;x-amz-date;x-amz-server-side-encryption-customer-algorithm;x-amz-server-side-encryption-customer-key;x-amz-server-side-encryption-customer-key-md5, Signature=90865b5a1fb55c2766e0aff1d6ae1a8c72ab7e58471aa02204f31aecfacfaf58"
  end

  test "sign_v4_query returns a map with header/value pairs suitable for use in a query string" do
    client = %Client{
      access_key_id: "access-key-id",
      secret_access_key: "secret-access-key",
      region: "us-east-1",
      service: "ec2"
    }

    now = ~N[2015-05-14 16:50:05]
    method = "GET"
    url = "https://s3.us-east-1.amazonaws.com/bucket"
    headers = [{"Host", "ec2.us-east-1.amazonaws.com"}, {"X-Amz-Expires", "86400"}]
    actual = Request.sign_v4_query(client, now, method, url, headers, "")

    expected = [
      {"X-Amz-Expires", "86400"},
      {"X-Amz-Algorithm", "AWS4-HMAC-SHA256"},
      {"X-Amz-Credential", "access-key-id/20150514/us-east-1/ec2/aws4_request"},
      {"X-Amz-Date", "20150514T165005Z"},
      {"X-Amz-SignedHeaders", "host;x-amz-date;x-amz-expires"},
      {"X-Amz-Signature", "c16e00732fa6c75a2b4d88a5980e2050af10be730e98a9b5e0352f331c292874"}
    ]

    assert expected == actual
  end

  describe "request_rest/9" do
    # Stub HTTP client: records each request in the test process mailbox and
    # returns a canned JSON body; the status code is configurable through the
    # :return_status_code option.
    defmodule TestClient do
      @behaviour AWS.HTTPClient

      @impl true
      def request(method, url, body, headers, options) do
        send(self(), {:request, method, url, body, headers, options})
        {status, _opts} = Keyword.pop(options, :return_status_code, 200)
        {:ok, %{status_code: status, headers: [], body: "{\"Response\":\"foo\"}"}}
      end
    end

    setup do
      client =
        Client.create("access-key-id", "secret-access-key", "us-east1")
        |> Map.put(:http_client, {TestClient, []})

      metadata = %AWS.ServiceMetadata{
        api_version: "2014-06-05",
        content_type: "application/x-amz-json-1.1",
        endpoint_prefix: "mobileanalytics",
        global?: false,
        protocol: "rest-json",
        signature_version: "v4",
        signing_name: "mobileanalytics"
      }

      [client: client, metadata: metadata]
    end

    test "send post request", %{client: client, metadata: metadata} do
      assert {:ok, response, http_response} =
               Request.request_rest(
                 client,
                 metadata,
                 :post,
                 "/foo/bar",
                 [],
                 [],
                 %{"Body" => "data"},
                 [],
                 200
               )

      assert response == %{"Response" => "foo"}
      assert http_response == %{body: "{\"Response\":\"foo\"}", headers: [], status_code: 200}

      # Inspect the request as captured by TestClient.
      assert_receive {:request, :post, url, body, headers, options}

      assert url == "https://mobileanalytics.us-east1.amazonaws.com:443/foo/bar"
      assert body == "{\"Body\":\"data\"}"

      header_names = Enum.map(headers, fn {header_name, _} -> header_name end)

      assert Enum.sort(header_names) == [
               "Authorization",
               "Content-Type",
               "Host",
               "X-Amz-Content-SHA256",
               "X-Amz-Date"
             ]

      assert List.keyfind(headers, "Content-Type", 0) ==
               {"Content-Type", "application/x-amz-json-1.1"}

      assert List.keyfind(headers, "Host", 0) ==
               {"Host", "mobileanalytics.us-east1.amazonaws.com"}

      assert options == []
    end

    test "send post request with body as binary/payload", %{client: client, metadata: metadata} do
      assert {:ok, response, _http_response} =
               Request.request_rest(
                 client,
                 metadata,
                 :post,
                 "/foo/bar",
                 [],
                 [],
                 %{"Body" => "data"},
                 [send_body_as_binary?: true],
                 200
               )

      assert response == %{"Response" => "foo"}

      # With send_body_as_binary?, the "Body" value is sent raw, not as JSON.
      assert_receive {:request, :post, _url, body, _headers, options}

      assert body == "data"
      assert options == []
    end

    test "do not decode response when it is a binary", %{client: client, metadata: metadata} do
      assert {:ok, response, _http_response} =
               Request.request_rest(
                 client,
                 metadata,
                 :post,
                 "/foo/bar",
                 [],
                 [],
                 %{"Body" => "data"},
                 [receive_body_as_binary?: true],
                 200
               )

      # The raw JSON body is passed through untouched under "Body".
      assert response == %{"Body" => "{\"Response\":\"foo\"}"}

      assert_receive {:request, :post, _url, _body, _headers, options}
      assert options == []
    end

    test "accept success code other than 200", %{client: client, metadata: metadata} do
      {http_client, _opts} = client.http_client
      client = %{client | http_client: {http_client, [return_status_code: 206]}}

      # A nil expected status accepts any success code.
      assert {:ok, response, http_response} =
               Request.request_rest(
                 client,
                 metadata,
                 :post,
                 "/foo/bar",
                 [],
                 [],
                 %{"Body" => "data"},
                 [],
                 nil
               )

      assert response == %{"Response" => "foo"}
      assert http_response == %{body: "{\"Response\":\"foo\"}", headers: [], status_code: 206}

      assert {:ok, _response, _http_response} =
               Request.request_rest(
                 client,
                 metadata,
                 :post,
                 "/foo/bar",
                 [],
                 [],
                 %{"Body" => "data"},
                 [],
                 206
               )

      # Does not accept only if explicitly tells the expected code.
      assert {:error, error} =
               Request.request_rest(
                 client,
                 metadata,
                 :post,
                 "/foo/bar",
                 [],
                 [],
                 %{"Body" => "data"},
                 [],
                 200
               )

      assert {:unexpected_response,
              %{body: "{\"Response\":\"foo\"}", headers: [], status_code: 206}} = error
    end
  end
end
defmodule AWS.Request.InternalTest do
  use ExUnit.Case

  # Unit tests for the SigV4 helper functions in AWS.Request.Internal.
  alias AWS.Request.Internal

  test "add_authorization_header/2 add an Authorization header to a list of HTTP headers" do
    expected = [{"Authorization", "AWS4-HMAX-SHA256"}, {"Host", "example.com"}]

    assert expected ==
             Internal.add_authorization_header(
               [{"Host", "example.com"}],
               "AWS4-HMAX-SHA256"
             )
  end

  test "add_date_header/2 adds an X-Amz-Date header to a list of HTTP headers" do
    expected = [{"X-Amz-Date", "20150326T221217Z"}, {"Host", "example.com"}]

    assert expected ==
             Internal.add_date_header(
               [{"Host", "example.com"}],
               "20150326T221217Z"
             )
  end

  test "canonical_headers/1 returns a newline-delimited list of trimmed and lowecase headers, sorted in alphabetical order, and with a trailing newline" do
    expected = "host:example.com\nx-amz-date:20150325T105958Z\n"

    assert expected ==
             Internal.canonical_headers([
               {"X-Amz-Date", "20150325T105958Z"},
               {"Host", "example.com"}
             ])
  end

  test "canonical_request/4 converts an HTTP method, URL, headers and body into a canonical request for AWS signature version 4" do
    # The trailing hex string is SHA-256 of the empty body.
    expected =
      Enum.join(
        [
          "GET",
          "/",
          "",
          "host:example.com",
          "x-amz-date:20150325T105958Z",
          "",
          "host;x-amz-date",
          "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
        ],
        "\n"
      )

    actual =
      Internal.canonical_request(
        "GET",
        "https://example.com/",
        [{"Host", "example.com"}, {"X-Amz-Date", "20150325T105958Z"}],
        ""
      )

    assert expected == actual
  end

  test "canonical_request/4 converts an HTTP method, represented as an atom, into a string before generating a canonical request for AWS signature version 4" do
    expected =
      Enum.join(
        [
          "GET",
          "/",
          "",
          "host:example.com",
          "x-amz-date:20150325T105958Z",
          "",
          "host;x-amz-date",
          "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
        ],
        "\n"
      )

    actual =
      Internal.canonical_request(
        :get,
        "https://example.com/",
        [{"Host", "example.com"}, {"X-Amz-Date", "20150325T105958Z"}],
        ""
      )

    assert expected == actual
  end

  test "credential_scope/3 combines a short date, region and service name and signature identifier into a slash-joined binary value" do
    expected = "20150325/us-east-1/iam/aws4_request"
    assert expected == Internal.credential_scope("20150325", "us-east-1", "iam")
  end

  test "signed_header/1 lowercases the header name" do
    assert "host" = Internal.signed_header({"Host", "example.com"})
  end

  test "signed_header/1 lowercases and strips leading and trailing whitespace from the header name" do
    assert "host" = Internal.signed_header({"  Host  ", "example.com"})
  end

  test "signed_headers/1 lowercases and semicolon-joins header names in alphabetic order" do
    expected = "header;host;x-amz-date"

    actual =
      Internal.signed_headers([
        {"X-Amz-Date", "20150325T105958Z"},
        {"Host", "example.com"},
        {"Header", "Value"}
      ])

    assert expected == actual
  end

  test "signing_key/4 creates a signing key from a secret access key, short date, region identifier and service identifier" do
    # Raw 32-byte HMAC-SHA256 chain output for these fixed inputs.
    expected =
      <<108, 238, 174, 127, 62, 29, 151, 251, 60, 200, 152, 110, 95, 108, 195, 104, 208, 222, 84,
        216, 129, 34, 102, 127, 208, 93, 22, 61, 71, 54, 199, 206>>

    assert expected == Internal.signing_key("secret-access-key", "20150326", "us-east-1", "s3")
  end

  test "split_url/1 transforms a URL into {path, normalized_query_string}" do
    # Query params are sorted and valueless params get an empty value.
    expected = {"/index", "none=&one=1&two=2"}
    actual = Internal.split_url("https://example.com/index?two=2&none&one=1")

    assert expected == actual
  end

  test "split_url/1 returns an empty binary if no query string is present" do
    assert {"/index", ""} = Internal.split_url("https://example.com/index?")
  end

  test "string_to_sign/3 combines a long date, credential scope and hash canonical request into a binary value that's ready to sign" do
    long_date = "20150326T202136Z"
    credential_scope = Internal.credential_scope("20150325", "us-east-1", "iam")

    canonical_request =
      Internal.canonical_request(
        "GET",
        "https://example.com",
        [{"Host", "example.com"}, {"X-Amz-Date", "20150325T105958Z"}],
        ""
      )

    hashed_canonical_request = AWS.Util.sha256_hexdigest(canonical_request)

    expected =
      Enum.join(
        ["AWS4-HMAC-SHA256", long_date, credential_scope, hashed_canonical_request],
        "\n"
      )

    assert expected ==
             Internal.string_to_sign(long_date, credential_scope, hashed_canonical_request)
  end
end
| 33.203837 | 381 | 0.57374 |
ff71fb043f2480a734c8d81dbd67e69c42fc5627 | 3,327 | exs | Elixir | lib/elixir/test/elixir/kernel/sigils_test.exs | tmbb/exdocs_makedown_demo | 6a0039c54d2fa10d79c080efcef8d70d359678f8 | [
"Apache-2.0"
] | 1 | 2017-07-25T21:46:25.000Z | 2017-07-25T21:46:25.000Z | lib/elixir/test/elixir/kernel/sigils_test.exs | tmbb/exdocs_makedown_demo | 6a0039c54d2fa10d79c080efcef8d70d359678f8 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/sigils_test.exs | tmbb/exdocs_makedown_demo | 6a0039c54d2fa10d79c080efcef8d70d359678f8 | [
"Apache-2.0"
] | 1 | 2017-07-25T21:46:48.000Z | 2017-07-25T21:46:48.000Z | Code.require_file "../test_helper.exs", __DIR__
defmodule Kernel.SigilsTest do
  use ExUnit.Case, async: true

  # ~s: string sigil with interpolation and escapes.
  test "sigil s" do
    assert ~s(foo) == "foo"
    assert ~s(f#{:o}o) == "foo"
    assert ~s(f\no) == "f\no"
  end

  test "sigil s with heredoc" do
    assert "  foo\n\n" == ~s"""
      f#{:o}o\n
    """
  end

  # ~S: raw string sigil — no interpolation, no escape processing except
  # for the terminating delimiter.
  test "sigil S" do
    assert ~S(foo) == "foo"
    assert ~S[foo] == "foo"
    assert ~S{foo} == "foo"
    assert ~S'foo' == "foo"
    assert ~S"foo" == "foo"
    assert ~S<foo> == "foo"
    assert ~S/foo/ == "foo"
    assert ~S|foo| == "foo"
    assert ~S(f#{o}o) == "f\#{o}o"
    assert ~S(f\#{o}o) == "f\\\#{o}o"
    assert ~S(f\no) == "f\\no"
    assert ~S(foo\)) == "foo)"
    assert ~S[foo\]] == "foo]"
  end

  # A literal newline inside ~S may be \n or \r\n depending on checkout.
  test "sigil S newline" do
    assert ~S(foo\
bar) in ["foo\\\nbar", "foo\\\r\nbar"]
  end

  test "sigil S with heredoc" do
    assert "  f\#{o}o\\n\n" == ~S"""
      f#{o}o\n
    """
  end

  test "sigil s/S expand to binary when possible" do
    assert Macro.expand(quote(do: ~s(foo)), __ENV__) == "foo"
    assert Macro.expand(quote(do: ~S(foo)), __ENV__) == "foo"
  end

  # ~c / ~C: charlist counterparts of ~s / ~S.
  test "sigil c" do
    assert ~c(foo) == 'foo'
    assert ~c(f#{:o}o) == 'foo'
    assert ~c(f\no) == 'f\no'
  end

  test "sigil C" do
    assert ~C(foo) == 'foo'
    assert ~C[foo] == 'foo'
    assert ~C{foo} == 'foo'
    assert ~C'foo' == 'foo'
    assert ~C"foo" == 'foo'
    assert ~C|foo| == 'foo'
    assert ~C(f#{o}o) == 'f\#{o}o'
    assert ~C(f\no) == 'f\\no'
  end

  # ~w: word list sigil with interpolation and the s/a/c modifiers.
  test "sigil w" do
    assert ~w() == []
    assert ~w(foo bar baz) == ["foo", "bar", "baz"]
    assert ~w(foo #{:bar} baz) == ["foo", "bar", "baz"]
    assert ~w(#{""}) == []
    assert ~w(foo #{""}) == ["foo"]
    assert ~w(#{" foo bar "}) == ["foo", "bar"]
    assert ~w(foo\ #{:bar}) == ["foo", "bar"]
    assert ~w(foo\ bar) == ["foo", "bar"]

    assert ~w(
      foo
      bar
      baz
    ) == ["foo", "bar", "baz"]

    assert ~w(foo bar baz)s == ["foo", "bar", "baz"]
    assert ~w(foo bar baz)a == [:foo, :bar, :baz]
    assert ~w(foo bar baz)c == ['foo', 'bar', 'baz']

    # An unknown modifier must raise at expansion time.
    bad_modifier = quote(do: ~w(foo bar baz)x)
    assert %ArgumentError{} = catch_error(Code.eval_quoted(bad_modifier))

    assert ~w(Foo Bar)a == [:"Foo", :"Bar"]
    assert ~w(Foo.#{Bar}.Baz)a == [:"Foo.Elixir.Bar.Baz"]
    assert ~w(Foo.Bar)s == ["Foo.Bar"]
    assert ~w(Foo.#{Bar})c == ['Foo.Elixir.Bar']

    # Ensure it is fully expanded at compile time
    assert Macro.expand(quote(do: ~w(a b c)a), __ENV__) == [:a, :b, :c]
  end

  # ~W: raw word list sigil — interpolation left verbatim.
  test "sigil W" do
    assert ~W() == []
    assert ~W(foo #{bar} baz) == ["foo", "\#{bar}", "baz"]
    assert ~W(foo\ bar) == ["foo\\", "bar"]

    assert ~W(
      foo
      bar
      baz
    ) == ["foo", "bar", "baz"]

    assert ~W(foo bar baz)s == ["foo", "bar", "baz"]
    assert ~W(foo bar baz)a == [:foo, :bar, :baz]
    assert ~W(foo bar baz)c == ['foo', 'bar', 'baz']

    bad_modifier = quote do: ~W(foo bar baz)x
    assert %ArgumentError{} = catch_error(Code.eval_quoted(bad_modifier))

    assert ~W(Foo #{Bar})a == [:"Foo", :"\#{Bar}"]
    assert ~W(Foo.Bar.Baz)a == [:"Foo.Bar.Baz"]
  end

  # Escaped delimiters inside sigils must not terminate the sigil.
  test "sigils matching" do
    assert ~s(f\(oo) == "f(oo"
    assert ~s(fo\)o) == "fo)o"
    assert ~s(f\(o\)o) == "f(o)o"
    assert ~s(f[oo) == "f[oo"
    assert ~s(fo]o) == "fo]o"
  end
end
ff72172bd0fba6aeb0af638d86f99c8c0102cd42 | 311 | exs | Elixir | test/recursive_list_length_test.exs | vinolivae/recursive_list_length | fe1a7d47df090d32a63ec1e96322300f90d90dd6 | [
"MIT"
] | null | null | null | test/recursive_list_length_test.exs | vinolivae/recursive_list_length | fe1a7d47df090d32a63ec1e96322300f90d90dd6 | [
"MIT"
] | null | null | null | test/recursive_list_length_test.exs | vinolivae/recursive_list_length | fe1a7d47df090d32a63ec1e96322300f90d90dd6 | [
"MIT"
] | null | null | null | defmodule RecursiveListLengthTest do
use ExUnit.Case
doctest RecursiveListLength
describe "call/1" do
  # Same check as before with the intermediate bindings folded into a
  # single assertion: a three-element list has length 3.
  test "return the length of list" do
    input = [1, 2, 3]

    assert RecursiveListLength.call(input) == 3
  end
end
end
| 19.4375 | 47 | 0.691318 |
ff7222b70c59955f3a6caedef544463b8d7044c3 | 977 | exs | Elixir | mix.exs | elbow-jason/elixir-geohash | 0db29a76a2403fddb85cacd44374aebdcd2f2e61 | [
"Apache-2.0"
] | null | null | null | mix.exs | elbow-jason/elixir-geohash | 0db29a76a2403fddb85cacd44374aebdcd2f2e61 | [
"Apache-2.0"
] | null | null | null | mix.exs | elbow-jason/elixir-geohash | 0db29a76a2403fddb85cacd44374aebdcd2f2e61 | [
"Apache-2.0"
] | null | null | null | defmodule Geohash.Mixfile do
use Mix.Project
# Mix project configuration.
#
# Zero-arity local calls are written with explicit parentheses
# (`description()` instead of `description`): the paren-less form is
# ambiguous with variables and produces compiler warnings on modern
# Elixir. Values and keys are unchanged.
def project do
  [
    app: :geohash,
    version: "0.1.1",
    elixir: "~> 1.1",
    description: description(),
    package: package(),
    build_embedded: Mix.env() == :prod,
    start_permanent: Mix.env() == :prod,
    deps: deps()
  ]
end
# One-line package description published to Hex (shown on hex.pm).
defp description do
"""
Geohash encode/decode implementation for Elixir
"""
end
# Hex package metadata: maintainers, license, and source link.
defp package do
[ maintainers: ["Pablo Mouzo"],
licenses: ["Apache 2.0"],
links: %{"GitHub" => "https://github.com/polmuz/elixir-geohash"}]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
# OTP application spec; :logger is started before this application.
def application do
[applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
# No runtime dependencies.
defp deps do
[]
end
end
| 20.787234 | 77 | 0.615148 |
ff72279d5876319ea0c4071ea476f815858ba13c | 85 | ex | Elixir | lib/helloworld.ex | lupinthe14th/codewars | 5c71a1be6dd5c42a6217d70e5cfcd43c6c016ac7 | [
"MIT"
] | null | null | null | lib/helloworld.ex | lupinthe14th/codewars | 5c71a1be6dd5c42a6217d70e5cfcd43c6c016ac7 | [
"MIT"
] | 70 | 2020-01-07T01:21:38.000Z | 2021-06-18T02:57:39.000Z | lib/helloworld.ex | lupinthe14th/codewars | 5c71a1be6dd5c42a6217d70e5cfcd43c6c016ac7 | [
"MIT"
] | null | null | null | defmodule HelloWorld do
@doc """
Returns the greeting `"hello world!"`.
"""
def greet do
  # The previous implementation piped the literal through
  # String.downcase/1, which is a no-op on an already-lowercase string;
  # return the value directly.
  "hello world!"
end
end
| 14.166667 | 35 | 0.717647 |
ff72951e28ae1029e91c24abe91e472771dfec2c | 2,226 | ex | Elixir | clients/artifact_registry/lib/google_api/artifact_registry/v1beta1/model/google_devtools_artifactregistry_v1alpha1_package.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/artifact_registry/lib/google_api/artifact_registry/v1beta1/model/google_devtools_artifactregistry_v1alpha1_package.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/artifact_registry/lib/google_api/artifact_registry/v1beta1/model/google_devtools_artifactregistry_v1alpha1_package.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ArtifactRegistry.V1beta1.Model.GoogleDevtoolsArtifactregistryV1alpha1Package do
@moduledoc """
Packages are named collections of versions.
## Attributes
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - The time when the package was created.
* `displayName` (*type:* `String.t`, *default:* `nil`) - The display name of the package.
* `name` (*type:* `String.t`, *default:* `nil`) - The name of the package, for example: "projects/p1/locations/us-central1/repositories/repo1/packages/pkg1".
* `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - The time when the package was last updated. This includes publishing a new version of the package.
"""
# Code-generated model (see the file header) — do not hand-edit logic.
# ModelBase supplies struct definition plus decode/encode helpers driven
# by the field/2 declarations below.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:createTime => DateTime.t(),
:displayName => String.t(),
:name => String.t(),
:updateTime => DateTime.t()
}
# `as:` tells the JSON decoder which type to coerce the field into.
field(:createTime, as: DateTime)
field(:displayName)
field(:name)
field(:updateTime, as: DateTime)
end
# Generated protocol impl: delegate JSON decoding to the model module.
defimpl Poison.Decoder,
for: GoogleApi.ArtifactRegistry.V1beta1.Model.GoogleDevtoolsArtifactregistryV1alpha1Package do
def decode(value, options) do
GoogleApi.ArtifactRegistry.V1beta1.Model.GoogleDevtoolsArtifactregistryV1alpha1Package.decode(
value,
options
)
end
end
# Generated protocol impl: encode via the shared Gax model encoder.
defimpl Poison.Encoder,
for: GoogleApi.ArtifactRegistry.V1beta1.Model.GoogleDevtoolsArtifactregistryV1alpha1Package do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.491803 | 161 | 0.724169 |
ff72d9670e1f7055ac88d5c55df38301009454e9 | 2,416 | ex | Elixir | clients/memcache/lib/google_api/memcache/v1/model/node.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/memcache/lib/google_api/memcache/v1/model/node.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/memcache/lib/google_api/memcache/v1/model/node.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Memcache.V1.Model.Node do
@moduledoc """
## Attributes
* `host` (*type:* `String.t`, *default:* `nil`) - Output only. Hostname or IP address of the Memcached node used by the clients to connect to the Memcached server on this node.
* `nodeId` (*type:* `String.t`, *default:* `nil`) - Output only. Identifier of the Memcached node. The node id does not include project or location like the Memcached instance name.
* `parameters` (*type:* `GoogleApi.Memcache.V1.Model.MemcacheParameters.t`, *default:* `nil`) - User defined parameters currently applied to the node.
* `port` (*type:* `integer()`, *default:* `nil`) - Output only. The port number of the Memcached server on this node.
* `state` (*type:* `String.t`, *default:* `nil`) - Output only. Current state of the Memcached node.
* `zone` (*type:* `String.t`, *default:* `nil`) - Output only. Location (GCP Zone) for the Memcached node.
"""
# Code-generated model (see file header). field/2 declarations drive the
# JSON (de)serialization provided by ModelBase.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:host => String.t(),
:nodeId => String.t(),
:parameters => GoogleApi.Memcache.V1.Model.MemcacheParameters.t(),
:port => integer(),
:state => String.t(),
:zone => String.t()
}
field(:host)
field(:nodeId)
# Nested message: decoded into its own generated model struct.
field(:parameters, as: GoogleApi.Memcache.V1.Model.MemcacheParameters)
field(:port)
field(:state)
field(:zone)
end
# Generated protocol impl: delegate JSON decoding to the model module.
defimpl Poison.Decoder, for: GoogleApi.Memcache.V1.Model.Node do
def decode(value, options) do
GoogleApi.Memcache.V1.Model.Node.decode(value, options)
end
end
# Generated protocol impl: encode via the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Memcache.V1.Model.Node do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.967742 | 185 | 0.692881 |
ff72dc19dd7da62739410b301e8daf81689bc089 | 77 | exs | Elixir | test/worker_tracker_test.exs | optoro/worker_tracker | 0a9381cbb596edd5948bbad82dcde409d5f6ab5b | [
"MIT"
] | 1 | 2020-02-06T17:15:44.000Z | 2020-02-06T17:15:44.000Z | test/worker_tracker_test.exs | optoro/worker_tracker | 0a9381cbb596edd5948bbad82dcde409d5f6ab5b | [
"MIT"
] | null | null | null | test/worker_tracker_test.exs | optoro/worker_tracker | 0a9381cbb596edd5948bbad82dcde409d5f6ab5b | [
"MIT"
] | 1 | 2021-04-01T13:29:18.000Z | 2021-04-01T13:29:18.000Z | defmodule WorkerTrackerTest do
use ExUnit.Case
# No dedicated test cases yet; doctest/1 runs any iex> examples embedded
# in WorkerTracker's documentation as tests.
doctest WorkerTracker
end
| 15.4 | 30 | 0.831169 |
ff72ecfe8ec164e8449f916d4928b17997dca973 | 3,345 | exs | Elixir | apps/faqcheck_web/test/faqcheck_web/controllers/user_confirmation_controller_test.exs | csboling/faqcheck | bc182c365d466c8dcacc6b1a5fe9186a2c912cd4 | [
"CC0-1.0"
] | null | null | null | apps/faqcheck_web/test/faqcheck_web/controllers/user_confirmation_controller_test.exs | csboling/faqcheck | bc182c365d466c8dcacc6b1a5fe9186a2c912cd4 | [
"CC0-1.0"
] | 20 | 2021-09-08T04:07:31.000Z | 2022-03-10T21:52:24.000Z | apps/faqcheck_web/test/faqcheck_web/controllers/user_confirmation_controller_test.exs | csboling/faqcheck | bc182c365d466c8dcacc6b1a5fe9186a2c912cd4 | [
"CC0-1.0"
] | null | null | null | defmodule FaqcheckWeb.UserConfirmationControllerTest do
use FaqcheckWeb.ConnCase, async: true
alias Faqcheck.Accounts
alias Faqcheck.Repo
import Faqcheck.AccountsFixtures
# Every test gets a freshly inserted user in its context under :user.
setup do
%{user: user_fixture()}
end
describe "GET /users/confirm" do
  # Renders the "resend confirmation" form; the intermediate `response`
  # binding is inlined into the assertion.
  test "renders the confirmation page", %{conn: conn} do
    conn = get(conn, Routes.user_confirmation_path(conn, :new, "en"))

    assert html_response(conn, 200) =~ "<h1>Resend confirmation instructions</h1>"
  end
end
describe "POST /users/confirm" do
@tag :capture_log
test "sends a new confirmation token", %{conn: conn, user: user} do
conn =
post(conn, Routes.user_confirmation_path(conn, :create, "en"), %{
"user" => %{"email" => user.email}
})
assert redirected_to(conn) == "/"
# Deliberately vague flash: the response must not reveal whether the
# email exists (prevents account enumeration).
assert get_flash(conn, :info) =~ "If your email is in our system"
assert Repo.get_by!(Accounts.UserToken, user_id: user.id).context == "confirm"
end
test "does not send confirmation token if User is confirmed", %{conn: conn, user: user} do
Repo.update!(Accounts.User.confirm_changeset(user))
conn =
post(conn, Routes.user_confirmation_path(conn, :create, "en"), %{
"user" => %{"email" => user.email}
})
assert redirected_to(conn) == "/"
# Same generic flash as the success path, but no token is created.
assert get_flash(conn, :info) =~ "If your email is in our system"
refute Repo.get_by(Accounts.UserToken, user_id: user.id)
end
test "does not send confirmation token if email is invalid", %{conn: conn} do
conn =
post(conn, Routes.user_confirmation_path(conn, :create, "en"), %{
"user" => %{"email" => "unknown@example.com"}
})
assert redirected_to(conn) == "/"
assert get_flash(conn, :info) =~ "If your email is in our system"
# Unknown email: no token rows at all should have been written.
assert Repo.all(Accounts.UserToken) == []
end
end
describe "GET /users/confirm/:token" do
test "confirms the given token once", %{conn: conn, user: user} do
# Capture the raw token out of the delivered confirmation email.
token =
extract_user_token(fn url ->
Accounts.deliver_user_confirmation_instructions(user, url)
end)
conn = get(conn, Routes.user_confirmation_path(conn, :confirm, "en", token))
assert redirected_to(conn) == "/"
assert get_flash(conn, :info) =~ "User confirmed successfully"
assert Accounts.get_user!(user.id).confirmed_at
refute get_session(conn, :user_token)
# Confirmation consumes the token (and any other tokens for the user).
assert Repo.all(Accounts.UserToken) == []
# When not logged in
conn = get(conn, Routes.user_confirmation_path(conn, :confirm, "en", token))
assert redirected_to(conn) == "/"
assert get_flash(conn, :error) =~ "User confirmation link is invalid or it has expired"
# When logged in
conn =
build_conn()
|> log_in_user(user)
|> get(Routes.user_confirmation_path(conn, :confirm, "en", token))
assert redirected_to(conn) == "/"
# A logged-in user re-visiting a used link gets no error flash.
refute get_flash(conn, :error)
end
test "does not confirm email with invalid token", %{conn: conn, user: user} do
conn = get(conn, Routes.user_confirmation_path(conn, :confirm, "en", "oops"))
assert redirected_to(conn) == "/"
assert get_flash(conn, :error) =~ "User confirmation link is invalid or it has expired"
refute Accounts.get_user!(user.id).confirmed_at
end
end
end
| 35.210526 | 94 | 0.642152 |
ff72f6f9d990bbe7c0310178576ff03cb2626564 | 530 | exs | Elixir | apps/ewallet_api/test/ewallet_api/v1/controllers/fallback_controller_test.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_api/test/ewallet_api/v1/controllers/fallback_controller_test.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_api/test/ewallet_api/v1/controllers/fallback_controller_test.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | defmodule EWalletAPI.V1.FallbackControllerTest do
use EWalletAPI.ConnCase, async: true
describe "/not_found" do
test "returns correct error response format and error message" do
expected = %{
"version" => "1",
"success" => false,
"data" => %{
"object" => "error",
"code" => "client:endpoint_not_found",
"description" => "Endpoint not found",
"messages" => nil
}
}
assert client_request("/not_found") == expected
end
end
end
| 25.238095 | 69 | 0.571698 |
ff7308ad9194cfcac81f868f6db8e61a80bd694f | 4,480 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/binding.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/model/binding.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/model/binding.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQuery.V2.Model.Binding do
@moduledoc """
Associates `members` with a `role`.
## Attributes
* `condition` (*type:* `GoogleApi.BigQuery.V2.Model.Expr.t`, *default:* `nil`) - The condition that is associated with this binding.
If the condition evaluates to `true`, then this binding applies to the
current request.
If the condition evaluates to `false`, then this binding does not apply to
the current request. However, a different role binding might grant the same
role to one or more of the members in this binding.
To learn which resources support conditions in their IAM policies, see the
[IAM
documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
* `members` (*type:* `list(String.t)`, *default:* `nil`) - Specifies the identities requesting access for a Cloud Platform resource.
`members` can have the following values:
* `allUsers`: A special identifier that represents anyone who is
on the internet; with or without a Google account.
* `allAuthenticatedUsers`: A special identifier that represents anyone
who is authenticated with a Google account or a service account.
* `user:{emailid}`: An email address that represents a specific Google
account. For example, `alice@example.com` .
* `serviceAccount:{emailid}`: An email address that represents a service
account. For example, `my-other-app@appspot.gserviceaccount.com`.
* `group:{emailid}`: An email address that represents a Google group.
For example, `admins@example.com`.
* `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique
identifier) representing a user that has been recently deleted. For
example, `alice@example.com?uid=123456789012345678901`. If the user is
recovered, this value reverts to `user:{emailid}` and the recovered user
retains the role in the binding.
* `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus
unique identifier) representing a service account that has been recently
deleted. For example,
`my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`.
If the service account is undeleted, this value reverts to
`serviceAccount:{emailid}` and the undeleted service account retains the
role in the binding.
* `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique
identifier) representing a Google group that has been recently
deleted. For example, `admins@example.com?uid=123456789012345678901`. If
the group is recovered, this value reverts to `group:{emailid}` and the
recovered group retains the role in the binding.
* `domain:{domain}`: The G Suite domain (primary) that represents all the
users of that domain. For example, `google.com` or `example.com`.
* `role` (*type:* `String.t`, *default:* `nil`) - Role that is assigned to `members`.
For example, `roles/viewer`, `roles/editor`, or `roles/owner`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:condition => GoogleApi.BigQuery.V2.Model.Expr.t(),
:members => list(String.t()),
:role => String.t()
}
field(:condition, as: GoogleApi.BigQuery.V2.Model.Expr)
field(:members, type: :list)
field(:role)
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.Binding do
def decode(value, options) do
GoogleApi.BigQuery.V2.Model.Binding.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.Binding do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.481481 | 136 | 0.699554 |
ff732037fbd9b9b457efa4f97dbb2b52c31464a0 | 2,670 | exs | Elixir | test/acknowledger_test.exs | DivvyPayHQ/broadway_kafka | 7f14cf5f29dd999f2a17cefe5d0ecccca28473d6 | [
"Apache-2.0"
] | null | null | null | test/acknowledger_test.exs | DivvyPayHQ/broadway_kafka | 7f14cf5f29dd999f2a17cefe5d0ecccca28473d6 | [
"Apache-2.0"
] | null | null | null | test/acknowledger_test.exs | DivvyPayHQ/broadway_kafka | 7f14cf5f29dd999f2a17cefe5d0ecccca28473d6 | [
"Apache-2.0"
] | null | null | null | defmodule BroadwayKafka.AcknowledgerTest do
use ExUnit.Case, async: true
alias BroadwayKafka.Acknowledger, as: Ack
# Keys are 3-tuples identifying a topic/partition (first element is
# presumably a generation id — confirm against Acknowledger).
@foo {1, "foo", 1}
@bar {1, "bar", 2}
# Shared fixture: an acknowledger seeded with last offsets 10 (foo) and 0 (bar).
@ack Ack.add(Ack.new(), [{1, "foo", 1, 10}, {1, "bar", 2, 0}])
# A fresh acknowledger is just an empty map.
test "new" do
assert Ack.new() == %{}
end
# add/2 stores {last_acked_offset, last_offset, pending} per key, where
# last_acked starts one below the given offset (9 for 10, -1 for 0).
test "add" do
assert @ack == %{
@foo => {9, 10, []},
@bar => {-1, 0, []}
}
end
# keys/1 returns all tracked topic/partition keys (order unspecified,
# hence the sort before comparing).
test "keys" do
  sorted_keys =
    @ack
    |> Ack.keys()
    |> Enum.sort()

  assert sorted_keys == [@bar, @foo]
end
# last_offset/2 echoes the offset each key was added with.
test "last_offset" do
assert Ack.last_offset(@ack, @foo) == 10
assert Ack.last_offset(@ack, @bar) == 0
end
# update_current_offset/3 returns {drained?, commit_offset, new_state}:
# drained? is true once every offset up to last_offset - 1 was acked;
# commit_offset is the highest contiguously-acked offset (nil when there
# is still a gap at the front of the sequence).
test "update_current_offset" do
ack = Ack.update_last_offset(@ack, @foo, 20)
assert {true, 19, _} = Ack.update_current_offset(ack, @foo, Enum.to_list(10..19))
ack = Ack.update_last_offset(@ack, @foo, 20)
# 10 is contiguous; 13/14 stay pending until 11/12 arrive.
assert {false, 10, ack} = Ack.update_current_offset(ack, @foo, [10, 13, 14])
assert {true, 19, _} = Ack.update_current_offset(ack, @foo, [11, 12, 15, 16, 17, 18, 19])
ack = Ack.update_last_offset(@ack, @foo, 20)
# Nothing contiguous from the front yet -> nil commit offset.
assert {false, nil, ack} = Ack.update_current_offset(ack, @foo, [13, 14])
assert {false, nil, ack} = Ack.update_current_offset(ack, @foo, [11, 12, 15, 16, 17, 18, 19])
# The missing head (10) arrives last and completes the whole range.
assert {true, 19, _} = Ack.update_current_offset(ack, @foo, [10])
ack = Ack.update_last_offset(@ack, @foo, 20)
assert {false, nil, ack} = Ack.update_current_offset(ack, @foo, [13, 14])
# Contiguous up to 16; 17 is still missing.
assert {false, 16, ack} = Ack.update_current_offset(ack, @foo, [10, 11, 12, 15, 16, 18, 19])
assert {true, 19, _} = Ack.update_current_offset(ack, @foo, [17])
end
# all_drained?/1 is true only when every key has acked all offsets up to
# its last offset; the seeded fixture starts out drained.
test "all_drained?" do
ack = @ack
assert Ack.all_drained?(ack)
# Raising the last offset re-opens the key until 10..99 are acked.
ack = Ack.update_last_offset(ack, @foo, 100)
refute Ack.all_drained?(ack)
assert {false, 49, ack} = Ack.update_current_offset(ack, @foo, Enum.to_list(10..49))
refute Ack.all_drained?(ack)
assert {true, 99, ack} = Ack.update_current_offset(ack, @foo, Enum.to_list(50..99))
assert Ack.all_drained?(ack)
end
# Poor man's property-based testing: feed update_current_offset/3 random
# partitions of the offset range instead of using a generator library.
describe "property based testing" do
# We generate a list from 10..99 and we break it into 1..9 random parts.
test "drained?" do
ack = Ack.update_last_offset(@ack, @foo, 100)
for n_parts <- 1..9 do
# Randomly partition 10..99 into up to n_parts disjoint groups.
groups = Enum.group_by(10..99, fn _ -> :rand.uniform(n_parts) end)
offsets = Map.values(groups)
# The fn head only matches {false, _, ack}: intermediate steps can
# never report drained, because the range is only complete once the
# final group has been applied (regardless of group order).
{drained?, _, ack} =
Enum.reduce(offsets, {false, :unused, ack}, fn offset, {false, _, ack} ->
Ack.update_current_offset(ack, @foo, Enum.sort(offset))
end)
assert drained?
assert Ack.all_drained?(ack)
end
end
end
end
| 31.785714 | 97 | 0.607116 |
ff7327fc0bd7fbe379039f4684901fb9f34b4080 | 1,619 | ex | Elixir | clients/vault/lib/google_api/vault/v1/model/mail_export_options.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/vault/lib/google_api/vault/v1/model/mail_export_options.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/vault/lib/google_api/vault/v1/model/mail_export_options.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vault.V1.Model.MailExportOptions do
@moduledoc """
The options for mail export.
## Attributes
* `exportFormat` (*type:* `String.t`, *default:* `nil`) - The export file format.
* `showConfidentialModeContent` (*type:* `boolean()`, *default:* `nil`) - Set to true to export confidential mode content.
"""
# Code-generated model (see file header). field/2 declarations drive the
# JSON (de)serialization provided by ModelBase.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:exportFormat => String.t() | nil,
:showConfidentialModeContent => boolean() | nil
}
field(:exportFormat)
field(:showConfidentialModeContent)
end
# Generated protocol impl: delegate JSON decoding to the model module.
defimpl Poison.Decoder, for: GoogleApi.Vault.V1.Model.MailExportOptions do
def decode(value, options) do
GoogleApi.Vault.V1.Model.MailExportOptions.decode(value, options)
end
end
# Generated protocol impl: encode via the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Vault.V1.Model.MailExportOptions do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.38 | 126 | 0.729463 |
ff733ca5af89822814efcc522b1ac63dba3b2df5 | 9,642 | exs | Elixir | test/integration/pdf_generation_test.exs | williamthome/chromic_pdf | e53a32e576e71279ffd8a4f0643ae7960e9d5479 | [
"Apache-2.0"
] | null | null | null | test/integration/pdf_generation_test.exs | williamthome/chromic_pdf | e53a32e576e71279ffd8a4f0643ae7960e9d5479 | [
"Apache-2.0"
] | null | null | null | test/integration/pdf_generation_test.exs | williamthome/chromic_pdf | e53a32e576e71279ffd8a4f0643ae7960e9d5479 | [
"Apache-2.0"
] | null | null | null | defmodule ChromicPDF.PDFGenerationTest do
use ExUnit.Case, async: false
import ExUnit.CaptureLog
import ChromicPDF.Utils, only: [system_cmd!: 2]
alias ChromicPDF.TestServer
# Fixture documents resolved relative to this test file.
@test_html Path.expand("../fixtures/test.html", __ENV__.file)
@test_dynamic_html Path.expand("../fixtures/test_dynamic.html", __ENV__.file)
@test_image Path.expand("../fixtures/image_with_text.svg", __ENV__.file)
# Scratch output path; print_to_pdf/3 deletes it after each use.
@output Path.expand("../test.pdf", __ENV__.file)
# Tests tagged :disable_logger silence the console logger for their
# duration; the backend is restored via on_exit.
setup context do
if {:disable_logger, true} in context do
Logger.remove_backend(:console)
on_exit(fn -> Logger.add_backend(:console) end)
end
:ok
end
# Convenience clause: default input (the fixture HTML) and no params.
defp print_to_pdf(cb) do
print_to_pdf({:url, "file://#{@test_html}"}, [], cb)
end
# Two-arg, first arg a keyword list -> treated as params, default input.
defp print_to_pdf(params, cb) when is_list(params) do
print_to_pdf({:url, "file://#{@test_html}"}, params, cb)
end
# Two-arg, anything else -> treated as the input tuple, no params.
defp print_to_pdf(input, cb) do
print_to_pdf(input, [], cb)
end
# Core helper: print to @output, extract the text with pdftotext and
# hand it to the callback; the `after` clause always removes the file.
defp print_to_pdf(input, pdf_params, cb) do
assert ChromicPDF.print_to_pdf(input, Keyword.put(pdf_params, :output, @output)) == :ok
assert File.exists?(@output)
text = system_cmd!("pdftotext", [@output, "-"])
cb.(text)
after
File.rm_rf!(@output)
end
describe "PDF printing" do
# Boot a ChromicPDF supervision tree with default options per test.
setup do
start_supervised!(ChromicPDF)
:ok
end
# :pdftotext tags mark tests that shell out to the pdftotext binary.
@tag :pdftotext
test "it prints PDF from file:// URLs" do
print_to_pdf(fn text ->
assert String.contains?(text, "Hello ChromicPDF!")
end)
end
@tag :pdftotext
test "it prints PDF from files (expanding to file://) URLs" do
print_to_pdf({:url, @test_html}, fn text ->
assert String.contains?(text, "Hello ChromicPDF!")
end)
end
@tag :pdftotext
test "it prints PDF from HTML content" do
print_to_pdf({:html, File.read!(@test_html)}, fn text ->
assert String.contains?(text, "Hello ChromicPDF!")
end)
end
@tag :pdftotext
test "it waits for external resources when printing HTML content" do
# The SVG is referenced externally, so its text only appears if Chrome
# waited for the resource to load before printing.
html = ~s(<img src="file://#{@test_image}" />)
print_to_pdf({:html, html}, fn text ->
assert String.contains?(text, "some text from an external svg")
end)
end
test "it prints PDFs from https:// URLs by default" do
print_to_pdf({:url, "https://example.net"}, fn text ->
assert String.contains?(text, "Example Domain")
end)
end
# Options under :print_to_pdf are forwarded verbatim to Chrome's
# printToPDF call (header/footer templates, margins, ...).
@tag :pdftotext
test "it allows to pass thru options to printToPDF" do
pdf_params = %{
displayHeaderFooter: true,
marginTop: 3,
marginBottom: 3,
headerTemplate: ~S(<span style="font-size: 40px">Header</span>),
footerTemplate: ~S(<span style="font-size: 40px">Footer</span>)
}
print_to_pdf([print_to_pdf: pdf_params], fn text ->
assert String.contains?(text, "Header")
assert String.contains?(text, "Footer")
end)
end
# Phoenix-style {:safe, iolist} tuples are accepted as HTML sources.
@tag :pdftotext
test "it can deal with {:safe, iolist()} tuples" do
print_to_pdf({:html, {:safe, [File.read!(@test_html)]}}, fn text ->
assert String.contains?(text, "Hello ChromicPDF!")
end)
end
@tag :pdftotext
test "it accepts iolists in source and header/footer options" do
pdf_params = %{
displayHeaderFooter: true,
marginTop: 3,
marginBottom: 3,
headerTemplate: [~S(<span style="font-size: 40px">), ["Header", "</span>"]]
}
print_to_pdf({:html, ["foo", ["bar"]]}, [print_to_pdf: pdf_params], fn text ->
assert String.contains?(text, "Header")
assert String.contains?(text, "foobar")
end)
end
test "it can return the Base64 encoded PDF" do
assert {:ok, blob} = ChromicPDF.print_to_pdf({:url, "file://#{@test_html}"})
assert blob =~ ~r<^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$>
end
test "it can yield a temporary file to a callback" do
result =
ChromicPDF.print_to_pdf({:url, "file://#{@test_html}"},
output: fn path ->
assert File.exists?(path)
send(self(), path)
:some_result
end
)
assert result == {:ok, :some_result}
receive do
path -> refute File.exists?(path)
end
end
# JS snippet injected via the :evaluate option; it rewrites the page's
# <h1> so the printed text proves the script ran before printing.
@script """
document.querySelector('h1').innerHTML = 'hello from script';
"""
@tag :pdftotext
test "it can evaluate scripts when printing from :url" do
params = [evaluate: %{expression: @script}]
print_to_pdf({:url, "file://#{@test_html}"}, params, fn text ->
assert String.contains?(text, "hello from script")
end)
end
@tag :pdftotext
test "it can evaluate scripts when printing from `:html`" do
params = [evaluate: %{expression: @script}]
print_to_pdf({:html, File.read!(@test_html)}, params, fn text ->
assert String.contains?(text, "hello from script")
end)
end
# A throwing :evaluate expression raises ChromeError whose message shows
# the JS stack trace and the offending expression with the failing line
# prefixed by "!!!".
@tag :pdftotext
test "it raises nicely formatted errors for script exceptions" do
params = [
evaluate: %{
expression: """
function foo() {
throw new Error("boom");
}
foo();
"""
}
]
expected_msg = """
Exception in :evaluate expression
Exception:
Error: boom
at foo (<anonymous>:2:9)
at <anonymous>:4:1
Evaluated expression:
function foo() {
!!! throw new Error(\"boom\");
}
foo();
"""
assert_raise ChromicPDF.ChromeError, expected_msg, fn ->
ChromicPDF.print_to_pdf({:url, "https://example.net"}, params)
end
end
# :wait_for delays printing until the selector carries the attribute;
# the fixture sets it from Javascript after injecting dynamic content.
@tag :pdftotext
test "it waits until defined selectors have given attribute when printing from `:url`" do
params = [
wait_for: %{selector: "#print-ready", attribute: "ready-to-print"}
]
print_to_pdf({:url, "file://#{@test_dynamic_html}"}, params, fn text ->
assert String.contains?(text, "Dynamic content from Javascript")
end)
end
@tag :pdftotext
test "it waits until defined selectors have given attribute when printing from `:html`" do
params = [
wait_for: %{selector: "#print-ready", attribute: "ready-to-print"}
]
print_to_pdf({:html, File.read!(@test_dynamic_html)}, params, fn text ->
assert String.contains?(text, "Dynamic content from Javascript")
end)
end
end
describe "offline mode" do
setup do
start_supervised!({ChromicPDF, offline: true})
:ok
end
@tag :pdftotext
test "it does not print PDFs from https:// URLs when given the offline: true parameter" do
# The /s modifier lets .* match across the multi-line error message.
msg_re = ~r/net::ERR_INTERNET_DISCONNECTED.*You are trying/s
assert_raise ChromicPDF.ChromeError, msg_re, fn ->
ChromicPDF.print_to_pdf({:url, "https://example.net"})
end
end
end
describe "a cookie can be set when printing" do
@cookie %{
name: "foo",
value: "bar",
domain: "localhost"
}
setup do
start_supervised!({ChromicPDF, offline: false})
start_supervised!(TestServer.cowboy(:http))
%{port: TestServer.port(:http)}
end
test "cookies can be set thru print_to_pdf/2 and are cleared afterwards", %{port: port} do
input = {:url, "http://localhost:#{port}/cookie_echo"}
print_to_pdf(input, [set_cookie: @cookie], fn text ->
assert text =~ ~s(%{"foo" => "bar"})
end)
print_to_pdf(input, [], fn text ->
assert text =~ "%{}"
end)
end
end
describe "certificate error handling" do
setup do
start_supervised!(ChromicPDF)
start_supervised!(TestServer.cowboy(:https))
%{port: TestServer.port(:https)}
end
@tag :pdftotext
@tag :disable_logger
test "it fails on self-signed certificates with a nice error message", %{port: port} do
# TestServer's HTTPS endpoint uses a self-signed certificate.
msg_re = ~r/net::ERR_CERT_AUTHORITY_INVALID.*You are trying/s
assert_raise ChromicPDF.ChromeError, msg_re, fn ->
ChromicPDF.print_to_pdf({:url, "https://localhost:#{port}/hello"})
end
end
end
describe ":ignore_certificate_errors option" do
setup do
start_supervised!({ChromicPDF, ignore_certificate_errors: true})
start_supervised!(TestServer.cowboy(:https))
%{port: TestServer.port(:https)}
end
@tag :pdftotext
@tag :disable_logger
test "allows to bypass Chrome's certificate verification", %{port: port} do
print_to_pdf({:url, "https://localhost:#{port}/hello"}, fn text ->
assert String.contains?(text, "Hello from TestServer")
end)
end
end
describe "session pool timeout" do
setup do
# 1ms timeout guarantees the protocol call times out immediately.
start_supervised!({ChromicPDF, session_pool: [timeout: 1]})
:ok
end
test "can be configured and generates a nice error messages" do
assert_raise RuntimeError, ~r/Timeout in Channel.run_protocol/, fn ->
print_to_pdf(fn _output -> :ok end)
end
end
end
describe "error handling of crashing targets" do
setup do
start_supervised!({ChromicPDF, session_pool: [timeout: 300]})
:ok
end
test "it logs an error on Inspector.targetCrashed before it times out" do
params = [
print_to_pdf: %{
displayHeaderFooter: true,
headerTemplate: ~s(<link rel="stylesheet" href="http://example.net/css" />)
}
]
assert capture_log(fn ->
assert_raise RuntimeError, fn ->
ChromicPDF.print_to_pdf({:html, ""}, params)
end
end) =~ "received an 'Inspector.targetCrashed' message"
end
end
end
| 28.27566 | 94 | 0.607032 |
ff733f4d6b5950a291d15a6477f61e870aabaa1d | 1,509 | ex | Elixir | lib/scentenced_web/router.ex | cNille/scentenced | ca6cc97bec1122047be928bb62b5ab0021cb88e8 | [
"MIT"
] | null | null | null | lib/scentenced_web/router.ex | cNille/scentenced | ca6cc97bec1122047be928bb62b5ab0021cb88e8 | [
"MIT"
] | 1 | 2020-07-17T08:20:59.000Z | 2020-07-17T08:20:59.000Z | lib/scentenced_web/router.ex | cNille/scentenced | ca6cc97bec1122047be928bb62b5ab0021cb88e8 | [
"MIT"
] | null | null | null | defmodule ScentencedWeb.Router do
use ScentencedWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
# If users maybe are logged in
pipeline :auth do
plug(Scentenced.Auth.AuthAccessPipeline)
end
# We use ensure_auth to fail if there is no one logged in
pipeline :ensure_auth do
plug Guardian.Plug.EnsureAuthenticated
end
scope "/", ScentencedWeb do
pipe_through :browser
get "/", PageController, :index
resources "/users", UserController, only: [:new, :create]
resources "/sessions", SessionController, only: [:new, :create]
end
scope "/", ScentencedWeb do
pipe_through [:browser, :auth]
resources "/fragrant", FragrantController, only: [:index, :show]
resources "/sessions", SessionController, only: [:delete]
resources "/perfume", PerfumeController, only: [:index, :show]
resources "/users", UserController, only: [:index, :show]
end
scope "/", ScentencedWeb do
pipe_through [:browser, :auth, :ensure_auth]
resources "/fragrant", FragrantController
resources "/sessions", SessionController, only: [:delete]
resources "/perfume", PerfumeController
resources "/users", UserController, only: [:index, :show]
end
# Other scopes may use custom stacks.
# scope "/api", ScentencedWeb do
# pipe_through :api
# end
end
| 25.576271 | 68 | 0.691186 |
ff734637d4a07e78f4b753130ce8543ad8120abd | 3,001 | ex | Elixir | mix/support/normalize/normalize_territory_info.ex | szTheory/cldr | 30e67d2e5ff13a61c29586a7cfad79995b070e1a | [
"Apache-2.0"
] | null | null | null | mix/support/normalize/normalize_territory_info.ex | szTheory/cldr | 30e67d2e5ff13a61c29586a7cfad79995b070e1a | [
"Apache-2.0"
] | null | null | null | mix/support/normalize/normalize_territory_info.ex | szTheory/cldr | 30e67d2e5ff13a61c29586a7cfad79995b070e1a | [
"Apache-2.0"
] | null | null | null | defmodule Cldr.Normalize.Territories do
@moduledoc false
alias Cldr.Locale
def normalize(content) do
content
|> normalize_territory_info
end
def normalize_territory_info(content) do
content
|> Cldr.Map.remove_leading_underscores()
|> Cldr.Map.underscore_keys()
|> Cldr.Map.integerize_values()
|> Cldr.Map.floatize_values()
|> Enum.map(&normalize_territory_code/1)
|> Enum.map(&normalize_language_codes/1)
|> Enum.into(%{})
|> add_currency_for_territories
|> add_measurement_system
end
@key "language_population"
def normalize_language_codes({k, v}) do
if language_population = Map.get(v, @key) do
language_population =
language_population
|> Enum.map(fn {k1, v1} -> {Locale.normalize_locale_name(k1), v1} end)
|> Enum.into(%{})
{k, Map.put(v, @key, language_population)}
else
{k, v}
end
end
def add_currency_for_territories(territories) do
currencies = Cldr.Normalize.Currency.get_currency_data()["region"]
territories
|> Enum.map(fn {territory, map} ->
{territory, Map.put(map, "currency", Map.get(currencies, territory))}
end)
|> Enum.into(%{})
end
def add_measurement_system(territories) do
systems = get_measurement_data()
territories
|> Enum.map(fn {territory, map} ->
territory = String.to_atom(territory)
measurement_system =
%{}
|> Map.put(
:default,
(get_in(systems, [:measurement_system, territory]) ||
get_in(systems, [:measurement_system, :"001"]))
|> Cldr.Consolidate.canonicalize_measurement_system()
)
|> Map.put(
:paper_size,
(get_in(systems, [:paper_size, territory]) || get_in(systems, [:paper_size, :"001"]))
|> Cldr.Consolidate.canonicalize_measurement_system()
)
|> Map.put(
:temperature,
(get_in(systems, [:measurement_system_category_temperature, territory]) ||
get_in(systems, [:measurement_system, territory]) ||
get_in(systems, [:measurement_system, :"001"]))
|> Cldr.Consolidate.canonicalize_measurement_system()
)
{territory, Map.put(map, :measurement_system, measurement_system)}
end)
|> Map.new()
end
@measurement_path Path.join(Cldr.Config.download_data_dir(), [
"cldr-core",
"/supplemental",
"/measurementData.json"
])
def get_measurement_data do
@measurement_path
|> File.read!()
|> Jason.decode!()
|> get_in(["supplemental", "measurementData"])
|> Enum.map(fn {k, v} -> {Cldr.String.to_underscore(Cldr.String.underscore(k)), v} end)
|> Enum.into(%{})
|> Cldr.Map.atomize_keys()
end
defp normalize_territory_code({code, rest}) do
{normalize_territory_code(code), rest}
end
defp normalize_territory_code(code) do
String.upcase(code)
end
end
| 28.580952 | 95 | 0.622459 |
ff734a7abc45f31edf0cf80cdcb17aa07175721f | 195 | exs | Elixir | test/test_helper.exs | mapmeld/superfund-me | 8bd1aeb78504e6ae068cf57dbefca05bebbb2b07 | [
"MIT"
] | null | null | null | test/test_helper.exs | mapmeld/superfund-me | 8bd1aeb78504e6ae068cf57dbefca05bebbb2b07 | [
"MIT"
] | 3 | 2016-09-09T21:09:17.000Z | 2017-09-13T17:55:20.000Z | test/test_helper.exs | Georeactor/superfund-me | 8bd1aeb78504e6ae068cf57dbefca05bebbb2b07 | [
"MIT"
] | null | null | null | ExUnit.start
Mix.Task.run "ecto.create", ~w(-r Superfundme.Repo --quiet)
Mix.Task.run "ecto.migrate", ~w(-r Superfundme.Repo --quiet)
Ecto.Adapters.SQL.begin_test_transaction(Superfundme.Repo)
| 27.857143 | 60 | 0.758974 |
ff739796e96d26ecb82a13c9d8161faa143c60d3 | 1,955 | ex | Elixir | lib/event_store/adapters/postgres.ex | YodelTalk/event_store | d8f07939d5112754072aeeea8a14cd694b978adf | [
"MIT"
] | null | null | null | lib/event_store/adapters/postgres.ex | YodelTalk/event_store | d8f07939d5112754072aeeea8a14cd694b978adf | [
"MIT"
] | null | null | null | lib/event_store/adapters/postgres.ex | YodelTalk/event_store | d8f07939d5112754072aeeea8a14cd694b978adf | [
"MIT"
] | null | null | null | defmodule EventStore.Adapters.Postgres do
@behaviour EventStore.Adapter
import Ecto.Query
alias EventStore.Event
defmodule Repo do
use Ecto.Repo, otp_app: :event_store, adapter: Ecto.Adapters.Postgres
end
@impl true
def insert(changeset) do
changeset
|> Ecto.Changeset.apply_changes()
|> insert!()
end
defp insert!(event) do
Repo.insert_all(
Event,
[
[
# TODO: Generate the keyword list from the changeset.
name: event.name,
version: event.version,
aggregate_id: event.aggregate_id,
aggregate_version: next_aggregate_version(event),
payload: event.payload,
inserted_at: event.inserted_at
]
],
returning: [:id, :aggregate_version],
on_conflict: :nothing
)
|> case do
{1, [%{id: id, aggregate_version: aggregate_version} | _]} ->
{:ok, %{event | id: id, aggregate_version: aggregate_version}}
{0, []} ->
insert!(event)
end
end
defp next_aggregate_version(%{aggregate_id: aggregate_id} = _event) do
from(e in Event,
where: e.aggregate_id == ^aggregate_id,
select: %{aggregate_version: coalesce(max(e.aggregate_version) + 1, 1)}
)
end
@impl true
def stream(aggregate_id, timestamp) when is_binary(aggregate_id) do
from(e in Event,
where: e.aggregate_id == ^aggregate_id and e.inserted_at > ^timestamp,
order_by: :inserted_at
)
|> Repo.all()
end
def stream(event, timestamp) when is_atom(event) do
name = EventStore.to_name(event)
from(e in Event,
where: e.name == ^name and e.inserted_at > ^timestamp,
order_by: :inserted_at
)
|> Repo.all()
end
@impl true
def exists?(aggregate_id, event) when is_atom(event) do
name = EventStore.to_name(event)
from(e in Event, where: e.aggregate_id == ^aggregate_id and e.name == ^name)
|> Repo.exists?()
end
end
| 25.064103 | 80 | 0.63376 |
ff73cbbd2cb3c7f972f41bd2dcc56d5cd5d2c87f | 820 | ex | Elixir | fade/lib/snapshot/broker_connectivity_lens.ex | ahives/Fade | 7094b6703933e41a1400b1053764335e32928b0a | [
"Apache-2.0"
] | null | null | null | fade/lib/snapshot/broker_connectivity_lens.ex | ahives/Fade | 7094b6703933e41a1400b1053764335e32928b0a | [
"Apache-2.0"
] | null | null | null | fade/lib/snapshot/broker_connectivity_lens.ex | ahives/Fade | 7094b6703933e41a1400b1053764335e32928b0a | [
"Apache-2.0"
] | null | null | null | defmodule Fade.Snapshot.BrokerConnectivityLens do
alias Fade.Broker.{Channel, Connection, SystemOverview}
alias Fade.Snapshot.Lens
alias Fade.Snapshot.Mapper.BrokerConnectivityMapper, as: DataMapper
alias Fade.Snapshot.SnapshotResult
alias UUID
@behaviour Lens
@impl Lens
def take_snapshot(config) do
system_overview_result =
Task.async(fn -> config |> SystemOverview.get() end)
|> Task.await()
connection_result =
Task.async(fn -> config |> Connection.get_all() end)
|> Task.await()
channel_result =
Task.async(fn -> config |> Channel.get_all() end)
|> Task.await()
identifier = UUID.uuid1()
DataMapper.map_data(system_overview_result.data, connection_result.data, channel_result.data)
|> SnapshotResult.map_result(identifier)
end
end
| 27.333333 | 97 | 0.714634 |
ff74023b4f6c66aa6977d2f852499a8fc489fa93 | 2,610 | ex | Elixir | clients/storage/lib/google_api/storage/v1/model/policy.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/storage/lib/google_api/storage/v1/model/policy.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/storage/lib/google_api/storage/v1/model/policy.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Storage.V1.Model.Policy do
  @moduledoc """
  A bucket/object IAM policy.

  ## Attributes

  *   `bindings` (*type:* `list(GoogleApi.Storage.V1.Model.PolicyBindings.t)`, *default:* `nil`) - An association between a role, which comes with a set of permissions, and members who may assume that role.
  *   `etag` (*type:* `String.t`, *default:* `nil`) - HTTP 1.1 Entity tag for the policy.
  *   `kind` (*type:* `String.t`, *default:* `storage#policy`) - The kind of item this is. For policies, this is always storage#policy. This field is ignored on input.
  *   `resourceId` (*type:* `String.t`, *default:* `nil`) - The ID of the resource to which this policy belongs. Will be of the form projects/_/buckets/bucket for buckets, and projects/_/buckets/bucket/objects/object for objects. A specific generation may be specified by appending #generationNumber to the end of the object name, e.g. projects/_/buckets/my-bucket/objects/data.txt#17. The current generation can be denoted with #0. This field is ignored on input.
  *   `version` (*type:* `integer()`, *default:* `nil`) - The IAM policy format version.
  """

  # Generated model: ModelBase provides struct definition, decode/2 and the
  # field/3 macro used below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :bindings => list(GoogleApi.Storage.V1.Model.PolicyBindings.t()) | nil,
          :etag => String.t() | nil,
          :kind => String.t() | nil,
          :resourceId => String.t() | nil,
          :version => integer() | nil
        }

  field(:bindings, as: GoogleApi.Storage.V1.Model.PolicyBindings, type: :list)
  field(:etag)
  field(:kind)
  field(:resourceId)
  field(:version)
end
# JSON decoding delegates to the generated decode/2 from ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Storage.V1.Model.Policy do
  def decode(value, options) do
    GoogleApi.Storage.V1.Model.Policy.decode(value, options)
  end
end
# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Storage.V1.Model.Policy do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 44.237288 | 464 | 0.706513 |
ff743ba282097038d073cd68fe0422f371fce651 | 472 | exs | Elixir | config/prod.secret.exs | c18t/elixir-phx-websocket-chat | 4dc921599b263a856a23d04200631bfee83e3fc3 | [
"WTFPL"
] | null | null | null | config/prod.secret.exs | c18t/elixir-phx-websocket-chat | 4dc921599b263a856a23d04200631bfee83e3fc3 | [
"WTFPL"
] | null | null | null | config/prod.secret.exs | c18t/elixir-phx-websocket-chat | 4dc921599b263a856a23d04200631bfee83e3fc3 | [
"WTFPL"
] | 1 | 2020-11-17T08:23:18.000Z | 2020-11-17T08:23:18.000Z | use Mix.Config
# In this file, we keep production configuration that
# you'll likely want to automate and keep away from
# your version control system.
#
# You should document the content of this
# file or create a script for recreating it, since it's
# kept out of version control and might be hard to recover
# or recreate for your teammates (or yourself later on).
config :websocket_chat, WebsocketChatWeb.Endpoint,
secret_key_base: System.get_env "SECRET_KEY_BASE"
| 36.307692 | 58 | 0.783898 |
ff7492421a88bc5f5bd8f90dd2c8b6e590ada390 | 2,933 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3beta1_webhook_request_intent_info.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3beta1_webhook_request_intent_info.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3beta1_webhook_request_intent_info.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1WebhookRequestIntentInfo do
  @moduledoc """
  Represents intent information communicated to the webhook.

  ## Attributes

  *   `confidence` (*type:* `number()`, *default:* `nil`) - The confidence of the matched intent. Values range from 0.0 (completely uncertain) to 1.0 (completely certain).
  *   `displayName` (*type:* `String.t`, *default:* `nil`) - Always present. The display name of the last matched intent.
  *   `lastMatchedIntent` (*type:* `String.t`, *default:* `nil`) - Always present. The unique identifier of the last matched intent. Format: `projects//locations//agents//intents/`.
  *   `parameters` (*type:* `%{optional(String.t) => GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1WebhookRequestIntentInfoIntentParameterValue.t}`, *default:* `nil`) - Parameters identified as a result of intent matching. This is a map of the name of the identified parameter to the value of the parameter identified from the user's utterance. All parameters defined in the matched intent that are identified will be surfaced here.
  """

  # Generated model: ModelBase provides the struct, decode/2 and field/3.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :confidence => number(),
          :displayName => String.t(),
          :lastMatchedIntent => String.t(),
          :parameters => %{
            optional(String.t()) =>
              GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1WebhookRequestIntentInfoIntentParameterValue.t()
          }
        }

  field(:confidence)
  field(:displayName)
  field(:lastMatchedIntent)

  field(:parameters,
    as:
      GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1WebhookRequestIntentInfoIntentParameterValue,
    type: :map
  )
end
# JSON decoding delegates to the generated decode/2 from ModelBase.
defimpl Poison.Decoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1WebhookRequestIntentInfo do
  def decode(value, options) do
    GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1WebhookRequestIntentInfo.decode(
      value,
      options
    )
  end
end
# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1WebhookRequestIntentInfo do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 42.507246 | 446 | 0.74088 |
ff74a113058426c9315a0f76d95743434167369c | 17,697 | ex | Elixir | lib/livebook_web/live/file_select_component.ex | Fudoshiki/livebook | 0b30fd02d9a50b84873725f3e05974d62fee398f | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/file_select_component.ex | Fudoshiki/livebook | 0b30fd02d9a50b84873725f3e05974d62fee398f | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/file_select_component.ex | Fudoshiki/livebook | 0b30fd02d9a50b84873725f3e05974d62fee398f | [
"Apache-2.0"
] | null | null | null | defmodule LivebookWeb.FileSelectComponent do
use LivebookWeb, :live_component
# The component expects:
#
# * `file` - the currently entered file
#
# * `running_files` - the list of notebook files that are already
# linked to running sessions
#
# * `extnames` - a list of file extensions that should be shown
#
# * `submit_event` - the process event sent on form submission,
# use `nil` for no action
#
# The parent live view receives a `{:set_file, file, %{exists: boolean()}}`
# message whenever the file changes.
#
# Optionally inner block may be passed (e.g. with action buttons)
# and it's rendered next to the text input.
#
# To force the component to refetch the displayed files you can
# `send_update` with `force_reload: true` to the component.
alias Livebook.FileSystem
@impl true
def mount(socket) do
{:ok,
socket
|> assign_new(:inner_block, fn -> nil end)
|> assign(
# Component default attribute values
inner_block: nil,
file_system_select_disabled: false,
submit_event: nil,
# State
current_dir: nil,
deleting_file: nil,
renaming_file: nil,
renamed_name: nil,
error_message: nil,
file_systems: Livebook.Config.file_systems()
)}
end
@impl true
def update(assigns, socket) do
{force_reload?, assigns} = Map.pop(assigns, :force_reload, false)
socket =
socket
|> assign(assigns)
|> update_file_infos(force_reload?)
{:ok, socket}
end
@impl true
def render(assigns) do
~H"""
<div class="h-full flex flex-col">
<div class="flex space-x-3 items-center mb-4">
<div class="flex-grow flex space-x-1">
<.file_system_menu_button
file={@file}
file_systems={@file_systems}
file_system_select_disabled={@file_system_select_disabled}
socket={@socket}
myself={@myself} />
<form class="flex-grow"
phx-change="set_path"
phx-submit={if @submit_event, do: "submit"}
onsubmit={unless @submit_event, do: "return false"}
phx-target={@myself}>
<input class="input"
id="input-path"
phx-hook="FocusOnUpdate"
type="text"
name="path"
placeholder="File"
value={@file.path}
spellcheck="false"
autocomplete="off" />
</form>
</div>
<div class="relative" id="path-selector-menu" phx-hook="Menu" data-element="menu">
<button class="icon-button" data-toggle tabindex="-1">
<.remix_icon icon="add-line" class="text-xl" />
</button>
<div class="menu" data-content>
<button class="menu__item text-gray-500" phx-click={js_show_new_dir_section()}>
<.remix_icon icon="folder-add-fill" class="text-gray-400" />
<span class="font-medium">New directory</span>
</button>
</div>
</div>
<%= if @inner_block do %>
<div>
<%= render_slot(@inner_block) %>
</div>
<% end %>
</div>
<div class="flex flex-col space-y-2">
<%= if @error_message do %>
<div class="error-box flex justify-between items-center">
<span><%= @error_message %></span>
<button phx-click="clear_error" phx-target={@myself}>
<.remix_icon icon="delete-bin-6-line" class="text-lg align-middle" />
</button>
</div>
<% end %>
<%= if @deleting_file do %>
<div class="mb-4 px-4 py-3 flex space-x-4 items-center border border-gray-200 rounded-lg">
<p class="flex-grow text-gray-700 text-sm">
Are you sure you want to irreversibly delete
<span class="font-semibold"><%= @deleting_file.path %></span>?
</p>
<div class="flex space-x-4">
<button class="text-red-600 font-medium text-sm whitespace-nowrap"
phx-click="do_delete_file"
phx-target={@myself}>
<.remix_icon icon="delete-bin-6-line" class="align-middle mr-1" />
Delete
</button>
<button class="text-gray-600 font-medium text-sm"
phx-click="cancel_delete_file"
phx-target={@myself}>
Cancel
</button>
</div>
</div>
<% end %>
</div>
<div class="flex-grow -m-1 p-1 overflow-y-auto tiny-scrollbar" tabindex="-1">
<div class="hidden grid grid-cols-2 sm:grid-cols-3 lg:grid-cols-4 gap-2 border-b border-dashed border-grey-200 mb-2 pb-2"
id="new_dir_section">
<div class="flex space-x-2 items-center p-2 rounded-lg">
<span class="block">
<.remix_icon icon="folder-add-fill" class="text-xl align-middle text-gray-400" />
</span>
<span class="flex font-medium text-gray-500">
<div
phx-window-keydown={js_hide_new_dir_section()}
phx-key="escape"
phx-target={@myself}>
<input
id="new_dir_input"
type="text"
spellcheck="false"
autocomplete="off"
phx-blur={js_hide_new_dir_section()}
phx-window-keydown={JS.push("create_dir", target: @myself) |> js_hide_new_dir_section()}
phx-key="enter" />
</div>
</span>
</div>
</div>
<%= if any_highlighted?(@file_infos) do %>
<div class="grid grid-cols-2 sm:grid-cols-3 lg:grid-cols-4 gap-2 border-b border-dashed border-grey-200 mb-2 pb-2">
<%= for file_info <- @file_infos, file_info.highlighted != "" do %>
<.file
file_info={file_info}
myself={@myself}
renaming_file={@renaming_file}
renamed_name={@renamed_name} />
<% end %>
</div>
<% end %>
<div class="grid grid-cols-2 sm:grid-cols-3 lg:grid-cols-4 gap-2">
<%= for file_info <- @file_infos, file_info.highlighted == "" do %>
<.file
file_info={file_info}
myself={@myself}
renaming_file={@renaming_file}
renamed_name={@renamed_name} />
<% end %>
</div>
</div>
</div>
"""
end
defp any_highlighted?(file_infos) do
Enum.any?(file_infos, &(&1.highlighted != ""))
end
defp file_system_menu_button(assigns) do
~H"""
<div class="relative" id="file-system-menu" phx-hook="Menu" data-element="menu">
<button type="button" class="button button-gray button-square-icon" data-toggle disabled={@file_system_select_disabled}>
<.file_system_icon file_system={@file.file_system} />
</button>
<div class="menu left" data-content>
<%= for {file_system, index} <- @file_systems |> Enum.with_index() do %>
<%= if file_system == @file.file_system do %>
<button class="menu__item text-gray-900">
<.file_system_icon file_system={file_system} />
<span class="font-medium"><%= file_system_label(file_system) %></span>
</button>
<% else %>
<button class="menu__item text-gray-500"
phx-target={@myself}
phx-click="set_file_system"
phx-value-index={index}>
<.file_system_icon file_system={file_system} />
<span class="font-medium"><%= file_system_label(file_system) %></span>
</button>
<% end %>
<% end %>
<%= live_patch to: Routes.settings_path(@socket, :page),
class: "menu__item text-gray-500 border-t border-gray-200" do %>
<.remix_icon icon="settings-3-line" />
<span class="font-medium">Configure</span>
<% end %>
</div>
</div>
"""
end
defp file(%{file_info: %{file: file}, renaming_file: file} = assigns) do
~H"""
<div class="flex space-x-2 items-center p-2 rounded-lg">
<span class="block">
<.remix_icon icon="edit-line" class="text-xl align-middle text-gray-400" />
</span>
<span class="flex font-medium text-gray-500">
<div
phx-window-keydown="cancel_rename_file"
phx-key="escape"
phx-target={@myself}>
<input class="w-full"
type="text"
value={@renamed_name}
autofocus
spellcheck="false"
autocomplete="off"
phx-blur="cancel_rename_file"
phx-window-keydown="do_rename_file"
phx-key="enter"
phx-target={@myself} />
</div>
</span>
</div>
"""
end
defp file(assigns) do
icon =
case assigns.file_info do
%{is_running: true} -> "play-circle-line"
%{is_dir: true} -> "folder-fill"
_ -> "file-code-line"
end
assigns = assign(assigns, :icon, icon)
~H"""
<div class="relative"
id={"file-menu-#{@file_info.file.path}"}
phx-hook="Menu"
data-primary="false"
data-element="menu">
<button class="w-full flex space-x-2 items-center p-2 rounded-lg hover:bg-gray-100 focus:ring-1 focus:ring-gray-400"
data-toggle
phx-click="set_path"
phx-value-path={@file_info.file.path}
phx-target={@myself}>
<span class="block">
<.remix_icon icon={@icon} class={"text-xl align-middle #{if(@file_info.is_running, do: "text-green-300", else: "text-gray-400")}"} />
</span>
<span class={"flex font-medium overflow-hidden whitespace-nowrap #{if(@file_info.is_running, do: "text-green-300", else: "text-gray-500")}"}>
<%= if @file_info.highlighted != "" do %>
<span class={"font-medium #{if(@file_info.is_running, do: "text-green-400", else: "text-gray-900")}"}>
<%= @file_info.highlighted %>
</span>
<% end %>
<span class="overflow-hidden overflow-ellipsis">
<%= @file_info.unhighlighted %>
</span>
</span>
</button>
<div class="menu" data-content>
<%= if @file_info.editable do %>
<button class="menu__item text-gray-500"
phx-click="rename_file"
phx-target={@myself}
phx-value-path={@file_info.file.path}>
<.remix_icon icon="edit-line" />
<span class="font-medium">Rename</span>
</button>
<button class="menu__item text-red-600"
phx-click="delete_file"
phx-target={@myself}
phx-value-path={@file_info.file.path}>
<.remix_icon icon="delete-bin-6-line" />
<span class="font-medium">Delete</span>
</button>
<% end %>
</div>
</div>
"""
end
defp js_show_new_dir_section(js \\ %JS{}) do
js
|> JS.show(to: "#new_dir_section")
|> JS.dispatch("lb:set_value", to: "#new_dir_input", detail: %{value: ""})
|> JS.dispatch("lb:focus", to: "#new_dir_input")
end
defp js_hide_new_dir_section(js \\ %JS{}) do
js
|> JS.hide(to: "#new_dir_section")
end
@impl true
def handle_event("set_file_system", %{"index" => index}, socket) do
index = String.to_integer(index)
file_system = Enum.at(socket.assigns.file_systems, index)
file = FileSystem.File.new(file_system)
send(self(), {:set_file, file, %{exists: true}})
{:noreply, socket}
end
def handle_event("set_path", %{"path" => path}, socket) do
file = FileSystem.File.new(socket.assigns.file.file_system) |> FileSystem.File.resolve(path)
info =
socket.assigns.file_infos
|> Enum.find(&(&1.file.path == path))
|> case do
nil -> %{exists: false}
_info -> %{exists: true}
end
send(self(), {:set_file, file, info})
{:noreply, socket}
end
def handle_event("submit", %{}, socket) do
if submit_event = socket.assigns.submit_event do
send(self(), submit_event)
end
{:noreply, socket}
end
def handle_event("clear_error", %{}, socket) do
{:noreply, put_error(socket, nil)}
end
def handle_event("create_dir", %{"value" => name}, socket) do
socket =
case create_dir(socket.assigns.current_dir, name) do
:ok ->
socket
|> update_file_infos(true)
{:error, error} ->
socket
|> put_error(error)
end
{:noreply, socket}
end
def handle_event("delete_file", %{"path" => path}, socket) do
%{file: file} = Enum.find(socket.assigns.file_infos, &(&1.file.path == path))
{:noreply, assign(socket, deleting_file: file)}
end
def handle_event("cancel_delete_file", %{}, socket) do
{:noreply, assign(socket, deleting_file: nil)}
end
def handle_event("do_delete_file", %{}, socket) do
socket =
case delete_file(socket.assigns.deleting_file) do
:ok ->
socket
|> assign(deleting_file: nil)
|> update_file_infos(true)
{:error, error} ->
put_error(socket, error)
end
{:noreply, socket}
end
def handle_event("rename_file", %{"path" => path}, socket) do
file_info = Enum.find(socket.assigns.file_infos, &(&1.file.path == path))
{:noreply, assign(socket, renaming_file: file_info.file, renamed_name: file_info.name)}
end
def handle_event("cancel_rename_file", %{}, socket) do
{:noreply, assign(socket, renaming_file: nil)}
end
def handle_event("do_rename_file", %{"value" => name}, socket) do
socket =
if renaming_file = socket.assigns.renaming_file do
case rename_file(renaming_file, name) do
:ok ->
socket
|> assign(renaming_file: nil)
|> update_file_infos(true)
{:error, error} ->
socket
|> assign(renamed_name: name)
|> put_error(error)
end
else
socket
end
{:noreply, socket}
end
defp update_file_infos(%{assigns: assigns} = socket, force_reload?) do
current_file_infos = assigns[:file_infos] || []
{dir, prefix} = dir_and_prefix(assigns.file)
{file_infos, socket} =
if dir != assigns.current_dir or force_reload? do
case get_file_infos(dir, assigns.extnames, assigns.running_files) do
{:ok, file_infos} ->
{file_infos, assign(socket, :current_dir, dir)}
{:error, error} ->
{current_file_infos, put_error(socket, error)}
end
else
{current_file_infos, socket}
end
assign(socket, :file_infos, annotate_matching(file_infos, prefix))
end
defp annotate_matching(file_infos, prefix) do
for %{name: name} = info <- file_infos do
if String.starts_with?(name, prefix) do
%{info | highlighted: prefix, unhighlighted: String.replace_prefix(name, prefix, "")}
else
%{info | highlighted: "", unhighlighted: name}
end
end
end
# Phrase after the last slash is used as a search prefix within
# the given directory.
#
# Given "/foo/bar", we use "bar" to filter files within "/foo/".
# Given "/foo/bar/", we use "" to filter files within "/foo/bar/".
defp dir_and_prefix(file) do
if FileSystem.File.dir?(file) do
{file, ""}
else
{FileSystem.File.containing_dir(file), FileSystem.File.name(file)}
end
end
defp get_file_infos(dir, extnames, running_files) do
with {:ok, files} <- FileSystem.File.list(dir) do
file_infos =
files
|> Enum.map(fn file ->
name = FileSystem.File.name(file)
file_info(file, name, running_files)
end)
|> Enum.filter(fn info ->
not hidden?(info.name) and (info.is_dir or valid_extension?(info.name, extnames))
end)
|> Kernel.++(
case FileSystem.File.containing_dir(dir) do
^dir -> []
parent -> [file_info(parent, "..", running_files, editable: false)]
end
)
|> Enum.sort_by(fn file -> {!file.is_dir, file.name} end)
{:ok, file_infos}
end
end
defp file_info(file, name, running_files, opts \\ []) do
%{
name: name,
highlighted: "",
unhighlighted: name,
file: file,
is_dir: FileSystem.File.dir?(file),
is_running: file in running_files,
editable: Keyword.get(opts, :editable, true)
}
end
defp hidden?(filename) do
String.starts_with?(filename, ".")
end
defp valid_extension?(filename, extnames) do
Path.extname(filename) in extnames
end
defp put_error(socket, nil) do
assign(socket, :error_message, nil)
end
defp put_error(socket, :ignore) do
socket
end
defp put_error(socket, message) when is_binary(message) do
assign(socket, :error_message, Livebook.Utils.upcase_first(message))
end
defp create_dir(_parent_dir, ""), do: {:error, :ignore}
defp create_dir(parent_dir, name) do
new_dir = FileSystem.File.resolve(parent_dir, name <> "/")
FileSystem.File.create_dir(new_dir)
end
defp delete_file(file) do
FileSystem.File.remove(file)
end
defp rename_file(_file, ""), do: {:error, :ignore}
defp rename_file(file, name) do
parent_dir = FileSystem.File.containing_dir(file)
new_name = if FileSystem.File.dir?(file), do: name <> "/", else: name
new_file = FileSystem.File.resolve(parent_dir, new_name)
FileSystem.File.rename(file, new_file)
end
end
| 32.47156 | 149 | 0.572131 |
ff750db1427ad924363deb36a96927555585eeb1 | 1,330 | ex | Elixir | lib/nectar/endpoint.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | null | null | null | lib/nectar/endpoint.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | null | null | null | lib/nectar/endpoint.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | null | null | null | defmodule Nectar.Endpoint do
  use Phoenix.Endpoint, otp_app: :nectar
  socket "/socket", Nectar.UserSocket
  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phoenix.digest
  # when deploying your static files in production.
  plug Plug.Static.IndexHtml,
    at: "/"
  # Static assets for the cart client single-page-app build.
  plug Plug.Static,
    at: "/", from: {:nectar, "priv/cart-client/build"},
    only: ~w(index.html favicon.ico static service-worker.js assets)
  # Static assets (uploads, editor, fonts, ...) for the admin interface.
  plug Plug.Static,
    at: "/admin_assets", from: :nectar, gzip: false,
    only: ~w(uploads css fonts images js tinymce favicon.ico robots.txt)
  # NOTE(review): CORS origins are local development hosts only — confirm
  # this list is also what production deployments expect.
  plug CORSPlug, origin: ["http://localhost:3000", "http://localhost:4000"]
  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end
  plug Plug.RequestId
  plug Plug.Logger
  # Request body parsing; Absinthe's parser handles GraphQL payloads and
  # Poison decodes JSON bodies.
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json, Absinthe.Plug.Parser],
    pass: ["*/*"],
    json_decoder: Poison
  plug Plug.MethodOverride
  plug Plug.Head
  # Cookie-based session storage.
  plug Plug.Session,
    store: :cookie,
    key: "_nectar_key",
    signing_salt: "8dI5dqCI"
  # Hand off to the router last, after all endpoint-level plugs have run.
  plug Nectar.Router
end
| 26.078431 | 75 | 0.696241 |
ff753975efb21c44654c4f4bbc10fdbb282d5837 | 3,527 | exs | Elixir | test/lesson_test.exs | stevegrossi/lessonly-elixir | abd767bc1171c6ac6f2de6086a5e42361e2421bb | [
"MIT"
] | 1 | 2016-09-06T20:04:38.000Z | 2016-09-06T20:04:38.000Z | test/lesson_test.exs | stevegrossi/lessonly-elixir | abd767bc1171c6ac6f2de6086a5e42361e2421bb | [
"MIT"
] | 3 | 2016-04-03T21:03:19.000Z | 2016-04-03T21:38:02.000Z | test/lesson_test.exs | stevegrossi/lessonly-elixir | abd767bc1171c6ac6f2de6086a5e42361e2421bb | [
"MIT"
] | null | null | null | defmodule LessonTest do
  use Lessonly.ApiTest
  # Replays the "lesson.all" cassette and checks that listing returns every
  # lesson with its id and title.
  test "all" do
    use_cassette "lesson.all" do
      response = Lessonly.Lesson.all(client)
      assert response == [
        %{"id" => 29414, "title" => "Test Lesson 1"},
        %{"id" => 29415, "title" => "Test Lesson 2"}
      ]
    end
  end
  # Fetching a single lesson by id returns the full attribute map, including
  # a shareable link.
  test "find" do
    use_cassette "lesson.find" do
      response = client |> Lessonly.Lesson.find(29415)
      assert response == %{
        "assignees_count" => 0,
        "completed_count" => 0,
        "created_at" => "2016-04-03T12:27:02Z",
        "description" => nil,
        "id" => 29415,
        "last_updated_at" => "2016-04-03T12:27:17Z",
        "links" => %{
          "shareable" => "http://test.lessonly.localhost:3000/lesson/29415-test-lesson-2"
        },
        "public" => false,
        "resource_type" => "lesson",
        "retake_score" => 0,
        "tags" => [],
        "title" => "Test Lesson 2",
        "type" => "lesson"
      }
    end
  end
  # Updating a lesson returns the updated attributes; note the shareable link
  # slug follows the new title and "type" becomes "update_lesson".
  test "update" do
    use_cassette "lesson.update" do
      response = client |> Lessonly.Lesson.update(29415, %{"title" => "Testy Lesson 2"})
      assert response == %{
        "assignees_count" => 1,
        "completed_count" => 0,
        "created_at" => "2016-04-03T12:27:02Z",
        "description" => nil,
        "id" => 29415,
        "last_updated_at" => "2016-04-03T15:45:52Z",
        "links" => %{
          "shareable" => "http://test.lessonly.localhost:3000/lesson/29415-testy-lesson-2"
        },
        "public" => false,
        "resource_type" => "lesson",
        "retake_score" => 0,
        "tags" => [],
        "title" => "Testy Lesson 2",
        "type" => "update_lesson"
      }
    end
  end
  # Listing assignments for a lesson returns a paginated envelope wrapping
  # the assignment records.
  test "assignments" do
    use_cassette "lesson.assignments" do
      response = client |> Lessonly.Lesson.assignments(29415)
      assert response == %{
        "assignments" => [
          %{
            "assignable_id" => 29415,
            "assignable_type" => "Lesson",
            "assignee_id" => 544547,
            "completed_at" => nil,
            "due_by" => "2020-12-31T00:00:00Z",
            "ext_uid" => nil,
            "id" => 941035,
            "reassigned_at" => nil,
            "resource_type" => "assignment",
            "score" => 0,
            "started_at" => nil,
            "status" => "Incomplete",
            "updated_at" => "2016-04-03T14:55:00Z"
          }
        ],
        "page" => 1,
        "per_page" => 50,
        "total_assignments" => 1,
        "total_pages" => 1,
        "type" => "lesson_assignments"
      }
    end
  end
  # Assigning a lesson to a user echoes back the assignment records.
  # NOTE(review): the cassette's "due_by" differs from the requested
  # "2020-06-01" — this is what the recorded server response contains.
  test "assign" do
    use_cassette "lesson.assign" do
      response = client |> Lessonly.Lesson.assign(
        29415,
        %{
          "assignments" => [
            %{
              "assignee_id" => 544547,
              "due_by" => "2020-06-01"
            }
          ]
        }
      )
      assert response == %{
        "assignments" => [
          %{
            "assignable_id" => 29415,
            "assignable_type" => "Lesson",
            "assignee_id" => 544547,
            "completed_at" => nil,
            "due_by" => "2020-12-31T00:00:00Z",
            "ext_uid" => nil,
            "id" => 941035,
            "reassigned_at" => nil,
            "resource_type" => "assignment",
            "score" => 0,
            "started_at" => nil,
            "status" => "Incomplete",
            "updated_at" => "2016-04-03T14:55:00Z"
          }
        ],
        "type" => "update_lesson_assignments"
      }
    end
  end
end
| 27.341085 | 90 | 0.474057 |
ff758f3bf53b52ace1e8df51dc18b4f824c95e45 | 2,200 | exs | Elixir | config/prod.exs | ajanes/booster-ui | 63a1cd32f873d11ff2448666692c2f637dfc466b | [
"CC-BY-3.0",
"BSD-3-Clause"
] | null | null | null | config/prod.exs | ajanes/booster-ui | 63a1cd32f873d11ff2448666692c2f637dfc466b | [
"CC-BY-3.0",
"BSD-3-Clause"
] | null | null | null | config/prod.exs | ajanes/booster-ui | 63a1cd32f873d11ff2448666692c2f637dfc466b | [
"CC-BY-3.0",
"BSD-3-Clause"
] | null | null | null | use Mix.Config
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# VisualizerWeb.Endpoint.init/2 when load_from_system_env is
# true. Any dynamic configuration should be done there.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
# The URL host below is a placeholder; the actual HTTP settings are supplied
# at runtime inside VisualizerWeb.Endpoint.init/2 (load_from_system_env: true).
config :visualizer, VisualizerWeb.Endpoint,
  load_from_system_env: true,
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :visualizer, VisualizerWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [:inet6,
# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :visualizer, VisualizerWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :visualizer, VisualizerWeb.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 33.846154 | 67 | 0.725 |
ff75bf294cac90b9f7e380a0c0a84edc95fac2c1 | 3,875 | ex | Elixir | lib/twirp/client/http.ex | rahmatullah5/twirp-elixir | 9177cda4beb2fc89efb0a0c9239ff4d28a6a6ce6 | [
"Apache-2.0"
] | null | null | null | lib/twirp/client/http.ex | rahmatullah5/twirp-elixir | 9177cda4beb2fc89efb0a0c9239ff4d28a6a6ce6 | [
"Apache-2.0"
] | null | null | null | lib/twirp/client/http.ex | rahmatullah5/twirp-elixir | 9177cda4beb2fc89efb0a0c9239ff4d28a6a6ce6 | [
"Apache-2.0"
] | null | null | null | defmodule Twirp.Client.HTTP do
  @moduledoc false
  # This module handles the internals of making RPC calls. We delegate to this
  # from the actual client module cuz otherwise the client module is a pita
  # to understand due to the macros and functions its creating.
  alias Twirp.Encoder
  alias Twirp.Error
  # Starts the configured HTTP adapter module with the given options.
  def start_link(mod, opts) do
    mod.start_link(opts)
  end
  # Performs a single RPC over HTTP: encodes the request per the context's
  # content type, POSTs it to "<service_url>/<method>", and maps outcomes to
  # Twirp values:
  #   * adapter timeout      -> :deadline_exceeded
  #   * other adapter errors -> :unavailable / :internal
  #   * non-200 status       -> decoded Twirp error (see build_error/2)
  #   * 200                  -> decoded response (see handle_success/3)
  def call(mod, client, ctx, rpc) do
    path = "#{rpc.service_url}/#{rpc.method}"
    content_type = ctx.content_type
    encoded_payload = Encoder.encode(rpc.req, rpc.input_type, content_type)
    case mod.request(client, ctx, path, encoded_payload) do
      {:error, %{reason: :timeout}} ->
        meta = %{error_type: "timeout"}
        msg = "Deadline to receive data from the service was exceeded"
        {:error, Error.deadline_exceeded(msg, meta)}
      {:error, %{reason: reason}} ->
        meta = %{error_type: "#{reason}"}
        {:error, Error.unavailable("Service is down", meta)}
      {:error, e} ->
        meta = %{error_type: "#{inspect e}"}
        {:error, Error.internal("Unhandled client error", meta)}
      {:ok, %{status: status}=resp} when status != 200 ->
        {:error, build_error(resp, rpc)}
      {:ok, %{status: 200}=resp} ->
        handle_success(resp, rpc, content_type)
    end
  end
  # Decodes a 200 response, but only when the server echoed back the content
  # type we requested; any other content type is an internal error.
  defp handle_success(resp, rpc, content_type) do
    resp_content_type = resp_header(resp, "content-type")
    if resp_content_type && String.starts_with?(resp_content_type, content_type) do
      Encoder.decode(resp.body, rpc.output_type, content_type)
    else
      {:error, Error.internal(~s|Expected response Content-Type "#{content_type}" but found #{resp_content_type || "nil"}|)}
    end
  end
  # Converts a non-200 response into a Twirp error. Redirects are never valid
  # for Twirp, so they become :internal errors; otherwise the body should be a
  # JSON Twirp error, and anything that is not one is attributed to an
  # intermediary (proxy/load balancer) via intermediary_error/3.
  defp build_error(resp, _rpc) do
    status = resp.status
    cond do
      http_redirect?(status) ->
        location = resp_header(resp, "location")
        meta = %{
          http_error_from_intermediary: "true",
          not_a_twirp_error_because: "Redirects not allowed on Twirp requests",
          status_code: Integer.to_string(status),
          location: location,
        }
        msg = "Unexpected HTTP Redirect from location=#{location}"
        Error.internal(msg, meta)
      true ->
        case Encoder.decode_json(resp.body) do
          {:ok, %{"code" => code, "msg" => msg}=error} ->
            if Error.valid_code?(code) do
              # Its safe to convert to an atom here since all the codes are already
              # created and loaded. If we explode we explode.
              Error.new(String.to_existing_atom(code), msg, error["meta"] || %{})
            else
              Error.internal("Invalid Twirp error code: #{code}", invalid_code: code, body: resp.body)
            end
          {:ok, _} ->
            msg = "Response is JSON but it has no \"code\" attribute"
            intermediary_error(status, msg, resp.body)
          {:error, _} ->
            intermediary_error(status, "Response is not JSON", resp.body)
        end
    end
  end
  # Maps an intermediary's HTTP status onto the closest Twirp error code; the
  # meta records why the response was not treated as a Twirp error.
  defp intermediary_error(status, reason, body) do
    meta = %{
      http_error_from_intermediary: "true",
      not_a_twirp_error_because: reason,
      status_code: Integer.to_string(status),
      body: body
    }
    case status do
      400 -> Error.internal("internal", meta)
      401 -> Error.unauthenticated("unauthenticated", meta)
      403 -> Error.permission_denied("permission denied", meta)
      404 -> Error.bad_route("bad route", meta)
      s when s in [429, 502, 503, 504] -> Error.unavailable("unavailable", meta)
      _ -> Error.unknown("unknown", meta)
    end
  end
  # True for any 3xx status code.
  defp http_redirect?(status) do
    300 <= status && status <= 399
  end
  # Returns the first header value with the given name, or nil when absent.
  # NOTE(review): the comparison is case-sensitive; assumes the adapter
  # normalizes header names to lowercase — confirm.
  defp resp_header(resp, header) do
    case Enum.find(resp.headers, fn {h, _} -> h == header end) do
      {^header, value} ->
        value
      _ ->
        nil
    end
  end
end
ff75cde50c09d39cdb3f3383fbcb1313c5714f83 | 363 | ex | Elixir | web/controllers/cluster_controller.ex | holandes22/rotterdam | d8b56079638c15a8492c08a6859ed14413163e62 | [
"MIT"
] | null | null | null | web/controllers/cluster_controller.ex | holandes22/rotterdam | d8b56079638c15a8492c08a6859ed14413163e62 | [
"MIT"
] | null | null | null | web/controllers/cluster_controller.ex | holandes22/rotterdam | d8b56079638c15a8492c08a6859ed14413163e62 | [
"MIT"
] | null | null | null | defmodule Rotterdam.ClusterController do
use Rotterdam.Web, :controller
alias Rotterdam.ClusterManager
def index(conn, _params) do
cluster = ClusterManager.cluster()
render conn, "index.json", cluster: cluster
end
def connect(conn, _params) do
cluster = ClusterManager.connect()
render conn, "index.json", cluster: cluster
end
end
| 21.352941 | 47 | 0.730028 |
ff75ee95bb4303f451426f4a5e68ed13ecc19382 | 195 | ex | Elixir | lib/ast/stmt.ex | guilhermeleobas/elox | 39873aaff02c2e8ca54dd82572b4c774dd1ac8c7 | [
"MIT"
] | 5 | 2018-12-28T21:55:02.000Z | 2022-02-13T17:55:29.000Z | lib/ast/stmt.ex | guilhermeleobas/elox | 39873aaff02c2e8ca54dd82572b4c774dd1ac8c7 | [
"MIT"
] | 2 | 2019-01-16T16:13:13.000Z | 2019-02-06T01:45:51.000Z | lib/ast/stmt.ex | guilhermeleobas/elox | 39873aaff02c2e8ca54dd82572b4c774dd1ac8c7 | [
"MIT"
] | null | null | null | defmodule Lox.Ast.Stmt do
@enforce_keys [:expr]
defstruct [:expr]
defimpl String.Chars, for: Lox.Ast.Stmt do
def to_string(stmt) do
Kernel.to_string(stmt.expr)
end
end
end
| 17.727273 | 44 | 0.682051 |
ff75fda788d7baf791ada099c2b2fafe0b04d3f5 | 230 | ex | Elixir | lib/slack_coder/repo.ex | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 15 | 2015-09-23T16:03:28.000Z | 2018-12-04T21:48:04.000Z | lib/slack_coder/repo.ex | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 27 | 2016-01-12T16:44:31.000Z | 2017-10-13T16:09:36.000Z | lib/slack_coder/repo.ex | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 4 | 2016-09-01T12:08:24.000Z | 2017-09-21T15:07:57.000Z | defmodule SlackCoder.Repo do
use Ecto.Repo, otp_app: :slack_coder
use Scrivener, max_page_size: 100
# Fun addition
def count(queryable) do
import Ecto.Query
one(from q in queryable, select: count(q.id))
end
end
| 20.909091 | 49 | 0.721739 |
ff762c3d5d98da0d98ec7b3c0a9953bd97380da1 | 405 | ex | Elixir | lib/oli/authoring/authors/project_role.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | lib/oli/authoring/authors/project_role.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | lib/oli/authoring/authors/project_role.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z | defmodule Oli.Authoring.Authors.ProjectRole do
use Ecto.Schema
import Ecto.Changeset
def role_id,
do: %{
owner: 1,
contributor: 2
}
schema "project_roles" do
field :type, :string
timestamps(type: :utc_datetime)
end
@doc false
def changeset(project_role, attrs \\ %{}) do
project_role
|> cast(attrs, [:type])
|> validate_required([:type])
end
end
| 17.608696 | 46 | 0.639506 |
ff76332b097bf679f1ea7d2238dab4b69a46a115 | 1,805 | ex | Elixir | lib/zaryn_web/telemetry.ex | Arvandazr/zaryn | 748805297b399358d28fbcb7ced7588e40f90f03 | [
"Apache-2.0"
] | 1 | 2020-01-04T11:24:44.000Z | 2020-01-04T11:24:44.000Z | lib/zaryn_web/telemetry.ex | Arvandazr/zaryn | 748805297b399358d28fbcb7ced7588e40f90f03 | [
"Apache-2.0"
] | null | null | null | lib/zaryn_web/telemetry.ex | Arvandazr/zaryn | 748805297b399358d28fbcb7ced7588e40f90f03 | [
"Apache-2.0"
] | null | null | null | defmodule ZarynWeb.Telemetry do
  # Supervises the telemetry poller and declares the metrics collected for
  # Phoenix, the zaryn Ecto repo, and the BEAM VM.
  use Supervisor
  import Telemetry.Metrics
  # Starts the supervisor; `arg` is forwarded to init/1 (currently unused).
  def start_link(arg) do
    Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
  end
  @impl true
  def init(_arg) do
    children = [
      # Telemetry poller will execute the given period measurements
      # every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
      {:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
      # Add reporters as children of your supervision tree.
      # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
    ]
    Supervisor.init(children, strategy: :one_for_one)
  end
  # Metric definitions consumed by telemetry reporters.
  def metrics do
    [
      # Phoenix Metrics
      summary("phoenix.endpoint.stop.duration",
        unit: {:native, :millisecond}
      ),
      summary("phoenix.router_dispatch.stop.duration",
        tags: [:route],
        unit: {:native, :millisecond}
      ),
      # Database Metrics
      summary("zaryn.repo.query.total_time", unit: {:native, :millisecond}),
      summary("zaryn.repo.query.decode_time", unit: {:native, :millisecond}),
      summary("zaryn.repo.query.query_time", unit: {:native, :millisecond}),
      summary("zaryn.repo.query.queue_time", unit: {:native, :millisecond}),
      summary("zaryn.repo.query.idle_time", unit: {:native, :millisecond}),
      # VM Metrics
      summary("vm.memory.total", unit: {:byte, :kilobyte}),
      summary("vm.total_run_queue_lengths.total"),
      summary("vm.total_run_queue_lengths.cpu"),
      summary("vm.total_run_queue_lengths.io")
    ]
  end
  # Measurements invoked by the poller every period; empty for now.
  defp periodic_measurements do
    [
      # A module, function and arguments to be invoked periodically.
      # This function must call :telemetry.execute/3 and a metric must be added above.
      # {ZarynWeb, :count_users, []}
    ]
  end
end
| 32.232143 | 86 | 0.669252 |
ff767d8d78256cc16dcea6a5ca183afcb682c53a | 1,796 | ex | Elixir | test/support/structs.ex | facundokantox/vela | 5c7163af4188611ffa4703a3d6346156a914e438 | [
"MIT"
] | 9 | 2020-05-16T07:29:28.000Z | 2021-02-14T23:12:31.000Z | test/support/structs.ex | facundokantox/vela | 5c7163af4188611ffa4703a3d6346156a914e438 | [
"MIT"
] | 2 | 2020-07-12T09:40:04.000Z | 2021-01-29T06:28:33.000Z | test/support/structs.ex | facundokantox/vela | 5c7163af4188611ffa4703a3d6346156a914e438 | [
"MIT"
] | 1 | 2020-11-27T09:21:45.000Z | 2020-11-27T09:21:45.000Z | defmodule Test.Vela.Struct do
  @moduledoc false
  use Boundary, deps: [Vela]
  alias Test.Vela.Struct, as: Me
  alias Vela.Validator
  # Three series exercising different validator declaration styles: a
  # behaviour module (:series1), a 2-arity capture (:series2), and a custom
  # sorter (:series3).
  use Vela,
    series1: [limit: 3, validator: Me, errors: 1],
    series2: [limit: 2, validator: &Me.valid_2?/2],
    series3: [limit: 2, sorter: &Me.sort/2]
  @behaviour Validator
  # Behaviour validator used by :series1 — accepts strictly positive values.
  @impl Validator
  def valid?(_serie, value), do: value > 0
  # Capture-style validator for :series2 — accepts strictly negative values.
  def valid_2?(_serie, value), do: value < 0
  # Ascending sorter for :series3.
  def sort(v1, v2), do: v1 <= v2
end
defmodule Test.Vela.Struct2Checkers do
  @moduledoc false

  use Boundary

  # Validator: accepts any integer value.
  def good_integer(value), do: is_integer(value)

  # Validator: only the :dates serie may hold Date structs.
  def good_date(serie, value), do: serie == :dates and match?(%Date{}, value)

  # Sorter/comparator: strict ascending order on two dates.
  def compare_dates(%Date{} = earlier, %Date{} = later) do
    Date.compare(earlier, later) == :lt
  end

  # Sorter: ascending order on the :date field of two maps.
  def compare_maps(%{date: left}, %{date: right}) do
    Date.compare(left, right) == :lt
  end

  # Comparator helper: extracts the :number field used for comparison.
  def extract_number(%{number: number}), do: number

  # Corrector: only the exact integer 42 can be corrected; anything else fails.
  def correct_integer(_serie, _key, value) do
    case value do
      42 -> {:ok, 42}
      _other -> :error
    end
  end
end
defmodule Nested.Module.T do
  @moduledoc false
  use Boundary, deps: [Vela]
  # Local type referenced as a remote {module, type} tuple in the series
  # config below, exercising Vela's type resolution for nested module names.
  @type int :: integer()
  @vela [foo: [type: {Nested.Module.T, :int}]]
  use Vela, @vela
end
defmodule Test.Vela.Struct2 do
  @moduledoc false
  use Boundary, deps: [Vela, Test.Vela.Struct2Checkers]
  import Test.Vela.Struct2Checkers
  # Exercises a broad option surface: typed series, 1- and 2-arity
  # validators, sorters, comparators, correctors, thresholds and compare_by.
  use Vela,
    integers: [
      type: {Nested.Module.T, :int},
      limit: 3,
      validator: &good_integer/1,
      sorter: &</2,
      threshold: 0.5,
      corrector: &correct_integer/3
    ],
    dates: [
      limit: 3,
      type: Date.t(),
      validator: &good_date/2,
      sorter: &compare_dates/2,
      comparator: &compare_dates/2
    ],
    maps: [limit: 3, compare_by: &extract_number/1, sorter: &compare_maps/2, threshold: 0.5]
end
| 21.902439 | 92 | 0.635857 |
ff76842ee364a5644682fceac495737003953988 | 1,101 | exs | Elixir | bench/line_encoder.exs | manulitic/instream | 5ad521dcf6a456325ba30b4c0dbb40f1f5107f32 | [
"Apache-2.0"
] | null | null | null | bench/line_encoder.exs | manulitic/instream | 5ad521dcf6a456325ba30b4c0dbb40f1f5107f32 | [
"Apache-2.0"
] | null | null | null | bench/line_encoder.exs | manulitic/instream | 5ad521dcf6a456325ba30b4c0dbb40f1f5107f32 | [
"Apache-2.0"
] | null | null | null | defmodule Instream.Benchmark.LineEncoder do
  # Benchmarks `Instream.Encoder.Line.encode/1` against point payloads of
  # varying complexity using Benchee.
  alias Instream.Encoder.Line
  # Fully populated point: fields, tags and an explicit timestamp.
  @point_complete %{
    measurement: "disk_free",
    fields: %{
      value: 442_221_834_240
    },
    tags: %{
      hostname: "server01"
    },
    timestamp: 1_435_362_189_575_692_182
  }
  # Point whose measurement, tag and field values all require escaping.
  @point_escaping %{
    measurement: ~S|"measurement with quotes"|,
    tags: %{
      "tag key with spaces" => ~S|tag,value,with"commas"|
    },
    fields: %{
      ~S|field_key\\\\| => ~S|string field value, only " need be quoted|
    },
    timestamp: nil
  }
  # Minimal point: measurement plus one field, no tags or timestamp.
  @point_simple %{
    measurement: "disk_free",
    fields: %{
      value: 442_221_834_240
    },
    timestamp: nil
  }
  # Runs the benchmark over four input shapes (2s warmup, 10s measurement).
  def run do
    Benchee.run(
      %{
        "Encoding" => &Line.encode/1
      },
      inputs: %{
        "complete" => [@point_complete],
        "escaping" => [@point_escaping],
        "multiple" => List.duplicate(@point_simple, 50),
        "simple" => [@point_simple]
      },
      formatters: [{Benchee.Formatters.Console, comparison: false}],
      warmup: 2,
      time: 10
    )
  end
end
Instream.Benchmark.LineEncoder.run()
| 20.773585 | 72 | 0.574932 |
ff76b0d3d2442e3949d08002ed1732bbe29f9c28 | 5,508 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/products.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/products.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/products.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V14.Api.Products do
  @moduledoc """
  API calls for all endpoints tagged `Products`.
  """
  alias GoogleApi.AdExchangeBuyer.V14.Connection
  alias GoogleApi.Gax.{Request, Response}
  # Version of this client library, attached to each request for telemetry.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")
  @doc """
  Gets the requested product by id.
  ## Parameters
  * `connection` (*type:* `GoogleApi.AdExchangeBuyer.V14.Connection.t`) - Connection to server
  * `product_id` (*type:* `String.t`) - The id for the product to get the head revision for.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
  * `:alt` (*type:* `String.t`) - Data format for the response.
  * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
  * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
  * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
  * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
  * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
  * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.AdExchangeBuyer.V14.Model.Product{}}` on success
  * `{:error, info}` on failure
  """
  @spec adexchangebuyer_products_get(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.AdExchangeBuyer.V14.Model.Product.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def adexchangebuyer_products_get(connection, product_id, optional_params \\ [], opts \\ []) do
    # Whitelist of supported query parameters; anything else is ignored.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/products/{productId}", %{
        "productId" => URI.encode(product_id, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.AdExchangeBuyer.V14.Model.Product{}])
  end
  @doc """
  Gets the requested product.
  ## Parameters
  * `connection` (*type:* `GoogleApi.AdExchangeBuyer.V14.Connection.t`) - Connection to server
  * `optional_params` (*type:* `keyword()`) - Optional parameters
  * `:alt` (*type:* `String.t`) - Data format for the response.
  * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
  * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
  * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
  * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
  * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
  * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  * `:pqlQuery` (*type:* `String.t`) - The pql query used to query for products.
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.AdExchangeBuyer.V14.Model.GetOffersResponse{}}` on success
  * `{:error, info}` on failure
  """
  @spec adexchangebuyer_products_search(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.AdExchangeBuyer.V14.Model.GetOffersResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def adexchangebuyer_products_search(connection, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :pqlQuery => :query
    }
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/products/search", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.AdExchangeBuyer.V14.Model.GetOffersResponse{}])
  end
end
| 42.697674 | 187 | 0.649964 |
ff76d45fd7cfd9b517f1095d6f17fefec1963722 | 626 | ex | Elixir | lib/xtb_client/messages/margin_trade.ex | dsienkiewicz/xtb_client_ex | 550f1144eace28ce9bdb2fa3bcc27e891231487b | [
"MIT"
] | null | null | null | lib/xtb_client/messages/margin_trade.ex | dsienkiewicz/xtb_client_ex | 550f1144eace28ce9bdb2fa3bcc27e891231487b | [
"MIT"
] | null | null | null | lib/xtb_client/messages/margin_trade.ex | dsienkiewicz/xtb_client_ex | 550f1144eace28ce9bdb2fa3bcc27e891231487b | [
"MIT"
] | null | null | null | defmodule XtbClient.Messages.MarginTrade do
@moduledoc """
Info about calculated margin in account currency.
## Properties
- `margin` value of margin.
## Handled Api methods
- `getMarginTrade`
"""
@type t :: %__MODULE__{
margin: float()
}
@enforce_keys [:margin]
@derive Jason.Encoder
defstruct margin: 0.0
def new(%{"margin" => margin}) when is_number(margin) do
%__MODULE__{
margin: margin
}
end
def match(method, data) when method in ["getMarginTrade"] do
{:ok, __MODULE__.new(data)}
end
def match(_method, _data) do
{:no_match}
end
end
| 17.885714 | 62 | 0.634185 |
ff76d5f15c8a74a422ef59dfddec3535b22b583e | 11,619 | ex | Elixir | lib/changelog_web/router.ex | rizalgowandy/changelog.com | 3f8a474afc254ecf74a48c5f75083d84eec1d972 | [
"MIT"
] | 1 | 2020-09-04T03:25:40.000Z | 2020-09-04T03:25:40.000Z | lib/changelog_web/router.ex | rizalgowandy/changelog.com | 3f8a474afc254ecf74a48c5f75083d84eec1d972 | [
"MIT"
] | null | null | null | lib/changelog_web/router.ex | rizalgowandy/changelog.com | 3f8a474afc254ecf74a48c5f75083d84eec1d972 | [
"MIT"
] | null | null | null | defmodule ChangelogWeb.Router do
use ChangelogWeb, :router
use Plug.ErrorHandler
alias ChangelogWeb.Plug
  # Dev-only mailbox preview: Bamboo's sent-email viewer is mounted at
  # /sent_emails. `Mix.env/0` is evaluated at compile time, so this route
  # does not exist at all in other environments.
  if Mix.env() == :dev do
    forward "/sent_emails", Bamboo.SentEmailViewerPlug
  end
  # should be used before :browser pipeline to avoid auth and cache headers
  pipeline :public do
    plug Plug.LoadPodcasts
    plug Plug.Redirects
    plug Plug.VanityDomains
  end
  # Swaps in the admin layout; paired with :browser for /admin routes.
  pipeline :admin do
    plug Plug.AdminLayoutPlug
  end
  # Standard browser stack: session, flash, security headers, auth, caching.
  pipeline :browser do
    plug :accepts, ["html", "js"]
    plug :fetch_session
    plug Plug.Turbolinks
    plug :fetch_flash
    plug :put_secure_browser_headers
    plug Plug.Authenticate, repo: Changelog.Repo
    plug Plug.AllowFraming
    # must come after Plug.Authenticate
    plug Plug.CacheControl
  end
  # XML feeds (RSS, sitemap) with cache headers.
  pipeline :feed do
    plug :accepts, ["xml"]
    plug Plug.CacheControl
  end
  # JSON Feed variant of :feed.
  pipeline :json_feed do
    plug :accepts, ["json"]
    plug Plug.CacheControl
  end
  # Plain JSON API; no session, flash, or cache handling.
  pipeline :api do
    plug :accepts, ["json"]
  end
  # OAuth sign-in routes. The `for` comprehension runs at compile time and
  # generates request/callback routes for each supported provider
  # (github, twitter), all named "<provider>_auth".
  scope "/auth", ChangelogWeb do
    pipe_through [:public, :browser]
    for provider <- ~w(github twitter) do
      get "/#{provider}", AuthController, :request, as: "#{provider}_auth"
      get "/#{provider}/callback", AuthController, :callback, as: "#{provider}_auth"
      post "/#{provider}/callback", AuthController, :callback, as: "#{provider}_auth"
    end
  end
  # Admin back-office. All helpers are prefixed with "admin_" (as: :admin)
  # and the scope uses the admin layout plus the full browser stack.
  scope "/admin", ChangelogWeb.Admin, as: :admin do
    pipe_through [:admin, :browser]
    get "/", PageController, :index
    get "/purge", PageController, :purge
    get "/reach", PageController, :reach
    get "/search", SearchController, :all
    get "/search/:type", SearchController, :one
    resources "/benefits", BenefitController, except: [:show]
    resources "/topics", TopicController, except: [:show]
    # News item management, including per-item subscription listings.
    get "/news", NewsItemController, :index
    resources "/news/items", NewsItemController, except: [:show] do
      resources "/subscriptions", NewsItemSubscriptionController,
        as: :subscription,
        only: [:index]
    end
    delete "/news/items/:id/decline", NewsItemController, :decline, as: :news_item
    post "/news/items/:id/unpublish", NewsItemController, :unpublish, as: :news_item
    post "/news/items/:id/move", NewsItemController, :move, as: :news_item
    resources "/news/comments", NewsItemCommentController, except: [:show, :new, :create]
    resources "/news/sources", NewsSourceController, except: [:show]
    get "/news/sponsorships/schedule", NewsSponsorshipController, :schedule
    resources "/news/sponsorships", NewsSponsorshipController
    resources "/news/issues", NewsIssueController, except: [:show]
    post "/news/issues/:id/publish", NewsIssueController, :publish, as: :news_issue
    post "/news/issues/:id/unpublish", NewsIssueController, :unpublish, as: :news_issue
    resources "/people", PersonController
    post "/people/:id/slack", PersonController, :slack, as: :person
    resources "/metacasts", MetacastController
    # Podcast management with nested episode and episode-request routes.
    resources "/podcasts", PodcastController do
      resources "/episodes", EpisodeController
      get "/performance", EpisodeController, :performance, as: :performance
      post "/episodes/:id/publish", EpisodeController, :publish, as: :episode
      post "/episodes/:id/unpublish", EpisodeController, :unpublish, as: :episode
      post "/episodes/:id/transcript", EpisodeController, :transcript, as: :episode
      resources "/episode_requests", EpisodeRequestController
      put "/episode_requests/:id/decline", EpisodeRequestController, :decline,
        as: :episode_request
      put "/episode_requests/:id/fail", EpisodeRequestController, :fail, as: :episode_request
      put "/episode_requests/:id/pend", EpisodeRequestController, :pend, as: :episode_request
      resources "/subscriptions", PodcastSubscriptionController, as: :subscription, only: [:index]
    end
    resources "/posts", PostController, except: [:show]
    post "/posts/:id/publish", PostController, :publish, as: :post
    post "/posts/:id/unpublish", PostController, :unpublish, as: :post
    resources "/sponsors", SponsorController
    resources "/mailers", MailerPreviewController, only: [:index, :show]
  end
  # Public JSON API (currently only oEmbed discovery).
  scope "/api", ChangelogWeb, as: :api do
    pipe_through [:api]
    get "/oembed", ApiController, :oembed
  end
  # Inbound GitHub webhook events.
  scope "/github", ChangelogWeb do
    pipe_through [:api]
    post "/event", GithubController, :event
  end
  # Slack integration: countdown slash-command (GET and POST) plus events.
  scope "/slack", ChangelogWeb do
    pipe_through [:api]
    get "/countdown/:slug", SlackController, :countdown
    post "/countdown/:slug", SlackController, :countdown
    post "/event", SlackController, :event
  end
  # XML feeds and the sitemap. Note "/:slug/feed" serves any podcast's feed,
  # so it must stay in this scope to be matched before HTML routes.
  scope "/", ChangelogWeb do
    pipe_through [:feed]
    get "/feed", FeedController, :news
    get "/feed/titles", FeedController, :news_titles
    get "/posts/feed", FeedController, :posts
    get "/sitemap.xml", FeedController, :sitemap
    get "/:slug/feed", FeedController, :podcast
    get "/plusplus/:slug/feed", FeedController, :plusplus
    get "/metacast/:slug/feed", FeedController, :metacast
  end
  # JSON Feed (jsonfeed.org-style) for the news firehose.
  scope "/", ChangelogWeb do
    pipe_through [:json_feed]
    get "/feed.json", JsonFeedController, :news
  end
  # Main public site. Route order matters: the catch-all "/:slug" and
  # "/:podcast/:slug" routes are declared last so every literal path above
  # them wins first.
  scope "/", ChangelogWeb do
    pipe_through [:public, :browser]
    # Account creation / newsletter subscription / public profiles.
    get "/join", PersonController, :join, as: :person
    post "/join", PersonController, :join, as: :person
    get "/subscribe", PersonController, :subscribe, as: :person
    get "/subscribe/:to", PersonController, :subscribe, as: :person
    post "/subscribe", PersonController, :subscribe, as: :person
    get "/person/:handle", PersonController, :show, as: :person
    get "/person/:handle/news", PersonController, :news, as: :person
    get "/person/:handle/podcasts", PersonController, :podcasts, as: :person
    # "Home" (signed-in user) area under /~.
    resources "/~", HomeController, only: [:show, :update], singleton: true
    get "/~/profile", HomeController, :profile
    get "/~/account", HomeController, :account
    get "/~/nope/:token/:type/:id", HomeController, :opt_out
    post "/~/slack", HomeController, :slack
    post "/~/subscribe", HomeController, :subscribe
    post "/~/unsubscribe", HomeController, :unsubscribe
    # Passwordless (token link) sign in/out.
    get "/in", AuthController, :new, as: :sign_in
    post "/in", AuthController, :new, as: :sign_in
    get "/in/:token", AuthController, :create, as: :sign_in
    get "/out", AuthController, :delete, as: :sign_out
    # News: the root page is the news index.
    get "/", NewsItemController, :index, as: :root
    get "/news/submit", NewsItemController, :new
    get "/news/fresh", NewsItemController, :fresh
    get "/news/top", NewsItemController, :top
    get "/news/top/week", NewsItemController, :top_week
    get "/news/top/month", NewsItemController, :top_month
    get "/news/top/all", NewsItemController, :top_all
    resources "/news", NewsItemController, only: [:show, :create], as: :news_item
    get "/news/:id/preview", NewsItemController, :preview, as: :news_item
    get "/news/:id/visit", NewsItemController, :visit, as: :news_item
    get "/news/:id/subscribe", NewsItemController, :subscribe, as: :news_item
    get "/news/:id/unsubscribe", NewsItemController, :unsubscribe, as: :news_item
    post "/news/impress", NewsItemController, :impress, as: :news_item
    # Ads and sponsored items share NewsAdController under two URL spaces.
    resources "/ads", NewsAdController, only: [:show], as: :news_ad
    post "/ad/impress", NewsAdController, :impress, as: :news_ad
    get "/ad/:id/visit", NewsAdController, :visit, as: :news_ad
    resources "/sponsored", NewsAdController, only: [:show], as: :news_sponsored
    post "/sponsored/impress", NewsAdController, :impress, as: :news_sponsored
    get "/sponsored/:id/visit", NewsAdController, :visit, as: :news_sponsored
    get "/news/issues/:id", NewsIssueController, :show, as: :news_issue
    get "/news/issues/:id/preview", NewsIssueController, :preview, as: :news_issue
    resources "/news/comments", NewsItemCommentController, only: [:create, :update]
    post "/news/comments/preview", NewsItemCommentController, :preview, as: :news_item_comment
    resources "/benefits", BenefitController, only: [:index]
    resources "/posts", PostController, only: [:index, :show]
    get "/posts/:id/preview", PostController, :preview, as: :post
    get "/sources", NewsSourceController, :index, as: :news_source
    get "/source/:slug", NewsSourceController, :show, as: :news_source
    get "/topics", TopicController, :index, as: :topic
    get "/topic/:slug", TopicController, :show, as: :topic
    get "/topic/:slug/news", TopicController, :news, as: :topic
    get "/topic/:slug/podcasts", TopicController, :podcasts, as: :topic
    get "/live", LiveController, :index
    get "/live/ical", LiveController, :ical
    get "/live/ical/:slug", LiveController, :ical
    get "/live/status", LiveController, :status
    get "/live/:id", LiveController, :show
    get "/search", SearchController, :search
    # static pages
    get "/about", PageController, :about
    get "/coc", PageController, :coc
    get "/community", PageController, :community
    get "/contact", PageController, :contact
    get "/contribute", PageController, :contribute
    get "/films", PageController, :films
    get "/films/gophercon-2015", PageController, :films_gophercon_2015
    get "/films/gophercon-2016", PageController, :films_gophercon_2016
    get "/films/gophercon-2017", PageController, :films_gophercon_2017
    get "/guest", PageController, :guest
    get "/guest/:slug", PageController, :guest
    get "/styleguide", PageController, :styleguide
    get "/sponsor", PageController, :sponsor
    get "/sponsor/pricing", PageController, :sponsor_pricing
    get "/sponsor/styles", PageController, :sponsor_styles
    get "/sponsor/details", PageController, :sponsor_details
    get "/sponsor/stories/:slug", PageController, :sponsor_story
    get "/ten", PageController, :ten
    get "/privacy", PageController, :privacy
    get "/terms", PageController, :terms
    get "/++", PageController, :++
    get "/plusplus", PageController, :plusplus
    get "/nightly", PageController, :nightly
    get "/nightly/unsubscribed", PageController, :nightly_unsubscribed
    get "/weekly", PageController, :weekly
    get "/weekly/archive", PageController, :weekly_archive
    get "/weekly/unsubscribed", PageController, :weekly_unsubscribed
    get "/request", EpisodeRequestController, :new, as: :episode_request
    get "/request/:slug", EpisodeRequestController, :new, as: :episode_request
    post "/request", EpisodeRequestController, :create, as: :episode_request
    get "/podcasts", PodcastController, :index, as: :podcast
    # Compile-time generated podcast subpages, e.g. /:slug/popular.
    for subpage <- ~w(popular recommended upcoming)a do
      get "/:slug/#{subpage}", PodcastController, subpage, as: :podcast
    end
    get "/:podcast/:slug", EpisodeController, :show, as: :episode
    # Compile-time generated episode subpages, e.g. /:podcast/:slug/embed.
    for subpage <- ~w(embed preview play share discuss)a do
      get "/:podcast/:slug/#{subpage}", EpisodeController, subpage, as: :episode
    end
    # Catch-all: any remaining top-level slug is treated as a podcast page.
    get "/:slug", PodcastController, :show, as: :podcast
  end
defp handle_errors(_conn, %{reason: %Ecto.NoResultsError{}}), do: true
defp handle_errors(_conn, %{reason: %Phoenix.Router.NoRouteError{}}), do: true
defp handle_errors(_conn, %{reason: %Phoenix.NotAcceptableError{}}), do: true
defp handle_errors(conn, %{kind: kind, reason: reason, stack: stacktrace}) do
headers = Enum.into(conn.req_headers, %{})
reason = Map.delete(reason, :assigns)
Rollbax.report(kind, reason, stacktrace, %{}, %{
"request" => %{
"url" => "#{conn.scheme}://#{conn.host}#{conn.request_path}",
"user_ip" => Map.get(headers, "x-forwarded-for"),
"method" => conn.method,
"headers" => headers,
"params" => conn.params
}
})
end
end
| 39.253378 | 98 | 0.688269 |
ff770fe1d24ad535525cff5437cfa6a20de3de0c | 885 | ex | Elixir | apps/omg_watcher/lib/omg_watcher/api/status.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/lib/omg_watcher/api/status.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/lib/omg_watcher/api/status.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.API.Status do
  @moduledoc """
  Watcher status API: read-only access to the cached watcher status.
  """

  alias OMG.Watcher.API.StatusCache

  @doc """
  Returns the current watcher status, read from the ETS-backed cache.
  """
  @spec get_status() :: {:ok, StatusCache.status()}
  def get_status(), do: {:ok, StatusCache.get()}
end
| 29.5 | 74 | 0.726554 |
ff7728b287b3c18a8051fd6fba29bcccb0dfdfad | 1,718 | exs | Elixir | test/web/controllers/admin/zone_controller_test.exs | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 1 | 2021-12-28T20:57:32.000Z | 2021-12-28T20:57:32.000Z | test/web/controllers/admin/zone_controller_test.exs | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | null | null | null | test/web/controllers/admin/zone_controller_test.exs | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | null | null | null | defmodule Web.Admin.ZoneControllerTest do
use Web.ConnCase
describe "creating a zone" do
test "successful", %{conn: conn} do
{:ok, user} = TestHelpers.create_admin()
conn =
conn
|> assign(:current_user, user)
|> post(Routes.admin_zone_path(conn, :create),
zone: %{
name: "Name",
description: "Description"
}
)
assert redirected_to(conn) =~ ~r/\/admin\/zones\/\d+/
end
test "unsuccessful", %{conn: conn} do
{:ok, user} = TestHelpers.create_admin()
conn =
conn
|> assign(:current_user, user)
|> post(Routes.admin_zone_path(conn, :create),
zone: %{
name: "Name"
}
)
assert html_response(conn, 422)
end
end
describe "updating a zone" do
test "successful", %{conn: conn} do
{:ok, user} = TestHelpers.create_admin()
{:ok, zone} = TestHelpers.create_zone()
conn =
conn
|> assign(:current_user, user)
|> put(Routes.admin_zone_path(conn, :update, zone.id),
zone: %{
name: "Name",
description: "Description"
}
)
assert redirected_to(conn) == Routes.admin_zone_path(conn, :show, zone.id)
end
test "unsuccessful", %{conn: conn} do
{:ok, user} = TestHelpers.create_admin()
{:ok, zone} = TestHelpers.create_zone()
conn =
conn
|> assign(:current_user, user)
|> put(Routes.admin_zone_path(conn, :update, zone.id),
zone: %{
name: "Name",
description: nil
}
)
assert html_response(conn, 422)
end
end
end
| 22.906667 | 80 | 0.530268 |
ff772944fa678a981d55231c77ce830a2b667a27 | 1,305 | ex | Elixir | lib/floki/flat_text.ex | midas-framework/floki | d85b038a1e4c0522ac0f0f9ea9bbedaef07baca2 | [
"MIT"
] | 1,778 | 2015-01-07T14:12:31.000Z | 2022-03-29T22:42:48.000Z | lib/floki/flat_text.ex | midas-framework/floki | d85b038a1e4c0522ac0f0f9ea9bbedaef07baca2 | [
"MIT"
] | 279 | 2015-01-01T15:54:50.000Z | 2022-03-28T18:06:03.000Z | deps/floki/lib/floki/flat_text.ex | adrianomota/blog | ef3b2d2ed54f038368ead8234d76c18983caa75b | [
"MIT"
] | 166 | 2015-04-24T20:48:02.000Z | 2022-03-28T17:29:05.000Z | defmodule Floki.FlatText do
@moduledoc false
# FlatText is a strategy to get text nodes from a HTML tree without search deep
# in the tree. It only gets the text nodes from the first level of nodes.
# Example
# iex> Floki.FlatText.get([{"a", [], ["The meaning of life is...", {"strong", [], ["something else"]}] }])
# "The meaning of life is..."
@type html_tree :: tuple | list
@spec get(html_tree, binary) :: binary
def get(html_nodes, sep \\ "")
def get(html_nodes, sep) when is_list(html_nodes) do
Enum.reduce(html_nodes, "", fn html_node, acc ->
text_from_node(html_node, acc, sep)
end)
end
def get(html_node, sep) do
text_from_node(html_node, "", sep)
end
defp text_from_node({_tag, _attrs, html_nodes}, acc, sep) do
Enum.reduce(html_nodes, acc, fn html_node, acc ->
capture_text(html_node, acc, sep)
end)
end
defp text_from_node(text, "", _sep) when is_binary(text), do: text
defp text_from_node(text, acc, sep) when is_binary(text), do: Enum.join([acc, text], sep)
defp text_from_node(_, acc, _), do: acc
defp capture_text(text, "", _sep) when is_binary(text), do: text
defp capture_text(text, acc, sep) when is_binary(text), do: Enum.join([acc, text], sep)
defp capture_text(_html_node, acc, _), do: acc
end
| 31.071429 | 110 | 0.668966 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.