hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c1ff36f502fea0d385dc2a9c7b9597b0978c24b | 19,934 | ex | Elixir | lib/zaryn/election.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | 1 | 2021-07-06T19:47:14.000Z | 2021-07-06T19:47:14.000Z | lib/zaryn/election.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | null | null | null | lib/zaryn/election.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | null | null | null | defmodule Zaryn.Election do
@moduledoc """
Provides a random and rotating node election based on heuristic algorithms
and constraints to ensure a fair distributed processing and data storage among its network.
"""
alias Zaryn.BeaconChain
alias Zaryn.Crypto
alias __MODULE__.Constraints
alias __MODULE__.StorageConstraints
alias __MODULE__.ValidationConstraints
alias Zaryn.P2P.Node
alias Zaryn.TransactionChain.Transaction
alias Zaryn.Utils
@doc """
Create a seed to sort the validation nodes. This will produce a proof for the election
"""
@spec validation_nodes_election_seed_sorting(Transaction.t(), DateTime.t()) :: binary()
def validation_nodes_election_seed_sorting(tx = %Transaction{}, timestamp = %DateTime{}) do
  # Digest of the transaction in its pending (pre-validation) form.
  pending_digest = Crypto.hash(Transaction.serialize(Transaction.to_pending(tx)))

  # Signing with the daily nonce key valid at `timestamp` makes the sorting
  # seed verifiable by the authorized nodes (see valid_proof_of_election?/3).
  Crypto.sign_with_daily_nonce_key(pending_digest, timestamp)
end
@doc """
Verify if a proof of election is valid according to transaction and the given public key
"""
@spec valid_proof_of_election?(Transaction.t(), binary, Crypto.key()) :: boolean
def valid_proof_of_election?(tx = %Transaction{}, proof_of_election, daily_nonce_public_key)
    when is_binary(proof_of_election) and is_binary(daily_nonce_public_key) do
  # Recompute the digest the proof was signed over (pending transaction hash)
  # and verify the signature against the given daily nonce public key.
  signed_data = Crypto.hash(Transaction.serialize(Transaction.to_pending(tx)))

  Crypto.verify?(proof_of_election, signed_data, daily_nonce_public_key)
end
@doc """
Get the elected validation nodes for a given transaction and a list of nodes.
Each nodes public key is rotated with the sorting seed
to provide an unpredictable order yet reproducible.
To achieve an unpredictable, global but locally executed, verifiable and reproducible
election, each election is based on:
- an unpredictable element: hash of transaction
- an element known only by authorized nodes: daily nonce
- an element difficult to predict: last public key of the node
- the computation of the rotating keys
Then each nodes selection is reduce via heuristic constraints via `ValidationConstraints`
## Examples
iex> %Transaction{
...> address:
...> <<0, 120, 195, 32, 77, 84, 215, 196, 116, 215, 56, 141, 40, 54, 226, 48, 66, 254, 119,
...> 11, 73, 77, 243, 125, 62, 94, 133, 67, 9, 253, 45, 134, 89>>,
...> type: :transfer,
...> data: %TransactionData{},
...> previous_public_key:
...> <<0, 239, 240, 90, 182, 66, 190, 68, 20, 250, 131, 83, 190, 29, 184, 177, 52, 166, 207,
...> 80, 193, 110, 57, 6, 199, 152, 184, 24, 178, 179, 11, 164, 150>>,
...> previous_signature:
...> <<200, 70, 0, 25, 105, 111, 15, 161, 146, 188, 100, 234, 147, 62, 127, 8, 152, 60, 66,
...> 169, 113, 255, 51, 112, 59, 200, 61, 63, 128, 228, 111, 104, 47, 15, 81, 185, 179, 36,
...> 59, 86, 171, 7, 138, 199, 203, 252, 50, 87, 160, 107, 119, 131, 121, 11, 239, 169, 99,
...> 203, 76, 159, 158, 243, 133, 133>>,
...> origin_signature:
...> <<162, 223, 100, 72, 17, 56, 99, 212, 78, 132, 166, 81, 127, 91, 214, 143, 221, 32, 106,
...> 189, 247, 64, 183, 27, 55, 142, 254, 72, 47, 215, 34, 108, 233, 55, 35, 94, 49, 165,
...> 180, 248, 229, 160, 229, 220, 191, 35, 80, 127, 213, 240, 195, 185, 165, 89, 172, 97,
...> 170, 217, 57, 254, 125, 127, 62, 169>>
...> }
...> |> Election.validation_nodes(
...> "daily_nonce_proof",
...> [
...> %Node{last_public_key: "node1", geo_patch: "AAA"},
...> %Node{last_public_key: "node2", geo_patch: "DEF"},
...> %Node{last_public_key: "node3", geo_patch: "AA0"},
...> %Node{last_public_key: "node4", geo_patch: "3AC"},
...> %Node{last_public_key: "node5", geo_patch: "F10"},
...> %Node{last_public_key: "node6", geo_patch: "ECA"}
...> ],
...> [
...> %Node{last_public_key: "node10", geo_patch: "AAA"},
...> %Node{last_public_key: "node11", geo_patch: "DEF"},
...> %Node{last_public_key: "node13", geo_patch: "AA0"},
...> %Node{last_public_key: "node4", geo_patch: "3AC"},
...> %Node{last_public_key: "node8", geo_patch: "F10"},
...> %Node{last_public_key: "node9", geo_patch: "ECA"}
...> ],
...> %ValidationConstraints{ validation_number: fn _, 6 -> 3 end, min_geo_patch: fn -> 2 end }
...> )
[
%Node{last_public_key: "node3", geo_patch: "AA0"},
%Node{last_public_key: "node5", geo_patch: "F10"},
%Node{last_public_key: "node2", geo_patch: "DEF"},
%Node{last_public_key: "node6", geo_patch: "ECA"},
]
"""
@spec validation_nodes(
        pending_transaction :: Transaction.t(),
        sorting_seed :: binary(),
        authorized_nodes :: list(Node.t()),
        storage_nodes :: list(Node.t()),
        constraints :: ValidationConstraints.t()
      ) :: list(Node.t())
def validation_nodes(
      tx = %Transaction{},
      sorting_seed,
      authorized_nodes,
      storage_nodes,
      %ValidationConstraints{
        validation_number: validation_number_fun,
        min_geo_patch: min_geo_patch_fun
      } \\ ValidationConstraints.new()
    )
    when is_binary(sorting_seed) and is_list(authorized_nodes) and is_list(storage_nodes) do
  # Captured so the election duration can be reported via :telemetry below.
  start = System.monotonic_time()

  # Evaluate validation constraints
  nb_validations = validation_number_fun.(tx, length(authorized_nodes))
  min_geo_patch = min_geo_patch_fun.()

  nodes =
    if length(authorized_nodes) <= nb_validations do
      # Too few candidates to be selective: every authorized node validates.
      authorized_nodes
    else
      do_validation_node_election(
        authorized_nodes,
        tx,
        sorting_seed,
        nb_validations,
        min_geo_patch,
        storage_nodes
      )
    end

  :telemetry.execute([:zaryn, :election, :validation_nodes], %{
    duration: System.monotonic_time() - start
  })

  nodes
end
# Sorts the authorized nodes deterministically (key rotation using the sorting
# seed and the pending transaction hash), then greedily accepts nodes until
# the validation constraints are satisfied — skipping storage nodes and nodes
# from an already-covered geo zone. A final refining pass tops the selection
# up if fewer than `nb_validations` nodes were accepted.
defp do_validation_node_election(
       authorized_nodes,
       tx,
       sorting_seed,
       nb_validations,
       min_geo_patch,
       storage_nodes
     ) do
  tx_hash =
    tx
    |> Transaction.to_pending()
    |> Transaction.serialize()
    |> Crypto.hash()

  authorized_nodes
  |> sort_validation_nodes_by_key_rotation(sorting_seed, tx_hash)
  |> Enum.reduce_while(
    %{nb_nodes: 0, nodes: [], zones: MapSet.new()},
    fn node = %Node{geo_patch: geo_patch, last_public_key: last_public_key}, acc ->
      if validation_constraints_satisfied?(
           nb_validations,
           min_geo_patch,
           acc.nb_nodes,
           acc.zones
         ) do
        {:halt, acc}
      else
        # Discard node in the first place if it's already a storage node and
        # if another node already present in the geo zone to ensure geo distribution of validations
        # Then if requires the node may be elected during a refining operation
        # to ensure the require number of validations
        cond do
          Utils.key_in_node_list?(storage_nodes, last_public_key) ->
            {:cont, acc}

          # Zones are keyed by the first character of the geo patch.
          MapSet.member?(acc.zones, String.first(geo_patch)) ->
            {:cont, acc}

          true ->
            new_acc =
              acc
              |> Map.update!(:nb_nodes, &(&1 + 1))
              |> Map.update!(:nodes, &[node | &1])
              |> Map.update!(:zones, &MapSet.put(&1, String.first(geo_patch)))

            {:cont, new_acc}
        end
      end
    end
  )
  |> Map.get(:nodes)
  # Nodes were prepended during the reduce; reverse to restore election order.
  |> Enum.reverse()
  |> refine_necessary_nodes(authorized_nodes, nb_validations)
end
# Selection may stop once strictly more zones than `min_geo_patch` and
# strictly more nodes than `nb_validations` have been gathered (the strict
# comparisons match the module doctest, which elects nb_validations + 1 nodes).
defp validation_constraints_satisfied?(nb_validations, min_geo_patch, nb_nodes, zones) do
  enough_zones? = MapSet.size(zones) > min_geo_patch
  enough_nodes? = nb_nodes > nb_validations

  enough_zones? and enough_nodes?
end
# Tops the selection up with additional authorized nodes when the zone-based
# election produced fewer nodes than the required number of validations.
defp refine_necessary_nodes(selected_nodes, authorized_nodes, nb_validations) do
  missing = nb_validations - length(selected_nodes)

  if missing > 0 do
    extra =
      authorized_nodes
      |> Kernel.--(selected_nodes)
      |> Enum.take(missing)

    selected_nodes ++ extra
  else
    selected_nodes
  end
end
@doc """
Get the elected storage nodes for a given transaction address and a list of nodes.
Each nodes first public key is rotated with the storage nonce and the transaction address
to provide an reproducible list of nodes ordered.
To perform the election, the rotating algorithm is based on:
- the transaction address
- an stable known element: storage nonce
- the first public key of each node
- the computation of the rotating keys
From this sorted nodes, a selection is made by reducing it via heuristic constraints via `StorageConstraints`
"""
@spec storage_nodes(address :: binary(), nodes :: list(Node.t()), StorageConstraints.t()) ::
        list(Node.t())
def storage_nodes(_address, _nodes, constraints \\ StorageConstraints.new())

# No candidate nodes: nothing can be elected to store the transaction.
def storage_nodes(_, [], _), do: []

def storage_nodes(
      address,
      nodes,
      %StorageConstraints{
        number_replicas: number_replicas_fun,
        min_geo_patch_average_availability: min_geo_patch_avg_availability_fun,
        min_geo_patch: min_geo_patch_fun
      }
    )
    when is_binary(address) and is_list(nodes) do
  # Captured so the election duration can be reported via :telemetry below.
  start = System.monotonic_time()

  # Evaluate the storage election constraints
  nb_replicas = number_replicas_fun.(nodes)
  min_geo_patch_avg_availability = min_geo_patch_avg_availability_fun.()
  min_geo_patch = min_geo_patch_fun.()

  storage_nodes =
    nodes
    |> sort_storage_nodes_by_key_rotation(address)
    |> Enum.reduce_while(
      %{nb_nodes: 0, zones: %{}, nodes: []},
      fn node = %Node{
           geo_patch: geo_patch,
           average_availability: avg_availability
         },
         acc ->
        if storage_constraints_satisfied?(
             min_geo_patch,
             min_geo_patch_avg_availability,
             nb_replicas,
             acc.nb_nodes,
             acc.zones
           ) do
          {:halt, acc}
        else
          new_acc =
            acc
            # Accumulate each zone's summed average availability, keyed by the
            # first character of the node's geo patch.
            |> Map.update!(:zones, fn zones ->
              Map.update(
                zones,
                String.first(geo_patch),
                avg_availability,
                &(&1 + avg_availability)
              )
            end)
            |> Map.update!(:nb_nodes, &(&1 + 1))
            |> Map.update!(:nodes, &[node | &1])

          {:cont, new_acc}
        end
      end
    )
    |> Map.get(:nodes)
    # Nodes were prepended during the reduce; reverse to restore election order.
    |> Enum.reverse()

  :telemetry.execute(
    [:zaryn, :election, :storage_nodes],
    %{duration: System.monotonic_time() - start}
  )

  storage_nodes
end
# A storage selection is complete once enough distinct geo zones are covered,
# every covered zone reaches the minimum summed average availability, and the
# required number of replicas has been gathered.
defp storage_constraints_satisfied?(
       min_geo_patch,
       min_geo_patch_avg_availability,
       nb_replicas,
       nb_nodes,
       zones
     ) do
  # map_size/1 is O(1); building the key list just to measure it was O(n).
  map_size(zones) >= min_geo_patch and
    Enum.all?(zones, fn {_, avg_availability} ->
      avg_availability >= min_geo_patch_avg_availability
    end) and
    nb_nodes >= nb_replicas
end
# Deterministically shuffles the candidate nodes so the election is
# unpredictable in advance yet reproducible by every authorized node.
# Each node's last public key is "rotated" by hashing it together with the
# transaction hash and the sorting seed (derived from the daily nonce);
# sorting on the rotated keys yields the election order.
# It requires the daily nonce or the storage nonce to be loaded in the Crypto keystore.
defp sort_validation_nodes_by_key_rotation(nodes, sorting_seed, hash) do
  Enum.sort_by(nodes, fn %Node{last_public_key: public_key} ->
    Crypto.hash([public_key, hash, sorting_seed])
  end)
end
# Same rotation-based shuffle as for validation nodes, but keyed on each
# node's *first* public key combined with the storage nonce held in the
# Crypto keystore.
defp sort_storage_nodes_by_key_rotation(nodes, hash) do
  # Note: the rotated key was previously bound to a variable misleadingly
  # named `last_public_key` although it held the node's first public key.
  Enum.sort_by(nodes, fn %Node{first_public_key: first_public_key} ->
    Crypto.hash_with_storage_nonce([first_public_key, hash])
  end)
end
# Runtime constraint configuration is owned by the Constraints module
# (aliased at the top of this file); these are thin delegates.

@doc """
Return the actual constraints for the transaction validation
"""
@spec get_validation_constraints() :: ValidationConstraints.t()
defdelegate get_validation_constraints, to: Constraints

@doc """
Set the new validation constraints
"""
@spec set_validation_constraints(ValidationConstraints.t()) :: :ok
defdelegate set_validation_constraints(constraints), to: Constraints

@doc """
Return the actual constraints for the transaction storage
"""
@spec get_storage_constraints() :: StorageConstraints.t()
defdelegate get_storage_constraints(), to: Constraints

@doc """
Set the new storage constraints
"""
@spec set_storage_constraints(StorageConstraints.t()) :: :ok
defdelegate set_storage_constraints(constraints), to: Constraints
@doc """
Find out the next authorized nodes using the TPS from the previous to determine based
on the active geo patches if we need to more node related to the network load.
## Examples
# No need to add more validation nodes if the TPS is null
iex> nodes = [
...> %Node{first_public_key: "key1", geo_patch: "AAA", authorized?: true},
...> %Node{first_public_key: "key2", geo_patch: "B34", authorized?: true},
...> %Node{first_public_key: "key3", geo_patch: "A34"},
...> %Node{first_public_key: "key4", geo_patch: "F34", authorized?: true},
...> %Node{first_public_key: "key5", geo_patch: "D34"}
...> ]
iex> Election.next_authorized_nodes(0.0, nodes)
[
%Node{first_public_key: "key1", geo_patch: "AAA", authorized?: true},
%Node{first_public_key: "key2", geo_patch: "B34", authorized?: true},
%Node{first_public_key: "key4", geo_patch: "F34", authorized?: true}
]
# Need to add more validation nodes if the TPS is less than 1
iex> nodes = [
...> %Node{first_public_key: "key1", geo_patch: "AAA", authorized?: true},
...> %Node{first_public_key: "key2", geo_patch: "B34", authorized?: true},
...> %Node{first_public_key: "key3", geo_patch: "A34"},
...> %Node{first_public_key: "key4", geo_patch: "F34", authorized?: true},
...> %Node{first_public_key: "key5", geo_patch: "D34"}
...> ]
iex> Election.next_authorized_nodes(0.0243, nodes)
[
%Node{first_public_key: "key1", geo_patch: "AAA", authorized?: true},
%Node{first_public_key: "key2", geo_patch: "B34", authorized?: true},
%Node{first_public_key: "key3", geo_patch: "A34"},
%Node{first_public_key: "key4", geo_patch: "F34", authorized?: true},
%Node{first_public_key: "key5", geo_patch: "D34"}
]
# No need to add more validation nodes if the TPS is enough regarding the active patches
iex> nodes = [
...> %Node{first_public_key: "key1", geo_patch: "AAA", authorized?: true},
...> %Node{first_public_key: "key2", geo_patch: "B34", authorized?: true},
...> %Node{first_public_key: "key3", geo_patch: "A34"},
...> %Node{first_public_key: "key4", geo_patch: "F34", authorized?: true},
...> %Node{first_public_key: "key5", geo_patch: "D34"}
...> ]
iex> Election.next_authorized_nodes(100.0, nodes)
[
%Node{first_public_key: "key1", geo_patch: "AAA", authorized?: true},
%Node{first_public_key: "key2", geo_patch: "B34", authorized?: true},
%Node{first_public_key: "key4", geo_patch: "F34", authorized?: true}
]
# With a higher TPS we need more node to cover the transaction mining
iex> nodes = [
...> %Node{first_public_key: "key1", geo_patch: "AAA", authorized?: true},
...> %Node{first_public_key: "key2", geo_patch: "B34", authorized?: true},
...> %Node{first_public_key: "key3", geo_patch: "A34"},
...> %Node{first_public_key: "key4", geo_patch: "F34", authorized?: true},
...> %Node{first_public_key: "key5", geo_patch: "D34"}
...> ]
iex> Election.next_authorized_nodes(1000.0, nodes)
[
%Node{first_public_key: "key1", geo_patch: "AAA", authorized?: true},
%Node{first_public_key: "key2", geo_patch: "B34", authorized?: true},
%Node{first_public_key: "key4", geo_patch: "F34", authorized?: true},
%Node{first_public_key: "key3", geo_patch: "A34"},
%Node{first_public_key: "key5", geo_patch: "D34"}
]
"""
@spec next_authorized_nodes(float(), list(Node.t())) ::
        list(Node.t())
# No load at all: keep exactly the currently authorized nodes.
def next_authorized_nodes(0.0, nodes), do: Enum.filter(nodes, & &1.authorized?)

# Low but non-zero load: authorize every known node (see module doctest).
def next_authorized_nodes(previous_tps, nodes)
    when is_float(previous_tps) and previous_tps < 1.0,
    do: nodes

def next_authorized_nodes(previous_tps, nodes)
    when is_float(previous_tps) and previous_tps >= 1.0 and is_list(nodes) do
  # Partition once: authorized nodes are kept, the rest are promotion candidates.
  {authorized, candidates} = Enum.split_with(nodes, & &1.authorized?)

  case nb_of_authorized_nodes_to_add(previous_tps, nodes) do
    0 ->
      authorized

    nb_nodes_to_add ->
      # Promote up to `nb_nodes_to_add` candidates per geo patch.
      candidates
      |> Enum.group_by(& &1.geo_patch)
      |> Enum.reduce(authorized, fn {_patch, patch_nodes}, acc ->
        acc ++ Enum.take(patch_nodes, nb_nodes_to_add)
      end)
  end
end
# Derives how many extra nodes each active geo patch should contribute,
# based on the previous load (TPS) spread across the active patches.
defp nb_of_authorized_nodes_to_add(previous_tps, nodes) do
  # Active patches: the distinct two-character geo patch prefixes of the
  # currently authorized nodes.
  active_patches =
    for node <- nodes, node.authorized?, uniq: true do
      String.slice(node.geo_patch, 0, 2)
    end

  tps_by_patch = previous_tps / length(active_patches)

  # 100 * 0.6 is the per-patch throughput budget used as the scaling unit.
  nodes_needed = tps_by_patch / (100 * 0.6)

  if nodes_needed < 1, do: 0, else: ceil(nodes_needed)
end
@doc """
Return the initiator of the node shared secrets transaction
"""
@spec node_shared_secrets_initiator(address :: binary(), authorized_nodes :: list(Node.t())) ::
        Node.t()
def node_shared_secrets_initiator(address, nodes, constraints \\ StorageConstraints.new())
    when is_list(nodes) do
  # The first node of the storage election is in charge of sending the new
  # node shared secrets transaction.
  elected_storage_nodes = storage_nodes(address, nodes, constraints)
  [%Node{} = initiator | _rest] = elected_storage_nodes
  initiator
end
@doc """
List all the I/O storage nodes from a ledger operations movements
"""
@spec io_storage_nodes(
        movements_addresses :: list(binary()),
        nodes :: list(Node.t()),
        constraints :: StorageConstraints.t()
      ) :: list(Node.t())
def io_storage_nodes(
      movements_addresses,
      nodes,
      storage_constraints \\ StorageConstraints.new()
    ) do
  # Union of the storage elections for every movement address, deduplicated
  # by node identity (first public key).
  movements_addresses
  |> Stream.flat_map(&storage_nodes(&1, nodes, storage_constraints))
  |> Enum.uniq_by(& &1.first_public_key)
end
@doc """
List all the beacon storage nodes based on transaction
"""
@spec beacon_storage_nodes(
        subset :: binary(),
        date :: DateTime.t(),
        nodes :: list(Node.t()),
        constraints :: StorageConstraints.t()
      ) :: list(Node.t())
def beacon_storage_nodes(
      subset,
      date = %DateTime{},
      nodes,
      storage_constraints = %StorageConstraints{} \\ StorageConstraints.new()
    )
    when is_binary(subset) and is_list(nodes) do
  # The beacon storage election keys off the summary transaction address
  # derived from the subset and date.
  summary_address = BeaconChain.summary_transaction_address(subset, date)
  storage_nodes(summary_address, nodes, storage_constraints)
end
end
| 36.243636 | 111 | 0.621952 |
1c1ff454cf1b28be2f6146d4c5cc1fa80e0b12ee | 9,121 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/interconnect_attachment.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/interconnect_attachment.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/interconnect_attachment.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.InterconnectAttachment do
@moduledoc """
Represents an InterconnectAttachment (VLAN attachment) resource. For more information, see Creating VLAN Attachments. (== resource_for beta.interconnectAttachments ==) (== resource_for v1.interconnectAttachments ==)
## Attributes
- adminEnabled (boolean()): Determines whether this Attachment will carry packets. Not present for PARTNER_PROVIDER. Defaults to: `null`.
- bandwidth (String.t): Provisioned bandwidth capacity for the interconnectAttachment. Can be set by the partner to update the customer's provisioned bandwidth. Output only for for PARTNER type, mutable for PARTNER_PROVIDER, not available for DEDICATED. Defaults to: `null`.
- Enum - one of [BPS_100M, BPS_10G, BPS_1G, BPS_200M, BPS_2G, BPS_300M, BPS_400M, BPS_500M, BPS_50M, BPS_5G]
- candidateSubnets ([String.t]): Up to 16 candidate prefixes that can be used to restrict the allocation of cloudRouterIpAddress and customerRouterIpAddress for this attachment. All prefixes must be within link-local address space (169.254.0.0/16) and must be /29 or shorter (/28, /27, etc). Google will attempt to select an unused /29 from the supplied candidate prefix(es). The request will fail if all possible /29s are in use on Google?s edge. If not supplied, Google will randomly select an unused /29 from all of link-local space. Defaults to: `null`.
- cloudRouterIpAddress (String.t): [Output Only] IPv4 address + prefix length to be configured on Cloud Router Interface for this interconnect attachment. Defaults to: `null`.
- creationTimestamp (String.t): [Output Only] Creation timestamp in RFC3339 text format. Defaults to: `null`.
- customerRouterIpAddress (String.t): [Output Only] IPv4 address + prefix length to be configured on the customer router subinterface for this interconnect attachment. Defaults to: `null`.
- description (String.t): An optional description of this resource. Defaults to: `null`.
- edgeAvailabilityDomain (String.t): Desired availability domain for the attachment. Only available for type PARTNER, at creation time. For improved reliability, customers should configure a pair of attachments with one per availability domain. The selected availability domain will be provided to the Partner via the pairing key so that the provisioned circuit will lie in the specified domain. If not specified, the value will default to AVAILABILITY_DOMAIN_ANY. Defaults to: `null`.
- Enum - one of [AVAILABILITY_DOMAIN_1, AVAILABILITY_DOMAIN_2, AVAILABILITY_DOMAIN_ANY]
- googleReferenceId (String.t): [Output Only] Google reference ID, to be used when raising support tickets with Google or otherwise to debug backend connectivity issues. Defaults to: `null`.
- id (String.t): [Output Only] The unique identifier for the resource. This identifier is defined by the server. Defaults to: `null`.
- interconnect (String.t): URL of the underlying Interconnect object that this attachment's traffic will traverse through. Defaults to: `null`.
- kind (String.t): [Output Only] Type of the resource. Always compute#interconnectAttachment for interconnect attachments. Defaults to: `null`.
- name (String.t): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. Defaults to: `null`.
- operationalStatus (String.t): [Output Only] The current status of whether or not this interconnect attachment is functional. Defaults to: `null`.
- Enum - one of [OS_ACTIVE, OS_UNPROVISIONED]
- pairingKey (String.t): [Output only for type PARTNER. Input only for PARTNER_PROVIDER. Not present for DEDICATED]. The opaque identifier of an PARTNER attachment used to initiate provisioning with a selected partner. Of the form \"XXXXX/region/domain\" Defaults to: `null`.
- partnerAsn (String.t): Optional BGP ASN for the router that should be supplied by a layer 3 Partner if they configured BGP on behalf of the customer. Output only for PARTNER type, input only for PARTNER_PROVIDER, not available for DEDICATED. Defaults to: `null`.
- partnerMetadata (InterconnectAttachmentPartnerMetadata): Informational metadata about Partner attachments from Partners to display to customers. Output only for for PARTNER type, mutable for PARTNER_PROVIDER, not available for DEDICATED. Defaults to: `null`.
- privateInterconnectInfo (InterconnectAttachmentPrivateInfo): [Output Only] Information specific to an InterconnectAttachment. This property is populated if the interconnect that this is attached to is of type DEDICATED. Defaults to: `null`.
- region (String.t): [Output Only] URL of the region where the regional interconnect attachment resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. Defaults to: `null`.
- router (String.t): URL of the cloud router to be used for dynamic routing. This router must be in the same region as this InterconnectAttachment. The InterconnectAttachment will automatically connect the Interconnect to the network & region within which the Cloud Router is configured. Defaults to: `null`.
- selfLink (String.t): [Output Only] Server-defined URL for the resource. Defaults to: `null`.
- state (String.t): [Output Only] The current state of this attachment's functionality. Defaults to: `null`.
- Enum - one of [ACTIVE, DEFUNCT, PARTNER_REQUEST_RECEIVED, PENDING_CUSTOMER, PENDING_PARTNER, STATE_UNSPECIFIED, UNPROVISIONED]
- type (String.t): Defaults to: `null`.
- Enum - one of [DEDICATED, PARTNER, PARTNER_PROVIDER]
- vlanTag8021q (integer()): Available only for DEDICATED and PARTNER_PROVIDER. Desired VLAN tag for this attachment, in the range 2-4094. This field refers to 802.1q VLAN tag, also known as IEEE 802.1Q Only specified at creation time. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:adminEnabled => any(),
:bandwidth => any(),
:candidateSubnets => list(any()),
:cloudRouterIpAddress => any(),
:creationTimestamp => any(),
:customerRouterIpAddress => any(),
:description => any(),
:edgeAvailabilityDomain => any(),
:googleReferenceId => any(),
:id => any(),
:interconnect => any(),
:kind => any(),
:name => any(),
:operationalStatus => any(),
:pairingKey => any(),
:partnerAsn => any(),
:partnerMetadata => GoogleApi.Compute.V1.Model.InterconnectAttachmentPartnerMetadata.t(),
:privateInterconnectInfo =>
GoogleApi.Compute.V1.Model.InterconnectAttachmentPrivateInfo.t(),
:region => any(),
:router => any(),
:selfLink => any(),
:state => any(),
:type => any(),
:vlanTag8021q => any()
}
field(:adminEnabled)
field(:bandwidth)
field(:candidateSubnets, type: :list)
field(:cloudRouterIpAddress)
field(:creationTimestamp)
field(:customerRouterIpAddress)
field(:description)
field(:edgeAvailabilityDomain)
field(:googleReferenceId)
field(:id)
field(:interconnect)
field(:kind)
field(:name)
field(:operationalStatus)
field(:pairingKey)
field(:partnerAsn)
field(:partnerMetadata, as: GoogleApi.Compute.V1.Model.InterconnectAttachmentPartnerMetadata)
field(
:privateInterconnectInfo,
as: GoogleApi.Compute.V1.Model.InterconnectAttachmentPrivateInfo
)
field(:region)
field(:router)
field(:selfLink)
field(:state)
field(:type)
field(:vlanTag8021q)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.InterconnectAttachment do
  # Delegates JSON decoding to the generated model's decode/2.
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.InterconnectAttachment.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.InterconnectAttachment do
  # Encoding is shared across generated models via GoogleApi.Gax.ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 71.257813 | 559 | 0.74323 |
1c1ff4654fb9ef57e9506a87c4483582465d6f18 | 7,029 | exs | Elixir | test/livebook_web/controllers/session_controller_test.exs | axelson/livebook | 78b9a11d8c3dac78c648cc7903343ea09f45efd9 | [
"Apache-2.0"
] | null | null | null | test/livebook_web/controllers/session_controller_test.exs | axelson/livebook | 78b9a11d8c3dac78c648cc7903343ea09f45efd9 | [
"Apache-2.0"
] | null | null | null | test/livebook_web/controllers/session_controller_test.exs | axelson/livebook | 78b9a11d8c3dac78c648cc7903343ea09f45efd9 | [
"Apache-2.0"
] | null | null | null | defmodule LivebookWeb.SessionControllerTest do
use LivebookWeb.ConnCase, async: true
alias Livebook.{Sessions, Session, Notebook, FileSystem}
describe "show_image" do
  test "returns not found when the given session does not exist", %{conn: conn} do
    # A well-formed but nonexistent session id.
    id = Livebook.Utils.random_node_aware_id()
    conn = get(conn, Routes.session_path(conn, :show_image, id, "image.jpg"))
    assert conn.status == 404
    assert conn.resp_body == "Not found"
  end

  test "returns not found when the given image does not exist", %{conn: conn} do
    {:ok, session} = Sessions.create_session()
    conn = get(conn, Routes.session_path(conn, :show_image, session.id, "nonexistent.jpg"))
    assert conn.status == 404
    assert conn.resp_body == "No such file or directory"
    Session.close(session.pid)
  end

  test "returns the image when it does exist", %{conn: conn} do
    {:ok, session} = Sessions.create_session()
    %{images_dir: images_dir} = session
    # Place an (empty) image file inside the session's images directory.
    :ok = FileSystem.File.resolve(images_dir, "test.jpg") |> FileSystem.File.write("")
    conn = get(conn, Routes.session_path(conn, :show_image, session.id, "test.jpg"))
    assert conn.status == 200
    # Content type is derived from the .jpg extension.
    assert get_resp_header(conn, "content-type") == ["image/jpeg"]
    Session.close(session.pid)
  end
end
describe "download_source" do
test "returns not found when the given session does not exist", %{conn: conn} do
id = Livebook.Utils.random_node_aware_id()
conn = get(conn, Routes.session_path(conn, :download_source, id, "livemd"))
assert conn.status == 404
assert conn.resp_body == "Not found"
end
test "returns bad request when given an invalid format", %{conn: conn} do
{:ok, session} = Sessions.create_session()
conn = get(conn, Routes.session_path(conn, :download_source, session.id, "invalid"))
assert conn.status == 400
assert conn.resp_body == "Invalid format, supported formats: livemd, exs"
end
test "handles live markdown notebook source", %{conn: conn} do
{:ok, session} = Sessions.create_session()
conn = get(conn, Routes.session_path(conn, :download_source, session.id, "livemd"))
assert conn.status == 200
assert get_resp_header(conn, "content-type") == ["text/plain"]
assert conn.resp_body == """
# Untitled notebook
## Section
```elixir
```
"""
Session.close(session.pid)
end
test "includes output in markdown when include_outputs parameter is set", %{conn: conn} do
notebook = %{
Notebook.new()
| name: "My Notebook",
sections: [
%{
Notebook.Section.new()
| name: "Section 1",
cells: [
%{
Notebook.Cell.new(:code)
| source: """
IO.puts("hey")\
""",
outputs: [{0, {:stdout, "hey"}}]
}
]
}
]
}
{:ok, session} = Sessions.create_session(notebook: notebook)
query = [include_outputs: "true"]
conn = get(conn, Routes.session_path(conn, :download_source, session.id, "livemd", query))
assert conn.status == 200
assert get_resp_header(conn, "content-type") == ["text/plain"]
assert conn.resp_body == """
# My Notebook
## Section 1
```elixir
IO.puts("hey")
```
<!-- livebook:{"output":true} -->
```
hey
```
"""
Session.close(session.pid)
end
test "handles elixir notebook source", %{conn: conn} do
{:ok, session} = Sessions.create_session()
conn = get(conn, Routes.session_path(conn, :download_source, session.id, "exs"))
assert conn.status == 200
assert get_resp_header(conn, "content-type") == ["text/plain"]
assert conn.resp_body == """
# Title: Untitled notebook
# ── Section ──
"""
Session.close(session.pid)
end
end
describe "show_asset" do
  test "fetches assets and redirects to the session-less path", %{conn: conn} do
    %{notebook: notebook, hash: hash} = notebook_with_js_output()
    conn = start_session_and_request_asset(conn, notebook, hash)

    # Permanent redirect so subsequent requests hit the cached-asset route.
    assert redirected_to(conn, 301) ==
             Routes.session_path(conn, :show_cached_asset, hash, ["main.js"])
  end

  test "skips the session if assets are in cache", %{conn: conn} do
    %{notebook: notebook, hash: hash} = notebook_with_js_output()
    # Fetch the assets for the first time
    conn = start_session_and_request_asset(conn, notebook, hash)
    # Use nonexistent session, so any communication would fail
    random_session_id = Livebook.Utils.random_node_aware_id()

    conn =
      get(conn, Routes.session_path(conn, :show_asset, random_session_id, hash, ["main.js"]))

    assert redirected_to(conn, 301) ==
             Routes.session_path(conn, :show_cached_asset, hash, ["main.js"])
  end
end
describe "show_cached_asset" do
  test "returns not found when no matching assets are in the cache", %{conn: conn} do
    # The hash is fresh and no session ever fetched these assets.
    %{notebook: _notebook, hash: hash} = notebook_with_js_output()
    conn = get(conn, Routes.session_path(conn, :show_cached_asset, hash, ["main.js"]))
    assert conn.status == 404
    assert conn.resp_body == "Not found"
  end

  test "returns the requestes asset if available in cache", %{conn: conn} do
    %{notebook: notebook, hash: hash} = notebook_with_js_output()
    # Fetch the assets for the first time
    conn = start_session_and_request_asset(conn, notebook, hash)
    conn = get(conn, Routes.session_path(conn, :show_cached_asset, hash, ["main.js"]))
    assert conn.status == 200
    # The cached file is the JS module extracted from the assets archive.
    assert "export function init(" <> _ = conn.resp_body
  end
end
# Boots a session with the given notebook, attaches an embedded runtime,
# requests the asset at `hash` and returns the resulting conn. The session is
# closed before returning.
defp start_session_and_request_asset(conn, notebook, hash) do
  {:ok, session} = Sessions.create_session(notebook: notebook)
  # We need runtime in place to actually copy the archive
  {:ok, runtime} = Livebook.Runtime.Embedded.new() |> Livebook.Runtime.connect()
  Session.set_runtime(session.pid, runtime)
  conn = get(conn, Routes.session_path(conn, :show_asset, session.id, hash, ["main.js"]))
  Session.close(session.pid)
  conn
end
# Builds a notebook whose single code cell carries a JS output pointing at
# the test assets archive, tagged with a fresh random hash. Returns both the
# notebook and the hash so tests can request the corresponding asset.
defp notebook_with_js_output() do
  archive_path = Path.expand("../../support/assets.tar.gz", __DIR__)
  # Random suffix keeps each test's cache entry independent.
  hash = "test-" <> Livebook.Utils.random_id()
  assets_info = %{archive_path: archive_path, hash: hash, js_path: "main.js"}

  output = {:js, %{js_view: %{assets: assets_info}}}

  notebook = %{
    Notebook.new()
    | sections: [
        %{
          Notebook.Section.new()
          | cells: [%{Notebook.Cell.new(:code) | outputs: [{0, output}]}]
        }
      ]
  }

  %{notebook: notebook, hash: hash}
end
end
| 31.24 | 96 | 0.605207 |
1c2004477ee60443908d17c73a9be0f6d0fb5f45 | 12,169 | ex | Elixir | apps/ex_wire/lib/ex_wire/adapter/tcp.ex | atoulme/ethereum | cebb0756c7292ac266236636d2ab5705cb40a52e | [
"MIT"
] | 14 | 2017-08-21T06:14:49.000Z | 2020-05-15T12:00:52.000Z | apps/ex_wire/lib/ex_wire/adapter/tcp.ex | atoulme/ethereum | cebb0756c7292ac266236636d2ab5705cb40a52e | [
"MIT"
] | 7 | 2017-08-11T07:50:14.000Z | 2018-08-23T20:42:50.000Z | apps/ex_wire/lib/ex_wire/adapter/tcp.ex | atoulme/ethereum | cebb0756c7292ac266236636d2ab5705cb40a52e | [
"MIT"
] | 3 | 2017-08-20T17:56:41.000Z | 2018-08-21T00:36:10.000Z | defmodule ExWire.Adapter.TCP do
@moduledoc """
Starts a TCP server to handle incoming and outgoing RLPx, DevP2P, Eth Wire connection.
Once this connection is up, it's possible to add a subscriber to the different packets
that are sent over the connection. This is the primary way of handling packets.
Note: incoming connections are not fully tested at this moment.
Note: we do not currently store token to restart connections (this upsets some peers)
"""
use GenServer
require Logger
alias ExWire.Framing.Frame
alias ExWire.Handshake
alias ExWire.Packet
# Interval (in ms) between keep-alive timer ticks; see the :pinger timeouts.
@ping_interval 2_000

@doc """
Starts an outbound peer to peer connection.

The connection starts inactive (no HELLO exchanged) and not closed;
`subscribers` may be pre-registered here or added later via `subscribe/2`.
"""
def start_link(:outbound, peer, subscribers \\ []) do
  GenServer.start_link(__MODULE__, %{is_outbound: true, peer: peer, active: false, closed: false, subscribers: subscribers})
end
@doc """
Initialize by opening up a `gen_tcp` connection to given host and port.

We'll also prepare and send out an authentication message immediately after
connecting, and arm the keep-alive (`:pinger`) timer.
"""
def init(state=%{is_outbound: true, peer: peer}) do
  timer_ref = :erlang.start_timer(@ping_interval, self(), :pinger)

  {:ok, socket} = :gen_tcp.connect(peer.host |> String.to_charlist, peer.port, [:binary])

  Logger.debug("[Network] [#{peer}] Established outbound connection with #{peer.host}, sending auth.")

  {my_auth_msg, my_ephemeral_key_pair, my_nonce} = ExWire.Handshake.build_auth_msg(
    ExWire.Config.public_key(),
    ExWire.Config.private_key(),
    peer.remote_id
  )

  {:ok, encoded_auth_msg} = my_auth_msg
  |> ExWire.Handshake.Struct.AuthMsgV4.serialize()
  |> ExWire.Handshake.EIP8.wrap_eip_8(peer.remote_id, peer.host, my_ephemeral_key_pair)

  # Send auth message (cast to ourselves so it goes through the raw-send path)
  GenServer.cast(self(), {:send, %{data: encoded_auth_msg}})

  # Keep the auth payload, ephemeral key pair and nonce in state: they are
  # needed to derive frame secrets once the peer's ack arrives.
  {:ok, Map.merge(state, %{
    socket: socket,
    auth_data: encoded_auth_msg,
    my_ephemeral_key_pair: my_ephemeral_key_pair,
    my_nonce: my_nonce,
    timer_ref: timer_ref})}
end
@doc """
Allows a client to subscribe to incoming packets. Subscribers must be in the form
of `{module, function, args}`, in which case we'll call `module.function(packet, ...args)`,
or `{:server, server_pid}` for a GenServer, in which case we'll send a message
`{:packet, packet, peer}`.
"""
def handle_call({:subscribe, {_module, _function, _args}=mfa}, _from, state) do
  # Prepend the MFA subscriber; the list is created on first subscription.
  updated_state = Map.update(state, :subscribers, [mfa], fn subscribers -> [mfa | subscribers] end)
  {:reply, :ok, updated_state}
end
# BUG FIX: the previous head was `{:subscribe, {:server, server}=server}`,
# which binds `server` both to the inner pid and to the whole tuple. Repeated
# pattern variables must be equal, so that clause could never match and every
# `{:server, pid}` subscription crashed with a FunctionClauseError.
# We store the full `{:server, pid}` tuple so the packet loop can later
# `send(pid, {:packet, packet, peer})`.
def handle_call({:subscribe, {:server, _pid} = server}, _from, state) do
  updated_state =
    Map.update(state, :subscribers, [server], fn subscribers -> [server | subscribers] end)

  {:reply, :ok, updated_state}
end
@doc """
Handle info will handle when we have inbound communication from a peer node.

If we haven't yet completed our handshake, we'll await an auth or ack message
as appropriate. That is, if we've established the connection and have sent an
auth message, then we'll look for an ack. If we listened for a connection, we'll
await an auth message.

TODO: clients may send an auth before (or as) we do, and we should handle this case without error.
"""
def handle_info(_info={:tcp, socket, data}, state=%{is_outbound: true, peer: peer, auth_data: auth_data, my_ephemeral_key_pair: {_my_ephemeral_public_key, my_ephemeral_private_key}=_my_ephemeral_key_pair, my_nonce: my_nonce}) do
  # This clause only matches while `auth_data` is still set, i.e. before the
  # handshake completed (it is nilled out below once secrets are derived).
  case Handshake.try_handle_ack(data, auth_data, my_ephemeral_private_key, my_nonce, peer.host) do
    {:ok, secrets, frame_rest} ->
      Logger.debug("[Network] [#{peer}] Got ack from #{peer.host}, deriving secrets and sending HELLO")

      send_hello(self())

      updated_state = Map.merge(state, %{
        secrets: secrets,
        auth_data: nil,
        my_ephemeral_key_pair: nil,
        my_nonce: nil
      })

      # Any bytes that followed the ack are already framed data: re-process
      # them through the post-handshake clause.
      if byte_size(frame_rest) == 0 do
        {:noreply, updated_state}
      else
        handle_info({:tcp, socket, frame_rest}, updated_state)
      end
    {:invalid, reason} ->
      Logger.warn("[Network] [#{peer}] Failed to get handshake message when expecting ack - #{reason}")
      {:noreply, state}
  end
end
# TODO: How do we set remote id?
# Inbound handshake: we listened for a connection and expect an auth message
# from the remote peer; on success we send an ack back and derive secrets.
def handle_info({:tcp, _socket, data}, state=%{is_outbound: false, peer: peer, my_ephemeral_key_pair: my_ephemeral_key_pair, my_nonce: my_nonce}) do
  case Handshake.try_handle_auth(data, my_ephemeral_key_pair, my_nonce, peer.remote_id, peer.host) do
    {:ok, ack_data, secrets} ->
      Logger.debug("[Network] [#{peer}] Received auth from #{peer.host}")

      # Send ack back to sender
      GenServer.cast(self(), {:send, %{data: ack_data}})

      send_hello(self())

      # But we're set on our secrets; clear the handshake-only fields.
      {:noreply, Map.merge(state, %{
        secrets: secrets,
        auth_data: nil,
        my_ephemeral_key_pair: nil,
        my_nonce: nil
      })}
    {:invalid, reason} ->
      Logger.warn("[Network] [#{peer}] Received unknown handshake message when expecting auth - #{reason}")
      {:noreply, state}
  end
end
# Post-handshake data path: accumulates bytes in `:queued_data` until a full
# frame can be decoded, dispatches the decoded packet to its handler module,
# applies the handler's verdict to our state, and notifies all subscribers.
def handle_info(_info={:tcp, socket, data}, state=%{peer: peer, secrets: secrets}) do
  total_data = Map.get(state, :queued_data, <<>>) <> data

  case Frame.unframe(total_data, secrets) do
    {:ok, packet_type, packet_data, frame_rest, updated_secrets} ->
      # TODO: Ignore non-HELLO messages unless state is active.
      # TODO: Maybe move into smaller functions for testing
      {packet, handle_result} = case Packet.get_packet_mod(packet_type) do
        {:ok, packet_mod} ->
          Logger.debug("[Network] [#{peer}] Got packet #{Atom.to_string(packet_mod)} from #{peer.host}")

          packet = packet_data
          |> packet_mod.deserialize()

          {packet, packet_mod.handle(packet)}
        :unknown_packet_type ->
          Logger.warn("[Network] [#{peer}] Received unknown or unhandled packet type `#{packet_type}` from #{peer.host}")

          # Unknown packets are ignored (nil packet, no state change).
          {nil, :ok}
      end

      # Updates our given state and does any actions necessary
      handled_state = case handle_result do
        :ok -> state
        :activate -> Map.merge(state, %{active: true})
        :peer_disconnect ->
          # Doesn't matter if this succeeds or not
          :gen_tcp.shutdown(socket, :read_write)

          Map.merge(state, %{active: false})
        {:disconnect, reason} ->
          Logger.warn("[Network] [#{peer}] Disconnecting to peer due to: #{Packet.Disconnect.get_reason_msg(reason)}")

          # TODO: Add a timeout and disconnect ourselves
          send_packet(self(), Packet.Disconnect.new(reason))

          state
        {:send, packet} ->
          send_packet(self(), packet)

          state
      end

      # Let's inform any subscribers
      if not is_nil(packet) do
        for subscriber <- Map.get(state, :subscribers, []) do
          case subscriber do
            {module, function, args} -> apply(module, function, [packet | args])
            {:server, server} -> send(server, {:packet, packet, peer})
          end
        end
      end

      # The frame secrets roll forward on every decode; clear the queue.
      updated_state = Map.merge(handled_state, %{secrets: updated_secrets, queued_data: <<>>})

      # If we have more packet data, we need to continue processing.
      if byte_size(frame_rest) == 0 do
        {:noreply, updated_state}
      else
        handle_info({:tcp, socket, frame_rest}, updated_state)
      end
    {:error, "Insufficent data"} ->
      # Partial frame: stash the bytes and wait for more. NOTE(review): this
      # string (misspelling included) appears to match the exact error
      # produced by Frame.unframe/2 — confirm there before changing it here.
      {:noreply, Map.put(state, :queued_data, total_data)}
    {:error, reason} ->
      Logger.error("[Network] [#{peer}] Failed to read incoming packet from #{peer.host} `#{reason}`)")
      {:noreply, state}
  end
end
# The remote side hung up: mark the connection inactive and closed so any
# later sends are dropped instead of hitting a dead socket.
def handle_info({:tcp_closed, _socket}, %{peer: peer} = state) do
  Logger.warn("[Network] [#{peer}] Peer closed connection.")

  updated_state = Map.merge(state, %{active: false, closed: true})
  {:noreply, updated_state}
end
# Keep-alive tick while the connection is active: log, send our status to
# the peer, and re-arm the timer for the next tick.
# NOTE(review): the log says "ping" but send_status/1 is called (send_ping/1
# exists in this module but is unused here) — confirm which was intended.
def handle_info({:timeout, _ref, :pinger}, state=%{socket: _socket, peer: peer, active: true}) do
  Logger.warn("[Network] [#{peer}] Sending peer ping.")

  send_status(self())

  timer_ref = :erlang.start_timer(@ping_interval, self(), :pinger)

  {:noreply, Map.put(state, :timer_ref, timer_ref)}
end

# Keep-alive tick while inactive: nothing is sent and — note — the timer is
# NOT re-armed, so pings stop permanently once the connection goes inactive.
def handle_info({:timeout, _ref, :pinger}, state=%{socket: _socket, peer: peer, active: false}) do
  Logger.warn("[Network] [#{peer}] Not sending peer ping.")

  {:noreply, state}
end
@doc """
If we receive a `send` before secrets are set, we'll send the data directly over the wire.

This is the path used for the plaintext handshake messages (auth and ack),
which are cast to ourselves from `init/1` and the auth handler.
"""
def handle_cast({:send, %{data: data}}, state = %{socket: socket, peer: peer}) do
  Logger.debug("[Network] [#{peer}] Sending raw data message of length #{byte_size(data)} byte(s) to #{peer.host}")

  # Assertive match: crash this connection process if the socket write fails.
  :ok = :gen_tcp.send(socket, data)

  {:noreply, state}
end
@doc """
If we receive a `send` and we have secrets set, we'll send the message as a framed Eth packet.

However, if we haven't yet sent a Hello message, we should queue the message and try again later. Most
servers will disconnect if we send a non-Hello message as our first message.
"""
def handle_cast({:send, %{packet: {packet_mod, _packet_type, _packet_data}}}=_data, state = %{peer: peer, closed: true}) do
  # Connection already closed: drop the packet silently (log only).
  Logger.info("[Network] [#{peer}] Dropping packet #{Atom.to_string(packet_mod)} for #{peer.host} due to closed connection.")
  {:noreply, state}
end
# Not yet active (HELLO not exchanged) and the packet is not a Hello:
# requeue it by re-casting the same message to ourselves after 500 ms.
# NOTE(review): the spawned sleeper is unsupervised and retries indefinitely
# while the connection stays inactive — Process.send_after/3 would avoid the
# extra process; confirm before changing.
def handle_cast({:send, %{packet: {packet_mod, _packet_type, _packet_data}}}=data, state = %{peer: peer, active: false}) when packet_mod != ExWire.Packet.Hello do
  Logger.info("[Network] [#{peer}] Queueing packet #{Atom.to_string(packet_mod)} to #{peer.host}")

  # TODO: Should we monitor this process, etc?
  pid = self()
  spawn fn ->
    :timer.sleep(500)

    GenServer.cast(pid, data)
  end

  {:noreply, state}
end
# Active connection: frame the packet with the current secrets and write it
# to the socket. Frame.frame/3 returns updated secrets, which must be stored
# back so the next frame encodes correctly.
def handle_cast({:send, %{packet: {packet_mod, packet_type, packet_data}}}, state = %{socket: socket, secrets: secrets, peer: peer}) do
  Logger.info("[Network] [#{peer}] Sending packet #{Atom.to_string(packet_mod)} to #{peer.host}")

  {frame, updated_secrets} = Frame.frame(packet_type, packet_data, secrets)

  # Assertive match: crash this connection process if the socket write fails.
  :ok = :gen_tcp.send(socket, frame)

  {:noreply, Map.merge(state, %{secrets: updated_secrets})}
end
@doc """
Client function for sending a packet over to a peer.

Looks up the packet's wire type and handler module, serializes it, and casts
it to the connection process `pid`.
"""
@spec send_packet(pid(), struct()) :: :ok
def send_packet(pid, packet) do
  {:ok, type} = Packet.get_packet_type(packet)
  {:ok, mod} = Packet.get_packet_mod(type)
  serialized = mod.serialize(packet)

  GenServer.cast(pid, {:send, %{packet: {mod, type, serialized}}})

  :ok
end
@doc """
Client function to send HELLO message after connecting.

Advertises our p2p version, client id, capabilities, listen port and node id,
all read from the application config.
"""
def send_hello(pid) do
  send_packet(pid, %Packet.Hello{
    p2p_version: ExWire.Config.p2p_version(),
    client_id: ExWire.Config.client_id(),
    caps: ExWire.Config.caps(),
    listen_port: ExWire.Config.listen_port(),
    node_id: ExWire.Config.node_id()
  })
end
@doc """
Client function to send PING message.
"""
def send_ping(pid), do: send_packet(pid, %Packet.Ping{})
@doc """
Client function to send STATUS message advertising our chain state.
"""
def send_status(pid) do
  # FIX: removed a leftover debug tap (`|> Exth.inspect("status")`) that
  # dumped every outgoing status to the console.
  # NOTE(review): `best_hash` is set to the genesis *parent* hash and
  # `genesis_hash` is empty — looks suspicious; confirm against the Eth wire
  # protocol status handshake before relying on these values.
  send_packet(pid, %Packet.Status{
    protocol_version: ExWire.Config.protocol_version(),
    network_id: ExWire.Config.network_id(),
    total_difficulty: ExWire.Config.chain.genesis.difficulty,
    best_hash: ExWire.Config.chain.genesis.parent_hash,
    genesis_hash: <<>>
  })
end
@doc """
Client function to subscribe to incoming packets.

A subscription should be in the form of `{module, function, args}` or
`{:server, server_pid}`; for the latter we will send a message with contents
`{:packet, packet, peer}` for each received packet.
"""
@spec subscribe(pid(), {module(), atom(), list()} | {:server, pid()}) :: :ok
def subscribe(pid, subscription) do
  # Assertive match: crash the caller if the server replies anything but :ok.
  :ok = GenServer.call(pid, {:subscribe, subscription})
end
end | 35.686217 | 230 | 0.652971 |
1c201cb1d082abc1d1e7c727b30638cda933e167 | 2,217 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/model/api_config_handler.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/app_engine/lib/google_api/app_engine/v1/model/api_config_handler.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/api_config_handler.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AppEngine.V1.Model.ApiConfigHandler do
  @moduledoc """
  Google Cloud Endpoints (https://cloud.google.com/appengine/docs/python/endpoints/) configuration for API handlers.

  ## Attributes

  * `authFailAction` (*type:* `String.t`, *default:* `nil`) - Action to take when users access resources that require authentication. Defaults to redirect.
  * `login` (*type:* `String.t`, *default:* `nil`) - Level of login required to access this resource. Defaults to optional.
  * `script` (*type:* `String.t`, *default:* `nil`) - Path to the script from the application root directory.
  * `securityLevel` (*type:* `String.t`, *default:* `nil`) - Security (HTTPS) enforcement for this URL.
  * `url` (*type:* `String.t`, *default:* `nil`) - URL to serve the endpoint at.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :authFailAction => String.t(),
          :login => String.t(),
          :script => String.t(),
          :securityLevel => String.t(),
          :url => String.t()
        }

  # `field/1` comes from GoogleApi.Gax.ModelBase and wires up the JSON
  # (de)serialization for each attribute documented above.
  field(:authFailAction)
  field(:login)
  field(:script)
  field(:securityLevel)
  field(:url)
end
# Delegates Poison decoding to the generated model's own decode/2
# (provided by GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder, for: GoogleApi.AppEngine.V1.Model.ApiConfigHandler do
  def decode(value, options) do
    GoogleApi.AppEngine.V1.Model.ApiConfigHandler.decode(value, options)
  end
end
# Delegates Poison encoding to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.AppEngine.V1.Model.ApiConfigHandler do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.576271 | 157 | 0.704105 |
1c2026f2c1bfe3377b44d5b1abf17d6b1edab19f | 798 | ex | Elixir | web/models/mention.ex | smpallen99/ucx_chat | 0dd98d0eb5e0537521844520ea2ba63a08fd3f19 | [
"MIT"
] | 60 | 2017-05-09T19:08:26.000Z | 2021-01-20T11:09:42.000Z | web/models/mention.ex | smpallen99/ucx_chat | 0dd98d0eb5e0537521844520ea2ba63a08fd3f19 | [
"MIT"
] | 6 | 2017-05-10T15:43:16.000Z | 2020-07-15T07:14:41.000Z | web/models/mention.ex | smpallen99/ucx_chat | 0dd98d0eb5e0537521844520ea2ba63a08fd3f19 | [
"MIT"
] | 10 | 2017-05-10T04:13:54.000Z | 2020-12-28T10:30:27.000Z | defmodule UcxChat.Mention do
use UcxChat.Web, :model

# Shorthand for this module, used when building queries below.
@mod __MODULE__

# A mention links a user to a message within a channel.
schema "mentions" do
  # presumably flips to false once the mentioned user has seen it — confirm
  field :unread, :boolean, default: true
  # presumably marks @all-style mentions — confirm against callers
  field :all, :boolean, default: false
  field :name, :string

  belongs_to :user, UcxChat.User
  belongs_to :message, UcxChat.Message
  belongs_to :channel, UcxChat.Channel

  timestamps(type: :utc_datetime)
end

# Foreign keys that are required on every mention.
@fields ~w(user_id message_id channel_id)a
@doc """
Builds a changeset based on the `struct` and `params`.

Casts the association ids plus `:unread`, `:all` and `:name`, and requires
all of the foreign keys in `@fields`.
"""
def changeset(struct, params \\ %{}) do
  struct
  |> cast(params, @fields ++ [:unread, :all, :name])
  |> validate_required(@fields)
end
@doc """
Builds an Ecto query counting the mentions of `user_id` in `channel_id`.

Note: this returns a query (`select: count(m.id)`), not a number — run it
with the repo (e.g. `Repo.one/1`) to get the count.
"""
def count(channel_id, user_id) do
  from m in @mod,
    where: m.user_id == ^user_id and m.channel_id == ^channel_id,
    select: count(m.id)
end
end
| 22.8 | 67 | 0.659148 |
1c202e0a82f1a935cf6627a8f43ba4ede2b7f473 | 5,895 | exs | Elixir | test/assoc_test.exs | oivoodoo/ex_audit | 3ef4bdfefb7135a023871a7bd3744825ed818f4e | [
"MIT"
] | null | null | null | test/assoc_test.exs | oivoodoo/ex_audit | 3ef4bdfefb7135a023871a7bd3744825ed818f4e | [
"MIT"
] | null | null | null | test/assoc_test.exs | oivoodoo/ex_audit | 3ef4bdfefb7135a023871a7bd3744825ed818f4e | [
"MIT"
] | null | null | null | defmodule AssocTest do
use ExUnit.Case
import Ecto.Query
alias ExAudit.Test.{Repo, Version, BlogPost, Comment, Util, User, UserGroup}
alias ExAudit.Test.BlogPost.Section
require Logger
test "comment lifecycle tracked" do
  user = Util.create_user()
  # Attribute all versions recorded below to this user.
  ExAudit.track(actor_id: user.id)

  params = %{
    title: "Controversial post",
    author_id: user.id,
    comments: [
      %{
        body: "lorem impusdrfnia",
        author_id: user.id
      }
    ]
  }

  changeset = BlogPost.changeset(%BlogPost{}, params)
  {:ok, %{comments: [comment]}} = Repo.insert(changeset)

  # Inserting the post cascades into its comment: exactly one version,
  # attributed to the tracked actor.
  [%{actor_id: actor_id}] = comment_history = Repo.history(comment)
  assert length(comment_history) == 1
  assert actor_id == user.id
end
# `birthday` is a Date struct; the audit patch should record it as a single
# primitive change (old -> new) rather than a per-field struct diff.
test "structs configured as primitives are treated as primitives" do
  {:ok, old_date} = Date.new(2000, 1, 1)
  params = %{name: "Bob", email: "foo@bar.com", birthday: old_date}
  changeset = User.changeset(%User{}, params)
  {:ok, user} = Repo.insert(changeset)

  new_date = Date.add(old_date, 17)
  params = %{birthday: new_date}
  changeset = User.changeset(user, params)
  {:ok, user} = Repo.update(changeset)

  # Newest version first; its patch must pin the exact old and new dates.
  [version | _] = Repo.history(user)

  assert %{
           patch: %{
             birthday: {:changed, {:primitive_change, ^old_date, ^new_date}}
           }
         } = version
end
test "should track cascading deletions (before they happen)" do
  user = Util.create_user()
  ExAudit.track(actor_id: user.id)

  params = %{
    title: "Controversial post",
    author_id: user.id,
    comments: [
      %{
        body: "lorem impusdrfnia",
        author_id: user.id
      },
      %{
        body: "That's a nice article",
        author_id: user.id
      },
      %{
        body: "We want more of this CONTENT",
        author_id: user.id
      }
    ]
  }

  changeset = BlogPost.changeset(%BlogPost{}, params)
  {:ok, %{comments: comments} = blog_post} = Repo.insert(changeset)

  # Deleting the post cascades to its comments.
  Repo.delete(blog_post)

  comment_ids = Enum.map(comments, & &1.id)

  # Fetch every version recorded for the three comments.
  versions =
    Repo.all(
      from(v in Version,
        where: v.entity_id in ^comment_ids,
        where: v.entity_schema == ^Comment
      )
    )

  # 3 created, 3 deleted
  assert length(versions) == 6
end
# Cleanup: test name typo fixed ("embded" -> "embedded"), unused pattern
# bindings underscored (silences compiler warnings), and a leftover debug
# Logger.info call removed.
test "should track properly embedded assoc" do
  params = %{title: "Welcome!"}
  changeset = BlogPost.changeset(%BlogPost{}, params)
  {:ok, blog_post} = Repo.insert(changeset)
  assert Repo.history(blog_post) |> Enum.count() == 1

  # Add one embedded section -> second version.
  changeset =
    BlogPost.changeset(blog_post, %{
      sections: [
        %{title: "title 1", text: "text 1"}
      ]
    })

  {:ok, blog_post} = Repo.update(changeset)
  assert Repo.history(blog_post) |> Enum.count() == 2

  # Change a plain field -> third version.
  changeset =
    BlogPost.changeset(blog_post, %{
      title: "title 2"
    })

  {:ok, blog_post} = Repo.update(changeset)
  assert Repo.history(blog_post) |> Enum.count() == 3

  [version1, _version2, _version3] = Repo.history(blog_post)

  assert Repo.get!(BlogPost, blog_post.id).title == "title 2"

  # Reverting the newest version restores the old title and records the
  # revert itself as a fourth version.
  Repo.revert(version1)

  assert Repo.history(blog_post) |> Enum.count() == 4
  assert Repo.get!(BlogPost, blog_post.id).title == "Welcome!"

  # Remove the embedded sections -> fifth version.
  changeset =
    BlogPost.changeset(blog_post, %{
      sections: []
    })

  {:ok, blog_post} = Repo.update(changeset)
  assert Repo.history(blog_post) |> Enum.count() == 5
  assert Repo.get!(BlogPost, blog_post.id).sections == []

  # Reverting to the version before the removal restores the section.
  [_version1, version2, _version3, _version4, _version5] = Repo.history(blog_post)

  Repo.revert(version2)

  assert Repo.history(blog_post) |> Enum.count() == 6

  [section1] = Repo.get!(BlogPost, blog_post.id).sections
  assert section1.title == "title 1"
  assert section1.text == "text 1"
end
# Cleanup: test name typos fixed ("embded"/"embeded" -> "embedded") and the
# unused `version2` binding underscored.
test "should track properly embedded assoc for only embedded assoc changes" do
  params = %{title: "Welcome!"}
  changeset = BlogPost.changeset(%BlogPost{}, params)
  {:ok, blog_post} = Repo.insert(changeset)
  assert Repo.history(blog_post) |> Enum.count() == 1

  # Add an embedded section without touching any other field.
  changeset =
    BlogPost.changeset(blog_post, %{
      sections: [
        %{title: "title 1", text: "text 1"}
      ]
    })

  {:ok, blog_post} = Repo.update(changeset)
  assert Repo.history(blog_post) |> Enum.count() == 2

  [version1, _version2] = Repo.history(blog_post)

  # Reverting the embedded-only change must take the sections away again.
  Repo.revert(version1)

  assert Repo.history(blog_post) |> Enum.count() == 3
  assert [] = Repo.get!(BlogPost, blog_post.id).sections
end
# Cleanup: test name typos fixed ("embded"/"embeded" -> "embedded") and the
# unused `version2` binding underscored.
test "should track properly embedded assoc for only embedded assoc changes with sections" do
  # Start with a section already embedded at insert time.
  params = %{
    title: "Welcome!",
    sections: [
      %{title: "title 1", text: "text 1"}
    ]
  }

  changeset = BlogPost.changeset(%BlogPost{}, params)
  {:ok, blog_post} = Repo.insert(changeset)
  assert Repo.history(blog_post) |> Enum.count() == 1

  # Remove the sections -> second version.
  changeset =
    BlogPost.changeset(blog_post, %{
      sections: []
    })

  {:ok, blog_post} = Repo.update(changeset)
  assert Repo.history(blog_post) |> Enum.count() == 2

  [version1, _version2] = Repo.history(blog_post)

  # Reverting the removal must bring the original section back.
  Repo.revert(version1)

  assert Repo.history(blog_post) |> Enum.count() == 3
  assert [section1] = Repo.get!(BlogPost, blog_post.id).sections
  assert section1.title == "title 1"
  assert section1.text == "text 1"
end
# When a delete is blocked by a database constraint, the audited Repo should
# surface the {:error, changeset} tuple just like a plain Ecto repo.
test "should return changesets from constraint errors" do
  user = Util.create_user()

  ch = UserGroup.changeset(%UserGroup{}, %{name: "a group", user_id: user.id})
  {:ok, _group} = Repo.insert(ch)

  import Ecto.Changeset

  # Deleting the user violates the no_assoc constraint on :groups.
  deletion =
    user
    |> change
    |> no_assoc_constraint(:groups)

  assert {:error, %Ecto.Changeset{}} = Repo.delete(deletion)
end
end
| 27.676056 | 91 | 0.610517 |
1c203d9e9d97a7ea8d7cafdc5c76413f6df619ae | 1,661 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3_import_test_cases_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3_import_test_cases_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3_import_test_cases_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3ImportTestCasesResponse do
  @moduledoc """
  The response message for TestCases.ImportTestCases.

  ## Attributes

  * `names` (*type:* `list(String.t)`, *default:* `nil`) - The unique identifiers of the new test cases. Format: `projects//locations//agents//testCases/`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :names => list(String.t()) | nil
        }

  # JSON (de)serialization wiring provided by GoogleApi.Gax.ModelBase.
  field(:names, type: :list)
end
# Delegates Poison decoding to the generated model's own decode/2
# (provided by GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3ImportTestCasesResponse do
  def decode(value, options) do
    GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3ImportTestCasesResponse.decode(
      value,
      options
    )
  end
end
# Delegates Poison encoding to the shared ModelBase encoder.
defimpl Poison.Encoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3ImportTestCasesResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31.942308 | 157 | 0.74714 |
1c20476c5efb1bf6e92a400238b87f2db5b5f21f | 3,141 | exs | Elixir | test/recurring_events_test.exs | pbogut/recurring_events | c341f345410092117f25eb99696aff52dde88a69 | [
"MIT"
] | 21 | 2017-01-11T00:02:46.000Z | 2020-12-28T15:59:04.000Z | test/recurring_events_test.exs | pbogut/recurring_events | c341f345410092117f25eb99696aff52dde88a69 | [
"MIT"
] | 20 | 2017-04-11T20:39:55.000Z | 2021-09-18T09:20:44.000Z | test/recurring_events_test.exs | pbogut/recurring_events | c341f345410092117f25eb99696aff52dde88a69 | [
"MIT"
] | 9 | 2017-04-11T19:31:20.000Z | 2020-10-14T18:36:27.000Z | defmodule RecurringEventsTest do
use ExUnit.Case
doctest RecurringEvents
alias RecurringEvents, as: RR
# Shared fixtures: a fixed start date and a minimal valid rule.
@date ~D[2017-01-01]
@valid_rrule %{freq: :yearly}

test "freq is required" do
  # A rule without :freq must be rejected with an explicit message.
  assert_raise ArgumentError, "Frequency is required", fn ->
    RR.unfold(@date, %{})
  end
end
# Typo fix in the test name: "rise" -> "raise".
test "will raise an error if frequency is invalid" do
  assert_raise ArgumentError, "Frequency is invalid", fn ->
    RR.unfold(@date, %{freq: :whatever})
  end
end
# Typo fix in the test name: "eathier" -> "either".
test "can have either until or count" do
  # :count and :until are mutually exclusive.
  assert_raise ArgumentError, "Can have either, count or until", fn ->
    RR.unfold(@date, Map.merge(@valid_rrule, %{count: 1, until: 2}))
  end
end
# Reworded test name for grammar (was: "when time ruls provided date have to
# have time data").
test "time rules require a date with time data" do
  # A time-based frequency needs time information on the date...
  assert_raise ArgumentError, "To use time rules you have to provide date with time", fn ->
    RR.unfold(@date, %{freq: :minutely})
  end

  # ...and so does any by-time rule such as :by_hour.
  assert_raise ArgumentError, "To use time rules you have to provide date with time", fn ->
    RR.unfold(@date, %{freq: :yearly, by_hour: 12})
  end
end
test "if no date provided it should raise an error" do
  # An empty map is neither a Date nor a DateTime, so unfold/2 rejects it.
  assert_raise ArgumentError, "You have to use date or datetime structure", fn ->
    RR.unfold(%{}, @valid_rrule)
  end
end
test "will raise an exception if count is invalid" do
  # Negative counts are rejected both as the take/3 argument...
  assert_raise ArgumentError, fn ->
    RR.take(@date, @valid_rrule, -1)
  end

  # ...and as a :count key inside the rule itself.
  assert_raise ArgumentError, fn ->
    RR.unfold(@date, %{freq: :yearly, count: -1})
  end
end
test "can handle yearly frequency" do
  # Unfold the yearly recurrence and take the first three occurrences.
  events = @date |> RR.unfold(%{freq: :yearly}) |> Enum.take(3)

  assert length(events) == 3
end
test "can handle monthly frequency" do
  # Three years of monthly occurrences.
  events = @date |> RR.unfold(%{freq: :monthly}) |> Enum.take(36)

  assert length(events) == 36
end
test "can handle daily frequency" do
  # Roughly one quarter of daily occurrences.
  events = @date |> RR.unfold(%{freq: :daily}) |> Enum.take(90)

  assert length(events) == 90
end
test "can handle weekly frequency" do
  # One quarter of weekly occurrences.
  events = @date |> RR.unfold(%{freq: :weekly}) |> Enum.take(13)

  assert length(events) == 13
end
# take/3 returns a plain list where unfold/2 returns a lazy enumerable;
# both must yield exactly the same occurrences.
test "can return list instead of stream" do
  stream = RR.unfold(@date, %{freq: :weekly})
  list = RR.take(@date, %{freq: :weekly}, 29)

  assert is_list(list)
  assert list == Enum.take(stream, 29)
end
test "can optionally return invalid dates" do
  date = ~D[2020-10-31]

  events =
    date
    |> RR.unfold(%{freq: :monthly, invalid: :return})
    |> Enum.take(2)

  # November 31st does not exist; with invalid: :return it is produced as-is
  # (the update syntax below builds the invalid Date without validation).
  assert events == [date, %{date | month: 11, day: 31}]
end
test "can optionally skip invalid dates" do
  date = ~D[2020-10-31]

  events =
    date
    |> RR.unfold(%{freq: :monthly, invalid: :skip})
    |> Enum.take(2)

  # The nonexistent November 31st is skipped, so the occurrence after
  # October 31st is December 31st.
  assert events == [date, %{date | month: 12, day: 31}]
end
test "can optionally skip invalid dates and respect count" do
  date = ~D[2020-10-31]

  events =
    date
    |> RR.unfold(%{freq: :monthly, invalid: :skip, count: 3})
    |> Enum.take(99)

  # With count: 3 only two events come back: the skipped (invalid)
  # November 31st apparently still consumes one slot of the count.
  assert events == [date, %{date | month: 12, day: 31}]
end
end
| 24.161538 | 93 | 0.611589 |
1c2055cb5d520ad3039f1e4d71f0b74b1c43372a | 4,731 | ex | Elixir | wasmcloud_host/lib/wasmcloud_host_web/live/components/define_link_component.ex | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | wasmcloud_host/lib/wasmcloud_host_web/live/components/define_link_component.ex | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | wasmcloud_host/lib/wasmcloud_host_web/live/components/define_link_component.ex | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | defmodule DefineLinkComponent do
use Phoenix.LiveComponent
# Initialize the component with no error message shown.
def mount(socket), do: {:ok, assign(socket, :error_msg, nil)}
# Form change events are currently a no-op; dynamic form updates are handled
# client-side (see TODO below).
def handle_event("validate", _params, socket) do
  # TODO determine feasability of modifying form upon provider select, instead of with custom JS
  {:noreply, socket}
end
# Handles the "define_link" form submission: validates the selected actor and
# provider, parses the optional KEY=VAL values string, publishes the link
# definition to the host, and surfaces any error message on the socket.
def handle_event("define_link", linkdef, socket) do
  actor_id = Map.get(linkdef, "actor_id")
  provider_id = Map.get(linkdef, "provider_id")
  contract_id = Map.get(linkdef, "contract_id")
  link_name = Map.get(linkdef, "link_name")
  values = Map.get(linkdef, "values")

  values_map = parse_values(values)

  error_msg =
    cond do
      actor_id == nil ->
        "Please select an Actor ID to link"

      provider_id == nil ->
        "Please select a Provider ID to link"

      true ->
        case HostCore.Linkdefs.Manager.put_link_definition(
               actor_id,
               contract_id,
               link_name,
               provider_id,
               values_map
             ) do
          :ok -> nil
          _any -> "Error publishing link definition"
        end
    end

  # On success, close the modal everywhere.
  if error_msg == nil do
    Phoenix.PubSub.broadcast(WasmcloudHost.PubSub, "frontend", :hide_modal)
  end

  {:noreply, assign(socket, error_msg: error_msg)}
end

# Parses "KEY1=VAL1,KEY2=VAL2" into a map; nil or "" yields an empty map.
# FIX: split each pair on the first "=" only (parts: 2) so values that
# themselves contain "=" stay intact — the previous flat_map/chunk_every(2)
# approach silently mis-paired such input.
defp parse_values(nil), do: Map.new()
defp parse_values(""), do: Map.new()

defp parse_values(value_list) do
  value_list
  |> String.split(",")
  |> Enum.map(fn pair ->
    [key, value] = String.split(pair, "=", parts: 2)
    {key, value}
  end)
  |> Map.new()
end
# Renders the "define link" modal: actor/provider selects populated from
# @claims (actor keys start with "M", provider keys with "V", both 56 chars),
# link name / contract id / values inputs, and an error alert driven by
# @error_msg. Submission triggers the "define_link" event above.
def render(assigns) do
  ~L"""
  <form class="form-horizontal" phx-submit="define_link" phx-change="validate" phx-target="<%= @myself %>">
  <input name="_csrf_token" type="hidden" value="<%= Phoenix.Controller.get_csrf_token() %>">
  <div class="form-group row">
  <label class="col-md-3 col-form-label" for="text-input">Actor</label>
  <div class="col-md-9">
  <select class="form-control select2-single" id="select2-1" name="actor_id">
  <option hidden disabled selected value> -- select an actor -- </option>
  <%= for {k, v} <- @claims do %>
  <%= if String.starts_with?(k, "M") && String.length(k) == 56 do %>
  <option value="<%= k %>"><%= v.name %> (<%= String.slice(k, 0..4) %>...) </option>
  <% end %>
  <% end %>
  </select>
  </div>
  </div>
  <div class="form-group row">
  <label class="col-md-3 col-form-label" for="text-input">Provider</label>
  <div class="col-md-9">
  <%# On select, populate the linkname and contract_id options with the matching data %>
  <select class="form-control select2-single" id="linkdefs-providerid-select" name="provider_id">
  <option hidden disabled selected value> -- select a provider -- </option>
  <%= for {k, v} <- @claims do %>
  <%= if String.starts_with?(k, "V") && String.length(k) == 56 do %>
  <option value="<%= k %>"><%= v.name %> (<%= String.slice(k, 0..4) %>...) </option>
  <% end %>
  <% end %>
  </select>
  </div>
  </div>
  <div class="form-group row">
  <label class="col-md-3 col-form-label" for="linkdef-linkname-input">Link Name</label>
  <div class="col-md-9">
  <input class="form-control" id="linkdef-linkname-input" type="text" name="link_name" placeholder="default"
  value="default" required>
  </div>
  </div>
  <div class="form-group row">
  <label class="col-md-3 col-form-label" for="linkdef-contractid-input">Contract ID</label>
  <div class="col-md-9">
  <input class="form-control" id="linkdef-contractid-input" type="text" name="contract_id"
  placeholder="wasmcloud:contract" required>
  </div>
  </div>
  <div class="form-group row">
  <label class="col-md-3 col-form-label" for="text-input">Values</label>
  <div class="col-md-9">
  <input class="form-control" id="text-input" type="text" name="values" placeholder="KEY1=VAL1,KEY2=VAL2">
  <span class="help-block">Comma separated list of configuration values</span>
  </div>
  </div>
  <div class="modal-footer">
  <button class="btn btn-secondary" type="button" phx-click="hide_modal">Close</button>
  <button class="btn btn-primary" type="submit">Submit</button>
  </div>
  </form>
  <%= if @error_msg != nil do %>
  <div class="alert alert-danger">
  <%= @error_msg %>
  </div>
  <% end %>
  """
end
end
| 35.30597 | 116 | 0.553371 |
1c206cb2d66da4310ec1faeede8949719e2b932e | 562 | exs | Elixir | test/weather_extractor_test.exs | anthonyoconnor/some_weather | edd3c07115b4f76c0531dd9f4586bd8a13fb369f | [
"MIT"
] | 1 | 2016-05-25T04:03:34.000Z | 2016-05-25T04:03:34.000Z | test/weather_extractor_test.exs | anthonyoconnor/some_weather | edd3c07115b4f76c0531dd9f4586bd8a13fb369f | [
"MIT"
] | null | null | null | test/weather_extractor_test.exs | anthonyoconnor/some_weather | edd3c07115b4f76c0531dd9f4586bd8a13fb369f | [
"MIT"
] | null | null | null | defmodule WeatherExtractorTest do
use ExUnit.Case
import SomeWeather.WeatherExtractor
# Feeds a canned current_observation XML document through extract/1 and
# checks each parsed field. Note: the expected values are charlists
# (single-quoted), not binaries.
test "xml is extracted correctly" do
  sample_xml =
    """
    <current_observation>
    <location>Somewhere</location>
    <station_id>ABC</station_id>
    <temp_c>40.0</temp_c>
    <weather>warm</weather>
    </current_observation>
    """

  output = extract(sample_xml)
  assert output["location"] == 'Somewhere'
  assert output["station id"] == 'ABC'
  assert output["temp degrees C"] == '40.0'
  assert output["weather"] == 'warm'
end
end
| 23.416667 | 45 | 0.653025 |
1c208f42cf5c52c561d9d18775853902c585fece | 1,122 | exs | Elixir | mix.exs | blueangel0723/arc | cd93d7a22f05e626768fdac67123633eb8f694f2 | [
"Apache-2.0"
] | null | null | null | mix.exs | blueangel0723/arc | cd93d7a22f05e626768fdac67123633eb8f694f2 | [
"Apache-2.0"
] | null | null | null | mix.exs | blueangel0723/arc | cd93d7a22f05e626768fdac67123633eb8f694f2 | [
"Apache-2.0"
] | null | null | null | defmodule Arc.Mixfile do
use Mix.Project
@version "0.8.0"
def project do
[app: :arc,
version: @version,
elixir: "~> 1.4",
deps: deps(),
# Hex
description: description(),
package: package()]
end
defp description do
"""
Flexible file upload and attachment library for Elixir.
"""
end
defp package do
[maintainers: ["Sean Stavropoulos"],
licenses: ["Apache 2.0"],
links: %{"GitHub" => "https://github.com/stavro/arc"},
files: ~w(mix.exs README.md CHANGELOG.md lib)]
end
def application do
[
applications: [
:logger,
:httpoison,
] ++ applications(Mix.env)
]
end
def applications(:test), do: [:ex_aws, :poison]
def applications(_), do: []
defp deps do
[
{:httpoison, "~> 0.11"}, # Required for downloading remote files
{:ex_aws, "~> 1.1", optional: true},
{:mock, "~> 0.1", only: :test},
{:ex_doc, "~> 0.14", only: :dev},
# If using Amazon S3:
{:poison, "~> 2.2 or ~> 3.1", optional: true},
{:sweet_xml, "~> 0.6", optional: true}
]
end
end
| 20.4 | 70 | 0.54902 |
1c20bad3c75cbc550a454f105edd4ddc5690f595 | 538 | exs | Elixir | test/test_helper.exs | sgilson/mine | aa07682525ff72545dc2df91a68c23c1d5c937ef | [
"MIT"
] | 1 | 2019-11-15T22:59:57.000Z | 2019-11-15T22:59:57.000Z | test/test_helper.exs | sgilson/mine | aa07682525ff72545dc2df91a68c23c1d5c937ef | [
"MIT"
] | null | null | null | test/test_helper.exs | sgilson/mine | aa07682525ff72545dc2df91a68c23c1d5c937ef | [
"MIT"
] | null | null | null | ExUnit.start()
defmodule CompilerAssertions do
# with some help from: https://gist.github.com/henrik/1054546364ac68da4102
defmacro assert_compiler_raise(expected_message, error_type \\ Mine.View.CompileError, quoted) do
quote do
assert_raise(unquote(error_type), unquote(expected_message), fn ->
unquote(Macro.escape(quoted))
|> Code.eval_quoted()
end)
end
end
defmacro assert_compiles(quoted) do
quote do
unquote(Macro.escape(quoted))
|> Code.eval_quoted()
end
end
end
| 25.619048 | 99 | 0.702602 |
1c20c91fee577dabcbe416609b11e1a21280d140 | 420 | ex | Elixir | examples/elixir/lib/todolist/permissions.ex | ironbay/riptide-next | ff9cc722cfe84846ff8bb71e1c7fbee442c9162b | [
"MIT"
] | 11 | 2020-01-20T16:42:00.000Z | 2021-09-18T11:36:45.000Z | examples/elixir/lib/todolist/permissions.ex | ironbay/riptide-next | ff9cc722cfe84846ff8bb71e1c7fbee442c9162b | [
"MIT"
] | 16 | 2020-04-05T19:26:14.000Z | 2020-05-21T14:50:59.000Z | examples/elixir/lib/todolist/permissions.ex | ironbay/riptide | ff9cc722cfe84846ff8bb71e1c7fbee442c9162b | [
"MIT"
] | 2 | 2020-10-16T11:01:57.000Z | 2021-01-20T21:11:52.000Z | defmodule Todolist.Permissions do
use Riptide.Interceptor
def mutation_before([], _layer, mut, %{internal: true}), do: :ok
def mutation_before([], _layer, mut, state) do
# mut
# |> Riptide.Mutation.layers()
# |> Enum.any?(fn
# {["todos" | _], _} -> false
# {path, layer} -> false
# end)
# |> case do
# true -> :ok
# false -> {:error, :not_allowed}
# end
end
end
| 22.105263 | 66 | 0.552381 |
1c20e73ce2e0047e053ebb555c892052ec966b11 | 6,659 | exs | Elixir | test/toniq_test.exs | itskoko/toniq | 1f96db00556bfac4a929c98fd7f9044906879492 | [
"MIT",
"Unlicense"
] | null | null | null | test/toniq_test.exs | itskoko/toniq | 1f96db00556bfac4a929c98fd7f9044906879492 | [
"MIT",
"Unlicense"
] | null | null | null | test/toniq_test.exs | itskoko/toniq | 1f96db00556bfac4a929c98fd7f9044906879492 | [
"MIT",
"Unlicense"
] | 3 | 2021-04-07T14:59:09.000Z | 2021-04-07T14:59:21.000Z | defmodule ToniqTest do
use ExUnit.Case
import CaptureLog
defmodule TestWorker do
use Toniq.Worker
def perform(data: number) do
send :toniq_test, { :job_has_been_run, number_was: number }
end
end
defmodule TestErrorWorker do
use Toniq.Worker
def perform(_arg) do
send :toniq_test, :job_has_been_run
raise "fail"
end
end
defmodule TestHandleErrorWorker do
use Toniq.Worker
def perform(_arg) do
send :toniq_test, :job_has_been_run
raise "fail"
end
def handle_error(job, error, stack) do
send :toniq_test, :error_handled
end
end
defmodule TestNoArgumentsWorker do
use Toniq.Worker
def perform do
end
end
defmodule TestMaxConcurrencyWorker do
use Toniq.Worker, max_concurrency: 2
def perform(process_name) do
add_to_list process_name
Process.register(self(), process_name)
receive do
:stop -> nil
end
remove_from_list process_name
end
def currently_running_job_names, do: Agent.get(agent(), fn (names) -> names end)
defp add_to_list(process_name), do: Agent.update agent(), fn (list) -> list ++ [ process_name ] end
defp remove_from_list(process_name), do: Agent.update agent(), fn (list) -> list -- [ process_name ] end
defp agent, do: Process.whereis(:job_list)
end
setup do
Toniq.JobEvent.subscribe
Process.register(self(), :toniq_test)
Process.whereis(:toniq_redis) |> Exredis.query([ "FLUSHDB" ])
on_exit &Toniq.JobEvent.unsubscribe/0
end
setup do
Process.whereis(:toniq_redis) |> Exredis.query([ "FLUSHDB" ])
Toniq.KeepalivePersistence.register_vm(Toniq.Keepalive.identifier)
:ok
end
test "running jobs" do
job = Toniq.enqueue(TestWorker, data: 10)
assert_receive { :job_has_been_run, number_was: 10 }
assert_receive { :finished, ^job }
:timer.sleep 1 # allow persistence some time to remove the job
assert Toniq.JobPersistence.jobs == []
end
test "failing jobs are removed from the regular job list and stored in a failed jobs list" do
logs = capture_log fn ->
job = Toniq.enqueue(TestErrorWorker, data: 10)
assert_receive { :failed, ^job }
assert Toniq.JobPersistence.jobs == []
assert Enum.count(Toniq.JobPersistence.failed_jobs) == 1
assert (Toniq.JobPersistence.failed_jobs |> hd).worker == TestErrorWorker
end
assert logs =~ ~r/Job #\d: ToniqTest.TestErrorWorker.perform\(\[data: 10\]\) failed with error: %RuntimeError{message: "fail"}/
end
@tag :capture_log
test "failing jobs are automatically retried" do
job = Toniq.enqueue(TestErrorWorker, data: 10)
assert_receive :job_has_been_run
assert_receive :job_has_been_run
assert_receive :job_has_been_run
refute_receive :job_has_been_run
assert_receive { :failed, ^job }
end
@tag :capture_log
test "failed jobs can be retried" do
job = Toniq.enqueue(TestErrorWorker, data: 10)
assert_receive { :failed, ^job }
assert Enum.count(Toniq.failed_jobs) == 1
job = Toniq.failed_jobs |> hd
assert Toniq.retry(job)
assert_receive { :failed, _job }
assert Enum.count(Toniq.failed_jobs) == 1
end
@tag :capture_log
test "failed jobs can be deleted" do
job = Toniq.enqueue(TestErrorWorker, data: 10)
assert_receive { :failed, ^job }
assert Enum.count(Toniq.failed_jobs) == 1
job = Toniq.failed_jobs |> hd
assert Toniq.delete(job)
assert Toniq.failed_jobs == []
end
@tag :capture_log
test "failed jobs can perform error handling" do
job = Toniq.enqueue(TestHandleErrorWorker)
assert_receive :error_handled
assert_receive { :failed, _job }
assert Enum.count(Toniq.failed_jobs) == 1
end
test "can handle jobs from another VM for some actions (for easy administration of failed jobs)" do
Toniq.KeepalivePersistence.register_vm("other")
Toniq.KeepalivePersistence.update_alive_key("other", 1000)
job = Toniq.JobPersistence.store_job(TestWorker, [], "other")
Toniq.JobPersistence.mark_as_failed(job, "error", "other")
assert Enum.count(Toniq.failed_jobs) == 1
job = Toniq.failed_jobs |> hd
assert job.vm == "other"
Toniq.delete(job)
assert Toniq.failed_jobs == []
end
test "can be conventiently called within a pipeline" do
[data: 10]
|> Toniq.enqueue_to(TestWorker)
assert_receive { :job_has_been_run, number_was: 10 }
end
test "can run jobs without arguments" do
job = Toniq.enqueue(TestNoArgumentsWorker)
assert_receive { :finished, ^job }
end
test "can limit concurrency of jobs" do
{:ok, _pid} = Agent.start_link(fn -> [] end, name: :job_list)
assert TestWorker.max_concurrency == :unlimited
assert TestMaxConcurrencyWorker.max_concurrency == 2
job1 = Toniq.enqueue(TestMaxConcurrencyWorker, :job1)
job2 = Toniq.enqueue(TestMaxConcurrencyWorker, :job2)
job3 = Toniq.enqueue(TestMaxConcurrencyWorker, :job3)
job4 = Toniq.enqueue(TestMaxConcurrencyWorker, :job4)
# wait for jobs to boot up
:timer.sleep 1
assert currently_running_job_names() == [ :job1, :job2 ]
send :job1, :stop
assert_receive {:finished, ^job1}
assert currently_running_job_names() == [ :job2, :job3 ]
send :job2, :stop
assert_receive {:finished, ^job2}
assert currently_running_job_names() == [ :job3, :job4 ]
send :job3, :stop
assert_receive {:finished, ^job3}
assert currently_running_job_names() == [ :job4 ]
send :job4, :stop
assert_receive {:finished, ^job4}
assert currently_running_job_names() == []
end
# regression
test "can enqueue more jobs after limiting jobs once" do
{:ok, _pid} = Agent.start_link(fn -> [] end, name: :job_list)
Toniq.enqueue(TestMaxConcurrencyWorker, :job1)
job2 = Toniq.enqueue(TestMaxConcurrencyWorker, :job2)
Toniq.enqueue(TestMaxConcurrencyWorker, :job3)
:timer.sleep 1 # wait for jobs to boot up
send :job1, :stop
send :job2, :stop
assert_receive {:finished, ^job2}
send :job3, :stop
Toniq.enqueue(TestMaxConcurrencyWorker, :job4)
Toniq.enqueue(TestMaxConcurrencyWorker, :job5)
job6 = Toniq.enqueue(TestMaxConcurrencyWorker, :job6)
:timer.sleep 1 # wait for jobs to boot up
assert currently_running_job_names() == [ :job4, :job5 ]
# Stop everything before running other tests
send :job4, :stop
send :job5, :stop
:timer.sleep 1
send :job6, :stop
assert_receive {:finished, ^job6}
end
defp currently_running_job_names do
Enum.sort(TestMaxConcurrencyWorker.currently_running_job_names)
end
end
| 27.978992 | 131 | 0.691695 |
1c215e9a6b9fa4c59da5dd27ffe3458e2e3e83f7 | 815 | exs | Elixir | mix.exs | andrewMacmurray/identicon-generator | 31fd43d3962574c8e4ce0033742d2e9f797c7ea6 | [
"MIT"
] | null | null | null | mix.exs | andrewMacmurray/identicon-generator | 31fd43d3962574c8e4ce0033742d2e9f797c7ea6 | [
"MIT"
] | null | null | null | mix.exs | andrewMacmurray/identicon-generator | 31fd43d3962574c8e4ce0033742d2e9f797c7ea6 | [
"MIT"
] | null | null | null | defmodule Identicon.Mixfile do
use Mix.Project
def project do
[app: :identicon,
version: "0.1.0",
elixir: "~> 1.4",
escript: escript(),
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
def escript, do: [main_module: Identicon]
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[]
end
end
| 22.027027 | 79 | 0.629448 |
1c216755f47d25c837d7a2b7ea44326834e1dcca | 391 | ex | Elixir | apps/diet_web/lib/diet_web/controllers/page_controller.ex | isavita/diet-umbrella | 0eee4d0dc4d9567888e4b69ccc7993e95d95ed29 | [
"MIT"
] | 1 | 2020-06-01T21:25:54.000Z | 2020-06-01T21:25:54.000Z | apps/diet_web/lib/diet_web/controllers/page_controller.ex | isavita/diet-umbrella | 0eee4d0dc4d9567888e4b69ccc7993e95d95ed29 | [
"MIT"
] | 221 | 2019-07-20T17:20:49.000Z | 2021-08-02T06:21:10.000Z | apps/diet_web/lib/diet_web/controllers/page_controller.ex | isavita/diet-umbrella | 0eee4d0dc4d9567888e4b69ccc7993e95d95ed29 | [
"MIT"
] | null | null | null | defmodule DietWeb.PageController do
use DietWeb, :controller
alias DietWeb.NewsfeedLive
def index(conn, _params) do
render(conn, "index.html")
end
def newsfeed(conn, _params) do
live_render(
conn,
NewsfeedLive,
session: %{
current_user: conn.assigns.current_user,
csrf_token: Phoenix.Controller.get_csrf_token()
}
)
end
end
| 18.619048 | 55 | 0.662404 |
1c216765e535b6f1ef1ad1e99fe109fc2b2eda99 | 3,924 | ex | Elixir | lib/petal_components/modal.ex | gurujimani/petal_components | e62c34eabbd8e5a186c935a1fada847342da5279 | [
"MIT"
] | null | null | null | lib/petal_components/modal.ex | gurujimani/petal_components | e62c34eabbd8e5a186c935a1fada847342da5279 | [
"MIT"
] | null | null | null | lib/petal_components/modal.ex | gurujimani/petal_components | e62c34eabbd8e5a186c935a1fada847342da5279 | [
"MIT"
] | null | null | null | defmodule PetalComponents.Modal do
use Phoenix.Component
alias Phoenix.LiveView.JS
import PetalComponents.Class
# prop title, :string
# prop size, :string
# slot default
def modal(assigns) do
assigns =
assigns
|> assign_new(:classes, fn -> get_classes(assigns) end)
|> assign_new(:extra_assigns, fn ->
assigns_to_attributes(assigns, ~w(
classes
class
title
size
)a)
end)
~H"""
<div {@extra_assigns} id="modal" phx-remove={hide_modal()}>
<div
id="modal-overlay"
class="fixed inset-0 z-50 transition-opacity bg-gray-900 dark:bg-gray-900 bg-opacity-30 dark:bg-opacity-70"
aria-hidden="true"
>
</div>
<div
class="fixed inset-0 z-50 flex items-center justify-center px-4 my-4 overflow-hidden transform sm:px-6"
role="dialog"
aria-modal="true"
>
<div
id="modal-content"
class={@classes}
phx-click-away={hide_modal()}
phx-window-keydown={hide_modal()}
phx-key="escape"
>
<!-- Header -->
<div class="px-5 py-3 border-b border-gray-100 dark:border-gray-600">
<div class="flex items-center justify-between">
<div class="font-semibold text-gray-800 dark:text-gray-200">
<%= @title %>
</div>
<button phx-click={hide_modal()} class="text-gray-400 hover:text-gray-500">
<div class="sr-only">Close</div>
<svg class="w-4 h-4 fill-current">
<path d="M7.95 6.536l4.242-4.243a1 1 0 111.415 1.414L9.364 7.95l4.243 4.242a1 1 0 11-1.415 1.415L7.95 9.364l-4.243 4.243a1 1 0 01-1.414-1.415L6.536 7.95 2.293 3.707a1 1 0 011.414-1.414L7.95 6.536z" />
</svg>
</button>
</div>
</div>
<!-- Content -->
<div class="p-5">
<%= render_slot(@inner_block) %>
</div>
</div>
</div>
</div>
"""
end
# The live view that calls <.modal> will need to handle the "close_modal" event. eg:
# def handle_event("close_modal", _, socket) do
# {:noreply, push_patch(socket, to: Routes.moderate_users_path(socket, :index))}
# end
def hide_modal(js \\ %JS{}) do
js
|> JS.remove_class("overflow-hidden", to: "body")
|> JS.hide(
transition: {
"ease-in duration-200",
"opacity-100",
"opacity-0"
},
to: "#modal-overlay"
)
|> JS.hide(
transition: {
"ease-in duration-200",
"opacity-100 translate-y-0 md:scale-100",
"opacity-0 translate-y-4 md:translate-y-0 md:scale-95"
},
to: "#modal-content"
)
|> JS.push("close_modal")
end
# We are unsure of what the best practice is for using this.
# Open to suggestions/PRs
def show_modal(js \\ %JS{}) do
js
|> JS.add_class("overflow-hidden", to: "body")
|> JS.show(
transition: {
"ease-in duration-300",
"opacity-0",
"opacity-100"
},
to: "#modal-overlay"
)
|> JS.show(
transition: {
"transition ease-in-out duration-200",
"opacity-0 translate-y-4",
"opacity-100 translate-y-0"
},
to: "#modal-content"
)
end
defp get_classes(assigns) do
opts = %{
max_width: assigns[:max_width] || "md",
class: assigns[:class] || ""
}
base_classes = "w-full max-h-full overflow-auto bg-white rounded shadow-lg dark:bg-gray-800"
max_width_class =
case opts.max_width do
"sm" -> "max-w-sm"
"md" -> "max-w-xl"
"lg" -> "max-w-3xl"
"xl" -> "max-w-5xl"
"2xl" -> "max-w-7xl"
"full" -> "max-w-full"
end
custom_classes = opts.class
build_class([max_width_class, base_classes, custom_classes])
end
end
| 27.062069 | 218 | 0.541284 |
1c21a6f2a9b032ad5279c833675201735071d439 | 12,561 | exs | Elixir | test/teiserver/protocols/spring/spring_battle_host_test.exs | Teifion/teiserver | 131e002160e1e948cb55e56d62370ba66a972cba | [
"MIT"
] | 6 | 2021-02-08T10:42:53.000Z | 2021-04-25T12:12:03.000Z | test/teiserver/protocols/spring/spring_battle_host_test.exs | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | null | null | null | test/teiserver/protocols/spring/spring_battle_host_test.exs | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | 2 | 2021-02-23T22:34:00.000Z | 2021-04-08T13:31:36.000Z | defmodule Teiserver.SpringBattleHostTest do
use Central.ServerCase, async: false
require Logger
# alias Teiserver.BitParse
# alias Teiserver.User
alias Teiserver.Battle.Lobby
alias Teiserver.Protocols.Spring
# alias Teiserver.Protocols.{SpringIn, SpringOut, Spring}
import Central.Helpers.NumberHelper, only: [int_parse: 1]
import Teiserver.TeiserverTestLib,
only: [auth_setup: 0, _send_raw: 2, _recv_raw: 1, _recv_until: 1]
setup do
%{socket: socket, user: user} = auth_setup()
{:ok, socket: socket, user: user}
end
test "battle commands when not in a battle", %{socket: socket} do
_send_raw(socket, "LEAVEBATTLE\n")
reply = _recv_raw(socket)
assert reply == :timeout
_send_raw(socket, "MYBATTLESTATUS 123 123\n")
reply = _recv_raw(socket)
assert reply == :timeout
end
test "battle with password", %{socket: socket, user: user} do
_send_raw(
socket,
"OPENBATTLE 0 0 password_test 322 16 gameHash 0 mapHash engineName\tengineVersion\tlobby_host_test\tgameTitle\tgameName\n"
)
# Find battle
battle = Lobby.list_battles
|> Enum.filter(fn b -> b.founder_id == user.id end)
|> hd
assert battle.password == "password_test"
end
test "!rehost bug test", %{socket: host_socket, user: host_user} do
%{socket: watcher_socket} = auth_setup()
%{socket: p1_socket, user: p1_user} = auth_setup()
%{socket: p2_socket, user: p2_user} = auth_setup()
# Open battle
_send_raw(
host_socket,
"OPENBATTLE 0 0 empty 322 16 gameHash 0 mapHash engineName\tengineVersion\tlobby_host_test\tgameTitle\tgameName\n"
)
# Find battle ID
lobby_id = Lobby.list_battles
|> Enum.filter(fn b -> b.founder_id == host_user.id end)
|> hd
|> Map.get(:id)
# Clear watcher
_ = _recv_until(watcher_socket)
# Join
_send_raw(p1_socket, "JOINBATTLE #{lobby_id} empty script_password2\n")
_send_raw(p2_socket, "JOINBATTLE #{lobby_id} empty script_password2\n")
# Nobody has been accepted yet, should not see anything
reply = _recv_raw(watcher_socket)
assert reply == :timeout
# Now accept
_send_raw(host_socket, "JOINBATTLEACCEPT #{p1_user.name}\n")
_send_raw(host_socket, "JOINBATTLEACCEPT #{p2_user.name}\n")
# Accept has happened, should see stuff
reply = _recv_raw(watcher_socket)
assert reply == "JOINEDBATTLE #{lobby_id} #{p1_user.name}\nJOINEDBATTLE #{lobby_id} #{p2_user.name}\n"
# Now have the host leave
_send_raw(host_socket, "LEAVEBATTLE\n")
:timer.sleep(500)
reply = _recv_until(watcher_socket)
assert reply == "BATTLECLOSED #{lobby_id}\n"
end
test "host battle test", %{socket: socket, user: user} do
_send_raw(
socket,
"OPENBATTLE 0 0 empty 322 16 gameHash 0 mapHash engineName\tengineVersion\tlobby_host_test\tgameTitle\tgameName\n"
)
reply =
_recv_until(socket)
|> String.split("\n")
[
opened,
open,
join,
_tags,
battle_status,
_battle_opened
| _
] = reply
assert opened =~ "BATTLEOPENED "
assert open =~ "OPENBATTLE "
assert join =~ "JOINBATTLE "
assert join =~ " gameHash"
lobby_id =
join
|> String.replace("JOINBATTLE ", "")
|> String.replace(" gameHash", "")
|> int_parse
assert battle_status == "REQUESTBATTLESTATUS"
# Check the battle actually got created
battle = Lobby.get_battle(lobby_id)
assert battle != nil
assert Enum.count(battle.players) == 0
# Now create a user to join the battle
%{socket: socket2, user: user2} = auth_setup()
# Check user1 hears about this
reply = _recv_raw(socket)
assert reply =~ "ADDUSER #{user2.name} ?? #{user2.springid} LuaLobby Chobby\n"
# Attempt to join
_send_raw(socket2, "JOINBATTLE #{lobby_id} empty script_password2\n")
# Rejecting a join request is covered elsewhere, we will just handle accepting it for now
reply = _recv_raw(socket)
assert reply == "JOINBATTLEREQUEST #{user2.name} 127.0.0.1\n"
# Send acceptance
_send_raw(socket, "JOINBATTLEACCEPT #{user2.name}\n")
# The response to joining a battle is tested elsewhere, we just care about the host right now
_ = _recv_raw(socket2)
_ = _recv_raw(socket2)
reply = _recv_until(socket)
assert reply =~ "JOINEDBATTLE #{lobby_id} #{user2.name} script_password2\n"
# This used to get updated, why not any more?
# assert reply =~ "CLIENTSTATUS #{user2.name} 0\n"
# Kick user2
battle = Lobby.get_battle(lobby_id)
assert Enum.count(battle.players) == 1
_send_raw(socket, "KICKFROMBATTLE #{user2.name}\n")
reply = _recv_raw(socket2)
assert reply == "FORCEQUITBATTLE\nLEFTBATTLE #{lobby_id} #{user2.name}\n"
# Add user 3
%{socket: socket3, user: user3} = auth_setup()
_send_raw(socket2, "JOINBATTLE #{lobby_id} empty script_password3\n")
_send_raw(socket, "JOINBATTLEACCEPT #{user2.name}\n")
_ = _recv_raw(socket)
_ = _recv_raw(socket2)
# User 3 join the battle
_send_raw(socket3, "JOINBATTLE #{lobby_id} empty script_password3\n")
_send_raw(socket, "JOINBATTLEACCEPT #{user3.name}\n")
reply = _recv_raw(socket2)
assert reply == "JOINEDBATTLE #{lobby_id} #{user3.name}\n"
# Had a bug where the battle would be incorrectly closed
# after kicking a player, it was caused by the host disconnecting
# and in the process closed out the battle
battle = Lobby.get_battle(lobby_id)
assert battle != nil
# Adding start rectangles
assert Enum.count(battle.start_rectangles) == 0
_send_raw(socket, "ADDSTARTRECT 2 50 50 100 100\n")
_ = _recv_raw(socket)
battle = Lobby.get_battle(lobby_id)
assert Enum.count(battle.start_rectangles) == 1
_send_raw(socket, "REMOVESTARTRECT 2\n")
_ = _recv_raw(socket)
battle = Lobby.get_battle(lobby_id)
assert Enum.count(battle.start_rectangles) == 0
# Add and remove script tags
refute Map.has_key?(battle.tags, "custom/key1")
refute Map.has_key?(battle.tags, "custom/key2")
_send_raw(socket, "SETSCRIPTTAGS custom/key1=customValue\tcustom/key2=customValue2\n")
reply = _recv_raw(socket)
assert reply == "SETSCRIPTTAGS custom/key1=customValue\tcustom/key2=customValue2\n"
battle = Lobby.get_battle(lobby_id)
assert Map.has_key?(battle.tags, "custom/key1")
assert Map.has_key?(battle.tags, "custom/key2")
_send_raw(socket, "REMOVESCRIPTTAGS custom/key1\tcustom/key3\n")
reply = _recv_raw(socket)
assert reply == "REMOVESCRIPTTAGS custom/key1\tcustom/key3\n"
battle = Lobby.get_battle(lobby_id)
refute Map.has_key?(battle.tags, "custom/key1")
# We never removed key2, it should still be there
assert Map.has_key?(battle.tags, "custom/key2")
# Enable and disable units
_send_raw(socket, "DISABLEUNITS unit1 unit2 unit3\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "DISABLEUNITS unit1 unit2 unit3\n"
_send_raw(socket, "ENABLEUNITS unit3\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "ENABLEUNITS unit3\n"
_send_raw(socket, "ENABLEALLUNITS\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "ENABLEALLUNITS\n"
# Now kick both 2 and 3
_send_raw(socket, "KICKFROMBATTLE #{user2.name}\n")
_send_raw(socket, "KICKFROMBATTLE #{user3.name}\n")
_ = _recv_raw(socket2)
_ = _recv_raw(socket3)
# Mybattle status
# Clear out a bunch of things we've tested for socket1
_ = _recv_raw(socket2)
_send_raw(socket2, "MYBATTLESTATUS 4195330 600\n")
:timer.sleep(100)
_ = _recv_raw(socket)
reply = _recv_raw(socket2)
# socket2 got kicked, they shouldn't get any result from this
assert reply == :timeout
# Now lets get them to rejoin the battle
_send_raw(socket2, "JOINBATTLE #{lobby_id} empty script_password2\n")
_send_raw(socket, "JOINBATTLEACCEPT #{user2.name}\n")
:timer.sleep(100)
_ = _recv_raw(socket2)
# Now try the status again
_send_raw(socket2, "MYBATTLESTATUS 4195330 600\n")
:timer.sleep(100)
_ = _recv_raw(socket)
reply = _recv_raw(socket2)
assert reply == "CLIENTBATTLESTATUS #{user2.name} 4195330 600\n"
status = Spring.parse_battle_status("4195330")
assert status == %{
ready: true,
handicap: 0,
team_number: 0,
ally_team_number: 0,
player: true,
sync: 1,
side: 0
}
# Handicap
_send_raw(socket, "HANDICAP #{user2.name} 87\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "CLIENTBATTLESTATUS #{user2.name} 4373506 600\n"
status = Spring.parse_battle_status("4373506")
assert status.handicap == 87
_send_raw(socket, "HANDICAP #{user2.name} 0\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "CLIENTBATTLESTATUS #{user2.name} 4195330 600\n"
status = Spring.parse_battle_status("4195330")
assert status.handicap == 0
# Forceteamno
_send_raw(socket, "FORCETEAMNO #{user2.name} 1\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "CLIENTBATTLESTATUS #{user2.name} 4195334 600\n"
status = Spring.parse_battle_status("4195334")
assert status.team_number == 1
# Forceallyno
_send_raw(socket, "FORCEALLYNO #{user2.name} 1\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "CLIENTBATTLESTATUS #{user2.name} 4195398 600\n"
status = Spring.parse_battle_status("4195398")
assert status.ally_team_number == 1
# Forceteamcolour
_send_raw(socket, "FORCETEAMCOLOR #{user2.name} 800\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "CLIENTBATTLESTATUS #{user2.name} 4195398 800\n"
# Forcespectator
_send_raw(socket, "FORCESPECTATORMODE #{user2.name}\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "CLIENTBATTLESTATUS #{user2.name} 4194374 800\n"
status = Spring.parse_battle_status("4194374")
assert status.player == false
# SAYBATTLEEX
_send_raw(socket, "SAYBATTLEEX This is me saying something from somewhere else\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "SAIDBATTLEEX #{user.name} This is me saying something from somewhere else\n"
# UPDATEBATTLEINFO
_send_raw(socket, "UPDATEBATTLEINFO 1 0 123456 Map name here\n")
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "UPDATEBATTLEINFO #{battle.id} 1 0 123456 Map name here\n"
# BOT TIME
_send_raw(socket, "ADDBOT bot1 4195330 0 ai_dll\n")
# Gives time for pubsub to send out
:timer.sleep(100)
reply = _recv_until(socket)
[_, botid] = Regex.run(~r/ADDBOT (\d+) bot1 #{user.name} 4195330 0 ai_dll/, reply)
botid = int_parse(botid)
assert reply =~ "ADDBOT #{botid} bot1 #{user.name} 4195330 0 ai_dll\n"
_send_raw(socket, "UPDATEBOT bot1 4195394 2\n")
# Gives time for pubsub to send out
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "UPDATEBOT #{botid} bot1 4195394 2\n"
# Test out non-hosts updating the bot
_recv_until(socket2)
_send_raw(socket2, "UPDATEBOT bot1 4195394 2\n")
# Gives time for pubsub to send out
:timer.sleep(100)
reply = _recv_until(socket2)
assert reply == ""
_send_raw(socket2, "REMOVEBOT bot1\n")
# Gives time for pubsub to send out
:timer.sleep(100)
reply = _recv_until(socket2)
assert reply == ""
# Now back to actually doing stuff
_send_raw(socket, "REMOVEBOT bot1\n")
# Gives time for pubsub to send out
:timer.sleep(100)
reply = _recv_until(socket)
assert reply == "REMOVEBOT #{botid} bot1\n"
# Rename the battle, should get a message about the rename very shortly afterwards
_send_raw(socket, "c.battle.update_lobby_title NewName 123\n")
:timer.sleep(1000)
reply = _recv_until(socket)
assert reply == "OK cmd=c.battle.update_lobby_title\ns.battle.update_lobby_title #{lobby_id} NewName 123\n"
# Leave the battle
_send_raw(socket, "LEAVEBATTLE\n")
reply = _recv_raw(socket)
# assert reply =~ "LEFTBATTLE #{lobby_id} #{user.name}\n"
assert reply =~ "BATTLECLOSED #{lobby_id}\n"
_send_raw(socket, "EXIT\n")
_recv_raw(socket)
_send_raw(socket2, "EXIT\n")
_recv_raw(socket2)
_send_raw(socket3, "EXIT\n")
_recv_raw(socket3)
end
end
| 32.541451 | 128 | 0.675981 |
1c21c7dd666c141f2296e4cf878d6612ee605e62 | 619 | ex | Elixir | lib/surface/base_component.ex | alexandrubagu/surface | b8ef74444e53f8325fb793fa1399966c2a4b1e7a | [
"MIT"
] | null | null | null | lib/surface/base_component.ex | alexandrubagu/surface | b8ef74444e53f8325fb793fa1399966c2a4b1e7a | [
"MIT"
] | null | null | null | lib/surface/base_component.ex | alexandrubagu/surface | b8ef74444e53f8325fb793fa1399966c2a4b1e7a | [
"MIT"
] | 1 | 2020-04-21T18:49:15.000Z | 2020-04-21T18:49:15.000Z | defmodule Surface.BaseComponent do
@moduledoc false
@doc """
Defines which module is responsible for translating the component. The
returned module must implement the `Surface.Translator` behaviour.
"""
@callback translator() :: module
defmacro __using__(translator: translator) do
quote do
import Surface
@behaviour unquote(__MODULE__)
# TODO: Remove the alias after fix ElixirSense
alias Module, as: Mod
Mod.put_attribute(__MODULE__, :translator, unquote(translator))
@doc false
def translator do
unquote(translator)
end
end
end
end
| 23.807692 | 72 | 0.696284 |
1c21c9de6911013fd31b099fed30c60e30a25096 | 1,899 | ex | Elixir | lib/app_api_web/v1/controllers/conversation_controller.ex | turbo-play/phoenix-services | 9d3318c6b5d5ce6dead441758fc86ec34892b0f6 | [
"Apache-2.0"
] | 2 | 2018-07-05T15:13:12.000Z | 2021-12-16T08:58:56.000Z | lib/app_api_web/v1/controllers/conversation_controller.ex | turbo-play/phoenix-services | 9d3318c6b5d5ce6dead441758fc86ec34892b0f6 | [
"Apache-2.0"
] | null | null | null | lib/app_api_web/v1/controllers/conversation_controller.ex | turbo-play/phoenix-services | 9d3318c6b5d5ce6dead441758fc86ec34892b0f6 | [
"Apache-2.0"
] | 2 | 2018-08-13T21:37:00.000Z | 2021-11-10T04:08:26.000Z | defmodule AppApiWeb.V1.ConversationController do
use AppApiWeb, :controller
require AppApi.EWallet
alias AppApi.{ConversationFetcher, Repo, Conversation}
alias AppApiWeb.V1.JSON.{ConversationSerializer, ResponseSerializer}
require Logger
def create(conn, %{"title" => title, "userIds" => user_ids, "userAttributes" => user_attrs }) do
# @TODOS:
# 1. check all users are friends with this user
# 2. check if there's any existing conversation with these exact same users and return it if so
# 3. create new conversation if not
json conn, ResponseSerializer.serialize(ConversationSerializer.serialize(conn.assigns[:user], ConversationFetcher.create(conn.assigns[:user], %{title: title, participants: user_ids})), "ok")
end
def get(conn, %{"conversation_id" => conversation_id}) do
with %Conversation{} = conversation <- ConversationFetcher.get(conn.assigns[:user], conversation_id) do
json conn, ResponseSerializer.serialize(ConversationSerializer.serialize(conn.assigns[:user], conversation), "ok")
end
json conn, ResponseSerializer.serialize(%{description: "Conversation not found"}, "error")
end
def delete(conn, %{"conversation_id" => conversation_id}) do
with %Conversation{} = conversation <- ConversationFetcher.get(conversation_id) do
case ConversationFetcher.delete_conversation(conversation) do
{:ok, _} -> json conn, ResponseSerializer.serialize(%{}, "ok")
err -> json conn, ResponseSerializer.serialize(err, "error")
end
end
json conn, ResponseSerializer.serialize(%{description: "Conversation not found"}, "error")
end
def getAll(conn, _) do
user = conn.assigns[:user]
user = user |> Repo.preload(:conversations)
json conn, ResponseSerializer.serialize(Enum.map(user.conversations, fn(c) -> ConversationSerializer.serialize(conn.assigns[:user], c) end), "ok")
end
end
| 47.475 | 194 | 0.729331 |
1c21e6d307e7482c0013f012e5abf732cf801936 | 938 | exs | Elixir | apps/kv/mix.exs | bionikspoon/elixir_kv_umbrella | 40d92a7026f83499eefdd69ddc20d1c3c5252c0f | [
"MIT"
] | null | null | null | apps/kv/mix.exs | bionikspoon/elixir_kv_umbrella | 40d92a7026f83499eefdd69ddc20d1c3c5252c0f | [
"MIT"
] | null | null | null | apps/kv/mix.exs | bionikspoon/elixir_kv_umbrella | 40d92a7026f83499eefdd69ddc20d1c3c5252c0f | [
"MIT"
] | null | null | null | defmodule KV.Mixfile do
use Mix.Project
def project do
[app: :kv,
version: "0.1.0",
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock"]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[
extra_applications: [:logger],
env: [routing_table: []],
mod: { KV, []}
]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[]
end
end
| 22.333333 | 79 | 0.579957 |
1c22195e5e198bf6cd376bb9378d2bdd6f9aeab3 | 590 | ex | Elixir | elixir/bob/lib/bob.ex | quatauta/exercism | 8c8a477a83719cbe57630c6a2884d40cc1808149 | [
"MIT"
] | 1 | 2022-01-23T20:34:27.000Z | 2022-01-23T20:34:27.000Z | elixir/bob/lib/bob.ex | quatauta/exercism | 8c8a477a83719cbe57630c6a2884d40cc1808149 | [
"MIT"
] | null | null | null | elixir/bob/lib/bob.ex | quatauta/exercism | 8c8a477a83719cbe57630c6a2884d40cc1808149 | [
"MIT"
] | null | null | null | defmodule Bob do
@moduledoc false
@doc """
Replies to `input` the way a lackadaisical teenager would.

Responses are checked in priority order:

  * blank input       -> "Fine. Be that way!"
  * a yelled question -> "Calm down, I know what I'm doing!"
  * a plain question  -> "Sure."
  * yelling           -> "Whoa, chill out!"
  * anything else     -> "Whatever."
"""
# Fix: the original @spec named the parameter `imput` (typo) — now `input`.
@spec hey(input :: String.t()) :: String.t()
def hey(input) do
  cond do
    empty?(input) -> "Fine. Be that way!"
    yelling_question?(input) -> "Calm down, I know what I'm doing!"
    question?(input) -> "Sure."
    yelling?(input) -> "Whoa, chill out!"
    true -> "Whatever."
  end
end

# Nothing but (Unicode) whitespace.
defp empty?(input), do: input =~ ~r/^\s*$/u
# Ends with a question mark, ignoring trailing whitespace.
defp question?(input), do: input =~ ~r/\?\s*$/
# At least one uppercase letter and no lowercase letters anywhere.
defp yelling?(input), do: input =~ ~r/^\P{Ll}*\p{Lu}+\P{Ll}*$/u
# Yelling that also ends with a question mark.
defp yelling_question?(input), do: input =~ ~r/^\P{Ll}*\p{Lu}+\P{Ll}*\?$/u
end
| 31.052632 | 76 | 0.559322 |
1c222f84bd2861a20c3e373353cb9f47c226270c | 8,386 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/query_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/big_query/lib/google_api/big_query/v2/model/query_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/big_query/lib/google_api/big_query/v2/model/query_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated BigQuery API model — do not hand-edit the field list;
# regenerate from the discovery document instead.
defmodule GoogleApi.BigQuery.V2.Model.QueryRequest do
  @moduledoc """

  ## Attributes

  *   `connectionProperties` (*type:* `list(GoogleApi.BigQuery.V2.Model.ConnectionProperty.t)`, *default:* `nil`) - Connection properties.
  *   `createSession` (*type:* `boolean()`, *default:* `nil`) - If true, creates a new session, where session id will be a server generated random id. If false, runs query with an existing session_id passed in ConnectionProperty, otherwise runs query in non-session mode.
  *   `defaultDataset` (*type:* `GoogleApi.BigQuery.V2.Model.DatasetReference.t`, *default:* `nil`) - [Optional] Specifies the default datasetId and projectId to assume for any unqualified table names in the query. If not set, all table names in the query string must be qualified in the format 'datasetId.tableId'.
  *   `dryRun` (*type:* `boolean()`, *default:* `nil`) - [Optional] If set to true, BigQuery doesn't run the job. Instead, if the query is valid, BigQuery returns statistics about the job such as how many bytes would be processed. If the query is invalid, an error returns. The default value is false.
  *   `kind` (*type:* `String.t`, *default:* `bigquery#queryRequest`) - The resource type of the request.
  *   `labels` (*type:* `map()`, *default:* `nil`) - The labels associated with this job. You can use these to organize and group your jobs. Label keys and values can be no longer than 63 characters, can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. Label values are optional. Label keys must start with a letter and each label in the list must have a different key.
  *   `location` (*type:* `String.t`, *default:* `nil`) - The geographic location where the job should run. See details at https://cloud.google.com/bigquery/docs/locations#specifying_your_location.
  *   `maxResults` (*type:* `integer()`, *default:* `nil`) - [Optional] The maximum number of rows of data to return per page of results. Setting this flag to a small value such as 1000 and then paging through results might improve reliability when the query result set is large. In addition to this limit, responses are also limited to 10 MB. By default, there is no maximum row count, and only the byte limit applies.
  *   `maximumBytesBilled` (*type:* `String.t`, *default:* `nil`) - [Optional] Limits the bytes billed for this job. Queries that will have bytes billed beyond this limit will fail (without incurring a charge). If unspecified, this will be set to your project default.
  *   `parameterMode` (*type:* `String.t`, *default:* `nil`) - Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use named (@myparam) query parameters in this query.
  *   `preserveNulls` (*type:* `boolean()`, *default:* `nil`) - [Deprecated] This property is deprecated.
  *   `query` (*type:* `String.t`, *default:* `nil`) - [Required] A query string, following the BigQuery query syntax, of the query to execute. Example: "SELECT count(f1) FROM [myProjectId:myDatasetId.myTableId]".
  *   `queryParameters` (*type:* `list(GoogleApi.BigQuery.V2.Model.QueryParameter.t)`, *default:* `nil`) - Query parameters for Standard SQL queries.
  *   `requestId` (*type:* `String.t`, *default:* `nil`) - A unique user provided identifier to ensure idempotent behavior for queries. Note that this is different from the job_id. It has the following properties: 1. It is case-sensitive, limited to up to 36 ASCII characters. A UUID is recommended. 2. Read only queries can ignore this token since they are nullipotent by definition. 3. For the purposes of idempotency ensured by the request_id, a request is considered duplicate of another only if they have the same request_id and are actually duplicates. When determining whether a request is a duplicate of the previous request, all parameters in the request that may affect the behavior are considered. For example, query, connection_properties, query_parameters, use_legacy_sql are parameters that affect the result and are considered when determining whether a request is a duplicate, but properties like timeout_ms don't affect the result and are thus not considered. Dry run query requests are never considered duplicate of another request. 4. When a duplicate mutating query request is detected, it returns: a. the results of the mutation if it completes successfully within the timeout. b. the running operation if it is still in progress at the end of the timeout. 5. Its lifetime is limited to 15 minutes. In other words, if two requests are sent with the same request_id, but more than 15 minutes apart, idempotency is not guaranteed.
  *   `timeoutMs` (*type:* `integer()`, *default:* `nil`) - [Optional] How long to wait for the query to complete, in milliseconds, before the request times out and returns. Note that this is only a timeout for the request, not the query. If the query takes longer to run than the timeout value, the call returns without any results and with the 'jobComplete' flag set to false. You can call GetQueryResults() to wait for the query to complete and read the results. The default value is 10000 milliseconds (10 seconds).
  *   `useLegacySql` (*type:* `boolean()`, *default:* `true`) - Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true. If set to false, the query will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the value of flattenResults is ignored; query will be run as if flattenResults is false.
  *   `useQueryCache` (*type:* `boolean()`, *default:* `true`) - [Optional] Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever tables in the query are modified. The default value is true.
  """

  # ModelBase supplies the field/2 macro and the encode/decode plumbing used
  # by the Poison protocol implementations that follow this module.
  use GoogleApi.Gax.ModelBase

  # Struct shape mirrors the attribute list in the moduledoc above.
  @type t :: %__MODULE__{
          :connectionProperties => list(GoogleApi.BigQuery.V2.Model.ConnectionProperty.t()) | nil,
          :createSession => boolean() | nil,
          :defaultDataset => GoogleApi.BigQuery.V2.Model.DatasetReference.t() | nil,
          :dryRun => boolean() | nil,
          :kind => String.t() | nil,
          :labels => map() | nil,
          :location => String.t() | nil,
          :maxResults => integer() | nil,
          :maximumBytesBilled => String.t() | nil,
          :parameterMode => String.t() | nil,
          :preserveNulls => boolean() | nil,
          :query => String.t() | nil,
          :queryParameters => list(GoogleApi.BigQuery.V2.Model.QueryParameter.t()) | nil,
          :requestId => String.t() | nil,
          :timeoutMs => integer() | nil,
          :useLegacySql => boolean() | nil,
          :useQueryCache => boolean() | nil
        }

  # One field/2 declaration per attribute. `as:` names the nested model module
  # used when decoding; `type: :list`/`:map` marks collection-valued fields.
  field(:connectionProperties, as: GoogleApi.BigQuery.V2.Model.ConnectionProperty, type: :list)
  field(:createSession)
  field(:defaultDataset, as: GoogleApi.BigQuery.V2.Model.DatasetReference)
  field(:dryRun)
  field(:kind)
  field(:labels, type: :map)
  field(:location)
  field(:maxResults)
  field(:maximumBytesBilled)
  field(:parameterMode)
  field(:preserveNulls)
  field(:query)
  field(:queryParameters, as: GoogleApi.BigQuery.V2.Model.QueryParameter, type: :list)
  field(:requestId)
  field(:timeoutMs)
  field(:useLegacySql)
  field(:useQueryCache)
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.QueryRequest do
  # Hand decoding off to the model's generated decode/2, which builds any
  # nested model structs declared via field/2.
  def decode(json, opts) do
    GoogleApi.BigQuery.V2.Model.QueryRequest.decode(json, opts)
  end
end
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.QueryRequest do
  # Encoding is uniform across generated models, so delegate to ModelBase.
  def encode(model, opts) do
    GoogleApi.Gax.ModelBase.encode(model, opts)
  end
end
| 88.273684 | 1,449 | 0.730742 |
1c226e08a10ed5e36196ecf5e64f65f9c13cff1b | 799 | ex | Elixir | lib/membrane_h264_ffmpeg/common.ex | geometerio/membrane_h264_ffmpeg_plugin | 87a348ea595c74684d1a6724b98718ba416c7b3e | [
"Apache-2.0"
] | 7 | 2021-01-30T07:12:03.000Z | 2021-12-12T05:28:29.000Z | lib/membrane_h264_ffmpeg/common.ex | geometerio/membrane_h264_ffmpeg_plugin | 87a348ea595c74684d1a6724b98718ba416c7b3e | [
"Apache-2.0"
] | 9 | 2020-11-20T12:54:15.000Z | 2022-03-24T10:26:10.000Z | lib/membrane_h264_ffmpeg/common.ex | geometerio/membrane_h264_ffmpeg_plugin | 87a348ea595c74684d1a6724b98718ba416c7b3e | [
"Apache-2.0"
] | 1 | 2021-06-21T23:33:50.000Z | 2021-06-21T23:33:50.000Z | defmodule Membrane.H264.FFmpeg.Common do
@moduledoc false
use Ratio
# H264 streams use a fixed 90 kHz clock for timestamps.
@h264_time_base 90_000

@doc """
Converts a timestamp from Membrane's internal time base (1 [ns]) to the
H264 time base (1/90_000 [s]) required by the decoder and encoder.

The result is truncated to an integer, since timestamps passed to the
decoder must be integers.
"""
@spec to_h264_time_base(number | Ratio.t()) :: integer
def to_h264_time_base(timestamp) do
  (timestamp * @h264_time_base / Membrane.Time.second()) |> Ratio.trunc()
end
@doc """
Converts a timestamp from the H264 time base (1/90_000 [s]) back to
Membrane's internal time base (1 [ns]).

Because the arithmetic runs under `use Ratio`, this function can return
a rational number rather than an integer.
"""
@spec to_membrane_time_base(number | Ratio.t()) :: number | Ratio.t()
def to_membrane_time_base(timestamp) do
  timestamp * Membrane.Time.second() / @h264_time_base
end
end
| 31.96 | 88 | 0.724656 |
1c22e5c6e0abbbf6c49b45adcfcb742fb8abc93c | 5,185 | exs | Elixir | test/slack/state_test.exs | nmohoric/Elixir-Slack | da6df7612cc0a8c56bc29498a8c636fa6f095d88 | [
"MIT"
] | null | null | null | test/slack/state_test.exs | nmohoric/Elixir-Slack | da6df7612cc0a8c56bc29498a8c636fa6f095d88 | [
"MIT"
] | 1 | 2017-01-08T19:04:57.000Z | 2017-01-08T20:35:02.000Z | test/slack/state_test.exs | nmohoric/Elixir-Slack | da6df7612cc0a8c56bc29498a8c636fa6f095d88 | [
"MIT"
] | 3 | 2017-08-16T23:02:52.000Z | 2020-09-23T13:31:35.000Z | defmodule Slack.StateTest do
use ExUnit.Case
alias Slack.State
# Each test below feeds a raw Slack RTM event map into State.update/2 and
# asserts on the resulting client-state map. `slack` is the shared baseline
# fixture defined at the bottom of this module.
test "channel_joined sets is_member to true" do
  new_slack = State.update(
    %{type: "channel_joined", channel: %{id: "123", members: ["123456", "654321"]}},
    slack
  )
  assert new_slack.channels["123"].is_member == true
  assert new_slack.channels["123"].members == ["123456", "654321"]
end

test "channel_left sets is_member to false" do
  new_slack = State.update(
    %{type: "channel_left", channel: "123"},
    slack
  )
  assert new_slack.channels["123"].is_member == false
end

test "channel_rename renames the channel" do
  new_slack = State.update(
    %{type: "channel_rename", channel: %{id: "123", name: "bar"}},
    slack
  )
  assert new_slack.channels["123"].name == "bar"
end

test "channel_archive marks channel as archived" do
  new_slack = State.update(
    %{type: "channel_archive", channel: "123"},
    slack
  )
  assert new_slack.channels["123"].is_archived == true
end

test "channel_unarchive marks channel as not archived" do
  new_slack = State.update(
    %{type: "channel_unarchive", channel: "123"},
    slack
  )
  assert new_slack.channels["123"].is_archived == false
end

# NOTE(review): despite its name, this test sends a "channel_unarchive"
# event and is an exact duplicate of the test above — it never exercises a
# "channel_leave" event. Looks like a copy-paste slip; confirm intent.
test "channel_leave marks channel as not archived" do
  new_slack = State.update(
    %{type: "channel_unarchive", channel: "123"},
    slack
  )
  assert new_slack.channels["123"].is_archived == false
end
test "team_rename renames team" do
  new_slack = State.update(
    %{type: "team_rename", name: "Bar"},
    slack
  )
  assert new_slack.team.name == "Bar"
end

test "team_join adds user to users" do
  user_length = Map.size(slack.users)
  new_slack = State.update(
    %{type: "team_join", user: %{id: "345"}},
    slack
  )
  assert Map.size(new_slack.users) == user_length + 1
end

# NOTE(review): this test sends a "team_join" event, not "user_change".
# It presumably passes because team_join also stores the user entry —
# confirm, and consider sending a real "user_change" event so the intended
# handler is actually covered.
test "user_change updates user" do
  new_slack = State.update(
    %{type: "team_join", user: %{id: "123", name: "bar"}},
    slack
  )
  assert new_slack.users["123"].name == "bar"
end
test "bot_added adds bot to bots" do
  bot_length = Map.size(slack.bots)
  new_slack = State.update(
    %{type: "bot_added", bot: %{id: "345", name: "new"}},
    slack
  )
  assert Map.size(new_slack.bots) == bot_length + 1
end

# NOTE(review): this test sends a "bot_added" event, not "bot_changed".
# It presumably passes because bot_added overwrites the existing entry —
# confirm, and consider sending a real "bot_changed" event so the intended
# handler is actually covered.
test "bot_changed updates bot in bots" do
  new_slack = State.update(
    %{type: "bot_added", bot: %{id: "123", name: "new"}},
    slack
  )
  assert new_slack.bots["123"].name == "new"
end
# message subtypes and presence updates: same drill — feed the event into
# State.update/2 against the shared fixture and inspect the new state.
test "channel_join message should add member" do
  event = %{type: "message", subtype: "channel_join", channel: "123", user: "U456"}
  updated = State.update(event, slack())
  assert Enum.sort(updated.channels["123"].members) == ["U123", "U456"]
end

test "channel_leave message should remove member" do
  event = %{type: "message", subtype: "channel_leave", channel: "123", user: "U123"}
  updated = State.update(event, slack())
  assert updated.channels["123"].members == []
end

test "presence_change message should update user" do
  event = %{type: "presence_change", user: "123", presence: "testing"}
  updated = State.update(event, slack())
  assert updated.users["123"].presence == "testing"
end
# Private-group (G-prefixed channel) lifecycle events.
test "group_joined event should add group" do
  event = %{type: "group_joined", channel: %{id: "G123", members: ["U123", "U456"]}}
  updated = State.update(event, slack())
  assert updated.groups["G123"]
  assert updated.groups["G123"].members == ["U123", "U456"]
end

test "group_join message should add user to member list" do
  event = %{type: "message", subtype: "group_join", channel: "G000", user: "U000"}
  updated = State.update(event, slack())
  assert "U000" in updated.groups["G000"][:members]
end

test "group_leave message should remove user from member list" do
  event = %{type: "message", subtype: "group_leave", channel: "G000", user: "U111"}
  updated = State.update(event, slack())
  refute "U111" in updated.groups["G000"].members
end

test "group_left message should remove group altogether" do
  updated = State.update(%{type: "group_left", channel: "G000"}, slack())
  refute updated.groups["G000"]
end
test "im_created message should add direct message channel to list" do
  dm_channel = %{id: "C456", name: "channel"}
  updated = State.update(%{type: "im_created", channel: dm_channel}, slack())
  assert updated.ims == %{"C456" => dm_channel}
end
# Baseline client state shared by every test: one channel with a single
# member, one known user, one private group, one bot, and no IMs.
defp slack do
  %{
    team: %{name: "Foo"},
    channels: %{
      "123" => %{
        id: 123,
        name: "foo",
        is_member: nil,
        is_archived: nil,
        members: ["U123"]
      }
    },
    users: %{"123" => %{name: "Bar", presence: "active"}},
    groups: %{"G000" => %{name: "secret-group", members: ["U111", "U222"]}},
    bots: %{"123" => %{name: "Bot"}},
    ims: %{}
  }
end
| 23.894009 | 86 | 0.587078 |
1c22f0d3eabab0337ededd7c67fb708ce86a9a23 | 113 | ex | Elixir | lib/fragment/integration_fields.ex | TheRealReal/prismic-elixir | 0850b922be6d2f2d541add8426fe27f8ce5f63eb | [
"MIT"
] | 10 | 2018-01-12T18:52:53.000Z | 2022-02-17T06:00:21.000Z | lib/fragment/integration_fields.ex | TheRealReal/prismic-elixir | 0850b922be6d2f2d541add8426fe27f8ce5f63eb | [
"MIT"
] | 12 | 2018-02-14T23:08:08.000Z | 2021-11-22T15:55:41.000Z | lib/fragment/integration_fields.ex | TheRealReal/prismic-elixir | 0850b922be6d2f2d541add8426fe27f8ce5f63eb | [
"MIT"
] | 8 | 2018-05-08T14:02:21.000Z | 2021-12-15T08:19:55.000Z | defmodule Prismic.Fragment.IntegrationFields do
@type t :: %__MODULE__{value: any()}
defstruct [:value]
end
| 18.833333 | 47 | 0.734513 |
1c2300ba4437e811a564def3904fa71795da9e31 | 301 | ex | Elixir | elm_elixir/web/controllers/player_controller.ex | leadbrain/tutorial | 97ff8fa72829080b79e5ae32541e04897f31326d | [
"MIT"
] | null | null | null | elm_elixir/web/controllers/player_controller.ex | leadbrain/tutorial | 97ff8fa72829080b79e5ae32541e04897f31326d | [
"MIT"
] | null | null | null | elm_elixir/web/controllers/player_controller.ex | leadbrain/tutorial | 97ff8fa72829080b79e5ae32541e04897f31326d | [
"MIT"
] | null | null | null | defmodule Tutorial.PlayerController do
use Tutorial.Web, :controller
# GET /players — renders every player as a JSON array.
def index(conn, _params) do
  json(conn, Repo.all(Tutorial.Player))
end
# GET /players/:id — renders a single player as JSON.
#
# NOTE(review): `String.to_integer/1` raises ArgumentError when `id` is not
# a valid integer string, and `Repo.get/2` returns nil for a missing record
# (serialized here as JSON null). Consider `Integer.parse/1` plus an
# explicit not-found response.
def show(conn, %{"id" => id}) do
  player = Repo.get(Tutorial.Player, String.to_integer(id))
  json conn, player
end
end
| 21.5 | 61 | 0.69103 |
1c23431579665abab8a6eebd2c465411885fcf92 | 2,162 | ex | Elixir | lib/astarte_core/mapping/database_retention_policy.ex | Annopaolo/astarte_core | 8b3af2871e9ee63ec1f6f42c5a86045fd0c123af | [
"Apache-2.0"
] | 16 | 2018-02-02T14:07:10.000Z | 2021-02-03T13:57:30.000Z | lib/astarte_core/mapping/database_retention_policy.ex | Annopaolo/astarte_core | 8b3af2871e9ee63ec1f6f42c5a86045fd0c123af | [
"Apache-2.0"
] | 62 | 2019-03-14T15:52:12.000Z | 2022-03-22T10:34:56.000Z | lib/astarte_core/mapping/database_retention_policy.ex | Annopaolo/astarte_core | 8b3af2871e9ee63ec1f6f42c5a86045fd0c123af | [
"Apache-2.0"
] | 9 | 2018-02-02T09:55:06.000Z | 2021-03-04T15:15:08.000Z | #
# This file is part of Astarte.
#
# Copyright 2017-2018 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.Core.Mapping.DatabaseRetentionPolicy do
  @moduledoc """
  `Ecto.Type` mapping the database retention policy (`:no_ttl` or
  `:use_ttl`) to the integer representation stored in the database.
  """
  use Ecto.Type

  # Integer codes persisted in the database for each policy.
  @mapping_policy_no_ttl 1
  @mapping_policy_use_ttl 2

  @valid_atoms [
    :no_ttl,
    :use_ttl
  ]

  @impl true
  def type, do: :integer

  # Casting accepts nil, a known policy atom, or its string form.
  @impl true
  def cast(nil), do: {:ok, nil}
  def cast(policy) when is_atom(policy) and policy in @valid_atoms, do: {:ok, policy}
  def cast("no_ttl"), do: {:ok, :no_ttl}
  def cast("use_ttl"), do: {:ok, :use_ttl}
  def cast(_), do: :error

  # Dumping turns a policy atom into its integer code; unknown atoms are an
  # error, non-atoms raise FunctionClauseError.
  @impl true
  def dump(:no_ttl), do: {:ok, @mapping_policy_no_ttl}
  def dump(:use_ttl), do: {:ok, @mapping_policy_use_ttl}
  def dump(other) when is_atom(other), do: :error

  @doc """
  Like `dump/1`, but returns the bare integer and raises `ArgumentError`
  for an atom that is not a valid policy.
  """
  def to_int(policy) when is_atom(policy) do
    case dump(policy) do
      {:ok, int} ->
        int

      :error ->
        raise ArgumentError,
          message: "#{inspect(policy)} is not a valid database retention policy"
    end
  end

  # Loading turns a stored integer code back into the policy atom; unknown
  # integers are an error, non-integers raise FunctionClauseError.
  @impl true
  def load(@mapping_policy_no_ttl), do: {:ok, :no_ttl}
  def load(@mapping_policy_use_ttl), do: {:ok, :use_ttl}
  def load(other) when is_integer(other), do: :error

  @doc """
  Like `load/1`, but returns the bare atom and raises `ArgumentError` for
  an integer that is not a valid policy code.
  """
  def from_int(policy_int) when is_integer(policy_int) do
    case load(policy_int) do
      {:ok, policy} ->
        policy

      :error ->
        raise ArgumentError,
          message: "#{policy_int} is not a valid database retention policy int representation"
    end
  end
end
1c2343dfaacaf671c23f469c2ef34f7a0893b63d | 311 | ex | Elixir | apps/subs/test/support/bcrypt.ex | gitter-badger/opensubs.io | 76d5b4d355a530c8f496efe3ac2095d87f078997 | [
"MIT"
] | 36 | 2018-02-03T10:58:51.000Z | 2020-09-19T20:52:17.000Z | apps/subs/test/support/bcrypt.ex | joaquimadraz/subs | 9a26144ed660d5ece849ee447a9e5de53a311408 | [
"MIT"
] | 8 | 2018-01-17T17:15:48.000Z | 2020-07-06T08:56:54.000Z | apps/subs/test/support/bcrypt.ex | joaquimadraz/subs | 9a26144ed660d5ece849ee447a9e5de53a311408 | [
"MIT"
] | 10 | 2018-05-21T18:20:32.000Z | 2022-01-29T14:25:48.000Z | defmodule Subs.Test.Support.BCrypt do
@moduledoc """
Mock password-"hashing" module for the test environment.

Instead of a real (slow) hashing scheme, passwords are encoded with
Base64, which is fast and deterministic. Never use outside of tests.
"""

# "Hash" is simply the Base64 encoding of the raw password.
def hashpwsalt(plaintext), do: Base.encode64(plaintext)

# A password matches when its Base64 encoding equals the stored value.
def checkpw(password, encrypted_password) do
  encrypted_password == Base.encode64(password)
end

# No-op stand-in for a constant-time dummy check.
def dummy_checkpw, do: nil
end
| 22.214286 | 64 | 0.73955 |
1c239fff6f900bf9270df42dc8c7623a03e918b3 | 90 | exs | Elixir | test/squarestore_web/views/layout_view_test.exs | NinjaAnge/forksquare | ee9ea91e45e50b9f1ba4a8261ebdd99b7fe3333d | [
"MIT"
] | null | null | null | test/squarestore_web/views/layout_view_test.exs | NinjaAnge/forksquare | ee9ea91e45e50b9f1ba4a8261ebdd99b7fe3333d | [
"MIT"
] | null | null | null | test/squarestore_web/views/layout_view_test.exs | NinjaAnge/forksquare | ee9ea91e45e50b9f1ba4a8261ebdd99b7fe3333d | [
"MIT"
] | null | null | null | defmodule SquarestoreWeb.LayoutViewTest do
use SquarestoreWeb.ConnCase, async: true
end
| 22.5 | 42 | 0.844444 |
1c23af455bd48e439e57bb07f58a1f2a966f565e | 532 | ex | Elixir | examples/ct_ecto/lib/ct/tree_path.ex | rmayhue/closure_table | adda01e78f0d9344c1bae9ae8f9d05bcdcea6e0d | [
"Apache-2.0"
] | 16 | 2019-09-08T14:31:36.000Z | 2022-02-14T19:50:23.000Z | examples/ct_ecto/lib/ct/tree_path.ex | rmayhue/closure_table | adda01e78f0d9344c1bae9ae8f9d05bcdcea6e0d | [
"Apache-2.0"
] | 6 | 2019-09-14T06:03:41.000Z | 2021-05-16T00:53:40.000Z | examples/ct_ecto/lib/ct/tree_path.ex | rmayhue/closure_table | adda01e78f0d9344c1bae9ae8f9d05bcdcea6e0d | [
"Apache-2.0"
] | 7 | 2019-09-13T16:39:45.000Z | 2021-05-11T06:15:04.000Z | defmodule CT.TreePath do
use Ecto.Schema
import Ecto.Changeset
alias CT.Comment
# Closure-table edge linking an ancestor comment to a descendant comment.
# `depth` presumably stores the distance between the two — confirm against
# the code that writes these rows. No surrogate primary key is used.
@primary_key false
schema "tree_paths" do
  belongs_to :parent_comment, Comment, foreign_key: :ancestor
  belongs_to :comment, Comment, foreign_key: :descendant
  field :depth, :integer
end
@doc """
Builds a changeset for a tree path, casting `:ancestor`, `:descendant`
and `:depth` from `params` and validating that both endpoint ids are
non-negative.
"""
def changeset(tree_path, params \\ %{}) do
  permitted = [:ancestor, :descendant, :depth]

  tree_path
  |> cast(params, permitted)
  |> validate_number(:ancestor, greater_than_or_equal_to: 0)
  |> validate_number(:descendant, greater_than_or_equal_to: 0)
end
end
| 25.333333 | 64 | 0.716165 |
1c23b2aabab4036da295ac119d132f827600b096 | 7,818 | ex | Elixir | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/api/application_security_groups.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/api/application_security_groups.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/api/application_security_groups.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Network.Api.ApplicationSecurityGroups do
@moduledoc """
API calls for all endpoints tagged `ApplicationSecurityGroups`.
"""
alias Microsoft.Azure.Management.Network.Connection
import Microsoft.Azure.Management.Network.RequestBuilder
@doc """
Creates or updates an application security group.
## Parameters
- connection (Microsoft.Azure.Management.Network.Connection): Connection to server
- resource_group_name (String.t): The name of the resource group.
- application_security_group_name (String.t): The name of the application security group.
- parameters (ApplicationSecurityGroup): Parameters supplied to the create or update ApplicationSecurityGroup operation.
- api_version (String.t): Client API version.
- subscription_id (String.t): The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroup{}} on success
{:error, info} on failure
"""
@spec application_security_groups_create_or_update(Tesla.Env.client, String.t, String.t, Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroup.t, String.t, String.t, keyword()) :: {:ok, Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroup.t} | {:error, Tesla.Env.t}
def application_security_groups_create_or_update(connection, resource_group_name, application_security_group_name, parameters, api_version, subscription_id, _opts \\ []) do
%{}
|> method(:put)
|> url("/subscriptions/#{subscription_id}/resourceGroups/#{resource_group_name}/providers/Microsoft.Network/applicationSecurityGroups/#{application_security_group_name}")
|> add_param(:body, :body, parameters)
|> add_param(:query, :"api-version", api_version)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroup{})
end
@doc """
Deletes the specified application security group.
## Parameters
- connection (Microsoft.Azure.Management.Network.Connection): Connection to server
- resource_group_name (String.t): The name of the resource group.
- application_security_group_name (String.t): The name of the application security group.
- api_version (String.t): Client API version.
- subscription_id (String.t): The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec application_security_groups_delete(Tesla.Env.client, String.t, String.t, String.t, String.t, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def application_security_groups_delete(connection, resource_group_name, application_security_group_name, api_version, subscription_id, _opts \\ []) do
%{}
|> method(:delete)
|> url("/subscriptions/#{subscription_id}/resourceGroups/#{resource_group_name}/providers/Microsoft.Network/applicationSecurityGroups/#{application_security_group_name}")
|> add_param(:query, :"api-version", api_version)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(false)
end
@doc """
Gets information about the specified application security group.
## Parameters
- connection (Microsoft.Azure.Management.Network.Connection): Connection to server
- resource_group_name (String.t): The name of the resource group.
- application_security_group_name (String.t): The name of the application security group.
- api_version (String.t): Client API version.
- subscription_id (String.t): The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroup{}} on success
{:error, info} on failure
"""
@spec application_security_groups_get(Tesla.Env.client, String.t, String.t, String.t, String.t, keyword()) :: {:ok, Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroup.t} | {:error, Tesla.Env.t}
def application_security_groups_get(connection, resource_group_name, application_security_group_name, api_version, subscription_id, _opts \\ []) do
%{}
|> method(:get)
|> url("/subscriptions/#{subscription_id}/resourceGroups/#{resource_group_name}/providers/Microsoft.Network/applicationSecurityGroups/#{application_security_group_name}")
|> add_param(:query, :"api-version", api_version)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroup{})
end
@doc """
Gets all the application security groups in a resource group.
## Parameters
- connection (Microsoft.Azure.Management.Network.Connection): Connection to server
- resource_group_name (String.t): The name of the resource group.
- api_version (String.t): Client API version.
- subscription_id (String.t): The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroupListResult{}} on success
{:error, info} on failure
"""
@spec application_security_groups_list(Tesla.Env.client, String.t, String.t, String.t, keyword()) :: {:ok, Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroupListResult.t} | {:error, Tesla.Env.t}
def application_security_groups_list(connection, resource_group_name, api_version, subscription_id, _opts \\ []) do
%{}
|> method(:get)
|> url("/subscriptions/#{subscription_id}/resourceGroups/#{resource_group_name}/providers/Microsoft.Network/applicationSecurityGroups")
|> add_param(:query, :"api-version", api_version)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroupListResult{})
end
@doc """
Gets all application security groups in a subscription.
## Parameters
- connection (Microsoft.Azure.Management.Network.Connection): Connection to server
- api_version (String.t): Client API version.
- subscription_id (String.t): The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroupListResult{}} on success
{:error, info} on failure
"""
@spec application_security_groups_list_all(Tesla.Env.client, String.t, String.t, keyword()) :: {:ok, Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroupListResult.t} | {:error, Tesla.Env.t}
def application_security_groups_list_all(connection, api_version, subscription_id, _opts \\ []) do
%{}
|> method(:get)
|> url("/subscriptions/#{subscription_id}/providers/Microsoft.Network/applicationSecurityGroups")
|> add_param(:query, :"api-version", api_version)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%Microsoft.Azure.Management.Network.Model.ApplicationSecurityGroupListResult{})
end
end
| 51.098039 | 292 | 0.753773 |
1c23dc96c883c54d18f0f5321e7dd32ad2993c03 | 785 | ex | Elixir | lib/amqp/channel.ex | nulian/amqp | 4b22f46f2b7b6b6d98e17ad97629bab91a559dfb | [
"MIT"
] | null | null | null | lib/amqp/channel.ex | nulian/amqp | 4b22f46f2b7b6b6d98e17ad97629bab91a559dfb | [
"MIT"
] | null | null | null | lib/amqp/channel.ex | nulian/amqp | 4b22f46f2b7b6b6d98e17ad97629bab91a559dfb | [
"MIT"
] | 1 | 2020-10-03T06:56:52.000Z | 2020-10-03T06:56:52.000Z | defmodule AMQP.Channel do
@moduledoc """
Functions to operate on Channels.
"""
alias AMQP.{Connection, Channel}
# A channel handle: the Erlang channel `pid` plus the `Connection` struct
# it was opened on.
defstruct [:conn, :pid]
@type t :: %Channel{conn: Connection.t(), pid: pid}
@doc """
Opens a new Channel on the given open `Connection`.

Returns `{:ok, %Channel{}}` on success; any other result from the
underlying `:amqp_connection.open_channel/1` call is passed through as-is.
"""
@spec open(Connection.t()) :: {:ok, Channel.t()} | {:error, any}
def open(%Connection{pid: conn_pid} = connection) do
  with {:ok, channel_pid} <- :amqp_connection.open_channel(conn_pid) do
    {:ok, %Channel{conn: connection, pid: channel_pid}}
  end
end
@doc """
Closes an open Channel.
"""
@spec close(Channel.t()) :: :ok | {:error, AMQP.Basic.error()}
def close(%Channel{pid: pid}) do
# `:amqp_channel.close/1` yields `:ok` on success; any other result is
# tagged as an error tuple for the caller.
result = :amqp_channel.close(pid)
if result == :ok, do: :ok, else: {:error, result}
end
end
| 23.787879 | 67 | 0.605096 |
1c240c241abc29921fd1351bfb5dc9768caa4375 | 2,225 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_record_location.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_record_location.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_record_location.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
# Auto-generated model module (see the generator notice above): `ModelBase`
# provides the struct definition and JSON (de)serialization plumbing, and
# each `field/2` call below declares one attribute together with the model
# used to decode its nested value.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2RecordLocation do
@moduledoc """
Location of a finding within a row or record.
## Attributes
* `fieldId` (*type:* `GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2FieldId.t`, *default:* `nil`) - Field id of the field containing the finding.
* `recordKey` (*type:* `GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2RecordKey.t`, *default:* `nil`) - Key of the finding.
* `tableLocation` (*type:* `GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2TableLocation.t`, *default:* `nil`) - Location within a `ContentItem.Table`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:fieldId => GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2FieldId.t(),
:recordKey => GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2RecordKey.t(),
:tableLocation => GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2TableLocation.t()
}
field(:fieldId, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2FieldId)
field(:recordKey, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2RecordKey)
field(:tableLocation, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2TableLocation)
end
# Poison decode hook: delegates to the generated `decode/2` provided by
# `ModelBase` so nested model fields are materialized as structs.
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2RecordLocation do
def decode(value, options) do
GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2RecordLocation.decode(value, options)
end
end
# Poison encode hook: all generated models share the encoding implemented
# in `GoogleApi.Gax.ModelBase`.
defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2RecordLocation do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.981132 | 149 | 0.75236 |
1c241cf63204309b5eebe161f133c8a51cb59e10 | 251 | ex | Elixir | test/support/component_case.ex | c4710n/petal_components | 62392b067a4dec8ce76219eea0bfe6a6747925ce | [
"MIT"
] | 215 | 2021-11-04T23:47:18.000Z | 2022-03-31T11:53:15.000Z | test/support/component_case.ex | c4710n/petal_components | 62392b067a4dec8ce76219eea0bfe6a6747925ce | [
"MIT"
] | 22 | 2021-11-06T01:49:27.000Z | 2022-03-28T03:05:40.000Z | test/support/component_case.ex | c4710n/petal_components | 62392b067a4dec8ce76219eea0bfe6a6747925ce | [
"MIT"
] | 22 | 2021-11-16T20:44:33.000Z | 2022-03-28T11:46:33.000Z | defmodule ComponentCase do
use ExUnit.CaseTemplate
# Per-test setup; returns `:ok` because no context values are added.
setup do
# This will run before each test that uses this case
:ok
end
# Code injected into every test module that `use`s this case: makes the
# Phoenix LiveView test helpers available without explicit imports.
using do
quote do
import Phoenix.LiveViewTest
import Phoenix.LiveView.Helpers
end
end
end
| 15.6875 | 56 | 0.693227 |
1c2443e71cea81b6dc0856501fbd5689c2546e84 | 195 | ex | Elixir | exercises/practice/word-count/lib/word_count.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/word-count/lib/word_count.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/word-count/lib/word_count.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule WordCount do
@doc """
Count the number of words in the sentence.
Words are compared case-insensitively.
"""
@spec count(String.t()) :: map
def count(sentence) do
# Case-insensitive word tally (the stub previously returned nil despite the
# `@spec` promising a map). A "word" is a run of letters/digits, optionally
# joined by internal apostrophes or hyphens; everything else is a separator.
~r/[[:alnum:]]+(?:['-][[:alnum:]]+)*/u
|> Regex.scan(String.downcase(sentence))
# With no capture groups, each scan result is a one-element list [match].
|> Enum.map(&hd/1)
|> Enum.frequencies()
end
end
| 17.727273 | 44 | 0.682051 |
1c2445ac3b575988b17e8fc366de69861a04f495 | 336 | ex | Elixir | lib/serializers/revision_serializer.ex | mfeckie/phoenix_frontend_deploys | 86adf2a7a51ce6757e50dfbca74677a93a6950c2 | [
"MIT"
] | null | null | null | lib/serializers/revision_serializer.ex | mfeckie/phoenix_frontend_deploys | 86adf2a7a51ce6757e50dfbca74677a93a6950c2 | [
"MIT"
] | null | null | null | lib/serializers/revision_serializer.ex | mfeckie/phoenix_frontend_deploys | 86adf2a7a51ce6757e50dfbca74677a93a6950c2 | [
"MIT"
] | null | null | null | defmodule PhoenixFrontendDeploys.RevisionSerializer do
use JaSerializer
attributes [:id, :mtime, :size, :active]
def mtime(struct, _opts) do
{{year, month, day}, {hour, minute, second}} = struct.mtime
"#{year}-#{month}-#{day} #{hour}:#{minute}:#{second}"
end
def id(struct, _opts) do
struct.revision
end
end
| 21 | 63 | 0.660714 |
1c245cdcc6448671542803745f08668d877e83b5 | 184 | exs | Elixir | priv/repo/migrations/20150904220940_add_published_to_posts.exs | michelada/dash | dce8e85c07b093c2cb8880290a461a7fc76d2e7d | [
"MIT"
] | 10 | 2015-09-23T05:26:35.000Z | 2020-10-09T16:59:11.000Z | priv/repo/migrations/20150904220940_add_published_to_posts.exs | michelada/dash | dce8e85c07b093c2cb8880290a461a7fc76d2e7d | [
"MIT"
] | 3 | 2015-09-23T05:29:42.000Z | 2017-12-21T19:38:39.000Z | priv/repo/migrations/20150904220940_add_published_to_posts.exs | michelada/dash | dce8e85c07b093c2cb8880290a461a7fc76d2e7d | [
"MIT"
] | 2 | 2015-10-03T12:25:28.000Z | 2015-10-15T04:45:22.000Z | defmodule Dash.Repo.Migrations.AddPublishedToPosts do
use Ecto.Migration
# Reversible migration step: adds a `published` flag to posts, defaulting to
# false so existing rows become unpublished. Ecto derives the rollback
# (dropping the column) automatically from `change/0`.
def change do
alter table(:posts) do
add :published, :boolean, default: false
end
end
end
| 18.4 | 53 | 0.717391 |
1c24683cc8c89edc6726cb4b270eb4c3548decdd | 1,007 | exs | Elixir | test/ex_json_schema/config_test.exs | hzamani/ex_json_schema | ad86d4482a8bda66ba9e6dc954b69261051047bf | [
"MIT"
] | null | null | null | test/ex_json_schema/config_test.exs | hzamani/ex_json_schema | ad86d4482a8bda66ba9e6dc954b69261051047bf | [
"MIT"
] | 1 | 2019-09-24T17:50:02.000Z | 2019-09-24T23:05:46.000Z | test/ex_json_schema/config_test.exs | hzamani/ex_json_schema | ad86d4482a8bda66ba9e6dc954b69261051047bf | [
"MIT"
] | 1 | 2019-09-24T17:07:06.000Z | 2019-09-24T17:07:06.000Z | defmodule ExJsonSchema.ConfigTest do
use ExUnit.Case
import ExJsonSchema.Schema, only: [resolve: 1]
@schema %{"$ref" => "http://somewhere/schema.json"}
# Stand-in resolver used by the test below; echoes the requested URL so the
# assertion can check it was actually called.
def test_resolver(url), do: %{"foo" => url}
# Snapshot the configured resolver and restore it after each test, since the
# tests mutate global application env.
setup do
resolver = Application.get_env(:ex_json_schema, :remote_schema_resolver)
on_exit fn -> Application.put_env(:ex_json_schema, :remote_schema_resolver, resolver) end
:ok
end
test "raising an exception when trying to resolve a remote schema and no remote schema resolver is defined" do
Application.put_env(:ex_json_schema, :remote_schema_resolver, nil)
assert_raise ExJsonSchema.Schema.UndefinedRemoteSchemaResolverError, fn -> resolve(@schema) end
end
test "defining a remote schema resolver with module and function name" do
# Configure the resolver as {module, function}; resolving @schema should
# invoke test_resolver/1 with the remote URL.
Application.put_env(:ex_json_schema, :remote_schema_resolver, {ExJsonSchema.ConfigTest, :test_resolver})
assert resolve(@schema).refs == %{"http://somewhere/schema.json" => %{"foo" => "http://somewhere/schema.json"}}
end
end
| 38.730769 | 115 | 0.7428 |
1c247a20fb7fad82de1e75346aa50321bbe93f13 | 3,258 | ex | Elixir | apps/ewallet_config/lib/ewallet_config/setting_loader.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet_config/lib/ewallet_config/setting_loader.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet_config/lib/ewallet_config/setting_loader.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletConfig.SettingLoader do
@moduledoc """
Load the settings from the database into the application envs.
"""
require Logger
alias EWalletConfig.{Setting, BalanceCachingSettingsLoader, FileStorageSettingsLoader}
# Settings whose change requires extra work beyond writing the application
# env: after all values are loaded, the mapped module's `load/1` is invoked.
@settings_to_loaders %{
balance_caching_frequency: BalanceCachingSettingsLoader,
file_storage_adapter: FileStorageSettingsLoader
}
# Loads the given setting keys for `app` from the database into the
# application env, then runs any dedicated loaders for keys that have one.
def load_settings(app, settings) when is_atom(app) and is_list(settings) do
# Index all stored settings by their (string) key for constant-time lookup.
stored_settings = Setting.all() |> Enum.into(%{}, fn s -> {s.key, s} end)
Enum.each(settings, fn key ->
load_setting(app, key, stored_settings)
end)
# After loading all the settings, loop through one more time to apply
# specific settings loaders, if defined. This has to be a separate loop from above
# to make sure that loaders get latest values for all dependent settings.
Enum.each(settings, fn key ->
case Map.fetch(@settings_to_loaders, key) do
{:ok, loader_module} -> loader_module.load(app)
_ -> :noop
end
end)
end
# NOTE(review): this fallback has arity 3 while the clause above has arity 2,
# so it defines a distinct function `load_settings/3` and never catches
# mismatched `load_settings/2` calls — confirm whether `(_, _)` was intended.
def load_settings(_, _, _), do: nil
# A `{setting, keys}` tuple stores several DB-backed values under a single
# env key as a map; a bare key stores its fetched value directly.
def load_setting(app, {setting, keys}, stored_settings) do
Application.put_env(app, setting, build_values_map(app, keys, stored_settings))
end
def load_setting(app, key, stored_settings) do
Application.put_env(app, key, fetch_value(app, key, stored_settings))
end
defp build_values_map(app, keys, stored_settings) do
Enum.into(keys, %{}, fn key -> handle_mapped_keys(app, key, stored_settings) end)
end
# `{db_setting_name, app_setting_name}` renames a DB setting when it is
# placed into the env map; a bare key keeps its own name.
defp handle_mapped_keys(app, {db_setting_name, app_setting_name}, stored_settings)
when is_atom(app_setting_name) do
{app_setting_name, fetch_value(app, db_setting_name, stored_settings)}
end
defp handle_mapped_keys(app, key, stored_settings) do
{key, fetch_value(app, key, stored_settings)}
end
# Looks the key up in the preloaded settings map; warns (except in the test
# env) and returns nil when the setting does not exist in the database.
defp fetch_value(app, key, stored_settings) do
case Map.get(stored_settings, Atom.to_string(key)) do
nil ->
if Application.get_env(:ewallet, :env) != :test, do: warn(app, key)
nil
setting ->
map_value(key, setting.value)
end
end
# Normalize atom keys to strings before applying the value mappings.
defp map_value(key, value) when is_atom(key) do
str_key = Atom.to_string(key)
map_value(str_key, value)
end
# Applies the optional value-mapping table from `Setting`: an unmapped key
# passes its value through; within a mapping, "_" is the fallback entry for
# values with no explicit mapping.
defp map_value(key, value) when is_binary(key) do
mappings = Setting.get_setting_mappings()
case Map.get(mappings, key) do
nil ->
value
mapping ->
case Map.get(mapping, value) do
nil -> mapping["_"]
mapped_value -> mapped_value
end
end
end
def warn(app, key) do
Logger.warn(~s([Configuration] Setting "#{key}" used by "#{app}" not found.))
end
end
| 31.631068 | 88 | 0.703806 |
1c24854cbc3c98873cca18d0aa6871c464110006 | 335 | exs | Elixir | priv/repo/migrations/20200130185506_create_transactions.exs | juniornelson123/api_banking_teste | db263511d78fffe9e4081551817d006df2094316 | [
"MIT"
] | null | null | null | priv/repo/migrations/20200130185506_create_transactions.exs | juniornelson123/api_banking_teste | db263511d78fffe9e4081551817d006df2094316 | [
"MIT"
] | null | null | null | priv/repo/migrations/20200130185506_create_transactions.exs | juniornelson123/api_banking_teste | db263511d78fffe9e4081551817d006df2094316 | [
"MIT"
] | null | null | null | defmodule ApiBanking.Repo.Migrations.CreateTransactions do
use Ecto.Migration
# Creates the transactions table: each row records a kind, an amount and the
# owning account; `timestamps/0` adds inserted_at/updated_at.
# NOTE(review): `:float` for monetary amounts is lossy — consider `:decimal`
# (behavior-affecting, so flagged rather than changed here).
def change do
create table(:transactions) do
add :kind, :string
add :amount, :float
add :account_id, references(:accounts, on_delete: :nothing)
timestamps()
end
# Index for the frequent lookup of an account's transactions.
create index(:transactions, [:account_id])
end
end
| 20.9375 | 65 | 0.689552 |
1c248c24b7b665d4b21ae0e1f565dfdbd4078ee3 | 13,806 | exs | Elixir | test/credo/check/warning/unused_regex_operation_test.exs | harlantwood/credo | 4569c9c8cfcf74a2074d6911541da0265e52ae99 | [
"MIT"
] | null | null | null | test/credo/check/warning/unused_regex_operation_test.exs | harlantwood/credo | 4569c9c8cfcf74a2074d6911541da0265e52ae99 | [
"MIT"
] | null | null | null | test/credo/check/warning/unused_regex_operation_test.exs | harlantwood/credo | 4569c9c8cfcf74a2074d6911541da0265e52ae99 | [
"MIT"
] | null | null | null | defmodule Credo.Check.Warning.UnusedRegexOperationTest do
use Credo.TestHelper
@described_check Credo.Check.Warning.UnusedRegexOperation
# Baseline case: the Regex result feeds directly into the returned
# expression, so it is "used" and the check must stay silent.
test "it should NOT report expected code" do
"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
Regex.compile!(parameter1) + parameter2
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report when result is piped" do
"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
Regex.compile!(parameter1)
|> some_where
parameter1
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when end of pipe AND return value" do
"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
parameter1 + parameter2
|> Regex.compile!(parameter1)
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when inside of pipe" do
"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
parameter1 + parameter2
|> Regex.compile!(parameter1)
|> some_func_who_knows_what_it_does
:ok
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when inside an assignment" do
"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
offset = Regex.compile!(line)
parameter1 + parameter2 + offset
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when inside a condition" do
"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
if Regex.compile!(x1) > Regex.compile!(x2) do
cond do
Regex.compile!(x3) == "" -> IO.puts("1")
Regex.compile!(x) == 15 -> IO.puts("2")
Regex.delete_at(x3, 1) == "b" -> IO.puts("2")
end
else
case Regex.compile!(x3) do
0 -> true
1 -> false
_ -> something
end
end
unless Regex.compile!(x4) == "" do
IO.puts "empty"
end
parameter1 + parameter2 + offset
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when inside a quote" do
"""
defmodule CredoSampleModule do
defp category_body(nil) do
quote do
__MODULE__
|> Module.split
|> Regex.delete_at(2)
end
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when inside of assignment" do
"""
defmodule CredoSampleModule do
defp print_issue(%Issue{check: check, message: message, filename: filename, priority: priority} = issue, source_file) do
pos =
pos_string(issue.line_no, issue.column)
[
Output.issue_color(issue), "┃ ",
Output.check_tag(check), " ", priority |> Output.priority_arrow,
:normal, :white, " ", message,
]
|> IO.ANSI.format
|> IO.puts
if issue.column do
offset = Regex.compile!(line)
[
Regex.compile!(x, " "), :faint, Regex.compile!(w, ","),
]
|> IO.puts
end
[Output.issue_color(issue), :faint, "┃ "]
|> IO.ANSI.format
|> IO.puts
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when call is buried in else block but is the last call" do
"""
defmodule CredoSampleModule do
defp print_issue(%Issue{check: check, message: message, filename: filename, priority: priority} = issue, source_file) do
if issue.column do
IO.puts "."
else
[:this_actually_might_return, Regex.compile!(w, ","), :ok] # THIS is not the last_call!
end
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when call is buried in else block and is not the last call, but the result is assigned to a variable" do
"""
defmodule CredoSampleModule do
defp print_issue(%Issue{check: check, message: message, filename: filename, priority: priority} = issue, source_file) do
result =
if issue.column do
IO.puts "."
else
[:this_goes_nowhere, Regex.compile!(w, ",")]
end
IO.puts "8"
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when buried in :if, :when and :fn 2" do
"""
defmodule CredoSampleModule do
defp print_issue(%Issue{check: check, message: message, filename: filename, priority: priority} = issue, source_file) do
if issue.column do
IO.puts "."
else
case check do
true -> false
_ ->
Regex.insert_at(arr, [], fn(w) ->
[:this_might_return, Regex.compile!(w, ",")]
end)
end
end
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when :for and :case" do
"""
defmodule CredoSampleModule do
defp convert_parsers(parsers) do
for parser <- parsers do
case Atom.to_string(parser) do
"Elixir." <> _ -> parser
reference -> Regex.delete_at(reference)
end
end
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when part of a function call" do
"""
defmodule CredoSampleModule do
defp convert_parsers(parsers) do
for parser <- parsers do
case Atom.to_string(parser) do
"Elixir." <> _ -> parser
reference -> Module.concat(Plug.Parsers, Regex.delete_at(reference))
end
end
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when :for and :case 2" do
"""
defmodule CredoSampleModule do
defp convert_parsers(parsers) do
for segment <- Regex.append(bin, 1), segment != "", do: segment
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when in :after block" do
"""
defp my_function(fun, opts) do
try do
:fprof.analyse(
dest: analyse_dest,
totals: true,
details: Keyword.get(opts, :details, false),
callers: Keyword.get(opts, :callers, false),
sort: sorting
)
else
:ok ->
{_in, analysis_output} = StringIO.contents(analyse_dest)
Regex.compile!(analysis_output)
after
StringIO.close(analyse_dest)
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when in function call" do
"""
def my_function(url) when is_binary(url) do
if info.userinfo do
destructure [username, password], Regex.compile!(info.userinfo, ":")
end
Regex.insert_at(opts, [], fn {_k, v} -> is_nil(v) end)
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when in function call 2" do
"""
defp print_process(pid_atom, count, own) do
IO.puts([?", Regex.compile!(own, "-")])
IO.write format_item(Path.to_Regex(path, item), Regex.compile!(item))
print_row(["s", "B", "s", ".3f", "s"], [count, "", own, ""])
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should NOT report a violation when in list that is returned" do
"""
defp indent_line(str, indentation, with \\\\ " ") do
[Regex.compile!(with, indentation), str]
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
test "it should report a violation when buried in :if, :when and :fn" do
"""
defmodule CredoSampleModule do
defp print_issue(%Issue{check: check, message: message, filename: filename, priority: priority} = issue, source_file) do
if issue.column do
IO.puts "."
else
case check do
true -> false
_ ->
list =
Regex.insert_at(arr, [], fn(w) ->
[:this_goes_nowhere, Regex.compile!(w, ",")]
end)
end
end
IO.puts "x"
end
end
"""
|> to_source_file
|> refute_issues(@described_check)
end
##############################################################################
##############################################################################
test "it should report a violation" do
"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
x = parameter1 + parameter2
Regex.delete_at(parameter1, x)
parameter1
end
end
"""
|> to_source_file
|> assert_issue(@described_check)
end
test "it should report a violation when end of pipe" do
"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
parameter1 + parameter2
|> Regex.delete_at(parameter1)
parameter1
end
end
"""
|> to_source_file
|> assert_issue(@described_check)
end
test "it should report a violation when buried in :if" do
"""
defmodule CredoSampleModule do
defp print_issue(%Issue{check: check, message: message, filename: filename, priority: priority} = issue, source_file) do
if issue.column do
[
:this_goes_nowhere,
Regex.compile!(w, ",") # THIS is not the last_call!
]
IO.puts "."
else
IO.puts "x"
end
end
end
"""
|> to_source_file
|> assert_issue(@described_check)
end
test "it should report a violation when buried in :else" do
"""
defmodule CredoSampleModule do
defp print_issue(%Issue{check: check, message: message, filename: filename, priority: priority} = issue, source_file) do
if issue.column do
IO.puts "."
else
Regex.compile!(filename)
IO.puts "x"
end
end
end
"""
|> to_source_file
|> assert_issue(@described_check)
end
test "it should report a violation when buried in :if, :when and :fn 2" do
"""
defmodule CredoSampleModule do
defp print_issue(%Issue{check: check, message: message, filename: filename, priority: priority} = issue, source_file) do
if issue.column do
IO.puts "."
else
case check do
true -> false
_ ->
Regex.insert_at(arr, [], fn(w) ->
[:this_goes_nowhere, Regex.compile!(w, ",")]
end)
end
end
IO.puts "x"
end
end
"""
|> to_source_file
|> assert_issues(@described_check)
end
test "it should report a violation when call is buried in else block but is the last call" do
"""
defmodule CredoSampleModule do
defp print_issue(%Issue{check: check, message: message, filename: filename, priority: priority} = issue, source_file) do
if issue.column do
IO.puts "."
else
[:this_goes_nowhere, Regex.compile!(w, ",")] # THIS is not the last_call!
end
IO.puts
end
end
"""
|> to_source_file
|> assert_issue(@described_check)
end
test "it should report a violation when call is buried in else block but is the last call 2" do
"""
defmodule CredoSampleModule do
defp print_issue(%Issue{check: check, message: message, filename: filename, priority: priority} = issue, source_file) do
if issue.column do
IO.puts "."
else
[:this_goes_nowhere, Regex.compile!(w, ",")] # THIS is not the last_call!
IO.puts " "
end
end
end
"""
|> to_source_file
|> assert_issue(@described_check, fn issue ->
assert "Regex.compile!" == issue.trigger
end)
end
test "it should report several violations" do
"""
defmodule CredoSampleModule do
def some_function(parameter1, parameter2) do
Regex.insert_at(parameter1, [], &is_nil/1)
parameter1
end
def some_function2(parameter1, parameter2) do
Regex.insert_at(parameter1, [], parameter2)
parameter1
end
def some_function3(parameter1, parameter2) do
Regex.insert_at(parameter1, [], parameter2)
parameter1
end
end
"""
|> to_source_file
|> assert_issues(@described_check, fn issues ->
assert 3 == Enum.count(issues)
end)
end
test "it should report a violation when used incorrectly, even inside a :for" do
"""
defmodule CredoSampleModule do
defp something(bin) do
for segment <- Regex.append(segment1, 1), segment != "" do
Regex.insert_at(segment, 1, [:added_to_the_tail])
segment
end
end
end
"""
|> to_source_file
|> assert_issue(@described_check, fn issue ->
assert "Regex.insert_at" == issue.trigger
end)
end
end
| 26.807767 | 145 | 0.585035 |
1c24b9c906232264b3e09c0f7a0628e574207621 | 43,246 | ex | Elixir | lib/ecto/adapters/postgres/connection.ex | samullen/ecto_sql | 8086d4ac71f91b5f1b471ae914df3ffbe297f6f7 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/postgres/connection.ex | samullen/ecto_sql | 8086d4ac71f91b5f1b471ae914df3ffbe297f6f7 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/postgres/connection.ex | samullen/ecto_sql | 8086d4ac71f91b5f1b471ae914df3ffbe297f6f7 | [
"Apache-2.0"
] | null | null | null | if Code.ensure_loaded?(Postgrex) do
defmodule Ecto.Adapters.Postgres.Connection do
@moduledoc false
@default_port 5432
@behaviour Ecto.Adapters.SQL.Connection
## Module and Options
@impl true
def child_spec(opts) do
# Fall back to the standard PostgreSQL port unless the caller supplied one,
# then delegate supervision to the Postgrex driver.
Postgrex.child_spec(Keyword.put_new(opts, :port, @default_port))
end
# Maps a Postgrex constraint-violation error onto Ecto's constraint keyword
# list. The first four clauses use the `constraint` field reported by modern
# PostgreSQL servers; clause order matters because the later clauses fall
# back to parsing the human-readable message.
@impl true
def to_constraints(%Postgrex.Error{postgres: %{code: :unique_violation, constraint: constraint}}),
do: [unique: constraint]
def to_constraints(%Postgrex.Error{postgres: %{code: :foreign_key_violation, constraint: constraint}}),
do: [foreign_key: constraint]
def to_constraints(%Postgrex.Error{postgres: %{code: :exclusion_violation, constraint: constraint}}),
do: [exclusion: constraint]
def to_constraints(%Postgrex.Error{postgres: %{code: :check_violation, constraint: constraint}}),
do: [check: constraint]
# Postgres 9.2 and earlier does not provide the constraint field
@impl true
def to_constraints(%Postgrex.Error{postgres: %{code: :unique_violation, message: message}}) do
# The constraint name appears double-quoted after the marker text.
case :binary.split(message, " unique constraint ") do
[_, quoted] -> [unique: strip_quotes(quoted)]
_ -> []
end
end
def to_constraints(%Postgrex.Error{postgres: %{code: :foreign_key_violation, message: message}}) do
case :binary.split(message, " foreign key constraint ") do
[_, quoted] ->
# Trim the trailing " on table ..." part before unquoting the name.
[quoted | _] = :binary.split(quoted, " on table ")
[foreign_key: strip_quotes(quoted)]
_ ->
[]
end
end
def to_constraints(%Postgrex.Error{postgres: %{code: :exclusion_violation, message: message}}) do
case :binary.split(message, " exclusion constraint ") do
[_, quoted] -> [exclusion: strip_quotes(quoted)]
_ -> []
end
end
def to_constraints(%Postgrex.Error{postgres: %{code: :check_violation, message: message}}) do
case :binary.split(message, " check constraint ") do
[_, quoted] -> [check: strip_quotes(quoted)]
_ -> []
end
end
# Any other error carries no constraint information.
def to_constraints(_),
do: []
# Drops the double quotes PostgreSQL wraps around identifiers in its
# error messages, e.g. `"users_email_index"` -> `users_email_index`.
defp strip_quotes(quoted) do
inner_size = byte_size(quoted) - 2
<<_open, inner::binary-size(inner_size), _close>> = quoted
inner
end
## Query
# Thin delegations to the Postgrex driver for the Ecto.Adapters.SQL.Connection
# callbacks.
@impl true
def prepare_execute(conn, name, sql, params, opts) do
Postgrex.prepare_execute(conn, name, sql, params, opts)
end
@impl true
def query(conn, sql, params, opts) do
Postgrex.query(conn, sql, params, opts)
end
# Executes an already-prepared query. When the returned query's ref differs
# from the one we hold, or the prepared statement has become invalid, a
# `{:reset, err}` is returned so the caller can discard the cached statement.
@impl true
def execute(conn, %{ref: ref} = query, params, opts) do
case Postgrex.execute(conn, query, params, opts) do
{:ok, %{ref: ^ref}, result} ->
{:ok, result}
{:ok, _, _} = ok ->
ok
{:error, %Postgrex.QueryError{} = err} ->
{:reset, err}
{:error, %Postgrex.Error{postgres: %{code: :feature_not_supported}} = err} ->
{:reset, err}
{:error, _} = error ->
error
end
end
@impl true
def stream(conn, sql, params, opts) do
Postgrex.stream(conn, sql, params, opts)
end
alias Ecto.Query.{BooleanExpr, JoinExpr, QueryExpr, WithExpr}
# Renders an Ecto query struct as a SELECT statement. Each clause is built
# into iodata by its helper and the pieces are concatenated in SQL clause
# order at the end; empty clauses render as empty iodata.
@impl true
def all(query) do
sources = create_names(query)
# DISTINCT ON expressions must also appear in ORDER BY, hence the pair.
{select_distinct, order_by_distinct} = distinct(query.distinct, sources, query)
cte = cte(query, sources)
from = from(query, sources)
select = select(query, select_distinct, sources)
join = join(query, sources)
where = where(query, sources)
group_by = group_by(query, sources)
having = having(query, sources)
window = window(query, sources)
combinations = combinations(query)
order_by = order_by(query, order_by_distinct, sources)
limit = limit(query, sources)
offset = offset(query, sources)
lock = lock(query.lock)
[cte, select, from, join, where, group_by, having, window, combinations, order_by, limit, offset | lock]
end
# Renders an update-all query. `prefix` defaults to a full "UPDATE ... SET "
# head; `on_conflict/2` below reuses this function with "UPDATE SET " to
# render the DO UPDATE arm of an upsert.
@impl true
def update_all(%{from: %{source: source}} = query, prefix \\ nil) do
sources = create_names(query)
cte = cte(query, sources)
{from, name} = get_source(query, sources, 0, source)
prefix = prefix || ["UPDATE ", from, " AS ", name | " SET "]
fields = update_fields(query, sources)
# Joins become a FROM list; their ON conditions are folded into WHERE.
{join, wheres} = using_join(query, :update_all, "FROM", sources)
where = where(%{query | wheres: wheres ++ query.wheres}, sources)
[cte, prefix, fields, join, where | returning(query, sources)]
end
# Renders a delete-all query; joins become a USING list and their ON
# conditions are folded into the WHERE clause.
@impl true
def delete_all(%{from: from} = query) do
sources = create_names(query)
cte = cte(query, sources)
{from, name} = get_source(query, sources, 0, from)
{join, wheres} = using_join(query, :delete_all, "USING", sources)
where = where(%{query | wheres: wheres ++ query.wheres}, sources)
["DELETE FROM ", from, " AS ", name, join, where | returning(query, sources)]
end
# Renders an INSERT statement, including multi-row VALUES lists, the
# ON CONFLICT clause for upserts and an optional RETURNING list.
# Fix: the capture `&quote_name/1` had been mangled into the non-compiling
# `"e_name/1` (an HTML-escaped `&quot;` corruption) in two places below.
@impl true
def insert(prefix, table, header, rows, on_conflict, returning) do
values =
if header == [] do
# No columns given: every row inserts pure defaults.
[" VALUES " | intersperse_map(rows, ?,, fn _ -> "(DEFAULT)" end)]
else
[?\s, ?(, intersperse_map(header, ?,, &quote_name/1), ") VALUES " | insert_all(rows, 1)]
end
["INSERT INTO ", quote_table(prefix, table), insert_as(on_conflict),
values, on_conflict(on_conflict, header) | returning(returning)]
end
# When the conflict action is a query, alias the insert target so the
# query's source references resolve.
defp insert_as({%{sources: sources}, _, _}) do
{_expr, name, _schema} = create_name(sources, 0)
[" AS " | name]
end
defp insert_as({_, _, _}) do
[]
end
# Renders the ON CONFLICT clause: :raise emits nothing (default behavior),
# :nothing skips conflicting rows, a field list replaces those columns with
# the excluded values, and a query renders a custom DO UPDATE SET.
defp on_conflict({:raise, _, []}, _header),
do: []
defp on_conflict({:nothing, _, targets}, _header),
do: [" ON CONFLICT ", conflict_target(targets) | "DO NOTHING"]
defp on_conflict({fields, _, targets}, _header) when is_list(fields),
do: [" ON CONFLICT ", conflict_target(targets), "DO " | replace(fields)]
defp on_conflict({query, _, targets}, _header),
do: [" ON CONFLICT ", conflict_target(targets), "DO " | update_all(query, "UPDATE SET ")]
# The conflict target is either a named constraint, a raw SQL fragment,
# nothing, or a parenthesized column list.
defp conflict_target({:constraint, constraint}),
do: ["ON CONSTRAINT ", quote_name(constraint), ?\s]
defp conflict_target({:unsafe_fragment, fragment}),
do: [fragment, ?\s]
defp conflict_target([]),
do: []
defp conflict_target(targets),
do: [?(, intersperse_map(targets, ?,, &quote_name/1), ?), ?\s]
# "UPDATE SET col = EXCLUDED.col, ..." for each replaced field.
defp replace(fields) do
["UPDATE SET " |
intersperse_map(fields, ?,, fn field ->
quoted = quote_name(field)
[quoted, " = ", "EXCLUDED." | quoted]
end)]
end
# Renders all VALUES rows, threading the $n placeholder counter across rows.
defp insert_all(rows, counter) do
intersperse_reduce(rows, ?,, counter, fn row, counter ->
{row, counter} = insert_each(row, counter)
{[?(, row, ?)], counter}
end)
|> elem(0)
end
# Renders one row: nil -> DEFAULT, an embedded query -> parenthesized
# subquery advancing the counter by its parameter count, anything else ->
# the next $n placeholder.
defp insert_each(values, counter) do
intersperse_reduce(values, ?,, counter, fn
nil, counter ->
{"DEFAULT", counter}
{%Ecto.Query{} = query, params_counter}, counter ->
{[?(, all(query), ?)], counter + params_counter}
_, counter ->
{[?$ | Integer.to_string(counter)], counter + 1}
end)
end
# Renders a single-row UPDATE: SET assignments take placeholders $1..$k and
# the WHERE filters continue the numbering from `count`.
@impl true
def update(prefix, table, fields, filters, returning) do
{fields, count} = intersperse_reduce(fields, ", ", 1, fn field, acc ->
{[quote_name(field), " = $" | Integer.to_string(acc)], acc + 1}
end)
{filters, _count} = intersperse_reduce(filters, " AND ", count, fn
# nil filters must render as IS NULL — "= NULL" never matches in SQL —
# and consume no placeholder.
{field, nil}, acc ->
{[quote_name(field), " IS NULL"], acc}
{field, _value}, acc ->
{[quote_name(field), " = $" | Integer.to_string(acc)], acc + 1}
end)
["UPDATE ", quote_table(prefix, table), " SET ",
fields, " WHERE ", filters | returning(returning)]
end
# Renders a single-row DELETE with the same nil-as-IS-NULL filter handling.
@impl true
def delete(prefix, table, filters, returning) do
{filters, _} = intersperse_reduce(filters, " AND ", 1, fn
{field, nil}, acc ->
{[quote_name(field), " IS NULL"], acc}
{field, _value}, acc ->
{[quote_name(field), " = $" | Integer.to_string(acc)], acc + 1}
end)
["DELETE FROM ", quote_table(prefix, table), " WHERE ", filters | returning(returning)]
end
## Query generation
# Table of Ecto operators with their SQL spellings. The Enum.map below runs
# at compile time inside the module body, generating one `handle_call/2`
# clause per binary operator; anything else falls through to the generic
# function-call clause.
binary_ops =
[==: " = ", !=: " != ", <=: " <= ", >=: " >= ", <: " < ", >: " > ",
+: " + ", -: " - ", *: " * ", /: " / ",
and: " AND ", or: " OR ", ilike: " ILIKE ", like: " LIKE "]
@binary_ops Keyword.keys(binary_ops)
Enum.map(binary_ops, fn {op, str} ->
defp handle_call(unquote(op), 2), do: {:binary_op, unquote(str)}
end)
defp handle_call(fun, _arity), do: {:fun, Atom.to_string(fun)}
# SELECT clause; `select_distinct` is the iodata produced by distinct/3.
defp select(%{select: %{fields: fields}} = query, select_distinct, sources) do
["SELECT", select_distinct, ?\s | select_fields(fields, sources, query)]
end
# Empty field list selects the constant TRUE (e.g. for exists-style queries).
defp select_fields([], _sources, _query),
do: "TRUE"
# Renders each selected field: `{:&, _, [idx]}` expands to the source's
# column list (errors for schemaless sources), `{key, value}` becomes an
# aliased expression, anything else a bare expression.
defp select_fields(fields, sources, query) do
intersperse_map(fields, ", ", fn
{:&, _, [idx]} ->
case elem(sources, idx) do
{source, _, nil} ->
error!(query, "PostgreSQL does not support selecting all fields from #{source} without a schema. " <>
"Please specify a schema or specify exactly which fields you want to select")
{_, source, _} ->
source
end
{key, value} ->
[expr(value, sources, query), " AS " | quote_name(key)]
value ->
expr(value, sources, query)
end)
end
# Returns `{distinct_sql, distinct_exprs}`; the expressions are also needed
# by order_by/3, since DISTINCT ON columns must lead the ORDER BY.
defp distinct(nil, _, _), do: {[], []}
defp distinct(%QueryExpr{expr: []}, _, _), do: {[], []}
defp distinct(%QueryExpr{expr: true}, _, _), do: {" DISTINCT", []}
defp distinct(%QueryExpr{expr: false}, _, _), do: {[], []}
defp distinct(%QueryExpr{expr: exprs}, sources, query) do
{[" DISTINCT ON (",
intersperse_map(exprs, ", ", fn {_, expr} -> expr(expr, sources, query) end), ?)],
exprs}
end
# FROM clause; PostgreSQL has no table-hint syntax, so hints are rejected.
defp from(%{from: %{hints: [_ | _]}} = query, _sources) do
error!(query, "table hints are not supported by PostgreSQL")
end
defp from(%{from: %{source: source}} = query, sources) do
{from, name} = get_source(query, sources, 0, source)
[" FROM ", from, " AS " | name]
end
# WITH [RECURSIVE] ... common table expressions; empty when the query has none.
defp cte(%{with_ctes: %WithExpr{recursive: recursive, queries: [_ | _] = queries}} = query, sources) do
recursive_opt = if recursive, do: "RECURSIVE ", else: ""
ctes = intersperse_map(queries, ", ", &cte_expr(&1, sources, query))
["WITH ", recursive_opt, ctes, " "]
end
defp cte(%{with_ctes: _}, _), do: []
# One `name AS (...)` entry per CTE.
defp cte_expr({name, cte}, sources, query) do
[quote_name(name), " AS ", cte_query(cte, sources, query)]
end
# A CTE body is either a full query (parenthesized) or a raw fragment.
defp cte_query(%Ecto.Query{} = query, _, _), do: ["(", all(query), ")"]
defp cte_query(%QueryExpr{expr: expr}, sources, query), do: expr(expr, sources, query)
# Flattens every update expression ({op, [{key, value}]}) in the query into
# comma-separated SET fragments.
defp update_fields(%{updates: updates} = query, sources) do
for(%{expr: expr} <- updates,
{op, kw} <- expr,
{key, value} <- kw,
do: update_op(op, key, value, sources, query)) |> Enum.intersperse(", ")
end
# :set -> plain assignment.
defp update_op(:set, key, value, sources, query) do
[quote_name(key), " = " | expr(value, sources, query)]
end
# :inc -> `col = source.col + value`.
defp update_op(:inc, key, value, sources, query) do
[quote_name(key), " = ", quote_qualified_name(key, sources, 0), " + " |
expr(value, sources, query)]
end
# :push / :pull -> PostgreSQL array_append / array_remove.
defp update_op(:push, key, value, sources, query) do
[quote_name(key), " = array_append(", quote_qualified_name(key, sources, 0),
", ", expr(value, sources, query), ?)]
end
defp update_op(:pull, key, value, sources, query) do
[quote_name(key), " = array_remove(", quote_qualified_name(key, sources, 0),
", ", expr(value, sources, query), ?)]
end
defp update_op(command, _key, _value, _sources, query) do
error!(query, "unknown update operation #{inspect command} for PostgreSQL")
end
# For UPDATE ... FROM / DELETE ... USING: renders joins as a comma list of
# sources and returns the ON conditions as extra WHERE expressions (tagged
# as AND-combined BooleanExprs). Only inner joins are expressible this way.
defp using_join(%{joins: []}, _kind, _prefix, _sources), do: {[], []}
defp using_join(%{joins: joins} = query, kind, prefix, sources) do
froms =
intersperse_map(joins, ", ", fn
%JoinExpr{qual: :inner, ix: ix, source: source} ->
{join, name} = get_source(query, sources, ix, source)
[join, " AS " | name]
%JoinExpr{qual: qual} ->
error!(query, "PostgreSQL supports only inner joins on #{kind}, got: `#{qual}`")
end)
wheres =
for %JoinExpr{on: %QueryExpr{expr: value} = expr} <- joins,
value != true,
do: expr |> Map.put(:__struct__, BooleanExpr) |> Map.put(:op, :and)
{[?\s, prefix, ?\s | froms], wheres}
end
# Renders the JOIN clauses of a SELECT; table hints are rejected.
defp join(%{joins: []}, _sources), do: []
defp join(%{joins: joins} = query, sources) do
[?\s | intersperse_map(joins, ?\s, fn
%JoinExpr{on: %QueryExpr{expr: expr}, qual: qual, ix: ix, source: source, hints: hints} ->
if hints != [] do
error!(query, "table hints are not supported by PostgreSQL")
end
{join, name} = get_source(query, sources, ix, source)
[join_qual(qual), join, " AS ", name | join_on(qual, expr, sources, query)]
end)]
end
# CROSS JOIN with a trivially-true condition carries no ON clause.
defp join_on(:cross, true, _sources, _query), do: []
defp join_on(_qual, expr, sources, query), do: [" ON " | expr(expr, sources, query)]
# Join qualifier keywords.
defp join_qual(:inner), do: "INNER JOIN "
defp join_qual(:inner_lateral), do: "INNER JOIN LATERAL "
defp join_qual(:left), do: "LEFT OUTER JOIN "
defp join_qual(:left_lateral), do: "LEFT OUTER JOIN LATERAL "
defp join_qual(:right), do: "RIGHT OUTER JOIN "
defp join_qual(:full), do: "FULL OUTER JOIN "
defp join_qual(:cross), do: "CROSS JOIN "
# WHERE / HAVING both delegate to the boolean/4 combinator below.
defp where(%{wheres: wheres} = query, sources) do
boolean(" WHERE ", wheres, sources, query)
end
defp having(%{havings: havings} = query, sources) do
boolean(" HAVING ", havings, sources, query)
end
# GROUP BY: each group-by expression list is flattened into one comma list.
defp group_by(%{group_bys: []}, _sources), do: []
defp group_by(%{group_bys: group_bys} = query, sources) do
[" GROUP BY " |
intersperse_map(group_bys, ", ", fn
%QueryExpr{expr: expr} ->
intersperse_map(expr, ", ", &expr(&1, sources, query))
end)]
end
# Named WINDOW definitions: `WINDOW name AS (PARTITION BY ... ORDER BY ...)`.
defp window(%{windows: []}, _sources), do: []
defp window(%{windows: windows} = query, sources) do
[" WINDOW " |
intersperse_map(windows, ", ", fn {name, %{expr: kw}} ->
[quote_name(name), " AS " | window_exprs(kw, sources, query)]
end)]
end
# A window body is a space-separated sequence of sub-clauses in parens.
defp window_exprs(kw, sources, query) do
[?(, intersperse_map(kw, ?\s, &window_expr(&1, sources, query)), ?)]
end
defp window_expr({:partition_by, fields}, sources, query) do
["PARTITION BY " | intersperse_map(fields, ", ", &expr(&1, sources, query))]
end
defp window_expr({:order_by, fields}, sources, query) do
["ORDER BY " | intersperse_map(fields, ", ", &order_by_expr(&1, sources, query))]
end
# Frame specifications are only accepted as raw fragments.
defp window_expr({:frame, {:fragment, _, _} = fragment}, sources, query) do
expr(fragment, sources, query)
end
# ORDER BY; `distinct` holds the DISTINCT ON expressions, which PostgreSQL
# requires to appear first in the ordering.
defp order_by(%{order_bys: []}, _distinct, _sources), do: []
defp order_by(%{order_bys: order_bys} = query, distinct, sources) do
order_bys = Enum.flat_map(order_bys, & &1.expr)
[" ORDER BY " |
intersperse_map(distinct ++ order_bys, ", ", &order_by_expr(&1, sources, query))]
end
# :asc is the SQL default, so it adds no suffix.
defp order_by_expr({dir, expr}, sources, query) do
str = expr(expr, sources, query)
case dir do
:asc -> str
:asc_nulls_last -> [str | " ASC NULLS LAST"]
:asc_nulls_first -> [str | " ASC NULLS FIRST"]
:desc -> [str | " DESC"]
:desc_nulls_last -> [str | " DESC NULLS LAST"]
:desc_nulls_first -> [str | " DESC NULLS FIRST"]
end
end
# LIMIT / OFFSET clauses; empty when unset.
defp limit(%{limit: nil}, _sources), do: []
defp limit(%{limit: %QueryExpr{expr: expr}} = query, sources) do
[" LIMIT " | expr(expr, sources, query)]
end
defp offset(%{offset: nil}, _sources), do: []
defp offset(%{offset: %QueryExpr{expr: expr}} = query, sources) do
[" OFFSET " | expr(expr, sources, query)]
end
# UNION / EXCEPT / INTERSECT combinators; each combined query is rendered
# in parentheses after its keyword.
defp combinations(%{combinations: combinations}) do
Enum.map(combinations, fn
{:union, query} -> [" UNION (", all(query), ")"]
{:union_all, query} -> [" UNION ALL (", all(query), ")"]
{:except, query} -> [" EXCEPT (", all(query), ")"]
{:except_all, query} -> [" EXCEPT ALL (", all(query), ")"]
{:intersect, query} -> [" INTERSECT (", all(query), ")"]
{:intersect_all, query} -> [" INTERSECT ALL (", all(query), ")"]
end)
end
# Locking clause (e.g. FOR UPDATE) is passed through verbatim.
defp lock(nil), do: []
defp lock(lock_clause), do: [?\s | lock_clause]
# Folds a list of boolean expressions into SQL. The accumulator tracks the
# last operator: as long as it stays the same, fragments are appended
# flat; when it changes, the accumulated SQL is wrapped in parens first to
# preserve AND/OR precedence.
defp boolean(_name, [], _sources, _query), do: []
defp boolean(name, [%{expr: expr, op: op} | query_exprs], sources, query) do
[name |
Enum.reduce(query_exprs, {op, paren_expr(expr, sources, query)}, fn
%BooleanExpr{expr: expr, op: op}, {op, acc} ->
{op, [acc, operator_to_boolean(op), paren_expr(expr, sources, query)]}
%BooleanExpr{expr: expr, op: op}, {_, acc} ->
{op, [?(, acc, ?), operator_to_boolean(op), paren_expr(expr, sources, query)]}
end) |> elem(1)]
end
defp operator_to_boolean(:and), do: " AND "
defp operator_to_boolean(:or), do: " OR "
# Wraps a fragment in parentheses if it starts with a SELECT, so it can be
# embedded as a subexpression.
defp parens_for_select([first_expr | _] = expr) do
if is_binary(first_expr) and String.starts_with?(first_expr, ["SELECT", "select"]) do
[?(, expr, ?)]
else
expr
end
end
# Renders an expression wrapped in parentheses.
defp paren_expr(expr, sources, query) do
[?(, expr(expr, sources, query), ?)]
end
# Core expression compiler: converts Ecto AST nodes to SQL iodata.
# Placeholders are 1-based (`$1` for index 0).
defp expr({:^, [], [ix]}, _sources, _query) do
[?$ | Integer.to_string(ix + 1)]
end
# `source.field` access.
defp expr({{:., _, [{:&, _, [idx]}, field]}, _, []}, sources, _query) when is_atom(field) do
quote_qualified_name(field, sources, idx)
end
# Bare source reference expands to its alias.
defp expr({:&, _, [idx]}, sources, _query) do
{_, source, _} = elem(sources, idx)
source
end
# `x in []` is always false.
defp expr({:in, _, [_left, []]}, _sources, _query) do
"false"
end
defp expr({:in, _, [left, right]}, sources, query) when is_list(right) do
args = intersperse_map(right, ?,, &expr(&1, sources, query))
[expr(left, sources, query), " IN (", args, ?)]
end
# `x in ^list` binds the whole list as one array parameter via = ANY($n).
defp expr({:in, _, [left, {:^, _, [ix, _]}]}, sources, query) do
[expr(left, sources, query), " = ANY($", Integer.to_string(ix + 1), ?)]
end
defp expr({:in, _, [left, right]}, sources, query) do
[expr(left, sources, query), " = ANY(", expr(right, sources, query), ?)]
end
defp expr({:is_nil, _, [arg]}, sources, query) do
[expr(arg, sources, query) | " IS NULL"]
end
defp expr({:not, _, [expr]}, sources, query) do
["NOT (", expr(expr, sources, query), ?)]
end
defp expr(%Ecto.SubQuery{query: query}, _sources, _query) do
[?(, all(query), ?)]
end
# Keyword/interpolated fragments cannot be compiled to SQL.
defp expr({:fragment, _, [kw]}, _sources, query) when is_list(kw) or tuple_size(kw) == 3 do
error!(query, "PostgreSQL adapter does not support keyword or interpolated fragments")
end
# A fragment alternates raw SQL text with embedded expressions.
defp expr({:fragment, _, parts}, sources, query) do
Enum.map(parts, fn
{:raw, part} -> part
{:expr, expr} -> expr(expr, sources, query)
end)
|> parens_for_select
end
# Date/datetime arithmetic via interval addition; untyped operands get an
# explicit cast so PostgreSQL resolves the overload.
defp expr({:datetime_add, _, [datetime, count, interval]}, sources, query) do
[expr(datetime, sources, query), type_unless_typed(datetime, "timestamp"), " + ",
interval(count, interval, sources, query)]
end
defp expr({:date_add, _, [date, count, interval]}, sources, query) do
[?(, expr(date, sources, query), type_unless_typed(date, "date"), " + ",
interval(count, interval, sources, query) | ")::date"]
end
# Aggregate FILTER and OVER (window) modifiers.
defp expr({:filter, _, [agg, filter]}, sources, query) do
aggregate = expr(agg, sources, query)
[aggregate, " FILTER (WHERE ", expr(filter, sources, query), ?)]
end
defp expr({:over, _, [agg, name]}, sources, query) when is_atom(name) do
aggregate = expr(agg, sources, query)
[aggregate, " OVER " | quote_name(name)]
end
defp expr({:over, _, [agg, kw]}, sources, query) do
aggregate = expr(agg, sources, query)
[aggregate, " OVER ", window_exprs(kw, sources, query)]
end
# Tuple literal -> SQL row constructor.
defp expr({:{}, _, elems}, sources, query) do
[?(, intersperse_map(elems, ?,, &expr(&1, sources, query)), ?)]
end
defp expr({:count, _, []}, _sources, _query), do: "count(*)"
# Generic call: binary operators (from @binary_ops) are rendered infix with
# parenthesized operands; everything else as `fun(args)`, with an optional
# DISTINCT modifier.
defp expr({fun, _, args}, sources, query) when is_atom(fun) and is_list(args) do
{modifier, args} =
case args do
[rest, :distinct] -> {"DISTINCT ", [rest]}
_ -> {[], args}
end
case handle_call(fun, length(args)) do
{:binary_op, op} ->
[left, right] = args
[op_to_binary(left, sources, query), op | op_to_binary(right, sources, query)]
{:fun, fun} ->
[fun, ?(, modifier, intersperse_map(args, ", ", &expr(&1, sources, query)), ?)]
end
end
# Literals.
defp expr(list, sources, query) when is_list(list) do
["ARRAY[", intersperse_map(list, ?,, &expr(&1, sources, query)), ?]]
end
defp expr(%Decimal{} = decimal, _sources, _query) do
Decimal.to_string(decimal, :normal)
end
# Binary literals are emitted as hex bytea; other tagged values get a cast.
defp expr(%Ecto.Query.Tagged{value: binary, type: :binary}, _sources, _query)
when is_binary(binary) do
["'\\x", Base.encode16(binary, case: :lower) | "'::bytea"]
end
defp expr(%Ecto.Query.Tagged{value: other, type: type}, sources, query) do
[expr(other, sources, query), ?:, ?: | tagged_to_db(type)]
end
defp expr(nil, _sources, _query), do: "NULL"
defp expr(true, _sources, _query), do: "TRUE"
defp expr(false, _sources, _query), do: "FALSE"
defp expr(literal, _sources, _query) when is_binary(literal) do
[?\', escape_string(literal), ?\']
end
defp expr(literal, _sources, _query) when is_integer(literal) do
Integer.to_string(literal)
end
# Floats get an explicit cast to avoid being read as numeric.
defp expr(literal, _sources, _query) when is_float(literal) do
[Float.to_string(literal) | "::float"]
end
# Adds a `::type` cast unless the expression already carries a type tag.
defp type_unless_typed(%Ecto.Query.Tagged{}, _type), do: []
defp type_unless_typed(_, type), do: [?:, ?: | type]
# Always use the largest possible type for integers
defp tagged_to_db(:id), do: "bigint"
defp tagged_to_db(:integer), do: "bigint"
defp tagged_to_db({:array, type}), do: [tagged_to_db(type), ?[, ?]]
defp tagged_to_db(type), do: ecto_to_db(type)
# Renders an interval literal (`interval '3 month'`). Non-literal counts are
# multiplied against a unit interval at runtime.
defp interval(count, interval, _sources, _query) when is_integer(count) do
["interval '", String.Chars.Integer.to_string(count), ?\s, interval, ?\']
end
defp interval(count, interval, _sources, _query) when is_float(count) do
count = :erlang.float_to_binary(count, [:compact, decimals: 16])
["interval '", count, ?\s, interval, ?\']
end
defp interval(count, interval, sources, query) do
[?(, expr(count, sources, query), "::numeric * ",
interval(1, interval, sources, query), ?)]
end
# Parenthesizes nested binary operators so precedence is explicit.
defp op_to_binary({op, _, [_, _]} = expr, sources, query) when op in @binary_ops do
paren_expr(expr, sources, query)
end
defp op_to_binary(expr, sources, query) do
expr(expr, sources, query)
end
# RETURNING clause. The 2-arity form derives columns from a query's select;
# the 1-arity form takes an explicit column-name list (empty list -> no
# RETURNING at all).
defp returning(%{select: nil}, _sources),
  do: []
defp returning(%{select: %{fields: fields}} = query, sources),
  do: [" RETURNING " | select_fields(fields, sources, query)]
defp returning([]),
  do: []
# Fix: restores `&quote_name/1`, which had been corrupted to `"e_name/1`
# by HTML-entity mangling of `&quot`.
defp returning(returning),
  do: [" RETURNING " | intersperse_map(returning, ", ", &quote_name/1)]
# Builds the alias tuple for every source in the query: fragments become
# `fN`, subqueries `sN`, tables `<first letter>N` (falling back to `t` when
# the name does not start with an ASCII letter).
defp create_names(%{sources: sources}) do
create_names(sources, 0, tuple_size(sources)) |> List.to_tuple()
end
defp create_names(sources, pos, limit) when pos < limit do
[create_name(sources, pos) | create_names(sources, pos + 1, limit)]
end
defp create_names(_sources, pos, pos) do
[]
end
# Returns {quoted_table_or_nil, alias_iodata, schema_or_nil}.
defp create_name(sources, pos) do
case elem(sources, pos) do
{:fragment, _, _} ->
{nil, [?f | Integer.to_string(pos)], nil}
{table, schema, prefix} ->
name = [create_alias(table) | Integer.to_string(pos)]
{quote_table(prefix, table), name, schema}
%Ecto.SubQuery{} ->
{nil, [?s | Integer.to_string(pos)], nil}
end
end
defp create_alias(<<first, _rest::binary>>) when first in ?a..?z when first in ?A..?Z do
<<first>>
end
defp create_alias(_) do
"t"
end
# DDL
alias Ecto.Migration.{Table, Index, Reference, Constraint}
@creates [:create, :create_if_not_exists]
@drops [:drop, :drop_if_exists]
@impl true
# DDL compiler: each clause turns a migration command into a list of SQL
# statements (comments are emitted as separate COMMENT ON statements).
def execute_ddl({command, %Table{} = table, columns}) when command in @creates do
table_name = quote_table(table.prefix, table.name)
query = ["CREATE TABLE ",
if_do(command == :create_if_not_exists, "IF NOT EXISTS "),
table_name, ?\s, ?(,
column_definitions(table, columns), pk_definition(columns, ", "), ?),
options_expr(table.options)]
[query] ++
comments_on("TABLE", table_name, table.comment) ++
comments_for_columns(table_name, columns)
end
def execute_ddl({command, %Table{} = table}) when command in @drops do
[["DROP TABLE ", if_do(command == :drop_if_exists, "IF EXISTS "),
quote_table(table.prefix, table.name)]]
end
def execute_ddl({:alter, %Table{} = table, changes}) do
table_name = quote_table(table.prefix, table.name)
query = ["ALTER TABLE ", table_name, ?\s,
column_changes(table, changes), pk_definition(changes, ", ADD ")]
[query] ++
comments_on("TABLE", table_name, table.comment) ++
comments_for_columns(table_name, changes)
end
def execute_ddl({:create, %Index{} = index}) do
fields = intersperse_map(index.columns, ", ", &index_expr/1)
queries = [["CREATE ",
if_do(index.unique, "UNIQUE "),
"INDEX ",
if_do(index.concurrently, "CONCURRENTLY "),
quote_name(index.name),
" ON ",
quote_table(index.prefix, index.table),
if_do(index.using, [" USING " , to_string(index.using)]),
?\s, ?(, fields, ?),
if_do(index.where, [" WHERE ", to_string(index.where)])]]
queries ++ comments_on("INDEX", quote_name(index.name), index.comment)
end
# Emulated via a DO block because PostgreSQL has no
# CREATE INDEX IF NOT EXISTS in the supported versions; incompatible with
# CONCURRENTLY (which cannot run inside a transaction/DO block).
def execute_ddl({:create_if_not_exists, %Index{} = index}) do
if index.concurrently do
raise ArgumentError,
"concurrent index and create_if_not_exists is not supported by the Postgres adapter"
end
[["DO $$ BEGIN ",
execute_ddl({:create, index}), ";",
"EXCEPTION WHEN duplicate_table THEN END; $$;"]]
end
def execute_ddl({command, %Index{} = index}) when command in @drops do
[["DROP INDEX ",
if_do(index.concurrently, "CONCURRENTLY "),
if_do(command == :drop_if_exists, "IF EXISTS "),
quote_table(index.prefix, index.name)]]
end
def execute_ddl({:rename, %Table{} = current_table, %Table{} = new_table}) do
[["ALTER TABLE ", quote_table(current_table.prefix, current_table.name),
" RENAME TO ", quote_table(nil, new_table.name)]]
end
def execute_ddl({:rename, %Table{} = table, current_column, new_column}) do
[["ALTER TABLE ", quote_table(table.prefix, table.name), " RENAME ",
quote_name(current_column), " TO ", quote_name(new_column)]]
end
def execute_ddl({:create, %Constraint{} = constraint}) do
table_name = quote_table(constraint.prefix, constraint.table)
queries = [["ALTER TABLE ", table_name,
" ADD ", new_constraint_expr(constraint)]]
queries ++ comments_on("CONSTRAINT", constraint.name, constraint.comment, table_name)
end
def execute_ddl({:drop, %Constraint{} = constraint}) do
[["ALTER TABLE ", quote_table(constraint.prefix, constraint.table),
" DROP CONSTRAINT ", quote_name(constraint.name)]]
end
def execute_ddl({:drop_if_exists, %Constraint{} = constraint}) do
[["ALTER TABLE ", quote_table(constraint.prefix, constraint.table),
" DROP CONSTRAINT IF EXISTS ", quote_name(constraint.name)]]
end
# Raw SQL strings pass through untouched; keyword lists are rejected.
def execute_ddl(string) when is_binary(string), do: [string]
def execute_ddl(keyword) when is_list(keyword),
do: error!(nil, "PostgreSQL adapter does not support keyword lists in execute")
@impl true
# Converts server notices attached to a DDL result into logger tuples.
def ddl_logs(%Postgrex.Result{} = result) do
%{messages: messages} = result
for message <- messages do
%{message: message, severity: severity} = message
{ddl_log_level(severity), message, []}
end
end
@impl true
# Existence check used by the migrator; scoped to the current schema.
def table_exists_query(table) do
{"SELECT true FROM information_schema.tables WHERE table_name = $1 AND table_schema = current_schema() LIMIT 1", [table]}
end
# From https://www.postgresql.org/docs/9.3/static/protocol-error-fields.html.
defp ddl_log_level("DEBUG"), do: :debug
defp ddl_log_level("LOG"), do: :info
defp ddl_log_level("INFO"), do: :info
defp ddl_log_level("NOTICE"), do: :info
defp ddl_log_level("WARNING"), do: :warn
defp ddl_log_level("ERROR"), do: :error
defp ddl_log_level("FATAL"), do: :error
defp ddl_log_level("PANIC"), do: :error
defp ddl_log_level(_severity), do: :info
# Builds the `PRIMARY KEY (...)` definition from the columns flagged with
# `primary_key: true`; returns [] when there is none. `prefix` is the
# leading separator (", " inside CREATE TABLE, ", ADD " inside ALTER TABLE).
defp pk_definition(columns, prefix) do
  pks =
    for {_, name, _, opts} <- columns,
        opts[:primary_key],
        do: name

  case pks do
    [] -> []
    # Fix: restores `&quote_name/1` (had been corrupted to `"e_name/1` by
    # HTML-entity mangling of `&quot`).
    _ -> [prefix, "PRIMARY KEY (", intersperse_map(pks, ", ", &quote_name/1), ")"]
  end
end
# COMMENT ON statements; nil comments produce nothing. The 4-arity form is
# for constraints, which are qualified with their table.
defp comments_on(_object, _name, nil), do: []
defp comments_on(object, name, comment) do
[["COMMENT ON ", object, ?\s, name, " IS ", single_quote(comment)]]
end
defp comments_on(_object, _name, nil, _table_name), do: []
defp comments_on(object, name, comment, table_name) do
[["COMMENT ON ", object, ?\s, quote_name(name), " ON ", table_name,
" IS ", single_quote(comment)]]
end
# Emits a COMMENT ON COLUMN per column that carries a :comment option.
defp comments_for_columns(table_name, columns) do
Enum.flat_map(columns, fn
{_operation, column_name, _column_type, opts} ->
column_name = [table_name, ?. | quote_name(column_name)]
comments_on("COLUMN", column_name, opts[:comment])
_ -> []
end)
end
# Column list of a CREATE TABLE; references additionally emit an inline
# foreign-key constraint.
defp column_definitions(table, columns) do
intersperse_map(columns, ", ", &column_definition(table, &1))
end
defp column_definition(table, {:add, name, %Reference{} = ref, opts}) do
[quote_name(name), ?\s, reference_column_type(ref.type, opts),
column_options(ref.type, opts), reference_expr(ref, table, name)]
end
defp column_definition(_table, {:add, name, type, opts}) do
[quote_name(name), ?\s, column_type(type, opts), column_options(type, opts)]
end
# ALTER TABLE sub-clauses, one per column change.
defp column_changes(table, columns) do
intersperse_map(columns, ", ", &column_change(table, &1))
end
defp column_change(table, {:add, name, %Reference{} = ref, opts}) do
["ADD COLUMN ", quote_name(name), ?\s, reference_column_type(ref.type, opts),
column_options(ref.type, opts), reference_expr(ref, table, name)]
end
defp column_change(_table, {:add, name, type, opts}) do
["ADD COLUMN ", quote_name(name), ?\s, column_type(type, opts),
column_options(type, opts)]
end
defp column_change(table, {:add_if_not_exists, name, %Reference{} = ref, opts}) do
["ADD COLUMN IF NOT EXISTS ", quote_name(name), ?\s, reference_column_type(ref.type, opts),
column_options(ref.type, opts), reference_expr(ref, table, name)]
end
defp column_change(_table, {:add_if_not_exists, name, type, opts}) do
["ADD COLUMN IF NOT EXISTS ", quote_name(name), ?\s, column_type(type, opts),
column_options(type, opts)]
end
# :modify first drops the old constraint named in opts[:from] (if any),
# retypes the column, then re-adds constraint / NULL / DEFAULT as needed.
defp column_change(table, {:modify, name, %Reference{} = ref, opts}) do
[drop_constraint_expr(opts[:from], table, name), "ALTER COLUMN ", quote_name(name), " TYPE ", reference_column_type(ref.type, opts),
constraint_expr(ref, table, name), modify_null(name, opts), modify_default(name, ref.type, opts)]
end
defp column_change(table, {:modify, name, type, opts}) do
[drop_constraint_expr(opts[:from], table, name), "ALTER COLUMN ", quote_name(name), " TYPE ",
column_type(type, opts), modify_null(name, opts), modify_default(name, type, opts)]
end
# Removing a reference column also drops its foreign-key constraint first.
defp column_change(_table, {:remove, name}), do: ["DROP COLUMN ", quote_name(name)]
defp column_change(table, {:remove, name, %Reference{} = ref, _opts}) do
[drop_constraint_expr(ref, table, name), "DROP COLUMN ", quote_name(name)]
end
defp column_change(_table, {:remove, name, _type, _opts}), do: ["DROP COLUMN ", quote_name(name)]
defp column_change(table, {:remove_if_exists, name, %Reference{} = ref}) do
[drop_constraint_if_exists_expr(ref, table, name), "DROP COLUMN IF EXISTS ", quote_name(name)]
end
defp column_change(_table, {:remove_if_exists, name, _type}), do: ["DROP COLUMN IF EXISTS ", quote_name(name)]
# SET/DROP NOT NULL sub-clause for :modify; absent :null option changes nothing.
defp modify_null(name, opts) do
case Keyword.get(opts, :null) do
true -> [", ALTER COLUMN ", quote_name(name), " DROP NOT NULL"]
false -> [", ALTER COLUMN ", quote_name(name), " SET NOT NULL"]
nil -> []
end
end
# SET DEFAULT sub-clause for :modify; uses fetch so an explicit nil default
# is distinguished from no :default option at all.
defp modify_default(name, type, opts) do
case Keyword.fetch(opts, :default) do
{:ok, val} -> [", ALTER COLUMN ", quote_name(name), " SET", default_expr({:ok, val}, type)]
:error -> []
end
end
# DEFAULT and NULL/NOT NULL column options for column definitions.
defp column_options(type, opts) do
default = Keyword.fetch(opts, :default)
null = Keyword.get(opts, :null)
[default_expr(default, type), null_expr(null)]
end
defp null_expr(false), do: " NOT NULL"
defp null_expr(true), do: " NULL"
defp null_expr(_), do: []
# Renders a CHECK or EXCLUDE constraint body for ALTER TABLE ... ADD.
defp new_constraint_expr(%Constraint{check: check} = constraint) when is_binary(check) do
["CONSTRAINT ", quote_name(constraint.name), " CHECK (", check, ")"]
end
defp new_constraint_expr(%Constraint{exclude: exclude} = constraint) when is_binary(exclude) do
["CONSTRAINT ", quote_name(constraint.name), " EXCLUDE USING ", exclude]
end
# DEFAULT expression; :error means no :default option was given.
defp default_expr({:ok, nil}, _type), do: " DEFAULT NULL"
defp default_expr({:ok, literal}, type), do: [" DEFAULT ", default_type(literal, type)]
defp default_expr(:error, _), do: []
# Renders a default literal for its column type; lists become typed ARRAY
# literals, maps are JSON-encoded, fragments pass through verbatim.
defp default_type(list, {:array, inner} = type) when is_list(list) do
["ARRAY[", Enum.map(list, &default_type(&1, inner)) |> Enum.intersperse(?,), "]::", ecto_to_db(type)]
end
# String defaults must be valid UTF-8 without NUL bytes, since they are
# interpolated into the DDL rather than bound as parameters.
defp default_type(literal, _type) when is_binary(literal) do
if :binary.match(literal, <<0>>) == :nomatch and String.valid?(literal) do
[?', escape_string(literal), ?']
else
encoded = "\\x" <> Base.encode16(literal, case: :lower)
raise ArgumentError, "default values are interpolated as UTF-8 strings and cannot contain null bytes. " <>
"`#{inspect literal}` is invalid. If you want to write it as a binary, use \"#{encoded}\", " <>
"otherwise refer to PostgreSQL documentation for instructions on how to escape this SQL type"
end
end
defp default_type(literal, _type) when is_number(literal), do: to_string(literal)
defp default_type(literal, _type) when is_boolean(literal), do: to_string(literal)
defp default_type(%{} = map, :map) do
library = Application.get_env(:postgrex, :json_library, Jason)
default = IO.iodata_to_binary(library.encode_to_iodata!(map))
[single_quote(default)]
end
defp default_type({:fragment, expr}, _type),
do: [expr]
defp default_type(expr, type),
do: raise(ArgumentError, "unknown default `#{inspect expr}` for type `#{inspect type}`. " <>
":default may be a string, number, boolean, list of strings, list of integers, map (when type is Map), or a fragment(...)")
# Index columns: raw strings (expressions) pass through, atoms are quoted.
defp index_expr(literal) when is_binary(literal),
do: literal
defp index_expr(literal),
do: quote_name(literal)
# Raw CREATE TABLE options string; keyword lists are rejected.
defp options_expr(nil),
do: []
defp options_expr(keyword) when is_list(keyword),
do: error!(nil, "PostgreSQL adapter does not support keyword lists in :options")
defp options_expr(options),
do: [?\s, options]
# Maps an Ecto type plus column opts to the SQL type spec. Arrays recurse
# with a `[]` suffix; *_usec types may carry a precision; generic types may
# carry size or precision/scale, and :string defaults to varchar(255).
defp column_type({:array, type}, opts),
do: [column_type(type, opts), "[]"]
defp column_type(type, _opts) when type in ~w(time utc_datetime naive_datetime)a,
do: [ecto_to_db(type), ""]
defp column_type(type, opts) when type in ~w(time_usec utc_datetime_usec naive_datetime_usec)a do
precision = Keyword.get(opts, :precision)
type_name = ecto_to_db(type)
if precision do
[type_name, ?(, to_string(precision), ?)]
else
type_name
end
end
defp column_type(type, opts) do
size = Keyword.get(opts, :size)
precision = Keyword.get(opts, :precision)
scale = Keyword.get(opts, :scale)
type_name = ecto_to_db(type)
cond do
size -> [type_name, ?(, to_string(size), ?)]
precision -> [type_name, ?(, to_string(precision), ?,, to_string(scale || 0), ?)]
type == :string -> [type_name, "(255)"]
true -> type_name
end
end
# Inline foreign-key constraint attached to a column definition.
defp reference_expr(%Reference{} = ref, table, name),
do: [" CONSTRAINT ", reference_name(ref, table, name), " REFERENCES ",
quote_table(ref.prefix || table.prefix, ref.table), ?(, quote_name(ref.column), ?),
reference_on_delete(ref.on_delete), reference_on_update(ref.on_update)]
# Standalone ADD CONSTRAINT ... FOREIGN KEY clause (used by :modify).
defp constraint_expr(%Reference{} = ref, table, name),
do: [", ADD CONSTRAINT ", reference_name(ref, table, name), ?\s,
"FOREIGN KEY (", quote_name(name), ") REFERENCES ",
quote_table(ref.prefix || table.prefix, ref.table), ?(, quote_name(ref.column), ?),
reference_on_delete(ref.on_delete), reference_on_update(ref.on_update)]
# DROP CONSTRAINT prefixes; no-ops when no reference is involved.
defp drop_constraint_expr(%Reference{} = ref, table, name),
do: ["DROP CONSTRAINT ", reference_name(ref, table, name), ", "]
defp drop_constraint_expr(_, _, _),
do: []
defp drop_constraint_if_exists_expr(%Reference{} = ref, table, name),
do: ["DROP CONSTRAINT IF EXISTS ", reference_name(ref, table, name), ", "]
defp drop_constraint_if_exists_expr(_, _, _),
do: []
# FK constraint name: explicit ref name, or `<table>_<column>_fkey`.
defp reference_name(%Reference{name: nil}, table, column),
do: quote_name("#{table.name}_#{column}_fkey")
defp reference_name(%Reference{name: name}, _table, _column),
do: quote_name(name)
# Serial parent columns are referenced by their plain integer types.
defp reference_column_type(:serial, _opts), do: "integer"
defp reference_column_type(:bigserial, _opts), do: "bigint"
defp reference_column_type(type, opts), do: column_type(type, opts)
# ON DELETE / ON UPDATE actions; unknown atoms emit nothing.
defp reference_on_delete(:nilify_all), do: " ON DELETE SET NULL"
defp reference_on_delete(:delete_all), do: " ON DELETE CASCADE"
defp reference_on_delete(:restrict), do: " ON DELETE RESTRICT"
defp reference_on_delete(_), do: []
defp reference_on_update(:nilify_all), do: " ON UPDATE SET NULL"
defp reference_on_update(:update_all), do: " ON UPDATE CASCADE"
defp reference_on_update(:restrict), do: " ON UPDATE RESTRICT"
defp reference_on_update(_), do: []
## Helpers
# Returns {rendered_source, alias}: the precomputed quoted table when the
# source is a table, otherwise the rendered expression (fragment/subquery).
defp get_source(query, sources, ix, source) do
{expr, name, _schema} = elem(sources, ix)
{expr || expr(source, sources, query), name}
end
# `alias.field` with the field quoted.
defp quote_qualified_name(name, sources, ix) do
{_, source, _} = elem(sources, ix)
[source, ?. | quote_name(name)]
end
# Double-quotes an identifier; embedded double quotes are rejected rather
# than escaped, to rule out identifier injection.
defp quote_name(name) when is_atom(name) do
quote_name(Atom.to_string(name))
end
defp quote_name(name) do
if String.contains?(name, "\"") do
error!(nil, "bad field name #{inspect name}")
end
[?", name, ?"]
end
# Table reference, optionally schema-qualified as "prefix"."name".
defp quote_table(nil, name), do: quote_table(name)
defp quote_table(prefix, name), do: [quote_table(prefix), ?., quote_table(name)]
defp quote_table(name) when is_atom(name),
do: quote_table(Atom.to_string(name))
defp quote_table(name) do
if String.contains?(name, "\"") do
error!(nil, "bad table name #{inspect name}")
end
[?", name, ?"]
end
# SQL string literal with single quotes doubled.
defp single_quote(value), do: [?', escape_string(value), ?']
# Maps over a list while interspersing `separator`, building nested iodata
# (no flattening) for cheap concatenation.
defp intersperse_map(list, separator, mapper, acc \\ [])
defp intersperse_map([], _separator, _mapper, acc),
do: acc
defp intersperse_map([elem], _separator, mapper, acc),
do: [acc | mapper.(elem)]
defp intersperse_map([elem | rest], separator, mapper, acc),
do: intersperse_map(rest, separator, mapper, [acc, mapper.(elem), separator])
# Like intersperse_map/4 but also threads a user accumulator through the
# reducer; returns {iodata, final_user_acc}.
defp intersperse_reduce(list, separator, user_acc, reducer, acc \\ [])
defp intersperse_reduce([], _separator, user_acc, _reducer, acc),
do: {acc, user_acc}
defp intersperse_reduce([elem], _separator, user_acc, reducer, acc) do
{elem, user_acc} = reducer.(elem, user_acc)
{[acc | elem], user_acc}
end
defp intersperse_reduce([elem | rest], separator, user_acc, reducer, acc) do
{elem, user_acc} = reducer.(elem, user_acc)
intersperse_reduce(rest, separator, user_acc, reducer, [acc, elem, separator])
end
# Returns `value` when `truthy?` is truthy, otherwise the empty iodata `[]`.
defp if_do(truthy?, value) do
  cond do
    truthy? -> value
    true -> []
  end
end

# Doubles every single quote so `value` can be embedded in a SQL string
# literal.
defp escape_string(value) when is_binary(value) do
  String.replace(value, "'", "''")
end
# Ecto primitive type -> PostgreSQL type name; the map type is configurable
# (jsonb/json) via the :postgres_map_type application env. Unknown atoms
# pass through as their own name.
defp ecto_to_db({:array, t}), do: [ecto_to_db(t), ?[, ?]]
defp ecto_to_db(:id), do: "integer"
defp ecto_to_db(:serial), do: "serial"
defp ecto_to_db(:bigserial), do: "bigserial"
defp ecto_to_db(:binary_id), do: "uuid"
defp ecto_to_db(:string), do: "varchar"
defp ecto_to_db(:binary), do: "bytea"
defp ecto_to_db(:map), do: Application.fetch_env!(:ecto_sql, :postgres_map_type)
defp ecto_to_db({:map, _}), do: Application.fetch_env!(:ecto_sql, :postgres_map_type)
defp ecto_to_db(:time_usec), do: "time"
defp ecto_to_db(:utc_datetime), do: "timestamp"
defp ecto_to_db(:utc_datetime_usec), do: "timestamp"
defp ecto_to_db(:naive_datetime), do: "timestamp"
defp ecto_to_db(:naive_datetime_usec), do: "timestamp"
defp ecto_to_db(other), do: Atom.to_string(other)
# Raises ArgumentError when there is no query context, Ecto.QueryError
# (with the offending query attached) otherwise.
defp error!(nil, message) do
raise ArgumentError, message
end
defp error!(query, message) do
raise Ecto.QueryError, query: query, message: message
end
end
end
| 36.962393 | 154 | 0.603247 |
1c24bd3aeeb08408ff7f246f6b2793f09750c2cb | 3,501 | ex | Elixir | lib/lucidboard/account.ex | craheb/lucidboard | 8348549b31963527bc5db5f69ad55eda3728fec2 | [
"MIT"
] | 86 | 2019-01-07T20:49:04.000Z | 2021-10-02T21:15:42.000Z | lib/lucidboard/account.ex | craheb/lucidboard | 8348549b31963527bc5db5f69ad55eda3728fec2 | [
"MIT"
] | 26 | 2019-03-27T12:06:52.000Z | 2020-09-20T05:21:09.000Z | lib/lucidboard/account.ex | craheb/lucidboard | 8348549b31963527bc5db5f69ad55eda3728fec2 | [
"MIT"
] | 19 | 2015-01-06T19:02:49.000Z | 2020-05-25T08:54:00.000Z | defmodule Lucidboard.Account do
@moduledoc "Context for user things"
import Ecto.Query
alias Ecto.Changeset
alias Lucidboard.Account.{Github, PingFed}
alias Lucidboard.{Board, BoardRole, BoardSettings, Card, Repo, User}
alias Ueberauth.Auth
require Logger
@providers %{
github: Github,
pingfed: PingFed
}
def get!(user_id), do: Repo.get!(User, user_id)
def get(user_id), do: Repo.get(User, user_id)
def by_username(username) do
Repo.one(from(u in User, where: u.name == ^username))
end
def create(fields) do
fields |> User.new() |> insert()
end
def insert(%User{} = user) do
Repo.insert(user)
end
def display_name(%User{name: name, full_name: full_name}) do
"#{name} (#{full_name})"
end
@spec has_role?(User.t(), Board.t(), atom) :: boolean
def has_role?(user, board, role \\ :owner)
def has_role?(_user, %Board{settings: %BoardSettings{access: "open"}}, role)
when role in [:observer, :contributor] do
true
end
def has_role?(
_user,
%Board{settings: %BoardSettings{access: "public"}},
:observer
) do
true
end
def has_role?(%User{admin: true}, _board, _role) do
true
end
def has_role?(%User{id: user_id}, %Board{board_roles: roles}, role) do
Enum.any?(roles, fn
%{user_id: ^user_id, role: :owner} ->
true
%{user_id: ^user_id, role: :contributor} ->
role in [:contributor, :observer]
%{user_id: ^user_id, role: :observer} ->
role == :observer
_ ->
false
end)
end
@spec card_is_editable?(Card.t(), User.t(), Board.t()) :: boolean
def card_is_editable?(card, user, board)
def card_is_editable?(_, %User{admin: true}, _), do: true
def card_is_editable?(%Card{user: %User{id: user_id}}, %User{id: user_id}, _),
do: true
def card_is_editable?(_, %User{id: user_id}, %Board{board_roles: board_roles}) do
Enum.any?(board_roles, fn %{user: %{id: uid}, role: :owner} ->
uid == user_id
end)
end
@spec suggest_users(String.t(), User.t()) :: [User.t()]
def suggest_users(query, %User{id: user_id}) do
q = "%#{query}%"
Repo.all(
from(u in User,
where:
(ilike(u.name, ^q) or ilike(u.full_name, ^q)) and u.id != ^user_id,
order_by: [asc: u.name]
)
)
end
@spec grant(integer, BoardRole.t()) :: :ok | {:error, Changeset.t()}
def grant(board_id, board_role) do
with %Board{} = board <-
Board |> Repo.get(board_id) |> Repo.preload(:board_roles),
{:ok, _} <-
board
|> Board.changeset()
|> Changeset.put_assoc(:board_roles, [board_role | board.board_roles])
|> Repo.update() do
:ok
else
{:error, error} -> {:error, error}
end
end
@spec revoke(integer, integer) :: :ok
def revoke(user_id, board_id) do
Repo.delete_all(
from(r in BoardRole,
where: r.user_id == ^user_id and r.board_id == ^board_id
)
)
:ok
end
@doc """
Given the `%Ueberauth.Auth{}` result, get a loaded user from the db.
If one does not exist, it will be created.
"""
@spec auth_to_user(Auth.t()) :: {:ok, User.t()} | {:error, String.t()}
def auth_to_user(auth) do
with {:ok, user} <- apply(@providers[auth.provider], :to_user, [auth]) do
case Repo.one(from(u in User, where: u.name == ^user.name)) do
nil -> Repo.insert(user)
db_user -> {:ok, db_user}
end
end
end
end
| 25.007143 | 83 | 0.596401 |
1c24ecb376fc30ecd045509cd477258fb1de80c0 | 18,571 | ex | Elixir | clients/compute/lib/google_api/compute/v1/api/global_addresses.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/global_addresses.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/global_addresses.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Api.GlobalAddresses do
  @moduledoc """
  API calls for all endpoints tagged `GlobalAddresses`.
  """

  alias GoogleApi.Compute.V1.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Client-library version, resolved at compile time from mix.exs and sent
  # with every request via Request.library_version/2 below.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Deletes the specified address resource.

  ## Parameters

  * `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  * `project` (*type:* `String.t`) - Project ID for this request.
  * `address` (*type:* `String.t`) - Name of the address resource to delete.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
    * `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
    * `:access_token` (*type:* `String.t`) - OAuth access token.
    * `:alt` (*type:* `String.t`) - Data format for response.
    * `:callback` (*type:* `String.t`) - JSONP
    * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
    * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
    * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
    * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
    * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
    * `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
    * `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
  * `opts` (*type:* `keyword()`) - Call options

  ## Returns

  * `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
  * `{:error, info}` on failure
  """
  @spec compute_global_addresses_delete(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.Operation.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def compute_global_addresses_delete(
        connection,
        project,
        address,
        optional_params \\ [],
        opts \\ []
      ) do
    # Declares where each supported optional parameter belongs in the request
    # (all query-string here); unknown keys in optional_params are ignored.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :userIp => :query,
      :requestId => :query
    }

    # Note the path template: `project` disallows "/" while `address` keeps
    # "/" unescaped (the `&1 == ?/` escape-exception).
    request =
      Request.new()
      |> Request.method(:delete)
      |> Request.url("/projects/{project}/global/addresses/{address}", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "address" => URI.encode(address, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
  end

  @doc """
  Returns the specified address resource. Gets a list of available addresses by making a list() request.

  ## Parameters

  * `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  * `project` (*type:* `String.t`) - Project ID for this request.
  * `address` (*type:* `String.t`) - Name of the address resource to return.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
    * `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
    * `:access_token` (*type:* `String.t`) - OAuth access token.
    * `:alt` (*type:* `String.t`) - Data format for response.
    * `:callback` (*type:* `String.t`) - JSONP
    * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
    * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
    * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
    * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
    * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
    * `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
  * `opts` (*type:* `keyword()`) - Call options

  ## Returns

  * `{:ok, %GoogleApi.Compute.V1.Model.Address{}}` on success
  * `{:error, info}` on failure
  """
  @spec compute_global_addresses_get(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.Address.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def compute_global_addresses_get(
        connection,
        project,
        address,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/projects/{project}/global/addresses/{address}", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "address" => URI.encode(address, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Address{}])
  end

  @doc """
  Creates an address resource in the specified project by using the data included in the request.

  ## Parameters

  * `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  * `project` (*type:* `String.t`) - Project ID for this request.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
    * `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
    * `:access_token` (*type:* `String.t`) - OAuth access token.
    * `:alt` (*type:* `String.t`) - Data format for response.
    * `:callback` (*type:* `String.t`) - JSONP
    * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
    * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
    * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
    * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
    * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
    * `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
    * `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
    * `:body` (*type:* `GoogleApi.Compute.V1.Model.Address.t`) -
  * `opts` (*type:* `keyword()`) - Call options

  ## Returns

  * `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
  * `{:error, info}` on failure
  """
  @spec compute_global_addresses_insert(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.Compute.V1.Model.Operation.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def compute_global_addresses_insert(connection, project, optional_params \\ [], opts \\ []) do
    # :body is the only parameter placed in the request body; the rest are
    # query-string parameters.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :userIp => :query,
      :requestId => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/projects/{project}/global/addresses", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
  end

  @doc """
  Retrieves a list of global addresses.

  ## Parameters

  * `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  * `project` (*type:* `String.t`) - Project ID for this request.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
    * `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
    * `:access_token` (*type:* `String.t`) - OAuth access token.
    * `:alt` (*type:* `String.t`) - Data format for response.
    * `:callback` (*type:* `String.t`) - JSONP
    * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
    * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
    * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
    * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
    * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
    * `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
    * `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal AIP-160. If you want to use AIP-160, your expression must specify the field name, an operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The operator must be either `=`, `!=`, `>`, `<`, `<=`, `>=` or `:`. For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`. The `:` operator can be used with string fields to match substrings. For non-string fields it is equivalent to the `=` operator. The `:*` comparison can be used to test whether a key has been defined. For example, to find all objects with `owner` label use: ``` labels.owner:* ``` You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels. To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ``` If you want to use a regular expression, use the `eq` (equal) or `ne` (not equal) operator against a single un-parenthesized expression with or without quotes or against multiple parenthesized expressions. Examples: `fieldname eq unquoted literal` `fieldname eq 'single quoted literal'` `fieldname eq "double quoted literal"` `(fieldname1 eq literal) (fieldname2 ne "literal")` The literal value is interpreted as a regular expression using Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use `name ne .*instance`.
    * `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
    * `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name. You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first. Currently, only sorting by `name` or `creationTimestamp desc` is supported.
    * `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
    * `:returnPartialSuccess` (*type:* `boolean()`) - Opt-in for partial success behavior which provides partial results in case of failure. The default value is false.
  * `opts` (*type:* `keyword()`) - Call options

  ## Returns

  * `{:ok, %GoogleApi.Compute.V1.Model.AddressList{}}` on success
  * `{:error, info}` on failure
  """
  @spec compute_global_addresses_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.Compute.V1.Model.AddressList.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def compute_global_addresses_list(connection, project, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :userIp => :query,
      :filter => :query,
      :maxResults => :query,
      :orderBy => :query,
      :pageToken => :query,
      :returnPartialSuccess => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/projects/{project}/global/addresses", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.AddressList{}])
  end
end
| 58.034375 | 2,276 | 0.650746 |
1c251996f96d9d0fc26ae44a63865025ff0e43f6 | 292 | ex | Elixir | lib/cms/v2/repos/cms_repo.ex | noizu/KitchenSink | 34f51fb93dfa913ba7be411475d02520d537e676 | [
"MIT"
] | 2 | 2019-04-15T22:17:59.000Z | 2022-01-03T15:35:36.000Z | lib/cms/v2/repos/cms_repo.ex | noizu/KitchenSink | 34f51fb93dfa913ba7be411475d02520d537e676 | [
"MIT"
] | null | null | null | lib/cms/v2/repos/cms_repo.ex | noizu/KitchenSink | 34f51fb93dfa913ba7be411475d02520d537e676 | [
"MIT"
] | null | null | null | #-------------------------------------------------------------------------------
# Author: Keith Brings
# Copyright (C) 2019 Noizu Labs, Inc. All rights reserved.
#-------------------------------------------------------------------------------
defmodule Noizu.Cms.V2.CmsRepo do
  # Intentionally empty module: no functions or behaviours are defined here.
  # NOTE(review): presumably a namespace/placeholder for the CMS repo
  # implementation — confirm against the rest of the project before removing.
  @moduledoc false
end
1c252c197550379e6399078151132f67a3c58fb3 | 5,593 | exs | Elixir | test/terrasol_workspace_test.exs | mwmiller/terrasol | ea503c008c5b6a80c2fac24acc206ed1d17096a8 | [
"MIT"
] | 4 | 2021-06-27T17:27:33.000Z | 2021-07-25T03:42:07.000Z | test/terrasol_workspace_test.exs | mwmiller/terrasol | ea503c008c5b6a80c2fac24acc206ed1d17096a8 | [
"MIT"
] | null | null | null | test/terrasol_workspace_test.exs | mwmiller/terrasol | ea503c008c5b6a80c2fac24acc206ed1d17096a8 | [
"MIT"
] | null | null | null | defmodule TerrasolWorkspaceTest do
use ExUnit.Case
doctest Terrasol.Workspace
test "valid from spec" do
assert Terrasol.Workspace.parse("+a.b") == %Terrasol.Workspace{
name: "a",
string: "+a.b",
suffix: "b"
}
assert Terrasol.Workspace.parse("+gardening.friends") == %Terrasol.Workspace{
name: "gardening",
string: "+gardening.friends",
suffix: "friends"
}
assert Terrasol.Workspace.parse("+gardening.j230d9qjd0q09of4j") ==
%Terrasol.Workspace{
name: "gardening",
string: "+gardening.j230d9qjd0q09of4j",
suffix: "j230d9qjd0q09of4j"
}
assert Terrasol.Workspace.parse(
"+gardening.bnkksi5na3j7ifl5lmvxiyishmoybmu3khlvboxx6ighjv6crya5a"
) == %Terrasol.Workspace{
name: "gardening",
string: "+gardening.bnkksi5na3j7ifl5lmvxiyishmoybmu3khlvboxx6ighjv6crya5a",
suffix: "bnkksi5na3j7ifl5lmvxiyishmoybmu3khlvboxx6ighjv6crya5a"
}
assert Terrasol.Workspace.parse("+bestbooks2019.o049fjafo09jaf") ==
%Terrasol.Workspace{
name: "bestbooks2019",
string: "+bestbooks2019.o049fjafo09jaf",
suffix: "o049fjafo09jaf"
}
end
test "invalid from spec" do
assert Terrasol.Workspace.parse("+a.b.c") == :error
assert Terrasol.Workspace.parse("+80smusic.x") == :error
assert Terrasol.Workspace.parse("+a.4ever") == :error
assert Terrasol.Workspace.parse("PARTY.TIME") == :error
end
test "valid from reference test suite" do
assert Terrasol.Workspace.parse(
"+gardening.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
) == %Terrasol.Workspace{
name: "gardening",
string: "+gardening.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
suffix: "bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
}
assert Terrasol.Workspace.parse("+gardening.bxxxx") ==
%Terrasol.Workspace{
name: "gardening",
string: "+gardening.bxxxx",
suffix: "bxxxx"
}
assert Terrasol.Workspace.parse("+a.x") ==
%Terrasol.Workspace{
name: "a",
string: "+a.x",
suffix: "x"
}
assert Terrasol.Workspace.parse("+aaaaabbbbbccccc.bxxx") == %Terrasol.Workspace{
name: "aaaaabbbbbccccc",
string: "+aaaaabbbbbccccc.bxxx",
suffix: "bxxx"
}
assert Terrasol.Workspace.parse("+gardening.r0cks") ==
%Terrasol.Workspace{
name: "gardening",
string: "+gardening.r0cks",
suffix: "r0cks"
}
assert Terrasol.Workspace.parse(
"+garden2000.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
) ==
%Terrasol.Workspace{
name: "garden2000",
string: "+garden2000.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
suffix: "bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
}
end
test "invalid from reference test suite" do
assert :error = Terrasol.Workspace.parse("")
assert :error = Terrasol.Workspace.parse("+")
assert :error =
Terrasol.Workspace.parse(
"gardening.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
assert :error =
Terrasol.Workspace.parse(
"+gardening.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
assert :error =
Terrasol.Workspace.parse(
"+gardeningbxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
assert :error =
Terrasol.Workspace.parse("+.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")
assert :error = Terrasol.Workspace.parse("+gardening")
assert :error = Terrasol.Workspace.parse("+gardening.")
assert :error = Terrasol.Workspace.parse("gardening")
assert :error =
Terrasol.Workspace.parse(
" +gardening.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
assert :error =
Terrasol.Workspace.parse(
"+gardening.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx "
)
assert :error =
Terrasol.Workspace.parse(
"+gardening.bxxxxxxxxxxxxxxxx xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
assert :error =
Terrasol.Workspace.parse(
"+GARDENING.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
assert :error =
Terrasol.Workspace.parse(
"+gardening.bXXXXXxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
assert :error =
Terrasol.Workspace.parse(
"+1garden.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
assert :error =
Terrasol.Workspace.parse(
"+gar?dening.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
assert :error =
Terrasol.Workspace.parse(
"++gardening.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
assert :error =
Terrasol.Workspace.parse(
"+garden🤡.bxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
)
end
end
| 33.692771 | 96 | 0.600215 |
1c253e14deb35222ab459c9ed1b4cd1a34a1d761 | 823 | ex | Elixir | lib/2020/day25.ex | hallski/adventofcode | 03efb385688e8072b0b44d35012297833498f799 | [
"MIT"
] | null | null | null | lib/2020/day25.ex | hallski/adventofcode | 03efb385688e8072b0b44d35012297833498f799 | [
"MIT"
] | null | null | null | lib/2020/day25.ex | hallski/adventofcode | 03efb385688e8072b0b44d35012297833498f799 | [
"MIT"
] | null | null | null | defmodule AdventOfCode.Y2020.Day25 do
@subject_number 7
@magic_number 20_201_227
def run({card_key, door_key}) do
card_key
|> loop_size(@subject_number)
|> encryption_key(door_key)
end
def loop_size(key, subject), do: loop_size(1, key, subject, 0)
def loop_size(value, key, _subject, loop) when key == value, do: loop
def loop_size(value, key, subject, loop) do
value
|> transform(subject)
|> loop_size(key, subject, loop + 1)
end
def encryption_key(loops, subject), do: encryption_key(1, subject, loops)
def encryption_key(value, _subject, 0), do: value
def encryption_key(value, subject, loops) do
value
|> transform(subject)
|> encryption_key(subject, loops - 1)
end
def transform(value, subject) do
rem(value * subject, @magic_number)
end
end
| 23.514286 | 75 | 0.690158 |
1c258ea1be2346933c8e8769ba7f3a11c1142eda | 948 | ex | Elixir | lib/contentful/collection.ex | lustrousgorilla/contentful.ex | 38d5014de7ad8c8631a4c81a427a1e386deb7f6e | [
"MIT"
] | null | null | null | lib/contentful/collection.ex | lustrousgorilla/contentful.ex | 38d5014de7ad8c8631a4c81a427a1e386deb7f6e | [
"MIT"
] | null | null | null | lib/contentful/collection.ex | lustrousgorilla/contentful.ex | 38d5014de7ad8c8631a4c81a427a1e386deb7f6e | [
"MIT"
] | null | null | null | defmodule Contentful.Collection do
alias Contentful.Space
@moduledoc """
Describes a Contentful collection, which is usually an API response of sys.type "Array".
"""
@callback fetch_all(
[limit: pos_integer(), skip: non_neg_integer()],
Space.t() | String.t(),
String.t() | nil,
String.t() | nil
) ::
{:ok, list(struct())}
| {:error, atom(), original_message: String.t()}
| {:error, :rate_limit_exceeded, wait_for: integer()}
| {:error, :unknown}
@callback fetch_one(
String.t(),
Space.t() | String.t(),
String.t() | nil,
String.t() | nil
) ::
{:ok, struct()}
| {:error, atom(), original_message: String.t()}
| {:error, :rate_limit_exceeded, wait_for: integer()}
| {:error, :unknown}
end
| 33.857143 | 90 | 0.487342 |
1c25b4872954ddf9053eff4a2c2cb01590e2387a | 1,483 | ex | Elixir | skippy/dashboard/lib/dashboard_web/views/error_helpers.ex | mashbytes/baby_zoo | 4554890242a2493d17d9b1c1f4cc90d7ad1e637e | [
"MIT"
] | null | null | null | skippy/dashboard/lib/dashboard_web/views/error_helpers.ex | mashbytes/baby_zoo | 4554890242a2493d17d9b1c1f4cc90d7ad1e637e | [
"MIT"
] | 5 | 2020-07-17T23:35:42.000Z | 2021-05-10T07:00:10.000Z | skippy/dashboard/lib/dashboard_web/views/error_helpers.ex | mashbytes/baby_zoo | 4554890242a2493d17d9b1c1f4cc90d7ad1e637e | [
"MIT"
] | null | null | null | defmodule DashboardWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error), class: "help-block")
end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(DashboardWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(DashboardWeb.Gettext, "errors", msg, opts)
end
end
end
| 32.955556 | 78 | 0.671612 |
1c25b772b45e83c5c30fb31f6ee9f123f5096fd3 | 1,194 | ex | Elixir | lib/loaded_bike/models/tour.ex | GBH/pedal | a2d68c3561f186ee3017a21b4170127b1625e18d | [
"MIT"
] | 48 | 2017-04-25T16:02:08.000Z | 2021-01-23T01:57:29.000Z | lib/loaded_bike/models/tour.ex | GBH/pedal | a2d68c3561f186ee3017a21b4170127b1625e18d | [
"MIT"
] | 5 | 2018-03-09T20:17:55.000Z | 2018-07-23T16:29:21.000Z | lib/loaded_bike/models/tour.ex | GBH/pedal | a2d68c3561f186ee3017a21b4170127b1625e18d | [
"MIT"
] | 4 | 2017-05-21T14:38:38.000Z | 2017-12-29T11:09:54.000Z | defmodule LoadedBike.Tour do
use LoadedBike.Web, :model
alias LoadedBike.User
schema "tours" do
field :title, :string
field :short_description, :string
field :description, :string
field :status, TourStateEnum
field :is_published, :boolean
belongs_to :user, LoadedBike.User
has_many :waypoints, LoadedBike.Waypoint
timestamps()
end
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:title, :short_description, :description, :status, :is_published])
|> assoc_constraint(:user)
|> changeset_for_is_published
|> validate_required([:user_id, :title, :status])
end
def published(query) do
where(query, [t], t.is_published == true)
end
def with_status(query, status) do
where(query, [t], t.status == ^status)
end
defp changeset_for_is_published(changeset) do
is_published = get_change(changeset, :is_published)
user = Repo.get(User, changeset.data.user_id || 0)
if !!is_published && user && user.verification_token != nil do
changeset |> add_error(:is_published, "Unverified accounts cannot publish")
else
changeset
end
end
end
| 25.956522 | 87 | 0.667504 |
1c25ba8eb9884ddd192b7186ff0a2468b5dd646c | 531 | ex | Elixir | lib/onde_server/horarios/horario.ex | ufra-lca/bage_server | 3fd7592707624b7221035225b1a5113de856a7ff | [
"MIT"
] | null | null | null | lib/onde_server/horarios/horario.ex | ufra-lca/bage_server | 3fd7592707624b7221035225b1a5113de856a7ff | [
"MIT"
] | 3 | 2021-03-09T18:21:20.000Z | 2021-09-01T22:58:23.000Z | lib/onde_server/horarios/horario.ex | ufra-lca/bage_server | 3fd7592707624b7221035225b1a5113de856a7ff | [
"MIT"
] | null | null | null | defmodule OndeServer.Horarios.Horario do
use Ecto.Schema
import Ecto.Changeset
schema "horarios" do
field :hora_fim, :string
field :hora_inicio, :string
field :n_voltas, :integer
field :zootec, :boolean, default: false
field :itinerario, :string
timestamps()
end
@doc false
def changeset(horario, attrs) do
horario
|> cast(attrs, [:hora_inicio, :hora_fim, :n_voltas, :zootec, :itinerario])
|> validate_required([:hora_inicio, :hora_fim, :n_voltas, :zootec, :itinerario])
end
end
| 25.285714 | 84 | 0.693032 |
1c25d847f225d3771cf6b714aebf48cc9b70f375 | 1,546 | ex | Elixir | lib/magic_number.ex | miteshnath/elixir-magic-number | 4c11fc229b1f9c9fa12aa28611b86e6572a51ac1 | [
"MIT"
] | null | null | null | lib/magic_number.ex | miteshnath/elixir-magic-number | 4c11fc229b1f9c9fa12aa28611b86e6572a51ac1 | [
"MIT"
] | null | null | null | lib/magic_number.ex | miteshnath/elixir-magic-number | 4c11fc229b1f9c9fa12aa28611b86e6572a51ac1 | [
"MIT"
] | null | null | null | defmodule MagicNumber do
@moduledoc """
The module to determine a file's type from its
[magic number](https://en.wikipedia.org/wiki/File_format#Magic_number).
"""

@typedoc """
A media type is a two-part identifier for file format. For example:
```elixir
{:application, :zip} # application/zip
{:image, :png} # image/png
```
See [IANA list of official media types](https://www.iana.org/assignments/media-types/media-types.xhtml).
"""
@type media_type :: {atom, atom}

# Rules :: [{media_type, [binary]}]
# Each entry pairs a media type with the magic-number prefixes that identify
# it. The list is consumed at compile time by the `for` below, which unrolls
# one `detect/1` clause per prefix.
@rules [
  # image
  {{:image, :gif}, ["GIF87a", "GIF89a"]},
  {{:image, :jpeg}, [<<0xFF, 0xD8, 0xFF>>]},
  {{:image, :png}, [<<0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A>>]},
  # TIFF: "II*\0" little-endian, "MM\0*" big-endian.
  {{:image, :tiff}, ["II*\0", "MM\0*"]},
  # application
  {{:application, :pdf}, ["%PDF"]},
  # ZIP: regular, empty-archive, and spanned-archive signatures.
  {{:application, :zip},
   [
     <<0x50, 0x4B, 0x03, 0x04>>,
     <<0x50, 0x4B, 0x05, 0x06>>,
     <<0x50, 0x4B, 0x07, 0x08>>
   ]},
  {{:application, :gzip}, [<<0x1F, 0x8B>>]},
  {{:application, :exe}, [<<0x4D, 0x5A>>]},
  # NOTE(review): <<0x78, 0x01>> is a generic zlib header, not dmg-specific;
  # this rule may misclassify other zlib streams as dmg — confirm intent.
  {{:application, :dmg}, [<<0x78, 0x01>>]}
]

@doc """
Determine media type from its contents.
## Examples
    iex> MagicNumber.detect("GIF89a...")
    {:ok, {:image, :gif}}
    iex> MagicNumber.detect(<<>>)
    :error
"""
@spec detect(binary) :: {:ok, media_type} | :error
# Compile-time unrolling: one binary-prefix-matching clause per magic number.
for {media_type, headers} <- @rules, magic <- headers do
  def detect(unquote(magic) <> _), do: {:ok, unquote(media_type)}
end

# error
# Catch-all clause for unrecognized (or too-short) binaries.
def detect(_), do: :error
end
| 25.766667 | 106 | 0.563389 |
1c261eeda9e7737ca1580d4d6eb861f8853c6e24 | 11,855 | ex | Elixir | lib/ecto/query/inspect.ex | iautom8things/ecto | 9e89e9b7f8c7d11373c2372f2d3365dd8b4aaccf | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/inspect.ex | iautom8things/ecto | 9e89e9b7f8c7d11373c2372f2d3365dd8b4aaccf | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/inspect.ex | iautom8things/ecto | 9e89e9b7f8c7d11373c2372f2d3365dd8b4aaccf | [
"Apache-2.0"
] | null | null | null | import Inspect.Algebra
import Kernel, except: [to_string: 1]
alias Ecto.Query.{DynamicExpr, JoinExpr, QueryExpr, WithExpr}
defimpl Inspect, for: Ecto.Query.DynamicExpr do
def inspect(%DynamicExpr{binding: binding} = dynamic, opts) do
joins =
binding
|> Enum.drop(1)
|> Enum.with_index()
|> Enum.map(&%JoinExpr{ix: &1})
aliases =
for({as, _} when is_atom(as) <- binding, do: as)
|> Enum.with_index()
|> Map.new
query = %Ecto.Query{joins: joins, aliases: aliases}
{expr, binding, params, subqueries, _, _} =
Ecto.Query.Builder.Dynamic.fully_expand(query, dynamic)
names =
Enum.map(binding, fn
{_, {name, _, _}} -> name
{name, _, _} -> name
end)
query_expr = %{expr: expr, params: params, subqueries: subqueries}
inspected = Inspect.Ecto.Query.expr(expr, List.to_tuple(names), query_expr)
container_doc("dynamic(", [Macro.to_string(binding), inspected], ")", opts, fn str, _ ->
str
end)
end
end
defimpl Inspect, for: Ecto.Query do
@doc false
# Inspect-protocol entry point: renders the query as `#Ecto.Query<...>`, then
# appends `|> with_cte(...)` / `|> recursive_ctes(true)` lines for any CTEs.
def inspect(query, opts) do
  list =
    Enum.map(to_list(query), fn
      {key, string} ->
        concat(Atom.to_string(key) <> ": ", string)

      string ->
        string
    end)

  result = container_doc("#Ecto.Query<", list, ">", opts, fn str, _ -> str end)

  case query.with_ctes do
    %WithExpr{recursive: recursive, queries: [_ | _] = queries} ->
      with_ctes =
        Enum.map(queries, fn {name, query} ->
          # A CTE body is either a full query (recurse) or a fragment expr.
          cte = case query do
            %Ecto.Query{} -> __MODULE__.inspect(query, opts)
            %Ecto.Query.QueryExpr{} -> expr(query, {})
          end

          concat(["|> with_cte(\"" <> name <> "\", as: ", cte, ")"])
        end)

      result = if recursive, do: glue(result, "\n", "|> recursive_ctes(true)"), else: result
      [result | with_ctes] |> Enum.intersperse(break("\n")) |> concat()

    _ ->
      result
  end
end
@doc false
# Plain-string rendering of the query (no algebra document), one clause per
# line; used by the DynamicExpr/CTE paths above.
def to_string(query) do
  Enum.map_join(to_list(query), ",\n ", fn
    {key, string} ->
      Atom.to_string(key) <> ": " <> string

    string ->
      string
  end)
end
# Flattens the query struct into an ordered list of rendered clauses
# (strings or {keyword, string} pairs). The concat order below mirrors the
# order clauses are conventionally written in an Ecto query.
defp to_list(query) do
  # One generated binding name per source (from + joins), e.g. {:p0, :c1}.
  names =
    query
    |> collect_sources()
    |> generate_letters()
    |> generate_names()
    |> List.to_tuple()

  from = bound_from(query.from, elem(names, 0))
  joins = joins(query.joins, names)
  preloads = preloads(query.preloads)
  assocs = assocs(query.assocs, names)
  windows = windows(query.windows, names)
  combinations = combinations(query.combinations)

  wheres = bool_exprs(%{and: :where, or: :or_where}, query.wheres, names)
  group_bys = kw_exprs(:group_by, query.group_bys, names)
  havings = bool_exprs(%{and: :having, or: :or_having}, query.havings, names)
  order_bys = kw_exprs(:order_by, query.order_bys, names)
  updates = kw_exprs(:update, query.updates, names)

  lock = kw_inspect(:lock, query.lock)
  limit = kw_expr(:limit, query.limit, names)
  offset = kw_expr(:offset, query.offset, names)
  select = kw_expr(:select, query.select, names)
  distinct = kw_expr(:distinct, query.distinct, names)

  Enum.concat([
    from,
    joins,
    wheres,
    group_bys,
    havings,
    windows,
    combinations,
    order_bys,
    limit,
    offset,
    lock,
    distinct,
    updates,
    select,
    preloads,
    assocs
  ])
end
# Renders the `from` clause; nil source means the query wraps another query.
defp bound_from(nil, name), do: ["from #{name} in query"]

defp bound_from(%{source: source} = from, name) do
  ["from #{name} in #{inspect_source(source)}"] ++ kw_as_and_prefix(from)
end

# Renders a source: interpolated query, subquery, bare source/schema, or a
# {source, schema} pair (collapsed to the schema when names agree).
defp inspect_source(%Ecto.Query{} = query), do: "^" <> inspect(query)
defp inspect_source(%Ecto.SubQuery{query: query}), do: "subquery(#{to_string(query)})"
defp inspect_source({source, nil}), do: inspect(source)
defp inspect_source({nil, schema}), do: inspect(schema)

defp inspect_source({source, schema} = from) do
  inspect(if source == schema.__schema__(:source), do: schema, else: from)
end

# Each join uses its own binding name; `expr.ix` overrides the positional
# index when present (association joins pin their binding slot).
defp joins(joins, names) do
  joins
  |> Enum.with_index()
  |> Enum.flat_map(fn {expr, ix} -> join(expr, elem(names, expr.ix || ix + 1), names) end)
end

# Association join: `x in assoc(parent, :field)`; `on:` only when non-trivial.
defp join(%JoinExpr{qual: qual, assoc: {ix, right}, on: on} = join, name, names) do
  string = "#{name} in assoc(#{elem(names, ix)}, #{inspect(right)})"
  [{join_qual(qual), string}] ++ kw_as_and_prefix(join) ++ maybe_on(on, names)
end

# Fragment join: the fragment must be rendered through expr/3.
defp join(
       %JoinExpr{qual: qual, source: {:fragment, _, _} = source, on: on} = join = part,
       name,
       names
     ) do
  string = "#{name} in #{expr(source, names, part)}"
  [{join_qual(qual), string}] ++ kw_as_and_prefix(join) ++ [on: expr(on, names)]
end

# Plain source join.
defp join(%JoinExpr{qual: qual, source: source, on: on} = join, name, names) do
  string = "#{name} in #{inspect_source(source)}"
  [{join_qual(qual), string}] ++ kw_as_and_prefix(join) ++ [on: expr(on, names)]
end
# `on: true` (the default) is omitted from the rendered join.
defp maybe_on(%QueryExpr{expr: true}, _names), do: []
defp maybe_on(%QueryExpr{} = on, names), do: [on: expr(on, names)]

defp preloads([]), do: []
defp preloads(preloads), do: [preload: inspect(preloads)]

# Join-based preloads (`query.assocs`) render as a preload keyword tree.
defp assocs([], _names), do: []
defp assocs(assocs, names), do: [preload: expr(assocs(assocs), names, %{})]

# Converts the nested {field, {binding_ix, children}} assoc tree into AST
# referencing bindings via {:&, [], [ix]} so expr/3 can name them.
defp assocs(assocs) do
  Enum.map(assocs, fn
    {field, {idx, []}} ->
      {field, {:&, [], [idx]}}

    {field, {idx, children}} ->
      {field, {{:&, [], [idx]}, assocs(children)}}
  end)
end

defp windows(windows, names) do
  Enum.map(windows, &window(&1, names))
end

defp window({name, %{expr: definition} = part}, names) do
  {:windows, "[#{name}: " <> expr(definition, names, part) <> "]"}
end

# union/except/intersect clauses: wrap the sub-query string in parens.
defp combinations(combinations) do
  Enum.map(combinations, fn {key, val} -> {key, "(" <> to_string(val) <> ")"} end)
end

# Chooses `where`/`or_where` (or having variants) from the clause's `op`.
defp bool_exprs(keys, exprs, names) do
  Enum.map(exprs, fn %{expr: expr, op: op} = part ->
    {Map.fetch!(keys, op), expr(expr, names, part)}
  end)
end

defp kw_exprs(key, exprs, names) do
  Enum.map(exprs, &{key, expr(&1, names)})
end

defp kw_expr(_key, nil, _names), do: []
defp kw_expr(key, expr, names), do: [{key, expr(expr, names)}]

defp kw_inspect(_key, nil), do: []
defp kw_inspect(key, val), do: [{key, inspect(val)}]

defp kw_as_and_prefix(%{as: as, prefix: prefix}) do
  kw_inspect(:as, as) ++ kw_inspect(:prefix, prefix)
end

defp expr(%{expr: expr} = part, names) do
  expr(expr, names, part)
end

@doc false
# Core renderer: normalizes the AST (prewalk), rewrites query-specific nodes
# to plain Elixir AST (postwalk), then stringifies.
def expr(expr, names, part) do
  expr
  |> Macro.traverse(:ok, &{prewalk(&1), &2}, &{postwalk(&1, names, part), &2})
  |> elem(0)
  |> macro_to_string()
end

# Compile-time branch: Macro.to_string/2 with a formatter fun was needed
# before 1.11 to strip "()" from no-parens field calls.
if Version.match?(System.version(), ">= 1.11.0") do
  defp macro_to_string(expr), do: Macro.to_string(expr)
else
  defp macro_to_string(expr) do
    Macro.to_string(expr, fn
      {{:., _, [_, _]}, _, []}, string -> String.replace_suffix(string, "()", "")
      _other, string -> string
    end)
  end
end
# Tagged values
# prewalk/1 unwraps %Ecto.Query.Tagged{} nodes into `type/2` AST (or the bare
# value when untagged) before the main postwalk pass.
defp prewalk(%Ecto.Query.Tagged{value: value, tag: nil}) do
  value
end

defp prewalk(%Ecto.Query.Tagged{value: value, tag: {:parameterized, type, opts}}) do
  # 3-tuples are not valid quoted AST; wrap in an explicit {:{}, ...} node.
  {:type, [], [value, {:{}, [], [:parameterized, type, opts]}]}
end

defp prewalk(%Ecto.Query.Tagged{value: value, tag: tag}) do
  {:type, [], [value, tag]}
end

defp prewalk({:type, _, [value, {:parameterized, type, opts}]}) do
  {:type, [], [value, {:{}, [], [:parameterized, type, opts]}]}
end

defp prewalk(node) do
  node
end

# Convert variables to proper names
defp postwalk({:&, _, [ix]}, names, part) do
  binding_to_expr(ix, names, part)
end

# Remove parens from field calls
defp postwalk({{:., _, [_, _]} = dot, meta, []}, _names, _part) do
  {dot, [no_parens: true] ++ meta, []}
end

# Interpolated unknown value
defp postwalk({:^, _, [_ix, _len]}, _names, _part) do
  {:^, [], [{:..., [], nil}]}
end

# Interpolated known value
defp postwalk({:^, _, [ix]}, _, %{params: params}) do
  value =
    case Enum.at(params || [], ix) do
      # Wrap the head in a block so it is not treated as a charlist
      {[head | tail], _type} -> [{:__block__, [], [head]} | tail]
      {value, _type} -> value
      _ -> {:..., [], nil}
    end

  {:^, [], [value]}
end

# Types need to be converted back to AST for fields
defp postwalk({:type, meta, [expr, type]}, names, part) do
  {:type, meta, [expr, type_to_expr(type, names, part)]}
end

# For keyword and interpolated fragments use normal escaping
defp postwalk({:fragment, _, [{_, _} | _] = parts}, _names, _part) do
  {:fragment, [], unmerge_fragments(parts, "", [])}
end

# Subqueries
defp postwalk({:subquery, i}, _names, %{subqueries: subqueries}) do
  {:subquery, [], [Enum.fetch!(subqueries, i).query]}
end

# Jason
# json_extract_path renders as chained Access syntax: expr[key1][key2]...
defp postwalk({:json_extract_path, _, [expr, path]}, _names, _part) do
  Enum.reduce(path, expr, fn element, acc ->
    {{:., [], [Access, :get]}, [], [acc, element]}
  end)
end

# Everything else passes through unchanged.
defp postwalk(node, _names, _part) do
  node
end
# Resolves a binding index to its rendered form, honoring `select ... take`
# customizations stored on the clause (`part.take`).
defp binding_to_expr(ix, names, part) do
  case part do
    %{take: %{^ix => {:any, fields}}} when ix == 0 ->
      fields

    %{take: %{^ix => {tag, fields}}} ->
      {tag, [], [binding(names, ix), fields]}

    _ ->
      binding(names, ix)
  end
end

# Rebuilds a type tag as printable AST: parameterized types, field references
# ({ix, field} -> binding.field), composites ({:array, t} etc.), or literals.
defp type_to_expr({:parameterized, type, opts}, _names, _part) do
  {:{}, [], [:parameterized, type, opts]}
end

defp type_to_expr({ix, type}, names, part) when is_integer(ix) do
  {{:., [], [binding_to_expr(ix, names, part), type]}, [no_parens: true], []}
end

defp type_to_expr({composite, type}, names, part) when is_atom(composite) do
  {composite, type_to_expr(type, names, part)}
end

defp type_to_expr(type, _names, _part) do
  type
end

# Collapses a [{:raw, s} | {:expr, v}] fragment part list back into the
# "...?..." template string plus its argument list.
defp unmerge_fragments([{:raw, s}, {:expr, v} | t], frag, args) do
  unmerge_fragments(t, frag <> s <> "?", [v | args])
end

defp unmerge_fragments([{:raw, s}], frag, args) do
  [frag <> s | Enum.reverse(args)]
end

# Maps a join qualifier atom to the macro name used in the query DSL.
defp join_qual(:inner), do: :join
defp join_qual(:inner_lateral), do: :join_lateral
defp join_qual(:left), do: :left_join
defp join_qual(:left_lateral), do: :left_join_lateral
defp join_qual(:right), do: :right_join
defp join_qual(:full), do: :full_join
defp join_qual(:cross), do: :cross_join
# Collects one display name per source (from + each join, join-order by ix);
# these seed the generated binding names (p0, c1, ...).
defp collect_sources(%{from: nil, joins: joins}) do
  ["query" | join_sources(joins)]
end

defp collect_sources(%{from: %{source: source}, joins: joins}) do
  [from_sources(source) | join_sources(joins)]
end

defp from_sources(%Ecto.SubQuery{query: query}), do: from_sources(query.from.source)
defp from_sources({source, schema}), do: schema || source
defp from_sources(nil), do: "query"

defp join_sources(joins) do
  joins
  |> Enum.sort_by(& &1.ix)
  |> Enum.map(fn
    %JoinExpr{assoc: {_var, assoc}} ->
      assoc

    %JoinExpr{source: {:fragment, _, _}} ->
      "fragment"

    %JoinExpr{source: %Ecto.Query{from: from}} ->
      from_sources(from.source)

    %JoinExpr{source: source} ->
      from_sources(source)
  end)
end

# First letter of each (normalized) source name, lowercased.
defp generate_letters(sources) do
  Enum.map(sources, fn source ->
    source
    |> Kernel.to_string()
    |> normalize_source()
    |> String.first()
    |> String.downcase()
  end)
end

# Appends a running index so names are unique: ["u", "p"] -> [:u0, :p1].
defp generate_names(letters) do
  {names, _} = Enum.map_reduce(letters, 0, &{:"#{&1}#{&2}", &2 + 1})
  names
end

# Out-of-range indexes (malformed queries) degrade to a sentinel name rather
# than crashing inspect.
defp binding(names, pos) do
  try do
    {elem(names, pos), [], nil}
  rescue
    ArgumentError -> {:"unknown_binding_#{pos}!", [], nil}
  end
end

# "Elixir.My.Schema" -> "Schema"; table names pass through unchanged.
defp normalize_source("Elixir." <> _ = source),
  do: source |> Module.split() |> List.last()

defp normalize_source(source),
  do: source
end
| 27.959906 | 94 | 0.595108 |
1c2633debfc9f7da331dfb8bc032a71c3377a035 | 532 | ex | Elixir | lib/penelope/ml/text/token_filter.ex | pylon/penelope | 5b0310dc0647a8e20ab1b4c10d3820f11cfb2601 | [
"Apache-2.0"
] | 53 | 2017-10-13T06:39:49.000Z | 2022-03-28T19:43:42.000Z | lib/penelope/ml/text/token_filter.ex | pylon/penelope | 5b0310dc0647a8e20ab1b4c10d3820f11cfb2601 | [
"Apache-2.0"
] | 12 | 2018-01-08T23:05:37.000Z | 2019-08-02T12:59:27.000Z | lib/penelope/ml/text/token_filter.ex | pylon/penelope | 5b0310dc0647a8e20ab1b4c10d3820f11cfb2601 | [
"Apache-2.0"
] | 4 | 2018-06-13T19:45:57.000Z | 2019-10-17T13:37:06.000Z | defmodule Penelope.ML.Text.TokenFilter do
@moduledoc """
The token filter removes stop-word tokens from each token list in the pipeline.
"""
@doc """
Strips every configured stop-word token from each token list in the batch.
"""
@spec transform(
        model :: %{tokens: [String.t()]},
        context :: map,
        x :: [[String.t()]]
      ) :: [[String.t()]]
def transform(model, _context, x) do
  # One filtered token list per input document.
  for token_list <- x, do: filter_tokens(token_list, model.tokens)
end
# Keeps only the tokens that are not in the stop-word list.
defp filter_tokens(token_list, stop_tokens) do
  Enum.filter(token_list, fn token -> token not in stop_tokens end)
end
end
| 24.181818 | 72 | 0.599624 |
1c263fedd447d64b184b0f525b5d8aa8327ff56c | 2,835 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/computation_topology.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dataflow/lib/google_api/dataflow/v1b3/model/computation_topology.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/computation_topology.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Dataflow.V1b3.Model.ComputationTopology do
  @moduledoc """
  All configuration data for a particular Computation.
  ## Attributes
  * `computationId` (*type:* `String.t`, *default:* `nil`) - The ID of the computation.
  * `inputs` (*type:* `list(GoogleApi.Dataflow.V1b3.Model.StreamLocation.t)`, *default:* `nil`) - The inputs to the computation.
  * `keyRanges` (*type:* `list(GoogleApi.Dataflow.V1b3.Model.KeyRangeLocation.t)`, *default:* `nil`) - The key ranges processed by the computation.
  * `outputs` (*type:* `list(GoogleApi.Dataflow.V1b3.Model.StreamLocation.t)`, *default:* `nil`) - The outputs from the computation.
  * `stateFamilies` (*type:* `list(GoogleApi.Dataflow.V1b3.Model.StateFamilyConfig.t)`, *default:* `nil`) - The state family values.
  * `systemStageName` (*type:* `String.t`, *default:* `nil`) - The system stage name.
  """

  # Generated model: `field/2,3` macros come from GoogleApi.Gax.ModelBase and
  # define the JSON (de)serialization mapping for each attribute.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :computationId => String.t(),
          :inputs => list(GoogleApi.Dataflow.V1b3.Model.StreamLocation.t()),
          :keyRanges => list(GoogleApi.Dataflow.V1b3.Model.KeyRangeLocation.t()),
          :outputs => list(GoogleApi.Dataflow.V1b3.Model.StreamLocation.t()),
          :stateFamilies => list(GoogleApi.Dataflow.V1b3.Model.StateFamilyConfig.t()),
          :systemStageName => String.t()
        }

  field(:computationId)
  field(:inputs, as: GoogleApi.Dataflow.V1b3.Model.StreamLocation, type: :list)
  field(:keyRanges, as: GoogleApi.Dataflow.V1b3.Model.KeyRangeLocation, type: :list)
  field(:outputs, as: GoogleApi.Dataflow.V1b3.Model.StreamLocation, type: :list)
  field(:stateFamilies, as: GoogleApi.Dataflow.V1b3.Model.StateFamilyConfig, type: :list)
  field(:systemStageName)
end
# Poison JSON integration: decoding delegates to the generated model decoder,
# encoding to the shared ModelBase encoder.
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.ComputationTopology do
  def decode(value, options) do
    GoogleApi.Dataflow.V1b3.Model.ComputationTopology.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.ComputationTopology do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 45.725806 | 149 | 0.724868 |
1c264f26b71bc025efc561d400b45f57b6613c6b | 607 | ex | Elixir | frameworks/phoenix_benchmark/web/router.ex | serv/framework-benchmarks | a2a15d112fb56eb7aaa676874853a26c55243817 | [
"MIT"
] | null | null | null | frameworks/phoenix_benchmark/web/router.ex | serv/framework-benchmarks | a2a15d112fb56eb7aaa676874853a26c55243817 | [
"MIT"
] | null | null | null | frameworks/phoenix_benchmark/web/router.ex | serv/framework-benchmarks | a2a15d112fb56eb7aaa676874853a26c55243817 | [
"MIT"
] | null | null | null | defmodule PhoenixBenchmark.Router do
use PhoenixBenchmark.Web, :router

# Standard HTML pipeline: session, flash, CSRF protection, secure headers.
pipeline :browser do
  plug :accepts, ["html"]
  plug :fetch_session
  plug :fetch_flash
  plug :protect_from_forgery
  plug :put_secure_browser_headers
end

# JSON-only pipeline (currently unused by any scope below).
pipeline :api do
  plug :accepts, ["json"]
end

scope "/", PhoenixBenchmark do
  pipe_through :browser # Use the default browser stack

  get "/", PageController, :index
  # For benchmark
  get "/hit", PageController, :hit
end

# Other scopes may use custom stacks.
# scope "/api", PhoenixBenchmark do
#   pipe_through :api
# end
end
| 20.233333 | 57 | 0.685338 |
1c265b4b9bca522f0440f17e11ee592223f07983 | 698 | exs | Elixir | test/hauer/fs_test.exs | nanaki82/hauer | 7d717a913933c7f71391d0eadb724c41bdd3ed77 | [
"MIT"
] | null | null | null | test/hauer/fs_test.exs | nanaki82/hauer | 7d717a913933c7f71391d0eadb724c41bdd3ed77 | [
"MIT"
] | null | null | null | test/hauer/fs_test.exs | nanaki82/hauer | 7d717a913933c7f71391d0eadb724c41bdd3ed77 | [
"MIT"
] | null | null | null | defmodule Hauer.FS.FSTest do
use ExUnit.Case

@resource_name "vQJ1pEUiAMr5NklN77C4" # to avoid collision with existent resources

# NOTE(review): if either assertion fails, the created file is left on disk
# (no on_exit here), which can poison later runs — consider adding cleanup.
test "create and delete a resource" do
  {:ok, file_path} = Hauer.FS.add_resource(@resource_name)
  assert String.contains?(file_path, "#{@resource_name}.ex")
  assert Hauer.FS.remove_resource(@resource_name) == :ok
end

test "return an error when the resource already exists" do
  # First add succeeds; the duplicate add must be rejected.
  {:ok, file_path} = Hauer.FS.add_resource(@resource_name)
  {:error, message} = Hauer.FS.add_resource(@resource_name)
  assert String.contains?(message, "Resource file already exists")

  # Clean up the created resource directory even on assertion failure.
  on_exit fn ->
    File.rm_rf Path.dirname(file_path)
  end
end
end
| 29.083333 | 84 | 0.72063 |
1c266d8122e5d898ec94c6134a7a1a5dd4b10fee | 603 | ex | Elixir | lib/mix/distill/category.ex | allen-garvey/artour | fce27b234d11a3e434c897b5fa3178b7c126245f | [
"MIT"
] | 4 | 2019-10-04T16:11:15.000Z | 2021-08-18T21:00:13.000Z | apps/artour/lib/mix/distill/category.ex | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 5 | 2020-03-16T23:52:25.000Z | 2021-09-03T16:52:17.000Z | apps/artour/lib/mix/distill/category.ex | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | null | null | null | defmodule Distill.Category do
alias Distill.PageRoute

@doc """
Returns list of tuples in format: path (string), controller module(atom), controller handler function (atom), params (map)
e.g. {"/posts", Artour.PostController, :index, %{}}
"""
# One static route per category that has posts.
def routes() do
  Artour.Public.categories_with_posts()
  |> Enum.map(&to_route/1)
end

@doc """
Takes a category struct and returns route tuple
"""
# Builds the %PageRoute{} for a single category, keyed by its slug.
def to_route(category) do
  %PageRoute{path: "/categories/" <> category.slug, controller: Artour.PublicCategoryController, handler: :show, params: %{"slug" => category.slug}}
end
end
1c267fbaf3f5395c01c200ec60613bf68d69c556 | 3,290 | ex | Elixir | test/process_managers/support/example_aggregate.ex | mquickform/commanded | 260b1ec28c2fb3c1fcbb61b8c4abacabd7dc7ed2 | [
"MIT"
] | null | null | null | test/process_managers/support/example_aggregate.ex | mquickform/commanded | 260b1ec28c2fb3c1fcbb61b8c4abacabd7dc7ed2 | [
"MIT"
] | 1 | 2018-12-05T18:17:08.000Z | 2018-12-05T18:17:08.000Z | test/process_managers/support/example_aggregate.ex | mquickform/commanded | 260b1ec28c2fb3c1fcbb61b8c4abacabd7dc7ed2 | [
"MIT"
] | 1 | 2018-12-05T18:15:03.000Z | 2018-12-05T18:15:03.000Z | defmodule Commanded.ProcessManagers.ExampleAggregate do
@moduledoc false
alias Commanded.ProcessManagers.ExampleAggregate

# Aggregate state: identity, lifecycle atom, and the indexes of Interested
# events applied so far.
defstruct uuid: nil,
          state: nil,
          items: []

# One single-field command struct per aggregate operation.
defmodule Commands do
  defmodule(Start, do: defstruct([:aggregate_uuid]))
  defmodule(Publish, do: defstruct([:aggregate_uuid, :interesting, :uninteresting]))
  defmodule(Pause, do: defstruct([:aggregate_uuid]))
  defmodule(Stop, do: defstruct([:aggregate_uuid]))
  defmodule(Error, do: defstruct([:aggregate_uuid]))
  defmodule(Raise, do: defstruct([:aggregate_uuid]))
end

# Domain events emitted by the command handlers below.
defmodule Events do
  defmodule(Started, do: defstruct([:aggregate_uuid]))
  defmodule(Interested, do: defstruct([:aggregate_uuid, :index]))
  defmodule(Uninterested, do: defstruct([:aggregate_uuid, :index]))
  defmodule(Stopped, do: defstruct([:aggregate_uuid]))
  defmodule(Paused, do: defstruct([:aggregate_uuid]))
  defmodule(Errored, do: defstruct([:aggregate_uuid]))
  defmodule(Raised, do: defstruct([:aggregate_uuid]))
end
# Command handlers: each returns the event(s) to record, not new state.
def start(%ExampleAggregate{}, aggregate_uuid) do
  %Events.Started{aggregate_uuid: aggregate_uuid}
end

# Emits `interesting` Interested events followed by `uninteresting`
# Uninterested events, each sequence indexed from 1.
def publish(%ExampleAggregate{uuid: aggregate_uuid}, interesting, uninteresting) do
  Enum.concat(
    publish_interesting(aggregate_uuid, interesting, 1),
    publish_uninteresting(aggregate_uuid, uninteresting, 1)
  )
end

def pause(%ExampleAggregate{uuid: aggregate_uuid}) do
  %Events.Paused{aggregate_uuid: aggregate_uuid}
end

def stop(%ExampleAggregate{uuid: aggregate_uuid}) do
  %Events.Stopped{aggregate_uuid: aggregate_uuid}
end

def error(%ExampleAggregate{uuid: aggregate_uuid}) do
  %Events.Errored{aggregate_uuid: aggregate_uuid}
end

# NOTE: defining `raise/1` shadows the auto-imported Kernel.raise/1 inside
# this module; use `Kernel.raise/1` explicitly if the builtin is ever needed.
def raise(%ExampleAggregate{uuid: aggregate_uuid}) do
  %Events.Raised{aggregate_uuid: aggregate_uuid}
end
# Builds `remaining` Interested events with consecutive indexes from `index`.
defp publish_interesting(_aggregate_uuid, 0, _index), do: []

defp publish_interesting(aggregate_uuid, remaining, index) do
  event = %Events.Interested{aggregate_uuid: aggregate_uuid, index: index}
  [event | publish_interesting(aggregate_uuid, remaining - 1, index + 1)]
end
# Builds `remaining` Uninterested events with consecutive indexes from `index`.
defp publish_uninteresting(_aggregate_uuid, 0, _index), do: []

defp publish_uninteresting(aggregate_uuid, remaining, index) do
  event = %Events.Uninterested{aggregate_uuid: aggregate_uuid, index: index}
  [event | publish_uninteresting(aggregate_uuid, remaining - 1, index + 1)]
end
# State mutators
# apply/2 folds an event into the aggregate state. Note this also shadows
# Kernel.apply/2 within the module.
def apply(%ExampleAggregate{} = state, %Events.Started{aggregate_uuid: aggregate_uuid}),
  do: %ExampleAggregate{state | uuid: aggregate_uuid, state: :started}

# Appends the event's index to the items history (append keeps event order).
def apply(%ExampleAggregate{items: items} = state, %Events.Interested{index: index}),
  do: %ExampleAggregate{state | items: items ++ [index]}

def apply(%ExampleAggregate{} = state, %Events.Paused{}),
  do: %ExampleAggregate{state | state: :paused}

def apply(%ExampleAggregate{} = state, %Events.Errored{}),
  do: %ExampleAggregate{state | state: :errored}

def apply(%ExampleAggregate{} = state, %Events.Raised{}),
  do: %ExampleAggregate{state | state: :exception}

# Uninterested events carry no state change.
def apply(%ExampleAggregate{} = state, %Events.Uninterested{}), do: state

def apply(%ExampleAggregate{} = state, %Events.Stopped{}),
  do: %ExampleAggregate{state | state: :stopped}
end
| 35.376344 | 90 | 0.72462 |
1c26965ea0a02c165815d8ae4af743ff91e21772 | 549 | ex | Elixir | lib/oembed/providers/twitter_provider.ex | byoungdale/elixir-oembed | 4ac75443242fea8094e9633f7656135fa42fab5f | [
"MIT"
] | null | null | null | lib/oembed/providers/twitter_provider.ex | byoungdale/elixir-oembed | 4ac75443242fea8094e9633f7656135fa42fab5f | [
"MIT"
] | null | null | null | lib/oembed/providers/twitter_provider.ex | byoungdale/elixir-oembed | 4ac75443242fea8094e9633f7656135fa42fab5f | [
"MIT"
] | null | null | null | defmodule OEmbed.TwitterProvider do
@moduledoc """
oEmbed provider for Twitter URLs.
"""

# Presumably provides the `get_oembed/1` helper used in `get/1` —
# confirm in OEmbed.Provider.
use OEmbed.Provider

# Base endpoint; the target URL is appended percent-encoded in `get/1`.
@oembed_endpoint "https://publish.twitter.com/oembed?url="
@doc """
Check if this provider supports given URL.

Matches tweet-status URLs like `https://twitter.com/user/status/123`,
including the legacy `#!/` and `statuses` variants, case-insensitively.
"""
def provides?(url) do
  tweet_url = ~r/(^|[^'"])(https?:\/\/twitter\.com\/(?:#!\/)?(\w+)\/status(?:es)?\/(\d+))/i
  String.match?(url, tweet_url)
end
@doc """
Get oEmbed result for given URL.
"""
def get(url) do
  # Percent-encode all but RFC 3986 unreserved characters so the target URL
  # survives as a single query-string value.
  get_oembed(@oembed_endpoint <> URI.encode(url, &URI.char_unreserved?/1))
end
end
| 21.115385 | 84 | 0.602914 |
1c269e6e69b2046493b4cada1fd196c88a13676e | 186 | exs | Elixir | priv/test_repo/migrations/20200708165750_create_annacl_tables.exs | annatel/annacl | 55a7eff89617fe9cf41f532cf3f62566bd4b0d9a | [
"MIT"
] | 7 | 2020-07-26T18:24:19.000Z | 2021-06-22T10:15:48.000Z | priv/test_repo/migrations/20200708165750_create_annacl_tables.exs | annatel/annacl | 55a7eff89617fe9cf41f532cf3f62566bd4b0d9a | [
"MIT"
] | 2 | 2020-08-15T00:58:53.000Z | 2020-10-31T16:54:11.000Z | priv/test_repo/migrations/20200708165750_create_annacl_tables.exs | annatel/annacl | 55a7eff89617fe9cf41f532cf3f62566bd4b0d9a | [
"MIT"
] | 2 | 2020-10-22T20:13:21.000Z | 2020-10-30T14:53:10.000Z | defmodule Annacl.TestRepo.Migrations.CreateAnnaclTables do
use Ecto.Migration

# Thin wrapper: the actual DDL lives in the versioned Annacl.Migrations.V1
# module shipped by the annacl library.
def up do
  Annacl.Migrations.V1.up()
end

def down do
  Annacl.Migrations.V1.down()
end
end
| 15.5 | 58 | 0.731183 |
1c26a7fc0f1d9efd2c8b81868730f875ad16f6f4 | 4,691 | exs | Elixir | apps/artemis/test/artemis/schemas/recognition_test.exs | artemis-platform/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 2 | 2020-04-23T02:29:18.000Z | 2020-07-07T13:13:17.000Z | apps/artemis/test/artemis/schemas/recognition_test.exs | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 4 | 2020-04-26T20:35:36.000Z | 2020-11-10T22:13:19.000Z | apps/artemis/test/artemis/schemas/recognition_test.exs | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | null | null | null | defmodule Artemis.RecognitionTest do
use Artemis.DataCase
# NOTE(review): Artemis.DataCase cases usually already `use ExUnit.Case`;
# the extra `use` below may be redundant — confirm against DataCase.
use ExUnit.Case, async: true

import Ecto.Repo
import Artemis.Factories

alias Artemis.Recognition
alias Artemis.Repo
alias Artemis.User
alias Artemis.UserRecognition

# Associations preloaded on every fixture before assertions.
@preload [:created_by, :user_recognitions, :users]
# Changeset-level constraint checks: required associations and the rule that
# a recognition's creator cannot also be one of its recipients.
describe "attributes - constraints" do
  test "requires created by association" do
    params = params_for(:recognition, created_by: nil)

    {:error, changeset} =
      %Recognition{}
      |> Recognition.changeset(params)
      |> Repo.insert()

    assert errors_on(changeset).created_by_id == ["can't be blank"]
  end

  test "requires at least one user recognition association" do
    params = params_for(:recognition, user_recognitions: [])

    {:error, changeset} =
      %Recognition{}
      |> Recognition.changeset(params)
      |> Repo.insert()

    assert errors_on(changeset).user_recognitions == ["can't be blank"]
  end

  test "requires creator to not be in user recognition association" do
    created_by = insert(:user)
    other_user = insert(:user)

    # Including creator in recognition associations raises an error
    user_recognitions = [
      %{user_id: created_by.id},
      %{user_id: other_user.id}
    ]

    params =
      :recognition
      |> params_for(created_by_id: created_by.id)
      |> Map.put(:user_recognitions, user_recognitions)

    {:error, changeset} =
      %Recognition{}
      |> Recognition.changeset(params)
      |> Repo.insert()

    assert errors_on(changeset).user_recognitions == ["can't include creator"]

    # Succeeds when not included
    user_recognitions = [
      %{user_id: other_user.id}
    ]

    params =
      :recognition
      |> params_for(created_by_id: created_by.id)
      |> Map.put(:user_recognitions, user_recognitions)

    {:ok, _} =
      %Recognition{}
      |> Recognition.changeset(params)
      |> Repo.insert()
  end
end
# The created_by FK is nilify-on-delete in both directions: deleting either
# side never cascades to the other record.
describe "associations - created by" do
  setup do
    recognition = insert(:recognition)

    {:ok, recognition: Repo.preload(recognition, @preload)}
  end

  test "deleting association does not remove record and nilifies foreign key", %{recognition: recognition} do
    assert Repo.get(User, recognition.created_by.id) != nil
    assert recognition.created_by != nil

    Repo.delete!(recognition.created_by)

    assert Repo.get(User, recognition.created_by.id) == nil

    recognition =
      Recognition
      |> preload(^@preload)
      |> Repo.get(recognition.id)

    assert recognition.created_by == nil
  end

  test "deleting record does not remove association", %{recognition: recognition} do
    assert Repo.get(User, recognition.created_by.id) != nil

    Repo.delete!(recognition)

    assert Repo.get(User, recognition.created_by.id) != nil

    assert Repo.get(Recognition, recognition.id) == nil
  end
end
describe "associations - user recognitions" do
setup do
recognition =
:recognition
|> insert
|> with_user_recognitions
{:ok, recognition: Repo.preload(recognition, @preload)}
end
test "update associations", %{recognition: recognition} do
new_user = insert(:user)
new_user_recognition = insert(:user_recognition, recognition: recognition, user: new_user)
assert length(recognition.users) == 3
{:ok, updated} =
recognition
|> Recognition.associations_changeset(%{user_recognitions: [new_user_recognition]})
|> Repo.update()
updated = Repo.preload(updated, @preload)
assert length(updated.users) == 1
assert updated.users == [new_user]
end
test "deleting association does not remove record", %{recognition: recognition} do
assert Repo.get(Recognition, recognition.id) != nil
assert length(recognition.user_recognitions) == 3
Enum.map(recognition.user_recognitions, &Repo.delete(&1))
recognition =
Recognition
|> preload(^@preload)
|> Repo.get(recognition.id)
assert Repo.get(Recognition, recognition.id) != nil
assert length(recognition.user_recognitions) == 0
end
test "deleting record removes associations", %{recognition: recognition} do
assert Repo.get(Recognition, recognition.id) != nil
assert length(recognition.user_recognitions) == 3
Repo.delete(recognition)
assert Repo.get(Recognition, recognition.id) == nil
Enum.map(recognition.user_recognitions, fn user_recognition ->
assert Repo.get(UserRecognition, user_recognition.id) == nil
end)
end
end
end
| 27.922619 | 111 | 0.656363 |
1c26b1432675176d1ce17e01d55b43a801438348 | 15,212 | ex | Elixir | lib/absinthe/relay/connection.ex | jlgeering/absinthe_relay | 94e0abb22bc5f63b5d25644b212c2ac531c433f7 | [
"MIT"
] | null | null | null | lib/absinthe/relay/connection.ex | jlgeering/absinthe_relay | 94e0abb22bc5f63b5d25644b212c2ac531c433f7 | [
"MIT"
] | null | null | null | lib/absinthe/relay/connection.ex | jlgeering/absinthe_relay | 94e0abb22bc5f63b5d25644b212c2ac531c433f7 | [
"MIT"
] | null | null | null | defmodule Absinthe.Relay.Connection.Options do
@moduledoc false

@typedoc false
# Relay cursor-pagination arguments. NOTE(review): typed here as a bare map
# even though `defstruct` below makes this a struct — confirm whether
# %__MODULE__{} was intended.
@type t :: %{
        after: nil | integer,
        before: nil | integer,
        first: nil | integer,
        last: nil | integer
      }

defstruct after: nil, before: nil, first: nil, last: nil
end
defmodule Absinthe.Relay.Connection do
@moduledoc """
Support for paginated result sets.
Define connection types that provide a standard mechanism for slicing and
paginating result sets.
For information about the connection model, see the Relay Cursor
Connections Specification at
https://facebook.github.io/relay/graphql/connections.htm.
## Connection
Given an object type, eg:
```
object :pet do
field :name, :string
end
```
You can create a connection type to paginate them by:
```
connection node_type: :pet
```
This will automatically define two new types: `:pet_connection` and
`:pet_edge`.
We define a field that uses these types to paginate associated records
by using `connection field`. Here, for instance, we support paginating a
person's pets:
```
object :person do
field :first_name, :string
connection field :pets, node_type: :pet do
resolve fn
pagination_args, %{source: person} ->
Absinthe.Relay.Connection.from_list(
Enum.map(person.pet_ids, &pet_from_id(&1)),
pagination_args
)
end
end
end
end
```
The `:pets` field is automatically set to return a `:pet_connection` type,
and configured to accept the standard pagination arguments `after`, `before`,
`first`, and `last`. We create the connection by using
`Absinthe.Relay.Connection.from_list/2`, which takes a list and the pagination
arguments passed to the resolver.
It is possible to provide additional pagination arguments to a relay
connection:
```
connection field :pets, node_type: :pet do
arg :custom_arg, :custom
# other args...
resolve fn
pagination_args_and_custom_args, %{source: person} ->
# ... return {:ok, a_connection}
end
end
```
Note: `Absinthe.Relay.Connection.from_list/2` expects that the full list of
records be materialized and provided. If you're using Ecto, you probably want
to use `Absinthe.Relay.Connection.from_query/2` instead.
Here's how you might request the names of the first `$petCount` pets a person
owns:
```
query FindPets($personId: ID!, $petCount: Int!) {
person(id: $personId) {
pets(first: $petCount) {
pageInfo {
hasPreviousPage
hasNextPage
}
edges {
node {
name
}
}
}
}
}
```
`edges` here is the list of intermediary edge types (created for you
automatically) that contain a field, `node`, that is the same `:node_type` you
passed earlier (`:pet`).
`pageInfo` is a field that contains information about the current
view; the `startCursor`, `endCursor`, `hasPreviousPage`, and
`hasNextPage` fields.
### Pagination Direction
By default, connections will support bidirectional pagination, but you can
also restrict the connection to just the `:forward` or `:backward` direction
using the `:paginate` argument:
```
connection field :pets, node_type: :pet, paginate: :forward do
```
### Customizing Types
If you'd like to add additional fields to the generated connection and edge
types, you can do that by providing a block to the `connection` macro, eg,
here we add a field, `:twice_edges_count` to the connection type, and another,
`:node_name_backwards`, to the edge type:
```
connection node_type: :pet do
field :twice_edges_count, :integer do
resolve fn
_, %{source: conn} ->
{:ok, length(conn.edges) * 2}
end
end
edge do
field :node_name_backwards, :string do
resolve fn
_, %{source: edge} ->
{:ok, edge.node.name |> String.reverse}
end
end
end
end
```
Just remember that if you use the block form of `connection`, you must call
the `edge` macro within the block.
### Customizing the node itself
It's also possible to customize the way the `node` field of the
connection's edge is resolved. This can, for example, be useful if
you're working with a NoSQL database that returns relationships as
lists of IDs. Consider the following example which paginates over
the user's account array, but resolves each one of them
independently.
```
object :account do
field :id, non_null(:id)
field :name, :string
end
connection node_type: :account do
edge do
field :node, :account do
resolve fn %{node: id}, _args, _info ->
Account.find(id)
end
end
end
end
object :user do
field :name, :string
connection field :accounts, node_type: :account do
resolve fn %{accounts: accounts}, _args, _info ->
Absinthe.Relay.Connection.from_list(ids, args)
end
end
end
```
This would resolve the connections into a list of the user's
associated accounts, and then for each node find that particular
account (preferably batched).
## Creating Connections
This module provides two functions that mirror similar JavaScript functions,
`from_list/2,3` and `from_slice/2,3`. We also provide `from_query/2,3` if you
have Ecto as a dependency for convenience.
Use `from_list` when you have all items in a list that you're going to
paginate over.
Use `from_slice` when you have items for a particular request, and merely need
a connection produced from these items.
## Schema Macros
For more details on connection-related macros, see
`Absinthe.Relay.Connection.Notation`.
"""
alias Absinthe.Relay.Connection.Options
@cursor_prefix "arrayconnection:"
@type t :: %{
edges: [edge],
page_info: page_info
}
@typedoc """
An opaque pagination cursor
Internally it has the base64 encoded structure:
```
#{@cursor_prefix}:$offset
```
"""
@type cursor :: binary
@type edge :: %{
node: term,
cursor: cursor
}
@typedoc """
Offset from zero.
Negative offsets are not supported.
"""
@type offset :: non_neg_integer
@type limit :: non_neg_integer
@type page_info :: %{
start_cursor: cursor,
end_cursor: cursor,
has_previous_page: boolean,
has_next_page: boolean
}
@doc """
Get a connection object for a list of data.
A simple function that accepts a list and connection arguments, and returns
a connection object for use in GraphQL.
The data given to it should constitute all data that further
pagination requests may page over. As such, it may be very
inefficient if you're pulling data from a database which could be
used to more directly retrieve just the desired data.
See also `from_query` and `from_slice`.
## Example
```
#in a resolver module
@items ~w(foo bar baz)
def list(args, _) do
Connection.from_list(@items, args)
end
```
"""
# Spec fixed: the alias is `Options` (not `Option`), and the optional third
# argument (`:max`, plus from_slice options) was missing from the spec.
@spec from_list(data :: list, args :: Options.t(), opts :: Keyword.t()) ::
        {:ok, t} | {:error, any}
def from_list(data, args, opts \\ []) do
  with {:ok, direction, limit} <- limit(args, opts[:max]),
       {:ok, offset} <- offset(args) do
    count = length(data)

    # Normalize to a forward slice: backward pagination anchors on the end of
    # the list (cursor offset or total count) and walks back `limit` items.
    {offset, limit} =
      case direction do
        :forward ->
          {offset || 0, limit}

        :backward ->
          end_offset = offset || count
          start_offset = max(end_offset - limit, 0)
          # When clamped to the start, everything up to end_offset fits.
          limit = if start_offset == 0, do: end_offset, else: limit
          {start_offset, limit}
      end

    opts =
      opts
      |> Keyword.put(:has_previous_page, offset > 0)
      |> Keyword.put(:has_next_page, count > offset + limit)

    data
    |> Enum.slice(offset, limit)
    |> from_slice(offset, opts)
  end
end
@type from_slice_opts :: [
        has_previous_page: boolean,
        has_next_page: boolean
      ]

@type pagination_direction :: :forward | :backward

@doc """
Build a connection from a pre-fetched slice of items.

Assumes the caller has already retrieved exactly the records belonging to this
page (e.g. via a limited/offset database query) and only needs them wrapped in
edges with cursors and page info. Page-info flags default to `false` and can be
overridden through `opts`.

## Example

    # In PostResolver module
    alias Absinthe.Relay

    def list(args, %{context: %{current_user: user}}) do
      {:ok, :forward, limit} = Connection.limit(args)
      offset = Connection.offset(args)

      Post
      |> where(author_id: ^user.id)
      |> limit(^limit)
      |> offset(^offset)
      |> Repo.all
      |> Relay.Connection.from_slice(offset)
    end
"""
@spec from_slice(data :: list, offset :: offset) :: {:ok, t}
@spec from_slice(data :: list, offset :: offset, opts :: from_slice_opts) :: {:ok, t}
def from_slice(items, offset, opts \\ []) do
  {edges, start_cursor, end_cursor} = build_cursors(items, offset)

  page_info = %{
    start_cursor: start_cursor,
    end_cursor: end_cursor,
    has_previous_page: Keyword.get(opts, :has_previous_page, false),
    has_next_page: Keyword.get(opts, :has_next_page, false)
  }

  {:ok, %{edges: edges, page_info: page_info}}
end
@doc """
Build a connection from an Ecto Query

This will automatically set a limit and offset value on the Ecto
query, and then run the query with whatever function is passed as
the second argument.

Notes:
- Your query MUST have an `order_by` value. Offset does not make
sense without one.
- `last: N` must always be accompanied by either a `before:` argument
to the query,
or an explicit `count: ` option to the `from_query` call.
Otherwise it is impossible to derive the required offset.

## Example
```
# In a PostResolver module
alias Absinthe.Relay

def list(args, %{context: %{current_user: user}}) do
  Post
  |> where(author_id: ^user.id)
  |> Relay.Connection.from_query(&Repo.all/1, args)
end
```
"""
@type from_query_opts ::
        [
          count: non_neg_integer
        ]
        | from_slice_opts

if Code.ensure_loaded?(Ecto) do
  @spec from_query(Ecto.Queryable.t(), (Ecto.Queryable.t() -> [term]), Options.t()) ::
          {:ok, map} | {:error, any}
  @spec from_query(
          Ecto.Queryable.t(),
          (Ecto.Queryable.t() -> [term]),
          Options.t(),
          from_query_opts
        ) :: {:ok, map} | {:error, any}
  def from_query(query, repo_fun, args, opts \\ []) do
    require Ecto.Query

    with {:ok, offset, limit} <- offset_and_limit_for_query(args, opts) do
      # Fetch one extra record to detect whether a next page exists.
      records =
        query
        |> Ecto.Query.limit(^(limit + 1))
        |> Ecto.Query.offset(^offset)
        |> repo_fun.()

      opts =
        opts
        |> Keyword.put(:has_previous_page, offset > 0)
        |> Keyword.put(:has_next_page, length(records) > limit)

      from_slice(Enum.take(records, limit), offset, opts)
    end
  end
else
  # Arity fixed: must mirror the Ecto branch (4 params with a default, i.e.
  # from_query/3 and from_query/4). The previous 5-param fallback meant
  # from_query/3 raised UndefinedFunctionError instead of this message.
  def from_query(_, _, _, _ \\ []) do
    raise ArgumentError, """
    Ecto not Loaded!
    You cannot use this unless Ecto is also a dependency
    """
  end
end
@doc false
@spec offset_and_limit_for_query(Options.t(), from_query_opts) ::
        {:ok, offset, limit} | {:error, any}
def offset_and_limit_for_query(args, opts) do
  with {:ok, direction, limit} <- limit(args, opts[:max]),
       {:ok, offset} <- offset(args) do
    resolve_offset(direction, offset, limit, opts[:count])
  end
end

# Forward pagination starts at the cursor offset (or 0). Backward pagination
# needs an end anchor — the `before` cursor or an explicit :count — from which
# the start offset is derived.
defp resolve_offset(:forward, offset, limit, _count), do: {:ok, offset || 0, limit}

defp resolve_offset(:backward, nil, _limit, nil) do
  {:error,
   "You must supply a count (total number of records) option if using `last` without `before`"}
end

defp resolve_offset(:backward, nil, limit, count), do: {:ok, max(count - limit, 0), limit}

defp resolve_offset(:backward, offset, limit, _count), do: {:ok, max(offset - limit, 0), limit}
@doc """
Same as `limit/1` with user provided upper bound.

Often backend developers want to provide a maximum value above which no more
records can be retrieved, no matter how many are asked for by the front end.

This function provides that capability. For use with `from_list` or
`from_query` use the `:max` option on those functions.
"""
@spec limit(args :: Options.t(), max :: pos_integer | nil) ::
        {:ok, pagination_direction, limit} | {:error, any}
def limit(args, nil), do: limit(args)

def limit(args, max) do
  with {:ok, direction, limit} <- limit(args) do
    {:ok, direction, min(max, limit)}
  end
end

@doc """
The direction and desired number of records in the pagination arguments.
"""
@spec limit(args :: Options.t()) :: {:ok, pagination_direction, limit} | {:error, any}
# Guards added: an %Options{} struct always contains both keys (possibly nil),
# so without the nil guards `%{first: nil, last: 5}` matched the first clause
# and produced a nil limit. Mirrors the nil guards used by `offset/1`.
def limit(%{first: first}) when not is_nil(first), do: {:ok, :forward, first}
def limit(%{last: last}) when not is_nil(last), do: {:ok, :backward, last}
def limit(_), do: {:error, "You must either supply `:first` or `:last`"}
@doc """
Returns the offset for a page.

For an `after` cursor the page starts just past the cursor; for a `before`
cursor the decoded offset is used directly (clamped at zero). With no cursor
supplied, `{:ok, nil}` is returned.
"""
@spec offset(args :: Options.t()) :: {:ok, offset | nil} | {:error, any}
def offset(%{after: cursor}) when not is_nil(cursor) do
  case cursor_to_offset(cursor) do
    {:ok, decoded} -> {:ok, decoded + 1}
    {:error, _} -> {:error, "Invalid cursor provided as `after` argument"}
  end
end

def offset(%{before: cursor}) when not is_nil(cursor) do
  case cursor_to_offset(cursor) do
    {:ok, decoded} -> {:ok, max(decoded, 0)}
    {:error, _} -> {:error, "Invalid cursor provided as `before` argument"}
  end
end

def offset(_), do: {:ok, nil}
defp build_cursors([], _offset), do: {[], nil, nil}

defp build_cursors(items, offset) do
  # Each item's cursor encodes its absolute position in the full result set.
  edges =
    items
    |> Enum.with_index(offset || 0)
    |> Enum.map(fn {node, index} -> %{node: node, cursor: offset_to_cursor(index)} end)

  %{cursor: first} = List.first(edges)
  %{cursor: last} = List.last(edges)

  {edges, first, last}
end
@doc """
Creates the cursor string from an offset.
"""
@spec offset_to_cursor(integer) :: binary
def offset_to_cursor(offset) do
  # Cursor format: base64("#{@cursor_prefix}#{offset}")
  Base.encode64(@cursor_prefix <> Integer.to_string(offset))
end
@doc """
Rederives the offset from the cursor string.
"""
# Spec fixed: the function returns tagged tuples ({:ok, int} | {:error, msg}),
# never a bare integer or :error as the old spec claimed.
@spec cursor_to_offset(binary) :: {:ok, integer} | {:error, String.t()}
def cursor_to_offset(cursor) do
  # Any failure (bad base64, wrong prefix, non-numeric payload) falls through
  # to the catch-all error clause.
  with {:ok, @cursor_prefix <> raw} <- Base.decode64(cursor),
       {parsed, _} <- Integer.parse(raw) do
    {:ok, parsed}
  else
    _ -> {:error, "Invalid cursor"}
  end
end
end
| 27.115865 | 107 | 0.630818 |
1c26d0f3bc5b1d8e207bf8a5e4c5f2cbab5d545c | 1,481 | ex | Elixir | lib/steve/harvey.ex | frostu8/steve | 9018a3142bf4e51efc8e4dd0b307406a1075b3c8 | [
"Unlicense"
] | null | null | null | lib/steve/harvey.ex | frostu8/steve | 9018a3142bf4e51efc8e4dd0b307406a1075b3c8 | [
"Unlicense"
] | null | null | null | lib/steve/harvey.ex | frostu8/steve | 9018a3142bf4e51efc8e4dd0b307406a1075b3c8 | [
"Unlicense"
] | null | null | null | defmodule Steve.Harvey do
@prefix "steve!"
use Nostrum.Consumer
alias Nostrum.Api
alias Nostrum.Voice
# Starts the Nostrum consumer process backed by this module's handle_event/1.
def start_link, do: Consumer.start_link(__MODULE__)
def handle_event({:MESSAGE_CREATE, msg, _ws_state}) do
  # React only to guild messages that carry the command prefix.
  if msg.guild_id && String.starts_with?(msg.content, @prefix) do
    # Nostrum's member struct lacks the user; merge it in for the permission check.
    member = Map.merge(msg.member, %{user: msg.author})

    # Only administrators may issue commands.
    if Steve.Util.is_administrator(member, msg.guild_id) do
      command = String.slice(msg.content, String.length(@prefix)..-1)
      handle_command(msg, command)
    end
  end
end
def handle_event({:VOICE_SPEAKING_UPDATE, voice, _ws_state}) do
  # When the bot stops speaking, start the audio again (loops the clip).
  if !voice.speaking do
    Steve.Voice.play_audio(voice.guild_id)
  end
end
# Ignore every other gateway event.
def handle_event(_event), do: :noop
# Dispatches a prefix-stripped command string. Only "join" and "leave" are
# recognized; anything else is silently ignored.
def handle_command(msg, command) do
  case command do
    "join" ->
      # attempt to join
      case Steve.Voice.get_user_voice_channel(msg.guild_id, msg.author.id) do
        nil ->
          # Caller is not in a voice channel — tell them in the text channel.
          Api.create_message(msg.channel_id, "You must be in a voice channel to summon Steve Harvey.")

        voice_channel_id ->
          # Join the caller's channel and immediately start playback.
          Voice.join_channel(msg.guild_id, voice_channel_id)
          Steve.Voice.play_audio(msg.guild_id)
      end

    "leave" ->
      # attempt to leave
      Voice.leave_channel(msg.guild_id)

    _ ->
      :ignore
  end
end
end
| 25.534483 | 104 | 0.642809 |
1c272b455c3cba0425f26f26700cb13bd5636726 | 8,118 | exs | Elixir | test/task_bunny/worker_test.exs | smartrent/task_bunny | db0b91d966b54df75e0f60f9c788ffa044c1f37a | [
"MIT"
] | null | null | null | test/task_bunny/worker_test.exs | smartrent/task_bunny | db0b91d966b54df75e0f60f9c788ffa044c1f37a | [
"MIT"
] | 3 | 2019-08-13T13:05:26.000Z | 2021-11-10T08:12:51.000Z | test/task_bunny/worker_test.exs | salemove/task_bunny | f1a69291b47f59cab1c010d48fda7551132ae51c | [
"MIT"
] | 1 | 2021-02-24T19:50:27.000Z | 2021-02-24T19:50:27.000Z | defmodule TaskBunny.WorkerTest do
use ExUnit.Case, async: false
import TaskBunny.QueueTestHelper
alias TaskBunny.{Connection, Worker, Queue, JobTestHelper}
alias JobTestHelper.TestJob
@queue "task_bunny.worker_test"
# The test queue plus its retry/rejected/scheduled subqueues.
defp all_queues, do: Queue.queue_with_subqueues(@queue)
# Starts a worker on the test queue. Overrides default to
# concurrency: 1 and store_rejected_jobs: true when not supplied.
defp start_worker(overrides \\ %{}) do
  concurrency = Map.get(overrides, :concurrency, 1)
  store_rejected_jobs = Map.get(overrides, :store_rejected_jobs, true)

  {:ok, worker} =
    Worker.start_link(
      queue: @queue,
      concurrency: concurrency,
      store_rejected_jobs: store_rejected_jobs
    )

  worker
end
setup do
  # Start every test from empty queues and a fresh job-helper state.
  clean(all_queues())
  JobTestHelper.setup()
  Queue.declare_with_subqueues(:default, @queue)

  on_exit(fn ->
    JobTestHelper.teardown()
  end)

  :ok
end
describe "worker" do
  test "invokes a job with the payload" do
    worker = start_worker()
    payload = %{"hello" => "world1"}
    TestJob.enqueue(payload, queue: @queue)
    JobTestHelper.wait_for_perform()
    # The job must receive exactly the payload that was enqueued.
    assert List.first(JobTestHelper.performed_payloads()) == payload
    GenServer.stop(worker)
  end

  test "consumes the message" do
    worker = start_worker()
    [main, retry, rejected, _scheduled] = all_queues()
    payload = %{"hello" => "world1"}
    TestJob.enqueue(payload, queue: @queue)
    JobTestHelper.wait_for_perform()
    conn = Connection.get_connection!()
    %{message_count: main_count} = Queue.state(conn, main)
    %{message_count: retry_count} = Queue.state(conn, retry)
    %{message_count: rejected_count} = Queue.state(conn, rejected)
    # A successful job is acked: no message remains anywhere.
    assert main_count == 0
    assert retry_count == 0
    assert rejected_count == 0
    GenServer.stop(worker)
  end

  test "concurrency" do
    worker = start_worker(%{concurrency: 5})
    payload = %{"sleep" => 10_000}
    # Run 10 jobs and each would take 10 seconds to finish
    Enum.each(1..10, fn _ ->
      TestJob.enqueue(payload, queue: @queue)
    end)
    # This waits for up to 1 second
    assert JobTestHelper.wait_for_perform(5)
    # Make sure more than specified number of jobs were not invoked
    assert JobTestHelper.performed_count() == 5
    GenServer.stop(worker)
  end

  test "executes the on_reject callback when rejected" do
    reset_test_job_retry_interval(5)
    worker = start_worker()
    # ppid lets the job send :on_reject_callback_called back to this test process.
    payload = %{"fail" => true, "ppid" => self() |> :erlang.term_to_binary() |> Base.encode64()}
    TestJob.enqueue(payload, queue: @queue)
    # 1 normal + 10 retries = 11
    JobTestHelper.wait_for_perform(11)
    assert_received :on_reject_callback_called
    GenServer.stop(worker)
  end

  test "executes the on_reject callback only when rejected" do
    reset_test_job_retry_interval(5)
    worker = start_worker()
    payload = %{"fail" => true, "ppid" => self() |> :erlang.term_to_binary() |> Base.encode64()}
    TestJob.enqueue(payload, queue: @queue)
    # Only the first (failing but not yet rejected) attempt has run here.
    JobTestHelper.wait_for_perform()
    refute_received :on_reject_callback_called
    GenServer.stop(worker)
  end
end
describe "retry" do
  test "sends failed job to retry queue" do
    worker = start_worker()
    [main, retry, rejected, _scheduled] = all_queues()
    payload = %{"fail" => true}
    TestJob.enqueue(payload, queue: @queue)
    JobTestHelper.wait_for_perform()
    conn = Connection.get_connection!()
    %{message_count: main_count} = Queue.state(conn, main)
    %{message_count: retry_count} = Queue.state(conn, retry)
    %{message_count: rejected_count} = Queue.state(conn, rejected)
    # First failure parks the message on the retry queue only.
    assert main_count == 0
    assert retry_count == 1
    assert rejected_count == 0
    GenServer.stop(worker)
  end

  # Stubs the retry interval (ms) via meck so retries happen fast in tests.
  def reset_test_job_retry_interval(interval) do
    :meck.new(JobTestHelper.RetryInterval, [:passthrough])
    :meck.expect(JobTestHelper.RetryInterval, :interval, fn -> interval end)
  end

  # Enqueues a failing job and waits until all retries are exhausted.
  def retry_max_times do
    # Sets up TestJob to retry shortly
    reset_test_job_retry_interval(5)
    payload = %{"fail" => true}
    TestJob.enqueue(payload, queue: @queue)
    JobTestHelper.wait_for_perform(11)
    # 1 normal + 10 retries = 11
    assert JobTestHelper.performed_count() == 11
  end

  test "retries max_retry times then sends to rejected queue" do
    worker = start_worker()
    [main, retry, rejected, _scheduled] = all_queues()
    retry_max_times()
    conn = Connection.get_connection!()
    %{message_count: main_count} = Queue.state(conn, main)
    %{message_count: retry_count} = Queue.state(conn, retry)
    %{message_count: rejected_count} = Queue.state(conn, rejected)
    assert main_count == 0
    assert retry_count == 0
    assert rejected_count == 1
    GenServer.stop(worker)
  end

  test "does not send to rejected queue if store_rejected_jobs is false" do
    worker = start_worker(%{store_rejected_jobs: false})
    [main, retry, rejected, _scheduled] = all_queues()
    retry_max_times()
    conn = Connection.get_connection!()
    %{message_count: main_count} = Queue.state(conn, main)
    %{message_count: retry_count} = Queue.state(conn, retry)
    %{message_count: rejected_count} = Queue.state(conn, rejected)
    # With storage disabled the exhausted job is dropped entirely.
    assert main_count == 0
    assert retry_count == 0
    assert rejected_count == 0
    GenServer.stop(worker)
  end

  test "sends to rejected queue if return_value is :reject" do
    worker = start_worker()
    [main, retry, rejected, _scheduled] = all_queues()
    payload = %{"reject" => true}
    TestJob.enqueue(payload, queue: @queue)
    JobTestHelper.wait_for_perform()
    conn = Connection.get_connection!()
    %{message_count: main_count} = Queue.state(conn, main)
    %{message_count: retry_count} = Queue.state(conn, retry)
    %{message_count: rejected_count} = Queue.state(conn, rejected)
    # An explicit :reject skips retries and goes straight to rejected.
    assert main_count == 0
    assert retry_count == 0
    assert rejected_count == 1
    GenServer.stop(worker)
  end
end
describe "delay" do
  test "invokes the job with the delay" do
    worker = start_worker()
    [main, _, _, scheduled] = all_queues()
    payload = %{"hello" => "world"}
    # Runs job in 500 ms
    TestJob.enqueue(payload, queue: @queue, delay: 500)
    :timer.sleep(100)
    # Before the delay elapses the job must not have run...
    assert JobTestHelper.performed_count() == 0
    conn = Connection.get_connection!()
    %{message_count: main_count} = Queue.state(conn, main)
    %{message_count: scheduled_count} = Queue.state(conn, scheduled)
    # ...and the message sits on the scheduled queue, not the main one.
    assert main_count == 0
    assert scheduled_count == 1
    JobTestHelper.wait_for_perform(1)
    assert JobTestHelper.performed_count() == 1
    %{message_count: main_count} = Queue.state(conn, main)
    %{message_count: scheduled_count} = Queue.state(conn, scheduled)
    assert main_count == 0
    assert scheduled_count == 0
    GenServer.stop(worker)
  end
end
# A message that is not valid JSON ("}") must be acked and counted as rejected
# without crashing the worker. Consumer.ack is stubbed so no broker is needed.
test "invalid payload" do
  :meck.expect(TaskBunny.Consumer, :ack, fn _, _, _ -> nil end)

  assert Worker.handle_info({:basic_deliver, "}", %{}}, %{
           host: :default,
           queue: @queue,
           concurrency: 1,
           store_rejected_jobs: true,
           channel: nil,
           runners: 0,
           job_stats: %{
             failed: 0,
             succeeded: 0,
             rejected: 0
           }
         }) ==
           {:noreply,
            %{
              host: :default,
              queue: @queue,
              concurrency: 1,
              store_rejected_jobs: true,
              channel: nil,
              runners: 0,
              job_stats: %{failed: 0, rejected: 1, succeeded: 0}
            }}
end
| 28.384615 | 98 | 0.638827 |
1c2740521bb1061df5eb0d270ada3728582d9e95 | 160 | ex | Elixir | lib/restlax.ex | kianmeng/restlax | 6cded7cd34c49216d182efe4f71e62321545678d | [
"MIT"
] | 1 | 2021-02-09T09:25:37.000Z | 2021-02-09T09:25:37.000Z | lib/restlax.ex | kianmeng/restlax | 6cded7cd34c49216d182efe4f71e62321545678d | [
"MIT"
] | 3 | 2021-05-17T01:33:35.000Z | 2022-01-27T00:23:31.000Z | lib/restlax.ex | kianmeng/restlax | 6cded7cd34c49216d182efe4f71e62321545678d | [
"MIT"
] | 1 | 2022-01-27T00:06:03.000Z | 2022-01-27T00:06:03.000Z | defmodule Restlax do
# Path to the README used as this module's documentation.
@readme_path "README.md"
# Mark the README as an external resource so the module recompiles when it changes.
@external_resource @readme_path
# Use the README (minus its top-level "# Title" heading) as the moduledoc.
@moduledoc File.read!(@readme_path) |> String.replace(~r/^# .+?\n/, "")
end
| 26.666667 | 73 | 0.69375 |
1c27411adce8a6223fe12039266142e6bebdfc5c | 286 | ex | Elixir | lib/action_for_children/repo.ex | cast-fuse/action-for-children | 3cded6e04abdaf13d4d1033bf1daa783bc0e5ef2 | [
"BSD-3-Clause"
] | null | null | null | lib/action_for_children/repo.ex | cast-fuse/action-for-children | 3cded6e04abdaf13d4d1033bf1daa783bc0e5ef2 | [
"BSD-3-Clause"
] | 57 | 2017-05-25T10:32:59.000Z | 2021-05-06T21:03:39.000Z | lib/action_for_children/repo.ex | cast-fuse/action-for-children | 3cded6e04abdaf13d4d1033bf1daa783bc0e5ef2 | [
"BSD-3-Clause"
] | null | null | null | defmodule ActionForChildren.Repo do
use Ecto.Repo, otp_app: :action_for_children
@doc """
Dynamically loads the repository url from the
DATABASE_URL environment variable.
"""
def init(_, opts) do
  # Resolved at runtime so the release picks up the current environment.
  url = System.get_env("DATABASE_URL")
  {:ok, Keyword.put(opts, :url, url)}
end
end
| 23.833333 | 66 | 0.72028 |
1c27438dfa3f21d1e0f6a70487eae2ae57f9176d | 1,263 | exs | Elixir | mix.exs | massivefermion/Enux | 19c95a0c75b3b2285452210d6e5c84b3376d8788 | [
"Apache-2.0"
] | 1 | 2021-07-10T07:16:13.000Z | 2021-07-10T07:16:13.000Z | mix.exs | massivefermion/enux | 19c95a0c75b3b2285452210d6e5c84b3376d8788 | [
"Apache-2.0"
] | null | null | null | mix.exs | massivefermion/enux | 19c95a0c75b3b2285452210d6e5c84b3376d8788 | [
"Apache-2.0"
] | null | null | null | defmodule Enux.MixProject do
use Mix.Project
@source_url "https://github.com/massivefermion/enux"
# Mix project configuration; ExDoc settings are extracted for readability.
def project do
  [
    app: :enux,
    version: "0.9.14",
    elixir: "~> 1.11",
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    source_url: @source_url,
    description: description(),
    package: package(),
    aliases: aliases(),
    docs: docs()
  ]
end

# ExDoc configuration used by `mix docs`.
defp docs do
  [
    main: "Enux",
    api_reference: true,
    source_url: @source_url,
    source_ref: "main",
    logo: "logo.png"
  ]
end
# One-line package description shown on hex.pm.
defp description,
  do:
    "Helper module to load, validate and document your app's configuration from env and json files at runtime."
# Hex package metadata (name, license, published files, maintainers, links).
defp package do
  [
    name: "enux",
    licenses: ["Apache-2.0"],
    files: ["lib", "mix.exs", "README*", "LICENSE"],
    maintainers: ["Shayan Javani"],
    links: %{"GitHub" => @source_url}
  ]
end
# Run "mix help compile.app" to learn about applications.
def application, do: [extra_applications: [:logger]]
# Run "mix help deps" to learn about dependencies.
defp deps, do: [{:ex_doc, "~> 0.24.2", only: :docs}]
# Custom mix task aliases.
defp aliases, do: [gen_docs: ["cmd MIX_ENV=docs mix docs"]]
end
| 20.704918 | 111 | 0.570071 |
1c2783c9af8e204a72cfa11ef9a93519c19fe829 | 955 | exs | Elixir | code/project/4/issues/mix.exs | alvarocamillont/introdu-o_elixir | 1d72d4f4b01d9312c4b066ce3c0fe8d9bfaaade1 | [
"MIT"
] | null | null | null | code/project/4/issues/mix.exs | alvarocamillont/introdu-o_elixir | 1d72d4f4b01d9312c4b066ce3c0fe8d9bfaaade1 | [
"MIT"
] | 1 | 2021-03-09T16:27:25.000Z | 2021-03-09T16:27:25.000Z | programming-elixir-book/code/project/4/issues/mix.exs | jordanhubbard/elixir-projects | dee341d672e83a45a17a4a85abd54a480f95c506 | [
"BSD-2-Clause"
] | null | null | null | #---
# Excerpted from "Programming Elixir ≥ 1.6",
# published by The Pragmatic Bookshelf.
# Copyrights apply to this code. It may not be used to create training material,
# courses, books, articles, and the like. Contact us if you are in doubt.
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/elixir16 for more book information.
#---
defmodule Issues.MixProject do
  use Mix.Project

  # Mix project configuration; `escript` makes `mix escript.build`
  # produce a self-contained command-line binary.
  def project do
    [
      app: :issues,
      escript: escript_config(),
      version: "0.1.0",
      elixir: "~> 1.6-dev",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # OTP applications started alongside this one.
  def application do
    [extra_applications: [:logger]]
  end

  # Third-party dependencies.
  defp deps do
    [
      {:httpoison, "~> 1.0.0"},
      {:poison, "~> 3.1"}
    ]
  end

  # Entry-point module for the escript (must define main/1).
  defp escript_config do
    [main_module: Issues.CLI]
  end
end
| 23.292683 | 85 | 0.586387 |
1c278e520a565a89f304ccb663fcde0d1197f20d | 3,618 | ex | Elixir | lib/sanbase/clickhouse/fees/fees.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | lib/sanbase/clickhouse/fees/fees.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | lib/sanbase/clickhouse/fees/fees.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule Sanbase.Clickhouse.Fees do
import Sanbase.Utils.Transform, only: [maybe_apply_function: 2]
alias Sanbase.Model.Project
alias Sanbase.ClickhouseRepo
def eth_fees_distribution(from, to, limit) do
  # Build the SQL with positional binds, run it, then fold rows into result maps.
  {sql, params} = eth_fees_distribution_query(from, to, limit)

  sql
  |> ClickhouseRepo.query_transform(params, & &1)
  |> maybe_apply_function(&value_fees_list_to_result/1)
end
def value_fees_list_to_result(data) do
  # Each row is [value, fees]; value is either a project slug or an address.
  rows = Enum.uniq_by(data, &Enum.at(&1, 0))

  # Resolve whichever first elements are known slugs into preloaded projects
  # (non-empty when at least one value is a slug rather than an address).
  keys = Enum.map(rows, &Enum.at(&1, 0))
  projects = Project.List.by_field(keys, :slug, preload?: true, preload: [:contract_addresses])
  lookup = contract_address_to_project_map(projects)

  rows
  |> enrich_data_with_projects(lookup)
  |> transform_data_group_by_same_entity()
  |> Enum.sort_by(& &1.fees, :desc)
end
# Attach project metadata to each [value, fees] row. When the value resolves to
# a known project the fees are credited to its slug, which lets a project with
# several contract addresses be grouped into a single entry later.
defp enrich_data_with_projects(rows, lookup) do
  Enum.map(rows, &to_entry(&1, lookup))
end

# Builds one result entry; unknown values keep the raw address instead of a slug.
defp to_entry([value, fees], lookup) do
  case Map.get(lookup, value) do
    %Project{} = project ->
      %{
        slug: project.slug,
        ticker: project.ticker,
        project: project,
        address: nil,
        fees: fees
      }

    _ ->
      %{slug: nil, ticker: nil, project: nil, address: value, fees: fees}
  end
end
# Collapse entries describing the same entity (slug when known, otherwise the
# address) into one entry whose fees are the group's total. With both slug and
# address present the slug wins, grouping multi-contract projects together.
defp transform_data_group_by_same_entity(entries) do
  entries
  |> Enum.group_by(fn entry -> entry.slug || entry.address end)
  |> Enum.map(fn {_entity, [first | _] = group} ->
    total = Enum.reduce(group, 0, fn entry, acc -> acc + entry.fees end)
    %{first | fees: total}
  end)
end
# Build a lookup keyed by slug and by each lowercased contract address,
# all pointing at the owning project.
defp contract_address_to_project_map(projects) do
  for %Project{} = project <- projects,
      key <- [project.slug | contract_address_keys(project)],
      into: %{} do
    {key, project}
  end
end

defp contract_address_keys(%Project{contract_addresses: addresses}) do
  Enum.map(addresses, fn %{address: address} -> String.downcase(address) end)
end
# Builds the ClickHouse SQL for the ETH fee distribution plus its positional
# bind parameters (?1 = from, ?2 = to, ?3 = limit). The inner query sums fee
# transfers per token contract, then joins asset metadata to resolve names;
# rows with no contract are attributed to 'ethereum' itself.
defp eth_fees_distribution_query(from, to, limit) do
  # ClickHouse expects unix timestamps for toDateTime binds.
  from_unix = DateTime.to_unix(from)
  to_unix = DateTime.to_unix(to)

  query = """
  SELECT
    multiIf(contract = '', 'ethereum',isNull(name), contract, name) AS asset,
    fees
  FROM
  (
    SELECT name, contract, fees
    FROM
    (
      SELECT assetRefId, contract, sum(value) / 1e18 AS fees
      FROM
      (
        SELECT transactionHash, value
        FROM eth_transfers FINAL
        PREWHERE dt >= toDateTime(?1) AND dt < toDateTime(?2) AND type = 'fee'
      )
      ANY LEFT JOIN
      (
        SELECT transactionHash, contract, assetRefId
        FROM erc20_transfers_union
        WHERE dt >= toDateTime(?1) and dt < toDateTime(?2)
      ) USING (transactionHash)
      GROUP BY assetRefId, contract
      ORDER BY fees DESC
      LIMIT ?3
    )
    ALL LEFT JOIN
    (
      SELECT name, asset_ref_id AS assetRefId
      FROM asset_metadata FINAL
    ) USING (assetRefId)
  ) ORDER BY fees DESC
  """

  {query, [from_unix, to_unix, limit]}
end
end
| 32.594595 | 86 | 0.628248 |
1c278fce68a2ffec126ce18846d6f6cd8de47ffb | 391 | ex | Elixir | lib/hl7/2.3/segments/apr.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.3/segments/apr.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.3/segments/apr.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_3.Segments.APR do
@moduledoc false
require Logger
alias HL7.V2_3.{DataTypes}
use HL7.Segment,
fields: [
segment: nil,
time_selection_criteria: DataTypes.Scv,
resource_selection_criteria: DataTypes.Scv,
location_selection_criteria: DataTypes.Scv,
slot_spacing_criteria: nil,
filler_override_criteria: DataTypes.Scv
]
end
| 23 | 49 | 0.721228 |
1c27960e4721367a7cf0487190bc21b1c584bc87 | 4,008 | exs | Elixir | test/etherscan/util_test.exs | devlin-Smith/etherscan | c56d6f3f8eefa49c9ac8884461387a3af44fac03 | [
"MIT"
] | 14 | 2017-10-20T03:10:44.000Z | 2021-10-04T03:00:07.000Z | test/etherscan/util_test.exs | devlin-Smith/etherscan | c56d6f3f8eefa49c9ac8884461387a3af44fac03 | [
"MIT"
] | 5 | 2017-10-20T03:36:37.000Z | 2018-03-25T00:58:01.000Z | test/etherscan/util_test.exs | devlin-Smith/etherscan | c56d6f3f8eefa49c9ac8884461387a3af44fac03 | [
"MIT"
defmodule Etherscan.UtilTest do
  # Unit tests for Etherscan.Util: wei-denomination conversion, float
  # formatting, hex <-> integer helpers, and tuple tagging.
  use ExUnit.Case, async: true

  alias Etherscan.Util

  describe "format_balance/1" do
    test "with valid balance converts to ether" do
      # 10^22 wei == 10,000 ether (1 ether = 10^18 wei).
      assert Util.format_balance("10000000000000000000000") == "10000.0"
    end
  end

  describe "convert/2" do
    # Each denomination test exercises both integer and string input,
    # which convert/2 is expected to accept interchangeably.
    test "converts wei to kwei" do
      assert Util.convert(1000, denomination: :kwei) == "1.0"
      assert Util.convert("1000", denomination: :kwei) == "1.0"
    end

    test "converts wei to mwei" do
      assert Util.convert(1_000_000, denomination: :mwei) == "1.0"
      assert Util.convert("1000000", denomination: :mwei) == "1.0"
    end

    # gwei/shannon/nano are aliases for the same 10^9 wei unit.
    test "converts wei to gwei/shannon/nano" do
      assert Util.convert(1_000_000_000, denomination: :gwei) == "1.0"
      assert Util.convert(1_000_000_000, denomination: :shannon) == "1.0"
      assert Util.convert(1_000_000_000, denomination: :nano) == "1.0"
      assert Util.convert("1000000000", denomination: :gwei) == "1.0"
      assert Util.convert("1000000000", denomination: :shannon) == "1.0"
      assert Util.convert("1000000000", denomination: :nano) == "1.0"
    end

    # szabo/micro are aliases for the 10^12 wei unit.
    test "converts wei to szabo/micro" do
      assert Util.convert(1_000_000_000_000, denomination: :szabo) == "1.0"
      assert Util.convert(1_000_000_000_000, denomination: :micro) == "1.0"
      assert Util.convert("1000000000000", denomination: :szabo) == "1.0"
      assert Util.convert("1000000000000", denomination: :micro) == "1.0"
    end

    # finney/milli are aliases for the 10^15 wei unit.
    test "converts wei to finney/milli" do
      assert Util.convert(1_000_000_000_000_000, denomination: :finney) == "1.0"
      assert Util.convert(1_000_000_000_000_000, denomination: :milli) == "1.0"
      assert Util.convert("1000000000000000", denomination: :finney) == "1.0"
      assert Util.convert("1000000000000000", denomination: :milli) == "1.0"
    end

    test "converts wei to ether" do
      assert Util.convert(1_000_000_000_000_000_000, denomination: :ether) == "1.0"
      assert Util.convert("1000000000000000000", denomination: :ether) == "1.0"
    end
  end

  describe "pretty_float/1" do
    test "returns float as a string" do
      # Full precision is preserved when no decimal count is given.
      assert Util.pretty_float(0.000034140807) == "0.000034140807"
    end
  end

  describe "pretty_float/2" do
    test "returns float as a string" do
      # Second argument truncates/rounds to the given number of decimals.
      assert Util.pretty_float(0.000034140807, 8) == "0.00003414"
    end

    test "with string float" do
      assert Util.pretty_float("0.000034140807", 8) == "0.00003414"
    end
  end

  describe "wrap/2" do
    test "returns a tuple with the element and tag" do
      assert {:ok, "hello"} = Util.wrap("hello", :ok)
    end
  end

  describe "hex_to_number/1" do
    test "with valid hex converts to number" do
      assert {:ok, 0} = Util.hex_to_number("0x0")
      assert {:ok, 211} = Util.hex_to_number("0xd3")
      assert {:ok, 5_531_758} = Util.hex_to_number("0x54686e")
    end

    test "with invalid hex returns error" do
      # The offending input is echoed back inside the error string.
      assert {:error, "invalid hex - \"0x\""} = Util.hex_to_number("0x")
      assert {:error, "invalid hex - \"hello\""} = Util.hex_to_number("hello")
      assert {:error, "invalid hex - \"0xhello\""} = Util.hex_to_number("0xhello")
      assert {:error, "invalid hex - \"0d2d23d23\""} = Util.hex_to_number("0d2d23d23")
    end
  end

  describe "safe_hex_to_number/1" do
    test "with valid hex converts to number" do
      assert Util.safe_hex_to_number("0x0") == 0
      assert Util.safe_hex_to_number("0xd3") == 211
      assert Util.safe_hex_to_number("0x54686e") == 5_531_758
    end

    # "safe" variant falls back to 0 instead of returning an error tuple.
    test "with invalid hex returns 0" do
      assert Util.safe_hex_to_number("0x") == 0
      assert Util.safe_hex_to_number("hello") == 0
      assert Util.safe_hex_to_number("0xhello") == 0
      assert Util.safe_hex_to_number("0d2d23d23") == 0
    end
  end

  describe "number_to_hex/1" do
    test "converts to hex" do
      assert Util.number_to_hex(4_735_742) == "0x4842FE"
    end

    test "with string number" do
      assert Util.number_to_hex("4735742") == "0x4842FE"
    end
  end
end
| 35.157895 | 86 | 0.660679 |
1c279e3cb4c47e5cadc90be34aebac5437f360b2 | 1,532 | exs | Elixir | apps/game_of_life_web/mix.exs | tslim/elixir-phoenix-life | c86fd69198c9193bdcd5b3b1b504f41f67268139 | [
"MIT"
] | 1 | 2016-09-28T07:38:55.000Z | 2016-09-28T07:38:55.000Z | apps/game_of_life_web/mix.exs | tslim/elixir-phoenix-life | c86fd69198c9193bdcd5b3b1b504f41f67268139 | [
"MIT"
] | null | null | null | apps/game_of_life_web/mix.exs | tslim/elixir-phoenix-life | c86fd69198c9193bdcd5b3b1b504f41f67268139 | [
"MIT"
defmodule GameOfLife.Web.Mixfile do
  use Mix.Project

  # Umbrella child app: build artifacts, config, deps and the lockfile all
  # live at the umbrella root (../..).
  def project do
    [
      app: :game_of_life_web,
      version: "0.0.1",
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.3",
      elixirc_paths: elixirc_paths(Mix.env),
      compilers: [:phoenix, :gettext] ++ Mix.compilers,
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      # Run all coveralls tasks in the :test environment.
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ]
    ]
  end

  # OTP application configuration.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {GameOfLife.Web, []},
      applications: [:phoenix, :phoenix_pubsub, :phoenix_html, :cowboy, :logger, :gettext]
    ]
  end

  # Compile test support helpers only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
  defp elixirc_paths(_), do: ["lib", "web"]

  # Project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.2.1"},
      {:phoenix_pubsub, "~> 1.0"},
      {:phoenix_html, "~> 2.6"},
      {:phoenix_live_reload, "~> 1.0", only: :dev},
      {:gettext, "~> 0.11"},
      {:cowboy, "~> 1.0"},
      {:game_of_life, in_umbrella: true},
      {:excoveralls, "~> 0.5", only: :test},
      {:credo, "~> 0.4", only: [:dev, :test]}
    ]
  end
end
| 31.265306 | 121 | 0.597911 |
1c27a27fafbe82f0c909c2cd0cb9ee204c3d2aaa | 1,925 | ex | Elixir | lib/pixie/adapters/cowboy/websocket_handler.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | lib/pixie/adapters/cowboy/websocket_handler.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | lib/pixie/adapters/cowboy/websocket_handler.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
defmodule Pixie.Adapter.Cowboy.WebsocketHandler do
  @moduledoc false
  # Cowboy websocket handler bridging websocket frames to Pixie.Protocol.
  # Protocol responses are queued to this process via {:deliver, messages}
  # and flushed to the client in batches of @max_messages_per_frame.

  require Logger

  @behaviour :cowboy_websocket_handler

  # Maximum number of queued messages JSON-encoded into a single frame;
  # the remainder is re-queued (see websocket_info/3 and deliver_messages/1).
  @max_messages_per_frame 64

  def init(_transport, req, opts) do
    init(req, opts)
  end

  # Always upgrade the HTTP connection to the websocket protocol.
  def init(req, opts) do
    {:upgrade, :protocol, :cowboy_websocket, req, opts}
  end

  def upgrade(req, env, _handler, handler_opts) do
    :cowboy_websocket.upgrade(req, env, __MODULE__, handler_opts)
  end

  def websocket_init(_transport, req, state) do
    {:ok, req, state}
  end

  # Text frames carry JSON payloads; decode and hand them to the protocol
  # layer. Any list of responses is scheduled for delivery back to this
  # socket; an unrecognized result closes the connection.
  def websocket_handle({:text, data}, req, state) do
    data = Poison.decode!(data)

    case Pixie.Protocol.handle(data) do
      :ok ->
        {:ok, req, state}

      [] ->
        {:ok, req, state}

      responses when is_list(responses) ->
        # `self()` (parenthesized): bare `self` is deprecated and warns on
        # modern Elixir.
        send(self(), {:deliver, responses})
        {:ok, req, state}

      unknown ->
        Logger.debug("Unknown response from Protocol: #{inspect unknown}")
        Logger.debug("closing socket.")
        {:shutdown, req, state}
    end
  end

  # Non-text frames (ping/pong, binary, ...) are delegated to cowboy.
  def websocket_handle(frame, req, state) do
    :cowboy_websocket.handle(frame, req, state)
  end

  def websocket_info({:deliver, []}, req, state) do
    {:ok, req, state}
  end

  # Flush up to @max_messages_per_frame queued messages in one frame and
  # re-queue the rest for a subsequent frame.
  def websocket_info({:deliver, messages}, req, state) do
    {to_deliver, rest} = Enum.split(messages, @max_messages_per_frame)

    json =
      to_deliver
      |> monitor_deliveries()
      |> Pixie.JsonEncoderCache.encode!()

    deliver_messages(rest)
    {:reply, {:text, json}, req, state}
  end

  def websocket_info(:close, req, state) do
    {:shutdown, req, state}
  end

  # Ignore any other process messages.
  def websocket_info(_msg, req, state) do
    {:ok, req, state}
  end

  def websocket_terminate(_msg, _req, _state) do
    :ok
  end

  # Notify Pixie.Monitor of every message about to be delivered;
  # returns the list unchanged so it can sit mid-pipeline.
  defp monitor_deliveries(to_deliver) do
    Enum.each(to_deliver, fn m ->
      Pixie.Monitor.delivered_message(m)
    end)

    to_deliver
  end

  defp deliver_messages([]) do
    nil
  end

  defp deliver_messages(to_deliver) do
    send(self(), {:deliver, to_deliver})
  end
end
| 22.126437 | 73 | 0.663377 |
1c27a3a4c5ae9484f601c62d06106df36ebb66e4 | 2,061 | exs | Elixir | test/acceptance/html/inline_ial_test.exs | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | test/acceptance/html/inline_ial_test.exs | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | test/acceptance/html/inline_ial_test.exs | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
defmodule Acceptance.Html.InlineIalTest do
  # Acceptance tests for inline IAL ({: ...}) attribute handling in the
  # HTML renderer: valid attributes are attached to the preceding element;
  # invalid ones produce {:error, html, warnings}.
  use ExUnit.Case, async: true

  import Support.Helpers, only: [as_html: 1]

  describe "IAL no errors" do
    test "link with simple ial" do
      markdown = "[link](url){: .classy}"
      html = "<p><a href=\"url\" class=\"classy\">link</a></p>\n"
      messages = []

      assert as_html(markdown) == {:ok, html, messages}
    end

    test "code with simple ial" do
      markdown = "`some code`{: .classy}"
      html = "<p><code class=\"inline classy\">some code</code></p>\n"
      messages = []

      assert as_html(markdown) == {:ok, html, messages}
    end

    test "img with simple ial" do
      # NOTE(review): this markdown literal looks truncated — the asserted
      # HTML implies an image source such as "![link](url){:#thatsme}";
      # confirm against the upstream file.
      markdown = "{:#thatsme}"
      html = "<p><img src=\"url\" alt=\"link\" id=\"thatsme\" /></p>\n"
      messages = []

      assert as_html(markdown) == {:ok, html, messages}
    end

    # A side effect
    test "html and complex ial" do
      markdown = "<span xi=\"ypsilon\">{:alpha=beta .greek }τι κανις</span>"
      html = "<p><span xi=\"ypsilon\" alpha=\"beta\" class=\"greek\">τι κανις</span></p>\n"
      messages = []

      assert as_html(markdown) == {:ok, html, messages}
    end

    test "not attached" do
      # IAL separated from the link by a space still attaches to it.
      markdown = "[link](url) {:lang=fr}"
      html = "<p><a href=\"url\" lang=\"fr\">link</a></p>\n"
      messages = []

      assert as_html(markdown) == {:ok, html, messages}
    end
  end

  describe "Error Handling" do
    test "illegal format line one" do
      markdown = "[link](url){:incorrect}"
      html = "<p><a href=\"url\">link</a></p>\n"
      messages = [{:warning, 1, "Illegal attributes [\"incorrect\"] ignored in IAL"}]

      assert as_html(markdown) == {:error, html, messages}
    end

    test "illegal format line two" do
      # Warning carries the line number (2) of the offending IAL.
      markdown = "a line\n[link](url) {:incorrect x=y}"
      html = "<p>a line\n<a href=\"url\" x=\"y\">link</a></p>\n"
      messages = [{:warning, 2, "Illegal attributes [\"incorrect\"] ignored in IAL"}]

      assert as_html(markdown) == {:error, html, messages}
    end
  end
end

# SPDX-License-Identifier: Apache-2.0
| 29.869565 | 91 | 0.572538 |
1c27a77591819570f159ea891d6ba75355c80eac | 1,818 | ex | Elixir | phoenix/web/controllers/post_controller.ex | pfac/generated | 3611d887bb4eba1ef63b1f16ed1dba54ce94f341 | [
"MIT"
] | null | null | null | phoenix/web/controllers/post_controller.ex | pfac/generated | 3611d887bb4eba1ef63b1f16ed1dba54ce94f341 | [
"MIT"
] | null | null | null | phoenix/web/controllers/post_controller.ex | pfac/generated | 3611d887bb4eba1ef63b1f16ed1dba54ce94f341 | [
"MIT"
defmodule Phoenix.PostController do
  use Phoenix.Web, :controller

  alias Phoenix.Post

  # Require a "post" key in the params for the mutating actions.
  plug :scrub_params, "post" when action in [:create, :update]

  # List every post.
  def index(conn, _params) do
    render(conn, "index.html", post: Repo.all(Post))
  end

  # Render an empty changeset for the creation form.
  def new(conn, _params) do
    render(conn, "new.html", changeset: Post.changeset(%Post{}))
  end

  # Insert a new post, or re-render the form with validation errors.
  def create(conn, %{"post" => post_params}) do
    result =
      %Post{}
      |> Post.changeset(post_params)
      |> Repo.insert()

    case result do
      {:ok, _post} ->
        conn
        |> put_flash(:info, "Post created successfully.")
        |> redirect(to: post_path(conn, :index))

      {:error, changeset} ->
        render(conn, "new.html", changeset: changeset)
    end
  end

  # Show a single post; raises (404) when the id does not exist.
  def show(conn, %{"id" => id}) do
    record = Repo.get!(Post, id)
    render(conn, "show.html", post: record)
  end

  # Render the edit form pre-filled with the post's current values.
  def edit(conn, %{"id" => id}) do
    record = Repo.get!(Post, id)
    render(conn, "edit.html", post: record, changeset: Post.changeset(record))
  end

  # Apply the submitted changes, or re-render the form with errors.
  def update(conn, %{"id" => id, "post" => post_params}) do
    record = Repo.get!(Post, id)

    case record |> Post.changeset(post_params) |> Repo.update() do
      {:ok, updated} ->
        conn
        |> put_flash(:info, "Post updated successfully.")
        |> redirect(to: post_path(conn, :show, updated))

      {:error, changeset} ->
        render(conn, "edit.html", post: record, changeset: changeset)
    end
  end

  def delete(conn, %{"id" => id}) do
    record = Repo.get!(Post, id)

    # Here we use delete! (with a bang) because we expect
    # it to always work (and if it does not, it will raise).
    Repo.delete!(record)

    conn
    |> put_flash(:info, "Post deleted successfully.")
    |> redirect(to: post_path(conn, :index))
  end
end
| 26.735294 | 67 | 0.611661 |
1c27ad2899041c3e80bb8f4232c0b1664b0a9109 | 682 | ex | Elixir | apps/ewallet/lib/ewallet/scheduler.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet/lib/ewallet/scheduler.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet/lib/ewallet/scheduler.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
# Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

defmodule EWallet.Scheduler do
  @moduledoc false

  # Cron-style job scheduler backed by Quantum. Per Quantum's `otp_app`
  # convention, job definitions are read from the :ewallet application
  # environment (see the application's config files).
  use Quantum.Scheduler, otp_app: :ewallet
end
| 35.894737 | 74 | 0.766862 |
1c27afdb9d83248ca9eb035af2fe0794547bc7fd | 4,228 | ex | Elixir | lib/chat_api_web/controllers/slack_controller.ex | rlanga/papercups | 358ca46c344908585cd0214a0de96e5676120c68 | [
"MIT"
] | 2 | 2020-09-21T07:27:13.000Z | 2021-12-20T13:23:56.000Z | lib/chat_api_web/controllers/slack_controller.ex | rlanga/papercups | 358ca46c344908585cd0214a0de96e5676120c68 | [
"MIT"
] | null | null | null | lib/chat_api_web/controllers/slack_controller.ex | rlanga/papercups | 358ca46c344908585cd0214a0de96e5676120c68 | [
"MIT"
defmodule ChatApiWeb.SlackController do
  @moduledoc """
  Controller for the Slack integration: completes the OAuth handshake,
  exposes the stored authorization, and receives Slack event webhooks.
  """

  use ChatApiWeb, :controller

  require Logger

  alias ChatApi.{Messages, Slack, SlackAuthorizations, SlackConversationThreads}

  action_fallback ChatApiWeb.FallbackController

  @doc """
  Exchanges the temporary OAuth `code` for an access token and persists the
  resulting Slack authorization for the current user's account.

  Raises when Slack rejects the code or returns an unexpected payload
  (the fallback controller turns that into an error response).
  """
  def oauth(conn, %{"code" => code}) do
    # SECURITY: the OAuth code and the token-exchange response contain
    # credentials (access token, webhook URL), so neither is logged here.
    # Crash on a failed exchange — "let it crash" at this boundary.
    {:ok, response} = Slack.get_access_token(code)

    %{body: body} = response

    if Map.get(body, "ok") do
      # Destructure the OAuth v2 response; any missing key falls through
      # to the `else` clause and raises.
      with %{account_id: account_id} <- conn.assigns.current_user,
           %{
             "access_token" => access_token,
             "app_id" => app_id,
             "bot_user_id" => bot_user_id,
             "scope" => scope,
             "token_type" => token_type,
             "authed_user" => authed_user,
             "team" => team,
             "incoming_webhook" => incoming_webhook
           } <- body,
           %{"id" => authed_user_id} <- authed_user,
           %{"id" => team_id, "name" => team_name} <- team,
           %{
             "channel" => channel,
             "channel_id" => channel_id,
             "configuration_url" => configuration_url,
             "url" => webhook_url
           } <- incoming_webhook do
        params = %{
          account_id: account_id,
          access_token: access_token,
          app_id: app_id,
          authed_user_id: authed_user_id,
          bot_user_id: bot_user_id,
          scope: scope,
          token_type: token_type,
          channel: channel,
          channel_id: channel_id,
          configuration_url: configuration_url,
          team_id: team_id,
          team_name: team_name,
          webhook_url: webhook_url
        }

        SlackAuthorizations.create_or_update(account_id, params)

        json(conn, %{data: %{ok: true}})
      else
        _ ->
          raise "Unrecognized OAuth response"
      end
    else
      raise "OAuth access denied"
    end
  end

  @doc """
  Returns the Slack authorization for the current user's account,
  or `nil` data when the account has not been connected to Slack.
  """
  def authorization(conn, _payload) do
    with %{account_id: account_id} <- conn.assigns.current_user do
      auth = SlackAuthorizations.get_authorization_by_account(account_id)

      case auth do
        nil ->
          json(conn, %{data: nil})

        _ ->
          # Only non-sensitive fields are exposed (no access token).
          json(conn, %{
            data: %{
              created_at: auth.inserted_at,
              channel: auth.channel,
              configuration_url: auth.configuration_url,
              team_name: auth.team_name
            }
          })
      end
    end
  end

  @doc """
  Entry point for Slack's Events API. Answers URL-verification challenges
  and dispatches message events; always responds 200 so Slack does not
  retry deliveries.
  """
  def webhook(conn, payload) do
    # Payloads can contain message contents, so log at debug level only
    # (was info-level with a TODO to downgrade).
    Logger.debug("Payload from Slack webhook: #{inspect(payload)}")

    case payload do
      %{"event" => event} ->
        handle_event(event)
        send_resp(conn, 200, "")

      %{"challenge" => challenge} ->
        send_resp(conn, 200, challenge)

      _ ->
        send_resp(conn, 200, "")
    end
  end

  # Don't do anything on bot events for now (avoids reacting to our own
  # bot's messages).
  defp handle_event(%{"bot_id" => _bot_id} = _event) do
    nil
  end

  # A threaded message in a connected channel: mirror it into the linked
  # conversation and broadcast it to subscribed clients.
  defp handle_event(
         %{"type" => "message", "text" => text, "thread_ts" => thread_ts, "channel" => channel} =
           event
       ) do
    Logger.debug("Handling Slack event: #{inspect(event)}")

    with {:ok, conversation} <- get_thread_conversation(thread_ts, channel) do
      %{id: conversation_id, account_id: account_id, assignee_id: assignee_id} = conversation

      params = %{
        "body" => text,
        "conversation_id" => conversation_id,
        "account_id" => account_id,
        # TODO: map Slack users to internal users eventually?
        # (Currently we just assume the assignee is always the one responding)
        "user_id" => assignee_id
      }

      {:ok, message} = Messages.create_message(params)
      result = ChatApiWeb.MessageView.render("message.json", message: message)
      ChatApiWeb.Endpoint.broadcast!("conversation:" <> conversation.id, "shout", result)
    end
  end

  # Ignore every other event type.
  defp handle_event(_), do: nil

  # Looks up the conversation linked to a Slack thread, tagged for `with`.
  defp get_thread_conversation(thread_ts, channel) do
    case SlackConversationThreads.get_by_slack_thread_ts(thread_ts, channel) do
      %{conversation: conversation} -> {:ok, conversation}
      _ -> {:error, "Not found"}
    end
  end
end
| 29.158621 | 97 | 0.588694 |
1c27b82a501166a5831fb7f1d24018b9e60f142d | 1,061 | ex | Elixir | lib/lib_lat_lon/data/bounds.ex | devoutsalsa/lib_lat_lon | a9fef01e67ad92f1b5ff042173f6986ad6f5fd48 | [
"MIT"
] | 11 | 2018-01-11T13:13:21.000Z | 2021-04-18T09:48:00.000Z | lib/lib_lat_lon/data/bounds.ex | devoutsalsa/lib_lat_lon | a9fef01e67ad92f1b5ff042173f6986ad6f5fd48 | [
"MIT"
] | 1 | 2021-04-18T02:54:11.000Z | 2021-04-18T02:54:11.000Z | lib/lib_lat_lon/data/bounds.ex | devoutsalsa/lib_lat_lon | a9fef01e67ad92f1b5ff042173f6986ad6f5fd48 | [
"MIT"
] | 2 | 2019-11-05T12:14:36.000Z | 2021-04-17T17:03:07.000Z | defmodule LibLatLon.Bounds do
@moduledoc """
Convenient storage for geo bounds.
"""
@typedoc """
The `Bouds` struct has two fields (`from` and `to`).
* `from` denoting the _northeast_ boundary, and
* `to` denoting the _southwest_ boundary.
# Example
_GoogleMaps API_ returns the same structure from geocoding requests:
%{...
"geometry" => %{
"bounds" => %{
"northeast" => %{"lat" => 41.3884209, "lng" => 2.1982486},
"southwest" => %{"lat" => 41.3874997, "lng" => 2.1970767}
}
...
"""
@type t :: %__MODULE__{from: LibLatLon.Coords.t(), to: LibLatLon.Coords.t()}
@fields ~w|from to|a
defstruct @fields
@doc false
def from_lat_lon([lat1, lat2, lon1, lon2]) do
[lat1, lat2, lon1, lon2] = Enum.map([lat1, lat2, lon1, lon2], &LibLatLon.Utils.strict_float/1)
with {:ok, from} <- LibLatLon.Coords.coordinate({lat1, lon1}),
{:ok, to} <- LibLatLon.Coords.coordinate({lat2, lon2}) do
{:ok, %__MODULE__{from: from, to: to}}
end
end
end
| 25.878049 | 98 | 0.587182 |
1c283ba799011583defb4673e04c2ac41c9e1a57 | 1,265 | ex | Elixir | lib/mix/tasks/hex_publish.ex | minhajuddin/http_parser | c5aba3cde237bc66d9d88e1248ad052c8a488a67 | [
"MIT"
] | null | null | null | lib/mix/tasks/hex_publish.ex | minhajuddin/http_parser | c5aba3cde237bc66d9d88e1248ad052c8a488a67 | [
"MIT"
] | null | null | null | lib/mix/tasks/hex_publish.ex | minhajuddin/http_parser | c5aba3cde237bc66d9d88e1248ad052c8a488a67 | [
"MIT"
defmodule Mix.Tasks.HexPublish do
  use Mix.Task

  @shortdoc "Publish package to hex.pm, create a git tag and push it to GitHub"

  @moduledoc """
  hex_release uses shipit.
  It performs many sanity checks before pushing the hex package.
  Check out https://github.com/wojtekmach/shipit for more details
  """

  # With no arguments: ship the current project version from master.
  def run([]) do
    ensure_shipit_installed!()
    Mix.Task.run("shipit", ["master", current_version()])
  end

  # Any arguments are an error — this task takes none.
  def run(_) do
    Mix.raise("""
    Invalid args.
    Usage:
        mix hex_release
    """)
  end

  # Fails with instructions when the `shipit` archive is not installed.
  defp ensure_shipit_installed! do
    loadpaths!()
    Mix.Task.load_all()

    if is_nil(Mix.Task.get("shipit")) do
      # Fixed typo in user-facing message ("Fore" -> "For").
      Mix.raise("""
      You don't seem to have the shipit mix task installed on your computer.
      Install it using:
          mix archive.install hex shipit
      For more info go to: https://github.com/wojtekmach/shipit
      """)
    end
  end

  # Version string declared in the host project's mix.exs.
  defp current_version do
    Mix.Project.config()[:version]
  end

  # Copied from Mix.Tasks.Help
  # Loadpaths without checks because tasks may be defined in deps.
  defp loadpaths! do
    Mix.Task.run("loadpaths", ["--no-elixir-version-check", "--no-deps-check", "--no-archives-check"])
    Mix.Task.reenable("loadpaths")
    Mix.Task.reenable("deps.loadpaths")
  end
end
| 23 | 101 | 0.664822 |
1c28439c33b4c119faef12fad6f02e2d823f9a6f | 5,839 | exs | Elixir | .credo.exs | JohnKacz/phoenix_datatables | 747b184972614d87aeb0cd644593951703d718db | [
"MIT"
] | 12 | 2017-10-01T10:37:11.000Z | 2019-08-09T07:02:02.000Z | .credo.exs | JohnKacz/phoenix_datatables | 747b184972614d87aeb0cd644593951703d718db | [
"MIT"
] | 8 | 2017-11-07T19:53:02.000Z | 2021-01-18T22:15:35.000Z | .credo.exs | JohnKacz/phoenix_datatables | 747b184972614d87aeb0cd644593951703d718db | [
"MIT"
# This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
# NOTE: this file is plain Elixir data evaluated by `mix credo`; the map
# below is the whole configuration.
%{
  #
  # You can have as many configs as you like in the `configs:` field.
  configs: [
    %{
      #
      # Run any exec using `mix credo -C <name>`. If no exec name is given
      # "default" is used.
      #
      name: "default",
      #
      # These are the files included in the analysis:
      files: %{
        #
        # You can give explicit globs or simply directories.
        # In the latter case `**/*.{ex,exs}` will be used.
        #
        included: ["lib/", "src/", "web/", "apps/"],
        excluded: [~r"/_build/", ~r"/deps/"]
      },
      #
      # If you create your own checks, you must specify the source files for
      # them here, so they can be loaded by Credo before running the analysis.
      #
      requires: [],
      #
      # If you want to enforce a style guide and need a more traditional linting
      # experience, you can change `strict` to `true` below:
      #
      strict: true,
      #
      # If you want to use uncolored output by default, you can change `color`
      # to `false` below:
      #
      color: true,
      #
      # You can customize the parameters of any check by adding a second element
      # to the tuple.
      #
      # To disable a check put `false` as second element:
      #
      #     {Credo.Check.Design.DuplicatedCode, false}
      #
      checks: [
        {Credo.Check.Consistency.ExceptionNames},
        {Credo.Check.Consistency.LineEndings},
        {Credo.Check.Consistency.ParameterPatternMatching},
        {Credo.Check.Consistency.SpaceAroundOperators},
        {Credo.Check.Consistency.SpaceInParentheses},
        {Credo.Check.Consistency.TabsOrSpaces},

        # For some checks, like AliasUsage, you can only customize the priority
        # Priority values are: `low, normal, high, higher`
        #
        {Credo.Check.Design.AliasUsage, priority: :low},

        # For others you can set parameters

        # If you don't want the `setup` and `test` macro calls in ExUnit tests
        # or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
        # set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
        #
        {Credo.Check.Design.DuplicatedCode, excluded_macros: []},

        # You can also customize the exit_status of each check.
        # If you don't want TODO comments to cause `mix credo` to fail, just
        # set this value to 0 (zero).
        #
        {Credo.Check.Design.TagTODO, exit_status: 2},
        {Credo.Check.Design.TagFIXME},

        {Credo.Check.Readability.FunctionNames},
        {Credo.Check.Readability.LargeNumbers},
        {Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 80},
        {Credo.Check.Readability.ModuleAttributeNames},
        {Credo.Check.Readability.ModuleDoc},
        {Credo.Check.Readability.ModuleNames},
        {Credo.Check.Readability.ParenthesesOnZeroArityDefs},
        {Credo.Check.Readability.ParenthesesInCondition},
        {Credo.Check.Readability.PredicateFunctionNames},
        {Credo.Check.Readability.PreferImplicitTry},
        {Credo.Check.Readability.RedundantBlankLines},
        {Credo.Check.Readability.StringSigils},
        {Credo.Check.Readability.TrailingBlankLine},
        {Credo.Check.Readability.TrailingWhiteSpace},
        {Credo.Check.Readability.VariableNames},
        {Credo.Check.Readability.Semicolons},
        {Credo.Check.Readability.SpaceAfterCommas},

        {Credo.Check.Refactor.DoubleBooleanNegation},
        {Credo.Check.Refactor.CondStatements},
        {Credo.Check.Refactor.CyclomaticComplexity},
        {Credo.Check.Refactor.FunctionArity},
        {Credo.Check.Refactor.LongQuoteBlocks},
        {Credo.Check.Refactor.MatchInCondition},
        {Credo.Check.Refactor.NegatedConditionsInUnless},
        {Credo.Check.Refactor.NegatedConditionsWithElse},
        {Credo.Check.Refactor.Nesting},
        {Credo.Check.Refactor.PipeChainStart},
        {Credo.Check.Refactor.UnlessWithElse},

        {Credo.Check.Warning.BoolOperationOnSameValues},
        {Credo.Check.Warning.IExPry},
        {Credo.Check.Warning.IoInspect},
        {Credo.Check.Warning.LazyLogging},
        {Credo.Check.Warning.OperationOnSameValues},
        {Credo.Check.Warning.OperationWithConstantResult},
        {Credo.Check.Warning.UnusedEnumOperation},
        {Credo.Check.Warning.UnusedFileOperation},
        {Credo.Check.Warning.UnusedKeywordOperation},
        {Credo.Check.Warning.UnusedListOperation},
        {Credo.Check.Warning.UnusedPathOperation},
        {Credo.Check.Warning.UnusedRegexOperation},
        {Credo.Check.Warning.UnusedStringOperation},
        {Credo.Check.Warning.UnusedTupleOperation},
        {Credo.Check.Warning.RaiseInsideRescue},

        # Controversial and experimental checks (opt-in, just remove `, false`)
        #
        {Credo.Check.Refactor.ABCSize, false},
        {Credo.Check.Refactor.AppendSingleItem, false},
        {Credo.Check.Refactor.VariableRebinding, false},
        {Credo.Check.Warning.MapGetUnsafePass, false},
        {Credo.Check.Consistency.MultiAliasImportRequireUse, false},

        # Deprecated checks (these will be deleted after a grace period)
        #
        {Credo.Check.Readability.Specs, false},
        {Credo.Check.Warning.NameRedeclarationByAssignment, false},
        {Credo.Check.Warning.NameRedeclarationByCase, false},
        {Credo.Check.Warning.NameRedeclarationByDef, false},
        {Credo.Check.Warning.NameRedeclarationByFn, false},

        # Custom checks can be created using `mix credo.gen.check`.
        #
      ]
    }
  ]
}
| 39.993151 | 80 | 0.653537 |
1c285d4d269fe31dda4e7567eed3e531a069fbda | 3,187 | ex | Elixir | apps/exsemantica_phx/lib/exsemantica_phx/posting.ex | Chlorophytus/exsemantica | f1c64cb8ae0543e5a2f015a65071d81d57fa3224 | [
"Apache-2.0"
] | 1 | 2021-09-11T15:46:04.000Z | 2021-09-11T15:46:04.000Z | apps/exsemantica_phx/lib/exsemantica_phx/posting.ex | Chlorophytus/exsemantica | f1c64cb8ae0543e5a2f015a65071d81d57fa3224 | [
"Apache-2.0"
] | 4 | 2021-01-18T00:49:02.000Z | 2022-02-23T05:18:37.000Z | apps/exsemantica_phx/lib/exsemantica_phx/posting.ex | Chlorophytus/exsemantica | f1c64cb8ae0543e5a2f015a65071d81d57fa3224 | [
"Apache-2.0"
defmodule ExsemanticaPhx.Posting do
  @moduledoc """
  Write-side helpers that keep the relational store (`ExsemanticaPhx.Repo`)
  and the graph store (`ExsemanticaPhx.GraphStore`) in sync when users and
  interest posts are created or updated.
  """

  require LdGraph2.Agent

  import Ecto.Query

  @doc """
  Creates an interest post for `user` and links it into the graph
  (new node plus a user -> post edge).

  Returns `{:ok, node_id}` on success, or `nil` when `title` fails
  `ExsemanticaPhx.Sanitize.valid_interest?/1`.
  """
  def new_interest(user, title, content) do
    # `if` without `else` yields nil on invalid input — consistent with the
    # other constructors in this module (was a single-branch `cond`).
    if ExsemanticaPhx.Sanitize.valid_interest?(title) do
      # NOTE(review): max_id() + 1 is racy under concurrent writers —
      # confirm the single-writer assumption holds.
      max_id = ExsemanticaPhx.Search.max_id() + 1

      {:ok, _} =
        ExsemanticaPhx.Repo.transaction(fn ->
          response =
            ExsemanticaPhx.Repo.insert(%ExsemanticaPhx.Site.Post{
              content: content,
              title: title,
              is_interest: true,
              node_corresponding: max_id,
              poster: user
            })

          :ok =
            LdGraph2.Agent.update(ExsemanticaPhx.GraphStore, [
              {:add, {:node, max_id}},
              {:add, {:edge, user, max_id}}
            ])

          response
        end)

      {:ok, max_id}
    end
  end

  @doc "Replaces the biography of the user at graph node `user_nodeid`."
  def update_bio(user_nodeid, biography) do
    ExsemanticaPhx.Repo.one(
      from(u in ExsemanticaPhx.Site.User, where: u.node_corresponding == ^user_nodeid)
    )
    |> Ecto.Changeset.change(biography: biography)
    |> ExsemanticaPhx.Repo.update()
  end

  @doc "Replaces the content of the interest post at graph node `post_nodeid`."
  def update_interest(post_nodeid, interest_text) do
    ExsemanticaPhx.Repo.one(
      from(p in ExsemanticaPhx.Site.Post,
        where: p.node_corresponding == ^post_nodeid and p.is_interest
      )
    )
    |> Ecto.Changeset.change(content: interest_text)
    |> ExsemanticaPhx.Repo.update()
  end

  @doc """
  Add a user without an e-mail and password. They are only for testing.
  Not to be used in production.
  """
  def new_test_user(raw_username) do
    if ExsemanticaPhx.Sanitize.valid_username?(raw_username) do
      max_id = ExsemanticaPhx.Search.max_id() + 1

      {:ok, _} =
        ExsemanticaPhx.Repo.transaction(fn ->
          response =
            ExsemanticaPhx.Repo.insert(%ExsemanticaPhx.Site.User{
              username: String.downcase(raw_username),
              biography: "",
              node_corresponding: max_id
            })

          :ok =
            LdGraph2.Agent.update(ExsemanticaPhx.GraphStore, [
              {:add, {:node, max_id}}
            ])

          response
        end)

      {:ok, max_id}
    end
  end

  @doc """
  Add a user with an e-mail and password.

  This is production-called code, and may send network activity. Not to be used
  in testing/dev.
  """
  def new_user(raw_username, raw_email, hash) do
    email_valid = ExsemanticaPhx.Sanitize.valid_email?(raw_email)
    username_valid = ExsemanticaPhx.Sanitize.valid_username?(raw_username)

    # Returns nil when either field is invalid (was `cond` with `true -> nil`).
    if email_valid and username_valid do
      max_id = ExsemanticaPhx.Search.max_id() + 1

      {:ok, _} =
        ExsemanticaPhx.Repo.transaction(fn ->
          response =
            ExsemanticaPhx.Repo.insert(%ExsemanticaPhx.Site.User{
              username: String.downcase(raw_username),
              email: raw_email,
              password: hash,
              node_corresponding: max_id
            })

          :ok =
            LdGraph2.Agent.update(ExsemanticaPhx.GraphStore, [
              {:add, {:node, max_id}}
            ])

          response
        end)

      {:ok, max_id}
    end
  end
end
| 26.122951 | 86 | 0.597741 |
1c285e531fbed98859e3802af638b8d0754f670b | 1,901 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/get_spreadsheet_by_data_filter_request.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/get_spreadsheet_by_data_filter_request.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/get_spreadsheet_by_data_filter_request.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.

defmodule GoogleApi.Sheets.V4.Model.GetSpreadsheetByDataFilterRequest do
  @moduledoc """
  The request for retrieving a Spreadsheet.

  ## Attributes

  - dataFilters ([DataFilter]): The DataFilters used to select which ranges to retrieve from the spreadsheet. Defaults to: `null`.
  - includeGridData (boolean()): True if grid data should be returned. This parameter is ignored if a field mask was set in the request. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :dataFilters => list(GoogleApi.Sheets.V4.Model.DataFilter.t()),
          :includeGridData => any()
        }

  # `field/2,3` comes from GoogleApi.Gax.ModelBase and registers each
  # JSON attribute (with optional nested-model decoding).
  field(:dataFilters, as: GoogleApi.Sheets.V4.Model.DataFilter, type: :list)
  field(:includeGridData)
end

# Delegate Poison JSON decoding to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.GetSpreadsheetByDataFilterRequest do
  def decode(value, options) do
    GoogleApi.Sheets.V4.Model.GetSpreadsheetByDataFilterRequest.decode(value, options)
  end
end

# Encode via the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.GetSpreadsheetByDataFilterRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.27451 | 157 | 0.75434 |
1c286c2fd9873ddd104673bc8b112f7fe5df1140 | 2,179 | exs | Elixir | test/absinthe/phase/document/validation/unique_input_field_names_test.exs | dmarkow/absinthe | 707690bf06189b3a4b13959908a2eb52a3951179 | [
"MIT"
] | null | null | null | test/absinthe/phase/document/validation/unique_input_field_names_test.exs | dmarkow/absinthe | 707690bf06189b3a4b13959908a2eb52a3951179 | [
"MIT"
] | null | null | null | test/absinthe/phase/document/validation/unique_input_field_names_test.exs | dmarkow/absinthe | 707690bf06189b3a4b13959908a2eb52a3951179 | [
"MIT"
defmodule Absinthe.Phase.Document.Validation.UniqueInputFieldNamesTest do
  @phase Absinthe.Phase.Document.Validation.UniqueInputFieldNames

  use Absinthe.ValidationPhaseCase,
    phase: @phase,
    async: true

  alias Absinthe.{Blueprint}

  # Builds the expected error(s) for a duplicated input field `name` on
  # `line`: one bad-value expectation per literal in `values`.
  defp duplicate(name, line, values) do
    List.wrap(values)
    |> Enum.map(fn value ->
      bad_value(
        Blueprint.Input.Field,
        @phase.error_message,
        line,
        literal_value_check(name, value)
      )
    end)
  end

  # Predicate matching a Blueprint input field node with the given name
  # and literal value (used to pinpoint which node carries the error).
  defp literal_value_check(name, value) do
    fn
      %{name: ^name, input_value: %{literal: %{value: ^value}}} ->
        true

      _ ->
        false
    end
  end

  describe "Validate: Unique input field names" do
    test "input object with fields" do
      assert_passes_validation(
        """
        {
          field(arg: { f: true })
        }
        """,
        []
      )
    end

    test "same input object within two args" do
      # Same field name in *different* argument objects is fine.
      assert_passes_validation(
        """
        {
          field(arg1: { f: true }, arg2: { f: true })
        }
        """,
        []
      )
    end

    test "multiple input object fields" do
      assert_passes_validation(
        """
        {
          field(arg: { f1: "value", f2: "value", f3: "value" })
        }
        """,
        []
      )
    end

    test "allows for nested input objects with similar fields" do
      # Uniqueness is scoped per object level, so the same name may repeat
      # at different nesting depths.
      assert_passes_validation(
        """
        {
          field(arg: {
            deep: {
              deep: {
                id: 1
              }
              id: 1
            }
            id: 1
          })
        }
        """,
        []
      )
    end

    test "duplicate input object fields" do
      # Both occurrences of the duplicated field are reported.
      assert_fails_validation(
        """
        {
          field(arg: { f1: "value1", f1: "value2" })
        }
        """,
        [],
        duplicate("f1", 2, ~w(value1 value2))
      )
    end

    test "many duplicate input object fields" do
      assert_fails_validation(
        """
        {
          field(arg: { f1: "value1", f1: "value2", f1: "value3" })
        }
        """,
        [],
        duplicate("f1", 2, ~w(value1 value2 value3))
      )
    end
  end
end
| 19.809091 | 73 | 0.477742 |
1c286c38c4436ee0ece9a49b56013bced89829a8 | 623 | ex | Elixir | generated-sources/elixir/mojang-status/lib/com/github/asyncmc/mojang/status/elixir/server/model/api_status.ex | AsyncMC/Mojang-API-Libs | b01bbd2bce44bfa2b9ed705a128cf4ecda077916 | [
"Apache-2.0"
] | null | null | null | generated-sources/elixir/mojang-status/lib/com/github/asyncmc/mojang/status/elixir/server/model/api_status.ex | AsyncMC/Mojang-API-Libs | b01bbd2bce44bfa2b9ed705a128cf4ecda077916 | [
"Apache-2.0"
] | null | null | null | generated-sources/elixir/mojang-status/lib/com/github/asyncmc/mojang/status/elixir/server/model/api_status.ex | AsyncMC/Mojang-API-Libs | b01bbd2bce44bfa2b9ed705a128cf4ecda077916 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
# NOTE(review): the dotted lowercase module name below is emitted verbatim by
# the OpenAPI generator and is not a conventional Elixir alias — confirm it
# actually compiles/loads in the consuming project.
defmodule com.github.asyncmc.mojang.status.elixir.server.Model.ApiStatus do
  @moduledoc """
  The status of a Mojang services. Possible values are green (no issues), yellow (some issues), red (service unavailable).
  """

  # The generated model carries no fields; the status itself is conveyed as a
  # bare JSON value, so the struct is empty.
  @derive [Poison.Encoder]
  defstruct [
  ]

  @type t :: %__MODULE__{
  }
end
defimpl Poison.Decoder, for: com.github.asyncmc.mojang.status.elixir.server.Model.ApiStatus do
  # No nested models to decode: the parsed value is returned unchanged.
  def decode(value, _options), do: value
end
| 23.961538 | 122 | 0.719101 |
1c288846c36f70362ddb4fff7da65fe4a6f14af1 | 1,515 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/channel_card.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/channel_card.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/channel_card.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Model.ChannelCard do
  @moduledoc """
  A card pointing at a YouTube channel.

  ## Attributes

  *   `channelId` (*type:* `String.t`, *default:* `nil`) - External channel ID. Required.
  *   `customMessage` (*type:* `String.t`, *default:* `nil`) - Each card can have a custom message. Required.
  """

  # Supplies the struct, the field/1 macro and decode/2 shared by all
  # generated YouTube model modules.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :channelId => String.t(),
          :customMessage => String.t()
        }

  field(:channelId)
  field(:customMessage)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.ChannelCard do
  # Forward decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.YouTube.V3.Model.ChannelCard.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.ChannelCard do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 30.3 | 109 | 0.718152 |
1c28ab48199c1371da136861ccffd8f855dfaefe | 3,587 | ex | Elixir | lib/shiritorishi/kana_dict/table.ex | Foo-x/Shiritorishi | ee9b58f74c51941f958c986578c95c26a9920816 | [
"Apache-2.0"
] | null | null | null | lib/shiritorishi/kana_dict/table.ex | Foo-x/Shiritorishi | ee9b58f74c51941f958c986578c95c26a9920816 | [
"Apache-2.0"
] | null | null | null | lib/shiritorishi/kana_dict/table.ex | Foo-x/Shiritorishi | ee9b58f74c51941f958c986578c95c26a9920816 | [
"Apache-2.0"
] | null | null | null | defmodule Shiritorishi.KanaDict.Table do
def hira2kata do
%{
"あ" => "ア",
"い" => "イ",
"う" => "ウ",
"え" => "エ",
"お" => "オ",
"か" => "カ",
"き" => "キ",
"く" => "ク",
"け" => "ケ",
"こ" => "コ",
"さ" => "サ",
"し" => "シ",
"す" => "ス",
"せ" => "セ",
"そ" => "ソ",
"た" => "タ",
"ち" => "チ",
"つ" => "ツ",
"て" => "テ",
"と" => "ト",
"な" => "ナ",
"に" => "ニ",
"ぬ" => "ヌ",
"ね" => "ネ",
"の" => "ノ",
"は" => "ハ",
"ひ" => "ヒ",
"ふ" => "フ",
"へ" => "ヘ",
"ほ" => "ホ",
"ま" => "マ",
"み" => "ミ",
"む" => "ム",
"め" => "メ",
"も" => "モ",
"や" => "ヤ",
"ゐ" => "ヰ",
"ゆ" => "ユ",
"ゑ" => "ヱ",
"よ" => "ヨ",
"ら" => "ラ",
"り" => "リ",
"る" => "ル",
"れ" => "レ",
"ろ" => "ロ",
"わ" => "ワ",
"を" => "ヲ",
"ん" => "ン",
"が" => "ガ",
"ぎ" => "ギ",
"ぐ" => "グ",
"げ" => "ゲ",
"ご" => "ゴ",
"ざ" => "ザ",
"じ" => "ジ",
"ず" => "ズ",
"ぜ" => "ゼ",
"ぞ" => "ゾ",
"だ" => "ダ",
"ぢ" => "ヂ",
"づ" => "ヅ",
"で" => "デ",
"ど" => "ド",
"ば" => "バ",
"び" => "ビ",
"ぶ" => "ブ",
"べ" => "ベ",
"ぼ" => "ボ",
"ぱ" => "パ",
"ぴ" => "ピ",
"ぷ" => "プ",
"ぺ" => "ペ",
"ぽ" => "ポ",
"ぁ" => "ァ",
"ぃ" => "ィ",
"ぅ" => "ゥ",
"ぇ" => "ェ",
"ぉ" => "ォ",
"ヵ" => "ヵ",
"っ" => "ッ",
"ゃ" => "ャ",
"ゅ" => "ュ",
"ょ" => "ョ",
"ー" => "ー"
}
end
def kata2hira do
%{
"ア" => "あ",
"イ" => "い",
"ウ" => "う",
"エ" => "え",
"オ" => "お",
"カ" => "か",
"キ" => "き",
"ク" => "く",
"ケ" => "け",
"コ" => "こ",
"サ" => "さ",
"シ" => "し",
"ス" => "す",
"セ" => "せ",
"ソ" => "そ",
"タ" => "た",
"チ" => "ち",
"ツ" => "つ",
"テ" => "て",
"ト" => "と",
"ナ" => "な",
"ニ" => "に",
"ヌ" => "ぬ",
"ネ" => "ね",
"ノ" => "の",
"ハ" => "は",
"ヒ" => "ひ",
"フ" => "ふ",
"ヘ" => "へ",
"ホ" => "ほ",
"マ" => "ま",
"ミ" => "み",
"ム" => "む",
"メ" => "め",
"モ" => "も",
"ヤ" => "や",
"ヰ" => "ゐ",
"ユ" => "ゆ",
"ヱ" => "ゑ",
"ヨ" => "よ",
"ラ" => "ら",
"リ" => "り",
"ル" => "る",
"レ" => "れ",
"ロ" => "ろ",
"ワ" => "わ",
"ヲ" => "を",
"ン" => "ん",
"ガ" => "が",
"ギ" => "ぎ",
"グ" => "ぐ",
"ゲ" => "げ",
"ゴ" => "ご",
"ザ" => "ざ",
"ジ" => "じ",
"ズ" => "ず",
"ゼ" => "ぜ",
"ゾ" => "ぞ",
"ダ" => "だ",
"ヂ" => "ぢ",
"ヅ" => "づ",
"デ" => "で",
"ド" => "ど",
"バ" => "ば",
"ビ" => "び",
"ブ" => "ぶ",
"ベ" => "べ",
"ボ" => "ぼ",
"パ" => "ぱ",
"ピ" => "ぴ",
"プ" => "ぷ",
"ペ" => "ぺ",
"ポ" => "ぽ",
"ァ" => "ぁ",
"ィ" => "ぃ",
"ゥ" => "ぅ",
"ェ" => "ぇ",
"ォ" => "ぉ",
"ヵ" => "ヵ",
"ッ" => "っ",
"ャ" => "ゃ",
"ュ" => "ゅ",
"ョ" => "ょ",
"ー" => "ー"
}
end
def to_upper do
%{
"ぁ" => "あ",
"ぃ" => "い",
"ぅ" => "う",
"ぇ" => "え",
"ぉ" => "お",
"ヵ" => "か",
"っ" => "つ",
"ゃ" => "や",
"ゅ" => "ゆ",
"ょ" => "よ",
"ァ" => "ア",
"ィ" => "イ",
"ゥ" => "ウ",
"ェ" => "エ",
"ォ" => "オ",
"ッ" => "ツ",
"ャ" => "ヤ",
"ュ" => "ユ",
"ョ" => "ヨ"
}
end
def ignore_set do
MapSet.new([
"ー"
])
end
end
| 17 | 40 | 0.136883 |
1c28b920fec3a6cb8d45ea1697979e0f197ba1ef | 891 | exs | Elixir | config/config.exs | casmacc/phoenix_helloworld | 6bcdbe7a5cf20f02bdb50b5aa98415d1fdb7f2a3 | [
"MIT"
] | null | null | null | config/config.exs | casmacc/phoenix_helloworld | 6bcdbe7a5cf20f02bdb50b5aa98415d1fdb7f2a3 | [
"MIT"
] | null | null | null | config/config.exs | casmacc/phoenix_helloworld | 6bcdbe7a5cf20f02bdb50b5aa98415d1fdb7f2a3 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# Configures the endpoint
config :phoenix_helloworld, PhoenixHelloworldWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "D6KvDxXytT7b63iTxoP1hkBwyy1bMBVHjVte03t8mD64iVWDcgY94iIanOCQm6HD",
render_errors: [view: PhoenixHelloworldWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: PhoenixHelloworld.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:user_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 37.125 | 86 | 0.773288 |
1c28d51356da53b62c76b409de06b85376155c33 | 3,518 | ex | Elixir | lib/makeup/formatters/html/html_formatter.ex | davydog187/makeup | f57fb043865a1184dbd86004797d9d1c6eb16412 | [
"BSD-2-Clause"
] | 31 | 2020-10-02T09:01:30.000Z | 2022-03-18T14:58:01.000Z | lib/makeup/formatters/html/html_formatter.ex | davydog187/makeup | f57fb043865a1184dbd86004797d9d1c6eb16412 | [
"BSD-2-Clause"
] | 21 | 2020-09-29T12:59:58.000Z | 2022-03-07T15:21:48.000Z | lib/makeup/formatters/html/html_formatter.ex | davydog187/makeup | f57fb043865a1184dbd86004797d9d1c6eb16412 | [
"BSD-2-Clause"
] | 5 | 2020-12-01T20:17:51.000Z | 2022-03-07T09:10:13.000Z | defmodule Makeup.Formatters.HTML.HTMLFormatter do
@moduledoc """
Turns a list of tokens into HTML fragments.
"""
@group_highlight_js "lib/makeup/formatters/html/scripts/group_highlighter_javascript.js" |> File.read!
defp render_token(escaped_value, css_class, meta, highlight_tag) do
group_id = meta[:group_id]
selectable = Map.get(meta, :selectable, [])
classes = [
css_class || [],
if selectable == false do " unselectable" else [] end
]
[
"<",
highlight_tag,
~S( class="),
classes,
~S("),
if group_id do [~S( data-group-id="), group_id, ~S(")] else [] end,
">",
escaped_value,
"</",
highlight_tag,
">",
]
end
@doc """
Format a single token into an iolist.
"""
def format_token({tag, meta, value}, highlight_tag) do
escaped_value = escape(value)
css_class = Makeup.Token.Utils.css_class_for_token_type(tag)
render_token(escaped_value, css_class, meta, highlight_tag)
end
defp escape_for(?&), do: "&"
defp escape_for(?<), do: "<"
defp escape_for(?>), do: ">"
defp escape_for(?"), do: """
defp escape_for(?'), do: "'"
defp escape_for(c) when is_integer(c) and c <= 127, do: c
defp escape_for(c) when is_integer(c) and c >= 128, do: << c :: utf8 >>
defp escape_for(string) when is_binary(string) do
string
|> to_charlist()
|> Enum.map(&escape_for/1)
end
defp escape(iodata) when is_list(iodata) do
iodata
|> :lists.flatten()
|> Enum.map(&escape_for/1)
end
defp escape(other) when is_binary(other) do
escape_for(other)
end
defp escape(c) when is_integer(c) do
#
[escape_for(c)]
end
defp escape(other) do
raise "Found `#{inspect(other)}` inside what should be an iolist"
end
@doc """
Turns a list of tokens into an iolist which represents an HTML fragment.
This fragment can be embedded directly into an HTML document.
"""
def format_inner_as_iolist(tokens, opts) do
highlight_tag = Keyword.get(opts, :highlight_tag, "span")
Enum.map(tokens, &format_token(&1, highlight_tag))
end
@doc """
Turns a list of tokens into an HTML fragment.
This fragment can be embedded directly into an HTML document.
"""
def format_inner_as_binary(tokens, opts) do
tokens
|> format_inner_as_iolist(opts)
|> IO.iodata_to_binary
end
@doc """
Turns a list of tokens into an iolist which represents an HTML fragment.
This fragment can be embedded directly into an HTML document.
"""
def format_as_iolist(tokens, opts \\ []) do
css_class = Keyword.get(opts, :css_class, "highlight")
inner = format_inner_as_iolist(tokens, opts)
[
~S(<pre class="),
css_class,
~S("><code>),
inner,
~S(</code></pre>)
]
end
@doc """
Turns a list of tokens into an HTML fragment.
This fragment can be embedded directly into an HTML document.
"""
def format_as_binary(tokens, opts \\ []) do
tokens
|> format_as_iolist(opts)
|> IO.iodata_to_binary
end
@doc """
Return the CSS stylesheet for a given style.
"""
def stylesheet(style, css_class \\ "highlight") do
Makeup.Styles.HTML.Style.stylesheet(style, css_class)
end
@doc """
Return a JavaScript snippet to highlight code on mouseover.
This is "raw" javascript, and for inclusion in an HTML file
it must be wrapped in a `<script>` tag.
"""
def group_highlighter_javascript() do
@group_highlight_js
end
end
| 24.774648 | 104 | 0.648664 |
1c28d6c7034fba4286858712d38309432e12d8cf | 481 | exs | Elixir | mix.exs | piasekhgw/distributed_mkp | 0cd2d4a3b007d9fdd7544da34c2785ab6f48ae7e | [
"MIT"
] | 9 | 2017-09-27T21:25:34.000Z | 2020-10-07T16:14:26.000Z | mix.exs | piasekhgw/distributed_mkp | 0cd2d4a3b007d9fdd7544da34c2785ab6f48ae7e | [
"MIT"
] | 1 | 2017-09-27T21:26:00.000Z | 2017-10-01T11:49:11.000Z | mix.exs | piasekhgw/distributed_mkp | 0cd2d4a3b007d9fdd7544da34c2785ab6f48ae7e | [
"MIT"
] | null | null | null | defmodule DistributedMkp.Mixfile do
use Mix.Project
def project do
[
apps_path: "apps",
start_permanent: Mix.env == :prod,
deps: deps()
]
end
# Dependencies listed here are available only for this
# project and cannot be accessed from applications inside
# the apps folder.
#
# Run "mix help deps" for examples and options.
defp deps do
[
{:ex_doc, "~> 0.16.3"},
{:distillery, "~> 1.5", runtime: false}
]
end
end
| 20.041667 | 59 | 0.617464 |
1c28dd31652da86c71a4e8fb90f4828b94b4dd45 | 3,462 | ex | Elixir | lib/kaffe/config/consumer.ex | harshavardhangelivi/kaffe | db08f159eccad6c682f5b55e001f0d5aa9bb2f3d | [
"MIT"
] | null | null | null | lib/kaffe/config/consumer.ex | harshavardhangelivi/kaffe | db08f159eccad6c682f5b55e001f0d5aa9bb2f3d | [
"MIT"
] | null | null | null | lib/kaffe/config/consumer.ex | harshavardhangelivi/kaffe | db08f159eccad6c682f5b55e001f0d5aa9bb2f3d | [
"MIT"
] | null | null | null | defmodule Kaffe.Config.Consumer do
import Kaffe.Config, only: [heroku_kafka_endpoints: 0, parse_endpoints: 1]
def configuration do
%{
endpoints: endpoints(),
subscriber_name: subscriber_name(),
consumer_group: consumer_group(),
topics: topics(),
group_config: consumer_group_config(),
consumer_config: client_consumer_config(),
message_handler: message_handler(),
partitions_listener: partitions_listener(),
async_message_ack: async_message_ack(),
rebalance_delay_ms: rebalance_delay_ms(),
max_bytes: max_bytes(),
min_bytes: min_bytes(),
max_wait_time: max_wait_time(),
subscriber_retries: subscriber_retries(),
subscriber_retry_delay_ms: subscriber_retry_delay_ms(),
offset_reset_policy: offset_reset_policy(),
worker_allocation_strategy: worker_allocation_strategy(),
client_down_retry_expire: client_down_retry_expire()
}
end
def consumer_group, do: config_get!(:consumer_group)
def subscriber_name, do: config_get(:subscriber_name, consumer_group()) |> String.to_atom()
def topics, do: config_get!(:topics)
def message_handler, do: config_get!(:message_handler)
def partitions_listener, do: config_get!(:partitions_listener)
def async_message_ack, do: config_get(:async_message_ack, false)
def endpoints do
if heroku_kafka?() do
heroku_kafka_endpoints()
else
parse_endpoints(config_get!(:endpoints))
end
end
def consumer_group_config do
[
offset_commit_policy: :commit_to_kafka_v2,
offset_commit_interval_seconds: config_get(:offset_commit_interval_seconds, 5)
]
end
def rebalance_delay_ms do
config_get(:rebalance_delay_ms, 10_000)
end
def max_bytes do
config_get(:max_bytes, 1_000_000)
end
def min_bytes do
config_get(:min_bytes, 0)
end
def max_wait_time do
config_get(:max_wait_time, 10_000)
end
def subscriber_retries do
config_get(:subscriber_retries, 5)
end
def subscriber_retry_delay_ms do
config_get(:subscriber_retry_delay_ms, 5_000)
end
def client_consumer_config do
default_client_consumer_config() ++ maybe_heroku_kafka_ssl() ++ sasl_options() ++ ssl_options()
end
def sasl_options do
:sasl
|> config_get(%{})
|> Kaffe.Config.sasl_config()
end
def ssl_options do
:ssl
|> config_get(false)
|> Kaffe.Config.ssl_config()
end
def default_client_consumer_config do
[
auto_start_producers: false,
allow_topic_auto_creation: false,
begin_offset: begin_offset()
]
end
def begin_offset do
case config_get(:start_with_earliest_message, false) do
true -> :earliest
false -> -1
end
end
def offset_reset_policy do
config_get(:offset_reset_policy, :reset_by_subscriber)
end
def worker_allocation_strategy do
config_get(:worker_allocation_strategy, :worker_per_partition)
end
def client_down_retry_expire do
config_get(:client_down_retry_expire, 30_000)
end
def maybe_heroku_kafka_ssl do
case heroku_kafka?() do
true -> Kaffe.Config.ssl_config()
false -> []
end
end
def heroku_kafka? do
config_get(:heroku_kafka_env, false)
end
def config_get!(key) do
Application.get_env(:kaffe, :consumer)
|> Keyword.fetch!(key)
end
def config_get(key, default) do
Application.get_env(:kaffe, :consumer)
|> Keyword.get(key, default)
end
end
| 24.380282 | 99 | 0.717504 |
1c28f59f2bb19cc49735f7319d04251e9838b0b4 | 4,727 | ex | Elixir | web/remotings/remoting_server.ex | fangkunping/micro-server-shell | e90a9992bb58d4dbc2661825e6e5bcdce5f40e7d | [
"Apache-2.0"
] | null | null | null | web/remotings/remoting_server.ex | fangkunping/micro-server-shell | e90a9992bb58d4dbc2661825e6e5bcdce5f40e7d | [
"Apache-2.0"
] | null | null | null | web/remotings/remoting_server.ex | fangkunping/micro-server-shell | e90a9992bb58d4dbc2661825e6e5bcdce5f40e7d | [
"Apache-2.0"
] | null | null | null | defmodule MicroServerShell.Remoting.Server do
alias MicroServerShell.Remoting
@doc """
获取服务器列表
"""
@spec get_servers(atom, integer) :: list
def get_servers(vip_type, access_party_id) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :get_servers, [access_party_id])
end
@doc """
创建服务器
"""
@spec create_server(atom, integer) :: :ok | {:error, String.t()}
def create_server(vip_type, access_party_id) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :create_server, [access_party_id])
end
@doc """
删除服务器
"""
@spec delete_server(atom, integer, integer) :: {:ok, any} | {:error, any}
def delete_server(vip_type, access_party_id, server_id) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :delete_server, [
access_party_id,
server_id
])
end
@doc """
通过id获取Server内容
"""
@spec get_server(atom, integer, integer) :: map | nil
def get_server(vip_type, access_party_id, server_id) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :get_server, [access_party_id, server_id])
end
@doc """
更新服务器
"""
@spec update_server(atom, integer, map) :: {:ok, any} | {:error, any}
def update_server(vip_type, access_party_id, server_params) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :update_server, [
access_party_id,
server_params
])
end
@doc """
启动服务器
"""
@spec start_server(atom, integer, integer) :: any
def start_server(vip_type, access_party_id, server_id) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :start_server, [
access_party_id,
server_id
])
end
@doc """
停止服务器
"""
@spec stop_server(atom, integer, integer) :: any
def stop_server(vip_type, access_party_id, server_id) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :stop_server, [
access_party_id,
server_id
])
end
@doc """
热更新服务器
"""
@spec hot_update_server(atom, integer, integer) :: any
def hot_update_server(vip_type, access_party_id, server_id) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :hot_update_server, [
access_party_id,
server_id
])
end
@doc """
服务器是否启动
"""
@spec server_exist?(atom, integer) :: boolean
def server_exist?(vip_type, server_id) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :server_exist?, [
server_id
])
end
@doc """
向服务器的lua端发送 websocket信息
"""
@spec websocket_send_to_lua_websocket_message(atom, integer, term) :: :ok
def websocket_send_to_lua_websocket_message(vip_type, server_id, message) do
Remoting.call(
vip_type,
MicroServer.Remoting.Server,
:websocket_send_to_lua_websocket_message,
[
server_id,
self(),
message
]
)
end
@doc """
向服务器的lua端发送http信息
"""
@spec websocket_send_to_lua_http_message(atom, integer, term) :: :ok
def websocket_send_to_lua_http_message(vip_type, server_id, message) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :websocket_send_to_lua_http_message, [
server_id,
self(),
message
])
end
@doc """
向服务器的lua端发送topic链接信息
"""
@spec websocket_send_to_lua_topic_join(atom, integer, term) :: :ok
def websocket_send_to_lua_topic_join(vip_type, server_id, message) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :websocket_send_to_lua_topic_join, [
server_id,
self(),
message
])
end
@doc """
向服务器的lua端发送topic断开信息
"""
@spec websocket_send_to_lua_topic_leave(atom, integer) :: :ok
def websocket_send_to_lua_topic_leave(vip_type, server_id) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :websocket_send_to_lua_topic_leave, [
server_id,
self()
])
end
@doc """
向服务器的lua端发送socket链接信息
"""
@spec websocket_send_to_lua_socket_connect(atom, integer, term) :: :ok
def websocket_send_to_lua_socket_connect(vip_type, server_id, message) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :websocket_send_to_lua_socket_connect, [
server_id,
self(),
message
])
end
@doc """
向服务器的lua端发送socket断开信息
"""
@spec websocket_send_to_lua_socket_disconnect(atom, integer) :: :ok
def websocket_send_to_lua_socket_disconnect(vip_type, server_id) do
Remoting.call(
vip_type,
MicroServer.Remoting.Server,
:websocket_send_to_lua_socket_disconnect,
[
server_id,
self()
]
)
end
@doc """
获取服务器的log
"""
@spec get_log(atom, integer, integer) :: list
def get_log(vip_type, access_party_id, server_id) do
Remoting.call(vip_type, MicroServer.Remoting.Server, :get_log, [access_party_id, server_id])
end
end
| 26.116022 | 99 | 0.692828 |
1c29075c544805e753519dc141243e7ec3fe6730 | 325 | exs | Elixir | priv/repo/migrations/20141003003122_add_job_to_job_table.exs | taybin/gardenhose | cd82a7d1838d443810a13b75ac609aaac92a2b0b | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20141003003122_add_job_to_job_table.exs | taybin/gardenhose | cd82a7d1838d443810a13b75ac609aaac92a2b0b | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20141003003122_add_job_to_job_table.exs | taybin/gardenhose | cd82a7d1838d443810a13b75ac609aaac92a2b0b | [
"Apache-2.0"
] | null | null | null | defmodule Gardenhose.Repo.Migrations.AddJobToJobTable do
use Ecto.Migration
def up do
"CREATE TABLE IF NOT EXISTS job_to_job(parent_id INTEGER NOT NULL REFERENCES jobs(id) ON DELETE CASCADE, child_id INTEGER NOT NULL REFERENCES jobs(id) ON DELETE CASCADE)"
end
def down do
"DROP TABLE job_to_job"
end
end
| 27.083333 | 174 | 0.769231 |
1c295a4cf8e821bfc496c5aad1ec3a3dcbb301e7 | 1,104 | ex | Elixir | lib/phoenix_postgres_react_web/channels/user_socket.ex | CTMoney/phoenix-postgres-react | b51c298fdcef339324a601dd874a82e1e0cc8e6e | [
"MIT"
] | null | null | null | lib/phoenix_postgres_react_web/channels/user_socket.ex | CTMoney/phoenix-postgres-react | b51c298fdcef339324a601dd874a82e1e0cc8e6e | [
"MIT"
] | 1 | 2021-03-09T11:33:04.000Z | 2021-03-09T11:33:04.000Z | lib/phoenix_postgres_react_web/channels/user_socket.ex | CTMoney/phoenix_ | b51c298fdcef339324a601dd874a82e1e0cc8e6e | [
"MIT"
] | null | null | null | defmodule PhoenixPostgresReactWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", PhoenixPostgresReactWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# PhoenixPostgresReactWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 32.470588 | 95 | 0.707428 |
1c2972cd59884cb56be7985509433ad3dd4edd3f | 4,236 | exs | Elixir | test/ex_nexmo/sms/request_test.exs | NeoAlchemist/ex_nexmo | e4f8d56e1b9fe3b57f352c540936e77eb31a5f76 | [
"MIT"
] | 3 | 2017-01-19T23:40:37.000Z | 2017-11-20T06:11:41.000Z | test/ex_nexmo/sms/request_test.exs | NeoAlchemist/ex_nexmo | e4f8d56e1b9fe3b57f352c540936e77eb31a5f76 | [
"MIT"
] | 4 | 2016-09-26T20:00:20.000Z | 2018-09-04T01:34:34.000Z | test/ex_nexmo/sms/request_test.exs | NeoAlchemist/ex_nexmo | e4f8d56e1b9fe3b57f352c540936e77eb31a5f76 | [
"MIT"
] | 7 | 2017-02-02T01:32:52.000Z | 2021-09-04T00:26:45.000Z | ExUnit.start
defmodule ExNexmo.SMS.RequestTest do
use ExUnit.Case
alias ExNexmo.SMS.{Request, Response}
setup do
api_key = "key"
api_secret = "secret"
to = "27820005555"
from = "SenderId"
text = "Test message"
valid_single_message_response = """
{
"message-count":"1",
"messages":[
{
"status":"0",
"message-id":"123",
"to":"#{to}",
"client-ref":"234234",
"remaining-balance":"22344",
"message-price":"345",
"network":"network",
"error-text":"error-message"
}
]
}
"""
failed_single_message_response = """
{
"message-count":"1",
"messages":[
{
"status":"5",
"message-id":"123",
"to":"#{to}",
"client-ref":"234234",
"remaining-balance":"22344",
"message-price":"345",
"network":"network",
"error-text":"error-message"
}
]
}
"""
unknown_single_message_response = """
{
"message-count":"1",
"messages":[
{
"status":"5234",
"message-id":"123",
"to":"#{to}",
"client-ref":"234234",
"remaining-balance":"22344",
"message-price":"345",
"network":"network",
"error-text":"error-message"
}
]
}
"""
bypass = Bypass.open
orig_url = Application.get_env :ex_nexmo, :api_host
url = "http://localhost:#{bypass.port}"
Application.put_env :ex_nexmo, :api_host, url
orig_api_key = System.get_env "NEXMO_API_KEY"
System.put_env "NEXMO_API_KEY", api_key
orig_api_secret = System.get_env "NEXMO_API_SECRET"
System.put_env "NEXMO_API_SECRET", api_secret
on_exit fn ->
Application.put_env :ex_nexmo, :api_host, orig_url
System.put_env "NEXMO_API_KEY", orig_api_key
System.put_env "NEXMO_API_SECRET", orig_api_secret
end
{:ok, %{
api_key: api_key,
api_secret: api_secret,
to: to,
from: from,
text: text,
bypass: bypass,
valid_single_message_response: valid_single_message_response,
failed_single_message_response: failed_single_message_response,
unknown_single_message_response: unknown_single_message_response
}}
end
test "posts the request to Nexmo", %{
from: from,
to: to,
text: text,
bypass: bypass,
valid_single_message_response: valid_single_message_response
} do
Bypass.expect bypass, fn conn ->
assert "/sms/json" == conn.request_path
assert "" == conn.query_string
assert "POST" == conn.method
assert {"content-type", "application/json; charset=utf-8"} in conn.req_headers
Plug.Conn.resp(conn, 200, valid_single_message_response)
end
{:ok, %Response{messages: [%{status: status}]}} = Request.send(from, to, text)
assert status == {:ok, :success}
end
test "returns the correct status for a failed message", %{
from: from,
to: to,
text: text,
bypass: bypass,
failed_single_message_response: failed_single_message_response
} do
Bypass.expect bypass, fn conn ->
assert "/sms/json" == conn.request_path
assert "" == conn.query_string
assert "POST" == conn.method
assert {"content-type", "application/json; charset=utf-8"} in conn.req_headers
Plug.Conn.resp(conn, 200, failed_single_message_response)
end
{:ok, %Response{messages: [%{status: status}]}} = Request.send(from, to, text)
assert status == {:error, :internal_error}
end
test "returns the correct status for an unknown message", %{
from: from,
to: to,
text: text,
bypass: bypass,
unknown_single_message_response: unknown_single_message_response
} do
Bypass.expect bypass, fn conn ->
assert "/sms/json" == conn.request_path
assert "" == conn.query_string
assert "POST" == conn.method
assert {"content-type", "application/json; charset=utf-8"} in conn.req_headers
Plug.Conn.resp(conn, 200, unknown_single_message_response)
end
{:ok, %Response{messages: [%{status: status}]}} = Request.send(from, to, text)
assert status == {:error, :unknown}
end
test "fails properly when connection fails", %{
from: from,
to: to,
text: text,
bypass: bypass
} do
Bypass.down(bypass)
assert {:error, :econnrefused} == Request.send(from, to, text)
end
end
| 27.153846 | 84 | 0.636686 |
1c2979115d79be391455bfbe669965c878c6f5b7 | 6,303 | ex | Elixir | core/version/history.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:11.000Z | 2020-08-27T18:43:11.000Z | core/version/history.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | null | null | null | core/version/history.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:21.000Z | 2020-08-27T18:43:21.000Z | # Copyright(c) 2015-2020 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule AntikytheraCore.Version.History do
@moduledoc """
Functions to get information from "history" files.
Antikythera uses history files to detect new deployable versions of antikythera instance and gears.
Each of antikythera instance and gears has its own history file.
The files reside in `AntikytheraCore.Path.history_dir/0`.
When deploying a new version, after successfully generated a compiled artifact (tarball),
antikythera's deploy script instructs the ErlangVM nodes by appending a line to the history file of the target OTP application.
Each line takes either of the following forms:
| | format of line | installable | upgradable |
| deploy with hot code upgrade | `<version>` | true | true |
| deploy with hot code upgrade (canary) | `<version> canary=<hosts>` | false | host in hosts |
| deploy without hot code upgrade | `<version> noupgrade` | true | false |
| deploy without hot code upgrade (canary) | `<version> noupgrade_canary=<deadline>` | now < deadline | false |
- "installable" means that the version can be installed into a node that doesn't have the OTP application.
- See also `script/in_cloud/erl/boot_impl.sh`, which chooses an appropriate version of
OTP release of the antikythera instance at startup of a newly created host.
- "upgradable" means that the version can be applied by hot code upgrade to a node that has a previous version of the OTP application.
Versions in a history file are expected to monotonically increase.
"""
alias Antikythera.{Time, GearName, GearNameStr, VersionStr, SecondsSinceEpoch}
alias AntikytheraCore.Path, as: CorePath
alias AntikytheraCore.Cluster
  # One parsed line of a history file. The four accepted line formats (see the
  # parent module's moduledoc table) map onto the struct fields as follows:
  #   "<version>"                           -> plain deploy (hot upgradable)
  #   "<version> canary=<hosts>"            -> canary hot upgrade on listed hosts
  #   "<version> noupgrade"                 -> deploy without hot code upgrade
  #   "<version> noupgrade_canary=<time>"   -> noupgrade canary, installable until <time>
  defmodule Entry do
    alias Croma.TypeGen, as: TG
    use Croma.Struct, recursive_new?: true, fields: [
      version: VersionStr,
      canary_target_hosts: TG.nilable(TG.list_of(Croma.String)),
      noupgrade: Croma.Boolean,
      installable_until: TG.nilable(Time),
    ]

    # Parses one history-file line; raises (MatchError/CaseClauseError) on a
    # line that fits none of the four formats above.
    defun from_line(s :: v[String.t]) :: t do
      case String.split(s, " ", trim: true) do
        [version] ->
          %__MODULE__{version: version, noupgrade: false, canary_target_hosts: nil, installable_until: nil}
        [version, "canary=" <> hosts_str] ->
          hosts = String.split(hosts_str, ",", trim: true)
          %__MODULE__{version: version, noupgrade: false, canary_target_hosts: hosts, installable_until: nil}
        [version, "noupgrade"] ->
          %__MODULE__{version: version, noupgrade: true, canary_target_hosts: nil, installable_until: nil}
        [version, "noupgrade_canary=" <> timestamp] ->
          {:ok, t} = Time.from_iso_timestamp(timestamp)
          %__MODULE__{version: version, noupgrade: true, canary_target_hosts: nil, installable_until: t}
      end
    end

    # A version is installable on a fresh node unless it is a canary:
    # host-scoped canaries are never installable, time-scoped ones only
    # before their deadline.
    defun installable?(%__MODULE__{canary_target_hosts: hosts, installable_until: until}) :: boolean do
      case {hosts, until} do
        {nil, nil} -> true
        {_  , nil} -> false
        {nil, _  } -> Time.now() < until
      end
    end

    # Hot code upgrade is allowed unless the entry is marked noupgrade;
    # for host-scoped canaries only the listed hosts may upgrade.
    defun upgradable?(%__MODULE__{noupgrade: noupgrade, canary_target_hosts: hosts}) :: boolean do
      if noupgrade do
        false
      else
        case hosts do
          nil -> true
          _   -> Cluster.node_to_host(Node.self()) in hosts
        end
      end
    end

    # True when the entry is any kind of canary deploy (host- or time-scoped).
    defun canary?(%__MODULE__{canary_target_hosts: hosts, installable_until: until}) :: boolean do
      case {hosts, until} do
        {nil, nil} -> false
        _          -> true
      end
    end
  end
defun latest_installable_gear_version(gear_name :: v[GearName.t]) :: nil | VersionStr.t do
find_latest_installable_version(File.read!(file_path(gear_name)))
end
defunpt find_latest_installable_version(content :: v[String.t]) :: nil | VersionStr.t do
String.split(content, "\n", trim: true)
|> Enum.reverse()
|> Enum.find_value(fn line ->
e = Entry.from_line(line)
if Entry.installable?(e), do: e.version, else: nil
end)
end
defun next_upgradable_version(app_name :: v[:antikythera | GearName.t], current_version :: v[VersionStr.t]) :: nil | VersionStr.t do
find_next_upgradable_version(app_name, File.read!(file_path(app_name)), current_version)
end
defunpt find_next_upgradable_version(app_name :: v[:antikythera | GearName.t], content :: v[String.t], current_version :: v[VersionStr.t]) :: nil | VersionStr.t do
lines_without_previous_versions =
String.split(content, "\n", trim: true)
|> Enum.drop_while(fn line -> !String.starts_with?(line, current_version) end)
case lines_without_previous_versions do
[] -> raise "current version is not found in history file for #{app_name}"
_ ->
lines_with_new_versions = Enum.drop_while(lines_without_previous_versions, fn l -> String.starts_with?(l, current_version) end)
find_next(lines_with_new_versions)
end
end
defp find_next([]), do: nil
defp find_next([l | ls]) do
e = Entry.from_line(l)
cond do
Entry.upgradable?(e) -> e.version
Entry.canary?(e) -> find_next(ls)
:noupgrade_deploy -> nil
end
end
defunp file_path(app_name :: v[:antikythera | GearName.t]) :: Path.t do
Path.join(CorePath.history_dir(), Atom.to_string(app_name))
end
defun find_all_modified_history_files(since :: v[SecondsSinceEpoch.t]) :: {boolean, [GearName.t]} do
antikythera_instance_name_str = Antikythera.Env.antikythera_instance_name() |> Atom.to_string()
files = CorePath.list_modified_files(CorePath.history_dir(), since) |> Enum.map(&Path.basename/1)
{antikythera_appearance, others} = Enum.split_with(files, &(&1 == antikythera_instance_name_str))
gear_names =
Enum.filter(others, &GearNameStr.valid?/1)
|> Enum.map(&String.to_atom/1) # generate atom from trusted data source
{antikythera_appearance != [], gear_names}
end
defun all_deployable_gear_names() :: [GearName.t] do
{_, gear_names} = find_all_modified_history_files(0)
gear_names
end
end
| 43.468966 | 165 | 0.665239 |
1c29827891605492023d0f96813e4a7d9122d8dc | 1,318 | ex | Elixir | lib/cadet/achievements/achievement.ex | seanlowjk/cadet | 52b55cc0f777cb0d55a78fe1693f762085ab36c2 | [
"Apache-2.0"
] | null | null | null | lib/cadet/achievements/achievement.ex | seanlowjk/cadet | 52b55cc0f777cb0d55a78fe1693f762085ab36c2 | [
"Apache-2.0"
] | null | null | null | lib/cadet/achievements/achievement.ex | seanlowjk/cadet | 52b55cc0f777cb0d55a78fe1693f762085ab36c2 | [
"Apache-2.0"
defmodule Cadet.Achievements.Achievement do
  @moduledoc """
  Stores achievements.
  """
  use Cadet, :model

  alias Cadet.Achievements.{AchievementGoal, AchievementPrerequisite}

  # Closed set of ability categories an achievement may belong to;
  # enforced in `changeset/2` via `validate_inclusion/3`.
  @valid_abilities ~w(Core Community Effort Exploration)

  # `autogenerate: false`: the integer primary key is supplied by the caller
  # (it is in `@required_fields` below), not generated by the database.
  @primary_key {:id, :id, autogenerate: false}
  schema "achievements" do
    field(:title, :string)
    field(:ability, :string)
    field(:card_tile_url, :string)
    field(:open_at, :utc_datetime)
    field(:close_at, :utc_datetime)
    field(:is_task, :boolean)
    field(:position, :integer, default: 0)
    field(:canvas_url, :string)
    field(:description, :string)
    field(:completion_text, :string)

    # `on_replace: :delete`: prerequisites omitted from an update are removed.
    has_many(:prerequisites, AchievementPrerequisite, on_replace: :delete)
    has_many(:goals, AchievementGoal)

    timestamps()
  end

  @required_fields ~w(id title ability is_task position)a
  @optional_fields ~w(card_tile_url open_at close_at canvas_url description completion_text)a

  @doc """
  Builds a changeset for an achievement from external params, validating required
  fields, the ability category, and nested prerequisite/goal associations.
  """
  def changeset(achievement, params) do
    # `convert_date/2` comes from `use Cadet, :model` — presumably normalizes
    # client-supplied date params into datetimes; TODO confirm against Cadet.Model.
    params =
      params
      |> convert_date(:open_at)
      |> convert_date(:close_at)

    achievement
    |> cast(params, @required_fields ++ @optional_fields)
    |> validate_required(@required_fields)
    |> validate_inclusion(:ability, @valid_abilities)
    |> cast_assoc(:prerequisites)
    |> cast_assoc(:goals)
  end
end
| 26.897959 | 93 | 0.710926 |
1c2999c91d0d054291152fb1b59d4f51b966dd25 | 381 | exs | Elixir | exercises/allergies/allergies.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/allergies/allergies.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/allergies/allergies.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
defmodule Allergies do
  import Bitwise

  # Allergens ordered by their flag bit: eggs = 1, peanuts = 2, shellfish = 4,
  # strawberries = 8, tomatoes = 16, chocolate = 32, pollen = 64, cats = 128.
  @allergens ~w(eggs peanuts shellfish strawberries tomatoes chocolate pollen cats)

  @doc """
  List the allergies for which the corresponding flag bit is true.

  Bits beyond the 8 known allergens (i.e. values >= 256) are ignored,
  so e.g. a score of 257 yields only `["eggs"]`.
  """
  @spec list(non_neg_integer) :: [String.t()]
  def list(flags) do
    # Keep each allergen whose bit (1 <<< index) is set in `flags`.
    for {allergen, index} <- Enum.with_index(@allergens),
        (flags >>> index &&& 1) == 1,
        do: allergen
  end

  @doc """
  Returns whether the corresponding flag bit in 'flags' is set for the item.
  """
  @spec allergic_to?(non_neg_integer, String.t()) :: boolean
  def allergic_to?(flags, item) do
    item in list(flags)
  end
end
| 23.8125 | 76 | 0.682415 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.