hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
08d6541507fcc36f18db33e8e141592d689715ae | 1,054 | ex | Elixir | lib/nerves_hub_ca_store.ex | nerves-hub/nerves_hub_ca_store | bc7beb0f1f50e031d603487123bf995b19cc07f4 | [
"Apache-2.0"
] | 1 | 2021-04-16T23:55:08.000Z | 2021-04-16T23:55:08.000Z | lib/nerves_hub_ca_store.ex | nerves-hub/nerves_hub_ca_store | bc7beb0f1f50e031d603487123bf995b19cc07f4 | [
"Apache-2.0"
] | null | null | null | lib/nerves_hub_ca_store.ex | nerves-hub/nerves_hub_ca_store | bc7beb0f1f50e031d603487123bf995b19cc07f4 | [
"Apache-2.0"
] | 1 | 2021-09-16T06:26:18.000Z | 2021-09-16T06:26:18.000Z | defmodule NervesHubCAStore do
@moduledoc """
Certificate Authority Store for the production NervesHub instance
#{File.read!("README.md") |> String.split("## Usage") |> Enum.at(1)}
"""
@type env :: :prod | :staging
@doc """
Returns DER encoded list of CA certificates
"""
@spec cacerts(env) :: [:public_key.der_encoded()]
def cacerts(env \\ :prod) do
  # Convert every PEM-decoded certificate record into its DER binary form.
  Enum.map(certificates(env), &X509.Certificate.to_der/1)
end
@doc """
Alias for NervesHubCAStore.cacerts/1
"""
@spec ca_certs(env) :: [:public_key.der_encoded()]
# Thin delegate kept so both naming conventions (`cacerts`/`ca_certs`) work.
def ca_certs(env \\ :prod), do: cacerts(env)
@doc """
CA Store as list of OTP compatible certificate records
"""
@spec certificates(env) :: [tuple()]
def certificates(env \\ :prod) do
  # Start the pipeline from the bare value rather than a function call.
  env
  |> file_path()
  |> File.read!()
  |> X509.from_pem()
end
@doc """
File path to cacerts.pem
"""
@spec file_path(env :: :prod | :staging) :: Path.t()
def file_path(env \\ :prod) do
  # The PEM bundle ships in priv/ssl/<env>/cacerts.pem inside this app.
  segments = ["priv", "ssl", to_string(env), "cacerts.pem"]
  Application.app_dir(:nerves_hub_ca_store, segments)
end
end
| 25.095238 | 87 | 0.634725 |
08d65e88842f0418517d5f4f328672ba0c09bbed | 164 | ex | Elixir | lib/battle_box_web/channels/user_socket.ex | GrantJamesPowell/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | 2 | 2020-10-17T05:48:49.000Z | 2020-11-11T02:34:15.000Z | lib/battle_box_web/channels/user_socket.ex | FlyingDutchmanGames/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | 3 | 2020-05-18T05:52:21.000Z | 2020-06-09T07:24:14.000Z | lib/battle_box_web/channels/user_socket.ex | FlyingDutchmanGames/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | null | null | null | defmodule BattleBoxWeb.UserSocket do
use Phoenix.Socket
# Accepts every connection as-is: no params are inspected and no assigns
# are added to the socket.
def connect(_params, socket, _connect_info), do: {:ok, socket}
# Anonymous sockets: per Phoenix.Socket, returning `nil` means connections
# are not identified, so they cannot be disconnected by broadcasting to an id.
def id(_socket), do: nil
end
| 16.4 | 48 | 0.719512 |
08d65fc3bd55064125b5a017f761e538bc87f88a | 9,507 | ex | Elixir | lib/strategy/gossip.ex | polyglot-concurrency/libcluster | 1a0640f2b39adc430a121add01ab2ae0a4ee35b2 | [
"MIT"
] | 1 | 2021-03-04T12:46:17.000Z | 2021-03-04T12:46:17.000Z | lib/strategy/gossip.ex | polyglot-concurrency/libcluster | 1a0640f2b39adc430a121add01ab2ae0a4ee35b2 | [
"MIT"
] | 1 | 2021-05-10T16:06:19.000Z | 2021-05-10T16:12:39.000Z | lib/strategy/gossip.ex | polyglot-concurrency/libcluster | 1a0640f2b39adc430a121add01ab2ae0a4ee35b2 | [
"MIT"
] | null | null | null | defmodule Cluster.Strategy.Gossip do
@moduledoc """
This clustering strategy uses multicast UDP to gossip node names
to other nodes on the network. These packets are listened for on
each node as well, and a connection will be established between the
two nodes if they are reachable on the network, and share the same
magic cookie. In this way, a cluster of nodes may be formed dynamically.
The gossip protocol is extremely simple, with a prelude followed by the node
name which sent the packet. The node name is parsed from the packet, and a
connection attempt is made. It will fail if the two nodes do not share a cookie.
By default, the gossip occurs on port 45892, using the multicast address 230.1.1.251
The gossip protocol is not encrypted by default, but can be by providing a secret
in the configuration of the strategy (as shown below).
This can also be used to run multiple clusters with the same multicast configuration,
as nodes not sharing the same encryption key will not be connected.
You may configure the multicast interface, multicast address, the interface address to bind to, the port,
the TTL of the packets and the optional secret using the following settings:
config :libcluster,
topologies: [
gossip_example: [
strategy: #{__MODULE__},
config: [
port: 45892,
if_addr: "0.0.0.0",
multicast_if: "192.168.1.1",
multicast_addr: "230.1.1.251",
multicast_ttl: 1,
secret: "somepassword"]]]
A TTL of 1 will limit packets to the local network, and is the default TTL.
Optionally, `broadcast_only: true` option can be set which disables multicast and
only uses broadcasting. This limits connectivity to local network but works on in
scenarios where multicast is not enabled. Use `multicast_addr` as the broadcast address.
Example for broadcast only:
config :libcluster,
topologies: [
gossip_example: [
strategy: #{__MODULE__},
config: [
port: 45892,
if_addr: "0.0.0.0",
multicast_addr: "255.255.255.255",
broadcast_only: true]]]
Debug logging is deactivated by default for this clustering strategy, but it can be easily activated by configuring the application:
use Mix.Config
config :libcluster,
debug: true
All the checks are done at runtime, so you can flip the debug level without being forced to shutdown your node.
"""
use GenServer
use Cluster.Strategy
import Cluster.Logger
alias Cluster.Strategy.State
@default_port 45892
@default_addr {0, 0, 0, 0}
@default_multicast_addr {230, 1, 1, 251}
@sol_socket 0xFFFF
@so_reuseport 0x0200
# Starts the gossip strategy process, linked to the caller.
def start_link(args), do: GenServer.start_link(__MODULE__, args)
@impl true
# Opens the UDP socket used both to broadcast our own heartbeats and to
# listen for heartbeats from peers, then stores the gossip settings in
# `state.meta` as `{multicast_addr, port, socket, secret}`.
def init([%State{config: config} = state]) do
  port = Keyword.get(config, :port, @default_port)

  ip =
    config
    |> Keyword.get(:if_addr, @default_addr)
    |> sanitize_ip()

  broadcast_only? = Keyword.get(config, :broadcast_only, false)
  ttl = Keyword.get(config, :multicast_ttl, 1)
  multicast_if = Keyword.get(config, :multicast_if)

  multicast_addr =
    config
    |> Keyword.get(:multicast_addr, @default_multicast_addr)
    |> sanitize_ip()

  # Broadcast-only mode skips multicast options entirely; when an explicit
  # `multicast_if` is configured it additionally pins the outgoing interface.
  multicast_opts =
    cond do
      broadcast_only? ->
        []

      multicast_if != nil ->
        [
          multicast_if: sanitize_ip(multicast_if),
          multicast_ttl: ttl,
          multicast_loop: true
        ]

      # `:else` is just a truthy atom serving as the catch-all branch.
      :else ->
        [
          multicast_ttl: ttl,
          multicast_loop: true
        ]
    end

  options =
    [
      :binary,
      active: true,
      ip: ip,
      reuseaddr: true,
      broadcast: true,
      # Join the multicast group; {0,0,0,0} lets the OS pick the interface.
      add_membership: {multicast_addr, {0, 0, 0, 0}}
    ] ++ multicast_opts ++ reuse_port()

  {:ok, socket} = :gen_udp.open(port, options)

  secret = Keyword.get(config, :secret, nil)
  state = %State{state | :meta => {multicast_addr, port, socket, secret}}

  # TODO: Remove this version check when we deprecate OTP < 21 support
  # OTP >= 21 supports `handle_continue`; older releases emulate it with a
  # zero timeout, which delivers `:timeout` immediately after init returns.
  # NOTE(review): this is a lexicographic charlist comparison — correct for
  # the 2-digit releases in use, but it would misclassify a 3-digit release.
  if :erlang.system_info(:otp_release) >= '21' do
    {:ok, state, {:continue, nil}}
  else
    {:ok, state, 0}
  end
end
# On BSD-derived systems SO_REUSEPORT must be set as a raw socket option so
# several nodes on the same host can bind the same gossip port. Everywhere
# else no extra option is needed.
defp reuse_port() do
  with {:unix, os_name} when os_name in [:darwin, :freebsd, :openbsd, :netbsd] <-
         :os.type() do
    [{:raw, @sol_socket, @so_reuseport, <<1::native-32>>}]
  else
    _ -> []
  end
end
# Normalizes an IP given either as a dotted-quad string or a 4-tuple into
# the tuple form `:gen_udp` expects. Crashes on unparseable strings — an
# invalid address in config is a programmer error.
defp sanitize_ip(input) do
  case input do
    ip when is_binary(ip) ->
      {:ok, parsed} = ip |> to_charlist() |> :inet.parse_ipv4_address()
      parsed

    {_, _, _, _} = ip ->
      ip
  end
end
# Send stuttered heartbeats
# TODO: Remove this version check when we deprecate OTP < 21 support
# On OTP >= 21 `init/1` returns `{:continue, nil}` and the first heartbeat
# fires from `handle_continue/2`; on older OTP `init/1` returns a zero
# timeout and the resulting `:timeout` message plays the same role.
if :erlang.system_info(:otp_release) >= '21' do
  @impl true
  def handle_continue(_, state), do: handle_info(:heartbeat, state)
else
  @impl true
  def handle_info(:timeout, state), do: handle_info(:heartbeat, state)
end
@impl true
# Broadcasts one heartbeat packet for this node, then reschedules itself
# after a random delay (up to 5s) so peers' heartbeats don't synchronize.
def handle_info(:heartbeat, %State{meta: {multicast_addr, port, socket, _}} = state) do
  debug(state.topology, "heartbeat")
  :gen_udp.send(socket, multicast_addr, port, heartbeat(node(), state))
  Process.send_after(self(), :heartbeat, :rand.uniform(5_000))
  {:noreply, state}
end
# Handle received heartbeats
# Plaintext mode (no secret configured): only packets carrying the
# "heartbeat::" prelude are processed.
def handle_info(
      {:udp, _socket, _ip, _port, <<"heartbeat::", _::binary>> = packet},
      %State{meta: {_, _, _, secret}} = state
    )
    when is_nil(secret) do
  handle_heartbeat(state, packet)
  {:noreply, state}
end

# Encrypted mode: the first 16 bytes are the AES-CBC IV, the rest is the
# ciphertext. Packets that fail to decrypt (wrong secret, i.e. a different
# cluster) are silently dropped.
def handle_info(
      {:udp, _socket, _ip, _port, <<iv::binary-size(16)>> <> ciphertext},
      %State{meta: {_, _, _, secret}} = state
    )
    when is_binary(secret) do
  case decrypt(ciphertext, secret, iv) do
    {:ok, plaintext} ->
      handle_heartbeat(state, plaintext)
      {:noreply, state}

    _ ->
      {:noreply, state}
  end
end

# Any other UDP payload is ignored.
def handle_info({:udp, _socket, _ip, _port, _}, state) do
  {:noreply, state}
end
@impl true
# Closes the gossip socket when the strategy process shuts down.
def terminate(_reason, %State{meta: {_, _, socket, _}}) do
  :gen_udp.close(socket)
  :ok
end
# Construct iodata representing packet to send
# Plaintext packet: "heartbeat::" prelude followed by the term-encoded
# node name.
defp heartbeat(node_name, %State{meta: {_, _, _, secret}})
     when is_nil(secret) do
  ["heartbeat::", :erlang.term_to_binary(%{node: node_name})]
end

# Encrypted packet: [iv, ciphertext]. The prelude is inside the ciphertext,
# so successful decryption also proves the sender shares our secret.
defp heartbeat(node_name, %State{meta: {_, _, _, secret}}) when is_binary(secret) do
  message = "heartbeat::" <> :erlang.term_to_binary(%{node: node_name})
  {:ok, iv, msg} = encrypt(message, secret)
  [iv, msg]
end
# Upon receipt of a heartbeat, we check to see if the node
# is connected to us, and if not, we connect to it.
# If the connection fails, it's likely because the cookie
# is different, and thus a node we can ignore
@spec handle_heartbeat(State.t(), binary) :: :ok
defp handle_heartbeat(%State{} = state, <<"heartbeat::", rest::binary>>) do
  self = node()
  connect = state.connect
  list_nodes = state.list_nodes
  topology = state.topology

  # SECURITY NOTE(review): `:erlang.binary_to_term/1` on network input can
  # build arbitrary terms and create new atoms. The `:safe` option is not
  # used here — presumably because peer node names are legitimately new
  # atoms — so the shared-secret encrypted mode is the only mitigation.
  # Confirm that trade-off is acceptable for the deployment.
  case :erlang.binary_to_term(rest) do
    # Our own heartbeat looped back to us — nothing to do.
    %{node: ^self} ->
      :ok

    %{node: n} when is_atom(n) ->
      debug(state.topology, "received heartbeat from #{n}")
      Cluster.Strategy.connect_nodes(topology, connect, list_nodes, [n])
      :ok

    _ ->
      :ok
  end
end

# Payloads without the expected prelude (after decryption) are ignored.
defp handle_heartbeat(_state, _packet) do
  :ok
end
# Encrypts `plaintext` with AES-256-CBC under a SHA-256-derived key, using
# a fresh random IV per packet. Returns `{:ok, iv, ciphertext}`.
# NOTE(review): `:crypto.block_encrypt/4` was deprecated in OTP 23 and
# removed in OTP 24; `:crypto.crypto_one_time/5` is the replacement once
# the OTP < 21 support noted elsewhere in this module is dropped.
defp encrypt(plaintext, password) do
  iv = :crypto.strong_rand_bytes(16)
  key = :crypto.hash(:sha256, password)
  ciphertext = :crypto.block_encrypt(:aes_cbc256, key, iv, pkcs7_pad(plaintext))
  {:ok, iv, ciphertext}
end
# Decrypts an AES-256-CBC payload and strips its PKCS #7 padding.
# Returns `{:ok, plaintext}`, `{:error, :decrypt}` when decryption fails,
# or `{:error, :unpadding}` when the padding is invalid.
#
# Fix: the original tagged the decryption step `:unpadding` and the
# unpadding step `:decrypt`, then swapped them back again in `else` — the
# returned error atoms were correct, but the labels were misleading. The
# tags now match the step they wrap; returned values are unchanged.
defp decrypt(ciphertext, password, iv) do
  key = :crypto.hash(:sha256, password)

  with {:decrypt, {:ok, padded}} <- {:decrypt, safe_decrypt(key, iv, ciphertext)},
       {:unpadding, {:ok, _plaintext} = res} <- {:unpadding, pkcs7_unpad(padded)} do
    res
  else
    {:decrypt, :error} -> {:error, :decrypt}
    {:unpadding, :error} -> {:error, :unpadding}
  end
end
# Runs AES-256-CBC decryption, converting the `ArgumentError` that `:crypto`
# raises on malformed input (e.g. a ciphertext that is not a multiple of the
# block size) into `:error` instead of crashing the gossip process.
# NOTE(review): `:crypto.block_decrypt/4` was removed in OTP 24 — see the
# note on `encrypt/2`.
defp safe_decrypt(key, iv, ciphertext) do
  try do
    {:ok, :crypto.block_decrypt(:aes_cbc256, key, iv, ciphertext)}
  rescue
    ArgumentError ->
      :error
  end
end
#
# Pads a message using the PKCS #7 cryptographic message syntax.
#
# from: https://github.com/izelnakri/aes256/blob/master/lib/aes256.ex
#
# See: https://tools.ietf.org/html/rfc2315
# See: `pkcs7_unpad/1`
#
# A message already on a 16-byte boundary receives a full block of padding
# (16 bytes of <<16>>), so padding is always present and removable.
defp pkcs7_pad(message) do
  pad = 16 - rem(byte_size(message), 16)
  message <> :binary.copy(<<pad>>, pad)
end
#
# Unpads a message using the PKCS #7 cryptographic message syntax.
#
# from: https://github.com/izelnakri/aes256/blob/master/lib/aes256.ex
#
# See: https://tools.ietf.org/html/rfc2315
# See: `pkcs7_pad/1`
#
# Returns `{:ok, message}` with the padding stripped, or `:error` when the
# trailing bytes do not form valid PKCS #7 padding.
defp pkcs7_unpad(<<>>), do: :error

defp pkcs7_unpad(message) do
  padding_size = :binary.last(message)
  message_size = byte_size(message)

  # Fix: valid PKCS #7 padding is 1..16 bytes. The original accepted a
  # trailing 0 byte as "valid" (zero-length compare, message returned
  # unchanged) and raised in `binary_part/3` when the padding byte exceeded
  # the message length; both cases now return :error.
  if padding_size in 1..16 and padding_size <= message_size and
       binary_part(message, message_size, -padding_size) ===
         :binary.copy(<<padding_size>>, padding_size) do
    {:ok, binary_part(message, 0, message_size - padding_size)}
  else
    :error
  end
end
end
| 28.896657 | 134 | 0.631219 |
08d666f0768cf3400eaa774aae8d7785a5f6c486 | 2,010 | ex | Elixir | clients/games_management/lib/google_api/games_management/v1management/model/player_score_reset_all_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/games_management/lib/google_api/games_management/v1management/model/player_score_reset_all_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/games_management/lib/google_api/games_management/v1management/model/player_score_reset_all_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GamesManagement.V1management.Model.PlayerScoreResetAllResponse do
  @moduledoc """
  A list of leaderboard reset resources.

  ## Attributes

  *   `kind` (*type:* `String.t`, *default:* `nil`) - Uniquely identifies the type of this resource. Value is always the fixed string `gamesManagement#playerScoreResetAllResponse`.
  *   `results` (*type:* `list(GoogleApi.GamesManagement.V1management.Model.PlayerScoreResetResponse.t)`, *default:* `nil`) - The leaderboard reset results.
  """

  # Generated model: ModelBase supplies the struct, field metadata and
  # encode/decode plumbing used by the Poison protocol impls below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :kind => String.t(),
          :results =>
            list(GoogleApi.GamesManagement.V1management.Model.PlayerScoreResetResponse.t())
        }

  field(:kind)

  # `results` holds nested model structs, so it is declared with an `as:`
  # target and decoded as a list.
  field(:results,
    as: GoogleApi.GamesManagement.V1management.Model.PlayerScoreResetResponse,
    type: :list
  )
end
defimpl Poison.Decoder,
  for: GoogleApi.GamesManagement.V1management.Model.PlayerScoreResetAllResponse do
  # Delegates to the generated decode/2 so nested `results` entries are
  # converted into their model structs.
  def decode(value, options) do
    GoogleApi.GamesManagement.V1management.Model.PlayerScoreResetAllResponse.decode(
      value,
      options
    )
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.GamesManagement.V1management.Model.PlayerScoreResetAllResponse do
  # Shared ModelBase encoder handles nil-field pruning and nested models.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.5 | 180 | 0.744279 |
08d673a1719bf7dffccd1fdac396f6e1623d7e17 | 287 | ex | Elixir | test/support/user/reflection/elixir_target.ex | kianmeng/patch | d7d8d70a0285129ec67a43473db587011524fe0c | [
"MIT"
] | 57 | 2020-04-22T00:19:04.000Z | 2022-03-20T11:57:00.000Z | test/support/user/reflection/elixir_target.ex | kianmeng/patch | d7d8d70a0285129ec67a43473db587011524fe0c | [
"MIT"
] | 9 | 2021-10-29T20:54:56.000Z | 2022-02-19T03:41:01.000Z | test/support/user/reflection/elixir_target.ex | kianmeng/patch | d7d8d70a0285129ec67a43473db587011524fe0c | [
"MIT"
] | 2 | 2021-07-02T14:41:48.000Z | 2022-01-12T11:47:26.000Z | defmodule Patch.Test.Support.User.Reflection.ElixirTarget do
# Delegates to the private helper, giving callers an indirect path to it.
def public_function(a) do
  private_function(a)
end
# Arity-2 variant: tags both arguments in an ok-tuple.
def public_function(a, b), do: {:ok, a, b}
# Zero-arity function that always returns :ok.
def other_public_function, do: :ok
## Private
# Private passthrough reached via public_function/1.
defp private_function(a), do: {:ok, a}
end
| 14.35 | 60 | 0.672474 |
08d6989fbd80bca848cb5eed6ec059c8abbf8a31 | 1,705 | exs | Elixir | test/xdr/ledger_entries/asset_code_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 3 | 2021-08-17T20:32:45.000Z | 2022-03-13T20:26:02.000Z | test/xdr/ledger_entries/asset_code_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 45 | 2021-08-12T20:19:41.000Z | 2022-03-27T21:00:10.000Z | test/xdr/ledger_entries/asset_code_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 2 | 2021-09-22T23:11:13.000Z | 2022-01-23T03:19:11.000Z | defmodule StellarBase.XDR.AssetCodeTest do
use ExUnit.Case
alias StellarBase.XDR.{AssetCode, AssetCode4, AssetType}
describe "AssetCode" do
  setup do
    # Shared fixtures: a 4-character asset code plus its matching type tag.
    asset = AssetCode4.new("BTCN")
    asset_type = AssetType.new(:ASSET_TYPE_CREDIT_ALPHANUM4)

    %{
      asset: asset,
      asset_type: asset_type,
      asset_code: AssetCode.new(asset, asset_type),
      # Expected XDR wire form: 4-byte discriminant (1) followed by "BTCN".
      binary: <<0, 0, 0, 1, 66, 84, 67, 78>>
    }
  end

  test "new/1", %{asset: asset, asset_type: asset_type} do
    %AssetCode{asset: ^asset, type: ^asset_type} = AssetCode.new(asset, asset_type)
  end

  test "encode_xdr/1", %{asset_code: asset_code, binary: binary} do
    {:ok, ^binary} = AssetCode.encode_xdr(asset_code)
  end

  # An unknown type atom must be rejected at encode time, not silently
  # serialized.
  test "encode_xdr/1 with an invalid type", %{asset: asset} do
    asset_type = AssetType.new(:NEW_BITCOIN)

    assert_raise XDR.EnumError,
                 "The key which you try to encode doesn't belong to the current declarations",
                 fn ->
                   asset
                   |> AssetCode.new(asset_type)
                   |> AssetCode.encode_xdr()
                 end
  end

  test "encode_xdr!/1", %{asset_code: asset_code, binary: binary} do
    ^binary = AssetCode.encode_xdr!(asset_code)
  end

  test "decode_xdr/2", %{asset_code: asset_code, binary: binary} do
    {:ok, {^asset_code, ""}} = AssetCode.decode_xdr(binary)
  end

  test "decode_xdr/2 with an invalid binary" do
    {:error, :not_binary} = AssetCode.decode_xdr(123)
  end

  test "decode_xdr!/2", %{asset_code: asset_code, binary: binary} do
    # Trailing bytes come back as the rest, so decoding `binary <> binary`
    # yields the struct plus one leftover copy of `binary`.
    {^asset_code, ^binary} = AssetCode.decode_xdr!(binary <> binary)
  end
end
end
| 30.446429 | 96 | 0.614663 |
08d6a174a0393aecfcd948e3c55b458b8ce1f498 | 132 | exs | Elixir | test/my_way_test.exs | JoshRagem/my_way | e3d34906471b99c0a1b3165e655378953ad91dfe | [
"MIT"
] | null | null | null | test/my_way_test.exs | JoshRagem/my_way | e3d34906471b99c0a1b3165e655378953ad91dfe | [
"MIT"
] | null | null | null | test/my_way_test.exs | JoshRagem/my_way | e3d34906471b99c0a1b3165e655378953ad91dfe | [
"MIT"
] | null | null | null | defmodule MyWayTest do
use ExUnit.Case
doctest MyWay
# Smoke test for the generated hello/0 stub.
test "greets the world" do
  assert MyWay.hello() == :world
end
end
| 14.666667 | 34 | 0.69697 |
08d6a8b1ae8a60757e62a758a4aec8ac8aee8250 | 139 | exs | Elixir | config/config.exs | zven21/footprint | ebbc4ba6c7b95cdfab28ea406146f2de90e58211 | [
"MIT"
] | 2 | 2018-08-23T08:30:01.000Z | 2019-06-17T08:09:19.000Z | config/config.exs | zven21/footprint | ebbc4ba6c7b95cdfab28ea406146f2de90e58211 | [
"MIT"
] | 3 | 2020-07-17T07:15:56.000Z | 2021-05-09T04:18:19.000Z | config/config.exs | zven21/footprint | ebbc4ba6c7b95cdfab28ea406146f2de90e58211 | [
"MIT"
] | null | null | null | use Mix.Config
# Suppress console logging below :error for all environments.
config :logger, :console, level: :error

config :footprint, ecto_repos: [Footprint.Repo]

# Load environment-specific overrides (dev.exs / test.exs / prod.exs).
import_config "#{Mix.env()}.exs"
| 17.375 | 47 | 0.726619 |
08d6d28d82db582168bccfcb7692aa458de76b58 | 1,352 | ex | Elixir | samples/client/petstore/elixir/lib/openapi_petstore/model/additional_properties_class.ex | therockstorm/openapi-generator | 01d0b5d4780ebe2d6025e2b443ec136c6ce16c45 | [
"Apache-2.0"
] | 3 | 2021-05-19T03:12:48.000Z | 2022-01-28T19:15:42.000Z | samples/client/petstore/elixir/lib/openapi_petstore/model/additional_properties_class.ex | therockstorm/openapi-generator | 01d0b5d4780ebe2d6025e2b443ec136c6ce16c45 | [
"Apache-2.0"
] | 10 | 2021-03-09T14:12:46.000Z | 2022-02-27T11:42:16.000Z | samples/client/petstore/elixir/lib/openapi_petstore/model/additional_properties_class.ex | therockstorm/openapi-generator | 01d0b5d4780ebe2d6025e2b443ec136c6ce16c45 | [
"Apache-2.0"
] | 5 | 2020-11-26T05:13:41.000Z | 2021-04-09T15:58:18.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule OpenapiPetstore.Model.AdditionalPropertiesClass do
  @moduledoc """
  Model whose fields are `additionalProperties`-style maps (string-keyed,
  with various value types) and free-form (anytype) values.
  """

  @derive [Poison.Encoder]
  defstruct [
    :"map_string",
    :"map_number",
    :"map_integer",
    :"map_boolean",
    :"map_array_integer",
    :"map_array_anytype",
    :"map_map_string",
    :"map_map_anytype",
    :"anytype_1",
    :"anytype_2",
    :"anytype_3"
  ]

  @type t :: %__MODULE__{
          :"map_string" => %{optional(String.t) => String.t} | nil,
          :"map_number" => %{optional(String.t) => float()} | nil,
          :"map_integer" => %{optional(String.t) => integer()} | nil,
          :"map_boolean" => %{optional(String.t) => boolean()} | nil,
          :"map_array_integer" => %{optional(String.t) => [integer()]} | nil,
          :"map_array_anytype" => %{optional(String.t) => [Map]} | nil,
          :"map_map_string" => %{optional(String.t) => %{optional(String.t) => String.t}} | nil,
          :"map_map_anytype" => %{optional(String.t) => %{optional(String.t) => Map}} | nil,
          :"anytype_1" => Map | nil,
          :"anytype_2" => Map | nil,
          :"anytype_3" => Map | nil
        }
end
defimpl Poison.Decoder, for: OpenapiPetstore.Model.AdditionalPropertiesClass do
  # Decoding is a no-op here: no fields reference nested model structs that
  # would need to be built after JSON parsing.
  def decode(value, _options) do
    value
  end
end
| 29.391304 | 91 | 0.620562 |
08d71e45bade1f1c56484a217be60d50efc26c82 | 753 | ex | Elixir | lib/phoenix_api_auth_starter_web/gettext.ex | CMcDonald82/phoenix-api-auth-starter | 916db91ceba32399b8d30cc6a6e35804bc0d18b1 | [
"MIT"
] | null | null | null | lib/phoenix_api_auth_starter_web/gettext.ex | CMcDonald82/phoenix-api-auth-starter | 916db91ceba32399b8d30cc6a6e35804bc0d18b1 | [
"MIT"
] | null | null | null | lib/phoenix_api_auth_starter_web/gettext.ex | CMcDonald82/phoenix-api-auth-starter | 916db91ceba32399b8d30cc6a6e35804bc0d18b1 | [
"MIT"
] | null | null | null | defmodule PhoenixApiAuthStarterWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import PhoenixApiAuthStarterWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
# Injects the gettext/dgettext/ngettext macros documented above, backed by
# this OTP app's translation catalogs.
use Gettext, otp_app: :phoenix_api_auth_starter
end
| 30.12 | 72 | 0.699867 |
08d77a32a4e5185341dec3adcbd317032bd437b0 | 952 | ex | Elixir | lib/applicative/law.ex | rob-brown/MonadEx | a8b15e3207c0efa53749c4574496da70a6ca8f9d | [
"MIT"
] | 325 | 2015-03-03T01:44:05.000Z | 2022-03-25T20:29:58.000Z | lib/applicative/law.ex | rob-brown/MonadEx | a8b15e3207c0efa53749c4574496da70a6ca8f9d | [
"MIT"
] | 11 | 2015-05-29T13:33:20.000Z | 2021-05-11T13:48:14.000Z | lib/applicative/law.ex | rob-brown/MonadEx | a8b15e3207c0efa53749c4574496da70a6ca8f9d | [
"MIT"
] | 17 | 2015-04-18T09:46:50.000Z | 2019-11-12T21:28:21.000Z | defmodule Applicative.Law do
@moduledoc false
import Curry
use Monad.Operators
# http://hackage.haskell.org/package/base-4.7.0.2/docs/Control-Applicative.html
# http://staff.city.ac.uk/~ross/papers/Applicative.pdf
# pure id <*> v = v
# Identity law: applying a lifted identity function must leave the
# applicative value untouched.
def identity?(applicative, pure_fun) do
  pure_fun.(& &1) <~> applicative == applicative
end
# pure (.) <*> u <*> v <*> w = u <*> (v <*> w)
# Composition law: lifting curried function composition and applying it to
# the three wrapped functions must equal applying them right-associatively.
def composition?(fun1, fun2, fun3, value, pure_fun) do
  lhs = pure_fun.(curry(& &1.(&2.(&3.(&4))))) <~> fun1 <~> fun2 <~> fun3 <~> value
  rhs = fun1 <~> (fun2 <~> (fun3 <~> value))
  lhs == rhs
end
# pure f <*> pure x = pure (f x)
# Homomorphism law: lifting then applying must equal applying then lifting.
def homomorphism?(fun, value, pure_fun) do
  pure_fun.(fun) <~> pure_fun.(value) == pure_fun.(fun.(value))
end
# u <*> pure y = pure ($ y) <*> u
# Interchange law: applying a wrapped function to a pure value must equal
# applying a lifted "apply-to-y" to that wrapped function.
def interchange?(applicative_fun, value, pure_fun) do
  applicative_fun <~> pure_fun.(value) == pure_fun.(& &1.(value)) <~> applicative_fun
end
end
| 29.75 | 87 | 0.60084 |
08d7c96cf1b19eca91720f3947fe0bd890e6334e | 4,134 | ex | Elixir | projects/api/lib/margaret_web/resolvers/accounts.ex | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 82 | 2017-11-06T01:00:55.000Z | 2020-12-09T10:35:29.000Z | projects/api/lib/margaret_web/resolvers/accounts.ex | dbstratta/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 98 | 2017-11-06T22:57:32.000Z | 2020-07-03T04:46:39.000Z | projects/api/lib/margaret_web/resolvers/accounts.ex | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 10 | 2017-11-16T05:31:58.000Z | 2020-10-29T18:02:35.000Z | defmodule MargaretWeb.Resolvers.Accounts do
@moduledoc """
The Account GraphQL resolvers.
"""
import Margaret.Helpers, only: [ok: 1]
alias MargaretWeb.Helpers
alias Margaret.{
Accounts,
Stories,
Stars,
Bookmarks,
Follows,
Notifications
}
alias Accounts.User
@doc """
Resolves the currently logged in user.
"""
# `viewer` is read from the resolution context (set upstream, presumably by
# the auth pipeline — confirm); the clause only matches authenticated requests.
def resolve_viewer(_, %{context: %{viewer: viewer}}), do: ok(viewer)
@doc """
Resolves a user by its username.
"""
def resolve_user(%{username: username}, _) do
  case Accounts.get_user_by_username(username) do
    %User{} = user -> ok(user)
    # Missing users surface as a domain-specific GraphQL error, not nil.
    nil -> Helpers.GraphQLErrors.user_not_found()
  end
end
@doc """
Resolves a connection of stories of a user.

The author can see their unlisted stories and drafts,
other users only can see their public stories.
"""
def resolve_stories(author, args, _) do
  # Scope the story query to this author; visibility rules are applied
  # inside `Stories.stories/1`.
  args
  |> Map.put(:author, author)
  |> Stories.stories()
end
@doc """
Resolves the connection of followers of a user.
"""
def resolve_followers(followee, args, _) do
  # Scope the query to users following `followee`.
  args
  |> Map.put(:user, followee)
  |> Follows.followers()
end
@doc """
Resolves the connection of followees of a user.
"""
def resolve_followees(follower, args, _) do
  # Scope the query to users that `follower` follows.
  args
  |> Map.put(:follower, follower)
  |> Follows.followees()
end
@doc """
Resolves the connection of starrables the user starred.
"""
def resolve_starred(user, args, _) do
  # Scope the starred-items query to this user.
  args
  |> Map.put(:user, user)
  |> Stars.starred()
end
@doc """
Resolves the connection of bookmarkables the user bookmarked.

Bookmarks are only visible to the user who bookmarked.
"""
# The same `user_id` is pinned in both the resolved user and the viewer,
# so this clause only matches when the viewer asks for their own bookmarks.
def resolve_bookmarked(%{id: user_id} = user, args, %{context: %{viewer: %{id: user_id}}}) do
  args
  |> Map.put(:user, user)
  |> Bookmarks.bookmarked()
end

# Anyone else — including anonymous viewers — is rejected.
def resolve_bookmarked(_, _, _) do
  Helpers.GraphQLErrors.unauthorized()
end
@doc """
Resolves the publications of the user.
"""
# NOTE(review): `Publications` is not in this module's alias list, so this
# resolves to a top-level `Publications` module — confirm it exists or add
# `Publications` to the `alias Margaret.{...}` block. The call style also
# differs from the sibling resolvers (direct args instead of `Map.put/3`).
def resolve_publications(member, args, _) do
  Publications.publications(member, args)
end
@doc """
Resolves the notifications of the user.

Only the currently authenticated user can see their
notifications.
"""
def resolve_notifications(user, args, %{context: %{viewer: viewer}}) do
  # Authorization check in the body (not the clause head) so unauthorized
  # access returns an explicit GraphQL error.
  if user.id === viewer.id do
    args
    |> Map.put(:notified_user, user)
    |> Notifications.notifications()
  else
    Helpers.GraphQLErrors.unauthorized()
  end
end
@doc """
Resolves a connection of users.
"""
def resolve_users(args, _) do
  Accounts.users(args)
end
@doc """
Resolves the update of the viewer.
"""
def resolve_update_viewer(attrs, %{context: %{viewer: viewer}}) do
  do_resolve_update_user(viewer, attrs)
end
@doc """
Resolves the deactivation of the user.
"""
def resolve_deactivate_viewer(_, %{context: %{viewer: viewer}}) do
  # Presumably `now` stamps the deactivation time on the user record —
  # confirm against `Accounts.update_user/2`.
  attrs = %{now: NaiveDateTime.utc_now()}
  do_resolve_update_user(viewer, attrs)
end
# Shared helper for viewer mutations: applies `attrs` via the Accounts
# context and wraps a successful result in the `%{viewer: user}` payload
# shape these mutations return.
defp do_resolve_update_user(user, attrs) do
  case Accounts.update_user(user, attrs) do
    {:ok, %User{} = viewer} -> ok(%{viewer: viewer})
    {:error, changeset} -> {:error, changeset}
  end
end
@doc """
Resolves the mark of the viewer for deletion.
"""
def resolve_mark_viewer_for_deletion(_, %{context: %{viewer: viewer}}) do
  # The 4-tuple error suggests a multi-step transaction (Ecto.Multi-style)
  # inside `mark_user_for_deletion/1` — confirm in the Accounts context.
  case Accounts.mark_user_for_deletion(viewer) do
    {:ok, _} -> ok(%{viewer: viewer})
    {:error, _, changeset, _} -> {:error, changeset}
  end
end
@doc """
Resolves if the user is the viewer.
"""
def resolve_is_viewer(user, _, %{context: %{viewer: viewer}}) do
  # Strict equality on ids, wrapped in the {:ok, _} shape via ok/1.
  ok(user.id === viewer.id)
end
@doc """
Resolves if the viewer can follow the user.
"""
def resolve_viewer_can_follow(user, _, %{context: %{viewer: viewer}}) do
  # The Follows context takes a keyword list naming both sides of the edge.
  [follower: viewer, user: user]
  |> Follows.can_follow?()
  |> ok()
end
@doc """
Resolves whether the viewer has followed this user.
"""
def resolve_viewer_has_followed(user, _, %{context: %{viewer: viewer}}) do
  [follower: viewer, user: user]
  |> Follows.has_followed?()
  |> ok()
end
| 23.094972 | 95 | 0.646105 |
08d7e4e3e91887eb550bda5858b36a84ed8f7953 | 1,262 | ex | Elixir | test/support/conn_case.ex | showwin/phx_todo_app | 3608f120cb149019219add4ca63e711698a56aab | [
"MIT"
] | null | null | null | test/support/conn_case.ex | showwin/phx_todo_app | 3608f120cb149019219add4ca63e711698a56aab | [
"MIT"
] | null | null | null | test/support/conn_case.ex | showwin/phx_todo_app | 3608f120cb149019219add4ca63e711698a56aab | [
"MIT"
] | null | null | null | defmodule PhxTodoAppWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use PhxTodoAppWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
# Injected into every test module that `use`s this case.
using do
  quote do
    # Import conveniences for testing with connections
    import Plug.Conn
    import Phoenix.ConnTest
    import PhxTodoAppWeb.ConnCase

    alias PhxTodoAppWeb.Router.Helpers, as: Routes

    # The default endpoint for testing
    @endpoint PhxTodoAppWeb.Endpoint
  end
end
setup tags do
  # Each test checks out its own SQL sandbox connection; changes roll back
  # when the test ends.
  :ok = Ecto.Adapters.SQL.Sandbox.checkout(PhxTodoApp.Repo)

  # Non-async tests switch to :shared mode so processes spawned by the test
  # (e.g. the endpoint) reuse this connection.
  unless tags[:async] do
    Ecto.Adapters.SQL.Sandbox.mode(PhxTodoApp.Repo, {:shared, self()})
  end

  {:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 28.681818 | 72 | 0.729002 |
08d835844c7a216d2835ccf5dfc73d786bc31451 | 441 | exs | Elixir | priv/repo/migrations/20191211205008_create_repositories.exs | vsyrovat/awesome_elixir | cfac2de5402c13f86f99032477daee883b31c020 | [
"MIT"
] | 2 | 2019-12-17T11:40:54.000Z | 2019-12-22T17:15:29.000Z | priv/repo/migrations/20191211205008_create_repositories.exs | vsyrovat/funbox_awesome_elixir | cfac2de5402c13f86f99032477daee883b31c020 | [
"MIT"
] | 5 | 2020-10-08T20:36:07.000Z | 2020-10-08T21:27:28.000Z | priv/repo/migrations/20191211205008_create_repositories.exs | vsyrovat/awesome-elixir | cfac2de5402c13f86f99032477daee883b31c020 | [
"MIT"
] | null | null | null | defmodule App.Repo.Migrations.CreateRepositories do
use Ecto.Migration
# Creates the repositories table with a unique external alias.
#
# Fix: the original used `nullable: false`, which is not an Ecto.Migration
# option — Ecto silently ignores unknown column options, so the column was
# created WITHOUT the intended NOT NULL constraint. The correct key is
# `null: false`.
def change do
  create table(:repositories) do
    add :alias, :string, null: false
    add :url, :string
    add :name, :string
    add :description, :string
    add :stars, :integer
    add :pushed_at, :naive_datetime
    add :checked_at, :naive_datetime

    timestamps()
  end

  # `alias` is the lookup key, so enforce uniqueness at the DB level.
  create unique_index(:repositories, [:alias])
end
end
| 22.05 | 51 | 0.662132 |
08d850cb5a6aa685660576872179846743e86998 | 17,826 | exs | Elixir | test/positioner/changeset_test.exs | mentero/Positioner | c1771b064a35805bf495b9974459c67f45764543 | [
"MIT"
] | 1 | 2021-07-26T11:47:56.000Z | 2021-07-26T11:47:56.000Z | test/positioner/changeset_test.exs | mentero/Positioner | c1771b064a35805bf495b9974459c67f45764543 | [
"MIT"
] | null | null | null | test/positioner/changeset_test.exs | mentero/Positioner | c1771b064a35805bf495b9974459c67f45764543 | [
"MIT"
] | null | null | null | defmodule Positioner.ChangesetTest do
use Positioner.TestCase
import Positioner.TestHelpers
alias Positioner.Repo
  # Inserting through Dummy.create_changeset/3: Positioner assigns the next
  # free position within the record's scope (the tenant), or makes room at a
  # requested position by shifting later siblings down by one.
  describe "Inserting new record" do
    test "at first position if there are no other records" do
      tenant = insert_tenant!()
      assert %{id: dummy_1, position: 1} =
               %Dummy{}
               |> Dummy.create_changeset(tenant, %{})
               |> Repo.insert!()
      assert [%{id: ^dummy_1, position: 1}] = all_dummies!()
    end
    test "at the end if there are multiple records" do
      tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(position: 1, tenant: tenant)
      %{id: dummy_2} = insert_dummy!(position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(position: 3, tenant: tenant)
      assert %{id: dummy_4, position: 4} =
               %Dummy{}
               |> Dummy.create_changeset(tenant, %{})
               |> Repo.insert!()
      assert [
               %{id: ^dummy_1, position: 1},
               %{id: ^dummy_2, position: 2},
               %{id: ^dummy_3, position: 3},
               %{id: ^dummy_4, position: 4}
             ] = all_dummies!()
    end
    # An explicit "position" shifts every record at or after it by one.
    test "in between other records" do
      tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(position: 1, tenant: tenant)
      %{id: dummy_2} = insert_dummy!(position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(position: 3, tenant: tenant)
      assert %{id: dummy_4, position: 2} =
               %Dummy{}
               |> Dummy.create_changeset(tenant, %{"position" => 2})
               |> Repo.insert!()
      assert [
               %{id: ^dummy_1, position: 1},
               %{id: ^dummy_4, position: 2},
               %{id: ^dummy_2, position: 3},
               %{id: ^dummy_3, position: 4}
             ] = all_dummies!()
    end
    # A requested position past the end clamps to the next free slot.
    test "at the position far above the scope" do
      tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(position: 1, tenant: tenant)
      %{id: dummy_2} = insert_dummy!(position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(position: 3, tenant: tenant)
      assert %{id: dummy_4, position: 4} =
               %Dummy{}
               |> Dummy.create_changeset(tenant, %{"position" => 100})
               |> Repo.insert!()
      assert [
               %{id: ^dummy_1, position: 1},
               %{id: ^dummy_2, position: 2},
               %{id: ^dummy_3, position: 3},
               %{id: ^dummy_4, position: 4}
             ] = all_dummies!()
    end
    # Records belonging to another tenant must keep their positions.
    test "respects the scope" do
      tenant = insert_tenant!()
      another_tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(position: 1, tenant: tenant)
      %{id: dummy_2} = insert_dummy!(position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(position: 3, tenant: tenant)
      %{id: another_dummy_1} = insert_dummy!(position: 1, tenant: another_tenant)
      %{id: another_dummy_2} = insert_dummy!(position: 2, tenant: another_tenant)
      %{id: another_dummy_3} = insert_dummy!(position: 3, tenant: another_tenant)
      assert %{id: dummy_4, position: 2} =
               %Dummy{}
               |> Dummy.create_changeset(tenant, %{"position" => 2})
               |> Repo.insert!()
      assert [
               %{id: ^dummy_1, position: 1},
               %{id: ^dummy_4, position: 2},
               %{id: ^dummy_2, position: 3},
               %{id: ^dummy_3, position: 4},
               %{id: ^another_dummy_1, position: 1},
               %{id: ^another_dummy_2, position: 2},
               %{id: ^another_dummy_3, position: 3}
             ] = all_dummies!()
    end
    # NOTE(review): the expected result shows nil-position rows being pushed
    # to positions 3 and 4 after the insert; relies on all_dummies!() sorting
    # nil positions last — verify in Positioner.TestHelpers.
    test "some records have nil position (don't know, but hey! programming!)" do
      tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(position: 1, tenant: tenant)
      %{id: dummy_2} = insert_dummy!(position: nil, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(position: nil, tenant: tenant)
      assert %{id: dummy_4, position: 2} =
               %Dummy{}
               |> Dummy.create_changeset(tenant, %{"position" => 2})
               |> Repo.insert!()
      assert [
               %{id: ^dummy_1, position: 1},
               %{id: ^dummy_4, position: 2},
               %{id: ^dummy_2, position: 3},
               %{id: ^dummy_3, position: 4}
             ] = all_dummies!()
    end
  end
  # Updating through Dummy.update_changeset/3: moving a record re-packs the
  # positions of its siblings; changing scope (tenant_id) compacts the old
  # scope and inserts into the new one.
  describe "Updating record position" do
    test "has no effect if it didn't change" do
      tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = subject = insert_dummy!(title: "3", position: 3, tenant: tenant)
      %{id: dummy_4} = insert_dummy!(title: "4", position: 4, tenant: tenant)
      assert %{id: ^dummy_3, title: "subject", position: 3} =
               subject
               |> Dummy.update_changeset(tenant, %{"title" => "subject"})
               |> Repo.update!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1},
               %{id: ^dummy_2, title: "2", position: 2},
               %{id: ^dummy_3, title: "subject", position: 3},
               %{id: ^dummy_4, title: "4", position: 4}
             ] = all_dummies!()
    end
    test "to a smaller one" do
      tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = subject = insert_dummy!(title: "3", position: 3, tenant: tenant)
      %{id: dummy_4} = insert_dummy!(title: "4", position: 4, tenant: tenant)
      assert %{id: ^dummy_3, title: "subject", position: 2} =
               subject
               |> Dummy.update_changeset(tenant, %{"title" => "subject", "position" => 2})
               |> Repo.update!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1},
               %{id: ^dummy_3, title: "subject", position: 2},
               %{id: ^dummy_2, title: "2", position: 3},
               %{id: ^dummy_4, title: "4", position: 4}
             ] = all_dummies!()
    end
    test "to a bigger one" do
      tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = subject = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(title: "3", position: 3, tenant: tenant)
      %{id: dummy_4} = insert_dummy!(title: "4", position: 4, tenant: tenant)
      assert %{id: ^dummy_2, title: "subject", position: 3} =
               subject
               |> Dummy.update_changeset(tenant, %{"title" => "subject", "position" => 3})
               |> Repo.update!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1},
               %{id: ^dummy_3, title: "3", position: 2},
               %{id: ^dummy_2, title: "subject", position: 3},
               %{id: ^dummy_4, title: "4", position: 4}
             ] = all_dummies!()
    end
    # Positions beyond the collection clamp to the last slot.
    test "to a position way ahead of the collection" do
      tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = subject = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(title: "3", position: 3, tenant: tenant)
      %{id: dummy_4} = insert_dummy!(title: "4", position: 4, tenant: tenant)
      assert %{id: ^dummy_2, title: "subject", position: 4} =
               subject
               |> Dummy.update_changeset(tenant, %{"title" => "subject", "position" => 100})
               |> Repo.update!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1},
               %{id: ^dummy_3, title: "3", position: 2},
               %{id: ^dummy_4, title: "4", position: 3},
               %{id: ^dummy_2, title: "subject", position: 4}
             ] = all_dummies!()
    end
    test "reorders both scopes if scope changed" do
      %{id: tenant_id} = tenant = insert_tenant!()
      %{id: another_tenant_id} = another_tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = subject = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(title: "3", position: 3, tenant: tenant)
      %{id: another_dummy_1} = insert_dummy!(title: "1", position: 1, tenant: another_tenant)
      %{id: another_dummy_2} = insert_dummy!(title: "2", position: 2, tenant: another_tenant)
      %{id: another_dummy_3} = insert_dummy!(title: "3", position: 3, tenant: another_tenant)
      assert %{id: ^dummy_2, title: "subject", position: 2, tenant_id: ^another_tenant_id} =
               subject
               |> Dummy.update_changeset(tenant, %{
                 "title" => "subject",
                 "position" => 2,
                 "tenant_id" => another_tenant_id
               })
               |> Repo.update!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1, tenant_id: ^tenant_id},
               %{id: ^dummy_3, title: "3", position: 2, tenant_id: ^tenant_id},
               %{id: ^another_dummy_1, title: "1", position: 1, tenant_id: ^another_tenant_id},
               %{id: ^dummy_2, title: "subject", position: 2, tenant_id: ^another_tenant_id},
               %{id: ^another_dummy_2, title: "2", position: 3, tenant_id: ^another_tenant_id},
               %{id: ^another_dummy_3, title: "3", position: 4, tenant_id: ^another_tenant_id}
             ] = all_dummies!()
    end
    # NOTE(review): no "position" param is sent here and the record lands at
    # the END of the new scope — the test name likely means "if position NOT
    # specified"; renaming would change test metadata, so it is only flagged.
    test "puts at the end of new scope if position specified" do
      %{id: tenant_id} = tenant = insert_tenant!()
      %{id: another_tenant_id} = another_tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = subject = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(title: "3", position: 3, tenant: tenant)
      %{id: another_dummy_1} = insert_dummy!(title: "1", position: 1, tenant: another_tenant)
      %{id: another_dummy_2} = insert_dummy!(title: "2", position: 2, tenant: another_tenant)
      %{id: another_dummy_3} = insert_dummy!(title: "3", position: 3, tenant: another_tenant)
      assert %{id: ^dummy_2, title: "subject", position: 4, tenant_id: ^another_tenant_id} =
               subject
               |> Dummy.update_changeset(tenant, %{
                 "title" => "subject",
                 "tenant_id" => another_tenant_id
               })
               |> Repo.update!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1, tenant_id: ^tenant_id},
               %{id: ^dummy_3, title: "3", position: 2, tenant_id: ^tenant_id},
               %{id: ^another_dummy_1, title: "1", position: 1, tenant_id: ^another_tenant_id},
               %{id: ^another_dummy_2, title: "2", position: 2, tenant_id: ^another_tenant_id},
               %{id: ^another_dummy_3, title: "3", position: 3, tenant_id: ^another_tenant_id},
               %{id: ^dummy_2, title: "subject", position: 4, tenant_id: ^another_tenant_id}
             ] = all_dummies!()
    end
    # Exercises Positioner.Changeset.set_order/3 directly on a changeset.
    test "updated programatically" do
      tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = subject = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(title: "3", position: 3, tenant: tenant)
      %{id: dummy_4} = insert_dummy!(title: "4", position: 4, tenant: tenant)
      assert %{id: ^dummy_2, position: 3} =
               subject
               |> Ecto.Changeset.change(position: 3)
               |> Positioner.Changeset.set_order(:position, [:tenant_id])
               |> Repo.update!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1},
               %{id: ^dummy_3, title: "3", position: 2},
               %{id: ^dummy_2, title: "2", position: 3},
               %{id: ^dummy_4, title: "4", position: 4}
             ] = all_dummies!()
    end
    # Same behaviour must hold for atom-keyed params.
    test "params are not strings" do
      %{id: tenant_id} = tenant = insert_tenant!()
      %{id: another_tenant_id} = another_tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = subject = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(title: "3", position: 3, tenant: tenant)
      %{id: another_dummy_1} = insert_dummy!(title: "1", position: 1, tenant: another_tenant)
      %{id: another_dummy_2} = insert_dummy!(title: "2", position: 2, tenant: another_tenant)
      %{id: another_dummy_3} = insert_dummy!(title: "3", position: 3, tenant: another_tenant)
      assert %{id: ^dummy_2, title: "subject", position: 2, tenant_id: ^another_tenant_id} =
               subject
               |> Dummy.update_changeset(tenant, %{
                 title: "subject",
                 position: 2,
                 tenant_id: another_tenant_id
               })
               |> Repo.update!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1, tenant_id: ^tenant_id},
               %{id: ^dummy_3, title: "3", position: 2, tenant_id: ^tenant_id},
               %{id: ^another_dummy_1, title: "1", position: 1, tenant_id: ^another_tenant_id},
               %{id: ^dummy_2, title: "subject", position: 2, tenant_id: ^another_tenant_id},
               %{id: ^another_dummy_2, title: "2", position: 3, tenant_id: ^another_tenant_id},
               %{id: ^another_dummy_3, title: "3", position: 4, tenant_id: ^another_tenant_id}
             ] = all_dummies!()
    end
    test "requested position is the same as current one" do
      %{id: tenant_id} = tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = subject = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(title: "3", position: 3, tenant: tenant)
      assert %{id: ^dummy_2, title: "subject", position: 2, tenant_id: ^tenant_id} =
               subject
               |> Dummy.update_changeset(tenant, %{
                 title: "subject",
                 position: 2,
                 tenant_id: tenant_id
               })
               |> Repo.update!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1, tenant_id: ^tenant_id},
               %{id: ^dummy_2, title: "subject", position: 2, tenant_id: ^tenant_id},
               %{id: ^dummy_3, title: "3", position: 3, tenant_id: ^tenant_id}
             ] = all_dummies!()
    end
    # A nil position means "move to the end of the scope".
    test "requested to be put at the end of the scope" do
      %{id: tenant_id} = tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = subject = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = insert_dummy!(title: "3", position: 3, tenant: tenant)
      assert %{id: ^dummy_2, title: "subject", position: 3, tenant_id: ^tenant_id} =
               subject
               |> Dummy.update_changeset(tenant, %{
                 title: "subject",
                 position: nil,
                 tenant_id: tenant_id
               })
               |> Repo.update!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1, tenant_id: ^tenant_id},
               %{id: ^dummy_3, title: "3", position: 2, tenant_id: ^tenant_id},
               %{id: ^dummy_2, title: "subject", position: 3, tenant_id: ^tenant_id}
             ] = all_dummies!()
    end
  end
  # Deleting through Dummy.delete_changeset/1 compacts positions within the
  # deleted record's scope only.
  describe "Deleting record" do
    test "squeezes other records together" do
      tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = subject = insert_dummy!(title: "3", position: 3, tenant: tenant)
      %{id: dummy_4} = insert_dummy!(title: "4", position: 4, tenant: tenant)
      assert %{id: ^dummy_3, title: "3", position: 3} =
               subject
               |> Dummy.delete_changeset()
               |> Repo.delete!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1},
               %{id: ^dummy_2, title: "2", position: 2},
               %{id: ^dummy_4, title: "4", position: 3}
             ] = all_dummies!()
    end
    test "doesn't affect other scopes" do
      tenant = insert_tenant!()
      another_tenant = insert_tenant!()
      %{id: dummy_1} = insert_dummy!(title: "1", position: 1, tenant: tenant)
      %{id: dummy_2} = insert_dummy!(title: "2", position: 2, tenant: tenant)
      %{id: dummy_3} = subject = insert_dummy!(title: "3", position: 3, tenant: tenant)
      %{id: dummy_4} = insert_dummy!(title: "4", position: 4, tenant: tenant)
      %{id: another_dummy_1} = insert_dummy!(title: "1", position: 1, tenant: another_tenant)
      %{id: another_dummy_2} = insert_dummy!(title: "2", position: 2, tenant: another_tenant)
      %{id: another_dummy_3} = insert_dummy!(title: "3", position: 3, tenant: another_tenant)
      assert %{id: ^dummy_3, title: "3", position: 3} =
               subject
               |> Dummy.delete_changeset()
               |> Repo.delete!()
      assert [
               %{id: ^dummy_1, title: "1", position: 1},
               %{id: ^dummy_2, title: "2", position: 2},
               %{id: ^dummy_4, title: "4", position: 3},
               %{id: ^another_dummy_1, title: "1", position: 1},
               %{id: ^another_dummy_2, title: "2", position: 2},
               %{id: ^another_dummy_3, title: "3", position: 3}
             ] = all_dummies!()
    end
  end
end
| 41.943529 | 95 | 0.544261 |
08d8588e444c33e5d7ef9decc1bd5b9b0b3c22ed | 1,884 | exs | Elixir | clients/data_catalog/mix.exs | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | clients/data_catalog/mix.exs | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | clients/data_catalog/mix.exs | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataCatalog.Mixfile do
use Mix.Project
@version "0.12.2"
def project() do
[
app: :google_api_data_catalog,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/data_catalog"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.2"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Google Cloud Data Catalog API client library. A fully managed and highly scalable data discovery and metadata management service.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/data_catalog",
"Homepage" => "https://cloud.google.com/data-catalog/docs/"
}
]
end
end
| 27.705882 | 133 | 0.660297 |
08d869245a4effe0e30bd72a1565245990af596b | 3,236 | ex | Elixir | lib/cadet_web/admin_controllers/admin_stories_controller.ex | source-academy/cadet | c447552453f78799755de73f66999e4c9d20383c | [
"Apache-2.0"
] | 27 | 2018-01-20T05:56:24.000Z | 2021-05-24T03:21:55.000Z | lib/cadet_web/admin_controllers/admin_stories_controller.ex | source-academy/cadet | c447552453f78799755de73f66999e4c9d20383c | [
"Apache-2.0"
] | 731 | 2018-04-16T13:25:49.000Z | 2021-06-22T07:16:12.000Z | lib/cadet_web/admin_controllers/admin_stories_controller.ex | source-academy/cadet | c447552453f78799755de73f66999e4c9d20383c | [
"Apache-2.0"
] | 43 | 2018-01-20T06:35:46.000Z | 2021-05-05T03:22:35.000Z | defmodule CadetWeb.AdminStoriesController do
use CadetWeb, :controller
use PhoenixSwagger
alias Cadet.Stories.Stories
def create(conn, %{"course_id" => course_id, "story" => story}) do
result =
story
|> to_snake_case_atom_keys()
|> Stories.create_story(course_id |> String.to_integer())
case result do
{:ok, _story} ->
conn |> put_status(200) |> text('')
{:error, {status, message}} ->
conn
|> put_status(status)
|> text(message)
end
end
def update(conn, _params = %{"course_id" => course_id, "storyid" => id, "story" => story}) do
result =
story
|> to_snake_case_atom_keys()
|> Stories.update_story(id, course_id |> String.to_integer())
case result do
{:ok, _story} ->
conn |> put_status(200) |> text('')
{:error, {status, message}} ->
conn
|> put_status(status)
|> text(message)
end
end
def delete(conn, _params = %{"course_id" => course_id, "storyid" => id}) do
result = Stories.delete_story(id, course_id |> String.to_integer())
case result do
{:ok, _nil} ->
conn |> put_status(204) |> text('')
{:error, {status, message}} ->
conn
|> put_status(status)
|> text(message)
end
end
swagger_path :create do
post("/v2{course_id}/stories")
summary("Creates a new story")
security([%{JWT: []}])
response(200, "OK", :Story)
response(400, "Bad request")
response(403, "User not allowed to manage stories")
end
  swagger_path :delete do
    # Fully qualified call avoids clashing with this controller's delete/2.
    PhoenixSwagger.Path.delete("/v2/courses/{course_id}/stories/{storyId}")
    summary("Delete a story from database by id")
    parameters do
      storyId(:path, :integer, "Story Id", required: true)
    end
    security([%{JWT: []}])
    response(204, "OK")
    response(403, "User not allowed to manage stories")
    response(404, "Story not found")
  end
  swagger_path :update do
    # NOTE(review): documented as POST rather than PUT/PATCH — presumably the
    # router exposes update via POST; confirm against the route definitions.
    post("/v2/courses/{course_id}/stories/{storyId}")
    summary("Update details regarding a story")
    parameters do
      storyId(:path, :integer, "Story Id", required: true)
    end
    security([%{JWT: []}])
    produces("application/json")
    response(200, "OK", :Story)
    response(403, "User not allowed to manage stories or stories from another course")
    response(404, "Story not found")
  end
  # Swagger schema definitions referenced by the paths above (the :Story model).
  @spec swagger_definitions :: %{Story: any}
  def swagger_definitions do
    %{
      Story:
        swagger_schema do
          properties do
            filenames(schema_array(:string), "Filenames of txt files", required: true)
            title(:string, "Title shown in Chapter Select Screen", required: true)
            imageUrl(:string, "Path to image shown in Chapter Select Screen", required: false)
            openAt(:string, "The opening date", format: "date-time", required: true)
            closeAt(:string, "The closing date", format: "date-time", required: true)
            isPublished(:boolean, "Whether or not is published", required: false)
            # NOTE(review): snake_case `course_id` is inconsistent with the
            # camelCase keys above; renaming would change the public schema,
            # so it is only flagged here.
            course_id(:integer, "The id of the course that this story belongs to", required: true)
          end
        end
    }
  end
end
| 27.193277 | 98 | 0.610939 |
08d899a6fb8f1f4d2b3c5b08364c45050a6a7c92 | 61 | ex | Elixir | projects/standup/lib/standup_web/views/user_view.ex | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | 1 | 2020-02-11T06:00:11.000Z | 2020-02-11T06:00:11.000Z | projects/standup/lib/standup_web/views/user_view.ex | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | 1 | 2017-09-23T19:41:29.000Z | 2017-09-25T05:12:38.000Z | projects/standup/lib/standup_web/views/user_view.ex | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | null | null | null | defmodule StandupWeb.UserView do
use StandupWeb, :view
end
| 15.25 | 32 | 0.803279 |
08d8b2e14776cd35117b688a1dca8b4b847003d6 | 1,061 | exs | Elixir | test/elixir_examples/basic_types_test.exs | raniemi/elixir_examples | 0eb957ed57bf1de4c2f6f2cc62825c6b315dfaf4 | [
"Apache-2.0"
] | 1 | 2015-11-30T19:28:39.000Z | 2015-11-30T19:28:39.000Z | test/elixir_examples/basic_types_test.exs | raniemi/elixir_examples | 0eb957ed57bf1de4c2f6f2cc62825c6b315dfaf4 | [
"Apache-2.0"
] | null | null | null | test/elixir_examples/basic_types_test.exs | raniemi/elixir_examples | 0eb957ed57bf1de4c2f6f2cc62825c6b315dfaf4 | [
"Apache-2.0"
] | null | null | null | defmodule ElixirExamples.BasicTypesTest do
use ExUnit.Case, async: true
test "atoms" do
my_atom = :symbol
assert :symbol == my_atom
end
test "nil" do
assert nil == :nil
end
  test "booleans" do
    # true/false are sugar for the atoms :true/:false.
    assert :true == true
    assert false == :false
    # Strict boolean operators: and/or/not require booleans.
    assert not (true and false)
    assert false or true
    assert not false
    # Any non-nil, non-false value is truthy to `assert`.
    assert :not_true_or_false
    # `not` raises ArgumentError when given a non-boolean.
    assert_raise ArgumentError, fn ->
      assert not :not_true_or_false
    end
    assert nil == :nil
    assert nil != :false
    # Relaxed operators ||/&& accept any value and return one operand.
    assert (nil || false) == :false
    assert (nil || false || true) == :true
    assert (true && 9) == 9
    assert (false && 9) == :false
    assert (nil && 9) == :nil
  end
test "integers" do
my_integer = 100
assert 100 == my_integer
assert 1000 == 1_000
end
test "floats" do
assert 2.718281828e0 == 2_718.281_828e-3
end
test "ranges" do
my_range = 1..10
assert 1..10 == my_range
assert 5 in my_range
assert !25 in my_range
assert [1, 2, 3] == Enum.filter(1..3, fn(x) -> true end)
end
end
| 18.293103 | 60 | 0.604147 |
08d8b7efeed940750c2b29e1827e13aef29572e1 | 545 | exs | Elixir | test/juvet/endpoint_router_test.exs | juvet/juvet | 5590ff7b1e5f411195d0becfe8b5740deb977cc5 | [
"MIT"
] | 19 | 2018-07-14T16:54:11.000Z | 2022-03-01T09:02:19.000Z | test/juvet/endpoint_router_test.exs | juvet/juvet | 5590ff7b1e5f411195d0becfe8b5740deb977cc5 | [
"MIT"
] | 31 | 2018-06-29T15:30:40.000Z | 2022-02-26T01:07:12.000Z | test/juvet/endpoint_router_test.exs | juvet/juvet | 5590ff7b1e5f411195d0becfe8b5740deb977cc5 | [
"MIT"
] | null | null | null | defmodule Juvet.EndpointTest do
use ExUnit.Case, async: true
use Plug.Test
  describe "POST /slack/events" do
    # The events endpoint exists in the router, so the request succeeds.
    test "responds with a 200 status" do
      conn = request!(:post, "/slack/events")
      assert conn.status == 200
    end
  end
  # NOTE(review): the describe label says GET but the request is issued with
  # :post — presumably any verb on an unknown path 404s; confirm or rename.
  describe "GET /slack/blah" do
    test "responds with a 404 status" do
      conn = request!(:post, "/slack/blah")
      assert conn.status == 404
    end
  end
defp request!(method, path) do
conn(method, path)
|> Juvet.EndpointRouter.call(Juvet.EndpointRouter.init([]))
end
end
| 20.961538 | 63 | 0.647706 |
08d8cb88448ff083cd901483977750136fade5ee | 919 | exs | Elixir | config/config.exs | cxz/pepper | 4efd817ed4d8435800210ef109916c4e42df4e80 | [
"MIT"
] | null | null | null | config/config.exs | cxz/pepper | 4efd817ed4d8435800210ef109916c4e42df4e80 | [
"MIT"
] | null | null | null | config/config.exs | cxz/pepper | 4efd817ed4d8435800210ef109916c4e42df4e80 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :pepper,
  ecto_repos: [Pepper.Repo]
# Configures the endpoint
# NOTE(review): secret_key_base is hard-coded in source control here; for
# production it should be loaded from the environment instead.
config :pepper, Pepper.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "t5QsenseQs43eytI6CcvrGd4jNVjgETZ1iHexkpFwYrEyF/WFlNZiK1Iq4xtKoIT",
  render_errors: [view: Pepper.ErrorView, accepts: ~w(json)],
  pubsub: [name: Pepper.PubSub,
           adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 32.821429 | 86 | 0.760609 |
08d8d134a0769ff26686a6344172b3c87d397604 | 273 | ex | Elixir | elm_elixir/lib/poison_encoder.ex | leadbrain/tutorial | 97ff8fa72829080b79e5ae32541e04897f31326d | [
"MIT"
] | null | null | null | elm_elixir/lib/poison_encoder.ex | leadbrain/tutorial | 97ff8fa72829080b79e5ae32541e04897f31326d | [
"MIT"
] | null | null | null | elm_elixir/lib/poison_encoder.ex | leadbrain/tutorial | 97ff8fa72829080b79e5ae32541e04897f31326d | [
"MIT"
] | null | null | null | defimpl Poison.Encoder, for: Any do
def encode(%{__struct__: _} = struct, options) do
struct
|> Map.from_struct
|> sanitize_map
|> Poison.Encoder.Map.encode(options)
end
defp sanitize_map(map) do
Map.drop(map, [:__meta__, :__struct__])
end
end
| 21 | 51 | 0.666667 |
08d912c2d9776e269b5b2284bba0bc25ecc1df86 | 52 | exs | Elixir | config/test.exs | primait/ex_fuzzywuzzy | 713661a578b45fecd14cc8af868385bf42443471 | [
"MIT"
] | null | null | null | config/test.exs | primait/ex_fuzzywuzzy | 713661a578b45fecd14cc8af868385bf42443471 | [
"MIT"
] | null | null | null | config/test.exs | primait/ex_fuzzywuzzy | 713661a578b45fecd14cc8af868385bf42443471 | [
"MIT"
] | 1 | 2021-04-10T15:12:12.000Z | 2021-04-10T15:12:12.000Z | import Config
# Round fuzzy-match ratios to 2 decimal places in the test environment.
config :ex_fuzzywuzzy, :precision, 2
| 13 | 36 | 0.788462 |
08d92ccfd64c60507e48b1064628466986ac9ef7 | 571 | exs | Elixir | config/test.exs | phoenixframework/plds | 820600e8da6e13f376f8341cb78868bc189ddad8 | [
"MIT"
] | 60 | 2021-09-13T21:53:34.000Z | 2022-03-09T14:31:36.000Z | config/test.exs | phoenixframework/plds | 820600e8da6e13f376f8341cb78868bc189ddad8 | [
"MIT"
] | 2 | 2021-09-23T17:13:40.000Z | 2021-11-16T15:57:05.000Z | config/test.exs | phoenixframework/plds | 820600e8da6e13f376f8341cb78868bc189ddad8 | [
"MIT"
] | 2 | 2021-11-16T10:37:42.000Z | 2022-02-18T19:32:38.000Z | import Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :plds, PLDSWeb.Endpoint,
  http: [ip: {127, 0, 0, 1}, port: 4002],
  secret_key_base: "4uFwNlOnyppVcDWeVcNUgGPOYQD+y7F4mTkconBPSqAleqXvd2wmmcLCqXJanfon",
  server: false
# Test-only runtime flags: fixed node cookie, no forced distribution,
# and do not halt the VM on abort.
config :plds,
  cookie: :"my-plds-test-cookie",
  ensure_distribution?: false,
  halt_on_abort: false
# Print only warnings and errors during test
config :logger, level: :warn
# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime
| 28.55 | 86 | 0.756567 |
08d93582eb99dbc34173662aaaec24f5ef0c5a35 | 1,646 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/add_product_to_product_set_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/add_product_to_product_set_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/add_product_to_product_set_request.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Vision.V1.Model.AddProductToProductSetRequest do
@moduledoc """
Request message for the `AddProductToProductSet` method.
## Attributes
- product (String.t): The resource name for the Product to be added to this ProductSet. Format is: `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:product => any()
}
field(:product)
end
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.AddProductToProductSetRequest do
def decode(value, options) do
GoogleApi.Vision.V1.Model.AddProductToProductSetRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.AddProductToProductSetRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.291667 | 191 | 0.755164 |
08d940cd558d1443873ccd3548eb6ab7ca71175f | 1,616 | ex | Elixir | test/support/data_case.ex | egjimenezg/absinthe_subscriptions | 49d05d9ce141c9de5d1cfabe21452d2bde8c49d9 | [
"Apache-2.0"
] | null | null | null | test/support/data_case.ex | egjimenezg/absinthe_subscriptions | 49d05d9ce141c9de5d1cfabe21452d2bde8c49d9 | [
"Apache-2.0"
] | null | null | null | test/support/data_case.ex | egjimenezg/absinthe_subscriptions | 49d05d9ce141c9de5d1cfabe21452d2bde8c49d9 | [
"Apache-2.0"
] | null | null | null | defmodule AbsintheSubscriptions.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use AbsintheSubscriptions.DataCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
alias AbsintheSubscriptions.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import AbsintheSubscriptions.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(AbsintheSubscriptions.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(AbsintheSubscriptions.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end
| 28.857143 | 83 | 0.704827 |
08d946e2acb70a32f8e74d1748ca2e2827846bf8 | 352 | exs | Elixir | priv/repo/migrations/20201126202439_tear_down_org_membership_on_user_delete.exs | scoopteam/backend | c9efa958f5f536870abd722e2c55a1fb907acff2 | [
"MIT"
] | null | null | null | priv/repo/migrations/20201126202439_tear_down_org_membership_on_user_delete.exs | scoopteam/backend | c9efa958f5f536870abd722e2c55a1fb907acff2 | [
"MIT"
] | null | null | null | priv/repo/migrations/20201126202439_tear_down_org_membership_on_user_delete.exs | scoopteam/backend | c9efa958f5f536870abd722e2c55a1fb907acff2 | [
"MIT"
] | null | null | null | defmodule Scoop.Repo.Migrations.TearDownOrgMembershipOnUserDelete do
use Ecto.Migration
def change do
drop_if_exists constraint(:organisation_memberships, "organisation_memberships_user_id_fkey")
alter table(:organisation_memberships) do
modify :user_id, references(:users, type: :integer, on_delete: :delete_all)
end
end
end
| 29.333333 | 97 | 0.789773 |
08d96d2a31d81102323ecd64721f07e1f30cfba1 | 138 | ex | Elixir | lib/amqplug.ex | getdreams/amqplug | 4acd519c3dc02dc785a25f7e52223cf0bc85c780 | [
"Apache-2.0"
] | null | null | null | lib/amqplug.ex | getdreams/amqplug | 4acd519c3dc02dc785a25f7e52223cf0bc85c780 | [
"Apache-2.0"
] | null | null | null | lib/amqplug.ex | getdreams/amqplug | 4acd519c3dc02dc785a25f7e52223cf0bc85c780 | [
"Apache-2.0"
defmodule Amqplug do
  @moduledoc """
  OTP application entry point: boots the top-level supervisor with the
  routes returned by `Amqplug.Config.routes/0`.
  """
  use Application

  # Application callback: start the supervision tree.
  @impl true
  def start(_type, _args) do
    Amqplug.Supervisor.start_link(Amqplug.Config.routes())
  end
end
| 17.25 | 58 | 0.753623 |
08d979da812f5689eadbb4d8b26a9f870893d6f9 | 1,023 | exs | Elixir | mix.exs | jesenko/ecto_enum | 9639bb23fb56bbac8a0c71d6824d724f243780fd | [
"MIT"
] | null | null | null | mix.exs | jesenko/ecto_enum | 9639bb23fb56bbac8a0c71d6824d724f243780fd | [
"MIT"
] | null | null | null | mix.exs | jesenko/ecto_enum | 9639bb23fb56bbac8a0c71d6824d724f243780fd | [
"MIT"
defmodule EctoEnum.Mixfile do
  use Mix.Project

  @version "1.0.1"

  # Mix project definition for the `ecto_enum` package.
  def project do
    [
      app: :ecto_enum,
      version: @version,
      elixir: "~> 1.2",
      deps: deps(),
      description: "Ecto extension to support enums in models",
      test_paths: test_paths(Mix.env()),
      package: package(),
      name: "EctoEnum",
      docs: [source_ref: "v#{@version}", source_url: "https://github.com/gjaldon/ecto_enum"]
    ]
  end

  # Only the PostgreSQL suite is run, regardless of environment.
  defp test_paths(_env), do: ["test/pg"]

  # Hex package metadata.
  defp package do
    [
      maintainers: ["Gabriel Jaldon"],
      licenses: ["MIT"],
      links: %{github: "https://github.com/gjaldon/ecto_enum"},
      files: ~w(mix.exs README.md CHANGELOG.md lib)
    ]
  end

  # OTP applications started together with this one.
  def application do
    [applications: [:logger, :ecto_sql, :ecto]]
  end

  # Package and tooling dependencies.
  defp deps do
    [
      {:ecto_sql, "~> 3.0"},
      {:postgrex, ">= 0.14.0", optional: true},
      {:ex_doc, "~> 0.18.0", only: :dev},
      {:earmark, "~> 1.1", only: :dev},
      {:inch_ex, ">= 0.0.0", only: [:dev, :test]}
    ]
  end
end
| 22.733333 | 92 | 0.55523 |
08d98fd563250c1e0b13b156e7be585536ec9729 | 3,893 | ex | Elixir | lib/ex_admin/paginate.ex | 8thlight/ex_admin | 314d4068270c47799ec54f719073a565222bcfad | [
"MIT"
] | 1 | 2018-08-30T20:20:56.000Z | 2018-08-30T20:20:56.000Z | lib/ex_admin/paginate.ex | 8thlight/ex_admin | 314d4068270c47799ec54f719073a565222bcfad | [
"MIT"
] | null | null | null | lib/ex_admin/paginate.ex | 8thlight/ex_admin | 314d4068270c47799ec54f719073a565222bcfad | [
"MIT"
defmodule ExAdmin.Paginate do
  @moduledoc false
  use Xain
  import ExAdmin.Theme.Helpers
  import ExAdmin.Gettext

  # Renders the pagination bar: numbered page buttons plus the
  # "Displaying x - y of z in total" summary text.
  #
  # BUG FIX: the nil-page guard clause previously had arity 7 while the real
  # implementation below has arity 6, so a 6-argument call with a nil page
  # number could never match it. A matching 6-argument clause is added; the
  # arity-7 clause is kept for backward compatibility with legacy callers.
  def paginate(_, nil, _, _, _, _, _), do: []
  def paginate(_, nil, _, _, _, _), do: []

  def paginate(link, page_number, page_size, total_pages, record_count, name) do
    markup do
      theme_module(Paginate).wrap_pagination1 fn ->
        # Page buttons are only useful when there is more than one page.
        if total_pages > 1 do
          for item <- items(page_number, page_size, total_pages) do
            theme_module(Paginate).build_item link, item
          end
        end
      end

      theme_module(Paginate).wrap_pagination2 fn ->
        # 1-based index of the first record shown on the current page.
        record_number = (page_number - 1) * page_size + 1

        display_pagination name, (page_number - 1) * page_size + 1, page_size,
          record_count, record_number + page_size - 1
      end
    end
  end

  # page_size == 1 renders the "Displaying all N" variant.
  # NOTE(review): matching on a page size of exactly 1 looks odd but is kept
  # as-is -- confirm the intent upstream before changing it.
  defp display_pagination(name, _record_number, 1, record_count, _) do
    pagination_information(name, record_count)
  end

  # Mid-range page: "Displaying x - y of z in total".
  defp display_pagination(name, record_number, _page_size, record_count, last_number)
       when last_number < record_count do
    pagination_information(name, record_number, last_number, record_count)
  end

  # Last page: clamp the upper bound to the total record count.
  defp display_pagination(name, record_number, _page_size, record_count, _) do
    pagination_information(name, record_number, record_count, record_count)
  end

  # Exactly one record on display (first index == last index).
  def pagination_information(name, record_number, record_number, record_count) do
    markup do
      text (gettext "Displaying") <> Inflex.singularize(" #{name}") <> " "
      b "#{record_number}"
      text " " <> (gettext "of") <> " "
      b "#{record_count}"
      text " " <> (gettext "in total")
    end
  end

  # Range of records on display.
  def pagination_information(name, record_number, last, record_count) do
    markup do
      text (gettext "Displaying %{name}", name: name) <> " "
      b "#{record_number} - #{last}"
      text " " <> (gettext "of") <> " "
      b "#{record_count}"
      text " " <> (gettext "in total")
    end
  end

  # Single page containing every record.
  def pagination_information(name, total) do
    markup do
      text Gettext.gettext(ExAdmin.Gettext, "Displaying" <> " ")
      b (gettext "all %{total}", total: total)
      text " #{name}"
    end
  end

  # Localized labels for the first/prev/next/last buttons.
  def special_name(:first), do: gettext "« First"
  def special_name(:prev), do: gettext "‹ Prev"
  def special_name(:next), do: gettext "Next ›"
  def special_name(:last), do: gettext "Last »"

  # Number of numbered page buttons shown around the current page.
  def window_size, do: 7

  # Builds the ordered keyword list of pagination items, e.g.
  # [first: 1, prev: 3, gap: true, page: 3, current: 4, page: 5, next: 5, last: 9]
  def items(page_number, page_size, total_pages) do
    prefix_links(page_number)
    |> prefix_gap
    |> links(page_number, page_size, total_pages)
    |> postfix_gap
    |> postfix_links(page_number, total_pages)
  end

  # No first/prev buttons on page 1.
  def prefix_links(1), do: []
  def prefix_links(page_number) do
    prev = if page_number > 1, do: page_number - 1, else: 1
    [first: 1, prev: prev]
  end

  def prefix_gap(acc) do
    acc
  end

  def postfix_gap(acc), do: acc

  # Inserts the numbered window (with gap markers) around the current page.
  def links(acc, page_number, _page_size, total_pages) do
    half = Kernel.div window_size(), 2
    # First numbered page in the window; 0 means "no pages before current".
    before = cond do
      page_number == 1 -> 0
      page_number - half < 1 -> 1
      true -> page_number - half
    end
    # Last numbered page in the window.
    aftr = cond do
      before + half >= total_pages -> total_pages
      page_number + window_size() >= total_pages -> total_pages
      true -> page_number + half
    end
    before_links = if before > 0 do
      for x <- before..(page_number - 1), do: {:page, x}
    else
      []
    end
    after_links = if page_number < total_pages do
      for x <- (page_number + 1)..aftr, do: {:page, x}
    else
      []
    end
    # Gap markers ("...") when the window does not touch the first/last page.
    pregap = if before != 1 and page_number != 1, do: [gap: true], else: []
    postgap = if aftr != total_pages and page_number != total_pages, do: [gap: true], else: []
    acc ++ pregap ++ before_links ++ [current: page_number] ++ after_links ++ postgap
  end

  # No next/last buttons on the final page.
  def postfix_links(acc, page_number, total_pages) do
    if page_number == total_pages do
      acc
    else
      acc ++ [next: page_number + 1, last: total_pages]
    end
  end
end
| 30.178295 | 94 | 0.639353 |
08d9c8a3077d198184fcabd73750f1e73eccf7fa | 1,236 | exs | Elixir | config/config.exs | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | [
"MIT"
] | null | null | null | config/config.exs | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | [
"MIT"
] | null | null | null | config/config.exs | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | [
"MIT"
# This file is responsible for configuring your application and its
# dependencies with the aid of the Mix.Config module.
#
# It is loaded before any dependency and is restricted to this project.

# NOTE(review): `Mix.Config` is deprecated in favor of `import Config` on
# newer Elixir versions; kept as-is for compatibility with this codebase.
use Mix.Config

# General application configuration
config :strichliste_elixir,
  ecto_repos: [StrichlisteElixir.Repo]

# Configures the endpoint
config :strichliste_elixir, StrichlisteElixirWeb.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "btgftTiyJ1b+EVxbkGRw/sIxiFbzB8m0HqpTPBwFeJesXYBp6khzImkQlW0easIL",
  render_errors: [view: StrichlisteElixirWeb.ErrorView, accepts: ~w(html json)],
  pubsub: [name: StrichlisteElixir.PubSub, adapter: Phoenix.PubSub.PG2],
  live_view: [
    signing_salt: "TP1bmITU542RA1d7vzkruPkhATh+gLGK"
  ],
  live_reload: [
    patterns: [
      ~r{lib/strichliste_elixir_web/live/.*(ex)$}
    ]
  ]

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason

# Import environment specific config. This must remain at the bottom of this
# file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
08d9d3331bde02224a93461b3d58fc8f3fd5a2d1 | 3,638 | ex | Elixir | config/authorization/config.ex | bdevloed/app-digitaal-loket-data-warehouse | 02ff3ed962980f4983099d699e5ef827090a4158 | [
"MIT"
] | null | null | null | config/authorization/config.ex | bdevloed/app-digitaal-loket-data-warehouse | 02ff3ed962980f4983099d699e5ef827090a4158 | [
"MIT"
] | null | null | null | config/authorization/config.ex | bdevloed/app-digitaal-loket-data-warehouse | 02ff3ed962980f4983099d699e5ef827090a4158 | [
"MIT"
] | 1 | 2022-01-24T13:45:03.000Z | 2022-01-24T13:45:03.000Z | alias Acl.Accessibility.Always, as: AlwaysAccessible
alias Acl.GraphSpec.Constraint.Resource, as: ResourceConstraint
alias Acl.GraphSpec, as: GraphSpec
alias Acl.GroupSpec, as: GroupSpec
alias Acl.GroupSpec.GraphCleanup, as: GraphCleanup
defmodule Acl.UserGroups.Config do
  # Access-grant definitions consumed by the Acl authorization layer.
  #
  # NOTE: `useage` is the (misspelled) field name the Acl structs actually
  # declare, so it must stay spelled exactly that way.
  def user_groups do
    [
      # PUBLIC -- read access to the public graph, always accessible.
      %GroupSpec{
        name: "public",
        useage: [:read],
        access: %AlwaysAccessible{},
        graphs: [
          %GraphSpec{
            graph: "http://mu.semte.ch/graphs/public",
            constraint: %ResourceConstraint{
              resource_types: [
                # PERSONEEL
                "http://lblod.data.gift/vocabularies/employee/EmployeeTimePeriod",
                "http://lblod.data.gift/vocabularies/employee/UnitMeasure",
                "http://lblod.data.gift/vocabularies/employee/EducationalLevel",
                "http://lblod.data.gift/vocabularies/employee/WorkingTimeCategory",
                "http://lblod.data.gift/vocabularies/employee/LegalStatus",
                "http://lblod.data.gift/vocabularies/employee/EmployeeDataset",
                "http://lblod.data.gift/vocabularies/employee/EmployeePeriodSlice",
                "http://lblod.data.gift/vocabularies/employee/EmployeeObservation",
                # LEIDINGGEVENDEN
                "http://schema.org/ContactPoint",
                "http://www.w3.org/ns/locn#Address",
                "http://data.lblod.info/vocabularies/leidinggevenden/Bestuursfunctie",
                "http://data.lblod.info/vocabularies/leidinggevenden/Functionaris",
                "http://data.lblod.info/vocabularies/leidinggevenden/FunctionarisStatusCode",
                # MANDATEN
                "http://data.vlaanderen.be/ns/mandaat#Mandataris",
                "http://www.w3.org/ns/org#Post",
                "http://data.vlaanderen.be/ns/mandaat#TijdsgebondenEntiteit",
                "http://data.vlaanderen.be/ns/mandaat#Fractie",
                "http://data.vlaanderen.be/ns/persoon#Geboorte",
                "http://www.w3.org/ns/org#Membership",
                "http://data.vlaanderen.be/ns/mandaat#Mandaat",
                "http://mu.semte.ch/vocabularies/ext/MandatarisStatusCode",
                "http://mu.semte.ch/vocabularies/ext/BeleidsdomeinCode",
                "http://www.w3.org/ns/org#Organization",
                "http://schema.org/PostalAddress",
                "http://www.w3.org/ns/org#Role",
                "http://www.w3.org/ns/org#Site",
                # SHARED
                "http://data.vlaanderen.be/ns/besluit#Bestuurseenheid",
                "http://mu.semte.ch/vocabularies/ext/BestuurseenheidClassificatieCode",
                "http://data.vlaanderen.be/ns/besluit#Bestuursorgaan",
                "http://mu.semte.ch/vocabularies/ext/BestuursorgaanClassificatieCode",
                "http://mu.semte.ch/vocabularies/ext/BestuursfunctieCode",
                "http://www.w3.org/ns/person#Person",
                "http://www.w3.org/ns/prov#Location",
                "http://mu.semte.ch/vocabularies/ext/GeslachtCode"
              ]
            }
          }
        ]
      },

      # CLEANUP
      %GraphCleanup{
        originating_graph: "http://mu.semte.ch/application",
        useage: [:write],
        name: "clean"
      }
    ]
  end
end
08d9db1b0d3f65b2405148810083b6d48665ee2e | 6,337 | ex | Elixir | lib/visualixir/tracer.ex | ryo33/portable-visualixir | 130db30c4a4f15ea1d13a29c31261d9f51e491d9 | [
"MIT"
] | null | null | null | lib/visualixir/tracer.ex | ryo33/portable-visualixir | 130db30c4a4f15ea1d13a29c31261d9f51e491d9 | [
"MIT"
] | null | null | null | lib/visualixir/tracer.ex | ryo33/portable-visualixir | 130db30c4a4f15ea1d13a29c31261d9f51e491d9 | [
"MIT"
defmodule Visualixir.Tracer do
  @moduledoc """
  Runs on a traced node and forwards process/port/link/message trace events
  to the visualizer node's `TraceChannel` via `:rpc`.
  """
  use GenServer
  alias Visualixir.TraceChannel
  require Logger

  # Starts the tracer GenServer on the given (possibly remote) node.
  # `Node.self()` -- the visualizer node -- is passed as the init argument so
  # the remote tracer knows where to report events.
  def start(node) do
    Node.spawn_link(node, :gen_server, :start, [
      {:local, __MODULE__},
      __MODULE__,
      [Node.self()],
      []
    ])
  end

  # Fetches the remote node's initial process/port/link snapshot.
  def initial_state(node) do
    :rpc.call(node, __MODULE__, :initial_state, [])
  end

  # Enables send/receive message tracing for the pid encoded in `pid_str`.
  def msg_trace(node, pid_str) do
    pid = :rpc.call(node, __MODULE__, :pid_from_string, [pid_str])

    if is_pid(pid) do
      GenServer.call({__MODULE__, node}, {:trace_msg, true, pid})
    else
      Logger.warn("#{pid_str} on #{node} isn't a pid, can't trace it.")
    end
  end

  # Disables message tracing on every process of the remote node.
  def stop_msg_trace_all(node) do
    GenServer.call({__MODULE__, node}, :stop_trace_msg_all)
  end

  #
  # ------- Erlang Functions Only Zone -------
  #
  # Code below can't contain any Elixir-specific functions, since it should be able to run on
  # non-Elixir nodes. Sorry that this module is so gnarly, the :lists module loves to put the
  # list at the end of the arguments, so it doesn't look like nice Elixir. :(
  #
  # Maybe in the future, it can be refactored with a pipeline that does the second-argument sugar.
  # (or maybe just rpc remote nodes for data and do the collection wrangling on the local node)
  #

  @impl true
  def init([visualizer_node]) do
    # Receive spawn/exit/link/unlink events for every process on this node.
    :erlang.trace(:all, true, [:procs])
    {:ok, visualizer_node}
  end

  @impl true
  def handle_call({:trace_msg, on_off, pid_spec}, _from, visualizer_node) do
    :erlang.trace(pid_spec, on_off, [:send, :receive])
    {:reply, :ok, visualizer_node}
  end

  def handle_call(:stop_trace_msg_all, _from, visualizer_node) do
    :lists.foreach(&:erlang.trace(&1, false, [:send, :receive]), :erlang.processes())
    {:reply, :ok, visualizer_node}
  end

  @impl true
  def handle_info({:trace, _spawner_pid, :spawn, pid, _mfa}, visualizer_node) do
    :rpc.call(visualizer_node, TraceChannel, :announce_spawn, [:erlang.node(), map_pids_to_info([pid])])
    {:noreply, visualizer_node}
  end

  # FIX: `reason` was bound but never used (compiler warning) -- underscored.
  def handle_info({:trace, pid, :exit, _reason}, visualizer_node) do
    :rpc.call(visualizer_node, TraceChannel, :announce_exit, [:erlang.node(), pid_to_binary(pid)])
    {:noreply, visualizer_node}
  end

  def handle_info({:trace, from_pid, :link, to_pid}, visualizer_node) do
    link = :lists.map(&pid_to_binary/1, [from_pid, to_pid]) |> :lists.sort()
    :rpc.call(visualizer_node, TraceChannel, :announce_link, [:erlang.node(), link])
    {:noreply, visualizer_node}
  end

  # ignore ports, the gui knows when to unlink them
  def handle_info({:trace, from_pid, :unlink, to_pid}, visualizer_node) when is_pid(to_pid) do
    link = :lists.map(&pid_to_binary/1, [from_pid, to_pid]) |> :lists.sort()
    :rpc.call(visualizer_node, TraceChannel, :announce_unlink, [:erlang.node(), link])
    {:noreply, visualizer_node}
  end

  def handle_info({:trace, from_pid, :send, msg, to_pid}, visualizer_node) do
    :rpc.call(visualizer_node, TraceChannel, :announce_msg, [
      :erlang.node(),
      pid_to_binary(from_pid),
      pid_to_binary(to_pid),
      msg
    ])

    {:noreply, visualizer_node}
  end

  # Catch-all: drop any other message.
  # FIX: `msg` was bound but never used (compiler warning) -- underscored.
  def handle_info(_msg, state) do
    {:noreply, state}
  end

  # Snapshot of all live processes, ports and links on this node.
  # FIX: `all_links` is now called with explicit parentheses -- the parenless
  # local zero-arity call is deprecated and produced a compiler warning.
  def initial_state do
    %{
      pids: map_pids_to_info(:erlang.processes()),
      ports: map_pids_to_info(:erlang.ports()),
      links: all_links()
    }
  end

  # Every {pid, linked} pair on the node, as sorted binary pairs, deduplicated.
  def all_links do
    :lists.flatmap(fn pid ->
      links =
        case :erlang.process_info(pid, :links) do
          {:links, links} -> links
          # Process died between the processes() snapshot and this call.
          :undefined -> []
        end

      :lists.map(&:lists.sort([pid_to_binary(pid), pid_to_binary(&1)]), links)
    end, :erlang.processes())
    |> :lists.usort()
  end

  defp pid_to_binary(pid) when is_pid(pid) do
    "#PID" <> (pid |> :erlang.pid_to_list() |> :erlang.list_to_binary())
  end

  defp pid_to_binary(port) when is_port(port) do
    port |> :erlang.port_to_list() |> :erlang.list_to_binary()
  end

  # the msg tracer seems to give us back the registered name
  defp pid_to_binary(atom) when is_atom(atom) do
    atom |> :erlang.whereis() |> pid_to_binary()
  end

  # Registered name of a process (or a port's driver name); nil when unnamed.
  defp pid_name(pid) when is_pid(pid) do
    case :erlang.process_info(pid, :registered_name) do
      {:registered_name, name} -> name |> :erlang.atom_to_binary(:utf8)
      _ -> nil
    end
  end

  defp pid_name(port) when is_port(port) do
    case :erlang.port_info(port, :name) do
      {:name, name} -> name |> :erlang.list_to_binary()
      _ -> nil
    end
  end

  # OTP application a process belongs to, or nil.
  defp application(pid) when is_pid(pid) do
    case :application.get_application(pid) do
      :undefined -> nil
      # get_application/1 returns {:ok, app}; match the tag explicitly.
      {:ok, app} -> app
    end
  end

  defp application(port) when is_port(port) do
    nil
  end

  # Classifies a process as :supervisor / :normal / :dead; ports are :port.
  defp process_type(pid) when is_pid(pid) do
    case :erlang.process_info(pid, :dictionary) do
      :undefined -> :dead
      {_, [{_, _}, "$initial_call": {:supervisor, _, _}]} -> :supervisor
      _ -> :normal
    end
  end

  defp process_type(port) when is_port(port), do: :port

  defp process_being_msg_traced(pid) when is_pid(pid) do
    case :erlang.trace_info(pid, :flags) do
      {:flags, flags} -> :lists.member(:receive, flags) || :lists.member(:send, flags)
      _ -> false
    end
  end

  defp process_being_msg_traced(port) when is_port(port), do: false

  # %{pid_binary => %{name:, type:, application:, msg_traced:}}; pids that
  # died between snapshot and inspection (type :dead) are dropped.
  # FIX: the filter's unused `pid` binding is underscored (compiler warning).
  defp map_pids_to_info(pids) do
    pids =
      :lists.map(fn pid ->
        {pid_to_binary(pid),
         %{
           name: pid_name(pid),
           type: process_type(pid),
           application: application(pid),
           msg_traced: process_being_msg_traced(pid)
         }}
      end, pids)

    :lists.filter(fn {_pid, %{type: type}} -> type != :dead end, pids)
    |> :maps.from_list()
  end

  # Parses "#PID<x.y.z>" back into a pid.
  def pid_from_string("#PID" <> string) do
    string
    |> :erlang.binary_to_list()
    |> :erlang.list_to_pid()
  end

  # Any other string is taken to be a registered name and resolved.
  # NOTE(review): creates atoms from arbitrary input (not garbage-collected);
  # consider :erlang.list_to_existing_atom/1 if the input is untrusted.
  def pid_from_string(string) do
    string
    |> :erlang.binary_to_list()
    |> :erlang.list_to_atom()
    |> :erlang.whereis()
  end

  #
  # Remote node code (un)loading.
  #

  # Pushes this module's bytecode onto a remote node.
  def send_module(node) do
    {module, binary, file} = :code.get_object_code(__MODULE__)
    :rpc.call(node, :code, :load_binary, [module, file, binary])
  end

  # Removes this module from a remote node again.
  def cleanup(node) do
    :rpc.call(node, :code, :delete, [__MODULE__])
    :rpc.call(node, :code, :purge, [__MODULE__])
  end
end
| 29.891509 | 102 | 0.635159 |
08d9ed42b15a1de9ce755679ba87c9b38f24cbbd | 2,593 | ex | Elixir | lib/cforum/jobs/send_inactivity_notification_mail_job.ex | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
] | 16 | 2019-04-04T06:33:33.000Z | 2021-08-16T19:34:31.000Z | lib/cforum/jobs/send_inactivity_notification_mail_job.ex | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
] | 294 | 2019-02-10T11:10:27.000Z | 2022-03-30T04:52:53.000Z | lib/cforum/jobs/send_inactivity_notification_mail_job.ex | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
defmodule Cforum.Jobs.SendInactivityNotificationMailJob do
  @moduledoc """
  Oban background job that mails users who have been inactive, on the
  anniversary of their account creation.
  """
  use Oban.Worker, queue: :background, max_attempts: 5

  import Ecto.Query, warn: false

  require Logger

  alias Cforum.Repo
  alias Cforum.Users.User
  alias Cforum.Messages.Message

  # {years_of_inactivity, max_message_count_in_last_5_years} thresholds.
  @limits [{4, 100}, {3, 75}, {2, 50}, {1, 25}]

  @impl Oban.Worker
  def perform(_job) do
    notify_users_inactive_longer_5years()
    Enum.each(@limits, &notify_users_inactive_in_limit/1)
  end

  # Users inactive for at least `years` years AND with fewer than
  # `no_messages` non-deleted messages in the last five years.
  defp notify_users_inactive_in_limit({years, no_messages}) do
    from(u in User,
      left_join: m in Message,
      on: m.user_id == u.user_id,
      on: fragment("?::date >= (NOW() - INTERVAL '5 years')::date", m.created_at),
      on: m.deleted == false,
      where:
        fragment("?::date < (NOW() - INTERVAL '1 year' * ?)::date", u.last_visit, ^years) or
          (is_nil(u.last_visit) and
             fragment("?::date < (NOW() - INTERVAL '1 year' * ?)::date", u.created_at, ^years)),
      group_by: u.user_id,
      having: count() < ^no_messages
    )
    |> send_notification_mails(years)

    :ok
  end

  # Users whose last visit (or, failing that, account creation) is at least
  # five years ago -- message count no longer matters at this point.
  defp notify_users_inactive_longer_5years do
    from(u in User,
      where:
        fragment("?::date <= NOW()::date - INTERVAL '5 years'", u.last_visit) or
          (is_nil(u.last_visit) and fragment("?::date <= NOW()::date - INTERVAL '5 years'", u.created_at))
    )
    |> send_notification_mails(5)
  end

  # Narrows the candidate query to users who haven't been notified yet, have
  # a usable email address, and whose account anniversary is today, then
  # mails each of them.
  defp send_notification_mails(query, years) do
    from(u in query,
      where: is_nil(u.inactivity_notification_sent_at),
      where: fragment("EXTRACT(DAY FROM ?) = EXTRACT(DAY FROM NOW())", u.created_at),
      where: fragment("EXTRACT(MONTH FROM ?) = EXTRACT(MONTH FROM NOW())", u.created_at),
      where: not is_nil(u.email),
      where: u.email != ""
    )
    |> Repo.all()
    |> Enum.each(&notify_user(&1, years))
  end

  # Delivers the inactivity mail to one user and, on success, stamps
  # `inactivity_notification_sent_at`. Any failure -- returned, raised or
  # thrown -- is reported to the admins instead of failing the job.
  defp notify_user(user, years) do
    delivery =
      user
      |> CforumWeb.UserMailer.inactivity_mail(years)
      |> Cforum.Mailer.deliver()

    case delivery do
      {:ok, _mail} ->
        from(u in User, where: u.user_id == ^user.user_id)
        |> Repo.update_all(set: [inactivity_notification_sent_at: Timex.now()])

      {:error, reason} ->
        notify_admins(user, reason)
    end
  rescue
    error -> notify_admins(user, error)
  catch
    thrown -> notify_admins(user, thrown)
  end

  # Sends an error report about the failed notification to all admins.
  defp notify_admins(user, error) do
    admins = Cforum.Users.list_admins()

    user
    |> CforumWeb.NotificationMailer.inactivity_notification_error_mail(error, admins)
    |> Cforum.Mailer.deliver()
  end
end
| 28.811111 | 112 | 0.640571 |
08da0cf8e8ea94f4ac4e1d773386ca8bb419eea0 | 370 | ex | Elixir | test/support/models/user.ex | zillou/ex_machina | 545d1c2532a6124c6dadf8566683c47aef4fe4f2 | [
"MIT"
] | null | null | null | test/support/models/user.ex | zillou/ex_machina | 545d1c2532a6124c6dadf8566683c47aef4fe4f2 | [
"MIT"
] | null | null | null | test/support/models/user.ex | zillou/ex_machina | 545d1c2532a6124c6dadf8566683c47aef4fe4f2 | [
"MIT"
defmodule ExMachina.User do
  use Ecto.Schema

  # Minimal user schema used by the ExMachina test suite.
  schema "users" do
    field(:name, :string)
    field(:admin, :boolean)
    field(:net_worth, :decimal)
    # Virtual: lives only on the struct, never persisted.
    field(:password, :string, virtual: true)

    has_many(:articles, ExMachina.Article, foreign_key: :author_id)
    has_many(:editors, through: [:articles, :editor])
    has_one(:best_article, ExMachina.Article)
  end
end
| 24.666667 | 66 | 0.708108 |
08da0dc7f1f030e000a962151f851a18f7a67fc7 | 1,947 | ex | Elixir | apps/teaching_management_web/lib/teaching_management_web.ex | danielscosta/teaching_management | c703374a27174763d5309b9144ba09488eeb95c4 | [
"MIT"
] | 2 | 2020-07-28T14:10:41.000Z | 2020-10-20T20:32:27.000Z | apps/teaching_management_web/lib/teaching_management_web.ex | danielscosta/teaching_management | c703374a27174763d5309b9144ba09488eeb95c4 | [
"MIT"
] | null | null | null | apps/teaching_management_web/lib/teaching_management_web.ex | danielscosta/teaching_management | c703374a27174763d5309b9144ba09488eeb95c4 | [
"MIT"
defmodule TeachingManagementWeb do
  @moduledoc """
  The entrypoint for defining your web interface, such as controllers,
  views, channels and so on.

  This can be used in your application as:

      use TeachingManagementWeb, :controller
      use TeachingManagementWeb, :view

  The definitions below will be executed for every view, controller, etc.,
  so keep them short and clean, focused on imports, uses and aliases.

  Do NOT define functions inside the quoted expressions below. Instead,
  define any helper function in modules and import those modules here.
  """

  # Quoted boilerplate mixed into every controller.
  def controller do
    quote do
      use Phoenix.Controller, namespace: TeachingManagementWeb

      import Plug.Conn
      import TeachingManagementWeb.Gettext
      alias TeachingManagementWeb.Router.Helpers, as: Routes
    end
  end

  # Quoted boilerplate mixed into every view.
  def view do
    quote do
      use Phoenix.View,
        root: "lib/teaching_management_web/templates",
        namespace: TeachingManagementWeb

      # Import convenience functions from controllers
      import Phoenix.Controller,
        only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]

      # Include shared imports and aliases for views
      unquote(view_helpers())
    end
  end

  # Quoted boilerplate mixed into the router.
  def router do
    quote do
      use Phoenix.Router

      import Plug.Conn
      import Phoenix.Controller
    end
  end

  # Quoted boilerplate mixed into every channel.
  def channel do
    quote do
      use Phoenix.Channel
      import TeachingManagementWeb.Gettext
    end
  end

  # Shared imports and aliases used by all views.
  defp view_helpers do
    quote do
      # Import basic rendering functionality (render, render_layout, etc)
      import Phoenix.View

      import TeachingManagementWeb.ErrorHelpers
      import TeachingManagementWeb.Gettext
      alias TeachingManagementWeb.Router.Helpers, as: Routes
    end
  end

  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end
| 24.64557 | 76 | 0.709296 |
08da144dc03795acb711ac5fd9ce480bb2071a57 | 369 | exs | Elixir | .formatter.exs | webutil/tod | 0abde2ae2295aee88a40933b66adf9b0c6e5992f | [
"MIT"
] | null | null | null | .formatter.exs | webutil/tod | 0abde2ae2295aee88a40933b66adf9b0c6e5992f | [
"MIT"
] | null | null | null | .formatter.exs | webutil/tod | 0abde2ae2295aee88a40933b66adf9b0c6e5992f | [
"MIT"
# Configuration for `mix format`.
[
  # Files and globs the formatter is allowed to touch.
  inputs: [
    "*.exs",
    "config/*.exs",
    "lib/**/*.ex",
    "priv/**/*.exs",
    "scripts/*.exs",
    "test/**/*.{ex,exs}"
  ],
  import_deps: [],
  # Router/endpoint macros that may be written without parentheses.
  locals_without_parens: [
    delete: 3,
    forward: 2,
    get: 3,
    pipe_through: 1,
    patch: 3,
    plug: 1,
    plug: 2,
    post: 3,
    put: 3,
    socket: 2,
    resources: 2,
    resources: 3
  ]
]
| 14.192308 | 26 | 0.447154 |
08da4b2c29c8fa0f3842325f8f5d960b17825eee | 1,643 | ex | Elixir | debian/manpage.1.ex | timmy00274672/demo_dh_make | 9a241f67131019911d07d41407c3240ff70c241a | [
"RSA-MD"
] | null | null | null | debian/manpage.1.ex | timmy00274672/demo_dh_make | 9a241f67131019911d07d41407c3240ff70c241a | [
"RSA-MD"
] | null | null | null | debian/manpage.1.ex | timmy00274672/demo_dh_make | 9a241f67131019911d07d41407c3240ff70c241a | [
"RSA-MD"
] | null | null | null | .\" Hey, EMACS: -*- nroff -*-
.\" (C) Copyright 2020 bmc <timchen@ingrasys.com>,
.\"
.\" First parameter, NAME, should be all caps
.\" Second parameter, SECTION, should be 1-8, maybe w/ subsection
.\" other parameters are allowed: see man(7), man(1)
.TH Helloworld SECTION "February 14 2020"
.\" Please adjust this date whenever revising the manpage.
.\"
.\" Some roff macros, for reference:
.\" .nh disable hyphenation
.\" .hy enable hyphenation
.\" .ad l left justify
.\" .ad b justify to both left and right margins
.\" .nf disable filling
.\" .fi enable filling
.\" .br insert line break
.\" .sp <n> insert n+1 empty lines
.\" for manpage-specific macros, see man(7)
.SH NAME
helloworld \- program to do something
.SH SYNOPSIS
.B helloworld
.RI [ options ] " files" ...
.br
.B bar
.RI [ options ] " files" ...
.SH DESCRIPTION
This manual page documents briefly the
.B helloworld
and
.B bar
commands.
.PP
.\" TeX users may be more comfortable with the \fB<whatever>\fP and
.\" \fI<whatever>\fP escape sequences to invoke bold face and italics,
.\" respectively.
\fBhelloworld\fP is a program that...
.SH OPTIONS
These programs follow the usual GNU command line syntax, with long
options starting with two dashes (`-').
A summary of options is included below.
For a complete description, see the Info files.
.TP
.B \-h, \-\-help
Show summary of options.
.TP
.B \-v, \-\-version
Show version of program.
.SH SEE ALSO
.BR bar (1),
.BR baz (1).
.br
The programs are documented fully by
.IR "The Rise and Fall of a Fooish Bar" ,
available via the Info system.
| 28.824561 | 70 | 0.665855 |
08da4e2202de75649b561682f0180225357eafc2 | 394 | ex | Elixir | web/uploaders/user_image_uploader.ex | kenta-aktsk/media_sample | a0368236bd81c18ac136a54aa7383332a5e3cf36 | [
"MIT"
] | 99 | 2016-04-19T11:11:57.000Z | 2021-12-12T14:38:02.000Z | web/uploaders/user_image_uploader.ex | ogamw/media_sample | a0368236bd81c18ac136a54aa7383332a5e3cf36 | [
"MIT"
] | 48 | 2016-04-06T02:28:46.000Z | 2016-05-31T06:56:56.000Z | web/uploaders/user_image_uploader.ex | ogamw/media_sample | a0368236bd81c18ac136a54aa7383332a5e3cf36 | [
"MIT"
defmodule MediaSample.UserImageUploader do
  @moduledoc """
  Uploader for the `:image` field of the user model; generates a medium and
  a small square PNG thumbnail for each uploaded image.
  """
  use MediaSample.BaseUploader, model: :user, field: :image

  @versions [:medium, :small]

  # Each version is a center-cropped square thumbnail.
  def transform(:medium, _), do: convert("100x100")
  def transform(:small, _), do: convert("50x50")

  # Builds the ImageMagick conversion tuple for a square of the given size.
  defp convert(size) when is_binary(size) do
    {:convert, "-thumbnail #{size}^ -gravity center -extent #{size} -format png", :png}
  end
end
| 23.176471 | 87 | 0.687817 |
08da8d235991749225667adbe3be9a1380eb40f4 | 497 | ex | Elixir | examples/phoenix_app/lib/phoenix_app_web/views/error_view.ex | kianmeng/bakeware | 3c008a1c149ccf22cb3924126f2b8f0be522191f | [
"Apache-2.0"
] | 828 | 2020-09-12T18:56:48.000Z | 2022-03-01T22:34:39.000Z | examples/phoenix_app/lib/phoenix_app_web/views/error_view.ex | kianmeng/bakeware | 3c008a1c149ccf22cb3924126f2b8f0be522191f | [
"Apache-2.0"
] | 54 | 2020-10-23T15:01:10.000Z | 2022-03-22T23:23:15.000Z | examples/phoenix_app/lib/phoenix_app_web/views/error_view.ex | kianmeng/bakeware | 3c008a1c149ccf22cb3924126f2b8f0be522191f | [
"Apache-2.0"
defmodule PhoenixAppWeb.ErrorView do
  use PhoenixAppWeb, :view

  # To customize a particular status code for a certain format, define a
  # matching render clause, e.g.:
  #
  #   def render("500.html", _assigns), do: "Internal Server Error"

  # Fallback: Phoenix derives the status message from the template name,
  # so "404.html" becomes "Not Found".
  def template_not_found(template, _assigns),
    do: Phoenix.Controller.status_message_from_template(template)
end
| 29.235294 | 61 | 0.738431 |
08da8fc62fa0789f2fca33b19e21ac829f28b3e3 | 1,329 | ex | Elixir | lib/bundlex/toolchain.ex | kianmeng/bundlex | c3477977ab1c46bc87c62fbade9b0a7c13c791be | [
"Apache-2.0"
] | 40 | 2018-06-10T20:55:32.000Z | 2021-12-12T22:08:25.000Z | lib/bundlex/toolchain.ex | kianmeng/bundlex | c3477977ab1c46bc87c62fbade9b0a7c13c791be | [
"Apache-2.0"
] | 40 | 2018-07-18T16:14:52.000Z | 2022-03-27T16:33:09.000Z | lib/bundlex/toolchain.ex | kianmeng/bundlex | c3477977ab1c46bc87c62fbade9b0a7c13c791be | [
"Apache-2.0"
defmodule Bundlex.Toolchain do
  @moduledoc false
  alias Bundlex.Helper.MixHelper

  @doc """
  Invokes commands that should be called before the whole compilation process
  for the given platform.

  Implementations should call `Output.raise/1` in case of failure, which will
  break the compilation process. On success, implementations should return
  the list of commands to be called upon compilation.

  The default implementation does nothing.
  """
  @callback before_all!(atom) :: [] | [String.t()]

  @doc """
  Builds the list of compiler commands valid for a certain toolchain.
  """
  @callback compiler_commands(Bundlex.Native.t()) :: [String.t()]

  defmacro __using__(_) do
    quote location: :keep do
      @behaviour unquote(__MODULE__)
      alias unquote(__MODULE__)

      # Default implementations
      @impl unquote(__MODULE__)
      def before_all!(_platform), do: []

      defoverridable before_all!: 1
    end
  end

  # Base output directory for compiled natives: priv/bundlex/<interface>
  # (nil interface yields an empty path segment).
  def output_path(app, native_interface) do
    interface_str = if is_nil(native_interface), do: "", else: "#{native_interface}"

    app
    |> MixHelper.get_priv_dir()
    |> Path.join("bundlex")
    |> Path.join(interface_str)
  end

  # Full output path for a single native: priv/bundlex/<interface>/<name>.
  def output_path(app, native_name, native_interface) do
    app
    |> output_path(native_interface)
    |> Path.join("#{native_name}")
  end
end
| 25.075472 | 83 | 0.697517 |
08daa529a28de3496932b94f8f32abffc6e88bcd | 10,391 | exs | Elixir | test/lib/code_corps_web/controllers/user_controller_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 275 | 2015-06-23T00:20:51.000Z | 2021-08-19T16:17:37.000Z | test/lib/code_corps_web/controllers/user_controller_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 1,304 | 2015-06-26T02:11:54.000Z | 2019-12-12T21:08:00.000Z | test/lib/code_corps_web/controllers/user_controller_test.exs | fikape/code-corps-api | c21674b0b2a19fa26945c94268db8894420ca181 | [
"MIT"
] | 140 | 2016-01-01T18:19:47.000Z | 2020-11-22T06:24:47.000Z | defmodule CodeCorpsWeb.UserControllerTest do
@moduledoc false
use CodeCorpsWeb.ApiCase, resource_name: :user
import CodeCorps.GitHub.TestHelpers
alias CodeCorps.{User, Repo}
# Fixture payloads shared by the request tests below.

# A complete, valid attribute set for creating/updating a user.
@valid_attrs %{
email: "test@user.com",
username: "testuser",
first_name: "Test",
last_name: "User",
website: "http://www.example.com",
twitter: "testuser",
biography: "Just a test user"
}
# Invalid attributes: blank identifiers plus malformed website/twitter
# values, used to exercise 422 validation responses.
@invalid_attrs %{
email: "",
username: "",
website: "---_<>-blank.com",
twitter: " @ testuser"
}
# No JSON-API relationships are sent in these requests.
@relationships %{}
# Tests for GET /users (index).
describe "index" do
# The index lists all users; response ids must match the inserted pair.
test "lists all entries on index", %{conn: conn} do
[user_1, user_2] = insert_pair(:user)
conn
|> request_index
|> json_response(200)
|> assert_ids_from_response([user_1.id, user_2.id])
end
# ?filter[id]=a,b restricts the result set to the given comma-separated ids.
test "filters resources on index", %{conn: conn} do
[user_1, user_2 | _] = insert_list(3, :user)
path = "users/?filter[id]=#{user_1.id},#{user_2.id}"
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([user_1.id, user_2.id])
end
# ?query= matches users by first name, username and last name
# ("Joe", "joecoder", "Jacko" match "j"; "Max" does not).
test "returns search results on index", %{conn: conn} do
user_1 = insert(:user, first_name: "Joe")
user_2 = insert(:user, username: "joecoder")
user_3 = insert(:user, last_name: "Jacko")
insert(:user, first_name: "Max")
params = %{"query" => "j"}
path = conn |> user_path(:index, params)
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([user_1.id, user_2.id, user_3.id])
end
# ?project_id= returns only users that are members of that project.
test "returns search result on project filter", %{conn: conn} do
user_1 = insert(:user)
user_2 = insert(:user)
project = insert(:project)
insert(:project_user, user: user_1, project: project)
insert(:project_user, user: user_2, project: project)
insert(:project_user)
params = %{"project_id" => project.id}
path = conn |> user_path(:index, params)
conn
|> get(path)
|> json_response(200)
|> assert_ids_from_response([user_1.id, user_2.id])
end
# ?limit= caps the number of records returned (5 of the 6 inserted).
test "limit filter limits results on index", %{conn: conn} do
insert_list(6, :user)
params = %{"limit" => 5}
path = conn |> user_path(:index, params)
json = conn |> get(path) |> json_response(200)
returned_users_length = json["data"] |> length
assert returned_users_length == 5
end
end
# Tests for GET /users/:id (show).
describe "show" do
test "shows chosen resource", %{conn: conn} do
user = insert(:user)
conn
|> request_show(user)
|> json_response(200)
|> assert_id_from_response(user.id)
end
# The :authenticated tag supplies `current_user` in the test context
# (matched in the test's second argument below).
@tag :authenticated
test "renders email when authenticated", %{conn: conn, current_user: current_user} do
assert conn |> request_show(current_user) |> json_response(200)
end
test "renders 404 when id is nonexistent", %{conn: conn} do
assert conn |> request_show(:not_found) |> json_response(404)
end
end
describe "create" do
  test "creates and renders resource when data is valid", %{conn: conn} do
    # The password is a virtual field, so it is layered on top of @valid_attrs.
    attrs = Map.put(@valid_attrs, :password, "password")

    conn = post conn, user_path(conn, :create), %{
      "data" => %{
        "attributes" => attrs
      }
    }

    assert conn |> json_response(201)
  end

  # Signup must fire a "Signed Up" analytics event keyed by the new user's id.
  test "calls segment tracking after user is created", %{conn: conn} do
    conn = post conn, user_path(conn, :create), %{
      "meta" => %{},
      "data" => %{
        "type" => "user",
        "attributes" => Map.put(@valid_attrs, :password, "password"),
        "relationships" => @relationships
      }
    }

    # JSON:API ids are strings; convert before pin-matching the tracked message.
    id = json_response(conn, 201)["data"]["id"] |> String.to_integer
    assert_received {:track, ^id, "Signed Up", %{}}
  end

  test "does not create resource and renders errors when data is invalid", %{conn: conn} do
    attrs = Map.put(@invalid_attrs, :password, "password")

    conn = post conn, user_path(conn, :create), %{
      "data" => %{
        "attributes" => attrs
      }
    }

    assert conn |> json_response(422)
  end
end
describe "update" do
  @tag :authenticated
  test "updates and renders chosen resource when data is valid", %{conn: conn} do
    user = insert(:user)
    attrs = Map.put(@valid_attrs, :password, "password")

    params = %{
      "meta" => %{},
      "data" => %{
        "type" => "user",
        "id" => user.id,
        "attributes" => attrs,
        "relationships" => @relationships
      }
    }

    path = user_path(conn, :update, user)
    # Users may only update themselves, so authenticate as the same user.
    assert conn |> authenticate(user) |> put(path, params) |> json_response(200)
  end

  # A profile update should emit both an :identify and a :track analytics call.
  test "tracks authentication & update profile events in Segment", %{conn: conn} do
    user = insert(:user, email: "original@mail.com")
    attrs = Map.put(@valid_attrs, :password, "password")

    params = %{
      "meta" => %{},
      "data" => %{
        "type" => "user",
        "id" => user.id,
        "attributes" => attrs,
        "relationships" => @relationships
      }
    }

    path = user_path(conn, :update, user)

    conn =
      conn
      |> authenticate(user)
      |> put(path, params)

    id = json_response(conn, 200)["data"]["id"] |> String.to_integer
    # The identify call carries the pre-update email address.
    assert_received {:identify, ^id, %{email: "original@mail.com"}}
    assert_received {:track, ^id, "Updated Profile", %{}}
  end

  # Authorization: authenticating as someone else yields 403 Forbidden.
  test "does not update when authorized as different user", %{conn: conn} do
    [user, another_user] = insert_pair(:user)
    attrs = Map.put(@valid_attrs, :password, "password")

    path = user_path(conn, :update, user)
    params = %{
      "meta" => %{},
      "data" => %{
        "type" => "user",
        "id" => user.id,
        "attributes" => attrs,
        "relationships" => @relationships
      }
    }

    conn =
      conn
      |> authenticate(another_user)
      |> put(path, params)

    assert json_response(conn, 403)
  end

  test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
    user = insert(:user)
    path = user_path(conn, :update, user)
    params = %{
      "meta" => %{},
      "data" => %{
        "type" => "user",
        "id" => user.id,
        "attributes" => @invalid_attrs,
        "relationships" => @relationships
      }
    }

    conn =
      conn
      |> authenticate(user)
      |> put(path, params)

    json = json_response(conn, 422)
    assert json["errors"] != %{}
  end

  # The state machine field: sending "edit_profile" moves the user to
  # "edited_profile" and clears the transient transition flag.
  test "transitions from one state to the next", %{conn: conn} do
    user = insert(:user)
    conn = put authenticate(conn, user), user_path(conn, :update, user), %{
      "data" => %{
        "type" => "user",
        "id" => user.id,
        "attributes" => %{"password" => "password", "state_transition" => "edit_profile"}
      }
    }

    %{"data" => %{"id" => id}} = json_response(conn, 200)
    user = Repo.get(User, id)
    assert user.state == "edited_profile"
    # Transition was successful, so we should unset it
    assert user.state_transition == nil
  end
end
describe "github_oauth" do
  # Fake OAuth callback payload; the mocked GitHub API accepts any code/state.
  @attrs %{"code" => "foo", "state" => "bar"}

  @tag :authenticated
  test "return the user when current user connects successfully", %{conn: conn, current_user: current_user} do
    path = user_path(conn, :github_oauth)
    json = conn |> post(path, @attrs) |> json_response(200)

    assert json["data"]["id"] |> String.to_integer == current_user.id
    assert json["data"]["attributes"]["github-id"]
  end

  @tag :authenticated
  test "tracks event on segment when current user connects successfully", %{conn: conn, current_user: %{id: id}} do
    path = user_path(conn, :github_oauth)

    assert conn |> post(path, @attrs) |> json_response(200)

    # The tracked payload must reflect the user *after* the GitHub link,
    # hence the reload from the database before building traits.
    expected_data =
      User
      |> Repo.get(id)
      |> CodeCorps.Analytics.SegmentTraitsBuilder.build

    assert_received {:track, ^id, "Connected to GitHub", ^expected_data}
  end

  test "requires authentication", %{conn: conn} do
    path = user_path(conn, :github_oauth)
    assert conn |> post(path, @attrs) |> json_response(401)
  end

  @tag :authenticated
  test "renders 500 if there's a GitHub API error", %{conn: conn} do
    path = user_path(conn, :github_oauth)
    # Swap in the always-failing API double for the duration of the request.
    with_mock_api(CodeCorps.GitHub.FailureAPI) do
      assert conn |> post(path, @attrs) |> json_response(500)
    end
  end
end
describe "email_available" do
  test "returns valid and available when email is valid and available", %{conn: conn} do
    resp = get conn, user_path(conn, :email_available, %{email: "available@mail.com"})
    json = json_response(resp, 200)
    assert json["available"]
    assert json["valid"]
  end

  test "returns valid but inavailable when email is valid but taken", %{conn: conn} do
    insert(:user, email: "used@mail.com")
    resp = get conn, user_path(conn, :email_available, %{email: "used@mail.com"})
    json = json_response(resp, 200)
    refute json["available"]
    assert json["valid"]
  end

  # An invalid address is reported as available (nobody can own it) but invalid.
  test "returns as available but invalid when email is invalid", %{conn: conn} do
    resp = get conn, user_path(conn, :email_available, %{email: "not_an_email"})
    json = json_response(resp, 200)
    assert json["available"]
    refute json["valid"]
  end
end
describe "username_available" do
  # Mirrors the email_available endpoint: "available" and "valid" are
  # reported independently of each other.
  test "returns as valid and available when username is valid and available", %{conn: conn} do
    path = user_path(conn, :username_available, %{username: "available"})
    payload = conn |> get(path) |> json_response(200)

    assert payload["available"]
    assert payload["valid"]
  end

  test "returns as valid, but inavailable when username is valid but taken", %{conn: conn} do
    insert(:user, username: "used")

    path = user_path(conn, :username_available, %{username: "used"})
    payload = conn |> get(path) |> json_response(200)

    refute payload["available"]
    assert payload["valid"]
  end

  test "returns available but invalid when username is invalid", %{conn: conn} do
    path = user_path(conn, :username_available, %{username: ""})
    payload = conn |> get(path) |> json_response(200)

    assert payload["available"]
    refute payload["valid"]
  end
end
end
| 29.353107 | 117 | 0.589645 |
08daae6f6fe87df2f95a84eed09650525e7b36d5 | 553 | ex | Elixir | apps/mishka_html/lib/mishka_html_web/live/components/admin/form/editor_component.ex | mojtaba-naserei/mishka-cms | 1f31f61347bab1aae6ba0d47c5515a61815db6c9 | [
"Apache-2.0"
] | 3 | 2021-06-27T10:26:51.000Z | 2022-01-10T13:56:08.000Z | apps/mishka_html/lib/mishka_html_web/live/components/admin/form/editor_component.ex | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | null | null | null | apps/mishka_html/lib/mishka_html_web/live/components/admin/form/editor_component.ex | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | null | null | null | defmodule MishkaHtmlWeb.Admin.Form.EditorComponent do
use MishkaHtmlWeb, :live_component

# Renders the shared rich-text editor widget used by admin forms.
#
# The editor containers carry `phx-update="ignore"` because the editor is
# mounted client-side by the "Editor" JS hook and must not be clobbered by
# LiveView re-renders.
#
# NOTE(review): `String.to_atom(@form.type)` creates atoms from the form
# definition; assumed to be a fixed, developer-defined set and never raw
# user input — confirm at the call sites.
def render(assigns) do
  ~H"""
  <div>
    <div id="editor-main-dive" class="col-sm-12 editor-diver vazir rtl" phx-update="ignore">
      <div id="editor" phx-hook="Editor" class="bw vazir rtl" phx-update="ignore"></div>
    </div>
    <div class="form-error-tag vazir" id="editor-tag-error">
      <%= error_tag @f, String.to_atom(@form.type) %>
    </div>
    <div class="space20"></div>
  </div>
  """
end
end
| 30.722222 | 96 | 0.59132 |
08dae061b77e0b43543673e84a40bdec0ad61b20 | 178 | ex | Elixir | lib/sutur_web/controllers/auth_controller.ex | ab-zu/sutur | f314ed29b344fbe0139bd87ac01caf577b1d592e | [
"MIT"
] | 1 | 2021-11-16T02:18:31.000Z | 2021-11-16T02:18:31.000Z | lib/sutur_web/controllers/auth_controller.ex | ab-zu/sutur | f314ed29b344fbe0139bd87ac01caf577b1d592e | [
"MIT"
] | null | null | null | lib/sutur_web/controllers/auth_controller.ex | ab-zu/sutur | f314ed29b344fbe0139bd87ac01caf577b1d592e | [
"MIT"
] | null | null | null | defmodule SuturWeb.Controllers.AuthController do
use SuturWeb, :controller
use ShopifexWeb.AuthController

# Invoked by Shopifex after a shop finishes installing the app.
# We keep the default behaviour and simply delegate to the overridable
# implementation injected by `use ShopifexWeb.AuthController`.
def after_install(conn, shop), do: super(conn, shop)
end
| 19.777778 | 48 | 0.775281 |
08db24410a5a22f3f446e6b162c6c30b61c273fc | 1,701 | ex | Elixir | lib/podcatcher/web/endpoint.ex | danjac/podcatcher | 748cf7419aebfff9216e7ff9353a5bdb46d3d7b1 | [
"MIT"
] | null | null | null | lib/podcatcher/web/endpoint.ex | danjac/podcatcher | 748cf7419aebfff9216e7ff9353a5bdb46d3d7b1 | [
"MIT"
] | null | null | null | lib/podcatcher/web/endpoint.ex | danjac/podcatcher | 748cf7419aebfff9216e7ff9353a5bdb46d3d7b1 | [
"MIT"
] | null | null | null | defmodule Podcatcher.Web.Endpoint do
use Phoenix.Endpoint, otp_app: :podcatcher

socket "/socket", Podcatcher.Web.UserSocket

# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
  at: "/", from: :podcatcher, gzip: false,
  only: ~w(css fonts images js favicon.ico robots.txt)

# User uploads, served from <cwd>/uploads.
# NOTE(review): System.cwd() depends on where the VM was started (and is
# deprecated on newer Elixir) — a configured absolute path would be safer.
plug Plug.Static, at: "/uploads", from: Path.join(System.cwd(), "uploads")

# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
  socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
  plug Phoenix.LiveReloader
  plug Phoenix.CodeReloader
end

plug Plug.RequestId
plug Plug.Logger

plug Plug.Parsers,
  parsers: [:urlencoded, :multipart, :json],
  pass: ["*/*"],
  json_decoder: Poison

plug Plug.MethodOverride
plug Plug.Head

# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
  store: :cookie,
  key: "_podcatcher_key",
  signing_salt: "kEMm0g+R"

# The router comes last so every plug above runs before dispatch.
plug Podcatcher.Web.Router
@doc """
Dynamically loads configuration from the system environment on startup.

Reads the `PORT` environment variable and injects it into the endpoint's
`:http` configuration, returning `{:ok, config}`. Raises when `PORT`
is not set.
"""
def load_from_system_env(config) do
  case System.get_env("PORT") do
    nil ->
      raise "expected the PORT environment variable to be set"

    port ->
      {:ok, Keyword.put(config, :http, [:inet6, port: port])}
  end
end
end
| 29.327586 | 93 | 0.715461 |
08db31a52f9c9fe1026c5dddebae3a674d3a3db3 | 1,147 | exs | Elixir | examples/org_uplink/test/org_uplink_test.exs | elixir-uplink/uplink | 854c7eba810857ffda1361c8cd449068133f2ce4 | [
"MIT"
] | 40 | 2021-01-25T08:21:25.000Z | 2021-08-24T21:47:22.000Z | examples/org_uplink/test/org_uplink_test.exs | elixir-uplink/uplink | 854c7eba810857ffda1361c8cd449068133f2ce4 | [
"MIT"
] | 1 | 2021-02-06T21:38:38.000Z | 2021-02-06T21:38:38.000Z | examples/org_uplink/test/org_uplink_test.exs | elixir-uplink/uplink | 854c7eba810857ffda1361c8cd449068133f2ce4 | [
"MIT"
] | 3 | 2021-01-25T10:24:16.000Z | 2021-02-11T20:34:17.000Z | defmodule OrgUplinkTest do
use ExUnit.Case
doctest OrgUplink

alias Uplink.Monitors

# Boots OrgUplink under a fresh DynamicSupervisor with a fully customised
# configuration: monitors, a custom poller, an extra metric definition and
# a Prometheus exporter port.
test "works great good" do
  _sup_pid =
    start_supervised!(
      DynamicSupervisor.child_spec(name: MyTestAppRoot.Supervisor, strategy: :one_for_one)
    )

  spec = {
    OrgUplink,
    [
      monitors: [
        {Monitors.Ecto, [repo_prefix: :my_app]},
        Monitors.Phoenix
      ],
      poller_specs: [
        # Every 10 (time units) invoke TestModule.test_emitter/0.
        {10, [{TestModule, :test_emitter, []}]}
      ],
      metric_definitions: [
        Telemetry.Metrics.counter("poller.test.event.lasers")
      ],
      prometheus: [port: 9888]
    ]
  }

  # Successfully starting the child is the whole assertion here.
  {:ok, _bb_pid} = DynamicSupervisor.start_child(MyTestAppRoot.Supervisor, spec)
end
# Same boot path, but relying entirely on Uplink's default options.
test "works great good with defaults" do
  _sup_pid =
    start_supervised!(
      DynamicSupervisor.child_spec(name: MyTestAppRoot.Supervisor, strategy: :one_for_one)
    )

  {:ok, _bb_pid} = DynamicSupervisor.start_child(MyTestAppRoot.Supervisor, Uplink)
end
defmodule TestModule do
  @moduledoc false

  # Emits a fixed telemetry event so the poller spec configured above has
  # something measurable to execute.
  def test_emitter, do: :telemetry.execute([:poller, :test, :event], %{lasers: 5})
end
end
| 23.895833 | 92 | 0.627724 |
08db58595aaa718e08b9c0a6d6f077530517f1e4 | 524 | ex | Elixir | lib/exfile/ecto/cast_filename.ex | sreecodeslayer/exfile | c88288563d688fb47a6fcae190dbe1b8eb64bf9b | [
"MIT"
] | 100 | 2015-12-25T12:38:41.000Z | 2021-12-31T11:41:20.000Z | lib/exfile/ecto/cast_filename.ex | sreecodeslayer/exfile | c88288563d688fb47a6fcae190dbe1b8eb64bf9b | [
"MIT"
] | 62 | 2015-12-26T01:43:54.000Z | 2019-09-15T16:16:35.000Z | lib/exfile/ecto/cast_filename.ex | sreecodeslayer/exfile | c88288563d688fb47a6fcae190dbe1b8eb64bf9b | [
"MIT"
] | 22 | 2016-04-19T11:54:38.000Z | 2021-09-29T14:48:46.000Z | if Code.ensure_loaded?(Ecto) do
defmodule Exfile.Ecto.CastFilename do
  @moduledoc """
  Changeset helper that copies the original filename of an uploaded
  Exfile file into a companion string field.
  """

  alias Ecto.Changeset

  @doc """
  Copies the filename of the file changed under `field` into
  `filename_field` (defaults to `:"<field>_filename"`).

  When `field` has no pending change, the changeset is returned untouched.
  """
  def cast_filename(changeset, field) when is_atom(field) do
    cast_filename(changeset, field, String.to_atom("#{field}_filename"))
  end

  def cast_filename(changeset, field, filename_field) when is_atom(filename_field) do
    case Changeset.get_change(changeset, field) do
      nil ->
        changeset

      uploaded_file ->
        Changeset.put_change(changeset, filename_field, uploaded_file.meta["filename"])
    end
  end
end
end
| 24.952381 | 85 | 0.719466 |
08db6e2b55acb54882b975a84b001ef330c30680 | 1,512 | exs | Elixir | mix.exs | kianmeng/ex_check | d4b609f1de554c2998b398adfe796d85a3d6d7c2 | [
"MIT"
] | 225 | 2019-07-21T14:44:17.000Z | 2022-03-31T11:08:07.000Z | mix.exs | kianmeng/ex_check | d4b609f1de554c2998b398adfe796d85a3d6d7c2 | [
"MIT"
] | 23 | 2019-07-30T03:05:42.000Z | 2022-03-06T18:11:50.000Z | mix.exs | kianmeng/ex_check | d4b609f1de554c2998b398adfe796d85a3d6d7c2 | [
"MIT"
] | 9 | 2019-11-23T23:04:39.000Z | 2022-03-29T00:54:34.000Z | defmodule ExCheck.MixProject do
use Mix.Project

# Canonical project URL and Hex package summary, reused below by
# docs/0 and package/0.
@github_url "https://github.com/karolsluszniak/ex_check"
@description "One task to efficiently run all code analysis & testing tools in an Elixir project"
# Mix project definition.
def project do
  [
    app: :ex_check,
    version: "0.14.0",
    elixir: "~> 1.7",
    elixirc_paths: elixirc_paths(Mix.env()),
    start_permanent: Mix.env() == :prod,
    description: @description,
    deps: deps(),
    docs: docs(),
    package: package(),
    # :crypto is loaded lazily, so exclude it from xref's unknown-module checks.
    xref: [exclude: [:crypto]],
    # The bundled check tools are test-only deps, so their mix tasks must
    # default to the :test environment.
    preferred_cli_env: [
      check: :test,
      credo: :test,
      dialyxir: :test,
      doctor: :test,
      sobelow: :test
    ]
  ]
end
# The test env additionally compiles support helpers; all other envs
# compile lib only.
defp elixirc_paths(:test), do: ["test/support", "lib"]
defp elixirc_paths(_env), do: ["lib"]
# OTP application configuration; no extra applications required at runtime.
def application do
  [
    extra_applications: []
  ]
end
# All tool dependencies are dev/test-only and excluded from the runtime.
defp deps do
  [
    {:credo, ">= 0.0.0", only: [:test], runtime: false},
    {:doctor, ">= 0.0.0", only: [:test], runtime: false},
    {:ex_doc, ">= 0.0.0", only: [:dev, :test], runtime: false},
    {:sobelow, ">= 0.0.0", only: [:test], runtime: false}
  ]
end
# ExDoc configuration; the README doubles as the docs landing page.
defp docs do
  [
    main: "readme",
    logo: "logo.svg",
    source_url: @github_url,
    extras: ["README.md"]
  ]
end
# Hex package metadata.
defp package do
  [
    maintainers: ["Karol Słuszniak"],
    licenses: ["MIT"],
    links: %{
      "GitHub repository" => @github_url,
      "Changelog" => @github_url <> "/blob/master/CHANGELOG.md"
    }
  ]
end
end
| 22.567164 | 99 | 0.546958 |
08db765beb9152e513b2b47b707f5733e13cdb07 | 1,430 | ex | Elixir | lib/game_of_life/cell.ex | BeyondScheme/elixir-game_of_life | f5a72c79547cd6d7632d90118fb9cb31d80cc66b | [
"MIT"
] | 57 | 2016-04-28T09:02:27.000Z | 2021-12-16T20:03:05.000Z | lib/game_of_life/cell.ex | BeyondScheme/elixir-game_of_life | f5a72c79547cd6d7632d90118fb9cb31d80cc66b | [
"MIT"
] | 1 | 2016-05-30T05:51:10.000Z | 2016-05-31T21:32:44.000Z | lib/game_of_life/cell.ex | BeyondScheme/elixir-game_of_life | f5a72c79547cd6d7632d90118fb9cb31d80cc66b | [
"MIT"
] | 4 | 2016-05-27T18:38:23.000Z | 2019-05-30T11:03:45.000Z | defmodule GameOfLife.Cell do
# Conway rule for live cells: a live cell survives with exactly 2 or 3
# live neighbours; anything else dies.
def keep_alive?(alive_cells, {x, y} = _alive_cell) do
  count_neighbours(alive_cells, x, y, 0) in [2, 3]
end
# Conway rule for dead cells: a dead cell is born when it has exactly
# three live neighbours.
def become_alive?(alive_cells, {x, y} = _dead_cell) do
  count_neighbours(alive_cells, x, y, 0) == 3
end
# All distinct dead cells adjacent to at least one live cell — the only
# candidates for birth in the next generation.
def dead_neighbours(alive_cells) do
  alive_cells
  |> neighbours([])
  |> Enum.uniq()
  |> Kernel.--(alive_cells)
end
# Returns `acc` followed by the 8 Moore neighbours of every cell, in the
# same per-cell order as before.
#
# Rewritten with a single `Enum.flat_map/2`: the previous recursion appended
# to the accumulator with `++` on every step, which was O(n²) in the number
# of cells.
defp neighbours(cells, acc) do
  acc ++
    Enum.flat_map(cells, fn {x, y} ->
      [
        {x - 1, y - 1}, {x, y - 1}, {x + 1, y - 1},
        {x - 1, y},                 {x + 1, y},
        {x - 1, y + 1}, {x, y + 1}, {x + 1, y + 1}
      ]
    end)
end
# Counts how many of `cells` are Moore neighbours of {x, y}.
#
# Replaces the original 8-clause case (one clause per neighbour offset) with
# a single distance test: a cell is a neighbour when both coordinate deltas
# are within 1 and it is not the centre cell itself. Non-tuple elements are
# ignored, matching the old `_not_neighbour -> 0` fallback.
defp count_neighbours(cells, x, y, count) do
  count +
    Enum.count(cells, fn
      {^x, ^y} -> false
      {hx, hy} -> abs(hx - x) <= 1 and abs(hy - y) <= 1
      _other -> false
    end)
end
end
| 29.183673 | 65 | 0.520979 |
08dbb89d5b665f3fc1d544b1ce278a213629215f | 556 | exs | Elixir | orderapi/test/orderapi_web/controllers/order_controller_test.exs | iandeherdt/phoenixshop | cd6e223c676505b75c1340b96908468a5c09fd7c | [
"Apache-2.0"
] | 1 | 2018-03-06T10:32:22.000Z | 2018-03-06T10:32:22.000Z | orderapi/test/orderapi_web/controllers/order_controller_test.exs | iandeherdt/phoenixshop | cd6e223c676505b75c1340b96908468a5c09fd7c | [
"Apache-2.0"
] | null | null | null | orderapi/test/orderapi_web/controllers/order_controller_test.exs | iandeherdt/phoenixshop | cd6e223c676505b75c1340b96908468a5c09fd7c | [
"Apache-2.0"
] | null | null | null | defmodule OrderapiWeb.OrderControllerTest do
use OrderapiWeb.ConnCase
import OrderapiWeb.Factory

test "#index returns a list of orders" do
  conn = build_conn()
  order = insert(:order)
  conn = get conn, order_path(conn, :index)

  # Timestamps are rendered as ISO-8601 strings, so the NaiveDateTime
  # fields are converted before comparing against the JSON payload.
  assert json_response(conn, 200) == %{
    "orders" => [%{
      "number" => order.number,
      "customer" => order.customer,
      "inserted_at" => Elixir.NaiveDateTime.to_iso8601(order.inserted_at),
      "updated_at" => Elixir.NaiveDateTime.to_iso8601(order.updated_at)
    }]
  }
end
end | 27.8 | 76 | 0.654676 |
08dbb913e20ec0784629f23cc3156e713f9a8448 | 1,271 | ex | Elixir | apps/omg_child_chain_rpc/lib/omg_child_chain_rpc/application.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_child_chain_rpc/lib/omg_child_chain_rpc/application.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_child_chain_rpc/lib/omg_child_chain_rpc/application.ex | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.ChildChainRPC.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  require Logger

  # Boots a one-for-one supervision tree whose single child is the RPC
  # web endpoint.
  def start(_type, _args) do
    _ = Logger.info("Started application #{__MODULE__}")

    children = [{OMG.ChildChainRPC.Web.Endpoint, []}]

    Supervisor.start_link(children,
      strategy: :one_for_one,
      name: OMG.ChildChainRPC.Supervisor
    )
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  def config_change(changed, _new, removed) do
    OMG.ChildChainRPC.Web.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 31.775 | 74 | 0.749017 |
08dbe4111a6b1b0011afb54eccf7dd170adbeee8 | 135 | exs | Elixir | test/mirage_test.exs | s0kil/mirage | 73722bea1cd3be17a842db25599fe51f95d246f7 | [
"Apache-2.0"
] | 1 | 2020-09-15T22:13:19.000Z | 2020-09-15T22:13:19.000Z | test/mirage_test.exs | s0kil/mirage | 73722bea1cd3be17a842db25599fe51f95d246f7 | [
"Apache-2.0"
] | null | null | null | test/mirage_test.exs | s0kil/mirage | 73722bea1cd3be17a842db25599fe51f95d246f7 | [
"Apache-2.0"
] | null | null | null | defmodule MirageTest do
use ExUnit.Case
doctest Mirage

# Smoke test for the library entry point.
test "greets the world" do
  assert Mirage.hello() == :world
end
end
| 15 | 35 | 0.703704 |
08dc114ed8a58a8025b55c680aae6202d0e496dd | 1,680 | ex | Elixir | web/models/user.ex | nsarno/winter | a65a6aa61d2b1af39277338277f8b3f479643939 | [
"MIT"
] | 3 | 2015-08-24T11:44:19.000Z | 2016-10-01T21:37:05.000Z | web/models/user.ex | nsarno/winter | a65a6aa61d2b1af39277338277f8b3f479643939 | [
"MIT"
] | null | null | null | web/models/user.ex | nsarno/winter | a65a6aa61d2b1af39277338277f8b3f479643939 | [
"MIT"
] | null | null | null | defmodule Storm.User do
use Storm.Web, :model

alias Storm.User
# NOTE(review): `Repo` is aliased but unused in the visible code — confirm
# before removing.
alias Storm.Repo

schema "users" do
  field :name, :string
  field :email, :string
  # Hex-encoded PBKDF2 digest persisted instead of the raw password.
  field :password_digest, :string
  # Virtual: accepted on changesets, never stored.
  field :password, :string, virtual: true

  has_many :projects, Storm.Project, on_delete: :fetch_and_delete
  has_many :projects_missions, through: [:projects, :missions]

  timestamps
end

# Field lists for the legacy string-based cast/4 API.
@required_fields ~w(name email password)
@optional_fields ~w()
@doc """
Checks whether `pwd` matches the user's stored password digest.

Returns `true` if the password is valid, `false` otherwise (including
when the user has no stored digest).
"""
def verify_password(%User{password_digest: digest}, pwd) when is_binary(digest) do
  # Constant-time comparison avoids leaking digest information through
  # timing side channels; the previous `==` comparison was not
  # constant-time. Plug.Crypto is already a dependency here (see
  # digest_password/1 below).
  Plug.Crypto.secure_compare(digest, digest_password(pwd))
end

# A user without a stored digest can never authenticate (previously `==`
# against nil silently returned false; keep that contract explicitly).
def verify_password(%User{}, _pwd), do: false
@doc """
Creates a changeset based on the `model` and `params`.
If no params are provided, an invalid changeset is returned
with no validation performed.
"""
def changeset(model, params \\ :empty) do
  model
  |> cast(params, @required_fields, @optional_fields)
  # Emails must be unique (enforced by a DB unique index).
  |> unique_constraint(:email)
  |> validate_length(:password, min: 6)
  # Derives :password_digest from the submitted password (see below).
  |> validate_password
end
# Hashes a submitted password into :password_digest, or records an error
# when no binary password was provided in the raw params.
defp validate_password(changeset) do
  case changeset.params["password"] do
    pwd when is_binary(pwd) ->
      Ecto.Changeset.put_change(changeset, :password_digest, digest_password(pwd))

    _missing ->
      Ecto.Changeset.add_error(changeset, :password, "invalid")
  end
end
@doc """
Hash password to store it securely in database.
Returns an hexadecimal encoded string.
"""
# NOTE(review): the endpoint's :secret_key_base is used as a *fixed* PBKDF2
# salt, so identical passwords produce identical digests and a leaked
# secret_key_base weakens every stored digest. A per-user random salt
# (e.g. bcrypt via Comeonin) would be stronger — but changing it now would
# invalidate existing digests, so flagging rather than fixing here.
def digest_password pwd do
  import Plug.Crypto.KeyGenerator, only: [generate: 2]
  to_hex generate(pwd, Storm.Endpoint.config :secret_key_base)
end

# Lowercase hex encoding of the raw derived key.
defp to_hex(value), do: Base.encode16(value, case: :lower)
end
| 26.25 | 83 | 0.693452 |
08dc12412acc42dcc9a2fcc46d5be701f667f97d | 497 | exs | Elixir | config/test.exs | kemm/cat_show | 42f47f93ecec48700a4d5373be27138cf907d6cb | [
"BSD-3-Clause"
] | null | null | null | config/test.exs | kemm/cat_show | 42f47f93ecec48700a4d5373be27138cf907d6cb | [
"BSD-3-Clause"
] | 1 | 2021-05-11T15:49:58.000Z | 2021-05-11T15:49:58.000Z | config/test.exs | kemm/cat_show | 42f47f93ecec48700a4d5373be27138cf907d6cb | [
"BSD-3-Clause"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :cat_show, CatShowWeb.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :cat_show, CatShow.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "cat_show_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
| 24.85 | 56 | 0.736419 |
08dc16f1d9985a0f2010bf104fb6fab9a5def272 | 9,372 | ex | Elixir | clients/calendar/lib/google_api/calendar/v3/api/settings.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/calendar/lib/google_api/calendar/v3/api/settings.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/calendar/lib/google_api/calendar/v3/api/settings.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Calendar.V3.Api.Settings do
  @moduledoc """
  API calls for all endpoints tagged `Settings`.
  """

  alias GoogleApi.Calendar.V3.Connection
  alias GoogleApi.Gax.{Request, Response}

  # NOTE: this module is auto-generated (see the file header); fix issues in
  # the generator, not by hand-editing here.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Returns a single user setting.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Calendar.V3.Connection.t`) - Connection to server
  *   `setting` (*type:* `String.t`) - The id of the user setting.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Calendar.V3.Model.Setting{}}` on success
  *   `{:error, info}` on failure
  """
  @spec calendar_settings_get(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.Calendar.V3.Model.Setting.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def calendar_settings_get(connection, setting, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/users/me/settings/{setting}", %{
        "setting" => URI.encode(setting, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Calendar.V3.Model.Setting{}])
  end

  @doc """
  Returns all user settings for the authenticated user.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Calendar.V3.Connection.t`) - Connection to server
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:maxResults` (*type:* `integer()`) - Maximum number of entries returned on one result page. By default the value is 100 entries. The page size can never be larger than 250 entries. Optional.
      *   `:pageToken` (*type:* `String.t`) - Token specifying which result page to return. Optional.
      *   `:syncToken` (*type:* `String.t`) - Token obtained from the nextSyncToken field returned on the last page of results from the previous list request. It makes the result of this list request contain only entries that have changed since then.
  If the syncToken expires, the server will respond with a 410 GONE response code and the client should clear its storage and perform a full synchronization without any syncToken.
  Learn more about incremental synchronization.
  Optional. The default is to return all entries.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Calendar.V3.Model.Settings{}}` on success
  *   `{:error, info}` on failure
  """
  @spec calendar_settings_list(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.Calendar.V3.Model.Settings.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def calendar_settings_list(connection, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :maxResults => :query,
      :pageToken => :query,
      :syncToken => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/users/me/settings", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Calendar.V3.Model.Settings{}])
  end

  @doc """
  Watch for changes to Settings resources.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Calendar.V3.Connection.t`) - Connection to server
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:maxResults` (*type:* `integer()`) - Maximum number of entries returned on one result page. By default the value is 100 entries. The page size can never be larger than 250 entries. Optional.
      *   `:pageToken` (*type:* `String.t`) - Token specifying which result page to return. Optional.
      *   `:syncToken` (*type:* `String.t`) - Token obtained from the nextSyncToken field returned on the last page of results from the previous list request. It makes the result of this list request contain only entries that have changed since then.
  If the syncToken expires, the server will respond with a 410 GONE response code and the client should clear its storage and perform a full synchronization without any syncToken.
  Learn more about incremental synchronization.
  Optional. The default is to return all entries.
      *   `:resource` (*type:* `GoogleApi.Calendar.V3.Model.Channel.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Calendar.V3.Model.Channel{}}` on success
  *   `{:error, info}` on failure
  """
  @spec calendar_settings_watch(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.Calendar.V3.Model.Channel.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def calendar_settings_watch(connection, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :maxResults => :query,
      :pageToken => :query,
      :syncToken => :query,
      :resource => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/users/me/settings/watch", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Calendar.V3.Model.Channel{}])
  end
end
| 47.333333 | 250 | 0.646287 |
08dc3387dca5b7f67a2e5fb2e77503f5ea890c86 | 545 | exs | Elixir | priv/repo/migrations/20200710182224_create_results.exs | jinjagit/api | c1a176d8c318e05810bc1635706c56395819191e | [
"MIT"
] | null | null | null | priv/repo/migrations/20200710182224_create_results.exs | jinjagit/api | c1a176d8c318e05810bc1635706c56395819191e | [
"MIT"
] | 10 | 2020-09-28T06:37:48.000Z | 2021-12-22T15:04:38.000Z | priv/repo/migrations/20200710182224_create_results.exs | jinjagit/api | c1a176d8c318e05810bc1635706c56395819191e | [
"MIT"
] | null | null | null | defmodule LiquidVoting.Repo.Migrations.CreateResults do
use Ecto.Migration
# Creates the `results` table, storing aggregated vote tallies
# (in_favor / against) per proposal within an organization.
def change do
  create table(:results) do
    add :in_favor, :integer, default: 0
    add :against, :integer, default: 0
    # NOTE(review): the original declared `default: false` on this :string
    # column — a boolean default on a text column, which databases reject or
    # coerce surprisingly. The column is already required (`null: false`), so
    # the bogus default is dropped rather than replaced.
    add :proposal_url, :string, null: false
    add :organization_id, :uuid, null: false

    timestamps()
  end

  create index(:results, [:organization_id])

  # Enforce one result row per proposal per organization.
  create unique_index(:results, [:organization_id, :proposal_url],
           name: :uniq_index_organization_id_proposal_url
         )
end
end
| 25.952381 | 68 | 0.675229 |
08dc46b6ce4026789d9cfa1460e94aeec57237da | 3,264 | ex | Elixir | lib/mix/lib/mix/tasks/compile.elixir.ex | zillou/elixir | 2cd4f2178299abfc783f399d5b9fdc0c902c3776 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/compile.elixir.ex | zillou/elixir | 2cd4f2178299abfc783f399d5b9fdc0c902c3776 | [
"Apache-2.0"
] | 1 | 2019-04-25T12:52:49.000Z | 2019-04-25T13:27:31.000Z | lib/mix/lib/mix/tasks/compile.elixir.ex | zillou/elixir | 2cd4f2178299abfc783f399d5b9fdc0c902c3776 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Compile.Elixir do
use Mix.Task.Compiler
@recursive true
@manifest "compile.elixir"
@moduledoc """
Compiles Elixir source files.
Elixir is smart enough to recompile only files that have changed
and their dependencies. This means if `lib/a.ex` is invoking
a function defined over `lib/b.ex`, whenever `lib/b.ex` changes,
`lib/a.ex` is also recompiled.
Note it is important to recompile a file's dependencies as
there are often compile time dependencies between them.
## Command line options
* `--force` - forces compilation regardless of modification times
* `--docs` (`--no-docs`) - attaches (or not) documentation to compiled modules
* `--debug-info` (`--no-debug-info`) - attaches (or not) debug info to compiled modules
* `--ignore-module-conflict` - does not emit warnings if a module was previously defined
* `--warnings-as-errors` - treats warnings in the current project as errors and
return a non-zero exit code
* `--long-compilation-threshold N` - sets the "long compilation" threshold
(in seconds) to `N` (see the docs for `Kernel.ParallelCompiler.compile/2`)
* `--all-warnings` - prints warnings even from files that do not need to be recompiled
## Configuration
* `:elixirc_paths` - directories to find source files.
Defaults to `["lib"]`.
* `:elixirc_options` - compilation options that apply to Elixir's compiler.
They are the same as the command line options listed above. They must be specified
as atoms and use underscores instead of dashes (for example, `:debug_info`). These
options can always be overridden from the command line and they have the same defaults
as their command line counterparts, as documented above.
"""
@switches [
force: :boolean,
docs: :boolean,
warnings_as_errors: :boolean,
ignore_module_conflict: :boolean,
debug_info: :boolean,
verbose: :boolean,
long_compilation_threshold: :integer,
all_warnings: :boolean
]
@impl true
def run(args) do
  # Parse CLI flags, then compile any Elixir sources that are stale.
  {cli_opts, _argv, _errors} = OptionParser.parse(args, switches: @switches)

  project = Mix.Project.config()
  source_paths = project[:elixirc_paths]

  if not is_list(source_paths) do
    Mix.raise(":elixirc_paths should be a list of paths, got: #{inspect(source_paths)}")
  end

  compile_dest = Mix.Project.compile_path(project)
  manifest = manifest()
  configs = [Mix.Project.config_mtime() | Mix.Tasks.Compile.Erlang.manifests()]

  # Recompile everything when forced or when any config file is newer than
  # our manifest.
  force? = cli_opts[:force] || Mix.Utils.stale?(configs, [manifest])

  # CLI options take precedence over the project's :elixirc_options.
  compiler_opts =
    (project[:elixirc_options] || [])
    |> Keyword.merge(cli_opts)
    |> xref_exclude_opts(project)

  Mix.Compilers.Elixir.compile(manifest, source_paths, compile_dest, [:ex], force?, compiler_opts)
end
@impl true
def manifests do
  [manifest()]
end

# Full path of this compiler's manifest inside the project's manifest dir.
defp manifest do
  Mix.Project.manifest_path() |> Path.join(@manifest)
end

@impl true
def clean do
  Mix.Compilers.Elixir.clean(manifest(), Mix.Project.compile_path())
end
# TODO: Deprecate project[:xref][:exclude] in v1.11
# Folds the legacy project[:xref][:exclude] list into the :no_warn_undefined
# compiler option; a missing/empty exclude list leaves opts untouched.
defp xref_exclude_opts(opts, project) do
  case List.wrap(project[:xref][:exclude]) do
    [] ->
      opts

    excluded ->
      Keyword.update(opts, :no_warn_undefined, excluded, fn current ->
        List.wrap(current) ++ excluded
      end)
  end
end
end
| 33.649485 | 92 | 0.690564 |
08dc4d8902f91a7fde82cb9dcee06ab94a6038c8 | 830 | ex | Elixir | apps/harvester/lib/show.ex | timnew/tv-harvester | 1eeb618877fd4c25c137a56ec1a77f6fd4050eed | [
"MIT"
] | null | null | null | apps/harvester/lib/show.ex | timnew/tv-harvester | 1eeb618877fd4c25c137a56ec1a77f6fd4050eed | [
"MIT"
] | null | null | null | apps/harvester/lib/show.ex | timnew/tv-harvester | 1eeb618877fd4c25c137a56ec1a77f6fd4050eed | [
"MIT"
] | null | null | null | defmodule Show do
defstruct [:name, :url]

# A TV show tracked by the harvester: display name plus source URL.
@type t :: %Show{name: String.t, url: String.t}

@doc """
Fetches a single show by name from the config store.
"""
# The guard accepts atoms as well as binaries, so the spec now reflects both.
@spec get_show(String.t | atom) :: t
def get_show(name) when is_binary(name) or is_atom(name) do
  ConfigManager.get_struct([Show, name], Show)
end

@doc """
Returns every show currently stored in the config store.
"""
@spec get_all_shows() :: list(t)
def get_all_shows() do
  [Show, "*"]
  |> ConfigManager.keys()
  |> Enum.map(&ConfigManager.get_struct(&1, Show))
end

@doc """
Stores a show under its name.
"""
# NOTE(review): the original spec read `String.test`, which is not a type —
# corrected to `String.t`.
@spec create_show(String.t, String.t) :: :ok
def create_show(name, url) do
  ConfigManager.put_hash([Show, name], name: name, url: url)
end

@spec create_show(t) :: :ok
def create_show(%Show{name: name, url: url}) do
  create_show(name, url)
end

@doc """
Deletes every stored show, returning how many keys were removed.
"""
@spec delete_all_shows :: non_neg_integer
def delete_all_shows do
  [Show, "*"]
  |> ConfigManager.keys()
  |> ConfigManager.delete_all()
end
end
| 24.411765 | 64 | 0.659036 |
08dc535dc5ca63289d7b8a8bc4e588bea7b56405 | 839 | ex | Elixir | lib/http_api/plugs/validate_query_params.ex | aitorlb/elixir_http_api | c8222902bafa971efa4aacc4a58a3d596a83e95c | [
"MIT"
] | null | null | null | lib/http_api/plugs/validate_query_params.ex | aitorlb/elixir_http_api | c8222902bafa971efa4aacc4a58a3d596a83e95c | [
"MIT"
] | null | null | null | lib/http_api/plugs/validate_query_params.ex | aitorlb/elixir_http_api | c8222902bafa971efa4aacc4a58a3d596a83e95c | [
"MIT"
] | null | null | null | defmodule HttpApi.Plugs.ValidateQueryParams do
@moduledoc """
Plug to validate the query params before the given resource plug tries to query
the data. Uses a list of lists as it makes it easier to enforce the number of
params to allow (league and season must be queried together), even if it's more
verbose when passing the options to the plug.
Sends a 400 json encoded response if validation fails.
"""
# Plug callback: options are passed through untouched.
def init(options), do: options

# Accepts the conn when its query-param keys match (as a sorted set) one of
# the allowed key lists exactly; otherwise replies with a 400 JSON error.
def call(%Plug.Conn{query_params: query_params} = conn, list_of_allowed_keys_lists) do
  actual_keys = query_params |> Map.keys() |> Enum.sort()

  allowed? =
    Enum.any?(list_of_allowed_keys_lists, fn allowed -> Enum.sort(allowed) == actual_keys end)

  if allowed? do
    conn
  else
    HttpApi.Plugs.Helpers.send_400_json_resp(conn)
  end
end
end
| 28.931034 | 88 | 0.700834 |
08dc8fa38a77fc4a41f824e18c15c01876e87516 | 920 | ex | Elixir | priv/perf/apps/load_test/lib/runner/smoke.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 177 | 2018-08-24T03:51:02.000Z | 2020-05-30T13:29:25.000Z | priv/perf/apps/load_test/lib/runner/smoke.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 1,042 | 2018-08-25T00:52:39.000Z | 2020-06-01T05:15:17.000Z | priv/perf/apps/load_test/lib/runner/smoke.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 47 | 2018-08-24T12:06:33.000Z | 2020-04-28T11:49:25.000Z | # Copyright 2019-2020 OMG Network Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule LoadTest.Runner.Smoke do
  @moduledoc """
  Smoke test verifying that the childchain, watcher and watcher-info are up
  and running.

  Run with `mix test apps/load_test/test/load_tests/runner/smoke_test.exs`
  """
  use Chaperon.LoadTest

  # A single execution of the smoke scenario, with no extra configuration.
  def scenarios, do: [{{1, LoadTest.Scenario.Smoke}, %{}}]
end
| 30.666667 | 87 | 0.738043 |
08dc94ca690521553c8296dd00752f7e3b1128ab | 6,181 | ex | Elixir | clients/monitoring/lib/google_api/monitoring/v3/model/aggregation.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/monitoring/lib/google_api/monitoring/v3/model/aggregation.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/monitoring/lib/google_api/monitoring/v3/model/aggregation.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Monitoring.V3.Model.Aggregation do
  @moduledoc """
  Describes how to combine multiple time series to provide a different view of the data. Aggregation of time series is done in two steps. First, each time series in the set is aligned to the same time interval boundaries, then the set of time series is optionally reduced in number.Alignment consists of applying the per_series_aligner operation to each time series after its data has been divided into regular alignment_period time intervals. This process takes all of the data points in an alignment period, applies a mathematical transformation such as averaging, minimum, maximum, delta, etc., and converts them into a single data point per period.Reduction is when the aligned and transformed time series can optionally be combined, reducing the number of time series through similar mathematical transformations. Reduction involves applying a cross_series_reducer to all the time series, optionally sorting the time series into subsets with group_by_fields, and applying the reducer to each subset.The raw time series data can contain a huge amount of information from multiple sources. Alignment and reduction transforms this mass of data into a more manageable and representative collection of data, for example "the 95% latency across the average of all tasks in a cluster". This representative data can be more easily graphed and comprehended, and the individual time series data is still available for later drilldown. For more details, see Filtering and aggregation (https://cloud.google.com/monitoring/api/v3/aggregation).

  ## Attributes

  *   `alignmentPeriod` (*type:* `String.t`, *default:* `nil`) - The alignment_period specifies a time interval, in seconds, that is used to divide the data in all the time series into consistent blocks of time. This will be done before the per-series aligner can be applied to the data.The value must be at least 60 seconds. If a per-series aligner other than ALIGN_NONE is specified, this field is required or an error is returned. If no per-series aligner is specified, or the aligner ALIGN_NONE is specified, then this field is ignored.
  *   `crossSeriesReducer` (*type:* `String.t`, *default:* `nil`) - The reduction operation to be used to combine time series into a single time series, where the value of each data point in the resulting series is a function of all the already aligned values in the input time series.Not all reducer operations can be applied to all time series. The valid choices depend on the metric_kind and the value_type of the original time series. Reduction can yield a time series with a different metric_kind or value_type than the input time series.Time series data must first be aligned (see per_series_aligner) in order to perform cross-time series reduction. If cross_series_reducer is specified, then per_series_aligner must be specified, and must not be ALIGN_NONE. An alignment_period must also be specified; otherwise, an error is returned.
  *   `groupByFields` (*type:* `list(String.t)`, *default:* `nil`) - The set of fields to preserve when cross_series_reducer is specified. The group_by_fields determine how the time series are partitioned into subsets prior to applying the aggregation operation. Each subset contains time series that have the same value for each of the grouping fields. Each individual time series is a member of exactly one subset. The cross_series_reducer is applied to each subset of time series. It is not possible to reduce across different resource types, so this field implicitly contains resource.type. Fields not specified in group_by_fields are aggregated away. If group_by_fields is not specified and all the time series have the same resource type, then the time series are aggregated into a single output time series. If cross_series_reducer is not defined, this field is ignored.
  *   `perSeriesAligner` (*type:* `String.t`, *default:* `nil`) - An Aligner describes how to bring the data points in a single time series into temporal alignment. Except for ALIGN_NONE, all alignments cause all the data points in an alignment_period to be mathematically grouped together, resulting in a single data point for each alignment_period with end timestamp at the end of the period.Not all alignment operations may be applied to all time series. The valid choices depend on the metric_kind and value_type of the original time series. Alignment can change the metric_kind or the value_type of the time series.Time series data must be aligned in order to perform cross-time series reduction. If cross_series_reducer is specified, then per_series_aligner must be specified and not equal to ALIGN_NONE and alignment_period must be specified; otherwise, an error is returned.
  """

  # Generated model (see header note "Do not edit this file manually").
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :alignmentPeriod => String.t(),
          :crossSeriesReducer => String.t(),
          :groupByFields => list(String.t()),
          :perSeriesAligner => String.t()
        }

  # Each field/1,2 call maps a struct key to the JSON property of the same name.
  field(:alignmentPeriod)
  field(:crossSeriesReducer)
  field(:groupByFields, type: :list)
  field(:perSeriesAligner)
end
# Wires the generated model into Poison: decoding delegates to the model's
# generated decode/2; encoding to the shared Gax base encoder.
defimpl Poison.Decoder, for: GoogleApi.Monitoring.V3.Model.Aggregation do
  def decode(value, options) do
    GoogleApi.Monitoring.V3.Model.Aggregation.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Monitoring.V3.Model.Aggregation do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 110.375 | 1,535 | 0.789031 |
08dc95802dfe8ac546bca1c9dc0acc117bbe7e1b | 1,628 | ex | Elixir | clients/display_video/lib/google_api/display_video/v1/model/audio_video_offset.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/display_video/lib/google_api/display_video/v1/model/audio_video_offset.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/display_video/lib/google_api/display_video/v1/model/audio_video_offset.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DisplayVideo.V1.Model.AudioVideoOffset do
  @moduledoc """
  The length an audio or a video has been played.

  ## Attributes

  *   `percentage` (*type:* `String.t`, *default:* `nil`) - The offset in percentage of the audio or video duration.
  *   `seconds` (*type:* `String.t`, *default:* `nil`) - The offset in seconds from the start of the audio or video.
  """

  # Generated model (see header note "Do not edit this file manually").
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :percentage => String.t(),
          :seconds => String.t()
        }

  # Each field/1 call maps a struct key to the JSON property of the same name.
  field(:percentage)
  field(:seconds)
end
# Wires the generated model into Poison: decoding delegates to the model's
# generated decode/2; encoding to the shared Gax base encoder.
defimpl Poison.Decoder, for: GoogleApi.DisplayVideo.V1.Model.AudioVideoOffset do
  def decode(value, options) do
    GoogleApi.DisplayVideo.V1.Model.AudioVideoOffset.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.DisplayVideo.V1.Model.AudioVideoOffset do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.56 | 116 | 0.72543 |
08dd1e5102dd92ebeaa11e324d518c9d9865f0b2 | 976 | ex | Elixir | lib/google_api/cloud_tasks/v2/deserializer.ex | balena/elixir-google-api-cloud-tasks | 4858a6427989af9b2d2b23d77f287393e4123022 | [
"Apache-2.0"
] | null | null | null | lib/google_api/cloud_tasks/v2/deserializer.ex | balena/elixir-google-api-cloud-tasks | 4858a6427989af9b2d2b23d77f287393e4123022 | [
"Apache-2.0"
] | null | null | null | lib/google_api/cloud_tasks/v2/deserializer.ex | balena/elixir-google-api-cloud-tasks | 4858a6427989af9b2d2b23d77f287393e4123022 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudTasks.V2.Deserializer do
  @moduledoc """
  Helper functions for deserializing responses into models.

  This module is no longer used. Please use GoogleApi.Gax.ModelBase instead.
  """
  # Deprecated, intentionally empty shim kept only for backward compatibility.
end
| 36.148148 | 77 | 0.765369 |
08dd2d450b56bd2fe03c9dc6a59b485e9bb41395 | 676 | ex | Elixir | 2017/elixir/day18/lib/multi.ex | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | 2017/elixir/day18/lib/multi.ex | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | 2017/elixir/day18/lib/multi.ex | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | defmodule Mu do
@doc """
Demo of process introduction: spawns a mediator plus two peer processes,
then sends the mediator the peers' pids so it can tell each peer about the
other. Each peer prints a greeting naming its "friend".
"""
def run do
  # Fixed typo in the original log message ("runngin" -> "running").
  IO.puts "running @#{inspect self()}"

  # The mediator waits for a pair of pids and introduces them to each other.
  mediator = spawn fn ->
    receive do
      {p0, p1} ->
        send p0, p1
        send p1, p0
    end
  end

  # Both peers run identical code; the original duplicated the anonymous
  # function inline even though myprog/0 below is exactly that routine.
  p0 = spawn(&myprog/0)
  p1 = spawn(&myprog/0)

  send mediator, {p0, p1}
end
# Waits for a single message (a pid) and prints a greeting naming it.
def myprog do
  receive do
    friend ->
      IO.puts "hello I'm #{inspect self()} and my friend is #{inspect friend}"
  end
end
end
| 17.789474 | 79 | 0.504438 |
08dd43fc95e1e5598e018b67d91eda9aa8a6a37d | 1,640 | ex | Elixir | lib/raft/config.ex | craiglyons/raft | 8a10978292fcc4fe549446afdca9674318b81c46 | [
"Apache-2.0"
] | null | null | null | lib/raft/config.ex | craiglyons/raft | 8a10978292fcc4fe549446afdca9674318b81c46 | [
"Apache-2.0"
] | null | null | null | lib/raft/config.ex | craiglyons/raft | 8a10978292fcc4fe549446afdca9674318b81c46 | [
"Apache-2.0"
] | null | null | null | defmodule Raft.Config do
@moduledoc """
Defines the configuration for peer servers.
"""
defstruct [
state_machine: Raft.StateMachine.Echo,
min_election_timeout: 300,
max_election_timeout: 1_000,
heartbeat_timeout: 25,
data_dir: "",
]
@type t :: %__MODULE__{
state_machine: module(),
min_election_timeout: non_neg_integer(),
max_election_timeout: non_neg_integer(),
heartbeat_timeout: non_neg_integer(),
data_dir: String.t,
}
# Builds a %Raft.Config{} from `opts` merged over the defaults; throws
# :min_equals_max when the election-timeout bounds are invalid.
def new(opts) do
  merged = Keyword.merge(default_opts(), opts)
  struct(__MODULE__, validate!(merged))
end
# Path of the on-disk database for server `name` under the data directory.
def db_path(name, config) do
  config
  |> data_dir()
  |> Path.join("#{name}")
end

# With no configured data_dir, fall back to the :raft application directory.
# (The original carried a commented-out `Path.join("data")` suffix here.)
def data_dir(%{data_dir: ""}) do
  Application.app_dir(:raft)
end

def data_dir(%{data_dir: data_dir}), do: data_dir
@doc """
Generates a random timeout value between the min_election_timeout and
max_election_timeout.
"""
@spec election_timeout(t) :: pos_integer()
def election_timeout(%{min_election_timeout: min, max_election_timeout: max}) do
case min < max do
true -> :rand.uniform(max-min)+min
_ -> throw :min_equals_max
end
end
# Returns `opts` unchanged when the election-timeout bounds are strictly
# increasing; otherwise throws :min_equals_max.
defp validate!(opts) do
  min = Keyword.get(opts, :min_election_timeout)
  max = Keyword.get(opts, :max_election_timeout)

  case min < max do
    true -> opts
    false -> throw(:min_equals_max)
  end
end

# Defaults merged under caller options by new/1.
# NOTE(review): these differ from the defstruct defaults above (e.g.
# heartbeat 200 here vs 25 there) — presumably intentional, worth confirming.
defp default_opts do
  [
    members: [],
    min_election_timeout: 150,
    max_election_timeout: 300,
    heartbeat_timeout: 200
  ]
end
end
| 23.428571 | 82 | 0.631707 |
08dd73cf7db90b3d109f1de76de4a12fa0f5fc78 | 556 | ex | Elixir | lib/monis_app/guardian.ex | monisapp/api | 4096d50da23e0b562a309b9d0ccf6b211f431d25 | [
"MIT"
] | 1 | 2020-04-24T19:40:46.000Z | 2020-04-24T19:40:46.000Z | lib/monis_app/guardian.ex | monisapp/api | 4096d50da23e0b562a309b9d0ccf6b211f431d25 | [
"MIT"
] | null | null | null | lib/monis_app/guardian.ex | monisapp/api | 4096d50da23e0b562a309b9d0ccf6b211f431d25 | [
"MIT"
] | null | null | null | defmodule MonisApp.Guardian do
@moduledoc """
Helpers for JWT authentication
"""
use Guardian, otp_app: :monis_app
# Uses the resource's id (stringified) as the JWT subject.
def subject_for_token(%{id: id}, _claims), do: {:ok, to_string(id)}

# Anything without an id cannot be encoded into a token.
def subject_for_token(_resource, _claims), do: {:error, :invalid_resource}
# Loads the user identified by the token's "sub" claim.
def resource_from_claims(%{"sub" => id}) do
  user = MonisApp.Auth.get_user(id)

  if match?(%MonisApp.Auth.User{}, user) do
    {:ok, user}
  else
    {:error, :user_not_found}
  end
end

# Claims without a "sub" key are rejected outright.
def resource_from_claims(_claims), do: {:error, :invalid_claims}
end
| 19.857143 | 48 | 0.651079 |
08dddacca980ec87916ad05cf932f39dd6ac14b8 | 219 | ex | Elixir | web/views/api_view.ex | cavneb/elixir_casts | c650a2850825e0305387b95ba6a7eb386e984097 | [
"MIT"
] | null | null | null | web/views/api_view.ex | cavneb/elixir_casts | c650a2850825e0305387b95ba6a7eb386e984097 | [
"MIT"
] | null | null | null | web/views/api_view.ex | cavneb/elixir_casts | c650a2850825e0305387b95ba6a7eb386e984097 | [
"MIT"
] | null | null | null | defmodule ElixirCasts.ApiView do
use ElixirCasts.Web, :view
def render("index.json", %{conn: conn}) do
%{
href: api_url(conn, :index),
episodes: %{ href: episode_url(conn, :index) }
}
end
end
| 19.909091 | 52 | 0.621005 |
08de471cfce6be558e5955f6598351436c972831 | 1,748 | ex | Elixir | lib/store.ex | dengjie-ys/ex_ray | 2715b44d8a8d6a17632e8cb9a9dfb98043f471d9 | [
"Apache-2.0"
] | null | null | null | lib/store.ex | dengjie-ys/ex_ray | 2715b44d8a8d6a17632e8cb9a9dfb98043f471d9 | [
"Apache-2.0"
] | null | null | null | lib/store.ex | dengjie-ys/ex_ray | 2715b44d8a8d6a17632e8cb9a9dfb98043f471d9 | [
"Apache-2.0"
] | null | null | null | defmodule ExRay.Store do
@moduledoc """
Store the span chains in an ETS table. The table must be created during
the application initialization using the create call. The span chain acts
like a call stack by pushing and popping spans as they come in and out of
scope.
"""
@table_name :tracers
require Logger
@doc """
Initializes the spans ETS table. The span table can be shared across
process boundary.
"""
@spec create :: any
def create do
:ets.new(@table_name,
[
:set,
:named_table,
:public,
read_concurrency: true,
write_concurrency: true
]
)
end
@doc """
Pushes a new span to the span stack. The key must be unique.
"""
@spec push(String.t, any) :: any
def push(key, val) when is_binary(key) do
vals = get(key)
if length(vals) > 0 do
:ets.insert(@table_name, {key, [val] ++ vals})
else
:ets.insert(@table_name, {key, [val]})
end
val
end
@doc """
Pops the top span off the stack.
"""
@spec pop(String.t) :: any
def pop(key) when is_binary(key) do
vals = get(key)
result = if length(vals) > 0 do
[h | t] = vals
:ets.insert(@table_name, {key, t})
h
else
[]
end
result
end
@doc """
Fetch span stack for the given key
"""
@spec get(String.t) :: [any]
def get(key) when is_binary(key) do
@table_name
|> :ets.lookup(key)
|> case do
[] -> []
[{_key, vals}] -> vals
end
end
@doc """
Fetch the top level span for a given key
"""
@spec current(String.t) :: [any]
def current(key) when is_binary(key) do
key
|> get
|> case do
[] -> nil
[h | _t] -> h
end
end
end
| 20.091954 | 75 | 0.570938 |
08de48ea664bc8a508ad0b701cd00db5ebdd1444 | 3,431 | ex | Elixir | clients/content/lib/google_api/content/v2/model/orders_return_refund_line_item_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_return_refund_line_item_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_return_refund_line_item_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.OrdersReturnRefundLineItemRequest do
  @moduledoc """
  ## Attributes

  *   `amountPretax` (*type:* `GoogleApi.Content.V2.Model.Price.t`, *default:* `nil`) - The amount that is refunded. If omitted, refundless return is assumed (same as calling returnLineItem method).
  *   `amountTax` (*type:* `GoogleApi.Content.V2.Model.Price.t`, *default:* `nil`) - Tax amount that corresponds to refund amount in amountPretax. Optional, but if filled, then amountPretax must be set. Calculated automatically if not provided.
  *   `lineItemId` (*type:* `String.t`, *default:* `nil`) - The ID of the line item to return. Either lineItemId or productId is required.
  *   `operationId` (*type:* `String.t`, *default:* `nil`) - The ID of the operation. Unique across all operations for a given order.
  *   `productId` (*type:* `String.t`, *default:* `nil`) - The ID of the product to return. This is the REST ID used in the products service. Either lineItemId or productId is required.
  *   `quantity` (*type:* `integer()`, *default:* `nil`) - The quantity to return and refund. Quantity is required.
  *   `reason` (*type:* `String.t`, *default:* `nil`) - The reason for the return. Acceptable values are: - "`customerDiscretionaryReturn`" - "`customerInitiatedMerchantCancel`" - "`deliveredTooLate`" - "`expiredItem`" - "`invalidCoupon`" - "`malformedShippingAddress`" - "`other`" - "`productArrivedDamaged`" - "`productNotAsDescribed`" - "`qualityNotAsExpected`" - "`undeliverableShippingAddress`" - "`unsupportedPoBoxAddress`" - "`wrongProductShipped`"
  *   `reasonText` (*type:* `String.t`, *default:* `nil`) - The explanation of the reason.
  """

  # Generated model (see header note "Do not edit this file manually").
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :amountPretax => GoogleApi.Content.V2.Model.Price.t(),
          :amountTax => GoogleApi.Content.V2.Model.Price.t(),
          :lineItemId => String.t(),
          :operationId => String.t(),
          :productId => String.t(),
          :quantity => integer(),
          :reason => String.t(),
          :reasonText => String.t()
        }

  # Each field/1,2 call maps a struct key to the JSON property of the same
  # name; nested Price values decode into their own model structs.
  field(:amountPretax, as: GoogleApi.Content.V2.Model.Price)
  field(:amountTax, as: GoogleApi.Content.V2.Model.Price)
  field(:lineItemId)
  field(:operationId)
  field(:productId)
  field(:quantity)
  field(:reason)
  field(:reasonText)
end
# Wires the generated model into Poison: decoding delegates to the model's
# generated decode/2; encoding to the shared Gax base encoder.
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrdersReturnRefundLineItemRequest do
  def decode(value, options) do
    GoogleApi.Content.V2.Model.OrdersReturnRefundLineItemRequest.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrdersReturnRefundLineItemRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 50.455882 | 456 | 0.709123 |
08de4cfc0d9d04c6957ffa2b10440a84d23d28b1 | 987 | exs | Elixir | test/integration/cases_test.exs | mediapress-ltd/solid | 025b49ab9d8286d4fc557085331cc7643e0beb20 | [
"MIT"
] | null | null | null | test/integration/cases_test.exs | mediapress-ltd/solid | 025b49ab9d8286d4fc557085331cc7643e0beb20 | [
"MIT"
] | null | null | null | test/integration/cases_test.exs | mediapress-ltd/solid | 025b49ab9d8286d4fc557085331cc7643e0beb20 | [
"MIT"
] | null | null | null | cases_dir = "test/cases"
# Generates one ExUnit test module per directory under `cases_dir`; each
# directory supplies an input.liquid template and an input.json binding set.
for test_case <- File.ls!(cases_dir) do
  # Atom creation here is bounded by the (trusted) test-case directory listing.
  module_name = Module.concat([Solid.Integration.Cases, :"#{test_case}Test"])

  defmodule module_name do
    use ExUnit.Case, async: true
    import Solid.Helpers

    @moduletag :integration

    @liquid_input_file "#{cases_dir}/#{test_case}/input.liquid"
    @json_input_file "#{cases_dir}/#{test_case}/input.json"
    # Recompile this module when either fixture file changes.
    @external_resource @liquid_input_file
    @external_resource @json_input_file

    # Nesting expands the name to <generated module>.Solid.CustomFilters, so
    # each generated test module gets its own distinct filter module.
    defmodule Solid.CustomFilters do
      def substitute(message, bindings \\ %{}) do
        Regex.replace(~r/%\{(\w+)\}/, message, fn _, key -> Map.get(bindings, key) end)
      end
    end

    setup do
      # NOTE(review): global app env mutated under async: true — every
      # generated module sets the same key, so tests would race if the values
      # differed; presumably safe only because all modules behave alike.
      Application.put_env(:solid, :custom_filters, Solid.CustomFilters)
      :ok
    end

    @tag case: test_case
    test "case #{test_case}" do
      liquid_input = File.read!(@liquid_input_file)
      json_input = File.read!(@json_input_file)
      assert_render(liquid_input, json_input)
    end
  end
end
| 26.675676 | 87 | 0.676798 |
08de61ec3570f7741ade2159bcc4787698efe5c3 | 337 | ex | Elixir | web/views/slugged_route_view.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | null | null | null | web/views/slugged_route_view.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | null | null | null | web/views/slugged_route_view.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | null | null | null | defmodule CodeCorps.SluggedRouteView do
# Preload the associations the serializer below links to.
use CodeCorps.PreloadHelpers, default_preloads: [:organization, :user]
use CodeCorps.Web, :view
use JaSerializer.PhoenixView

# JSON-API attributes and relationships exposed for a slugged route.
attributes [:slug, :inserted_at, :updated_at]
has_one :organization, serializer: CodeCorps.OrganizationView
has_one :user, serializer: CodeCorps.UserView
end
| 30.636364 | 72 | 0.79822 |
08de73d2cc4e6f15b1f6f011149c9170f1f16b38 | 3,177 | ex | Elixir | lib/util/hex_parser.ex | NickMcG/SNEEX | 901215dea41fa21314a4f4db46b51648158f1544 | [
"MIT"
] | 1 | 2019-11-16T00:33:02.000Z | 2019-11-16T00:33:02.000Z | lib/util/hex_parser.ex | NickMcG/SNEEX | 901215dea41fa21314a4f4db46b51648158f1544 | [
"MIT"
] | 1 | 2019-08-11T23:02:15.000Z | 2019-08-11T23:02:15.000Z | lib/util/hex_parser.ex | NickMcG/SNEEX | 901215dea41fa21314a4f4db46b51648158f1544 | [
"MIT"
] | null | null | null | defmodule Util.HexParser do
@moduledoc """
This module defines some helper functions for playing around with raw memory from a ROM.
"""
use Bitwise
@block_size 16
def convert_file(input_file, output_file) do
output_file
|> File.open!([:write], handle_file(input_file))
end
# Computes a ROM-style checksum for `input_file`.
#
# Returns `{checksum, inverse_checksum}` as 4-digit upper-case hex strings:
# the checksum is the 16-bit sum of every byte in the file, and the inverse
# is its bitwise XOR with 0xFFFF (the complement form).
def calculate_checksum(input_file) do
  checksum =
    input_file
    |> File.open!([:read, :binary], &read_and_sum(&1, 0))

  inverse_checksum = bxor(checksum, 0xFFFF)
  {format_byte(checksum, 4), format_byte(inverse_checksum, 4)}
end
# Sums every byte readable from `file_pid`, returning the low 16 bits of the
# running total (same masking as before).
#
# Reads in 64 KiB chunks instead of one byte per IO call: identical result,
# but orders of magnitude fewer IO round-trips on multi-megabyte ROMs.
defp read_and_sum(file_pid, curr_total) do
  case IO.binread(file_pid, 65_536) do
    :eof ->
      Bitwise.band(curr_total, 0xFFFF)

    chunk when is_binary(chunk) ->
      chunk_sum = chunk |> :binary.bin_to_list() |> Enum.sum()
      read_and_sum(file_pid, curr_total + chunk_sum)
  end
end
# Returns a closure suitable for `File.open!/3` on the output device: when
# invoked with the output pid, it opens `input_file` for binary reading and
# streams it through `read_file/3`, starting at block 0.
defp handle_file(input_file) do
  fn output_pid ->
    File.open!(input_file, [:read, :binary], fn input_pid ->
      read_file(input_pid, output_pid, 0)
    end)
  end
end
# Streams the input device @block_size (16) bytes at a time, writing one
# formatted dump row per block until :eof. `block_number * @block_size` is
# the absolute byte offset printed at the start of each row.
# NOTE(review): at end of file IO.binread/2 may hand back fewer than 16
# bytes for the final block — downstream formatting must cope with that.
defp read_file(input_pid, output_pid, block_number) do
  case IO.binread(input_pid, @block_size) do
    :eof ->
      :ok

    block ->
      formatted_block = (block_number * @block_size) |> format_result(block)
      IO.write(output_pid, formatted_block)
      read_file(input_pid, output_pid, block_number + 1)
  end
end
# Assembles one full dump row as iodata: "OFFSET: HEX  |ASCII|\r\n".
defp format_result(index, block) do
  prefix = [format_index(index), ": "]
  [prefix, format_block(block), ["\r\n"]]
end
# Formats one block (up to @block_size bytes) as a dump row: two-digit hex
# values separated by single spaces, then two spaces, then the
# printable-ASCII column wrapped in pipes. Returns iodata.
#
# Accepts a binary of any length. The previous version pattern-matched
# exactly 16 bytes, so the short final block of any file whose size is not
# a multiple of 16 crashed with a FunctionClauseError.
defp format_block(block) when is_binary(block) do
  bytes = :binary.bin_to_list(block)

  hex =
    bytes
    |> Enum.map(&format_byte(&1, 2))
    |> Enum.intersperse(" ")

  ascii = ["|", Enum.map(bytes, &format_printable_byte/1), "|"]

  [hex, [" "], ascii]
end
# Upper-case hex representation of `value`, left-padded with zeros to `width`.
defp format_byte(value, width) do
  String.pad_leading(Integer.to_string(value, 16), width, "0")
end
# Maps a byte to its one-character rendering for the dump's ASCII column;
# anything outside the printable range 32..126 becomes ".".
#
# Fixes two issues in the previous version: byte 127 (DEL) was accepted as
# printable, and the `String.valid?(to_string([byte]))` check could never
# fail for bytes in the 7-bit range, making that branch dead code.
defp format_printable_byte(byte) when byte in 32..126 do
  <<byte>>
end

defp format_printable_byte(_byte) do
  "."
end
# Renders a byte offset as "BB OFFS": six upper-case hex digits with the
# leading bank byte separated from the remaining four digits by a space.
defp format_index(index) do
  padded =
    index
    |> Integer.to_string(16)
    |> String.pad_leading(6, "0")

  {bank, offset} = String.split_at(padded, 2)
  bank <> " " <> offset
end
| 19.611111 | 90 | 0.503935 |
08de763ebc881cc395f84f41ca2357fad33e4ca5 | 332 | ex | Elixir | frameworks/Elixir/phoenix/web/models/fortune.ex | xitrum-framework/FrameworkBenchmarks | 180d44e1064abc6fe8c703b05e065c0564e6ee05 | [
"BSD-3-Clause"
] | 1 | 2016-05-26T09:37:14.000Z | 2016-05-26T09:37:14.000Z | frameworks/Elixir/phoenix/web/models/fortune.ex | xitrum-framework/FrameworkBenchmarks | 180d44e1064abc6fe8c703b05e065c0564e6ee05 | [
"BSD-3-Clause"
] | null | null | null | frameworks/Elixir/phoenix/web/models/fortune.ex | xitrum-framework/FrameworkBenchmarks | 180d44e1064abc6fe8c703b05e065c0564e6ee05 | [
"BSD-3-Clause"
] | 2 | 2015-01-15T08:52:28.000Z | 2019-05-18T05:11:58.000Z | defmodule Hello.Fortune do
# Ecto model for the "fortune" table.
use Hello.Web, :model

# Only expose :id and :message when JSON-encoding with Poison.
@derive {Poison.Encoder, only: [:id, :message]}

schema "fortune" do
  field :message, :string
end

@required_fields ~w(message)
@optional_fields ~w()

# Builds a changeset casting only :message (legacy three-list cast/4 form;
# `params \\ nil` lets the changeset be built without any parameters).
def changeset(model, params \\ nil) do
  model
  |> cast(params, @required_fields, @optional_fields)
end
end
| 19.529412 | 55 | 0.677711 |
08de80276d811011cc889564bd53fdea37b4f9fd | 718 | ex | Elixir | lib/liquid_voting/voting_results/result.ex | jinjagit/api | c1a176d8c318e05810bc1635706c56395819191e | [
"MIT"
] | null | null | null | lib/liquid_voting/voting_results/result.ex | jinjagit/api | c1a176d8c318e05810bc1635706c56395819191e | [
"MIT"
] | 10 | 2020-09-28T06:37:48.000Z | 2021-12-22T15:04:38.000Z | lib/liquid_voting/voting_results/result.ex | jinjagit/api | c1a176d8c318e05810bc1635706c56395819191e | [
"MIT"
] | null | null | null | defmodule LiquidVoting.VotingResults.Result do
use Ecto.Schema
import Ecto.Changeset

# UUID primary key, autogenerated.
@primary_key {:id, :binary_id, autogenerate: true}

# Vote tally for one proposal within one organization; counters default to 0.
schema "results" do
  field :in_favor, :integer, default: 0
  field :against, :integer, default: 0
  field :proposal_url, :string
  field :organization_id, Ecto.UUID

  timestamps()
end
@doc false
def changeset(result, attrs) do
  required = [:proposal_url, :organization_id]
  permitted = [:in_favor, :against | required]

  result
  |> cast(attrs, permitted)
  |> validate_required(required)
  |> unique_constraint(:organization_id_proposal_url,
    name: :uniq_index_organization_id_proposal_url
  )
end
end
| 24.758621 | 59 | 0.713092 |
08de914ccf2a5d74f18a02db072adb5f6ceb7d24 | 1,452 | ex | Elixir | apps/extended_api/lib/extended_api_web.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 19 | 2019-09-17T18:14:36.000Z | 2021-12-06T07:29:27.000Z | apps/extended_api/lib/extended_api_web.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 5 | 2019-09-30T04:57:14.000Z | 2020-11-10T15:41:03.000Z | apps/extended_api/lib/extended_api_web.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 2 | 2019-09-17T19:03:16.000Z | 2021-03-01T01:04:31.000Z | defmodule ExtendedApiWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use ExtendedApiWeb, :controller
use ExtendedApiWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
# Quoted imports/uses injected into controllers via `use ExtendedApiWeb, :controller`.
def controller do
  quote do
    use Phoenix.Controller, namespace: ExtendedApiWeb

    import Plug.Conn
    alias ExtendedApiWeb.Router.Helpers, as: Routes
  end
end

# Quoted setup for Phoenix views: template root plus controller helpers.
def view do
  quote do
    use Phoenix.View,
      root: "lib/extended_api_web/templates",
      namespace: ExtendedApiWeb

    # Import convenience functions from controllers
    import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
    alias ExtendedApiWeb.Router.Helpers, as: Routes
  end
end

# Quoted setup for the router module.
def router do
  quote do
    use Phoenix.Router
    import Plug.Conn
    import Phoenix.Controller
  end
end

# Quoted setup for Phoenix channels.
def channel do
  quote do
    use Phoenix.Channel
  end
end

@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
  apply(__MODULE__, which, [])
end
end
| 23.047619 | 83 | 0.695592 |
08debbd97e2ea471b9ce7a127b53b72d2e959d00 | 501 | exs | Elixir | apps/core/priv/repo/migrations/20160927214005_create_guardian_tokens.exs | iporaitech/phoenix-webpack-react-docker | c454db0b851b9d00db868a64b96e567d4a0cc3d9 | [
"MIT"
] | 25 | 2016-08-09T15:04:37.000Z | 2021-11-15T12:20:27.000Z | apps/core/priv/repo/migrations/20160927214005_create_guardian_tokens.exs | iporaitech/phoenix-webpack-react-docker | c454db0b851b9d00db868a64b96e567d4a0cc3d9 | [
"MIT"
] | 62 | 2016-05-23T20:16:40.000Z | 2017-04-18T18:36:29.000Z | apps/core/priv/repo/migrations/20160927214005_create_guardian_tokens.exs | iporaitech/phoenix-webpack-react-docker | c454db0b851b9d00db868a64b96e567d4a0cc3d9 | [
"MIT"
] | 10 | 2016-08-17T15:29:21.000Z | 2017-02-28T07:58:30.000Z | defmodule Core.Repo.Migrations.CreateGuardianTokens do
use Ecto.Migration

# Creates the table Guardian uses to track issued JWTs. The token id (:jti)
# is the primary key; :claims is stored as a map column.
# NOTE: already-deployed migrations must not change — comments only here.
def up do
  create table(:guardian_tokens, primary_key: false) do
    add :jti, :string, primary_key: true
    add :typ, :string
    add :aud, :string
    add :iss, :string
    add :sub, :string
    add :exp, :bigint
    add :jwt, :text
    add :claims, :map

    timestamps
  end

  # Tokens are looked up (and must be unique) per {jti, aud} pair.
  create unique_index(:guardian_tokens, [:jti, :aud])
end

def down do
  drop table(:guardian_tokens)
end
end
| 21.782609 | 57 | 0.636727 |
08dedd5930ffcf12491921f845cad8a55a0fe79a | 2,218 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/vm_endpoint_nat_mappings_list_warning.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/vm_endpoint_nat_mappings_list_warning.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/vm_endpoint_nat_mappings_list_warning.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.VmEndpointNatMappingsListWarning do
  @moduledoc """
  [Output Only] Informational warning message.

  ## Attributes

  * `code` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.
  * `data` (*type:* `list(GoogleApi.Compute.V1.Model.VmEndpointNatMappingsListWarningData.t)`, *default:* `nil`) - [Output Only] Metadata about this warning in key: value format. For example:
      "data": [ { "key": "scope", "value": "zones/us-east1-d" }
  * `message` (*type:* `String.t`, *default:* `nil`) - [Output Only] A human-readable description of the warning code.
  """

  # Generated model: field/2,3 macros come from GoogleApi.Gax.ModelBase.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :code => String.t() | nil,
          :data =>
            list(GoogleApi.Compute.V1.Model.VmEndpointNatMappingsListWarningData.t()) | nil,
          :message => String.t() | nil
        }

  field(:code)
  field(:data, as: GoogleApi.Compute.V1.Model.VmEndpointNatMappingsListWarningData, type: :list)
  field(:message)
end

# Delegates Poison decoding to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.VmEndpointNatMappingsListWarning do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.VmEndpointNatMappingsListWarning.decode(value, options)
  end
end

# Encodes the model through the shared Gax base encoder.
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.VmEndpointNatMappingsListWarning do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.327273 | 194 | 0.724076 |
08df1bcce847ea2723b1b5a29f34acdf83d7eb81 | 3,724 | exs | Elixir | test/response_test.exs | terki/modbux | 391a1d77e191afe7b0c5fe72e70e4680b41fb85c | [
"Apache-2.0"
] | 14 | 2019-11-14T22:04:24.000Z | 2022-02-07T19:36:57.000Z | test/response_test.exs | terki/modbux | 391a1d77e191afe7b0c5fe72e70e4680b41fb85c | [
"Apache-2.0"
] | 3 | 2020-01-11T07:52:53.000Z | 2022-01-18T12:08:08.000Z | test/response_test.exs | terki/modbux | 391a1d77e191afe7b0c5fe72e70e4680b41fb85c | [
"Apache-2.0"
] | 6 | 2020-02-26T09:27:35.000Z | 2022-03-27T09:50:02.000Z | defmodule ResponseTest do
use ExUnit.Case
alias Modbux.Response
test "Response pack and parse test" do
  # Single-value round trips for each supported Modbus function code.
  pp(<<0x22, 0x01, 0x01, 0x00>>, {:rc, 0x22, 0x2324, 1}, [0])
  pp(<<0x22, 0x01, 0x01, 0x01>>, {:rc, 0x22, 0x2324, 1}, [1])
  pp(<<0x22, 0x02, 0x01, 0x00>>, {:ri, 0x22, 0x2324, 1}, [0])
  pp(<<0x22, 0x02, 0x01, 0x01>>, {:ri, 0x22, 0x2324, 1}, [1])
  pp(<<0x22, 0x03, 0x02, 0x25, 0x26>>, {:rhr, 0x22, 0x2324, 1}, [0x2526])
  pp(<<0x22, 0x04, 0x02, 0x25, 0x26>>, {:rir, 0x22, 0x2324, 1}, [0x2526])
  pp(<<0x22, 0x05, 0x23, 0x24, 0x00, 0x00>>, {:fc, 0x22, 0x2324, 0}, nil)
  pp(<<0x22, 0x05, 0x23, 0x24, 0xFF, 0x00>>, {:fc, 0x22, 0x2324, 1}, nil)
  pp(<<0x22, 0x06, 0x23, 0x24, 0x25, 0x26>>, {:phr, 0x22, 0x2324, 0x2526}, nil)
  pp(<<0x22, 0x0F, 0x23, 0x24, 0x00, 0x01>>, {:fc, 0x22, 0x2324, [0]}, nil)
  pp(<<0x22, 0x10, 0x23, 0x24, 0x00, 0x01>>, {:phr, 0x22, 0x2324, [0x2526]}, nil)

  # Corner cases around byte boundaries: 8/9/16/17 coils and the maximum
  # payload sizes (2040 coils, 127 registers). 0x96 encodes the first eight
  # bits of `bits16` LSB-first; 0xC3 encodes the next eight.
  bits8 = [0, 1, 1, 0, 1, 0, 0, 1]
  bits16 = [0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1]

  pp(<<0x22, 0x01, 0x01, 0x96>>, {:rc, 0x22, 0x2324, 8}, bits8)
  pp(<<0x22, 0x01, 0x02, 0x96, 0x01>>, {:rc, 0x22, 0x2324, 9}, bits8 ++ [1])
  pp(<<0x22, 0x01, 0x02, 0x96, 0xC3>>, {:rc, 0x22, 0x2324, 16}, bits16)
  pp(<<0x22, 0x01, 0x03, 0x96, 0xC3, 0x01>>, {:rc, 0x22, 0x2324, 17}, bits16 ++ [1])
  pp(<<0x22, 0x01, 0xFF>> <> l2b1(bls(2040)), {:rc, 0x22, 0x2324, 2040}, bls(2040))
  pp(<<0x22, 0x02, 0x01, 0x96>>, {:ri, 0x22, 0x2324, 8}, bits8)
  pp(<<0x22, 0x02, 0x02, 0x96, 0x01>>, {:ri, 0x22, 0x2324, 9}, bits8 ++ [1])
  pp(<<0x22, 0x02, 0x02, 0x96, 0xC3>>, {:ri, 0x22, 0x2324, 16}, bits16)
  pp(<<0x22, 0x02, 0x03, 0x96, 0xC3, 0x01>>, {:ri, 0x22, 0x2324, 17}, bits16 ++ [1])
  pp(<<0x22, 0x02, 0xFF>> <> l2b1(bls(2040)), {:ri, 0x22, 0x2324, 2040}, bls(2040))
  pp(<<0x22, 0x03, 0xFE>> <> l2b16(rls(127)), {:rhr, 0x22, 0x2324, 127}, rls(127))
  pp(<<0x22, 0x04, 0xFE>> <> l2b16(rls(127)), {:rir, 0x22, 0x2324, 127}, rls(127))

  # Oversized payloads: the one-byte count field wraps to 0x00 (as the packed
  # binaries show) but pack/2 still emits all the data, so only pack — not the
  # full pp round trip — is asserted.
  assert <<0x22, 0x01, 0x00>> <> l2b1(bls(2041)) == Response.pack({:rc, 0x22, 0x2324, 2041}, bls(2041))
  assert <<0x22, 0x02, 0x00>> <> l2b1(bls(2041)) == Response.pack({:ri, 0x22, 0x2324, 2041}, bls(2041))
  assert <<0x22, 0x03, 0x00>> <> l2b16(rls(128)) == Response.pack({:rhr, 0x22, 0x2324, 128}, rls(128))
  assert <<0x22, 0x04, 0x00>> <> l2b16(rls(128)) == Response.pack({:rir, 0x22, 0x2324, 128}, rls(128))
end
# Round-trip helper: asserts that `cmd`/`vals` pack to exactly `packet`,
# that Response.length/1 predicts the packet size, and that parsing the
# packet back yields `vals` again.
defp pp(packet, cmd, vals) do
  assert packet == Response.pack(cmd, vals)
  assert Response.length(cmd) == byte_size(packet)
  assert vals == Response.parse(cmd, packet)
end
# Alternating-bit fixture of the given length: [1, 0, 1, 0, ...].
defp bls(size) do
  Enum.map(1..size, &rem(&1, 2))
end
# Ascending register fixture: [1, 2, ..., size].
defp rls(size) do
  Enum.to_list(1..size)
end
# Packs a list of bits (LSB first within each byte) into a binary,
# zero-padding a trailing partial byte.
defp l2b1(bits) do
  bits
  |> Enum.chunk_every(8, 8, List.duplicate(0, 8))
  |> Enum.map(fn [b0, b1, b2, b3, b4, b5, b6, b7] ->
    <<b7::1, b6::1, b5::1, b4::1, b3::1, b2::1, b1::1, b0::1>>
  end)
  |> IO.iodata_to_binary()
end
# Encodes each integer as a 16-bit big-endian value and concatenates them.
defp l2b16(values) do
  for value <- values, into: <<>>, do: <<value::16>>
end
end
| 24.662252 | 105 | 0.47449 |
08df4d497c9d466835dbfa645221ae5e99e47ac6 | 2,605 | exs | Elixir | test/screens/v2/widget_instance/survey_test.exs | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 3 | 2021-07-27T14:11:00.000Z | 2022-01-03T14:16:43.000Z | test/screens/v2/widget_instance/survey_test.exs | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 444 | 2021-03-10T20:57:17.000Z | 2022-03-31T16:00:35.000Z | test/screens/v2/widget_instance/survey_test.exs | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | null | null | null | defmodule Screens.V2.WidgetInstance.SurveyTest do
use ExUnit.Case, async: true
alias Screens.V2.WidgetInstance
alias Screens.Config.Screen
alias Screens.V2.WidgetInstance.Survey
# Every test receives a *disabled* Survey widget with a stub screen config
# and both asset URLs populated.
setup do
  %{
    widget: %Survey{
      screen: %Screen{app_params: nil, vendor: nil, device_id: nil, name: nil, app_id: nil},
      enabled?: false,
      medium_asset_url: "medium.png",
      large_asset_url: "large.png"
    }
  }
end
# Replaces the app_id on the widget's nested screen config.
defp put_app_id(widget, app_id) do
  put_in(widget.screen.app_id, app_id)
end
# Sets the widget's enabled? flag (key must already exist).
defp put_enabled(widget, enabled?) do
  Map.replace!(widget, :enabled?, enabled?)
end
# The suite below exercises the Screens.V2.WidgetInstance protocol
# implementation for Survey — one `describe` per protocol function.
describe "priority/1" do
  test "returns low flex zone priority", %{widget: widget} do
    assert [2, 10] == WidgetInstance.priority(widget)
  end
end

describe "serialize/1" do
  test "returns asset urls in a map", %{widget: widget} do
    assert %{medium_asset_url: "medium.png", large_asset_url: "large.png"} ==
             WidgetInstance.serialize(widget)
  end
end

describe "slot_names/1" do
  # Slot names are only defined for the bus shelter app.
  test "returns large and medium for bus shelter", %{widget: widget} do
    widget = put_app_id(widget, :bus_shelter_v2)
    assert [:large, :medium_left, :medium_right] == WidgetInstance.slot_names(widget)
  end

  test "not defined for non-bus shelter apps", %{widget: widget} do
    widget = put_app_id(widget, :gl_eink_v2)
    assert_raise FunctionClauseError, fn -> WidgetInstance.slot_names(widget) end
  end
end

describe "widget_type/1" do
  test "returns :survey", %{widget: widget} do
    assert :survey == WidgetInstance.widget_type(widget)
  end
end

describe "valid_candidate?/1" do
  # Candidacy mirrors the enabled? flag directly.
  test "returns value of `enabled?` field", %{widget: widget} do
    widget = put_enabled(widget, true)
    assert WidgetInstance.valid_candidate?(widget)

    widget = put_enabled(widget, false)
    refute WidgetInstance.valid_candidate?(widget)
  end
end

# Audio behavior: surveys contribute nothing to audio readout.
describe "audio_serialize/1" do
  test "returns empty string", %{widget: widget} do
    assert %{} == WidgetInstance.audio_serialize(widget)
  end
end

describe "audio_sort_key/1" do
  test "returns 0", %{widget: widget} do
    assert 0 == WidgetInstance.audio_sort_key(widget)
  end
end

describe "audio_valid_candidate?/1" do
  test "returns false", %{widget: widget} do
    refute WidgetInstance.audio_valid_candidate?(widget)
  end
end

describe "audio_view/1" do
  test "returns SurveyView", %{widget: widget} do
    assert ScreensWeb.V2.Audio.SurveyView == WidgetInstance.audio_view(widget)
  end
end
end
| 27.712766 | 94 | 0.677159 |
08df5475f825a9fa0b9a2a1841e6530aff21ca8a | 5,042 | ex | Elixir | lib/mix/lib/mix/dep/fetcher.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/dep/fetcher.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/dep/fetcher.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | # Module responsible for fetching (getting/updating)
# dependencies from their sources.
#
# The new_lock and old_lock mechanism exists to signal
# externally which dependencies need to be updated and
# which ones do not.
defmodule Mix.Dep.Fetcher do
@moduledoc false
import Mix.Dep, only: [format_dep: 1, check_lock: 1, available?: 1, ok?: 1]
@doc """
Fetches all dependencies.

Converges the dependency graph against `new_lock`, fetching each dependency
via `do_fetch/3`, then finalizes (writes the merged lock, marks deps as
fetched, checks for divergence) and returns the fetched app names.
"""
def all(old_lock, new_lock, opts) do
  result = Mix.Dep.Converger.converge([], new_lock, opts, &do_fetch/3)
  {apps, _deps} = do_finalize(result, old_lock, opts)
  apps
end
@doc """
Fetches the dependencies with the given names and their children recursively.

After finalizing, verifies that every requested dependency is actually
loaded (via `Mix.Dep.loaded_by_name/3`) and returns the fetched app names.
"""
def by_name(names, old_lock, new_lock, opts) do
  fetcher = fetch_by_name(names, new_lock)
  result = Mix.Dep.Converger.converge([], new_lock, opts, fetcher)
  {apps, deps} = do_finalize(result, old_lock, opts)

  # Check if all given dependencies are loaded or fail
  _ = Mix.Dep.loaded_by_name(names, deps, opts)
  apps
end
# Builds the converger callback used by `by_name/4`: a dep is fetched only
# when it was explicitly requested or when a remote converger changed its
# lock entry; every other dep is passed through untouched.
defp fetch_by_name(given, lock) do
  names = to_app_names(given)

  fn(%Mix.Dep{app: app} = dep, acc, new_lock) ->
    # Only fetch if dependency is in given names or if lock has
    # been changed for dependency by remote converger
    if app in names or lock[app] != new_lock[app] do
      do_fetch(dep, acc, new_lock)
    else
      {dep, acc, new_lock}
    end
  end
end
# Fetches (checks out or updates) one dependency, threading the accumulator
# of touched app names and the lock map through the converger. Every branch
# returns `{dep, acc, lock}`.
defp do_fetch(dep, acc, lock) do
  %Mix.Dep{app: app, scm: scm, opts: opts} = dep = check_lock(dep)

  cond do
    # Dependencies that cannot be fetched are always compiled afterwards
    not scm.fetchable? ->
      {dep, [app|acc], lock}

    # If the dependency is not available or we have a lock mismatch
    out_of_date?(dep) ->
      # An existing checkout is updated in place; otherwise a fresh
      # checkout is made.
      new =
        if scm.checked_out?(opts) do
          Mix.shell.info "* Updating #{format_dep(dep)}"
          scm.update(opts)
        else
          Mix.shell.info "* Getting #{format_dep(dep)}"
          scm.checkout(opts)
        end

      # The SCM returns the new lock value (nil when nothing was fetched),
      # which is recorded under the app's key.
      if new do
        {dep, [app|acc], Map.put(lock, app, new)}
      else
        {dep, acc, lock}
      end

    # The dependency is ok or has some other error
    true ->
      {dep, acc, lock}
  end
end
# A dependency must be (re)fetched when its lock entry is missing, stale or
# mismatched, or when the checkout is simply not available.
defp out_of_date?(%Mix.Dep{status: status}) do
  case status do
    {:lockmismatch, _} -> true
    :lockoutdated -> true
    :nolock -> true
    {:unavailable, _} -> true
    _other -> false
  end
end
# Finalizes a converger run: resolves the loaded deps, widens the set to
# include parents of updated deps (only when everything is available),
# writes the merged lock, drops `.fetch` markers, and aborts on divergence.
# Returns `{apps, all_deps}`.
defp do_finalize({all_deps, apps, new_lock}, old_lock, opts) do
  # Let's get the loaded versions of deps
  deps = Mix.Dep.loaded_by_name(apps, all_deps, opts)

  # Note we only retrieve the parent dependencies of the updated
  # deps if all dependencies are available. This is because if a
  # dependency is missing, it could directly affect one of the
  # dependencies we are trying to compile, causing the whole thing
  # to fail.
  #
  # If there is any other dependency that is not ok, we include
  # it for compilation too, this is our best to try to solve the
  # maximum we can at each deps.get and deps.update.
  #
  # BUGFIX: the previous code rebound `deps` *inside* the `if` body; that
  # rebinding is scoped to the `if` and silently discarded, so the widened
  # set was never used. Bind the `if` expression's result instead.
  deps =
    if Enum.all?(all_deps, &available?/1) do
      Enum.uniq_by(with_depending(deps, all_deps), &(&1.app))
    else
      deps
    end

  # Merge the new lock on top of the old to guarantee we don't
  # leave out things that could not be fetched and save it.
  lock = Map.merge(old_lock, new_lock)
  Mix.Dep.Lock.write(lock)
  mark_as_fetched(deps)

  # See if any of the deps diverged and abort.
  show_diverged!(Enum.filter(all_deps, &Mix.Dep.diverged?/1))
  {apps, all_deps}
end
# Touches a `.fetch` marker inside every *fetchable* dependency's dest dir.
# Each build, regardless of environment and location, compares against this
# file to know whether the dependency needs recompiling.
defp mark_as_fetched(deps) do
  Enum.each(deps, fn %Mix.Dep{scm: scm, opts: opts} ->
    if scm.fetchable? do
      File.touch!(Path.join(opts[:dest], ".fetch"))
    end
  end)

  :ok
end
# Returns `deps` plus every dependency that (transitively) depends on one
# of them, so parents get picked up when a child was refetched.
defp with_depending(deps, all_deps) do
  deps ++ do_with_depending(deps, all_deps)
end

defp do_with_depending([], _all_deps) do
  []
end

# Walks "upwards": finds every dep whose children include one of `deps`,
# then recurses on those parents. Duplicates are left for the caller to
# remove (Enum.uniq_by in do_finalize).
defp do_with_depending(deps, all_deps) do
  dep_names = Enum.map(deps, fn dep -> dep.app end)

  parents = Enum.filter all_deps, fn dep ->
    Enum.any?(dep.deps, &(&1.app in dep_names))
  end

  do_with_depending(parents, all_deps) ++ parents
end
# Normalizes user-supplied dependency names to atoms; atoms pass through.
defp to_app_names(given) do
  Enum.map(given, fn
    app when is_binary(app) -> String.to_atom(app)
    app -> app
  end)
end
# No divergence: carry on silently.
defp show_diverged!([]), do: :ok

# Prints every diverged dependency with its status, then aborts the task.
defp show_diverged!(deps) do
  shell = Mix.shell
  shell.error "Dependencies have diverged:"

  Enum.each deps, fn(dep) ->
    shell.error "* #{Mix.Dep.format_dep dep}"
    shell.error "  #{Mix.Dep.format_status dep}"
  end

  Mix.raise "Can't continue due to errors on dependencies"
end
end
| 31.123457 | 79 | 0.647759 |
08df5db0add12a35cc3ac9d956ac8817d037b870 | 428 | ex | Elixir | lib/network_interface/application.ex | nerves-project/nerves_network_interface | ff916fb2c58dbb803dd1b9fdc6898ac2241b965e | [
"Apache-2.0"
] | 10 | 2016-06-17T21:53:05.000Z | 2019-02-28T21:23:49.000Z | lib/network_interface/application.ex | nerves-project-attic/nerves_network_interface | ff916fb2c58dbb803dd1b9fdc6898ac2241b965e | [
"Apache-2.0"
] | 13 | 2016-08-01T10:21:55.000Z | 2019-02-26T21:54:53.000Z | lib/network_interface/application.ex | nerves-project-attic/nerves_network_interface | ff916fb2c58dbb803dd1b9fdc6898ac2241b965e | [
"Apache-2.0"
] | 7 | 2016-05-31T21:39:58.000Z | 2018-08-08T11:16:19.000Z | defmodule Nerves.NetworkInterface.Application do
@moduledoc false
use Application

# OTP entry point: starts a duplicate-keyed Registry named
# Nerves.NetworkInterface ahead of the worker. :rest_for_one means a
# registry crash also restarts the worker started after it.
# NOTE(review): the supervisor name `Nerves.NervesInterface.Supervisor`
# looks like a typo of `Nerves.NetworkInterface.Supervisor` — harmless as a
# unique name, but confirm before relying on it elsewhere.
def start(_type, _args) do
  import Supervisor.Spec, warn: false

  children = [
    supervisor(Registry, [:duplicate, Nerves.NetworkInterface]),
    worker(Nerves.NetworkInterface.Worker, [])
  ]

  opts = [strategy: :rest_for_one, name: Nerves.NervesInterface.Supervisor]
  Supervisor.start_link(children, opts)
end
end
| 23.777778 | 77 | 0.721963 |
08df6ffdaea1cc154451f61bf228cea7119eeecc | 1,802 | ex | Elixir | lib/danpay_web.ex | Nilomiranda/Danpay | 1b6f05a8a5ce6b636241c369fca85f16b229bc82 | [
"MIT"
] | null | null | null | lib/danpay_web.ex | Nilomiranda/Danpay | 1b6f05a8a5ce6b636241c369fca85f16b229bc82 | [
"MIT"
] | null | null | null | lib/danpay_web.ex | Nilomiranda/Danpay | 1b6f05a8a5ce6b636241c369fca85f16b229bc82 | [
"MIT"
] | null | null | null | defmodule DanpayWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use DanpayWeb, :controller
use DanpayWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
# Quoted imports/uses injected into controllers via `use DanpayWeb, :controller`.
def controller do
  quote do
    use Phoenix.Controller, namespace: DanpayWeb

    import Plug.Conn
    import DanpayWeb.Gettext
    alias DanpayWeb.Router.Helpers, as: Routes
  end
end

# Quoted setup for Phoenix views: template root, controller helpers and the
# shared view helpers below.
def view do
  quote do
    use Phoenix.View,
      root: "lib/danpay_web/templates",
      namespace: DanpayWeb

    # Import convenience functions from controllers
    import Phoenix.Controller,
      only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]

    # Include shared imports and aliases for views
    unquote(view_helpers())
  end
end

# Quoted setup for the router module.
def router do
  quote do
    use Phoenix.Router

    import Plug.Conn
    import Phoenix.Controller
  end
end

# Quoted setup for Phoenix channels.
def channel do
  quote do
    use Phoenix.Channel
    import DanpayWeb.Gettext
  end
end

# Shared quoted helpers injected into every view.
defp view_helpers do
  quote do
    # Import basic rendering functionality (render, render_layout, etc)
    import Phoenix.View

    import DanpayWeb.ErrorHelpers
    import DanpayWeb.Gettext
    alias DanpayWeb.Router.Helpers, as: Routes
  end
end

@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
  apply(__MODULE__, which, [])
end
end
| 22.810127 | 76 | 0.685905 |
08df863c78900396250833332ccece6e5cb3eeeb | 350 | exs | Elixir | installer/templates/phx_test/live/page_live_test.exs | joshchernoff/phoenix | cd1541a8bc12cdb2501be6b08403558e82c5b72b | [
"MIT"
] | 1 | 2020-04-14T09:49:46.000Z | 2020-04-14T09:49:46.000Z | installer/templates/phx_test/live/page_live_test.exs | joshchernoff/phoenix | cd1541a8bc12cdb2501be6b08403558e82c5b72b | [
"MIT"
] | null | null | null | installer/templates/phx_test/live/page_live_test.exs | joshchernoff/phoenix | cd1541a8bc12cdb2501be6b08403558e82c5b72b | [
"MIT"
] | null | null | null | defmodule <%= web_namespace %>.PageLiveTest do
use <%= web_namespace %>.ConnCase
import Phoenix.LiveViewTest
# Renders "/" twice — once as the static (disconnected) HTTP response and
# once over the mounted LiveView — and asserts both contain the landing-page
# headline. (Also fixes the misspelled `disconneted_html` binding.)
test "disconnected and connected render", %{conn: conn} do
  {:ok, page_live, disconnected_html} = live(conn, "/")
  assert disconnected_html =~ "Welcome to Phoenix!"
  assert render(page_live) =~ "Welcome to Phoenix!"
end
end
| 29.166667 | 60 | 0.7 |
08dfa29f06855cd9504ca6b4db3138f928a9fccf | 14,949 | ex | Elixir | lib/mongo/bulk_write.ex | MillionIntegrals/elixir-mongodb-driver | 96c4cc3f21c4043323b8a9b33ad3a374760864c6 | [
"Apache-2.0"
] | null | null | null | lib/mongo/bulk_write.ex | MillionIntegrals/elixir-mongodb-driver | 96c4cc3f21c4043323b8a9b33ad3a374760864c6 | [
"Apache-2.0"
] | null | null | null | lib/mongo/bulk_write.ex | MillionIntegrals/elixir-mongodb-driver | 96c4cc3f21c4043323b8a9b33ad3a374760864c6 | [
"Apache-2.0"
] | null | null | null | defmodule Mongo.BulkWrite do
@moduledoc """
The driver supports the so-called bulk writes ([Specification](https://github.com/mongodb/specifications/blob/master/source/crud/crud.rst#basic)):
The motivation for bulk writes lies in the possibility of optimizing to group the same operations. The driver supports
* unordered and ordered bulk writes
* in-memory and stream bulk writes
## Unordered bulk writes
Unordered bulk writes have the highest optimization factor. Here all operations can be divided into
three groups (inserts, updates and deletes).
The order of execution within a group does not matter. However, the groups are executed in the
order: inserts, updates and deletes. The following example creates three records, changes them, and then
deletes all records. After execution, the collection is unchanged. It's valid, because of the execution order:
1. inserts
2. updates
3. deletes
## Example:
```
alias Mongo.BulkWrite
alias Mongo.UnorderedBulk
bulk = "bulk"
|> UnorderedBulk.new()
|> UnorderedBulk.insert_one(%{name: "Greta"})
|> UnorderedBulk.insert_one(%{name: "Tom"})
|> UnorderedBulk.insert_one(%{name: "Waldo"})
|> UnorderedBulk.update_one(%{name: "Greta"}, %{"$set": %{kind: "dog"}})
|> UnorderedBulk.update_one(%{name: "Tom"}, %{"$set": %{kind: "dog"}})
|> UnorderedBulk.update_one(%{name: "Waldo"}, %{"$set": %{kind: "dog"}})
|> UnorderedBulk.delete_one(%{kind: "dog"})
|> UnorderedBulk.delete_one(%{kind: "dog"})
|> UnorderedBulk.delete_one(%{kind: "dog"})
result = BulkWrite.write(:mongo, bulk, w: 1)
```
## Ordered bulk writes
Sometimes the order of execution is important for successive operations to yield a correct result.
In this case, one uses ordered bulk writes. The following example would not work with unordered bulk writes
because the order within the update operations is undefined. The `update_many()` call will only work if it is
executed after the `update_one()` functions.
```
bulk = "bulk"
|> OrderedBulk.new()
|> OrderedBulk.insert_one(%{name: "Greta"})
|> OrderedBulk.insert_one(%{name: "Tom"})
|> OrderedBulk.insert_one(%{name: "Waldo"})
|> OrderedBulk.update_one(%{name: "Greta"}, %{"$set": %{kind: "dog"}})
|> OrderedBulk.update_one(%{name: "Tom"}, %{"$set": %{kind: "dog"}})
|> OrderedBulk.update_one(%{name: "Waldo"}, %{"$set": %{kind: "dog"}})
|> OrderedBulk.update_many(%{kind: "dog"}, %{"$set": %{kind: "cat"}})
|> OrderedBulk.delete_one(%{kind: "cat"})
|> OrderedBulk.delete_one(%{kind: "cat"})
|> OrderedBulk.delete_one(%{kind: "cat"})
result = BulkWrite.write(:mongo, bulk, w: 1)
```
## Stream bulk writes
The examples shown initially filled the bulk with a few operations and then the bulk is written to the database.
This is all done in memory. For larger amounts of operations or imports of very long files, the main memory would
be unnecessarily burdened. It could come to some resource problems.
For such cases you could use streams. Unordered and ordered bulk writes can also be combined with Streams.
You set the maximum size of the bulk. Once the number of bulk operations has been reached,
it will be sent to the database. While streaming you can limit the memory consumption regarding the current task.
In the following example we import 1.000.000 integers into the MongoDB using the stream api:
We need to create an insert operation (`BulkOps.get_insert_one()`) for each number. Then we call the `UnorderedBulk.stream`
function to import it. This function returns a stream that accumulates
insert operations until the limit `1000` is reached; at that point the operation group is written to
MongoDB.
## Example
```
1..1_000_000
|> Stream.map(fn i -> BulkOps.get_insert_one(%{number: i}) end)
|> UnorderedBulk.write(:mongo, "bulk", 1_000)
|> Stream.run()
```
## Benchmark
The following benchmark compares multiple `Mongo.insert_one()` calls with a stream using unordered bulk writes.
Both tests inserts documents into a replica set with `w: 1`.
```
Benchee.run(
%{
"inserts" => fn input ->
input
|> Enum.map(fn i -> %{number: i} end)
|> Enum.each(fn doc -> Mongo.insert_one!(top, "bulk_insert", doc) end)
end,
"streams" => fn input ->
input
|> Stream.map(fn i -> get_insert_one(%{number: i}) end)
|> Mongo.UnorderedBulk.write(top, "bulk", 1_0000)
|> Stream.run()
end,
},
inputs: %{
"Small" => Enum.to_list(1..10_000),
"Medium" => Enum.to_list(1..100_000),
"Bigger" => Enum.to_list(1..1_000_000)
}
)
```
Result:
```
##### With input Bigger #####
Name ips average deviation median 99th %
streams 0.0885 0.188 min ±0.00% 0.188 min 0.188 min
inserts 0.00777 2.14 min ±0.00% 2.14 min 2.14 min
Comparison:
streams 0.0885
inserts 0.00777 - 11.39x slower +1.96 min
##### With input Medium #####
Name ips average deviation median 99th %
streams 1.00 1.00 s ±8.98% 0.99 s 1.12 s
inserts 0.0764 13.09 s ±0.00% 13.09 s 13.09 s
Comparison:
streams 1.00
inserts 0.0764 - 13.12x slower +12.10 s
##### With input Small #####
Name ips average deviation median 99th %
streams 8.26 0.121 s ±30.46% 0.112 s 0.23 s
inserts 0.75 1.34 s ±7.15% 1.29 s 1.48 s
Comparison:
streams 8.26
inserts 0.75 - 11.07x slower +1.22 s
```
The result is, that using bulk writes is much faster (about 15x faster at all).
"""
import Keywords
import Mongo.Utils
import Mongo.WriteConcern
alias Mongo.UnorderedBulk
alias Mongo.OrderedBulk
alias Mongo.BulkWriteResult
alias Mongo.Session
@doc """
Executes unordered and ordered bulk writes.
## Unordered bulk writes
The operation are grouped (inserts, updates, deletes). The order of execution is:
1. inserts
2. updates
3. deletes
The execution order within the group is not preserved.
## Ordered bulk writes
Sequences of the same operations are grouped and sent as one command. The order is preserved.
If a group (inserts, updates or deletes) exceeds the limit `maxWriteBatchSize` it will be split into chunks.
Everything is done in memory, so this use case is limited by memory. A better approach seems to use streaming bulk writes.
"""
@spec write(GenServer.server, (UnorderedBulk.t | OrderedBulk.t), Keyword.t) :: Mongo.BulkWriteResult.t
def write(topology_pid, %UnorderedBulk{} = bulk, opts) do
with {:ok, session} <- Session.start_implicit_session(topology_pid, :write, opts),
result = one_bulk_write(topology_pid, session, bulk, opts),
:ok <- Session.end_implict_session(topology_pid, session) do
result
end
end
def write(topology_pid, %OrderedBulk{coll: coll, ops: ops}, opts) do
write_concern = write_concern(opts)
empty = %BulkWriteResult{acknowledged: acknowledged?(write_concern)}
with {:ok, session} <- Session.start_implicit_session(topology_pid, :write, opts),
{:ok, limits} <- Mongo.limits(topology_pid) do
max_batch_size = limits.max_write_batch_size
ops
|> get_op_sequence()
|> Enum.reduce_while(empty, fn {cmd, docs}, acc ->
temp_result = one_bulk_write_operation(session, cmd, coll, docs, max_batch_size, opts)
case temp_result do
%{errors: []} ->
{:cont, BulkWriteResult.add(acc, temp_result)}
_other ->
{:halt, BulkWriteResult.add(acc, temp_result)}
end
end)
end
end
##
# Executes one unordered bulk write. The execution order of operation groups is
#
# * inserts
# * updates
# * deletes
#
# The function returns a keyword list with the results of each operation group:
# For the details see https://github.com/mongodb/specifications/blob/master/source/crud/crud.rst#results
#
defp one_bulk_write(topology_pid, session, %UnorderedBulk{coll: coll, inserts: inserts, updates: updates, deletes: deletes}, opts) do
with {:ok, limits} <- Mongo.limits(topology_pid) do
max_batch_size = limits.max_write_batch_size
results = case one_bulk_write_operation(session, :insert, coll, inserts, max_batch_size, opts) do
%{errors: []} = insert_result ->
case one_bulk_write_operation(session, :update, coll, updates, max_batch_size, opts) do
%{errors: []} = update_result ->
delete_result = one_bulk_write_operation(session, :delete, coll, deletes, max_batch_size, opts)
[insert_result, update_result, delete_result]
update_result ->
[insert_result, update_result]
end
insert_result ->
[insert_result]
end
BulkWriteResult.reduce(results, %BulkWriteResult{acknowledged: acknowledged?(opts)})
end
end
###
# Executes the command `cmd` and collects the result.
#
defp one_bulk_write_operation(session, cmd, coll, docs, max_batch_size, opts) do
with result <- session
|> run_commands(get_cmds(cmd, coll, docs, max_batch_size, opts), opts)
|> collect(cmd) do
result
end
end
##
# Converts the list of operations into insert/update/delete commands
#
defp get_cmds(:insert, coll, docs, max_batch_size, opts), do: get_insert_cmds(coll, docs, max_batch_size, opts)
defp get_cmds(:update, coll, docs, max_batch_size, opts), do: get_update_cmds(coll, docs, max_batch_size, opts)
defp get_cmds(:delete, coll, docs, max_batch_size, opts), do: get_delete_cmds(coll, docs, max_batch_size, opts)
###
# Converts the list of operations into list of lists with same operations.
#
# [inserts, inserts, updates] -> [[inserts, inserts],[updates]]
#
defp get_op_sequence(ops) do
get_op_sequence(ops, [])
end
defp get_op_sequence([], acc), do: acc
defp get_op_sequence(ops, acc) do
[{kind, _doc} | _rest] = ops
{docs, rest} = find_max_sequence(kind, ops)
get_op_sequence(rest, [{kind, docs} | acc])
end
###
# Splits the sequence of operations into two parts
# 1) sequence of operations of kind `kind`
# 2) rest of operations
#
defp find_max_sequence(kind, rest) do
find_max_sequence(kind, rest, [])
end
defp find_max_sequence(_kind, [], acc) do
{acc, []}
end
defp find_max_sequence(kind, [{other, desc} | rest], acc) when kind == other do
find_max_sequence(kind, rest, [desc | acc])
end
defp find_max_sequence(_kind, rest, acc) do
{acc, rest}
end
##
# collects the returns values for each operation
#
# the update operation is more complex than insert or delete operation
#
defp collect({docs, ids}, :insert) do
docs
|> Enum.map(fn
{:ok, %{"n" => n} = doc} -> BulkWriteResult.insert_result(n, ids, doc["writeErrors"] || [])
{:ok, _other} -> BulkWriteResult.empty()
{:error, reason} -> BulkWriteResult.error(reason)
end)
|> BulkWriteResult.reduce()
end
defp collect(docs, :update) do
docs
|> Enum.map(fn
{:ok, %{"n" => n, "nModified" => modified, "upserted" => ids} = doc} -> l = length(ids)
BulkWriteResult.update_result(n - l, modified, l, filter_upsert_ids(ids), doc["writeErrors"] || [])
{:ok, %{"n" => matched, "nModified" => modified} = doc} -> BulkWriteResult.update_result(matched, modified, 0, [], doc["writeErrors"] || [])
{:ok, _other} -> BulkWriteResult.empty()
{:error, reason} -> BulkWriteResult.error(reason)
end)
|> BulkWriteResult.reduce()
end
defp collect(docs, :delete) do
docs
|> Enum.map(fn
{:ok, %{"n" => n} = doc } -> BulkWriteResult.delete_result(n, doc["writeErrors"] || [])
{:ok, _other} -> BulkWriteResult.empty()
{:error, reason} -> BulkWriteResult.error(reason)
end)
|> BulkWriteResult.reduce()
end
defp filter_upsert_ids(nil), do: []
defp filter_upsert_ids(upserted), do: Enum.map(upserted, fn doc -> doc["_id"] end)
defp run_commands(session, {cmds, ids}, opts) do
{Enum.map(cmds, fn cmd -> Mongo.exec_command_session(session, cmd, opts) end), ids}
end
defp run_commands(session, cmds, opts) do
Enum.map(cmds, fn cmd -> Mongo.exec_command_session(session, cmd, opts) end)
end
defp get_insert_cmds(coll, docs, max_batch_size, opts) do
{ids, docs} = assign_ids(docs)
cmds = docs
|> Enum.chunk_every(max_batch_size)
|> Enum.map(fn inserts -> get_insert_cmd(coll, inserts, opts) end)
{cmds, ids}
end
defp get_insert_cmd(coll, inserts, opts) do
[insert: coll,
documents: inserts,
writeConcern: write_concern(opts)] |> filter_nils()
end
defp get_delete_cmds(coll, docs, max_batch_size, opts) do
docs
|> Enum.chunk_every(max_batch_size)
|> Enum.map(fn deletes -> get_delete_cmd(coll, deletes, opts) end)
end
defp get_delete_cmd(coll, deletes, opts ) do
[delete: coll,
deletes: Enum.map(deletes, fn delete -> get_delete_doc(delete) end),
ordered: Keyword.get(opts, :ordered),
writeConcern: write_concern(opts)] |> filter_nils()
end
defp get_delete_doc({filter, opts}) do
[q: filter,
limit: Keyword.get(opts, :limit),
collation: Keyword.get(opts, :collation)] |> filter_nils()
end
defp get_update_cmds(coll, docs, max_batch_size, opts) do
docs
|> Enum.chunk_every(max_batch_size)
|> Enum.map(fn updates -> get_update_cmd(coll, updates, opts) end)
end
defp get_update_cmd(coll, updates, opts) do
[ update: coll,
updates: Enum.map(updates, fn update -> get_update_doc(update) end),
ordered: Keyword.get(opts, :ordered),
writeConcern: write_concern(opts),
bypassDocumentValidation: Keyword.get(opts, :bypass_document_validation)
] |> filter_nils()
end
defp get_update_doc({filter, update, update_opts}) do
[ q: filter,
u: update,
upsert: Keyword.get(update_opts, :upsert),
multi: Keyword.get(update_opts, :multi) || false,
collation: Keyword.get(update_opts, :collation),
arrayFilters: Keyword.get(update_opts, :array_filters)
] |> filter_nils()
end
end
| 34.130137 | 177 | 0.633153 |
08dfc7bef8c5e7dd0b31b4203c74f26a9d21ceb1 | 6,994 | ex | Elixir | lib/ueberauth/strategy/twitchtv.ex | DMeechan/ueberauth_twitch_tv | 8d2fa08e7c191f4f273cd1a41ddf42bc5fed81e8 | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/twitchtv.ex | DMeechan/ueberauth_twitch_tv | 8d2fa08e7c191f4f273cd1a41ddf42bc5fed81e8 | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/twitchtv.ex | DMeechan/ueberauth_twitch_tv | 8d2fa08e7c191f4f273cd1a41ddf42bc5fed81e8 | [
"MIT"
] | 1 | 2020-05-21T00:32:52.000Z | 2020-05-21T00:32:52.000Z | defmodule Ueberauth.Strategy.TwitchTv do
@moduledoc """
Provides an Ueberauth strategy for authenticating with Twitch.tv.
### Setup
Create an application in Twitch.tv for you to use.
Register a new application at: [your Twitch.tv developer page](https://www.twitch.tv/kraken/oauth2/clients/new) and get the `client_id` and `client_secret`.
Include the provider in your configuration for Ueberauth
config :ueberauth, Ueberauth,
providers: [
twitchtv: { Ueberauth.Strategy.TwitchtTv, [] }
]
Then include the configuration for twitchtv.
config :ueberauth, Ueberauth.Strategy.TwitchTv.OAuth,
client_id: System.get_env("TWITCH_TV_CLIENT_ID"),
client_secret: System.get_env("TWITCH_TV_CLIENT_SECRET")
If you haven't already, create a pipeline and setup routes for your callback handler
pipeline :auth do
Ueberauth.plug "/auth"
end
scope "/auth" do
pipe_through [:browser, :auth]
get "/:provider/callback", AuthController, :callback
end
Create an endpoint for the callback where you will handle the `Ueberauth.Auth` struct
defmodule MyApp.AuthController do
use MyApp.Web, :controller
def callback_phase(%{ assigns: %{ ueberauth_failure: fails } } = conn, _params) do
# do things with the failure
end
def callback_phase(%{ assigns: %{ ueberauth_auth: auth } } = conn, params) do
# do things with the auth
end
end
You can edit the behaviour of the Strategy by including some options when you register your provider.
To set the `uid_field`
config :ueberauth, Ueberauth,
providers: [
twitchtv: { Ueberauth.Strategy.TwitchtTv, [uid_field: :email] }
]
Default is `:login`
To set the default 'scopes' (permissions):
config :ueberauth, Ueberauth,
providers: [
twitchtv: { Ueberauth.Strategy.TwitchtTv, [default_scope: "user:read:email"] }
]
Deafult is "user,public_repo"
"""
use Ueberauth.Strategy,
uid_field: :login,
default_scope: "user:read:email",
oauth2_module: Ueberauth.Strategy.TwitchTv.OAuth
alias Ueberauth.Auth.Info
alias Ueberauth.Auth.Credentials
alias Ueberauth.Auth.Extra
@doc """
Handles the initial redirect to the twitch.tv authentication page.
To customize the scope (permissions) that are requested by Twitch.Tv include them as part of your url:
"/auth/twitchtv?scope=user,public_repo,gist"
You can also include a `state` param that TwitchTv will return to you.
"""
def handle_request!(conn) do
scopes = conn.params["scope"] || option(conn, :default_scope)
params =
[scope: scopes]
|> with_param(:state, conn)
opts = oauth_client_options_from_conn(conn)
redirect!(conn, __MODULE__.OAuth.authorize_url!(params, opts))
end
@doc """
Handles the callback from TwitchTv. When there is a failure from TwitchTv the failure is included in the
`ueberauth_failure` struct. Otherwise the information returned from TwitchTv is returned in the `Ueberauth.Auth` struct.
"""
def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
params = [code: code]
opts = oauth_client_options_from_conn(conn)
case __MODULE__.OAuth.get_access_token(params, opts) do
{:ok, token} ->
fetch_user(conn, token)
{:error, {error_code, error_description}} ->
set_errors!(conn, [error(error_code, error_description)])
end
end
@doc false
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
@doc """
Cleans up the private area of the connection used for passing the raw TwitchTv response around during the callback.
"""
def handle_cleanup!(conn) do
conn
|> put_private(:twitch_tv_user, nil)
|> put_private(:twitch_tv_token, nil)
end
@doc """
Fetches the uid field from the twitch tv response. This defaults to the option `uid_field` which in-turn defaults to `login`
"""
def uid(conn) do
conn.private.twitch_tv_user[option(conn, :uid_field) |> to_string]
end
@doc """
Includes the credentials from the twitch tv response.
"""
def credentials(conn) do
token = conn.private.twitch_tv_token
# scopes = (token.other_params["scope"] || "")
# |> String.split(",")
%Credentials{
token: token.access_token,
refresh_token: token.refresh_token,
expires_at: token.expires_at,
token_type: token.token_type,
expires: !!token.expires_at
# scopes: scopes
}
end
@doc """
Fetches the fields to populate the info section of the `Ueberauth.Auth` struct.
"""
def info(conn) do
user = conn.private.twitch_tv_user
%Info{
name: user["display_name"],
image: user["profile_image_url"],
first_name: nil,
last_name: nil,
nickname: nil,
email: user["email"],
location: nil,
description: user["description"],
phone: nil,
urls: %{
self: user["self"]
}
}
end
@doc """
Stores the raw information (including the token) obtained from the Twitch Tv callback.
"""
def extra(conn) do
%Extra{
raw_info: %{
token: conn.private.twitch_tv_token,
user: conn.private.twitch_tv_user,
is_partnered: conn.private.twitch_tv_user["partnered"]
}
}
end
defp fetch_user(conn, token) do
conn = put_private(conn, :twitch_tv_token, token)
path = "https://api.twitch.tv/helix/users"
resp = __MODULE__.OAuth.get(token, path)
case resp do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: %{"data" => data}}}
when status_code in 200..399 ->
put_private(conn, :twitch_tv_user, List.first(data))
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
{:error, %OAuth2.Response{status_code: status_code, body: body}} ->
set_errors!(conn, [error("OAuth2", "Error #{status_code}" <> to_string(body))])
end
end
defp with_param(opts, key, conn) do
if value = conn.params[to_string(key)], do: Keyword.put(opts, key, value), else: opts
end
# defp with_optional(opts, key, conn) do
# if option(conn, key), do: Keyword.put(opts, key, option(conn, key)), else: opts
# end
defp oauth_client_options_from_conn(conn) do
base_options = [redirect_uri: callback_url(conn)]
request_options = conn.private[:ueberauth_request_options].options
case {request_options[:client_id], request_options[:client_secret]} do
{nil, _} -> base_options
{_, nil} -> base_options
{id, secret} -> [client_id: id, client_secret: secret] ++ base_options
end
end
defp option(conn, key) do
Keyword.get(options(conn), key, Keyword.get(default_options(), key))
end
end
| 29.761702 | 158 | 0.66557 |
08dfcd54138c3a1f99fb8550dd8f26d8225782d7 | 2,607 | ex | Elixir | lib/codes/codes_a83.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_a83.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_a83.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_A83 do
alias IcdCode.ICDCode
def _A830 do
%ICDCode{full_code: "A830",
category_code: "A83",
short_code: "0",
full_name: "Japanese encephalitis",
short_name: "Japanese encephalitis",
category_name: "Japanese encephalitis"
}
end
def _A831 do
%ICDCode{full_code: "A831",
category_code: "A83",
short_code: "1",
full_name: "Western equine encephalitis",
short_name: "Western equine encephalitis",
category_name: "Western equine encephalitis"
}
end
def _A832 do
%ICDCode{full_code: "A832",
category_code: "A83",
short_code: "2",
full_name: "Eastern equine encephalitis",
short_name: "Eastern equine encephalitis",
category_name: "Eastern equine encephalitis"
}
end
def _A833 do
%ICDCode{full_code: "A833",
category_code: "A83",
short_code: "3",
full_name: "St Louis encephalitis",
short_name: "St Louis encephalitis",
category_name: "St Louis encephalitis"
}
end
def _A834 do
%ICDCode{full_code: "A834",
category_code: "A83",
short_code: "4",
full_name: "Australian encephalitis",
short_name: "Australian encephalitis",
category_name: "Australian encephalitis"
}
end
def _A835 do
%ICDCode{full_code: "A835",
category_code: "A83",
short_code: "5",
full_name: "California encephalitis",
short_name: "California encephalitis",
category_name: "California encephalitis"
}
end
def _A836 do
%ICDCode{full_code: "A836",
category_code: "A83",
short_code: "6",
full_name: "Rocio virus disease",
short_name: "Rocio virus disease",
category_name: "Rocio virus disease"
}
end
def _A838 do
%ICDCode{full_code: "A838",
category_code: "A83",
short_code: "8",
full_name: "Other mosquito-borne viral encephalitis",
short_name: "Other mosquito-borne viral encephalitis",
category_name: "Other mosquito-borne viral encephalitis"
}
end
def _A839 do
%ICDCode{full_code: "A839",
category_code: "A83",
short_code: "9",
full_name: "Mosquito-borne viral encephalitis, unspecified",
short_name: "Mosquito-borne viral encephalitis, unspecified",
category_name: "Mosquito-borne viral encephalitis, unspecified"
}
end
end
| 29.625 | 73 | 0.599156 |
08dfe8de693c9d21ec316e9996f1019c45c1b1a1 | 1,577 | ex | Elixir | apps/keychain/lib/keychain/wallet.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/keychain/lib/keychain/wallet.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/keychain/lib/keychain/wallet.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Keychain.Wallet do
@moduledoc false
alias Keychain.Key
alias ExthCrypto.ECIES.ECDH
alias ExthCrypto.Hash.Keccak
@typep address :: Keychain.address()
@typep resp(ret) :: ret | {:error, atom()}
@doc """
Generates a new wallet address and returns a wallet ID for futher access.
Returns a tuple of `{:ok, {wallet_address, public_key}}`.
"""
@spec generate :: resp({:ok, address()})
def generate do
{public_key, private_key} = ECDH.new_ecdh_keypair()
<<4::size(8), key::binary-size(64)>> = public_key
<<_::binary-size(12), wallet_address::binary-size(20)>> = Keccak.kec(key)
wallet_address = Base.encode16(wallet_address, case: :lower)
wallet_address = "0x#{wallet_address}"
public_key_encoded = Base.encode16(public_key, case: :lower)
private_key_encoded = Base.encode16(private_key, case: :lower)
{:ok, _} = Key.insert_private_key(wallet_address, private_key_encoded)
{:ok, {wallet_address, public_key_encoded}}
end
end
| 33.553191 | 77 | 0.721623 |
08e04e6e27fcbb69043a31deaa57d05982789b4f | 3,032 | ex | Elixir | lib/abbrev.ex | CraigCottingham/abbrev | 31afedd2a22d51c193209e771b3aa9218da10733 | [
"Apache-2.0"
] | null | null | null | lib/abbrev.ex | CraigCottingham/abbrev | 31afedd2a22d51c193209e771b3aa9218da10733 | [
"Apache-2.0"
] | 26 | 2019-07-10T22:08:36.000Z | 2021-08-02T11:17:24.000Z | lib/abbrev.ex | CraigCottingham/abbrev | 31afedd2a22d51c193209e771b3aa9218da10733 | [
"Apache-2.0"
] | null | null | null | defmodule Abbrev do
@moduledoc """
Calculates the set of unambiguous abbreviations for a given set of strings.
"""
@doc """
Given a set of strings, calculate the set of unambiguous abbreviations for those strings,
and return a map where the keys are all the possible abbreviations
and the values are the full strings.
## Parameters
* words - The set of strings from which to calculate the abbreviations.
## Examples
iex> Abbrev.abbrev(~w())
%{}
iex> Abbrev.abbrev(~w(a))
%{"a" => "a"}
iex> Abbrev.abbrev(~w(a b))
%{"a" => "a", "b" => "b"}
iex> Abbrev.abbrev(~w(aa ab))
%{"aa" => "aa", "ab" => "ab"}
iex> Abbrev.abbrev(~w(car cone))
%{"ca" => "car", "car" => "car", "co" => "cone", "con" => "cone", "cone" => "cone"}
"""
@spec abbrev([binary()]) :: %{required(binary()) => binary()}
def abbrev(words) do
Enum.reduce(words, %{abbreviations: %{}, seen: %{}}, fn word, state ->
Enum.reduce(all_prefixes_for_word(word, [word]), state, fn prefix, state -> update_state(word, prefix, state) end)
end)[:abbreviations]
end
@doc """
Given a set of strings and a pattern, calculate the set of unambiguous abbreviations
for only those strings matching the pattern, and return a map where
the keys are all the possible abbreviations and the values are the full strings.
## Parameters
* words - The set of strings from which to calculate the abbreviations.
* pattern - A regex or string; only input strings and abbreviations that match
the pattern or string will be included in the return value.
## Examples
iex> Abbrev.abbrev(~w(), ~r/^a/)
%{}
iex> Abbrev.abbrev(~w(a), ~r/^a/)
%{"a" => "a"}
iex> Abbrev.abbrev(~w(a b), ~r/^a/)
%{"a" => "a"}
iex> Abbrev.abbrev(~w(aa ab), ~r/b/)
%{"ab" => "ab"}
iex> Abbrev.abbrev(~w(car box cone crab), ~r/b/)
%{"b" => "box", "bo" => "box", "box" => "box", "crab" => "crab"}
iex> Abbrev.abbrev(~w(car box cone), "ca")
%{"ca" => "car", "car" => "car"}
"""
@spec abbrev([binary()], binary() | Regex.t()) :: %{required(binary()) => binary()}
def abbrev(words, pattern) when is_binary(pattern) do
abbrev(words, Regex.compile!(pattern))
end
def abbrev(words, pattern) do
words
|> Enum.filter(&Regex.match?(pattern, &1))
|> abbrev()
|> Enum.filter(fn {k, _} -> Regex.match?(pattern, k) end)
|> Enum.into(%{})
end
defp all_prefixes_for_word(word, accum) do
case Regex.run(~r/(.+).$/, word) do
[_, prefix] ->
all_prefixes_for_word(prefix, [prefix | accum])
nil ->
accum
end
end
defp update_state(word, prefix, state) do
case get_and_update_in(state[:seen][prefix], &{&1, (&1 || 0) + 1}) do
{nil, state} ->
put_in(state[:abbreviations][prefix], word)
{1, state} ->
{_, new_state} = pop_in(state[:abbreviations][prefix])
new_state
{_, state} ->
state
end
end
end
| 30.938776 | 120 | 0.581135 |
08e07be440e8fab94b213cd10f54977e8886068a | 1,550 | exs | Elixir | mix.exs | cabol/phoenix_gen_socket_client | bbc8884288bd747e1d7e8ce96a6189d8c24ff01e | [
"MIT"
] | null | null | null | mix.exs | cabol/phoenix_gen_socket_client | bbc8884288bd747e1d7e8ce96a6189d8c24ff01e | [
"MIT"
] | null | null | null | mix.exs | cabol/phoenix_gen_socket_client | bbc8884288bd747e1d7e8ce96a6189d8c24ff01e | [
"MIT"
] | null | null | null | defmodule Phoenix.GenSocketClient.Mixfile do
use Mix.Project
@version "3.2.1"
@github_url "https://github.com/Aircloak/phoenix_gen_socket_client"
def project do
[
app: :phoenix_gen_socket_client,
version: @version,
elixir: "~> 1.8",
elixirc_paths: elixirc_paths(Mix.env()),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package(),
description: "Socket client behaviour for phoenix channels.",
docs: [
source_url: @github_url,
source_ref: "v#{@version}",
main: "readme",
extras: ["README.md"]
]
]
end
def application do
[extra_applications: [:logger | extra_applications(Mix.env())]]
end
defp extra_applications(:prod), do: []
defp extra_applications(_), do: [:websocket_client]
defp deps do
[
{:websocket_client, "~> 1.2", optional: true},
{:jason, "~> 1.1", optional: true},
{:phoenix, "~> 1.3", only: :test},
{:cowboy, "~> 1.0", only: :test},
{:credo, "~> 0.8.10", only: [:dev, :test], runtime: false},
{:dialyze, "~> 0.2.1", only: :dev},
{:ex_doc, "~> 0.22.1", only: :dev, runtime: false}
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp package do
[
maintainers: ["Aircloak"],
licenses: ["MIT"],
links: %{
"GitHub" => @github_url,
"Docs" => "http://hexdocs.pm/phoenix_gen_socket_client"
}
]
end
end
| 25.833333 | 69 | 0.570968 |
08e0a07e8ae90b2bc8e6a7816d89127f4cc004f4 | 210 | exs | Elixir | priv/repo/migrations/20190415192200_add_address_to_charging_process.exs | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 2,602 | 2019-07-24T23:19:12.000Z | 2022-03-31T15:03:48.000Z | priv/repo/migrations/20190415192200_add_address_to_charging_process.exs | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 1,547 | 2019-07-26T22:02:09.000Z | 2022-03-31T15:39:41.000Z | priv/repo/migrations/20190415192200_add_address_to_charging_process.exs | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 524 | 2019-07-26T17:31:33.000Z | 2022-03-29T15:16:36.000Z | defmodule TeslaMate.Repo.Migrations.AddAddressToChargingProcess do
use Ecto.Migration
def change do
alter table(:charging_processes) do
add(:address_id, references(:addresses))
end
end
end
| 21 | 66 | 0.757143 |
08e0e1edd3b8beb9d394ab7e65de52d0a0c0abf5 | 4,780 | ex | Elixir | lib/lager_logger.ex | liveforeverx/lager_logger | cfea9ae48a4eb3c2f0f8d8d0301628bf840a2d7f | [
"Apache-2.0"
] | null | null | null | lib/lager_logger.ex | liveforeverx/lager_logger | cfea9ae48a4eb3c2f0f8d8d0301628bf840a2d7f | [
"Apache-2.0"
] | null | null | null | lib/lager_logger.ex | liveforeverx/lager_logger | cfea9ae48a4eb3c2f0f8d8d0301628bf840a2d7f | [
"Apache-2.0"
] | 2 | 2019-06-26T13:40:28.000Z | 2019-08-08T06:53:24.000Z | defmodule LagerLogger do
@moduledoc ~S"""
A lager backend that forwards all log messages to Elixir's Logger.
To forward all lager messages to Logger and otherwise disable lager
include the following in a config.exs file:
use Mix.Config
# Stop lager redirecting :error_logger messages
config :lager, :error_logger_redirect, false
# Stop lager removing Logger's :error_logger handler
config :lager, :error_logger_whitelist, [Logger.ErrorHandler]
# Stop lager writing a crash log
config :lager, :crash_log, false
# Use LagerLogger as lager's only handler.
config :lager, :handlers, [{LagerLogger, [level: :debug]}]
"""
use Bitwise
@behaviour :gen_event
@doc """
Flushes lager and Logger
Guarantees that all messages sent to `:error_logger` and `:lager`, prior to
this call, have been handled by Logger.
"""
@spec flush() :: :ok
def flush() do
_ = GenEvent.which_handlers(:error_logger)
_ = GenEvent.which_handlers(:lager_event)
_ = GenEvent.which_handlers(Logger)
:ok
end
@doc false
def init(opts) do
config = Keyword.get(opts, :level, :debug)
case config_to_mask(config) do
{:ok, _mask} = ok ->
ok
{:error, reason} ->
{:error, {:fatal, reason}}
end
end
@doc false
def handle_event({:log, lager_msg}, mask) do
%{mode: mode, truncate: truncate, level: min_level, utc_log: utc_log?} = Logger.Config.__data__
level = severity_to_level(:lager_msg.severity(lager_msg))
if :lager_util.is_loggable(lager_msg, mask, __MODULE__) and
Logger.compare_levels(level, min_level) != :lt do
metadata = :lager_msg.metadata(lager_msg) |> normalize_pid
# lager_msg's message is already formatted chardata
message = Logger.Utils.truncate(:lager_msg.message(lager_msg), truncate)
# Lager always uses local time and converts it when formatting using :lager_util.maybe_utc
timestamp = timestamp(:lager_msg.timestamp(lager_msg), utc_log?)
group_leader = case Keyword.fetch(metadata, :pid) do
{:ok, pid} when is_pid(pid) -> Process.info(pid, :group_leader)
_ -> Process.group_leader # if lager didn't give us a pid just pretend it's us
end
_ = notify(mode, {level, group_leader, {Logger, message, timestamp, metadata}})
{:ok, mask}
else
{:ok, mask}
end
end
@doc false
def handle_call(:get_loglevel, mask) do
{:ok, mask, mask}
end
def handle_call({:set_loglevel, config}, mask) do
case config_to_mask(config) do
{:ok, mask} ->
{:ok, :ok, mask}
{:error, _reason} = error ->
{:ok, error, mask}
end
end
@doc false
def handle_info(_msg, mask) do
{:ok, mask}
end
@doc false
def terminate(_reason, _mask), do: :ok
@doc false
def code_change(_old, mask, _extra), do: {:ok, mask}
defp config_to_mask(config) do
try do
:lager_util.config_to_mask(config)
catch
_, _ ->
{:error, {:bad_log_level, config}}
else
mask ->
{:ok, mask}
end
end
# Stolen from Logger.
defp notify(:sync, msg), do: GenEvent.sync_notify(Logger, msg)
defp notify(:async, msg), do: GenEvent.notify(Logger, msg)
@doc false
# Lager's parse transform converts the pid into a charlist. Logger's metadata expects pids as
# actual pids so we need to revert it.
# If the pid metadata is not a valid pid we remove it completely.
def normalize_pid(metadata) do
case Keyword.fetch(metadata, :pid) do
{:ok, pid} when is_pid(pid) -> metadata
{:ok, pid} when is_list(pid) ->
try do
# Lager's parse transform uses `pid_to_list` so we revert it
Keyword.put(metadata, :pid, :erlang.list_to_pid(pid))
rescue
ArgumentError -> Keyword.delete(metadata, :pid)
end
{:ok, _} -> Keyword.delete(metadata, :pid)
:error -> metadata
end
end
@doc false
# Returns a timestamp that includes miliseconds. Stolen from Logger.Utils.
def timestamp(now, utc_log?) do
{_, _, micro} = now
{date, {hours, minutes, seconds}} =
case utc_log? do
true -> :calendar.now_to_universal_time(now)
false -> :calendar.now_to_local_time(now)
end
{date, {hours, minutes, seconds, div(micro, 1000)}}
end
# Converts lager's severity to Logger's level
defp severity_to_level(:debug), do: :debug
defp severity_to_level(:info), do: :info
defp severity_to_level(:notice), do: :info
defp severity_to_level(:warning), do: :warn
defp severity_to_level(:error), do: :error
defp severity_to_level(:critical), do: :error
defp severity_to_level(:alert), do: :error
defp severity_to_level(:emergency), do: :error
end
| 29.506173 | 99 | 0.654393 |
08e1060ecf75d18cb2d638fe6e5cd0eaa05f7560 | 615 | ex | Elixir | lib/morphic_pro_web/live/page_live/privacy.ex | MorphicPro/morphic.pro | 9098da267344e025f262a344de6a2fb5cfea50a8 | [
"MIT"
] | 5 | 2020-04-23T21:12:17.000Z | 2020-09-30T00:16:58.000Z | lib/morphic_pro_web/live/page_live/privacy.ex | joshchernoff/morphic.pro | 9098da267344e025f262a344de6a2fb5cfea50a8 | [
"MIT"
] | 232 | 2020-04-24T00:52:24.000Z | 2021-08-05T17:23:12.000Z | lib/morphic_pro_web/live/page_live/privacy.ex | MorphicPro/morphic.pro | 9098da267344e025f262a344de6a2fb5cfea50a8 | [
"MIT"
] | 1 | 2020-04-04T02:19:44.000Z | 2020-04-04T02:19:44.000Z | defmodule MorphicProWeb.PageLive.Privacy do
use MorphicProWeb, :live_view
alias MorphicPro.Accounts
@impl true
def mount(_params, session, socket) do
if user = session["user_token"] && Accounts.get_user_by_session_token(session["user_token"]) do
{:ok, assign(socket, current_user: user)}
else
{:ok, assign(socket, current_user: nil)}
end
end
@impl true
def handle_params(_params, _session, socket) do
{:noreply, apply_action(socket, socket.assigns.live_action)}
end
defp apply_action(socket, :privacy) do
socket
|> assign(page_title: "Privacy")
end
end
| 24.6 | 99 | 0.705691 |
08e10c01ab36c25bc27fb95535781d5e5e6a4193 | 2,354 | ex | Elixir | lib/forcex/api/http.ex | fast-radius/forcex | d3eaf79563ba4bd89ca486880a47c2044bf8ca4e | [
"MIT"
] | null | null | null | lib/forcex/api/http.ex | fast-radius/forcex | d3eaf79563ba4bd89ca486880a47c2044bf8ca4e | [
"MIT"
] | null | null | null | lib/forcex/api/http.ex | fast-radius/forcex | d3eaf79563ba4bd89ca486880a47c2044bf8ca4e | [
"MIT"
] | null | null | null | defmodule Forcex.Api.Http do
@moduledoc """
HTTP communication with Salesforce API
"""
@behaviour Forcex.Api
require Logger
use HTTPoison.Base
@user_agent [{"User-agent", "forcex"}]
@accept [{"Accept", "application/json"}]
@accept_encoding [{"Accept-Encoding", "gzip,deflate"}]
@type http_method :: :get | :put | :post | :patch | :delete
@type forcex_response :: map | {number, any} | String.t()
def raw_request(http_method, url, body, headers, options) do
response =
http_method |> request!(url, body, headers, extra_options() ++ options) |> process_response
Logger.debug("#{__ENV__.module}.#{elem(__ENV__.function, 0)} response=" <> inspect(response))
response
end
@spec extra_options :: list
defp extra_options() do
Application.get_env(:forcex, :request_options, [])
end
  @spec process_response(HTTPoison.Response.t()) :: forcex_response
  # Recursively normalizes a response: each decoding clause strips the header
  # it consumed and re-enters process_response/1 until only status handling
  # remains. Clause order matters — decompression before JSON decode, JSON
  # decode before the status_code fallthroughs.
  #
  # NOTE(review): these clauses pattern-match `headers` as a map, while
  # HTTPoison returns headers as a list by default — presumably headers are
  # first converted via process_headers/1 below; confirm against callers.
  def process_response(
        %HTTPoison.Response{body: body, headers: %{"Content-Encoding" => "gzip"} = headers} = resp
      ) do
    # gunzip the body, drop the consumed encoding header, and reprocess.
    %{resp | body: :zlib.gunzip(body), headers: Map.drop(headers, ["Content-Encoding"])}
    |> process_response
  end
  def process_response(
        %HTTPoison.Response{body: body, headers: %{"Content-Encoding" => "deflate"} = headers} =
          resp
      ) do
    # Manual streaming inflate. The -15 window bits tell :zlib to expect a
    # raw deflate stream (no zlib header/checksum), which is what servers
    # commonly send for Content-Encoding: deflate.
    zstream = :zlib.open()
    :ok = :zlib.inflateInit(zstream, -15)
    # :zlib.inflate/2 returns iodata chunks; Enum.join/1 flattens to a binary.
    uncompressed_data = zstream |> :zlib.inflate(body) |> Enum.join()
    :zlib.inflateEnd(zstream)
    :zlib.close(zstream)
    %{resp | body: uncompressed_data, headers: Map.drop(headers, ["Content-Encoding"])}
    |> process_response
  end
  def process_response(
        %HTTPoison.Response{
          body: body,
          headers: %{"Content-Type" => "application/json" <> _} = headers
        } = resp
      ) do
    # Decode JSON bodies into maps with atom keys, then reprocess so the
    # status_code clauses below decide the final return shape.
    %{
      resp
      | body: Poison.decode!(body, keys: :atoms),
        headers: Map.drop(headers, ["Content-Type"])
    }
    |> process_response
  end
  # Terminal clauses: 200 returns the bare body; any other status returns
  # a {status, body} tuple for the caller to handle.
  def process_response(%HTTPoison.Response{body: body, status_code: 200}), do: body
  def process_response(%HTTPoison.Response{body: body, status_code: status}), do: {status, body}
def process_request_headers(headers), do: headers ++ @user_agent ++ @accept ++ @accept_encoding
@spec process_headers(list({String.t(), String.t()})) :: map
def process_headers(headers), do: Map.new(headers)
end
| 31.810811 | 98 | 0.652506 |
08e10e36bf983697e75ce1d81df9f91736973af8 | 523 | ex | Elixir | lib/std_json_io/reloader.ex | AirX-Inc/std_json_io | 8f189628385240c4386d3086be542e4a89e850fb | [
"MIT"
] | null | null | null | lib/std_json_io/reloader.ex | AirX-Inc/std_json_io | 8f189628385240c4386d3086be542e4a89e850fb | [
"MIT"
] | null | null | null | lib/std_json_io/reloader.ex | AirX-Inc/std_json_io | 8f189628385240c4386d3086be542e4a89e850fb | [
"MIT"
] | 2 | 2016-08-05T17:22:28.000Z | 2020-02-06T13:00:16.000Z | defmodule StdJsonIo.Reloader do
use GenServer
  # Starts the reloader as a singleton GenServer registered under this
  # module's name. `mod` is the module whose IO workers get restarted;
  # `files` are the watched file paths (see the fs handle_info clause).
  def start_link(mod, files) do
    GenServer.start_link(__MODULE__, [mod, files], name: __MODULE__)
  end
  # GenServer callback: subscribes this process to filesystem events
  # (delivered later as messages to handle_info/2) and stores the watched
  # files plus the target module as state.
  def init([mod, files]) do
    :fs.subscribe()
    {:ok, %{files: files, mod: mod}}
  end
def handle_info({_, {:fs, :file_event}, {path, _}}, %{files: files, mod: mod} = state) do
if Enum.member?(files, path |> to_string) do
mod.restart_io_workers!
end
{:noreply, state}
end
def handle_info(_msg, state) do
{:noreply, state}
end
end
| 21.791667 | 91 | 0.640535 |
08e12c2fe6cd2bab17d933875e92b54c1b9d0ae8 | 1,399 | ex | Elixir | apps/extract_decode_csv/lib/extract/decode/csv.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/extract_decode_csv/lib/extract/decode/csv.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/extract_decode_csv/lib/extract/decode/csv.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | NimbleCSV.define(Extract.Decode.Csv.Parser, [])
defmodule Extract.Decode.Csv do
  @moduledoc """
  Extraction step that decodes CSV messages into maps keyed by the
  configured `headers`, optionally skipping the first (header) line.
  """
  use Definition, schema: Extract.Decode.Csv.V1

  @type t :: %__MODULE__{
          version: integer,
          headers: list,
          skip_first_line: boolean
        }

  @derive Jason.Encoder
  defstruct version: 1,
            headers: nil,
            skip_first_line: false

  defimpl Extract.Step, for: __MODULE__ do
    import Extract.Context

    # Wraps the context's stream in a transform that drops the first message
    # when `skip_first_line` is set and parses every other message's data
    # as a CSV row.
    def execute(step, context) do
      source = fn opts ->
        get_stream(context, opts)
        |> Stream.transform(%{skip: step.skip_first_line}, fn
          message, %{skip: false} = acc ->
            {[Extract.Message.update_data(message, &parse(&1, step.headers))], acc}

          _message, %{skip: true} = acc ->
            # Drop the header line and stop skipping from here on.
            {[], %{acc | skip: false}}
        end)
      end

      context
      |> set_source(source)
      |> Ok.ok()
    end

    # Parses one CSV line into a map of header => value. Replaces the old
    # argument-swapping zip/2 helper with a direct Enum.zip for clarity.
    defp parse(data, headers) do
      row =
        data
        |> Extract.Decode.Csv.Parser.parse_string(skip_headers: false)
        |> List.flatten()

      headers
      |> Enum.zip(row)
      |> Map.new()
    end
  end
end
defmodule Extract.Decode.Csv.V1 do
  # Version-1 validation schema for the Extract.Decode.Csv struct,
  # built with the project's Definition.Schema DSL.
  use Definition.Schema

  @impl true
  def s do
    schema(%Extract.Decode.Csv{
      # Pins this schema to struct version 1.
      version: version(1),
      # headers must be a non-nil list; skip_first_line a non-nil boolean.
      headers: spec(is_list() and not_nil?()),
      skip_first_line: spec(is_boolean() and not_nil?())
    })
  end
end
| 22.564516 | 83 | 0.588277 |
08e1bc1f45e3de39771b52b5815f666c6f43588b | 653 | exs | Elixir | test/validation/rules/credit_card/visa_test.exs | elixir-validation/validation | 164a62c881e03a307172e4a20413f81260abb2dc | [
"MIT"
] | 60 | 2019-09-13T13:37:01.000Z | 2021-01-06T05:20:32.000Z | test/validation/rules/credit_card/visa_test.exs | elixir-validation/validation | 164a62c881e03a307172e4a20413f81260abb2dc | [
"MIT"
] | 1 | 2019-12-16T13:57:22.000Z | 2019-12-16T13:57:22.000Z | test/validation/rules/credit_card/visa_test.exs | elixir-validation/validation | 164a62c881e03a307172e4a20413f81260abb2dc | [
"MIT"
] | 5 | 2019-09-13T19:14:24.000Z | 2019-11-26T17:33:08.000Z | defmodule Validation.Rules.CreditCard.VisaTest do
use ExUnit.Case
alias Validation.Rules.CreditCard.Visa, as: V
doctest Validation
test "valid credit card - visa" do
assert V.validate?("4024 007 193 879")
assert V.validate?("4024.0071.5336.1885")
end
test "invalid credit card - visa" do
refute V.validate?("340-3161-9380-9364") # amex
refute V.validate?("6011000990139424") # discover
refute V.validate?("30351042633884") # diners
refute V.validate?("3566002020360505") # jcb
refute V.validate?("5376 7473 9720 8720") # master
refute V.validate?("2222 4000 4124 0011") # master
end
end
| 32.65 | 57 | 0.684533 |
08e1d0100c11c01d3a2e947c64fcae9fcb31d923 | 1,978 | ex | Elixir | lib/game/create_lobby.ex | idabmat/game_api | c12eefb84ff5c9647ee2e06d7893e3aae1b48b2d | [
"MIT"
] | null | null | null | lib/game/create_lobby.ex | idabmat/game_api | c12eefb84ff5c9647ee2e06d7893e3aae1b48b2d | [
"MIT"
] | null | null | null | lib/game/create_lobby.ex | idabmat/game_api | c12eefb84ff5c9647ee2e06d7893e3aae1b48b2d | [
"MIT"
] | null | null | null | defmodule Game.CreateLobby do
@moduledoc """
Creating a named lobby for a given account.
"""
alias Auth.Account
alias Ecto.Changeset
alias Game.Lobby
alias Game.Player
@type args :: %{lobby_name: String.t(), player_name: String.t(), account: Account.t()}
@type errors :: [lobby_name: [atom()], player_name: [atom()], other: [atom()]]
@type gateways :: [lobby_gateway: module(), id_gateway: module()]
@type response :: {:ok, Lobby.t()} | {:errors, errors()}
@spec execute(args(), gateways()) :: response()
def execute(%{lobby_name: lobby_name, player_name: player_name} = args, gateways) do
case validate(%{lobby_name: lobby_name, player_name: player_name}) do
[] ->
lobby = build_lobby(args, gateways[:id_gateway])
insert_lobby(lobby, gateways[:lobby_gateway])
errors ->
{:errors, errors}
end
end
@spec build_lobby(args(), module()) :: Lobby.t()
defp build_lobby(
%{lobby_name: lobby_name, player_name: player_name, account: account},
id_gateway
) do
%Lobby{
uid: id_gateway.generate(),
name: lobby_name,
players: [
%Player{name: player_name, account_id: {account.provider, account.uid}}
]
}
end
@spec validate(%{lobby_name: String.t(), player_name: String.t()}) :: keyword([atom()])
defp validate(params) do
data = %{}
types = %{lobby_name: :string, player_name: :string}
{data, types}
|> Changeset.cast(params, Map.keys(types))
|> Changeset.validate_required([:lobby_name, :player_name], message: :cant_be_blank)
|> Map.get(:errors, [])
|> Enum.map(fn {field, {error, _validation}} -> {field, [error]} end)
end
  @spec insert_lobby(Lobby.t(), module()) :: response()
  # Persists the lobby unless one with the same uid already exists, in
  # which case the caller is told to retry (a retry regenerates the uid).
  #
  # NOTE(review): the get-then-set sequence is not atomic; two concurrent
  # creates that draw the same uid could both pass the nil check — confirm
  # whether the gateway serializes access.
  defp insert_lobby(lobby, lobby_gateway) do
    case lobby_gateway.get(lobby.uid) do
      nil ->
        lobby_gateway.set(lobby)
        {:ok, lobby}
      _ ->
        {:errors, [other: [:try_again]]}
    end
  end
| 29.969697 | 89 | 0.62639 |
08e1d1a7d0863d0b17969c5d26ea538bd2c6c2e6 | 2,078 | ex | Elixir | apps/admin/web/controllers/category_controller.ex | impressarix/Phoenix-Webstore | 31376183b853e594b224fb1051897a00cd20b6ec | [
"Apache-2.0"
] | null | null | null | apps/admin/web/controllers/category_controller.ex | impressarix/Phoenix-Webstore | 31376183b853e594b224fb1051897a00cd20b6ec | [
"Apache-2.0"
] | null | null | null | apps/admin/web/controllers/category_controller.ex | impressarix/Phoenix-Webstore | 31376183b853e594b224fb1051897a00cd20b6ec | [
"Apache-2.0"
] | null | null | null | defmodule EmporiumAdmin.CategoryController do
use EmporiumAdmin.Web, :controller
alias Emporium.HTTP.API
plug :scrub_params, "category" when action in [:create, :update]
def index(conn, params) do
categories = API.Client.get_categories!(params)
render(conn, "index.html", categories: categories)
end
def new(conn, _params) do
category = %{}
render(conn, "new.html", category: category)
end
  # POST /categories — creates a category through the API client.
  # On 201 redirects to the index; on 422 re-renders the form with errors.
  # Any other status code falls through the case and crashes the request.
  def create(conn, params) do
    response = API.Client.create_category!(params)
    case {response.body, response.status_code} do
      {_, 201} ->
        conn
        |> put_flash(:info, "Category created successfully.")
        |> redirect(to: category_path(conn, :index))
      {body, 422} ->
        conn = assign(conn, :errors, body[:errors])
        # NOTE(review): this passes the full request params map as the
        # category, unlike update/2 which passes only category_params —
        # confirm the template expects this shape.
        render(conn, "new.html", category: params)
    end
  end
def show(conn, %{"id" => id}) do
category = API.Client.get_category!(id)
render(conn, "show.html", category: category)
end
def edit(conn, %{"id" => id}) do
category = API.Client.get_category!(id)
render(conn, "edit.html", category: category)
end
def update(conn, %{"id" => id, "category" => category_params}) do
response = API.Client.update_category!(id, %{category: category_params})
category = Map.put(category_params, "id", id)
case {response.body, response.status_code} do
{_, 200} ->
conn
|> put_flash(:info, "Category updated successfully.")
|> redirect(to: category_path(conn, :show, category["id"]))
{body, 422} ->
conn = assign(conn, :errors, body[:errors])
render(conn, "edit.html", category: category)
end
end
def delete(conn, %{"id" => id}) do
response = API.Client.delete_category!(id)
case response.status_code do
204 ->
conn
|> put_flash(:info, "Category deleted successfully.")
|> redirect(to: category_path(conn, :index))
404 ->
conn
|> put_flash(:error, "Category id #{id} Not Found.")
|> redirect(to: category_path(conn, :index))
end
end
end
| 29.685714 | 76 | 0.624158 |
08e1f58ad6a175be00892f54a6c7829451b8f46d | 2,139 | ex | Elixir | lib/events_tools_web/controllers/venue_controller.ex | Apps-Team/conferencetools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | [
"Apache-2.0"
] | null | null | null | lib/events_tools_web/controllers/venue_controller.ex | Apps-Team/conferencetools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | [
"Apache-2.0"
] | 6 | 2017-10-05T20:16:34.000Z | 2017-10-05T20:36:11.000Z | lib/events_tools_web/controllers/venue_controller.ex | apps-team/events-tools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | [
"Apache-2.0"
] | null | null | null | defmodule EventsToolsWeb.VenueController do
use EventsToolsWeb, :controller
alias EventsTools.Venues
def index(conn, _params, current_user, claims) do
venues = Venues.list_venues()
render(conn, "index.html", venues: venues, current_user: current_user, claims: claims)
end
def new(conn, _params, current_user, claims) do
changeset = Venues.change_venue(%EventsTools.Venues.Venue{})
render(conn, "new.html", changeset: changeset, current_user: current_user, claims: claims)
end
def create(conn, %{"venue" => venue_params}, current_user, claims) do
case Venues.create_venue(venue_params) do
{:ok, venue} ->
conn
|> put_flash(:info, "Venue created successfully.")
|> redirect(to: venue_path(conn, :show, venue))
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "new.html", changeset: changeset, current_user: current_user, claims: claims)
end
end
def show(conn, %{"id" => id}, current_user, claims) do
venue = Venues.get_venue!(id)
render(conn, "show.html", venue: venue, current_user: current_user, claims: claims)
end
def edit(conn, %{"id" => id}, current_user, claims) do
venue = Venues.get_venue!(id)
changeset = Venues.change_venue(venue)
render(conn, "edit.html", venue: venue, changeset: changeset, current_user: current_user, claims: claims)
end
def update(conn, %{"id" => id, "venue" => venue_params}, current_user, claims) do
venue = Venues.get_venue!(id)
case Venues.update_venue(venue, venue_params) do
{:ok, venue} ->
conn
|> put_flash(:info, "Venue updated successfully.")
|> redirect(to: venue_path(conn, :show, venue))
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "edit.html", venue: venue, changeset: changeset, current_user: current_user, claims: claims)
end
end
def delete(conn, %{"id" => id}, _current_user, _claims) do
venue = Venues.get_venue!(id)
{:ok, _venue} = Venues.delete_venue(venue)
conn
|> put_flash(:info, "Venue deleted successfully.")
|> redirect(to: venue_path(conn, :index))
end
end
| 35.65 | 113 | 0.669004 |
08e20c74935b151e8f157ca986509e0d608c5be0 | 496 | ex | Elixir | lib/webchat/chat/chatrooms.ex | Bwuak/webchat | b6668be508481a1ff480303affaebdaf28604f41 | [
"MIT"
] | null | null | null | lib/webchat/chat/chatrooms.ex | Bwuak/webchat | b6668be508481a1ff480303affaebdaf28604f41 | [
"MIT"
] | 1 | 2020-08-04T19:50:09.000Z | 2020-08-04T19:50:09.000Z | lib/webchat/chat/chatrooms.ex | Bwuak/webchat | b6668be508481a1ff480303affaebdaf28604f41 | [
"MIT"
] | null | null | null | defmodule Webchat.Chat.Chatrooms do
import Ecto.Query, warn: false
alias Webchat.Repo
alias Webchat.Chat.Models.Chatroom
def create(server_id, attrs \\ %{}) do
%Chatroom{server_id: server_id}
|> Chatroom.changeset(attrs)
|> Repo.insert()
end
  # Fetches a chatroom by id, raising if it does not exist.
  def get!(chatroom_id), do: Repo.get!(Chatroom, chatroom_id)
  # Fetches a chatroom by id, returning nil if it does not exist.
  def get(chatroom_id), do: Repo.get(Chatroom, chatroom_id)
def change(%Chatroom{} = chatroom, attrs \\ %{}) do
Chatroom.changeset(chatroom, attrs)
end
end
| 22.545455 | 61 | 0.695565 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.