hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
089f14efaa9b318930330622a2e778ceb80fe41d | 1,155 | ex | Elixir | lib/tabs_doornwindow.ex | zolihorvath/element-parsers | dc9927db1c39b8f77e4c22f10dc38f29c6a41ab9 | [
"MIT"
] | null | null | null | lib/tabs_doornwindow.ex | zolihorvath/element-parsers | dc9927db1c39b8f77e4c22f10dc38f29c6a41ab9 | [
"MIT"
] | null | null | null | lib/tabs_doornwindow.ex | zolihorvath/element-parsers | dc9927db1c39b8f77e4c22f10dc38f29c6a41ab9 | [
"MIT"
] | null | null | null | defmodule Parser do
  use Platform.Parsing.Behaviour

  # ELEMENT IoT Parser for TrackNet Tabs object locator
  # According to documentation provided by TrackNet
  # Payload Description Version v1.3
  # NOTE(review): file name suggests a door & window sensor payload — confirm
  # against the TrackNet v1.3 payload description.

  @doc """
  Parses the 8-byte Tabs payload: status byte, battery byte, temperature
  byte, 16-bit little-endian elapsed time and 24-bit little-endian count.
  Returns a map of the decoded readings.
  """
  def parse(<<status, battery, temp, time::little-16, count::little-24>>, _meta) do
    # Bit 0 of the status byte carries the contact state; the upper 7 bits
    # are reserved for future use. (Was `rfu` — bound twice and never read,
    # which produced two "unused variable" compiler warnings.)
    <<_rfu::7, state::1>> = <<status>>

    # High nibble: remaining capacity code (0..15); low nibble: voltage code.
    <<rem_cap::4, voltage::4>> = <<battery>>

    # 7-bit temperature with a fixed -32 degree offset; top bit reserved.
    <<_rfu::1, temperature::7>> = <<temp>>

    contact =
      case state do
        0 -> "closed"
        1 -> "open"
      end

    %{
      # capacity code scaled to a percentage
      battery_state: 100 * (rem_cap / 15),
      # voltage code mapped to volts: (25 + code) / 10
      battery_voltage: (25 + voltage) / 10,
      temperature: temperature - 32,
      contact: contact,
      time_elapsed_since_trigger: time,
      total_count: count
    }
  end

  @doc """
  Field metadata used by the platform to label and unit-annotate the
  values produced by `parse/2`.
  """
  def fields do
    [
      %{
        "field" => "battery_state",
        "display" => "Battery state",
        "unit" => "%"
      },
      %{
        "field" => "battery_voltage",
        "display" => "Battery voltage",
        "unit" => "V"
      },
      %{
        "field" => "temperature",
        "display" => "Temperature",
        "unit" => "°C"
      },
      %{
        "field" => "contact",
        "display" => "Contact"
      }
    ]
  end
end
| 21.388889 | 83 | 0.531602 |
089f63b89018fbd932f3ec3ca68b82c5ed341ea8 | 1,397 | ex | Elixir | test/support/data_case.ex | necosta/raspberry-pi-x | 478f49de74b03d4a652c35ef185735ca0f2f6878 | [
"MIT"
] | null | null | null | test/support/data_case.ex | necosta/raspberry-pi-x | 478f49de74b03d4a652c35ef185735ca0f2f6878 | [
"MIT"
] | null | null | null | test/support/data_case.ex | necosta/raspberry-pi-x | 478f49de74b03d4a652c35ef185735ca0f2f6878 | [
"MIT"
] | null | null | null | defmodule RaspberryPi.DataCase do
  @moduledoc """
  This module defines the setup for tests requiring
  access to the application's data layer.

  You may define functions here to be used as helpers in
  your tests.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      alias RaspberryPi.Repo

      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      # Fixed: previously `import Hello.DataCase`, a leftover from the
      # project this file was copied from; that module does not exist here.
      import RaspberryPi.DataCase
    end
  end

  setup tags do
    # Check out a sandboxed DB connection for this test.
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(RaspberryPi.Repo)

    # Non-async tests share the connection with spawned processes.
    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(RaspberryPi.Repo, {:shared, self()})
    end

    :ok
  end

  @doc """
  A helper that transforms changeset errors into a map of messages.

      assert {:error, changeset} = Accounts.create_user(%{password: "short"})
      assert "password is too short" in errors_on(changeset).password
      assert %{password: ["password is too short"]} = errors_on(changeset)

  """
  def errors_on(changeset) do
    Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
      # Interpolate each option (e.g. %{count}) into the error message.
      Enum.reduce(opts, message, fn {key, value}, acc ->
        String.replace(acc, "%{#{key}}", to_string(value))
      end)
    end)
  end
end
| 25.87037 | 77 | 0.682892 |
089f6de2423f7a307d99ab77caff1a9925453cbb | 1,055 | ex | Elixir | lib/codes/codes_l71.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_l71.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_l71.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_L71 do
  @moduledoc false

  alias IcdCode.ICDCode

  # ICD-10 category L71. Each public function returns the fully populated
  # %ICDCode{} struct for one code; full/short/category names are identical
  # for every entry in this category, so a single builder suffices.

  def _L710, do: build("L710", "0", "Perioral dermatitis")

  def _L711, do: build("L711", "1", "Rhinophyma")

  def _L718, do: build("L718", "8", "Other rosacea")

  def _L719, do: build("L719", "9", "Rosacea, unspecified")

  # Assembles the struct shared by all L71 codes.
  defp build(full_code, short_code, name) do
    %ICDCode{
      full_code: full_code,
      category_code: "L71",
      short_code: short_code,
      full_name: name,
      short_name: name,
      category_name: name
    }
  end
end
| 24.534884 | 47 | 0.566825 |
089f6eca68b43b07d76f30440ef6cb471c6c49d6 | 1,110 | exs | Elixir | config/config.exs | lucacorti/copper | 1781dbdaa9b8aeda2b646537b03e74213d4abe93 | [
"MIT"
] | null | null | null | config/config.exs | lucacorti/copper | 1781dbdaa9b8aeda2b646537b03e74213d4abe93 | [
"MIT"
] | 9 | 2019-08-20T07:04:10.000Z | 2022-01-17T08:29:46.000Z | config/config.exs | lucacorti/copper | 1781dbdaa9b8aeda2b646537b03e74213d4abe93 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE: on Elixir >= 1.9 the `Mix.Config` module is deprecated in favor of
# `import Config`; kept as-is here for compatibility with older releases.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure for your application as:
#
#     config :copper, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:copper, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# Parenthesized Mix.env() call per the formatter convention.
import_config "#{Mix.env()}.exs"
| 34.6875 | 73 | 0.754054 |
089f8bd8b84f527ab91cc6a0345a0f01fb83de3b | 2,237 | ex | Elixir | clients/android_publisher/lib/google_api/android_publisher/v3/model/voided_purchases_list_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/android_publisher/lib/google_api/android_publisher/v3/model/voided_purchases_list_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/android_publisher/lib/google_api/android_publisher/v3/model/voided_purchases_list_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidPublisher.V3.Model.VoidedPurchasesListResponse do
  @moduledoc """
  Response for the voidedpurchases.list API.

  ## Attributes

  *   `pageInfo` (*type:* `GoogleApi.AndroidPublisher.V3.Model.PageInfo.t`, *default:* `nil`) - General pagination information.
  *   `tokenPagination` (*type:* `GoogleApi.AndroidPublisher.V3.Model.TokenPagination.t`, *default:* `nil`) - Pagination information for token pagination.
  *   `voidedPurchases` (*type:* `list(GoogleApi.AndroidPublisher.V3.Model.VoidedPurchase.t)`, *default:* `nil`) - 
  """

  # Auto-generated model module ("do not edit manually" per the file header).
  # `use GoogleApi.Gax.ModelBase` supplies the field/2,3 macro below plus the
  # generated decode/2 and struct plumbing shared by all API models.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :pageInfo => GoogleApi.AndroidPublisher.V3.Model.PageInfo.t(),
          :tokenPagination => GoogleApi.AndroidPublisher.V3.Model.TokenPagination.t(),
          :voidedPurchases => list(GoogleApi.AndroidPublisher.V3.Model.VoidedPurchase.t())
        }

  # Each field/2,3 call declares how one JSON attribute maps onto a nested
  # model module; `type: :list` marks a repeated attribute.
  field(:pageInfo, as: GoogleApi.AndroidPublisher.V3.Model.PageInfo)
  field(:tokenPagination, as: GoogleApi.AndroidPublisher.V3.Model.TokenPagination)
  field(:voidedPurchases, as: GoogleApi.AndroidPublisher.V3.Model.VoidedPurchase, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidPublisher.V3.Model.VoidedPurchasesListResponse do
  # Delegate decoding to the decode/2 generated on the model itself.
  def decode(value, options),
    do: GoogleApi.AndroidPublisher.V3.Model.VoidedPurchasesListResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AndroidPublisher.V3.Model.VoidedPurchasesListResponse do
  # Encoding is handled generically for all models by GoogleApi.Gax.ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 42.207547 | 154 | 0.757711 |
089f95d12ad42f2d40994cd2ea27cb636a7eb5fb | 173 | ex | Elixir | test/support/rabbit_mq/test_topic_publisher.ex | djeusette/railway_ipc | 30fb58726e43148fe72fbfe63fdf46161fc933cf | [
"MIT"
] | null | null | null | test/support/rabbit_mq/test_topic_publisher.ex | djeusette/railway_ipc | 30fb58726e43148fe72fbfe63fdf46161fc933cf | [
"MIT"
] | null | null | null | test/support/rabbit_mq/test_topic_publisher.ex | djeusette/railway_ipc | 30fb58726e43148fe72fbfe63fdf46161fc933cf | [
"MIT"
] | null | null | null | defmodule RailwayIpc.RabbitMQ.TestTopicPublisher do
  @moduledoc false

  # Test publisher wired to the test broker; publishes on the
  # "railway_ipc.topic" exchange.
  use RailwayIpc.Publisher,
    broker: RailwayIpc.RabbitMQ.TestBroker,
    exchange: "railway_ipc.topic"
end
| 34.6 | 97 | 0.83237 |
089f993431d8ea6e8a8085cfda8a5ddd227b0470 | 4,224 | ex | Elixir | lib/oli/delivery/attempts/activity_lifecycle/persistence.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 1 | 2022-03-17T20:35:47.000Z | 2022-03-17T20:35:47.000Z | lib/oli/delivery/attempts/activity_lifecycle/persistence.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 9 | 2021-11-02T16:52:09.000Z | 2022-03-25T15:14:01.000Z | lib/oli/delivery/attempts/activity_lifecycle/persistence.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | null | null | null | defmodule Oli.Delivery.Attempts.ActivityLifecycle.Persistence do
  @moduledoc """
  Routines for persisting evaluations for part attempts.
  """

  import Ecto.Query, warn: false

  alias Oli.Repo

  alias Oli.Delivery.Evaluation.Actions.{
    FeedbackAction,
    NavigationAction,
    StateUpdateAction,
    SubmissionAction
  }

  alias Oli.Delivery.Attempts.Core.PartAttempt

  @doc """
  Given a list of evaluations that match a list of part_input submissions,
  persist the results of each evaluation to the corresponding part_attempt
  record.

  On success, continue persistence by calling a roll_up function that may or
  may not roll up the results of these part_attempts to the activity attempt.

  The return value is `{:ok, [%{}]}`, where the maps in the list are the
  evaluation results that will be sent back to the client. When `replace` is
  `true`, part attempts that were already evaluated are overwritten; when it
  is `false` (the default for the 3-arity form) only not-yet-evaluated
  attempts are updated.
  """
  def persist_evaluations({:error, error}, _, _), do: {:error, error}

  def persist_evaluations({:ok, evaluations}, part_inputs, roll_up_fn) do
    persist_evaluations({:ok, evaluations}, part_inputs, roll_up_fn, false)
  end

  def persist_evaluations({:ok, evaluations}, part_inputs, roll_up_fn, replace) do
    evaluated_inputs = Enum.zip(part_inputs, evaluations)

    case Enum.reduce_while(evaluated_inputs, {:ok, replace, []}, &persist_single_evaluation/2) do
      {:ok, _, results} -> roll_up_fn.({:ok, results})
      error -> error
    end
  end

  # Persist the result of a single evaluation for a single part_input
  # submission. Reducer for Enum.reduce_while/3 above; the accumulator is
  # {:ok, replace, results_so_far}.

  # An upstream evaluation error halts the entire reduction.
  defp persist_single_evaluation({_, {:error, error}}, _), do: {:halt, {:error, error}}

  # Navigation and state-update actions require no database write; they are
  # simply accumulated so they can be returned to the client.
  defp persist_single_evaluation(
         {_, {:ok, %NavigationAction{} = action_result}},
         {:ok, replace, results}
       ) do
    {:cont, {:ok, replace, results ++ [action_result]}}
  end

  defp persist_single_evaluation(
         {_, {:ok, %StateUpdateAction{} = action_result}},
         {:ok, replace, results}
       ) do
    {:cont, {:ok, replace, results ++ [action_result]}}
  end

  # A feedback action marks the part attempt evaluated, recording score,
  # out_of and feedback alongside the submitted input.
  defp persist_single_evaluation(
         {%{attempt_guid: attempt_guid, input: input},
          {:ok,
           %FeedbackAction{
             feedback: feedback,
             score: score,
             out_of: out_of
           } = feedback_action}},
         {:ok, replace, results}
       ) do
    now = DateTime.utc_now()

    update_result =
      attempt_guid
      |> part_attempt_query(replace)
      |> Repo.update_all(
        set: [
          response: input,
          lifecycle_state: :evaluated,
          date_evaluated: now,
          date_submitted: now,
          score: score,
          out_of: out_of,
          feedback: feedback
        ]
      )

    # Repo.update_all/3 returns {count, returning}; the previous `nil ->`
    # branch here was unreachable and has been removed. Exactly one row must
    # be updated - zero means the attempt was missing or already evaluated
    # (with replace: false), anything else indicates bad data.
    case update_result do
      {1, _} -> {:cont, {:ok, replace, results ++ [feedback_action]}}
      _ -> {:halt, {:error, :error}}
    end
  end

  # A submission action records the input and marks the part attempt as
  # submitted but not yet evaluated.
  defp persist_single_evaluation(
         {%{attempt_guid: attempt_guid, input: input},
          {:ok, %SubmissionAction{} = submission_action}},
         {:ok, replace, results}
       ) do
    now = DateTime.utc_now()

    update_result =
      attempt_guid
      |> part_attempt_query(replace)
      |> Repo.update_all(
        set: [
          response: input,
          lifecycle_state: :submitted,
          date_evaluated: nil,
          date_submitted: now
        ]
      )

    case update_result do
      {1, _} -> {:cont, {:ok, replace, results ++ [submission_action]}}
      _ -> {:halt, {:error, :error}}
    end
  end

  # Builds the query selecting the part attempt to update. Unless `replace`
  # is true, attempts that already carry a date_evaluated are excluded so a
  # completed evaluation is never silently overwritten. (Extracted from the
  # two clauses above, which duplicated this construction verbatim.)
  defp part_attempt_query(attempt_guid, replace) do
    query = from(p in PartAttempt, where: p.attempt_guid == ^attempt_guid)

    if replace do
      query
    else
      where(query, [p], is_nil(p.date_evaluated))
    end
  end
end
| 26.904459 | 101 | 0.590199 |
089fb2b42198f4e25357e8ace79145bf54c7130c | 351 | exs | Elixir | webapp/priv/repo/seeds.exs | zmaril/penmark | 992f570da3bdf819f912505ba9b6531db9dcb80b | [
"FSFAP"
] | null | null | null | webapp/priv/repo/seeds.exs | zmaril/penmark | 992f570da3bdf819f912505ba9b6531db9dcb80b | [
"FSFAP"
] | null | null | null | webapp/priv/repo/seeds.exs | zmaril/penmark | 992f570da3bdf819f912505ba9b6531db9dcb80b | [
"FSFAP"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Penmark.Repo.insert!(%Penmark.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.25 | 61 | 0.706553 |
089fba09fb432a6efe6f545eb4a665eac7443b44 | 1,001 | exs | Elixir | priv/repo/migrations/20210613092643_rename_participators.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | priv/repo/migrations/20210613092643_rename_participators.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | priv/repo/migrations/20210613092643_rename_participators.exs | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.Repo.Migrations.RenameParticipators do
  use Ecto.Migration

  # CMS article tables whose misspelled "participators" columns are renamed.
  @tables [:cms_posts, :cms_jobs, :cms_repos, :cms_blogs]

  def change do
    # For every table, rename both the participants list column and its
    # counter column:
    #   article_comments_participators        -> article_comments_participants
    #   article_comments_participators_count  -> article_comments_participants_count
    for table_name <- @tables do
      rename(table(table_name), :article_comments_participators,
        to: :article_comments_participants
      )

      rename(table(table_name), :article_comments_participators_count,
        to: :article_comments_participants_count
      )
    end
  end
end
| 33.366667 | 98 | 0.784216 |
089fd14bb02eed73d242ed7bc1bf9a1808d7da1d | 2,104 | exs | Elixir | apps/ewallet/test/ewallet/fetchers/transaction_request_fetcher_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | 1 | 2018-12-07T06:21:21.000Z | 2018-12-07T06:21:21.000Z | apps/ewallet/test/ewallet/fetchers/transaction_request_fetcher_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/test/ewallet/fetchers/transaction_request_fetcher_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | null | null | null | defmodule EWallet.TransactionRequestFetcherTest do
  use EWallet.LocalLedgerCase, async: true
  alias EWallet.{TransactionRequestFetcher, TransactionRequestGate}
  alias EWalletDB.{TransactionRequest, User}

  # Shared fixtures: a user and an account, each with a primary wallet,
  # plus a token to denominate requests in.
  setup do
    {:ok, user} = :user |> params_for() |> User.insert()
    {:ok, account} = :account |> params_for() |> Account.insert()
    token = insert(:token)
    user_wallet = User.get_primary_wallet(user)
    account_wallet = Account.get_primary_wallet(account)

    %{
      user: user,
      token: token,
      user_wallet: user_wallet,
      account_wallet: account_wallet,
      account: account
    }
  end

  describe "get/1" do
    # Description fixed: previously read "returns the request do when given
    # valid ID" (stray "do").
    test "returns the request when given a valid ID", meta do
      {:ok, request} =
        TransactionRequestGate.create(meta.user, %{
          "type" => "receive",
          "token_id" => meta.token.id,
          "correlation_id" => "123",
          "amount" => 1_000,
          "address" => meta.user_wallet.address
        })

      assert {:ok, request} = TransactionRequestFetcher.get(request.id)
      assert %TransactionRequest{} = request
    end

    # Descriptions fixed below: these tests assert an error tuple, not nil,
    # but were previously labelled "returns nil".
    test "returns a 'transaction_request_not_found' error when given nil" do
      assert TransactionRequestFetcher.get(nil) == {:error, :transaction_request_not_found}
    end

    test "returns a 'transaction_request_not_found' error when given an invalid ID" do
      assert TransactionRequestFetcher.get("123") == {:error, :transaction_request_not_found}
    end
  end

  describe "get_with_lock/1" do
    test "returns the request when given a valid ID" do
      request = insert(:transaction_request)
      assert {:ok, request} = TransactionRequestFetcher.get_with_lock(request.id)
      assert %TransactionRequest{} = request
    end

    test "returns a 'transaction_request_not_found' error when given nil" do
      assert TransactionRequestFetcher.get_with_lock(nil) ==
               {:error, :transaction_request_not_found}
    end

    test "returns a 'transaction_request_not_found' error when given invalid UUID" do
      assert TransactionRequestFetcher.get_with_lock("123") ==
               {:error, :transaction_request_not_found}
    end
  end
end
| 32.875 | 93 | 0.675856 |
08a018b958ada2cb099521bf20b959aec810f666 | 578 | ex | Elixir | lib/gossip/application.ex | bhaveshpoddar94/GossipProtocol | 91e4c8223edd7fa2c2c7b7c2641f1d7f50d85643 | [
"MIT"
] | null | null | null | lib/gossip/application.ex | bhaveshpoddar94/GossipProtocol | 91e4c8223edd7fa2c2c7b7c2641f1d7f50d85643 | [
"MIT"
] | null | null | null | lib/gossip/application.ex | bhaveshpoddar94/GossipProtocol | 91e4c8223edd7fa2c2c7b7c2641f1d7f50d85643 | [
"MIT"
] | null | null | null | defmodule Gossip.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  # @impl true added: start/2 is the Application behaviour callback and was
  # previously unannotated.
  @impl true
  def start(_type, _args) do
    # List all child processes to be supervised
    children = [
      # Starts a worker by calling: Gossip.Worker.start_link(arg)
      # {Gossip.Worker, arg},
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: Gossip.Supervisor]
    Supervisor.start_link(children, opts)
  end
end
| 27.52381 | 65 | 0.707612 |
08a075cc8bffc827172d8ee84087c7d1d6369f45 | 135 | exs | Elixir | test/dot_sql_test.exs | archan937/dot_sql | 5511c76a3b7b1d90f7d80acc52025bd533410160 | [
"Unlicense",
"MIT"
] | null | null | null | test/dot_sql_test.exs | archan937/dot_sql | 5511c76a3b7b1d90f7d80acc52025bd533410160 | [
"Unlicense",
"MIT"
] | null | null | null | test/dot_sql_test.exs | archan937/dot_sql | 5511c76a3b7b1d90f7d80acc52025bd533410160 | [
"Unlicense",
"MIT"
] | null | null | null | defmodule DotSqlTest do
  use ExUnit.Case

  # Execute any doctest examples embedded in DotSql's documentation.
  doctest DotSql

  test "greets the world" do
    # Smoke test for the generated stub: hello/0 must return :world.
    assert :world == DotSql.hello()
  end
end
| 15 | 35 | 0.703704 |
08a076a32fdc5baa9e95acfee24ee4f6df95ba3a | 1,754 | ex | Elixir | lib/simple_graphql_client/subscription_server.ex | marisradu/simple_graphql_client | 4e52d7712cc4fe003df5a5431180f35d6c2f7860 | [
"MIT"
] | 15 | 2018-10-14T23:30:56.000Z | 2021-02-20T21:49:42.000Z | lib/simple_graphql_client/subscription_server.ex | marisradu/simple_graphql_client | 4e52d7712cc4fe003df5a5431180f35d6c2f7860 | [
"MIT"
] | 1 | 2019-04-29T06:29:30.000Z | 2019-08-05T20:26:52.000Z | lib/simple_graphql_client/subscription_server.ex | marisradu/simple_graphql_client | 4e52d7712cc4fe003df5a5431180f35d6c2f7860 | [
"MIT"
] | 4 | 2019-03-14T23:07:49.000Z | 2019-08-05T18:34:37.000Z | defmodule SimpleGraphqlClient.SubscriptionServer do
  @moduledoc """
  GenServer that handles all subscription related logic.

  Holds a map of subscription name => list of callbacks (functions) or
  destination pids, and relays each incoming subscription notification to
  every registered subscriber.
  """
  use GenServer
  require Logger

  alias SimpleGraphqlClient.WebSocket

  ## Client API

  def start_link do
    state = %{
      socket: WebSocket,
      subscriptions: %{}
    }

    GenServer.start_link(__MODULE__, state, name: __MODULE__)
  end

  @doc """
  Registers `callback_or_dest` (a 1-arity function or a pid) for
  `subscription_name` and asks the websocket to start the subscription.
  """
  def subscribe(subscription_name, callback_or_dest, query, variables \\ []) do
    GenServer.cast(
      __MODULE__,
      {:subscribe, subscription_name, callback_or_dest, query, variables}
    )
  end

  ## Server callbacks
  # @impl true annotations added below: these are GenServer behaviour
  # callbacks and were previously unannotated.

  @impl true
  def init(state) do
    {:ok, state}
  end

  @impl true
  def handle_cast(
        {:subscribe, subscription_name, callback_or_dest, query, variables},
        %{socket: socket, subscriptions: subscriptions} = state
      ) do
    WebSocket.subscribe(socket, self(), subscription_name, query, variables)

    # Prepend the new subscriber to any existing ones for this name.
    callbacks = Map.get(subscriptions, subscription_name, [])
    subscriptions = Map.put(subscriptions, subscription_name, [callback_or_dest | callbacks])
    state = Map.put(state, :subscriptions, subscriptions)

    {:noreply, state}
  end

  # Incoming notifications (from SimpleGraphqlClient.WebSocket): fan the
  # response out to every subscriber registered under this name.
  def handle_cast(
        {:subscription, subscription_name, response},
        %{subscriptions: subscriptions} = state
      ) do
    subscriptions
    |> Map.get(subscription_name, [])
    |> Enum.each(fn callback_or_dest -> handle_callback_or_dest(callback_or_dest, response) end)

    {:noreply, state}
  end

  # Channel-join acknowledgement; nothing to do.
  def handle_cast({:joined}, state) do
    {:noreply, state}
  end

  # Invoke a function subscriber directly; otherwise treat it as a pid and
  # deliver the response as a message.
  defp handle_callback_or_dest(callback_or_dest, response) do
    if is_function(callback_or_dest) do
      callback_or_dest.(response)
    else
      send(callback_or_dest, response)
    end
  end
end
| 26.575758 | 96 | 0.702965 |
08a0887972489247dcaa8551a0c3f96358059089 | 1,592 | exs | Elixir | apps/raptor/mix.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | null | null | null | apps/raptor/mix.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | null | null | null | apps/raptor/mix.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | null | null | null | defmodule Raptor.MixProject do
  @moduledoc false

  use Mix.Project

  # Umbrella child app: build, deps, config and lockfile paths all point at
  # the umbrella root two directories up.
  def project do
    [
      app: :raptor,
      compilers: [:phoenix] ++ Mix.compilers(),
      version: "0.1.0",
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.10",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      # Test locations and compile paths vary by Mix env - see helpers below.
      test_paths: test_paths(Mix.env()),
      elixirc_paths: elixirc_paths(Mix.env()),
      aliases: aliases()
    ]
  end

  # OTP application callback module and extra runtime applications.
  def application do
    [
      mod: {Raptor.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  defp deps do
    [
      {:brook, "~> 0.4"},
      {:divo, "~> 1.1", only: [:dev, :test, :integration]},
      {:phoenix, "~> 1.4"},
      {:phoenix_html, "~> 2.14.1"},
      {:phoenix_pubsub, "~> 2.0"},
      {:placebo, "~> 2.0.0-rc2", only: [:dev, :test, :integration]},
      {:plug_heartbeat, "~> 0.2.0"},
      {:properties, in_umbrella: true},
      {:redix, "~> 0.10"},
      {:smart_city, "~> 3.0"},
      {:smart_city_test, "~> 0.10.1", only: [:test, :integration]},
      {:telemetry_event, in_umbrella: true},
      {:distillery, "~> 2.1"}
    ]
  end

  # Unit tests run by default; integration tests live in a separate tree.
  defp test_paths(:integration), do: ["test/integration"]
  defp test_paths(_), do: ["test/unit"]

  # Test envs compile their support/helper code alongside lib.
  defp elixirc_paths(:test), do: ["test/utils", "test/unit/support", "lib"]
  defp elixirc_paths(:integration), do: ["test/utils", "test/integration/support", "lib"]
  defp elixirc_paths(_), do: ["lib"]

  # `mix start` launches the Phoenix endpoint.
  defp aliases() do
    [
      start: ["phx.server"]
    ]
  end
end
| 26.533333 | 89 | 0.543342 |
08a08fba7cd1c422650a99dd443e0f5834126a6d | 7,683 | ex | Elixir | clients/cloud_search/lib/google_api/cloud_search/v1/model/property_definition.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | clients/cloud_search/lib/google_api/cloud_search/v1/model/property_definition.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | clients/cloud_search/lib/google_api/cloud_search/v1/model/property_definition.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudSearch.V1.Model.PropertyDefinition do
@moduledoc """
The definition of a property within an object.
## Attributes
* `booleanPropertyOptions` (*type:* `GoogleApi.CloudSearch.V1.Model.BooleanPropertyOptions.t`, *default:* `nil`) -
* `datePropertyOptions` (*type:* `GoogleApi.CloudSearch.V1.Model.DatePropertyOptions.t`, *default:* `nil`) -
* `displayOptions` (*type:* `GoogleApi.CloudSearch.V1.Model.PropertyDisplayOptions.t`, *default:* `nil`) - Options that determine how the property is displayed in the Cloud Search
results page if it is specified to be displayed in the object's
display options
.
* `doublePropertyOptions` (*type:* `GoogleApi.CloudSearch.V1.Model.DoublePropertyOptions.t`, *default:* `nil`) -
* `enumPropertyOptions` (*type:* `GoogleApi.CloudSearch.V1.Model.EnumPropertyOptions.t`, *default:* `nil`) -
* `htmlPropertyOptions` (*type:* `GoogleApi.CloudSearch.V1.Model.HtmlPropertyOptions.t`, *default:* `nil`) -
* `integerPropertyOptions` (*type:* `GoogleApi.CloudSearch.V1.Model.IntegerPropertyOptions.t`, *default:* `nil`) -
* `isFacetable` (*type:* `boolean()`, *default:* `nil`) - Indicates that the property can be used for generating facets. Cannot be
true for properties whose type is object. IsReturnable must be true to set
this option.
Only supported for Boolean, Enum, and Text properties.
* `isRepeatable` (*type:* `boolean()`, *default:* `nil`) - Indicates that multiple values are allowed for the property. For example, a
document only has one description but can have multiple comments. Cannot be
true for properties whose type is a boolean.
If set to false, properties that contain more than one value will cause the
indexing request for that item to be rejected.
* `isReturnable` (*type:* `boolean()`, *default:* `nil`) - Indicates that the property identifies data that should be returned in
search results via the Query API. If set to *true*, indicates that Query
API users can use matching property fields in results. However, storing
fields requires more space allocation and uses more bandwidth for search
queries, which impacts performance over large datasets. Set to *true* here
only if the field is needed for search results. Cannot be true for
properties whose type is an object.
* `isSortable` (*type:* `boolean()`, *default:* `nil`) - Indicates that the property can be used for sorting. Cannot be true for
properties that are repeatable. Cannot be true for properties whose type
is object or user identifier. IsReturnable must be true to set this option.
Only supported for Boolean, Date, Double, Integer, and Timestamp
properties.
* `isSuggestable` (*type:* `boolean()`, *default:* `nil`) - Indicates that the property can be used for generating query suggestions.
* `isWildcardSearchable` (*type:* `boolean()`, *default:* `nil`) - Indicates that users can perform wildcard search for this
property. Only supported for Text properties. IsReturnable must be true to
set this option. In a given datasource maximum of 5 properties can be
marked as is_wildcard_searchable.
* `name` (*type:* `String.t`, *default:* `nil`) - The name of the property. Item indexing requests sent to the Indexing API
should set the property name
equal to this value. For example, if name is *subject_line*, then indexing
requests for document items with subject fields should set the
name for that field equal to
*subject_line*. Use the name as the identifier for the object property.
Once registered as a property for an object, you cannot re-use this name
for another property within that object.
The name must start with a letter and can only contain letters (A-Z, a-z)
or numbers (0-9).
The maximum length is 256 characters.
* `objectPropertyOptions` (*type:* `GoogleApi.CloudSearch.V1.Model.ObjectPropertyOptions.t`, *default:* `nil`) -
* `textPropertyOptions` (*type:* `GoogleApi.CloudSearch.V1.Model.TextPropertyOptions.t`, *default:* `nil`) -
* `timestampPropertyOptions` (*type:* `GoogleApi.CloudSearch.V1.Model.TimestampPropertyOptions.t`, *default:* `nil`) -
"""
# Pulls in the ModelBase macros (`field/2`, generated `decode/2`, JSON
# plumbing) shared by all generated Google API model modules.
use GoogleApi.Gax.ModelBase

# Struct type for a property definition. Each `*PropertyOptions` entry is
# a nested model struct; the `is*` flags and `name` are plain scalars.
@type t :: %__MODULE__{
:booleanPropertyOptions => GoogleApi.CloudSearch.V1.Model.BooleanPropertyOptions.t(),
:datePropertyOptions => GoogleApi.CloudSearch.V1.Model.DatePropertyOptions.t(),
:displayOptions => GoogleApi.CloudSearch.V1.Model.PropertyDisplayOptions.t(),
:doublePropertyOptions => GoogleApi.CloudSearch.V1.Model.DoublePropertyOptions.t(),
:enumPropertyOptions => GoogleApi.CloudSearch.V1.Model.EnumPropertyOptions.t(),
:htmlPropertyOptions => GoogleApi.CloudSearch.V1.Model.HtmlPropertyOptions.t(),
:integerPropertyOptions => GoogleApi.CloudSearch.V1.Model.IntegerPropertyOptions.t(),
:isFacetable => boolean(),
:isRepeatable => boolean(),
:isReturnable => boolean(),
:isSortable => boolean(),
:isSuggestable => boolean(),
:isWildcardSearchable => boolean(),
:name => String.t(),
:objectPropertyOptions => GoogleApi.CloudSearch.V1.Model.ObjectPropertyOptions.t(),
:textPropertyOptions => GoogleApi.CloudSearch.V1.Model.TextPropertyOptions.t(),
:timestampPropertyOptions => GoogleApi.CloudSearch.V1.Model.TimestampPropertyOptions.t()
}

# Field registrations for (de)serialization. The `as:` option tells
# ModelBase to decode the nested JSON object into the given model struct;
# fields without `as:` are decoded as plain values.
field(:booleanPropertyOptions, as: GoogleApi.CloudSearch.V1.Model.BooleanPropertyOptions)
field(:datePropertyOptions, as: GoogleApi.CloudSearch.V1.Model.DatePropertyOptions)
field(:displayOptions, as: GoogleApi.CloudSearch.V1.Model.PropertyDisplayOptions)
field(:doublePropertyOptions, as: GoogleApi.CloudSearch.V1.Model.DoublePropertyOptions)
field(:enumPropertyOptions, as: GoogleApi.CloudSearch.V1.Model.EnumPropertyOptions)
field(:htmlPropertyOptions, as: GoogleApi.CloudSearch.V1.Model.HtmlPropertyOptions)
field(:integerPropertyOptions, as: GoogleApi.CloudSearch.V1.Model.IntegerPropertyOptions)
field(:isFacetable)
field(:isRepeatable)
field(:isReturnable)
field(:isSortable)
field(:isSuggestable)
field(:isWildcardSearchable)
field(:name)
field(:objectPropertyOptions, as: GoogleApi.CloudSearch.V1.Model.ObjectPropertyOptions)
field(:textPropertyOptions, as: GoogleApi.CloudSearch.V1.Model.TextPropertyOptions)
field(:timestampPropertyOptions, as: GoogleApi.CloudSearch.V1.Model.TimestampPropertyOptions)
end
defimpl Poison.Decoder, for: GoogleApi.CloudSearch.V1.Model.PropertyDefinition do
  # Delegate to the ModelBase-generated decode/2 on the model module so
  # nested model fields are decoded into their struct types.
  def decode(model, opts),
    do: GoogleApi.CloudSearch.V1.Model.PropertyDefinition.decode(model, opts)
end
defimpl Poison.Encoder, for: GoogleApi.CloudSearch.V1.Model.PropertyDefinition do
  # All generated models share the generic ModelBase encoder.
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 60.023438 | 183 | 0.731745 |
08a0971fb518a2abe6ab7831183dad72ef01fa2e | 5,119 | ex | Elixir | lib/dark_ecto/projections/typescript.ex | dark-elixir/dark_ecto | 006a52e6d1a807e8f3c0f00a29780dc2967e57d9 | [
"Apache-2.0"
] | null | null | null | lib/dark_ecto/projections/typescript.ex | dark-elixir/dark_ecto | 006a52e6d1a807e8f3c0f00a29780dc2967e57d9 | [
"Apache-2.0"
] | null | null | null | lib/dark_ecto/projections/typescript.ex | dark-elixir/dark_ecto | 006a52e6d1a807e8f3c0f00a29780dc2967e57d9 | [
"Apache-2.0"
] | null | null | null | defmodule DarkEcto.Projections.Typescript do
@moduledoc """
Cast `Typescript` types
"""
alias DarkEcto.Projections.Shared
alias DarkEcto.Projections.Types
alias DarkEcto.Reflections.EctoSchemaFields
alias DarkEcto.Reflections.EctoSchemaReflection
alias DarkMatter.Inflections
@ecto_mapping Map.get(Types.permuted_conversion_mappings(), :ecto_to_typescript)
@doc """
Projects an Ecto schema module into the data needed to render a
TypeScript type definition and test factory.

Returns the `EctoSchemaFields` reflection as a plain map, with relation
keys pascal-cased and with `:type_d` (field/TS-type pairs) and
`:factory` (factory call snippets) merged in.
"""
def cast(schema) when is_atom(schema) do
  fields = EctoSchemaFields.cast(schema)

  # Relation field names are emitted in PascalCase on the TS side.
  one_relations = Shared.translate_keys(fields.one_relations, &Shared.pascal/1)
  many_relations = Shared.translate_keys(fields.many_relations, &Shared.pascal/1)
  embed_one_relations = Shared.translate_keys(fields.embed_one_relations, &Shared.pascal/1)
  embed_many_relations = Shared.translate_keys(fields.embed_many_relations, &Shared.pascal/1)

  # Field name => TypeScript type, for the `.d.ts` output.
  type_d =
    fields
    |> Shared.cast_schema_with_embeds(&resolve/2)
    |> Shared.translate_keys(&Shared.pascal/1)

  # Factory snippets: `build()` for to-one, `buildList(0)` for to-many,
  # in the order one / many / embed-one / embed-many.
  factory =
    Enum.map(one_relations, &factory_one/1) ++
      Enum.map(many_relations, &factory_many/1) ++
      Enum.map(embed_one_relations, &factory_one/1) ++
      Enum.map(embed_many_relations, &factory_many/1)

  fields
  |> Map.from_struct()
  |> Map.merge(%{
    plural: Shared.pascal(fields.plural),
    singular: Shared.pascal(fields.singular),
    one_relations: one_relations,
    many_relations: many_relations,
    embed_one_relations: embed_one_relations,
    embed_many_relations: embed_many_relations,
    type_d: type_d,
    factory: factory
  })
end
# Renders a resolved type tag as TypeScript source text.
def template({:module, module}) do
  # Module references render as the (resolved) alias name itself.
  to_string(Shared.resolve_alias(module))
end

def template({:array, ts_type}) do
  to_string(ts_type) <> "[]"
end

# Maps and unknown values have no precise TypeScript projection.
def template({:map, _}), do: "Object"
def template({:any, _}), do: "any"
def template({:__FALLBACK__, ts_type}), do: template({:any, ts_type})
# Resolves a `{field, reflected_type}` pair to `{field, ts_type_string}`.
# Clause order matters: wrapper tags (primary_key / foreign_key / one /
# many) are unwrapped first, then container tags, then atom types.

def resolve({field, {:primary_key, inner}}, opts),
do: resolve({field, inner}, opts)
def resolve({field, {:foreign_key, inner}}, opts),
do: resolve({field, inner}, opts)
def resolve({field, {:one, inner}}, opts),
do: resolve({field, inner}, opts)
# A to-many association is rendered as an array of the inner type.
def resolve({field, {:many, inner}}, opts),
do: resolve({field, {:array, inner}}, opts)
def resolve({field, {:map, inner}}, _opts),
do: {field, template({:map, inner})}
# Arrays: resolve the element type first, then wrap it with `[]`.
def resolve({field, {:array, inner}}, opts),
do: {field, template({:array, inner({field, inner}, opts)})}
# Bare atom types: try, in order, the static Ecto-to-TS mapping, Ecto
# custom types, Ecto schemas, and finally the fallback ("any").
def resolve({field, type}, opts) when is_atom(type) do
read_ecto_type_def? = Keyword.get(opts, :read_ecto_type_def?, false)
cond do
Keyword.has_key?(@ecto_mapping, type) ->
# Mapping values may be atoms (stringified) or ready-made strings.
case Keyword.get(@ecto_mapping, type) do
typing when is_atom(typing) -> {field, to_string(typing)}
typing -> {field, typing}
end
read_ecto_type_def? and Shared.ecto_type?(type) ->
# Use value from `Ecto.Type.type/0`
resolve({field, type.type()}, opts)
Shared.ecto_type?(type) ->
{field, template({:module, type})}
EctoSchemaReflection.ecto_schema?(type) ->
{field, template({:module, type})}
true ->
{field, template({:__FALLBACK__, type})}
end
end
# Factory snippet for a to-one relation: `<camelName>.build()`.
# Join tables have no schema module, so the field name itself is camelized.
def factory_one({field, :__ecto_join_table__}) do
  {field, "#{Inflections.binary(field, :absinthe_camel)}.build()"}
end

def factory_one({field, schema}) do
  name = schema |> Shared.resolve_alias() |> Inflections.binary(:absinthe_camel)
  {field, "#{name}.build()"}
end
# Factory snippet for a to-many relation: `<camelSingularName>.buildList(0)`.
# Join tables have no schema module, so the field name itself is used.
def factory_many({field, :__ecto_join_table__}) do
  {field, "#{Inflections.binary(field, [:singular, :absinthe_camel])}.buildList(0)"}
end

def factory_many({field, schema}) do
  name = Inflections.binary(Shared.resolve_alias(schema), [:singular, :absinthe_camel])
  {field, "#{name}.buildList(0)"}
end
@doc """
Maps a rendered TypeScript type to the matching lodash type-check
helper, or `nil` when no helper applies.
"""
def lodash("string[]"), do: "isString"
def lodash("Int[]"), do: "isSafeInteger"
# Deliberately no lodash guard for any other TS type.
def lodash(ts_type) when is_binary(ts_type), do: nil
# Resolves an element's type and keeps only the rendered TS type string,
# discarding the field key.
defp inner({field, type}, opts) do
  {field, type}
  |> resolve(opts)
  |> elem(1)
end
# Factory value generator for a single `{field, ts_type}` pair.
# Id-like fields (the `id` field and any `*Id` foreign key) are emitted
# as string sequence literals; everything else gets a `random/2` call.
def factory_method({"id", _ts_type}), do: "`${sequence}`"

def factory_method({field, ts_type}) do
  if String.ends_with?(field, "Id") do
    "`${sequence}`"
  else
    ~s[random("#{field}", "#{ts_type}")]
  end
end
# Integer Ids
# def factory_method({"id", _}) do
# "sequence"
# end
# def factory_method({k, v}) do
# if String.ends_with?(k, "Id") and k not in ["shortId"] do
# "sequence"
# else
# "random(\"#{k}\", \"#{v}\")"
# end
# end
end
| 29.41954 | 109 | 0.646025 |
08a0ba8dbc2f6a1b9d5b731d6795443fd91b152f | 1,449 | ex | Elixir | lib/slax_web/controllers/sprint_controller.ex | HoffsMH/slax | b91ee30b9fd71a4cb7826f50b605ce580b7c1651 | [
"MIT"
] | 11 | 2016-07-05T18:56:21.000Z | 2021-09-15T22:23:54.000Z | lib/slax_web/controllers/sprint_controller.ex | HoffsMH/slax | b91ee30b9fd71a4cb7826f50b605ce580b7c1651 | [
"MIT"
] | 181 | 2016-06-23T00:47:13.000Z | 2022-03-10T11:23:44.000Z | lib/slax_web/controllers/sprint_controller.ex | HoffsMH/slax | b91ee30b9fd71a4cb7826f50b605ce580b7c1651 | [
"MIT"
] | 7 | 2019-01-30T21:38:28.000Z | 2022-03-01T07:13:39.000Z | defmodule SlaxWeb.SprintController do
use SlaxWeb, :controller
plug(Slax.Plugs.VerifySlackToken, token: :sprint)
plug(Slax.Plugs.VerifyUser)
alias Slax.{Projects, Sprints}
def start(conn, %{"channel_name" => channel_name, "text" => "commitment " <> issue_numbers}) do
case Regex.scan(~r/\d+/, issue_numbers) do
[] ->
text(conn, "Invalid issue numbers.")
issue_numbers ->
issue_numbers =
issue_numbers
|> List.flatten()
|> Enum.map(&String.to_integer/1)
case Projects.get_project_for_channel(channel_name) do
nil ->
text(conn, "A project could not be found for this channel.")
project ->
repo = List.first(project.repos)
{_year, week} = :calendar.iso_week_number()
Sprints.create_sprint_commitment(%{
repo: repo,
issue_numbers: issue_numbers,
week: week,
user: conn.assigns.current_user
})
|> case do
{:error, messages, _} ->
text(conn, Enum.join(messages, "\n"))
{:ok, _, _} ->
text(conn, "Sprint commitment set for week #{week}.")
end
end
end
end
def start(conn, _) do
text(conn, """
*Sprint commands:*
/sprint commitment <issue numbers separated by spaces> - Create a new sprint commitment
""")
end
end
| 27.339623 | 97 | 0.559696 |
08a0f849f57597ff4ad048ef21e14f2fb16d0fb0 | 65,520 | ex | Elixir | lib/elixir/lib/module.ex | cdfuller/elixir | 3bd3f88d57d7fff6cab7b171294b89fb08eedfe7 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module.ex | cdfuller/elixir | 3bd3f88d57d7fff6cab7b171294b89fb08eedfe7 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module.ex | cdfuller/elixir | 3bd3f88d57d7fff6cab7b171294b89fb08eedfe7 | [
"Apache-2.0"
] | null | null | null | defmodule Module do
@moduledoc ~S'''
Provides functions to deal with modules during compilation time.
It allows a developer to dynamically add, delete and register
attributes, attach documentation and so forth.
After a module is compiled, using many of the functions in
this module will raise errors, since it is out of their scope
to inspect runtime data. Most of the runtime data can be inspected
via the `__info__/1` function attached to each compiled module.
## Module attributes
Each module can be decorated with one or more attributes. The following ones
are currently defined by Elixir:
### `@after_compile`
A hook that will be invoked right after the current module is compiled.
Accepts a module or a `{module, function_name}`. See the "Compile callbacks"
section below.
### `@before_compile`
A hook that will be invoked before the module is compiled.
Accepts a module or a `{module, function_or_macro_name}` tuple.
See the "Compile callbacks" section below.
### `@behaviour`
Note the British spelling!
Behaviours can be referenced by modules to ensure they implement
required specific function signatures defined by `@callback`.
For example, you could specify a `URI.Parser` behaviour as follows:
defmodule URI.Parser do
@doc "Defines a default port"
@callback default_port() :: integer
@doc "Parses the given URL"
@callback parse(uri_info :: URI.t()) :: URI.t()
end
And then a module may use it as:
defmodule URI.HTTP do
@behaviour URI.Parser
def default_port(), do: 80
def parse(info), do: info
end
If the behaviour changes or `URI.HTTP` does not implement
one of the callbacks, a warning will be raised.
### `@impl`
To aid in the correct implementation of behaviours, you may optionally declare
`@impl` for implemented callbacks of a behaviour. This makes callbacks
explicit and can help you to catch errors in your code. The compiler will warn
in these cases:
* if you mark a function with `@impl` when that function is not a callback.
* if you don't mark a function with `@impl` when other functions are marked
with `@impl`. If you mark one function with `@impl`, you must mark all
other callbacks for that behaviour as `@impl`.
`@impl` works on a per-context basis. If you generate a function through a macro
and mark it with `@impl`, that won't affect the module where that function is
generated in.
`@impl` also helps with maintainability by making it clear to other developers
that the function is implementing a callback.
Using `@impl`, the example above can be rewritten as:
defmodule URI.HTTP do
@behaviour URI.Parser
@impl true
def default_port(), do: 80
@impl true
def parse(info), do: info
end
You may pass either `false`, `true`, or a specific behaviour to `@impl`.
defmodule Foo do
@behaviour Bar
@behaviour Baz
# Will warn if neither Bar nor Baz specify a callback named bar/0.
@impl true
def bar(), do: :ok
# Will warn if Baz does not specify a callback named baz/0.
@impl Baz
def baz(), do: :ok
end
The code is now more readable, as it is now clear which functions are
part of your API and which ones are callback implementations. To reinforce this
idea, `@impl true` automatically marks the function as `@doc false`, disabling
documentation unless `@doc` is explicitly set.
### `@compile`
Defines options for module compilation. This is used to configure
both Elixir and Erlang compilers, as any other compilation pass
added by external tools. For example:
defmodule MyModule do
@compile {:inline, my_fun: 1}
def my_fun(arg) do
to_string(arg)
end
end
Multiple uses of `@compile` will accumulate instead of overriding
previous ones. See the "Compile options" section below.
### `@deprecated`
Provides the deprecation reason for a function. For example:
defmodule Keyword do
@deprecated "Use Kernel.length/1 instead"
def size(keyword) do
length(keyword)
end
end
The Mix compiler automatically looks for calls to deprecated modules
and emits warnings during compilation, computed via `mix xref warnings`.
Using the `@deprecated` attribute will also be reflected in the
documentation of the given function and macro. You can choose between
the `@deprecated` attribute and the documentation metadata to provide
hard-deprecations (with warnings) and soft-deprecations (without warnings):
This is a soft-deprecation as it simply annotates the documentation
as deprecated:
@doc deprecated: "Use Kernel.length/1 instead"
def size(keyword)
This is a hard-deprecation as it emits warnings and annotates the
documentation as deprecated:
@deprecated "Use Kernel.length/1 instead"
def size(keyword)
Currently `@deprecated` only supports functions and macros. However
you can use the `:deprecated` key in the annotation metadata to
annotate the docs of modules, types and callbacks too.
We recommend using this feature with care, especially library authors.
Deprecating code always pushes the burden towards library users. We
also recommend for deprecated functionality to be maintained for long
periods of time, even after deprecation, giving developers plenty of
time to update (except for cases where keeping the deprecated API is
undesired, such as in the presence of security issues).
### `@doc` and `@typedoc`
Provides documentation for the entity that follows the attribute.
`@doc` is to be used with a function, macro, callback, or
macrocallback, while `@typedoc` with a type (public or opaque).
Accepts a string (often a heredoc) or `false` where `@doc false` will
make the entity invisible to documentation extraction tools like
[`ExDoc`](https://hexdocs.pm/ex_doc/). For example:
defmodule MyModule do
@typedoc "This type"
@typedoc since: "1.1.0"
@type t :: term
@doc "Hello world"
@doc since: "1.1.0"
def hello do
"world"
end
@doc """
Sums `a` to `b`.
"""
def sum(a, b) do
a + b
end
end
As can be seen in the example above, `@doc` and `@typedoc` also accept
a keyword list that serves as a way to provide arbitrary metadata
about the entity. Tools like [`ExDoc`](https://hexdocs.pm/ex_doc/)
and `IEx` may use this information to
display annotations. A common use case is `since` that may be used
to annotate in which version the function was introduced.
As illustrated in the example, it is possible to use these attributes
more than once before an entity. However, the compiler will warn if
used twice with binaries as that replaces the documentation text from
the preceding use. Multiple uses with keyword lists will merge the
lists into one.
Note that since the compiler also defines some additional metadata,
there are a few reserved keys that will be ignored and warned if used.
Currently these are: `:opaque` and `:defaults`.
Once this module is compiled, this information becomes available via
the `Code.fetch_docs/1` function.
### `@dialyzer`
Defines warnings to request or suppress when using a version of
`:dialyzer` that supports module attributes.
Accepts an atom, a tuple, or a list of atoms and tuples. For example:
defmodule MyModule do
@dialyzer {:nowarn_function, my_fun: 1}
def my_fun(arg) do
M.not_a_function(arg)
end
end
For the list of supported warnings, see
[`:dialyzer` module](http://www.erlang.org/doc/man/dialyzer.html).
Multiple uses of `@dialyzer` will accumulate instead of overriding
previous ones.
### `@external_resource`
Specifies an external resource for the current module.
Sometimes a module embeds information from an external file. This
attribute allows the module to annotate which external resources
have been used.
Tools like Mix may use this information to ensure the module is
recompiled in case any of the external resources change.
### `@file`
Changes the filename used in stacktraces for the function or macro that
follows the attribute, such as:
defmodule MyModule do
@doc "Hello world"
@file "hello.ex"
def hello do
"world"
end
end
### `@moduledoc`
Provides documentation for the current module.
defmodule MyModule do
@moduledoc """
A very useful module.
"""
@moduledoc authors: ["Alice", "Bob"]
end
Accepts a string (often a heredoc) or `false` where `@moduledoc false`
will make the module invisible to documentation extraction tools like
[`ExDoc`](https://hexdocs.pm/ex_doc/).
Similarly to `@doc` also accepts a keyword list to provide metadata
about the module. For more details, see the documentation of `@doc`
above.
Once this module is compiled, this information becomes available via
the `Code.fetch_docs/1` function.
### `@on_definition`
A hook that will be invoked when each function or macro in the current
module is defined. Useful when annotating functions.
Accepts a module or a `{module, function_name}` tuple. See the
"Compile callbacks" section below.
### `@on_load`
A hook that will be invoked whenever the module is loaded.
Accepts the function name (as an atom) of a function in the current module or
`{function_name, 0}` tuple where `function_name` is the name of a function in
the current module. The function must be public and have an arity of 0 (no
arguments). If the function does not return `:ok`, the loading of the module
will be aborted. For example:
defmodule MyModule do
@on_load :load_check
def load_check do
if some_condition() do
:ok
else
:abort
end
end
def some_condition do
false
end
end
Modules compiled with HiPE would not call this hook.
### `@vsn`
Specify the module version. Accepts any valid Elixir value, for example:
defmodule MyModule do
@vsn "1.0"
end
### Typespec attributes
The following attributes are part of typespecs and are also built-in in
Elixir:
* `@type` - defines a type to be used in `@spec`
* `@typep` - defines a private type to be used in `@spec`
* `@opaque` - defines an opaque type to be used in `@spec`
* `@spec` - provides a specification for a function
* `@callback` - provides a specification for a behaviour callback
* `@macrocallback` - provides a specification for a macro behaviour callback
* `@optional_callbacks` - specifies which behaviour callbacks and macro
behaviour callbacks are optional
* `@impl` - declares an implementation of a callback function or macro
### Custom attributes
In addition to the built-in attributes outlined above, custom attributes may
also be added. A custom attribute is any valid identifier prefixed with an
`@` and followed by a valid Elixir value:
defmodule MyModule do
@custom_attr [some: "stuff"]
end
For more advanced options available when defining custom attributes, see
`register_attribute/3`.
## Compile callbacks
There are three callbacks that are invoked when functions are defined,
as well as before and immediately after the module bytecode is generated.
### `@after_compile`
A hook that will be invoked right after the current module is compiled.
Accepts a module or a `{module, function_name}` tuple. The function
must take two arguments: the module environment and its bytecode.
When just a module is provided, the function is assumed to be
`__after_compile__/2`.
Callbacks registered first will run last.
#### Example
defmodule MyModule do
@after_compile __MODULE__
def __after_compile__(env, _bytecode) do
IO.inspect(env)
end
end
### `@before_compile`
A hook that will be invoked before the module is compiled.
Accepts a module or a `{module, function_or_macro_name}` tuple. The
function/macro must take one argument: the module environment. If
it's a macro, its returned value will be injected at the end of the
module definition before the compilation starts.
When just a module is provided, the function/macro is assumed to be
`__before_compile__/1`.
Callbacks registered first will run last. Any overridable definition
will be made concrete before the first callback runs. A definition may
be made overridable again in another before compile callback and it
will be made concrete one last time after all callbacks run.
*Note*: unlike `@after_compile`, the callback function/macro must
be placed in a separate module (because when the callback is invoked,
the current module does not yet exist).
#### Example
defmodule A do
defmacro __before_compile__(_env) do
quote do
def hello, do: "world"
end
end
end
defmodule B do
@before_compile A
end
B.hello()
#=> "world"
### `@on_definition`
A hook that will be invoked when each function or macro in the current
module is defined. Useful when annotating functions.
Accepts a module or a `{module, function_name}` tuple. The function
must take 6 arguments:
* the module environment
* the kind of the function/macro: `:def`, `:defp`, `:defmacro`, or `:defmacrop`
* the function/macro name
* the list of quoted arguments
* the list of quoted guards
* the quoted function body
Note the hook receives the quoted arguments and it is invoked before
the function is stored in the module. So `Module.defines?/2` will return
`false` for the first clause of every function.
If the function/macro being defined has multiple clauses, the hook will
be called for each clause.
Unlike other hooks, `@on_definition` will only invoke functions and
never macros. This is to avoid `@on_definition` callbacks from
redefining functions that have just been defined in favor of more
explicit approaches.
When just a module is provided, the function is assumed to be
`__on_definition__/6`.
#### Example
defmodule Hooks do
def on_def(_env, kind, name, args, guards, body) do
IO.puts("Defining #{kind} named #{name} with args:")
IO.inspect(args)
IO.puts("and guards")
IO.inspect(guards)
IO.puts("and body")
IO.puts(Macro.to_string(body))
end
end
defmodule MyModule do
@on_definition {Hooks, :on_def}
def hello(arg) when is_binary(arg) or is_list(arg) do
"Hello" <> to_string(arg)
end
def hello(_) do
:ok
end
end
## Compile options
The `@compile` attribute accepts different options that are used by both
Elixir and Erlang compilers. Some of the common use cases are documented
below:
* `@compile :debug_info` - includes `:debug_info` regardless of the
corresponding setting in `Code.compiler_options/1`
* `@compile {:debug_info, false}` - disables `:debug_info` regardless
of the corresponding setting in `Code.compiler_options/1`
* `@compile {:inline, some_fun: 2, other_fun: 3}` - inlines the given
name/arity pairs. Inlining is applied locally, calls from another
module are not affected by this option
* `@compile {:autoload, false}` - disables automatic loading of
modules after compilation. Instead, the module will be loaded after
it is dispatched to
You can see a handful more options used by the Erlang compiler in
the documentation for the [`:compile` module](http://www.erlang.org/doc/man/compile.html).
'''
@typep definition :: {atom, arity}
@typep def_kind :: :def | :defp | :defmacro | :defmacrop
@extra_error_msg_defines? "Use Kernel.function_exported?/3 and Kernel.macro_exported?/3 " <>
"to check for public functions and macros instead"
@extra_error_msg_definitions_in "Use the Module.__info__/1 callback to get public functions and macros instead"
@doc """
Provides runtime information about functions, macros, and other information
defined by the module.
Each module gets an `__info__/1` function when it's compiled. The function
takes one of the following items:
* `:attributes` - a keyword list with all persisted attributes
* `:compile` - a list with compiler metadata
* `:functions` - a keyword list of public functions and their arities
* `:macros` - a keyword list of public macros and their arities
* `:md5` - the MD5 of the module
* `:module` - the module atom name
"""
@callback __info__(:attributes) :: keyword()
@callback __info__(:compile) :: [term()]
@callback __info__(:functions) :: keyword()
@callback __info__(:macros) :: keyword()
@callback __info__(:md5) :: binary()
@callback __info__(:module) :: module()
@doc """
Checks if a module is open.
A module is "open" if it is currently being defined and its attributes and
functions can be modified.
"""
@spec open?(module) :: boolean
def open?(module) when is_atom(module) do
# Thin delegation to the Erlang-level compiler helper that tracks
# modules currently being defined.
:elixir_module.is_open(module)
end
@doc """
Evaluates the quoted contents in the given module's context.
A list of environment options can also be given as argument.
See `Code.eval_string/3` for more information.
Raises an error if the module was already compiled.
## Examples
defmodule Foo do
contents =
quote do
def sum(a, b), do: a + b
end
Module.eval_quoted(__MODULE__, contents)
end
Foo.sum(1, 2)
#=> 3
For convenience, you can pass any `Macro.Env` struct, such
as `__ENV__/0`, as the first argument or as options. Both
the module and all options will be automatically extracted
from the environment:
defmodule Foo do
contents =
quote do
def sum(a, b), do: a + b
end
Module.eval_quoted(__ENV__, contents)
end
Foo.sum(1, 2)
#=> 3
Note that if you pass a `Macro.Env` struct as first argument
while also passing `opts`, they will be merged with `opts`
having precedence.
"""
@spec eval_quoted(module | Macro.Env.t(), Macro.t(), list, keyword | Macro.Env.t()) :: term
def eval_quoted(module_or_env, quoted, binding \\ [], opts \\ [])
# A Macro.Env as first argument: extract the module and use the env as
# the base options; explicit opts take precedence on merge.
def eval_quoted(%Macro.Env{} = env, quoted, binding, opts)
when is_list(binding) and is_list(opts) do
eval_quoted(env.module, quoted, binding, Keyword.merge(Map.to_list(env), opts))
end
# A Macro.Env in place of opts: convert it to a keyword list of options.
def eval_quoted(module, quoted, binding, %Macro.Env{} = env)
when is_atom(module) and is_list(binding) do
eval_quoted(module, quoted, binding, Map.to_list(env))
end
def eval_quoted(module, quoted, binding, opts)
when is_atom(module) and is_list(binding) and is_list(opts) do
# Evaluating into an already compiled (closed) module is an error.
assert_not_compiled!(__ENV__.function, module)
# Reset the compiler's tracking of the last defined function before
# evaluating new code inside the module.
:elixir_def.reset_last(module)
{value, binding, _env, _scope} =
:elixir.eval_quoted(quoted, binding, Keyword.put(opts, :module, module))
{value, binding}
end
@doc """
Creates a module with the given name and defined by
the given quoted expressions.
The line where the module is defined and its file **must**
be passed as options.
It returns a tuple of shape `{:module, module, binary, term}`
where `module` is the module name, `binary` is the module
byte code and `term` is the result of the last expression in
`quoted`.
Similar to `Kernel.defmodule/2`, the binary will only be
written to disk as a `.beam` file if `Module.create/3` is
invoked in a file that is currently being compiled.
## Examples
contents =
quote do
def world, do: true
end
Module.create(Hello, contents, Macro.Env.location(__ENV__))
Hello.world()
#=> true
## Differences from `defmodule`
`Module.create/3` works similarly to `Kernel.defmodule/2`
and returns the same results. While one could also use
`defmodule` to define modules dynamically, this function
is preferred when the module body is given by a quoted
expression.
Another important distinction is that `Module.create/3`
allows you to control the environment variables used
when defining the module, while `Kernel.defmodule/2`
automatically uses the environment it is invoked at.
"""
@spec create(module, Macro.t(), Macro.Env.t() | keyword) :: {:module, module, binary, term}
def create(module, quoted, opts)
# A Macro.Env may be passed as the options; it is flattened to a keyword list.
def create(module, quoted, %Macro.Env{} = env) when is_atom(module) do
create(module, quoted, Map.to_list(env))
end
def create(module, quoted, opts) when is_atom(module) and is_list(opts) do
# An explicit :file option is mandatory.
unless Keyword.has_key?(opts, :file) do
raise ArgumentError, "expected :file to be given as option"
end
# Attach line metadata and a fresh {module, counter} context to the
# quoted expression before compiling it.
next = :elixir_module.next_counter(nil)
line = Keyword.get(opts, :line, 0)
quoted = :elixir_quote.linify_with_context_counter(line, {module, next}, quoted)
:elixir_module.compile(module, quoted, [], :elixir.env_for_eval(opts))
end
@doc """
Concatenates a list of aliases and returns a new alias.

## Examples

    iex> Module.concat([Foo, Bar])
    Foo.Bar

    iex> Module.concat([Foo, "Bar"])
    Foo.Bar

"""
@spec concat([binary | atom]) :: atom
def concat(list) when is_list(list), do: :elixir_aliases.concat(list)
@doc """
Concatenates two aliases and returns a new alias.

## Examples

    iex> Module.concat(Foo, Bar)
    Foo.Bar

    iex> Module.concat(Foo, "Bar")
    Foo.Bar

"""
@spec concat(binary | atom, binary | atom) :: atom
def concat(left, right)
    when (is_binary(left) or is_atom(left)) and (is_binary(right) or is_atom(right)),
    do: :elixir_aliases.concat([left, right])
@doc """
Concatenates a list of aliases and returns a new alias only if the alias
was already referenced.

If the alias was not referenced yet, fails with `ArgumentError`.
It handles charlists, binaries and atoms.

## Examples

    iex> Module.safe_concat([Module, Unknown])
    ** (ArgumentError) argument error

    iex> Module.safe_concat([List, Chars])
    List.Chars

"""
@spec safe_concat([binary | atom]) :: atom
def safe_concat(list) when is_list(list), do: :elixir_aliases.safe_concat(list)
@doc """
Concatenates two aliases and returns a new alias only if the alias was
already referenced.

If the alias was not referenced yet, fails with `ArgumentError`.
It handles charlists, binaries and atoms.

## Examples

    iex> Module.safe_concat(Module, Unknown)
    ** (ArgumentError) argument error

    iex> Module.safe_concat(List, Chars)
    List.Chars

"""
@spec safe_concat(binary | atom, binary | atom) :: atom
def safe_concat(left, right)
    when (is_binary(left) or is_atom(left)) and (is_binary(right) or is_atom(right)),
    do: :elixir_aliases.safe_concat([left, right])
# Build signatures to be stored in docs.
#
# First pass: simplify each argument AST into a short name while counting
# how often each autogenerated name (:map, :int, ...) occurs. Second
# pass: expand repeated autogenerated names into distinct variables.
defp build_signature(args, env) do
{reverse_args, counters} = simplify_args(args, %{}, [], env)
expand_keys(reverse_args, counters, [])
end
# Simplifies each argument AST in order, threading the autogenerated-name
# counters through; returns {args_in_reverse_order, final_counters}.
defp simplify_args([], counters, reverse_args, _env) do
  {reverse_args, counters}
end

defp simplify_args([head | tail], counters, acc, env) do
  {simplified, counters} = simplify_arg(head, counters, env)
  simplify_args(tail, counters, [simplified | acc], env)
end
# Simplifies one argument AST into a short name used in doc signatures.
# Returns {simplified_ast, counters}. Clause order is significant.

# Default arguments: simplify the left side and expand module attributes
# inside the default value so the rendered default is meaningful.
defp simplify_arg({:\\, _, [left, right]}, counters, env) do
{left, counters} = simplify_arg(left, counters, env)
right =
Macro.prewalk(right, fn
{:@, _, _} = attr -> Macro.expand_once(attr, env)
other -> other
end)
{{:\\, [], [left, right]}, counters}
end
# If the variable is being used explicitly for naming,
# we always give it a higher priority (nil) even if it
# starts with underscore.
defp simplify_arg({:=, _, [{var, _, atom}, _]}, counters, _env) when is_atom(atom) do
{simplify_var(var, nil), counters}
end
defp simplify_arg({:=, _, [_, {var, _, atom}]}, counters, _env) when is_atom(atom) do
{simplify_var(var, nil), counters}
end
# If we have only the variable as argument, it also gets
# higher priority. However, if the variable starts with an
# underscore, we give it a secondary context (Elixir) with
# lower priority.
defp simplify_arg({var, _, atom}, counters, _env) when is_atom(atom) do
{simplify_var(var, Elixir), counters}
end
# Struct patterns: name the argument after the struct module when it can
# be resolved at this point, otherwise fall back to :struct.
defp simplify_arg({:%, _, [left, _]}, counters, env) do
case Macro.expand_once(left, env) do
module when is_atom(module) -> autogenerated_key(counters, simplify_module_name(module))
_ -> autogenerated_key(counters, :struct)
end
end
defp simplify_arg({:%{}, _, _}, counters, _env) do
autogenerated_key(counters, :map)
end
# Module attributes are expanded and the result simplified recursively.
defp simplify_arg({:@, _, _} = attr, counters, env) do
simplify_arg(Macro.expand_once(attr, env), counters, env)
end
# Literal arguments are named after their type; each name is counted so
# repeated occurrences can later be numbered.
defp simplify_arg(other, counters, _env) when is_integer(other),
do: autogenerated_key(counters, :int)
defp simplify_arg(other, counters, _env) when is_boolean(other),
do: autogenerated_key(counters, :bool)
defp simplify_arg(other, counters, _env) when is_atom(other),
do: autogenerated_key(counters, :atom)
defp simplify_arg(other, counters, _env) when is_list(other),
do: autogenerated_key(counters, :list)
defp simplify_arg(other, counters, _env) when is_float(other),
do: autogenerated_key(counters, :float)
defp simplify_arg(other, counters, _env) when is_binary(other),
do: autogenerated_key(counters, :binary)
# Anything else (tuples, calls, pins, ...) becomes a generic :arg.
defp simplify_arg(_, counters, _env), do: autogenerated_key(counters, :arg)
# Turns a variable name into an AST variable tuple. Underscored names are
# stripped and keep the given guess priority as their context; regular names
# always get the highest priority (nil context).
defp simplify_var(var, guess_priority) do
  string_name = Atom.to_string(var)

  case string_name do
    "_" ->
      {:_, [], guess_priority}

    "_" <> stripped ->
      {String.to_atom(stripped), [], guess_priority}

    _ ->
      {var, [], nil}
  end
end
# Converts a module alias into a short underscored atom used as a signature
# name (the last alias segment, underscored). Non-Elixir modules make
# split/1 raise ArgumentError and are returned unchanged.
defp simplify_module_name(module) when is_atom(module) do
  try do
    split(module)
  rescue
    ArgumentError -> module
  else
    module_name -> String.to_atom(Macro.underscore(List.last(module_name)))
  end
end
# Bumps the usage counter for an autogenerated signature name. The first
# occurrence is stored as :once; the second jumps straight to 2 so repeated
# names can later be numbered.
defp autogenerated_key(counters, key) do
  case :maps.find(key, counters) do
    {:ok, :once} -> {key, :maps.put(key, 2, counters)}
    {:ok, count} -> {key, :maps.put(key, count + 1, counters)}
    :error -> {key, :maps.put(key, :once, counters)}
  end
end
# Replaces autogenerated atom keys with variables, numbering names that were
# used more than once ("arg2", "arg1", ...) by consuming the counters.
defp expand_keys([], _counters, acc) do
  acc
end

defp expand_keys([{:\\, meta, [key, default]} | rest], counters, acc) when is_atom(key) do
  {var, counters} = expand_key(key, counters)
  expand_keys(rest, counters, [{:\\, meta, [var, default]} | acc])
end

defp expand_keys([key | rest], counters, acc) when is_atom(key) do
  {var, counters} = expand_key(key, counters)
  expand_keys(rest, counters, [var | acc])
end

defp expand_keys([arg | rest], counters, acc) do
  expand_keys(rest, counters, [arg | acc])
end
# Expands one autogenerated key into a variable. A remaining integer count
# (>= 1) yields a numbered variable and decrements the counter; otherwise
# the key is used as-is (it only appeared once).
defp expand_key(key, counters) do
  case :maps.find(key, counters) do
    {:ok, count} when is_integer(count) and count >= 1 ->
      {{:"#{key}#{count}", [], Elixir}, :maps.put(key, count - 1, counters)}

    _ ->
      {{key, [], Elixir}, counters}
  end
end
# Merge
#
# Folds a previously stored signature and a freshly built one into a single
# signature, argument by argument. The 1-based position is used to build a
# generic "argN" name when two guesses disagree.
defp merge_signatures([], [], _position) do
  []
end

defp merge_signatures([older | older_rest], [newer | newer_rest], position) do
  merged = merge_signature(older, newer, position)
  [merged | merge_signatures(older_rest, newer_rest, position + 1)]
end

# Default markers: keep the default from the older signature and drop the
# newer one, merging the underlying arguments.
defp merge_signature({:\\, meta, [older_arg, default]}, newer, position) do
  {:\\, meta, [merge_signature(older_arg, newer, position), default]}
end

defp merge_signature(older, {:\\, _, [newer_arg, _default]}, position) do
  merge_signature(older, newer_arg, position)
end

# The older signature, when given, always have higher precedence
defp merge_signature({_, _, nil} = older, _newer, _position), do: older
defp merge_signature(_older, {_, _, nil} = newer, _position), do: newer

# Both are a guess, so check if they are the same guess
defp merge_signature({name, _, _} = older, {name, _, _}, _position), do: older

# Otherwise, returns a generic guess
defp merge_signature({_, meta, _}, _newer, position), do: {:"arg#{position}", meta, Elixir}
@doc """
Checks if the module defines the given function or macro.

Use `defines?/3` to assert for a specific type.

This function can only be used on modules that have not yet been compiled.
Use `Kernel.function_exported?/3` and `Kernel.macro_exported?/3` to check for
public functions and macros respectively in compiled modules.

## Examples

    defmodule Example do
      Module.defines?(__MODULE__, {:version, 0}) #=> false
      def version, do: 1
      Module.defines?(__MODULE__, {:version, 0}) #=> true
    end

"""
@spec defines?(module, definition) :: boolean
def defines?(module, {name, arity} = tuple)
    when is_atom(module) and is_atom(name) and is_integer(arity) and arity >= 0 and arity <= 255 do
  assert_not_compiled!(__ENV__.function, module, @extra_error_msg_defines?)
  # Definitions live in the compiler's ETS "set" table under {:def, {name, arity}}.
  {set, _bag} = data_tables_for(module)
  :ets.member(set, {:def, tuple})
end
@doc """
Checks if the module defines a function or macro of the
given `kind`.

`kind` can be any of `:def`, `:defp`, `:defmacro`, or `:defmacrop`.

This function can only be used on modules that have not yet been compiled.
Use `Kernel.function_exported?/3` and `Kernel.macro_exported?/3` to check for
public functions and macros respectively in compiled modules.

## Examples

    defmodule Example do
      Module.defines?(__MODULE__, {:version, 0}, :def) #=> false
      def version, do: 1
      Module.defines?(__MODULE__, {:version, 0}, :def) #=> true
    end

"""
@spec defines?(module, definition, def_kind) :: boolean
def defines?(module, {name, arity} = tuple, def_kind)
    when is_atom(module) and is_atom(name) and is_integer(arity) and arity >= 0 and arity <= 255 and
           def_kind in [:def, :defp, :defmacro, :defmacrop] do
  assert_not_compiled!(__ENV__.function, module, @extra_error_msg_defines?)
  {set, _bag} = data_tables_for(module)

  # The stored entry's second element is the kind; it must match exactly.
  case :ets.lookup(set, {:def, tuple}) do
    [{_, ^def_kind, _, _, _, _}] -> true
    _ -> false
  end
end
@doc """
Checks if the current module defines the given type (private, opaque or not).

This function is only available for modules being compiled.
"""
@doc since: "1.7.0"
@spec defines_type?(module, definition) :: boolean
def defines_type?(module, definition) do
  # Thin delegation; the typespec machinery owns the type bookkeeping.
  Kernel.Typespec.defines_type?(module, definition)
end
@doc """
Copies the given spec as a callback.

Returns `true` if there is such a spec and it was copied as a callback.
If the function associated to the spec has documentation defined prior to
invoking this function, the docs are copied too.
"""
@doc since: "1.7.0"
@spec spec_to_callback(module, definition) :: boolean
def spec_to_callback(module, definition) do
  # Thin delegation; the typespec machinery owns the spec bookkeeping.
  Kernel.Typespec.spec_to_callback(module, definition)
end
@doc """
Returns all functions and macros defined in `module`.

It returns a list with all defined functions and macros, public and private,
in the shape of `[{name, arity}, ...]`.

This function can only be used on modules that have not yet been compiled.
Use the `c:Module.__info__/1` callback to get the public functions and macros in
compiled modules.

## Examples

    defmodule Example do
      def version, do: 1
      defmacrop test(arg), do: arg
      Module.definitions_in(__MODULE__) #=> [{:version, 0}, {:test, 1}]
    end

"""
@spec definitions_in(module) :: [definition]
def definitions_in(module) when is_atom(module) do
  assert_not_compiled!(__ENV__.function, module, @extra_error_msg_definitions_in)
  {_, bag} = data_tables_for(module)
  # Every {name, arity} pair is accumulated under the :defs key of the bag table.
  bag_lookup_element(bag, :defs, 2)
end
@doc """
Returns all functions defined in `module`, according
to its kind.

This function can only be used on modules that have not yet been compiled.
Use the `c:Module.__info__/1` callback to get the public functions and macros in
compiled modules.

## Examples

    defmodule Example do
      def version, do: 1
      Module.definitions_in(__MODULE__, :def)  #=> [{:version, 0}]
      Module.definitions_in(__MODULE__, :defp) #=> []
    end

"""
@spec definitions_in(module, def_kind) :: [definition]
def definitions_in(module, def_kind)
    when is_atom(module) and def_kind in [:def, :defp, :defmacro, :defmacrop] do
  assert_not_compiled!(__ENV__.function, module, @extra_error_msg_definitions_in)
  {set, _} = data_tables_for(module)
  # Matches entries {{:def, {name, arity}}, kind, ...} and extracts the pairs.
  :lists.concat(:ets.match(set, {{:def, :"$1"}, def_kind, :_, :_, :_, :_}))
end
@doc """
Makes the given functions in `module` overridable.

An overridable function is lazily defined, allowing a
developer to customize it. See `Kernel.defoverridable/1` for
more information and documentation.
"""
@spec make_overridable(module, [definition]) :: :ok
def make_overridable(module, tuples) when is_atom(module) and is_list(tuples) do
  assert_not_compiled!(__ENV__.function, module)

  func = fn
    {function_name, arity} = tuple
    when is_atom(function_name) and is_integer(arity) and arity >= 0 and arity <= 255 ->
      # Remove the current definition so it may be redefined later; false
      # means it was never defined, which is a caller error.
      case :elixir_def.take_definition(module, tuple) do
        false ->
          raise ArgumentError,
                "cannot make function #{function_name}/#{arity} " <>
                  "overridable because it was not defined"

        clause ->
          # Preserve the local calls made by the yanked definition so the
          # locals tracker stays consistent.
          neighbours = :elixir_locals.yank(tuple, module)
          overridable_definitions = :elixir_overridable.overridable(module)

          # Count how many times this definition was made overridable.
          count =
            case :maps.find(tuple, overridable_definitions) do
              {:ok, {count, _, _, _}} -> count + 1
              :error -> 1
            end

          overridable_definitions =
            :maps.put(tuple, {count, clause, neighbours, false}, overridable_definitions)

          :elixir_overridable.overridable(module, overridable_definitions)
      end

    other ->
      raise ArgumentError,
            "each element in tuple list has to be a " <>
              "{function_name :: atom, arity :: 0..255} tuple, got: #{inspect(other)}"
  end

  :lists.foreach(func, tuples)
end
# Behaviour variant: makes every callback of `behaviour` that this module
# defines overridable.
@spec make_overridable(module, module) :: :ok
def make_overridable(module, behaviour) when is_atom(module) and is_atom(behaviour) do
  case check_module_for_overridable(module, behaviour) do
    :ok ->
      :ok

    {:error, error_explanation} ->
      raise ArgumentError,
            "cannot pass module #{inspect(behaviour)} as argument " <>
              "to defoverridable/1 because #{error_explanation}"
  end

  # Normalize "MACRO-" entries so they compare against definitions_in/1.
  behaviour_callbacks =
    for callback <- behaviour_info(behaviour, :callbacks) do
      {pair, _kind} = normalize_macro_or_function_callback(callback)
      pair
    end

  tuples =
    for definition <- definitions_in(module),
        definition in behaviour_callbacks,
        do: definition

  make_overridable(module, tuples)
end
# Ensures the module passed to defoverridable/1 is a compiled behaviour that
# this module declared via @behaviour; returns :ok or {:error, explanation}.
defp check_module_for_overridable(module, behaviour) do
  {_, bag} = data_tables_for(module)
  behaviour_definitions = bag_lookup_element(bag, {:accumulate, :behaviour}, 2)

  cond do
    not Code.ensure_compiled?(behaviour) ->
      {:error, "it was not defined"}

    not function_exported?(behaviour, :behaviour_info, 1) ->
      {:error, "it does not define any callbacks"}

    behaviour not in behaviour_definitions ->
      error_message =
        "its corresponding behaviour is missing. Did you forget to " <>
          "add @behaviour #{inspect(behaviour)}?"

      {:error, error_message}

    true ->
      :ok
  end
end
# Normalizes a behaviour_info/1 callback entry into {{name, arity}, kind}.
defp normalize_macro_or_function_callback({function_name, arity}) do
  # Macros are always provided one extra argument in behaviour_info/1
  # and are prefixed with "MACRO-" in the callback name.
  case Atom.to_string(function_name) do
    "MACRO-" <> macro_name ->
      {{String.to_atom(macro_name), arity - 1}, :defmacro}

    _ ->
      {{function_name, arity}, :def}
  end
end
# Fetches behaviour metadata, normalizing :undefined (which Erlang modules
# may return for :optional_callbacks) into an empty list.
defp behaviour_info(module, key) do
  case module.behaviour_info(key) do
    :undefined -> []
    list when is_list(list) -> list
  end
end
@doc """
Returns `true` if `tuple` in `module` is marked as overridable.
"""
@spec overridable?(module, definition) :: boolean
def overridable?(module, {function_name, arity} = tuple)
    when is_atom(function_name) and is_integer(arity) and arity >= 0 and arity <= 255 do
  # Overridable definitions are tracked per module by the compiler.
  :maps.is_key(tuple, :elixir_overridable.overridable(module))
end
@doc """
Puts a module attribute with `key` and `value` in the given `module`.

## Examples

    defmodule MyModule do
      Module.put_attribute(__MODULE__, :custom_threshold_for_lib, 10)
    end

"""
@spec put_attribute(module, atom, term) :: :ok
def put_attribute(module, key, value) when is_atom(module) and is_atom(key) do
  # Delegates to the internal 4-arity version with no line information.
  put_attribute(module, key, value, nil)
end
@doc """
Gets the given attribute from a module.

If the attribute was marked with `accumulate` with
`Module.register_attribute/3`, a list is always returned.
`nil` is returned if the attribute has not been marked with
`accumulate` and has not been set to any value.

The `@` macro compiles to a call to this function. For example,
the following code:

    @foo

Expands to something akin to:

    Module.get_attribute(__MODULE__, :foo)

This function can only be used on modules that have not yet been compiled.
Use the `c:Module.__info__/1` callback to get all persisted attributes, or
`Code.fetch_docs/1` to retrieve all documentation related attributes in
compiled modules.

## Examples

    defmodule Foo do
      Module.put_attribute(__MODULE__, :value, 1)
      Module.get_attribute(__MODULE__, :value) #=> 1

      Module.register_attribute(__MODULE__, :value, accumulate: true)
      Module.put_attribute(__MODULE__, :value, 1)
      Module.get_attribute(__MODULE__, :value) #=> [1]
    end

"""
@spec get_attribute(module, atom) :: term
def get_attribute(module, key) when is_atom(module) and is_atom(key) do
  # Delegates to the internal 3-arity version; a nil line disables the
  # "undefined module attribute" warning path.
  get_attribute(module, key, nil)
end
@doc """
Deletes the module attribute that matches the given key.

It returns the deleted attribute value (or `nil` if nothing was set).

## Examples

    defmodule MyModule do
      Module.put_attribute(__MODULE__, :custom_threshold_for_lib, 10)
      Module.delete_attribute(__MODULE__, :custom_threshold_for_lib)
    end

"""
@spec delete_attribute(module, atom) :: term
def delete_attribute(module, key) when is_atom(module) and is_atom(key) do
  assert_not_compiled!(__ENV__.function, module)
  {set, bag} = data_tables_for(module)

  case :ets.lookup(set, key) do
    [{_, _, :accumulate}] ->
      # Accumulated values live in the bag table; take/2 removes and returns
      # them (stored most-recent-first, hence the reversal).
      reverse_values(:ets.take(bag, {:accumulate, key}), [])

    [{_, value, _}] ->
      :ets.delete(set, key)
      value

    [] ->
      nil
  end
end
# Reverses a list of {_, value} ETS entries while keeping only the values.
defp reverse_values([], acc) do
  acc
end

defp reverse_values([{_key, value} | rest], acc) do
  reverse_values(rest, [value | acc])
end
@doc """
Registers an attribute.

By registering an attribute, a developer is able to customize
how Elixir will store and accumulate the attribute values.

## Options

When registering an attribute, two options can be given:

  * `:accumulate` - several calls to the same attribute will
    accumulate instead of overriding the previous one. New attributes
    are always added to the top of the accumulated list.

  * `:persist` - the attribute will be persisted in the Erlang
    Abstract Format. Useful when interfacing with Erlang libraries.

By default, both options are `false`.

## Examples

    defmodule MyModule do
      Module.register_attribute(__MODULE__, :custom_threshold_for_lib, accumulate: true)

      @custom_threshold_for_lib 10
      @custom_threshold_for_lib 20
      @custom_threshold_for_lib #=> [20, 10]
    end

"""
@spec register_attribute(module, atom, [{:accumulate, boolean}, {:persist, boolean}]) :: :ok
def register_attribute(module, attribute, options)
    when is_atom(module) and is_atom(attribute) and is_list(options) do
  assert_not_compiled!(__ENV__.function, module)
  {set, bag} = data_tables_for(module)

  if Keyword.get(options, :persist) do
    :ets.insert(bag, {:persisted_attributes, attribute})
  end

  if Keyword.get(options, :accumulate) do
    # Either create a fresh accumulating entry or flip an existing entry
    # into accumulate mode (the third element is the marker).
    :ets.insert_new(set, {attribute, [], :accumulate}) ||
      :ets.update_element(set, attribute, {3, :accumulate})
  end

  :ok
end
@doc """
Splits the given module name into binary parts.

`module` has to be an Elixir module, as `split/1` won't work with Erlang-style
modules (for example, `split(:lists)` raises an error).

`split/1` also supports splitting the string representation of Elixir modules
(that is, the result of calling `Atom.to_string/1` with the module name).

## Examples

    iex> Module.split(Very.Long.Module.Name.And.Even.Longer)
    ["Very", "Long", "Module", "Name", "And", "Even", "Longer"]

    iex> Module.split("Elixir.String.Chars")
    ["String", "Chars"]

"""
@spec split(module | String.t()) :: [String.t(), ...]
def split(module)

def split(module) when is_atom(module) do
  module |> Atom.to_string() |> do_split(module)
end

def split(module) when is_binary(module) do
  do_split(module, module)
end

# Only aliases starting with "Elixir." are valid Elixir modules; the
# original input is kept around purely for the error message.
defp do_split("Elixir." <> suffix, _original) do
  String.split(suffix, ".")
end

defp do_split(_other, original) do
  raise ArgumentError, "expected an Elixir module, got: #{inspect(original)}"
end
@doc false
@deprecated "Use @doc instead"
# Legacy entry point for attaching docs to an already-defined function.
def add_doc(module, line, kind, {name, arity}, signature \\ [], doc) do
  assert_not_compiled!(__ENV__.function, module)

  if kind in [:defp, :defmacrop, :typep] do
    # Docs on private definitions are rejected rather than stored.
    if doc, do: {:error, :private_doc}, else: :ok
  else
    {set, _bag} = data_tables_for(module)
    compile_doc(set, line, kind, name, arity, signature, nil, doc, %{}, __ENV__, false)
    :ok
  end
end
@doc false
# Used internally to compile documentation.
# This function is private and must be used only internally.
#
# Consumes the pending @impl/@deprecated/@doc attributes for the definition
# being compiled and stores the resulting documentation entry.
def compile_definition_attributes(env, kind, name, args, _guards, body) do
  %{module: module} = env
  {set, bag} = data_tables_for(module)
  {arity, defaults} = args_count(args, 0, 0)

  impl = compile_impl(set, bag, name, env, kind, arity, defaults)
  doc_meta = compile_doc_meta(set, bag, name, arity, defaults)

  {line, doc} = get_doc_info(set, env)
  compile_doc(set, line, kind, name, arity, args, body, doc, doc_meta, env, impl)

  :ok
end
# @doc on private definitions is discarded with a warning instead of stored.
defp compile_doc(_table, line, kind, name, arity, _args, _body, doc, _doc_meta, env, _impl)
     when kind in [:defp, :defmacrop] do
  if doc do
    message =
      "#{kind} #{name}/#{arity} is private, " <>
        "@doc attribute is always discarded for private functions/macros/types"

    IO.warn(message, Macro.Env.stacktrace(%{env | line: line}))
  end
end

# Stores (or merges) the documentation entry for a public definition.
defp compile_doc(table, line, kind, name, arity, args, _body, doc, doc_meta, env, impl) do
  key = {doc_key(kind), name, arity}
  signature = build_signature(args, env)

  case :ets.lookup(table, key) do
    [] ->
      # An @impl without explicit docs hides the definition (doc = false).
      doc = if is_nil(doc) && impl, do: false, else: doc
      :ets.insert(table, {key, line, signature, doc, doc_meta})

    [{_, current_line, current_sign, current_doc, current_doc_meta}] ->
      # Later clauses merge their signature and metadata into the existing
      # entry; the first clause's line and any existing doc are kept.
      signature = merge_signatures(current_sign, signature, 1)
      doc = if is_nil(doc), do: current_doc, else: doc
      doc = if is_nil(doc) && impl, do: false, else: doc
      doc_meta = Map.merge(current_doc_meta, doc_meta)
      :ets.insert(table, {key, current_line, signature, doc, doc_meta})
  end
end
# Maps a (public) definition kind to the kind tag used in the docs entry key.
defp doc_key(:def) do
  :function
end

defp doc_key(:defmacro) do
  :macro
end
# Assembles the documentation metadata map for a definition: deprecation
# info, explicit @doc metadata, and the number of default arguments.
defp compile_doc_meta(set, bag, name, arity, defaults) do
  %{}
  |> compile_deprecated(set, bag, name, arity, defaults)
  |> get_doc_meta(set)
  |> add_defaults_count(defaults)
end
# Takes (and thereby clears) any pending @doc metadata from the set table
# and merges it into the accumulated metadata map.
defp get_doc_meta(existing_meta, set) do
  case :ets.take(set, {:doc, :meta}) do
    [{{:doc, :meta}, metadata, _}] -> Map.merge(existing_meta, metadata)
    [] -> existing_meta
  end
end
# Takes (and thereby clears) a pending @deprecated attribute. When present,
# records one deprecation entry per generated arity in the bag table and
# stores the reason in the doc metadata.
defp compile_deprecated(doc_meta, set, bag, name, arity, defaults) do
  case :ets.take(set, :deprecated) do
    [{:deprecated, reason, _}] when is_binary(reason) ->
      :ets.insert(bag, deprecated_reasons(defaults, name, arity, reason))
      Map.put(doc_meta, :deprecated, reason)

    _ ->
      doc_meta
  end
end
# Records how many default arguments a definition has; omitted when zero.
defp add_defaults_count(doc_meta, defaults) do
  if defaults == 0 do
    doc_meta
  else
    Map.put(doc_meta, :defaults, defaults)
  end
end
# Builds one deprecation record per generated arity: a definition with
# default arguments also defines the lower arities, so every arity from
# arity - defaults up to arity gets its own entry.
defp deprecated_reasons(defaults, name, arity, reason) do
  for generated_arity <- (arity - defaults)..arity do
    deprecated_reason(name, generated_arity, reason)
  end
end

defp deprecated_reason(name, arity, reason) do
  {:deprecated, {{name, arity}, reason}}
end
# Takes (and thereby clears) a pending @impl attribute. When present, an impl
# record is stored in the bag for later behaviour checking and the @impl
# value is returned; returns false when no @impl was given.
defp compile_impl(set, bag, name, env, kind, arity, defaults) do
  %{line: line, file: file} = env

  case :ets.take(set, :impl) do
    [{:impl, value, _}] ->
      pair = {name, arity}
      # The definition's metadata carries its :context (e.g. from use/import).
      meta = :ets.lookup_element(set, {:def, pair}, 3)
      impl = {pair, Keyword.get(meta, :context), defaults, kind, line, file, value}
      :ets.insert(bag, {:impls, impl})
      value

    [] ->
      false
  end
end
# Counts the total number of arguments and how many of them carry a
# \\ default. The default-marker clause must come before the generic one.
defp args_count([{:\\, _, _} | rest], total, defaults) do
  args_count(rest, total + 1, defaults + 1)
end

defp args_count([_arg | rest], total, defaults) do
  args_count(rest, total + 1, defaults)
end

defp args_count([], total, defaults) do
  {total, defaults}
end
@doc false
# Verifies @behaviour declarations and @impl annotations at the end of a
# module's compilation, warning about missing or mismatched callbacks.
def check_behaviours_and_impls(env, _set, bag, all_definitions) do
  behaviours = bag_lookup_element(bag, {:accumulate, :behaviour}, 2)
  impls = bag_lookup_element(bag, :impls, 2)
  callbacks = check_behaviours(env, behaviours)

  pending_callbacks =
    if impls != [] do
      # Callbacks covered by an explicit @impl are removed from the pending
      # set; definitions without @impl are warned about separately.
      {non_implemented_callbacks, contexts} = check_impls(env, behaviours, callbacks, impls)
      warn_missing_impls(env, non_implemented_callbacks, contexts, all_definitions)
      non_implemented_callbacks
    else
      callbacks
    end

  check_callbacks(env, pending_callbacks, all_definitions)
  :ok
end
# Validates each @behaviour entry and collects the callbacks they declare
# into a map of {name, arity} => {kind, behaviour, optional?}.
defp check_behaviours(%{lexical_tracker: pid} = env, behaviours) do
  Enum.reduce(behaviours, %{}, fn behaviour, acc ->
    cond do
      not is_atom(behaviour) ->
        message =
          "@behaviour #{inspect(behaviour)} must be an atom (in module #{inspect(env.module)})"

        IO.warn(message, Macro.Env.stacktrace(env))
        acc

      not Code.ensure_compiled?(behaviour) ->
        message =
          "@behaviour #{inspect(behaviour)} does not exist (in module #{inspect(env.module)})"

        IO.warn(message, Macro.Env.stacktrace(env))
        acc

      not function_exported?(behaviour, :behaviour_info, 1) ->
        message =
          "module #{inspect(behaviour)} is not a behaviour (in module #{inspect(env.module)})"

        IO.warn(message, Macro.Env.stacktrace(env))
        acc

      true ->
        # Record the compile-time dependency on the behaviour module.
        :elixir_lexical.record_remote(behaviour, nil, pid)
        optional_callbacks = behaviour_info(behaviour, :optional_callbacks)
        callbacks = behaviour_info(behaviour, :callbacks)
        Enum.reduce(callbacks, acc, &add_callback(&1, behaviour, env, optional_callbacks, &2))
    end
  end)
end
# Adds one declared callback to the accumulator, warning when two behaviours
# declare a callback with the same name/arity. The later behaviour wins the
# slot either way.
defp add_callback(original, behaviour, env, optional_callbacks, acc) do
  {callback, kind} = normalize_macro_or_function_callback(original)

  case acc do
    %{^callback => {_kind, conflict, _optional?}} ->
      message =
        "conflicting behaviours found. #{format_definition(kind, callback)} is required by " <>
          "#{inspect(conflict)} and #{inspect(behaviour)} (in module #{inspect(env.module)})"

      IO.warn(message, Macro.Env.stacktrace(env))

    %{} ->
      :ok
  end

  Map.put(acc, callback, {kind, behaviour, original in optional_callbacks})
end
# Warns about declared callbacks that are missing (unless optional) or that
# were implemented with the wrong kind (e.g. def where defmacro is expected).
defp check_callbacks(env, callbacks, all_definitions) do
  for {callback, {kind, behaviour, optional?}} <- callbacks do
    case :lists.keyfind(callback, 1, all_definitions) do
      false when not optional? ->
        message =
          format_callback(callback, kind, behaviour) <>
            " is not implemented (in module #{inspect(env.module)})"

        IO.warn(message, Macro.Env.stacktrace(env))

      {_, wrong_kind, _, _} when kind != wrong_kind ->
        message =
          format_callback(callback, kind, behaviour) <>
            " was implemented as \"#{wrong_kind}\" but should have been \"#{kind}\" " <>
            "(in module #{inspect(env.module)})"

        IO.warn(message, Macro.Env.stacktrace(env))

      _ ->
        :ok
    end
  end

  :ok
end
# Renders "function foo/1 required by behaviour Bar" (or "protocol Bar")
# for callback-related warnings.
defp format_callback(callback, kind, module) do
  module_kind = if protocol?(module), do: "protocol ", else: "behaviour "
  format_definition(kind, callback) <> " required by " <> module_kind <> inspect(module)
end
# A module counts as a protocol when it is loaded, exposes __protocol__/1,
# and reports itself as the protocol module.
defp protocol?(module) do
  Code.ensure_loaded?(module) and function_exported?(module, :__protocol__, 1) and
    module.__protocol__(:module) == module
end
# Matches every @impl annotation against the declared callbacks. Returns the
# callbacks still pending (not covered by any @impl) plus, per behaviour,
# the contexts in which it was implemented.
defp check_impls(env, behaviours, callbacks, impls) do
  acc = {callbacks, %{}}

  Enum.reduce(impls, acc, fn {fa, context, defaults, kind, line, file, value}, acc ->
    case impl_behaviours(fa, defaults, kind, value, behaviours, callbacks) do
      {:ok, impl_behaviours} ->
        Enum.reduce(impl_behaviours, acc, fn {fa, behaviour}, {callbacks, contexts} ->
          # Implemented callbacks are no longer pending.
          callbacks = Map.delete(callbacks, fa)
          contexts = Map.update(contexts, behaviour, [context], &[context | &1])
          {callbacks, contexts}
        end)

      {:error, message} ->
        formatted = format_impl_warning(fa, kind, message)
        IO.warn(formatted, Macro.Env.stacktrace(%{env | line: line, file: file}))
        acc
    end
  end)
end
# Resolves which behaviours an @impl annotation satisfies. The 6-arity
# clause expands default arguments into all implied arities and dispatches
# to the 5-arity clauses based on the @impl value.
defp impl_behaviours({function, arity}, defaults, kind, value, behaviours, callbacks) do
  impls = for n <- arity..(arity - defaults), do: {function, n}
  impl_behaviours(impls, kind, value, behaviours, callbacks)
end

# @impl on private definitions is always an error.
defp impl_behaviours(_, kind, _, _, _) when kind in [:defp, :defmacrop] do
  {:error, :private_function}
end

# @impl without any declared behaviour is always an error.
defp impl_behaviours(_, _, value, [], _) do
  {:error, {:no_behaviours, value}}
end

# @impl false: error if the definition actually is a callback.
defp impl_behaviours(impls, _, false, _, callbacks) do
  case callbacks_for_impls(impls, callbacks) do
    [] -> {:ok, []}
    [impl | _] -> {:error, {:impl_not_defined, impl}}
  end
end

# @impl true: error if the definition is not a callback at all.
defp impl_behaviours(impls, _, true, _, callbacks) do
  case callbacks_for_impls(impls, callbacks) do
    [] -> {:error, {:impl_defined, callbacks}}
    impls -> {:ok, impls}
  end
end

# @impl SomeBehaviour: the callback must belong to that declared behaviour.
defp impl_behaviours(impls, _, behaviour, behaviours, callbacks) do
  filtered = behaviour_callbacks_for_impls(impls, behaviour, callbacks)

  cond do
    filtered != [] ->
      {:ok, filtered}

    behaviour not in behaviours ->
      {:error, {:behaviour_not_declared, behaviour}}

    true ->
      {:error, {:behaviour_not_defined, behaviour, callbacks}}
  end
end
# Keeps only the function/arity pairs whose callback entry belongs to the
# given behaviour, pairing each with that behaviour.
defp behaviour_callbacks_for_impls(impls, behaviour, callbacks) do
  for fa <- impls, match?({_, ^behaviour, _}, :maps.get(fa, callbacks, nil)) do
    {fa, behaviour}
  end
end
# Pairs each function/arity with the behaviour that declares it as a
# callback, skipping pairs that are not callbacks at all.
defp callbacks_for_impls(impls, callbacks) do
  Enum.flat_map(impls, fn fa ->
    case :maps.get(fa, callbacks, nil) do
      {_kind, behaviour, _optional?} -> [{fa, behaviour}]
      nil -> []
    end
  end)
end
# Renders the warning message for an invalid or ineffective @impl
# annotation, one clause per error reason from impl_behaviours/5.
defp format_impl_warning(fa, kind, :private_function) do
  "#{format_definition(kind, fa)} is private, @impl attribute is always discarded for private functions/macros"
end

defp format_impl_warning(fa, kind, {:no_behaviours, value}) do
  "got \"@impl #{inspect(value)}\" for #{format_definition(kind, fa)} but no behaviour was declared"
end

defp format_impl_warning(_, kind, {:impl_not_defined, {fa, behaviour}}) do
  "got \"@impl false\" for #{format_definition(kind, fa)} " <>
    "but it is a callback specified in #{inspect(behaviour)}"
end

defp format_impl_warning(fa, kind, {:impl_defined, callbacks}) do
  "got \"@impl true\" for #{format_definition(kind, fa)} " <>
    "but no behaviour specifies such callback#{known_callbacks(callbacks)}"
end

defp format_impl_warning(fa, kind, {:behaviour_not_declared, behaviour}) do
  "got \"@impl #{inspect(behaviour)}\" for #{format_definition(kind, fa)} " <>
    "but this behaviour was not declared with @behaviour"
end

defp format_impl_warning(fa, kind, {:behaviour_not_defined, behaviour, callbacks}) do
  "got \"@impl #{inspect(behaviour)}\" for #{format_definition(kind, fa)} " <>
    "but this behaviour does not specify such callback#{known_callbacks(callbacks)}"
end
# Once at least one @impl is used in a module, warns about other public
# callback implementations that lack the annotation.
defp warn_missing_impls(_env, callbacks, _contexts, _defs) when map_size(callbacks) == 0 do
  :ok
end

defp warn_missing_impls(env, non_implemented_callbacks, contexts, defs) do
  for {pair, kind, meta, _clauses} <- defs,
      kind in [:def, :defmacro] do
    with {:ok, {_, behaviour, _}} <- Map.fetch(non_implemented_callbacks, pair),
         true <- missing_impl_in_context?(meta, behaviour, contexts) do
      message =
        "module attribute @impl was not set for #{format_definition(kind, pair)} " <>
          "callback (specified in #{inspect(behaviour)}). " <>
          "This either means you forgot to add the \"@impl true\" annotation before the " <>
          "definition or that you are accidentally overriding this callback"

      IO.warn(message, Macro.Env.stacktrace(%{env | line: :elixir_utils.get_line(meta)}))
    end
  end

  :ok
end
# Decides whether a definition lacking @impl should be warned about: when
# the behaviour was implemented in some contexts, only definitions from
# those same contexts are flagged; otherwise only context-free definitions.
defp missing_impl_in_context?(meta, behaviour, contexts) do
  case :maps.find(behaviour, contexts) do
    {:ok, known_contexts} -> Keyword.get(meta, :context) in known_contexts
    :error -> not Keyword.has_key?(meta, :context)
  end
end
# Renders a definition as e.g. "function foo/2" or "macro bar/1" for
# warning messages; the 1-arity variant yields just the kind word.
defp format_definition(kind, {name, arity}) do
  "#{format_definition(kind)} #{name}/#{arity}"
end

defp format_definition(:def), do: "function"
defp format_definition(:defp), do: "function"
defp format_definition(:defmacro), do: "macro"
defp format_definition(:defmacrop), do: "macro"
# Renders the list of known callbacks appended to @impl warnings; a special
# message is used when no callbacks are known at all.
defp known_callbacks(callbacks) when map_size(callbacks) == 0 do
  ". There are no known callbacks, please specify the proper @behaviour " <>
    "and make sure it defines callbacks"
end

defp known_callbacks(callbacks) do
  formatted_callbacks =
    for {{name, arity}, {kind, module, _}} <- callbacks do
      "\n * " <> Exception.format_mfa(module, name, arity) <> " (#{format_definition(kind)})"
    end

  ". The known callbacks are:\n#{formatted_callbacks}\n"
end
@doc false
# Used internally by Kernel's @.
# This function is private and must be used only internally.
def get_attribute(module, key, line) when is_atom(key) do
  assert_not_compiled!(
    {:get_attribute, 2},
    module,
    "Use the Module.__info__/1 callback or Code.fetch_docs/1 instead"
  )

  {set, bag} = data_tables_for(module)

  case :ets.lookup(set, key) do
    [{_, _, :accumulate}] ->
      # Accumulated values are stored most-recent-first in the bag table.
      :lists.reverse(bag_lookup_element(bag, {:accumulate, key}, 2))

    [{_, val, nil}] ->
      val

    [{_, val, _}] ->
      # Mark the attribute as read by clearing its unread-line marker, so no
      # "redefining attribute" warning fires on the next write.
      :ets.update_element(set, key, {3, nil})
      val

    [] when is_integer(line) ->
      # TODO: Consider raising instead of warning on v2.0 as it usually cascades
      error_message =
        "undefined module attribute @#{key}, " <>
          "please remove access to @#{key} or explicitly set it before access"

      IO.warn(error_message, attribute_stack(module, line))
      nil

    [] ->
      nil
  end
end
@doc false
# Used internally by Kernel's @.
# This function is private and must be used only internally.
def put_attribute(module, key, value, line) when is_atom(key) do
  assert_not_compiled!(__ENV__.function, module)
  {set, bag} = data_tables_for(module)
  # Built-in attributes are validated and normalized before storage.
  value = preprocess_attribute(key, value)
  put_attribute(module, key, value, line, set, bag)
  :ok
end
# If any of the doc attributes are called with a keyword list that
# will become documentation metadata. Multiple calls will be merged
# into the same map overriding duplicate keys.
defp put_attribute(module, key, {_, metadata}, line, set, _bag)
     when key in [:doc, :typedoc, :moduledoc] and is_list(metadata) do
  metadata_map = preprocess_doc_meta(metadata, module, line, %{})

  # insert_new fails when metadata already exists; merge into it instead.
  case :ets.insert_new(set, {{key, :meta}, metadata_map, line}) do
    true ->
      :ok

    false ->
      current_metadata = :ets.lookup_element(set, {key, :meta}, 2)
      :ets.update_element(set, {key, :meta}, {2, Map.merge(current_metadata, metadata_map)})
  end
end
# Optimize some attributes by avoiding writing to the attributes key
# in the bag table since we handle them internally.
defp put_attribute(module, key, value, line, set, _bag)
     when key in [:doc, :typedoc, :moduledoc, :impl, :deprecated] do
  try do
    :ets.lookup_element(set, key, 3)
  catch
    # badarg means the key does not exist yet — nothing to warn about.
    :error, :badarg -> :ok
  else
    # An integer third element means the previous value was never read;
    # overwriting it silently would lose the earlier attribute.
    unread_line when is_integer(line) and is_integer(unread_line) ->
      message = "redefining @#{key} attribute previously set at line #{unread_line}"
      IO.warn(message, attribute_stack(module, line))

    _ ->
      :ok
  end

  :ets.insert(set, {key, value, line})
end
# @on_load may only be set once per module; the first write stores it in
# both tables, any later write raises.
defp put_attribute(_module, :on_load, value, line, set, bag) do
  try do
    :ets.lookup_element(set, :on_load, 3)
  catch
    :error, :badarg ->
      :ets.insert(set, {:on_load, value, line})
      :ets.insert(bag, {:attributes, :on_load})
  else
    _ -> raise ArgumentError, "the @on_load attribute can only be set once per module"
  end
end
# Regular attributes: the first write registers the attribute; later writes
# either accumulate in the bag (when registered with accumulate: true) or
# overwrite the value in the set.
defp put_attribute(_module, key, value, line, set, bag) do
  try do
    :ets.lookup_element(set, key, 3)
  catch
    :error, :badarg ->
      :ets.insert(set, {key, value, line})
      :ets.insert(bag, {:attributes, key})
  else
    :accumulate -> :ets.insert(bag, {{:accumulate, key}, value})
    _ -> :ets.insert(set, {key, value, line})
  end
end
# Builds a one-frame stacktrace pointing at the module's file and the given
# line, used by IO.warn for attribute-related warnings.
defp attribute_stack(module, line) do
  file = String.to_charlist(Path.relative_to_cwd(:elixir_module.file(module)))
  [{module, :__MODULE__, 0, file: file, line: line}]
end
## Helpers

# Validates the built-in documentation attributes. Accepts {line, doc} where
# doc is a string, boolean, nil, or a keyword list (treated as metadata).
defp preprocess_attribute(key, value) when key in [:moduledoc, :typedoc, :doc] do
  case value do
    {line, doc} when is_integer(line) and (is_binary(doc) or is_boolean(doc) or is_nil(doc)) ->
      value

    {line, [{key, _} | _]} when is_integer(line) and is_atom(key) ->
      value

    {line, doc} when is_integer(line) ->
      raise ArgumentError,
            "@#{key} is a built-in module attribute for documentation. It should be " <>
              "a string, boolean, keyword list, or nil, got: #{inspect(doc)}"

    _other ->
      raise ArgumentError,
            "@#{key} is a built-in module attribute for documentation. When set dynamically, " <>
              "it should be {line, doc} (where \"doc\" is a string, boolean, keyword list, or nil), " <>
              "got: #{inspect(value)}"
  end
end
# Normalizes @on_load: a bare function name becomes {name, 0}; an explicit
# {name, 0} tuple is kept as is; anything else is rejected.
defp preprocess_attribute(:on_load, value) do
  case value do
    {function_name, 0} = pair when is_atom(function_name) ->
      pair

    function_name when is_atom(function_name) ->
      {function_name, 0}

    _ ->
      raise ArgumentError,
            "@on_load is a built-in module attribute that annotates a function to be invoked " <>
              "when the module is loaded. It should be an atom or a {atom, 0} tuple, " <>
              "got: #{inspect(value)}"
  end
end
# Validates @impl: a boolean or a behaviour module. A module value is
# eagerly compiled as a side effect so behaviour checks can see it later.
defp preprocess_attribute(:impl, value) do
  case value do
    _ when is_boolean(value) ->
      value

    module when is_atom(module) and module != nil ->
      # Attempt to compile behaviour but ignore failure (will warn later)
      _ = Code.ensure_compiled(module)
      value

    _ ->
      raise ArgumentError,
            "@impl is a built-in module attribute that marks the next definition " <>
              "as a callback implementation. It should be a module or a boolean, " <>
              "got: #{inspect(value)}"
  end
end
# The compile hooks accept a bare module, which expands to the
# conventional callback name in that module.
defp preprocess_attribute(:before_compile, atom) when is_atom(atom) do
  {atom, :__before_compile__}
end

defp preprocess_attribute(:after_compile, atom) when is_atom(atom) do
  {atom, :__after_compile__}
end

defp preprocess_attribute(:on_definition, atom) when is_atom(atom) do
  {atom, :__on_definition__}
end
# Typespec-related attributes cannot go through this API; they must use
# the @ notation so the compiler can process them.
defp preprocess_attribute(key, _value)
     when key in [:type, :typep, :opaque, :spec, :callback, :macrocallback] do
  raise ArgumentError,
        "attributes type, typep, opaque, spec, callback, and macrocallback " <>
          "must be set directly via the @ notation"
end
# @external_resource must be a string path; anything else is rejected.
# Fix: the original error message read "a string the path to a file",
# which is ungrammatical.
defp preprocess_attribute(:external_resource, value) when not is_binary(value) do
  raise ArgumentError,
        "@external_resource is a built-in module attribute used for specifying file " <>
          "dependencies. It should be a string representing the path to a file, got: #{inspect(value)}"
end
# @deprecated must carry a textual reason for the deprecation.
defp preprocess_attribute(:deprecated, value) when not is_binary(value) do
  raise ArgumentError,
        "@deprecated is a built-in module attribute that annotates a definition as deprecated. " <>
          "It should be a string with the reason for the deprecation, got: #{inspect(value)}"
end
# @file accepts a path string or a {path, line} tuple.
defp preprocess_attribute(:file, value) do
  case value do
    _ when is_binary(value) ->
      value
    {file, line} when is_binary(file) and is_integer(line) ->
      value
    _ ->
      raise ArgumentError,
            "@file is a built-in module attribute that annotates the file and line the next " <>
              "definition comes from. It should be a string or {string, line} tuple as value, " <>
              "got: #{inspect(value)}"
  end
end
# Any other attribute is stored untouched.
defp preprocess_attribute(_key, value), do: value
# Walks a documentation-metadata keyword list, validating each entry and
# folding it into `map`. Reserved keys (:opaque, :defaults) are dropped
# with a warning pointed at the caller's file/line.
defp preprocess_doc_meta([], _module, _line, map), do: map

defp preprocess_doc_meta([{key, _} | tail], module, line, map)
     when key in [:opaque, :defaults] do
  message = "ignoring reserved documentation metadata key: #{inspect(key)}"
  IO.warn(message, attribute_stack(module, line))
  preprocess_doc_meta(tail, module, line, map)
end

defp preprocess_doc_meta([{key, value} | tail], module, line, map) when is_atom(key) do
  # validate_doc_meta/2 raises on malformed built-in keys.
  validate_doc_meta(key, value)
  preprocess_doc_meta(tail, module, line, Map.put(map, key, value))
end
# Validates well-known documentation metadata keys; unknown keys pass.
defp validate_doc_meta(:since, value) when not is_binary(value) do
  raise ArgumentError,
        ":since is a built-in documentation metadata key. It should be a string representing " <>
          "the version in which the documented entity was added, got: #{inspect(value)}"
end

defp validate_doc_meta(:deprecated, value) when not is_binary(value) do
  raise ArgumentError,
        ":deprecated is a built-in documentation metadata key. It should be a string " <>
          "representing the replacement for the deprecated entity, got: #{inspect(value)}"
end

defp validate_doc_meta(:delegate_to, value) do
  # Must be a fully-qualified {module, function, arity} with arity >= 0.
  case value do
    {m, f, a} when is_atom(m) and is_atom(f) and is_integer(a) and a >= 0 ->
      :ok
    _ ->
      raise ArgumentError,
            ":delegate_to is a built-in documentation metadata key. It should be a three-element " <>
              "tuple in the form of {module, function, arity}, got: #{inspect(value)}"
  end
end

defp validate_doc_meta(_, _), do: :ok
# Pops the pending @doc entry from the data table (:ets.take/2 is
# destructive, so the doc is consumed). Falls back to {line, nil} when no
# doc was set.
defp get_doc_info(table, env) do
  case :ets.take(table, :doc) do
    [{:doc, {_line, _doc} = stored, _}] -> stored
    [] -> {env.line, nil}
  end
end
# Fetches the ETS data tables backing the still-open module.
defp data_tables_for(module), do: :elixir_module.data_tables(module)
# Like :ets.lookup_element/3, but returns [] when the key is missing
# instead of raising.
defp bag_lookup_element(table, key, pos) do
  try do
    :ets.lookup_element(table, key, pos)
  catch
    :error, :badarg -> []
  end
end
# Ensures `module` is still open (under compilation); the Module API can
# only mutate a module before compilation finishes. Relies on open?/1
# defined elsewhere in this module. The `||` preserves open?/1's truthy
# return value for the success path.
defp assert_not_compiled!(function_name_arity, module, extra_msg \\ "") do
  open?(module) ||
    raise ArgumentError,
          assert_not_compiled_message(function_name_arity, module, extra_msg)
end
# Formats the "already compiled" error, appending `extra_msg` when given.
defp assert_not_compiled_message({function_name, arity}, module, extra_msg) do
  mfa = "Module.#{function_name}/#{arity}"

  suffix =
    case extra_msg do
      "" -> ""
      _ -> ". " <> extra_msg
    end

  "could not call #{mfa} because the module #{inspect(module)} is already compiled" <> suffix
end
end
| 32.08619 | 113 | 0.662927 |
08a10692df9bb924aa54230d88dd76934d64f8d6 | 2,474 | ex | Elixir | lib/stenotype.ex | aeturnum/stenotype | 90e7b9be60ee748ab046f46005bd17b17f1d153a | [
"Apache-2.0"
] | null | null | null | lib/stenotype.ex | aeturnum/stenotype | 90e7b9be60ee748ab046f46005bd17b17f1d153a | [
"Apache-2.0"
] | null | null | null | lib/stenotype.ex | aeturnum/stenotype | 90e7b9be60ee748ab046f46005bd17b17f1d153a | [
"Apache-2.0"
] | null | null | null | defmodule Stenotype do
alias Stenotype.Location
require Logger
@doc """
Imports the Stenotype logging macros (`info/1`, `debug/1`, `notice/1`,
`warn/1`, `error/1`, `to_s/1` and `t/1`) into the calling module and
requires `Logger` on its behalf.
"""
defmacro __using__(_) do
  quote do
    require Logger

    import Stenotype,
      only: [
        info: 1,
        debug: 1,
        notice: 1,
        warn: 1,
        error: 1,
        to_s: 1,
        t: 1
      ]
  end
end
@doc """
Emits `line` as an `:info` statement. The call site is registered with
`Stenotype.Location` at compile time and embedded into the expansion.
"""
defmacro info(line) do
  location = Stenotype.Location.register_log_location(__CALLER__, :info)

  quote do
    unquote(Macro.escape(location))
    |> Stenotype.Format.Statement.create(unquote(line), :info)
    |> Stenotype.Output.output()
  end
end
@doc """
Emits `line` as a `:debug` statement. The call site is registered with
`Stenotype.Location` at compile time and embedded into the expansion.
"""
# Fix: removed the stale commented-out previous implementation that was
# left inside the macro body.
defmacro debug(line) do
  atom = :debug
  loc = Stenotype.Location.register_log_location(__CALLER__, atom)

  quote do
    unquote(Macro.escape(loc))
    |> Stenotype.Format.Statement.create(unquote(line), unquote(atom))
    |> Stenotype.Output.output()
  end
end
@doc """
Emits `line` as a `:notice` statement. The call site is registered with
`Stenotype.Location` at compile time and embedded into the expansion.
"""
defmacro notice(line) do
  location = Stenotype.Location.register_log_location(__CALLER__, :notice)

  quote do
    unquote(Macro.escape(location))
    |> Stenotype.Format.Statement.create(unquote(line), :notice)
    |> Stenotype.Output.output()
  end
end
@doc """
Emits `line` as a `:warn` statement. The call site is registered with
`Stenotype.Location` at compile time and embedded into the expansion.
"""
defmacro warn(line) do
  location = Stenotype.Location.register_log_location(__CALLER__, :warn)

  quote do
    unquote(Macro.escape(location))
    |> Stenotype.Format.Statement.create(unquote(line), :warn)
    |> Stenotype.Output.output()
  end
end
@doc """
Emits `line` as an `:error` statement. The call site is registered with
`Stenotype.Location` at compile time and embedded into the expansion.
"""
defmacro error(line) do
  location = Stenotype.Location.register_log_location(__CALLER__, :error)

  quote do
    unquote(Macro.escape(location))
    |> Stenotype.Format.Statement.create(unquote(line), :error)
    |> Stenotype.Output.output()
  end
end
@doc """
Expands to `Stenotype.Format.Conversion.to_bin(term)`, converting `term`
to a binary.
"""
defmacro to_s(term) do
  quote do: Stenotype.Format.Conversion.to_bin(unquote(term))
end
@doc """
Logs the current stacktrace at warning level, prefixed by `line`.

Each frame is rendered via `Location.trace_string/1` and joined with
`"\n->"` separators.
"""
# Fix: the original used `with` for plain bindings that can never fail —
# a straight pipeline is the idiomatic form — and fused the map + join
# steps into Enum.map_join/3.
def t(line \\ "") do
  trace = Enum.map_join(stack(), "\n->", &Location.trace_string/1)
  Logger.warn("#{line}:\n->#{trace}")
end
# Returns the calling process' stacktrace as Location structs, skipping
# the Process.info/2 bookkeeping frame and any frames from this module.
defp stack() do
  with {_, frames} <- Process.info(self(), :current_stacktrace) do
    frames
    # first stack entry is from Process.info itself
    |> Enum.drop(1)
    |> Enum.reject(fn {mod, _fun, _arity, _info} -> mod == __MODULE__ end)
    |> Enum.map(&Location.create/1)
  end
end
end
| 23.561905 | 72 | 0.616815 |
08a1286c8cafeea2099bfe153ac7605133268b10 | 69,698 | exs | Elixir | integration_test/cases/repo.exs | samuelpordeus/ecto | c66ab5f89ef348c7f520f427001c33d95a78bd2e | [
"Apache-2.0"
] | null | null | null | integration_test/cases/repo.exs | samuelpordeus/ecto | c66ab5f89ef348c7f520f427001c33d95a78bd2e | [
"Apache-2.0"
] | null | null | null | integration_test/cases/repo.exs | samuelpordeus/ecto | c66ab5f89ef348c7f520f427001c33d95a78bd2e | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Integration.RepoTest do
use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true)
alias Ecto.Integration.TestRepo
import Ecto.Query
alias Ecto.Integration.Post
alias Ecto.Integration.Order
alias Ecto.Integration.User
alias Ecto.Integration.Comment
alias Ecto.Integration.Permalink
alias Ecto.Integration.Custom
alias Ecto.Integration.Barebone
alias Ecto.Integration.CompositePk
alias Ecto.Integration.PostUserCompositePk
## Repo startup and basic `all/2` behaviour.

test "returns already started for started repos" do
  assert {:error, {:already_started, _}} = TestRepo.start_link
end

test "supports unnamed repos" do
  assert {:ok, pid} = TestRepo.start_link(name: nil)
  assert Ecto.Repo.Queryable.all(pid, Post, []) == []
end

test "all empty" do
  assert TestRepo.all(Post) == []
  assert TestRepo.all(from p in Post) == []
end

test "all with in" do
  TestRepo.insert!(%Post{title: "hello"})

  # Works without the query cache.
  assert_raise Ecto.Query.CastError, fn ->
    TestRepo.all(from p in Post, where: p.title in ^nil)
  end

  assert [] = TestRepo.all from p in Post, where: p.title in []
  assert [] = TestRepo.all from p in Post, where: p.title in ["1", "2", "3"]
  assert [] = TestRepo.all from p in Post, where: p.title in ^[]

  assert [_] = TestRepo.all from p in Post, where: p.title not in []
  assert [_] = TestRepo.all from p in Post, where: p.title in ["1", "hello", "3"]
  assert [_] = TestRepo.all from p in Post, where: p.title in ["1", ^"hello", "3"]
  assert [_] = TestRepo.all from p in Post, where: p.title in ^["1", "hello", "3"]

  # Still doesn't work after the query cache.
  assert_raise Ecto.Query.CastError, fn ->
    TestRepo.all(from p in Post, where: p.title in ^nil)
  end
end

test "all using named from" do
  TestRepo.insert!(%Post{title: "hello"})

  query =
    from(p in Post, as: :post)
    |> where([post: p], p.title == "hello")

  assert [_] = TestRepo.all query
end

test "all without schema" do
  %Post{} = TestRepo.insert!(%Post{title: "title1"})
  %Post{} = TestRepo.insert!(%Post{title: "title2"})

  assert ["title1", "title2"] =
           TestRepo.all(from(p in "posts", order_by: p.title, select: p.title))

  assert [_] =
           TestRepo.all(from(p in "posts", where: p.title == "title1", select: p.id))
end

test "all shares metadata" do
  TestRepo.insert!(%Post{title: "title1"})
  TestRepo.insert!(%Post{title: "title2"})

  # Metadata structs are shared between results, within a query...
  [post1, post2] = TestRepo.all(Post)
  assert :erts_debug.same(post1.__meta__, post2.__meta__)

  # ...and across separate executions of the query.
  [new_post1, new_post2] = TestRepo.all(Post)
  assert :erts_debug.same(post1.__meta__, new_post1.__meta__)
  assert :erts_debug.same(post2.__meta__, new_post2.__meta__)
end

@tag :invalid_prefix
test "all with invalid prefix" do
  assert catch_error(TestRepo.all("posts", prefix: "oops"))
end
## insert/update/delete life-cycle, including __meta__ state transitions.

test "insert, update and delete" do
  post = %Post{title: "insert, update, delete", text: "fetch empty"}
  meta = post.__meta__

  assert %Post{} = inserted = TestRepo.insert!(post)
  assert %Post{} = updated = TestRepo.update!(Ecto.Changeset.change(inserted, text: "new"))

  deleted_meta = put_in meta.state, :deleted
  assert %Post{__meta__: ^deleted_meta} = TestRepo.delete!(updated)

  loaded_meta = put_in meta.state, :loaded
  assert %Post{__meta__: ^loaded_meta} = TestRepo.insert!(post)

  post = TestRepo.one(Post)
  assert post.__meta__.state == :loaded
  assert post.inserted_at
end

test "insert, update and delete with field source" do
  permalink = %Permalink{url: "url"}
  assert %Permalink{url: "url"} = inserted =
           TestRepo.insert!(permalink)
  assert %Permalink{url: "new"} = updated =
           TestRepo.update!(Ecto.Changeset.change(inserted, url: "new"))
  assert %Permalink{url: "new"} =
           TestRepo.delete!(updated)
end

@tag :composite_pk
test "insert, update and delete with composite pk" do
  c1 = TestRepo.insert!(%CompositePk{a: 1, b: 2, name: "first"})
  c2 = TestRepo.insert!(%CompositePk{a: 1, b: 3, name: "second"})

  assert CompositePk |> first |> TestRepo.one == c1
  assert CompositePk |> last |> TestRepo.one == c2

  changeset = Ecto.Changeset.cast(c1, %{name: "first change"}, ~w(name)a)
  c1 = TestRepo.update!(changeset)
  assert TestRepo.get_by!(CompositePk, %{a: 1, b: 2}) == c1

  TestRepo.delete!(c2)
  assert TestRepo.all(CompositePk) == [c1]

  # get/get! require exactly one primary key field.
  assert_raise ArgumentError, ~r"to have exactly one primary key", fn ->
    TestRepo.get(CompositePk, [])
  end

  assert_raise ArgumentError, ~r"to have exactly one primary key", fn ->
    TestRepo.get!(CompositePk, [1, 2])
  end
end

@tag :composite_pk
test "insert, update and delete with associated composite pk" do
  user = TestRepo.insert!(%User{})
  post = TestRepo.insert!(%Post{title: "post title", text: "post text"})

  user_post = TestRepo.insert!(%PostUserCompositePk{user_id: user.id, post_id: post.id})
  assert TestRepo.get_by!(PostUserCompositePk, [user_id: user.id, post_id: post.id]) == user_post

  TestRepo.delete!(user_post)
  assert TestRepo.all(PostUserCompositePk) == []
end

@tag :invalid_prefix
test "insert, update and delete with invalid prefix" do
  post = TestRepo.insert!(%Post{})
  changeset = Ecto.Changeset.change(post, title: "foo")
  assert catch_error(TestRepo.insert(%Post{}, prefix: "oops"))
  assert catch_error(TestRepo.update(changeset, prefix: "oops"))
  assert catch_error(TestRepo.delete(changeset, prefix: "oops"))
end
## Changeset-driven writes, autogenerated and user-assigned keys.

test "insert and update with changeset" do
  # On insert we merge the fields and changes
  changeset = Ecto.Changeset.cast(%Post{text: "x", title: "wrong"},
                                  %{"title" => "hello", "temp" => "unknown"}, ~w(title temp)a)

  post = TestRepo.insert!(changeset)
  assert %Post{text: "x", title: "hello", temp: "unknown"} = post
  # NOTE(review): reloading yields temp: "temp", so :temp apparently is not
  # persisted — confirm against the Post schema (likely a virtual field).
  assert %Post{text: "x", title: "hello", temp: "temp"} = TestRepo.get!(Post, post.id)

  # On update we merge only fields, direct schema changes are discarded
  changeset = Ecto.Changeset.cast(%{post | text: "y"},
                                  %{"title" => "world", "temp" => "unknown"}, ~w(title temp)a)

  assert %Post{text: "y", title: "world", temp: "unknown"} = TestRepo.update!(changeset)
  assert %Post{text: "x", title: "world", temp: "temp"} = TestRepo.get!(Post, post.id)
end

test "insert and update with empty changeset" do
  # On insert we merge the fields and changes
  changeset = Ecto.Changeset.cast(%Permalink{}, %{}, ~w())
  assert %Permalink{} = permalink = TestRepo.insert!(changeset)

  # Assert we can update the same value twice,
  # without changes, without triggering stale errors.
  changeset = Ecto.Changeset.cast(permalink, %{}, ~w())
  assert TestRepo.update!(changeset) == permalink
  assert TestRepo.update!(changeset) == permalink
end

@tag :no_primary_key
test "insert with no primary key" do
  assert %Barebone{num: nil} = TestRepo.insert!(%Barebone{})
  assert %Barebone{num: 13} = TestRepo.insert!(%Barebone{num: 13})
end

@tag :read_after_writes
test "insert and update with changeset read after writes" do
  defmodule RAW do
    use Ecto.Schema

    schema "comments" do
      field :text, :string
      field :lock_version, :integer, read_after_writes: true
    end
  end

  changeset = Ecto.Changeset.cast(struct(RAW, %{}), %{}, ~w())

  # If the field is nil, we will not send it
  # and read the value back from the database.
  assert %{id: cid, lock_version: 1} = raw = TestRepo.insert!(changeset)

  # Set the counter to 11, so we can read it soon
  TestRepo.update_all from(u in RAW, where: u.id == ^cid), set: [lock_version: 11]

  # We will read back on update too
  changeset = Ecto.Changeset.cast(raw, %{"text" => "0"}, ~w(text)a)
  assert %{id: ^cid, lock_version: 11, text: "0"} = TestRepo.update!(changeset)
end

test "insert autogenerates for custom type" do
  post = TestRepo.insert!(%Post{uuid: nil})
  assert byte_size(post.uuid) == 36
  assert TestRepo.get_by(Post, uuid: post.uuid) == post
end

@tag :id_type
test "insert autogenerates for custom id type" do
  defmodule ID do
    use Ecto.Schema

    @primary_key {:id, CustomPermalink, autogenerate: true}
    schema "posts" do
    end
  end

  id = TestRepo.insert!(struct(ID, id: nil))
  assert id.id
  assert TestRepo.get_by(ID, id: "#{id.id}-hello") == id
end

@tag :id_type
@tag :assigns_id_type
test "insert with user-assigned primary key" do
  assert %Post{id: 1} = TestRepo.insert!(%Post{id: 1})
end

@tag :id_type
@tag :assigns_id_type
test "insert and update with user-assigned primary key in changeset" do
  changeset = Ecto.Changeset.cast(%Post{id: 11}, %{"id" => "13"}, ~w(id)a)
  assert %Post{id: 13} = post = TestRepo.insert!(changeset)

  changeset = Ecto.Changeset.cast(post, %{"id" => "15"}, ~w(id)a)
  assert %Post{id: 15} = TestRepo.update!(changeset)
end

test "insert and fetch a schema with utc timestamps" do
  datetime = DateTime.from_unix!(System.os_time(:second), :second)
  TestRepo.insert!(%User{inserted_at: datetime})
  assert [%{inserted_at: ^datetime}] = TestRepo.all(User)
end
## Optimistic locking via Ecto.Changeset.optimistic_lock/2,3.

test "optimistic locking in update/delete operations" do
  import Ecto.Changeset, only: [cast: 3, optimistic_lock: 2]
  base_post = TestRepo.insert!(%Comment{})

  changeset_ok =
    base_post
    |> cast(%{"text" => "foo.bar"}, ~w(text)a)
    |> optimistic_lock(:lock_version)
  TestRepo.update!(changeset_ok)

  # A changeset built from the pre-update struct carries a stale lock.
  changeset_stale = optimistic_lock(base_post, :lock_version)
  assert_raise Ecto.StaleEntryError, fn -> TestRepo.update!(changeset_stale) end
  assert_raise Ecto.StaleEntryError, fn -> TestRepo.delete!(changeset_stale) end
end

test "optimistic locking in update operation with nil field" do
  import Ecto.Changeset, only: [cast: 3, optimistic_lock: 3]

  base_post =
    %Comment{}
    |> cast(%{lock_version: nil}, [:lock_version])
    |> TestRepo.insert!()

  # Custom incrementer handles a nil starting version.
  incrementer =
    fn
      nil -> 1
      old_value -> old_value + 1
    end

  changeset_ok =
    base_post
    |> cast(%{"text" => "foo.bar"}, ~w(text)a)
    |> optimistic_lock(:lock_version, incrementer)
  updated = TestRepo.update!(changeset_ok)

  assert updated.text == "foo.bar"
  assert updated.lock_version == 1
end

test "optimistic locking in delete operation with nil field" do
  import Ecto.Changeset, only: [cast: 3, optimistic_lock: 3]

  base_post =
    %Comment{}
    |> cast(%{lock_version: nil}, [:lock_version])
    |> TestRepo.insert!()

  incrementer =
    fn
      nil -> 1
      old_value -> old_value + 1
    end

  changeset_ok = optimistic_lock(base_post, :lock_version, incrementer)
  TestRepo.delete!(changeset_ok)

  refute TestRepo.get(Comment, base_post.id)
end
## Unique constraint handling: raising without a declared constraint,
## mapping to changeset errors with one, and join-table variants.

@tag :unique_constraint
test "unique constraint" do
  changeset = Ecto.Changeset.change(%Post{}, uuid: Ecto.UUID.generate())
  {:ok, _} = TestRepo.insert(changeset)

  # Without unique_constraint/3 the violation raises Ecto.ConstraintError.
  exception =
    assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
      changeset
      |> TestRepo.insert()
    end

  assert exception.message =~ "posts_uuid_index (unique_constraint)"
  assert exception.message =~ "The changeset has not defined any constraint."
  assert exception.message =~ "call `unique_constraint/3`"

  # A declared constraint with a non-matching name still raises.
  message = ~r/constraint error when attempting to insert struct/
  exception =
    assert_raise Ecto.ConstraintError, message, fn ->
      changeset
      |> Ecto.Changeset.unique_constraint(:uuid, name: :posts_email_changeset)
      |> TestRepo.insert()
    end

  assert exception.message =~ "posts_email_changeset (unique_constraint)"

  # A matching declared constraint turns into a changeset error.
  {:error, changeset} =
    changeset
    |> Ecto.Changeset.unique_constraint(:uuid)
    |> TestRepo.insert()
  assert changeset.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "posts_uuid_index"]}]
  assert changeset.data.__meta__.state == :built
end

@tag :unique_constraint
test "unique constraint from association" do
  uuid = Ecto.UUID.generate()
  post = & %Post{} |> Ecto.Changeset.change(uuid: &1) |> Ecto.Changeset.unique_constraint(:uuid)

  {:error, changeset} =
    TestRepo.insert %User{
      comments: [%Comment{}],
      permalink: %Permalink{},
      posts: [post.(uuid), post.(uuid), post.(Ecto.UUID.generate())]
    }

  # Only the second (duplicate) post changeset carries the error.
  [_, p2, _] = changeset.changes.posts
  assert p2.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "posts_uuid_index"]}]
end

@tag :id_type
@tag :unique_constraint
test "unique constraint with binary_id" do
  changeset = Ecto.Changeset.change(%Custom{}, uuid: Ecto.UUID.generate())
  {:ok, _} = TestRepo.insert(changeset)

  {:error, changeset} =
    changeset
    |> Ecto.Changeset.unique_constraint(:uuid)
    |> TestRepo.insert()
  assert changeset.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "customs_uuid_index"]}]
  assert changeset.data.__meta__.state == :built
end

test "unique pseudo-constraint violation error message with join table at the repository" do
  post =
    TestRepo.insert!(%Post{title: "some post"})
    |> TestRepo.preload(:unique_users)

  user =
    TestRepo.insert!(%User{name: "some user"})

  # Violate the unique composite index
  {:error, changeset} =
    post
    |> Ecto.Changeset.change
    |> Ecto.Changeset.put_assoc(:unique_users, [user, user])
    |> TestRepo.update

  errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
  assert errors == %{unique_users: [%{}, %{id: ["has already been taken"]}]}

  refute changeset.valid?
end

@tag :join
@tag :unique_constraint
test "unique constraint violation error message with join table in single changeset" do
  post =
    TestRepo.insert!(%Post{title: "some post"})
    |> TestRepo.preload(:constraint_users)

  user =
    TestRepo.insert!(%User{name: "some user"})

  # Violate the unique composite index
  {:error, changeset} =
    post
    |> Ecto.Changeset.change
    |> Ecto.Changeset.put_assoc(:constraint_users, [user, user])
    |> Ecto.Changeset.unique_constraint(:user,
         name: :posts_users_composite_pk_post_id_user_id_index,
         message: "has already been assigned")
    |> TestRepo.update

  errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
  assert errors == %{constraint_users: [%{}, %{user: ["has already been assigned"]}]}

  refute changeset.valid?
end

@tag :join
@tag :unique_constraint
test "unique constraint violation error message with join table and separate changesets" do
  post =
    TestRepo.insert!(%Post{title: "some post"})
    |> TestRepo.preload(:constraint_users)

  user = TestRepo.insert!(%User{name: "some user"})

  # First association succeeds...
  post
  |> Ecto.Changeset.change
  |> Ecto.Changeset.put_assoc(:constraint_users, [user])
  |> TestRepo.update

  # Violate the unique composite index
  {:error, changeset} =
    post
    |> Ecto.Changeset.change
    |> Ecto.Changeset.put_assoc(:constraint_users, [user])
    |> Ecto.Changeset.unique_constraint(:user,
         name: :posts_users_composite_pk_post_id_user_id_index,
         message: "has already been assigned")
    |> TestRepo.update

  errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
  assert errors == %{constraint_users: [%{user: ["has already been assigned"]}]}

  refute changeset.valid?
end
## Foreign key / association constraint handling, plus the changeset
## helper functions used by the embed test.

@tag :foreign_key_constraint
test "foreign key constraint" do
  changeset = Ecto.Changeset.change(%Comment{post_id: 0})

  exception =
    assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
      changeset
      |> TestRepo.insert()
    end

  assert exception.message =~ "comments_post_id_fkey (foreign_key_constraint)"
  assert exception.message =~ "The changeset has not defined any constraint."
  assert exception.message =~ "call `foreign_key_constraint/3`"

  message = ~r/constraint error when attempting to insert struct/
  exception =
    assert_raise Ecto.ConstraintError, message, fn ->
      changeset
      |> Ecto.Changeset.foreign_key_constraint(:post_id, name: :comments_post_id_other)
      |> TestRepo.insert()
    end

  assert exception.message =~ "comments_post_id_other (foreign_key_constraint)"

  # A matching declared constraint becomes a changeset error.
  {:error, changeset} =
    changeset
    |> Ecto.Changeset.foreign_key_constraint(:post_id)
    |> TestRepo.insert()
  assert changeset.errors == [post_id: {"does not exist", [constraint: :foreign, constraint_name: "comments_post_id_fkey"]}]
end

@tag :foreign_key_constraint
test "assoc constraint" do
  changeset = Ecto.Changeset.change(%Comment{post_id: 0})

  exception =
    assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
      changeset
      |> TestRepo.insert()
    end

  assert exception.message =~ "comments_post_id_fkey (foreign_key_constraint)"
  assert exception.message =~ "The changeset has not defined any constraint."

  message = ~r/constraint error when attempting to insert struct/
  exception =
    assert_raise Ecto.ConstraintError, message, fn ->
      changeset
      |> Ecto.Changeset.assoc_constraint(:post, name: :comments_post_id_other)
      |> TestRepo.insert()
    end

  assert exception.message =~ "comments_post_id_other (foreign_key_constraint)"

  {:error, changeset} =
    changeset
    |> Ecto.Changeset.assoc_constraint(:post)
    |> TestRepo.insert()
  assert changeset.errors == [post: {"does not exist", [constraint: :assoc, constraint_name: "comments_post_id_fkey"]}]
end

@tag :foreign_key_constraint
test "no assoc constraint error" do
  user = TestRepo.insert!(%User{})
  TestRepo.insert!(%Permalink{user_id: user.id})

  exception =
    assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to delete struct/, fn ->
      TestRepo.delete!(user)
    end

  assert exception.message =~ "permalinks_user_id_fkey (foreign_key_constraint)"
  assert exception.message =~ "The changeset has not defined any constraint."
end

@tag :foreign_key_constraint
test "no assoc constraint with changeset mismatch" do
  user = TestRepo.insert!(%User{})
  TestRepo.insert!(%Permalink{user_id: user.id})

  message = ~r/constraint error when attempting to delete struct/
  exception =
    assert_raise Ecto.ConstraintError, message, fn ->
      user
      |> Ecto.Changeset.change
      |> Ecto.Changeset.no_assoc_constraint(:permalink, name: :permalinks_user_id_pther)
      |> TestRepo.delete()
    end

  # NOTE(review): ":permalinks_user_id_pther" looks like a typo of "_other",
  # but the assertion matches it consistently, so the test is coherent.
  assert exception.message =~ "permalinks_user_id_pther (foreign_key_constraint)"
end

@tag :foreign_key_constraint
test "no assoc constraint with changeset match" do
  user = TestRepo.insert!(%User{})
  TestRepo.insert!(%Permalink{user_id: user.id})

  {:error, changeset} =
    user
    |> Ecto.Changeset.change
    |> Ecto.Changeset.no_assoc_constraint(:permalink)
    |> TestRepo.delete()
  assert changeset.errors == [permalink: {"is still associated with this entry", [constraint: :no_assoc, constraint_name: "permalinks_user_id_fkey"]}]
end

@tag :foreign_key_constraint
test "insert and update with embeds during failing child foreign key" do
  changeset =
    Order
    |> struct(%{})
    |> order_changeset(%{item: %{price: 10}, permalink: %{post_id: 0}})

  # The embed change must survive the failed child association insert.
  {:error, changeset} = TestRepo.insert(changeset)
  assert %Ecto.Changeset{} = changeset.changes.item

  order =
    Order
    |> struct(%{})
    |> order_changeset(%{})
    |> TestRepo.insert!()
    |> TestRepo.preload([:permalink])

  changeset = order_changeset(order, %{item: %{price: 10}, permalink: %{post_id: 0}})
  assert %Ecto.Changeset{} = changeset.changes.item

  {:error, changeset} = TestRepo.update(changeset)
  assert %Ecto.Changeset{} = changeset.changes.item
end

# Changeset helpers used by the embed/foreign-key test above.
def order_changeset(order, params) do
  order
  |> Ecto.Changeset.cast(params, [:permalink_id])
  |> Ecto.Changeset.cast_embed(:item, with: &item_changeset/2)
  |> Ecto.Changeset.cast_assoc(:permalink, with: &permalink_changeset/2)
end

def item_changeset(item, params) do
  item
  |> Ecto.Changeset.cast(params, [:price])
end

def permalink_changeset(comment, params) do
  comment
  |> Ecto.Changeset.cast(params, [:post_id])
  |> Ecto.Changeset.assoc_constraint(:post)
end
## unsafe_validate_unique/3 — application-level uniqueness checks.

test "unsafe_validate_unique/3" do
  {:ok, inserted_post} = TestRepo.insert(%Post{title: "Greetings", text: "hi"})
  new_post_changeset = Post.changeset(%Post{}, %{title: "Greetings", text: "ho"})

  changeset = Ecto.Changeset.unsafe_validate_unique(new_post_changeset, [:title], TestRepo)
  assert changeset.errors[:title] ==
           {"has already been taken", validation: :unsafe_unique, fields: [:title]}

  # Uniqueness over [title, text] holds since the texts differ.
  changeset = Ecto.Changeset.unsafe_validate_unique(new_post_changeset, [:title, :text], TestRepo)
  assert changeset.errors[:title] == nil

  update_changeset = Post.changeset(inserted_post, %{text: "ho"})
  changeset = Ecto.Changeset.unsafe_validate_unique(update_changeset, [:title], TestRepo)
  assert changeset.errors[:title] == nil # cannot conflict with itself
end

test "unsafe_validate_unique/3 with composite keys" do
  {:ok, inserted_post} = TestRepo.insert(%CompositePk{a: 123, b: 456, name: "UniqueName"})

  different_pk = CompositePk.changeset(%CompositePk{}, %{name: "UniqueName", a: 789, b: 321})
  changeset = Ecto.Changeset.unsafe_validate_unique(different_pk, [:name], TestRepo)
  assert changeset.errors[:name] ==
           {"has already been taken", validation: :unsafe_unique, fields: [:name]}

  # Sharing only part of the composite key still counts as a conflict.
  partial_pk = CompositePk.changeset(%CompositePk{}, %{name: "UniqueName", a: 789, b: 456})
  changeset = Ecto.Changeset.unsafe_validate_unique(partial_pk, [:name], TestRepo)
  assert changeset.errors[:name] ==
           {"has already been taken", validation: :unsafe_unique, fields: [:name]}

  update_changeset = CompositePk.changeset(inserted_post, %{name: "NewName"})
  changeset = Ecto.Changeset.unsafe_validate_unique(update_changeset, [:name], TestRepo)
  assert changeset.valid?
  assert changeset.errors[:name] == nil # cannot conflict with itself
end
## Single-record fetching: get, get_by, first/last/one and exists?.

test "get(!)" do
  post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
  post2 = TestRepo.insert!(%Post{title: "2", text: "hai"})

  assert post1 == TestRepo.get(Post, post1.id)
  assert post2 == TestRepo.get(Post, to_string post2.id) # With casting
  assert post1 == TestRepo.get!(Post, post1.id)
  assert post2 == TestRepo.get!(Post, to_string post2.id) # With casting

  TestRepo.delete!(post1)

  assert nil == TestRepo.get(Post, post1.id)
  assert_raise Ecto.NoResultsError, fn ->
    TestRepo.get!(Post, post1.id)
  end
end

test "get(!) with custom source" do
  custom = Ecto.put_meta(%Custom{}, source: "posts")
  custom = TestRepo.insert!(custom)
  bid = custom.bid
  assert %Custom{bid: ^bid, __meta__: %{source: "posts"}} =
           TestRepo.get(from(c in {"posts", Custom}), bid)
end

test "get(!) with binary_id" do
  custom = TestRepo.insert!(%Custom{})
  bid = custom.bid
  assert %Custom{bid: ^bid} = TestRepo.get(Custom, bid)
end

test "get_by(!)" do
  post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
  post2 = TestRepo.insert!(%Post{title: "2", text: "hello"})

  assert post1 == TestRepo.get_by(Post, id: post1.id)
  assert post1 == TestRepo.get_by(Post, text: post1.text)
  assert post1 == TestRepo.get_by(Post, id: post1.id, text: post1.text)
  assert post2 == TestRepo.get_by(Post, id: to_string(post2.id)) # With casting
  assert nil == TestRepo.get_by(Post, text: "hey")
  assert nil == TestRepo.get_by(Post, id: post2.id, text: "hey")

  assert post1 == TestRepo.get_by!(Post, id: post1.id)
  assert post1 == TestRepo.get_by!(Post, text: post1.text)
  assert post1 == TestRepo.get_by!(Post, id: post1.id, text: post1.text)
  assert post2 == TestRepo.get_by!(Post, id: to_string(post2.id)) # With casting

  assert post1 == TestRepo.get_by!(Post, %{id: post1.id})

  assert_raise Ecto.NoResultsError, fn ->
    TestRepo.get_by!(Post, id: post2.id, text: "hey")
  end
end

test "first, last and one(!)" do
  post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
  post2 = TestRepo.insert!(%Post{title: "2", text: "hai"})

  assert post1 == Post |> first |> TestRepo.one
  assert post2 == Post |> last |> TestRepo.one

  query = from p in Post, order_by: p.title
  assert post1 == query |> first |> TestRepo.one
  assert post2 == query |> last |> TestRepo.one

  # first/last respect (and invert) an existing order_by.
  query = from p in Post, order_by: [desc: p.title], limit: 10
  assert post2 == query |> first |> TestRepo.one
  assert post1 == query |> last |> TestRepo.one

  query = from p in Post, where: is_nil(p.id)
  refute query |> first |> TestRepo.one
  refute query |> last |> TestRepo.one
  assert_raise Ecto.NoResultsError, fn -> query |> first |> TestRepo.one! end
  assert_raise Ecto.NoResultsError, fn -> query |> last |> TestRepo.one! end
end

test "exists?" do
  TestRepo.insert!(%Post{title: "1", text: "hai", visits: 2})
  TestRepo.insert!(%Post{title: "2", text: "hai", visits: 1})

  query = from p in Post, where: not is_nil(p.title), limit: 2
  assert query |> TestRepo.exists? == true

  query = from p in Post, where: p.title == "1", select: p.title
  assert query |> TestRepo.exists? == true

  query = from p in Post, where: is_nil(p.id)
  assert query |> TestRepo.exists? == false

  # NOTE(review): this repeats the previous query/assertion verbatim —
  # one of the two was probably meant to exercise a different clause.
  query = from p in Post, where: is_nil(p.id)
  assert query |> TestRepo.exists? == false

  query = from(p in Post, select: {p.visits, avg(p.visits)}, group_by: p.visits, having: avg(p.visits) > 1)
  assert query |> TestRepo.exists? == true
end
test "aggregate" do
  # Aggregating an empty table yields nil.
  assert TestRepo.aggregate(Post, :max, :visits) == nil

  TestRepo.insert!(%Post{visits: 10})
  TestRepo.insert!(%Post{visits: 12})
  TestRepo.insert!(%Post{visits: 14})
  TestRepo.insert!(%Post{visits: 14})

  # Barebones
  assert TestRepo.aggregate(Post, :max, :visits) == 14
  assert TestRepo.aggregate(Post, :min, :visits) == 10
  assert TestRepo.aggregate(Post, :count, :visits) == 4
  # sum/avg go through to_string to stay adapter-agnostic on numeric types.
  assert "50" = to_string(TestRepo.aggregate(Post, :sum, :visits))
  assert "12.5" <> _ = to_string(TestRepo.aggregate(Post, :avg, :visits))

  # With order_by
  query = from Post, order_by: [asc: :visits]
  assert TestRepo.aggregate(query, :max, :visits) == 14

  # With order_by and limit
  query = from Post, order_by: [asc: :visits], limit: 2
  assert TestRepo.aggregate(query, :max, :visits) == 12

  # With distinct
  query = from Post, order_by: [asc: :visits], distinct: true
  assert TestRepo.aggregate(query, :count, :visits) == 3
end
@tag :insert_cell_wise_defaults
# insert_all accepts a bare source name or a {source, schema} tuple, and rows as
# keyword lists or maps. Rows that omit lock_version get the per-column default
# of 1 (asserted below).
test "insert all" do
assert {2, nil} = TestRepo.insert_all("comments", [[text: "1"], %{text: "2", lock_version: 2}])
assert {2, nil} = TestRepo.insert_all({"comments", Comment}, [[text: "3"], %{text: "4", lock_version: 2}])
assert [%Comment{text: "1", lock_version: 1},
        %Comment{text: "2", lock_version: 2},
        %Comment{text: "3", lock_version: 1},
        %Comment{text: "4", lock_version: 2}] = TestRepo.all(Comment)
# Empty rows fall back entirely to column defaults.
assert {2, nil} = TestRepo.insert_all(Post, [[], []])
assert [%Post{}, %Post{}] = TestRepo.all(Post)
# An empty row list is a no-op for both source forms.
assert {0, nil} = TestRepo.insert_all("posts", [])
assert {0, nil} = TestRepo.insert_all({"posts", Post}, [])
end
@tag :insert_select
# Individual cell values may be queries, evaluated per inserted row.
test "insert all with query" do
comment = TestRepo.insert!(%Comment{text: "1", lock_version: 1})
text_query = from(c in Comment, select: c.text, where: [id: ^comment.id, lock_version: 1])
lock_version_query = from(c in Comment, select: c.lock_version, where: [id: ^comment.id])
rows = [
  [text: "2", lock_version: lock_version_query],
  [lock_version: lock_version_query, text: "3"],
  [text: text_query],
  [text: text_query, lock_version: lock_version_query],
  [lock_version: 6, text: "6"]
]
assert {5, nil} = TestRepo.insert_all(Comment, rows, [])
inserted_rows = Comment
|> where([c], c.id != ^comment.id)
|> TestRepo.all()
# Rows built from text_query inherit "1" from the seed comment; the row that
# gave no lock_version gets the column default.
assert [%Comment{text: "2", lock_version: 1},
        %Comment{text: "3", lock_version: 1},
        %Comment{text: "1"},
        %Comment{text: "1", lock_version: 1},
        %Comment{text: "6", lock_version: 6}] = inserted_rows
end
@tag :invalid_prefix
@tag :insert_cell_wise_defaults
# A non-existent prefix (schema/database qualifier) must surface an adapter error.
test "insert all with invalid prefix" do
assert catch_error(TestRepo.insert_all(Post, [[], []], prefix: "oops"))
end
@tag :returning
@tag :insert_cell_wise_defaults
# :returning controls the second tuple element: [] vs nil for empty inserts,
# and loaded structs when fields (or true = all fields) are requested.
test "insert all with returning with schema" do
assert {0, []} = TestRepo.insert_all(Comment, [], returning: true)
assert {0, nil} = TestRepo.insert_all(Comment, [], returning: false)
{2, [c1, c2]} = TestRepo.insert_all(Comment, [[text: "1"], [text: "2"]], returning: [:id, :text])
assert %Comment{text: "1", __meta__: %{state: :loaded}} = c1
assert %Comment{text: "2", __meta__: %{state: :loaded}} = c2
{2, [c1, c2]} = TestRepo.insert_all(Comment, [[text: "3"], [text: "4"]], returning: true)
assert %Comment{text: "3", __meta__: %{state: :loaded}} = c1
assert %Comment{text: "4", __meta__: %{state: :loaded}} = c2
end
@tag :returning
@tag :insert_cell_wise_defaults
# Same as above but through Permalink, whose :url field has a custom source
# column — the returned structs must be mapped back through the field source.
test "insert all with returning with schema with field source" do
assert {0, []} = TestRepo.insert_all(Permalink, [], returning: true)
assert {0, nil} = TestRepo.insert_all(Permalink, [], returning: false)
{2, [c1, c2]} = TestRepo.insert_all(Permalink, [[url: "1"], [url: "2"]], returning: [:id, :url])
assert %Permalink{url: "1", __meta__: %{state: :loaded}} = c1
assert %Permalink{url: "2", __meta__: %{state: :loaded}} = c2
{2, [c1, c2]} = TestRepo.insert_all(Permalink, [[url: "3"], [url: "4"]], returning: true)
assert %Permalink{url: "3", __meta__: %{state: :loaded}} = c1
assert %Permalink{url: "4", __meta__: %{state: :loaded}} = c2
end
@tag :returning
@tag :insert_cell_wise_defaults
# Without a schema, explicit fields return plain maps; `returning: true`
# cannot be expanded to "all fields" and must raise.
test "insert all with returning without schema" do
{2, [c1, c2]} = TestRepo.insert_all("comments", [[text: "1"], [text: "2"]], returning: [:id, :text])
assert %{id: _, text: "1"} = c1
assert %{id: _, text: "2"} = c2
assert_raise ArgumentError, fn ->
  TestRepo.insert_all("comments", [[text: "1"], [text: "2"]], returning: true)
end
end
@tag :insert_cell_wise_defaults
test "insert all with dumping" do
  # insert_all must dump the UUID through the schema type before it reaches
  # the adapter, and read it back unchanged.
  generated_uuid = Ecto.UUID.generate()
  assert {1, nil} = TestRepo.insert_all(Post, [%{uuid: generated_uuid}])
  assert [%Post{uuid: ^generated_uuid, title: nil}] = TestRepo.all(Post)
end
@tag :insert_cell_wise_defaults
# Custom's :bid primary key is a binary_id: it must be autogenerated both by
# insert! and by insert_all when the row does not provide one.
test "insert all autogenerates for binary_id type" do
custom = TestRepo.insert!(%Custom{bid: nil})
assert custom.bid
assert TestRepo.get(Custom, custom.bid)
assert TestRepo.delete!(custom)
refute TestRepo.get(Custom, custom.bid)
uuid = Ecto.UUID.generate()
assert {2, nil} = TestRepo.insert_all(Custom, [%{uuid: uuid}, %{bid: custom.bid}])
# Sorting by uuid puts the nil-uuid row first (nil sorts before binaries in
# Erlang term order), so bid2 belongs to the explicitly-supplied bid.
assert [%Custom{bid: bid2, uuid: nil},
        %Custom{bid: bid1, uuid: ^uuid}] = Enum.sort_by(TestRepo.all(Custom), & &1.uuid)
assert bid1 && bid2
assert custom.bid != bid1
assert custom.bid == bid2
end
test "update all" do
  ids =
    for title <- ~w(1 2 3) do
      %Post{id: id} = TestRepo.insert!(%Post{title: title})
      id
    end

  # Schema-based update_all touches every row; read each one back.
  assert {3, nil} = TestRepo.update_all(Post, set: [title: "x"])

  Enum.each(ids, fn id ->
    assert %Post{title: "x"} = TestRepo.get(Post, id)
  end)

  # Updating through the bare table name behaves identically.
  assert {3, nil} = TestRepo.update_all("posts", set: [title: nil])

  Enum.each(ids, fn id ->
    assert %Post{title: nil} = TestRepo.get(Post, id)
  end)
end
@tag :invalid_prefix
# A non-existent prefix must surface an adapter error.
test "update all with invalid prefix" do
assert catch_error(TestRepo.update_all(Post, [set: [title: "x"]], prefix: "oops"))
end
@tag :returning
# A select on the queryable makes update_all return the updated rows.
test "update all with returning with schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
assert {3, posts} = TestRepo.update_all(select(Post, [p], p), [set: [title: "x"]])
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert %Post{id: ^id1, title: "x"} = p1
assert %Post{id: ^id2, title: "x"} = p2
assert %Post{id: ^id3, title: "x"} = p3
# Selecting only some fields leaves the rest nil in the returned structs.
assert {3, posts} = TestRepo.update_all(select(Post, [:id, :visits]), [set: [visits: 11]])
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert %Post{id: ^id1, title: nil, visits: 11} = p1
assert %Post{id: ^id2, title: nil, visits: 11} = p2
assert %Post{id: ^id3, title: nil, visits: 11} = p3
end
@tag :returning
# Without a schema, the returned rows are plain maps of the selected fields.
test "update all with returning without schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
assert {3, posts} = TestRepo.update_all(select("posts", [:id, :title]), [set: [title: "x"]])
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert p1 == %{id: id1, title: "x"}
assert p2 == %{id: id2, title: "x"}
assert p3 == %{id: id3, title: "x"}
end
# The :update in the query and the :set in the opts are merged; only rows
# matching the where clause are touched.
test "update all with filter" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
query = from(p in Post, where: p.title == "1" or p.title == "2",
             update: [set: [text: ^"y"]])
assert {2, nil} = TestRepo.update_all(query, set: [title: "x"])
assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id1)
assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id2)
assert %Post{title: "3", text: nil} = TestRepo.get(Post, id3)
end
# A non-matching filter reports zero updates and leaves every row untouched.
test "update all no entries" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
query = from(p in Post, where: p.title == "4")
assert {0, nil} = TestRepo.update_all(query, set: [title: "x"])
assert %Post{title: "1"} = TestRepo.get(Post, id1)
assert %Post{title: "2"} = TestRepo.get(Post, id2)
assert %Post{title: "3"} = TestRepo.get(Post, id3)
end
# :inc updates add to the stored value; negative increments decrement.
test "update all increment syntax" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", visits: 0})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", visits: 1})
# Positive
query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: 2]]
assert {2, nil} = TestRepo.update_all(query, [])
assert %Post{visits: 2} = TestRepo.get(Post, id1)
assert %Post{visits: 3} = TestRepo.get(Post, id2)
# Negative
query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: -1]]
assert {2, nil} = TestRepo.update_all(query, [])
assert %Post{visits: 1} = TestRepo.get(Post, id1)
assert %Post{visits: 2} = TestRepo.get(Post, id2)
end
@tag :id_type
# A string value for an :id-typed column must be cast back to an integer.
test "update all with casting and dumping on id type field" do
assert %Post{id: id1} = TestRepo.insert!(%Post{})
assert {1, nil} = TestRepo.update_all(Post, set: [counter: to_string(id1)])
assert %Post{counter: ^id1} = TestRepo.get(Post, id1)
end
test "update all with casting and dumping" do
  # Values handed to :set are cast and dumped through the schema types,
  # including naive datetimes.
  new_text = "hai"
  timestamp = ~N[2014-01-16 20:26:51]
  assert %Post{id: post_id} = TestRepo.insert!(%Post{})
  assert {1, nil} = TestRepo.update_all(Post, set: [text: new_text, inserted_at: timestamp])
  assert %Post{text: "hai", inserted_at: ^timestamp} = TestRepo.get(Post, post_id)
end
test "delete all" do
  # Three rows in; delete_all reports all three gone and the table is empty.
  Enum.each(~w(1 2 3), fn title ->
    TestRepo.insert!(%Post{title: title, text: "hai"})
  end)

  assert {3, nil} = TestRepo.delete_all(Post)
  assert [] = TestRepo.all(Post)
end
@tag :invalid_prefix
# A non-existent prefix must surface an adapter error.
test "delete all with invalid prefix" do
assert catch_error(TestRepo.delete_all(Post, prefix: "oops"))
end
@tag :returning
# A select on the queryable makes delete_all return the deleted rows as structs.
test "delete all with returning with schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"})
assert {3, posts} = TestRepo.delete_all(select(Post, [p], p))
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert %Post{id: ^id1, title: "1"} = p1
assert %Post{id: ^id2, title: "2"} = p2
assert %Post{id: ^id3, title: "3"} = p3
end
@tag :returning
# Without a schema, the deleted rows come back as plain maps of the selection.
test "delete all with returning without schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"})
assert {3, posts} = TestRepo.delete_all(select("posts", [:id, :title]))
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert p1 == %{id: id1, title: "1"}
assert p2 == %{id: id2, title: "2"}
assert p3 == %{id: id3, title: "3"}
end
test "delete all with filter" do
  Enum.each(~w(1 2 3), fn title ->
    TestRepo.insert!(%Post{title: title, text: "hai"})
  end)

  # Only the two matching titles are deleted; the third row survives.
  to_delete = from(p in Post, where: p.title == "1" or p.title == "2")
  assert {2, nil} = TestRepo.delete_all(to_delete)
  assert [%Post{}] = TestRepo.all(Post)
end
# A non-matching filter deletes nothing and leaves every row intact.
test "delete all no entries" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"})
query = from(p in Post, where: p.title == "4")
assert {0, nil} = TestRepo.delete_all(query)
assert %Post{title: "1"} = TestRepo.get(Post, id1)
assert %Post{title: "2"} = TestRepo.get(Post, id2)
assert %Post{title: "3"} = TestRepo.get(Post, id3)
end
# The virtual :temp field still holds its struct default after a round-trip,
# i.e. it is not read from the database.
test "virtual field" do
assert %Post{id: id} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert TestRepo.get(Post, id).temp == "temp"
end
## Query syntax
# Bare (non-schema) struct used to test selecting into an arbitrary struct.
defmodule Foo do
defstruct [:title]
end
describe "query select" do
# select can build tuples, lists, maps (with mixed literal and dynamic keys)
# and arbitrary structs out of the queried fields.
test "expressions" do
%Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert [{"1", "hai"}] ==
       TestRepo.all(from p in Post, select: {p.title, p.text})
assert [["1", "hai"]] ==
       TestRepo.all(from p in Post, select: [p.title, p.text])
assert [%{:title => "1", 3 => "hai", "text" => "hai"}] ==
       TestRepo.all(from p in Post, select: %{
         :title => p.title,
         "text" => p.text,
         3 => p.text
       })
# A queried field (p.title evaluates to "1") may itself be used as a map key.
assert [%{:title => "1", "1" => "hai", "text" => "hai"}] ==
       TestRepo.all(from p in Post, select: %{
         :title => p.title,
         p.title => p.text,
         "text" => p.text
       })
assert [%Foo{title: "1"}] ==
       TestRepo.all(from p in Post, select: %Foo{title: p.title})
end
# Map-update syntax in select replaces fields on the loaded row, with the
# usual KeyError/BadMapError/BadStructError semantics preserved.
test "map update" do
%Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert [%Post{:title => "new title", text: "hai"}] =
       TestRepo.all(from p in Post, select: %{p | title: "new title"})
assert [%Post{title: "new title", text: "hai"}] =
       TestRepo.all(from p in Post, select: %Post{p | title: "new title"})
assert_raise KeyError, fn ->
  TestRepo.all(from p in Post, select: %{p | unknown: "new title"})
end
assert_raise BadMapError, fn ->
  TestRepo.all(from p in Post, select: %{p.title | title: "new title"})
end
assert_raise BadStructError, fn ->
  TestRepo.all(from p in Post, select: %Foo{p | title: p.title})
end
end
# struct/2 in select loads only the listed fields into a %Post{}; the
# unselected fields stay nil.
test "take with structs" do
%{id: pid1} = TestRepo.insert!(%Post{title: "1"})
%{id: pid2} = TestRepo.insert!(%Post{title: "2"})
%{id: pid3} = TestRepo.insert!(%Post{title: "3"})
[p1, p2, p3] = Post |> select([p], struct(p, [:title])) |> order_by([:title]) |> TestRepo.all
refute p1.id
assert p1.title == "1"
assert match?(%Post{}, p1)
refute p2.id
assert p2.title == "2"
assert match?(%Post{}, p2)
refute p3.id
assert p3.title == "3"
assert match?(%Post{}, p3)
# A bare field list in select is shorthand for taking those fields.
[p1, p2, p3] = Post |> select([:id]) |> order_by([:id]) |> TestRepo.all
assert %Post{id: ^pid1} = p1
assert %Post{id: ^pid2} = p2
assert %Post{id: ^pid3} = p3
end
# map/2 against a bare source returns plain maps instead of structs.
test "take with maps" do
%{id: pid1} = TestRepo.insert!(%Post{title: "1"})
%{id: pid2} = TestRepo.insert!(%Post{title: "2"})
%{id: pid3} = TestRepo.insert!(%Post{title: "3"})
[p1, p2, p3] = "posts" |> select([p], map(p, [:title])) |> order_by([:title]) |> TestRepo.all
assert p1 == %{title: "1"}
assert p2 == %{title: "2"}
assert p3 == %{title: "3"}
[p1, p2, p3] = "posts" |> select([:id]) |> order_by([:id]) |> TestRepo.all
assert p1 == %{id: pid1}
assert p2 == %{id: pid2}
assert p3 == %{id: pid3}
end
# Nested field lists ("take") apply to preloaded associations as well, for
# the plain, struct/2 and map/2 select forms.
test "take with preload assocs" do
%{id: pid} = TestRepo.insert!(%Post{title: "post"})
TestRepo.insert!(%Comment{post_id: pid, text: "comment"})
fields = [:id, :title, comments: [:text, :post_id]]
[p] = Post |> preload(:comments) |> select([p], ^fields) |> TestRepo.all
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
[p] = Post |> preload(:comments) |> select([p], struct(p, ^fields)) |> TestRepo.all
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
[p] = Post |> preload(:comments) |> select([p], map(p, ^fields)) |> TestRepo.all
assert p == %{id: pid, title: "post", comments: [%{text: "comment", post_id: pid}]}
end
# A missing association comes back as nil in every take form, not as a map.
test "take with nil preload assoc" do
%{id: cid} = TestRepo.insert!(%Comment{text: "comment"})
fields = [:id, :text, post: [:title]]
[c] = Comment |> preload(:post) |> select([c], ^fields) |> TestRepo.all
assert %Comment{id: ^cid, text: "comment", post: nil} = c
[c] = Comment |> preload(:post) |> select([c], struct(c, ^fields)) |> TestRepo.all
assert %Comment{id: ^cid, text: "comment", post: nil} = c
[c] = Comment |> preload(:post) |> select([c], map(c, ^fields)) |> TestRepo.all
assert c == %{id: cid, text: "comment", post: nil}
end
# Take also works when the association is loaded via a join + preload binding.
test "take with join assocs" do
%{id: pid} = TestRepo.insert!(%Post{title: "post"})
%{id: cid} = TestRepo.insert!(%Comment{post_id: pid, text: "comment"})
fields = [:id, :title, comments: [:text, :post_id, :id]]
query = from p in Post, where: p.id == ^pid, join: c in assoc(p, :comments), preload: [comments: c]
p = TestRepo.one(from q in query, select: ^fields)
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
p = TestRepo.one(from q in query, select: struct(q, ^fields))
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
p = TestRepo.one(from q in query, select: map(q, ^fields))
assert p == %{id: pid, title: "post", comments: [%{text: "comment", post_id: pid, id: cid}]}
end
# Taking a single nil column must still produce a key with a nil value.
test "take with single nil column" do
%Post{} = TestRepo.insert!(%Post{title: "1", counter: nil})
assert %{counter: nil} =
       TestRepo.one(from p in Post, where: p.title == "1", select: [:counter])
end
test "take with join assocs and single nil column" do
  # A nil column must survive map/2 selection even when the query joins
  # another table. Fix: `counter: nil` belongs in the %Post{} struct — it was
  # previously passed as an option to TestRepo.insert!/2, where it is not a
  # field (the sibling test above shows the intended struct form).
  %{id: post_id} = TestRepo.insert!(%Post{title: "1", counter: nil})
  TestRepo.insert!(%Comment{post_id: post_id, text: "comment"})
  assert %{counter: nil} ==
         TestRepo.one(from p in Post, join: c in assoc(p, :comments), where: p.title == "1", select: map(p, [:counter]))
end
test "field source" do
  # :url on Permalink maps to a differently named database column; both a
  # plain select and an aggregate must route through the field-source mapping.
  TestRepo.insert!(%Permalink{url: "url"})

  selected_urls = TestRepo.all(select(Permalink, [p], p.url))
  assert selected_urls == ["url"]

  url_counts = TestRepo.all(select(Permalink, [p], count(p.url)))
  assert url_counts == [1]
end
# merge/2 and select_merge/3 overlay a map onto the current selection; both
# spellings must agree for sources, structs and plain maps.
test "merge" do
%Post{} = TestRepo.insert!(%Post{title: "1", counter: nil})
# Merge on source
assert [%Post{title: "2"}] =
       Post |> select([p], merge(p, %{title: "2"})) |> TestRepo.all()
assert [%Post{title: "2"}] =
       Post |> select([p], p) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
# Merge on struct
assert [%Post{title: "2"}] =
       Post |> select([p], merge(%Post{title: p.title}, %{title: "2"})) |> TestRepo.all()
assert [%Post{title: "2"}] =
       Post |> select([p], %Post{title: p.title}) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
# Merge on map
assert [%{title: "2"}] =
       Post |> select([p], merge(%{title: p.title}, %{title: "2"})) |> TestRepo.all()
assert [%{title: "2"}] =
       Post |> select([p], %{title: p.title}) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
end
# The merged map may use map-update syntax on the binding itself.
test "merge with update on self" do
%Post{} = TestRepo.insert!(%Post{title: "1", counter: 1})
assert [%Post{title: "1", counter: 2}] =
       Post |> select([p], merge(p, %{p | counter: 2})) |> TestRepo.all()
assert [%Post{title: "1", counter: 2}] =
       Post |> select([p], p) |> select_merge([p], %{p | counter: 2}) |> TestRepo.all()
end
# select_merge must also survive being wrapped in a subquery.
test "merge within subquery" do
%Post{} = TestRepo.insert!(%Post{title: "1", counter: 1})
subquery =
  Post
  |> select_merge([p], %{p | counter: 2})
  |> subquery()
assert [%Post{title: "1", counter: 2}] = TestRepo.all(subquery)
end
end
test "query count distinct" do
  # Two duplicate titles plus one unique: plain count sees three rows,
  # count(..., :distinct) collapses the duplicate.
  Enum.each(["1", "1", "2"], fn title ->
    TestRepo.insert!(%Post{title: title})
  end)

  assert [3] == TestRepo.all(select(Post, [p], count(p.title)))
  assert [2] == TestRepo.all(select(Post, [p], count(p.title, :distinct)))
end
# Keyword lists in where — literal or fully interpolated with ^ — are ANDed
# field filters; an empty list matches everything, and nil values are allowed.
test "query where interpolation" do
post1 = TestRepo.insert!(%Post{text: "x", title: "hello"})
post2 = TestRepo.insert!(%Post{text: "y", title: "goodbye"})
assert [post1, post2] == Post |> where([], []) |> TestRepo.all |> Enum.sort_by(& &1.id)
assert [post1] == Post |> where([], [title: "hello"]) |> TestRepo.all
assert [post1] == Post |> where([], [title: "hello", id: ^post1.id]) |> TestRepo.all
params0 = []
params1 = [title: "hello"]
params2 = [title: "hello", id: post1.id]
assert [post1, post2] == (from Post, where: ^params0) |> TestRepo.all |> Enum.sort_by(& &1.id)
assert [post1] == (from Post, where: ^params1) |> TestRepo.all
assert [post1] == (from Post, where: ^params2) |> TestRepo.all
# A nil value in the interpolated filter matches the row whose uuid is nil.
post3 = TestRepo.insert!(%Post{text: "y", title: "goodbye", uuid: nil})
params3 = [title: "goodbye", uuid: post3.uuid]
assert [post3] == (from Post, where: ^params3) |> TestRepo.all
end
describe "upsert via insert" do
@describetag :upsert
# The default on_conflict (:raise) errors on a duplicate primary key.
test "on conflict raise" do
{:ok, inserted} = TestRepo.insert(%Post{title: "first"}, on_conflict: :raise)
assert catch_error(TestRepo.insert(%Post{id: inserted.id, title: "second"}, on_conflict: :raise))
end
# :nothing skips the conflicting insert; the returned struct is still marked
# loaded but carries no id.
test "on conflict ignore" do
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing)
assert inserted.id
assert inserted.__meta__.state == :loaded
{:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing)
assert not_inserted.id == nil
assert not_inserted.__meta__.state == :loaded
end
@tag :with_conflict_target
# Child associations still insert alongside an upserting parent.
test "on conflict and associations" do
on_conflict = [set: [title: "second"]]
post = %Post{uuid: Ecto.UUID.generate(),
             title: "first", comments: [%Comment{}]}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert inserted.id
end
@tag :with_conflict_target
# insert_all upserts accept :inc updates on conflict.
test "on conflict with inc" do
uuid = "6fa459ea-ee8a-3ca4-894e-db77e160355e"
post = %Post{title: "first", uuid: uuid}
{:ok, _} = TestRepo.insert(post)
post = %{title: "upsert", uuid: uuid}
TestRepo.insert_all(Post, [post], on_conflict: [inc: [visits: 1]], conflict_target: :uuid)
end
@tag :with_conflict_target
test "on conflict ignore and conflict target" do
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
assert inserted.id
# Error on non-conflict target
assert catch_error(TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:id]))
# On the declared conflict target the conflict is ignored: no error, no insert
{:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
assert not_inserted.id == nil
end
@tag :without_conflict_target
# A keyword list of updates is applied on conflict. Note the returned struct
# keeps the in-memory title; only the database row reflects the update.
test "on conflict keyword list" do
on_conflict = [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict)
assert inserted.id
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict)
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :with_conflict_target
test "on conflict keyword list and conflict target" do
on_conflict = [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert inserted.id
# Error on non-conflict target
assert catch_error(TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:id]))
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :returning
@tag :with_conflict_target
# :returning controls whether the database-generated uuid is read back on the
# conflicting inserts; with returning: false the struct keeps a fresh value.
test "on conflict keyword list and conflict target and returning" do
{:ok, c1} = TestRepo.insert(%Post{})
{:ok, c2} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: [:id, :uuid])
{:ok, c3} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: true)
{:ok, c4} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: false)
assert c2.uuid == c1.uuid
assert c3.uuid == c1.uuid
assert c4.uuid != c1.uuid
end
@tag :with_conflict_target
@tag :with_conflict_target_on_constraint
# The conflict target may also be a named constraint rather than a column list.
test "on conflict keyword list and conflict target on constraint" do
on_conflict = [set: [title: "new"]]
post = %Post{title: "old"}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: {:constraint, :posts_pkey})
assert inserted.id
{:ok, updated} = TestRepo.insert(%{post | id: inserted.id}, on_conflict: on_conflict, conflict_target: {:constraint, :posts_pkey})
assert updated.id == inserted.id
assert updated.title != "new"
assert TestRepo.get!(Post, inserted.id).title == "new"
end
@tag :returning
@tag :with_conflict_target
# With a source-mapped field (:url), the returned value must reflect what the
# on_conflict update wrote, via both field-list and `true` returning.
test "on conflict keyword list and conflict target and returning and field source" do
TestRepo.insert!(%Permalink{url: "old"})
{:ok, c1} = TestRepo.insert(%Permalink{url: "old"},
                            on_conflict: [set: [url: "new1"]],
                            conflict_target: [:url],
                            returning: [:url])
TestRepo.insert!(%Permalink{url: "old"})
{:ok, c2} = TestRepo.insert(%Permalink{url: "old"},
                            on_conflict: [set: [url: "new2"]],
                            conflict_target: [:url],
                            returning: true)
assert c1.url == "new1"
assert c2.url == "new2"
end
@tag :returning
@tag :with_conflict_target
# Even with returning: true, an ignored conflicting insert yields no id.
test "on conflict ignore and returning" do
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
assert inserted.id
{:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid], returning: true)
assert not_inserted.id == nil
end
@tag :without_conflict_target
# on_conflict may be a full update query instead of a keyword list.
test "on conflict query" do
on_conflict = from Post, update: [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict)
assert inserted.id
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict)
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :with_conflict_target
test "on conflict query and conflict target" do
on_conflict = from Post, update: [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert inserted.id
# Error on non-conflict target
assert catch_error(TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:id]))
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :with_conflict_target
# If the on_conflict query's where clause filters out the row, the upsert
# affects nothing and the insert is reported stale.
test "on conflict query having condition" do
post = %Post{title: "first", counter: 1, uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post)
on_conflict = from Post, where: [counter: 2], update: [set: [title: "second"]]
insert_options = [
  on_conflict: on_conflict,
  conflict_target: [:uuid],
  stale_error_field: :counter
]
assert {:error, changeset} = TestRepo.insert(post, insert_options)
assert changeset.errors == [counter: {"is stale", [stale: true]}]
assert TestRepo.get!(Post, inserted.id).title == "first"
end
@tag :without_conflict_target
# :replace_all overwrites every column — including the primary key, hence the
# new id — while keeping a single row.
test "on conflict replace_all" do
post = %Post{title: "first", text: "text", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all)
assert inserted.id
post = %Post{title: "updated", text: "updated", uuid: post.uuid}
post = TestRepo.insert!(post, on_conflict: :replace_all)
assert post.id != inserted.id
assert post.title == "updated"
assert post.text == "updated"
assert TestRepo.all(from p in Post, select: {p.id, p.title, p.text}) ==
       [{post.id, "updated", "updated"}]
assert TestRepo.all(from p in Post, select: count(p.id)) == [1]
end
@tag :with_conflict_target
# Same semantics when the conflict is detected via an explicit target column.
test "on conflict replace_all and conflict target" do
post = %Post{title: "first", text: "text", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all, conflict_target: :uuid)
assert inserted.id
post = %Post{title: "updated", text: "updated", uuid: post.uuid}
post = TestRepo.insert!(post, on_conflict: :replace_all, conflict_target: :uuid)
assert post.id != inserted.id
assert post.title == "updated"
assert post.text == "updated"
assert TestRepo.all(from p in Post, select: {p.id, p.title, p.text}) ==
       [{post.id, "updated", "updated"}]
assert TestRepo.all(from p in Post, select: count(p.id)) == [1]
end
end
describe "upsert via insert_all" do
@describetag :upsert_all
# insert_all with the default :raise errors on a duplicate row.
test "on conflict raise" do
post = [title: "first", uuid: Ecto.UUID.generate()]
{1, nil} = TestRepo.insert_all(Post, [post], on_conflict: :raise)
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :raise))
end
# :nothing without a target: the reported row count is adapter-specific.
test "on conflict ignore" do
post = [title: "first", uuid: Ecto.UUID.generate()]
assert TestRepo.insert_all(Post, [post], on_conflict: :nothing) == {1, nil}
# PG returns 0, MySQL returns 1
{entries, nil} = TestRepo.insert_all(Post, [post], on_conflict: :nothing)
assert entries == 0 or entries == 1
assert length(TestRepo.all(Post)) == 1
end
@tag :with_conflict_target
test "on conflict ignore and conflict target" do
post = [title: "first", uuid: Ecto.UUID.generate()]
assert TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:uuid]) ==
       {1, nil}
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:id]))
# On the declared conflict target the row is ignored: zero rows affected
assert TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:uuid]) ==
       {0, nil}
end
@tag :with_conflict_target
test "on conflict keyword list and conflict target" do
on_conflict = [set: [title: "second"]]
post = [title: "first", uuid: Ecto.UUID.generate()]
{1, nil} = TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid])
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:id]))
# On the declared conflict target the existing row is updated instead
assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
       {1, nil}
assert TestRepo.all(from p in Post, select: p.title) == ["second"]
end
@tag :with_conflict_target
@tag :returning
# With a source-mapped field, :returning reads back the stored value — the
# pre-update "old" on first insert, the conflict update's "new" afterwards.
test "on conflict keyword list and conflict target and returning and source field" do
on_conflict = [set: [url: "new"]]
permalink = [url: "old"]
assert {1, [%Permalink{url: "old"}]} =
       TestRepo.insert_all(Permalink, [permalink],
                           on_conflict: on_conflict, conflict_target: [:url], returning: [:url])
assert {1, [%Permalink{url: "new"}]} =
       TestRepo.insert_all(Permalink, [permalink],
                           on_conflict: on_conflict, conflict_target: [:url], returning: [:url])
end
@tag :with_conflict_target
# on_conflict may be a full update query for insert_all as well.
test "on conflict query and conflict target" do
on_conflict = from Post, update: [set: [title: "second"]]
post = [title: "first", uuid: Ecto.UUID.generate()]
assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
       {1, nil}
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:id]))
# On the declared conflict target the existing row is updated instead
assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
       {1, nil}
assert TestRepo.all(from p in Post, select: p.title) == ["second"]
end
@tag :returning
@tag :with_conflict_target
test "on conflict query and conflict target and returning" do
on_conflict = from Post, update: [set: [title: "second"]]
post = [title: "first", uuid: Ecto.UUID.generate()]
{1, [%{id: id}]} = TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
                                       conflict_target: [:uuid], returning: [:id])
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
                                       conflict_target: [:id], returning: [:id]))
# On the declared conflict target the same row is updated and returned
{1, [%Post{id: ^id, title: "second"}]} =
  TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
                      conflict_target: [:uuid], returning: [:id, :title])
end
@tag :with_conflict_target
# Without a schema, values (the uuid) must already be dumped to adapter format.
test "source (without an ecto schema) on conflict query and conflict target" do
on_conflict = [set: [title: "second"]]
{:ok, uuid} = Ecto.UUID.dump(Ecto.UUID.generate())
post = [title: "first", uuid: uuid]
assert TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
       {1, nil}
# Error on non-conflict target
assert catch_error(TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:id]))
# On the declared conflict target the existing row is updated instead
assert TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
       {1, nil}
assert TestRepo.all(from p in Post, select: p.title) == ["second"]
end
@tag :without_conflict_target
test "on conflict replace_all" do
post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
# Multiple record change value: note IDS are also replaced
changes = [%{id: post_first.id + 2, title: "first_updated",
text: "first_updated", uuid: post_first.uuid},
%{id: post_second.id + 2, title: "second_updated",
text: "second_updated", uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: :replace_all)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_first.id + 2)
assert updated_first.title == "first_updated"
assert updated_first.text == "first_updated"
updated_second = TestRepo.get(Post, post_second.id + 2)
assert updated_second.title == "second_updated"
assert updated_second.text == "second_updated"
end
@tag :with_conflict_target
test "on conflict replace_all and conflict_target" do
post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all, conflict_target: :uuid)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all, conflict_target: :uuid)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
# Multiple record change value: note IDS are also replaced
changes = [%{id: post_second.id + 1, title: "first_updated",
text: "first_updated", uuid: post_first.uuid},
%{id: post_second.id + 2, title: "second_updated",
text: "second_updated", uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: :replace_all, conflict_target: :uuid)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_second.id + 1)
assert updated_first.title == "first_updated"
assert updated_first.text == "first_updated"
updated_second = TestRepo.get(Post, post_second.id + 2)
assert updated_second.title == "second_updated"
assert updated_second.text == "second_updated"
end
@tag :without_conflict_target
# :replace_all_except_primary_key keeps the conflicting row's original id
# even though `changes` supplies a different one, so rows are fetched back
# by their *original* ids below.
test "on conflict replace_all_except_primary_key" do
  post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
  post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}
  {:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all_except_primary_key)
  {:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all_except_primary_key)
  assert post_first.id
  assert post_second.id
  assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
  # Multiple record change value: note IDS are not replaced
  changes = [%{id: post_first.id + 2, title: "first_updated",
               text: "first_updated", uuid: post_first.uuid},
             %{id: post_second.id + 2, title: "second_updated",
               text: "second_updated", uuid: post_second.uuid}]
  TestRepo.insert_all(Post, changes, on_conflict: :replace_all_except_primary_key)
  assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
  updated_first = TestRepo.get(Post, post_first.id)
  assert updated_first.title == "first_updated"
  assert updated_first.text == "first_updated"
  updated_second = TestRepo.get(Post, post_second.id)
  assert updated_second.title == "second_updated"
  assert updated_second.text == "second_updated"
end
@tag :with_conflict_target
# Same as the previous test but with an explicit conflict_target; the
# primary key is still preserved on conflict.
test "on conflict replace_all_except_primary_key and conflict_target" do
  post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
  post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}
  {:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid)
  {:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid)
  assert post_first.id
  assert post_second.id
  assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
  # Multiple record change value: note IDS are not replaced
  changes = [%{id: post_first.id + 2, title: "first_updated",
               text: "first_updated", uuid: post_first.uuid},
             %{id: post_second.id + 2, title: "second_updated",
               text: "second_updated", uuid: post_second.uuid}]
  TestRepo.insert_all(Post, changes, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid)
  assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
  updated_first = TestRepo.get(Post, post_first.id)
  assert updated_first.title == "first_updated"
  assert updated_first.text == "first_updated"
  updated_second = TestRepo.get(Post, post_second.id)
  assert updated_second.title == "second_updated"
  assert updated_second.text == "second_updated"
end
@tag :with_conflict_target
# {:replace, fields} only overwrites the listed columns; `public` is omitted
# from the replace list, so the originally inserted value must survive.
test "on conflict replace and conflict_target" do
  post_first = %Post{title: "first", visits: 10, public: true, uuid: Ecto.UUID.generate()}
  post_second = %Post{title: "second", visits: 20, public: false, uuid: Ecto.UUID.generate()}
  {:ok, post_first} = TestRepo.insert(post_first, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid)
  {:ok, post_second} = TestRepo.insert(post_second, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid)
  assert post_first.id
  assert post_second.id
  assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
  # Multiple record change value: note `public` field is not changed
  changes = [%{id: post_first.id, title: "first_updated", visits: 11, public: false, text: "first_updated", uuid: post_first.uuid},
             %{id: post_second.id, title: "second_updated", visits: 21, public: true, text: "second_updated", uuid: post_second.uuid}]
  TestRepo.insert_all(Post, changes, on_conflict: {:replace, [:title, :visits, :text]}, conflict_target: :uuid)
  assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
  updated_first = TestRepo.get(Post, post_first.id)
  assert updated_first.title == "first_updated"
  assert updated_first.visits == 11
  # `public` keeps the inserted value (true); the attempted false is ignored.
  assert updated_first.public == true
  assert updated_first.text == "first_updated"
  updated_second = TestRepo.get(Post, post_second.id)
  assert updated_second.title == "second_updated"
  assert updated_second.visits == 21
  assert updated_second.public == false
  assert updated_second.text == "second_updated"
end
end
end
| 38.742635 | 152 | 0.632959 |
08a160c7581d20f1321f8eded1af3f173cf2ac0c | 2,808 | ex | Elixir | lib/talib/sma.ex | buurzx/talib | 9341dafd2dc0aa14c34b659b82375723ff5a37fc | [
"MIT"
] | null | null | null | lib/talib/sma.ex | buurzx/talib | 9341dafd2dc0aa14c34b659b82375723ff5a37fc | [
"MIT"
] | null | null | null | lib/talib/sma.ex | buurzx/talib | 9341dafd2dc0aa14c34b659b82375723ff5a37fc | [
"MIT"
] | 1 | 2021-04-29T22:14:28.000Z | 2021-04-29T22:14:28.000Z | defmodule Talib.SMA do
@moduledoc ~S"""
Defines a Simple Moving Average.
## History
Version: 1.0
Source: https://qkdb.wordpress.com/2013/04/22/simple-moving-average/
Audited by:
| Name | Title |
| :----------- | :---------------- |
| | |
"""
@typedoc """
Defines a Simple Moving Average.
* :period - The period of the SMA
* :values - List of values resulting from the calculation
"""
@type t :: %Talib.SMA{period: integer, values: [number]}
defstruct period: 0,
values: []
@doc """
Gets the SMA of a list.
Returns `{:ok, sma}`, otherwise `{:error, reason}`.
## Examples
iex> Talib.SMA.from_list([17, 23, 44], 2)
{:ok, %Talib.SMA{
period: 2,
values: [0, 20.0, 33.5]
}}
iex> Talib.SMA.from_list([], 1)
{:error, :no_data}
iex> Talib.SMA.from_list([17], 0)
{:error, :bad_period}
"""
# Thin public wrapper over the private recursive `calculate/2` so the bang
# variant below can share the same implementation.
@spec from_list([number], integer) :: {:ok, Talib.SMA.t()} | {:error, atom}
def from_list(data, period), do: calculate(data, period)
@doc """
Gets the SMA of a list.
Raises `NoDataError` if the given list is an empty list.
Raises `BadPeriodError` if the given period is 0.
## Examples
iex> Talib.SMA.from_list!([9, 10, 11, 12, 13, 14], 5)
%Talib.SMA{
period: 5,
values: [0, 0, 0, 0, 11.0, 12.0]
}
iex> Talib.SMA.from_list!([], 1)
** (NoDataError) no data error
iex> Talib.SMA.from_list!([17], 0)
** (BadPeriodError) bad period error
"""
@spec from_list!([number], integer) :: Talib.SMA.t() | no_return
def from_list!(data, period) do
  # Same computation as from_list/2, but failures are raised instead of
  # being returned as error tuples.
  data
  |> calculate(period)
  |> case do
    {:ok, sma} -> sma
    {:error, :no_data} -> raise NoDataError
    {:error, :bad_period} -> raise BadPeriodError
  end
end
@doc false
@spec calculate([number], integer, [float]) ::
{:ok, Talib.SMA.t()}
| {:error, atom}
defp calculate(data, period, results \\ [])
defp calculate([], _period, []),
do: {:error, :no_data}
defp calculate(_data, 0, _results),
do: {:error, :bad_period}
defp calculate([], period, results),
do: {:ok, %Talib.SMA{period: period, values: results}}
defp calculate([hd | tl] = data, period, results) do
cond do
length(results) < period && length(data) > length(results) ->
calculate(data, period, results ++ [0])
length(data) < period ->
calculate(tl, period, results)
hd === nil ->
calculate(tl, period, results ++ [0])
length(data) >= period ->
result =
data
|> Enum.take(period)
|> Enum.sum()
|> Kernel./(period)
calculate(tl, period, results ++ [Float.round(result, 6)])
end
end
end
| 24.417391 | 77 | 0.55057 |
08a1841be4018308556af6c8b32e03a3b6bf04a1 | 4,477 | ex | Elixir | lib/trans/gen_function_migration.ex | kipcole9/trans | 5dbca7bfcff5f0153b0d5f6f6acf0420e86b2292 | [
"Apache-2.0"
] | null | null | null | lib/trans/gen_function_migration.ex | kipcole9/trans | 5dbca7bfcff5f0153b0d5f6f6acf0420e86b2292 | [
"Apache-2.0"
] | null | null | null | lib/trans/gen_function_migration.ex | kipcole9/trans | 5dbca7bfcff5f0153b0d5f6f6acf0420e86b2292 | [
"Apache-2.0"
] | null | null | null | if Code.ensure_loaded?(Ecto.Adapters.SQL) do
defmodule Mix.Tasks.Trans.Gen.TranslateFunction do
use Mix.Task
import Mix.Generator
import Mix.Ecto, except: [migrations_path: 1]
import Macro, only: [camelize: 1, underscore: 1]
@shortdoc "Generates an Ecto migration to create the translate_field database function"
@moduledoc """
Generates a migration to add a database function
`translate_field` that uses the `Trans` structured
transaltion schema to resolve a translation for a field.
"""
@doc false
@dialyzer {:no_return, run: 1}
# Mix task entry point: for every configured repo, write a timestamped
# migration file containing the `translate_field` SQL functions and offer to
# run it immediately via `ecto.migrate`.
def run(args) do
  no_umbrella!("trans_gen_translate_function")
  repos = parse_repo(args)
  name = "trans_gen_translate_function"

  Enum.each(repos, fn repo ->
    ensure_repo(repo, args)
    # Migration path relative to the app dir, e.g. priv/repo/migrations.
    path = Path.relative_to(migrations_path(repo), Mix.Project.app_path())
    file = Path.join(path, "#{timestamp()}_#{underscore(name)}.exs")
    create_directory(path)

    assigns = [mod: Module.concat([repo, Migrations, camelize(name)])]

    # Render the EEx template below, then pretty-print it when the code
    # formatter is available (see format_string!/1).
    content =
      assigns
      |> migration_template
      |> format_string!

    create_file(file, content)

    if open?(file) and Mix.shell().yes?("Do you want to run this migration?") do
      Mix.Task.run("ecto.migrate", [repo])
    end
  end)
end
# Builds the migration timestamp `YYYYMMDDHHMMSS` from the current UTC time.
defp timestamp do
  {{year, month, day}, {hour, minute, second}} = :calendar.universal_time()
  "#{year}" <> Enum.map_join([month, day, hour, minute, second], "", &pad/1)
end
# Left-pads a (non-negative) calendar component to two digits: 7 -> "07".
defp pad(i) when i < 10, do: "0" <> to_string(i)
defp pad(i), do: to_string(i)
# Elixir >= 1.6 ships Code.format_string!/1; use it when present so the
# generated migration is formatted, otherwise emit the template verbatim.
if Code.ensure_loaded?(Code) && function_exported?(Code, :format_string!, 1) do
  @spec format_string!(String.t()) :: iodata()
  @dialyzer {:no_return, format_string!: 1}
  def format_string!(string) do
    Code.format_string!(string)
  end
else
  @spec format_string!(String.t()) :: iodata()
  def format_string!(string) do
    string
  end
end

# migrations_path/1 moved from Mix.Ecto to Ecto.Migrator in Ecto 3; define
# whichever wrapper the installed version actually exports.
# NOTE(review): if both conditions held, two migrations_path/1 definitions
# would be compiled — confirm the supported Ecto versions make the two
# branches mutually exclusive.
if Code.ensure_loaded?(Ecto.Migrator) &&
     function_exported?(Ecto.Migrator, :migrations_path, 1) do
  def migrations_path(repo) do
    Ecto.Migrator.migrations_path(repo)
  end
end

if Code.ensure_loaded?(Mix.Ecto) && function_exported?(Mix.Ecto, :migrations_path, 1) do
  def migrations_path(repo) do
    Mix.Ecto.migrations_path(repo)
  end
end
# Compile-time EEx template for the generated migration. Everything inside
# ~S|...| is emitted verbatim into the user's migration (only
# `<%= inspect @mod %>` is interpolated) — do not add comments inside the
# sigil or they end up in generated files.
# NOTE(review): the DROP FUNCTION signatures in down/0 omit the leading
# `record record` argument that the CREATE signatures declare; PostgreSQL
# matches DROP by argument types, so down/0 may fail to drop — verify.
embed_template(:migration, ~S|
defmodule <%= inspect @mod %> do
use Ecto.Migration
def up do
execute """
CREATE OR REPLACE FUNCTION public.translate_field(record record, container varchar, field varchar, default_locale varchar, locales varchar[])
RETURNS varchar
STRICT
STABLE
LANGUAGE plpgsql
AS $$
DECLARE
locale varchar;
j json;
c json;
l varchar;
BEGIN
j := row_to_json(record);
c := j->container;
FOREACH locale IN ARRAY locales LOOP
IF locale = default_locale THEN
RETURN j->>field;
ELSEIF c->locale IS NOT NULL THEN
IF c->locale->>field IS NOT NULL THEN
RETURN c->locale->>field;
END IF;
END IF;
END LOOP;
RETURN j->>field;
END;
$$;
"""
execute("""
CREATE OR REPLACE FUNCTION public.translate_field(record record, container varchar, default_locale varchar, locales varchar[])
RETURNS jsonb
STRICT
STABLE
LANGUAGE plpgsql
AS $$
DECLARE
locale varchar;
j json;
c json;
BEGIN
j := row_to_json(record);
c := j->container;
FOREACH locale IN ARRAY locales LOOP
IF c->locale IS NOT NULL THEN
RETURN c->locale;
END IF;
END LOOP;
RETURN NULL;
END;
$$;
""")
end
def down do
execute "DROP FUNCTION IF EXISTS public.translate_field(container varchar, field varchar, default_locale varchar, locales varchar[])"
execute "DROP FUNCTION IF EXISTS public.translate_field(container varchar, default_locale varchar, locales varchar[])"
end
end
|)
end
end
| 29.846667 | 151 | 0.563324 |
08a187eda02701e83a15235822e0b5b410fea288 | 912 | ex | Elixir | lib/yamlix/representation_graph.ex | azatychyn/yamlix | ead7c141e7ebe108f81e19b3f6d968b52066aef4 | [
"MIT"
] | null | null | null | lib/yamlix/representation_graph.ex | azatychyn/yamlix | ead7c141e7ebe108f81e19b3f6d968b52066aef4 | [
"MIT"
] | null | null | null | lib/yamlix/representation_graph.ex | azatychyn/yamlix | ead7c141e7ebe108f81e19b3f6d968b52066aef4 | [
"MIT"
] | 1 | 2022-02-10T11:02:29.000Z | 2022-02-10T11:02:29.000Z | defmodule RepresentationGraph do
defmodule Node do
  # One node of the representation graph: a wrapped native value plus an
  # optional YAML tag and anchor.
  defstruct value: "", tag: "", anchor: ""

  # Lists become untagged nodes whose value is the node-wrapped elements.
  def new(list) when is_list(list) do
    %Node{value: Enum.map(list, &Node.new/1), tag: ""}
  end

  # Maps become untagged nodes whose keys AND values are both wrapped.
  def new(map) when is_map(map) do
    wrapped =
      Enum.reduce(Map.keys(map), %{}, fn key, acc ->
        Map.put(acc, Node.new(key), map |> Map.get(key) |> Node.new())
      end)

    %Node{value: wrapped, tag: ""}
  end

  # Bare atoms (but not booleans) carry the yamerl atom tag.
  def new(scalar) when is_atom(scalar) and not is_boolean(scalar) do
    %Node{value: scalar, tag: "!<tag:yamerl,2012:atom>"}
  end

  # Any other scalar (numbers, strings, booleans) is untagged.
  def new(scalar) do
    %Node{value: scalar, tag: ""}
  end

  # Unwraps a node back to its raw value.
  def value(%Node{value: v}), do: v
end
# Entry point: wrap any native term in a representation-graph Node.
def represent(scalar), do: Node.new(scalar)
end
| 21.209302 | 70 | 0.552632 |
08a18a1a48945e134d6495c7f33d7671c2fd50da | 1,527 | ex | Elixir | lib/rocketpay/accounts/operation.ex | ledoctah/rocketpay | ab347ad2b4cc1c2a0fa6443be0c39fd64d658c61 | [
"MIT"
] | null | null | null | lib/rocketpay/accounts/operation.ex | ledoctah/rocketpay | ab347ad2b4cc1c2a0fa6443be0c39fd64d658c61 | [
"MIT"
] | null | null | null | lib/rocketpay/accounts/operation.ex | ledoctah/rocketpay | ab347ad2b4cc1c2a0fa6443be0c39fd64d658c61 | [
"MIT"
] | null | null | null | defmodule Rocketpay.Accounts.Operation do
alias Ecto.Multi
alias Rocketpay.{Account}
# Builds an Ecto.Multi with two steps: load the account by id, then apply
# the given operation (:deposit or :withdraw) of `value` to its balance.
# The caller is expected to run this Multi inside a transaction.
def call(%{"id" => id, "value" => value}, operation) do
  operation_name = account_operation_name(operation)

  Multi.new()
  |> Multi.run(operation_name, fn repo, _changes -> get_account(repo, id) end)
  |> Multi.run(operation, fn repo, changes ->
    # Fetch the account loaded by the previous Multi step.
    account = Map.get(changes, operation_name)
    update_balance(repo, account, value, operation)
  end)
end
# Loads an account by id, normalising a `nil` miss into an error tuple.
defp get_account(repo, id) do
  if account = repo.get(Account, id) do
    {:ok, account}
  else
    {:error, "Account not found!"}
  end
end
# Computes the new balance for `account` and persists it (or propagates the
# cast error).
defp update_balance(repo, account, value, operation) do
  account
  |> operation(value, operation)
  |> update_account(repo, account)
end

# Casts the incoming value to Decimal, then applies it to the balance.
defp operation(%Account{balance: balance}, value, operation) do
  value
  |> Decimal.cast()
  |> handle_cast(balance, operation)
end

# :deposit adds to the balance; :withdraw subtracts from it.
defp handle_cast({:ok, value}, balance, :deposit), do: Decimal.add(balance, value)
defp handle_cast({:ok, value}, balance, :withdraw), do: Decimal.sub(balance, value)
# NOTE(review): this message says "deposit" even when the failed cast came
# from a withdraw — confirm whether it should be operation-agnostic.
defp handle_cast(:error, _balance, _operation), do: {:error, "Invalid deposit value!"}
# Cast errors pass straight through; otherwise persist the new balance.
defp update_account({:error, _reason} = error, _repo, _account), do: error

defp update_account(new_balance, repo, account) do
  changeset = Account.changeset(account, %{balance: new_balance})
  repo.update(changeset)
end
# :deposit -> :account_deposit etc.; used as the Multi step name for the
# account-loading step.
defp account_operation_name(operation), do: :"account_#{operation}"
end
| 28.277778 | 88 | 0.67649 |
08a18d68247b995c79db9e833ac7000b9128d2ac | 1,329 | ex | Elixir | lib/auto_api/commands/heart_rate_command.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | null | null | null | lib/auto_api/commands/heart_rate_command.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | null | null | null | lib/auto_api/commands/heart_rate_command.ex | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | null | null | null | # AutoAPI
# The MIT License
#
# Copyright (c) 2018- High-Mobility GmbH (https://high-mobility.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
defmodule AutoApi.HeartRateCommand do
  @moduledoc """
  Handles HeartRate commands and applies binary commands to `%AutoApi.HeartRateState{}`
  """

  # All behaviour comes from the shared command implementation; this module
  # only binds it to the HeartRate capability (see AutoApi.Command).
  use AutoApi.Command
end
| 45.827586 | 85 | 0.773514 |
08a197e2f6d7c66a150812d402393d3f0019639c | 13,823 | ex | Elixir | lib/kaffy_web/controllers/resource_controller.ex | zenbaku/kaffy | b03ab90cb9620232ba55e9977ac153d02186032d | [
"MIT"
] | null | null | null | lib/kaffy_web/controllers/resource_controller.ex | zenbaku/kaffy | b03ab90cb9620232ba55e9977ac153d02186032d | [
"MIT"
] | null | null | null | lib/kaffy_web/controllers/resource_controller.ex | zenbaku/kaffy | b03ab90cb9620232ba55e9977ac153d02186032d | [
"MIT"
] | null | null | null | defmodule KaffyWeb.ResourceController do
@moduledoc false
use Phoenix.Controller, namespace: KaffyWeb
use Phoenix.HTML
alias Kaffy.Pagination
# "Picker" variant of the index: rendered in the bare layout when another
# form needs to select an associated record (`c`/`r`/`pick` params present).
# Pagination mirrors the plain index clause below.
def index(
      conn,
      %{
        "context" => context,
        "resource" => resource,
        "c" => _target_context,
        "r" => _target_resource,
        "pick" => _field
      } = params
    ) do
  my_resource = Kaffy.Utils.get_resource(conn, context, resource)

  case can_proceed?(my_resource, conn) do
    false ->
      unauthorized_access(conn)

    true ->
      fields = Kaffy.ResourceAdmin.index(my_resource)
      {filtered_count, entries} = Kaffy.ResourceQuery.list_resource(conn, my_resource, params)
      items_per_page = Map.get(params, "limit", "100") |> String.to_integer()
      page = Map.get(params, "page", "1") |> String.to_integer()
      total_pages = ceil(filtered_count / items_per_page)
      # BUGFIX: was `round(filtered_count / items_per_page) > page`, which
      # hid the last page whenever it was less than half full (e.g. 149 rows
      # at 100/page rounded down to 1 page). `ceil/1` matches `list_pages`.
      has_next = total_pages > page
      next_class = if has_next, do: "", else: " disabled"
      has_prev = page >= 2
      prev_class = if has_prev, do: "", else: " disabled"
      list_pages = Pagination.get_pages(page, total_pages)

      render(conn, "pick_resource.html",
        layout: {KaffyWeb.LayoutView, "bare.html"},
        context: context,
        resource: resource,
        fields: fields,
        my_resource: my_resource,
        filtered_count: filtered_count,
        page: page,
        has_next_page: has_next,
        next_class: next_class,
        has_prev_page: has_prev,
        prev_class: prev_class,
        list_pages: list_pages,
        entries: entries,
        params: params
      )
  end
end
# Standard paginated listing of a resource inside the admin layout.
# `limit` and `page` query params drive pagination (defaults 100 / 1).
def index(conn, %{"context" => context, "resource" => resource} = params) do
  my_resource = Kaffy.Utils.get_resource(conn, context, resource)

  case can_proceed?(my_resource, conn) do
    false ->
      unauthorized_access(conn)

    true ->
      fields = Kaffy.ResourceAdmin.index(my_resource)
      {filtered_count, entries} = Kaffy.ResourceQuery.list_resource(conn, my_resource, params)
      items_per_page = Map.get(params, "limit", "100") |> String.to_integer()
      page = Map.get(params, "page", "1") |> String.to_integer()
      total_pages = ceil(filtered_count / items_per_page)
      # BUGFIX: was `round(filtered_count / items_per_page) > page`, which
      # hid the last page whenever it was less than half full. `ceil/1`
      # keeps has_next consistent with `list_pages` below.
      has_next = total_pages > page
      next_class = if has_next, do: "", else: " disabled"
      has_prev = page >= 2
      prev_class = if has_prev, do: "", else: " disabled"
      list_pages = Pagination.get_pages(page, total_pages)

      render(conn, "index.html",
        layout: {KaffyWeb.LayoutView, "app.html"},
        context: context,
        resource: resource,
        fields: fields,
        my_resource: my_resource,
        filtered_count: filtered_count,
        page: page,
        has_next_page: has_next,
        next_class: next_class,
        has_prev_page: has_prev,
        prev_class: prev_class,
        list_pages: list_pages,
        entries: entries,
        params: params
      )
  end
end
# Renders the edit form for a single record; unknown ids redirect back to
# the index with an error flash.
def show(conn, %{"context" => context, "resource" => resource, "id" => id}) do
  my_resource = Kaffy.Utils.get_resource(conn, context, resource)
  schema = my_resource[:schema]
  resource_name = Kaffy.ResourceAdmin.singular_name(my_resource)

  case can_proceed?(my_resource, conn) do
    false ->
      unauthorized_access(conn)

    true ->
      if entry = Kaffy.ResourceQuery.fetch_resource(conn, my_resource, id) do
        # A bare changeset (no changes yet) backs the pre-filled form.
        changeset = Ecto.Changeset.change(entry)

        render(conn, "show.html",
          layout: {KaffyWeb.LayoutView, "app.html"},
          changeset: changeset,
          context: context,
          resource: resource,
          my_resource: my_resource,
          resource_name: resource_name,
          schema: schema,
          entry: entry
        )
      else
        put_flash(conn, :error, "The resource you are trying to visit does not exist!")
        |> redirect(
          to: Kaffy.Utils.router().kaffy_resource_path(conn, :index, context, resource)
        )
      end
  end
end
# Persists edits to an existing record. The "submit" param (which save
# button was pressed) selects the follow-up page; callback failures
# re-render the edit form with the error.
def update(conn, %{"context" => context, "resource" => resource, "id" => id} = params) do
  my_resource = Kaffy.Utils.get_resource(conn, context, resource)
  schema = my_resource[:schema]
  # Decode JSON-encoded map fields submitted as strings back into maps.
  params = Kaffy.ResourceParams.decode_map_fields(resource, schema, params)

  resource_name = Kaffy.ResourceAdmin.singular_name(my_resource) |> String.capitalize()

  case can_proceed?(my_resource, conn) do
    false ->
      unauthorized_access(conn)

    true ->
      entry = Kaffy.ResourceQuery.fetch_resource(conn, my_resource, id)
      changes = Map.get(params, resource, %{})

      case Kaffy.ResourceCallbacks.update_callbacks(conn, my_resource, entry, changes) do
        {:ok, entry} ->
          # NOTE(review): every branch below sets its own success flash, so
          # this assignment looks redundant — confirm before removing.
          conn = put_flash(conn, :success, "#{resource_name} saved successfully")

          save_button = Map.get(params, "submit", "Save")

          case save_button do
            "Save" ->
              conn
              |> put_flash(:success, "#{resource_name} saved successfully")
              |> redirect(
                to: Kaffy.Utils.router().kaffy_resource_path(conn, :index, context, resource)
              )

            "Save and add another" ->
              conn
              |> put_flash(:success, "#{resource_name} saved successfully")
              |> redirect(
                to: Kaffy.Utils.router().kaffy_resource_path(conn, :new, context, resource)
              )

            "Save and continue editing" ->
              conn
              |> put_flash(:success, "#{resource_name} saved successfully")
              |> redirect_to_resource(context, resource, entry)
          end

        # Validation failure: re-render the edit form with inline errors.
        {:error, %Ecto.Changeset{} = changeset} ->
          conn =
            put_flash(
              conn,
              :error,
              "A problem occurred while trying to save this #{resource}"
            )

          render(conn, "show.html",
            layout: {KaffyWeb.LayoutView, "app.html"},
            changeset: changeset,
            context: context,
            resource: resource,
            my_resource: my_resource,
            resource_name: resource_name,
            schema: schema,
            entry: entry
          )

        # Callback returned a custom error message for this entry.
        {:error, {entry, error}} when is_binary(error) ->
          conn = put_flash(conn, :error, error)
          changeset = Ecto.Changeset.change(entry)

          render(conn, "show.html",
            layout: {KaffyWeb.LayoutView, "app.html"},
            changeset: changeset,
            context: context,
            resource: resource,
            my_resource: my_resource,
            resource_name: resource_name,
            schema: schema,
            entry: entry
          )
      end
  end
end
# Renders an empty form for creating a new record of this resource.
def new(conn, %{"context" => context, "resource" => resource}) do
  my_resource = Kaffy.Utils.get_resource(conn, context, resource)
  resource_name = Kaffy.ResourceAdmin.singular_name(my_resource)

  case can_proceed?(my_resource, conn) do
    false ->
      unauthorized_access(conn)

    true ->
      # Blank changeset with errors cleared so the fresh form shows none.
      blank_changeset =
        my_resource
        |> Kaffy.ResourceAdmin.create_changeset(%{})
        |> Map.put(:errors, [])

      render(conn, "new.html",
        layout: {KaffyWeb.LayoutView, "app.html"},
        changeset: blank_changeset,
        context: context,
        resource: resource,
        resource_name: resource_name,
        my_resource: my_resource
      )
  end
end
# Inserts a new record. Mirrors update/2: the pressed save button selects
# the follow-up page, and callback errors re-render the form.
def create(conn, %{"context" => context, "resource" => resource} = params) do
  my_resource = Kaffy.Utils.get_resource(conn, context, resource)
  params = Kaffy.ResourceParams.decode_map_fields(resource, my_resource[:schema], params)
  changes = Map.get(params, resource, %{})
  resource_name = Kaffy.ResourceAdmin.singular_name(my_resource)

  case can_proceed?(my_resource, conn) do
    false ->
      unauthorized_access(conn)

    true ->
      case Kaffy.ResourceCallbacks.create_callbacks(conn, my_resource, changes) do
        {:ok, entry} ->
          case Map.get(params, "submit", "Save") do
            "Save" ->
              put_flash(conn, :success, "Created a new #{resource_name} successfully")
              |> redirect(
                to: Kaffy.Utils.router().kaffy_resource_path(conn, :index, context, resource)
              )

            "Save and add another" ->
              conn
              |> put_flash(:success, "#{resource_name} saved successfully")
              |> redirect(
                to: Kaffy.Utils.router().kaffy_resource_path(conn, :new, context, resource)
              )

            "Save and continue editing" ->
              put_flash(conn, :success, "Created a new #{resource_name} successfully")
              |> redirect_to_resource(context, resource, entry)
          end

        # Validation failure: show the form again with inline errors.
        {:error, %Ecto.Changeset{} = changeset} ->
          render(conn, "new.html",
            layout: {KaffyWeb.LayoutView, "app.html"},
            changeset: changeset,
            context: context,
            resource: resource,
            resource_name: resource_name,
            my_resource: my_resource
          )

        # Callback returned a custom error message for this entry.
        {:error, {entry, error}} when is_binary(error) ->
          changeset = Ecto.Changeset.change(entry)

          conn
          |> put_flash(:error, error)
          |> render("new.html",
            layout: {KaffyWeb.LayoutView, "app.html"},
            changeset: changeset,
            context: context,
            resource: resource,
            resource_name: resource_name,
            my_resource: my_resource
          )
      end
  end
end
# Deletes a record through the resource's delete callbacks. DB-level
# failures and custom callback errors both send the user back to the record
# with an explanatory flash.
def delete(conn, %{"context" => context, "resource" => resource, "id" => id}) do
  my_resource = Kaffy.Utils.get_resource(conn, context, resource)

  case can_proceed?(my_resource, conn) do
    false ->
      unauthorized_access(conn)

    true ->
      entry = Kaffy.ResourceQuery.fetch_resource(conn, my_resource, id)

      case Kaffy.ResourceCallbacks.delete_callbacks(conn, my_resource, entry) do
        {:ok, _deleted} ->
          put_flash(conn, :success, "The record was deleted successfully")
          |> redirect(
            to: Kaffy.Utils.router().kaffy_resource_path(conn, :index, context, resource)
          )

        {:error, %Ecto.Changeset{} = _changeset} ->
          put_flash(
            conn,
            :error,
            "A database-related issue prevented this record from being deleted."
          )
          |> redirect_to_resource(context, resource, entry)

        {:error, {entry, error}} when is_binary(error) ->
          put_flash(conn, :error, error)
          |> redirect_to_resource(context, resource, entry)
      end
  end
end
# Runs a custom single-record action declared by the resource admin module.
# String.to_existing_atom/1 prevents minting new atoms from URL input.
def single_action(conn, %{
      "context" => context,
      "resource" => resource,
      "id" => id,
      "action_key" => action_key
    }) do
  my_resource = Kaffy.Utils.get_resource(conn, context, resource)
  entry = Kaffy.ResourceQuery.fetch_resource(conn, my_resource, id)
  actions = Kaffy.ResourceAdmin.resource_actions(my_resource, conn)
  action_key = String.to_existing_atom(action_key)
  # Exactly one action is expected per key; a mismatch raises MatchError.
  [action_record] = Keyword.get_values(actions, action_key)

  case action_record.action.(conn, entry) do
    {:ok, entry} ->
      conn = put_flash(conn, :success, "Action performed successfully")
      redirect_to_resource(conn, context, resource, entry)

    {:error, _} ->
      conn = put_flash(conn, :error, "A validation error occurred")
      redirect_to_resource(conn, context, resource, entry)

    {:error, _, error_msg} ->
      conn = put_flash(conn, :error, error_msg)
      redirect_to_resource(conn, context, resource, entry)
  end
end
# Runs a custom list action over the comma-separated ids from the query
# string. Actions that declared :inputs receive the submitted "kaffy-input"
# map as a third argument.
def list_action(
      conn,
      %{"context" => context, "resource" => resource, "action_key" => action_key} = params
    ) do
  my_resource = Kaffy.Utils.get_resource(conn, context, resource)
  action_key = String.to_existing_atom(action_key)
  ids = Map.get(params, "ids", "") |> String.split(",")
  entries = Kaffy.ResourceQuery.fetch_list(my_resource, ids)
  actions = Kaffy.ResourceAdmin.list_actions(my_resource, conn)
  [action_record] = Keyword.get_values(actions, action_key)
  kaffy_inputs = Map.get(params, "kaffy-input", %{})

  # Dispatch on arity: actions without declared inputs take (conn, entries).
  result =
    case Map.get(action_record, :inputs, []) do
      [] -> action_record.action.(conn, entries)
      _ -> action_record.action.(conn, entries, kaffy_inputs)
    end

  case result do
    :ok ->
      put_flash(conn, :success, "Action performed successfully")
      |> redirect(to: Kaffy.Utils.router().kaffy_resource_path(conn, :index, context, resource))

    {:error, error_msg} ->
      put_flash(conn, :error, error_msg)
      |> redirect(to: Kaffy.Utils.router().kaffy_resource_path(conn, :index, context, resource))
  end
end
# def export(conn, %{"context" => context, "resource" => resource}) do
# my_resource = Kaffy.Utils.get_resource(conn, context, resource)
# end
# A resource is accessible when its admin module authorizes this conn.
defp can_proceed?(resource, conn) do
  Kaffy.ResourceAdmin.authorized?(resource, conn)
end
# Flash an error and bounce the user back to the admin home page.
defp unauthorized_access(conn) do
  home = Kaffy.Utils.router().kaffy_home_path(conn, :index)

  conn
  |> put_flash(:error, "You are not authorized to access that page")
  |> redirect(to: home)
end
# Redirects to the show page of the given entry within its context/resource.
defp redirect_to_resource(conn, context, resource, entry) do
  path = Kaffy.Utils.router().kaffy_resource_path(conn, :show, context, resource, entry.id)
  redirect(conn, to: path)
end
| 34.64411 | 98 | 0.588223 |
08a1db9a7a56fa6732d5783716aa188f797b3753 | 621 | ex | Elixir | lib/tune/config.ex | pedromtavares/tune | b95bdb5038ccb8c7ae262ef5d0803e53565e192f | [
"MIT"
] | 206 | 2020-08-23T17:50:03.000Z | 2022-03-28T04:39:01.000Z | lib/tune/config.ex | pedromtavares/tune | b95bdb5038ccb8c7ae262ef5d0803e53565e192f | [
"MIT"
] | 112 | 2020-08-21T08:26:38.000Z | 2022-03-31T06:11:06.000Z | lib/tune/config.ex | pedromtavares/tune | b95bdb5038ccb8c7ae262ef5d0803e53565e192f | [
"MIT"
] | 15 | 2020-08-25T02:30:23.000Z | 2021-12-16T14:19:35.000Z | defmodule Tune.Config do
@moduledoc """
This module is responsible for all runtime config resolution.
"""
use Vapor.Planner
dotenv()
config :web,
env([
{:port, "PORT", map: &String.to_integer/1, required: false},
{:secret_key_base, "SECRET_KEY_BASE"},
{:session_encryption_salt, "SESSION_ENCRYPTION_SALT"},
{:admin_user, "ADMIN_USER"},
{:admin_password, "ADMIN_PASSWORD"}
])
config :spotify,
env(
spotify_client_id: "SPOTIFY_CLIENT_ID",
spotify_client_secret: "SPOTIFY_CLIENT_SECRET"
)
end
| 24.84 | 71 | 0.603865 |
08a20c0936ca786ebb97b584b9cb74ef9e875cde | 1,520 | ex | Elixir | clients/big_query/lib/google_api/big_query/v2/model/table_field_schema_policy_tags.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/big_query/lib/google_api/big_query/v2/model/table_field_schema_policy_tags.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/big_query/lib/google_api/big_query/v2/model/table_field_schema_policy_tags.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: generated model (see the header above) — manual edits will be lost
# when the client is regenerated.
defmodule GoogleApi.BigQuery.V2.Model.TableFieldSchemaPolicyTags do
  @moduledoc """
  ## Attributes

  * `names` (*type:* `list(String.t)`, *default:* `nil`) - A list of category resource names. For example, "projects/1/location/eu/taxonomies/2/policyTags/3". At most 1 policy tag is allowed.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :names => list(String.t())
        }

  field(:names, type: :list)
end

# Decodes JSON into the model struct via the field metadata declared above.
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.TableFieldSchemaPolicyTags do
  def decode(value, options) do
    GoogleApi.BigQuery.V2.Model.TableFieldSchemaPolicyTags.decode(value, options)
  end
end

# Encodes the model struct back to JSON through the shared Gax machinery.
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.TableFieldSchemaPolicyTags do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
08a20e1f3fd6d25b0be26588ce5f5e563631f47f | 2,440 | ex | Elixir | lib/genie/storage_relay.ex | jjcarstens/genie | b0b61ca5c6300508bd230a248da47aafa0278061 | [
"Apache-2.0"
] | null | null | null | lib/genie/storage_relay.ex | jjcarstens/genie | b0b61ca5c6300508bd230a248da47aafa0278061 | [
"Apache-2.0"
] | null | null | null | lib/genie/storage_relay.ex | jjcarstens/genie | b0b61ca5c6300508bd230a248da47aafa0278061 | [
"Apache-2.0"
] | null | null | null | defmodule Genie.StorageRelay do
use GenServer
# Accepted command values: "on"/"locked"/1 drive a pin high,
# "off"/"unlocked"/0 drive it low. String, atom and integer forms are all
# accepted.
@high_values ["on", "locked", :on, :locked, 1]
@low_values ["off", "unlocked", :off, :unlocked, 0]
@valid_values @high_values ++ @low_values
# Guard used by the public toggle functions to reject unknown values early.
defguard valid_value(val) when val in @valid_values

# lock/lights hold Circuits.GPIO pin refs once opened in handle_info(:init).
defstruct lock: nil, lights: nil, options: nil

def start_link(options) do
  state = %__MODULE__{options: options}
  GenServer.start_link(__MODULE__, state, name: __MODULE__)
end
@impl true
def init(state) do
  # Defer GPIO setup to handle_info(:init, ...) so start_link returns fast.
  # NOTE(review): handle_continue would be the modern equivalent — confirm.
  send self(), :init
  {:ok, state}
end
## Client API — all calls are serialized through the singleton server.

def read_lock, do: GenServer.call(__MODULE__, :read_lock)
def read_lights, do: GenServer.call(__MODULE__, :read_lights)

def toggle_lock(val) when valid_value(val) do
  GenServer.call(__MODULE__, {:toggle_lock, val})
end

# Unknown values are rejected without touching the server.
def toggle_lock(_val), do: :bad_toggle_value

def toggle_lights(val) when valid_value(val) do
  GenServer.call(__MODULE__, {:toggle_lights, val})
end

def toggle_lights(_val), do: :bad_toggle_value
@impl true
# Reads the current pin level and reports it as :on/:off.
def handle_call(:read_lights, _from, %{lights: lights} = state) do
  val = Circuits.GPIO.read(lights) |> val_to_atom(:lights)
  {:reply, val, state}
end

@impl true
# Reads the current pin level and reports it as :locked/:unlocked.
def handle_call(:read_lock, _from, %{lock: lock} = state) do
  val = Circuits.GPIO.read(lock) |> val_to_atom(:lock)
  {:reply, val, state}
end

@impl true
# Writes the requested level, then reads it back and pushes the confirmed
# state to the websocket listener.
def handle_call({:toggle_lights, val}, _from, %{lights: lights} = state) do
  Circuits.GPIO.write(lights, gpio_val(val))
  Circuits.GPIO.read(lights) |> send_update(:lights)
  {:reply, :ok, state}
end

@impl true
def handle_call({:toggle_lock, val}, _from, %{lock: lock} = state) do
  Circuits.GPIO.write(lock, gpio_val(val))
  Circuits.GPIO.read(lock) |> send_update(:lock)
  {:reply, :ok, state}
end
@impl true
def handle_info(:init, state) do
{:ok, lock_pin} = Circuits.GPIO.open(12, :output)
{:ok, lights_pin} = Circuits.GPIO.open(16, :output)
{:noreply, %{state | lock: lock_pin, lights: lights_pin}}
end
defp gpio_val(val) when val in @low_values, do: 0
defp gpio_val(val) when val in @high_values, do: 1
defp val_to_atom(1, :lights), do: :on
defp val_to_atom(0, :lights), do: :off
defp val_to_atom(1, :lock), do: :locked
defp val_to_atom(0, :lock), do: :unlocked
defp send_update(val, update) when is_number(val) do
val_to_atom(val, update) |> send_update(update)
end
defp send_update(val, update) do
send Genie.Websocket, {update, val}
end
end
| 28.705882 | 77 | 0.683197 |
08a21086276e1191c9edf77db9f2ba58a29534c2 | 1,545 | ex | Elixir | lib/flowex/pipes/stage.ex | antonmi/flowex | 7597e2ae1bf53033679ba65e0be13a50ad6f1e5e | [
"Apache-2.0"
] | 422 | 2017-01-20T13:38:13.000Z | 2022-02-08T14:07:11.000Z | lib/flowex/pipes/stage.ex | antonmi/flowex | 7597e2ae1bf53033679ba65e0be13a50ad6f1e5e | [
"Apache-2.0"
] | 11 | 2017-01-26T15:40:36.000Z | 2020-07-02T21:02:18.000Z | lib/flowex/pipes/stage.ex | antonmi/flowex | 7597e2ae1bf53033679ba65e0be13a50ad6f1e5e | [
"Apache-2.0"
] | 20 | 2017-01-25T07:56:00.000Z | 2021-11-29T16:19:34.000Z | defmodule Flowex.Stage do
@moduledoc "Pipes function is called here"
use GenStage
def start_link(state, opts \\ []) do
GenStage.start_link(__MODULE__, state, opts)
end
def init(opts) do
subscribe_to_with_opts = Enum.map(opts.producer_names, &{&1, max_demand: 1})
{:producer_consumer, opts, subscribe_to: subscribe_to_with_opts}
end
def handle_events([ip], _from, state = %Flowex.StageOpts{type: :pipe}) do
if ip.error do
{:noreply, [ip], state}
else
new_ip = try_apply(ip, {state.module, state.function, state.opts})
{:noreply, [new_ip], state}
end
end
def handle_events([ip], _from, state = %Flowex.StageOpts{type: :error_pipe}) do
if ip.error do
struct = struct(state.module.__struct__, ip.struct)
result = apply(state.module, state.function, [ip.error, struct, state.opts])
ip_struct = Map.merge(ip.struct, Map.delete(result, :__struct__))
{:noreply, [%{ip | struct: ip_struct}], state}
else
{:noreply, [ip], state}
end
end
defp try_apply(ip, {module, function, opts}) do
struct = struct(module.__struct__, ip.struct)
result = apply(module, function, [struct, opts])
ip_struct = Map.merge(ip.struct, Map.delete(result, :__struct__))
%{ip | struct: ip_struct}
rescue
error ->
%{
ip
| error: %Flowex.PipeError{
error: error,
message: Exception.message(error),
pipe: {module, function, opts},
struct: ip.struct
}
}
end
end
| 29.150943 | 82 | 0.630421 |
08a250172cc42f7545cc38ea39345012c9e25c2f | 1,439 | exs | Elixir | test/incident/event_store/in_memory/event_test.exs | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | test/incident/event_store/in_memory/event_test.exs | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | test/incident/event_store/in_memory/event_test.exs | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | defmodule Incident.EventStore.InMemory.EventTest do
use Incident.RepoCase, async: true
alias Ecto.UUID
alias Incident.EventStore.InMemory.Event
@valid_params %{
event_id: UUID.generate(),
aggregate_id: UUID.generate(),
event_type: "SomethingHappened",
version: 1,
event_date: DateTime.utc_now(),
event_data: %{}
}
describe "changeset/2" do
test "returns a valid changeset when all fields are valid" do
changeset = Event.changeset(%Event{}, @valid_params)
assert changeset.valid?
end
test "returns an error when a required field is not present" do
changeset = Event.changeset(%Event{}, %{})
refute changeset.valid?
assert %{event_id: ["can't be blank"]} = errors_on(changeset)
assert %{aggregate_id: ["can't be blank"]} = errors_on(changeset)
assert %{event_type: ["can't be blank"]} = errors_on(changeset)
assert %{event_date: ["can't be blank"]} = errors_on(changeset)
assert %{event_data: ["can't be blank"]} = errors_on(changeset)
assert %{version: ["can't be blank"]} = errors_on(changeset)
end
test "returns an error when a field is set with a wront type" do
invalid_params = Map.merge(@valid_params, %{event_date: "2010-04-11"})
changeset = Event.changeset(%Event{}, invalid_params)
refute changeset.valid?
assert %{event_date: ["is invalid"]} = errors_on(changeset)
end
end
end
| 32.704545 | 76 | 0.66991 |
08a254c787cb679a1f57085feeeaced498ab5f92 | 772 | ex | Elixir | lib/console_web/controllers/log_controller.ex | pluralsh/console | 38a446ce1bc2f7bc3e904fcacb102d3d57835ada | [
"Apache-2.0"
] | 6 | 2021-11-17T21:10:49.000Z | 2022-02-16T19:45:28.000Z | lib/console_web/controllers/log_controller.ex | pluralsh/console | 38a446ce1bc2f7bc3e904fcacb102d3d57835ada | [
"Apache-2.0"
] | 18 | 2021-11-25T04:31:06.000Z | 2022-03-27T04:54:00.000Z | lib/console_web/controllers/log_controller.ex | pluralsh/console | 38a446ce1bc2f7bc3e904fcacb102d3d57835ada | [
"Apache-2.0"
] | null | null | null | defmodule ConsoleWeb.LogController do
use ConsoleWeb, :controller
import Console.GraphQl.Resolvers.Observability, only: [ts: 1]
plug ConsoleWeb.Rbac, permission: :operate
def download(conn, %{"repo" => repo, "q" => q, "end" => e}) do
now = Timex.now()
start = Timex.shift(now, minutes: -String.to_integer(e))
conn =
conn
|> put_resp_content_type("text/plain")
|> put_resp_header("content-disposition","attachment; filename=\"#{repo}_logs.txt\"")
|> send_chunked(200)
Loki.Stream.stream(q, ts(start), ts(now))
|> Enum.reduce_while(conn, fn %Loki.Value{value: line}, conn ->
case chunk(conn, line) do
{:ok, conn} -> {:cont, conn}
{:error, :closed} -> {:halt, conn}
end
end)
end
end
| 29.692308 | 91 | 0.619171 |
08a294a05a0a3e71148bb2314284ccc2d67cdea2 | 11,029 | exs | Elixir | lib/mix/test/mix/tasks/deps.git_test.exs | gsphanikumar/elixir | 6ca225da4e016200a462888348ff1c3feb625b78 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/deps.git_test.exs | gsphanikumar/elixir | 6ca225da4e016200a462888348ff1c3feb625b78 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/deps.git_test.exs | gsphanikumar/elixir | 6ca225da4e016200a462888348ff1c3feb625b78 | [
"Apache-2.0"
] | null | null | null | Code.require_file "../../test_helper.exs", __DIR__
defmodule Mix.Tasks.DepsGitTest do
use MixTest.Case
defmodule DepsOnGitApp do
def project do
[ app: :deps_on_git_app,
version: "0.1.0",
deps: [
{:deps_on_git_repo, "0.2.0", git: fixture_path("deps_on_git_repo")}
] ]
end
end
defmodule GitApp do
def project do
opts = Process.get(:git_repo_opts) || []
[ app: :git_app,
version: "0.1.0",
deps: [
{:git_repo, "0.1.0", [git: fixture_path("git_repo")] ++ opts}
] ]
end
end
defmodule GitSubmodulesApp do
def project do
[ app: :git_app,
version: "0.1.0",
deps: [
{:git_repo, "0.1.0", git: fixture_path("git_repo"), submodules: true}
] ]
end
end
defmodule GitErrorApp do
def project do
[ deps: [
{:git_repo, "0.1.0", git: fixture_path("not_git_repo")}
] ]
end
end
test "gets and updates git repos with compilation" do
Mix.Project.push GitApp
in_fixture "no_mixfile", fn ->
Mix.Tasks.Deps.Get.run []
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
assert File.read!("mix.lock") =~
~r/"git_repo": {:git, #{inspect fixture_path("git_repo")}, "[a-f0-9]+", \[\]}/
Mix.Tasks.Deps.Update.run ["--all"]
message = "* Updating git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
end
end
test "gets and updates git repos with submodules" do
Mix.Project.push GitSubmodulesApp
in_fixture "no_mixfile", fn ->
Mix.Tasks.Deps.Get.run []
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
assert File.read!("mix.lock") =~ "submodules: true"
end
end
test "handles invalid .git directory" do
Mix.Project.push GitApp
in_fixture "no_mixfile", fn ->
File.mkdir_p!("deps/git_repo/.git")
Mix.Tasks.Deps.Get.run []
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
end
end
test "handles missing .git directory" do
Mix.Project.push GitApp
in_fixture "no_mixfile", fn ->
Mix.Tasks.Deps.Get.run []
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
File.rm_rf!("deps/git_repo/.git")
assert_raise Mix.Error, "Can't continue due to errors on dependencies", fn ->
Mix.Tasks.Deps.Check.run ["git_repo"]
end
end
end
test "gets and updates many levels deep dependencies" do
Mix.Project.push DepsOnGitApp
in_fixture "no_mixfile", fn ->
Mix.Tasks.Deps.Get.run []
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
message = "* Getting deps_on_git_repo (#{fixture_path("deps_on_git_repo")})"
assert_received {:mix_shell, :info, [^message]}
assert File.exists?("deps/deps_on_git_repo/mix.exs")
assert File.rm("deps/deps_on_git_repo/.fetch") == :ok
assert File.exists?("deps/git_repo/mix.exs")
assert File.rm("deps/git_repo/.fetch") == :ok
# Compile the dependencies
Mix.Tasks.Deps.Compile.run []
# Now update children and make sure it propagates
Mix.Tasks.Deps.Update.run ["git_repo"]
assert File.exists?("deps/deps_on_git_repo/.fetch")
assert File.exists?("deps/git_repo/.fetch")
# Compile git repo but unload it so...
Mix.Tasks.Deps.Compile.run ["git_repo"]
assert File.exists?("_build/dev/lib/git_repo/ebin")
Code.delete_path("_build/dev/lib/git_repo/ebin")
# Deps on git repo loads it automatically on compile
Mix.Task.reenable "deps.loadpaths"
Mix.Tasks.Deps.Compile.run ["deps_on_git_repo"]
assert File.exists?("_build/dev/lib/deps_on_git_repo/ebin")
end
after
purge [GitRepo, GitRepo.Mixfile]
end
test "recompiles the project when a dep is fetched" do
Mix.Project.push GitApp
in_fixture "no_mixfile", fn ->
Mix.Tasks.Deps.Get.run []
assert File.exists?("deps/git_repo/.fetch")
# We can compile just fine
Mix.Tasks.Compile.run []
assert_received {:mix_shell, :info, ["Compiled lib/git_repo.ex"]}
assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]}
# Clear up to prepare for the update
Mix.Task.clear
Mix.shell.flush
purge [A, B, C, GitRepo]
# Update will mark the update required
Mix.Tasks.Deps.Update.run ["git_repo"]
assert File.exists?("deps/git_repo/.fetch")
ensure_touched("deps/git_repo/.fetch") # Ensure timestamp differs
# mix deps.compile is required...
Mix.Tasks.Deps.run []
msg = " the dependency build is outdated, please run \"mix deps.compile\""
assert_received {:mix_shell, :info, [^msg]}
# But also ran automatically
Mix.Tasks.Compile.run []
assert_received {:mix_shell, :info, ["Compiled lib/git_repo.ex"]}
assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]}
assert File.exists?("_build/dev/lib/git_repo/.compile.fetch")
:ok
end
after
purge [A, B, C, GitRepo, GitRepo.Mixfile]
end
test "all up to date dependencies" do
Mix.Project.push GitApp
in_fixture "no_mixfile", fn ->
Mix.Tasks.Deps.Get.run []
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
Mix.Tasks.Deps.Get.run []
assert_received {:mix_shell, :info, ["All dependencies up to date"]}
end
after
purge [GitRepo, GitRepo.Mixfile]
end
test "updates the lock when the repo updates" do
Mix.Project.push GitApp
# Get git repo first revision
[last, first|_] = get_git_repo_revs
in_fixture "no_mixfile", fn ->
Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), first, []}}
Mix.Tasks.Deps.Get.run []
refute File.exists?("deps/git_repo/lib/git_repo.ex")
assert File.read!("mix.lock") =~ first
Mix.Tasks.Deps.Update.run ["git_repo"]
assert File.exists?("deps/git_repo/lib/git_repo.ex")
assert File.read!("mix.lock") =~ last
Mix.Tasks.Deps.Clean.run ["--all"]
refute File.exists?("deps/git_repo/lib/git_repo.ex")
assert File.read!("mix.lock") =~ last
Mix.Tasks.Deps.Clean.run ["--unlock", "--all"]
refute File.read!("mix.lock") =~ last
end
after
purge [GitRepo, GitRepo.Mixfile]
end
test "updates the repo when the lock updates" do
Mix.Project.push GitApp
[last, first|_] = get_git_repo_revs
in_fixture "no_mixfile", fn ->
Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), first, []}}
Mix.Tasks.Deps.Get.run []
refute File.exists?("deps/git_repo/lib/git_repo.ex")
assert File.read!("mix.lock") =~ first
# Update the lock and now we should get an error
Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), last, []}}
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Check.run []
end
# Flush the errors we got, move to a clean slate
Mix.shell.flush
Mix.Task.clear
# Calling get should update the dependency
Mix.Tasks.Deps.Get.run []
assert File.exists?("deps/git_repo/lib/git_repo.ex")
assert File.read!("mix.lock") =~ last
message = "* Updating git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
# Check we got no error
refute_received {:mix_shell, :error, _}
end
after
purge [GitRepo, GitRepo.Mixfile]
end
test "updates the repo and the lock when the mixfile updates" do
Mix.Project.push GitApp
[last, first|_] = get_git_repo_revs
in_fixture "no_mixfile", fn ->
# Move to the first version
Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), first, []}}
Mix.Tasks.Deps.Get.run []
assert File.read!("mix.lock") =~ first
# Update the project configuration. It should force an update.
refresh deps: [{:git_repo, "0.1.0", git: fixture_path("git_repo"), ref: last}]
Mix.Tasks.Deps.run []
msg = " lock outdated: the lock is outdated compared to the options in your mixfile (run \"mix deps.get\" to fetch locked version)"
assert_received {:mix_shell, :info, [^msg]}
# Check an update was triggered
Mix.Tasks.Deps.Get.run []
assert File.read!("mix.lock") =~ last
message = "* Getting git_repo (#{fixture_path("git_repo")})"
assert_received {:mix_shell, :info, [^message]}
# Check we got no error
refute_received {:mix_shell, :error, _}
end
after
purge [GitRepo, GitRepo.Mixfile]
end
test "does not attempt to compile projects that could not be retrieved" do
Mix.Project.push GitErrorApp
in_fixture "no_mixfile", fn ->
exception = assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Get.run []
end
assert Exception.message(exception) =~ "Command \"git clone"
end
end
test "does not load bad mixfiles on get" do
Mix.Project.push GitApp
[last, _, bad|_] = get_git_repo_revs
in_fixture "no_mixfile", fn ->
Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), bad, []}}
catch_error(Mix.Tasks.Deps.Get.run [])
Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), last, []}}
Mix.Tasks.Deps.Get.run []
assert File.read!("mix.lock") =~ last
end
after
purge [GitRepo, GitRepo.Mixfile]
end
test "updates on git opts change" do
Mix.Project.push GitApp
in_fixture "no_mixfile", fn ->
Process.put(:git_repo_opts, tag: "without_module")
refresh([])
Mix.Tasks.Deps.Get.run []
refute File.regular?("deps/git_repo/lib/git_repo.ex")
Process.put(:git_repo_opts, tag: "with_module")
refresh([])
Mix.Tasks.Deps.Get.run []
assert File.regular?("deps/git_repo/lib/git_repo.ex")
end
after
purge [GitRepo, GitRepo.Mixfile]
end
test "does not load bad mixfiles on update" do
Mix.Project.push GitApp
[last, _, bad|_] = get_git_repo_revs
in_fixture "no_mixfile", fn ->
Mix.Dep.Lock.write %{git_repo: {:git, fixture_path("git_repo"), bad, []}}
catch_error(Mix.Tasks.Deps.Get.run [])
Mix.Tasks.Deps.Update.run ["git_repo"]
Mix.Tasks.Deps.Compile.run ["git_repo"]
assert File.read!("mix.lock") =~ last
end
after
purge [GitRepo, GitRepo.Mixfile]
end
defp refresh(post_config) do
%{name: name, file: file} = Mix.Project.pop
Mix.ProjectStack.post_config(post_config)
Mix.Project.push(name, file)
end
defp get_git_repo_revs do
File.cd! fixture_path("git_repo"), fn ->
Regex.split ~r(\r?\n), System.cmd("git", ["log", "--format=%H"]) |> elem(0)
end
end
end
| 30.721448 | 138 | 0.631517 |
08a29ba9fd3cbfff702eb824a31b2568fb8af7ab | 2,810 | ex | Elixir | lib/phoenix/tracker/delta_generation.ex | dolfinus/phoenix_pubsub | 69333730a1fa771014821e279263dec049f33a4c | [
"MIT"
] | null | null | null | lib/phoenix/tracker/delta_generation.ex | dolfinus/phoenix_pubsub | 69333730a1fa771014821e279263dec049f33a4c | [
"MIT"
] | null | null | null | lib/phoenix/tracker/delta_generation.ex | dolfinus/phoenix_pubsub | 69333730a1fa771014821e279263dec049f33a4c | [
"MIT"
] | null | null | null | defmodule Phoenix.Tracker.DeltaGeneration do
@moduledoc false
require Logger
alias Phoenix.Tracker.{State, Clock, Replica}
@doc """
Extracts minimal delta from generations to satisfy remote clock.
Falls back to extracting entire crdt if unable to match delta.
"""
@spec extract(State.t(), [State.delta()], State.name(), State.context()) ::
State.delta() | State.t()
def extract(%State{mode: :normal} = state, generations, remote_ref, remote_context) do
case delta_fullfilling_clock(generations, remote_context) do
{delta, index} ->
if index,
do: Logger.debug("#{inspect(state.replica)}: sending delta generation #{index + 1}")
State.extract(delta, remote_ref, remote_context)
nil ->
Logger.debug("#{inspect(state.replica)}: falling back to sending entire crdt")
State.extract(state, remote_ref, remote_context)
end
end
@spec push(State.t(), [State.delta()], State.delta(), [pos_integer]) :: [State.delta()]
def push(%State{mode: :normal} = parent, [] = _generations, %State{mode: :delta} = delta, opts) do
parent.delta
|> List.duplicate(Enum.count(opts))
|> do_push(delta, opts, {delta, []})
end
def push(%State{mode: :normal} = _parent, generations, %State{mode: :delta} = delta, opts) do
do_push(generations, delta, opts, {delta, []})
end
defp do_push([], _delta, [], {_prev, acc}), do: Enum.reverse(acc)
defp do_push([gen | generations], delta, [gen_max | opts], {prev, acc}) do
case State.merge_deltas(gen, delta) do
{:ok, merged} ->
if State.delta_size(merged) <= gen_max do
do_push(generations, delta, opts, {merged, [merged | acc]})
else
do_push(generations, delta, opts, {merged, [prev | acc]})
end
{:error, :not_contiguous} ->
do_push(generations, delta, opts, {gen, [gen | acc]})
end
end
@doc """
Prunes permanently downed replicates from the delta generation list
"""
@spec remove_down_replicas([State.delta()], Replica.replica_ref()) :: [State.delta()]
def remove_down_replicas(generations, replica_ref) do
Enum.map(generations, fn %State{mode: :delta} = gen ->
State.remove_down_replicas(gen, replica_ref)
end)
end
defp delta_fullfilling_clock(generations, remote_context) do
generations
|> Enum.with_index()
|> Enum.find(fn {%State{range: {local_start, local_end}}, _} ->
local_start = Clock.filter_replicas(local_start, Clock.replicas(remote_context))
local_end = Clock.filter_replicas(local_end, Clock.replicas(remote_context))
not Clock.dominates_or_equal?(local_start, local_end) and
Clock.dominates_or_equal?(remote_context, local_start) and
not Clock.dominates?(remote_context, local_end)
end)
end
end
| 36.493506 | 100 | 0.666904 |
08a30597db36b35c4e8f5eca4f2bcf451b7ca389 | 1,842 | ex | Elixir | lib/graft/machine.ex | cdesch/graft | fff22759d48095ffd4693f92fda7a0026d15fdb5 | [
"MIT"
] | 7 | 2021-08-29T14:38:54.000Z | 2022-02-23T10:56:35.000Z | lib/graft/machine.ex | cdesch/graft | fff22759d48095ffd4693f92fda7a0026d15fdb5 | [
"MIT"
] | 1 | 2021-11-23T21:39:31.000Z | 2021-11-23T21:39:31.000Z | lib/graft/machine.ex | cdesch/graft | fff22759d48095ffd4693f92fda7a0026d15fdb5 | [
"MIT"
] | 1 | 2021-11-23T21:27:48.000Z | 2021-11-23T21:27:48.000Z | defmodule Graft.Machine do
@moduledoc """
A behaviour module for implementing a replicated machine for the raft consensus
algorithm. Look at the `Graft` module docs for examples on how to create such
machines.
"""
use GenServer
@typedoc """
The state/data of the replicated machine (similar to the 'state' of GenServer).
"""
@type state :: any
@typedoc """
The entry request sent by the client.
"""
@type entry :: any
@typedoc """
The reply to be sent back to the client.
"""
@type response :: any
@doc """
Invoked when the server starts and links to the machine.
`args` is a list accepted arguments. Look at `Graft.start` to see how to pass
in these optional arguments.
Returning `{:ok, state}`, will initialise the state of the machine to `state`.
"""
@callback init(args :: list(any)) :: {:ok, state}
@doc """
Invoked when a server in the raft cluster is commiting an entry to its log.
Should apply the entry to the replicated machine.
Should return a tuple of the response for the server along with the new state of the
replicated machine.
"""
@callback handle_entry(entry, state) :: {response, state}
defmacro __using__(_opts) do
quote location: :keep do
@behaviour Graft.Machine
end
end
@doc false
@impl GenServer
def init([module, args]) do
{:ok, state} = module.init(args)
{:ok, {module, state}}
end
@doc false
@impl GenServer
def handle_call({:apply, entry}, _from, {module, state}) do
{reply, state} = module.handle_entry(entry, state)
{:reply, reply, {module, state}}
end
@doc false
def register(module, machine_args \\ []) do
GenServer.start_link(__MODULE__, [module, machine_args])
end
@doc false
def apply_entry(machine, entry) do
GenServer.call(machine, {:apply, entry})
end
end
| 24.891892 | 86 | 0.676982 |
08a3317eae5f57829422a1256c1192fb8cccbd2f | 930 | ex | Elixir | web/controllers/api/profile_controller.ex | sumanbh/phoenix-angular | 0954ce60edc95832f6bf62e54e54977c770d33c3 | [
"MIT"
] | 5 | 2017-07-01T16:08:27.000Z | 2021-04-14T12:50:31.000Z | web/controllers/api/profile_controller.ex | sumanbh/phoenix-angular | 0954ce60edc95832f6bf62e54e54977c770d33c3 | [
"MIT"
] | null | null | null | web/controllers/api/profile_controller.ex | sumanbh/phoenix-angular | 0954ce60edc95832f6bf62e54e54977c770d33c3 | [
"MIT"
] | 4 | 2017-05-08T00:57:52.000Z | 2019-05-26T23:29:38.000Z | defmodule Amazon.ProfileController do
use Amazon.Web, :controller
plug Guardian.Plug.EnsureAuthenticated, handler: Amazon.LoginController
alias Amazon.{Profile, Postgres}
alias Guardian.{Plug}
def show(conn, _params) do
current_user = Plug.current_resource(conn)
user_id = current_user.id
result = Profile.get_user(user_id)
|> Postgres.raw_query()
conn
|> put_status(:ok)
|> json(result)
end
def update(conn, %{"given_name" => given_name, "fullname" => fullname, "address" => address, "city" => city, "state" => state, "zip" => zip}) do
current_user = Plug.current_resource(conn)
user_id = current_user.id
result = Profile.update_user(given_name, fullname, address, city, state, zip, user_id)
|> Postgres.raw_query()
conn
|> put_status(:ok)
|> json(result)
end
end
| 31 | 148 | 0.619355 |
08a37ee0c635a6773adaf6b96a88d9ece39c95e2 | 1,821 | ex | Elixir | web/controller/error.ex | access-company/testgear | 6817b5a9f782fd501c5e8c74f12a33617bec4ba8 | [
"Apache-2.0"
] | 1 | 2018-04-26T08:40:13.000Z | 2018-04-26T08:40:13.000Z | web/controller/error.ex | access-company/testgear | 6817b5a9f782fd501c5e8c74f12a33617bec4ba8 | [
"Apache-2.0"
] | 14 | 2018-04-02T09:12:03.000Z | 2019-10-08T04:43:39.000Z | web/controller/error.ex | access-company/testgear | 6817b5a9f782fd501c5e8c74f12a33617bec4ba8 | [
"Apache-2.0"
] | 2 | 2018-04-26T05:50:06.000Z | 2021-09-01T18:48:43.000Z | # Copyright(c) 2015-2021 ACCESS CO., LTD. All rights reserved.
defmodule Testgear.Controller.Error do
  use Antikythera.Controller

  # Actions that fail in different ways, used to exercise the error handlers.

  def action_exception(_conn), do: raise "error!"

  def action_throw(_conn), do: throw "error!"

  def action_exit(_conn), do: exit "error!"

  def action_timeout(_conn), do: :timer.sleep(11_000)

  # Custom error handlers. Each one can itself be made to raise by passing
  # a "raise" query parameter (see raise_if_told/2).

  def error(conn, reason) do
    raise_if_told(conn, fn ->
      Conn.json(conn, 500, %{from: "custom_error_handler: #{reason_kind(reason)}"})
    end)
  end

  def no_route(conn), do: respond_error(conn, 400, "no_route")

  def bad_request(conn), do: respond_error(conn, 400, "bad_request")

  def bad_executor_pool_id(conn, _reason), do: respond_error(conn, 400, "bad_executor_pool_id")

  def ws_too_many_connections(conn), do: respond_error(conn, 503, "ws_too_many_connections")

  # Handlers that intentionally return malformed results.

  def incorrect_return(conn), do: {:ok, Conn.put_status(conn, 200)}

  def missing_status_code(conn), do: %Conn{conn | resp_body: "missing_status_code"}

  def illegal_resp_body(conn) do
    %Conn{conn | status: 200, resp_body: %{"resp_body can't be a map" => "should instead be a binary"}}
  end

  def exhaust_heap_memory(conn) do
    Testgear.Util.exhaust_heap_memory()
    Conn.json(conn, 200, %{"message" => "this body won't be returned"})
  end

  # Helpers

  # Normalizes an error reason to its atom tag.
  defp reason_kind({kind, _}), do: kind
  defp reason_kind(atom), do: atom

  # Renders a JSON error body unless the request asked us to raise.
  defp respond_error(conn, status, name) do
    raise_if_told(conn, fn -> Conn.json(conn, status, %{error: name}) end)
  end

  # Raises when the "raise" query parameter is present; otherwise runs `fun`.
  defp raise_if_told(conn, fun) do
    if Map.get(conn.request.query_params, "raise") do
      raise "exception raised in error handler function!"
    else
      fun.()
    end
  end
end
| 22.207317 | 103 | 0.646348 |
08a388fb1b2a1bf81bc905ff546b0bf86936a204 | 2,866 | ex | Elixir | lib/ibanity/client.ex | ibanity/ibanity-elixir | c2e1feedbfc2376678c9db78c6365a82a654b00b | [
"MIT"
] | 3 | 2018-11-17T18:12:15.000Z | 2020-12-09T06:26:59.000Z | lib/ibanity/client.ex | ibanity/ibanity-elixir | c2e1feedbfc2376678c9db78c6365a82a654b00b | [
"MIT"
] | 2 | 2018-12-12T14:14:56.000Z | 2019-07-01T14:13:57.000Z | lib/ibanity/client.ex | ibanity/ibanity-elixir | c2e1feedbfc2376678c9db78c6365a82a654b00b | [
"MIT"
] | null | null | null | defmodule Ibanity.Client do
@moduledoc false
use Retry
alias Ibanity.{Collection, Configuration, HttpRequest}
import Ibanity.JsonDeserializer
def execute(%Ibanity.Request{} = request, method, uri_path, type \\ nil) do
case HttpRequest.build(request, method, uri_path) do
{:ok, http_request} -> execute(http_request, type)
{:error, reason} -> {:error, reason}
end
end
def get(url, application \\ :default) when is_binary(url) do
retry with: Configuration.retry_backoff(), rescue_only: [] do
url
|> HTTPoison.get!([], options(application))
|> process_response
after
{:ok, response} -> response |> handle_response_body(nil)
else
error -> error
end
end
defp execute(%HttpRequest{method: method, application: application} = request, type) do
body = if method_has_body?(method), do: Jason.encode!(%{data: request.data}), else: ""
retry with: Configuration.retry_backoff(), rescue_only: [] do
case HTTPoison.request(
method,
request.uri,
body,
request.headers,
options(application)
)
do
{:ok, res} -> res |> process_response
error -> error
end
after
{:ok, response} -> response |> handle_response_body(type)
else
error -> error
end
end
defp options(application) do
Keyword.merge([ssl: Configuration.ssl_options(application), hackney: [pool: application]], Configuration.timeout_options())
end
defp method_has_body?(method) do
method == :post or method == :patch
end
defp process_response(response) do
code = response.status_code
body = Jason.decode!(response.body)
cond do
code >= 200 and code <= 299 ->
{:ok, {:ok, body}}
code >= 400 and code <= 500 ->
{:ok, {:error, Map.fetch!(body, "errors")}}
code >= 501 and code <= 599 ->
{:error, Map.fetch!(body, "errors")}
true ->
{:error, :unknown_return_code}
end
end
defp handle_response_body(%{"message" => reason}, _), do: {:error, reason}
defp handle_response_body({:error, _} = error, _), do: error
defp handle_response_body({:ok, %{"data" => data, "meta" => meta, "links" => links}}, type)
when is_list(data) do
collection =
data
|> Enum.map(&deserialize(&1, type))
|> Collection.new(meta["paging"], links, meta["synchronizedAt"], meta["latestSynchronization"])
{:ok, collection}
end
defp handle_response_body({:ok, %{"data" => data}}, type)
when is_list(data) do
collection =
data
|> Enum.map(&deserialize(&1, type))
|> Collection.new(%{}, %{})
{:ok, collection}
end
defp handle_response_body({:ok, %{"data" => data}}, type), do: {:ok, deserialize(data, type)}
defp handle_response_body({:ok, %{}}, _), do: {:ok, %{}}
end
| 28.376238 | 127 | 0.61305 |
08a3e628623e8ec3a7fa9a347bce4660758937fa | 396 | exs | Elixir | backend/apps/students_crm_v2/priv/repo/migrations/20171228051744_create_phone.exs | KyivKrishnaAcademy/students_crm_v2 | e0ad9b3c5e52dfef5ab8f9179f3c593f935786e6 | [
"MIT"
] | null | null | null | backend/apps/students_crm_v2/priv/repo/migrations/20171228051744_create_phone.exs | KyivKrishnaAcademy/students_crm_v2 | e0ad9b3c5e52dfef5ab8f9179f3c593f935786e6 | [
"MIT"
] | 50 | 2018-07-29T09:17:35.000Z | 2019-02-26T05:23:34.000Z | backend/apps/students_crm_v2/priv/repo/migrations/20171228051744_create_phone.exs | KyivKrishnaAcademy/students_crm_v2 | e0ad9b3c5e52dfef5ab8f9179f3c593f935786e6 | [
"MIT"
] | null | null | null | defmodule StudentsCrmV2.Repo.Migrations.CreatePhone do
use Ecto.Migration
def change do
create table(:phones) do
add(:phone, :string, null: false)
add(:telegram_uid, :string)
add(:user_id, references("users"), on_delete: :delete_all)
timestamps()
end
create(index(:phones, [:phone], unique: true))
create(index(:phones, [:telegram_uid]))
end
end
| 22 | 64 | 0.664141 |
08a3ef37a1fe136e080b1cb6066a236464aa2e34 | 196 | exs | Elixir | test/ellie_web/controllers/page_controller_test.exs | HenkPoley/ellie | 045212b56142341fc95b79659c3ca218b0d5d282 | [
"BSD-3-Clause"
] | 377 | 2018-04-05T03:36:00.000Z | 2022-03-30T19:12:44.000Z | test/ellie_web/controllers/page_controller_test.exs | HenkPoley/ellie | 045212b56142341fc95b79659c3ca218b0d5d282 | [
"BSD-3-Clause"
] | 91 | 2018-05-24T21:56:06.000Z | 2022-02-26T03:54:04.000Z | test/ellie_web/controllers/page_controller_test.exs | HenkPoley/ellie | 045212b56142341fc95b79659c3ca218b0d5d282 | [
"BSD-3-Clause"
] | 34 | 2018-05-29T03:54:35.000Z | 2022-01-13T07:12:46.000Z | defmodule EllieWeb.PageControllerTest do
use EllieWeb.ConnCase
test "GET /", %{conn: conn} do
conn = get(conn, "/")
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 21.777778 | 60 | 0.673469 |
08a4051669253f8a23cddd7bcfc512615f25186d | 81 | ex | Elixir | base/fc_support/lib/fc_support.ex | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 46 | 2018-10-13T23:18:13.000Z | 2021-08-07T07:46:51.000Z | base/fc_support/lib/fc_support.ex | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 25 | 2018-10-14T00:56:07.000Z | 2019-12-23T19:41:02.000Z | base/fc_support/lib/fc_support.ex | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 5 | 2018-12-16T04:39:51.000Z | 2020-10-01T12:17:03.000Z | defmodule FCSupport do
@moduledoc """
Documentation for FCSupport.
"""
end
| 13.5 | 30 | 0.703704 |
08a4458526d50fa27c790a8b5b0f1cb2b084c3b7 | 495 | ex | Elixir | lib/console_web/views/label_view.ex | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | null | null | null | lib/console_web/views/label_view.ex | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | 1 | 2021-04-03T09:29:31.000Z | 2021-04-03T09:29:31.000Z | lib/console_web/views/label_view.ex | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | null | null | null | defmodule ConsoleWeb.LabelView do
use ConsoleWeb, :view
alias ConsoleWeb.LabelView
def render("show.json", %{label: label}) do
render_one(label, LabelView, "label.json")
end
def render("label.json", %{label: label}) do
%{
id: label.id,
name: label.name,
organization_id: label.organization_id,
}
end
def append_labels(json, labels) do
labels_json = render_many(labels, LabelView, "label.json")
Map.put(json, :labels, labels_json)
end
end
| 22.5 | 62 | 0.672727 |
08a457b7b0dc6562c57e230711e5d325fd680ee9 | 8,726 | ex | Elixir | lib/vantagex/cryptocurrencies.ex | sbacarob/vantagex | 3d07417d3496255d7b21682e9cf6561e1431d4f8 | [
"MIT"
] | 11 | 2019-03-31T00:20:57.000Z | 2021-12-19T18:18:29.000Z | lib/vantagex/cryptocurrencies.ex | sbacarob/vantagex | 3d07417d3496255d7b21682e9cf6561e1431d4f8 | [
"MIT"
] | 12 | 2019-02-13T11:56:28.000Z | 2020-10-28T04:35:01.000Z | lib/vantagex/cryptocurrencies.ex | sbacarob/vantagex | 3d07417d3496255d7b21682e9cf6561e1431d4f8 | [
"MIT"
] | 2 | 2019-05-22T05:46:02.000Z | 2020-04-10T19:08:46.000Z | defmodule Vantagex.Cryptocurrencies do
@moduledoc """
Contains functions related to the Cryptocurrencies functions from Alpha Vantage
"""
import Vantagex.Helper
alias Vantagex.Forex
@module_id "DIGITAL_CURRENCY"
@doc """
Uses Alpha Vantage's CURRENCY_EXCHANGE_RATE function.
Added here for simplicity, but performs a call to `Forex.exchange_rate`
Args:
* `from_currency` - The currency to get the exchange rate for. e.g. "BTC"
* `to_currency` - The destination currency for the exchange rate. e.g. "USD"
* `opts` - A list of extra options to pass to the function.
Allowed options:
* `datatype` - `:map | :json | :csv` specifies the return format. Defaults to :map
"""
@spec exchange_rate(String.t(), String.t(), Keyword.t()) :: Map.t() | String.t()
def exchange_rate(from_currency, to_currency, opts) do
Forex.exchange_rate(from_currency, to_currency, opts)
end
@doc """
Uses Alpha Vantage's DIGITAL_CURRENCY_DAILY function.
Returns the daily time series for a digital currency traded on a specific market
Args:
* `symbol` - the digital/crypto currency of your choice. any of the currencies in the [digital currency list]("https://www.alphavantage.co/digital_currency_list/")
* `market` - the exchange market of your choice. any from the [market list]("https://www.alphavantage.co/physical_currency_list/")
* `opts` - A list of extra options to pass to the function.
Allowed options:
* `datatype` - `:map | :json | :csv` specifies the return format. Defaults to :map
## Examples:
iex> Vantagex.Cryptocurrencies.daily("BTC", "USD")
iex(1)> Vantagex.Cryptocurrencies.daily("BTC", "USD")
%{
"Meta Data" => %{
"1. Information" => "Daily Prices and Volumes for Digital Currency",
"2. Digital Currency Code" => "BTC",
"3. Digital Currency Name" => "Bitcoin",
"4. Market Code" => "USD",
"5. Market Name" => "United States Dollar",
"6. Last Refreshed" => "2019-03-09 (end of day)",
"7. Time Zone" => "UTC"
},
"Time Series (Digital Currency Daily)" => %{
"2017-07-13" => %{
"1a. open (USD)" => "2397.70831714",
"1b. open (USD)" => "2397.70831714",
"2a. high (USD)" => "2429.55116636",
"2b. high (USD)" => "2429.55116636",
"3a. low (USD)" => "2329.24694466",
"3b. low (USD)" => "2329.24694466",
"4a. close (USD)" => "2353.72968273",
"4b. close (USD)" => "2353.72968273",
"5. volume" => "73837.90295505",
"6. market cap (USD)" => "173794463.89599040"
},
"2018-11-12" => %{
"1a. open (USD)" => "6404.47988049",
"1b. open (USD)" => "6404.47988049",
"2a. high (USD)" => "6435.95061677",
"2b. high (USD)" => "6435.95061677",
"3a. low (USD)" => "6359.81993277",
"3b. low (USD)" => "6359.81993277",
"4a. close (USD)" => "6375.86047086",
"4b. close (USD)" => "6375.86047086",
"5. volume" => "57756.07950395",
"6. market cap (USD)" => "368244704.26095134"
},
...
}
}
"""
@spec daily(String.t(), String.t(), Keyword.t()) :: String.t() | Map.t()
def daily(symbol, market, opts \\ []) do
params = %{
symbol: symbol,
market: market,
datatype: Keyword.get(opts, :datatype)
} |> clean_params()
resolve_request(:daily, params, @module_id)
end
@doc """
Uses Alpha Vantage's DIGITAL_CURRENCY_WEEKLY function.
Returns the daily time series for a digital currency traded on a specific market
Args:
* `symbol` - the digital/crypto currency of your choice. any of the currencies in the [digital currency list]("https://www.alphavantage.co/digital_currency_list/")
* `market` - the exchange market of your choice. any from the [market list]("https://www.alphavantage.co/physical_currency_list/")
* `opts` - A list of extra options to pass to the function.
Allowed options:
* `datatype` - `:map | :json | :csv` specifies the return format. Defaults to :map
## Examples
iex> Vantagex.Cryptocurrencies.weekly("BTC", "USD")
%{
"Meta Data" => %{
"1. Information" => "Weekly Prices and Volumes for Digital Currency",
"2. Digital Currency Code" => "BTC",
"3. Digital Currency Name" => "Bitcoin",
"4. Market Code" => "USD",
"5. Market Name" => "United States Dollar",
"6. Last Refreshed" => "2019-03-09 (end of day)",
"7. Time Zone" => "UTC"
},
"Time Series (Digital Currency Weekly)" => %{
"2018-09-02" => %{
"1a. open (USD)" => "6727.99365712",
"1b. open (USD)" => "6727.99365712",
"2a. high (USD)" => "7314.28946177",
"2b. high (USD)" => "7314.28946177",
"3a. low (USD)" => "6656.81489204",
"3b. low (USD)" => "6656.81489204",
"4a. close (USD)" => "7277.19919306",
"4b. close (USD)" => "7277.19919306",
"5. volume" => "522049.82045400",
"6. market cap (USD)" => "3694710857.50000000"
},
"2016-09-18" => %{
"1a. open (USD)" => "607.40733392",
"1b. open (USD)" => "607.40733392",
"2a. high (USD)" => "612.26786946",
"2b. high (USD)" => "612.26786946",
"3a. low (USD)" => "605.42161452",
"3b. low (USD)" => "605.42161452",
"4a. close (USD)" => "610.52319377",
"4b. close (USD)" => "610.52319377",
"5. volume" => "224699.68643400",
"6. market cap (USD)" => "136768377.19800001"
},
...
}
}
"""
@spec weekly(String.t(), String.t(), Keyword.t()) :: String.t() | Map.t()
def weekly(symbol, market, opts \\ []) do
params = %{
symbol: symbol,
market: market,
datatype: Keyword.get(opts, :datatype)
} |> clean_params()
resolve_request(:weekly, params, @module_id)
end
@doc """
Uses Alpha Vantage's DIGITAL_CURRENCY_MONTHLY function.
Returns the daily time series for a digital currency traded on a specific market
Args:
* `symbol` - the digital/crypto currency of your choice. any of the currencies in the [digital currency list]("https://www.alphavantage.co/digital_currency_list/")
* `market` - the exchange market of your choice. any from the [market list]("https://www.alphavantage.co/physical_currency_list/")
* `opts` - A list of extra options to pass to the function.
Allowed options:
* `datatype` - `:map | :json | :csv` specifies the return format. Defaults to :map
## Examples
iex> Vantagex.Cryptocurrencies.monthly("BTC", "USD")
%{
"Meta Data" => %{
"1. Information" => "Monthly Prices and Volumes for Digital Currency",
"2. Digital Currency Code" => "BTC",
"3. Digital Currency Name" => "Bitcoin",
"4. Market Code" => "USD",
"5. Market Name" => "United States Dollar",
"6. Last Refreshed" => "2019-03-09 (end of day)",
"7. Time Zone" => "UTC"
},
"Time Series (Digital Currency Monthly)" => %{
"2017-06-30" => %{
"1a. open (USD)" => "2281.68026573",
"1b. open (USD)" => "2281.68026573",
"2a. high (USD)" => "2975.72639337",
"2b. high (USD)" => "2975.72639337",
"3a. low (USD)" => "2134.66195666",
"3b. low (USD)" => "2134.66195666",
"4a. close (USD)" => "2468.29342943",
"4b. close (USD)" => "2468.29342943",
"5. volume" => "3101284.39484000",
"6. market cap (USD)" => "8096761491.46000004"
},
"2018-11-30" => %{
"1a. open (USD)" => "6341.38852605",
"1b. open (USD)" => "6341.38852605",
"2a. high (USD)" => "6555.60352543",
"2b. high (USD)" => "6555.60352543",
"3a. low (USD)" => "3559.57432974",
"3b. low (USD)" => "3559.57432974",
"4a. close (USD)" => "4012.09235790",
"4b. close (USD)" => "4012.09235790",
"5. volume" => "3739720.46008000",
"6. market cap (USD)" => "18164674672.59999847"
},
...
}
}
"""
@spec monthly(String.t(), String.t(), Keyword.t()) :: String.t() | Map.t()
def monthly(symbol, market, opts \\ []) do
params = %{
symbol: symbol,
market: market,
datatype: Keyword.get(opts, :datatype)
} |> clean_params()
resolve_request(:monthly, params, @module_id)
end
end
| 37.450644 | 165 | 0.549163 |
08a4ae578fc7b339439bd24702f6f479341a1e10 | 20,894 | ex | Elixir | lib/scenic/cache.ex | ruan-brandao/scenic | cbd8bf50c4239bb5e4ae7971bbc9850ae1f9f2b7 | [
"Apache-2.0"
] | null | null | null | lib/scenic/cache.ex | ruan-brandao/scenic | cbd8bf50c4239bb5e4ae7971bbc9850ae1f9f2b7 | [
"Apache-2.0"
] | null | null | null | lib/scenic/cache.ex | ruan-brandao/scenic | cbd8bf50c4239bb5e4ae7971bbc9850ae1f9f2b7 | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer on 2017-11-12.
# Copyright © 2017 Kry10 Industries. All rights reserved.
#
defmodule Scenic.Cache do
  @moduledoc """
  In memory cache for larger assets.

  Static assets such as fonts, images and more tend to be relatively large compared to
  other data. These assets are often used across multiple scenes and may need to be shared
  with multiple drivers.

  These assets also tend to have a significant load cost. Fonts need to be rendered. Images
  interpreted into their final binary form, etc.

  ## Goals

  Given this situation, the Cache module has multiple goals.
  * __Reuse__ - assets used by multiple scenes should only be stored in memory once
  * __Load Time__ - loading cost should only be paid once
  * __Copy time__ - assets are stored in ETS, so they don't need to be copied as they are used
  * __Pub/Sub__ - Consumers of static assets (drivers...) should be notified when an asset is
  loaded or changed. They should not poll the system.
  * __Security__ - Static assets can become an attack vector. Helper modules are provided
  to assist in verifying these files.

  ## Scope

  When an asset is loaded into the cache, it is assigned a scope. The scope is used to
  determine how long to hold the asset in memory before it is unloaded. Scope is either
  the atom `:global`, or a `pid`.

  The typical flow is that a scene will load an asset into the cache. A scope is automatically
  defined that tracks the asset against the pid of the scene that loaded it. When the scene
  is closed, the scope becomes empty and the asset is unloaded.

  If, while that scene is loaded, another scene (or any process...) attempts to load
  the same asset into the cache, a second scope is added and the duplicate load is
  skipped. When the first scene closes, the asset stays in memory as long as the second
  scope remains valid.

  When a scene closes, its scope stays valid for a short time in order to give the next
  scene a chance to load its assets (or claim a scope) and possibly re-use the already
  loaded assets.

  This is also useful in the event of a scene crashing and being restarted. The delay
  in unloading the scope means that the replacement scene will use already loaded
  assets instead of loading the same files again for no real benefit.

  When you load assets you can alternately provide your own scope instead of taking the
  default, which is your process's pid. If you provide `:global`, then the asset will
  stay in memory until you explicitly release it.

  ## Keys

  At its simplest, accessing the cache is a key-value store. When inserting assets
  via the main Cache module, you can supply any term you want as the key. However,
  in most cases this is not recommended.

  The key for an item in the cache should almost always be a SHA hash of the item itself.

  Why? Read below...

  The main exception is dynamic assets, such as video frames coming from a camera.

  ## Security

  A lesson learned the hard way is that static assets (fonts, images, etc) that your app
  loads out of storage can easily become attack vectors.

  These formats are complicated! There is no guarantee (on any system) that a malformed
  asset will not cause an error in the C code that interprets it. Again - these are complicated
  and the renderers need to be fast...

  The solution is to compute a SHA hash of these files during build-time of your app
  and to store the result in your application's code itself. Then during run time, you
  compare the pre-computed hash against the run-time hash of the asset being loaded.

  Please take advantage of the helper modules [`Cache.File`](Scenic.Cache.File.html),
  [`Cache.Term`](Scenic.Cache.Term.html), and [`Cache.Hash`](Scenic.Cache.Hash.html) to
  do this for you. These modules load files and insert them into the cache while checking
  a precomputed hash.

  This scheme is much stronger when the application code itself is also signed and
  verified, but that is an exercise for the packaging tools.

  Example:

      defmodule MyApp.MyScene do
        use Scenic.Scene

        # build the path to the static asset file (compile time)
        @asset_path :code.priv_dir(:my_app) |> Path.join("/static/images/asset.jpg")

        # pre-compute the hash (compile time)
        @asset_hash Scenic.Cache.Hash.file!( @asset_path, :sha )

        def init( _, _ ) do
          # load the asset into the cache (run time)
          Scenic.Cache.File.load(@asset_path, @asset_hash)

          ...
        end

        ...

  When assets are loaded this way, the `@asset_hash` term is also used as the key in
  the cache. This has the additional benefit of allowing you to pre-compute
  the graph itself, using the correct keys for the correct assets.

  ## Pub/Sub

  Drivers (or any process...) listen to the Cache via a simple pub/sub api.

  Because the graph may be computed during compile time and pushed at some
  other time than the assets are loaded, the drivers need to know when the assets
  become available.

  Whenever any asset is loaded into the cache, messages are sent to any
  subscribing processes along with the affected keys. This allows them to react in a
  loosely-coupled way to how the assets are managed in your scene.
  """

  use GenServer

  # import IEx

  @name :scenic_cache

  # ETS table names. The key table holds {key, ref_count, data}; the scope
  # table is a bag of {scope, key, inserting_pid} rows.
  @cache_table :scenic_cache_key_table
  @scope_table :scenic_cache_scope_table

  # notifications
  @cache_registry :scenic_cache_registry
  @cache_put :cache_put
  @cache_delete :cache_delete
  @cache_claim :cache_claim
  @cache_release :cache_release

  # Grace period (ms) before a released scope actually unloads its assets,
  # giving a replacement scene a chance to re-claim them.
  @default_release_delay 400

  # the format for an element in the cache is
  # {key, ref_count, data}

  # the refcount is needed because multiple scenes may be trying to load/cache
  # same item. Don't want to take the expense of re-loading them.
  # same if it was already set with the :global scope

  # The tables are public, not protected. This is because the data being set into
  # them is potentially quite large and I'd rather not send it as a message to this process.
  # If that turns out to be the wrong choice, then change put so that it is a call into
  # this process, passing the data and change the table to protected.

  # ===========================================================================
  defmodule Error do
    @moduledoc """
    Defines the exception thrown by the CacheModule
    """
    defexception message: nil
  end

  # client apis

  @doc """
  Retrieve an item from the Cache.

  This function accepts a key and a default both being any term in Elixir.

  If there is no item in the Cache that corresponds to the key the function will return nil else the
  function returns the term stored in the cache with the using the provided key

  ## Examples
      iex> Scenic.Cache.get("test_key")
      nil

      iex> :ets.insert(:scenic_cache_key_table, {"test_key", 1, :test_data})
      ...> true
      ...> Scenic.Cache.get("test_key")
      :test_data
  """
  @spec get(term(), term()) :: term() | nil
  def get(key, default \\ nil)

  def get(key, default) do
    :ets.lookup_element(@cache_table, key, 3)
  rescue
    # :ets raises ArgumentError when the key is absent; map that to the default.
    ArgumentError ->
      default

    other ->
      reraise(other, __STACKTRACE__)
  end

  @doc """
  Retrieve an item from the cache and wrap it in an {:ok, _} tuple.

  This function ideal if you need to pattern match on the result of getting from the cache.

  ## Examples
      iex> Scenic.Cache.fetch("test_key")
      {:error, :not_found}

      iex> :ets.insert(:scenic_cache_key_table, {"test_key", 1, :test_data})
      ...> true
      ...> Scenic.Cache.fetch("test_key")
      {:ok, :test_data}
  """
  def fetch(key)

  def fetch(key) do
    {:ok, :ets.lookup_element(@cache_table, key, 3)}
  rescue
    ArgumentError ->
      {:error, :not_found}

    other ->
      reraise(other, __STACKTRACE__)
  end

  # --------------------------------------------------------
  @doc """
  Retrieve an item from the Cache. Raises `Scenic.Cache.Error` if the key is
  not found.

  ## Examples
      iex> :ets.insert(:scenic_cache_key_table, {"test_key", 1, :test_data})
      ...> true
      ...> Scenic.Cache.get!("test_key")
      :test_data
  """
  def get!(key)

  def get!(key) do
    :ets.lookup_element(@cache_table, key, 3)
  rescue
    ArgumentError ->
      reraise(Error, [message: "Key #{inspect(key)} not found."], __STACKTRACE__)

    other ->
      reraise(other, __STACKTRACE__)
  end

  # --------------------------------------------------------
  @doc """
  Insert an item into the Cache.

  Parameters:
  * `key` - term to use as the retrieval key. Typically a hash of the data itself.
  * `data` - term to use as the stored data
  * `scope` - Optional scope to track the lifetime of this asset against. Can be `:global`
  but is usually nil, which defaults to the pid of the calling process.

  Returns `{:ok, key}`. If the key is already present, the data is overwritten
  and the calling scope claims the asset.
  """
  def put(key, data, scope \\ nil)
  def put(key, data, nil), do: do_put(self(), key, data)
  def put(key, data, :global), do: do_put(:global, key, data)
  def put(key, data, name) when is_atom(name), do: do_put(Process.whereis(name), key, data)
  def put(key, data, pid) when is_pid(pid), do: do_put(pid, key, data)

  # --------------------------------------------------------
  @doc """
  Add a scope to an existing asset in the cache.

  Claiming an asset in the cache adds a lifetime scope to it. This is essentially a
  refcount that is bound to a pid.

  Returns `true` if the item is loaded and the scope is added.
  Returns `false` if the asset is not loaded into the cache.
  """
  # return true if the ref was successful
  # return false if not - means the key doesn't exist anywhere and needs to be put
  def claim(key, scope \\ nil)
  def claim(key, nil), do: do_claim(self(), key)
  def claim(key, :global), do: do_claim(:global, key)
  def claim(key, name) when is_atom(name), do: do_claim(Process.whereis(name), key)
  def claim(key, pid) when is_pid(pid), do: do_claim(pid, key)

  # --------------------------------------------------------
  @doc """
  Release a scope claim on an asset.

  Usually the scope is released automatically when a process shuts down. However if you
  want to manually clean up, or unload an asset with the :global scope, then you should
  use release.

  Parameters:
  * `key` - the key to release.
  * `options` - options list

  Options:
  * `scope` - set to `:global` to release the global scope.
  * `delay` - add a delay of n milliseconds before releasing. This allows starting
  processes a chance to claim a scope before it is unloaded.
  """
  # return true if the deref was successful
  # return false if it wasn't ref'd in the first place
  def release(key, opts \\ [])

  def release(key, opts) do
    scope =
      case opts[:scope] do
        nil -> self()
        :global -> :global
        name when is_atom(name) -> Process.whereis(name)
        pid when is_pid(pid) -> pid
      end

    delay =
      case opts[:delay] do
        delay when is_integer(delay) and delay >= 0 -> delay
        nil -> @default_release_delay
      end

    delayed_release(scope, key, delay)
  end

  # --------------------------------------------------------
  @doc """
  Get the current status of an asset in the cache.

  This is used to test if the current process has claimed a scope on an asset.

  Returns `{:ok, scope}` when claimed, `{:err, :not_claimed}` when the asset is
  loaded but not claimed by the given scope, or `{:err, :not_found}` when the
  asset is not in the cache at all.
  """
  def status(key, scope \\ nil)
  def status(key, nil), do: do_status(self(), key)
  def status(key, :global), do: do_status(:global, key)
  def status(key, name) when is_atom(name), do: do_status(Process.whereis(name), key)
  def status(key, pid) when is_pid(pid), do: do_status(pid, key)

  # --------------------------------------------------------
  @doc """
  Returns a list of asset keys claimed by the given scope.
  """
  def keys(scope \\ nil)
  def keys(nil), do: do_keys(self())
  def keys(:global), do: do_keys(:global)
  def keys(name) when is_atom(name), do: do_keys(Process.whereis(name))
  def keys(pid) when is_pid(pid), do: do_keys(pid)

  @doc """
  Tests if a key is present in the cache.

  Note: this returns `true` whether or not the given scope has claimed the key —
  only `{:err, :not_found}` yields `false`.
  """
  def member?(key, scope \\ nil) do
    case status(key, scope) do
      {:ok, _} -> true
      {:err, :not_claimed} -> true
      {:err, :not_found} -> false
    end
  end

  # ============================================================================

  # --------------------------------------------------------
  @doc false
  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: @name)
  end

  # --------------------------------------------------------
  @doc false
  def init(_) do
    state = %{
      cache_table: :ets.new(@cache_table, [:set, :public, :named_table]),
      scope_table: :ets.new(@scope_table, [:bag, :public, :named_table])
    }

    {:ok, state}
  end

  # --------------------------------------------------------
  @doc false
  # :global scopes are never monitored - they live until explicitly released.
  def handle_cast({:monitor_scope, :global}, state), do: {:noreply, state}

  def handle_cast({:monitor_scope, pid}, state) when is_pid(pid) do
    Process.monitor(pid)
    {:noreply, state}
  end

  # --------------------------------------------------------
  @doc false
  def handle_info({:DOWN, _, :process, scope_pid, _}, state) do
    # a scope process we are monitoring just went down. Clean up after it.
    do_keys(scope_pid)
    |> Enum.each(&delayed_release(scope_pid, &1, @default_release_delay))

    {:noreply, state}
  end

  # --------------------------------------------------------
  def handle_info({:delayed_release, key, scope}, state) do
    do_release(scope, key)
    {:noreply, state}
  end

  # ============================================================================
  # private helpers

  # --------------------------------------------------------
  defp do_put(scope, key, data) do
    # Check if the key already exists. If so, overwrite the data, if not insert it.
    case :ets.member(@cache_table, key) do
      true ->
        :ets.update_element(@cache_table, key, {3, data})
        do_claim(scope, key)

      false ->
        :ets.insert(@cache_table, {key, 1, data})
        :ets.insert(@scope_table, {scope, key, self()})
        # dispatch message to self to monitor the scope
        GenServer.cast(@name, {:monitor_scope, scope})
        # dispatch a put message
        dispatch_notification(@cache_put, key)
    end

    {:ok, key}
  end

  # --------------------------------------------------------
  defp do_claim(scope, key) do
    case :ets.member(@cache_table, key) do
      # the key doesn't exist at all
      false ->
        false

      # the key does exist on some scope somewhere
      true ->
        # a claim is happening whether or not there is a put. dispatch a message
        dispatch_notification(@cache_claim, key)

        case key_in_scope?(scope, key) do
          # the key exists, and so does the scoped reference. Do nothing.
          true ->
            true

          false ->
            # the key exists, but not on this scope.
            # add a scope reference and increment the key refcount.
            # the self() data is just for debugging...
            :ets.insert(@scope_table, {scope, key, self()})
            :ets.update_counter(@cache_table, key, {2, 1})
            GenServer.cast(@name, {:monitor_scope, scope})
            true
        end
    end
  end

  # --------------------------------------------------------
  defp delayed_release(scope, key, delay)
  defp delayed_release(scope, key, 0), do: do_release(scope, key)

  defp delayed_release(scope, key, delay) when is_integer(delay) and delay > 0 do
    # defer the actual release; handled by handle_info({:delayed_release, ...})
    Process.send_after(@name, {:delayed_release, key, scope}, delay)
  end

  # --------------------------------------------------------
  defp do_release(scope, key) do
    # make sure this reference is valid
    case key_in_scope?(scope, key) do
      false ->
        false

      true ->
        # delete the scope reference
        :ets.match_delete(@scope_table, {scope, key, :_})

        # a release is happening whether or not there is a delete. dispatch a message
        dispatch_notification(@cache_release, key)

        # decrement the key refcount. If it goes to zero, delete it too
        case :ets.update_counter(@cache_table, key, {2, -1}) do
          0 ->
            :ets.delete(@cache_table, key)
            # dispatch a delete message
            dispatch_notification(@cache_delete, key)

          _ ->
            true
        end

        true
    end
  end

  # --------------------------------------------------------
  defp do_status(:global, key) do
    case :ets.member(@cache_table, key) do
      false ->
        {:err, :not_found}

      true ->
        case key_in_scope?(:global, key) do
          true -> {:ok, :global}
          false -> {:err, :not_claimed}
        end
    end
  end

  defp do_status(scope, key) do
    case :ets.member(@cache_table, key) do
      false ->
        {:err, :not_found}

      true ->
        case key_in_scope?(scope, key) do
          true -> {:ok, scope}
          # fall back to checking the :global scope before reporting unclaimed
          false -> do_status(:global, key)
        end
    end
  end

  # --------------------------------------------------------
  defp do_keys(scope) do
    @scope_table
    |> :ets.match({scope, :"$2", :_})
    |> List.flatten()
  end

  # --------------------------------------------------------
  defp key_in_scope?(scope, key) do
    :ets.match(@scope_table, {scope, key, :_}) != []
  end

  # ============================================================================
  # callback notifications

  # ----------------------------------------------
  @doc """
  Subscribe the calling process to cache messages.

  Pass in the type of messages you want to subscribe to.

  * `:cache_put` - sent when assets are put into the cache
  * `:cache_delete` - sent when assets are fully unloaded from the cache
  * `:cache_claim` - sent when a scope is claimed
  * `:cache_release` - sent when a scope is released
  * `:all` - all of the above message types
  """
  def subscribe(message_type)
  def subscribe(@cache_put), do: Registry.register(@cache_registry, @cache_put, @cache_put)

  def subscribe(@cache_delete),
    do: Registry.register(@cache_registry, @cache_delete, @cache_delete)

  def subscribe(@cache_claim), do: Registry.register(@cache_registry, @cache_claim, @cache_claim)

  def subscribe(@cache_release),
    do: Registry.register(@cache_registry, @cache_release, @cache_release)

  def subscribe(:all) do
    subscribe(@cache_put)
    subscribe(@cache_delete)
    subscribe(@cache_claim)
    subscribe(@cache_release)
  end

  @deprecated "Use Cache.subscribe/1 instead"
  def request_notification(message_type), do: subscribe(message_type)

  # ----------------------------------------------
  @doc """
  Unsubscribe the calling process from cache messages.

  Pass in the type of messages you want to unsubscribe from.

  * `:cache_put` - sent when assets are put into the cache
  * `:cache_delete` - sent when assets are fully unloaded from the cache
  * `:cache_claim` - sent when a scope is claimed
  * `:cache_release` - sent when a scope is released
  * `:all` - all of the above message types
  """
  def unsubscribe(message_type \\ :all)
  def unsubscribe(@cache_put), do: Registry.unregister(@cache_registry, @cache_put)
  def unsubscribe(@cache_delete), do: Registry.unregister(@cache_registry, @cache_delete)
  def unsubscribe(@cache_claim), do: Registry.unregister(@cache_registry, @cache_claim)
  def unsubscribe(@cache_release), do: Registry.unregister(@cache_registry, @cache_release)

  def unsubscribe(:all) do
    unsubscribe(@cache_put)
    unsubscribe(@cache_delete)
    unsubscribe(@cache_claim)
    unsubscribe(@cache_release)
  end

  @deprecated "Use Cache.unsubscribe/1 instead"
  def stop_notification(message_type \\ :all), do: unsubscribe(message_type)

  # ----------------------------------------------
  defp dispatch_notification(action, data) do
    # dispatch the call to any listening drivers
    Registry.dispatch(@cache_registry, action, fn entries ->
      for {pid, _} <- entries do
        try do
          GenServer.cast(pid, {action, data})
        catch
          kind, reason ->
            # Fixed: use __STACKTRACE__ instead of the deprecated
            # System.stacktrace/0 (consistent with the rescue clauses above).
            formatted = Exception.format(kind, reason, __STACKTRACE__)
            # Logger.error "Registry.dispatch/3 failed with #{formatted}"
            IO.puts("Scenic.Cache Registry.dispatch/3 failed with #{formatted}")
        end
      end
    end)

    :ok
  end
end
08a4afa54cad25a459f2dc8cc2d9b89f1b849954 | 925 | ex | Elixir | lib/safira_web/controllers/referral_controller.ex | cesium/safira | 10dd45357c20e8afc22563f114f49ccb74008114 | [
"MIT"
] | 40 | 2018-07-04T19:13:45.000Z | 2021-12-16T23:53:43.000Z | lib/safira_web/controllers/referral_controller.ex | cesium/safira | 10dd45357c20e8afc22563f114f49ccb74008114 | [
"MIT"
] | 94 | 2018-07-25T13:13:39.000Z | 2022-02-15T04:09:42.000Z | lib/safira_web/controllers/referral_controller.ex | cesium/safira | 10dd45357c20e8afc22563f114f49ccb74008114 | [
"MIT"
] | 5 | 2018-11-26T17:19:03.000Z | 2021-02-23T08:09:37.000Z | defmodule SafiraWeb.ReferralController do
use SafiraWeb, :controller
alias Safira.Accounts
alias Safira.Contest
alias Safira.Contest.Redeem
action_fallback SafiraWeb.FallbackController
plug Safira.Authorize, :attendee
def create(conn, %{"id" => id}) do
referral = Contest.get_referral!(id)
case referral.available do
true ->
user = Accounts.get_user(conn)
redeem_params = %{badge_id: referral.badge_id, attendee_id: user.attendee.id}
with {:ok, %Redeem{} = _redeem} <- Contest.create_redeem(redeem_params) do
Contest.update_referral(referral, %{available: false, attendee_id: user.attendee.id})
conn
|> put_status(:created)
|> json(%{referral: "Referral redeemed successfully"})
end
_ ->
conn
|> put_status(:unauthorized)
|> json(%{referral: "Referral not available"})
end
end
end
| 26.428571 | 95 | 0.655135 |
08a4b82076da28b6985f4bc342e07db7cf2d3257 | 2,307 | ex | Elixir | clients/content/lib/google_api/content/v2/model/products_custom_batch_response_entry.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/products_custom_batch_response_entry.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/products_custom_batch_response_entry.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.ProductsCustomBatchResponseEntry do
  @moduledoc """
  A batch entry encoding a single non-batch products response.

  ## Attributes

  *   `batchId` (*type:* `integer()`, *default:* `nil`) - The ID of the request entry this entry responds to.
  *   `errors` (*type:* `GoogleApi.Content.V2.Model.Errors.t`, *default:* `nil`) - A list of errors defined if and only if the request failed.
  *   `kind` (*type:* `String.t`, *default:* `content#productsCustomBatchResponseEntry`) - Identifies what kind of resource this is. Value: the fixed string "content#productsCustomBatchResponseEntry".
  *   `product` (*type:* `GoogleApi.Content.V2.Model.Product.t`, *default:* `nil`) - The inserted product. Only defined if the method is insert and if the request was successful.
  """

  # Auto-generated model module; `use` pulls in the struct/serialization
  # machinery from the Gax base.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :batchId => integer(),
          :errors => GoogleApi.Content.V2.Model.Errors.t(),
          :kind => String.t(),
          :product => GoogleApi.Content.V2.Model.Product.t()
        }

  # `field/1,2` (from GoogleApi.Gax.ModelBase) registers each attribute for
  # JSON (de)serialization; `as:` points nested fields at their model modules.
  field(:batchId)
  field(:errors, as: GoogleApi.Content.V2.Model.Errors)
  field(:kind)
  field(:product, as: GoogleApi.Content.V2.Model.Product)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.ProductsCustomBatchResponseEntry do
  # Delegate JSON decoding to the generated model module.
  def decode(value, options),
    do: GoogleApi.Content.V2.Model.ProductsCustomBatchResponseEntry.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.ProductsCustomBatchResponseEntry do
  # Delegate JSON encoding to the shared Gax model encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 41.196429 | 200 | 0.728652 |
08a4ba8070b4c84ea255dc62032cc12cf1b8e6d2 | 9,639 | ex | Elixir | issues/deps/earmark/lib/earmark/inline.ex | vronic/programming-elixir | 4465a81cc07b31a4c03bd277520e1127dda773b2 | [
"MIT"
] | null | null | null | issues/deps/earmark/lib/earmark/inline.ex | vronic/programming-elixir | 4465a81cc07b31a4c03bd277520e1127dda773b2 | [
"MIT"
] | null | null | null | issues/deps/earmark/lib/earmark/inline.ex | vronic/programming-elixir | 4465a81cc07b31a4c03bd277520e1127dda773b2 | [
"MIT"
] | null | null | null | defmodule Earmark.Inline do
@moduledoc """
Match and render inline sequences, passing each to the
renderer.
"""
import Earmark.Helpers
alias Earmark.Context
  @doc false
  # Entry point for inline conversion. The list clause joins multiple source
  # lines with newlines and re-dispatches to the binary clause.
  def convert(src, context) when is_list(src) do
    convert(Enum.join(src, "\n"), context)
  end

  # Scans a single source string for inline sequences, starting with an empty
  # iodata accumulator.
  def convert(src, context) do
    convert_each(src, context, [])
  end
defp convert_each("", _context, result) do
result
|> IO.iodata_to_binary
|> replace(~r{>‘}, ">’")
|> replace(~r{>“}, ">”")
end
  # Main inline scanner. Each branch tries one rule regex against the head of
  # `src` (patterns in basic_rules/rules_for are ^-anchored); on a hit it
  # renders the construct, beheads the matched prefix, and recurses with the
  # output appended to the iodata accumulator `result`. The `match = Regex.run(...)`
  # bindings inside `cond` rely on a non-nil run result being truthy.
  defp convert_each(src, context, result) do
    renderer = context.options.renderer
    cond do
      # escape
      match = Regex.run(context.rules.escape, src) ->
        [ match, escaped ] = match
        convert_each(behead(src, match), context, [ result | escaped ])
      # autolink
      match = Regex.run(context.rules.autolink, src) ->
        [ match, link, protocol ] = match
        { href, text } = convert_autolink(link, protocol)
        out = renderer.link(href, text)
        convert_each(behead(src, match), context, [ result | out ])
      # url (gfm)
      match = Regex.run(context.rules.url, src) ->
        [ match, href ] = match
        text = escape(href)
        out = renderer.link(href, text)
        convert_each(behead(src, match), context, [ result | out ])
      # tag
      match = Regex.run(context.rules.tag, src) ->
        [ match ] = match
        out = context.options.do_sanitize.(match)
        convert_each(behead(src, match), context, [ result | out ])
      # link — title capture group is optional, hence the two-shape case
      match = Regex.run(context.rules.link, src) ->
        { match, text, href, title } = case match do
          [ match, text, href ] -> { match, text, href, nil }
          [ match, text, href, title ] -> { match, text, href, title }
        end
        out = output_image_or_link(context, match, text, href, title)
        convert_each(behead(src, match), context, [ result | out ])
      # reflink — an empty second capture means "use the id as the text"
      match = Regex.run(context.rules.reflink, src) ->
        { match, alt_text, id } = case match do
          [ match, id ] -> { match, nil, id }
          [ match, id, "" ] -> { match, id, id }
          [ match, alt_text, id ] -> { match, alt_text, id }
        end
        out = reference_link(context, match, alt_text, id)
        convert_each(behead(src, match), context, [ result | out ])
      # footnotes
      match = Regex.run(context.rules.footnote, src) ->
        [match, id] = match
        out = footnote_link(context, match, id)
        convert_each(behead(src, match), context, [ result | out ])
      # nolink — [id] form: the id doubles as the link text
      match = Regex.run(context.rules.nolink, src) ->
        [ match, id ] = match
        out = reference_link(context, match, id, id)
        convert_each(behead(src, match), context, [ result | out ])
      # strikethrough (gfm)
      match = Regex.run(context.rules.strikethrough, src) ->
        [ match, content ] = match
        out = renderer.strikethrough(convert(content, context))
        convert_each(behead(src, match), context, [ result | out ])
      # strong — __x__ fills capture 1, **x** fills capture 2
      match = Regex.run(context.rules.strong, src) ->
        { match, content } = case match do
          [ m, _, c ] -> {m, c}
          [ m, c ] -> {m, c}
        end
        out = renderer.strong(convert(content, context))
        convert_each(behead(src, match), context, [ result | out ])
      # em — same two-alternative capture layout as strong
      match = Regex.run(context.rules.em, src) ->
        { match, content } = case match do
          [ m, _, c ] -> {m, c}
          [ m, c ] -> {m, c}
        end
        out = renderer.em(convert(content, context))
        convert_each(behead(src, match), context, [ result | out ])
      # code
      match = Regex.run(context.rules.code, src) ->
        [match, _, content] = match
        content = String.strip(content) # this from Gruber
        out = renderer.codespan(escape(content, true))
        convert_each(behead(src, match), context, [ result | out ])
      # br — index mode so we can behead by byte length of the match
      match = Regex.run(context.rules.br, src, return: :index) ->
        out = renderer.br()
        [ {0, match_len} ] = match
        convert_each(behead(src, match_len), context, [ result | out ])
      # text
      match = Regex.run(context.rules.text, src) ->
        [ match ] = match
        out = escape(context.options.do_smartypants.(match))
        convert_each(behead(src, match), context, [ result | out ])
      # No match — should be unreachable since the text rule is a catch-all
      true ->
        location = String.slice(src, 0, 100)
        raise("Failed to parse inline starting at: #{inspect(location)}")
    end
  end
defp convert_autolink(link, _separator = "@") do
link = (if String.at(link, 6) == ":", do: behead(link, 7), else: link)
text = mangle_link(link)
href = mangle_link("mailto:") <> text
{ href, text }
end
defp convert_autolink(link, _separator) do
link = escape(link)
{ link, link }
end
@doc """
Smartypants transformations convert quotes to the appropriate curly
variants, and -- and ... to – and …
"""
def smartypants(text) do
text
|> replace(~r{--}, "—")
|> replace(~r{(^|[-—/\(\[\{"”“\s])'}, "\\1‘")
|> replace(~r{\'}, "’")
|> replace(~r{(^|[-—/\(\[\{‘\s])\"}, "\\1“")
|> replace(~r{"}, "”")
|> replace(~r{\.\.\.}, "…")
end
@doc false
def mangle_link(link) do
link
end
defp output_image_or_link(context, "!" <> _, text, href, title) do
output_image(context.options.renderer, text, href, title)
end
defp output_image_or_link(context, _, text, href, title) do
output_link(context, text, href, title)
end
defp output_link(context, text, href, title) do
href = escape(href)
title = if title, do: escape(title), else: nil
context.options.renderer.link(href, convert_each(text, context, []), title)
end
defp output_footnote_link(context, ref, back_ref, number) do
ref = escape(ref)
back_ref = escape(back_ref)
context.options.renderer.footnote_link(ref, back_ref, number)
end
defp output_image(renderer, text, href, title) do
href = escape(href)
title = if title, do: escape(title), else: nil
renderer.image(href, escape(text), title)
end
defp reference_link(context, match, alt_text, id) do
id = id |> replace(~r{\s+}, " ") |> String.downcase
case Dict.fetch(context.links, id) do
{:ok, link } -> output_image_or_link(context, match, alt_text, link.url, link.title)
_ -> match
end
end
defp footnote_link(context, match, id) do
case Dict.fetch(context.footnotes, id) do
{:ok, footnote} -> number = footnote.number
output_footnote_link(context, "fn:#{number}", "fnref:#{number}", number)
_ -> match
end
end
  ##############################################################################
  # Handle adding option specific rules and processors                         #
  ##############################################################################
  # Pass-through used when a processor (smartypants / sanitize) is disabled.
  defp noop(text), do: text
@doc false
# this is called by the command line processor to update
# the inline-specific rules in light of any options
def update_context(context = %Context{options: options}) do
context = %{ context | rules: rules_for(options) }
context = if options.smartypants do
put_in(context.options.do_smartypants, &smartypants/1)
else
put_in(context.options.do_smartypants, &noop/1)
end
if options.sanitize do
put_in(context.options.do_sanitize, &escape/1)
else
put_in(context.options.do_sanitize, &noop/1)
end
end
  # Pattern fragment: text allowed inside link brackets (permits bracketed sub-spans).
  @inside ~S{(?:\[[^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*}
  # Pattern fragment: the href plus optional quoted title inside link parens.
  @href ~S{\s*<?([\s\S]*?)>?(?:\s+['"]([\s\S]*?)['"])?\s*} #"
  # Inline code span (x-flag: whitespace and # comments inside the pattern are ignored).
  @code ~r{^
 (`+) # $1 = Opening run of `
 (.+?) # $2 = The code block
 (?<!`)
 \1 # Matching closer
 (?!`)
 }x
  # Baseline inline rules. Every pattern is anchored at the start of input;
  # ~r{\z\A} (end-before-start) can never match and acts as a disabled rule
  # until rules_for/1 swaps in a real one.
  defp basic_rules do
    [
      escape: ~r{^\\([\\`*\{\}\[\]()\#+\-.!_>])},
      autolink: ~r{^<([^ >]+(@|:\/)[^ >]+)>},
      url: ~r{\z\A}, # noop
      tag: ~r{^<!--[\s\S]*?-->|^<\/?\w+(?:"[^"]*"|'[^']*'|[^'">])*?>},
      link: ~r{^!?\[(#{@inside})\]\(#{@href}\)},
      reflink: ~r{^!?\[(#{@inside})\]\s*\[([^\]]*)\]},
      nolink: ~r{^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]},
      strong: ~r{^__([\s\S]+?)__(?!_)|^\*\*([\s\S]+?)\*\*(?!\*)},
      em: ~r{^\b_((?:__|[\s\S])+?)_\b|^\*((?:\*\*|[\s\S])+?)\*(?!\*)},
      code: @code,
      br: ~r<^ {2,}\n(?!\s*$)>,
      text: ~r<^[\s\S]+?(?=[\\<!\[_*`]| {2,}\n|$)>,
      strikethrough: ~r{\z\A} # noop
    ]
  end
  # Builds the final rule map: starts from basic_rules/0 and overlays
  # option-specific regexes (gfm, breaks, pedantic, footnotes).
  # NOTE(review): the `rule_updates = ...` rebindings inside the if/else
  # blocks leak to the enclosing scope only under the legacy Elixir scoping
  # rules this code targeted — confirm before compiling on modern Elixir.
  defp rules_for(options) do
    rule_updates = []
    if options.gfm do
      rule_updates = [
        escape: ~r{^\\([\\`*\{\}\[\]()\#+\-.!_>~|])},
        url: ~r{^(https?:\/\/[^\s<]+[^<.,:;\"\')\]\s])},
        strikethrough: ~r{^~~(?=\S)([\s\S]*?\S)~~},
        text: ~r{^[\s\S]+?(?=[\\<!\[_*`~]|https?://| \{2,\}\n|$)}
      ]
      if options.breaks do
        break_updates = [
          br: ~r{^ *\n(?!\s*$)},
          text: ~r{^[\s\S]+?(?=[\\<!\[_*`~]|https?://| *\n|$)}
        ]
        rule_updates = Keyword.merge(rule_updates, break_updates)
      end
    else
      if options.pedantic do
        rule_updates = [
          strong: ~r{^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)},
          em: ~r{^_(?=\S)([\s\S]*?\S)_(?!_)|^\*(?=\S)([\s\S]*?\S)\*(?!\*)}
        ]
      end
    end
    if options.footnotes do
      rule_updates = Keyword.merge(rule_updates, [footnote: ~r{^\[\^(#{@inside})\]}])
    else
      rule_updates = Keyword.merge(rule_updates, [footnote: ~r{\z\A}]) #noop
    end
    Keyword.merge(basic_rules, rule_updates)
    |> Enum.into %{}
  end
end
| 31.093548 | 97 | 0.521942 |
08a4de85bc0d5a40e65f1ff51f1c7d673d05c85d | 3,133 | ex | Elixir | lib/akd/generator/task.ex | corroded/akd | ed15b8929b6d110552a19522f8a17edf75452e87 | [
"MIT"
] | null | null | null | lib/akd/generator/task.ex | corroded/akd | ed15b8929b6d110552a19522f8a17edf75452e87 | [
"MIT"
] | null | null | null | lib/akd/generator/task.ex | corroded/akd | ed15b8929b6d110552a19522f8a17edf75452e87 | [
"MIT"
] | null | null | null | defmodule Akd.Generator.Task do
@moduledoc """
This module handles the generation of a custom task which use `Akd.Task`.
This can either directly be called, or called through a mix task,
`mix akd.gen.task`.
This class uses EEx and Mix.Generator to fetch file contents from an eex
template and populate the interpolated fields, writing it to the speficied
file.
## Usage:
The following call creates a file `run.ex` at location `path/to/file/run.ex`
```
Akd.Generator.Task.gen(["run.ex"], path: "path/to/file")
```
"""
require EEx
require Mix.Generator
@path "lib/"
# Native hook types that can be added using this genenrator
@hooks ~w(fetch init build publish start stop)a
@doc """
This is the callback implementation for `gen/2`.
This function takes in a list of inputs and a list of options and generates
a module that uses `Akd.Task` at the specified path with the specified name.
The first element of the input is expected to be the name of the file.
The path can be sent to the `opts`.
If no path is sent, it defaults to #{@path}
## Examples:
```elixir
Akd.Generator.Hook.gen(["task.ex"], [path: "some/path"])
```
"""
@spec gen(list, Keyword.t) :: :ok | {:error, String.t}
def gen([name | _], opts) do
name
|> validate_and_format_opts(opts)
|> text_from_template()
|> write_to_file(name)
end
# This function validates the name and options sent to the generator
# and formats the options making it ready for the template to read from.
defp validate_and_format_opts(name, opts) do
opts = @hooks
|> Enum.reduce(opts, &resolve_hook_opts/2)
|> Keyword.put_new(:path, @path)
|> Keyword.put_new(:with_phx, false)
[{:name, resolve_name(name)} | opts]
end
# This function adds the default_hook to a keyword, if the keyword
# doesn't have key corresponding to the `hook`. Else just returns the keyword
# itself.
defp resolve_hook_opts(hook, opts) do
Keyword.put_new(opts, hook, default_string(hook))
end
# This function gets default_hook from `Akd` module based on hook type
# and converts the module name to string
defp default_string(hook) do
Akd
|> apply(hook, [])
|> Macro.to_string()
end
# This function gets the name of file from the module name
defp resolve_name(name) do
Macro.camelize(name)
end
# This function gives the location for the template which will be used
# by the generator
defp template(), do: "#{__DIR__}/templates/task.ex.eex"
# This function takes formatted options and returns a tuple.
# First element of the tuple is the path to file and second element is
# the evaluated file string.
defp text_from_template(opts) do
{Keyword.get(opts, :path), EEx.eval_file(template(), assigns: opts)}
end
# This function writes contents to a file at a specific path
defp write_to_file({path, code}, name) do
path = path <> Macro.underscore(name) <> ".ex"
case File.exists?(path) do
true -> {:error, "File #{path} already exists."}
_ -> Mix.Generator.create_file(path, code)
end
end
end
| 29.280374 | 79 | 0.687839 |
08a4fa22b758d93ac44ce5f1bab1dcf649bc57a2 | 277 | exs | Elixir | backend_initial/priv/repo/migrations/20200126093035_create_users.exs | bego-talks/e2e-workshop | 75a1d2a691814539ddb8adfe9012483efc53cfe4 | [
"MIT"
] | 7 | 2021-05-08T13:37:05.000Z | 2022-02-16T17:54:00.000Z | backend_initial/priv/repo/migrations/20200126093035_create_users.exs | bego-talks/e2e-workshop | 75a1d2a691814539ddb8adfe9012483efc53cfe4 | [
"MIT"
] | null | null | null | backend_initial/priv/repo/migrations/20200126093035_create_users.exs | bego-talks/e2e-workshop | 75a1d2a691814539ddb8adfe9012483efc53cfe4 | [
"MIT"
] | null | null | null | defmodule TodoList.Repo.Migrations.CreateUsers do
use Ecto.Migration
def change do
create table(:users) do
add(:name, :string, null: false)
add(:password, :string, null: false)
timestamps()
end
create(unique_index(:users, :name))
end
end
| 18.466667 | 49 | 0.66065 |
08a540a4e4625ab1273fad2942626e0193d38014 | 104 | ex | Elixir | lib/ecto_mnesia/record/context/query.ex | szTheory/ecto_mnesia | 9d1fd3b2845ac3c2c12c669d68198dff9e82a322 | [
"MIT"
] | 245 | 2016-10-22T14:43:36.000Z | 2022-02-28T03:46:27.000Z | lib/ecto_mnesia/record/context/query.ex | KushanChamindu/ecto_mnesia | bc0fb46d210eb1be63bcc8f0383212f0633b8c52 | [
"MIT"
] | 80 | 2016-10-22T16:09:19.000Z | 2021-03-03T10:45:01.000Z | lib/ecto_mnesia/record/context/query.ex | KushanChamindu/ecto_mnesia | bc0fb46d210eb1be63bcc8f0383212f0633b8c52 | [
"MIT"
] | 53 | 2016-12-03T14:23:05.000Z | 2021-07-01T01:55:22.000Z | defmodule EctoMnesia.Record.Context.Query do
  @moduledoc false
  # Bare struct with two fields, both defaulting to empty lists.
  # NOTE(review): field semantics (projected fields / query sources) are
  # inferred from the names only — confirm against the query-building code.
  defstruct select: [], sources: []
end
| 20.8 | 44 | 0.75 |
08a551a5731602c75d9a850a2ee7c9ed388458ae | 3,454 | exs | Elixir | lib/elixir/test/elixir/binary/inspect_test.exs | ekosz/elixir | 62e375bc711b4072e1b68de776e96cc31f571d45 | [
"Apache-2.0"
] | 1 | 2017-10-29T16:37:08.000Z | 2017-10-29T16:37:08.000Z | lib/elixir/test/elixir/binary/inspect_test.exs | ekosz/elixir | 62e375bc711b4072e1b68de776e96cc31f571d45 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/binary/inspect_test.exs | ekosz/elixir | 62e375bc711b4072e1b68de776e96cc31f571d45 | [
"Apache-2.0"
] | null | null | null | Code.require_file "../../test_helper", __FILE__
# Legacy-dialect ExUnit tests pinning `inspect/1` output for atoms
# (note the old `test :atom_name` style).
defmodule Binary.Inspect.AtomTest do
  use ExUnit.Case, async: true
  test :basic do
    assert inspect(:foo) == ":foo"
  end
  test :empty do
    # The empty atom needs quoting.
    assert inspect(:"") == ":\"\""
  end
  test :true_false_nil do
    # Special atoms render without a leading colon.
    assert inspect(false) == "false"
    assert inspect(true) == "true"
    assert inspect(nil) == "nil"
  end
  test :with_uppercase do
    assert inspect(:fOO) == ":fOO"
    assert inspect(:FOO) == ":FOO"
  end
  test :alias_atom do
    # Aliases print without the Elixir. prefix or colon.
    assert inspect(Foo.Bar) == "Foo.Bar"
  end
  test :with_integers do
    assert inspect(User1) == "User1"
    assert inspect(:user1) == ":user1"
  end
  test :with_punctuation do
    assert inspect(:foo?) == ":foo?"
    assert inspect(:bar!) == ":bar!"
  end
  test :op do
    # Operator atoms stay unquoted.
    assert inspect(:@) == ":@"
    assert inspect(:&&&) == ":&&&"
  end
  test :impl do
    assert Binary.Inspect.Atom.__impl__ == Binary.Inspect
  end
end
# Legacy-dialect ExUnit tests pinning `inspect/1` output for bitstrings and
# binaries (uses the old `<<x|size-modifiers>>` bitstring syntax).
defmodule Binary.Inspect.BitStringTest do
  use ExUnit.Case, async: true
  test :bitstring do
    # Non-byte-aligned bitstrings show the trailing partial byte as bits|size.
    assert inspect(<<1|12-:integer-:signed>>) == "<<0,1|4>>"
  end
  test :binary do
    assert inspect("foo") == "\"foo\""
    assert inspect(<<?a, ?b, ?c>>) == "\"abc\""
  end
  test :escape do
    assert inspect("f\no") == "\"f\\no\""
    assert inspect("f\\o") == "\"f\\\\o\""
  end
  test :utf8 do
    assert inspect(" ゆんゆん") == "\" ゆんゆん\""
  end
  test :unprintable do
    # Unprintable bytes fall back to the <<...>> byte notation.
    assert inspect(<<1>>) == "<<1>>"
  end
end
# Legacy-dialect ExUnit tests pinning `inspect/1` output for numbers
# (floats print in the old fixed-precision scientific notation).
defmodule Binary.Inspect.NumberTest do
  use ExUnit.Case, async: true
  test :integer do
    assert inspect(100) == "100"
  end
  test :float do
    assert inspect(1.0) == "1.00000000000000000000e+00"
    assert inspect(1.0e10) == "1.00000000000000000000e+10"
    assert inspect(1.0e+10) == "1.00000000000000000000e+10"
  end
end
# Legacy-dialect ExUnit tests pinning `inspect/1` output for tuples, including
# the old record rendering (Module[field: value]).
defmodule Binary.Inspect.TupleTest do
  use ExUnit.Case, async: true
  test :basic do
    assert inspect({ 1, "b", 3 }) == "{1,\"b\",3}"
  end
  test :record_like do
    assert inspect({ :foo, :bar }) == "{:foo,:bar}"
  end
  test :with_builtin_like_record do
    assert inspect({ :list, 1 }) == "{:list,1}"
  end
  test :with_record_like_tuple do
    assert inspect({ List, 1 }) == "{List,1}"
  end
  test :with_record do
    # Records (pre-struct era) print as Module[field: value, ...].
    assert inspect(ExUnit.Server.Config.new) == "ExUnit.Server.Config[async_cases: [], options: [], sync_cases: []]"
  end
  test :with_tuple_matching_record_name_but_not_length do
    # A 1-tuple with a record's module name is still just a tuple.
    assert inspect({ExUnit.Server.Config}) == "{ExUnit.Server.Config}"
  end
  test :exception do
    assert inspect(RuntimeError.new) == "RuntimeError[message: \"runtime error\"]"
  end
  test :empty do
    assert inspect({}) == "{}"
  end
end
# Legacy-dialect ExUnit tests pinning `inspect/1` output for lists, including
# charlist rendering and improper lists.
defmodule Binary.Inspect.ListTest do
  use ExUnit.Case, async: true
  test :basic do
    assert inspect([ 1, "b", 3 ]) == "[1,\"b\",3]"
  end
  test :printable do
    # Printable charlists render in single quotes.
    assert inspect('abc') == "'abc'"
  end
  test :non_printable do
    assert inspect([{:a,1}]) == "[{:a,1}]"
  end
  test :unproper do
    assert inspect([:foo | :bar]) == "[:foo|:bar]"
  end
  test :codepoints do
    # Multi-byte codepoints are not treated as printable — raw bytes shown.
    assert inspect('é') == "[195,169]"
  end
  test :empty do
    assert inspect([]) == "[]"
  end
end
# Legacy-dialect ExUnit test pinning `inspect/1` output for anonymous
# functions (uses the old unqualified `binary_to_list/1`).
defmodule Binary.Inspect.AnyTest do
  use ExUnit.Case, async: true
  test :funs do
    bin = inspect(fn(x) -> x + 1 end)
    # Only the prefix is stable, so match it instead of the whole string.
    assert '#Fun<' ++ _ = binary_to_list(bin)
  end
end
# Legacy-dialect ExUnit test pinning `inspect/1` output for regexes
# (old `%r` sigil; the bare match acts as the assertion).
defmodule Binary.Inspect.RegexTest do
  use ExUnit.Case, async: true
  test :regex do
    "%r\"foo\"m" = inspect(%r(foo)m)
  end
end
| 20.43787 | 116 | 0.612334 |
08a5799cbb5d5bbfbb50b1b41669e5c932f32c0f | 1,793 | exs | Elixir | mix.exs | elixir-geolix/adapter_mmdb2_precompiled | a60417be0911a203f84de6b87987724e271d8fb7 | [
"Apache-2.0"
] | null | null | null | mix.exs | elixir-geolix/adapter_mmdb2_precompiled | a60417be0911a203f84de6b87987724e271d8fb7 | [
"Apache-2.0"
] | null | null | null | mix.exs | elixir-geolix/adapter_mmdb2_precompiled | a60417be0911a203f84de6b87987724e271d8fb7 | [
"Apache-2.0"
] | null | null | null | defmodule Geolix.Adapter.MMDB2Precompiled.MixProject do
use Mix.Project
@url_github "https://github.com/elixir-geolix/adapter_mmdb2_precompiled"
def project do
[
app: :geolix_adapter_mmdb2_precompiled,
name: "Geolix Adapter: MMDB2 Precompiled",
version: "0.1.0-dev",
elixir: "~> 1.7",
aliases: aliases(),
deps: deps(),
description: "Compile-Time MMDB2 adapter for Geolix",
dialyzer: dialyzer(),
docs: docs(),
package: package(),
preferred_cli_env: [
"bench.lookup": :bench,
coveralls: :test,
"coveralls.detail": :test
],
test_coverage: [tool: ExCoveralls]
]
end
defp aliases() do
[
"bench.lookup": ["run bench/lookup.exs"]
]
end
defp deps do
[
{:benchee, "~> 1.0", only: :bench, runtime: false},
{:credo, "~> 1.0", only: :dev, runtime: false},
{:dialyxir, "~> 1.0", only: :dev, runtime: false},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false},
{:excoveralls, "~> 0.13.0", only: :test, runtime: false},
{:geolix, "~> 2.0"},
{:geolix_testdata, "~> 0.5.1", only: [:bench, :test], runtime: false},
{:mmdb2_decoder, "~> 3.0"}
]
end
defp dialyzer do
[
flags: [
:error_handling,
:race_conditions,
:underspecs,
:unmatched_returns
],
plt_core_path: "plts",
plt_file: {:no_warn, "plts/dialyzer.plt"}
]
end
defp docs do
[
main: "Geolix.Adapter.MMDB2Precompiled",
source_ref: "master",
source_url: @url_github
]
end
defp package do
%{
files: ["CHANGELOG.md", "LICENSE", "mix.exs", "README.md", "lib"],
licenses: ["Apache 2.0"],
links: %{"GitHub" => @url_github}
}
end
end
| 23.906667 | 76 | 0.556609 |
08a5940d6b273a6a7267bcc6d74f5a87259e6a34 | 1,131 | exs | Elixir | config/config.exs | timCF/category | 6426b4aa25997119a8ad2332529a430c4ae617f7 | [
"MIT"
] | 1 | 2019-08-20T20:10:33.000Z | 2019-08-20T20:10:33.000Z | config/config.exs | timCF/category | 6426b4aa25997119a8ad2332529a430c4ae617f7 | [
"MIT"
] | null | null | null | config/config.exs | timCF/category | 6426b4aa25997119a8ad2332529a430c4ae617f7 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :category, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:category, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.483871 | 73 | 0.751547 |
08a59800a241a39db33e7b405b75f443b15b468d | 2,646 | exs | Elixir | lib/logger/test/logger/translator_test.exs | patrickgombert/elixir | dc4b7c89ea30a9734d9f1cbf3bb1612fada498d4 | [
"Apache-2.0"
] | null | null | null | lib/logger/test/logger/translator_test.exs | patrickgombert/elixir | dc4b7c89ea30a9734d9f1cbf3bb1612fada498d4 | [
"Apache-2.0"
] | null | null | null | lib/logger/test/logger/translator_test.exs | patrickgombert/elixir | dc4b7c89ea30a9734d9f1cbf3bb1612fada498d4 | [
"Apache-2.0"
] | 1 | 2020-12-07T08:04:16.000Z | 2020-12-07T08:04:16.000Z | defmodule Logger.TranslatorTest do
use Logger.Case
defmodule MyGenServer do
use GenServer
def handle_call(:error, _, _) do
raise "oops"
end
end
defmodule MyGenEvent do
use GenEvent
def handle_call(:error, _) do
raise "oops"
end
end
test "translates GenServer crashes" do
{:ok, pid} = GenServer.start(MyGenServer, :ok)
assert capture_log(:info, fn ->
catch_exit(GenServer.call(pid, :error))
end) =~ """
[error] GenServer #{inspect pid} terminating
** (exit) an exception was raised:
** (RuntimeError) oops
"""
end
test "translates GenServer crashes on debug" do
{:ok, pid} = GenServer.start(MyGenServer, :ok)
assert capture_log(:debug, fn ->
catch_exit(GenServer.call(pid, :error))
end) =~ """
[error] GenServer #{inspect pid} terminating
Last message: :error
State: :ok
** (exit) an exception was raised:
** (RuntimeError) oops
"""
end
test "translates GenEvent crashes" do
{:ok, pid} = GenEvent.start()
:ok = GenEvent.add_handler(pid, MyGenEvent, :ok)
assert capture_log(:info, fn ->
GenEvent.call(pid, MyGenEvent, :error)
end) =~ """
[error] GenEvent handler Logger.TranslatorTest.MyGenEvent installed in #{inspect pid} terminating
** (exit) an exception was raised:
** (RuntimeError) oops
"""
end
test "translates GenEvent crashes on debug" do
{:ok, pid} = GenEvent.start()
:ok = GenEvent.add_handler(pid, MyGenEvent, :ok)
assert capture_log(:debug, fn ->
GenEvent.call(pid, MyGenEvent, :error)
end) =~ """
[error] GenEvent handler Logger.TranslatorTest.MyGenEvent installed in #{inspect pid} terminating
Last message: :error
State: :ok
** (exit) an exception was raised:
** (RuntimeError) oops
"""
end
test "translates Task crashes" do
{:ok, pid} = Task.start_link(__MODULE__, :task, [self()])
assert capture_log(fn ->
ref = Process.monitor(pid)
send(pid, :go)
receive do: ({:DOWN, ^ref, _, _, _} -> :ok)
end) =~ """
[error] Task #{inspect pid} started from #{inspect self} terminating
Function: &Logger.TranslatorTest.task/1
Args: [#{inspect self}]
** (exit) an exception was raised:
** (RuntimeError) oops
"""
end
test "translates application stop" do
:ok = Application.start(:eex)
assert capture_log(fn ->
Application.stop(:eex)
end) =~ msg("[info] Application eex exited with reason :stopped")
end
def task(parent) do
Process.unlink(parent)
receive do: (:go -> raise "oops")
end
end
| 25.68932 | 101 | 0.625472 |
08a5b92d447ea8187fa4e87c141d7a3bfe73af86 | 73 | ex | Elixir | web/views/coherence/layout_view.ex | franknfjr/blog_lca | 0711ad6ba6ee878045905ec58a549527ffa5e0a4 | [
"MIT"
] | null | null | null | web/views/coherence/layout_view.ex | franknfjr/blog_lca | 0711ad6ba6ee878045905ec58a549527ffa5e0a4 | [
"MIT"
] | null | null | null | web/views/coherence/layout_view.ex | franknfjr/blog_lca | 0711ad6ba6ee878045905ec58a549527ffa5e0a4 | [
"MIT"
] | null | null | null | defmodule Coherence.LayoutView do
use Login.Coherence.Web, :view
end
| 12.166667 | 33 | 0.780822 |
08a5be294315107dc3f90dcf0ff944ab8b5e0e20 | 63 | ex | Elixir | lib/readtome_web/views/home_view.ex | zephraph/readtome | 64a5f773bdc3c19d9c5ac50a04aa14e446e36c55 | [
"MIT"
] | 1 | 2021-09-05T20:54:57.000Z | 2021-09-05T20:54:57.000Z | lib/readtome_web/views/home_view.ex | zephraph/readtome | 64a5f773bdc3c19d9c5ac50a04aa14e446e36c55 | [
"MIT"
] | 17 | 2019-07-06T17:31:56.000Z | 2021-06-22T15:31:06.000Z | lib/readtome_web/views/home_view.ex | zephraph/readtome | 64a5f773bdc3c19d9c5ac50a04aa14e446e36c55 | [
"MIT"
] | 1 | 2021-03-15T20:50:27.000Z | 2021-03-15T20:50:27.000Z | defmodule ReadtomeWeb.HomeView do
use ReadtomeWeb, :view
end
| 15.75 | 33 | 0.809524 |
08a5d411199ae31af4f62df9e7eee34fa3917fc7 | 1,488 | ex | Elixir | lib/app_web/endpoint.ex | mzgajner/smena | 6c0243ae1e8d1cef6e8a8e240f0f6b703ea638c9 | [
"Unlicense"
] | null | null | null | lib/app_web/endpoint.ex | mzgajner/smena | 6c0243ae1e8d1cef6e8a8e240f0f6b703ea638c9 | [
"Unlicense"
] | null | null | null | lib/app_web/endpoint.ex | mzgajner/smena | 6c0243ae1e8d1cef6e8a8e240f0f6b703ea638c9 | [
"Unlicense"
] | null | null | null | defmodule SmenaWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :app
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
@session_options [
store: :cookie,
key: "_app_key",
signing_salt: "RYYPeNjw"
]
socket "/socket", SmenaWeb.UserSocket,
websocket: true,
longpoll: false
socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :app,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
plug Phoenix.CodeReloader
plug Phoenix.Ecto.CheckRepoStatus, otp_app: :app
end
plug Phoenix.LiveDashboard.RequestLogger,
param_key: "request_logger",
cookie_key: "request_logger"
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
plug SmenaWeb.Router
end
| 28.075472 | 97 | 0.710349 |
08a5e55057e8861c2c20beefb26c7ce580154a7d | 1,167 | ex | Elixir | apps/calgy_api/lib/calgy_api/channels/user_socket.ex | calgy/calgy | 624790f1299271010f963359a7e94097277a9ee8 | [
"MIT"
] | 1 | 2017-09-18T08:54:46.000Z | 2017-09-18T08:54:46.000Z | apps/calgy_api/lib/calgy_api/channels/user_socket.ex | calgy/calgy | 624790f1299271010f963359a7e94097277a9ee8 | [
"MIT"
] | null | null | null | apps/calgy_api/lib/calgy_api/channels/user_socket.ex | calgy/calgy | 624790f1299271010f963359a7e94097277a9ee8 | [
"MIT"
] | null | null | null | defmodule CalgyApi.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", CalgyApi.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# CalgyApi.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 30.710526 | 83 | 0.701799 |
08a5e73899ecd22a476b4e0542ed16c9692fa342 | 497 | ex | Elixir | lib/absinthe/blueprint/input/float.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 4,101 | 2016-03-02T03:49:20.000Z | 2022-03-31T05:46:01.000Z | lib/absinthe/blueprint/input/float.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 889 | 2016-03-02T16:06:59.000Z | 2022-03-31T20:24:12.000Z | lib/absinthe/blueprint/input/float.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 564 | 2016-03-02T07:49:59.000Z | 2022-03-06T14:40:59.000Z | defmodule Absinthe.Blueprint.Input.Float do
@moduledoc false
alias Absinthe.Blueprint
@enforce_keys [:value]
defstruct [
:value,
:source_location,
# Added by phases
flags: %{},
schema_node: nil,
errors: []
]
@type t :: %__MODULE__{
value: float,
flags: Blueprint.flags_t(),
source_location: Blueprint.SourceLocation.t(),
schema_node: nil | Absinthe.Type.t(),
errors: [Absinthe.Phase.Error.t()]
}
end
| 20.708333 | 56 | 0.599598 |
08a5f345e7a0f99cb0041ea9f0dbec1995ae39aa | 1,257 | ex | Elixir | lib/solid/tag/render.ex | christopherlai/solid | c7f0553f4ce3c12b68484baff42084bee1aa640b | [
"MIT"
] | 117 | 2016-09-04T03:56:08.000Z | 2022-03-28T18:51:56.000Z | lib/solid/tag/render.ex | christopherlai/solid | c7f0553f4ce3c12b68484baff42084bee1aa640b | [
"MIT"
] | 73 | 2017-03-21T09:22:45.000Z | 2022-03-01T20:48:22.000Z | lib/solid/tag/render.ex | christopherlai/solid | c7f0553f4ce3c12b68484baff42084bee1aa640b | [
"MIT"
] | 25 | 2017-04-02T02:23:01.000Z | 2022-02-27T20:42:13.000Z | defmodule Solid.Tag.Render do
import NimbleParsec
alias Solid.Parser.{BaseTag, Argument, Literal}
@behaviour Solid.Tag
@impl true
def spec(_parser) do
space = Literal.whitespace(min: 0)
ignore(BaseTag.opening_tag())
|> ignore(string("render"))
|> ignore(space)
|> tag(Argument.argument(), :template)
|> tag(
optional(
ignore(string(","))
|> ignore(space)
|> concat(Argument.named_arguments())
),
:arguments
)
|> ignore(space)
|> ignore(BaseTag.closing_tag())
end
@impl true
def render(
[template: template_binding, arguments: argument_binding],
context,
options
) do
template = Solid.Argument.get(template_binding, context)
binding_vars =
Keyword.get(argument_binding || [], :named_arguments, [])
|> Solid.Argument.parse_named_arguments(context)
|> Enum.concat()
|> Map.new()
{file_system, instance} = options[:file_system] || {Solid.BlankFileSystem, nil}
template_str = file_system.read_template_file(template, instance)
template = Solid.parse!(template_str, options)
rendered_text = Solid.render(template, binding_vars, options)
{[text: rendered_text], context}
end
end
| 25.653061 | 83 | 0.645982 |
08a608687ab67b55629d27bcc180f27d5a8e3ec8 | 402 | ex | Elixir | apps/snitch_api/lib/snitch_api_web/controllers/product_option_value.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 456 | 2018-09-20T02:40:59.000Z | 2022-03-07T08:53:48.000Z | apps/snitch_api/lib/snitch_api_web/controllers/product_option_value.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 273 | 2018-09-19T06:43:43.000Z | 2021-08-07T12:58:26.000Z | apps/snitch_api/lib/snitch_api_web/controllers/product_option_value.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 122 | 2018-09-26T16:32:46.000Z | 2022-03-13T11:44:19.000Z | defmodule SnitchApiWeb.ProductOptionValueController do
use SnitchApiWeb, :controller
alias Snitch.Data.Model.ProductOptionValue
def update(conn, %{"id" => id} = params) do
with option_value <- ProductOptionValue.get(id),
{:ok, option_value} <- ProductOptionValue.update(option_value, params) do
render(conn, "option_value.json", option_value: option_value)
end
end
end
| 30.923077 | 82 | 0.733831 |
08a60ec4725c255d73765a5647f5f851b48402b6 | 1,330 | exs | Elixir | test/mat44/mat44_apply_test.exs | crertel/graphmath | 798e51d21d509e397a86d9ca855ef18a92243583 | [
"Unlicense"
] | 70 | 2015-01-07T10:13:38.000Z | 2021-09-29T05:06:14.000Z | test/mat44/mat44_apply_test.exs | crertel/graphmath | 798e51d21d509e397a86d9ca855ef18a92243583 | [
"Unlicense"
] | 28 | 2015-01-22T22:38:24.000Z | 2021-06-24T06:15:38.000Z | test/mat44/mat44_apply_test.exs | crertel/graphmath | 798e51d21d509e397a86d9ca855ef18a92243583 | [
"Unlicense"
defmodule GraphmathTest.Mat44.ApplyMat44 do
  use ExUnit.Case

  # Tests for the four Mat44 vector-application functions.
  # With M = {1..16} (row-major) and v = {11, 13, 17, 19}:
  #   M * v   == {164, 404, 644, 884}   (row 1: 1*11 + 2*13 + 3*17 + 4*19 = 164)
  #   Mᵀ * v  == {476, 536, 596, 656}   (col 1: 1*11 + 5*13 + 9*17 + 13*19 = 476)
  #   v * M   == Mᵀ * v, and v * Mᵀ == M * v.

  @tag :mat44
  @tag :apply
  # FIX: the test description previously claimed {16400, 40400, 64400, 88400},
  # which contradicted the asserted (and arithmetically correct) value
  # {164, 404, 644, 884}. Only the description string is changed.
  test "apply( 1:1:16, {11,13,17,19} ) returns { 164, 404, 644, 884} " do
    assert {164, 404, 644, 884} ==
             Graphmath.Mat44.apply(
               {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16},
               {11, 13, 17, 19}
             )
  end

  @tag :mat44
  @tag :apply
  test "apply_transpose( 1:1:16, {11,13,17,19} ) returns { 476, 536, 596, 656} " do
    assert {476, 536, 596, 656} ==
             Graphmath.Mat44.apply_transpose(
               {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16},
               {11, 13, 17, 19}
             )
  end

  @tag :mat44
  @tag :apply
  # Left-application of a row vector: v * M equals Mᵀ * v.
  test "apply_left( 1:1:16, {11,13,17,19} ) returns { 476, 536, 596, 656} " do
    assert {476, 536, 596, 656} ==
             Graphmath.Mat44.apply_left(
               {11, 13, 17, 19},
               {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}
             )
  end

  @tag :mat44
  @tag :apply
  # v * Mᵀ equals M * v, so this mirrors the plain apply/2 result.
  test "apply_left_transpose( 1:1:16, {11,13,17,19} ) returns { 164, 404, 644, 884} " do
    assert {164, 404, 644, 884} ==
             Graphmath.Mat44.apply_left_transpose(
               {11, 13, 17, 19},
               {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}
             )
  end
end
| 30.227273 | 88 | 0.480451 |
08a622af57af76e5cf460d30399a77d20851f28f | 16,970 | ex | Elixir | lib/simple_pool/v1/worker_behaviour_default.ex | noizu/simple_pool | 59251a3391ff82152a31626072955b95f83c18ee | [
"MIT"
] | null | null | null | lib/simple_pool/v1/worker_behaviour_default.ex | noizu/simple_pool | 59251a3391ff82152a31626072955b95f83c18ee | [
"MIT"
] | null | null | null | lib/simple_pool/v1/worker_behaviour_default.ex | noizu/simple_pool | 59251a3391ff82152a31626072955b95f83c18ee | [
"MIT"
] | null | null | null | #-------------------------------------------------------------------------------
# Author: Keith Brings
# Copyright (C) 2018 Noizu Labs, Inc. All rights reserved.
#-------------------------------------------------------------------------------
# Default implementations backing the SimplePool v1 worker behaviour.
#
# Each function receives a tuple of compile-time settings
# ({mod, server, base, worker_state_entity, inactivity_check, lazy_load})
# captured by the using module, followed by the runtime arguments.
# NOTE(review): several `mod.schedule_*` calls below use fewer arguments than
# the 3-arity definitions here — presumably the using module's macro generates
# wrappers that supply the interval; confirm against the behaviour macro.
defmodule Noizu.SimplePool.WorkerBehaviourDefault do
  require Logger

  # Resolved at compile time: config changes after compilation have no effect here.
  @telemetry_handler Application.get_env(:noizu_simple_pool, :telemetry_handler, Noizu.SimplePool.Telemetry)

  # Resolves the effective verbosity flag.
  # :auto falls back to the per-base PoolSupervisor config, then the global
  # :verbose setting (default false); any explicit value is returned unchanged.
  def verbose(verbose, base) do
    if verbose == :auto do
      if Application.get_env(:noizu_simple_pool, base, %{})[:PoolSupervisor][:verbose] do
        Application.get_env(:noizu_simple_pool, base, %{})[:PoolSupervisor][:verbose]
      else
        Application.get_env(:noizu_simple_pool, :verbose, false)
      end
    else
      verbose
    end
  end

  # GenServer init for a worker arriving via migration: registers the ref,
  # kicks off node assignment, and defers the heavy work to delayed_init/3
  # (initialized: :delayed_init) carrying the transferred state.
  def init({_mod, server, base, _worker_state_entity, _inactivity_check, _lazy_load}, {:migrate, ref, initial_state, context}) do
    @telemetry_handler.worker_init_span(base, ref,
      fn ->
        server.worker_lookup_handler().register!(ref, context)
        task = server.worker_lookup_handler().set_node!(ref, context)
        # Wait up to 500ms for node assignment; stash either the yielded
        # result or the still-running task for later inspection.
        r = Task.yield(task, 500)
        {:ok, %Noizu.SimplePool.Worker.State{extended: %{set_node_task: r || task}, initialized: :delayed_init, worker_ref: ref, inner_state: {:transfer, initial_state}}}
      end, context)
  end

  # GenServer init for a normally spawned worker; inner_state: :start signals
  # delayed_init/3 to load (or lazily defer) the worker entity.
  def init({_mod, server, base, _worker_state_entity, _inactivity_check, _lazy_load}, {ref, context}) do
    @telemetry_handler.worker_init_span(base, ref,
      fn ->
        server.worker_lookup_handler().register!(ref, context)
        task = server.worker_lookup_handler().set_node!(ref, context)
        r = Task.yield(task, 500)
        {:ok, %Noizu.SimplePool.Worker.State{extended: %{set_node_task: r || task}, initialized: :delayed_init, worker_ref: ref, inner_state: :start}}
      end, context)
  end

  # Completes worker initialization after init/2 returned :delayed_init.
  # Branches on how the worker was started (stale transfer, fresh transfer,
  # or normal start) and optionally schedules the inactivity check.
  def delayed_init({mod, _server, base, worker_state_entity, inactivity_check, lazy_load}, state, context) do
    ref = state.worker_ref
    ustate = case state.inner_state do
      # @TODO - investigate strategies for avoiding keeping full state in child def. Aka put into state that accepts a transfer/reads a transfer form a table, etc.
      {:transfer, {:state, initial_state, :time, time}} ->
        # Transfers older than 15 minutes are considered stale and ignored;
        # the worker is loaded fresh instead of accepting the old state.
        cut_off = :os.system_time(:second) - 60*15
        if time < cut_off do
          if (mod.verbose()) do
            Logger.info(fn -> {base.banner("INIT/1.stale_transfer#{__MODULE__} (#{inspect ref }"), Noizu.ElixirCore.CallingContext.metadata(context) } end)
          end
          #PRI-0 - disabled until rate limit available - spawn fn -> server.worker_lookup_handler().record_event!(ref, :start, :normal, context, %{}) end
          {initialized, inner_state} = if lazy_load do
            case worker_state_entity.load(ref, context) do
              nil -> {false, nil}
              inner_state -> {true, inner_state}
            end
          else
            {false, nil}
          end
          %Noizu.SimplePool.Worker.State{initialized: initialized, worker_ref: ref, inner_state: inner_state}
        else
          # Fresh enough: accept the transferred inner state.
          if (mod.verbose()) do
            Logger.info(fn -> {base.banner("INIT/1.transfer #{__MODULE__} (#{inspect ref }"), Noizu.ElixirCore.CallingContext.metadata(context) } end)
          end
          #PRI-0 - disabled until rate limit available - spawn fn -> server.worker_lookup_handler().record_event!(ref, :start, :migrate, context, %{}) end
          {initialized, inner_state} = worker_state_entity.transfer(ref, initial_state.inner_state, context)
          %Noizu.SimplePool.Worker.State{initial_state| initialized: initialized, worker_ref: ref, inner_state: inner_state}
        end
      # Transfer without a timestamp: always accepted.
      {:transfer, initial_state} ->
        if (mod.verbose()) do
          Logger.info(fn -> {base.banner("INIT/1.transfer #{__MODULE__} (#{inspect ref }"), Noizu.ElixirCore.CallingContext.metadata(context) } end)
        end
        #PRI-0 - disabled until rate limit available - spawn fn -> server.worker_lookup_handler().record_event!(ref, :start, :migrate, context, %{}) end
        {initialized, inner_state} = worker_state_entity.transfer(ref, initial_state.inner_state, context)
        %Noizu.SimplePool.Worker.State{initial_state| initialized: initialized, worker_ref: ref, inner_state: inner_state}
      # Normal start: load eagerly unless lazy_load leaves the worker
      # uninitialized until the first request arrives.
      :start ->
        if (mod.verbose()) do
          Logger.info(fn -> {base.banner("INIT/1 #{__MODULE__} (#{inspect ref }"), Noizu.ElixirCore.CallingContext.metadata(context) } end)
        end
        #PRI-0 - disabled until rate limit available - spawn fn -> server.worker_lookup_handler().record_event!(ref, :start, :normal, context, %{}) end
        {initialized, inner_state} = if lazy_load do
          case worker_state_entity.load(ref, context) do
            nil -> {false, nil}
            inner_state -> {true, inner_state}
          end
        else
          {false, nil}
        end
        %Noizu.SimplePool.Worker.State{initialized: initialized, worker_ref: ref, inner_state: inner_state}
    end

    if inactivity_check do
      ustate = %Noizu.SimplePool.Worker.State{ustate| last_activity: :os.system_time(:seconds)}
      ustate = mod.schedule_inactivity_check(nil, ustate)
      ustate
    else
      ustate
    end
  end

  # Schedules a delayed :migrate_shutdown message to self and stores the
  # timer ref in state.extended so it can be cancelled later.
  def schedule_migrate_shutdown(migrate_shutdown_interval_ms, context, state) do
    {:ok, mt_ref} = :timer.send_after(migrate_shutdown_interval_ms, self(), {:i, {:migrate_shutdown, state.worker_ref}, context})
    put_in(state, [Access.key(:extended), :mt_ref], mt_ref)
  end

  # Cancels any pending migrate-shutdown timer (no-op when none is set).
  def clear_migrate_shutdown(state) do
    case Map.get(state.extended, :mt_ref) do
      nil -> state
      mt_ref ->
        :timer.cancel(mt_ref)
        put_in(state, [Access.key(:extended), :mt_ref], nil)
    end
  end

  # Handles the scheduled migrate-shutdown tick while migrating.
  # Gives the state entity a chance to finish up ({:ok, _}) before asking the
  # server to terminate the worker, or reschedules on {:wait, _}.
  # Messages for a stale/foreign ref are ignored.
  def handle_migrate_shutdown(mod, server, worker_state_entity, inactivity_check, {:i, {:migrate_shutdown, ref}, context}, %Noizu.SimplePool.Worker.State{migrating: true} = state) do
    if ref == state.worker_ref do
      state = mod.clear_migrate_shutdown(state)
      state = if inactivity_check, do: mod.clear_inactivity_check(state), else: state

      if state.initialized do
        case worker_state_entity.migrate_shutdown(state, context) do
          {:ok, state} ->
            server.worker_terminate!(ref, nil, context)
            {:noreply, state}
          {:wait, state} ->
            {:noreply, mod.schedule_migrate_shutdown(context, state)}
        end
      else
        server.worker_terminate!(ref, nil, context)
        {:noreply, state}
      end
    else
      {:noreply, state}
    end
  end # end handle_migrate_shutdown (migrating: true)

  # Defensive clause: a migrate-shutdown tick arrived while not migrating.
  def handle_migrate_shutdown(_mod, _server, _worker_state_entity, _inactivity_check, {:i, {:migrate_shutdown, _ref}, context}, %Noizu.SimplePool.Worker.State{migrating: false} = state) do
    Logger.error(fn -> {"#{__MODULE__}.migrate_shutdown called when not in migrating state" , Noizu.ElixirCore.CallingContext.metadata(context)} end)
    {:noreply, state}
  end # end handle_migrate_shutdown (migrating: false)

  # Schedules the periodic :activity_check message; timer ref kept in
  # state.extended for cancellation.
  def schedule_inactivity_check(check_interval_ms, context, state) do
    {:ok, t_ref} = :timer.send_after(check_interval_ms, self(), {:i, {:activity_check, state.worker_ref}, context})
    put_in(state, [Access.key(:extended), :t_ref], t_ref)
  end

  # Cancels any pending inactivity-check timer (no-op when none is set).
  def clear_inactivity_check(state) do
    case Map.get(state.extended, :t_ref) do
      nil -> state
      t_ref ->
        :timer.cancel(t_ref)
        put_in(state, [Access.key(:extended), :t_ref], nil)
    end
  end

  # Activity check for an uninitialized worker: stop once idle longer than
  # kill_interval_s (or when no activity was ever recorded), else reschedule.
  def handle_activity_check(mod, _server, _worker_state_entity, kill_interval_s, {:i, {:activity_check, ref}, context}, %Noizu.SimplePool.Worker.State{initialized: false} = state ) do
    if ref == state.worker_ref do
      if ((state.last_activity == nil) || ((state.last_activity + kill_interval_s) < :os.system_time(:seconds))) do
        #server.worker_remove!(ref, [force: true], context)
        {:stop, {:shutdown, :inactive}, mod.clear_inactivity_check(state)}
      else
        {:noreply, mod.schedule_inactivity_check(context, state)}
      end
    else
      {:noreply, state}
    end
  end

  # Activity check for an initialized worker: ask the state entity to shut
  # down; {:wait, _} defers the stop and reschedules the check.
  def handle_activity_check(mod, _server, worker_state_entity, kill_interval_s, {:i, {:activity_check, ref}, context}, %Noizu.SimplePool.Worker.State{initialized: true} = state ) do
    if ref == state.worker_ref do
      if ((state.last_activity == nil) || ((state.last_activity + kill_interval_s) < :os.system_time(:seconds))) do
        case worker_state_entity.shutdown(state, [], context, nil) do
          {:ok, state} ->
            #server.worker_remove!(state.worker_ref, [force: true], context)
            {:stop, {:shutdown, :inactive}, mod.clear_inactivity_check(state)}
          {:wait, state} ->
            # @TODO force termination conditions needed.
            {:noreply, mod.schedule_inactivity_check(context, state)}
        end
      else
        {:noreply, mod.schedule_inactivity_check(context, state)}
      end
    else
      {:noreply, state}
    end
  end

  # Async load request for an uninitialized worker; a nil load leaves the
  # worker uninitialized. Refreshes last_activity when inactivity_check is on.
  def handle_cast_load(worker_state_entity, inactivity_check, {:s, {:load, options}, context}, %Noizu.SimplePool.Worker.State{initialized: false} = state) do
    case worker_state_entity.load(state.worker_ref, context, options) do
      nil -> {:noreply, state}
      inner_state ->
        if inactivity_check do
          {:noreply, %Noizu.SimplePool.Worker.State{state| initialized: true, inner_state: inner_state, last_activity: :os.system_time(:seconds)}}
        else
          {:noreply, %Noizu.SimplePool.Worker.State{state| initialized: true, inner_state: inner_state}}
        end
    end
  end

  # Sync load request: replies :not_found when the entity cannot be loaded,
  # otherwise replies with the loaded inner state.
  def handle_call_load(worker_state_entity, inactivity_check, {:s, {:load, options}, context}, _from, %Noizu.SimplePool.Worker.State{initialized: false} = state) do
    case worker_state_entity.load(state.worker_ref, context, options) do
      nil -> {:reply, :not_found, state}
      inner_state ->
        if inactivity_check do
          {:reply, inner_state, %Noizu.SimplePool.Worker.State{state| initialized: true, inner_state: inner_state, last_activity: :os.system_time(:seconds)}}
        else
          {:reply, inner_state, %Noizu.SimplePool.Worker.State{state| initialized: true, inner_state: inner_state}}
        end
    end
  end

  # Async migrate: pushes current state to `rebase` via rpc; on :ack this
  # process stops (the remote copy takes over), otherwise the worker stays.
  # Migrating to self (same node + same ref) is a no-op.
  def handle_cast_migrate(_mod, server, _worker_state_entity, _migrate_shutdown, {:s, {:migrate!, ref, rebase, options}, context}, %Noizu.SimplePool.Worker.State{initialized: true} = state) do
    cond do
      (rebase == node() && ref == state.worker_ref) -> {:noreply, state}
      true ->
        case :rpc.call(rebase, server, :accept_transfer!, [ref, state, context, options], options[:timeout] || 60_000) do
          {:ack, _pid} -> {:stop, {:shutdown, :migrate}, state}
          _r -> {:noreply, state}
        end
    end
  end

  # Sync migrate: same as above but replies {:ack, pid} on success so the
  # caller can track the new process, or {:error, reason} on failure.
  def handle_call_migrate(_mod, server, _worker_state_entity, _migrate_shutdown, {:s, {:migrate!, ref, rebase, options}, context}, _from, %Noizu.SimplePool.Worker.State{initialized: true} = state) do
    cond do
      (rebase == node() && ref == state.worker_ref) -> {:reply, {:ack, self()}, state}
      true ->
        case :rpc.call(rebase, server, :accept_transfer!, [ref, state, context, options], options[:timeout] || 60_000) do
          {:ack, pid} -> {:stop, {:shutdown, :migrate}, {:ack, pid}, state}
          r -> {:reply, {:error, r}, state}
        end
    end
  end

  #-------------------------------------------------------------------------
  # Lazy Load Handling Feature Section
  #
  # These clauses intercept :s calls on an uninitialized worker, load the
  # entity on demand, and re-dispatch the original message.
  #-------------------------------------------------------------------------
  def handle_call_lazy_load(mod, worker_state_entity, {:s, _inner, context} = call, from, %Noizu.SimplePool.Worker.State{initialized: false} = state) do
    case worker_state_entity.load(state.worker_ref, context) do
      nil -> {:reply, :initilization_failed, state}
      inner_state ->
        mod.handle_call(call, from, %Noizu.SimplePool.Worker.State{state| initialized: true, inner_state: inner_state})
    end
  end # end handle_call_lazy_load

  def handle_cast_lazy_load(mod, worker_state_entity, {:s, _inner, context} = call, %Noizu.SimplePool.Worker.State{initialized: false} = state) do
    case worker_state_entity.load(state.worker_ref, context) do
      nil -> {:noreply, state}
      inner_state ->
        mod.handle_cast(call, %Noizu.SimplePool.Worker.State{state| initialized: true, inner_state: inner_state})
    end
  end # end handle_cast_lazy_load

  def handle_info_lazy_load(mod, worker_state_entity, {:s, _inner, context} = call, %Noizu.SimplePool.Worker.State{initialized: false} = state) do
    case worker_state_entity.load(state.worker_ref, context) do
      nil -> {:noreply, state}
      inner_state ->
        mod.handle_info(call, %Noizu.SimplePool.Worker.State{state| initialized: true, inner_state: inner_state})
    end
  end # end handle_info_lazy_load

  # Sync shutdown: delegates to the state entity; :ok also clears the
  # inactivity timer, :wait leaves it running for a later retry.
  def handle_call_shutdown(mod, worker_state_entity, {:s, {:shutdown, options} = _inner_call, context} = _call, from, %Noizu.SimplePool.Worker.State{initialized: true, inner_state: _inner_state} = state) do
    {reply, state} = worker_state_entity.shutdown(state, options, context, from)
    case reply do
      :ok ->
        {:reply, reply, mod.clear_inactivity_check(state)}
      :wait ->
        {:reply, reply, state}
    end
  end

  #-------------------------------------------------------------------------
  # Call Forwarding Feature Section
  #
  # Unwraps the :s envelope, forwards the inner message to the state entity,
  # and re-wraps the entity's GenServer-shaped reply, refreshing
  # last_activity when the inactivity check is enabled.
  #-------------------------------------------------------------------------
  def handle_cast_forwarding(worker_state_entity, inactivity_check, {:s, inner_call, context} = _call, %Noizu.SimplePool.Worker.State{initialized: true, inner_state: inner_state} = state) do
    case worker_state_entity.call_forwarding(inner_call, context, inner_state) do
      {:stop, reason, inner_state} ->
        if inactivity_check do
          {:stop, reason, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state, last_activity: :os.system_time(:seconds)}}
        else
          {:stop, reason, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state}}
        end
      {:noreply, inner_state} ->
        if inactivity_check do
          {:noreply, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state, last_activity: :os.system_time(:seconds)}}
        else
          {:noreply, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state}}
        end
      {:noreply, inner_state, hibernate} ->
        if inactivity_check do
          {:noreply, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state, last_activity: :os.system_time(:seconds)}, hibernate}
        else
          {:noreply, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state}, hibernate}
        end
    end
  end

  def handle_call_forwarding(worker_state_entity, inactivity_check, {:s, inner_call, context} = _call, from, %Noizu.SimplePool.Worker.State{initialized: true, inner_state: inner_state} = state) do
    case worker_state_entity.call_forwarding(inner_call, context, from, inner_state) do
      {:stop, reason, inner_state} ->
        if inactivity_check do
          {:stop, reason, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state, last_activity: :os.system_time(:seconds)}}
        else
          {:stop, reason, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state}}
        end
      {:stop, reason, response, inner_state} ->
        if inactivity_check do
          {:stop, reason, response, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state, last_activity: :os.system_time(:seconds)}}
        else
          {:stop, reason, response, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state}}
        end
      {:reply, response, inner_state} ->
        if inactivity_check do
          {:reply, response, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state, last_activity: :os.system_time(:seconds)}}
        else
          {:reply, response, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state}}
        end
      {:reply, response, inner_state, hibernate} ->
        if inactivity_check do
          {:reply, response, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state, last_activity: :os.system_time(:seconds)}, hibernate}
        else
          {:reply, response, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state}, hibernate}
        end
      {:noreply, inner_state} ->
        if inactivity_check do
          {:noreply, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state, last_activity: :os.system_time(:seconds)}}
        else
          {:noreply, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state}}
        end
      {:noreply, inner_state, hibernate} ->
        if inactivity_check do
          {:noreply, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state, last_activity: :os.system_time(:seconds)}, hibernate}
        else
          {:noreply, %Noizu.SimplePool.Worker.State{state| inner_state: inner_state}, hibernate}
        end
    end
  end
end
| 48.764368 | 206 | 0.655804 |
08a62929b5aedc553ef4ba5b37e525d8016abc5b | 441 | ex | Elixir | lib/minesweeper.ex | ppatrzyk/elixir-minesweeper | 19639019ffdef6d0dd040028783b10477ccb1f83 | [
"MIT"
] | null | null | null | lib/minesweeper.ex | ppatrzyk/elixir-minesweeper | 19639019ffdef6d0dd040028783b10477ccb1f83 | [
"MIT"
] | null | null | null | lib/minesweeper.ex | ppatrzyk/elixir-minesweeper | 19639019ffdef6d0dd040028783b10477ccb1f83 | [
"MIT"
defmodule Minesweeper do
  @moduledoc """
  Starter application using the Scenic framework.
  """

  # Application entry point: boots Scenic with the viewport configured
  # under the :minesweeper application environment, supervised one_for_one.
  def start(_type, _args) do
    viewport_config = Application.get_env(:minesweeper, :viewport)

    Supervisor.start_link(
      [{Scenic, viewports: [viewport_config]}],
      strategy: :one_for_one
    )
  end
end
| 24.5 | 71 | 0.718821 |
08a64c86cf423c4d930d1588fcb2aa2b3a2f5151 | 106 | ex | Elixir | apps/auction/lib/auction/repo.ex | mammenj/elixir-auction | 94941bb820a221e3917014919d97571784ac9388 | [
"Apache-2.0"
] | 5 | 2021-11-17T04:37:39.000Z | 2022-01-02T06:43:23.000Z | apps/auction/lib/auction/repo.ex | mammenj/elixir-auction | 94941bb820a221e3917014919d97571784ac9388 | [
"Apache-2.0"
] | 3 | 2021-05-21T21:50:11.000Z | 2021-11-21T14:34:53.000Z | apps/auction/lib/auction/repo.ex | mammenj/elixir-auction | 94941bb820a221e3917014919d97571784ac9388 | [
"Apache-2.0"
# Ecto repository for the :auction OTP application, backed by PostgreSQL.
# Connection settings are read from the :auction application config.
defmodule Auction.Repo do
  use Ecto.Repo,
    otp_app: :auction,
    adapter: Ecto.Adapters.Postgres
end
| 17.666667 | 35 | 0.726415 |
08a663d8009a94896fd48a8ac66af55143f2ea06 | 4,373 | ex | Elixir | clients/speech/lib/google_api/speech/v1/model/recognition_config.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/speech/lib/google_api/speech/v1/model/recognition_config.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/speech/lib/google_api/speech/v1/model/recognition_config.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# Auto-generated model (swagger-codegen); see the NOTE in the file header.
# `field/1,2` macros come from GoogleApi.Gax.ModelBase and define the
# struct fields plus their (de)serialization metadata.
defmodule GoogleApi.Speech.V1.Model.RecognitionConfig do
  @moduledoc """
  Provides information to the recognizer that specifies how to process the request.

  ## Attributes

  - enableWordTimeOffsets (boolean()): *Optional* If `true`, the top result includes a list of words and the start and end time offsets (timestamps) for those words. If `false`, no word-level time offset information is returned. The default is `false`. Defaults to: `null`.
  - encoding (String.t): Encoding of audio data sent in all `RecognitionAudio` messages. This field is optional for `FLAC` and `WAV` audio files and required for all other audio formats. For details, see AudioEncoding. Defaults to: `null`.
    - Enum - one of [ENCODING_UNSPECIFIED, LINEAR16, FLAC, MULAW, AMR, AMR_WB, OGG_OPUS, SPEEX_WITH_HEADER_BYTE]
  - languageCode (String.t): *Required* The language of the supplied audio as a [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. Example: \"en-US\". See [Language Support](https://cloud.google.com/speech/docs/languages) for a list of the currently supported language codes. Defaults to: `null`.
  - maxAlternatives (integer()): *Optional* Maximum number of recognition hypotheses to be returned. Specifically, the maximum number of `SpeechRecognitionAlternative` messages within each `SpeechRecognitionResult`. The server may return fewer than `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will return a maximum of one. If omitted, will return a maximum of one. Defaults to: `null`.
  - profanityFilter (boolean()): *Optional* If set to `true`, the server will attempt to filter out profanities, replacing all but the initial character in each filtered word with asterisks, e.g. \"f***\". If set to `false` or omitted, profanities won't be filtered out. Defaults to: `null`.
  - sampleRateHertz (integer()): Sample rate in Hertz of the audio data sent in all `RecognitionAudio` messages. Valid values are: 8000-48000. 16000 is optimal. For best results, set the sampling rate of the audio source to 16000 Hz. If that's not possible, use the native sample rate of the audio source (instead of re-sampling). This field is optional for `FLAC` and `WAV` audio files and required for all other audio formats. For details, see AudioEncoding. Defaults to: `null`.
  - speechContexts ([SpeechContext]): *Optional* A means to provide context to assist the speech recognition. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :enableWordTimeOffsets => any(),
          :encoding => any(),
          :languageCode => any(),
          :maxAlternatives => any(),
          :profanityFilter => any(),
          :sampleRateHertz => any(),
          :speechContexts => list(GoogleApi.Speech.V1.Model.SpeechContext.t())
        }

  field(:enableWordTimeOffsets)
  field(:encoding)
  field(:languageCode)
  field(:maxAlternatives)
  field(:profanityFilter)
  field(:sampleRateHertz)
  # Nested list of SpeechContext models, decoded recursively.
  field(:speechContexts, as: GoogleApi.Speech.V1.Model.SpeechContext, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Speech.V1.Model.RecognitionConfig do
  # Delegates decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.Speech.V1.Model.RecognitionConfig.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Speech.V1.Model.RecognitionConfig do
  # Delegates encoding to the shared Gax model encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 65.268657 | 515 | 0.735651 |
08a687b7470b7e8d6d4619ae842d1aa59d71a9ca | 3,464 | ex | Elixir | lib/stranger_web/live/home_live.ex | Arp-G/stranger | 1c7c804b0ab4dbee4c77f1c5adf0d9d5a09aaf41 | [
"MIT"
] | 11 | 2021-01-27T17:21:00.000Z | 2021-12-07T13:02:59.000Z | lib/stranger_web/live/home_live.ex | Arp-G/stranger | 1c7c804b0ab4dbee4c77f1c5adf0d9d5a09aaf41 | [
"MIT"
] | null | null | null | lib/stranger_web/live/home_live.ex | Arp-G/stranger | 1c7c804b0ab4dbee4c77f1c5adf0d9d5a09aaf41 | [
"MIT"
defmodule StrangerWeb.HomeLive do
  # LiveView for the registration/home page: a multi-section wizard with
  # live changeset validation and a single avatar upload slot.
  use StrangerWeb, :live_view
  use Phoenix.HTML

  alias Stranger.{Accounts, Accounts.User}

  @impl Phoenix.LiveView
  # Initializes an empty registration changeset, section 0 of the wizard,
  # and a single-entry avatar upload (jpg/jpeg/png).
  def mount(_params, _session, socket) do
    {:ok,
     socket
     |> assign(%{
       changeset: User.registration_changeset(%{}),
       section: 0
     })
     |> allow_upload(:avatar, accept: ~w(.jpg .jpeg .png), max_entries: 1)}
  end

  @impl Phoenix.LiveView
  # Live-validates form input; :insert action makes errors render.
  def handle_event("validate", %{"user" => params}, socket) do
    changeset =
      params
      |> User.validation_changeset()
      |> Map.put(:action, :insert)

    {:noreply, assign(socket, changeset: changeset)}
  end

  @impl Phoenix.LiveView
  # Runs the (potentially expensive) email-uniqueness check on demand.
  def handle_event("validate_email", _args, %{assigns: %{changeset: changeset}} = socket) do
    changeset =
      changeset
      |> User.validate_unique_email()
      |> Map.put(:action, :insert)

    {:noreply, assign(socket, changeset: changeset)}
  end

  @impl Phoenix.LiveView
  # Persists the user, stores the avatar, then redirects to sign-in.
  # FIX: the two success paths previously duplicated the flash+redirect
  # pipeline verbatim; they now share sign_in_redirect/4.
  # NOTE(review): handle_avatar_upload/2 is not defined in this module —
  # presumably provided via `use StrangerWeb, :live_view`; confirm.
  def handle_event("save", %{"user" => user_params}, socket) do
    case Accounts.create_user(user_params) do
      {:ok, user} ->
        case handle_avatar_upload(socket, user) do
          {:error, _} ->
            {:noreply,
             sign_in_redirect(
               socket,
               user_params,
               :error,
               "Registered successfully but avatar upload failed"
             )}

          _ ->
            {:noreply,
             sign_in_redirect(socket, user_params, :info, "User registered successfully")}
        end

      {:error, %Ecto.Changeset{} = changeset} ->
        {
          :noreply,
          socket
          |> assign(changeset: Map.put(changeset, :action, :insert))
          |> clear_flash()
          |> put_flash(:error, "Could not register user check for errors")
        }
    end
  end

  @impl Phoenix.LiveView
  # Keeps only the most recent avatar upload: when a second entry appears,
  # the first (older) one is cancelled.
  def handle_event("on_upload", _, %{assigns: %{uploads: uploads}} = socket) do
    case uploads.avatar.entries do
      [first, _last] ->
        {:noreply, cancel_upload(socket, :avatar, first.ref)}

      _ ->
        {:noreply, socket}
    end
  end

  @impl Phoenix.LiveView
  # Explicit user-initiated cancellation of an in-progress upload.
  def handle_event("cancel-upload", %{"ref" => ref}, socket) do
    {:noreply, cancel_upload(socket, :avatar, ref)}
  end

  @impl Phoenix.LiveView
  # Advances the wizard, clamped to section 3.
  def handle_event("next", _args, %{assigns: %{section: section_number}} = socket) do
    section_number = if section_number < 3, do: section_number + 1, else: section_number
    {:noreply, assign(socket, section: section_number)}
  end

  @impl Phoenix.LiveView
  # Steps the wizard back, clamped to section 0.
  def handle_event("prev", _args, %{assigns: %{section: section_number}} = socket) do
    section_number = if section_number > 0, do: section_number - 1, else: section_number
    {:noreply, assign(socket, section: section_number)}
  end

  @impl Phoenix.LiveView
  # Jumps directly to a section; the event name carries the target index.
  def handle_event("jump_to_" <> jump_to, _args, socket) do
    {:noreply, assign(socket, section: String.to_integer(jump_to))}
  end

  # Flashes `message` under `flash_type` and redirects to the sign-in route,
  # forwarding the submitted credentials so the user is signed in right away.
  # SECURITY NOTE(review): this places the plaintext password in the redirect
  # query string (visible in server logs / browser history); consider a
  # short-lived token handoff instead.
  defp sign_in_redirect(socket, user_params, flash_type, message) do
    socket
    |> put_flash(flash_type, message)
    |> redirect(
      to:
        StrangerWeb.Router.Helpers.session_path(socket, :sign_in, %{
          email: user_params["email"],
          password: user_params["password"]
        })
    )
  end
end
| 29.862069 | 92 | 0.582852 |
08a69d6acc40b518a0f8238881d94af060a8aa65 | 3,621 | ex | Elixir | lib/baiji/auth/instance_metadata.ex | wrren/baiji | d3d9e1cad875c6e1ddb47bf52511c3a07321764a | [
"MIT"
] | null | null | null | lib/baiji/auth/instance_metadata.ex | wrren/baiji | d3d9e1cad875c6e1ddb47bf52511c3a07321764a | [
"MIT"
] | null | null | null | lib/baiji/auth/instance_metadata.ex | wrren/baiji | d3d9e1cad875c6e1ddb47bf52511c3a07321764a | [
"MIT"
defmodule Baiji.Auth.InstanceMetadata do
  @moduledoc """
  Derives operation authentication parameters from the local instance's
  metadata (ECS task-role endpoint first, then the EC2 metadata endpoint).
  """
  use Baiji.Auth
  import Baiji.Core.Utilities

  # EC2 metadata endpoint
  @ec2_metadata_url "http://169.254.169.254/latest/meta-data/"
  # ECS task role metadata endpoint
  @ecs_metadata_url "http://169.254.170.2"

  @doc """
  Populate the security credential of the given operation specified by the given key
  using the instance metadata endpoint
  """
  def populate(%Operation{} = op, key) do
    op
    |> assign_credentials
    |> populate_from_cache(key)
  end

  @doc """
  Populate the given credentials key in the op struct using cached credentials
  """
  # Fetches credentials only once per operation: subsequent calls reuse the
  # :instance_metadata assign. ECS task-role credentials take precedence;
  # EC2 instance-profile credentials are the fallback.
  def assign_credentials(%Operation{} = op) do
    case op.assigns[:instance_metadata] do
      nil ->
        Operation.assign(op, :instance_metadata, ecs_credentials(op) || ec2_credentials(op))
      _ ->
        op
    end
  end

  @doc """
  Query the instance metadata endpoint for security credentials and return them
  """
  # Two-step lookup: first fetch the IAM role name, then the credentials
  # document for that role. Raises (via request/1) on any HTTP failure.
  # NOTE(review): @ec2_metadata_url already ends with "/" so these URLs
  # contain "meta-data//iam/..." — confirm the metadata service tolerates
  # the double slash (it appears to have, historically).
  def ec2_credentials(%Operation{} = op) do
    Operation.debug(op, "Getting EC2 Credentials...")
    request(@ec2_metadata_url <> "/iam/security-credentials/")
    |> then(fn role ->
      request(@ec2_metadata_url <> "/iam/security-credentials/#{role}")
    end)
    |> Poison.decode!
    |> extract_credentials
  end

  @doc """
  Query the ECS task role credentials endpoint for security credentials and return them
  """
  # Returns nil (rather than raising) when the ECS-specific env var is not
  # set, which lets assign_credentials/1 fall through to EC2.
  def ecs_credentials(%Operation{} = op) do
    Operation.debug(op, "Getting ECS Credentials...")
    case System.get_env("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") do
      nil ->
        Operation.debug(op, "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI Not Set")
        nil
      uri ->
        request(@ecs_metadata_url <> uri)
        |> Poison.decode!
        |> extract_credentials
    end
  end

  @doc """
  Assign instance metadata credentials to the operation
  """
  # Maps the decoded JSON credential document into the keyword list shape
  # that populate_from_cache/3 reads from.
  def extract_credentials(credentials) do
    [
      access_key_id: credentials["AccessKeyId"],
      secret_access_key: credentials["SecretAccessKey"],
      security_token: credentials["Token"],
      expiration: credentials["Expiration"]
    ]
  end

  @doc """
  Populate the given auth key on the operation using the cached instance metadata in
  the operation's assigns
  """
  def populate_from_cache(%Operation{} = op, key) do
    populate_from_cache(op, key, op.assigns[:instance_metadata])
  end
  # No cached credentials: leave the operation untouched.
  def populate_from_cache(%Operation{} = op, _key, nil), do: op
  def populate_from_cache(%Operation{} = op, key, creds), do: Map.put(op, key, Keyword.get(creds, key))

  @doc """
  Make a GET request to the specified URL and return the body. If anything goes wrong, raise an error.
  """
  def request(url) do
    case HTTPoison.get(url) do
      {:ok, %{status_code: 200, body: body}} ->
        body
      {:ok, %{status_code: status}} ->
        raise """
        HTTP Status Code #{status} received from instance metadata endpoint. It's possible that this instance
        does not have the correct IAM role to be able to provide metadata.
        """
      error ->
        raise """
        An error occured while making a request to the instance metadata endpoint: #{inspect error}. It's likely that
        you're not currently running this application on an EC2 instance. Please check your application config and
        verify that you're retrieving credentials from sources in the correct order.
        """
    end
  end
end
08a6a96b704d7e3c82c9da3174b9e21e815bc3ec | 1,093 | exs | Elixir | test/rocketlivery/users/get_test.exs | cassiofariasmachado/rockelivery | 3d88d4d8af1cdc3f2988edc69162d848009babbd | [
"MIT"
] | 1 | 2021-09-27T06:15:08.000Z | 2021-09-27T06:15:08.000Z | test/rocketlivery/users/get_test.exs | cassiofariasmachado/rockelivery | 3d88d4d8af1cdc3f2988edc69162d848009babbd | [
"MIT"
] | null | null | null | test/rocketlivery/users/get_test.exs | cassiofariasmachado/rockelivery | 3d88d4d8af1cdc3f2988edc69162d848009babbd | [
"MIT"
] | 1 | 2021-12-21T12:47:59.000Z | 2021-12-21T12:47:59.000Z | defmodule Rockelivery.Users.GetTest do
use Rockelivery.DataCase, async: true
import Rockelivery.Factory
alias Rockelivery.{Error, User}
alias Rockelivery.Users.Get
describe "by_id/1" do
test "when user exists, returns the user" do
id = "7bd9a991-014b-47e5-a06c-8bd01bee9999"
insert(:user, id: id)
response =
id
|> Get.by_id()
assert {:ok,
%User{
id: "7bd9a991-014b-47e5-a06c-8bd01bee9999",
name: "Cassio",
email: "cassio@email.com",
cpf: "12345678910",
age: 24,
cep: "99999999",
address: "Rua das Bananeiras"
}} = response
end
test "when the user not exits, returns an error" do
id = "7bd9a991-014b-47e5-a06c-8bd01bee9999"
response =
id
|> Get.by_id()
assert {
:error,
%Error{
message: "User not found",
status: :not_found
}
} = response
end
end
end
| 22.770833 | 59 | 0.504117 |
08a6bcad4c05a6980f82c5f300790e611fae44c4 | 1,125 | exs | Elixir | config/config.exs | sineed/veritaserum | 40ce475baaf2f8f95a8e51007242f9e0d23caf22 | [
"Apache-2.0"
] | 81 | 2017-05-17T20:29:29.000Z | 2022-03-06T21:11:30.000Z | config/config.exs | chingan-tsc/veritaserum | 724d4c49e188f284c9acc90ea9743bfaac6a8b55 | [
"Apache-2.0"
] | 5 | 2017-09-14T13:05:05.000Z | 2022-01-05T11:40:57.000Z | config/config.exs | chingan-tsc/veritaserum | 724d4c49e188f284c9acc90ea9743bfaac6a8b55 | [
"Apache-2.0"
] | 9 | 2017-07-21T20:05:11.000Z | 2021-10-12T15:14:57.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :veritaserum, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:veritaserum, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.290323 | 73 | 0.752889 |
08a6e708aa07e5aa34cdfac163db7151b21e3277 | 2,843 | exs | Elixir | test/vault_config_provider_test.exs | kianmeng/vault_config_provider | 97c5ca07a695a7c6a01b1249505bea8fd961eef5 | [
"MIT"
] | null | null | null | test/vault_config_provider_test.exs | kianmeng/vault_config_provider | 97c5ca07a695a7c6a01b1249505bea8fd961eef5 | [
"MIT"
] | null | null | null | test/vault_config_provider_test.exs | kianmeng/vault_config_provider | 97c5ca07a695a7c6a01b1249505bea8fd961eef5 | [
"MIT"
] | null | null | null | defmodule VaultConfigProviderTest do
use ExUnit.Case
import Mock
setup_with_mocks([
{Vaultex.Client, [], [read: fn path, _method, _credentials -> {:ok, %{"key" => "ok"}} end]}
]) do
:ok
end
describe "resolve secrets" do
test "single string path" do
assert [{:app, [some_key: "ok"]} | _] =
VaultConfigProvider.resolve_secrets(
app: [some_key: "secret:secret/services/my_app key=key"],
vaultex: [auth: {:method, :credentials}]
)
end
test "keyword path" do
assert [{:app, [some_key: "ok"]} | _] =
VaultConfigProvider.resolve_secrets(
app: [some_key: [path: "secret/services/my_app", key: "key"]],
vaultex: [auth: {:method, :credentials}]
)
end
test "key word path with transform" do
assert [{:app, [some_key: "OK"]} | _] =
VaultConfigProvider.resolve_secrets(
app: [
some_key: [
path: "secret/services/my_app",
key: "key",
fun: &String.upcase/1
]
],
vaultex: [auth: {:method, :credentials}]
)
end
test "keyword paths in array" do
assert [{:app, [some_key: ["ok", "ok"]]} | _] =
VaultConfigProvider.resolve_secrets(
app: [
some_key: [
[path: "secret/services/my_app", key: "key"],
[path: "secret/services/my_app", key: "key"]
]
],
vaultex: [auth: {:method, :credentials}]
)
end
test "string paths in array" do
assert [{:app, [some_key: ["ok", "ok"]]} | _] =
VaultConfigProvider.resolve_secrets(
app: [
some_key: [
"secret:secret/services/my_app key=key",
"secret:secret/services/my_app key=key"
]
],
vaultex: [auth: {:method, :credentials}]
)
end
test "deeply nested keyword path" do
assert [{:app, [some_key: [at: [a: [very: [deep: [path: "ok"]]]]]]} | _] =
VaultConfigProvider.resolve_secrets(
app: [
some_key: [
at: [
a: [
very: [
deep: [
path: [path: "secret/services/my_app", key: "key"]
]
]
]
]
]
],
vaultex: [auth: {:method, :credentials}]
)
end
end
end
| 31.588889 | 95 | 0.415406 |
08a7309812dae7c7cb231f562d9e1a53c53f828f | 1,260 | exs | Elixir | mix.exs | Frameio/flock | e1b003265b6463d8228bfaaf33c14d5c6c7962cb | [
"MIT"
] | 11 | 2018-08-16T23:16:25.000Z | 2022-02-23T18:08:16.000Z | mix.exs | Frameio/flock | e1b003265b6463d8228bfaaf33c14d5c6c7962cb | [
"MIT"
] | 7 | 2019-04-03T07:29:10.000Z | 2022-02-22T12:03:01.000Z | mix.exs | Frameio/flock | e1b003265b6463d8228bfaaf33c14d5c6c7962cb | [
"MIT"
] | 4 | 2019-10-26T10:20:58.000Z | 2022-02-22T18:18:36.000Z | defmodule Herd.MixProject do
use Mix.Project
@version "0.4.3"
def project do
[
app: :herd,
version: @version,
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
elixirc_paths: elixirc_paths(Mix.env),
deps: deps(),
package: package(),
description: description(),
docs: docs()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:libring, "~> 1.1"},
{:ex_doc, "~> 0.19", only: :dev},
{:poolboy, "~> 1.5"}
]
end
defp description do
"""
Connection manager for a cluster of nodes
"""
end
defp package do
[
licenses: ["MIT"],
maintainers: ["Michael Guarino"],
links: %{"GitHub" => "https://github.com/Frameio/herd"}
]
end
defp docs() do
[
main: "readme",
extras: ["README.md"],
source_ref: "v#{@version}",
source_url: "https://github.com/Frameio/herd"
]
end
end
| 20 | 61 | 0.564286 |
08a7425e7b64667cfb47c2347f1a3367ee862e95 | 4,760 | ex | Elixir | lib/rdf/xsd/datatypes/double.ex | pukkamustard/rdf-ex | c459d8e7fa548fdfad82643338b68decf380a296 | [
"MIT"
] | null | null | null | lib/rdf/xsd/datatypes/double.ex | pukkamustard/rdf-ex | c459d8e7fa548fdfad82643338b68decf380a296 | [
"MIT"
] | null | null | null | lib/rdf/xsd/datatypes/double.ex | pukkamustard/rdf-ex | c459d8e7fa548fdfad82643338b68decf380a296 | [
"MIT"
] | null | null | null | defmodule RDF.XSD.Double do
@moduledoc """
`RDF.XSD.Datatype` for XSD doubles.
"""
@type special_values :: :positive_infinity | :negative_infinity | :nan
@type valid_value :: float | special_values
@special_values ~W[positive_infinity negative_infinity nan]a
use RDF.XSD.Datatype.Primitive,
name: "double",
id: RDF.Utils.Bootstrapping.xsd_iri("double")
alias RDF.XSD
def_applicable_facet XSD.Facets.MinInclusive
def_applicable_facet XSD.Facets.MaxInclusive
def_applicable_facet XSD.Facets.MinExclusive
def_applicable_facet XSD.Facets.MaxExclusive
def_applicable_facet XSD.Facets.Pattern
@doc false
def min_inclusive_conform?(min_inclusive, value, _lexical) do
value >= min_inclusive
end
@doc false
def max_inclusive_conform?(max_inclusive, value, _lexical) do
value <= max_inclusive
end
@doc false
def min_exclusive_conform?(min_exclusive, value, _lexical) do
value > min_exclusive
end
@doc false
def max_exclusive_conform?(max_exclusive, value, _lexical) do
value < max_exclusive
end
@doc false
def pattern_conform?(pattern, _value, lexical) do
XSD.Facets.Pattern.conform?(pattern, lexical)
end
@impl XSD.Datatype
def lexical_mapping(lexical, opts) do
case Float.parse(lexical) do
{float, ""} ->
float
{float, remainder} ->
# 1.E-8 is not a valid Elixir float literal and consequently not fully parsed with Float.parse
if Regex.match?(~r/^\.e?[\+\-]?\d+$/i, remainder) do
lexical_mapping(to_string(float) <> String.trim_leading(remainder, "."), opts)
else
@invalid_value
end
:error ->
case String.upcase(lexical) do
"INF" -> :positive_infinity
"-INF" -> :negative_infinity
"NAN" -> :nan
_ -> @invalid_value
end
end
end
@impl XSD.Datatype
@spec elixir_mapping(valid_value | integer | any, Keyword.t()) :: value
def elixir_mapping(value, _)
def elixir_mapping(value, _) when is_float(value), do: value
def elixir_mapping(value, _) when is_integer(value), do: value / 1
def elixir_mapping(value, _) when value in @special_values, do: value
def elixir_mapping(_, _), do: @invalid_value
@impl XSD.Datatype
@spec init_valid_lexical(valid_value, XSD.Datatype.uncanonical_lexical(), Keyword.t()) ::
XSD.Datatype.uncanonical_lexical()
def init_valid_lexical(value, lexical, opts)
def init_valid_lexical(value, nil, _) when is_atom(value), do: nil
def init_valid_lexical(value, nil, _), do: decimal_form(value)
def init_valid_lexical(_, lexical, _), do: lexical
defp decimal_form(float), do: to_string(float)
@impl XSD.Datatype
@spec canonical_mapping(valid_value) :: String.t()
def canonical_mapping(value)
# Produces the exponential form of a float
def canonical_mapping(float) when is_float(float) do
# We can't use simple %f transformation due to special requirements from N3 tests in representation
[i, f, e] =
float
|> float_to_string()
|> String.split(~r/[\.e]/)
# remove any trailing zeroes
f =
case String.replace(f, ~r/0*$/, "", global: false) do
# ...but there must be a digit to the right of the decimal point
"" -> "0"
f -> f
end
e = String.trim_leading(e, "+")
"#{i}.#{f}E#{e}"
end
def canonical_mapping(:nan), do: "NaN"
def canonical_mapping(:positive_infinity), do: "INF"
def canonical_mapping(:negative_infinity), do: "-INF"
if List.to_integer(:erlang.system_info(:otp_release)) >= 21 do
defp float_to_string(float) do
:io_lib.format("~.15e", [float])
|> to_string()
end
else
defp float_to_string(float) do
:io_lib.format("~.15e", [float])
|> List.first()
|> to_string()
end
end
@impl RDF.Literal.Datatype
def do_cast(value)
def do_cast(%XSD.String{} = xsd_string) do
xsd_string.value |> new() |> canonical()
end
def do_cast(literal) do
cond do
XSD.Boolean.datatype?(literal) ->
case literal.value do
false -> new(0.0)
true -> new(1.0)
end
XSD.Integer.datatype?(literal) ->
new(literal.value)
XSD.Decimal.datatype?(literal) ->
literal.value
|> Decimal.to_float()
|> new()
true ->
super(literal)
end
end
@impl RDF.Literal.Datatype
def do_equal_value_same_or_derived_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype
def do_equal_value_different_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype
def do_compare(left, right), do: XSD.Numeric.do_compare(left, right)
end
| 27.514451 | 106 | 0.666597 |
08a751952f960d150bbff8c1aaf56816b61e6a43 | 323 | ex | Elixir | lib/arkecosystem/crypto/configuration/configuration.ex | whitehat/elixir-crypto | 6347868ee15c7b79676df58bef54376a8dc6fd02 | [
"MIT"
] | null | null | null | lib/arkecosystem/crypto/configuration/configuration.ex | whitehat/elixir-crypto | 6347868ee15c7b79676df58bef54376a8dc6fd02 | [
"MIT"
] | null | null | null | lib/arkecosystem/crypto/configuration/configuration.ex | whitehat/elixir-crypto | 6347868ee15c7b79676df58bef54376a8dc6fd02 | [
"MIT"
] | null | null | null | defmodule ArkEcosystem.Crypto.Configuration.Configuration do
use KVX.Bucket
def get_value(key, bucket \\ :ark_config) do
bucket = new(bucket)
get(bucket, key)
end
def set_value(key, value, bucket \\ :ark_config) do
bucket = new(bucket)
set(bucket, key, value)
get_value(key, bucket)
end
end
| 21.533333 | 60 | 0.693498 |
08a761ca7aec68df460ca6b434d382c6f1578cc4 | 2,489 | exs | Elixir | test/pile/extras/changeset_x_test.exs | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | 6 | 2019-07-16T19:31:23.000Z | 2021-06-05T19:01:05.000Z | test/pile/extras/changeset_x_test.exs | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | null | null | null | test/pile/extras/changeset_x_test.exs | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | 3 | 2020-02-24T23:38:27.000Z | 2020-08-01T23:50:17.000Z | defmodule Ecto.ChangesetXTest do
use ExUnit.Case, async: true
use Ecto.Schema
import Ecto.Changeset
alias Ecto.ChangesetX
embedded_schema do
field :field, :integer
end
def data(value), do: %__MODULE__{field: value}
# -------------Fields and Changes ------------------------------------------------
test "Working with no change" do
cs = change(data("old"))
assert ChangesetX.old!(cs, :field) == "old"
assert_raise KeyError, fn -> ChangesetX.new!(cs, :field) end
assert ChangesetX.newest!(cs, :field) == "old"
end
test "Working with a change" do
cs = change(data("old"), field: "new")
assert ChangesetX.old!(cs, :field) == "old"
assert ChangesetX.new!(cs, :field) == "new"
assert ChangesetX.newest!(cs, :field) == "new"
end
test "Working with a bad field" do
cs = change(data("old"), field: "new")
assert_raise KeyError, fn -> ChangesetX.old!(cs, :bad_field) end
assert_raise KeyError, fn -> ChangesetX.new!(cs, :bad_field) end
assert_raise KeyError, fn -> ChangesetX.newest!(cs, :bad_field) end
end
# --------Errors ---------------------------------------------
test "all_valid?" do
valid = %{valid?: true}
invalid = %{valid?: false}
assert ChangesetX.all_valid?(valid, [])
refute ChangesetX.all_valid?(invalid, [])
assert ChangesetX.all_valid?(valid, [valid, valid])
refute ChangesetX.all_valid?(invalid, [valid, valid])
refute ChangesetX.all_valid?(valid, [invalid, valid])
refute ChangesetX.all_valid?(valid, [valid, invalid])
end
# ------------Groups of changesets--------------------
# ------------Misc-------------------------------------
defmodule Deletable do
use Ecto.Schema
@primary_key false
embedded_schema do
field :id, :id
field :delete, :boolean
end
def changeset(%__MODULE__{} = struct, attrs) do
struct
|> cast(attrs, [:id, :delete])
end
end
defmodule Container do
use Ecto.Schema
embedded_schema do
field :many, {:array, Deletable}
end
end
test "delection of deletable ids from a nested association." do
nested =
for {id, delete} <- [{1, false}, {2, true}],
do: Deletable.changeset(%Deletable{id: id}, %{delete: delete})
actual =
change(%Container{})
|> put_change(:many, nested)
|> ChangesetX.ids_marked_for_deletion(:many)
assert actual == MapSet.new([2])
end
end
| 26.2 | 84 | 0.58417 |
08a782510d2e68e3c959e9098efeda5cdece4f71 | 1,448 | ex | Elixir | lib/geolix/adapter/mmdb2/loader.ex | coladarci/geolix | 0a0508db410732fa8a24cbcd28e44f89b1b30afa | [
"Apache-2.0"
] | null | null | null | lib/geolix/adapter/mmdb2/loader.ex | coladarci/geolix | 0a0508db410732fa8a24cbcd28e44f89b1b30afa | [
"Apache-2.0"
] | null | null | null | lib/geolix/adapter/mmdb2/loader.ex | coladarci/geolix | 0a0508db410732fa8a24cbcd28e44f89b1b30afa | [
"Apache-2.0"
] | null | null | null | defmodule Geolix.Adapter.MMDB2.Loader do
@moduledoc """
Loader module to load an MMDB2 database into Geolix.
"""
alias Geolix.Adapter.MMDB2.Reader
alias Geolix.Adapter.MMDB2.Storage
@doc """
Implementation of `Geolix.Adapter.MMDB2.load_database/1`.
Requires the parameter `:source` as the location of the database. Can access
the system environment by receiving a `{ :system, "env_var_name" }` tuple.
Using `{ :system, "env_var_name", "/path/to/default.mmdb2" }` you can define
a fallback value to be used if the environment variable is not set.
"""
@spec load_database(map) :: :ok | {:error, term}
def load_database(%{source: {:system, var, default}} = database) do
database
|> Map.put(:source, System.get_env(var) || default)
|> load_database()
end
def load_database(%{source: {:system, var}} = database) do
database
|> Map.put(:source, System.get_env(var))
|> load_database()
end
def load_database(%{id: id, source: source}) do
source
|> Reader.read_database()
|> store_data(id)
end
@doc """
Implementation of `Geolix.Adapter.MMDB2.unload_database/1`.
"""
def unload_database(%{id: id}), do: store_data({:ok, nil, nil, nil}, id)
defp store_data({:error, _reason} = error, _), do: error
defp store_data({:ok, meta, tree, data}, id) do
Storage.Data.set(id, data)
Storage.Metadata.set(id, meta)
Storage.Tree.set(id, tree)
:ok
end
end
| 27.846154 | 78 | 0.668508 |
08a789bb25a1dbc359058064f1452de5a9c3c662 | 5,070 | ex | Elixir | lib/thrift/generator/binary/framed/server.ex | jparise/elixir-thrift | 99ab3da4d7776de9b0df56d9947cc9b163b55450 | [
"Apache-2.0"
] | 209 | 2015-12-19T09:56:39.000Z | 2022-03-22T04:43:16.000Z | lib/thrift/generator/binary/framed/server.ex | pinterest/elixir-thrift | cd27337a958cf4db5818d4dcbcdc274019107591 | [
"Apache-2.0"
] | 312 | 2016-01-05T04:04:58.000Z | 2021-11-15T17:59:57.000Z | lib/thrift/generator/binary/framed/server.ex | jparise/elixir-thrift | 99ab3da4d7776de9b0df56d9947cc9b163b55450 | [
"Apache-2.0"
] | 40 | 2015-12-21T19:46:03.000Z | 2022-02-10T08:34:58.000Z | defmodule Thrift.Generator.Binary.Framed.Server do
@moduledoc false
alias Thrift.AST.Function
alias Thrift.Generator.{
Service,
Utils
}
alias Thrift.Parser.FileGroup
def generate(service_module, service, file_group) do
functions =
service.functions
|> Map.values()
|> Enum.map(&generate_handler_function(file_group, service_module, &1))
quote do
defmodule Binary.Framed.Server do
@moduledoc false
require Logger
alias Thrift.Binary.Framed.Server, as: ServerImpl
defdelegate stop(name), to: ServerImpl
def start_link(handler_module, port, opts \\ []) do
ServerImpl.start_link(__MODULE__, port, handler_module, opts)
end
unquote_splicing(functions)
def handle_thrift(method, _binary_data, _handler_module) do
error =
Thrift.TApplicationException.exception(
type: :unknown_method,
message: "Unknown method: #{method}"
)
{:client_error, error}
end
end
end
end
def generate_handler_function(file_group, service_module, %Function{params: []} = function) do
fn_name = Atom.to_string(function.name)
handler_fn_name = Utils.underscore(function.name)
response_module = Module.concat(service_module, Service.module_name(function, :response))
handler_args = []
body = build_responder(function.return_type, handler_fn_name, handler_args, response_module)
handler = wrap_with_try_catch(body, function, file_group, response_module)
quote do
def handle_thrift(unquote(fn_name), _binary_data, handler_module) do
unquote(handler)
end
end
end
def generate_handler_function(file_group, service_module, function) do
fn_name = Atom.to_string(function.name)
args_module = Module.concat(service_module, Service.module_name(function, :args))
response_module = Module.concat(service_module, Service.module_name(function, :response))
struct_matches =
Enum.map(function.params, fn param ->
{param.name, Macro.var(param.name, nil)}
end)
quote do
def handle_thrift(unquote(fn_name), binary_data, handler_module) do
case unquote(Module.concat(args_module, BinaryProtocol)).deserialize(binary_data) do
{%unquote(args_module){unquote_splicing(struct_matches)}, ""} ->
unquote(build_handler_call(file_group, function, response_module))
{_, extra} ->
raise Thrift.TApplicationException,
type: :protocol_error,
message: "Could not decode #{inspect(extra)}"
end
end
end
end
defp build_handler_call(file_group, function, response_module) do
handler_fn_name = Utils.underscore(function.name)
handler_args = Enum.map(function.params, &Macro.var(&1.name, nil))
body = build_responder(function.return_type, handler_fn_name, handler_args, response_module)
wrap_with_try_catch(body, function, file_group, response_module)
end
defp wrap_with_try_catch(body, function, file_group, response_module) do
# Quoted clauses for exception types defined by the schema.
exception_clauses =
Enum.flat_map(function.exceptions, fn
exc ->
resolved = FileGroup.resolve(file_group, exc)
dest_module = FileGroup.dest_module(file_group, resolved.type)
error_var = Macro.var(exc.name, nil)
field_setter = quote do: {unquote(exc.name), unquote(error_var)}
quote do
:error, %unquote(dest_module){} = unquote(error_var) ->
response = %unquote(response_module){unquote(field_setter)}
{:reply,
unquote(Module.concat(response_module, BinaryProtocol)).serialize(response)}
end
end)
# Quoted clauses for our standard catch clauses (common to all functions).
catch_clauses =
quote do
kind, reason ->
formatted_exception = Exception.format(kind, reason, __STACKTRACE__)
Logger.error("Exception not defined in thrift spec was thrown: #{formatted_exception}")
error =
Thrift.TApplicationException.exception(
type: :internal_error,
message: "Server error: #{formatted_exception}"
)
{:server_error, error}
end
quote do
try do
unquote(body)
catch
unquote(Enum.concat(exception_clauses, catch_clauses))
end
end
end
defp build_responder(:void, handler_fn_name, handler_args, _response_module) do
quote do
_result = handler_module.unquote(handler_fn_name)(unquote_splicing(handler_args))
:noreply
end
end
defp build_responder(_, handler_fn_name, handler_args, response_module) do
quote do
result = handler_module.unquote(handler_fn_name)(unquote_splicing(handler_args))
response = %unquote(response_module){success: result}
{:reply, unquote(Module.concat(response_module, BinaryProtocol)).serialize(response)}
end
end
end
| 33.576159 | 97 | 0.677712 |
08a792803c14a62bdcbc9d730b8c809a7244e299 | 3,084 | ex | Elixir | lib/toolshed/nerves.ex | amclain/toolshed | 881fd4953ce477310812e172bfced7532a2656d8 | [
"Apache-2.0"
] | null | null | null | lib/toolshed/nerves.ex | amclain/toolshed | 881fd4953ce477310812e172bfced7532a2656d8 | [
"Apache-2.0"
] | null | null | null | lib/toolshed/nerves.ex | amclain/toolshed | 881fd4953ce477310812e172bfced7532a2656d8 | [
"Apache-2.0"
] | null | null | null | if Code.ensure_loaded?(Nerves.Runtime) do
defmodule Toolshed.Nerves do
@moduledoc """
Helpers that are useful on Nerves devices
Helpers include:
* `dmesg/0` - print kernel messages
* `fw_validate/0` - marks the current image as valid (check Nerves system if supported)
* `lsmod/0` - print out what kernel modules have been loaded
* `reboot/0` - reboots gracefully
* `reboot!/0` - reboots immediately
* `uname/0` - print information about the running system
"""
@doc """
Print out kernel log messages
"""
@spec dmesg() :: :"do not show this result in output"
def dmesg() do
Toolshed.cmd("dmesg")
IEx.dont_display_result()
end
@doc """
Shortcut to reboot a board. This is a graceful reboot, so it takes some time
before the real reboot.
"""
@spec reboot() :: no_return()
defdelegate reboot(), to: Nerves.Runtime
@doc """
Reboot immediately without a graceful shutdown. This is for the impatient.
"""
@spec reboot!() :: no_return()
def reboot!() do
:erlang.halt()
end
@doc """
Validate a firmware image
All official Nerves Systems automatically validate newly installed firmware.
For some systems, it's possible to disable this so that new firmware gets
one chance to boot. If it's not "validated" before a reboot, then the device
reverts to the old firmware.
"""
@spec fw_validate() :: :ok | {:error, String.t()}
def fw_validate() do
try do
Nerves.Runtime.KV.put("nerves_fw_validated", "1")
catch
:error, :undef ->
# Fall back to the old Nerves way
case System.cmd("fw_setenv", ["nerves_fw_validated", "1"]) do
{_, 0} -> :ok
{output, _} -> {:error, output}
end
end
end
@doc """
Print out information about the running software
This is similar to the Linux `uname` to help people remember what to type.
"""
@spec uname() :: :"do not show this result in output"
def uname() do
sysname = "Nerves"
nodename = Toolshed.Net.hostname()
release = Nerves.Runtime.KV.get_active("nerves_fw_product")
version =
"#{Nerves.Runtime.KV.get_active("nerves_fw_version")} (#{Nerves.Runtime.KV.get_active("nerves_fw_uuid")})"
arch = Nerves.Runtime.KV.get_active("nerves_fw_architecture")
IO.puts("#{sysname} #{nodename} #{release} #{version} #{arch}")
IEx.dont_display_result()
end
@doc """
Print out the loaded kernel modules
Aside from printing out whether the kernel has been tainted, the
Linux utility of the same name just dump the contents of "/proc/modules"
like this one.
Some kernel modules may be built-in to the kernel image. To see
those, run `cat "/lib/modules/x.y.z/modules.builtin"` where `x.y.z` is
the kernel's version number.
"""
@spec lsmod() :: :"do not show this result in output"
def lsmod() do
Toolshed.Unix.cat("/proc/modules")
end
end
end
| 31.151515 | 114 | 0.627108 |
08a7d6c66f834de1b0ec46e2cc7b5417b348dfa9 | 1,235 | exs | Elixir | config/config.exs | mattiaslundberg/home_display | ce571e37090398cfb15a508cf9758c174efa3d5e | [
"MIT"
] | null | null | null | config/config.exs | mattiaslundberg/home_display | ce571e37090398cfb15a508cf9758c174efa3d5e | [
"MIT"
] | null | null | null | config/config.exs | mattiaslundberg/home_display | ce571e37090398cfb15a508cf9758c174efa3d5e | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
import Config
# Enable the Nerves integration with Mix
Application.start(:nerves_bootstrap)
config :home_display, target: Mix.target()
config :home_display, env: Mix.env()
config :home_display,
ical_urls: System.get_env("HOME_DISPLAY_CALENDAR_URLS", "") |> String.split(",", trim: true)
# Customize non-Elixir parts of the firmware. See
# https://hexdocs.pm/nerves/advanced-configuration.html for details.
config :nerves, :firmware, rootfs_overlay: "rootfs_overlay"
# Set the SOURCE_DATE_EPOCH date for reproducible builds.
# See https://reproducible-builds.org/docs/source-date-epoch/ for more information
config :nerves, source_date_epoch: "1610801299"
# Use Ringlogger as the logger backend and remove :console.
# See https://hexdocs.pm/ring_logger/readme.html for more information on
# configuring ring_logger.
config :logger, backends: [RingLogger]
if Mix.target() == :host or Mix.target() == :"" do
import_config "host.exs"
else
import_config "target.exs"
end
import_config "#{Mix.env()}.exs"
| 30.875 | 94 | 0.766802 |
08a7dc56e43026a08cd3639eb2fe374a2a6d1e22 | 1,204 | ex | Elixir | lib/banking_graph/auth/account.ex | oryono/banking | 0a49ebae5ebf93a6db0c24476a1c86c60bb72733 | [
"MIT"
] | null | null | null | lib/banking_graph/auth/account.ex | oryono/banking | 0a49ebae5ebf93a6db0c24476a1c86c60bb72733 | [
"MIT"
] | null | null | null | lib/banking_graph/auth/account.ex | oryono/banking | 0a49ebae5ebf93a6db0c24476a1c86c60bb72733 | [
"MIT"
] | null | null | null | defmodule BankingGraph.Auth.Account do
use Ecto.Schema
import Ecto.Changeset
schema "auth_accounts" do
field :password_hash, :string
field :email, :string
field :username, :string
field :password, :string, virtual: true
belongs_to :customer, BankingGraph.Banking.Customer
belongs_to :teller, BankingGraph.Banking.Teller
belongs_to :client, BankingGraph.Banking.Client
belongs_to :branch, BankingGraph.Banking.Branch
timestamps()
end
@doc false
def changeset(user, attrs) do
user
|> cast(attrs, [:email, :client_id, :password, :username, :branch_id])
|> validate_required([:username, :password, :client_id])
|> validate_length(:password, min: 6)
|> unique_constraint(:email)
|> unique_constraint(:user_client_combination,
name: :auth_accounts_client_id_email_constraint,
message: "That email for that client is already taken."
)
|> hash_password()
end
defp hash_password(changeset) do
case changeset do
%Ecto.Changeset{valid?: true, changes: %{password: password}} ->
put_change(changeset, :password_hash, Pbkdf2.hash_pwd_salt(password))
_ ->
changeset
end
end
end
| 28 | 77 | 0.697674 |
08a7eb7c78a07b5ae8a0a6b1267063a03c864d98 | 3,249 | ex | Elixir | apps/service_gather/lib/gather/extraction.ex | jdenen/hindsight | ef69b4c1a74c94729dd838a9a0849a48c9b6e04c | [
"Apache-2.0"
] | 12 | 2020-01-27T19:43:02.000Z | 2021-07-28T19:46:29.000Z | apps/service_gather/lib/gather/extraction.ex | jdenen/hindsight | ef69b4c1a74c94729dd838a9a0849a48c9b6e04c | [
"Apache-2.0"
] | 81 | 2020-01-28T18:07:23.000Z | 2021-11-22T02:12:13.000Z | apps/service_gather/lib/gather/extraction.ex | jdenen/hindsight | ef69b4c1a74c94729dd838a9a0849a48c9b6e04c | [
"Apache-2.0"
] | 10 | 2020-02-13T21:24:09.000Z | 2020-05-21T18:39:35.000Z | defmodule Gather.Extraction do
@moduledoc """
Process to wrap and manage a dataset's extraction pipeline. This is operated
like a `Task`, in that it executes and shuts down.
"""
import Events
use GenServer, restart: :transient
require Logger
use Properties, otp_app: :service_gather
use Annotated.Retry
alias Gather.Extraction.SourceHandler
@max_tries get_config_value(:max_tries, default: 10)
@initial_delay get_config_value(:initial_delay, default: 500)
getter(:app_name, required: true)
def start_link(args) do
server_opts = Keyword.take(args, [:name])
GenServer.start_link(__MODULE__, args, server_opts)
end
@impl GenServer
def init(args) do
Process.flag(:trap_exit, true)
{:ok, Map.new(args), {:continue, :extract}}
end
@dialyzer {:nowarn_function, handle_continue: 2}
@impl GenServer
def handle_continue(:extract, %{extract: extract} = state) do
case extract(extract) do
{:ok, destination_and_source} ->
Logger.debug(fn -> "#{__MODULE__}: Started extraction: #{inspect(extract)}" end)
{:noreply, Map.merge(state, destination_and_source)}
{:error, reason} ->
Logger.warn("#{__MODULE__}: Extraction Stopping: #{inspect(extract)}")
{:stop, reason, state}
end
end
@impl GenServer
def handle_info(:extract_complete, %{extract: extract, destination_pid: pid} = state) do
Destination.stop(extract.destination, pid)
Logger.debug(fn -> "#{__MODULE__}: Extraction Completed: #{inspect(extract)}" end)
Brook.Event.send(Gather.Application.instance(), extract_end(), "gather", extract)
{:stop, :normal, state}
end
@impl GenServer
def handle_info({:extract_failed, reason}, %{extract: extract, destination_pid: pid} = state) do
Destination.stop(extract.destination, pid)
Logger.warn("#{__MODULE__}: Extraction Stopping: #{inspect(extract)}")
{:stop, reason, state}
end
@impl GenServer
def handle_info(msg, state) do
Logger.warn(fn -> "#{__MODULE__}: Received unexpected message : #{inspect(msg)}" end)
{:noreply, state}
end
@retry with: exponential_backoff(@initial_delay) |> take(@max_tries)
defp extract(extract) do
with {:ok, destination_pid} <- start_destination(extract),
{:ok, source_pid} <- start_source(extract, destination_pid) do
{:ok, %{destination_pid: destination_pid, source_pid: source_pid}}
end
end
def start_source(extract, destination_pid) do
Source.start_link(extract.source, source_context(extract, destination_pid))
end
defp start_destination(extract) do
Destination.start_link(
extract.destination,
Destination.Context.new!(
app_name: app_name(),
dataset_id: extract.dataset_id,
subset_id: extract.subset_id,
dictionary: extract.dictionary
)
)
end
defp source_context(extract, destination_pid) do
Source.Context.new!(
dictionary: extract.dictionary,
handler: SourceHandler,
app_name: :service_gather,
dataset_id: extract.dataset_id,
subset_id: extract.subset_id,
decode_json: false,
assigns: %{
pid: self(),
destination_pid: destination_pid,
extract: extract
}
)
end
end
| 30.650943 | 98 | 0.68852 |
08a80a771de9cbb247604101b5f87f56c4d00566 | 580 | exs | Elixir | priv/repo/migrations/20190123224516_create_webhook.exs | wenusch/XRPL-Webhooks | 69bce5ea891d5024911f960f4faabe6af3b2bba0 | [
"MIT"
] | 9 | 2019-02-22T10:33:28.000Z | 2021-02-27T20:26:03.000Z | priv/repo/migrations/20190123224516_create_webhook.exs | wenusch/XRPL-Webhooks | 69bce5ea891d5024911f960f4faabe6af3b2bba0 | [
"MIT"
] | 11 | 2019-04-02T19:21:25.000Z | 2022-01-05T23:22:04.000Z | priv/repo/migrations/20190123224516_create_webhook.exs | wenusch/XRPL-Webhooks | 69bce5ea891d5024911f960f4faabe6af3b2bba0 | [
"MIT"
] | 6 | 2019-03-07T15:54:30.000Z | 2020-03-26T02:33:40.000Z | defmodule Espy.Repo.Migrations.CreateWebhook do
use Ecto.Migration
def change do
create_if_not_exists table(:webhooks) do
add :hook_id, :integer
add :url, :string
add :failed_count, :integer
add :deactivated, :boolean, default: false, null: false
add :deactivate_reason, :string
add :deleted, :boolean, default: false, null: false
add :app_id, references(:apps, on_delete: :nothing)
timestamps()
end
create_if_not_exists index(:webhooks, [:app_id])
create_if_not_exists index(:webhooks, [:hook_id])
end
end
| 27.619048 | 61 | 0.686207 |
08a83245aae53d215ffdb48a0e798ebc7d87886d | 984 | ex | Elixir | lib/lucidboard/application.ex | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 86 | 2019-01-07T20:49:04.000Z | 2021-10-02T21:15:42.000Z | lib/lucidboard/application.ex | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 26 | 2019-03-27T12:06:52.000Z | 2020-09-20T05:21:09.000Z | lib/lucidboard/application.ex | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 19 | 2015-01-06T19:02:49.000Z | 2020-05-25T08:54:00.000Z | defmodule Lucidboard.Application do
@moduledoc false
use Application
alias Lucidboard.LiveBoard
alias LucidboardWeb.Endpoint
def start(_type, _args) do
import Supervisor.Spec
IO.puts(banner())
children = [
supervisor(Endpoint, []),
supervisor(Lucidboard.Repo, []),
LiveBoard.registry_child_spec(),
LiveBoard.dynamic_supervisor_child_spec(),
Lucidboard.Presence
]
opts = [strategy: :one_for_one, name: Lucidboard.Supervisor]
Supervisor.start_link(children, opts)
end
  # Application callback invoked on a hot configuration change; forwards the
  # changed/removed keys to the Phoenix endpoint and reports success.
  def config_change(changed, _new, removed) do
    Endpoint.config_change(changed, removed)
    :ok
  end
  # ASCII-art boot banner printed by start/2. The ~S sigil disables escape
  # interpretation so the backslashes in the art survive verbatim.
  # NOTE(review): the art's leading whitespace may have been mangled in this
  # copy — confirm against the banner rendered at boot.
  defp banner,
    do: ~S"""
    __ _ _ _ _
    / / _ _ ___(_) __| | |__ ___ __ _ _ __ __| |
    / / | | | |/ __| |/ _` | '_ \ / _ \ / _` | '__/ _` |
    / /__| |_| | (__| | (_| | |_) | (_) | (_| | | | (_| |
    \____/\__,_|\___|_|\__,_|_.__/ \___/ \__,_|_| \__,_|
    """
end
| 25.894737 | 64 | 0.551829 |
08a847f8510c609dff8cfcbcc1bdaa8b95638c9f | 1,420 | ex | Elixir | .clext/yq.ex | peterwwillis/clinst | 0fff3e125ca3dbb1bdba2d4dde96170c8b644a1f | [
"MIT"
] | null | null | null | .clext/yq.ex | peterwwillis/clinst | 0fff3e125ca3dbb1bdba2d4dde96170c8b644a1f | [
"MIT"
] | null | null | null | .clext/yq.ex | peterwwillis/clinst | 0fff3e125ca3dbb1bdba2d4dde96170c8b644a1f | [
"MIT"
] | null | null | null | #!/usr/bin/env sh
set -eu
# Enable shell tracing when the caller exports DEBUG=1.
[ "${DEBUG:-0}" = "1" ] && set -x

### Extension-specific variables
# Every value may be pre-set by the environment; ${VAR:-default} only fills
# in what the caller left unset.
CLINST_E_NAME="${CLINST_E_NAME:-yq}"
CLINST_E_REV="0.2.0"
CLINST_E_BIN_NAME="${CLINST_E_BIN_NAME:-$CLINST_E_NAME}"
CLINST_E_DLFILE="${CLINST_E_DLFILE:-$CLINST_E_NAME}"
CLINST_E_INSTDIR="${CLINST_E_INSTDIR:-$(pwd)}"
CLINST_E_OS="${CLINST_E_OS:-linux}"
CLINST_E_ARCH="${CLINST_E_ARCH:-amd64}"
CLINST_E_GHREPOAPI="https://api.github.com/repos/mikefarah/$CLINST_E_BIN_NAME"
# printf-style template; the %s slots are filled from CLINST_E_BASEURL_ARGS
# (version, OS, arch) by the clinst driver.
CLINST_E_BASEURL="https://github.com/mikefarah/$CLINST_E_NAME/releases/download/v%s/$CLINST_E_NAME""_%s_%s"
CLINST_E_BASEURL_ARGS='"${CLINST_E_VERSION}" "${CLINST_E_OS}" "${CLINST_E_ARCH}"'
export CLINST_E_NAME CLINST_E_REV CLINST_E_BIN_NAME CLINST_E_DLFILE

### Extension-specific functions
# Lists published versions by querying the GitHub tags API through clinst,
# keeping only entries that start with a digit.
_ext_versions () { clinst -E "$CLINST_E_NAME" -X versions_ghtags "$CLINST_E_GHREPOAPI" | grep -e "^[0-9]"; }

### The rest of this doesn't need to be modified
# Dumps every CLINST_E_* variable currently set.
_ext_variables () { set | grep '^CLINST_E_' ; }
# Usage text; the command list is derived from the _ext_* functions in $0.
_ext_help () { printf "Usage: $0 CMD\n\nCommands:\n%s\n" "$(grep -e "^_ext_.* ()" "$0" | awk '{print $1}' | sed -e 's/_ext_//;s/^/ /g' | tr _ -)" ; }
# Dispatch: translate CMD (hyphens become underscores) to an _ext_* function
# and invoke it with the remaining arguments.
if [ $# -lt 1 ]
then _ext_help ; exit 1
else cmd="$1"; shift
     func="_ext_$(printf "%s\n" "$cmd" | tr - _)"
     # `[ a -a b ]` is an obsolescent XSI extension of test(1); chaining
     # separate single-primary tests with && is the portable POSIX form.
     [ -n "${CLINST_DIR:-}" ] && [ -n "${CLINST_E_ENVIRON:-}" ] && [ -d "$CLINST_DIR/$CLINST_E_ENVIRON" ] && cd "$CLINST_DIR/$CLINST_E_ENVIRON"
     case "$cmd" in *) $func "$@" ;; esac
fi
| 45.806452 | 150 | 0.672535 |
08a848b5b6ff0150d917b114928db742b9d9b4c7 | 51 | ex | Elixir | web/views/race_view.ex | openrowing/raceman2 | eee2d51c50bddf63b9c5b9b351424d4c056fa27d | [
"Apache-2.0"
] | null | null | null | web/views/race_view.ex | openrowing/raceman2 | eee2d51c50bddf63b9c5b9b351424d4c056fa27d | [
"Apache-2.0"
] | null | null | null | web/views/race_view.ex | openrowing/raceman2 | eee2d51c50bddf63b9c5b9b351424d4c056fa27d | [
"Apache-2.0"
] | null | null | null | defmodule Web.RaceView do
  # Pulls in the shared view helpers defined by the `:view` clause of
  # Web.Web (render functions, template compilation, etc.).
  use Web.Web, :view
end
| 12.75 | 25 | 0.745098 |
08a868ce2e9600330b5485edc0ed67080a9d1a60 | 2,274 | ex | Elixir | lib/sparql/extension_function/extension_function.ex | pukkamustard/sparql-ex | 4f8907ddbd15215c9b6c40edac19362c92e34d91 | [
"MIT"
] | null | null | null | lib/sparql/extension_function/extension_function.ex | pukkamustard/sparql-ex | 4f8907ddbd15215c9b6c40edac19362c92e34d91 | [
"MIT"
] | null | null | null | lib/sparql/extension_function/extension_function.ex | pukkamustard/sparql-ex | 4f8907ddbd15215c9b6c40edac19362c92e34d91 | [
"MIT"
] | null | null | null | defmodule SPARQL.ExtensionFunction do
@moduledoc """
A behaviour for SPARQL extension functions.
## Examples
An extension function can be defined like this:
defmodule ExampleFunction do
use SPARQL.ExtensionFunction, name: "http://example.com/function"
def call(distinct, arguments, data, execution) do
# your implementation
end
end
The name of the module is arbitrary and has no further meaning.
see
- <https://www.w3.org/TR/sparql11-query/#extensionFunctions>
- <https://www.w3.org/TR/sparql11-query/#operatorExtensibility>
"""
@doc """
The name of the extension function.
As specified in the SPARQL grammar the name of a function is an IRI.
"""
@callback name() :: String.t
@doc """
Calls the extension function.
The `distinct` argument is a boolean flag which signifies if the `DISTINCT`
modifier was used in the function call, which is syntactically allowed in
custom aggregate function calls only.
The `arguments` argument is the list of already evaluated RDF terms with which
the extension function was called in the SPARQL query.
The `data` argument contains the currently evaluated solution and some other
internal information and shouldn't be relied upon, because it might be subject
to changes and contain different elements depending on the context the function
was called in. Since the arguments are already evaluated against the current
solution it shouldn't be necessary anyway.
The `execution` argument is a map with some global execution context
information. In particular:
- `base`: the base IRI
- `time`: the query execution time
- `bnode_generator`: the name of the `RDF.BlankNode.Generator` (see
[RDF.ex documentation](http://hexdocs.pm/rdf)) used to generate unique blank
nodes consistently
"""
@callback call(distinct :: boolean(),
arguments :: list(RDF.Term.t),
data :: RDF.Dataset.t | RDF.Graph.t,
execution :: map)
:: RDF.Term.t | :error
  # Injected via `use SPARQL.ExtensionFunction, name: "..."`: registers the
  # behaviour and implements the name/0 callback with the configured IRI.
  defmacro __using__(opts) do
    # :name is mandatory — fail fast at compile time when it is missing.
    name = Keyword.fetch!(opts, :name)

    quote do
      @behaviour unquote(__MODULE__)

      @impl unquote(__MODULE__)
      def name(), do: unquote(name)
    end
  end
end
| 29.921053 | 81 | 0.694811 |
08a8a0e6ec7163f23bce5fcdf22d22b02f817c47 | 9,747 | ex | Elixir | clients/analytics/lib/google_api/analytics/v3/model/experiment.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/experiment.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/analytics/lib/google_api/analytics/v3/model/experiment.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Analytics.V3.Model.Experiment do
  @moduledoc """
  JSON template for Analytics experiment resource.

  ## Attributes

  *   `accountId` (*type:* `String.t`, *default:* `nil`) - Account ID to which this experiment belongs. This field is read-only.
  *   `created` (*type:* `DateTime.t`, *default:* `nil`) - Time the experiment was created. This field is read-only.
  *   `description` (*type:* `String.t`, *default:* `nil`) - Notes about this experiment.
  *   `editableInGaUi` (*type:* `boolean()`, *default:* `nil`) - If true, the end user will be able to edit the experiment via the Google Analytics user interface.
  *   `endTime` (*type:* `DateTime.t`, *default:* `nil`) - The ending time of the experiment (the time the status changed from RUNNING to ENDED). This field is present only if the experiment has ended. This field is read-only.
  *   `equalWeighting` (*type:* `boolean()`, *default:* `nil`) - Boolean specifying whether to distribute traffic evenly across all variations. If the value is False, content experiments follows the default behavior of adjusting traffic dynamically based on variation performance. Optional -- defaults to False. This field may not be changed for an experiment whose status is ENDED.
  *   `id` (*type:* `String.t`, *default:* `nil`) - Experiment ID. Required for patch and update. Disallowed for create.
  *   `internalWebPropertyId` (*type:* `String.t`, *default:* `nil`) - Internal ID for the web property to which this experiment belongs. This field is read-only.
  *   `kind` (*type:* `String.t`, *default:* `analytics#experiment`) - Resource type for an Analytics experiment. This field is read-only.
  *   `minimumExperimentLengthInDays` (*type:* `integer()`, *default:* `nil`) - An integer number in [3, 90]. Specifies the minimum length of the experiment. Can be changed for a running experiment. This field may not be changed for an experiments whose status is ENDED.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Experiment name. This field may not be changed for an experiment whose status is ENDED. This field is required when creating an experiment.
  *   `objectiveMetric` (*type:* `String.t`, *default:* `nil`) - The metric that the experiment is optimizing. Valid values: "ga:goal(n)Completions", "ga:adsenseAdsClicks", "ga:adsenseAdsViewed", "ga:adsenseRevenue", "ga:bounces", "ga:pageviews", "ga:sessionDuration", "ga:transactions", "ga:transactionRevenue". This field is required if status is "RUNNING" and servingFramework is one of "REDIRECT" or "API".
  *   `optimizationType` (*type:* `String.t`, *default:* `nil`) - Whether the objectiveMetric should be minimized or maximized. Possible values: "MAXIMUM", "MINIMUM". Optional--defaults to "MAXIMUM". Cannot be specified without objectiveMetric. Cannot be modified when status is "RUNNING" or "ENDED".
  *   `parentLink` (*type:* `GoogleApi.Analytics.V3.Model.ExperimentParentLink.t`, *default:* `nil`) - Parent link for an experiment. Points to the view (profile) to which this experiment belongs.
  *   `profileId` (*type:* `String.t`, *default:* `nil`) - View (Profile) ID to which this experiment belongs. This field is read-only.
  *   `reasonExperimentEnded` (*type:* `String.t`, *default:* `nil`) - Why the experiment ended. Possible values: "STOPPED_BY_USER", "WINNER_FOUND", "EXPERIMENT_EXPIRED", "ENDED_WITH_NO_WINNER", "GOAL_OBJECTIVE_CHANGED". "ENDED_WITH_NO_WINNER" means that the experiment didn't expire but no winner was projected to be found. If the experiment status is changed via the API to ENDED this field is set to STOPPED_BY_USER. This field is read-only.
  *   `rewriteVariationUrlsAsOriginal` (*type:* `boolean()`, *default:* `nil`) - Boolean specifying whether variations URLS are rewritten to match those of the original. This field may not be changed for an experiments whose status is ENDED.
  *   `selfLink` (*type:* `String.t`, *default:* `nil`) - Link for this experiment. This field is read-only.
  *   `servingFramework` (*type:* `String.t`, *default:* `nil`) - The framework used to serve the experiment variations and evaluate the results. One of:
      - REDIRECT: Google Analytics redirects traffic to different variation pages, reports the chosen variation and evaluates the results.
      - API: Google Analytics chooses and reports the variation to serve and evaluates the results; the caller is responsible for serving the selected variation.
      - EXTERNAL: The variations will be served externally and the chosen variation reported to Google Analytics. The caller is responsible for serving the selected variation and evaluating the results.
  *   `snippet` (*type:* `String.t`, *default:* `nil`) - The snippet of code to include on the control page(s). This field is read-only.
  *   `startTime` (*type:* `DateTime.t`, *default:* `nil`) - The starting time of the experiment (the time the status changed from READY_TO_RUN to RUNNING). This field is present only if the experiment has started. This field is read-only.
  *   `status` (*type:* `String.t`, *default:* `nil`) - Experiment status. Possible values: "DRAFT", "READY_TO_RUN", "RUNNING", "ENDED". Experiments can be created in the "DRAFT", "READY_TO_RUN" or "RUNNING" state. This field is required when creating an experiment.
  *   `trafficCoverage` (*type:* `float()`, *default:* `nil`) - A floating-point number in (0, 1]. Specifies the fraction of the traffic that participates in the experiment. Can be changed for a running experiment. This field may not be changed for an experiments whose status is ENDED.
  *   `updated` (*type:* `DateTime.t`, *default:* `nil`) - Time the experiment was last modified. This field is read-only.
  *   `variations` (*type:* `list(GoogleApi.Analytics.V3.Model.ExperimentVariations.t)`, *default:* `nil`) - Array of variations. The first variation in the array is the original. The number of variations may not change once an experiment is in the RUNNING state. At least two variations are required before status can be set to RUNNING.
  *   `webPropertyId` (*type:* `String.t`, *default:* `nil`) - Web property ID to which this experiment belongs. The web property ID is of the form UA-XXXXX-YY. This field is read-only.
  *   `winnerConfidenceLevel` (*type:* `float()`, *default:* `nil`) - A floating-point number in (0, 1). Specifies the necessary confidence level to choose a winner. This field may not be changed for an experiments whose status is ENDED.
  *   `winnerFound` (*type:* `boolean()`, *default:* `nil`) - Boolean specifying whether a winner has been found for this experiment. This field is read-only.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :accountId => String.t(),
          :created => DateTime.t(),
          :description => String.t(),
          :editableInGaUi => boolean(),
          :endTime => DateTime.t(),
          :equalWeighting => boolean(),
          :id => String.t(),
          :internalWebPropertyId => String.t(),
          :kind => String.t(),
          :minimumExperimentLengthInDays => integer(),
          :name => String.t(),
          :objectiveMetric => String.t(),
          :optimizationType => String.t(),
          :parentLink => GoogleApi.Analytics.V3.Model.ExperimentParentLink.t(),
          :profileId => String.t(),
          :reasonExperimentEnded => String.t(),
          :rewriteVariationUrlsAsOriginal => boolean(),
          :selfLink => String.t(),
          :servingFramework => String.t(),
          :snippet => String.t(),
          :startTime => DateTime.t(),
          :status => String.t(),
          :trafficCoverage => float(),
          :updated => DateTime.t(),
          :variations => list(GoogleApi.Analytics.V3.Model.ExperimentVariations.t()),
          :webPropertyId => String.t(),
          :winnerConfidenceLevel => float(),
          :winnerFound => boolean()
        }

  # Generated field declarations; they mirror the keys of t() above, with
  # `as:`/`type:` hints driving JSON (de)serialization in ModelBase.
  field(:accountId)
  field(:created, as: DateTime)
  field(:description)
  field(:editableInGaUi)
  field(:endTime, as: DateTime)
  field(:equalWeighting)
  field(:id)
  field(:internalWebPropertyId)
  field(:kind)
  field(:minimumExperimentLengthInDays)
  field(:name)
  field(:objectiveMetric)
  field(:optimizationType)
  field(:parentLink, as: GoogleApi.Analytics.V3.Model.ExperimentParentLink)
  field(:profileId)
  field(:reasonExperimentEnded)
  field(:rewriteVariationUrlsAsOriginal)
  field(:selfLink)
  field(:servingFramework)
  field(:snippet)
  field(:startTime, as: DateTime)
  field(:status)
  field(:trafficCoverage)
  field(:updated, as: DateTime)
  field(:variations, as: GoogleApi.Analytics.V3.Model.ExperimentVariations, type: :list)
  field(:webPropertyId)
  field(:winnerConfidenceLevel)
  field(:winnerFound)
end
defimpl Poison.Decoder, for: GoogleApi.Analytics.V3.Model.Experiment do
  # Delegates to the decode/2 generated on the model module by ModelBase.
  def decode(value, options), do: GoogleApi.Analytics.V3.Model.Experiment.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Analytics.V3.Model.Experiment do
  # All generated models share the common ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 74.40458 | 444 | 0.708936 |
08a8a6c8e44534c3d11ac7e3da7ee4f37255ec58 | 279 | ex | Elixir | lib/webchat_web/controllers/page_controller.ex | Bwuak/webchat | b6668be508481a1ff480303affaebdaf28604f41 | [
"MIT"
] | null | null | null | lib/webchat_web/controllers/page_controller.ex | Bwuak/webchat | b6668be508481a1ff480303affaebdaf28604f41 | [
"MIT"
] | 1 | 2020-08-04T19:50:09.000Z | 2020-08-04T19:50:09.000Z | lib/webchat_web/controllers/page_controller.ex | Bwuak/webchat | b6668be508481a1ff480303affaebdaf28604f41 | [
"MIT"
] | null | null | null | defmodule WebchatWeb.PageController do
use WebchatWeb, :controller
def index(conn, _params) do
case conn.assigns.current_user do
nil ->
render(conn, "index.html")
_connected_user ->
conn
|> redirect(to: "/chat")
end
end
end
| 17.4375 | 38 | 0.616487 |
08a8c27f8e021aa4e4d3a434c094e791ce69341c | 2,493 | ex | Elixir | lib/advent/y2020/d08.ex | ed-flanagan/advent-of-code-solutions-elixir | ca4b62f82088e7a120699fbba9809c04c76403a6 | [
"MIT"
] | null | null | null | lib/advent/y2020/d08.ex | ed-flanagan/advent-of-code-solutions-elixir | ca4b62f82088e7a120699fbba9809c04c76403a6 | [
"MIT"
] | null | null | null | lib/advent/y2020/d08.ex | ed-flanagan/advent-of-code-solutions-elixir | ca4b62f82088e7a120699fbba9809c04c76403a6 | [
"MIT"
] | null | null | null | defmodule Advent.Y2020.D08 do
@moduledoc """
https://adventofcode.com/2020/day/8
"""
@typep instruction :: {:nop | :acc | :jmp, integer()}
@typep line_no :: non_neg_integer()
@typep instructions :: %{line_no() => instruction()}
@doc """
What value is in the accumulator?
"""
@spec part_one(Enumerable.t()) :: integer()
def part_one(input) do
{:loop, acc} =
input
|> map_instructions()
|> execute()
acc
end
@doc """
What is the value of the accumulator after the program terminates?
"""
@spec part_two(Enumerable.t()) :: integer()
def part_two(input) do
input
|> map_instructions()
|> no_cycle_execute()
end
@spec map_instructions(Enumerable.t()) :: instructions()
defp map_instructions(lines) do
lines
|> Stream.map(fn line ->
[cmd_str, val_str] = String.split(line, " ")
cmd =
case cmd_str do
"nop" -> :nop
"jmp" -> :jmp
"acc" -> :acc
end
val = String.to_integer(val_str)
{cmd, val}
end)
|> Stream.with_index()
|> Map.new(fn {ins, lno} -> {lno, ins} end)
end
@spec execute(instructions()) :: {:ok | :loop, integer()}
defp execute(lines) do
do_execute(lines, 0, 0, map_size(lines))
end
@spec do_execute(instructions(), line_no(), non_neg_integer(), non_neg_integer()) ::
{:ok | :loop, integer()}
defp do_execute(_lines, line_no, acc, size) when line_no >= size do
{:ok, acc}
end
defp do_execute(lines, line_no, acc, size) do
case Map.pop(lines, line_no) do
{{:nop, _val}, lines} -> do_execute(lines, line_no + 1, acc, size)
{{:acc, val}, lines} -> do_execute(lines, line_no + 1, acc + val, size)
{{:jmp, val}, lines} -> do_execute(lines, line_no + val, acc, size)
{nil, _lines} -> {:loop, acc}
end
end
@spec no_cycle_execute(instructions()) :: integer()
defp no_cycle_execute(instructions) do
case execute(instructions) do
{:ok, acc} ->
acc
{:loop, _acc} ->
Enum.find_value(instructions, fn
{lno, {:nop, val}} -> exec_flip(instructions, {lno, {:jmp, val}})
{lno, {:jmp, val}} -> exec_flip(instructions, {lno, {:nop, val}})
_ -> nil
end)
end
end
@spec exec_flip(instructions(), {line_no(), instruction()}) :: integer() | nil
defp exec_flip(ins, {lno, flip}) do
case execute(%{ins | lno => flip}) do
{:ok, val} -> val
_ -> nil
end
end
end
| 25.701031 | 86 | 0.578018 |
08a8f1fe1fabf10b3b573d7e5bc194134a474057 | 1,481 | exs | Elixir | config/config.exs | madnificent/mu-elixir-cache | c86717907b113d593966b3b2e1e86a8e530fe0bb | [
"MIT"
] | null | null | null | config/config.exs | madnificent/mu-elixir-cache | c86717907b113d593966b3b2e1e86a8e530fe0bb | [
"MIT"
] | null | null | null | config/config.exs | madnificent/mu-elixir-cache | c86717907b113d593966b3b2e1e86a8e530fe0bb | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
defmodule CH do
  @moduledoc false

  # Reads the named environment variable and interprets it as a boolean
  # flag: "true"/"yes"/"1"/"on" (any case) are truthy; everything else —
  # including an unset variable — is false.
  def system_boolean(name) do
    normalized = String.downcase(System.get_env(name) || "")
    normalized in ["true", "yes", "1", "on"]
  end
end
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# Cache-logging toggles, read from the environment when this file is
# evaluated (i.e. at build/compile time, not per request).
config :use_plug_proxy,
  log_cache_keys: CH.system_boolean("LOG_CACHE_KEYS"),
  log_clear_keys: CH.system_boolean("LOG_CLEAR_KEYS")
# You can configure for your application as:
#
# config :use_plug_proxy, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:use_plug_proxy, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 31.510638 | 73 | 0.725861 |
08a93e0800f38293862a9caabeb47d877731df95 | 417 | exs | Elixir | test/mappers_web/views/error_view_test.exs | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
] | 32 | 2021-04-22T01:55:31.000Z | 2022-02-25T13:17:21.000Z | test/mappers_web/views/error_view_test.exs | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
] | 58 | 2021-06-04T18:42:59.000Z | 2022-03-31T07:17:01.000Z | test/mappers_web/views/error_view_test.exs | evandiewald/mappers | 7359cfb39a4d9d26c42f5917ee04a7e41d3291bc | [
"Apache-2.0"
] | 13 | 2021-04-10T06:09:15.000Z | 2022-03-23T13:07:37.000Z | defmodule MappersWeb.ErrorViewTest do
  use MappersWeb.ConnCase, async: true

  # Bring render/3 and render_to_string/3 for testing custom views
  import Phoenix.View

  # Each fallback template is expected to render a bare status message.
  test "renders 404.html" do
    assert render_to_string(MappersWeb.ErrorView, "404.html", []) == "Not Found"
  end

  test "renders 500.html" do
    assert render_to_string(MappersWeb.ErrorView, "500.html", []) == "Internal Server Error"
  end
end
| 27.8 | 92 | 0.733813 |
08a947d98529153e00bfd9c4ff99d408604ec644 | 5,630 | ex | Elixir | lib/plug_cache_control.ex | krasenyp/plug_cache_control | e179b96cc02df92ff53c891ec625b5b5e824448a | [
"Apache-2.0"
] | 6 | 2021-11-05T21:35:57.000Z | 2021-11-12T01:31:20.000Z | lib/plug_cache_control.ex | krasenyp/plug_cache_control | e179b96cc02df92ff53c891ec625b5b5e824448a | [
"Apache-2.0"
] | 4 | 2021-11-06T17:05:30.000Z | 2022-01-15T13:16:48.000Z | lib/plug_cache_control.ex | krasenyp/plug_cache_control | e179b96cc02df92ff53c891ec625b5b5e824448a | [
"Apache-2.0"
] | null | null | null | defmodule PlugCacheControl do
@moduledoc """
A plug + helpers for overwriting the default `cache-control` header. The plug
supports all the response header directives defined in [RFC7234, section
5.2.2](https://datatracker.ietf.org/doc/html/rfc7234#section-5.2.2).
## Header directives
The `PlugCacheControl` plug takes a `directives` option which can specify
either _static_ or _dynamic_ header directives. Static directives are useful
when you don't need per-request directives. Static directives are defined very
similarly to a struct's key.
plug PlugCacheControl, directives: [:public, max_age: {1, :hour}]
As seen in the above example, directive names with hyphens are mapped to atoms
by replacing the hyphens with underscores.
Boolean directives like `public`, `private`, `must-revalidate`, `no-store` and
so on can be included in the header value by simply including them in the
directives list e.g. no need for explicit `no_store: true` value. Note that as
per the standard, `no-cache` can also specify one or more fields. This is
supported via the definition below.
plug PlugCacheControl, directives: [no_cache: ["somefield", "otherfield"]]
The `public` and `private` directives also have somewhat special handling so
you won't need to explicitly define `private: false` when you've used
`:public` in the "boolean section" of the directives list. Another important
thing is that if a directive is not included in the directives list, the
directive will be _omitted_ from the header's value.
The values of the directives which have a delta-seconds values can be defined
directly as an integer representing the delta-seconds.
plug PlugCacheControl, directives: [:public, max_age: 3600]
A unit tuple can also be used to specify delta-seconds. The supported time
units are `second`, `seconds`, `minute`, `minutes`, `hour`, `hours`, `day`,
`days`, `week`, `weeks`, `year`, `years`. The following example shows how unit
  tuples can be used as a convenience to define delta-seconds.
plug PlugCacheControl,
directives: [
:public,
max_age: {1, :hour},
stale_while_revalidate: {20, :minutes}
]
Dynamic directives are useful when you might want to derive cache control
directives per-request. Maybe there's some other header value which you care
about or a dynamic configuration governing caching behaviour, dynamic
directives are the way to go.
plug PlugCacheControl, directives: &__MODULE__.dyn_cc/1
# ...somewhere in the module...
defp dyn_cc(_conn) do
[:public, max_age: Cache.get(:max_age)]
end
As seen in the previous example, the only difference between static and
dynamic directives definition is that the latter is a unary function which
returns a directives list. The exact same rules that apply to the static
directives apply to the function's return value.
## A note on behaviour
The first time the plug is called on a connection, the existing value of the
Cache-Control header is _replaced_ by the user-defined one. A private field
which signifies the header value is overwritten is put on the connection
struct. On subsequent calls of the plug, the provided directives' definitions
are _merged_ with the header values. This allows the user to build up the
Cache-Control header value.
Of course, if one wants to replace the header value on a connection that has an
already overwritten value, one can use the
`PlugCacheControl.Helpers.put_cache_control` function or provide a `replace:
true` option to the plug.
plug PlugCacheControl, directives: [...], replace: true
The latter approach allows for a finer-grained control and conditional
replacement of header values.
plug PlugCacheControl, [directives: [...], replace: true] when action == :index
plug PlugCacheControl, [directives: [...]] when action == :show
"""
@behaviour Plug
alias Plug.Conn
alias PlugCacheControl.Helpers
@typep static :: Helpers.directive_opt()
@typep dynamic :: (Plug.Conn.t() -> Helpers.directive_opt())
@impl Plug
@spec init([{:directives, static | dynamic}]) :: %{directives: dynamic, replace: boolean()}
def init(opts) do
opts
|> Enum.into(%{})
|> with_default_opts()
|> validate_opts!()
end
  @impl Plug
  @spec call(Conn.t(), %{directives: static() | dynamic(), replace: boolean()}) :: Conn.t()
  # Dynamic directives: evaluate the user-supplied fun against this conn,
  # then fall through to one of the static clauses below.
  def call(conn, %{directives: fun} = opts) when is_function(fun, 1) do
    opts = Map.put(opts, :directives, fun.(conn))
    call(conn, opts)
  end

  # replace: true always overwrites the header, even if an earlier plug in
  # the pipeline already set it on this conn.
  def call(%Conn{} = conn, %{directives: dir, replace: true}) do
    conn
    |> Helpers.put_cache_control(dir)
    |> Conn.put_private(:cache_control_overwritten, true)
  end

  # Header was already overwritten during this request: merge (patch) the
  # new directives into the existing value instead of replacing it.
  def call(%Conn{private: %{cache_control_overwritten: true}} = conn, %{directives: dir}) do
    Helpers.patch_cache_control(conn, dir)
  end

  # First call on this conn: replace the default header value and mark the
  # conn so subsequent calls take the merge path above.
  def call(%Conn{} = conn, %{directives: dir}) do
    conn
    |> Helpers.put_cache_control(dir)
    |> Conn.put_private(:cache_control_overwritten, true)
  end
defp with_default_opts(opts) do
default_opts = %{
replace: false
}
Map.merge(default_opts, opts)
end
  # Accepts either a static directives list or a unary fun plus a boolean
  # replace flag; any other shape is a configuration error and raises.
  defp validate_opts!(%{directives: dir, replace: replace} = opts)
       when (is_list(dir) or is_function(dir, 1)) and is_boolean(replace) do
    opts
  end

  defp validate_opts!(_) do
    raise ArgumentError,
          "Provide a \"directives\" option with list of directives or a unary \
          function taking connection as first argument and returning a list of \
          directives."
  end
end
| 37.039474 | 93 | 0.712966 |
08a95226a9d9f81dee1bf2f0911b866a5756ba17 | 944 | exs | Elixir | apps/donut_web/config/config.exs | ZURASTA/donut | b0546c041601e619e76c10d1d2ce62fe5d1701a4 | [
"BSD-2-Clause"
] | null | null | null | apps/donut_web/config/config.exs | ZURASTA/donut | b0546c041601e619e76c10d1d2ce62fe5d1701a4 | [
"BSD-2-Clause"
] | null | null | null | apps/donut_web/config/config.exs | ZURASTA/donut | b0546c041601e619e76c10d1d2ce62fe5d1701a4 | [
"BSD-2-Clause"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# NOTE(review): Mix.Config is deprecated in newer Elixir in favor of Config;
# keep in mind when upgrading.
use Mix.Config

# General application configuration
config :donut_web,
  namespace: Donut.Web

# Configures the endpoint
# NOTE(review): secret_key_base is committed to source control here; for
# production it should come from the environment — confirm deployment setup.
config :donut_web, Donut.Web.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "Y0tBCJ5RLXGv4336LnxMk0pZu75pa0ZYyp7bAHbwtPQCPRVcxak8VSbxi6ceBVOS",
  pubsub: [
    name: Donut.Web.PubSub,
    adapter: Phoenix.PubSub.PG2
  ]

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

config :donut_web, :generators,
  context_app: :donut

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 29.5 | 88 | 0.745763 |
08a96b8532cc92236b4507103c794c9860860b43 | 1,401 | exs | Elixir | test/layers/builder_queries/aggregate/aggregate_group_avg_test.exs | haskric/mongo_agile | 393e1e96f706e3580f6bac9ff7bcc081b0a2e4eb | [
"MIT"
] | 1 | 2020-12-30T18:30:32.000Z | 2020-12-30T18:30:32.000Z | test/layers/builder_queries/aggregate/aggregate_group_avg_test.exs | haskric/mongo_agile | 393e1e96f706e3580f6bac9ff7bcc081b0a2e4eb | [
"MIT"
] | null | null | null | test/layers/builder_queries/aggregate/aggregate_group_avg_test.exs | haskric/mongo_agile | 393e1e96f706e3580f6bac9ff7bcc081b0a2e4eb | [
"MIT"
] | null | null | null | defmodule MongoAgile.BuilderQueries.Aggregate.Groupavg.Test do
@moduledoc false
@doc """
I would like a style... similar this... (stage -> map)
aggregate "avg_views",
match: %{},
group: %{
"_id" => nil,
"avg_views" => %{
"$avg" => "$views"
}
}
  But maybe it's more flexible like this...
aggregate "", pipeline: [
stage1,
stage2,
...
]
"""
use ExUnit.Case
  defmodule DataSetExample do
    @moduledoc false

    import MongoAgile.Queries.AgilQuery

    # Query DSL bound to the "test_aggregate" collection on the :mongo repo.
    use MongoAgile.BuilderQueries,
      collection: "test_aggregate",
      pid_mongo: :mongo

    # Fetches every document in the collection.
    find "get_all", where: %{}

    # Single-stage pipeline: averages the "views" field across all documents.
    aggregate "avg_views",
      pipeline: [
        %{
          "$group" =>
            %{
              "_id" => nil,
              "avg_views" => %{
                "$avg" => "$views"
              }
            }
        }
      ]
  end
test "avg_views" do
result = DataSetExample.run_query("avg_views")
|> Enum.to_list()
avg_views = calcular_avg_views()
assert result == [%{"_id" => nil, "avg_views" => avg_views}]
end
def calcular_avg_views do
{:ok, docs} = DataSetExample.run_query("get_all")
{total, count} = docs
|> Enum.reduce({0, 0}, fn(doc, {acc_sum, acc_count}) ->
acc_sum = acc_sum + doc["views"]
acc_count = acc_count + 1
{acc_sum, acc_count}
end)
avg = total / count
avg
end
end
| 19.458333 | 64 | 0.538901 |
08a981480253b58e113bef1cca533317e7e73fef | 1,830 | ex | Elixir | lib/flawless/error.ex | colinsmetz/flawless | 87fd76f88758f16b42813b1e0f0f0ee8163d9185 | [
"MIT"
] | null | null | null | lib/flawless/error.ex | colinsmetz/flawless | 87fd76f88758f16b42813b1e0f0f0ee8163d9185 | [
"MIT"
] | null | null | null | lib/flawless/error.ex | colinsmetz/flawless | 87fd76f88758f16b42813b1e0f0f0ee8163d9185 | [
"MIT"
] | null | null | null | defmodule Flawless.Error do
@moduledoc """
Provides the Error struct, and helpers for building and converting errors.
"""
defstruct context: [], message: ""
alias Flawless.Context
import Flawless.Utils.Interpolation, only: [sigil_t: 2]
@type t_message :: String.t() | {String.t() | list(), Keyword.t()} | list(String.t())
@type t() :: %__MODULE__{
context: list(),
message: t_message
}
@spec new(t_message, Context.t() | list()) :: t()
def new(message, %Context{} = context) do
new(message, context.path)
end
def new(message, path) when is_list(path) do
%__MODULE__{
message: message,
context: path
}
end
@spec message_from_template(String.t(), Keyword.t()) :: String.t()
def message_from_template(message, opts) do
Flawless.Utils.Interpolation.from_template(message, opts)
end
@spec evaluate_messages(list(t())) :: list(t())
def evaluate_messages(errors) when is_list(errors) do
errors
|> Enum.map(fn
%{message: message} = error when is_binary(message) ->
error
%{message: {template, opts}} = error ->
%{error | message: message_from_template(template, opts)}
end)
end
@spec invalid_type_error(Flawless.Types.t(), any, Context.t()) :: Flawless.Error.t()
def invalid_type_error(expected_type, value, %Context{} = context) do
new(
{~t"Expected type: %{expected_type}, got: %{value}.",
expected_type: expected_type, value: inspect(value)},
context
)
end
@spec group_by_path(list(t())) :: list(t())
def group_by_path(errors) when is_list(errors) do
errors
|> Enum.group_by(& &1.context, & &1.message)
|> Enum.map(fn
{context, [message]} -> new(message, context)
{context, messages} -> new(messages, context)
end)
end
end
| 27.727273 | 87 | 0.636612 |
08a99926e8314ac79c2d40e088994212aa93fcd7 | 1,850 | exs | Elixir | clients/real_time_bidding/mix.exs | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/real_time_bidding/mix.exs | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/real_time_bidding/mix.exs | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.RealTimeBidding.Mixfile do
use Mix.Project
@version "0.2.0"
def project() do
[
app: :google_api_real_time_bidding,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/real_time_bidding"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
defp description() do
"""
Real-time Bidding API client library.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/real_time_bidding",
"Homepage" => "https://developers.google.com/authorized-buyers/apis/realtimebidding/reference/rest/"
}
]
end
end
| 27.61194 | 108 | 0.660541 |
08a9e315a3d28db129f72a5c6d4eeb55c3d04e9d | 2,126 | exs | Elixir | mix.exs | MaxG88/teslamate | 1456fa86fc6030e4beea8cd4a8fee45c3d7d636f | [
"MIT"
] | null | null | null | mix.exs | MaxG88/teslamate | 1456fa86fc6030e4beea8cd4a8fee45c3d7d636f | [
"MIT"
] | null | null | null | mix.exs | MaxG88/teslamate | 1456fa86fc6030e4beea8cd4a8fee45c3d7d636f | [
"MIT"
] | null | null | null | defmodule TeslaMate.MixProject do
use Mix.Project
def project do
[
app: :teslamate,
version: "1.19.0-dev",
elixir: "~> 1.10",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
releases: releases(),
deps: deps(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test,
ci: :test
]
]
end
def application do
[
mod: {TeslaMate.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp deps do
[
{:phoenix, "~> 1.4"},
{:phoenix_pubsub, "~> 1.1"},
{:phoenix_ecto, "~> 4.0"},
{:ecto_sql, "~> 3.0"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:gen_state_machine, "~> 2.0"},
{:ecto_enum, "~> 1.0"},
{:phoenix_live_view, "~> 0.1"},
{:floki, "~> 0.23", only: :test},
{:tortoise, "~> 0.9"},
{:excoveralls, "~> 0.10", only: :test},
{:mojito, "~> 0.5"},
{:srtm, "~> 0.5"},
{:fuse, "~> 2.4"},
{:mock, "~> 0.3", only: :test},
{:castore, "~> 0.1"},
{:ex_cldr, "~> 2.0"},
{:csv, "~> 2.3"},
{:timex, "~> 3.0"},
{:websockex, "~> 0.4"},
{:tzdata, "~> 1.0"}
]
end
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate", "test --no-start"],
ci: ["format --check-formatted", "test --raise"]
]
end
defp releases() do
[
teslamate: [
include_executables_for: [:unix],
applications: [runtime_tools: :permanent]
]
]
end
end
| 25.011765 | 71 | 0.488711 |
08aa05acc81adcc71dbfaa612625523f33adf403 | 2,630 | ex | Elixir | tree/dfs_traversal.ex | codecakes/elixir_days | 66fab751eef5bceff1ddf895f77464f3a9284bdf | [
"MIT"
] | null | null | null | tree/dfs_traversal.ex | codecakes/elixir_days | 66fab751eef5bceff1ddf895f77464f3a9284bdf | [
"MIT"
] | null | null | null | tree/dfs_traversal.ex | codecakes/elixir_days | 66fab751eef5bceff1ddf895f77464f3a9284bdf | [
"MIT"
] | null | null | null | defmodule Tree do
defstruct [:left, :right, :root]
end
defmodule MatchNullTraverse do
require Tree
defmacro __using__(_) do
Enum.map ~w(preorder inorder postorder), fn func ->
quote do
def unquote(:"#{func}")(%Tree{root: nil}), do: []
def unquote(:"#{func}")(%Tree{root: root, left: nil, right: nil}), do: [root]
case unquote(:"#{func}") do
:preorder ->
def unquote(:"#{func}")(%Tree{root: root, left: nil, right: %Tree{}=right}) do
[root] ++ unquote(:"#{func}")(right)
end
def unquote(:"#{func}")(%Tree{root: root, right: nil, left: %Tree{}=left}) do
[root] ++ unquote(:"#{func}")(left)
end
:inorder ->
def unquote(:"#{func}")(%Tree{root: root, left: nil, right: %Tree{}=right}) do
[root] ++ unquote(:"#{func}")(right)
end
def unquote(:"#{func}")(%Tree{root: root, right: nil, left: %Tree{}=left}) do
unquote(:"#{func}")(left) ++ [root]
end
:postorder ->
def unquote(:"#{func}")(%Tree{root: root, left: nil, right: %Tree{}=right}) do
unquote(:"#{func}")(right) ++ [root]
end
def unquote(:"#{func}")(%Tree{root: root, right: nil, left: %Tree{}=left}) do
unquote(:"#{func}")(left) ++ [root]
end
end
end
# |> Macro.expand_once(__ENV__) |> Macro.to_string |> IO.puts
end
end
end
defmodule TraverseTree do
use MatchNullTraverse
def preorder(%Tree{root: root, left: left, right: right}) do
[root] ++ preorder(left) ++ preorder(right)
end
def inorder(%Tree{root: root, left: left, right: right}) do
inorder(left) ++ [root] ++ inorder(right)
end
def postorder(%Tree{root: root, left: left, right: right}) do
postorder(left) ++ postorder(right) ++ [root]
end
end
defmodule Test do
require TraverseTree
def run do
t = %Tree{root: 4, left: %Tree{root: 2, left: %Tree{root: 1}, right: %Tree{root: 3}}, right: %Tree{root: 5, right: %Tree{root: 6}}}
IO.inspect TraverseTree.preorder(t)
IO.inspect TraverseTree.inorder(t)
IO.inspect TraverseTree.postorder(t)
end
end
Test.run | 35.066667 | 139 | 0.470342 |
08aa0e81de338b3ba68e25ab069beceb23e770d8 | 811 | ex | Elixir | getting-started/lib/getting_started_elixir_web/views/book_view.ex | renovate-bot/elixir-samples | 91da795ecdfac83eb6fcac63bc532da98c69d520 | [
"Apache-2.0"
] | 274 | 2017-08-25T06:39:51.000Z | 2022-03-15T21:03:27.000Z | getting-started/lib/getting_started_elixir_web/views/book_view.ex | renovate-bot/elixir-samples | 91da795ecdfac83eb6fcac63bc532da98c69d520 | [
"Apache-2.0"
] | 15 | 2017-10-03T17:05:48.000Z | 2021-11-23T00:33:23.000Z | getting-started/lib/getting_started_elixir_web/views/book_view.ex | renovate-bot/elixir-samples | 91da795ecdfac83eb6fcac63bc532da98c69d520 | [
"Apache-2.0"
] | 42 | 2017-08-28T20:08:47.000Z | 2022-01-18T07:51:02.000Z | defmodule GettingStartedElixirWeb.BookView do
use GettingStartedElixirWeb, :view
def book_title(%{properties: %{"title" => %{value: title}}}), do: title
def book_title(%{title: title}), do: title
def book_title(_), do: "Unknown"
def book_published_date(%{properties: %{"published_date" => %{value: date}}}), do: date
def book_published_date(%{published_date: date}), do: date
def book_published_date(_), do: "Unknown"
def book_author(%{properties: %{"author" => %{value: author}}}), do: author
def book_author(%{author: author}), do: author
def book_author(_), do: "Unknown"
def book_description(%{properties: %{"description" => %{value: description}}}), do: description
def book_description(%{description: description}), do: description
def book_description(_), do: "Unknown"
end
| 40.55 | 97 | 0.704069 |
08aa3007d06940a251c45fe635bdc560ad9d16c4 | 591 | ex | Elixir | talks-articles/languages-n-runtimes/elixir/book--programming-elixir-ge-1.6/sequence17/lib/stack/server.ex | abhishekkr/tutorials_as_code | f355dc62a5025b710ac6d4a6ac2f9610265fad54 | [
"MIT"
] | 37 | 2015-02-01T23:16:39.000Z | 2021-12-22T16:50:48.000Z | talks-articles/languages-n-runtimes/elixir/book--programming-elixir-ge-1.6/sequence17/lib/stack/server.ex | abhishekkr/tutorials_as_code | f355dc62a5025b710ac6d4a6ac2f9610265fad54 | [
"MIT"
] | 1 | 2017-03-02T04:55:48.000Z | 2018-01-14T10:51:11.000Z | talks-articles/languages-n-runtimes/elixir/book--programming-elixir-ge-1.6/sequence17/lib/stack/server.ex | abhishekkr/tutorials_as_code | f355dc62a5025b710ac6d4a6ac2f9610265fad54 | [
"MIT"
] | 15 | 2015-03-02T08:09:01.000Z | 2021-06-10T03:25:41.000Z | defmodule Stack.Server do
use GenServer
alias Stack.Impl
#### GenServer implementation
def init(list), do: {:ok, list}
def handle_call(:pop, _from, list) do
{val, new_list} = Impl.pop(list)
{:reply, val, new_list}
end
def handle_cast({:push, elem}, list), do: {:noreply, Impl.push(elem, list)}
def format_status(_reason, [ _pdict, state ]) do
[data: [{'State', "My current state is '#{inspect state}', :)"}]]
end
def terminate(reason, state) do
IO.puts("current state: #{inspect state}")
IO.puts("terminating due to #{inspect reason}")
end
end
| 24.625 | 77 | 0.646362 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.